fix clippy lints

parent 4a499fcde9
commit c52a94a3e5
@@ -11,9 +11,10 @@ use scrap::{Capturer, Display};
 
 use crate::{
     capture,
-    image_processing::{self, hash_image, Region, extract_and_filter},
+    config::Config,
+    image_processing::{self, extract_and_filter, hash_image, Region},
     ocr,
-    state::{AppState, DebugOcrFrame, LapState, RaceState, SharedAppState}, config::Config,
+    state::{AppState, DebugOcrFrame, LapState, RaceState, SharedAppState},
 };
 
 fn is_finished_lap(state: &AppState, frame: &LapState) -> bool {
@@ -67,15 +68,17 @@ fn handle_new_frame(state: &mut AppState, frame: LapState, image: RgbImage) {
         state.frames_without_lap = 0;
 
         if state.current_race.is_none() {
-            let mut race = RaceState::default();
-            race.screencap = Some(
-                RetainedImage::from_image_bytes(
-                    "screencap",
-                    &image_processing::to_png_bytes(&image),
-                )
-                .expect("failed to save screenshot"),
-            );
-            race.race_time = Some(SystemTime::now());
+            let race = RaceState {
+                screencap: Some(
+                    RetainedImage::from_image_bytes(
+                        "screencap",
+                        &image_processing::to_png_bytes(&image),
+                    )
+                    .expect("failed to save screenshot"),
+                ),
+                race_time: Some(SystemTime::now()),
+                ..Default::default()
+            };
             state.current_race = Some(race);
         }
     } else {
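The RaceState change above (and the AppState change in src/main.rs below) looks like a fix for clippy's field_reassign_with_default lint, which fires when a value is built with Default::default() and then has fields assigned one by one; struct-update syntax expresses the same thing without the intermediate mutable binding. A minimal standalone sketch of the pattern, using a hypothetical Settings type:

    #[derive(Default)]
    struct Settings {
        retries: u32, // hypothetical fields, only to illustrate the lint
        verbose: bool,
        endpoint: String,
    }

    fn build_settings() -> Settings {
        // clippy::field_reassign_with_default warns on this shape:
        //     let mut s = Settings::default();
        //     s.retries = 3;
        //     s.verbose = true;
        // The suggested form sets the interesting fields up front and fills
        // the rest from Default with struct-update syntax:
        Settings {
            retries: 3,
            verbose: true,
            ..Default::default()
        }
    }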
src/main.rs (22 changed lines)

@@ -24,9 +24,11 @@ use state::{AppState, RaceState, SharedAppState, LapState};
 use stats_writer::export_race_stats;
 
 fn main() -> anyhow::Result<()> {
-    let mut app_state = AppState::default();
-    app_state.config = Arc::new(Config::load().unwrap());
-    app_state.learned = Arc::new(LearnedConfig::load().unwrap());
+    let app_state = AppState {
+        config: Arc::new(Config::load().unwrap()),
+        learned: Arc::new(LearnedConfig::load().unwrap()),
+        ..Default::default()
+    };
     let state = Arc::new(Mutex::new(app_state));
     {
         let state = state.clone();
@@ -150,10 +152,8 @@ fn show_race_state(ui: &mut Ui, race_name: &str, race: &mut RaceState) {
 
         if lap.striked {
             ui.colored_label(Color32::RED, "Striked from the record");
-        } else {
-            if ui.button("Strike").clicked() {
-                lap.striked = true;
-            }
+        } else if ui.button("Strike").clicked() {
+            lap.striked = true;
         }
 
         if lap.debug {
@@ -162,10 +162,8 @@ fn show_race_state(ui: &mut Ui, race_name: &str, race: &mut RaceState) {
             }
             ui.end_row();
             // TODO(DEBUG): ???
-        } else {
-            if ui.button("Debug").clicked( ){
-                lap.debug = true;
-            }
+        } else if ui.button("Debug").clicked( ){
+            lap.debug = true;
         }
 
         ui.end_row();
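Both of the button branches above appear to address clippy's collapsible_else_if lint: an else block whose only statement is another if can be flattened into else if. A minimal sketch of the shape, with illustrative names:

    fn speed_label(speed: u32) -> &'static str {
        // clippy::collapsible_else_if flags the nested form:
        //     if speed > 100 {
        //         "fast"
        //     } else {
        //         if speed > 50 { "medium" } else { "slow" }
        //     }
        // Collapsing the inner if into `else if` removes a nesting level:
        if speed > 100 {
            "fast"
        } else if speed > 50 {
            "medium"
        } else {
            "slow"
        }
    }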
@@ -263,7 +261,7 @@ impl eframe::App for MyApp {
         egui::SidePanel::right("screenshots").show(ctx, |ui| {
             egui::ScrollArea::vertical().show(ui, |ui| {
                 let mut screenshots_sorted: Vec<_> = state.saved_frames.iter().collect();
-                screenshots_sorted.sort_by_key(|(name, _)| name.clone());
+                screenshots_sorted.sort_by_key(|(name, _)| *name);
                 for (name, image) in screenshots_sorted {
                     ui.label(name);
                     if ui.button(&image.img_hash).on_hover_text("Copy").clicked() {
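The sort_by_key change above replaces name.clone() with a plain dereference; assuming the map keys are Copy (for example &'static str), this is the shape clippy's clone_on_copy lint points at, since cloning a Copy value is just a more expensive way to copy it. A minimal sketch under that assumption, with hypothetical names:

    use std::collections::HashMap;

    fn sorted_entries(frames: &HashMap<&'static str, u32>) -> Vec<(&'static str, u32)> {
        let mut entries: Vec<_> = frames.iter().map(|(name, count)| (*name, *count)).collect();
        // With a Copy key, dereferencing is enough; calling .clone() on a Copy
        // value is the pattern clippy::clone_on_copy warns about.
        entries.sort_by_key(|(name, _)| *name);
        entries
    }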
src/ocr.rs (49 changed lines)

@@ -9,7 +9,7 @@ use serde::{Deserialize, Serialize};
 
 use crate::{
     config::{Config, LearnedConfig},
-    image_processing::{hash_image, extract_and_filter},
+    image_processing::{extract_and_filter, hash_image},
 };
 
 #[derive(Serialize, Deserialize, Debug)]
@@ -49,6 +49,34 @@ async fn run_ocr(image: &RgbImage, url: &str) -> Result<Option<String>> {
     Ok(result)
 }
 
+async fn run_ocr_cached(
+    ocr_cache: Arc<RwLock<HashMap<String, Option<String>>>>,
+    hash: String,
+    region: &crate::image_processing::Region,
+    config: Arc<Config>,
+    filtered_image: image::ImageBuffer<image::Rgb<u8>, Vec<u8>>,
+) -> Option<String> {
+    let cached = {
+        let locked = ocr_cache.read().unwrap();
+        locked.get(&hash).cloned()
+    };
+    let use_cache = region.use_ocr_cache.unwrap_or(true) && config.use_ocr_cache.unwrap_or(true);
+    if let Some(cached) = cached {
+        if use_cache {
+            return cached;
+        }
+    }
+    match run_ocr(&filtered_image, &config.ocr_server_endpoint).await {
+        Ok(v) => {
+            if use_cache {
+                ocr_cache.write().unwrap().insert(hash.clone(), v.clone());
+            }
+            v
+        }
+        Err(_) => None,
+    }
+}
+
 #[tokio::main(flavor = "current_thread")]
 pub async fn ocr_all_regions(
     image: &RgbImage,
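The new run_ocr_cached helper above holds the RwLock read guard only long enough to clone any cached entry, releases it before awaiting the OCR request, and then takes the write guard to store the result. A minimal standalone sketch of that read-clone, compute-unlocked, write-back pattern (the cache shape and compute function here are hypothetical):

    use std::collections::HashMap;
    use std::sync::{Arc, RwLock};

    fn cached_compute(
        cache: &Arc<RwLock<HashMap<String, u64>>>,
        key: &str,
        compute: impl Fn(&str) -> u64,
    ) -> u64 {
        // Take the read lock briefly and copy the hit out so the guard is not
        // held across the expensive call below.
        let hit = {
            let locked = cache.read().unwrap();
            locked.get(key).copied()
        };
        if let Some(value) = hit {
            return value;
        }
        // Compute without holding any lock, then take the write lock to store it.
        let value = compute(key);
        cache.write().unwrap().insert(key.to_string(), value);
        value
    }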
@@ -72,24 +100,7 @@ pub async fn ocr_all_regions(
         let value = if let Some(learned_value) = learned.learned_images.get(&hash) {
             Some(learned_value.clone())
         } else {
-            let cached = {
-                let locked = ocr_cache.read().unwrap();
-                locked.get(&hash).cloned()
-            };
-            let use_cache = region.use_ocr_cache.unwrap_or(true) && config.use_ocr_cache.unwrap_or(true);
-            if cached.is_some() && use_cache {
-                cached.unwrap()
-            } else {
-                match run_ocr(&filtered_image, &config.ocr_server_endpoint).await {
-                    Ok(v) => {
-                        if config.use_ocr_cache.unwrap_or(true) {
-                            ocr_cache.write().unwrap().insert(hash.clone(), v.clone());
-                        }
-                        v
-                    }
-                    Err(_) => None
-                }
-            }
+            run_ocr_cached(ocr_cache, hash, &region, config, filtered_image).await
         };
         results.lock().unwrap().insert(region.name, value);
     }));