fix clippy lints

Scott Pruett 2022-05-23 20:58:28 -04:00
parent 4a499fcde9
commit c52a94a3e5
3 changed files with 54 additions and 42 deletions

@@ -11,9 +11,10 @@ use scrap::{Capturer, Display};
 use crate::{
     capture,
-    image_processing::{self, hash_image, Region, extract_and_filter},
+    config::Config,
+    image_processing::{self, extract_and_filter, hash_image, Region},
     ocr,
-    state::{AppState, DebugOcrFrame, LapState, RaceState, SharedAppState}, config::Config,
+    state::{AppState, DebugOcrFrame, LapState, RaceState, SharedAppState},
 };

 fn is_finished_lap(state: &AppState, frame: &LapState) -> bool {
@@ -67,15 +68,17 @@ fn handle_new_frame(state: &mut AppState, frame: LapState, image: RgbImage) {
         state.frames_without_lap = 0;
         if state.current_race.is_none() {
-            let mut race = RaceState::default();
-            race.screencap = Some(
-                RetainedImage::from_image_bytes(
-                    "screencap",
-                    &image_processing::to_png_bytes(&image),
-                )
-                .expect("failed to save screenshot"),
-            );
-            race.race_time = Some(SystemTime::now());
+            let race = RaceState {
+                screencap: Some(
+                    RetainedImage::from_image_bytes(
+                        "screencap",
+                        &image_processing::to_png_bytes(&image),
+                    )
+                    .expect("failed to save screenshot"),
+                ),
+                race_time: Some(SystemTime::now()),
+                ..Default::default()
+            };
             state.current_race = Some(race);
         }
     } else {

@@ -24,9 +24,11 @@ use state::{AppState, RaceState, SharedAppState, LapState};
 use stats_writer::export_race_stats;

 fn main() -> anyhow::Result<()> {
-    let mut app_state = AppState::default();
-    app_state.config = Arc::new(Config::load().unwrap());
-    app_state.learned = Arc::new(LearnedConfig::load().unwrap());
+    let app_state = AppState {
+        config: Arc::new(Config::load().unwrap()),
+        learned: Arc::new(LearnedConfig::load().unwrap()),
+        ..Default::default()
+    };
     let state = Arc::new(Mutex::new(app_state));
     {
         let state = state.clone();
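The two struct-initialization changes above (the RaceState construction in the first file and the AppState construction here) are the fix clippy's field_reassign_with_default lint asks for: instead of building a value with Default::default() and assigning fields afterwards, the fields are given in the struct literal and the rest is filled in with struct-update syntax, which also lets the binding drop its mut. A minimal sketch of the pattern, using a made-up Settings struct rather than this repository's types:

    #[derive(Default)]
    struct Settings {
        retries: u32,
        verbose: bool,
    }

    fn build() -> Settings {
        // Flagged by clippy::field_reassign_with_default:
        //     let mut s = Settings::default();
        //     s.retries = 3;
        //     s
        // Preferred: set the fields up front, take the rest from Default.
        Settings {
            retries: 3,
            ..Default::default()
        }
    }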
@@ -150,11 +152,9 @@ fn show_race_state(ui: &mut Ui, race_name: &str, race: &mut RaceState) {
         if lap.striked {
             ui.colored_label(Color32::RED, "Striked from the record");
-        } else {
-            if ui.button("Strike").clicked() {
-                lap.striked = true;
-            }
+        } else if ui.button("Strike").clicked() {
+            lap.striked = true;
         }

         if lap.debug {
             if ui.button("Hide debug").clicked() {
@@ -162,11 +162,9 @@ fn show_race_state(ui: &mut Ui, race_name: &str, race: &mut RaceState) {
             }
             ui.end_row();
             // TODO(DEBUG): ???
-        } else {
-            if ui.button("Debug").clicked( ){
-                lap.debug = true;
-            }
+        } else if ui.button("Debug").clicked( ){
+            lap.debug = true;
         }
         ui.end_row();
     }
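Both `else { if ... }` blocks above are collapsed into `else if`, which is what clippy's collapsible_else_if lint suggests; behaviour is unchanged and one level of nesting goes away. A small sketch with a hypothetical helper (names are not from this repository):

    fn maybe_strike(already_striked: bool, clicked: bool, lap_striked: &mut bool) {
        if already_striked {
            // already marked, nothing to do
        } else if clicked {
            // was: else { if clicked { *lap_striked = true; } }
            *lap_striked = true;
        }
    }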
@@ -263,7 +261,7 @@ impl eframe::App for MyApp {
         egui::SidePanel::right("screenshots").show(ctx, |ui| {
             egui::ScrollArea::vertical().show(ui, |ui| {
                 let mut screenshots_sorted: Vec<_> = state.saved_frames.iter().collect();
-                screenshots_sorted.sort_by_key(|(name, _)| name.clone());
+                screenshots_sorted.sort_by_key(|(name, _)| *name);
                 for (name, image) in screenshots_sorted {
                     ui.label(name);
                     if ui.button(&image.img_hash).on_hover_text("Copy").clicked() {
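In the sort_by_key change above, saved_frames is iterated and collected, so the closure sees tuples of references and `name` is itself a reference to a reference (a `&&String` if the map is keyed by String, which is an assumption here); calling `.clone()` on that only copies the inner `&String`, which clippy flags (its clone_on_copy / clone_double_ref family), and `*name` yields the same key without the misleading clone. A small sketch under that assumption:

    use std::collections::HashMap;

    // Hypothetical stand-in for state.saved_frames: a String-keyed map.
    fn sorted_entries(frames: &HashMap<String, u32>) -> Vec<(&String, &u32)> {
        let mut entries: Vec<_> = frames.iter().collect();
        // `name` is a &&String; `*name` copies out the &String key,
        // which is all sort_by_key needs (previously `name.clone()`).
        entries.sort_by_key(|(name, _)| *name);
        entries
    }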

@@ -9,7 +9,7 @@ use serde::{Deserialize, Serialize};
 use crate::{
     config::{Config, LearnedConfig},
-    image_processing::{hash_image, extract_and_filter},
+    image_processing::{extract_and_filter, hash_image},
 };

 #[derive(Serialize, Deserialize, Debug)]
@@ -49,6 +49,34 @@ async fn run_ocr(image: &RgbImage, url: &str) -> Result<Option<String>> {
     Ok(result)
 }

+async fn run_ocr_cached(
+    ocr_cache: Arc<RwLock<HashMap<String, Option<String>>>>,
+    hash: String,
+    region: &crate::image_processing::Region,
+    config: Arc<Config>,
+    filtered_image: image::ImageBuffer<image::Rgb<u8>, Vec<u8>>,
+) -> Option<String> {
+    let cached = {
+        let locked = ocr_cache.read().unwrap();
+        locked.get(&hash).cloned()
+    };
+    let use_cache = region.use_ocr_cache.unwrap_or(true) && config.use_ocr_cache.unwrap_or(true);
+    if let Some(cached) = cached {
+        if use_cache {
+            return cached;
+        }
+    }
+    match run_ocr(&filtered_image, &config.ocr_server_endpoint).await {
+        Ok(v) => {
+            if use_cache {
+                ocr_cache.write().unwrap().insert(hash.clone(), v.clone());
+            }
+            v
+        }
+        Err(_) => None,
+    }
+}
+
 #[tokio::main(flavor = "current_thread")]
 pub async fn ocr_all_regions(
     image: &RgbImage,
@@ -72,24 +100,7 @@ pub async fn ocr_all_regions(
         let value = if let Some(learned_value) = learned.learned_images.get(&hash) {
             Some(learned_value.clone())
         } else {
-            let cached = {
-                let locked = ocr_cache.read().unwrap();
-                locked.get(&hash).cloned()
-            };
-            let use_cache = region.use_ocr_cache.unwrap_or(true) && config.use_ocr_cache.unwrap_or(true);
-            if cached.is_some() && use_cache {
-                cached.unwrap()
-            } else {
-                match run_ocr(&filtered_image, &config.ocr_server_endpoint).await {
-                    Ok(v) => {
-                        if config.use_ocr_cache.unwrap_or(true) {
-                            ocr_cache.write().unwrap().insert(hash.clone(), v.clone());
-                        }
-                        v
-                    }
-                    Err(_) => None
-                }
-            }
+            run_ocr_cached(ocr_cache, hash, &region, config, filtered_image).await
         };
         results.lock().unwrap().insert(region.name, value);
     }));
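Besides moving the cache lookup into its own function, run_ocr_cached drops the `if cached.is_some() && use_cache { cached.unwrap() }` shape in favour of `if let` with an early return, which is the rewrite clippy's unnecessary_unwrap lint points at (and it reuses use_cache for the insert instead of re-reading the config). A minimal sketch of that check-then-unwrap rewrite, with a hypothetical string cache:

    use std::collections::HashMap;

    // Hypothetical lookup illustrating the is_some()/unwrap() -> if let rewrite.
    fn cached_value(cache: &HashMap<String, String>, key: &str, use_cache: bool) -> Option<String> {
        let cached = cache.get(key).cloned();
        // Was: if cached.is_some() && use_cache { return Some(cached.unwrap()); }
        if let Some(hit) = cached {
            if use_cache {
                return Some(hit);
            }
        }
        None // the real code falls through to running OCR here
    }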