add debug UI

Scott Pruett 2022-05-23 22:39:13 -04:00
parent c52a94a3e5
commit 25edcfe316
6 changed files with 187 additions and 83 deletions

View File

@@ -24,4 +24,4 @@ reqwest = { version = "0.11", features = ["json"] }
img_hash = "3"
csv = "1"
time = { version = "0.3", features = ["formatting"] }
time = { version = "0.3", features = ["formatting", "local-offset"] }

View File

@@ -1,6 +1,9 @@
{
"learned_images": {
"AAAAAAAAAAA=": ""
"bGxobGhkZAg=": "92",
"AAAAAAAAAAA=": "",
"bGzs3MhsbEA=": "90",
"bGzMbMjsbBA=": "98"
},
"learned_tracks": {}
}
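
The learned_images map pairs an image hash with the text that region should read as; an empty string marks a region learned as blank. A minimal sketch of the lookup this file feeds, assuming hash_image (imported in ocr.rs below) produces the base64-style keys seen above; the function name here is hypothetical:

    use image::RgbImage;

    // LearnedConfig and hash_image come from the crate's config and
    // image_processing modules.
    fn lookup_learned(learned: &LearnedConfig, region_image: &RgbImage) -> Option<String> {
        let key = hash_image(region_image); // e.g. "bGzs3MhsbEA="
        // Some("") means "learned as blank"; None means the hash is unknown.
        learned.learned_images.get(&key).cloned()
    }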

View File

@@ -123,6 +123,7 @@ fn add_saved_frame(
saved_frames: &mut HashMap<String, DebugOcrFrame>,
frame: &RgbImage,
region: &Region,
ocr_results: &HashMap<String, Option<String>>,
) {
let extracted = extract_and_filter(frame, region);
let retained =
@@ -135,11 +136,14 @@
image: retained,
rgb_image: extracted,
img_hash: hash,
recognized_text: ocr_results.get(&region.name).and_then(|p| p.clone()),
},
);
}
fn run_loop_once(capturer: &mut Capturer, state: &SharedAppState) -> Result<()> {
let frame = capture::get_frame(capturer)?;
let (config, learned_config, ocr_cache) = {
let locked = state.lock().unwrap();
(
@@ -148,11 +152,10 @@ fn run_loop_once(capturer: &mut Capturer, state: &SharedAppState) -> Result<()>
locked.ocr_cache.clone(),
)
};
let frame = capture::get_frame(capturer)?;
let ocr_results = ocr::ocr_all_regions(&frame, config.clone(), learned_config, ocr_cache);
if state.lock().unwrap().debug_frames {
let debug_frames = save_frames_from(&frame, config.as_ref());
let debug_frames = save_frames_from(&frame, config.as_ref(), &ocr_results);
state.lock().unwrap().saved_frames = debug_frames;
}
{
@@ -165,13 +168,17 @@ fn run_loop_once(capturer: &mut Capturer, state: &SharedAppState) -> Result<()>
Ok(())
}
pub fn save_frames_from(frame: &RgbImage, config: &Config) -> HashMap<String, DebugOcrFrame> {
pub fn save_frames_from(
frame: &RgbImage,
config: &Config,
ocr_results: &HashMap<String, Option<String>>,
) -> HashMap<String, DebugOcrFrame> {
let mut saved_frames = HashMap::new();
for region in &config.ocr_regions {
add_saved_frame(&mut saved_frames, frame, region);
add_saved_frame(&mut saved_frames, frame, region, ocr_results);
}
if let Some(track_region) = &config.track_region {
add_saved_frame(&mut saved_frames, frame, track_region);
add_saved_frame(&mut saved_frames, frame, track_region, ocr_results);
}
saved_frames
}
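
With the OCR results threaded through, each DebugOcrFrame now carries the recognized text for its region alongside the filtered image and hash. A hypothetical consumer of the returned map:

    // Sketch: dump what the debug UI will render for each region.
    let frames = save_frames_from(&frame, config.as_ref(), &ocr_results);
    for (name, f) in &frames {
        println!("{}: text={:?} hash={}", name, f.recognized_text, f.img_hash);
    }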

View File

@@ -9,18 +9,23 @@ mod state;
mod stats_writer;
use std::{
collections::HashMap,
ops::DerefMut,
sync::{Arc, Mutex},
thread,
time::Duration,
};
use config::{Config, LearnedConfig};
use control_loop::save_frames_from;
use eframe::{
egui::{self, Ui, Visuals},
emath::Vec2,
epaint::Color32,
};
use state::{AppState, RaceState, SharedAppState, LapState};
use egui_extras::RetainedImage;
use image_processing::to_png_bytes;
use state::{AppState, DebugOcrFrame, LapState, OcrCache, RaceState, SharedAppState};
use stats_writer::export_race_stats;
fn main() -> anyhow::Result<()> {
@@ -41,7 +46,7 @@ fn main() -> anyhow::Result<()> {
eframe::run_native(
"Supper OCR",
options,
Box::new(|_cc| Box::new(MyApp::new(state))),
Box::new(|_cc| Box::new(AppUi::new(state))),
);
}
@@ -92,27 +97,45 @@ fn label_time_delta(ui: &mut Ui, time: Duration, old: Option<Duration>) {
}
}
struct MyApp {
state: SharedAppState,
struct DebugLap {
screenshot: RetainedImage,
debug_regions: HashMap<String, DebugOcrFrame>,
}
#[derive(Default)]
struct UiState {
config_load_err: Option<String>,
hash_to_learn: String,
value_to_learn: String,
debug_lap: Option<DebugLap>,
}
impl MyApp {
#[derive(Default)]
struct AppUi {
state: SharedAppState,
ui_state: UiState,
}
impl AppUi {
pub fn new(state: SharedAppState) -> Self {
Self {
state,
config_load_err: None,
hash_to_learn: "".to_owned(),
value_to_learn: "".to_owned(),
..Default::default()
}
}
}
fn show_race_state(ui: &mut Ui, race_name: &str, race: &mut RaceState) {
fn show_race_state(
ui: &mut Ui,
ui_state: &mut UiState,
race_name: &str,
race: &mut RaceState,
config: Arc<Config>,
learned: Arc<LearnedConfig>,
ocr_cache: Arc<OcrCache>,
) {
egui::Grid::new(format!("race:{}", race_name)).show(ui, |ui| {
ui.label("Lap");
ui.label("Time");
@@ -156,14 +179,14 @@ fn show_race_state(ui: &mut Ui, race_name: &str, race: &mut RaceState) {
lap.striked = true;
}
if lap.debug {
if ui.button("Hide debug").clicked() {
lap.debug = false;
}
ui.end_row();
// TODO(DEBUG): ???
} else if ui.button("Debug").clicked( ){
lap.debug = true;
if ui.button("Debug").clicked() {
open_debug_lap(
ui_state,
lap,
config.clone(),
learned.clone(),
ocr_cache.clone(),
)
}
ui.end_row();
@@ -173,10 +196,110 @@ fn show_race_state(ui: &mut Ui, race_name: &str, race: &mut RaceState) {
});
}
impl eframe::App for MyApp {
fn show_debug_frames(ui: &mut Ui, debug_frames: &HashMap<String, DebugOcrFrame>) {
let mut screenshots_sorted: Vec<_> = debug_frames.iter().collect();
screenshots_sorted.sort_by_key(|(name, _)| *name);
for (name, debug_image) in screenshots_sorted {
ui.label(name);
if let Some(text) = &debug_image.recognized_text {
ui.label(text);
}
if ui
.button(&debug_image.img_hash)
.on_hover_text("Copy")
.clicked()
{
ui.output().copied_text = debug_image.img_hash.clone();
}
debug_image.image.show_max_size(ui, Vec2::new(300.0, 300.0));
ui.separator();
}
}
fn show_config_controls(ui: &mut Ui, ui_state: &mut UiState, state: &mut AppState) {
if ui.button("Reload config").clicked() {
match Config::load() {
Ok(c) => {
state.config = Arc::new(c);
ui_state.config_load_err = None;
}
Err(e) => {
ui_state.config_load_err = Some(format!("failed to load config: {:?}", e));
}
}
}
if let Some(e) = &ui_state.config_load_err {
ui.colored_label(Color32::RED, e);
}
ui.separator();
ui.label("Hash");
ui.text_edit_singleline(&mut ui_state.hash_to_learn);
ui.label("Value");
ui.text_edit_singleline(&mut ui_state.value_to_learn);
if ui.button("Learn").clicked() {
let mut learned_config = (*state.learned).clone();
learned_config.learned_images.insert(
ui_state.hash_to_learn.clone(),
ui_state.value_to_learn.clone(),
);
learned_config.save().unwrap();
state.learned = Arc::new(learned_config);
ui_state.hash_to_learn = "".to_owned();
ui_state.value_to_learn = "".to_owned();
}
}
fn open_debug_lap(
ui_state: &mut UiState,
lap: &LapState,
config: Arc<Config>,
learned: Arc<LearnedConfig>,
ocr_cache: Arc<OcrCache>,
) {
if let Some(screenshot) = &lap.screenshot {
let ocr_results = ocr::ocr_all_regions(
&screenshot,
config.clone(),
learned.clone(),
ocr_cache.clone(),
);
let debug_lap = DebugLap {
screenshot: RetainedImage::from_image_bytes("debug-lap", &to_png_bytes(screenshot))
.unwrap(),
debug_regions: save_frames_from(&screenshot, &*config, &ocr_results),
};
ui_state.debug_lap = Some(debug_lap);
}
}
impl eframe::App for AppUi {
fn update(&mut self, ctx: &egui::Context, _frame: &mut eframe::Frame) {
ctx.set_visuals(Visuals::dark());
let mut state = self.state.lock().unwrap();
let mut debug_lap_window = self.ui_state.debug_lap.is_some();
let window = egui::Window::new("Debug Lap").open(&mut debug_lap_window);
window.show(ctx, |ui| {
egui::ScrollArea::vertical().show(ui, |ui| {
if let Some(debug_lap) = &self.ui_state.debug_lap {
debug_lap
.screenshot
.show_max_size(ui, Vec2::new(800.0, 600.0));
ui.separator();
if let Some(debug_lap) = &self.ui_state.debug_lap {
show_debug_frames(ui, &debug_lap.debug_regions);
}
show_config_controls(ui, &mut self.ui_state, state.deref_mut());
}
});
});
if !debug_lap_window {
self.ui_state.debug_lap = None;
}
ctx.set_visuals(Visuals::dark());
egui::SidePanel::left("frame").show(ctx, |ui| {
if let Some(frame) = &state.last_frame {
ui.heading("Race data");
@@ -202,30 +325,39 @@ impl eframe::App for MyApp {
));
}
if state.debug_frames {
ui.separator();
ui.heading("Raw OCR results");
let mut raw_data_sorted: Vec<_> = state.raw_data.iter().collect();
raw_data_sorted.sort();
for (key, val) in raw_data_sorted {
ui.label(format!("{}: {:?}", key, val));
}
}
ui.separator();
ui.checkbox(&mut state.debug_frames, "Debug OCR regions");
});
egui::CentralPanel::default().show(ctx, |ui| {
egui::ScrollArea::vertical().show(ui, |ui| {
let config = state.config.clone();
let learned = state.learned.clone();
let ocr_cache = state.ocr_cache.clone();
if let Some(race) = &mut state.current_race {
ui.heading("Current Race");
show_race_state(ui, "current", race);
show_race_state(
ui,
&mut self.ui_state,
"current",
race,
config.clone(),
learned.clone(),
ocr_cache.clone(),
);
}
let len = state.past_races.len();
for (i, race) in state.past_races.iter_mut().enumerate() {
ui.separator();
ui.heading(format!("Race #{}", len - i));
show_race_state(ui, &format!("{}", i), race);
show_race_state(
ui,
&mut self.ui_state,
&format!("{}: {}", i, race.name()),
race,
config.clone(),
learned.clone(),
ocr_cache.clone(),
);
if let Some(img) = &race.screencap {
img.show_max_size(ui, Vec2::new(600.0, 500.0));
}
@@ -260,48 +392,8 @@ impl eframe::App for MyApp {
if state.debug_frames {
egui::SidePanel::right("screenshots").show(ctx, |ui| {
egui::ScrollArea::vertical().show(ui, |ui| {
let mut screenshots_sorted: Vec<_> = state.saved_frames.iter().collect();
screenshots_sorted.sort_by_key(|(name, _)| *name);
for (name, image) in screenshots_sorted {
ui.label(name);
if ui.button(&image.img_hash).on_hover_text("Copy").clicked() {
ui.output().copied_text = image.img_hash.clone();
}
image.image.show_max_size(ui, ui.available_size());
}
if ui.button("Reload config").clicked() {
match Config::load() {
Ok(c) => {
state.config = Arc::new(c);
self.config_load_err = None;
}
Err(e) => {
self.config_load_err =
Some(format!("failed to load config: {:?}", e));
}
}
}
if let Some(e) = &self.config_load_err {
ui.colored_label(Color32::RED, e);
}
ui.separator();
ui.label("Hash");
ui.text_edit_singleline(&mut self.hash_to_learn);
ui.label("Value");
ui.text_edit_singleline(&mut self.value_to_learn);
if ui.button("Learn").clicked() {
let mut learned_config = (*state.learned).clone();
learned_config
.learned_images
.insert(self.hash_to_learn.clone(), self.value_to_learn.clone());
learned_config.save().unwrap();
state.learned = Arc::new(learned_config);
self.hash_to_learn = "".to_owned();
self.value_to_learn = "".to_owned();
}
show_debug_frames(ui, &state.saved_frames);
show_config_controls(ui, &mut self.ui_state, state.deref_mut());
});
});
}
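
The Debug Lap window uses egui's standard close-button pattern, visible at the top of update(): Window::open takes a &mut bool that egui clears when the user clicks the window's close button, and dropping the DebugLap on the next frame keeps it closed. A condensed sketch of that pattern:

    let mut open = self.ui_state.debug_lap.is_some();
    egui::Window::new("Debug Lap").open(&mut open).show(ctx, |ui| {
        // ... render screenshot, per-region frames, config controls ...
    });
    if !open {
        // Window was closed by the user; drop the cached debug data.
        self.ui_state.debug_lap = None;
    }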

View File

@@ -9,7 +9,7 @@ use serde::{Deserialize, Serialize};
use crate::{
config::{Config, LearnedConfig},
image_processing::{extract_and_filter, hash_image},
image_processing::{extract_and_filter, hash_image}, state::OcrCache,
};
#[derive(Serialize, Deserialize, Debug)]
@@ -82,7 +82,7 @@ pub async fn ocr_all_regions(
image: &RgbImage,
config: Arc<Config>,
learned: Arc<LearnedConfig>,
ocr_cache: Arc<RwLock<HashMap<String, Option<String>>>>,
ocr_cache: Arc<OcrCache>,
) -> HashMap<String, Option<String>> {
let results = Arc::new(Mutex::new(HashMap::new()));
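
The OcrCache alias is an RwLock over a HashMap, so many readers can consult it concurrently while writes are exclusive. A minimal sketch of the access pattern, assuming a std::sync::RwLock (the import isn't shown in this hunk) and hypothetical helper names:

    fn cached_ocr(cache: &OcrCache, key: &str) -> Option<Option<String>> {
        // Outer Option: cache hit or miss; inner: OCR produced text or not.
        cache.read().unwrap().get(key).cloned()
    }

    fn store_ocr(cache: &OcrCache, key: String, value: Option<String>) {
        cache.write().unwrap().insert(key, value);
    }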

View File

@@ -95,8 +95,10 @@ pub struct DebugOcrFrame {
pub image: RetainedImage,
pub rgb_image: RgbImage,
pub img_hash: String,
pub recognized_text: Option<String>,
}
pub type OcrCache = RwLock<HashMap<String, Option<String>>>;
#[derive(Default)]
pub struct AppState {
pub raw_data: HashMap<String, Option<String>>,
@@ -114,7 +116,7 @@ pub struct AppState {
pub config: Arc<Config>,
pub learned: Arc<LearnedConfig>,
pub ocr_cache: Arc<RwLock<HashMap<String, Option<String>>>>,
pub ocr_cache: Arc<OcrCache>,
}
pub type SharedAppState = Arc<Mutex<AppState>>;
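
SharedAppState is the usual Arc<Mutex<_>> handle shared between the capture loop and the UI thread; both sides clone the Arc and take short-lived locks, as seen throughout this diff. A minimal sketch (imports as in state.rs):

    let state: SharedAppState = Arc::new(Mutex::new(AppState::default()));
    let worker = state.clone();
    std::thread::spawn(move || {
        // Keep lock scopes short so the UI thread isn't blocked.
        worker.lock().unwrap().debug_frames = true;
    });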