more testing
@@ -53,6 +53,13 @@
       "height": 43,
       "threshold": null,
       "use_ocr_cache": null
     },
+    {
+      "name": "position",
+      "x": 3,
+      "y": 52,
+      "width": 107,
+      "height": 79
+    }
   ],
   "track_region": {
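Note on the config shape: the new "position" entry omits the optional "threshold" and "use_ocr_cache" keys that the neighbouring region sets to null. A minimal sketch of what one region entry could deserialize into, assuming serde/serde_json with the derive feature; the Region struct name and the threshold type are hypothetical, not this repo's actual config code:

use serde::Deserialize;

// Hypothetical mirror of one entry in the regions array above. Optional keys
// may be missing ("position") or explicitly null (the previous entry); an
// Option<T> field accepts both and ends up as None either way.
#[derive(Debug, Deserialize)]
struct Region {
    name: String,
    x: u32,
    y: u32,
    width: u32,
    height: u32,
    #[serde(default)]
    threshold: Option<f64>,
    #[serde(default)]
    use_ocr_cache: Option<bool>,
}

fn main() {
    let json = r#"{ "name": "position", "x": 3, "y": 52, "width": 107, "height": 79 }"#;
    let region: Region = serde_json::from_str(json).unwrap();
    assert_eq!(region.name, "position");
    assert!(region.threshold.is_none());
}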
@@ -63,13 +63,13 @@ fn merge_frames(prev: &LapState, next: &LapState) -> LapState {
         ..Default::default()
     }
 }
-fn handle_new_frame(state: &mut AppState, frame: LapState, image: RgbImage) {
-    if frame.lap_time.is_some() {
-        state.last_frame = Some(frame.clone());
+fn handle_new_frame(state: &mut AppState, lap_state: LapState, image: &RgbImage) {
+    if lap_state.lap_time.is_some() {
+        state.last_frame = Some(lap_state.clone());
         state.frames_without_lap = 0;

         if state.current_race.is_none() {
-            let track_hash = get_track_hash(state.config.as_ref(), &image);
+            let track_hash = get_track_hash(state.config.as_ref(), image);
             let track_name = state
                 .learned_tracks
                 .infer_track(&track_hash, state.config.as_ref());
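Note: switching image to &RgbImage (and renaming frame to the clearer lap_state) means the caller keeps ownership of the decoded screenshot; handle_new_frame only reads it, passing references on to get_track_hash and the PNG encoding below, so no per-frame clone or move of the pixel buffer is needed.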
@@ -78,7 +78,7 @@ fn handle_new_frame(state: &mut AppState, frame: LapState, image: RgbImage) {
                 screencap: Some(
                     RetainedImage::from_image_bytes(
                         "screencap",
-                        &image_processing::to_png_bytes(&image),
+                        &image_processing::to_png_bytes(image),
                     )
                     .expect("failed to save screenshot"),
                 ),
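For context, a helper like the to_png_bytes called here can be written with the image crate alone. This is a sketch under the assumption that image_processing::to_png_bytes does roughly this; the exact write_to signature varies between image crate versions:

use std::io::Cursor;

use image::{ImageFormat, RgbImage};

// Sketch of a PNG-encoding helper; the repo's image_processing::to_png_bytes
// may differ in error handling or encoder settings.
fn to_png_bytes(image: &RgbImage) -> Vec<u8> {
    let mut bytes = Vec::new();
    image
        .write_to(&mut Cursor::new(&mut bytes), ImageFormat::Png)
        .expect("PNG encoding of an in-memory buffer should not fail");
    bytes
}

fn main() {
    let image = RgbImage::new(4, 4);
    let png = to_png_bytes(&image);
    assert_eq!(&png[1..4], b"PNG"); // PNG magic bytes follow the 0x89 prefix
}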
@@ -100,12 +100,12 @@ fn handle_new_frame(state: &mut AppState, frame: LapState, image: RgbImage) {
         }
     }

-    if is_finished_lap(state, &frame) {
-        let mut merged = merge_frames(state.buffered_frames.back().unwrap(), &frame);
+    if is_finished_lap(state, &lap_state) {
+        let mut merged = merge_frames(state.buffered_frames.back().unwrap(), &lap_state);
         if let Some(lap) = &merged.lap {
             merged.lap = Some(lap - 1);
         }
-        merged.screenshot = Some(to_png_bytes(&image));
+        merged.screenshot = Some(to_png_bytes(image));

         if let Some(race) = state.current_race.as_mut() {
             if let Some(prev_lap) = race.laps.last() {
@@ -122,7 +122,7 @@ fn handle_new_frame(state: &mut AppState, frame: LapState, image: RgbImage) {
         }
     }

-    state.buffered_frames.push_back(frame);
+    state.buffered_frames.push_back(lap_state);
     if state.buffered_frames.len() >= 20 {
         state.buffered_frames.pop_front();
     }
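The buffering above is a simple bounded history: push the newest LapState, then drop the oldest once the deque reaches 20 entries (so it settles at 19 after the pop). A generic sketch of that pattern, with a hypothetical MAX_BUFFERED constant in place of the inline 20:

use std::collections::VecDeque;

// Hypothetical capacity constant; the diff above hard-codes 20 inline.
const MAX_BUFFERED: usize = 20;

// Keep only the most recent entries, mirroring the push_back/pop_front logic above.
fn push_bounded<T>(buffer: &mut VecDeque<T>, item: T) {
    buffer.push_back(item);
    if buffer.len() >= MAX_BUFFERED {
        buffer.pop_front();
    }
}

fn main() {
    let mut frames: VecDeque<u32> = VecDeque::new();
    for i in 0..100 {
        push_bounded(&mut frames, i);
    }
    assert_eq!(frames.len(), MAX_BUFFERED - 1); // 19: the pop fires as soon as len hits 20
}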
@@ -176,9 +176,7 @@ pub fn ocr_all_regions(
     results
 }

-fn run_loop_once(capturer: &mut Capturer, state: &SharedAppState) -> Result<()> {
-    let frame = capture::get_frame(capturer)?;
-
+fn analyze_frame(frame: &RgbImage, state: &SharedAppState) {
     let (ocr_db, config, should_sample) = {
         let locked = state.lock().unwrap();
         (
@@ -187,19 +185,23 @@ fn run_loop_once(capturer: &mut Capturer, state: &SharedAppState) -> Result<()>
             locked.should_sample_ocr_data,
         )
     };
-    let ocr_results = ocr_all_regions(ocr_db.as_ref(), &frame, config.as_ref(), should_sample);
+    let ocr_results = ocr_all_regions(ocr_db.as_ref(), frame, config.as_ref(), should_sample);

     if state.lock().unwrap().debug_frames {
-        let debug_frames = save_frames_from(&frame, config.as_ref(), &ocr_results);
+        let debug_frames = save_frames_from(frame, config.as_ref(), &ocr_results);
         state.lock().unwrap().saved_frames = debug_frames;
     }
     {
         let mut state = state.lock().unwrap();
         let parsed = LapState::parse(&ocr_results);

         state.raw_data = ocr_results;
         handle_new_frame(&mut state, parsed, frame);
     }
-    Ok(())
 }
+
+fn run_loop_once(capturer: &mut Capturer, state: &SharedAppState) -> Result<()> {
+    let frame = capture::get_frame(capturer)?;
+    analyze_frame(&frame, state);
+    Ok(())
+}
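Note: pulling the OCR-and-state-update work out of run_loop_once into analyze_frame separates frame capture from frame analysis. That split is what enables the test added in the next hunk: the test can hand analyze_frame a decoded PNG instead of needing a live Capturer.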
@@ -228,3 +230,48 @@ pub fn run_control_loop(state: SharedAppState) {
         thread::sleep(Duration::from_millis(interval));
     }
 }
+
+#[cfg(test)]
+mod test {
+    use std::{
+        sync::{Arc, Mutex},
+        time::Duration,
+    };
+
+    use crate::{
+        config::load_config_or_make_default,
+        ocr_db::OcrDatabase,
+        state::{AppState, SharedAppState},
+    };
+
+    use super::analyze_frame;
+
+    fn make_test_state() -> SharedAppState {
+        let state = AppState {
+            config: Arc::new(
+                load_config_or_make_default("src/configs/config.default.json", "").unwrap(),
+            ),
+            ocr_db: Arc::new(OcrDatabase::load().unwrap()),
+            ..Default::default()
+        };
+        Arc::new(Mutex::new(state))
+    }
+
+    #[test]
+    fn test_basic_analysis() {
+        let state = make_test_state();
+        let image = image::load_from_memory(include_bytes!("test_data/test-full-1.png")).unwrap();
+        analyze_frame(&image.to_rgb8(), &state);
+
+        let lap_state = state.lock().unwrap().last_frame.as_ref().unwrap().clone();
+        assert_eq!(4, lap_state.lap.unwrap());
+        assert_eq!(95, lap_state.health.unwrap());
+        assert_eq!(79, lap_state.gas.unwrap());
+        assert_eq!(76, lap_state.tyres.unwrap());
+
+        assert!(
+            Duration::from_secs(24) <= lap_state.lap_time.unwrap()
+                && lap_state.lap_time.unwrap() <= Duration::from_secs(25)
+        );
+    }
+}
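Note: these run under a plain cargo test. test_basic_analysis exercises the whole pipeline (config load, OCR database, per-region OCR, LapState parsing) against the checked-in screenshot test_data/test-full-1.png, and the lap-time check accepts anything between 24 and 25 seconds rather than pinning an exact Duration.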
src/main.rs
@@ -13,7 +13,6 @@ mod training_ui;

 use std::{
     collections::HashMap,
     ops::DerefMut,
     path::PathBuf,
     sync::{Arc, Mutex},
     thread,
@@ -248,13 +247,11 @@ fn show_debug_frames(
         ui.text_edit_singleline(&mut debug_image.recognized_text);
         debug_image.image.show_max_size(ui, Vec2::new(300.0, 300.0));

-        if name != "track" {
-            if ui.button("Learn OCR").clicked() {
-                let hashes = ocr::compute_box_hashes(&debug_image.rgb_image);
-                ocr_db
-                    .learn_phrase(&hashes, &debug_image.recognized_text)
-                    .unwrap();
-            }
+        if name != "track" && ui.button("Learn OCR").clicked() {
+            let hashes = ocr::compute_box_hashes(&debug_image.rgb_image);
+            ocr_db
+                .learn_phrase(&hashes, &debug_image.recognized_text)
+                .unwrap();
         }
         ui.separator();
     }
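Note: this is the collapse clippy's collapsible_if lint suggests. Behaviour is unchanged because && short-circuits, so ui.button("Learn OCR") is still only drawn for non-track regions.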
@@ -116,7 +116,7 @@ pub fn compute_box_hashes(image: &RgbImage) -> Vec<ImageHash> {

 #[test]
 fn test_bounding_boxes() {
-    let image_bytes = include_bytes!("test_data/test-image-2.png");
+    let image_bytes = include_bytes!("test_data/test-montserrat.png");
     let image = image::load_from_memory(image_bytes).unwrap().to_rgb8();
     let boxes = get_character_bounding_boxes(&image);
     assert_eq!(boxes.len(), 10);
@@ -128,7 +128,7 @@ fn test_bounding_boxes() {

 #[test]
 fn test_box_hashes() {
-    let image_bytes = include_bytes!("test_data/test-image-2.png");
+    let image_bytes = include_bytes!("test_data/test-montserrat.png");
     let image = image::load_from_memory(image_bytes).unwrap().to_rgb8();
     let hashes = compute_box_hashes(&image);
     assert_eq!(hashes.len(), 10);
@@ -115,22 +115,22 @@ fn test_ocr() {
         serde_json::from_str(include_str!("configs/ocr.default.json")).unwrap();
     let db: OcrDatabase = (&raw).into();

-    let image = image::load_from_memory(include_bytes!("test_data/test-image-3.png"))
+    let image = image::load_from_memory(include_bytes!("test_data/test-time-1.png"))
         .unwrap()
         .to_rgb8();
     assert_eq!(db.ocr_image(&image), "00:30.625");

-    let image = image::load_from_memory(include_bytes!("test_data/test-image-4.png"))
+    let image = image::load_from_memory(include_bytes!("test_data/test-time-2.png"))
         .unwrap()
         .to_rgb8();
     assert_eq!(db.ocr_image(&image), "00:20.296");

-    let image = image::load_from_memory(include_bytes!("test_data/test-image-num-1.png"))
+    let image = image::load_from_memory(include_bytes!("test_data/test-num-1.png"))
         .unwrap()
         .to_rgb8();
     assert_eq!(db.ocr_image(&image), "1");

-    let image = image::load_from_memory(include_bytes!("test_data/test-image-blank.png"))
+    let image = image::load_from_memory(include_bytes!("test_data/test-blank.png"))
         .unwrap()
         .to_rgb8();
     assert_eq!(db.ocr_image(&image), "");
@@ -133,7 +133,6 @@ pub struct DebugOcrFrame {

 #[derive(Default)]
 pub struct AppState {
     pub raw_data: HashMap<String, String>,
     pub last_frame: Option<LapState>,

     pub buffered_frames: VecDeque<LapState>,
[Binary image changes: several PNG test screenshots (183 B to 5.2 MiB) added, removed, or renamed; image contents not shown.]