Add REC processor

This commit is contained in:
VALLONGOL 2025-06-23 08:00:07 +02:00
parent a1e04c4a9a
commit 4c155a10f9
8 changed files with 1124 additions and 400 deletions

View File

@@ -1,7 +1,11 @@
{
"last_opened_file": "C:/src/____GitProjects/radar_data_reader/_25-05-15-12-22-52_sata_568-n13.out",
"last_output_file": "C:\\src\\____GitProjects\\radar_data_reader\\_25-05-15-12-22-52_sata_568-n13.csv",
"active_export_profile_name": "Default",
"last_opened_out_file": "C:/src/____GitProjects/radar_data_reader/_25-05-15-12-22-52_sata_568-n13.out",
"last_opened_rec_file": "C:/src/____GitProjects/radar_data_reader/_rec/_25-05-15-12-22-52_sata_345.rec",
"last_out_output_dir": "C:\\src\\____GitProjects\\radar_data_reader",
"last_rec_output_dir": "C:\\src\\____GitProjects\\radar_data_reader\\_rec",
"active_out_export_profile_name": "Default",
"active_rec_csv_profile_name": "Default",
"active_rec_json_profile_name": "Default",
"export_profiles": [
{
"name": "Default",
@@ -37,9 +41,34 @@
"translate_with_enum": true
},
{
"column_name": "waveform",
"data_path": "main_header.ge_header.mode.waveform",
"translate_with_enum": true
"column_name": "true_heading_rad",
"data_path": "main_header.ge_header.general_settings.navigation.attitude.true_heading_rad",
"translate_with_enum": false
},
{
"column_name": "lat_rad",
"data_path": "main_header.ge_header.general_settings.navigation.geo_pos.lat_rad",
"translate_with_enum": false
},
{
"column_name": "lon_rad",
"data_path": "main_header.ge_header.general_settings.navigation.geo_pos.lon_rad",
"translate_with_enum": false
},
{
"column_name": "altitude_m",
"data_path": "main_header.ge_header.general_settings.navigation.geo_pos.altitude_m",
"translate_with_enum": false
},
{
"column_name": "nav_az_rad",
"data_path": "main_header.ge_header.general_settings.antenna.position.nav_az_rad",
"translate_with_enum": false
},
{
"column_name": "nav_el_rad",
"data_path": "main_header.ge_header.general_settings.antenna.position.nav_el_rad",
"translate_with_enum": false
}
]
}

config/rec_flows.json Normal file
View File

@@ -0,0 +1,42 @@
{
"base": "0x80000000",
"pingpong_offset": "0x01000000",
"flows": [
{
"name": "HDR",
"base_offset": "0x0",
"size": 4096,
"id": 1213223748
},
{
"name": "SUM",
"base_offset": "0x00101000",
"size": "1M",
"id": 5068115
},
{
"name": "GUARD",
"base_offset": "0x00801000",
"size": "1M",
"id": 1380013383
},
{
"name": "DAZ",
"base_offset": "0x80401000",
"size": "1M",
"id": 5914948
},
{
"name": "DEL",
"base_offset": "0x80701000",
"size": "1M",
"id": 4998468
},
{
"name": "VIDEO",
"base_offset": "0xEF000000",
"size": "4M",
"id": null
}
]
}
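
For reference, a minimal sketch of how a flow table like this resolves into absolute address ranges, assuming each base_offset is added to the top-level base and sizes accept plain integers or K/M suffixes (the same conventions rec_processor.py applies below; handling of pingpong_offset is not shown):

import json
from pathlib import Path

def parse_size(size) -> int:
    """Accept a plain integer, or a string like '4096', '4K', '1M'."""
    if isinstance(size, int):
        return size
    s = str(size).upper()
    if s.endswith("K"):
        return int(s[:-1]) * 1024
    if s.endswith("M"):
        return int(s[:-1]) * 1024 * 1024
    return int(s)

config = json.loads(Path("config/rec_flows.json").read_text())
base = int(config["base"], 16)
for flow in config["flows"]:
    start = base + int(flow["base_offset"], 16)
    end = start + parse_size(flow["size"])
    print(f"{flow['name']:>6}: 0x{start:08X}..0x{end:08X}  id={flow['id']}")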

View File

@@ -6,14 +6,18 @@ Orchestrates the interaction between the GUI and the core processing logic using
"""
import multiprocessing as mp
import csv
import json
import os
import subprocess
import sys
from pathlib import Path
from typing import List, Any, Dict
from typing import List, Any, Dict, Tuple
from tkinter import filedialog
import tkinter as tk
from ..utils.config_manager import ConfigManager
from ..core.file_reader import run_worker_process
from ..core.rec_processor import run_rec_processor_worker
from ..core.data_structures import DataBatch
from ..core.data_enums import ENUM_REGISTRY, get_enum_name
from ..utils import logger
@@ -24,174 +28,342 @@ log = logger.get_logger(__name__)
def _get_value_from_path(batch: DataBatch, field: ExportField) -> Any:
"""
Safely retrieves a value from a DataBatch object using the path from an ExportField.
If the field is marked for translation, it attempts to convert the numeric value
to its string representation using the corresponding Enum.
"""
path = field.data_path
try:
parts = path.split(".")
if not parts:
return "N/A"
if path == "batch_id":
return batch.batch_id
current_obj = batch
for part in parts:
if current_obj is None:
return "N/A"
current_obj = getattr(current_obj, part, None)
value = current_obj if current_obj is not None else "N/A"
if field.translate_with_enum and isinstance(value, int):
enum_class = ENUM_REGISTRY.get(path)
if enum_class:
return get_enum_name(enum_class, value)
return value
except AttributeError:
log.warning(f"Could not find attribute for path: {path}")
return "N/A"
def _write_json_row(file_handle, row_dict: Dict[str, Any]):
json_string = json.dumps(row_dict)
file_handle.write(json_string + "\n")
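
So the .json output is JSON Lines (one object per line), which keeps row-by-row writes cheap and lets consumers stream the file back. A minimal reader, with a hypothetical filename:

import json

with open("output.json", encoding="utf-8") as f:
    rows = [json.loads(line) for line in f if line.strip()]
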
class AppController:
"""The main controller of the application."""
def __init__(self, config_manager: ConfigManager):
self.config_manager = config_manager
self.view = None
self.profile_editor_window: ProfileEditorWindow | None = None
self.worker_process: mp.Process | None = None
self.is_processing = False
self.worker_process: mp.Process | None = None
self.command_queue = mp.Queue()
self.result_queue = mp.Queue()
self.active_export_profile: ExportProfile | None = None
self.csv_file_handle = None
self.csv_writer = None
self.active_export_profiles: Dict[str, ExportProfile] = {}
self.output_file_handles: Dict[str, Any] = {}
self.csv_writers: Dict[str, Any] = {}
def bind_view(self, view):
self.view = view
self._load_initial_config()
def _load_initial_config(self):
if last_file := self.config_manager.get("last_opened_file"):
if last_file := self.config_manager.get("last_opened_out_file"):
if Path(last_file).is_file():
self.view.set_filepath(last_file)
self.view.out_filepath_var.set(last_file)
self.on_out_config_changed()
if last_output_file := self.config_manager.get("last_output_file"):
self.view.set_output_filepath(last_output_file)
if last_dir := self.config_manager.get("last_out_output_dir"):
self.view.out_output_dir_var.set(last_dir)
if not self.view.get_output_filepath() and self.view.get_filepath():
self._propose_output_filepath(self.view.get_filepath())
if last_file := self.config_manager.get("last_opened_rec_file"):
if Path(last_file).is_file():
self.view.rec_filepath_var.set(last_file)
self.on_rec_config_changed()
if last_dir := self.config_manager.get("last_rec_output_dir"):
self.view.rec_output_dir_var.set(last_dir)
profiles = self.config_manager.get_export_profiles()
active_profile_name = self.config_manager.get("active_export_profile_name")
self.view.update_export_profiles(profiles, active_profile_name)
def _propose_output_filepath(self, input_path_str: str):
if not input_path_str:
return
proposed_path = Path(input_path_str).with_suffix(".csv")
self.view.set_output_filepath(str(proposed_path))
def select_file(self):
current_path = self.view.get_filepath()
if filepath := self.view.ask_open_filename(current_path):
self.view.set_filepath(filepath)
self._propose_output_filepath(filepath)
def select_output_file(self):
current_path = self.view.get_output_filepath()
if new_path := self.view.ask_save_as_filename(current_path):
self.view.set_output_filepath(new_path)
def open_output_folder(self):
output_path_str = self.view.get_output_filepath()
if not output_path_str:
log.warning("Cannot open folder: output file path is not set.")
return
folder_path = Path(output_path_str).parent
if not folder_path.is_dir():
log.error(f"Cannot open folder: Directory '{folder_path}' does not exist.")
return
log.info(f"Opening output folder: {folder_path}")
try:
if sys.platform == "win32":
os.startfile(folder_path)
elif sys.platform == "darwin":
subprocess.run(["open", folder_path])
else:
subprocess.run(["xdg-open", folder_path])
except Exception as e:
log.error(f"Failed to open output folder: {e}")
def start_processing(self):
if self.is_processing:
log.warning("Processing is already in progress.")
return
filepath_str = self.view.get_filepath()
if not filepath_str or not Path(filepath_str).is_file():
log.error("No valid input file selected to process.")
return
output_filepath_str = self.view.get_output_filepath()
if not output_filepath_str:
log.error("No output file path specified.")
return
active_profile_name = self.view.get_active_profile_name()
profiles = self.config_manager.get_export_profiles()
self.active_export_profile = next(
(p for p in profiles if p.name == active_profile_name), None
self.view.update_export_profiles(
profiles=profiles,
active_out_profile=self.config_manager.get(
"active_out_export_profile_name"
),
active_rec_csv=self.config_manager.get("active_rec_csv_profile_name"),
active_rec_json=self.config_manager.get("active_rec_json_profile_name"),
)
if not self.active_export_profile:
log.error(f"No valid export profile named '{active_profile_name}' found.")
def on_out_config_changed(self, *args):
filepath_str = self.view.out_filepath_var.get()
if not filepath_str:
return
p = Path(filepath_str)
if not self.view.out_output_dir_var.get():
self.view.out_output_dir_var.set(str(p.parent))
self.view.out_basename_var.set(p.stem)
def on_rec_config_changed(self, *args):
try:
file_count = self.view.rec_file_count_var.get()
except (tk.TclError, ValueError):
return
filepath_str = self.view.rec_filepath_var.get()
if not filepath_str:
return
p = Path(filepath_str)
if not self.view.rec_output_dir_var.get():
self.view.rec_output_dir_var.set(str(p.parent))
try:
output_path = Path(output_filepath_str)
log.info(
f"Opening {output_path} for row-by-row saving using profile '{self.active_export_profile.name}'."
)
self.csv_file_handle = open(output_path, "w", encoding="utf-8", newline="")
self.csv_writer = csv.writer(self.csv_file_handle)
header = [field.column_name for field in self.active_export_profile.fields]
self.csv_writer.writerow(header)
except IOError as e:
log.error(f"Failed to open CSV file for writing: {e}")
stem_parts = p.stem.split("_")
seq_num = int(stem_parts[-1])  # validation only: raises ValueError if the stem has no trailing sequence number
base_stem = "_".join(stem_parts[:-1])
except (ValueError, IndexError):
base_stem = p.stem
new_stem = f"{base_stem}_n{file_count}"
self.view.rec_basename_var.set(new_stem)
def select_output_dir(self, dir_var: tk.StringVar):
initial_dir = dir_var.get() if dir_var.get() else None
if new_dir := filedialog.askdirectory(
initialdir=initial_dir, title="Select Output Directory"
):
dir_var.set(new_dir)
def _select_file(self, path_var: tk.StringVar, file_types: List[Tuple[str, str]]):
initial_dir = (
Path(path_var.get()).parent
if path_var.get() and Path(path_var.get()).exists()
else None
)
if filepath := filedialog.askopenfilename(
initialdir=initial_dir, filetypes=file_types
):
path_var.set(filepath)
def select_out_file(self):
self._select_file(
self.view.out_filepath_var,
[("Radar Output", "*.out"), ("All files", "*.*")],
)
def select_rec_file(self):
self._select_file(
self.view.rec_filepath_var,
[("Recorder Data", "*.rec"), ("All files", "*.*")],
)
def _prepare_output_files(
self, options: Dict[str, Any], output_dir: Path, basename: str
) -> bool:
self.output_file_handles.clear()
self.csv_writers.clear()
self.active_export_profiles.clear()
profiles = self.config_manager.get_export_profiles()
try:
if options.get("generate_csv"):
profile_name = (
self.view.rec_csv_profile_var.get()
if options.get("is_rec")
else self.view.out_csv_profile_var.get()
)
profile = next((p for p in profiles if p.name == profile_name), None)
if not profile:
raise ValueError(f"CSV export profile '{profile_name}' not found.")
self.active_export_profiles["csv"] = profile
path = (output_dir / basename).with_suffix(".csv")
log.info(f"Preparing CSV output: {path}")
fh = open(path, "w", encoding="utf-8", newline="")
self.output_file_handles["csv"] = fh
self.csv_writers["csv"] = csv.writer(fh)
self.csv_writers["csv"].writerow(
[field.column_name for field in profile.fields]
)
if options.get("generate_json"):
profile_name = (
self.view.rec_json_profile_var.get()
if options.get("is_rec")
else self.view.out_json_profile_var.get()
)
profile = next((p for p in profiles if p.name == profile_name), None)
if not profile:
raise ValueError(f"JSON export profile '{profile_name}' not found.")
self.active_export_profiles["json"] = profile
path = (output_dir / basename).with_suffix(".json")
log.info(f"Preparing JSON output: {path}")
self.output_file_handles["json"] = open(path, "w", encoding="utf-8")
return True
except (IOError, ValueError) as e:
log.error(f"Failed to prepare output files: {e}")
self._close_all_files()
return False
def start_out_processing(self):
if self.is_processing:
log.warning("Processing already in progress.")
return
filepath_str = self.view.out_filepath_var.get()
output_dir_str = self.view.out_output_dir_var.get()
basename = self.view.out_basename_var.get()
if not all([filepath_str, output_dir_str, basename]):
log.error("Please set input file, output directory, and base filename.")
return
output_options = {
"generate_csv": self.view.out_output_csv_var.get(),
"generate_json": self.view.out_output_json_var.get(),
"is_rec": False,
}
if not any(v for k, v in output_options.items() if k != "is_rec"):
log.error("Please select at least one output format (CSV or JSON).")
return
if not self._prepare_output_files(
output_options, Path(output_dir_str), basename
):
return
self.is_processing = True
self.view.start_processing_ui()
self.config_manager.set("last_opened_file", filepath_str)
self.config_manager.set("last_output_file", output_filepath_str)
self.config_manager.set("active_export_profile_name", active_profile_name)
self.config_manager.set("last_opened_out_file", filepath_str)
self.config_manager.set("last_out_output_dir", output_dir_str)
self.config_manager.set(
"active_out_export_profile_name", self.view.out_csv_profile_var.get()
)
self.config_manager.save_config()
active_profile = self.active_export_profiles.get(
"csv"
) or self.active_export_profiles.get("json")
worker_args = (
Path(filepath_str),
self.command_queue,
self.result_queue,
active_profile,
)
self._launch_worker(run_worker_process, worker_args)
def start_rec_processing(self):
if self.is_processing:
log.warning("A process is already running.")
return
first_file_str = self.view.rec_filepath_var.get()
output_dir_str = self.view.rec_output_dir_var.get()
basename = self.view.rec_basename_var.get()
if not all([first_file_str, output_dir_str, basename]):
log.error("Please set input file, output directory, and base filename.")
return
output_options = {
"generate_out": self.view.rec_output_out_var.get(),
"generate_csv": self.view.rec_output_csv_var.get(),
"generate_json": self.view.rec_output_json_var.get(),
"extract_video": self.view.rec_extract_video_var.get(),
"is_rec": True,
}
if not any(v for k, v in output_options.items() if k != "is_rec"):
log.error("Please select at least one output option.")
return
if output_options["generate_csv"] or output_options["generate_json"]:
if not self._prepare_output_files(
output_options, Path(output_dir_str), basename
):
return
file_list = self._get_rec_file_sequence()
if not file_list:
return
log.info(
f"--- Starting REC Sequence Processing on {len(file_list)} file(s) ---"
)
self.is_processing = True
self.view.start_processing_ui()
self.config_manager.set("last_opened_rec_file", first_file_str)
self.config_manager.set("last_rec_output_dir", output_dir_str)
self.config_manager.set(
"active_rec_csv_profile_name", self.view.rec_csv_profile_var.get()
)
self.config_manager.set(
"active_rec_json_profile_name", self.view.rec_json_profile_var.get()
)
self.config_manager.save_config()
worker_args = (
file_list,
output_options,
Path(output_dir_str),
basename,
self.command_queue,
self.result_queue,
)
self._launch_worker(run_rec_processor_worker, worker_args)
def _get_rec_file_sequence(self) -> List[Path]:
first_file_str = self.view.rec_filepath_var.get()
if not first_file_str:
return []
file_count = self.view.rec_file_count_var.get()
try:
base_path = Path(first_file_str)
name_parts = base_path.name.split("_")
seq_num = int(name_parts[-1].split(".")[0])
base_name = "_".join(name_parts[:-1]) + "_"
extension = base_path.suffix
file_list = [
base_path.with_name(f"{base_name}{seq_num + i}{extension}")
for i in range(file_count)
]
existing_files = [f for f in file_list if f.exists()]
if len(existing_files) < len(file_list):
log.warning(
f"Found {len(existing_files)} of {len(file_list)} expected files in the sequence."
)
if not existing_files:
log.error("No valid files found in the sequence.")
return existing_files
except (ValueError, IndexError) as e:
log.error(
f"Could not parse file sequence from name '{first_file_str}'. Error: {e}"
)
return []
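
Concretely, the sequence expansion above takes the trailing integer of the first file's stem and counts upward; a worked example using the sample name from the config above:

from pathlib import Path

base_path = Path("_25-05-15-12-22-52_sata_345.rec")
name_parts = base_path.name.split("_")
seq_num = int(name_parts[-1].split(".")[0])      # 345
base_name = "_".join(name_parts[:-1]) + "_"
file_list = [
    base_path.with_name(f"{base_name}{seq_num + i}{base_path.suffix}")
    for i in range(3)                            # file_count = 3
]
# -> ..._sata_345.rec, ..._sata_346.rec, ..._sata_347.rec
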
def _launch_worker(self, target_func, args_tuple: Tuple):
while not self.command_queue.empty():
self.command_queue.get()
while not self.result_queue.empty():
self.result_queue.get()
self.worker_process = mp.Process(
target=run_worker_process,
args=(
Path(filepath_str),
self.command_queue,
self.result_queue,
self.active_export_profile,
),
daemon=True,
target=target_func, args=args_tuple, daemon=True
)
self.worker_process.start()
self.view.poll_result_queue()
def stop_processing(self):
@@ -202,96 +374,110 @@ class AppController:
self.command_queue.put("STOP")
def handle_data_batch(self, batch: DataBatch):
if (
not self.csv_writer
or not self.active_export_profile
or not self.csv_file_handle
):
log.warning(
"Received a data batch but no CSV writer is configured. Skipping."
)
if self.csv_writers.get("csv"):
profile = self.active_export_profiles["csv"]
row_values = [
_get_value_from_path(batch, field) for field in profile.fields
]
self.csv_writers["csv"].writerow(row_values)
if self.output_file_handles.get("json"):
profile = self.active_export_profiles["json"]
row_dict = {
field.column_name: _get_value_from_path(batch, field)
for field in profile.fields
}
_write_json_row(self.output_file_handles["json"], row_dict)
if batch.batch_id % 20 == 0:
for fh in self.output_file_handles.values():
fh.flush()
def _close_all_files(self):
for file_handle in self.output_file_handles.values():
try:
file_handle.close()
except Exception as e:
log.error(f"Error closing file handle {file_handle.name}: {e}")
self.output_file_handles.clear()
self.csv_writers.clear()
def open_folder_from_path(self, folder_path_str: str):
if not folder_path_str:
log.warning("Cannot open folder: path is not set.")
return
folder_path = Path(folder_path_str)
if not folder_path.is_dir():
log.error(f"Cannot open folder: Directory '{folder_path}' does not exist.")
return
log.info(f"Opening folder: {folder_path}")
try:
row_values = [
_get_value_from_path(batch, field)
for field in self.active_export_profile.fields
]
self.csv_writer.writerow(row_values)
self.csv_file_handle.flush()
if sys.platform == "win32":
os.startfile(folder_path)
elif sys.platform == "darwin":
subprocess.run(["open", folder_path])
else:
subprocess.run(["xdg-open", folder_path])
except Exception as e:
log.error(
f"An unexpected error occurred during CSV row writing: {e}",
exc_info=True,
)
def _log_summary(self, stats: Dict[str, int]):
"""Formats and logs a summary of the processing results."""
log.info("--- Processing Summary ---")
total_blocks = stats.get("total_blocks_scanned", 0)
batches = stats.get("total_batches_found", 0)
log.info(f"Total Blocks Scanned: {total_blocks}")
log.info(f"Total Batches Found (based on headers): {batches}")
block_types = sorted([key for key in stats if key.startswith("found_")])
if block_types:
log.info("Block Types Found:")
for key in block_types:
block_name = key.replace("found_", "")
count = stats[key]
log.info(f" - {block_name}: {count}")
skipped = stats.get("skipped_blocks", 0)
if skipped > 0:
log.warning(f"Skipped Blocks (out of bounds): {skipped}")
failed = stats.get("failed_to_parse_blocks", 0)
if failed > 0:
log.error(f"Failed to Parse Blocks: {failed}")
output_path = self.view.get_output_filepath()
if output_path and Path(output_path).exists():
log.info(f"Output file saved to: {output_path}")
elif output_path:
log.warning(
f"Output file path was set to {output_path}, but file may not have been fully written if processing was interrupted."
)
log.info("--------------------------")
log.error(f"Failed to open folder: {e}")
def handle_worker_completion(self, msg: Dict[str, Any]):
was_interrupted = msg.get("interrupted", False)
status = "Interrupted by user" if was_interrupted else "Processing Complete"
log.info(f"--- {status}. Finalizing export. ---")
if self.csv_file_handle:
try:
self.csv_file_handle.close()
log.info("CSV file saved and closed successfully.")
except IOError as e:
log.error(f"Error closing CSV file: {e}")
self._close_all_files()
self.csv_file_handle = None
self.csv_writer = None
self.active_export_profile = None
self.active_export_profiles.clear()
self.is_processing = False
self.worker_process = None
self.view.update_ui_for_processing_state(False)
if stats := msg.get("stats"):
self._log_summary(stats)
def open_profile_editor(self):
if self.profile_editor_window and self.profile_editor_window.winfo_exists():
self.profile_editor_window.lift()
self.profile_editor_window.focus_force()
return
def _log_summary(self, stats: Dict[str, int]):
log.info("--- Processing Summary ---")
if "total_srio_blocks_found" in stats:
log.info(f"Total SRIO Blocks Scanned: {stats['total_srio_blocks_found']}")
block_types = sorted([key for key in stats if key.startswith("mapped_")])
if block_types:
log.info("Mapped & Processed Block Types:")
for key in block_types:
log.info(f" - {key.replace('mapped_', '')}: {stats[key]}")
else:
total_blocks = stats.get("total_blocks_scanned", 0)
batches = stats.get("total_batches_found", 0)
log.info(f"Total Blocks Scanned: {total_blocks}")
log.info(f"Total Batches Found: {batches}")
block_types = sorted([key for key in stats if key.startswith("found_")])
if block_types:
log.info("Block Types Found:")
for key in block_types:
log.info(f" - {key.replace('found_', '')}: {stats[key]}")
if skipped := stats.get("unmapped_srio_blocks", 0):
log.warning(f"Unmapped SRIO Blocks: {skipped}")
if failed := stats.get("failed_to_parse_blocks", 0):
log.error(f"Failed to Parse Blocks: {failed}")
log.info("--------------------------")
def open_profile_editor(self):
if (
self.view.profile_editor_window
and self.view.profile_editor_window.winfo_exists()
):
self.view.profile_editor_window.lift()
self.view.profile_editor_window.focus_force()
return
profiles = self.config_manager.get_export_profiles()
self.profile_editor_window = ProfileEditorWindow(
master=self.view, controller=self, profiles=profiles
self.view.profile_editor_window = ProfileEditorWindow(
master=self.view.master, controller=self, profiles=profiles
)
self.profile_editor_window.wait_window()
self.view.profile_editor_window.wait_window()
self._load_initial_config()
def save_export_profiles(self, profiles: List[ExportProfile]):
@@ -307,8 +493,5 @@ class AppController:
if self.worker_process.is_alive():
log.warning("Worker process did not exit gracefully, terminating.")
self.worker_process.terminate()
if self.csv_file_handle:
self.csv_file_handle.close()
self._close_all_files()
logger.shutdown_logging_system()

View File

@@ -65,7 +65,10 @@ def run_worker_process(
reader = RadarFileReader(filepath)
if not reader.load_and_find_blocks():
result_queue.put(
{"type": "error", "message": "Failed to load or find blocks."}
{
"type": "error",
"message": f"Failed to load or find blocks in {filepath.name}.",
}
)
return
@@ -108,7 +111,6 @@ def run_worker_process(
batch.batch_id = batch_count
result_queue.put({"type": "data_batch", "data": batch})
# Final stats update
stats["total_batches_found"] = stats.get("found_DSPHDRIN", 0)
result_queue.put(
{"type": "complete", "interrupted": interrupted, "stats": dict(stats)}
@@ -144,16 +146,26 @@ class RadarFileReader:
self.block_metadata: List[Tuple[int, int]] | None = None
def load_and_find_blocks(self) -> bool:
log.info(f"Loading data from {self.file_path}...")
"""
Loads data from file (if not already in memory) and finds valid data blocks.
"""
try:
self.data_vector = np.fromfile(str(self.file_path), dtype="<u4")
log.info(f"Loaded {self.data_vector.size} 32-bit words.")
# Only read from the file if data_vector has not been pre-loaded.
if self.data_vector is None:
log.info(f"Loading data from {self.file_path}...")
self.data_vector = np.fromfile(str(self.file_path), dtype="<u4")
log.info(f"Loaded {self.data_vector.size} 32-bit words.")
else:
log.info(
f"Using pre-loaded data vector with {self.data_vector.size} words."
)
log.info("Scanning for valid data block markers...")
all_marker_indices = np.where(self.data_vector == BLOCK_MARKER)[0]
if all_marker_indices.size < 2:
self.block_metadata = []
log.warning("Not enough block markers found in file.")
log.warning("Not enough block markers found in data.")
return True
potential_starts_mask = np.diff(all_marker_indices) == 1
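
The pre-load branch above is what lets the REC pipeline parse a reassembled stream without touching disk; a sketch, assuming the package imports as radar_data_reader and mirroring _process_virtual_out_file below:

import numpy as np
from pathlib import Path
from radar_data_reader.core.file_reader import RadarFileReader

raw = bytearray()  # filled with reassembled .out blocks elsewhere
reader = RadarFileReader(file_path=Path("virtual.out"))  # name is cosmetic
reader.data_vector = np.frombuffer(raw, dtype="<u4")     # pre-load: skips np.fromfile
reader.load_and_find_blocks()                            # scans the buffer directly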

View File

@@ -0,0 +1,322 @@
# radar_data_reader/core/rec_processor.py
"""
Worker process logic for reading, reassembling, and processing .rec file streams,
mimicking the "Flow" logic from the C++ application.
"""
import multiprocessing as mp
import logging
from pathlib import Path
from typing import List, Dict, Any, Iterator, Tuple, Optional, Union
from dataclasses import dataclass, field
import struct
import queue
from collections import defaultdict
import numpy as np
from ..utils import logger
from .data_structures import DataBatch
from .file_reader import RadarFileReader
from .struct_parser import parse_block
from .data_structures import (
DspHeaderIn,
BLOCK_ID_HDR_IN_1,
SIGNAL_ID_SUM,
SIGNAL_ID_GUARD,
SIGNAL_ID_DAZ,
SIGNAL_ID_DEL,
)
log = logger.get_logger(__name__)
SRIO_MARKER = b"\x5b\x53\x41\x14\x54\x5d"
@dataclass
class SrioHeader:
address: int
size: int
type: int
dst: int
src: int
STRUCT_FORMAT = "<IHHBB"
STRUCT_SIZE = struct.calcsize(STRUCT_FORMAT)
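
On the wire each block is the 6-byte marker, then this header packed little-endian (<IHHBB: 4-byte address, 2-byte size, 2-byte type, one byte each for dst and src, 10 bytes total), then the payload. A stand-alone decode of a single block, assuming the fixed 256-byte payload the streamer below expects:

import struct

SRIO_MARKER = b"\x5b\x53\x41\x14\x54\x5d"
HEADER_FMT = "<IHHBB"  # address, size, type, dst, src
PAYLOAD_SIZE = 256

def decode_block(block: bytes) -> tuple[int, bytes]:
    assert block.startswith(SRIO_MARKER), "not aligned to a marker"
    offset = len(SRIO_MARKER)
    address, size, type_, dst, src = struct.unpack_from(HEADER_FMT, block, offset)
    payload_start = offset + struct.calcsize(HEADER_FMT)
    return address, block[payload_start : payload_start + PAYLOAD_SIZE]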
# Default address map, mirroring the C++ application's hardcoded default JSON.
DEFAULT_ADDRESS_MAP_CONFIG = {
"base": "0x80000000",
"flows": [
# --- Main Data Flows (relative to 'base' or absolute) ---
{"name": "HDR", "base_offset": "0x0", "size": "4K", "id": BLOCK_ID_HDR_IN_1},
{"name": "SUM", "base_offset": "0x00101000", "size": "2M", "id": SIGNAL_ID_SUM},
{
"name": "GUARD",
"base_offset": "0x00801000",
"size": "2M",
"id": SIGNAL_ID_GUARD,
},
{"name": "DAZ", "base": "0x80401000", "size": "2M", "id": SIGNAL_ID_DAZ},
{"name": "DEL", "base": "0x80701000", "size": "2M", "id": SIGNAL_ID_DEL},
# --- Other flows that might be present in the recording ---
{"name": "SWHDR", "base": "0xA0000000", "size": "4K", "id": None},
{"name": "SWSUM", "base": "0xA0010000", "size": "1M", "id": None},
{"name": "SWGUARD", "base": "0xA0110000", "size": "1M", "id": None},
{"name": "SOFT", "base": "0xF0000000", "size": "4M", "id": None},
{"name": "SOFTDFE", "base": "0xE0000000", "size": "4M", "id": None},
{"name": "VIDEO", "base": "0xEF000000", "size": "4M", "id": None},
{"name": "TIMER", "base": "0x44A10000", "size": "8K", "id": None},
{"name": "DISPHEADER", "base": "0x56004000", "size": "4k", "id": None},
],
}
@dataclass
class Flow:
"""Represents a configured data flow."""
name: str
base_address: int
end_address: int
block_id: Optional[int]
buffer: bytearray = field(default_factory=bytearray)
is_active: bool = False
def hit(self, address: int) -> bool:
"""Checks if a given address falls within this flow's range."""
return self.base_address <= address < self.end_address
def _parse_size(size_str: Union[str, int]) -> int:
"""Parses a size string like '4K' or '1M' into bytes."""
if isinstance(size_str, int):
return size_str
size_str = str(size_str).upper()
if "K" in size_str:
return int(size_str.replace("K", "")) * 1024
if "M" in size_str:
return int(size_str.replace("M", "")) * 1024 * 1024
return int(size_str)
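
A few expected values, for reference (importable path assumed):

from radar_data_reader.core.rec_processor import _parse_size

assert _parse_size(4096) == 4096
assert _parse_size("4K") == 4 * 1024
assert _parse_size("1M") == 1024 * 1024
assert _parse_size("8k") == 8 * 1024  # lowercase works: the input is uppercased first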
def _stream_srio_blocks(file_list: List[Path]) -> Iterator[Tuple[SrioHeader, bytes]]:
"""Reads a sequence of .rec files and yields SRIO blocks."""
current_file_handle = None
try:
for filepath in file_list:
if not filepath.exists():
log.warning(f"File not found in sequence: {filepath.name}. Skipping.")
continue
log.info(f"Processing REC file: {filepath.name}")
current_file_handle = open(filepath, "rb")
buffer = b""
while True:
if len(buffer) < 8192:
chunk = current_file_handle.read(65536)
if not chunk:
break
buffer += chunk
marker_pos = buffer.find(SRIO_MARKER)
if marker_pos == -1:
buffer = buffer[-len(SRIO_MARKER) :]
continue
header_start = marker_pos + len(SRIO_MARKER)
if len(buffer) < header_start + SrioHeader.STRUCT_SIZE:
chunk = current_file_handle.read(65536)
if not chunk:
break  # truncated header at end of file
buffer += chunk
continue
header_bytes = buffer[
header_start : header_start + SrioHeader.STRUCT_SIZE
]
try:
header = SrioHeader(
*struct.unpack(SrioHeader.STRUCT_FORMAT, header_bytes)
)
except struct.error:
buffer = buffer[marker_pos + 1 :]
continue
payload_size = 256  # each SRIO block carries a fixed 256-byte payload
payload_start = header_start + SrioHeader.STRUCT_SIZE
if len(buffer) < payload_start + payload_size:
chunk = current_file_handle.read(65536)
if not chunk:
break  # truncated payload at end of file
buffer += chunk
continue
payload = buffer[payload_start : payload_start + payload_size]
yield header, payload
buffer = buffer[payload_start + payload_size :]
finally:
if current_file_handle:
current_file_handle.close()
def _flush_flow_buffer(
flow: Flow,
out_stream_buffer: bytearray,
out_file_handle: Optional[Any],
stats: Dict[str, int],
):
"""Finalizes a reassembled flow, creates an .out block, and writes it."""
if not flow.buffer:
return
log.debug(f"Flushing flow '{flow.name}' with {len(flow.buffer)} bytes.")
stats[f"mapped_{flow.name}_blocks"] += 1
if flow.block_id is not None:
payload_size = len(flow.buffer)
header_size_bytes = 34 * 4
out_block_size = header_size_bytes + payload_size
out_header = bytearray(header_size_bytes)
struct.pack_into("<I", out_header, 0, 0x5A5A5A5A)
struct.pack_into("<I", out_header, 4, 0x5A5A5A5A)
struct.pack_into("<I", out_header, 5 * 4, out_block_size)
struct.pack_into("<I", out_header, 17 * 4, flow.block_id)
if out_file_handle:
out_file_handle.write(out_header)
out_file_handle.write(flow.buffer)
out_stream_buffer.extend(out_header)
out_stream_buffer.extend(flow.buffer)
flow.buffer.clear()
flow.is_active = False
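
The synthesized header is 34 little-endian 32-bit words: words 0 and 1 hold the 0x5A5A5A5A marker pair, word 5 the total block size in bytes, word 17 the block id; the remaining words stay zero here. A hand-rolled read-back for checking the output (not the RadarFileReader parser itself):

import struct

HEADER_WORDS = 34  # 136 bytes

def read_out_header(header: bytes) -> tuple[int, int]:
    words = struct.unpack(f"<{HEADER_WORDS}I", header[: HEADER_WORDS * 4])
    assert words[0] == words[1] == 0x5A5A5A5A, "missing block marker pair"
    return words[5], words[17]  # (block size in bytes, block id)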
def _process_virtual_out_file(
out_data: np.ndarray, result_queue: mp.Queue, stats: Dict[str, int]
):
"""Takes a numpy array representing an in-memory .out file and parses it."""
log.info("Starting parsing of reassembled .out data...")
reader = RadarFileReader(file_path=Path("virtual.out"))
reader.data_vector = out_data
if not reader.load_and_find_blocks():
log.error(
"Failed to find any valid .out blocks in the reassembled data stream."
)
return
total_out_blocks = len(reader.block_metadata or [])
stats["total_out_blocks_generated"] = total_out_blocks
result_queue.put({"type": "start", "total": total_out_blocks})
batch_generator = reader.process_and_generate_batches(stats)
for i, (batch, blocks_done_count) in enumerate(batch_generator):
batch_count = i + 1
main_header = batch.main_header
file_batch_counter = (
main_header.ge_header.signal_descr.batch_counter if main_header else "N/A"
)
timetag = main_header.ge_header.signal_descr.ttag if main_header else "N/A"
progress_data = {
"type": "progress",
"batch_id": batch_count,
"file_batch_counter": file_batch_counter,
"timetag": timetag,
"blocks_done": blocks_done_count,
}
result_queue.put(progress_data)
batch.batch_id = batch_count
result_queue.put({"type": "data_batch", "data": batch})
# Once the generator is exhausted, signal completion so the GUI can finalize.
result_queue.put(
{"type": "complete", "interrupted": False, "stats": dict(stats)}
)
def run_rec_processor_worker(
file_list: List[Path],
output_options: Dict[str, Any],
output_dir: Path,
output_basename: str,
command_queue: mp.Queue,
result_queue: mp.Queue,
):
"""Main worker function for processing .rec files."""
log.info("REC processor worker started.")
stats: Dict[str, int] = defaultdict(int)
out_file_handle = None
out_stream_buffer = bytearray()
# --- Initialize Flows from the hardcoded default config ---
flows: List[Flow] = []
base_addr = int(DEFAULT_ADDRESS_MAP_CONFIG.get("base", "0x0"), 16)
for flow_config in DEFAULT_ADDRESS_MAP_CONFIG.get("flows", []):
if "base_offset" in flow_config:
offset = int(flow_config.get("base_offset", "0x0"), 16)
flow_base = base_addr + offset
else:
flow_base = int(flow_config.get("base", "0x0"), 16)
flows.append(
Flow(
name=flow_config.get("name"),
base_address=flow_base,
end_address=flow_base + _parse_size(flow_config.get("size", 0)),
block_id=flow_config.get("id"),
)
)
active_flow: Optional[Flow] = None
try:
if output_options.get("generate_out"):
out_filepath = (output_dir / output_basename).with_suffix(".out")
out_file_handle = open(out_filepath, "wb")
log.info(f"Opened {out_filepath} for writing concatenated .out file.")
block_count = 0
for header, payload in _stream_srio_blocks(file_list):
block_count += 1
stats["total_srio_blocks_found"] += 1
hit_flow = next((f for f in flows if f.hit(header.address)), None)
if hit_flow:
if active_flow and active_flow is not hit_flow:
_flush_flow_buffer(
active_flow, out_stream_buffer, out_file_handle, stats
)
active_flow = hit_flow
active_flow.is_active = True
active_flow.buffer.extend(payload)
else:
stats["unmapped_srio_blocks"] += 1
if block_count % 10000 == 0:
try:
if command_queue.get_nowait() == "STOP":
log.warning("Stop command received. Halting REC processing.")
break
except queue.Empty:
pass
if active_flow:
_flush_flow_buffer(active_flow, out_stream_buffer, out_file_handle, stats)
if (
output_options.get("generate_csv") or output_options.get("generate_json")
) and out_stream_buffer:
virtual_out_data = np.frombuffer(out_stream_buffer, dtype="<u4")
_process_virtual_out_file(virtual_out_data, result_queue, stats)
else:
# If we only generated a .out file, or nothing was reassembled, the
# completion message must be sent from here.
result_queue.put(
{"type": "complete", "interrupted": False, "stats": dict(stats)}
)
log.info(f"Finished REC processing. Found {block_count} SRIO blocks.")
except Exception as e:
log.error(
f"An unhandled exception occurred in REC processor: {e}", exc_info=True
)
result_queue.put({"type": "error", "message": f"REC processor failed: {e}"})
finally:
if out_file_handle:
out_file_handle.close()
log.info("Closed .out file.")

View File

@@ -1,5 +1,5 @@
"""
Main View for the Radar Data Reader application.
Main View for the Radar Data Reader application, now featuring a tabbed interface.
"""
import tkinter as tk
@@ -36,11 +36,12 @@ class MainWindow(tk.Frame):
self.gui_update_queue = controller.result_queue
self.total_blocks_for_progress = 0
self.profile_editor_window = None
self._init_vars()
self.pack(fill=tk.BOTH, expand=True)
self.master.title("Radar Data Reader")
self.master.geometry("800x750")
self.master.title("Radar Data Reader & Processor")
self.master.geometry("800x850")
self._create_widgets()
self._setup_gui_logging(logging_config)
@@ -50,21 +51,36 @@
def _init_vars(self):
"""Initialize all Tkinter variables."""
self.filepath_var = tk.StringVar()
self.output_filepath_var = tk.StringVar()
self.active_profile_var = tk.StringVar()
self.status_bar_var = tk.StringVar(value="Ready")
self.out_filepath_var = tk.StringVar()
self.out_output_dir_var = tk.StringVar()
self.out_basename_var = tk.StringVar()
self.out_output_csv_var = tk.BooleanVar(value=True)
self.out_output_json_var = tk.BooleanVar(value=False)
self.out_csv_profile_var = tk.StringVar()
self.out_json_profile_var = tk.StringVar()
self.rec_filepath_var = tk.StringVar()
self.rec_file_count_var = tk.IntVar(value=1)
self.rec_output_dir_var = tk.StringVar()
self.rec_basename_var = tk.StringVar()
self.rec_output_out_var = tk.BooleanVar(value=True)
self.rec_output_csv_var = tk.BooleanVar(value=False)
self.rec_output_json_var = tk.BooleanVar(value=False)
self.rec_extract_video_var = tk.BooleanVar(value=False)
self.rec_csv_profile_var = tk.StringVar()
self.rec_json_profile_var = tk.StringVar()
self.progress_text_var = tk.StringVar(value="N/A")
self.batches_found_var = tk.StringVar(value="N/A")
self.file_batch_counter_var = tk.StringVar(value="N/A")
self.timetag_var = tk.StringVar(value="N/A")
self.progress_bar_var = tk.DoubleVar(value=0)
self.status_bar_var = tk.StringVar(value="Ready")
def _create_widgets(self):
"""Create all the widgets for the main window."""
menu_bar = tk.Menu(self.master)
self.master.config(menu=menu_bar)
file_menu = tk.Menu(menu_bar, tearoff=0)
menu_bar.add_cascade(label="File", menu=file_menu)
file_menu.add_command(
@@ -76,117 +92,22 @@ class MainWindow(tk.Frame):
main_frame = tk.Frame(self)
main_frame.pack(padx=10, pady=10, fill=tk.BOTH, expand=True)
main_frame.rowconfigure(2, weight=1)
main_frame.columnconfigure(0, weight=1)
controls_frame = ttk.LabelFrame(main_frame, text="Controls")
controls_frame.grid(row=0, column=0, sticky="ew", pady=(0, 5))
controls_frame.columnconfigure(1, weight=1)
notebook = ttk.Notebook(main_frame)
notebook.grid(row=0, column=0, sticky="nsew", pady=(0, 10))
ttk.Label(controls_frame, text="Export Profile:").grid(
row=0, column=0, padx=5, pady=5, sticky="w"
)
self.profile_combobox = ttk.Combobox(
controls_frame, textvariable=self.active_profile_var, state="readonly"
)
self.profile_combobox.grid(row=0, column=1, padx=(0, 5), pady=5, sticky="ew")
self.process_button = ttk.Button(
controls_frame, text="Process File", command=self.on_process_click
)
self.process_button.grid(row=0, column=2, padx=(0, 5), pady=5, sticky="ew")
self.stop_button = ttk.Button(
controls_frame, text="Stop", command=self.on_stop_click, state=tk.DISABLED
)
self.stop_button.grid(row=0, column=3, padx=(5, 5), pady=5, sticky="ew")
out_processor_tab = ttk.Frame(notebook, padding="10")
rec_processor_tab = ttk.Frame(notebook, padding="10")
file_select_frame = ttk.Frame(controls_frame)
file_select_frame.grid(
row=1, column=0, columnspan=4, sticky="ew", padx=5, pady=(0, 5)
)
file_select_frame.columnconfigure(1, weight=1)
ttk.Label(file_select_frame, text="Radar File:").grid(
row=0, column=0, padx=(0, 5)
)
self.file_entry = ttk.Entry(
file_select_frame, textvariable=self.filepath_var, state="readonly"
)
self.file_entry.grid(row=0, column=1, sticky="ew")
self.browse_button = ttk.Button(
file_select_frame, text="Browse...", command=self.on_browse_click
)
self.browse_button.grid(row=0, column=2, padx=(5, 0))
notebook.add(out_processor_tab, text="OUT Processor")
notebook.add(rec_processor_tab, text="REC Processor")
output_file_frame = ttk.Frame(controls_frame)
output_file_frame.grid(
row=2, column=0, columnspan=4, sticky="ew", padx=5, pady=(0, 5)
)
output_file_frame.columnconfigure(1, weight=1)
ttk.Label(output_file_frame, text="Output CSV File:").grid(
row=0, column=0, padx=(0, 5)
)
self.output_file_entry = ttk.Entry(
output_file_frame, textvariable=self.output_filepath_var
)
self.output_file_entry.grid(row=0, column=1, sticky="ew")
self.save_as_button = ttk.Button(
output_file_frame, text="Save As...", command=self.on_save_as_click
)
self.save_as_button.grid(row=0, column=2, padx=(5, 0))
self.open_folder_button = ttk.Button(
output_file_frame,
text="Open Folder",
command=self.on_open_folder_click,
state=tk.DISABLED,
)
self.open_folder_button.grid(row=0, column=3, padx=(5, 0))
self._create_out_processor_tab(out_processor_tab)
self._create_rec_processor_tab(rec_processor_tab)
status_frame = ttk.LabelFrame(main_frame, text="Live Data & Progress")
status_frame.grid(row=1, column=0, sticky="ew", pady=5)
status_frame.columnconfigure(1, weight=1)
self.progress_bar = ttk.Progressbar(
status_frame, variable=self.progress_bar_var, maximum=100
)
self.progress_bar.grid(
row=0, column=0, columnspan=2, sticky="ew", padx=5, pady=(5, 2)
)
ttk.Label(status_frame, text="Progress:", anchor="e").grid(
row=1, column=0, padx=(10, 5), pady=2, sticky="e"
)
ttk.Label(status_frame, textvariable=self.progress_text_var, anchor="w").grid(
row=1, column=1, padx=5, pady=2, sticky="w"
)
ttk.Label(status_frame, text="Batches Found:", anchor="e").grid(
row=2, column=0, padx=(10, 5), pady=2, sticky="e"
)
ttk.Label(status_frame, textvariable=self.batches_found_var, anchor="w").grid(
row=2, column=1, padx=5, pady=2, sticky="w"
)
ttk.Label(status_frame, text="File Batch Counter:", anchor="e").grid(
row=3, column=0, padx=(10, 5), pady=2, sticky="e"
)
ttk.Label(
status_frame, textvariable=self.file_batch_counter_var, anchor="w"
).grid(row=3, column=1, padx=5, pady=2, sticky="w")
ttk.Label(status_frame, text="File TimeTag:", anchor="e").grid(
row=4, column=0, padx=(10, 5), pady=(2, 5), sticky="e"
)
ttk.Label(status_frame, textvariable=self.timetag_var, anchor="w").grid(
row=4, column=1, padx=5, pady=(2, 5), sticky="w"
)
log_frame = ttk.LabelFrame(main_frame, text="Log Console")
log_frame.grid(row=2, column=0, sticky="nsew", pady=(5, 0))
log_frame.rowconfigure(0, weight=1)
log_frame.columnconfigure(0, weight=1)
self.log_widget = scrolledtext.ScrolledText(
log_frame, state=tk.DISABLED, wrap=tk.WORD
)
self.log_widget.grid(row=0, column=0, sticky="nsew", padx=5, pady=5)
self._create_live_data_frame(main_frame)
self._create_log_console_frame(main_frame)
self.status_bar = ttk.Label(
self,
@@ -197,63 +118,286 @@
)
self.status_bar.pack(side=tk.BOTTOM, fill=tk.X)
def _create_out_processor_tab(self, parent):
parent.columnconfigure(1, weight=1)
input_frame = ttk.LabelFrame(parent, text="Input")
input_frame.grid(row=0, column=0, columnspan=2, sticky="ew", padx=5, pady=5)
input_frame.columnconfigure(1, weight=1)
ttk.Label(input_frame, text="Input .out File:").grid(
row=0, column=0, padx=5, pady=5, sticky="w"
)
out_file_entry = ttk.Entry(
input_frame, textvariable=self.out_filepath_var, state="readonly"
)
out_file_entry.grid(row=0, column=1, sticky="ew", padx=5)
self.out_browse_button = ttk.Button(
input_frame, text="Browse...", command=self.controller.select_out_file
)
self.out_browse_button.grid(row=0, column=2, padx=5, pady=5)
self.out_filepath_var.trace_add("write", self.controller.on_out_config_changed)
output_frame = ttk.LabelFrame(parent, text="Output Configuration")
output_frame.grid(row=1, column=0, columnspan=2, sticky="ew", padx=5, pady=5)
output_frame.columnconfigure(1, weight=1)
ttk.Label(output_frame, text="Output Directory:").grid(
row=0, column=0, padx=5, pady=5, sticky="w"
)
out_dir_entry = ttk.Entry(output_frame, textvariable=self.out_output_dir_var)
out_dir_entry.grid(row=0, column=1, sticky="ew", padx=5)
out_dir_buttons_frame = ttk.Frame(output_frame)
out_dir_buttons_frame.grid(row=0, column=2, padx=5)
ttk.Button(
out_dir_buttons_frame,
text="Browse...",
command=lambda: self.controller.select_output_dir(self.out_output_dir_var),
).pack(side=tk.LEFT)
ttk.Button(
out_dir_buttons_frame,
text="Open...",
command=lambda: self.controller.open_folder_from_path(
self.out_output_dir_var.get()
),
).pack(side=tk.LEFT, padx=(5, 0))
ttk.Label(output_frame, text="Base Filename:").grid(
row=1, column=0, padx=5, pady=5, sticky="w"
)
out_basename_entry = ttk.Entry(output_frame, textvariable=self.out_basename_var)
out_basename_entry.grid(row=1, column=1, columnspan=2, sticky="ew", padx=5)
options_frame = ttk.LabelFrame(parent, text="Output Formats")
options_frame.grid(row=2, column=0, columnspan=2, sticky="ew", padx=5, pady=5)
options_frame.columnconfigure(1, weight=1)
ttk.Checkbutton(
options_frame, text="Generate .csv file", variable=self.out_output_csv_var
).grid(row=0, column=0, sticky="w", padx=5, pady=2)
self.out_csv_profile_combobox = ttk.Combobox(
options_frame,
textvariable=self.out_csv_profile_var,
state="readonly",
width=20,
)
self.out_csv_profile_combobox.grid(row=0, column=1, sticky="w", padx=5)
ttk.Checkbutton(
options_frame, text="Generate .json file", variable=self.out_output_json_var
).grid(row=1, column=0, sticky="w", padx=5, pady=2)
self.out_json_profile_combobox = ttk.Combobox(
options_frame,
textvariable=self.out_json_profile_var,
state="readonly",
width=20,
)
self.out_json_profile_combobox.grid(row=1, column=1, sticky="w", padx=5)
action_frame = ttk.Frame(parent)
action_frame.grid(row=3, column=0, columnspan=2, pady=(10, 0))
self.out_process_button = ttk.Button(
action_frame,
text="Process .out File",
command=self.controller.start_out_processing,
)
self.out_process_button.pack(side=tk.LEFT, padx=5)
self.out_stop_button = ttk.Button(
action_frame,
text="Stop",
command=self.controller.stop_processing,
state=tk.DISABLED,
)
self.out_stop_button.pack(side=tk.LEFT, padx=5)
def _create_rec_processor_tab(self, parent):
parent.columnconfigure(1, weight=1)
input_frame = ttk.LabelFrame(parent, text="Input REC Sequence")
input_frame.grid(row=0, column=0, columnspan=2, sticky="ew", padx=5, pady=5)
input_frame.columnconfigure(1, weight=1)
ttk.Label(input_frame, text="First .rec File:").grid(
row=0, column=0, padx=5, pady=5, sticky="w"
)
rec_file_entry = ttk.Entry(
input_frame, textvariable=self.rec_filepath_var, state="readonly"
)
rec_file_entry.grid(row=0, column=1, sticky="ew", padx=5)
ttk.Button(
input_frame, text="Browse...", command=self.controller.select_rec_file
).grid(row=0, column=2, padx=5)
ttk.Label(input_frame, text="Number of Files:").grid(
row=1, column=0, padx=5, pady=5, sticky="w"
)
rec_file_count_spinbox = ttk.Spinbox(
input_frame,
from_=1,
to=1000,
textvariable=self.rec_file_count_var,
width=10,
)
rec_file_count_spinbox.grid(row=1, column=1, padx=5, pady=5, sticky="w")
self.rec_file_count_var.trace_add(
"write", self.controller.on_rec_config_changed
)
self.rec_filepath_var.trace_add("write", self.controller.on_rec_config_changed)
output_frame = ttk.LabelFrame(parent, text="Output Configuration")
output_frame.grid(row=1, column=0, columnspan=2, sticky="ew", padx=5, pady=5)
output_frame.columnconfigure(1, weight=1)
ttk.Label(output_frame, text="Output Directory:").grid(
row=0, column=0, padx=5, pady=5, sticky="w"
)
rec_dir_entry = ttk.Entry(output_frame, textvariable=self.rec_output_dir_var)
rec_dir_entry.grid(row=0, column=1, sticky="ew", padx=5)
rec_dir_buttons_frame = ttk.Frame(output_frame)
rec_dir_buttons_frame.grid(row=0, column=2, padx=5)
ttk.Button(
rec_dir_buttons_frame,
text="Browse...",
command=lambda: self.controller.select_output_dir(self.rec_output_dir_var),
).pack(side=tk.LEFT)
ttk.Button(
rec_dir_buttons_frame,
text="Open...",
command=lambda: self.controller.open_folder_from_path(
self.rec_output_dir_var.get()
),
).pack(side=tk.LEFT, padx=(5, 0))
ttk.Label(output_frame, text="Base Filename:").grid(
row=1, column=0, padx=5, pady=5, sticky="w"
)
rec_basename_entry = ttk.Entry(output_frame, textvariable=self.rec_basename_var)
rec_basename_entry.grid(row=1, column=1, columnspan=2, sticky="ew", padx=5)
options_frame = ttk.LabelFrame(parent, text="Output Formats")
options_frame.grid(row=2, column=0, columnspan=2, sticky="ew", padx=5, pady=5)
options_frame.columnconfigure(1, weight=1)
ttk.Checkbutton(
options_frame, text="Generate .out file", variable=self.rec_output_out_var
).grid(row=0, column=0, columnspan=2, sticky="w", padx=5)
ttk.Checkbutton(
options_frame,
text="Extract video stream",
variable=self.rec_extract_video_var,
).grid(row=1, column=0, columnspan=2, sticky="w", padx=5)
ttk.Checkbutton(
options_frame, text="Generate .csv file", variable=self.rec_output_csv_var
).grid(row=2, column=0, sticky="w", padx=5)
self.rec_csv_profile_combobox = ttk.Combobox(
options_frame,
textvariable=self.rec_csv_profile_var,
state="readonly",
width=20,
)
self.rec_csv_profile_combobox.grid(row=2, column=1, sticky="w", padx=5)
ttk.Checkbutton(
options_frame, text="Generate .json file", variable=self.rec_output_json_var
).grid(row=3, column=0, sticky="w", padx=5)
self.rec_json_profile_combobox = ttk.Combobox(
options_frame,
textvariable=self.rec_json_profile_var,
state="readonly",
width=20,
)
self.rec_json_profile_combobox.grid(row=3, column=1, sticky="w", padx=5)
action_frame = ttk.Frame(parent)
action_frame.grid(row=3, column=0, columnspan=2, pady=(10, 0))
self.rec_process_button = ttk.Button(
action_frame,
text="Process REC Sequence",
command=self.controller.start_rec_processing,
)
self.rec_process_button.pack()
def _create_live_data_frame(self, parent):
status_frame = ttk.LabelFrame(parent, text="Live Data & Progress")
status_frame.grid(row=1, column=0, sticky="nsew", pady=(0, 5))
status_frame.columnconfigure(1, weight=1)
self.progress_bar = ttk.Progressbar(
status_frame, variable=self.progress_bar_var, maximum=100
)
self.progress_bar.grid(
row=0, column=0, columnspan=2, sticky="ew", padx=5, pady=(5, 2)
)
ttk.Label(status_frame, text="Progress:", anchor="e").grid(
row=1, column=0, padx=(10, 5), pady=2, sticky="e"
)
ttk.Label(status_frame, textvariable=self.progress_text_var, anchor="w").grid(
row=1, column=1, padx=5, pady=2, sticky="w"
)
ttk.Label(status_frame, text="Batches Found:", anchor="e").grid(
row=2, column=0, padx=(10, 5), pady=2, sticky="e"
)
ttk.Label(status_frame, textvariable=self.batches_found_var, anchor="w").grid(
row=2, column=1, padx=5, pady=2, sticky="w"
)
ttk.Label(status_frame, text="File Batch Counter:", anchor="e").grid(
row=3, column=0, padx=(10, 5), pady=2, sticky="e"
)
ttk.Label(
status_frame, textvariable=self.file_batch_counter_var, anchor="w"
).grid(row=3, column=1, padx=5, pady=2, sticky="w")
ttk.Label(status_frame, text="File TimeTag:", anchor="e").grid(
row=4, column=0, padx=(10, 5), pady=(2, 5), sticky="e"
)
ttk.Label(status_frame, textvariable=self.timetag_var, anchor="w").grid(
row=4, column=1, padx=5, pady=(2, 5), sticky="w"
)
def _create_log_console_frame(self, parent):
log_frame = ttk.LabelFrame(parent, text="Log Console")
log_frame.grid(row=2, column=0, sticky="nsew", pady=(5, 0))
log_frame.rowconfigure(0, weight=1)
log_frame.columnconfigure(0, weight=1)
self.log_widget = scrolledtext.ScrolledText(
log_frame, state=tk.DISABLED, wrap=tk.WORD
)
self.log_widget.grid(row=0, column=0, sticky="nsew", padx=5, pady=5)
parent.rowconfigure(2, weight=1)
def _setup_gui_logging(self, logging_config):
logger.add_tkinter_handler(self.log_widget, self.master, logging_config)
def set_filepath(self, path: str):
self.filepath_var.set(path)
def get_filepath(self) -> str:
return self.filepath_var.get()
def set_output_filepath(self, path: str):
self.output_filepath_var.set(path)
if path and Path(path).parent.exists():
self.open_folder_button.config(state=tk.NORMAL)
else:
self.open_folder_button.config(state=tk.DISABLED)
def get_output_filepath(self) -> str:
return self.output_filepath_var.get()
def on_open_folder_click(self):
self.controller.open_output_folder()
def ask_open_filename(self, current_path: str) -> str:
initial_dir = (
Path(current_path).parent
if current_path and Path(current_path).exists()
else Path.cwd()
)
return filedialog.askopenfilename(
initialdir=initial_dir,
filetypes=[("Radar Output", "*.out"), ("All files", "*.*")],
)
def ask_save_as_filename(self, current_path: str) -> str:
initial_dir = Path(current_path).parent if current_path else Path.cwd()
initial_file = Path(current_path).name if current_path else ""
return filedialog.asksaveasfilename(
initialdir=initial_dir,
initialfile=initial_file,
defaultextension=".csv",
filetypes=[("CSV files", "*.csv"), ("All files", "*.*")],
)
def update_export_profiles(
self, profiles: List[ExportProfile], active_profile_name: str
self,
profiles: List[ExportProfile],
active_out_profile: str,
active_rec_csv: str,
active_rec_json: str,
):
profile_names = [p.name for p in profiles]
self.profile_combobox["values"] = profile_names
if active_profile_name in profile_names:
self.active_profile_var.set(active_profile_name)
elif profile_names:
self.active_profile_var.set(profile_names[0])
else:
self.active_profile_var.set("")
def get_active_profile_name(self) -> str:
return self.active_profile_var.get()
profile_names = [p.name for p in profiles] if profiles else []
all_combos = [
(
self.out_csv_profile_combobox,
self.out_csv_profile_var,
active_out_profile,
),
(
self.out_json_profile_combobox,
self.out_json_profile_var,
active_out_profile,
),
(self.rec_csv_profile_combobox, self.rec_csv_profile_var, active_rec_csv),
(
self.rec_json_profile_combobox,
self.rec_json_profile_var,
active_rec_json,
),
]
for combo, var, active_name in all_combos:
combo["values"] = profile_names
if active_name in profile_names:
var.set(active_name)
elif profile_names:
var.set(profile_names[0])
else:
var.set("")
def start_processing_ui(self):
self.update_ui_for_processing_state(True)
@@ -266,11 +410,18 @@ class MainWindow(tk.Frame):
def update_ui_for_processing_state(self, is_processing: bool):
state = tk.DISABLED if is_processing else tk.NORMAL
self.browse_button.config(state=state)
self.save_as_button.config(state=state)
self.process_button.config(state=state)
self.profile_combobox.config(state=state)
self.stop_button.config(state=tk.NORMAL if is_processing else tk.DISABLED)
# Disable all major controls on both tabs
for widget in [
self.out_browse_button,
self.out_process_button,
self.rec_process_button,
]:
if widget.winfo_exists():
widget.config(state=state)
# Specifically manage stop buttons, assuming one shared stop button for now
self.out_stop_button.config(state=tk.NORMAL if is_processing else tk.DISABLED)
if is_processing:
self.status_bar_var.set("Processing... Please wait.")
@@ -330,18 +481,9 @@
if self.controller.is_processing:
self.after(100, self.poll_result_queue)
def on_browse_click(self):
self.controller.select_file()
def on_process_click(self):
self.controller.start_processing()
def on_stop_click(self):
self.controller.stop_processing()
def on_close(self):
self.controller.shutdown()
self.master.destroy()
def on_save_as_click(self):
self.controller.select_output_file()

View File

@@ -24,44 +24,42 @@ class ConfigManager:
@staticmethod
def _get_default_config() -> Dict[str, Any]:
"""Provides the default configuration structure."""
# Updated default profile with correct data paths for the new architecture
default_profile = ExportProfile(
name="Default",
fields=[
ExportField(column_name="batch_id", data_path="batch_id"),
ExportField(column_name="timetag", data_path="main_header.timetag"),
ExportField(
column_name="timetag",
data_path="main_header.ge_header.signal_descr.ttag",
),
ExportField(
column_name="file_batch_counter",
data_path="main_header.batch_counter",
data_path="main_header.ge_header.signal_descr.batch_counter",
),
ExportField(column_name="npri", data_path="main_header.npri"),
ExportField(column_name="nrbin", data_path="main_header.nrbin"),
],
)
return {
"last_opened_file": "",
"last_output_file": "",
"active_export_profile_name": "Default",
"last_opened_out_file": "",
"last_opened_rec_file": "",
"last_out_output_dir": "",
"last_rec_output_dir": "",
"active_out_export_profile_name": "Default",
"active_rec_csv_profile_name": "Default",
"active_rec_json_profile_name": "Default",
"export_profiles": [default_profile.to_dict()],
}
def load_config(self) -> None:
"""Loads the configuration from the JSON file."""
"""Loads the main application configuration from the JSON file."""
log.info(f"Attempting to load configuration from: {self.config_path}")
if self.config_path.is_file():
try:
with open(self.config_path, "r", encoding="utf-8") as f:
loaded_data = json.load(f)
self.config["last_opened_file"] = loaded_data.get(
"last_opened_file", ""
)
self.config["last_output_file"] = loaded_data.get(
"last_output_file", ""
)
self.config["active_export_profile_name"] = loaded_data.get(
"active_export_profile_name", "Default"
)
default_conf = self._get_default_config()
for key in default_conf:
self.config[key] = loaded_data.get(key, default_conf[key])
profiles_data = loaded_data.get("export_profiles", [])
self.export_profiles = [
@@ -100,14 +98,10 @@ class ConfigManager:
"""Saves the current configuration to the JSON file."""
log.info(f"Saving configuration to: {self.config_path}")
data_to_save = {
"last_opened_file": self.get("last_opened_file", ""),
"last_output_file": self.get("last_output_file", ""),
"active_export_profile_name": self.get(
"active_export_profile_name", "Default"
),
"export_profiles": [profile.to_dict() for profile in self.export_profiles],
}
data_to_save = self.config.copy()
data_to_save["export_profiles"] = [
profile.to_dict() for profile in self.export_profiles
]
try:
self.config_path.parent.mkdir(parents=True, exist_ok=True)
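
The key-by-key merge above means new settings silently pick up their defaults, and unknown keys from older config files are dropped on the next save; the pattern in isolation:

defaults = {"last_opened_out_file": "", "active_rec_csv_profile_name": "Default"}
loaded = {"last_opened_out_file": "C:/data/run.out", "obsolete_key": 1}

config = {key: loaded.get(key, default) for key, default in defaults.items()}
# {'last_opened_out_file': 'C:/data/run.out',
#  'active_rec_csv_profile_name': 'Default'}  # obsolete_key discarded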

View File

@@ -434,7 +434,7 @@ def setup_basic_logging( # MODIFIED: Renamed from setup_logging
)
print("INFO: Basic centralized queued logging system setup complete.", flush=True)
logging.getLogger("FlightMonitor.LoggerTest").info(
logging.getLogger("read_radar_data.LoggerTest").info(
"Basic logging initialized. This is a test from setup_basic_logging."
)
@@ -502,7 +502,7 @@ def add_tkinter_handler( # NEW FUNCTION
"INFO: TkinterTextHandler added and configured successfully.",
flush=True,
)
logging.getLogger("FlightMonitor.LoggerTest").info(
logging.getLogger("read_radar_data.LoggerTest").info(
"TkinterTextHandler added. This is a test from add_tkinter_handler."
)
except Exception as e: