diff --git a/config/config.json b/config/config.json
index 1e4602f..731823b 100644
--- a/config/config.json
+++ b/config/config.json
@@ -1,7 +1,11 @@
 {
-    "last_opened_file": "C:/src/____GitProjects/radar_data_reader/_25-05-15-12-22-52_sata_568-n13.out",
-    "last_output_file": "C:\\src\\____GitProjects\\radar_data_reader\\_25-05-15-12-22-52_sata_568-n13.csv",
-    "active_export_profile_name": "Default",
+    "last_opened_out_file": "C:/src/____GitProjects/radar_data_reader/_25-05-15-12-22-52_sata_568-n13.out",
+    "last_opened_rec_file": "C:/src/____GitProjects/radar_data_reader/_rec/_25-05-15-12-22-52_sata_345.rec",
+    "last_out_output_dir": "C:\\src\\____GitProjects\\radar_data_reader",
+    "last_rec_output_dir": "C:\\src\\____GitProjects\\radar_data_reader\\_rec",
+    "active_out_export_profile_name": "Default",
+    "active_rec_csv_profile_name": "Default",
+    "active_rec_json_profile_name": "Default",
     "export_profiles": [
         {
             "name": "Default",
@@ -37,9 +41,34 @@
                     "translate_with_enum": true
                 },
                 {
-                    "column_name": "waveform",
-                    "data_path": "main_header.ge_header.mode.waveform",
-                    "translate_with_enum": true
+                    "column_name": "true_heading_rad",
+                    "data_path": "main_header.ge_header.general_settings.navigation.attitude.true_heading_rad",
+                    "translate_with_enum": false
+                },
+                {
+                    "column_name": "lat_rad",
+                    "data_path": "main_header.ge_header.general_settings.navigation.geo_pos.lat_rad",
+                    "translate_with_enum": false
+                },
+                {
+                    "column_name": "lon_rad",
+                    "data_path": "main_header.ge_header.general_settings.navigation.geo_pos.lon_rad",
+                    "translate_with_enum": false
+                },
+                {
+                    "column_name": "altitude_m",
+                    "data_path": "main_header.ge_header.general_settings.navigation.geo_pos.altitude_m",
+                    "translate_with_enum": false
+                },
+                {
+                    "column_name": "nav_az_rad",
+                    "data_path": "main_header.ge_header.general_settings.antenna.position.nav_az_rad",
+                    "translate_with_enum": false
+                },
+                {
+                    "column_name": "nav_el_rad",
+                    "data_path": "main_header.ge_header.general_settings.antenna.position.nav_el_rad",
+                    "translate_with_enum": false
                 }
             ]
         }
diff --git a/config/rec_flows.json b/config/rec_flows.json
new file mode 100644
index 0000000..c37deb8
--- /dev/null
+++ b/config/rec_flows.json
@@ -0,0 +1,42 @@
+{
+    "base": "0x80000000",
+    "pingpong_offset": "0x01000000",
+    "flows": [
+        {
+            "name": "HDR",
+            "base_offset": "0x0",
+            "size": 4096,
+            "id": 1213223748
+        },
+        {
+            "name": "SUM",
+            "base_offset": "0x00101000",
+            "size": "1M",
+            "id": 5068115
+        },
+        {
+            "name": "GUARD",
+            "base_offset": "0x00801000",
+            "size": "1M",
+            "id": 1380013383
+        },
+        {
+            "name": "DAZ",
+            "base_offset": "0x80401000",
+            "size": "1M",
+            "id": 5914948
+        },
+        {
+            "name": "DEL",
+            "base_offset": "0x80701000",
+            "size": "1M",
+            "id": 4998468
+        },
+        {
+            "name": "VIDEO",
+            "base_offset": "0xEF000000",
+            "size": "4M",
+            "id": null
+        }
+    ]
+}
\ No newline at end of file
diff --git a/radar_data_reader/core/app_controller.py b/radar_data_reader/core/app_controller.py
index 1d82dc0..01aa5e4 100644
--- a/radar_data_reader/core/app_controller.py
+++ b/radar_data_reader/core/app_controller.py
@@ -6,14 +6,18 @@ Orchestrates the interaction between the GUI and the core processing logic using
 """
 import multiprocessing as mp
 import csv
+import json
 import os
 import subprocess
 import sys
 from pathlib import Path
-from typing import List, Any, Dict
+from typing import List, Any, Dict, Tuple
+from tkinter import filedialog
+import tkinter as tk
 
 from ..utils.config_manager import ConfigManager
 from ..core.file_reader import run_worker_process
+from ..core.rec_processor import run_rec_processor_worker
 from ..core.data_structures import DataBatch
 from ..core.data_enums import ENUM_REGISTRY, get_enum_name
 from ..utils import logger
@@ -24,174 +28,342 @@ log = logger.get_logger(__name__)
 
 
 def _get_value_from_path(batch: DataBatch, field: ExportField) -> Any:
-    """
-    Safely retrieves a value from a DataBatch object using the path from an ExportField.
-    If the field is marked for translation, it attempts to convert the numeric value
-    to its string representation using the corresponding Enum.
-    """
     path = field.data_path
     try:
         parts = path.split(".")
         if not parts:
             return "N/A"
-
         if path == "batch_id":
             return batch.batch_id
-
         current_obj = batch
         for part in parts:
             if current_obj is None:
                 return "N/A"
             current_obj = getattr(current_obj, part, None)
-
         value = current_obj if current_obj is not None else "N/A"
-
         if field.translate_with_enum and isinstance(value, int):
             enum_class = ENUM_REGISTRY.get(path)
             if enum_class:
                 return get_enum_name(enum_class, value)
-
         return value
-
     except AttributeError:
         log.warning(f"Could not find attribute for path: {path}")
         return "N/A"
 
 
+def _write_json_row(file_handle, row_dict: Dict[str, Any]):
+    json_string = json.dumps(row_dict)
+    file_handle.write(json_string + "\n")
+
+
 class AppController:
     """The main controller of the application."""
 
     def __init__(self, config_manager: ConfigManager):
         self.config_manager = config_manager
         self.view = None
-        self.profile_editor_window: ProfileEditorWindow | None = None
-        self.worker_process: mp.Process | None = None
         self.is_processing = False
+        self.worker_process: mp.Process | None = None
         self.command_queue = mp.Queue()
         self.result_queue = mp.Queue()
-        self.active_export_profile: ExportProfile | None = None
-        self.csv_file_handle = None
-        self.csv_writer = None
+
+        self.active_export_profiles: Dict[str, ExportProfile] = {}
+        self.output_file_handles: Dict[str, Any] = {}
+        self.csv_writers: Dict[str, Any] = {}
 
     def bind_view(self, view):
         self.view = view
         self._load_initial_config()
 
     def _load_initial_config(self):
-        if last_file := self.config_manager.get("last_opened_file"):
+        if last_file := self.config_manager.get("last_opened_out_file"):
             if Path(last_file).is_file():
-                self.view.set_filepath(last_file)
+                self.view.out_filepath_var.set(last_file)
+                self.on_out_config_changed()
 
-        if last_output_file := self.config_manager.get("last_output_file"):
-            self.view.set_output_filepath(last_output_file)
+        if last_dir := self.config_manager.get("last_out_output_dir"):
+            self.view.out_output_dir_var.set(last_dir)
 
-        if not self.view.get_output_filepath() and self.view.get_filepath():
-            self._propose_output_filepath(self.view.get_filepath())
+        if last_file := self.config_manager.get("last_opened_rec_file"):
+            if Path(last_file).is_file():
+                self.view.rec_filepath_var.set(last_file)
+                self.on_rec_config_changed()
+
+        if last_dir := self.config_manager.get("last_rec_output_dir"):
+            self.view.rec_output_dir_var.set(last_dir)
 
         profiles = self.config_manager.get_export_profiles()
-        active_profile_name = self.config_manager.get("active_export_profile_name")
-        self.view.update_export_profiles(profiles, active_profile_name)
-
-    def _propose_output_filepath(self, input_path_str: str):
-        if not input_path_str:
-            return
-        proposed_path = Path(input_path_str).with_suffix(".csv")
-        self.view.set_output_filepath(str(proposed_path))
-
-    def select_file(self):
-        current_path = self.view.get_filepath()
-        if filepath := self.view.ask_open_filename(current_path):
-            self.view.set_filepath(filepath)
-            self._propose_output_filepath(filepath)
-
-    def select_output_file(self):
-        current_path = self.view.get_output_filepath()
-        if new_path := self.view.ask_save_as_filename(current_path):
-            self.view.set_output_filepath(new_path)
-
-    def open_output_folder(self):
-        output_path_str = self.view.get_output_filepath()
-        if not output_path_str:
-            log.warning("Cannot open folder: output file path is not set.")
-            return
-        folder_path = Path(output_path_str).parent
-        if not folder_path.is_dir():
-            log.error(f"Cannot open folder: Directory '{folder_path}' does not exist.")
-            return
-        log.info(f"Opening output folder: {folder_path}")
-        try:
-            if sys.platform == "win32":
-                os.startfile(folder_path)
-            elif sys.platform == "darwin":
-                subprocess.run(["open", folder_path])
-            else:
-                subprocess.run(["xdg-open", folder_path])
-        except Exception as e:
-            log.error(f"Failed to open output folder: {e}")
-
-    def start_processing(self):
-        if self.is_processing:
-            log.warning("Processing is already in progress.")
-            return
-
-        filepath_str = self.view.get_filepath()
-        if not filepath_str or not Path(filepath_str).is_file():
-            log.error("No valid input file selected to process.")
-            return
-
-        output_filepath_str = self.view.get_output_filepath()
-        if not output_filepath_str:
-            log.error("No output file path specified.")
-            return
-
-        active_profile_name = self.view.get_active_profile_name()
-        profiles = self.config_manager.get_export_profiles()
-        self.active_export_profile = next(
-            (p for p in profiles if p.name == active_profile_name), None
+        self.view.update_export_profiles(
+            profiles=profiles,
+            active_out_profile=self.config_manager.get(
+                "active_out_export_profile_name"
+            ),
+            active_rec_csv=self.config_manager.get("active_rec_csv_profile_name"),
+            active_rec_json=self.config_manager.get("active_rec_json_profile_name"),
         )
-        if not self.active_export_profile:
-            log.error(f"No valid export profile named '{active_profile_name}' found.")
+
+    def on_out_config_changed(self, *args):
+        filepath_str = self.view.out_filepath_var.get()
+        if not filepath_str:
             return
+        p = Path(filepath_str)
+        if not self.view.out_output_dir_var.get():
+            self.view.out_output_dir_var.set(str(p.parent))
+        self.view.out_basename_var.set(p.stem)
+
+    def on_rec_config_changed(self, *args):
+        try:
+            file_count = self.view.rec_file_count_var.get()
+        except (tk.TclError, ValueError):
+            return
+        filepath_str = self.view.rec_filepath_var.get()
+        if not filepath_str:
+            return
+
+        p = Path(filepath_str)
+        if not self.view.rec_output_dir_var.get():
+            self.view.rec_output_dir_var.set(str(p.parent))
+
         try:
-            output_path = Path(output_filepath_str)
-            log.info(
-                f"Opening {output_path} for row-by-row saving using profile '{self.active_export_profile.name}'."
-            )
-            self.csv_file_handle = open(output_path, "w", encoding="utf-8", newline="")
-            self.csv_writer = csv.writer(self.csv_file_handle)
-            header = [field.column_name for field in self.active_export_profile.fields]
-            self.csv_writer.writerow(header)
-        except IOError as e:
-            log.error(f"Failed to open CSV file for writing: {e}")
+            stem_parts = p.stem.split("_")
+            seq_num = int(stem_parts[-1])
+            base_stem = "_".join(stem_parts[:-1])
+        except (ValueError, IndexError):
+            base_stem = p.stem
+
+        new_stem = f"{base_stem}_n{file_count}"
+        self.view.rec_basename_var.set(new_stem)
+
+    def select_output_dir(self, dir_var: tk.StringVar):
+        initial_dir = dir_var.get() if dir_var.get() else None
+        if new_dir := filedialog.askdirectory(
+            initialdir=initial_dir, title="Select Output Directory"
+        ):
+            dir_var.set(new_dir)
+
+    def _select_file(self, path_var: tk.StringVar, file_types: List[Tuple[str, str]]):
+        initial_dir = (
+            Path(path_var.get()).parent
+            if path_var.get() and Path(path_var.get()).exists()
+            else None
+        )
+        if filepath := filedialog.askopenfilename(
+            initialdir=initial_dir, filetypes=file_types
+        ):
+            path_var.set(filepath)
+
+    def select_out_file(self):
+        self._select_file(
+            self.view.out_filepath_var,
+            [("Radar Output", "*.out"), ("All files", "*.*")],
+        )
+
+    def select_rec_file(self):
+        self._select_file(
+            self.view.rec_filepath_var,
+            [("Recorder Data", "*.rec"), ("All files", "*.*")],
+        )
+
+    def _prepare_output_files(
+        self, options: Dict[str, Any], output_dir: Path, basename: str
+    ) -> bool:
+        self.output_file_handles.clear()
+        self.csv_writers.clear()
+        self.active_export_profiles.clear()
+
+        profiles = self.config_manager.get_export_profiles()
+
+        try:
+            if options.get("generate_csv"):
+                profile_name = (
+                    self.view.rec_csv_profile_var.get()
+                    if options.get("is_rec")
+                    else self.view.out_csv_profile_var.get()
+                )
+                profile = next((p for p in profiles if p.name == profile_name), None)
+                if not profile:
+                    raise ValueError(f"CSV export profile '{profile_name}' not found.")
+
+                self.active_export_profiles["csv"] = profile
+                path = (output_dir / basename).with_suffix(".csv")
+                log.info(f"Preparing CSV output: {path}")
+                fh = open(path, "w", encoding="utf-8", newline="")
+                self.output_file_handles["csv"] = fh
+                self.csv_writers["csv"] = csv.writer(fh)
+                self.csv_writers["csv"].writerow(
+                    [field.column_name for field in profile.fields]
+                )
+
+            if options.get("generate_json"):
+                profile_name = (
+                    self.view.rec_json_profile_var.get()
+                    if options.get("is_rec")
+                    else self.view.out_json_profile_var.get()
+                )
+                profile = next((p for p in profiles if p.name == profile_name), None)
+                if not profile:
+                    raise ValueError(f"JSON export profile '{profile_name}' not found.")
+
+                self.active_export_profiles["json"] = profile
+                path = (output_dir / basename).with_suffix(".json")
+                log.info(f"Preparing JSON output: {path}")
+                self.output_file_handles["json"] = open(path, "w", encoding="utf-8")
+
+            return True
+        except (IOError, ValueError) as e:
+            log.error(f"Failed to prepare output files: {e}")
+            self._close_all_files()
+            return False
+
+    def start_out_processing(self):
+        if self.is_processing:
+            log.warning("Processing already in progress.")
+            return
+
+        filepath_str = self.view.out_filepath_var.get()
+        output_dir_str = self.view.out_output_dir_var.get()
+        basename = self.view.out_basename_var.get()
+
+        if not all([filepath_str, output_dir_str, basename]):
+            log.error("Please set input file, output directory, and base filename.")
+            return
+
+        output_options = {
+            "generate_csv": self.view.out_output_csv_var.get(),
+            "generate_json": self.view.out_output_json_var.get(),
+            "is_rec": False,
+        }
+        if not any(v for k, v in output_options.items() if k != "is_rec"):
+            log.error("Please select at least one output format (CSV or JSON).")
+            return
+
+        if not self._prepare_output_files(
+            output_options, Path(output_dir_str), basename
+        ):
             return
 
         self.is_processing = True
         self.view.start_processing_ui()
 
-        self.config_manager.set("last_opened_file", filepath_str)
-        self.config_manager.set("last_output_file", output_filepath_str)
-        self.config_manager.set("active_export_profile_name", active_profile_name)
+        self.config_manager.set("last_opened_out_file", filepath_str)
+        self.config_manager.set("last_out_output_dir", output_dir_str)
+        self.config_manager.set(
+            "active_out_export_profile_name", self.view.out_csv_profile_var.get()
+        )
         self.config_manager.save_config()
 
+        active_profile = self.active_export_profiles.get(
+            "csv"
+        ) or self.active_export_profiles.get("json")
+
+        worker_args = (
+            Path(filepath_str),
+            self.command_queue,
+            self.result_queue,
+            active_profile,
+        )
+        self._launch_worker(run_worker_process, worker_args)
+
+    def start_rec_processing(self):
+        if self.is_processing:
+            log.warning("A process is already running.")
+            return
+
+        first_file_str = self.view.rec_filepath_var.get()
+        output_dir_str = self.view.rec_output_dir_var.get()
+        basename = self.view.rec_basename_var.get()
+
+        if not all([first_file_str, output_dir_str, basename]):
+            log.error("Please set input file, output directory, and base filename.")
+            return
+
+        output_options = {
+            "generate_out": self.view.rec_output_out_var.get(),
+            "generate_csv": self.view.rec_output_csv_var.get(),
+            "generate_json": self.view.rec_output_json_var.get(),
+            "extract_video": self.view.rec_extract_video_var.get(),
+            "is_rec": True,
+        }
+        if not any(v for k, v in output_options.items() if k != "is_rec"):
+            log.error("Please select at least one output option.")
+            return
+
+        if output_options["generate_csv"] or output_options["generate_json"]:
+            if not self._prepare_output_files(
+                output_options, Path(output_dir_str), basename
+            ):
+                return
+
+        file_list = self._get_rec_file_sequence()
+        if not file_list:
+            return
+
+        log.info(
+            f"--- Starting REC Sequence Processing on {len(file_list)} file(s) ---"
+        )
+        self.is_processing = True
+        self.view.start_processing_ui()
+
+        self.config_manager.set("last_opened_rec_file", first_file_str)
+        self.config_manager.set("last_rec_output_dir", output_dir_str)
+        self.config_manager.set(
+            "active_rec_csv_profile_name", self.view.rec_csv_profile_var.get()
+        )
+        self.config_manager.set(
+            "active_rec_json_profile_name", self.view.rec_json_profile_var.get()
+        )
+        self.config_manager.save_config()
+
+        worker_args = (
+            file_list,
+            output_options,
+            Path(output_dir_str),
+            basename,
+            self.command_queue,
+            self.result_queue,
+        )
+        self._launch_worker(run_rec_processor_worker, worker_args)
+
+    def _get_rec_file_sequence(self) -> List[Path]:
+        first_file_str = self.view.rec_filepath_var.get()
+        if not first_file_str:
+            return []
+        file_count = self.view.rec_file_count_var.get()
+        try:
+            base_path = Path(first_file_str)
+            name_parts = base_path.name.split("_")
+            seq_num = int(name_parts[-1].split(".")[0])
+            base_name = "_".join(name_parts[:-1]) + "_"
+            extension = base_path.suffix
+            file_list = [
+                base_path.with_name(f"{base_name}{seq_num + i}{extension}")
+                for i in range(file_count)
+            ]
+            existing_files = [f for f in file_list if f.exists()]
+            if len(existing_files) < len(file_list):
+                log.warning(
+                    f"Found {len(existing_files)} of {len(file_list)} expected files in the sequence."
+                )
+            if not existing_files:
+                log.error("No valid files found in the sequence.")
+            return existing_files
+        except (ValueError, IndexError) as e:
+            log.error(
+                f"Could not parse file sequence from name '{first_file_str}'. Error: {e}"
+            )
+            return []
+
+    def _launch_worker(self, target_func, args_tuple: Tuple):
         while not self.command_queue.empty():
             self.command_queue.get()
         while not self.result_queue.empty():
             self.result_queue.get()
 
         self.worker_process = mp.Process(
-            target=run_worker_process,
-            args=(
-                Path(filepath_str),
-                self.command_queue,
-                self.result_queue,
-                self.active_export_profile,
-            ),
-            daemon=True,
+            target=target_func, args=args_tuple, daemon=True
         )
         self.worker_process.start()
-
         self.view.poll_result_queue()
 
     def stop_processing(self):
@@ -202,96 +374,110 @@ class AppController:
         self.command_queue.put("STOP")
 
     def handle_data_batch(self, batch: DataBatch):
-        if (
-            not self.csv_writer
-            or not self.active_export_profile
-            or not self.csv_file_handle
-        ):
-            log.warning(
-                "Received a data batch but no CSV writer is configured. Skipping."
-            )
+        if self.csv_writers.get("csv"):
+            profile = self.active_export_profiles["csv"]
+            row_values = [
+                _get_value_from_path(batch, field) for field in profile.fields
+            ]
+            self.csv_writers["csv"].writerow(row_values)
+
+        if self.output_file_handles.get("json"):
+            profile = self.active_export_profiles["json"]
+            row_dict = {
+                field.column_name: _get_value_from_path(batch, field)
+                for field in profile.fields
+            }
+            _write_json_row(self.output_file_handles["json"], row_dict)
+
+        if batch.batch_id % 20 == 0:
+            for fh in self.output_file_handles.values():
+                fh.flush()
+
+    def _close_all_files(self):
+        for file_handle in self.output_file_handles.values():
+            try:
+                file_handle.close()
+            except Exception as e:
+                log.error(f"Error closing file handle {file_handle.name}: {e}")
+        self.output_file_handles.clear()
+        self.csv_writers.clear()
+
+    def open_folder_from_path(self, folder_path_str: str):
+        if not folder_path_str:
+            log.warning("Cannot open folder: path is not set.")
             return
+        folder_path = Path(folder_path_str)
+        if not folder_path.is_dir():
+            log.error(f"Cannot open folder: Directory '{folder_path}' does not exist.")
+            return
+
+        log.info(f"Opening folder: {folder_path}")
         try:
-            row_values = [
-                _get_value_from_path(batch, field)
-                for field in self.active_export_profile.fields
-            ]
-            self.csv_writer.writerow(row_values)
-            self.csv_file_handle.flush()
+            if sys.platform == "win32":
+                os.startfile(folder_path)
+            elif sys.platform == "darwin":
+                subprocess.run(["open", folder_path])
+            else:
+                subprocess.run(["xdg-open", folder_path])
         except Exception as e:
-            log.error(
-                f"An unexpected error occurred during CSV row writing: {e}",
-                exc_info=True,
-            )
-
-    def _log_summary(self, stats: Dict[str, int]):
-        """Formats and logs a summary of the processing results."""
-        log.info("--- Processing Summary ---")
-
-        total_blocks = stats.get("total_blocks_scanned", 0)
-        batches = stats.get("total_batches_found", 0)
-
-        log.info(f"Total Blocks Scanned: {total_blocks}")
-        log.info(f"Total Batches Found (based on headers): {batches}")
-
-        block_types = sorted([key for key in stats if key.startswith("found_")])
-        if block_types:
-            log.info("Block Types Found:")
-            for key in block_types:
-                block_name = key.replace("found_", "")
-                count = stats[key]
-                log.info(f"  - {block_name}: {count}")
-
-        skipped = stats.get("skipped_blocks", 0)
-        if skipped > 0:
-            log.warning(f"Skipped Blocks (out of bounds): {skipped}")
-
-        failed = stats.get("failed_to_parse_blocks", 0)
-        if failed > 0:
-            log.error(f"Failed to Parse Blocks: {failed}")
-
-        output_path = self.view.get_output_filepath()
-        if output_path and Path(output_path).exists():
-            log.info(f"Output file saved to: {output_path}")
-        elif output_path:
-            log.warning(
-                f"Output file path was set to {output_path}, but file may not have been fully written if processing was interrupted."
-            )
-        log.info("--------------------------")
+            log.error(f"Failed to open folder: {e}")
 
     def handle_worker_completion(self, msg: Dict[str, Any]):
         was_interrupted = msg.get("interrupted", False)
         status = "Interrupted by user" if was_interrupted else "Processing Complete"
         log.info(f"--- {status}. Finalizing export. ---")
 
-        if self.csv_file_handle:
-            try:
-                self.csv_file_handle.close()
-                log.info("CSV file saved and closed successfully.")
-            except IOError as e:
-                log.error(f"Error closing CSV file: {e}")
+        self._close_all_files()
 
-        self.csv_file_handle = None
-        self.csv_writer = None
-        self.active_export_profile = None
+        self.active_export_profiles.clear()
 
         self.is_processing = False
         self.worker_process = None
 
+        self.view.update_ui_for_processing_state(False)
+
         if stats := msg.get("stats"):
             self._log_summary(stats)
 
-    def open_profile_editor(self):
-        if self.profile_editor_window and self.profile_editor_window.winfo_exists():
-            self.profile_editor_window.lift()
-            self.profile_editor_window.focus_force()
-            return
+    def _log_summary(self, stats: Dict[str, int]):
+        log.info("--- Processing Summary ---")
+        if "total_srio_blocks_found" in stats:
+            log.info(f"Total SRIO Blocks Scanned: {stats['total_srio_blocks_found']}")
+            block_types = sorted([key for key in stats if key.startswith("mapped_")])
+            if block_types:
+                log.info("Mapped & Processed Block Types:")
+                for key in block_types:
+                    log.info(f"  - {key.replace('mapped_', '')}: {stats[key]}")
+        else:
+            total_blocks = stats.get("total_blocks_scanned", 0)
+            batches = stats.get("total_batches_found", 0)
+            log.info(f"Total Blocks Scanned: {total_blocks}")
+            log.info(f"Total Batches Found: {batches}")
+            block_types = sorted([key for key in stats if key.startswith("found_")])
+            if block_types:
+                log.info("Block Types Found:")
+                for key in block_types:
+                    log.info(f"  - {key.replace('found_', '')}: {stats[key]}")
+        if skipped := stats.get("unmapped_srio_blocks", 0):
+            log.warning(f"Unmapped SRIO Blocks: {skipped}")
+        if failed := stats.get("failed_to_parse_blocks", 0):
+            log.error(f"Failed to Parse Blocks: {failed}")
+        log.info("--------------------------")
+
+    def open_profile_editor(self):
+        if (
+            self.view.profile_editor_window
+            and self.view.profile_editor_window.winfo_exists()
+        ):
+            self.view.profile_editor_window.lift()
+            self.view.profile_editor_window.focus_force()
+            return
 
         profiles = self.config_manager.get_export_profiles()
-        self.profile_editor_window = ProfileEditorWindow(
-            master=self.view, controller=self, profiles=profiles
+        self.view.profile_editor_window = ProfileEditorWindow(
+            master=self.view.master, controller=self, profiles=profiles
         )
-        self.profile_editor_window.wait_window()
+        self.view.profile_editor_window.wait_window()
 
         self._load_initial_config()
 
     def save_export_profiles(self, profiles: List[ExportProfile]):
@@ -307,8 +493,5 @@ class AppController:
         if self.worker_process.is_alive():
             log.warning("Worker process did not exit gracefully, terminating.")
             self.worker_process.terminate()
-
-        if self.csv_file_handle:
-            self.csv_file_handle.close()
-
+        self._close_all_files()
         logger.shutdown_logging_system()
diff --git a/radar_data_reader/core/file_reader.py b/radar_data_reader/core/file_reader.py
index 0ba9978..113c7d7 100644
--- a/radar_data_reader/core/file_reader.py
+++ b/radar_data_reader/core/file_reader.py
@@ -65,7 +65,10 @@ def run_worker_process(
     reader = RadarFileReader(filepath)
     if not reader.load_and_find_blocks():
         result_queue.put(
-            {"type": "error", "message": "Failed to load or find blocks."}
+            {
+                "type": "error",
+                "message": f"Failed to load or find blocks in {filepath.name}.",
+            }
         )
         return
 
@@ -108,7 +111,6 @@ def run_worker_process(
             batch.batch_id = batch_count
             result_queue.put({"type": "data_batch", "data": batch})
 
-    # Final stats update
     stats["total_batches_found"] = stats.get("found_DSPHDRIN", 0)
     result_queue.put(
         {"type": "complete", "interrupted": interrupted, "stats": dict(stats)}
@@ -144,16 +146,26 @@ class RadarFileReader:
         self.block_metadata: List[Tuple[int, int]] | None = None
 
     def load_and_find_blocks(self) -> bool:
-        log.info(f"Loading data from {self.file_path}...")
+        """
+        Loads data from file (if not already in memory) and finds valid data blocks.
+        """
         try:
-            self.data_vector = np.fromfile(str(self.file_path), dtype=" bool:
+        """Checks if a given address falls within this flow's range."""
+        return self.base_address <= address < self.end_address
+
+
+def _parse_size(size_str: Union[str, int]) -> int:
+    """Parses a size string like '4K' or '1M' into bytes."""
+    if isinstance(size_str, int):
+        return size_str
+    size_str = str(size_str).upper()
+    if "K" in size_str:
+        return int(size_str.replace("K", "")) * 1024
+    if "M" in size_str:
+        return int(size_str.replace("M", "")) * 1024 * 1024
+    return int(size_str)
+
+
+def _stream_srio_blocks(file_list: List[Path]) -> Iterator[Tuple[SrioHeader, bytes]]:
+    """Reads a sequence of .rec files and yields SRIO blocks."""
+    current_file_handle = None
+    try:
+        for filepath in file_list:
+            if not filepath.exists():
+                log.warning(f"File not found in sequence: {filepath.name}. Skipping.")
Skipping.") + continue + + log.info(f"Processing REC file: {filepath.name}") + current_file_handle = open(filepath, "rb") + buffer = b"" + + while True: + if len(buffer) < 8192: + chunk = current_file_handle.read(65536) + if not chunk: + break + buffer += chunk + + marker_pos = buffer.find(SRIO_MARKER) + if marker_pos == -1: + buffer = buffer[-len(SRIO_MARKER) :] + continue + + header_start = marker_pos + len(SRIO_MARKER) + if len(buffer) < header_start + SrioHeader.STRUCT_SIZE: + continue + + header_bytes = buffer[ + header_start : header_start + SrioHeader.STRUCT_SIZE + ] + try: + header = SrioHeader( + *struct.unpack(SrioHeader.STRUCT_FORMAT, header_bytes) + ) + except struct.error: + buffer = buffer[marker_pos + 1 :] + continue + + payload_size = 256 + payload_start = header_start + SrioHeader.STRUCT_SIZE + if len(buffer) < payload_start + payload_size: + continue + + payload = buffer[payload_start : payload_start + payload_size] + yield header, payload + buffer = buffer[payload_start + payload_size :] + finally: + if current_file_handle: + current_file_handle.close() + + +def _flush_flow_buffer( + flow: Flow, + out_stream_buffer: bytearray, + out_file_handle: Optional[Any], + stats: Dict[str, int], +): + """Finalizes a reassembled flow, creates an .out block, and writes it.""" + if not flow.buffer: + return + + log.debug(f"Flushing flow '{flow.name}' with {len(flow.buffer)} bytes.") + stats[f"mapped_{flow.name}_blocks"] += 1 + + if flow.block_id is not None: + payload_size = len(flow.buffer) + header_size_bytes = 34 * 4 + out_block_size = header_size_bytes + payload_size + + out_header = bytearray(header_size_bytes) + struct.pack_into(" str: - return self.filepath_var.get() - - def set_output_filepath(self, path: str): - self.output_filepath_var.set(path) - if path and Path(path).parent.exists(): - self.open_folder_button.config(state=tk.NORMAL) - else: - self.open_folder_button.config(state=tk.DISABLED) - - def get_output_filepath(self) -> str: - return self.output_filepath_var.get() - - def on_open_folder_click(self): - self.controller.open_output_folder() - - def ask_open_filename(self, current_path: str) -> str: - initial_dir = ( - Path(current_path).parent - if current_path and Path(current_path).exists() - else Path.cwd() - ) - return filedialog.askopenfilename( - initialdir=initial_dir, - filetypes=[("Radar Output", "*.out"), ("All files", "*.*")], - ) - - def ask_save_as_filename(self, current_path: str) -> str: - initial_dir = Path(current_path).parent if current_path else Path.cwd() - initial_file = Path(current_path).name if current_path else "" - return filedialog.asksaveasfilename( - initialdir=initial_dir, - initialfile=initial_file, - defaultextension=".csv", - filetypes=[("CSV files", "*.csv"), ("All files", "*.*")], - ) - def update_export_profiles( - self, profiles: List[ExportProfile], active_profile_name: str + self, + profiles: List[ExportProfile], + active_out_profile: str, + active_rec_csv: str, + active_rec_json: str, ): - profile_names = [p.name for p in profiles] - self.profile_combobox["values"] = profile_names - if active_profile_name in profile_names: - self.active_profile_var.set(active_profile_name) - elif profile_names: - self.active_profile_var.set(profile_names[0]) - else: - self.active_profile_var.set("") - - def get_active_profile_name(self) -> str: - return self.active_profile_var.get() + profile_names = [p.name for p in profiles] if profiles else [] + all_combos = [ + ( + self.out_csv_profile_combobox, + self.out_csv_profile_var, + 
active_out_profile, + ), + ( + self.out_json_profile_combobox, + self.out_json_profile_var, + active_out_profile, + ), + (self.rec_csv_profile_combobox, self.rec_csv_profile_var, active_rec_csv), + ( + self.rec_json_profile_combobox, + self.rec_json_profile_var, + active_rec_json, + ), + ] + for combo, var, active_name in all_combos: + combo["values"] = profile_names + if active_name in profile_names: + var.set(active_name) + elif profile_names: + var.set(profile_names[0]) + else: + var.set("") def start_processing_ui(self): self.update_ui_for_processing_state(True) @@ -266,11 +410,18 @@ class MainWindow(tk.Frame): def update_ui_for_processing_state(self, is_processing: bool): state = tk.DISABLED if is_processing else tk.NORMAL - self.browse_button.config(state=state) - self.save_as_button.config(state=state) - self.process_button.config(state=state) - self.profile_combobox.config(state=state) - self.stop_button.config(state=tk.NORMAL if is_processing else tk.DISABLED) + + # Disable all major controls on both tabs + for widget in [ + self.out_browse_button, + self.out_process_button, + self.rec_process_button, + ]: + if widget.winfo_exists(): + widget.config(state=state) + + # Specifically manage stop buttons, assuming one shared stop button for now + self.out_stop_button.config(state=tk.NORMAL if is_processing else tk.DISABLED) if is_processing: self.status_bar_var.set("Processing... Please wait.") @@ -330,18 +481,9 @@ class MainWindow(tk.Frame): if self.controller.is_processing: self.after(100, self.poll_result_queue) - def on_browse_click(self): - self.controller.select_file() - - def on_process_click(self): - self.controller.start_processing() - def on_stop_click(self): self.controller.stop_processing() def on_close(self): self.controller.shutdown() self.master.destroy() - - def on_save_as_click(self): - self.controller.select_output_file() diff --git a/radar_data_reader/utils/config_manager.py b/radar_data_reader/utils/config_manager.py index 48d602c..4a4e085 100644 --- a/radar_data_reader/utils/config_manager.py +++ b/radar_data_reader/utils/config_manager.py @@ -24,44 +24,42 @@ class ConfigManager: @staticmethod def _get_default_config() -> Dict[str, Any]: """Provides the default configuration structure.""" - # Updated default profile with correct data paths for the new architecture default_profile = ExportProfile( name="Default", fields=[ ExportField(column_name="batch_id", data_path="batch_id"), - ExportField(column_name="timetag", data_path="main_header.timetag"), + ExportField( + column_name="timetag", + data_path="main_header.ge_header.signal_descr.ttag", + ), ExportField( column_name="file_batch_counter", - data_path="main_header.batch_counter", + data_path="main_header.ge_header.signal_descr.batch_counter", ), - ExportField(column_name="npri", data_path="main_header.npri"), - ExportField(column_name="nrbin", data_path="main_header.nrbin"), ], ) return { - "last_opened_file": "", - "last_output_file": "", - "active_export_profile_name": "Default", + "last_opened_out_file": "", + "last_opened_rec_file": "", + "last_out_output_dir": "", + "last_rec_output_dir": "", + "active_out_export_profile_name": "Default", + "active_rec_csv_profile_name": "Default", + "active_rec_json_profile_name": "Default", "export_profiles": [default_profile.to_dict()], } def load_config(self) -> None: - """Loads the configuration from the JSON file.""" + """Loads the main application configuration from the JSON file.""" log.info(f"Attempting to load configuration from: {self.config_path}") if 
self.config_path.is_file(): try: with open(self.config_path, "r", encoding="utf-8") as f: loaded_data = json.load(f) - self.config["last_opened_file"] = loaded_data.get( - "last_opened_file", "" - ) - self.config["last_output_file"] = loaded_data.get( - "last_output_file", "" - ) - self.config["active_export_profile_name"] = loaded_data.get( - "active_export_profile_name", "Default" - ) + default_conf = self._get_default_config() + for key in default_conf: + self.config[key] = loaded_data.get(key, default_conf[key]) profiles_data = loaded_data.get("export_profiles", []) self.export_profiles = [ @@ -100,14 +98,10 @@ class ConfigManager: """Saves the current configuration to the JSON file.""" log.info(f"Saving configuration to: {self.config_path}") - data_to_save = { - "last_opened_file": self.get("last_opened_file", ""), - "last_output_file": self.get("last_output_file", ""), - "active_export_profile_name": self.get( - "active_export_profile_name", "Default" - ), - "export_profiles": [profile.to_dict() for profile in self.export_profiles], - } + data_to_save = self.config.copy() + data_to_save["export_profiles"] = [ + profile.to_dict() for profile in self.export_profiles + ] try: self.config_path.parent.mkdir(parents=True, exist_ok=True) diff --git a/radar_data_reader/utils/logger.py b/radar_data_reader/utils/logger.py index 3be45f7..f51baa0 100644 --- a/radar_data_reader/utils/logger.py +++ b/radar_data_reader/utils/logger.py @@ -434,7 +434,7 @@ def setup_basic_logging( # MODIFIED: Renamed from setup_logging ) print("INFO: Basic centralized queued logging system setup complete.", flush=True) - logging.getLogger("FlightMonitor.LoggerTest").info( + logging.getLogger("read_radar_data.LoggerTest").info( "Basic logging initialized. This is a test from setup_basic_logging." ) @@ -502,7 +502,7 @@ def add_tkinter_handler( # NEW FUNCTION "INFO: TkinterTextHandler added and configured successfully.", flush=True, ) - logging.getLogger("FlightMonitor.LoggerTest").info( + logging.getLogger("read_radar_data.LoggerTest").info( "TkinterTextHandler added. This is a test from add_tkinter_handler." ) except Exception as e:
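
For reference, the export pipeline above resolves each ExportField.data_path (for example "main_header.ge_header.general_settings.navigation.geo_pos.lat_rad") by walking the dotted path attribute by attribute through a DataBatch, degrading to "N/A" on any missing link and translating integers through ENUM_REGISTRY when translate_with_enum is set. A minimal, self-contained sketch of that resolution logic; the nested dataclasses, the Waveform enum, and the enum_class(value).name fallback are simplified stand-ins, not the real radar_data_reader types or get_enum_name:

    # Illustrative stand-ins only -- the real DataBatch/ExportField live in
    # radar_data_reader.core; Waveform and its values are invented here.
    from dataclasses import dataclass, field
    from enum import IntEnum
    from typing import Any, Dict, Type


    class Waveform(IntEnum):
        CW = 0
        LFM = 1


    @dataclass
    class Mode:
        waveform: int = 1


    @dataclass
    class GeHeader:
        mode: Mode = field(default_factory=Mode)


    @dataclass
    class MainHeader:
        ge_header: GeHeader = field(default_factory=GeHeader)


    @dataclass
    class DataBatch:
        batch_id: int = 0
        main_header: MainHeader = field(default_factory=MainHeader)


    # Keyed by the full data_path, mirroring how ENUM_REGISTRY is used above.
    ENUM_REGISTRY: Dict[str, Type[IntEnum]] = {
        "main_header.ge_header.mode.waveform": Waveform,
    }


    def resolve(batch: DataBatch, data_path: str, translate: bool = False) -> Any:
        """Walk a dotted attribute path; fall back to "N/A" on any missing link."""
        if data_path == "batch_id":
            return batch.batch_id
        obj: Any = batch
        for part in data_path.split("."):
            if obj is None:
                return "N/A"
            obj = getattr(obj, part, None)
        value = obj if obj is not None else "N/A"
        if translate and isinstance(value, int):
            enum_class = ENUM_REGISTRY.get(data_path)
            if enum_class:
                try:
                    return enum_class(value).name  # stand-in for get_enum_name()
                except ValueError:
                    return value
        return value


    if __name__ == "__main__":
        batch = DataBatch(batch_id=7)
        print(resolve(batch, "main_header.ge_header.mode.waveform", True))  # LFM
        print(resolve(batch, "main_header.ge_header.mode.missing_field"))   # N/A

Running the sketch prints "LFM" for the enum-translated path and "N/A" for a path that does not exist, which mirrors how a misconfigured profile column degrades gracefully in the CSV/JSON output rather than aborting the run.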
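One detail worth knowing for downstream consumers: _write_json_row emits one JSON object per line (JSON Lines), even though _prepare_output_files gives the output a .json extension, so the file is not a single JSON document. A sketch of reading the rows back; the file path is a hypothetical placeholder:

    import json
    from pathlib import Path

    # Rows produced by _write_json_row: one JSON object per line (JSONL).
    rows = []
    with open(Path("example_output.json"), "r", encoding="utf-8") as f:
        for line in f:
            line = line.strip()
            if line:  # skip blank lines defensively
                rows.append(json.loads(line))
    print(f"Loaded {len(rows)} exported batches")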
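Finally, the flow table in config/rec_flows.json drives how the REC processor maps each SRIO destination address to a named flow. A sketch of that mapping under one stated assumption: each flow occupies the half-open range [base + base_offset, base + base_offset + size), and pingpong_offset handling is omitted. parse_size and Flow.contains mirror the _parse_size helper and contains method shown in rec_processor.py; load_flows and find_flow are hypothetical helpers, since the real Flow construction is not visible in this diff:

    import json
    from dataclasses import dataclass
    from typing import List, Optional, Union


    def parse_size(size_str: Union[str, int]) -> int:
        """Parse '4K' / '1M' style sizes into bytes, as _parse_size does."""
        if isinstance(size_str, int):
            return size_str
        s = str(size_str).upper()
        if "K" in s:
            return int(s.replace("K", "")) * 1024
        if "M" in s:
            return int(s.replace("M", "")) * 1024 * 1024
        return int(s)


    @dataclass
    class Flow:
        name: str
        base_address: int
        end_address: int
        block_id: Optional[int]

        def contains(self, address: int) -> bool:
            """Check whether an address falls within this flow's range."""
            return self.base_address <= address < self.end_address


    def load_flows(config_path: str) -> List[Flow]:
        """Hypothetical loader: build Flow ranges from rec_flows.json."""
        with open(config_path, "r", encoding="utf-8") as f:
            cfg = json.load(f)
        base = int(cfg["base"], 16)
        flows = []
        for entry in cfg["flows"]:
            start = base + int(entry["base_offset"], 16)
            end = start + parse_size(entry["size"])
            flows.append(Flow(entry["name"], start, end, entry["id"]))
        return flows


    def find_flow(flows: List[Flow], address: int) -> Optional[Flow]:
        """Return the first flow whose address range contains the address."""
        return next((fl for fl in flows if fl.contains(address)), None)


    if __name__ == "__main__":
        flows = load_flows("config/rec_flows.json")
        hit = find_flow(flows, 0x80101000 + 16)  # 16 bytes into SUM
        print(hit.name if hit else "unmapped")   # SUM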