SXXXXXXX_RadarDataReader/run_debug_analysis.py
VALLONGOL 5de2650675 add
2025-11-12 13:43:30 +01:00

104 lines
3.3 KiB
Python

import multiprocessing
from pathlib import Path
import logging
from radar_data_reader.utils.config_manager import ConfigManager
from radar_data_reader.core.app_controller import AppController
from radar_data_reader.utils import logger
from radar_data_reader.core.file_reader import run_worker_process
def run_analysis():
    """
    Run the radar data processing without a GUI to generate the debug log.

    Reads the input .out file path and output directory from
    ``config/config.json`` (keys ``last_opened_out_file`` and
    ``last_out_output_dir``), forces the ``debug_json_dump`` option on, and
    calls the worker-process function directly — no worker thread and no
    result-queue polling, unlike the GUI flow. Logs an error and returns
    early if the config file, the config keys, or the input file is missing.
    """
    # Set up root logging so worker output is visible on the console.
    logging.basicConfig(
        level=logging.INFO,
        format="%(asctime)s [%(levelname)-8s] %(name)-20s: %(message)s",
        datefmt="%H:%M:%S",
    )
    log = logging.getLogger(__name__)

    # Set up the config manager.
    config_path = Path("config/config.json").resolve()
    if not config_path.is_file():
        log.error("Config file not found at: %s", config_path)
        return
    config_manager = ConfigManager(config_path)
    config_manager.load_config()

    # Pull input/output paths from the config.
    filepath_str = config_manager.get("last_opened_out_file")
    output_dir_str = config_manager.get("last_out_output_dir")
    if not filepath_str or not output_dir_str:
        log.error("'last_opened_out_file' or 'last_out_output_dir' not found in config.json.")
        return
    filepath = Path(filepath_str)
    output_dir = Path(output_dir_str)
    basename = filepath.stem
    if not filepath.is_file():
        log.error("Input file not found: %s", filepath)
        return

    # Instantiate the controller that owns export-profile preparation.
    controller = AppController(config_manager)

    # Manually set options to force the debug dump (JSON export only).
    options = {
        "generate_csv": False,
        "csv_profile_name": "trackingdata",
        "csv_use_tab": False,
        "generate_json": True,
        "json_profile_name": "trackingdata",
        "use_full_path_headers": False,
        "analysis_only": False,
        "debug_json_dump": True,
    }

    log.info("Starting processing for file: %s", filepath)
    log.info("Debug dump is enabled. This may take a while.")

    # Run the processing.
    # NOTE: This is a simplified version of the flow in the GUI — it doesn't
    # use a separate thread or the result-queue polling mechanism.
    if not controller._prepare_out_processor_files(output_dir, basename, options):
        log.error("Failed to prepare output files.")
        return

    active_profile = controller.active_export_profiles.get("json")
    if not active_profile:
        log.error("Could not find the 'trackingdata' export profile.")
        return

    # Directly call the worker-process function.
    try:
        # The worker process expects queues, even if we don't use them fully here.
        command_queue = multiprocessing.Queue()
        result_queue = multiprocessing.Queue()
        run_worker_process(
            filepath=filepath,
            command_queue=command_queue,
            result_queue=result_queue,
            profile=active_profile,
            enable_profiling=False,
            debug_json_dump=True,
        )
        log.info("Processing completed.")
        log.info("Debug log should be in 'parsing_debug.log'")
    except Exception as e:
        # log.exception == log.error(..., exc_info=True): keeps the traceback.
        log.exception("An error occurred during processing: %s", e)
if __name__ == "__main__":
    # freeze_support() must run first: without it, frozen (PyInstaller)
    # executables re-spawn the main script in every worker process.
    multiprocessing.freeze_support()
    run_analysis()