Add new tab for segment analysis

VALLONGOL 2025-07-14 13:07:25 +02:00
parent 3242b53418
commit c814943739
8 changed files with 696 additions and 184 deletions

View File

@ -444,6 +444,10 @@
"silent_overwrite": true
}
},
"segment_processor_config": {
"last_output_dir": "C:/src/____GitProjects/radar_data_reader/out_analisys",
"create_separate_folders": false
},
"segment_export_g_reconvert_config": {
"cpp_executable_path": "C:/src/GRIFO-E/REP/Projects/Tools/wsLuna/g_reconvert/Debug/g_reconvert.exe",
"analyze_only": false,

View File

@ -31,20 +31,20 @@ from .data_enums import ENUM_REGISTRY, get_enum_name
from .export_profiles import ExportProfile, ExportField
from .export_manager import ExportManager, ExportJob
from .flight_analyzer import FlightAnalyzer
# --- CHANGE: New import ---
from .out_processor import OutProcessor
from .segment_processor import SegmentProcessor
from ..utils import logger
from ..gui.profile_editor_window import ProfileEditorWindow
from ..gui.rec_config_window import RecConfigWindow
from ..gui.segment_export_config_window import SegmentExportConfigWindow
from ..gui.segment_processor_tab import SegmentProcessorTab
log = logger.get_logger(__name__)
def _get_value_from_path(batch: DataBatch, field: ExportField) -> Any:
# ... (unchanged) ...
try:
path = field.data_path
if path == "batch_id": return batch.batch_id
@ -72,7 +72,6 @@ def _get_value_from_path(batch: DataBatch, field: ExportField) -> Any:
def _convert_ctypes_for_json(obj: Any) -> Any:
# ... (unchanged) ...
if isinstance(obj, (int, float, str, bool)) or obj is None: return obj
if isinstance(obj, (ctypes._SimpleCData)): return obj.value
if isinstance(obj, CtypesStructureBase):
@ -91,7 +90,7 @@ class AppController:
def __init__(self, config_manager: ConfigManager):
self.config_manager = config_manager
self.view = None
self.view: Optional[tk.Tk] = None
self.is_processing = False
self.worker_process: Optional[mp.Process] = None
self.command_queue = mp.Queue()
@ -99,8 +98,8 @@ class AppController:
self.export_manager = ExportManager(self.result_queue)
self.flight_analyzer = FlightAnalyzer(config_manager, self.result_queue)
# --- CHANGE: Instantiate the new specialized controller ---
self.out_processor = OutProcessor(self.command_queue, self.result_queue)
self.segment_processor: Optional[SegmentProcessor] = None
self.active_export_profiles: Dict[str, ExportProfile] = {}
self.output_file_handles: Dict[str, Any] = {}
@ -113,15 +112,14 @@ class AppController:
self.total_files_for_analysis: int = 0
self.total_segments_for_export: int = 0
def bind_view(self, view):
# ... (unchanged) ...
def bind_view(self, view: Any):
self.view = view
self._load_initial_config()
self.export_manager.start()
def shutdown(self):
log.info("Controller shutting down.")
self.export_manager.stop() # Now this call will work
self.export_manager.stop()
if self.is_processing:
self.stop_processing()
@ -135,7 +133,6 @@ class AppController:
logger.shutdown_logging_system()
def _load_initial_config(self):
# ... (unchanged) ...
if last_file := self.config_manager.get("last_opened_out_file"):
if Path(last_file).is_file(): self.view.out_filepath_var.set(last_file)
self.on_out_config_changed()
@ -146,11 +143,18 @@ class AppController:
self.on_rec_config_changed()
if last_dir := self.config_manager.get("last_rec_output_dir"):
self.view.rec_output_dir_var.set(last_dir)
profiles = self.config_manager.get_export_profiles()
self.view.update_export_profiles(profiles=profiles, active_out_profile=self.config_manager.get("active_out_export_profile_name"))
self.view.update_export_profiles(
profiles=profiles,
active_out_profile=self.config_manager.get("active_out_export_profile_name")
)
sp_config = self.config_manager.get("segment_processor_config", {})
self.view.segment_processor_tab.output_dir_var.set(sp_config.get("last_output_dir", ""))
self.view.segment_processor_tab.create_separate_folders_var.set(sp_config.get("create_separate_folders", True))
def on_out_config_changed(self, *args):
# ... (unchanged) ...
filepath_str = self.view.out_filepath_var.get()
if not filepath_str: return
p = Path(filepath_str)
@ -158,7 +162,6 @@ class AppController:
self.view.out_basename_var.set(p.stem)
def on_rec_config_changed(self, *args):
# ... (unchanged) ...
try:
file_count = self.view.rec_file_count_var.get()
except (tk.TclError, ValueError): return
@ -171,13 +174,11 @@ class AppController:
self.view.rec_basename_var.set(new_stem)
def select_output_dir(self, dir_var: tk.StringVar):
# ... (unchanged) ...
initial_dir = dir_var.get() if dir_var.get() else None
if new_dir := filedialog.askdirectory(initialdir=initial_dir, title="Select Output Directory"):
dir_var.set(new_dir)
def _select_file(self, path_var: tk.StringVar, file_types: List[Tuple[str, str]]):
# ... (unchanged) ...
initial_dir = (Path(path_var.get()).parent if path_var.get() and Path(path_var.get()).exists() else None)
if filepath := filedialog.askopenfilename(initialdir=initial_dir, filetypes=file_types):
path_var.set(filepath)
@ -189,7 +190,6 @@ class AppController:
self._select_file(self.view.rec_filepath_var, [("Recorder Data", "*.rec"), ("All files", "*.*")])
def _prepare_out_processor_files(self) -> bool:
# ... (unchanged) ...
self.output_file_handles.clear()
self.csv_writers.clear()
self.active_export_profiles.clear()
@ -203,8 +203,8 @@ class AppController:
self.active_export_profiles["csv"] = profile
path, delimiter = (output_dir / basename).with_suffix(".csv"), "\t" if self.view.out_csv_use_tab_var.get() else ","
fh = open(path, "w", encoding="utf-8", newline="")
self.output_file_handles["csv"], self.csv_writers["csv"] = fh, csv.writer(fh, delimiter=delimiter)
self.csv_writers["csv"].writerow([field.data_path if use_full_path else field.column_name for field in profile.fields])
self.csv_writers["csv"].writerow([field.column_name for field in profile.fields])
self.output_file_handles["csv"] = fh
if self.view.out_output_json_var.get():
profile = next((p for p in profiles if p.name == self.view.out_json_profile_var.get()), None)
if not profile: raise ValueError(f"JSON profile '{self.view.out_json_profile_var.get()}' not found.")
@ -215,7 +215,6 @@ class AppController:
self._close_all_files()
return False
# --- CHANGE: `start_out_processing` now delegates ---
def start_out_processing(self):
if self.is_processing:
log.warning("Processing already in progress.")
@ -238,7 +237,6 @@ class AppController:
self.config_manager.set("active_out_export_profile_name", self.view.out_csv_profile_var.get())
self.config_manager.save_config()
# Select an active profile (CSV or JSON) to pass to the worker
active_profile = self.active_export_profiles.get("csv") or self.active_export_profiles.get("json")
if not active_profile:
log.error("No active export profile found for processing.")
@ -246,14 +244,11 @@ class AppController:
self.view.update_ui_for_processing_state(False)
return
# Delegate process startup to the specialized controller
self.out_processor.start_processing(Path(filepath_str), active_profile)
self.worker_process = self.out_processor.get_worker()
self.view.poll_result_queue()
def _build_cpp_command_list(self) -> List[str]:
# ... (unchanged) ...
config = self.config_manager.get_cpp_converter_config()
exe_path = config.get("cpp_executable_path")
if not exe_path or not Path(exe_path).is_file(): raise ValueError("g_reconvert.exe path is not set or is invalid.")
@ -271,7 +266,6 @@ class AppController:
return command
def start_rec_conversion(self):
# ... (unchanged, but could be the next refactoring candidate) ...
if self.is_processing: log.warning("A process is already running."); return
try:
command_list, output_dir = self._build_cpp_command_list(), self.view.rec_output_dir_var.get()
@ -283,7 +277,6 @@ class AppController:
self._launch_worker(run_cpp_converter, worker_args)
def _launch_worker(self, target_func, args_tuple: Tuple):
# ... (unchanged, still used by start_rec_conversion) ...
while not self.command_queue.empty(): self.command_queue.get_nowait()
while not self.result_queue.empty(): self.result_queue.get_nowait()
self.worker_process = mp.Process(target=target_func, args=args_tuple, daemon=True)
@ -291,12 +284,13 @@ class AppController:
self.view.poll_result_queue()
def stop_processing(self):
# ... (unchanged) ...
if not self.is_processing or not self.worker_process: return
if not self.is_processing: return
log.info("Stop requested by user.")
self.command_queue.put("STOP")
if self.worker_process and self.worker_process.is_alive():
self.command_queue.put("STOP")
if self.segment_processor:
pass
#... all other functions from _close_all_files to the end remain the same for now...
def _close_all_files(self):
for fh in self.output_file_handles.values():
try: fh.close()
@ -306,13 +300,16 @@ class AppController:
def handle_data_batch(self, batch: DataBatch):
use_full_path = self.view.out_use_full_path_var.get()
if self.csv_writers.get("csv"):
profile, row_values = self.active_export_profiles["csv"], [_get_value_from_path(batch, field) for field in profile.fields]
if "csv" in self.csv_writers:
profile = self.active_export_profiles["csv"]
row_values = [_get_value_from_path(batch, field) for field in profile.fields]
self.csv_writers["csv"].writerow(row_values)
if "json" in self.active_export_profiles:
profile, row_dict = self.active_export_profiles["json"], {}
profile = self.active_export_profiles["json"]
row_dict = {}
for field in profile.fields:
raw_value, serializable_value = _get_value_from_path(batch, field), _convert_ctypes_for_json(raw_value)
raw_value = _get_value_from_path(batch, field)
serializable_value = _convert_ctypes_for_json(raw_value)
key = field.data_path if use_full_path else field.column_name
row_dict[key] = serializable_value
self.json_data_buffer.append(row_dict)
@ -350,6 +347,7 @@ class AppController:
self._close_all_files()
self.is_processing = False
self.worker_process = None
self.segment_processor = None
self.view.update_ui_for_processing_state(False)
is_cpp_success = "Conversion process completed successfully" in msg.get("message", "")
self.view.update_rec_tab_buttons_state(conversion_successful=is_cpp_success)
@ -408,15 +406,11 @@ class AppController:
log.info("g_reconverter configuration saved.")
def save_segment_export_config(self, new_config: Dict[str, Any]):
"""Saves the segment export configuration, preserving the main executable path."""
full_config = self.config_manager.get("segment_export_config", {})
full_config.update(new_config)
main_cpp_config = self.config_manager.get_cpp_converter_config()
if "g_reconverter_options" in full_config:
full_config["g_reconverter_options"]["cpp_executable_path"] = main_cpp_config.get("cpp_executable_path")
self.config_manager.set("segment_export_config", full_config)
self.config_manager.save_config()
log.info("Segment export configuration saved.")
@ -495,6 +489,7 @@ class AppController:
log.error("Cannot load previous analysis: Pandas is not installed."); return
try:
summary_df = pd.read_csv(summary_csv_path)
self.last_flight_summary_df = summary_df
self.view.populate_timeline_from_dataframe(summary_df)
self.view.analyzer_info_var.set("Successfully loaded previous analysis.")
self.view.open_flight_folder_button.config(state=tk.NORMAL)
@ -519,48 +514,42 @@ class AppController:
self.view.start_processing_ui()
self.view.analyzer_info_var.set(f"Starting analysis for flight: {flight_name}...")
# Delegate the analysis startup to the specialized controller
self.flight_analyzer.start_analysis(rec_folder, flight_name)
# --- CHANGE HERE ---
# Update the main controller's worker process
self.worker_process = self.flight_analyzer.worker_process
# Queue polling stays here because it is a UI responsibility
self.view.poll_result_queue()
def handle_final_analysis_steps(self):
self.flight_analyzer.handle_final_analysis_steps()
def handle_analysis_summary_data(self, msg: Dict[str, Any]):
"""Handles the final summary data from the analyzer to update the view."""
summary_df = msg.get("data")
flight_folder_path = msg.get("flight_folder_path")
if self.view and summary_df is not None:
self.last_flight_summary_df = summary_df
self.view.populate_timeline_from_dataframe(summary_df)
log.info(f"Analysis summary received for flight folder: {flight_folder_path}")
def start_segment_export(self):
if self.is_processing:
messagebox.showwarning("Process Busy", "Cannot start export while another process is running.", parent=self.view); return
messagebox.showwarning("Process Busy", "Cannot start export while another process is running.", parent=self.view)
return
selected_item_ids = self.view.flight_timeline_tree.selection()
if not selected_item_ids:
messagebox.showinfo("No Selection", "Please select one or more segments to export.", parent=self.view); return
messagebox.showinfo("No Selection", "Please select one or more segments to export.", parent=self.view)
return
if self.last_flight_summary_df is None:
messagebox.showerror("Error", "Flight summary data is not available. Cannot proceed with export.")
return
self.total_segments_for_export = len(selected_item_ids)
log.info(f"Queuing {self.total_segments_for_export} segment(s) for export.")
# Empty the queue before adding new jobs
while not self.export_manager.job_queue.empty():
try: self.export_manager.job_queue.get_nowait()
except queue.Empty: break
# Queue the jobs
rec_folder = Path(self.view.analyzer_rec_folder_var.get())
export_config = self.config_manager.get("segment_export_config")
folder_template = export_config.get("naming_options", {}).get("folder_name_template", "{Segment}")
@ -571,37 +560,112 @@ class AppController:
return
for iid in selected_item_ids:
segment_data = self.view.flight_timeline_tree.item(iid)['values']
safe_folder_name = self._generate_segment_folder_name(folder_template, segment_data)
segment_folder = current_flight_path / safe_folder_name
job = ExportJob(segment_data, segment_folder, rec_folder, export_config)
self.export_manager.submit_job(job)
try:
row_index = self.view.flight_timeline_tree.index(iid)
segment_info_dict = self.last_flight_summary_df.iloc[row_index].to_dict()
placeholders = {
"{Segment}": str(segment_info_dict.get('Segment (Mode | Scale | WF)', '')),
"{StartBatch}": str(segment_info_dict.get('Start Batch', '')),
"{EndBatch}": str(segment_info_dict.get('End Batch', '')),
"{BatchCount}": str(segment_info_dict.get('Batch Count', '')),
}
folder_name = folder_template
for key, value in placeholders.items():
folder_name = folder_name.replace(key, value)
safe_folder_name = re.sub(r'[\\/*?:"<>|]', "-", folder_name)
segment_folder = current_flight_path / safe_folder_name
job = ExportJob(segment_info_dict, segment_folder, rec_folder, export_config)
self.export_manager.submit_job(job)
except (ValueError, IndexError) as e:
log.error(f"Could not process selection '{iid}': {e}", exc_info=True)
continue
# --- CHANGE HERE: Add the "poison pill" at the end of the queue ---
self.export_manager.submit_job(None)
# Start the processing UI
self.is_processing = True
self.view.start_processing_ui()
self.view.analyzer_info_var.set(f"Exporting {self.total_segments_for_export} segment(s)...")
# Start the worker thread (if it is not already running)
self.export_manager.start()
self.view.poll_result_queue()
def load_segments_for_processing(self):
log.info("Loading segments for batch processing...")
if self.is_processing:
messagebox.showwarning("Busy", "Cannot load segments while another process is running.", parent=self.view)
return
def _generate_segment_folder_name(self, template: str, segment_data: List[Any]) -> str:
placeholders = {
"{Segment}": str(segment_data[0]),
"{StartBatch}": str(segment_data[1]),
"{EndBatch}": str(segment_data[2]),
"{BatchCount}": str(segment_data[3]),
flight_folder = self.flight_analyzer.current_flight_folder_path
if not flight_folder or not flight_folder.is_dir():
messagebox.showerror("No Flight Loaded", "Please run a flight analysis first on the 'Flight Analyzer' tab.", parent=self.view)
return
if self.last_flight_summary_df is None:
messagebox.showerror("No Summary Data", "Flight summary data not found. Please re-run the analysis.", parent=self.view)
return
try:
export_config = self.config_manager.get("segment_export_config", {})
naming_template = export_config.get("naming_options", {}).get("folder_name_template", "{Segment}_{StartBatch}-{EndBatch}")
verified_segments = SegmentProcessor.scan_for_segments(flight_folder, self.last_flight_summary_df, naming_template)
if not verified_segments:
log.warning("No segments found or summary is empty.")
self.view.segment_processor_tab.populate_segments(verified_segments)
log.info(f"Found and verified {len(verified_segments)} segments.")
except Exception as e:
log.error(f"Failed to load and verify segments: {e}", exc_info=True)
messagebox.showerror("Error", f"An error occurred while loading segments:\n{e}", parent=self.view)
def start_segment_batch_processing(self):
if self.is_processing:
messagebox.showwarning("Busy", "Another process is already running.", parent=self.view)
return
tab: SegmentProcessorTab = self.view.segment_processor_tab
selected_segments = tab.get_selected_segments_data()
if not selected_segments:
messagebox.showinfo("No Selection", "Please select at least one 'Ready' segment to process.", parent=self.view)
return
config = {
"segments_to_process": selected_segments,
"output_dir": tab.output_dir_var.get(),
"generate_csv": tab.generate_csv_var.get(),
"generate_json": tab.generate_json_var.get(),
"create_separate_folders": tab.create_separate_folders_var.get(),
"csv_profile_name": tab.csv_profile_var.get(),
"json_profile_name": tab.json_profile_var.get(),
"csv_use_tab": tab.csv_use_tab_var.get(),
"use_full_path_headers": tab.use_full_path_var.get(),
"profiles": self.config_manager.get_export_profiles()
}
folder_name = template
for key, value in placeholders.items(): folder_name = folder_name.replace(key, value)
return re.sub(r'[\\/*?:"<>|]', "-", folder_name)
#def _export_finalizer(self):
# self.export_manager.job_queue.join()
# if self.is_processing:
# self.result_queue.put({"type": "complete", "message": "All segments exported."})
if not config["output_dir"]:
messagebox.showerror("Configuration Error", "Please specify an output directory.", parent=self.view)
return
if not config["generate_csv"] and not config["generate_json"]:
messagebox.showerror("Configuration Error", "Please select at least one output format (CSV or JSON).", parent=self.view)
return
sp_config = self.config_manager.get("segment_processor_config", {})
sp_config["last_output_dir"] = config["output_dir"]
sp_config["create_separate_folders"] = config["create_separate_folders"]
self.config_manager.set("segment_processor_config", sp_config)
self.config_manager.save_config()
self.is_processing = True
self.view.start_processing_ui()
self.segment_processor = SegmentProcessor(config, self.result_queue, self.command_queue)
self.segment_processor.start()
self.view.poll_result_queue()
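For reference, the new SegmentProcessor reports through the shared result queue with "batch_progress" and "complete" messages. Below is a minimal sketch of a consumer for those messages; the message shapes mirror the code above, while drain_results and the update_progress callback are hypothetical helpers, not part of the real view.

import queue

def drain_results(result_queue: "queue.Queue", update_progress) -> bool:
    """Drain pending messages; return True once a 'complete' message is seen."""
    while True:
        try:
            msg = result_queue.get_nowait()
        except queue.Empty:
            return False  # nothing left for now; poll again later
        if msg["type"] == "batch_progress":
            update_progress(msg["current"], msg["total"], msg["segment_name"])
        elif msg["type"] == "complete":
            return True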

View File

@ -16,13 +16,14 @@ log = logger.get_logger(__name__)
class ExportJob:
"""A dataclass-like object to hold all information for a single export job."""
def __init__(self, segment_data: List[Any], segment_folder: Path, rec_folder: Path, export_config: Dict[str, Any]):
self.status: str = str(segment_data[0])
self.start_batch: int = int(segment_data[1])
self.end_batch: int = int(segment_data[2])
self.batch_count: int = int(segment_data[3])
self.start_file: str = str(segment_data[5])
self.end_file: str = str(segment_data[6])
def __init__(self, segment_info: Dict[str, Any], segment_folder: Path, rec_folder: Path, export_config: Dict[str, Any]):
# Robust initialization via a dictionary
self.status: str = str(segment_info.get('Segment (Mode | Scale | WF)', ''))
self.start_batch: int = int(segment_info.get('Start Batch', 0))
self.end_batch: int = int(segment_info.get('End Batch', 0))
self.batch_count: int = int(segment_info.get('Batch Count', 0))
self.start_file: str = str(segment_info.get('Start File', ''))
self.end_file: str = str(segment_info.get('End File', ''))
self.segment_folder = segment_folder
self.rec_folder = rec_folder
@ -30,6 +31,9 @@ class ExportJob:
self.cpp_config = export_config.get("g_reconverter_options", {})
self.report_path = segment_folder / "export_report.txt"
# ... the rest of the ExportManager class remains unchanged ...
# ... the complete file is provided anyway, to be safe ...
class ExportManager:
"""
Manages a queue of export jobs and processes them in a background thread.
@ -41,9 +45,6 @@ class ExportManager:
self._stop_event = threading.Event()
def start(self):
"""
Starts the worker thread if it's not already running.
"""
if self._worker_thread is not None and self._worker_thread.is_alive():
log.warning("Export worker thread is already running.")
return
@ -53,9 +54,6 @@ class ExportManager:
self._worker_thread.start()
def stop(self):
"""
Signals the worker thread to stop and waits for it to terminate.
"""
log.info("Stopping ExportManager worker thread...")
if self._worker_thread and self._worker_thread.is_alive():
self.job_queue.put(None)
@ -66,7 +64,6 @@ class ExportManager:
log.info("ExportManager worker thread was not running.")
def submit_job(self, job: Optional[ExportJob]):
"""Adds a new export job or a poison pill (None) to the queue."""
self.job_queue.put(job)
def _build_cpp_command(self, job: ExportJob) -> List[str]:
@ -75,11 +72,13 @@ class ExportManager:
if not exe_path or not Path(exe_path).is_file():
raise ValueError(f"g_reconverter executable not found at: {exe_path}")
# Now job.start_file and job.end_file should be correct
first_rec_path = job.rec_folder / job.start_file
start_num_match = re.search(r'_(\d+)\.rec$', job.start_file)
end_num_match = re.search(r'_(\d+)\.rec$', job.end_file)
if not start_num_match or not end_num_match:
log.error(f"Could not parse sequence number from filenames: '{job.start_file}', '{job.end_file}'")
raise ValueError("Could not extract sequence number from REC filenames.")
start_num, end_num = int(start_num_match.group(1)), int(end_num_match.group(1))
@ -90,32 +89,24 @@ class ExportManager:
output_log_path = job.segment_folder / f"{output_basename}.log"
command = [str(exe_path), str(first_rec_path)]
base_output_dir_str = str(job.segment_folder)
if not base_output_dir_str.endswith(os.sep):
base_output_dir_str += os.sep
command.append(f"//b={base_output_dir_str}")
command.append(f"/l={str(output_log_path)}")
command.append(f"/n={num_files_to_process}")
if config.get("post_process"): command.append(f"/p={config.get('post_process_level', '1')}")
if config.get("video_show"): command.append("/vshow")
if config.get("video_save"): command.append("/vsave")
if config.get("gps_save_track"): command.append("/gps")
if config.get("no_sign"): command.append("/nosign")
command.append(f"/o={str(output_out_path)}")
if config.get("silent_overwrite", True):
command.append("//o")
return command
def _process_jobs(self):
"""
Processes jobs from the queue, using ProcessRunner for execution.
"""
log.info(f"ExportManager worker thread started (ID: {threading.get_ident()}). Waiting for jobs.")
while True:
@ -123,10 +114,9 @@ class ExportManager:
if job is None:
log.info("Poison pill received. All export jobs processed.")
# --- CRUCIAL CHANGE: Send the final completion signal ---
self.result_queue.put({"type": "complete", "message": "All segments exported."})
self.job_queue.task_done()
break # Exit the loop and terminate the thread
break
log_capture = []
command_list = []
@ -134,15 +124,12 @@ class ExportManager:
try:
log.info(f"Preparing export job for segment: {job.status}")
if job.segment_folder.exists():
shutil.rmtree(job.segment_folder)
job.segment_folder.mkdir(parents=True)
command_list = self._build_cpp_command(job)
local_runner_queue = queue.Queue()
runner = ProcessRunner(
command_list=command_list,
result_queue=local_runner_queue,
@ -182,11 +169,9 @@ class ExportManager:
f.write(f"Source Files: From {job.start_file} to {job.end_file}\n")
f.write(f"Export Status: {'SUCCESS' if is_successful else 'FAILED'}\n")
f.write(f"Return Code: {return_code}\n\n")
if command_list:
f.write("--- g_reconverter Command ---\n")
f.write(" ".join(map(str, command_list)) + "\n\n")
f.write("--- g_reconverter Log ---\n")
f.write("\n".join(log_capture))
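The shutdown mechanics above reduce to the classic poison-pill pattern: the worker blocks on the queue, and a None sentinel tells it no more jobs will arrive. A standalone sketch of that pattern, assuming nothing beyond the standard library:

import queue
import threading

job_queue: "queue.Queue" = queue.Queue()

def worker():
    while True:
        job = job_queue.get()
        if job is None:  # poison pill: no more jobs will arrive
            job_queue.task_done()
            break
        try:
            print(f"processing {job}")  # stand-in for the real export work
        finally:
            job_queue.task_done()

t = threading.Thread(target=worker, daemon=True)
t.start()
for j in ("segment-a", "segment-b"):
    job_queue.put(j)
job_queue.put(None)  # signal completion
t.join()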

View File

@ -12,6 +12,7 @@ import csv
import re
from pathlib import Path
from typing import List, Optional
from datetime import timedelta
try:
import pandas as pd
@ -24,6 +25,7 @@ from ..core.cpp_runner import run_cpp_converter
log = logger.get_logger(__name__)
TICK_DURATION_S = 64e-6
class FlightAnalyzer:
"""Manages the multi-step process of analyzing a flight folder."""
@ -33,16 +35,12 @@ class FlightAnalyzer:
self.result_queue = result_queue
self.worker_process: Optional[mp.Process] = None
self.current_flight_folder_path: Optional[Path] = None
self.current_flight_name: Optional[str] = None
def start_analysis(
self, rec_folder_str: str, flight_name: str
) -> threading.Thread:
"""
Starts the main analysis orchestrator in a separate thread.
Returns:
The thread object that has started the analysis process.
"""
self.current_flight_name = flight_name
analysis_thread = threading.Thread(
target=self._flight_analysis_orchestrator,
args=(rec_folder_str, flight_name),
@ -52,7 +50,6 @@ class FlightAnalyzer:
return analysis_thread
def _flight_analysis_orchestrator(self, rec_folder_str: str, flight_name: str):
"""Orchestrates the C++ analysis and subsequent Python parsing."""
self.current_flight_folder_path = None
try:
workspace_dir = Path.cwd() / "flight_workspace"
@ -84,14 +81,13 @@ class FlightAnalyzer:
log.info(f"Running g_reconverter for full analysis: {' '.join(command_list)}")
# Create and start the worker process directly
self.worker_process = mp.Process(
target=run_cpp_converter,
args=(command_list, self.result_queue, str(flight_dir), True),
daemon=True,
)
self.worker_process.start()
self.worker_process.join() # Wait for the C++ part to finish
self.worker_process.join()
log.info("g_reconverter full analysis process finished.")
self.result_queue.put({"type": "cpp_complete"})
@ -101,10 +97,6 @@ class FlightAnalyzer:
self.result_queue.put({"type": "error", "message": str(e)})
def handle_final_analysis_steps(self):
"""
Handles the Python-side parsing and summarization after the C++ process completes.
This is called by the main controller in response to a 'cpp_complete' message.
"""
if not self.current_flight_folder_path:
log.error("Cannot run final analysis steps: flight folder path is not set.")
self.result_queue.put({"type": "error", "message": "Internal state error: flight folder path missing."})
@ -113,9 +105,7 @@ class FlightAnalyzer:
try:
log.info("C++ part complete. Starting Python-side analysis...")
all_txt_files = list(self.current_flight_folder_path.glob("pp-*.txt"))
summary_files = [
f for f in all_txt_files if "aesa" not in f.name.lower()
]
summary_files = [f for f in all_txt_files if "aesa" not in f.name.lower()]
if not summary_files:
raise FileNotFoundError("Main summary file not found after analysis.")
@ -132,14 +122,15 @@ class FlightAnalyzer:
storyboard_df, self.current_flight_folder_path
)
# Instead of populating the timeline directly, send the data back to the controller
self._create_flight_report_txt(summary_df, self.current_flight_folder_path)
self.result_queue.put({
"type": "analysis_summary_data",
"data": summary_df,
"flight_folder_path": self.current_flight_folder_path
})
log.info("Flight analysis complete. Storyboard and summary saved.")
log.info("Flight analysis complete. All artifacts saved.")
self.result_queue.put({"type": "complete", "message": "Analysis successful."})
except Exception as e:
@ -147,7 +138,6 @@ class FlightAnalyzer:
self.result_queue.put({"type": "error", "message": str(e)})
def _make_columns_unique(self, columns: List[str]) -> List[str]:
"""Ensures all column names in a list are unique by appending a suffix."""
final_cols, counts = [], {}
for col in columns:
if col in counts:
@ -161,39 +151,37 @@ class FlightAnalyzer:
def _parse_and_save_storyboard(
self, txt_path: Path, output_dir: Path
) -> Optional["pd.DataFrame"]:
"""Parses the raw text storyboard file from the C++ tool into a DataFrame."""
if pd is None:
log.error("Pandas library is not installed, cannot parse storyboard.")
return None
try:
with open(txt_path, "r", encoding="utf-8", errors="ignore") as f:
header_line = f.readline()
raw_columns = [h.strip() for h in header_line.strip().split(";") if h.strip()]
header_line = f.readline().strip()
raw_columns = [h.strip() for h in header_line.split(";") if h.strip()]
unique_column_names = self._make_columns_unique(raw_columns)
num_columns = len(unique_column_names)
clean_data = []
reader = csv.reader(f, delimiter=";")
for row in reader:
if len(row) >= num_columns:
clean_data.append([field.strip() for field in row[:num_columns]])
if not clean_data:
log.warning(f"No valid data rows parsed from {txt_path.name}")
return None
storyboard_df = pd.DataFrame(clean_data, columns=unique_column_names)
# Data cleaning and type conversion
batch_col, ttag_col = "Batch", "TTAG"
if batch_col in storyboard_df.columns:
storyboard_df[batch_col] = pd.to_numeric(storyboard_df[batch_col], errors="coerce")
if ttag_col in storyboard_df.columns:
storyboard_df[ttag_col] = pd.to_numeric(storyboard_df[ttag_col], errors="coerce")
storyboard_df = pd.read_csv(
txt_path,
sep=';',
header=0,
names=unique_column_names,
on_bad_lines='skip',
encoding='utf-8',
encoding_errors='ignore'
)
# --- CRUCIAL CHANGE: Strip whitespace from all string columns ---
for col in storyboard_df.select_dtypes(include=['object']).columns:
storyboard_df[col] = storyboard_df[col].str.strip()
storyboard_df.dropna(subset=[batch_col, ttag_col], inplace=True)
storyboard_df[batch_col] = storyboard_df[batch_col].astype(int)
storyboard_df[ttag_col] = storyboard_df[ttag_col].astype(int)
numeric_cols = ["Batch", "TTAG"]
for col in numeric_cols:
if col in storyboard_df.columns:
storyboard_df[col] = pd.to_numeric(storyboard_df[col], errors="coerce")
storyboard_df.dropna(subset=["Batch", "TTAG"], inplace=True)
storyboard_df["Batch"] = storyboard_df["Batch"].astype(int)
storyboard_df["TTAG"] = storyboard_df["TTAG"].astype(int)
except Exception as e:
log.error(f"Failed to read or process summary file {txt_path.name}: {e}")
@ -204,32 +192,32 @@ class FlightAnalyzer:
return None
csv_path = output_dir / "flight_storyboard.csv"
json_path = output_dir / "flight_storyboard.json"
log.info(f"Saving full storyboard to {csv_path}")
storyboard_df.to_csv(csv_path, index=False)
log.info(f"Saving full storyboard to {json_path}")
storyboard_df.to_json(json_path, orient="records", indent=4)
return storyboard_df
def _create_and_save_summary(
self, storyboard_df: "pd.DataFrame", output_dir: Path
) -> "pd.DataFrame":
"""Aggregates the detailed storyboard into a segment-based summary."""
df = storyboard_df.copy()
df['Mode'] = df['Mode'].str.strip().replace('', 'N/A')
df['Mode.3'] = df['Mode.3'].str.strip().replace('', 'N/A')
df['Scal.2'] = df['Scal.2'].str.strip().replace('', 'N/A')
df['WF'] = df['WF'].str.strip().replace('', 'N/A')
df['WF.2'] = df['WF.2'].str.strip().replace('', 'N/A')
df['Mode'] = df.get('Mode', pd.Series(index=df.index, dtype=str)).astype(str).str.strip().replace('', 'N/A')
df['Mode.3'] = df.get('Mode.3', pd.Series(index=df.index, dtype=str)).astype(str).str.strip().replace('', 'N/A')
df['Scal.2'] = df.get('Scal.2', pd.Series(index=df.index, dtype=str)).astype(str).str.strip().replace('', 'N/A')
df['WF'] = df.get('WF', pd.Series(index=df.index, dtype=str)).astype(str).str.strip().replace('', 'N/A')
df['WF.2'] = df.get('WF.2', pd.Series(index=df.index, dtype=str)).astype(str).str.strip().replace('', 'N/A')
df['status'] = (
df['Mode'].astype(str) + '-' + df['Mode.3'].astype(str) + '_' +
df['Scal.2'].astype(str) + '_' + 'wf-' +
df['WF'].astype(str) + '-' + df['WF.2'].astype(str)
df['Mode'] + '-' + df['Mode.3'] + '_' +
df['Scal.2'] + '_' + 'wf-' +
df['WF'] + '-' + df['WF.2']
)
df['status_changed'] = df['status'].ne(df['status'].shift())
min_ttag = df['TTAG'].min()
df['flight_time_s'] = (df['TTAG'] - min_ttag) * TICK_DURATION_S
change_indices = df[df['status_changed']].index.tolist()
if not change_indices or change_indices[0] != 0:
change_indices.insert(0, 0)
@ -238,20 +226,22 @@ class FlightAnalyzer:
summary_records = []
for i in range(len(change_indices) - 1):
start_loc = change_indices[i]
end_loc = change_indices[i+1] - 1
start_loc, end_loc = change_indices[i], change_indices[i+1] - 1
segment = df.loc[start_loc:end_loc]
if segment.empty:
continue
if segment.empty: continue
TICK_DURATION_S = 64e-6
start_time_s = segment['flight_time_s'].iloc[0]
end_time_s = segment['flight_time_s'].iloc[-1]
summary_records.append({
'Segment (Mode | Scale | WF)': segment['status'].iloc[0],
'Start Batch': segment['Batch'].iloc[0],
'End Batch': segment['Batch'].iloc[-1],
'Batch Count': segment['Batch'].iloc[-1] - segment['Batch'].iloc[0] + 1,
'Duration (s)': (segment['TTAG'].iloc[-1] - segment['TTAG'].iloc[0]) * TICK_DURATION_S,
'Duration (s)': end_time_s - start_time_s,
'start_time_str': str(timedelta(seconds=int(start_time_s))),
'end_time_str': str(timedelta(seconds=int(end_time_s))),
'Start File': segment['file'].iloc[0],
'End File': segment['file'].iloc[-1],
'# Files': segment['file'].nunique()
@ -267,6 +257,48 @@ class FlightAnalyzer:
log.info(f"Saving aggregated summary to {json_path}")
summary_df.to_json(json_path, orient="records", indent=4)
# Flight report is not generated here anymore, can be done by controller or another service
return summary_df
return summary_df
def _create_flight_report_txt(self, summary_df: "pd.DataFrame", output_dir: Path):
report_path = output_dir / "flight_report.txt"
log.info(f"Generating human-readable flight report to {report_path}")
try:
total_duration = summary_df['Duration (s)'].sum()
total_batches = summary_df['Batch Count'].sum()
num_segments = len(summary_df)
with open(report_path, "w", encoding="utf-8") as f:
f.write("=" * 80 + "\n")
f.write(f" FLIGHT ANALYSIS REPORT - {self.current_flight_name} \n")
f.write("=" * 80 + "\n\n")
f.write("--- FLIGHT OVERVIEW ---\n")
f.write(f"Total Duration: {total_duration:.2f} seconds\n")
f.write(f"Total Batches: {total_batches}\n")
f.write(f"Total Segments: {num_segments}\n\n")
f.write("--- SEGMENT SUMMARY ---\n")
report_df = summary_df.copy()
report_df['Duration (s)'] = report_df['Duration (s)'].map('{:.2f}'.format)
report_df = report_df[[
'Segment (Mode | Scale | WF)',
'Start Batch', 'End Batch', 'Batch Count',
'start_time_str', 'end_time_str', 'Duration (s)',
'Start File', 'End File', '# Files'
]]
report_df.rename(columns={
'start_time_str': 'Start Time',
'end_time_str': 'End Time'
}, inplace=True)
df_string = report_df.to_string(index=True)
f.write(df_string)
f.write("\n\n" + "=" * 80 + "\n")
f.write(" End of Report\n")
except Exception as e:
log.error(f"Failed to generate flight text report: {e}")
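As a sanity check on the timing math used above (one 64 µs tick per TICK_DURATION_S; the TTAG values here are chosen purely for illustration):

from datetime import timedelta

TICK_DURATION_S = 64e-6                # one TTAG tick = 64 microseconds
min_ttag, ttag = 1_000_000, 2_562_500  # illustrative tick counts
flight_time_s = (ttag - min_ttag) * TICK_DURATION_S  # -> 100.0 s
print(str(timedelta(seconds=int(flight_time_s))))    # -> 0:01:40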

View File

@ -0,0 +1,209 @@
# radar_data_reader/core/segment_processor.py
"""
Contains the business logic for the Segment Processor feature.
This class handles scanning for exported segments and processing their .out
files in a batch operation to extract structured data.
"""
import threading
import queue
import re
import multiprocessing as mp
import json
import csv
from pathlib import Path
from typing import Dict, Any, List, Optional
try:
import pandas as pd
except ImportError:
pd = None
from ..utils import logger
from .file_reader import run_worker_process
from ..core.export_profiles import ExportProfile
log = logger.get_logger(__name__)
def _get_value_from_path(batch: Any, path: str) -> Any:
# This is a simplified version for the processor.
# A future version could unify it with the one in AppController.
try:
if path == "batch_id": return getattr(batch, 'batch_id', 'N/A')
parts = re.split(r"\.|\[", path)
current_obj = batch
for part in parts:
if current_obj is None: return "N/A"
if part.endswith("]"):
index_str = part[:-1]
if not index_str.isdigit(): return "N/A"
try:
current_obj = current_obj[int(index_str)]
except (IndexError, TypeError): return "N/A"
else:
current_obj = getattr(current_obj, part, None)
return current_obj if current_obj is not None else "N/A"
except Exception:
return "N/A"
def _convert_ctypes_for_json(obj: Any) -> Any:
# Helper function for JSON serialization
if isinstance(obj, (int, float, str, bool)) or obj is None: return obj
if hasattr(obj, '_length_'): # It's a ctypes array
return [_convert_ctypes_for_json(item) for item in obj]
if hasattr(obj, '_fields_'): # It's a ctypes structure
return {field: _convert_ctypes_for_json(getattr(obj, field)) for field, _ in obj._fields_}
if isinstance(obj, bytes):
return obj.hex()
return obj
class SegmentProcessor:
"""Manages the batch processing of exported flight segments."""
def __init__(self, config: Dict[str, Any], result_queue: queue.Queue, command_queue: queue.Queue):
self.config = config
self.result_queue = result_queue
self.command_queue = command_queue
self._worker_thread: Optional[threading.Thread] = None
self._stop_event = threading.Event()
def start(self):
"""Starts the batch processing in a background thread."""
if self._worker_thread and self._worker_thread.is_alive():
log.warning("Segment processing is already running.")
return
self._stop_event.clear()
self._worker_thread = threading.Thread(target=self._run_batch_processing, daemon=True)
self._worker_thread.start()
def _run_batch_processing(self):
"""The main loop for processing segments, executed in a thread."""
segments_to_process = self.config.get("segments_to_process", [])
output_dir = Path(self.config.get("output_dir"))
create_folders = self.config.get("create_separate_folders", True)
profiles = self.config.get("profiles", [])
csv_profile_name = self.config.get("csv_profile_name")
json_profile_name = self.config.get("json_profile_name")
csv_profile = next((p for p in profiles if p.name == csv_profile_name), None)
json_profile = next((p for p in profiles if p.name == json_profile_name), None)
log.info(f"Starting batch processing for {len(segments_to_process)} segments.")
for i, segment_info in enumerate(segments_to_process):
if self._stop_event.is_set():
log.info("Stop command received, halting batch processing.")
break
segment_name = segment_info.get("folder_name")
out_file_path = Path(segment_info.get("out_file_path"))
log.info(f"Processing segment {i+1}/{len(segments_to_process)}: {segment_name}")
self.result_queue.put({
"type": "batch_progress",
"current": i + 1,
"total": len(segments_to_process),
"segment_name": segment_name
})
# Determine the output folder for this segment
current_output_dir = output_dir
if create_folders:
current_output_dir = output_dir / segment_name
current_output_dir.mkdir(parents=True, exist_ok=True)
# Run the worker and handle its output
self._process_single_segment(out_file_path, current_output_dir, segment_name, csv_profile, json_profile)
log.info("Batch processing finished.")
self.result_queue.put({"type": "complete", "message": "Segment batch processing complete."})
def _process_single_segment(self, in_path: Path, out_dir: Path, base_name: str, csv_p: ExportProfile, json_p: ExportProfile):
"""Processes a single .out file and generates CSV/JSON outputs."""
# Create local queues for this specific worker
worker_cmd_q = mp.Queue()
worker_res_q = mp.Queue()
# The profile passed to the worker only drives the parsing;
# the actual export is handled here.
active_profile = csv_p or json_p
if not active_profile: return # Skip if no profiles are active
worker_args = (in_path, worker_cmd_q, worker_res_q, active_profile)
process = mp.Process(target=run_worker_process, args=worker_args, daemon=True)
process.start()
csv_writer, json_data, csv_fh = None, [], None
try:
if csv_p:
csv_path = (out_dir / base_name).with_suffix(".csv")
csv_fh = open(csv_path, "w", encoding="utf-8", newline="")
csv_writer = csv.writer(csv_fh)
csv_writer.writerow([field.column_name for field in csv_p.fields])
while True:
msg = worker_res_q.get()
if msg['type'] == 'data_batch':
batch = msg['data']
if csv_writer:
row = [_get_value_from_path(batch, field.data_path) for field in csv_p.fields]
csv_writer.writerow(row)
if json_p:
# Convert value-by-value: _convert_ctypes_for_json returns dicts
# unchanged, so converting the assembled dict would leave raw
# ctypes values inside it.
json_row = {field.column_name: _convert_ctypes_for_json(_get_value_from_path(batch, field.data_path)) for field in json_p.fields}
json_data.append(json_row)
elif msg['type'] == 'complete':
break
elif msg['type'] == 'error':
log.error(f"Worker for {in_path.name} failed: {msg['message']}")
break
finally:
if csv_fh:
csv_fh.close()
if json_p and json_data:
json_path = (out_dir / base_name).with_suffix(".json")
with open(json_path, "w", encoding="utf-8") as f:
json.dump(json_data, f, indent=4)
process.join(timeout=2)
if process.is_alive():
process.terminate()
@staticmethod
def scan_for_segments(flight_path: Path, flight_summary_df: pd.DataFrame, naming_template: str) -> List[Dict[str, Any]]:
"""Scans a flight workspace to find and verify exported segments."""
if flight_summary_df is None or flight_summary_df.empty:
return []
verified_segments = []
for _, row in flight_summary_df.iterrows():
placeholders = {
"{Segment}": str(row.get('Segment (Mode | Scale | WF)', '')),
"{StartBatch}": str(row.get('Start Batch', '')),
"{EndBatch}": str(row.get('End Batch', '')),
"{BatchCount}": str(row.get('Batch Count', '')),
}
folder_name = naming_template
for key, value in placeholders.items():
folder_name = folder_name.replace(key, value)
safe_folder_name = re.sub(r'[\\/*?:"<>|]', "-", folder_name)
segment_path = flight_path / safe_folder_name
out_file_path = segment_path / f"{safe_folder_name}.out"
segment_info = row.to_dict()
segment_info['folder_name'] = safe_folder_name
if segment_path.is_dir() and out_file_path.is_file() and out_file_path.stat().st_size > 0:
segment_info['is_exported_and_valid'] = True
segment_info['out_file_path'] = out_file_path
else:
segment_info['is_exported_and_valid'] = False
segment_info['out_file_path'] = None
verified_segments.append(segment_info)
return verified_segments
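A minimal driver for the class above, assuming a hypothetical flight workspace path, an already-loaded summary DataFrame, and no export profiles (with no profiles each segment is skipped, so this only exercises the wiring); the config keys mirror the ones AppController builds in start_segment_batch_processing.

import queue
from pathlib import Path

flight_folder = Path("flight_workspace/my_flight")  # hypothetical path
segments = SegmentProcessor.scan_for_segments(
    flight_folder, summary_df, "{Segment}_{StartBatch}-{EndBatch}")
ready = [s for s in segments if s["is_exported_and_valid"]]

config = {
    "segments_to_process": ready,
    "output_dir": "out_analysis",  # hypothetical output folder
    "create_separate_folders": True,
    "profiles": [],                # the real code passes ConfigManager profiles
    "csv_profile_name": None,
    "json_profile_name": None,
}
processor = SegmentProcessor(config, queue.Queue(), queue.Queue())
processor.start()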

View File

@ -14,6 +14,8 @@ import queue
from .gui_utils import center_window
from ..utils import logger
from ..core.export_profiles import ExportProfile
# --- NEW IMPORT ---
from .segment_processor_tab import SegmentProcessorTab
log = logger.get_logger(__name__)
@ -99,12 +101,16 @@ class MainWindow(tk.Frame):
self.notebook.grid(row=0, column=0, sticky="nsew", pady=(0, 10))
self.flight_analyzer_tab = ttk.Frame(self.notebook, padding="10")
# --- CREATE THE NEW TAB ---
self.segment_processor_tab = SegmentProcessorTab(self.notebook, self.controller)
self.out_processor_tab = ttk.Frame(self.notebook, padding="10")
self.rec_converter_tab = ttk.Frame(self.notebook, padding="10")
self.notebook.add(self.flight_analyzer_tab, text="Flight Analyzer")
self.notebook.add(self.out_processor_tab, text="OUT Processor")
self.notebook.add(self.rec_converter_tab, text="REC to OUT Converter")
# --- ADD THE TABS TO THE NOTEBOOK IN THE CORRECT ORDER ---
self.notebook.add(self.flight_analyzer_tab, text="1. Flight Analyzer")
self.notebook.add(self.segment_processor_tab, text="2. Segment Processor")
self.notebook.add(self.out_processor_tab, text="3. Single OUT Processor")
self.notebook.add(self.rec_converter_tab, text="4. REC to OUT Converter")
self._create_flight_analyzer_tab(self.flight_analyzer_tab)
self._create_out_processor_tab(self.out_processor_tab)
@ -296,20 +302,30 @@ class MainWindow(tk.Frame):
self.log_widget.tag_config("WARNING", foreground="orange")
self.log_widget.tag_config("DEBUG", foreground="gray")
self.log_widget.tag_config("CMD", foreground="blue")
# Add a tag for the export-specific log
self.log_widget.tag_config("EXPORT_LOG", foreground="purple")
def _setup_gui_logging(self, logging_config):
logger.add_tkinter_handler(self.log_widget, self.master, logging_config)
def update_export_profiles(self, profiles: List[ExportProfile], **kwargs):
profile_names = [p.name for p in profiles] if profiles else []
active_out_profile = kwargs.get("active_out_profile", "")
for combo, var, active_name in [(self.out_csv_profile_combobox, self.out_csv_profile_var, active_out_profile), (self.out_json_profile_combobox, self.out_json_profile_var, active_out_profile)]:
combo["values"] = profile_names
if active_name in profile_names: var.set(active_name)
elif profile_names: var.set(profile_names[0])
else: var.set("")
active_profile_name = kwargs.get("active_out_profile", "")
# Update the comboboxes in the "Single OUT Processor" tab
self.out_csv_profile_combobox["values"] = profile_names
self.out_json_profile_combobox["values"] = profile_names
if active_profile_name in profile_names:
self.out_csv_profile_var.set(active_profile_name)
self.out_json_profile_var.set(active_profile_name)
elif profile_names:
self.out_csv_profile_var.set(profile_names[0])
self.out_json_profile_var.set(profile_names[0])
else:
self.out_csv_profile_var.set("")
self.out_json_profile_var.set("")
# Update the comboboxes in the new "Segment Processor" tab
self.segment_processor_tab.update_export_profiles(profile_names, active_profile_name)
def _reset_progress(self):
self.analyzer_progress_var.set(0)
@ -326,11 +342,19 @@ class MainWindow(tk.Frame):
def update_ui_for_processing_state(self, is_processing: bool):
state = tk.DISABLED if is_processing else tk.NORMAL
# --- Common buttons ---
self.out_browse_button.config(state=state)
self.rec_convert_button.config(state=state)
self.start_analysis_button.config(state=state)
self.export_segment_button.config(state=state)
# --- Buttons of the new tab ---
self.segment_processor_tab.load_segments_button.config(state=state)
process_button = self.segment_processor_tab.process_button
can_enable = not is_processing and process_button['state'] != tk.DISABLED
process_button.config(state=tk.NORMAL if can_enable else tk.DISABLED)
self.out_stop_button.config(state=tk.NORMAL if is_processing else tk.DISABLED)
if not is_processing:
can_process_generated = self.controller.last_generated_out_file and self.controller.last_generated_out_file.exists()
self.process_generated_out_button.config(state=tk.NORMAL if can_process_generated else tk.DISABLED)
@ -364,11 +388,7 @@ class MainWindow(tk.Frame):
log_level = level_map.get(level_str, logging.INFO)
log.log(log_level, f"[C++ Runner] {msg.get('message')}")
elif msg_type == "export_log":
# Use the INFO logger, but with a different tag for the GUI
log.info(f"[C++ Export] {msg.get('message')}")
# This is a small trick: we log normally, but we could also insert
# directly into the widget with a custom tag if needed.
# For now, we use the standard logging system.
elif msg_type == "start":
self.total_blocks_for_progress = msg.get("total", 0)
elif msg_type == "file_progress":
@ -408,8 +428,8 @@ class MainWindow(tk.Frame):
def populate_timeline_from_dataframe(self, summary_df: "pd.DataFrame"):
"""Clears and populates the flight timeline treeview with summary data."""
if summary_df.empty:
log.warning("Timeline population skipped: dataframe is empty.")
if summary_df is None or summary_df.empty:
log.warning("Timeline population skipped: dataframe is empty or None.")
return
self.flight_timeline_tree.delete(*self.flight_timeline_tree.get_children())

View File

@ -0,0 +1,193 @@
# radar_data_reader/gui/segment_processor_tab.py
"""
Defines the GUI layout and widgets for the 'Segment Processor' tab.
This tab allows users to perform batch processing on previously exported
flight segments.
"""
import tkinter as tk
from tkinter import ttk
from typing import List, Dict, Any, Callable
class SegmentProcessorTab(ttk.Frame):
"""
The UI frame for the Segment Processor feature.
"""
def __init__(self, master: ttk.Notebook, controller: Any):
super().__init__(master, padding="10")
self.controller = controller
self.segments_data_store: List[Dict[str, Any]] = []
# --- Tkinter Variables ---
self.output_dir_var = tk.StringVar()
self.generate_csv_var = tk.BooleanVar(value=True)
self.generate_json_var = tk.BooleanVar(value=False)
self.csv_profile_var = tk.StringVar()
self.json_profile_var = tk.StringVar()
self.create_separate_folders_var = tk.BooleanVar(value=True)
self.csv_use_tab_var = tk.BooleanVar(value=False)
self.use_full_path_var = tk.BooleanVar(value=False)
self._create_widgets()
def _create_widgets(self):
"""Creates and lays out all widgets for the tab."""
self.columnconfigure(0, weight=1)
self.rowconfigure(1, weight=1)
action_frame = ttk.Frame(self)
action_frame.grid(row=0, column=0, sticky="ew", pady=(0, 10))
self.load_segments_button = ttk.Button(
action_frame,
text="Load Exported Segments from Current Flight",
command=self.controller.load_segments_for_processing
)
self.load_segments_button.pack(side=tk.LEFT)
segments_frame = ttk.LabelFrame(self, text="Available Segments")
segments_frame.grid(row=1, column=0, sticky="nsew", pady=10)
segments_frame.columnconfigure(0, weight=1)
segments_frame.rowconfigure(0, weight=1)
self.segments_tree = ttk.Treeview(
segments_frame,
columns=("status", "details"),
show="headings",
selectmode="extended"
)
self.segments_tree.heading("status", text="Status")
self.segments_tree.heading("details", text="Segment Information")
self.segments_tree.column("status", width=120, anchor="center", stretch=False)
self.segments_tree.column("details", width=500, stretch=True)
self.segments_tree.grid(row=0, column=0, sticky="nsew")
scrollbar = ttk.Scrollbar(segments_frame, orient="vertical", command=self.segments_tree.yview)
self.segments_tree.configure(yscrollcommand=scrollbar.set)
scrollbar.grid(row=0, column=1, sticky="ns")
self.segments_tree.tag_configure("ready", foreground="green")
self.segments_tree.tag_configure("not_exported", foreground="gray")
help_label = ttk.Label(
segments_frame,
text="Tip: Segments in gray are not exported and cannot be selected. Use the 'Flight Analyzer' tab to export them.",
wraplength=600, justify=tk.LEFT, style="Italic.TLabel"
)
help_label.grid(row=1, column=0, columnspan=2, sticky="w", padx=5, pady=5)
self.master.style = ttk.Style()
self.master.style.configure("Italic.TLabel", font=("", 9, "italic"))
selection_frame = ttk.Frame(self)
selection_frame.grid(row=2, column=0, sticky="ew", pady=(5, 10))
ttk.Button(selection_frame, text="Select All Ready", command=self._select_all_ready).pack(side=tk.LEFT)
ttk.Button(selection_frame, text="Select None", command=self._select_none).pack(side=tk.LEFT, padx=5)
output_config_frame = ttk.LabelFrame(self, text="Batch Processing Output Configuration")
output_config_frame.grid(row=3, column=0, sticky="ew", pady=10)
output_config_frame.columnconfigure(1, weight=1)
# Row 0: Output Directory
ttk.Label(output_config_frame, text="Output Directory:").grid(row=0, column=0, padx=5, pady=5, sticky="w")
dir_entry = ttk.Entry(output_config_frame, textvariable=self.output_dir_var)
dir_entry.grid(row=0, column=1, sticky="ew", padx=5)
ttk.Button(
output_config_frame, text="Browse...",
command=lambda: self.controller.select_output_dir(self.output_dir_var)
).grid(row=0, column=2, padx=5)
# Row 1 & 2: Formats and Profiles
formats_frame = ttk.Frame(output_config_frame)
formats_frame.grid(row=1, column=0, columnspan=3, sticky="ew", padx=10, pady=5)
formats_frame.columnconfigure(1, weight=1)
ttk.Checkbutton(formats_frame, text="Generate .csv file", variable=self.generate_csv_var).grid(row=0, column=0, sticky="w")
self.csv_profile_combobox = ttk.Combobox(formats_frame, textvariable=self.csv_profile_var, state="readonly", width=30)
self.csv_profile_combobox.grid(row=0, column=1, sticky="w")
ttk.Checkbutton(formats_frame, text="Generate .json file", variable=self.generate_json_var).grid(row=1, column=0, sticky="w")
self.json_profile_combobox = ttk.Combobox(formats_frame, textvariable=self.json_profile_var, state="readonly", width=30)
self.json_profile_combobox.grid(row=1, column=1, sticky="w")
# Row 3: Other Checkbox Options
other_options_frame = ttk.Frame(output_config_frame)
other_options_frame.grid(row=2, column=0, columnspan=3, sticky='w', padx=10, pady=5)
ttk.Checkbutton(
other_options_frame, text="Create a separate folder for each segment's analysis",
variable=self.create_separate_folders_var
).pack(side=tk.LEFT, anchor="w")
ttk.Checkbutton(
other_options_frame, text="Use Tab Separator (CSV)",
variable=self.csv_use_tab_var
).pack(side=tk.LEFT, anchor="w", padx=(20, 0))
ttk.Checkbutton(
other_options_frame, text="Use Full Path for Headers",
variable=self.use_full_path_var
).pack(side=tk.LEFT, anchor="w", padx=(20, 0))
self.process_button = ttk.Button(
self,
text="Process Selected Segments",
command=self.controller.start_segment_batch_processing,
state=tk.DISABLED
)
self.process_button.grid(row=4, column=0, sticky="e", pady=(10, 0))
def _select_all_ready(self):
self.segments_tree.selection_remove(self.segments_tree.selection())
ready_items = self.segments_tree.tag_has("ready")
if ready_items:
self.segments_tree.selection_set(ready_items)
def _select_none(self):
self.segments_tree.selection_remove(self.segments_tree.selection())
def populate_segments(self, segments_data: List[Dict[str, Any]]):
self.segments_data_store = segments_data
for i in self.segments_tree.get_children():
self.segments_tree.delete(i)
has_selectable_items = False
for segment in self.segments_data_store:
# Build the detailed description string
details = segment.get('Segment (Mode | Scale | WF)', 'Unknown Segment')
start_time = segment.get('start_time_str', 'N/A')
end_time = segment.get('end_time_str', 'N/A')
duration = segment.get('Duration (s)', 0)
details_str = f"{details} | Start: {start_time}, End: {end_time}, Duration: {duration:.2f}s"
if segment.get('is_exported_and_valid'):
status_text, tag = "Ready", "ready"
has_selectable_items = True
else:
status_text, tag = "Not Exported", "not_exported"
iid = segment.get('folder_name')
self.segments_tree.insert("", "end", iid=iid, values=(status_text, details_str), tags=(tag,))
self.process_button.config(state=tk.NORMAL if has_selectable_items else tk.DISABLED)
def get_selected_segments_data(self) -> List[Dict[str, Any]]:
selected_iids = self.segments_tree.selection()
selected_data = []
for segment in self.segments_data_store:
if segment.get('folder_name') in selected_iids and segment.get('is_exported_and_valid'):
selected_data.append(segment)
return selected_data
def update_export_profiles(self, profile_names: List[str], active_profile_name: str):
for combo, var in [
(self.csv_profile_combobox, self.csv_profile_var),
(self.json_profile_combobox, self.json_profile_var)
]:
combo["values"] = profile_names
if active_profile_name in profile_names:
var.set(active_profile_name)
elif profile_names:
var.set(profile_names[0])
else:
var.set("")

View File

@ -100,6 +100,11 @@ class ConfigManager:
"export_profiles": [default_export_profile.to_dict()],
"cpp_converter_config": default_cpp_config,
"segment_export_config": default_segment_export_config,
# --- NEW CONFIGURATION KEY ---
"segment_processor_config": {
"last_output_dir": "",
"create_separate_folders": True,
}
}
def load_config(self) -> None:
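Downstream code reads the new key with the usual ConfigManager accessors, as AppController does above; a short sketch (the fallback values match the defaults defined here):

sp_config = config_manager.get("segment_processor_config", {})
last_dir = sp_config.get("last_output_dir", "")
separate_folders = sp_config.get("create_separate_folders", True)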