Fix segment processor tab

This commit is contained in:
VALLONGOL 2025-07-14 13:23:06 +02:00
parent c814943739
commit b049d80bd3
4 changed files with 107 additions and 83 deletions

View File

@ -446,7 +446,7 @@
},
"segment_processor_config": {
"last_output_dir": "C:/src/____GitProjects/radar_data_reader/out_analisys",
"create_separate_folders": false
"create_separate_folders": true
},
"segment_export_g_reconvert_config": {
"cpp_executable_path": "C:/src/GRIFO-E/REP/Projects/Tools/wsLuna/g_reconvert/Debug/g_reconvert.exe",

View File

@ -197,18 +197,28 @@ class AppController:
try:
output_dir, basename, profiles = Path(self.view.out_output_dir_var.get()), self.view.out_basename_var.get(), self.config_manager.get_export_profiles()
use_full_path = self.view.out_use_full_path_var.get()
if self.view.out_output_csv_var.get():
profile = next((p for p in profiles if p.name == self.view.out_csv_profile_var.get()), None)
if not profile: raise ValueError(f"CSV profile '{self.view.out_csv_profile_var.get()}' not found.")
self.active_export_profiles["csv"] = profile
path, delimiter = (output_dir / basename).with_suffix(".csv"), "\t" if self.view.out_csv_use_tab_var.get() else ","
csv_filename = f"{basename}_{profile.name}.csv"
path = output_dir / csv_filename
delimiter = "\t" if self.view.out_csv_use_tab_var.get() else ","
fh = open(path, "w", encoding="utf-8", newline="")
self.csv_writers["csv"].writerow([field.column_name for field in profile.fields])
header = [field.data_path if use_full_path else field.column_name for field in profile.fields]
csv_writer = csv.writer(fh, delimiter=delimiter)
csv_writer.writerow(header)
self.output_file_handles["csv"] = fh
self.csv_writers["csv"] = csv_writer
if self.view.out_output_json_var.get():
profile = next((p for p in profiles if p.name == self.view.out_json_profile_var.get()), None)
if not profile: raise ValueError(f"JSON profile '{self.view.out_json_profile_var.get()}' not found.")
self.active_export_profiles["json"] = profile
return True
except (IOError, ValueError) as e:
log.error(f"Failed to prepare output files: {e}")
@ -216,17 +226,12 @@ class AppController:
return False
def start_out_processing(self):
if self.is_processing:
log.warning("Processing already in progress.")
return
if self.is_processing: log.warning("Processing already in progress."); return
if not all([self.view.out_filepath_var.get(), self.view.out_output_dir_var.get(), self.view.out_basename_var.get()]):
log.error("Please set input file, output directory, and base filename.")
return
log.error("Please set input file, output directory, and base filename."); return
if not any([self.view.out_output_csv_var.get(), self.view.out_output_json_var.get()]):
log.error("Please select at least one output format (CSV or JSON).")
return
if not self._prepare_out_processor_files():
return
log.error("Please select at least one output format (CSV or JSON)."); return
if not self._prepare_out_processor_files(): return
self.is_processing = True
self.view.start_processing_ui()
@ -240,8 +245,7 @@ class AppController:
active_profile = self.active_export_profiles.get("csv") or self.active_export_profiles.get("json")
if not active_profile:
log.error("No active export profile found for processing.")
self.is_processing = False
self.view.update_ui_for_processing_state(False)
self.is_processing = False; self.view.update_ui_for_processing_state(False)
return
self.out_processor.start_processing(Path(filepath_str), active_profile)
@ -321,8 +325,10 @@ class AppController:
if "json" in self.active_export_profiles: log.info("JSON export enabled, but no data. Skipping file creation.")
return
try:
profile = self.active_export_profiles["json"]
output_dir, basename = Path(self.view.out_output_dir_var.get()), self.view.out_basename_var.get()
path = (output_dir / basename).with_suffix(".json")
json_filename = f"{basename}_{profile.name}.json"
path = output_dir / json_filename
log.info(f"Writing {len(self.json_data_buffer)} records to JSON file: {path}")
with open(path, "w", encoding="utf-8") as f: json.dump(self.json_data_buffer, f, indent=4)
log.info("JSON file written successfully.")
@ -531,13 +537,11 @@ class AppController:
def start_segment_export(self):
if self.is_processing:
messagebox.showwarning("Process Busy", "Cannot start export while another process is running.", parent=self.view)
return
messagebox.showwarning("Process Busy", "Cannot start export while another process is running.", parent=self.view); return
selected_item_ids = self.view.flight_timeline_tree.selection()
if not selected_item_ids:
messagebox.showinfo("No Selection", "Please select one or more segments to export.", parent=self.view)
return
messagebox.showinfo("No Selection", "Please select one or more segments to export.", parent=self.view); return
if self.last_flight_summary_df is None:
messagebox.showerror("Error", "Flight summary data is not available. Cannot proceed with export.")
@ -552,7 +556,7 @@ class AppController:
rec_folder = Path(self.view.analyzer_rec_folder_var.get())
export_config = self.config_manager.get("segment_export_config")
folder_template = export_config.get("naming_options", {}).get("folder_name_template", "{Segment}")
folder_template = export_config.get("naming_options", {}).get("folder_name_template", "{Segment}_{StartBatch}-{EndBatch}")
current_flight_path = self.flight_analyzer.current_flight_folder_path
if not current_flight_path:
log.error("Cannot start export, flight analysis folder path is unknown.")

View File

@ -25,10 +25,7 @@ from ..core.export_profiles import ExportProfile
log = logger.get_logger(__name__)
def _get_value_from_path(batch: Any, path: str) -> Any:
# Questa è una versione semplificata per il processor.
# In una versione futura, si potrebbe unificare con quella in AppController.
try:
if path == "batch_id": return getattr(batch, 'batch_id', 'N/A')
parts = re.split(r"\.|\[", path)
@ -47,19 +44,16 @@ def _get_value_from_path(batch: Any, path: str) -> Any:
except Exception:
return "N/A"
def _convert_ctypes_for_json(obj: Any) -> Any:
# Funzione di supporto per la serializzazione JSON
if isinstance(obj, (int, float, str, bool)) or obj is None: return obj
if hasattr(obj, '_length_'): # È un array ctypes
if hasattr(obj, '_length_'):
return [_convert_ctypes_for_json(item) for item in obj]
if hasattr(obj, '_fields_'): # È una struttura ctypes
if hasattr(obj, '_fields_'):
return {field: _convert_ctypes_for_json(getattr(obj, field)) for field, _ in obj._fields_}
if isinstance(obj, bytes):
return obj.hex()
return obj
class SegmentProcessor:
"""Manages the batch processing of exported flight segments."""
def __init__(self, config: Dict[str, Any], result_queue: queue.Queue, command_queue: queue.Queue):
@ -84,14 +78,11 @@ class SegmentProcessor:
segments_to_process = self.config.get("segments_to_process", [])
output_dir = Path(self.config.get("output_dir"))
create_folders = self.config.get("create_separate_folders", True)
profiles = self.config.get("profiles", [])
csv_profile_name = self.config.get("csv_profile_name")
json_profile_name = self.config.get("json_profile_name")
csv_profile = next((p for p in profiles if p.name == csv_profile_name), None)
json_profile = next((p for p in profiles if p.name == json_profile_name), None)
profiles = self.config.get("profiles", [])
csv_profile = next((p for p in profiles if p.name == self.config.get("csv_profile_name")), None)
json_profile = next((p for p in profiles if p.name == self.config.get("json_profile_name")), None)
log.info(f"Starting batch processing for {len(segments_to_process)} segments.")
for i, segment_info in enumerate(segments_to_process):
@ -110,28 +101,20 @@ class SegmentProcessor:
"segment_name": segment_name
})
# Determina la cartella di output per questo segmento
current_output_dir = output_dir
if create_folders:
current_output_dir = output_dir / segment_name
current_output_dir = output_dir / segment_name if create_folders else output_dir
current_output_dir.mkdir(parents=True, exist_ok=True)
# Esegui il worker e gestisci l'output
self._process_single_segment(out_file_path, current_output_dir, segment_name, csv_profile, json_profile)
log.info("Batch processing finished.")
self.result_queue.put({"type": "complete", "message": "Segment batch processing complete."})
def _process_single_segment(self, in_path: Path, out_dir: Path, base_name: str, csv_p: ExportProfile, json_p: ExportProfile):
"""Processes a single .out file and generates CSV/JSON outputs."""
# Creiamo code locali per questo worker specifico
worker_cmd_q = mp.Queue()
worker_res_q = mp.Queue()
# Il profilo passato al worker serve solo a guidare il parsing,
# l'esportazione vera la gestiamo qui.
active_profile = csv_p or json_p
if not active_profile: return # Skip if no profiles are active
active_profile = csv_p if self.config.get("generate_csv") else json_p
if not active_profile: return
worker_args = (in_path, worker_cmd_q, worker_res_q, active_profile)
process = mp.Process(target=run_worker_process, args=worker_args, daemon=True)
@ -140,22 +123,32 @@ class SegmentProcessor:
csv_writer, json_data, csv_fh = None, [], None
try:
if csv_p:
csv_path = (out_dir / base_name).with_suffix(".csv")
if self.config.get("generate_csv") and csv_p:
csv_filename = f"{base_name}_{csv_p.name}.csv"
csv_path = out_dir / csv_filename
csv_fh = open(csv_path, "w", encoding="utf-8", newline="")
csv_writer = csv.writer(csv_fh)
csv_writer.writerow([field.column_name for field in csv_p.fields])
delimiter = "\t" if self.config.get("csv_use_tab") else ","
csv_writer = csv.writer(csv_fh, delimiter=delimiter)
header = [field.data_path if self.config.get("use_full_path_headers") else field.column_name for field in csv_p.fields]
csv_writer.writerow(header)
if self.config.get("generate_json") and json_p:
json_data = [] # Assicurati che sia vuota per ogni segmento
while True:
msg = worker_res_q.get()
if msg['type'] == 'data_batch':
batch = msg['data']
if csv_writer:
if csv_writer and csv_p:
row = [_get_value_from_path(batch, field.data_path) for field in csv_p.fields]
csv_writer.writerow(row)
if json_p:
json_row = {field.column_name: _get_value_from_path(batch, field.data_path) for field in json_p.fields}
json_data.append(_convert_ctypes_for_json(json_row))
if json_data is not None and json_p:
row_dict = {}
for field in json_p.fields:
key = field.data_path if self.config.get("use_full_path_headers") else field.column_name
raw_value = _get_value_from_path(batch, field.data_path)
row_dict[key] = _convert_ctypes_for_json(raw_value)
json_data.append(row_dict)
elif msg['type'] == 'complete':
break
elif msg['type'] == 'error':
@ -164,8 +157,9 @@ class SegmentProcessor:
finally:
if csv_fh:
csv_fh.close()
if json_p and json_data:
json_path = (out_dir / base_name).with_suffix(".json")
if self.config.get("generate_json") and json_p and json_data:
json_filename = f"{base_name}_{json_p.name}.json"
json_path = out_dir / json_filename
with open(json_path, "w", encoding="utf-8") as f:
json.dump(json_data, f, indent=4)
process.join(timeout=2)
@ -174,10 +168,9 @@ class SegmentProcessor:
@staticmethod
def scan_for_segments(flight_path: Path, flight_summary_df: pd.DataFrame, naming_template: str) -> List[Dict[str, Any]]:
"""Scans a flight workspace to find and verify exported segments."""
if flight_summary_df is None or flight_summary_df.empty:
return []
verified_segments = []
for _, row in flight_summary_df.iterrows():
placeholders = {

View File

@ -30,6 +30,9 @@ class SegmentProcessorTab(ttk.Frame):
self.create_separate_folders_var = tk.BooleanVar(value=True)
self.csv_use_tab_var = tk.BooleanVar(value=False)
self.use_full_path_var = tk.BooleanVar(value=False)
self.progress_var = tk.DoubleVar(value=0)
self.progress_text_var = tk.StringVar(value="N/A")
self._create_widgets()
@ -53,16 +56,24 @@ class SegmentProcessorTab(ttk.Frame):
segments_frame.columnconfigure(0, weight=1)
segments_frame.rowconfigure(0, weight=1)
# --- MODIFICA: Nuova struttura della Treeview ---
self.segments_tree = ttk.Treeview(
segments_frame,
columns=("status", "details"),
columns=("status", "name", "start_time", "end_time", "duration"),
show="headings",
selectmode="extended"
)
self.segments_tree.heading("status", text="Status")
self.segments_tree.heading("details", text="Segment Information")
self.segments_tree.column("status", width=120, anchor="center", stretch=False)
self.segments_tree.column("details", width=500, stretch=True)
self.segments_tree.heading("name", text="Segment Name")
self.segments_tree.heading("start_time", text="Start Time")
self.segments_tree.heading("end_time", text="End Time")
self.segments_tree.heading("duration", text="Duration (s)")
self.segments_tree.column("status", width=100, anchor="center", stretch=False)
self.segments_tree.column("name", width=300, stretch=True)
self.segments_tree.column("start_time", width=100, anchor="center", stretch=False)
self.segments_tree.column("end_time", width=100, anchor="center", stretch=False)
self.segments_tree.column("duration", width=100, anchor="center", stretch=False)
self.segments_tree.grid(row=0, column=0, sticky="nsew")
scrollbar = ttk.Scrollbar(segments_frame, orient="vertical", command=self.segments_tree.yview)
@ -91,7 +102,6 @@ class SegmentProcessorTab(ttk.Frame):
output_config_frame.grid(row=3, column=0, sticky="ew", pady=10)
output_config_frame.columnconfigure(1, weight=1)
# Row 0: Output Directory
ttk.Label(output_config_frame, text="Output Directory:").grid(row=0, column=0, padx=5, pady=5, sticky="w")
dir_entry = ttk.Entry(output_config_frame, textvariable=self.output_dir_var)
dir_entry.grid(row=0, column=1, sticky="ew", padx=5)
@ -100,20 +110,23 @@ class SegmentProcessorTab(ttk.Frame):
command=lambda: self.controller.select_output_dir(self.output_dir_var)
).grid(row=0, column=2, padx=5)
# Row 1 & 2: Formats and Profiles
formats_frame = ttk.Frame(output_config_frame)
formats_frame.grid(row=1, column=0, columnspan=3, sticky="ew", padx=10, pady=5)
formats_frame.columnconfigure(1, weight=1)
formats_frame.columnconfigure(2, weight=1)
ttk.Checkbutton(formats_frame, text="Generate .csv file", variable=self.generate_csv_var).grid(row=0, column=0, sticky="w")
self.csv_check = ttk.Checkbutton(formats_frame, text="Generate .csv file", variable=self.generate_csv_var, command=self._on_format_toggle)
self.csv_check.grid(row=0, column=0, sticky="w")
ttk.Label(formats_frame, text="CSV Profile:").grid(row=0, column=1, sticky="e", padx=(10, 2))
self.csv_profile_combobox = ttk.Combobox(formats_frame, textvariable=self.csv_profile_var, state="readonly", width=30)
self.csv_profile_combobox.grid(row=0, column=1, sticky="w")
self.csv_profile_combobox.grid(row=0, column=2, sticky="ew")
ttk.Checkbutton(formats_frame, text="Generate .json file", variable=self.generate_json_var).grid(row=1, column=0, sticky="w")
self.json_check = ttk.Checkbutton(formats_frame, text="Generate .json file", variable=self.generate_json_var, command=self._on_format_toggle)
self.json_check.grid(row=1, column=0, sticky="w")
ttk.Label(formats_frame, text="JSON Profile:").grid(row=1, column=1, sticky="e", padx=(10, 2))
self.json_profile_combobox = ttk.Combobox(formats_frame, textvariable=self.json_profile_var, state="readonly", width=30)
self.json_profile_combobox.grid(row=1, column=1, sticky="w")
self.json_profile_combobox.grid(row=1, column=2, sticky="ew")
self._on_format_toggle()
# Row 3: Other Checkbox Options
other_options_frame = ttk.Frame(output_config_frame)
other_options_frame.grid(row=2, column=0, columnspan=3, sticky='w', padx=10, pady=5)
ttk.Checkbutton(
@ -128,14 +141,30 @@ class SegmentProcessorTab(ttk.Frame):
other_options_frame, text="Use Full Path for Headers",
variable=self.use_full_path_var
).pack(side=tk.LEFT, anchor="w", padx=(20, 0))
# --- NUOVO: Progress Frame ---
progress_frame = ttk.LabelFrame(self, text="Processing Progress")
progress_frame.grid(row=4, column=0, sticky="ew", pady=(10,0))
progress_frame.columnconfigure(0, weight=1)
self.progress_bar = ttk.Progressbar(progress_frame, variable=self.progress_var)
self.progress_bar.grid(row=0, column=0, sticky="ew", padx=5, pady=5)
self.progress_label = ttk.Label(progress_frame, textvariable=self.progress_text_var)
self.progress_label.grid(row=1, column=0, sticky="w", padx=5, pady=(0, 5))
self.process_button = ttk.Button(
self,
progress_frame,
text="Process Selected Segments",
command=self.controller.start_segment_batch_processing,
state=tk.DISABLED
)
self.process_button.grid(row=4, column=0, sticky="e", pady=(10, 0))
self.process_button.grid(row=0, column=1, rowspan=2, padx=10)
def _on_format_toggle(self):
    """Sync profile-combobox availability with the format checkboxes.

    The comboboxes are created with ``state="readonly"`` (selection only),
    so re-enabling them must restore "readonly" rather than ``tk.NORMAL``;
    NORMAL would let the user type arbitrary profile names into the box.
    """
    self.csv_profile_combobox.config(
        state="readonly" if self.generate_csv_var.get() else tk.DISABLED)
    self.json_profile_combobox.config(
        state="readonly" if self.generate_json_var.get() else tk.DISABLED)
def _select_all_ready(self):
self.segments_tree.selection_remove(self.segments_tree.selection())
@ -152,13 +181,11 @@ class SegmentProcessorTab(ttk.Frame):
self.segments_tree.delete(i)
has_selectable_items = False
for segment in self.segments_data_store:
# Costruisci la stringa dettagliata
details = segment.get('Segment (Mode | Scale | WF)', 'Unknown Segment')
for i, segment in enumerate(self.segments_data_store):
name = segment.get('Segment (Mode | Scale | WF)', 'Unknown Segment')
start_time = segment.get('start_time_str', 'N/A')
end_time = segment.get('end_time_str', 'N/A')
duration = segment.get('Duration (s)', 0)
details_str = f"{details} | Start: {start_time}, End: {end_time}, Duration: {duration:.2f}s"
if segment.get('is_exported_and_valid'):
status_text, tag = "Ready", "ready"
@ -166,16 +193,16 @@ class SegmentProcessorTab(ttk.Frame):
else:
status_text, tag = "Not Exported", "not_exported"
iid = segment.get('folder_name')
self.segments_tree.insert("", "end", iid=iid, values=(status_text, details_str), tags=(tag,))
self.segments_tree.insert("", "end", iid=str(i), values=(status_text, name, start_time, end_time, f"{duration:.2f}"), tags=(tag,))
self.process_button.config(state=tk.NORMAL if has_selectable_items else tk.DISABLED)
def get_selected_segments_data(self) -> List[Dict[str, Any]]:
    """Return the data dicts of the currently selected, exportable segments.

    Treeview rows are inserted with ``iid=str(i)``, where ``i`` indexes
    ``self.segments_data_store``, so each selected iid is converted back to
    an int index. Only segments flagged ``is_exported_and_valid`` are
    included in the result.
    """
    selected_indices = [int(iid) for iid in self.segments_tree.selection()]
    selected_data = []
    for index in selected_indices:
        segment = self.segments_data_store[index]
        if segment.get('is_exported_and_valid'):
            selected_data.append(segment)
    return selected_data