add flight analysis

VALLONGOL 2025-07-09 07:50:43 +02:00
parent 06b1fe79e1
commit 98c1ed357a
3 changed files with 382 additions and 4 deletions


@@ -1,9 +1,9 @@
{
-"last_opened_out_file": "C:\\src\\____GitProjects\\radar_data_reader\\_rec\\_25-05-15-12-22-52_sata_345-n15.out",
+"last_opened_out_file": "C:/src/____GitProjects/radar_data_reader/flight_workspace/250515_122252_Flight/flight_summary.out",
"last_opened_rec_file": "C:/src/____GitProjects/radar_data_reader/_rec/_25-05-15-12-22-52_sata_345.rec",
-"last_out_output_dir": "C:/src/____GitProjects/radar_data_reader/_rec",
+"last_out_output_dir": "C:/src/____GitProjects/radar_data_reader/flight_workspace/250515_122252_Flight",
"last_rec_output_dir": "C:\\src\\____GitProjects\\radar_data_reader\\_rec",
-"last_flight_folder": "//tsclient/F/__DATI_VOLI/Volo12 - Maggio 2025/_rec",
+"last_flight_folder": "C:/__Voli/Volo_12_25maggio2025/rec",
"active_out_export_profile_name": "gsp_data",
"export_profiles": [
{


@@ -17,6 +17,8 @@ from tkinter import filedialog, messagebox
import tkinter as tk
import ctypes
import threading
+import shutil
+import time
from ..utils.config_manager import ConfigManager
from ..core.file_reader import run_worker_process
@@ -601,3 +603,379 @@ class AppController:
log.error(f"Error in folder analysis worker: {e}", exc_info=True)
self.view.analyzer_info_var.set(f"Error during folder analysis: {e}")
self.view.start_analysis_button.config(state=tk.DISABLED)
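# The methods added below implement the new flight-analysis workflow: start_flight_analysis()
# validates the UI inputs and spawns a background thread; _flight_analysis_worker() runs
# g_reconverter to build flight_summary.out and then the Python parser to produce
# flight_summary.csv and populate the timeline; the remaining helpers build the summary export
# profile, set up the CSV writer, and remap RDP client paths to a drive letter.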
def start_flight_analysis(self):
"""
Starts the main flight analysis process in a background thread.
This orchestrates g_reconverter and the python parser.
"""
if self.is_processing:
log.warning("Cannot start flight analysis, a process is already running.")
return
flight_name = self.view.analyzer_flight_name_var.get().strip()
rec_folder = self.view.analyzer_rec_folder_var.get()
if not flight_name or not rec_folder:
messagebox.showerror(
"Setup Incomplete",
"Please select a recordings folder and provide a flight name.",
parent=self.view
)
return
# --- Network Path Check ---
# Warn the user if they select a network path, as external processes might fail.
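# Paths under \\tsclient\... are RDP-redirected client drives, which external tools often
# cannot open directly; the _get_robust_path() helper defined further below can remap such a
# share to a local drive letter.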
if rec_folder.startswith("\\\\"):
log.warning(f"Network path detected: {rec_folder}")
response = messagebox.askyesno(
"Network Path Warning",
f"The selected folder '{rec_folder}' is on a network path.\n\n"
"External tools like g_reconverter.exe may fail to access these paths directly.\n\n"
"It is strongly recommended to use a local folder or a mapped network drive (e.g., Z:\\MyFlightData) for analysis.\n\n"
"Do you want to continue anyway?",
parent=self.view
)
if not response:
log.info("User cancelled analysis due to network path warning.")
return # User chose not to continue
# --- Prepare the UI for a long-running task ---
self.is_processing = True
# Use the main UI state update method, assuming it disables relevant buttons
self.view.start_processing_ui()
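# Show the status message right away: update_idletasks() forces Tk to process pending
# redraws before the long-running work starts.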
self.view.analyzer_info_var.set(f"Starting analysis for flight: {flight_name}...")
self.view.update_idletasks()
# --- Launch the worker thread to handle the analysis ---
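# daemon=True means a stuck worker cannot keep the application alive on exit; the thread itself
# (not the Tk mainloop) is what blocks on worker_process.join() while the external tools run.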
analysis_thread = threading.Thread(
target=self._flight_analysis_worker,
args=(rec_folder, flight_name),
daemon=True
)
analysis_thread.start()
def _flight_analysis_worker(self, rec_folder_str: str, flight_name: str):
"""
Worker thread that performs the full flight summary analysis.
"""
try:
# 1. Setup workspace directory structure
workspace_dir = Path.cwd() / "flight_workspace"
flight_dir = workspace_dir / flight_name
flight_dir.mkdir(parents=True, exist_ok=True)
log.info(f"Workspace for flight '{flight_name}' created at: {flight_dir}")
# 2. Prepare for g_reconverter
rec_files = sorted(Path(rec_folder_str).glob("*.rec"))
if not rec_files:
raise FileNotFoundError("No .rec files found to process.")
first_rec_file = str(rec_files[0])
num_rec_files = len(rec_files)
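# g_reconverter is pointed at the first .rec file and given /n=<count>; presumably it then
# processes the whole sequence of recordings as a single flight.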
summary_out_path = flight_dir / "flight_summary.out"
self.last_generated_out_file = summary_out_path
# --- Build the g_reconverter command for summary generation ---
cpp_config = self.config_manager.get_cpp_converter_config()
exe_path = cpp_config.get("cpp_executable_path")
if not exe_path or not Path(exe_path).is_file():
raise ValueError("g_reconverter.exe path is not valid.")
command_list = [
exe_path,
first_rec_file,
f"/n={num_rec_files}",
f"/o={str(summary_out_path)}",
"/nosign",
"/gps"
]
if cpp_config.get("silent_overwrite"):
command_list.append("//o")
log.info(f"Running g_reconverter for summary: {' '.join(command_list)}")
# 3. Run g_reconverter (the working directory for the subprocess is flight_dir itself)
self._launch_worker(run_cpp_converter, (command_list, self.result_queue, str(flight_dir)))
self.worker_process.join() # Block until the C++ worker process finishes
log.info("g_reconverter summary generation finished.")
# --- NEW: add a short pause for the filesystem ---
time.sleep(0.5) # Wait half a second to make sure the file is ready
# --- NEW: robust check on the output file ---
if not summary_out_path.is_file() or summary_out_path.stat().st_size == 0:
log.error(f"g_reconverter did not produce a valid output file at {summary_out_path}")
raise FileNotFoundError(f"g_reconverter failed to create summary file: {summary_out_path}")
# 4. Run Python parser on the lightweight .out file
log.info(f"Now parsing summary file: {summary_out_path}")
self.view.analyzer_info_var.set("Parsing summary file...")
summary_profile = self._create_default_summary_profile()
# --- NEW: drain the queues before launching the next worker ---
# This is ESSENTIAL to prevent the completion message from the first worker
# from being read prematurely.
while not self.result_queue.empty():
self.result_queue.get_nowait()
while not self.command_queue.empty():
self.command_queue.get_nowait()
log.info("Queues cleared, launching Python parser worker.")
self._prepare_and_run_python_parser(summary_out_path, flight_dir, summary_profile)
self.worker_process.join() # Block until the Python worker process finishes
log.info("Python parser finished creating flight_summary.csv")
# 5. Load the resulting CSV and populate the timeline view
summary_csv_path = flight_dir / "flight_summary.csv"
if summary_csv_path.is_file() and summary_csv_path.stat().st_size > 0:
log.info(f"Loading summary from {summary_csv_path} to populate timeline.")
df = pd.read_csv(summary_csv_path)
self._populate_timeline_from_dataframe(df)
log.info("Flight analysis summary completed successfully.")
self.view.analyzer_info_var.set("Analysis complete. Timeline populated.")
self.view.export_segment_button.config(state=tk.NORMAL)
else:
log.error("Python parser ran but flight_summary.csv is empty or missing.")
raise FileNotFoundError("flight_summary.csv was not created or is empty.")
except Exception as e:
log.error(f"Flight analysis worker failed: {e}", exc_info=True)
self.view.analyzer_info_var.set(f"ERROR: {e}")
finally:
self.is_processing = False
# Close any files left open by the parser
self._close_all_files()
self.view.update_ui_for_processing_state(False)
def _create_default_summary_profile(self) -> ExportProfile:
"""Creates a hardcoded default profile for flight summary generation."""
log.debug("Creating default profile for flight summary.")
return ExportProfile(
name="_internal_summary",
fields=[
ExportField(column_name="batch_id", data_path="batch_id"),
ExportField(column_name="ttag", data_path="main_header.ge_header.signal_descr.ttag"),
ExportField(column_name="master_mode", data_path="main_header.ge_header.mode.master_mode", translate_with_enum=True),
# Add more fields if needed for the timeline
]
)
def _prepare_and_run_python_parser(self, out_file: Path, output_dir: Path, profile: ExportProfile):
"""Prepares and runs the Python .out file parser worker."""
# Setup output files for CSV
self.output_file_handles.clear()
self.csv_writers.clear()
self.active_export_profiles.clear()
self.json_data_buffer.clear() # Not used for summary, but good to clear
summary_csv_path = output_dir / "flight_summary.csv"
fh = open(summary_csv_path, "w", encoding="utf-8", newline="")
self.output_file_handles["csv"] = fh
csv_writer = csv.writer(fh, delimiter=",")
self.csv_writers["csv"] = csv_writer
# Write headers
headers = [field.column_name for field in profile.fields]
csv_writer.writerow(headers)
self.active_export_profiles["csv"] = profile
# Launch the python parser worker
worker_args = (out_file, self.command_queue, self.result_queue, profile)
self._launch_worker(run_worker_process, worker_args)
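# This reuses the same run_worker_process / queue plumbing as the regular single-file parse, so
# the controller's existing result-queue handler presumably drains parsed rows into the CSV
# writer registered above.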
def _get_robust_path(self, path_obj: Path) -> str:
"""
Takes a Path object and returns a robust string path that subprocesses can handle.
If the path is a remote client path (e.g., \\tsclient\F), it attempts to map it
to a network drive (K:) and returns the path on that drive.
"""
# Using os.path.normpath to handle slashes consistently
path_str = os.path.normpath(str(path_obj.resolve()))
if not path_str.lower().startswith("\\\\tsclient\\"):
return path_str
log.info(f"Detected RDP/Hyper-V client path: {path_str}")
drive_letter = "K"
drive_path = f"{drive_letter}:"
try:
# The share path is always the first two components after the slashes
# e.g., for \\tsclient\F\folder, the share is \\tsclient\F
parts = path_obj.parts
if len(parts) < 3:
log.error(f"Cannot determine share path from invalid client path: {path_str}")
return path_str # Fallback
# This is the path to map, e.g., \\tsclient\F
share_to_map = os.path.join(f"\\\\{parts[1]}", parts[2])
# Check if the drive is already mapped to the correct share
# net use command output is often in cp850 or similar, decode with fallback
net_use_output = subprocess.check_output("net use", shell=True, text=True, errors='replace').strip()
is_correctly_mapped = False
for line in net_use_output.splitlines():
# A successful mapping line looks roughly like this (column widths depend on the
# locale/format of the `net use` output):
#   OK           K:        \\tsclient\F ...
if line.strip().startswith("OK") and f"{drive_path: <12}{share_to_map}" in line:
is_correctly_mapped = True
break
if is_correctly_mapped:
log.info(f"Drive {drive_path} is already correctly mapped to {share_to_map}. Using it.")
else:
# If mapped to something else, or not mapped at all, (re)map it.
if f"{drive_path}" in net_use_output:
log.warning(f"Drive {drive_path} is mapped to another resource or is unavailable. Attempting to remap.")
# Using shell=True for net use is more robust with complex commands
subprocess.run(f'net use {drive_path} /delete /yes', check=True, shell=True)
log.info(f"Attempting to map '{share_to_map}' to drive {drive_path}...")
subprocess.run(f'net use {drive_path} "{share_to_map}"', check=True, shell=True)
log.info(f"Successfully mapped network drive {drive_path}")
# Reconstruct the final path using the mapped drive letter.
# Note: os.path.join("K:", "x") would give the drive-relative "K:x", so anchor the join at the drive root.
new_path_parts = [drive_path + os.sep] + list(parts[3:])
robust_path = os.path.join(*new_path_parts)
log.info(f"Path converted to mapped drive: {robust_path}")
return robust_path
except (subprocess.CalledProcessError, FileNotFoundError) as e:
log.error(f"Failed to map network drive: {e}. Falling back to original path.")
return path_str


@@ -161,7 +161,7 @@ class MainWindow(tk.Frame):
action_frame.grid(row=1, column=0, sticky="ew", padx=5, pady=10)
self.start_analysis_button = ttk.Button(
-action_frame, text="Start Flight Analysis", command=lambda: print("TODO: Start Analysis"),
+action_frame, text="Start Flight Analysis", command=self.controller.start_flight_analysis,
state=tk.DISABLED
)
self.start_analysis_button.pack(side=tk.LEFT)