fix Mode handling in the summary table
add a configurable workspace folder for analysis

This commit is contained in:
parent 5175132deb
commit b9b37a9aa8
@@ -4,6 +4,7 @@
"last_out_output_dir": "C:/src/____GitProjects/radar_data_reader/flight_workspace/250515_122252_Flight",
"last_rec_output_dir": "C:\\src\\____GitProjects\\radar_data_reader\\_rec",
"last_flight_folder": "C:/__Voli/Volo_12_25maggio2025/rec",
"last_flight_workspace_parent_dir": "C:\\src\\____GitProjects\\radar_data_reader\\flight_workspace",
"active_out_export_profile_name": "gsp_data",
"export_profiles": [
{
@@ -451,8 +452,8 @@
"create_separate_folders": true
},
"flight_analysis_options": {
"aggregate_by_scale": false,
"aggregate_by_waveform": false
"aggregate_by_scale": true,
"aggregate_by_waveform": true
},
"segment_export_g_reconvert_config": {
"cpp_executable_path": "C:/src/GRIFO-E/REP/Projects/Tools/wsLuna/g_reconvert/Debug/g_reconvert.exe",

@@ -6,10 +6,10 @@
import re

# --- Version Data (Generated) ---
__version__ = "v.0.0.0.38-0-g7251cf8-dirty"
GIT_COMMIT_HASH = "7251cf8e301716f904aab60b759fe8d9abd6664b"
__version__ = "v.0.0.0.39-0-g4066d4b"
GIT_COMMIT_HASH = "4066d4b92bffb9779b5bb530ff48d938a3f3fc95"
GIT_BRANCH = "master"
BUILD_TIMESTAMP = "2025-07-14T13:26:54.921185+00:00"
BUILD_TIMESTAMP = "2025-07-14T13:43:12.150911+00:00"
IS_GIT_REPO = True

# --- Default Values (for comparison or fallback) ---

@@ -47,29 +47,24 @@ log = logger.get_logger(__name__)
def _get_value_from_path(batch: DataBatch, field: ExportField) -> Any:
try:
path = field.data_path
if path == "batch_id":
return batch.batch_id
if path == "batch_id": return batch.batch_id
parts = re.split(r"\.|\[", path)
current_obj = batch
for part in parts:
if current_obj is None:
return "N/A"
if current_obj is None: return "N/A"
if part.endswith("]"):
index_str = part[:-1]
if not index_str.isdigit():
return "N/A"
if not index_str.isdigit(): return "N/A"
try:
current_obj = current_obj[int(index_str)]
except (IndexError, TypeError):
return "N/A"
except (IndexError, TypeError): return "N/A"
else:
current_obj = getattr(current_obj, part, None)
value = current_obj if current_obj is not None else "N/A"
if field.translate_with_enum and isinstance(value, int):
enum_path = re.sub(r"\[\d+\]", "", path)
enum_class = ENUM_REGISTRY.get(enum_path)
if enum_class:
return get_enum_name(enum_class, value)
if enum_class: return get_enum_name(enum_class, value)
return value
except Exception as e:
log.warning(f"Could not resolve path '{field.data_path}': {e}")
@@ -77,20 +72,16 @@ def _get_value_from_path(batch: DataBatch, field: ExportField) -> Any:

def _convert_ctypes_for_json(obj: Any) -> Any:
if isinstance(obj, (int, float, str, bool)) or obj is None:
return obj
if isinstance(obj, (ctypes._SimpleCData)):
return obj.value
if isinstance(obj, (int, float, str, bool)) or obj is None: return obj
if isinstance(obj, (ctypes._SimpleCData)): return obj.value
if isinstance(obj, CtypesStructureBase):
result = {}
for field_name, _ in obj._fields_:
if field_name.startswith("_"):
continue
if field_name.startswith("_"): continue
value = getattr(obj, field_name)
result[field_name] = _convert_ctypes_for_json(value)
return result
if isinstance(obj, ctypes.Array):
return [_convert_ctypes_for_json(item) for item in obj]
if isinstance(obj, ctypes.Array): return [_convert_ctypes_for_json(item) for item in obj]
return obj
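The converter recurses until only JSON-serializable values remain: bare ctypes scalars are unwrapped via .value, structures become dicts keyed by their public _fields_, and ctypes arrays become lists. A small sketch of the same idea against a plain ctypes.Structure (the project's CtypesStructureBase is assumed to be a ctypes.Structure subclass):

import ctypes
import json

class Point(ctypes.Structure):
    _fields_ = [("x", ctypes.c_int), ("y", ctypes.c_int), ("_pad", ctypes.c_int)]

def to_jsonable(obj):
    if isinstance(obj, ctypes._SimpleCData):
        return obj.value  # unwrap a standalone c_int, c_double, ...
    if isinstance(obj, ctypes.Structure):
        # underscore-prefixed fields are skipped, the rest recurse
        return {name: to_jsonable(getattr(obj, name))
                for name, _ in obj._fields_ if not name.startswith("_")}
    if isinstance(obj, ctypes.Array):
        return [to_jsonable(item) for item in obj]
    return obj  # already a plain Python value

print(json.dumps(to_jsonable(Point(3, 4, 0))))  # {"x": 3, "y": 4}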

@@ -143,93 +134,67 @@ class AppController:

def _load_initial_config(self):
if last_file := self.config_manager.get("last_opened_out_file"):
if Path(last_file).is_file():
self.view.out_filepath_var.set(last_file)
if Path(last_file).is_file(): self.view.out_filepath_var.set(last_file)
self.on_out_config_changed()
if last_dir := self.config_manager.get("last_out_output_dir"):
self.view.out_output_dir_var.set(last_dir)
if last_file := self.config_manager.get("last_opened_rec_file"):
if Path(last_file).is_file():
self.view.rec_filepath_var.set(last_file)
if Path(last_file).is_file(): self.view.rec_filepath_var.set(last_file)
self.on_rec_config_changed()
if last_dir := self.config_manager.get("last_rec_output_dir"):
self.view.rec_output_dir_var.set(last_dir)

if last_workspace_parent := self.config_manager.get("last_flight_workspace_parent_dir"):
self.view.analyzer_workspace_dir_var.set(last_workspace_parent)

profiles = self.config_manager.get_export_profiles()
self.view.update_export_profiles(
profiles=profiles,
active_out_profile=self.config_manager.get(
"active_out_export_profile_name"
),
active_out_profile=self.config_manager.get("active_out_export_profile_name")
)

sp_config = self.config_manager.get("segment_processor_config", {})
self.view.segment_processor_tab.output_dir_var.set(
sp_config.get("last_output_dir", "")
)
self.view.segment_processor_tab.create_separate_folders_var.set(
sp_config.get("create_separate_folders", True)
)
self.view.segment_processor_tab.output_dir_var.set(sp_config.get("last_output_dir", ""))
self.view.segment_processor_tab.create_separate_folders_var.set(sp_config.get("create_separate_folders", True))

fa_options = self.config_manager.get("flight_analysis_options", {})
self.view.aggregate_by_scale_var.set(fa_options.get("aggregate_by_scale", True))
self.view.aggregate_by_waveform_var.set(
fa_options.get("aggregate_by_waveform", True)
)
self.view.aggregate_by_waveform_var.set(fa_options.get("aggregate_by_waveform", True))

def on_out_config_changed(self, *args):
filepath_str = self.view.out_filepath_var.get()
if not filepath_str:
return
if not filepath_str: return
p = Path(filepath_str)
if not self.view.out_output_dir_var.get():
self.view.out_output_dir_var.set(str(p.parent))
if not self.view.out_output_dir_var.get(): self.view.out_output_dir_var.set(str(p.parent))
self.view.out_basename_var.set(p.stem)

def on_rec_config_changed(self, *args):
try:
file_count = self.view.rec_file_count_var.get()
except (tk.TclError, ValueError):
return
except (tk.TclError, ValueError): return
filepath_str = self.view.rec_filepath_var.get()
if not filepath_str:
return
if not filepath_str: return
p = Path(filepath_str)
if not self.view.rec_output_dir_var.get():
self.view.rec_output_dir_var.set(str(p.parent))
if not self.view.rec_output_dir_var.get(): self.view.rec_output_dir_var.set(str(p.parent))
base_stem = Path(p.stem).stem
new_stem = f"{base_stem}-n{file_count}"
self.view.rec_basename_var.set(new_stem)

def select_output_dir(self, dir_var: tk.StringVar):
initial_dir = dir_var.get() if dir_var.get() else None
if new_dir := filedialog.askdirectory(
initialdir=initial_dir, title="Select Output Directory"
):
if new_dir := filedialog.askdirectory(initialdir=initial_dir, title="Select Output Directory"):
dir_var.set(new_dir)

def _select_file(self, path_var: tk.StringVar, file_types: List[Tuple[str, str]]):
initial_dir = (
Path(path_var.get()).parent
if path_var.get() and Path(path_var.get()).exists()
else None
)
if filepath := filedialog.askopenfilename(
initialdir=initial_dir, filetypes=file_types
):
initial_dir = (Path(path_var.get()).parent if path_var.get() and Path(path_var.get()).exists() else None)
if filepath := filedialog.askopenfilename(initialdir=initial_dir, filetypes=file_types):
path_var.set(filepath)

def select_out_file(self):
self._select_file(
self.view.out_filepath_var,
[("Radar Output", "*.out"), ("All files", "*.*")],
)
self._select_file(self.view.out_filepath_var, [("Radar Output", "*.out"), ("All files", "*.*")])

def select_rec_file(self):
self._select_file(
self.view.rec_filepath_var,
[("Recorder Data", "*.rec"), ("All files", "*.*")],
)
self._select_file(self.view.rec_filepath_var, [("Recorder Data", "*.rec"), ("All files", "*.*")])

def _prepare_out_processor_files(self) -> bool:
self.output_file_handles.clear()
@@ -237,26 +202,12 @@
self.active_export_profiles.clear()
self.json_data_buffer.clear()
try:
output_dir, basename, profiles = (
Path(self.view.out_output_dir_var.get()),
self.view.out_basename_var.get(),
self.config_manager.get_export_profiles(),
)
output_dir, basename, profiles = Path(self.view.out_output_dir_var.get()), self.view.out_basename_var.get(), self.config_manager.get_export_profiles()
use_full_path = self.view.out_use_full_path_var.get()

if self.view.out_output_csv_var.get():
profile = next(
(
p
for p in profiles
if p.name == self.view.out_csv_profile_var.get()
),
None,
)
if not profile:
raise ValueError(
f"CSV profile '{self.view.out_csv_profile_var.get()}' not found."
)
profile = next((p for p in profiles if p.name == self.view.out_csv_profile_var.get()), None)
if not profile: raise ValueError(f"CSV profile '{self.view.out_csv_profile_var.get()}' not found.")
self.active_export_profiles["csv"] = profile

csv_filename = f"{basename}_{profile.name}.csv"
@@ -264,28 +215,15 @@
delimiter = "\t" if self.view.out_csv_use_tab_var.get() else ","

fh = open(path, "w", encoding="utf-8", newline="")
header = [
field.data_path if use_full_path else field.column_name
for field in profile.fields
]
header = [field.data_path if use_full_path else field.column_name for field in profile.fields]
csv_writer = csv.writer(fh, delimiter=delimiter)
csv_writer.writerow(header)
self.output_file_handles["csv"] = fh
self.csv_writers["csv"] = csv_writer

if self.view.out_output_json_var.get():
profile = next(
(
p
for p in profiles
if p.name == self.view.out_json_profile_var.get()
),
None,
)
if not profile:
raise ValueError(
f"JSON profile '{self.view.out_json_profile_var.get()}' not found."
)
profile = next((p for p in profiles if p.name == self.view.out_json_profile_var.get()), None)
if not profile: raise ValueError(f"JSON profile '{self.view.out_json_profile_var.get()}' not found.")
self.active_export_profiles["json"] = profile

return True
@@ -295,124 +233,72 @@
return False

def start_out_processing(self):
if self.is_processing:
log.warning("Processing already in progress.")
return
if not all(
[
self.view.out_filepath_var.get(),
self.view.out_output_dir_var.get(),
self.view.out_basename_var.get(),
]
):
log.error("Please set input file, output directory, and base filename.")
return
if not any(
[self.view.out_output_csv_var.get(), self.view.out_output_json_var.get()]
):
log.error("Please select at least one output format (CSV or JSON).")
return
if not self._prepare_out_processor_files():
return
if self.is_processing: log.warning("Processing already in progress."); return
if not all([self.view.out_filepath_var.get(), self.view.out_output_dir_var.get(), self.view.out_basename_var.get()]):
log.error("Please set input file, output directory, and base filename."); return
if not any([self.view.out_output_csv_var.get(), self.view.out_output_json_var.get()]):
log.error("Please select at least one output format (CSV or JSON)."); return
if not self._prepare_out_processor_files(): return

self.is_processing = True
self.view.start_processing_ui()

filepath_str = self.view.out_filepath_var.get()
self.config_manager.set("last_opened_out_file", filepath_str)
self.config_manager.set(
"last_out_output_dir", self.view.out_output_dir_var.get()
)
self.config_manager.set(
"active_out_export_profile_name", self.view.out_csv_profile_var.get()
)
self.config_manager.set("last_out_output_dir", self.view.out_output_dir_var.get())
self.config_manager.set("active_out_export_profile_name", self.view.out_csv_profile_var.get())
self.config_manager.save_config()

active_profile = self.active_export_profiles.get(
"csv"
) or self.active_export_profiles.get("json")
active_profile = self.active_export_profiles.get("csv") or self.active_export_profiles.get("json")
if not active_profile:
log.error("No active export profile found for processing.")
self.is_processing = False
self.view.update_ui_for_processing_state(False)
self.is_processing = False; self.view.update_ui_for_processing_state(False)
return

cpp_config = self.config_manager.get_cpp_converter_config()
enable_profiling = cpp_config.get("enable_python_worker_profiling", False)

self.out_processor.start_processing(
Path(filepath_str), active_profile, enable_profiling
)
self.out_processor.start_processing(Path(filepath_str), active_profile, enable_profiling)
self.worker_process = self.out_processor.get_worker()
self.view.poll_result_queue()

def _build_cpp_command_list(self) -> List[str]:
config = self.config_manager.get_cpp_converter_config()
exe_path = config.get("cpp_executable_path")
if not exe_path or not Path(exe_path).is_file():
raise ValueError("g_reconvert.exe path is not set or is invalid.")
rec_file, output_dir, out_basename = (
self.view.rec_filepath_var.get(),
self.view.rec_output_dir_var.get(),
self.view.rec_basename_var.get(),
)
if not all([rec_file, output_dir, out_basename]):
raise ValueError(
"Missing required paths for C++ converter (REC file or Output)."
)
if not exe_path or not Path(exe_path).is_file(): raise ValueError("g_reconvert.exe path is not set or is invalid.")
rec_file, output_dir, out_basename = self.view.rec_filepath_var.get(), self.view.rec_output_dir_var.get(), self.view.rec_basename_var.get()
if not all([rec_file, output_dir, out_basename]): raise ValueError("Missing required paths for C++ converter (REC file or Output).")
output_file_path = Path(output_dir) / f"{out_basename}.out"
self.last_generated_out_file = output_file_path
command = [
exe_path,
rec_file,
f"/o={str(output_file_path)}",
f"/n={self.view.rec_file_count_var.get()}",
]
if config.get("post_process"):
command.append(f"/p={config.get('post_process_level', '1')}")
if config.get("video_show"):
command.append("/vshow")
if config.get("video_save"):
command.append("/vsave")
if config.get("gps_save_track"):
command.append("/gps")
if config.get("silent_overwrite"):
command.append("//o")
command = [exe_path, rec_file, f"/o={str(output_file_path)}", f"/n={self.view.rec_file_count_var.get()}"]
if config.get("post_process"): command.append(f"/p={config.get('post_process_level', '1')}")
if config.get("video_show"): command.append("/vshow")
if config.get("video_save"): command.append("/vsave")
if config.get("gps_save_track"): command.append("/gps")
if config.get("silent_overwrite"): command.append("//o")
log.info(f"Assembled C++ command: {' '.join(command)}")
return command
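For a hypothetical run with post-processing at level 2 and GPS track export enabled (paths and values illustrative, not taken from the commit), the method would return:

["C:/tools/g_reconvert.exe", "C:/flight/rec/radar0001.rec", "/o=C:/out/flight01.out", "/n=4", "/p=2", "/gps"]

which the log line renders as: C:/tools/g_reconvert.exe C:/flight/rec/radar0001.rec /o=C:/out/flight01.out /n=4 /p=2 /gps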

def start_rec_conversion(self):
if self.is_processing:
log.warning("A process is already running.")
return
if self.is_processing: log.warning("A process is already running."); return
try:
command_list, output_dir = (
self._build_cpp_command_list(),
self.view.rec_output_dir_var.get(),
)
command_list, output_dir = self._build_cpp_command_list(), self.view.rec_output_dir_var.get()
except ValueError as e:
log.error(f"Configuration error: {e}")
messagebox.showerror("Configuration Error", str(e), parent=self.view)
return
log.error(f"Configuration error: {e}"); messagebox.showerror("Configuration Error", str(e), parent=self.view); return
self.is_processing = True
self.view.start_processing_ui()
worker_args = (command_list, self.result_queue, output_dir)
self._launch_worker(run_cpp_converter, worker_args)

def _launch_worker(self, target_func, args_tuple: Tuple):
while not self.command_queue.empty():
self.command_queue.get_nowait()
while not self.result_queue.empty():
self.result_queue.get_nowait()
self.worker_process = mp.Process(
target=target_func, args=args_tuple, daemon=True
)
while not self.command_queue.empty(): self.command_queue.get_nowait()
while not self.result_queue.empty(): self.result_queue.get_nowait()
self.worker_process = mp.Process(target=target_func, args=args_tuple, daemon=True)
self.worker_process.start()
self.view.poll_result_queue()
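Both queues are drained before spawning so a stale "STOP" command or leftover result cannot affect the new run. The worker target receives (command_list, result_queue, output_dir), and handle_worker_completion further down keys success off the literal substring "Conversion process completed successfully" in the final message. A minimal sketch of a worker compatible with that contract (illustrative only, not the project's actual run_cpp_converter):

import subprocess

def demo_worker(command_list, result_queue, output_dir):
    # Run the external converter and forward its outcome to the GUI process.
    proc = subprocess.run(command_list, cwd=output_dir, capture_output=True, text=True)
    if proc.returncode == 0:
        result_queue.put({"type": "complete",
                          "message": "Conversion process completed successfully"})
    else:
        result_queue.put({"type": "error",
                          "message": proc.stderr or "Converter exited with an error"})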

def stop_processing(self):
if not self.is_processing:
return
if not self.is_processing: return
log.info("Stop requested by user.")
if self.worker_process and self.worker_process.is_alive():
self.command_queue.put("STOP")
@@ -421,10 +307,8 @@

def _close_all_files(self):
for fh in self.output_file_handles.values():
try:
fh.close()
except Exception as e:
log.error(f"Error closing file handle {fh.name}: {e}")
try: fh.close()
except Exception as e: log.error(f"Error closing file handle {fh.name}: {e}")
self.output_file_handles.clear()
self.csv_writers.clear()

@@ -432,9 +316,7 @@
use_full_path = self.view.out_use_full_path_var.get()
if "csv" in self.csv_writers:
profile = self.active_export_profiles["csv"]
row_values = [
_get_value_from_path(batch, field) for field in profile.fields
]
row_values = [_get_value_from_path(batch, field) for field in profile.fields]
self.csv_writers["csv"].writerow(row_values)
if "json" in self.active_export_profiles:
profile = self.active_export_profiles["json"]
@@ -450,72 +332,47 @@

def _write_json_buffer_to_file(self):
if not self.json_data_buffer:
if "json" in self.active_export_profiles:
log.info("JSON export enabled, but no data. Skipping file creation.")
if "json" in self.active_export_profiles: log.info("JSON export enabled, but no data. Skipping file creation.")
return
try:
profile = self.active_export_profiles["json"]
output_dir, basename = (
Path(self.view.out_output_dir_var.get()),
self.view.out_basename_var.get(),
)
output_dir, basename = Path(self.view.out_output_dir_var.get()), self.view.out_basename_var.get()
json_filename = f"{basename}_{profile.name}.json"
path = output_dir / json_filename
log.info(
f"Writing {len(self.json_data_buffer)} records to JSON file: {path}"
)
with open(path, "w", encoding="utf-8") as f:
json.dump(self.json_data_buffer, f, indent=4)
log.info(f"Writing {len(self.json_data_buffer)} records to JSON file: {path}")
with open(path, "w", encoding="utf-8") as f: json.dump(self.json_data_buffer, f, indent=4)
log.info("JSON file written successfully.")
except (IOError, TypeError) as e:
log.error(f"Failed to write JSON output file: {e}")
finally:
self.json_data_buffer.clear()
except (IOError, TypeError) as e: log.error(f"Failed to write JSON output file: {e}")
finally: self.json_data_buffer.clear()

def open_folder_from_path(self, folder_path_str: str):
if not folder_path_str:
return
if not folder_path_str: return
folder_path = Path(folder_path_str)
if not folder_path.is_dir():
log.error(f"Directory '{folder_path}' does not exist.")
return
if not folder_path.is_dir(): log.error(f"Directory '{folder_path}' does not exist."); return
log.info(f"Opening folder: {folder_path}")
try:
if sys.platform == "win32":
os.startfile(folder_path)
elif sys.platform == "darwin":
subprocess.run(["open", folder_path])
else:
subprocess.run(["xdg-open", folder_path])
except Exception as e:
log.error(f"Failed to open folder: {e}")
if sys.platform == "win32": os.startfile(folder_path)
elif sys.platform == "darwin": subprocess.run(["open", folder_path])
else: subprocess.run(["xdg-open", folder_path])
except Exception as e: log.error(f"Failed to open folder: {e}")

def handle_worker_completion(self, msg: Dict[str, Any]):
status = "Interrupted" if msg.get("interrupted") else "Complete"
log.info(f"--- Process {status}. ---")
if self.view.out_output_json_var.get():
self._write_json_buffer_to_file()
if self.view.out_output_json_var.get(): self._write_json_buffer_to_file()
self._close_all_files()
self.is_processing = False
self.worker_process = None
self.segment_processor = None
self.view.update_ui_for_processing_state(False)
is_cpp_success = "Conversion process completed successfully" in msg.get(
"message", ""
)
is_cpp_success = "Conversion process completed successfully" in msg.get("message", "")
self.view.update_rec_tab_buttons_state(conversion_successful=is_cpp_success)
if is_cpp_success:
log.info(
f"C++ converter successfully generated: {self.last_generated_out_file}"
)
if stats := msg.get("stats"):
self._log_summary(stats)
if is_cpp_success: log.info(f"C++ converter successfully generated: {self.last_generated_out_file}")
if stats := msg.get("stats"): self._log_summary(stats)

def process_last_generated_out(self):
if self.last_generated_out_file and self.last_generated_out_file.exists():
log.info(
f"Loading generated file '{self.last_generated_out_file.name}' into OUT Processor."
)
log.info(f"Loading generated file '{self.last_generated_out_file.name}' into OUT Processor.")
self.view.out_filepath_var.set(str(self.last_generated_out_file))
self.view.notebook.select(self.view.out_processor_tab)
else:
@@ -528,47 +385,32 @@
block_types = sorted([k for k in stats if k.startswith("found_")])
if block_types:
log.info("Block Types Found:")
for key in block_types:
log.info(f" - {key.replace('found_', '')}: {stats[key]}")
if failed := stats.get("failed_to_parse_blocks"):
log.error(f"Failed to Parse Blocks: {failed}")
for key in block_types: log.info(f" - {key.replace('found_', '')}: {stats[key]}")
if failed := stats.get("failed_to_parse_blocks"): log.error(f"Failed to Parse Blocks: {failed}")
log.info("------------------------------------")

def open_profile_editor(self):
if (
self.view.profile_editor_window
and self.view.profile_editor_window.winfo_exists()
):
self.view.profile_editor_window.lift()
return
if self.view.profile_editor_window and self.view.profile_editor_window.winfo_exists():
self.view.profile_editor_window.lift(); return
profiles = self.config_manager.get_export_profiles()
self.view.profile_editor_window = ProfileEditorWindow(
master=self.view.master, controller=self, profiles=profiles
)
self.view.profile_editor_window = ProfileEditorWindow(master=self.view.master, controller=self, profiles=profiles)
self.view.profile_editor_window.wait_window()
self._load_initial_config()

def open_rec_config_editor(self):
if self.view.rec_config_window and self.view.rec_config_window.winfo_exists():
self.view.rec_config_window.lift()
return
self.view.rec_config_window.lift(); return
current_config = self.config_manager.get_cpp_converter_config()
self.view.rec_config_window = RecConfigWindow(
master=self.view.master,
controller=self,
current_config=current_config,
master=self.view.master, controller=self, current_config=current_config,
title="g_reconverter Config (REC to OUT)",
save_command=self.save_rec_config,
save_command=self.save_rec_config
)
self.view.rec_config_window.wait_window()

def open_export_config_editor(self):
if (
self.view.export_config_window
and self.view.export_config_window.winfo_exists()
):
self.view.export_config_window.lift()
return
if self.view.export_config_window and self.view.export_config_window.winfo_exists():
self.view.export_config_window.lift(); return
current_config = self.config_manager.get("segment_export_config", {})
self.view.export_config_window = SegmentExportConfigWindow(
master=self.view.master, controller=self, current_config=current_config
@@ -584,9 +426,7 @@
full_config.update(new_config)
main_cpp_config = self.config_manager.get_cpp_converter_config()
if "g_reconverter_options" in full_config:
full_config["g_reconverter_options"]["cpp_executable_path"] = (
main_cpp_config.get("cpp_executable_path")
)
full_config["g_reconverter_options"]["cpp_executable_path"] = main_cpp_config.get("cpp_executable_path")
self.config_manager.set("segment_export_config", full_config)
self.config_manager.save_config()
log.info("Segment export configuration saved.")
@@ -595,44 +435,38 @@
self.config_manager.save_export_profiles(profiles)

def open_current_flight_folder(self):
if (
self.flight_analyzer.current_flight_folder_path
and self.flight_analyzer.current_flight_folder_path.is_dir()
):
self.open_folder_from_path(
str(self.flight_analyzer.current_flight_folder_path)
)
if self.flight_analyzer.current_flight_folder_path and self.flight_analyzer.current_flight_folder_path.is_dir():
self.open_folder_from_path(str(self.flight_analyzer.current_flight_folder_path))
else:
log.warning("No flight folder available. Please run an analysis first.")
messagebox.showinfo(
"No Folder", "No flight folder has been created yet.", parent=self.view
)
messagebox.showinfo("No Folder", "No flight folder has been created yet.", parent=self.view)

def select_and_analyze_flight_folder(self):
initial_dir = self.config_manager.get("last_flight_folder")
new_dir_str = filedialog.askdirectory(
initialdir=initial_dir, title="Select Folder with Flight Recordings"
)
new_dir_str = filedialog.askdirectory(initialdir=initial_dir, title="Select Folder with Flight Recordings")
if not new_dir_str:
return

new_dir = Path(new_dir_str)
self.config_manager.set("last_flight_folder", new_dir_str)
self.config_manager.save_config()
self.view.analyzer_rec_folder_var.set(new_dir_str)

flight_name = self._generate_flight_name(new_dir)
self.view.analyzer_flight_name_var.set(flight_name)

workspace_dir = Path.cwd() / "flight_workspace"
flight_dir = workspace_dir / flight_name
if not self.view.analyzer_workspace_dir_var.get():
self.view.analyzer_workspace_dir_var.set(str(Path.cwd() / "flight_workspace"))

workspace_parent_dir = Path(self.view.analyzer_workspace_dir_var.get())
flight_dir = workspace_parent_dir / flight_name

summary_csv = flight_dir / "flight_summary.csv"

if summary_csv.is_file():
response = messagebox.askyesno(
"Previous Analysis Found",
f"An analysis for flight '{flight_name}' already exists.\n\nDo you want to load the previous results?",
parent=self.view,
f"An analysis for flight '{flight_name}' already exists in the specified workspace.\n\nDo you want to load the previous results?",
parent=self.view
)
if response:
log.info(f"Loading previous analysis from {flight_dir}")
@@ -647,12 +481,11 @@
try:
rec_files = sorted(list(new_dir.glob("*.rec")))
if not rec_files:
self.view.analyzer_info_var.set("No .rec files found.")
return
self.view.analyzer_info_var.set("No .rec files found."); return

self.total_files_for_analysis = len(rec_files)
total_size_mb = sum(f.stat().st_size for f in rec_files) / (1024 * 1024)
info_text = f"Found {self.total_files_for_analysis} .rec files, Total size: {total_size_mb:.2f} MB. Ready for analysis."
info_text = (f"Found {self.total_files_for_analysis} .rec files, Total size: {total_size_mb:.2f} MB. Ready for analysis.")

self.view.analyzer_info_var.set(info_text)
self.view.start_analysis_button.config(state=tk.NORMAL)
@@ -662,12 +495,9 @@

def _generate_flight_name(self, folder_path: Path) -> str:
rec_files = sorted([f for f in folder_path.glob("*.rec")])
if not rec_files:
return f"{folder_path.name}_Flight"
if not rec_files: return f"{folder_path.name}_Flight"
first_file_name = rec_files[0].stem
match = re.search(
r"(\d{2})-(\d{2})-(\d{2})-(\d{2})-(\d{2})-(\d{2})", first_file_name
)
match = re.search(r"(\d{2})-(\d{2})-(\d{2})-(\d{2})-(\d{2})-(\d{2})", first_file_name)
if match:
yy, mo, dd, hh, mi, ss = match.groups()
return f"{yy}{mo}{dd}_{hh}{mi}{ss}_Flight"
@@ -675,8 +505,7 @@

def _load_previous_analysis(self, summary_csv_path: Path):
if pd is None:
log.error("Cannot load previous analysis: Pandas is not installed.")
return
log.error("Cannot load previous analysis: Pandas is not installed."); return
try:
summary_df = pd.read_csv(summary_csv_path)
self.last_flight_summary_df = summary_df
@@ -687,9 +516,7 @@
self.view.start_analysis_button.config(state=tk.NORMAL)
except Exception as e:
log.error(f"Failed to load previous analysis from {summary_csv_path}: {e}")
messagebox.showerror(
"Load Error", f"Could not load the summary file:\n{e}", parent=self.view
)
messagebox.showerror("Load Error", f"Could not load the summary file:\n{e}", parent=self.view)

def start_flight_analysis(self):
if self.is_processing:
@@ -698,28 +525,27 @@

flight_name = self.view.analyzer_flight_name_var.get().strip()
rec_folder = self.view.analyzer_rec_folder_var.get()
if not flight_name or not rec_folder:
messagebox.showerror(
"Setup Incomplete",
"Select a folder and provide a flight name.",
parent=self.view,
)
workspace_parent_dir_str = self.view.analyzer_workspace_dir_var.get().strip()

if not all([flight_name, rec_folder, workspace_parent_dir_str]):
messagebox.showerror("Setup Incomplete", "Select a recordings folder, a workspace directory, and provide a flight name.", parent=self.view)
return

self.is_processing = True
self.view.start_processing_ui()
self.view.analyzer_info_var.set(
f"Starting analysis for flight: {flight_name}..."
)

self.config_manager.set("last_flight_workspace_parent_dir", workspace_parent_dir_str)
analysis_options = {
"aggregate_by_scale": self.view.aggregate_by_scale_var.get(),
"aggregate_by_waveform": self.view.aggregate_by_waveform_var.get(),
"aggregate_by_waveform": self.view.aggregate_by_waveform_var.get()
}
self.config_manager.set("flight_analysis_options", analysis_options)
self.config_manager.save_config()

self.flight_analyzer.start_analysis(rec_folder, flight_name, analysis_options)
self.is_processing = True
self.view.start_processing_ui()

full_workspace_path = Path(workspace_parent_dir_str) / flight_name
self.view.analyzer_info_var.set(f"Starting analysis for flight: {flight_name}...")

self.flight_analyzer.start_analysis(rec_folder, flight_name, full_workspace_path, analysis_options)
self.worker_process = self.flight_analyzer.worker_process
self.view.poll_result_queue()

@@ -736,67 +562,42 @@

def start_segment_export(self):
if self.is_processing:
messagebox.showwarning(
"Process Busy",
"Cannot start export while another process is running.",
parent=self.view,
)
return
messagebox.showwarning("Process Busy", "Cannot start export while another process is running.", parent=self.view); return

selected_item_ids = self.view.flight_timeline_tree.selection()
if not selected_item_ids:
messagebox.showinfo(
"No Selection",
"Please select one or more segments to export.",
parent=self.view,
)
return
messagebox.showinfo("No Selection", "Please select one or more segments to export.", parent=self.view); return

if self.last_flight_summary_df is None:
messagebox.showerror(
"Error",
"Flight summary data is not available. Cannot proceed with export.",
)
messagebox.showerror("Error", "Flight summary data is not available. Cannot proceed with export.")
return

self.total_segments_for_export = len(selected_item_ids)
log.info(f"Queuing {self.total_segments_for_export} segment(s) for export.")

while not self.export_manager.job_queue.empty():
try:
self.export_manager.job_queue.get_nowait()
except queue.Empty:
break
try: self.export_manager.job_queue.get_nowait()
except queue.Empty: break

rec_folder = Path(self.view.analyzer_rec_folder_var.get())
export_config = self.config_manager.get("segment_export_config")
folder_template = export_config.get("naming_options", {}).get(
"folder_name_template", "{Segment}_{StartBatch}-{EndBatch}"
)
folder_template = export_config.get("naming_options", {}).get("folder_name_template", "{Segment}_{StartBatch}-{EndBatch}")
current_flight_path = self.flight_analyzer.current_flight_folder_path
if not current_flight_path:
log.error("Cannot start export, flight analysis folder path is unknown.")
messagebox.showerror(
"Error",
"Could not determine the flight workspace folder.",
parent=self.view,
)
messagebox.showerror("Error", "Could not determine the flight workspace folder.", parent=self.view)
return

for iid in selected_item_ids:
try:
row_index = self.view.flight_timeline_tree.index(iid)
segment_info_dict = self.last_flight_summary_df.iloc[
row_index
].to_dict()
segment_info_dict = self.last_flight_summary_df.iloc[row_index].to_dict()

placeholders = {
"{Segment}": str(
segment_info_dict.get("Segment (Mode | Scale | WF)", "")
),
"{StartBatch}": str(segment_info_dict.get("Start Batch", "")),
"{EndBatch}": str(segment_info_dict.get("End Batch", "")),
"{BatchCount}": str(segment_info_dict.get("Batch Count", "")),
"{Segment}": str(segment_info_dict.get('Segment (Mode | Scale | WF)', '')),
"{StartBatch}": str(segment_info_dict.get('Start Batch', '')),
"{EndBatch}": str(segment_info_dict.get('End Batch', '')),
"{BatchCount}": str(segment_info_dict.get('Batch Count', '')),
}
folder_name = folder_template
for key, value in placeholders.items():
@@ -805,9 +606,7 @@

segment_folder = current_flight_path / safe_folder_name

job = ExportJob(
segment_info_dict, segment_folder, rec_folder, export_config
)
job = ExportJob(segment_info_dict, segment_folder, rec_folder, export_config)
self.export_manager.submit_job(job)

except (ValueError, IndexError) as e:
@@ -818,9 +617,7 @@

self.is_processing = True
self.view.start_processing_ui()
self.view.analyzer_info_var.set(
f"Exporting {self.total_segments_for_export} segment(s)..."
)
self.view.analyzer_info_var.set(f"Exporting {self.total_segments_for_export} segment(s)...")

self.export_manager.start()
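Each selected timeline row becomes an ExportJob whose folder name comes from substituting the placeholder tokens into the naming template; the loop body falls outside the hunk, but presumably does a straight string replace. With the default template and illustrative values:

template = "{Segment}_{StartBatch}-{EndBatch}"
placeholders = {  # values are illustrative, not taken from the commit
    "{Segment}": "SEA-1_20NM_wf-2-1",
    "{StartBatch}": "120",
    "{EndBatch}": "455",
    "{BatchCount}": "336",
}
folder_name = template
for key, value in placeholders.items():
    folder_name = folder_name.replace(key, value)
print(folder_name)  # SEA-1_20NM_wf-2-1_120-455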

@@ -829,38 +626,22 @@
def load_segments_for_processing(self):
log.info("Loading segments for batch processing...")
if self.is_processing:
messagebox.showwarning(
"Busy",
"Cannot load segments while another process is running.",
parent=self.view,
)
messagebox.showwarning("Busy", "Cannot load segments while another process is running.", parent=self.view)
return

flight_folder = self.flight_analyzer.current_flight_folder_path
if not flight_folder or not flight_folder.is_dir():
messagebox.showerror(
"No Flight Loaded",
"Please run a flight analysis first on the 'Flight Analyzer' tab.",
parent=self.view,
)
messagebox.showerror("No Flight Loaded", "Please run a flight analysis first on the 'Flight Analyzer' tab.", parent=self.view)
return

if self.last_flight_summary_df is None:
messagebox.showerror(
"No Summary Data",
"Flight summary data not found. Please re-run the analysis.",
parent=self.view,
)
messagebox.showerror("No Summary Data", "Flight summary data not found. Please re-run the analysis.", parent=self.view)
return

try:
export_config = self.config_manager.get("segment_export_config", {})
naming_template = export_config.get("naming_options", {}).get(
"folder_name_template", "{Segment}_{StartBatch}-{EndBatch}"
)
verified_segments = SegmentProcessor.scan_for_segments(
flight_folder, self.last_flight_summary_df, naming_template
)
naming_template = export_config.get("naming_options", {}).get("folder_name_template", "{Segment}_{StartBatch}-{EndBatch}")
verified_segments = SegmentProcessor.scan_for_segments(flight_folder, self.last_flight_summary_df, naming_template)

if not verified_segments:
log.warning("No segments found or summary is empty.")
@@ -870,28 +651,18 @@

except Exception as e:
log.error(f"Failed to load and verify segments: {e}", exc_info=True)
messagebox.showerror(
"Error",
f"An error occurred while loading segments:\n{e}",
parent=self.view,
)
messagebox.showerror("Error", f"An error occurred while loading segments:\n{e}", parent=self.view)

def start_segment_batch_processing(self):
if self.is_processing:
messagebox.showwarning(
"Busy", "Another process is already running.", parent=self.view
)
messagebox.showwarning("Busy", "Another process is already running.", parent=self.view)
return

tab: SegmentProcessorTab = self.view.segment_processor_tab
selected_segments = tab.get_selected_segments_data()

if not selected_segments:
messagebox.showinfo(
"No Selection",
"Please select at least one 'Ready' segment to process.",
parent=self.view,
)
messagebox.showinfo("No Selection", "Please select at least one 'Ready' segment to process.", parent=self.view)
return

cpp_config = self.config_manager.get_cpp_converter_config()
@@ -912,18 +683,10 @@
}

if not config["output_dir"]:
messagebox.showerror(
"Configuration Error",
"Please specify an output directory.",
parent=self.view,
)
messagebox.showerror("Configuration Error", "Please specify an output directory.", parent=self.view)
return
if not config["generate_csv"] and not config["generate_json"]:
messagebox.showerror(
"Configuration Error",
"Please select at least one output format (CSV or JSON).",
parent=self.view,
)
messagebox.showerror("Configuration Error", "Please select at least one output format (CSV or JSON).", parent=self.view)
return

sp_config = self.config_manager.get("segment_processor_config", {})
@@ -935,9 +698,7 @@
self.is_processing = True
self.view.start_processing_ui()

self.segment_processor = SegmentProcessor(
config, self.result_queue, self.command_queue
)
self.segment_processor = SegmentProcessor(config, self.result_queue, self.command_queue)
self.segment_processor.start()

self.view.poll_result_queue()
@@ -27,7 +27,6 @@ log = logger.get_logger(__name__)

TICK_DURATION_S = 64e-6
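TICK_DURATION_S is the length of one TTAG tick, and _create_and_save_summary below converts tick counts to a relative timeline as flight_time_s = (TTAG - min_ttag) * TICK_DURATION_S; a TTAG span of 1,000,000 ticks therefore corresponds to 1,000,000 * 64e-6 = 64 s of flight time.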

class FlightAnalyzer:
"""Manages the multi-step process of analyzing a flight folder."""

@@ -40,30 +39,30 @@ class FlightAnalyzer:
self.analysis_options: dict = {}

def start_analysis(
self, rec_folder_str: str, flight_name: str, analysis_options: dict
self, rec_folder_str: str, flight_name: str, workspace_path: Path, analysis_options: dict
) -> threading.Thread:
self.current_flight_name = flight_name
self.analysis_options = analysis_options
analysis_thread = threading.Thread(
target=self._flight_analysis_orchestrator,
args=(rec_folder_str, flight_name),
args=(rec_folder_str, flight_name, workspace_path),
daemon=True,
)
analysis_thread.start()
return analysis_thread

def _flight_analysis_orchestrator(self, rec_folder_str: str, flight_name: str):
def _flight_analysis_orchestrator(self, rec_folder_str: str, flight_name: str, flight_dir: Path):
self.current_flight_folder_path = None
try:
workspace_dir = Path.cwd() / "flight_workspace"
flight_dir = workspace_dir / flight_name
flight_dir.mkdir(parents=True, exist_ok=True)
self.current_flight_folder_path = flight_dir

cpp_config = self.config_manager.get_cpp_converter_config()
exe_path = cpp_config.get("cpp_executable_path")
if not exe_path or not Path(exe_path).is_file():
raise ValueError(f"C++ executable not found at path: {exe_path}")
raise ValueError(
f"C++ executable not found at path: {exe_path}"
)

rec_files = sorted(Path(rec_folder_str).glob("*.rec"))
if not rec_files:
@@ -80,9 +79,7 @@
"/a",
]

log.info(
f"Running g_reconverter for full analysis: {' '.join(command_list)}"
)
log.info(f"Running g_reconverter for full analysis: {' '.join(command_list)}")

self.worker_process = mp.Process(
target=run_cpp_converter,
@@ -102,12 +99,7 @@
def handle_final_analysis_steps(self):
if not self.current_flight_folder_path:
log.error("Cannot run final analysis steps: flight folder path is not set.")
self.result_queue.put(
{
"type": "error",
"message": "Internal state error: flight folder path missing.",
}
)
self.result_queue.put({"type": "error", "message": "Internal state error: flight folder path missing."})
return

try:
@@ -132,18 +124,14 @@

self._create_flight_report_txt(summary_df, self.current_flight_folder_path)

self.result_queue.put(
{
self.result_queue.put({
"type": "analysis_summary_data",
"data": summary_df,
"flight_folder_path": self.current_flight_folder_path,
}
)
"flight_folder_path": self.current_flight_folder_path
})

log.info("Flight analysis complete. All artifacts saved.")
self.result_queue.put(
{"type": "complete", "message": "Analysis successful."}
)
self.result_queue.put({"type": "complete", "message": "Analysis successful."})

except Exception as e:
log.error(f"Final analysis steps failed: {e}", exc_info=True)
@@ -174,23 +162,21 @@

storyboard_df = pd.read_csv(
txt_path,
sep=";",
sep=';',
header=0,
names=unique_column_names,
on_bad_lines="skip",
encoding="utf-8",
encoding_errors="ignore",
on_bad_lines='skip',
encoding='utf-8',
encoding_errors='ignore'
)

for col in storyboard_df.select_dtypes(include=["object"]).columns:
for col in storyboard_df.select_dtypes(include=['object']).columns:
storyboard_df[col] = storyboard_df[col].str.strip()

numeric_cols = ["Batch", "TTAG"]
for col in numeric_cols:
if col in storyboard_df.columns:
storyboard_df[col] = pd.to_numeric(
storyboard_df[col], errors="coerce"
)
storyboard_df[col] = pd.to_numeric(storyboard_df[col], errors="coerce")

storyboard_df.dropna(subset=["Batch", "TTAG"], inplace=True)
storyboard_df["Batch"] = storyboard_df["Batch"].astype(int)
@@ -205,9 +191,14 @@
return None

csv_path = output_dir / "flight_storyboard.csv"
json_path = output_dir / "flight_storyboard.json"

log.info(f"Saving full storyboard to {csv_path}")
storyboard_df.to_csv(csv_path, index=False)

log.info(f"Saving full storyboard to {json_path}")
storyboard_df.to_json(json_path, orient="records", indent=4)

return storyboard_df

def _create_and_save_summary(
@@ -218,50 +209,35 @@
agg_by_scale = options.get("aggregate_by_scale", True)
agg_by_waveform = options.get("aggregate_by_waveform", True)

# Dynamically build the status key
status_components = []

mode_part = (
df.get("Mode", pd.Series(index=df.index, dtype=str))
.astype(str)
.str.strip()
.replace("", "N/A")
)
status_components.append(mode_part)
mode_part = df.get('Mode', pd.Series(index=df.index, dtype=str)).astype(str).str.strip().replace('', 'N/A')
submode_part = df.get('Mode.3', pd.Series(index=df.index, dtype=str)).astype(str).str.strip().replace('', 'N/A')

# Always join Mode and Submode
full_mode_part = mode_part + "-" + submode_part
status_components.append(full_mode_part)

if agg_by_scale:
scale_part = (
df.get("Scal.2", pd.Series(index=df.index, dtype=str))
.astype(str)
.str.strip()
.replace("", "N/A")
)
scale_part = df.get('Scal.2', pd.Series(index=df.index, dtype=str)).astype(str).str.strip().replace('', 'N/A')
status_components.append(scale_part)

if agg_by_waveform:
wf_part1 = (
df.get("WF", pd.Series(index=df.index, dtype=str))
.astype(str)
.str.strip()
.replace("", "N/A")
)
wf_part2 = (
df.get("WF.2", pd.Series(index=df.index, dtype=str))
.astype(str)
.str.strip()
.replace("", "N/A")
)
wf_part1 = df.get('WF', pd.Series(index=df.index, dtype=str)).astype(str).str.strip().replace('', 'N/A')
wf_part2 = df.get('WF.2', pd.Series(index=df.index, dtype=str)).astype(str).str.strip().replace('', 'N/A')
status_components.append("wf-" + wf_part1 + "-" + wf_part2)

df["status"] = status_components[0]
df['status'] = status_components[0]
for component in status_components[1:]:
df["status"] = df["status"] + "_" + component
df['status'] = df['status'] + "_" + component

df["status_changed"] = df["status"].ne(df["status"].shift())
df['status_changed'] = df['status'].ne(df['status'].shift())

min_ttag = df["TTAG"].min()
df["flight_time_s"] = (df["TTAG"] - min_ttag) * TICK_DURATION_S
min_ttag = df['TTAG'].min()
df['flight_time_s'] = (df['TTAG'] - min_ttag) * TICK_DURATION_S

change_indices = df[df["status_changed"]].index.tolist()
change_indices = df[df['status_changed']].index.tolist()
if not change_indices or change_indices[0] != 0:
change_indices.insert(0, 0)
if df.index[-1] + 1 not in change_indices:
@@ -272,28 +248,23 @@
start_loc, end_loc = change_indices[i], change_indices[i+1] - 1
segment = df.loc[start_loc:end_loc]

if segment.empty:
continue
if segment.empty: continue

start_time_s = segment["flight_time_s"].iloc[0]
end_time_s = segment["flight_time_s"].iloc[-1]
start_time_s = segment['flight_time_s'].iloc[0]
end_time_s = segment['flight_time_s'].iloc[-1]

summary_records.append(
{
"Segment (Mode | Scale | WF)": segment["status"].iloc[0],
"Start Batch": segment["Batch"].iloc[0],
"End Batch": segment["Batch"].iloc[-1],
"Batch Count": segment["Batch"].iloc[-1]
- segment["Batch"].iloc[0]
+ 1,
"Duration (s)": end_time_s - start_time_s,
"start_time_str": str(timedelta(seconds=int(start_time_s))),
"end_time_str": str(timedelta(seconds=int(end_time_s))),
"Start File": segment["file"].iloc[0],
"End File": segment["file"].iloc[-1],
"# Files": segment["file"].nunique(),
}
)
summary_records.append({
'Segment (Mode | Scale | WF)': segment['status'].iloc[0],
'Start Batch': segment['Batch'].iloc[0],
'End Batch': segment['Batch'].iloc[-1],
'Batch Count': segment['Batch'].iloc[-1] - segment['Batch'].iloc[0] + 1,
'Duration (s)': end_time_s - start_time_s,
'start_time_str': str(timedelta(seconds=int(start_time_s))),
'end_time_str': str(timedelta(seconds=int(end_time_s))),
'Start File': segment['file'].iloc[0],
'End File': segment['file'].iloc[-1],
'# Files': segment['file'].nunique()
})

summary_df = pd.DataFrame(summary_records)

@@ -312,8 +283,8 @@
log.info(f"Generating human-readable flight report to {report_path}")

try:
total_duration = summary_df["Duration (s)"].sum()
total_batches = summary_df["Batch Count"].sum()
total_duration = summary_df['Duration (s)'].sum()
total_batches = summary_df['Batch Count'].sum()
num_segments = len(summary_df)

with open(report_path, "w", encoding="utf-8") as f:
@@ -329,31 +300,18 @@
f.write("--- SEGMENT SUMMARY ---\n")

report_df = summary_df.copy()
report_df["Duration (s)"] = report_df["Duration (s)"].map(
"{:.2f}".format
)
report_df['Duration (s)'] = report_df['Duration (s)'].map('{:.2f}'.format)

report_df = report_df[
[
"Segment (Mode | Scale | WF)",
"Start Batch",
"End Batch",
"Batch Count",
"start_time_str",
"end_time_str",
"Duration (s)",
"Start File",
"End File",
"# Files",
]
]
report_df.rename(
columns={
"start_time_str": "Start Time",
"end_time_str": "End Time",
},
inplace=True,
)
report_df = report_df[[
'Segment (Mode | Scale | WF)',
'Start Batch', 'End Batch', 'Batch Count',
'start_time_str', 'end_time_str', 'Duration (s)',
'Start File', 'End File', '# Files'
]]
report_df.rename(columns={
'start_time_str': 'Start Time',
'end_time_str': 'End Time'
}, inplace=True)

df_string = report_df.to_string(index=True)

@@ -81,6 +81,7 @@ class MainWindow(tk.Frame):

self.analyzer_rec_folder_var = tk.StringVar()
self.analyzer_flight_name_var = tk.StringVar()
self.analyzer_workspace_dir_var = tk.StringVar()
self.analyzer_info_var = tk.StringVar(
value="Please select a folder and a flight name."
)
@@ -137,98 +138,87 @@

def _create_flight_analyzer_tab(self, parent):
parent.columnconfigure(0, weight=1)
parent.rowconfigure(4, weight=1)
parent.rowconfigure(5, weight=1)  # increased to make room for the new frames
|
||||
|
||||
setup_frame = ttk.LabelFrame(parent, text="Flight Setup")
|
||||
# --- Frame 1: Flight Source ---
|
||||
setup_frame = ttk.LabelFrame(parent, text="Flight Source")
|
||||
setup_frame.grid(row=0, column=0, sticky="ew", padx=5, pady=5)
|
||||
setup_frame.columnconfigure(1, weight=1)
|
||||
ttk.Label(setup_frame, text="Recordings Folder:").grid(
|
||||
row=0, column=0, padx=5, pady=5, sticky="w"
|
||||
)
|
||||
rec_folder_entry = ttk.Entry(
|
||||
setup_frame, textvariable=self.analyzer_rec_folder_var, state="readonly"
|
||||
)
|
||||
ttk.Label(setup_frame, text="Recordings Folder:").grid(row=0, column=0, padx=5, pady=5, sticky="w")
|
||||
rec_folder_entry = ttk.Entry(setup_frame, textvariable=self.analyzer_rec_folder_var, state="readonly")
|
||||
rec_folder_entry.grid(row=0, column=1, sticky="ew", padx=5)
|
||||
ttk.Button(setup_frame, text="Browse...", command=self.controller.select_and_analyze_flight_folder).grid(row=0, column=2, padx=5)
|
||||
|
||||
# --- Frame 2: Workspace ---
workspace_frame = ttk.LabelFrame(parent, text="Flight Workspace (Analysis Output)")
workspace_frame.grid(row=1, column=0, sticky="ew", padx=5, pady=5)
workspace_frame.columnconfigure(1, weight=1)

ttk.Label(workspace_frame, text="Workspace Path:").grid(row=0, column=0, padx=5, pady=5, sticky="w")
workspace_entry = ttk.Entry(workspace_frame, textvariable=self.analyzer_workspace_dir_var)
workspace_entry.grid(row=0, column=1, sticky="ew", padx=5)

dir_buttons_frame = ttk.Frame(workspace_frame)
dir_buttons_frame.grid(row=0, column=2, padx=5)
ttk.Button(
setup_frame,
text="Browse...",
command=self.controller.select_and_analyze_flight_folder,
).grid(row=0, column=2, padx=5)
ttk.Label(setup_frame, text="Flight Name:").grid(
row=1, column=0, padx=5, pady=5, sticky="w"
)
flight_name_entry = ttk.Entry(
setup_frame, textvariable=self.analyzer_flight_name_var
)
dir_buttons_frame, text="Browse...",
command=lambda: self.controller.select_output_dir(self.analyzer_workspace_dir_var)
).pack(side=tk.LEFT)
ttk.Button(
dir_buttons_frame, text="Open...",
command=lambda: self.controller.open_folder_from_path(self.analyzer_workspace_dir_var.get())
).pack(side=tk.LEFT, padx=(5, 0))

ttk.Label(workspace_frame, text="Flight Name:").grid(row=1, column=0, padx=5, pady=5, sticky="w")
flight_name_entry = ttk.Entry(workspace_frame, textvariable=self.analyzer_flight_name_var)
flight_name_entry.grid(row=1, column=1, columnspan=2, sticky="ew", padx=5)
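The new Browse/Open buttons route through lambdas because ttk's command= expects a zero-argument callable, so the lambda is what binds the specific StringVar each button should act on. A self-contained sketch of the same wiring, using a hypothetical select_output_dir helper rather than the application's real controller methods:

import tkinter as tk
from tkinter import ttk, filedialog

root = tk.Tk()
workspace_var = tk.StringVar()

def select_output_dir(var: tk.StringVar) -> None:
    # askdirectory returns "" when the dialog is cancelled.
    chosen = filedialog.askdirectory()
    if chosen:
        var.set(chosen)

ttk.Entry(root, textvariable=workspace_var).pack(side=tk.LEFT, fill=tk.X, expand=True)
# The lambda captures the target StringVar for the zero-argument command=.
ttk.Button(root, text="Browse...",
           command=lambda: select_output_dir(workspace_var)).pack(side=tk.LEFT)
root.mainloop()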
aggregation_frame = ttk.LabelFrame(parent, text="Segment Definition")
aggregation_frame.grid(row=1, column=0, sticky="ew", padx=5, pady=5)

# --- Frame 3: Segment Definition ---
aggregation_frame = ttk.LabelFrame(parent, text="Segment Definition Options")
aggregation_frame.grid(row=2, column=0, sticky="ew", padx=5, pady=5)

ttk.Checkbutton(
aggregation_frame,
text="Create new segment on Scale change",
variable=self.aggregate_by_scale_var,
variable=self.aggregate_by_scale_var
).pack(side=tk.LEFT, padx=10, pady=5)

ttk.Checkbutton(
aggregation_frame,
text="Create new segment on Waveform (WF) change",
variable=self.aggregate_by_waveform_var,
variable=self.aggregate_by_waveform_var
).pack(side=tk.LEFT, padx=10, pady=5)
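These two checkbuttons feed the aggregate_by_scale/aggregate_by_waveform flags that decide when the analyzer starts a new segment. One plausible sketch of that rule, assuming batches carry mode/scale/WF fields; the analyzer's actual implementation is outside this diff:

from itertools import groupby

def segment_key(batch: dict, by_scale: bool, by_waveform: bool) -> tuple:
    # Mode always splits segments; Scale and WF split only when enabled.
    key = [batch["mode"]]
    if by_scale:
        key.append(batch["scale"])
    if by_waveform:
        key.append(batch["wf"])
    return tuple(key)

batches = [
    {"mode": "SEA", "scale": 40, "wf": 1},
    {"mode": "SEA", "scale": 80, "wf": 1},
    {"mode": "GM",  "scale": 80, "wf": 2},
]
# groupby splits on consecutive key changes, i.e. each change opens a segment.
for key, group in groupby(batches, key=lambda b: segment_key(b, True, False)):
    print(key, "->", len(list(group)), "batch(es)")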
# --- Frame 4: Actions & Info ---
action_frame = ttk.Frame(parent)
action_frame.grid(row=2, column=0, sticky="ew", padx=5, pady=10)
self.start_analysis_button = ttk.Button(
action_frame,
text="Start Flight Analysis",
command=self.controller.start_flight_analysis,
state=tk.DISABLED,
)
action_frame.grid(row=3, column=0, sticky="ew", padx=5, pady=10)
self.start_analysis_button = ttk.Button(action_frame, text="Start Flight Analysis", command=self.controller.start_flight_analysis, state=tk.DISABLED)
self.start_analysis_button.pack(side=tk.LEFT, padx=(0, 5))
self.open_flight_folder_button = ttk.Button(
action_frame,
text="Open Flight Folder",
command=self.controller.open_current_flight_folder,
state=tk.DISABLED,
)
self.open_flight_folder_button = ttk.Button(action_frame, text="Open Flight Folder", command=self.controller.open_current_flight_folder, state=tk.DISABLED)
self.open_flight_folder_button.pack(side=tk.LEFT, padx=5)
info_label = ttk.Label(action_frame, textvariable=self.analyzer_info_var)
info_label.pack(side=tk.LEFT, padx=20)
# --- Frame 5: Progress ---
progress_frame = ttk.Frame(parent)
progress_frame.grid(row=3, column=0, sticky="ew", padx=5, pady=5)
progress_frame.grid(row=4, column=0, sticky="ew", padx=5, pady=5)
progress_frame.columnconfigure(1, weight=1)
ttk.Label(progress_frame, text="Analysis Progress:").grid(
row=0, column=0, sticky="w"
)
self.analyzer_progressbar = ttk.Progressbar(
progress_frame, variable=self.analyzer_progress_var
)
ttk.Label(progress_frame, text="Analysis Progress:").grid(row=0, column=0, sticky="w")
self.analyzer_progressbar = ttk.Progressbar(progress_frame, variable=self.analyzer_progress_var)
self.analyzer_progressbar.grid(row=0, column=1, sticky="ew", padx=5)
ttk.Label(progress_frame, textvariable=self.analyzer_progress_text_var).grid(
row=0, column=2, sticky="w"
)
ttk.Label(progress_frame, textvariable=self.analyzer_progress_text_var).grid(row=0, column=2, sticky="w")
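A determinate ttk.Progressbar bound to a variable, as above, is typically driven from a worker thread via after() so every widget update stays on the Tk main loop. A generic sketch with illustrative names, not the application's actual analysis worker:

import threading
import time
import tkinter as tk
from tkinter import ttk

root = tk.Tk()
progress_var = tk.DoubleVar()
ttk.Progressbar(root, variable=progress_var, maximum=100).pack(fill=tk.X, padx=5, pady=5)

def worker() -> None:
    for pct in range(0, 101, 10):
        time.sleep(0.1)  # stand-in for real analysis work
        # Tk widgets are not thread-safe: marshal updates via after().
        root.after(0, progress_var.set, pct)

threading.Thread(target=worker, daemon=True).start()
root.mainloop()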
# --- Frame 6: Results ---
results_frame = ttk.LabelFrame(parent, text="Flight Summary & Segments")
results_frame.grid(row=4, column=0, sticky="nsew", padx=5, pady=5)
results_frame.grid(row=5, column=0, sticky="nsew", padx=5, pady=5)
results_frame.columnconfigure(0, weight=1)
results_frame.rowconfigure(0, weight=1)

self.flight_timeline_tree = ttk.Treeview(
results_frame,
columns=(
"status",
"start_batch",
"end_batch",
"batch_count",
"duration",
"start_file",
"end_file",
"file_count",
),
show="headings",
selectmode="extended",
columns=("status", "start_batch", "end_batch", "batch_count", "duration", "start_file", "end_file", "file_count"),
show="headings", selectmode="extended"
)
self.flight_timeline_tree.heading("status", text="Segment (Mode | Scale | WF)")
self.flight_timeline_tree.heading("start_batch", text="Start Batch")
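The Treeview is built headings-only (show="headings" hides the implicit #0 tree column) with selectmode="extended" so several segments can be selected for export at once. A runnable miniature of the same pattern, with one invented data row:

import tkinter as tk
from tkinter import ttk

root = tk.Tk()
tree = ttk.Treeview(
    root,
    columns=("status", "start_batch", "end_batch"),
    show="headings",        # hide the implicit tree column (#0)
    selectmode="extended",  # allow multi-row selection
)
tree.heading("status", text="Segment (Mode | Scale | WF)")
tree.heading("start_batch", text="Start Batch")
tree.heading("end_batch", text="End Batch")
tree.insert("", tk.END, values=("SEA | 40NM | WF1", 0, 1024))  # invented row
tree.pack(fill=tk.BOTH, expand=True)
root.mainloop()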
@ -247,32 +237,17 @@ class MainWindow(tk.Frame):
self.flight_timeline_tree.column("end_file", width=200, stretch=True)
self.flight_timeline_tree.column("file_count", width=60, anchor="center")
self.flight_timeline_tree.grid(row=0, column=0, sticky="nsew")
tree_scrollbar = ttk.Scrollbar(
results_frame, orient="vertical", command=self.flight_timeline_tree.yview
)
tree_scrollbar = ttk.Scrollbar(results_frame, orient="vertical", command=self.flight_timeline_tree.yview)
self.flight_timeline_tree.configure(yscrollcommand=tree_scrollbar.set)
tree_scrollbar.grid(row=0, column=1, sticky="ns")

buttons_frame = ttk.Frame(results_frame)
buttons_frame.grid(row=1, column=0, columnspan=2, sticky="ew", pady=5)
ttk.Button(
buttons_frame, text="Select All", command=self._select_all_segments
).pack(side=tk.LEFT, padx=(0, 5))
ttk.Button(
buttons_frame, text="Select None", command=self._deselect_all_segments
).pack(side=tk.LEFT)
self.export_segment_button = ttk.Button(
buttons_frame,
text="Export Selected Segment(s)",
state=tk.DISABLED,
command=self.controller.start_segment_export,
)
ttk.Button(buttons_frame, text="Select All", command=self._select_all_segments).pack(side=tk.LEFT, padx=(0, 5))
ttk.Button(buttons_frame, text="Select None", command=self._deselect_all_segments).pack(side=tk.LEFT)
self.export_segment_button = ttk.Button(buttons_frame, text="Export Selected Segment(s)", state=tk.DISABLED, command=self.controller.start_segment_export)
self.export_segment_button.pack(side=tk.RIGHT)
configure_export_button = ttk.Button(
buttons_frame,
text="Configure Segment Export...",
command=self.controller.open_export_config_editor,
)
configure_export_button = ttk.Button(buttons_frame, text="Configure Segment Export...", command=self.controller.open_export_config_editor)
configure_export_button.pack(side=tk.RIGHT, padx=5)

def _select_all_segments(self):
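The bodies of _select_all_segments and _deselect_all_segments fall outside this diff; one plausible implementation, given the Treeview above, would be:

def _select_all_segments(self):
    # get_children("") returns all top-level item ids in display order.
    self.flight_timeline_tree.selection_set(self.flight_timeline_tree.get_children())

def _deselect_all_segments(self):
    # selection() returns the currently selected item ids.
    self.flight_timeline_tree.selection_remove(self.flight_timeline_tree.selection())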
@ -99,6 +99,7 @@ class ConfigManager:
"last_out_output_dir": "",
"last_rec_output_dir": "",
"last_flight_folder": "",
"last_flight_workspace_parent_dir": "",
"active_out_export_profile_name": "Default",
"export_profiles": [default_export_profile.to_dict()],
"cpp_converter_config": default_cpp_config,
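Registering "last_flight_workspace_parent_dir" in the defaults dict is what keeps older config files loadable: any key missing on disk falls back to its default. A hedged sketch of that merge pattern, since ConfigManager's real load logic is not shown in this diff:

import json
from pathlib import Path

DEFAULTS = {
    "last_flight_folder": "",
    "last_flight_workspace_parent_dir": "",  # key introduced by this commit
}

def load_config(path: Path) -> dict:
    loaded = json.loads(path.read_text()) if path.exists() else {}
    # Defaults first, stored values second: stored keys win, missing
    # keys silently fall back to their defaults.
    return {**DEFAULTS, **loaded}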