Try to fix blocking when replaying big data from the dumper; increase the read buffer size to 1 MB
parent 484e3a1453 · commit 42401b115f
@ -5,7 +5,7 @@ import json
import os
import logging
import appdirs  # For platform-independent config/data directories
from typing import Dict, Any, Optional, List  # Added List
from typing import Dict, Any, Optional, List

logger = logging.getLogger(__name__)

@ -24,8 +24,8 @@ class AppSettings:
        self.config_dir = appdirs.user_config_dir(APP_NAME, APP_AUTHOR)
        self.config_filepath = os.path.join(self.config_dir, CONFIG_FILE_NAME)

        print(f"DEBUG: Configuration file path is: {self.config_filepath}")
        logger.info(f"Configuration file path determined by AppSettings: {self.config_filepath}")  # Also log it
        # DEBUG: Convert print statement to logger.debug for consistency
        logger.debug(f"Configuration file path determined by AppSettings: {self.config_filepath}")

        self._settings: Dict[str, Any] = self._get_default_settings()

@ -64,6 +64,8 @@ class AppSettings:
                "max_array_elements": 100,
                "max_recursion_depth": 10,
                "max_string_length": 2048,
                "dump_raw_gdb_output_to_file": False,
                "raw_gdb_output_dir": ""
            },
            "gui": {
                "main_window_geometry": "850x650",
@ -302,4 +304,4 @@ class AppSettings:
    def update_settings_bulk(self, new_settings_dict: Dict[str, Any]) -> None:
        # Use the same merging logic as _load_settings to ensure consistency
        self._settings = self._recursive_update(self._settings, new_settings_dict)
        logger.info("Settings updated in bulk (merged).")
        logger.info("Settings updated in bulk (merged).")
@ -40,7 +40,7 @@ class GDBSession:
            executable_path: Path to the target executable to debug.
            gdb_script_full_path: Optional full path to the GDB Python dumper script.
            dumper_options: Optional dictionary with options for the dumper script
                (e.g., {'max_array_elements': 100, ...}).
                (e.g., {'max_array_elements': 100, 'dump_raw_gdb_output_to_file': True}).
        """
        if not os.path.exists(gdb_path):
            msg = f"GDB executable not found at: {gdb_path}"
@ -132,12 +132,19 @@ class GDBSession:
            return None

    def start(self, timeout: int = DEFAULT_GDB_OPERATION_TIMEOUT) -> None:
        # CHANGE: Increase the wexpect.spawn read buffer to handle large JSON output.
        # maxread: the maximum number of bytes to read in a single read operation.
        # Increasing it lets wexpect capture larger output in one pass, reducing the risk of truncation.
        # A value of 65536 (64 KB) or 131072 (128 KB) is a good starting point for large output; 1 MiB is used here.
        WEXPECT_MAX_READ_BUFFER = 1048576

        command = f'"{self.gdb_path}" --nx --quiet "{self.executable_path}"'
        logger.info(f"Spawning GDB process: {command} with startup timeout: {timeout}s")
        logger.info(f"Spawning GDB process: {command} with startup timeout: {timeout}s, maxread: {WEXPECT_MAX_READ_BUFFER} bytes")
        output_at_startup = ""  # Initialize to ensure it's always defined
        try:
            spawn_timeout = max(timeout, 5)
            self.child = wexpect.spawn(command, timeout=spawn_timeout, encoding='utf-8', errors='replace')
            # CHANGE: Added the maxread argument to wexpect.spawn
            self.child = wexpect.spawn(command, timeout=spawn_timeout, encoding='utf-8', errors='replace', maxread=WEXPECT_MAX_READ_BUFFER)

            # Expect the first prompt and capture output before it
            self.child.expect_exact(self.gdb_prompt, timeout=max(timeout, 15))
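For context, maxread mirrors pexpect's spawn API (wexpect is its Windows port), and the commit simply raises it; a minimal sketch of the same wiring, with illustrative paths and timeouts:

    import wexpect  # Windows port of pexpect

    BUFFER = 1048576  # 1 MiB, matching WEXPECT_MAX_READ_BUFFER above
    child = wexpect.spawn(
        '"C:/mingw/bin/gdb.exe" --nx --quiet "C:/work/app.exe"',  # illustrative paths
        timeout=30,
        encoding='utf-8',
        errors='replace',
        maxread=BUFFER,  # bytes per underlying read; large JSON lines arrive in fewer chunks
    )
    child.expect_exact('(gdb) ')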
@ -321,26 +328,38 @@ class GDBSession:
            f"Setting GDB variables for dumper options with timeout: {timeout}s"
        )
        for key, value in self.dumper_options.items():
            # Ensure value is appropriate for GDB set command (numbers, or strings if needed)
            # For now, assuming values are integers as per dumper_options structure.
            if isinstance(value, (int, float, bool)):  # GDB handles bools as 0/1
            # CHANGE: Handle the new options for saving the raw JSON
            if key == "dump_raw_gdb_output_to_file":
                gdb_var_name = "$py_dumper_dump_raw_json_to_file"
                # In GDB, booleans are commonly treated as 0 (false) or 1 (true)
                set_command = f"set {gdb_var_name} = {1 if value else 0}"
            elif key == "raw_gdb_output_dir":
                gdb_var_name = "$py_dumper_raw_json_output_dir"
                # Strings must be wrapped in quotes in GDB.
                # Replace backslashes with forward slashes for better cross-platform compatibility with GDB.
                normalized_path = str(value).replace("\\", "/")
                set_command = f'set {gdb_var_name} = "{normalized_path}"'
            # Handle the existing options (ints, floats, booleans)
            elif isinstance(value, (int, float, bool)):
                gdb_var_name = (
                    f"$py_dumper_{key}"  # e.g., $py_dumper_max_array_elements
                    f"$py_dumper_{key}"  # e.g., $py_dumper_max_array_elements
                )
                set_command = f"set {gdb_var_name} = {value}"
                try:
                    logger.debug(f"Setting GDB variable: {set_command}")
                    self.send_cmd(set_command, expect_prompt=True, timeout=timeout)
                except Exception as e:
                    logger.error(
                        f"Failed to set GDB dumper variable '{gdb_var_name}' with command '{set_command}': {e}",
                        exc_info=True,
                    )
                    # Decide if this is critical. For now, we log and continue.
            else:
                logger.warning(
                    f"Skipping GDB variable for dumper option '{key}': value '{value}' is not a number or bool."
                    f"Skipping GDB variable for dumper option '{key}': value '{value}' is not a supported type (int, float, bool, or recognized special key)."
                )
                continue  # Move on to the next item

            try:
                logger.debug(f"Setting GDB variable: {set_command}")
                self.send_cmd(set_command, expect_prompt=True, timeout=timeout)
            except Exception as e:
                logger.error(
                    f"Failed to set GDB dumper variable '{gdb_var_name}' with command '{set_command}': {e}",
                    exc_info=True,
                )
                # Decide whether this is critical. For now, log and continue.

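With the default dumper_options shown earlier, this loop ends up sending GDB commands of the following shape (the directory value is illustrative):

    set $py_dumper_max_array_elements = 100
    set $py_dumper_max_recursion_depth = 10
    set $py_dumper_max_string_length = 2048
    set $py_dumper_dump_raw_json_to_file = 1
    set $py_dumper_raw_json_output_dir = "C:/temp/raw_dumps"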
    def _source_gdb_dumper_script(
        self, timeout: int = DEFAULT_GDB_OPERATION_TIMEOUT
@ -935,4 +954,4 @@ class GDBSession:
            logger.info("Quit: GDB session quit called, but no active child process.")

    def is_alive(self) -> bool:
        return self.child is not None and self.child.isalive()
        return self.child is not None and self.child.isalive()
@ -4,6 +4,7 @@ import json
import traceback
import os
import time
import re

# --- Begin sourcing diagnostics ---
try:
@ -56,6 +57,10 @@ except Exception as e_log_setup:
DEFAULT_MAX_ARRAY_ELEMENTS = 10
DEFAULT_MAX_RECURSION_DEPTH = 10
DEFAULT_MAX_STRING_LENGTH = 2048
DEFAULT_DUMP_RAW_JSON_TO_FILE = False
DEFAULT_RAW_JSON_OUTPUT_DIR = ""


_dumper_log_write(
    "Attempting to read configuration from GDB convenience variables using parse_and_eval..."
)
@ -91,6 +96,58 @@ def _get_gdb_variable_as_int(var_name_no_dollar: str, default_value: int) -> int
    )
    return default_value

def _get_gdb_variable_as_bool(var_name_no_dollar: str, default_value: bool) -> bool:
    full_var_name_for_eval = f"${var_name_no_dollar}"
    try:
        gdb_value_obj = gdb.parse_and_eval(full_var_name_for_eval)
        if gdb_value_obj is not None:
            val = bool(int(gdb_value_obj))
            _dumper_log_write(
                f"Read GDB variable '{full_var_name_for_eval}' as bool: {val} (via parse_and_eval)"
            )
            return val
        else:
            _dumper_log_write(
                f"GDB variable '{full_var_name_for_eval}' evaluated to None. Using default: {default_value}"
            )
            return default_value
    except gdb.error as e_gdb:
        _dumper_log_write(
            f"GDBError reading GDB variable '{full_var_name_for_eval}' as bool: {type(e_gdb).__name__} - {e_gdb}. Using default: {default_value}"
        )
        return default_value
    except Exception as e_other:
        _dumper_log_write(
            f"Unexpected error reading GDB variable '{full_var_name_for_eval}' as bool: {type(e_other).__name__} - {e_other}. Using default: {default_value}"
        )
        return default_value

def _get_gdb_variable_as_string(var_name_no_dollar: str, default_value: str) -> str:
    full_var_name_for_eval = f"${var_name_no_dollar}"
    try:
        gdb_value_obj = gdb.parse_and_eval(full_var_name_for_eval)
        if gdb_value_obj is not None:
            val = str(gdb_value_obj).strip('"')
            _dumper_log_write(
                f"Read GDB variable '{full_var_name_for_eval}' as string: '{val}' (via parse_and_eval)"
            )
            return val
        else:
            _dumper_log_write(
                f"GDB variable '{full_var_name_for_eval}' evaluated to None. Using default: '{default_value}'"
            )
            return default_value
    except gdb.error as e_gdb:
        _dumper_log_write(
            f"GDBError reading GDB variable '{full_var_name_for_eval}' as string: {type(e_gdb).__name__} - {e_gdb}. Using default: '{default_value}'"
        )
        return default_value
    except Exception as e_other:
        _dumper_log_write(
            f"Unexpected error reading GDB variable '{full_var_name_for_eval}' as string: {type(e_other).__name__} - {e_other}. Using default: '{default_value}'"
        )
        return default_value

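These helpers read back the convenience variables that the host app sets before sourcing the script; from a GDB prompt the round trip looks like this (illustrative session):

    (gdb) set $py_dumper_dump_raw_json_to_file = 1
    (gdb) python print(int(gdb.parse_and_eval("$py_dumper_dump_raw_json_to_file")))
    1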
MAX_ARRAY_ELEMENTS = _get_gdb_variable_as_int(
    "py_dumper_max_array_elements", DEFAULT_MAX_ARRAY_ELEMENTS
)
@ -100,18 +157,46 @@ MAX_RECURSION_DEPTH = _get_gdb_variable_as_int(
MAX_STRING_LENGTH = _get_gdb_variable_as_int(
    "py_dumper_max_string_length", DEFAULT_MAX_STRING_LENGTH
)
DUMP_RAW_JSON_TO_FILE = _get_gdb_variable_as_bool(
    "py_dumper_dump_raw_json_to_file", DEFAULT_DUMP_RAW_JSON_TO_FILE
)
RAW_JSON_OUTPUT_DIR = _get_gdb_variable_as_string(
    "py_dumper_raw_json_output_dir", DEFAULT_RAW_JSON_OUTPUT_DIR
)
if RAW_JSON_OUTPUT_DIR and not os.path.exists(RAW_JSON_OUTPUT_DIR):
    try:
        os.makedirs(RAW_JSON_OUTPUT_DIR, exist_ok=True)
        _dumper_log_write(f"Created RAW_JSON_OUTPUT_DIR: {RAW_JSON_OUTPUT_DIR}")
    except OSError as e:
        _dumper_log_write(f"ERROR: Could not create RAW_JSON_OUTPUT_DIR '{RAW_JSON_OUTPUT_DIR}': {e}. Raw JSON dump will default to GDB working directory or be skipped.")
        RAW_JSON_OUTPUT_DIR = ""

_dumper_log_write(
    f"Effective Dumper Config: ArrayElements={MAX_ARRAY_ELEMENTS}, RecursionDepth={MAX_RECURSION_DEPTH}, StringLength={MAX_STRING_LENGTH}"
)
_dumper_log_write(
    f"Raw JSON Dump Config: Enabled={DUMP_RAW_JSON_TO_FILE}, OutputDir='{RAW_JSON_OUTPUT_DIR if RAW_JSON_OUTPUT_DIR else 'GDB CWD'}'"
)
gdb.write("GDB_DUMPER_SCRIPT: Configuration variables read.\n")
gdb.flush()

def _sanitize_filename_component(component: str) -> str:
    """
    Sanitizes a string to be suitable for a filename component.
    Removes invalid characters and limits length.
    """
    if not component:
        return "unknown"
    component = re.sub(r'[\\/*?:"<>|]', "_", component)
    component = component.replace(" ", "_")
    return component[:100]


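A quick illustration of the sanitizer's behavior, worked out by hand from the regex above:

    _sanitize_filename_component('my_array[3]')  ->  'my_array[3]'   (brackets pass through)
    _sanitize_filename_component('buf @ 0x10')   ->  'buf_@_0x10'    (spaces become underscores)
    _sanitize_filename_component('a/b*c?')       ->  'a_b_c_'        (\ / * ? : " < > | become underscores)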
class EnhancedJsonEncoder(json.JSONEncoder):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.current_depth = 0
        self.visited_values = {}  # Stores unique_key -> depth_visited_at
        self.visited_values = {}

    def _get_value_unique_key(self, gdb_val):
        if not gdb_val:
@ -142,53 +227,40 @@ class EnhancedJsonEncoder(json.JSONEncoder):
                gdb.TYPE_CODE_ARRAY,
                gdb.TYPE_CODE_STRUCT,
                gdb.TYPE_CODE_UNION,
                gdb.TYPE_CODE_REF  # Add REF to visitable types
                gdb.TYPE_CODE_REF
            ]
        except gdb.error:
            return False

    def _serialize_value(self, gdb_val_orig):  # Renamed to gdb_val_orig to avoid confusion after deref
        # --- MODIFICATION: Handle C++ references by dereferencing them first ---
        gdb_val = gdb_val_orig  # Start with the original value
    def _serialize_value(self, gdb_val_orig):
        gdb_val = gdb_val_orig
        try:
            if hasattr(gdb_val, "type") and gdb_val.type is not None:
                # Check if the original type (before strip_typedefs) is a reference
                if gdb_val.type.code == gdb.TYPE_CODE_REF:
                    _dumper_log_write(f"DEBUG_SERIALIZE_VALUE: Detected C++ reference type '{str(gdb_val.type)}' at address {gdb_val.address if hasattr(gdb_val, 'address') else 'N/A'}. Attempting to get referenced value.")
                    gdb_val = gdb_val.referenced_value()  # Get the actual object the reference points to
                    gdb_val = gdb_val.referenced_value()
                    _dumper_log_write(f"DEBUG_SERIALIZE_VALUE: Dereferenced to type '{str(gdb_val.type)}' at address {gdb_val.address if hasattr(gdb_val, 'address') else 'N/A'}.")
        except gdb.error as e_ref:
            _dumper_log_write(f"ERROR_SERIALIZE_GDB: GDB error trying to dereference a C++ reference: {e_ref}")
            # If dereferencing fails, try to proceed with the original reference value,
            # or return an error string immediately if that's safer.
            # For now, let it proceed with the original gdb_val (which is gdb_val_orig here)
            # or it might fall into an error case below.
            # Alternatively, return f"<gdb_error_dereferencing_ref: {e_ref}>" here.
            # Let's allow it to proceed and see if other handlers catch it,
            # otherwise, it might just print the address as before.
            pass  # Allow fall-through if dereferencing a reference fails; gdb_val remains gdb_val_orig
        # --- END MODIFICATION ---
            pass

        # --- Depth and cycle handling (unchanged) ---
        if self.current_depth > MAX_RECURSION_DEPTH:
            return f"<max_recursion_depth_reached_{MAX_RECURSION_DEPTH}>"

        unique_key = None
        is_visitable = self._is_visitable_value(gdb_val)  # Check visitable for the (potentially dereferenced) value
        is_visitable = self._is_visitable_value(gdb_val)

        if is_visitable:
            unique_key = self._get_value_unique_key(gdb_val)
            if unique_key and unique_key in self.visited_values:
                # If we've seen this exact value (address + type) at the same or shallower depth
                if self.current_depth >= self.visited_values[unique_key]:
                    type_in_key = unique_key.split("_", 1)[1] if "_" in unique_key else "unknown_type"
                    addr_in_key = unique_key.split("_", 1)[0]
                    return f"<cyclic_or_shared_ref_to_type_{type_in_key}_at_{addr_in_key}>"

        self.current_depth += 1
        if is_visitable and unique_key:  # If we are visiting this value
        if is_visitable and unique_key:
            self.visited_values[unique_key] = self.current_depth
        # --- End depth and cycle handling ---

        serialized_val = None
        try:
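The guard above keys each visited aggregate by address and type and releases the key on the way back out; the same idea on plain Python objects, as a self-contained sketch (no gdb dependency, names invented for illustration):

    def dump(obj, visited=None, depth=0, max_depth=10):
        if depth > max_depth:
            return f"<max_recursion_depth_reached_{max_depth}>"
        visited = {} if visited is None else visited
        key = f"0x{id(obj):x}_{type(obj).__name__}"  # same "address_type" key shape
        if key in visited and depth >= visited[key]:
            return f"<cyclic_or_shared_ref_at_{key}>"
        if isinstance(obj, dict):
            visited[key] = depth + 1
            try:
                return {k: dump(v, visited, depth + 1) for k, v in obj.items()}
            finally:
                if visited.get(key) == depth + 1:
                    del visited[key]
        return obj

    node = {"name": "root"}
    node["self"] = node   # introduce a cycle
    print(dump(node))     # {'name': 'root', 'self': '<cyclic_or_shared_ref_at_0x..._dict>'}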
@ -197,9 +269,8 @@ class EnhancedJsonEncoder(json.JSONEncoder):
            elif gdb_val.is_optimized_out:
                serialized_val = "<optimized_out>"
            else:
                val_type = gdb_val.type.strip_typedefs()  # Use stripped type for actual data handling
                val_type = gdb_val.type.strip_typedefs()
                type_code = val_type.code
                # original_type_str refers to the type of gdb_val, which might be the dereferenced one
                original_type_str = str(gdb_val.type)
                type_name_str = str(val_type.name) if val_type.name else ""

@ -207,7 +278,6 @@ class EnhancedJsonEncoder(json.JSONEncoder):
                    f"DEBUG_SERIALIZE_VALUE (post-ref-check): original_type='{original_type_str}', type_name_str='{type_name_str}', type_code={type_code}"
                )

                # --- String and type logic (as before) ---
                is_handling_as_string = False
                condition_is_std_string_type = False
                if (
@ -259,7 +329,7 @@ class EnhancedJsonEncoder(json.JSONEncoder):
                    try:
                        _M_p_val = None
                        if "_M_dataplus" in (
                            f.name for f in gdb_val.type.fields() if f.name  # Use gdb_val here
                            f.name for f in gdb_val.type.fields() if f.name
                        ):
                            _M_dataplus_val = gdb_val["_M_dataplus"]
                            if "_M_p" in (
@ -267,7 +337,7 @@ class EnhancedJsonEncoder(json.JSONEncoder):
                            ):
                                _M_p_val = _M_dataplus_val["_M_p"]
                        elif "_M_p" in (
                            f.name for f in gdb_val.type.fields() if f.name  # Use gdb_val here
                            f.name for f in gdb_val.type.fields() if f.name
                        ):
                            _M_p_val = gdb_val["_M_p"]

@ -321,7 +391,7 @@ class EnhancedJsonEncoder(json.JSONEncoder):
                        _dumper_log_write(
                            f"STD_STRING_PATH: _M_p failed or not applicable. Trying gdb_val.string() on '{original_type_str}'."
                        )
                        extracted_content = gdb_val.string(  # Use gdb_val here
                        extracted_content = gdb_val.string(
                            encoding="utf-8", errors="replace"
                        )
                        _dumper_log_write(
@ -331,7 +401,7 @@ class EnhancedJsonEncoder(json.JSONEncoder):
                        _dumper_log_write(
                            f"STD_STRING_PATH_ERROR: gdb_val.string() also failed: {e_gdb_str_fb}. Fallback to str()."
                        )
                        extracted_content = str(gdb_val)  # Use gdb_val here
                        extracted_content = str(gdb_val)
                        if (
                            extracted_content.startswith('"')
                            and extracted_content.endswith('"')
@ -368,7 +438,7 @@ class EnhancedJsonEncoder(json.JSONEncoder):
                        f"C_STYLE_STRING_PATH: Identified '{original_type_str}' as C-style string."
                    )
                    try:
                        extracted_str_c = gdb_val.string(  # Use gdb_val here
                        extracted_str_c = gdb_val.string(
                            encoding="utf-8", errors="replace"
                        )
                        if (
@ -394,35 +464,35 @@ class EnhancedJsonEncoder(json.JSONEncoder):
                    _dumper_log_write(
                        f"NON_STRING_PATH: Type '{original_type_str}' not string. Trying iterators/other."
                    )
                    iterator_children_result = self._get_iterator_children(gdb_val)  # Use gdb_val here
                    iterator_children_result = self._get_iterator_children(gdb_val)
                    if iterator_children_result is not None:
                        serialized_val = iterator_children_result
                    else:
                        if type_code == gdb.TYPE_CODE_PTR:
                            serialized_val = self._handle_pointer(gdb_val, val_type)  # Use gdb_val here
                            serialized_val = self._handle_pointer(gdb_val, val_type)
                        elif type_code == gdb.TYPE_CODE_ARRAY:
                            serialized_val = self._handle_c_array(gdb_val, val_type)  # Use gdb_val here
                            serialized_val = self._handle_c_array(gdb_val, val_type)
                        elif type_code in [gdb.TYPE_CODE_STRUCT, gdb.TYPE_CODE_UNION]:
                            serialized_val = self._handle_struct_or_class(
                                gdb_val, val_type, original_type_str  # Use gdb_val here
                                gdb_val, val_type, original_type_str
                            )
                        elif type_code == gdb.TYPE_CODE_ENUM:
                            serialized_val = str(gdb_val)  # Use gdb_val here
                            serialized_val = str(gdb_val)
                        elif type_code == gdb.TYPE_CODE_INT:
                            serialized_val = int(gdb_val)  # Use gdb_val here
                            serialized_val = int(gdb_val)
                        elif type_code == gdb.TYPE_CODE_FLT:
                            serialized_val = float(gdb_val)  # Use gdb_val here
                            serialized_val = float(gdb_val)
                        elif type_code == gdb.TYPE_CODE_BOOL:
                            serialized_val = bool(gdb_val)  # Use gdb_val here
                        elif str(val_type) == "void":  # Check on val_type (stripped type)
                            serialized_val = bool(gdb_val)
                        elif str(val_type) == "void":
                            serialized_val = (
                                f"<void_value_at_0x{gdb_val:x}>"  # Use gdb_val here
                                if gdb_val  # Use gdb_val here
                                f"<void_value_at_0x{gdb_val:x}>"
                                if gdb_val
                                else "<void_value_null>"
                            )
                        else:
                            try:
                                raw_f_str = str(gdb_val)  # Use gdb_val here
                                raw_f_str = str(gdb_val)
                                serialized_val = (
                                    raw_f_str[:MAX_STRING_LENGTH]
                                    if len(raw_f_str) > MAX_STRING_LENGTH
@ -431,7 +501,6 @@ class EnhancedJsonEncoder(json.JSONEncoder):
                                )
                            except Exception:
                                serialized_val = f"<failed_to_str_type_{str(val_type)}>"
        # --- End of logic ---
        except gdb.error as e_gdb:
            _dumper_log_write(
                f"ERROR_SERIALIZE_GDB: GDB error in _serialize_value for type {str(gdb_val.type) if hasattr(gdb_val, 'type') else 'N/A'}: {e_gdb}"
@ -442,42 +511,32 @@ class EnhancedJsonEncoder(json.JSONEncoder):
                f"ERROR_SERIALIZE_PYTHON: Python Traceback in _serialize_value for type {str(gdb_val.type) if hasattr(gdb_val, 'type') else 'N/A'}:\n{traceback.format_exc(limit=3)}"
            )
            serialized_val = f"<python_script_error_serializing: {e_py} (type: {str(gdb_val.type) if hasattr(gdb_val, 'type') else 'N/A'})>"
        finally:  # Ensure depth and visited_values are correctly managed on exit
        finally:
            self.current_depth -= 1
            if is_visitable and unique_key and unique_key in self.visited_values:
                # Only remove from visited_values if we added it at this depth call
                if self.visited_values[unique_key] == self.current_depth + 1:
                    del self.visited_values[unique_key]
        return serialized_val

    def default(self, o):
        if isinstance(o, gdb.Value):
            # Reset depth and visited set only for the top-level call to json.dump/encode.
            # The self.current_depth check ensures this happens only once per top-level gdb.Value.
            is_top_level_call = (self.current_depth == 0)
            if is_top_level_call:
                self.visited_values.clear()
                # _dumper_log_write(f"DEBUG_ENCODER_DEFAULT: Top-level gdb.Value. Resetting depth and visited_values for {str(o.type) if o.type else 'N/A'}.")

            # Store original value for logging in case of issues
            original_o_type_str = str(o.type) if hasattr(o, "type") and o.type else "N/A"
            try:
                return self._serialize_value(o)
            except Exception as e:  # Catch-all for unexpected errors in _serialize_value itself
            except Exception as e:
                _dumper_log_write(f"CRITICAL_ERROR_IN_DEFAULT: Unhandled exception in _serialize_value for type {original_o_type_str}. Error: {e}\n{traceback.format_exc(limit=5)}")
                return f"<internal_dumper_encoder_error_for_type_{original_o_type_str}>"
            finally:
                # If this was a top-level call, ensure depth is reset for the *next* top-level gdb.Value.
                # _serialize_value decrements depth, so it should be back to 0 if it started at 0.
                # This visited_values.clear() here might be redundant if _serialize_value always balances depth,
                # but it's a safeguard. The primary clear is now at the start of the 'if is_top_level_call'.
                if is_top_level_call and self.current_depth != 0:
                    _dumper_log_write(f"WARNING_ENCODER_DEFAULT: Depth imbalance after _serialize_value for top-level. Depth is {self.current_depth}. Resetting.")
                    self.current_depth = 0  # Force reset
        return json.JSONEncoder.default(self, o)  # Fallback for non-gdb.Value types
                    self.current_depth = 0
        return json.JSONEncoder.default(self, o)

    def _get_iterator_children(self, gdb_val_original):
        # This method attempts to use GDB's children() iterator, which often leverages pretty-printers.
        type_name = "UnknownType"
        try:
            if not hasattr(gdb_val_original, "type") or gdb_val_original.type is None:
@ -489,74 +548,62 @@ class EnhancedJsonEncoder(json.JSONEncoder):
                else str(gdb_val_type_stripped)
            )

            # Avoid trying children() on basic_string itself, as we have specific string handling.
            if "std::basic_string" in type_name:
                return None

            # Check if the value has a 'children' attribute and if it's callable
            has_children_method = False
            if hasattr(gdb_val_original, "children"):
                try:
                    has_children_method = callable(gdb_val_original.children)
                except gdb.error:
                    pass  # Some types might error on access even if the attribute exists
                    pass

            elements = []
            children_processed_successfully = False

            if has_children_method:
                # _dumper_log_write(f"DEBUG: Trying children() for type '{type_name}'")
                try:
                    children_iter = gdb_val_original.children()
                    # Iterate and count, respecting MAX_ARRAY_ELEMENTS
                    count = 0
                    for (
                        child_tuple_or_val
                    ) in children_iter:  # This can be (name, val) or just val
                    ) in children_iter:
                        child_val_to_serialize = None
                        key_for_map_entry = None  # For map-like structures where children are (key, value)
                        key_for_map_entry = None

                        if (
                            isinstance(child_tuple_or_val, tuple)
                            and len(child_tuple_or_val) == 2
                        ):
                            # Assuming (key, value) or (name, value) from children().
                            # For maps, the first element is a key. For structs, it's a field name.
                            # We only want to treat it as a map's key if the container is map-like.
                            # This heuristic is tricky. If it's a struct, we'll handle fields later.
                            # For now, if children() gives tuples, assume it's key-value for map-like.
                            key_obj, val_obj = child_tuple_or_val
                            # A better check for map-like would be needed if this causes issues.
                            # For now, let's assume if key_obj is a gdb.Value, it's a complex key.
                            if isinstance(key_obj, gdb.Value):  # Could be a map key
                            if isinstance(key_obj, gdb.Value):
                                key_for_map_entry = self._serialize_value(key_obj)
                            else:  # Or a field name (string)
                            else:
                                key_for_map_entry = str(
                                    key_obj
                                )  # Use as is; might be a field name.
                                )

                            child_val_to_serialize = val_obj
                        else:  # If not a tuple, or a tuple of the wrong size, treat the whole item as a value
                        else:
                            child_val_to_serialize = child_tuple_or_val

                        # MODIFIED: Use dynamic MAX_ARRAY_ELEMENTS
                        if count < MAX_ARRAY_ELEMENTS:
                            serialized_element = (
                                self._serialize_value(child_val_to_serialize)
                                if isinstance(child_val_to_serialize, gdb.Value)
                                else child_val_to_serialize
                            )  # Handle non-gdb.Value items (e.g. from synthetic children)
                            )

                            if key_for_map_entry is not None and (
                                "map" in type_name or "unordered_map" in type_name
                            ):  # Heuristic for maps
                            ):
                                elements.append(
                                    {
                                        "key": key_for_map_entry,
                                        "value": serialized_element,
                                    }
                                )
                            else:  # For vectors, lists, sets, or struct fields from children()
                            else:
                                elements.append(serialized_element)
                        else:
                            elements.append(
@ -564,24 +611,16 @@ class EnhancedJsonEncoder(json.JSONEncoder):
                            )
                            break
                        count += 1
                    children_processed_successfully = (
                        True  # Processed children iterator fully or up to limit
                    )
                    children_processed_successfully = True
                except Exception as e_children:
                    _dumper_log_write(
                        f"WARNING: Error iterating children() for type '{type_name}': {e_children}. Fallback might apply."
                    )
                    children_processed_successfully = (
                        False  # Mark as failed to allow fallback
                    )
                    children_processed_successfully = False

            # Fallback for std::vector if children() didn't work or wasn't available.
            # This is highly implementation-specific (GNU libstdc++).
            is_std_vector = "std::vector" in type_name
            if not children_processed_successfully and is_std_vector:
                # _dumper_log_write(f"DEBUG: Attempting std::vector manual pointer traversal for '{type_name}'.")
                try:
                    # Accessing internal pointers of std::vector
                    m_impl = gdb_val_original["_M_impl"]
                    m_start_val = m_impl["_M_start"]
                    m_finish_val = m_impl["_M_finish"]
@ -596,14 +635,12 @@ class EnhancedJsonEncoder(json.JSONEncoder):
                    )
                    if (
                        element_type.sizeof == 0
                    ):  # Vector of an empty type (e.g. std::vector<ZeroSizedType>)
                        elements = []  # Empty list
                    ):
                        elements = []
                    else:
                        current_ptr_val = m_start_val
                        num_elements_manually = 0
                        manual_elements = []
                        # MODIFIED: Use dynamic MAX_ARRAY_ELEMENTS.
                        # Add a safety break for the while loop to prevent infinite loops on bad GDB state.
                        max_loop_iterations = MAX_ARRAY_ELEMENTS + 5

                        while (
@ -629,12 +666,12 @@ class EnhancedJsonEncoder(json.JSONEncoder):
                            try:
                                current_ptr_val = (
                                    current_ptr_val + 1
                                )  # Pointer arithmetic
                                )
                            except gdb.error as e_ptr_arith:
                                _dumper_log_write(
                                    f"ERROR: Pointer arithmetic failed for std::vector on '{type_name}': {e_ptr_arith}."
                                )
                                break  # Exit loop if pointer arithmetic fails
                                break

                        if (
                            num_elements_manually >= MAX_ARRAY_ELEMENTS
|
||||
f"<std_vector_truncated_showing_{MAX_ARRAY_ELEMENTS}_elements>"
|
||||
)
|
||||
elements = manual_elements
|
||||
children_processed_successfully = (
|
||||
True # Mark as success if this path taken
|
||||
)
|
||||
children_processed_successfully = True
|
||||
except Exception as e_vector_manual:
|
||||
_dumper_log_write(
|
||||
f"WARNING: Failed manual std::vector traversal for '{type_name}': {e_vector_manual}"
|
||||
@ -654,7 +689,7 @@ class EnhancedJsonEncoder(json.JSONEncoder):
|
||||
|
||||
if children_processed_successfully:
|
||||
return elements
|
||||
return None # If no children processing was successful
|
||||
return None
|
||||
except Exception as e_outer_children:
|
||||
_dumper_log_write(
|
||||
f"ERROR: Outer Python error in _get_iterator_children for '{type_name}':\n{traceback.format_exc(limit=2)}"
|
||||
@ -662,24 +697,19 @@ class EnhancedJsonEncoder(json.JSONEncoder):
|
||||
return None
|
||||
|
||||
def _handle_pointer(self, gdb_val, val_type):
|
||||
# If a pointer is NULL, represent it as None (JSON null)
|
||||
if not gdb_val: # Checks if pointer is 0x0
|
||||
if not gdb_val:
|
||||
return None
|
||||
|
||||
target_type = val_type.target().strip_typedefs()
|
||||
target_type_name_str = str(target_type.name) if target_type.name else ""
|
||||
|
||||
# Check for char* or wchar_t* (common string types)
|
||||
# GDB's gdb.Value.string() handles this well, including length limits.
|
||||
# Type codes for char are int-like (e.g., TYPE_CODE_INT, TYPE_CODE_CHAR).
|
||||
if target_type.code == gdb.TYPE_CODE_INT and (
|
||||
"char" in target_type_name_str or "wchar_t" in target_type_name_str
|
||||
):
|
||||
try:
|
||||
# MODIFIED: Use dynamic MAX_STRING_LENGTH
|
||||
return gdb_val.string(
|
||||
encoding="utf-8", errors="replace"
|
||||
) # , length=MAX_STRING_LENGTH)
|
||||
)
|
||||
except gdb.error as e_str:
|
||||
_dumper_log_write(
|
||||
f"INFO: gdb.Value.string() failed for pointer type '{str(val_type)}' at {str(gdb_val)}: {e_str}"
|
||||
@ -691,33 +721,28 @@ class EnhancedJsonEncoder(json.JSONEncoder):
|
||||
)
|
||||
return f"<unicode_decode_error_at_{str(gdb_val)}>"
|
||||
|
||||
# For other pointer types, dereference if within recursion depth
|
||||
# MODIFIED: Use dynamic MAX_RECURSION_DEPTH
|
||||
if (
|
||||
self.current_depth < MAX_RECURSION_DEPTH
|
||||
): # Check depth *before* dereferencing
|
||||
):
|
||||
try:
|
||||
return self._serialize_value(gdb_val.dereference())
|
||||
except (
|
||||
gdb.error
|
||||
) as e_deref: # Handle errors during dereference (e.g., invalid pointer)
|
||||
) as e_deref:
|
||||
_dumper_log_write(
|
||||
f"INFO: Failed to dereference pointer of type '{str(val_type)}' at {str(gdb_val)}: {e_deref}"
|
||||
)
|
||||
return f"<error_dereferencing_ptr_{str(gdb_val)}: {e_deref}>"
|
||||
else: # Max depth reached for this pointer path
|
||||
else:
|
||||
return f"<pointer_0x{gdb_val:x}_not_dereferenced_max_depth_{MAX_RECURSION_DEPTH}>"
|
||||
|
||||
    def _handle_c_array(self, gdb_val, val_type):
        arr_elements = []
        try:
            # GDB arrays usually have known bounds.
            bounds = val_type.range()  # (lower_bound, upper_bound)
            if bounds[0] > bounds[1]:  # Empty or invalid range
            bounds = val_type.range()
            if bounds[0] > bounds[1]:
                return []

            # Calculate the number of elements, respecting MAX_ARRAY_ELEMENTS.
            # MODIFIED: Use dynamic MAX_ARRAY_ELEMENTS
            num_elements_in_array = bounds[1] - bounds[0] + 1
            num_elements_to_fetch = min(num_elements_in_array, MAX_ARRAY_ELEMENTS)

@ -742,21 +767,17 @@ class EnhancedJsonEncoder(json.JSONEncoder):
            return f"<python_script_error_processing_c_array: {e_py} (type: {str(val_type)})>"

    def _handle_struct_or_class(self, gdb_val, val_type, original_type_str=""):
        # Use original_type_str if available (includes typedefs), else fall back to the val_type string
        type_display_name = original_type_str if original_type_str else str(val_type)
        obj_dict = {"_type": type_display_name}

        try:
            fields = val_type.fields()
            if not fields:  # No fields, or it's an opaque type from GDB's perspective
                # Try to get a summary string if GDB provides one (e.g. for types with custom printers not using children())
            if not fields:
                try:
                    summary_str = str(gdb_val)
                    # Avoid redundant type info if the summary is just the type name
                    if summary_str != type_display_name and summary_str != str(
                        val_type.name
                    ):
                        # MODIFIED: Use dynamic MAX_STRING_LENGTH for the summary
                        obj_dict["_summary"] = (
                            summary_str[:MAX_STRING_LENGTH]
                            if len(summary_str) > MAX_STRING_LENGTH
@ -769,9 +790,8 @@ class EnhancedJsonEncoder(json.JSONEncoder):
            for field in fields:
                field_name = field.name
                if field_name is None:
                    continue  # Skip unnamed fields (e.g. padding)
                    continue

                # Skip artificial fields unless they are base classes (GDB often marks the vtable pointer as artificial)
                if field.artificial and not field.is_base_class:
                    continue

@ -779,9 +799,7 @@ class EnhancedJsonEncoder(json.JSONEncoder):
                    try:
                        base_val = gdb_val.cast(field.type)
                        base_obj_data = self._serialize_value(base_val)
                        # Merge base class fields into the current object's dictionary
                        if isinstance(base_obj_data, dict):
                            # Prefix base class members to avoid name collisions, or integrate directly
                            base_type_name = (
                                str(field.type.name)
                                if field.type.name
@ -789,9 +807,9 @@ class EnhancedJsonEncoder(json.JSONEncoder):
                            )
                            for k_base, v_base in base_obj_data.items():
                                if k_base == "_type":
                                    continue  # Don't copy _type from the base
                                    continue
                                obj_dict[f"{base_type_name}::{k_base}"] = v_base
                        else:  # If base class serialization isn't a dict (e.g. an error string)
                        else:
                            obj_dict[
                                (
                                    str(field.type.name)
@ -807,7 +825,7 @@ class EnhancedJsonEncoder(json.JSONEncoder):
                                    else "base_class_error"
                                )
                            ] = f"<gdb_error_casting_base: {e_base_cast}>"
                else:  # Regular field
                else:
                    try:
                        field_value_obj = gdb_val[field_name]
                        obj_dict[field_name] = self._serialize_value(field_value_obj)
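For a plain struct such as struct Point { int x; double y; };, the resulting JSON object carries the type tag plus one entry per field, with base-class members merged under a qualified key (shapes are illustrative):

    {"_type": "Point", "x": 3, "y": 1.5}
    {"_type": "Derived", "Base::id": 7, "name": "n1"}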
@ -844,7 +862,7 @@ class GDBDumpJsonCommand(gdb.Command):

    def __init__(self):
        super(GDBDumpJsonCommand, self).__init__(
            "dump_json", gdb.COMMAND_DATA, gdb.COMPLETE_SYMBOL  # gdb.COMPLETE_EXPRESSION might be a better fit
            "dump_json", gdb.COMMAND_DATA, gdb.COMPLETE_SYMBOL
        )
        self.output_start_delimiter = "START_JSON_OUTPUT"
        self.output_end_delimiter = "END_JSON_OUTPUT"
@ -855,16 +873,10 @@ class GDBDumpJsonCommand(gdb.Command):
        gdb.flush()

    def invoke(self, arg_string_raw, from_tty):
        # arg_string_raw is the full string passed to the dump_json command
        # from_tty is unused but required by the gdb.Command.invoke signature

        _dumper_log_write(
            f"--- dump_json command invoked with arg_string_raw: '{arg_string_raw}', from_tty: {from_tty} ---"
        )

        gdb.write(f"{self.output_start_delimiter}\n")
        gdb.flush()

        try:
            if not arg_string_raw.strip():
                raise ValueError("No expression provided to dump_json.")
@ -873,7 +885,7 @@ class GDBDumpJsonCommand(gdb.Command):

            parts = arg_string.split('@')
            base_expr_str = parts[0].strip()
            dim_exprs_str = [d.strip() for d in parts[1:] if d.strip()]  # Dimension expressions
            dim_exprs_str = [d.strip() for d in parts[1:] if d.strip()]

            num_dimensions_specified = len(dim_exprs_str)
            evaluated_dims = []
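Putting the parsing together, the command accepts an optional '@'-separated size suffix (expression names are illustrative):

    (gdb) dump_json my_struct        # plain dump of one value
    (gdb) dump_json buffer@16        # 1D: 16 elements from a pointer or array
    (gdb) dump_json matrix@rows@4    # 2D: dimensions may be int literals or GDB expressions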
@ -887,28 +899,25 @@ class GDBDumpJsonCommand(gdb.Command):
            except gdb.error as e_base:
                _dumper_log_write(f"GDB error evaluating base expression '{base_expr_str}': {e_base}")
                raise ValueError(f"Cannot evaluate base expression '{base_expr_str}': {e_base}")
            except Exception as e_base_other:  # Catch other potential errors like runtime errors in Python extensions
            except Exception as e_base_other:
                _dumper_log_write(f"Unexpected error evaluating base expression '{base_expr_str}': {e_base_other}")
                raise ValueError(f"Unexpected error evaluating base expression '{base_expr_str}': {e_base_other}")


            if num_dimensions_specified == 0:
                # Standard behavior: dump the evaluated expression directly
                _dumper_log_write(f"No dimensions specified. Dumping '{base_expr_str}' as is.")
                data_to_encode = gdb_value_or_ptr
            else:
                # Sized dump: ensure the base expression is a pointer or array
                base_type_stripped = gdb_value_or_ptr.type.strip_typedefs()
                if base_type_stripped.code not in [gdb.TYPE_CODE_PTR, gdb.TYPE_CODE_ARRAY]:
                    raise ValueError(f"Base expression '{base_expr_str}' (type: {base_type_stripped}) "
                                     "must be a pointer or array for sized dump with '@'.")

                # Evaluate dimension expressions
                for i, dim_str in enumerate(dim_exprs_str):
                    try:
                        dim_val = int(dim_str)
                        _dumper_log_write(f"Dimension {i+1} ('{dim_str}') parsed as literal int: {dim_val}")
                    except ValueError:  # Not a literal int; try to evaluate it as a GDB expression
                    except ValueError:
                        try:
                            _dumper_log_write(f"Evaluating dimension expression {i+1}: '{dim_str}'")
                            dim_gdb_val = gdb.parse_and_eval(dim_str)
@ -925,41 +934,33 @@ class GDBDumpJsonCommand(gdb.Command):
                            raise ValueError(f"Invalid negative dimension '{dim_str}' (value: {dim_val})")
                    evaluated_dims.append(dim_val)

                # --- Handle 1D array: expr@dim1 ---
                if num_dimensions_specified == 1:
                    dim1 = evaluated_dims[0]
                    _dumper_log_write(f"Processing as 1D array. Pointer: '{base_expr_str}', Dimension1: {dim1}")
                    array_elements = []

                    # Determine the actual number of elements to fetch, respecting MAX_ARRAY_ELEMENTS.
                    # MAX_ARRAY_ELEMENTS is used as a global cap on the number of elements from any single array/container.
                    elements_to_fetch = dim1
                    truncated = False
                    if MAX_ARRAY_ELEMENTS > 0 and dim1 > MAX_ARRAY_ELEMENTS:  # Check if the global limit is set and exceeded
                    if MAX_ARRAY_ELEMENTS > 0 and dim1 > MAX_ARRAY_ELEMENTS:
                        elements_to_fetch = MAX_ARRAY_ELEMENTS
                        truncated = True
                        _dumper_log_write(f"Dimension {dim1} exceeds MAX_ARRAY_ELEMENTS ({MAX_ARRAY_ELEMENTS}). Will fetch {elements_to_fetch}.")

                    # Get the target type of the pointer (type of elements in the array).
                    # If base_type_stripped is TYPE_CODE_ARRAY, target() gives the element type.
                    # If base_type_stripped is TYPE_CODE_PTR, target() gives the pointed-to type.
                    try:
                        element_gdb_type = base_type_stripped.target()
                    except gdb.error:  # target() can fail if it's not a pointer/array type (should be caught earlier)
                    except gdb.error:
                        raise ValueError(f"Could not determine element type for '{base_expr_str}'.")

                    _dumper_log_write(f"Fetching {elements_to_fetch} elements of type '{str(element_gdb_type)}'.")
                    for i in range(elements_to_fetch):
                        try:
                            # GDB pointer arithmetic: (pointer_value + index),
                            # then dereference to get the element's gdb.Value
                            element_val = (gdb_value_or_ptr + i).dereference()
                            array_elements.append(element_val)
                        except gdb.error as e_deref:
                            _dumper_log_write(f"GDB error dereferencing element at index {i} for '{base_expr_str}': {e_deref}")
                            array_elements.append(f"<gdb_error_dereferencing_at_index_{i}: {str(e_deref).replace('"', '')}>")  # Avoid breaking JSON
                            break  # Stop processing this array on error
                        except Exception as e_proc:  # Other Python errors
                            array_elements.append(f"<gdb_error_dereferencing_at_index_{i}: {str(e_deref).replace('"', '')}>")
                            break
                        except Exception as e_proc:
                            _dumper_log_write(f"Python error processing element at index {i}: {e_proc}")
                            array_elements.append(f"<script_error_processing_element_at_index_{i}>")
                            break
@ -969,16 +970,14 @@ class GDBDumpJsonCommand(gdb.Command):

                    data_to_encode = array_elements

                # --- Handle 2D array: expr@dim1@dim2 ---
                elif num_dimensions_specified == 2:
                    dim1 = evaluated_dims[0]  # Typically rows
                    dim2 = evaluated_dims[1]  # Typically columns
                    dim1 = evaluated_dims[0]
                    dim2 = evaluated_dims[1]
                    _dumper_log_write(f"Processing as 2D array. Pointer: '{base_expr_str}', Dim1 (rows): {dim1}, Dim2 (cols): {dim2}")
                    matrix_rows = []
                    total_elements_dumped = 0
                    max_total_elements_to_dump = MAX_ARRAY_ELEMENTS if MAX_ARRAY_ELEMENTS > 0 else (dim1 * dim2)  # Cap total elements
                    max_total_elements_to_dump = MAX_ARRAY_ELEMENTS if MAX_ARRAY_ELEMENTS > 0 else (dim1 * dim2)

                    # Get element type (same logic as 1D)
                    try:
                        element_gdb_type = base_type_stripped.target()
                    except gdb.error:
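As the error messages in the next hunk suggest, the 2D path treats the base pointer as a flattened row-major buffer, so the element at row r, column c sits at offset r * dim2 + c; in outline (a sketch, not the verbatim loop):

    for r in range(dim1):
        row_data = []
        for c in range(dim2):
            offset = r * dim2 + c  # row-major index
            row_data.append((gdb_value_or_ptr + offset).dereference())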
@ -1003,29 +1002,57 @@ class GDBDumpJsonCommand(gdb.Command):
                        except gdb.error as e_deref:
                            _dumper_log_write(f"GDB error dereferencing element at [{r}][{c}] (offset {offset}) for '{base_expr_str}': {e_deref}")
                            row_data.append(f"<gdb_error_dereferencing_at_[{r}][{c}]: {str(e_deref).replace('"', '')}>")
                            stop_processing_matrix = True  # Stop all processing on error
                            stop_processing_matrix = True
                            break
                        except Exception as e_proc:
                            _dumper_log_write(f"Python error processing element at [{r}][{c}]: {e_proc}")
                            row_data.append(f"<script_error_processing_element_at_[{r}][{c}]>")
                            stop_processing_matrix = True
                            break
                    matrix_rows.append(row_data)  # Add the row even if it was cut short by an error/truncation
                    matrix_rows.append(row_data)

                    data_to_encode = matrix_rows

                else:  # More than 2 dimensions specified
                else:
                    raise ValueError(f"Unsupported number of dimensions ({num_dimensions_specified}). Max 2 dimensions are supported via '@'.")

            # --- Encode the prepared data (single value, list, or list of lists) ---
            encoder = EnhancedJsonEncoder(
                indent=None, separators=(",", ":"), ensure_ascii=False  # Compact JSON
                indent=None, separators=(",", ":"), ensure_ascii=False
            )
            json_output_str = encoder.encode(data_to_encode)
            gdb.write(f"{json_output_str}\n")
            _dumper_log_write(f"Successfully encoded '{arg_string}' to JSON.")

            _dumper_log_write(f"JSON serialization complete for '{arg_string}'. Length: {len(json_output_str)} characters.")

        except gdb.error as e_gdb:  # GDB-specific errors during parse_and_eval or operations
            if DUMP_RAW_JSON_TO_FILE:
                try:
                    _dumper_log_write(f"Attempting to save raw JSON to file...")
                    sanitized_var_name = _sanitize_filename_component(base_expr_str)
                    timestamp_str = time.strftime("%Y%m%d_%H%M%S")

                    output_dir_for_file = RAW_JSON_OUTPUT_DIR if RAW_JSON_OUTPUT_DIR else gdb.parse_and_eval('$cwd').string()

                    if not os.path.exists(output_dir_for_file):
                        os.makedirs(output_dir_for_file, exist_ok=True)

                    raw_output_filepath = os.path.join(
                        output_dir_for_file, f"raw_dump_{timestamp_str}_{sanitized_var_name}.json"
                    )
                    with open(raw_output_filepath, "w", encoding="utf-8") as f_raw:
                        f_raw.write(json_output_str)
                    _dumper_log_write(f"Raw JSON successfully saved to file: {raw_output_filepath}")
                except Exception as e_file_dump:
                    _dumper_log_write(f"ERROR: Failed to dump raw JSON to file: {e_file_dump}\n{traceback.format_exc(limit=2)}")
                    gdb.write(f"GDB_DUMPER_SCRIPT_ERROR: Failed to save raw JSON to file: {e_file_dump}\n")

            _dumper_log_write(f"Attempting to write JSON to GDB console for '{arg_string}'.")
            gdb.write(f"{self.output_start_delimiter}\n")
            gdb.write(f"{json_output_str}\n")
            # CHANGE: Added gdb.flush() after each gdb.write() to force the data out
            gdb.flush()  # Force the output to flush
            _dumper_log_write(f"Finished writing JSON to GDB console for '{arg_string}'.")

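With the raw dump enabled, each invocation writes a timestamped file named after the sanitized base expression, e.g. (values illustrative):

    dump_json matrix@3@4   ->   raw_dump_20240115_143022_matrix.json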
        except gdb.error as e_gdb:
            _dumper_log_write(
                f"GDB error during 'dump_json {arg_string_raw}': {e_gdb}\n{traceback.format_exc(limit=2)}"
            )
@ -1034,8 +1061,10 @@ class GDBDumpJsonCommand(gdb.Command):
                "expression": arg_string_raw,
                "details": "The expression or a part of it (like a dimension) could not be processed by GDB.",
            }
            gdb.write(f"{self.output_start_delimiter}\n")
            gdb.write(f"{json.dumps(error_payload)}\n")
        except ValueError as e_val:  # Errors raised by our parsing/validation logic
            gdb.flush()  # Force the output to flush on error as well
        except ValueError as e_val:
            _dumper_log_write(
                f"ValueError during 'dump_json {arg_string_raw}': {e_val}\n{traceback.format_exc(limit=2)}"
            )
@ -1044,8 +1073,10 @@ class GDBDumpJsonCommand(gdb.Command):
                "expression": arg_string_raw,
                "details": "Invalid input format or internal script error related to value processing.",
            }
            gdb.write(f"{self.output_start_delimiter}\n")
            gdb.write(f"{json.dumps(error_payload)}\n")
        except Exception as e_py:  # Catch-all for other Python errors in the script
            gdb.flush()  # Force the output to flush on error as well
        except Exception as e_py:
            _dumper_log_write(
                f"Unexpected Python error during 'dump_json {arg_string_raw}': {e_py}\n{traceback.format_exc(limit=3)}"
            )
@ -1054,27 +1085,29 @@ class GDBDumpJsonCommand(gdb.Command):
                "expression": arg_string_raw,
                "details": f"An unexpected Python error occurred in gdb_dumper.py: {type(e_py).__name__} - {str(e_py)}. Check gdb_dumper_debug.log.",
            }
            gdb.write(f"{self.output_start_delimiter}\n")
            gdb.write(f"{json.dumps(error_payload)}\n")
            gdb.flush()  # Force the output to flush on error as well
        finally:
            gdb.write(f"{self.output_end_delimiter}\n")
            gdb.flush()
            gdb.flush()  # Force the final flush
            _dumper_log_write(
                f"--- dump_json command finished for arg: '{arg_string_raw}' ---"
            )


# Register the command when the script is sourced by GDB.
# Register the command when the script is sourced by GDB.
try:
    GDBDumpJsonCommand()
    gdb.write(
        "GDB_DUMPER_SCRIPT: GDBDumpJsonCommand instance created and command 'dump_json' should be available.\n"
    )  # New gdb.write
    gdb.flush()  # New flush
    )
    gdb.flush()
except Exception as e_command_reg:
    gdb.write(
        f"GDB_DUMPER_SCRIPT_CRITICAL_ERROR: Failed to register GDBDumpJsonCommand. Exception: {type(e_command_reg).__name__}: {e_command_reg}\n"
    )  # New gdb.write
    gdb.flush()  # New flush
    )
    gdb.flush()
    _dumper_log_write(
        f"CRITICAL: Failed to register GDBDumpJsonCommand: {e_command_reg}"
    )
@ -1083,5 +1116,5 @@ except Exception as e_command_reg:
_dumper_log_write("--- GDB Dumper Script Fully Parsed and Command Registered ---")
gdb.write(
    "GDB_DUMPER_SCRIPT: End of script reached. All initializations complete.\n"
)  # New gdb.write
gdb.flush()  # New flush
)
gdb.flush()
@ -61,6 +61,9 @@ class ConfigWindow(tk.Toplevel):
        self.dumper_max_array_elements_var = tk.StringVar()
        self.dumper_max_recursion_depth_var = tk.StringVar()
        self.dumper_max_string_length_var = tk.StringVar()
        # CHANGE: New variables for the raw-JSON-save flag and its output directory
        self.dumper_dump_raw_gdb_output_to_file_var = tk.BooleanVar()
        self.dumper_raw_gdb_output_dir_var = tk.StringVar()

        self._load_current_settings_to_vars()
        self._create_widgets()
@ -122,6 +125,13 @@ class ConfigWindow(tk.Toplevel):
                )
            )
        )
        # CHANGE: Load the new options from AppSettings
        self.dumper_dump_raw_gdb_output_to_file_var.set(
            self.app_settings.get_setting("dumper_options", "dump_raw_gdb_output_to_file", False)
        )
        self.dumper_raw_gdb_output_dir_var.set(
            self.app_settings.get_setting("dumper_options", "raw_gdb_output_dir", "")
        )

    def _create_widgets(self):
        """Creates and lays out widgets for the configuration window."""
@ -318,6 +328,27 @@ class ConfigWindow(tk.Toplevel):
            font=("TkDefaultFont", 8),
        ).grid(row=i, column=2, sticky=tk.W, padx=5, pady=3)

        # CHANGE: Added widgets for the new raw-JSON-save options
        row_idx = len(dumper_settings)  # Continue after the existing settings

        # Separator to group the new options
        ttk.Separator(dumper_config_frame, orient=tk.HORIZONTAL).grid(row=row_idx, column=0, columnspan=3, sticky="ew", pady=(10, 5))
        row_idx += 1

        # Checkbox to enable/disable the saving
        ttk.Checkbutton(dumper_config_frame, text="Dump Raw JSON Output to File (Diagnostic)",
                        variable=self.dumper_dump_raw_gdb_output_to_file_var).grid(row=row_idx, column=0, columnspan=3, sticky=tk.W, padx=5, pady=5)
        row_idx += 1

        # Text field for the output directory plus a Browse button
        ttk.Label(dumper_config_frame, text="Raw JSON Output Directory:").grid(row=row_idx, column=0, sticky=tk.W, padx=5, pady=3)
        ttk.Entry(dumper_config_frame, textvariable=self.dumper_raw_gdb_output_dir_var).grid(row=row_idx, column=1, sticky=(tk.W, tk.E), padx=5, pady=3)
        ttk.Button(dumper_config_frame, text="Browse...",
                   command=lambda: self._browse_directory(self.dumper_raw_gdb_output_dir_var, "Select Raw JSON Output Directory")).grid(row=row_idx, column=2, padx=5, pady=3)
        row_idx += 1
        # Explanatory note (aligned with the dumper's actual fallback, which is GDB's working directory)
        ttk.Label(dumper_config_frame, text="(Leave empty to use GDB's working directory)", foreground="gray", font=("TkDefaultFont", 8)).grid(row=row_idx, column=1, columnspan=2, sticky=tk.W, padx=7, pady=(0,5))

    def _browse_file(self, target_var: tk.StringVar, title: str, filetypes=None):
        """Helper to browse for a file and set the target_var."""
        current_path = target_var.get()
@ -339,6 +370,20 @@ class ConfigWindow(tk.Toplevel):
        if path:
            target_var.set(path)

    # CHANGE: New helper to browse for a directory
    def _browse_directory(self, target_var: tk.StringVar, title: str):
        """Helper to browse for a directory and set the target_var."""
        current_path = target_var.get()
        initial_dir = current_path if current_path and os.path.isdir(current_path) else None

        path = filedialog.askdirectory(
            title=title,
            initialdir=initial_dir,
            parent=self
        )
        if path:
            target_var.set(path)

    def _validate_settings(self) -> bool:
        """
        Validates the current values in the StringVars.
@ -388,6 +433,14 @@ class ConfigWindow(tk.Toplevel):
            errors.append("GDB Executable path cannot be empty.")
        # Dumper script path can be empty.

        # CHANGE: Validate the raw JSON output directory
        if self.dumper_dump_raw_gdb_output_to_file_var.get():  # If the flag is enabled
            output_dir = self.dumper_raw_gdb_output_dir_var.get().strip()
            if output_dir and not os.path.isdir(output_dir):
                # If the user specified a (non-empty) directory, it must exist
                errors.append(f"Raw JSON Output Directory '{output_dir}' is not a valid directory or does not exist. Please create it or leave empty to use GDB's working directory.")


        if errors:
            messagebox.showerror("Validation Error", "\n".join(errors), parent=self)
            return False
@ -450,6 +503,14 @@ class ConfigWindow(tk.Toplevel):
            "max_string_length",
            int(self.dumper_max_string_length_var.get()),
        )
        # CHANGE: Save the new options to AppSettings
        self.app_settings.set_setting(
            "dumper_options", "dump_raw_gdb_output_to_file", self.dumper_dump_raw_gdb_output_to_file_var.get()
        )
        self.app_settings.set_setting(
            "dumper_options", "raw_gdb_output_dir", self.dumper_raw_gdb_output_dir_var.get().strip()
        )


        # GUI (save current config window geometry)
        self.app_settings.set_setting(
@ -517,4 +578,4 @@ class ConfigWindow(tk.Toplevel):
    # def _settings_changed(self) -> bool:
    #     # Compare current var values with what was initially loaded from app_settings
    #     # This is more complex as you need to store initial values or re-compare with app_settings
    #     return True  # Placeholder
    #     # return True  # Placeholder
@ -606,8 +606,10 @@ class GDBGui(tk.Tk):
        path = filedialog.askopenfilename(
            title=title,
            filetypes=filetypes or [("All files", "*.*")],
            initialdir=initial_dir,
            parent=self,
            initialdir=(
                initial_dir if initial_dir else None
            ),  # Pass None if not determined
            parent=self,  # Ensure the dialog is on top of this window
        )
        if path:
            target_var.set(path)
@ -687,7 +689,7 @@ class GDBGui(tk.Tk):
        target_exe = self.exe_path_var.get()
        gdb_script = self.app_settings.get_setting("general", "gdb_dumper_script_path")

        # ... (initial validations for gdb_exe, target_exe, etc. remain unchanged) ...
        # Initial validations for gdb_exe, target_exe
        if not gdb_exe or not os.path.isfile(gdb_exe):
            messagebox.showerror("Configuration Error", "GDB executable path is not configured correctly. Please check Options > Configure.", parent=self)
            self._check_critical_configs_and_update_gui()
@ -699,10 +701,14 @@ class GDBGui(tk.Tk):
            messagebox.showerror("File Not Found", f"Target executable not found: {target_exe}", parent=self)
            return

        # Check whether the dumper script is specified and valid
        dumper_script_invalid = False
        if gdb_script and not os.path.isfile(gdb_script):
            dumper_script_invalid = True
            self.gdb_dumper_status_var.set(f"Dumper: '{self.app_settings.get_setting('general', 'gdb_dumper_script_path')}' (Not Found!)")
        if gdb_script:
            if not os.path.isfile(gdb_script):
                dumper_script_invalid = True
                self.gdb_dumper_status_var.set(f"Dumper: '{self.app_settings.get_setting('general', 'gdb_dumper_script_path')}' (Not Found!)")
        else:  # No dumper script configured
            self.gdb_dumper_status_var.set("Dumper: Not Configured (Optional).")


        if self.gdb_session and self.gdb_session.is_alive():
@ -715,12 +721,15 @@ class GDBGui(tk.Tk):

        try:
            startup_timeout = self.app_settings.get_setting("timeouts", "gdb_start", 30)
            quit_timeout_on_no_symbols = self.app_settings.get_setting("timeouts", "gdb_quit", 10)  # Timeout for quit
            current_dumper_options = self.app_settings.get_category_settings("dumper_options", {})
            quit_timeout_on_no_symbols = self.app_settings.get_setting("timeouts", "gdb_quit", 10)

            # CHANGE: Fetch all configured dumper_options, including the new diagnostic options
            current_dumper_options = self.app_settings.get_category_settings("dumper_options", {})

            self.gdb_session = GDBSession(
                gdb_path=gdb_exe, executable_path=target_exe,
                gdb_script_full_path=gdb_script, dumper_options=current_dumper_options
                gdb_script_full_path=gdb_script,
                dumper_options=current_dumper_options  # CHANGE: Pass the full dumper_options dictionary
            )
            self.gdb_session.start(timeout=startup_timeout)

@ -781,7 +790,8 @@ class GDBGui(tk.Tk):
                                 "JSON dumping might be affected. Check logs.",
                                 parent=self)
                self.gdb_dumper_status_var.set(f"Dumper: {os.path.basename(gdb_script)} (Load Failed!)")
            elif self.gdb_session:  # No dumper script specified
            # No dumper script specified
            else:
                self._update_gdb_raw_output("No GDB dumper script specified. JSON dump via script unavailable.\n", append=True)
                self._update_status_bar("GDB session active. No dumper script.")
                self.gdb_dumper_status_var.set("Dumper: Not Configured (Optional).")
@ -794,6 +804,7 @@ class GDBGui(tk.Tk):
            self.dump_var_button.config(state=tk.DISABLED)
            self.stop_gdb_button.config(state=tk.NORMAL)

            # Disable the profile controls while a manual session is active
            if hasattr(self, 'run_profile_button'): self.run_profile_button.config(state=tk.DISABLED)
            if hasattr(self, 'profile_selection_combo'): self.profile_selection_combo.config(state=tk.DISABLED)

@ -1415,8 +1426,8 @@ class GDBGui(tk.Tk):
            self.profile_exec_status_var.set("No profile currently running to stop.")

    def _open_last_run_output_folder(self) -> None:
        logger.info("Attempting to open last run output folder.")  # New log
        logger.info(f"Current self.last_run_output_path: '{self.last_run_output_path}'")  # New log
        logger.info("Attempting to open last run output folder.")
        logger.info(f"Current self.last_run_output_path: '{self.last_run_output_path}'")

        if not self.last_run_output_path:
            logger.warning("self.last_run_output_path is None or empty.")
@ -1425,7 +1436,7 @@ class GDBGui(tk.Tk):
            return

        is_dir = os.path.isdir(self.last_run_output_path)
        logger.info(f"Path '{self.last_run_output_path}' is_dir: {is_dir}")  # New log
        logger.info(f"Path '{self.last_run_output_path}' is_dir: {is_dir}")

        if not is_dir:
            logger.warning(f"Path '{self.last_run_output_path}' is not a valid directory.")
@ -1434,21 +1445,21 @@ class GDBGui(tk.Tk):
            return

        try:
            logger.info(f"Proceeding to open folder: {self.last_run_output_path} on platform: {sys.platform}")  # New log
            logger.info(f"Proceeding to open folder: {self.last_run_output_path} on platform: {sys.platform}")
            if sys.platform == "win32":
                logger.info(f"Executing: os.startfile('{self.last_run_output_path}')")  # New log
                logger.info(f"Executing: os.startfile('{self.last_run_output_path}')")
                os.startfile(self.last_run_output_path)
                logger.info("os.startfile executed.")  # New log
                logger.info("os.startfile executed.")
            elif sys.platform == "darwin":
                command = ["open", self.last_run_output_path]
                logger.info(f"Executing: {command}")  # New log
                logger.info(f"Executing: {command}")
                subprocess.run(command, check=True)
                logger.info(f"{command} executed.")  # New log
                logger.info(f"{command} executed.")
            else:  # Assume Linux/other Unix-likes
                command = ["xdg-open", self.last_run_output_path]
                logger.info(f"Executing: {command}")  # New log
                logger.info(f"Executing: {command}")
                subprocess.run(command, check=True)
                logger.info(f"{command} executed.")  # New log
                logger.info(f"{command} executed.")
        except FileNotFoundError:  # For xdg-open or open when not found
            logger.error(f"File manager command ('xdg-open' or 'open') not found on this system.", exc_info=True)
            messagebox.showerror("Error", f"Could not find the file manager command ('xdg-open' or 'open'). Please open the folder manually:\n{self.last_run_output_path}", parent=self)
@ -1555,4 +1566,4 @@ class ScrolledTextLogHandler(logging.Handler):
        except tk.TclError:
            self._active = False
        except Exception:
            self._ac
            self._active = False  # On any other error, deactivate to prevent loops