From 40f40ed3f7c6feae9f0387706ccb276aaacab488 Mon Sep 17 00:00:00 2001
From: VALLONGOL
Date: Mon, 26 May 2025 11:04:04 +0200
Subject: [PATCH] test mi

---
 cpp_python_debug/core/gdb_controller.py    | 1129 +++++------
 cpp_python_debug/core/symbol_analyzer.py   |   63 +-
 .../gui/profile_manager_window.py          |  183 ++-
 3 files changed, 469 insertions(+), 906 deletions(-)

diff --git a/cpp_python_debug/core/gdb_controller.py b/cpp_python_debug/core/gdb_controller.py
index 7e602d7..8fc8aa8 100644
--- a/cpp_python_debug/core/gdb_controller.py
+++ b/cpp_python_debug/core/gdb_controller.py
@@ -1,46 +1,23 @@
 # File: cpp_python_debug/core/gdb_controller.py
-# Manages the GDB process and interaction, including sourcing the custom dumper script
-# and passing configurable options to it.
-
 import os
 import re
 import wexpect
 import logging
-import json # For parsing JSON output from the GDB script (though not directly used in this file after changes)
+import json
 import time
-from typing import Optional, Dict, Any, List # For type hinting
+from typing import Dict, Any, List, Optional, Tuple

 logger = logging.getLogger(__name__)

-# Default timeouts if not provided by the caller, though explicit passing is preferred.
-DEFAULT_GDB_OPERATION_TIMEOUT = 30
-DEFAULT_LONG_GDB_OPERATION_TIMEOUT = 120 # For run/continue
-
+DEFAULT_GDB_OPERATION_TIMEOUT = 30
+DEFAULT_LONG_GDB_OPERATION_TIMEOUT = 120

 class GDBSession:
-    """
-    Manages a GDB subprocess, providing methods to send commands,
-    set breakpoints, run the target, and dump variables using a custom GDB Python script.
-    Accepts specific timeouts for operations and dumper script options.
-    """
-
-    def __init__(
-        self,
-        gdb_path: str,
-        executable_path: str,
-        gdb_script_full_path: Optional[str] = None,
-        dumper_options: Optional[Dict[str, Any]] = None,
-    ):
-        """
-        Initializes the GDB session.
-
-        Args:
-            gdb_path: Path to the GDB executable.
-            executable_path: Path to the target executable to debug.
-            gdb_script_full_path: Optional full path to the GDB Python dumper script.
-            dumper_options: Optional dictionary with options for the dumper script
-                            (e.g., {'max_array_elements': 100, ...}).
-        """
+    def __init__(self,
+                 gdb_path: str,
+                 executable_path: str,
+                 gdb_script_full_path: Optional[str] = None,
+                 dumper_options: Optional[Dict[str, Any]] = None): # use_mi_mode removed from the constructor
         if not os.path.exists(gdb_path):
             msg = f"GDB executable not found at: {gdb_path}"
             logger.error(msg)
@@ -49,102 +26,54 @@ class GDBSession:
             msg = f"Target executable not found at: {executable_path}"
             logger.error(msg)
             raise FileNotFoundError(msg)
-
+
         self.gdb_path = gdb_path
         self.executable_path = executable_path
-        self.gdb_script_path = None # Will be set if path is valid
-        self.dumper_options = (
-            dumper_options if dumper_options else {}
-        ) # Store dumper options
+        self.gdb_script_path = None
+        self.dumper_options = dumper_options if dumper_options else {}
+        self.use_mi_mode = False # Always False now: GDB does not support MI as an interpreter

         if gdb_script_full_path:
             if os.path.exists(gdb_script_full_path):
                 self.gdb_script_path = gdb_script_full_path
             else:
-                logger.warning(
-                    f"GDB Python dumper script not found at: {gdb_script_full_path}. Advanced JSON dumping will be unavailable."
-                )
-
-        self.child = None
+                logger.warning(f"GDB Python dumper script not found at: {gdb_script_full_path}. 
Advanced JSON dumping will be unavailable.") + + self.child: Optional[wexpect.SpawnBase] = None self.gdb_prompt = "(gdb) " - self.gdb_script_sourced_successfully = False + self.symbols_found = True - logger.info( - f"GDBSession initialized. GDB: '{gdb_path}', Executable: '{executable_path}', " - f"DumperScript: '{self.gdb_script_path if self.gdb_script_path else 'Not provided'}'" - ) + logger.info(f"GDBSession initialized. GDB: '{gdb_path}', Executable: '{executable_path}', " + f"DumperScript: '{self.gdb_script_path if self.gdb_script_path else 'Not provided'}', " + f"MI Mode: False (GDB version does not support reliable MI)") # Logga chiaramente if self.dumper_options: logger.info(f"Dumper options provided: {self.dumper_options}") - - def get_gdb_version(self, timeout: int = DEFAULT_GDB_OPERATION_TIMEOUT) -> Optional[str]: - """ - Retrieves the GDB version string. - Args: - timeout: Timeout for the GDB command. - - Returns: - The GDB version string (typically the first line of 'gdb --version'), - or None if an error occurs or version cannot be parsed. - """ - if not self.child or not self.child.isalive(): - # Questo metodo potrebbe essere chiamato anche prima che una sessione completa sia "startata" - # per l'analisi dei simboli, quindi potremmo dover avviare GDB solo per questo. - # Per ora, assumiamo che sia chiamato su una sessione già avviata, - # o che il chiamante gestisca l'avvio/chiusura di una sessione temporanea. - # In alternativa, potrebbe essere un metodo statico o una funzione helper - # che lancia 'gdb --version' come processo separato. - # Per coerenza con gli altri metodi, lo lasciamo come metodo d'istanza. - # Se la sessione non è 'start()'ata (cioè non c'è un eseguibile caricato), - # GDB potrebbe comunque rispondere a 'show version'. - logger.warning("GDB session not fully active, attempting 'show version'.") - # Se child non esiste, non possiamo fare nulla qui. - # Il chiamante (es. ProfileManagerWindow per l'analisi) dovrà gestire - # l'avvio di una sessione GDB se necessario. - # Questa implementazione assume che self.child esista. - if not self.child: - logger.error("No GDB child process available to get version.") - return None - - # Usiamo 'show version' che funziona all'interno di una sessione GDB attiva - # 'gdb --version' è per l'uso da riga di comando esterna. - command = "show version" - logger.info(f"Requesting GDB version with command: '{command}'") - try: - output = self.send_cmd(command, expect_prompt=True, timeout=timeout) - # L'output di 'show version' è multiriga. La prima riga è di solito quella che vogliamo. - # Esempio: - # GNU gdb (GDB) 16.2 - # Copyright (C) 2024 Free Software Foundation, Inc. - # ... 
- if output: - first_line = output.splitlines()[0].strip() - logger.info(f"GDB version string: {first_line}") - return first_line - logger.warning("No output received for 'show version' command.") - return None - except (ConnectionError, TimeoutError) as e: - logger.error(f"Error getting GDB version: {e}", exc_info=True) - return None - except Exception as e_parse: - logger.error(f"Error parsing 'show version' output: {e_parse}", exc_info=True) - return None + def is_alive(self) -> bool: + return self.child is not None and self.child.isalive() def start(self, timeout: int = DEFAULT_GDB_OPERATION_TIMEOUT) -> None: - command = f'"{self.gdb_path}" --nx --quiet "{self.executable_path}"' + # Avvia GDB sempre in modalità console + gdb_args = ["--nx", "--quiet"] + gdb_args.append(f'"{self.executable_path}"') + command = f'"{self.gdb_path}" {" ".join(gdb_args)}' + logger.info(f"Spawning GDB process: {command} with startup timeout: {timeout}s") + output_at_startup = "" try: - # Aumentiamo leggermente il timeout per wexpect.spawn per dare a GDB il tempo di caricare i simboli (o fallire) - # e stampare l'output prima del primo prompt. - spawn_timeout = max(timeout, 5) # Assicurati almeno 5 secondi per lo spawn e il primo output - self.child = wexpect.spawn(command, timeout=spawn_timeout, encoding='utf-8', errors='replace') + self.child = wexpect.spawn(command, timeout=max(timeout, 5), encoding='utf-8', errors='replace') + + try: + self.child.expect_exact(self.gdb_prompt, timeout=max(timeout, 15)) + output_at_startup = self.child.before + + except (wexpect.TIMEOUT, wexpect.EOF) as e_expect: + output_at_startup = self.child.before if hasattr(self.child, 'before') else "" + logger.error(f"Error ({type(e_expect).__name__}) waiting for GDB prompt. Output: \n'{output_at_startup}'") + raise - # Aspetta il primo prompt. L'output prima di questo prompt conterrà i messaggi di caricamento. 
- self.child.expect_exact(self.gdb_prompt, timeout=max(timeout, 15)) # Timeout per il prompt - output_at_startup = self.child.before if hasattr(self.child, 'before') else "" - logger.debug(f"GDB output at startup (before first prompt):\n{output_at_startup}") # Logga l'output iniziale + logger.debug(f"GDB output at startup (full capture):\n{output_at_startup}") - # --- NUOVO: Controllo per assenza di simboli --- no_symbols_message = "No debugging symbols found" if no_symbols_message in output_at_startup: self.symbols_found = False @@ -152,15 +81,16 @@ class GDBSession: else: self.symbols_found = True logger.info("Debugging symbols appear to be loaded (no 'No debugging symbols found' message detected).") - # --- FINE NUOVO CONTROLLO --- - logger.info("GDB started successfully and prompt received.") + logger.info("GDB started successfully and ready for commands.") + # Disabilita paginazione (sempre in console mode) pagination_timeout = max(5, timeout // 2) logger.info(f"Disabling GDB pagination ('set pagination off') with timeout: {pagination_timeout}s.") self.send_cmd("set pagination off", expect_prompt=True, timeout=pagination_timeout) logger.info("GDB pagination disabled.") + # Dumper script è per la modalità console if self.gdb_script_path: self._set_gdb_dumper_variables(timeout=pagination_timeout) self._source_gdb_dumper_script(timeout=pagination_timeout) @@ -168,43 +98,63 @@ class GDBSession: logger.info("No GDB dumper script path provided; skipping sourcing.") self.gdb_script_sourced_successfully = False - except wexpect.TIMEOUT as e_timeout: - error_msg = f"Timeout ({timeout}s) waiting for GDB prompt or during GDB startup." - logger.error(error_msg) - debug_output = "" - try: - if self.child: debug_output = self.child.read_nonblocking(size=2048, timeout=1) - # Aggiungiamo anche l'output catturato da `before` se disponibile - if hasattr(e_timeout, 'value') and isinstance(e_timeout.value, str): # wexpect.TIMEOUT può avere 'value' - debug_output += "\nOutput before timeout (from exception value):\n" + e_timeout.value - elif output_at_startup: # Se abbiamo catturato output_at_startup prima del timeout del prompt - debug_output += "\nOutput at startup before timeout:\n" + output_at_startup - - except Exception: pass - logger.error(f"GDB output details before timeout: {debug_output}") - if self.child and self.child.isalive(): self.child.close() + except (wexpect.TIMEOUT, TimeoutError, wexpect.EOF) as e_specific_managed: + error_msg_detail = f"{type(e_specific_managed).__name__} during GDB startup." + logger.error(error_msg_detail) + additional_debug_output = "" + if not output_at_startup: + try: + if self.child: additional_debug_output = self.child.read_nonblocking(size=2048, timeout=0.2) + if hasattr(e_specific_managed, 'value') and isinstance(e_specific_managed.value, str): + additional_debug_output += "\nOutput from exception value:\n" + e_specific_managed.value + except Exception: pass + if additional_debug_output: + logger.error(f"Additional GDB output details: {additional_debug_output}") + + if self.child and self.is_alive(): self.child.close() self.child = None - raise TimeoutError(error_msg) from e_timeout + if isinstance(e_specific_managed, wexpect.EOF): + raise ConnectionAbortedError(f"GDB process terminated unexpectedly (EOF). 
Output captured: '{output_at_startup}'") from e_specific_managed + elif isinstance(e_specific_managed, wexpect.TIMEOUT): + raise TimeoutError(error_msg_detail + f" Output: '{output_at_startup}'") from e_specific_managed + else: + raise + except Exception as e: logger.error(f"!!! Unexpected exception in GDBSession.start(): {type(e).__name__}: {e}", exc_info=True) - if self.child and self.child.isalive(): self.child.close() + if self.child and self.is_alive(): self.child.close() self.child = None - raise # RILANCIA L'ECCEZIONE ORIGINALE + raise + + # _send_mi_cmd viene rimosso completamente + + def get_gdb_version(self, timeout: int = DEFAULT_GDB_OPERATION_TIMEOUT) -> Optional[str]: + # Implementazione per console mode (unica ora) + if not self.child or not self.is_alive(): + logger.error("GDB session not active, cannot get GDB version.") + return None + command = "show version" + logger.info(f"Requesting GDB version with command: '{command}'") + try: + output = self.send_cmd(command, expect_prompt=True, timeout=timeout) + if output: + for line in output.splitlines(): + if line.strip().lower().startswith("gnu gdb"): + gdb_version_string = line.strip() + logger.info(f"GDB version string: {gdb_version_string}") + return gdb_version_string + logger.warning("Could not find GDB version string in 'show version' output.") + return None + logger.warning("No output received for 'show version' command.") + return None + except Exception as e: + logger.error(f"Error getting GDB version: {e}", exc_info=True) + return None + def list_functions(self, regex_filter: Optional[str] = None, timeout: int = DEFAULT_GDB_OPERATION_TIMEOUT) -> List[str]: - """ - Lists functions known to GDB, optionally filtered by a regex. - (Implementazione precedente di list_functions è già abbastanza buona, la riporto qui per completezza - assicurandoci che sia allineata con le necessità) - - Args: - regex_filter: Optional regex to filter function names. - timeout: Timeout for the GDB command. - - Returns: - A list of function name strings. Returns an empty list on error or if no functions match. 
- """ - if not self.child or not self.child.isalive(): + # Implementazione per console mode (unica ora) + if not self.child or not self.is_alive(): logger.error("GDB session not active, cannot list functions.") return [] @@ -215,712 +165,221 @@ class GDBSession: logger.info(f"Requesting GDB function list with command: '{command}'") functions: List[str] = [] try: - # Assicurarsi che la paginazione sia disattivata è gestito in start() output = self.send_cmd(command, expect_prompt=True, timeout=timeout) - potential_function_lines = [] - current_file_context = None - # Flag per indicare se siamo nella sezione "Non-debugging symbols" in_non_debugging_symbols_section = False - + GENERIC_FUNCTION_NAMES = { + "allocate", "deallocate", "construct", "destroy", "_S_construct", "_S_destroy", + "max_size", "_S_max_size", "allocator", "~allocator", + "_Alloc_hider", "~_Alloc_hider", "_M_construct", "_M_construct_aux", + "_M_data", "_M_dispose", "_M_capacity", "_M_local_data", "_M_set_length", "_S_copy_chars", + "insert", "_M_create", "_M_emplace_back_aux", "emplace_back", + "_M_allocate", "_M_deallocate", "_M_get_Tp_allocator", "_Vector_base", "~_Vector_base", + "_Vector_impl", "~_Vector_impl", "_M_check_len", "push_back", "size", "vector", + "operator*", "operator++", "operator()", "operator[]", "operator->", "operator+", "operator=", + "operator!=", "operator==", "operator new", "operator delete", + "base", "move_iterator", "__make_move_if_noexcept_iterator", "__addressof", "forward", "move", + "__distance", "distance", "__iterator_category", + "__uninit_copy", "__uninitialized_copy", "__uninitialized_copy_a", "__uninitialized_move_if_noexcept_a", + "uninitialized_copy", "__builtin_popcount", "__builtin_clz", "__builtin_ctz", + "new", "vsnprintf", + "__tcf_0", "_GLOBAL__sub_I_main", "__static_initialization_and_destruction_0", + "char", "int", "void", + "std::string", "std::vector", "std::basic_string" + } for line in output.splitlines(): line_strip = line.strip() - if not line_strip: - in_non_debugging_symbols_section = False # Una riga vuota potrebbe resettare la sezione - continue - - if line_strip.startswith("All defined functions"): # Ignora questa intestazione comune - continue - if line_strip.startswith("File "): # Resetta contesto non-debug se incontriamo un nuovo file - in_non_debugging_symbols_section = False - file_match = re.match(r"File\s+(.+):", line_strip) - if file_match: - current_file_context = file_match.group(1).strip() - logger.debug(f"Function parsing context: File '{current_file_context}'") - continue - + if not line_strip: continue + if line_strip.startswith("All defined functions:") or line_strip.startswith("File "): + in_non_debugging_symbols_section = False; continue if line_strip.startswith("Non-debugging symbols:"): - in_non_debugging_symbols_section = True - logger.debug("Entering Non-debugging symbols section.") - continue - - # Se siamo nella sezione non-debugging, i simboli sono spesso solo indirizzo e nome + in_non_debugging_symbols_section = True; logger.debug("Entering Non-debugging symbols section."); continue if in_non_debugging_symbols_section: - # Esempio: 0x00401000 _start - m_non_debug = re.match(r"^\s*0x[0-9a-fA-F]+\s+([a-zA-Z_][\w:<>\.~]*)", line_strip) + m_non_debug = re.match(r"^\s*0x[0-9a-fA-F]+\s+([a-zA-Z_][\w:<>\.~]*?(?:::[a-zA-Z_][\w:<>\.~]*?)*)", line_strip) if m_non_debug: - func_name = m_non_debug.group(1) - if func_name not in functions: - functions.append(func_name) - logger.debug(f"Found non-debugging symbol/function: {func_name}") - continue # 
Processa la prossima riga - - - # Pattern per simboli di debug (più strutturati) - # Tentativo 1: "numero_riga: [tipo_ritorno] nome_funzione(parametri);" - m_debug_line = re.match(r"^\s*\d+:\s+(?:[\w\s:*&<>~\[\]]+\s+)?([a-zA-Z_][\w:<>\s~*&\-\[\]]*?(?:::[a-zA-Z_][\w:<>\s~*&\-\[\]]*?)*)\s*\(", line_strip) - if m_debug_line: - func_name = m_debug_line.group(1).strip() - func_name = re.sub(r'\s+const\s*$', '', func_name).strip() # Rimuovi 'const' alla fine e spazi - if func_name and func_name not in functions: - functions.append(func_name) - logger.debug(f"Found function (debug, type 1): {func_name}") + func_name = m_non_debug.group(1).strip() + if func_name and len(func_name) > 2 and not func_name.startswith("_") and not func_name.startswith("std::") : + if func_name not in functions: functions.append(func_name); logger.debug(f"Found non-debugging symbol/function: {func_name}") continue - - # Tentativo 2: "[tipo_ritorno] nome_funzione(parametri)" (senza numero riga) - m_debug_no_line = re.match(r"^\s*(?:[\w\s:*&<>~\[\]]+\s+)?([a-zA-Z_][\w:<>\s~*&\-\[\]]*?(?:::[a-zA-Z_][\w:<>\s~*&\-\[\]]*?)*)\s*\(", line_strip) - if m_debug_no_line: - func_name = m_debug_no_line.group(1).strip() - func_name = re.sub(r'\s+const\s*$', '', func_name).strip() - if func_name and func_name not in functions: - # Evita di aggiungere tipi o parole chiave come funzioni - if not (func_name in ["void", "int", "char", "short", "long", "float", "double", "bool", - "class", "struct", "enum", "union", "typename", "template"] or func_name.endswith("operator")): - functions.append(func_name) - logger.debug(f"Found function (debug, type 2): {func_name}") + m_func = re.search(r"([a-zA-Z_~][\w:<>\.~\[\]]*?(?:::[a-zA-Z_~][\w:<>\.~\[\]]*?)*)\s*\(", line_strip) + if m_func: + func_name = m_func.group(1).strip() + func_name = re.sub(r'\s+(const|volatile|noexcept)\s*$', '', func_name) + if "::" not in func_name and func_name in GENERIC_FUNCTION_NAMES: + logger.debug(f"Filtered generic un-qualified function name: {func_name}"); continue + if func_name.startswith("~") and func_name[1:] in GENERIC_FUNCTION_NAMES: + logger.debug(f"Filtered generic un-qualified destructor name: {func_name}"); continue + if func_name not in functions: functions.append(func_name); logger.debug(f"Found function: {func_name}") continue - if functions: - logger.info(f"Successfully parsed {len(functions)} function names.") - functions.sort() - elif output: # C'era output ma non abbiamo parsato nulla - logger.warning(f"Could not parse any function names from 'info functions' output, though output was received. First 200 chars of output:\n{output[:200]}") - - except (ConnectionError, TimeoutError) as e: - logger.error(f"Error listing functions from GDB: {e}", exc_info=True) - return [] - except Exception as e_parse: - logger.error(f"Error parsing 'info functions' output: {e_parse}", exc_info=True) + logger.info(f"Successfully parsed {len(functions)} function names."); functions.sort() + elif output: + logger.warning(f"Could not parse any function names from 'info functions' output. First 200 chars of output:\n{output[:200]}") + return functions + except Exception as e: + logger.error(f"Error parsing 'info functions' output (console mode): {e}", exc_info=True) return [] - return functions - - def _set_gdb_dumper_variables( - self, timeout: int = DEFAULT_GDB_OPERATION_TIMEOUT - ) -> None: - """ - Sets GDB variables for dumper script options if they are provided. - These variables will be prefixed with '$py_dumper_'. 
- """ - if not self.dumper_options or not self.child: - return - - logger.info( - f"Setting GDB variables for dumper options with timeout: {timeout}s" - ) - for key, value in self.dumper_options.items(): - # Ensure value is appropriate for GDB set command (numbers, or strings if needed) - # For now, assuming values are integers as per dumper_options structure. - if isinstance(value, (int, float, bool)): # GDB handles bools as 0/1 - gdb_var_name = ( - f"$py_dumper_{key}" # e.g., $py_dumper_max_array_elements - ) - set_command = f"set {gdb_var_name} = {value}" - try: - logger.debug(f"Setting GDB variable: {set_command}") - self.send_cmd(set_command, expect_prompt=True, timeout=timeout) - except Exception as e: - logger.error( - f"Failed to set GDB dumper variable '{gdb_var_name}' with command '{set_command}': {e}", - exc_info=True, - ) - # Decide if this is critical. For now, we log and continue. - else: - logger.warning( - f"Skipping GDB variable for dumper option '{key}': value '{value}' is not a number or bool." - ) - - def _source_gdb_dumper_script( - self, timeout: int = DEFAULT_GDB_OPERATION_TIMEOUT - ) -> None: - """ - Sources the GDB Python dumper script. - Assumes self.gdb_script_path is valid and self.child is active. - """ - if not self.gdb_script_path or not self.child: - return - - normalized_script_path = self.gdb_script_path.replace("\\", "/") - logger.info( - f"Sourcing GDB Python script: {normalized_script_path} with timeout: {timeout}s" - ) - - source_command = f"source {normalized_script_path}" - logger.debug(f"Constructed source command: [{source_command}]") - + def list_global_variables(self, regex_filter: Optional[str] = None, timeout: int = DEFAULT_GDB_OPERATION_TIMEOUT) -> List[str]: + # Implementazione per console mode (unica ora) + if not self.child or not self.is_alive(): + logger.error("GDB session not active, cannot list global variables.") + return [] + + command = "info variables" + if regex_filter: command += f" {regex_filter.strip()}" + variables: List[str] = [] try: - output_before_prompt = self.send_cmd( - source_command, expect_prompt=True, timeout=timeout - ) - - logger.info(f"--- DEBUG SOURCE OUTPUT START (Original from send_cmd) ---") - logger.info(f"Raw output for '{source_command}':\n{output_before_prompt}") - logger.info(f"--- DEBUG SOURCE OUTPUT END (Original from send_cmd) ---") - - cleaned_output = output_before_prompt # Default a non pulito - script_basename = os.path.basename(normalized_script_path) - - lines = output_before_prompt.splitlines() - cleaned_lines = list(lines) # Copia modificabile - prefix_removed_count = 0 - - logger.debug( - f"Analyzing output for problematic prefix. 
Original number of lines: {len(lines)}" - ) - - # --- Logica di Pulizia Modificata --- - # Iteriamo per trovare la prima riga che assomiglia al prefisso problematico - temp_cleaned_lines = [] - prefix_found_and_skipped = False - for i, line_content in enumerate(lines): - line_stripped = line_content.lstrip() - is_problematic_line = ( - script_basename in line_stripped - and line_stripped.startswith("<") - and "GDB_DUMPER_SCRIPT:" - not in line_content # Non rimuovere i nostri log - ) - - if is_problematic_line and not prefix_found_and_skipped: - logger.info( - f"Problematic prefix line identified and skipped: '{line_content}'" - ) - prefix_found_and_skipped = True # Salta questa riga - prefix_removed_count = ( - 1 # Segna che abbiamo rimosso/saltato qualcosa - ) - continue # Non aggiungere questa riga a temp_cleaned_lines - - temp_cleaned_lines.append(line_content) - - if prefix_found_and_skipped: - cleaned_output = "\n".join(temp_cleaned_lines) - logger.info( - f"Output after attempting prefix removal ({prefix_removed_count} line(s) effectively skipped):" - ) - logger.info( - f"--- CLEANED OUTPUT FOR ANALYSIS ---\n{cleaned_output}\n--- END CLEANED OUTPUT ---" - ) - else: - logger.info( - "No problematic prefix line identified and skipped. Using original output for analysis." - ) - # cleaned_output rimane output_before_prompt - logger.info( - f"--- OUTPUT FOR ANALYSIS (NO CLEANING APPLIED) ---\n{cleaned_output}\n--- END OUTPUT ---" - ) - # --- Fine Logica di Pulizia Modificata --- - - error_detected = False - python_exception_patterns = [ - r"Traceback \(most recent call last\):", - r"Python Exception :", - r"Error occurred in Python:", - r"SyntaxError:", - r"IndentationError:", - ] - gdb_error_patterns = [ - r"""^[^:]*: No such file or directory\.""", - r"^Error:", - ] - - for pattern in python_exception_patterns: - if re.search(pattern, cleaned_output, re.MULTILINE | re.IGNORECASE): - logger.error( - f"Python error/exception detected while sourcing GDB script '{normalized_script_path}' (pattern: '{pattern}'). Checked output:\n{cleaned_output}" - ) - error_detected = True - break - - if not error_detected: - for pattern in gdb_error_patterns: - for line_idx, line_content in enumerate( - cleaned_output.splitlines() - ): - if pattern == r"^Error:" and line_content.strip().startswith( - "Error:" - ): - logger.error( - f"GDB 'Error:' detected on line {line_idx+1} while sourcing script '{normalized_script_path}': '{line_content.strip()}'. Full cleaned output:\n{cleaned_output}" - ) - error_detected = True - break - elif pattern != r"^Error:" and re.search( - pattern, line_content, re.IGNORECASE - ): - logger.error( - f"GDB error pattern '{pattern}' detected on line {line_idx+1} while sourcing script '{normalized_script_path}': '{line_content.strip()}'. Full cleaned output:\n{cleaned_output}" - ) - error_detected = True - break - if error_detected: - break - - if error_detected: - self.gdb_script_sourced_successfully = False - logger.warning( - f"GDB dumper script '{normalized_script_path}' FAILED to source correctly due to detected errors in (cleaned) output." - ) - else: - success_marker = "GDB_DUMPER_SCRIPT: End of script reached" - problem_still_present_after_cleaning = False - # Verifica se, dopo la pulizia, la PRIMA RIGA NON VUOTA dell'output pulito - # assomiglia ancora al prefisso problematico. 
- first_non_empty_cleaned_line = "" - for line_content in cleaned_output.splitlines(): - if line_content.strip(): # Se la riga non è vuota dopo lo strip - first_non_empty_cleaned_line = line_content.lstrip() - break - - if ( - first_non_empty_cleaned_line.startswith("<") - and script_basename in first_non_empty_cleaned_line - and "GDB_DUMPER_SCRIPT:" not in first_non_empty_cleaned_line - ): - problem_still_present_after_cleaning = True - logger.warning( - f"Problematic-looking string might still be present at start of non-empty cleaned output: '{first_non_empty_cleaned_line}'" - ) - - if ( - success_marker in cleaned_output - and not problem_still_present_after_cleaning - ): - logger.info( - f"GDB script '{normalized_script_path}' sourced successfully. Output analysis complete. Success markers found. Prefix (if any) handled." - ) - self.gdb_script_sourced_successfully = True - elif ( - success_marker in cleaned_output - and problem_still_present_after_cleaning - ): - logger.error( - f"GDB script '{normalized_script_path}' sourced, success markers FOUND, but problematic prefix seems to persist. Treating as FAILED source." - ) - self.gdb_script_sourced_successfully = False - else: - logger.error( - f"GDB script '{normalized_script_path}' sourced, no explicit errors found, BUT success markers (e.g., '{success_marker}') MISSING from cleaned output. Treating as failed source." - ) - logger.debug( - f"Cleaned output that was checked (missing markers or problem persisted):\n{cleaned_output}" - ) - self.gdb_script_sourced_successfully = False - - except Exception as e: - logger.error( - f"Exception during 'source' command for GDB script '{normalized_script_path}': {e}", - exc_info=True, - ) - self.gdb_script_sourced_successfully = False - - def send_cmd( - self, - command: str, - expect_prompt: bool = True, - timeout: int = DEFAULT_GDB_OPERATION_TIMEOUT, - ) -> str: - """ - Sends a command to the GDB subprocess and waits for the prompt. - - Args: - command: The GDB command string to send. - expect_prompt: Whether to expect the GDB prompt after sending the command. - timeout: Timeout in seconds for this specific command. - - Returns: - The output from GDB before the next prompt (if expect_prompt is True). - - Raises: - ConnectionError: If GDB session is not active. - TimeoutError: If the command times out. - wexpect.EOF: If GDB exits unexpectedly. - """ - if not self.child or not self.child.isalive(): - logger.error("GDB session not started or is dead. Cannot send command.") - raise ConnectionError("GDB session not active.") - - logger.debug(f"Sending GDB command: '{command}' with timeout: {timeout}s") - try: - self.child.sendline(command) - if expect_prompt: - # Expect either the prompt, EOF, or a timeout - index = self.child.expect_exact( - [self.gdb_prompt, wexpect.EOF, wexpect.TIMEOUT], timeout=timeout - ) - output_before = ( - self.child.before - if hasattr(self.child, "before") - else "" - ) - - if index == 0: # Prompt found - logger.debug( - f"GDB output for '{command}':\n{output_before.strip() if output_before else ''}" - ) - return output_before - elif index == 1: # EOF - logger.error( - f"GDB exited unexpectedly (EOF) after command: '{command}'. 
Output: {output_before.strip() if output_before else ''}" - ) - self.child.close() - self.child = None - raise wexpect.EOF( - f"GDB exited unexpectedly after command: {command}" - ) - elif index == 2: # Timeout - # Try to get more output if a timeout occurred - current_output = output_before - try: - # Non-blocking read to grab anything GDB might have printed just before/during timeout - current_output += self.child.read_nonblocking( - size=4096, timeout=0.2 - ) - except Exception: - pass # Ignore errors on this non-blocking read attempt - logger.error( - f"Timeout ({timeout}s) executing GDB command: '{command}'. Partial output: {current_output.strip() if current_output else ''}" - ) - raise TimeoutError( - f"Timeout ({timeout}s) executing GDB command: '{command}'. Partial output: {current_output.strip() if current_output else ''}" - ) - return "" # If not expecting prompt, return empty string (or could return None) - except ( - wexpect.TIMEOUT, - TimeoutError, - ) as e_timeout: # Catch our explicit TimeoutError too - # Log already happened if it was from expect_exact index 2 - if not isinstance( - e_timeout, TimeoutError - ): # If it was a raw wexpect.TIMEOUT - logger.error( - f"Timeout during GDB command '{command}': {e_timeout}", - exc_info=True, - ) - raise TimeoutError(f"Timeout during GDB command: {command}") from e_timeout - except wexpect.EOF as e_eof: - logger.error(f"GDB EOF during command '{command}': {e_eof}", exc_info=True) - if self.child and self.child.isalive(): - self.child.close() - self.child = None - raise # Re-raise the original EOF exception - except Exception as e: - logger.error( - f"Generic error during GDB command '{command}': {e}", exc_info=True - ) - raise ConnectionError(f"Error during GDB command '{command}': {e}") from e - - def set_breakpoint( - self, location: str, timeout: int = DEFAULT_GDB_OPERATION_TIMEOUT - ) -> str: - logger.info(f"Setting breakpoint at: {location} with timeout {timeout}s") - return self.send_cmd(f"break {location}", timeout=timeout) - - def run_program( - self, params: str = "", timeout: int = DEFAULT_LONG_GDB_OPERATION_TIMEOUT - ) -> str: - run_command = "run" - if params: - run_command += f" {params.strip()}" - logger.info(f"Running program in GDB: '{run_command}' with timeout {timeout}s") - return self.send_cmd(run_command, timeout=timeout) - - def continue_execution( - self, timeout: int = DEFAULT_LONG_GDB_OPERATION_TIMEOUT - ) -> str: - logger.info(f"Continuing program execution in GDB with timeout {timeout}s.") - return self.send_cmd("continue", timeout=timeout) - - def dump_variable_to_json( - self, var_name: str, timeout: int = DEFAULT_GDB_OPERATION_TIMEOUT - ) -> Dict[str, Any]: - if not self.gdb_script_sourced_successfully: - logger.warning( - f"GDB dumper script was not sourced successfully. Cannot dump '{var_name}' to JSON via script." - ) - return { - "_gdb_tool_error": "GDB dumper script not available or failed to load." 
+ output = self.send_cmd(command, expect_prompt=True, timeout=timeout) + EXCLUDE_GLOBAL_VARS_AGGRESSIVE = { + "__CTOR_LIST__", "__DTOR_LIST__", "_data_start__", "_bss_start__", "_bss_end__", "_end__", + "__mingw_winmain_nShowCmd", "deregister_frame_fn", "__native_vcclrit_reason", + "__native_dllmain_reason", "_dowildcard", "_charmax", "_CRT_MT", "_MINGW_INSTALL_DEBUG_MATHERR", + "two_exp_digits_env", "fpi", "pmem_next", "_imp___get_invalid_parameter_handler", + "_imp___set_invalid_parameter_handler", "_imp___get_output_format", + "_imp___set_output_format", "_imp_____lc_codepage_func", "__security_cookie", + "__security_cookie_complement", "__JCR_END__", "__JCR_LIST__", "_data_end__", + "GS_ExceptionPointers", "__dyn_tls_init_callback", "CSWTCH.5", "p05", "__tens_D2A", + "__tinytens_D2A", "__bigtens_D2A", "_rt_psrelocs_start", "_RUNTIME_PSEUDO_RELOC_LIST_END__", + "__RUNTIME_PSEUDO_RELOC_LIST_END__", "_rt_psrelocs_end", "__EH_FRAME_BEGIN__", + "__FRAME_END__", "startinfo", "has_cctor", "managedapp", "mainret", "argret", + "envp", "argv", "argc", "obj", "initialized", "mingw_initcharmax", "GS_ContextRecord", + "GS_ExceptionRecord", "_newmode", "mingw_initltssuo_force", "mingw_initltsdyn_force", + "mingw_initltsdrot_force", "_tls_index", "stUserMathErr", "mingw_app_type", "was_init", + "maxSections", "the_secs", "_fmode", "__mingw_oldexcpt_handler", "key_dtor_list", + "__mingwthr_cs_init", "__mingwthr_cs", "p5s", "private_mem", "freelist", "dtoa_CS_init", + "dtoa_CritSec", "s_mbstate", "internal_mbstate", "handler", "last_value", + "msvcrt.lc_codepage", "__mingw_winmain_lpCmdLine", "__mingw_winmain_hInstance", + "hmod_libgcc", "__onexitend", "__onexitbegin", "__native_startup_lock", + "__native_startup_state", "_bss_end__", "_head_libgcc_s_dw2_1_dll", + "_head_lib32_libkernel32_a", "_head_lib32_libmsvcrt_a", "_head_libstdc___6_dll", + "hname", "_IAT_start__", "_IAT_end__", "_nm___ZSt4cout", + "libgcc_s_dw2_1_dll_iname", "_lib32_libkernel32_a_iname", + "_lib32_libmsvcrt_a_iname", "libstdc___6_dll_iname", "__crt_xc_start__", + "__xc_a", "mingw_pcppinit", "__xc_z", "__crt_xc_end__", "__crt_xi_start__", "__xi_a", + "mingw_pcinit", "__mingw_pinit", "__xi_z", "__crt_xi_end__", "__crt_xl_start__", "__xl_a", + "__xl_c", "__xl_d", "__xl_z", "__crt_xp_end__", "__crt_xp_start__", "__crt_xt_end__", + "__crt_xt_start__", "__xd_a", "__xd_z", "__tls_start__", "_tls_start", "_tls_used", + "_tls_end", "__tls_end__", + "std::piecewise_construct", "std::__ioinit", "std::integral_constant", + "std::integral_constant::value", "std::integral_constant::value", + "__gnu_cxx::__numeric_traits_floating", "__gnu_cxx::__numeric_traits_integer", + "__gnu_cxx::__numeric_traits_floating::__max_exponent10", + "__gnu_cxx::__numeric_traits_floating::__max_exponent10", + "__gnu_cxx::__numeric_traits_floating::__max_exponent10", + "__gnu_cxx::__numeric_traits_integer::__max", + "__gnu_cxx::__numeric_traits_integer::__max", + "__gnu_cxx::__numeric_traits_integer::__min", + "__gnu_cxx::__numeric_traits_integer::__max", + "__gnu_cxx::__numeric_traits_integer::__min", + "__gnu_cxx::__numeric_traits_integer::__digits", + "p", "d", "t", "f", "G", "C", "s", "h", "m", "a", "e", "o", "i", "w", "k", "l", "_" } - - logger.info( - f"Dumping variable '{var_name}' to JSON using 'dump_json' GDB command with timeout {timeout}s." - ) - try: - # The dump_json command itself might take time, so use a potentially longer timeout. 
- raw_gdb_output = self.send_cmd( - f"dump_json {var_name}", expect_prompt=True, timeout=timeout - ) - - # Search for the JSON block delimiters - match = re.search( - r"START_JSON_OUTPUT\s*([\s\S]*?)\s*END_JSON_OUTPUT", - raw_gdb_output, - re.DOTALL, - ) - if match: - json_str = match.group(1).strip() - logger.debug( - f"JSON string received from GDB 'dump_json' (first 500 chars): {json_str[:500]}..." - ) - try: - parsed_data = json.loads(json_str) - if ( - isinstance(parsed_data, dict) - and "gdb_script_error" in parsed_data - ): - error_detail = parsed_data.get( - "details", parsed_data["gdb_script_error"] - ) - logger.error( - f"Error reported by GDB dumper script for '{var_name}': {error_detail}" - ) - # Include raw GDB output in the error dict if script indicated an error - parsed_data["raw_gdb_output_on_script_error"] = raw_gdb_output - return parsed_data - except json.JSONDecodeError as jde: - logger.error( - f"Failed to decode JSON from GDB 'dump_json' for '{var_name}'. Error: {jde}. Raw string: '{json_str}'" - ) - return { - "_gdb_tool_error": "JSONDecodeError from GDB script output", - "details": str(jde), - "raw_response": json_str, - } - else: # Delimiters not found - logger.error( - f"Delimiters START_JSON_OUTPUT/END_JSON_OUTPUT not found in 'dump_json' output for '{var_name}'." - ) - logger.debug( - f"Full GDB output for 'dump_json {var_name}':\n{raw_gdb_output}" - ) - # Check if the output suggests a GDB or Python script error directly - if ( - "Traceback (most recent call last):" in raw_gdb_output - or "gdb.error:" in raw_gdb_output - or ( - raw_gdb_output.strip() - and raw_gdb_output.strip().splitlines()[0].startswith("Error:") - ) - ): - return { - "_gdb_tool_error": "Error detected during GDB 'dump_json' script execution (delimiters missing)", - "raw_gdb_output": raw_gdb_output, - } - return { - "_gdb_tool_error": "JSON delimiters not found in GDB script output (no obvious GDB error in output)", - "raw_gdb_output": raw_gdb_output, - } - - except TimeoutError: # Timeout from send_cmd - logger.error(f"Timeout dumping variable '{var_name}' with 'dump_json'.") - return { - "_gdb_tool_error": f"Timeout during GDB 'dump_json {var_name}' command" - } - except Exception as e: # Other exceptions (e.g., ConnectionError from send_cmd) - logger.error( - f"Generic exception dumping variable '{var_name}' with 'dump_json': {e}", - exc_info=True, - ) - return { - "_gdb_tool_error": f"Generic exception during 'dump_json {var_name}': {str(e)}" - } - - def kill_program(self, timeout: int = DEFAULT_GDB_OPERATION_TIMEOUT) -> str: - logger.info(f"Sending 'kill' command to GDB with timeout {timeout}s.") - full_output = "" - if not self.child or not self.child.isalive(): - logger.warning("Cannot send 'kill', GDB session not active.") - return "" - try: - self.child.sendline("kill") - full_output += "kill\n" - - # Patterns for expect_list: 0=confirmation, 1=prompt, 2=EOF, 3=TIMEOUT - # Using re.compile for robustness with expect_list - patterns = [ - re.compile(r"Kill the program being debugged\s*\?\s*\(y or n\)\s*"), - re.compile(re.escape(self.gdb_prompt)), - wexpect.EOF, # Questa è una costante, non una regex - wexpect.TIMEOUT, # Questa è una costante - ] - - confirmation_timeout = max(5, timeout // 2) - logger.debug( - f"Kill: Expecting confirmation or prompt with timeout {confirmation_timeout}s" - ) - index = self.child.expect_list(patterns, timeout=confirmation_timeout) - output_segment = self.child.before if hasattr(self.child, "before") else "" - full_output += output_segment - - if 
index == 0: - logger.info("Kill: GDB asked for kill confirmation. Sending 'y'.") - self.child.sendline("y") - full_output += "y\n" - - # Wait for the final prompt after 'y' - logger.debug( - f"Kill: Expecting GDB prompt after 'y' with timeout {confirmation_timeout}s" - ) - self.child.expect_exact(self.gdb_prompt, timeout=confirmation_timeout) - output_segment_after_y = ( - self.child.before if hasattr(self.child, "before") else "" - ) - full_output += output_segment_after_y - logger.info("Kill: Kill confirmed and acknowledged by GDB.") - elif index == 1: - logger.info( - "Kill: GDB returned to prompt after 'kill' (program likely not running or no confirmation needed)." - ) - elif index == 2: - logger.warning( - "Kill: GDB exited (EOF) during 'kill' command/confirmation." - ) - self.child = None - # Non sollevare EOF qui, ma segnala l'output - full_output += "" - elif index == 3: - logger.error( - f"Kill: Timeout waiting for kill confirmation or prompt. Output so far: {output_segment.strip()}" - ) - full_output += "" - - return full_output.strip() - - except (TimeoutError, wexpect.EOF, ConnectionError) as e: - logger.warning( - f"Kill: Exception during 'kill' (detail: {type(e).__name__} - {e}). Output: {full_output.strip()}" - ) - return f"" + for line in output.splitlines(): + line_strip = line.strip() + if not line_strip: continue + if line_strip.startswith("All defined variables:") or line_strip.startswith("File "): continue + m = re.match(r"^\s*0x[0-9a-fA-F]+\s+([a-zA-Z_][\w:<>\[\]\.~]*?(?:::[a-zA-Z_][\w:<>\[\]\.~]*?)*)", line_strip) + if m: + var_name = m.group(1).strip() + if var_name in EXCLUDE_GLOBAL_VARS_AGGRESSIVE: + logger.debug(f"Filtered excluded global variable: {var_name}"); continue + if len(var_name) <= 2 and not var_name.isalnum(): + logger.debug(f"Filtered very short non-alphanumeric variable: {var_name}"); continue + if len(var_name) <= 2 and var_name.isalnum() and var_name not in ["_dl_vsprintf_buffer", "MyData", "id"]: + logger.debug(f"Filtered very short alphanumeric variable by content: {var_name}"); continue + if var_name.startswith("_imp__"): + logger.debug(f"Filtered _imp__ variable: {var_name}"); continue + if var_name.startswith("__") or (var_name.startswith("_") and "_" in var_name[1:]) : + logger.debug(f"Filtered internal symbol prefix: {var_name}"); continue + if var_name not in variables: variables.append(var_name); logger.debug(f"Found global variable: {var_name}") + continue + if variables: + logger.info(f"Successfully parsed {len(variables)} global variables."); variables.sort() + elif output: + logger.warning(f"Could not parse any global variables from 'info variables' output. First 200 chars of output:\n{output[:200]}") + return variables except Exception as e: - logger.error( - f"Kill: Unexpected error during 'kill': {e}. Output: {full_output.strip()}", - exc_info=True, - ) - return ( - f"" - ) + logger.error(f"Error parsing 'info variables' output (console mode): {e}", exc_info=True) + return [] - def quit(self, timeout: int = DEFAULT_GDB_OPERATION_TIMEOUT) -> None: - if self.child and self.child.isalive(): - logger.info( - f"Attempting GDB quit sequence with overall timeout {timeout}s." - ) - phase_timeout = max(3, timeout // 3) - - try: - self.child.sendline("quit") - logger.debug("Quit: Sent 'quit' command to GDB.") - - # Patterns for expect_list. EOF and TIMEOUT are special constants. - # Regexes should be pre-compiled for reliability with expect_list if issues persist. 
- expect_patterns_quit = [ - re.compile(re.escape(self.gdb_prompt)), # 0: Prompt GDB - re.compile( - r"Quit anyway\s*\?\s*\(y or n\)\s*" - ), # 1: Conferma "Quit anyway?" - wexpect.EOF, # 2: EOF - wexpect.TIMEOUT, # 3: TIMEOUT - ] - - logger.debug( - f"Quit: Expecting one of the patterns with timeout {phase_timeout}s" - ) - index = self.child.expect_list( - expect_patterns_quit, timeout=phase_timeout - ) - - response_after_quit = ( - self.child.before if hasattr(self.child, "before") else "" - ) - logger.debug( - f"Quit: GDB response after 'quit' (index {index}): {response_after_quit!r}" - ) - - if index == 1: # "Quit anyway?" matched - logger.info("Quit: GDB asked for quit confirmation. Sending 'y'.") - self.child.sendline("y") - try: - # After 'y', expect EOF or TIMEOUT (if GDB hangs). Prompt means quit failed. - final_expect_patterns_y = [ - re.compile(re.escape(self.gdb_prompt)), # 0 - wexpect.EOF, # 1 - wexpect.TIMEOUT, # 2 - ] - final_index = self.child.expect_list( - final_expect_patterns_y, timeout=phase_timeout - ) - final_response = ( - self.child.before if hasattr(self.child, "before") else "" - ) - logger.debug( - f"Quit: GDB response after 'y' (index {final_index}): {final_response!r}" - ) - if final_index == 0: - logger.warning( - "Quit: GDB did not quit after 'y' confirmation and returned to prompt." - ) - elif final_index == 1: - logger.info( - "Quit: GDB exited after 'y' confirmation (EOF received)." - ) - elif final_index == 2: - logger.info( - "Quit: Timeout waiting for GDB to exit after 'y'. Assuming exited or hung." - ) - except wexpect.TIMEOUT: - logger.info( - "Quit: Timeout (expecting EOF/Prompt) after 'y'. Assuming GDB exited or hung." - ) - except wexpect.EOF: - logger.info( - "Quit: GDB exited (EOF expecting EOF/Prompt) after 'y' confirmation." - ) - elif index == 0: - logger.warning( - "Quit: GDB did not quit (returned to prompt, no confirmation asked)." - ) - elif index == 2: - logger.info( - "Quit: GDB exited immediately after 'quit' command (EOF received, no confirmation)." - ) - elif index == 3: - logger.warning( - "Quit: Timeout waiting for GDB response after 'quit' command (no confirmation). GDB might be hung or exited." - ) - - except wexpect.TIMEOUT: - logger.warning( - "Quit: Timeout on initial expect after 'quit'. Assuming GDB exited or hung." - ) - except wexpect.EOF: - logger.info("Quit: EOF on initial expect after 'quit'. GDB exited.") - except Exception as e_quit_main: - logger.error( - f"Quit: Exception during GDB quit sequence: {e_quit_main}", - exc_info=True, - ) - finally: - if self.child and self.child.isalive(): - logger.warning( - "Quit: GDB process is still alive after quit attempts. Closing connection." - ) - try: - self.child.close() # Rimosso force=True - except Exception as e_close_final: - logger.error( - f"Quit: Error during final GDB child close: {e_close_final}", - exc_info=True, - ) - elif self.child and not self.child.isalive(): - logger.info( - "Quit: GDB process was already not alive before final close call." 
- ) - - self.child = None - self.gdb_script_sourced_successfully = False - logger.info("Quit: GDB session resources (controller-side) released.") - else: - logger.info("Quit: GDB session quit called, but no active child process.") - - def is_alive(self) -> bool: - return self.child is not None and self.child.isalive() + def list_types(self, regex_filter: Optional[str] = None, timeout: int = DEFAULT_GDB_OPERATION_TIMEOUT) -> List[str]: + # Implementazione per console mode (unica ora) + if not self.child or not self.is_alive(): + logger.error("GDB session not active, cannot list types.") + return [] + + command = "info types" + if regex_filter: command += f" {regex_filter.strip()}" + types: List[str] = [] + EXCLUDED_TYPES_CONSOLE_MODE = { + "bool", "char", "char16_t", "char32_t", "decltype(nullptr)", "double", "float", "int", "long", + "long double", "long long", "short", "signed char", "sizetype", "unsigned char", + "unsigned int", "unsigned long", "unsigned long long", "unsigned short", "wchar_t", + "void", "__float128", "__unknown__", "std", "std::allocator", "std::char_traits", "std::allocator_traits", "std::pointer_traits", + "std::__ptrtr_not_void", "std::__alloctr_rebind", "std::integral_constant", + "std::piecewise_construct_t", "std::__cxx11::basic_string", "std::basic_ostream", + "std::ios_base::Init", "std::__exception_ptr", "std::_Destroy_aux", + "std::bidirectional_iterator_tag", "std::forward_iterator_tag", "std::input_iterator_tag", + "std::random_access_iterator_tag", "std::iterator_traits", "std::move_iterator", + "std::literals", "std::literals::string_literals", + "__gnu_cxx::__numeric_traits_floating", "__gnu_cxx::__numeric_traits_integer", + "__gnu_cxx::__ops", "__gnu_cxx::new_allocator", "__gnu_cxx::__normal_iterator", + "__mingwthr_run_key_dtors", "mainCRTStartup", "WinMainCRTStartup", + "_GLOBAL__sub_I_main", "__static_initialization_and_destruction_0", "__tcf_0", + "func_ptr", "std::__false_type", "enum", "ix86_arch_indices", "ix86_tune_indices", + "__gnu_cxx::__alloc_traits", + "_Ios_Fmtflags", "_Ios_Iostate", "_Ios_Openmode", "_Ios_Seekdir", + "_Destroy_aux", "_Construct", "__uninit_copy", "__uninitialized_copy", + "__uninitialized_copy_a", "__uninitialized_move_if_noexcept_a", "uninitialized_copy", + "_Vector_base", "_M_allocate", "_M_deallocate", "_M_get_Tp_allocator", + "_Vector_impl", "~_Vector_impl", "_M_check_len", + "_M_emplace_back_aux", "__addressof", "forward", "move", "max", "__distance", "distance", + "__iterator_category", "operator", "std::string", "std::vector", "std::basic_string", + "std::tuple", "std::pair", "std::map", "std::set", "__pformat", "__mingw", + "_imp", "_rt", "_head", "__crt", "__xl", "__xc", "__xi", "__xd", "__tls", + "__JCR", "__EH", "__FRAME", "CSWTCH", "p5s", "dtoa", "mbstate", "fpreset", + "wcslen", "strchr", "strlen", "strerror", "vfprintf", "fprintf", "fputc", "fwrite", + "getenv", "localeconv", "malloc", "free", "memcpy", "setlocale", "signal", + "strncmp", "atoi", "calloc", "abort", "exit", "__mingw_vsnprintf", "__mingw_pformat", + "_ValidateImageBase", "_FindPESection", "_FindPESectionByName", + "__mingw_GetSectionForAddress", "__mingw_GetSectionCount", "_FindPESectionExec", + "_GetPEImageBase", "_IsNonwritableInCurrentImage", "__mingw_enum_import_library_names", + "__register_frame_info", "__deregister_frame_info", "_Unwind_Resume", + "__mingw_raise_matherr", "_matherr", "_fpreset", "_decode_pointer", + "_encode_pointer", "mark_section_writable", "_pei386_runtime_relocator", "_gnu_exception_handler", + 
"__mingwthr_run_key_dtors.part.0", "___w64_mingwthr_add_key_dtor", "___w64_mingwthr_remove_key_dtor", + "__mingw_TLScallback", "_ValidateImageBase.part.0", + "MyData", # Se MyData è la tua classe e vuoi vederla qui, rimuovila da EXCLUDED_TYPES + } + try: + output = self.send_cmd(command, expect_prompt=True, timeout=timeout) + for line in output.splitlines(): + line_strip = line.strip() + if not line_strip: continue + if line_strip.startswith("All defined types:") or line_strip.startswith("File ") or line_strip.startswith("") or line_strip.startswith("enum ix86_"): continue + m_keyword_type = re.match(r"^(?:struct|class|enum|union)\s+([a-zA-Z_~][\w:<>\.~\[\]]*?(?:::[a-zA-Z_~][\w:<>\.~\[\]]*?)*)\s*(?:{|$)", line_strip) + if m_keyword_type: + type_name = m_keyword_type.group(1).strip() + if type_name and type_name not in EXCLUDED_TYPES_CONSOLE_MODE and not type_name.startswith("__"): + if type_name not in types: types.append(type_name); logger.debug(f"Found type (keyword): {type_name}") + continue + m_typedef = re.match(r"^typedef\s+.*\s+([a-zA-Z_]\w*);", line_strip) + if m_typedef: + typedef_name = m_typedef.group(1).strip() + if typedef_name and typedef_name not in EXCLUDED_TYPES_CONSOLE_MODE and not typedef_name.startswith("__"): + if typedef_name not in types: types.append(typedef_name); logger.debug(f"Found type (typedef): {typedef_name}") + continue + m_direct_type = re.match(r"^([a-zA-Z_~][\w:<>\.,\s\[\]]*?(?:::[a-zA-Z_~][\w:<>\.,\s\[\]]*?)*)(?:;)?$", line_strip) + if m_direct_type: + direct_type_name = m_direct_type.group(1).strip() + direct_type_name = re.sub(r'<\s*char\s*(?:,\s*std::char_traits)?(?:,\s*std::allocator)?\s*>$', '', direct_type_name) + if direct_type_name and direct_type_name not in EXCLUDED_TYPES_CONSOLE_MODE: + if direct_type_name.startswith("_") and not direct_type_name.startswith("std::"): continue + if len(direct_type_name) > 60: continue + if direct_type_name not in types: types.append(direct_type_name); logger.debug(f"Found type (direct): {direct_type_name}") + continue + if types: logger.info(f"Successfully parsed {len(types)} types."); types.sort() + elif output: logger.warning(f"Could not parse any types from 'info types' output. First 200 chars:\n{output[:200]}") + return types + except Exception as e: logger.error(f"Error parsing 'info types' output (console mode): {e}", exc_info=True); return [] \ No newline at end of file diff --git a/cpp_python_debug/core/symbol_analyzer.py b/cpp_python_debug/core/symbol_analyzer.py index f4be3c7..32b0b67 100644 --- a/cpp_python_debug/core/symbol_analyzer.py +++ b/cpp_python_debug/core/symbol_analyzer.py @@ -34,18 +34,7 @@ class SymbolAnalyzer: target_exe_path: str, progress_callback: Optional[Callable[[str], None]] = None, status_callback: Optional[Callable[[str], None]] = None) -> Dict[str, Any]: - """ - Performs a full symbol analysis of the target executable using GDB. - - Args: - target_exe_path: The full path to the target executable. - progress_callback: A callback function (str) -> None to log detailed progress. - status_callback: A callback function (str) -> None to update general status. - - Returns: - A dictionary containing all analyzed symbol data, or an empty dict on critical failure. 
- """ - # Inizializza la struttura dati completa per l'analisi + analysis_data_dict: Dict[str, Any] = { "analyzed_executable_path": target_exe_path, "executable_checksum": None, @@ -55,12 +44,12 @@ class SymbolAnalyzer: "symbols": { "functions": [], "functions_count": 0, - "global_variables": [], # Per Iterazione 3 - "global_variables_count": 0,# Per Iterazione 3 - "types": [], # Per Iterazione 3 - "types_count": 0, # Per Iterazione 3 - "source_files": [], # Per Iterazione 3 - "source_files_count": 0 # Per Iterazione 3 + "global_variables": [], + "global_variables_count": 0, + "types": [], + "types_count": 0, + "source_files": [], # Placeholder per futura iterazione + "source_files_count": 0 # Placeholder per futura iterazione } } @@ -79,8 +68,9 @@ class SymbolAnalyzer: self._temp_gdb_session = GDBSession( gdb_path=self.gdb_exe_path, executable_path=target_exe_path, - gdb_script_full_path=None, # Non serve lo script dumper per l'analisi - dumper_options={} + gdb_script_full_path=None, + dumper_options={}, + use_mi_mode=True ) startup_timeout = self._get_timeout("timeouts", "gdb_start", 30) @@ -98,28 +88,39 @@ class SymbolAnalyzer: log_progress(f"GDB Version: {analysis_data_dict['gdb_version_info']}") set_status("Fetching function list..."); log_progress("Fetching function list from GDB...") - # Diamo più tempo per 'info functions' in caso di librerie grandi functions = self._temp_gdb_session.list_functions(timeout=command_timeout * 4) analysis_data_dict["symbols"]["functions"] = functions analysis_data_dict["symbols"]["functions_count"] = len(functions) log_progress(f"Found {len(functions)} functions.") - # --- In future iterations, add calls for other symbol types (variables, types, sources) --- - # set_status("Fetching global variables..."); log_progress("Fetching global variables...") - # global_vars = self._temp_gdb_session.list_global_variables(timeout=command_timeout * 2) - # analysis_data_dict["symbols"]["global_variables"] = global_vars - # analysis_data_dict["symbols"]["global_variables_count"] = len(global_vars) - # log_progress(f"Found {len(global_vars)} global variables.") - # ... (similarly for types and source_files) ... 
+            # --- NEW: Fetch global variables ---
+            set_status("Fetching global variables..."); log_progress("Fetching global variables from GDB...")
+            global_variables = self._temp_gdb_session.list_global_variables(timeout=command_timeout * 2)
+            analysis_data_dict["symbols"]["global_variables"] = global_variables
+            analysis_data_dict["symbols"]["global_variables_count"] = len(global_variables)
+            log_progress(f"Found {len(global_variables)} global variables.")
+
+            # --- NEW: Fetch types ---
+            set_status("Fetching data types..."); log_progress("Fetching data types from GDB...")
+            types = self._temp_gdb_session.list_types(timeout=command_timeout * 2)
+            analysis_data_dict["symbols"]["types"] = types
+            analysis_data_dict["symbols"]["types_count"] = len(types)
+            log_progress(f"Found {len(types)} types.")
+
+            # --- Placeholder for a future iteration: source files ---
+            # set_status("Fetching source files..."); log_progress("Fetching source files from GDB...")
+            # source_files = self._temp_gdb_session.list_source_files(timeout=command_timeout * 2)
+            # analysis_data_dict["symbols"]["source_files"] = source_files
+            # analysis_data_dict["symbols"]["source_files_count"] = len(source_files)
+            # log_progress(f"Found {len(source_files)} source files.")

             set_status("Calculating file checksum and timestamp..."); log_progress("Calculating file checksum and timestamp...")
-            # Utilizza la funzione esterna calculate_file_checksum
             analysis_data_dict["executable_checksum"] = calculate_file_checksum(target_exe_path)
             try:
                 mtime = os.path.getmtime(target_exe_path)
                 analysis_data_dict["executable_timestamp"] = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(mtime))
-            except OSError: pass # Se il file non esiste (già controllato), o permessi. Lascia N/A
+            except OSError: pass
             log_progress(f"Checksum (MD5): {analysis_data_dict['executable_checksum'] or 'N/A'}")
             log_progress(f"File Timestamp: {analysis_data_dict['executable_timestamp']}")
@@ -133,7 +134,7 @@ class SymbolAnalyzer:
             logger.error(f"Error during symbol analysis for '{target_exe_path}': {e}", exc_info=True)
             log_progress(f"\nCRITICAL ERROR during analysis: {type(e).__name__} - {e}")
             set_status(f"Error during analysis: {e}")
-            return {} # Ritorna un dizionario vuoto in caso di errore critico
+            return {}
         finally:
             if self._temp_gdb_session and self._temp_gdb_session.is_alive():
                 log_progress("Closing GDB session..."); set_status("Closing GDB session...")
diff --git a/cpp_python_debug/gui/profile_manager_window.py b/cpp_python_debug/gui/profile_manager_window.py
index bd9478c..823a3bb 100644
--- a/cpp_python_debug/gui/profile_manager_window.py
+++ b/cpp_python_debug/gui/profile_manager_window.py
@@ -45,6 +45,7 @@ DEFAULT_PROFILE = {


 class ProfileManagerWindow(tk.Toplevel):
+    # ... (__init__ unchanged) ...
     def __init__(self, parent: 'GDBGui', app_settings: 'AppSettings'):
         super().__init__(parent)
         self.parent_window = parent
@@ -72,8 +73,8 @@ class ProfileManagerWindow(tk.Toplevel):
 
         # StringVars per i conteggi
         self.functions_count_var = tk.StringVar(value="Functions: N/A")
-        # self.variables_count_var = tk.StringVar(value="Globals: N/A") # Futuro
-        # self.types_count_var = tk.StringVar(value="Types: N/A")       # Futuro
+        self.variables_count_var = tk.StringVar(value="Globals: N/A")  # NEW
+        self.types_count_var = tk.StringVar(value="Types: N/A")        # NEW
         # self.sources_count_var = tk.StringVar(value="Sources: N/A") # Futuro
 
         self._load_profiles_from_settings()
@@ -90,26 +91,6 @@ class ProfileManagerWindow(tk.Toplevel):
         self.target_exe_var.trace_add("write", lambda *args: self._on_target_exe_changed_in_form())
         self.program_params_var.trace_add("write", self._mark_form_as_modified)
 
-    def _mark_form_as_modified(self, *args):
-        self._current_profile_modified_in_form = True
-
-    def _on_target_exe_changed_in_form(self, *args):
-        self._mark_form_as_modified(*args)
-        self._update_analysis_status_display()
-
-    def _load_profiles_from_settings(self) -> None:
-        self._profiles_data = []
-        loaded_profiles = self.app_settings.get_profiles()
-        for profile_dict in loaded_profiles:
-            copied_profile = json.loads(json.dumps(profile_dict)) # Deep copy
-            if "actions" not in copied_profile or not isinstance(copied_profile["actions"], list):
-                copied_profile["actions"] = []
-            if "symbol_analysis" not in copied_profile:
-                copied_profile["symbol_analysis"] = None
-            self._profiles_data.append(copied_profile)
-        self._profiles_list_changed_overall = False
-        logger.debug(f"Loaded {len(self._profiles_data)} profiles into ProfileManagerWindow.")
-
     def _create_widgets(self) -> None:
         main_frame = ttk.Frame(self, padding="10")
         main_frame.pack(expand=True, fill=tk.BOTH)
@@ -196,15 +177,20 @@ class ProfileManagerWindow(tk.Toplevel):
         self.view_functions_button.grid(row=row_s, column=1, padx=(10,5), pady=2, sticky="w")
         row_s += 1
 
-        # Placeholder per futuri conteggi e bottoni (Iterazione 3)
-        # ttk.Label(symbols_summary_frame, textvariable=self.variables_count_var).grid(row=row_s, column=0, sticky="w", padx=5, pady=2)
-        # self.view_variables_button = ttk.Button(symbols_summary_frame, text="View...", command=self._view_analyzed_variables, state=tk.DISABLED, width=8)
-        # self.view_variables_button.grid(row=row_s, column=1, padx=(10,5), pady=2, sticky="w")
-        # row_s += 1
-        # ... (e così via per tipi e sorgenti)
+        # --- NEW: counts and "View..." buttons for global variables and types ---
+        ttk.Label(symbols_summary_frame, textvariable=self.variables_count_var).grid(row=row_s, column=0, sticky="w", padx=5, pady=2)
+        self.view_variables_button = ttk.Button(symbols_summary_frame, text="View...", command=self._view_analyzed_variables, state=tk.DISABLED, width=8)
+        self.view_variables_button.grid(row=row_s, column=1, padx=(10,5), pady=2, sticky="w")
+        row_s += 1
+
+        ttk.Label(symbols_summary_frame, textvariable=self.types_count_var).grid(row=row_s, column=0, sticky="w", padx=5, pady=2)
+        self.view_types_button = ttk.Button(symbols_summary_frame, text="View...", command=self._view_analyzed_types, state=tk.DISABLED, width=8)
+        self.view_types_button.grid(row=row_s, column=1, padx=(10,5), pady=2, sticky="w")
+        row_s += 1
+        # --- END of new widgets ---
 
         actions_ui_frame = ttk.LabelFrame(right_pane, text="Debug Actions", padding="10")
-        actions_ui_frame.grid(row=3, column=0, sticky="nsew", pady=5) # Riga 3
+        actions_ui_frame.grid(row=3, column=0, sticky="nsew", pady=5)
         actions_ui_frame.rowconfigure(0, weight=1)
         actions_ui_frame.columnconfigure(0, weight=1)
         actions_ui_frame.columnconfigure(1, weight=0)
@@ -390,6 +376,31 @@ class ProfileManagerWindow(tk.Toplevel):
         self._update_analysis_status_display()
         logger.info(f"Profile '{profile_name}' (index {profile_index}) basic details updated.")
         return True
+
+    def _load_profiles_from_settings(self) -> None:
+        """
+        Loads profiles from application settings into the internal _profiles_data list.
+        Ensures deep copy and initializes new keys like 'symbol_analysis' if missing.
+        """
+        self._profiles_data = []
+        loaded_profiles = self.app_settings.get_profiles()  # this returns a copy
+        for profile_dict in loaded_profiles:
+            # Make a robust deep copy of the profile dictionary so the AppSettings data
+            # is not modified until an explicit save.
+            copied_profile = json.loads(json.dumps(profile_dict))
+
+            # Make sure the keys required by the profile structure exist,
+            # providing default values when missing.
+ if "actions" not in copied_profile or not isinstance(copied_profile["actions"], list): + copied_profile["actions"] = [] + + if "symbol_analysis" not in copied_profile: + copied_profile["symbol_analysis"] = None # Inizializza la chiave di analisi simboli + + self._profiles_data.append(copied_profile) + + self._profiles_list_changed_overall = False # Reset della flag di modifica complessiva + logger.debug(f"Loaded {len(self._profiles_data)} profiles into ProfileManagerWindow.") def _new_profile(self) -> None: if self._selected_profile_index is not None and self._current_profile_modified_in_form: @@ -446,6 +457,10 @@ class ProfileManagerWindow(tk.Toplevel): self._populate_profiles_listbox() self._select_profile_by_index(len(self._profiles_data) - 1) self._mark_form_as_modified() + + def _mark_form_as_modified(self, *args): + """Callback to mark the current profile form as modified when a StringVar is changed.""" + self._current_profile_modified_in_form = True def _delete_profile(self) -> None: if self._selected_profile_index is None or not self._profiles_data: return @@ -652,6 +667,10 @@ class ProfileManagerWindow(tk.Toplevel): self.analyse_symbols_button.config(state=tk.DISABLED) self.functions_count_var.set("Functions: N/A") self.view_functions_button.config(state=tk.DISABLED) + self.variables_count_var.set("Globals: N/A") # Reset + self.view_variables_button.config(state=tk.DISABLED) # Reset + self.types_count_var.set("Types: N/A") # Reset + self.view_types_button.config(state=tk.DISABLED) # Reset return profile = self._profiles_data[self._selected_profile_index] @@ -661,8 +680,14 @@ class ProfileManagerWindow(tk.Toplevel): details_text_lines = [f"Target in Form: {exe_display_name}"] status_text = "Symbol Analysis: " status_color = "blue" + + # Inizializza tutti i conteggi a N/A e bottoni View a DISABLED funcs_count_text = "Functions: N/A" view_funcs_btn_state = tk.DISABLED + vars_count_text = "Globals: N/A" + view_vars_btn_state = tk.DISABLED + types_count_text = "Types: N/A" + view_types_btn_state = tk.DISABLED analysis_button_state = tk.DISABLED if target_exe_in_form and os.path.isfile(target_exe_in_form): @@ -677,11 +702,20 @@ class ProfileManagerWindow(tk.Toplevel): analysis_data = profile.get("symbol_analysis") if analysis_data and isinstance(analysis_data, dict): symbols_dict = analysis_data.get("symbols", {}) + + # Popola i conteggi e abilita i bottoni View se i dati ci sono num_functions = symbols_dict.get("functions_count", 0) funcs_count_text = f"Functions: {num_functions}" - if num_functions > 0 : - view_funcs_btn_state = tk.NORMAL + if num_functions > 0 : view_funcs_btn_state = tk.NORMAL + num_variables = symbols_dict.get("global_variables_count", 0) + vars_count_text = f"Globals: {num_variables}" + if num_variables > 0 : view_vars_btn_state = tk.NORMAL + + num_types = symbols_dict.get("types_count", 0) + types_count_text = f"Types: {num_types}" + if num_types > 0 : view_types_btn_state = tk.NORMAL + saved_checksum = analysis_data.get("executable_checksum") saved_analysis_ts_str = analysis_data.get("analysis_timestamp") saved_exe_at_analysis = analysis_data.get("analyzed_executable_path", "Unknown") @@ -698,18 +732,24 @@ class ProfileManagerWindow(tk.Toplevel): if os.path.normpath(saved_exe_at_analysis) != os.path.normpath(target_exe_in_form): status_text += "TARGET CHANGED since last analysis. RE-ANALYSIS RECOMMENDED." 
status_color = "orange red" - view_funcs_btn_state = tk.DISABLED + view_funcs_btn_state = tk.DISABLED # Disabilita bottoni View se il target è cambiato + view_vars_btn_state = tk.DISABLED + view_types_btn_state = tk.DISABLED elif saved_checksum and current_checksum_for_form_exe and saved_checksum == current_checksum_for_form_exe: status_text += "Up-to-date." status_color = "dark green" elif saved_checksum and current_checksum_for_form_exe and saved_checksum != current_checksum_for_form_exe: status_text += "EXECUTABLE CHANGED since last analysis. RE-ANALYSIS REQUIRED." status_color = "red" - view_funcs_btn_state = tk.DISABLED + view_funcs_btn_state = tk.DISABLED # Disabilita bottoni View se l'eseguibile è cambiato + view_vars_btn_state = tk.DISABLED + view_types_btn_state = tk.DISABLED else: status_text += "Status unclear. Consider re-analysing." status_color = "orange red" - view_funcs_btn_state = tk.DISABLED + view_funcs_btn_state = tk.DISABLED # Disabilita bottoni View se lo stato è incerto + view_vars_btn_state = tk.DISABLED + view_types_btn_state = tk.DISABLED else: status_text += "Not performed. Click 'Analyse' to generate." status_color = "blue" @@ -721,6 +761,10 @@ class ProfileManagerWindow(tk.Toplevel): self.functions_count_var.set(funcs_count_text) self.view_functions_button.config(state=view_funcs_btn_state) + self.variables_count_var.set(vars_count_text) # Aggiorna + self.view_variables_button.config(state=view_vars_btn_state) # Aggiorna + self.types_count_var.set(types_count_text) # Aggiorna + self.view_types_button.config(state=view_types_btn_state) # Aggiorna def _trigger_symbol_analysis(self) -> None: if self._selected_profile_index is None: @@ -742,7 +786,6 @@ class ProfileManagerWindow(tk.Toplevel): self.progress_dialog = SymbolAnalysisProgressDialog(self) - # Crea l'istanza di SymbolAnalyzer symbol_analyzer = SymbolAnalyzer(gdb_exe_path, self.app_settings) analysis_thread = threading.Thread( @@ -753,10 +796,10 @@ class ProfileManagerWindow(tk.Toplevel): analysis_thread.start() def _perform_symbol_analysis_thread(self, profile_to_update: Dict[str, Any], - target_exe_path: str, symbol_analyzer: SymbolAnalyzer, # Ora prende SymbolAnalyzer + target_exe_path: str, symbol_analyzer: SymbolAnalyzer, progress_dialog: SymbolAnalysisProgressDialog): - analysis_data_dict: Dict[str, Any] = {} # Sarà popolato da SymbolAnalyzer.analyze() + analysis_data_dict: Dict[str, Any] = {} analysis_succeeded_overall = False def gui_log(msg: str): @@ -771,14 +814,13 @@ class ProfileManagerWindow(tk.Toplevel): gui_log(f"Starting symbol analysis for: {os.path.basename(target_exe_path)}") gui_set_status(f"Analyzing {os.path.basename(target_exe_path)}...") - # Chiama il metodo analyze di SymbolAnalyzer analysis_data_dict = symbol_analyzer.analyze( target_exe_path=target_exe_path, progress_callback=gui_log, status_callback=gui_set_status ) - if analysis_data_dict: # Se non è un dizionario vuoto (errore critico in SymbolAnalyzer) + if analysis_data_dict: analysis_succeeded_overall = True gui_set_status("Symbol analysis successfully completed."); gui_log("\nSymbol analysis successfully completed.") else: @@ -813,6 +855,7 @@ class ProfileManagerWindow(tk.Toplevel): self._update_analysis_status_display() + # --- Metodi per Visualizzare Simboli Analizzati --- def _view_analyzed_functions(self) -> None: if self._selected_profile_index is None or \ not (0 <= self._selected_profile_index < len(self._profiles_data)): @@ -845,4 +888,64 @@ class ProfileManagerWindow(tk.Toplevel): title_suffix = " (Analysis might 
be obsolete)" if is_obsolete else "" dialog_title = f"Analyzed Functions for '{exe_name_for_title}'{title_suffix}" - SymbolListViewerDialog(self, functions_list, title=dialog_title) \ No newline at end of file + SymbolListViewerDialog(self, functions_list, title=dialog_title) + + def _view_analyzed_variables(self) -> None: # NUOVO METODO PER VARIABILI GLOBALI + if self._selected_profile_index is None or not (0 <= self._selected_profile_index < len(self._profiles_data)): + messagebox.showinfo("Info", "No profile selected or data available.", parent=self) + return + profile = self._profiles_data[self._selected_profile_index] + analysis_data = profile.get("symbol_analysis") + if not analysis_data or not isinstance(analysis_data.get("symbols"), dict): + messagebox.showinfo("No Analysis Data", "No symbol analysis data available for this profile.", parent=self) + return + variables_list = analysis_data["symbols"].get("global_variables", []) + if not variables_list: + messagebox.showinfo("No Global Variables", "No global variables found in the last analysis for this profile.", parent=self) + return + + target_exe_in_form = self.target_exe_var.get() + analyzed_exe_path = analysis_data.get("analyzed_executable_path", "") + exe_name_for_title = os.path.basename(target_exe_in_form) if target_exe_in_form else "Unknown Executable" + + is_obsolete = True + if os.path.normpath(analyzed_exe_path) == os.path.normpath(target_exe_in_form): + current_checksum = file_utils.calculate_file_checksum(target_exe_in_form) + saved_checksum = analysis_data.get("executable_checksum") + if current_checksum and saved_checksum and current_checksum == saved_checksum: + is_obsolete = False + + title_suffix = " (Analysis might be obsolete)" if is_obsolete else "" + dialog_title = f"Analyzed Global Variables for '{exe_name_for_title}'{title_suffix}" + + SymbolListViewerDialog(self, variables_list, title=dialog_title) + + def _view_analyzed_types(self) -> None: # NUOVO METODO PER TIPI + if self._selected_profile_index is None or not (0 <= self._selected_profile_index < len(self._profiles_data)): + messagebox.showinfo("Info", "No profile selected or data available.", parent=self) + return + profile = self._profiles_data[self._selected_profile_index] + analysis_data = profile.get("symbol_analysis") + if not analysis_data or not isinstance(analysis_data.get("symbols"), dict): + messagebox.showinfo("No Analysis Data", "No symbol analysis data available for this profile.", parent=self) + return + types_list = analysis_data["symbols"].get("types", []) + if not types_list: + messagebox.showinfo("No Types", "No types found in the last analysis for this profile.", parent=self) + return + + target_exe_in_form = self.target_exe_var.get() + analyzed_exe_path = analysis_data.get("analyzed_executable_path", "") + exe_name_for_title = os.path.basename(target_exe_in_form) if target_exe_in_form else "Unknown Executable" + + is_obsolete = True + if os.path.normpath(analyzed_exe_path) == os.path.normpath(target_exe_in_form): + current_checksum = file_utils.calculate_file_checksum(target_exe_in_form) + saved_checksum = analysis_data.get("executable_checksum") + if current_checksum and saved_checksum and current_checksum == saved_checksum: + is_obsolete = False + + title_suffix = " (Analysis might be obsolete)" if is_obsolete else "" + dialog_title = f"Analyzed Types for '{exe_name_for_title}'{title_suffix}" + + SymbolListViewerDialog(self, types_list, title=dialog_title) \ No newline at end of file