fix matrix export with base element type

VALLONGOL 2025-10-07 07:45:26 +02:00
parent cafc441085
commit 6e058bf024
2 changed files with 62 additions and 52 deletions

View File

@@ -6,10 +6,10 @@
import re
# --- Version Data (Generated) ---
__version__ = "v.1.0.0.2-1-ga4d6c04"
GIT_COMMIT_HASH = "a4d6c043e67f3e7a8c5dd092643b595c28dbe25b"
__version__ = "v.1.0.0.3-4-g5f6d85b"
GIT_COMMIT_HASH = "5f6d85b34bf03f89668d8f02e4a9482261e0b1fb"
GIT_BRANCH = "master"
BUILD_TIMESTAMP = "2025-06-13T13:57:33.708395+00:00"
BUILD_TIMESTAMP = "2025-09-25T12:04:11.622704+00:00"
IS_GIT_REPO = True
# --- Default Values (for comparison or fallback) ---

View File

@@ -410,49 +410,56 @@ class GDBDumpJsonCommand(gdb.Command):
def _get_struct_unpack_format(self, gdb_type) -> Tuple[str, List[str]]:
"""
Determines the Python `struct` unpack format string for a given GDB type.
Returns the format string and a list of field names.
Determines the format string for struct.unpack for a GDB type.
Supports both structs and base types (int, float, double, etc.).
Returns the format string and the list of field names (empty for base types).
"""
_dumper_log_write(f"[_get_struct_unpack_format] Analyzing type: {gdb_type.name}")
_dumper_log_write(f"[_get_struct_unpack_format] Analisi tipo: {gdb_type.name}")
type_code_map = {
'char': 'c', 'signed char': 'b', 'unsigned char': 'B', 'bool': '?',
'short': 'h', 'unsigned short': 'H', 'int': 'i', 'unsigned int': 'I',
'long': 'l', 'unsigned long': 'L', 'long long': 'q', 'unsigned long long': 'Q',
'float': 'f', 'double': 'd'
}
endian_char = '<'
try:
endian_str = gdb.execute("show endian", to_string=True)
if "big endian" in endian_str:
endian_char = '>'
_dumper_log_write(f" Target endianness detected: {'big' if endian_char == '>' else 'little'}")
_dumper_log_write(f" Endianness: {'big' if endian_char == '>' else 'little'}")
except gdb.error:
_dumper_log_write(" Could not determine target endianness, assuming little-endian.")
_dumper_log_write(" Endianness non rilevata, assumo little-endian.")
# Se è una struttura
if gdb_type.code == gdb.TYPE_CODE_STRUCT:
format_str = endian_char
field_names = []
if gdb_type.code != gdb.TYPE_CODE_STRUCT:
raise TypeError(f"Cannot generate unpack format for non-struct type: {gdb_type.name}")
for field in gdb_type.fields():
if field.is_base_class or field.name is None:
continue
field_type_str = str(field.type.strip_typedefs())
fmt_char = type_code_map.get(field_type_str)
if fmt_char:
format_str += fmt_char
field_names.append(field.name)
_dumper_log_write(f" - Field '{field.name}' (type: {field_type_str}) mapped to format char '{fmt_char}'")
_dumper_log_write(f" - Campo '{field.name}' ({field_type_str}) -> '{fmt_char}'")
else:
_dumper_log_write(f" [!] Unsupported field type '{field_type_str}' for struct unpacking.")
raise TypeError(f"Unsupported field type in struct: {field_type_str}")
_dumper_log_write(f" Successfully generated unpack format: '{format_str}' with fields: {field_names}")
_dumper_log_write(f" [!] Tipo campo non supportato: '{field_type_str}'")
raise TypeError(f"Tipo campo non supportato: {field_type_str}")
_dumper_log_write(f" Formato struct: '{format_str}', campi: {field_names}")
return format_str, field_names
# If it is a base type
elif gdb_type.code in [gdb.TYPE_CODE_INT, gdb.TYPE_CODE_FLT, gdb.TYPE_CODE_BOOL]:
type_name = str(gdb_type.name)
fmt_char = type_code_map.get(type_name)
if not fmt_char:
_dumper_log_write(f" [!] Tipo base non supportato: '{type_name}'")
raise TypeError(f"Tipo base non supportato: {type_name}")
format_str = endian_char + fmt_char
_dumper_log_write(f" Formato tipo base: '{format_str}'")
return format_str, []
else:
raise TypeError(f"Tipo non supportato per unpack: {gdb_type.name}")
def _handle_dynamic_matrix(self, base_expr: str, rows_expr: str, cols_expr: str) -> List[List[Any]]:
try:
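As an aside, a minimal standalone sketch of how the two return shapes of `_get_struct_unpack_format` are meant to be consumed (independent of GDB; the "<if" struct layout, field names, and values are hypothetical, and a little-endian target is assumed): a struct yields a multi-character format plus field names, while a base type yields a single format character and an empty field list.

import struct

# Hypothetical outputs of _get_struct_unpack_format (assumed little-endian target).
struct_fmt, struct_fields = "<if", ["id", "value"]   # e.g. struct { int id; float value; }
scalar_fmt, scalar_fields = "<d", []                 # e.g. a plain double element

raw_struct = struct.pack(struct_fmt, 7, 1.5)   # stand-in bytes for one struct element
raw_scalar = struct.pack(scalar_fmt, 3.25)     # stand-in bytes for one double element

# Struct element: zip the unpacked values with the field names into a dict.
cell = dict(zip(struct_fields, struct.unpack(struct_fmt, raw_struct)))   # {'id': 7, 'value': 1.5}

# Base-type element: no field names, so the single unpacked value is used directly.
cell = struct.unpack(scalar_fmt, raw_scalar)[0]   # 3.25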
@@ -472,30 +479,28 @@ class GDBDumpJsonCommand(gdb.Command):
if sizeof_element == 0:
raise ValueError("Cannot dump matrix of zero-sized elements.")
# --- Attempting Optimized Path ---
# --- Optimized Path ---
try:
unpack_format, field_names = self._get_struct_unpack_format(element_type)
_dumper_log_write(f" Attempting OPTIMIZED path with format '{unpack_format}'.")
_dumper_log_write(f" OPTIMIZED path con formato '{unpack_format}'.")
start_address = int(base_ptr_val)
total_elements = num_rows * num_cols
total_bytes = total_elements * sizeof_element
_dumper_log_write(f" Calculated memory block: address=0x{start_address:x}, total_bytes={total_bytes}")
_dumper_log_write(f" Blocco memoria: address=0x{start_address:x}, total_bytes={total_bytes}")
if total_bytes > 2_000_000_000: # 2GB sanity check
if total_bytes > 2_000_000_000:
raise ValueError(f"Memory block size ({total_bytes} bytes) is excessively large. Aborting.")
_dumper_log_write(" Calling inferior.read_memory...")
_dumper_log_write(" Chiamo inferior.read_memory...")
inferior = gdb.selected_inferior()
memory_block = inferior.read_memory(start_address, total_bytes)
_dumper_log_write(f" Memory block of size {len(memory_block)} read successfully.")
_dumper_log_write(f" Memory block di {len(memory_block)} bytes letto correttamente.")
matrix_data = []
import struct
rows_limit = min(num_rows, MAX_ARRAY_ELEMENTS) if MAX_ARRAY_ELEMENTS > 0 else num_rows
# Log progress every 10% of rows or every 1000 rows, whichever is less frequent
log_interval = max(1, min(rows_limit // 10, 1000))
for r in range(rows_limit):
@@ -508,8 +513,13 @@ class GDBDumpJsonCommand(gdb.Command):
offset = (r * num_cols + c) * sizeof_element
element_bytes = memory_block[offset : offset + sizeof_element]
unpacked_values = struct.unpack(unpack_format, element_bytes)
cell_dict = dict(zip(field_names, unpacked_values))
row_data.append(cell_dict)
# If it is a struct, return a dict with the fields
if field_names:
cell = dict(zip(field_names, unpacked_values))
else:
# Base type: return the value directly
cell = unpacked_values[0]
row_data.append(cell)
if MAX_ARRAY_ELEMENTS > 0 and num_cols > cols_limit:
row_data.append(f"<row_trunc_to_{cols_limit}>")
@@ -518,7 +528,7 @@ class GDBDumpJsonCommand(gdb.Command):
if MAX_ARRAY_ELEMENTS > 0 and num_rows > rows_limit:
matrix_data.append([f"<matrix_rows_trunc_to_{rows_limit}>"])
_dumper_log_write(" Optimized path finished successfully.")
_dumper_log_write(" Percorso ottimizzato completato.")
metadata = {
"is_matrix": True,
"original_rows": num_rows,
@@ -528,7 +538,7 @@ class GDBDumpJsonCommand(gdb.Command):
except Exception as e_opt:
# --- Fallback Path ---
_dumper_log_write(f" [!] Optimization failed: {e_opt}. Falling back to SLOW, element-by-element parsing.")
_dumper_log_write(f" [!] Ottimizzazione fallita: {e_opt}. Fallback lento.")
matrix_data = []
rows_limit = min(num_rows, MAX_ARRAY_ELEMENTS) if MAX_ARRAY_ELEMENTS > 0 else num_rows