add separate log for worker
parent 23b3c37c88
commit 94d3ba00d5

@@ -1,6 +1,6 @@
 {
-    "last_opened_file": "C:/src/____GitProjects/radar_data_reader/_25-05-15-12-22-52_sata_354-n11.out",
-    "last_output_file": "C:\\src\\____GitProjects\\radar_data_reader\\_25-05-15-12-22-52_sata_354-n11.csv",
+    "last_opened_file": "C:/src/____GitProjects/radar_data_reader/_25-05-15-12-22-52_sata_568-n13.out",
+    "last_output_file": "C:\\src\\____GitProjects\\radar_data_reader\\_25-05-15-12-22-52_sata_568-n13.csv",
     "active_export_profile_name": "Default",
     "export_profiles": [
         {

radar_data_reader/core/file_reader.py
@@ -1,9 +1,12 @@
+# radar_data_reader/core/file_reader.py
+
 """
 Worker process logic for reading and parsing radar data files.
 """
 import multiprocessing as mp
+import logging  # <-- new import
 from pathlib import Path
-from typing import List, Optional, Dict, Iterator, Tuple, Set
+from typing import Set, Iterator, Tuple
 import queue
 import cProfile
 import pstats
@@ -22,6 +25,35 @@ HEADER_BLOCK_NAME = 1213223748
 BLOCK_NAME_OFFSET = 17
 BLOCK_SIZE_OFFSET = 5
 
+# --- New constant for the worker's log file ---
+WORKER_LOG_FILE = "worker_process.log"
+
+
+def _setup_worker_logging() -> logging.Handler:
+    """Configures a dedicated file logger for this worker process."""
+    # Get the logger specific to this module
+    worker_log = logging.getLogger(__name__)
+    worker_log.setLevel(logging.DEBUG)  # Capture all levels of messages
+
+    # Create a file handler that overwrites the file each time (mode='w')
+    file_handler = logging.FileHandler(WORKER_LOG_FILE, mode='w', encoding='utf-8')
+    formatter = logging.Formatter(
+        '%(asctime)s [%(levelname)-8s] (Worker-PID:%(process)d): %(message)s',
+        '%H:%M:%S'
+    )
+    file_handler.setFormatter(formatter)
+
+    # Add the handler to this specific logger
+    worker_log.addHandler(file_handler)
+
+    # --- THIS IS THE KEY STEP ---
+    # Stop log messages from propagating to the root logger,
+    # which would send them to the main process's queue.
+    worker_log.propagate = False
+
+    worker_log.info("Dedicated worker logging configured. Output will be in " + WORKER_LOG_FILE)
+    return file_handler
+
 
 def run_worker_process(
     filepath: Path,
@@ -29,22 +61,23 @@ def run_worker_process(
     result_queue: mp.Queue,
     profile: ExportProfile
 ):
+    """This function is the main target for the multiprocessing.Process."""
+
+    # --- New logging management with try...finally ---
+    worker_file_handler = _setup_worker_logging()
+
     profiler = cProfile.Profile()
     profiler.enable()
 
-    """This function is the main target for the multiprocessing.Process."""
-    log.info(f"[Worker-{mp.current_process().pid}] Started for file: {filepath.name}")
-
-    # --- Pre-computation of required data based on the profile ---
-    # Determine which signal types are needed by inspecting the data paths.
-    # A path like "signals.SUM.some_attribute" means we need to parse "SUM" blocks.
-    required_signal_types = {
-        f.data_path.split('.')[1] for f in profile.fields if f.data_path.startswith('signals.')
-    }
-    log.info(f"Optimization active. Required signal types for this run: {required_signal_types}")
-    # Note: Header data is always considered necessary for context (n_rbin, n_pri, etc.)
-
-    try:
+    try:
+        log.info(f"[Worker-{mp.current_process().pid}] Started for file: {filepath.name}")
+
+        # Pre-computation of required data based on the profile
+        required_signal_types = {
+            f.data_path.split('.')[1] for f in profile.fields if f.data_path.startswith('signals.')
+        }
+        log.info(f"Optimization active. Required signal types for this run: {required_signal_types}")
+
         reader = RadarFileReader(filepath)
         if not reader.load_and_find_blocks():
             result_queue.put({"type": "error", "message": "Failed to load or find blocks."})
@@ -55,7 +88,6 @@ def run_worker_process(
 
         interrupted = False
 
-        # Pass the required signal types to the generator
         batch_generator = reader.build_batches_generator(required_signal_types)
 
         for i, (batch, blocks_done_count) in enumerate(batch_generator):
@@ -82,25 +114,35 @@ def run_worker_process(
 
         result_queue.put({"type": "complete", "interrupted": interrupted})
         log.info(f"[Worker-{mp.current_process().pid}] Processing finished.")
 
     except Exception as e:
         log.error(f"[Worker-{mp.current_process().pid}] Unhandled exception: {e}", exc_info=True)
         result_queue.put({"type": "error", "message": f"Worker failed: {e}"})
 
     finally:
+        # --- New cleanup logic in the finally block ---
+        log.info("Worker process shutting down. Cleaning up logging.")
+
         profiler.disable()
         pid = mp.current_process().pid
         profile_filename = f"profile_output_{pid}.prof"
         profiler.dump_stats(profile_filename)
         log.info(f"Profiling data saved to {profile_filename}")
+
+        # Restore logger to its original state and close the file handler
+        worker_log = logging.getLogger(__name__)
+        worker_log.propagate = True
+        worker_log.removeHandler(worker_file_handler)
+        worker_file_handler.close()
+
 
 class RadarFileReader:
     """Class containing the pure logic for file reading and parsing."""
 
     def __init__(self, file_path: Path):
         self.file_path = file_path
-        self.data_vector: Optional[np.ndarray] = None
-        self.block_indices: Optional[List[int]] = None
+        self.data_vector: np.ndarray | None = None
+        self.block_indices: list[int] | None = None
 
     def load_and_find_blocks(self) -> bool:
         # This method remains unchanged
@@ -129,8 +171,8 @@ class RadarFileReader:
         if self.block_indices is None or self.data_vector is None:
             return
 
-        current_header: Optional[MainHeader] = None
-        current_signals: Dict[str, np.ndarray] = {}
+        current_header: MainHeader | None = None
+        current_signals: dict[str, np.ndarray] = {}
 
         for block_num, block_start_index in enumerate(self.block_indices):
             try:
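
A note on the pattern introduced here: the worker gives its module-level logger a private FileHandler and sets propagate = False, so worker records never reach the root logger, and therefore never reach whatever queue-based handler the main process has installed. Below is a minimal, self-contained sketch of that pattern; demo_worker, the "demo.worker" logger name, and demo_worker.log are illustrative stand-ins, not names from this repository.

import logging
import multiprocessing as mp


def demo_worker() -> None:
    # Same idea as _setup_worker_logging: a private file handler for this process.
    worker_log = logging.getLogger("demo.worker")
    worker_log.setLevel(logging.DEBUG)
    handler = logging.FileHandler("demo_worker.log", mode="w", encoding="utf-8")
    handler.setFormatter(logging.Formatter(
        "%(asctime)s [%(levelname)-8s] (Worker-PID:%(process)d): %(message)s",
        "%H:%M:%S",
    ))
    worker_log.addHandler(handler)
    worker_log.propagate = False  # the key step: keep records away from the root logger

    try:
        worker_log.info("worker doing its work")
    finally:
        # Mirror the commit's finally block: restore state and release the file.
        worker_log.propagate = True
        worker_log.removeHandler(handler)
        handler.close()


if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)  # root logger of the main process
    p = mp.Process(target=demo_worker)
    p.start()
    p.join()
    logging.getLogger("main").info("main process logging is unaffected")

One caveat the diff leaves open: WORKER_LOG_FILE is a fixed name opened with mode='w', so two workers running at once would overwrite each other's log. Embedding the PID in the filename, as profile_output_{pid}.prof already does for the profiler output, would sidestep that.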
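
The finally block also still dumps per-process cProfile stats to profile_output_{pid}.prof. Since the module already imports pstats, reading such a dump back takes only a few lines; the PID in the filename below is made up and will differ on every run.

import pstats

# Load a dump written by run_worker_process's finally block.
stats = pstats.Stats("profile_output_12345.prof")
stats.sort_stats(pstats.SortKey.CUMULATIVE)  # heaviest call chains first
stats.print_stats(20)  # limit output to the top 20 entries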