Unified the payload_router between debug and main

This commit is contained in:
VALLONGOL 2025-10-22 16:09:05 +02:00
parent 92b7a12492
commit 03ac1c2ce5
5 changed files with 143 additions and 95 deletions

View File

@ -2,7 +2,7 @@
"general": {
"scan_limit": 60,
"max_range": 100,
"geometry": "1599x1024+626+57",
"geometry": "1599x1024+481+287",
"last_selected_scenario": "scenario_dritto",
"connection": {
"target": {

View File

@ -14,7 +14,7 @@ from target_simulator.core.models import Scenario
from target_simulator.core.sfp_transport import SfpTransport, PayloadHandler
from target_simulator.core import command_builder
from target_simulator.utils.logger import get_logger
from target_simulator.core.simulation_payload_handler import SimulationPayloadHandler
from target_simulator.gui.payload_router import DebugPayloadRouter
from target_simulator.analysis.simulation_state_hub import SimulationStateHub
@ -32,7 +32,12 @@ class SFPCommunicator(CommunicatorInterface):
self.simulation_hub = simulation_hub
self.update_queue = update_queue
self._connection_state_callbacks: List[Callable[[bool], None]] = []
self._extra_payload_handlers: Dict[int, PayloadHandler] = {}
# Unified payload router
self.payload_router = DebugPayloadRouter(simulation_hub=simulation_hub, update_queue=update_queue)
def router(self) -> DebugPayloadRouter:
"""Return the shared unified payload router instance."""
return self.payload_router
def add_connection_state_callback(self, callback: Callable[[bool], None]):
if callback not in self._connection_state_callbacks:
@ -76,18 +81,15 @@ class SFPCommunicator(CommunicatorInterface):
f"Initializing SFP Transport: Bind {local_port_int} -> Remote {self._destination}"
)
payload_handlers = {}
if self.simulation_hub:
self.logger.info("Simulation hub provided. Setting up simulation payload handlers.")
sim_handler = SimulationPayloadHandler(self.simulation_hub, update_queue=self.update_queue)
payload_handlers.update(sim_handler.get_handlers())
payload_handlers.update(self._extra_payload_handlers)
# The unified router provides all necessary handlers
payload_handlers = self.payload_router.get_handlers()
self.transport = SfpTransport(
host="0.0.0.0",
port=local_port_int,
payload_handlers=payload_handlers,
# Pass the router's raw packet callback to the transport
raw_packet_callback=self.payload_router.update_raw_packet
)
if self.transport.start():
@ -107,22 +109,14 @@ class SFPCommunicator(CommunicatorInterface):
return result
def add_payload_handlers(self, handlers: Dict[int, PayloadHandler]):
if not handlers:
return
self._extra_payload_handlers.update(handlers)
if self.transport:
self.transport.add_payload_handlers(handlers)
self.logger.info("Attached extra payload handlers to running transport.")
# This method is now a no-op as handlers are managed centrally.
self.logger.warning("add_payload_handlers is deprecated. Handlers are managed by the unified router.")
pass
def remove_payload_handlers(self, handlers: Dict[int, PayloadHandler]):
if not handlers:
return
for flow in handlers.keys():
self._extra_payload_handlers.pop(flow, None)
if self.transport:
self.transport.remove_payload_handlers(handlers)
self.logger.info("Detached extra payload handlers from running transport.")
# This method is now a no-op as handlers are managed centrally.
self.logger.warning("remove_payload_handlers is deprecated. Handlers are managed by the unified router.")
pass
def disconnect(self) -> None:
if self.transport:
@ -135,7 +129,8 @@ class SFPCommunicator(CommunicatorInterface):
@property
def is_open(self) -> bool:
return self.transport is not None and self.transport._socket is not None
"""Returns True when the underlying transport socket is open."""
return self.transport is not None and getattr(self.transport, '_socket', None) is not None
def send_scenario(self, scenario: Scenario) -> bool:
if not self.is_open or not self._destination:
@ -175,7 +170,6 @@ class SFPCommunicator(CommunicatorInterface):
return False
return self.transport.send_script_command(command, self._destination)
@staticmethod
def test_connection(config: Dict[str, Any]) -> bool:
local_port = config.get("local_port")
if not local_port:
@ -188,6 +182,5 @@ class SFPCommunicator(CommunicatorInterface):
except Exception:
return False
@staticmethod
def list_available_ports() -> List[str]:
return []

View File

@ -9,39 +9,39 @@ import collections
import datetime
import os
import logging
from queue import Queue, Full
from typing import Dict, Optional, Any
from target_simulator.core.sfp_structures import SFPHeader, SfpRisStatusPayload
from target_simulator.analysis.simulation_state_hub import SimulationStateHub
class DebugPayloadRouter:
"""
A router that buffers the last received payload for each flow,
allowing the GUI to sample the data at a lower frequency.
A unified router that handles payloads for the entire application.
It updates the SimulationStateHub for the main simulation and also
buffers detailed data for the debug window.
This class is thread-safe.
"""
def __init__(self):
def __init__(self, simulation_hub: Optional[SimulationStateHub] = None, update_queue: Optional[Queue] = None):
self._log_prefix = "[DebugPayloadRouter]"
self._lock = threading.Lock()
# Buffer to store the last received payload for each flow type
self._latest_payloads: Dict[str, bytearray] = {}
# Buffer to store the last raw packet received (bytes, addr)
self._last_raw_packet: Optional[tuple] = None
# History of raw packets (timestamp, addr, raw bytes)
self._history_size = 20
self._history = collections.deque(maxlen=self._history_size)
self._persist = False
# default persist dir: repository Temp/ folder
project_root = os.path.abspath(
os.path.join(os.path.dirname(__file__), "..", "..")
)
self._hub = simulation_hub
self._update_queue = update_queue
project_root = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", ".."))
self._persist_dir = os.path.join(project_root, "Temp")
try:
os.makedirs(self._persist_dir, exist_ok=True)
except Exception:
pass
# Create handlers once and store them to ensure stable object references
self._handlers = {
ord("M"): lambda p: self._update_last_payload("MFD", p),
ord("S"): lambda p: self._update_last_payload("SAR", p),
@ -50,7 +50,10 @@ class DebugPayloadRouter:
ord("R"): self._handle_ris_status,
ord("r"): self._handle_ris_status,
}
logging.info(f"{self._log_prefix} Initialized.")
logging.info(f"{self._log_prefix} Initialized (Hub: {self._hub is not None}, Queue: {self._update_queue is not None}).")
# Convenience logger
self._logger = logging.getLogger(__name__)
def get_handlers(self) -> Dict[int, Any]:
"""Returns the stored handler instances."""
@ -60,23 +63,51 @@ class DebugPayloadRouter:
"""Thread-safely stores the latest payload for a given flow."""
with self._lock:
self._latest_payloads[flow_id] = payload
try:
self._logger.debug(f"{self._log_prefix} Stored payload for {flow_id} ({len(payload)} bytes)")
except Exception:
pass
def _handle_ris_status(self, payload: bytearray):
"""Try to parse a RIS status payload and store a concise summary.
If parsing fails, store the raw payload as before.
"""
Handles RIS status for both the Simulation Hub (if available) and
the debug window's internal buffers.
"""
# --- 1. Process for Simulation Hub (if configured) ---
if self._hub:
try:
parsed_for_hub = SfpRisStatusPayload.from_buffer_copy(payload)
ts_s = parsed_for_hub.scenario.timetag / 1000.0
for i, ris_target in enumerate(parsed_for_hub.tgt.tgt):
if ris_target.flags != 0:
self._hub.add_real_state(
target_id=i,
timestamp=ts_s,
state=(ris_target.x, ris_target.y, ris_target.z)
)
if self._update_queue:
try:
self._update_queue.put_nowait([])
try:
self._logger.debug(f"{self._log_prefix} Enqueued GUI update notification for hub (targets processed).")
except Exception:
pass
except Full:
try:
self._logger.warning(f"{self._log_prefix} GUI update queue is full; dropped notification.")
except Exception:
pass
except Exception:
logging.exception("DebugPayloadRouter: Failed to process RIS for Hub.")
# --- 2. Process for Debug Window UI (original logic) ---
try:
if len(payload) >= SfpRisStatusPayload.size():
# Interpret the first bytes as the status payload
parsed = SfpRisStatusPayload.from_buffer_copy(
bytes(payload[: SfpRisStatusPayload.size()])
)
parsed = SfpRisStatusPayload.from_buffer_copy(bytes(payload[:SfpRisStatusPayload.size()]))
sc = parsed.scenario
lines = []
lines.append("RIS Status Payload:\n")
# Scenario block
lines.append("Scenario:")
lines = ["RIS Status Payload:\n", "Scenario:"]
lines.append(f" timetag : {sc.timetag}")
lines.append(f" platform_azim : {sc.platform_azimuth:.6f}")
lines.append(f" vx,vy,vz : {sc.vx:.3f}, {sc.vy:.3f}, {sc.vz:.3f}")
@ -84,29 +115,18 @@ class DebugPayloadRouter:
lines.append(f" latitude : {sc.latitude:.6f}")
lines.append(f" longitude : {sc.longitude:.6f}")
lines.append(f" true_heading : {sc.true_heading:.3f}\n")
# Targets block
lines.append("Targets (first non-zero flags shown):")
any_target = False
for idx, t in enumerate(parsed.tgt.tgt):
if t.flags != 0:
any_target = True
lines.append(
f" [{idx}] flags={t.flags} heading={t.heading:.3f} x={t.x:.3f} y={t.y:.3f} z={t.z:.3f}"
)
lines.append(f" [{idx}] flags={t.flags} heading={t.heading:.3f} x={t.x:.3f} y={t.y:.3f} z={t.z:.3f}")
if not any_target:
lines.append(" (no enabled targets)")
# NOTE: omit hex sample from RIS textual summary to avoid
# cluttering the application log with large binary dumps.
# The structured JSON payload (RIS_STATUS_JSON) contains
# the parsed values that the UI consumes.
text_out = "\n".join(lines)
# Build structured JSON for UI table consumption
try:
import json
scenario_dict = {
"timetag": int(parsed.scenario.timetag),
"flags": int(parsed.scenario.flags),
@ -124,32 +144,25 @@ class DebugPayloadRouter:
}
targets_list = []
for idx, t in enumerate(parsed.tgt.tgt):
targets_list.append(
{
targets_list.append({
"index": idx,
"flags": int(t.flags),
"heading": float(t.heading),
"x": float(t.x),
"y": float(t.y),
"z": float(t.z),
}
)
})
struct = {"scenario": scenario_dict, "targets": targets_list}
json_bytes = bytearray(json.dumps(struct).encode("utf-8"))
except Exception:
json_bytes = bytearray(b"{}")
json_bytes = bytearray(b'{}')
# Store textual representation and structured JSON so GUI can display it directly
self._update_last_payload(
"RIS_STATUS", bytearray(text_out.encode("utf-8"))
)
self._update_last_payload("RIS_STATUS", bytearray(text_out.encode("utf-8")))
self._update_last_payload("RIS_STATUS_JSON", json_bytes)
return
except Exception:
# fall through to storing raw payload
pass
# Fallback: store raw payload (as hex dump)
try:
text_out = "\n".join([f"{b:02X}" for b in payload])
self._update_last_payload("RIS_STATUS", bytearray(text_out.encode("utf-8")))

View File

@ -73,13 +73,27 @@ class SfpDebugWindow(tk.Toplevel):
except Exception:
pass
self.logger = logging.getLogger(__name__)
self.payload_router = DebugPayloadRouter()
self.shared_communicator = getattr(self.master, 'target_communicator', None)
# Get the single shared router from the communicator. The communicator
# exposes a `router()` method that returns the router instance; older code
# used getattr(..., 'router'), which returned the bound method rather than
# the router object, so prefer calling `router()` when it is available.
self.payload_router = None
if self.shared_communicator is not None:
router_attr = getattr(self.shared_communicator, 'router', None)
try:
if callable(router_attr):
# call to obtain the DebugPayloadRouter instance
self.payload_router = router_attr()
else:
# fallback: attribute may already be the router instance
self.payload_router = router_attr
except Exception:
self.logger.exception("Failed to obtain router from shared communicator")
if self.shared_communicator:
self.shared_communicator.add_connection_state_callback(self._update_toggle_state)
handlers = self.payload_router.get_handlers()
self.shared_communicator.add_payload_handlers(handlers)
# Handlers are now managed centrally by the communicator, so no need to add/remove them here.
self.image_area_size = 150
self._ppi_visible = False
@ -577,8 +591,6 @@ class SfpDebugWindow(tk.Toplevel):
self.logger.info("SFP Debug Window closing.")
if self.shared_communicator:
self.shared_communicator.remove_connection_state_callback(self._update_toggle_state)
handlers = self.payload_router.get_handlers()
self.shared_communicator.remove_payload_handlers(handlers)
self.destroy()
def _update_toggle_state(self, connected: bool):
@ -595,9 +607,23 @@ class SfpDebugWindow(tk.Toplevel):
self._update_toggle_state(connected)
def _process_latest_payloads(self):
if not self.payload_router:
# nothing to poll; reschedule and return
self.logger.debug("SfpDebugWindow: no payload_router available; skipping poll")
self.after(self.GUI_POLL_INTERVAL_MS, self._process_latest_payloads)
return
try:
new_payloads = self.payload_router.get_and_clear_latest_payloads()
except Exception:
self.logger.exception("Error while fetching latest payloads from router")
new_payloads = {}
if new_payloads:
try:
self._log_to_widget(f"Processing {len(new_payloads)} new payload(s) for flows: {list(new_payloads.keys())}")
except Exception:
pass
for flow_id, payload in new_payloads.items():
if flow_id == "MFD" and _IMAGE_LIBS_AVAILABLE:
self._display_image_data(payload, self.mfd_tab, "mfd_photo")
@ -782,13 +808,21 @@ class SfpDebugWindow(tk.Toplevel):
except Exception: self.logger.exception("Failed to push targets to PPI")
except Exception: self.logger.exception("Error while preparing RIS targets for PPI")
except Exception: pass
# reschedule polling
self.after(self.GUI_POLL_INTERVAL_MS, self._process_latest_payloads)
raw_pkt = self.payload_router.get_and_clear_raw_packet()
# raw packet display (guarded)
try:
raw_pkt = self.payload_router.get_and_clear_raw_packet() if self.payload_router else None
if raw_pkt:
raw_bytes, addr = raw_pkt
self._display_raw_packet(raw_bytes, addr)
try: self._refresh_history_tree()
except Exception: pass
try:
self._refresh_history_tree()
except Exception:
pass
except Exception:
self.logger.exception("Error while fetching raw packet from router")
def _refresh_history_tree(self):
try:

View File

@ -18,3 +18,11 @@ So we should add a new legend, alongside the current one, that indicates
I need to be able to send the radar position every 10 milliseconds, measure the deviations of the position computed here from the one returned by the radar, and plot those deviations along the trajectory.
Make it possible to check whether the chosen send interval is actually respected, by measuring both the moment we decide to send a datum and the actual moment the datum goes out.
From this, determine whether the timing jitters and, if needed, apply adjustments over time so that the chosen send rate is respected, down to 0.01 seconds (a sketch of such a rate-keeping loop follows these notes).
Check whether, with the send rate at 0.01 seconds, we can still get the data to the server.
Rework the sends: check how many targets need updating and send a configurable batch of commands per transmission, so as not to exceed the maximum possible length of a single message.
Add a flag that selects either the current monitor command sending or the new JSON commands we are defining with the server.
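
The jitter and rate-keeping items above could be prototyped with a small fixed-rate send loop. The sketch below is illustrative only: `send_fn` and `get_updates` are hypothetical callables standing in for the real transport and target-update source, and the message-size limit is approximated as an item count. It measures both the scheduling jitter and the decide-to-send versus actual-send delay, and splits pending updates into size-limited batches, as the notes request.

# Illustrative sketch only -- hypothetical names, not existing project code.
import logging
import time
from typing import Callable, List, Sequence

logger = logging.getLogger(__name__)

def rate_keeping_loop(send_fn: Callable[[Sequence], None],
                      get_updates: Callable[[], List],
                      period_s: float = 0.01,
                      max_items_per_message: int = 8) -> None:
    """Send pending updates at a fixed rate, measuring and logging jitter."""
    next_deadline = time.perf_counter()
    while True:
        next_deadline += period_s
        # Coarse sleep, then yield-spin near the deadline to keep jitter low
        # at millisecond periods.
        while True:
            remaining = next_deadline - time.perf_counter()
            if remaining <= 0:
                break
            time.sleep(remaining / 2 if remaining > 0.002 else 0)
        decided_at = time.perf_counter()
        # Split pending target updates so one message never exceeds the
        # maximum allowed length (approximated here as an item count).
        updates = get_updates()
        for i in range(0, len(updates), max_items_per_message):
            send_fn(updates[i:i + max_items_per_message])
        sent_at = time.perf_counter()
        # The two measurements the notes ask for: scheduling jitter and the
        # decide-to-send versus actual-send delay.
        logger.debug("jitter=%+.3f ms send_cost=%.3f ms",
                     (decided_at - next_deadline) * 1000.0,
                     (sent_at - decided_at) * 1000.0)
        # If sending overran a whole period, resynchronise instead of lagging.
        if time.perf_counter() > next_deadline + period_s:
            next_deadline = time.perf_counter()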