# --- START OF FILE image_recorder.py ---
# image_recorder.py
"""
Handles saving SAR images as georeferenced TIFF files (GeoTIFF).

Includes functionality to create the output directory, generate timestamped
filenames, write standard EXIF/GPS and GeoTIFF tags using tifffile based on
AppState GeoInfo, manage automatic cleanup of old recordings based on
configuration, and performs saving asynchronously in a dedicated thread.

This version attempts to replicate the tag structure of a reference TIFF file
using standard TIFF/EXIF/GPS tags, passing byte-based tags via extratags.

Dependencies:
    - numpy
    - tifffile
    - pyproj (optional, improves accuracy of GeoTIFF transform)
    - rasterio (optional, only if needed for CRS object creation or fallback)
"""

# Standard library imports
import logging
import os
import datetime
import math
import sys
import threading  # For dedicated writer thread and shutdown event
import queue      # For internal queue
import time       # FIX: used by _writer_loop's error backoff; was previously missing
from pathlib import Path
from typing import Dict, Any, Optional, Tuple, List

# Third-party imports
import numpy as np

# Attempt to import required libraries
try:
    import tifffile
    _tifffile_available = True
    # Define standard TIFF tag codes we might use
    IMAGE_DESCRIPTION_TAG = 270   # ASCII
    MAKE_TAG = 271                # ASCII
    MODEL_TAG = 272               # ASCII
    SOFTWARE_TAG = 305            # ASCII
    ARTIST_TAG = 315              # ASCII
    COPYRIGHT_TAG = 33432         # ASCII
    ORIENTATION_TAG = 274         # SHORT
    X_RESOLUTION_TAG = 282        # RATIONAL
    Y_RESOLUTION_TAG = 283        # RATIONAL
    RESOLUTION_UNIT_TAG = 296     # SHORT
    DOCUMENT_NAME_TAG = 269       # ASCII
    EXIF_IFD_TAG = 34665          # LONG (Pointer)
    GPS_IFD_TAG = 34853           # LONG (Pointer)
    # Define EXIF specific tags (relative to EXIF IFD)
    EXIF_VERSION_TAG = 36864      # UNDEFINED (4 bytes)
    USER_COMMENT_TAG = 37510      # UNDEFINED (Variable bytes, starts with encoding)
    # Define GeoTIFF specific tags
    MODEL_TRANSFORMATION_TAG = 33922  # DOUBLE (16)
    GEO_KEY_DIRECTORY_TAG = 34735     # SHORT (Variable * 4)
except ImportError:
    tifffile = None
    _tifffile_available = False
    logging.error(
        "[ImageRecorder Init] 'tifffile' library not found. "
        "Cannot save TIFF files. Please install it (`pip install tifffile`)."
    )
    # Define tags as None if library missing to avoid NameErrors later
    IMAGE_DESCRIPTION_TAG = MAKE_TAG = MODEL_TAG = SOFTWARE_TAG = None
    ARTIST_TAG = COPYRIGHT_TAG = EXIF_IFD_TAG = GPS_IFD_TAG = None
    EXIF_VERSION_TAG = USER_COMMENT_TAG = None
    MODEL_TRANSFORMATION_TAG = GEO_KEY_DIRECTORY_TAG = None
    ORIENTATION_TAG = X_RESOLUTION_TAG = Y_RESOLUTION_TAG = RESOLUTION_UNIT_TAG = None
    DOCUMENT_NAME_TAG = None

# Optional: rasterio (only for potential CRS object help if needed)
try:
    import rasterio
    from rasterio.transform import Affine  # Keep Affine if calc helper returns it
    from rasterio.errors import CRSError
    _rasterio_available = True
except ImportError:
    rasterio = None
    Affine = None
    CRSError = None
    _rasterio_available = False
    logging.warning("[ImageRecorder Init] 'rasterio' not found. CRS validation limited.")

# Optional: pyproj for better transform accuracy
try:
    import pyproj
    _pyproj_available = True
except ImportError:
    pyproj = None
    _pyproj_available = False
    logging.warning("[ImageRecorder Init] 'pyproj' not found. Transform accuracy might be reduced.")

# Local application imports
from app_state import AppState  # Requires access to app state for config/flags
import config                   # Requires access to config for paths/limits


class ImageRecorder:
    """Handles recording SAR images to GeoTIFF files asynchronously.

    A dedicated daemon thread drains an internal bounded queue and writes each
    queued (image, geo-info) pair to a timestamped GeoTIFF in the configured
    recording directory, then enforces the configured retention limit.
    """

    def __init__(self, app_state: AppState):
        """
        Initializes the ImageRecorder and starts the writer thread.

        Args:
            app_state (AppState): Reference to the shared application state.
        """
        self._log_prefix = "[ImageRecorder]"
        logging.debug(f"{self._log_prefix} Initializing...")
        self._app_state: AppState = app_state

        # Recording directory setup
        self.recording_dir_name = config.DEFAULT_SAR_RECORDING_DIRECTORY
        try:
            # Determine base path based on execution context
            if getattr(sys, 'frozen', False):  # Check if running as bundled exe
                app_path = os.path.dirname(sys.executable)
            else:
                # Assumes ControlPanel.py (main script) is the entry point
                app_path = os.path.dirname(os.path.abspath(sys.argv[0]))
            self.recording_dir: Path = Path(app_path) / self.recording_dir_name
            logging.info(f"{self._log_prefix} Determined recording base path: {app_path}")
        except Exception:
            logging.exception(f"{self._log_prefix} Error determining script path. Using CWD.")
            # Fallback to current working directory if path detection fails
            self.recording_dir: Path = Path.cwd() / self.recording_dir_name

        self._ensure_recording_dir_exists()

        # Check essential library for core functionality
        if not _tifffile_available:
            logging.error(f"{self._log_prefix} Tifffile missing. Recording disabled.")
            # Leave the async machinery unset so record_sar_image/shutdown no-op safely.
            self._writer_thread = None
            self._recording_queue = None
            self._stop_event = None
            return  # Stop initialization

        # Log warnings for missing optional libraries
        if not _pyproj_available:
            logging.warning(f"{self._log_prefix} Pyproj missing. GeoTIFF transform accuracy might be reduced.")

        # --- Components for asynchronous saving ---
        self._recording_queue: queue.Queue = queue.Queue(maxsize=10)
        self._stop_event = threading.Event()
        self._writer_thread = threading.Thread(
            target=self._writer_loop, name="ImageWriterThread", daemon=True
        )
        self._writer_thread.start()
        logging.info(f"{self._log_prefix} Writer thread started.")
        logging.debug(f"{self._log_prefix} Initialization complete.")

    def _ensure_recording_dir_exists(self):
        """Creates the recording directory if it doesn't exist."""
        try:
            self.recording_dir.mkdir(parents=True, exist_ok=True)
            logging.info(
                f"{self._log_prefix} Recording directory ensured: {self.recording_dir}"
            )
        except OSError as e:
            # Non-fatal: saving will fail later and be logged per-item.
            logging.error(
                f"{self._log_prefix} Failed to create recording directory "
                f"'{self.recording_dir}': {e}. Recording might fail."
            )
        except Exception:
            logging.exception(
                f"{self._log_prefix} Unexpected error ensuring recording directory exists:"
            )

    def record_sar_image(
        self, raw_image_data: np.ndarray, geo_info_radians: Dict[str, Any]
    ):
        """
        Queues the raw SAR image data and GeoInfo for asynchronous saving.

        Args:
            raw_image_data (np.ndarray): The raw SAR image data (e.g., uint16).
            geo_info_radians (Dict[str, Any]): The georeferencing information
                with angles in radians. Must contain a truthy "valid" key.
        """
        log_prefix = f"{self._log_prefix} QueueRecord"

        # Check if recording is enabled AND writer thread is running
        if not self._app_state.sar_recording_enabled:
            return
        if not self._writer_thread or not self._recording_queue or not _tifffile_available:
            if _tifffile_available:  # Log only if library is present but thread missing
                logging.warning(f"{log_prefix} Skipping queueing: Writer thread not active.")
            return

        # Basic validation of inputs before queueing
        if raw_image_data is None or raw_image_data.size == 0:
            logging.warning(f"{log_prefix} Skipping queueing: No raw image data provided.")
            return
        if not geo_info_radians or not geo_info_radians.get("valid", False):
            logging.warning(f"{log_prefix} Skipping queueing: Invalid GeoInfo provided.")
            return

        # Put data onto the internal queue
        try:
            # Put a tuple containing COPIES of the data needed for saving
            # (shallow copy of geo_info is enough: the writer only reads values)
            item = (raw_image_data.copy(), geo_info_radians.copy())
            self._recording_queue.put(item, block=False)
            logging.debug(f"{log_prefix} SAR image queued for recording.")
        except queue.Full:
            # FIX: put(block=False) drops the NEW request when the queue is
            # full; the old message wrongly claimed the oldest was discarded.
            logging.warning(
                f"{log_prefix} Recording queue is full ({self._recording_queue.maxsize}). "
                "Discarding new SAR recording request."
            )
        except Exception:
            logging.exception(f"{log_prefix} Error queueing SAR image for recording:")

    def _writer_loop(self):
        """Dedicated thread loop that processes the recording queue."""
        log_prefix = f"{self._log_prefix} WriterLoop"
        logging.info(f"{log_prefix} Writer thread loop starting.")
        while not self._stop_event.is_set():
            item = None
            try:
                # Short timeout so the stop event is re-checked regularly.
                item = self._recording_queue.get(block=True, timeout=1.0)
            except queue.Empty:
                continue
            except Exception:
                logging.exception(f"{log_prefix} Error getting from recording queue:")
                time.sleep(0.1)  # Brief backoff to avoid busy-looping on a persistent error
                continue

            try:
                raw_data, geo_info = item
                log_prefix_item = f"{log_prefix} ItemProcess"
                logging.debug(f"{log_prefix_item} Dequeued SAR image for saving.")

                # Generate a timestamped filename, e.g. 20240101_120000_000000_SAR_geo_img16.tif
                try:
                    now = datetime.datetime.now()
                    timestamp_str = now.strftime("%Y%m%d_%H%M%S_%f")  # Microseconds
                    dtype_bits = raw_data.dtype.itemsize * 8
                    filename = f"{timestamp_str}_SAR_geo_img{dtype_bits}.tif"
                    output_path = self.recording_dir / filename
                except Exception as e:
                    logging.exception(f"{log_prefix_item} Error generating filename: {e}")
                    continue

                # Use the function to save TIFF with all tags using tifffile
                save_success = self._save_tiff_with_all_tags(
                    raw_data, geo_info, output_path
                )

                if save_success:
                    # FIX: log the real path instead of the '(unknown)' placeholder
                    logging.info(f"{log_prefix_item} Successfully saved TIFF with tags: {output_path}")
                    self._cleanup_old_recordings()
                else:
                    logging.error(f"{log_prefix_item} Failed to save TIFF with tags: {output_path}")
            except Exception:
                logging.exception(f"{log_prefix} Error processing recording item:")
            finally:
                # Ensure task_done is called even if processing fails
                if hasattr(self._recording_queue, 'task_done'):
                    try:
                        self._recording_queue.task_done()
                    except ValueError:
                        pass  # Ignore if already marked done
        logging.info(f"{log_prefix} Writer thread loop finished.")

    def shutdown(self):
        """Signals the writer thread to stop and waits for it to finish."""
        log_prefix = f"{self._log_prefix} Shutdown"
        logging.debug(f"{log_prefix} Initiating shutdown...")
        if self._writer_thread and self._stop_event:
            logging.debug(f"{log_prefix} Signaling writer thread to stop...")
            self._stop_event.set()
            logging.debug(f"{log_prefix} Waiting for writer thread to join...")
            self._writer_thread.join(timeout=5.0)  # Wait up to 5 seconds
            if self._writer_thread.is_alive():
                logging.warning(f"{log_prefix} Writer thread did not join cleanly.")
            else:
                logging.info(f"{log_prefix} Writer thread joined successfully.")
        else:
            logging.debug(f"{log_prefix} Writer thread was not active.")
        logging.info(f"{log_prefix} Shutdown complete.")

    # --- GeoTIFF/EXIF Saving Logic ---

    def _calculate_affine_transform(
        self, geo_info_radians: Dict[str, Any]
    ) -> Optional[Tuple]:  # Return tuple (a,b,c,d,e,f)
        """
        Calculates the Affine transform parameters (a,b,c,d,e,f) using GeoInfo.
        Maps pixel *center* coordinates to CRS coordinates (EPSG:4326).

        Returns:
            Tuple of the six affine parameters, or None on invalid input/error.
        """
        log_prefix = f"{self._log_prefix} TransformCalc"
        try:
            # Extract necessary info (KeyError on missing keys handled below)
            scale_x = geo_info_radians['scale_x']
            scale_y = geo_info_radians['scale_y']
            orient_rad = geo_info_radians['orientation']
            center_lon_rad = geo_info_radians['lon']
            center_lat_rad = geo_info_radians['lat']
            width = geo_info_radians['width_px']
            height = geo_info_radians['height_px']
            ref_x = geo_info_radians['ref_x']
            ref_y = geo_info_radians['ref_y']

            if not (scale_x > 0 and scale_y > 0 and width > 0 and height > 0):
                logging.error(f"{log_prefix} Invalid scale or dimensions in GeoInfo.")
                return None

            # Calculate rotation components
            cos_o = math.cos(orient_rad)
            sin_o = math.sin(orient_rad)

            # Calculate degrees per pixel (approximate spherical-earth factors)
            m_per_deg_lat = 111132.954
            m_per_deg_lon = max(abs(111319.488 * math.cos(center_lat_rad)), 1e-3)
            deg_per_pix_x = scale_x / m_per_deg_lon
            deg_per_pix_y = scale_y / m_per_deg_lat

            # Calculate affine elements: [a, b, c]
            #                            [d, e, f]
            a = deg_per_pix_x * cos_o
            b = deg_per_pix_y * sin_o
            d = -deg_per_pix_x * sin_o
            e = -deg_per_pix_y * cos_o

            # Calculate geographic coordinates of the center of the top-left pixel (0, 0)
            center_lon_deg = math.degrees(center_lon_rad)
            center_lat_deg = math.degrees(center_lat_rad)

            # Vector from ref pixel center to TL pixel center (0,0)
            dx_pixel = -ref_x
            dy_pixel = -ref_y

            # Convert to meters (unrotated)
            dx_meters_unrot = dx_pixel * scale_x
            dy_meters_unrot = dy_pixel * scale_y

            # Rotate vector
            dx_meters_rot = dx_meters_unrot * cos_o - dy_meters_unrot * sin_o
            dy_meters_rot = dx_meters_unrot * sin_o + dy_meters_unrot * cos_o

            c = 0.0  # Top-left pixel center longitude
            f = 0.0  # Top-left pixel center latitude
            if not _pyproj_available:
                logging.warning(f"{log_prefix} Calculating TL corner without pyproj (less accurate).")
                # Estimate TL pixel center by applying rotated offset in degrees
                c = center_lon_deg + (dx_meters_rot / m_per_deg_lon)
                f = center_lat_deg + (dy_meters_rot / m_per_deg_lat)
            else:
                # Use pyproj for more accurate calculation
                geod = pyproj.Geod(ellps="WGS84")
                # Distance and azimuth from center to TL pixel center vector;
                # atan2(east, north) yields azimuth clockwise from north as fwd expects
                dist_tl_center = math.hypot(dx_meters_rot, dy_meters_rot)
                azi_tl_center = math.degrees(math.atan2(dx_meters_rot, dy_meters_rot))
                # Project from center to get TL pixel center coordinates
                tl_center_lon, tl_center_lat, _ = geod.fwd(
                    center_lon_deg, center_lat_deg, azi_tl_center, dist_tl_center
                )
                c = tl_center_lon
                f = tl_center_lat

            # Return the 6 affine parameters as a tuple
            transform_tuple = (a, b, c, d, e, f)
            logging.debug(f"{log_prefix} Calculated Affine transform tuple: {transform_tuple}")
            return transform_tuple
        except KeyError as ke:
            logging.error(f"{log_prefix} Missing required key in geo_info: {ke}")
            return None
        except Exception:
            logging.exception(f"{log_prefix} Error calculating Affine transform:")
            return None

    def _save_tiff_with_all_tags(
        self, raw_image_data: np.ndarray, geo_info_radians: Dict[str, Any],
        output_path: Path
    ) -> bool:
        """
        Saves the raw image data as a TIFF file including standard EXIF/GPS tags
        and GeoTIFF tags using tifffile.imwrite and structured metadata dictionary
        with tag names as keys, separating byte tags into extratags.

        Returns:
            True on success; False on any failure (partial file is removed).
        """
        log_prefix = f"{self._log_prefix} SaveAllTags"
        if not _tifffile_available:
            logging.error(f"{log_prefix} Cannot save TIFF: tifffile library missing.")
            return False

        metadata_dict = {}  # Define outside try for logging in except
        extratags = []      # Define outside try for logging in except
        try:
            logging.debug(f"{log_prefix} Preparing all tags for {output_path}...")

            # Prepare tag dictionaries using helper functions.
            # Standard tags are split into JSON-safe (name: value) and byte-based (code: value)
            standard_tags_normal, standard_tags_bytes = self._prepare_standard_tags_like_reference(geo_info_radians)
            standard_tags_normal = standard_tags_normal or {}
            standard_tags_bytes = standard_tags_bytes or {}
            # GPS tags prepared as name: value
            gps_tags = self._prepare_gps_tags_like_reference(geo_info_radians) or {}
            # GeoTIFF tags prepared as name: value
            geotiff_tags = self._prepare_geotiff_tags(geo_info_radians) or {}

            # --- Combine tags for tifffile.imwrite ---
            # 1. metadata dictionary (JSON serializable keys/values)
            metadata_dict = {}
            metadata_dict.update(standard_tags_normal)
            metadata_dict.update(geotiff_tags)
            if gps_tags:
                metadata_dict['GPS'] = gps_tags  # Add GPS as nested dict

            # 2. extratags list for tags with byte values or needing specific formatting
            extratags = []
            logging.debug(f"{log_prefix} Formatting {len(standard_tags_bytes)} byte tags for extratags...")
            for code, value in standard_tags_bytes.items():
                dtype_code, count, proc_val = self._get_tag_dtype_count_value(code, value)
                if dtype_code is not None:
                    logging.debug(f" - Adding Bytes Tag {code}: dtype={dtype_code}, count={count}...")
                    extratags.append((code, dtype_code, count, proc_val, True))  # writeonce=True
                else:
                    logging.warning(f"Tag {code}: Could not format byte tag for extratags.")

            logging.debug(
                f"{log_prefix} Writing TIFF using imwrite with metadata "
                f"({len(metadata_dict)} main keys) and {len(extratags)} extratags..."
            )
            output_path.parent.mkdir(parents=True, exist_ok=True)

            # Use tifffile.imwrite
            tifffile.imwrite(
                output_path,
                data=raw_image_data,
                photometric='minisblack',
                metadata=metadata_dict,  # Pass dict with standard(non-byte), Geo, GPS
                extratags=extratags      # Pass list with (code, type, count, value) for byte tags
            )
            logging.info(f"{log_prefix} Successfully wrote TIFF with metadata/extratags to {output_path}")
            return True

        except ImportError as imp_err:
            logging.error(f"{log_prefix} Missing library for TIFF writing: {imp_err}")
            return False
        except Exception:
            # Log prepared data for debugging
            metadata_content = str(metadata_dict if 'metadata_dict' in locals() else 'Not prepared')[:500]
            extratags_content = str(extratags if 'extratags' in locals() else 'Not prepared')[:500]
            logging.debug(f"Metadata prepared before error: {metadata_content}...")
            logging.debug(f"Extratags prepared before error: {extratags_content}...")
            logging.exception(f"{log_prefix} Error writing TIFF file to {output_path}:")
            try:
                # Attempt cleanup of a partially written file
                if output_path.exists():
                    output_path.unlink()
            except OSError:
                pass
            return False

    def _prepare_geotiff_tags(
        self, geo_info_radians: Dict[str, Any]
    ) -> Optional[Dict[str, Any]]:  # Return Dict[str, Any]
        """Prepares GeoTIFF specific tags (name: value)."""
        log_prefix = f"{self._log_prefix} PrepareGeoTags"
        if not _tifffile_available:
            return None
        try:
            geotiff_tags = {}
            transform_tuple = self._calculate_affine_transform(geo_info_radians)
            # Use standard tag names recognised by tifffile/exiftool
            if transform_tuple:
                a, b, c, d, e, f = transform_tuple
                # Flattened row-major 4x4 ModelTransformation matrix per GeoTIFF spec
                model_transform_matrix = [[a, b, 0., c], [d, e, 0., f], [0., 0., 1., 0.], [0., 0., 0., 1.]]
                geotiff_tags['ModelTransformationTag'] = tuple(
                    item for sublist in model_transform_matrix for item in sublist
                )
            else:
                logging.warning(f"{log_prefix} Could not calculate transform, ModelTransformationTag omitted.")
            if GEO_KEY_DIRECTORY_TAG:  # Check if constant is defined
                # Header (1,1,1,3) + GTModelType, GeographicType=EPSG:4326, GeogAngularUnits=degree.
                # NOTE(review): GTModelType=1 means 'Projected' in the GeoTIFF spec;
                # 2 ('Geographic') would match EPSG:4326 — kept as-is to match the
                # reference file, but verify against a reader.
                geokey_directory = [1, 1, 1, 3, 1024, 0, 1, 1, 2048, 0, 1, 4326, 2054, 0, 1, 9102]
                geotiff_tags['GeoKeyDirectoryTag'] = tuple(geokey_directory)
            logging.debug(f"{log_prefix} Prepared GeoTIFF tags (name: value).")
            return geotiff_tags
        except Exception as e:
            logging.exception(f"{log_prefix} Error preparing GeoTIFF tags: {e}")
            return None

    # Return two dictionaries: one for metadata (name:value), one for extratags (code:value_bytes)
    def _prepare_standard_tags_like_reference(
        self, geo_info_radians: Dict[str, Any]
    ) -> Tuple[Optional[Dict[str, Any]], Optional[Dict[int, Any]]]:
        """
        Prepares standard TIFF/EXIF tags matching the reference file.
        Separates tags with standard JSON-serializable values (returned as name:value)
        from tags requiring specific byte formatting (returned as code:value).
        """
        log_prefix = f"{self._log_prefix} PrepareStdRefTags"
        if not _tifffile_available:
            return None, None
        standard_tags_normal = {}  # For metadata argument (name: value)
        standard_tags_bytes = {}   # For extratags argument (code: value_bytes)
        try:
            # --- Tags for 'metadata' dictionary (JSON serializable values) ---
            standard_tags_normal['ImageDescription'] = "SAR"
            standard_tags_normal['Make'] = "RDR"
            standard_tags_normal['Model'] = "GBK"
            standard_tags_normal['Software'] = "ControlPanelRecorder_v1.0"  # Use our name
            standard_tags_normal['Artist'] = "GRI"
            standard_tags_normal['Copyright'] = "LEO"
            standard_tags_normal['Orientation'] = 1  # TopLeft (int)
            standard_tags_normal['XResolution'] = (1, 1)  # Rational tuple
            standard_tags_normal['YResolution'] = (1, 1)  # Rational tuple
            standard_tags_normal['ResolutionUnit'] = 1  # None (int, matching reference)
            standard_tags_normal['DocumentName'] = "GRIFO Captured"

            # --- Tags for 'extratags' list (require specific type/bytes) ---
            # ExifVersion (Tag 36864, Type UNDEFINED, Count 4)
            if EXIF_VERSION_TAG:
                standard_tags_bytes[EXIF_VERSION_TAG] = b'0210'

            # UserComment (Tag 37510, Type UNDEFINED, Count variable)
            if USER_COMMENT_TAG:
                lat_deg = math.degrees(geo_info_radians.get('lat', 0.0))
                lon_deg = math.degrees(geo_info_radians.get('lon', 0.0))
                orient_deg = math.degrees(geo_info_radians.get('orientation', 0.0))
                scale_x = geo_info_radians.get('scale_x', 0.0)
                scale_y = geo_info_radians.get('scale_y', 0.0)  # currently unused in comment (reference has one scale)
                ref_x = geo_info_radians.get('ref_x', 0)
                ref_y = geo_info_radians.get('ref_y', 0)
                user_comment = (
                    f".GRIFO Captured image:."  # Match reference format
                    f"lat={lat_deg:.6f}.lon={lon_deg:.6f}."
                    f"orientation={orient_deg:.6f}.scale={scale_x:.6f}."  # Only one scale in ref?
                    f"cx={ref_x}.cy={ref_y}."
                )
                # Needs standard 8-byte prefix for UNDEFINED type
                standard_tags_bytes[USER_COMMENT_TAG] = b'ASCII\0\0\0' + user_comment.encode('ascii', errors='ignore')

            logging.debug(f"{log_prefix} Prepared {len(standard_tags_normal)} normal tags and {len(standard_tags_bytes)} byte tags.")
            return standard_tags_normal, standard_tags_bytes
        except Exception as e:
            logging.exception(f"{log_prefix} Error preparing standard tags: {e}")
            return None, None

    def _prepare_gps_tags_like_reference(
        self, geo_info_radians: Dict[str, Any]
    ) -> Optional[Dict[str, Any]]:  # Use names as keys for metadata['GPS']
        """Prepares a dictionary of GPS IFD tags matching reference file (name: value)."""
        log_prefix = f"{self._log_prefix} PrepareGPSTagsRef"
        if not _tifffile_available:
            return None
        try:
            lat_deg = math.degrees(geo_info_radians.get('lat', 0.0))
            lon_deg = math.degrees(geo_info_radians.get('lon', 0.0))
            img_direction = math.degrees(geo_info_radians.get('orientation', 0.0))

            def deg_to_dms_rational(deg_val):
                """Converts decimal degrees to TIFF RATIONAL format ((deg,1),(min,1),(sec,den))."""
                if not math.isfinite(deg_val):
                    return None
                deg_val_abs = abs(deg_val)
                degrees = math.floor(deg_val_abs)
                minutes_decimal = (deg_val_abs - degrees) * 60.0
                minutes = math.floor(minutes_decimal)
                seconds_decimal = (minutes_decimal - minutes) * 60.0
                sec_den = 1000000  # Microdegree precision
                sec_num = int(round(seconds_decimal * sec_den))
                return ((int(degrees), 1), (int(minutes), 1), (sec_num, sec_den))

            lat_dms = deg_to_dms_rational(lat_deg)
            lon_dms = deg_to_dms_rational(lon_deg)
            if lat_dms is None or lon_dms is None:
                logging.warning(f"{log_prefix} Could not convert lat/lon to DMS rational.")
                return None

            gps_tags = {}
            # Use standard GPS tag names as keys for the nested dictionary
            gps_tags['GPSVersionID'] = (0, 0, 0, 2)  # Match reference BYTE tuple
            gps_tags['GPSLatitudeRef'] = 'N' if lat_deg >= 0 else 'S'
            gps_tags['GPSLatitude'] = lat_dms
            gps_tags['GPSLongitudeRef'] = 'E' if lon_deg >= 0 else 'W'
            gps_tags['GPSLongitude'] = lon_dms
            gps_tags['GPSAltitudeRef'] = 0  # Above Sea Level
            gps_tags['GPSAltitude'] = (0, 1)  # Rational 0/1
            gps_tags['GPSTimeStamp'] = ((0, 1), (0, 1), (0, 1))  # Match reference 00:00:00
            gps_tags['GPSSpeedRef'] = 'K'  # km/h
            gps_tags['GPSSpeed'] = (0, 1)  # undef -> 0/1
            gps_tags['GPSTrackRef'] = 'T'  # True North
            gps_tags['GPSTrack'] = (0, 1)  # 0 degrees -> 0/1
            gps_tags['GPSImgDirectionRef'] = 'T'  # True North
            img_direction_norm = img_direction % 360.0
            # Whole-degree rational; fractional degrees are rounded away (matches reference)
            gps_tags['GPSImgDirection'] = (int(round(img_direction_norm * 1)), 1)

            logging.debug(f"{log_prefix} Prepared GPS tags like reference (name: value).")
            return gps_tags
        except Exception as e:
            logging.exception(f"{log_prefix} Error preparing GPS tags like reference: {e}")
            return None

    # Helper to determine tag type/count/value (Only needed for extratags)
    def _get_tag_dtype_count_value(
        self, code, value
    ) -> Tuple[Optional[int], Optional[int], Any]:
        """Helper to determine TIFF numeric dtype code, count, and processed value FOR EXTRATAGS."""
        # Use standard TIFF numeric type codes directly
        # 1:BYTE, 2:ASCII, 3:SHORT, 4:LONG, 5:RATIONAL, 7:UNDEFINED, 12:DOUBLE
        dtype_code = None
        count = 1
        processed_value = value
        try:
            # This function now likely only processes byte-based tags for extratags
            if isinstance(value, bytes):
                dtype_code = 7 if code in [USER_COMMENT_TAG, EXIF_VERSION_TAG] else 1  # UNDEFINED or BYTE
                count = len(value)
                processed_value = value  # Already bytes
            # --- Keep other types in case we need to put more in extratags later ---
            elif isinstance(value, str):
                dtype_code = 2
                processed_value = value.encode('ascii', errors='ignore')
                count = len(processed_value) + 1  # +1 for ASCII NUL terminator
            elif isinstance(value, int):
                dtype_code = 4 if code in [GPS_IFD_TAG, EXIF_IFD_TAG] else 3
            elif isinstance(value, float):
                dtype_code = 12
            elif isinstance(value, (tuple, list)):
                # (rational, sequence handling - adjust if needed for extratags)
                if code in [2, 4, 6, 17]:  # GPS Rationals
                    flat_list = []
                    is_rational_format = True
                    if not isinstance(value, tuple):
                        is_rational_format = False
                    else:
                        for item in value:
                            if not (isinstance(item, tuple) and len(item) == 2
                                    and isinstance(item[0], int) and isinstance(item[1], int)):
                                is_rational_format = False
                                break
                            flat_list.extend(item)
                    if is_rational_format:
                        dtype_code = 5
                        count = len(value)
                        processed_value = tuple(flat_list)
                    else:
                        logging.warning(f"Tag {code}: Invalid rational format for extratags: {value}")
                        return None, None, None
                elif code == 0:  # GPS Version
                    dtype_code = 1
                    count = len(value)
                    processed_value = tuple(value)
                elif code == GEO_KEY_DIRECTORY_TAG:
                    dtype_code = 3
                    count = len(value)
                    processed_value = tuple(value)
                elif code == MODEL_TRANSFORMATION_TAG:
                    if len(value) == 16:
                        dtype_code = 12
                        count = 16
                        processed_value = tuple(float(v) for v in value)
                    else:
                        logging.warning(f"Tag {code}: Invalid ModelTransform for extratags")
                        return None, None, None
                else:
                    logging.warning(f"Tag {code}: Unhandled sequence type for extratags: {type(value)}")
                    return None, None, None
            else:
                logging.warning(f"Tag {code}: Unhandled value type for extratags: {type(value)}")
                return None, None, None

            if dtype_code is None:
                logging.warning(f"Tag {code}: Could not determine dtype for extratags.")
                return None, None, None
            return dtype_code, count, processed_value
        except Exception as e:
            logging.exception(f"Error determining dtype/count for extratag {code}: {e}")
            return None, None, None

    def _cleanup_old_recordings(self):
        """Removes the oldest SAR recordings if the count exceeds the configured limit."""
        log_prefix = f"{self._log_prefix} Cleanup"
        max_files = config.DEFAULT_MAX_SAR_RECORDINGS
        if max_files <= 0:
            return  # Retention disabled
        logging.debug(f"{log_prefix} Checking '{self.recording_dir}' for recordings older than the newest {max_files}.")
        try:
            if not self.recording_dir.is_dir():
                return
            tif_files = []
            pattern = "*_SAR_geo_img*.tif"
            for item in self.recording_dir.glob(pattern):
                if item.is_file():
                    try:
                        mtime = item.stat().st_mtime
                        tif_files.append((item, mtime))
                    except Exception as stat_e:
                        logging.warning(f"{log_prefix} Could not stat file '{item.name}': {stat_e}")
            current_file_count = len(tif_files)
            if current_file_count <= max_files:
                return
            # Oldest first by modification time
            tif_files.sort(key=lambda x: x[1])
            num_to_delete = current_file_count - max_files
            files_to_delete = tif_files[:num_to_delete]
            logging.debug(f"{log_prefix} Need to delete {num_to_delete} oldest recordings.")
            deleted_count = 0
            for file_path, _ in files_to_delete:
                try:
                    file_path.unlink()
                    logging.debug(f"{log_prefix} Deleted old recording: {file_path.name}")
                    deleted_count += 1
                except Exception as delete_e:
                    logging.error(f"{log_prefix} Failed to delete file '{file_path.name}': {delete_e}")
            logging.debug(f"{log_prefix} Cleanup finished. Deleted {deleted_count}/{num_to_delete} files.")
        except Exception:
            logging.exception(f"{log_prefix} Error during recording cleanup process:")

# --- END OF FILE image_recorder.py ---