import json
import logging
import os
from datetime import datetime
from pathlib import Path

from leo_grifo_core import theRecorder
from leo_grifo_pdf2report import PDFClass


class testReport:
    __DEFAULT_TEMPLATE = 'default_template.json'

    def __init__(self, script_file):
        cwd = os.path.dirname(__file__)
        self.__pdf_report_folder = os.path.join(cwd, "..", "pdf_reports")
        script_dirname = os.path.dirname(script_file)
        script_basename = os.path.basename(script_file)
        json_file = os.path.join(
            script_dirname, "..", "json", script_basename.replace(".py", ".json")
        )
        # Load the shared report template and the per-test descriptor,
        # closing both files once parsed (the originals were never closed).
        with open(os.path.join(cwd, 'json', self.__DEFAULT_TEMPLATE)) as f:
            self.json_template = json.load(f)
        with open(json_file) as t:
            self.json_test = json.load(t)
        self.__start = datetime.now()
        self.add_comment(f"Start Time: {self.__start}")
        self.__session = None
        self.__custom_statistics = None  # Hook for custom statistics data

    def title(self):
        return self.json_test.get('ID', '')

    def add_comment(self, text):
        theRecorder.add_step(text, True, None, None)
        logging.info(text)

    def cur_session(self):
        return self.__session

    def open_session(self, name):
        self.__session = f'{self.title()} : {name}'

    def close_session(self):
        if self.__session is not None:
            theRecorder.close_session(self.__session)
            self.__session = None

    def set_custom_statistics(self, statistics_data):
        """
        Set custom statistics data to be rendered as dedicated PDF sections.

        Args:
            statistics_data: Dictionary with keys 'repetitions' (per-run data)
                and 'aggregate' (global statistics).

        This allows tests to provide structured statistics data that is
        rendered as professional PDF tables in dedicated sections, separate
        from the step-by-step execution logs.
        """
        self.__custom_statistics = statistics_data
        logging.info("[Report] Custom statistics data registered for PDF generation")

    def get_pdf_folder(self):
        """
        Get the folder path where PDF reports are saved.
        Returns:
            Full path to the PDF report folder for this test.
        """
        return os.path.join(self.__pdf_report_folder,
                            self.json_test.get('ID', 'not_identified'))

    def generate_pdf(self):
        self.__stop = datetime.now()
        # Bug fix: log the stop time, not the start time again.
        self.add_comment(f"Stop Time: {self.__stop}")
        elapsed_time = self.__stop - self.__start
        self.add_comment(f"Test execution time: {elapsed_time}")
        self.close_session()
        logging.getLogger().setLevel(logging.ERROR)

        my_pdf = PDFClass(elapsed_time)
        doc = my_pdf.start_doc(self.json_template)
        file_name = my_pdf.pdf_name(self.json_template, '')
        my_pdf.pdf_preamble(doc, self.json_test)
        my_pdf.pdf_test_information_section(doc, self.json_test)

        session = theRecorder.get_db_session()
        # Render a pass/fail summary only when more than one session was recorded.
        if len(session) > 1:
            tot = 0
            ok = 0
            for v in session:
                tot += 1
                if v.test['fail'] == 0:
                    ok += 1
            my_pdf.pdf_step_summary(ok, tot - ok, doc)
        for s in session:
            my_pdf.pdf_step_result(s.test['name'], s.test['pass'], s.test['fail'], doc)
            step_list = theRecorder.get_db()
            my_pdf.pdf_step_execution(step_list[s.idx_first:s.idx_last], doc)

        # Generate custom statistics sections if provided by the test.
        if self.__custom_statistics is not None:
            logging.info("[Report] Generating custom statistics sections")
            # Machine Reliability Assessment (executive summary); both keys
            # are expected per the set_custom_statistics() contract.
            my_pdf.pdf_add_machine_reliability_assessment(
                doc,
                self.__custom_statistics['aggregate'],
                self.__custom_statistics['repetitions'])
            # Per-Run Summary Table (1553 + Serial for each run).
            if 'repetitions' in self.__custom_statistics:
                my_pdf.pdf_add_per_run_summary(doc, self.__custom_statistics['repetitions'])
            # Global Aggregate Statistics (combined 1553 + Serial).
            if 'aggregate' in self.__custom_statistics:
                my_pdf.pdf_add_global_statistics(doc, self.__custom_statistics['aggregate'])

        # Reuse the single source of truth for the output folder and create
        # it idempotently (replaces the racy is_dir()/makedirs() pair).
        folder_path = self.get_pdf_folder()
        os.makedirs(folder_path, exist_ok=True)
        my_pdf.pdf_generate(folder_path, doc, file_name)
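

# ---------------------------------------------------------------------------
# Usage sketch (illustrative only): how a test script might drive testReport.
# Assumes the calling script sits next to a json/ folder containing a matching
# <script>.json descriptor, as __init__ expects. The key names inside the
# 'repetitions' and 'aggregate' entries ('run', 'errors_1553', ...) are
# hypothetical placeholders; only the top-level 'repetitions' and 'aggregate'
# keys are part of the set_custom_statistics() contract.
# ---------------------------------------------------------------------------
if __name__ == '__main__':
    report = testReport(__file__)

    # One recorded session with a logged step.
    report.open_session('smoke run')
    report.add_comment('Sending test frames...')
    report.close_session()

    # Optional structured statistics, rendered as dedicated PDF sections.
    report.set_custom_statistics({
        'repetitions': [
            {'run': 1, 'errors_1553': 0, 'errors_serial': 2},
            {'run': 2, 'errors_1553': 1, 'errors_serial': 0},
        ],
        'aggregate': {'total_runs': 2, 'total_errors': 3},
    })

    report.generate_pdf()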