"""Test UCC complete counter implementation."""
|
|
|
|
from pathlib import Path
|
|
import sys
|
|
|
|
sys.path.insert(0, str(Path(__file__).parent.parent))
|
|
|
|
from pyucc.core.ucc_complete_counter import UCCCompleteCounter
|
|
|
|
|
|
def test_single_file():
    """Test complete counter on single file."""

    # Hard-coded sample file; adjust this path when running on another machine.
    test_file = Path(
        r"C:\__temp\Metrics\_25_10\REP\Projects\DSP\ChimeraMCK\dev\mck_c66\mck_c66_boot_multicore.c"
    )

    if not test_file.exists():
        print(f"ERROR: Test file not found: {test_file}")
        return

    print(f"Testing: {test_file.name}")
    print("=" * 80)

    counter = UCCCompleteCounter(language="C")
    result = counter.analyze_file(test_file)

    print("\nPyUCC Complete Counter Results:")
    print(f"  Total Lines:         {result['total_lines']:>6}")
    print(f"  Blank Lines:         {result['blank_lines']:>6}")
    print(f"  Comments (Whole):    {result['comment_whole']:>6}")
    print(f"  Comments (Embedded): {result['comment_embedded']:>6}")
    print(f"  Compiler Directives: {result['compiler_directives']:>6}")
    print(f"  Data Declarations:   {result['data_declarations']:>6}")
    print(f"  Exec Instructions:   {result['exec_instructions']:>6}")
    print(f"  Logical SLOC:        {result['logical_sloc']:>6}")
    print(f"  Physical SLOC:       {result['physical_sloc']:>6}")

    print("\n" + "=" * 80)
    print("UCC Expected Values:")
    # Reference counts reported by UCC for the same source file.
    ucc_expected = {
        "total_lines": 402,
        "blank_lines": 86,
        "comment_whole": 45,
        "comment_embedded": 13,
        "compiler_directives": 22,
        "data_declarations": 57,
        "exec_instructions": 127,
        "logical_sloc": 206,
        "physical_sloc": 271,
    }

    for key, expected in ucc_expected.items():
        actual = result[key]
        diff = actual - expected
        # Per-metric accuracy; guard against division by zero when UCC expects 0.
        accuracy = (
            100 * (1 - abs(diff) / expected)
            if expected > 0
            else (100 if diff == 0 else 0)
        )

        # Status: off by at most 2 lines -> match, off by more than 10 -> miss, otherwise close.
        status = "✓" if abs(diff) <= 2 else "✗" if abs(diff) > 10 else "~"
        print(
            f"  {key:25s} UCC:{expected:>6} PyUCC:{actual:>6} Diff:{diff:>+6} {accuracy:>5.1f}% {status}"
        )

    # Calculate overall accuracy
    total_diff = sum(abs(result[k] - ucc_expected[k]) for k in ucc_expected)
    total_expected = sum(ucc_expected.values())
    overall_accuracy = 100 * (1 - total_diff / total_expected)

    print("\n" + "=" * 80)
    print(f"Overall accuracy: {overall_accuracy:.1f}%")

    if overall_accuracy >= 95:
        print(">> EXCELLENT! Target 95%+ achieved!")
    elif overall_accuracy >= 90:
        print(">> GOOD! Target 90%+ achieved!")
    elif overall_accuracy >= 85:
        print(">> FAIR - Close to target")
    else:
        print(">> NEEDS WORK - Below 85% accuracy")


if __name__ == "__main__":
    test_single_file()