"""Test differ consistency: multiple differ runs should give identical results."""
import os
import sys
import shutil
import tempfile
from pathlib import Path

# Create a test project structure
temp_project = tempfile.mkdtemp(prefix="test_project_")

try:
    # Create some test files
    (Path(temp_project) / "module1.py").write_text('''"""Module 1."""

def function_a():
    """Function A."""
    return 42

def function_b():
    """Function B."""
    x = 10
    return x * 2
''', encoding='utf-8')

(Path(temp_project) / "module2.py").write_text('''"""Module 2."""
|
|
|
|
class MyClass:
|
|
"""A test class."""
|
|
|
|
def method1(self):
|
|
"""Method 1."""
|
|
pass
|
|
|
|
def method2(self):
|
|
"""Method 2."""
|
|
return "test"
|
|
''', encoding='utf-8')
|
|
|
|
print(f"Test project: {temp_project}")
|
|
print()
|
|
|
|
# Import modules
|
|
from pyucc.core.differ import BaselineManager
|
|
from pyucc.core.countings_impl import _COUNTING_CACHE
|
|
|
|
    # Create baseline manager
    baseline_dir = tempfile.mkdtemp(prefix="test_baselines_")
    bm = BaselineManager(temp_project, baselines_root=baseline_dir)

    print("Creating baseline 1...")
    baseline_id_1 = bm.create_baseline_from_dir(temp_project, baseline_id="test_baseline_1", snapshot=False)
    print(f"Created: {baseline_id_1}")
    print(f"Cache size after baseline 1: {len(_COUNTING_CACHE)}")

    # Load metadata
    meta1 = bm.load_metadata(baseline_id_1)
    print(f"Files in baseline 1: {len(meta1.files)}")

    # Extract countings from first baseline
    countings_1 = {}
    for fm in meta1.files:
        if fm.countings:
            countings_1[fm.path] = (
                fm.countings['physical_lines'],
                fm.countings['code_lines'],
                fm.countings['comment_lines'],
                fm.countings['blank_lines']
            )

    print("Baseline 1 countings:")
    for path, counts in sorted(countings_1.items()):
        print(f" {path}: physical={counts[0]}, code={counts[1]}, comment={counts[2]}, blank={counts[3]}")
    print()

    # Create a second baseline from the SAME unchanged files
    print("Creating baseline 2 from SAME files (cache should be used)...")
    baseline_id_2 = bm.create_baseline_from_dir(temp_project, baseline_id="test_baseline_2", snapshot=False)
    print(f"Created: {baseline_id_2}")
    print(f"Cache size after baseline 2: {len(_COUNTING_CACHE)}")

    meta2 = bm.load_metadata(baseline_id_2)
    print(f"Files in baseline 2: {len(meta2.files)}")

    # Extract countings from second baseline
    countings_2 = {}
    for fm in meta2.files:
        if fm.countings:
            countings_2[fm.path] = (
                fm.countings['physical_lines'],
                fm.countings['code_lines'],
                fm.countings['comment_lines'],
                fm.countings['blank_lines']
            )

    print("Baseline 2 countings:")
    for path, counts in sorted(countings_2.items()):
        print(f" {path}: physical={counts[0]}, code={counts[1]}, comment={counts[2]}, blank={counts[3]}")
    print()

    # Compare baselines
    print("Comparing baselines...")
    all_files = set(countings_1.keys()) | set(countings_2.keys())
    differences = []

    for path in sorted(all_files):
        c1 = countings_1.get(path)
        c2 = countings_2.get(path)

        if c1 != c2:
            differences.append((path, c1, c2))

    if differences:
        print(f"❌ FAIL: Found {len(differences)} differences:")
        for path, c1, c2 in differences:
            print(f" {path}:")
            print(f" Baseline 1: {c1}")
            print(f" Baseline 2: {c2}")
    else:
        print("✅ SUCCESS: All countings are identical between baselines!")
        print(" Same content always produces same results, regardless of when analyzed.")

    print()
    print(f"Final cache size: {len(_COUNTING_CACHE)} entries")
    print("(Should be 2 entries for 2 unique files, reused across both baselines)")

finally:
    # Cleanup
    shutil.rmtree(temp_project, ignore_errors=True)
    if 'baseline_dir' in locals():
        shutil.rmtree(baseline_dir, ignore_errors=True)