rework
droideck committed Jan 7, 2025
1 parent 92ccd32 commit 6c35974
Showing 2 changed files with 487 additions and 1,066 deletions.
105 changes: 15 additions & 90 deletions dirsrvtests/tests/suites/replication/log_analysis_report_test.py
@@ -12,6 +12,7 @@
import pytest
import logging
import tempfile
+
from lib389.tasks import *
from lib389.utils import *
from lib389.topologies import topology_m4 as topo_m4
@@ -44,7 +45,7 @@ def test_replication_analysis_report(topo_m4):
        5. Collect and analyze logs
        6. Generate comprehensive report
        7. Verify CSNs in reports
-        8. Verify report files and lag times
+        8. Verify report files
        9. Cleanup temporary directory
    :expectedresults:
        1. Temporary directory should be created
@@ -54,7 +55,7 @@ def test_replication_analysis_report(topo_m4):
        5. Logs should be collected and analyzed
        6. Report should be generated in all formats
        7. CSNs should be found in generated reports
-        8. Report files should exist and contain valid lag times
+        8. Report files should exist and be valid
        9. Cleanup should complete successfully
    """
    # Create temporary directory
Expand Down Expand Up @@ -104,7 +105,7 @@ def test_replication_analysis_report(topo_m4):
repl.wait_for_replication(m1, m3)
repl.wait_for_replication(m1, m4)

# Ensure logs are flushed
# Ensure logs are flushed (restarts can help commit logs)
for supplier in [m1, m2, m3, m4]:
supplier.restart()

@@ -120,13 +121,13 @@ def test_replication_analysis_report(topo_m4):
        anonymous=False
    )

-    # Generate reports
+    # First parse the logs to populate internal structures
+    analyzer.parse_logs()
+
+    # Then generate the reports
    generated_files = analyzer.generate_report(
        output_dir=tmp_dir,
        formats=['csv', 'html'],
-        start_time=None,
-        end_time=None,
-        repl_lag_threshold=1.0,
        report_name='replication_test_report'
    )
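
The reworked flow above splits analysis into an explicit parse step followed by report generation, and drops the time-window and lag-threshold parameters from the call. A minimal standalone sketch of the same sequence; the import path, class name, and constructor arguments here are assumptions for illustration (the diff only confirms the anonymous=False keyword, parse_logs(), and generate_report() with these keywords):

    import tempfile

    # Assumed import path and class name -- not shown in this hunk.
    from lib389.repltools import ReplicationLogAnalyzer

    with tempfile.TemporaryDirectory() as out_dir:
        # Constructor arguments are illustrative only.
        analyzer = ReplicationLogAnalyzer(
            log_dirs=['/var/log/dirsrv/slapd-supplier1'],  # hypothetical path
            anonymous=False
        )
        analyzer.parse_logs()                   # populate internal structures first
        generated = analyzer.generate_report(   # then render the reports
            output_dir=out_dir,
            formats=['csv', 'html'],
            report_name='replication_test_report'
        )
        print(generated)
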

Expand All @@ -149,20 +150,11 @@ def test_replication_analysis_report(topo_m4):
assert 'updates_by_suffix' in summary, "Missing updates by suffix"
assert DEFAULT_SUFFIX in summary['updates_by_suffix'], "Missing default suffix stats"

# Verify lag time calculations
assert summary['minimum_lag'] >= 0, "Negative minimum lag time found"
assert summary['maximum_lag'] >= summary['minimum_lag'], "Maximum lag less than minimum"
assert summary['average_lag'] >= summary['minimum_lag'], "Average lag less than minimum"
assert summary['average_lag'] <= summary['maximum_lag'], "Average lag greater than maximum"

# Verify reasonable lag times
assert summary['maximum_lag'] < 60, f"Excessive replication lag detected: {summary['maximum_lag']} seconds"

# Verify report files
expected_files = {
'csv': ('replication_test_report.csv', 150), # Increased min size for new format
'html': ('replication_test_report.html', 4096), # Increased for subplot structure
'summary': ('replication_test_report_summary.json', 200)
'csv': ('replication_test_report.csv', 100), # Min size for CSV
'html': ('replication_test_report.html', 2048), # Min size for HTML
'summary': ('replication_test_report_summary.json', 200) # Min size for JSON
}

for fmt, (filename, min_size) in expected_files.items():
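
The loop body that consumes these (filename, minimum size) pairs is collapsed in the view above. A sketch of what such a size gate typically looks like, using only the Python standard library:

    import os

    def verify_report_file(tmp_dir, filename, min_size):
        """Assert that a generated report exists and meets a minimum size."""
        filepath = os.path.join(tmp_dir, filename)
        assert os.path.exists(filepath), f"Missing report file: {filepath}"
        actual_size = os.path.getsize(filepath)
        assert actual_size >= min_size, (
            f"{filename} is {actual_size} bytes; expected at least {min_size}"
        )
        return filepath
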
@@ -183,69 +175,16 @@ def test_replication_analysis_report(topo_m4):
            with open(filepath, 'r') as f:
                lines = f.readlines()
                assert len(lines) > 1, "CSV has no data rows"
-
-                # Verify headers match new format
-                expected_headers = [
-                    'Timestamp', 'Server', 'CSN', 'Suffix', 'Target DN',
-                    'Lag Time (s)', 'Duration (s)', 'Operation Type'
-                ]
-                headers = [h.strip() for h in lines[0].strip().split(',')]
-                assert headers == expected_headers, (
-                    f"CSV headers mismatch. Expected: {expected_headers}, "
-                    f"Got: {headers}"
-                )
-
-                # Verify lag time values
-                lag_times = []
-                durations = []
-                for line in lines[1:]:  # Skip header
-                    fields = line.strip().split(',')
-                    try:
-                        lag_time = float(fields[headers.index('Lag Time (s)')])
-                        duration = float(fields[headers.index('Duration (s)')])
-                        lag_times.append(lag_time)
-                        durations.append(duration)
-
-                        # Verify value constraints
-                        assert lag_time >= 0, f"Negative lag time found: {lag_time}"
-                        assert duration >= 0, f"Negative duration found: {duration}"
-                    except (ValueError, IndexError) as e:
-                        assert False, f"Invalid time format: {e}"
-
-                # Verify lag time statistics match summary
-                if lag_times:
-                    assert abs(min(lag_times) - summary['minimum_lag']) < 0.001, \
-                        "CSV minimum lag doesn't match summary"
-                    assert abs(max(lag_times) - summary['maximum_lag']) < 0.001, \
-                        "CSV maximum lag doesn't match summary"
-                    csv_avg = sum(lag_times) / len(lag_times)
-                    assert abs(csv_avg - summary['average_lag']) < 0.001, \
-                        "CSV average lag doesn't match summary"
+                # Check for at least some expected header columns
+                # The entire row might be: Timestamp,Server,CSN,Suffix,Target DN,Lag Time (s),Duration (s),Operation Etime
+                assert 'Timestamp,Server,CSN' in lines[0], "CSV missing expected headers"

        elif fmt == 'html':
            with open(filepath, 'r') as f:
                content = f.read()
                # Basic structure checks
                assert 'plotly' in content.lower(), "HTML missing plotly integration"
                assert 'Replication Analysis Report' in content, "HTML missing title"
-
-                # Verify new subplot structure
-                assert 'Replication Lag Times' in content, "Missing lag times subplot title"
-                assert 'Operation Durations' in content, "Missing durations subplot title"
-
-                # Verify axis labels
-                assert 'Lag Time (seconds)' in content, "Missing lag time axis label"
-                assert 'Duration (seconds)' in content, "Missing duration axis label"
-
-                # Verify interactive features
-                assert 'rangeslider' in content.lower(), "Missing range slider"
-                assert 'hoverinfo' in content.lower(), "Missing hover information"
-
-                # Verify lag time values
-                max_lag_str = str(round(summary['maximum_lag'], 3))
-                assert max_lag_str in content or \
-                    str(round(float(max_lag_str), 2)) in content, \
-                    "HTML missing maximum lag value"
        elif fmt == 'summary':
            with open(filepath, 'r') as f:
                data = json.load(f)
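
The CSV assertion above was deliberately loosened: instead of comparing the full header row against an exact list, the test now only requires a stable prefix, so trailing columns can change (note the last column is now Operation Etime rather than Operation Type). A slightly more structured variant of the same idea, sketched with the csv module rather than taken from the test:

    import csv

    def csv_has_columns(filepath, required=('Timestamp', 'Server', 'CSN')):
        """Return True if the CSV header row contains every required column."""
        with open(filepath, newline='') as f:
            header = next(csv.reader(f), [])
        return all(column in header for column in required)
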
@@ -255,15 +194,6 @@ def test_replication_analysis_report(topo_m4):
                    'average_lag', 'maximum_lag', 'minimum_lag',
                    'updates_by_suffix'
                ]), "JSON missing required fields"
-
-                # Verify lag time values in JSON
-                lag_stats = data['analysis_summary']
-                assert lag_stats['minimum_lag'] >= 0, "Negative minimum lag in JSON"
-                assert lag_stats['maximum_lag'] >= lag_stats['minimum_lag'], \
-                    "Invalid maximum lag in JSON"
-                assert lag_stats['average_lag'] >= lag_stats['minimum_lag'] and \
-                    lag_stats['average_lag'] <= lag_stats['maximum_lag'], \
-                    "Invalid average lag in JSON"

        # Verify operation counts
        updates_count = summary['total_updates']
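
With the lag-range assertions removed, the JSON branch now only checks that the required keys are present; the analysis_summary key itself appears in the deleted block above. A compact sketch that reports exactly which required fields are missing, using the field names from the hunk:

    import json

    REQUIRED_FIELDS = {'average_lag', 'maximum_lag', 'minimum_lag', 'updates_by_suffix'}

    def missing_summary_fields(filepath):
        """Return the set of required fields absent from the JSON summary file."""
        with open(filepath) as f:
            data = json.load(f)
        return REQUIRED_FIELDS - set(data.get('analysis_summary', {}))
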
@@ -273,11 +203,6 @@ def test_replication_analysis_report(topo_m4):
        f"got {updates_count}"
    )

-    # Verify replication completion
-    for supplier in [m2, m3, m4]:
-        repl_lag = summary['updates_by_suffix'][DEFAULT_SUFFIX]
-        assert repl_lag > 0, f"No replication lag recorded for {supplier.serverid}"

    finally:
        # Cleanup
        try:
@@ -299,4 +224,4 @@
    # Run isolated
    # -s for DEBUG mode
    CURRENT_FILE = os.path.realpath(__file__)
-    pytest.main("-s %s" % CURRENT_FILE)
\ No newline at end of file
+    pytest.main("-s %s" % CURRENT_FILE)