Timer overhaul: update tests

John McCardle 2026-01-03 22:44:53 -05:00
commit cec76b63dc
78 changed files with 521 additions and 495 deletions

@@ -5,13 +5,17 @@ import mcrfpy
import sys
import time
def test_metrics(runtime):
# Track success across callbacks
success = True
def test_metrics(timer, runtime):
"""Test the metrics after timer starts"""
global success
print("\nRunning metrics test...")
# Get metrics
metrics = mcrfpy.getMetrics()
print("\nPerformance Metrics:")
print(f" Frame Time: {metrics['frame_time']:.2f} ms")
print(f" Avg Frame Time: {metrics['avg_frame_time']:.2f} ms")
@@ -21,17 +25,16 @@ def test_metrics(runtime):
print(f" Visible Elements: {metrics['visible_elements']}")
print(f" Current Frame: {metrics['current_frame']}")
print(f" Runtime: {metrics['runtime']:.2f} seconds")
# Test that metrics are reasonable
success = True
# Frame time should be positive
if metrics['frame_time'] <= 0:
print(" FAIL: Frame time should be positive")
success = False
else:
print(" PASS: Frame time is positive")
# FPS should be reasonable (between 1 and 20000 in headless mode)
# In headless mode, FPS can be very high since there's no vsync
if metrics['fps'] < 1 or metrics['fps'] > 20000:
@@ -39,72 +42,76 @@ def test_metrics(runtime):
success = False
else:
print(f" PASS: FPS {metrics['fps']} is reasonable")
# UI elements count (may be 0 if scene hasn't rendered yet)
if metrics['ui_elements'] < 0:
print(f" FAIL: UI elements count {metrics['ui_elements']} is negative")
success = False
else:
print(f" PASS: UI element count {metrics['ui_elements']} is valid")
# Visible elements should be <= total elements
if metrics['visible_elements'] > metrics['ui_elements']:
print(" FAIL: Visible elements > total elements")
success = False
else:
print(" PASS: Visible element count is valid")
# Current frame should be > 0
if metrics['current_frame'] <= 0:
print(" FAIL: Current frame should be > 0")
success = False
else:
print(" PASS: Current frame is positive")
# Runtime should be > 0
if metrics['runtime'] <= 0:
print(" FAIL: Runtime should be > 0")
success = False
else:
print(" PASS: Runtime is positive")
# Test metrics update over multiple frames
print("\n\nTesting metrics over multiple frames...")
# Store initial metrics for comparison
initial_frame = metrics['current_frame']
initial_runtime = metrics['runtime']
# Schedule another check after 100ms
def check_later(runtime2):
def check_later(timer2, runtime2):
global success
metrics2 = mcrfpy.getMetrics()
print(f"\nMetrics after 100ms:")
print(f" Frame Time: {metrics2['frame_time']:.2f} ms")
print(f" Avg Frame Time: {metrics2['avg_frame_time']:.2f} ms")
print(f" FPS: {metrics2['fps']}")
print(f" Current Frame: {metrics2['current_frame']}")
# Frame count should have increased
if metrics2['current_frame'] > metrics['current_frame']:
if metrics2['current_frame'] > initial_frame:
print(" PASS: Frame count increased")
else:
print(" FAIL: Frame count did not increase")
nonlocal success
success = False
# Runtime should have increased
if metrics2['runtime'] > metrics['runtime']:
if metrics2['runtime'] > initial_runtime:
print(" PASS: Runtime increased")
else:
print(" FAIL: Runtime did not increase")
success = False
print("\n" + "="*50)
if success:
print("ALL METRICS TESTS PASSED!")
else:
print("SOME METRICS TESTS FAILED!")
sys.exit(0 if success else 1)
mcrfpy.setTimer("check_later", check_later, 100)
mcrfpy.Timer("check_later", check_later, 100, once=True)
# Set up test scene
print("Setting up metrics test scene...")
@@ -136,4 +143,4 @@ ui.append(grid)
print(f"Created {len(ui)} UI elements (1 invisible)")
# Schedule test to run after render loop starts
mcrfpy.setTimer("test", test_metrics, 50)
mcrfpy.Timer("test", test_metrics, 50, once=True)