- Add --exec flag to execute multiple scripts before main program
- Scripts are executed in order and share Python interpreter state
- Implement full PyAutoGUI-compatible automation API in McRFPy_Automation
- Add screenshot, mouse control, keyboard input capabilities
- Fix Python initialization issues when multiple scripts are loaded
- Update CommandLineParser to handle --exec with proper sys.argv management
- Add comprehensive examples and documentation

This enables automation testing by allowing test scripts to run alongside games using the same Python environment. The automation API provides event injection into the SFML render loop for UI testing.

Closes #32 partially (Python interpreter emulation)
References automation testing requirements
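As an illustration of how a test script passed via --exec might drive the automation API, here is a minimal sketch. It assumes the PyAutoGUI-compatible surface is importable as mcrfpy.automation and exposes PyAutoGUI-style names (screenshot, click, typewrite); the module path, function names, and the script name are assumptions drawn from the description above, not a verbatim copy of the shipped examples.

#!/usr/bin/env python3
"""Hypothetical automation smoke test, run via:
./mcrogueface game.py --exec automation_smoke_test.py
Assumes the McRFPy_Automation API is exposed as mcrfpy.automation."""
import mcrfpy
from mcrfpy import automation  # assumed module path for McRFPy_Automation

def run_smoke_test():
    # Capture the current frame to a file for later inspection
    automation.screenshot("smoke_test.png")
    # Inject a mouse click near the top-left of the window
    automation.click(100, 100)
    # Inject keyboard input as if typed by the user
    automation.typewrite("hello")
    print(f"Automation: smoke test ran on scene '{mcrfpy.currentScene()}'")
    mcrfpy.delTimer("automation_smoke_test")

# Defer the test until the render loop is running, using the same
# timer API the monitoring script below relies on (fires after 2 seconds).
mcrfpy.setTimer("automation_smoke_test", run_smoke_test, 2000)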
#!/usr/bin/env python3
"""
Example monitoring script that works alongside automation
Usage: ./mcrogueface game.py --exec example_automation.py --exec example_monitoring.py
"""
import mcrfpy
import time


class PerformanceMonitor:
    def __init__(self):
        self.start_time = time.time()
        self.frame_samples = []
        self.scene_changes = []
        self.last_scene = None
        print("Monitor: Performance monitoring initialized")

    def collect_metrics(self):
        """Collect performance and state metrics"""
        current_frame = mcrfpy.getFrame()
        current_time = time.time() - self.start_time
        current_scene = mcrfpy.currentScene()

        # Track frame rate
        if len(self.frame_samples) > 0:
            last_frame, last_time = self.frame_samples[-1]
            fps = (current_frame - last_frame) / (current_time - last_time)
            print(f"Monitor: FPS = {fps:.1f}")

        self.frame_samples.append((current_frame, current_time))

        # Track scene changes
        if current_scene != self.last_scene:
            print(f"Monitor: Scene changed from '{self.last_scene}' to '{current_scene}'")
            self.scene_changes.append((current_time, self.last_scene, current_scene))
            self.last_scene = current_scene

        # Keep only last 100 samples
        if len(self.frame_samples) > 100:
            self.frame_samples = self.frame_samples[-100:]

    def generate_report(self):
        """Generate a summary report"""
        if len(self.frame_samples) < 2:
            return

        total_frames = self.frame_samples[-1][0] - self.frame_samples[0][0]
        total_time = self.frame_samples[-1][1] - self.frame_samples[0][1]
        avg_fps = total_frames / total_time

        print("\n=== Performance Report ===")
        print(f"Monitor: Total time: {total_time:.1f} seconds")
        print(f"Monitor: Total frames: {total_frames}")
        print(f"Monitor: Average FPS: {avg_fps:.1f}")
        print(f"Monitor: Scene changes: {len(self.scene_changes)}")

        # Stop monitoring
        mcrfpy.delTimer("performance_monitor")


# Create monitor instance
monitor = PerformanceMonitor()

# Register monitoring timer (runs every 500ms)
mcrfpy.setTimer("performance_monitor", monitor.collect_metrics, 500)

# Register report generation (runs after 30 seconds)
mcrfpy.setTimer("performance_report", monitor.generate_report, 30000)

print("Monitor: Script loaded - collecting metrics every 500ms")
print("Monitor: Will generate report after 30 seconds")