chore: Extend benchmark to test 5000 entities
Validate SpatialHash scalability with larger entity counts. Results at 5,000 entities: - N×N visibility: 216.9× faster (431ms → 2ms) - Single query: 37.4× faster (0.11ms → 0.003ms) 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
parent
7d57ce2608
commit
366ccecb7d
1 changed file with 2 additions and 2 deletions
|
|
@ -29,8 +29,8 @@ GRID_SIZE = (100, 100) # 10,000 cells - entities will actually see each other
|
||||||
# Full suite - may timeout on large counts due to O(n²) visibility
|
# Full suite - may timeout on large counts due to O(n²) visibility
|
||||||
# ENTITY_COUNTS = [100, 500, 1000, 2500, 5000, 10000]
|
# ENTITY_COUNTS = [100, 500, 1000, 2500, 5000, 10000]
|
||||||
|
|
||||||
# Quick suite for initial baseline (on 100x100 grid, these give densities of 1-20%)
|
# Extended suite to validate scalability (on 100x100 grid)
|
||||||
ENTITY_COUNTS = [100, 500, 1000, 2000]
|
ENTITY_COUNTS = [100, 500, 1000, 2000, 5000]
|
||||||
QUERY_RADIUS = 15 # Smaller radius for smaller grid
|
QUERY_RADIUS = 15 # Smaller radius for smaller grid
|
||||||
MOVEMENT_PERCENT = 0.10 # 10% of entities move each frame
|
MOVEMENT_PERCENT = 0.10 # 10% of entities move each frame
|
||||||
N2N_SAMPLE_SIZE = 50 # Sample size for N×N visibility test
|
N2N_SAMPLE_SIZE = 50 # Sample size for N×N visibility test
|
||||||
|
|
|
||||||
Loading…
Add table
Add a link
Reference in a new issue