This commit is contained in:
2026-02-03 02:53:46 +02:00
parent 57106df5ae
commit f46bf16428
5 changed files with 1243 additions and 35 deletions

View File

@@ -30,7 +30,20 @@
"Bash(pip3 install numpy)",
"Bash(echo:*)",
"Bash(find:*)",
"Bash(node -c:*)"
"Bash(node -c:*)",
"Bash(curl:*)",
"Bash(head -3 python3 -c \"import numpy; print\\(numpy.__file__\\)\")",
"Bash(pip3 install:*)",
"Bash(apt list:*)",
"Bash(dpkg:*)",
"Bash(sudo apt-get install:*)",
"Bash(docker:*)",
"Bash(~/.local/bin/pip install:*)",
"Bash(pgrep:*)",
"Bash(kill:*)",
"Bash(sort:*)",
"Bash(journalctl:*)",
"Bash(pkill:*)"
]
}
}

File diff suppressed because it is too large Load Diff

View File

@@ -69,8 +69,16 @@ async def calculate_coverage(request: CoverageRequest) -> CoverageResponse:
start_time = time.time()
cancel_token = CancellationToken()
# Dynamic timeout based on radius (large radius needs more time for tiled processing)
radius_m = request.settings.radius
if radius_m > 30_000:
calc_timeout = 600.0 # 10 min for 30-50km
elif radius_m > 10_000:
calc_timeout = 480.0 # 8 min for 10-30km
else:
calc_timeout = 300.0 # 5 min for ≤10km
try:
# Calculate with dynamic radius-based timeout (calc_timeout)
if len(request.sites) == 1:
points = await asyncio.wait_for(
coverage_service.calculate_coverage(
@@ -78,7 +86,7 @@ async def calculate_coverage(request: CoverageRequest) -> CoverageResponse:
request.settings,
cancel_token,
),
timeout=300.0
timeout=calc_timeout,
)
else:
points = await asyncio.wait_for(
@@ -87,14 +95,15 @@ async def calculate_coverage(request: CoverageRequest) -> CoverageResponse:
request.settings,
cancel_token,
),
timeout=300.0
timeout=calc_timeout,
)
except asyncio.TimeoutError:
cancel_token.cancel()
# Force cleanup orphaned worker processes
from app.services.parallel_coverage_service import _kill_worker_processes
killed = _kill_worker_processes()
detail = f"Calculation timeout (5 min). Cleaned up {killed} workers." if killed else "Calculation timeout (5 min) — try smaller radius or lower resolution"
timeout_min = int(calc_timeout / 60)
detail = f"Calculation timeout ({timeout_min} min). Cleaned up {killed} workers." if killed else f"Calculation timeout ({timeout_min} min) — try smaller radius or lower resolution"
raise HTTPException(408, detail)
except asyncio.CancelledError:
cancel_token.cancel()

View File

@@ -180,6 +180,15 @@ async def _run_calculation(ws: WebSocket, calc_id: str, data: dict):
poller_task = asyncio.create_task(progress_poller())
# Dynamic timeout based on radius
radius_m = settings.radius
if radius_m > 30_000:
calc_timeout = 600.0 # 10 min for 30-50km
elif radius_m > 10_000:
calc_timeout = 480.0 # 8 min for 10-30km
else:
calc_timeout = 300.0 # 5 min for ≤10km
# Run calculation with timeout
start_time = time.time()
try:
@@ -190,7 +199,7 @@ async def _run_calculation(ws: WebSocket, calc_id: str, data: dict):
progress_fn=sync_progress_fn,
tile_callback=_tile_callback,
),
timeout=300.0,
timeout=calc_timeout,
)
else:
points = await asyncio.wait_for(
@@ -199,7 +208,7 @@ async def _run_calculation(ws: WebSocket, calc_id: str, data: dict):
progress_fn=sync_progress_fn,
tile_callback=_tile_callback,
),
timeout=300.0,
timeout=calc_timeout,
)
except asyncio.TimeoutError:
cancel_token.cancel()
@@ -207,7 +216,8 @@ async def _run_calculation(ws: WebSocket, calc_id: str, data: dict):
await poller_task
from app.services.parallel_coverage_service import _kill_worker_processes
_kill_worker_processes()
await ws_manager.send_error(ws, calc_id, "Calculation timeout (5 min)")
timeout_min = int(calc_timeout / 60)
await ws_manager.send_error(ws, calc_id, f"Calculation timeout ({timeout_min} min)")
return
except asyncio.CancelledError:
cancel_token.cancel()

View File

@@ -123,13 +123,14 @@ def _filter_buildings_to_bbox(
max_lat: float, max_lon: float,
site_lat: float, site_lon: float,
log_fn=None,
max_buildings: int = MAX_BUILDINGS_FOR_WORKERS,
) -> list:
"""Filter buildings to coverage bbox and cap at MAX_BUILDINGS_FOR_WORKERS.
"""Filter buildings to coverage bbox and cap at max_buildings.
Returns buildings sorted by distance to site (nearest first) so the
cap preserves buildings most likely to affect coverage.
"""
if not buildings or len(buildings) <= MAX_BUILDINGS_FOR_WORKERS:
if not buildings or len(buildings) <= max_buildings:
return buildings
original = len(buildings)
@@ -149,7 +150,7 @@ def _filter_buildings_to_bbox(
log_fn(f"Building bbox filter: {original} -> {len(filtered)}")
# If still too many, sort by centroid distance and cap
if len(filtered) > MAX_BUILDINGS_FOR_WORKERS:
if len(filtered) > max_buildings:
def _centroid_dist(b):
lats = [p[1] for p in b.geometry]
lons = [p[0] for p in b.geometry]
@@ -158,7 +159,7 @@ def _filter_buildings_to_bbox(
return (clat - site_lat) ** 2 + (clon - site_lon) ** 2
filtered.sort(key=_centroid_dist)
filtered = filtered[:MAX_BUILDINGS_FOR_WORKERS]
filtered = filtered[:max_buildings]
if log_fn:
log_fn(f"Building distance cap: -> {len(filtered)} (nearest to site)")
@@ -762,9 +763,57 @@ class CoverageService:
# Free full grid reference
del grid
# ── Pre-fetch buildings for inner zone (≤20km) ──
# This avoids re-reading the disk JSON cache (7-8s) per tile.
inner_radius_m = min(settings.radius, 20_000)
needs_osm = (settings.use_buildings
or getattr(settings, 'use_street_canyon', False)
or getattr(settings, 'use_water_reflection', False)
or getattr(settings, 'use_vegetation', False))
prefetched_buildings: List[Building] = []
prefetched_streets: list = []
prefetched_water: list = []
prefetched_vegetation: list = []
if needs_osm:
lat_delta = inner_radius_m / 111_320.0
lon_delta = inner_radius_m / (111_320.0 * max(math.cos(math.radians(site.lat)), 0.01))
inner_bbox = (
site.lat - lat_delta, site.lon - lon_delta,
site.lat + lat_delta, site.lon + lon_delta,
)
if progress_fn:
progress_fn("Pre-fetching map data", 0.02)
_clog(f"Pre-fetching OSM for inner zone ({inner_radius_m/1000:.0f}km)")
osm_prefetch = await self._fetch_osm_grid_aligned(
inner_bbox[0], inner_bbox[1], inner_bbox[2], inner_bbox[3],
settings,
)
prefetched_buildings = osm_prefetch.get("buildings", [])
prefetched_streets = osm_prefetch.get("streets", [])
prefetched_water = osm_prefetch.get("water_bodies", [])
prefetched_vegetation = osm_prefetch.get("vegetation_areas", [])
del osm_prefetch
_clog(f"Pre-fetched: {len(prefetched_buildings)} buildings, "
f"{len(prefetched_streets)} streets, "
f"{len(prefetched_water)} water, "
f"{len(prefetched_vegetation)} veg")
# Clear singleton memory cache — we hold our own reference
self.buildings._memory_cache.clear()
gc.collect()
site_elevation: Optional[float] = None
all_points: List[CoveragePoint] = []
# FSPL pre-check: compute minimum distance to each tile and estimate
# free-space signal. Skip tiles where even best-case FSPL < min_signal.
eirp_dbm = site.power + site.gain
min_signal = getattr(settings, 'min_signal', -130)
tiles_skipped_fspl = 0
for tile_idx, tile in enumerate(tiles):
if cancel_token and cancel_token.is_cancelled:
_clog("Tiled calculation cancelled")
@@ -776,6 +825,20 @@ class CoverageService:
tile_start = time.time()
min_lat, min_lon, max_lat, max_lon = tile.bbox
# Quick FSPL check: closest edge of tile to site
clamp_lat = max(min_lat, min(site.lat, max_lat))
clamp_lon = max(min_lon, min(site.lon, max_lon))
closest_dist = TerrainService.haversine_distance(
site.lat, site.lon, clamp_lat, clamp_lon,
)
if closest_dist > 500: # Skip check for tiles containing the site
fspl_db = 20 * math.log10(closest_dist) + 20 * math.log10(site.frequency * 1e6) - 147.55
best_rsrp = eirp_dbm - fspl_db
if best_rsrp < min_signal:
tiles_skipped_fspl += 1
continue
_clog(f"━━━ Tile {tile_idx + 1}/{total_tiles}: "
f"{len(tile_grid)} points ━━━")
@@ -787,28 +850,39 @@ class CoverageService:
f"Tile {_idx + 1}/{total_tiles}: {phase}", overall,
)
# ── 1. Fetch OSM data for this tile ──
_tile_progress("Fetching map data", 0.10)
# ── 1. Filter pre-fetched OSM data for this tile ──
tile_center_lat = (min_lat + max_lat) / 2
tile_center_lon = (min_lon + max_lon) / 2
tile_dist_m = TerrainService.haversine_distance(
site.lat, site.lon, tile_center_lat, tile_center_lon,
)
skip_buildings = tile_dist_m > 20_000
_tile_progress("Filtering map data", 0.10)
await asyncio.sleep(0)
osm_data = await self._fetch_osm_grid_aligned(
min_lat, min_lon, max_lat, max_lon, settings,
)
buildings = _filter_buildings_to_bbox(
osm_data["buildings"], min_lat, min_lon, max_lat, max_lon,
site.lat, site.lon, _clog,
)
streets = _filter_osm_list_to_bbox(
osm_data["streets"], min_lat, min_lon, max_lat, max_lon,
)
water_bodies = _filter_osm_list_to_bbox(
osm_data["water_bodies"], min_lat, min_lon, max_lat, max_lon,
)
vegetation_areas = _filter_osm_list_to_bbox(
osm_data["vegetation_areas"], min_lat, min_lon, max_lat, max_lon,
max_count=5000,
)
if skip_buildings:
buildings: list = []
streets: list = []
water_bodies: list = []
vegetation_areas: list = []
else:
# Fast in-memory filter from pre-fetched data (no disk I/O)
buildings = _filter_buildings_to_bbox(
prefetched_buildings, min_lat, min_lon, max_lat, max_lon,
site.lat, site.lon, _clog,
max_buildings=5000,
)
streets = _filter_osm_list_to_bbox(
prefetched_streets, min_lat, min_lon, max_lat, max_lon,
)
water_bodies = _filter_osm_list_to_bbox(
prefetched_water, min_lat, min_lon, max_lat, max_lon,
)
vegetation_areas = _filter_osm_list_to_bbox(
prefetched_vegetation, min_lat, min_lon, max_lat, max_lon,
max_count=5000,
)
spatial_idx: Optional[SpatialIndex] = None
if buildings:
@@ -907,15 +981,21 @@ class CoverageService:
_clog(f"Tile {tile_idx + 1}/{total_tiles} done: "
f"{len(tile_points)} points in {tile_time:.1f}s")
# ── 5. Free memory ──
# ── 5. Free per-tile memory ──
del buildings, streets, water_bodies, vegetation_areas
del osm_data, spatial_idx, point_elevations, precomputed
del spatial_idx, point_elevations, precomputed
del pre_distances, pre_path_loss, grid_lats, grid_lons
gc.collect()
# Free pre-fetched OSM data
del prefetched_buildings, prefetched_streets
del prefetched_water, prefetched_vegetation
gc.collect()
total_time = time.time() - calc_start
_clog(f"━━━ Tiled calculation complete: "
f"{len(all_points)} points in {total_time:.1f}s ━━━")
f"{len(all_points)} points in {total_time:.1f}s "
f"({tiles_skipped_fspl} tiles skipped by FSPL pre-check) ━━━")
if progress_fn:
progress_fn("Finalizing", 0.95)