mytec: after methods
This commit is contained in:
@@ -132,6 +132,59 @@ async def calculate_coverage(request: CoverageRequest) -> CoverageResponse:
|
||||
)
|
||||
|
||||
|
||||
@router.post("/preview")
|
||||
async def calculate_preview(request: CoverageRequest) -> CoverageResponse:
|
||||
"""
|
||||
Fast radial preview using terrain-only along 360 spokes.
|
||||
|
||||
Returns coverage points much faster than full calculation
|
||||
by skipping building/OSM data and using radial spokes instead of grid.
|
||||
"""
|
||||
if not request.sites:
|
||||
raise HTTPException(400, "At least one site required")
|
||||
|
||||
site = request.sites[0]
|
||||
effective_settings = apply_preset(request.settings.model_copy())
|
||||
|
||||
env = getattr(effective_settings, 'environment', 'urban')
|
||||
primary_model = select_propagation_model(site.frequency, env)
|
||||
models_used = ["terrain_los", primary_model.name]
|
||||
|
||||
start_time = time.time()
|
||||
|
||||
try:
|
||||
points = await asyncio.wait_for(
|
||||
coverage_service.calculate_radial_preview(
|
||||
site, request.settings,
|
||||
),
|
||||
timeout=30.0,
|
||||
)
|
||||
except asyncio.TimeoutError:
|
||||
raise HTTPException(408, "Preview timeout (30s)")
|
||||
|
||||
computation_time = time.time() - start_time
|
||||
|
||||
rsrp_values = [p.rsrp for p in points]
|
||||
los_count = sum(1 for p in points if p.has_los)
|
||||
|
||||
stats = {
|
||||
"min_rsrp": min(rsrp_values) if rsrp_values else 0,
|
||||
"max_rsrp": max(rsrp_values) if rsrp_values else 0,
|
||||
"avg_rsrp": sum(rsrp_values) / len(rsrp_values) if rsrp_values else 0,
|
||||
"los_percentage": (los_count / len(points) * 100) if points else 0,
|
||||
"mode": "radial_preview",
|
||||
}
|
||||
|
||||
return CoverageResponse(
|
||||
points=points,
|
||||
count=len(points),
|
||||
settings=effective_settings,
|
||||
stats=stats,
|
||||
computation_time=round(computation_time, 2),
|
||||
models_used=models_used,
|
||||
)
|
||||
|
||||
|
||||
@router.get("/presets")
|
||||
async def get_presets():
|
||||
"""Get available propagation model presets"""
|
||||
|
||||
@@ -1,10 +1,16 @@
|
||||
import os
|
||||
import json
|
||||
import asyncio
|
||||
import multiprocessing as mp
|
||||
from pathlib import Path
|
||||
from fastapi import APIRouter
|
||||
|
||||
router = APIRouter()

# Valid SRTM tile sizes (bytes): a square grid of 2-byte elevation samples.
# Used by /diagnostics to classify .hgt files by exact file size.
_SRTM1_SIZE = 3601 * 3601 * 2  # 1-arcsecond tile: 25,934,402 bytes
_SRTM3_SIZE = 1201 * 1201 * 2  # 3-arcsecond tile: 2,884,802 bytes
|
||||
|
||||
|
||||
@router.get("/info")
|
||||
async def get_system_info():
|
||||
@@ -72,3 +78,108 @@ async def shutdown():
|
||||
loop.call_later(3.0, lambda: os._exit(0))
|
||||
|
||||
return {"status": "shutting down", "workers_killed": killed}
|
||||
|
||||
|
||||
@router.get("/diagnostics")
|
||||
async def get_diagnostics():
|
||||
"""Validate terrain tiles and OSM cache files.
|
||||
|
||||
Checks:
|
||||
- Terrain .hgt files: must be exactly SRTM1 or SRTM3 size
|
||||
- OSM cache .json files: must be valid JSON with expected structure
|
||||
- Cache manager stats (memory + disk)
|
||||
"""
|
||||
data_path = Path(os.environ.get('RFCP_DATA_PATH', './data'))
|
||||
terrain_path = data_path / 'terrain'
|
||||
osm_dirs = [
|
||||
data_path / 'osm' / 'buildings',
|
||||
data_path / 'osm' / 'streets',
|
||||
data_path / 'osm' / 'vegetation',
|
||||
data_path / 'osm' / 'water',
|
||||
]
|
||||
|
||||
# --- Terrain tiles ---
|
||||
terrain_tiles = []
|
||||
terrain_errors = []
|
||||
total_terrain_bytes = 0
|
||||
|
||||
if terrain_path.exists():
|
||||
for hgt in sorted(terrain_path.glob("*.hgt")):
|
||||
size = hgt.stat().st_size
|
||||
total_terrain_bytes += size
|
||||
if size == _SRTM1_SIZE:
|
||||
terrain_tiles.append({"name": hgt.name, "type": "SRTM1", "size": size})
|
||||
elif size == _SRTM3_SIZE:
|
||||
terrain_tiles.append({"name": hgt.name, "type": "SRTM3", "size": size})
|
||||
else:
|
||||
terrain_errors.append({
|
||||
"name": hgt.name,
|
||||
"size": size,
|
||||
"error": f"Invalid size (expected {_SRTM1_SIZE} or {_SRTM3_SIZE})",
|
||||
})
|
||||
|
||||
# --- OSM cache ---
|
||||
osm_files = []
|
||||
osm_errors = []
|
||||
total_osm_bytes = 0
|
||||
|
||||
for osm_dir in osm_dirs:
|
||||
if not osm_dir.exists():
|
||||
continue
|
||||
category = osm_dir.name
|
||||
for jf in sorted(osm_dir.glob("*.json")):
|
||||
fsize = jf.stat().st_size
|
||||
total_osm_bytes += fsize
|
||||
try:
|
||||
data = json.loads(jf.read_text())
|
||||
has_timestamp = '_cached_at' in data or '_ts' in data
|
||||
has_data = 'data' in data or 'v' in data
|
||||
if has_timestamp and has_data:
|
||||
osm_files.append({
|
||||
"name": jf.name,
|
||||
"category": category,
|
||||
"size": fsize,
|
||||
"valid": True,
|
||||
})
|
||||
else:
|
||||
osm_errors.append({
|
||||
"name": jf.name,
|
||||
"category": category,
|
||||
"size": fsize,
|
||||
"error": "Missing expected keys (_cached_at/data or _ts/v)",
|
||||
})
|
||||
except json.JSONDecodeError as e:
|
||||
osm_errors.append({
|
||||
"name": jf.name,
|
||||
"category": category,
|
||||
"size": fsize,
|
||||
"error": f"Invalid JSON: {e}",
|
||||
})
|
||||
|
||||
# --- Cache manager stats ---
|
||||
try:
|
||||
from app.services.cache import cache_manager
|
||||
cache_stats = cache_manager.stats()
|
||||
except Exception:
|
||||
cache_stats = None
|
||||
|
||||
return {
|
||||
"data_path": str(data_path),
|
||||
"terrain": {
|
||||
"path": str(terrain_path),
|
||||
"exists": terrain_path.exists(),
|
||||
"tile_count": len(terrain_tiles),
|
||||
"error_count": len(terrain_errors),
|
||||
"total_mb": round(total_terrain_bytes / (1024 * 1024), 1),
|
||||
"tiles": terrain_tiles,
|
||||
"errors": terrain_errors,
|
||||
},
|
||||
"osm_cache": {
|
||||
"valid_count": len(osm_files),
|
||||
"error_count": len(osm_errors),
|
||||
"total_mb": round(total_osm_bytes / (1024 * 1024), 1),
|
||||
"files": osm_files,
|
||||
"errors": osm_errors,
|
||||
},
|
||||
"cache_manager": cache_stats,
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user