Compare commits
115 Commits
05a464ce22
...
python-leg
| Author | SHA1 | Date | |
|---|---|---|---|
| 833dead43c | |||
| 1d8375af02 | |||
| acfd9b8f7b | |||
| 81e078e92a | |||
| e392b449cc | |||
| 6dcc5a19b9 | |||
| 6cd9d869cc | |||
| a61753c642 | |||
| 20d19d09ae | |||
| 255b91f257 | |||
| 3b36535d4e | |||
| f46bf16428 | |||
| 57106df5ae | |||
| 867ee3d0f4 | |||
| 7f0b4d2269 | |||
| f5429e40fd | |||
| c8c2608266 | |||
| aa07fb5f02 | |||
| b5b2fd90d2 | |||
| defa3ad440 | |||
| 1dde56705a | |||
| acc90fe538 | |||
| 4026233b21 | |||
| fa7378cf3f | |||
| 5488633e43 | |||
| 7893c57bc9 | |||
| 221000d5b3 | |||
| 3b010fed83 | |||
| 26f8067c94 | |||
| fb2b55caff | |||
| f6a39df366 | |||
| baf57ad77f | |||
| 013cb155a9 | |||
| b62e893abe | |||
| fa55fec94a | |||
| cdbf0127bf | |||
| 04fe8fb814 | |||
| 375a78f5b9 | |||
| c97355f444 | |||
| 7a5b27bd87 | |||
| 5821de9a8f | |||
| 358846fe20 | |||
| 7595ba430d | |||
| 1cc9bfd3a1 | |||
| 3c92fdbb90 | |||
| 5bd9302dd8 | |||
| 61e113965c | |||
| 1ffac9f510 | |||
| b21fa9b9cb | |||
| f7fd82fb58 | |||
| 0aa7db6c40 | |||
|
|
14f0b0f7f1 | ||
| d13233d8e3 | |||
| 58ee6b4163 | |||
| fe6ff7ff6e | |||
| 2f1126dbba | |||
|
|
52468faf8e | ||
| b7d008fe26 | |||
| 0fb19476cd | |||
| b0fbb035fc | |||
|
|
3396ff152c | ||
| 79ce920068 | |||
| 4326a6c4f7 | |||
| 43b1267c56 | |||
|
|
e8ae5bc1db | ||
| d6988e370e | |||
| ed60c4da9e | |||
| 625cce31e4 | |||
| d8256288b0 | |||
| 8e35329622 | |||
| f0b62ada77 | |||
| 24e9591e42 | |||
| a0b71bb76c | |||
| 4916112ac4 | |||
| ac069929b7 | |||
| 1d1f0c6a8a | |||
|
|
f948ff8b07 | ||
| 176df9ddaf | |||
| 12b5c89355 | |||
| fbaf619047 | |||
| aae0bc4b12 | |||
| 652b34571b | |||
| 05d6b8f13c | |||
| d0e827e350 | |||
| 55fd42b696 | |||
| 2a62d00a35 | |||
| 31db02de8e | |||
| 7ad59df69d | |||
| 79d32c9d30 | |||
| b932607521 | |||
| 7fe5f7068c | |||
| 94cf0f0a07 | |||
| 3fa8185ba9 | |||
| bb9ca9960d | |||
| 91618353e6 | |||
| 06ac155429 | |||
| fc9033a815 | |||
| 55a4e1d27a | |||
| bc1996b55d | |||
| e066bdefd8 | |||
| c52716978f | |||
| c9c44491b0 | |||
| e119394652 | |||
| 3e1061e369 | |||
| baebd29e1a | |||
| ed30d886be | |||
| b18e297eee | |||
| a03e9746d4 | |||
| 1a3c3e0f11 | |||
| 641832bc7b | |||
| 201aeeabd6 | |||
| 8b11163a79 | |||
| d3fb1801a8 | |||
|
|
af0fb2154a | ||
|
|
3f04d5101e |
@@ -4,7 +4,52 @@
|
|||||||
"Bash(npm create:*)",
|
"Bash(npm create:*)",
|
||||||
"Bash(npm install:*)",
|
"Bash(npm install:*)",
|
||||||
"Bash(npx tsc:*)",
|
"Bash(npx tsc:*)",
|
||||||
"Bash(npm run build:*)"
|
"Bash(npm run build:*)",
|
||||||
|
"Bash(npx eslint:*)",
|
||||||
|
"Bash(tree:*)",
|
||||||
|
"Bash(python:*)",
|
||||||
|
"Bash(pip --version:*)",
|
||||||
|
"Bash(pip install:*)",
|
||||||
|
"Bash(npx vite build:*)",
|
||||||
|
"Bash(git:*)",
|
||||||
|
"Bash(cat:*)",
|
||||||
|
"Bash(ls:*)",
|
||||||
|
"Bash(cd:*)",
|
||||||
|
"Bash(mkdir:*)",
|
||||||
|
"Bash(cp:*)",
|
||||||
|
"Bash(mv:*)",
|
||||||
|
"Read(*)",
|
||||||
|
"Write(*)",
|
||||||
|
"Bash(python3:*)",
|
||||||
|
"Bash(source:*)",
|
||||||
|
"Bash(/mnt/d/root/rfcp/venv/bin/python3:*)",
|
||||||
|
"Bash(node --check:*)",
|
||||||
|
"Bash(/mnt/d/root/rfcp/venv/bin/python -m pytest:*)",
|
||||||
|
"Bash(/mnt/d/root/rfcp/venv/bin/python:*)",
|
||||||
|
"Bash(/mnt/d/root/rfcp/venv/bin/pip list:*)",
|
||||||
|
"Bash(pip3 install numpy)",
|
||||||
|
"Bash(echo:*)",
|
||||||
|
"Bash(find:*)",
|
||||||
|
"Bash(node -c:*)",
|
||||||
|
"Bash(curl:*)",
|
||||||
|
"Bash(head -3 python3 -c \"import numpy; print\\(numpy.__file__\\)\")",
|
||||||
|
"Bash(pip3 install:*)",
|
||||||
|
"Bash(apt list:*)",
|
||||||
|
"Bash(dpkg:*)",
|
||||||
|
"Bash(sudo apt-get install:*)",
|
||||||
|
"Bash(docker:*)",
|
||||||
|
"Bash(~/.local/bin/pip install:*)",
|
||||||
|
"Bash(pgrep:*)",
|
||||||
|
"Bash(kill:*)",
|
||||||
|
"Bash(sort:*)",
|
||||||
|
"Bash(journalctl:*)",
|
||||||
|
"Bash(pkill:*)",
|
||||||
|
"Bash(pip3 list:*)",
|
||||||
|
"Bash(chmod:*)",
|
||||||
|
"Bash(pyinstaller:*)",
|
||||||
|
"Bash(npm i:*)",
|
||||||
|
"Bash(npm uninstall:*)",
|
||||||
|
"Bash(npm rebuild:*)"
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
30
.gitignore
vendored
Normal file
30
.gitignore
vendored
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
data/terrain/
|
||||||
|
data/terrain/
|
||||||
|
venv/
|
||||||
|
backend/venv/
|
||||||
|
**/__pycache__/
|
||||||
|
*.pyc
|
||||||
|
**/__pycache__/
|
||||||
|
*.pyc
|
||||||
|
backend/data/
|
||||||
|
.claude/
|
||||||
|
CLAUDE.md
|
||||||
|
|
||||||
|
# Desktop build artifacts
|
||||||
|
desktop/backend-dist/
|
||||||
|
desktop/dist/
|
||||||
|
desktop/node_modules/
|
||||||
|
|
||||||
|
# Installer build artifacts
|
||||||
|
installer/build/
|
||||||
|
installer/dist/
|
||||||
|
|
||||||
|
# PyInstaller
|
||||||
|
*.spec.bak
|
||||||
|
__pycache__/
|
||||||
|
*.pyc
|
||||||
|
nul
|
||||||
|
|
||||||
|
# PyInstaller build artifacts
|
||||||
|
backend/build/
|
||||||
|
backend/dist/
|
||||||
1513
RFCP-RUST-MIGRATION-PLAN.md
Normal file
1513
RFCP-RUST-MIGRATION-PLAN.md
Normal file
File diff suppressed because it is too large
Load Diff
23
RFCP.bat
Normal file
23
RFCP.bat
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
@echo off
title RFCP - RF Coverage Planner
cd /d "%~dp0"

REM Bail out early if the backend entry point is missing.
if not exist "backend\app\main.py" (
    echo ERROR: RFCP backend not found.
    echo Run install.bat first or check your installation.
    pause
    exit /b 1
)

echo ============================================
echo RFCP - RF Coverage Planner
echo ============================================
echo.
echo Starting backend server...
echo Open http://localhost:8090 in your browser
echo Press Ctrl+C to stop
echo.

REM Serve the FastAPI app on all interfaces, port 8090.
cd backend
python -m uvicorn app.main:app --host 0.0.0.0 --port 8090
|
||||||
16
backend/Dockerfile
Normal file
16
backend/Dockerfile
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
FROM python:3.12-slim

WORKDIR /app

# Dependencies first, so the layer cache survives app-code changes.
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# Application code.
COPY app /app/app

# FastAPI/uvicorn listens here.
EXPOSE 8000

CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
|
||||||
0
backend/app/__init__.py
Normal file
0
backend/app/__init__.py
Normal file
0
backend/app/api/__init__.py
Normal file
0
backend/app/api/__init__.py
Normal file
BIN
backend/app/api/__pycache__/__init__.cpython-311.pyc
Normal file
BIN
backend/app/api/__pycache__/__init__.cpython-311.pyc
Normal file
Binary file not shown.
BIN
backend/app/api/__pycache__/deps.cpython-311.pyc
Normal file
BIN
backend/app/api/__pycache__/deps.cpython-311.pyc
Normal file
Binary file not shown.
5
backend/app/api/deps.py
Normal file
5
backend/app/api/deps.py
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
from app.core.database import get_database
|
||||||
|
|
||||||
|
|
||||||
|
async def get_db():
|
||||||
|
return await get_database()
|
||||||
0
backend/app/api/routes/__init__.py
Normal file
0
backend/app/api/routes/__init__.py
Normal file
BIN
backend/app/api/routes/__pycache__/__init__.cpython-311.pyc
Normal file
BIN
backend/app/api/routes/__pycache__/__init__.cpython-311.pyc
Normal file
Binary file not shown.
BIN
backend/app/api/routes/__pycache__/health.cpython-311.pyc
Normal file
BIN
backend/app/api/routes/__pycache__/health.cpython-311.pyc
Normal file
Binary file not shown.
BIN
backend/app/api/routes/__pycache__/projects.cpython-311.pyc
Normal file
BIN
backend/app/api/routes/__pycache__/projects.cpython-311.pyc
Normal file
Binary file not shown.
650
backend/app/api/routes/coverage.py
Normal file
650
backend/app/api/routes/coverage.py
Normal file
@@ -0,0 +1,650 @@
|
|||||||
|
import time
|
||||||
|
import asyncio
|
||||||
|
|
||||||
|
from fastapi import APIRouter, HTTPException, BackgroundTasks
|
||||||
|
from typing import List, Optional
|
||||||
|
from pydantic import BaseModel
|
||||||
|
from app.services.coverage_service import (
|
||||||
|
coverage_service,
|
||||||
|
CoverageSettings,
|
||||||
|
SiteParams,
|
||||||
|
CoveragePoint,
|
||||||
|
apply_preset,
|
||||||
|
PRESETS,
|
||||||
|
select_propagation_model,
|
||||||
|
)
|
||||||
|
from app.services.parallel_coverage_service import CancellationToken
|
||||||
|
from app.services.boundary_service import calculate_coverage_boundary
|
||||||
|
|
||||||
|
router = APIRouter()
|
||||||
|
|
||||||
|
|
||||||
|
class CoverageRequest(BaseModel):
    """Request body shared by the coverage endpoints (/calculate, /preview, /interference)."""
    sites: List[SiteParams]  # transmitter sites; endpoints enforce their own count limits
    settings: CoverageSettings = CoverageSettings()  # pydantic copies the default per instance
||||||
|
|
||||||
|
|
||||||
|
class BoundaryPoint(BaseModel):
    """One vertex of the coverage boundary polygon."""
    lat: float
    lon: float
|
||||||
|
|
||||||
|
|
||||||
|
class CoverageResponse(BaseModel):
    """Result payload of a coverage calculation."""
    points: List[CoveragePoint]
    count: int  # == len(points)
    settings: CoverageSettings  # settings as used, after preset expansion
    stats: dict
    computation_time: float  # seconds
    models_used: List[str]  # which models were active
    boundary: Optional[List[BoundaryPoint]] = None  # coverage boundary polygon
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/calculate")
async def calculate_coverage(request: CoverageRequest) -> CoverageResponse:
    """Run a full RF coverage calculation for up to 10 sites.

    Produces a grid of RSRP values including terrain and building effects,
    honouring the propagation-model presets (fast, standard, detailed, full).

    Raises:
        HTTPException 400: no sites, too many sites, or settings out of range.
        HTTPException 408: calculation exceeded its deadline.
        HTTPException 499: client disconnected mid-calculation.
    """
    if not request.sites:
        raise HTTPException(400, "At least one site required")
    if len(request.sites) > 10:
        raise HTTPException(400, "Maximum 10 sites per request")

    # Reject settings outside the supported envelope up front.
    if request.settings.radius > 50000:
        raise HTTPException(400, "Maximum radius 50km")
    if request.settings.resolution < 50:
        raise HTTPException(400, "Minimum resolution 50m")

    # Resolve the preset into concrete settings and list the active models.
    effective_settings = apply_preset(request.settings.model_copy())
    models_used = _get_active_models(effective_settings)

    # Prepend the propagation model selected for the first site's frequency.
    env = getattr(effective_settings, 'environment', 'urban')
    primary_model = select_propagation_model(request.sites[0].frequency, env)
    if primary_model.name not in models_used:
        models_used.insert(0, primary_model.name)

    started = time.time()
    cancel_token = CancellationToken()

    # Larger radii run through tiled processing and need a longer deadline.
    radius_m = request.settings.radius
    if radius_m > 30_000:
        calc_timeout = 600.0  # 10 min for 30-50km
    elif radius_m > 10_000:
        calc_timeout = 480.0  # 8 min for 10-30km
    else:
        calc_timeout = 300.0  # 5 min for <=10km

    # NOTE(review): the calculation receives request.settings while the
    # response reports effective_settings (preset applied) — confirm the
    # service applies the preset internally, otherwise these can diverge.
    if len(request.sites) == 1:
        pending = coverage_service.calculate_coverage(
            request.sites[0],
            request.settings,
            cancel_token,
        )
    else:
        pending = coverage_service.calculate_multi_site_coverage(
            request.sites,
            request.settings,
            cancel_token,
        )

    try:
        points = await asyncio.wait_for(pending, timeout=calc_timeout)
    except asyncio.TimeoutError:
        cancel_token.cancel()
        # Reap any worker processes the timed-out calculation left behind.
        from app.services.parallel_coverage_service import _kill_worker_processes
        killed = _kill_worker_processes()
        timeout_min = int(calc_timeout / 60)
        detail = f"Calculation timeout ({timeout_min} min). Cleaned up {killed} workers." if killed else f"Calculation timeout ({timeout_min} min) — try smaller radius or lower resolution"
        raise HTTPException(408, detail)
    except asyncio.CancelledError:
        cancel_token.cancel()
        from app.services.parallel_coverage_service import _kill_worker_processes
        _kill_worker_processes()
        raise HTTPException(499, "Client disconnected")

    computation_time = time.time() - started

    # Aggregate statistics over the returned grid.
    rsrps = [p.rsrp for p in points]
    los_hits = sum(1 for p in points if p.has_los)
    stats = {
        "min_rsrp": min(rsrps) if rsrps else 0,
        "max_rsrp": max(rsrps) if rsrps else 0,
        "avg_rsrp": sum(rsrps) / len(rsrps) if rsrps else 0,
        "los_percentage": (los_hits / len(points) * 100) if points else 0,
    }
    for key, attr in (
        ("points_with_buildings", "building_loss"),
        ("points_with_terrain_loss", "terrain_loss"),
        ("points_with_reflection_gain", "reflection_gain"),
        ("points_with_vegetation_loss", "vegetation_loss"),
        ("points_with_rain_loss", "rain_loss"),
        ("points_with_indoor_loss", "indoor_loss"),
        ("points_with_atmospheric_loss", "atmospheric_loss"),
    ):
        stats[key] = sum(1 for p in points if getattr(p, attr) > 0)

    # Trace the outer boundary of the covered area, if anything was covered.
    boundary = None
    if points:
        boundary_coords = calculate_coverage_boundary(
            [p.model_dump() for p in points],
            threshold_dbm=request.settings.min_signal,
        )
        if boundary_coords:
            boundary = [BoundaryPoint(**c) for c in boundary_coords]

    return CoverageResponse(
        points=points,
        count=len(points),
        settings=effective_settings,
        stats=stats,
        computation_time=round(computation_time, 2),
        models_used=models_used,
        boundary=boundary,
    )
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/preview")
async def calculate_preview(request: CoverageRequest) -> CoverageResponse:
    """Fast terrain-only preview along 360 radial spokes.

    Skips building/OSM data and replaces the grid with radial spokes, so it
    returns far sooner than the full /calculate endpoint.

    Raises:
        HTTPException 400: no sites supplied.
        HTTPException 408: preview exceeded its 30 s deadline.
    """
    if not request.sites:
        raise HTTPException(400, "At least one site required")

    # Only the first site participates in the preview.
    site = request.sites[0]
    effective_settings = apply_preset(request.settings.model_copy())

    env = getattr(effective_settings, 'environment', 'urban')
    primary_model = select_propagation_model(site.frequency, env)
    models_used = ["terrain_los", primary_model.name]

    started = time.time()

    # NOTE(review): raw request.settings go to the service while the response
    # reports effective_settings — confirm this is intentional.
    try:
        points = await asyncio.wait_for(
            coverage_service.calculate_radial_preview(site, request.settings),
            timeout=30.0,
        )
    except asyncio.TimeoutError:
        raise HTTPException(408, "Preview timeout (30s)")

    computation_time = time.time() - started

    # Summary statistics for the preview spokes.
    rsrps = [p.rsrp for p in points]
    los_hits = sum(1 for p in points if p.has_los)
    stats = {
        "min_rsrp": min(rsrps) if rsrps else 0,
        "max_rsrp": max(rsrps) if rsrps else 0,
        "avg_rsrp": sum(rsrps) / len(rsrps) if rsrps else 0,
        "los_percentage": (los_hits / len(points) * 100) if points else 0,
        "mode": "radial_preview",
    }

    return CoverageResponse(
        points=points,
        count=len(points),
        settings=effective_settings,
        stats=stats,
        computation_time=round(computation_time, 2),
        models_used=models_used,
    )
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/presets")
async def get_presets():
    """Describe the available propagation-model presets.

    Each entry merges the human-readable blurb, the raw PRESETS settings,
    and a rough runtime estimate for a 5 km radius.
    """
    # (description, estimated speed) keyed by preset name, in display order.
    blurbs = {
        "fast": (
            "Quick calculation - terrain only",
            "~5 seconds for 5km radius",
        ),
        "standard": (
            "Balanced - terrain + buildings with materials",
            "~30 seconds for 5km radius",
        ),
        "detailed": (
            "Accurate - adds dominant path + vegetation",
            "~2 minutes for 5km radius",
        ),
        "full": (
            "Maximum realism - all models + water + vegetation",
            "~5 minutes for 5km radius",
        ),
    }
    return {
        "presets": {
            name: {
                "description": description,
                **PRESETS[name],
                "estimated_speed": speed,
            }
            for name, (description, speed) in blurbs.items()
        }
    }
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/buildings")
async def get_buildings(
    min_lat: float,
    min_lon: float,
    max_lat: float,
    max_lon: float
):
    """Return buildings inside a bounding box (debugging/visualization aid).

    Raises:
        HTTPException 400: bounding box larger than 0.1 degrees per side.
    """
    from app.services.buildings_service import buildings_service

    # Keep the query area small so the OSM fetch stays cheap.
    if (max_lat - min_lat) > 0.1 or (max_lon - min_lon) > 0.1:
        raise HTTPException(400, "Bbox too large (max 0.1 degrees)")

    buildings = await buildings_service.fetch_buildings(
        min_lat, min_lon, max_lat, max_lon
    )

    return {
        "count": len(buildings),
        "buildings": [b.model_dump() for b in buildings],
    }
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/link-budget")
async def calculate_link_budget(request: dict):
    """Point-to-point link budget between a TX and an RX location.

    Expected body keys (all optional, with defaults):
        tx_lat, tx_lon, tx_power_dbm, tx_gain_dbi, tx_cable_loss_db, tx_height_m,
        rx_lat, rx_lon, rx_gain_dbi, rx_cable_loss_db, rx_sensitivity_dbm,
        rx_height_m, frequency_mhz

    Returns distance, elevations, EIRP, FSPL, estimated terrain loss, LOS
    verdict with obstructions, received power and link margin.
    """
    import math
    from app.services.terrain_service import terrain_service

    g = request.get  # defaulted lookups into the loose JSON body

    tx_lat = g("tx_lat", 48.46)
    tx_lon = g("tx_lon", 35.04)
    tx_power_dbm = g("tx_power_dbm", 43)
    tx_gain_dbi = g("tx_gain_dbi", 18)
    tx_cable_loss_db = g("tx_cable_loss_db", 2)
    tx_height_m = g("tx_height_m", 30)

    rx_lat = g("rx_lat", 48.50)
    rx_lon = g("rx_lon", 35.10)
    rx_gain_dbi = g("rx_gain_dbi", 0)
    rx_cable_loss_db = g("rx_cable_loss_db", 0)
    rx_sensitivity_dbm = g("rx_sensitivity_dbm", -100)
    rx_height_m = g("rx_height_m", 1.5)

    freq = g("frequency_mhz", 1800)

    # Great-circle distance between the endpoints.
    distance_m = terrain_service.haversine_distance(tx_lat, tx_lon, rx_lat, rx_lon)
    distance_km = distance_m / 1000

    # Ground elevation at each endpoint.
    tx_elev = await terrain_service.get_elevation(tx_lat, tx_lon)
    rx_elev = await terrain_service.get_elevation(rx_lat, rx_lon)

    # Effective isotropic radiated power.
    eirp_dbm = tx_power_dbm + tx_gain_dbi - tx_cable_loss_db

    # Free-space path loss (Friis, MHz/km form); zero at zero distance.
    fspl_db = (
        20 * math.log10(distance_km) + 20 * math.log10(freq) + 32.45
        if distance_km > 0 else 0
    )

    # Sample the terrain between the endpoints for the LOS check.
    profile = await terrain_service.get_elevation_profile(
        tx_lat, tx_lon, rx_lat, rx_lon, num_points=100
    )

    # Antenna tops above sea level.
    tx_total_height = tx_elev + tx_height_m
    rx_total_height = rx_elev + rx_height_m

    terrain_loss_db = 0.0
    los_clear = True
    obstructions = []

    # Walk the interior profile points, flagging terrain above the sight line.
    last = len(profile) - 1
    for i, point in enumerate(profile[1:-1], start=1):
        # Height of the straight TX->RX line at this sample.
        los_height = tx_total_height + (i / last) * (rx_total_height - tx_total_height)
        excess = point["elevation"] - los_height
        if excess > 0:
            los_clear = False
            obstructions.append({
                "distance_m": point["distance"],
                "height_above_los_m": round(excess, 1),
            })
            # Knife-edge diffraction estimate: ~6dB per major obstruction
            terrain_loss_db += min(6.0, excess * 0.3)

    # Cap terrain loss at a plausible maximum.
    terrain_loss_db = min(terrain_loss_db, 40.0)

    total_path_loss = fspl_db + terrain_loss_db

    # Received power and margin against the receiver's sensitivity.
    rx_power_dbm = eirp_dbm - total_path_loss + rx_gain_dbi - rx_cable_loss_db
    margin_db = rx_power_dbm - rx_sensitivity_dbm

    return {
        "distance_km": round(distance_km, 2),
        "distance_m": round(distance_m, 1),
        "tx_elevation_m": round(tx_elev, 1),
        "rx_elevation_m": round(rx_elev, 1),
        "eirp_dbm": round(eirp_dbm, 1),
        "fspl_db": round(fspl_db, 1),
        "terrain_loss_db": round(terrain_loss_db, 1),
        "total_path_loss_db": round(total_path_loss, 1),
        "los_clear": los_clear,
        "obstructions": obstructions,
        "rx_power_dbm": round(rx_power_dbm, 1),
        "margin_db": round(margin_db, 1),
        "status": "OK" if margin_db >= 0 else "FAIL",
        "link_budget": {
            "tx_power_dbm": tx_power_dbm,
            "tx_gain_dbi": tx_gain_dbi,
            "tx_cable_loss_db": tx_cable_loss_db,
            "rx_gain_dbi": rx_gain_dbi,
            "rx_cable_loss_db": rx_cable_loss_db,
            "rx_sensitivity_dbm": rx_sensitivity_dbm,
        },
    }
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/fresnel-profile")
async def fresnel_profile(request: dict):
    """Terrain profile annotated with first-Fresnel-zone boundaries.

    Expected body keys (all optional, with defaults):
        tx_lat, tx_lon, tx_height_m, rx_lat, rx_lon, rx_height_m,
        frequency_mhz, num_points

    Returns per-sample terrain/LOS/Fresnel heights plus an overall clearance
    verdict and a rough extra-loss estimate.
    """
    import math
    from app.services.terrain_service import terrain_service

    g = request.get  # defaulted lookups into the loose JSON body
    tx_lat = g("tx_lat", 48.46)
    tx_lon = g("tx_lon", 35.04)
    rx_lat = g("rx_lat", 48.50)
    rx_lon = g("rx_lon", 35.10)
    tx_height = g("tx_height_m", 30)
    rx_height = g("rx_height_m", 1.5)
    freq = g("frequency_mhz", 1800)
    num_points = g("num_points", 100)

    # Sample the terrain between the endpoints.
    profile = await terrain_service.get_elevation_profile(
        tx_lat, tx_lon, rx_lat, rx_lon, num_points
    )
    if not profile:
        return {"error": "Could not generate terrain profile"}

    total_distance = profile[-1]["distance"] if profile else 0

    # Antenna tops above sea level at both ends.
    tx_elev = profile[0]["elevation"]
    rx_elev = profile[-1]["elevation"]
    tx_total = tx_elev + tx_height
    rx_total = rx_elev + rx_height

    wavelength = 300.0 / freq  # meters (freq is in MHz)

    fresnel_data = []
    los_blocked = False
    fresnel_blocked = False
    worst_clearance = float('inf')
    fresnel_intrusion_count = 0

    for point in profile:
        d1 = point["distance"]            # distance from TX
        d2 = total_distance - d1          # distance to RX

        # Straight sight line interpolated between the two antenna tops.
        fraction = d1 / total_distance if total_distance > 0 else 0
        los_height = tx_total + fraction * (rx_total - tx_total)

        # First Fresnel zone radius (collapses to zero at the endpoints).
        if d1 > 0 and d2 > 0 and total_distance > 0:
            f1_radius = math.sqrt((wavelength * d1 * d2) / total_distance)
        else:
            f1_radius = 0

        # Zone boundaries as heights above sea level.
        fresnel_top = los_height + f1_radius
        fresnel_bottom = los_height - f1_radius

        # Gap between the terrain and the bottom of the Fresnel zone.
        clearance = fresnel_bottom - point["elevation"]
        worst_clearance = min(worst_clearance, clearance)

        if point["elevation"] > los_height:
            los_blocked = True
        if point["elevation"] > fresnel_bottom:
            fresnel_blocked = True
            fresnel_intrusion_count += 1

        fresnel_data.append({
            "distance": round(point["distance"], 1),
            "lat": point["lat"],
            "lon": point["lon"],
            "terrain_elevation": round(point["elevation"], 1),
            "los_height": round(los_height, 1),
            "fresnel_top": round(fresnel_top, 1),
            "fresnel_bottom": round(fresnel_bottom, 1),
            "f1_radius": round(f1_radius, 1),
            "clearance": round(clearance, 1),
        })

    # Share of samples where terrain stays out of the first Fresnel zone.
    fresnel_clear_pct = round(100 * (1 - fresnel_intrusion_count / len(profile)), 1) if profile else 100

    # Rough extra-loss estimate from the degree of obstruction.
    if los_blocked:
        estimated_loss_db = 10 + abs(worst_clearance) * 0.5  # rough estimate
    elif fresnel_blocked:
        estimated_loss_db = 3 + (100 - fresnel_clear_pct) * 0.06  # 3-6 dB typical
    else:
        estimated_loss_db = 0

    return {
        "profile": fresnel_data,
        "total_distance_m": round(total_distance, 1),
        "tx_elevation": round(tx_elev, 1),
        "rx_elevation": round(rx_elev, 1),
        "frequency_mhz": freq,
        "wavelength_m": round(wavelength, 4),
        "los_clear": not los_blocked,
        "fresnel_clear": not fresnel_blocked,
        "fresnel_clear_pct": fresnel_clear_pct,
        "worst_clearance_m": round(worst_clearance, 1),
        "estimated_loss_db": round(estimated_loss_db, 1),
        "recommendation": (
            "Clear — excellent link" if not fresnel_blocked
            else "Fresnel zone partially blocked — expect 3-6 dB additional loss"
            if not los_blocked
            else "LOS blocked — significant diffraction loss expected"
        ),
    }
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/interference")
async def calculate_interference(request: CoverageRequest):
    """Carrier-to-interference (C/I) analysis for a multi-site scenario.

    Accepts the same body as /calculate but returns interference analysis
    instead of raw coverage; needs 2-10 sites to be meaningful. Each grid
    point carries the C/I ratio (dB), the best-server index and the
    best-server RSRP.

    Raises:
        HTTPException 400: fewer than 2 or more than 10 sites.
        HTTPException 408: a per-site calculation exceeded its deadline.
    """
    import numpy as np
    from app.services.gpu_service import gpu_service

    if len(request.sites) < 2:
        raise HTTPException(400, "At least 2 sites required for interference analysis")
    if len(request.sites) > 10:
        raise HTTPException(400, "Maximum 10 sites per request")

    started = time.time()
    cancel_token = CancellationToken()

    # Coverage is computed per site so the grids can be aligned afterwards.
    per_site_points = []
    try:
        for site in request.sites:
            pts = await asyncio.wait_for(
                coverage_service.calculate_coverage(
                    site,
                    request.settings,
                    cancel_token,
                ),
                timeout=120.0,  # 2 min per site
            )
            per_site_points.append(pts)
    except asyncio.TimeoutError:
        cancel_token.cancel()
        raise HTTPException(408, "Calculation timeout")

    computation_time = time.time() - started

    # Union of every site's grid coordinates, rounded so equal points match.
    coords = set()
    for pts in per_site_points:
        coords.update((round(p.lat, 6), round(p.lon, 6)) for p in pts)
    coord_list = sorted(coords)

    # One RSRP vector per site aligned to coord_list; -150 dBm = no coverage.
    rsrp_grids = []
    frequencies = []
    for site, pts in zip(request.sites, per_site_points):
        lookup = {(round(p.lat, 6), round(p.lon, 6)): p.rsrp for p in pts}
        rsrp_grids.append(np.array(
            [lookup.get(coord, -150) for coord in coord_list],
            dtype=np.float64,
        ))
        frequencies.append(site.frequency)

    # Vectorized C/I computation.
    ci_ratio, best_server_idx, best_rsrp = gpu_service.calculate_interference_vectorized(
        rsrp_grids, frequencies
    )

    ci_points = [
        {
            "lat": lat,
            "lon": lon,
            "ci_ratio_db": round(float(ci_ratio[i]), 1),
            "best_server_idx": int(best_server_idx[i]),
            "best_server_rsrp": round(float(best_rsrp[i]), 1),
        }
        for i, (lat, lon) in enumerate(coord_list)
    ]

    # Summary statistics over the C/I values.
    ci_values = [p["ci_ratio_db"] for p in ci_points]
    stats = {
        "min_ci_db": round(min(ci_values), 1) if ci_values else 0,
        "max_ci_db": round(max(ci_values), 1) if ci_values else 0,
        "avg_ci_db": round(sum(ci_values) / len(ci_values), 1) if ci_values else 0,
        "good_coverage_pct": round(100 * sum(1 for c in ci_values if c >= 10) / len(ci_values), 1) if ci_values else 0,
        "marginal_coverage_pct": round(100 * sum(1 for c in ci_values if 0 <= c < 10) / len(ci_values), 1) if ci_values else 0,
        "interference_dominant_pct": round(100 * sum(1 for c in ci_values if c < 0) / len(ci_values), 1) if ci_values else 0,
    }

    # Count sites per frequency; co-channel interference needs a count > 1.
    freq_groups = {f: frequencies.count(f) for f in set(frequencies)}

    return {
        "points": ci_points,
        "count": len(ci_points),
        "stats": stats,
        "computation_time": round(computation_time, 2),
        "sites": [{"name": s.name, "frequency_mhz": s.frequency} for s in request.sites],
        "frequency_groups": freq_groups,
        "warning": None if any(c > 1 for c in freq_groups.values()) else "All sites on different frequencies - no co-channel interference",
    }
|
||||||
|
|
||||||
|
|
||||||
|
def _get_active_models(settings: CoverageSettings) -> List[str]:
    """Determine which propagation models are active"""
    # Base propagation model is added by the caller via select_propagation_model().
    # Each (condition, model-name) pair maps a settings toggle to its model id;
    # order here defines the order of the returned list.
    criteria = [
        (settings.use_terrain, "terrain_los"),
        (settings.use_buildings, "buildings"),
        (settings.use_materials, "materials"),
        (settings.use_dominant_path, "dominant_path"),
        (settings.use_street_canyon, "street_canyon"),
        (settings.use_reflections, "reflections"),
        (settings.use_water_reflection, "water_reflection"),
        (settings.use_vegetation, "vegetation"),
        # Non-boolean toggles expressed as conditions:
        (settings.rain_rate > 0, "rain_attenuation"),
        (settings.indoor_loss_type != "none", "indoor_penetration"),
        (settings.use_atmospheric, "atmospheric"),
    ]
    return [name for enabled, name in criteria if enabled]
||||||
41
backend/app/api/routes/gpu.py
Normal file
41
backend/app/api/routes/gpu.py
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
"""GPU management API endpoints."""
|
||||||
|
|
||||||
|
from fastapi import APIRouter, HTTPException
|
||||||
|
from pydantic import BaseModel
|
||||||
|
|
||||||
|
from app.services.gpu_backend import gpu_manager
|
||||||
|
|
||||||
|
router = APIRouter()
|
||||||
|
|
||||||
|
|
||||||
|
class SetDeviceRequest(BaseModel):
    """Request body for switching the active compute device."""

    # Backend identifier as understood by gpu_manager.set_device.
    backend: str
    # Device index within the chosen backend; defaults to the first device.
    index: int = 0
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/status")
async def gpu_status():
    """Return GPU manager status: active backend, device, available devices."""
    # Thin wrapper: the response shape is whatever gpu_manager reports.
    return gpu_manager.get_status()
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/devices")
async def gpu_devices():
    """Return list of available compute devices."""
    # Wrapped in a dict so the endpoint always returns a JSON object.
    return {"devices": gpu_manager.get_devices()}
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/set")
async def gpu_set_device(request: SetDeviceRequest):
    """Switch active compute device.

    Returns the manager's result merged with {"status": "ok"}; a ValueError
    raised by the manager is surfaced to the client as HTTP 400.
    """
    try:
        result = gpu_manager.set_device(request.backend, request.index)
        return {"status": "ok", **result}
    except ValueError as e:
        raise HTTPException(status_code=400, detail=str(e))
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/diagnostics")
async def gpu_diagnostics():
    """Full GPU diagnostic info for troubleshooting detection issues."""
    # Thin wrapper over the manager's diagnostics report.
    return gpu_manager.get_diagnostics()
|
||||||
35
backend/app/api/routes/health.py
Normal file
35
backend/app/api/routes/health.py
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
import sys
|
||||||
|
import platform
|
||||||
|
|
||||||
|
from fastapi import APIRouter, Depends
|
||||||
|
from app.api.deps import get_db
|
||||||
|
from app.services.gpu_backend import gpu_manager
|
||||||
|
|
||||||
|
router = APIRouter()
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/")
async def health_check():
    """Liveness probe: report service identity, version, and GPU status."""
    gpu_info = gpu_manager.get_status()
    return {
        "status": "ok",
        "service": "rfcp-backend",
        "version": "3.6.0",
        # "build" flags whether this process detected a usable GPU.
        "build": "gpu" if gpu_info.get("gpu_available") else "cpu",
        "gpu": {
            "available": gpu_info.get("gpu_available", False),
            "backend": gpu_info.get("active_backend", "cpu"),
            # Fall back to "CPU" when no active device is reported.
            "device": gpu_info.get("active_device", {}).get("name") if gpu_info.get("active_device") else "CPU",
        },
        # Interpreter version without build metadata, e.g. "3.11.4".
        "python": sys.version.split()[0],
        "platform": platform.system(),
    }
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/db")
async def db_check(db=Depends(get_db)):
    """Readiness probe: verify the MongoDB connection with a ping."""
    try:
        await db.command("ping")
        return {"status": "ok", "database": "connected"}
    except Exception as e:
        # Report the failure in the body instead of raising, so callers
        # always get a 200 with a status field they can inspect.
        return {"status": "error", "database": str(e)}
|
||||||
83
backend/app/api/routes/projects.py
Normal file
83
backend/app/api/routes/projects.py
Normal file
@@ -0,0 +1,83 @@
|
|||||||
|
from fastapi import APIRouter, Depends, HTTPException
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
from app.api.deps import get_db
|
||||||
|
from app.models.project import Project, CoverageSettings
|
||||||
|
from app.models.site import Site
|
||||||
|
|
||||||
|
router = APIRouter()
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/current")
async def get_current_project(db=Depends(get_db)):
    """Get the global project, creating a default one on first access."""
    project = await db.projects.find_one({"name": "global"})
    if not project:
        # First access: persist a default project.
        default = Project(name="global")
        # NOTE: insert_one mutates the dict it is given (adds "_id"), so we
        # pass one dump to Mongo and return a fresh dump to the client.
        # (The insert result itself was unused and is intentionally dropped.)
        await db.projects.insert_one(default.model_dump())
        return default.model_dump()
    # Strip Mongo's internal id; ObjectId is not JSON-serializable.
    project.pop("_id", None)
    return project
|
||||||
|
|
||||||
|
|
||||||
|
@router.put("/current")
async def update_current_project(project: Project, db=Depends(get_db)):
    """Update the global project."""
    # Refresh the modification timestamp before persisting.
    project.updated_at = datetime.utcnow()
    data = project.model_dump()
    # Upsert so the document is created on first write.
    await db.projects.update_one(
        {"name": "global"},
        {"$set": data},
        upsert=True,
    )
    return data
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/current/sites")
async def get_sites(db=Depends(get_db)):
    """Get all sites from the global project."""
    project = await db.projects.find_one({"name": "global"})
    if not project:
        # No project yet: an empty site list, not an error.
        return []
    return project.get("sites", [])
|
||||||
|
|
||||||
|
|
||||||
|
@router.put("/current/sites")
async def update_sites(sites: list[Site], db=Depends(get_db)):
    """Update all sites in the global project."""
    # Full replacement of the site list; upsert creates the project if missing.
    await db.projects.update_one(
        {"name": "global"},
        {
            "$set": {
                "sites": [s.model_dump() for s in sites],
                "updated_at": datetime.utcnow(),
            }
        },
        upsert=True,
    )
    return {"updated": len(sites)}
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/current/settings")
async def get_settings(db=Depends(get_db)):
    """Get coverage settings."""
    project = await db.projects.find_one({"name": "global"})
    if not project:
        # No project yet: fall back to model defaults.
        return CoverageSettings().model_dump()
    return project.get("settings", CoverageSettings().model_dump())
|
||||||
|
|
||||||
|
|
||||||
|
@router.put("/current/settings")
async def update_settings(settings: CoverageSettings, db=Depends(get_db)):
    """Update coverage settings."""
    # Settings are validated by the CoverageSettings model before we get here.
    await db.projects.update_one(
        {"name": "global"},
        {
            "$set": {
                "settings": settings.model_dump(),
                "updated_at": datetime.utcnow(),
            }
        },
        upsert=True,
    )
    return settings.model_dump()
|
||||||
262
backend/app/api/routes/regions.py
Normal file
262
backend/app/api/routes/regions.py
Normal file
@@ -0,0 +1,262 @@
|
|||||||
|
from fastapi import APIRouter, BackgroundTasks, HTTPException
|
||||||
|
from pydantic import BaseModel
|
||||||
|
from typing import Optional
|
||||||
|
import asyncio
|
||||||
|
import uuid
|
||||||
|
|
||||||
|
router = APIRouter()
|
||||||
|
|
||||||
|
# Predefined regions.
# bbox format: [min_lat, min_lon, max_lat, max_lon] in degrees.
# srtm_tiles / estimated_size_gb are display estimates, not enforced limits.
REGIONS = {
    "ukraine": {
        "name": "Ukraine",
        "bbox": [44.0, 22.0, 52.5, 40.5],  # min_lat, min_lon, max_lat, max_lon
        "srtm_tiles": 120,
        "estimated_size_gb": 3.0,
    },
    "ukraine_east": {
        "name": "Eastern Ukraine (Donbas)",
        "bbox": [47.0, 34.0, 50.5, 40.5],
        "srtm_tiles": 24,
        "estimated_size_gb": 0.6,
    },
    "ukraine_central": {
        "name": "Central Ukraine",
        "bbox": [48.0, 30.0, 51.0, 36.0],
        "srtm_tiles": 18,
        "estimated_size_gb": 0.5,
    },
    "ukraine_west": {
        "name": "Western Ukraine",
        "bbox": [48.0, 22.0, 51.0, 26.0],
        "srtm_tiles": 12,
        "estimated_size_gb": 0.3,
    },
    "kyiv_region": {
        "name": "Kyiv Region",
        "bbox": [49.5, 29.5, 51.5, 32.5],
        "srtm_tiles": 6,
        "estimated_size_gb": 0.15,
    },
}

# Download progress tracking (in-memory): task_id -> mutable progress dict.
# Not persisted; restarting the server forgets in-flight downloads.
_download_tasks: dict[str, dict] = {}
|
||||||
|
|
||||||
|
|
||||||
|
class RegionInfo(BaseModel):
    """A downloadable region plus its current local cache state."""

    id: str
    name: str
    # [min_lat, min_lon, max_lat, max_lon] in degrees.
    bbox: list[float]
    srtm_tiles: int
    estimated_size_gb: float
    # True once every SRTM tile covering the bbox is cached locally.
    downloaded: bool = False
    # Percentage (0-100) of required tiles already cached.
    download_progress: float = 0.0
|
||||||
|
|
||||||
|
|
||||||
|
class DownloadProgress(BaseModel):
    """Snapshot of a background region download task."""

    task_id: str
    region_id: str
    status: str  # queued, downloading_terrain, downloading_osm, done, error
    progress: float  # 0-100
    current_step: str
    downloaded_mb: float
    # Populated only when status == "error".
    error: Optional[str] = None
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/available")
async def list_regions() -> list[RegionInfo]:
    """List available regions for download"""
    # Imported inside the handler (NOTE(review): presumably to avoid a
    # circular import at module load — confirm).
    from app.services.terrain_service import terrain_service

    cached_tiles = set(terrain_service.get_cached_tiles())

    result = []
    for region_id, info in REGIONS.items():
        min_lat, min_lon, max_lat, max_lon = info["bbox"]
        # One SRTM tile per integer lat/lon cell touching the bbox.
        needed_tiles = set()
        for lat in range(int(min_lat), int(max_lat) + 1):
            for lon in range(int(min_lon), int(max_lon) + 1):
                tile = terrain_service.get_tile_name(lat, lon)
                needed_tiles.add(tile)

        # Progress = fraction of required tiles already present on disk.
        downloaded_tiles = needed_tiles & cached_tiles
        progress = len(downloaded_tiles) / len(needed_tiles) * 100 if needed_tiles else 0

        result.append(RegionInfo(
            id=region_id,
            name=info["name"],
            bbox=info["bbox"],
            srtm_tiles=info["srtm_tiles"],
            estimated_size_gb=info["estimated_size_gb"],
            downloaded=progress >= 100,
            download_progress=progress
        ))

    return result
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/download/{region_id}")
async def start_download(region_id: str, background_tasks: BackgroundTasks) -> dict:
    """Start downloading a region in the background"""
    if region_id not in REGIONS:
        raise HTTPException(404, f"Region '{region_id}' not found")

    # Check if already downloading: reuse the in-flight task for this region
    # instead of queueing a duplicate download.
    for task_id, task in _download_tasks.items():
        if task["region_id"] == region_id and task["status"] not in ["done", "error"]:
            return {"task_id": task_id, "status": "already_downloading"}

    # A short random id is sufficient for this in-memory registry.
    task_id = str(uuid.uuid4())[:8]

    _download_tasks[task_id] = {
        "region_id": region_id,
        "status": "queued",
        "progress": 0.0,
        "current_step": "Starting...",
        "downloaded_mb": 0.0,
        "error": None
    }

    # The task runs after the response is sent; clients poll /download/{task_id}/progress.
    background_tasks.add_task(_download_region_task, task_id, region_id)

    return {"task_id": task_id, "status": "started"}
|
||||||
|
|
||||||
|
|
||||||
|
async def _download_region_task(task_id: str, region_id: str):
    """Background task to download region data.

    Phase 1 (0-70%): fetch SRTM terrain tiles covering the region bbox.
    Phase 2 (70-100%): pre-cache OSM buildings/water/vegetation per 1x1-degree
    chunk. Progress is written into the shared _download_tasks entry, which
    the progress endpoint reads.
    """
    from app.services.terrain_service import terrain_service
    from app.services.buildings_service import buildings_service
    from app.services.water_service import water_service
    from app.services.vegetation_service import vegetation_service

    task = _download_tasks[task_id]
    region = REGIONS[region_id]
    min_lat, min_lon, max_lat, max_lon = region["bbox"]

    # 1x1-degree cells covering the bbox (both corner cells inclusive).
    # Computed once arithmetically instead of the former duplicated
    # count-by-iteration loops; the same cell count drives both phases.
    lat_range = range(int(min_lat), int(max_lat) + 1)
    lon_range = range(int(min_lon), int(max_lon) + 1)
    total_cells = len(lat_range) * len(lon_range)

    try:
        # Phase 1: Download SRTM tiles (0-70%)
        task["status"] = "downloading_terrain"
        task["current_step"] = "Downloading terrain data..."

        downloaded_count = 0
        for lat in lat_range:
            for lon in lon_range:
                tile_name = terrain_service.get_tile_name(lat, lon)
                await terrain_service.download_tile(tile_name)
                downloaded_count += 1
                task["progress"] = (downloaded_count / total_cells) * 70.0
                task["current_step"] = f"Terrain: {downloaded_count}/{total_cells} tiles"
                task["downloaded_mb"] = terrain_service.get_cache_size_mb()

        # Phase 2: Pre-cache OSM data (70-100%)
        task["status"] = "downloading_osm"
        task["current_step"] = "Downloading building data..."

        # Each layer is fetched best-effort: a failed chunk is logged and
        # skipped so one Overpass error does not abort the whole region.
        fetchers = [
            ("Buildings", buildings_service.fetch_buildings),
            ("Water", water_service.fetch_water_bodies),
            ("Vegetation", vegetation_service.fetch_vegetation),
        ]

        done_chunks = 0
        for lat in lat_range:
            for lon in lon_range:
                for label, fetch in fetchers:
                    try:
                        await fetch(
                            float(lat), float(lon),
                            float(lat + 1), float(lon + 1)
                        )
                    except Exception as e:
                        print(f"[Region] {label} chunk error: {e}")

                done_chunks += 1
                task["progress"] = 70 + (done_chunks / total_cells) * 30
                task["current_step"] = f"OSM data: {done_chunks}/{total_cells} chunks"

                # Delay to avoid Overpass rate limiting
                await asyncio.sleep(1.0)

        task["status"] = "done"
        task["progress"] = 100.0
        task["current_step"] = "Complete!"

    except Exception as e:
        # Any terrain-phase failure lands here; record it for the progress poller.
        task["status"] = "error"
        task["error"] = str(e)
        task["current_step"] = f"Error: {e}"
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/download/{task_id}/progress")
async def get_download_progress(task_id: str) -> DownloadProgress:
    """Get download progress for a task"""
    if task_id not in _download_tasks:
        raise HTTPException(404, "Task not found")

    # Copy the mutable in-memory dict into the typed response model.
    task = _download_tasks[task_id]
    return DownloadProgress(
        task_id=task_id,
        region_id=task["region_id"],
        status=task["status"],
        progress=task["progress"],
        current_step=task["current_step"],
        downloaded_mb=task["downloaded_mb"],
        error=task["error"]
    )
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete("/cache")
async def clear_cache() -> dict:
    """Clear all OSM cached data (keeps SRTM terrain)"""
    from app.services.buildings_service import buildings_service
    from app.services.water_service import water_service
    from app.services.vegetation_service import vegetation_service

    # Terrain tiles are deliberately left untouched; only OSM layers are cleared.
    buildings_service.cache.clear()
    water_service.cache.clear()
    vegetation_service.cache.clear()

    return {"status": "ok", "message": "OSM cache cleared"}
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/cache/stats")
async def get_cache_stats() -> dict:
    """Get cache statistics"""
    from app.services.terrain_service import terrain_service
    from app.services.buildings_service import buildings_service
    from app.services.water_service import water_service
    from app.services.vegetation_service import vegetation_service

    # All sizes reported in megabytes, rounded to 2 decimal places.
    return {
        "terrain_mb": round(terrain_service.get_cache_size_mb(), 2),
        "terrain_tiles": len(terrain_service.get_cached_tiles()),
        "buildings_mb": round(buildings_service.cache.get_size_mb(), 2),
        "water_mb": round(water_service.cache.get_size_mb(), 2),
        "vegetation_mb": round(vegetation_service.cache.get_size_mb(), 2),
    }
|
||||||
185
backend/app/api/routes/system.py
Normal file
185
backend/app/api/routes/system.py
Normal file
@@ -0,0 +1,185 @@
|
|||||||
|
import os
|
||||||
|
import json
|
||||||
|
import asyncio
|
||||||
|
import multiprocessing as mp
|
||||||
|
from pathlib import Path
|
||||||
|
from fastapi import APIRouter
|
||||||
|
|
||||||
|
router = APIRouter()
|
||||||
|
|
||||||
|
# Valid SRTM tile sizes (bytes): a square grid of 2-byte elevation samples.
_SRTM1_SIZE = 3601 * 3601 * 2  # 25,934,402 — 1 arc-second tile
_SRTM3_SIZE = 1201 * 1201 * 2  # 2,884,802 — 3 arc-second tile
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/info")
async def get_system_info():
    """Return system info: CPU cores, GPU availability, parallel backend."""
    cpu_cores = mp.cpu_count() or 1

    # Check Ray: availability flag plus whether a runtime is already initialized.
    ray_available = False
    ray_initialized = False
    try:
        from app.services.parallel_coverage_service import RAY_AVAILABLE
        ray_available = RAY_AVAILABLE
        if ray_available:
            import ray
            ray_initialized = ray.is_initialized()
    except Exception:
        # Best-effort probe: a missing/broken Ray simply reports as unavailable.
        pass

    # Check GPU via gpu_service
    from app.services.gpu_service import gpu_service
    gpu_info = gpu_service.get_info()

    # Determine parallel backend: prefer Ray, then a process pool, else sequential.
    if ray_available:
        parallel_backend = "ray"
    elif cpu_cores > 1:
        parallel_backend = "process_pool"
    else:
        parallel_backend = "sequential"

    return {
        "cpu_cores": cpu_cores,
        # Worker count is capped at 14 regardless of core count.
        "parallel_workers": min(cpu_cores, 14),
        "parallel_backend": parallel_backend,
        "ray_available": ray_available,
        "ray_initialized": ray_initialized,
        "gpu": gpu_info,
        "gpu_available": gpu_info.get("available", False),
    }
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/models")
async def get_propagation_models():
    """Return available propagation models and their valid ranges."""
    # Imported lazily inside the handler rather than at module top level.
    from app.core.engine import engine
    return {
        "models": engine.get_available_models(),
    }
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/shutdown")
async def shutdown():
    """Graceful shutdown endpoint. Kills worker processes then self-terminates.

    Electron calls this first, waits briefly, then does a PID-tree kill.
    The delayed os._exit (3 s) is a safety net in case Electron doesn't kill us.
    """
    from app.services.parallel_coverage_service import _kill_worker_processes

    killed = _kill_worker_processes()

    # Safety net: self-terminate after 3s if Electron doesn't kill us.
    # Delay is long enough for Electron to do PID-tree kill first (preferred).
    loop = asyncio.get_running_loop()
    loop.call_later(3.0, lambda: os._exit(0))

    return {"status": "shutting down", "workers_killed": killed}
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/diagnostics")
async def get_diagnostics():
    """Validate terrain tiles and OSM cache files.

    Checks:
    - Terrain .hgt files: must be exactly SRTM1 or SRTM3 size
    - OSM cache .json files: must be valid JSON with expected structure
    - Cache manager stats (memory + disk)
    """
    data_path = Path(os.environ.get('RFCP_DATA_PATH', './data'))
    terrain_path = data_path / 'terrain'
    osm_dirs = [
        data_path / 'osm' / 'buildings',
        data_path / 'osm' / 'streets',
        data_path / 'osm' / 'vegetation',
        data_path / 'osm' / 'water',
    ]

    # --- Terrain tiles ---
    terrain_tiles = []
    terrain_errors = []
    total_terrain_bytes = 0

    if terrain_path.exists():
        for hgt in sorted(terrain_path.glob("*.hgt")):
            size = hgt.stat().st_size
            total_terrain_bytes += size
            # Tile resolution is identified purely by exact file size.
            if size == _SRTM1_SIZE:
                terrain_tiles.append({"name": hgt.name, "type": "SRTM1", "size": size})
            elif size == _SRTM3_SIZE:
                terrain_tiles.append({"name": hgt.name, "type": "SRTM3", "size": size})
            else:
                terrain_errors.append({
                    "name": hgt.name,
                    "size": size,
                    "error": f"Invalid size (expected {_SRTM1_SIZE} or {_SRTM3_SIZE})",
                })

    # --- OSM cache ---
    osm_files = []
    osm_errors = []
    total_osm_bytes = 0

    for osm_dir in osm_dirs:
        if not osm_dir.exists():
            continue
        # The directory name doubles as the cache category label.
        category = osm_dir.name
        for jf in sorted(osm_dir.glob("*.json")):
            fsize = jf.stat().st_size
            total_osm_bytes += fsize
            try:
                data = json.loads(jf.read_text())
                # Accept both the long (_cached_at/data) and short (_ts/v)
                # cache-entry key schemes.
                has_timestamp = '_cached_at' in data or '_ts' in data
                has_data = 'data' in data or 'v' in data
                if has_timestamp and has_data:
                    osm_files.append({
                        "name": jf.name,
                        "category": category,
                        "size": fsize,
                        "valid": True,
                    })
                else:
                    osm_errors.append({
                        "name": jf.name,
                        "category": category,
                        "size": fsize,
                        "error": "Missing expected keys (_cached_at/data or _ts/v)",
                    })
            except json.JSONDecodeError as e:
                osm_errors.append({
                    "name": jf.name,
                    "category": category,
                    "size": fsize,
                    "error": f"Invalid JSON: {e}",
                })

    # --- Cache manager stats ---
    try:
        from app.services.cache import cache_manager
        cache_stats = cache_manager.stats()
    except Exception:
        # Cache manager is optional here; report null rather than fail the probe.
        cache_stats = None

    return {
        "data_path": str(data_path),
        "terrain": {
            "path": str(terrain_path),
            "exists": terrain_path.exists(),
            "tile_count": len(terrain_tiles),
            "error_count": len(terrain_errors),
            "total_mb": round(total_terrain_bytes / (1024 * 1024), 1),
            "tiles": terrain_tiles,
            "errors": terrain_errors,
        },
        "osm_cache": {
            "valid_count": len(osm_files),
            "error_count": len(osm_errors),
            "total_mb": round(total_osm_bytes / (1024 * 1024), 1),
            "files": osm_files,
            "errors": osm_errors,
        },
        "cache_manager": cache_stats,
    }
|
||||||
272
backend/app/api/routes/terrain.py
Normal file
272
backend/app/api/routes/terrain.py
Normal file
@@ -0,0 +1,272 @@
|
|||||||
|
import os
|
||||||
|
import asyncio
|
||||||
|
import math
|
||||||
|
|
||||||
|
from fastapi import APIRouter, HTTPException, Query
|
||||||
|
from fastapi.responses import FileResponse
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from app.core.config import settings
|
||||||
|
from app.services.terrain_service import terrain_service
|
||||||
|
from app.services.los_service import los_service
|
||||||
|
|
||||||
|
router = APIRouter()
|
||||||
|
|
||||||
|
|
||||||
|
def _build_elevation_grid(min_lat, max_lat, min_lon, max_lon, resolution):
    """Build a 2D elevation grid. Runs in thread executor (CPU-bound)."""
    import numpy as np

    # Cap the grid at 200x200 samples regardless of the requested resolution.
    rows = min(resolution, 200)
    cols = min(resolution, 200)

    lat_samples = np.linspace(max_lat, min_lat, rows)  # north to south
    lon_samples = np.linspace(min_lon, max_lon, cols)

    # Sample every (lat, lon) pair synchronously; row order is north-first.
    grid = [
        [terrain_service.get_elevation_sync(float(lat), float(lon)) for lon in lon_samples]
        for lat in lat_samples
    ]

    # Flatten once to find the extremes; an empty grid reports 0 for both.
    flat = [elev for row in grid for elev in row]
    lowest = min(flat) if flat else 0
    highest = max(flat) if flat else 0

    return {
        "grid": grid,
        "rows": rows,
        "cols": cols,
        "min_elevation": lowest,
        "max_elevation": highest,
        "bbox": {
            "min_lat": min_lat,
            "max_lat": max_lat,
            "min_lon": min_lon,
            "max_lon": max_lon,
        },
    }
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/elevation")
async def get_elevation(
    lat: float = Query(..., ge=-90, le=90, description="Latitude"),
    lon: float = Query(..., ge=-180, le=180, description="Longitude")
):
    """Get elevation at a specific point"""
    elevation = await terrain_service.get_elevation(lat, lon)
    # Echo the query coordinates so the response is self-describing.
    return {
        "lat": lat,
        "lon": lon,
        "elevation": elevation,
        "unit": "meters"
    }
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/elevation-grid")
async def get_elevation_grid(
    min_lat: float = Query(..., ge=-90, le=90, description="South boundary"),
    max_lat: float = Query(..., ge=-90, le=90, description="North boundary"),
    min_lon: float = Query(..., ge=-180, le=180, description="West boundary"),
    max_lon: float = Query(..., ge=-180, le=180, description="East boundary"),
    resolution: int = Query(100, ge=10, le=200, description="Grid size (rows/cols)"),
):
    """Get elevation grid for a bounding box. Returns a 2D array for terrain visualization.

    Raises HTTP 400 for an inverted bbox or one larger than 2 degrees per axis.
    """
    if max_lat <= min_lat or max_lon <= min_lon:
        raise HTTPException(400, "Invalid bbox: max must be greater than min")
    if (max_lat - min_lat) > 2.0 or (max_lon - min_lon) > 2.0:
        raise HTTPException(400, "Bbox too large (max 2 degrees per axis)")

    # Ensure terrain tiles are loaded for this area
    await terrain_service.ensure_tiles_for_bbox(min_lat, min_lon, max_lat, max_lon)

    # Pre-load all tiles that cover the bbox so the sync grid build below
    # never has to fetch. NOTE(review): this reaches into the private
    # terrain_service._load_tile — consider exposing a public preload API.
    lat_start = int(math.floor(min_lat))
    lat_end = int(math.floor(max_lat))
    lon_start = int(math.floor(min_lon))
    lon_end = int(math.floor(max_lon))
    for lat_i in range(lat_start, lat_end + 1):
        for lon_i in range(lon_start, lon_end + 1):
            # Sample the cell center so the name maps to this 1x1-degree cell.
            tile_name = terrain_service.get_tile_name(lat_i + 0.5, lon_i + 0.5)
            terrain_service._load_tile(tile_name)

    # Build grid in a thread executor (CPU-bound sync elevation lookups).
    # FIX: use get_running_loop() — get_event_loop() inside a coroutine is
    # deprecated since Python 3.10 and scheduled for removal.
    loop = asyncio.get_running_loop()
    result = await loop.run_in_executor(
        None, _build_elevation_grid,
        min_lat, max_lat, min_lon, max_lon, resolution,
    )
    return result
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/profile")
async def get_elevation_profile(
    lat1: float = Query(..., description="Start latitude"),
    lon1: float = Query(..., description="Start longitude"),
    lat2: float = Query(..., description="End latitude"),
    lon2: float = Query(..., description="End longitude"),
    points: int = Query(100, ge=10, le=500, description="Number of sample points")
):
    """Get elevation profile between two points"""
    profile = await terrain_service.get_elevation_profile(lat1, lon1, lat2, lon2, points)

    # num_points reflects the actual profile length, which may differ from
    # the requested `points` (determined by the terrain service).
    return {
        "start": {"lat": lat1, "lon": lon1},
        "end": {"lat": lat2, "lon": lon2},
        "num_points": len(profile),
        "profile": profile
    }
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/los")
async def check_line_of_sight(
    tx_lat: float = Query(..., description="Transmitter latitude"),
    tx_lon: float = Query(..., description="Transmitter longitude"),
    tx_height: float = Query(..., ge=0, description="Transmitter height above ground (m)"),
    rx_lat: float = Query(..., description="Receiver latitude"),
    rx_lon: float = Query(..., description="Receiver longitude"),
    rx_height: float = Query(1.5, ge=0, description="Receiver height above ground (m)")
):
    """Check line-of-sight between transmitter and receiver"""
    # Thin wrapper: the response shape is whatever los_service returns.
    result = await los_service.check_line_of_sight(
        tx_lat, tx_lon, tx_height,
        rx_lat, rx_lon, rx_height
    )
    return result
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/fresnel")
async def check_fresnel_clearance(
    tx_lat: float = Query(..., description="Transmitter latitude"),
    tx_lon: float = Query(..., description="Transmitter longitude"),
    tx_height: float = Query(..., ge=0, description="Transmitter height (m)"),
    rx_lat: float = Query(..., description="Receiver latitude"),
    rx_lon: float = Query(..., description="Receiver longitude"),
    rx_height: float = Query(1.5, ge=0, description="Receiver height (m)"),
    frequency: float = Query(1800, ge=100, le=6000, description="Frequency (MHz)")
):
    """Calculate Fresnel zone clearance"""
    try:
        result = await los_service.calculate_fresnel_clearance(
            tx_lat, tx_lon, tx_height,
            rx_lat, rx_lon, rx_height,
            frequency
        )
        return result
    except Exception as e:
        # Any service failure is reported as a 500 with the underlying message.
        raise HTTPException(status_code=500, detail=f"Fresnel calculation error: {str(e)}")
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/tiles")
async def list_cached_tiles():
    """List cached SRTM tiles"""
    # A tile is "cached" when its .hgt file is present in the terrain directory.
    tiles = list(terrain_service.terrain_path.glob("*.hgt"))
    return {
        "cache_dir": str(terrain_service.terrain_path),
        "tiles": [t.stem for t in tiles],
        "count": len(tiles)
    }
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/file/{region}")
async def get_terrain_file(region: str):
    """Serve raw SRTM terrain .hgt files (legacy compatibility).

    Raises HTTP 404 when the region is unknown or the name is malformed.
    """
    # SECURITY: `region` is untrusted client input used to build a filesystem
    # path — reject anything that is not a bare file stem (no separators,
    # no "..") to prevent path traversal out of TERRAIN_DATA_DIR.
    if os.path.basename(region) != region or region in ("", ".", ".."):
        raise HTTPException(status_code=404, detail=f"Region '{region}' not found")
    terrain_path = os.path.join(settings.TERRAIN_DATA_DIR, f"{region}.hgt")
    if os.path.exists(terrain_path):
        return FileResponse(terrain_path)
    raise HTTPException(status_code=404, detail=f"Region '{region}' not found")
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/status")
async def terrain_status():
    """Return terrain data availability info."""
    cached_tiles = terrain_service.get_cached_tiles()
    cache_size = terrain_service.get_cache_size_mb()

    # Categorize by resolution based on file size: a tile of exactly
    # 3601*3601 2-byte samples is SRTM1; anything else readable counts as SRTM3.
    srtm1_tiles = []
    srtm3_tiles = []
    for t in cached_tiles:
        tile_path = terrain_service.terrain_path / f"{t}.hgt"
        try:
            if tile_path.stat().st_size == 3601 * 3601 * 2:
                srtm1_tiles.append(t)
            else:
                srtm3_tiles.append(t)
        except Exception:
            # Unreadable file (e.g. removed mid-scan): skip it silently.
            pass

    return {
        "total_tiles": len(cached_tiles),
        "srtm1": {
            "count": len(srtm1_tiles),
            "resolution_m": 30,
            "tiles": sorted(srtm1_tiles),
        },
        "srtm3": {
            "count": len(srtm3_tiles),
            "resolution_m": 90,
            "tiles": sorted(srtm3_tiles),
        },
        "cache_size_mb": round(cache_size, 1),
        # Number of tiles currently held in the in-process memory cache.
        "memory_cached": len(terrain_service._tile_cache),
        "terra_server": "https://terra.eliah.one",
    }
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/download")
async def terrain_download(request: dict):
    """Pre-download tiles for a region.

    Body: {"center_lat": 48.46, "center_lon": 35.04, "radius_km": 50}
    Or: {"tiles": ["N48E034", "N48E035", "N47E034", "N47E035"]}
    """
    if "tiles" in request:
        tile_list = request["tiles"]
    else:
        # No explicit tile list: derive one from center + radius (defaults
        # mirror the documented example body).
        tile_list = terrain_service.get_required_tiles(
            request.get("center_lat", 48.46),
            request.get("center_lon", 35.04),
            request.get("radius_km", 50),
        )

    missing = [name for name in tile_list if not terrain_service.get_tile_path(name).exists()]

    if not missing:
        return {"status": "ok", "message": "All tiles already cached", "count": len(tile_list)}

    # Download missing tiles, routing each name into the matching bucket.
    downloaded, failed = [], []
    for name in missing:
        bucket = downloaded if await terrain_service.download_tile(name) else failed
        bucket.append(name)

    return {
        "status": "ok",
        "required": len(tile_list),
        "already_cached": len(tile_list) - len(missing),
        "downloaded": downloaded,
        "failed": failed,
    }
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/index")
async def terrain_index():
    """Fetch tile index from terra server."""
    import httpx

    try:
        async with httpx.AsyncClient(timeout=10.0) as http:
            response = await http.get("https://terra.eliah.one/api/index")
            if response.status_code == 200:
                return response.json()
    except Exception:
        # Best-effort endpoint: any network or parse failure simply falls
        # through to the offline reply below.
        pass
    return {"error": "Could not reach terra.eliah.one", "offline": True}
|
||||||
306
backend/app/api/websocket.py
Normal file
306
backend/app/api/websocket.py
Normal file
@@ -0,0 +1,306 @@
|
|||||||
|
"""
|
||||||
|
WebSocket handler for real-time coverage calculation with progress.
|
||||||
|
|
||||||
|
Uses the same coverage_service pipeline as the HTTP endpoint but sends
|
||||||
|
progress updates during computation phases.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import time
|
||||||
|
import asyncio
|
||||||
|
import logging
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from fastapi import WebSocket, WebSocketDisconnect
|
||||||
|
|
||||||
|
from app.services.coverage_service import (
|
||||||
|
coverage_service, SiteParams, CoverageSettings, apply_preset,
|
||||||
|
select_propagation_model,
|
||||||
|
)
|
||||||
|
from app.services.parallel_coverage_service import CancellationToken
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class ConnectionManager:
    """Track cancellation tokens per calculation."""

    def __init__(self):
        # calc_id -> CancellationToken for every in-flight calculation.
        self._cancel_tokens: dict[str, CancellationToken] = {}

    @staticmethod
    async def _safe_send(ws: WebSocket, build_payload, log, label: str):
        """Build and send one JSON message; log (never raise) on failure.

        ``build_payload`` is called inside the try so serialization errors
        are swallowed the same way send errors are.
        """
        try:
            await ws.send_json(build_payload())
        except Exception as e:
            log(f"[WS] {label} failed: {e}")

    async def send_progress(
        self, ws: WebSocket, calc_id: str,
        phase: str, progress: float, eta: Optional[float] = None,
    ):
        """Push a progress update; ``progress`` is clamped to 1.0."""
        await self._safe_send(
            ws,
            lambda: {
                "type": "progress",
                "calculation_id": calc_id,
                "phase": phase,
                "progress": min(progress, 1.0),
                "eta_seconds": eta,
            },
            logger.debug,
            "send_progress",
        )

    async def send_result(self, ws: WebSocket, calc_id: str, result: dict):
        """Push the final calculation result."""
        await self._safe_send(
            ws,
            lambda: {
                "type": "result",
                "calculation_id": calc_id,
                "data": result,
            },
            logger.warning,
            "send_result",
        )

    async def send_error(self, ws: WebSocket, calc_id: str, error: str):
        """Push an error message for a calculation."""
        await self._safe_send(
            ws,
            lambda: {
                "type": "error",
                "calculation_id": calc_id,
                "message": error,
            },
            logger.warning,
            "send_error",
        )

    async def send_partial_results(
        self, ws: WebSocket, calc_id: str,
        points: list, tile_idx: int, total_tiles: int,
    ):
        """Send per-tile partial results for progressive rendering."""
        await self._safe_send(
            ws,
            lambda: {
                "type": "partial_results",
                "calculation_id": calc_id,
                "points": [p.model_dump() for p in points],
                "tile": tile_idx,
                "total_tiles": total_tiles,
                "progress": (tile_idx + 1) / total_tiles,
            },
            logger.debug,
            "send_partial_results",
        )
|
||||||
|
|
||||||
|
|
||||||
|
# Module-level singleton shared by every WebSocket connection in this process.
ws_manager = ConnectionManager()
|
||||||
|
|
||||||
|
|
||||||
|
async def _run_calculation(ws: WebSocket, calc_id: str, data: dict):
    """Run coverage calculation with progress updates via WebSocket.

    Validates the request (site count, radius, resolution), runs the same
    coverage_service pipeline as the HTTP endpoint under a radius-dependent
    timeout, and streams progress / partial results / the final result back
    through ``ws_manager``. Progress arrives over two channels: a thread-safe
    callback that schedules direct sends on the event loop, plus a 0.5 s
    backup poller in case a scheduled send was missed.
    """
    cancel_token = CancellationToken()
    ws_manager._cancel_tokens[calc_id] = cancel_token

    # Shared progress state — written by worker threads, polled by event loop.
    # Python GIL makes dict value assignment atomic for simple types.
    _progress = {"phase": "Initializing", "pct": 0.0, "seq": 0}
    _done = False

    # Get event loop for cross-thread scheduling of WS sends.
    loop = asyncio.get_running_loop()
    _last_direct_pct = 0.0
    _last_direct_phase = ""

    def sync_progress_fn(phase: str, pct: float, _eta: Optional[float] = None):
        """Thread-safe progress callback — updates dict AND schedules direct WS send."""
        nonlocal _last_direct_pct, _last_direct_phase
        _progress["phase"] = phase
        _progress["pct"] = pct
        # seq lets the poller detect "something changed since my last send".
        _progress["seq"] += 1
        # Schedule direct WS send via event loop (works from any thread).
        # Throttle: only send on phase change or >=2% progress.
        if phase != _last_direct_phase or pct - _last_direct_pct >= 0.02:
            _last_direct_pct = pct
            _last_direct_phase = phase
            try:
                loop.call_soon_threadsafe(
                    asyncio.ensure_future,
                    ws_manager.send_progress(ws, calc_id, phase, pct),
                )
            except RuntimeError:
                pass  # Event loop closed

    try:
        sites_data = data.get("sites", [])
        settings_data = data.get("settings", {})

        if not sites_data:
            await ws_manager.send_error(ws, calc_id, "At least one site required")
            return

        if len(sites_data) > 10:
            await ws_manager.send_error(ws, calc_id, "Maximum 10 sites per request")
            return

        # Parse sites and settings (same format as HTTP endpoint)
        sites = [SiteParams(**s) for s in sites_data]
        settings = CoverageSettings(**settings_data)

        if settings.radius > 50000:
            await ws_manager.send_error(ws, calc_id, "Maximum radius 50km")
            return
        if settings.resolution < 50:
            await ws_manager.send_error(ws, calc_id, "Minimum resolution 50m")
            return

        # Preset expansion happens on a copy so the raw settings stay intact.
        effective_settings = apply_preset(settings.model_copy())

        # Determine models used
        from app.api.routes.coverage import _get_active_models
        models_used = _get_active_models(effective_settings)
        env = getattr(effective_settings, 'environment', 'urban')
        primary_model = select_propagation_model(sites[0].frequency, env)
        if primary_model.name not in models_used:
            models_used.insert(0, primary_model.name)

        await ws_manager.send_progress(ws, calc_id, "Initializing", 0.02)

        # ── Tile callback for progressive results (large radius) ──
        async def _tile_callback(tile_points, tile_idx, total_tiles):
            await ws_manager.send_partial_results(
                ws, calc_id, tile_points, tile_idx, total_tiles,
            )

        # ── Backup progress poller: catches anything call_soon_threadsafe missed ──
        async def progress_poller():
            last_sent_seq = 0
            last_sent_pct = 0.0
            last_sent_phase = "Initializing"
            while not _done:
                await asyncio.sleep(0.5)
                seq = _progress["seq"]
                pct = _progress["pct"]
                phase = _progress["phase"]
                # Send on any phase change OR >=3% progress (primary sends handle fine-grained)
                if seq != last_sent_seq and (
                    phase != last_sent_phase
                    or pct - last_sent_pct >= 0.03
                ):
                    await ws_manager.send_progress(ws, calc_id, phase, pct)
                    last_sent_seq = seq
                    last_sent_pct = pct
                    last_sent_phase = phase

        poller_task = asyncio.create_task(progress_poller())

        # Dynamic timeout based on radius
        radius_m = settings.radius
        if radius_m > 30_000:
            calc_timeout = 600.0  # 10 min for 30-50km
        elif radius_m > 10_000:
            calc_timeout = 480.0  # 8 min for 10-30km
        else:
            calc_timeout = 300.0  # 5 min for ≤10km

        # Run calculation with timeout
        start_time = time.time()
        try:
            if len(sites) == 1:
                points = await asyncio.wait_for(
                    coverage_service.calculate_coverage(
                        sites[0], settings, cancel_token,
                        progress_fn=sync_progress_fn,
                        tile_callback=_tile_callback,
                    ),
                    timeout=calc_timeout,
                )
            else:
                points = await asyncio.wait_for(
                    coverage_service.calculate_multi_site_coverage(
                        sites, settings, cancel_token,
                        progress_fn=sync_progress_fn,
                        tile_callback=_tile_callback,
                    ),
                    timeout=calc_timeout,
                )
        except asyncio.TimeoutError:
            # Order matters: cancel workers, stop the poller, then kill any
            # still-running worker processes before reporting the timeout.
            cancel_token.cancel()
            _done = True
            await poller_task
            from app.services.parallel_coverage_service import _kill_worker_processes
            _kill_worker_processes()
            timeout_min = int(calc_timeout / 60)
            await ws_manager.send_error(ws, calc_id, f"Calculation timeout ({timeout_min} min)")
            return
        except asyncio.CancelledError:
            cancel_token.cancel()
            _done = True
            await poller_task
            await ws_manager.send_error(ws, calc_id, "Calculation cancelled")
            return

        # Stop poller and send final progress
        _done = True
        await poller_task

        computation_time = time.time() - start_time

        # Build response (identical format to HTTP endpoint)
        rsrp_values = [p.rsrp for p in points]
        los_count = sum(1 for p in points if p.has_los)

        stats = {
            "min_rsrp": min(rsrp_values) if rsrp_values else 0,
            "max_rsrp": max(rsrp_values) if rsrp_values else 0,
            "avg_rsrp": sum(rsrp_values) / len(rsrp_values) if rsrp_values else 0,
            "los_percentage": (los_count / len(points) * 100) if points else 0,
            "points_with_buildings": sum(1 for p in points if p.building_loss > 0),
            "points_with_terrain_loss": sum(1 for p in points if p.terrain_loss > 0),
            "points_with_reflection_gain": sum(1 for p in points if p.reflection_gain > 0),
            "points_with_vegetation_loss": sum(1 for p in points if p.vegetation_loss > 0),
            "points_with_rain_loss": sum(1 for p in points if p.rain_loss > 0),
            "points_with_indoor_loss": sum(1 for p in points if p.indoor_loss > 0),
            "points_with_atmospheric_loss": sum(1 for p in points if p.atmospheric_loss > 0),
        }

        result = {
            "points": [p.model_dump() for p in points],
            "count": len(points),
            "settings": effective_settings.model_dump(),
            "stats": stats,
            "computation_time": round(computation_time, 2),
            "models_used": models_used,
        }

        # Send "Complete" before result so frontend shows 100%
        await ws_manager.send_progress(ws, calc_id, "Complete", 1.0)
        await ws_manager.send_result(ws, calc_id, result)
        logger.info(f"[WS] calc={calc_id} done: {len(points)} pts, {computation_time:.1f}s")

    except Exception as e:
        logger.error(f"[WS] Calculation error: {e}", exc_info=True)
        _done = True
        # NOTE(review): if the exception fired before poller_task was created
        # (e.g. in request validation), awaiting it raises NameError — the
        # inner except swallows that, but consider initializing poller_task
        # to None up front.
        try:
            await poller_task
        except Exception:
            pass
        await ws_manager.send_error(ws, calc_id, str(e))
    finally:
        ws_manager._cancel_tokens.pop(calc_id, None)
|
||||||
|
|
||||||
|
|
||||||
|
async def websocket_endpoint(websocket: WebSocket):
    """WebSocket endpoint for coverage calculations with progress.

    Client messages handled:
      {"type": "calculate", "id": ..., "sites": [...], "settings": {...}}
      {"type": "cancel", "id": ...}
      {"type": "ping"}
    """
    await websocket.accept()

    try:
        while True:
            data = await websocket.receive_json()
            msg_type = data.get("type")

            if msg_type == "calculate":
                calc_id = data.get("id", "")
                # Fire-and-forget: the task reports back over the socket.
                asyncio.create_task(_run_calculation(websocket, calc_id, data))

            elif msg_type == "cancel":
                calc_id = data.get("id")
                token = ws_manager._cancel_tokens.get(calc_id)
                if token:
                    token.cancel()

            elif msg_type == "ping":
                await websocket.send_json({"type": "pong"})

    except WebSocketDisconnect:
        # NOTE(review): ws_manager is a process-wide singleton, so this
        # cancels ALL in-flight calculations, not just this connection's —
        # confirm whether per-connection token tracking is needed.
        # list() copies the values in case a finishing task pops its token.
        for token in list(ws_manager._cancel_tokens.values()):
            token.cancel()
    except Exception as e:
        # Previously swallowed silently; log so unexpected handler failures
        # are visible in the server logs.
        logger.error(f"[WS] WebSocket handler error: {e}", exc_info=True)
        for token in list(ws_manager._cancel_tokens.values()):
            token.cancel()
|
||||||
6
backend/app/core/__init__.py
Normal file
6
backend/app/core/__init__.py
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
"""
|
||||||
|
Core business logic for RFCP.
|
||||||
|
|
||||||
|
Existing modules: config.py, database.py
|
||||||
|
New modules: engine.py, grid.py, calculator.py, result.py
|
||||||
|
"""
|
||||||
BIN
backend/app/core/__pycache__/__init__.cpython-311.pyc
Normal file
BIN
backend/app/core/__pycache__/__init__.cpython-311.pyc
Normal file
Binary file not shown.
BIN
backend/app/core/__pycache__/config.cpython-311.pyc
Normal file
BIN
backend/app/core/__pycache__/config.cpython-311.pyc
Normal file
Binary file not shown.
BIN
backend/app/core/__pycache__/database.cpython-311.pyc
Normal file
BIN
backend/app/core/__pycache__/database.cpython-311.pyc
Normal file
Binary file not shown.
103
backend/app/core/calculator.py
Normal file
103
backend/app/core/calculator.py
Normal file
@@ -0,0 +1,103 @@
|
|||||||
|
"""
|
||||||
|
Point calculator — coordinates per-point propagation calculation.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import math
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from app.propagation.base import PropagationModel, PropagationInput
|
||||||
|
from app.propagation.itu_r_p526 import KnifeEdgeDiffractionModel
|
||||||
|
from app.core.result import PointResult
|
||||||
|
|
||||||
|
|
||||||
|
class PointCalculator:
    """Calculates propagation for individual grid points.

    Wraps a primary propagation model (falling back to free-space when the
    model declares the input out of range) and layers on antenna-pattern,
    knife-edge diffraction, building, and caller-supplied extra losses.
    """

    def __init__(self, model: PropagationModel, environment: str = "urban"):
        # Primary model used when its validity check accepts the input.
        self.model = model
        self.environment = environment
        # ITU-R P.526 knife-edge model for negative terrain clearance.
        self.diffraction = KnifeEdgeDiffractionModel()

    def calculate_point(
        self,
        site_lat: float, site_lon: float, site_height: float,
        site_power: float, site_gain: float, site_frequency: float,
        point_lat: float, point_lon: float,
        distance: float,
        has_los: bool = True,
        terrain_clearance: Optional[float] = None,
        building_loss: float = 0.0,
        extra_loss: float = 0.0,
        azimuth: Optional[float] = None,
        beamwidth: float = 360,
    ) -> PointResult:
        """Compute the received signal at one grid point.

        Args:
            site_*: transmitter position, height, power (dBm per the RSRP
                sum below — confirm), gain and frequency (MHz).
            point_lat, point_lon: receiver location.
            distance: tx→rx distance in meters (clamped to >= 1).
            has_los: caller's line-of-sight verdict; forced False when a
                negative terrain clearance triggers diffraction loss.
            terrain_clearance: Fresnel/terrain clearance; negative means
                obstructed (units assumed meters — TODO confirm).
            building_loss, extra_loss: additional attenuations in dB.
            azimuth, beamwidth: directional antenna; pattern loss applies
                only for beamwidth < 360 with an azimuth set.

        Returns:
            PointResult with rsrp and the individual loss components.
        """
        # Guard against log-of-zero / degenerate distances in the models.
        if distance < 1:
            distance = 1

        prop_input = PropagationInput(
            frequency_mhz=site_frequency,
            distance_m=distance,
            tx_height_m=site_height,
            rx_height_m=1.5,  # fixed receiver height assumption
            environment=self.environment,
        )

        # Primary model if its validity range covers this input; otherwise
        # fall back to free-space (local import avoids a cycle — presumably;
        # verify against module layout).
        if self.model.is_valid_for(prop_input):
            output = self.model.calculate(prop_input)
            path_loss = output.path_loss_db
        else:
            from app.propagation.free_space import FreeSpaceModel
            output = FreeSpaceModel().calculate(prop_input)
            path_loss = output.path_loss_db

        antenna_loss = 0.0
        if azimuth is not None and beamwidth < 360:
            antenna_loss = self._antenna_pattern_loss(
                site_lat, site_lon, point_lat, point_lon, azimuth, beamwidth,
            )

        terrain_loss = 0.0
        if terrain_clearance is not None and terrain_clearance < 0:
            # Negative clearance = obstructed path: add diffraction loss and
            # override the caller's LOS flag.
            terrain_loss = self.diffraction.calculate_clearance_loss(
                terrain_clearance, site_frequency,
            )
            has_los = False

        # Link budget: EIRP minus every loss component.
        rsrp = (
            site_power + site_gain
            - path_loss - antenna_loss
            - terrain_loss - building_loss - extra_loss
        )

        return PointResult(
            lat=point_lat, lon=point_lon, rsrp=rsrp,
            distance=distance, path_loss=path_loss,
            terrain_loss=terrain_loss, building_loss=building_loss,
            # diffraction_loss mirrors terrain_loss (same quantity here).
            diffraction_loss=terrain_loss, has_los=has_los,
            model_used=self.model.name,
        )

    @staticmethod
    def _antenna_pattern_loss(
        site_lat: float, site_lon: float,
        point_lat: float, point_lon: float,
        azimuth: float, beamwidth: float,
    ) -> float:
        """Directional antenna loss (dB) toward a point, capped at 25 dB.

        Computes the great-circle bearing site→point, takes the smallest
        angular offset from the antenna azimuth, then applies a quadratic
        roll-off: up to 3 dB inside the half-beamwidth, steeper beyond.
        """
        # Great-circle initial bearing (forward azimuth), normalized 0-360°.
        lat1, lon1 = math.radians(site_lat), math.radians(site_lon)
        lat2, lon2 = math.radians(point_lat), math.radians(point_lon)
        dlon = lon2 - lon1
        x = math.sin(dlon) * math.cos(lat2)
        y = math.cos(lat1) * math.sin(lat2) - math.sin(lat1) * math.cos(lat2) * math.cos(dlon)
        bearing = (math.degrees(math.atan2(x, y)) + 360) % 360

        # Smallest angle between bearing and boresight (0-180°).
        angle_diff = abs(bearing - azimuth)
        if angle_diff > 180:
            angle_diff = 360 - angle_diff

        half_bw = beamwidth / 2
        if angle_diff <= half_bw:
            # Inside the main lobe: quadratic up to 3 dB at the edge.
            loss = 3 * (angle_diff / half_bw) ** 2
        else:
            # Outside: continues from 3 dB, steeper quadratic.
            loss = 3 + 12 * ((angle_diff - half_bw) / half_bw) ** 2
        loss = min(loss, 25)  # front-to-back cap
        return loss
|
||||||
13
backend/app/core/config.py
Normal file
13
backend/app/core/config.py
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
from pydantic_settings import BaseSettings
|
||||||
|
|
||||||
|
|
||||||
|
class Settings(BaseSettings):
    """Application configuration; values may be overridden via environment
    variables or the ``.env`` file declared in ``Config``."""

    MONGODB_URL: str = "mongodb://localhost:27017"
    DATABASE_NAME: str = "rfcp"
    # Directory holding SRTM .hgt terrain tiles.
    TERRAIN_DATA_DIR: str = "/opt/rfcp/data/terrain"

    class Config:
        env_file = ".env"


# Module-level singleton imported throughout the app.
settings = Settings()
|
||||||
22
backend/app/core/database.py
Normal file
22
backend/app/core/database.py
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
from motor.motor_asyncio import AsyncIOMotorClient
|
||||||
|
from app.core.config import settings
|
||||||
|
|
||||||
|
|
||||||
|
class Database:
    # Populated by connect_to_mongo(); remains None until then.
    client: AsyncIOMotorClient = None


# Module-level holder shared by the connection helpers below.
db = Database()
|
||||||
|
|
||||||
|
|
||||||
|
async def get_database():
    """Return the application database handle.

    Requires a prior connect_to_mongo() call — db.client is None otherwise
    and the subscript would fail.
    """
    return db.client[settings.DATABASE_NAME]
|
||||||
|
|
||||||
|
|
||||||
|
async def connect_to_mongo():
    """Open the shared MongoDB client (call once at application startup)."""
    db.client = AsyncIOMotorClient(settings.MONGODB_URL)
|
||||||
|
|
||||||
|
|
||||||
|
async def close_mongo_connection():
    """Close the shared MongoDB client if one was ever opened."""
    if db.client:
        db.client.close()
|
||||||
240
backend/app/core/engine.py
Normal file
240
backend/app/core/engine.py
Normal file
@@ -0,0 +1,240 @@
|
|||||||
|
"""
|
||||||
|
CoverageEngine — main orchestrator for coverage calculations.
|
||||||
|
|
||||||
|
Coordinates data loading, model selection, parallel computation,
|
||||||
|
and result aggregation. Does NOT implement propagation physics
|
||||||
|
(delegated to models) or handle HTTP (delegated to API layer).
|
||||||
|
"""
|
||||||
|
|
||||||
|
import time
|
||||||
|
import asyncio
|
||||||
|
from enum import Enum
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from typing import List, Optional, Callable, Awaitable
|
||||||
|
|
||||||
|
from app.propagation.base import PropagationModel, PropagationInput
|
||||||
|
from app.propagation.free_space import FreeSpaceModel
|
||||||
|
from app.propagation.okumura_hata import OkumuraHataModel
|
||||||
|
from app.propagation.cost231_hata import Cost231HataModel
|
||||||
|
from app.propagation.cost231_wi import Cost231WIModel
|
||||||
|
from app.propagation.itu_r_p1546 import ITUR_P1546Model
|
||||||
|
from app.propagation.longley_rice import LongleyRiceModel
|
||||||
|
from app.propagation.itu_r_p526 import KnifeEdgeDiffractionModel
|
||||||
|
|
||||||
|
from app.core.result import CoverageResult, PointResult, compute_stats
|
||||||
|
|
||||||
|
|
||||||
|
class BandType(Enum):
    """Frequency band category used to pick a propagation model."""

    LTE = "lte"  # 700-2600 MHz
    UHF = "uhf"  # 400-520 MHz
    VHF = "vhf"  # 136-174 MHz
    CUSTOM = "custom"  # User-defined
|
||||||
|
|
||||||
|
|
||||||
|
class PresetType(Enum):
    """Named speed/detail trade-off presets for a coverage run."""

    FAST = "fast"
    STANDARD = "standard"
    DETAILED = "detailed"
    FULL = "full"
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class Site:
    """Transmitter site parameters for a coverage calculation."""

    id: str
    lat: float
    lon: float
    height: float  # meters AGL
    power: float  # dBm
    gain: float  # dBi
    frequency: float  # MHz
    band_type: BandType = BandType.LTE
    # Antenna boresight direction in degrees; None means omnidirectional.
    azimuth: Optional[float] = None
    beamwidth: float = 65  # degrees
    tilt: float = 0
    environment: str = "urban"
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class CoverageSettings:
    """Settings for CoverageEngine.calculate().

    NOTE: distinct from app.services.coverage_service.CoverageSettings —
    CoverageEngine.calculate() maps these fields onto that legacy class
    (imported there as ``LegacySettings``).
    """

    radius: float = 10000  # meters
    resolution: float = 200  # meters between grid points
    min_signal: float = -120  # dBm threshold
    preset: PresetType = PresetType.STANDARD
    band_type: BandType = BandType.LTE
    environment: str = "urban"

    # New-style feature toggles.
    terrain_enabled: bool = True
    buildings_enabled: bool = True
    diffraction_enabled: bool = True
    reflection_enabled: bool = False

    # Legacy toggles (backward compat) — these are the ones actually
    # forwarded to the legacy settings object in CoverageEngine.calculate().
    use_terrain: bool = True
    use_buildings: bool = True
    use_materials: bool = True
    use_dominant_path: bool = False
    use_street_canyon: bool = False
    use_reflections: bool = False
    use_water_reflection: bool = False
    use_vegetation: bool = False
    season: str = "summer"
    rain_rate: float = 0.0
    indoor_loss_type: str = "none"
    use_atmospheric: bool = False
    temperature_c: float = 15.0
    humidity_percent: float = 50.0
|
||||||
|
|
||||||
|
|
||||||
|
# Async progress hook: (phase_name, fraction_complete, eta_seconds_or_None).
ProgressCallback = Callable[[str, float, Optional[float]], Awaitable[None]]
|
||||||
|
|
||||||
|
|
||||||
|
class CoverageEngine:
    """
    Main orchestrator for coverage calculations.

    Selects the appropriate propagation model based on band type
    and environment, then delegates to the existing coverage pipeline
    (app.services.coverage_service) for the per-point work.
    """

    # (band, environment) -> model class; instantiated once in _init_models.
    _model_registry = {
        (BandType.LTE, "urban"): Cost231HataModel,
        (BandType.LTE, "suburban"): OkumuraHataModel,
        (BandType.LTE, "rural"): OkumuraHataModel,
        (BandType.LTE, "open"): FreeSpaceModel,
        (BandType.UHF, "urban"): OkumuraHataModel,
        (BandType.UHF, "suburban"): OkumuraHataModel,
        (BandType.UHF, "rural"): LongleyRiceModel,
        (BandType.VHF, "urban"): ITUR_P1546Model,
        (BandType.VHF, "suburban"): ITUR_P1546Model,
        (BandType.VHF, "rural"): LongleyRiceModel,
    }

    def __init__(self):
        self._models = {}
        self._init_models()
        self.free_space = FreeSpaceModel()
        self.diffraction = KnifeEdgeDiffractionModel()

    def _init_models(self):
        """Instantiate one model object per registry entry."""
        for key, model_cls in self._model_registry.items():
            self._models[key] = model_cls()

    def select_model(self, band: BandType, environment: str) -> PropagationModel:
        """Pick a model for (band, environment), falling back to the band's
        urban model, then to a fresh OkumuraHataModel."""
        key = (band, environment)
        if key in self._models:
            return self._models[key]
        if (band, "urban") in self._models:
            return self._models[(band, "urban")]
        return OkumuraHataModel()

    def get_available_models(self) -> dict:
        """Describe every distinct model with its ranges and the
        band/environment combinations that map to it."""
        models = {}
        seen = set()
        for (band, env), model in self._models.items():
            if model.name not in seen:
                seen.add(model.name)
                models[model.name] = {
                    "frequency_range": model.frequency_range,
                    "distance_range": model.distance_range,
                    "bands": [],
                }
            # Append outside the guard so repeated models accumulate bands.
            models[model.name]["bands"].append(f"{band.value}/{env}")
        return models

    async def calculate(
        self,
        sites: List[Site],
        settings: CoverageSettings,
        progress_callback: Optional[ProgressCallback] = None,
    ) -> CoverageResult:
        """
        Main calculation entry point.

        Delegates actual per-point work to the legacy coverage_service
        pipeline, wrapping it with the new clean interface.

        NOTE: the returned PointResults carry path_loss=0.0 and
        diffraction_loss=0.0 — the legacy points do not expose those
        components here.
        """
        start_time = time.time()
        model = self.select_model(settings.band_type, settings.environment)

        if progress_callback:
            await progress_callback("init", 0.05, None)

        # Import legacy system (deferred to avoid import-time coupling —
        # presumably a cycle; verify against module layout).
        from app.services.coverage_service import (
            coverage_service, SiteParams,
            CoverageSettings as LegacySettings,
        )
        from app.services.parallel_coverage_service import CancellationToken

        # Map the new settings onto the legacy settings object field by field.
        legacy_settings = LegacySettings(
            radius=settings.radius,
            resolution=settings.resolution,
            min_signal=settings.min_signal,
            use_terrain=settings.use_terrain,
            use_buildings=settings.use_buildings,
            use_materials=settings.use_materials,
            use_dominant_path=settings.use_dominant_path,
            use_street_canyon=settings.use_street_canyon,
            use_reflections=settings.use_reflections,
            use_water_reflection=settings.use_water_reflection,
            use_vegetation=settings.use_vegetation,
            season=settings.season,
            rain_rate=settings.rain_rate,
            indoor_loss_type=settings.indoor_loss_type,
            use_atmospheric=settings.use_atmospheric,
            temperature_c=settings.temperature_c,
            humidity_percent=settings.humidity_percent,
            preset=settings.preset.value if isinstance(settings.preset, PresetType) else settings.preset,
        )

        # Token is created but never cancelled here — cancellation is only
        # wired up in the WebSocket path.
        cancel_token = CancellationToken()

        if progress_callback:
            await progress_callback("calculating", 0.25, None)

        legacy_sites = [
            SiteParams(
                lat=s.lat, lon=s.lon, height=s.height,
                power=s.power, gain=s.gain, frequency=s.frequency,
                azimuth=s.azimuth, beamwidth=s.beamwidth,
            )
            for s in sites
        ]

        if len(legacy_sites) == 1:
            points = await coverage_service.calculate_coverage(
                legacy_sites[0], legacy_settings, cancel_token,
            )
        else:
            points = await coverage_service.calculate_multi_site_coverage(
                legacy_sites, legacy_settings, cancel_token,
            )

        if progress_callback:
            await progress_callback("done", 1.0, None)

        # Re-wrap legacy points in the new PointResult type.
        result_points = [
            PointResult(
                lat=p.lat, lon=p.lon, rsrp=p.rsrp,
                distance=p.distance, path_loss=0.0,
                terrain_loss=p.terrain_loss,
                building_loss=p.building_loss,
                diffraction_loss=0.0,
                has_los=p.has_los,
                model_used=model.name,
            )
            for p in points
        ]

        computation_time = time.time() - start_time

        return CoverageResult(
            points=result_points,
            stats=compute_stats(result_points),
            computation_time=computation_time,
            models_used=[model.name],
        )


# Singleton
engine = CoverageEngine()
|
||||||
83
backend/app/core/grid.py
Normal file
83
backend/app/core/grid.py
Normal file
@@ -0,0 +1,83 @@
|
|||||||
|
"""
|
||||||
|
Grid generation for coverage calculations.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import numpy as np
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from typing import List, Tuple
|
||||||
|
from app.geometry.haversine import haversine_distance
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class BoundingBox:
    """Axis-aligned geographic bounding box in decimal degrees."""

    min_lat: float
    min_lon: float
    max_lat: float
    max_lon: float
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class Grid:
    """A generated coverage grid plus the geometry that produced it."""

    points: List[Tuple[float, float]]  # (lat, lon) pairs
    bounding_box: BoundingBox
    resolution: float  # grid spacing, meters
    radius: float  # coverage radius, meters
|
||||||
|
|
||||||
|
|
||||||
|
class GridService:
|
||||||
|
"""Generate coverage grid points."""
|
||||||
|
|
||||||
|
    @staticmethod
    def generate(
        center_lat: float,
        center_lon: float,
        radius: float,
        resolution: float,
    ) -> Grid:
        """Generate a circular grid of (lat, lon) points around a center.

        Scans the enclosing square at `resolution` spacing and keeps the
        points whose haversine distance from the center is within `radius`
        (both in meters).
        """
        points = []

        # Meters-to-degrees conversion: ~111 km per degree of latitude; the
        # longitude step widens with latitude via cos(lat).
        # NOTE(review): cos(lat) -> 0 near the poles would blow up lon_step;
        # presumably inputs are mid-latitude — confirm.
        lat_step = resolution / 111000
        lon_step = resolution / (111000 * np.cos(np.radians(center_lat)))

        lat_delta = radius / 111000
        lon_delta = radius / (111000 * np.cos(np.radians(center_lat)))

        bbox = BoundingBox(
            min_lat=center_lat - lat_delta,
            min_lon=center_lon - lon_delta,
            max_lat=center_lat + lat_delta,
            max_lon=center_lon + lon_delta,
        )

        # Row-by-row scan; float accumulation of lat/lon determines the exact
        # point set, so the loop structure is deliberate.
        lat = center_lat - lat_delta
        while lat <= center_lat + lat_delta:
            lon = center_lon - lon_delta
            while lon <= center_lon + lon_delta:
                dist = haversine_distance(center_lat, center_lon, lat, lon)
                if dist <= radius:
                    points.append((lat, lon))
                lon += lon_step
            lat += lat_step

        return Grid(points=points, bounding_box=bbox, resolution=resolution, radius=radius)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def generate_multi_site(sites: list, radius: float, resolution: float) -> Grid:
|
||||||
|
all_points = set()
|
||||||
|
min_lat = min_lon = float("inf")
|
||||||
|
max_lat = max_lon = float("-inf")
|
||||||
|
|
||||||
|
for site in sites:
|
||||||
|
grid = GridService.generate(site.lat, site.lon, radius, resolution)
|
||||||
|
for p in grid.points:
|
||||||
|
all_points.add((round(p[0], 7), round(p[1], 7)))
|
||||||
|
min_lat = min(min_lat, grid.bounding_box.min_lat)
|
||||||
|
min_lon = min(min_lon, grid.bounding_box.min_lon)
|
||||||
|
max_lat = max(max_lat, grid.bounding_box.max_lat)
|
||||||
|
max_lon = max(max_lon, grid.bounding_box.max_lon)
|
||||||
|
|
||||||
|
return Grid(
|
||||||
|
points=list(all_points),
|
||||||
|
bounding_box=BoundingBox(min_lat, min_lon, max_lat, max_lon),
|
||||||
|
resolution=resolution, radius=radius,
|
||||||
|
)
|
||||||
65
backend/app/core/result.py
Normal file
65
backend/app/core/result.py
Normal file
@@ -0,0 +1,65 @@
|
|||||||
|
"""
|
||||||
|
Coverage result aggregation and statistics.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from typing import List
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class PointResult:
    """Signal prediction for a single grid point."""

    lat: float
    lon: float
    rsrp: float
    distance: float
    path_loss: float
    terrain_loss: float
    building_loss: float
    diffraction_loss: float
    has_los: bool
    model_used: str

    def to_dict(self) -> dict:
        """Serialize to a plain dict; keys mirror the field names."""
        return {
            "lat": self.lat,
            "lon": self.lon,
            "rsrp": self.rsrp,
            "distance": self.distance,
            "path_loss": self.path_loss,
            "terrain_loss": self.terrain_loss,
            "building_loss": self.building_loss,
            "diffraction_loss": self.diffraction_loss,
            "has_los": self.has_los,
            "model_used": self.model_used,
        }
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class CoverageResult:
    """Aggregated output of one coverage calculation run."""

    points: List[PointResult]
    stats: dict
    computation_time: float  # wall-clock seconds (measured by the engine)
    models_used: List[str]

    def to_dict(self) -> dict:
        """Serialize for API responses; adds a point count and rounds the time."""
        payload = {
            "points": [p.to_dict() for p in self.points],
            "count": len(self.points),
            "stats": self.stats,
            "computation_time": round(self.computation_time, 2),
            "models_used": self.models_used,
        }
        return payload
|
||||||
|
|
||||||
|
|
||||||
|
def compute_stats(points: List[PointResult]) -> dict:
    """Aggregate summary statistics over a list of point results.

    Returns a zeroed summary (without the building/terrain counters)
    when `points` is empty.
    """
    if not points:
        return {
            "min_rsrp": 0,
            "max_rsrp": 0,
            "avg_rsrp": 0,
            "los_percentage": 0,
            "total_points": 0,
        }

    total = len(points)
    rsrp_values = [p.rsrp for p in points]

    return {
        "min_rsrp": min(rsrp_values),
        "max_rsrp": max(rsrp_values),
        "avg_rsrp": sum(rsrp_values) / total,
        "los_percentage": sum(1 for p in points if p.has_los) / total * 100,
        "total_points": total,
        "points_with_buildings": sum(1 for p in points if p.building_loss > 0),
        "points_with_terrain_loss": sum(1 for p in points if p.terrain_loss > 0),
    }
|
||||||
38
backend/app/geometry/__init__.py
Normal file
38
backend/app/geometry/__init__.py
Normal file
@@ -0,0 +1,38 @@
|
|||||||
|
"""
|
||||||
|
Geometry operations for RF propagation calculations.
|
||||||
|
|
||||||
|
NumPy-dependent modules (haversine, intersection, reflection) are
|
||||||
|
imported lazily so pure-Python modules (diffraction, los) remain
|
||||||
|
available even when NumPy is not installed.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from app.geometry.diffraction import knife_edge_loss
|
||||||
|
from app.geometry.los import check_los_terrain, fresnel_radius
|
||||||
|
|
||||||
|
|
||||||
|
def __getattr__(name):
    """Lazy import for NumPy-dependent geometry functions."""
    # Map each lazily-exported name to the submodule that defines it.
    _lazy_modules = {
        "haversine_distance": "haversine",
        "haversine_batch": "haversine",
        "points_to_local_coords": "haversine",
        "line_segments_intersect_batch": "intersection",
        "line_intersects_polygons_batch": "intersection",
        "calculate_reflection_points_batch": "reflection",
        "find_best_reflection_path": "reflection",
    }
    submodule = _lazy_modules.get(name)
    if submodule is not None:
        # Import only when first accessed so NumPy stays optional.
        module = __import__(f"app.geometry.{submodule}", fromlist=[name])
        return getattr(module, name)
    raise AttributeError(f"module 'app.geometry' has no attribute {name!r}")
|
||||||
|
|
||||||
|
|
||||||
|
# Public API. NumPy-backed names are resolved lazily via __getattr__ above;
# knife_edge_loss / check_los_terrain / fresnel_radius are imported eagerly.
__all__ = [
    "haversine_distance", "haversine_batch", "points_to_local_coords",
    "line_segments_intersect_batch", "line_intersects_polygons_batch",
    "calculate_reflection_points_batch", "find_best_reflection_path",
    "knife_edge_loss", "check_los_terrain", "fresnel_radius",
]
|
||||||
40
backend/app/geometry/diffraction.py
Normal file
40
backend/app/geometry/diffraction.py
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
"""
|
||||||
|
Knife-edge diffraction geometry calculations.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import math
|
||||||
|
|
||||||
|
|
||||||
|
def knife_edge_loss(
    d1_m: float,
    d2_m: float,
    h_m: float,
    wavelength_m: float,
) -> float:
    """
    Calculate diffraction loss over single knife edge.

    Uses Lee's piecewise approximation of the Fresnel-Kirchhoff
    knife-edge integral in the dimensionless parameter v.

    Args:
        d1_m: Distance TX to obstacle
        d2_m: Distance obstacle to RX
        h_m: Obstacle height above LOS (positive = above)
        wavelength_m: Signal wavelength

    Returns:
        Loss in dB (>= 0)
    """
    # Degenerate geometry: no meaningful diffraction contribution.
    if d1_m <= 0 or d2_m <= 0 or wavelength_m <= 0:
        return 0.0

    # Fresnel-Kirchhoff diffraction parameter.
    v = h_m * math.sqrt(2 * (d1_m + d2_m) / (wavelength_m * d1_m * d2_m))

    # BUG FIX: the two quadratic branches previously had their
    # coefficients swapped (9.11/-1.27 used below 0, 9.11/+1.65 used in
    # [0, 2.4)), which produced a ~17 dB jump at v = 2.4. Lee's
    # approximation uses +1.65 below 0 and -1.27 above; the pieces below
    # are continuous at v = 0 and v = 2.4.
    if v < -0.78:
        L = 0.0
    elif v < 0:
        L = 6.02 + 9.0 * v + 1.65 * v ** 2
    elif v < 2.4:
        L = 6.02 + 9.11 * v - 1.27 * v ** 2
    else:
        L = 12.953 + 20 * math.log10(v)

    return max(0.0, L)
|
||||||
50
backend/app/geometry/haversine.py
Normal file
50
backend/app/geometry/haversine.py
Normal file
@@ -0,0 +1,50 @@
|
|||||||
|
"""
|
||||||
|
Distance calculations using the haversine formula.
|
||||||
|
|
||||||
|
Supports both scalar and batch (NumPy array) operations.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import numpy as np
|
||||||
|
from typing import Tuple
|
||||||
|
|
||||||
|
EARTH_RADIUS = 6371000  # mean Earth radius, meters


def haversine_distance(lat1: float, lon1: float, lat2: float, lon2: float) -> float:
    """Great-circle distance between two (lat, lon) points, in meters."""
    phi1, lam1, phi2, lam2 = np.radians([lat1, lon1, lat2, lon2])
    half_dphi = (phi2 - phi1) / 2
    half_dlam = (lam2 - lam1) / 2
    # Haversine formula: a is the squared half-chord length.
    a = np.sin(half_dphi) ** 2 + np.cos(phi1) * np.cos(phi2) * np.sin(half_dlam) ** 2
    central_angle = 2 * np.arcsin(np.sqrt(a))
    return float(EARTH_RADIUS * central_angle)
|
||||||
|
|
||||||
|
|
||||||
|
def haversine_batch(
    lat1: float, lon1: float,
    lats2: np.ndarray, lons2: np.ndarray,
) -> np.ndarray:
    """Distance from one point to many points (meters)."""
    phi0 = np.radians(lat1)
    lam0 = np.radians(lon1)
    phis = np.radians(lats2)
    lams = np.radians(lons2)

    # Vectorized haversine over the destination arrays.
    a = (np.sin((phis - phi0) / 2) ** 2
         + np.cos(phi0) * np.cos(phis) * np.sin((lams - lam0) / 2) ** 2)
    central_angle = 2 * np.arcsin(np.sqrt(a))

    return EARTH_RADIUS * central_angle
|
||||||
|
|
||||||
|
|
||||||
|
def points_to_local_coords(
    ref_lat: float, ref_lon: float,
    lats: np.ndarray, lons: np.ndarray,
) -> Tuple[np.ndarray, np.ndarray]:
    """Convert lat/lon to local X/Y meters (equirectangular projection).

    The reference point becomes the origin; X grows east, Y grows north.
    Meters-per-degree of longitude shrinks with cos(reference latitude).
    """
    cos_ref = np.cos(np.radians(ref_lat))
    east = (lons - ref_lon) * 111320.0 * cos_ref
    north = (lats - ref_lat) * 110540.0
    return east, north
|
||||||
116
backend/app/geometry/intersection.py
Normal file
116
backend/app/geometry/intersection.py
Normal file
@@ -0,0 +1,116 @@
|
|||||||
|
"""
|
||||||
|
Vectorized line-segment and line-polygon intersection checks.
|
||||||
|
|
||||||
|
All operations use NumPy for batch processing.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import numpy as np
|
||||||
|
from typing import Tuple
|
||||||
|
|
||||||
|
|
||||||
|
def line_segments_intersect_batch(
    p1: np.ndarray, p2: np.ndarray,
    segments_start: np.ndarray, segments_end: np.ndarray,
) -> Tuple[np.ndarray, np.ndarray]:
    """Check if line p1->p2 intersects with N segments.

    Solves p1 + t*(p2 - p1) == start + u*(end - start) per segment with
    2D cross products; an intersection requires both t and u in [0, 1].

    Args:
        p1, p2: shape (2,)
        segments_start, segments_end: shape (N, 2)

    Returns:
        intersects: bool array (N,)
        t_values: parameter along p1->p2 (N,)
    """
    d = p2 - p1
    seg_d = segments_end - segments_start

    # cross(d, seg_d); (near-)zero means line and segment are parallel.
    cross = d[0] * seg_d[:, 1] - d[1] * seg_d[:, 0]
    parallel_mask = np.abs(cross) < 1e-10
    cross_safe = np.where(parallel_mask, 1.0, cross)

    # BUG FIX: the offset must be (start - p1), not (p1 - start). The
    # previous sign flipped both t and u, so real crossings (t, u in
    # [0, 1]) were rejected and only backward extensions were "hits".
    qp = segments_start - p1
    t = (qp[:, 0] * seg_d[:, 1] - qp[:, 1] * seg_d[:, 0]) / cross_safe
    u = (qp[:, 0] * d[1] - qp[:, 1] * d[0]) / cross_safe

    intersects = ~parallel_mask & (t >= 0) & (t <= 1) & (u >= 0) & (u <= 1)
    return intersects, t
|
||||||
|
|
||||||
|
|
||||||
|
def line_intersects_polygons_batch(
    p1: np.ndarray, p2: np.ndarray,
    polygons_x: np.ndarray, polygons_y: np.ndarray,
    polygon_lengths: np.ndarray,
    max_polygons: int = 30,
) -> Tuple[np.ndarray, np.ndarray]:
    """Check if line p1->p2 intersects multiple polygons.

    Uses bounding-box pre-filter to limit work when polygon count is large.

    Args:
        p1, p2: shape (2,)
        polygons_x, polygons_y: flattened vertex arrays
        polygon_lengths: vertices per polygon (num_polygons,)
        max_polygons: only check nearest N polygons

    Returns:
        intersects: bool (num_polygons,)
        min_distances: distance to first hit (num_polygons,); inf where no hit
    """
    num_polygons = len(polygon_lengths)

    if num_polygons == 0:
        return np.array([], dtype=bool), np.array([])

    intersects = np.zeros(num_polygons, dtype=bool)
    # t parameter (fraction along p1->p2) of the closest edge hit per polygon.
    min_t = np.full(num_polygons, np.inf)

    # Pre-filter: bounding box check
    # NOTE(review): only each polygon's FIRST vertex is tested against the
    # buffered line bbox, not the polygon's full extent -- a large polygon
    # whose first vertex lies far away is skipped. Confirm this
    # approximation is intended.
    if num_polygons > max_polygons:
        buf = 50.0  # bbox padding, same (local-meter) units as the vertices
        line_min_x = min(p1[0], p2[0]) - buf
        line_max_x = max(p1[0], p2[0]) + buf
        line_min_y = min(p1[1], p2[1]) - buf
        line_max_y = max(p1[1], p2[1]) + buf

        nearby_mask = np.zeros(num_polygons, dtype=bool)
        vi = 0  # index of polygon i's first vertex within the flat arrays
        for i, length in enumerate(polygon_lengths):
            if length >= 3:  # fewer than 3 vertices is not a valid polygon
                cx = polygons_x[vi]
                cy = polygons_y[vi]
                if line_min_x <= cx <= line_max_x and line_min_y <= cy <= line_max_y:
                    nearby_mask[i] = True
            vi += length

        # Hard cap: keep at most max_polygons candidates (lowest indices win).
        nearby_indices = np.where(nearby_mask)[0]
        if len(nearby_indices) > max_polygons:
            nearby_mask = np.zeros(num_polygons, dtype=bool)
            nearby_mask[nearby_indices[:max_polygons]] = True
    else:
        # Few polygons: test them all exactly.
        nearby_mask = np.ones(num_polygons, dtype=bool)

    # Exact test: intersect the line with every edge of each candidate polygon.
    idx = 0  # start of polygon i's vertex run within the flat arrays
    for i, length in enumerate(polygon_lengths):
        if length < 3 or not nearby_mask[i]:
            idx += length
            continue

        px = polygons_x[idx:idx + length]
        py = polygons_y[idx:idx + length]

        # Edges as (start, end) pairs; np.roll wraps around to close the ring.
        starts = np.stack([px, py], axis=1)
        ends = np.stack([np.roll(px, -1), np.roll(py, -1)], axis=1)

        edge_intersects, t_vals = line_segments_intersect_batch(p1, p2, starts, ends)

        if np.any(edge_intersects):
            intersects[i] = True
            min_t[i] = np.min(t_vals[edge_intersects])

        idx += length

    # Convert the t fraction into meters along p1->p2 (stays inf for misses).
    line_length = np.linalg.norm(p2 - p1)
    min_distances = min_t * line_length

    return intersects, min_distances
|
||||||
85
backend/app/geometry/los.py
Normal file
85
backend/app/geometry/los.py
Normal file
@@ -0,0 +1,85 @@
|
|||||||
|
"""
|
||||||
|
Line-of-sight checks using terrain profile data.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import math
|
||||||
|
from typing import Optional, Dict, List
|
||||||
|
|
||||||
|
EARTH_RADIUS = 6371000
|
||||||
|
K_FACTOR = 4 / 3 # Standard atmospheric refraction
|
||||||
|
|
||||||
|
|
||||||
|
def check_los_terrain(
|
||||||
|
profile: List[dict],
|
||||||
|
tx_height: float,
|
||||||
|
rx_height: float,
|
||||||
|
) -> dict:
|
||||||
|
"""
|
||||||
|
Check line-of-sight from a terrain elevation profile.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
profile: List of dicts with 'elevation' and 'distance' keys.
|
||||||
|
tx_height: TX antenna height above ground (meters).
|
||||||
|
rx_height: RX height above ground (meters).
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
dict with has_los, clearance, blocked_at
|
||||||
|
"""
|
||||||
|
if not profile:
|
||||||
|
return {"has_los": True, "clearance": 0.0, "blocked_at": None}
|
||||||
|
|
||||||
|
tx_ground = profile[0]["elevation"]
|
||||||
|
rx_ground = profile[-1]["elevation"]
|
||||||
|
tx_total = tx_ground + tx_height
|
||||||
|
rx_total = rx_ground + rx_height
|
||||||
|
total_distance = profile[-1]["distance"]
|
||||||
|
|
||||||
|
min_clearance = float("inf")
|
||||||
|
blocked_at = None
|
||||||
|
|
||||||
|
for point in profile:
|
||||||
|
d = point["distance"]
|
||||||
|
terrain_elev = point["elevation"]
|
||||||
|
|
||||||
|
if total_distance == 0:
|
||||||
|
los_height = tx_total
|
||||||
|
else:
|
||||||
|
los_height = tx_total + (rx_total - tx_total) * (d / total_distance)
|
||||||
|
|
||||||
|
# Earth curvature correction
|
||||||
|
effective_radius = K_FACTOR * EARTH_RADIUS
|
||||||
|
curvature = (d * (total_distance - d)) / (2 * effective_radius)
|
||||||
|
los_height_corrected = los_height - curvature
|
||||||
|
|
||||||
|
clearance = los_height_corrected - terrain_elev
|
||||||
|
|
||||||
|
if clearance < min_clearance:
|
||||||
|
min_clearance = clearance
|
||||||
|
if clearance <= 0:
|
||||||
|
blocked_at = d
|
||||||
|
|
||||||
|
return {
|
||||||
|
"has_los": min_clearance > 0,
|
||||||
|
"clearance": min_clearance,
|
||||||
|
"blocked_at": blocked_at,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def fresnel_radius(
|
||||||
|
d1_m: float, d2_m: float, wavelength_m: float, zone: int = 1
|
||||||
|
) -> float:
|
||||||
|
"""Calculate Fresnel zone radius at a point along the path.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
d1_m: Distance from TX to point
|
||||||
|
d2_m: Distance from point to RX
|
||||||
|
wavelength_m: Signal wavelength
|
||||||
|
zone: Fresnel zone number (default 1)
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Radius in meters
|
||||||
|
"""
|
||||||
|
total = d1_m + d2_m
|
||||||
|
if total <= 0:
|
||||||
|
return 0.0
|
||||||
|
return math.sqrt(zone * wavelength_m * d1_m * d2_m / total)
|
||||||
163
backend/app/geometry/reflection.py
Normal file
163
backend/app/geometry/reflection.py
Normal file
@@ -0,0 +1,163 @@
|
|||||||
|
"""
|
||||||
|
Vectorized reflection point calculations using mirror-image method.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import numpy as np
|
||||||
|
from typing import Tuple, Optional
|
||||||
|
from app.geometry.intersection import line_intersects_polygons_batch
|
||||||
|
|
||||||
|
|
||||||
|
def calculate_reflection_points_batch(
    tx: np.ndarray, rx: np.ndarray,
    wall_starts: np.ndarray, wall_ends: np.ndarray,
) -> Tuple[np.ndarray, np.ndarray]:
    """Calculate reflection points on N walls via mirror-image method.

    Mirrors TX across each wall's infinite line, then intersects the
    RX -> mirror ray with the wall; that hit is the specular point.

    Args:
        tx, rx: shape (2,)
        wall_starts, wall_ends: shape (N, 2)

    Returns:
        reflection_points: (N, 2)
        valid: bool (N,)
    """
    seg = wall_ends - wall_starts
    seg_len = np.linalg.norm(seg, axis=1, keepdims=True)
    seg_dir = seg / np.maximum(seg_len, 1e-10)

    # Left-hand unit normal of each wall direction.
    normal = np.stack([-seg_dir[:, 1], seg_dir[:, 0]], axis=1)

    # Signed distance of TX from each wall line, then its mirror image.
    signed_dist = np.sum((tx - wall_starts) * normal, axis=1, keepdims=True)
    mirror = tx - 2 * signed_dist * normal

    # Ray from RX toward the mirrored TX.
    ray = mirror - rx

    denom = ray[:, 0] * seg[:, 1] - ray[:, 1] * seg[:, 0]
    nonparallel = np.abs(denom) > 1e-10
    denom_safe = np.where(nonparallel, denom, 1.0)

    offset = wall_starts - rx
    t = (offset[:, 0] * ray[:, 1] - offset[:, 1] * ray[:, 0]) / denom_safe

    reflection_points = wall_starts + t[:, np.newaxis] * seg

    # Valid when ray and wall are not parallel, the hit lies within the
    # wall segment, and TX sits on the normal's positive side.
    valid = nonparallel & (t >= 0) & (t <= 1) & (signed_dist[:, 0] > 0)

    return reflection_points, valid
|
||||||
|
|
||||||
|
|
||||||
|
def find_best_reflection_path(
    tx: np.ndarray, rx: np.ndarray,
    building_walls_start: np.ndarray,
    building_walls_end: np.ndarray,
    wall_to_building: np.ndarray,
    obstacle_polygons_x: np.ndarray,
    obstacle_polygons_y: np.ndarray,
    obstacle_lengths: np.ndarray,
    max_candidates: int = 50,
    max_walls: int = 100,
    max_los_checks: int = 10,
) -> Tuple[Optional[np.ndarray], float, float]:
    """Find best single-reflection path using vectorized ops.

    Candidates come from the mirror-image method, are filtered by path
    length, then verified for obstruction in increasing-length order;
    the first unobstructed candidate is the best.

    Args:
        wall_to_building: wall-to-building index map (kept for API
            compatibility; the current algorithm does not use it).
        max_walls: Only consider closest N walls for reflection candidates.
        max_los_checks: Only verify LOS for top N shortest reflection paths.

    Returns:
        best_reflection_point: (2,) or None
        best_path_length: meters
        best_reflection_loss: dB
    """
    # Cleanup vs. previous version: removed dead `valid_indices`
    # bookkeeping (it was filtered but never used and fell out of sync
    # after sorting), the dead wall_to_building re-filtering, and the
    # redundant best-length guard inside the break-on-first loop.
    if len(building_walls_start) == 0:
        return None, np.inf, 0.0

    # Limit walls by distance to the direct-path midpoint.
    if len(building_walls_start) > max_walls:
        midpoint = (tx + rx) / 2
        wall_midpoints = (building_walls_start + building_walls_end) / 2
        wall_distances = np.linalg.norm(wall_midpoints - midpoint, axis=1)
        closest = np.argpartition(wall_distances, max_walls)[:max_walls]
        building_walls_start = building_walls_start[closest]
        building_walls_end = building_walls_end[closest]

    refl_points, valid = calculate_reflection_points_batch(
        tx, rx, building_walls_start, building_walls_end,
    )
    if not np.any(valid):
        return None, np.inf, 0.0

    candidates = refl_points[valid]
    path_lengths = (np.linalg.norm(candidates - tx, axis=1)
                    + np.linalg.norm(rx - candidates, axis=1))

    # Discard reflected paths more than twice the direct distance.
    direct_dist = np.linalg.norm(rx - tx)
    within_range = path_lengths <= direct_dist * 2.0
    if not np.any(within_range):
        return None, np.inf, 0.0
    candidates = candidates[within_range]
    path_lengths = path_lengths[within_range]

    # Keep top candidates by shortest path.
    if len(candidates) > max_candidates:
        top_idx = np.argpartition(path_lengths, max_candidates)[:max_candidates]
        candidates = candidates[top_idx]
        path_lengths = path_lengths[top_idx]

    # Shortest-first order: the first unobstructed candidate is optimal.
    order = np.argsort(path_lengths)
    candidates = candidates[order]
    path_lengths = path_lengths[order]

    for i in range(min(len(candidates), max_los_checks)):
        refl_pt = candidates[i]

        # Both legs (TX->reflection and reflection->RX) must be clear.
        blocked, _ = line_intersects_polygons_batch(
            tx, refl_pt, obstacle_polygons_x, obstacle_polygons_y, obstacle_lengths,
        )
        if np.any(blocked):
            continue

        blocked, _ = line_intersects_polygons_batch(
            refl_pt, rx, obstacle_polygons_x, obstacle_polygons_y, obstacle_lengths,
        )
        if np.any(blocked):
            continue

        best_length = path_lengths[i]
        # Reflection loss: 3-10 dB depending on path ratio
        path_ratio = best_length / max(direct_dist, 1.0)
        reflection_loss = 3.0 + 7.0 * min(1.0, (path_ratio - 1.0) * 2)
        return refl_pt, best_length, reflection_loss

    return None, np.inf, 0.0
|
||||||
102
backend/app/main.py
Normal file
102
backend/app/main.py
Normal file
@@ -0,0 +1,102 @@
|
|||||||
|
from contextlib import asynccontextmanager
|
||||||
|
import logging
|
||||||
|
import platform
|
||||||
|
|
||||||
|
from fastapi import FastAPI, WebSocket
|
||||||
|
from fastapi.middleware.cors import CORSMiddleware
|
||||||
|
|
||||||
|
from app.core.database import connect_to_mongo, close_mongo_connection
|
||||||
|
from app.api.routes import health, projects, terrain, coverage, regions, system, gpu
|
||||||
|
from app.api.websocket import websocket_endpoint
|
||||||
|
|
||||||
|
logger = logging.getLogger("rfcp.startup")


def check_gpu_availability():
    """Log GPU status on startup for debugging.

    Best-effort: probes CuPy/CUDA and PyOpenCL and logs what it finds.
    Never raises -- missing GPU stacks are expected in many deployments.
    """
    is_wsl = "microsoft" in platform.release().lower()
    env_note = " (WSL2)" if is_wsl else ""

    # Check CuPy / CUDA.
    # BUG FIX: the previous version had two identical `except Exception`
    # clauses here, making the second one unreachable dead code. Catch
    # ImportError separately so the install hint is only shown when CuPy
    # is actually missing; other failures go to the generic handler.
    try:
        import cupy as cp

        device_count = cp.cuda.runtime.getDeviceCount()
        if device_count > 0:
            props = cp.cuda.runtime.getDeviceProperties(0)
            name = props["name"]
            if isinstance(name, bytes):
                name = name.decode()
            mem_mb = props["totalGlobalMem"] // (1024 * 1024)
            logger.info(f"GPU detected{env_note}: {name} ({mem_mb} MB VRAM)")
            logger.info(f"CuPy {cp.__version__}, CUDA devices: {device_count}")
        else:
            logger.warning(f"CuPy installed but no CUDA devices found{env_note}")
    except ImportError as e:
        logger.warning(f"CuPy FAILED {env_note}: {e}")
        if is_wsl:
            logger.warning("Install: pip3 install cupy-cuda12x --break-system-packages")
        else:
            logger.warning("Install: pip install cupy-cuda12x")
    except Exception as e:
        logger.warning(f"CuPy error{env_note}: {e}")

    # Check PyOpenCL (optional). Same dead-except fix as above; any
    # runtime failure is deliberately ignored (best-effort probe).
    try:
        import pyopencl as cl

        for p in cl.get_platforms():
            for d in p.get_devices():
                logger.info(f"OpenCL device: {d.name.strip()}")
    except ImportError:
        logger.debug("PyOpenCL not installed (optional)")
    except Exception:
        pass
|
||||||
|
|
||||||
|
|
||||||
|
@asynccontextmanager
async def lifespan(app: FastAPI):
    """FastAPI lifespan hook: startup before `yield`, shutdown after.

    Startup: log GPU availability, then open the MongoDB connection.
    Shutdown: close the MongoDB connection.
    """
    # Log GPU status on startup
    check_gpu_availability()
    await connect_to_mongo()
    yield
    await close_mongo_connection()
|
||||||
|
|
||||||
|
|
||||||
|
app = FastAPI(
    title="RFCP Backend API",
    description="RF Coverage Planning Backend",
    version="3.0.0",
    lifespan=lifespan,
)

# CORS for frontend
app.add_middleware(
    CORSMiddleware,
    allow_origins=["https://rfcp.eliah.one", "http://localhost:5173", "http://127.0.0.1:8888"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# REST routes: each router is mounted under /api/<name> with a matching tag.
for _module, _name in (
    (health, "health"),
    (projects, "projects"),
    (terrain, "terrain"),
    (coverage, "coverage"),
    (regions, "regions"),
    (system, "system"),
    (gpu, "gpu"),
):
    app.include_router(_module.router, prefix=f"/api/{_name}", tags=[_name])

# WebSocket endpoint for real-time coverage with progress
app.websocket("/ws")(websocket_endpoint)
|
||||||
|
|
||||||
|
|
||||||
|
@app.get("/")
async def root():
    """Service banner for quick liveness checks."""
    banner = {"message": "RFCP Backend API", "version": "3.0.0"}
    return banner
|
||||||
|
|
||||||
|
|
||||||
|
# Run the API directly with uvicorn when executed as a script.
if __name__ == "__main__":
    import uvicorn

    uvicorn.run(app, host="0.0.0.0", port=8090)
|
||||||
0
backend/app/models/__init__.py
Normal file
0
backend/app/models/__init__.py
Normal file
BIN
backend/app/models/__pycache__/__init__.cpython-311.pyc
Normal file
BIN
backend/app/models/__pycache__/__init__.cpython-311.pyc
Normal file
Binary file not shown.
BIN
backend/app/models/__pycache__/project.cpython-311.pyc
Normal file
BIN
backend/app/models/__pycache__/project.cpython-311.pyc
Normal file
Binary file not shown.
BIN
backend/app/models/__pycache__/site.cpython-311.pyc
Normal file
BIN
backend/app/models/__pycache__/site.cpython-311.pyc
Normal file
Binary file not shown.
21
backend/app/models/project.py
Normal file
21
backend/app/models/project.py
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
from pydantic import BaseModel, Field
|
||||||
|
from typing import Optional
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
from app.models.site import Site
|
||||||
|
|
||||||
|
|
||||||
|
class CoverageSettings(BaseModel):
    """Coverage-calculation parameters stored per project.

    radius/resolution are meters (they feed the grid generator);
    min_signal/max_signal look like RSRP bounds in dBm -- confirm
    against the coverage engine.
    """
    radius: float = 10000.0
    resolution: float = 200.0
    min_signal: float = -105.0
    max_signal: float = -65.0
|
||||||
|
|
||||||
|
|
||||||
|
class Project(BaseModel):
    """A saved planning project: its sites plus coverage settings."""
    id: Optional[str] = None  # None until assigned by the backing store
    name: str = "global"
    # NOTE(review): datetime.utcnow returns naive timestamps and is
    # deprecated since Python 3.12; consider datetime.now(timezone.utc),
    # but verify consumers handle aware datetimes first.
    created_at: datetime = Field(default_factory=datetime.utcnow)
    updated_at: datetime = Field(default_factory=datetime.utcnow)
    sites: list[Site] = []
    settings: CoverageSettings = Field(default_factory=CoverageSettings)
|
||||||
35
backend/app/models/site.py
Normal file
35
backend/app/models/site.py
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
from pydantic import BaseModel
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
|
||||||
|
class Sector(BaseModel):
    """A directional antenna sector belonging to a Site.

    Units are not declared in this module; defaults look like dBm
    (power), dBi (gain), meters (height), MHz (frequency) and degrees
    (azimuth/beamwidth/tilt) -- confirm against the calculation code.
    """
    id: str
    name: str = "Alpha"
    power: float = 43.0
    gain: float = 8.0
    height: float = 30.0
    frequency: float = 1800.0
    azimuth: float = 0.0
    beamwidth: float = 65.0
    tilt: float = 0.0
    antenna_type: str = "directional"
|
||||||
|
|
||||||
|
|
||||||
|
class Site(BaseModel):
    """A transmitter site placed on the map.

    azimuth/beamwidth/tilt are optional -- presumably only used for
    directional antennas (antenna_type != "omni"); confirm in the
    coverage engine. Units follow the same convention as Sector.
    """
    id: str
    name: str
    lat: float
    lon: float
    height: float = 30.0
    power: float = 43.0
    gain: float = 8.0
    frequency: float = 1800.0
    antenna_type: str = "omni"
    azimuth: Optional[float] = None
    beamwidth: Optional[float] = None
    tilt: Optional[float] = None
    color: str = "#ef4444"  # hex color string
    visible: bool = True  # name suggests a UI visibility flag -- confirm in frontend
    notes: Optional[str] = None
    equipment: Optional[str] = None
    sectors: list[Sector] = []  # per-sector config; empty for simple sites
|
||||||
11
backend/app/parallel/__init__.py
Normal file
11
backend/app/parallel/__init__.py
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
"""
|
||||||
|
Parallel processing infrastructure for coverage calculations.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from app.parallel.manager import SharedMemoryManager, SharedTerrainData, SharedBuildingData
|
||||||
|
from app.parallel.pool import ManagedProcessPool
|
||||||
|
|
||||||
|
# Re-exported public API of app.parallel.
__all__ = [
    "SharedMemoryManager", "SharedTerrainData", "SharedBuildingData",
    "ManagedProcessPool",
]
|
||||||
174
backend/app/parallel/manager.py
Normal file
174
backend/app/parallel/manager.py
Normal file
@@ -0,0 +1,174 @@
|
|||||||
|
"""
|
||||||
|
Shared Memory Manager for parallel processing.
|
||||||
|
|
||||||
|
Instead of copying building/terrain data to each worker,
|
||||||
|
store data in shared memory that all workers can read.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import multiprocessing.shared_memory as shm
|
||||||
|
import numpy as np
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from typing import List, Optional
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class SharedTerrainData:
    """Reference to terrain data in shared memory.

    Only the block *name* plus metadata are stored, so instances are
    small and cheap to pickle across process boundaries.
    """
    shm_name: str  # name of the SharedMemory block holding the grid
    shape: tuple  # grid shape (rows, cols)
    bounds: tuple  # (min_lat, min_lon, max_lat, max_lon)
    resolution: float  # grid resolution, as produced by the terrain loader

    def get_array(self) -> np.ndarray:
        """Attach to the shared block and return the int16 height grid.

        The SharedMemory handle is pinned on the instance: the original
        code let the handle fall out of scope immediately, which allows
        its finalizer / resource tracker to run while the returned array
        is still in use (leak warnings, and potentially an invalidated
        mapping).
        """
        attached = shm.SharedMemory(name=self.shm_name)
        # Keep the handle alive for the lifetime of this reference.
        self._attached_shm = attached
        return np.ndarray(self.shape, dtype=np.int16, buffer=attached.buf)
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class SharedBuildingData:
    """Reference to building data in shared memory.

    Geometry is stored CSR-style: one concatenated vertex array plus a
    per-building offset array delimiting each polygon.
    """
    shm_centroids_name: str  # (N, 2) float64 — (lat, lon) centroids
    shm_heights_name: str  # (N,) float32 — building heights
    shm_vertices_name: str  # (total_verts, 2) float64 — concatenated (lat, lon) vertices
    shm_offsets_name: str  # (N+1,) int32 — vertex offsets per building
    count: int  # number of buildings N
    total_vertices: int  # total vertex rows across all buildings

    def _attach(self, name: str):
        """Attach to a shared block and pin the handle on the instance.

        The original getters dropped the SharedMemory handle right away,
        letting its finalizer / resource tracker act while the returned
        array was still in use; pinning prevents that.
        """
        handle = shm.SharedMemory(name=name)
        if not hasattr(self, "_attached"):
            self._attached = []
        self._attached.append(handle)
        return handle

    def get_centroids(self) -> np.ndarray:
        """Return the (N, 2) centroid array (zero-copy view)."""
        existing = self._attach(self.shm_centroids_name)
        return np.ndarray((self.count, 2), dtype=np.float64, buffer=existing.buf)

    def get_heights(self) -> np.ndarray:
        """Return the (N,) height array (zero-copy view)."""
        existing = self._attach(self.shm_heights_name)
        return np.ndarray((self.count,), dtype=np.float32, buffer=existing.buf)

    def get_offsets(self) -> np.ndarray:
        """Return the (N+1,) offset array (zero-copy view)."""
        existing = self._attach(self.shm_offsets_name)
        return np.ndarray((self.count + 1,), dtype=np.int32, buffer=existing.buf)

    def get_vertices(self) -> np.ndarray:
        """Return the concatenated vertex array (zero-copy view)."""
        existing = self._attach(self.shm_vertices_name)
        return np.ndarray((self.total_vertices, 2), dtype=np.float64, buffer=existing.buf)

    def get_polygon(self, idx: int) -> np.ndarray:
        """Return the vertex slice for building ``idx`` (zero-copy view)."""
        offsets = self.get_offsets()
        vertices = self.get_vertices()
        start, end = offsets[idx], offsets[idx + 1]
        return vertices[start:end]
|
||||||
|
|
||||||
|
|
||||||
|
class SharedMemoryManager:
    """
    Manages shared memory blocks for parallel processing.

    Usage:
        manager = SharedMemoryManager()
        terrain_ref = manager.store_terrain(heights, bounds, resolution)
        buildings_ref = manager.store_buildings(buildings)

        # Pass references (small dataclasses) to workers
        pool.map(worker_func, points, terrain_ref, buildings_ref)

        # Workers attach to shared memory — no copy!
        terrain = terrain_ref.get_array()

        # Cleanup when done
        manager.cleanup()
    """

    def __init__(self):
        # Blocks created by this manager; released in cleanup().
        self._shm_blocks: list = []

    def store_terrain(
        self, heights: np.ndarray, bounds: tuple, resolution: float,
    ) -> SharedTerrainData:
        """Store terrain heights in shared memory.

        Args:
            heights: 2D height grid. NOTE(review): workers read it back
                as int16 via SharedTerrainData.get_array — confirm the
                caller always passes an int16 array.
            bounds: (min_lat, min_lon, max_lat, max_lon)
            resolution: grid resolution

        Returns:
            Small picklable reference to the shared block.
        """
        shm_block = shm.SharedMemory(create=True, size=heights.nbytes)
        self._shm_blocks.append(shm_block)

        # Copy the grid into the shared segment.
        shm_array = np.ndarray(heights.shape, dtype=heights.dtype, buffer=shm_block.buf)
        shm_array[:] = heights[:]

        return SharedTerrainData(
            shm_name=shm_block.name,
            shape=heights.shape,
            bounds=bounds,
            resolution=resolution,
        )

    def store_buildings(self, buildings: list) -> Optional[SharedBuildingData]:
        """Store building data in shared memory.

        Args:
            buildings: List of Building objects or dicts with a
                'geometry' list of (lon, lat) pairs and optional 'height'.

        Returns:
            SharedBuildingData reference, or None if no buildings.
        """
        n = len(buildings)
        if n == 0:
            return None

        # Flatten geometry: centroids, heights, one concatenated vertex
        # array plus per-building offsets into it (CSR-style layout).
        centroids = np.zeros((n, 2), dtype=np.float64)
        heights = np.zeros(n, dtype=np.float32)
        all_vertices = []
        offsets = [0]

        for i, b in enumerate(buildings):
            # Support both dict and object forms
            if hasattr(b, 'geometry'):
                geom = b.geometry
                h = getattr(b, 'height', 10.0)
            else:
                geom = b.get('geometry', [])
                h = b.get('height', 10.0)

            if geom:
                # Geometry points arrive as (lon, lat); stored as (lat, lon).
                lats = [p[1] for p in geom]
                lons = [p[0] for p in geom]
                centroids[i] = [sum(lats) / len(lats), sum(lons) / len(lons)]
                for lon, lat in geom:
                    all_vertices.append([lat, lon])
            heights[i] = h or 10.0  # missing/zero height falls back to 10 m
            offsets.append(len(all_vertices))

        vertices = np.array(all_vertices, dtype=np.float64) if all_vertices else np.zeros((0, 2), dtype=np.float64)
        offsets = np.array(offsets, dtype=np.int32)

        # Create shared memory (size 0 is invalid, hence max(..., 1)).
        shm_centroids = shm.SharedMemory(create=True, size=max(centroids.nbytes, 1))
        shm_heights = shm.SharedMemory(create=True, size=max(heights.nbytes, 1))
        shm_vertices = shm.SharedMemory(create=True, size=max(vertices.nbytes, 1))
        shm_offsets = shm.SharedMemory(create=True, size=max(offsets.nbytes, 1))

        self._shm_blocks.extend([shm_centroids, shm_heights, shm_vertices, shm_offsets])

        # Copy data
        if centroids.nbytes > 0:
            np.ndarray(centroids.shape, dtype=centroids.dtype, buffer=shm_centroids.buf)[:] = centroids
        if heights.nbytes > 0:
            np.ndarray(heights.shape, dtype=heights.dtype, buffer=shm_heights.buf)[:] = heights
        if vertices.nbytes > 0:
            np.ndarray(vertices.shape, dtype=vertices.dtype, buffer=shm_vertices.buf)[:] = vertices
        if offsets.nbytes > 0:
            np.ndarray(offsets.shape, dtype=offsets.dtype, buffer=shm_offsets.buf)[:] = offsets

        return SharedBuildingData(
            shm_centroids_name=shm_centroids.name,
            shm_heights_name=shm_heights.name,
            shm_vertices_name=shm_vertices.name,
            shm_offsets_name=shm_offsets.name,
            count=n,
            total_vertices=len(all_vertices),
        )

    def cleanup(self):
        """Release all shared memory blocks.

        close() and unlink() are attempted independently: the original
        single try block skipped unlink() whenever close() raised, which
        leaked the underlying segment.
        """
        for block in self._shm_blocks:
            try:
                block.close()
            except Exception:
                pass
            try:
                block.unlink()
            except Exception:
                pass
        self._shm_blocks.clear()
|
||||||
136
backend/app/parallel/pool.py
Normal file
136
backend/app/parallel/pool.py
Normal file
@@ -0,0 +1,136 @@
|
|||||||
|
"""
|
||||||
|
Managed process pool with automatic cleanup.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import subprocess
|
||||||
|
import time
|
||||||
|
import multiprocessing as mp
|
||||||
|
from concurrent.futures import ProcessPoolExecutor, as_completed
|
||||||
|
from typing import List, Dict, Tuple, Optional, Callable
|
||||||
|
|
||||||
|
|
||||||
|
class ManagedProcessPool:
    """
    Process pool wrapper with:
    - Automatic cleanup on exit
    - Worker process kill on failure
    - Progress reporting
    """

    def __init__(self, max_workers: int = 6):
        # Hard cap at 6 workers regardless of what the caller requests.
        self.max_workers = min(max_workers, 6)
        self._pool: Optional[ProcessPoolExecutor] = None

    def map_chunks(
        self,
        worker_fn: Callable,
        chunks: List[tuple],
        log_fn: Optional[Callable] = None,
    ) -> List[Dict]:
        """
        Submit chunks to the pool and collect results.

        Args:
            worker_fn: Function to call for each chunk (must be picklable,
                since workers are started with the 'spawn' context)
            chunks: List of (chunk_data, *args) tuples
            log_fn: Progress logging function

        Returns:
            Flattened list of result dicts.  Chunks whose worker raised are
            logged and dropped, and results arrive in completion order, not
            submission order.
        """
        if log_fn is None:
            log_fn = lambda msg: print(f"[POOL] {msg}", flush=True)

        all_results: List[Dict] = []

        try:
            # 'spawn' gives clean child interpreters (no inherited fork state).
            ctx = mp.get_context('spawn')
            self._pool = ProcessPoolExecutor(
                max_workers=self.max_workers, mp_context=ctx,
            )

            # Map each future back to its chunk index (index currently unused).
            futures = {
                self._pool.submit(worker_fn, chunk): i
                for i, chunk in enumerate(chunks)
            }

            completed = 0
            t0 = time.time()

            for future in as_completed(futures):
                try:
                    chunk_results = future.result()
                    all_results.extend(chunk_results)
                except Exception as e:
                    # Best-effort: a failed chunk is logged and skipped, not retried.
                    log_fn(f"Chunk error: {e}")

                completed += 1
                elapsed = time.time() - t0  # NOTE(review): computed but never used
                pct = completed * 100 // len(chunks)
                log_fn(f"Progress: {completed}/{len(chunks)} ({pct}%)")

        except Exception as e:
            log_fn(f"Pool error: {e}")

        finally:
            if self._pool:
                # Don't wait for stragglers: abandon pending work, then reap.
                self._pool.shutdown(wait=False, cancel_futures=True)
                time.sleep(0.5)  # grace period for workers to exit on their own
                killed = self._kill_orphans()
                if killed > 0:
                    log_fn(f"Cleaned up {killed} orphaned workers")

        return all_results

    @staticmethod
    def _kill_orphans() -> int:
        """Kill orphaned rfcp-server worker processes.

        NOTE(review): this force-kills *any* process whose image name /
        command line matches 'rfcp-server' (other than the current
        process), not just children of this pool — confirm that is
        intended in multi-instance deployments.
        """
        my_pid = os.getpid()
        killed = 0

        if sys.platform == 'win32':
            try:
                # CSV without header: "image","pid",...
                result = subprocess.run(
                    ['tasklist', '/FI', 'IMAGENAME eq rfcp-server.exe', '/FO', 'CSV', '/NH'],
                    capture_output=True, text=True, timeout=5,
                )
                for line in result.stdout.strip().split('\n'):
                    if 'rfcp-server.exe' not in line:
                        continue
                    parts = line.split(',')
                    if len(parts) >= 2:
                        # Second CSV column is the PID, quoted.
                        pid_str = parts[1].strip().strip('"')
                        try:
                            pid = int(pid_str)
                            if pid != my_pid:
                                subprocess.run(
                                    ['taskkill', '/F', '/PID', str(pid)],
                                    capture_output=True, timeout=5,
                                )
                                killed += 1
                        except (ValueError, subprocess.TimeoutExpired):
                            pass
            except Exception:
                # Best-effort cleanup: never let it propagate.
                pass
        else:
            try:
                # pgrep -f matches against the full command line.
                result = subprocess.run(
                    ['pgrep', '-f', 'rfcp-server'],
                    capture_output=True, text=True, timeout=5,
                )
                for pid_str in result.stdout.strip().split('\n'):
                    if not pid_str:
                        continue
                    try:
                        pid = int(pid_str)
                        if pid != my_pid:
                            os.kill(pid, 9)  # SIGKILL — no chance to clean up
                            killed += 1
                    except (ValueError, ProcessLookupError, PermissionError):
                        pass
            except Exception:
                # Best-effort cleanup: never let it propagate.
                pass

        return killed
|
||||||
64
backend/app/parallel/worker.py
Normal file
64
backend/app/parallel/worker.py
Normal file
@@ -0,0 +1,64 @@
|
|||||||
|
"""
|
||||||
|
Worker functions for parallel coverage calculation.
|
||||||
|
|
||||||
|
These run in separate processes and access shared memory data.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import List, Dict, Optional
|
||||||
|
from app.parallel.manager import SharedTerrainData, SharedBuildingData
|
||||||
|
|
||||||
|
|
||||||
|
def process_chunk(
    chunk: List[tuple],
    terrain_cache: dict,
    buildings: list,
    osm_data: dict,
    config: dict,
) -> List[dict]:
    """
    Process a chunk of grid points.

    This is the standard worker function used by both Ray and ProcessPoolExecutor.
    It re-uses the existing coverage calculation logic.

    Args:
        chunk: List of (lat, lon, point_elevation) tuples.
        terrain_cache: Pre-loaded terrain tiles, injected into this
            worker process's terrain_service singleton.
        buildings: Building footprints used for the spatial index and
            obstruction checks.
        osm_data: Dict with 'streets', 'water_bodies', 'vegetation_areas'
            lists (missing keys default to empty lists).
        config: Dict with 'site_dict', 'settings_dict', 'site_elevation'
            and an optional 'precomputed' mapping of (lat, lon) ->
            {'distance': ..., 'path_loss': ...}.

    Returns:
        Coverage point dicts (model_dump) whose rsrp meets
        settings.min_signal.
    """
    # Imports are function-local so they run inside the worker process.
    # Inject terrain cache into the module-level singleton — each spawned
    # worker has its own copy of that singleton.
    from app.services.terrain_service import terrain_service
    terrain_service._tile_cache = terrain_cache

    # Build spatial index for fast building lookups around each point.
    from app.services.spatial_index import SpatialIndex
    spatial_idx = SpatialIndex()
    if buildings:
        spatial_idx.build(buildings)

    # Process points using existing calculator
    from app.services.coverage_service import CoverageService, SiteParams, CoverageSettings

    site = SiteParams(**config['site_dict'])
    settings = CoverageSettings(**config['settings_dict'])
    svc = CoverageService()

    # Per-stage timing accumulator, mutated in place by the calculator.
    timing = {
        "los": 0.0, "buildings": 0.0, "antenna": 0.0,
        "dominant_path": 0.0, "street_canyon": 0.0,
        "reflection": 0.0, "vegetation": 0.0,
    }

    # Optional per-point precomputed distance/path-loss shortcuts.
    precomputed = config.get('precomputed')

    results = []
    for lat, lon, point_elev in chunk:
        pre = precomputed.get((lat, lon)) if precomputed else None
        point = svc._calculate_point_sync(
            site, lat, lon, settings,
            buildings, osm_data.get('streets', []),
            spatial_idx, osm_data.get('water_bodies', []),
            osm_data.get('vegetation_areas', []),
            config['site_elevation'], point_elev, timing,
            precomputed_distance=pre.get('distance') if pre else None,
            precomputed_path_loss=pre.get('path_loss') if pre else None,
        )
        # Drop points below the signal floor to keep result payloads small.
        if point.rsrp >= settings.min_signal:
            results.append(point.model_dump())

    return results
|
||||||
21
backend/app/propagation/__init__.py
Normal file
21
backend/app/propagation/__init__.py
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
"""
|
||||||
|
Propagation models for RF coverage calculation.
|
||||||
|
|
||||||
|
Each model implements the PropagationModel interface and is stateless/thread-safe.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from app.propagation.base import PropagationModel, PropagationInput, PropagationOutput
|
||||||
|
from app.propagation.free_space import FreeSpaceModel
|
||||||
|
from app.propagation.okumura_hata import OkumuraHataModel
|
||||||
|
from app.propagation.cost231_hata import Cost231HataModel
|
||||||
|
from app.propagation.cost231_wi import Cost231WIModel
|
||||||
|
from app.propagation.itu_r_p1546 import ITUR_P1546Model
|
||||||
|
from app.propagation.itu_r_p526 import KnifeEdgeDiffractionModel
|
||||||
|
from app.propagation.longley_rice import LongleyRiceModel
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"PropagationModel", "PropagationInput", "PropagationOutput",
|
||||||
|
"FreeSpaceModel", "OkumuraHataModel", "Cost231HataModel",
|
||||||
|
"Cost231WIModel", "ITUR_P1546Model", "KnifeEdgeDiffractionModel",
|
||||||
|
"LongleyRiceModel",
|
||||||
|
]
|
||||||
87
backend/app/propagation/base.py
Normal file
87
backend/app/propagation/base.py
Normal file
@@ -0,0 +1,87 @@
|
|||||||
|
"""
|
||||||
|
Abstract base class for all propagation models.
|
||||||
|
|
||||||
|
Each model implements a single, well-defined propagation algorithm.
|
||||||
|
Models are stateless and can be called concurrently.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from abc import ABC, abstractmethod
|
||||||
|
from dataclasses import dataclass, field
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class PropagationInput:
    """Input for propagation calculation."""
    frequency_mhz: float  # carrier frequency, MHz
    distance_m: float  # TX-RX distance, meters
    tx_height_m: float  # transmitter antenna height, meters
    rx_height_m: float  # receiver antenna height, meters
    environment: str = "urban"  # urban, suburban, rural, open

    # Optional terrain info
    terrain_clearance_m: Optional[float] = None  # minimum LOS clearance; negative = blocked
    terrain_roughness_m: Optional[float] = None  # terrain irregularity (delta-h), meters

    # Optional building info
    building_height_m: Optional[float] = None  # average rooftop height, meters
    street_width_m: Optional[float] = None  # street width, meters
    building_separation_m: Optional[float] = None  # building separation, meters
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class PropagationOutput:
    """Output from propagation calculation."""
    path_loss_db: float  # total path loss, dB
    model_name: str  # name of the model that produced this result
    is_los: bool  # whether the path was treated as line-of-sight
    # Per-term loss contributions; keys are model-specific.
    breakdown: dict = field(default_factory=dict)
|
||||||
|
|
||||||
|
|
||||||
|
class PropagationModel(ABC):
    """
    Base interface for propagation models.

    Each implementation encapsulates one well-defined path-loss
    algorithm; implementations are stateless and safe to call
    concurrently.
    """

    @property
    @abstractmethod
    def name(self) -> str:
        """Model name for logging/display."""

    @property
    @abstractmethod
    def frequency_range(self) -> tuple:
        """Valid frequency range (min_mhz, max_mhz)."""

    @property
    @abstractmethod
    def distance_range(self) -> tuple:
        """Valid distance range (min_m, max_m)."""

    @abstractmethod
    def calculate(self, input: PropagationInput) -> PropagationOutput:
        """
        Compute path loss for the given input.

        Implementations MUST be stateless (no side effects), thread-safe
        (callable concurrently), and fast (no I/O, no heavy computation).
        """

    def is_valid_for(self, input: PropagationInput) -> bool:
        """Return True when both frequency and distance fall inside this
        model's validity ranges."""
        f_lo, f_hi = self.frequency_range
        if not f_lo <= input.frequency_mhz <= f_hi:
            return False
        d_lo, d_hi = self.distance_range
        return d_lo <= input.distance_m <= d_hi
|
||||||
62
backend/app/propagation/cost231_hata.py
Normal file
62
backend/app/propagation/cost231_hata.py
Normal file
@@ -0,0 +1,62 @@
|
|||||||
|
"""
|
||||||
|
COST-231 Hata model (extension of Okumura-Hata).
|
||||||
|
|
||||||
|
Valid for:
|
||||||
|
- Frequency: 1500-2000 MHz
|
||||||
|
- Distance: 1-20 km
|
||||||
|
|
||||||
|
Better for LTE bands than original Okumura-Hata.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import math
|
||||||
|
from app.propagation.base import PropagationModel, PropagationInput, PropagationOutput
|
||||||
|
|
||||||
|
|
||||||
|
class Cost231HataModel(PropagationModel):
    """COST-231 extension of Okumura-Hata for the 1500-2000 MHz band.

    Uses the medium-city mobile antenna correction and adds a +3 dB
    metropolitan-center term for urban environments.
    """

    @property
    def name(self) -> str:
        return "COST-231-Hata"

    @property
    def frequency_range(self) -> tuple:
        return (1500, 2000)

    @property
    def distance_range(self) -> tuple:
        return (100, 20000)

    def calculate(self, input: PropagationInput) -> PropagationOutput:
        """Return COST-231 Hata path loss (always treated as NLOS)."""
        freq = input.frequency_mhz
        dist_km = max(input.distance_m / 1000, 0.1)  # clamp below 100 m
        h_base = max(input.tx_height_m, 1.0)
        h_mobile = max(input.rx_height_m, 1.0)

        log_f = math.log10(freq)
        log_hb = math.log10(h_base)

        # Mobile antenna correction a(hm) for a medium-sized city.
        mobile_corr = (1.1 * log_f - 0.7) * h_mobile - (1.56 * log_f - 0.8)

        # Metropolitan-center correction C_m.
        metro_corr = 3 if input.environment == "urban" else 0

        frequency_term = 33.9 * log_f
        height_gain = -13.82 * log_hb
        distance_term = (44.9 - 6.55 * log_hb) * math.log10(dist_km)

        total = 46.3 + frequency_term + height_gain - mobile_corr + distance_term + metro_corr

        return PropagationOutput(
            path_loss_db=total,
            model_name=self.name,
            is_los=False,
            breakdown={
                "base_loss": 46.3,
                "frequency_term": frequency_term,
                "height_gain": height_gain,
                "mobile_correction": -mobile_corr,
                "distance_term": distance_term,
                "metro_correction": metro_corr,
            },
        )
|
||||||
114
backend/app/propagation/cost231_wi.py
Normal file
114
backend/app/propagation/cost231_wi.py
Normal file
@@ -0,0 +1,114 @@
|
|||||||
|
"""
|
||||||
|
COST-231 Walfisch-Ikegami model.
|
||||||
|
|
||||||
|
Valid for:
|
||||||
|
- Frequency: 800-2000 MHz
|
||||||
|
- Distance: 20m-5km
|
||||||
|
- Urban microcell environments
|
||||||
|
|
||||||
|
Accounts for building heights, street widths, and building separation.
|
||||||
|
Reference: COST 231 Final Report, Chapter 4.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import math
|
||||||
|
from app.propagation.base import PropagationModel, PropagationInput, PropagationOutput
|
||||||
|
|
||||||
|
|
||||||
|
class Cost231WIModel(PropagationModel):
    """COST-231 Walfisch-Ikegami urban microcell model.

    Valid for 800-2000 MHz and 20 m - 5 km.  Accounts for average
    building height, street width, and building separation.

    Reference: COST 231 Final Report, Chapter 4.
    """

    @property
    def name(self) -> str:
        return "COST-231-WI"

    @property
    def frequency_range(self) -> tuple:
        return (800, 2000)

    @property
    def distance_range(self) -> tuple:
        return (20, 5000)

    def calculate(self, input: PropagationInput) -> PropagationOutput:
        """Return path loss; shortcut to free space when the TX is above
        rooftops and the path is shorter than 500 m."""
        f = input.frequency_mhz
        d = max(input.distance_m / 1000, 0.02)  # km
        hb = max(input.tx_height_m, 4.0)
        hm = max(input.rx_height_m, 1.0)

        # Building parameters (defaults for typical urban)
        h_roof = input.building_height_m or 15.0  # avg building height, m
        w = input.street_width_m or 20.0  # street width, m
        b = input.building_separation_m or 30.0  # building separation, m

        delta_hb = hb - h_roof  # TX height relative to rooftops
        delta_hm = h_roof - hm  # rooftop height relative to RX

        # Free space loss
        L_fs = 32.45 + 20 * math.log10(d) + 20 * math.log10(f)

        # LOS case.  NOTE: the spec uses a street-canyon formula here;
        # plain free space is kept as the existing simplification.
        if delta_hb > 0 and d < 0.5:
            return PropagationOutput(
                path_loss_db=L_fs,
                model_name=self.name,
                is_los=True,
                breakdown={"free_space": L_fs, "rooftop_diffraction": 0, "multiscreen": 0},
            )

        # Rooftop-to-street diffraction (L_rts); street orientation fixed
        # at 90 degrees (worst case).
        phi = 90.0
        if phi < 35:
            L_ori = -10 + 0.354 * phi
        elif phi < 55:
            L_ori = 2.5 + 0.075 * (phi - 35)
        else:
            L_ori = 4.0 - 0.114 * (phi - 55)

        if delta_hm > 0:
            L_rts = (
                -16.9
                - 10 * math.log10(w)
                + 10 * math.log10(f)
                + 20 * math.log10(delta_hm)
                + L_ori
            )
        else:
            # BUGFIX: when the RX is at or above rooftop level,
            # log10(delta_hm) raised a math domain error; per COST-231
            # the rooftop-to-street term vanishes in that case.
            L_rts = 0.0

        # Multi-screen diffraction (L_msd)
        if delta_hb > 0:
            L_bsh = -18 * math.log10(1 + delta_hb)
            k_a = 54
            k_d = 18
        else:
            L_bsh = 0
            # BUGFIX: COST-231 scales the extra k_a loss DOWN for paths
            # shorter than 500 m; the original applied the unscaled value
            # for d < 0.5 and a no-op max() for d >= 0.5.
            if d >= 0.5:
                k_a = 54 - 0.8 * abs(delta_hb)
            else:
                k_a = 54 - 0.8 * abs(delta_hb) * (d / 0.5)
            # BUGFIX: spec uses the signed delta_hb (<= 0 here), giving
            # k_d >= 18; the original abs() reduced k_d instead.
            k_d = 18 - 15 * delta_hb / h_roof

        k_f = -4 + 0.7 * (f / 925 - 1)  # medium city / suburban
        if input.environment == "urban":
            k_f = -4 + 1.5 * (f / 925 - 1)

        L_msd = (
            L_bsh
            + k_a
            + k_d * math.log10(d)
            + k_f * math.log10(f)
            - 9 * math.log10(b)
        )

        # Total NLOS loss; the diffraction terms never reduce below FSPL.
        if L_rts + L_msd > 0:
            L = L_fs + L_rts + L_msd
        else:
            L = L_fs

        return PropagationOutput(
            path_loss_db=L,
            model_name=self.name,
            is_los=False,
            breakdown={
                "free_space": L_fs,
                "rooftop_diffraction": max(L_rts, 0),
                "multiscreen": max(L_msd, 0),
            },
        )
|
||||||
43
backend/app/propagation/free_space.py
Normal file
43
backend/app/propagation/free_space.py
Normal file
@@ -0,0 +1,43 @@
|
|||||||
|
"""
|
||||||
|
Free Space Path Loss (FSPL) model.
|
||||||
|
|
||||||
|
Used as baseline and for LOS conditions.
|
||||||
|
FSPL = 20*log10(d) + 20*log10(f) + 32.45
|
||||||
|
where d in km, f in MHz
|
||||||
|
"""
|
||||||
|
|
||||||
|
import math
|
||||||
|
from app.propagation.base import PropagationModel, PropagationInput, PropagationOutput
|
||||||
|
|
||||||
|
|
||||||
|
class FreeSpaceModel(PropagationModel):
    """Free Space Path Loss — the theoretical minimum propagation loss.

    FSPL(dB) = 20*log10(d_km) + 20*log10(f_mhz) + 32.45
    """

    @property
    def name(self) -> str:
        return "Free-Space"

    @property
    def frequency_range(self) -> tuple:
        # Essentially unrestricted: 1 MHz .. 100 GHz.
        return (1, 100000)

    @property
    def distance_range(self) -> tuple:
        return (1, 1000000)  # 1m to 1000km

    def calculate(self, input: PropagationInput) -> PropagationOutput:
        """Return FSPL for the given input (always reported as LOS)."""
        # Clamp to 1 m so log10 never sees a non-positive value.
        km = max(input.distance_m / 1000, 0.001)
        mhz = input.frequency_mhz

        distance_term = 20 * math.log10(km)
        frequency_term = 20 * math.log10(mhz)
        total = distance_term + frequency_term + 32.45

        return PropagationOutput(
            path_loss_db=total,
            model_name=self.name,
            is_los=True,
            breakdown={
                "distance_loss": distance_term,
                "frequency_loss": frequency_term,
                "constant": 32.45,
            },
        )
|
||||||
74
backend/app/propagation/itu_r_p1546.py
Normal file
74
backend/app/propagation/itu_r_p1546.py
Normal file
@@ -0,0 +1,74 @@
|
|||||||
|
"""
|
||||||
|
ITU-R P.1546 model for point-to-area predictions.
|
||||||
|
|
||||||
|
Valid for:
|
||||||
|
- Frequency: 30-3000 MHz
|
||||||
|
- Distance: 1-1000 km
|
||||||
|
- Time percentages: 1%, 10%, 50%
|
||||||
|
|
||||||
|
Best for: VHF/UHF broadcasting and land mobile services.
|
||||||
|
Reference: ITU-R P.1546-6 (2019)
|
||||||
|
"""
|
||||||
|
|
||||||
|
import math
|
||||||
|
from app.propagation.base import PropagationModel, PropagationInput, PropagationOutput
|
||||||
|
|
||||||
|
|
||||||
|
class ITUR_P1546Model(PropagationModel):
    """
    Simplified ITU-R P.1546 point-to-area prediction.

    A full implementation would add terrain clearance angle, mixed
    land/sea paths, and time variability; this version relies on a
    regression of the field-strength curves.
    """

    @property
    def name(self) -> str:
        return "ITU-R-P.1546"

    @property
    def frequency_range(self) -> tuple:
        return (30, 3000)

    @property
    def distance_range(self) -> tuple:
        return (1000, 1000000)  # 1-1000 km

    def calculate(self, input: PropagationInput) -> PropagationOutput:
        """Return path loss derived from the simplified field-strength curves."""
        freq = input.frequency_mhz
        dist_km = max(input.distance_m / 1000, 1.0)
        tx_h = max(input.tx_height_m, 1.0)

        # The P.1546 curves are tabulated at nominal 100/600/2000 MHz.
        if freq < 100:
            nominal = 100
        elif freq < 600:
            nominal = 600
        else:
            nominal = 2000

        # Reference field strength at 1 kW ERP (simplified curve
        # regression), in dBuV/m.
        field_ref = 106.9 - 20 * math.log10(dist_km)

        # Transmitter height gain above the 10 m reference.
        height_gain = 20 * math.log10(tx_h / 10) if tx_h > 10 else 0

        # Offset from the nominal curve frequency.
        freq_corr = 20 * math.log10(freq / nominal)

        # Convert field strength to basic transmission loss:
        # L = 139.3 - E + 20*log10(f).
        field = field_ref + height_gain - freq_corr
        loss = 139.3 - field + 20 * math.log10(freq)

        return PropagationOutput(
            path_loss_db=loss,
            model_name=self.name,
            is_los=dist_km < 5,
            breakdown={
                "reference_field": field_ref,
                "height_gain": height_gain,
                "frequency_correction": freq_corr,
                "path_loss": loss,
            },
        )
|
||||||
87
backend/app/propagation/itu_r_p526.py
Normal file
87
backend/app/propagation/itu_r_p526.py
Normal file
@@ -0,0 +1,87 @@
|
|||||||
|
"""
|
||||||
|
Knife-edge diffraction model based on ITU-R P.526.
|
||||||
|
|
||||||
|
Used for calculating additional loss when terrain or obstacles
|
||||||
|
block the line of sight between TX and RX.
|
||||||
|
|
||||||
|
Reference: ITU-R P.526-15
|
||||||
|
"""
|
||||||
|
|
||||||
|
import math
|
||||||
|
|
||||||
|
|
||||||
|
class KnifeEdgeDiffractionModel:
    """
    Single knife-edge diffraction model (ITU-R P.526 style).

    Stateless utility — not a full PropagationModel since it calculates
    additional loss, not total path loss.
    """

    @staticmethod
    def calculate_loss(
        d1_m: float,
        d2_m: float,
        h_m: float,
        wavelength_m: float,
    ) -> float:
        """
        Calculate diffraction loss over a single knife edge.

        Args:
            d1_m: Distance from TX to obstacle
            d2_m: Distance from obstacle to RX
            h_m: Obstacle height above LOS line (positive = above)
            wavelength_m: Signal wavelength

        Returns:
            Loss in dB (always >= 0)
        """
        # Degenerate geometry: no meaningful diffraction loss.
        if d1_m <= 0 or d2_m <= 0 or wavelength_m <= 0:
            return 0.0

        # Fresnel-Kirchhoff parameter
        v = h_m * math.sqrt(2 * (d1_m + d2_m) / (wavelength_m * d1_m * d2_m))

        # Piecewise (Lee) approximation.
        # BUGFIX: the quadratic coefficients were swapped between the
        # negative and positive v branches, causing a ~17 dB jump at
        # v = 2.4.  Correct, continuous form:
        #   -0.78 <= v < 0 : 6.02 + 9.11 v + 1.65 v^2  (~0 dB at v = -0.78)
        #    0    <= v < 2.4: 6.02 + 9.11 v - 1.27 v^2 (~20.6 dB at v = 2.4)
        if v < -0.78:
            L = 0.0  # obstacle far below LOS: negligible loss
        elif v < 0:
            L = 6.02 + 9.11 * v + 1.65 * v ** 2
        elif v < 2.4:
            L = 6.02 + 9.11 * v - 1.27 * v ** 2
        else:
            L = 12.95 + 20 * math.log10(v)

        return max(0.0, L)

    @staticmethod
    def calculate_clearance_loss(
        clearance_m: float,
        frequency_mhz: float,
    ) -> float:
        """
        Simplified diffraction loss from terrain clearance.

        Matches the existing coverage_service._diffraction_loss logic.

        Args:
            clearance_m: Minimum LOS clearance (negative = blocked)
            frequency_mhz: Signal frequency (unused by this simplified
                formula; kept for interface stability)

        Returns:
            Loss in dB (0 if positive clearance, capped at 40 dB)
        """
        if clearance_m >= 0:
            return 0.0

        # Crude diffraction-parameter proxy: 10 m of blockage ~ v = 1.
        v = abs(clearance_m) / 10

        if v <= 0:
            loss = 0.0
        elif v < 2.4:
            loss = 6.02 + 9.11 * v - 1.27 * v ** 2
        else:
            loss = 13.0 + 20 * math.log10(v)

        return min(loss, 40.0)
|
||||||
75
backend/app/propagation/longley_rice.py
Normal file
75
backend/app/propagation/longley_rice.py
Normal file
@@ -0,0 +1,75 @@
|
|||||||
|
"""
|
||||||
|
Longley-Rice Irregular Terrain Model (ITM).
|
||||||
|
|
||||||
|
Best for:
|
||||||
|
- VHF/UHF over irregular terrain
|
||||||
|
- Point-to-point links
|
||||||
|
- Distances 1-2000 km
|
||||||
|
|
||||||
|
Note: This is a simplified area-mode version.
|
||||||
|
Full implementation requires terrain profile data.
|
||||||
|
|
||||||
|
Reference: NTIA Report 82-100
|
||||||
|
"""
|
||||||
|
|
||||||
|
import math
|
||||||
|
from app.propagation.base import PropagationModel, PropagationInput, PropagationOutput
|
||||||
|
|
||||||
|
|
||||||
|
class LongleyRiceModel(PropagationModel):
    """
    Simplified Longley-Rice / ITM in area mode.

    FSPL plus a clutter term keyed to terrain irregularity, minus a
    combined-antenna-height gain.  For a proper implementation use
    splat! or the NTIA ITM reference (NTIA Report 82-100).
    """

    @property
    def name(self) -> str:
        return "Longley-Rice"

    @property
    def frequency_range(self) -> tuple:
        return (20, 20000)  # 20 MHz to 20 GHz

    @property
    def distance_range(self) -> tuple:
        return (1000, 2000000)  # 1-2000 km

    def calculate(self, input: PropagationInput) -> PropagationOutput:
        """Return area-mode path loss from the simplified ITM terms."""
        freq = input.frequency_mhz
        dist_km = max(input.distance_m / 1000, 1.0)
        tx_h = max(input.tx_height_m, 1.0)
        rx_h = max(input.rx_height_m, 1.0)

        # Terrain irregularity delta-h; 90 m corresponds to rolling hills.
        roughness = input.terrain_roughness_m or 90

        fspl = 32.45 + 20 * math.log10(dist_km) + 20 * math.log10(freq)

        # Coarse clutter loss bucketed by terrain irregularity.
        if roughness < 10:
            clutter = 0  # Flat
        elif roughness < 50:
            clutter = 5  # Gently rolling
        elif roughness < 150:
            clutter = 10  # Rolling hills
        else:
            clutter = 15  # Mountains

        # Combined antenna height gain above a 20 m reference.
        combined_h = tx_h + rx_h
        gain = 10 * math.log10(combined_h / 20) if combined_h > 20 else 0

        total = fspl + clutter - gain

        return PropagationOutput(
            path_loss_db=total,
            model_name=self.name,
            is_los=roughness < 10 and dist_km < 10,
            breakdown={
                "free_space_loss": fspl,
                "terrain_loss": clutter,
                "height_gain": gain,
            },
        )
|
||||||
74
backend/app/propagation/okumura_hata.py
Normal file
74
backend/app/propagation/okumura_hata.py
Normal file
@@ -0,0 +1,74 @@
|
|||||||
|
"""
|
||||||
|
Okumura-Hata empirical propagation model.
|
||||||
|
|
||||||
|
Valid for:
|
||||||
|
- Frequency: 150-1500 MHz
|
||||||
|
- Distance: 1-20 km
|
||||||
|
- TX height: 30-200 m
|
||||||
|
- RX height: 1-10 m
|
||||||
|
|
||||||
|
Reference: Hata (1980), "Empirical Formula for Propagation Loss
|
||||||
|
in Land Mobile Radio Services"
|
||||||
|
"""
|
||||||
|
|
||||||
|
import math
|
||||||
|
from app.propagation.base import PropagationModel, PropagationInput, PropagationOutput
|
||||||
|
|
||||||
|
|
||||||
|
class OkumuraHataModel(PropagationModel):
    """Okumura-Hata empirical path-loss model (valid roughly 150-1500 MHz)."""

    @property
    def name(self) -> str:
        return "Okumura-Hata"

    @property
    def frequency_range(self) -> tuple:
        return (150, 1500)

    @property
    def distance_range(self) -> tuple:
        # Extended to 100m minimum for practical use
        return (100, 20000)

    def calculate(self, input: PropagationInput) -> PropagationOutput:
        """Compute path loss with mobile-height and environment corrections."""
        freq = input.frequency_mhz
        dist_km = max(input.distance_m / 1000, 0.1)  # km, min 100m
        base_h = max(input.tx_height_m, 1.0)
        mobile_h = max(input.rx_height_m, 1.0)

        # Hoist the repeated logarithms.
        log_f = math.log10(freq)
        log_hb = math.log10(base_h)

        # Mobile antenna height correction factor
        if input.environment == "urban" and freq >= 400:
            # Large city variant
            a_hm = 3.2 * (math.log10(11.75 * mobile_h) ** 2) - 4.97
        else:
            # Medium/small city variant
            a_hm = (1.1 * log_f - 0.7) * mobile_h - (1.56 * log_f - 0.8)

        # Basic path loss for the urban reference case
        L_urban = (
            69.55
            + 26.16 * log_f
            - 13.82 * log_hb
            - a_hm
            + (44.9 - 6.55 * log_hb) * math.log10(dist_km)
        )

        # Environment correction relative to urban
        env = input.environment
        if env == "suburban":
            L = L_urban - 2 * (math.log10(freq / 28) ** 2) - 5.4
        elif env == "rural":
            L = L_urban - 4.78 * (log_f ** 2) + 18.33 * log_f - 35.94
        elif env == "open":
            L = L_urban - 4.78 * (log_f ** 2) + 18.33 * log_f - 40.94
        else:
            L = L_urban

        return PropagationOutput(
            path_loss_db=L,
            model_name=self.name,
            is_los=False,
            breakdown={
                "basic_loss": L_urban,
                "environment_correction": L - L_urban,
                "antenna_correction": a_hm,
            },
        )
|
||||||
1
backend/app/services/__init__.py
Normal file
1
backend/app/services/__init__.py
Normal file
@@ -0,0 +1 @@
|
|||||||
|
# Services package
|
||||||
98
backend/app/services/atmospheric_service.py
Normal file
98
backend/app/services/atmospheric_service.py
Normal file
@@ -0,0 +1,98 @@
|
|||||||
|
import math
|
||||||
|
|
||||||
|
|
||||||
|
class AtmosphericService:
    """ITU-R P.676 atmospheric absorption model"""

    # Simplified model for frequencies < 50 GHz
    # Standard atmosphere: T=15C, P=1013 hPa, humidity=50%

    def calculate_atmospheric_loss(
        self,
        frequency_mhz: float,
        distance_km: float,
        temperature_c: float = 15.0,
        humidity_percent: float = 50.0,
        altitude_m: float = 0.0,
    ) -> float:
        """
        Calculate atmospheric absorption loss.

        Args:
            frequency_mhz: Frequency in MHz
            distance_km: Path length in km
            temperature_c: Temperature in Celsius
            humidity_percent: Relative humidity (0-100)
            altitude_m: Altitude above sea level

        Returns:
            Loss in dB
        """
        freq_ghz = frequency_mhz / 1000

        # Absorption is negligible below 1 GHz.
        if freq_ghz < 1.0:
            return 0.0

        # Specific attenuation (dB/km), thinned exponentially with
        # altitude (atmospheric scale height ~8.5 km).
        gamma = self._specific_attenuation(freq_ghz, temperature_c, humidity_percent)
        thinning = math.exp(-altitude_m / 8500)

        # Cap the total for reasonable distances.
        return min(gamma * distance_km * thinning, 20.0)

    def _specific_attenuation(
        self,
        freq_ghz: float,
        temperature_c: float,
        humidity_percent: float,
    ) -> float:
        """
        Calculate specific attenuation in dB/km.

        Simplified ITU-R P.676 model.
        """
        # Saturation vapor pressure (hPa), Buck-style approximation.
        es = 6.1121 * math.exp(
            (18.678 - temperature_c / 234.5)
            * (temperature_c / (257.14 + temperature_c))
        )
        # Water vapor density (g/m3) - simplified.
        rho = (humidity_percent / 100) * es * 216.7 / (273.15 + temperature_c)

        # Oxygen absorption (dominant at 60 GHz, minor below 10 GHz).
        if freq_ghz < 10:
            gamma_o = 0.001 * freq_ghz ** 2  # Very small
        elif freq_ghz < 57:
            gamma_o = 0.001 * (freq_ghz / 10) ** 2.5
        else:
            gamma_o = 15.0  # Near the 60 GHz resonance peak

        # Water vapor absorption (peaks at 22 GHz and 183 GHz).
        if freq_ghz < 10:
            gamma_w = 0.0001 * rho * freq_ghz ** 2
        elif freq_ghz < 50:
            gamma_w = 0.001 * rho * (freq_ghz / 22) ** 2
        else:
            gamma_w = 0.01 * rho

        return gamma_o + gamma_w

    @staticmethod
    def get_weather_description(loss_db: float) -> str:
        """Describe atmospheric conditions based on loss"""
        for ceiling, label in ((0.1, "clear"), (0.5, "normal"), (2.0, "humid")):
            if loss_db < ceiling:
                return label
        return "foggy"


atmospheric_service = AtmosphericService()
|
||||||
122
backend/app/services/boundary_service.py
Normal file
122
backend/app/services/boundary_service.py
Normal file
@@ -0,0 +1,122 @@
|
|||||||
|
"""
Coverage boundary calculation service.

Computes concave hull (alpha shape) from coverage points to generate
a realistic boundary that follows actual coverage contour.
"""

import logging
from typing import Optional

logger = logging.getLogger(__name__)


def calculate_coverage_boundary(
    points: list[dict],
    threshold_dbm: float = -100,
    simplify_tolerance: float = 0.001,
) -> list[dict]:
    """
    Calculate coverage boundary as concave hull of points above threshold.

    Args:
        points: List of coverage points with 'lat', 'lon', 'rsrp' keys
        threshold_dbm: RSRP threshold - points below this are excluded
        simplify_tolerance: Simplification tolerance in degrees (~100m per 0.001)

    Returns:
        List of {'lat': float, 'lon': float} coordinates forming boundary polygon.
        Empty list if boundary cannot be computed.
    """
    try:
        from shapely.geometry import MultiPoint
        from shapely import concave_hull
    except ImportError:
        logger.warning("Shapely not installed - boundary calculation disabled")
        return []

    # Keep only points at or above the RSRP threshold.
    valid_coords = [
        (pt['lon'], pt['lat'])  # Shapely uses (x, y) = (lon, lat)
        for pt in points
        if pt.get('rsrp', -999) >= threshold_dbm
    ]

    if len(valid_coords) < 3:
        logger.debug(f"Not enough points for boundary: {len(valid_coords)}")
        return []

    def _exterior_as_latlon(polygon) -> list[dict]:
        # Exterior ring converted back to lat/lon dicts.
        return [{'lat': c[1], 'lon': c[0]} for c in polygon.exterior.coords]

    try:
        # Concave hull (alpha shape): ratio 0 = convex hull, 1 = very
        # tight fit; 0.3 balances detail against smoothness.
        hull = concave_hull(MultiPoint(valid_coords), ratio=0.3)

        if hull.is_empty:
            logger.debug("Concave hull is empty")
            return []

        # Simplify to reduce point count (0.001 deg is roughly 100 m).
        if simplify_tolerance > 0:
            hull = hull.simplify(simplify_tolerance, preserve_topology=True)

        # Extract coordinates depending on the resulting geometry type.
        kind = hull.geom_type
        if kind == 'Polygon':
            return _exterior_as_latlon(hull)
        if kind == 'MultiPolygon':
            # Use the largest polygon's exterior.
            return _exterior_as_latlon(max(hull.geoms, key=lambda g: g.area))
        if kind == 'GeometryCollection':
            polygons = [g for g in hull.geoms if g.geom_type == 'Polygon']
            if polygons:
                return _exterior_as_latlon(max(polygons, key=lambda g: g.area))

        logger.debug(f"Unexpected hull geometry type: {hull.geom_type}")
        return []

    except Exception as e:
        logger.warning(f"Boundary calculation error: {e}")
        return []
|
||||||
|
|
||||||
|
|
||||||
|
def calculate_multi_site_boundaries(
    points: list[dict],
    threshold_dbm: float = -100,
) -> dict[str, list[dict]]:
    """
    Calculate separate boundaries for each site's coverage area.

    Args:
        points: Coverage points with 'lat', 'lon', 'rsrp', 'site_id' keys
        threshold_dbm: RSRP threshold

    Returns:
        Dict mapping site_id to boundary coordinates list.
    """
    # Bucket points by site (missing site_id falls into 'default').
    grouped: dict[str, list[dict]] = {}
    for pt in points:
        grouped.setdefault(pt.get('site_id', 'default'), []).append(pt)

    # Keep only sites for which a boundary could actually be computed.
    result: dict[str, list[dict]] = {}
    for sid, site_points in grouped.items():
        outline = calculate_coverage_boundary(site_points, threshold_dbm)
        if outline:
            result[sid] = outline
    return result
|
||||||
334
backend/app/services/buildings_service.py
Normal file
334
backend/app/services/buildings_service.py
Normal file
@@ -0,0 +1,334 @@
|
|||||||
|
import os
|
||||||
|
import re
|
||||||
|
import asyncio
|
||||||
|
import httpx
|
||||||
|
import json
|
||||||
|
from typing import List, Optional
|
||||||
|
from pydantic import BaseModel
|
||||||
|
from pathlib import Path
|
||||||
|
from datetime import datetime, timedelta
|
||||||
|
|
||||||
|
|
||||||
|
class Building(BaseModel):
    """Single building footprint"""
    # OSM element id of the source way.
    id: int
    geometry: List[List[float]]  # [[lon, lat], ...] - closed footprint ring
    height: float  # meters (estimated when no explicit OSM tag exists)
    levels: Optional[int] = None  # parsed "building:levels" tag, when present
    building_type: Optional[str] = None  # value of the OSM "building" tag
    material: Optional[str] = None  # Detected material type
    tags: dict = {}  # Store all OSM tags for material detection
|
||||||
|
|
||||||
|
|
||||||
|
class OSMCache:
    """Local cache for OSM data with expiry"""

    # Entries older than this many days are treated as misses.
    CACHE_EXPIRY_DAYS = 30

    def __init__(self, cache_type: str):
        self.data_path = Path(os.environ.get('RFCP_DATA_PATH', './data'))
        self.cache_path = self.data_path / 'osm' / cache_type
        self.cache_path.mkdir(parents=True, exist_ok=True)

    def _get_cache_key(self, min_lat: float, min_lon: float, max_lat: float, max_lon: float) -> str:
        """Generate cache key from bbox (rounded to 0.01 degree grid)"""
        return f"{min_lat:.2f}_{min_lon:.2f}_{max_lat:.2f}_{max_lon:.2f}"

    def _get_cache_file(self, cache_key: str) -> Path:
        return self.cache_path / f"{cache_key}.json"

    def get(self, min_lat: float, min_lon: float, max_lat: float, max_lon: float) -> Optional[dict]:
        """Get cached data if available and not expired"""
        target = self._get_cache_file(
            self._get_cache_key(min_lat, min_lon, max_lat, max_lon)
        )
        if not target.exists():
            return None

        try:
            payload = json.loads(target.read_text())

            # Expiry check against the stored write timestamp.
            written = datetime.fromisoformat(payload.get('_cached_at', '2000-01-01'))
            if datetime.now() - written > timedelta(days=self.CACHE_EXPIRY_DAYS):
                return None

            return payload.get('data')

        except Exception as e:
            print(f"[OSMCache] Failed to read cache: {e}")
            return None

    def set(self, min_lat: float, min_lon: float, max_lat: float, max_lon: float, data):
        """Save data to cache"""
        target = self._get_cache_file(
            self._get_cache_key(min_lat, min_lon, max_lat, max_lon)
        )
        try:
            target.write_text(json.dumps({
                '_cached_at': datetime.now().isoformat(),
                '_bbox': [min_lat, min_lon, max_lat, max_lon],
                'data': data,
            }))
        except Exception as e:
            print(f"[OSMCache] Failed to write cache: {e}")

    def clear(self):
        """Clear all cached data"""
        for entry in self.cache_path.glob("*.json"):
            entry.unlink()

    def get_size_mb(self) -> float:
        """Get cache size in MB"""
        byte_count = sum(entry.stat().st_size for entry in self.cache_path.glob("*.json"))
        return byte_count / (1024 * 1024)
|
||||||
|
|
||||||
|
|
||||||
|
class BuildingsService:
    """
    OpenStreetMap buildings via Overpass API with local caching.
    """

    # Overpass mirrors, alternated across retry attempts.
    OVERPASS_URLS = [
        "https://overpass-api.de/api/interpreter",
        "https://overpass.kumi.systems/api/interpreter",
    ]
    DEFAULT_LEVEL_HEIGHT = 3.0  # meters per floor
    DEFAULT_BUILDING_HEIGHT = 9.0  # 3 floors if unknown

    def __init__(self):
        # Disk-backed cache with expiry plus a small in-process dict keyed
        # by bbox string (bounded by _max_cache_size, FIFO eviction).
        self.cache = OSMCache('buildings')
        self._memory_cache: dict[str, List[Building]] = {}
        self._max_cache_size = 50

    @staticmethod
    def _safe_int(value) -> Optional[int]:
        """Safely parse int from OSM tag (handles '1a', '2-3', '5+', etc.)"""
        if not value:
            return None
        try:
            return int(value)
        except (ValueError, TypeError):
            # Fall back to the first run of digits in the string, if any.
            match = re.search(r'\d+', str(value))
            if match:
                return int(match.group())
            return None

    @staticmethod
    def _safe_float(value) -> Optional[float]:
        """Safely parse float from OSM tag (handles '10 m', '~12', '10m')"""
        if not value:
            return None
        try:
            # Strip unit suffix and approximation marker before parsing.
            cleaned = str(value).lower().replace('m', '').replace('~', '').strip()
            return float(cleaned)
        except (ValueError, TypeError):
            match = re.search(r'[\d.]+', str(value))
            if match:
                return float(match.group())
            return None

    def _bbox_key(self, min_lat: float, min_lon: float, max_lat: float, max_lon: float) -> str:
        """Generate memory cache key for bbox"""
        return f"{min_lat:.2f}_{min_lon:.2f}_{max_lat:.2f}_{max_lon:.2f}"

    async def fetch_buildings(
        self,
        min_lat: float, min_lon: float,
        max_lat: float, max_lon: float,
        use_cache: bool = True
    ) -> List[Building]:
        """Fetch buildings in bounding box from OSM, using cache if available"""
        bbox_key = self._bbox_key(min_lat, min_lon, max_lat, max_lon)

        # Check memory cache
        if use_cache and bbox_key in self._memory_cache:
            return self._memory_cache[bbox_key]

        # Check disk cache (OSMCache with expiry)
        if use_cache:
            cached = self.cache.get(min_lat, min_lon, max_lat, max_lon)
            if cached is not None:
                print(f"[Buildings] Cache hit for bbox")
                buildings = [Building(**b) for b in cached]
                self._memory_cache[bbox_key] = buildings
                return buildings

        # Fetch from Overpass API with retry
        print(f"[Buildings] Fetching from Overpass API...")

        # Overpass QL: building ways/relations within the bbox, then the
        # recursion (">") pulls in the member nodes needed for geometry.
        query = f"""
        [out:json][timeout:30];
        (
          way["building"]({min_lat},{min_lon},{max_lat},{max_lon});
          relation["building"]({min_lat},{min_lon},{max_lat},{max_lon});
        );
        out body;
        >;
        out skel qt;
        """

        data = None
        max_retries = 3
        for attempt in range(max_retries):
            # Alternate between mirrors across attempts.
            url = self.OVERPASS_URLS[attempt % len(self.OVERPASS_URLS)]
            try:
                timeout = 60.0 * (attempt + 1)  # 60s, 120s, 180s
                async with httpx.AsyncClient(timeout=timeout) as client:
                    response = await client.post(url, data={"data": query})
                    response.raise_for_status()
                    data = response.json()
                    break
            except Exception as e:
                print(f"[Buildings] Overpass attempt {attempt + 1}/{max_retries} failed ({url}): {e}")
                if attempt < max_retries - 1:
                    wait_time = 2 ** attempt  # 1s, 2s
                    print(f"[Buildings] Retrying in {wait_time}s...")
                    await asyncio.sleep(wait_time)
                else:
                    print(f"[Buildings] All {max_retries} attempts failed")
                    return []

        buildings = self._parse_overpass_response(data)

        # Save to disk cache
        if buildings:
            self.cache.set(min_lat, min_lon, max_lat, max_lon,
                           [b.model_dump() for b in buildings])

        # Memory cache with size limit (evicts oldest insertion, FIFO).
        if len(self._memory_cache) >= self._max_cache_size:
            oldest = next(iter(self._memory_cache))
            del self._memory_cache[oldest]
        self._memory_cache[bbox_key] = buildings

        return buildings

    def _parse_overpass_response(self, data: dict) -> List[Building]:
        """Parse Overpass JSON response into Building objects"""
        buildings = []

        # Build node lookup: node id -> (lon, lat)
        nodes = {}
        for element in data.get("elements", []):
            if element["type"] == "node":
                nodes[element["id"]] = (element["lon"], element["lat"])

        # Process ways (building footprints). NOTE(review): relations
        # returned by the query are not assembled here - only ways.
        for element in data.get("elements", []):
            if element["type"] != "way":
                continue

            tags = element.get("tags", {})
            if "building" not in tags:
                continue

            geometry = []
            for node_id in element.get("nodes", []):
                if node_id in nodes:
                    geometry.append(list(nodes[node_id]))

            # Fewer than 3 vertices cannot form a footprint polygon.
            if len(geometry) < 3:
                continue

            height = self._estimate_height(tags)

            # Prefer the whole-building material tag over the facade one.
            material_str = None
            if "building:material" in tags:
                material_str = tags["building:material"]
            elif "building:facade:material" in tags:
                material_str = tags["building:facade:material"]

            buildings.append(Building(
                id=element["id"],
                geometry=geometry,
                height=height,
                levels=self._safe_int(tags.get("building:levels")),
                building_type=tags.get("building"),
                material=material_str,
                tags=tags
            ))

        return buildings

    def _estimate_height(self, tags: dict) -> float:
        """Estimate building height from OSM tags"""
        # Priority: explicit height tag, then floor count, then a
        # per-building-type heuristic, then the global default.
        if "height" in tags:
            h = self._safe_float(tags["height"])
            if h is not None and h > 0:
                return h

        if "building:levels" in tags:
            levels = self._safe_int(tags["building:levels"])
            if levels is not None and levels > 0:
                return levels * self.DEFAULT_LEVEL_HEIGHT

        building_type = tags.get("building", "yes")
        type_heights = {
            "house": 6.0,
            "residential": 12.0,
            "apartments": 18.0,
            "commercial": 12.0,
            "industrial": 8.0,
            "warehouse": 6.0,
            "garage": 3.0,
            "shed": 2.5,
            "roof": 3.0,
            "church": 15.0,
            "cathedral": 30.0,
            "hospital": 15.0,
            "school": 12.0,
            "university": 15.0,
            "office": 20.0,
            "retail": 6.0,
        }

        return type_heights.get(building_type, self.DEFAULT_BUILDING_HEIGHT)

    def point_in_building(self, lat: float, lon: float, building: Building) -> bool:
        """Check if point is inside building footprint (ray casting)"""
        x, y = lon, lat
        polygon = building.geometry
        n = len(polygon)
        inside = False

        # Standard even-odd rule: toggle on every edge a horizontal ray
        # from the point crosses.
        j = n - 1
        for i in range(n):
            xi, yi = polygon[i]
            xj, yj = polygon[j]

            if ((yi > y) != (yj > y)) and (x < (xj - xi) * (y - yi) / (yj - yi) + xi):
                inside = not inside
            j = i

        return inside

    def line_intersects_building(
        self,
        lat1: float, lon1: float, height1: float,
        lat2: float, lon2: float, height2: float,
        building: Building
    ) -> Optional[float]:
        """Check if line segment intersects building.
        Returns distance along path where intersection occurs, or None."""
        # Imported lazily to avoid a circular import at module load.
        from app.services.terrain_service import TerrainService

        # Sample the segment at 20 evenly spaced points; the far endpoint
        # (t=1) is never sampled - TODO confirm that is intended.
        num_samples = 20
        for i in range(num_samples):
            t = i / num_samples
            lat = lat1 + t * (lat2 - lat1)
            lon = lon1 + t * (lon2 - lon1)
            height = height1 + t * (height2 - height1)

            if self.point_in_building(lat, lon, building):
                # Blocked only if the path is below the roof at this point.
                if height < building.height:
                    dist = t * TerrainService.haversine_distance(lat1, lon1, lat2, lon2)
                    return dist

        return None


# Singleton instance
buildings_service = BuildingsService()
|
||||||
250
backend/app/services/cache.py
Normal file
250
backend/app/services/cache.py
Normal file
@@ -0,0 +1,250 @@
|
|||||||
|
"""
|
||||||
|
Unified cache management for RFCP services.
|
||||||
|
|
||||||
|
Provides a single interface for managing all cached data:
|
||||||
|
- Terrain tiles (SRTM .hgt files, in-memory NumPy arrays)
|
||||||
|
- OSM building data (disk JSON + in-memory)
|
||||||
|
- Spatial index data
|
||||||
|
|
||||||
|
Tracks memory usage and enforces limits to prevent
|
||||||
|
memory explosion during large-area calculations.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import json
|
||||||
|
import time
|
||||||
|
import threading
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Optional, Dict, Any, Callable
|
||||||
|
from datetime import datetime, timedelta
|
||||||
|
|
||||||
|
|
||||||
|
class CacheEntry:
    """Single cache entry with metadata (value, recency, size, hit count)."""
    __slots__ = ('value', 'created_at', 'last_accessed', 'size_bytes', 'hits')

    def __init__(self, value: Any, size_bytes: int = 0):
        self.value = value
        self.created_at = time.monotonic()
        self.last_accessed = self.created_at
        self.size_bytes = size_bytes  # caller-estimated footprint; 0 = untracked
        self.hits = 0

    def touch(self):
        """Record an access: refresh recency and bump the hit counter."""
        self.last_accessed = time.monotonic()
        self.hits += 1


class MemoryCache:
    """
    In-memory LRU cache with byte-level tracking.

    Thread-safe. Evicts least-recently-used entries when
    max_entries or max_size_bytes is exceeded.
    """

    def __init__(self, name: str, max_entries: int = 100, max_size_bytes: int = 500 * 1024 * 1024):
        self.name = name
        self.max_entries = max_entries
        self.max_size_bytes = max_size_bytes
        self._entries: Dict[str, CacheEntry] = {}
        self._lock = threading.Lock()
        self._total_bytes = 0
        self._total_hits = 0
        self._total_misses = 0

    def get(self, key: str) -> Optional[Any]:
        """Return the cached value or None; updates LRU order and hit stats."""
        with self._lock:
            entry = self._entries.get(key)
            if entry is None:
                self._total_misses += 1
                return None
            entry.touch()
            self._total_hits += 1
            return entry.value

    def put(self, key: str, value: Any, size_bytes: int = 0):
        """Insert or replace a value, evicting LRU entries to respect limits."""
        with self._lock:
            # Remove existing entry if present (its bytes are re-accounted).
            if key in self._entries:
                self._total_bytes -= self._entries[key].size_bytes
                del self._entries[key]

            # Evict while over either limit. FIX: the loop is guarded on a
            # non-empty dict — previously, an unsatisfiable entry limit
            # (e.g. max_entries == 0) spun forever because _evict_lru()
            # is a no-op on an empty cache while the condition stayed true.
            while self._entries and (
                len(self._entries) >= self.max_entries
                or self._total_bytes + size_bytes > self.max_size_bytes
            ):
                self._evict_lru()

            self._entries[key] = CacheEntry(value, size_bytes)
            self._total_bytes += size_bytes

    def remove(self, key: str) -> bool:
        """Remove an entry; returns True if it existed."""
        with self._lock:
            entry = self._entries.pop(key, None)
            if entry:
                self._total_bytes -= entry.size_bytes
                return True
            return False

    def clear(self):
        """Drop all entries and reset the byte count."""
        with self._lock:
            self._entries.clear()
            self._total_bytes = 0

    def _evict_lru(self):
        """Remove least-recently-used entry. Must hold _lock."""
        if not self._entries:
            return
        lru_key = min(self._entries, key=lambda k: self._entries[k].last_accessed)
        entry = self._entries.pop(lru_key)
        self._total_bytes -= entry.size_bytes

    @property
    def size(self) -> int:
        """Number of cached entries."""
        return len(self._entries)

    @property
    def size_bytes(self) -> int:
        """Sum of caller-reported entry sizes."""
        return self._total_bytes

    @property
    def size_mb(self) -> float:
        """size_bytes expressed in megabytes."""
        return self._total_bytes / (1024 * 1024)

    def stats(self) -> dict:
        """Snapshot of entry count, size, and hit/miss statistics."""
        total = self._total_hits + self._total_misses
        return {
            "name": self.name,
            "entries": len(self._entries),
            "size_mb": round(self.size_mb, 1),
            "max_size_mb": round(self.max_size_bytes / (1024 * 1024), 1),
            "hits": self._total_hits,
            "misses": self._total_misses,
            "hit_rate": round(self._total_hits / total * 100, 1) if total > 0 else 0,
        }
|
||||||
|
|
||||||
|
|
||||||
|
class DiskCache:
|
||||||
|
"""
|
||||||
|
Persistent disk cache with TTL expiry.
|
||||||
|
|
||||||
|
Used for OSM building data and other HTTP responses.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, name: str, base_path: Optional[Path] = None, ttl_days: int = 30):
|
||||||
|
self.name = name
|
||||||
|
self.ttl_days = ttl_days
|
||||||
|
if base_path is None:
|
||||||
|
base_path = Path(os.environ.get('RFCP_DATA_PATH', './data'))
|
||||||
|
self.cache_path = base_path / 'cache' / name
|
||||||
|
self.cache_path.mkdir(parents=True, exist_ok=True)
|
||||||
|
|
||||||
|
def _key_to_file(self, key: str) -> Path:
|
||||||
|
# Sanitize key for filesystem
|
||||||
|
safe = key.replace('/', '_').replace('\\', '_').replace(':', '_')
|
||||||
|
return self.cache_path / f"{safe}.json"
|
||||||
|
|
||||||
|
def get(self, key: str) -> Optional[Any]:
|
||||||
|
path = self._key_to_file(key)
|
||||||
|
if not path.exists():
|
||||||
|
return None
|
||||||
|
try:
|
||||||
|
data = json.loads(path.read_text())
|
||||||
|
cached_at = datetime.fromisoformat(data.get('_ts', '2000-01-01'))
|
||||||
|
if datetime.now() - cached_at > timedelta(days=self.ttl_days):
|
||||||
|
path.unlink(missing_ok=True)
|
||||||
|
return None
|
||||||
|
return data.get('v')
|
||||||
|
except Exception:
|
||||||
|
return None
|
||||||
|
|
||||||
|
def put(self, key: str, value: Any):
|
||||||
|
path = self._key_to_file(key)
|
||||||
|
try:
|
||||||
|
path.write_text(json.dumps({
|
||||||
|
'_ts': datetime.now().isoformat(),
|
||||||
|
'v': value,
|
||||||
|
}))
|
||||||
|
except Exception as e:
|
||||||
|
print(f"[DiskCache:{self.name}] Write error: {e}")
|
||||||
|
|
||||||
|
def remove(self, key: str) -> bool:
|
||||||
|
path = self._key_to_file(key)
|
||||||
|
if path.exists():
|
||||||
|
path.unlink()
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
def clear(self):
|
||||||
|
for f in self.cache_path.glob("*.json"):
|
||||||
|
f.unlink(missing_ok=True)
|
||||||
|
|
||||||
|
def size_mb(self) -> float:
|
||||||
|
total = sum(f.stat().st_size for f in self.cache_path.glob("*.json") if f.exists())
|
||||||
|
return total / (1024 * 1024)
|
||||||
|
|
||||||
|
def stats(self) -> dict:
    """Summarise this cache: name, entry count, disk footprint, TTL."""
    entry_count = len(list(self.cache_path.glob("*.json")))
    return {
        "name": self.name,
        "entries": entry_count,
        "size_mb": round(self.size_mb(), 1),
        "ttl_days": self.ttl_days,
    }
|
||||||
|
|
||||||
|
|
||||||
|
class CacheManager:
    """
    Unified cache manager for all RFCP services.

    Provides:
    - terrain: MemoryCache for SRTM tile arrays (~25MB each)
    - buildings: MemoryCache for building lists
    - spatial: MemoryCache for spatial index objects
    - osm_disk: DiskCache for OSM API responses
    """

    def __init__(self):
        # (attribute, cache name, entry cap, byte cap) — one MemoryCache each.
        memory_specs = (
            ("terrain", "terrain", 20, 500 * 1024 * 1024),       # ~25MB per SRTM tile
            ("buildings", "buildings", 50, 200 * 1024 * 1024),
            ("spatial", "spatial_index", 50, 100 * 1024 * 1024),
        )
        for attr, cache_name, entry_cap, byte_cap in memory_specs:
            setattr(self, attr, MemoryCache(
                cache_name,
                max_entries=entry_cap,
                max_size_bytes=byte_cap,
            ))
        self.osm_disk = DiskCache("osm", ttl_days=30)

    def clear_all(self):
        """Clear all caches."""
        for cache in (self.terrain, self.buildings, self.spatial, self.osm_disk):
            cache.clear()

    def stats(self) -> dict:
        """Get stats for all caches."""
        in_memory_mb = self.terrain.size_mb + self.buildings.size_mb + self.spatial.size_mb
        return {
            "terrain": self.terrain.stats(),
            "buildings": self.buildings.stats(),
            "spatial": self.spatial.stats(),
            "osm_disk": self.osm_disk.stats(),
            "total_memory_mb": round(in_memory_mb, 1),
        }
|
||||||
|
|
||||||
|
|
||||||
|
# Singleton
# Created at import time so every service shares one set of caches;
# import `cache_manager` rather than constructing CacheManager() again.
cache_manager = CacheManager()
|
||||||
241
backend/app/services/cache_db.py
Normal file
241
backend/app/services/cache_db.py
Normal file
@@ -0,0 +1,241 @@
|
|||||||
|
"""
|
||||||
|
SQLite cache for OSM data — buildings, vegetation, water, streets.
|
||||||
|
|
||||||
|
Replaces in-memory caching for large-area calculations. Instead of holding
|
||||||
|
hundreds of thousands of buildings in RAM, data is stored on disk in SQLite
|
||||||
|
and queried per-tile using spatial bbox queries.
|
||||||
|
|
||||||
|
Location: ~/.rfcp/osm_cache.db
|
||||||
|
"""
|
||||||
|
|
||||||
|
import json
|
||||||
|
import time
|
||||||
|
import sqlite3
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import List, Dict, Optional
|
||||||
|
|
||||||
|
|
||||||
|
def _default_db_path() -> str:
|
||||||
|
"""Get default database path at ~/.rfcp/osm_cache.db."""
|
||||||
|
cache_dir = Path.home() / '.rfcp'
|
||||||
|
cache_dir.mkdir(parents=True, exist_ok=True)
|
||||||
|
return str(cache_dir / 'osm_cache.db')
|
||||||
|
|
||||||
|
|
||||||
|
class OSMCacheDB:
    """SQLite-backed cache for OSM feature data with bbox queries.

    Stores buildings and vegetation as JSON blobs with bounding-box
    columns for fast spatial queries. Cache freshness is tracked
    per 1-degree cell (matching the OSM grid fetch pattern).
    """

    def __init__(self, db_path: Optional[str] = None):
        # Only record the target path here; the connection is opened
        # lazily via the `conn` property on first use.
        if db_path is None:
            db_path = _default_db_path()
        self.db_path = db_path
        self._conn: Optional[sqlite3.Connection] = None

    @property
    def conn(self) -> sqlite3.Connection:
        """Lazy connection with WAL mode for concurrent reads."""
        if self._conn is None:
            # check_same_thread=False lets multiple threads share this
            # connection. NOTE(review): sqlite3 does not make concurrent
            # writes on one connection safe by itself — confirm callers
            # serialize writes or tolerate sqlite3.OperationalError.
            self._conn = sqlite3.connect(self.db_path, check_same_thread=False)
            self._conn.execute("PRAGMA journal_mode=WAL")
            self._conn.execute("PRAGMA synchronous=NORMAL")
            self._init_tables()
        return self._conn

    def _init_tables(self):
        """Create tables and indexes (idempotent via IF NOT EXISTS)."""
        assert self._conn is not None
        self._conn.executescript("""
            CREATE TABLE IF NOT EXISTS buildings (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                osm_id INTEGER,
                min_lat REAL NOT NULL,
                min_lon REAL NOT NULL,
                max_lat REAL NOT NULL,
                max_lon REAL NOT NULL,
                height REAL DEFAULT 10.0,
                data TEXT NOT NULL,
                cell_key TEXT NOT NULL
            );
            CREATE INDEX IF NOT EXISTS idx_bld_cell ON buildings(cell_key);
            CREATE INDEX IF NOT EXISTS idx_bld_bbox
                ON buildings(min_lat, max_lat, min_lon, max_lon);

            CREATE TABLE IF NOT EXISTS vegetation (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                osm_id INTEGER,
                min_lat REAL NOT NULL,
                min_lon REAL NOT NULL,
                max_lat REAL NOT NULL,
                max_lon REAL NOT NULL,
                data TEXT NOT NULL,
                cell_key TEXT NOT NULL
            );
            CREATE INDEX IF NOT EXISTS idx_veg_cell ON vegetation(cell_key);
            CREATE INDEX IF NOT EXISTS idx_veg_bbox
                ON vegetation(min_lat, max_lat, min_lon, max_lon);

            CREATE TABLE IF NOT EXISTS cache_meta (
                cell_key TEXT NOT NULL,
                data_type TEXT NOT NULL,
                fetched_at REAL NOT NULL,
                item_count INTEGER DEFAULT 0,
                PRIMARY KEY (cell_key, data_type)
            );
        """)
        self._conn.commit()

    # ── Cell key helpers ──

    @staticmethod
    def cell_key(min_lat: float, min_lon: float, max_lat: float, max_lon: float) -> str:
        """Generate cell key from bbox (matches 1-degree grid alignment).

        NOTE(review): ``:.0f`` rounds to the nearest integer rather than
        flooring — harmless when inputs are already whole-degree aligned,
        but verify callers never pass unaligned bboxes.
        """
        return f"{min_lat:.0f},{min_lon:.0f},{max_lat:.0f},{max_lon:.0f}"

    def is_cell_cached(
        self, cell_key: str, data_type: str, max_age_hours: float = 24.0
    ) -> bool:
        """Check if cell data is cached and fresh."""
        cursor = self.conn.execute(
            "SELECT fetched_at FROM cache_meta "
            "WHERE cell_key = ? AND data_type = ?",
            (cell_key, data_type),
        )
        row = cursor.fetchone()
        if row is None:
            return False
        # fetched_at is a time.time() epoch value (see mark_cell_cached).
        age_hours = (time.time() - row[0]) / 3600
        return age_hours < max_age_hours

    def mark_cell_cached(self, cell_key: str, data_type: str, item_count: int):
        """Record that a cell has been fetched.

        INSERT OR REPLACE refreshes the timestamp on re-fetch of the same
        (cell_key, data_type) primary key.
        """
        self.conn.execute(
            "INSERT OR REPLACE INTO cache_meta "
            "(cell_key, data_type, fetched_at, item_count) VALUES (?, ?, ?, ?)",
            (cell_key, data_type, time.time(), item_count),
        )
        self.conn.commit()

    # ── Buildings ──

    def insert_buildings_bulk(self, buildings_data: List[Dict], cell_key: str):
        """Bulk insert serialised building dicts for a cell.

        Each dict must have 'geometry' (list of [lon, lat]) and 'id'.
        Entries with empty geometry are skipped; the bbox columns are
        derived from the geometry's min/max, and the full dict is stored
        as a JSON blob in `data`.
        """
        rows = []
        for b in buildings_data:
            geom = b.get('geometry', [])
            if not geom:
                continue
            # geometry points are [lon, lat] pairs.
            lats = [p[1] for p in geom]
            lons = [p[0] for p in geom]
            rows.append((
                b.get('id', 0),
                min(lats), min(lons), max(lats), max(lons),
                b.get('height', 10.0),
                json.dumps(b),
                cell_key,
            ))

        if rows:
            self.conn.executemany(
                "INSERT INTO buildings "
                "(osm_id, min_lat, min_lon, max_lat, max_lon, height, data, cell_key) "
                "VALUES (?, ?, ?, ?, ?, ?, ?, ?)",
                rows,
            )
            self.conn.commit()

    def query_buildings_bbox(
        self,
        min_lat: float, max_lat: float,
        min_lon: float, max_lon: float,
        limit: int = 20000,
    ) -> List[Dict]:
        """Query buildings whose bbox overlaps the given bbox.

        Standard interval-overlap test on both axes; `limit` bounds the
        result set (no ORDER BY, so which rows are dropped is arbitrary).
        """
        cursor = self.conn.execute(
            "SELECT data FROM buildings "
            "WHERE max_lat >= ? AND min_lat <= ? "
            "AND max_lon >= ? AND min_lon <= ? "
            "LIMIT ?",
            (min_lat, max_lat, min_lon, max_lon, limit),
        )
        return [json.loads(row[0]) for row in cursor]

    # ── Vegetation ──

    def insert_vegetation_bulk(self, veg_data: List[Dict], cell_key: str):
        """Bulk insert serialised vegetation dicts for a cell.

        Same shape as insert_buildings_bulk but without a height column.
        """
        rows = []
        for v in veg_data:
            geom = v.get('geometry', [])
            if not geom:
                continue
            lats = [p[1] for p in geom]
            lons = [p[0] for p in geom]
            rows.append((
                v.get('id', 0),
                min(lats), min(lons), max(lats), max(lons),
                json.dumps(v),
                cell_key,
            ))

        if rows:
            self.conn.executemany(
                "INSERT INTO vegetation "
                "(osm_id, min_lat, min_lon, max_lat, max_lon, data, cell_key) "
                "VALUES (?, ?, ?, ?, ?, ?, ?)",
                rows,
            )
            self.conn.commit()

    def query_vegetation_bbox(
        self,
        min_lat: float, max_lat: float,
        min_lon: float, max_lon: float,
        limit: int = 10000,
    ) -> List[Dict]:
        """Query vegetation whose bbox overlaps the given bbox."""
        cursor = self.conn.execute(
            "SELECT data FROM vegetation "
            "WHERE max_lat >= ? AND min_lat <= ? "
            "AND max_lon >= ? AND min_lon <= ? "
            "LIMIT ?",
            (min_lat, max_lat, min_lon, max_lon, limit),
        )
        return [json.loads(row[0]) for row in cursor]

    # ── Housekeeping ──

    def close(self):
        """Close the database connection (reopened lazily on next use)."""
        if self._conn:
            self._conn.close()
            self._conn = None

    def get_stats(self) -> Dict[str, int]:
        """Get cache statistics: row counts per table plus cached cells."""
        stats: Dict[str, int] = {}
        for table in ('buildings', 'vegetation'):
            # Table names come from a fixed tuple, not user input.
            cursor = self.conn.execute(f"SELECT COUNT(*) FROM {table}")  # noqa: S608
            stats[table] = cursor.fetchone()[0]
        cursor = self.conn.execute("SELECT COUNT(*) FROM cache_meta")
        stats['cached_cells'] = cursor.fetchone()[0]
        return stats
|
||||||
|
|
||||||
|
|
||||||
|
# ── Singleton ──
|
||||||
|
|
||||||
|
_cache_db: Optional[OSMCacheDB] = None


def get_osm_cache_db() -> OSMCacheDB:
    """Return the process-wide OSMCacheDB, creating it on first call."""
    global _cache_db
    if _cache_db is not None:
        return _cache_db
    _cache_db = OSMCacheDB()
    return _cache_db
|
||||||
1633
backend/app/services/coverage_service.py
Normal file
1633
backend/app/services/coverage_service.py
Normal file
File diff suppressed because it is too large
Load Diff
1005
backend/app/services/dominant_path_service.py
Normal file
1005
backend/app/services/dominant_path_service.py
Normal file
File diff suppressed because it is too large
Load Diff
309
backend/app/services/geometry_vectorized.py
Normal file
309
backend/app/services/geometry_vectorized.py
Normal file
@@ -0,0 +1,309 @@
|
|||||||
|
"""
|
||||||
|
Vectorized geometry operations using NumPy.
|
||||||
|
|
||||||
|
All functions operate on arrays, not single values.
|
||||||
|
Provides 10-50x speedup over Python loops for batch geometry checks.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import numpy as np
|
||||||
|
from typing import Tuple, Optional
|
||||||
|
|
||||||
|
EARTH_RADIUS = 6371000 # meters
|
||||||
|
|
||||||
|
|
||||||
|
def haversine_batch(
    lat1: float, lon1: float,
    lats2: np.ndarray, lons2: np.ndarray,
) -> np.ndarray:
    """Distance from one point to many points (meters).

    Standard haversine on a spherical Earth of radius EARTH_RADIUS.
    """
    phi1 = np.radians(lat1)
    lam1 = np.radians(lon1)
    phi2 = np.radians(lats2)
    lam2 = np.radians(lons2)

    dphi = phi2 - phi1
    dlam = lam2 - lam1

    # Haversine term: sin²(Δφ/2) + cosφ₁·cosφ₂·sin²(Δλ/2)
    h = np.sin(dphi / 2) ** 2 + np.cos(phi1) * np.cos(phi2) * np.sin(dlam / 2) ** 2
    central_angle = 2 * np.arcsin(np.sqrt(h))

    return EARTH_RADIUS * central_angle
|
||||||
|
|
||||||
|
|
||||||
|
def points_to_local_coords(
    ref_lat: float, ref_lon: float,
    lats: np.ndarray, lons: np.ndarray,
) -> Tuple[np.ndarray, np.ndarray]:
    """Convert lat/lon to local X/Y meters (equirectangular projection).

    X is east of ref_lon, Y is north of ref_lat; longitude degrees are
    scaled by cos(ref_lat). Constants are metres per degree.
    """
    cos_ref = np.cos(np.radians(ref_lat))
    east = (lons - ref_lon) * 111320.0 * cos_ref
    north = (lats - ref_lat) * 110540.0
    return east, north
|
||||||
|
|
||||||
|
|
||||||
|
def line_segments_intersect_batch(
    p1: np.ndarray, p2: np.ndarray,
    segments_start: np.ndarray, segments_end: np.ndarray,
) -> Tuple[np.ndarray, np.ndarray]:
    """Check if line p1->p2 intersects with N segments.

    Args:
        p1, p2: shape (2,)
        segments_start, segments_end: shape (N, 2)

    Returns:
        intersects: bool array (N,)
        t_values: parameter along p1->p2 (N,)

    Bug fix: the parameters are derived from q = segments_start - p1,
    per the standard solution of p1 + t*d == start + u*seg_d
    (t = (q × seg_d)/(d × seg_d), u = (q × d)/(d × seg_d)). The previous
    version used p1 - segments_start, which negates both t and u, so real
    crossings (t, u in [0, 1]) were reported as misses.
    """
    d = p2 - p1
    seg_d = segments_end - segments_start

    # 2D cross product of the two direction vectors; ~0 ⇒ parallel.
    cross = d[0] * seg_d[:, 1] - d[1] * seg_d[:, 0]

    parallel_mask = np.abs(cross) < 1e-10
    # Avoid divide-by-zero for parallel rows; they are masked out below.
    cross_safe = np.where(parallel_mask, 1.0, cross)

    # Offset from the line origin to each segment start.
    q = segments_start - p1

    t = (q[:, 0] * seg_d[:, 1] - q[:, 1] * seg_d[:, 0]) / cross_safe
    u = (q[:, 0] * d[1] - q[:, 1] * d[0]) / cross_safe

    # Crossing must lie within both the line span (t) and the segment (u).
    intersects = ~parallel_mask & (t >= 0) & (t <= 1) & (u >= 0) & (u <= 1)

    return intersects, t
|
||||||
|
|
||||||
|
|
||||||
|
def line_intersects_polygons_batch(
    p1: np.ndarray, p2: np.ndarray,
    polygons_x: np.ndarray, polygons_y: np.ndarray,
    polygon_lengths: np.ndarray,
    max_polygons: int = 30,
) -> Tuple[np.ndarray, np.ndarray]:
    """Check if line p1->p2 intersects multiple polygons.

    Args:
        p1, p2: shape (2,)
        polygons_x, polygons_y: flattened vertex arrays
        polygon_lengths: vertices per polygon (num_polygons,)
        max_polygons: only check nearest N polygons (bbox pre-filter)

    Returns:
        intersects: bool (num_polygons,)
        min_distances: distance to first hit (num_polygons,)

    Correctness depends on line_segments_intersect_batch for the per-edge
    tests. Polygons with fewer than 3 vertices are skipped entirely.
    """
    num_polygons = len(polygon_lengths)

    if num_polygons == 0:
        return np.array([], dtype=bool), np.array([])

    intersects = np.zeros(num_polygons, dtype=bool)
    # inf until a hit is found, so min_distances is inf for non-hits.
    min_t = np.full(num_polygons, np.inf)

    # Pre-filter: only check polygons whose first vertex is near the line bbox.
    # NOTE(review): testing only the FIRST vertex can drop a large polygon
    # whose first vertex is far from the line but whose edges still cross
    # it — confirm upstream polygons are small relative to the 50m buffer.
    if num_polygons > max_polygons:
        buf = 50.0  # 50m buffer
        line_min_x = min(p1[0], p2[0]) - buf
        line_max_x = max(p1[0], p2[0]) + buf
        line_min_y = min(p1[1], p2[1]) - buf
        line_max_y = max(p1[1], p2[1]) + buf

        nearby_mask = np.zeros(num_polygons, dtype=bool)
        vi = 0  # running offset into the flattened vertex arrays
        for i, length in enumerate(polygon_lengths):
            if length >= 3:
                cx = polygons_x[vi]
                cy = polygons_y[vi]
                if line_min_x <= cx <= line_max_x and line_min_y <= cy <= line_max_y:
                    nearby_mask[i] = True
            vi += length

        # Cap at max_polygons (keeps the first N in array order, not the nearest).
        nearby_indices = np.where(nearby_mask)[0]
        if len(nearby_indices) > max_polygons:
            nearby_mask = np.zeros(num_polygons, dtype=bool)
            nearby_mask[nearby_indices[:max_polygons]] = True
    else:
        nearby_mask = np.ones(num_polygons, dtype=bool)

    idx = 0  # offset of polygon i's vertices in the flattened arrays
    for i, length in enumerate(polygon_lengths):
        if length < 3 or not nearby_mask[i]:
            idx += length
            continue

        px = polygons_x[idx:idx + length]
        py = polygons_y[idx:idx + length]

        # Build the closed edge loop: vertex k -> vertex (k+1) mod length.
        starts = np.stack([px, py], axis=1)
        ends = np.stack([np.roll(px, -1), np.roll(py, -1)], axis=1)

        edge_intersects, t_vals = line_segments_intersect_batch(p1, p2, starts, ends)

        if np.any(edge_intersects):
            intersects[i] = True
            # Nearest crossing along p1->p2, as a fraction of line length.
            min_t[i] = np.min(t_vals[edge_intersects])

        idx += length

    # Convert the fractional t into metres along the line.
    line_length = np.linalg.norm(p2 - p1)
    min_distances = min_t * line_length

    return intersects, min_distances
|
||||||
|
|
||||||
|
|
||||||
|
def calculate_reflection_points_batch(
    tx: np.ndarray, rx: np.ndarray,
    wall_starts: np.ndarray, wall_ends: np.ndarray,
) -> Tuple[np.ndarray, np.ndarray]:
    """Calculate reflection points on N walls via mirror-image method.

    Mirror TX across each wall's line, then intersect the RX->mirror ray
    with the wall segment; the crossing is the specular reflection point.

    Args:
        tx, rx: shape (2,)
        wall_starts, wall_ends: shape (N, 2)

    Returns:
        reflection_points: (N, 2)
        valid: bool (N,) — crossing lies on the wall segment and TX sits
            on the wall's positive-normal side.
    """
    def cross2(a: np.ndarray, b: np.ndarray) -> np.ndarray:
        # Row-wise 2D cross product for (N, 2) arrays.
        return a[:, 0] * b[:, 1] - a[:, 1] * b[:, 0]

    wall_vec = wall_ends - wall_starts
    wall_length = np.linalg.norm(wall_vec, axis=1, keepdims=True)
    wall_unit = wall_vec / np.maximum(wall_length, 1e-10)

    # Left-hand normal of each wall direction.
    normals = np.stack([-wall_unit[:, 1], wall_unit[:, 0]], axis=1)

    # Signed distance of TX from each wall line, then its mirror image.
    tx_to_wall = tx - wall_starts
    tx_dist_to_wall = np.sum(tx_to_wall * normals, axis=1, keepdims=True)
    tx_mirror = tx - 2 * tx_dist_to_wall * normals

    rx_to_mirror = tx_mirror - rx

    # Denominator of the ray/segment intersection; ~0 ⇒ ray parallel to wall.
    cross_denom = (rx_to_mirror[:, 0] * wall_vec[:, 1] -
                   rx_to_mirror[:, 1] * wall_vec[:, 0])
    valid_denom = np.abs(cross_denom) > 1e-10
    cross_denom_safe = np.where(valid_denom, cross_denom, 1.0)

    # Parameter t along the wall segment where the mirror ray crosses it.
    rx_to_start = wall_starts - rx
    t = cross2(rx_to_start, rx_to_mirror) / cross_denom_safe

    reflection_points = wall_starts + t[:, np.newaxis] * wall_vec

    # Valid: non-parallel, crossing inside the segment, TX on the front side.
    valid = valid_denom & (t >= 0) & (t <= 1) & (tx_dist_to_wall[:, 0] > 0)

    return reflection_points, valid
|
||||||
|
|
||||||
|
|
||||||
|
def find_best_reflection_path_vectorized(
    tx: np.ndarray, rx: np.ndarray,
    building_walls_start: np.ndarray,
    building_walls_end: np.ndarray,
    wall_to_building: np.ndarray,
    obstacle_polygons_x: np.ndarray,
    obstacle_polygons_y: np.ndarray,
    obstacle_lengths: np.ndarray,
    max_candidates: int = 50,
    max_walls: int = 100,
    max_los_checks: int = 10,
) -> Tuple[Optional[np.ndarray], float, float]:
    """Find best single-reflection path using vectorized ops.

    Pipeline: mirror-image reflection candidates on each wall → filter by
    path length ≤ 2× direct → keep shortest max_candidates → verify LOS on
    the shortest max_los_checks, taking the first unobstructed one.

    Args:
        max_walls: Only consider closest N walls for reflection candidates.
        max_los_checks: Only verify LOS for top N shortest reflection paths.

    Returns:
        best_reflection_point: (2,) or None
        best_path_length: meters
        best_reflection_loss: dB

    NOTE(review): wall_to_building is filtered alongside the walls but
    never read afterwards in this function — confirm whether it is still
    needed in the signature.
    """
    num_walls = len(building_walls_start)
    if num_walls == 0:
        return None, np.inf, 0.0

    # Limit walls by distance to path midpoint
    if num_walls > max_walls:
        midpoint = (tx + rx) / 2
        wall_midpoints = (building_walls_start + building_walls_end) / 2
        wall_distances = np.linalg.norm(wall_midpoints - midpoint, axis=1)
        # argpartition: O(n) selection of the max_walls nearest walls.
        closest = np.argpartition(wall_distances, max_walls)[:max_walls]
        building_walls_start = building_walls_start[closest]
        building_walls_end = building_walls_end[closest]
        wall_to_building = wall_to_building[closest]

    refl_points, valid = calculate_reflection_points_batch(
        tx, rx, building_walls_start, building_walls_end,
    )

    if not np.any(valid):
        return None, np.inf, 0.0

    valid_indices = np.where(valid)[0]
    valid_refl = refl_points[valid]

    # Total reflected path length = TX->wall point + wall point->RX.
    tx_to_refl = np.linalg.norm(valid_refl - tx, axis=1)
    refl_to_rx = np.linalg.norm(rx - valid_refl, axis=1)
    path_lengths = tx_to_refl + refl_to_rx

    # Direct distance filter: skip if reflection path > 2x direct
    direct_dist = np.linalg.norm(rx - tx)
    within_range = path_lengths <= direct_dist * 2.0
    if not np.any(within_range):
        return None, np.inf, 0.0

    valid_indices = valid_indices[within_range]
    valid_refl = valid_refl[within_range]
    path_lengths = path_lengths[within_range]

    # Keep top candidates by shortest path
    if len(valid_indices) > max_candidates:
        top_idx = np.argpartition(path_lengths, max_candidates)[:max_candidates]
        valid_indices = valid_indices[top_idx]
        valid_refl = valid_refl[top_idx]
        path_lengths = path_lengths[top_idx]

    # Sort by path length for early exit
    sort_order = np.argsort(path_lengths)
    valid_refl = valid_refl[sort_order]
    path_lengths = path_lengths[sort_order]

    # Check LOS only for top N shortest candidates
    check_count = min(len(valid_refl), max_los_checks)
    best_idx = -1
    best_length = np.inf

    for i in range(check_count):
        length = path_lengths[i]
        # NOTE(review): this guard can never trigger — best_length stays
        # inf until the break below, so the loop exits on the first valid
        # candidate anyway.
        if length >= best_length:
            continue

        refl_pt = valid_refl[i]

        # TX -> reflection LOS
        intersects1, _ = line_intersects_polygons_batch(
            tx, refl_pt, obstacle_polygons_x, obstacle_polygons_y, obstacle_lengths,
        )
        if np.any(intersects1):
            continue

        # Reflection -> RX LOS
        intersects2, _ = line_intersects_polygons_batch(
            refl_pt, rx, obstacle_polygons_x, obstacle_polygons_y, obstacle_lengths,
        )
        if np.any(intersects2):
            continue

        best_idx = i
        best_length = length
        break  # sorted by length, first valid is best

    if best_idx < 0:
        return None, np.inf, 0.0

    best_point = valid_refl[best_idx]

    # Reflection loss: 3-10 dB depending on path ratio
    # (3 dB at ratio 1.0, saturating at 10 dB from ratio 1.5 upward).
    path_ratio = best_length / max(direct_dist, 1.0)
    reflection_loss = 3.0 + 7.0 * min(1.0, (path_ratio - 1.0) * 2)

    return best_point, best_length, reflection_loss
|
||||||
275
backend/app/services/gpu_backend.py
Normal file
275
backend/app/services/gpu_backend.py
Normal file
@@ -0,0 +1,275 @@
|
|||||||
|
"""
|
||||||
|
GPU Backend Manager — detects and manages compute backends.
|
||||||
|
|
||||||
|
Supports:
|
||||||
|
- CUDA via CuPy
|
||||||
|
- OpenCL via PyOpenCL (future)
|
||||||
|
- CPU via NumPy (always available)
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
from app.services.gpu_backend import gpu_manager
|
||||||
|
xp = gpu_manager.get_array_module() # cupy or numpy
|
||||||
|
status = gpu_manager.get_status()
|
||||||
|
"""
|
||||||
|
|
||||||
|
import logging
|
||||||
|
from enum import Enum
|
||||||
|
from dataclasses import dataclass, field
|
||||||
|
from typing import Any, Optional
|
||||||
|
|
||||||
|
import numpy as np
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class GPUBackend(str, Enum):
    """Compute backend identifiers (str-valued so they JSON-serialize as-is)."""
    CUDA = "cuda"      # NVIDIA GPUs via CuPy
    OPENCL = "opencl"  # OpenCL devices via PyOpenCL (detection-only for now)
    CPU = "cpu"        # NumPy fallback, always available
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class GPUDevice:
    """Descriptor for one detected compute device."""
    backend: GPUBackend  # which API exposes this device
    index: int           # device index within its backend
    name: str            # human-readable device name
    memory_mb: int       # total device memory in MB (0 for the CPU entry)
    extra: dict = field(default_factory=dict)  # backend-specific info (e.g. cuda_version, platform)
|
||||||
|
|
||||||
|
|
||||||
|
class GPUManager:
|
||||||
|
"""Singleton GPU manager with device detection and selection."""
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
self._devices: list[GPUDevice] = []
|
||||||
|
self._active_backend: GPUBackend = GPUBackend.CPU
|
||||||
|
self._active_device: Optional[GPUDevice] = None
|
||||||
|
self._cupy = None
|
||||||
|
self._detect_devices()
|
||||||
|
|
||||||
|
def _detect_devices(self):
|
||||||
|
"""Probe available GPU backends."""
|
||||||
|
# Always add CPU
|
||||||
|
cpu_device = GPUDevice(
|
||||||
|
backend=GPUBackend.CPU,
|
||||||
|
index=0,
|
||||||
|
name="CPU (NumPy)",
|
||||||
|
memory_mb=0,
|
||||||
|
)
|
||||||
|
self._devices.append(cpu_device)
|
||||||
|
|
||||||
|
# Try CuPy / CUDA
|
||||||
|
try:
|
||||||
|
import cupy as cp
|
||||||
|
device_count = cp.cuda.runtime.getDeviceCount()
|
||||||
|
for i in range(device_count):
|
||||||
|
props = cp.cuda.runtime.getDeviceProperties(i)
|
||||||
|
name = props["name"]
|
||||||
|
if isinstance(name, bytes):
|
||||||
|
name = name.decode()
|
||||||
|
mem_mb = props["totalGlobalMem"] // (1024 * 1024)
|
||||||
|
cuda_ver = cp.cuda.runtime.runtimeGetVersion()
|
||||||
|
device = GPUDevice(
|
||||||
|
backend=GPUBackend.CUDA,
|
||||||
|
index=i,
|
||||||
|
name=str(name),
|
||||||
|
memory_mb=mem_mb,
|
||||||
|
extra={"cuda_version": cuda_ver},
|
||||||
|
)
|
||||||
|
self._devices.append(device)
|
||||||
|
logger.info(f"[GPU] CUDA device {i}: {name} ({mem_mb} MB)")
|
||||||
|
if device_count > 0:
|
||||||
|
self._cupy = cp
|
||||||
|
except ImportError:
|
||||||
|
logger.info("[GPU] CuPy not installed — CUDA unavailable")
|
||||||
|
except Exception as e:
|
||||||
|
logger.warning(f"[GPU] CuPy probe error: {e}")
|
||||||
|
|
||||||
|
# Try PyOpenCL (future — stub for detection only)
|
||||||
|
try:
|
||||||
|
import pyopencl as cl
|
||||||
|
platforms = cl.get_platforms()
|
||||||
|
for plat in platforms:
|
||||||
|
for dev in plat.get_devices():
|
||||||
|
mem_mb = dev.global_mem_size // (1024 * 1024)
|
||||||
|
device = GPUDevice(
|
||||||
|
backend=GPUBackend.OPENCL,
|
||||||
|
index=len([d for d in self._devices if d.backend == GPUBackend.OPENCL]),
|
||||||
|
name=dev.name.strip(),
|
||||||
|
memory_mb=mem_mb,
|
||||||
|
extra={"platform": plat.name.strip()},
|
||||||
|
)
|
||||||
|
self._devices.append(device)
|
||||||
|
logger.info(f"[GPU] OpenCL device: {device.name} ({mem_mb} MB)")
|
||||||
|
except ImportError:
|
||||||
|
pass
|
||||||
|
except Exception as e:
|
||||||
|
logger.debug(f"[GPU] OpenCL probe error: {e}")
|
||||||
|
|
||||||
|
# Auto-select best: prefer CUDA > OpenCL > CPU
|
||||||
|
cuda_devices = [d for d in self._devices if d.backend == GPUBackend.CUDA]
|
||||||
|
if cuda_devices:
|
||||||
|
self._active_backend = GPUBackend.CUDA
|
||||||
|
self._active_device = cuda_devices[0]
|
||||||
|
logger.info(f"[GPU] Active backend: CUDA — {self._active_device.name}")
|
||||||
|
else:
|
||||||
|
self._active_backend = GPUBackend.CPU
|
||||||
|
self._active_device = cpu_device
|
||||||
|
logger.info("[GPU] Active backend: CPU (NumPy)")
|
||||||
|
|
||||||
|
@property
|
||||||
|
def gpu_available(self) -> bool:
|
||||||
|
return self._active_backend != GPUBackend.CPU
|
||||||
|
|
||||||
|
def get_array_module(self) -> Any:
|
||||||
|
"""Return cupy (if CUDA active) or numpy."""
|
||||||
|
if self._active_backend == GPUBackend.CUDA and self._cupy is not None:
|
||||||
|
return self._cupy
|
||||||
|
return np
|
||||||
|
|
||||||
|
def to_cpu(self, arr: Any) -> np.ndarray:
|
||||||
|
"""Transfer array to CPU numpy."""
|
||||||
|
if hasattr(arr, 'get'):
|
||||||
|
return arr.get()
|
||||||
|
return np.asarray(arr)
|
||||||
|
|
||||||
|
def get_status(self) -> dict:
|
||||||
|
"""Full status dict for API."""
|
||||||
|
return {
|
||||||
|
"active_backend": self._active_backend.value,
|
||||||
|
"active_device": {
|
||||||
|
"backend": self._active_device.backend.value,
|
||||||
|
"index": self._active_device.index,
|
||||||
|
"name": self._active_device.name,
|
||||||
|
"memory_mb": self._active_device.memory_mb,
|
||||||
|
} if self._active_device else None,
|
||||||
|
"gpu_available": self.gpu_available,
|
||||||
|
"available_devices": [
|
||||||
|
{
|
||||||
|
"backend": d.backend.value,
|
||||||
|
"index": d.index,
|
||||||
|
"name": d.name,
|
||||||
|
"memory_mb": d.memory_mb,
|
||||||
|
}
|
||||||
|
for d in self._devices
|
||||||
|
],
|
||||||
|
}
|
||||||
|
|
||||||
|
def get_devices(self) -> list[dict]:
|
||||||
|
"""Device list for API."""
|
||||||
|
return [
|
||||||
|
{
|
||||||
|
"backend": d.backend.value,
|
||||||
|
"index": d.index,
|
||||||
|
"name": d.name,
|
||||||
|
"memory_mb": d.memory_mb,
|
||||||
|
}
|
||||||
|
for d in self._devices
|
||||||
|
]
|
||||||
|
|
||||||
|
def get_diagnostics(self) -> dict:
|
||||||
|
"""Full diagnostic info for troubleshooting GPU detection."""
|
||||||
|
import sys
|
||||||
|
import platform
|
||||||
|
import subprocess
|
||||||
|
|
||||||
|
is_wsl = "microsoft" in platform.release().lower()
|
||||||
|
|
||||||
|
diag = {
|
||||||
|
"python_version": sys.version,
|
||||||
|
"python_executable": sys.executable,
|
||||||
|
"platform": platform.platform(),
|
||||||
|
"is_wsl": is_wsl,
|
||||||
|
"numpy": {"version": np.__version__},
|
||||||
|
"cuda": {},
|
||||||
|
"opencl": {},
|
||||||
|
"nvidia_smi": None,
|
||||||
|
"detected_devices": len(self._devices),
|
||||||
|
"active_backend": self._active_backend.value,
|
||||||
|
}
|
||||||
|
|
||||||
|
# Check nvidia-smi (works even without CuPy)
|
||||||
|
try:
|
||||||
|
result = subprocess.run(
|
||||||
|
["nvidia-smi", "--query-gpu=name,memory.total,driver_version", "--format=csv,noheader"],
|
||||||
|
capture_output=True, text=True, timeout=5
|
||||||
|
)
|
||||||
|
if result.returncode == 0 and result.stdout.strip():
|
||||||
|
diag["nvidia_smi"] = result.stdout.strip()
|
||||||
|
except Exception:
|
||||||
|
diag["nvidia_smi"] = "not found or error"
|
||||||
|
|
||||||
|
# Check CuPy/CUDA
|
||||||
|
try:
|
||||||
|
import cupy as cp
|
||||||
|
diag["cuda"]["cupy_version"] = cp.__version__
|
||||||
|
diag["cuda"]["cuda_runtime_version"] = cp.cuda.runtime.runtimeGetVersion()
|
||||||
|
diag["cuda"]["device_count"] = cp.cuda.runtime.getDeviceCount()
|
||||||
|
for i in range(diag["cuda"]["device_count"]):
|
||||||
|
props = cp.cuda.runtime.getDeviceProperties(i)
|
||||||
|
name = props["name"]
|
||||||
|
if isinstance(name, bytes):
|
||||||
|
name = name.decode()
|
||||||
|
diag["cuda"][f"device_{i}"] = {
|
||||||
|
"name": str(name),
|
||||||
|
"memory_mb": props["totalGlobalMem"] // (1024 * 1024),
|
||||||
|
"compute_capability": f"{props['major']}.{props['minor']}",
|
||||||
|
}
|
||||||
|
except ImportError:
|
||||||
|
diag["cuda"]["error"] = "CuPy not installed"
|
||||||
|
if is_wsl:
|
||||||
|
diag["cuda"]["install_hint"] = "pip3 install cupy-cuda12x --break-system-packages"
|
||||||
|
else:
|
||||||
|
diag["cuda"]["install_hint"] = "pip install cupy-cuda12x"
|
||||||
|
except Exception as e:
|
||||||
|
diag["cuda"]["error"] = str(e)
|
||||||
|
|
||||||
|
# Check PyOpenCL
|
||||||
|
try:
|
||||||
|
import pyopencl as cl
|
||||||
|
diag["opencl"]["pyopencl_version"] = cl.VERSION_TEXT
|
||||||
|
diag["opencl"]["platforms"] = []
|
||||||
|
for p in cl.get_platforms():
|
||||||
|
platform_info = {"name": p.name.strip(), "devices": []}
|
||||||
|
for d in p.get_devices():
|
||||||
|
platform_info["devices"].append({
|
||||||
|
"name": d.name.strip(),
|
||||||
|
"type": cl.device_type.to_string(d.type),
|
||||||
|
"memory_mb": d.global_mem_size // (1024 * 1024),
|
||||||
|
"compute_units": d.max_compute_units,
|
||||||
|
})
|
||||||
|
diag["opencl"]["platforms"].append(platform_info)
|
||||||
|
except ImportError:
|
||||||
|
diag["opencl"]["error"] = "PyOpenCL not installed"
|
||||||
|
if is_wsl:
|
||||||
|
diag["opencl"]["install_hint"] = "pip3 install pyopencl --break-system-packages"
|
||||||
|
else:
|
||||||
|
diag["opencl"]["install_hint"] = "pip install pyopencl"
|
||||||
|
except Exception as e:
|
||||||
|
diag["opencl"]["error"] = str(e)
|
||||||
|
|
||||||
|
return diag
|
||||||
|
|
||||||
|
def set_device(self, backend: str, index: int = 0) -> dict:
    """Switch the active compute device.

    Args:
        backend: Backend name, must be a valid GPUBackend value.
        index: Device index within that backend (default 0).

    Returns:
        Dict with the new active ``backend`` and ``device`` name.

    Raises:
        ValueError: If no detected device matches backend+index.
    """
    wanted = GPUBackend(backend)

    # Find the first detected device matching the requested backend/index.
    chosen = next(
        (d for d in self._devices if d.backend == wanted and d.index == index),
        None,
    )
    if chosen is None:
        raise ValueError(f"No device found: backend={backend}, index={index}")

    self._active_device = chosen
    self._active_backend = wanted

    # CUDA needs the driver-side device selection as well.
    if wanted == GPUBackend.CUDA and self._cupy is not None:
        self._cupy.cuda.Device(index).use()

    logger.info(f"[GPU] Switched to: {self._active_device.name} ({wanted.value})")
    return {
        "backend": self._active_backend.value,
        "device": self._active_device.name,
    }
|
||||||
|
|
||||||
|
|
||||||
|
# Singleton: import `gpu_manager` from this module rather than constructing
# another GPUManager (device detection runs in its constructor).
gpu_manager = GPUManager()
|
||||||
574
backend/app/services/gpu_service.py
Normal file
574
backend/app/services/gpu_service.py
Normal file
@@ -0,0 +1,574 @@
|
|||||||
|
"""
|
||||||
|
GPU-accelerated computation service using CuPy.
|
||||||
|
Falls back to NumPy when CuPy/CUDA is not available.
|
||||||
|
|
||||||
|
Provides vectorized batch operations for coverage calculation:
|
||||||
|
- Haversine distance (site -> all grid points)
|
||||||
|
- Okumura-Hata path loss (all distances at once)
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
from app.services.gpu_service import gpu_service, GPU_AVAILABLE
|
||||||
|
"""
|
||||||
|
|
||||||
|
import numpy as np
|
||||||
|
from typing import Dict, Any
|
||||||
|
|
||||||
|
from app.services.gpu_backend import gpu_manager
|
||||||
|
|
||||||
|
# Backward-compatible exports (older call sites import these names directly).
GPU_AVAILABLE = gpu_manager.gpu_available
# Snapshot of the active device taken once at import time.
# NOTE(review): not refreshed if gpu_manager.set_device() switches devices
# later — confirm callers tolerate a stale snapshot.
GPU_INFO: Dict[str, Any] | None = (
    {
        "name": gpu_manager._active_device.name,
        "memory_mb": gpu_manager._active_device.memory_mb,
        **gpu_manager._active_device.extra,
    }
    if gpu_manager.gpu_available and gpu_manager._active_device
    else None
)

# Array module: cupy on GPU, numpy on CPU
xp = gpu_manager.get_array_module()
|
||||||
|
|
||||||
|
|
||||||
|
def _to_cpu(arr):
    """Transfer *arr* to a CPU numpy array.

    Delegates to gpu_manager.to_cpu; presumably a no-op on the numpy
    backend — see gpu_backend for the exact semantics.
    """
    return gpu_manager.to_cpu(arr)
|
||||||
|
|
||||||
|
|
||||||
|
class GPUService:
|
||||||
|
"""GPU-accelerated batch operations for coverage calculation."""
|
||||||
|
|
||||||
|
@property
def available(self) -> bool:
    """Whether the manager reports a usable GPU backend."""
    return gpu_manager.gpu_available
|
||||||
|
|
||||||
|
def get_info(self) -> Dict[str, Any]:
    """Return GPU info dict for system endpoint.

    Returns ``{"available": True, ...device fields}`` when a GPU is active,
    otherwise a stub with None name/memory.
    """
    if gpu_manager.gpu_available:
        # GPU_INFO is the import-time device snapshot; guard against None.
        return {"available": True, **(GPU_INFO or {})}
    return {"available": False, "name": None, "memory_mb": None}
|
||||||
|
|
||||||
|
def precompute_distances(
    self,
    grid_lats: np.ndarray,
    grid_lons: np.ndarray,
    site_lat: float,
    site_lon: float,
) -> np.ndarray:
    """Haversine distance from the site to every grid point.

    Runs on the active array backend (cupy or numpy) and returns the
    distances in meters as a CPU numpy array.
    """
    mod = gpu_manager.get_array_module()

    grid_lat_r = mod.radians(mod.asarray(grid_lats, dtype=mod.float64))
    grid_lon_r = mod.radians(mod.asarray(grid_lons, dtype=mod.float64))
    site_lat_r = mod.radians(mod.float64(site_lat))
    site_lon_r = mod.radians(mod.float64(site_lon))

    half_dlat = (site_lat_r - grid_lat_r) / 2
    half_dlon = (site_lon_r - grid_lon_r) / 2

    # Haversine formula: a = sin²(Δφ/2) + cosφ1·cosφ2·sin²(Δλ/2)
    hav = (mod.sin(half_dlat) ** 2
           + mod.cos(grid_lat_r) * mod.cos(site_lat_r) * mod.sin(half_dlon) ** 2)
    central_angle = 2 * mod.arcsin(mod.sqrt(hav))

    # Mean Earth radius of 6 371 km converts the angle to meters.
    return _to_cpu(6371000.0 * central_angle)
|
||||||
|
|
||||||
|
def precompute_path_loss(
    self,
    distances: np.ndarray,
    frequency_mhz: float,
    tx_height: float,
    rx_height: float = 1.5,
    environment: str = "urban",
) -> np.ndarray:
    """Vectorized path loss using the appropriate propagation model.

    Model selection by frequency band (Phase 3.0 model selection):
      > 2000 MHz  : free-space path loss (FSPL)
      1500-2000   : COST-231 Hata
      150-1500    : Okumura-Hata with environment corrections
      < 150 MHz   : FSPL + 10 dB terrain-roughness allowance

    Returns path loss in dB as a CPU numpy array.
    """
    mod = gpu_manager.get_array_module()

    dist = mod.asarray(distances, dtype=mod.float64)
    # Clamp distances below 100 m to avoid the models blowing up near the site.
    dist_km = mod.maximum(dist / 1000.0, 0.1)

    f_mhz = float(frequency_mhz)
    base_h = max(float(tx_height), 1.0)
    mob_h = max(float(rx_height), 1.0)

    lg_f = mod.log10(mod.float64(f_mhz))
    lg_hb = mod.log10(mod.float64(max(base_h, 1.0)))
    lg_d = mod.log10(dist_km)

    if f_mhz > 2000:
        # Free-Space Path Loss: FSPL = 20*log10(d_km) + 20*log10(f) + 32.45
        loss = 20.0 * lg_d + 20.0 * lg_f + 32.45

    elif f_mhz > 1500:
        # COST-231 Hata: extends Okumura-Hata to 1500-2000 MHz
        mob_corr = (1.1 * lg_f - 0.7) * mob_h - (1.56 * lg_f - 0.8)
        loss = (46.3 + 33.9 * lg_f - 13.82 * lg_hb - mob_corr
                + (44.9 - 6.55 * lg_hb) * lg_d)
        if environment == "urban":
            loss = loss + 3.0  # Metropolitan center correction

    elif f_mhz >= 150:
        # Okumura-Hata: 150-1500 MHz; mobile-antenna correction depends on
        # the environment and on whether we are in the large-city regime.
        if environment == "urban" and f_mhz >= 400:
            mob_corr = 3.2 * (mod.log10(11.75 * mob_h) ** 2) - 4.97
        else:
            mob_corr = (1.1 * lg_f - 0.7) * mob_h - (1.56 * lg_f - 0.8)

        urban_loss = (69.55 + 26.16 * lg_f - 13.82 * lg_hb - mob_corr
                      + (44.9 - 6.55 * lg_hb) * lg_d)

        # Standard Hata environment corrections relative to the urban value.
        if environment == "suburban":
            loss = urban_loss - 2 * (mod.log10(f_mhz / 28) ** 2) - 5.4
        elif environment == "rural":
            loss = urban_loss - 4.78 * (lg_f ** 2) + 18.33 * lg_f - 35.94
        elif environment == "open":
            loss = urban_loss - 4.78 * (lg_f ** 2) + 18.33 * lg_f - 40.94
        else:
            loss = urban_loss

    else:
        # Very low frequency — Longley-Rice simplified (area mode):
        # FSPL baseline plus a flat 10 dB terrain roughness correction.
        loss = 20.0 * lg_d + 20.0 * lg_f + 32.45 + 10.0

    return _to_cpu(loss)
|
||||||
|
|
||||||
|
def batch_terrain_los(
    self,
    site_lat: float,
    site_lon: float,
    site_height: float,
    site_elevation: float,
    grid_lats: np.ndarray,
    grid_lons: np.ndarray,
    grid_elevations: np.ndarray,
    distances: np.ndarray,
    frequency_mhz: float,
    terrain_cache: dict,
    num_samples: int = 30,
) -> tuple[np.ndarray, np.ndarray]:
    """Batch compute terrain LOS and diffraction loss for all grid points.

    This is the key GPU optimization — instead of sampling terrain profiles
    one point at a time, we sample ALL profiles in parallel using vectorized
    operations.

    Args:
        site_lat, site_lon: Site coordinates
        site_height: Antenna height above ground (meters)
        site_elevation: Ground elevation at site (meters)
        grid_lats, grid_lons: All grid point coordinates
        grid_elevations: Ground elevation at each grid point
        distances: Pre-computed distances from site to each point (meters)
        frequency_mhz: Frequency for diffraction calculation.
            NOTE(review): currently unused — the simplified Fresnel
            parameter below ignores frequency; confirm that is intended.
        terrain_cache: Dict[tile_name -> numpy array] from terrain_service
        num_samples: Number of samples per terrain profile

    Returns:
        (has_los, terrain_loss) - both shape (N,)
        has_los: boolean array, True if clear line of sight
        terrain_loss: diffraction loss in dB (0 if has_los)
    """
    _xp = gpu_manager.get_array_module()
    N = len(grid_lats)

    # Empty grid: nothing to do.
    if N == 0:
        return np.array([], dtype=bool), np.array([], dtype=np.float64)

    # Convert inputs to GPU arrays
    g_lats = _xp.asarray(grid_lats, dtype=_xp.float64)
    g_lons = _xp.asarray(grid_lons, dtype=_xp.float64)
    g_elevs = _xp.asarray(grid_elevations, dtype=_xp.float64)
    g_dists = _xp.asarray(distances, dtype=_xp.float64)

    # Heights: transmitter height is absolute (ground + antenna).
    tx_total = float(site_elevation + site_height)
    rx_height = 1.5  # Receiver height above ground

    # Earth curvature constants (k = 4/3 effective-Earth refraction model).
    EARTH_RADIUS = 6371000.0
    K_FACTOR = 4.0 / 3.0
    effective_radius = K_FACTOR * EARTH_RADIUS

    # Sample terrain profiles for all points at once.
    # Create fractional sample positions along each path: shape (N, num_samples)
    t = _xp.linspace(0, 1, num_samples, dtype=_xp.float64)  # (S,)
    t = t.reshape(1, -1)  # (1, S)

    # Interpolate lat/lon for all sample points.
    # sample_lats[i, j] = site_lat + t[j] * (grid_lats[i] - site_lat)
    # Linear lat/lon interpolation — approximates the great-circle path,
    # reasonable for the short paths used in coverage grids.
    dlat = g_lats.reshape(-1, 1) - site_lat  # (N, 1)
    dlon = g_lons.reshape(-1, 1) - site_lon  # (N, 1)
    sample_lats = site_lat + t * dlat  # (N, S)
    sample_lons = site_lon + t * dlon  # (N, S)

    # Sample distances along path: shape (N, S)
    sample_dists = t * g_dists.reshape(-1, 1)  # (N, S)

    # Get terrain elevations for all samples.
    # Tile-cache lookups are CPU-bound, so transfer coordinates to the host,
    # do one batched lookup, then move the elevations back to the device.
    sample_lats_cpu = _to_cpu(sample_lats).flatten()
    sample_lons_cpu = _to_cpu(sample_lons).flatten()

    # Batch elevation lookup from cache
    sample_elevs_cpu = self._batch_elevation_lookup(
        sample_lats_cpu, sample_lons_cpu, terrain_cache
    )
    sample_elevs = _xp.asarray(sample_elevs_cpu, dtype=_xp.float64).reshape(N, num_samples)

    # Compute LOS line height at each sample point:
    # linear interpolation from tx height to rx height.
    rx_total = g_elevs + rx_height  # (N,)
    los_heights = tx_total + t * (rx_total.reshape(-1, 1) - tx_total)  # (N, S)

    # Earth curvature correction at each sample: bulge of the effective
    # Earth between the endpoints, maximal mid-path.
    total_dist = g_dists.reshape(-1, 1)  # (N, 1)
    d = sample_dists  # (N, S)
    curvature = (d * (total_dist - d)) / (2 * effective_radius)  # (N, S)
    los_heights_corrected = los_heights - curvature  # (N, S)

    # Clearance at each sample point (negative = terrain above the LOS line).
    clearances = los_heights_corrected - sample_elevs  # (N, S)

    # Minimum clearance per profile
    min_clearances = _xp.min(clearances, axis=1)  # (N,)

    # Has LOS if minimum clearance > 0
    has_los = min_clearances > 0  # (N,)

    # Diffraction loss for points without LOS
    # Using simplified ITU-R P.526 formula
    terrain_loss = _xp.zeros(N, dtype=_xp.float64)

    # Only compute diffraction where blocked
    blocked_mask = ~has_los
    blocked_clearances = min_clearances[blocked_mask]

    if _xp.any(blocked_mask):
        # v = |clearance| / 10 (simplified Fresnel parameter; note that the
        # true parameter depends on wavelength and obstacle position).
        v = _xp.abs(blocked_clearances) / 10.0

        # Diffraction loss formula from ITU-R P.526
        # (v <= 0 branch is only reachable when clearance is exactly 0).
        loss = _xp.where(
            v <= 0,
            _xp.zeros_like(v),
            _xp.where(
                v < 2.4,
                6.02 + 9.11 * v + 1.65 * v ** 2,
                12.95 + 20 * _xp.log10(v)
            )
        )
        # Cap at reasonable max
        loss = _xp.minimum(loss, 40.0)
        # Boolean-mask assignment works on both numpy and cupy arrays.
        terrain_loss[blocked_mask] = loss

    return _to_cpu(has_los).astype(bool), _to_cpu(terrain_loss)
|
||||||
|
|
||||||
|
def _batch_elevation_lookup(
|
||||||
|
self,
|
||||||
|
lats: np.ndarray,
|
||||||
|
lons: np.ndarray,
|
||||||
|
terrain_cache: dict,
|
||||||
|
) -> np.ndarray:
|
||||||
|
"""Look up elevations from cached terrain tiles with bilinear interpolation.
|
||||||
|
|
||||||
|
Vectorized implementation: processes per-tile (1-4 tiles) instead of
|
||||||
|
per-point (thousands of points). Uses bilinear interpolation for
|
||||||
|
sub-meter accuracy (vs 15m error with nearest-neighbor at 30m resolution).
|
||||||
|
|
||||||
|
Args:
|
||||||
|
lats, lons: Flattened arrays of coordinates
|
||||||
|
terrain_cache: Dict mapping tile_name -> numpy array
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
elevations: Same shape as input lats
|
||||||
|
"""
|
||||||
|
elevations = np.zeros(len(lats), dtype=np.float64)
|
||||||
|
|
||||||
|
# Vectorized tile identification
|
||||||
|
lat_ints = np.floor(lats).astype(int)
|
||||||
|
lon_ints = np.floor(lons).astype(int)
|
||||||
|
|
||||||
|
# Process per tile (usually 1-4 tiles, not per point)
|
||||||
|
unique_tiles = set(zip(lat_ints, lon_ints))
|
||||||
|
|
||||||
|
for lat_int, lon_int in unique_tiles:
|
||||||
|
lat_letter = 'N' if lat_int >= 0 else 'S'
|
||||||
|
lon_letter = 'E' if lon_int >= 0 else 'W'
|
||||||
|
tile_name = f"{lat_letter}{abs(lat_int):02d}{lon_letter}{abs(lon_int):03d}"
|
||||||
|
|
||||||
|
tile = terrain_cache.get(tile_name)
|
||||||
|
if tile is None:
|
||||||
|
continue
|
||||||
|
|
||||||
|
# Mask for points in this tile
|
||||||
|
mask = (lat_ints == lat_int) & (lon_ints == lon_int)
|
||||||
|
tile_lats = lats[mask]
|
||||||
|
tile_lons = lons[mask]
|
||||||
|
|
||||||
|
size = tile.shape[0]
|
||||||
|
|
||||||
|
# Vectorized bilinear interpolation
|
||||||
|
lat_frac = tile_lats - lat_int
|
||||||
|
lon_frac = tile_lons - lon_int
|
||||||
|
|
||||||
|
row_exact = (1.0 - lat_frac) * (size - 1)
|
||||||
|
col_exact = lon_frac * (size - 1)
|
||||||
|
|
||||||
|
r0 = np.clip(row_exact.astype(int), 0, size - 2)
|
||||||
|
c0 = np.clip(col_exact.astype(int), 0, size - 2)
|
||||||
|
r1 = r0 + 1
|
||||||
|
c1 = c0 + 1
|
||||||
|
|
||||||
|
dr = row_exact - r0
|
||||||
|
dc = col_exact - c0
|
||||||
|
|
||||||
|
# Get four corner values for all points at once
|
||||||
|
z00 = tile[r0, c0].astype(np.float64)
|
||||||
|
z01 = tile[r0, c1].astype(np.float64)
|
||||||
|
z10 = tile[r1, c0].astype(np.float64)
|
||||||
|
z11 = tile[r1, c1].astype(np.float64)
|
||||||
|
|
||||||
|
# Bilinear interpolation (vectorized)
|
||||||
|
result = (z00 * (1 - dr) * (1 - dc) +
|
||||||
|
z01 * (1 - dr) * dc +
|
||||||
|
z10 * dr * (1 - dc) +
|
||||||
|
z11 * dr * dc)
|
||||||
|
|
||||||
|
# Handle void values (-32768) - set to 0
|
||||||
|
void_mask = (z00 == -32768) | (z01 == -32768) | (z10 == -32768) | (z11 == -32768)
|
||||||
|
result[void_mask] = 0.0
|
||||||
|
|
||||||
|
elevations[mask] = result
|
||||||
|
|
||||||
|
return elevations
|
||||||
|
|
||||||
|
def batch_antenna_pattern(
    self,
    site_lat: float,
    site_lon: float,
    grid_lats: np.ndarray,
    grid_lons: np.ndarray,
    azimuth: float,
    beamwidth: float,
) -> np.ndarray:
    """Batch compute antenna pattern loss for all grid points.

    Simplified sector pattern: parabolic roll-off up to 3 dB at the
    half-beamwidth edge, a steeper parabola outside it capped at 25 dB.
    Omni configurations (no azimuth or zero beamwidth) incur no loss.

    Returns antenna_loss in dB, shape (N,).
    """
    mod = gpu_manager.get_array_module()
    count = len(grid_lats)

    # Omni / degenerate inputs: no directional loss anywhere.
    if count == 0 or azimuth is None or not beamwidth:
        return np.zeros(count, dtype=np.float64)

    src_lat = mod.radians(mod.float64(site_lat))
    src_lon = mod.radians(mod.float64(site_lon))
    dst_lat = mod.radians(mod.asarray(grid_lats, dtype=mod.float64))
    dst_lon = mod.radians(mod.asarray(grid_lons, dtype=mod.float64))

    # Initial great-circle bearing from the site to every point (0..360°).
    delta_lon = dst_lon - src_lon
    east = mod.sin(delta_lon) * mod.cos(dst_lat)
    north = mod.cos(src_lat) * mod.sin(dst_lat) - mod.sin(src_lat) * mod.cos(dst_lat) * mod.cos(delta_lon)
    bearings = (mod.degrees(mod.arctan2(east, north)) + 360) % 360

    # Smallest angular offset from boresight, folded to 0..180°.
    offset = mod.abs(bearings - azimuth)
    offset = mod.where(offset > 180, 360 - offset, offset)

    half_bw = beamwidth / 2
    inside = offset <= half_bw
    main_lobe = 3 * (offset / half_bw) ** 2
    side_lobe = mod.minimum(3 + 12 * ((offset - half_bw) / half_bw) ** 2, 25.0)

    return _to_cpu(mod.where(inside, main_lobe, side_lobe))
|
||||||
|
|
||||||
|
def batch_final_rsrp(
    self,
    tx_power: float,
    tx_gain: float,
    path_loss: np.ndarray,
    terrain_loss: np.ndarray,
    antenna_loss: np.ndarray,
    building_loss: np.ndarray,
    vegetation_loss: np.ndarray,
    rain_loss: np.ndarray,
    indoor_loss: np.ndarray,
    atmospheric_loss: np.ndarray,
    reflection_gain: np.ndarray,
    fading_margin: float = 0.0,
) -> np.ndarray:
    """Vectorized link budget: final RSRP per grid point.

    RSRP = tx_power + tx_gain - path_loss - terrain_loss - antenna_loss
           - building_loss - vegetation_loss - rain_loss - indoor_loss
           - atmospheric_loss + reflection_gain - fading_margin

    Returns RSRP in dBm, shape (N,), as a CPU numpy array.
    """
    mod = gpu_manager.get_array_module()

    def as64(a):
        # Normalize every loss term to float64 on the active backend.
        return mod.asarray(a, dtype=mod.float64)

    # Accumulate term by term, preserving the budget's evaluation order.
    rsrp = float(tx_power) + float(tx_gain)
    rsrp = rsrp - as64(path_loss)
    rsrp = rsrp - as64(terrain_loss)
    rsrp = rsrp - as64(antenna_loss)
    rsrp = rsrp - as64(building_loss)
    rsrp = rsrp - as64(vegetation_loss)
    rsrp = rsrp - as64(rain_loss)
    rsrp = rsrp - as64(indoor_loss)
    rsrp = rsrp - as64(atmospheric_loss)
    rsrp = rsrp + as64(reflection_gain)
    rsrp = rsrp - float(fading_margin)

    return _to_cpu(rsrp)
|
||||||
|
|
||||||
|
def calculate_interference(
|
||||||
|
self,
|
||||||
|
rsrp_grids: list,
|
||||||
|
frequencies: list,
|
||||||
|
) -> tuple:
|
||||||
|
"""Calculate C/I (carrier-to-interference) ratio for multi-site scenarios.
|
||||||
|
|
||||||
|
For each grid point:
|
||||||
|
- C = signal strength from strongest (serving) cell
|
||||||
|
- I = sum of signal strengths from all other co-frequency cells
|
||||||
|
- C/I = C(dBm) - 10*log10(sum of linear interference powers)
|
||||||
|
|
||||||
|
Args:
|
||||||
|
rsrp_grids: List of RSRP arrays, one per site, shape (N,) each
|
||||||
|
frequencies: List of frequencies (MHz) for each site
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
(ci_ratio, best_server_idx, best_rsrp)
|
||||||
|
ci_ratio: C/I in dB, shape (N,)
|
||||||
|
best_server_idx: Index of serving cell per point, shape (N,)
|
||||||
|
best_rsrp: RSRP of serving cell per point, shape (N,)
|
||||||
|
"""
|
||||||
|
_xp = gpu_manager.get_array_module()
|
||||||
|
|
||||||
|
if len(rsrp_grids) < 2:
|
||||||
|
# Single site - no interference, return infinity C/I
|
||||||
|
if rsrp_grids:
|
||||||
|
n_points = len(rsrp_grids[0])
|
||||||
|
return (
|
||||||
|
np.full(n_points, 50.0, dtype=np.float64), # 50 dB = effectively no interference
|
||||||
|
np.zeros(n_points, dtype=np.int32),
|
||||||
|
np.array(rsrp_grids[0], dtype=np.float64),
|
||||||
|
)
|
||||||
|
return np.array([]), np.array([]), np.array([])
|
||||||
|
|
||||||
|
# Stack RSRP grids: shape (num_sites, num_points)
|
||||||
|
rsrp_stack = _xp.stack([_xp.asarray(g, dtype=_xp.float64) for g in rsrp_grids], axis=0)
|
||||||
|
num_sites, num_points = rsrp_stack.shape
|
||||||
|
|
||||||
|
# Convert to linear power (mW)
|
||||||
|
rsrp_linear = _xp.power(10.0, rsrp_stack / 10.0)
|
||||||
|
|
||||||
|
# Best server per point
|
||||||
|
best_server_idx = _xp.argmax(rsrp_stack, axis=0)
|
||||||
|
best_rsrp = _xp.take_along_axis(rsrp_stack, best_server_idx[_xp.newaxis, :], axis=0)[0]
|
||||||
|
best_rsrp_linear = _xp.take_along_axis(rsrp_linear, best_server_idx[_xp.newaxis, :], axis=0)[0]
|
||||||
|
|
||||||
|
# Group sites by frequency for co-channel interference
|
||||||
|
freq_array = _xp.asarray(frequencies, dtype=_xp.float64)
|
||||||
|
|
||||||
|
# Calculate interference only from co-frequency sites
|
||||||
|
interference_linear = _xp.zeros(num_points, dtype=_xp.float64)
|
||||||
|
|
||||||
|
for point_idx in range(num_points):
|
||||||
|
serving_site = int(_to_cpu(best_server_idx[point_idx]))
|
||||||
|
serving_freq = frequencies[serving_site]
|
||||||
|
|
||||||
|
# Sum power from all other sites on same frequency
|
||||||
|
for site_idx in range(num_sites):
|
||||||
|
if site_idx != serving_site and frequencies[site_idx] == serving_freq:
|
||||||
|
interference_linear[point_idx] += rsrp_linear[site_idx, point_idx]
|
||||||
|
|
||||||
|
# C/I ratio in dB
|
||||||
|
# Avoid log10(0) with small epsilon
|
||||||
|
epsilon = 1e-30
|
||||||
|
ci_ratio = 10 * _xp.log10(best_rsrp_linear / (interference_linear + epsilon))
|
||||||
|
|
||||||
|
# Clip to reasonable range (-20 to 50 dB)
|
||||||
|
ci_ratio = _xp.clip(ci_ratio, -20, 50)
|
||||||
|
|
||||||
|
return (
|
||||||
|
_to_cpu(ci_ratio),
|
||||||
|
_to_cpu(best_server_idx).astype(np.int32),
|
||||||
|
_to_cpu(best_rsrp),
|
||||||
|
)
|
||||||
|
|
||||||
|
def calculate_interference_vectorized(
|
||||||
|
self,
|
||||||
|
rsrp_grids: list,
|
||||||
|
frequencies: list,
|
||||||
|
) -> tuple:
|
||||||
|
"""Fully vectorized C/I calculation (faster for GPU).
|
||||||
|
|
||||||
|
Same as calculate_interference but avoids Python loops.
|
||||||
|
"""
|
||||||
|
_xp = gpu_manager.get_array_module()
|
||||||
|
|
||||||
|
if len(rsrp_grids) < 2:
|
||||||
|
if rsrp_grids:
|
||||||
|
n_points = len(rsrp_grids[0])
|
||||||
|
return (
|
||||||
|
np.full(n_points, 50.0, dtype=np.float64),
|
||||||
|
np.zeros(n_points, dtype=np.int32),
|
||||||
|
np.array(rsrp_grids[0], dtype=np.float64),
|
||||||
|
)
|
||||||
|
return np.array([]), np.array([]), np.array([])
|
||||||
|
|
||||||
|
# Stack RSRP grids: shape (num_sites, num_points)
|
||||||
|
rsrp_stack = _xp.stack([_xp.asarray(g, dtype=_xp.float64) for g in rsrp_grids], axis=0)
|
||||||
|
num_sites, num_points = rsrp_stack.shape
|
||||||
|
|
||||||
|
# Convert to linear power (mW)
|
||||||
|
rsrp_linear = _xp.power(10.0, rsrp_stack / 10.0)
|
||||||
|
|
||||||
|
# Best server per point
|
||||||
|
best_server_idx = _xp.argmax(rsrp_stack, axis=0)
|
||||||
|
best_rsrp = _xp.take_along_axis(rsrp_stack, best_server_idx[_xp.newaxis, :], axis=0)[0]
|
||||||
|
best_rsrp_linear = _xp.take_along_axis(rsrp_linear, best_server_idx[_xp.newaxis, :], axis=0)[0]
|
||||||
|
|
||||||
|
# Create frequency match matrix: (num_sites, num_sites)
|
||||||
|
freq_array = _xp.asarray(frequencies, dtype=_xp.float64)
|
||||||
|
freq_match = freq_array[:, _xp.newaxis] == freq_array[_xp.newaxis, :]
|
||||||
|
|
||||||
|
# Total power from all sites
|
||||||
|
total_power = _xp.sum(rsrp_linear, axis=0)
|
||||||
|
|
||||||
|
# For simplified calculation (all sites same frequency):
|
||||||
|
# Interference = total - serving
|
||||||
|
interference_linear = total_power - best_rsrp_linear
|
||||||
|
|
||||||
|
# C/I ratio in dB
|
||||||
|
epsilon = 1e-30
|
||||||
|
ci_ratio = 10 * _xp.log10(best_rsrp_linear / (interference_linear + epsilon))
|
||||||
|
|
||||||
|
# Clip to reasonable range
|
||||||
|
ci_ratio = _xp.clip(ci_ratio, -20, 50)
|
||||||
|
|
||||||
|
return (
|
||||||
|
_to_cpu(ci_ratio),
|
||||||
|
_to_cpu(best_server_idx).astype(np.int32),
|
||||||
|
_to_cpu(best_rsrp),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
# Singleton: import `gpu_service` from this module; the class keeps no state,
# but a single shared instance matches the rest of the service layer.
gpu_service = GPUService()
|
||||||
82
backend/app/services/indoor_service.py
Normal file
82
backend/app/services/indoor_service.py
Normal file
@@ -0,0 +1,82 @@
|
|||||||
|
import math
|
||||||
|
|
||||||
|
|
||||||
|
class IndoorService:
    """ITU-R P.2109 building entry loss model"""

    # Building Entry Loss (BEL) by construction type at 2 GHz.
    # Format: (median_loss_dB, std_dev_dB)
    BUILDING_TYPES = {
        "none": (0.0, 0.0),        # Outdoor only
        "light": (8.0, 4.0),       # Wood frame, large windows
        "medium": (15.0, 6.0),     # Brick, standard windows
        "heavy": (22.0, 8.0),      # Concrete, small windows
        "basement": (30.0, 10.0),  # Underground
        "vehicle": (6.0, 3.0),     # Inside car
        "train": (20.0, 5.0),      # Inside train
    }

    # Extra loss in dB for each octave of frequency above 2 GHz.
    FREQ_CORRECTION = 2.5

    def calculate_indoor_loss(
        self,
        frequency_mhz: float,
        building_type: str = "medium",
        floor_number: int = 0,
        depth_m: float = 0.0,
    ) -> float:
        """
        Calculate building entry/indoor penetration loss.

        Args:
            frequency_mhz: Frequency in MHz
            building_type: Type of building construction
            floor_number: Floor number (0=ground, negative=basement)
            depth_m: Distance from exterior wall in meters

        Returns:
            Loss in dB, clamped to the 0-50 dB range
        """
        # Outdoor receivers incur no entry loss regardless of other inputs.
        if building_type == "none":
            return 0.0

        # Unknown construction types fall back to the "medium" figures.
        median_loss = self.BUILDING_TYPES.get(building_type, (15.0, 6.0))[0]

        # Above 2 GHz, loss grows by FREQ_CORRECTION dB per octave.
        freq_ghz = frequency_mhz / 1000
        if freq_ghz > 2.0:
            spectral_adj = self.FREQ_CORRECTION * math.log2(freq_ghz / 2.0)
        else:
            spectral_adj = 0.0

        # Upper floors see the transmitter at a better angle (benefit
        # saturates after 10 floors); basements add 5 dB per level down.
        if floor_number > 0:
            floor_adj = -1.5 * min(floor_number, 10)
        elif floor_number < 0:
            floor_adj = 5.0 * abs(floor_number)
        else:
            floor_adj = 0.0

        # In-building attenuation: ~0.5 dB per meter, saturating at 20 m.
        depth_adj = 0.5 * min(depth_m, 20)

        combined = median_loss + spectral_adj + floor_adj + depth_adj
        return max(0.0, min(combined, 50.0))  # Clamp 0-50 dB

    def calculate_outdoor_to_indoor_coverage(
        self,
        outdoor_rsrp: float,
        building_type: str,
        frequency_mhz: float,
    ) -> float:
        """Expected indoor RSRP (dBm) given the outdoor RSRP at the facade."""
        return outdoor_rsrp - self.calculate_indoor_loss(frequency_mhz, building_type)
|
||||||
|
|
||||||
|
|
||||||
|
# Module-level singleton shared by importers.
indoor_service = IndoorService()
|
||||||
253
backend/app/services/los_service.py
Normal file
253
backend/app/services/los_service.py
Normal file
@@ -0,0 +1,253 @@
|
|||||||
|
import numpy as np
|
||||||
|
from typing import Tuple, List
|
||||||
|
from app.services.terrain_service import terrain_service, TerrainService
|
||||||
|
|
||||||
|
|
||||||
|
class LineOfSightService:
|
||||||
|
"""
|
||||||
|
Line-of-Sight calculations with terrain
|
||||||
|
"""
|
||||||
|
|
||||||
|
# Mean Earth radius.
EARTH_RADIUS = 6371000  # meters
# Effective-Earth k-factor modeling standard atmospheric refraction.
K_FACTOR = 4 / 3  # Standard atmospheric refraction
|
||||||
|
|
||||||
|
def __init__(self, terrain: "TerrainService | None" = None):
    """Create the service with an optional terrain provider.

    Falls back to the module-level terrain_service singleton when no
    provider is injected (useful for testing with a stub).
    """
    self.terrain = terrain or terrain_service
|
||||||
|
|
||||||
|
async def check_line_of_sight(
    self,
    tx_lat: float, tx_lon: float, tx_height: float,
    rx_lat: float, rx_lon: float, rx_height: float = 1.5,
    num_samples: int = 50
) -> dict:
    """
    Check line-of-sight between transmitter and receiver.

    Args:
        tx_lat, tx_lon: Transmitter coordinates
        tx_height: Transmitter antenna height above ground (meters)
        rx_lat, rx_lon: Receiver coordinates
        rx_height: Receiver height above ground (meters), default 1.5m (person)
        num_samples: Number of points to sample along path

    Returns:
        {
            "has_los": bool,
            "clearance": float,  # minimum clearance in meters (negative = blocked)
            "blocked_at": float | None,  # distance of deepest blockage (meters)
            "profile": [...]  # elevation profile annotated with LOS line
        }
    """
    # Get elevation profile
    profile = await self.terrain.get_elevation_profile(
        tx_lat, tx_lon, rx_lat, rx_lon, num_samples
    )

    # No terrain data: optimistically assume clear LOS.
    if not profile:
        return {"has_los": True, "clearance": 0, "blocked_at": None, "profile": []}

    # Absolute endpoint heights = ground elevation + antenna height.
    tx_ground = profile[0]["elevation"]
    rx_ground = profile[-1]["elevation"]

    tx_total = tx_ground + tx_height
    rx_total = rx_ground + rx_height

    total_distance = profile[-1]["distance"]

    # Loop-invariant: effective Earth radius under standard refraction
    # (k = 4/3), hoisted out of the per-point loop.
    effective_radius = self.K_FACTOR * self.EARTH_RADIUS

    min_clearance = float('inf')
    blocked_at = None

    # Check each point along path
    for point in profile:
        d = point["distance"]
        terrain_elev = point["elevation"]

        if total_distance == 0:
            los_height = tx_total
        else:
            # Linear interpolation of LOS line
            los_height = tx_total + (rx_total - tx_total) * (d / total_distance)

        # Earth curvature correction: bulge of the effective Earth between
        # the endpoints at distance d (maximal mid-path).
        curvature = (d * (total_distance - d)) / (2 * effective_radius)

        # LOS height after curvature correction
        los_height_corrected = los_height - curvature

        # Clearance at this point
        clearance = los_height_corrected - terrain_elev

        # Annotate the profile for visualization.
        point["los_height"] = los_height_corrected
        point["clearance"] = clearance

        if clearance < min_clearance:
            min_clearance = clearance
            if clearance <= 0:
                blocked_at = d

    has_los = min_clearance > 0

    return {
        "has_los": has_los,
        "clearance": min_clearance,
        "blocked_at": blocked_at,
        "profile": profile
    }
|
||||||
|
|
||||||
|
def check_line_of_sight_sync(
    self,
    tx_lat: float, tx_lon: float, tx_height: float,
    rx_lat: float, rx_lon: float, rx_height: float = 1.5,
    num_samples: int = 50
) -> dict:
    """
    Synchronous LOS check - terrain tiles must be pre-loaded into memory.

    Walks the sampled terrain profile between TX and RX, compares every
    sample against the (curvature-corrected) straight ray between the two
    antennas, and reports the worst clearance found.

    Returns dict with has_los, clearance, blocked_at (no profile for speed).
    """
    profile = self.terrain.get_elevation_profile_sync(
        tx_lat, tx_lon, rx_lat, rx_lon, num_samples
    )

    # No terrain data available -> optimistically report a clear path.
    if not profile:
        return {"has_los": True, "clearance": 0, "blocked_at": None}

    # Antenna tip heights (ground elevation + mast height) at both ends.
    start_elev = profile[0]["elevation"] + tx_height
    end_elev = profile[-1]["elevation"] + rx_height
    span = profile[-1]["distance"]

    # Effective Earth radius models atmospheric refraction (K-factor).
    effective_radius = self.K_FACTOR * self.EARTH_RADIUS

    worst = float('inf')
    blocked_at = None

    for sample in profile:
        d = sample["distance"]

        # Ray height at distance d (linear interpolation between antenna
        # tips), guarded against a degenerate zero-length path.
        if span == 0:
            ray = start_elev
        else:
            ray = start_elev + (end_elev - start_elev) * (d / span)

        # Earth-bulge correction lowers the ray relative to the terrain.
        bulge = (d * (span - d)) / (2 * effective_radius)
        clearance = (ray - bulge) - sample["elevation"]

        if clearance < worst:
            worst = clearance
            # blocked_at tracks the distance of the worst obstruction.
            if clearance <= 0:
                blocked_at = d

    return {
        "has_los": worst > 0,
        "clearance": worst,
        "blocked_at": blocked_at,
    }
|
||||||
|
|
||||||
|
async def calculate_fresnel_clearance(
    self,
    tx_lat: float, tx_lon: float, tx_height: float,
    rx_lat: float, rx_lon: float, rx_height: float,
    frequency_mhz: float,
    num_samples: int = 50
) -> dict:
    """
    Evaluate first-Fresnel-zone clearance along the TX->RX path.

    A link is considered adequate when the terrain stays below 60% of the
    first Fresnel zone radius at every sampled point.

    Returns:
        {
            "clearance_percent": float,  # worst-case clearance as % of required
            "has_adequate_clearance": bool,  # >= 60%
            "worst_point_distance": float,
            "fresnel_profile": [...]
        }
    """
    profile = await self.terrain.get_elevation_profile(
        tx_lat, tx_lon, rx_lat, rx_lon, num_samples
    )

    # Without terrain data, assume the path is clear.
    if not profile:
        return {
            "clearance_percent": 100.0,
            "has_adequate_clearance": True,
            "worst_point_distance": 0,
            "fresnel_profile": []
        }

    start_elev = profile[0]["elevation"] + tx_height
    end_elev = profile[-1]["elevation"] + rx_height
    span = profile[-1]["distance"]

    # Degenerate (zero-length) path: nothing can obstruct it.
    if span <= 0:
        return {
            "clearance_percent": 100.0,
            "has_adequate_clearance": True,
            "worst_point_distance": 0,
            "fresnel_profile": profile
        }

    # Wavelength (lambda = c / f), with c expressed as 300 m*MHz.
    wavelength = 300.0 / frequency_mhz  # meters

    worst_pct = 100.0
    worst_at = 0.0

    for sample in profile:
        d = sample["distance"]

        # The Fresnel radius is zero at the endpoints; skip them.
        if d <= 0 or d >= span:
            continue

        # LOS ray height at this point (linear interpolation).
        ray = start_elev + (end_elev - start_elev) * (d / span)

        # First Fresnel zone radius: sqrt(lambda * d1 * d2 / D).
        radius = float(np.sqrt((wavelength * d * (span - d)) / span))

        # 60% of the first zone is the usual engineering requirement.
        required = 0.6 * radius
        actual = ray - sample["elevation"]

        if required > 0:
            pct = (actual / required) * 100
        else:
            pct = 100.0

        # Annotate the profile point for downstream visualization.
        sample["fresnel_radius"] = radius
        sample["required_clearance"] = required
        sample["clearance_percent"] = pct

        if pct < worst_pct:
            worst_pct = pct
            worst_at = d

    return {
        "clearance_percent": float(worst_pct),
        "has_adequate_clearance": worst_pct >= 60.0,
        "worst_point_distance": float(worst_at),
        "fresnel_profile": profile
    }
|
||||||
|
|
||||||
|
|
||||||
|
# Singleton instance shared across the app (created at import time).
los_service = LineOfSightService()
|
||||||
128
backend/app/services/materials_service.py
Normal file
128
backend/app/services/materials_service.py
Normal file
@@ -0,0 +1,128 @@
|
|||||||
|
import math
|
||||||
|
from enum import Enum
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
|
||||||
|
class BuildingMaterial(Enum):
    """Closed set of building materials with known RF properties."""
    CONCRETE = "concrete"
    BRICK = "brick"
    GLASS = "glass"
    WOOD = "wood"
    METAL = "metal"
    STONE = "stone"
    PLASTER = "plaster"
    UNKNOWN = "unknown"


# ITU-R P.2040 based attenuation (dB per wall at 1-3 GHz)
MATERIAL_LOSS = {
    BuildingMaterial.CONCRETE: 15.0,
    BuildingMaterial.BRICK: 10.0,
    BuildingMaterial.GLASS: 3.0,
    BuildingMaterial.WOOD: 5.0,
    BuildingMaterial.METAL: 25.0,  # Or full reflection
    BuildingMaterial.STONE: 12.0,
    BuildingMaterial.PLASTER: 4.0,
    BuildingMaterial.UNKNOWN: 10.0,  # Default assumption
}

# Reflection coefficient (0-1, portion of signal reflected)
MATERIAL_REFLECTION = {
    BuildingMaterial.CONCRETE: 0.6,
    BuildingMaterial.BRICK: 0.5,
    BuildingMaterial.GLASS: 0.3,
    BuildingMaterial.WOOD: 0.2,
    BuildingMaterial.METAL: 0.9,
    BuildingMaterial.STONE: 0.55,
    BuildingMaterial.PLASTER: 0.3,
    BuildingMaterial.UNKNOWN: 0.4,
}


class MaterialsService:
    """Building material detection (from OSM tags) and RF properties."""

    # OSM building:material tag values -> material enum
    OSM_MATERIAL_MAP = {
        "concrete": BuildingMaterial.CONCRETE,
        "brick": BuildingMaterial.BRICK,
        "glass": BuildingMaterial.GLASS,
        "wood": BuildingMaterial.WOOD,
        "metal": BuildingMaterial.METAL,
        "steel": BuildingMaterial.METAL,
        "stone": BuildingMaterial.STONE,
        "plaster": BuildingMaterial.PLASTER,
        "cement_block": BuildingMaterial.CONCRETE,
        "timber": BuildingMaterial.WOOD,
    }

    # Fallback guess when no explicit material tag exists
    BUILDING_TYPE_MATERIAL = {
        "industrial": BuildingMaterial.METAL,
        "warehouse": BuildingMaterial.METAL,
        "garage": BuildingMaterial.METAL,
        "shed": BuildingMaterial.WOOD,
        "house": BuildingMaterial.BRICK,
        "residential": BuildingMaterial.CONCRETE,
        "apartments": BuildingMaterial.CONCRETE,
        "commercial": BuildingMaterial.GLASS,  # Often glass facades
        "office": BuildingMaterial.GLASS,
        "retail": BuildingMaterial.GLASS,
        "church": BuildingMaterial.STONE,
        "cathedral": BuildingMaterial.STONE,
        "school": BuildingMaterial.BRICK,
        "hospital": BuildingMaterial.CONCRETE,
        "university": BuildingMaterial.CONCRETE,
    }

    def detect_material(self, building_tags: dict) -> BuildingMaterial:
        """Infer a building's wall material from its OSM tags.

        Checks explicit material tags first (wall, then facade — the
        facade material is often the RF-relevant one), then falls back
        to a per-building-type default, and finally to UNKNOWN.
        """
        # Explicit material tags, in order of preference.
        for tag in ("building:material", "building:facade:material"):
            if tag in building_tags:
                material = self.OSM_MATERIAL_MAP.get(building_tags[tag].lower())
                if material is not None:
                    return material

        # No explicit tag: guess from the building type.
        kind = building_tags.get("building", "yes").lower()
        return self.BUILDING_TYPE_MATERIAL.get(kind, BuildingMaterial.UNKNOWN)

    def get_penetration_loss(self, material: BuildingMaterial, frequency_mhz: float = 1800) -> float:
        """
        RF penetration loss through one wall, in dB.

        Adds a simplified linear correction of +2 dB per GHz above 1 GHz
        on top of the tabulated base loss.
        """
        correction = max(0, (frequency_mhz - 1000) / 1000) * 2
        return MATERIAL_LOSS[material] + correction

    def get_reflection_coefficient(self, material: BuildingMaterial) -> float:
        """Fraction of incident power reflected by the material (0-1)."""
        return MATERIAL_REFLECTION[material]

    def get_reflection_loss(self, material: BuildingMaterial) -> float:
        """Loss incurred on reflection, in dB: -10*log10(coefficient)."""
        reflectivity = MATERIAL_REFLECTION[material]
        # A zero coefficient would be -inf dB; cap it at a large loss.
        if reflectivity <= 0:
            return 30.0  # Effectively no reflection
        return -10 * math.log10(reflectivity)
|
||||||
|
|
||||||
|
|
||||||
|
# Module-level singleton (the service is stateless; one instance suffices).
materials_service = MaterialsService()
|
||||||
167
backend/app/services/osm_client.py
Normal file
167
backend/app/services/osm_client.py
Normal file
@@ -0,0 +1,167 @@
|
|||||||
|
"""
|
||||||
|
Dedicated OpenStreetMap Overpass API client.
|
||||||
|
|
||||||
|
Handles:
|
||||||
|
- Building footprint queries
|
||||||
|
- Vegetation area queries
|
||||||
|
- Water body queries
|
||||||
|
- Response parsing and error handling
|
||||||
|
- Rate limiting (Overpass requires courtesy)
|
||||||
|
"""
|
||||||
|
|
||||||
|
import time
|
||||||
|
import asyncio
|
||||||
|
from typing import List, Optional, Dict, Any
|
||||||
|
|
||||||
|
import httpx
|
||||||
|
|
||||||
|
|
||||||
|
# Overpass API endpoints (primary + community mirror); tried in order,
# with failover to the next entry on errors and HTTP 429 responses.
OVERPASS_ENDPOINTS = [
    "https://overpass-api.de/api/interpreter",
    "https://overpass.kumi.systems/api/interpreter",
]

# Minimum seconds between requests to same endpoint (Overpass asks
# clients to self-throttle as a courtesy).
RATE_LIMIT_SECONDS = 1.0
|
||||||
|
|
||||||
|
|
||||||
|
class OSMClient:
    """
    OpenStreetMap Overpass API client with rate limiting
    and automatic failover between endpoints.
    """

    def __init__(self, timeout: float = 60.0):
        # Per-request HTTP timeout, seconds.
        self.timeout = timeout
        # monotonic() timestamp of the last request (0 = never).
        self._last_request_time: float = 0
        # Index of the endpoint that last succeeded; queries start here.
        self._current_endpoint = 0

    async def _rate_limit(self):
        """Sleep just long enough to honour RATE_LIMIT_SECONDS between requests."""
        remaining = RATE_LIMIT_SECONDS - (time.monotonic() - self._last_request_time)
        if remaining > 0:
            await asyncio.sleep(remaining)
        self._last_request_time = time.monotonic()

    async def query(self, overpass_ql: str) -> Optional[Dict[str, Any]]:
        """
        Execute an Overpass QL query with automatic failover.

        Rotates through OVERPASS_ENDPOINTS starting at the last endpoint
        that worked; timeouts, HTTP errors and 429s move on to the next
        mirror.

        Returns parsed JSON response or None on failure.
        """
        await self._rate_limit()

        n = len(OVERPASS_ENDPOINTS)
        for offset in range(n):
            idx = (self._current_endpoint + offset) % n
            endpoint = OVERPASS_ENDPOINTS[idx]

            try:
                async with httpx.AsyncClient(timeout=self.timeout) as client:
                    response = await client.post(
                        endpoint,
                        data={"data": overpass_ql},
                    )

                if response.status_code == 429:
                    # Rate limited — try next endpoint
                    print(f"[OSM] Rate limited by {endpoint}, trying next...")
                    continue

                response.raise_for_status()
                # Remember the working endpoint for subsequent calls.
                self._current_endpoint = idx
                return response.json()

            except httpx.TimeoutException:
                print(f"[OSM] Timeout from {endpoint}")
            except httpx.HTTPStatusError as e:
                print(f"[OSM] HTTP error from {endpoint}: {e.response.status_code}")
            except Exception as e:
                print(f"[OSM] Error from {endpoint}: {e}")

        print("[OSM] All endpoints failed")
        return None

    async def fetch_buildings(
        self,
        min_lat: float, min_lon: float,
        max_lat: float, max_lon: float,
    ) -> List[Dict[str, Any]]:
        """
        Fetch building footprints in a bounding box.

        Returns list of raw OSM elements (ways and relations).
        """
        query = f"""
[out:json][timeout:30];
(
way["building"]({min_lat},{min_lon},{max_lat},{max_lon});
relation["building"]({min_lat},{min_lon},{max_lat},{max_lon});
);
out body;
>;
out skel qt;
"""

        data = await self.query(query)
        return [] if data is None else data.get("elements", [])

    async def fetch_vegetation(
        self,
        min_lat: float, min_lon: float,
        max_lat: float, max_lon: float,
    ) -> List[Dict[str, Any]]:
        """Fetch vegetation areas (forests, parks, etc.)."""
        query = f"""
[out:json][timeout:30];
(
way["natural"="wood"]({min_lat},{min_lon},{max_lat},{max_lon});
way["landuse"="forest"]({min_lat},{min_lon},{max_lat},{max_lon});
way["natural"="tree_row"]({min_lat},{min_lon},{max_lat},{max_lon});
relation["natural"="wood"]({min_lat},{min_lon},{max_lat},{max_lon});
relation["landuse"="forest"]({min_lat},{min_lon},{max_lat},{max_lon});
);
out body;
>;
out skel qt;
"""

        data = await self.query(query)
        return [] if data is None else data.get("elements", [])

    async def fetch_water(
        self,
        min_lat: float, min_lon: float,
        max_lat: float, max_lon: float,
    ) -> List[Dict[str, Any]]:
        """Fetch water bodies (rivers, lakes, etc.)."""
        query = f"""
[out:json][timeout:30];
(
way["natural"="water"]({min_lat},{min_lon},{max_lat},{max_lon});
way["waterway"]({min_lat},{min_lon},{max_lat},{max_lon});
relation["natural"="water"]({min_lat},{min_lon},{max_lat},{max_lon});
);
out body;
>;
out skel qt;
"""

        data = await self.query(query)
        return [] if data is None else data.get("elements", [])
|
||||||
|
|
||||||
|
|
||||||
|
# Singleton — reused so rate-limit timing and endpoint-failover state
# persist across calls.
osm_client = OSMClient()
|
||||||
1165
backend/app/services/parallel_coverage_service.py
Normal file
1165
backend/app/services/parallel_coverage_service.py
Normal file
File diff suppressed because it is too large
Load Diff
309
backend/app/services/reflection_service.py
Normal file
309
backend/app/services/reflection_service.py
Normal file
@@ -0,0 +1,309 @@
|
|||||||
|
import numpy as np
|
||||||
|
from typing import List, Tuple, Optional
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from app.services.buildings_service import Building
|
||||||
|
from app.services.materials_service import materials_service
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class ReflectionPath:
    """A reflection path with one or more bounces."""
    points: List[Tuple[float, float]]  # [TX, reflection1, reflection2, ..., RX], each (lat, lon)
    total_distance: float  # full bounced path length, meters
    total_loss: float  # free-space + reflection losses, dB
    reflection_count: int  # number of bounces along the path
    materials: List[str]  # material per bounce (e.g. "concrete", "ground", "water")
|
||||||
|
|
||||||
|
|
||||||
|
class ReflectionService:
    """
    Calculate reflection paths for RF propagation.

    - Single bounce off building walls (most common)
    - Double bounce (around corners) — reserved via MAX_BOUNCES
    - Ground reflection
    - Water surface reflection
    """

    MAX_BOUNCES = 2
    GROUND_REFLECTION_COEFF = 0.3  # Depends on surface

    # Ground types and reflection coefficients
    GROUND_REFLECTION = {
        "urban": 0.3,
        "suburban": 0.4,
        "rural": 0.5,
        "water": 0.8,
        "desert": 0.6,
    }

    async def find_reflection_paths(
        self,
        tx_lat: float, tx_lon: float, tx_height: float,
        rx_lat: float, rx_lon: float, rx_height: float,
        frequency_mhz: float,
        buildings: "List[Building]",
        include_ground: bool = True
    ) -> "List[ReflectionPath]":
        """Find all viable reflection paths (async facade).

        Pure computation — delegates to the sync implementation so the
        two code paths cannot drift apart.
        """
        return self.find_reflection_paths_sync(
            tx_lat, tx_lon, tx_height,
            rx_lat, rx_lon, rx_height,
            frequency_mhz, buildings, include_ground
        )

    def find_reflection_paths_sync(
        self,
        tx_lat: float, tx_lon: float, tx_height: float,
        rx_lat: float, rx_lon: float, rx_height: float,
        frequency_mhz: float,
        buildings: "List[Building]",
        include_ground: bool = True
    ) -> "List[ReflectionPath]":
        """Find viable reflection paths (synchronous implementation).

        Collects at most one specular bounce per building (first viable
        wall) plus an optional ground bounce, then returns the five
        lowest-loss paths, strongest first.
        """
        paths = []

        # Single-bounce building reflections
        for building in buildings:
            path = self._find_single_bounce(
                tx_lat, tx_lon, tx_height,
                rx_lat, rx_lon, rx_height,
                frequency_mhz, building
            )
            if path:
                paths.append(path)

        # Ground reflection
        if include_ground:
            ground_path = self._calculate_ground_reflection(
                tx_lat, tx_lon, tx_height,
                rx_lat, rx_lon, rx_height,
                frequency_mhz
            )
            if ground_path:
                paths.append(ground_path)

        # Sort by loss (best first) and cap the result set.
        paths.sort(key=lambda p: p.total_loss)
        return paths[:5]

    def _find_single_bounce(
        self,
        tx_lat, tx_lon, tx_height,
        rx_lat, rx_lon, rx_height,
        frequency_mhz,
        building: "Building"
    ) -> "Optional[ReflectionPath]":
        """Find the first viable single-bounce reflection off a building.

        Mirrors TX across each wall segment (image method) and checks
        whether the mirror->RX ray crosses the physical wall. Heights
        are not used in the planar specular test — a known simplification.
        """
        from app.services.terrain_service import TerrainService

        geometry = building.geometry

        for i in range(len(geometry) - 1):
            wall_start = geometry[i]
            wall_end = geometry[i + 1]

            # Planar math uses (lon, lat) = (x, y) ordering.
            ref_point = self._specular_reflection_point(
                (tx_lon, tx_lat), (rx_lon, rx_lat),
                wall_start, wall_end
            )

            if not ref_point:
                continue

            ref_lat, ref_lon = ref_point[1], ref_point[0]

            # Leg lengths TX->wall and wall->RX.
            d1 = TerrainService.haversine_distance(tx_lat, tx_lon, ref_lat, ref_lon)
            d2 = TerrainService.haversine_distance(ref_lat, ref_lon, rx_lat, rx_lon)
            total_dist = d1 + d2

            # Discard detours much longer than the direct path — too weak
            # to matter in the combined signal.
            direct_dist = TerrainService.haversine_distance(tx_lat, tx_lon, rx_lat, rx_lon)
            if total_dist > direct_dist * 1.5:
                continue

            # Free-space loss over the bounced path plus the
            # material-dependent reflection loss.
            path_loss = self._free_space_loss(total_dist, frequency_mhz)
            material = materials_service.detect_material(building.tags)
            reflection_loss = materials_service.get_reflection_loss(material)

            return ReflectionPath(
                points=[(tx_lat, tx_lon), (ref_lat, ref_lon), (rx_lat, rx_lon)],
                total_distance=total_dist,
                total_loss=path_loss + reflection_loss,
                reflection_count=1,
                materials=[material.value]
            )

        return None

    def _calculate_ground_reflection(
        self,
        tx_lat, tx_lon, tx_height,
        rx_lat, rx_lon, rx_height,
        frequency_mhz,
        is_water: bool = False
    ) -> "Optional[ReflectionPath]":
        """Calculate the ground/water bounce path.

        Simplified flat-terrain model: the bounce is placed at the
        geographic midpoint between TX and RX.
        """
        from app.services.terrain_service import TerrainService

        mid_lat = (tx_lat + rx_lat) / 2
        mid_lon = (tx_lon + rx_lon) / 2

        # Horizontal leg lengths.
        d1 = TerrainService.haversine_distance(tx_lat, tx_lon, mid_lat, mid_lon)
        d2 = TerrainService.haversine_distance(mid_lat, mid_lon, rx_lat, rx_lon)

        # Slant path lengths including antenna heights.
        path1 = np.sqrt(d1**2 + tx_height**2)
        path2 = np.sqrt(d2**2 + rx_height**2)
        total_dist = path1 + path2

        path_loss = self._free_space_loss(total_dist, frequency_mhz)

        # Water is far more reflective than soil.
        coeff = self.GROUND_REFLECTION.get("water" if is_water else "rural", 0.4)
        reflection_loss = -10 * np.log10(coeff)

        surface_type = "water" if is_water else "ground"

        return ReflectionPath(
            points=[(tx_lat, tx_lon), (mid_lat, mid_lon), (rx_lat, rx_lon)],
            total_distance=total_dist,
            total_loss=path_loss + reflection_loss,
            reflection_count=1,
            materials=[surface_type]
        )

    def _specular_reflection_point(
        self,
        tx: Tuple[float, float],  # (lon, lat)
        rx: Tuple[float, float],
        wall_start: List[float],  # [lon, lat]
        wall_end: List[float]
    ) -> Optional[Tuple[float, float]]:
        """Calculate the specular reflection point on a wall segment.

        Image method: mirror TX across the wall's infinite line, then
        intersect the mirror->RX ray with the wall. Returns the (lon, lat)
        reflection point, or None when the geometric reflection misses
        the physical segment.
        """
        # Wall direction vector, normalized.
        wx = wall_end[0] - wall_start[0]
        wy = wall_end[1] - wall_start[1]
        wall_len = np.sqrt(wx**2 + wy**2)

        if wall_len < 1e-10:
            return None  # Degenerate (zero-length) wall

        wx /= wall_len
        wy /= wall_len

        # Wall normal (perpendicular).
        nx = -wy
        ny = wx

        # Signed distance from TX to the wall line.
        dist_to_wall = (tx[0] - wall_start[0]) * nx + (tx[1] - wall_start[1]) * ny

        # Mirror TX across the wall line.
        mirror_x = tx[0] - 2 * dist_to_wall * nx
        mirror_y = tx[1] - 2 * dist_to_wall * ny

        # Ray from the mirror image to RX.
        dx = rx[0] - mirror_x
        dy = rx[1] - mirror_y

        denom = dx * wy - dy * wx
        if abs(denom) < 1e-10:
            return None  # Ray parallel to the wall

        # u parametrizes the mirror->RX ray: 0 at mirror, 1 at RX.
        u = ((wall_start[0] - mirror_x) * wy - (wall_start[1] - mirror_y) * wx) / denom
        if not (0 <= u <= 1):
            return None

        ref_x = mirror_x + u * dx
        ref_y = mirror_y + u * dy

        # BUG FIX: the original compared a mis-signed wall parameter
        # against [0, 1] even though the wall direction is normalized,
        # which rejected genuine on-segment reflection points and
        # accepted points up to one degree off the segment. Project the
        # intersection onto the wall and compare against the actual
        # segment length instead.
        v = (ref_x - wall_start[0]) * wx + (ref_y - wall_start[1]) * wy
        if 0 <= v <= wall_len:
            return (ref_x, ref_y)

        return None

    def _free_space_loss(self, distance: float, frequency_mhz: float) -> float:
        """Free space path loss in dB.

        FSPL = 32.45 + 20*log10(f_MHz) + 20*log10(d_km); the +0.001 km
        guard keeps log10 finite at very short range.
        """
        if distance <= 0:
            distance = 1
        d_km = distance / 1000
        return 32.45 + 20 * np.log10(frequency_mhz) + 20 * np.log10(d_km + 0.001)

    def combine_paths(
        self,
        direct_power_dbm: float,
        reflection_paths: "List[ReflectionPath]",
        tx_power_dbm: float
    ) -> float:
        """
        Combine direct and reflected signals (incoherent power sum).

        Returns total received power in dBm; -150 dBm is treated as the
        "no usable signal" floor on both input and output.
        """
        powers = []

        if direct_power_dbm > -150:  # Valid direct signal
            powers.append(10 ** (direct_power_dbm / 10))

        for path in reflection_paths:
            reflected_power_dbm = tx_power_dbm - path.total_loss
            if reflected_power_dbm > -150:
                powers.append(10 ** (reflected_power_dbm / 10))

        if not powers:
            return -150.0  # No signal

        # Incoherent addition (ignores phase) — a conservative estimate.
        return 10 * np.log10(sum(powers))
|
||||||
|
|
||||||
|
|
||||||
|
# Module-level singleton (the service is stateless).
reflection_service = ReflectionService()
|
||||||
147
backend/app/services/spatial_index.py
Normal file
147
backend/app/services/spatial_index.py
Normal file
@@ -0,0 +1,147 @@
|
|||||||
|
"""
|
||||||
|
R-tree spatial index for fast building and geometry lookups.
|
||||||
|
|
||||||
|
Uses a simple grid-based approach (no external dependency) for
|
||||||
|
O(1) amortised lookups instead of O(n) linear scans.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import List, Tuple, Optional, Dict
|
||||||
|
from collections import defaultdict
|
||||||
|
from app.services.buildings_service import Building
|
||||||
|
|
||||||
|
|
||||||
|
class SpatialIndex:
|
||||||
|
"""Grid-based spatial index for fast building lookups"""
|
||||||
|
|
||||||
|
def __init__(self, cell_size: float = 0.001):
|
||||||
|
"""
|
||||||
|
Args:
|
||||||
|
cell_size: Grid cell size in degrees (~111m at equator)
|
||||||
|
"""
|
||||||
|
self.cell_size = cell_size
|
||||||
|
self._grid: Dict[Tuple[int, int], List[Building]] = defaultdict(list)
|
||||||
|
self._buildings: List[Building] = []
|
||||||
|
self._buildings_by_id: Dict[int, Building] = {}
|
||||||
|
|
||||||
|
def _cell_key(self, lat: float, lon: float) -> Tuple[int, int]:
|
||||||
|
"""Convert lat/lon to grid cell key"""
|
||||||
|
return (int(lat / self.cell_size), int(lon / self.cell_size))
|
||||||
|
|
||||||
|
def build(self, buildings: List[Building]):
|
||||||
|
"""Build spatial index from buildings list"""
|
||||||
|
self._grid.clear()
|
||||||
|
self._buildings = buildings
|
||||||
|
self._buildings_by_id = {b.id: b for b in buildings}
|
||||||
|
|
||||||
|
for building in buildings:
|
||||||
|
# Get bounding box of building
|
||||||
|
lons = [p[0] for p in building.geometry]
|
||||||
|
lats = [p[1] for p in building.geometry]
|
||||||
|
|
||||||
|
min_lon, max_lon = min(lons), max(lons)
|
||||||
|
min_lat, max_lat = min(lats), max(lats)
|
||||||
|
|
||||||
|
# Insert into all overlapping grid cells
|
||||||
|
min_cell_lat = int(min_lat / self.cell_size)
|
||||||
|
max_cell_lat = int(max_lat / self.cell_size)
|
||||||
|
min_cell_lon = int(min_lon / self.cell_size)
|
||||||
|
max_cell_lon = int(max_lon / self.cell_size)
|
||||||
|
|
||||||
|
for clat in range(min_cell_lat, max_cell_lat + 1):
|
||||||
|
for clon in range(min_cell_lon, max_cell_lon + 1):
|
||||||
|
self._grid[(clat, clon)].append(building)
|
||||||
|
|
||||||
|
def query_point(self, lat: float, lon: float, buffer_cells: int = 1) -> List[Building]:
|
||||||
|
"""Find buildings near a point"""
|
||||||
|
if not self._grid:
|
||||||
|
return self._buildings # Fallback to linear scan
|
||||||
|
|
||||||
|
center = self._cell_key(lat, lon)
|
||||||
|
results = set()
|
||||||
|
|
||||||
|
for dlat in range(-buffer_cells, buffer_cells + 1):
|
||||||
|
for dlon in range(-buffer_cells, buffer_cells + 1):
|
||||||
|
key = (center[0] + dlat, center[1] + dlon)
|
||||||
|
for b in self._grid.get(key, []):
|
||||||
|
results.add(b.id)
|
||||||
|
|
||||||
|
return [self._buildings_by_id[bid] for bid in results if bid in self._buildings_by_id]
|
||||||
|
|
||||||
|
def query_line(
|
||||||
|
self,
|
||||||
|
lat1: float, lon1: float,
|
||||||
|
lat2: float, lon2: float,
|
||||||
|
buffer_cells: int = 1
|
||||||
|
) -> List[Building]:
|
||||||
|
"""Find buildings along a line by walking the actual cells it passes through.
|
||||||
|
|
||||||
|
Samples points along the line at cell_size intervals and queries
|
||||||
|
a buffer around each sample — much faster than bounding-box scan
|
||||||
|
for long lines.
|
||||||
|
"""
|
||||||
|
if not self._grid:
|
||||||
|
return self._buildings
|
||||||
|
|
||||||
|
# Walk the line in cell_size steps, collecting unique cells
|
||||||
|
dlat = lat2 - lat1
|
||||||
|
dlon = lon2 - lon1
|
||||||
|
length = max(abs(dlat), abs(dlon))
|
||||||
|
num_steps = max(1, int(length / self.cell_size) + 1)
|
||||||
|
|
||||||
|
visited_cells: set = set()
|
||||||
|
for s in range(num_steps + 1):
|
||||||
|
t = s / num_steps
|
||||||
|
lat = lat1 + t * dlat
|
||||||
|
lon = lon1 + t * dlon
|
||||||
|
center = self._cell_key(lat, lon)
|
||||||
|
for dy in range(-buffer_cells, buffer_cells + 1):
|
||||||
|
for dx in range(-buffer_cells, buffer_cells + 1):
|
||||||
|
visited_cells.add((center[0] + dy, center[1] + dx))
|
||||||
|
|
||||||
|
results = set()
|
||||||
|
for key in visited_cells:
|
||||||
|
for b in self._grid.get(key, []):
|
||||||
|
results.add(b.id)
|
||||||
|
|
||||||
|
return [self._buildings_by_id[bid] for bid in results if bid in self._buildings_by_id]
|
||||||
|
|
||||||
|
def query_bbox(
    self,
    min_lat: float, min_lon: float,
    max_lat: float, max_lon: float
) -> List[Building]:
    """Find all buildings in bounding box"""
    if not self._grid:
        return self._buildings  # index not built: fall back to the full list

    # Cell ranges covering the box (inclusive on both ends).
    lat_cells = range(int(min_lat / self.cell_size), int(max_lat / self.cell_size) + 1)
    lon_cells = range(int(min_lon / self.cell_size), int(max_lon / self.cell_size) + 1)

    found_ids = {
        b.id
        for clat in lat_cells
        for clon in lon_cells
        for b in self._grid.get((clat, clon), [])
    }
    return [self._buildings_by_id[bid] for bid in found_ids if bid in self._buildings_by_id]
|
||||||
|
|
||||||
|
|
||||||
|
# Global cache of spatial indices, keyed by caller-supplied cache_key.
# Bounded to ~20 entries by FIFO eviction in get_spatial_index.
_spatial_indices: dict[str, SpatialIndex] = {}
|
||||||
|
|
||||||
|
|
||||||
|
def get_spatial_index(cache_key: str, buildings: List[Building]) -> SpatialIndex:
    """Get or create spatial index for buildings.

    Note: an existing entry for cache_key is returned as-is; the
    buildings argument is only used when the index is first built.
    """
    if cache_key not in _spatial_indices:
        index = SpatialIndex()
        index.build(buildings)
        _spatial_indices[cache_key] = index

        # Bound the cache: drop the oldest (first-inserted) entry past 20.
        while len(_spatial_indices) > 20:
            del _spatial_indices[next(iter(_spatial_indices))]

    return _spatial_indices[cache_key]
|
||||||
425
backend/app/services/street_canyon_service.py
Normal file
425
backend/app/services/street_canyon_service.py
Normal file
@@ -0,0 +1,425 @@
|
|||||||
|
import asyncio
|
||||||
|
import numpy as np
|
||||||
|
from typing import List, Tuple, Optional
|
||||||
|
from dataclasses import dataclass
|
||||||
|
import httpx
|
||||||
|
from pathlib import Path
|
||||||
|
import json
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class Street:
    """Street segment from OSM.

    One Overpass "way" with resolved node coordinates; width comes from
    the explicit tag, lanes * 3.5 m, or a per-type default (see
    StreetCanyonService._get_street_width).
    """
    id: int  # OSM way id
    name: Optional[str]  # "name" tag, if present
    geometry: List[Tuple[float, float]]  # [(lat, lon), ...]
    width: float  # meters
    highway_type: str  # residential, primary, secondary, etc.
|
||||||
|
|
||||||
|
|
||||||
|
class StreetCanyonService:
    """
    Street canyon propagation model (ITU-R P.1411)

    Signal propagates along streets with reflections from building walls.
    Loss increases at corners/turns.
    """

    # Overpass endpoints, rotated between retry attempts.
    OVERPASS_URLS = [
        "https://overpass-api.de/api/interpreter",
        "https://overpass.kumi.systems/api/interpreter",
    ]

    # Default street widths by type (meters), used when OSM tags give
    # neither an explicit width nor a lane count.
    STREET_WIDTHS = {
        "motorway": 25.0,
        "trunk": 20.0,
        "primary": 15.0,
        "secondary": 12.0,
        "tertiary": 10.0,
        "residential": 8.0,
        "unclassified": 6.0,
        "service": 5.0,
        "footway": 2.0,
        "path": 1.5,
    }

    # Corner/turn loss
    CORNER_LOSS_90 = 10.0  # dB for 90-degree turn
    CORNER_LOSS_45 = 4.0  # dB for 45-degree turn

    def __init__(self):
        import os
        self.data_path = Path(os.environ.get('RFCP_DATA_PATH', './data'))
        self.cache_dir = self.data_path / 'osm' / 'streets'
        self.cache_dir.mkdir(exist_ok=True, parents=True)
        # In-memory cache of parsed streets, keyed by bbox string.
        self._cache: dict[str, List[Street]] = {}

    async def fetch_streets(
        self,
        min_lat: float, min_lon: float,
        max_lat: float, max_lon: float
    ) -> List[Street]:
        """Fetch street network from OSM.

        Lookup order: in-memory cache, on-disk JSON cache, then the
        Overpass API (with retries across mirrors and exponential
        backoff). Returns [] when every fetch attempt fails.
        """
        cache_key = f"{min_lat:.2f}_{min_lon:.2f}_{max_lat:.2f}_{max_lon:.2f}"

        # Check memory cache
        if cache_key in self._cache:
            return self._cache[cache_key]

        # Check disk cache
        cache_file = self.cache_dir / f"{cache_key}.json"
        if cache_file.exists():
            try:
                with open(cache_file) as f:
                    data = json.load(f)
                streets = [Street(**s) for s in data]
                self._cache[cache_key] = streets
                print(f"[Streets] Cache hit for {cache_key}")
                return streets
            except Exception:
                # Corrupt/unreadable cache file: fall through and re-fetch.
                pass

        # Fetch from Overpass
        print(f"[Streets] Fetching from Overpass API for {cache_key}...")

        query = f"""
        [out:json][timeout:30];
        way["highway"]({min_lat},{min_lon},{max_lat},{max_lon});
        out body;
        >;
        out skel qt;
        """

        data = None
        max_retries = 3
        for attempt in range(max_retries):
            # Round-robin over mirrors so a dead endpoint doesn't block all retries.
            url = self.OVERPASS_URLS[attempt % len(self.OVERPASS_URLS)]
            try:
                # Progressively longer timeout on each retry.
                timeout = 60.0 * (attempt + 1)
                async with httpx.AsyncClient(timeout=timeout) as client:
                    response = await client.post(url, data={"data": query})
                    response.raise_for_status()
                    data = response.json()
                    break
            except Exception as e:
                print(f"[Streets] Overpass attempt {attempt + 1}/{max_retries} failed ({url}): {e}")
                if attempt < max_retries - 1:
                    await asyncio.sleep(2 ** attempt)  # exponential backoff
                else:
                    print(f"[Streets] All {max_retries} attempts failed")
                    return []

        streets = self._parse_streets(data)

        # Cache to disk
        if streets:
            with open(cache_file, 'w') as f:
                json.dump([{
                    "id": s.id,
                    "name": s.name,
                    "geometry": s.geometry,
                    "width": s.width,
                    "highway_type": s.highway_type
                } for s in streets], f)

        self._cache[cache_key] = streets
        return streets

    def _parse_streets(self, data: dict) -> List[Street]:
        """Parse Overpass response into Street objects."""
        # First pass: collect node coordinates by id.
        nodes = {}
        for element in data.get("elements", []):
            if element["type"] == "node":
                nodes[element["id"]] = (element["lat"], element["lon"])

        # Second pass: build Street objects from highway ways.
        streets = []
        for element in data.get("elements", []):
            if element["type"] != "way":
                continue

            tags = element.get("tags", {})
            if "highway" not in tags:
                continue

            highway_type = tags["highway"]

            # Skip non-road types
            if highway_type in ["bus_stop", "crossing", "traffic_signals"]:
                continue

            geometry = []
            for node_id in element.get("nodes", []):
                if node_id in nodes:
                    geometry.append(nodes[node_id])

            # Need at least two vertices to form a segment.
            if len(geometry) < 2:
                continue

            # Get width
            width = self._get_street_width(tags)

            streets.append(Street(
                id=element["id"],
                name=tags.get("name"),
                geometry=geometry,
                width=width,
                highway_type=highway_type
            ))

        return streets

    def _get_street_width(self, tags: dict) -> float:
        """Estimate street width (meters) from OSM tags.

        Priority: explicit "width" tag, then lanes * 3.5 m, then a
        default by highway type (8.0 m for unknown types).
        """
        # Explicit width
        if "width" in tags:
            try:
                return float(tags["width"].replace("m", "").strip())
            except (ValueError, TypeError):
                pass

        # Calculate from lanes
        if "lanes" in tags:
            try:
                lanes = int(tags["lanes"])
                return lanes * 3.5  # ~3.5m per lane
            except (ValueError, TypeError):
                pass

        # Default by highway type
        highway_type = tags.get("highway", "residential")
        return self.STREET_WIDTHS.get(highway_type, 8.0)

    def _accumulate_path_loss(
        self,
        street_path: List[Tuple[float, float]],
        frequency_mhz: float
    ) -> float:
        """Sum segment loss plus corner losses along a street path (dB).

        Shared by the async and sync entry points (they previously held
        two identical copies of this loop).
        """
        # Local import kept as in the original (presumably to avoid an
        # import cycle), but hoisted out of the per-segment loop where it
        # used to run on every iteration.
        from app.services.terrain_service import TerrainService

        total_loss = 0.0
        total_distance = 0.0

        for i in range(len(street_path) - 1):
            p1 = street_path[i]
            p2 = street_path[i + 1]

            # Segment distance
            segment_dist = TerrainService.haversine_distance(p1[0], p1[1], p2[0], p2[1])
            total_distance += segment_dist

            # Street canyon loss (ITU-R P.1411 simplified)
            # L = 32.4 + 20*log10(f_MHz) + 20*log10(d_km)
            if segment_dist > 0:
                segment_loss = 32.4 + 20 * np.log10(frequency_mhz) + 20 * np.log10(segment_dist / 1000 + 0.001)
                # Weight each segment's loss by its share of the distance so far.
                total_loss += segment_loss * (segment_dist / total_distance) if total_distance > 0 else 0

            # Corner loss at each interior waypoint
            if i > 0:
                corner_angle = self._calculate_corner_angle(
                    street_path[i - 1], p1, p2
                )
                total_loss += self._corner_loss(corner_angle)

        return total_loss

    async def calculate_street_canyon_loss(
        self,
        tx_lat: float, tx_lon: float, tx_height: float,
        rx_lat: float, rx_lon: float, rx_height: float,
        frequency_mhz: float,
        streets: List[Street]
    ) -> Tuple[float, List[Tuple[float, float]]]:
        """
        Calculate path loss through street canyon.

        tx_height / rx_height are accepted for interface compatibility
        but are not used by the current model.

        Returns:
            (path_loss_db, street_path as list of points);
            (inf, []) when no street path can be found.
        """
        # Find path along streets from TX to RX
        street_path = self._find_street_path(tx_lat, tx_lon, rx_lat, rx_lon, streets)

        if not street_path:
            return float('inf'), []  # No street path found

        return self._accumulate_path_loss(street_path, frequency_mhz), street_path

    def _find_street_path(
        self,
        start_lat: float, start_lon: float,
        end_lat: float, end_lon: float,
        streets: List[Street]
    ) -> List[Tuple[float, float]]:
        """
        Find path along streets (simplified greedy walk, not a true A*).

        Returns list of (lat, lon) waypoints, or [] when either endpoint
        is more than 200 m from any street.
        """
        # Find nearest street point to start and end
        start_point = self._nearest_street_point(start_lat, start_lon, streets)
        end_point = self._nearest_street_point(end_lat, end_lon, streets)

        if not start_point or not end_point:
            return []

        # Simplified: greedy hop between nearby street vertices.
        # Full implementation would use A* pathfinding.
        path = [(start_lat, start_lon), start_point]

        current = start_point
        visited = set()

        for _ in range(50):  # Max iterations
            if self._distance(current, end_point) < 50:  # Within 50m
                break

            # Find next street segment toward destination
            next_point = self._next_street_point(current, end_point, streets, visited)
            if not next_point:
                break

            path.append(next_point)
            # Mark the point we are leaving so the greedy walk can't loop back.
            visited.add((round(current[0], 5), round(current[1], 5)))
            current = next_point

        path.append(end_point)
        path.append((end_lat, end_lon))

        return path

    def _nearest_street_point(
        self,
        lat: float, lon: float,
        streets: List[Street]
    ) -> Optional[Tuple[float, float]]:
        """Find the nearest street vertex, or None if all are > 200 m away."""
        best_point = None
        best_dist = float('inf')

        for street in streets:
            for point in street.geometry:
                dist = self._distance((lat, lon), point)
                if dist < best_dist:
                    best_dist = dist
                    best_point = point

        return best_point if best_dist < 200 else None  # Max 200m to street

    def _next_street_point(
        self,
        current: Tuple[float, float],
        target: Tuple[float, float],
        streets: List[Street],
        visited: set
    ) -> Optional[Tuple[float, float]]:
        """Pick the next unvisited street vertex that moves toward target."""
        best_point = None
        best_score = float('inf')

        for street in streets:
            for i, point in enumerate(street.geometry):
                if (round(point[0], 5), round(point[1], 5)) in visited:
                    continue

                dist_from_current = self._distance(current, point)
                dist_to_target = self._distance(point, target)

                # Must be close to current position (connected-ish hop).
                if dist_from_current > 100:
                    continue

                # Score: prefer points closer to target, lightly penalize
                # long hops.
                score = dist_to_target + dist_from_current * 0.5

                if score < best_score:
                    best_score = score
                    best_point = point

        return best_point

    def _distance(self, p1: Tuple[float, float], p2: Tuple[float, float]) -> float:
        """Quick equirectangular distance approximation (meters)."""
        lat_diff = (p1[0] - p2[0]) * 111000
        lon_diff = (p1[1] - p2[1]) * 111000 * np.cos(np.radians(p1[0]))
        return np.sqrt(lat_diff**2 + lon_diff**2)

    def _calculate_corner_angle(
        self,
        p1: Tuple[float, float],
        p2: Tuple[float, float],
        p3: Tuple[float, float]
    ) -> float:
        """Angle at p2 between segments p2->p1 and p2->p3 (degrees).

        180 means the path continues straight; degenerate (zero-length)
        segments are treated as straight.
        """
        v1 = (p1[0] - p2[0], p1[1] - p2[1])
        v2 = (p3[0] - p2[0], p3[1] - p2[1])

        dot = v1[0] * v2[0] + v1[1] * v2[1]
        mag1 = np.sqrt(v1[0]**2 + v1[1]**2)
        mag2 = np.sqrt(v2[0]**2 + v2[1]**2)

        if mag1 * mag2 < 1e-10:
            return 180.0

        cos_angle = dot / (mag1 * mag2)
        # Clamp against floating-point drift before arccos.
        cos_angle = max(-1, min(1, cos_angle))

        return np.degrees(np.arccos(cos_angle))

    def _corner_loss(self, angle_degrees: float) -> float:
        """Extra loss (dB) due to a corner/turn in the street path.

        NOTE(review): the function is discontinuous at a 15° turn (jumps
        from 0 dB to ~1.3 dB) — confirm this matches the intended model.
        """
        # Straight = 180 deg, right angle = 90 deg
        turn_angle = abs(180 - angle_degrees)

        if turn_angle < 15:
            return 0.0
        elif turn_angle < 45:
            return self.CORNER_LOSS_45 * (turn_angle / 45)
        elif turn_angle < 90:
            # Linear blend between the 45° and 90° reference losses.
            return self.CORNER_LOSS_45 + (self.CORNER_LOSS_90 - self.CORNER_LOSS_45) * ((turn_angle - 45) / 45)
        else:
            return self.CORNER_LOSS_90 + (turn_angle - 90) * 0.2  # Extra loss for sharp turns

    def calculate_street_canyon_loss_sync(
        self,
        tx_lat: float, tx_lon: float, tx_height: float,
        rx_lat: float, rx_lon: float, rx_height: float,
        frequency_mhz: float,
        streets: List[Street]
    ) -> Tuple[float, List[Tuple[float, float]]]:
        """Sync version of calculate_street_canyon_loss (no I/O in the
        async original, so the computation is identical)."""
        street_path = self._find_street_path(tx_lat, tx_lon, rx_lat, rx_lon, streets)

        if not street_path:
            return float('inf'), []

        return self._accumulate_path_loss(street_path, frequency_mhz), street_path
|
||||||
|
|
||||||
|
|
||||||
|
# Module-level singleton shared by importers of this service.
street_canyon_service = StreetCanyonService()
|
||||||
488
backend/app/services/terrain_service.py
Normal file
488
backend/app/services/terrain_service.py
Normal file
@@ -0,0 +1,488 @@
|
|||||||
|
import os
|
||||||
|
import struct
|
||||||
|
import asyncio
|
||||||
|
import gzip
|
||||||
|
import zipfile
|
||||||
|
import io
|
||||||
|
import numpy as np
|
||||||
|
import httpx
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import List, Optional, Tuple
|
||||||
|
|
||||||
|
|
||||||
|
class TerrainService:
    """
    SRTM elevation data service with local caching.
    - Stores tiles in RFCP_DATA_PATH/terrain/
    - In-memory tile cache (max 20 tiles, FIFO eviction)
    - Auto-downloads from HTTP/S3 mirrors
    - Supports both SRTM1 (3601x3601) and SRTM3 (1201x1201)
    """

    # Download sources, tried in order by download_tile — first valid
    # response wins, so the list is ordered by preference (higher
    # resolution / uncompressed first).
    SRTM_SOURCES = [
        # Our tile server — SRTM1 (30m) preferred, uncompressed
        {
            "url": "https://terra.eliah.one/srtm1/{tile_name}.hgt",
            "compressed": False,
            "resolution": "srtm1",
        },
        # Our tile server — SRTM3 (90m) fallback
        {
            "url": "https://terra.eliah.one/srtm3/{tile_name}.hgt",
            "compressed": False,
            "resolution": "srtm3",
        },
        # Public AWS mirror — SRTM1, gzip compressed ({lat_dir} shards by latitude band)
        {
            "url": "https://elevation-tiles-prod.s3.amazonaws.com/skadi/{lat_dir}/{tile_name}.hgt.gz",
            "compressed": True,
            "resolution": "srtm1",
        },
    ]
|
||||||
|
|
||||||
|
def __init__(self):
    """Resolve data directories and set up the in-memory tile cache."""
    root = Path(os.environ.get('RFCP_DATA_PATH', './data'))
    self.data_path = root
    self.terrain_path = root / 'terrain'
    self.terrain_path.mkdir(parents=True, exist_ok=True)

    # Loaded tiles kept in memory; bounded to ~500MB worth of tiles.
    self._tile_cache: dict[str, np.ndarray] = {}
    self._max_cache_tiles = 20  # ~500MB max
|
||||||
|
|
||||||
|
def get_tile_name(self, lat: float, lon: float) -> str:
    """Convert lat/lon to SRTM tile name (e.g., N48E035).

    Note: for exact negative integers this intentionally steps one tile
    south/west (int(-1.0) - 1 == -2), which is consistent with how
    _bilinear_sample maps such coordinates onto a tile's edge row.
    """
    lat_floor = int(lat) if lat >= 0 else int(lat) - 1
    lon_floor = int(lon) if lon >= 0 else int(lon) - 1

    hemi_ns = 'N' if lat_floor >= 0 else 'S'
    hemi_ew = 'E' if lon_floor >= 0 else 'W'

    return f"{hemi_ns}{abs(lat_floor):02d}{hemi_ew}{abs(lon_floor):03d}"
|
||||||
|
|
||||||
|
def get_tile_path(self, tile_name: str) -> Path:
    """Return the on-disk location of *tile_name* (an .hgt file)."""
    filename = f"{tile_name}.hgt"
    return Path(self.terrain_path, filename)
|
||||||
|
|
||||||
|
async def download_tile(self, tile_name: str) -> bool:
    """Download SRTM tile from configured sources, preferring highest resolution.

    Tries each SRTM_SOURCES entry in order; the first response that
    decompresses to a valid SRTM1/SRTM3 payload is written to disk.
    Returns True when the tile exists locally afterwards.
    """
    tile_path = self.get_tile_path(tile_name)

    # Already on disk: nothing to do.
    if tile_path.exists():
        return True

    lat_dir = tile_name[:3]  # e.g., "N48" — the skadi mirror shards by latitude band

    async with httpx.AsyncClient(timeout=60.0, follow_redirects=True) as client:
        for source in self.SRTM_SOURCES:
            url = source["url"].format(lat_dir=lat_dir, tile_name=tile_name)
            try:
                response = await client.get(url)

                if response.status_code == 200:
                    data = response.content

                    # Skip empty responses
                    if len(data) < 1000:
                        continue

                    # Decompress if the source serves compressed tiles.
                    if source["compressed"]:
                        if url.endswith('.gz'):
                            data = gzip.decompress(data)
                        elif url.endswith('.zip'):
                            # Take the first .hgt member of the archive.
                            with zipfile.ZipFile(io.BytesIO(data)) as zf:
                                for name in zf.namelist():
                                    if name.endswith('.hgt'):
                                        data = zf.read(name)
                                        break

                    # Validate tile size (SRTM1: 25,934,402 bytes, SRTM3: 2,884,802 bytes)
                    if len(data) not in (3601 * 3601 * 2, 1201 * 1201 * 2):
                        print(f"[Terrain] Invalid tile size {len(data)} from {url}")
                        continue

                    tile_path.write_bytes(data)
                    res = source["resolution"]
                    size_mb = len(data) / 1048576
                    print(f"[Terrain] Downloaded {tile_name} ({res}, {size_mb:.1f} MB)")
                    return True

            except Exception as e:
                # Network/decompress failure: try the next source.
                print(f"[Terrain] Failed from {url}: {e}")
                continue

    print(f"[Terrain] Could not download {tile_name} from any source")
    return False
|
||||||
|
|
||||||
|
def _load_tile(self, tile_name: str) -> Optional[np.ndarray]:
    """Load tile from disk into memory cache using memory-mapped I/O.

    Uses np.memmap so the OS pages data from disk on demand — near-zero
    upfront RAM cost per tile (~25 MB savings each vs full load).
    Falls back to np.frombuffer if memmap fails.

    Returns None when the file is missing or its size matches neither
    SRTM1 nor SRTM3.
    """
    # Check memory cache first
    if tile_name in self._tile_cache:
        return self._tile_cache[tile_name]

    tile_path = self.get_tile_path(tile_name)

    if not tile_path.exists():
        return None

    try:
        file_size = tile_path.stat().st_size

        # SRTM HGT format: big-endian signed 16-bit integers
        if file_size == 3601 * 3601 * 2:
            size = 3601  # SRTM1 (30m)
        elif file_size == 1201 * 1201 * 2:
            size = 1201  # SRTM3 (90m)
        else:
            print(f"[Terrain] Unknown tile size: {file_size} bytes for {tile_name}")
            return None

        # Memory-mapped loading — OS pages from disk, near-zero RAM
        try:
            tile = np.memmap(
                tile_path, dtype='>i2', mode='r', shape=(size, size),
            )
        except Exception:
            # Fallback: full load into RAM
            data = tile_path.read_bytes()
            tile = np.frombuffer(data, dtype='>i2').reshape((size, size))

        # Manage memory cache with LRU eviction
        # NOTE(review): this is insertion-order (FIFO) eviction, not true
        # LRU — entries are never re-ordered on access.
        if len(self._tile_cache) >= self._max_cache_tiles:
            oldest = next(iter(self._tile_cache))
            del self._tile_cache[oldest]

        self._tile_cache[tile_name] = tile
        return tile

    except Exception as e:
        print(f"[Terrain] Failed to load {tile_name}: {e}")
        return None
|
||||||
|
|
||||||
|
async def load_tile(self, tile_name: str) -> Optional[np.ndarray]:
    """Load tile into memory, downloading it first if needed.

    Returns None when the tile can neither be found locally nor
    downloaded.
    """
    # Serve straight from the memory cache when possible.
    cached = self._tile_cache.get(tile_name)
    if cached is not None:
        return cached

    # Not on disk yet: try to fetch it from a mirror.
    if not self.get_tile_path(tile_name).exists():
        if not await self.download_tile(tile_name):
            return None

    return self._load_tile(tile_name)
|
||||||
|
|
||||||
|
def _bilinear_sample(self, tile: np.ndarray, lat: float, lon: float) -> float:
    """Sample elevation with bilinear interpolation for sub-meter accuracy.

    SRTM1 at 30m means nearest-neighbor can have 15m positional error.
    Bilinear interpolation reduces this to sub-meter accuracy.

    Void cells (-32768) fall back to the geometrically nearest valid
    corner; 0.0 is returned if all four corners are void.
    """
    size = tile.shape[0]

    # Tile southwest corner
    lat_int = int(lat) if lat >= 0 else int(lat) - 1
    lon_int = int(lon) if lon >= 0 else int(lon) - 1

    # Fractional position within tile (0.0 to 1.0)
    lat_frac = lat - lat_int  # 0 = south edge, 1 = north edge
    lon_frac = lon - lon_int  # 0 = west edge, 1 = east edge

    # Convert to row/col (note: rows go north to south!)
    row_exact = (1.0 - lat_frac) * (size - 1)  # 0 = north, size-1 = south
    col_exact = lon_frac * (size - 1)          # 0 = west, size-1 = east

    # Four surrounding grid points
    r0 = int(row_exact)
    c0 = int(col_exact)
    r1 = min(r0 + 1, size - 1)
    c1 = min(c0 + 1, size - 1)

    # Fractional position between grid points
    dr = row_exact - r0
    dc = col_exact - c0

    # Get four corner values
    z00 = tile[r0, c0]
    z01 = tile[r0, c1]
    z10 = tile[r1, c0]
    z11 = tile[r1, c1]

    # Handle void (-32768) values — fall back to nearest valid corner.
    void_val = -32768
    if z00 == void_val or z01 == void_val or z10 == void_val or z11 == void_val:
        corners = [(z00, r0, c0), (z01, r0, c1), (z10, r1, c0), (z11, r1, c1)]
        valid = [(z, r, c) for z, r, c in corners if z != void_val]
        if not valid:
            return 0.0
        # BUG FIX: previously returned the first valid corner regardless
        # of position; now actually return the corner nearest the sample
        # point, as the comment always claimed.
        nearest = min(valid, key=lambda t: (t[1] - row_exact) ** 2 + (t[2] - col_exact) ** 2)
        return float(nearest[0])

    # Bilinear interpolation
    elevation = (z00 * (1 - dr) * (1 - dc) +
                 z01 * (1 - dr) * dc +
                 z10 * dr * (1 - dc) +
                 z11 * dr * dc)

    return float(elevation)
|
||||||
|
|
||||||
|
async def get_elevation(self, lat: float, lon: float) -> float:
    """Get elevation at specific coordinate with bilinear interpolation.

    Returns 0.0 when the covering tile cannot be loaded.
    """
    tile = await self.load_tile(self.get_tile_name(lat, lon))
    return 0.0 if tile is None else self._bilinear_sample(tile, lat, lon)
|
||||||
|
|
||||||
|
def get_elevation_sync(self, lat: float, lon: float) -> float:
    """Sync elevation lookup with bilinear interpolation.

    Never touches disk or network: returns 0.0 unless the covering tile
    is already in the memory cache.
    """
    tile = self._tile_cache.get(self.get_tile_name(lat, lon))
    return 0.0 if tile is None else self._bilinear_sample(tile, lat, lon)
|
||||||
|
|
||||||
|
def get_elevations_batch(self, lats: np.ndarray, lons: np.ndarray) -> np.ndarray:
    """Vectorized elevation lookup with bilinear interpolation.

    Handles points spanning multiple tiles efficiently.
    Groups points by tile, processes each tile with full NumPy vectorization.
    Tiles must be pre-loaded into memory cache.

    Args:
        lats: Array of latitudes
        lons: Array of longitudes

    Returns:
        Array of elevations (0.0 for missing tiles or void data)
    """
    elevations = np.zeros(len(lats), dtype=np.float32)

    # Compute tile indices for each point
    lat_ints = np.floor(lats).astype(int)
    lon_ints = np.floor(lons).astype(int)

    # Group by tile using unique key
    unique_tiles = set(zip(lat_ints, lon_ints))

    for lat_int, lon_int in unique_tiles:
        # Get tile name
        # NOTE(review): this floor-based naming differs from get_tile_name
        # at exact negative integers (floor(-2.0) = -2 vs int(-2.0)-1 = -3);
        # confirm whether both are intended to resolve via shared tile edges.
        lat_letter = 'N' if lat_int >= 0 else 'S'
        lon_letter = 'E' if lon_int >= 0 else 'W'
        tile_name = f"{lat_letter}{abs(lat_int):02d}{lon_letter}{abs(lon_int):03d}"

        tile = self._tile_cache.get(tile_name)
        if tile is None:
            # Tile not pre-loaded: leave those points at 0.0
            continue

        # Mask for points in this tile
        mask = (lat_ints == lat_int) & (lon_ints == lon_int)
        tile_lats = lats[mask]
        tile_lons = lons[mask]

        size = tile.shape[0]

        # Vectorized bilinear interpolation for all points in this tile
        lat_frac = tile_lats - lat_int
        lon_frac = tile_lons - lon_int

        # Rows run north -> south in SRTM tiles.
        row_exact = (1.0 - lat_frac) * (size - 1)
        col_exact = lon_frac * (size - 1)

        # Clamp base indices so r1/c1 stay inside the grid.
        r0 = np.clip(row_exact.astype(int), 0, size - 2)
        c0 = np.clip(col_exact.astype(int), 0, size - 2)
        r1 = r0 + 1
        c1 = c0 + 1

        dr = row_exact - r0
        dc = col_exact - c0

        # Get four corner values for all points at once
        z00 = tile[r0, c0].astype(np.float32)
        z01 = tile[r0, c1].astype(np.float32)
        z10 = tile[r1, c0].astype(np.float32)
        z11 = tile[r1, c1].astype(np.float32)

        # Bilinear interpolation (vectorized)
        result = (z00 * (1 - dr) * (1 - dc) +
                  z01 * (1 - dr) * dc +
                  z10 * dr * (1 - dc) +
                  z11 * dr * dc)

        # Handle void values (-32768) - set to 0
        void_mask = (z00 == -32768) | (z01 == -32768) | (z10 == -32768) | (z11 == -32768)
        result[void_mask] = 0.0

        elevations[mask] = result

    return elevations
|
||||||
|
|
||||||
|
def get_required_tiles(self, center_lat: float, center_lon: float, radius_km: float) -> list:
    """Determine which tiles are needed for a coverage calculation.

    Returns the tile names covering the bounding box of a circle of
    radius_km around the center point.
    """
    # Convert radius to degrees (approximate; ~111 km per degree latitude)
    lat_delta = radius_km / 111.0
    lon_delta = radius_km / (111.0 * np.cos(np.radians(center_lat)))

    lat_lo = int(np.floor(center_lat - lat_delta))
    lat_hi = int(np.floor(center_lat + lat_delta))
    lon_lo = int(np.floor(center_lon - lon_delta))
    lon_hi = int(np.floor(center_lon + lon_delta))

    names = []
    for lat in range(lat_lo, lat_hi + 1):
        for lon in range(lon_lo, lon_hi + 1):
            ns = 'N' if lat >= 0 else 'S'
            ew = 'E' if lon >= 0 else 'W'
            names.append(f"{ns}{abs(lat):02d}{ew}{abs(lon):03d}")
    return names
|
||||||
|
|
||||||
|
def get_missing_tiles(self, center_lat: float, center_lon: float, radius_km: float) -> list:
    """Check which needed tiles are not available locally."""
    missing = []
    for name in self.get_required_tiles(center_lat, center_lon, radius_km):
        if not self.get_tile_path(name).exists():
            missing.append(name)
    return missing
|
||||||
|
|
||||||
|
async def get_elevation_profile(
    self,
    lat1: float, lon1: float,
    lat2: float, lon2: float,
    num_points: int = 100
) -> List[dict]:
    """Get elevation profile between two points.

    Returns num_points dicts with lat/lon/elevation/distance, where
    distance is measured along the great-circle path from point 1.
    """
    sample_lats = np.linspace(lat1, lat2, num_points)
    sample_lons = np.linspace(lon1, lon2, num_points)

    span = self.haversine_distance(lat1, lon1, lat2, lon2)
    sample_dists = np.linspace(0, span, num_points)

    points = []
    for la, lo, d in zip(sample_lats, sample_lons, sample_dists):
        points.append({
            "lat": float(la),
            "lon": float(lo),
            "elevation": await self.get_elevation(la, lo),
            "distance": float(d)
        })

    return points
|
||||||
|
|
||||||
|
def get_elevation_profile_sync(
|
||||||
|
self,
|
||||||
|
lat1: float, lon1: float,
|
||||||
|
lat2: float, lon2: float,
|
||||||
|
num_points: int = 50
|
||||||
|
) -> List[dict]:
|
||||||
|
"""Sync elevation profile - tiles must be pre-loaded into memory cache."""
|
||||||
|
lats = np.linspace(lat1, lat2, num_points)
|
||||||
|
lons = np.linspace(lon1, lon2, num_points)
|
||||||
|
|
||||||
|
total_distance = self.haversine_distance(lat1, lon1, lat2, lon2)
|
||||||
|
distances = np.linspace(0, total_distance, num_points)
|
||||||
|
|
||||||
|
profile = []
|
||||||
|
for i in range(num_points):
|
||||||
|
profile.append({
|
||||||
|
"lat": float(lats[i]),
|
||||||
|
"lon": float(lons[i]),
|
||||||
|
"elevation": self.get_elevation_sync(float(lats[i]), float(lons[i])),
|
||||||
|
"distance": float(distances[i])
|
||||||
|
})
|
||||||
|
|
||||||
|
return profile
|
||||||
|
|
||||||
|
async def ensure_tiles_for_bbox(
|
||||||
|
self,
|
||||||
|
min_lat: float, min_lon: float,
|
||||||
|
max_lat: float, max_lon: float
|
||||||
|
) -> list[str]:
|
||||||
|
"""Pre-download all tiles needed for a bounding box"""
|
||||||
|
tiles_needed = []
|
||||||
|
|
||||||
|
for lat in range(int(min_lat), int(max_lat) + 1):
|
||||||
|
for lon in range(int(min_lon), int(max_lon) + 1):
|
||||||
|
tile_name = self.get_tile_name(lat, lon)
|
||||||
|
tiles_needed.append(tile_name)
|
||||||
|
|
||||||
|
# Download in parallel (batches of 5 to avoid overload)
|
||||||
|
downloaded = []
|
||||||
|
batch_size = 5
|
||||||
|
for i in range(0, len(tiles_needed), batch_size):
|
||||||
|
batch = tiles_needed[i:i + batch_size]
|
||||||
|
results = await asyncio.gather(*[
|
||||||
|
self.download_tile(tile) for tile in batch
|
||||||
|
])
|
||||||
|
for tile, ok in zip(batch, results):
|
||||||
|
if ok:
|
||||||
|
downloaded.append(tile)
|
||||||
|
|
||||||
|
return downloaded
|
||||||
|
|
||||||
|
def get_cached_tiles(self) -> list[str]:
|
||||||
|
"""List all locally cached tile names"""
|
||||||
|
return [f.stem for f in self.terrain_path.glob("*.hgt")]
|
||||||
|
|
||||||
|
def get_cache_size_mb(self) -> float:
|
||||||
|
"""Get total terrain cache size in MB"""
|
||||||
|
total = sum(f.stat().st_size for f in self.terrain_path.glob("*.hgt"))
|
||||||
|
return total / (1024 * 1024)
|
||||||
|
|
||||||
|
    def evict_disk_cache(self, max_size_mb: float = 2048.0):
        """LRU eviction of .hgt files when disk cache exceeds max_size_mb.

        Deletes the oldest-accessed files until total size is under the limit.

        Args:
            max_size_mb: Disk budget for the .hgt cache, in megabytes.
        """
        hgt_files = list(self.terrain_path.glob("*.hgt"))
        if not hgt_files:
            return

        # Fast path: nothing to do while we are under budget.
        total = sum(f.stat().st_size for f in hgt_files)
        if total / (1024 * 1024) <= max_size_mb:
            return

        # Sort by access time (oldest first).
        # NOTE(review): st_atime is unreliable on filesystems mounted with
        # noatime/relatime — eviction order may then approximate mtime; confirm
        # the deployment filesystem if strict LRU matters.
        hgt_files.sort(key=lambda f: f.stat().st_atime)

        evicted = 0
        for f in hgt_files:
            # Stop as soon as the running total drops back under the budget.
            if total / (1024 * 1024) <= max_size_mb:
                break
            fsize = f.stat().st_size
            # Remove from memory cache if loaded, so the in-RAM copy cannot
            # outlive the deleted file.
            stem = f.stem
            self._tile_cache.pop(stem, None)
            f.unlink()
            total -= fsize
            evicted += 1

        if evicted:
            print(f"[Terrain] Evicted {evicted} tiles, "
                  f"cache now {total / (1024 * 1024):.0f} MB")
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def haversine_distance(lat1: float, lon1: float, lat2: float, lon2: float) -> float:
|
||||||
|
"""Calculate distance between two points in meters"""
|
||||||
|
EARTH_RADIUS = 6371000
|
||||||
|
|
||||||
|
lat1, lon1, lat2, lon2 = map(np.radians, [lat1, lon1, lat2, lon2])
|
||||||
|
|
||||||
|
dlat = lat2 - lat1
|
||||||
|
dlon = lon2 - lon1
|
||||||
|
|
||||||
|
a = np.sin(dlat/2)**2 + np.cos(lat1) * np.cos(lat2) * np.sin(dlon/2)**2
|
||||||
|
c = 2 * np.arcsin(np.sqrt(a))
|
||||||
|
|
||||||
|
return EARTH_RADIUS * c
|
||||||
|
|
||||||
|
|
||||||
|
# Singleton instance
|
||||||
|
terrain_service = TerrainService()
|
||||||
142
backend/app/services/tile_processor.py
Normal file
142
backend/app/services/tile_processor.py
Normal file
@@ -0,0 +1,142 @@
|
|||||||
|
"""
|
||||||
|
Tile-based processing for large radius coverage calculations.
|
||||||
|
|
||||||
|
When radius > 10km, the coverage circle is split into 5km sub-tiles.
|
||||||
|
Each tile is processed independently — OSM data and terrain are loaded
|
||||||
|
per-tile and freed between tiles, keeping peak RAM usage bounded.
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
from app.services.tile_processor import (
|
||||||
|
generate_tile_grid, partition_grid_to_tiles,
|
||||||
|
TILE_THRESHOLD_M, get_adaptive_worker_count,
|
||||||
|
)
|
||||||
|
|
||||||
|
if radius_m > TILE_THRESHOLD_M:
|
||||||
|
tiles = generate_tile_grid(center_lat, center_lon, radius_m)
|
||||||
|
tile_grids = partition_grid_to_tiles(grid, tiles)
|
||||||
|
"""
|
||||||
|
|
||||||
|
import math
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from typing import List, Tuple, Dict
|
||||||
|
|
||||||
|
|
||||||
|
# Use tiled processing for radius above this threshold
|
||||||
|
TILE_THRESHOLD_M = 10000 # 10 km
|
||||||
|
|
||||||
|
# Default tile size — 5km balances overhead vs memory usage
|
||||||
|
DEFAULT_TILE_SIZE_M = 5000 # 5 km
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class Tile:
    """A rectangular sub-tile of the coverage area.

    Produced by generate_tile_grid(); consumed by partition_grid_to_tiles().
    """
    # Geographic bounds of the tile, in degrees.
    bbox: Tuple[float, float, float, float]  # (min_lat, min_lon, max_lat, max_lon)
    # Position of this tile within the square tile grid (SW origin).
    index: Tuple[int, int]  # (row, col) in tile grid
|
||||||
|
|
||||||
|
|
||||||
|
def generate_tile_grid(
    center_lat: float,
    center_lon: float,
    radius_m: float,
    tile_size_m: float = DEFAULT_TILE_SIZE_M,
) -> List[Tile]:
    """Generate grid of tiles covering the coverage circle.

    Only tiles that actually intersect the coverage circle are returned,
    ordered row-by-row from the SW corner to the NE corner.
    """
    cos_lat = math.cos(math.radians(center_lat))

    # Half-extent of the full coverage square, in degrees.
    half_lat_deg = radius_m / 111000
    half_lon_deg = radius_m / (111000 * cos_lat)

    # Tiles per axis so each tile is at most tile_size_m across.
    per_axis = max(1, math.ceil(radius_m * 2 / tile_size_m))

    # Per-tile step in degrees along each axis.
    step_lat = (2 * half_lat_deg) / per_axis
    step_lon = (2 * half_lon_deg) / per_axis

    origin_lat = center_lat - half_lat_deg
    origin_lon = center_lon - half_lon_deg

    selected = []
    for row in range(per_axis):
        lat_lo = origin_lat + row * step_lat
        lat_hi = origin_lat + (row + 1) * step_lat
        for col in range(per_axis):
            lon_lo = origin_lon + col * step_lon
            lon_hi = origin_lon + (col + 1) * step_lon
            bbox = (lat_lo, lon_lo, lat_hi, lon_hi)
            # Corner tiles of the square often miss the circle entirely.
            if _tile_intersects_circle(bbox, center_lat, center_lon, radius_m, cos_lat):
                selected.append(Tile(bbox=bbox, index=(row, col)))

    return selected
|
||||||
|
|
||||||
|
|
||||||
|
def _tile_intersects_circle(
|
||||||
|
bbox: Tuple[float, float, float, float],
|
||||||
|
center_lat: float,
|
||||||
|
center_lon: float,
|
||||||
|
radius_m: float,
|
||||||
|
cos_lat: float,
|
||||||
|
) -> bool:
|
||||||
|
"""Check if tile bbox intersects the coverage circle.
|
||||||
|
|
||||||
|
Uses fast equirectangular approximation — tiles are small (5km)
|
||||||
|
so full haversine is unnecessary for intersection testing.
|
||||||
|
"""
|
||||||
|
min_lat, min_lon, max_lat, max_lon = bbox
|
||||||
|
|
||||||
|
# Closest point on bbox to circle center
|
||||||
|
closest_lat = max(min_lat, min(center_lat, max_lat))
|
||||||
|
closest_lon = max(min_lon, min(center_lon, max_lon))
|
||||||
|
|
||||||
|
# Approximate distance in meters (equirectangular)
|
||||||
|
dlat = (closest_lat - center_lat) * 111000
|
||||||
|
dlon = (closest_lon - center_lon) * 111000 * cos_lat
|
||||||
|
dist_sq = dlat * dlat + dlon * dlon
|
||||||
|
|
||||||
|
return dist_sq <= radius_m * radius_m
|
||||||
|
|
||||||
|
|
||||||
|
def get_adaptive_worker_count(radius_m: float, base_workers: int) -> int:
    """Scale down workers for large calculations to prevent combined memory explosion.

    Large radius = more buildings per tile = more memory per worker.
    Reducing workers keeps total worker memory bounded.
    """
    # (threshold_m, worker_cap) pairs, checked from largest radius down.
    caps = ((30000, 2), (20000, 3), (10000, 4))
    for threshold_m, cap in caps:
        if radius_m > threshold_m:
            return min(base_workers, cap)
    return base_workers
|
||||||
|
|
||||||
|
|
||||||
|
def partition_grid_to_tiles(
    grid: List[Tuple[float, float]],
    tiles: List[Tile],
) -> Dict[Tuple[int, int], List[Tuple[float, float]]]:
    """Partition grid points into tiles by bbox containment.

    Returns dict mapping tile index -> list of (lat, lon) points.
    Points on tile boundaries are assigned to the first matching tile;
    points outside every tile are dropped.
    """
    buckets: Dict[Tuple[int, int], List[Tuple[float, float]]] = {}
    for tile in tiles:
        buckets[tile.index] = []

    for lat, lon in grid:
        for tile in tiles:
            lo_lat, lo_lon, hi_lat, hi_lon = tile.bbox
            if lo_lat <= lat <= hi_lat and lo_lon <= lon <= hi_lon:
                buckets[tile.index].append((lat, lon))
                break  # first match wins

    return buckets
|
||||||
323
backend/app/services/vegetation_service.py
Normal file
323
backend/app/services/vegetation_service.py
Normal file
@@ -0,0 +1,323 @@
|
|||||||
|
"""
|
||||||
|
OSM vegetation service for RF signal attenuation.
|
||||||
|
|
||||||
|
Forests and dense vegetation attenuate RF signals significantly.
|
||||||
|
Uses ITU-R P.833 approximations for foliage loss.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import os
|
||||||
|
import asyncio
|
||||||
|
import httpx
|
||||||
|
import json
|
||||||
|
from typing import List, Tuple, Optional
|
||||||
|
from pydantic import BaseModel
|
||||||
|
from pathlib import Path
|
||||||
|
from datetime import datetime, timedelta
|
||||||
|
|
||||||
|
|
||||||
|
class VegetationArea(BaseModel):
    """Vegetation area from OSM (one closed way with its polygon geometry)."""
    # OSM way id.
    id: int
    geometry: List[Tuple[float, float]]  # [(lon, lat), ...]
    vegetation_type: str  # forest, wood, scrub, orchard
    density: str  # dense, sparse, mixed
    # Bounding box for fast rejection (computed from geometry).
    # Defaults of 0.0 act as a "not computed" sentinel for entries written
    # by older cache versions; fetch_vegetation() recomputes them on load.
    min_lat: float = 0.0
    max_lat: float = 0.0
    min_lon: float = 0.0
    max_lon: float = 0.0
|
||||||
|
|
||||||
|
|
||||||
|
class VegetationCache:
    """Disk-backed cache for fetched vegetation data, keyed by bbox, with expiry."""

    # Entries older than this many days are treated as stale and ignored.
    CACHE_EXPIRY_DAYS = 30

    def __init__(self):
        base = Path(os.environ.get('RFCP_DATA_PATH', './data'))
        self.data_path = base
        self.cache_path = base / 'osm' / 'vegetation'
        self.cache_path.mkdir(parents=True, exist_ok=True)

    def _get_cache_key(self, min_lat: float, min_lon: float, max_lat: float, max_lon: float) -> str:
        # Round to 0.01 degree so near-identical bboxes share one entry.
        return f"{min_lat:.2f}_{min_lon:.2f}_{max_lat:.2f}_{max_lon:.2f}"

    def _get_cache_file(self, cache_key: str) -> Path:
        return self.cache_path / f"{cache_key}.json"

    def get(self, min_lat: float, min_lon: float, max_lat: float, max_lon: float) -> Optional[list]:
        """Return cached data for the bbox, or None if absent, stale or unreadable."""
        key = self._get_cache_key(min_lat, min_lon, max_lat, max_lon)
        cache_file = self._get_cache_file(key)

        if not cache_file.exists():
            return None

        try:
            payload = json.loads(cache_file.read_text())
            stamp = datetime.fromisoformat(payload.get('_cached_at', '2000-01-01'))
            if datetime.now() - stamp > timedelta(days=self.CACHE_EXPIRY_DAYS):
                return None
            return payload.get('data')
        except Exception as e:
            print(f"[VegetationCache] Failed to read cache: {e}")
            return None

    def set(self, min_lat: float, min_lon: float, max_lat: float, max_lon: float, data):
        """Store data for the bbox alongside a freshness timestamp."""
        key = self._get_cache_key(min_lat, min_lon, max_lat, max_lon)
        cache_file = self._get_cache_file(key)

        try:
            payload = {
                '_cached_at': datetime.now().isoformat(),
                '_bbox': [min_lat, min_lon, max_lat, max_lon],
                'data': data
            }
            cache_file.write_text(json.dumps(payload))
        except Exception as e:
            print(f"[VegetationCache] Failed to write cache: {e}")

    def clear(self):
        """Delete every cached entry."""
        for entry in self.cache_path.glob("*.json"):
            entry.unlink()

    def get_size_mb(self) -> float:
        """Total on-disk size of the cache in megabytes."""
        size = sum(entry.stat().st_size for entry in self.cache_path.glob("*.json"))
        return size / (1024 * 1024)
|
||||||
|
|
||||||
|
|
||||||
|
class VegetationService:
    """OSM vegetation for signal attenuation.

    Fetches forest/wood/scrub/orchard polygons from Overpass and applies
    per-type attenuation rates to points sampled along a TX->RX path.
    """

    # Overpass mirrors, tried round-robin on retry.
    OVERPASS_URLS = [
        "https://overpass-api.de/api/interpreter",
        "https://overpass.kumi.systems/api/interpreter",
    ]

    # Attenuation dB per 100 meters of vegetation
    # NOTE(review): stated as ITU-R P.833 approximations in the module
    # docstring — confirm the values against the recommendation.
    ATTENUATION_DB_PER_100M = {
        "forest": 8.0,
        "wood": 6.0,
        "tree_row": 2.0,
        "scrub": 3.0,
        "orchard": 2.0,
        "vineyard": 1.0,
        "meadow": 0.5,
    }

    # Seasonal factor (summer = full foliage)
    SEASONAL_FACTOR = {
        "summer": 1.0,
        "winter": 0.3,
        "spring": 0.6,
        "autumn": 0.7,
    }

    def __init__(self):
        # Disk cache (30-day expiry) plus an unbounded per-process dict cache.
        self.cache = VegetationCache()
        self._memory_cache: dict[str, List[VegetationArea]] = {}

    async def fetch_vegetation(
        self,
        min_lat: float, min_lon: float,
        max_lat: float, max_lon: float
    ) -> List[VegetationArea]:
        """Fetch vegetation areas in bounding box, using cache if available.

        Lookup order: memory cache -> disk cache -> Overpass API (with
        retry across mirrors). Returns [] when every fetch attempt fails.
        """

        cache_key = f"{min_lat:.2f}_{min_lon:.2f}_{max_lat:.2f}_{max_lon:.2f}"

        # Memory cache
        if cache_key in self._memory_cache:
            return self._memory_cache[cache_key]

        # Disk cache with expiry
        cached = self.cache.get(min_lat, min_lon, max_lat, max_lon)
        if cached is not None:
            print(f"[Vegetation] Cache hit for bbox")
            areas = []
            for v in cached:
                area = VegetationArea(**v)
                # Recompute bbox if missing (backward compat with old cache
                # entries written before bbox fields existed — they come back
                # with the 0.0 field defaults).
                if area.min_lat == 0.0 and area.max_lat == 0.0 and area.geometry:
                    lons = [p[0] for p in area.geometry]
                    lats = [p[1] for p in area.geometry]
                    area = VegetationArea(
                        id=area.id,
                        geometry=area.geometry,
                        vegetation_type=area.vegetation_type,
                        density=area.density,
                        min_lat=min(lats),
                        max_lat=max(lats),
                        min_lon=min(lons),
                        max_lon=max(lons),
                    )
                areas.append(area)
            self._memory_cache[cache_key] = areas
            return areas

        # Fetch from Overpass with retry
        print(f"[Vegetation] Fetching from Overpass API...")

        query = f"""
        [out:json][timeout:30];
        (
          way["landuse"="forest"]({min_lat},{min_lon},{max_lat},{max_lon});
          way["natural"="wood"]({min_lat},{min_lon},{max_lat},{max_lon});
          way["landuse"="orchard"]({min_lat},{min_lon},{max_lat},{max_lon});
          way["natural"="scrub"]({min_lat},{min_lon},{max_lat},{max_lon});
        );
        out body;
        >;
        out skel qt;
        """

        data = None
        max_retries = 3
        for attempt in range(max_retries):
            # Alternate between mirrors on each attempt.
            url = self.OVERPASS_URLS[attempt % len(self.OVERPASS_URLS)]
            try:
                timeout = 60.0 * (attempt + 1)  # 60s, 120s, 180s
                async with httpx.AsyncClient(timeout=timeout) as client:
                    response = await client.post(url, data={"data": query})
                    response.raise_for_status()
                    data = response.json()
                    break
            except Exception as e:
                print(f"[Vegetation] Overpass attempt {attempt + 1}/{max_retries} failed ({url}): {e}")
                if attempt < max_retries - 1:
                    wait_time = 2 ** attempt  # 1s, 2s
                    print(f"[Vegetation] Retrying in {wait_time}s...")
                    await asyncio.sleep(wait_time)
                else:
                    print(f"[Vegetation] All {max_retries} attempts failed")
                    return []

        areas = self._parse_response(data)

        # Save to disk cache (empty results are deliberately not cached).
        if areas:
            self.cache.set(min_lat, min_lon, max_lat, max_lon,
                           [v.model_dump() for v in areas])

        self._memory_cache[cache_key] = areas
        return areas

    def _parse_response(self, data: dict) -> List[VegetationArea]:
        """Parse Overpass response into VegetationArea models.

        First pass collects node coordinates; second pass assembles each
        way's polygon from its node refs. Ways with fewer than 3 resolved
        nodes cannot form a polygon and are skipped.
        """
        nodes = {}
        for element in data.get("elements", []):
            if element["type"] == "node":
                nodes[element["id"]] = (element["lon"], element["lat"])

        areas = []
        for element in data.get("elements", []):
            if element["type"] != "way":
                continue

            tags = element.get("tags", {})

            # landuse tag wins over natural; default to "forest".
            veg_type = tags.get("landuse", tags.get("natural", "forest"))

            geometry = []
            for node_id in element.get("nodes", []):
                if node_id in nodes:
                    geometry.append(nodes[node_id])

            if len(geometry) < 3:
                continue

            # Needle-leaved stands are treated as dense; everything else mixed.
            leaf_type = tags.get("leaf_type", "mixed")
            density = "dense" if leaf_type == "needleleaved" else "mixed"

            # Compute bounding box from geometry (lon, lat tuples)
            lons = [p[0] for p in geometry]
            lats = [p[1] for p in geometry]

            areas.append(VegetationArea(
                id=element["id"],
                geometry=geometry,
                vegetation_type=veg_type,
                density=density,
                min_lat=min(lats),
                max_lat=max(lats),
                min_lon=min(lons),
                max_lon=max(lons),
            ))

        return areas

    def calculate_vegetation_loss(
        self,
        lat1: float, lon1: float,
        lat2: float, lon2: float,
        vegetation_areas: List[VegetationArea],
        season: str = "summer"
    ) -> float:
        """
        Calculate signal loss through vegetation along path.

        Samples points along the TX->RX path (one sample per ~50 m, at
        least 10) and accumulates attenuation for each segment whose
        sample point falls inside vegetation, scaled by the seasonal
        foliage factor.

        Returns loss in dB (capped at 40 dB).
        """
        # Imported lazily to avoid a circular import at module load time.
        from app.services.terrain_service import TerrainService

        path_length = TerrainService.haversine_distance(lat1, lon1, lat2, lon2)

        if path_length < 1:
            return 0.0

        num_samples = max(10, int(path_length / 50))

        segment_length = path_length / num_samples
        total_loss = 0.0

        for i in range(num_samples):
            # Linear interpolation between endpoints (adequate at these spans).
            t = i / num_samples
            lat = lat1 + t * (lat2 - lat1)
            lon = lon1 + t * (lon2 - lon1)

            veg = self._point_in_vegetation(lat, lon, vegetation_areas)

            if veg:
                # Unknown vegetation types fall back to 4.0 dB / 100 m.
                attenuation = self.ATTENUATION_DB_PER_100M.get(veg.vegetation_type, 4.0)
                seasonal = self.SEASONAL_FACTOR.get(season, 1.0)
                total_loss += (segment_length / 100) * attenuation * seasonal

        return min(total_loss, 40.0)

    def _point_in_vegetation(
        self,
        lat: float, lon: float,
        areas: List[VegetationArea]
    ) -> Optional[VegetationArea]:
        """Check if point is in vegetation area (with bbox pre-filter).

        Returns the first containing area, or None.
        """
        for area in areas:
            # Quick bbox reject - skips 95%+ of polygons
            if not (area.min_lat <= lat <= area.max_lat and
                    area.min_lon <= lon <= area.max_lon):
                continue
            if self._point_in_polygon(lat, lon, area.geometry):
                return area
        return None

    @staticmethod
    def _point_in_polygon(
        lat: float, lon: float, polygon: List[Tuple[float, float]]
    ) -> bool:
        """Ray casting algorithm -- polygon coords are (lon, lat).

        Counts crossings of a horizontal ray from the point; an odd count
        means the point is inside.
        """
        n = len(polygon)
        inside = False

        j = n - 1
        for i in range(n):
            xi, yi = polygon[i]  # lon, lat
            xj, yj = polygon[j]

            if ((yi > lat) != (yj > lat)) and (lon < (xj - xi) * (lat - yi) / (yj - yi) + xi):
                inside = not inside
            j = i

        return inside
|
||||||
|
|
||||||
|
|
||||||
|
vegetation_service = VegetationService()
|
||||||
233
backend/app/services/water_service.py
Normal file
233
backend/app/services/water_service.py
Normal file
@@ -0,0 +1,233 @@
|
|||||||
|
"""
|
||||||
|
OSM water bodies service for RF reflection calculations.
|
||||||
|
|
||||||
|
Water surfaces produce strong specular reflections that can boost
|
||||||
|
or create multipath interference for RF signals.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import os
|
||||||
|
import asyncio
|
||||||
|
import httpx
|
||||||
|
import json
|
||||||
|
from typing import List, Tuple, Optional
|
||||||
|
from pydantic import BaseModel
|
||||||
|
from pathlib import Path
|
||||||
|
from datetime import datetime, timedelta
|
||||||
|
|
||||||
|
|
||||||
|
class WaterBody(BaseModel):
    """Water body from OSM (one way with its polygon geometry)."""
    # OSM way id.
    id: int
    geometry: List[Tuple[float, float]]  # [(lon, lat), ...]
    water_type: str  # river, lake, pond, reservoir
    # OSM "name" tag when present.
    name: Optional[str] = None
|
||||||
|
|
||||||
|
|
||||||
|
class WaterCache:
    """Disk-backed cache for fetched water-body data, keyed by bbox, with expiry."""

    # Entries older than this many days are treated as stale and ignored.
    CACHE_EXPIRY_DAYS = 30

    def __init__(self):
        base = Path(os.environ.get('RFCP_DATA_PATH', './data'))
        self.data_path = base
        self.cache_path = base / 'osm' / 'water'
        self.cache_path.mkdir(parents=True, exist_ok=True)

    def _get_cache_key(self, min_lat: float, min_lon: float, max_lat: float, max_lon: float) -> str:
        # Round to 0.01 degree so near-identical bboxes share one entry.
        return f"{min_lat:.2f}_{min_lon:.2f}_{max_lat:.2f}_{max_lon:.2f}"

    def _get_cache_file(self, cache_key: str) -> Path:
        return self.cache_path / f"{cache_key}.json"

    def get(self, min_lat: float, min_lon: float, max_lat: float, max_lon: float) -> Optional[list]:
        """Return cached data for the bbox, or None if absent, stale or unreadable."""
        key = self._get_cache_key(min_lat, min_lon, max_lat, max_lon)
        cache_file = self._get_cache_file(key)

        if not cache_file.exists():
            return None

        try:
            payload = json.loads(cache_file.read_text())
            stamp = datetime.fromisoformat(payload.get('_cached_at', '2000-01-01'))
            if datetime.now() - stamp > timedelta(days=self.CACHE_EXPIRY_DAYS):
                return None
            return payload.get('data')
        except Exception as e:
            print(f"[WaterCache] Failed to read cache: {e}")
            return None

    def set(self, min_lat: float, min_lon: float, max_lat: float, max_lon: float, data):
        """Store data for the bbox alongside a freshness timestamp."""
        key = self._get_cache_key(min_lat, min_lon, max_lat, max_lon)
        cache_file = self._get_cache_file(key)

        try:
            payload = {
                '_cached_at': datetime.now().isoformat(),
                '_bbox': [min_lat, min_lon, max_lat, max_lon],
                'data': data
            }
            cache_file.write_text(json.dumps(payload))
        except Exception as e:
            print(f"[WaterCache] Failed to write cache: {e}")

    def clear(self):
        """Delete every cached entry."""
        for entry in self.cache_path.glob("*.json"):
            entry.unlink()

    def get_size_mb(self) -> float:
        """Total on-disk size of the cache in megabytes."""
        size = sum(entry.stat().st_size for entry in self.cache_path.glob("*.json"))
        return size / (1024 * 1024)
|
||||||
|
|
||||||
|
|
||||||
|
class WaterService:
    """OSM water bodies for reflection calculations.

    Fetches water polygons from Overpass and exposes containment and
    per-type reflection coefficient lookups.
    """

    # Overpass mirrors, tried round-robin on retry.
    OVERPASS_URLS = [
        "https://overpass-api.de/api/interpreter",
        "https://overpass.kumi.systems/api/interpreter",
    ]

    # Reflection coefficients by water type
    REFLECTION_COEFF = {
        "lake": 0.8,
        "reservoir": 0.8,
        "river": 0.7,
        "pond": 0.75,
        "water": 0.7,
    }

    def __init__(self):
        # Disk cache (30-day expiry) plus an unbounded per-process dict cache.
        self.cache = WaterCache()
        self._memory_cache: dict[str, List[WaterBody]] = {}

    async def fetch_water_bodies(
        self,
        min_lat: float, min_lon: float,
        max_lat: float, max_lon: float
    ) -> List[WaterBody]:
        """Fetch water bodies in bounding box, using cache if available.

        Lookup order: memory cache -> disk cache -> Overpass API (with
        retry across mirrors). Returns [] when every fetch attempt fails.
        """

        cache_key = f"{min_lat:.2f}_{min_lon:.2f}_{max_lat:.2f}_{max_lon:.2f}"

        # Memory cache
        if cache_key in self._memory_cache:
            return self._memory_cache[cache_key]

        # Disk cache with expiry
        cached = self.cache.get(min_lat, min_lon, max_lat, max_lon)
        if cached is not None:
            print(f"[Water] Cache hit for bbox")
            bodies = [WaterBody(**w) for w in cached]
            self._memory_cache[cache_key] = bodies
            return bodies

        # Fetch from Overpass
        print(f"[Water] Fetching from Overpass API...")

        # NOTE(review): relations are requested here but _parse_response()
        # only assembles ways — relation members (multipolygon lakes) are
        # effectively dropped.
        query = f"""
        [out:json][timeout:30];
        (
          way["natural"="water"]({min_lat},{min_lon},{max_lat},{max_lon});
          relation["natural"="water"]({min_lat},{min_lon},{max_lat},{max_lon});
          way["waterway"]({min_lat},{min_lon},{max_lat},{max_lon});
        );
        out body;
        >;
        out skel qt;
        """

        data = None
        max_retries = 3
        for attempt in range(max_retries):
            # Alternate between mirrors on each attempt.
            url = self.OVERPASS_URLS[attempt % len(self.OVERPASS_URLS)]
            try:
                timeout = 60.0 * (attempt + 1)
                async with httpx.AsyncClient(timeout=timeout) as client:
                    response = await client.post(url, data={"data": query})
                    response.raise_for_status()
                    data = response.json()
                    break
            except Exception as e:
                print(f"[Water] Overpass attempt {attempt + 1}/{max_retries} failed ({url}): {e}")
                if attempt < max_retries - 1:
                    await asyncio.sleep(2 ** attempt)
                else:
                    print(f"[Water] All {max_retries} attempts failed")
                    return []

        bodies = self._parse_response(data)

        # Save to disk cache (empty results are deliberately not cached).
        if bodies:
            self.cache.set(min_lat, min_lon, max_lat, max_lon,
                           [w.model_dump() for w in bodies])

        self._memory_cache[cache_key] = bodies
        return bodies

    def _parse_response(self, data: dict) -> List[WaterBody]:
        """Parse Overpass response into WaterBody models.

        First pass collects node coordinates; second pass assembles each
        way's polygon from its node refs. Ways with fewer than 3 resolved
        nodes cannot form a polygon and are skipped.
        """
        nodes = {}
        for element in data.get("elements", []):
            if element["type"] == "node":
                nodes[element["id"]] = (element["lon"], element["lat"])

        bodies = []
        for element in data.get("elements", []):
            if element["type"] != "way":
                continue

            tags = element.get("tags", {})
            # Most specific tag wins: water > waterway > natural; default "water".
            water_type = tags.get("water", tags.get("waterway", tags.get("natural", "water")))

            geometry = []
            for node_id in element.get("nodes", []):
                if node_id in nodes:
                    geometry.append(nodes[node_id])

            if len(geometry) < 3:
                continue

            bodies.append(WaterBody(
                id=element["id"],
                geometry=geometry,
                water_type=water_type,
                name=tags.get("name")
            ))

        return bodies

    def get_reflection_coefficient(self, water_type: str) -> float:
        """Get reflection coefficient for water type (0.7 for unknown types)."""
        return self.REFLECTION_COEFF.get(water_type, 0.7)

    def point_over_water(
        self, lat: float, lon: float, water_bodies: List[WaterBody]
    ) -> Optional[WaterBody]:
        """Check if point is over water; returns the first containing body or None."""
        for body in water_bodies:
            if self._point_in_polygon(lat, lon, body.geometry):
                return body
        return None

    @staticmethod
    def _point_in_polygon(
        lat: float, lon: float, polygon: List[Tuple[float, float]]
    ) -> bool:
        """Ray casting algorithm -- polygon coords are (lon, lat).

        Counts crossings of a horizontal ray from the point; an odd count
        means the point is inside.
        """
        n = len(polygon)
        inside = False

        j = n - 1
        for i in range(n):
            xi, yi = polygon[i]  # lon, lat
            xj, yj = polygon[j]

            if ((yi > lat) != (yj > lat)) and (lon < (xj - xi) * (lat - yi) / (yj - yi) + xi):
                inside = not inside
            j = i

        return inside
|
||||||
|
|
||||||
|
|
||||||
|
water_service = WaterService()
|
||||||
102
backend/app/services/weather_service.py
Normal file
102
backend/app/services/weather_service.py
Normal file
@@ -0,0 +1,102 @@
|
|||||||
|
import math
|
||||||
|
|
||||||
|
|
||||||
|
class WeatherService:
    """ITU-R P.838 rain attenuation model"""

    # ITU-R P.838-3 coefficients for horizontal polarization
    # Format: (frequency_GHz, k, alpha)
    RAIN_COEFFICIENTS = {
        0.7: (0.0000387, 0.912),
        1.0: (0.0000887, 0.949),
        1.8: (0.000292, 1.021),
        2.1: (0.000425, 1.052),
        2.6: (0.000683, 1.091),
        3.5: (0.00138, 1.149),
        5.0: (0.00361, 1.206),
        10.0: (0.0245, 1.200),
        20.0: (0.0906, 1.099),
        30.0: (0.175, 1.021),
    }

    def calculate_rain_attenuation(
        self,
        frequency_mhz: float,
        distance_km: float,
        rain_rate: float,  # mm/h
    ) -> float:
        """
        Calculate rain attenuation in dB

        Args:
            frequency_mhz: Frequency in MHz
            distance_km: Path length in km
            rain_rate: Rain rate in mm/h (0=none, 5=light, 25=moderate, 50=heavy)

        Returns:
            Attenuation in dB (capped at 30 dB)
        """
        if rain_rate <= 0:
            return 0.0

        k, alpha = self._get_coefficients(frequency_mhz / 1000)

        # Specific attenuation in dB/km: gamma_R = k * R^alpha
        specific = k * (rain_rate ** alpha)

        # Rain cells are typically 2-5 km, so long paths are only partly
        # inside rain: shrink the path to an effective length.
        if distance_km > 2:
            effective_km = distance_km * (1 / (1 + distance_km / 35))
        else:
            effective_km = distance_km

        return min(specific * effective_km, 30.0)  # Cap at 30 dB

    def _get_coefficients(self, freq_ghz: float) -> tuple[float, float]:
        """Interpolate (k, alpha) rain coefficients for the given frequency."""
        freqs = sorted(self.RAIN_COEFFICIENTS.keys())

        # Clamp frequencies outside the tabulated range.
        if freq_ghz <= freqs[0]:
            return self.RAIN_COEFFICIENTS[freqs[0]]
        if freq_ghz >= freqs[-1]:
            return self.RAIN_COEFFICIENTS[freqs[-1]]

        for lo, hi in zip(freqs, freqs[1:]):
            if lo <= freq_ghz <= hi:
                k_lo, a_lo = self.RAIN_COEFFICIENTS[lo]
                k_hi, a_hi = self.RAIN_COEFFICIENTS[hi]

                # Linear interpolation between the bracketing entries.
                frac = (freq_ghz - lo) / (hi - lo)
                return (k_lo + frac * (k_hi - k_lo), a_lo + frac * (a_hi - a_lo))

        # Unreachable given the clamps above; kept as a safe fallback.
        return self.RAIN_COEFFICIENTS[freqs[0]]

    @staticmethod
    def rain_rate_from_description(description: str) -> float:
        """Map a rain description to a rate in mm/h (unknown -> 0.0)."""
        rates = {
            "none": 0.0,
            "drizzle": 2.5,
            "light": 5.0,
            "moderate": 12.5,
            "heavy": 25.0,
            "very_heavy": 50.0,
            "extreme": 100.0,
        }
        return rates.get(description.lower(), 0.0)
|
||||||
|
|
||||||
|
|
||||||
|
# Module-level singleton instance shared across the application.
weather_service = WeatherService()
|
||||||
3
backend/app/utils/__init__.py
Normal file
3
backend/app/utils/__init__.py
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
"""
|
||||||
|
Utility modules for RFCP backend.
|
||||||
|
"""
|
||||||
34
backend/app/utils/logging.py
Normal file
34
backend/app/utils/logging.py
Normal file
@@ -0,0 +1,34 @@
|
|||||||
|
"""
|
||||||
|
Structured logging for RFCP backend.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import time
|
||||||
|
import threading
|
||||||
|
|
||||||
|
|
||||||
|
# Lazily opened append-mode log file handle; lives for the process lifetime
# (never closed — acceptable for a single always-on log sink).
_log_file = None


def rfcp_log(tag: str, msg: str):
    """Log with tag prefix, timestamp, and thread name.

    Writes to stdout and appends to a log file under RFCP_DATA_PATH
    (default ``./data``) for reliability. File logging is best-effort:
    any filesystem error is swallowed so logging can never crash the app.
    """
    global _log_file
    ts = time.strftime('%H:%M:%S')
    thr = threading.current_thread().name
    line = f"[{tag} {ts}] [{thr}] {msg}"
    print(line, flush=True)

    try:
        if _log_file is None:
            log_dir = os.environ.get('RFCP_DATA_PATH', './data')
            os.makedirs(log_dir, exist_ok=True)
            log_path = os.path.join(log_dir, 'rfcp-backend.log')
            # Explicit UTF-8: the platform default (e.g. cp1252 on Windows)
            # can raise UnicodeEncodeError on non-ASCII messages, which the
            # except below would silently swallow — disabling file logging.
            _log_file = open(log_path, 'a', encoding='utf-8')
        _log_file.write(line + '\n')
        _log_file.flush()
    except Exception:
        # Deliberate best-effort: never let logging failures propagate.
        pass
|
||||||
44
backend/app/utils/progress.py
Normal file
44
backend/app/utils/progress.py
Normal file
@@ -0,0 +1,44 @@
|
|||||||
|
"""
|
||||||
|
Progress reporting for long-running calculations.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import time
|
||||||
|
from typing import Optional, Callable, Awaitable
|
||||||
|
|
||||||
|
|
||||||
|
class ProgressTracker:
    """Track and report calculation progress.

    Counts completed work items against a known total and exposes a
    progress fraction plus an ETA estimate; an optional async callback
    receives (phase, progress, eta_seconds) via :meth:`report`.
    """

    def __init__(
        self,
        total: int,
        callback: Optional[Callable[[str, float, Optional[float]], Awaitable[None]]] = None,
        phase: str = "calculating",
    ):
        self.total = total
        self.callback = callback  # async fn(phase, progress, eta_seconds)
        self.phase = phase
        self.completed = 0
        self.start_time = time.time()

    @property
    def progress(self) -> float:
        """Fraction complete in [0, 1]; an empty task counts as done."""
        if self.total == 0:
            return 1.0
        return self.completed / self.total

    @property
    def eta_seconds(self) -> Optional[float]:
        """Estimated seconds remaining, or None when no estimate is possible."""
        if self.completed == 0:
            return None
        elapsed = time.time() - self.start_time
        # Guard: on coarse clocks (e.g. Windows ~15 ms tick) elapsed can be
        # exactly 0 right after start, which would divide by zero below.
        if elapsed <= 0:
            return None
        rate = self.completed / elapsed
        remaining = self.total - self.completed
        return remaining / rate if rate > 0 else None

    def update(self, n: int = 1):
        """Mark *n* more items as completed."""
        self.completed += n

    async def report(self):
        """Invoke the progress callback (if any) with the current state."""
        if self.callback:
            await self.callback(self.phase, self.progress, self.eta_seconds)
|
||||||
54
backend/app/utils/units.py
Normal file
54
backend/app/utils/units.py
Normal file
@@ -0,0 +1,54 @@
|
|||||||
|
"""
|
||||||
|
RF unit conversions.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import math
|
||||||
|
|
||||||
|
|
||||||
|
def dbm_to_watts(dbm: float) -> float:
    """Convert a power level in dBm to watts."""
    exponent = (dbm - 30) / 10
    return 10 ** exponent
|
||||||
|
|
||||||
|
|
||||||
|
def watts_to_dbm(watts: float) -> float:
    """Convert power in watts to dBm; non-positive input maps to -inf."""
    if watts > 0:
        return 10 * math.log10(watts) + 30
    return -float('inf')
|
||||||
|
|
||||||
|
|
||||||
|
def dbm_to_mw(dbm: float) -> float:
    """Convert a power level in dBm to milliwatts."""
    ratio_exponent = dbm / 10
    return 10 ** ratio_exponent
|
||||||
|
|
||||||
|
|
||||||
|
def mw_to_dbm(mw: float) -> float:
    """Convert milliwatts to dBm; non-positive input maps to -inf."""
    if mw > 0:
        return 10 * math.log10(mw)
    return -float('inf')
|
||||||
|
|
||||||
|
|
||||||
|
def frequency_to_wavelength(frequency_mhz: float) -> float:
    """Convert frequency (MHz) to free-space wavelength (meters).

    Uses the c ≈ 3e8 m/s approximation: wavelength = 300 / f_MHz.
    """
    wavelength_m = 300.0 / frequency_mhz
    return wavelength_m
|
||||||
|
|
||||||
|
|
||||||
|
def wavelength_to_frequency(wavelength_m: float) -> float:
    """Convert wavelength (meters) to frequency (MHz).

    Inverse of :func:`frequency_to_wavelength` (same c ≈ 3e8 approximation).
    """
    frequency_mhz = 300.0 / wavelength_m
    return frequency_mhz
|
||||||
|
|
||||||
|
|
||||||
|
def eirp_dbm(power_dbm: float, gain_dbi: float) -> float:
    """Effective isotropic radiated power (dBm): TX power plus antenna gain."""
    total = power_dbm + gain_dbi
    return total
|
||||||
|
|
||||||
|
|
||||||
|
def eirp_watts(power_dbm: float, gain_dbi: float) -> float:
    """Calculate EIRP in watts.

    Equivalent to ``dbm_to_watts(power_dbm + gain_dbi)``, inlined.
    """
    return 10 ** ((power_dbm + gain_dbi - 30) / 10)
|
||||||
|
|
||||||
|
|
||||||
|
def path_loss_to_signal_dbm(power_dbm: float, gain_dbi: float, path_loss_db: float) -> float:
    """Received signal level (dBm): EIRP minus path loss."""
    eirp = power_dbm + gain_dbi
    return eirp - path_loss_db
|
||||||
8
backend/requirements-dev.txt
Normal file
8
backend/requirements-dev.txt
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
# Development and testing dependencies
|
||||||
|
# Install with: pip install -r requirements-dev.txt
|
||||||
|
|
||||||
|
pytest>=7.0.0
|
||||||
|
pytest-asyncio>=0.21.0
|
||||||
|
httpx>=0.27.0
|
||||||
|
ruff>=0.1.0
|
||||||
|
mypy>=1.7.0
|
||||||
10
backend/requirements-gpu-nvidia.txt
Normal file
10
backend/requirements-gpu-nvidia.txt
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
# NVIDIA GPU acceleration via CuPy
|
||||||
|
# Install with: pip install -r requirements-gpu-nvidia.txt
|
||||||
|
#
|
||||||
|
# Choose ONE based on your CUDA version:
|
||||||
|
# - cupy-cuda12x for CUDA 12.x (RTX 30xx, 40xx, newer)
|
||||||
|
# - cupy-cuda11x for CUDA 11.x (older cards)
|
||||||
|
#
|
||||||
|
# CuPy bundles CUDA runtime (~700 MB) - no separate CUDA install needed
|
||||||
|
|
||||||
|
cupy-cuda12x>=13.0.0
|
||||||
14
backend/requirements-gpu-opencl.txt
Normal file
14
backend/requirements-gpu-opencl.txt
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
# Intel/AMD GPU acceleration via PyOpenCL
|
||||||
|
# Install with: pip install -r requirements-gpu-opencl.txt
|
||||||
|
#
|
||||||
|
# Works with:
|
||||||
|
# - Intel UHD/Iris Graphics (integrated)
|
||||||
|
# - AMD Radeon (discrete)
|
||||||
|
# - NVIDIA GPUs (alternative to CUDA)
|
||||||
|
#
|
||||||
|
# Requires OpenCL runtime:
|
||||||
|
# - Intel: Intel GPU Computing Runtime
|
||||||
|
# - AMD: AMD Adrenalin driver (includes OpenCL)
|
||||||
|
# - NVIDIA: NVIDIA driver (includes OpenCL)
|
||||||
|
|
||||||
|
pyopencl>=2023.1
|
||||||
17
backend/requirements.txt
Normal file
17
backend/requirements.txt
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
fastapi==0.110.0
|
||||||
|
uvicorn[standard]==0.29.0
|
||||||
|
python-multipart==0.0.9
|
||||||
|
aiofiles==23.2.1
|
||||||
|
motor==3.3.2
|
||||||
|
pymongo==4.6.1
|
||||||
|
pydantic-settings==2.1.0
|
||||||
|
numpy==1.26.4
|
||||||
|
scipy==1.12.0
|
||||||
|
shapely>=2.0.0
|
||||||
|
requests==2.31.0
|
||||||
|
httpx==0.27.0
|
||||||
|
aiosqlite>=0.19.0
|
||||||
|
sqlalchemy>=2.0.0
|
||||||
|
ray[default]>=2.9.0
|
||||||
|
# GPU acceleration (optional — install cupy-cuda12x for NVIDIA GPU support)
|
||||||
|
# cupy-cuda12x>=13.0.0
|
||||||
89
backend/run_server.py
Normal file
89
backend/run_server.py
Normal file
@@ -0,0 +1,89 @@
|
|||||||
|
"""Entry point for PyInstaller bundle"""
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import multiprocessing
|
||||||
|
|
||||||
|
# Required for ProcessPoolExecutor to work in PyInstaller frozen exe on Windows.
|
||||||
|
# Must be called before any other multiprocessing usage.
|
||||||
|
multiprocessing.freeze_support()
|
||||||
|
|
||||||
|
# Force unbuffered stdout/stderr — critical for piped output (Electron, bat files)
|
||||||
|
os.environ['PYTHONUNBUFFERED'] = '1'
|
||||||
|
if hasattr(sys.stdout, 'reconfigure'):
|
||||||
|
try:
|
||||||
|
sys.stdout.reconfigure(line_buffering=True)
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
if hasattr(sys.stderr, 'reconfigure'):
|
||||||
|
try:
|
||||||
|
sys.stderr.reconfigure(line_buffering=True)
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
|
||||||
|
print("[RFCP] run_server.py starting...", flush=True)
|
||||||
|
|
||||||
|
# Set base path for PyInstaller
|
||||||
|
if getattr(sys, 'frozen', False):
|
||||||
|
base_dir = os.path.dirname(sys.executable)
|
||||||
|
os.chdir(base_dir)
|
||||||
|
print(f"[RFCP] Frozen mode, base dir: {base_dir}", flush=True)
|
||||||
|
|
||||||
|
# Fix uvicorn TTY detection — redirect None streams to a log file
|
||||||
|
# Use RFCP_LOG_PATH from Electron, or fallback to user-writable location
|
||||||
|
log_dir = os.environ.get('RFCP_LOG_PATH')
|
||||||
|
if not log_dir:
|
||||||
|
if sys.platform == 'win32':
|
||||||
|
appdata = os.environ.get('APPDATA', os.path.expanduser('~'))
|
||||||
|
log_dir = os.path.join(appdata, 'rfcp-desktop', 'logs')
|
||||||
|
else:
|
||||||
|
log_dir = os.path.join(os.path.expanduser('~'), '.rfcp', 'logs')
|
||||||
|
|
||||||
|
try:
|
||||||
|
os.makedirs(log_dir, exist_ok=True)
|
||||||
|
log_path = os.path.join(log_dir, 'rfcp-server.log')
|
||||||
|
except Exception:
|
||||||
|
# Fallback to temp directory if all else fails
|
||||||
|
import tempfile
|
||||||
|
log_path = os.path.join(tempfile.gettempdir(), 'rfcp-server.log')
|
||||||
|
|
||||||
|
log_file = open(log_path, 'w')
|
||||||
|
if sys.stdout is None:
|
||||||
|
sys.stdout = log_file
|
||||||
|
if sys.stderr is None:
|
||||||
|
sys.stderr = log_file
|
||||||
|
if sys.stdin is None:
|
||||||
|
sys.stdin = open(os.devnull, 'r')
|
||||||
|
print(f"[RFCP] Log file: {log_path}", flush=True)
|
||||||
|
|
||||||
|
print("[RFCP] Importing uvicorn...", flush=True)
|
||||||
|
import uvicorn
|
||||||
|
|
||||||
|
print("[RFCP] Importing app.main...", flush=True)
|
||||||
|
try:
|
||||||
|
from app.main import app
|
||||||
|
print("[RFCP] App imported successfully", flush=True)
|
||||||
|
except Exception as e:
|
||||||
|
print(f"[RFCP] FATAL: Failed to import app: {e}", flush=True)
|
||||||
|
import traceback
|
||||||
|
traceback.print_exc()
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
host = os.environ.get('RFCP_HOST', '127.0.0.1')
|
||||||
|
port = int(os.environ.get('RFCP_PORT', '8888'))
|
||||||
|
|
||||||
|
print(f"[RFCP] Starting uvicorn on {host}:{port}", flush=True)
|
||||||
|
|
||||||
|
try:
|
||||||
|
uvicorn.run(
|
||||||
|
app,
|
||||||
|
host=host,
|
||||||
|
port=port,
|
||||||
|
log_level='warning',
|
||||||
|
access_log=False,
|
||||||
|
)
|
||||||
|
except Exception as e:
|
||||||
|
print(f"[RFCP] FATAL: uvicorn.run failed: {e}", flush=True)
|
||||||
|
import traceback
|
||||||
|
traceback.print_exc()
|
||||||
|
sys.exit(1)
|
||||||
0
backend/tests/__init__.py
Normal file
0
backend/tests/__init__.py
Normal file
0
backend/tests/test_geometry/__init__.py
Normal file
0
backend/tests/test_geometry/__init__.py
Normal file
60
backend/tests/test_geometry/test_diffraction.py
Normal file
60
backend/tests/test_geometry/test_diffraction.py
Normal file
@@ -0,0 +1,60 @@
|
|||||||
|
"""
|
||||||
|
Unit tests for knife-edge diffraction calculations.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import sys
|
||||||
|
import os
|
||||||
|
|
||||||
|
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..'))
|
||||||
|
|
||||||
|
from app.geometry.diffraction import knife_edge_loss
|
||||||
|
|
||||||
|
|
||||||
|
def freq_to_wl(freq_mhz):
    """Wavelength in meters for a frequency in MHz (c ≈ 3e8 m/s)."""
    wavelength = 300.0 / freq_mhz
    return wavelength
|
||||||
|
|
||||||
|
|
||||||
|
class TestKnifeEdgeLoss:
    """Behavioral checks for single knife-edge diffraction loss."""

    def test_no_obstruction_low_loss(self):
        """Negative h means clearance above LOS — loss should be small."""
        db = knife_edge_loss(d1_m=500, d2_m=500, h_m=-10, wavelength_m=freq_to_wl(1800))
        assert db >= 0
        assert db < 3

    def test_grazing_obstruction(self):
        """h=0 means exactly at LOS line — ~6 dB loss."""
        db = knife_edge_loss(d1_m=500, d2_m=500, h_m=0, wavelength_m=freq_to_wl(1800))
        assert 5 < db < 8

    def test_obstruction_increases_loss(self):
        wavelength = freq_to_wl(1800)
        shallow = knife_edge_loss(d1_m=500, d2_m=500, h_m=1, wavelength_m=wavelength)
        deep = knife_edge_loss(d1_m=500, d2_m=500, h_m=10, wavelength_m=wavelength)
        assert deep > shallow

    def test_higher_freq_more_loss(self):
        """Higher frequency = shorter wavelength = more diffraction loss."""
        at_450 = knife_edge_loss(d1_m=500, d2_m=500, h_m=5, wavelength_m=freq_to_wl(450))
        at_1800 = knife_edge_loss(d1_m=500, d2_m=500, h_m=5, wavelength_m=freq_to_wl(1800))
        assert at_1800 > at_450

    def test_zero_distance_safe(self):
        """Should not crash on zero distances."""
        db = knife_edge_loss(d1_m=0, d2_m=500, h_m=5, wavelength_m=freq_to_wl(900))
        assert db >= 0

    def test_large_clearance(self):
        """Very deep clearance (large negative h) should have near-zero loss."""
        db = knife_edge_loss(d1_m=500, d2_m=500, h_m=-50, wavelength_m=freq_to_wl(900))
        assert db < 1.0
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
instance = TestKnifeEdgeLoss()
|
||||||
|
for method_name in [m for m in dir(instance) if m.startswith("test_")]:
|
||||||
|
try:
|
||||||
|
getattr(instance, method_name)()
|
||||||
|
print(f" PASS {method_name}")
|
||||||
|
except Exception as e:
|
||||||
|
print(f" FAIL {method_name}: {e}")
|
||||||
|
print("\nAll tests completed.")
|
||||||
74
backend/tests/test_geometry/test_haversine.py
Normal file
74
backend/tests/test_geometry/test_haversine.py
Normal file
@@ -0,0 +1,74 @@
|
|||||||
|
"""
|
||||||
|
Unit tests for haversine distance calculations.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import sys
|
||||||
|
import os
|
||||||
|
import numpy as np
|
||||||
|
|
||||||
|
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..'))
|
||||||
|
|
||||||
|
from app.geometry.haversine import haversine_distance, haversine_batch, points_to_local_coords
|
||||||
|
|
||||||
|
|
||||||
|
class TestHaversineDistance:
    """Scalar great-circle distance sanity checks (meters)."""

    def test_same_point_is_zero(self):
        dist = haversine_distance(50.45, 30.52, 50.45, 30.52)
        assert abs(dist) < 1.0

    def test_known_distance(self):
        # Kyiv to Kharkiv ≈ 410 km
        dist = haversine_distance(50.45, 30.52, 49.99, 36.23)
        assert 400000 < dist < 420000

    def test_short_distance(self):
        # 0.001 degree of latitude is roughly 111 m
        dist = haversine_distance(50.0, 30.0, 50.001, 30.0)
        assert 100 < dist < 120
|
||||||
|
|
||||||
|
|
||||||
|
class TestHaversineBatch:
    """Vectorized distance computation against arrays of targets."""

    def test_single_point(self):
        result = haversine_batch(50.0, 30.0, np.array([50.001]), np.array([30.0]))
        assert len(result) == 1
        assert 100 < result[0] < 120

    def test_multiple_points(self):
        target_lats = np.array([50.001, 50.01, 50.1])
        target_lons = np.array([30.0, 30.0, 30.0])
        result = haversine_batch(50.0, 30.0, target_lats, target_lons)
        assert len(result) == 3
        # Farther points must yield strictly larger distances.
        assert result[0] < result[1] < result[2]
|
||||||
|
|
||||||
|
|
||||||
|
class TestLocalCoords:
    """Lat/lon → local tangent-plane projection (x east, y north)."""

    def test_same_point_is_origin(self):
        xs, ys = points_to_local_coords(50.0, 30.0, np.array([50.0]), np.array([30.0]))
        assert abs(xs[0]) < 1.0
        assert abs(ys[0]) < 1.0

    def test_north_is_positive_y(self):
        xs, ys = points_to_local_coords(50.0, 30.0, np.array([50.001]), np.array([30.0]))
        assert ys[0] > 0
        assert abs(xs[0]) < 1.0

    def test_east_is_positive_x(self):
        xs, ys = points_to_local_coords(50.0, 30.0, np.array([50.0]), np.array([30.001]))
        assert xs[0] > 0
        assert abs(ys[0]) < 1.0
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
for cls in [TestHaversineDistance, TestHaversineBatch, TestLocalCoords]:
|
||||||
|
instance = cls()
|
||||||
|
for method_name in [m for m in dir(instance) if m.startswith("test_")]:
|
||||||
|
try:
|
||||||
|
getattr(instance, method_name)()
|
||||||
|
print(f" PASS {cls.__name__}.{method_name}")
|
||||||
|
except Exception as e:
|
||||||
|
print(f" FAIL {cls.__name__}.{method_name}: {e}")
|
||||||
|
|
||||||
|
print("\nAll tests completed.")
|
||||||
77
backend/tests/test_geometry/test_intersection.py
Normal file
77
backend/tests/test_geometry/test_intersection.py
Normal file
@@ -0,0 +1,77 @@
|
|||||||
|
"""
|
||||||
|
Unit tests for line-segment intersection calculations.
|
||||||
|
|
||||||
|
These require NumPy, so use __main__ block with conditional import.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import sys
|
||||||
|
import os
|
||||||
|
|
||||||
|
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..'))
|
||||||
|
|
||||||
|
import numpy as np
|
||||||
|
from app.geometry.intersection import line_segments_intersect_batch
|
||||||
|
|
||||||
|
|
||||||
|
class TestLineSegmentIntersect:
    """Batch 2-D segment-vs-segment intersection checks."""

    def test_crossing_lines(self):
        """Two crossing segments should intersect."""
        # Line from (0,0)→(1,1) and (0,1)→(1,0)
        result = line_segments_intersect_batch(
            p1=np.array([0.0, 0.0]),
            p2=np.array([1.0, 1.0]),
            seg_starts=np.array([[0.0, 1.0]]),
            seg_ends=np.array([[1.0, 0.0]]),
        )
        # Truthiness assert instead of `== True` (ruff E712); also the
        # idiomatic check for numpy bool scalars.
        assert result[0]

    def test_parallel_lines(self):
        """Parallel lines should not intersect."""
        result = line_segments_intersect_batch(
            p1=np.array([0.0, 0.0]),
            p2=np.array([1.0, 0.0]),
            seg_starts=np.array([[0.0, 1.0]]),
            seg_ends=np.array([[1.0, 1.0]]),
        )
        assert not result[0]

    def test_non_crossing(self):
        """Segments that don't reach each other."""
        result = line_segments_intersect_batch(
            p1=np.array([0.0, 0.0]),
            p2=np.array([0.5, 0.5]),
            seg_starts=np.array([[0.8, 0.0]]),
            seg_ends=np.array([[0.8, 1.0]]),
        )
        assert not result[0]

    def test_multiple_segments(self):
        """Batch test with multiple segments."""
        result = line_segments_intersect_batch(
            p1=np.array([0.0, 0.0]),
            p2=np.array([1.0, 1.0]),
            seg_starts=np.array([
                [0.0, 1.0],  # crosses
                [2.0, 0.0],  # doesn't cross
                [0.5, 0.0],  # crosses
            ]),
            seg_ends=np.array([
                [1.0, 0.0],  # crosses
                [2.0, 1.0],  # doesn't cross
                [0.5, 1.0],  # crosses
            ]),
        )
        assert result[0]
        assert not result[1]
        assert result[2]
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
instance = TestLineSegmentIntersect()
|
||||||
|
for method_name in [m for m in dir(instance) if m.startswith("test_")]:
|
||||||
|
try:
|
||||||
|
getattr(instance, method_name)()
|
||||||
|
print(f" PASS {method_name}")
|
||||||
|
except Exception as e:
|
||||||
|
print(f" FAIL {method_name}: {e}")
|
||||||
|
print("\nAll tests completed.")
|
||||||
103
backend/tests/test_geometry/test_los.py
Normal file
103
backend/tests/test_geometry/test_los.py
Normal file
@@ -0,0 +1,103 @@
|
|||||||
|
"""
|
||||||
|
Unit tests for line-of-sight and Fresnel zone calculations.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import sys
|
||||||
|
import os
|
||||||
|
import math
|
||||||
|
|
||||||
|
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..'))
|
||||||
|
|
||||||
|
from app.geometry.los import fresnel_radius, check_los_terrain
|
||||||
|
|
||||||
|
|
||||||
|
def freq_to_wavelength(freq_mhz):
    """Wavelength in meters for a frequency in MHz (c ≈ 3e8 m/s)."""
    wavelength_m = 300.0 / freq_mhz
    return wavelength_m
|
||||||
|
|
||||||
|
|
||||||
|
class TestFresnelRadius:
    """First Fresnel-zone radius properties."""

    def test_positive_result(self):
        assert fresnel_radius(500, 500, freq_to_wavelength(1800)) > 0

    def test_symmetric(self):
        wavelength = freq_to_wavelength(900)
        forward = fresnel_radius(300, 700, wavelength)
        backward = fresnel_radius(700, 300, wavelength)
        assert abs(forward - backward) < 0.001

    def test_lower_freq_larger_radius(self):
        at_1800 = fresnel_radius(500, 500, freq_to_wavelength(1800))
        at_900 = fresnel_radius(500, 500, freq_to_wavelength(900))
        assert at_900 > at_1800

    def test_center_is_maximum(self):
        """Fresnel radius is largest at the midpoint of the path."""
        wavelength = freq_to_wavelength(900)
        midpoint = fresnel_radius(500, 500, wavelength)
        offset = fresnel_radius(200, 800, wavelength)
        assert midpoint > offset

    def test_known_value(self):
        """First Fresnel zone radius at midpoint of 1km path at 1GHz ~ 8.66m."""
        # F1 = sqrt(lambda * d1 * d2 / (d1+d2))
        #    = sqrt(0.3 * 500 * 500 / 1000) = sqrt(75) ≈ 8.66 m
        radius = fresnel_radius(500, 500, freq_to_wavelength(1000))
        assert 8.0 < radius < 9.5

    def test_zero_distance(self):
        assert fresnel_radius(0, 500, freq_to_wavelength(900)) == 0.0
|
||||||
|
|
||||||
|
|
||||||
|
class TestCheckLosTerrain:
    """Line-of-sight checks against sampled terrain profiles."""

    def test_flat_terrain_has_los(self):
        profile = [{"elevation": 100, "distance": d} for d in (0, 250, 500, 750, 1000)]
        result = check_los_terrain(profile, tx_height=30, rx_height=1.5)
        assert result["has_los"] is True
        assert result["clearance"] > 0

    def test_hill_blocks_los(self):
        elevations = [100, 100, 200, 100, 100]  # 100 m hill at the midpoint
        profile = [
            {"elevation": e, "distance": d}
            for e, d in zip(elevations, (0, 250, 500, 750, 1000))
        ]
        result = check_los_terrain(profile, tx_height=10, rx_height=1.5)
        assert result["has_los"] is False
        assert result["blocked_at"] is not None

    def test_empty_profile(self):
        outcome = check_los_terrain([], tx_height=30, rx_height=1.5)
        assert outcome["has_los"] is True

    def test_high_antenna_clears_hill(self):
        profile = [
            {"elevation": 100, "distance": 0},
            {"elevation": 110, "distance": 500},
            {"elevation": 100, "distance": 1000},
        ]
        # TX at 150m (100+50), RX at 101.5m. LOS at 500m ≈ 125.75m, terrain=110m → clear
        result = check_los_terrain(profile, tx_height=50, rx_height=1.5)
        assert result["has_los"] is True
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
for cls in [TestFresnelRadius, TestCheckLosTerrain]:
|
||||||
|
instance = cls()
|
||||||
|
for method_name in [m for m in dir(instance) if m.startswith("test_")]:
|
||||||
|
try:
|
||||||
|
getattr(instance, method_name)()
|
||||||
|
print(f" PASS {cls.__name__}.{method_name}")
|
||||||
|
except Exception as e:
|
||||||
|
print(f" FAIL {cls.__name__}.{method_name}: {e}")
|
||||||
|
print("\nAll tests completed.")
|
||||||
1
backend/tests/test_integration/__init__.py
Normal file
1
backend/tests/test_integration/__init__.py
Normal file
@@ -0,0 +1 @@
|
|||||||
|
|
||||||
127
backend/tests/test_integration/test_calculator.py
Normal file
127
backend/tests/test_integration/test_calculator.py
Normal file
@@ -0,0 +1,127 @@
|
|||||||
|
"""
|
||||||
|
Integration tests for the PointCalculator.
|
||||||
|
|
||||||
|
Verifies end-to-end point calculation with various
|
||||||
|
propagation models and environmental conditions.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import sys
|
||||||
|
import os
|
||||||
|
|
||||||
|
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..'))
|
||||||
|
|
||||||
|
from app.core.calculator import PointCalculator
|
||||||
|
from app.propagation.free_space import FreeSpaceModel
|
||||||
|
from app.propagation.okumura_hata import OkumuraHataModel
|
||||||
|
from app.propagation.cost231_hata import Cost231HataModel
|
||||||
|
|
||||||
|
|
||||||
|
class TestPointCalculatorFSPL:
    """End-to-end free-space point calculations."""

    @staticmethod
    def _point(calc, **overrides):
        """Run calculate_point with the standard test site, applying overrides."""
        params = dict(
            site_lat=50.0, site_lon=30.0, site_height=30,
            site_power=43, site_gain=18, site_frequency=1800,
        )
        params.update(overrides)
        return calc.calculate_point(**params)

    def test_basic_calculation(self):
        calc = PointCalculator(FreeSpaceModel())
        result = self._point(calc, point_lat=50.001, point_lon=30.0, distance=111)
        assert result.rsrp > -50  # Strong signal at short range
        assert result.has_los is True
        assert result.model_used == "Free-Space"
        assert result.path_loss > 0
        assert result.terrain_loss == 0
        assert result.building_loss == 0

    def test_signal_decreases_with_distance(self):
        calc = PointCalculator(FreeSpaceModel())
        near = self._point(calc, point_lat=50.001, point_lon=30.0, distance=100)
        far = self._point(calc, point_lat=50.01, point_lon=30.0, distance=1000)
        assert near.rsrp > far.rsrp

    def test_terrain_obstruction(self):
        calc = PointCalculator(FreeSpaceModel())
        clear = self._point(calc, point_lat=50.01, point_lon=30.0, distance=1000)
        blocked = self._point(
            calc, point_lat=50.01, point_lon=30.0, distance=1000,
            terrain_clearance=-10,
        )
        assert blocked.rsrp < clear.rsrp
        assert blocked.has_los is False
        assert blocked.terrain_loss > 0

    def test_building_loss_applied(self):
        calc = PointCalculator(FreeSpaceModel())
        outdoor = self._point(calc, point_lat=50.01, point_lon=30.0, distance=1000)
        indoor = self._point(
            calc, point_lat=50.01, point_lon=30.0, distance=1000,
            building_loss=20,
        )
        assert abs(outdoor.rsrp - indoor.rsrp - 20) < 0.1
|
||||||
|
|
||||||
|
|
||||||
|
class TestPointCalculatorAntenna:
    """Antenna pattern (azimuth/beamwidth) effects on received level."""

    def test_off_axis_reduces_signal(self):
        calc = PointCalculator(FreeSpaceModel())
        common = dict(
            site_lat=50.0, site_lon=30.0, site_height=30,
            site_power=43, site_gain=18, site_frequency=1800,
            point_lat=50.001, point_lon=30.0, distance=111,
        )
        omni = calc.calculate_point(**common)
        # Sector pointing East while the point lies due North of the site.
        directional = calc.calculate_point(**common, azimuth=90, beamwidth=65)
        assert directional.rsrp < omni.rsrp
|
||||||
|
|
||||||
|
|
||||||
|
class TestPointCalculatorModelFallback:
    """Model-validity fallback behavior."""

    def test_out_of_range_uses_fspl(self):
        """When Okumura-Hata is out of valid range, should fall back to FSPL."""
        calc = PointCalculator(OkumuraHataModel())
        # 50 m is far below the Okumura-Hata minimum distance (1 km).
        result = calc.calculate_point(
            site_lat=50.0, site_lon=30.0, site_height=30,
            site_power=43, site_gain=18, site_frequency=900,
            point_lat=50.0, point_lon=30.0001, distance=50,
        )
        # Should still return a valid result (via FSPL fallback)
        assert result.rsrp != 0
        assert result.path_loss > 0
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
for cls_name, cls in [
|
||||||
|
("FSPL", TestPointCalculatorFSPL),
|
||||||
|
("Antenna", TestPointCalculatorAntenna),
|
||||||
|
("Fallback", TestPointCalculatorModelFallback),
|
||||||
|
]:
|
||||||
|
instance = cls()
|
||||||
|
for method_name in [m for m in dir(instance) if m.startswith("test_")]:
|
||||||
|
try:
|
||||||
|
getattr(instance, method_name)()
|
||||||
|
print(f" PASS {cls_name}.{method_name}")
|
||||||
|
except Exception as e:
|
||||||
|
print(f" FAIL {cls_name}.{method_name}: {e}")
|
||||||
|
print("\nAll tests completed.")
|
||||||
115
backend/tests/test_integration/test_engine.py
Normal file
115
backend/tests/test_integration/test_engine.py
Normal file
@@ -0,0 +1,115 @@
|
|||||||
|
"""
|
||||||
|
Integration tests for the CoverageEngine orchestrator.
|
||||||
|
|
||||||
|
Tests model selection, available models API, and the
|
||||||
|
engine's coordination logic (without running actual
|
||||||
|
coverage calculations, which require terrain data).
|
||||||
|
"""
|
||||||
|
|
||||||
|
import sys
|
||||||
|
import os
|
||||||
|
|
||||||
|
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..'))
|
||||||
|
|
||||||
|
from app.core.engine import CoverageEngine, BandType, PresetType, CoverageSettings
|
||||||
|
|
||||||
|
|
||||||
|
class TestEngineModelSelection:
    """Verify band + terrain → propagation-model routing."""

    @staticmethod
    def _selected(band, terrain):
        """Model instance the engine picks for a band/terrain pair."""
        return CoverageEngine().select_model(band, terrain)

    def test_lte_urban_uses_cost231(self):
        assert self._selected(BandType.LTE, "urban").name == "COST-231-Hata"

    def test_lte_suburban_uses_okumura(self):
        assert self._selected(BandType.LTE, "suburban").name == "Okumura-Hata"

    def test_lte_open_uses_fspl(self):
        assert self._selected(BandType.LTE, "open").name == "Free-Space"

    def test_uhf_urban_uses_okumura(self):
        assert self._selected(BandType.UHF, "urban").name == "Okumura-Hata"

    def test_uhf_rural_uses_longley_rice(self):
        assert self._selected(BandType.UHF, "rural").name == "Longley-Rice"

    def test_vhf_urban_uses_p1546(self):
        assert self._selected(BandType.VHF, "urban").name == "ITU-R-P.1546"

    def test_vhf_rural_uses_longley_rice(self):
        assert self._selected(BandType.VHF, "rural").name == "Longley-Rice"

    def test_unknown_band_falls_back(self):
        model = self._selected(BandType.CUSTOM, "desert")
        assert model is not None  # Should not crash
|
||||||
|
|
||||||
|
|
||||||
|
class TestEngineModelsAPI:
    """Checks on the shape and content of CoverageEngine.get_available_models()."""

    def test_returns_dict(self):
        catalog = CoverageEngine().get_available_models()
        assert isinstance(catalog, dict)
        assert len(catalog) >= 5

    def test_model_info_structure(self):
        # Every model entry must expose its operating envelope and band list.
        catalog = CoverageEngine().get_available_models()
        required_keys = ("frequency_range", "distance_range", "bands")
        for info in catalog.values():
            for key in required_keys:
                assert key in info
            assert len(info["bands"]) > 0

    def test_all_expected_models_present(self):
        catalog = CoverageEngine().get_available_models()
        expected = {"COST-231-Hata", "Okumura-Hata", "Free-Space", "Longley-Rice", "ITU-R-P.1546"}
        assert expected.issubset(catalog.keys())
|
||||||
|
|
||||||
|
|
||||||
|
class TestCoverageSettings:
    """Defaults of CoverageSettings and the raw values of the config enums."""

    def test_default_settings(self):
        settings = CoverageSettings()
        actual = (settings.radius, settings.resolution, settings.preset, settings.band_type)
        assert actual == (10000, 200, PresetType.STANDARD, BandType.LTE)

    def test_preset_values(self):
        # Wire-format strings for each preset are part of the API contract.
        for member, raw in [
            (PresetType.FAST, "fast"),
            (PresetType.STANDARD, "standard"),
            (PresetType.DETAILED, "detailed"),
            (PresetType.FULL, "full"),
        ]:
            assert member.value == raw

    def test_band_type_values(self):
        for member, raw in [
            (BandType.LTE, "lte"),
            (BandType.UHF, "uhf"),
            (BandType.VHF, "vhf"),
        ]:
            assert member.value == raw
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    # Minimal standalone runner: instantiate each suite and invoke every
    # test_* method, reporting pass/fail without requiring pytest.
    suites = (
        ("ModelSelection", TestEngineModelSelection),
        ("ModelsAPI", TestEngineModelsAPI),
        ("CoverageSettings", TestCoverageSettings),
    )
    for suite_name, suite_cls in suites:
        suite = suite_cls()
        test_names = [name for name in dir(suite) if name.startswith("test_")]
        for test_name in test_names:
            try:
                getattr(suite, test_name)()
            except Exception as exc:
                print(f" FAIL {suite_name}.{test_name}: {exc}")
            else:
                print(f" PASS {suite_name}.{test_name}")
    print("\nAll tests completed.")
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user