@mytec: iter2.4 ready for testing

This commit is contained in:
2026-02-01 10:48:23 +02:00
parent 7893c57bc9
commit 5488633e43
19 changed files with 1448 additions and 69 deletions

View File

@@ -0,0 +1,821 @@
# RFCP Phase 2.4: GPU Acceleration + Elevation Layer
**Date:** February 1, 2026
**Type:** Performance + UI Enhancement
**Priority:** HIGH
**Depends on:** Phase 2.3 (Performance fixes)
---
## 🎯 Goals
1. **Elevation Layer** — візуалізація рельєфу на карті
2. **GPU Acceleration** — прискорення розрахунків через CUDA
3. **Bug Fixes** — закриття app, timeout handling
---
## 🐛 Bug Fixes (CRITICAL — Do First!)
### Bug 2.4.0a: App Close Still Not Working
**Symptoms:**
- Clicking X closes window but processes stay running
- rfcp-server.exe stays in Task Manager
- Have to manually kill processes
**File:** `desktop/main.js`
**Debug steps:**
1. Add console.log at START of killBackend():
```javascript
function killBackend() {
console.log('[KILL] killBackend() called, pid:', backendPid);
// ... rest of function
}
```
2. Add console.log in close handler:
```javascript
mainWindow.on('close', (event) => {
console.log('[CLOSE] Window close event triggered');
killBackend();
});
```
3. Check if the issue is:
- killBackend() not being called at all
- taskkill not working (wrong PID?)
- Process spawning children that aren't killed
**Potential fix:**
```javascript
function killBackend() {
console.log('[KILL] killBackend() called');
if (!backendPid && !backendProcess) {
console.log('[KILL] No backend to kill');
return;
}
const pid = backendPid || backendProcess?.pid;
console.log('[KILL] Killing PID:', pid);
if (process.platform === 'win32') {
// Force kill entire process tree
try {
require('child_process').execSync(`taskkill /F /T /PID ${pid}`, {
stdio: 'ignore'
});
console.log('[KILL] taskkill completed');
} catch (e) {
console.log('[KILL] taskkill error:', e.message);
}
}
backendProcess = null;
backendPid = null;
}
```
4. Add in app quit:
```javascript
app.on('before-quit', () => {
console.log('[QUIT] before-quit event');
killBackend();
});
app.on('will-quit', () => {
console.log('[QUIT] will-quit event');
killBackend();
});
```
---
### Bug 2.4.0b: Calculation Continues After Timeout
**Symptoms:**
- User gets "timeout" error in UI
- But backend keeps calculating (CPU stays loaded)
- Machine stays slow until manually kill process
**File:** `backend/app/services/coverage_service.py`
**Root cause:** asyncio.wait_for() cancels the coroutine but:
- ProcessPoolExecutor workers keep running
- Ray tasks keep running
- No cancellation signal sent
**Fix in coverage_service.py:**
```python
# Add cancellation flag
_calculation_cancelled = False
async def calculate_coverage(sites, settings):
global _calculation_cancelled
_calculation_cancelled = False
try:
result = await asyncio.wait_for(
_do_calculation(sites, settings),
timeout=300 # 5 minutes
)
return result
except asyncio.TimeoutError:
_calculation_cancelled = True
_cleanup_running_tasks() # NEW
raise HTTPException(408, "Calculation timeout")
def _cleanup_running_tasks():
"""Stop any running parallel workers."""
global _calculation_cancelled
_calculation_cancelled = True
# If using Ray
if RAY_AVAILABLE and ray.is_initialized():
# Cancel pending tasks
# Ray doesn't have great cancellation, but we can try
pass
# If using ProcessPoolExecutor — NOTE: a module-level global set in the parent
# process is NOT visible inside worker processes (each worker has its own
# interpreter/memory). To signal workers, pass a multiprocessing.Event or a
# shared Value instead, or shut the executor down with cancel_futures=True.
_clog("Calculation cancelled, cleaning up workers")
```
**In parallel workers, check cancellation:**
```python
def _process_chunk(chunk, ...):
results = []
for point in chunk:
# Check if cancelled
if _calculation_cancelled:
_clog("Worker detected cancellation, stopping")
break
result = _calculate_point_sync(point, ...)
results.append(result)
return results
```
---
## 📊 Part A: Elevation Layer
### A.1: Backend API
**New file:** `backend/app/api/routes/terrain.py`
```python
from math import cos, radians
from fastapi import APIRouter, Query
from typing import List
from app.services.terrain_service import terrain_service
router = APIRouter(prefix="/api/terrain", tags=["terrain"])
@router.get("/elevation-grid")
async def get_elevation_grid(
min_lat: float = Query(..., description="South boundary"),
max_lat: float = Query(..., description="North boundary"),
min_lon: float = Query(..., description="West boundary"),
max_lon: float = Query(..., description="East boundary"),
resolution: int = Query(100, description="Grid resolution in meters")
) -> dict:
"""
Get elevation grid for a bounding box.
Returns a 2D array of elevations for rendering terrain layer.
"""
# Calculate grid dimensions
lat_range = max_lat - min_lat
lon_range = max_lon - min_lon
# Approximate meters per degree
meters_per_lat = 111000
meters_per_lon = 111000 * cos(radians((min_lat + max_lat) / 2))
# Grid size
rows = int((lat_range * meters_per_lat) / resolution)
cols = int((lon_range * meters_per_lon) / resolution)
# Cap to reasonable size
rows = min(rows, 200)
cols = min(cols, 200)
# Build elevation grid
elevations = []
lat_step = lat_range / rows
lon_step = lon_range / cols
for i in range(rows):
row = []
lat = max_lat - (i + 0.5) * lat_step # Start from north
for j in range(cols):
lon = min_lon + (j + 0.5) * lon_step
elev = terrain_service.get_elevation_sync(lat, lon)
row.append(elev)
elevations.append(row)
# Get min/max for color scaling
flat = [e for row in elevations for e in row]
return {
"elevations": elevations,
"rows": rows,
"cols": cols,
"min_elevation": min(flat),
"max_elevation": max(flat),
"bbox": {
"min_lat": min_lat,
"max_lat": max_lat,
"min_lon": min_lon,
"max_lon": max_lon
}
}
```
**Register in main.py:**
```python
from app.api.routes import terrain
app.include_router(terrain.router)
```
---
### A.2: Frontend Component
**New file:** `frontend/src/components/ElevationLayer.tsx`
```tsx
import { useEffect, useRef } from 'react';
import { useMap } from 'react-leaflet';
import L from 'leaflet';
interface ElevationLayerProps {
enabled: boolean;
opacity: number;
bbox: {
minLat: number;
maxLat: number;
minLon: number;
maxLon: number;
} | null;
}
// Color scale: blue (low) → green → yellow → orange → red (high)
const ELEVATION_COLORS = [
{ threshold: 0, color: [33, 102, 172] }, // #2166ac deep blue
{ threshold: 100, color: [103, 169, 207] }, // #67a9cf light blue
{ threshold: 150, color: [145, 207, 96] }, // #91cf60 green
{ threshold: 200, color: [254, 224, 139] }, // #fee08b yellow
{ threshold: 250, color: [252, 141, 89] }, // #fc8d59 orange
{ threshold: 300, color: [215, 48, 39] }, // #d73027 red
{ threshold: 400, color: [165, 0, 38] }, // #a50026 dark red
];
function getColorForElevation(elevation: number): [number, number, number] {
for (let i = ELEVATION_COLORS.length - 1; i >= 0; i--) {
if (elevation >= ELEVATION_COLORS[i].threshold) {
if (i === ELEVATION_COLORS.length - 1) {
return ELEVATION_COLORS[i].color as [number, number, number];
}
// Interpolate between this and next color
const low = ELEVATION_COLORS[i];
const high = ELEVATION_COLORS[i + 1];
const t = (elevation - low.threshold) / (high.threshold - low.threshold);
return [
Math.round(low.color[0] + t * (high.color[0] - low.color[0])),
Math.round(low.color[1] + t * (high.color[1] - low.color[1])),
Math.round(low.color[2] + t * (high.color[2] - low.color[2])),
];
}
}
return ELEVATION_COLORS[0].color as [number, number, number];
}
export function ElevationLayer({ enabled, opacity, bbox }: ElevationLayerProps) {
const map = useMap();
const canvasRef = useRef<HTMLCanvasElement | null>(null);
const overlayRef = useRef<L.ImageOverlay | null>(null);
useEffect(() => {
if (!enabled || !bbox) {
// Remove overlay if disabled
if (overlayRef.current) {
map.removeLayer(overlayRef.current);
overlayRef.current = null;
}
return;
}
// Fetch elevation data
const fetchElevation = async () => {
const params = new URLSearchParams({
min_lat: bbox.minLat.toString(),
max_lat: bbox.maxLat.toString(),
min_lon: bbox.minLon.toString(),
max_lon: bbox.maxLon.toString(),
resolution: '100',
});
const response = await fetch(`/api/terrain/elevation-grid?${params}`);
const data = await response.json();
// Create canvas
const canvas = document.createElement('canvas');
canvas.width = data.cols;
canvas.height = data.rows;
const ctx = canvas.getContext('2d')!;
const imageData = ctx.createImageData(data.cols, data.rows);
// Fill pixel data
for (let i = 0; i < data.rows; i++) {
for (let j = 0; j < data.cols; j++) {
const elevation = data.elevations[i][j];
const color = getColorForElevation(elevation);
const idx = (i * data.cols + j) * 4;
imageData.data[idx] = color[0]; // R
imageData.data[idx + 1] = color[1]; // G
imageData.data[idx + 2] = color[2]; // B
imageData.data[idx + 3] = 255; // A
}
}
ctx.putImageData(imageData, 0, 0);
// Create overlay
const bounds = L.latLngBounds(
[bbox.minLat, bbox.minLon],
[bbox.maxLat, bbox.maxLon]
);
if (overlayRef.current) {
map.removeLayer(overlayRef.current);
}
overlayRef.current = L.imageOverlay(canvas.toDataURL(), bounds, {
opacity: opacity,
interactive: false,
});
overlayRef.current.addTo(map);
};
fetchElevation();
return () => {
if (overlayRef.current) {
map.removeLayer(overlayRef.current);
}
};
}, [enabled, opacity, bbox, map]);
return null;
}
```
---
### A.3: Layer Controls UI
**Update:** `frontend/src/App.tsx` or create `LayerControls.tsx`
```tsx
// Add to state
const [showElevation, setShowElevation] = useState(false);
const [elevationOpacity, setElevationOpacity] = useState(0.5);
// Add to UI (in settings panel or toolbar)
<div className="layer-controls">
<h4>Map Layers</h4>
<label className="layer-toggle">
<input
type="checkbox"
checked={showElevation}
onChange={(e) => setShowElevation(e.target.checked)}
/>
Show Elevation
</label>
{showElevation && (
<div className="elevation-opacity">
<label>Opacity: {Math.round(elevationOpacity * 100)}%</label>
<input
type="range"
min="0.2"
max="1"
step="0.1"
value={elevationOpacity}
onChange={(e) => setElevationOpacity(parseFloat(e.target.value))}
/>
</div>
)}
{/* Elevation legend */}
{showElevation && (
<div className="elevation-legend">
<div className="legend-item">
<span className="color-box" style={{background: '#2166ac'}}></span>
&lt;100m
</div>
<div className="legend-item">
<span className="color-box" style={{background: '#67a9cf'}}></span>
100-150m
</div>
<div className="legend-item">
<span className="color-box" style={{background: '#91cf60'}}></span>
150-200m
</div>
<div className="legend-item">
<span className="color-box" style={{background: '#fee08b'}}></span>
200-250m
</div>
<div className="legend-item">
<span className="color-box" style={{background: '#d73027'}}></span>
&gt;300m
</div>
</div>
)}
</div>
// In Map component
<ElevationLayer
enabled={showElevation}
opacity={elevationOpacity}
bbox={mapBounds} // Current map view bounds
/>
```
---
## ⚡ Part B: GPU Acceleration
### B.1: GPU Service
**New file:** `backend/app/services/gpu_service.py`
```python
"""
GPU acceleration for coverage calculations using CuPy.
Falls back to NumPy if CUDA not available.
"""
import numpy as np
from typing import Tuple, Optional
import os
# Try to import CuPy
GPU_AVAILABLE = False
GPU_INFO = None
try:
import cupy as cp
# Check if CUDA actually works
try:
cp.cuda.runtime.getDeviceCount()
GPU_AVAILABLE = True
# Get GPU info
props = cp.cuda.runtime.getDeviceProperties(0)
GPU_INFO = {
'name': props['name'].decode() if isinstance(props['name'], bytes) else props['name'],
'memory_mb': props['totalGlobalMem'] // (1024 * 1024),
'cuda_version': cp.cuda.runtime.runtimeGetVersion(),
}
print(f"[GPU] CUDA available: {GPU_INFO['name']} ({GPU_INFO['memory_mb']} MB)")
except Exception as e:
print(f"[GPU] CUDA device check failed: {e}")
except ImportError:
print("[GPU] CuPy not installed, using CPU only")
def get_array_module():
"""Get the appropriate array module (cupy or numpy)."""
if GPU_AVAILABLE:
return cp
return np
def to_gpu(array: np.ndarray) -> 'cp.ndarray | np.ndarray':
"""Move array to GPU if available."""
if GPU_AVAILABLE:
return cp.asarray(array)
return array
def to_cpu(array) -> np.ndarray:
"""Move array back to CPU."""
if GPU_AVAILABLE and hasattr(array, 'get'):
return array.get()
return np.asarray(array)
class GPUService:
"""GPU-accelerated calculations for coverage planning."""
def __init__(self):
self.enabled = GPU_AVAILABLE
self.info = GPU_INFO
def calculate_distances_batch(
self,
site_lat: float,
site_lon: float,
point_lats: np.ndarray,
point_lons: np.ndarray,
) -> np.ndarray:
"""
Calculate Haversine distances from site to all points.
Vectorized for GPU acceleration.
Args:
site_lat, site_lon: Site coordinates (degrees)
point_lats, point_lons: Arrays of point coordinates (degrees)
Returns:
Array of distances in meters
"""
xp = get_array_module()
# Move to GPU if available
lats = to_gpu(point_lats)
lons = to_gpu(point_lons)
# Convert to radians
lat1 = xp.radians(site_lat)
lon1 = xp.radians(site_lon)
lat2 = xp.radians(lats)
lon2 = xp.radians(lons)
# Haversine formula (vectorized)
dlat = lat2 - lat1
dlon = lon2 - lon1
a = xp.sin(dlat / 2) ** 2 + xp.cos(lat1) * xp.cos(lat2) * xp.sin(dlon / 2) ** 2
c = 2 * xp.arcsin(xp.sqrt(a))
R = 6371000 # Earth radius in meters
distances = R * c
return to_cpu(distances)
def calculate_free_space_path_loss_batch(
self,
distances: np.ndarray,
frequency_mhz: float,
) -> np.ndarray:
"""
Calculate Free Space Path Loss for all distances.
FSPL = 20*log10(d) + 20*log10(f) + 20*log10(4π/c)
= 20*log10(d_km) + 20*log10(f_mhz) + 32.45
"""
xp = get_array_module()
d = to_gpu(distances)
# Avoid log(0)
d_km = xp.maximum(d / 1000.0, 0.001)
fspl = 20 * xp.log10(d_km) + 20 * xp.log10(frequency_mhz) + 32.45
return to_cpu(fspl)
def calculate_okumura_hata_batch(
self,
distances: np.ndarray,
frequency_mhz: float,
tx_height: float,
rx_height: float = 1.5,
environment: str = 'urban',
) -> np.ndarray:
"""
Calculate Okumura-Hata path loss for all distances.
Vectorized for GPU acceleration.
"""
xp = get_array_module()
d = to_gpu(distances)
# Avoid log(0)
d_km = xp.maximum(d / 1000.0, 0.001)
f = frequency_mhz
hb = tx_height
hm = rx_height
# Mobile antenna height correction (urban)
if f <= 200:
a_hm = 8.29 * (xp.log10(1.54 * hm)) ** 2 - 1.1
elif f >= 400:
a_hm = 3.2 * (xp.log10(11.75 * hm)) ** 2 - 4.97
else:
a_hm = (1.1 * xp.log10(f) - 0.7) * hm - (1.56 * xp.log10(f) - 0.8)
# Base formula
L = (69.55 + 26.16 * xp.log10(f)
- 13.82 * xp.log10(hb)
- a_hm
+ (44.9 - 6.55 * xp.log10(hb)) * xp.log10(d_km))
# Environment corrections
if environment == 'suburban':
L = L - 2 * (xp.log10(f / 28)) ** 2 - 5.4
elif environment == 'rural':
L = L - 4.78 * (xp.log10(f)) ** 2 + 18.33 * xp.log10(f) - 40.94
return to_cpu(L)
def calculate_rsrp_batch(
self,
distances: np.ndarray,
tx_power_dbm: float,
antenna_gain_dbi: float,
frequency_mhz: float,
tx_height: float,
environment: str = 'urban',
) -> np.ndarray:
"""
Calculate RSRP for all points (basic, without terrain/buildings).
"""
path_loss = self.calculate_okumura_hata_batch(
distances, frequency_mhz, tx_height,
environment=environment
)
rsrp = tx_power_dbm + antenna_gain_dbi - path_loss
return rsrp
# Singleton instance
gpu_service = GPUService()
```
---
### B.2: Integration with Coverage Service
**Update:** `backend/app/services/coverage_service.py`
```python
from app.services.gpu_service import gpu_service, GPU_AVAILABLE
# In calculate_coverage, before point loop:
async def calculate_coverage(sites, settings):
# ... existing Phase 1 & 2 code ...
# Phase 2.5: Pre-calculate with GPU if available
if GPU_AVAILABLE and len(grid) > 100:
_clog(f"Using GPU acceleration for {len(grid)} points")
# Prepare arrays
point_lats = np.array([p[0] for p in grid])
point_lons = np.array([p[1] for p in grid])
# Calculate all distances at once (GPU)
all_distances = gpu_service.calculate_distances_batch(
site.lat, site.lon, point_lats, point_lons
)
# Calculate all basic path losses at once (GPU)
all_path_losses = gpu_service.calculate_okumura_hata_batch(
all_distances,
site.frequency,
site.height,
environment='urban' if settings.use_buildings else 'rural'
)
# Store for use in point loop
precomputed = {
'distances': all_distances,
'path_losses': all_path_losses,
}
_clog(f"GPU pre-calculation done: {len(grid)} distances + path losses")
else:
precomputed = None
# Phase 3: Point loop (uses precomputed if available)
# ... modify _calculate_point_sync to accept precomputed values ...
```
---
### B.3: System Info Update
**Update:** `backend/app/api/routes/system.py`
```python
from app.services.gpu_service import GPU_AVAILABLE, GPU_INFO
@router.get("/api/system/info")
async def get_system_info():
return {
"cpu_cores": mp.cpu_count(),
"parallel_workers": min(mp.cpu_count() - 2, 14),
"parallel_backend": "ray" if RAY_AVAILABLE else "process_pool" if mp.cpu_count() > 1 else "sequential",
"ray_available": RAY_AVAILABLE,
"gpu": GPU_INFO, # Now includes name, memory, cuda_version
"gpu_available": GPU_AVAILABLE,
}
```
---
### B.4: Requirements
**Update:** `backend/requirements.txt`
```
# ... existing requirements ...
# GPU acceleration (optional)
# Install with: pip install cupy-cuda12x
# Or for CUDA 11.x: pip install cupy-cuda11x
# cupy-cuda12x>=12.0.0
```
**Note:** CuPy is optional. Code falls back to NumPy if not installed.
---
## 📁 Files to Create/Modify
**New files:**
- `backend/app/api/routes/terrain.py`
- `backend/app/services/gpu_service.py`
- `frontend/src/components/ElevationLayer.tsx`
**Modified files:**
- `backend/app/main.py` — register terrain router
- `backend/app/services/coverage_service.py` — GPU integration, cancellation
- `backend/app/api/routes/system.py` — GPU info
- `backend/requirements.txt` — cupy optional
- `desktop/main.js` — fix app close (debug + fix)
- `frontend/src/App.tsx` — elevation layer toggle
---
## 🧪 Testing
### Test Elevation Layer:
```bash
# Start app
./rfcp-debug.bat
# In browser console or via curl:
curl "http://localhost:8888/api/terrain/elevation-grid?min_lat=48.5&max_lat=48.7&min_lon=36.0&max_lon=36.2&resolution=100"
# Should return JSON with elevations array
```
### Test GPU:
```bash
# Check system info
curl http://localhost:8888/api/system/info
# Should show:
# "gpu_available": true,
# "gpu": {"name": "NVIDIA GeForce RTX 4060", "memory_mb": 8192, ...}
```
### Test App Close:
```
1. Start app via RFCP.exe (not debug bat)
2. Click X to close
3. Check Task Manager - rfcp-server.exe should NOT be running
4. If still running - check console logs for [KILL] messages
```
---
## ✅ Success Criteria
- [ ] Elevation layer toggleable on map
- [ ] Elevation colors match terrain (verify with known locations)
- [ ] GPU detected and shown in system info (if NVIDIA card present)
- [ ] Fast preset 2x faster with GPU
- [ ] App closes completely when clicking X
- [ ] No orphan processes after timeout
- [ ] All existing presets still work
---
## 📈 Expected Performance
| Operation | CPU (NumPy) | GPU (CuPy) | Speedup |
|-----------|-------------|------------|---------|
| 10k distances | 5ms | 0.1ms | 50x |
| 10k path losses | 10ms | 0.2ms | 50x |
| Full calculation* | 10s | 3s | 3x |
*Full calculation limited by CPU-bound terrain/building checks
---
## 🔜 Next Phase
Phase 2.5: Advanced Visualization
- LOS ray visualization (show blocked paths)
- 3D terrain view
- Antenna pattern visualization
- Multi-site interference view

View File

@@ -12,6 +12,7 @@ from app.services.coverage_service import (
apply_preset, apply_preset,
PRESETS, PRESETS,
) )
from app.services.parallel_coverage_service import CancellationToken
router = APIRouter() router = APIRouter()
@@ -59,6 +60,7 @@ async def calculate_coverage(request: CoverageRequest) -> CoverageResponse:
# Time the calculation # Time the calculation
start_time = time.time() start_time = time.time()
cancel_token = CancellationToken()
try: try:
# Calculate with 5-minute timeout # Calculate with 5-minute timeout
@@ -66,7 +68,8 @@ async def calculate_coverage(request: CoverageRequest) -> CoverageResponse:
points = await asyncio.wait_for( points = await asyncio.wait_for(
coverage_service.calculate_coverage( coverage_service.calculate_coverage(
request.sites[0], request.sites[0],
request.settings request.settings,
cancel_token,
), ),
timeout=300.0 timeout=300.0
) )
@@ -74,12 +77,17 @@ async def calculate_coverage(request: CoverageRequest) -> CoverageResponse:
points = await asyncio.wait_for( points = await asyncio.wait_for(
coverage_service.calculate_multi_site_coverage( coverage_service.calculate_multi_site_coverage(
request.sites, request.sites,
request.settings request.settings,
cancel_token,
), ),
timeout=300.0 timeout=300.0
) )
except asyncio.TimeoutError: except asyncio.TimeoutError:
cancel_token.cancel()
raise HTTPException(408, "Calculation timeout (5 min) — try smaller radius or lower resolution") raise HTTPException(408, "Calculation timeout (5 min) — try smaller radius or lower resolution")
except asyncio.CancelledError:
cancel_token.cancel()
raise HTTPException(499, "Client disconnected")
computation_time = time.time() - start_time computation_time = time.time() - start_time

View File

@@ -21,25 +21,24 @@ async def get_system_info():
except Exception: except Exception:
pass pass
# Check GPU # Check GPU via gpu_service
gpu_info = None from app.services.gpu_service import gpu_service
try: gpu_info = gpu_service.get_info()
import cupy as cp
if cp.cuda.runtime.getDeviceCount() > 0: # Determine parallel backend
props = cp.cuda.runtime.getDeviceProperties(0) if ray_available:
gpu_info = { parallel_backend = "ray"
"name": props["name"].decode(), elif cpu_cores > 1:
"memory_mb": props["totalGlobalMem"] // (1024 * 1024), parallel_backend = "process_pool"
} else:
except Exception: parallel_backend = "sequential"
pass
return { return {
"cpu_cores": cpu_cores, "cpu_cores": cpu_cores,
"parallel_workers": min(cpu_cores, 14), "parallel_workers": min(cpu_cores, 14),
"parallel_backend": "ray" if ray_available else "sequential", "parallel_backend": parallel_backend,
"ray_available": ray_available, "ray_available": ray_available,
"ray_initialized": ray_initialized, "ray_initialized": ray_initialized,
"gpu": gpu_info, "gpu": gpu_info,
"gpu_enabled": gpu_info is not None, "gpu_available": gpu_info.get("available", False),
} }

View File

@@ -1,4 +1,6 @@
import os import os
import asyncio
import math
from fastapi import APIRouter, HTTPException, Query from fastapi import APIRouter, HTTPException, Query
from fastapi.responses import FileResponse from fastapi.responses import FileResponse
@@ -11,6 +13,46 @@ from app.services.los_service import los_service
router = APIRouter() router = APIRouter()
def _build_elevation_grid(min_lat, max_lat, min_lon, max_lon, resolution):
"""Build a 2D elevation grid. Runs in thread executor (CPU-bound)."""
import numpy as np
rows = min(resolution, 200)
cols = min(resolution, 200)
lats = np.linspace(max_lat, min_lat, rows) # north to south
lons = np.linspace(min_lon, max_lon, cols)
grid = []
min_elev = float('inf')
max_elev = float('-inf')
for lat in lats:
row = []
for lon in lons:
elev = terrain_service.get_elevation_sync(float(lat), float(lon))
row.append(elev)
if elev < min_elev:
min_elev = elev
if elev > max_elev:
max_elev = elev
grid.append(row)
return {
"grid": grid,
"rows": rows,
"cols": cols,
"min_elevation": min_elev if min_elev != float('inf') else 0,
"max_elevation": max_elev if max_elev != float('-inf') else 0,
"bbox": {
"min_lat": min_lat,
"max_lat": max_lat,
"min_lon": min_lon,
"max_lon": max_lon,
},
}
@router.get("/elevation") @router.get("/elevation")
async def get_elevation( async def get_elevation(
lat: float = Query(..., ge=-90, le=90, description="Latitude"), lat: float = Query(..., ge=-90, le=90, description="Latitude"),
@@ -26,6 +68,42 @@ async def get_elevation(
} }
@router.get("/elevation-grid")
async def get_elevation_grid(
min_lat: float = Query(..., ge=-90, le=90, description="South boundary"),
max_lat: float = Query(..., ge=-90, le=90, description="North boundary"),
min_lon: float = Query(..., ge=-180, le=180, description="West boundary"),
max_lon: float = Query(..., ge=-180, le=180, description="East boundary"),
resolution: int = Query(100, ge=10, le=200, description="Grid size (rows/cols)"),
):
"""Get elevation grid for a bounding box. Returns a 2D array for terrain visualization."""
if max_lat <= min_lat or max_lon <= min_lon:
raise HTTPException(400, "Invalid bbox: max must be greater than min")
if (max_lat - min_lat) > 2.0 or (max_lon - min_lon) > 2.0:
raise HTTPException(400, "Bbox too large (max 2 degrees per axis)")
# Ensure terrain tiles are loaded for this area
await terrain_service.ensure_tiles_for_bbox(min_lat, min_lon, max_lat, max_lon)
# Pre-load all tiles that cover the bbox
lat_start = int(math.floor(min_lat))
lat_end = int(math.floor(max_lat))
lon_start = int(math.floor(min_lon))
lon_end = int(math.floor(max_lon))
for lat_i in range(lat_start, lat_end + 1):
for lon_i in range(lon_start, lon_end + 1):
tile_name = terrain_service.get_tile_name(lat_i + 0.5, lon_i + 0.5)
terrain_service._load_tile(tile_name)
# Build grid in thread executor (CPU-bound sync calls)
loop = asyncio.get_event_loop()
result = await loop.run_in_executor(
None, _build_elevation_grid,
min_lat, max_lat, min_lon, max_lon, resolution,
)
return result
@router.get("/profile") @router.get("/profile")
async def get_elevation_profile( async def get_elevation_profile(
lat1: float = Query(..., description="Start latitude"), lat1: float = Query(..., description="Start latitude"),
@@ -87,9 +165,9 @@ async def check_fresnel_clearance(
@router.get("/tiles") @router.get("/tiles")
async def list_cached_tiles(): async def list_cached_tiles():
"""List cached SRTM tiles""" """List cached SRTM tiles"""
tiles = list(terrain_service.cache_dir.glob("*.hgt")) tiles = list(terrain_service.terrain_path.glob("*.hgt"))
return { return {
"cache_dir": str(terrain_service.cache_dir), "cache_dir": str(terrain_service.terrain_path),
"tiles": [t.stem for t in tiles], "tiles": [t.stem for t in tiles],
"count": len(tiles) "count": len(tiles)
} }

View File

@@ -55,6 +55,7 @@ from app.services.indoor_service import indoor_service
from app.services.atmospheric_service import atmospheric_service from app.services.atmospheric_service import atmospheric_service
from app.services.parallel_coverage_service import ( from app.services.parallel_coverage_service import (
calculate_coverage_parallel, get_cpu_count, get_parallel_backend, calculate_coverage_parallel, get_cpu_count, get_parallel_backend,
CancellationToken,
) )
@@ -280,7 +281,8 @@ class CoverageService:
async def calculate_coverage( async def calculate_coverage(
self, self,
site: SiteParams, site: SiteParams,
settings: CoverageSettings settings: CoverageSettings,
cancel_token: Optional[CancellationToken] = None,
) -> List[CoveragePoint]: ) -> List[CoveragePoint]:
""" """
Calculate coverage grid for a single site Calculate coverage grid for a single site
@@ -352,6 +354,32 @@ class CoverageService:
f"pre-computed {len(grid)} elevations") f"pre-computed {len(grid)} elevations")
_clog(f"━━━ PHASE 2 done: {terrain_time:.1f}s ━━━") _clog(f"━━━ PHASE 2 done: {terrain_time:.1f}s ━━━")
# ━━━ PHASE 2.5: Vectorized pre-computation (GPU/NumPy) ━━━
from app.services.gpu_service import gpu_service
t_gpu = time.time()
grid_lats = np.array([lat for lat, lon in grid])
grid_lons = np.array([lon for lat, lon in grid])
pre_distances = gpu_service.precompute_distances(
grid_lats, grid_lons, site.lat, site.lon
)
pre_path_loss = gpu_service.precompute_path_loss(
pre_distances, site.frequency, site.height
)
# Build lookup dict for point loop
precomputed = {}
for i, (lat, lon) in enumerate(grid):
precomputed[(lat, lon)] = {
'distance': float(pre_distances[i]),
'path_loss': float(pre_path_loss[i]),
}
gpu_time = time.time() - t_gpu
_clog(f"━━━ PHASE 2.5: Vectorized pre-computation done: {gpu_time:.3f}s "
f"({len(grid)} points, backend={'GPU' if gpu_service.available else 'CPU/NumPy'}) ━━━")
# ━━━ PHASE 3: Point calculation ━━━ # ━━━ PHASE 3: Point calculation ━━━
dominant_path_service._log_count = 0 # Reset diagnostic counter dominant_path_service._log_count = 0 # Reset diagnostic counter
t_points = time.time() t_points = time.time()
@@ -368,12 +396,15 @@ class CoverageService:
loop = asyncio.get_event_loop() loop = asyncio.get_event_loop()
result_dicts, timing = await loop.run_in_executor( result_dicts, timing = await loop.run_in_executor(
None, None,
calculate_coverage_parallel, lambda: calculate_coverage_parallel(
grid, point_elevations, grid, point_elevations,
site.model_dump(), settings.model_dump(), site.model_dump(), settings.model_dump(),
self.terrain._tile_cache, self.terrain._tile_cache,
buildings, streets, water_bodies, vegetation_areas, buildings, streets, water_bodies, vegetation_areas,
site_elevation, num_workers, _clog, site_elevation, num_workers, _clog,
cancel_token=cancel_token,
precomputed=precomputed,
),
) )
# Convert dicts back to CoveragePoint objects # Convert dicts back to CoveragePoint objects
@@ -389,10 +420,13 @@ class CoverageService:
loop = asyncio.get_event_loop() loop = asyncio.get_event_loop()
points, timing = await loop.run_in_executor( points, timing = await loop.run_in_executor(
None, None,
self._run_point_loop, lambda: self._run_point_loop(
grid, site, settings, buildings, streets, grid, site, settings, buildings, streets,
spatial_idx, water_bodies, vegetation_areas, spatial_idx, water_bodies, vegetation_areas,
site_elevation, point_elevations site_elevation, point_elevations,
cancel_token=cancel_token,
precomputed=precomputed,
),
) )
points_time = time.time() - t_points points_time = time.time() - t_points
@@ -423,7 +457,8 @@ class CoverageService:
async def calculate_multi_site_coverage( async def calculate_multi_site_coverage(
self, self,
sites: List[SiteParams], sites: List[SiteParams],
settings: CoverageSettings settings: CoverageSettings,
cancel_token: Optional[CancellationToken] = None,
) -> List[CoveragePoint]: ) -> List[CoveragePoint]:
""" """
Calculate combined coverage from multiple sites Calculate combined coverage from multiple sites
@@ -437,7 +472,7 @@ class CoverageService:
# Get all individual coverages # Get all individual coverages
all_coverages = await asyncio.gather(*[ all_coverages = await asyncio.gather(*[
self.calculate_coverage(site, settings) self.calculate_coverage(site, settings, cancel_token)
for site in sites for site in sites
]) ])
@@ -485,7 +520,8 @@ class CoverageService:
def _run_point_loop( def _run_point_loop(
self, grid, site, settings, buildings, streets, self, grid, site, settings, buildings, streets,
spatial_idx, water_bodies, vegetation_areas, spatial_idx, water_bodies, vegetation_areas,
site_elevation, point_elevations site_elevation, point_elevations,
cancel_token=None, precomputed=None,
): ):
"""Sync point loop - runs in ThreadPoolExecutor, bypasses event loop.""" """Sync point loop - runs in ThreadPoolExecutor, bypasses event loop."""
points = [] points = []
@@ -496,14 +532,22 @@ class CoverageService:
log_interval = max(1, total // 20) log_interval = max(1, total // 20)
for i, (lat, lon) in enumerate(grid): for i, (lat, lon) in enumerate(grid):
if cancel_token and cancel_token.is_cancelled:
_clog(f"Cancelled at {i}/{total}")
break
if i % log_interval == 0: if i % log_interval == 0:
_clog(f"Progress: {i}/{total} ({i*100//total}%)") _clog(f"Progress: {i}/{total} ({i*100//total}%)")
pre = precomputed.get((lat, lon)) if precomputed else None
point = self._calculate_point_sync( point = self._calculate_point_sync(
site, lat, lon, settings, buildings, streets, site, lat, lon, settings, buildings, streets,
spatial_idx, water_bodies, vegetation_areas, spatial_idx, water_bodies, vegetation_areas,
site_elevation, point_elevations.get((lat, lon), 0.0), site_elevation, point_elevations.get((lat, lon), 0.0),
timing timing,
precomputed_distance=pre.get('distance') if pre else None,
precomputed_path_loss=pre.get('path_loss') if pre else None,
) )
if point.rsrp >= settings.min_signal: if point.rsrp >= settings.min_signal:
points.append(point) points.append(point)
@@ -523,17 +567,25 @@ class CoverageService:
vegetation_areas: List[VegetationArea], vegetation_areas: List[VegetationArea],
site_elevation: float, site_elevation: float,
point_elevation: float, point_elevation: float,
timing: dict timing: dict,
precomputed_distance: Optional[float] = None,
precomputed_path_loss: Optional[float] = None,
) -> CoveragePoint: ) -> CoveragePoint:
"""Fully synchronous point calculation. All terrain tiles must be pre-loaded.""" """Fully synchronous point calculation. All terrain tiles must be pre-loaded."""
# Distance # Distance (use precomputed if available)
distance = TerrainService.haversine_distance(site.lat, site.lon, lat, lon) if precomputed_distance is not None:
distance = precomputed_distance
else:
distance = TerrainService.haversine_distance(site.lat, site.lon, lat, lon)
if distance < 1: if distance < 1:
distance = 1 distance = 1
# Base path loss # Base path loss (use precomputed if available)
path_loss = self._okumura_hata(distance, site.frequency, site.height, 1.5) if precomputed_path_loss is not None:
path_loss = precomputed_path_loss
else:
path_loss = self._okumura_hata(distance, site.frequency, site.height, 1.5)
# Antenna pattern # Antenna pattern
antenna_loss = 0.0 antenna_loss = 0.0

View File

@@ -0,0 +1,119 @@
"""
GPU-accelerated computation service using CuPy.
Falls back to NumPy when CuPy/CUDA is not available.
Provides vectorized batch operations for coverage calculation:
- Haversine distance (site → all grid points)
- Okumura-Hata path loss (all distances at once)
Usage:
from app.services.gpu_service import gpu_service, GPU_AVAILABLE
"""
import numpy as np
from typing import Dict, Any, Optional
# ── Try CuPy import ──
GPU_AVAILABLE = False
GPU_INFO: Optional[Dict[str, Any]] = None
cp = None
try:
import cupy as _cp
if _cp.cuda.runtime.getDeviceCount() > 0:
cp = _cp
GPU_AVAILABLE = True
props = _cp.cuda.runtime.getDeviceProperties(0)
GPU_INFO = {
"name": props["name"].decode() if isinstance(props["name"], bytes) else str(props["name"]),
"memory_mb": props["totalGlobalMem"] // (1024 * 1024),
"cuda_version": _cp.cuda.runtime.runtimeGetVersion(),
}
print(f"[GPU] CUDA available: {GPU_INFO['name']} ({GPU_INFO['memory_mb']} MB)", flush=True)
except ImportError:
print("[GPU] CuPy not installed — using CPU/NumPy", flush=True)
except Exception as e:
print(f"[GPU] CUDA check failed: {e} — using CPU/NumPy", flush=True)
# Array module: cupy on GPU, numpy on CPU
xp = cp if GPU_AVAILABLE else np
def _to_cpu(arr):
"""Transfer array to CPU numpy if on GPU."""
if GPU_AVAILABLE and hasattr(arr, 'get'):
return arr.get()
return np.asarray(arr)
class GPUService:
    """GPU-accelerated batch operations for coverage calculation.

    All public methods accept and return CPU numpy arrays; any GPU
    round-trip happens internally via the module-level ``xp`` array
    module and ``_to_cpu``.
    """

    @property
    def available(self) -> bool:
        # True only when CuPy found at least one CUDA device at import time.
        return GPU_AVAILABLE

    def get_info(self) -> Dict[str, Any]:
        """Return GPU info dict for system endpoint."""
        if GPU_AVAILABLE:
            return {"available": True, **GPU_INFO}
        return {"available": False, "name": None, "memory_mb": None}

    def precompute_distances(
        self,
        grid_lats: np.ndarray,
        grid_lons: np.ndarray,
        site_lat: float,
        site_lon: float,
    ) -> np.ndarray:
        """Vectorized haversine distance from site to all grid points.
        Returns distances in meters as a CPU numpy array.
        """
        earth_radius_m = 6371000.0
        pt_lat = xp.radians(xp.asarray(grid_lats, dtype=xp.float64))
        pt_lon = xp.radians(xp.asarray(grid_lons, dtype=xp.float64))
        st_lat = xp.radians(xp.float64(site_lat))
        st_lon = xp.radians(xp.float64(site_lon))
        # Haversine formula, evaluated for the whole grid at once.
        half_dlat = (st_lat - pt_lat) / 2
        half_dlon = (st_lon - pt_lon) / 2
        a = xp.sin(half_dlat) ** 2 + xp.cos(pt_lat) * xp.cos(st_lat) * xp.sin(half_dlon) ** 2
        central_angle = 2 * xp.arcsin(xp.sqrt(a))
        return _to_cpu(earth_radius_m * central_angle)

    def precompute_path_loss(
        self,
        distances: np.ndarray,
        frequency_mhz: float,
        tx_height: float,
        rx_height: float = 1.5,
    ) -> np.ndarray:
        """Vectorized Okumura-Hata path loss for all distances.
        Returns path loss in dB as a CPU numpy array.
        """
        # Okumura-Hata works in km; floor at 100 m keeps log10 well-behaved.
        d_km = xp.maximum(xp.asarray(distances, dtype=xp.float64) / 1000.0, 0.1)
        log_f = xp.log10(xp.float64(float(frequency_mhz)))
        log_hb = xp.log10(xp.float64(float(tx_height)))
        h_rx = float(rx_height)
        # Mobile-antenna height correction term.
        a_hm = (1.1 * log_f - 0.7) * h_rx - (1.56 * log_f - 0.8)
        loss_db = (
            69.55
            + 26.16 * log_f
            - 13.82 * log_hb
            - a_hm
            + (44.9 - 6.55 * log_hb) * xp.log10(d_km)
        )
        return _to_cpu(loss_db)


# Singleton shared by all callers.
gpu_service = GPUService()

View File

@@ -24,11 +24,28 @@ Usage:
import os import os
import sys import sys
import time import time
import threading
import multiprocessing as mp import multiprocessing as mp
from typing import List, Dict, Tuple, Any, Optional, Callable from typing import List, Dict, Tuple, Any, Optional, Callable
import numpy as np import numpy as np
# ── Cancellation token ──
class CancellationToken:
"""Thread-safe cancellation token for cooperative cancellation."""
def __init__(self):
self._event = threading.Event()
def cancel(self):
self._event.set()
@property
def is_cancelled(self) -> bool:
return self._event.is_set()
# ── Try to import Ray ── # ── Try to import Ray ──
RAY_AVAILABLE = False RAY_AVAILABLE = False
@@ -80,14 +97,19 @@ def _ray_process_chunk_impl(chunk, terrain_cache, buildings, osm_data, config):
"reflection": 0.0, "vegetation": 0.0, "reflection": 0.0, "vegetation": 0.0,
} }
precomputed = config.get('precomputed')
results = [] results = []
for lat, lon, point_elev in chunk: for lat, lon, point_elev in chunk:
pre = precomputed.get((lat, lon)) if precomputed else None
point = svc._calculate_point_sync( point = svc._calculate_point_sync(
site, lat, lon, settings, site, lat, lon, settings,
buildings, osm_data.get('streets', []), buildings, osm_data.get('streets', []),
_worker_spatial_idx, osm_data.get('water_bodies', []), _worker_spatial_idx, osm_data.get('water_bodies', []),
osm_data.get('vegetation_areas', []), osm_data.get('vegetation_areas', []),
config['site_elevation'], point_elev, timing, config['site_elevation'], point_elev, timing,
precomputed_distance=pre.get('distance') if pre else None,
precomputed_path_loss=pre.get('path_loss') if pre else None,
) )
if point.rsrp >= settings.min_signal: if point.rsrp >= settings.min_signal:
results.append(point.model_dump()) results.append(point.model_dump())
@@ -162,13 +184,16 @@ def calculate_coverage_parallel(
site_elevation: float, site_elevation: float,
num_workers: Optional[int] = None, num_workers: Optional[int] = None,
log_fn: Optional[Callable[[str], None]] = None, log_fn: Optional[Callable[[str], None]] = None,
cancel_token: Optional[CancellationToken] = None,
precomputed: Optional[Dict] = None,
) -> Tuple[List[Dict], Dict[str, float]]: ) -> Tuple[List[Dict], Dict[str, float]]:
"""Calculate coverage points in parallel. """Calculate coverage points in parallel.
Uses Ray if available (shared memory, zero-copy numpy), otherwise Uses Ray if available (shared memory, zero-copy numpy), otherwise
falls back to sequential single-threaded calculation. falls back to ProcessPoolExecutor or sequential single-threaded calculation.
Same signature as before — drop-in replacement. cancel_token: cooperative cancellation — checked between chunks.
precomputed: dict mapping (lat, lon) -> {distance, path_loss} from GPU pre-computation.
""" """
if log_fn is None: if log_fn is None:
log_fn = lambda msg: print(f"[PARALLEL] {msg}", flush=True) log_fn = lambda msg: print(f"[PARALLEL] {msg}", flush=True)
@@ -185,7 +210,7 @@ def calculate_coverage_parallel(
grid, point_elevations, site_dict, settings_dict, grid, point_elevations, site_dict, settings_dict,
terrain_cache, buildings, streets, water_bodies, terrain_cache, buildings, streets, water_bodies,
vegetation_areas, site_elevation, vegetation_areas, site_elevation,
num_workers, log_fn, num_workers, log_fn, cancel_token, precomputed,
) )
except Exception as e: except Exception as e:
log_fn(f"Ray execution failed: {e} — falling back to sequential") log_fn(f"Ray execution failed: {e} — falling back to sequential")
@@ -198,7 +223,7 @@ def calculate_coverage_parallel(
grid, point_elevations, site_dict, settings_dict, grid, point_elevations, site_dict, settings_dict,
terrain_cache, buildings, streets, water_bodies, terrain_cache, buildings, streets, water_bodies,
vegetation_areas, site_elevation, vegetation_areas, site_elevation,
pool_workers, log_fn, pool_workers, log_fn, cancel_token, precomputed,
) )
except Exception as e: except Exception as e:
log_fn(f"ProcessPool failed: {e} — falling back to sequential") log_fn(f"ProcessPool failed: {e} — falling back to sequential")
@@ -208,7 +233,7 @@ def calculate_coverage_parallel(
return _calculate_sequential( return _calculate_sequential(
grid, point_elevations, site_dict, settings_dict, grid, point_elevations, site_dict, settings_dict,
buildings, streets, water_bodies, vegetation_areas, buildings, streets, water_bodies, vegetation_areas,
site_elevation, log_fn, site_elevation, log_fn, cancel_token, precomputed,
) )
@@ -219,15 +244,13 @@ def _calculate_with_ray(
grid, point_elevations, site_dict, settings_dict, grid, point_elevations, site_dict, settings_dict,
terrain_cache, buildings, streets, water_bodies, terrain_cache, buildings, streets, water_bodies,
vegetation_areas, site_elevation, vegetation_areas, site_elevation,
num_workers, log_fn, num_workers, log_fn, cancel_token=None, precomputed=None,
): ):
"""Execute using Ray shared-memory object store.""" """Execute using Ray shared-memory object store."""
total_points = len(grid) total_points = len(grid)
log_fn(f"Ray mode: {total_points} points, {num_workers} workers") log_fn(f"Ray mode: {total_points} points, {num_workers} workers")
# ── Put large data into Ray object store ── # ── Put large data into Ray object store ──
# Numpy arrays (terrain tiles) get zero-copy shared memory.
# Python objects (buildings) get serialized once, stored in plasma.
t_put = time.time() t_put = time.time()
terrain_ref = ray.put(terrain_cache) terrain_ref = ray.put(terrain_cache)
@@ -239,12 +262,15 @@ def _calculate_with_ray(
}) })
cache_key = f"{site_dict['lat']:.4f},{site_dict['lon']:.4f},{len(buildings)}" cache_key = f"{site_dict['lat']:.4f},{site_dict['lon']:.4f},{len(buildings)}"
config_ref = ray.put({ config = {
'site_dict': site_dict, 'site_dict': site_dict,
'settings_dict': settings_dict, 'settings_dict': settings_dict,
'site_elevation': site_elevation, 'site_elevation': site_elevation,
'cache_key': cache_key, 'cache_key': cache_key,
}) }
if precomputed:
config['precomputed'] = precomputed
config_ref = ray.put(config)
put_time = time.time() - t_put put_time = time.time() - t_put
log_fn(f"ray.put() done in {put_time:.1f}s") log_fn(f"ray.put() done in {put_time:.1f}s")
@@ -273,9 +299,19 @@ def _calculate_with_ray(
completed_chunks = 0 completed_chunks = 0
while remaining: while remaining:
# Check cancellation before waiting
if cancel_token and cancel_token.is_cancelled:
log_fn(f"Cancelled — aborting {len(remaining)} remaining Ray chunks")
for ref in remaining:
try:
ray.cancel(ref, force=True)
except Exception:
pass
break
# Wait for at least 1 result, batch up to ~10% for progress logging # Wait for at least 1 result, batch up to ~10% for progress logging
batch = max(1, min(len(remaining), total_chunks // 10 or 1)) batch = max(1, min(len(remaining), total_chunks // 10 or 1))
done, remaining = ray.wait(remaining, num_returns=batch, timeout=600) done, remaining = ray.wait(remaining, num_returns=batch, timeout=30)
for ref in done: for ref in done:
try: try:
@@ -333,14 +369,19 @@ def _pool_worker_process_chunk(args):
"reflection": 0.0, "vegetation": 0.0, "reflection": 0.0, "vegetation": 0.0,
} }
precomputed = config.get('precomputed')
results = [] results = []
for lat, lon, point_elev in chunk: for lat, lon, point_elev in chunk:
pre = precomputed.get((lat, lon)) if precomputed else None
point = svc._calculate_point_sync( point = svc._calculate_point_sync(
site, lat, lon, settings, site, lat, lon, settings,
buildings, osm_data.get('streets', []), buildings, osm_data.get('streets', []),
spatial_idx, osm_data.get('water_bodies', []), spatial_idx, osm_data.get('water_bodies', []),
osm_data.get('vegetation_areas', []), osm_data.get('vegetation_areas', []),
config['site_elevation'], point_elev, timing, config['site_elevation'], point_elev, timing,
precomputed_distance=pre.get('distance') if pre else None,
precomputed_path_loss=pre.get('path_loss') if pre else None,
) )
if point.rsrp >= settings.min_signal: if point.rsrp >= settings.min_signal:
results.append(point.model_dump()) results.append(point.model_dump())
@@ -352,7 +393,7 @@ def _calculate_with_process_pool(
grid, point_elevations, site_dict, settings_dict, grid, point_elevations, site_dict, settings_dict,
terrain_cache, buildings, streets, water_bodies, terrain_cache, buildings, streets, water_bodies,
vegetation_areas, site_elevation, vegetation_areas, site_elevation,
num_workers, log_fn, num_workers, log_fn, cancel_token=None, precomputed=None,
): ):
"""Execute using ProcessPoolExecutor with reduced workers to limit memory.""" """Execute using ProcessPoolExecutor with reduced workers to limit memory."""
from concurrent.futures import ProcessPoolExecutor, as_completed from concurrent.futures import ProcessPoolExecutor, as_completed
@@ -375,6 +416,8 @@ def _calculate_with_process_pool(
'settings_dict': settings_dict, 'settings_dict': settings_dict,
'site_elevation': site_elevation, 'site_elevation': site_elevation,
} }
if precomputed:
config['precomputed'] = precomputed
osm_data = { osm_data = {
'streets': streets, 'streets': streets,
'water_bodies': water_bodies, 'water_bodies': water_bodies,
@@ -395,6 +438,13 @@ def _calculate_with_process_pool(
completed_chunks = 0 completed_chunks = 0
for future in as_completed(futures): for future in as_completed(futures):
# Check cancellation between chunks
if cancel_token and cancel_token.is_cancelled:
log_fn(f"Cancelled — cancelling {len(futures) - completed_chunks - 1} pending futures")
for f in futures:
f.cancel()
break
try: try:
chunk_results = future.result() chunk_results = future.result()
all_results.extend(chunk_results) all_results.extend(chunk_results)
@@ -428,7 +478,7 @@ def _calculate_with_process_pool(
def _calculate_sequential( def _calculate_sequential(
grid, point_elevations, site_dict, settings_dict, grid, point_elevations, site_dict, settings_dict,
buildings, streets, water_bodies, vegetation_areas, buildings, streets, water_bodies, vegetation_areas,
site_elevation, log_fn, site_elevation, log_fn, cancel_token=None, precomputed=None,
): ):
"""Sequential fallback — no extra dependencies, runs in calling thread.""" """Sequential fallback — no extra dependencies, runs in calling thread."""
from app.services.coverage_service import CoverageService, SiteParams, CoverageSettings from app.services.coverage_service import CoverageService, SiteParams, CoverageSettings
@@ -453,15 +503,26 @@ def _calculate_sequential(
t0 = time.time() t0 = time.time()
results = [] results = []
for i, (lat, lon) in enumerate(grid): for i, (lat, lon) in enumerate(grid):
# Check cancellation
if cancel_token and cancel_token.is_cancelled:
log_fn(f"Sequential cancelled at {i}/{total}")
break
if i % log_interval == 0: if i % log_interval == 0:
log_fn(f"Sequential: {i}/{total} ({i * 100 // total}%)") log_fn(f"Sequential: {i}/{total} ({i * 100 // total}%)")
point_elev = point_elevations.get((lat, lon), 0.0) point_elev = point_elevations.get((lat, lon), 0.0)
# Use precomputed values if available
pre = precomputed.get((lat, lon)) if precomputed else None
point = svc._calculate_point_sync( point = svc._calculate_point_sync(
site, lat, lon, settings, site, lat, lon, settings,
buildings, streets, spatial_idx, buildings, streets, spatial_idx,
water_bodies, vegetation_areas, water_bodies, vegetation_areas,
site_elevation, point_elev, timing, site_elevation, point_elev, timing,
precomputed_distance=pre.get('distance') if pre else None,
precomputed_path_loss=pre.get('path_loss') if pre else None,
) )
if point.rsrp >= settings.min_signal: if point.rsrp >= settings.min_signal:
results.append(point.model_dump()) results.append(point.model_dump())

View File

@@ -12,3 +12,5 @@ httpx==0.27.0
aiosqlite>=0.19.0 aiosqlite>=0.19.0
sqlalchemy>=2.0.0 sqlalchemy>=2.0.0
ray[default]>=2.9.0 ray[default]>=2.9.0
# GPU acceleration (optional — install cupy-cuda12x for NVIDIA GPU support)
# cupy-cuda12x>=13.0.0

View File

@@ -270,11 +270,11 @@ function createMainWindow() {
// Save window state on close and trigger shutdown // Save window state on close and trigger shutdown
mainWindow.on('close', () => { mainWindow.on('close', () => {
log('[CLOSE] Window close event fired, isQuitting=' + isQuitting);
try { try {
const bounds = mainWindow.getBounds(); const bounds = mainWindow.getBounds();
store.set('windowState', bounds); store.set('windowState', bounds);
} catch (_e) {} } catch (_e) {}
log('Main window closing — killing backend');
isQuitting = true; isQuitting = true;
killBackend(); killBackend();
}); });
@@ -321,34 +321,43 @@ function createMainWindow() {
function killBackend() { function killBackend() {
const pid = backendPid || backendProcess?.pid; const pid = backendPid || backendProcess?.pid;
if (!pid) return; if (!pid) {
log('[KILL] killBackend() called — no backend PID to kill');
return;
}
log(`Killing backend (PID ${pid})...`); log(`[KILL] killBackend() called, platform=${process.platform}, PID=${pid}`);
try { try {
if (process.platform === 'win32') { if (process.platform === 'win32') {
// Windows: taskkill with /F (force) /T (tree — kills child processes too) // Windows: taskkill with /F (force) /T (tree — kills child processes too)
log(`[KILL] Running: taskkill /F /T /PID ${pid}`);
execSync(`taskkill /F /T /PID ${pid}`, { stdio: 'ignore' }); execSync(`taskkill /F /T /PID ${pid}`, { stdio: 'ignore' });
log('[KILL] taskkill completed successfully');
} else { } else {
// Unix: kill process group // Unix: kill process group
try { try {
log(`[KILL] Sending SIGTERM to process group -${pid}`);
process.kill(-pid, 'SIGTERM'); process.kill(-pid, 'SIGTERM');
} catch (_e) { } catch (_e) {
log(`[KILL] Process group kill failed, sending SIGTERM to PID ${pid}`);
process.kill(pid, 'SIGTERM'); process.kill(pid, 'SIGTERM');
} }
} }
} catch (e) { } catch (e) {
log(`[KILL] Primary kill failed: ${e.message}, trying SIGKILL fallback`);
// Fallback: try normal kill via process handle // Fallback: try normal kill via process handle
try { try {
backendProcess?.kill('SIGKILL'); backendProcess?.kill('SIGKILL');
log('[KILL] Fallback SIGKILL sent via process handle');
} catch (_e2) { } catch (_e2) {
// Already dead — that's fine log('[KILL] Fallback also failed — process likely already dead');
} }
} }
backendPid = null; backendPid = null;
backendProcess = null; backendProcess = null;
log('Backend killed'); log(`[KILL] Backend cleanup complete (PID was ${pid})`);
} }
// ── App lifecycle ────────────────────────────────────────────────── // ── App lifecycle ──────────────────────────────────────────────────
@@ -381,7 +390,7 @@ app.whenReady().then(async () => {
}); });
app.on('window-all-closed', () => { app.on('window-all-closed', () => {
log('Event: window-all-closed'); log('[CLOSE] window-all-closed fired');
isQuitting = true; isQuitting = true;
killBackend(); killBackend();
@@ -397,13 +406,13 @@ app.on('activate', () => {
}); });
app.on('before-quit', () => { app.on('before-quit', () => {
log('Event: before-quit'); log('[CLOSE] before-quit fired');
isQuitting = true; isQuitting = true;
killBackend(); killBackend();
}); });
app.on('will-quit', () => { app.on('will-quit', () => {
log('Event: will-quit'); log('[CLOSE] will-quit fired');
killBackend(); killBackend();
if (backendLogStream) { if (backendLogStream) {
@@ -414,6 +423,10 @@ app.on('will-quit', () => {
// Last resort: ensure backend is killed when Node process exits // Last resort: ensure backend is killed when Node process exits
process.on('exit', () => { process.on('exit', () => {
try {
console.log(`[KILL] process.exit handler, backendPid=${backendPid}`);
} catch (_e) { /* log stream may be closed */ }
if (backendPid) { if (backendPid) {
try { try {
if (process.platform === 'win32') { if (process.platform === 'win32') {

View File

@@ -102,6 +102,8 @@ export default function App() {
const setShowElevationInfo = useSettingsStore((s) => s.setShowElevationInfo); const setShowElevationInfo = useSettingsStore((s) => s.setShowElevationInfo);
const showElevationOverlay = useSettingsStore((s) => s.showElevationOverlay); const showElevationOverlay = useSettingsStore((s) => s.showElevationOverlay);
const setShowElevationOverlay = useSettingsStore((s) => s.setShowElevationOverlay); const setShowElevationOverlay = useSettingsStore((s) => s.setShowElevationOverlay);
const elevationOpacity = useSettingsStore((s) => s.elevationOpacity);
const setElevationOpacity = useSettingsStore((s) => s.setElevationOpacity);
// History (undo/redo) // History (undo/redo)
const canUndo = useHistoryStore((s) => s.canUndo); const canUndo = useHistoryStore((s) => s.canUndo);
@@ -1059,6 +1061,19 @@ export default function App() {
/> />
Elevation Colors Elevation Colors
</label> </label>
{showElevationOverlay && (
<div className="pl-6">
<NumberInput
label="Opacity"
value={Math.round(elevationOpacity * 100)}
onChange={(v) => setElevationOpacity(v / 100)}
min={10}
max={100}
step={10}
unit="%"
/>
</div>
)}
</div> </div>
</div> </div>

View File

@@ -0,0 +1,176 @@
import { useEffect, useRef, useCallback } from 'react';
import { useMap } from 'react-leaflet';
import L from 'leaflet';
import { api } from '@/services/api.ts';
/** Props for the ElevationLayer map overlay component. */
interface ElevationLayerProps {
  /** When false, the overlay is removed and no grids are fetched. */
  visible: boolean;
  /** Overlay opacity in [0, 1]; applied live to the existing image overlay. */
  opacity: number;
}
// Terrain color gradient: low = green, mid = yellow/tan, high = brown/white
const COLOR_STOPS = [
{ elev: 0, r: 20, g: 100, b: 40 }, // dark green
{ elev: 100, r: 50, g: 160, b: 60 }, // green
{ elev: 200, r: 130, g: 200, b: 80 }, // yellow-green
{ elev: 350, r: 210, g: 190, b: 100 }, // tan
{ elev: 500, r: 180, g: 140, b: 80 }, // brown
{ elev: 800, r: 160, g: 120, b: 90 }, // dark brown
{ elev: 1200, r: 200, g: 190, b: 180 }, // light grey
{ elev: 2000, r: 240, g: 240, b: 240 }, // near white
];
function getColorForElevation(elev: number): [number, number, number] {
if (elev <= COLOR_STOPS[0].elev) {
return [COLOR_STOPS[0].r, COLOR_STOPS[0].g, COLOR_STOPS[0].b];
}
for (let i = 1; i < COLOR_STOPS.length; i++) {
if (elev <= COLOR_STOPS[i].elev) {
const low = COLOR_STOPS[i - 1];
const high = COLOR_STOPS[i];
const t = (elev - low.elev) / (high.elev - low.elev);
return [
Math.round(low.r + t * (high.r - low.r)),
Math.round(low.g + t * (high.g - low.g)),
Math.round(low.b + t * (high.b - low.b)),
];
}
}
const last = COLOR_STOPS[COLOR_STOPS.length - 1];
return [last.r, last.g, last.b];
}
/**
 * Leaflet overlay that renders SRTM elevation data as a colored raster.
 *
 * On each (debounced) map move it fetches an elevation grid for the current
 * viewport, rasterizes it to an offscreen canvas via the COLOR_STOPS
 * gradient, and swaps it in as an L.ImageOverlay. Renders no DOM itself.
 */
export default function ElevationLayer({ visible, opacity }: ElevationLayerProps) {
  const map = useMap();
  const overlayRef = useRef<L.ImageOverlay | null>(null);
  const debounceRef = useRef<ReturnType<typeof setTimeout> | null>(null);
  const abortRef = useRef<AbortController | null>(null);
  const lastBoundsRef = useRef<string>('');

  const removeOverlay = useCallback(() => {
    if (overlayRef.current) {
      map.removeLayer(overlayRef.current);
      overlayRef.current = null;
    }
  }, [map]);

  const fetchAndRender = useCallback(async () => {
    // Mark any in-flight request as superseded. NOTE: the signal is not
    // forwarded into api.getElevationGrid (it takes no signal), so this
    // does not cancel the network I/O — it only flags staleness.
    abortRef.current?.abort();
    const controller = new AbortController();
    abortRef.current = controller;

    const bounds = map.getBounds();
    const minLat = bounds.getSouth();
    const maxLat = bounds.getNorth();
    const minLon = bounds.getWest();
    const maxLon = bounds.getEast();

    // Skip if bbox is too large (zoomed out too far).
    if ((maxLat - minLat) > 2.0 || (maxLon - minLon) > 2.0) {
      removeOverlay();
      return;
    }

    // Skip if bounds haven't changed significantly.
    const boundsKey = `${minLat.toFixed(3)},${maxLat.toFixed(3)},${minLon.toFixed(3)},${maxLon.toFixed(3)}`;
    if (boundsKey === lastBoundsRef.current) return;
    lastBoundsRef.current = boundsKey;

    // Coarser grid when zoomed out — keeps payload and raster cost bounded.
    const zoom = map.getZoom();
    const resolution = zoom >= 13 ? 150 : zoom >= 10 ? 100 : 60;

    try {
      const data = await api.getElevationGrid(minLat, maxLat, minLon, maxLon, resolution);

      // BUGFIX: check THIS request's controller, not abortRef.current.
      // abortRef.current is reassigned to the newest controller before every
      // fetch, so the old check (abortRef.current?.signal.aborted) could
      // never see that this response was superseded — a slow stale response
      // could overwrite a fresher overlay.
      if (controller.signal.aborted) return;

      // Rasterize the grid into an offscreen canvas, one pixel per cell.
      const canvas = document.createElement('canvas');
      canvas.width = data.cols;
      canvas.height = data.rows;
      const ctx = canvas.getContext('2d');
      if (!ctx) return;

      const imageData = ctx.createImageData(data.cols, data.rows);
      const pixels = imageData.data;
      for (let row = 0; row < data.rows; row++) {
        for (let col = 0; col < data.cols; col++) {
          const [r, g, b] = getColorForElevation(data.grid[row][col]);
          const idx = (row * data.cols + col) * 4;
          pixels[idx] = r;
          pixels[idx + 1] = g;
          pixels[idx + 2] = b;
          pixels[idx + 3] = 255; // fully opaque; opacity is applied on the overlay
        }
      }
      ctx.putImageData(imageData, 0, 0);

      // Swap overlays only once the new image is fully ready.
      removeOverlay();
      const leafletBounds = L.latLngBounds(
        [data.bbox.min_lat, data.bbox.min_lon],
        [data.bbox.max_lat, data.bbox.max_lon],
      );
      overlayRef.current = L.imageOverlay(canvas.toDataURL(), leafletBounds, {
        opacity,
        interactive: false,
        zIndex: 97,
      });
      overlayRef.current.addTo(map);
    } catch (_e) {
      // Fetch/render failed. If this request is still the current one,
      // forget the bounds key so the next moveend retries this viewport
      // instead of silently never rendering it.
      if (!controller.signal.aborted) {
        lastBoundsRef.current = '';
      }
    }
  }, [map, opacity, removeOverlay]);

  // Apply opacity changes to the live overlay without refetching.
  useEffect(() => {
    overlayRef.current?.setOpacity(opacity);
  }, [opacity]);

  // Main effect: toggle visibility and re-render after (debounced) map moves.
  useEffect(() => {
    if (!visible) {
      removeOverlay();
      lastBoundsRef.current = '';
      return;
    }

    const onMoveEnd = () => {
      if (debounceRef.current) {
        clearTimeout(debounceRef.current);
      }
      // Debounce so one pan/zoom gesture triggers a single fetch.
      debounceRef.current = setTimeout(() => {
        fetchAndRender();
      }, 500);
    };

    map.on('moveend', onMoveEnd);
    // Initial fetch for the current viewport.
    fetchAndRender();

    return () => {
      map.off('moveend', onMoveEnd);
      if (debounceRef.current) {
        clearTimeout(debounceRef.current);
      }
      abortRef.current?.abort();
      removeOverlay();
    };
  }, [map, visible, fetchAndRender, removeOverlay]);

  return null;
}

View File

@@ -11,6 +11,7 @@ import MapExtras from './MapExtras.tsx';
import CoordinateGrid from './CoordinateGrid.tsx'; import CoordinateGrid from './CoordinateGrid.tsx';
import MeasurementTool from './MeasurementTool.tsx'; import MeasurementTool from './MeasurementTool.tsx';
import ElevationDisplay from './ElevationDisplay.tsx'; import ElevationDisplay from './ElevationDisplay.tsx';
import ElevationLayer from './ElevationLayer.tsx';
interface MapViewProps { interface MapViewProps {
onMapClick: (lat: number, lon: number) => void; onMapClick: (lat: number, lon: number) => void;
@@ -60,6 +61,7 @@ export default function MapView({ onMapClick, onEditSite, children }: MapViewPro
const showElevationInfo = useSettingsStore((s) => s.showElevationInfo); const showElevationInfo = useSettingsStore((s) => s.showElevationInfo);
const showElevationOverlay = useSettingsStore((s) => s.showElevationOverlay); const showElevationOverlay = useSettingsStore((s) => s.showElevationOverlay);
const setShowElevationOverlay = useSettingsStore((s) => s.setShowElevationOverlay); const setShowElevationOverlay = useSettingsStore((s) => s.setShowElevationOverlay);
const elevationOpacity = useSettingsStore((s) => s.elevationOpacity);
const addToast = useToastStore((s) => s.addToast); const addToast = useToastStore((s) => s.addToast);
const mapRef = useRef<LeafletMap | null>(null); const mapRef = useRef<LeafletMap | null>(null);
@@ -95,16 +97,8 @@ export default function MapView({ onMapClick, onEditSite, children }: MapViewPro
zIndex={100} zIndex={100}
/> />
)} )}
{/* Elevation color overlay (OpenTopoMap — no API key required) */} {/* Elevation color overlay from SRTM terrain data */}
{showElevationOverlay && ( <ElevationLayer visible={showElevationOverlay} opacity={elevationOpacity} />
<TileLayer
attribution='Map data: &copy; <a href="https://openstreetmap.org">OpenStreetMap</a>, SRTM | Style: &copy; <a href="https://opentopomap.org">OpenTopoMap</a> (<a href="https://creativecommons.org/licenses/by-sa/3.0/">CC-BY-SA</a>)'
url="https://{s}.tile.opentopomap.org/{z}/{x}/{y}.png"
opacity={0.5}
maxZoom={17}
zIndex={97}
/>
)}
<MapClickHandler onMapClick={onMapClick} /> <MapClickHandler onMapClick={onMapClick} />
<MapExtras /> <MapExtras />
{showElevationInfo && <ElevationDisplay />} {showElevationInfo && <ElevationDisplay />}

View File

@@ -97,6 +97,22 @@ export interface Preset {
estimated_speed: string; estimated_speed: string;
} }
// === Elevation grid types ===
/** Response shape of GET /api/terrain/elevation-grid. */
export interface ElevationGridResponse {
  /** Elevation values in row-major order, `rows` arrays of `cols` numbers each. */
  grid: number[][];
  rows: number;
  cols: number;
  min_elevation: number;
  max_elevation: number;
  /** Bounding box the grid covers (degrees). */
  bbox: {
    min_lat: number;
    max_lat: number;
    min_lon: number;
    max_lon: number;
  };
}
// === API Client === // === API Client ===
class ApiService { class ApiService {
@@ -148,6 +164,27 @@ class ApiService {
return data.elevation; return data.elevation;
} }
/**
 * Fetch a rasterized elevation grid for a bounding box.
 *
 * @param resolution - Approximate number of cells along the longer axis.
 * @throws Error when the backend responds with a non-2xx status.
 */
async getElevationGrid(
  minLat: number,
  maxLat: number,
  minLon: number,
  maxLon: number,
  resolution: number = 100,
): Promise<ElevationGridResponse> {
  const query = new URLSearchParams({
    min_lat: String(minLat),
    max_lat: String(maxLat),
    min_lon: String(minLon),
    max_lon: String(maxLon),
    resolution: String(resolution),
  });
  const res = await fetch(`${API_BASE}/api/terrain/elevation-grid?${query}`);
  if (!res.ok) throw new Error('Failed to fetch elevation grid');
  return res.json();
}
// === Region / Caching API === // === Region / Caching API ===
async getRegions(): Promise<RegionInfo[]> { async getRegions(): Promise<RegionInfo[]> {

View File

@@ -11,6 +11,7 @@ interface SettingsState {
measurementMode: boolean; measurementMode: boolean;
showElevationInfo: boolean; showElevationInfo: boolean;
showElevationOverlay: boolean; showElevationOverlay: boolean;
elevationOpacity: number;
setTheme: (theme: Theme) => void; setTheme: (theme: Theme) => void;
setShowTerrain: (show: boolean) => void; setShowTerrain: (show: boolean) => void;
setTerrainOpacity: (opacity: number) => void; setTerrainOpacity: (opacity: number) => void;
@@ -18,6 +19,7 @@ interface SettingsState {
setMeasurementMode: (mode: boolean) => void; setMeasurementMode: (mode: boolean) => void;
setShowElevationInfo: (show: boolean) => void; setShowElevationInfo: (show: boolean) => void;
setShowElevationOverlay: (show: boolean) => void; setShowElevationOverlay: (show: boolean) => void;
setElevationOpacity: (opacity: number) => void;
} }
function applyTheme(theme: Theme) { function applyTheme(theme: Theme) {
@@ -41,6 +43,7 @@ export const useSettingsStore = create<SettingsState>()(
measurementMode: false, measurementMode: false,
showElevationInfo: false, showElevationInfo: false,
showElevationOverlay: false, showElevationOverlay: false,
elevationOpacity: 0.5,
setTheme: (theme: Theme) => { setTheme: (theme: Theme) => {
set({ theme }); set({ theme });
applyTheme(theme); applyTheme(theme);
@@ -51,6 +54,7 @@ export const useSettingsStore = create<SettingsState>()(
setMeasurementMode: (mode: boolean) => set({ measurementMode: mode }), setMeasurementMode: (mode: boolean) => set({ measurementMode: mode }),
setShowElevationInfo: (show: boolean) => set({ showElevationInfo: show }), setShowElevationInfo: (show: boolean) => set({ showElevationInfo: show }),
setShowElevationOverlay: (show: boolean) => set({ showElevationOverlay: show }), setShowElevationOverlay: (show: boolean) => set({ showElevationOverlay: show }),
setElevationOpacity: (opacity: number) => set({ elevationOpacity: opacity }),
}), }),
{ {
name: 'rfcp-settings', name: 'rfcp-settings',