@mytec: iter2.2 ready for testing

This commit is contained in:
2026-01-31 16:16:15 +02:00
parent baf57ad77f
commit f6a39df366
9 changed files with 901 additions and 191 deletions

View File

@@ -0,0 +1,256 @@
from fastapi import APIRouter, BackgroundTasks, HTTPException
from pydantic import BaseModel
from typing import Optional
import asyncio
import uuid
router = APIRouter()
# Predefined regions
# Catalog of regions the API offers for offline download.
#   bbox:              [min_lat, min_lon, max_lat, max_lon] in decimal degrees (WGS84)
#   srtm_tiles:        expected number of 1x1-degree SRTM tiles covering the bbox
#   estimated_size_gb: rough on-disk size estimate shown to the user
REGIONS = {
    "ukraine": {
        "name": "Ukraine",
        "bbox": [44.0, 22.0, 52.5, 40.5],  # min_lat, min_lon, max_lat, max_lon
        "srtm_tiles": 120,
        "estimated_size_gb": 3.0,
    },
    "ukraine_east": {
        "name": "Eastern Ukraine (Donbas)",
        "bbox": [47.0, 34.0, 50.5, 40.5],
        "srtm_tiles": 24,
        "estimated_size_gb": 0.6,
    },
    "ukraine_central": {
        "name": "Central Ukraine",
        "bbox": [48.0, 30.0, 51.0, 36.0],
        "srtm_tiles": 18,
        "estimated_size_gb": 0.5,
    },
    "kyiv_region": {
        "name": "Kyiv Region",
        "bbox": [49.5, 29.5, 51.5, 32.5],
        "srtm_tiles": 6,
        "estimated_size_gb": 0.15,
    },
}
# Download progress tracking (in-memory)
# Maps task_id -> mutable progress dict (see start_download for the schema).
# NOTE(review): state is lost on process restart and is per-process only —
# assumes a single-worker deployment; confirm before scaling out.
_download_tasks: dict[str, dict] = {}
class RegionInfo(BaseModel):
    """Metadata plus current download state for one predefined region."""
    id: str  # key into REGIONS
    name: str  # human-readable region name
    bbox: list[float]  # [min_lat, min_lon, max_lat, max_lon]
    srtm_tiles: int  # expected SRTM tile count (informational)
    estimated_size_gb: float  # rough disk-usage estimate
    downloaded: bool = False  # True when every needed tile is already cached
    download_progress: float = 0.0  # percentage (0-100) of needed tiles cached
class DownloadProgress(BaseModel):
    """Progress snapshot for a background region-download task."""
    task_id: str  # short id returned by start_download
    region_id: str  # key into REGIONS
    status: str  # queued, downloading_terrain, downloading_osm, done, error
    progress: float  # 0-100
    current_step: str  # human-readable description of the current phase
    downloaded_mb: float  # terrain cache size on disk so far
    error: Optional[str] = None  # error message, set only when status == "error"
@router.get("/available")
async def list_regions() -> list[RegionInfo]:
    """List available regions for download.

    For each predefined region, computes which SRTM tiles its bbox needs,
    intersects that with the tiles already cached on disk, and reports the
    resulting completion percentage.
    """
    from app.services.terrain_service import terrain_service

    cached = set(terrain_service.get_cached_tiles())
    regions: list[RegionInfo] = []
    for region_id, info in REGIONS.items():
        south, west, north, east = info["bbox"]
        # Every 1x1-degree tile touched by the bbox.
        needed = {
            terrain_service.get_tile_name(lat, lon)
            for lat in range(int(south), int(north) + 1)
            for lon in range(int(west), int(east) + 1)
        }
        # Percentage of required tiles already present on disk.
        progress = (len(needed & cached) / len(needed) * 100) if needed else 0
        regions.append(RegionInfo(
            id=region_id,
            name=info["name"],
            bbox=info["bbox"],
            srtm_tiles=info["srtm_tiles"],
            estimated_size_gb=info["estimated_size_gb"],
            downloaded=progress >= 100,
            download_progress=progress,
        ))
    return regions
@router.post("/download/{region_id}")
async def start_download(region_id: str, background_tasks: BackgroundTasks) -> dict:
    """Start downloading a region in the background.

    Raises 404 for an unknown region. If a task for the same region is
    still running, returns that task instead of starting a duplicate.
    """
    if region_id not in REGIONS:
        raise HTTPException(404, f"Region '{region_id}' not found")
    # Dedupe: reuse any task for this region that has not finished yet.
    for existing_id, existing in _download_tasks.items():
        still_active = existing["status"] not in ["done", "error"]
        if existing["region_id"] == region_id and still_active:
            return {"task_id": existing_id, "status": "already_downloading"}
    new_task_id = str(uuid.uuid4())[:8]
    _download_tasks[new_task_id] = {
        "region_id": region_id,
        "status": "queued",
        "progress": 0.0,
        "current_step": "Starting...",
        "downloaded_mb": 0.0,
        "error": None,
    }
    background_tasks.add_task(_download_region_task, new_task_id, region_id)
    return {"task_id": new_task_id, "status": "started"}
async def _download_region_task(task_id: str, region_id: str):
    """Background task to download region data.

    Phase 1 (0-70%): download every SRTM terrain tile covering the bbox.
    Phase 2 (70-100%): pre-cache OSM buildings/water/vegetation per
    1x1-degree chunk, with a 1 s pause between chunks to stay under the
    Overpass rate limit.

    Progress is reported by mutating the shared _download_tasks[task_id]
    dict; any unexpected exception marks the task as status="error".
    """
    from app.services.terrain_service import terrain_service
    from app.services.buildings_service import buildings_service
    from app.services.water_service import water_service
    from app.services.vegetation_service import vegetation_service

    task = _download_tasks[task_id]
    region = REGIONS[region_id]
    min_lat, min_lon, max_lat, max_lon = region["bbox"]

    # Enumerate the 1x1-degree grid cells once. Both phases walk exactly
    # the same grid; previously this double loop was repeated four times
    # (two counting passes plus two work passes).
    cells = [
        (lat, lon)
        for lat in range(int(min_lat), int(max_lat) + 1)
        for lon in range(int(min_lon), int(max_lon) + 1)
    ]
    total = len(cells)
    try:
        # Phase 1: Download SRTM tiles (0-70%)
        task["status"] = "downloading_terrain"
        task["current_step"] = "Downloading terrain data..."
        for done, (lat, lon) in enumerate(cells, start=1):
            tile_name = terrain_service.get_tile_name(lat, lon)
            # download_tile returns False on failure; we intentionally keep
            # going so one missing tile does not abort the whole region.
            await terrain_service.download_tile(tile_name)
            task["progress"] = (done / total) * 70.0
            task["current_step"] = f"Terrain: {done}/{total} tiles"
            task["downloaded_mb"] = terrain_service.get_cache_size_mb()

        # Phase 2: Pre-cache OSM data (70-100%)
        task["status"] = "downloading_osm"
        task["current_step"] = "Downloading building data..."
        for done, (lat, lon) in enumerate(cells, start=1):
            # Each cell is fetched as a 1x1-degree bbox chunk.
            bbox = (float(lat), float(lon), float(lat + 1), float(lon + 1))
            # Each fetcher is best-effort: log and continue on failure.
            try:
                await buildings_service.fetch_buildings(*bbox)
            except Exception as e:
                print(f"[Region] Buildings chunk error: {e}")
            try:
                await water_service.fetch_water_bodies(*bbox)
            except Exception as e:
                print(f"[Region] Water chunk error: {e}")
            try:
                await vegetation_service.fetch_vegetation(*bbox)
            except Exception as e:
                print(f"[Region] Vegetation chunk error: {e}")
            task["progress"] = 70 + (done / total) * 30
            task["current_step"] = f"OSM data: {done}/{total} chunks"
            # Delay to avoid Overpass rate limiting
            await asyncio.sleep(1.0)

        task["status"] = "done"
        task["progress"] = 100.0
        task["current_step"] = "Complete!"
    except Exception as e:
        task["status"] = "error"
        task["error"] = str(e)
        task["current_step"] = f"Error: {e}"
@router.get("/download/{task_id}/progress")
async def get_download_progress(task_id: str) -> DownloadProgress:
    """Return the current progress snapshot for a download task (404 if unknown)."""
    task = _download_tasks.get(task_id)
    if task is None:
        raise HTTPException(404, "Task not found")
    return DownloadProgress(
        task_id=task_id,
        region_id=task["region_id"],
        status=task["status"],
        progress=task["progress"],
        current_step=task["current_step"],
        downloaded_mb=task["downloaded_mb"],
        error=task["error"],
    )
@router.delete("/cache")
async def clear_cache() -> dict:
    """Clear all OSM cached data (keeps SRTM terrain)"""
    from app.services.buildings_service import buildings_service
    from app.services.water_service import water_service
    from app.services.vegetation_service import vegetation_service

    # Drop every OSM layer cache; terrain tiles are deliberately untouched.
    for service in (buildings_service, water_service, vegetation_service):
        service.cache.clear()
    return {"status": "ok", "message": "OSM cache cleared"}
@router.get("/cache/stats")
async def get_cache_stats() -> dict:
    """Report on-disk cache sizes (MB) for terrain and each OSM layer."""
    from app.services.terrain_service import terrain_service
    from app.services.buildings_service import buildings_service
    from app.services.water_service import water_service
    from app.services.vegetation_service import vegetation_service

    terrain_mb = terrain_service.get_cache_size_mb()
    tile_count = len(terrain_service.get_cached_tiles())
    return {
        "terrain_mb": round(terrain_mb, 2),
        "terrain_tiles": tile_count,
        "buildings_mb": round(buildings_service.cache.get_size_mb(), 2),
        "water_mb": round(water_service.cache.get_size_mb(), 2),
        "vegetation_mb": round(vegetation_service.cache.get_size_mb(), 2),
    }

View File

@@ -4,7 +4,7 @@ from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from app.core.database import connect_to_mongo, close_mongo_connection
from app.api.routes import health, projects, terrain, coverage
from app.api.routes import health, projects, terrain, coverage, regions
@asynccontextmanager
@@ -24,7 +24,7 @@ app = FastAPI(
# CORS for frontend
app.add_middleware(
CORSMiddleware,
allow_origins=["https://rfcp.eliah.one", "http://localhost:5173"],
allow_origins=["https://rfcp.eliah.one", "http://localhost:5173", "http://127.0.0.1:8888"],
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
@@ -35,6 +35,7 @@ app.include_router(health.router, prefix="/api/health", tags=["health"])
app.include_router(projects.router, prefix="/api/projects", tags=["projects"])
app.include_router(terrain.router, prefix="/api/terrain", tags=["terrain"])
app.include_router(coverage.router, prefix="/api/coverage", tags=["coverage"])
app.include_router(regions.router, prefix="/api/regions", tags=["regions"])
@app.get("/")

View File

@@ -1,12 +1,11 @@
import os
import re
import httpx
import asyncio
import json
from typing import List, Optional
from pydantic import BaseModel
from functools import lru_cache
import hashlib
import json
from pathlib import Path
from datetime import datetime, timedelta
class Building(BaseModel):
@@ -20,24 +19,89 @@ class Building(BaseModel):
tags: dict = {} # Store all OSM tags for material detection
class OSMCache:
    """Disk-backed cache for OSM query results, keyed by bounding box.

    Entries live as JSON files under RFCP_DATA_PATH/osm/<cache_type>/ and
    are treated as missing once older than CACHE_EXPIRY_DAYS.
    """

    # Entries older than this many days are ignored on read.
    CACHE_EXPIRY_DAYS = 30

    def __init__(self, cache_type: str):
        self.data_path = Path(os.environ.get('RFCP_DATA_PATH', './data'))
        self.cache_path = self.data_path / 'osm' / cache_type
        self.cache_path.mkdir(parents=True, exist_ok=True)

    def _get_cache_key(self, min_lat: float, min_lon: float, max_lat: float, max_lon: float) -> str:
        """Bbox rounded to a 0.01-degree grid -> filename-safe key."""
        corners = (min_lat, min_lon, max_lat, max_lon)
        return "_".join(f"{c:.2f}" for c in corners)

    def _get_cache_file(self, cache_key: str) -> Path:
        """Path of the JSON file backing one cache key."""
        return self.cache_path / f"{cache_key}.json"

    def get(self, min_lat: float, min_lon: float, max_lat: float, max_lon: float) -> Optional[dict]:
        """Return the cached payload, or None when absent, expired or unreadable."""
        cache_file = self._get_cache_file(
            self._get_cache_key(min_lat, min_lon, max_lat, max_lon))
        if not cache_file.exists():
            return None
        try:
            record = json.loads(cache_file.read_text())
            written = datetime.fromisoformat(record.get('_cached_at', '2000-01-01'))
            # Expired entries are reported as a miss; file is left in place.
            if datetime.now() - written > timedelta(days=self.CACHE_EXPIRY_DAYS):
                return None
            return record.get('data')
        except Exception as e:
            print(f"[OSMCache] Failed to read cache: {e}")
            return None

    def set(self, min_lat: float, min_lon: float, max_lat: float, max_lon: float, data):
        """Persist *data* for this bbox, stamped with the current time."""
        cache_file = self._get_cache_file(
            self._get_cache_key(min_lat, min_lon, max_lat, max_lon))
        record = {
            '_cached_at': datetime.now().isoformat(),
            '_bbox': [min_lat, min_lon, max_lat, max_lon],
            'data': data,
        }
        try:
            cache_file.write_text(json.dumps(record))
        except Exception as e:
            print(f"[OSMCache] Failed to write cache: {e}")

    def clear(self):
        """Delete every cached JSON entry."""
        for entry in self.cache_path.glob("*.json"):
            entry.unlink()

    def get_size_mb(self) -> float:
        """Total size of all cache files, in megabytes."""
        total_bytes = sum(f.stat().st_size for f in self.cache_path.glob("*.json"))
        return total_bytes / (1024 * 1024)
class BuildingsService:
"""
OpenStreetMap buildings via Overpass API
OpenStreetMap buildings via Overpass API with local caching.
"""
OVERPASS_URL = "https://overpass-api.de/api/interpreter"
DEFAULT_LEVEL_HEIGHT = 3.0 # meters per floor
DEFAULT_BUILDING_HEIGHT = 9.0 # 3 floors if unknown
def __init__(self, cache_dir: str = "/opt/rfcp/backend/data/buildings"):
self.cache_dir = Path(cache_dir)
self.cache_dir.mkdir(exist_ok=True, parents=True)
def __init__(self):
self.cache = OSMCache('buildings')
self._memory_cache: dict[str, List[Building]] = {}
self._max_cache_size = 50 # bbox regions
self._max_cache_size = 50
@staticmethod
def _safe_int(value) -> Optional[int]:
"""Safely parse int from OSM tag (handles '1а', '2-3', '5+', etc.)"""
"""Safely parse int from OSM tag (handles '1a', '2-3', '5+', etc.)"""
if not value:
return None
try:
@@ -63,10 +127,8 @@ class BuildingsService:
return None
def _bbox_key(self, min_lat: float, min_lon: float, max_lat: float, max_lon: float) -> str:
"""Generate cache key for bbox"""
# Round to 0.01 degree (~1km) grid for cache efficiency
key = f"{min_lat:.2f},{min_lon:.2f},{max_lat:.2f},{max_lon:.2f}"
return hashlib.md5(key.encode()).hexdigest()[:12]
"""Generate memory cache key for bbox"""
return f"{min_lat:.2f}_{min_lon:.2f}_{max_lat:.2f}_{max_lon:.2f}"
async def fetch_buildings(
self,
@@ -74,35 +136,25 @@ class BuildingsService:
max_lat: float, max_lon: float,
use_cache: bool = True
) -> List[Building]:
"""
Fetch buildings in bounding box from OSM
Args:
min_lat, min_lon, max_lat, max_lon: Bounding box
use_cache: Whether to use cached results
Returns:
List of Building objects with height estimates
"""
cache_key = self._bbox_key(min_lat, min_lon, max_lat, max_lon)
"""Fetch buildings in bounding box from OSM, using cache if available"""
bbox_key = self._bbox_key(min_lat, min_lon, max_lat, max_lon)
# Check memory cache
if use_cache and cache_key in self._memory_cache:
return self._memory_cache[cache_key]
if use_cache and bbox_key in self._memory_cache:
return self._memory_cache[bbox_key]
# Check disk cache
cache_file = self.cache_dir / f"{cache_key}.json"
if use_cache and cache_file.exists():
try:
with open(cache_file, 'r') as f:
data = json.load(f)
buildings = [Building(**b) for b in data]
self._memory_cache[cache_key] = buildings
# Check disk cache (OSMCache with expiry)
if use_cache:
cached = self.cache.get(min_lat, min_lon, max_lat, max_lon)
if cached is not None:
print(f"[Buildings] Cache hit for bbox")
buildings = [Building(**b) for b in cached]
self._memory_cache[bbox_key] = buildings
return buildings
except Exception:
pass # Fetch fresh if cache corrupted
# Fetch from Overpass API
print(f"[Buildings] Fetching from Overpass API...")
query = f"""
[out:json][timeout:30];
(
@@ -123,23 +175,21 @@ class BuildingsService:
response.raise_for_status()
data = response.json()
except Exception as e:
print(f"Overpass API error: {e}")
print(f"[Buildings] Overpass API error: {e}")
return []
# Parse response
buildings = self._parse_overpass_response(data)
# Cache results
# Save to disk cache
if buildings:
# Disk cache
with open(cache_file, 'w') as f:
json.dump([b.model_dump() for b in buildings], f)
self.cache.set(min_lat, min_lon, max_lat, max_lon,
[b.model_dump() for b in buildings])
# Memory cache (with size limit)
if len(self._memory_cache) >= self._max_cache_size:
oldest = next(iter(self._memory_cache))
del self._memory_cache[oldest]
self._memory_cache[cache_key] = buildings
# Memory cache with size limit
if len(self._memory_cache) >= self._max_cache_size:
oldest = next(iter(self._memory_cache))
del self._memory_cache[oldest]
self._memory_cache[bbox_key] = buildings
return buildings
@@ -162,19 +212,16 @@ class BuildingsService:
if "building" not in tags:
continue
# Get geometry
geometry = []
for node_id in element.get("nodes", []):
if node_id in nodes:
geometry.append(list(nodes[node_id]))
if len(geometry) < 3:
continue # Invalid polygon
continue
# Estimate height
height = self._estimate_height(tags)
# Detect material from tags
material_str = None
if "building:material" in tags:
material_str = tags["building:material"]
@@ -195,19 +242,16 @@ class BuildingsService:
def _estimate_height(self, tags: dict) -> float:
"""Estimate building height from OSM tags"""
# Explicit height tag
if "height" in tags:
h = self._safe_float(tags["height"])
if h is not None and h > 0:
return h
# Calculate from levels
if "building:levels" in tags:
levels = self._safe_int(tags["building:levels"])
if levels is not None and levels > 0:
return levels * self.DEFAULT_LEVEL_HEIGHT
# Default based on building type
building_type = tags.get("building", "yes")
type_heights = {
"house": 6.0,
@@ -254,18 +298,10 @@ class BuildingsService:
lat2: float, lon2: float, height2: float,
building: Building
) -> Optional[float]:
"""
Check if line segment intersects building
Returns:
Distance along path where intersection occurs, or None
"""
# Simplified 2D check + height comparison
# For accurate 3D intersection, would need proper ray-polygon intersection
"""Check if line segment intersects building.
Returns distance along path where intersection occurs, or None."""
from app.services.terrain_service import TerrainService
# Sample points along line
num_samples = 20
for i in range(num_samples):
t = i / num_samples
@@ -274,9 +310,7 @@ class BuildingsService:
height = height1 + t * (height2 - height1)
if self.point_in_building(lat, lon, building):
# Check if signal height is below building
if height < building.height:
# Calculate distance
dist = t * TerrainService.haversine_distance(lat1, lon1, lat2, lon2)
return dist

View File

@@ -1,34 +1,37 @@
import os
import struct
import asyncio
import aiofiles
import gzip
import zipfile
import io
import numpy as np
import httpx
from pathlib import Path
from typing import List, Optional, Tuple
import numpy as np
class TerrainService:
"""
SRTM elevation data service
- Downloads and caches .hgt tiles
- Provides elevation lookups
- Generates elevation profiles
SRTM elevation data service with local caching.
- Stores tiles in RFCP_DATA_PATH/terrain/
- In-memory LRU cache (max 20 tiles)
- Auto-downloads from S3 mirror
- Supports both SRTM1 (3601x3601) and SRTM3 (1201x1201)
"""
# SRTM tile dimensions (1 arc-second = 3601x3601, 3 arc-second = 1201x1201)
TILE_SIZE = 3601 # 1 arc-second (30m resolution)
# Mirror URLs for SRTM data (USGS requires login, use mirrors)
SRTM_MIRRORS = [
SRTM_SOURCES = [
"https://elevation-tiles-prod.s3.amazonaws.com/skadi/{lat_dir}/{tile_name}.hgt.gz",
"https://s3.amazonaws.com/elevation-tiles-prod/skadi/{lat_dir}/{tile_name}.hgt.gz",
]
def __init__(self, cache_dir: str = "/opt/rfcp/backend/data/srtm"):
self.cache_dir = Path(cache_dir)
self.cache_dir.mkdir(exist_ok=True, parents=True)
self._tile_cache: dict[str, np.ndarray] = {} # In-memory cache
self._max_cached_tiles = 10 # Limit memory usage
def __init__(self):
self.data_path = Path(os.environ.get('RFCP_DATA_PATH', './data'))
self.terrain_path = self.data_path / 'terrain'
self.terrain_path.mkdir(parents=True, exist_ok=True)
# In-memory cache for loaded tiles
self._tile_cache: dict[str, np.ndarray] = {}
self._max_cache_tiles = 20 # ~500MB max
def get_tile_name(self, lat: float, lon: float) -> str:
"""Convert lat/lon to SRTM tile name (e.g., N48E035)"""
@@ -42,73 +45,96 @@ class TerrainService:
def get_tile_path(self, tile_name: str) -> Path:
"""Get local path for tile"""
return self.cache_dir / f"{tile_name}.hgt"
return self.terrain_path / f"{tile_name}.hgt"
async def download_tile(self, tile_name: str) -> bool:
"""Download SRTM tile from mirror"""
import gzip
"""Download SRTM tile if not cached locally"""
tile_path = self.get_tile_path(tile_name)
if tile_path.exists():
return True
lat_dir = tile_name[:3] # e.g., "N48"
async with httpx.AsyncClient(timeout=60.0) as client:
for mirror in self.SRTM_MIRRORS:
url = mirror.format(lat_dir=lat_dir, tile_name=tile_name)
for source_url in self.SRTM_SOURCES:
url = source_url.format(lat_dir=lat_dir, tile_name=tile_name)
try:
response = await client.get(url)
if response.status_code == 200:
# Decompress gzip
decompressed = gzip.decompress(response.content)
data = response.content
async with aiofiles.open(tile_path, 'wb') as f:
await f.write(decompressed)
if url.endswith('.gz'):
data = gzip.decompress(data)
elif url.endswith('.zip'):
with zipfile.ZipFile(io.BytesIO(data)) as zf:
for name in zf.namelist():
if name.endswith('.hgt'):
data = zf.read(name)
break
print(f"Downloaded {tile_name} from {mirror}")
tile_path.write_bytes(data)
print(f"[Terrain] Downloaded {tile_name} ({len(data)} bytes)")
return True
except Exception as e:
print(f"Failed to download from {mirror}: {e}")
print(f"[Terrain] Failed from {url}: {e}")
continue
print(f"Failed to download tile {tile_name}")
print(f"[Terrain] Could not download {tile_name}")
return False
async def load_tile(self, tile_name: str) -> Optional[np.ndarray]:
"""Load tile into memory (with caching)"""
# Check memory cache
def _load_tile(self, tile_name: str) -> Optional[np.ndarray]:
    """Load a cached SRTM tile from disk into the in-memory cache.

    Returns the elevation grid as a (size, size) int16 array, or None when
    the tile is not on disk, has an unexpected size, or fails to parse.
    Does NOT download — callers must fetch the tile first.
    """
    # Check memory cache first
    if tile_name in self._tile_cache:
        return self._tile_cache[tile_name]
    tile_path = self.get_tile_path(tile_name)
    # Not on disk: report a miss; downloading is the caller's job.
    if not tile_path.exists():
        return None
    try:
        data = tile_path.read_bytes()
        # SRTM HGT format: big-endian signed 16-bit integers.
        # Resolution is inferred from the file size.
        if len(data) == 3601 * 3601 * 2:
            size = 3601  # SRTM1 (30m)
        elif len(data) == 1201 * 1201 * 2:
            size = 1201  # SRTM3 (90m)
        else:
            print(f"[Terrain] Unknown tile size: {len(data)} bytes for {tile_name}")
            return None
        tile = np.frombuffer(data, dtype='>i2').reshape((size, size))
        # Manage memory cache with LRU eviction
        # NOTE(review): next(iter(...)) evicts in insertion order (FIFO),
        # not true LRU — entries are never re-ordered on access.
        if len(self._tile_cache) >= self._max_cache_tiles:
            oldest = next(iter(self._tile_cache))
            del self._tile_cache[oldest]
        self._tile_cache[tile_name] = tile
        return tile
    except Exception as e:
        print(f"[Terrain] Failed to load {tile_name}: {e}")
        return None
async def load_tile(self, tile_name: str) -> Optional[np.ndarray]:
"""Load tile into memory, downloading if needed"""
# Check memory cache
if tile_name in self._tile_cache:
return self._tile_cache[tile_name]
# Download if missing
if not self.get_tile_path(tile_name).exists():
success = await self.download_tile(tile_name)
if not success:
return None
# Read HGT file (big-endian signed 16-bit integers)
try:
async with aiofiles.open(tile_path, 'rb') as f:
data = await f.read()
# Parse as numpy array
arr = np.frombuffer(data, dtype='>i2').reshape(self.TILE_SIZE, self.TILE_SIZE)
# Manage cache size
if len(self._tile_cache) >= self._max_cached_tiles:
# Remove oldest entry
oldest = next(iter(self._tile_cache))
del self._tile_cache[oldest]
self._tile_cache[tile_name] = arr
return arr
except Exception as e:
print(f"Error loading tile {tile_name}: {e}")
return None
return self._load_tile(tile_name)
async def get_elevation(self, lat: float, lon: float) -> float:
"""Get elevation at specific coordinate (meters above sea level)"""
@@ -116,7 +142,9 @@ class TerrainService:
tile = await self.load_tile(tile_name)
if tile is None:
return 0.0 # No data, assume sea level
return 0.0
size = tile.shape[0]
# Calculate position within tile
lat_int = int(lat) if lat >= 0 else int(lat) - 1
@@ -125,13 +153,12 @@ class TerrainService:
lat_frac = lat - lat_int
lon_frac = lon - lon_int
# Row 0 = north edge, row 3600 = south edge
row = int((1 - lat_frac) * (self.TILE_SIZE - 1))
col = int(lon_frac * (self.TILE_SIZE - 1))
# Row 0 = north edge, last row = south edge
row = int((1 - lat_frac) * (size - 1))
col = int(lon_frac * (size - 1))
# Clamp to valid range
row = max(0, min(row, self.TILE_SIZE - 1))
col = max(0, min(col, self.TILE_SIZE - 1))
row = max(0, min(row, size - 1))
col = max(0, min(col, size - 1))
elevation = tile[row, col]
@@ -147,15 +174,10 @@ class TerrainService:
lat2: float, lon2: float,
num_points: int = 100
) -> List[dict]:
"""
Get elevation profile between two points
Returns list of {lat, lon, elevation, distance} dicts
"""
"""Get elevation profile between two points"""
lats = np.linspace(lat1, lat2, num_points)
lons = np.linspace(lon1, lon2, num_points)
# Calculate cumulative distances
total_distance = self.haversine_distance(lat1, lon1, lat2, lon2)
distances = np.linspace(0, total_distance, num_points)
@@ -171,10 +193,46 @@ class TerrainService:
return profile
async def ensure_tiles_for_bbox(
    self,
    min_lat: float, min_lon: float,
    max_lat: float, max_lon: float
) -> list[str]:
    """Pre-download all SRTM tiles needed for a bounding box.

    Enumerates every 1x1-degree tile touched by the bbox and downloads the
    missing ones. Returns the names of tiles that downloaded successfully
    (or were already cached); failures are silently omitted from the list.
    """
    tiles_needed = []
    for lat in range(int(min_lat), int(max_lat) + 1):
        for lon in range(int(min_lon), int(max_lon) + 1):
            tile_name = self.get_tile_name(lat, lon)
            tiles_needed.append(tile_name)
    # Download in parallel (batches of 5 to avoid overload)
    downloaded = []
    batch_size = 5
    for i in range(0, len(tiles_needed), batch_size):
        batch = tiles_needed[i:i + batch_size]
        # One concurrent request per tile in the batch.
        results = await asyncio.gather(*[
            self.download_tile(tile) for tile in batch
        ])
        # Keep only the tiles whose download reported success.
        for tile, ok in zip(batch, results):
            if ok:
                downloaded.append(tile)
    return downloaded
def get_cached_tiles(self) -> list[str]:
    """List all locally cached tile names (file stems, e.g. 'N48E035')."""
    return [f.stem for f in self.terrain_path.glob("*.hgt")]
def get_cache_size_mb(self) -> float:
    """Get total on-disk size of cached .hgt terrain tiles, in megabytes."""
    total = sum(f.stat().st_size for f in self.terrain_path.glob("*.hgt"))
    return total / (1024 * 1024)
@staticmethod
def haversine_distance(lat1: float, lon1: float, lat2: float, lon2: float) -> float:
"""Calculate distance between two points in meters"""
EARTH_RADIUS = 6371000 # meters
EARTH_RADIUS = 6371000
lat1, lon1, lat2, lon2 = map(np.radians, [lat1, lon1, lat2, lon2])

View File

@@ -5,11 +5,13 @@ Forests and dense vegetation attenuate RF signals significantly.
Uses ITU-R P.833 approximations for foliage loss.
"""
import os
import httpx
import json
from typing import List, Tuple, Optional
from pydantic import BaseModel
import json
from pathlib import Path
from datetime import datetime, timedelta
class VegetationArea(BaseModel):
@@ -20,6 +22,62 @@ class VegetationArea(BaseModel):
density: str # dense, sparse, mixed
class VegetationCache:
    """Local cache for vegetation data with expiry.

    JSON files under RFCP_DATA_PATH/osm/vegetation/, keyed by bbox rounded
    to a 0.01-degree grid; entries expire after CACHE_EXPIRY_DAYS.
    """
    # Entries older than this many days are ignored on read.
    CACHE_EXPIRY_DAYS = 30

    def __init__(self):
        self.data_path = Path(os.environ.get('RFCP_DATA_PATH', './data'))
        self.cache_path = self.data_path / 'osm' / 'vegetation'
        self.cache_path.mkdir(parents=True, exist_ok=True)

    def _get_cache_key(self, min_lat: float, min_lon: float, max_lat: float, max_lon: float) -> str:
        """Bbox rounded to a 0.01-degree grid -> filename-safe key."""
        return f"{min_lat:.2f}_{min_lon:.2f}_{max_lat:.2f}_{max_lon:.2f}"

    def _get_cache_file(self, cache_key: str) -> Path:
        """Path of the JSON file backing one cache key."""
        return self.cache_path / f"{cache_key}.json"

    def get(self, min_lat: float, min_lon: float, max_lat: float, max_lon: float) -> Optional[list]:
        """Return the cached list, or None when absent, expired or unreadable."""
        cache_key = self._get_cache_key(min_lat, min_lon, max_lat, max_lon)
        cache_file = self._get_cache_file(cache_key)
        if not cache_file.exists():
            return None
        try:
            data = json.loads(cache_file.read_text())
            # Missing timestamp defaults to 2000-01-01, i.e. always expired.
            cached_at = datetime.fromisoformat(data.get('_cached_at', '2000-01-01'))
            if datetime.now() - cached_at > timedelta(days=self.CACHE_EXPIRY_DAYS):
                return None
            return data.get('data')
        except Exception as e:
            print(f"[VegetationCache] Failed to read cache: {e}")
            return None

    def set(self, min_lat: float, min_lon: float, max_lat: float, max_lon: float, data):
        """Persist *data* for this bbox, stamped with the current time."""
        cache_key = self._get_cache_key(min_lat, min_lon, max_lat, max_lon)
        cache_file = self._get_cache_file(cache_key)
        try:
            cache_data = {
                '_cached_at': datetime.now().isoformat(),
                '_bbox': [min_lat, min_lon, max_lat, max_lon],
                'data': data
            }
            cache_file.write_text(json.dumps(cache_data))
        except Exception as e:
            print(f"[VegetationCache] Failed to write cache: {e}")

    def clear(self):
        """Delete every cached JSON entry."""
        for f in self.cache_path.glob("*.json"):
            f.unlink()

    def get_size_mb(self) -> float:
        """Total size of all cache files, in megabytes."""
        total = sum(f.stat().st_size for f in self.cache_path.glob("*.json"))
        return total / (1024 * 1024)
class VegetationService:
"""OSM vegetation for signal attenuation"""
@@ -44,33 +102,33 @@ class VegetationService:
"autumn": 0.7,
}
def __init__(self, cache_dir: str = "/opt/rfcp/backend/data/vegetation"):
self.cache_dir = Path(cache_dir)
self.cache_dir.mkdir(exist_ok=True, parents=True)
self._cache: dict[str, List[VegetationArea]] = {}
def __init__(self):
self.cache = VegetationCache()
self._memory_cache: dict[str, List[VegetationArea]] = {}
async def fetch_vegetation(
self,
min_lat: float, min_lon: float,
max_lat: float, max_lon: float
) -> List[VegetationArea]:
"""Fetch vegetation areas in bounding box"""
"""Fetch vegetation areas in bounding box, using cache if available"""
cache_key = f"{min_lat:.2f}_{min_lon:.2f}_{max_lat:.2f}_{max_lon:.2f}"
if cache_key in self._cache:
return self._cache[cache_key]
# Memory cache
if cache_key in self._memory_cache:
return self._memory_cache[cache_key]
cache_file = self.cache_dir / f"{cache_key}.json"
if cache_file.exists():
try:
with open(cache_file) as f:
data = json.load(f)
areas = [VegetationArea(**v) for v in data]
self._cache[cache_key] = areas
return areas
except Exception:
pass
# Disk cache with expiry
cached = self.cache.get(min_lat, min_lon, max_lat, max_lon)
if cached is not None:
print(f"[Vegetation] Cache hit for bbox")
areas = [VegetationArea(**v) for v in cached]
self._memory_cache[cache_key] = areas
return areas
# Fetch from Overpass
print(f"[Vegetation] Fetching from Overpass API...")
query = f"""
[out:json][timeout:30];
@@ -91,17 +149,17 @@ class VegetationService:
response.raise_for_status()
data = response.json()
except Exception as e:
print(f"Vegetation fetch error: {e}")
print(f"[Vegetation] Fetch error: {e}")
return []
areas = self._parse_response(data)
# Cache
# Save to disk cache
if areas:
with open(cache_file, 'w') as f:
json.dump([v.model_dump() for v in areas], f)
self._cache[cache_key] = areas
self.cache.set(min_lat, min_lon, max_lat, max_lon,
[v.model_dump() for v in areas])
self._memory_cache[cache_key] = areas
return areas
def _parse_response(self, data: dict) -> List[VegetationArea]:
@@ -128,7 +186,6 @@ class VegetationService:
if len(geometry) < 3:
continue
# Determine density from leaf_type tag
leaf_type = tags.get("leaf_type", "mixed")
density = "dense" if leaf_type == "needleleaved" else "mixed"
@@ -151,7 +208,7 @@ class VegetationService:
"""
Calculate signal loss through vegetation along path.
Samples points along the TXRX path and accumulates
Samples points along the TX->RX path and accumulates
attenuation for each segment inside vegetation.
Returns loss in dB (capped at 40 dB).
@@ -163,7 +220,6 @@ class VegetationService:
if path_length < 1:
return 0.0
# Sample points along path — every ~50m
num_samples = max(10, int(path_length / 50))
segment_length = path_length / num_samples
@@ -174,7 +230,6 @@ class VegetationService:
lat = lat1 + t * (lat2 - lat1)
lon = lon1 + t * (lon2 - lon1)
# Check if sample point is inside any vegetation area
veg = self._point_in_vegetation(lat, lon, vegetation_areas)
if veg:
@@ -182,7 +237,7 @@ class VegetationService:
seasonal = self.SEASONAL_FACTOR.get(season, 1.0)
total_loss += (segment_length / 100) * attenuation * seasonal
return min(total_loss, 40.0) # Cap at 40 dB
return min(total_loss, 40.0)
def _point_in_vegetation(
self,
@@ -199,7 +254,7 @@ class VegetationService:
def _point_in_polygon(
lat: float, lon: float, polygon: List[Tuple[float, float]]
) -> bool:
"""Ray casting algorithm polygon coords are (lon, lat)"""
"""Ray casting algorithm -- polygon coords are (lon, lat)"""
n = len(polygon)
inside = False

View File

@@ -5,11 +5,13 @@ Water surfaces produce strong specular reflections that can boost
or create multipath interference for RF signals.
"""
import os
import httpx
import json
from typing import List, Tuple, Optional
from pydantic import BaseModel
import json
from pathlib import Path
from datetime import datetime, timedelta
class WaterBody(BaseModel):
@@ -20,6 +22,62 @@ class WaterBody(BaseModel):
name: Optional[str] = None
class WaterCache:
    """Local cache for water body data with expiry.

    JSON files under RFCP_DATA_PATH/osm/water/, keyed by bbox rounded to a
    0.01-degree grid; entries older than CACHE_EXPIRY_DAYS read as misses.
    """

    # Entries older than this many days are ignored on read.
    CACHE_EXPIRY_DAYS = 30

    def __init__(self):
        self.data_path = Path(os.environ.get('RFCP_DATA_PATH', './data'))
        self.cache_path = self.data_path / 'osm' / 'water'
        self.cache_path.mkdir(parents=True, exist_ok=True)

    def _get_cache_key(self, min_lat: float, min_lon: float, max_lat: float, max_lon: float) -> str:
        """Bbox rounded to a 0.01-degree grid -> filename-safe key."""
        corners = (min_lat, min_lon, max_lat, max_lon)
        return "_".join(f"{c:.2f}" for c in corners)

    def _get_cache_file(self, cache_key: str) -> Path:
        """Path of the JSON file backing one cache key."""
        return self.cache_path / f"{cache_key}.json"

    def get(self, min_lat: float, min_lon: float, max_lat: float, max_lon: float) -> Optional[list]:
        """Return the cached list, or None when absent, expired or unreadable."""
        cache_file = self._get_cache_file(
            self._get_cache_key(min_lat, min_lon, max_lat, max_lon))
        if not cache_file.exists():
            return None
        try:
            record = json.loads(cache_file.read_text())
            written = datetime.fromisoformat(record.get('_cached_at', '2000-01-01'))
            # Expired entries read as a miss; the file is left in place.
            if datetime.now() - written > timedelta(days=self.CACHE_EXPIRY_DAYS):
                return None
            return record.get('data')
        except Exception as e:
            print(f"[WaterCache] Failed to read cache: {e}")
            return None

    def set(self, min_lat: float, min_lon: float, max_lat: float, max_lon: float, data):
        """Persist *data* for this bbox, stamped with the current time."""
        cache_file = self._get_cache_file(
            self._get_cache_key(min_lat, min_lon, max_lat, max_lon))
        record = {
            '_cached_at': datetime.now().isoformat(),
            '_bbox': [min_lat, min_lon, max_lat, max_lon],
            'data': data,
        }
        try:
            cache_file.write_text(json.dumps(record))
        except Exception as e:
            print(f"[WaterCache] Failed to write cache: {e}")

    def clear(self):
        """Delete every cached JSON entry."""
        for entry in self.cache_path.glob("*.json"):
            entry.unlink()

    def get_size_mb(self) -> float:
        """Total size of all cache files, in megabytes."""
        total_bytes = sum(f.stat().st_size for f in self.cache_path.glob("*.json"))
        return total_bytes / (1024 * 1024)
class WaterService:
"""OSM water bodies for reflection calculations"""
@@ -34,33 +92,33 @@ class WaterService:
"water": 0.7,
}
def __init__(self, cache_dir: str = "/opt/rfcp/backend/data/water"):
self.cache_dir = Path(cache_dir)
self.cache_dir.mkdir(exist_ok=True, parents=True)
self._cache: dict[str, List[WaterBody]] = {}
def __init__(self):
self.cache = WaterCache()
self._memory_cache: dict[str, List[WaterBody]] = {}
async def fetch_water_bodies(
self,
min_lat: float, min_lon: float,
max_lat: float, max_lon: float
) -> List[WaterBody]:
"""Fetch water bodies in bounding box"""
"""Fetch water bodies in bounding box, using cache if available"""
cache_key = f"{min_lat:.2f}_{min_lon:.2f}_{max_lat:.2f}_{max_lon:.2f}"
if cache_key in self._cache:
return self._cache[cache_key]
# Memory cache
if cache_key in self._memory_cache:
return self._memory_cache[cache_key]
cache_file = self.cache_dir / f"{cache_key}.json"
if cache_file.exists():
try:
with open(cache_file) as f:
data = json.load(f)
bodies = [WaterBody(**w) for w in data]
self._cache[cache_key] = bodies
return bodies
except Exception:
pass
# Disk cache with expiry
cached = self.cache.get(min_lat, min_lon, max_lat, max_lon)
if cached is not None:
print(f"[Water] Cache hit for bbox")
bodies = [WaterBody(**w) for w in cached]
self._memory_cache[cache_key] = bodies
return bodies
# Fetch from Overpass
print(f"[Water] Fetching from Overpass API...")
query = f"""
[out:json][timeout:30];
@@ -80,17 +138,17 @@ class WaterService:
response.raise_for_status()
data = response.json()
except Exception as e:
print(f"Water fetch error: {e}")
print(f"[Water] Fetch error: {e}")
return []
bodies = self._parse_response(data)
# Cache
# Save to disk cache
if bodies:
with open(cache_file, 'w') as f:
json.dump([w.model_dump() for w in bodies], f)
self._cache[cache_key] = bodies
self.cache.set(min_lat, min_lon, max_lat, max_lon,
[w.model_dump() for w in bodies])
self._memory_cache[cache_key] = bodies
return bodies
def _parse_response(self, data: dict) -> List[WaterBody]:
@@ -106,8 +164,6 @@ class WaterService:
continue
tags = element.get("tags", {})
# Determine water type
water_type = tags.get("water", tags.get("waterway", tags.get("natural", "water")))
geometry = []
@@ -144,7 +200,7 @@ class WaterService:
def _point_in_polygon(
lat: float, lon: float, polygon: List[Tuple[float, float]]
) -> bool:
"""Ray casting algorithm polygon coords are (lon, lat)"""
"""Ray casting algorithm -- polygon coords are (lon, lat)"""
n = len(polygon)
inside = False

View File

@@ -27,6 +27,8 @@ import ThemeToggle from '@/components/ui/ThemeToggle.tsx';
import Button from '@/components/ui/Button.tsx';
import NumberInput from '@/components/ui/NumberInput.tsx';
import ConfirmDialog from '@/components/ui/ConfirmDialog.tsx';
import { RegionWizard } from '@/components/RegionWizard.tsx';
import { isDesktop } from '@/lib/desktop.ts';
/**
* Restore a sites snapshot: replace all sites in IndexedDB + Zustand.
@@ -117,6 +119,26 @@ export default function App() {
const [showShortcuts, setShowShortcuts] = useState(false);
const [kbDeleteTarget, setKbDeleteTarget] = useState<{ id: string; name: string } | null>(null);
// Region wizard for first-run (desktop mode only)
const [showWizard, setShowWizard] = useState(false);
useEffect(() => {
if (!isDesktop()) return;
const skipped = localStorage.getItem('rfcp_region_wizard_skipped');
if (skipped) return;
api.getRegions()
.then((regions) => {
const hasDownloaded = regions.some((r) => r.downloaded);
if (!hasDownloaded) {
setShowWizard(true);
}
})
.catch(() => {
// Backend not ready yet, skip wizard
});
}, []);
// Resizable sidebar
const PANEL_MIN = 300;
const PANEL_MAX = 600;
@@ -1084,6 +1106,11 @@ export default function App() {
)}
<ToastContainer />
{/* First-run region download wizard (desktop only) */}
{showWizard && (
<RegionWizard onComplete={() => setShowWizard(false)} />
)}
</div>
);
}

View File

@@ -0,0 +1,165 @@
import { useState, useEffect, useRef } from 'react';
import { api } from '@/services/api.ts';
import type { RegionInfo, DownloadProgress } from '@/services/api.ts';
/**
 * First-run modal wizard: lets the user pick a predefined region and
 * starts a backend download of its offline data, polling progress once
 * per second. Calls `onComplete` when the download finishes (after a
 * short delay) or when the user chooses to skip.
 */
export function RegionWizard({ onComplete }: { onComplete: () => void }) {
  // Regions offered by the backend (GET /api/regions/available).
  const [regions, setRegions] = useState<RegionInfo[]>([]);
  const [selectedRegion, setSelectedRegion] = useState<string | null>(null);
  const [downloading, setDownloading] = useState(false);
  // Latest progress snapshot from the polling loop.
  const [progress, setProgress] = useState<DownloadProgress | null>(null);
  const [error, setError] = useState<string | null>(null);
  // Interval handle for the progress poll; cleared on unmount or completion.
  const pollRef = useRef<ReturnType<typeof setInterval> | null>(null);

  useEffect(() => {
    // Load the region list once on mount.
    api.getRegions()
      .then(setRegions)
      .catch((err) => {
        console.error('Failed to load regions:', err);
        setError('Failed to connect to backend');
      });
    // Cleanup: stop polling if the wizard unmounts mid-download.
    return () => {
      if (pollRef.current) clearInterval(pollRef.current);
    };
  }, []);

  // Start the backend download task, then poll its progress every second.
  const startDownload = async () => {
    if (!selectedRegion) return;
    setDownloading(true);
    setError(null);
    try {
      const { task_id } = await api.downloadRegion(selectedRegion);
      pollRef.current = setInterval(async () => {
        try {
          const prog = await api.getDownloadProgress(task_id);
          setProgress(prog);
          if (prog.status === 'done') {
            if (pollRef.current) clearInterval(pollRef.current);
            setDownloading(false);
            // Brief delay so user sees "Complete!" before closing
            setTimeout(() => onComplete(), 1000);
          } else if (prog.status === 'error') {
            if (pollRef.current) clearInterval(pollRef.current);
            setDownloading(false);
            setError(prog.error || 'Download failed');
          }
        } catch {
          // Polling error, keep trying
        }
      }, 1000);
    } catch (err) {
      setDownloading(false);
      setError(err instanceof Error ? err.message : 'Download failed');
    }
  };

  // Skip persists a flag so the wizard never auto-opens again.
  const skipDownload = () => {
    localStorage.setItem('rfcp_region_wizard_skipped', 'true');
    onComplete();
  };

  return (
    <div className="fixed inset-0 z-[9999] bg-black/90 flex items-center justify-center">
      <div className="bg-slate-900 rounded-xl p-8 max-w-lg w-[90%] text-white shadow-2xl border border-slate-700">
        <h1 className="text-3xl font-bold bg-gradient-to-r from-cyan-400 to-emerald-400 bg-clip-text text-transparent">
          Welcome to RFCP
        </h1>
        <h2 className="text-sm text-slate-400 mt-1 mb-5">
          RF Coverage Planner
        </h2>
        <p className="text-sm text-slate-300 mb-5">
          Select a region to download for offline use.
          This includes terrain elevation and building data.
        </p>
        {error && (
          <div className="mb-4 p-3 bg-red-900/30 border border-red-700 rounded-lg text-sm text-red-300">
            {error}
          </div>
        )}
        {!downloading ? (
          <>
            {/* Region list */}
            <div className="space-y-2 mb-6">
              {regions.map((region) => (
                <button
                  key={region.id}
                  onClick={() => setSelectedRegion(region.id)}
                  className={`w-full flex items-center gap-3 p-3.5 rounded-lg border-2 transition-all text-left ${
                    selectedRegion === region.id
                      ? 'border-cyan-400 bg-slate-800'
                      : 'border-transparent bg-slate-800/50 hover:bg-slate-800'
                  } ${region.downloaded ? 'opacity-60' : ''}`}
                >
                  <div className="flex-1">
                    <div className="font-medium text-sm">{region.name}</div>
                    {/* Partial cache: show how much of the region is already present */}
                    {region.download_progress > 0 && region.download_progress < 100 && (
                      <div className="text-xs text-slate-400 mt-0.5">
                        {region.download_progress.toFixed(0)}% cached
                      </div>
                    )}
                  </div>
                  <div className="text-xs text-slate-400">
                    ~{region.estimated_size_gb} GB
                  </div>
                  {region.downloaded && (
                    <span className="text-xs bg-emerald-500 text-black px-2 py-0.5 rounded font-medium">
                      Downloaded
                    </span>
                  )}
                </button>
              ))}
              {regions.length === 0 && !error && (
                <div className="text-center py-6 text-slate-400 text-sm">
                  Loading regions...
                </div>
              )}
            </div>
            {/* Actions */}
            <div className="flex gap-3">
              <button
                onClick={startDownload}
                disabled={!selectedRegion}
                className="flex-1 py-2.5 px-4 rounded-lg font-semibold text-sm text-black bg-gradient-to-r from-cyan-400 to-emerald-400 hover:from-cyan-300 hover:to-emerald-300 disabled:opacity-40 disabled:cursor-not-allowed transition-all"
              >
                Download Selected Region
              </button>
              <button
                onClick={skipDownload}
                className="py-2.5 px-4 rounded-lg text-sm text-slate-400 border border-slate-600 hover:border-slate-400 hover:text-slate-200 transition-all"
              >
                Skip (Online Mode)
              </button>
            </div>
          </>
        ) : (
          /* Download progress */
          <div className="mt-2">
            <div className="h-2 bg-slate-700 rounded-full overflow-hidden">
              <div
                className="h-full bg-gradient-to-r from-cyan-400 to-emerald-400 transition-all duration-300"
                style={{ width: `${progress?.progress || 0}%` }}
              />
            </div>
            <div className="mt-3 text-center text-sm text-slate-400">
              {progress?.current_step || 'Starting...'}
            </div>
            <div className="mt-1 text-center text-xs text-slate-500">
              {(progress?.downloaded_mb || 0).toFixed(1)} MB downloaded
              {' '}&middot;{' '}
              {(progress?.progress || 0).toFixed(0)}%
            </div>
          </div>
        )}
      </div>
    </div>
  );
}

View File

@@ -147,6 +147,64 @@ class ApiService {
const data = await response.json();
return data.elevation;
}
// === Region / Caching API ===
async getRegions(): Promise<RegionInfo[]> {
const response = await fetch(`${API_BASE}/api/regions/available`);
if (!response.ok) throw new Error('Failed to fetch regions');
return response.json();
}
async downloadRegion(regionId: string): Promise<{ task_id: string; status: string }> {
const response = await fetch(`${API_BASE}/api/regions/download/${regionId}`, {
method: 'POST',
});
if (!response.ok) throw new Error('Failed to start download');
return response.json();
}
async getDownloadProgress(taskId: string): Promise<DownloadProgress> {
const response = await fetch(`${API_BASE}/api/regions/download/${taskId}/progress`);
if (!response.ok) throw new Error('Failed to get progress');
return response.json();
}
async getCacheStats(): Promise<CacheStats> {
const response = await fetch(`${API_BASE}/api/regions/cache/stats`);
if (!response.ok) throw new Error('Failed to get cache stats');
return response.json();
}
}
// === Region types ===
/** A downloadable region as returned by GET /api/regions/available. */
export interface RegionInfo {
  id: string;
  name: string;
  /** [min_lat, min_lon, max_lat, max_lon] */
  bbox: number[];
  /** Number of SRTM terrain tiles the region spans. */
  srtm_tiles: number;
  estimated_size_gb: number;
  /** True when all of the region's tiles are already cached locally. */
  downloaded: boolean;
  /** Percentage (0-100) of the region's tiles already cached. */
  download_progress: number;
}
/** Progress snapshot for a region download task (polled by the UI). */
export interface DownloadProgress {
  task_id: string;
  region_id: string;
  /** One of: queued, downloading_terrain, downloading_osm, done, error */
  status: string;
  /** Overall progress, 0-100. */
  progress: number;
  /** Human-readable description of the current step. */
  current_step: string;
  downloaded_mb: number;
  /** Present when status is 'error'. */
  error?: string;
}
/** On-disk cache sizes (MB) per data layer, plus the terrain tile count. */
export interface CacheStats {
  terrain_mb: number;
  terrain_tiles: number;
  buildings_mb: number;
  water_mb: number;
  vegetation_mb: number;
}
export const api = new ApiService();