@mytec: iter2.2 ready for testing

This commit is contained in:
2026-01-31 16:16:15 +02:00
parent baf57ad77f
commit f6a39df366
9 changed files with 901 additions and 191 deletions

View File

@@ -1,34 +1,37 @@
import os
import struct
import asyncio
import aiofiles
import gzip
import zipfile
import io
import numpy as np
import httpx
from pathlib import Path
from typing import List, Optional, Tuple
import numpy as np
class TerrainService:
"""
SRTM elevation data service
- Downloads and caches .hgt tiles
- Provides elevation lookups
- Generates elevation profiles
SRTM elevation data service with local caching.
- Stores tiles in RFCP_DATA_PATH/terrain/
- In-memory LRU cache (max 20 tiles)
- Auto-downloads from S3 mirror
- Supports both SRTM1 (3601x3601) and SRTM3 (1201x1201)
"""
# SRTM tile dimensions (1 arc-second = 3601x3601, 3 arc-second = 1201x1201)
TILE_SIZE = 3601 # 1 arc-second (30m resolution)
# Mirror URLs for SRTM data (USGS requires login, use mirrors)
SRTM_MIRRORS = [
SRTM_SOURCES = [
"https://elevation-tiles-prod.s3.amazonaws.com/skadi/{lat_dir}/{tile_name}.hgt.gz",
"https://s3.amazonaws.com/elevation-tiles-prod/skadi/{lat_dir}/{tile_name}.hgt.gz",
]
def __init__(self):
    """Initialize the terrain service and ensure the tile directory exists.

    Storage location is RFCP_DATA_PATH/terrain (defaults to ./data/terrain).
    """
    # Root data directory; overridable via RFCP_DATA_PATH for deployments.
    self.data_path = Path(os.environ.get('RFCP_DATA_PATH', './data'))
    self.terrain_path = self.data_path / 'terrain'
    self.terrain_path.mkdir(parents=True, exist_ok=True)
    # In-memory cache of decoded tiles: tile name -> 2D int16 elevation grid.
    self._tile_cache: dict[str, np.ndarray] = {}
    # Cap cached tiles to bound memory use (~500MB at SRTM1 resolution).
    self._max_cache_tiles = 20
def get_tile_name(self, lat: float, lon: float) -> str:
"""Convert lat/lon to SRTM tile name (e.g., N48E035)"""
@@ -42,73 +45,96 @@ class TerrainService:
def get_tile_path(self, tile_name: str) -> Path:
    """Return the local path where the given tile is (or would be) stored."""
    return self.terrain_path / f"{tile_name}.hgt"
async def download_tile(self, tile_name: str) -> bool:
    """Download an SRTM tile if not cached locally.

    Tries each URL template in SRTM_SOURCES in order; handles .gz and .zip
    payloads. Returns True when the .hgt file exists locally afterwards.
    """
    tile_path = self.get_tile_path(tile_name)
    if tile_path.exists():
        return True  # already on disk — nothing to do
    lat_dir = tile_name[:3]  # e.g. "N48"; mirrors shard tiles by latitude band
    async with httpx.AsyncClient(timeout=60.0) as client:
        for source_url in self.SRTM_SOURCES:
            url = source_url.format(lat_dir=lat_dir, tile_name=tile_name)
            try:
                response = await client.get(url)
                if response.status_code == 200:
                    data = response.content
                    # Normalize the payload to raw .hgt bytes.
                    if url.endswith('.gz'):
                        data = gzip.decompress(data)
                    elif url.endswith('.zip'):
                        with zipfile.ZipFile(io.BytesIO(data)) as zf:
                            for name in zf.namelist():
                                if name.endswith('.hgt'):
                                    data = zf.read(name)
                                    break
                    tile_path.write_bytes(data)
                    print(f"[Terrain] Downloaded {tile_name} ({len(data)} bytes)")
                    return True
            except Exception as e:
                # Best-effort: log and fall through to the next mirror.
                print(f"[Terrain] Failed from {url}: {e}")
                continue
    print(f"[Terrain] Could not download {tile_name}")
    return False
def _load_tile(self, tile_name: str) -> Optional[np.ndarray]:
    """Load a tile from disk into the in-memory cache.

    Returns the square int16 elevation grid (3601x3601 for SRTM1, 1201x1201
    for SRTM3), or None when the file is missing or malformed. Does NOT
    download; network fetching is handled by load_tile().
    """
    # Serve from memory when already decoded.
    if tile_name in self._tile_cache:
        return self._tile_cache[tile_name]
    tile_path = self.get_tile_path(tile_name)
    if not tile_path.exists():
        return None
    try:
        data = tile_path.read_bytes()
        # SRTM HGT format: big-endian signed 16-bit integers, square grid.
        # Infer resolution from the file size.
        if len(data) == 3601 * 3601 * 2:
            size = 3601  # SRTM1 (30m)
        elif len(data) == 1201 * 1201 * 2:
            size = 1201  # SRTM3 (90m)
        else:
            print(f"[Terrain] Unknown tile size: {len(data)} bytes for {tile_name}")
            return None
        tile = np.frombuffer(data, dtype='>i2').reshape((size, size))
        # Evict the oldest insertion when full (dicts preserve insertion
        # order, so this is FIFO eviction approximating LRU).
        if len(self._tile_cache) >= self._max_cache_tiles:
            oldest = next(iter(self._tile_cache))
            del self._tile_cache[oldest]
        self._tile_cache[tile_name] = tile
        return tile
    except Exception as e:
        print(f"[Terrain] Failed to load {tile_name}: {e}")
        return None
async def load_tile(self, tile_name: str) -> Optional[np.ndarray]:
    """Load a tile into memory, downloading it first if needed.

    Returns the decoded elevation grid, or None when the tile can neither
    be found locally nor downloaded from any mirror.
    """
    # Fast path: already decoded and cached in memory.
    if tile_name in self._tile_cache:
        return self._tile_cache[tile_name]
    # Fetch from a mirror when the file is not on disk yet.
    if not self.get_tile_path(tile_name).exists():
        success = await self.download_tile(tile_name)
        if not success:
            return None
    # Decode from disk (also populates the in-memory cache).
    return self._load_tile(tile_name)
async def get_elevation(self, lat: float, lon: float) -> float:
"""Get elevation at specific coordinate (meters above sea level)"""
@@ -116,7 +142,9 @@ class TerrainService:
tile = await self.load_tile(tile_name)
if tile is None:
return 0.0 # No data, assume sea level
return 0.0
size = tile.shape[0]
# Calculate position within tile
lat_int = int(lat) if lat >= 0 else int(lat) - 1
@@ -125,13 +153,12 @@ class TerrainService:
lat_frac = lat - lat_int
lon_frac = lon - lon_int
# Row 0 = north edge, row 3600 = south edge
row = int((1 - lat_frac) * (self.TILE_SIZE - 1))
col = int(lon_frac * (self.TILE_SIZE - 1))
# Row 0 = north edge, last row = south edge
row = int((1 - lat_frac) * (size - 1))
col = int(lon_frac * (size - 1))
# Clamp to valid range
row = max(0, min(row, self.TILE_SIZE - 1))
col = max(0, min(col, self.TILE_SIZE - 1))
row = max(0, min(row, size - 1))
col = max(0, min(col, size - 1))
elevation = tile[row, col]
@@ -147,15 +174,10 @@ class TerrainService:
lat2: float, lon2: float,
num_points: int = 100
) -> List[dict]:
"""
Get elevation profile between two points
Returns list of {lat, lon, elevation, distance} dicts
"""
"""Get elevation profile between two points"""
lats = np.linspace(lat1, lat2, num_points)
lons = np.linspace(lon1, lon2, num_points)
# Calculate cumulative distances
total_distance = self.haversine_distance(lat1, lon1, lat2, lon2)
distances = np.linspace(0, total_distance, num_points)
@@ -171,10 +193,46 @@ class TerrainService:
return profile
async def ensure_tiles_for_bbox(
    self,
    min_lat: float, min_lon: float,
    max_lat: float, max_lon: float
) -> list[str]:
    """Pre-download all tiles needed for a bounding box.

    Returns the names of the tiles that are available locally afterwards.
    """
    # Use floor (not int(), which truncates toward zero) so negative
    # southern/western coordinates map to the correct tile: lat -1.5 lies
    # in the S02 tile, matching get_tile_name()'s floor semantics.
    lat_lo = int(np.floor(min_lat))
    lat_hi = int(np.floor(max_lat))
    lon_lo = int(np.floor(min_lon))
    lon_hi = int(np.floor(max_lon))
    tiles_needed = [
        self.get_tile_name(lat, lon)
        for lat in range(lat_lo, lat_hi + 1)
        for lon in range(lon_lo, lon_hi + 1)
    ]
    # Download in parallel, in small batches to avoid overloading mirrors.
    downloaded = []
    batch_size = 5
    for i in range(0, len(tiles_needed), batch_size):
        batch = tiles_needed[i:i + batch_size]
        results = await asyncio.gather(*(
            self.download_tile(tile) for tile in batch
        ))
        downloaded.extend(tile for tile, ok in zip(batch, results) if ok)
    return downloaded
def get_cached_tiles(self) -> list[str]:
    """List all locally cached tile names"""
    names = []
    for hgt_file in self.terrain_path.glob("*.hgt"):
        names.append(hgt_file.stem)
    return names
def get_cache_size_mb(self) -> float:
    """Get total terrain cache size in MB"""
    total_bytes = 0
    for hgt_file in self.terrain_path.glob("*.hgt"):
        total_bytes += hgt_file.stat().st_size
    return total_bytes / (1024 * 1024)
@staticmethod
def haversine_distance(lat1: float, lon1: float, lat2: float, lon2: float) -> float:
"""Calculate distance between two points in meters"""
EARTH_RADIUS = 6371000 # meters
EARTH_RADIUS = 6371000
lat1, lon1, lat2, lon2 = map(np.radians, [lat1, lon1, lat2, lon2])