@mytec: iter2.2 ready for testing
This commit is contained in:
@@ -1,12 +1,11 @@
|
||||
import os
|
||||
import re
|
||||
import httpx
|
||||
import asyncio
|
||||
import json
|
||||
from typing import List, Optional
|
||||
from pydantic import BaseModel
|
||||
from functools import lru_cache
|
||||
import hashlib
|
||||
import json
|
||||
from pathlib import Path
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
|
||||
class Building(BaseModel):
|
||||
@@ -20,24 +19,89 @@ class Building(BaseModel):
|
||||
tags: dict = {} # Store all OSM tags for material detection
|
||||
|
||||
|
||||
class OSMCache:
    """Disk-backed cache for OSM query results, keyed by bounding box.

    Entries are JSON files under RFCP_DATA_PATH/osm/<cache_type>/ and are
    treated as misses once they are older than CACHE_EXPIRY_DAYS.
    """

    # Entries older than this many days are considered stale.
    CACHE_EXPIRY_DAYS = 30

    def __init__(self, cache_type: str):
        self.data_path = Path(os.environ.get('RFCP_DATA_PATH', './data'))
        self.cache_path = self.data_path / 'osm' / cache_type
        self.cache_path.mkdir(parents=True, exist_ok=True)

    def _get_cache_key(self, min_lat: float, min_lon: float, max_lat: float, max_lon: float) -> str:
        """Build the cache key: bbox corners rounded onto a 0.01-degree grid."""
        return f"{min_lat:.2f}_{min_lon:.2f}_{max_lat:.2f}_{max_lon:.2f}"

    def _get_cache_file(self, cache_key: str) -> Path:
        """Map a cache key to its JSON file on disk."""
        return self.cache_path / f"{cache_key}.json"

    def get(self, min_lat: float, min_lon: float, max_lat: float, max_lon: float) -> Optional[dict]:
        """Return the cached payload for this bbox, or None if absent or expired."""
        cache_file = self._get_cache_file(
            self._get_cache_key(min_lat, min_lon, max_lat, max_lon))

        if not cache_file.exists():
            return None

        try:
            payload = json.loads(cache_file.read_text())

            # A missing timestamp falls back to a date old enough to always expire.
            written = datetime.fromisoformat(payload.get('_cached_at', '2000-01-01'))
            if datetime.now() - written > timedelta(days=self.CACHE_EXPIRY_DAYS):
                return None

            return payload.get('data')

        except Exception as e:
            print(f"[OSMCache] Failed to read cache: {e}")
            return None

    def set(self, min_lat: float, min_lon: float, max_lat: float, max_lon: float, data):
        """Persist *data* for this bbox together with a write timestamp."""
        cache_file = self._get_cache_file(
            self._get_cache_key(min_lat, min_lon, max_lat, max_lon))

        try:
            envelope = {
                '_cached_at': datetime.now().isoformat(),
                '_bbox': [min_lat, min_lon, max_lat, max_lon],
                'data': data
            }
            cache_file.write_text(json.dumps(envelope))
        except Exception as e:
            print(f"[OSMCache] Failed to write cache: {e}")

    def clear(self):
        """Delete every cached entry."""
        for cached in self.cache_path.glob("*.json"):
            cached.unlink()

    def get_size_mb(self) -> float:
        """Total on-disk size of this cache in megabytes."""
        size_bytes = sum(f.stat().st_size for f in self.cache_path.glob("*.json"))
        return size_bytes / (1024 * 1024)
|
||||
|
||||
|
||||
class BuildingsService:
|
||||
"""
|
||||
OpenStreetMap buildings via Overpass API
|
||||
OpenStreetMap buildings via Overpass API with local caching.
|
||||
"""
|
||||
|
||||
OVERPASS_URL = "https://overpass-api.de/api/interpreter"
|
||||
DEFAULT_LEVEL_HEIGHT = 3.0 # meters per floor
|
||||
DEFAULT_BUILDING_HEIGHT = 9.0 # 3 floors if unknown
|
||||
|
||||
def __init__(self, cache_dir: str = "/opt/rfcp/backend/data/buildings"):
|
||||
self.cache_dir = Path(cache_dir)
|
||||
self.cache_dir.mkdir(exist_ok=True, parents=True)
|
||||
def __init__(self):
    """Wire up the disk-backed OSM cache and a bounded in-memory bbox cache."""
    self.cache = OSMCache('buildings')
    self._memory_cache: dict[str, List[Building]] = {}
    self._max_cache_size = 50  # max bbox regions held in memory
|
||||
|
||||
@staticmethod
|
||||
def _safe_int(value) -> Optional[int]:
|
||||
"""Safely parse int from OSM tag (handles '1а', '2-3', '5+', etc.)"""
|
||||
"""Safely parse int from OSM tag (handles '1a', '2-3', '5+', etc.)"""
|
||||
if not value:
|
||||
return None
|
||||
try:
|
||||
@@ -63,10 +127,8 @@ class BuildingsService:
|
||||
return None
|
||||
|
||||
def _bbox_key(self, min_lat: float, min_lon: float, max_lat: float, max_lon: float) -> str:
|
||||
"""Generate cache key for bbox"""
|
||||
# Round to 0.01 degree (~1km) grid for cache efficiency
|
||||
key = f"{min_lat:.2f},{min_lon:.2f},{max_lat:.2f},{max_lon:.2f}"
|
||||
return hashlib.md5(key.encode()).hexdigest()[:12]
|
||||
"""Generate memory cache key for bbox"""
|
||||
return f"{min_lat:.2f}_{min_lon:.2f}_{max_lat:.2f}_{max_lon:.2f}"
|
||||
|
||||
async def fetch_buildings(
|
||||
self,
|
||||
@@ -74,35 +136,25 @@ class BuildingsService:
|
||||
max_lat: float, max_lon: float,
|
||||
use_cache: bool = True
|
||||
) -> List[Building]:
|
||||
"""
|
||||
Fetch buildings in bounding box from OSM
|
||||
|
||||
Args:
|
||||
min_lat, min_lon, max_lat, max_lon: Bounding box
|
||||
use_cache: Whether to use cached results
|
||||
|
||||
Returns:
|
||||
List of Building objects with height estimates
|
||||
"""
|
||||
cache_key = self._bbox_key(min_lat, min_lon, max_lat, max_lon)
|
||||
"""Fetch buildings in bounding box from OSM, using cache if available"""
|
||||
bbox_key = self._bbox_key(min_lat, min_lon, max_lat, max_lon)
|
||||
|
||||
# Check memory cache
|
||||
if use_cache and cache_key in self._memory_cache:
|
||||
return self._memory_cache[cache_key]
|
||||
if use_cache and bbox_key in self._memory_cache:
|
||||
return self._memory_cache[bbox_key]
|
||||
|
||||
# Check disk cache
|
||||
cache_file = self.cache_dir / f"{cache_key}.json"
|
||||
if use_cache and cache_file.exists():
|
||||
try:
|
||||
with open(cache_file, 'r') as f:
|
||||
data = json.load(f)
|
||||
buildings = [Building(**b) for b in data]
|
||||
self._memory_cache[cache_key] = buildings
|
||||
# Check disk cache (OSMCache with expiry)
|
||||
if use_cache:
|
||||
cached = self.cache.get(min_lat, min_lon, max_lat, max_lon)
|
||||
if cached is not None:
|
||||
print(f"[Buildings] Cache hit for bbox")
|
||||
buildings = [Building(**b) for b in cached]
|
||||
self._memory_cache[bbox_key] = buildings
|
||||
return buildings
|
||||
except Exception:
|
||||
pass # Fetch fresh if cache corrupted
|
||||
|
||||
# Fetch from Overpass API
|
||||
print(f"[Buildings] Fetching from Overpass API...")
|
||||
|
||||
query = f"""
|
||||
[out:json][timeout:30];
|
||||
(
|
||||
@@ -123,23 +175,21 @@ class BuildingsService:
|
||||
response.raise_for_status()
|
||||
data = response.json()
|
||||
except Exception as e:
|
||||
print(f"Overpass API error: {e}")
|
||||
print(f"[Buildings] Overpass API error: {e}")
|
||||
return []
|
||||
|
||||
# Parse response
|
||||
buildings = self._parse_overpass_response(data)
|
||||
|
||||
# Cache results
|
||||
# Save to disk cache
|
||||
if buildings:
|
||||
# Disk cache
|
||||
with open(cache_file, 'w') as f:
|
||||
json.dump([b.model_dump() for b in buildings], f)
|
||||
self.cache.set(min_lat, min_lon, max_lat, max_lon,
|
||||
[b.model_dump() for b in buildings])
|
||||
|
||||
# Memory cache (with size limit)
|
||||
if len(self._memory_cache) >= self._max_cache_size:
|
||||
oldest = next(iter(self._memory_cache))
|
||||
del self._memory_cache[oldest]
|
||||
self._memory_cache[cache_key] = buildings
|
||||
# Memory cache with size limit
|
||||
if len(self._memory_cache) >= self._max_cache_size:
|
||||
oldest = next(iter(self._memory_cache))
|
||||
del self._memory_cache[oldest]
|
||||
self._memory_cache[bbox_key] = buildings
|
||||
|
||||
return buildings
|
||||
|
||||
@@ -162,19 +212,16 @@ class BuildingsService:
|
||||
if "building" not in tags:
|
||||
continue
|
||||
|
||||
# Get geometry
|
||||
geometry = []
|
||||
for node_id in element.get("nodes", []):
|
||||
if node_id in nodes:
|
||||
geometry.append(list(nodes[node_id]))
|
||||
|
||||
if len(geometry) < 3:
|
||||
continue # Invalid polygon
|
||||
continue
|
||||
|
||||
# Estimate height
|
||||
height = self._estimate_height(tags)
|
||||
|
||||
# Detect material from tags
|
||||
material_str = None
|
||||
if "building:material" in tags:
|
||||
material_str = tags["building:material"]
|
||||
@@ -195,19 +242,16 @@ class BuildingsService:
|
||||
|
||||
def _estimate_height(self, tags: dict) -> float:
|
||||
"""Estimate building height from OSM tags"""
|
||||
# Explicit height tag
|
||||
if "height" in tags:
|
||||
h = self._safe_float(tags["height"])
|
||||
if h is not None and h > 0:
|
||||
return h
|
||||
|
||||
# Calculate from levels
|
||||
if "building:levels" in tags:
|
||||
levels = self._safe_int(tags["building:levels"])
|
||||
if levels is not None and levels > 0:
|
||||
return levels * self.DEFAULT_LEVEL_HEIGHT
|
||||
|
||||
# Default based on building type
|
||||
building_type = tags.get("building", "yes")
|
||||
type_heights = {
|
||||
"house": 6.0,
|
||||
@@ -254,18 +298,10 @@ class BuildingsService:
|
||||
lat2: float, lon2: float, height2: float,
|
||||
building: Building
|
||||
) -> Optional[float]:
|
||||
"""
|
||||
Check if line segment intersects building
|
||||
|
||||
Returns:
|
||||
Distance along path where intersection occurs, or None
|
||||
"""
|
||||
# Simplified 2D check + height comparison
|
||||
# For accurate 3D intersection, would need proper ray-polygon intersection
|
||||
|
||||
"""Check if line segment intersects building.
|
||||
Returns distance along path where intersection occurs, or None."""
|
||||
from app.services.terrain_service import TerrainService
|
||||
|
||||
# Sample points along line
|
||||
num_samples = 20
|
||||
for i in range(num_samples):
|
||||
t = i / num_samples
|
||||
@@ -274,9 +310,7 @@ class BuildingsService:
|
||||
height = height1 + t * (height2 - height1)
|
||||
|
||||
if self.point_in_building(lat, lon, building):
|
||||
# Check if signal height is below building
|
||||
if height < building.height:
|
||||
# Calculate distance
|
||||
dist = t * TerrainService.haversine_distance(lat1, lon1, lat2, lon2)
|
||||
return dist
|
||||
|
||||
|
||||
@@ -1,34 +1,37 @@
|
||||
import os
|
||||
import struct
|
||||
import asyncio
|
||||
import aiofiles
|
||||
import gzip
|
||||
import zipfile
|
||||
import io
|
||||
import numpy as np
|
||||
import httpx
|
||||
from pathlib import Path
|
||||
from typing import List, Optional, Tuple
|
||||
import numpy as np
|
||||
|
||||
|
||||
class TerrainService:
|
||||
"""
|
||||
SRTM elevation data service
|
||||
- Downloads and caches .hgt tiles
|
||||
- Provides elevation lookups
|
||||
- Generates elevation profiles
|
||||
SRTM elevation data service with local caching.
|
||||
- Stores tiles in RFCP_DATA_PATH/terrain/
|
||||
- In-memory LRU cache (max 20 tiles)
|
||||
- Auto-downloads from S3 mirror
|
||||
- Supports both SRTM1 (3601x3601) and SRTM3 (1201x1201)
|
||||
"""
|
||||
|
||||
# SRTM tile dimensions (1 arc-second = 3601x3601, 3 arc-second = 1201x1201)
|
||||
TILE_SIZE = 3601 # 1 arc-second (30m resolution)
|
||||
|
||||
# Mirror URLs for SRTM data (USGS requires login, use mirrors)
|
||||
SRTM_MIRRORS = [
|
||||
SRTM_SOURCES = [
|
||||
"https://elevation-tiles-prod.s3.amazonaws.com/skadi/{lat_dir}/{tile_name}.hgt.gz",
|
||||
"https://s3.amazonaws.com/elevation-tiles-prod/skadi/{lat_dir}/{tile_name}.hgt.gz",
|
||||
]
|
||||
|
||||
def __init__(self, cache_dir: str = "/opt/rfcp/backend/data/srtm"):
|
||||
self.cache_dir = Path(cache_dir)
|
||||
self.cache_dir.mkdir(exist_ok=True, parents=True)
|
||||
self._tile_cache: dict[str, np.ndarray] = {} # In-memory cache
|
||||
self._max_cached_tiles = 10 # Limit memory usage
|
||||
def __init__(self):
|
||||
self.data_path = Path(os.environ.get('RFCP_DATA_PATH', './data'))
|
||||
self.terrain_path = self.data_path / 'terrain'
|
||||
self.terrain_path.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
# In-memory cache for loaded tiles
|
||||
self._tile_cache: dict[str, np.ndarray] = {}
|
||||
self._max_cache_tiles = 20 # ~500MB max
|
||||
|
||||
def get_tile_name(self, lat: float, lon: float) -> str:
|
||||
"""Convert lat/lon to SRTM tile name (e.g., N48E035)"""
|
||||
@@ -42,73 +45,96 @@ class TerrainService:
|
||||
|
||||
def get_tile_path(self, tile_name: str) -> Path:
|
||||
"""Get local path for tile"""
|
||||
return self.cache_dir / f"{tile_name}.hgt"
|
||||
return self.terrain_path / f"{tile_name}.hgt"
|
||||
|
||||
async def download_tile(self, tile_name: str) -> bool:
    """Download an SRTM tile if it is not already cached locally.

    Tries each configured source in order; handles .gz and .zip payloads.
    Returns True when the tile file exists locally afterwards.
    """
    tile_path = self.get_tile_path(tile_name)

    # Already on disk -- nothing to do.
    if tile_path.exists():
        return True

    lat_dir = tile_name[:3]  # e.g., "N48"

    async with httpx.AsyncClient(timeout=60.0) as client:
        for source_url in self.SRTM_SOURCES:
            url = source_url.format(lat_dir=lat_dir, tile_name=tile_name)
            try:
                response = await client.get(url)
                if response.status_code == 200:
                    data = response.content

                    # Unwrap compressed payloads based on the URL suffix.
                    if url.endswith('.gz'):
                        data = gzip.decompress(data)
                    elif url.endswith('.zip'):
                        with zipfile.ZipFile(io.BytesIO(data)) as zf:
                            for name in zf.namelist():
                                if name.endswith('.hgt'):
                                    data = zf.read(name)
                                    break

                    tile_path.write_bytes(data)
                    print(f"[Terrain] Downloaded {tile_name} ({len(data)} bytes)")
                    return True

            except Exception as e:
                print(f"[Terrain] Failed from {url}: {e}")
                continue

    print(f"[Terrain] Could not download {tile_name}")
    return False
|
||||
|
||||
async def load_tile(self, tile_name: str) -> Optional[np.ndarray]:
|
||||
"""Load tile into memory (with caching)"""
|
||||
# Check memory cache
|
||||
def _load_tile(self, tile_name: str) -> Optional[np.ndarray]:
|
||||
"""Load tile from disk into memory cache"""
|
||||
# Check memory cache first
|
||||
if tile_name in self._tile_cache:
|
||||
return self._tile_cache[tile_name]
|
||||
|
||||
tile_path = self.get_tile_path(tile_name)
|
||||
|
||||
# Download if missing
|
||||
if not tile_path.exists():
|
||||
return None
|
||||
|
||||
try:
|
||||
data = tile_path.read_bytes()
|
||||
|
||||
# SRTM HGT format: big-endian signed 16-bit integers
|
||||
if len(data) == 3601 * 3601 * 2:
|
||||
size = 3601 # SRTM1 (30m)
|
||||
elif len(data) == 1201 * 1201 * 2:
|
||||
size = 1201 # SRTM3 (90m)
|
||||
else:
|
||||
print(f"[Terrain] Unknown tile size: {len(data)} bytes for {tile_name}")
|
||||
return None
|
||||
|
||||
tile = np.frombuffer(data, dtype='>i2').reshape((size, size))
|
||||
|
||||
# Manage memory cache with LRU eviction
|
||||
if len(self._tile_cache) >= self._max_cache_tiles:
|
||||
oldest = next(iter(self._tile_cache))
|
||||
del self._tile_cache[oldest]
|
||||
|
||||
self._tile_cache[tile_name] = tile
|
||||
return tile
|
||||
|
||||
except Exception as e:
|
||||
print(f"[Terrain] Failed to load {tile_name}: {e}")
|
||||
return None
|
||||
|
||||
async def load_tile(self, tile_name: str) -> Optional[np.ndarray]:
|
||||
"""Load tile into memory, downloading if needed"""
|
||||
# Check memory cache
|
||||
if tile_name in self._tile_cache:
|
||||
return self._tile_cache[tile_name]
|
||||
|
||||
# Download if missing
|
||||
if not self.get_tile_path(tile_name).exists():
|
||||
success = await self.download_tile(tile_name)
|
||||
if not success:
|
||||
return None
|
||||
|
||||
# Read HGT file (big-endian signed 16-bit integers)
|
||||
try:
|
||||
async with aiofiles.open(tile_path, 'rb') as f:
|
||||
data = await f.read()
|
||||
|
||||
# Parse as numpy array
|
||||
arr = np.frombuffer(data, dtype='>i2').reshape(self.TILE_SIZE, self.TILE_SIZE)
|
||||
|
||||
# Manage cache size
|
||||
if len(self._tile_cache) >= self._max_cached_tiles:
|
||||
# Remove oldest entry
|
||||
oldest = next(iter(self._tile_cache))
|
||||
del self._tile_cache[oldest]
|
||||
|
||||
self._tile_cache[tile_name] = arr
|
||||
return arr
|
||||
|
||||
except Exception as e:
|
||||
print(f"Error loading tile {tile_name}: {e}")
|
||||
return None
|
||||
return self._load_tile(tile_name)
|
||||
|
||||
async def get_elevation(self, lat: float, lon: float) -> float:
|
||||
"""Get elevation at specific coordinate (meters above sea level)"""
|
||||
@@ -116,7 +142,9 @@ class TerrainService:
|
||||
tile = await self.load_tile(tile_name)
|
||||
|
||||
if tile is None:
|
||||
return 0.0 # No data, assume sea level
|
||||
return 0.0
|
||||
|
||||
size = tile.shape[0]
|
||||
|
||||
# Calculate position within tile
|
||||
lat_int = int(lat) if lat >= 0 else int(lat) - 1
|
||||
@@ -125,13 +153,12 @@ class TerrainService:
|
||||
lat_frac = lat - lat_int
|
||||
lon_frac = lon - lon_int
|
||||
|
||||
# Row 0 = north edge, row 3600 = south edge
|
||||
row = int((1 - lat_frac) * (self.TILE_SIZE - 1))
|
||||
col = int(lon_frac * (self.TILE_SIZE - 1))
|
||||
# Row 0 = north edge, last row = south edge
|
||||
row = int((1 - lat_frac) * (size - 1))
|
||||
col = int(lon_frac * (size - 1))
|
||||
|
||||
# Clamp to valid range
|
||||
row = max(0, min(row, self.TILE_SIZE - 1))
|
||||
col = max(0, min(col, self.TILE_SIZE - 1))
|
||||
row = max(0, min(row, size - 1))
|
||||
col = max(0, min(col, size - 1))
|
||||
|
||||
elevation = tile[row, col]
|
||||
|
||||
@@ -147,15 +174,10 @@ class TerrainService:
|
||||
lat2: float, lon2: float,
|
||||
num_points: int = 100
|
||||
) -> List[dict]:
|
||||
"""
|
||||
Get elevation profile between two points
|
||||
|
||||
Returns list of {lat, lon, elevation, distance} dicts
|
||||
"""
|
||||
"""Get elevation profile between two points"""
|
||||
lats = np.linspace(lat1, lat2, num_points)
|
||||
lons = np.linspace(lon1, lon2, num_points)
|
||||
|
||||
# Calculate cumulative distances
|
||||
total_distance = self.haversine_distance(lat1, lon1, lat2, lon2)
|
||||
distances = np.linspace(0, total_distance, num_points)
|
||||
|
||||
@@ -171,10 +193,46 @@ class TerrainService:
|
||||
|
||||
return profile
|
||||
|
||||
async def ensure_tiles_for_bbox(
|
||||
self,
|
||||
min_lat: float, min_lon: float,
|
||||
max_lat: float, max_lon: float
|
||||
) -> list[str]:
|
||||
"""Pre-download all tiles needed for a bounding box"""
|
||||
tiles_needed = []
|
||||
|
||||
for lat in range(int(min_lat), int(max_lat) + 1):
|
||||
for lon in range(int(min_lon), int(max_lon) + 1):
|
||||
tile_name = self.get_tile_name(lat, lon)
|
||||
tiles_needed.append(tile_name)
|
||||
|
||||
# Download in parallel (batches of 5 to avoid overload)
|
||||
downloaded = []
|
||||
batch_size = 5
|
||||
for i in range(0, len(tiles_needed), batch_size):
|
||||
batch = tiles_needed[i:i + batch_size]
|
||||
results = await asyncio.gather(*[
|
||||
self.download_tile(tile) for tile in batch
|
||||
])
|
||||
for tile, ok in zip(batch, results):
|
||||
if ok:
|
||||
downloaded.append(tile)
|
||||
|
||||
return downloaded
|
||||
|
||||
def get_cached_tiles(self) -> list[str]:
|
||||
"""List all locally cached tile names"""
|
||||
return [f.stem for f in self.terrain_path.glob("*.hgt")]
|
||||
|
||||
def get_cache_size_mb(self) -> float:
|
||||
"""Get total terrain cache size in MB"""
|
||||
total = sum(f.stat().st_size for f in self.terrain_path.glob("*.hgt"))
|
||||
return total / (1024 * 1024)
|
||||
|
||||
@staticmethod
|
||||
def haversine_distance(lat1: float, lon1: float, lat2: float, lon2: float) -> float:
|
||||
"""Calculate distance between two points in meters"""
|
||||
EARTH_RADIUS = 6371000 # meters
|
||||
EARTH_RADIUS = 6371000
|
||||
|
||||
lat1, lon1, lat2, lon2 = map(np.radians, [lat1, lon1, lat2, lon2])
|
||||
|
||||
|
||||
@@ -5,11 +5,13 @@ Forests and dense vegetation attenuate RF signals significantly.
|
||||
Uses ITU-R P.833 approximations for foliage loss.
|
||||
"""
|
||||
|
||||
import os
|
||||
import httpx
|
||||
import json
|
||||
from typing import List, Tuple, Optional
|
||||
from pydantic import BaseModel
|
||||
import json
|
||||
from pathlib import Path
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
|
||||
class VegetationArea(BaseModel):
|
||||
@@ -20,6 +22,62 @@ class VegetationArea(BaseModel):
|
||||
density: str # dense, sparse, mixed
|
||||
|
||||
|
||||
class VegetationCache:
    """Disk-backed cache for vegetation query results, keyed by bounding box.

    Entries are JSON files under RFCP_DATA_PATH/osm/vegetation/ and expire
    after CACHE_EXPIRY_DAYS.
    """

    # Entries older than this many days are considered stale.
    CACHE_EXPIRY_DAYS = 30

    def __init__(self):
        self.data_path = Path(os.environ.get('RFCP_DATA_PATH', './data'))
        self.cache_path = self.data_path / 'osm' / 'vegetation'
        self.cache_path.mkdir(parents=True, exist_ok=True)

    def _get_cache_key(self, min_lat: float, min_lon: float, max_lat: float, max_lon: float) -> str:
        """Build the cache key: bbox corners rounded onto a 0.01-degree grid."""
        return f"{min_lat:.2f}_{min_lon:.2f}_{max_lat:.2f}_{max_lon:.2f}"

    def _get_cache_file(self, cache_key: str) -> Path:
        """Map a cache key to its JSON file on disk."""
        return self.cache_path / f"{cache_key}.json"

    def get(self, min_lat: float, min_lon: float, max_lat: float, max_lon: float) -> Optional[list]:
        """Return the cached payload for this bbox, or None if absent or expired."""
        cache_file = self._get_cache_file(
            self._get_cache_key(min_lat, min_lon, max_lat, max_lon))

        if not cache_file.exists():
            return None

        try:
            payload = json.loads(cache_file.read_text())
            # A missing timestamp falls back to a date old enough to always expire.
            written = datetime.fromisoformat(payload.get('_cached_at', '2000-01-01'))
            if datetime.now() - written > timedelta(days=self.CACHE_EXPIRY_DAYS):
                return None
            return payload.get('data')
        except Exception as e:
            print(f"[VegetationCache] Failed to read cache: {e}")
            return None

    def set(self, min_lat: float, min_lon: float, max_lat: float, max_lon: float, data):
        """Persist *data* for this bbox together with a write timestamp."""
        cache_file = self._get_cache_file(
            self._get_cache_key(min_lat, min_lon, max_lat, max_lon))

        try:
            envelope = {
                '_cached_at': datetime.now().isoformat(),
                '_bbox': [min_lat, min_lon, max_lat, max_lon],
                'data': data
            }
            cache_file.write_text(json.dumps(envelope))
        except Exception as e:
            print(f"[VegetationCache] Failed to write cache: {e}")

    def clear(self):
        """Delete every cached entry."""
        for cached in self.cache_path.glob("*.json"):
            cached.unlink()

    def get_size_mb(self) -> float:
        """Total on-disk size of this cache in megabytes."""
        size_bytes = sum(f.stat().st_size for f in self.cache_path.glob("*.json"))
        return size_bytes / (1024 * 1024)
|
||||
|
||||
|
||||
class VegetationService:
|
||||
"""OSM vegetation for signal attenuation"""
|
||||
|
||||
@@ -44,33 +102,33 @@ class VegetationService:
|
||||
"autumn": 0.7,
|
||||
}
|
||||
|
||||
def __init__(self, cache_dir: str = "/opt/rfcp/backend/data/vegetation"):
|
||||
self.cache_dir = Path(cache_dir)
|
||||
self.cache_dir.mkdir(exist_ok=True, parents=True)
|
||||
self._cache: dict[str, List[VegetationArea]] = {}
|
||||
def __init__(self):
    """Wire up the disk-backed vegetation cache plus an in-memory bbox cache."""
    self.cache = VegetationCache()
    self._memory_cache: dict[str, List[VegetationArea]] = {}
|
||||
|
||||
async def fetch_vegetation(
|
||||
self,
|
||||
min_lat: float, min_lon: float,
|
||||
max_lat: float, max_lon: float
|
||||
) -> List[VegetationArea]:
|
||||
"""Fetch vegetation areas in bounding box"""
|
||||
"""Fetch vegetation areas in bounding box, using cache if available"""
|
||||
|
||||
cache_key = f"{min_lat:.2f}_{min_lon:.2f}_{max_lat:.2f}_{max_lon:.2f}"
|
||||
|
||||
if cache_key in self._cache:
|
||||
return self._cache[cache_key]
|
||||
# Memory cache
|
||||
if cache_key in self._memory_cache:
|
||||
return self._memory_cache[cache_key]
|
||||
|
||||
cache_file = self.cache_dir / f"{cache_key}.json"
|
||||
if cache_file.exists():
|
||||
try:
|
||||
with open(cache_file) as f:
|
||||
data = json.load(f)
|
||||
areas = [VegetationArea(**v) for v in data]
|
||||
self._cache[cache_key] = areas
|
||||
return areas
|
||||
except Exception:
|
||||
pass
|
||||
# Disk cache with expiry
|
||||
cached = self.cache.get(min_lat, min_lon, max_lat, max_lon)
|
||||
if cached is not None:
|
||||
print(f"[Vegetation] Cache hit for bbox")
|
||||
areas = [VegetationArea(**v) for v in cached]
|
||||
self._memory_cache[cache_key] = areas
|
||||
return areas
|
||||
|
||||
# Fetch from Overpass
|
||||
print(f"[Vegetation] Fetching from Overpass API...")
|
||||
|
||||
query = f"""
|
||||
[out:json][timeout:30];
|
||||
@@ -91,17 +149,17 @@ class VegetationService:
|
||||
response.raise_for_status()
|
||||
data = response.json()
|
||||
except Exception as e:
|
||||
print(f"Vegetation fetch error: {e}")
|
||||
print(f"[Vegetation] Fetch error: {e}")
|
||||
return []
|
||||
|
||||
areas = self._parse_response(data)
|
||||
|
||||
# Cache
|
||||
# Save to disk cache
|
||||
if areas:
|
||||
with open(cache_file, 'w') as f:
|
||||
json.dump([v.model_dump() for v in areas], f)
|
||||
self._cache[cache_key] = areas
|
||||
self.cache.set(min_lat, min_lon, max_lat, max_lon,
|
||||
[v.model_dump() for v in areas])
|
||||
|
||||
self._memory_cache[cache_key] = areas
|
||||
return areas
|
||||
|
||||
def _parse_response(self, data: dict) -> List[VegetationArea]:
|
||||
@@ -128,7 +186,6 @@ class VegetationService:
|
||||
if len(geometry) < 3:
|
||||
continue
|
||||
|
||||
# Determine density from leaf_type tag
|
||||
leaf_type = tags.get("leaf_type", "mixed")
|
||||
density = "dense" if leaf_type == "needleleaved" else "mixed"
|
||||
|
||||
@@ -151,7 +208,7 @@ class VegetationService:
|
||||
"""
|
||||
Calculate signal loss through vegetation along path.
|
||||
|
||||
Samples points along the TX→RX path and accumulates
|
||||
Samples points along the TX->RX path and accumulates
|
||||
attenuation for each segment inside vegetation.
|
||||
|
||||
Returns loss in dB (capped at 40 dB).
|
||||
@@ -163,7 +220,6 @@ class VegetationService:
|
||||
if path_length < 1:
|
||||
return 0.0
|
||||
|
||||
# Sample points along path — every ~50m
|
||||
num_samples = max(10, int(path_length / 50))
|
||||
|
||||
segment_length = path_length / num_samples
|
||||
@@ -174,7 +230,6 @@ class VegetationService:
|
||||
lat = lat1 + t * (lat2 - lat1)
|
||||
lon = lon1 + t * (lon2 - lon1)
|
||||
|
||||
# Check if sample point is inside any vegetation area
|
||||
veg = self._point_in_vegetation(lat, lon, vegetation_areas)
|
||||
|
||||
if veg:
|
||||
@@ -182,7 +237,7 @@ class VegetationService:
|
||||
seasonal = self.SEASONAL_FACTOR.get(season, 1.0)
|
||||
total_loss += (segment_length / 100) * attenuation * seasonal
|
||||
|
||||
return min(total_loss, 40.0) # Cap at 40 dB
|
||||
return min(total_loss, 40.0)
|
||||
|
||||
def _point_in_vegetation(
|
||||
self,
|
||||
@@ -199,7 +254,7 @@ class VegetationService:
|
||||
def _point_in_polygon(
|
||||
lat: float, lon: float, polygon: List[Tuple[float, float]]
|
||||
) -> bool:
|
||||
"""Ray casting algorithm — polygon coords are (lon, lat)"""
|
||||
"""Ray casting algorithm -- polygon coords are (lon, lat)"""
|
||||
n = len(polygon)
|
||||
inside = False
|
||||
|
||||
|
||||
@@ -5,11 +5,13 @@ Water surfaces produce strong specular reflections that can boost
|
||||
or create multipath interference for RF signals.
|
||||
"""
|
||||
|
||||
import os
|
||||
import httpx
|
||||
import json
|
||||
from typing import List, Tuple, Optional
|
||||
from pydantic import BaseModel
|
||||
import json
|
||||
from pathlib import Path
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
|
||||
class WaterBody(BaseModel):
|
||||
@@ -20,6 +22,62 @@ class WaterBody(BaseModel):
|
||||
name: Optional[str] = None
|
||||
|
||||
|
||||
class WaterCache:
    """Disk-backed cache for water-body query results, keyed by bounding box.

    Entries are JSON files under RFCP_DATA_PATH/osm/water/ and expire after
    CACHE_EXPIRY_DAYS.
    """

    # Entries older than this many days are considered stale.
    CACHE_EXPIRY_DAYS = 30

    def __init__(self):
        self.data_path = Path(os.environ.get('RFCP_DATA_PATH', './data'))
        self.cache_path = self.data_path / 'osm' / 'water'
        self.cache_path.mkdir(parents=True, exist_ok=True)

    def _get_cache_key(self, min_lat: float, min_lon: float, max_lat: float, max_lon: float) -> str:
        """Build the cache key: bbox corners rounded onto a 0.01-degree grid."""
        return f"{min_lat:.2f}_{min_lon:.2f}_{max_lat:.2f}_{max_lon:.2f}"

    def _get_cache_file(self, cache_key: str) -> Path:
        """Map a cache key to its JSON file on disk."""
        return self.cache_path / f"{cache_key}.json"

    def get(self, min_lat: float, min_lon: float, max_lat: float, max_lon: float) -> Optional[list]:
        """Return the cached payload for this bbox, or None if absent or expired."""
        cache_file = self._get_cache_file(
            self._get_cache_key(min_lat, min_lon, max_lat, max_lon))

        if not cache_file.exists():
            return None

        try:
            payload = json.loads(cache_file.read_text())
            # A missing timestamp falls back to a date old enough to always expire.
            written = datetime.fromisoformat(payload.get('_cached_at', '2000-01-01'))
            if datetime.now() - written > timedelta(days=self.CACHE_EXPIRY_DAYS):
                return None
            return payload.get('data')
        except Exception as e:
            print(f"[WaterCache] Failed to read cache: {e}")
            return None

    def set(self, min_lat: float, min_lon: float, max_lat: float, max_lon: float, data):
        """Persist *data* for this bbox together with a write timestamp."""
        cache_file = self._get_cache_file(
            self._get_cache_key(min_lat, min_lon, max_lat, max_lon))

        try:
            envelope = {
                '_cached_at': datetime.now().isoformat(),
                '_bbox': [min_lat, min_lon, max_lat, max_lon],
                'data': data
            }
            cache_file.write_text(json.dumps(envelope))
        except Exception as e:
            print(f"[WaterCache] Failed to write cache: {e}")

    def clear(self):
        """Delete every cached entry."""
        for cached in self.cache_path.glob("*.json"):
            cached.unlink()

    def get_size_mb(self) -> float:
        """Total on-disk size of this cache in megabytes."""
        size_bytes = sum(f.stat().st_size for f in self.cache_path.glob("*.json"))
        return size_bytes / (1024 * 1024)
|
||||
|
||||
|
||||
class WaterService:
|
||||
"""OSM water bodies for reflection calculations"""
|
||||
|
||||
@@ -34,33 +92,33 @@ class WaterService:
|
||||
"water": 0.7,
|
||||
}
|
||||
|
||||
def __init__(self, cache_dir: str = "/opt/rfcp/backend/data/water"):
|
||||
self.cache_dir = Path(cache_dir)
|
||||
self.cache_dir.mkdir(exist_ok=True, parents=True)
|
||||
self._cache: dict[str, List[WaterBody]] = {}
|
||||
def __init__(self):
    """Wire up the disk-backed water cache plus an in-memory bbox cache."""
    self.cache = WaterCache()
    self._memory_cache: dict[str, List[WaterBody]] = {}
|
||||
|
||||
async def fetch_water_bodies(
|
||||
self,
|
||||
min_lat: float, min_lon: float,
|
||||
max_lat: float, max_lon: float
|
||||
) -> List[WaterBody]:
|
||||
"""Fetch water bodies in bounding box"""
|
||||
"""Fetch water bodies in bounding box, using cache if available"""
|
||||
|
||||
cache_key = f"{min_lat:.2f}_{min_lon:.2f}_{max_lat:.2f}_{max_lon:.2f}"
|
||||
|
||||
if cache_key in self._cache:
|
||||
return self._cache[cache_key]
|
||||
# Memory cache
|
||||
if cache_key in self._memory_cache:
|
||||
return self._memory_cache[cache_key]
|
||||
|
||||
cache_file = self.cache_dir / f"{cache_key}.json"
|
||||
if cache_file.exists():
|
||||
try:
|
||||
with open(cache_file) as f:
|
||||
data = json.load(f)
|
||||
bodies = [WaterBody(**w) for w in data]
|
||||
self._cache[cache_key] = bodies
|
||||
return bodies
|
||||
except Exception:
|
||||
pass
|
||||
# Disk cache with expiry
|
||||
cached = self.cache.get(min_lat, min_lon, max_lat, max_lon)
|
||||
if cached is not None:
|
||||
print(f"[Water] Cache hit for bbox")
|
||||
bodies = [WaterBody(**w) for w in cached]
|
||||
self._memory_cache[cache_key] = bodies
|
||||
return bodies
|
||||
|
||||
# Fetch from Overpass
|
||||
print(f"[Water] Fetching from Overpass API...")
|
||||
|
||||
query = f"""
|
||||
[out:json][timeout:30];
|
||||
@@ -80,17 +138,17 @@ class WaterService:
|
||||
response.raise_for_status()
|
||||
data = response.json()
|
||||
except Exception as e:
|
||||
print(f"Water fetch error: {e}")
|
||||
print(f"[Water] Fetch error: {e}")
|
||||
return []
|
||||
|
||||
bodies = self._parse_response(data)
|
||||
|
||||
# Cache
|
||||
# Save to disk cache
|
||||
if bodies:
|
||||
with open(cache_file, 'w') as f:
|
||||
json.dump([w.model_dump() for w in bodies], f)
|
||||
self._cache[cache_key] = bodies
|
||||
self.cache.set(min_lat, min_lon, max_lat, max_lon,
|
||||
[w.model_dump() for w in bodies])
|
||||
|
||||
self._memory_cache[cache_key] = bodies
|
||||
return bodies
|
||||
|
||||
def _parse_response(self, data: dict) -> List[WaterBody]:
|
||||
@@ -106,8 +164,6 @@ class WaterService:
|
||||
continue
|
||||
|
||||
tags = element.get("tags", {})
|
||||
|
||||
# Determine water type
|
||||
water_type = tags.get("water", tags.get("waterway", tags.get("natural", "water")))
|
||||
|
||||
geometry = []
|
||||
@@ -144,7 +200,7 @@ class WaterService:
|
||||
def _point_in_polygon(
|
||||
lat: float, lon: float, polygon: List[Tuple[float, float]]
|
||||
) -> bool:
|
||||
"""Ray casting algorithm — polygon coords are (lon, lat)"""
|
||||
"""Ray casting algorithm -- polygon coords are (lon, lat)"""
|
||||
n = len(polygon)
|
||||
inside = False
|
||||
|
||||
|
||||
Reference in New Issue
Block a user