@mytec: iter2.2 ready for testing

This commit is contained in:
2026-01-31 16:16:15 +02:00
parent baf57ad77f
commit f6a39df366
9 changed files with 901 additions and 191 deletions

View File

@@ -1,12 +1,11 @@
import os
import re
import httpx
import asyncio
import json
from typing import List, Optional
from pydantic import BaseModel
from functools import lru_cache
import hashlib
import json
from pathlib import Path
from datetime import datetime, timedelta
class Building(BaseModel):
@@ -20,24 +19,89 @@ class Building(BaseModel):
tags: dict = {} # Store all OSM tags for material detection
class OSMCache:
    """Local file-system cache for OSM data, keyed by bounding box, with expiry.

    Entries are JSON files stored under ``$RFCP_DATA_PATH/osm/<cache_type>/``.
    Cache keys snap the bbox to a 0.01-degree grid so nearby requests share an
    entry. All operations are best-effort: a broken cache never raises to the
    caller; failures are logged and treated as cache misses.
    """

    CACHE_EXPIRY_DAYS = 30  # entries older than this are treated as misses

    def __init__(self, cache_type: str):
        # Root data dir is configurable via env var; defaults to ./data.
        self.data_path = Path(os.environ.get('RFCP_DATA_PATH', './data'))
        self.cache_path = self.data_path / 'osm' / cache_type
        self.cache_path.mkdir(parents=True, exist_ok=True)

    def _get_cache_key(self, min_lat: float, min_lon: float, max_lat: float, max_lon: float) -> str:
        """Generate cache key from bbox (rounded to 0.01 degree grid)."""
        return f"{min_lat:.2f}_{min_lon:.2f}_{max_lat:.2f}_{max_lon:.2f}"

    def _get_cache_file(self, cache_key: str) -> Path:
        """Return the path of the JSON file backing *cache_key*."""
        return self.cache_path / f"{cache_key}.json"

    def get(self, min_lat: float, min_lon: float, max_lat: float, max_lon: float) -> Optional[dict]:
        """Return the cached payload for the bbox, or None if absent, expired, or corrupt.

        Fix: expired and unreadable entries are now deleted on detection so
        stale files no longer accumulate on disk (and corrupt files are not
        re-parsed and re-logged on every subsequent lookup).
        """
        cache_key = self._get_cache_key(min_lat, min_lon, max_lat, max_lon)
        cache_file = self._get_cache_file(cache_key)
        if not cache_file.exists():
            return None
        try:
            data = json.loads(cache_file.read_text())
            # Check expiry; a missing timestamp falls back to a date that is
            # always expired, forcing a refetch.
            cached_at = datetime.fromisoformat(data.get('_cached_at', '2000-01-01'))
            if datetime.now() - cached_at > timedelta(days=self.CACHE_EXPIRY_DAYS):
                # Expired: remove the stale file instead of leaving it behind.
                cache_file.unlink(missing_ok=True)
                return None
            return data.get('data')
        except Exception as e:
            # Best-effort cache: log, drop the corrupt entry, report a miss.
            print(f"[OSMCache] Failed to read cache: {e}")
            try:
                cache_file.unlink(missing_ok=True)
            except OSError:
                pass
            return None

    def set(self, min_lat: float, min_lon: float, max_lat: float, max_lon: float, data):
        """Save *data* for the bbox, stamped with the current time for expiry checks."""
        cache_key = self._get_cache_key(min_lat, min_lon, max_lat, max_lon)
        cache_file = self._get_cache_file(cache_key)
        try:
            cache_data = {
                '_cached_at': datetime.now().isoformat(),
                '_bbox': [min_lat, min_lon, max_lat, max_lon],
                'data': data,
            }
            cache_file.write_text(json.dumps(cache_data))
        except Exception as e:
            # Write failures (full disk, permissions) must not break the caller.
            print(f"[OSMCache] Failed to write cache: {e}")

    def clear(self):
        """Clear all cached data."""
        for f in self.cache_path.glob("*.json"):
            f.unlink(missing_ok=True)  # tolerate concurrent deletion

    def get_size_mb(self) -> float:
        """Get cache size in MB."""
        total = sum(f.stat().st_size for f in self.cache_path.glob("*.json"))
        return total / (1024 * 1024)
class BuildingsService:
"""
OpenStreetMap buildings via Overpass API
OpenStreetMap buildings via Overpass API with local caching.
"""
OVERPASS_URL = "https://overpass-api.de/api/interpreter"
DEFAULT_LEVEL_HEIGHT = 3.0 # meters per floor
DEFAULT_BUILDING_HEIGHT = 9.0 # 3 floors if unknown
def __init__(self, cache_dir: str = "/opt/rfcp/backend/data/buildings"):
self.cache_dir = Path(cache_dir)
self.cache_dir.mkdir(exist_ok=True, parents=True)
def __init__(self):
self.cache = OSMCache('buildings')
self._memory_cache: dict[str, List[Building]] = {}
self._max_cache_size = 50 # bbox regions
self._max_cache_size = 50
@staticmethod
def _safe_int(value) -> Optional[int]:
"""Safely parse int from OSM tag (handles '1а', '2-3', '5+', etc.)"""
"""Safely parse int from OSM tag (handles '1a', '2-3', '5+', etc.)"""
if not value:
return None
try:
@@ -63,10 +127,8 @@ class BuildingsService:
return None
def _bbox_key(self, min_lat: float, min_lon: float, max_lat: float, max_lon: float) -> str:
"""Generate cache key for bbox"""
# Round to 0.01 degree (~1km) grid for cache efficiency
key = f"{min_lat:.2f},{min_lon:.2f},{max_lat:.2f},{max_lon:.2f}"
return hashlib.md5(key.encode()).hexdigest()[:12]
"""Generate memory cache key for bbox"""
return f"{min_lat:.2f}_{min_lon:.2f}_{max_lat:.2f}_{max_lon:.2f}"
async def fetch_buildings(
self,
@@ -74,35 +136,25 @@ class BuildingsService:
max_lat: float, max_lon: float,
use_cache: bool = True
) -> List[Building]:
"""
Fetch buildings in bounding box from OSM
Args:
min_lat, min_lon, max_lat, max_lon: Bounding box
use_cache: Whether to use cached results
Returns:
List of Building objects with height estimates
"""
cache_key = self._bbox_key(min_lat, min_lon, max_lat, max_lon)
"""Fetch buildings in bounding box from OSM, using cache if available"""
bbox_key = self._bbox_key(min_lat, min_lon, max_lat, max_lon)
# Check memory cache
if use_cache and cache_key in self._memory_cache:
return self._memory_cache[cache_key]
if use_cache and bbox_key in self._memory_cache:
return self._memory_cache[bbox_key]
# Check disk cache
cache_file = self.cache_dir / f"{cache_key}.json"
if use_cache and cache_file.exists():
try:
with open(cache_file, 'r') as f:
data = json.load(f)
buildings = [Building(**b) for b in data]
self._memory_cache[cache_key] = buildings
# Check disk cache (OSMCache with expiry)
if use_cache:
cached = self.cache.get(min_lat, min_lon, max_lat, max_lon)
if cached is not None:
print(f"[Buildings] Cache hit for bbox")
buildings = [Building(**b) for b in cached]
self._memory_cache[bbox_key] = buildings
return buildings
except Exception:
pass # Fetch fresh if cache corrupted
# Fetch from Overpass API
print(f"[Buildings] Fetching from Overpass API...")
query = f"""
[out:json][timeout:30];
(
@@ -123,23 +175,21 @@ class BuildingsService:
response.raise_for_status()
data = response.json()
except Exception as e:
print(f"Overpass API error: {e}")
print(f"[Buildings] Overpass API error: {e}")
return []
# Parse response
buildings = self._parse_overpass_response(data)
# Cache results
# Save to disk cache
if buildings:
# Disk cache
with open(cache_file, 'w') as f:
json.dump([b.model_dump() for b in buildings], f)
self.cache.set(min_lat, min_lon, max_lat, max_lon,
[b.model_dump() for b in buildings])
# Memory cache (with size limit)
if len(self._memory_cache) >= self._max_cache_size:
oldest = next(iter(self._memory_cache))
del self._memory_cache[oldest]
self._memory_cache[cache_key] = buildings
# Memory cache with size limit
if len(self._memory_cache) >= self._max_cache_size:
oldest = next(iter(self._memory_cache))
del self._memory_cache[oldest]
self._memory_cache[bbox_key] = buildings
return buildings
@@ -162,19 +212,16 @@ class BuildingsService:
if "building" not in tags:
continue
# Get geometry
geometry = []
for node_id in element.get("nodes", []):
if node_id in nodes:
geometry.append(list(nodes[node_id]))
if len(geometry) < 3:
continue # Invalid polygon
continue
# Estimate height
height = self._estimate_height(tags)
# Detect material from tags
material_str = None
if "building:material" in tags:
material_str = tags["building:material"]
@@ -195,19 +242,16 @@ class BuildingsService:
def _estimate_height(self, tags: dict) -> float:
"""Estimate building height from OSM tags"""
# Explicit height tag
if "height" in tags:
h = self._safe_float(tags["height"])
if h is not None and h > 0:
return h
# Calculate from levels
if "building:levels" in tags:
levels = self._safe_int(tags["building:levels"])
if levels is not None and levels > 0:
return levels * self.DEFAULT_LEVEL_HEIGHT
# Default based on building type
building_type = tags.get("building", "yes")
type_heights = {
"house": 6.0,
@@ -254,18 +298,10 @@ class BuildingsService:
lat2: float, lon2: float, height2: float,
building: Building
) -> Optional[float]:
"""
Check if line segment intersects building
Returns:
Distance along path where intersection occurs, or None
"""
# Simplified 2D check + height comparison
# For accurate 3D intersection, would need proper ray-polygon intersection
"""Check if line segment intersects building.
Returns distance along path where intersection occurs, or None."""
from app.services.terrain_service import TerrainService
# Sample points along line
num_samples = 20
for i in range(num_samples):
t = i / num_samples
@@ -274,9 +310,7 @@ class BuildingsService:
height = height1 + t * (height2 - height1)
if self.point_in_building(lat, lon, building):
# Check if signal height is below building
if height < building.height:
# Calculate distance
dist = t * TerrainService.haversine_distance(lat1, lon1, lat2, lon2)
return dist