@mytec: iter2.2 ready for testing
This commit is contained in:
@@ -5,11 +5,13 @@ Water surfaces produce strong specular reflections that can boost
or create multipath interference for RF signals.
"""
|
||||
|
||||
import os
|
||||
import httpx
|
||||
import json
|
||||
from typing import List, Tuple, Optional
|
||||
from pydantic import BaseModel
|
||||
import json
|
||||
from pathlib import Path
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
|
||||
class WaterBody(BaseModel):
|
||||
@@ -20,6 +22,62 @@ class WaterBody(BaseModel):
|
||||
name: Optional[str] = None
|
||||
|
||||
|
||||
class WaterCache:
    """Local cache for water body data with expiry"""

    # Entries older than this many days are treated as misses.
    CACHE_EXPIRY_DAYS = 30

    def __init__(self):
        # Cache files live under <RFCP_DATA_PATH>/osm/water (default ./data).
        self.data_path = Path(os.environ.get('RFCP_DATA_PATH', './data'))
        self.cache_path = self.data_path / 'osm' / 'water'
        self.cache_path.mkdir(parents=True, exist_ok=True)

    def _get_cache_key(self, min_lat: float, min_lon: float, max_lat: float, max_lon: float) -> str:
        """Bbox corners rounded to 2 decimals, joined into a filename-safe key."""
        corners = (min_lat, min_lon, max_lat, max_lon)
        return "_".join(f"{c:.2f}" for c in corners)

    def _get_cache_file(self, cache_key: str) -> Path:
        """Path of the JSON file backing *cache_key*."""
        return self.cache_path / f"{cache_key}.json"

    def get(self, min_lat: float, min_lon: float, max_lat: float, max_lon: float) -> Optional[list]:
        """Return the cached payload for the bbox, or None on miss, expiry, or read error."""
        key = self._get_cache_key(min_lat, min_lon, max_lat, max_lon)
        target = self._get_cache_file(key)

        if not target.exists():
            return None

        try:
            payload = json.loads(target.read_text())
            # Missing timestamp falls back to an ancient date, forcing expiry.
            stamp = datetime.fromisoformat(payload.get('_cached_at', '2000-01-01'))
            age = datetime.now() - stamp
            if age <= timedelta(days=self.CACHE_EXPIRY_DAYS):
                return payload.get('data')
            return None  # stale entry
        except Exception as e:
            # Best-effort cache: a corrupt file is just a miss.
            print(f"[WaterCache] Failed to read cache: {e}")
            return None

    def set(self, min_lat: float, min_lon: float, max_lat: float, max_lon: float, data):
        """Persist *data* for the bbox, stamped with the current time."""
        key = self._get_cache_key(min_lat, min_lon, max_lat, max_lon)
        target = self._get_cache_file(key)

        try:
            envelope = {
                '_cached_at': datetime.now().isoformat(),
                '_bbox': [min_lat, min_lon, max_lat, max_lon],
                'data': data,
            }
            target.write_text(json.dumps(envelope))
        except Exception as e:
            # Failing to write is non-fatal; the caller still has the data.
            print(f"[WaterCache] Failed to write cache: {e}")

    def clear(self):
        """Delete every cached bbox file."""
        for stale in self.cache_path.glob("*.json"):
            stale.unlink()

    def get_size_mb(self) -> float:
        """Total size of all cached files, in megabytes."""
        total_bytes = 0
        for entry in self.cache_path.glob("*.json"):
            total_bytes += entry.stat().st_size
        return total_bytes / (1024 * 1024)
|
||||
|
||||
|
||||
class WaterService:
|
||||
"""OSM water bodies for reflection calculations"""
|
||||
|
||||
@@ -34,33 +92,33 @@ class WaterService:
|
||||
"water": 0.7,
|
||||
}
|
||||
|
||||
def __init__(self, cache_dir: str = "/opt/rfcp/backend/data/water"):
|
||||
self.cache_dir = Path(cache_dir)
|
||||
self.cache_dir.mkdir(exist_ok=True, parents=True)
|
||||
self._cache: dict[str, List[WaterBody]] = {}
|
||||
def __init__(self):
|
||||
self.cache = WaterCache()
|
||||
self._memory_cache: dict[str, List[WaterBody]] = {}
|
||||
|
||||
async def fetch_water_bodies(
|
||||
self,
|
||||
min_lat: float, min_lon: float,
|
||||
max_lat: float, max_lon: float
|
||||
) -> List[WaterBody]:
|
||||
"""Fetch water bodies in bounding box"""
|
||||
"""Fetch water bodies in bounding box, using cache if available"""
|
||||
|
||||
cache_key = f"{min_lat:.2f}_{min_lon:.2f}_{max_lat:.2f}_{max_lon:.2f}"
|
||||
|
||||
if cache_key in self._cache:
|
||||
return self._cache[cache_key]
|
||||
# Memory cache
|
||||
if cache_key in self._memory_cache:
|
||||
return self._memory_cache[cache_key]
|
||||
|
||||
cache_file = self.cache_dir / f"{cache_key}.json"
|
||||
if cache_file.exists():
|
||||
try:
|
||||
with open(cache_file) as f:
|
||||
data = json.load(f)
|
||||
bodies = [WaterBody(**w) for w in data]
|
||||
self._cache[cache_key] = bodies
|
||||
return bodies
|
||||
except Exception:
|
||||
pass
|
||||
# Disk cache with expiry
|
||||
cached = self.cache.get(min_lat, min_lon, max_lat, max_lon)
|
||||
if cached is not None:
|
||||
print(f"[Water] Cache hit for bbox")
|
||||
bodies = [WaterBody(**w) for w in cached]
|
||||
self._memory_cache[cache_key] = bodies
|
||||
return bodies
|
||||
|
||||
# Fetch from Overpass
|
||||
print(f"[Water] Fetching from Overpass API...")
|
||||
|
||||
query = f"""
|
||||
[out:json][timeout:30];
|
||||
@@ -80,17 +138,17 @@ class WaterService:
|
||||
response.raise_for_status()
|
||||
data = response.json()
|
||||
except Exception as e:
|
||||
print(f"Water fetch error: {e}")
|
||||
print(f"[Water] Fetch error: {e}")
|
||||
return []
|
||||
|
||||
bodies = self._parse_response(data)
|
||||
|
||||
# Cache
|
||||
# Save to disk cache
|
||||
if bodies:
|
||||
with open(cache_file, 'w') as f:
|
||||
json.dump([w.model_dump() for w in bodies], f)
|
||||
self._cache[cache_key] = bodies
|
||||
self.cache.set(min_lat, min_lon, max_lat, max_lon,
|
||||
[w.model_dump() for w in bodies])
|
||||
|
||||
self._memory_cache[cache_key] = bodies
|
||||
return bodies
|
||||
|
||||
def _parse_response(self, data: dict) -> List[WaterBody]:
|
||||
@@ -106,8 +164,6 @@ class WaterService:
|
||||
continue
|
||||
|
||||
tags = element.get("tags", {})
|
||||
|
||||
# Determine water type
|
||||
water_type = tags.get("water", tags.get("waterway", tags.get("natural", "water")))
|
||||
|
||||
geometry = []
|
||||
@@ -144,7 +200,7 @@ class WaterService:
|
||||
def _point_in_polygon(
|
||||
lat: float, lon: float, polygon: List[Tuple[float, float]]
|
||||
) -> bool:
|
||||
"""Ray casting algorithm — polygon coords are (lon, lat)"""
|
||||
"""Ray casting algorithm -- polygon coords are (lon, lat)"""
|
||||
n = len(polygon)
|
||||
inside = False
|
||||
|
||||
|
||||
Reference in New Issue
Block a user