# RFCP Phase 2.2: Offline Data & Caching
|
|
|
|
**Date:** January 31, 2025
|
|
**Type:** Data Management & Performance
|
|
**Estimated:** 10-14 hours
|
|
**Priority:** HIGH — enables true offline mode
|
|
**Depends on:** Phase 2.1 (Desktop App)
|
|
|
|
---
|
|
|
|
## 🎯 Goal
|
|
|
|
Enable fully offline operation by caching SRTM terrain data and OSM data locally. Add a region download wizard for first-run setup.
|
|
|
|
---
|
|
|
|
## 📊 Current Problem
|
|
|
|
| Data Source | Current | Problem |
|
|
|-------------|---------|---------|
|
|
| SRTM (terrain) | Downloaded on-demand from NASA | Slow, requires internet |
|
|
| OSM Buildings | Overpass API query each time | Very slow, rate limited |
|
|
| OSM Water | Overpass API query each time | Slow |
|
|
| OSM Vegetation | Overpass API query each time | Slow |
|
|
| Map Tiles | Online from OpenStreetMap | Requires internet |
|
|
|
|
**Result:** 10km calculation takes 2+ minutes mostly waiting for network.
|
|
|
|
---
|
|
|
|
## 🏗️ Architecture
|
|
|
|
```
|
|
┌─────────────────────────────────────────────────┐
|
|
│ Local Data Store │
|
|
├─────────────────────────────────────────────────┤
|
|
│ │
|
|
│ ~/.rfcp/data/ (or %APPDATA%\RFCP\data\) │
|
|
│ │ │
|
|
│ ├── terrain/ │
|
|
│ │ ├── N48E034.hgt # SRTM tile │
|
|
│ │ ├── N48E035.hgt # ~25MB each │
|
|
│   │   └── ...              # ~170 for Ukraine    │
|
|
│ │ │
|
|
│ ├── osm/ │
|
|
│ │ ├── buildings/ │
|
|
│ │ │ ├── 48.0_34.0_49.0_35.0.json │
|
|
│ │ │ └── ... # Cached by bbox │
|
|
│ │ ├── water/ │
|
|
│ │ │ └── ... │
|
|
│ │ └── vegetation/ │
|
|
│ │ └── ... │
|
|
│ │ │
|
|
│ ├── tiles/ # Map tiles (future) │
|
|
│ │ └── ... │
|
|
│ │ │
|
|
│ └── regions.json # Downloaded regions │
|
|
│ │
|
|
└─────────────────────────────────────────────────┘
|
|
```
|
|
|
|
---
|
|
|
|
## ✅ Tasks
|
|
|
|
### Task 2.2.1: SRTM Local Cache (3-4 hours)
|
|
|
|
**Update backend/app/services/terrain_service.py:**
|
|
|
|
```python
|
|
import os
|
|
import struct
|
|
import numpy as np
|
|
from pathlib import Path
|
|
import httpx
|
|
import asyncio
|
|
from typing import Optional
|
|
|
|
class TerrainService:
    """SRTM terrain data access with a two-level (disk + memory) cache.

    Raw ``.hgt`` tiles are stored under ``<RFCP_DATA_PATH>/terrain``; a
    bounded number of decoded tiles are additionally kept in memory.
    """

    # SRTM data sources, tried in order of preference.
    # NOTE(review): the CGIAR fallback serves GeoTIFF zip archives; the
    # zip branch below only extracts members named ``*.hgt``, so that
    # source may yield nothing usable — confirm before relying on it.
    SRTM_SOURCES = [
        "https://elevation-tiles-prod.s3.amazonaws.com/skadi/{lat_dir}/{tile}.hgt.gz",
        "https://srtm.csi.cgiar.org/wp-content/uploads/files/srtm_5x5/TIFF/{tile}.zip",
    ]

    # SRTM sentinel meaning "no data at this cell".
    VOID_VALUE = -32768

    # Valid raw tile sizes -> grid edge: SRTM3 (90 m) and SRTM1 (30 m),
    # both stored as big-endian int16 samples.
    _VALID_SIZES = {1201 * 1201 * 2: 1201, 3601 * 3601 * 2: 3601}

    def __init__(self):
        self.data_path = Path(os.environ.get('RFCP_DATA_PATH', './data'))
        self.terrain_path = self.data_path / 'terrain'
        self.terrain_path.mkdir(parents=True, exist_ok=True)

        # In-memory cache of decoded tiles. Dicts preserve insertion
        # order, so the first key is the oldest and is evicted first.
        self._tile_cache: dict[str, np.ndarray] = {}
        self._max_cache_tiles = 20  # ~500MB worst case (20 x ~25MB)

    def _get_tile_name(self, lat: float, lon: float) -> str:
        """Return the SRTM tile name (e.g. ``N48E034``) covering a point.

        Tiles are named after the *floor* of the coordinate; ``// 1``
        gives floor semantics, which is what SRTM requires for negative
        latitudes/longitudes (plain ``int()`` truncates toward zero).
        """
        lat_dir = 'N' if lat >= 0 else 'S'
        lon_dir = 'E' if lon >= 0 else 'W'
        lat_int = int(abs(lat // 1))
        lon_int = int(abs(lon // 1))
        return f"{lat_dir}{lat_int:02d}{lon_dir}{lon_int:03d}"

    def _get_tile_path(self, tile_name: str) -> Path:
        """Local ``.hgt`` path for a tile name."""
        return self.terrain_path / f"{tile_name}.hgt"

    async def _download_tile(self, tile_name: str) -> bool:
        """Ensure a tile exists on disk, downloading it if necessary.

        Returns True when the tile is available locally afterwards.
        """
        tile_path = self._get_tile_path(tile_name)

        if tile_path.exists():
            return True

        lat_dir = tile_name[0:3]  # e.g. "N48" — skadi shards by this prefix

        for source_url in self.SRTM_SOURCES:
            url = source_url.format(lat_dir=lat_dir, tile=tile_name)

            try:
                async with httpx.AsyncClient(timeout=60.0) as client:
                    response = await client.get(url)

                if response.status_code != 200:
                    continue

                data = response.content

                # Handle gzip or zip wrappers around the raw .hgt.
                if url.endswith('.gz'):
                    import gzip
                    data = gzip.decompress(data)
                elif url.endswith('.zip'):
                    import zipfile
                    import io
                    with zipfile.ZipFile(io.BytesIO(data)) as zf:
                        hgt_member = next(
                            (n for n in zf.namelist() if n.endswith('.hgt')),
                            None,
                        )
                        if hgt_member is None:
                            # Archive carries no raw .hgt (e.g. TIFF
                            # only). The original wrote the zip bytes out
                            # as a tile, corrupting the cache — skip.
                            continue
                        data = zf.read(hgt_member)

                if len(data) not in self._VALID_SIZES:
                    # Not a valid SRTM payload; don't poison the cache.
                    print(f"[Terrain] Invalid payload for {tile_name} from {url}")
                    continue

                # Save to cache.
                tile_path.write_bytes(data)
                print(f"[Terrain] Downloaded {tile_name}")
                return True

            except Exception as e:
                print(f"[Terrain] Failed to download from {url}: {e}")
                continue

        print(f"[Terrain] Could not download {tile_name}")
        return False

    def _load_tile(self, tile_name: str) -> Optional[np.ndarray]:
        """Load a tile into memory (memory cache first, then disk)."""
        if tile_name in self._tile_cache:
            return self._tile_cache[tile_name]

        tile_path = self._get_tile_path(tile_name)

        if not tile_path.exists():
            return None

        try:
            # SRTM HGT format: square grid of big-endian int16 samples.
            data = tile_path.read_bytes()

            size = self._VALID_SIZES.get(len(data))
            if size is None:
                print(f"[Terrain] Unknown tile size: {len(data)}")
                return None

            tile = np.frombuffer(data, dtype='>i2').reshape((size, size))

            # Evict the oldest entry when the memory cache is full.
            if len(self._tile_cache) >= self._max_cache_tiles:
                oldest = next(iter(self._tile_cache))
                del self._tile_cache[oldest]

            self._tile_cache[tile_name] = tile
            return tile

        except Exception as e:
            print(f"[Terrain] Failed to load {tile_name}: {e}")
            return None

    async def get_elevation(self, lat: float, lon: float) -> float:
        """Return elevation in meters at (lat, lon), or 0.0 if unknown.

        Downloads the covering tile on demand. SRTM voids (-32768) and
        missing tiles both map to 0.0 (sea-level fallback).
        """
        tile_name = self._get_tile_name(lat, lon)

        # Ensure tile is downloaded, then load it.
        await self._download_tile(tile_name)
        tile = self._load_tile(tile_name)

        if tile is None:
            return 0.0

        size = tile.shape[0]

        # Fractional position inside the tile, measured from its SW
        # corner. ``// 1`` floors, so this is correct for negative
        # coordinates too — the original used ``int(lat)``, which
        # truncates toward zero and yields negative fractions (and thus
        # wrong samples) anywhere south/west of 0.
        lat_frac = lat - lat // 1
        lon_frac = lon - lon // 1

        # Row 0 is the tile's northern edge, hence the (1 - frac) flip.
        row = int((1 - lat_frac) * (size - 1))
        col = int(lon_frac * (size - 1))

        row = max(0, min(row, size - 1))
        col = max(0, min(col, size - 1))

        elevation = tile[row, col]

        # Handle void values.
        if elevation == self.VOID_VALUE:
            return 0.0

        return float(elevation)

    async def ensure_tiles_for_bbox(
        self,
        min_lat: float, min_lon: float,
        max_lat: float, max_lon: float
    ) -> list[str]:
        """Pre-download every tile intersecting the bounding box.

        Returns the names of the tiles available locally afterwards.
        """
        # Floor the edges (``// 1``) so bboxes south/west of 0 enumerate
        # the correct integer cells; int() truncation would skip a
        # row/column there.
        tiles_needed = [
            self._get_tile_name(lat, lon)
            for lat in range(int(min_lat // 1), int(max_lat // 1) + 1)
            for lon in range(int(min_lon // 1), int(max_lon // 1) + 1)
        ]

        # Download in parallel.
        results = await asyncio.gather(*(
            self._download_tile(tile) for tile in tiles_needed
        ))

        return [t for t, ok in zip(tiles_needed, results) if ok]

    def get_cached_tiles(self) -> list[str]:
        """Names of all tiles currently cached on disk."""
        return [f.stem for f in self.terrain_path.glob("*.hgt")]

    def get_cache_size_mb(self) -> float:
        """Total on-disk terrain cache size in MB."""
        total = sum(f.stat().st_size for f in self.terrain_path.glob("*.hgt"))
        return total / (1024 * 1024)


terrain_service = TerrainService()
|
|
```
|
|
|
|
---
|
|
|
|
### Task 2.2.2: OSM Local Cache (3-4 hours)
|
|
|
|
**Update backend/app/services/buildings_service.py (and water, vegetation):**
|
|
|
|
```python
|
|
import os
|
|
import json
|
|
import hashlib
|
|
from pathlib import Path
|
|
from datetime import datetime, timedelta
|
|
import httpx
|
|
from typing import Optional
|
|
|
|
class OSMCache:
    """Disk-backed cache for OSM query results, keyed by bounding box.

    Entries live under ``<RFCP_DATA_PATH>/osm/<cache_type>/`` as JSON
    files named after the rounded bbox corners.
    """

    CACHE_EXPIRY_DAYS = 30  # entries older than this count as misses

    def __init__(self, cache_type: str):
        root = Path(os.environ.get('RFCP_DATA_PATH', './data'))
        self.data_path = root
        self.cache_path = root / 'osm' / cache_type
        self.cache_path.mkdir(parents=True, exist_ok=True)

    def _get_cache_key(self, min_lat: float, min_lon: float, max_lat: float, max_lon: float) -> str:
        """Build a filename-safe key from the bbox corners.

        Corners are rounded to a 0.1-degree grid to improve hit rates.
        NOTE(review): rounding can match a request against data fetched
        for a slightly different bbox — verify the tolerance is OK for
        the consumers.
        """
        corners = (
            round(min_lat, 1),
            round(min_lon, 1),
            round(max_lat, 1),
            round(max_lon, 1),
        )
        return f"{corners[0]}_{corners[1]}_{corners[2]}_{corners[3]}"

    def _get_cache_path(self, cache_key: str) -> Path:
        """Path of the JSON file backing *cache_key*."""
        return self.cache_path / (cache_key + ".json")

    def get(self, min_lat: float, min_lon: float, max_lat: float, max_lon: float) -> Optional[dict]:
        """Return the cached payload for a bbox, or None on miss/expiry."""
        cache_file = self._get_cache_path(
            self._get_cache_key(min_lat, min_lon, max_lat, max_lon)
        )

        if not cache_file.exists():
            return None

        try:
            wrapper = json.loads(cache_file.read_text())

            # Entries carry their write timestamp; stale ones are misses.
            stored_at = datetime.fromisoformat(wrapper.get('_cached_at', '2000-01-01'))
            if datetime.now() - stored_at > timedelta(days=self.CACHE_EXPIRY_DAYS):
                return None

            return wrapper.get('data')

        except Exception as e:
            print(f"[OSMCache] Failed to read cache: {e}")
            return None

    def set(self, min_lat: float, min_lon: float, max_lat: float, max_lon: float, data: dict):
        """Persist *data* for a bbox together with a write timestamp."""
        cache_file = self._get_cache_path(
            self._get_cache_key(min_lat, min_lon, max_lat, max_lon)
        )

        try:
            envelope = {
                '_cached_at': datetime.now().isoformat(),
                '_bbox': [min_lat, min_lon, max_lat, max_lon],
                'data': data,
            }
            cache_file.write_text(json.dumps(envelope))

        except Exception as e:
            print(f"[OSMCache] Failed to write cache: {e}")

    def clear(self):
        """Delete every cached entry for this cache type."""
        for entry in self.cache_path.glob("*.json"):
            entry.unlink()

    def get_size_mb(self) -> float:
        """Total on-disk size of this cache, in megabytes."""
        size_bytes = 0
        for entry in self.cache_path.glob("*.json"):
            size_bytes += entry.stat().st_size
        return size_bytes / (1024 * 1024)
|
|
|
|
|
|
class BuildingsService:
    """Fetch building footprints from Overpass, with a local bbox cache."""

    OVERPASS_URL = "https://overpass-api.de/api/interpreter"

    def __init__(self):
        self.cache = OSMCache('buildings')

    async def fetch_buildings(
        self,
        min_lat: float, min_lon: float,
        max_lat: float, max_lon: float
    ) -> list[dict]:
        """Return buildings inside the bbox, using the cache if possible.

        On any fetch/parse failure an empty list is returned (best-effort
        layer; the caller keeps working without buildings).
        """

        # Check cache first.
        cached = self.cache.get(min_lat, min_lon, max_lat, max_lon)
        if cached is not None:
            print(f"[Buildings] Cache hit for bbox")
            return cached

        # Fetch from Overpass.
        print(f"[Buildings] Fetching from Overpass API...")

        query = f"""
        [out:json][timeout:60];
        (
          way["building"]({min_lat},{min_lon},{max_lat},{max_lon});
        );
        out body;
        >;
        out skel qt;
        """

        try:
            async with httpx.AsyncClient(timeout=90.0) as client:
                response = await client.post(
                    self.OVERPASS_URL,
                    data={"data": query}
                )
                response.raise_for_status()
                data = response.json()

            # Guard: never cache/return None even if parsing yields it.
            buildings = self._parse_response(data) or []

            # Save to cache.
            self.cache.set(min_lat, min_lon, max_lat, max_lon, buildings)

            return buildings

        except Exception as e:
            print(f"[Buildings] Fetch error: {e}")
            return []

    def _parse_response(self, data: dict) -> list[dict]:
        """Convert an Overpass JSON response into a list of buildings.

        Each building dict has ``id``, ``coords`` (list of [lat, lon]
        ring points in way order) and ``tags`` (raw OSM tags). Ways whose
        node refs cannot be resolved to any coordinate are dropped.
        """
        elements = data.get('elements', [])

        # First pass: index node coordinates by OSM node id.
        nodes: dict[int, tuple[float, float]] = {}
        for el in elements:
            if el.get('type') == 'node':
                nodes[el['id']] = (el['lat'], el['lon'])

        # Second pass: resolve each way's node refs into coordinates.
        buildings: list[dict] = []
        for el in elements:
            if el.get('type') != 'way':
                continue

            coords = [
                [nodes[ref][0], nodes[ref][1]]
                for ref in el.get('nodes', [])
                if ref in nodes
            ]
            if not coords:
                continue

            buildings.append({
                'id': el.get('id'),
                'coords': coords,
                'tags': el.get('tags', {}),
            })

        return buildings


# Apply same pattern to WaterService and VegetationService
|
|
|
|
|
|
# Apply same pattern to WaterService and VegetationService
|
|
```
|
|
|
|
---
|
|
|
|
### Task 2.2.3: Region Download API (2-3 hours)
|
|
|
|
**backend/app/api/routes/regions.py:**
|
|
|
|
```python
|
|
from fastapi import APIRouter, BackgroundTasks, HTTPException
|
|
from pydantic import BaseModel
|
|
from typing import Optional
|
|
import asyncio
|
|
|
|
router = APIRouter(prefix="/api/regions", tags=["regions"])
|
|
|
|
# Predefined regions
|
|
REGIONS = {
|
|
"ukraine": {
|
|
"name": "Ukraine",
|
|
"bbox": [44.0, 22.0, 52.5, 40.5], # min_lat, min_lon, max_lat, max_lon
|
|
"srtm_tiles": 120,
|
|
"estimated_size_gb": 3.0,
|
|
},
|
|
"ukraine_east": {
|
|
"name": "Eastern Ukraine (Donbas)",
|
|
"bbox": [47.0, 34.0, 50.5, 40.5],
|
|
"srtm_tiles": 24,
|
|
"estimated_size_gb": 0.6,
|
|
},
|
|
"ukraine_central": {
|
|
"name": "Central Ukraine",
|
|
"bbox": [48.0, 30.0, 51.0, 36.0],
|
|
"srtm_tiles": 18,
|
|
"estimated_size_gb": 0.5,
|
|
},
|
|
"kyiv_region": {
|
|
"name": "Kyiv Region",
|
|
"bbox": [49.5, 29.5, 51.5, 32.5],
|
|
"srtm_tiles": 6,
|
|
"estimated_size_gb": 0.15,
|
|
},
|
|
}
|
|
|
|
# Download progress tracking
|
|
_download_tasks: dict[str, dict] = {}
|
|
|
|
|
|
class RegionInfo(BaseModel):
    """A downloadable region together with its local cache status."""

    id: str
    name: str
    bbox: list[float]  # [min_lat, min_lon, max_lat, max_lon]
    srtm_tiles: int  # number of 1x1 degree SRTM tiles the bbox spans
    estimated_size_gb: float
    downloaded: bool = False  # True once every needed tile is on disk
    download_progress: float = 0.0  # percent of needed tiles cached (0-100)
|
|
|
|
|
|
class DownloadProgress(BaseModel):
    """Snapshot of a region download task, polled by the frontend."""

    task_id: str
    region_id: str
    status: str  # queued, downloading_terrain, downloading_osm, done, error
    progress: float  # 0-100
    current_step: str  # human-readable description of the current phase
    downloaded_mb: float
    error: Optional[str] = None  # set only when status == "error"
|
|
|
|
|
|
@router.get("/available")
async def list_regions() -> list[RegionInfo]:
    """List downloadable regions with their current cache progress."""
    from app.services.terrain_service import terrain_service

    cached = set(terrain_service.get_cached_tiles())

    regions: list[RegionInfo] = []
    for region_id, info in REGIONS.items():
        min_lat, min_lon, max_lat, max_lon = info["bbox"]

        # Enumerate every 1x1 degree tile this bbox touches.
        needed = {
            terrain_service._get_tile_name(lat, lon)
            for lat in range(int(min_lat), int(max_lat) + 1)
            for lon in range(int(min_lon), int(max_lon) + 1)
        }

        # Progress = share of needed tiles already on disk.
        have = needed & cached
        pct = len(have) / len(needed) * 100 if needed else 0

        regions.append(RegionInfo(
            id=region_id,
            name=info["name"],
            bbox=info["bbox"],
            srtm_tiles=info["srtm_tiles"],
            estimated_size_gb=info["estimated_size_gb"],
            downloaded=pct >= 100,
            download_progress=pct,
        ))

    return regions
|
|
|
|
|
|
@router.post("/download/{region_id}")
async def start_download(region_id: str, background_tasks: BackgroundTasks) -> dict:
    """Kick off a background download of *region_id*; returns a task id."""
    if region_id not in REGIONS:
        raise HTTPException(404, f"Region '{region_id}' not found")

    # Reuse an in-flight task for this region instead of stacking up.
    for existing_id, existing in _download_tasks.items():
        still_running = existing["status"] not in ["done", "error"]
        if existing["region_id"] == region_id and still_running:
            return {"task_id": existing_id, "status": "already_downloading"}

    # Register a fresh task record for the poller to read.
    import uuid
    new_id = str(uuid.uuid4())[:8]

    _download_tasks[new_id] = {
        "region_id": region_id,
        "status": "queued",
        "progress": 0.0,
        "current_step": "Starting...",
        "downloaded_mb": 0.0,
        "error": None,
    }

    # Run the actual download after the response is sent.
    background_tasks.add_task(download_region_task, new_id, region_id)

    return {"task_id": new_id, "status": "started"}
|
|
|
|
|
|
async def download_region_task(task_id: str, region_id: str):
    """Background task: cache terrain (0-70%) then OSM data (70-100%).

    Progress, the current step description and any error are reported
    through ``_download_tasks[task_id]``, which the progress endpoint
    polls. Never raises — failures are captured into the task record.
    """
    from app.services.terrain_service import terrain_service
    from app.services.buildings_service import buildings_service
    from app.services.water_service import water_service
    from app.services.vegetation_service import vegetation_service

    task = _download_tasks[task_id]
    region = REGIONS[region_id]
    min_lat, min_lon, max_lat, max_lon = region["bbox"]

    try:
        # Phase 1: download SRTM tiles (0-70% of the progress bar).
        task["status"] = "downloading_terrain"
        task["current_step"] = "Downloading terrain data..."

        await terrain_service.ensure_tiles_for_bbox(
            min_lat, min_lon, max_lat, max_lon
        )

        task["progress"] = 70.0
        task["downloaded_mb"] = terrain_service.get_cache_size_mb()

        # Phase 2: pre-cache OSM data in 1-degree chunks (70-100%).
        task["status"] = "downloading_osm"
        task["current_step"] = "Downloading building data..."

        lat_steps = range(int(min_lat), int(max_lat) + 1)
        lon_steps = range(int(min_lon), int(max_lon) + 1)
        # Chunk count computed directly — the original ran an identical
        # double loop just to count iterations.
        total_chunks = len(lat_steps) * len(lon_steps)
        done_chunks = 0

        grid_size = 1.0  # 1 degree chunks

        for lat in lat_steps:
            for lon in lon_steps:
                chunk_min_lat = lat
                chunk_min_lon = lon
                chunk_max_lat = lat + grid_size
                chunk_max_lon = lon + grid_size

                # Each fetch stores its result in the per-service OSM
                # cache as a side effect; return values are not needed.
                await buildings_service.fetch_buildings(
                    chunk_min_lat, chunk_min_lon,
                    chunk_max_lat, chunk_max_lon
                )

                # Water (smaller, faster).
                await water_service.fetch_water_bodies(
                    chunk_min_lat, chunk_min_lon,
                    chunk_max_lat, chunk_max_lon
                )

                # Vegetation.
                await vegetation_service.fetch_vegetation(
                    chunk_min_lat, chunk_min_lon,
                    chunk_max_lat, chunk_max_lon
                )

                done_chunks += 1
                task["progress"] = 70 + (done_chunks / total_chunks) * 30
                task["current_step"] = f"OSM data: {done_chunks}/{total_chunks} chunks"

                # Small delay to avoid rate limiting the public Overpass
                # instance.
                await asyncio.sleep(1.0)

        task["status"] = "done"
        task["progress"] = 100.0
        task["current_step"] = "Complete!"

    except Exception as e:
        task["status"] = "error"
        task["error"] = str(e)
        task["current_step"] = f"Error: {e}"
|
|
|
|
|
|
@router.get("/download/{task_id}/progress")
async def get_download_progress(task_id: str) -> DownloadProgress:
    """Return the current state of a region download task."""
    task = _download_tasks.get(task_id)
    if task is None:
        raise HTTPException(404, "Task not found")

    return DownloadProgress(
        task_id=task_id,
        region_id=task["region_id"],
        status=task["status"],
        progress=task["progress"],
        current_step=task["current_step"],
        downloaded_mb=task["downloaded_mb"],
        error=task["error"],
    )
|
|
|
|
|
|
@router.delete("/cache")
async def clear_cache() -> dict:
    """Clear cached OSM data; terrain tiles are deliberately kept."""
    from app.services.terrain_service import terrain_service
    from app.services.buildings_service import buildings_service

    # Only the OSM layers are dropped here.
    buildings_service.cache.clear()
    # water_service.cache.clear()
    # vegetation_service.cache.clear()

    # SRTM tiles are preserved: re-downloading them is far more
    # expensive than re-querying Overpass.
    return {"status": "ok", "message": "OSM cache cleared"}
|
|
|
|
|
|
@router.get("/cache/stats")
async def get_cache_stats() -> dict:
    """Report on-disk cache sizes for the UI's storage panel."""
    from app.services.terrain_service import terrain_service
    from app.services.buildings_service import buildings_service

    stats = {
        "terrain_mb": terrain_service.get_cache_size_mb(),
        "terrain_tiles": len(terrain_service.get_cached_tiles()),
        "buildings_mb": buildings_service.cache.get_size_mb(),
        # "water_mb": water_service.cache.get_size_mb(),
        # "vegetation_mb": vegetation_service.cache.get_size_mb(),
    }
    return stats
|
|
```
|
|
|
|
---
|
|
|
|
### Task 2.2.4: First-Run Region Wizard (Frontend) (2-3 hours)
|
|
|
|
**frontend/src/components/RegionWizard.tsx:**
|
|
|
|
```typescript
|
|
import { useEffect, useRef, useState } from 'react';
import { apiService } from '../services/api';
|
|
|
|
// A downloadable region as reported by GET /api/regions/available.
interface Region {
  id: string;
  name: string;
  bbox: number[]; // [min_lat, min_lon, max_lat, max_lon]
  srtm_tiles: number; // number of 1x1 degree SRTM tiles the bbox spans
  estimated_size_gb: number;
  downloaded: boolean; // true once every needed tile is cached locally
  download_progress: number; // percent of needed tiles on disk (0-100)
}
|
|
|
|
// Snapshot of a running region download, polled from the backend.
interface DownloadProgress {
  task_id: string;
  status: string; // queued | downloading_terrain | downloading_osm | done | error
  progress: number; // 0-100
  current_step: string; // human-readable description of the current phase
  downloaded_mb: number;
  error?: string; // present only when status === 'error'
}
|
|
|
|
export function RegionWizard({ onComplete }: { onComplete: () => void }) {
|
|
const [regions, setRegions] = useState<Region[]>([]);
|
|
const [selectedRegion, setSelectedRegion] = useState<string | null>(null);
|
|
const [downloading, setDownloading] = useState(false);
|
|
const [progress, setProgress] = useState<DownloadProgress | null>(null);
|
|
|
|
useEffect(() => {
|
|
loadRegions();
|
|
}, []);
|
|
|
|
const loadRegions = async () => {
|
|
const data = await apiService.getRegions();
|
|
setRegions(data);
|
|
};
|
|
|
|
const startDownload = async () => {
|
|
if (!selectedRegion) return;
|
|
|
|
setDownloading(true);
|
|
const { task_id } = await apiService.downloadRegion(selectedRegion);
|
|
|
|
// Poll for progress
|
|
const interval = setInterval(async () => {
|
|
const prog = await apiService.getDownloadProgress(task_id);
|
|
setProgress(prog);
|
|
|
|
if (prog.status === 'done' || prog.status === 'error') {
|
|
clearInterval(interval);
|
|
setDownloading(false);
|
|
|
|
if (prog.status === 'done') {
|
|
onComplete();
|
|
}
|
|
}
|
|
}, 1000);
|
|
};
|
|
|
|
const skipDownload = () => {
|
|
// Store preference to not show again
|
|
localStorage.setItem('rfcp_region_wizard_skipped', 'true');
|
|
onComplete();
|
|
};
|
|
|
|
return (
|
|
<div className="region-wizard-overlay">
|
|
<div className="region-wizard">
|
|
<h1>Welcome to RFCP</h1>
|
|
<h2>RF Coverage Planner</h2>
|
|
|
|
<p>
|
|
Select a region to download for offline use.
|
|
This includes terrain elevation and building data.
|
|
</p>
|
|
|
|
{!downloading ? (
|
|
<>
|
|
<div className="region-list">
|
|
{regions.map(region => (
|
|
<div
|
|
key={region.id}
|
|
className={`region-item ${selectedRegion === region.id ? 'selected' : ''} ${region.downloaded ? 'downloaded' : ''}`}
|
|
onClick={() => setSelectedRegion(region.id)}
|
|
>
|
|
<div className="region-name">{region.name}</div>
|
|
<div className="region-size">~{region.estimated_size_gb} GB</div>
|
|
{region.downloaded && <div className="region-badge">✓ Downloaded</div>}
|
|
{region.download_progress > 0 && region.download_progress < 100 && (
|
|
<div className="region-progress">{region.download_progress.toFixed(0)}%</div>
|
|
)}
|
|
</div>
|
|
))}
|
|
</div>
|
|
|
|
<div className="wizard-actions">
|
|
<button
|
|
className="btn-primary"
|
|
onClick={startDownload}
|
|
disabled={!selectedRegion}
|
|
>
|
|
Download Selected Region
|
|
</button>
|
|
<button
|
|
className="btn-secondary"
|
|
onClick={skipDownload}
|
|
>
|
|
Skip (Online Mode)
|
|
</button>
|
|
</div>
|
|
</>
|
|
) : (
|
|
<div className="download-progress">
|
|
<div className="progress-bar">
|
|
<div
|
|
className="progress-fill"
|
|
style={{ width: `${progress?.progress || 0}%` }}
|
|
/>
|
|
</div>
|
|
<div className="progress-text">
|
|
{progress?.current_step || 'Starting...'}
|
|
</div>
|
|
<div className="progress-stats">
|
|
{progress?.downloaded_mb.toFixed(1)} MB downloaded
|
|
</div>
|
|
</div>
|
|
)}
|
|
</div>
|
|
</div>
|
|
);
|
|
}
|
|
```
|
|
|
|
**CSS (add to styles):**
|
|
|
|
```css
|
|
/* ---- Region wizard: full-screen modal shown on first run ---- */

/* Dimmed backdrop covering the whole viewport; hosts the wizard card. */
.region-wizard-overlay {
  position: fixed;
  inset: 0;
  background: rgba(0, 0, 0, 0.9);
  display: flex;
  align-items: center;
  justify-content: center;
  z-index: 9999;
}

/* The wizard card itself. */
.region-wizard {
  background: #1a1a2e;
  border-radius: 12px;
  padding: 40px;
  max-width: 500px;
  width: 90%;
  color: white;
}

/* Title with the app's cyan-to-green gradient rendered as text fill. */
.region-wizard h1 {
  font-size: 32px;
  background: linear-gradient(90deg, #00d4ff, #00ff88);
  -webkit-background-clip: text;
  -webkit-text-fill-color: transparent;
  margin-bottom: 8px;
}

/* Muted subtitle under the title. */
.region-wizard h2 {
  font-size: 14px;
  color: #888;
  margin-bottom: 24px;
}

/* ---- Selectable region list ---- */

.region-list {
  display: flex;
  flex-direction: column;
  gap: 8px;
  margin: 24px 0;
}

/* One region row; transparent border reserves space so selection
   doesn't shift the layout. */
.region-item {
  padding: 16px;
  background: #252540;
  border-radius: 8px;
  cursor: pointer;
  display: flex;
  align-items: center;
  gap: 16px;
  border: 2px solid transparent;
  transition: all 0.2s;
}

.region-item:hover {
  background: #303050;
}

/* Highlight the currently selected region. */
.region-item.selected {
  border-color: #00d4ff;
}

/* Already-downloaded regions are dimmed but still clickable. */
.region-item.downloaded {
  opacity: 0.7;
}

.region-name {
  flex: 1;
  font-weight: 500;
}

.region-size {
  color: #888;
  font-size: 14px;
}

/* Green "✓ Downloaded" pill. */
.region-badge {
  background: #00ff88;
  color: #000;
  padding: 4px 8px;
  border-radius: 4px;
  font-size: 12px;
}

/* ---- Action buttons ---- */

.wizard-actions {
  display: flex;
  gap: 12px;
  margin-top: 24px;
}

/* Primary call-to-action, matching the title gradient. */
.btn-primary {
  flex: 1;
  padding: 12px 24px;
  background: linear-gradient(90deg, #00d4ff, #00ff88);
  border: none;
  border-radius: 8px;
  color: #000;
  font-weight: 600;
  cursor: pointer;
}

.btn-primary:disabled {
  opacity: 0.5;
  cursor: not-allowed;
}

/* Low-emphasis "skip" button. */
.btn-secondary {
  padding: 12px 24px;
  background: transparent;
  border: 1px solid #444;
  border-radius: 8px;
  color: #888;
  cursor: pointer;
}

/* ---- Download progress view ---- */

.download-progress {
  margin-top: 24px;
}

/* Track; overflow hidden clips the fill to the rounded corners. */
.progress-bar {
  height: 8px;
  background: #333;
  border-radius: 4px;
  overflow: hidden;
}

/* Fill width is set inline from JS; transition smooths poll updates. */
.progress-fill {
  height: 100%;
  background: linear-gradient(90deg, #00d4ff, #00ff88);
  transition: width 0.3s;
}

.progress-text {
  margin-top: 12px;
  text-align: center;
  color: #888;
}

.progress-stats {
  margin-top: 8px;
  text-align: center;
  font-size: 14px;
  color: #666;
}
|
|
```
|
|
|
|
---
|
|
|
|
### Task 2.2.5: Integration & Testing (1-2 hours)
|
|
|
|
**Update App.tsx to show wizard on first run:**
|
|
|
|
```typescript
|
|
function App() {
  // Controls visibility of the first-run region download wizard.
  const [showWizard, setShowWizard] = useState(false);

  useEffect(() => {
    // Check if first run or no region downloaded. The wizard is only
    // offered on desktop builds, and never again once the user has
    // explicitly skipped it.
    const skipped = localStorage.getItem('rfcp_region_wizard_skipped');
    if (!skipped && isDesktop()) {
      checkRegionStatus();
    }
  }, []);

  // Opens the wizard when no region has been fully downloaded yet.
  const checkRegionStatus = async () => {
    const regions = await apiService.getRegions();
    const hasDownloaded = regions.some(r => r.downloaded);
    if (!hasDownloaded) {
      setShowWizard(true);
    }
  };

  return (
    <>
      {showWizard && <RegionWizard onComplete={() => setShowWizard(false)} />}
      {/* ... rest of app */}
    </>
  );
}
|
|
```
|
|
|
|
---
|
|
|
|
## 🧪 Testing
|
|
|
|
```bash
|
|
# Test cache stats
|
|
curl http://127.0.0.1:8888/api/regions/cache/stats
|
|
|
|
# Test available regions
|
|
curl http://127.0.0.1:8888/api/regions/available
|
|
|
|
# Start region download
|
|
curl -X POST http://127.0.0.1:8888/api/regions/download/kyiv_region
|
|
|
|
# Check progress
|
|
curl http://127.0.0.1:8888/api/regions/download/{task_id}/progress
|
|
```
|
|
|
|
**Test Scenarios:**
|
|
1. First run shows wizard
|
|
2. Can skip wizard (online mode)
|
|
3. Region download shows progress
|
|
4. After download, calculations are faster
|
|
5. Offline mode works (disconnect internet)
|
|
|
|
---
|
|
|
|
## ✅ Success Criteria
|
|
|
|
- [ ] SRTM tiles cached locally in `data/terrain/`
|
|
- [ ] OSM data cached locally in `data/osm/`
|
|
- [ ] Region wizard shows on first run
|
|
- [ ] Can download Ukraine East (~0.6GB) in ~10 min
|
|
- [ ] Calculations 10x faster with cached data
|
|
- [ ] Works fully offline after region download
|
|
- [ ] Cache stats API works
|
|
|
|
---
|
|
|
|
## 📊 Expected Performance Improvement
|
|
|
|
| Scenario | Before (Online) | After (Cached) |
|
|
|----------|-----------------|----------------|
|
|
| 5km Fast | ~30 sec | ~2 sec |
|
|
| 5km Full | ~3 min | ~15 sec |
|
|
| 10km Fast | ~2 min | ~5 sec |
|
|
| 10km Full | ~10 min | ~1 min |
|
|
|
|
---
|
|
|
|
## 🔜 Next: Phase 2.3
|
|
|
|
- [ ] GPU acceleration (CUDA/OpenCL)
|
|
- [ ] Offline map tiles (MBTiles)
|
|
- [ ] Auto-updater
|
|
- [ ] Linux/Mac builds
|
|
|
|
---
|
|
|
|
**Ready for Claude Code** 🚀
|