Phase 2.2: performance optimizations, debug tools, app close fix

This commit is contained in:
2026-01-31 20:31:53 +02:00
parent fb2b55caff
commit 26f8067c94
18 changed files with 1006 additions and 167 deletions

View File

@@ -19,7 +19,8 @@
"Bash(cp:*)", "Bash(cp:*)",
"Bash(mv:*)", "Bash(mv:*)",
"Read(*)", "Read(*)",
"Write(*)" "Write(*)",
"Bash(python3:*)"
] ]
} }
} }

1
.gitignore vendored
View File

@@ -23,3 +23,4 @@ installer/dist/
*.spec.bak *.spec.bak
__pycache__/ __pycache__/
*.pyc *.pyc
nul

View File

@@ -26,6 +26,12 @@ REGIONS = {
"srtm_tiles": 18, "srtm_tiles": 18,
"estimated_size_gb": 0.5, "estimated_size_gb": 0.5,
}, },
"ukraine_west": {
"name": "Western Ukraine",
"bbox": [48.0, 22.0, 51.0, 26.0],
"srtm_tiles": 12,
"estimated_size_gb": 0.3,
},
"kyiv_region": { "kyiv_region": {
"name": "Kyiv Region", "name": "Kyiv Region",
"bbox": [49.5, 29.5, 51.5, 32.5], "bbox": [49.5, 29.5, 51.5, 32.5],

View File

@@ -1,6 +1,44 @@
import math
import os
import sys
import time
import threading
import numpy as np import numpy as np
import asyncio import asyncio
from concurrent.futures import ThreadPoolExecutor
from typing import List, Optional, Tuple from typing import List, Optional, Tuple
_coverage_log_file = None
def _clog(msg: str):
"""Coverage debug log — always flushed, with timestamp and thread name.
Writes to stdout, stderr, AND a file so output is always available."""
global _coverage_log_file
ts = time.strftime('%H:%M:%S')
thr = threading.current_thread().name
line = f"[COVERAGE {ts}] [{thr}] {msg}"
print(line, flush=True)
# Backup: also write to stderr in case stdout is broken
try:
sys.stderr.write(line + '\n')
sys.stderr.flush()
except Exception:
pass
# Backup: also write to a file
try:
if _coverage_log_file is None:
log_dir = os.environ.get('RFCP_DATA_PATH', './data')
os.makedirs(log_dir, exist_ok=True)
log_path = os.path.join(log_dir, 'coverage-debug.log')
_coverage_log_file = open(log_path, 'a')
_coverage_log_file.write(f"\n{'='*60}\n")
_coverage_log_file.write(f"[COVERAGE {ts}] Log started\n")
_coverage_log_file.flush()
_coverage_log_file.write(line + '\n')
_coverage_log_file.flush()
except Exception:
pass
from pydantic import BaseModel from pydantic import BaseModel
from app.services.terrain_service import terrain_service, TerrainService from app.services.terrain_service import terrain_service, TerrainService
from app.services.los_service import los_service from app.services.los_service import los_service
@@ -142,6 +180,100 @@ class CoverageService:
self.buildings = buildings_service self.buildings = buildings_service
self.los = los_service self.los = los_service
async def _fetch_osm_grid_aligned(
    self,
    min_lat: float, min_lon: float,
    max_lat: float, max_lon: float,
    settings: CoverageSettings
) -> dict:
    """
    Fetch OSM data using 1-degree grid-aligned cells.

    Cells are floor-aligned to whole degrees so cache keys match the
    region-download grid and pre-cached data is actually reused.

    Returns:
        dict with "buildings", "streets", "water_bodies" and
        "vegetation_areas" lists, each aggregated over all cells.
    """
    t0 = time.time()

    # All 1°x1° cells overlapping the bbox (partial cells included).
    cells = [
        (float(lat_int), float(lon_int), float(lat_int + 1), float(lon_int + 1))
        for lat_int in range(int(math.floor(min_lat)), int(math.floor(max_lat)) + 1)
        for lon_int in range(int(math.floor(min_lon)), int(math.floor(max_lon)) + 1)
    ]

    buildings: List[Building] = []
    streets: List[Street] = []
    water_bodies: List[WaterBody] = []
    vegetation_areas: List[VegetationArea] = []
    # Per-layer fetch source; "skip" until fetched, last cell wins.
    cache_stats = {"buildings": "skip", "streets": "skip",
                   "water": "skip", "vegetation": "skip"}

    async def fetch_layer(label, stats_key, fetch_fn, dest, cell):
        """Fetch one OSM layer for one cell; time it and log the source."""
        c_min_lat, c_min_lon, c_max_lat, c_max_lon = cell
        t1 = time.time()
        chunk = await fetch_fn(c_min_lat, c_min_lon, c_max_lat, c_max_lon)
        dt = time.time() - t1
        # Heuristic: a sub-0.5s response is assumed to be a cache hit.
        src = "CACHE" if dt < 0.5 else "API"
        dest.extend(chunk)
        cache_stats[stats_key] = src
        cell_label = f"[{c_min_lat:.0f},{c_min_lon:.0f}]"
        _clog(f"{label} {cell_label}: {len(chunk)} items ({src}, {dt:.1f}s)")

    for cell in cells:
        if settings.use_buildings:
            await fetch_layer("Buildings", "buildings",
                              self.buildings.fetch_buildings, buildings, cell)
        if settings.use_street_canyon:
            await fetch_layer("Streets", "streets",
                              street_canyon_service.fetch_streets, streets, cell)
        if settings.use_water_reflection:
            await fetch_layer("Water", "water",
                              water_service.fetch_water_bodies, water_bodies, cell)
        if settings.use_vegetation:
            await fetch_layer("Vegetation", "vegetation",
                              vegetation_service.fetch_vegetation, vegetation_areas, cell)

    total_fetch = time.time() - t0
    _clog(f"OSM fetch total: {total_fetch:.1f}s "
          f"({len(cells)} cells, "
          f"{len(buildings)} bldgs, {len(streets)} streets, "
          f"{len(water_bodies)} water, {len(vegetation_areas)} veg)")
    _clog(f"Cache status: {cache_stats}")

    return {
        "buildings": buildings,
        "streets": streets,
        "water_bodies": water_bodies,
        "vegetation_areas": vegetation_areas,
    }
async def calculate_coverage( async def calculate_coverage(
self, self,
site: SiteParams, site: SiteParams,
@@ -152,6 +284,8 @@ class CoverageService:
Returns list of CoveragePoint with RSRP values Returns list of CoveragePoint with RSRP values
""" """
calc_start = time.time()
# Apply preset if specified # Apply preset if specified
settings = apply_preset(settings) settings = apply_preset(settings)
@@ -163,6 +297,7 @@ class CoverageService:
settings.radius, settings.radius,
settings.resolution settings.resolution
) )
_clog(f"Grid: {len(grid)} points, radius={settings.radius}m, res={settings.resolution}m")
# Calculate bbox for data fetching # Calculate bbox for data fetching
lat_delta = settings.radius / 111000 lat_delta = settings.radius / 111000
@@ -173,48 +308,80 @@ class CoverageService:
min_lon = site.lon - lon_delta min_lon = site.lon - lon_delta
max_lon = site.lon + lon_delta max_lon = site.lon + lon_delta
# Fetch buildings (if enabled) and build spatial index _clog(f"Bbox: [{min_lat:.4f}, {min_lon:.4f}, {max_lat:.4f}, {max_lon:.4f}]")
buildings: List[Building] = []
# ━━━ PHASE 1: Fetch OSM data ━━━
_clog("━━━ PHASE 1: Fetching OSM data ━━━")
t_osm = time.time()
osm_data = await self._fetch_osm_grid_aligned(
min_lat, min_lon, max_lat, max_lon, settings
)
osm_time = time.time() - t_osm
buildings = osm_data["buildings"]
streets = osm_data["streets"]
water_bodies = osm_data["water_bodies"]
vegetation_areas = osm_data["vegetation_areas"]
_clog(f"━━━ PHASE 1 done: {osm_time:.1f}s ━━━")
# Build spatial index for buildings
spatial_idx: Optional[SpatialIndex] = None spatial_idx: Optional[SpatialIndex] = None
if settings.use_buildings: if buildings:
buildings = await self.buildings.fetch_buildings( cache_key = f"{min_lat:.3f},{min_lon:.3f},{max_lat:.3f},{max_lon:.3f}"
min_lat, min_lon, max_lat, max_lon spatial_idx = get_spatial_index(cache_key, buildings)
)
if buildings:
cache_key = f"{min_lat:.3f},{min_lon:.3f},{max_lat:.3f},{max_lon:.3f}"
spatial_idx = get_spatial_index(cache_key, buildings)
# Fetch streets (if street canyon enabled) # ━━━ PHASE 2: Pre-load terrain ━━━
streets: List[Street] = [] _clog("━━━ PHASE 2: Pre-loading terrain ━━━")
if settings.use_street_canyon: t_terrain = time.time()
streets = await street_canyon_service.fetch_streets( tile_names = await self.terrain.ensure_tiles_for_bbox(
min_lat, min_lon, max_lat, max_lon min_lat, min_lon, max_lat, max_lon
) )
for tn in tile_names:
self.terrain._load_tile(tn)
# Fetch water bodies (if water reflection enabled) site_elevation = self.terrain.get_elevation_sync(site.lat, site.lon)
water_bodies: List[WaterBody] = []
if settings.use_water_reflection:
water_bodies = await water_service.fetch_water_bodies(
min_lat, min_lon, max_lat, max_lon
)
# Fetch vegetation (if enabled) point_elevations = {}
vegetation_areas: List[VegetationArea] = []
if settings.use_vegetation:
vegetation_areas = await vegetation_service.fetch_vegetation(
min_lat, min_lon, max_lat, max_lon
)
# Calculate coverage for each point
for lat, lon in grid: for lat, lon in grid:
point = await self._calculate_point( point_elevations[(lat, lon)] = self.terrain.get_elevation_sync(lat, lon)
site, lat, lon, terrain_time = time.time() - t_terrain
settings, buildings, streets, _clog(f"Tiles: {len(tile_names)}, site elev: {site_elevation:.0f}m, "
spatial_idx, water_bodies, vegetation_areas f"pre-computed {len(grid)} elevations")
) _clog(f"━━━ PHASE 2 done: {terrain_time:.1f}s ━━━")
if point.rsrp >= settings.min_signal: # ━━━ PHASE 3: Point calculation (sync, in thread pool) ━━━
points.append(point) _clog(f"━━━ PHASE 3: Calculating {len(grid)} points (threaded) ━━━")
dominant_path_service._log_count = 0 # Reset diagnostic counter
t_points = time.time()
loop = asyncio.get_event_loop()
points, timing = await loop.run_in_executor(
None,
self._run_point_loop,
grid, site, settings, buildings, streets,
spatial_idx, water_bodies, vegetation_areas,
site_elevation, point_elevations
)
points_time = time.time() - t_points
total_time = time.time() - calc_start
_clog(f"━━━ PHASE 3 done: {points_time:.1f}s ━━━")
_clog("=== RESULTS ===")
_clog(f" Grid points: {len(grid)}")
_clog(f" Result points: {len(points)}")
_clog(f" OSM fetch: {osm_time:.1f}s")
_clog(f" Terrain pre-load:{terrain_time:.1f}s")
_clog(f" Point calc: {points_time:.1f}s "
f"({points_time/max(1,len(grid))*1000:.1f}ms/point)")
_clog(f" TOTAL: {total_time:.1f}s")
_clog(f" Tiles in memory: {len(self.terrain._tile_cache)}")
if any(v > 0.001 for v in timing.values()):
_clog("=== PER-STEP BREAKDOWN ===")
for step, dt in timing.items():
if dt > 0.001:
_clog(f" {step:20s} {dt:.3f}s "
f"({dt/max(1,len(grid))*1000:.2f}ms/point)")
return points return points
@@ -280,7 +447,36 @@ class CoverageService:
return points return points
async def _calculate_point( def _run_point_loop(
self, grid, site, settings, buildings, streets,
spatial_idx, water_bodies, vegetation_areas,
site_elevation, point_elevations
):
"""Sync point loop - runs in ThreadPoolExecutor, bypasses event loop."""
points = []
timing = {"los": 0.0, "buildings": 0.0, "antenna": 0.0,
"dominant_path": 0.0, "street_canyon": 0.0,
"reflection": 0.0, "vegetation": 0.0}
total = len(grid)
log_interval = max(1, total // 20)
for i, (lat, lon) in enumerate(grid):
if i % log_interval == 0:
_clog(f"Progress: {i}/{total} ({i*100//total}%)")
point = self._calculate_point_sync(
site, lat, lon, settings, buildings, streets,
spatial_idx, water_bodies, vegetation_areas,
site_elevation, point_elevations.get((lat, lon), 0.0),
timing
)
if point.rsrp >= settings.min_signal:
points.append(point)
_clog(f"Progress: {total}/{total} (100%)")
return points, timing
def _calculate_point_sync(
self, self,
site: SiteParams, site: SiteParams,
lat: float, lon: float, lat: float, lon: float,
@@ -289,58 +485,62 @@ class CoverageService:
streets: List[Street], streets: List[Street],
spatial_idx: Optional[SpatialIndex], spatial_idx: Optional[SpatialIndex],
water_bodies: List[WaterBody], water_bodies: List[WaterBody],
vegetation_areas: List[VegetationArea] vegetation_areas: List[VegetationArea],
site_elevation: float,
point_elevation: float,
timing: dict
) -> CoveragePoint: ) -> CoveragePoint:
"""Calculate RSRP at a single point with all propagation models""" """Fully synchronous point calculation. All terrain tiles must be pre-loaded."""
# Distance # Distance
distance = TerrainService.haversine_distance(site.lat, site.lon, lat, lon) distance = TerrainService.haversine_distance(site.lat, site.lon, lat, lon)
if distance < 1: if distance < 1:
distance = 1 # Avoid division by zero distance = 1
# Base path loss (Okumura-Hata for urban) # Base path loss
path_loss = self._okumura_hata( path_loss = self._okumura_hata(distance, site.frequency, site.height, 1.5)
distance, site.frequency, site.height, 1.5
)
# Antenna pattern loss (if directional) # Antenna pattern
antenna_loss = 0.0 antenna_loss = 0.0
if site.azimuth is not None and site.beamwidth: if site.azimuth is not None and site.beamwidth:
t0 = time.time()
antenna_loss = self._antenna_pattern_loss( antenna_loss = self._antenna_pattern_loss(
site.lat, site.lon, lat, lon, site.lat, site.lon, lat, lon, site.azimuth, site.beamwidth
site.azimuth, site.beamwidth
) )
timing["antenna"] += time.time() - t0
# Terrain loss (LoS check) # Terrain LOS (sync)
terrain_loss = 0.0 terrain_loss = 0.0
has_los = True has_los = True
if settings.use_terrain: if settings.use_terrain:
los_result = await self.los.check_line_of_sight( t0 = time.time()
site.lat, site.lon, site.height, los_result = self.los.check_line_of_sight_sync(
lat, lon, 1.5 site.lat, site.lon, site.height, lat, lon, 1.5
) )
has_los = los_result["has_los"] has_los = los_result["has_los"]
if not has_los: if not has_los:
clearance = los_result["clearance"] terrain_loss = self._diffraction_loss(
terrain_loss = self._diffraction_loss(clearance, site.frequency) los_result["clearance"], site.frequency
)
timing["los"] += time.time() - t0
# Building loss — use spatial index for fast lookup # Building loss (spatial index)
building_loss = 0.0 building_loss = 0.0
t0 = time.time()
nearby_buildings = ( nearby_buildings = (
spatial_idx.query_line(site.lat, site.lon, lat, lon) spatial_idx.query_line(site.lat, site.lon, lat, lon)
if spatial_idx else buildings if spatial_idx else buildings
) )
if settings.use_buildings and nearby_buildings: if settings.use_buildings and nearby_buildings:
site_total_h = site.height + site_elevation
point_total_h = 1.5 + point_elevation
if settings.use_materials: if settings.use_materials:
for building in nearby_buildings: for building in nearby_buildings:
intersection = self.buildings.line_intersects_building( intersection = self.buildings.line_intersects_building(
site.lat, site.lon, site.height + await self.terrain.get_elevation(site.lat, site.lon), site.lat, site.lon, site_total_h,
lat, lon, 1.5 + await self.terrain.get_elevation(lat, lon), lat, lon, point_total_h, building
building
) )
if intersection is not None: if intersection is not None:
material = materials_service.detect_material(building.tags) material = materials_service.detect_material(building.tags)
@@ -352,21 +552,23 @@ class CoverageService:
else: else:
for building in nearby_buildings: for building in nearby_buildings:
intersection = self.buildings.line_intersects_building( intersection = self.buildings.line_intersects_building(
site.lat, site.lon, site.height + await self.terrain.get_elevation(site.lat, site.lon), site.lat, site.lon, site_total_h,
lat, lon, 1.5 + await self.terrain.get_elevation(lat, lon), lat, lon, point_total_h, building
building
) )
if intersection is not None: if intersection is not None:
building_loss += 20.0 building_loss += 20.0
has_los = False has_los = False
break break
timing["buildings"] += time.time() - t0
# Dominant path analysis # Dominant path (sync) — uses spatial index for O(1) building lookups
if settings.use_dominant_path and nearby_buildings: if settings.use_dominant_path and (spatial_idx or nearby_buildings):
paths = await dominant_path_service.find_dominant_paths( t0 = time.time()
paths = dominant_path_service.find_dominant_paths_sync(
site.lat, site.lon, site.height, site.lat, site.lon, site.height,
lat, lon, 1.5, lat, lon, 1.5,
site.frequency, nearby_buildings site.frequency, nearby_buildings,
spatial_idx=spatial_idx
) )
if paths: if paths:
best_path = paths[0] best_path = paths[0]
@@ -375,10 +577,12 @@ class CoverageService:
terrain_loss = 0 terrain_loss = 0
building_loss = 0 building_loss = 0
has_los = best_path.path_type == "direct" and not best_path.materials_crossed has_los = best_path.path_type == "direct" and not best_path.materials_crossed
timing["dominant_path"] += time.time() - t0
# Street canyon model # Street canyon (sync)
if settings.use_street_canyon and streets: if settings.use_street_canyon and streets:
canyon_loss, street_path = await street_canyon_service.calculate_street_canyon_loss( t0 = time.time()
canyon_loss, _street_path = street_canyon_service.calculate_street_canyon_loss_sync(
site.lat, site.lon, site.height, site.lat, site.lon, site.height,
lat, lon, 1.5, lat, lon, 1.5,
site.frequency, streets site.frequency, streets
@@ -387,80 +591,76 @@ class CoverageService:
path_loss = canyon_loss path_loss = canyon_loss
terrain_loss = 0 terrain_loss = 0
building_loss = 0 building_loss = 0
timing["street_canyon"] += time.time() - t0
# Vegetation loss # Vegetation (already sync)
veg_loss = 0.0 veg_loss = 0.0
if settings.use_vegetation and vegetation_areas: if settings.use_vegetation and vegetation_areas:
t0 = time.time()
veg_loss = vegetation_service.calculate_vegetation_loss( veg_loss = vegetation_service.calculate_vegetation_loss(
site.lat, site.lon, lat, lon, site.lat, site.lon, lat, lon, vegetation_areas, settings.season
vegetation_areas, settings.season
) )
timing["vegetation"] += time.time() - t0
# Reflections (building + ground/water) # Reflections (sync)
reflection_gain = 0.0 reflection_gain = 0.0
if settings.use_reflections and nearby_buildings: if settings.use_reflections and nearby_buildings:
t0 = time.time()
is_over_water = False is_over_water = False
if settings.use_water_reflection and water_bodies: if settings.use_water_reflection and water_bodies:
is_over_water = water_service.point_over_water(lat, lon, water_bodies) is not None is_over_water = water_service.point_over_water(lat, lon, water_bodies) is not None
reflection_paths = await reflection_service.find_reflection_paths( refl_paths = reflection_service.find_reflection_paths_sync(
site.lat, site.lon, site.height, site.lat, site.lon, site.height,
lat, lon, 1.5, lat, lon, 1.5,
site.frequency, nearby_buildings, site.frequency, nearby_buildings,
include_ground=True include_ground=True
) )
# If over water, replace ground reflection with stronger water reflection if is_over_water and refl_paths:
if is_over_water and reflection_paths:
water_path = reflection_service._calculate_ground_reflection( water_path = reflection_service._calculate_ground_reflection(
site.lat, site.lon, site.height, site.lat, site.lon, site.height,
lat, lon, 1.5, lat, lon, 1.5,
site.frequency, is_water=True site.frequency, is_water=True
) )
if water_path: if water_path:
reflection_paths = [ refl_paths = [p for p in refl_paths if "ground" not in p.materials]
p for p in reflection_paths if "ground" not in p.materials refl_paths.append(water_path)
] refl_paths.sort(key=lambda p: p.total_loss)
reflection_paths.append(water_path)
reflection_paths.sort(key=lambda p: p.total_loss)
if reflection_paths: if refl_paths:
direct_rsrp = site.power + site.gain - path_loss - antenna_loss - terrain_loss - building_loss - veg_loss direct_rsrp = (site.power + site.gain - path_loss - antenna_loss
- terrain_loss - building_loss - veg_loss)
combined_rsrp = reflection_service.combine_paths( combined_rsrp = reflection_service.combine_paths(
direct_rsrp, reflection_paths, site.power + site.gain direct_rsrp, refl_paths, site.power + site.gain
) )
reflection_gain = max(0, combined_rsrp - direct_rsrp) reflection_gain = max(0, combined_rsrp - direct_rsrp)
timing["reflection"] += time.time() - t0
elif settings.use_water_reflection and water_bodies and not settings.use_reflections: elif settings.use_water_reflection and water_bodies and not settings.use_reflections:
# Water reflection without full reflection model
is_over_water = water_service.point_over_water(lat, lon, water_bodies) is not None is_over_water = water_service.point_over_water(lat, lon, water_bodies) is not None
if is_over_water: if is_over_water:
reflection_gain = 3.0 # ~3dB boost over water reflection_gain = 3.0
# Rain attenuation # Rain
rain_loss = 0.0 rain_loss = 0.0
if settings.rain_rate > 0: if settings.rain_rate > 0:
rain_loss = weather_service.calculate_rain_attenuation( rain_loss = weather_service.calculate_rain_attenuation(
site.frequency, site.frequency, distance / 1000, settings.rain_rate
distance / 1000, # km
settings.rain_rate
) )
# Indoor penetration loss # Indoor
indoor_loss = 0.0 indoor_loss = 0.0
if settings.indoor_loss_type != "none": if settings.indoor_loss_type != "none":
indoor_loss = indoor_service.calculate_indoor_loss( indoor_loss = indoor_service.calculate_indoor_loss(
site.frequency, site.frequency, settings.indoor_loss_type
settings.indoor_loss_type
) )
# Atmospheric absorption # Atmospheric
atmo_loss = 0.0 atmo_loss = 0.0
if settings.use_atmospheric: if settings.use_atmospheric:
atmo_loss = atmospheric_service.calculate_atmospheric_loss( atmo_loss = atmospheric_service.calculate_atmospheric_loss(
site.frequency, site.frequency, distance / 1000,
distance / 1000, settings.temperature_c, settings.humidity_percent
settings.temperature_c,
settings.humidity_percent
) )
# Final RSRP # Final RSRP
@@ -470,18 +670,11 @@ class CoverageService:
+ reflection_gain) + reflection_gain)
return CoveragePoint( return CoveragePoint(
lat=lat, lat=lat, lon=lon, rsrp=rsrp, distance=distance,
lon=lon, has_los=has_los, terrain_loss=terrain_loss,
rsrp=rsrp, building_loss=building_loss, reflection_gain=reflection_gain,
distance=distance, vegetation_loss=veg_loss, rain_loss=rain_loss,
has_los=has_los, indoor_loss=indoor_loss, atmospheric_loss=atmo_loss,
terrain_loss=terrain_loss,
building_loss=building_loss,
reflection_gain=reflection_gain,
vegetation_loss=veg_loss,
rain_loss=rain_loss,
indoor_loss=indoor_loss,
atmospheric_loss=atmo_loss,
) )
def _okumura_hata( def _okumura_hata(

View File

@@ -1,10 +1,14 @@
import time
import numpy as np import numpy as np
from typing import List, Tuple, Optional from typing import List, Tuple, Optional, TYPE_CHECKING
from dataclasses import dataclass from dataclasses import dataclass
from app.services.terrain_service import terrain_service from app.services.terrain_service import terrain_service
from app.services.buildings_service import buildings_service, Building from app.services.buildings_service import buildings_service, Building
from app.services.materials_service import materials_service, BuildingMaterial from app.services.materials_service import materials_service, BuildingMaterial
if TYPE_CHECKING:
from app.services.spatial_index import SpatialIndex
@dataclass @dataclass
class RayPath: class RayPath:
@@ -30,6 +34,7 @@ class DominantPathService:
MAX_REFLECTIONS = 2 MAX_REFLECTIONS = 2
MAX_PATHS = 3 MAX_PATHS = 3
_log_count = 0 # Counter for diagnostic logging
async def find_dominant_paths( async def find_dominant_paths(
self, self,
@@ -391,4 +396,250 @@ class DominantPathService:
return 13 + 20 * np.log10(v) return 13 + 20 * np.log10(v)
# ── Sync versions (terrain tiles must be pre-loaded) ──
def find_dominant_paths_sync(
    self,
    tx_lat: float, tx_lon: float, tx_height: float,
    rx_lat: float, rx_lon: float, rx_height: float,
    frequency_mhz: float,
    buildings: List[Building],
    spatial_idx: 'Optional[SpatialIndex]' = None
) -> List[RayPath]:
    """Sync version - uses spatial index for O(1) building lookups.

    Terrain tiles must be pre-loaded before calling.

    Args:
        buildings: fallback list (only used if spatial_idx is None)
        spatial_idx: grid-based spatial index for fast local queries

    Returns:
        Up to MAX_PATHS candidate RayPaths, sorted by path loss.
    """
    paths = []

    # The direct path only needs buildings along the TX→RX line.
    if spatial_idx:
        line_buildings = spatial_idx.query_line(tx_lat, tx_lon, rx_lat, rx_lon)
    else:
        line_buildings = buildings

    direct = self._check_direct_path_sync(
        tx_lat, tx_lon, tx_height,
        rx_lat, rx_lon, rx_height,
        frequency_mhz, line_buildings
    )
    if direct:
        paths.append(direct)

    # Early termination: a valid, unobstructed direct path cannot be beaten
    # by reflection/diffraction candidates, so skip the expensive search.
    early_exit = bool(direct and direct.is_valid and not direct.materials_crossed)

    # For reflections, only check buildings near the midpoint (~500m).
    # Lazily skipped on early exit so the fast path stays fast.
    reflection_buildings = None
    if not early_exit:
        if spatial_idx:
            mid_lat = (tx_lat + rx_lat) / 2
            mid_lon = (tx_lon + rx_lon) / 2
            # buffer_cells=5 with 0.001° cell ≈ 555m radius
            reflection_buildings = spatial_idx.query_point(mid_lat, mid_lon, buffer_cells=5)
        else:
            reflection_buildings = buildings

    # Log building counts for first 3 points so user can verify filtering.
    # BUGFIX: this used to run *after* the early-exit return above, so the
    # 'early_exit=YES' case could never actually be logged.
    DominantPathService._log_count += 1
    if DominantPathService._log_count <= 3:
        import sys
        refl_count = len(reflection_buildings) if reflection_buildings is not None else 0
        msg = (f"[DOMINANT_PATH] Point #{DominantPathService._log_count}: "
               f"line_bldgs={len(line_buildings)}, "
               f"refl_bldgs={refl_count}, "
               f"total_available={len(buildings)}, "
               f"spatial_idx={'YES' if spatial_idx else 'NO'}, "
               f"early_exit={'YES' if early_exit else 'NO'}")
        print(msg, flush=True)
        try:
            sys.stderr.write(msg + '\n')
            sys.stderr.flush()
        except Exception:
            pass

    if early_exit:
        return [direct]

    reflections = self._find_reflection_paths_sync(
        tx_lat, tx_lon, tx_height,
        rx_lat, rx_lon, rx_height,
        frequency_mhz, reflection_buildings
    )
    paths.extend(reflections[:2])

    if not direct or not direct.is_valid:
        diffracted = self._find_diffraction_path_sync(
            tx_lat, tx_lon, tx_height,
            rx_lat, rx_lon, rx_height,
            frequency_mhz, spatial_idx=spatial_idx, buildings_fallback=buildings
        )
        if diffracted:
            paths.append(diffracted)

    paths.sort(key=lambda p: p.path_loss)
    return paths[:self.MAX_PATHS]
def _check_direct_path_sync(
    self,
    tx_lat, tx_lon, tx_height,
    rx_lat, rx_lon, rx_height,
    frequency_mhz,
    buildings: List[Building]
) -> Optional[RayPath]:
    """Synchronous direct-path check via sync LOS.

    `buildings` is expected to be pre-filtered to the TX→RX corridor.
    Returns an invalid RayPath (infinite loss) when terrain blocks the
    ray; otherwise a direct RayPath whose loss includes penetration
    through up to three pierced walls.
    """
    from app.services.los_service import los_service

    distance = terrain_service.haversine_distance(tx_lat, tx_lon, rx_lat, rx_lon)
    los = los_service.check_line_of_sight_sync(
        tx_lat, tx_lon, tx_height,
        rx_lat, rx_lon, rx_height
    )

    # Terrain blocks the ray entirely → report an invalid direct path.
    if not los["has_los"]:
        return RayPath(
            path_type="direct",
            total_distance=distance,
            path_loss=float('inf'),
            reflection_points=[],
            materials_crossed=[],
            is_valid=False,
        )

    # Collect wall materials pierced by the ray; give up after 3 walls.
    walls = []
    for candidate in buildings:
        if self._line_intersects_building_3d(
            tx_lat, tx_lon, tx_height,
            rx_lat, rx_lon, rx_height,
            candidate
        ):
            walls.append(materials_service.detect_material(candidate.tags))
            if len(walls) >= 3:
                break

    loss = self._calculate_path_loss(distance, frequency_mhz, tx_height, rx_height)
    loss += sum(materials_service.get_penetration_loss(m, frequency_mhz) for m in walls)

    return RayPath(
        path_type="direct",
        total_distance=distance,
        path_loss=loss,
        reflection_points=[],
        materials_crossed=walls,
        is_valid=len(walls) < 3,
    )
def _find_reflection_paths_sync(
    self,
    tx_lat, tx_lon, tx_height,
    rx_lat, rx_lon, rx_height,
    frequency_mhz,
    buildings: List[Building]
) -> List[RayPath]:
    """Synchronous single-bounce reflections off building walls.

    `buildings` should already be spatially filtered to the nearby area.
    Candidates whose bounced length exceeds twice the direct distance
    are discarded as too weak to matter.
    """
    direct_distance = terrain_service.haversine_distance(tx_lat, tx_lon, rx_lat, rx_lon)
    results: List[RayPath] = []

    for candidate in buildings:
        hit = self._find_reflection_point(
            tx_lat, tx_lon, rx_lat, rx_lon, candidate
        )
        if not hit:
            continue
        ref_lat, ref_lon = hit

        # Bounced path length: TX → wall → RX.
        total = (terrain_service.haversine_distance(tx_lat, tx_lon, ref_lat, ref_lon)
                 + terrain_service.haversine_distance(ref_lat, ref_lon, rx_lat, rx_lon))
        if total > direct_distance * 2:
            continue

        loss = self._calculate_path_loss(total, frequency_mhz, tx_height, rx_height)
        loss += materials_service.get_reflection_loss(
            materials_service.detect_material(candidate.tags)
        )
        results.append(RayPath(
            path_type="reflected",
            total_distance=total,
            path_loss=loss,
            reflection_points=[(ref_lat, ref_lon)],
            materials_crossed=[],
            is_valid=True,
        ))

    return results
def _find_diffraction_path_sync(
    self,
    tx_lat, tx_lon, tx_height,
    rx_lat, rx_lon, rx_height,
    frequency_mhz,
    spatial_idx: 'Optional[SpatialIndex]' = None,
    buildings_fallback: Optional[List[Building]] = None
) -> Optional[RayPath]:
    """Sync knife-edge diffraction over the single dominant obstacle.

    Samples the TX→RX line, picks the highest obstruction (terrain or
    building roof-line) and adds single knife-edge loss to the base
    path loss. Uses spatial_idx.query_point at each sample for O(1)
    building lookup when available.
    """
    # NOTE(review): obstacles at or below 0m elevation are never
    # registered — fine inland, wrong for below-sea-level terrain.
    max_height = 0
    obstacle_lat, obstacle_lon = None, None
    num_samples = 20

    for i in range(1, num_samples - 1):
        t = i / num_samples
        lat = tx_lat + t * (rx_lat - tx_lat)
        lon = tx_lon + t * (rx_lon - tx_lon)

        terrain_elev = terrain_service.get_elevation_sync(lat, lon)
        if terrain_elev > max_height:
            max_height = terrain_elev
            obstacle_lat, obstacle_lon = lat, lon

        # Use spatial index for O(1) lookup at this sample point
        if spatial_idx:
            local_buildings = spatial_idx.query_point(lat, lon, buffer_cells=1)
        else:
            local_buildings = buildings_fallback or []
        for building in local_buildings:
            if buildings_service.point_in_building(lat, lon, building):
                # BUGFIX: building.height is metres above ground (coverage
                # code sums elevation + height the same way), while
                # max_height holds an absolute elevation — compare the
                # absolute roof elevation, not the bare AGL height.
                roof_elev = terrain_elev + building.height
                if roof_elev > max_height:
                    max_height = roof_elev
                    obstacle_lat, obstacle_lon = lat, lon

    # BUGFIX: explicit None check — `not obstacle_lat` also fired for a
    # legitimate obstacle located at latitude 0.0.
    if obstacle_lat is None:
        return None

    distance = terrain_service.haversine_distance(tx_lat, tx_lon, rx_lat, rx_lon)
    tx_elev = terrain_service.get_elevation_sync(tx_lat, tx_lon)
    rx_elev = terrain_service.get_elevation_sync(rx_lat, rx_lon)
    tx_total = tx_elev + tx_height
    rx_total = rx_elev + rx_height

    # Height of the straight TX→RX ray above the obstacle position.
    d1 = terrain_service.haversine_distance(tx_lat, tx_lon, obstacle_lat, obstacle_lon)
    los_height = tx_total + (rx_total - tx_total) * (d1 / distance) if distance > 0 else tx_total
    clearance = los_height - max_height

    diffraction_loss = self._knife_edge_loss(clearance, frequency_mhz, distance, d1)
    path_loss = self._calculate_path_loss(distance, frequency_mhz, tx_height, rx_height)
    path_loss += diffraction_loss

    return RayPath(
        path_type="diffracted",
        total_distance=distance,
        path_loss=path_loss,
        reflection_points=[(obstacle_lat, obstacle_lon)],
        materials_crossed=[],
        is_valid=True
    )
dominant_path_service = DominantPathService() dominant_path_service = DominantPathService()

View File

@@ -98,6 +98,59 @@ class LineOfSightService:
"profile": profile "profile": profile
} }
def check_line_of_sight_sync(
    self,
    tx_lat: float, tx_lon: float, tx_height: float,
    rx_lat: float, rx_lon: float, rx_height: float = 1.5,
    num_samples: int = 50
) -> dict:
    """
    Synchronous LOS check — terrain tiles must already be in memory.

    Returns a dict with has_los, clearance (minimum over the path) and
    blocked_at (distance of the last blocked sample). The elevation
    profile itself is omitted for speed.
    """
    profile = self.terrain.get_elevation_profile_sync(
        tx_lat, tx_lon, rx_lat, rx_lon, num_samples
    )
    if not profile:
        return {"has_los": True, "clearance": 0, "blocked_at": None}

    tx_total = profile[0]["elevation"] + tx_height
    rx_total = profile[-1]["elevation"] + rx_height
    total_distance = profile[-1]["distance"]
    effective_radius = self.K_FACTOR * self.EARTH_RADIUS

    min_clearance = float('inf')
    blocked_at = None
    for sample in profile:
        d = sample["distance"]
        # Straight-line ray height between antennas at this distance…
        if total_distance == 0:
            ray_height = tx_total
        else:
            ray_height = tx_total + (rx_total - tx_total) * (d / total_distance)
        # …lowered by effective-earth-radius (k-factor) curvature.
        curvature = (d * (total_distance - d)) / (2 * effective_radius)
        clearance = (ray_height - curvature) - sample["elevation"]
        min_clearance = min(min_clearance, clearance)
        if clearance <= 0:
            blocked_at = d

    return {
        "has_los": min_clearance > 0,
        "clearance": min_clearance,
        "blocked_at": blocked_at,
    }
async def calculate_fresnel_clearance( async def calculate_fresnel_clearance(
self, self,
tx_lat: float, tx_lon: float, tx_height: float, tx_lat: float, tx_lon: float, tx_height: float,

View File

@@ -273,4 +273,37 @@ class ReflectionService:
return 10 * np.log10(total_power) return 10 * np.log10(total_power)
def find_reflection_paths_sync(
    self,
    tx_lat: float, tx_lon: float, tx_height: float,
    rx_lat: float, rx_lon: float, rx_height: float,
    frequency_mhz: float,
    buildings: List[Building],
    include_ground: bool = True
) -> List[ReflectionPath]:
    """Sync version (no I/O in the async original).

    Collects candidate single-bounce reflections off each building,
    optionally adds the ground-reflection path, and returns the five
    lowest-loss paths ordered by total loss.
    """
    single_bounce = (
        self._find_single_bounce(
            tx_lat, tx_lon, tx_height,
            rx_lat, rx_lon, rx_height,
            frequency_mhz, building
        )
        for building in buildings
    )
    candidates = [p for p in single_bounce if p]

    if include_ground:
        ground = self._calculate_ground_reflection(
            tx_lat, tx_lon, tx_height,
            rx_lat, rx_lon, rx_height,
            frequency_mhz
        )
        if ground:
            candidates.append(ground)

    # Keep only the strongest (lowest-loss) five paths.
    return sorted(candidates, key=lambda p: p.total_loss)[:5]
reflection_service = ReflectionService() reflection_service = ReflectionService()

View File

@@ -21,6 +21,7 @@ class SpatialIndex:
self.cell_size = cell_size self.cell_size = cell_size
self._grid: Dict[Tuple[int, int], List[Building]] = defaultdict(list) self._grid: Dict[Tuple[int, int], List[Building]] = defaultdict(list)
self._buildings: List[Building] = [] self._buildings: List[Building] = []
self._buildings_by_id: Dict[int, Building] = {}
def _cell_key(self, lat: float, lon: float) -> Tuple[int, int]: def _cell_key(self, lat: float, lon: float) -> Tuple[int, int]:
"""Convert lat/lon to grid cell key""" """Convert lat/lon to grid cell key"""
@@ -30,6 +31,7 @@ class SpatialIndex:
"""Build spatial index from buildings list""" """Build spatial index from buildings list"""
self._grid.clear() self._grid.clear()
self._buildings = buildings self._buildings = buildings
self._buildings_by_id = {b.id: b for b in buildings}
for building in buildings: for building in buildings:
# Get bounding box of building # Get bounding box of building
@@ -63,9 +65,7 @@ class SpatialIndex:
for b in self._grid.get(key, []): for b in self._grid.get(key, []):
results.add(b.id) results.add(b.id)
# Return buildings by id (deduped) return [self._buildings_by_id[bid] for bid in results if bid in self._buildings_by_id]
id_set = results
return [b for b in self._buildings if b.id in id_set]
def query_line( def query_line(
self, self,
@@ -73,29 +73,37 @@ class SpatialIndex:
lat2: float, lon2: float, lat2: float, lon2: float,
buffer_cells: int = 1 buffer_cells: int = 1
) -> List[Building]: ) -> List[Building]:
"""Find buildings along a line (for LoS checks)""" """Find buildings along a line by walking the actual cells it passes through.
Samples points along the line at cell_size intervals and queries
a buffer around each sample — much faster than bounding-box scan
for long lines.
"""
if not self._grid: if not self._grid:
return self._buildings return self._buildings
# Get bounding box cells of the line # Walk the line in cell_size steps, collecting unique cells
min_lat = min(lat1, lat2) dlat = lat2 - lat1
max_lat = max(lat1, lat2) dlon = lon2 - lon1
min_lon = min(lon1, lon2) length = max(abs(dlat), abs(dlon))
max_lon = max(lon1, lon2) num_steps = max(1, int(length / self.cell_size) + 1)
min_clat = int(min_lat / self.cell_size) - buffer_cells visited_cells: set = set()
max_clat = int(max_lat / self.cell_size) + buffer_cells for s in range(num_steps + 1):
min_clon = int(min_lon / self.cell_size) - buffer_cells t = s / num_steps
max_clon = int(max_lon / self.cell_size) + buffer_cells lat = lat1 + t * dlat
lon = lon1 + t * dlon
center = self._cell_key(lat, lon)
for dy in range(-buffer_cells, buffer_cells + 1):
for dx in range(-buffer_cells, buffer_cells + 1):
visited_cells.add((center[0] + dy, center[1] + dx))
results = set() results = set()
for clat in range(min_clat, max_clat + 1): for key in visited_cells:
for clon in range(min_clon, max_clon + 1): for b in self._grid.get(key, []):
for b in self._grid.get((clat, clon), []): results.add(b.id)
results.add(b.id)
id_set = results return [self._buildings_by_id[bid] for bid in results if bid in self._buildings_by_id]
return [b for b in self._buildings if b.id in id_set]
def query_bbox( def query_bbox(
self, self,
@@ -117,8 +125,7 @@ class SpatialIndex:
for b in self._grid.get((clat, clon), []): for b in self._grid.get((clat, clon), []):
results.add(b.id) results.add(b.id)
id_set = results return [self._buildings_by_id[bid] for bid in results if bid in self._buildings_by_id]
return [b for b in self._buildings if b.id in id_set]
# Global cache of spatial indices # Global cache of spatial indices

View File

@@ -44,8 +44,10 @@ class StreetCanyonService:
CORNER_LOSS_90 = 10.0 # dB for 90-degree turn CORNER_LOSS_90 = 10.0 # dB for 90-degree turn
CORNER_LOSS_45 = 4.0 # dB for 45-degree turn CORNER_LOSS_45 = 4.0 # dB for 45-degree turn
def __init__(self, cache_dir: str = "/opt/rfcp/backend/data/streets"): def __init__(self):
self.cache_dir = Path(cache_dir) import os
self.data_path = Path(os.environ.get('RFCP_DATA_PATH', './data'))
self.cache_dir = self.data_path / 'osm' / 'streets'
self.cache_dir.mkdir(exist_ok=True, parents=True) self.cache_dir.mkdir(exist_ok=True, parents=True)
self._cache: dict[str, List[Street]] = {} self._cache: dict[str, List[Street]] = {}
@@ -56,21 +58,28 @@ class StreetCanyonService:
) -> List[Street]: ) -> List[Street]:
"""Fetch street network from OSM""" """Fetch street network from OSM"""
cache_key = f"{min_lat:.3f}_{min_lon:.3f}_{max_lat:.3f}_{max_lon:.3f}" cache_key = f"{min_lat:.2f}_{min_lon:.2f}_{max_lat:.2f}_{max_lon:.2f}"
# Check cache # Check memory cache
if cache_key in self._cache: if cache_key in self._cache:
return self._cache[cache_key] return self._cache[cache_key]
# Check disk cache
cache_file = self.cache_dir / f"{cache_key}.json" cache_file = self.cache_dir / f"{cache_key}.json"
if cache_file.exists(): if cache_file.exists():
with open(cache_file) as f: try:
data = json.load(f) with open(cache_file) as f:
streets = [Street(**s) for s in data] data = json.load(f)
self._cache[cache_key] = streets streets = [Street(**s) for s in data]
return streets self._cache[cache_key] = streets
print(f"[Streets] Cache hit for {cache_key}")
return streets
except Exception:
pass
# Fetch from Overpass # Fetch from Overpass
print(f"[Streets] Fetching from Overpass API for {cache_key}...")
query = f""" query = f"""
[out:json][timeout:30]; [out:json][timeout:30];
way["highway"]({min_lat},{min_lon},{max_lat},{max_lon}); way["highway"]({min_lat},{min_lon},{max_lat},{max_lon});
@@ -85,20 +94,21 @@ class StreetCanyonService:
response.raise_for_status() response.raise_for_status()
data = response.json() data = response.json()
except Exception as e: except Exception as e:
print(f"Street fetch error: {e}") print(f"[Streets] Fetch error: {e}")
return [] return []
streets = self._parse_streets(data) streets = self._parse_streets(data)
# Cache # Cache to disk
with open(cache_file, 'w') as f: if streets:
json.dump([{ with open(cache_file, 'w') as f:
"id": s.id, json.dump([{
"name": s.name, "id": s.id,
"geometry": s.geometry, "name": s.name,
"width": s.width, "geometry": s.geometry,
"highway_type": s.highway_type "width": s.width,
} for s in streets], f) "highway_type": s.highway_type
} for s in streets], f)
self._cache[cache_key] = streets self._cache[cache_key] = streets
return streets return streets
@@ -360,4 +370,42 @@ class StreetCanyonService:
return self.CORNER_LOSS_90 + (turn_angle - 90) * 0.2 # Extra loss for sharp turns return self.CORNER_LOSS_90 + (turn_angle - 90) * 0.2 # Extra loss for sharp turns
def calculate_street_canyon_loss_sync(
    self,
    tx_lat: float, tx_lon: float, tx_height: float,
    rx_lat: float, rx_lon: float, rx_height: float,
    frequency_mhz: float,
    streets: List[Street]
) -> Tuple[float, List[Tuple[float, float]]]:
    """Sync version (no I/O in the async original).

    Routes the signal along the street graph between TX and RX, summing a
    distance-weighted free-space loss per segment plus a corner loss at
    every turn.

    Returns (total_loss_db, street_path); total_loss is inf (with an
    empty path) when no street path connects the endpoints.
    """
    # Hoisted out of the per-segment loop: the import is loop-invariant.
    from app.services.terrain_service import TerrainService

    street_path = self._find_street_path(tx_lat, tx_lon, rx_lat, rx_lon, streets)
    if not street_path:
        return float('inf'), []

    total_loss = 0.0
    total_distance = 0.0
    for i in range(len(street_path) - 1):
        p1 = street_path[i]
        p2 = street_path[i + 1]
        segment_dist = TerrainService.haversine_distance(p1[0], p1[1], p2[0], p2[1])
        total_distance += segment_dist
        if segment_dist > 0:
            # Free-space path loss for this segment, weighted by the
            # segment's share of the cumulative distance so far.
            # NOTE(review): this weighting mirrors the async original —
            # verify the intent before changing it.
            segment_loss = 32.4 + 20 * np.log10(frequency_mhz) + 20 * np.log10(segment_dist / 1000 + 0.001)
            total_loss += segment_loss * (segment_dist / total_distance) if total_distance > 0 else 0
        if i > 0:
            # Add a turn penalty based on the bend angle at p1.
            corner_angle = self._calculate_corner_angle(
                street_path[i - 1], p1, p2
            )
            corner_loss = self._corner_loss(corner_angle)
            total_loss += corner_loss
    return total_loss, street_path
street_canyon_service = StreetCanyonService() street_canyon_service = StreetCanyonService()

View File

@@ -168,6 +168,25 @@ class TerrainService:
return float(elevation) return float(elevation)
def get_elevation_sync(self, lat: float, lon: float) -> float:
    """Sync elevation lookup from memory cache. Returns 0.0 if tile not loaded.

    Maps (lat, lon) to a pixel in the 1-degree tile covering it and
    returns the sampled elevation in metres. Void pixels (-32768) are
    reported as 0.0.
    """
    tile_name = self.get_tile_name(lat, lon)
    tile = self._tile_cache.get(tile_name)
    if tile is None:
        # Tile not pre-loaded — caller gets sea level.
        return 0.0
    size = tile.shape[0]
    # Tile origin is floor(lat)/floor(lon). Floor division handles
    # negatives correctly, including exact negative integers — the old
    # `int(lat) - 1` form was off by one whole tile at e.g. lat == -1.0.
    lat_int = int(lat // 1)
    lon_int = int(lon // 1)
    # Row 0 is the tile's northern edge, hence the (1 - fraction) flip.
    row = int((1 - (lat - lat_int)) * (size - 1))
    col = int((lon - lon_int) * (size - 1))
    row = max(0, min(row, size - 1))
    col = max(0, min(col, size - 1))
    elevation = tile[row, col]
    # -32768 is the SRTM "void" (no data) sentinel.
    return 0.0 if elevation == -32768 else float(elevation)
async def get_elevation_profile( async def get_elevation_profile(
self, self,
lat1: float, lon1: float, lat1: float, lon1: float,
@@ -193,6 +212,30 @@ class TerrainService:
return profile return profile
def get_elevation_profile_sync(
    self,
    lat1: float, lon1: float,
    lat2: float, lon2: float,
    num_points: int = 50
) -> List[dict]:
    """Sync elevation profile - tiles must be pre-loaded into memory cache.

    Returns num_points samples evenly spaced along the great-circle
    segment, each a dict with lat, lon, elevation (m) and distance (m)
    from the start point.
    """
    span = self.haversine_distance(lat1, lon1, lat2, lon2)
    sample_lats = np.linspace(lat1, lat2, num_points)
    sample_lons = np.linspace(lon1, lon2, num_points)
    sample_dists = np.linspace(0.0, span, num_points)
    return [
        {
            "lat": float(la),
            "lon": float(lo),
            "elevation": self.get_elevation_sync(float(la), float(lo)),
            "distance": float(di),
        }
        for la, lo, di in zip(sample_lats, sample_lons, sample_dists)
    ]
async def ensure_tiles_for_bbox( async def ensure_tiles_for_bbox(
self, self,
min_lat: float, min_lon: float, min_lat: float, min_lon: float,

View File

@@ -1,9 +1,22 @@
"""Entry point for PyInstaller bundle""" """Entry point for PyInstaller bundle"""
print("[RFCP] run_server.py starting...", flush=True)
import os import os
import sys import sys
# Force unbuffered stdout/stderr — critical for piped output (Electron, bat files)
os.environ['PYTHONUNBUFFERED'] = '1'
if hasattr(sys.stdout, 'reconfigure'):
try:
sys.stdout.reconfigure(line_buffering=True)
except Exception:
pass
if hasattr(sys.stderr, 'reconfigure'):
try:
sys.stderr.reconfigure(line_buffering=True)
except Exception:
pass
print("[RFCP] run_server.py starting...", flush=True)
# Set base path for PyInstaller # Set base path for PyInstaller
if getattr(sys, 'frozen', False): if getattr(sys, 'frozen', False):
base_dir = os.path.dirname(sys.executable) base_dir = os.path.dirname(sys.executable)

View File

@@ -1,5 +1,5 @@
const { app, BrowserWindow, ipcMain, dialog, shell } = require('electron'); const { app, BrowserWindow, ipcMain, dialog, shell } = require('electron');
const { spawn } = require('child_process'); const { spawn, execSync } = require('child_process');
const path = require('path'); const path = require('path');
const fs = require('fs'); const fs = require('fs');
const Store = require('electron-store'); const Store = require('electron-store');
@@ -179,7 +179,8 @@ async function startBackend() {
backendProcess = spawn(backendExe, [], { backendProcess = spawn(backendExe, [], {
cwd: backendDir, cwd: backendDir,
env, env,
stdio: ['ignore', 'pipe', 'pipe'] stdio: ['ignore', 'pipe', 'pipe'],
detached: process.platform !== 'win32' // Unix: create process group for clean kill
}); });
} }
@@ -305,6 +306,35 @@ function createMainWindow() {
}); });
} }
// ── Backend cleanup ───────────────────────────────────────────────
// Terminate the spawned backend and all of its children. On Windows,
// `taskkill /T` takes down the whole process tree; on Unix the backend
// was spawned detached into its own process group, so signalling the
// negative PID reaches every child. A plain SIGKILL is the last resort.
function killBackend() {
  if (!backendProcess) return;

  const pid = backendProcess.pid;
  log(`Killing backend (PID ${pid})...`);

  const killTree = () => {
    if (process.platform === 'win32') {
      // Windows: taskkill with /T (tree) to kill child processes too
      execSync(`taskkill /f /t /pid ${pid}`, { stdio: 'ignore' });
    } else {
      // Unix: kill process group
      process.kill(-pid, 'SIGTERM');
    }
  };

  try {
    killTree();
  } catch (e) {
    // Fallback: try normal kill
    try {
      backendProcess.kill('SIGKILL');
    } catch (_e2) {
      // Already dead
    }
  }

  backendProcess = null;
  log('Backend killed');
}
// ── App lifecycle ────────────────────────────────────────────────── // ── App lifecycle ──────────────────────────────────────────────────
app.whenReady().then(async () => { app.whenReady().then(async () => {
@@ -335,10 +365,7 @@ app.whenReady().then(async () => {
}); });
app.on('window-all-closed', () => { app.on('window-all-closed', () => {
if (backendProcess) { killBackend();
backendProcess.kill();
backendProcess = null;
}
if (process.platform !== 'darwin') { if (process.platform !== 'darwin') {
app.quit(); app.quit();
@@ -352,11 +379,11 @@ app.on('activate', () => {
}); });
app.on('before-quit', () => { app.on('before-quit', () => {
if (backendProcess) { killBackend();
backendProcess.kill();
}
if (backendLogStream) { if (backendLogStream) {
backendLogStream.end(); backendLogStream.end();
backendLogStream = null;
} }
}); });
@@ -404,3 +431,74 @@ ipcMain.handle('set-setting', (_event, key, value) => store.set(key, value));
ipcMain.handle('open-external', (_event, url) => shell.openExternal(url)); ipcMain.handle('open-external', (_event, url) => shell.openExternal(url));
ipcMain.handle('open-path', (_event, filePath) => shell.openPath(filePath)); ipcMain.handle('open-path', (_event, filePath) => shell.openPath(filePath));
// ── Import Region Data ────────────────────────────────────────────
// Let the user pick a folder and copy its terrain/*.hgt tiles and
// osm/<subdir>/*.json caches into the app data directory. Returns a
// summary object consumed by the renderer's Settings panel.
ipcMain.handle('import-region-data', async () => {
  const result = await dialog.showOpenDialog(mainWindow, {
    title: 'Select folder with region data',
    properties: ['openDirectory']
  });
  const srcDir = result.filePaths[0];
  if (!srcDir) return { success: false, message: 'Cancelled' };

  const dataPath = getDataPath();
  let terrainCount = 0;
  let osmCount = 0;

  // Copy every file in `from` whose name ends with `ext` into `to`,
  // creating `to` when needed. Returns the number of files copied.
  const copyByExt = (from, to, ext) => {
    if (!fs.existsSync(to)) {
      fs.mkdirSync(to, { recursive: true });
    }
    let copied = 0;
    for (const file of fs.readdirSync(from).filter((f) => f.endsWith(ext))) {
      fs.copyFileSync(path.join(from, file), path.join(to, file));
      copied++;
    }
    return copied;
  };

  try {
    // terrain/*.hgt — flat directory of SRTM tiles
    const terrainSrc = path.join(srcDir, 'terrain');
    if (fs.existsSync(terrainSrc)) {
      terrainCount = copyByExt(terrainSrc, path.join(dataPath, 'terrain'), '.hgt');
    }

    // osm/**/*.json — one level of subdirectories of cached OSM data
    const osmSrc = path.join(srcDir, 'osm');
    if (fs.existsSync(osmSrc)) {
      const osmDest = path.join(dataPath, 'osm');
      const subdirs = fs.readdirSync(osmSrc).filter((d) =>
        fs.statSync(path.join(osmSrc, d)).isDirectory()
      );
      for (const subdir of subdirs) {
        osmCount += copyByExt(path.join(osmSrc, subdir), path.join(osmDest, subdir), '.json');
      }
    }

    if (terrainCount === 0 && osmCount === 0) {
      return {
        success: false,
        message: 'No data files found. Expected terrain/*.hgt or osm/**/*.json'
      };
    }

    log(`Imported ${terrainCount} terrain tiles, ${osmCount} OSM files from ${srcDir}`);
    return {
      success: true,
      terrainCount,
      osmCount,
      message: `Imported ${terrainCount} terrain tiles and ${osmCount} OSM cache files`
    };
  } catch (e) {
    log(`Import error: ${e.message}`);
    return { success: false, message: `Import failed: ${e.message}` };
  }
});

View File

@@ -21,6 +21,9 @@ contextBridge.exposeInMainWorld('rfcp', {
openExternal: (url) => ipcRenderer.invoke('open-external', url), openExternal: (url) => ipcRenderer.invoke('open-external', url),
openPath: (path) => ipcRenderer.invoke('open-path', path), openPath: (path) => ipcRenderer.invoke('open-path', path),
// Region data import
importRegionData: () => ipcRenderer.invoke('import-region-data'),
// Platform info // Platform info
platform: process.platform, platform: process.platform,
isDesktop: true, isDesktop: true,

View File

@@ -28,7 +28,8 @@ import Button from '@/components/ui/Button.tsx';
import NumberInput from '@/components/ui/NumberInput.tsx'; import NumberInput from '@/components/ui/NumberInput.tsx';
import ConfirmDialog from '@/components/ui/ConfirmDialog.tsx'; import ConfirmDialog from '@/components/ui/ConfirmDialog.tsx';
import { RegionWizard } from '@/components/RegionWizard.tsx'; import { RegionWizard } from '@/components/RegionWizard.tsx';
import { isDesktop } from '@/lib/desktop.ts'; import { isDesktop, getDesktopApi } from '@/lib/desktop.ts';
import type { RegionInfo, CacheStats } from '@/services/api.ts';
/** /**
* Restore a sites snapshot: replace all sites in IndexedDB + Zustand. * Restore a sites snapshot: replace all sites in IndexedDB + Zustand.
@@ -121,8 +122,18 @@ export default function App() {
// Region wizard for first-run (desktop mode only) // Region wizard for first-run (desktop mode only)
const [showWizard, setShowWizard] = useState(false); const [showWizard, setShowWizard] = useState(false);
const [cachedRegions, setCachedRegions] = useState<RegionInfo[]>([]);
const [cacheStats, setCacheStats] = useState<CacheStats | null>(null);
const refreshCacheStatus = useCallback(() => {
api.getRegions().then(setCachedRegions).catch(() => {});
api.getCacheStats().then(setCacheStats).catch(() => {});
}, []);
useEffect(() => { useEffect(() => {
// Load cache status on mount
refreshCacheStatus();
if (!isDesktop()) return; if (!isDesktop()) return;
const skipped = localStorage.getItem('rfcp_region_wizard_skipped'); const skipped = localStorage.getItem('rfcp_region_wizard_skipped');
if (skipped) return; if (skipped) return;
@@ -137,7 +148,7 @@ export default function App() {
.catch(() => { .catch(() => {
// Backend not ready yet, skip wizard // Backend not ready yet, skip wizard
}); });
}, []); }, [refreshCacheStatus]);
// Resizable sidebar // Resizable sidebar
const PANEL_MIN = 300; const PANEL_MIN = 300;
@@ -1051,6 +1062,75 @@ export default function App() {
</div> </div>
</div> </div>
{/* Data Cache Status */}
<div className="bg-white dark:bg-dark-surface border border-gray-200 dark:border-dark-border rounded-lg shadow-sm p-4 space-y-2">
<h3 className="text-sm font-semibold text-gray-800 dark:text-dark-text">
Data Cache
</h3>
{cachedRegions.length > 0 ? (
<>
<div className="space-y-1">
{cachedRegions.filter((r) => r.downloaded || r.download_progress > 0).length > 0 ? (
cachedRegions
.filter((r) => r.downloaded || r.download_progress > 0)
.map((r) => (
<div key={r.id} className="flex items-center gap-2 text-xs">
<span
className={`w-1.5 h-1.5 rounded-full flex-shrink-0 ${
r.downloaded ? 'bg-emerald-500' : 'bg-amber-500'
}`}
/>
<span className="text-gray-700 dark:text-dark-text truncate">{r.name}</span>
{!r.downloaded && (
<span className="text-gray-400 dark:text-dark-muted ml-auto">
{Math.round(r.download_progress)}%
</span>
)}
</div>
))
) : (
<p className="text-xs text-gray-400 dark:text-dark-muted">No regions cached</p>
)}
</div>
{cacheStats && (
<p className="text-[11px] text-gray-400 dark:text-dark-muted">
{cacheStats.terrain_tiles} terrain tiles ({cacheStats.terrain_mb} MB)
</p>
)}
</>
) : (
<p className="text-xs text-gray-400 dark:text-dark-muted">Loading...</p>
)}
<div className="flex gap-2 pt-1">
<button
onClick={() => setShowWizard(true)}
className="text-xs px-2 py-1 bg-slate-100 dark:bg-dark-border text-gray-600 dark:text-dark-text rounded hover:bg-slate-200 dark:hover:bg-dark-muted transition-colors"
>
Download Regions
</button>
{isDesktop() && (
<button
onClick={async () => {
const desktop = getDesktopApi();
if (!desktop) return;
const result = await desktop.importRegionData();
if (result.success) {
addToast(result.message, 'success');
refreshCacheStatus();
} else {
if (result.message !== 'Cancelled') {
addToast(result.message, 'error');
}
}
}}
className="text-xs px-2 py-1 bg-slate-100 dark:bg-dark-border text-gray-600 dark:text-dark-text rounded hover:bg-slate-200 dark:hover:bg-dark-muted transition-colors"
>
Import Data
</button>
)}
</div>
</div>
{/* Coverage error */} {/* Coverage error */}
{coverageError && ( {coverageError && (
<div className="bg-red-50 dark:bg-red-900/20 border border-red-200 dark:border-red-800 rounded-lg p-3"> <div className="bg-red-50 dark:bg-red-900/20 border border-red-200 dark:border-red-800 rounded-lg p-3">
@@ -1109,7 +1189,7 @@ export default function App() {
{/* First-run region download wizard (desktop only) */} {/* First-run region download wizard (desktop only) */}
{showWizard && ( {showWizard && (
<RegionWizard onComplete={() => setShowWizard(false)} /> <RegionWizard onComplete={() => { setShowWizard(false); refreshCacheStatus(); }} />
)} )}
</div> </div>
); );

View File

@@ -14,6 +14,12 @@ interface RFCPDesktop {
setSetting: (key: string, value: unknown) => Promise<void>; setSetting: (key: string, value: unknown) => Promise<void>;
openExternal: (url: string) => Promise<void>; openExternal: (url: string) => Promise<void>;
openPath: (path: string) => Promise<void>; openPath: (path: string) => Promise<void>;
importRegionData: () => Promise<{
success: boolean;
terrainCount?: number;
osmCount?: number;
message: string;
}>;
platform: string; platform: string;
isDesktop: boolean; isDesktop: boolean;
isMac: boolean; isMac: boolean;

View File

@@ -0,0 +1 @@
{"detail":"Calculation timeout (5 min) — try smaller radius or lower resolution"}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long