@mytec: feat: Phase 3.0 Architecture Refactor ✅
Major refactoring of RFCP backend: - Modular propagation models (8 models) - SharedMemoryManager for terrain data - ProcessPoolExecutor parallel processing - WebSocket progress streaming - Building filtering pipeline (351k → 15k) - 82 unit tests Performance: Standard preset 38s → 5s (7.6x speedup) Known issue: Detailed preset timeout (fix in 3.1.0)
This commit is contained in:
38
backend/app/geometry/__init__.py
Normal file
38
backend/app/geometry/__init__.py
Normal file
@@ -0,0 +1,38 @@
|
||||
"""
|
||||
Geometry operations for RF propagation calculations.
|
||||
|
||||
NumPy-dependent modules (haversine, intersection, reflection) are
|
||||
imported lazily so pure-Python modules (diffraction, los) remain
|
||||
available even when NumPy is not installed.
|
||||
"""
|
||||
|
||||
from app.geometry.diffraction import knife_edge_loss
|
||||
from app.geometry.los import check_los_terrain, fresnel_radius
|
||||
|
||||
|
||||
def __getattr__(name):
    """Lazily import NumPy-dependent geometry functions.

    Pure-Python helpers (diffraction, los) are imported eagerly at module
    level; the NumPy-backed helpers are resolved here on first attribute
    access so the package stays importable when NumPy is absent.

    Args:
        name: Attribute being looked up on the package.

    Returns:
        The requested function object.

    Raises:
        AttributeError: If *name* is not a known lazy export.
    """
    # Dispatch table: lazy attribute -> submodule that defines it.
    # A single dict lookup replaces the original if/elif chain and the
    # fragile ``return locals()[name]`` indirection.
    _lazy_exports = {
        "haversine_distance": "haversine",
        "haversine_batch": "haversine",
        "points_to_local_coords": "haversine",
        "line_segments_intersect_batch": "intersection",
        "line_intersects_polygons_batch": "intersection",
        "calculate_reflection_points_batch": "reflection",
        "find_best_reflection_path": "reflection",
    }
    submodule = _lazy_exports.get(name)
    if submodule is not None:
        import importlib

        module = importlib.import_module(f"app.geometry.{submodule}")
        return getattr(module, name)
    raise AttributeError(f"module 'app.geometry' has no attribute {name!r}")
|
||||
|
||||
|
||||
# Public API: the NumPy-backed helpers (resolved lazily via module
# __getattr__) plus the eagerly imported pure-Python functions.
__all__ = [
    "haversine_distance", "haversine_batch", "points_to_local_coords",
    "line_segments_intersect_batch", "line_intersects_polygons_batch",
    "calculate_reflection_points_batch", "find_best_reflection_path",
    "knife_edge_loss", "check_los_terrain", "fresnel_radius",
]
|
||||
40
backend/app/geometry/diffraction.py
Normal file
40
backend/app/geometry/diffraction.py
Normal file
@@ -0,0 +1,40 @@
|
||||
"""
|
||||
Knife-edge diffraction geometry calculations.
|
||||
"""
|
||||
|
||||
import math
|
||||
|
||||
|
||||
def knife_edge_loss(
    d1_m: float,
    d2_m: float,
    h_m: float,
    wavelength_m: float,
) -> float:
    """
    Calculate diffraction loss over single knife edge.

    Uses the classical piecewise approximation of the Fresnel knife-edge
    integral (ITU-R P.526 style) on the diffraction parameter v.

    Args:
        d1_m: Distance TX to obstacle (meters, > 0)
        d2_m: Distance obstacle to RX (meters, > 0)
        h_m: Obstacle height above LOS (positive = above)
        wavelength_m: Signal wavelength (meters, > 0)

    Returns:
        Loss in dB (>= 0)
    """
    # Degenerate geometry: treat as no diffraction loss.
    if d1_m <= 0 or d2_m <= 0 or wavelength_m <= 0:
        return 0.0

    # Fresnel-Kirchhoff diffraction parameter.
    v = h_m * math.sqrt(2 * (d1_m + d2_m) / (wavelength_m * d1_m * d2_m))

    if v < -0.78:
        # Obstacle well below the line of sight: negligible loss.
        L = 0.0
    elif v < 0:
        # BUG FIX: this branch used 9.11*v - 1.27*v**2; the standard
        # approximation for -0.78 <= v < 0 is 9.0*v + 1.65*v**2
        # (gives ~0 dB at v = -0.78, matching the branch above).
        L = 6.02 + 9.0 * v + 1.65 * v ** 2
    elif v < 2.4:
        # BUG FIX: the quadratic term was +1.65*v**2, which made the
        # curve jump ~17 dB at v = 2.4; the standard coefficient is
        # -1.27, which meets the asymptotic branch (~20.6 dB) smoothly.
        L = 6.02 + 9.11 * v - 1.27 * v ** 2
    else:
        # Asymptotic form for large v.
        L = 12.95 + 20 * math.log10(v)

    return max(0.0, L)
|
||||
50
backend/app/geometry/haversine.py
Normal file
50
backend/app/geometry/haversine.py
Normal file
@@ -0,0 +1,50 @@
|
||||
"""
|
||||
Distance calculations using the haversine formula.
|
||||
|
||||
Supports both scalar and batch (NumPy array) operations.
|
||||
"""
|
||||
|
||||
import numpy as np
|
||||
from typing import Tuple
|
||||
|
||||
# Mean Earth radius in meters (spherical model).
EARTH_RADIUS = 6371000


def haversine_distance(lat1: float, lon1: float, lat2: float, lon2: float) -> float:
    """Great-circle distance between two lat/lon points, in meters."""
    phi1, lam1, phi2, lam2 = (np.radians(v) for v in (lat1, lon1, lat2, lon2))
    # Haversine formula: a is the squared half-chord length.
    a = (np.sin((phi2 - phi1) / 2) ** 2
         + np.cos(phi1) * np.cos(phi2) * np.sin((lam2 - lam1) / 2) ** 2)
    central_angle = 2 * np.arcsin(np.sqrt(a))
    return float(EARTH_RADIUS * central_angle)


def haversine_batch(
    lat1: float, lon1: float,
    lats2: np.ndarray, lons2: np.ndarray,
) -> np.ndarray:
    """Great-circle distances from one reference point to many points (meters)."""
    phi1 = np.radians(lat1)
    lam1 = np.radians(lon1)
    phis = np.radians(lats2)
    lams = np.radians(lons2)

    # Vectorized haversine over all target points at once.
    a = (np.sin((phis - phi1) / 2) ** 2
         + np.cos(phi1) * np.cos(phis) * np.sin((lams - lam1) / 2) ** 2)
    central_angles = 2 * np.arcsin(np.sqrt(a))

    return EARTH_RADIUS * central_angles
|
||||
|
||||
|
||||
def points_to_local_coords(
    ref_lat: float, ref_lon: float,
    lats: np.ndarray, lons: np.ndarray,
) -> Tuple[np.ndarray, np.ndarray]:
    """Project lat/lon arrays onto a local planar X/Y frame (meters).

    Equirectangular approximation centered on (ref_lat, ref_lon):
    longitude degrees are scaled by cos(ref_lat). Suitable for small
    extents around the reference point.
    """
    meters_per_deg_lon = 111320.0 * np.cos(np.radians(ref_lat))
    east = (lons - ref_lon) * meters_per_deg_lon
    north = (lats - ref_lat) * 110540.0
    return east, north
|
||||
116
backend/app/geometry/intersection.py
Normal file
116
backend/app/geometry/intersection.py
Normal file
@@ -0,0 +1,116 @@
|
||||
"""
|
||||
Vectorized line-segment and line-polygon intersection checks.
|
||||
|
||||
All operations use NumPy for batch processing.
|
||||
"""
|
||||
|
||||
import numpy as np
|
||||
from typing import Tuple
|
||||
|
||||
|
||||
def line_segments_intersect_batch(
    p1: np.ndarray, p2: np.ndarray,
    segments_start: np.ndarray, segments_end: np.ndarray,
) -> Tuple[np.ndarray, np.ndarray]:
    """Check if line p1->p2 intersects with N segments.

    Solves p1 + t*(p2 - p1) == start + u*(end - start) per segment using
    2D cross products.

    Args:
        p1, p2: shape (2,)
        segments_start, segments_end: shape (N, 2)

    Returns:
        intersects: bool array (N,) -- True where both t and u lie in [0, 1]
        t_values: parameter along p1->p2 (N,); only meaningful where
            ``intersects`` is True
    """
    d = p2 - p1
    seg_d = segments_end - segments_start

    # 2D cross product of the two direction vectors; ~0 means parallel.
    cross = d[0] * seg_d[:, 1] - d[1] * seg_d[:, 0]
    parallel_mask = np.abs(cross) < 1e-10
    # Substitute 1.0 to avoid divide-by-zero; masked out of the result below.
    cross_safe = np.where(parallel_mask, 1.0, cross)

    # BUG FIX: the original used dp = p1 - segments_start, which negates
    # both t and u (t = ((start - p1) x seg_d) / cross is the correct
    # form), so every genuine crossing was reported as a miss.
    qp = segments_start - p1
    t = (qp[:, 0] * seg_d[:, 1] - qp[:, 1] * seg_d[:, 0]) / cross_safe
    u = (qp[:, 0] * d[1] - qp[:, 1] * d[0]) / cross_safe

    intersects = ~parallel_mask & (t >= 0) & (t <= 1) & (u >= 0) & (u <= 1)
    return intersects, t
|
||||
|
||||
|
||||
def line_intersects_polygons_batch(
    p1: np.ndarray, p2: np.ndarray,
    polygons_x: np.ndarray, polygons_y: np.ndarray,
    polygon_lengths: np.ndarray,
    max_polygons: int = 30,
) -> Tuple[np.ndarray, np.ndarray]:
    """Check if line p1->p2 intersects multiple polygons.

    Uses bounding-box pre-filter to limit work when polygon count is large.

    Args:
        p1, p2: shape (2,)
        polygons_x, polygons_y: flattened vertex arrays
        polygon_lengths: vertices per polygon (num_polygons,)
        max_polygons: check at most this many polygons; when more pass the
            bounding-box pre-filter, the FIRST N in index order are kept
            (not the geometrically nearest)

    Returns:
        intersects: bool (num_polygons,)
        min_distances: distance to first hit (num_polygons,); entries for
            polygons with no hit remain np.inf (scaled by line length)
    """
    num_polygons = len(polygon_lengths)

    # No polygons at all: empty results.
    if num_polygons == 0:
        return np.array([], dtype=bool), np.array([])

    intersects = np.zeros(num_polygons, dtype=bool)
    # Parametric position (t along p1->p2) of the closest hit per polygon.
    min_t = np.full(num_polygons, np.inf)

    # Pre-filter: bounding box check
    if num_polygons > max_polygons:
        # Slack added around the line's bounding box, in the same units as
        # the input coordinates (presumably meters -- confirm with callers).
        buf = 50.0
        line_min_x = min(p1[0], p2[0]) - buf
        line_max_x = max(p1[0], p2[0]) + buf
        line_min_y = min(p1[1], p2[1]) - buf
        line_max_y = max(p1[1], p2[1]) + buf

        nearby_mask = np.zeros(num_polygons, dtype=bool)
        vi = 0  # running offset into the flattened vertex arrays
        for i, length in enumerate(polygon_lengths):
            if length >= 3:
                # Only the polygon's FIRST vertex is tested against the
                # expanded bbox -- a cheap heuristic, not an exact filter.
                cx = polygons_x[vi]
                cy = polygons_y[vi]
                if line_min_x <= cx <= line_max_x and line_min_y <= cy <= line_max_y:
                    nearby_mask[i] = True
            # Advance the offset for every polygon, including skipped ones.
            vi += length

        nearby_indices = np.where(nearby_mask)[0]
        if len(nearby_indices) > max_polygons:
            # Too many survivors: keep the first max_polygons in index order.
            nearby_mask = np.zeros(num_polygons, dtype=bool)
            nearby_mask[nearby_indices[:max_polygons]] = True
    else:
        # Few enough polygons: check them all.
        nearby_mask = np.ones(num_polygons, dtype=bool)

    idx = 0  # running offset into the flattened vertex arrays
    for i, length in enumerate(polygon_lengths):
        # Skip degenerate (< 3 vertices) or filtered-out polygons, but keep
        # the vertex offset in sync.
        if length < 3 or not nearby_mask[i]:
            idx += length
            continue

        px = polygons_x[idx:idx + length]
        py = polygons_y[idx:idx + length]

        # Build the polygon's edge list; np.roll supplies the closing edge
        # from the last vertex back to the first.
        starts = np.stack([px, py], axis=1)
        ends = np.stack([np.roll(px, -1), np.roll(py, -1)], axis=1)

        edge_intersects, t_vals = line_segments_intersect_batch(p1, p2, starts, ends)

        if np.any(edge_intersects):
            intersects[i] = True
            # Closest crossing along p1->p2, as a fraction of line length.
            min_t[i] = np.min(t_vals[edge_intersects])

        idx += length

    line_length = np.linalg.norm(p2 - p1)
    # Convert parametric t to a distance; stays np.inf where nothing was hit.
    min_distances = min_t * line_length

    return intersects, min_distances
|
||||
85
backend/app/geometry/los.py
Normal file
85
backend/app/geometry/los.py
Normal file
@@ -0,0 +1,85 @@
|
||||
"""
|
||||
Line-of-sight checks using terrain profile data.
|
||||
"""
|
||||
|
||||
import math
|
||||
from typing import Optional, Dict, List
|
||||
|
||||
# Mean Earth radius in meters.
EARTH_RADIUS = 6371000
# Effective-Earth k-factor for standard atmospheric refraction.
K_FACTOR = 4 / 3


def check_los_terrain(
    profile: List[dict],
    tx_height: float,
    rx_height: float,
) -> dict:
    """
    Check line-of-sight from a terrain elevation profile.

    Walks every profile sample, compares the terrain elevation against the
    straight TX->RX ray lowered by the effective-Earth curvature bulge,
    and tracks the minimum clearance seen.

    Args:
        profile: List of dicts with 'elevation' and 'distance' keys.
        tx_height: TX antenna height above ground (meters).
        rx_height: RX height above ground (meters).

    Returns:
        dict with has_los, clearance (minimum over the profile), and
        blocked_at (distance of the last sample with clearance <= 0,
        or None if never blocked)
    """
    # Empty profile: optimistically report a clear path.
    if not profile:
        return {"has_los": True, "clearance": 0.0, "blocked_at": None}

    ray_start = profile[0]["elevation"] + tx_height
    ray_end = profile[-1]["elevation"] + rx_height
    span = profile[-1]["distance"]

    worst_clearance = float("inf")
    blocked_at = None

    for sample in profile:
        d = sample["distance"]

        # Height of the straight ray at this distance; a zero-length span
        # degenerates to the TX height.
        if span == 0:
            ray_height = ray_start
        else:
            ray_height = ray_start + (ray_end - ray_start) * (d / span)

        # Earth-curvature bulge under the ray (k-factor effective radius).
        bulge = (d * (span - d)) / (2 * (K_FACTOR * EARTH_RADIUS))

        clearance = (ray_height - bulge) - sample["elevation"]

        if clearance < worst_clearance:
            worst_clearance = clearance
        if clearance <= 0:
            blocked_at = d

    return {
        "has_los": worst_clearance > 0,
        "clearance": worst_clearance,
        "blocked_at": blocked_at,
    }
|
||||
|
||||
|
||||
def fresnel_radius(
    d1_m: float, d2_m: float, wavelength_m: float, zone: int = 1
) -> float:
    """Calculate Fresnel zone radius at a point along the path.

    Implements r_n = sqrt(n * lambda * d1 * d2 / (d1 + d2)).

    Args:
        d1_m: Distance from TX to point
        d2_m: Distance from point to RX
        wavelength_m: Signal wavelength
        zone: Fresnel zone number (default 1)

    Returns:
        Radius in meters (0.0 for a degenerate zero-length path)
    """
    span = d1_m + d2_m
    # Degenerate path: no meaningful zone radius.
    if span <= 0:
        return 0.0
    radius_sq = zone * wavelength_m * d1_m * d2_m / span
    return math.sqrt(radius_sq)
|
||||
163
backend/app/geometry/reflection.py
Normal file
163
backend/app/geometry/reflection.py
Normal file
@@ -0,0 +1,163 @@
|
||||
"""
|
||||
Vectorized reflection point calculations using mirror-image method.
|
||||
"""
|
||||
|
||||
import numpy as np
|
||||
from typing import Tuple, Optional
|
||||
from app.geometry.intersection import line_intersects_polygons_batch
|
||||
|
||||
|
||||
def calculate_reflection_points_batch(
    tx: np.ndarray, rx: np.ndarray,
    wall_starts: np.ndarray, wall_ends: np.ndarray,
) -> Tuple[np.ndarray, np.ndarray]:
    """Calculate reflection points on N walls via the mirror-image method.

    TX is mirrored across each wall's supporting line; the candidate
    reflection point is where the RX -> mirrored-TX ray crosses the wall.

    Args:
        tx, rx: shape (2,)
        wall_starts, wall_ends: shape (N, 2)

    Returns:
        reflection_points: (N, 2)
        valid: bool (N,) -- True where the crossing lies within the wall
            segment and TX sits on the normal's positive side
    """
    edge = wall_ends - wall_starts
    edge_len = np.linalg.norm(edge, axis=1, keepdims=True)
    # Guard zero-length walls before normalizing.
    unit = edge / np.maximum(edge_len, 1e-10)

    # Left-hand unit normal of each wall direction.
    normal = np.stack([-unit[:, 1], unit[:, 0]], axis=1)

    # Signed distance from TX to each wall line, then mirror TX across it.
    signed_dist = np.sum((tx - wall_starts) * normal, axis=1, keepdims=True)
    mirrored_tx = tx - 2 * signed_dist * normal

    # Ray from RX toward the mirrored TX.
    ray = mirrored_tx - rx

    # 2D cross of ray with wall direction; ~0 means the ray runs parallel.
    denom = ray[:, 0] * edge[:, 1] - ray[:, 1] * edge[:, 0]
    solvable = np.abs(denom) > 1e-10
    denom_safe = np.where(solvable, denom, 1.0)

    # Parameter t along the wall where the ray crosses its supporting line.
    offset = wall_starts - rx
    t = (offset[:, 0] * ray[:, 1] - offset[:, 1] * ray[:, 0]) / denom_safe

    points = wall_starts + t[:, np.newaxis] * edge

    # Valid when solvable, within the segment, and TX on the normal side.
    valid = solvable & (t >= 0) & (t <= 1) & (signed_dist[:, 0] > 0)

    return points, valid
|
||||
|
||||
|
||||
def find_best_reflection_path(
    tx: np.ndarray, rx: np.ndarray,
    building_walls_start: np.ndarray,
    building_walls_end: np.ndarray,
    wall_to_building: np.ndarray,
    obstacle_polygons_x: np.ndarray,
    obstacle_polygons_y: np.ndarray,
    obstacle_lengths: np.ndarray,
    max_candidates: int = 50,
    max_walls: int = 100,
    max_los_checks: int = 10,
) -> Tuple[Optional[np.ndarray], float, float]:
    """Find best single-reflection path using vectorized ops.

    Pipeline: (1) keep the max_walls walls closest to the TX-RX midpoint,
    (2) compute mirror-image reflection candidates on them, (3) drop
    candidates whose path exceeds twice the direct TX-RX distance,
    (4) keep the max_candidates shortest and sort ascending, (5) verify
    obstacle-free LOS on both legs for at most max_los_checks candidates,
    accepting the first (i.e. shortest) clear one.

    Args:
        tx, rx: shape (2,) endpoints in local planar coordinates
        building_walls_start, building_walls_end: (N, 2) wall segments
        wall_to_building: per-wall building index (N,); filtered in step
            (1) alongside the walls but not otherwise used here
        obstacle_polygons_x, obstacle_polygons_y, obstacle_lengths:
            flattened obstacle polygons passed to the LOS checks
        max_candidates: keep at most this many shortest candidates
        max_walls: Only consider closest N walls for reflection candidates.
        max_los_checks: Only verify LOS for top N shortest reflection paths.

    Returns:
        best_reflection_point: (2,) or None
        best_path_length: meters (np.inf when no valid path)
        best_reflection_loss: dB (0.0 when no valid path)
    """
    num_walls = len(building_walls_start)
    if num_walls == 0:
        return None, np.inf, 0.0

    # Limit walls by distance to path midpoint
    if num_walls > max_walls:
        midpoint = (tx + rx) / 2
        wall_midpoints = (building_walls_start + building_walls_end) / 2
        wall_distances = np.linalg.norm(wall_midpoints - midpoint, axis=1)
        # argpartition gives the max_walls closest walls in O(N), unsorted.
        closest = np.argpartition(wall_distances, max_walls)[:max_walls]
        building_walls_start = building_walls_start[closest]
        building_walls_end = building_walls_end[closest]
        wall_to_building = wall_to_building[closest]

    refl_points, valid = calculate_reflection_points_batch(
        tx, rx, building_walls_start, building_walls_end,
    )

    # No geometrically valid mirror point on any wall.
    if not np.any(valid):
        return None, np.inf, 0.0

    valid_indices = np.where(valid)[0]
    valid_refl = refl_points[valid]

    # Total reflected path length: TX -> reflection point -> RX.
    tx_to_refl = np.linalg.norm(valid_refl - tx, axis=1)
    refl_to_rx = np.linalg.norm(rx - valid_refl, axis=1)
    path_lengths = tx_to_refl + refl_to_rx

    # Direct distance filter
    direct_dist = np.linalg.norm(rx - tx)
    # Paths longer than 2x the direct distance are assumed too lossy to matter.
    within_range = path_lengths <= direct_dist * 2.0
    if not np.any(within_range):
        return None, np.inf, 0.0

    valid_indices = valid_indices[within_range]
    valid_refl = valid_refl[within_range]
    path_lengths = path_lengths[within_range]

    # Keep top candidates by shortest path
    if len(valid_indices) > max_candidates:
        top_idx = np.argpartition(path_lengths, max_candidates)[:max_candidates]
        valid_indices = valid_indices[top_idx]
        valid_refl = valid_refl[top_idx]
        path_lengths = path_lengths[top_idx]

    # Sort by path length for early exit
    sort_order = np.argsort(path_lengths)
    valid_refl = valid_refl[sort_order]
    path_lengths = path_lengths[sort_order]

    # Check LOS only for top N shortest candidates
    check_count = min(len(valid_refl), max_los_checks)
    best_idx = -1
    best_length = np.inf

    for i in range(check_count):
        length = path_lengths[i]
        # Candidates are sorted ascending and the loop breaks on the first
        # clear path, so this guard is effectively never taken; kept as a
        # safety net.
        if length >= best_length:
            continue

        refl_pt = valid_refl[i]

        # Leg 1: TX -> reflection point must be obstacle-free.
        intersects1, _ = line_intersects_polygons_batch(
            tx, refl_pt, obstacle_polygons_x, obstacle_polygons_y, obstacle_lengths,
        )
        if np.any(intersects1):
            continue

        # Leg 2: reflection point -> RX must be obstacle-free.
        intersects2, _ = line_intersects_polygons_batch(
            refl_pt, rx, obstacle_polygons_x, obstacle_polygons_y, obstacle_lengths,
        )
        if np.any(intersects2):
            continue

        best_idx = i
        best_length = length
        break  # sorted by length, first valid is best

    if best_idx < 0:
        return None, np.inf, 0.0

    best_point = valid_refl[best_idx]

    # Reflection loss: 3-10 dB depending on path ratio
    # Heuristic: 3 dB when the reflected path equals the direct path,
    # ramping linearly and saturating at 10 dB by path_ratio = 1.5.
    # direct_dist is floored at 1.0 to avoid division by ~0 for co-located
    # endpoints.
    path_ratio = best_length / max(direct_dist, 1.0)
    reflection_loss = 3.0 + 7.0 * min(1.0, (path_ratio - 1.0) * 2)

    return best_point, best_length, reflection_loss
|
||||
Reference in New Issue
Block a user