diff --git a/environments/dev.yml b/environments/dev.yml new file mode 100644 index 0000000..4581d68 --- /dev/null +++ b/environments/dev.yml @@ -0,0 +1,33 @@ +name: rtxpy-dev +channels: + - conda-forge +dependencies: + - python=3.12 + - cupy>=12.0 + - numba>=0.56 + - zarr>=2.0 + - numpy>=1.21,<3 + - xarray + - rioxarray + - xarray-spatial + - pyproj + - pillow + - pyglfw + - moderngl + - libegl-devel + - scipy + - duckdb<1.4 + - requests + - matplotlib + - pytest + - ruff + - pip + - cmake + - git + +# After creating this environment, install pyoptix-contrib and rtxpy: +# +# conda activate rtxpy-dev +# git clone --depth 1 https://github.com/NVIDIA/optix-dev.git /tmp/optix-dev +# CMAKE_PREFIX_PATH=/tmp/optix-dev pip install pyoptix-contrib +# pip install -e ".[all]" diff --git a/environments/test-py310.yml b/environments/test-py310.yml new file mode 100644 index 0000000..7b65d5d --- /dev/null +++ b/environments/test-py310.yml @@ -0,0 +1,9 @@ +name: rtxpy-test-py310 +channels: + - makepath + - conda-forge +dependencies: + - python=3.10 + - rtxpy=0.0.6 + - libegl-devel + - pytest diff --git a/environments/test-py311.yml b/environments/test-py311.yml new file mode 100644 index 0000000..1ae32ba --- /dev/null +++ b/environments/test-py311.yml @@ -0,0 +1,9 @@ +name: rtxpy-test-py311 +channels: + - makepath + - conda-forge +dependencies: + - python=3.11 + - rtxpy=0.0.6 + - libegl-devel + - pytest diff --git a/environments/test-py312.yml b/environments/test-py312.yml new file mode 100644 index 0000000..ec616aa --- /dev/null +++ b/environments/test-py312.yml @@ -0,0 +1,9 @@ +name: rtxpy-test-py312 +channels: + - makepath + - conda-forge +dependencies: + - python=3.12 + - rtxpy=0.0.6 + - libegl-devel + - pytest diff --git a/environments/test-py313.yml b/environments/test-py313.yml new file mode 100644 index 0000000..3af652a --- /dev/null +++ b/environments/test-py313.yml @@ -0,0 +1,9 @@ +name: rtxpy-test-py313 +channels: + - makepath + - conda-forge +dependencies: + - 
python=3.13 + - rtxpy=0.0.6 + - libegl-devel + - pytest diff --git a/examples/bay_area_fire_risk.py b/examples/bay_area_fire_risk.py new file mode 100644 index 0000000..c33b603 --- /dev/null +++ b/examples/bay_area_fire_risk.py @@ -0,0 +1,702 @@ +"""Bay Area Wildfire Risk Analysis — GPU ML with cuML + rtxpy. + +Combines GPU-accelerated terrain analysis with RAPIDS cuML machine learning +to produce wildfire risk and urban analysis layers for the San Francisco +Bay Area. The entire pipeline stays on GPU: DEM → cupy → cuML → explore(). + +Wind data from Open-Meteo is interpolated onto the DEM grid and used as +features in the fire risk model: wind speed amplifies fire spread, and +wind-aligned slopes (wind blowing uphill) are especially dangerous. + +Analysis layers (press G to cycle): + elevation – raw terrain height + slope – steepness in degrees + aspect – downhill compass bearing + terrain_class – K-Means landform classification (6 classes) + wind_speed – interpolated 10 m wind speed (m/s) + wind_exposure – wind-slope alignment (positive = wind blowing uphill) + fire_density – KDE heatmap of recent FIRMS fire detections + fire_risk – Random Forest fire probability (terrain + wind + fires) + urban_cluster – HDBSCAN neighbourhood clustering from building centroids + +Buildings are coloured by their HDBSCAN cluster (outliers rendered grey). +FIRMS fire detections are placed as glowing orange markers for ground truth. 
+ +Requirements: + pip install rtxpy[all] cuml-cu12 xarray rioxarray requests pyproj Pillow +""" + +import warnings + +import numpy as np +import cupy as cp +import xarray as xr +from pathlib import Path +from pyproj import Transformer + +from xrspatial import slope, aspect, quantile + +from rtxpy import fetch_dem, fetch_buildings, fetch_roads, fetch_water, fetch_firms +import rtxpy + +# --------------------------------------------------------------------------- +# Configuration +# --------------------------------------------------------------------------- +# Greater Bay Area — Marin Headlands to southern San Jose +BOUNDS = (-122.60, 37.20, -121.80, 37.90) +CRS = 'EPSG:32610' # UTM zone 10N +CACHE = Path(__file__).parent +ZARR = CACHE / "bay_area_dem.zarr" + +# K-Means +N_TERRAIN_CLASSES = 6 + +# KDE +KDE_BANDWIDTH = 1500.0 + +# Random Forest +RF_N_TREES = 100 +RF_MAX_DEPTH = 10 + +# HDBSCAN building clustering +HDBSCAN_MIN_CLUSTER = 80 +HDBSCAN_MIN_SAMPLES = 15 + +# 12 distinct cluster colours (colour-blind-friendly qualitative palette) +CLUSTER_COLORS = [ + (0.90, 0.25, 0.20), # red + (0.20, 0.60, 0.85), # blue + (0.30, 0.75, 0.40), # green + (0.95, 0.60, 0.15), # orange + (0.60, 0.35, 0.70), # purple + (0.95, 0.85, 0.20), # yellow + (0.65, 0.45, 0.25), # brown + (0.85, 0.45, 0.65), # pink + (0.45, 0.75, 0.75), # teal + (0.75, 0.75, 0.75), # light grey + (0.55, 0.55, 0.20), # olive + (0.40, 0.20, 0.55), # dark purple +] +OUTLIER_COLOR = (0.35, 0.35, 0.35) # dark grey for noise + + +# --------------------------------------------------------------------------- +# Terrain loading +# --------------------------------------------------------------------------- +def load_terrain(): + """Fetch USGS 10 m DEM, return GPU DataArray + CPU copy.""" + raw = fetch_dem( + bounds=BOUNDS, + output_path=ZARR, + source='usgs_10m', + crs=CRS, + ) + raw_cpu = raw.copy(deep=True) + + raw.data = np.ascontiguousarray(raw.data) + emin = float(np.nanmin(raw.data)) + emax = 
float(np.nanmax(raw.data)) + print(f"Terrain: {raw.shape}, elevation {emin:.0f} m to {emax:.0f} m") + + terrain = raw.rtx.to_cupy() + return terrain, raw_cpu + + +# --------------------------------------------------------------------------- +# Wind interpolation onto DEM grid +# --------------------------------------------------------------------------- +def interpolate_wind(wind_data, terrain_da): + """Interpolate coarse wind grid onto the full-resolution DEM grid. + + Uses scipy RegularGridInterpolator to bilinear-interpolate wind speed + and direction from the Open-Meteo grid (~15x15) onto every DEM pixel. + Returns cupy arrays: (wind_speed, wind_u, wind_v) on the DEM grid. + """ + from scipy.interpolate import RegularGridInterpolator + + if wind_data is None: + shape = terrain_da.shape + z = cp.zeros(shape, dtype=cp.float32) + return z, z, z + + # Wind grid is in WGS84 lat/lon — reproject DEM pixel coords to WGS84 + transformer = Transformer.from_crs(CRS, "EPSG:4326", always_xy=True) + xs_crs = terrain_da.x.values + ys_crs = terrain_da.y.values + grid_x_crs, grid_y_crs = np.meshgrid(xs_crs, ys_crs) + lons_dem, lats_dem = transformer.transform( + grid_x_crs.ravel().astype(np.float64), + grid_y_crs.ravel().astype(np.float64), + ) + lons_dem = lons_dem.reshape(terrain_da.shape) + lats_dem = lats_dem.reshape(terrain_da.shape) + + w_lats = wind_data['lats'] + w_lons = wind_data['lons'] + + interp_speed = RegularGridInterpolator( + (w_lats, w_lons), wind_data['speed'], + method='linear', bounds_error=False, fill_value=None, + ) + interp_u = RegularGridInterpolator( + (w_lats, w_lons), wind_data['u'], + method='linear', bounds_error=False, fill_value=None, + ) + interp_v = RegularGridInterpolator( + (w_lats, w_lons), wind_data['v'], + method='linear', bounds_error=False, fill_value=None, + ) + + pts = np.column_stack([lats_dem.ravel(), lons_dem.ravel()]) + speed = interp_speed(pts).reshape(terrain_da.shape).astype(np.float32) + u = 
interp_u(pts).reshape(terrain_da.shape).astype(np.float32) + v = interp_v(pts).reshape(terrain_da.shape).astype(np.float32) + + avg_speed = float(np.nanmean(speed)) + print(f" Wind interpolated to DEM grid: mean speed {avg_speed:.1f} m/s") + + return cp.asarray(speed), cp.asarray(u), cp.asarray(v) + + +def compute_wind_exposure(aspect_gpu, wind_u, wind_v): + """Compute wind-slope alignment: positive when wind blows uphill. + + The aspect gives the downhill direction. Wind blowing opposite to the + downhill direction (i.e. uphill) drives fire spread. We compute: + exposure = -dot(wind_unit, downhill_unit) + Ranges from -1 (wind blowing downhill) to +1 (wind blowing uphill). + """ + asp_rad = cp.deg2rad(aspect_gpu) + down_x = cp.sin(asp_rad) + down_y = cp.cos(asp_rad) + + wind_mag = cp.sqrt(wind_u**2 + wind_v**2) + wind_mag = cp.where(wind_mag == 0, 1.0, wind_mag) + wu = wind_u / wind_mag + wv = wind_v / wind_mag + + exposure = -(wu * down_x + wv * down_y) + + speed_norm = cp.sqrt(wind_u**2 + wind_v**2) + speed_max = speed_norm.max() + if speed_max > 0: + exposure = exposure * (speed_norm / speed_max) + + return exposure + + +# --------------------------------------------------------------------------- +# cuML: K-Means terrain classification +# --------------------------------------------------------------------------- +def terrain_classification(ds): + """Cluster pixels by (elevation, slope, aspect) into landform classes.""" + from cuml import KMeans + + elev = ds['elevation'].data + slp = ds['slope'].data + asp = ds['aspect'].data + shape = elev.shape + + e_flat = elev.ravel() + s_flat = slp.ravel() + a_flat = asp.ravel() + + valid = ~(cp.isnan(e_flat) | cp.isnan(s_flat) | cp.isnan(a_flat)) + n_valid = int(valid.sum()) + print(f" K-Means: {n_valid:,} valid pixels, {N_TERRAIN_CLASSES} classes") + + features = cp.stack([e_flat[valid], s_flat[valid], a_flat[valid]], axis=1) + + fmin = features.min(axis=0) + fmax = features.max(axis=0) + frange = fmax - fmin + 
frange[frange == 0] = 1.0 + features = (features - fmin) / frange + + km = KMeans(n_clusters=N_TERRAIN_CLASSES, max_iter=300, random_state=42) + labels = km.fit_predict(features) + + result = cp.full(e_flat.shape[0], cp.nan, dtype=cp.float32) + result[valid] = labels.astype(cp.float32) + result = result.reshape(shape) + + print(f" K-Means done — cluster sizes: " + f"{', '.join(str(int((labels == i).sum())) for i in range(N_TERRAIN_CLASSES))}") + return result + + +# --------------------------------------------------------------------------- +# cuML: KDE fire density heatmap +# --------------------------------------------------------------------------- +def fire_density_layer(fire_data, terrain_da): + """Build a fire density heatmap via cuml.neighbors.KernelDensity.""" + from cuml.neighbors import KernelDensity + + features = fire_data.get('features', []) + if not features: + print(" KDE: no fire detections — returning zeros") + return cp.zeros(terrain_da.shape, dtype=cp.float32) + + centroids = [] + for f in features: + geom = f['geometry'] + if geom['type'] == 'Point': + centroids.append(geom['coordinates'][:2]) + elif geom['type'] in ('Polygon', 'MultiPolygon'): + coords = geom['coordinates'] + ring = coords[0][0] if geom['type'] == 'MultiPolygon' else coords[0] + cx = sum(c[0] for c in ring) / len(ring) + cy = sum(c[1] for c in ring) / len(ring) + centroids.append([cx, cy]) + centroids = np.array(centroids, dtype=np.float64) + + if fire_data.get('crs') is None or 'EPSG:4326' in str(fire_data.get('crs', '')): + transformer = Transformer.from_crs("EPSG:4326", CRS, always_xy=True) + xs, ys = transformer.transform(centroids[:, 0], centroids[:, 1]) + centroids = np.column_stack([xs, ys]) + + print(f" KDE: fitting on {len(centroids)} fire centroids, " + f"bandwidth={KDE_BANDWIDTH:.0f} m") + + centroids_gpu = cp.asarray(centroids, dtype=cp.float32) + kde = KernelDensity(bandwidth=KDE_BANDWIDTH, kernel='gaussian') + kde.fit(centroids_gpu) + + xs = 
cp.asarray(terrain_da.x.values, dtype=cp.float32) + ys = cp.asarray(terrain_da.y.values, dtype=cp.float32) + grid_x, grid_y = cp.meshgrid(xs, ys) + query = cp.stack([grid_x.ravel(), grid_y.ravel()], axis=1) + + log_density = kde.score_samples(query) + density = cp.exp(log_density).reshape(terrain_da.shape) + + dmin, dmax = density.min(), density.max() + if dmax > dmin: + density = (density - dmin) / (dmax - dmin) + + elev = terrain_da.data if hasattr(terrain_da.data, '__cuda_array_interface__') \ + else cp.asarray(terrain_da.data) + density = cp.where(cp.isnan(elev), cp.nan, density) + + print(f" KDE done — heatmap shape {density.shape}") + return density + + +# --------------------------------------------------------------------------- +# cuML: Random Forest fire risk prediction +# --------------------------------------------------------------------------- +def fire_risk_layer(ds, fire_data, terrain_da, raw_cpu, + wind_speed_gpu=None, wind_exposure_gpu=None): + """Train RF to predict fire probability from terrain + wind features.""" + from cuml.ensemble import RandomForestClassifier + + features_list = fire_data.get('features', []) + if not features_list: + print(" RF: no fire detections — returning zeros") + return cp.zeros(terrain_da.shape, dtype=cp.float32) + + shape = terrain_da.shape + elev_cpu = raw_cpu.values + xs_cpu = raw_cpu.x.values + ys_cpu = raw_cpu.y.values + + centroids = [] + for f in features_list: + geom = f['geometry'] + if geom['type'] == 'Point': + centroids.append(geom['coordinates'][:2]) + elif geom['type'] in ('Polygon', 'MultiPolygon'): + coords = geom['coordinates'] + ring = coords[0][0] if geom['type'] == 'MultiPolygon' else coords[0] + cx = sum(c[0] for c in ring) / len(ring) + cy = sum(c[1] for c in ring) / len(ring) + centroids.append([cx, cy]) + centroids = np.array(centroids, dtype=np.float64) + + if fire_data.get('crs') is None or 'EPSG:4326' in str(fire_data.get('crs', '')): + transformer = Transformer.from_crs("EPSG:4326", CRS, 
always_xy=True) + cxs, cys = transformer.transform(centroids[:, 0], centroids[:, 1]) + centroids = np.column_stack([cxs, cys]) + + x_sorted = xs_cpu if xs_cpu[-1] > xs_cpu[0] else xs_cpu[::-1] + y_sorted = ys_cpu if ys_cpu[-1] > ys_cpu[0] else ys_cpu[::-1] + x_flip = xs_cpu[-1] < xs_cpu[0] + y_flip = ys_cpu[-1] < ys_cpu[0] + + col_idx = np.searchsorted(x_sorted, centroids[:, 0]).clip(0, len(xs_cpu) - 1) + row_idx = np.searchsorted(y_sorted, centroids[:, 1]).clip(0, len(ys_cpu) - 1) + if x_flip: + col_idx = len(xs_cpu) - 1 - col_idx + if y_flip: + row_idx = len(ys_cpu) - 1 - row_idx + + fire_mask = np.zeros(shape, dtype=bool) + for r, c in zip(row_idx, col_idx): + r0, r1 = max(0, r - 3), min(shape[0], r + 4) + c0, c1 = max(0, c - 3), min(shape[1], c + 4) + fire_mask[r0:r1, c0:c1] = True + + elev_gpu = ds['elevation'].data + slp_gpu = ds['slope'].data + asp_gpu = ds['aspect'].data + south_score = cp.cos(cp.deg2rad(asp_gpu - 180.0)) + + feature_arrays = [ + elev_gpu.ravel(), + slp_gpu.ravel(), + asp_gpu.ravel(), + south_score.ravel(), + ] + feature_names = ['elevation', 'slope', 'aspect', 'south_score'] + + if wind_speed_gpu is not None: + feature_arrays.append(wind_speed_gpu.ravel()) + feature_names.append('wind_speed') + if wind_exposure_gpu is not None: + feature_arrays.append(wind_exposure_gpu.ravel()) + feature_names.append('wind_exposure') + + feat_stack = cp.stack(feature_arrays, axis=1) + + valid_mask = ~cp.isnan(feat_stack).any(axis=1) + valid_np = cp.asnumpy(valid_mask.reshape(shape)) + + pos_mask = fire_mask & valid_np + neg_mask = ~fire_mask & valid_np & ~np.isnan(elev_cpu) + + pos_idx = np.flatnonzero(pos_mask.ravel()) + neg_idx = np.flatnonzero(neg_mask.ravel()) + + n_pos = len(pos_idx) + if n_pos == 0: + print(" RF: no valid fire pixels after mapping — returning zeros") + return cp.zeros(shape, dtype=cp.float32) + + n_neg = min(len(neg_idx), n_pos * 3) + rng = np.random.default_rng(42) + neg_sample = rng.choice(neg_idx, size=n_neg, replace=False) + + 
train_idx = cp.asarray(np.concatenate([pos_idx, neg_sample])) + labels = cp.concatenate([cp.ones(n_pos, dtype=cp.int32), + cp.zeros(n_neg, dtype=cp.int32)]) + + X_train = feat_stack[train_idx] + + print(f" RF: {n_pos} fire pixels, {n_neg} non-fire pixels, " + f"{RF_N_TREES} trees, max_depth={RF_MAX_DEPTH}") + print(f" RF features: {', '.join(feature_names)}") + + rf = RandomForestClassifier( + n_estimators=RF_N_TREES, + max_depth=RF_MAX_DEPTH, + random_state=42, + ) + rf.fit(X_train, labels) + + valid_features = feat_stack[valid_mask] + proba = rf.predict_proba(valid_features) + fire_prob_col = proba[:, 1] + + result = cp.full(feat_stack.shape[0], cp.nan, dtype=cp.float32) + result[valid_mask] = fire_prob_col.astype(cp.float32) + result = result.reshape(shape) + + pct = cp.nanmean(result) + print(f" RF done — mean fire probability: {float(pct):.3f}") + return result + + +# --------------------------------------------------------------------------- +# cuML: HDBSCAN building cluster detection +# --------------------------------------------------------------------------- +def cluster_buildings(bldg_data, terrain_da): + """Discover neighbourhood clusters from building centroids using HDBSCAN. + + Returns (labels_per_building, cluster_raster). + - labels_per_building: numpy int array, -1 = noise + - cluster_raster: cupy 2D float array painted with cluster IDs for the + explore() overlay (NaN where no buildings). 
+ """ + from cuml.cluster import HDBSCAN + + features = bldg_data.get('features', []) + if not features: + return np.array([]), cp.full(terrain_da.shape, cp.nan, dtype=cp.float32) + + # Extract building centroids (WGS84 → CRS) + lons, lats = [], [] + for f in features: + coords = f['geometry']['coordinates'] + ring = coords[0][0] if f['geometry']['type'] == 'MultiPolygon' else coords[0] + lons.append(sum(c[0] for c in ring) / len(ring)) + lats.append(sum(c[1] for c in ring) / len(ring)) + + transformer = Transformer.from_crs("EPSG:4326", CRS, always_xy=True) + pxs, pys = transformer.transform( + np.asarray(lons, dtype=np.float64), + np.asarray(lats, dtype=np.float64), + ) + centroids = np.column_stack([pxs, pys]).astype(np.float32) + + print(f" HDBSCAN: {len(centroids):,} buildings, " + f"min_cluster_size={HDBSCAN_MIN_CLUSTER}, " + f"min_samples={HDBSCAN_MIN_SAMPLES}") + + centroids_gpu = cp.asarray(centroids) + model = HDBSCAN( + min_cluster_size=HDBSCAN_MIN_CLUSTER, + min_samples=HDBSCAN_MIN_SAMPLES, + ) + labels_gpu = model.fit_predict(centroids_gpu) + labels = cp.asnumpy(labels_gpu) + + n_clusters = int(labels.max()) + 1 + n_noise = int((labels == -1).sum()) + print(f" HDBSCAN done — {n_clusters} clusters, {n_noise} outliers") + + # Build a raster layer: paint each DEM pixel with the cluster ID of the + # nearest building (within 100 m), so the layer is visible in explore(). + xs = terrain_da.x.values + ys = terrain_da.y.values + + # For performance, use a simple rasterisation: map each building centroid + # to its nearest pixel and paint it. 
+ x_sorted = xs if xs[-1] > xs[0] else xs[::-1] + y_sorted = ys if ys[-1] > ys[0] else ys[::-1] + x_flip = xs[-1] < xs[0] + y_flip = ys[-1] < ys[0] + + cols = np.searchsorted(x_sorted, centroids[:, 0]).clip(0, len(xs) - 1) + rows = np.searchsorted(y_sorted, centroids[:, 1]).clip(0, len(ys) - 1) + if x_flip: + cols = len(xs) - 1 - cols + if y_flip: + rows = len(ys) - 1 - rows + + raster = np.full(terrain_da.shape, np.nan, dtype=np.float32) + for i in range(len(labels)): + if labels[i] >= 0: + raster[rows[i], cols[i]] = float(labels[i]) + + return labels, cp.asarray(raster) + + +# --------------------------------------------------------------------------- +# Main +# --------------------------------------------------------------------------- +if __name__ == "__main__": + terrain, raw_cpu = load_terrain() + + # --- Standard terrain layers ---------------------------------------------- + print("Computing terrain analysis layers...") + ds = xr.Dataset({ + 'elevation': terrain.rename(None), + 'slope': slope(terrain), + 'aspect': aspect(terrain), + 'quantile': quantile(terrain), + }) + print(ds) + + # === cuML: K-Means terrain classification ================================= + print("\n--- cuML: K-Means Terrain Classification ---") + tc = terrain_classification(ds) + ds['terrain_class'] = xr.DataArray( + tc, coords=ds['elevation'].coords, dims=ds['elevation'].dims, + ) + + # === Fetch wind data (used for ML features + particle animation) ========== + print("\n--- Fetching wind data ---") + wind = None + try: + from rtxpy import fetch_wind + wind = fetch_wind(BOUNDS, grid_size=15) + except Exception as e: + print(f" Skipping wind: {e}") + + print("\n--- Interpolating wind onto DEM grid ---") + wind_speed_gpu, wind_u_gpu, wind_v_gpu = interpolate_wind(wind, terrain) + ds['wind_speed'] = xr.DataArray( + wind_speed_gpu, + coords=ds['elevation'].coords, dims=ds['elevation'].dims, + ) + + wind_exposure_gpu = compute_wind_exposure( + ds['aspect'].data, wind_u_gpu, wind_v_gpu, + ) 
+ ds['wind_exposure'] = xr.DataArray( + wind_exposure_gpu, + coords=ds['elevation'].coords, dims=ds['elevation'].dims, + ) + + # === Fetch fire data ====================================================== + print("\n--- Fetching FIRMS fire detections ---") + fire_data = {'features': []} + try: + fire_data = fetch_firms( + bounds=BOUNDS, date_span='7d', + cache_path=CACHE / "bay_area_fires.geojson", + crs=CRS, + ) + n_fires = len(fire_data.get('features', [])) + print(f" {n_fires} fire detections in the last 7 days") + except Exception as e: + print(f" Skipping FIRMS: {e}") + + # === cuML: KDE fire density heatmap ======================================= + print("\n--- cuML: KDE Fire Density Heatmap ---") + fd = fire_density_layer(fire_data, terrain) + ds['fire_density'] = xr.DataArray( + fd, coords=ds['elevation'].coords, dims=ds['elevation'].dims, + ) + + # === cuML: Random Forest fire risk prediction ============================= + print("\n--- cuML: Random Forest Fire Risk Prediction ---") + fr = fire_risk_layer( + ds, fire_data, terrain, raw_cpu, + wind_speed_gpu=wind_speed_gpu, + wind_exposure_gpu=wind_exposure_gpu, + ) + ds['fire_risk'] = xr.DataArray( + fr, coords=ds['elevation'].coords, dims=ds['elevation'].dims, + ) + + # === Fetch buildings for HDBSCAN ========================================== + print("\n--- Fetching buildings ---") + bldg_data = {'features': []} + try: + bldg_data = fetch_buildings( + bounds=BOUNDS, source='overture', + cache_path=CACHE / "bay_area_buildings.geojson", + ) + print(f" {len(bldg_data.get('features', []))} buildings") + except Exception as e: + print(f" Skipping buildings: {e}") + + # === cuML: HDBSCAN building cluster detection ============================= + print("\n--- cuML: HDBSCAN Neighbourhood Clustering ---") + bldg_labels, cluster_raster = cluster_buildings(bldg_data, terrain) + ds['urban_cluster'] = xr.DataArray( + cluster_raster, + coords=ds['elevation'].coords, dims=ds['elevation'].dims, + ) + + print(f"\nDataset 
layers: {list(ds.data_vars)}") + print(ds) + + # --- Satellite tiles ------------------------------------------------------ + print("\nLoading satellite tiles...") + ds.rtx.place_tiles('satellite', z='elevation', zoom=13) + + # --- Load or build scene meshes ------------------------------------------- + import zarr as _zarr + _has_mesh_cache = False + try: + _store = _zarr.open(str(ZARR), mode='r', use_consolidated=False) + _has_mesh_cache = 'meshes' in _store and len(list(_store['meshes'])) > 0 + _store = None + except Exception: + pass + + if _has_mesh_cache: + ds.rtx.load_meshes(ZARR) + else: + # --- Buildings coloured by HDBSCAN cluster ---------------------------- + if bldg_data.get('features') and len(bldg_labels) > 0: + # Group buildings by cluster label + unique_labels = sorted(set(bldg_labels)) + for label in unique_labels: + mask = bldg_labels == label + feats = [f for f, m in zip(bldg_data['features'], mask) if m] + if not feats: + continue + + if label == -1: + gid = 'bldg_outlier' + color = OUTLIER_COLOR + tag = 'outlier' + else: + gid = f'bldg_c{label}' + color = CLUSTER_COLORS[label % len(CLUSTER_COLORS)] + tag = f'cluster {label}' + + try: + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", + message="place_geojson called before") + info = ds.rtx.place_geojson( + {"type": "FeatureCollection", "features": feats}, + z='elevation', height=8, extrude=True, merge=True, + geometry_id=gid, color=color, + ) + print(f" Placed {info['geometries']} {tag} buildings") + except Exception as e: + print(f" Skipping {tag}: {e}") + + # --- Roads ------------------------------------------------------------ + try: + for rt, gid, clr in [('major', 'road_major', (0.10, 0.10, 0.10)), + ('minor', 'road_minor', (0.55, 0.55, 0.55))]: + data = fetch_roads(bounds=BOUNDS, road_type=rt, source='overture', + cache_path=CACHE / f"bay_area_roads_{rt}.geojson") + info = ds.rtx.place_roads(data, z='elevation', + geometry_id=gid, color=clr) + print(f" Placed 
{info['geometries']} Overture {rt} road geometries") + except ImportError: + print("Skipping Overture roads (pip install duckdb)") + except Exception as e: + print(f"Skipping roads: {e}") + + # --- Water features --------------------------------------------------- + try: + water_data = fetch_water(bounds=BOUNDS, water_type='all', + cache_path=CACHE / "bay_area_water.geojson") + results = ds.rtx.place_water(water_data, z='elevation') + for cat, info in results.items(): + print(f" Placed {info['geometries']} {cat} water features") + except Exception as e: + print(f"Skipping water: {e}") + + # --- Save meshes ------------------------------------------------------ + try: + ds.rtx.save_meshes(ZARR) + except Exception as e: + print(f"Could not save mesh cache: {e}") + + # --- FIRMS fire markers --------------------------------------------------- + if fire_data.get('features'): + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", message="place_geojson called before") + fire_info = ds.rtx.place_geojson( + fire_data, z='elevation', height=20, + geometry_id='fire', color=(1.0, 0.25, 0.0, 3.0), + extrude=True, merge=True, + ) + print(f"Placed {fire_info['geometries']} FIRMS fire markers") + + # --- Launch explorer ------------------------------------------------------ + print("\n" + "=" * 65) + print("BAY AREA WILDFIRE & URBAN ANALYSIS — cuML + rtxpy") + print("=" * 65) + print(" G cycle layers (elevation → terrain_class → wind_speed") + print(" → wind_exposure → fire_density") + print(" → fire_risk → urban_cluster)") + print(" U toggle satellite overlay") + print(" Shift+W toggle wind particles") + print(" O / V set observer / toggle viewshed") + print(" H full help overlay") + print("=" * 65) + + ds.rtx.explore( + z='elevation', + scene_zarr=ZARR, + width=2048, + height=1600, + render_scale=0.5, + color_stretch='cbrt', + subsample=1, + wind_data=wind, + ao_samples=1, + repl=True, + ) + + print("Done") diff --git a/examples/explore_zarr.py 
b/examples/explore_zarr.py new file mode 100644 index 0000000..0b3d62d --- /dev/null +++ b/examples/explore_zarr.py @@ -0,0 +1,252 @@ +"""Explore a massive zarr DEM interactively by loading only a windowed region. + +Opens the USGS 10 m continental DEM stored as a zarr archive and loads a +small window around a chosen center point. The window is subsampled, +pushed to GPU, and launched in the rtxpy interactive viewer. + +The zarr store is never fully read — only the chunks that overlap the +requested window are fetched from disk. + +For zarr v3 stores with GPU-compatible codecs (e.g. ZstdCodec), chunks +are decompressed directly on the GPU via ``zarr.config.enable_gpu()``. +For zarr v2 stores (blosc/numcodecs), chunks are read on CPU and then +transferred to GPU with ``cupy.asarray()``. + +Usage: + # Default: centers on Birmingham, Alabama + python explore_zarr.py + + # Custom center (lon, lat) and window size in degrees + python explore_zarr.py --lon -118.25 --lat 34.08 --size 0.25 + + # Adjust subsample factor (higher = coarser but faster) + python explore_zarr.py --subsample 8 + +Requirements: + pip install rtxpy[all] xarray zarr cupy +""" + +import argparse +import time + +import cupy as cp +import numpy as np +import rioxarray # noqa: F401 — registers .rio accessor +import xarray as xr +import zarr + +import rtxpy + + +ZARR_PATH = "/home/brendan/elevation/usgs10m_dem_c6.zarr" + + +def _open_zarr_array(zarr_path): + """Open the zarr store, enabling GPU-direct reads when possible. + + Returns (zarr.Array, zarr_format_version, GeoTransform tuple, CF encoding, CRS WKT). 
+ """ + root = zarr.open_group(zarr_path, mode="r") + z = root["usgs10m_dem"] + + # Parse GeoTransform: "x_origin dx rot_x y_origin rot_y dy" + gt = root["spatial_ref"].attrs["GeoTransform"] + parts = [float(v) for v in gt.split()] + x_origin, dx, _, y_origin, _, dy = parts + + # Read CRS for attaching to the DataArray + crs_wkt = root["spatial_ref"].attrs.get("crs_wkt", None) + + # CF encoding: int32 centimeters → float metres + scale_factor = float(z.attrs.get("scale_factor", 1.0)) + add_offset = float(z.attrs.get("add_offset", 0.0)) + fill_value = z.fill_value + + fmt = z.metadata.zarr_format if hasattr(z.metadata, "zarr_format") else 2 + + # Enable GPU-direct reads for zarr v3 stores with GPU-native codecs + if fmt >= 3: + zarr.config.enable_gpu() + # Re-open so the GPU prototype is active + root = zarr.open_group(zarr_path, mode="r") + z = root["usgs10m_dem"] + print(f"Zarr v{fmt} — GPU-direct reads enabled") + else: + print(f"Zarr v{fmt} (blosc) — CPU read + GPU transfer") + + return z, fmt, (x_origin, dx, y_origin, dy), (scale_factor, add_offset, fill_value), crs_wkt + + +def _utm_epsg(lon, lat): + """Return the UTM EPSG code for a given lon/lat.""" + zone = int((lon + 180) / 6) + 1 + return 32600 + zone if lat >= 0 else 32700 + zone + + +def load_window(zarr_path, center_lon, center_lat, size_deg, subsample): + """Load a subsampled elevation window from the zarr DEM. + + Reads the geographic-CRS chunk from disk, applies CF decoding, then + reprojects to the local UTM zone so coordinates are in metres. + + Parameters + ---------- + zarr_path : str + Path to the zarr store. + center_lon, center_lat : float + Center of the window in WGS84 degrees. + size_deg : float + Half-width of the window in degrees (window is 2*size_deg on a side). + subsample : int + Take every Nth pixel to reduce resolution. + + Returns + ------- + xarray.DataArray + Elevation window on GPU (cupy-backed), with projected metric coords. 
+ """ + z, fmt, (x_origin, dx, y_origin, dy), (scale, offset, fill), crs_wkt = \ + _open_zarr_array(zarr_path) + + H, W = z.shape + lon_min = center_lon - size_deg + lon_max = center_lon + size_deg + lat_min = center_lat - size_deg + lat_max = center_lat + size_deg + + # Pixel indices from GeoTransform (x ascending, y descending) + xi0 = max(int((lon_min - x_origin) / dx), 0) + xi1 = min(int((lon_max - x_origin) / dx) + 1, W) + yi0 = max(int((lat_max - y_origin) / dy), 0) # dy is negative + yi1 = min(int((lat_min - y_origin) / dy) + 1, H) + + raw_w = xi1 - xi0 + raw_h = yi1 - yi0 + out_w = raw_w // subsample + out_h = raw_h // subsample + print(f"Window: lon [{lon_min:.3f}, {lon_max:.3f}], " + f"lat [{lat_min:.3f}, {lat_max:.3f}]") + print(f"Pixel window: {raw_w:,} x {raw_h:,} " + f"(subsample {subsample}x -> {out_w:,} x {out_h:,})") + + # Read only the chunks that overlap the window + t0 = time.time() + data = z[yi0:yi1:subsample, xi0:xi1:subsample] + dt_read = time.time() - t0 + + # Ensure numpy for CF decoding + reprojection + if not isinstance(data, np.ndarray): + data = cp.asnumpy(data) + data = data.astype(np.float32) + data[data == fill] = np.nan + data = data * scale + offset + print(f"Read: {dt_read:.2f}s") + + # Build geographic DataArray with source CRS + x_coords = np.arange(xi0, xi1, subsample) * dx + x_origin + y_coords = np.arange(yi0, yi1, subsample) * dy + y_origin + + da = xr.DataArray( + data, + dims=("y", "x"), + coords={"y": y_coords, "x": x_coords}, + ) + if crs_wkt is not None: + da = da.rio.write_crs(crs_wkt) + da = da.rio.set_spatial_dims(x_dim="x", y_dim="y") + + # Reproject to local UTM so coordinates are in metres + target_epsg = _utm_epsg(center_lon, center_lat) + t1 = time.time() + da = da.rio.reproject(f"EPSG:{target_epsg}") + dt_proj = time.time() - t1 + print(f"Reprojected to EPSG:{target_epsg} in {dt_proj:.2f}s") + + # Transfer to GPU + cpu_data = np.ascontiguousarray(da.values) + gpu_data = cp.asarray(cpu_data) + out = 
da.copy(data=gpu_data) + + elev_min = float(cp.nanmin(gpu_data)) + elev_max = float(cp.nanmax(gpu_data)) + print(f"Shape: {out.shape}, {gpu_data.nbytes / 1e6:.1f} MB, " + f"elevation: {elev_min:.0f}m to {elev_max:.0f}m") + + return out + + +def make_terrain_loader(zarr_path, size_deg, subsample, center_lon, center_lat): + """Create a terrain loader callback for dynamic chunk streaming. + + The viewer passes the camera position in the DataArray's CRS (UTM + metres after reprojection). This closure inverse-projects back to + geographic lon/lat before fetching a new window from the zarr store. + + Parameters + ---------- + zarr_path : str + Path to the zarr store. + size_deg : float + Half-width of the window in degrees. + subsample : int + Subsample factor. + center_lon, center_lat : float + Initial center in WGS84 degrees (used to pick the UTM zone for + the inverse projection). + + Returns + ------- + callable + ``loader(utm_x, utm_y) -> xr.DataArray | None`` + """ + from pyproj import Transformer + + target_epsg = _utm_epsg(center_lon, center_lat) + to_lonlat = Transformer.from_crs( + f"EPSG:{target_epsg}", "EPSG:4326", always_xy=True, + ) + + def loader(cam_x, cam_y): + try: + lon, lat = to_lonlat.transform(cam_x, cam_y) + return load_window(zarr_path, lon, lat, size_deg, subsample) + except Exception as e: + print(f"Terrain loader error: {e}") + return None + + return loader + + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + description="Explore a large zarr DEM interactively." 
+ ) + parser.add_argument("--lon", type=float, default=-86.8, + help="Center longitude (default: -86.8, Birmingham AL)") + parser.add_argument("--lat", type=float, default=33.5, + help="Center latitude (default: 33.5, Birmingham AL)") + parser.add_argument("--size", type=float, default=0.25, + help="Half-width of window in degrees (default: 0.25)") + parser.add_argument("--subsample", type=int, default=4, + help="Subsample factor (default: 4)") + parser.add_argument("--zarr", type=str, default=ZARR_PATH, + help="Path to zarr store") + args = parser.parse_args() + + terrain = load_window( + args.zarr, args.lon, args.lat, args.size, args.subsample, + ) + + loader = make_terrain_loader(args.zarr, args.size, args.subsample, args.lon, args.lat) + + print(f"\nLaunching explore...\n") + terrain.rtx.explore( + width=2048, + height=1600, + render_scale=0.5, + color_stretch='cbrt', + terrain_loader=loader, + repl=True, + ) + + print("Done") diff --git a/examples/jupyter_explore_demo.ipynb b/examples/jupyter_explore_demo.ipynb new file mode 100644 index 0000000..de6ddd7 --- /dev/null +++ b/examples/jupyter_explore_demo.ipynb @@ -0,0 +1,536 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "intro", + "metadata": {}, + "source": [ + "# Interactive Terrain Exploration in Jupyter\n", + "\n", + "This notebook demonstrates rtxpy's `explore()` viewer running directly inside Jupyter.\n", + "The same GPU-accelerated ray tracing engine that powers the GLFW desktop viewer now\n", + "streams frames into an `ipywidgets.Image` widget with full mouse and keyboard controls.\n", + "\n", + "**Requirements:**\n", + "```\n", + "pip install rtxpy[notebook] xarray rioxarray xrspatial\n", + "```\n", + "\n", + "**Controls** (click the image first to focus):\n", + "- **W/A/S/D** or arrow keys: Move camera\n", + "- **Q/E**: Move up/down\n", + "- **I/J/K/L**: Look around\n", + "- **Click + drag**: Pan (slippy-map style)\n", + "- **Scroll wheel**: Zoom (FOV)\n", + "- **C**: Cycle colormap\n", + 
"- **T**: Toggle shadows\n", + "- **G**: Cycle terrain layers\n", + "- **M**: Toggle minimap\n", + "- **H**: Toggle help overlay\n", + "- **+/-**: Adjust movement speed" + ] + }, + { + "cell_type": "markdown", + "id": "setup-header", + "metadata": {}, + "source": [ + "## 1. Load Terrain Data\n", + "\n", + "We'll download SRTM elevation data for Crater Lake, Oregon — a collapsed volcanic\n", + "caldera with dramatic relief that looks great in 3D." + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "imports", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "rtxpy version: 0.0.9\n" + ] + } + ], + "source": [ + "import numpy as np\n", + "import xarray as xr\n", + "from pathlib import Path\n", + "\n", + "import rtxpy\n", + "from rtxpy import fetch_dem\n", + "\n", + "print(f\"rtxpy version: {rtxpy.__version__}\")" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "load-terrain", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Using cached DEM: crater_lake_national_park.zarr\n", + "Shape: (620, 763)\n", + "Elevation: 1118m to 2710m\n" + ] + } + ], + "source": [ + "BOUNDS = (-122.3, 42.8, -121.9, 43.0) # Crater Lake, OR\n", + "CRS = 'EPSG:5070' # NAD83 / Conus Albers\n", + "CACHE = Path.cwd()\n", + "\n", + "terrain = fetch_dem(\n", + " bounds=BOUNDS,\n", + " output_path=CACHE / 'crater_lake_national_park.zarr',\n", + " source='srtm',\n", + " crs=CRS,\n", + ")\n", + "\n", + "# Subsample for faster interactive performance\n", + "terrain = terrain[::2, ::2]\n", + "terrain.data = np.ascontiguousarray(terrain.data)\n", + "\n", + "# Get stats before GPU transfer (cupy arrays don't allow implicit float())\n", + "elev_min = float(terrain.min())\n", + "elev_max = float(terrain.max())\n", + "\n", + "# Transfer to GPU\n", + "terrain = terrain.rtx.to_cupy()\n", + "\n", + "print(f\"Shape: {terrain.shape}\")\n", + "print(f\"Elevation: {elev_min:.0f}m to 
{elev_max:.0f}m\")" + ] + }, + { + "cell_type": "markdown", + "id": "basic-header", + "metadata": {}, + "source": [ + "## 2. Basic Interactive Viewer\n", + "\n", + "Call `explore()` just like you would from a script. In Jupyter it automatically\n", + "returns a widget instead of opening a GLFW window." + ] + }, + { + "cell_type": "markdown", + "id": "dataset-header", + "metadata": {}, + "source": [ + "## 3. Multi-Layer Dataset\n", + "\n", + "Build a Dataset with derived layers (slope, aspect, quantile). Press **G** to cycle\n", + "which layer drives the terrain coloring." + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "build-dataset", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + " Size: 9MB\n", + "Dimensions: (x: 763, y: 620)\n", + "Coordinates:\n", + " * x (x) float64 6kB -2.112e+06 -2.112e+06 ... -2.074e+06 -2.074e+06\n", + " * y (y) float64 5kB 2.516e+06 2.516e+06 ... 2.486e+06 2.486e+06\n", + " spatial_ref int64 8B 0\n", + "Data variables:\n", + " elevation (y, x) float64 4MB array([[nan, nan, nan, ..., nan, nan, nan...\n", + " slope (y, x) float32 2MB array([[nan, nan, nan, ..., nan, nan, nan...\n", + " aspect (y, x) float32 2MB array([[nan, nan, nan, ..., nan, nan, nan...\n", + " quantile (y, x) float32 2MB array([[nan, nan, nan, ..., nan, nan, nan...\n" + ] + } + ], + "source": [ + "from xrspatial import slope, aspect, quantile\n", + "\n", + "ds = xr.Dataset({\n", + " 'elevation': terrain.rename(None),\n", + " 'slope': slope(terrain),\n", + " 'aspect': aspect(terrain),\n", + " 'quantile': quantile(terrain),\n", + "})\n", + "print(ds)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f7a470a3-4231-4382-9697-de79cdb75ea7", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "77e95e5e-e723-4b23-abb5-d2dd07111b50", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + 
"execution_count": null, + "id": "7ee3372a-2f35-4e6d-9b07-579a6db44efb", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "explore-dataset", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Overlay layers: slope, aspect, quantile (press G to cycle)\n", + "OptiX 9.1.0 | RT Core 20 | NVIDIA RTX A6000 (sm_86) | Driver 591.44\n", + "rtxpy viewer (1024x768) — click image to focus, H for help, widget.stop() to exit\n" + ] + }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "6a979bcea0e148cea93ef0ac338d2f91", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "Image(value=b'\\xff\\xd8\\xff\\xe0\\x00\\x10JFIF\\x00\\x01\\x01\\x00\\x00\\x01\\x00\\x01\\x00\\x00\\xff\\xdb\\x00C\\x00\\x05\\x03\\x0…" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "\n", + "\n" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "widget = ds.rtx.explore(\n", + " z='elevation',\n", + " width=1024,\n", + " height=768,\n", + " color_stretch='cbrt',\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "stop-dataset", + "metadata": {}, + "outputs": [], + "source": [ + "widget.stop()" + ] + }, + { + "cell_type": "markdown", + "id": "features-header", + "metadata": {}, + "source": [ + "## 4. Satellite Tiles + 3D Features\n", + "\n", + "Drape satellite imagery on the terrain and place GeoJSON landmarks.\n", + "Press **U** to toggle the satellite overlay, **N** to cycle geometry groups." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "place-features", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + " Tile service: zoom 13, bounds (42.7242, -122.3727) - (43.0760, -121.8290)\n", + " Fetching 156 tiles at zoom 13...\n", + "Placed 4 GeoJSON features\n" + ] + } + ], + "source": [ + "import warnings\n", + "\n", + "# Satellite tiles\n", + "ds.rtx.place_tiles('satellite', z='elevation')\n", + "\n", + "# GeoJSON landmarks around Crater Lake\n", + "crater_lake_geojson = {\n", + " \"type\": \"FeatureCollection\",\n", + " \"features\": [\n", + " {\n", + " \"type\": \"Feature\",\n", + " \"geometry\": {\"type\": \"Point\", \"coordinates\": [-122.163, 42.947]},\n", + " \"properties\": {\"name\": \"Wizard Island\"},\n", + " },\n", + " {\n", + " \"type\": \"Feature\",\n", + " \"geometry\": {\"type\": \"Point\", \"coordinates\": [-122.069, 42.922]},\n", + " \"properties\": {\"name\": \"Phantom Ship\"},\n", + " },\n", + " {\n", + " \"type\": \"Feature\",\n", + " \"geometry\": {\"type\": \"Point\", \"coordinates\": [-122.142, 42.912]},\n", + " \"properties\": {\"name\": \"Rim Village\"},\n", + " },\n", + " {\n", + " \"type\": \"Feature\",\n", + " \"geometry\": {\n", + " \"type\": \"LineString\",\n", + " \"coordinates\": [\n", + " [-122.142, 42.912], [-122.160, 42.918],\n", + " [-122.175, 42.932], [-122.178, 42.948],\n", + " [-122.168, 42.962], [-122.148, 42.976],\n", + " [-122.118, 42.982], [-122.088, 42.978],\n", + " [-122.065, 42.968], [-122.045, 42.952],\n", + " [-122.035, 42.935], [-122.040, 42.918],\n", + " [-122.055, 42.905], [-122.080, 42.897],\n", + " [-122.110, 42.897], [-122.135, 42.903],\n", + " [-122.142, 42.912],\n", + " ],\n", + " },\n", + " \"properties\": {\"name\": \"Rim Drive\"},\n", + " },\n", + " ],\n", + "}\n", + "\n", + "with warnings.catch_warnings():\n", + " warnings.filterwarnings('ignore', message='place_geojson called before')\n", + " info = ds.rtx.place_geojson(\n", + " 
crater_lake_geojson,\n", + " z='elevation',\n", + " height=15.0,\n", + " label_field='name',\n", + " geometry_id='landmark',\n", + " )\n", + "print(f\"Placed {info['geometries']} GeoJSON features\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "fetch-wind", + "metadata": {}, + "outputs": [], + "source": [ + "from rtxpy import fetch_wind\n", + "\n", + "wind = fetch_wind(BOUNDS, grid_size=15)\n", + "wind['n_particles'] = 15000\n", + "wind['max_age'] = 120\n", + "wind['speed_mult'] = 400.0\n", + "\n", + "print(f\"Wind grid: {wind['u'].shape}, \"\n", + " f\"mean speed: {np.sqrt(wind['u']**2 + wind['v']**2).mean():.1f} m/s\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "explore-wind", + "metadata": {}, + "outputs": [], + "source": [ + "widget = ds.rtx.explore(\n", + " z='elevation',\n", + " width=1024,\n", + " height=768,\n", + " mesh_type='voxel',\n", + " color_stretch='cbrt',\n", + " wind_data=wind,\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "stop-wind", + "metadata": {}, + "outputs": [], + "source": [ + "widget.stop()" + ] + }, + { + "cell_type": "markdown", + "id": "advanced-header", + "metadata": {}, + "source": [ + "## 6. Advanced: AO + Denoiser\n", + "\n", + "Enable ambient occlusion and the OptiX AI denoiser for higher quality rendering.\n", + "AO accumulates progressively — the image gets cleaner over time. Toggle at runtime\n", + "with **0** (AO) and **Shift+D** (denoiser)." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "explore-ao", + "metadata": {}, + "outputs": [], + "source": [ + "widget = ds.rtx.explore(\n", + " z='elevation',\n", + " width=1024,\n", + " height=768,\n", + " mesh_type='voxel',\n", + " color_stretch='cbrt',\n", + " ao_samples=1,\n", + " denoise=True,\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "stop-ao", + "metadata": {}, + "outputs": [], + "source": [ + "widget.stop()" + ] + }, + { + "cell_type": "markdown", + "id": "custom-start-header", + "metadata": {}, + "source": [ + "## 7. Custom Camera Start Position\n", + "\n", + "Set a specific camera position and look-at point to start from a particular viewpoint.\n", + "Coordinates are in world units (meters in the projected CRS)." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "explore-custom", + "metadata": {}, + "outputs": [], + "source": [ + "# Get terrain extents for positioning\n", + "H, W = terrain.shape\n", + "spacing_x = terrain.rtx._pixel_spacing_x\n", + "spacing_y = terrain.rtx._pixel_spacing_y\n", + "world_W = W * spacing_x\n", + "world_H = H * spacing_y\n", + "\n", + "# Start looking down into the caldera from the rim\n", + "widget = terrain.rtx.explore(\n", + " width=1024,\n", + " height=768,\n", + " start_position=(world_W * 0.5, world_H * 0.8, elev_max + 500),\n", + " look_at=(world_W * 0.5, world_H * 0.45, elev_min),\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "stop-custom", + "metadata": {}, + "outputs": [], + "source": [ + "widget.stop()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.14.2" + } + }, + 
"nbformat": 4, + "nbformat_minor": 5 +} diff --git a/examples/nyc_tour.py b/examples/nyc_tour.py new file mode 100644 index 0000000..bd34af5 --- /dev/null +++ b/examples/nyc_tour.py @@ -0,0 +1,68 @@ +"""NYC circular flyover tour. + +Orbits around the five boroughs in a continuous loop. +The camera circles the study area, always looking toward the center. + +Usage: + python new_york_city.py --tour nyc_tour.py + +Or from the REPL: + v.tour('nyc_tour.py') +""" + +import math + +# --- Orbit parameters --- +# World coordinates: (0,0) is raster top-left corner, +# units are col * pixel_spacing (~26.4 m per pixel). +# Terrain extent: ~47887 x 48310 m +CX = 23_943 # world center x +CY = 24_155 # world center y + +RADIUS = 18_000 # orbit radius (m) — near terrain edges +ALTITUDE = 5_000 # altitude above sea level (m) +GROUND_ELEV = 100 # approximate mean terrain elevation +FOV = 60 +ORBIT_SECONDS = 45 # time for one full revolution +EASE = 'linear' # constant speed around the circle + +# Pitch: aim camera at the center of the terrain +PITCH = -math.degrees(math.atan2(ALTITUDE - GROUND_ELEV, RADIUS)) + +# Loop forever +loop = True + +# --- Generate keyframes around the circle --- +N_KEYFRAMES = 12 # every 30 degrees +tour = [] + +for i in range(N_KEYFRAMES + 1): + # Angle around the circle (start from south, go clockwise + # when viewed from above: S -> W -> N -> E -> S) + theta = 2 * math.pi * i / N_KEYFRAMES + t = ORBIT_SECONDS * i / N_KEYFRAMES + + x = CX + RADIUS * math.sin(theta) + y = CY - RADIUS * math.cos(theta) + + # Yaw: camera looks toward center. + # atan2 gives the angle of (center - camera) from +X axis. + # Engine convention: yaw 0 = +X, yaw 90 = +Y. 
+ dx = CX - x + dy = CY - y + yaw = math.degrees(math.atan2(dy, dx)) + + kf = { + 'time': t, + 'position': [x, y, ALTITUDE], + 'yaw': yaw, + 'pitch': PITCH, + 'fov': FOV, + 'ease': EASE, + } + + # Show all geometry on the first keyframe + if i == 0: + kf['geometry'] = 'all' + + tour.append(kf) diff --git a/examples/playground.py b/examples/playground.py index 76101a7..4beaa31 100644 --- a/examples/playground.py +++ b/examples/playground.py @@ -236,7 +236,88 @@ def load_terrain(): except Exception as e: print(f"Skipping wind: {e}") - print("\nLaunching explore (press G to cycle layers, Shift+W for wind)...\n") + # --- Hydro flow data (off by default, Shift+Y to toggle) --------------- + hydro = None + try: + from xrspatial import fill as _fill + from xrspatial import flow_direction as _flow_direction + from xrspatial import flow_accumulation as _flow_accumulation + from xrspatial import stream_order as _stream_order + from xrspatial import stream_link as _stream_link + from scipy.ndimage import uniform_filter as _uniform_filter + + print("Conditioning DEM for hydrological flow...") + _data = terrain.data + _is_cupy = hasattr(_data, 'get') + _elev = _data.get() if _is_cupy else np.array(_data) + _elev = _elev.astype(np.float32) + _ocean = (_elev == 0.0) | np.isnan(_elev) + _elev[_ocean] = -100.0 + + _smoothed = _uniform_filter(_elev, size=15, mode='nearest') + _smoothed[_ocean] = -100.0 + + if _is_cupy: + import cupy as _cp + _sm = _cp.asarray(_smoothed) + else: + _sm = _smoothed + _filled = _fill(terrain.copy(data=_sm)) + _fd = _filled.data - _sm + _resolved = _filled.data + _fd * 0.01 + if _is_cupy: + _cp.random.seed(0) + _resolved += _cp.random.uniform(0, 0.001, _resolved.shape, + dtype=_cp.float32) + _resolved[_cp.asarray(_ocean)] = -100.0 + else: + np.random.seed(0) + _resolved += np.random.uniform( + 0, 0.001, _resolved.shape).astype(np.float32) + _resolved[_ocean] = -100.0 + + fd = _flow_direction(terrain.copy(data=_resolved)) + fa = _flow_accumulation(fd) + 
so = _stream_order(fd, fa, threshold=50) + sl = _stream_link(fd, fa, threshold=50) + + fd_out, fa_out, so_out = fd.data, fa.data, so.data + if _is_cupy: + _ocean_gpu = _cp.asarray(_ocean) + fd_out[_ocean_gpu] = _cp.nan + fa_out[_ocean_gpu] = _cp.nan + so_out[_ocean_gpu] = _cp.nan + else: + fd_out[_ocean] = np.nan + fa_out[_ocean] = np.nan + so_out[_ocean] = np.nan + + _sl_out = sl.data + if _is_cupy: + _sl_out[_ocean_gpu] = _cp.nan + else: + _sl_out[_ocean] = np.nan + _sl_np = _sl_out.get() if _is_cupy else np.asarray(_sl_out) + _sl_clean = np.nan_to_num(_sl_np, nan=0.0).astype(np.float32) + if _is_cupy: + _sl_clean = _cp.asarray(_sl_clean) + ds['stream_link'] = terrain.copy(data=_sl_clean).rename(None) + + hydro = { + 'flow_dir': fd_out, + 'flow_accum': fa_out, + 'stream_order': so_out, + 'stream_link': _sl_out, + 'accum_threshold': 50, + 'enabled': False, + } + print(f" Flow direction + accumulation computed on " + f"{terrain.shape[0]}x{terrain.shape[1]} grid") + except Exception as e: + print(f"Skipping hydro: {e}") + + print("\nLaunching explore (press G to cycle layers, " + "Shift+W for wind, Shift+Y for hydro)...\n") ds.rtx.explore( z='elevation', scene_zarr=ZARR, @@ -245,6 +326,7 @@ def load_terrain(): height=768, render_scale=0.5, wind_data=wind, + hydro_data=hydro, repl=True, ) diff --git a/examples/trinidad.py b/examples/trinidad.py index 18d72df..1fd05a8 100644 --- a/examples/trinidad.py +++ b/examples/trinidad.py @@ -1,4 +1,4 @@ -"""Trinidad and Tobago — GPU-accelerated terrain exploration.""" +"""Trinidad and Tobago — GPU-accelerated terrain exploration with hydro flow.""" from rtxpy import quickstart quickstart( @@ -7,6 +7,8 @@ crs='EPSG:32620', features=['buildings', 'roads', 'water', 'fire', 'places', 'infrastructure', 'land_use'], + hydro=True, + coast_distance=True, ao_samples=1, denoise=True, ) diff --git a/rtxpy/accessor.py b/rtxpy/accessor.py index 92360db..007ae2c 100644 --- a/rtxpy/accessor.py +++ b/rtxpy/accessor.py @@ -2547,7 +2547,7 @@ def 
explore(self, width=800, height=600, render_scale=0.5, start_position=None, look_at=None, key_repeat_interval=0.05, pixel_spacing_x=None, pixel_spacing_y=None, mesh_type='heightfield', color_stretch='linear', title=None, - subsample=1, wind_data=None, gtfs_data=None, + subsample=1, wind_data=None, hydro_data=None, gtfs_data=None, terrain_loader=None, scene_zarr=None, ao_samples=0, gi_bounces=1, denoise=False, repl=False, tour=None): @@ -2686,6 +2686,7 @@ def explore(self, width=800, height=600, render_scale=0.5, baked_meshes=self._baked_meshes if self._baked_meshes else None, subsample=subsample, wind_data=wind_data, + hydro_data=hydro_data, gtfs_data=gtfs_data, accessor=self, terrain_loader=terrain_loader, @@ -3007,7 +3008,8 @@ def explore(self, z, width=800, height=600, render_scale=0.5, pixel_spacing_x=None, pixel_spacing_y=None, mesh_type='heightfield', color_stretch='linear', title=None, subtitle=None, legend=None, - subsample=1, wind_data=None, gtfs_data=None, + subsample=1, wind_data=None, hydro_data=None, + gtfs_data=None, scene_zarr=None, ao_samples=0, gi_bounces=1, denoise=False, minimap_style=None, minimap_layer=None, @@ -3113,6 +3115,7 @@ def explore(self, z, width=800, height=600, render_scale=0.5, baked_meshes=terrain_da.rtx._baked_meshes if terrain_da.rtx._baked_meshes else None, subsample=subsample, wind_data=wind_data, + hydro_data=hydro_data, gtfs_data=gtfs_data, accessor=terrain_da.rtx, scene_zarr=scene_zarr, diff --git a/rtxpy/analysis/render.py b/rtxpy/analysis/render.py index 04f8397..c8b226a 100644 --- a/rtxpy/analysis/render.py +++ b/rtxpy/analysis/render.py @@ -823,7 +823,7 @@ def _shade_terrain_kernel( color_stretch, rgb_texture, overlay_data, overlay_alpha, overlay_min, overlay_range, - overlay_as_water, + overlay_as_water, overlay_color_lut, instance_ids, geometry_colors, primitive_ids, point_colors, point_color_offsets, ao_factor, gi_color, gi_intensity, @@ -1006,21 +1006,28 @@ def _shade_terrain_kernel( ov_norm = 0.0 elif ov_norm > 1: 
ov_norm = 1.0 - # Apply same color stretch - if color_stretch == 1: - ov_norm = math.pow(ov_norm, 1.0 / 3.0) - elif color_stretch == 2: - ov_norm = math.log(1.0 + ov_norm * 9.0) / math.log(10.0) - elif color_stretch == 3: - ov_norm = math.sqrt(ov_norm) + _use_ov_lut = overlay_color_lut.shape[0] > 1 + if not _use_ov_lut: + # Apply color stretch only with terrain colormap + if color_stretch == 1: + ov_norm = math.pow(ov_norm, 1.0 / 3.0) + elif color_stretch == 2: + ov_norm = math.log(1.0 + ov_norm * 9.0) / math.log(10.0) + elif color_stretch == 3: + ov_norm = math.sqrt(ov_norm) ov_idx = int(ov_norm * 255) if ov_idx > 255: ov_idx = 255 if ov_idx < 0: ov_idx = 0 - ov_r = color_lut[ov_idx, 0] - ov_g = color_lut[ov_idx, 1] - ov_b = color_lut[ov_idx, 2] + if _use_ov_lut: + ov_r = overlay_color_lut[ov_idx, 0] + ov_g = overlay_color_lut[ov_idx, 1] + ov_b = overlay_color_lut[ov_idx, 2] + else: + ov_r = color_lut[ov_idx, 0] + ov_g = color_lut[ov_idx, 1] + ov_b = color_lut[ov_idx, 2] a = overlay_alpha base_r = base_r * (1.0 - a) + ov_r * a base_g = base_g * (1.0 - a) + ov_g * a @@ -1552,6 +1559,7 @@ def _shade_terrain( overlay_data=None, overlay_alpha=0.5, overlay_min=0.0, overlay_range=1.0, overlay_as_water=False, + overlay_color_lut=None, instance_ids=None, geometry_colors=None, primitive_ids=None, point_colors=None, point_color_offsets=None, ao_factor=None, gi_color=None, gi_intensity=2.0, @@ -1585,6 +1593,15 @@ def _shade_terrain( _DUMMY_1x1 = cupy.zeros((1, 1), dtype=np.float32) overlay_data = _DUMMY_1x1 + # Handle overlay color LUT — custom palette for categorical overlays + # Dummy is (1,3); kernel checks shape[0] > 1 to decide whether to use + if overlay_color_lut is None: + if _DUMMY_1x1x3 is None: + _DUMMY_1x1x3 = cupy.zeros((1, 1, 3), dtype=np.float32) + overlay_color_lut = _DUMMY_1x1x3[:, 0, :] # (1, 3) + elif not isinstance(overlay_color_lut, cupy.ndarray): + overlay_color_lut = cupy.asarray(overlay_color_lut, dtype=np.float32) + # Handle geometry_colors for 
per-geometry solid coloring if geometry_colors is None: if _DUMMY_1x4 is None: @@ -1655,7 +1672,7 @@ def _shade_terrain( color_stretch, rgb_texture, overlay_data, overlay_alpha, overlay_min, overlay_range, - overlay_as_water, + overlay_as_water, overlay_color_lut, instance_ids, geometry_colors, primitive_ids, point_colors, point_color_offsets, ao_factor, gi_color, np.float32(gi_intensity), @@ -1788,6 +1805,7 @@ def render( overlay_data=None, overlay_alpha: float = 0.5, overlay_as_water: bool = False, + overlay_color_lut=None, geometry_colors=None, ao_samples: int = 0, ao_radius: Optional[float] = None, @@ -2166,6 +2184,7 @@ def render( overlay_data=d_overlay, overlay_alpha=overlay_alpha, overlay_min=ov_min, overlay_range=ov_range, overlay_as_water=overlay_as_water, + overlay_color_lut=overlay_color_lut, instance_ids=d_instance_ids, geometry_colors=geometry_colors, primitive_ids=d_primitive_ids, point_colors=d_point_colors, diff --git a/rtxpy/engine.py b/rtxpy/engine.py index 3a7e89c..1a4559d 100644 --- a/rtxpy/engine.py +++ b/rtxpy/engine.py @@ -27,6 +27,7 @@ from .viewer.terrain import TerrainState from .viewer.observers import ObserverManager, Observer, OBSERVER_COLORS from .viewer.wind import WindState +from .viewer.hydro import HydroState from .viewer.hud import HUDState from .viewer.keybindings import MOVEMENT_KEYS, SHIFT_BINDINGS, KEY_BINDINGS, SPECIAL_BINDINGS @@ -160,6 +161,151 @@ def _wind_splat_kernel( cuda.atomic.add(output, (py, px, 2), contrib * color_b) + @cuda.jit + def _hydro_splat_kernel( + trails, # (N*T, 2) float32 — (row, col) per trail point + ages, # (N,) int32 — per-particle age + lifetimes, # (N,) int32 — per-particle lifetime + colors, # (N, 3) float32 — per-particle (r, g, b) + radii, # (N,) int32 — per-particle splat radius + trail_len, # int32 scalar — trail points per particle + base_alpha, # float32 scalar — base alpha intensity + min_vis_age, # int32 scalar — minimum visible age + ref_depth, # float32 scalar — depth-scaling reference 
distance + terrain, # (tH, tW) float32 — terrain elevation + output, # (sh, sw, 3) float32 — frame buffer (atomic add) + # Camera basis — scalar args to avoid tiny GPU allocations + cam_x, cam_y, cam_z, + fwd_x, fwd_y, fwd_z, + rgt_x, rgt_y, rgt_z, + up_x, up_y, up_z, + # Projection params + fov_scale, aspect_ratio, + # Terrain/world params + psx, psy, ve, subsample_f, min_depth, + ): + idx = cuda.grid(1) + if idx >= trails.shape[0]: + return + + # Compute alpha on-GPU from per-particle ages/lifetimes + pidx = idx // trail_len + tidx = idx % trail_len + age = ages[pidx] + lifetime = lifetimes[pidx] + + # Trail point not yet laid down + if age <= tidx: + return + + # Fade in / fade out / trail decay + fade_in = (age - min_vis_age) * 0.1 + if fade_in < 0.0: + fade_in = 0.0 + elif fade_in > 1.0: + fade_in = 1.0 + fade_out = (lifetime - age) * 0.05 + if fade_out < 0.0: + fade_out = 0.0 + elif fade_out > 1.0: + fade_out = 1.0 + # Quadratic trail decay — comet-tail effect + t = float(tidx) / float(trail_len) + trail_fade = (1.0 - t) * (1.0 - t) + a = base_alpha * fade_in * fade_out * trail_fade + + # Head glow: bright spark at particle position + if tidx == 0: + a = a * 1.5 + + if a < 1e-6: + return + + row = trails[idx, 0] + col = trails[idx, 1] + + # Terrain Z lookup (nearest-neighbor, clamped) + tH = terrain.shape[0] + tW = terrain.shape[1] + sr = int(row / subsample_f) + sc = int(col / subsample_f) + if sr < 0: + sr = 0 + elif sr >= tH: + sr = tH - 1 + if sc < 0: + sc = 0 + elif sc >= tW: + sc = tW - 1 + z_raw = terrain[sr, sc] + if z_raw != z_raw: # NaN check + z_raw = 0.0 + z_val = z_raw * ve + 3.0 + + # World position + wx = col * psx + wy = row * psy + + # Camera-relative + dx = wx - cam_x + dy = wy - cam_y + dz = z_val - cam_z + + # Depth along forward axis + depth = dx * fwd_x + dy * fwd_y + dz * fwd_z + if depth <= min_depth: + return + + # Depth-scaled alpha: closer = brighter, farther = fainter. 
+ # Prevents zoomed-out over-saturation from dense overlapping particles. + depth_scale = ref_depth / (depth + ref_depth) + a = a * depth_scale + + if a < 1e-6: + return + + inv_depth = 1.0 / (depth + 1e-10) + u_cam = dx * rgt_x + dy * rgt_y + dz * rgt_z + v_cam = dx * up_x + dy * up_y + dz * up_z + u_ndc = u_cam * inv_depth / (fov_scale * aspect_ratio) + v_ndc = v_cam * inv_depth / fov_scale + + sh = output.shape[0] + sw = output.shape[1] + sx = int((u_ndc + 1.0) * 0.5 * sw) + sy = int((1.0 - v_ndc) * 0.5 * sh) + + if sx < 0 or sx >= sw or sy < 0 or sy >= sh: + return + + # Per-particle color and radius + color_r = colors[pidx, 0] + color_g = colors[pidx, 1] + color_b = colors[pidx, 2] + r = radii[pidx] + if r < 1: + r = 1 + # Head glow: +1px radius halo at particle position + if tidx == 0: + r = r + 1 + + # Circular stamp splat + for offy in range(-r, r + 1): + for offx in range(-r, r + 1): + dist_sq = offx * offx + offy * offy + if dist_sq > r * r: + continue + falloff = 1.0 - math.sqrt(dist_sq) / r + px = sx + offx + py = sy + offy + if px < 0 or px >= sw or py < 0 or py >= sh: + continue + contrib = a * falloff + cuda.atomic.add(output, (py, px, 0), contrib * color_r) + cuda.atomic.add(output, (py, px, 1), contrib * color_g) + cuda.atomic.add(output, (py, px, 2), contrib * color_b) + + def _glfw_to_key(glfw_key, mods): """Translate a GLFW key code + modifiers to the string format used by _handle_key_press / _handle_key_release. @@ -717,6 +863,53 @@ def fn(v): _add_overlay(v, name, data) self._submit(fn) + def add_hydro(self, flow_dir, flow_accum, **kwargs): + """Add hydrological flow particle visualization. + + The flow grids should be computed from a depression-filled DEM + (e.g. ``xrspatial.fill()``) so particles follow coherent drainage + paths instead of getting trapped in pits. + + Parameters + ---------- + flow_dir : array-like, shape (H, W) + D8 flow direction grid (1=E, 2=SE, 4=S, 8=SW, 16=W, 32=NW, + 64=N, 128=NE). 
Compute with ``xrspatial.flow_direction()``. + flow_accum : array-like, shape (H, W) + Flow accumulation grid (cell counts or area). Compute with + ``xrspatial.flow_accumulation()``. + **kwargs + Optional overrides: n_particles, max_age, trail_len, speed, + accum_threshold, color, alpha, dot_radius, min_visible_age. + """ + stream_order = kwargs.get('stream_order') + stream_link = kwargs.get('stream_link') + def fn(v): + v._init_hydro(flow_dir, flow_accum, **kwargs) + v._hydro_enabled = True + # Add stream link overlay with palette-matched colors + if stream_link is not None: + sl_np = stream_link.get() if hasattr(stream_link, 'get') else np.asarray(stream_link) + sl_np = np.asarray(sl_np, dtype=np.float32) + # Use stream order values for coloring, NaN where no stream + if stream_order is not None: + so_for_sl = stream_order.get() if hasattr(stream_order, 'get') else np.asarray(stream_order) + so_for_sl = np.asarray(so_for_sl, dtype=np.float32) + max_order = int(np.nanmax(so_for_sl)) if np.any(~np.isnan(so_for_sl)) else 5 + palette_lut = InteractiveViewer._build_stream_palette_lut( + max_order) + sl_color = np.where( + np.isnan(sl_np) | (sl_np <= 0) | np.isnan(so_for_sl) | (so_for_sl <= 0), + np.float32(np.nan), so_for_sl) + _add_overlay(v, 'stream_link', sl_color, + color_lut=palette_lut) + else: + sl_np = np.where(np.isnan(sl_np) | (sl_np <= 0), + np.float32(np.nan), sl_np) + _add_overlay(v, 'stream_link', sl_np) + v._update_frame() + self._submit(fn) + def remove_layer(self, name): """Remove an overlay layer by name.""" def fn(v): @@ -724,6 +917,8 @@ def fn(v): del v._overlay_layers[name] if name in v._base_overlay_layers: del v._base_overlay_layers[name] + if name in v._overlay_color_luts: + del v._overlay_color_luts[name] v._overlay_names = list(v._overlay_layers.keys()) v._terrain_layer_order = ( ['elevation'] + list(v._overlay_names)) @@ -732,6 +927,7 @@ def fn(v): v._terrain_layer_idx = 0 v._active_overlay_data = None v._overlay_as_water = False + 
v._active_overlay_color_lut = None v._update_frame() print(f"Removed layer: {name}") self._submit(fn) @@ -743,6 +939,7 @@ def fn(v): v._active_color_data = None v._active_overlay_data = None v._overlay_as_water = False + v._active_overlay_color_lut = None v._terrain_layer_idx = 0 v._update_frame() print("Terrain: elevation") @@ -755,7 +952,10 @@ def fn(v): v._terrain_layer_idx = idx v._active_color_data = None v._active_overlay_data = v._overlay_layers[name] - v._overlay_as_water = name.startswith('flood_') + v._overlay_as_water = ( + name.startswith('flood_') + or (name == 'stream_link' and v._hydro_enabled)) + v._active_overlay_color_lut = v._overlay_color_luts.get(name) v._update_frame() print(f"Terrain: {name}") self._submit(fn) @@ -1008,13 +1208,21 @@ def __repr__(self): f"shadows={v.shadows}{obs_info})") -def _add_overlay(viewer, name, data): +def _add_overlay(viewer, name, data, color_lut=None): """Add or replace an overlay layer on *viewer* and switch to it. Must be called on the main (render) thread. + + Parameters + ---------- + color_lut : np.ndarray, optional + (256, 3) float32 categorical palette LUT. When provided, the + overlay bypasses the terrain colormap and color stretch. 
""" viewer._overlay_layers[name] = data viewer._base_overlay_layers[name] = data + if color_lut is not None: + viewer._overlay_color_luts[name] = color_lut viewer._overlay_names = list(viewer._overlay_layers.keys()) viewer._terrain_layer_order = ( ['elevation'] + list(viewer._overlay_names)) @@ -1023,7 +1231,10 @@ def _add_overlay(viewer, name, data): viewer._active_color_data = None viewer._active_overlay_data = data viewer._overlay_as_water = name.startswith('flood_') - viewer._update_frame() + viewer._active_overlay_color_lut = color_lut + # Skip render if camera not yet initialized (overlays added before run()) + if viewer.position is not None: + viewer._update_frame() print(f"Terrain: {name}") @@ -1070,6 +1281,7 @@ class InteractiveViewer: - C: Cycle colormap - Shift+F: Fetch/toggle FIRMS fire layer (7d LANDSAT 30m) - Shift+W: Toggle wind particle animation + - Shift+Y: Toggle hydro flow particle animation - Shift+B: Toggle GTFS-RT realtime vehicle overlay - F: Save screenshot - M: Toggle minimap overlay @@ -1134,6 +1346,7 @@ def __init__(self, raster, width: int = 800, height: int = 600, ) self.rtx = rtx + self._scene_diagonal = 1.0 # updated in run() with actual terrain extent self.width = width self.height = height self.render_scale = np.clip(render_scale, 0.25, 1.0) @@ -1230,6 +1443,9 @@ def __init__(self, raster, width: int = 800, height: int = 600, # Wind particle state self.wind = WindState() + # Hydro flow particle state + self.hydro = HydroState() + # GTFS-RT realtime vehicle overlay state self._gtfs_rt_url = None self._gtfs_rt_enabled = False @@ -1398,8 +1614,11 @@ def __init__(self, raster, width: int = 800, height: int = 600, verts.copy(), idxs.copy(), terrain_np.copy(), ) + # Only pass grid_dims for TIN meshes — cluster GAS + # partitioning assumes regular grid triangle layout. 
+ gd = (H, W) if mesh_type != 'voxel' else None rtx.add_geometry('terrain', verts, idxs, - grid_dims=(H, W)) + grid_dims=gd) # ------------------------------------------------------------------ # Delegation properties — InputState @@ -1537,6 +1756,22 @@ def sun_altitude(self): def sun_altitude(self, value): self.render_settings.sun_altitude = value + @property + def fog_density(self): + return self.render_settings.fog_density + + @fog_density.setter + def fog_density(self, value): + self.render_settings.fog_density = value + + @property + def fog_color(self): + return self.render_settings.fog_color + + @fog_color.setter + def fog_color(self, value): + self.render_settings.fog_color = value + @property def colormap(self): return self.render_settings.colormap @@ -1749,6 +1984,22 @@ def _overlay_as_water(self): def _overlay_as_water(self, value): self.overlays.overlay_as_water = value + @property + def _active_overlay_color_lut(self): + return self.overlays.active_overlay_color_lut + + @_active_overlay_color_lut.setter + def _active_overlay_color_lut(self, value): + self.overlays.active_overlay_color_lut = value + + @property + def _overlay_color_luts(self): + return self.overlays.overlay_color_luts + + @_overlay_color_luts.setter + def _overlay_color_luts(self, value): + self.overlays.overlay_color_luts = value + @property def _terrain_layer_order(self): return self.overlays.terrain_layer_order @@ -2013,14 +2264,26 @@ def _wind_ages(self, value): def _wind_max_age(self): return self.wind.wind_max_age + @_wind_max_age.setter + def _wind_max_age(self, value): + self.wind.wind_max_age = value + @property def _wind_n_particles(self): return self.wind.wind_n_particles + @_wind_n_particles.setter + def _wind_n_particles(self, value): + self.wind.wind_n_particles = value + @property def _wind_trail_len(self): return self.wind.wind_trail_len + @_wind_trail_len.setter + def _wind_trail_len(self, value): + self.wind.wind_trail_len = value + @property def _wind_trails(self): 
return self.wind.wind_trails @@ -2033,6 +2296,10 @@ def _wind_trails(self, value): def _wind_speed_mult(self): return self.wind.wind_speed_mult + @_wind_speed_mult.setter + def _wind_speed_mult(self, value): + self.wind.wind_speed_mult = value + @property def _wind_min_depth(self): return self.wind.wind_min_depth @@ -2045,14 +2312,26 @@ def _wind_min_depth(self, value): def _wind_dot_radius(self): return self.wind.wind_dot_radius + @_wind_dot_radius.setter + def _wind_dot_radius(self, value): + self.wind.wind_dot_radius = value + @property def _wind_alpha(self): return self.wind.wind_alpha + @_wind_alpha.setter + def _wind_alpha(self, value): + self.wind.wind_alpha = value + @property def _wind_min_visible_age(self): return self.wind.wind_min_visible_age + @_wind_min_visible_age.setter + def _wind_min_visible_age(self, value): + self.wind.wind_min_visible_age = value + @property def _wind_terrain_np(self): return self.wind.wind_terrain_np @@ -2101,6 +2380,306 @@ def _wind_done_event(self): def _wind_done_event(self, value): self.wind.wind_done_event = value + # ------------------------------------------------------------------ + # Delegation properties — HydroState + # ------------------------------------------------------------------ + + @property + def _hydro_data(self): + return self.hydro.hydro_data + + @_hydro_data.setter + def _hydro_data(self, value): + self.hydro.hydro_data = value + + @property + def _hydro_enabled(self): + return self.hydro.hydro_enabled + + @_hydro_enabled.setter + def _hydro_enabled(self, value): + self.hydro.hydro_enabled = value + + @property + def _hydro_flow_u_px(self): + return self.hydro.hydro_flow_u_px + + @_hydro_flow_u_px.setter + def _hydro_flow_u_px(self, value): + self.hydro.hydro_flow_u_px = value + + @property + def _hydro_flow_v_px(self): + return self.hydro.hydro_flow_v_px + + @_hydro_flow_v_px.setter + def _hydro_flow_v_px(self, value): + self.hydro.hydro_flow_v_px = value + + @property + def 
_hydro_flow_accum_norm(self): + return self.hydro.hydro_flow_accum_norm + + @_hydro_flow_accum_norm.setter + def _hydro_flow_accum_norm(self, value): + self.hydro.hydro_flow_accum_norm = value + + @property + def _hydro_stream_order(self): + return self.hydro.hydro_stream_order + + @_hydro_stream_order.setter + def _hydro_stream_order(self, value): + self.hydro.hydro_stream_order = value + + @property + def _hydro_stream_order_raw(self): + return self.hydro.hydro_stream_order_raw + + @_hydro_stream_order_raw.setter + def _hydro_stream_order_raw(self, value): + self.hydro.hydro_stream_order_raw = value + + @property + def _hydro_stream_link(self): + return self.hydro.hydro_stream_link + + @_hydro_stream_link.setter + def _hydro_stream_link(self, value): + self.hydro.hydro_stream_link = value + + @property + def _hydro_particle_raw_order(self): + return self.hydro.hydro_particle_raw_order + + @_hydro_particle_raw_order.setter + def _hydro_particle_raw_order(self, value): + self.hydro.hydro_particle_raw_order = value + + @property + def _hydro_particles(self): + return self.hydro.hydro_particles + + @_hydro_particles.setter + def _hydro_particles(self, value): + self.hydro.hydro_particles = value + + @property + def _hydro_ages(self): + return self.hydro.hydro_ages + + @_hydro_ages.setter + def _hydro_ages(self, value): + self.hydro.hydro_ages = value + + @property + def _hydro_lifetimes(self): + return self.hydro.hydro_lifetimes + + @_hydro_lifetimes.setter + def _hydro_lifetimes(self, value): + self.hydro.hydro_lifetimes = value + + @property + def _hydro_max_age(self): + return self.hydro.hydro_max_age + + @property + def _hydro_n_particles(self): + return self.hydro.hydro_n_particles + + @_hydro_n_particles.setter + def _hydro_n_particles(self, value): + self.hydro.hydro_n_particles = value + + @property + def _hydro_trail_len(self): + return self.hydro.hydro_trail_len + + @_hydro_trail_len.setter + def _hydro_trail_len(self, value): + self.hydro.hydro_trail_len = 
value + + @property + def _hydro_trails(self): + return self.hydro.hydro_trails + + @_hydro_trails.setter + def _hydro_trails(self, value): + self.hydro.hydro_trails = value + + @property + def _hydro_speed(self): + return self.hydro.hydro_speed + + @_hydro_speed.setter + def _hydro_speed(self, value): + self.hydro.hydro_speed = value + + @property + def _hydro_min_depth(self): + return self.hydro.hydro_min_depth + + @_hydro_min_depth.setter + def _hydro_min_depth(self, value): + self.hydro.hydro_min_depth = value + + @property + def _hydro_ref_depth(self): + return self.hydro.hydro_ref_depth + + @_hydro_ref_depth.setter + def _hydro_ref_depth(self, value): + self.hydro.hydro_ref_depth = value + + @property + def _hydro_dot_radius(self): + return self.hydro.hydro_dot_radius + + @_hydro_dot_radius.setter + def _hydro_dot_radius(self, value): + self.hydro.hydro_dot_radius = value + + @property + def _hydro_alpha(self): + return self.hydro.hydro_alpha + + @_hydro_alpha.setter + def _hydro_alpha(self, value): + self.hydro.hydro_alpha = value + + @property + def _hydro_min_visible_age(self): + return self.hydro.hydro_min_visible_age + + @property + def _hydro_accum_threshold(self): + return self.hydro.hydro_accum_threshold + + @_hydro_accum_threshold.setter + def _hydro_accum_threshold(self, value): + self.hydro.hydro_accum_threshold = value + + @property + def _hydro_color(self): + return self.hydro.hydro_color + + @_hydro_color.setter + def _hydro_color(self, value): + self.hydro.hydro_color = value + + @property + def _hydro_terrain_np(self): + return self.hydro.hydro_terrain_np + + @_hydro_terrain_np.setter + def _hydro_terrain_np(self, value): + self.hydro.hydro_terrain_np = value + + @property + def _hydro_spawn_probs(self): + return self.hydro.hydro_spawn_probs + + @_hydro_spawn_probs.setter + def _hydro_spawn_probs(self, value): + self.hydro.hydro_spawn_probs = value + + @property + def _hydro_spawn_indices(self): + return self.hydro.hydro_spawn_indices + + 
@_hydro_spawn_indices.setter + def _hydro_spawn_indices(self, value): + self.hydro.hydro_spawn_indices = value + + @property + def _hydro_spawn_valid_probs(self): + return self.hydro.hydro_spawn_valid_probs + + @_hydro_spawn_valid_probs.setter + def _hydro_spawn_valid_probs(self, value): + self.hydro.hydro_spawn_valid_probs = value + + @property + def _d_hydro_trails(self): + return self.hydro.d_hydro_trails + + @_d_hydro_trails.setter + def _d_hydro_trails(self, value): + self.hydro.d_hydro_trails = value + + @property + def _d_hydro_ages(self): + return self.hydro.d_hydro_ages + + @_d_hydro_ages.setter + def _d_hydro_ages(self, value): + self.hydro.d_hydro_ages = value + + @property + def _d_hydro_lifetimes(self): + return self.hydro.d_hydro_lifetimes + + @_d_hydro_lifetimes.setter + def _d_hydro_lifetimes(self, value): + self.hydro.d_hydro_lifetimes = value + + @property + def _hydro_done_event(self): + return self.hydro.hydro_done_event + + @_hydro_done_event.setter + def _hydro_done_event(self, value): + self.hydro.hydro_done_event = value + + @property + def _hydro_slope_mag(self): + return self.hydro.hydro_slope_mag + + @_hydro_slope_mag.setter + def _hydro_slope_mag(self, value): + self.hydro.hydro_slope_mag = value + + @property + def _hydro_particle_accum(self): + return self.hydro.hydro_particle_accum + + @_hydro_particle_accum.setter + def _hydro_particle_accum(self, value): + self.hydro.hydro_particle_accum = value + + @property + def _d_hydro_colors(self): + return self.hydro.d_hydro_colors + + @_d_hydro_colors.setter + def _d_hydro_colors(self, value): + self.hydro.d_hydro_colors = value + + @property + def _d_hydro_radii(self): + return self.hydro.d_hydro_radii + + @_d_hydro_radii.setter + def _d_hydro_radii(self, value): + self.hydro.d_hydro_radii = value + + @property + def _hydro_particle_colors(self): + return self.hydro.hydro_particle_colors + + @_hydro_particle_colors.setter + def _hydro_particle_colors(self, value): + 
self.hydro.hydro_particle_colors = value + + @property + def _hydro_particle_radii(self): + return self.hydro.hydro_particle_radii + + @_hydro_particle_radii.setter + def _hydro_particle_radii(self, value): + self.hydro.hydro_particle_radii = value + # ------------------------------------------------------------------ # Delegation properties — ObserverManager # ------------------------------------------------------------------ @@ -2569,6 +3148,10 @@ def _build_title(self): if self._wind_enabled: parts.append('wind') + # Hydro + if self._hydro_enabled: + parts.append('hydro') + # Active observer drone mode active_obs = (self._observers.get(self._active_observer) if self._active_observer else None) @@ -2809,7 +3392,8 @@ def _rebuild_at_resolution(self, factor): self.raster = sub self._wind_terrain_np = None # invalidate cached terrain - self._d_base_frame = None # invalidate GPU wind buffers + self._hydro_terrain_np = None + self._d_base_frame = None # invalidate GPU wind/hydro buffers self._d_wind_scratch = None H, W = sub.shape self.terrain_shape = (H, W) @@ -2923,7 +3507,12 @@ def _rebuild_at_resolution(self, factor): terrain_name = self._terrain_layer_order[self._terrain_layer_idx] if terrain_name != 'elevation' and terrain_name in self._overlay_layers: self._active_overlay_data = self._overlay_layers[terrain_name] - self._overlay_as_water = terrain_name.startswith('flood_') + self._overlay_as_water = ( + terrain_name.startswith('flood_') + or (terrain_name == 'stream_link' + and self._hydro_enabled)) + self._active_overlay_color_lut = self._overlay_color_luts.get( + terrain_name) # 6. 
Invalidate chunk manager cache (meshes need new Z coords) if self._chunk_manager is not None: @@ -4054,6 +4643,784 @@ def _splat_wind_gpu(self, d_frame): # Clamp output cp.clip(d_frame, 0, 1, out=d_frame) + # ------------------------------------------------------------------ + # Hydrological flow particles + # ------------------------------------------------------------------ + + def _toggle_hydro(self): + """Toggle hydro flow particles + stream_link water overlay together.""" + if self._hydro_data is None: + print("No hydro data. Use v.add_hydro(flow_dir, flow_accum).") + return + self._hydro_enabled = not self._hydro_enabled + + if self._hydro_enabled: + # Save current overlay state for restoration on OFF + self._hydro_prev_layer_idx = self._terrain_layer_idx + # Switch to stream_link overlay with water reflection shader + if 'stream_link' in self._overlay_layers: + idx = self._terrain_layer_order.index('stream_link') + self._terrain_layer_idx = idx + self._active_overlay_data = self._overlay_layers['stream_link'] + self._overlay_as_water = True + self._active_overlay_color_lut = self._overlay_color_luts.get( + 'stream_link') + print("Hydro flow: ON (stream overlay + water shader)") + else: + print("Hydro flow: ON") + else: + # Restore previous overlay state + prev = getattr(self, '_hydro_prev_layer_idx', 0) + if prev >= len(self._terrain_layer_order): + prev = 0 + self._terrain_layer_idx = prev + layer = self._terrain_layer_order[prev] + if layer == 'elevation': + self._active_overlay_data = None + self._overlay_as_water = False + self._active_overlay_color_lut = None + else: + self._active_overlay_data = self._overlay_layers[layer] + self._overlay_as_water = layer.startswith('flood_') + self._active_overlay_color_lut = ( + self._overlay_color_luts.get(layer)) + print("Hydro flow: OFF") + + self._update_frame() + + def _action_toggle_hydro(self): + self._toggle_hydro() + + _STREAM_ORDER_PALETTE = np.array([ + [0.0, 0.0, 0.0 ], # 0: unused + [0.50, 0.80, 1.00], # 
1: pale sky blue (headwaters) + [0.38, 0.68, 0.98], # 2: light blue + [0.28, 0.55, 0.95], # 3: sky blue + [0.18, 0.42, 0.90], # 4: medium blue + [0.10, 0.30, 0.85], # 5: royal blue + [0.06, 0.20, 0.78], # 6: deep blue + [0.03, 0.12, 0.70], # 7: dark blue + [0.01, 0.06, 0.60], # 8: navy (major rivers) + ], dtype=np.float32) + + @staticmethod + def _build_stream_palette_lut(max_order): + """Build a 256-entry color LUT for stream order overlay rendering. + + Maps normalized [0,1] overlay values back to integer orders + and looks up the categorical palette color. + """ + palette = InteractiveViewer._STREAM_ORDER_PALETTE + lut = np.zeros((256, 3), dtype=np.float32) + denom = max(max_order - 1, 1) + for i in range(256): + # Reverse the normalization: order = 1 + norm * (max_order - 1) + order = int(round(1 + (i / 255.0) * denom)) + order = max(1, min(8, order)) + lut[i] = palette[order] + return lut + + @staticmethod + def _hydro_color_from_order(order_norm, raw_order=None): + """Map stream order → (R, G, B) per particle. + + If *raw_order* is provided, uses categorical palette keyed by + integer Strahler order. Otherwise falls back to continuous + blue gradient from normalized [0,1] order. + """ + if raw_order is not None: + idx = np.clip(raw_order, 1, 8).astype(int) + colors = InteractiveViewer._STREAM_ORDER_PALETTE[idx].copy() + else: + colors = np.empty((len(order_norm), 3), dtype=np.float32) + colors[:, 0] = 0.02 + order_norm * 0.43 # R: 0.02 → 0.45 + colors[:, 1] = 0.10 + order_norm * 0.65 # G: 0.10 → 0.75 + colors[:, 2] = 0.55 + order_norm * 0.40 # B: 0.55 → 0.95 + colors = np.clip(colors, 0.0, 1.0) + return colors + + @staticmethod + def _hydro_radius_from_order(order_norm, raw_order=None): + """Map stream order → radius (2–5) per particle. + + If *raw_order* is provided, uses integer order + 1 directly + (clamped 2–5). Otherwise uses continuous mapping. 
+ """ + if raw_order is not None: + return np.clip(raw_order + 1, 2, 5).astype(np.int32) + return np.clip(2 + (order_norm * 3).astype(np.int32), + 2, 5).astype(np.int32) + + def _init_hydro(self, flow_dir, flow_accum, **kwargs): + """Initialize hydro flow particles from D8 flow direction and accumulation grids. + + Parameters + ---------- + flow_dir : array-like, shape (H, W) + D8 flow direction grid (1=E, 2=SE, 4=S, 8=SW, 16=W, 32=NW, + 64=N, 128=NE). + flow_accum : array-like, shape (H, W) + Flow accumulation grid (cell counts or area). + **kwargs + Optional overrides: n_particles, max_age, trail_len, speed, + accum_threshold, color, alpha, dot_radius, min_visible_age, + stream_order (array). + """ + # Accept CuPy or NumPy arrays — particle advection runs on CPU + if hasattr(flow_dir, 'get'): + flow_dir = flow_dir.get() + if hasattr(flow_accum, 'get'): + flow_accum = flow_accum.get() + flow_dir = np.asarray(flow_dir, dtype=np.int32) + flow_accum = np.asarray(flow_accum, dtype=np.float64) + H, W = flow_dir.shape + + # Stream order grid (optional but strongly recommended) + stream_order = kwargs.pop('stream_order', None) + if stream_order is not None: + if hasattr(stream_order, 'get'): + stream_order = stream_order.get() + stream_order = np.asarray(stream_order, dtype=np.float64) + # Replace NaN with 0 (non-stream cells) + stream_order = np.nan_to_num(stream_order, nan=0.0) + has_stream_order = stream_order is not None and (stream_order > 0).any() + + # Mark hydro as initialised (don't hold the full grids — + # particle advection uses _hydro_flow_u_px / _hydro_flow_v_px). 
+ self._hydro_data = True + + # Apply optional overrides + for key, attr, conv in [ + ('n_particles', '_hydro_n_particles', int), + ('max_age', None, int), + ('trail_len', '_hydro_trail_len', int), + ('speed', '_hydro_speed', float), + ('accum_threshold', '_hydro_accum_threshold', int), + ('color', '_hydro_color', tuple), + ('alpha', '_hydro_alpha', float), + ('dot_radius', '_hydro_dot_radius', int), + ('min_visible_age', None, int), + ]: + if key in kwargs: + val = conv(kwargs[key]) + if attr: + setattr(self, attr, val) + elif key == 'max_age': + self.hydro.hydro_max_age = val + elif key == 'min_visible_age': + self.hydro.hydro_min_visible_age = val + + # D8 code → (drow, dcol) unit vectors + # Row increases downward (south), col increases rightward (east) + sqrt2_inv = 1.0 / np.sqrt(2.0) + d8_to_drow_dcol = { + 1: (0.0, 1.0), # E + 2: (sqrt2_inv, sqrt2_inv), # SE + 4: (1.0, 0.0), # S + 8: (sqrt2_inv, -sqrt2_inv),# SW + 16: (0.0, -1.0), # W + 32: (-sqrt2_inv, -sqrt2_inv),# NW + 64: (-1.0, 0.0), # N + 128: (-sqrt2_inv, sqrt2_inv),# NE + } + + flow_u = np.zeros((H, W), dtype=np.float32) # col direction + flow_v = np.zeros((H, W), dtype=np.float32) # row direction + for code, (dr, dc) in d8_to_drow_dcol.items(): + mask = flow_dir == code + flow_v[mask] = dr + flow_u[mask] = dc + valid_flow = np.isin(flow_dir, list(d8_to_drow_dcol.keys())) + + self._hydro_flow_u_px = flow_u + self._hydro_flow_v_px = flow_v + + # Normalize accumulation: log10(clip(fa, 1)), scale to [0,1] + fa_clipped = np.clip(flow_accum, 1, None) + log_fa = np.log10(fa_clipped) + threshold = np.log10(max(self._hydro_accum_threshold, 1)) + log_max = log_fa.max() + if log_max > threshold: + accum_norm = np.clip((log_fa - threshold) / (log_max - threshold), 0, 1) + else: + accum_norm = np.zeros_like(log_fa) + self._hydro_flow_accum_norm = accum_norm.astype(np.float32) + + # Store stream order grids + if has_stream_order: + max_order = stream_order.max() + so_norm = (stream_order / max(max_order, 
1)).astype(np.float32) + self._hydro_stream_order = so_norm + self._hydro_stream_order_raw = stream_order.astype(np.int32) + print(f" Stream order: max {int(max_order)}, " + f"{int((stream_order > 0).sum())} stream cells") + else: + self._hydro_stream_order = None + self._hydro_stream_order_raw = None + + # Accept and store stream_link grid + stream_link_grid = kwargs.pop('stream_link', None) + if stream_link_grid is not None: + if hasattr(stream_link_grid, 'get'): + stream_link_grid = stream_link_grid.get() + self._hydro_stream_link = np.nan_to_num( + np.asarray(stream_link_grid, dtype=np.float64), nan=0.0 + ).astype(np.int32) + else: + self._hydro_stream_link = None + + # Build spawn probabilities — stream-order weighted if available + if has_stream_order: + # Spawn on stream cells, weighted by sqrt(order) — much + # flatter than order^2, giving real coverage to headwaters + spawn_weights = np.where(stream_order > 0, + np.sqrt(stream_order), 0.0) + spawn_weights[~valid_flow] = 0.0 + else: + spawn_weights = accum_norm.copy() + spawn_weights[~valid_flow] = 0.0 + + # Rasterize Overture waterway LineStrings into spawn pool + # and stream_link overlay (for unified water shader rendering). + waterway_geojson = kwargs.pop('waterway_geojson', None) + if waterway_geojson is not None and has_stream_order: + _WATERWAY_ORDER = { + 'river': (5, 3.0), 'canal': (4, 2.5), + 'stream': (2, 1.5), 'drain': (1, 1.0), 'ditch': (1, 1.0), + } + so_raw = self._hydro_stream_order_raw + sl_grid = self._hydro_stream_link + n_ww_cells = 0 + # Use a synthetic link ID for waterway cells not already + # in the stream network. 
+ _ww_link_id = (int(sl_grid.max()) + 1) if sl_grid is not None else 1 + from .geojson import ( + _geojson_to_world_coords, _build_transformer, + ) + terrain_data_np = self.raster.data + if hasattr(terrain_data_np, 'get'): + terrain_data_np = terrain_data_np.get() + terrain_data_np = np.asarray(terrain_data_np, dtype=np.float32) + try: + transformer = _build_transformer(self.raster) + except Exception: + transformer = None + + def _burn_pixels(rows, cols, eq_order, eq_weight): + """Burn a set of (row, col) pixels into hydro grids.""" + nonlocal n_ww_cells + for rr, cc in zip(rows, cols): + if 0 <= rr < H and 0 <= cc < W: + # Upgrade raw order (don't downgrade) + if so_raw[rr, cc] < eq_order: + so_raw[rr, cc] = eq_order + # Upgrade spawn weight + if eq_weight > spawn_weights[rr, cc]: + spawn_weights[rr, cc] = eq_weight + # Ensure cell appears in stream_link overlay + if sl_grid is not None and sl_grid[rr, cc] <= 0: + sl_grid[rr, cc] = _ww_link_id + n_ww_cells += 1 + + def _coords_to_pixels(coords): + """Convert lon/lat coords to (col, row) pixel pairs.""" + try: + _, px = _geojson_to_world_coords( + coords, self.raster, terrain_data_np, + self._base_pixel_spacing_x, + self._base_pixel_spacing_y, + transformer=transformer, + return_pixel_coords=True) + return px + except Exception: + return [] + + def _densify_line(pixel_coords): + """Walk a polyline at 1-pixel steps, return (rows, cols).""" + rows, cols = [], [] + for i in range(len(pixel_coords) - 1): + c0, r0 = pixel_coords[i] + c1, r1 = pixel_coords[i + 1] + dc, dr = c1 - c0, r1 - r0 + n_steps = max(int(max(abs(dr), abs(dc))), 1) + for s in range(n_steps + 1): + t = s / n_steps + rows.append(int(round(r0 + dr * t))) + cols.append(int(round(c0 + dc * t))) + return rows, cols + + for feat in waterway_geojson.get('features', []): + geom = feat.get('geometry', {}) + gtype = geom.get('type', '') + subtype = (feat.get('properties') or {}).get('subtype', '') + eq_order, eq_weight = _WATERWAY_ORDER.get( + subtype, (2, 
1.5)) + + if gtype == 'LineString': + coords = geom.get('coordinates', []) + if len(coords) < 2: + continue + px = _coords_to_pixels(coords) + if len(px) < 2: + continue + rs, cs = _densify_line(px) + _burn_pixels(rs, cs, eq_order, eq_weight) + + elif gtype in ('Polygon', 'MultiPolygon'): + # Water bodies: burn outline + filled interior + rings = [] + if gtype == 'Polygon': + rings = geom.get('coordinates', []) + else: + for poly in geom.get('coordinates', []): + rings.extend(poly) + # Lakes/reservoirs get high order + poly_order = max(eq_order, 5) + poly_weight = max(eq_weight, 3.0) + for ring in rings: + if len(ring) < 3: + continue + px = _coords_to_pixels(ring) + if len(px) < 3: + continue + # Outline + rs, cs = _densify_line(px) + _burn_pixels(rs, cs, poly_order, poly_weight) + # Fill interior via scanline + pr = np.array([p[1] for p in px]) + pc = np.array([p[0] for p in px]) + r_min = max(int(pr.min()), 0) + r_max = min(int(pr.max()), H - 1) + for row in range(r_min, r_max + 1): + # Find x-intersections of scanline with edges + xings = [] + n_verts = len(pr) + for j in range(n_verts): + j1 = (j + 1) % n_verts + r0, r1 = pr[j], pr[j1] + if (r0 <= row < r1) or (r1 <= row < r0): + t = (row - r0) / (r1 - r0) + xings.append(pc[j] + t * (pc[j1] - pc[j])) + xings.sort() + # Fill between pairs + for k in range(0, len(xings) - 1, 2): + c_lo = max(int(round(xings[k])), 0) + c_hi = min(int(round(xings[k + 1])), W - 1) + if c_lo <= c_hi: + fill_rs = [row] * (c_hi - c_lo + 1) + fill_cs = list(range(c_lo, c_hi + 1)) + _burn_pixels(fill_rs, fill_cs, + poly_order, poly_weight) + if n_ww_cells > 0: + print(f" Waterway rasterization: {n_ww_cells} cells injected") + + flat_weights = spawn_weights.ravel() + valid_mask = flat_weights > 0 + valid_indices = np.nonzero(valid_mask)[0] + if len(valid_indices) > 0: + valid_probs = flat_weights[valid_indices].astype(np.float64) + valid_probs /= valid_probs.sum() + else: + valid_flow_flat = valid_flow.ravel() + valid_indices = 
np.nonzero(valid_flow_flat)[0] + if len(valid_indices) > 0: + valid_probs = np.ones(len(valid_indices), dtype=np.float64) + valid_probs /= valid_probs.sum() + else: + valid_indices = np.arange(H * W) + valid_probs = np.ones(H * W, dtype=np.float64) / (H * W) + self._hydro_spawn_indices = valid_indices + self._hydro_spawn_valid_probs = valid_probs + + # Spawn initial particles + N = self._hydro_n_particles + chosen = np.random.choice(len(valid_indices), N, p=valid_probs) + indices = valid_indices[chosen] + rows = (indices // W).astype(np.float32) + np.random.uniform(-0.5, 0.5, N).astype(np.float32) + cols = (indices % W).astype(np.float32) + np.random.uniform(-0.5, 0.5, N).astype(np.float32) + rows = np.clip(rows, 0, H - 1) + cols = np.clip(cols, 0, W - 1) + + self._hydro_particles = np.column_stack([rows, cols]).astype(np.float32) + self._hydro_ages = np.random.randint(0, self._hydro_max_age, N).astype(np.int32) + self._hydro_lifetimes = np.random.randint( + self._hydro_max_age // 2, self._hydro_max_age, N).astype(np.int32) + self._hydro_trails = np.zeros( + (N, self._hydro_trail_len, 2), dtype=np.float32) + for t in range(self._hydro_trail_len): + self._hydro_trails[:, t, :] = self._hydro_particles + + # Compute terrain slope magnitude for speed modulation + terrain_data = self.raster.data + if hasattr(terrain_data, 'get'): + elev = terrain_data.get().astype(np.float64) + else: + elev = np.asarray(terrain_data, dtype=np.float64) + grad_row, grad_col = np.gradient(np.nan_to_num(elev, nan=0.0)) + slope_mag = np.sqrt(grad_row**2 + grad_col**2).astype(np.float32) + p95 = np.percentile(slope_mag[slope_mag > 0], 95) if (slope_mag > 0).any() else 1.0 + slope_norm = np.clip(slope_mag / max(p95, 1e-6), 0, 1).astype(np.float32) + self._hydro_slope_mag = slope_norm + + # Per-particle visual properties from stream order (or accum fallback) + r_idx = np.clip(np.floor(rows).astype(int), 0, H - 1) + c_idx = np.clip(np.floor(cols).astype(int), 0, W - 1) + if has_stream_order: + 
order_val = so_norm[r_idx, c_idx].astype(np.float32) + raw_order = self._hydro_stream_order_raw[r_idx, c_idx] + else: + order_val = accum_norm[r_idx, c_idx].astype(np.float32) + raw_order = None + self._hydro_particle_accum = order_val + self._hydro_particle_raw_order = raw_order + self._hydro_particle_colors = self._hydro_color_from_order( + order_val, raw_order=raw_order) + self._hydro_particle_radii = self._hydro_radius_from_order( + order_val, raw_order=raw_order) + + # Min render distance and depth-scaled alpha reference + world_diag = np.sqrt((W * self._base_pixel_spacing_x)**2 + + (H * self._base_pixel_spacing_y)**2) + self._hydro_min_depth = 1.0 # metres — allow building-level zoom + self._hydro_ref_depth = world_diag * 0.15 + + print(f" Hydro flow initialized on {H}x{W} grid " + f"({N} particles, threshold={self._hydro_accum_threshold})") + + def _update_hydro_particles(self): + """Advect hydro particles one tick using D8 flow direction lookup.""" + if self._hydro_flow_u_px is None or self._hydro_particles is None: + return + + H, W = self._hydro_flow_u_px.shape + pts = self._hydro_particles # (N, 2) — (row, col) + + # Shift trail buffer (drop oldest, prepend current position) + self._hydro_trails[:, 1:, :] = self._hydro_trails[:, :-1, :] + self._hydro_trails[:, 0, :] = pts + + # Nearest-neighbor D8 lookup (discrete — no interpolation) + rows = pts[:, 0] + cols = pts[:, 1] + r_idx = np.clip(np.floor(np.nan_to_num(rows, nan=0.0)).astype(int), 0, H - 1) + c_idx = np.clip(np.floor(np.nan_to_num(cols, nan=0.0)).astype(int), 0, W - 1) + + u_val = self._hydro_flow_u_px[r_idx, c_idx] + v_val = self._hydro_flow_v_px[r_idx, c_idx] + + # Max-track per-particle visual weight (stream order or accum). + # Particles only get brighter as they flow into bigger streams. 
+ so = self._hydro_stream_order + so_raw = self._hydro_stream_order_raw + if so is not None: + current_val = so[r_idx, c_idx] + else: + current_val = self._hydro_flow_accum_norm[r_idx, c_idx] + old_val = self._hydro_particle_accum.copy() + np.maximum(old_val, current_val, out=self._hydro_particle_accum) + # Track raw integer order alongside normalized value + if so_raw is not None and self._hydro_particle_raw_order is not None: + current_raw = so_raw[r_idx, c_idx] + np.maximum(self._hydro_particle_raw_order, current_raw, + out=self._hydro_particle_raw_order) + changed = self._hydro_particle_accum > old_val + if changed.any(): + a = self._hydro_particle_accum[changed] + raw_o = (self._hydro_particle_raw_order[changed] + if self._hydro_particle_raw_order is not None + else None) + self._hydro_particle_colors[changed] = \ + self._hydro_color_from_order(a, raw_order=raw_o) + self._hydro_particle_radii[changed] = \ + self._hydro_radius_from_order(a, raw_order=raw_o) + + # Small random jitter for visual variety + jitter = np.random.uniform(-0.1, 0.1, pts.shape).astype(np.float32) + + # Slope-based speed: steeper terrain → faster flow + # Base speed 0.3 + 0.7 * slope so even flat areas move + slope_factor = np.ones(len(r_idx), dtype=np.float32) + if self._hydro_slope_mag is not None: + slope_factor = 0.3 + 0.7 * self._hydro_slope_mag[r_idx, c_idx] + + # Advect + speed = self._hydro_speed + dt_scale = getattr(self, '_dt_scale', 1.0) + pts[:, 0] += (v_val + jitter[:, 0]) * speed * dt_scale * slope_factor + pts[:, 1] += (u_val + jitter[:, 1]) * speed * dt_scale * slope_factor + + # Age particles + self._hydro_ages += 1 + + # Respawn: OOB, aged-out, or stuck (zero velocity = pit/sink) + nan_pos = np.isnan(pts[:, 0]) | np.isnan(pts[:, 1]) + oob = nan_pos | (pts[:, 0] < 0) | (pts[:, 0] >= H) | (pts[:, 1] < 0) | (pts[:, 1] >= W) + old = self._hydro_ages >= self._hydro_lifetimes + stuck = (u_val == 0) & (v_val == 0) + respawn = oob | old | stuck + + n_respawn = int(respawn.sum()) 
+ if n_respawn > 0: + chosen = np.random.choice( + len(self._hydro_spawn_indices), n_respawn, + p=self._hydro_spawn_valid_probs) + indices = self._hydro_spawn_indices[chosen] + pts[respawn, 0] = (indices // W).astype(np.float32) + np.random.uniform(-0.5, 0.5, n_respawn).astype(np.float32) + pts[respawn, 1] = (indices % W).astype(np.float32) + np.random.uniform(-0.5, 0.5, n_respawn).astype(np.float32) + pts[respawn, 0] = np.clip(pts[respawn, 0], 0, H - 1) + pts[respawn, 1] = np.clip(pts[respawn, 1], 0, W - 1) + self._hydro_ages[respawn] = 0 + self._hydro_lifetimes[respawn] = np.random.randint( + self._hydro_max_age // 2, self._hydro_max_age, n_respawn) + for t in range(self._hydro_trail_len): + self._hydro_trails[respawn, t, :] = pts[respawn] + # Reset visual weight, color, radius for respawned particles + r_new = np.clip(np.floor(pts[respawn, 0]).astype(int), 0, H - 1) + c_new = np.clip(np.floor(pts[respawn, 1]).astype(int), 0, W - 1) + so = self._hydro_stream_order + so_raw = self._hydro_stream_order_raw + if so is not None: + new_val = so[r_new, c_new] + else: + new_val = self._hydro_flow_accum_norm[r_new, c_new] + self._hydro_particle_accum[respawn] = new_val + # Reset raw order for respawned particles + if so_raw is not None and self._hydro_particle_raw_order is not None: + self._hydro_particle_raw_order[respawn] = so_raw[r_new, c_new] + raw_o = self._hydro_particle_raw_order[respawn] + else: + raw_o = None + self._hydro_particle_colors[respawn] = \ + self._hydro_color_from_order(new_val, raw_order=raw_o) + self._hydro_particle_radii[respawn] = \ + self._hydro_radius_from_order(new_val, raw_order=raw_o) + + def _draw_hydro_on_frame(self, img): + """Project hydro particles to screen space and draw on rendered frame. + + CPU fallback with per-particle accumulation-scaled color and radius. + + Parameters + ---------- + img : ndarray, shape (H_screen, W_screen, 3) + Rendered frame (float32 0-1). Modified in-place. 
+ """ + if self._hydro_particles is None: + return + + from .analysis.render import _compute_camera_basis + + sh, sw = img.shape[:2] + N = self._hydro_particles.shape[0] + trail_len = self._hydro_trail_len + + cam_pos = self.position + look_at = self._get_look_at() + forward, right, cam_up = _compute_camera_basis( + tuple(cam_pos), tuple(look_at), (0, 0, 1), + ) + fov_scale = math.tan(math.radians(self.fov) / 2.0) + aspect_ratio = sw / sh + + if self._hydro_terrain_np is None: + terrain_data = self.raster.data + if hasattr(terrain_data, 'get'): + self._hydro_terrain_np = terrain_data.get() + else: + self._hydro_terrain_np = np.asarray(terrain_data) + terrain_np = self._hydro_terrain_np + tH, tW = terrain_np.shape + + f = self.subsample_factor + psx = self._base_pixel_spacing_x + psy = self._base_pixel_spacing_y + ve = self.vertical_exaggeration + min_depth = self._hydro_min_depth + + all_pts = self._hydro_trails.reshape(-1, 2) + rows_all = all_pts[:, 0] + cols_all = all_pts[:, 1] + + sr = np.clip(np.nan_to_num(rows_all / f, nan=0.0).astype(np.int32), 0, tH - 1) + sc = np.clip(np.nan_to_num(cols_all / f, nan=0.0).astype(np.int32), 0, tW - 1) + z_vals = np.nan_to_num(terrain_np[sr, sc], nan=0.0) * ve + 3.0 + + wx = cols_all * psx + wy = rows_all * psy + + dx = wx - cam_pos[0] + dy = wy - cam_pos[1] + dz = z_vals - cam_pos[2] + + depth = dx * forward[0] + dy * forward[1] + dz * forward[2] + valid = depth > min_depth + + inv_depth = np.where(valid, 1.0 / (depth + 1e-10), 0.0) + u_cam = dx * right[0] + dy * right[1] + dz * right[2] + v_cam = dx * cam_up[0] + dy * cam_up[1] + dz * cam_up[2] + u_ndc = u_cam * inv_depth / (fov_scale * aspect_ratio) + v_ndc = v_cam * inv_depth / fov_scale + + sx_all = np.nan_to_num(((u_ndc + 1.0) * 0.5 * sw), nan=-1.0).astype(np.int32) + sy_all = np.nan_to_num(((1.0 - v_ndc) * 0.5 * sh), nan=-1.0).astype(np.int32) + + on_screen = valid & (sx_all >= 0) & (sx_all < sw) & (sy_all >= 0) & (sy_all < sh) + + ages = self._hydro_ages + lifetimes = 
self._hydro_lifetimes + + trail_idx = np.tile(np.arange(trail_len, dtype=np.float32), N) + ages_rep = np.repeat(ages, trail_len) + lifetimes_rep = np.repeat(lifetimes, trail_len) + age_ok = ages_rep > trail_idx + + mva = self._hydro_min_visible_age + fade_in = np.clip((ages_rep - mva) / 10.0, 0, 1) + fade_out = np.clip((lifetimes_rep - ages_rep) / 20.0, 0, 1) + # Quadratic trail decay — comet-tail effect (matches GPU kernel) + trail_fade = (1.0 - trail_idx / trail_len) ** 2 + + ref_d = self._hydro_ref_depth + depth_scale = ref_d / (depth + ref_d) + alpha = self._hydro_alpha * fade_in * fade_out * trail_fade * depth_scale + + # Head glow: bright spark at particle position (tidx == 0) + is_head = trail_idx == 0 + alpha = np.where(is_head, alpha * 1.5, alpha) + + mask = on_screen & age_ok & (alpha > 1e-6) + if not mask.any(): + return img + + sx_m = sx_all[mask] + sy_m = sy_all[mask] + alpha_m = alpha[mask].astype(np.float32) + + # Per-particle color/radius → per-trail-point via particle index + particle_idx = np.arange(N * trail_len) // trail_len + pidx_m = particle_idx[mask] + color_m = self._hydro_particle_colors[pidx_m] # (M, 3) + radii_m = self._hydro_particle_radii[pidx_m] # (M,) + + # Head glow: +1px radius at particle position + is_head_m = trail_idx[mask] == 0 + radii_m = np.where(is_head_m, radii_m + 1, radii_m) + + for r_val in range(1, 7): + r_mask = radii_m == r_val + if not r_mask.any(): + continue + sxr = sx_m[r_mask] + syr = sy_m[r_mask] + ar = alpha_m[r_mask] + cr = color_m[r_mask] + + for offy in range(-r_val, r_val + 1): + for offx in range(-r_val, r_val + 1): + dist_sq = offx * offx + offy * offy + if dist_sq > r_val * r_val: + continue + falloff = 1.0 - (dist_sq / (r_val * r_val)) ** 0.5 + + px = sxr + offx + py = syr + offy + ok = (px >= 0) & (px < sw) & (py >= 0) & (py < sh) + if not ok.any(): + continue + + contribution = ar[ok] * falloff + for c in range(3): + np.add.at(img[:, :, c], (py[ok], px[ok]), + contribution * cr[ok, c]) + + 
    def _splat_hydro_gpu(self, d_frame):
        """Project and splat hydro particles on GPU via Numba CUDA kernel.

        Alpha is computed entirely on GPU from per-particle ages/lifetimes —
        no CPU tile/repeat/clip overhead. Colors/radii are N-sized
        (per-particle). Only trails (N*T) need per-frame upload; everything
        else is N-sized (~60KB).

        The heavy lifting happens in the module-level ``_hydro_splat_kernel``
        (defined elsewhere in this file); this method only stages buffers and
        launches it.

        Parameters
        ----------
        d_frame : cupy.ndarray, shape (H, W, 3)
            GPU frame buffer (float32 0-1). Modified in-place via atomic add.
        """
        if self._hydro_particles is None or self._hydro_trails is None:
            return

        from .analysis.render import _compute_camera_basis

        N = self._hydro_particles.shape[0]
        trail_len = self._hydro_trail_len
        total = N * trail_len

        # Camera basis for the in-kernel perspective projection.
        cam_pos = self.position
        look_at = self._get_look_at()
        forward, right, cam_up = _compute_camera_basis(
            tuple(cam_pos), tuple(look_at), (0, 0, 1),
        )
        fov_scale = math.tan(math.radians(self.fov) / 2.0)
        aspect_ratio = d_frame.shape[1] / d_frame.shape[0]

        # Flatten trails: (N, T, 2) → (N*T, 2) — the only large upload
        all_pts = self._hydro_trails.reshape(-1, 2)

        # Allocate / resize GPU buffers. Buffers persist across frames and
        # are only reallocated when the particle count changes; the four
        # N-sized buffers are keyed on the ages buffer's length.
        if self._d_hydro_trails is None or self._d_hydro_trails.shape[0] != total:
            self._d_hydro_trails = cp.empty((total, 2), dtype=cp.float32)
        if self._d_hydro_ages is None or self._d_hydro_ages.shape[0] != N:
            self._d_hydro_ages = cp.empty(N, dtype=cp.int32)
            self._d_hydro_lifetimes = cp.empty(N, dtype=cp.int32)
            self._d_hydro_colors = cp.empty((N, 3), dtype=cp.float32)
            self._d_hydro_radii = cp.empty(N, dtype=cp.int32)

        # Upload — trails are N*T (~3MB), rest is N-sized (~60KB each)
        # (sizes assume the default particle count — rough figures).
        self._d_hydro_trails.set(all_pts)
        self._d_hydro_ages.set(self._hydro_ages)
        self._d_hydro_lifetimes.set(self._hydro_lifetimes)
        self._d_hydro_colors.set(self._hydro_particle_colors)
        self._d_hydro_radii.set(self._hydro_particle_radii)

        # GPU terrain — ensure the raster lives on-device for the kernel.
        terrain_data = self.raster.data
        if not isinstance(terrain_data, cp.ndarray):
            terrain_data = cp.asarray(terrain_data)

        # Single kernel launch — alpha computed on GPU.
        # One thread per trail point (N*T threads total).
        threadsperblock = 256
        blockspergrid = (total + threadsperblock - 1) // threadsperblock

        _hydro_splat_kernel[blockspergrid, threadsperblock](
            self._d_hydro_trails,
            self._d_hydro_ages,
            self._d_hydro_lifetimes,
            self._d_hydro_colors,
            self._d_hydro_radii,
            trail_len,
            float(self._hydro_alpha),
            int(self._hydro_min_visible_age),
            float(self._hydro_ref_depth),
            terrain_data,
            d_frame,
            float(cam_pos[0]), float(cam_pos[1]), float(cam_pos[2]),
            float(forward[0]), float(forward[1]), float(forward[2]),
            float(right[0]), float(right[1]), float(right[2]),
            float(cam_up[0]), float(cam_up[1]), float(cam_up[2]),
            float(fov_scale), float(aspect_ratio),
            float(self._base_pixel_spacing_x),
            float(self._base_pixel_spacing_y),
            float(self.vertical_exaggeration),
            float(self.subsample_factor),
            float(self._hydro_min_depth),
        )

        # Clamp output — atomic adds may push channels above 1.0.
        cp.clip(d_frame, 0, 1, out=d_frame)
cp.ndarray): + terrain_data = cp.asarray(terrain_data) + + # Single kernel launch — alpha computed on GPU + threadsperblock = 256 + blockspergrid = (total + threadsperblock - 1) // threadsperblock + + _hydro_splat_kernel[blockspergrid, threadsperblock]( + self._d_hydro_trails, + self._d_hydro_ages, + self._d_hydro_lifetimes, + self._d_hydro_colors, + self._d_hydro_radii, + trail_len, + float(self._hydro_alpha), + int(self._hydro_min_visible_age), + float(self._hydro_ref_depth), + terrain_data, + d_frame, + float(cam_pos[0]), float(cam_pos[1]), float(cam_pos[2]), + float(forward[0]), float(forward[1]), float(forward[2]), + float(right[0]), float(right[1]), float(right[2]), + float(cam_up[0]), float(cam_up[1]), float(cam_up[2]), + float(fov_scale), float(aspect_ratio), + float(self._base_pixel_spacing_x), + float(self._base_pixel_spacing_y), + float(self.vertical_exaggeration), + float(self.subsample_factor), + float(self._hydro_min_depth), + ) + + # Clamp output + cp.clip(d_frame, 0, 1, out=d_frame) + # ------------------------------------------------------------------ # GTFS-RT realtime vehicle overlay # ------------------------------------------------------------------ @@ -4621,7 +5988,8 @@ def _check_terrain_reload(self): self._base_raster = new_raster self.raster = new_raster self._wind_terrain_np = None # invalidate cached terrain - self._d_base_frame = None # invalidate GPU wind buffers + self._hydro_terrain_np = None + self._d_base_frame = None # invalidate GPU wind/hydro buffers self._d_wind_scratch = None # Update coordinate tracking @@ -4851,12 +6219,16 @@ def _tick(self): if self._render_needed: self._update_frame() self._render_needed = False - elif self._wind_enabled and self._wind_particles is not None and self._d_base_frame is not None: - # Wind is on but camera didn't move — skip the expensive ray + elif (self._wind_enabled or self._hydro_enabled) and self._d_base_frame is not None: + # Wind/hydro is on but camera didn't move — skip the expensive ray 
# trace and just re-advect particles + GPU splat on fresh copy. - self._update_wind_particles() cp.copyto(self._d_wind_scratch, self._d_base_frame) - self._splat_wind_gpu(self._d_wind_scratch) + if self._wind_enabled and self._wind_particles is not None: + self._update_wind_particles() + self._splat_wind_gpu(self._d_wind_scratch) + if self._hydro_enabled and self._hydro_particles is not None: + self._update_hydro_particles() + self._splat_hydro_gpu(self._d_wind_scratch) self._d_wind_scratch.get(out=self._pinned_frame) self._composite_overlays() @@ -4876,11 +6248,16 @@ def _cycle_terrain_layer(self): self._active_color_data = None self._active_overlay_data = None self._overlay_as_water = False + self._active_overlay_color_lut = None print(f"Terrain: elevation") else: self._active_color_data = None self._active_overlay_data = self._overlay_layers[layer_name] - self._overlay_as_water = layer_name.startswith('flood_') + self._overlay_as_water = ( + layer_name.startswith('flood_') + or (layer_name == 'stream_link' and self._hydro_enabled)) + self._active_overlay_color_lut = self._overlay_color_luts.get( + layer_name) if self._overlay_as_water: print(f"Terrain: {layer_name} (water)") else: @@ -5821,6 +7198,8 @@ def _save_screenshot(self): from .analysis import render as render_func # Common render kwargs + # fog_density is scene-relative; convert to absolute for the kernel + _fog = self.fog_density / self._scene_diagonal if self.fog_density > 0 else 0.0 render_kwargs = dict( camera_position=tuple(self.position), look_at=tuple(self._get_look_at()), @@ -5831,6 +7210,8 @@ def _save_screenshot(self): sun_altitude=self.sun_altitude, shadows=self.shadows, ambient=self.ambient, + fog_density=_fog, + fog_color=self.fog_color, colormap=self.colormap, rtx=self.rtx, viewshed_data=viewshed_data, @@ -5844,6 +7225,7 @@ def _save_screenshot(self): overlay_data=self._active_overlay_data, overlay_alpha=self._overlay_alpha, overlay_as_water=self._overlay_as_water, + 
overlay_color_lut=self._active_overlay_color_lut, geometry_colors=geometry_colors, ) @@ -5967,6 +7349,8 @@ def _render_frame(self): # are non-linear operations that must act on the clean signal. defer_post = self.ao_enabled or self.denoise_enabled + # fog_density is scene-relative; convert to absolute for the kernel + _fog = self.fog_density / self._scene_diagonal if self.fog_density > 0 else 0.0 d_output = render( self.raster, camera_position=tuple(self.position), @@ -5978,6 +7362,8 @@ def _render_frame(self): sun_altitude=self.sun_altitude, shadows=self.shadows, ambient=self.ambient, + fog_density=_fog, + fog_color=self.fog_color, colormap=self.colormap, rtx=self.rtx, viewshed_data=viewshed_data, @@ -5993,6 +7379,7 @@ def _render_frame(self): overlay_data=self._active_overlay_data, overlay_alpha=self._overlay_alpha, overlay_as_water=self._overlay_as_water, + overlay_color_lut=self._active_overlay_color_lut, geometry_colors=geometry_colors, ao_samples=ao_samples, ao_radius=self.ao_radius, @@ -6112,8 +7499,8 @@ def _update_frame(self): self._pinned_mem, dtype=np.float32, count=d_display.size ).reshape(d_display.shape) - # Save clean post-processed frame for idle wind replay - if self._wind_enabled: + # Save clean post-processed frame for idle wind/hydro replay + if self._wind_enabled or self._hydro_enabled: if self._d_base_frame is None or self._d_base_frame.shape != d_display.shape: self._d_base_frame = cp.empty_like(d_display) self._d_wind_scratch = cp.empty_like(d_display) @@ -6124,11 +7511,22 @@ def _update_frame(self): self._update_wind_particles() self._splat_wind_gpu(d_display) - # Sync: wind kernel runs on stream 0, readback on non-blocking stream - if self._wind_done_event is None: - self._wind_done_event = cp.cuda.Event() - self._wind_done_event.record() - self._readback_stream.wait_event(self._wind_done_event) + # GPU hydro: advect on CPU, splat on GPU + if self._hydro_enabled and self._hydro_particles is not None: + self._update_hydro_particles() + 
self._splat_hydro_gpu(d_display) + + # Sync: splat kernels run on stream 0, readback on non-blocking stream + if self._wind_enabled or self._hydro_enabled: + sync_event = self._wind_done_event or self._hydro_done_event + if sync_event is None: + sync_event = cp.cuda.Event() + if self._wind_enabled: + self._wind_done_event = sync_event + else: + self._hydro_done_event = sync_event + sync_event.record() + self._readback_stream.wait_event(sync_event) # Async D2H copy on non-blocking stream d_display.get(out=self._pinned_frame, stream=self._readback_stream) @@ -6593,6 +7991,7 @@ def _render_help_text(self): ("DATA LAYERS", [ ("Shift+F", "FIRMS fire (7d)"), ("Shift+W", "Toggle wind"), + ("Shift+Y", "Toggle hydro flow"), ]), ("RENDERING", [ ("0", "Toggle ambient occlusion"), @@ -6835,6 +8234,7 @@ def run(self, start_position: Optional[Tuple[float, float, float]] = None, world_W = W * self.pixel_spacing_x world_H = H * self.pixel_spacing_y world_diag = np.sqrt(world_W**2 + world_H**2) + self._scene_diagonal = world_diag # Set initial move speed based on terrain extent (~1% of diagonal per keystroke) if self.move_speed is None: @@ -7106,6 +8506,7 @@ def explore(raster, width: int = 800, height: int = 600, baked_meshes=None, subsample: int = 1, wind_data=None, + hydro_data=None, gtfs_data=None, accessor=None, terrain_loader=None, @@ -7113,6 +8514,13 @@ def explore(raster, width: int = 800, height: int = 600, ao_samples: int = 0, gi_bounces: int = 1, denoise: bool = False, + fog_density: float = 0.0, + fog_color: tuple = (0.7, 0.8, 0.9), + colormap: str = None, + sun_azimuth: float = None, + sun_altitude: float = None, + shadows: bool = None, + ambient: float = None, minimap_style: str = None, minimap_layer: str = None, minimap_colors: dict = None, @@ -7164,6 +8572,12 @@ def explore(raster, width: int = 800, height: int = 600, wind_data : dict, optional Wind data from ``fetch_wind()``. If provided, Shift+W toggles wind particle animation. 
+ hydro_data : dict, optional + Hydrological flow data with keys ``'flow_dir'`` (D8 direction + grid) and ``'flow_accum'`` (flow accumulation grid). If provided, + Shift+Y toggles hydro flow particle animation. Optional keys: + ``'n_particles'``, ``'max_age'``, ``'trail_len'``, ``'speed'``, + ``'accum_threshold'``, ``'color'``, ``'alpha'``, ``'dot_radius'``. gtfs_data : dict, optional GTFS data from ``fetch_gtfs()``. If the metadata contains a ``realtime_url``, Shift+B toggles realtime vehicle positions. @@ -7216,6 +8630,7 @@ def explore(raster, width: int = 800, height: int = 600, - C: Cycle colormap - Shift+F: Fetch/toggle FIRMS fire layer (7d LANDSAT 30m) - Shift+W: Toggle wind particle animation + - Shift+Y: Toggle hydro flow particle animation - Shift+B: Toggle GTFS-RT realtime vehicle overlay - F: Save screenshot - M: Toggle minimap overlay @@ -7283,6 +8698,43 @@ def explore(raster, width: int = 800, height: int = 600, if wind_data is not None: viewer._init_wind(wind_data) + # Hydro flow initialization + if hydro_data is not None: + hydro_start_enabled = hydro_data.get('enabled', True) + flow_dir = hydro_data['flow_dir'] + flow_accum = hydro_data['flow_accum'] + hydro_opts = {k: v for k, v in hydro_data.items() + if k not in ('flow_dir', 'flow_accum', 'enabled')} + viewer._init_hydro(flow_dir, flow_accum, **hydro_opts) + # Re-register stream_link overlay with NaN + palette coloring + if (viewer._hydro_stream_order_raw is not None + and 'stream_link' in viewer._overlay_layers): + max_order = int(viewer._hydro_stream_order_raw.max()) + palette_lut = InteractiveViewer._build_stream_palette_lut( + max_order) + sl_data = viewer._base_overlay_layers['stream_link'] + if hasattr(sl_data, 'get'): + sl_data = sl_data.get() + sl_data = np.asarray(sl_data, dtype=np.float32) + so_raw = viewer._hydro_stream_order_raw.astype(np.float32) + sl_color = np.where( + (sl_data <= 0) | (so_raw <= 0), + np.float32(np.nan), so_raw) + _add_overlay(viewer, 'stream_link', sl_color, + 
color_lut=palette_lut) + if hydro_start_enabled: + viewer._hydro_enabled = True + # Stream cells rendered with water reflection shader + if 'stream_link' in viewer._overlay_layers: + viewer._overlay_as_water = True + else: + viewer._hydro_enabled = False + # Switch back to elevation (don't leave stream_link active) + viewer._terrain_layer_idx = 0 + viewer._active_overlay_data = None + viewer._overlay_as_water = False + viewer._active_overlay_color_lut = None + # GTFS-RT initialization if gtfs_data is not None: rt_url = (gtfs_data.get('metadata') or {}).get('realtime_url') @@ -7318,6 +8770,24 @@ def explore(raster, width: int = 800, height: int = 600, if denoise: viewer.denoise_enabled = True + # Shared render params (fog, lighting, colormap) + if fog_density > 0: + viewer.fog_density = fog_density + if fog_color != (0.7, 0.8, 0.9): + viewer.fog_color = fog_color + if colormap is not None: + viewer.colormap = colormap + if colormap in viewer.colormaps: + viewer.colormap_idx = viewer.colormaps.index(colormap) + if sun_azimuth is not None: + viewer.sun_azimuth = sun_azimuth + if sun_altitude is not None: + viewer.sun_altitude = sun_altitude + if shadows is not None: + viewer.shadows = shadows + if ambient is not None: + viewer.ambient = ambient + # Initial state: everything off except elevation viewer._tiles_enabled = False viewer._basemap_idx = 0 # 'none' diff --git a/rtxpy/quickstart.py b/rtxpy/quickstart.py index 088129e..d7a6301 100644 --- a/rtxpy/quickstart.py +++ b/rtxpy/quickstart.py @@ -4,6 +4,393 @@ from pathlib import Path +def _rasterize_waterways_to_dem(water_geojson, terrain, elev_np, ocean): + """Rasterize Overture waterway features into a burn-depth grid. + + Carves LineStrings (rivers/streams) and fills Polygons (lakes) into + the DEM so the D8 algorithm routes flow through known channels. + + Returns a float32 array of burn depths (positive = carve down), + or None if no features were rasterized. 
+ """ + import numpy as np + + features = water_geojson.get('features', []) + if not features: + return None + + H, W = elev_np.shape + burn = np.zeros((H, W), dtype=np.float32) + + # Build lon/lat → pixel transformer from the terrain's CRS + affine + try: + from pyproj import Transformer + terrain_crs = terrain.rio.crs + to_crs = Transformer.from_crs('EPSG:4326', terrain_crs, + always_xy=True) + except Exception: + return None + + # Affine: pixel (col, row) ↔ projected (x, y) + x_coords = terrain.coords[terrain.dims[-1]].values + y_coords = terrain.coords[terrain.dims[-2]].values + x0, x1 = float(x_coords[0]), float(x_coords[-1]) + y0, y1 = float(y_coords[0]), float(y_coords[-1]) + dx = (x1 - x0) / max(W - 1, 1) + dy = (y1 - y0) / max(H - 1, 1) + + def lonlat_to_pixel(lon, lat): + """Convert (lon, lat) → (col, row) in the DEM grid.""" + px, py = to_crs.transform(lon, lat) + col = (px - x0) / dx + row = (py - y0) / dy + return col, row + + def densify_line(pixel_coords): + """Walk polyline at 1-pixel steps → list of (row, col).""" + pts = [] + for i in range(len(pixel_coords) - 1): + c0, r0 = pixel_coords[i] + c1, r1 = pixel_coords[i + 1] + dc, dr = c1 - c0, r1 - r0 + n = max(int(max(abs(dr), abs(dc))), 1) + for s in range(n + 1): + t = s / n + pts.append((int(round(r0 + dr * t)), + int(round(c0 + dc * t)))) + return pts + + # Burn depths by waterway type + _LINE_BURN = { + 'river': 5.0, 'canal': 3.0, + 'stream': 2.0, 'drain': 1.0, 'ditch': 0.5, + } + _POLY_BURN = 3.0 # lakes / reservoirs + + n_burned = 0 + for feat in features: + geom = feat.get('geometry', {}) + gtype = geom.get('type', '') + subtype = (feat.get('properties') or {}).get('subtype', '') + + if gtype == 'LineString': + coords = geom.get('coordinates', []) + if len(coords) < 2: + continue + depth = _LINE_BURN.get(subtype, 2.0) + px = [lonlat_to_pixel(c[0], c[1]) for c in coords] + for rr, cc in densify_line(px): + if 0 <= rr < H and 0 <= cc < W and not ocean[rr, cc]: + if depth > burn[rr, cc]: + 
burn[rr, cc] = depth + n_burned += 1 + + elif gtype in ('Polygon', 'MultiPolygon'): + all_rings = [] + if gtype == 'Polygon': + all_rings = geom.get('coordinates', []) + else: + for poly in geom.get('coordinates', []): + all_rings.extend(poly) + for ring in all_rings: + if len(ring) < 3: + continue + px = [lonlat_to_pixel(c[0], c[1]) for c in ring] + # Outline + for rr, cc in densify_line(px): + if 0 <= rr < H and 0 <= cc < W and not ocean[rr, cc]: + if _POLY_BURN > burn[rr, cc]: + burn[rr, cc] = _POLY_BURN + # Fill interior via scanline + pr = np.array([p[1] for p in px], dtype=np.float64) + pc = np.array([p[0] for p in px], dtype=np.float64) + r_min = max(int(pr.min()), 0) + r_max = min(int(pr.max()), H - 1) + n_verts = len(pr) + for row in range(r_min, r_max + 1): + xings = [] + for j in range(n_verts): + j1 = (j + 1) % n_verts + r0, r1 = pr[j], pr[j1] + if (r0 <= row < r1) or (r1 <= row < r0): + t = (row - r0) / (r1 - r0) + xings.append(pc[j] + t * (pc[j1] - pc[j])) + xings.sort() + for k in range(0, len(xings) - 1, 2): + c_lo = max(int(round(xings[k])), 0) + c_hi = min(int(round(xings[k + 1])), W - 1) + for cc in range(c_lo, c_hi + 1): + if not ocean[row, cc]: + if _POLY_BURN > burn[row, cc]: + burn[row, cc] = _POLY_BURN + n_burned += 1 + + if n_burned > 0: + # Buffer the burn by a few pixels with tapered depth — + # widens channels so D8 catches more flow into them. 
+ from scipy.ndimage import maximum_filter as _max_filt + from scipy.ndimage import gaussian_filter as _gauss_filt + # Expand burn mask by 3 px radius + buffered = _max_filt(burn, size=7) + # Taper edges: Gaussian blur gives smooth falloff + buffered = _gauss_filt(buffered, sigma=1.5).astype(np.float32) + # Keep the sharper original where it's deeper + burn = np.maximum(burn, buffered) + burn[ocean] = 0.0 + + n_cells = int((burn > 0).sum()) + print(f" Burned {n_burned} waterway features into DEM " + f"({n_cells} cells, max depth {burn.max():.1f} m)") + return burn + return None + + +def _burn_waterways_to_grids(water_geojson, terrain, sl_grid, so_grid, + ocean, next_link_id): + """Burn Overture waterway features into stream_link and stream_order grids. + + Marks waterway cells in *sl_grid* (with *next_link_id* for cells not + already part of the D8 stream network) and upgrades *so_grid* with + equivalent Strahler orders. Both arrays are modified in-place. + + Returns the number of features burned. 
+ """ + import numpy as np + + features = water_geojson.get('features', []) + if not features: + return 0 + + H, W = sl_grid.shape + + try: + from pyproj import Transformer + terrain_crs = terrain.rio.crs + to_crs = Transformer.from_crs('EPSG:4326', terrain_crs, + always_xy=True) + except Exception: + return 0 + + x_coords = terrain.coords[terrain.dims[-1]].values + y_coords = terrain.coords[terrain.dims[-2]].values + x0, x1 = float(x_coords[0]), float(x_coords[-1]) + y0, y1 = float(y_coords[0]), float(y_coords[-1]) + dx = (x1 - x0) / max(W - 1, 1) + dy = (y1 - y0) / max(H - 1, 1) + + def lonlat_to_pixel(lon, lat): + px, py = to_crs.transform(lon, lat) + return (px - x0) / dx, (py - y0) / dy + + def densify_line(pixel_coords): + pts = [] + for i in range(len(pixel_coords) - 1): + c0, r0 = pixel_coords[i] + c1, r1 = pixel_coords[i + 1] + dc, dr = c1 - c0, r1 - r0 + n = max(int(max(abs(dr), abs(dc))), 1) + for s in range(n + 1): + t = s / n + pts.append((int(round(r0 + dr * t)), + int(round(c0 + dc * t)))) + return pts + + _LINE_ORDER = { + 'river': 5, 'canal': 4, + 'stream': 2, 'drain': 1, 'ditch': 1, + } + + def _mark(rr, cc, order): + if 0 <= rr < H and 0 <= cc < W and not ocean[rr, cc]: + if sl_grid[rr, cc] <= 0: + sl_grid[rr, cc] = next_link_id + if order > so_grid[rr, cc]: + so_grid[rr, cc] = order + + n_burned = 0 + for feat in features: + geom = feat.get('geometry', {}) + gtype = geom.get('type', '') + subtype = (feat.get('properties') or {}).get('subtype', '') + + if gtype == 'LineString': + coords = geom.get('coordinates', []) + if len(coords) < 2: + continue + order = _LINE_ORDER.get(subtype, 2) + px = [lonlat_to_pixel(c[0], c[1]) for c in coords] + for rr, cc in densify_line(px): + _mark(rr, cc, order) + n_burned += 1 + + elif gtype in ('Polygon', 'MultiPolygon'): + all_rings = [] + if gtype == 'Polygon': + all_rings = geom.get('coordinates', []) + else: + for poly in geom.get('coordinates', []): + all_rings.extend(poly) + poly_order = 
max(_LINE_ORDER.get(subtype, 2), 5) + for ring in all_rings: + if len(ring) < 3: + continue + px = [lonlat_to_pixel(c[0], c[1]) for c in ring] + for rr, cc in densify_line(px): + _mark(rr, cc, poly_order) + # Scanline fill + pr = np.array([p[1] for p in px], dtype=np.float64) + pc = np.array([p[0] for p in px], dtype=np.float64) + r_min = max(int(pr.min()), 0) + r_max = min(int(pr.max()), H - 1) + n_verts = len(pr) + for row in range(r_min, r_max + 1): + xings = [] + for j in range(n_verts): + j1 = (j + 1) % n_verts + r0, r1 = pr[j], pr[j1] + if (r0 <= row < r1) or (r1 <= row < r0): + t = (row - r0) / (r1 - r0) + xings.append(pc[j] + t * (pc[j1] - pc[j])) + xings.sort() + for k in range(0, len(xings) - 1, 2): + c_lo = max(int(round(xings[k])), 0) + c_hi = min(int(round(xings[k + 1])), W - 1) + for cc in range(c_lo, c_hi + 1): + _mark(row, cc, poly_order) + n_burned += 1 + + return n_burned + + +def _trace_tributaries_flow_path(fd, fa, water_geojson, terrain, ocean, + threshold=50): + """Trace tributary network via xrspatial.flow_path. + + Pass 1: rasterize Overture waterway vertices as seed points, trace + downstream through D8 → main channel mask. + Pass 2: seed from high-accumulation cells NOT on the main channel, + trace downstream → tributary mask. + + Returns (trib_cells, ww_net) — both bool arrays (H, W). 
+ """ + import numpy as np + from xrspatial import flow_path as _flow_path + + features = water_geojson.get('features', []) + H, W = ocean.shape + + # -- coordinate transform (lon/lat → pixel) ------------------------------ + try: + from pyproj import Transformer + terrain_crs = terrain.rio.crs + to_crs = Transformer.from_crs('EPSG:4326', terrain_crs, + always_xy=True) + except Exception: + return None, None + + x_coords = terrain.coords[terrain.dims[-1]].values + y_coords = terrain.coords[terrain.dims[-2]].values + x0, x1 = float(x_coords[0]), float(x_coords[-1]) + y0, y1 = float(y_coords[0]), float(y_coords[-1]) + dx = (x1 - x0) / max(W - 1, 1) + dy = (y1 - y0) / max(H - 1, 1) + + def lonlat_to_pixel(lon, lat): + px, py = to_crs.transform(lon, lat) + return (px - x0) / dx, (py - y0) / dy + + def densify_line(pixel_coords): + pts = [] + for i in range(len(pixel_coords) - 1): + c0, r0 = pixel_coords[i] + c1, r1 = pixel_coords[i + 1] + dc_, dr_ = c1 - c0, r1 - r0 + n = max(int(max(abs(dr_), abs(dc_))), 1) + for s in range(n + 1): + t = s / n + pts.append((int(round(r0 + dr_ * t)), + int(round(c0 + dc_ * t)))) + return pts + + # -- Pass 1: waterway seeds → main channel via flow_path ----------------- + ww_seeds = np.full((H, W), np.nan, dtype=np.float32) + n_seed = 0 + label = 1.0 + for feat in features: + geom = feat.get('geometry', {}) + gtype = geom.get('type', '') + + if gtype == 'LineString': + coords = geom.get('coordinates', []) + if len(coords) < 2: + continue + px = [lonlat_to_pixel(c[0], c[1]) for c in coords] + for rr, cc in densify_line(px): + if 0 <= rr < H and 0 <= cc < W and not ocean[rr, cc]: + ww_seeds[rr, cc] = label + n_seed += 1 + label += 1.0 + + elif gtype in ('Polygon', 'MultiPolygon'): + all_rings = [] + if gtype == 'Polygon': + all_rings = geom.get('coordinates', []) + else: + for poly in geom.get('coordinates', []): + all_rings.extend(poly) + for ring in all_rings: + if len(ring) < 3: + continue + px = [lonlat_to_pixel(c[0], c[1]) for c in 
ring] + for rr, cc in densify_line(px): + if 0 <= rr < H and 0 <= cc < W and not ocean[rr, cc]: + ww_seeds[rr, cc] = label + n_seed += 1 + label += 1.0 + + if n_seed == 0: + return None, None + + ww_seeds_da = terrain.copy(data=ww_seeds) + del ww_seeds + ww_traced = _flow_path(fd, ww_seeds_da) + del ww_seeds_da + ww_traced_np = ww_traced.data.get() if hasattr(ww_traced.data, 'get') \ + else np.asarray(ww_traced.data) + ww_net = np.isfinite(ww_traced_np) + del ww_traced, ww_traced_np + n_channel = int(ww_net.sum()) + + # -- Pass 2: headwater seeds → tributaries via flow_path ----------------- + fa_np = fa.data.get() if hasattr(fa.data, 'get') else np.asarray(fa.data) + fa_np = np.nan_to_num(fa_np, nan=0.0) + + hw_seeds = np.full((H, W), np.nan, dtype=np.float32) + hw_mask = (fa_np >= threshold) & (~ww_net) & (~ocean) + hw_seeds[hw_mask] = 1.0 + n_hw = int(hw_mask.sum()) + del fa_np, hw_mask + + if n_hw > 0: + hw_seeds_da = terrain.copy(data=hw_seeds) + del hw_seeds + hw_traced = _flow_path(fd, hw_seeds_da) + del hw_seeds_da + hw_traced_np = hw_traced.data.get() if hasattr(hw_traced.data, 'get') \ + else np.asarray(hw_traced.data) + trib_cells = np.isfinite(hw_traced_np) & (~ww_net) + del hw_traced, hw_traced_np + n_trib = int(trib_cells.sum()) + else: + del hw_seeds + trib_cells = np.zeros((H, W), dtype=bool) + n_trib = 0 + + print(f" flow_path: {n_seed} waterway seeds → {n_channel} channel cells, " + f"{n_hw} headwaters → {n_trib} tributary cells") + return trib_cells, ww_net + + def quickstart( name, bounds, @@ -13,6 +400,8 @@ def quickstart( tiles='satellite', tile_zoom=None, wind=True, + hydro=False, + coast_distance=False, cache_dir=None, **explore_kwargs, ): @@ -50,6 +439,15 @@ def quickstart( Tile zoom level override. ``None`` uses the provider default. wind : bool Fetch live wind data from Open-Meteo. Default ``True``. 
+ hydro : bool + Compute D8 flow direction and flow accumulation from the terrain + using xarray-spatial and enable hydro flow particle animation + (Shift+Y). Default ``False``. + coast_distance : bool + Compute terrain-aware surface distance from the coast using + xrspatial's ``surface_distance`` (3-D Dijkstra). Adds a + ``coast_distance`` layer visible as a G-key overlay. + Default ``False``. cache_dir : str or Path, optional Directory for the zarr store and GeoJSON caches. Defaults to the current working directory. @@ -61,7 +459,6 @@ def quickstart( """ import numpy as np import xarray as xr - from xrspatial import slope, aspect, quantile # -- paths ---------------------------------------------------------------- if cache_dir is None: @@ -79,12 +476,9 @@ def quickstart( terrain = terrain.rtx.to_cupy() # -- Dataset with analysis layers ----------------------------------------- - print("Building Dataset with terrain analysis layers...") + print("Building Dataset...") ds = xr.Dataset({ 'elevation': terrain.rename(None), - 'slope': slope(terrain), - 'aspect': aspect(terrain), - 'quantile': quantile(terrain), }) # -- tiles ---------------------------------------------------------------- @@ -189,6 +583,337 @@ def quickstart( except Exception as e: print(f"Skipping wind: {e}") + # -- hydro ---------------------------------------------------------------- + hydro_data = None + if hydro: + try: + import gc + from xrspatial import fill as _fill + from xrspatial import flow_direction as _flow_direction + from xrspatial import flow_accumulation as _flow_accumulation + from xrspatial import stream_order as _stream_order + from xrspatial import stream_link as _stream_link + + print("Conditioning DEM for hydrological flow...") + from scipy.ndimage import uniform_filter as _uniform_filter + + # 1. Prepare elevation: ocean (0-fill) → low sentinel so + # fill() routes coastal depressions toward the sea. 
+ _data = terrain.data + is_cupy = hasattr(_data, 'get') + elev_np = _data.get() if is_cupy else np.array(_data) + elev_np = elev_np.astype(np.float32) + ocean = (elev_np == 0.0) | np.isnan(elev_np) + elev_np[ocean] = -100.0 + + # Pre-load waterway GeoJSON once (used in steps 1b, 6b, + # 6c, and for hydro_data). Avoids 4× redundant file reads. + _water_geojson = None + if 'water' in feat: + _wc = cache_dir / f"{name}_water.geojson" + if _wc.exists(): + import json as _json_ww + try: + with open(_wc) as _wf: + _water_geojson = _json_ww.load(_wf) + except Exception as _e: + print(f" Waterway GeoJSON load failed: {_e}") + + # 1b. Burn Overture waterways into the DEM — carve known + # river/stream channels so D8 routes through them. + if _water_geojson is not None: + try: + _ww_burn = _rasterize_waterways_to_dem( + _water_geojson, terrain, elev_np, ocean) + if _ww_burn is not None: + elev_np -= _ww_burn + elev_np[ocean] = -100.0 + del _ww_burn + except Exception as _e: + print(f" Waterway DEM burn skipped: {_e}") + + # 2. Smooth (15×15 ≈ 450 m at 30 m) to remove noise pits + # that fragment the drainage network. + smoothed = _uniform_filter(elev_np, size=15, mode='nearest') + smoothed[ocean] = -100.0 + del elev_np + + # 3. Fill remaining depressions, then resolve flats. + if is_cupy: + import cupy as _cp + _sm = _cp.asarray(smoothed) + else: + _sm = smoothed + del smoothed + filled = _fill(terrain.copy(data=_sm)) + fill_depth = filled.data - _sm + resolved = filled.data + fill_depth * 0.01 + del filled, fill_depth, _sm + + # 3b. Distance-to-ocean gradient — ensures flat areas + # drain coherently toward the coast. 
+ from scipy.ndimage import distance_transform_edt as _dist_edt + dist_to_ocean = _dist_edt(~ocean).astype(np.float32) + ocean_gradient = dist_to_ocean * 0.0001 + del dist_to_ocean + + if is_cupy: + resolved = resolved + _cp.asarray(ocean_gradient) + resolved[_cp.asarray(ocean)] = -100.0 + else: + resolved += ocean_gradient + resolved[ocean] = -100.0 + del ocean_gradient + + # 3c. Channel burning — compute an initial flow + # accumulation, then lower high-accumulation cells + # to carve channels into the DEM. Re-fill and + # re-compute so streams connect into a network. + _fd0 = _flow_direction(terrain.copy(data=resolved)) + _fa0 = _flow_accumulation(_fd0) + del _fd0 + _fa0_np = _fa0.data.get() if is_cupy else np.asarray(_fa0.data) + _fa0_np = np.nan_to_num(_fa0_np, nan=0.0) + del _fa0 + + # Burn proportional to log(accumulation): cells with + # more upstream area get carved deeper (up to ~2 m). + _log_acc = np.log10(np.clip(_fa0_np, 1, None)) + _log_max = max(_log_acc.max(), 1.0) + _burn = (_log_acc / _log_max) * 2.0 # 0–2 m carve + _burn[ocean] = 0.0 + del _fa0_np, _log_acc + + if is_cupy: + resolved = resolved - _cp.asarray(_burn.astype(np.float32)) + resolved[_cp.asarray(ocean)] = -100.0 + else: + resolved -= _burn.astype(np.float32) + resolved[ocean] = -100.0 + del _burn + + # Re-fill after burning to remove any new micro-pits + filled2 = _fill(terrain.copy(data=resolved)) + fill_depth2 = filled2.data - resolved + resolved = filled2.data + fill_depth2 * 0.01 + del filled2, fill_depth2 + + # Final jitter to break remaining ties + if is_cupy: + _cp.random.seed(0) + resolved = resolved + _cp.random.uniform( + 0, 0.0001, resolved.shape, dtype=_cp.float32) + resolved[_cp.asarray(ocean)] = -100.0 + else: + np.random.seed(0) + resolved += np.random.uniform( + 0, 0.0001, resolved.shape).astype(np.float32) + resolved[ocean] = -100.0 + + # Free CuPy cached blocks before the next heavy step + if is_cupy: + _cp.get_default_memory_pool().free_all_blocks() + gc.collect() + 
+ # 4. Compute final D8 flow direction and accumulation. + fd = _flow_direction(terrain.copy(data=resolved)) + del resolved + fa = _flow_accumulation(fd) + + # 5. Compute Strahler stream order — only stream cells + # (accum >= threshold) get an order; rest are NaN. + so = _stream_order(fd, fa, threshold=50) + + # 5b. Compute stream link — unique segment IDs per reach. + sl = _stream_link(fd, fa, threshold=50) + + # 6. Mask ocean back to NaN/0 in the output grids. + fd_out = fd.data + fa_out = fa.data + so_out = so.data + if is_cupy: + ocean_gpu = _cp.asarray(ocean) + fd_out[ocean_gpu] = _cp.nan + fa_out[ocean_gpu] = _cp.nan + so_out[ocean_gpu] = _cp.nan + else: + fd_out[ocean] = np.nan + fa_out[ocean] = np.nan + so_out[ocean] = np.nan + + # Add stream_link to the dataset so it shows up as an + # overlay layer (G key) with palette-matched colors. + _sl_out = sl.data + if is_cupy: + _sl_out[ocean_gpu] = _cp.nan + else: + _sl_out[ocean] = np.nan + _sl_np = _sl_out.get() if is_cupy else np.asarray(_sl_out) + _sl_clean = np.nan_to_num(_sl_np, nan=0.0).astype(np.float32) + if is_cupy: + _sl_clean = _cp.asarray(_sl_clean) + del _sl_np + + # 6b. Burn Overture waterways into stream_link + stream_order + # so they appear in the overlay with the water shader. 
+ if _water_geojson is not None: + try: + _so_np = so_out.get() if is_cupy else np.asarray(so_out) + _so_np = np.nan_to_num(_so_np, nan=0.0).astype( + np.float32) + _sl_np2 = _sl_clean.get() if is_cupy else np.asarray( + _sl_clean) + _sl_np2 = np.array(_sl_np2, dtype=np.float32) + _max_link = int(_sl_np2.max()) + 1 + _n_ww = _burn_waterways_to_grids( + _water_geojson, terrain, _sl_np2, _so_np, + ocean, _max_link) + if _n_ww > 0: + if is_cupy: + _sl_clean = _cp.asarray(_sl_np2) + so_out = _cp.asarray(_so_np) + else: + _sl_clean = _sl_np2 + so_out = _so_np + _sl_out = _sl_clean # keep in sync + print(f" Burned {_n_ww} waterway features " + f"into stream_link overlay") + del _so_np, _sl_np2 + except Exception as _e2: + print(f" Waterway overlay burn skipped: {_e2}") + + # 6c. Trace tributary network via flow_path + if _water_geojson is not None: + try: + _trib, _ww_net = _trace_tributaries_flow_path( + fd, fa, _water_geojson, terrain, ocean) + if _trib is not None: + _so_np3 = so_out.get() if is_cupy else np.asarray(so_out) + _so_np3 = np.nan_to_num(_so_np3, nan=0.0).astype( + np.float32) + _sl_np3 = _sl_clean.get() if is_cupy else np.asarray( + _sl_clean) + _sl_np3 = np.array(_sl_np3, dtype=np.float32) + _max_link3 = int(_sl_np3.max()) + 1 + # New waterway-channel cells → stream_order 3 + _new_ww = _ww_net & (_sl_np3 == 0) & (~ocean) + _sl_np3[_new_ww] = _max_link3 + _so_np3[_new_ww] = np.maximum( + _so_np3[_new_ww], 3.0) + _max_link3 += 1 + # New tributary cells → stream_order 1 + _new_trib = _trib & (_sl_np3 == 0) & (~ocean) + _sl_np3[_new_trib] = _max_link3 + _so_np3[_new_trib] = np.maximum( + _so_np3[_new_trib], 1.0) + if is_cupy: + _sl_clean = _cp.asarray(_sl_np3) + so_out = _cp.asarray(_so_np3) + else: + _sl_clean = _sl_np3 + so_out = _so_np3 + _sl_out = _sl_clean + del _so_np3, _sl_np3, _trib, _ww_net + except ImportError: + print(" flow_path tracing skipped " + "(xrspatial.flow_path unavailable)") + except Exception as _e3: + print(f" flow_path tracing 
skipped: {_e3}") + + # Drop xrspatial DataArray wrappers — we only need .data + del fd, fa, so, sl + + ds['stream_link'] = terrain.copy(data=_sl_clean).rename(None) + + hydro_data = { + 'flow_dir': fd_out, + 'flow_accum': fa_out, + 'stream_order': so_out, + 'stream_link': _sl_out, + 'accum_threshold': 50, + } + del fd_out, fa_out, _sl_out + # Pass overrides from explore_kwargs if present + for key in ('n_particles', 'max_age', 'trail_len', 'speed', + 'accum_threshold', 'color', 'alpha', 'dot_radius'): + hydro_key = f'hydro_{key}' + if hydro_key in explore_kwargs: + hydro_data[key] = explore_kwargs.pop(hydro_key) + # Attach cached Overture waterway features for particle + # injection and unified water-shader rendering. + if _water_geojson is not None: + ww_feats = [ + f for f in _water_geojson.get('features', []) + if f.get('geometry', {}).get('type') in + ('LineString', 'Polygon', 'MultiPolygon') + ] + if ww_feats: + hydro_data['waterway_geojson'] = { + 'type': 'FeatureCollection', + 'features': ww_feats, + } + n_lines = sum( + 1 for f in ww_feats + if f['geometry']['type'] == 'LineString') + n_polys = len(ww_feats) - n_lines + parts = [] + if n_lines: + parts.append(f"{n_lines} LineStrings") + if n_polys: + parts.append(f"{n_polys} Polygons") + print(f" Loaded {' + '.join(parts)} " + f"for hydro overlay") + del ww_feats + + del _water_geojson, ocean, _sl_clean + if is_cupy: + _cp.get_default_memory_pool().free_all_blocks() + gc.collect() + + print(f" Flow direction + accumulation computed on " + f"{terrain.shape[0]}x{terrain.shape[1]} grid") + except Exception as e: + print(f"Skipping hydro: {e}") + + # -- coast distance ------------------------------------------------------- + if coast_distance: + try: + import gc as _gc_cd + from xrspatial import surface_distance as _surface_distance + from scipy.ndimage import binary_erosion as _binary_erosion + + _data = terrain.data + _elev = _data.get() if hasattr(_data, 'get') else np.array(_data) + _ocean = (_elev == 0.0) 
| np.isnan(_elev) + _land = ~_ocean + + # Coast = land cells adjacent to ocean + _coast = _land & ~_binary_erosion(_land) + del _land + + # Target raster: 1.0 at coast, 0.0 elsewhere + _targets = np.zeros_like(_elev, dtype=np.float32) + _targets[_coast] = 1.0 + del _coast + + # Elevation with ocean as NaN barriers + _elev_clean = _elev.astype(np.float32).copy() + _elev_clean[_ocean] = np.nan + del _elev, _ocean + + _dist = _surface_distance( + terrain.copy(data=_targets), + elevation=terrain.copy(data=_elev_clean), + method='planar', + ) + del _targets, _elev_clean + ds['coast_distance'] = _dist.rename(None) + del _dist + _gc_cd.collect() + print("Surface distance from coast computed") + except Exception as e: + print(f"Skipping coast distance: {e}") + # -- explore -------------------------------------------------------------- defaults = dict( width=2048, height=1600, render_scale=0.5, @@ -198,7 +923,8 @@ def quickstart( print("\nLaunching explore...\n") ds.rtx.explore(z='elevation', scene_zarr=zarr_path, - wind_data=wind_data, gtfs_data=gtfs_data, **defaults) + wind_data=wind_data, hydro_data=hydro_data, + gtfs_data=gtfs_data, **defaults) # --------------------------------------------------------------------------- diff --git a/rtxpy/viewer/hydro.py b/rtxpy/viewer/hydro.py new file mode 100644 index 0000000..99bf6b9 --- /dev/null +++ b/rtxpy/viewer/hydro.py @@ -0,0 +1,84 @@ +"""Hydrological flow particle state for the interactive viewer.""" + + +class HydroState: + """Holds all hydro flow particle simulation and GPU splatting state. + + Groups the hydro-related variables and GPU buffers into a + single object for cleaner viewer initialization. 
+ """ + + __slots__ = ( + 'hydro_data', 'hydro_enabled', + 'hydro_flow_u_px', 'hydro_flow_v_px', + 'hydro_flow_accum_norm', + 'hydro_stream_order', + 'hydro_stream_order_raw', + 'hydro_stream_link', + 'hydro_particles', 'hydro_ages', 'hydro_lifetimes', + 'hydro_max_age', 'hydro_n_particles', + 'hydro_trail_len', 'hydro_trails', + 'hydro_speed', 'hydro_min_depth', 'hydro_ref_depth', + 'hydro_dot_radius', 'hydro_alpha', + 'hydro_min_visible_age', + 'hydro_accum_threshold', + 'hydro_color', + 'hydro_terrain_np', + 'hydro_spawn_probs', + 'hydro_spawn_indices', 'hydro_spawn_valid_probs', + # Accumulation-scaled rendering + 'hydro_slope_mag', + 'hydro_particle_accum', + 'hydro_particle_colors', + 'hydro_particle_radii', + 'hydro_particle_raw_order', + # GPU buffers + 'd_hydro_trails', + 'd_hydro_ages', 'd_hydro_lifetimes', + 'd_hydro_colors', 'd_hydro_radii', + 'hydro_done_event', + ) + + def __init__(self): + self.hydro_data = None + self.hydro_enabled = False + self.hydro_flow_u_px = None + self.hydro_flow_v_px = None + self.hydro_flow_accum_norm = None + self.hydro_stream_order = None + self.hydro_stream_order_raw = None + self.hydro_stream_link = None + self.hydro_particles = None + self.hydro_ages = None + self.hydro_lifetimes = None + self.hydro_max_age = 200 + self.hydro_n_particles = 12000 + self.hydro_trail_len = 20 + self.hydro_trails = None + self.hydro_speed = 0.75 + self.hydro_min_depth = 0.0 + self.hydro_ref_depth = 1.0 + self.hydro_dot_radius = 2 + self.hydro_alpha = 0.5 + self.hydro_min_visible_age = 2 + self.hydro_accum_threshold = 50 + self.hydro_color = (0.2, 0.5, 1.0) + self.hydro_terrain_np = None + self.hydro_spawn_probs = None + self.hydro_spawn_indices = None + self.hydro_spawn_valid_probs = None + + # Accumulation-scaled rendering + self.hydro_slope_mag = None + self.hydro_particle_accum = None + self.hydro_particle_colors = None + self.hydro_particle_radii = None + self.hydro_particle_raw_order = None + + # GPU buffers + self.d_hydro_trails = 
None + self.d_hydro_ages = None + self.d_hydro_lifetimes = None + self.d_hydro_colors = None + self.d_hydro_radii = None + self.hydro_done_event = None diff --git a/rtxpy/viewer/keybindings.py b/rtxpy/viewer/keybindings.py index 83ca07e..217a9ce 100644 --- a/rtxpy/viewer/keybindings.py +++ b/rtxpy/viewer/keybindings.py @@ -38,6 +38,7 @@ 'H': '_action_prev_help_page', # Previous help page 'L': '_action_toggle_drone_glow', # Drone glow 'T': '_action_cycle_time', # Time-of-day + 'Y': '_action_toggle_hydro', # Hydro flow particles } # Lowercase key bindings — checked after shift bindings diff --git a/rtxpy/viewer/overlays.py b/rtxpy/viewer/overlays.py index 176dafe..e12850d 100644 --- a/rtxpy/viewer/overlays.py +++ b/rtxpy/viewer/overlays.py @@ -11,6 +11,8 @@ class OverlayManager: __slots__ = ( 'overlay_layers', 'overlay_names', 'active_color_data', 'active_overlay_data', + 'active_overlay_color_lut', + 'overlay_color_luts', 'overlay_alpha', 'overlay_as_water', 'terrain_layer_order', 'terrain_layer_idx', 'base_overlay_layers', @@ -23,6 +25,8 @@ def __init__(self, overlay_layers=None, base_overlay_layers=None): self.overlay_names = list(self.overlay_layers.keys()) self.active_color_data = None self.active_overlay_data = None + self.active_overlay_color_lut = None + self.overlay_color_luts = {} self.overlay_alpha = 0.7 self.overlay_as_water = False