Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
.python-version
__pycache__/
**/.venv
**/data
**/cache
Expand All @@ -7,3 +8,5 @@
*.tif
*.csv
*.xml
*.env
*.mp4
47 changes: 47 additions & 0 deletions animation_from_event/.dockerignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
# Git
.git
.gitignore

# Python
__pycache__
*.py[cod]
*$py.class
*.so
.Python
*.egg-info/
dist/
build/

# Environment
.env
.env.local
venv/
env/

# Data directories
data/
*.gpkg
*.csv
*.tif
*.tiff
*.mp4
*.avi

# IDE
.vscode/
.idea/
*.swp
*.swo
*~

# OS
.DS_Store
Thumbs.db

# Documentation (we copy specific files)
*.md
!README.md

# Test files
tests/
*.test
17 changes: 17 additions & 0 deletions animation_from_event/.env.example
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
# AWS Credentials for S3 Access
# ==============================
# Copy this file to .env and fill in your credentials

# AWS credentials (required for accessing private S3 buckets)
AWS_ACCESS_KEY_ID=your_access_key_here
AWS_SECRET_ACCESS_KEY=your_secret_key_here
AWS_DEFAULT_REGION=us-east-1

# Optional: AWS session token (for temporary credentials)
# AWS_SESSION_TOKEN=your_session_token_here

# Optional: S3 endpoint (for non-AWS S3-compatible storage)
# AWS_S3_ENDPOINT=https://s3.example.com

# Note: If using anonymous access to public buckets (like noaa-nwm-pds),
# these credentials are not required. Set nwm.use_anonymous: true in config.yaml
49 changes: 49 additions & 0 deletions animation_from_event/Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
# syntax=docker/dockerfile:1
FROM python:3.11-slim

# System dependencies:
#   ffmpeg                      - video encoding for the animation output
#   gdal-bin/libgdal-dev/python3-gdal - geospatial raster I/O (GeoTIFF, GPKG)
#   libhdf5-dev/libnetcdf-dev   - scientific data formats used by NWM inputs
#   build-essential             - compiling Python C extensions at pip install time
# --no-install-recommends keeps the image lean (DL3015); the apt list cache
# is removed in the same layer so it never persists in the image.
RUN apt-get update && apt-get install -y --no-install-recommends \
    build-essential \
    ffmpeg \
    gdal-bin \
    git \
    libgdal-dev \
    libhdf5-dev \
    libnetcdf-dev \
    python3-gdal \
    wget \
    && rm -rf /var/lib/apt/lists/*

# All application code and config live under /app.
WORKDIR /app

# Copy the dependency manifest alone first so the pip layer stays cached
# until requirements.txt itself changes.
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# Install the flows2fim CLI from its GitHub release (Linux AMD64 binary).
# The version check runs in the same layer so a broken download fails the
# build immediately instead of surfacing at runtime.
# NOTE(review): the tarball is fetched without a checksum; pin a sha256
# once the release publishes one so the build is tamper-evident.
ARG FLOWS2FIM_VERSION=v0.4.1
RUN wget -q https://github.com/NGWPC/flows2fim-archive/releases/download/${FLOWS2FIM_VERSION}/flows2fim-linux-amd64.tar.gz \
    && tar -xzf flows2fim-linux-amd64.tar.gz \
    && mv flows2fim /usr/local/bin/flows2fim \
    && chmod +x /usr/local/bin/flows2fim \
    && rm flows2fim-linux-amd64.tar.gz \
    && flows2fim --version

# Copy application files last (they change most often) for best cache reuse.
COPY *.py ./
COPY config.yaml ./

# Pre-create the data tree the workflow scripts read from and write into.
RUN mkdir -p /data/input /data/output /data/cache

# PYTHONUNBUFFERED streams container logs in real time;
# GDAL_DATA points GDAL at its bundled projection/support files.
ENV PYTHONUNBUFFERED=1 \
    GDAL_DATA=/usr/share/gdal

# NOTE(review): the container currently runs as root. Add a non-root USER
# once host bind-mount permissions for /data are settled (compose mounts
# these directories from the host).

# Default command; compose overrides this per task (exec form, DL3025).
CMD ["python", "--version"]
73 changes: 73 additions & 0 deletions animation_from_event/Makefile
Original file line number Diff line number Diff line change
@@ -0,0 +1,73 @@
# Every target here is a command, not a file, so all of them must be
# declared .PHONY — previously setup, clean-all and logs were missing,
# meaning a stray file with one of those names would silently shadow
# the target.
.PHONY: help build setup up down shell logs clean clean-all \
        generate-flows generate-fims generate-animation run-workflow download-lake

# Print the command reference. This is the default target.
help:
	@echo "Flood Animation Tool - Make Commands"
	@echo ""
	@echo "Setup:"
	@echo "  make build              Build Docker image"
	@echo "  make setup              Initial setup (copy .env)"
	@echo ""
	@echo "Run:"
	@echo "  make run-workflow       Run complete workflow"
	@echo "  make shell              Open interactive shell in container"
	@echo ""
	@echo "Generate:"
	@echo "  make generate-flows     Generate flow files from NWM data"
	@echo "  make generate-fims      Generate FIM GeoTIFFs"
	@echo "  make generate-animation Generate animation video"
	@echo ""
	@echo "Utilities:"
	@echo "  make download-lake      Download lake polygon (interactive)"
	@echo ""
	@echo "Cleanup:"
	@echo "  make clean              Remove output files"
	@echo "  make clean-all          Remove all generated files and images"

# Build the container image defined in docker-compose.yml.
build:
	docker-compose build

# One-time setup: seed .env from the example (never overwrite an existing
# .env) and create the host-side data directories that compose mounts.
setup:
	@if [ ! -f .env ]; then \
		cp .env.example .env; \
		echo "Created .env file - please edit with your credentials"; \
	else \
		echo ".env file already exists"; \
	fi
	@mkdir -p data/input data/output data/cache
	@echo "Directory structure created"

# Run the full pipeline end-to-end in a throwaway container (--rm).
run-workflow:
	docker-compose run --rm flood-animation python run_workflow.py --config config.yaml

# Interactive shell inside the service container for debugging.
shell:
	docker-compose run --rm flood-animation /bin/bash

# Individual pipeline stages, each in its own throwaway container.
generate-flows:
	docker-compose run --rm flood-animation python generate_flow_files.py --config config.yaml

generate-fims:
	docker-compose run --rm flood-animation python generate_batch_fims.py --config config.yaml

generate-animation:
	docker-compose run --rm flood-animation python generate_animation.py --config config.yaml

download-lake:
	docker-compose run --rm flood-animation python download_lake_polygon.py --interactive

# Remove generated outputs and cached downloads; keeps data/input intact.
clean:
	rm -rf data/output/*
	rm -rf data/cache/*
	@echo "Output and cache directories cleaned"

# Full teardown: outputs, plus the compose images and volumes.
clean-all: clean
	docker-compose down --rmi all -v
	@echo "Docker images and volumes removed"

# Start the service stack in the background.
up:
	docker-compose up -d

# Stop the service stack.
down:
	docker-compose down

# Follow the service logs.
logs:
	docker-compose logs -f
Loading