diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json new file mode 100644 index 0000000..b1ffad8 --- /dev/null +++ b/.devcontainer/devcontainer.json @@ -0,0 +1,23 @@ +{ + "name": "SOLWEIG", + "image": "mcr.microsoft.com/devcontainers/python:3.13", + "features": { + "ghcr.io/devcontainers/features/rust:1": {}, + "ghcr.io/devcontainers/features/github-cli:1": {} + }, + "customizations": { + "vscode": { + "settings": { + "python.defaultInterpreterPath": "/workspaces/solweig/.venv/bin/python" + }, + "extensions": [ + "ms-python.python", + "charliermarsh.ruff", + "rust-lang.rust-analyzer", + "ms-toolsai.jupyter", + "astral-sh.ty" + ] + } + }, + "postCreateCommand": "pip install uv && uv sync --dev && uv run maturin develop --release" +} \ No newline at end of file diff --git a/.github/workflows/build-qgis-plugin.yml b/.github/workflows/build-qgis-plugin.yml new file mode 100644 index 0000000..4409a43 --- /dev/null +++ b/.github/workflows/build-qgis-plugin.yml @@ -0,0 +1,91 @@ +name: Build QGIS Plugin + +on: + push: + tags: + - "v*" + workflow_dispatch: + inputs: + version: + description: "Version for the release" + required: true + default: "0.1.0" + +jobs: + test: + uses: ./.github/workflows/test.yml + + build-plugin: + name: Build QGIS Plugin + needs: test + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: "3.11" + + - name: Get version + id: version + run: | + if [ "${{ github.event_name }}" == "workflow_dispatch" ]; then + echo "version=${{ github.event.inputs.version }}" >> $GITHUB_OUTPUT + else + # Extract version from tag (e.g., v0.1.0 -> 0.1.0) + echo "version=${GITHUB_REF#refs/tags/v}" >> $GITHUB_OUTPUT + fi + + - name: Build plugin package + run: python qgis_plugin/build_plugin.py --version ${{ steps.version.outputs.version }} + + - name: Validate plugin ZIP + run: | + cd qgis_plugin + ZIP=$(ls solweig-qgis-*.zip | head -1) + echo 
"Validating $ZIP..." + unzip -l "$ZIP" | tee /tmp/bundle_contents.txt + + # Check essential files exist + for file in metadata.txt __init__.py provider.py icon.png; do + if grep -q "solweig_qgis/$file" /tmp/bundle_contents.txt; then + echo " OK: $file found" + else + echo " MISSING: $file" + exit 1 + fi + done + + echo "Plugin validation passed." + + - name: Upload plugin package + uses: actions/upload-artifact@v4 + with: + name: qgis-plugin + path: qgis_plugin/solweig-qgis-*.zip + + create-release: + name: Create Release + needs: build-plugin + runs-on: ubuntu-latest + if: startsWith(github.ref, 'refs/tags/') + permissions: + contents: write + + steps: + - name: Download plugin + uses: actions/download-artifact@v4 + with: + name: qgis-plugin + path: artifacts/ + + - name: Create Release + uses: softprops/action-gh-release@v2 + with: + files: | + artifacts/*.zip + generate_release_notes: true + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml new file mode 100644 index 0000000..28144fd --- /dev/null +++ b/.github/workflows/docs.yml @@ -0,0 +1,55 @@ +name: Deploy Documentation + +on: + push: + branches: [main] + tags-ignore: + - "**" + paths: + - 'docs/**' + - 'mkdocs.yml' + - 'pysrc/**' + workflow_dispatch: + +permissions: + contents: read + pages: write + id-token: write + +concurrency: + group: "pages" + cancel-in-progress: false + +jobs: + build: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.11' + + - name: Install dependencies + run: | + pip install mkdocs-material mkdocstrings[python] pymdown-extensions + + - name: Build documentation + run: mkdocs build --strict + + - name: Upload artifact + uses: actions/upload-pages-artifact@v3 + with: + path: site/ + + deploy: + environment: + name: github-pages + url: ${{ steps.deployment.outputs.page_url }} + runs-on: ubuntu-latest + needs: build + 
steps: + - name: Deploy to GitHub Pages + id: deployment + uses: actions/deploy-pages@v4 diff --git a/.github/workflows/python-publish.yml b/.github/workflows/python-publish.yml index 867804d..4c44fc0 100644 --- a/.github/workflows/python-publish.yml +++ b/.github/workflows/python-publish.yml @@ -2,25 +2,15 @@ name: publish package on: push: tags-ignore: - - "*.*.*[a]*" + - "*.*.*[a]*" # Skip alpha tags (e.g. v0.1.0a1); beta/rc/release tags publish to PyPI permissions: contents: read jobs: - check: - runs-on: ubuntu-latest - strategy: - matrix: - python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] - steps: - - name: Checkout code - uses: actions/checkout@v4 - - name: Install uv - uses: astral-sh/setup-uv@v3 - - name: Set up Python ${{ matrix.python-version }} - run: uv python install ${{ matrix.python-version }} + test: + uses: ./.github/workflows/test.yml linux: runs-on: ${{ matrix.platform.runner }} - needs: check + needs: test strategy: matrix: platform: @@ -47,7 +37,7 @@ jobs: path: dist musllinux: runs-on: ${{ matrix.platform.runner }} - needs: check + needs: test strategy: matrix: platform: @@ -74,7 +64,7 @@ jobs: path: dist windows: runs-on: ${{ matrix.platform.runner }} - needs: check + needs: test strategy: matrix: platform: @@ -99,13 +89,13 @@ jobs: path: dist macos: runs-on: ${{ matrix.platform.runner }} - needs: check + needs: test strategy: matrix: platform: - - runner: macos-13 + - runner: macos-15-intel # Intel x86_64 (GitHub's recommended replacement) target: x86_64 - - runner: macos-14 + - runner: macos-14 # Apple Silicon ARM64 target: aarch64 steps: - uses: actions/checkout@v4 @@ -125,7 +115,7 @@ jobs: path: dist sdist: runs-on: ubuntu-latest - needs: check + needs: test steps: - uses: actions/checkout@v4 - name: Build sdist @@ -156,9 +146,16 @@ jobs: uses: actions/attest-build-provenance@v2 with: subject-path: "wheels-*/*" + - name: Merge wheels into dist/ + if: ${{ startsWith(github.ref, 'refs/tags/') }} + run: mkdir -p dist && cp wheels-*/* 
dist/ - name: Publish to PyPI if: ${{ startsWith(github.ref, 'refs/tags/') }} - uses: PyO3/maturin-action@v1 + uses: pypa/gh-action-pypi-publish@release/v1 + - name: Create GitHub Release + if: ${{ startsWith(github.ref, 'refs/tags/') }} + uses: softprops/action-gh-release@v2 with: - command: upload - args: --non-interactive --skip-existing wheels-*/* + files: dist/* + generate_release_notes: true + prerelease: ${{ contains(github.ref_name, 'a') || contains(github.ref_name, 'b') || contains(github.ref_name, 'rc') }} diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml new file mode 100644 index 0000000..e8ec09a --- /dev/null +++ b/.github/workflows/test.yml @@ -0,0 +1,196 @@ +name: Test + +on: + push: + branches: + - main + - dev + tags-ignore: + - "**" + pull_request: + branches: + - main + - dev + workflow_call: # Allow publish workflow to call this as a dependency + +# Cancel in-progress runs for the same branch/PR +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + lint: + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Install uv + uses: astral-sh/setup-uv@v5 + + - name: Set up Python + run: uv python install 3.12 + + - name: Install dependencies + run: uv sync --group dev + + - name: Run ruff check + run: uv run ruff check + + - name: Run ruff format check + run: uv run ruff format --check + + typecheck: + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Install uv + uses: astral-sh/setup-uv@v5 + + - name: Set up Python + run: uv python install 3.12 + + - name: Install dependencies + run: uv sync --group dev + + - name: Run ty + run: uv run ty check pysrc/ tests/ demos/ scripts/ qgis_plugin/ + + test: + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ["3.10", "3.11", "3.12"] + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Install uv + uses: astral-sh/setup-uv@v5 + + - name: 
Set up Python ${{ matrix.python-version }} + run: uv python install ${{ matrix.python-version }} + + - name: Install Rust toolchain + uses: dtolnay/rust-toolchain@stable + + - name: Install dependencies + run: uv sync --group dev + + - name: Build Rust extension + run: uv run maturin develop --release + + - name: Run tests (excluding slow) + run: uv run python -m pytest tests/ -m 'not slow' -v --tb=short + env: + PYTHONPATH: ${{ github.workspace }} + + test-qgis-compat: + name: Test QGIS compatibility (NumPy 1.26) + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Install uv + uses: astral-sh/setup-uv@v5 + + - name: Set up Python 3.12 + run: uv python install 3.12 + + - name: Install GDAL system libraries + run: sudo apt-get update && sudo apt-get install -y libgdal-dev gdal-bin python3-gdal + + - name: Install Rust toolchain + uses: dtolnay/rust-toolchain@stable + + - name: Install dependencies (QGIS compat - NumPy 1.26) + run: uv sync --group qgis-compat + + - name: Install GDAL Python bindings matching system + run: uv pip install "gdal==$(gdal-config --version)" + + - name: Build Rust extension + run: uv run maturin develop --release + + - name: Verify NumPy version + run: uv run python -c "import numpy; assert numpy.__version__.startswith('1.26'), f'Expected NumPy 1.26.x, got {numpy.__version__}'" + + - name: Verify GDAL backend is used + run: uv run python -c "from solweig import io; assert io.GDAL_ENV, 'Expected GDAL backend'" + env: + UMEP_USE_GDAL: "1" + + - name: Run tests (excluding slow) with GDAL backend + run: uv run python -m pytest tests/ -m 'not slow' -v --tb=short + env: + PYTHONPATH: ${{ github.workspace }} + UMEP_USE_GDAL: "1" + + test-spec: + name: Test scientific spec/parity gates + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Install uv + uses: astral-sh/setup-uv@v5 + + - name: Set up Python 3.12 + run: uv python install 3.12 + + - name: Install 
Rust toolchain + uses: dtolnay/rust-toolchain@stable + + - name: Install dependencies + run: uv sync --group dev + + - name: Build Rust extension + run: uv run maturin develop --release + + - name: Verify UMEP availability for parity tests + run: uv run python -c "import umep" + + - name: Run scientific/spec suite + run: uv run python -m pytest tests/spec -v --tb=short + env: + PYTHONPATH: ${{ github.workspace }} + + test-gpu-gates: + name: Test GPU gates (correctness + performance) + runs-on: ubuntu-latest + # GPU tests require hardware; this job validates the CPU-fallback path on + # standard runners and is expected to pass. For true GPU coverage, run + # locally or on a self-hosted GPU runner. + continue-on-error: true + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Install uv + uses: astral-sh/setup-uv@v5 + + - name: Set up Python 3.12 + run: uv python install 3.12 + + - name: Install Rust toolchain + uses: dtolnay/rust-toolchain@stable + + - name: Install dependencies + run: uv sync --group dev + + - name: Build Rust extension + run: uv run maturin develop --release + + - name: Run GPU correctness gates + run: uv run poe test_gpu_gates + env: + PYTHONPATH: ${{ github.workspace }} + + - name: Run GPU performance gate + run: uv run poe test_gpu_perf_gate + env: + PYTHONPATH: ${{ github.workspace }} diff --git a/.gitignore b/.gitignore index b61afef..02e4ab4 100644 --- a/.gitignore +++ b/.gitignore @@ -1,11 +1,19 @@ # mac .DS_Store +# Claude +.claude/ + # demo data demos/data/athens/shadow* demos/data/athens/svf* demos/data/athens/solweig* +# validation data (downloaded from Zenodo/INRAE, not checked in) +tests/validation_data/ +tests/validation/montpellier/AIR.csv +tests/validation/montpellier/AIR-Readme.pdf + # geopackage peripheral files *.gpkg-* @@ -22,7 +30,6 @@ __pycache__/ # UV .python-version -uv.lock # Don't ignore the temp directory itself !temp/ @@ -173,6 +180,12 @@ venv.bak/ .dmypy.json dmypy.json +# ruff +.ruff_cache/ + +# ty 
type checker +.ty/ + # Pyre type checker .pyre/ @@ -188,3 +201,18 @@ cython_debug/ # and can be added to the global gitignore or merged into this file. For a more nuclear # option (not recommended) you can uncomment the following to ignore the entire idea folder. #.idea/ + +# QGIS +*.qgs +*.qgz +*.qgs~ +*.qgz~ +*.qpt +qgis_plugin/*.zip +qgis_plugin/build/ +qgis_plugin/dist/ +qgis_plugin/solweig_qgis/_bundled/ +qgis_plugin/solweig_qgis/_native/ + +# Local benchmark logs (auto-generated by performance matrix tests) +tests/benchmarks/logs/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..fb4e367 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,28 @@ +repos: + - repo: local + hooks: + - id: upgrade-linters + name: upgrade ruff & ty to latest + entry: uv lock --upgrade-package ruff --upgrade-package ty + language: system + always_run: true + pass_filenames: false + + - id: ruff-check + name: ruff check + entry: uv run ruff check --fix + language: system + types: [python] + + - id: ruff-format + name: ruff format + entry: uv run ruff format + language: system + types: [python] + + - id: ty + name: ty type check + entry: uv run ty check pysrc/ tests/ demos/ scripts/ qgis_plugin/ + language: system + types: [python] + pass_filenames: false diff --git a/.vscode/settings.json b/.vscode/settings.json index 4ec5468..e78a77a 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -1,30 +1,76 @@ { - "editor.wordWrap": "on", + // ═══════════════════════════════════════════════════════════════════════════ + // EDITOR + // ═══════════════════════════════════════════════════════════════════════════ + "editor.wordWrap": "on", + "editor.rulers": [120], + "editor.formatOnSave": true, + "editor.codeActionsOnSave": { + "source.fixAll": "explicit", + "source.organizeImports": "explicit" + }, + + // ═══════════════════════════════════════════════════════════════════════════ + // PYTHON + // 
═══════════════════════════════════════════════════════════════════════════ + "[python]": { + "editor.defaultFormatter": "charliermarsh.ruff", "editor.formatOnSave": true, "editor.codeActionsOnSave": { - "source.fixAll.eslint": "explicit", - "source.fixAll.stylelint": "explicit" - }, - "notebook.lineNumbers": "on", - "notebook.diff.ignoreMetadata": true, - "notebook.diff.ignoreOutputs": true, - "notebook.formatOnSave.enabled": true, - "notebook.codeActionsOnSave": { - "notebook.source.fixAll": "explicit", - "notebook.source.organizeImports": "explicit" - }, - "[python]": { - "editor.formatOnSave": true, - "editor.codeActionsOnSave": { - "source.fixAll": "explicit", - "source.organizeImports": "explicit" - }, - "editor.defaultFormatter": "charliermarsh.ruff" - }, - "jupyter.notebookFileRoot": "${workspaceFolder}", - "python.testing.pytestArgs": [ - "tests" - ], - "python.testing.unittestEnabled": false, - "python.testing.pytestEnabled": true - } \ No newline at end of file + "source.fixAll": "explicit", + "source.organizeImports": "explicit" + } + }, + "python.testing.pytestArgs": ["tests"], + "python.testing.unittestEnabled": false, + "python.testing.pytestEnabled": true, + "python.defaultInterpreterPath": "${workspaceFolder}/.venv/bin/python", + // Disable Pylance/Pyright - using Ruff + ty instead + "python.languageServer": "None", + + // ═══════════════════════════════════════════════════════════════════════════ + // RUST + // ═══════════════════════════════════════════════════════════════════════════ + "[rust]": { + "editor.defaultFormatter": "rust-lang.rust-analyzer", + "editor.formatOnSave": true + }, + "rust-analyzer.linkedProjects": ["${workspaceFolder}/rust/Cargo.toml"], + "rust-analyzer.check.command": "clippy", + + // ═══════════════════════════════════════════════════════════════════════════ + // JUPYTER & NOTEBOOKS + // ═══════════════════════════════════════════════════════════════════════════ + "jupyter.notebookFileRoot": "${workspaceFolder}", + 
"notebook.lineNumbers": "on", + "notebook.diff.ignoreMetadata": true, + "notebook.diff.ignoreOutputs": true, + "notebook.formatOnSave.enabled": true, + "notebook.codeActionsOnSave": { + "notebook.source.fixAll": "explicit", + "notebook.source.organizeImports": "explicit" + }, + + // ═══════════════════════════════════════════════════════════════════════════ + // SEARCH & FILE EXCLUDES (Performance) + // ═══════════════════════════════════════════════════════════════════════════ + "files.exclude": { + "**/__pycache__": true, + "**/*.pyc": true, + ".pytest_cache": true + }, + "search.exclude": { + "**/.venv": true, + "**/rust/target": true, + "**/__pycache__": true, + "**/node_modules": true, + "**/*.pyc": true + }, + "files.watcherExclude": { + "**/.venv/**": true, + "**/rust/target/**": true, + "**/__pycache__/**": true, + "**/.git/objects/**": true + }, + "python-envs.pythonProjects": [] +} diff --git a/.vscode/tasks.json b/.vscode/tasks.json new file mode 100644 index 0000000..f11159d --- /dev/null +++ b/.vscode/tasks.json @@ -0,0 +1,19 @@ +{ + "version": "2.0.0", + "tasks": [ + { + "label": "QGIS: Dev Setup", + "type": "shell", + "command": "ln -sfn ${workspaceFolder}/qgis_plugin/solweig_qgis ~/Library/Application\\ Support/QGIS/QGIS3/profiles/default/python/plugins/solweig_qgis", + "problemMatcher": [], + "detail": "Symlink plugin into QGIS plugins folder. Restart QGIS after." + }, + { + "label": "QGIS: Package ZIP", + "type": "shell", + "command": "uv run python qgis_plugin/build_plugin.py", + "problemMatcher": [], + "detail": "Create distributable ZIP for QGIS Plugin Repository" + } + ] +} diff --git a/CHANGES.md b/CHANGES.md new file mode 100644 index 0000000..9a8833e --- /dev/null +++ b/CHANGES.md @@ -0,0 +1,211 @@ +# Algorithm Changes and Observations + +This document tracks algorithm-related changes, differences, and observations discovered during the SOLWEIG modernization project. It is intended for discussion with the original authors. 
+ +## Purpose + +During the Rust modernization of SOLWEIG, we are: + +1. Creating golden fixtures using the **UMEP Python module** as ground truth +2. Verifying that Rust implementations match UMEP Python outputs +3. Documenting any discrepancies or algorithmic questions + +## Testing Strategy + +### Three-Layer Testing Approach + +| Layer | Purpose | Data Source | +| ----------------- | ------------------------------- | -------------------- | +| Spec Tests | Verify physical properties | Synthetic/mock data | +| Golden Tests | Verify Rust matches UMEP Python | Athens demo data | +| Performance Tests | Benchmark Rust vs Python | Large tiled datasets | + +### Golden Fixtures + +Golden fixtures are generated using **UMEP Python** functions, not Rust: + +- `shadowingfunction_wallheight_23` for shadow calculations +- `svfForProcessing153` for SVF calculations +- `gvf_2018a` for Ground View Factor +- `Kside_veg_v2022a` / `Lside_veg_v2022a` for radiation + +This ensures a neutral reference that doesn't change during modernization. + +--- + +## Observed Differences + +### 1. Shadow Calculation (shadowingfunction_wallheight_23) + +**Status:** VERIFIED - Rust matches UMEP Python exactly + +**UMEP Python function:** `umep.util.SEBESOLWEIGCommonFiles.shadowingfunction_wallheight_23` + +**Rust function:** `solweig.rustalgos.shadowing.calculate_shadows_wall_ht_25` + +**Test Results:** +All shadow components match within tolerance 1e-5: + +- `bldg_sh` (building shadows) - PASS +- `veg_sh` (vegetation shadows) - PASS +- `wall_sh` (wall shadows) - PASS +- `wall_sun` (wall sun exposure) - PASS + +**Findings:** + +- The Rust `_25` is a direct port of Python `_23` - the version increment was for internal tracking +- No algorithmic changes were made during the Rust modernization +- Both implementations produce identical results + +**Conclusion:** No action needed - implementations are equivalent. + +--- + +### 2. 
Sky View Factor (svfForProcessing153) + +**Status:** INTENTIONAL DIFFERENCE - documented in earlier modernization + +**UMEP Python function:** `umep.functions.svf_functions.svfForProcessing153` + +**Rust function:** `solweig.rustalgos.skyview.calculate_svf` + +**Test Results (Golden Test Comparison):** + +| Component | Match Status | Notes | +| --------- | ------------ | ----------------------------------- | +| svf_total | ~1% diff | Different underlying shadow | +| svf_north | ~1% diff | Different underlying shadow | +| svf_east | ~1% diff | Different underlying shadow | +| svf_south | EXACT | matches within 1e-5 | +| svf_west | EXACT | matches within 1e-5 | +| svf_veg | ~1% diff | Different underlying shadow | + +**Root Cause (from `test_rustalgos.py`):** + +This difference was **intentional and documented** during the earlier modernization: + +```python +# Line 201: "# uses older shadowingfunction_20" +# Line 205-206: "# uses rust shadowing based on shadowingfunction_wallheight_23" +# Line 282-283: print("Small differences expected for N and E and totals +# due to different shadowing implementations") +``` + +The UMEP Python `svfForProcessing153` internally calls the older `shadowingfunction_20`, while Rust uses the newer `shadowingfunction_wallheight_23` throughout for architectural consistency. + +**Verification:** + +A hybrid implementation (`svfForProcessing153_rust_shdw`) exists that uses Python SVF logic with Rust shadows. This hybrid matches the full Rust implementation exactly, proving: + +1. The SVF algorithm itself is correctly ported +2. The ~1% difference comes solely from using different shadow algorithms + +**Decision:** ACCEPTED + +The ~1% difference is accepted. Rust uses the newer `shadowingfunction_wallheight_23` throughout, which is architecturally cleaner and more consistent. The older `shadowingfunction_20` used by Python SVF is legacy code. 
+ +Golden tests use 2% tolerance for affected components (total, north, east, veg) and strict 1e-5 tolerance for unaffected components (south, west). + +--- + +### 3. Ground View Factor (gvf_2018a) + +**Status:** Not yet tested + +**UMEP Python function:** `umep.functions.SOLWEIGpython.gvf_2018a.gvf_2018a` + +**Rust function:** `solweig.rustalgos.gvf.gvf_calc` + +**Output fields:** + +- `gvfSum`, `gvfNorm` +- `gvfLup`, `gvfLupE/S/W/N` +- `gvfalb`, `gvfalbE/S/W/N` +- `gvfalbnosh`, `gvfalbnoshE/S/W/N` + +--- + +### 4. Radiation Calculations + +**Status:** Not yet tested + +#### Kside (Shortwave Side Radiation) + +- **Python:** `Kside_veg_v2022a` +- **Rust:** `vegetation.kside_veg` + +#### Lside (Longwave Side Radiation) + +- **Python:** `Lside_veg_v2022a` +- **Rust:** `vegetation.lside_veg` + +--- + +### 5. Thermal Comfort Indices + +**Status:** Spec tests created, golden fixtures pending + +#### UTCI (Universal Thermal Climate Index) + +- **Rust:** `solweig.rustalgos.utci` +- Spec tests verify property-based behavior + +#### PET (Physiological Equivalent Temperature) + +- **Rust:** `solweig.rustalgos.pet` +- Spec tests verify property-based behavior + +--- + +## Bug Fixes Applied + +_Document any bug fixes discovered and applied during testing._ + +### Example Template + +``` +### [Date] Bug Title + +**Location:** file:line +**Symptom:** Description of incorrect behavior +**Root Cause:** Why it was happening +**Fix:** What was changed +**Impact:** How significant was this bug +``` + +--- + +## Numerical Precision Notes + +### Tolerance Levels Used + +| Test Type | rtol | atol | Rationale | +| ---------------- | ---- | ---- | ------------------------------------ | +| Shadow masks | 1e-5 | 1e-5 | Binary-like values (0/1) | +| SVF values | 1e-5 | 1e-5 | Range [0, 1] | +| Radiation (W/m²) | 1e-4 | 0.01 | Physical units, ~1% error acceptable | +| Temperature (°C) | 1e-4 | 0.01 | Physical units | + +### Known Precision Issues + +_Document any known floating-point 
precision issues._ + +--- + +## Version Information + +- **UMEP Python version:** (check with `pip show umep`) +- **SOLWEIG Rust version:** See Cargo.toml +- **Test data:** Athens demo dataset + +--- + +## Discussion Log + +_Record discussions with original authors here._ + +### [Date] Discussion Topic + +- Participants: +- Decision: +- Action items: diff --git a/CITATION.cff b/CITATION.cff new file mode 100644 index 0000000..9f73e3c --- /dev/null +++ b/CITATION.cff @@ -0,0 +1,94 @@ +cff-version: 1.2.0 +title: SOLWEIG +message: >- + If you use this software, please cite both the original SOLWEIG + model paper and the UMEP platform paper. +type: software +authors: + - family-names: Lindberg + given-names: Fredrik + affiliation: University of Gothenburg + - family-names: Grimmond + given-names: C. Sue B. + affiliation: University of Reading + - name: UMEP Developers +repository-code: https://github.com/UMEP-dev/solweig +license: AGPL-3.0 +keywords: + - urban-microclimate + - mean-radiant-temperature + - thermal-comfort + - UTCI + - PET + - urban-climate + - SOLWEIG + - UMEP +references: + - type: article + title: >- + SOLWEIG 1.0 – Modelling spatial variations of 3D radiant fluxes + and mean radiant temperature in complex urban settings + authors: + - family-names: Lindberg + given-names: Fredrik + - family-names: Holmer + given-names: Björn + - family-names: Thorsson + given-names: Sofia + journal: International Journal of Biometeorology + volume: 52 + start: 697 + end: 713 + year: 2008 + doi: 10.1007/s00484-008-0162-7 + - type: article + title: >- + Urban Multi-scale Environmental Predictor (UMEP) – An integrated + tool for city-based climate services + authors: + - family-names: Lindberg + given-names: Fredrik + - family-names: Grimmond + given-names: C. Sue B. + - family-names: Gabey + given-names: Andrew + - family-names: Huang + given-names: Bei + - family-names: Kent + given-names: Christoph W. 
+ - family-names: Sun + given-names: Ting + - family-names: Theeuwes + given-names: Natalie + - family-names: Järvi + given-names: Leena + - family-names: Ward + given-names: Helen + - family-names: Capel-Timms + given-names: Ian + - family-names: Chang + given-names: Yuanyong + - family-names: Jonsson + given-names: Per + - family-names: Krave + given-names: Niklas + - family-names: Liu + given-names: Dongwei + - family-names: Meyer + given-names: David + - family-names: Olofson + given-names: Frans + - family-names: Tan + given-names: Jianguo + - family-names: Wästberg + given-names: Dag + - family-names: Xue + given-names: Lingbo + - family-names: Zhang + given-names: Zhen + journal: Environmental Modelling and Software + volume: 99 + start: 70 + end: 87 + year: 2018 + doi: 10.1016/j.envsoft.2017.09.020 diff --git a/README.md b/README.md index b866d2c..d7b6ac4 100644 --- a/README.md +++ b/README.md @@ -1,61 +1,176 @@ -# UMEP Core +# SOLWEIG -## Setup +**Map how hot it *feels* across a city — pixel by pixel.** -- Make sure you have a Python installation on your system -- Install `vscode` and `github` apps. -- Install `uv` package manager (e.g. `pip install uv`). -- Clone repo. -- Run `uv sync` from the directory where `pyproject.toml` in located to install `.venv` and packages. -- Select `.venv` Python environment. -- FYI: Recommended settings and extensions are included in the repo. Proceed if prompted to install extensions. -- Develop and commit to Github often! +SOLWEIG computes **Mean Radiant Temperature (Tmrt)** and thermal comfort indices (**UTCI**, **PET**) for urban environments. Give it a building height model and weather data, and it produces high-resolution maps showing where people experience heat stress — and where trees, shade, and cool surfaces make a difference. -## Demo +Built on Rust for speed, with optional GPU acceleration. Handles everything from a single city block to an entire district. -See the demo notebook file at [/demo.py](/demo.py). 
+> **Status:** Beta (v0.1.0). The API is stabilising. Feedback and bug reports welcome — [open an issue](https://github.com/UMEP-dev/solweig/issues). -Also, a test with GBG data is found in [/solweig_gbg_test.py](/solweig_gbg_test.py) +## What can you do with it? -The demo and the test uses the datasets included in the tests folder +- **Urban planning** — Compare street canyon designs, tree planting scenarios, or cool-roof strategies by mapping thermal comfort before and after. +- **Heat risk assessment** — Identify the hottest spots in a neighbourhood during a heatwave, hour by hour. +- **Research** — Run controlled microclimate experiments at 1 m resolution with full radiation budgets. +- **Climate services** — Generate thermal comfort maps for public health warnings or outdoor event planning. -## Original code +## How it works (in brief) -The code reproduced in the `umep` folder is adapted from the original GPLv3-licensed code by Fredrik Lindberg, Ting Sun, Sue Grimmond, Yihao Tang, Nils Wallenberg. +SOLWEIG models the complete radiation budget experienced by a person standing in an urban environment: -The original code has been modified to work without QGIS to facilitate Python workflows. +1. **Shadows** — Which pixels are shaded by buildings and trees at a given sun angle? +2. **Sky View Factor** — How much sky can a person see from each point? (More sky = more incoming radiation.) +3. **Surface temperatures** — How hot are the ground and surrounding walls? +4. **Radiation balance** — Sum shortwave (sun) and longwave (heat) radiation from all directions. +5. **Tmrt** — Convert total absorbed radiation into a single "felt temperature" metric. +6. **Thermal comfort** — Optionally derive UTCI or PET, which combine Tmrt with air temperature, humidity, and wind. -The original code can be found at: [UMEP-processing](https://github.com/UMEP-dev/UMEP-processing). +## Quick start -This modified code is licensed under the GNU General Public License v3.0. 
+### Install -See the LICENSE file for details. +```bash +pip install solweig +``` -Please give all credit for UMEP code to the original authors and cite accordingly. +### Minimal example (numpy arrays) -© Copyright 2018 - 2020, Fredrik Lindberg, Ting Sun, Sue Grimmond, Yihao Tang, Nils Wallenberg. +```python +import numpy as np +import solweig +from datetime import datetime -Lindberg F, Grimmond CSB, Gabey A, Huang B, Kent CW, Sun T, Theeuwes N, Järvi L, Ward H, Capel- Timms I, Chang YY, Jonsson P, Krave N, Liu D, Meyer D, Olofson F, Tan JG, Wästberg D, Xue L, Zhang Z (2018) Urban Multi-scale Environmental Predictor (UMEP) - An integrated tool for city-based climate services. Environmental Modelling and Software.99, 70-87 https://doi.org/10.1016/j.envsoft.2017.09.020 +# A flat surface with one 15 m building +dsm = np.full((200, 200), 2.0, dtype=np.float32) +dsm[80:120, 80:120] = 15.0 -## Demo Data +surface = solweig.SurfaceData(dsm=dsm, pixel_size=1.0) +# SVF is required before calculate(); compute once and reuse +surface.compute_svf() -Two seprated demo dataset are included +location = solweig.Location(latitude=48.8, longitude=2.3, utc_offset=1) # Paris +weather = solweig.Weather( + datetime=datetime(2025, 7, 15, 14, 0), + ta=32.0, # Air temperature (°C) + rh=40.0, # Relative humidity (%) + global_rad=850.0, # Solar radiation (W/m²) +) -### ATENS (vector data) +result = solweig.calculate(surface, location, weather) -#### Tree Canopies +print(f"Sunlit Tmrt: {result.tmrt[result.shadow > 0.5].mean():.0f}°C") +print(f"Shaded Tmrt: {result.tmrt[result.shadow < 0.5].mean():.0f}°C") +``` -Copernicus +`calculate*()` requires SVF to already be prepared (`SurfaceData.prepare(...)` +or `surface.compute_svf()`). If you explicitly set +`use_anisotropic_sky=True`, shadow matrices must also already be available. -#### Trees +### Real-world workflow (GeoTIFFs + EPW weather) -https://walkable.cityofathens.gr/home +```python +import solweig -#### Buildings +# 1. 
Load surface — prepare() computes and caches walls/SVF when missing +surface = solweig.SurfaceData.prepare( + dsm="data/dsm.tif", + cdsm="data/trees.tif", # Optional: vegetation canopy heights + working_dir="cache/", # Expensive preprocessing cached here +) -http://gis.cityofathens.gr/layers/athens_geonode_data:geonode:c40solarmap + +# 2. Load weather from an EPW file (standard format from climate databases) +weather_list = solweig.Weather.from_epw( + "data/weather.epw", + start="2025-07-01", + end="2025-07-03", +) +location = solweig.Location.from_epw("data/weather.epw") -### Gothenburg (raster data) + +# 3. Run — outputs saved as GeoTIFFs, thermal state carried between timesteps +results = solweig.calculate_timeseries( + surface=surface, + weather_series=weather_list, + location=location, + output_dir="output/", + outputs=["tmrt", "shadow"], +) -Standard dataset used in tutorials (https://umep-docs.readthedocs.io/en/latest/Tutorials.html) + +# Summary grids (mean Tmrt, UTCI, sun hours, etc.) are on the returned object +print(results.report()) +``` +## What you need + +| Input | Required? | What it is | +|-------|-----------|------------| +| **DSM** | Yes | Digital Surface Model — a height grid (metres) including buildings. GeoTIFF or numpy array. | +| **Location** | Yes | Latitude, longitude, and UTC offset. Can be extracted from the DSM's CRS or an EPW file. | +| **Weather** | Yes | Air temperature, relative humidity, and global solar radiation. Load from an EPW file or create manually. | +| **CDSM** | No | Canopy heights (trees). Adds vegetation shading. | +| **DEM** | No | Ground elevation. Separates terrain from buildings. | +| **Land cover** | No | Surface type grid (paved, grass, water, etc.). Affects surface temperatures. | + +## What you get + +| Output | Unit | Description | +|--------|------|-------------| +| **Tmrt** | °C | Mean Radiant Temperature — the main output. How much radiation a person absorbs. 
| +| **Shadow** | 0–1 | Shadow fraction (1 = sunlit, 0 = fully shaded). | +| **UTCI** | °C | Universal Thermal Climate Index — "feels like" temperature combining all factors. | +| **PET** | °C | Physiological Equivalent Temperature — similar to UTCI but with customisable body parameters. | +| Kdown / Kup | W/m² | Shortwave radiation (down and reflected up). | +| Ldown / Lup | W/m² | Longwave radiation (thermal, down and emitted up). | + +### Don't have an EPW file? Download one + +```python +# Download weather data for any location (no API key needed) +epw_path = solweig.download_epw(latitude=37.98, longitude=23.73, output_path="athens.epw") +weather_list = solweig.Weather.from_epw(epw_path) +``` + +## Demos + +Complete working scripts you can run directly: + +- **[demos/athens-demo.py](demos/athens-demo.py)** — Full workflow: rasterise tree vectors, load GeoTIFFs, run a multi-day timeseries, post-process UTCI. +- **[demos/solweig_gbg_test.py](demos/solweig_gbg_test.py)** — Gothenburg: surface preparation with SVF caching, timeseries calculation. + +## Documentation + +- [Installation](docs/getting-started/installation.md) +- [Quick Start Guide](docs/getting-started/quick-start.md) — Step-by-step first calculation +- [User Guide](docs/guide/basic-usage.md) — Common workflows, height conventions, and options +- [API Reference](docs/api/index.md) — All classes and functions +- [Physics](docs/physics/index.md) — How the radiation model works + +## QGIS Plugin + +SOLWEIG is also available as a QGIS plugin for point-and-click spatial analysis: + +1. **Plugins** → **Manage and Install Plugins** +2. **Settings** tab → Check **"Show also experimental plugins"** +3. Search for **"SOLWEIG"** → **Install Plugin** + +## Citation + +Adapted from [UMEP](https://github.com/UMEP-dev/UMEP-processing) by Fredrik Lindberg, Sue Grimmond, and contributors. + +If you use SOLWEIG in your research, please cite the original model paper and the UMEP platform: + +1. 
Lindberg F, Holmer B, Thorsson S (2008) SOLWEIG 1.0 – Modelling spatial variations of 3D radiant fluxes and mean radiant temperature in complex urban settings. *International Journal of Biometeorology* 52, 697–713 [doi:10.1007/s00484-008-0162-7](https://doi.org/10.1007/s00484-008-0162-7) + +2. Lindberg F, Grimmond CSB, Gabey A, Huang B, Kent CW, Sun T, Theeuwes N, Järvi L, Ward H, Capel-Timms I, Chang YY, Jonsson P, Krave N, Liu D, Meyer D, Olofson F, Tan JG, Wästberg D, Xue L, Zhang Z (2018) Urban Multi-scale Environmental Predictor (UMEP) – An integrated tool for city-based climate services. *Environmental Modelling and Software* 99, 70-87 [doi:10.1016/j.envsoft.2017.09.020](https://doi.org/10.1016/j.envsoft.2017.09.020) + +## Demo data + +The Athens demo dataset (`demos/data/athens/`) uses the following sources: + +- **DSM/DEM** — Derived from LiDAR data available via the [Hellenic Cadastre geoportal](https://www.ktimatologio.gr/) +- **Tree vectors** (`trees.gpkg`) — Derived from the [Athens Urban Atlas](https://land.copernicus.eu/local/urban-atlas) and municipal open data at [geodata.gov.gr](https://geodata.gov.gr/) +- **EPW weather** (`athens_2023.epw`) — Generated using Copernicus Climate Change Service information [2025] via [PVGIS](https://re.jrc.ec.europa.eu/pvg_tools/en/). Contains modified Copernicus Climate Change Service information; neither the European Commission nor ECMWF is responsible for any use that may be made of the Copernicus information or data it contains. + +## License + +GNU Affero General Public License v3.0 — see [LICENSE](LICENSE). diff --git a/ROADMAP.md b/ROADMAP.md new file mode 100644 index 0000000..5271a5d --- /dev/null +++ b/ROADMAP.md @@ -0,0 +1,842 @@ +# SOLWEIG Roadmap + +**Updated: February 2026** + +This document outlines the development priorities for SOLWEIG. + +## Priorities (in order) + +1. **Scientific Rigor & Validation** - Complete specifications, add missing references, validate implementations +2. 
**Memory & Computational Improvements** - Optimize for large rasters and efficiency +3. **Performance (POI Mode)** - Deferred until core science is solid + +--- + +## Next Tasks (Prioritized) + +| # | Task | Section | Impact | Status | +| --- | ----------------------------------------- | ------- | ---------------------------------- | ----------- | +| 1 | ~~Result methods (compute_utci/pet)~~ | E.1 | HIGH - API discoverability | ✅ Complete | +| 2 | ~~Location UTC offset warning~~ | E.2 | HIGH - prevents silent bugs | ✅ Complete | +| 3 | ~~Structured errors + validate_inputs()~~ | E.4 | MEDIUM - better error messages | ✅ Complete | +| 4 | ~~Config precedence (explicit wins)~~ | E.3 | MEDIUM - API consistency | ✅ Complete | +| 5 | ~~API cleanup (factories, docs)~~ | E.5 | LOW - polish | ✅ Complete | +| 6 | ~~Cache validation (hashes)~~ | B.3 | LOW - safety feature | ✅ Complete | +| 7 | ~~Tests for `calculate_timeseries()`~~ | F.1 | HIGH - primary workflow untested | ✅ Complete | +| 8 | ~~Fix GPU function docs~~ | D | HIGH - breaks new user experience | ✅ Complete | +| 9 | ~~Fix EPW parser tests~~ | F | HIGH - 8 tests failing silently | ✅ Complete | +| 10 | ~~Rename `algorithms/` → `physics/`~~ | B.4 | MEDIUM - misleading "Legacy" label | ✅ Complete | +| 11 | ~~Slim down `__all__` exports~~ | E.5 | MEDIUM - internal bundles exposed | ✅ Complete | +| 12 | ~~Rename `config.py` → `loaders.py`~~ | B.4 | LOW - two-config ambiguity | ✅ Complete | +| 13 | ~~Move `cylindric_wedge` to Rust~~ | G.2 | HIGH - per-timestep hotspot | ✅ Complete | +| 14 | ~~GPU buffer reuse / persistence~~ | G.3 | HIGH - eliminates per-call alloc | ✅ Complete | +| 15 | ~~Move aniso patch loop to Rust~~ | G.2 | MEDIUM - anisotropic mode speedup | ✅ Complete | +| 16 | ~~QGIS plugin testing (Phase 11)~~ | D | HIGH - blocks plugin adoption | ✅ Complete | +| 17 | ~~Orchestration layer unit tests~~ | F.1 | MEDIUM - regression safety | ✅ Complete | +| 18 | ~~API reference with mkdocstrings~~ | D | MEDIUM - 
user adoption | ✅ Complete | +| 19 | Field-data validation | H | HIGH - scientific credibility | In Progress | +| 20 | POI Mode | C | HIGH - 10-100x speedup | Deferred | + +**Current status:** Phases A, B, D, E, F.1, G.2, G.3.1–G.3.2, H.1, H.2 complete. 612+ tests total. Perez sky luminance fully ported to Rust (`crate::perez::perez_v3()`). GPU acceleration covers shadows, SVF (pipelined dispatch via `svf_accumulation.wgsl`), and anisotropic sky (`anisotropic_sky.wgsl`). Fused Rust pipeline (`pipeline.compute_timestep`) handles all per-timestep computation in a single FFI call. Next: POI mode. + +### Recently Completed + +| Task | Section | Status | +| ------------------------------------ | ------- | ----------- | +| Cache validation (hash-based) | B.3 | ✅ Complete | +| np.memmap for SVF caching | B.1 | ✅ Complete | +| Pre-allocated buffer pools | B.1 | ✅ Complete | +| Batch thermal delay (Rust) | B.5 | ✅ Complete | +| Constants consolidation | B.4 | ✅ Complete | +| GVF golden tests | A.4 | ✅ Complete | +| Radiation golden tests (Kside/Lside) | A.4 | ✅ Complete | +| SurfaceData.prepare() refactor | B.4 | ✅ Complete | +| Rust parameter structs | B.5 | ✅ Complete | +| SVF auto-caching in calculate() | B.2 | ✅ Complete | + +--- + +## Session Log (Feb 2026) + +**Session (Feb 6, JSON parameter integration):** + +- ✅ **Full JSON parameter integration** - `parametersforsolweig.json` as single source of truth + - Created `pysrc/solweig/data/default_materials.json` (bundled UMEP JSON with wall values filled in) + - Auto-load materials in `calculate()` / `calculate_timeseries()` when `materials=None` + - Wall params flow from JSON → Python → Rust: tgk_wall, tstart_wall, tmaxlst_wall + - Rust `ground.rs`: 3 `Option` wall params with cobblestone defaults + - Fixed phase clamping bug (unclamped to allow afternoon cooling per UMEP) + - Fixed wall denominator division-by-zero guard + - 5 new sinusoidal golden tests + 12 parametrized UMEP agreement tests + - Golden report 
generator: HTML → Markdown rewrite, added sinusoidal section + - Wall material defaults: Brick (TgK=0.40), Wood (TgK=0.50), Concrete (TgK=0.35) + +**Session (Feb 6, validation):** + +- ✅ **Field-data validation (Phase H)** - Kolumbus dataset from Zenodo (Wallenberg et al. 2025) + - Downloaded: kolumbus.csv (wall temps), geodata (DSM/DEM/CDSM/groundcover), met forcing + - Added `Weather.from_umep_met()` classmethod for SUEWS-format meteorological files + - 12 validation tests: data loading (7), wall temperature (3), full pipeline (2) + - Wall temp RMSE: 6.67°C (PB) / 8.96°C (wood) with generic params vs ~2°C in paper (tuned) + - Full pipeline: Tmrt 31.7°C at noon (Ta=20.5°C), peak 41.8°C at 15:00 + - Confirmed land cover support exists throughout pipeline and QGIS plugin + - Added `tests/conftest.py` to fix QGIS test imports (pre-existing sys.path issue) + - Investigated Montpellier dataset: reduced-scale canyon, globe thermometer, needs synthetic DSM + +**Session (Feb 6, continued):** + +- ✅ **QGIS plugin tests (Phase 11)** - 40 tests for converters and base algorithm + - `tests/qgis_mocks.py`: shared mock infrastructure (install/uninstall osgeo separately) + - `tests/test_qgis_converters.py`: 25 tests (HumanParams, Weather, Location, EPW) + - `tests/test_qgis_base.py`: 15 tests (grid validation, output paths, georeferenced save) + - Fixed osgeo mock pollution (split install/install_osgeo to prevent cross-test contamination) +- ✅ **Orchestration unit tests (F.1)** - computation internals + - `_apply_thermal_delay()`: 7 tests (state transitions, Rust FFI mock, day/night flags) + - `_precompute_weather()`: 5 tests (altmax caching, multi-day, derived computation) + - ThermalState/TileSpec: 11 tests, tiling helpers: 21 tests +- ✅ **API reference with mkdocstrings** - docs build with `--strict` + - Added mkdocs/mkdocstrings[python] to dev dependencies + - `poe docs` / `poe docs_build` tasks for local serving and strict build + - All 6 functions + 9 dataclasses + 5 error 
classes auto-documented from docstrings + +**Session (Feb 6, first half):** + +- ✅ **GPU buffer reuse (G.3.1)** - `CachedBuffers` struct persists 17 wgpu buffers across shadow calls + - Buffers reallocated only when grid dimensions change + - Uses `queue.write_buffer()` instead of `create_buffer_init()` per call +- ✅ **Test infrastructure** - `poe test_quick` (221 tests, ~4 min) / `poe test_full` (357 tests) + - `@pytest.mark.slow` on 7 modules (api, timeseries, tiling, memory, svf, gvf, wall_geometry) + - CI expanded from 55 → 221 tests per Python version + - `ty check` scope fixed in CI to match pre-commit hook +- ✅ **Phase G.2 complete** - Moved Python hotspots to Rust with rayon parallelism + - `cylindric_wedge()`: per-pixel wall shadow fraction → `sky.rs` + - `weighted_patch_sum()`: anisotropic patch summation → `sky.rs` + - Both include low-sun guards matching Python reference +- ✅ **Type checking expanded** - `ty check` now covers all directories (pysrc/, tests/, demos/, scripts/, qgis_plugin/) + - Fixed 8 type errors across codebase + - Pre-commit hook and poe tasks updated to match +- ✅ Fixed real bug: QGIS converters.py `sex` field mapped to string instead of int + +**Session (Feb 5):** + +- ✅ **Low sun angle handling** - Fixed numerical issues at low solar altitudes + - `Perez_v3.py`: robust handling of edge-case zenith angles + - `cylindric_wedge.py`: clamp/guard for near-horizon sun positions + - `io.py`: related fixes for sun position edge cases + - Added `tests/spec/test_low_sun_angles.py` validation tests +- ✅ QGIS plugin scaffolded and documented (Phases 1-10 complete) +- ✅ MkDocs documentation site scaffolded (25 pages under `docs/`) + +**Session (Feb 3):** + +- ✅ **SVF auto-caching** - Fresh-computed SVF now cached on `surface.svf` for reuse + - First call: computes SVF (~67s for 200×200) + - Subsequent calls: **0.3s** (210× speedup) +- ✅ Fixed `_compute_and_cache_svf()` - was referencing non-existent `svf` module +- ✅ Added 
`SvfArrays.from_bundle()` - converts computation result to cacheable format +- ✅ Confirmed `SurfaceData.prepare()` refactor already complete (methods split into focused functions) + +**Earlier sessions:** + +- ✅ Batch thermal delay (Rust) - 6 FFI calls → 1 +- ✅ Constants consolidation - `SBC`, `KELVIN_OFFSET` centralized in `constants.py` +- ✅ `as_float32()` helper - avoids unnecessary dtype copies +- ✅ Rust parameter structs - `GvfScalarParams` (20→11 params), `TmrtParams` (18→15 params) + +**API cleanup:** + +- `gvf.gvf_calc(arrays..., GvfScalarParams)` - clean struct-based API +- `tmrt.compute_tmrt(arrays..., TmrtParams)` - clean struct-based API +- Old 20+ param functions removed (no backward compat needed for new API) + +--- + +## Completed Work + +| Phase | Description | Status | +| ------- | ---------------------------------- | ---------------------- | +| Phase 1 | Spec-driven testing infrastructure | ✅ Complete | +| Phase 2 | API simplification (100% parity) | ✅ Complete | +| Phase 3 | User experience improvements | ✅ Complete | +| Phase 5 | Middle layer refactoring | ✅ Complete (Jan 2026) | + +**Key metrics achieved:** + +- api.py reduced from 3,976 → 403 lines (-89.9%) +- 6,100 lines of legacy code deleted +- models.py split into models/ package (6 modules, ~3,080 lines) +- 612+ tests passing (including spec, golden, benchmark, and validation tests) +- 100% parity with reference UMEP implementation + +--- + +## Phase A: Scientific Rigor & Validation + +**Goal:** Ensure all physics models are properly documented, referenced, and validated. + +### A.1 Specification Gaps + +| Gap | Spec File | Status | Notes | +| ----------------------- | -------------- | ----------- | ----------------------------------------------- | +| Sky emissivity formula | radiation.md | ✅ Complete | Jonsson et al. 2006 formula documented | +| Diffuse fraction model | radiation.md | ✅ Complete | Reindl et al. 
1990 piecewise correlations | +| Anisotropic radiation | radiation.md | ✅ Complete | Perez et al. 1993 sky luminance model | +| Absorption coefficients | tmrt.md | ✅ Complete | ISO 7726:1998 reference added | +| absL discrepancy | tmrt.md + JSON | ✅ Fixed | Updated JSON files from 0.95 → 0.97 | +| Posture view factors | tmrt.md | ✅ Complete | Mayer & Höppe 1987 reference, derivations added | +| SVF calculation method | svf.md | ✅ Complete | Patch-based method, Robinson & Stone 1990 | +| GVF calculation method | gvf.md | ✅ Complete | Wall integration, Lindberg et al. 2008 | + +### A.2 Ground Temperature Model ✅ Complete + +**Files:** + +- [specs/ground_temperature.md](specs/ground_temperature.md) +- [components/ground.py](pysrc/solweig/components/ground.py) + +**Completed:** + +- [x] Created specs/ground_temperature.md specification +- [x] Documented thermal mass parameters (decay constant 33.27 day⁻¹, τ ≈ 43 min) +- [x] Added reference: Lindberg et al. (2016) +- [x] Documented exponential decay formula: `w = exp(-33.27 × Δt)` + +### A.3 Missing References ✅ Complete + +All key citations have been added to specifications: + +| Parameter | Location | Citation Added | Status | +| ------------------------ | ---------- | -------------------------- | ----------- | +| DuBois body surface area | pet.md | DuBois & DuBois 1916 | ✅ Complete | +| MEMI energy balance | pet.md | Höppe 1984, 1999 | ✅ Complete | +| Metabolic rates | pet.md | ISO 8996:2021 | ✅ Complete | +| Clothing insulation | pet.md | ISO 9920:2007, Fanger 1970 | ✅ Complete | +| Tree transmissivity | shadows.md | Konarska et al. 2014 | ✅ Complete | +| Trunk ratio (0.25) | shadows.md | Lindberg & Grimmond 2011 | ✅ Complete | + +### A.4 Validation Tests ✅ Complete + +**Spec compliance tests:** ✅ Complete (16 tests) + +- [x] Sky emissivity formula validation (Jonsson et al. 2006) +- [x] Diffuse fraction model tests (Reindl et al. 
1990) +- [x] Absorption coefficient tests (ISO 7726:1998) +- [x] Posture view factor tests (Mayer & Höppe 1987) +- [x] TsWaveDelay thermal delay tests (decay constant, morning reset) + +See: [tests/spec/test_radiation_formulas.py](tests/spec/test_radiation_formulas.py) + +**Thermal comfort validation:** ✅ Complete (19 tests) + +- [x] UTCI polynomial accuracy and stress categories +- [x] UTCI wind/humidity/radiation effects +- [x] PET solver with DuBois body surface area +- [x] PET stress categories and radiation effects +- [x] Default parameter validation + +See: [tests/spec/test_utci.py](tests/spec/test_utci.py), [tests/spec/test_pet.py](tests/spec/test_pet.py) + +**Component validation:** ✅ Complete + +- [x] GVF golden tests (physical property validation, regression detection) +- [x] Radiation component golden tests (Kside, Lside directional components) + +### A.5 Formula Reconciliation ✅ Complete + +**Resolved:** + +- [x] absL coefficient: Updated from 0.95 → 0.97 in JSON files to match ISO 7726:1998 +- [x] All specs reviewed and aligned with implementation +- [x] Validation tests confirm spec-implementation consistency + +--- + +## Phase B: Memory & Computational Improvements + +**Goal:** Handle large rasters efficiently without compromising accuracy. 
+ +### B.1 Memory Optimization (HIGH priority) + +| Issue | Current | Target | Approach | Status | +| ------------------- | ---------------------- | ------------------- | ---------------------------------- | ---------------------- | +| Array precision | Mixed float32/float64 | float32 throughout | Audit and convert | ✅ Complete | +| Shadow storage | float32 | float32 | Continuous values (transmissivity) | ⚠️ Cannot compress | +| SVF caching | Full arrays in memory | Memory-mapped files | Use np.memmap for tiled processing | ✅ Complete (Feb 2026) | +| Intermediate arrays | Allocated per timestep | Pre-allocated pools | Reuse buffers | ⏳ Pending | + +**Tasks:** + +- [x] Audit all array allocations for dtype consistency (53 allocations fixed) +- [x] Investigate shadow compression - **Finding:** Shadow masks are NOT binary due to vegetation transmissivity formula `shadow = bldg_sh - (1 - veg_sh) * (1 - psi)` where psi is continuous (0.03-0.5) +- [x] Memory profiling script created: `scripts/profile_memory.py` +- [x] Benchmark memory usage - **Results (Feb 2026):** + - ~370 bytes/pixel peak memory at scale + - 800×800 grid: 225 MB peak + - Estimated 10k×10k: **34 GB** (requires optimization before large-scale use) + - Memory overhead decreases with grid size (fixed module overhead amortized) +- [x] Add memory profiling to CI (tests/benchmarks/test_memory_benchmark.py) + +**Bug fixes (Feb 2026):** + +- [x] Fixed missing imports in tiling.py (`SurfaceData`, `PrecomputedData`, `SolweigResult`, `SimpleNamespace`) +- [x] Fixed Rust function call in ground.py (positional vs keyword arguments) +- [x] Fixed `max_height` to include vegetation heights for buffer calculation + +### B.2 Computational Efficiency (MEDIUM priority) + +| Optimization | Benefit | Approach | Status | +| -------------------------------- | -------------------------- | ---------------------------------------------- | ------------------------------------------ | +| ~~Reduce Python/Rust crossings~~ | ~~Less 
FFI overhead~~ | ~~Batch operations in Rust~~ | Deferred (diminishing returns) | +| Lazy SVF loading | Faster startup | Load on first access | ✅ Already implemented | +| ~~Parallel timestep processing~~ | ~~Better CPU utilization~~ | ~~Process independent timesteps concurrently~~ | ❌ Not feasible (thermal state dependency) | +| **Altmax caching** | 17x faster timeseries | Cache max sun altitude per day | ✅ Complete (Feb 2026) | +| **SVF auto-caching** | **210× faster repeats** | Cache fresh-computed SVF on surface object | ✅ Complete (Feb 2026) | +| **Algorithm optimizations** | 1.6-2x faster functions | Vectorized numpy, pre-compute common terms | ✅ Complete (Feb 2026) | + +**Completed optimizations:** + +- [x] **SVF auto-caching** (Feb 3, 2026) - Fresh-computed SVF is now cached back to `surface.svf` after first `calculate()` call. Subsequent calls reuse cached SVF. Result: **210× speedup** on repeat timesteps (67s → 0.3s for 200×200 grid). + +- [x] **Altmax caching** - Weather.compute_derived() iterated 96 times to find max sun altitude. For timeseries, this is now computed once per unique day and cached. Result: **17.6x speedup** for weather pre-computation (4.04s → 0.23s for 72 timesteps). + +- [x] **SVF lazy loading** - SVF resolution checks cached/precomputed sources before computing fresh (see [components/svf_resolution.py](pysrc/solweig/components/svf_resolution.py)). 
+ +- [x] **Algorithm optimizations** (Feb 2026) - Optimized Python algorithms: + - `cylindric_wedge.py`: 1.6× faster via vectorized np.where and pre-computed trig values + - `Kup_veg_2015a.py`: 2× faster via pre-computing common terms (5 sin/multiply → 1) + +### B.3 Cache Validation ✅ Complete + +Working directory cache now validates against input data: + +- [x] Store input hashes with cached data (via `cache.py` module) +- [x] Validate cache on load (hash comparison of DSM/CDSM/pixel_size) +- [x] Clear stale cache automatically (auto-clears and recomputes if inputs changed) + +### B.4 Code Quality (LOW priority) + +Optional refactoring for maintainability. No behavioral changes. + +| Task | Goal | Status | +| ---------------------------------- | -------------------------------------------------------- | ----------- | +| Q.1 Constants consolidation | Eliminate duplicate `SBC = 5.67e-8` etc. across 5+ files | ✅ Complete | +| Q.2 Models package split | Split 2,238-line models.py into modules | ✅ Complete | +| Q.3 Tiling consolidation | Merge duplicate tiling implementations | ✅ Complete | +| Q.4 SurfaceData.prepare() refactor | Break 400+ line method into focused functions | ✅ Complete | + +**Q.4 Details:** `prepare()` is now ~50 lines of orchestration calling focused helpers: +`_load_and_validate_dsm()`, `_load_terrain_rasters()`, `_load_preprocessing_data()`, +`_align_rasters()`, `_create_surface_instance()`, `_compute_and_cache_walls()`, `_compute_and_cache_svf()` + +### B.5 Rust FFI Optimization (LOW priority) + +Optional Rust improvements to reduce Python/Rust crossing overhead. 
+ +| Task | Goal | Status | +| ---------------------- | -------------------------------------------------------------- | ------------------------------- | +| Batch thermal delay | Combine 6 `ts_wave_delay` calls into 1 | ✅ Complete | +| Rust parameter structs | Replace 29-param functions with structs | ✅ Complete | +| Fused radiation+tmrt | Combine radiation calc + Tmrt in single Rust call | Deferred (marginal gain) | +| Parallel SVF patches | Rayon parallelization of patch calculations | Deferred (SVF is one-time cost) | +| Mega-kernel | Combine SVF→shadows→ground→GVF→radiation→Tmrt into single call | Deferred (0.3s/step is fast) | + +**Note:** With SVF auto-caching, per-timestep cost is ~0.3s for 200×200. Further FFI optimization offers diminishing returns. + +--- + +## Phase C: Performance (Deferred) + +**POI-only mode** - Deferred until scientific validation is complete. + +When prioritized, this phase would enable 10-100× speedup for point-based calculations through localized ray-casting and SVF sampling. See archived MODERNIZATION_PLAN.md for detailed design. + +--- + +## Phase G: GPU & Rust-Python Interface Design + +**Goal:** Extend GPU acceleration beyond shadowing, adopt a principled Rust/Python boundary, and move remaining Python hotspots to Rust where the gain justifies the complexity. 
+ +### Current State (Feb 2026) + +**Rust modules** (5,341 lines, 15 files): + +- `shadowing.rs` (812 lines) - GPU-accelerated ray-marching via wgpu compute shader +- `skyview.rs` (550 lines) - Hemispherical SVF (calls shadowing 32-248×) +- `gvf.rs` (390 lines) - Ground view factor with wall radiation +- `sky.rs` (550 lines) - Anisotropic sky longwave +- `vegetation.rs` (800 lines) - Directional radiation from vegetation/buildings +- `ground.rs` (350 lines) - TgMaps ground temperature model +- `utci.rs` (350 lines) - Fast polynomial (125 terms) +- `pet.rs` (370 lines) - Iterative thermal comfort solver +- `tmrt.rs` (240 lines) - Mean radiant temperature integration +- Internal helpers: `sun.rs`, `patch_radiation.rs`, `emissivity_models.rs`, `sunlit_shaded_patches.rs` + +**GPU status:** Three GPU-accelerated paths via wgpu compute shaders. All fall back to CPU automatically. + +- `shadowing.rs` — ray-marching shadows (`shadow_propagation.wgsl`) +- `skyview.rs` — SVF accumulation with pipelined dispatch (`svf_accumulation.wgsl`) +- `aniso_gpu.rs` — anisotropic sky longwave (`anisotropic_sky.wgsl`) + +**Python physics** (`physics/`) — reference implementations retained for readability and validation: + +- `sun_position.py` (1,061 lines) - ASTM solar position algorithm (kept in Python: once per timestep, scalar) +- `Perez_v3.py` (313 lines) - Reference only; production uses `crate::perez::perez_v3()` in Rust +- `cylindric_wedge.py` (109 lines) - Reference only; production uses `crate::sky::cylindric_wedge()` in Rust +- `morphology.py` (188 lines) - Reference only; production uses `crate::morphology` in Rust +- `wallalgorithms.py` (158 lines) - Wall height/aspect detection (setup, not per-timestep) +- Scalars: `clearnessindex_2013b.py`, `diffusefraction.py`, `daylen.py`, etc. (kept in Python: once per timestep) + +### G.1 Principled Rust/Python Boundary + +**Decision framework** - move to Rust when ALL of: + +1. Per-pixel computation (not scalar/once-per-timestep) +2. 
Called in the per-timestep hot path +3. Measurable bottleneck (>5% of timestep time) + +**Keep in Python** when: + +- Scalar computation (clearnessindex, diffusefraction, daylen) +- Called once per scenario, not per timestep +- Complex control flow better expressed in Python +- Debugging/readability priority outweighs performance + +### G.2 Python → Rust Migration Candidates + +| Priority | Function | Current | Per-Timestep? | Expected Speedup | Effort | +| -------- | -------------------------------- | -------------------------------- | ---------------------- | ---------------- | ------ | +| **P0** | `cylindric_wedge()` | Python (109 lines) | Yes, always | 3-5× | Low | +| **P0** | Anisotropic patch summation loop | Python (5 lines in radiation.py) | Yes (aniso mode) | 5-10× | Low | +| **P1** | ~~`binary_dilation()`~~ | Rust (morphology.rs) | No (setup) | 2.5× (measured) | ✅ Done | +| **P2** | ~~`Perez_v3()`~~ | Rust (`crate::perez::perez_v3`) | Yes (aniso mode) | 2-3× | ✅ Done | +| **P3** | `wallalgorithms.py` | Python (158 lines) | No (setup) | 3-5× | Medium | +| Keep | `sun_position.py` | Python (1,061 lines) | Once/timestep (scalar) | Negligible | — | +| Keep | `clearnessindex_2013b.py` | Python (88 lines) | Once/timestep (scalar) | Negligible | — | +| Keep | `diffusefraction.py` | Python (47 lines) | Once/timestep (scalar) | Negligible | — | +| Keep | `daylen.py` | Python (22 lines) | Once/scenario (scalar) | Negligible | — | + +**P0: cylindric_wedge → Rust** + +- Currently: vectorized numpy with trig ops (tan, arctan, sqrt, cos, sin) over full 2D grid +- Why: Called every timestep, pure math, no complex control flow +- How: Add `cylindric_wedge()` to existing `sky.rs` module +- Test: Validate against golden regression tests + +**P0: Patch summation loop → Rust** + +- Currently: `for idx in range(lv.shape[0]): ani_lum += diffsh[:,:,idx] * lv[idx, 2]` +- Why: ~150 iterations × full grid per timestep (anisotropic mode) +- How: Add batch dot-product function to 
`sky.rs` +- Test: Bit-exact comparison with Python loop + +**P1: binary_dilation → Rust** + +- Currently: Python nested loop replacing scipy.ndimage.binary_dilation +- Why: O(rows × cols × iterations × 9) where iterations ≈ 25/pixel_size +- How: Add to a new `morphology.rs` or to `gvf.rs` +- Alternative: Could use ndarray + rayon in Rust for immediate 10× + +### G.3 GPU Acceleration Roadmap + +**Current GPU architecture (wgpu):** + +- Framework: wgpu 27.0 (WebGPU standard, cross-platform) +- Shader: WGSL compute shader (shadow_propagation.wgsl, 346 lines) +- Context: `ShadowGpuContext` with 17 storage buffers +- Dispatch: 16×16×1 workgroups +- Lifecycle: Context persisted via `OnceLock`, but buffers recreated per call + +**Phase G.3.1: GPU Buffer Reuse** (HIGH priority) + +- Problem: Per-call buffer allocation overhead - every `calculate_shadows_wall_ht_25()` creates ~10 new GPU buffers, bind groups, staging buffers, and command encoders +- Context itself already persisted via `OnceLock>` in `shadowing.rs` +- Fix: Add `GpuResourcePool` with buffer caching by size, persistent staging buffer +- Alternative: Python-side `ShadowGpuRunner` class (matches existing `SkyviewRunner` pattern) +- Expected benefit: Eliminate per-call allocation overhead +- Risk: Low (architectural change, no algorithm changes) + +**Phase G.3.2: GPU-Accelerated SVF** ✅ Complete + +- Implemented pipelined GPU dispatch via `svf_accumulation.wgsl` +- SVF accumulation runs entirely on GPU with batch patch processing +- Falls back to CPU automatically when GPU unavailable +- SVF is cached after first computation (210× speedup on repeat) + +**Phase G.3.3: GPU cylindric_wedge** (MEDIUM priority) + +- Candidate for GPU: Pure per-pixel trig operations +- Could run as a simple compute shader alongside shadow GPU +- Expected benefit: Marginal (already fast with numpy vectorization) +- Recommendation: Move to Rust first (G.2 P0), consider GPU later if needed + +**Phase G.3.4: GPU UTCI/PET** (LOW priority) 
+ +- Both are embarrassingly parallel (per-pixel, no data dependencies) +- UTCI: 125-term polynomial (fast, already rayon-parallel in Rust) +- PET: 50-iteration solver (slower, could benefit from GPU for very large grids) +- Expected benefit: Only significant for grids >5000×5000 +- Recommendation: Defer unless handling very large rasters + +### G.4 FFI Boundary Optimization + +**Current pattern** (good): + +``` +Python orchestration → Rust computation → Python result handling +``` + +**Identified improvements:** + +| Issue | Current | Target | Priority | +| ----------------------- | ---------------------------------- | --------------------------- | -------- | +| Radiation orchestration | Python loops + multiple Rust calls | Single fused Rust call | P2 | +| GPU context lifecycle | Per-call init | Persistent across timesteps | P0 | +| Array transfer overhead | Copy per call | Zero-copy via PyArray views | P3 | +| Parameter passing | Mix of structs and positional args | Consistent struct-based API | P2 | + +**Fused radiation kernel** (deferred): + +- Currently: Python calls `cylindric_wedge` → `Perez_v3` → `vegetation.kside_veg` → `vegetation.lside_veg` → `sky.anisotropic_sky` → `tmrt.compute_tmrt` +- Could be: Single `compute_full_radiation(inputs) → RadiationResult` in Rust +- Benefit: Eliminate ~6 Python/Rust crossings per timestep +- Risk: Reduces modularity, harder to debug intermediate values +- Recommendation: Defer until per-timestep time exceeds 1s for target grid sizes + +### G.5 Implementation Order + +| Step | Task | Est. 
Effort | Dependencies | Status | +| ---- | ---------------------------------------------- | ----------- | ------------- | ----------- | +| 1 | Move `cylindric_wedge()` to Rust (`sky.rs`) | 2-3 hours | None | ✅ Complete | +| 2 | Move anisotropic patch loop to Rust (`sky.rs`) | 1-2 hours | None | ✅ Complete | +| 3 | GPU buffer reuse (persistent resource pool) | 3-4 hours | None | ✅ Complete | +| 4 | Move `binary_dilation()` to Rust | 2-3 hours | None | ✅ Complete | +| 5 | ~~Move `Perez_v3()` to Rust~~ | 4-6 hours | Step 1 | ✅ Complete | +| 6 | ~~GPU-accelerated SVF~~ | 2-3 days | Step 3 | ✅ Complete | +| 7 | ~~Fused radiation kernel~~ | 1-2 days | Steps 1, 2, 5 | ✅ Complete | + +**Milestone targets:** + +- After steps 1-2: ~2× faster per timestep (anisotropic mode) +- After step 3: Eliminated GPU init overhead +- After step 4: 10-100× faster wall setup +- After step 6: 5-50× faster fresh SVF computation + +--- + +## Phase H: Field Data Validation (In Progress) + +**Goal:** Validate SOLWEIG outputs against measured field data from real-world observation campaigns. + +**Why this matters:** Currently all validation is against the reference UMEP Python implementation (computational parity). This confirms the code is _equivalent_, but not that it's _correct_. Field-data validation would confirm that: + +1. Tmrt predictions match actual measurements within published error bounds +2. UTCI/PET thermal comfort categories are realistic +3. Shadow patterns match observed conditions +4. The Perez anisotropic sky model improves accuracy vs isotropic + +### H.1 Kolumbus Wall Temperature Validation (Complete) + +**Dataset:** Wallenberg et al. 
(2025) - Zenodo record 15309445 +- Site: Gothenburg, Sweden (57.697°N, 11.930°E), EPSG:3007 +- Period: 2023-05-15 to 2023-08-31 (summer months) +- Grid: 80×81 pixels at 0.5m resolution +- Geodata: DSM, DEM, CDSM, groundcover GeoTIFFs + WOI shapefile +- Met forcing: UMEP/SUEWS format (10-min resolution, 4 monthly files) +- Observations: IR radiometer wall surface temperatures (plastered brick + wood) + +**Results (generic cobblestone parameters):** + +| Metric | Plastered Brick | Wood | +|--------|----------------|------| +| Monthly RMSE (July) | 6.67°C | 8.96°C | +| Monthly Bias | -2.53°C | -3.17°C | +| Single-day RMSE | 8.53°C | 11.57°C | +| Published RMSE (tuned params) | ~2°C | ~2°C | + +**Key finding:** Our generic model (hardcoded tgk=0.37, tstart=-3.41, tmaxlst=15.0) is 3-4× worse than the paper's per-material tuned parameters. This validates the importance of land cover support (which the full pipeline already has). + +**Full pipeline results (noon, July 15):** +- WOI Tmrt: 31.7°C at Ta=20.5°C (+11.2°C excess radiation) +- Peak Tmrt: 41.8°C at 15:00, Ta=23.9°C (+17.9°C excess) + +**Tasks:** + +- [x] Download Zenodo validation dataset (kolumbus.csv + geodata + met forcing) +- [x] Add `Weather.from_umep_met()` for SUEWS-format met files +- [x] Write 12 validation tests (data loading, wall temp, full pipeline) +- [x] Register `validation` pytest marker +- [x] Add material-specific wall temperature parameters (Rust optional params + JSON defaults) +- [x] ~~Land cover-aware wall temperature model~~ — Not needed: scalar `wall_material` param (brick/concrete/wood/cobblestone) is the correct abstraction for SOLWEIG's wall radiation model + +### H.2 Montpellier Tmrt Validation (Complete) + +**Dataset:** INRAE PRESTI experimental canyon, Montpellier, France (43.64°N, 3.87°E) +- Reduced-scale urban canyon (2.3m concrete walls, 12m long, 5m apart, E-W orientation) +- 15 grey globe thermometers (40mm, RAL 7001, PT100) at 1.3m height +- Period: 2023-07-21 to 2024-07-31 
(10-min intervals) +- Clear-sky GHI model (Ineichen, Linke turbidity 3.5) + +**Results (isotropic sky, Aug 4 2023):** + +| Metric | Value | +|--------|-------| +| Single-day RMSE | 7.59°C | +| Single-day Bias | +4.45°C | +| Multi-day RMSE (3 days) | 9.06°C | +| Multi-day Bias | +5.18°C | +| Noon Tmrt | 52.2°C (Ta=25.6°C) | +| Peak Tmrt | 52.8°C | + +**Tasks:** + +- [x] Construct synthetic DSM from known canyon dimensions (30×40 at 0.5m) +- [x] Download and parse globe thermometer measurements (presti_subset.csv) +- [x] Write Tmrt validation tests (20 tests: data, globe-to-Tmrt, DSM, model vs obs) +- [x] Compare isotropic vs anisotropic sky model accuracy (aniso requires shadow matrices, deferred) + +### H.3 Additional Validation Opportunities + +**Potential data sources:** + +- UMEP validation datasets (Gothenburg, London) +- Published SOLWEIG validation studies (Lindberg et al. 2008, 2016) +- COSMO/CLM urban datasets +- Local university weather stations with globe thermometer data + +--- + +## Phase D: Documentation & Integration (In Progress) + +- [x] Quick Start Guide ([docs/getting-started/quick-start.md](docs/getting-started/quick-start.md)) +- [x] MkDocs site scaffolded (25 pages under `docs/`) +- [x] API Reference with mkdocstrings (auto-generated) +- [x] QGIS plugin scaffolded (Phases 1-10, see [qgis_plugin/README.md](qgis_plugin/README.md)) +- [x] QGIS plugin testing & polish (Phase 11) +- [x] CI/CD for cross-platform plugin builds +- [ ] Build and publish wheels for multiple platforms + +### D.1 Documentation Fixes (Pending) + +| Task | Impact | Notes | +| --------------------------------------- | ------ | ------------------------------------------------------------------------------------------------------------------ | +| Fix GPU function docs | HIGH | `disable_gpu()` referenced in quick-start & installation but doesn't exist. `is_gpu_available()` not in `__all__`. 
| +| Document undocumented `__all__` exports | MEDIUM | `compute_utci_grid`, `compute_pet_grid`, data bundles, tiling utils undocumented | +| Fix `ThermalState` import path in docs | LOW | `docs/api/dataclasses.md` uses deep path instead of `solweig.ThermalState` | + +--- + +## Phase F: Test Coverage (Pending) + +**Goal:** Close critical gaps in test coverage. The physics (golden tests) and API surface are well tested, but the orchestration layer and primary workflow have blind spots. + +### F.1 Critical Test Gaps + +| Gap | Risk | What's missing | +| ---------------------------- | ------ | --------------------------------------------------------------------------- | +| ~~`calculate_timeseries()`~~ | HIGH | ✅ 13 tests added in `tests/test_timeseries.py` | +| ~~`validate_inputs()`~~ | MEDIUM | ✅ 8 tests added in `tests/test_timeseries.py` | +| `compute_utci_grid/pet_grid` | MEDIUM | Grid-level postprocessing exported in `__all__` but untested | +| Orchestration unit tests | MEDIUM | `computation.py` and `timeseries.py` only tested indirectly via integration | +| Multi-timestep thermal state | MEDIUM | No test verifies state persistence/accumulation across timesteps | + +**Current coverage by layer:** + +| Layer | Coverage | Notes | +| ------------------------------ | ----------------------------- | ----------------------------------------------------------- | +| Layer 1: Public API (`api.py`) | Good (70 tests) | Missing timeseries, validate_inputs | +| Layer 2: Orchestration | Poor (indirect only) | No unit tests for compute_single_timestep, state management | +| Layer 3: Components | Moderate (indirect) | Tested through golden tests, not directly | +| Layer 4: Rust | Excellent (100+ golden tests) | No gaps identified | + +--- + +## Phase E: API Improvements (Complete) + +**Goal:** Improve API ergonomics, consistency, and error handling. 
+ +**Status:** ✅ Complete (Feb 2026) + +### E.1 Result Methods Pattern (P0) + +Add `compute_utci()` and `compute_pet()` methods directly on `SolweigResult` for discoverability. + +| Task | File | Status | +| ----------------------------------------------------------------------------------- | ----------------- | ----------- | +| Add `SolweigResult.compute_utci(weather)` method | models/results.py | ✅ Complete | +| Add `SolweigResult.compute_pet(weather, human)` method | models/results.py | ✅ Complete | +| Support both `result.compute_utci(weather)` and `result.compute_utci(ta, rh, wind)` | models/results.py | ✅ Complete | +| Update README with new pattern | README.md | ✅ Complete | +| Add tests for result methods | tests/test_api.py | ✅ Complete | + +**Usage after implementation:** + +```python +result = solweig.calculate(surface, location, weather) + +# Pattern A: Pass weather object (convenient) +utci = result.compute_utci(weather) + +# Pattern B: Pass individual values (explicit) +utci = result.compute_utci(ta=25.0, rh=50.0, wind=2.0) +``` + +### E.2 Location Auto-Extraction Warning (P0) + +Fix silent UTC offset defaulting when location is auto-extracted from CRS. 
+ +| Task | File | Status | +| ------------------------------------------------------------------------- | ----------------------------------- | ----------- | +| Change `Location.from_surface()` to require explicit `utc_offset` or warn | models/weather.py | ✅ Complete | +| Add warning in `calculate_timeseries()` when location=None | timeseries.py | ✅ Complete | +| Update quick-start guide with explicit location examples | docs/getting-started/quick-start.md | ✅ Complete | + +**Behavior after implementation:** + +```python +# This will emit a warning about UTC offset defaulting to 0 +results = calculate_timeseries(surface, weather_list) # location=None + +# Recommended: explicit location +location = solweig.Location(latitude=37.98, longitude=23.73, utc_offset=2) +results = calculate_timeseries(surface, weather_list, location=location) +``` + +### E.3 Config Harmonization - Explicit Wins (P1) + +Change precedence so explicit parameters override `config` values (Python's "explicit is better than implicit"). + +| Task | File | Status | +| ---------------------------------------------------------------------------- | --------------------- | ----------- | +| Change `calculate()` to let explicit params override config | api.py | ✅ Complete | +| Change `calculate_timeseries()` to let explicit params override config | timeseries.py | ✅ Complete | +| Change `use_anisotropic_sky` default to `None` (means "use config or False") | api.py | ✅ Complete | +| Add debug logging when explicit params override config | api.py | ✅ Complete | +| Document new precedence in docstrings | api.py, timeseries.py | ✅ Complete | +| Add tests for precedence behavior | tests/test_api.py | ✅ Complete | + +**Current behavior (config wins):** + +```python +# config.use_anisotropic_sky=True overrides explicit False - CONFUSING +calculate(..., config=config, use_anisotropic_sky=False) # Uses True! 
+``` + +**New behavior (explicit wins):** + +```python +# Explicit parameter takes precedence - INTUITIVE +calculate(..., config=config, use_anisotropic_sky=False) # Uses False +``` + +### E.4 Validation & Structured Errors (P1) + +Add typed exceptions and preflight validation for better error messages. + +| Task | File | Status | +| ------------------------------------------------------------------------------------------------------------------------------- | -------------------------- | ----------- | +| Create `errors.py` with `SolweigError`, `InvalidSurfaceData`, `GridShapeMismatch`, `MissingPrecomputedData`, `WeatherDataError` | errors.py (new) | ✅ Complete | +| Add `validate_inputs()` preflight function | api.py | ✅ Complete | +| Update `calculate()` to raise structured errors | api.py | ✅ Complete | +| Export errors in `__all__` | api.py | ✅ Complete | +| Add tests for error cases | tests/test_errors.py (new) | ✅ Complete | + +**Usage after implementation:** + +```python +try: + warnings = solweig.validate_inputs(surface, location, weather) + result = solweig.calculate(surface, location, weather) +except solweig.GridShapeMismatch as e: + print(f"Grid mismatch: {e.field} expected {e.expected}, got {e.got}") +except solweig.MissingPrecomputedData as e: + print(f"Missing data: {e}") +``` + +### E.5 API Cleanup (P2) + +Minor cleanup tasks. 
+ +| Task | File | Status | +| --------------------------------------------------------- | ----------------- | ----------- | +| Remove `poi_coords` from public signature (keep internal) | api.py | ⏳ Deferred | +| Add `Weather.from_values()` factory for quick testing | models/weather.py | ✅ Complete | +| Document result methods and validation in README | README.md | ✅ Complete | + +### E.6 Implementation Order + +| Step | Task | Effort | Risk | Dependencies | +| ---- | ---------------------------------- | ------- | --------------------------- | ------------ | +| 1 | E.1: Result methods | 1 hour | None | - | +| 2 | E.2: Location warning | 30 min | None | - | +| 3 | E.4: errors.py + validate_inputs() | 2 hours | None | - | +| 4 | E.3: Config precedence | 2 hours | **Low** - behavioral change | - | +| 5 | E.5: API cleanup | 30 min | None | - | +| 6 | Update README and docs | 1 hour | None | E.1, E.2 | + +**Total estimated effort:** ~7 hours + +--- + +## Wish List (Future Features) + +Ideas for future development, not yet prioritized. + +### High Value + +| Feature | Description | Complexity | +| -------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------- | +| **Shade Duration Mapping** | Compute hours of shade per pixel over a day. Useful for urban planning and tree placement. Builds on existing shadow infrastructure. | Low | +| **WBGT Index** | Wet Bulb Globe Temperature - occupational heat stress index (OSHA, military, sports). Formula: `WBGT = 0.7×Tw + 0.2×Tg + 0.1×Ta`. Simpler than UTCI/PET. | Low | +| **Tree Canopy Scenarios** | "What-if" analysis: add hypothetical trees to CDSM and quantify cooling effect. Useful for urban forestry and climate adaptation planning. | Medium | +| **Heat Exposure Duration** | Cumulative hours above thermal stress thresholds (e.g., UTCI > 32°C). Time-weighted exposure mapping. 
| Low | + +### Medium Value + +| Feature | Description | Complexity | +| --------------------------- | ------------------------------------------------------------- | ---------- | +| **SET\* Index** | Standard Effective Temperature - ASHRAE thermal comfort index | Medium | +| **GeoTIFF Export** | Export results with proper CRS metadata for GIS integration | Low | +| **Animation Export** | Time-lapse visualization of Tmrt/UTCI over a day | Medium | +| **Weather API Integration** | Fetch real-time weather from OpenWeather, etc. | Medium | + +### Exploratory + +| Feature | Description | Notes | +| ---------------------------- | -------------------------------------------------- | ------------------------------------------- | +| Wind comfort | Simple wind amplification from building geometry | Requires wind field data or simplifications | +| Cool corridor identification | Automated detection of thermally comfortable paths | Builds on shade duration | +| Optimal tree placement | Algorithmic tree positioning for maximum cooling | Requires optimization framework | + +--- + +## Testing Requirements + +All changes must maintain: + +- **Tmrt bias < 0.1°C** vs reference implementation +- **612+ tests passing** (current baseline, including spec, golden, benchmark, and validation tests) +- No memory regression on standard benchmarks + +Gate command: `pytest tests/` + +--- + +## File Reference + +| File | Purpose | Lines | +| ---------------------------------------------- | ----------------------- | -------- | +| [api.py](pysrc/solweig/api.py) | Public API entry point | 403 | +| [models/](pysrc/solweig/models/) | Dataclasses (6 modules) | ~3,080 | +| [computation.py](pysrc/solweig/computation.py) | Core orchestration | 389 | +| [components/](pysrc/solweig/components/) | Physics modules | ~1,365 | +| [specs/](specs/) | Physics specifications | 10 files | + +--- + +## Risk Register + +| Risk | Impact | Mitigation | +| --------------------------------- | ------ | 
----------------------------------- | +| Breaking parity during spec fixes | HIGH | Run parity tests after every change | +| Memory regression | MEDIUM | Add memory benchmarks to CI | +| NumPy ABI mismatch | HIGH | Pin version, test 1.x and 2.x | diff --git a/demos/athens-demo.py b/demos/athens-demo.py index b16442f..4eb17c5 100644 --- a/demos/athens-demo.py +++ b/demos/athens-demo.py @@ -1,96 +1,251 @@ # %% +""" +Demo: Athens SOLWEIG - Simplified API + +This demo shows how to use the solweig package with the new simplified API. +The simplified API automatically handles: +- Wall height and aspect computation from DSM +- Sky View Factor (SVF) preparation and caching via ``SurfaceData.prepare()`` +- Extent intersection and resampling +- CRS validation and extraction +- NaN filling in DSM/CDSM/TDSM with ground reference (DEM or DSM) + +Legacy config file-driven workflows (parametersforsolweig.json) are +supported via ``ModelConfig.from_json()``. + +Data sources +------------ +- DSM/DEM: Derived from LiDAR data, Hellenic Cadastre (https://www.ktimatologio.gr/) +- Tree vectors (trees.gpkg): Derived from Athens Urban Atlas + (https://land.copernicus.eu/local/urban-atlas) and geodata.gov.gr +- EPW weather (athens_2023.epw): Generated using Copernicus Climate Change + Service information [2025] via PVGIS (https://re.jrc.ec.europa.eu/pvg_tools/en/). + Contains modified Copernicus Climate Change Service information; neither the + European Commission nor ECMWF is responsible for any use that may be made of + the Copernicus information or data it contains. 
+""" + from pathlib import Path import geopandas as gpd +import solweig from pyproj import CRS -from umep import ( - common, - wall_heightaspect_algorithm, -) -from umep.functions.SOLWEIGpython import solweig_runner_core -from umepr import solweig_runner_rust, svf -# working folder +# Working folders input_folder = "demos/data/athens" input_path = Path(input_folder).absolute() -input_path.mkdir(parents=True, exist_ok=True) -input_path_str = str(input_path) -# output folder output_folder = "temp/athens" output_folder_path = Path(output_folder).absolute() output_folder_path.mkdir(parents=True, exist_ok=True) -output_folder_path_str = str(output_folder_path) -# extents -total_extents = [476800, 4205850, 477200, 4206250] +output_dir = output_folder_path / "output_simplified" + +# Extents for Athens demo area +EXTENTS_BBOX = [476800, 4205850, 477200, 4206250] +TARGET_CRS = 2100 # %% -# buffer -working_crs = 2100 +# ============================================================================= +# SIMPLIFIED API (Recommended) +# ============================================================================= + +# Generate CDSM from tree vector data trees_gdf = gpd.read_file(input_folder + "/trees.gpkg") -trees_gdf = trees_gdf.to_crs(working_crs) -cdsm_rast, cdsm_transf = common.rasterise_gdf( +trees_gdf = trees_gdf.to_crs(TARGET_CRS) +cdsm_rast, cdsm_transf = solweig.io.rasterise_gdf( trees_gdf, "geometry", "height", - bbox=total_extents, + bbox=EXTENTS_BBOX, pixel_size=1.0, ) -# add to DEM then set -common.save_raster( +solweig.io.save_raster( str(output_folder_path / "CDSM.tif"), cdsm_rast, cdsm_transf.to_gdal(), - CRS.from_epsg(working_crs).to_wkt(), + CRS.from_epsg(TARGET_CRS).to_wkt(), ) + # %% -# wall info for SOLWEIG -wall_heightaspect_algorithm.generate_wall_hts( - dsm_path=input_path_str + "/DSM.tif", - bbox=total_extents, - out_dir=output_folder_path_str + "/walls", +# Step 1: Prepare surface data +# - CRS automatically extracted from DSM +# - NaN in DSM/CDSM/TDSM 
filled with ground reference (DEM or DSM) +# - During prepare(): walls/SVF are computed and cached to working_dir if not already cached +# - During calculate*(): SVF must already be present on the surface/precomputed data +# - Extent and resolution handled automatically +# - Resampled data saved to working_dir for inspection +surface = solweig.SurfaceData.prepare( + dsm=str(input_path / "DSM.tif"), + working_dir=str(output_folder_path / "working"), # Cache preprocessing here + cdsm=str(output_folder_path / "CDSM.tif"), + bbox=EXTENTS_BBOX, # Optional: specify extent + pixel_size=1.0, # Optional: specify resolution (default: from DSM) ) -# %% -# skyview factor for SOLWEIG -svf.generate_svf( - dsm_path=input_path_str + "/DSM.tif", - bbox=total_extents, - out_dir=output_folder_path_str + "/svf", - cdsm_path=output_folder_path_str + "/CDSM.tif", - trans_veg_perc=3, +# Step 2: Load weather data and location from EPW file +epw_path = str(input_path / "athens_2023.epw") +weather_list = solweig.Weather.from_epw( + epw_path, + start="2023-07-01", + end="2023-07-04", # 4 days: July 1-4 ) +location = solweig.Location.from_epw(epw_path) # lat, lon, UTC offset, elevation # %% -# skyview factor for SOLWEIG - Tiled -svf.generate_svf( - dsm_path=input_path_str + "/DSM.tif", - bbox=total_extents, - out_dir=output_folder_path_str + "/svf_tiled", - cdsm_path=output_folder_path_str + "/CDSM.tif", - trans_veg_perc=3, - use_tiled_loading=True, - tile_size=200, +# Step 3: Calculate Tmrt with all defaults +# All parameters use bundled defaults: +# - Human: abs_k=0.7, abs_l=0.95, standing, 75kg, 180cm, 35yo, 80W activity +# - Physics: Tree transmissivity=0.03, seasonal dates, posture geometry +# - No materials needed (no landcover grid) +summary = solweig.calculate_timeseries( + surface=surface, + weather_series=weather_list, + location=location, + use_anisotropic_sky=True, # Uses precomputed SVF from prepare() + conifer=False, # Use seasonal leaf on/off (set True for evergreen trees) + 
output_dir=str(output_dir), + outputs=["tmrt", "shadow"], ) +print(summary.report()) # %% -SRR = solweig_runner_rust.SolweigRunRust( - "demos/data/athens/configsolweig.ini", - "demos/data/athens/parametersforsolweig.json", - use_tiled_loading=True, - tile_size=200, -) -SRR.run() -""" -Running SOLWEIG: 100%|| 72/72 [00:57<00:00, 1.63step/s] -""" +# Plot timeseries (Ta, Tmrt, UTCI, radiation, sun exposure over time) +summary.plot() + +# %% +# Visualise summary grids +import matplotlib.pyplot as plt # noqa: E402 + +fig, axes = plt.subplots(2, 3, figsize=(15, 10)) + +im0 = axes[0, 0].imshow(summary.tmrt_mean, cmap="hot") +axes[0, 0].set_title("Mean Tmrt (°C)") +plt.colorbar(im0, ax=axes[0, 0]) + +im1 = axes[0, 1].imshow(summary.utci_mean, cmap="hot") +axes[0, 1].set_title("Mean UTCI (°C)") +plt.colorbar(im1, ax=axes[0, 1]) + +im2 = axes[0, 2].imshow(summary.sun_hours, cmap="YlOrRd") +axes[0, 2].set_title("Sun hours") +plt.colorbar(im2, ax=axes[0, 2]) + +im3 = axes[1, 0].imshow(summary.tmrt_day_mean, cmap="hot") +axes[1, 0].set_title("Mean daytime Tmrt (°C)") +plt.colorbar(im3, ax=axes[1, 0]) + +im4 = axes[1, 1].imshow(summary.tmrt_night_mean, cmap="cool") +axes[1, 1].set_title("Mean nighttime Tmrt (°C)") +plt.colorbar(im4, ax=axes[1, 1]) + +# Show hours above the first day threshold (32°C by default) +threshold = sorted(summary.utci_hours_above.keys())[0] +im5 = axes[1, 2].imshow(summary.utci_hours_above[threshold], cmap="Reds") +axes[1, 2].set_title(f"UTCI hours > {threshold}°C") +plt.colorbar(im5, ax=axes[1, 2]) + +for ax in axes.flat: + ax.set_xticks([]) + ax.set_yticks([]) + +plt.suptitle(f"SOLWEIG Summary — {len(summary)} timesteps ({summary.n_daytime} day, {summary.n_nighttime} night)") +plt.tight_layout() +plt.show() + +# %% +# Optional: Load and inspect run metadata +# This metadata captures all parameters used in the calculation for reproducibility +metadata = solweig.load_run_metadata(output_dir / "run_metadata.json") +print("\nRun metadata loaded:") 
+print(f" Timestamp: {metadata['run_timestamp']}") +print(f" SOLWEIG version: {metadata['solweig_version']}") +print(f" Location: {metadata['location']['latitude']:.2f}°N, {metadata['location']['longitude']:.2f}°E") +print(f" Human posture: {metadata.get('human', {}).get('posture', 'default (standing)')}") +print(f" Anisotropic sky: {metadata['parameters']['use_anisotropic_sky']}") +print(f" Weather timesteps: {metadata['timeseries']['timesteps']}") +print(f" Date range: {metadata['timeseries']['start']} to {metadata['timeseries']['end']}") + +# %% +# Optional parameter customization examples: + +# Example 1: Custom human parameters (common use case) +# results = solweig.calculate_timeseries( +# surface=surface, +# weather_series=weather_list, +# human=solweig.HumanParams( +# abs_k=0.65, # Lower shortwave absorption +# abs_l=0.97, # Higher longwave absorption +# weight=70, # 70 kg +# height=1.65, # 165 cm +# posture="sitting", +# ), +# output_dir=str(output_dir), +# ) + +# Example 2: Custom physics (e.g., different tree transmissivity) +# Create custom_trees.json with: +# { +# "Tree_settings": {"Value": {"Transmissivity": 0.05, ...}}, +# "Posture": {"Standing": {...}, "Sitting": {...}} +# } +# physics = solweig.load_physics("custom_trees.json") +# results = solweig.calculate_timeseries( +# surface=surface, +# weather_series=weather_list, +# physics=physics, +# output_dir=str(output_dir), +# ) + +# Example 3: Custom materials (requires landcover grid) +# surface_with_lc = solweig.SurfaceData.prepare( +# dsm="dsm.tif", +# land_cover="landcover.tif", # Grid with class IDs (0-7, 99-102) +# working_dir="cache/", +# ) +# materials = solweig.load_materials("site_materials.json") # Albedo, emissivity per class +# results = solweig.calculate_timeseries( +# surface=surface_with_lc, +# weather_series=weather_list, +# materials=materials, +# output_dir=str(output_dir), +# ) + +# Legacy: Old unified params file (still supported for backwards compatibility) +# params = 
solweig.load_params("parametersforsolweig.json") +# Contains human + physics + materials in one file +# Note: Prefer the new three-parameter model for clarity! + +# %% +# Step 4: Per-timestep UTCI/PET (via timestep_outputs) +# To get per-timestep UTCI or PET arrays, include them in timestep_outputs: +# +# summary = solweig.calculate_timeseries( +# surface=surface, +# weather_series=weather_list, +# location=location, +# timestep_outputs=["tmrt", "utci"], # retain per-timestep Tmrt + UTCI +# output_dir=str(output_dir), +# ) +# for r in summary.results: +# print(f"UTCI range: {np.nanmin(r.utci):.1f} - {np.nanmax(r.utci):.1f}") +# +# Note: Summary grids (utci_mean, utci_max, etc.) are always computed regardless. + +# %% +# ============================================================================= +# NOTE: Legacy API (SolweigRunRust, SolweigRunCore, configs.py) removed in Phase 5.6 +# ============================================================================= +# The legacy config-file-driven API has been removed. Use the modern simplified API above. +# For tiled processing of large rasters, use: +# +# results = solweig.calculate_tiled( +# surface=surface, +# location=location, +# weather=weather, +# tile_size=256, # Tile size in pixels +# overlap=50, # Overlap in pixels for shadow continuity +# output_dir=str(output_dir), +# ) +# +# Performance: The modern API with Rust algorithms is comparable to the old runner. # %% -SRC = solweig_runner_core.SolweigRunCore( - "demos/data/athens/configsolweig.ini", - "demos/data/athens/parametersforsolweig.json", - use_tiled_loading=False, -) -# SRC.run() -""" -Running SOLWEIG: 100%|| 72/72 [04:49<00:00, 4.02s/step] -""" diff --git a/demos/profile_timeseries.py b/demos/profile_timeseries.py new file mode 100644 index 0000000..b1ce043 --- /dev/null +++ b/demos/profile_timeseries.py @@ -0,0 +1,196 @@ +""" +Profile SOLWEIG timeseries to find per-timestep bottlenecks. 
+ +Instruments each component of calculate_core() and the I/O layer. +Patches at computation.py level to capture Python wrapper overhead too. +""" + +import functools +import statistics +import time +from collections import defaultdict +from pathlib import Path +from typing import Any, cast + +# ── Monkey-patch component functions with timing ────────────────────── + +_timings: dict[str, list[float]] = defaultdict(list) + + +def _timed(name, fn): + @functools.wraps(fn) + def wrapper(*args, **kwargs): + t0 = time.perf_counter() + result = fn(*args, **kwargs) + _timings[name].append(time.perf_counter() - t0) + return result + + return wrapper + + +# Patch at Rust FFI level (these are always called indirectly) +import solweig # noqa: E402 +from solweig.rustalgos import ground as ground_rust # noqa: E402 +from solweig.rustalgos import gvf as gvf_rust # noqa: E402 +from solweig.rustalgos import shadowing, sky, vegetation # noqa: E402 +from solweig.rustalgos import tmrt as tmrt_rust # noqa: E402 + +shadowing.calculate_shadows_wall_ht_25 = _timed("rust:shadows", shadowing.calculate_shadows_wall_ht_25) +gvf_rust.gvf_calc = _timed("rust:gvf_calc", gvf_rust.gvf_calc) +vegetation.kside_veg = _timed("rust:kside_veg", vegetation.kside_veg) +vegetation.lside_veg = _timed("rust:lside_veg", vegetation.lside_veg) +sky.cylindric_wedge = _timed("rust:cylindric_wedge", sky.cylindric_wedge) +sky.anisotropic_sky = _timed("rust:aniso_sky", sky.anisotropic_sky) +sky.weighted_patch_sum = _timed("rust:patch_sum", sky.weighted_patch_sum) +ground_rust.compute_ground_temperature = _timed("rust:ground_temp", ground_rust.compute_ground_temperature) +ground_rust.ts_wave_delay_batch = _timed("rust:ts_wave_delay", ground_rust.ts_wave_delay_batch) +tmrt_rust.compute_tmrt = _timed("rust:tmrt", tmrt_rust.compute_tmrt) + +# Patch at computation.py level (captures Python wrapper + Rust call) +from solweig import computation # noqa: E402 + +# Explicit Any alias for monkey-patching dynamic module 
attributes used only in this profiler. +comp = cast(Any, computation) + +# These are the functions imported by computation.py at module level +# We need to patch computation's references directly +comp.compute_shadows = _timed("py:shadows", comp.compute_shadows) +comp.resolve_svf = _timed("py:svf_resolve", comp.resolve_svf) +comp.compute_ground_temperature = _timed("py:ground_temp", comp.compute_ground_temperature) +comp.compute_gvf = _timed("py:gvf", comp.compute_gvf) +comp.compute_radiation = _timed("py:radiation", comp.compute_radiation) +comp.compute_tmrt = _timed("py:tmrt", comp.compute_tmrt) +comp._apply_thermal_delay = _timed("py:thermal_delay", comp._apply_thermal_delay) + +# Patch I/O +from solweig.models import results as results_mod # noqa: E402 + +if hasattr(results_mod, "SolweigResult"): + orig_to_geotiff = results_mod.SolweigResult.to_geotiff + results_mod.SolweigResult.to_geotiff = _timed("io:geotiff_write", orig_to_geotiff) + +# Patch Python physics used in radiation (requires UMEP) +from solweig.components import radiation as rad_mod # noqa: E402 + +if rad_mod.Kup_veg_2015a is not None: + rad_mod.Kup_veg_2015a = _timed("py:Kup_veg_comp", rad_mod.Kup_veg_2015a) + + +def profile_period(weather_slice, label, surface, output_dir): + """Profile a weather slice and return timing dict.""" + _timings.clear() + + Path(output_dir).mkdir(parents=True, exist_ok=True) + + print(f"\n{'=' * 70}") + print(f"Profiling: {label}") + print(f" {len(weather_slice)} timesteps: {weather_slice[0].datetime} → {weather_slice[-1].datetime}") + print(f"{'=' * 70}") + + t_total_start = time.perf_counter() + solweig.calculate_timeseries( + surface=surface, + weather_series=weather_slice, + output_dir=output_dir, + ) + t_total = time.perf_counter() - t_total_start + + n_day = sum(1 for w in weather_slice if w.sun_altitude > 0) + n_night = len(weather_slice) - n_day + + print(f"\n{'─' * 70}") + print(f"RESULTS: {label}") + print(f"{'─' * 70}") + print(f"Total: {t_total:.2f}s | 
{len(weather_slice)} steps ({n_day} day, {n_night} night)") + print( + f"Per step: {t_total / len(weather_slice) * 1000:.1f}ms avg | " + f"Per daytime step: {t_total / max(n_day, 1) * 1000:.1f}ms est." + ) + + # Collect all component-level (py:) timings + py_components = sorted([(n, sum(t)) for n, t in _timings.items() if n.startswith("py:")], key=lambda x: -x[1]) + rust_components = sorted([(n, sum(t)) for n, t in _timings.items() if n.startswith("rust:")], key=lambda x: -x[1]) + io_components = sorted([(n, sum(t)) for n, t in _timings.items() if n.startswith("io:")], key=lambda x: -x[1]) + + # Compute real overhead + py_total = sum(t for _, t in py_components) + io_total = sum(t for _, t in io_components) + nighttime_total = sum(t for n, t in py_components if n == "py:nighttime") + overhead = t_total - py_total - io_total - nighttime_total + + print(f"\n{'Component':<25} {'Total':>8} {'Mean':>8} {'Med':>8} {'Max':>8} {'N':>5} {'%':>6}") + print(f"{'─' * 70}") + + all_items = py_components + io_components + all_items.sort(key=lambda x: -x[1]) + + for name, total in all_items: + times = _timings[name] + ms = [t * 1000 for t in times] + pct = total / t_total * 100 + print( + f" {name:<23} {total:>7.3f}s {statistics.mean(ms):>7.2f} " + f"{statistics.median(ms):>7.2f} {max(ms):>7.2f} {len(ms):>5} {pct:>5.1f}%" + ) + + print( + f" {'overhead (precompute…)':<23} {overhead:>7.3f}s {'':>8} {'':>8} {'':>8} {'':>5} " + f"{overhead / t_total * 100:>5.1f}%" + ) + + print(f"\n {'Rust FFI detail:'}") + for name, total in rust_components: + times = _timings[name] + ms = [t * 1000 for t in times] + pct = total / t_total * 100 + print( + f" {name:<21} {total:>7.3f}s {statistics.mean(ms):>7.2f} " + f"{statistics.median(ms):>7.2f} {max(ms):>7.2f} {len(ms):>5} {pct:>5.1f}%" + ) + + # Bar chart + print("\n Time budget:") + bar_items = all_items + [("overhead", overhead)] + for name, total in sorted(bar_items, key=lambda x: -x[1]): + pct = total / t_total * 100 + bar = "█" * int(pct / 
2) + "░" * (1 if pct % 2 > 0.5 else 0) + if pct >= 1.0: + print(f" {name:<23} {pct:>5.1f}% {bar}") + + return dict(_timings) + + +# ── Setup ────────────────────────────────────────────────────────────── + +working_path = Path("temp/goteborg").absolute() +dsm_path = "demos/data/Goteborg_SWEREF99_1200/DSM_KRbig.tif" +cdsm_path = "demos/data/Goteborg_SWEREF99_1200/CDSM_KRbig.tif" + +print("=" * 70) +print("SOLWEIG Timeseries Profiler") +print("=" * 70) +print(f"GPU: {'enabled' if solweig.GPU_ENABLED else 'disabled'}") + +surface = solweig.SurfaceData.prepare( + dsm=dsm_path, + cdsm=cdsm_path, + working_dir=str(working_path), + trunk_ratio=0.25, +) +print(f"Grid: {surface.dsm.shape[1]}x{surface.dsm.shape[0]} = {surface.dsm.size:,} pixels") + +weather_all = solweig.Weather.from_umep_met("demos/data/Goteborg_SWEREF99_1200/GBG_TMY_1977.txt") + +# ── Profile winter 48h (few daytime hours) ───────────────────────────── +winter_48h = weather_all[:48] +profile_period(winter_48h, "Winter 48h (Jan 1-2)", surface, str(working_path / "profile_winter")) + +# ── Profile summer 48h (long days, most computation) ────────────────── +# June 21 = day 172, hour index = 172*24 = 4128 +summer_start = 172 * 24 +summer_48h = weather_all[summer_start : summer_start + 48] +profile_period(summer_48h, "Summer 48h (Jun 21-22)", surface, str(working_path / "profile_summer")) + +print("\n" + "=" * 70) +print("Profiling complete.") +print("=" * 70) diff --git a/demos/small_nbhd_walls.py b/demos/small_nbhd_walls.py index 1adb3b3..a3b9231 100644 --- a/demos/small_nbhd_walls.py +++ b/demos/small_nbhd_walls.py @@ -1,31 +1,41 @@ # %% -from importlib import reload -from pathlib import Path +""" +Demo: Wall height/aspect generation for a small neighbourhood. -from umep import ( - solweig_algorithm, - wall_heightaspect_algorithm, -) +Generates wall height and wall aspect rasters from a DSM GeoTIFF. 
+The outputs (wall_hts.tif, wall_aspects.tif) are saved into a ``walls/`` +subdirectory and can be loaded by ``SurfaceData.prepare()`` for subsequent +SOLWEIG calculations. -reload(solweig_algorithm) +Inputs: + - DSM GeoTIFF (``demos/data/small_nbhd/dsm_clipped.tif``) -# +Outputs (written to ``temp/demos/small_nbhd/walls/``): + - ``wall_hts.tif`` — wall pixel heights in metres + - ``wall_aspects.tif`` — wall pixel aspect angles in degrees (0 = N) +""" + +from pathlib import Path + +import solweig + +# Bounding box in the DSM's projected CRS [minx, miny, maxx, maxy]. +# This clips the DSM to the area of interest before wall detection. bbox = [789700, 784130, 790100, 784470] + working_folder = "temp/demos/small_nbhd" -pixel_resolution = 1 # metres -working_crs = 32651 working_path = Path(working_folder).absolute() working_path.mkdir(parents=True, exist_ok=True) -working_path_str = str(working_path) # %% +# Generate wall heights and aspects from the DSM. +# The function writes wall_hts.tif and wall_aspects.tif into out_dir. dsm_path = Path("demos/data/small_nbhd/dsm_clipped.tif").absolute() -# if not Path.exists(working_path / "walls"): -wall_heightaspect_algorithm.generate_wall_hts( +solweig.walls.generate_wall_hts( dsm_path=str(dsm_path), bbox=bbox, - out_dir=working_path_str + "/walls", + out_dir=str(working_path / "walls"), ) # %% diff --git a/demos/solweig_gbg_test.py b/demos/solweig_gbg_test.py index 1ac362e..22736f2 100644 --- a/demos/solweig_gbg_test.py +++ b/demos/solweig_gbg_test.py @@ -1,51 +1,75 @@ # %% +""" +Demo: Gothenburg SOLWEIG preprocessing + +This demo shows how to use the solweig package for: +1. Wall height and aspect generation +2. Sky View Factor (SVF) calculation +3. Land-cover-based surface properties (albedo, emissivity) + +Uses SurfaceData.prepare() which automatically computes and caches +walls and SVF in the working directory. 
+""" + from pathlib import Path -from umep import ( - wall_heightaspect_algorithm, -) -from umep.functions.SOLWEIGpython import Solweig_run as sr -from umepr import svf +import solweig # %% -bbox = [476070, 4203550, 477110, 4204330] +# Working folder and input files working_folder = "temp/goteborg" -pixel_resolution = 1 # metres -working_crs = 3007 - working_path = Path(working_folder).absolute() working_path.mkdir(parents=True, exist_ok=True) -working_path_str = str(working_path) -# input files for computing +# Input files dsm_path = "demos/data/Goteborg_SWEREF99_1200/DSM_KRbig.tif" cdsm_path = "demos/data/Goteborg_SWEREF99_1200/CDSM_KRbig.tif" -lc_path = "" +dem_path = "demos/data/Goteborg_SWEREF99_1200/DEM_KRbig.tif" +land_cover_path = "demos/data/Goteborg_SWEREF99_1200/landcover.tif" -# setup parameters -trans_veg_perc = 3 -trunk_ratio_perc = 25 +# Setup parameters +trunk_ratio = 0.25 # Trunk height as fraction of canopy height # %% -# wall info for SOLWEIG (height and aspect) -wall_heightaspect_algorithm.generate_wall_hts( - dsm_path=dsm_path, - bbox=None, - out_dir=working_path_str + "/walls", -) +# Prepare surface data with automatic wall and SVF computation +# SurfaceData.prepare() will: +# - Fill NaN in DSM/CDSM/TDSM with the ground reference (DEM or DSM) +# - Compute wall heights/aspects and cache in working_dir/walls/ +# - Compute SVF and cache in working_dir/svf/ +# - Reuse cached data on subsequent runs (use force_recompute=True to regenerate) +print("Preparing surface data (walls and SVF will be computed if not cached)...") +print(f" Working dir: {working_path}") +print(f"GPU acceleration: {'enabled' if solweig.GPU_ENABLED else 'disabled'}") -# %% -# skyview factor for SOLWEIG -svf.generate_svf( - dsm_path=dsm_path, - bbox=None, - out_dir=working_path_str + "/svf", - cdsm_path=cdsm_path, - trans_veg_perc=trans_veg_perc, - trunk_ratio_perc=trunk_ratio_perc, +surface = solweig.SurfaceData.prepare( + dsm=dsm_path, + cdsm=cdsm_path, + dem=dem_path, + 
land_cover=land_cover_path, + working_dir=str(working_path), + trunk_ratio=trunk_ratio, + # bbox=None, # Full extent (default) + # force_recompute=False, # Use cached data if available (default) ) # %% -sr.solweig_run("demos/data/Goteborg_SWEREF99_1200/configsolweig.ini", feedback=None) +# The surface object is now ready for SOLWEIG calculations: +# +weather_list = solweig.Weather.from_umep_met( + "demos/data/Goteborg_SWEREF99_1200/GBG_TMY_1977.txt", + start="1977-07-01", + end="1977-07-05", # 5 days: July 1-5 +) +# Location from surface CRS with explicit UTC offset (Gothenburg: CET = UTC+1) +location = solweig.Location.from_surface(surface, utc_offset=1) +summary = solweig.calculate_timeseries( + surface=surface, + weather_series=weather_list, + location=location, + output_dir=str(working_path / "output"), + outputs=["tmrt", "shadow"], +) +summary.report() +summary.plot() # %% diff --git a/docs/PARAMS_MIGRATION.md b/docs/PARAMS_MIGRATION.md new file mode 100644 index 0000000..433d793 --- /dev/null +++ b/docs/PARAMS_MIGRATION.md @@ -0,0 +1,302 @@ +# Parameter & Configuration Migration Guide + +**Updated: January 2026** + +This document maps all original parameters and configuration options to their new API equivalents. + +## Overview + +The old API used two configuration systems: +1. **Config INI file** (`configsolweig.ini`) - Model behavior flags, file paths +2. **Params JSON file** (`parametersforsolweig.json`) - Physical constants, material properties + +The new API simplifies this: +1. **Direct parameters** - Key model options as function arguments +2. **Bundled defaults** - Common physical constants in `default_params.json` +3. **Custom params** - Optional JSON file for landcover-specific properties +4. 
**Automatic preparation** - Many expensive values are prepared/cached via `SurfaceData.prepare()` + +--- + +## Model Configuration Options + +### Boolean Flags (Model Behavior) + +| Old API (config.ini) | New API | Status | Notes | +|--------------------------|-----------------------------------|-------------|-------| +| `use_veg_dem` | Automatic (from `surface.cdsm`) | ✅ AUTO | If CDSM provided, vegetation is used | +| `conifer` | `conifer=True/False` | ✅ DIRECT | Direct parameter in `calculate()` | +| `use_aniso` | `use_anisotropic_sky=True/False` | ✅ DIRECT | Direct parameter in `calculate()` | +| `use_landcover` | Automatic (from `surface.land_cover`) | ⏳ TODO | Will be automatic when land_cover provided | +| `person_cylinder` | `human.posture="standing"/"sitting"` | ✅ PARAMS | Via HumanParams object | +| `only_global` | Removed (always use full radiation) | 🗑️ REMOVED | Simplified assumption | +| `use_dem_for_buildings` | Automatic (from `surface.dem`) | ✅ AUTO | If DEM provided, used for building detection | + +### File Paths (Input Data) + +| Old API (config.ini) | New API | Status | Notes | +|--------------------------|-----------------------------------|-------------|-------| +| `dsm_path` | `SurfaceData.prepare(dsm=...)` | ✅ COMPLETE | Required input | +| `cdsm_path` | `SurfaceData.prepare(cdsm=...)` | ✅ COMPLETE | Optional vegetation | +| `tdsm_path` | `SurfaceData.prepare(tdsm=...)` | ✅ COMPLETE | Optional trunk zone | +| `dem_path` | `SurfaceData.prepare(dem=...)` | ✅ COMPLETE | Optional ground elevation | +| `lc_path` | `SurfaceData.prepare(land_cover=...)` | ⏳ TODO | Landcover grid (planned) | +| `wall_path` | `working_dir/walls/` | ✅ AUTO | Auto-generated and cached | +| `svf_path` | `working_dir/svf/` | ✅ AUTO | Auto-generated and cached | +| `aniso_path` | `working_dir/svf/shadowmats.npz` | ✅ AUTO | Auto-generated if use_aniso=True | +| `epw_path` | `Weather.from_epw(path)` | ✅ COMPLETE | EPW file loading | +| `output_path` | 
`calculate_timeseries(output_dir=...)` | ✅ COMPLETE | Output directory | + +### Preprocessing Control + +| Old API (config.ini) | New API | Status | Notes | +|--------------------------|-----------------------------------|-------------|-------| +| Pre-generate walls | Automatic + cached | ✅ AUTO | Generated during `SurfaceData.prepare()`, cached to working_dir | +| Pre-generate SVF | Automatic + cached | ✅ AUTO | Generated during `SurfaceData.prepare()`, cached to working_dir | +| Pre-generate shadowmats | Automatic + cached | ✅ AUTO | Generated during preparation when anisotropic data is requested | +| Wall limit (1.0m) | Hardcoded default | ✅ AUTO | No user control needed | + +--- + +## Physical Parameters (Material Properties) + +### Human Body Parameters (Tmrt) + +| Old API (params.json) | New API | Status | Notes | +|---------------------------------|-----------------------------------|-------------|-------| +| `Tmrt_params.absK` | `HumanParams(abs_k=0.7)` | ✅ BUNDLED | Default 0.7 in bundled params | +| `Tmrt_params.absL` | `HumanParams(abs_l=0.95)` | ✅ BUNDLED | Default 0.95 in bundled params | +| `Tmrt_params.posture` | `HumanParams(posture="standing")` | ✅ BUNDLED | "standing" or "sitting" | +| `Posture.Standing.Fside` | Internal constant | ✅ BUNDLED | 0.22 (from bundled params) | +| `Posture.Standing.Fup` | Internal constant | ✅ BUNDLED | 0.06 (from bundled params) | +| `Posture.Standing.height` | Internal constant | ✅ BUNDLED | 1.1m (from bundled params) | +| `Posture.Standing.Fcyl` | Internal constant | ✅ BUNDLED | 0.28 (from bundled params) | +| `Posture.Sitting.*` | Internal constant | ✅ BUNDLED | Similar for sitting posture | + +### Human Body Parameters (PET/UTCI) + +| Old API (params.json) | New API | Status | Notes | +|---------------------------------|-----------------------------------|-------------|-------| +| `PET_settings.Age` | `HumanParams(age=35)` | ✅ BUNDLED | Default 35 in bundled params | +| `PET_settings.Weight` | 
`HumanParams(weight=75)` | ✅ BUNDLED | Default 75 kg in bundled params | +| `PET_settings.Height` | `HumanParams(height=180)` | ✅ BUNDLED | Default 180 cm in bundled params | +| `PET_settings.Sex` | `HumanParams(sex="Male")` | ✅ BUNDLED | "Male" or "Female" | +| `PET_settings.Activity` | `HumanParams(activity=80)` | ✅ BUNDLED | Default 80 W in bundled params | +| `PET_settings.clo` | `HumanParams(clothing=0.9)` | ✅ BUNDLED | Default 0.9 clo in bundled params | +| `Wind_Height.magl` | `weather.wind_speed_height` | ⏳ TODO | Currently assumes 10m (planned) | + +### Vegetation Parameters + +| Old API (params.json) | New API | Status | Notes | +|------------------------------------|-----------------------------------|-------------|-------| +| `Tree_settings.Transmissivity` | Bundled default (0.03) | ✅ BUNDLED | Leaf-on transmissivity | +| `Tree_settings.Trunk_ratio` | Bundled default (0.25) | ✅ BUNDLED | Trunk height as fraction of total | +| `Tree_settings.First_day_leaf` | Bundled default (97 = ~Apr 7) | ✅ BUNDLED | Day of year for leaf-on | +| `Tree_settings.Last_day_leaf` | Bundled default (300 = ~Oct 27) | ✅ BUNDLED | Day of year for leaf-off | +| Conifer override | `conifer=True` parameter | ✅ DIRECT | Forces always-leaf-on if True | + +### Landcover-Specific Properties (Material Library) + +These require **custom params file** with landcover definitions: + +| Old API (params.json) | New API | Status | Notes | +|------------------------------------|-----------------------------------|-------------|-------| +| `Names.Value.*` | `load_params("custom.json")` | ⏳ TODO | Landcover class names | +| `Code.Value.*` | `load_params("custom.json")` | ⏳ TODO | Landcover class IDs | +| `Albedo.Effective.Value.*` | `load_params("custom.json")` | ⏳ TODO | Surface albedo per class | +| `Albedo.Material.Value.*` | `load_params("custom.json")` | ⏳ TODO | Wall albedo per material | +| `Emissivity.Value.*` | `load_params("custom.json")` | ⏳ TODO | Surface emissivity per class | +| 
`Specific_heat.Value.*` | `load_params("custom.json")` | ⏳ TODO | Wall thermal properties | +| `Thermal_conductivity.Value.*` | `load_params("custom.json")` | ⏳ TODO | Wall thermal properties | +| `Density.Value.*` | `load_params("custom.json")` | ⏳ TODO | Wall thermal properties | +| `Wall_thickness.Value.*` | `load_params("custom.json")` | ⏳ TODO | Wall thermal properties | +| `TmaxLST.Value.*` | `load_params("custom.json")` | ⏳ TODO | Ground temperature model | +| `Ts_deg.Value.*` | `load_params("custom.json")` | ⏳ TODO | Ground temperature model | +| `Tstart.Value.*` | `load_params("custom.json")` | ⏳ TODO | Ground temperature model | + +**Note:** Landcover-specific properties are not in bundled defaults because they're highly site-specific. Users who need material variation must provide a custom params file. + +--- + +## Automatic Computations and Preparation + +The following values were previously required inputs but are now **computed automatically** or **prepared/cached automatically during `SurfaceData.prepare()`**: + +| Parameter | Old API | New API | Notes | +|----------------------------|--------------------------|------------------|-------| +| Sun position (azimuth, altitude) | Pre-computed or manual | Auto from datetime + location | Uses `weather.compute_derived()` | +| Max DSM height | Manual specification | Auto from DSM | Computed: `surface.max_height = dsm.max()` | +| Direct/diffuse radiation split | Pre-computed | Auto from clearness | Reindl model | +| Location (lat/lon) | Manual or from EPW | Auto from CRS | `Location.from_surface(surface)` | +| Shadow matrices | Pre-computed NPZ files | Auto-generated | Prepared/cached during surface preparation | +| Wall heights/aspects | Pre-computed TIF files | Auto-generated | Prepared/cached during surface preparation | +| Sky View Factor | Pre-computed ZIP files | Auto-generated | Prepared via `SurfaceData.prepare()` (or `surface.compute_svf()`) before `calculate*()` | + +--- + +## Usage Examples + +### 
Minimal (uses all bundled defaults) + +```python +import solweig + +surface = solweig.SurfaceData.prepare( + dsm="dsm.tif", + working_dir="cache/", +) + +weather = solweig.Weather.from_epw("weather.epw", start="2023-07-01", end="2023-07-01") + +results = solweig.calculate_timeseries( + surface=surface, + weather_series=weather, + output_dir="output/", +) +# Uses bundled defaults: +# - abs_k=0.7, abs_l=0.95, posture="standing" +# - Vegetation transmissivity=0.03, deciduous trees +# - Sky model from ModelConfig defaults (override explicitly if needed) +``` + +### With direct parameters + +```python +results = solweig.calculate_timeseries( + surface=surface, + weather_series=weather, + use_anisotropic_sky=True, # Enable Perez diffuse model + conifer=True, # Evergreen trees (always leaf-on) + output_dir="output/", +) +``` + +### With custom human parameters + +```python +results = solweig.calculate_timeseries( + surface=surface, + weather_series=weather, + human=solweig.HumanParams( + abs_k=0.65, # Lower shortwave absorption + abs_l=0.97, # Higher longwave absorption + posture="sitting", + weight=70, # For PET post-processing + height=1.65, + ), + output_dir="output/", +) +``` + +### With custom landcover parameters + +```python +# Load custom material library +params = solweig.load_params("parametersforsolweig.json") + +# Requires land_cover grid in SurfaceData (TODO - Phase 3) +surface = solweig.SurfaceData.prepare( + dsm="dsm.tif", + land_cover="landcover.tif", # Grid with class IDs + working_dir="cache/", +) + +results = solweig.calculate_timeseries( + surface=surface, + weather_series=weather, + params=params, # Uses custom albedo/emissivity per class + output_dir="output/", +) +``` + +### Explicit bundled defaults (for inspection) + +```python +# Load bundled defaults to see what's included +params = solweig.load_params() # Uses pysrc/solweig/data/default_params.json +print(f"Default Tmrt absK: {params.Tmrt_params.Value.absK}") +print(f"Default tree 
transmissivity: {params.Tree_settings.Value.Transmissivity}") + +# Can pass explicitly, but not necessary (loaded automatically if params=None) +results = solweig.calculate_timeseries(..., params=params) +``` + +--- + +## Migration Checklist + +If you're migrating from the old config-based API, use this checklist: + +### ✅ Already Handled (No Action Needed) + +- [x] Sun position calculation +- [x] Direct/diffuse radiation split +- [x] Wall generation +- [x] SVF generation +- [x] Shadow matrix generation +- [x] Location extraction from CRS +- [x] Max DSM height +- [x] Default human parameters +- [x] Vegetation seasonal behavior + +### 🔧 Requires New API Usage + +- [ ] **File paths**: Replace config.ini paths with `SurfaceData.prepare()` arguments +- [ ] **EPW loading**: Use `Weather.from_epw()` instead of manual parsing +- [ ] **Output directory**: Use `calculate_timeseries(output_dir=...)` instead of config +- [ ] **Model flags**: Use direct parameters (`use_anisotropic_sky`, `conifer`) instead of config flags + +### ⏳ TODO (Future Work) + +- [ ] **Landcover variation**: Custom params file support (planned Phase 3.6) +- [ ] **Wind height**: Currently assumes 10m (planned parameter) +- [ ] **Custom wall/SVF**: Advanced preprocessing control (manual mode available) + +--- + +## API Design Principles + +The new API follows these design principles: + +1. **Direct parameters for key decisions** + - `use_anisotropic_sky=True/False` - Major model choice + - `conifer=True/False` - Vegetation type + - **NOT** hidden in config object + +2. **Bundled defaults for common constants** + - Human body parameters (absK=0.7, absL=0.95) + - Vegetation parameters (transmissivity=0.03) + - Loaded automatically, overridable + +3. **Custom params for site-specific values** + - Landcover material properties (albedo, emissivity per class) + - Requires explicit JSON file + +4. 
**Automatic for derived values** + - Sun position from datetime + - Max height from DSM + - Location from CRS metadata + +5. **Progressive disclosure** + - Simple case: 3-4 lines of code + - Advanced: Full control via optional parameters + - Expert: Direct access to low-level functions + +--- + +## Status Summary + +| Category | Total | ✅ Complete | ⏳ TODO | 🗑️ Removed | +|----------|-------|------------|---------|-----------| +| **Model Flags** | 7 | 5 | 1 | 1 | +| **File Paths** | 11 | 9 | 1 | 1 | +| **Preprocessing** | 4 | 4 | 0 | 0 | +| **Human Params** | 12 | 12 | 0 | 0 | +| **Vegetation** | 5 | 5 | 0 | 0 | +| **Landcover** | 11 | 0 | 11 | 0 | +| **Automatic** | 8 | 8 | 0 | 0 | +| **TOTAL** | 58 | 43 | 13 | 2 | + +**Overall Progress: 74% Complete (43/58)** + +**Remaining work:** Landcover-specific material properties (Phase 3.6 - High Priority) diff --git a/docs/PARAMS_SIMPLE.md b/docs/PARAMS_SIMPLE.md new file mode 100644 index 0000000..390efa5 --- /dev/null +++ b/docs/PARAMS_SIMPLE.md @@ -0,0 +1,290 @@ +# Parameter Handling - Simple Summary + +**TL;DR:** Old API had 58 config options. New API has 3 parameter types, all with defaults. 99% of users never touch them. + +--- + +## The Three Parameter Types + +### 1. Human Parameters (Person-Specific) +Who is experiencing the thermal environment? + +```python +human = HumanParams( + abs_k=0.7, # Shortwave absorption (0-1) + abs_l=0.95, # Longwave absorption (0-1) + posture="standing", # "standing" or "sitting" + weight=75, # kg (for PET post-processing) + height=180, # cm (for PET post-processing) + age=35, # years (for PET post-processing) + activity=80, # W (for PET post-processing) + clothing=0.9, # clo (for PET post-processing) +) +``` + +**Defaults:** abs_k=0.7, abs_l=0.95, standing, 75kg, 180cm, 35yo, 80W, 0.9 clo + +**When to customize:** Different body characteristics, sitting posture + +--- + +### 2. Physics Parameters (Site-Independent) +How do vegetation and posture work? 
(Universal scientific constants) + +```python +physics = load_physics("custom_trees.json") # Optional +``` + +Contains: +- `Tree_settings`: Transmissivity (0.03), seasonal dates (day 97-300), trunk ratio (0.25) +- `Posture`: Geometry for standing/sitting (Fside, Fup, Fcyl, height) + +**Defaults:** Bundled in package (`physics_defaults.json`) + +**When to customize:** Different tree species, different seasonal periods + +--- + +### 3. Material Library (Site-Specific) +What is the ground/buildings made of? + +```python +materials = load_materials("site_materials.json") # Required if landcover grid +``` + +Contains per-landcover-class values: +- Albedo, Emissivity +- Ground temperature model parameters (TmaxLST, Ts_deg, Tstart) +- Wall thermal properties (specific heat, conductivity, density, thickness) + +**Defaults:** None (only needed if you have landcover grid) + +**When to customize:** You have a landcover classification grid with different surface types + +--- + +## Model Behavior Flags + +Two direct parameters control major model behavior: + +### `use_anisotropic_sky` (default: follows `ModelConfig`, currently `True`) +- `False` = Simpler isotropic sky model +- `True` = Perez anisotropic sky model +- **When to change:** Research papers, high-accuracy work +If explicitly set to `True`, shadow matrices must already be prepared. + +### `conifer` (default: `False`) +- `False` = Deciduous trees (seasonal leaf on/off) +- `True` = Evergreen trees (always have leaves) +- **When to change:** Your site has pine/spruce/fir trees + +--- + +## Usage Patterns + +### 99% of users (all defaults) +```python +import solweig + +surface = solweig.SurfaceData.prepare(dsm="dsm.tif", working_dir="cache/") +weather = solweig.Weather.from_epw("weather.epw", start="2023-07-01") +results = solweig.calculate_timeseries(surface, weather, output_dir="output/") + +# All parameters use bundled defaults - nothing to configure! 
+
```

### Custom human parameters (common)
```python
results = solweig.calculate_timeseries(
    surface, weather,
    human=solweig.HumanParams(weight=70, height=165, posture="sitting"), # height in cm
    output_dir="output/",
)
```

### Better accuracy (anisotropic sky)
```python
results = solweig.calculate_timeseries(
    surface, weather,
    use_anisotropic_sky=True, # <-- Slower, more accurate
    output_dir="output/",
)
```

### Evergreen trees
```python
results = solweig.calculate_timeseries(
    surface, weather,
    conifer=True, # <-- Always leaf-on
    output_dir="output/",
)
```

### Custom physics (rare)
```python
# Create custom_trees.json with different transmissivity:
# {
#   "Tree_settings": {"Value": {"Transmissivity": 0.05, ...}},
#   "Posture": {"Standing": {...}, "Sitting": {...}}
# }

physics = solweig.load_physics("custom_trees.json")
results = solweig.calculate_timeseries(surface, weather, physics=physics, output_dir="output/")
```

### Landcover material variation (advanced)
```python
# Requires: landcover grid (classification raster with class IDs)
# Requires: materials file with properties per class

materials = solweig.load_materials("site_materials.json")
surface = solweig.SurfaceData.prepare(
    dsm="dsm.tif",
    land_cover="landcover.tif", # Grid with surface type IDs (0-7, 99-102)
    working_dir="cache/",
)
results = solweig.calculate_timeseries(surface, weather, materials=materials, output_dir="output/")
```

---

## Decision Tree

**Do you need to customize human characteristics?**
- Yes → `human=HumanParams(weight=..., height=..., posture=...)`
- No → Use defaults

**Do you have evergreen trees?**
- Yes → `conifer=True`
- No → Use defaults

**Do you need research-grade accuracy?**
- Yes → `use_anisotropic_sky=True` (slower, more accurate)
- No → Use defaults

**Do you have different tree species or seasonal periods?**
- Yes → Create custom physics file, `physics=load_physics("custom.json")`
- No → Use 
bundled defaults + +**Do you have a landcover grid with different surface materials?** +- Yes → Create materials file, `materials=load_materials("site_materials.json")` +- No → Use uniform defaults + +**Everything else?** +- Use defaults! + +--- + +## Conceptual Separation + +The three parameter types are **conceptually distinct**: + +| Type | What | Example | When Needed | +|------|------|---------|-------------| +| **human** | Person characteristics | Weight, height, absorption | Custom body properties | +| **physics** | Universal constants | Tree transmissivity, posture geometry | Different tree species | +| **materials** | Landcover properties | Albedo per surface type | Spatial material variation | + +This separation makes it clear: +- `human` = **WHO** is experiencing the thermal environment +- `physics` = **HOW** vegetation and posture work (universal science) +- `materials` = **WHAT** the ground/buildings are made of (site-specific) + +--- + +## What Happened to Everything Else? + +### Now Automatic (28 things) +- Sun position → Computed from datetime + location +- Location → Extracted from DSM file metadata +- Walls → Generated and cached automatically +- SVF → Generated and cached automatically +- Direct/diffuse radiation split → Computed +- Max building height → Computed from DSM +- Many more... 
+ +### Now Bundled Defaults (Physics) +Site-independent constants in `physics_defaults.json`: +- Tree transmissivity: 0.03 +- Seasonal dates: Day 97-300 (~April-October) +- Trunk ratio: 0.25 +- Posture geometry: Standing/sitting projected areas + +### Now Bundled Defaults (Human) +Person characteristics: +- Shortwave absorption: 0.7 +- Longwave absorption: 0.95 +- Posture: Standing +- Weight: 75 kg, Height: 180 cm +- Age: 35, Activity: 80 W +- Clothing: 0.9 clo + +**You don't need to think about these unless you want custom values.** + +### Advanced: Landcover-Specific (Materials) +Material properties per surface type (asphalt, grass, concrete, etc.): +- Albedo, emissivity, thermal properties +- **Only needed if you have a landcover grid** +- Requires custom `materials.json` file + +--- + +## What If I Need Fine Control? + +Three levels of control: + +### Level 1: Direct parameters (most users) +```python +calculate_timeseries( + ..., + use_anisotropic_sky=True, + conifer=True, + human=HumanParams(weight=70), +) +``` + +### Level 2: Custom physics or materials (advanced) +```python +physics = solweig.load_physics("my_physics.json") +materials = solweig.load_materials("my_materials.json") +calculate_timeseries(..., physics=physics, materials=materials) +``` + +### Level 3: Manual preprocessing (experts) +```python +solweig.walls.generate_wall_hts(dsm_path="dsm.tif", out_dir="walls/") +solweig.svf.generate_svf(dsm_path="dsm.tif", out_dir="svf/") +surface = solweig.SurfaceData.prepare(dsm="dsm.tif", working_dir="manual/") +``` + +--- + +## Backwards Compatibility + +The old unified `params.json` file (220 lines with human + physics + materials) is still supported: + +```python +# Legacy unified params (still works for backwards compatibility) +params = solweig.load_params("parametersforsolweig.json") +results = solweig.calculate_timeseries(surface, weather, params=params, output_dir="output/") +``` + +But the new three-parameter model is clearer and more flexible. 
+ +--- + +## Summary + +**Before:** 58 configuration options, 2 config files, manual preprocessing + +**After:** 3 parameter types (all with defaults), everything else automatic + +| Parameter | Purpose | Default | Customization | +|-----------|---------|---------|---------------| +| `human` | Person characteristics | Standing, 75kg, 180cm | `HumanParams(...)` object | +| `physics` | Universal constants | Bundled in package | `load_physics("custom.json")` | +| `materials` | Landcover properties | Not needed if no LC grid | `load_materials("site.json")` | +| `use_anisotropic_sky` | Sky model accuracy | False (faster) | Set to True | +| `conifer` | Tree type | False (deciduous) | Set to True | + +**The point:** Start simple. Add complexity only if you need it. diff --git a/docs/RUN_METADATA.md b/docs/RUN_METADATA.md new file mode 100644 index 0000000..b081224 --- /dev/null +++ b/docs/RUN_METADATA.md @@ -0,0 +1,395 @@ +# Run Metadata and Provenance + +**TL;DR:** Every calculation automatically saves a `run_metadata.json` file that captures all parameters, inputs, and configuration for perfect reproducibility. + +--- + +## What is Run Metadata? + +Run metadata is a complete record of all parameters and configuration used in a SOLWEIG calculation. This enables: + +1. **Reproducibility** - Re-run the exact same calculation months later +2. **Audit Trail** - Document what parameters were used for publications or reports +3. **Debugging** - Understand why results differ between runs +4. 
**Archiving** - Save complete experimental setup alongside results + +--- + +## Automatic Metadata Saving + +When you use `calculate_timeseries()` with `output_dir` specified, metadata is **automatically saved**: + +```python +import solweig + +surface = solweig.SurfaceData.prepare(dsm="dsm.tif", working_dir="cache/") +weather = solweig.Weather.from_epw("weather.epw", start="2023-07-01") + +# This automatically saves run_metadata.json to output_dir +results = solweig.calculate_timeseries( + surface, weather, + human=solweig.HumanParams(weight=70, height=1.65), + use_anisotropic_sky=True, + output_dir="output/", # <-- Triggers automatic metadata saving +) + +# Metadata is now saved at: output/run_metadata.json +``` + +**No extra work needed!** The metadata file is created automatically when `output_dir` is provided. + +--- + +## What's Captured? + +The metadata file records everything needed to reproduce a calculation: + +### 1. Execution Info +- Timestamp of calculation +- SOLWEIG version used +- Compute backend (CPU/GPU) + +### 2. Location +- Latitude, longitude, altitude +- UTC offset + +### 3. Model Flags +- `use_anisotropic_sky`: Sky model type +- `conifer`: Evergreen vs deciduous trees +- `use_legacy_kelvin_offset`: Backward compatibility flag + +### 4. Human Parameters +- Posture (standing/sitting) +- Absorption coefficients (shortwave/longwave) +- Body metrics (age, weight, height) +- Activity level and clothing insulation + +### 5. Physics Parameters +- Whether custom physics file was used +- Path to custom physics file (if any) +- Full physics parameters (if custom) + +### 6. Materials Parameters +- Whether materials were used +- Path to materials file (if any) +- Full materials parameters (if used) + +### 7. Surface Inputs +- Paths to DSM, CDSM, landcover files +- Bounding box and pixel size +- CRS (coordinate reference system) +- Grid dimensions + +### 8. 
Weather Info +- Path to EPW file or other weather source +- Number of timesteps +- Date range (start and end) + +### 9. Outputs +- Output directory path +- List of output types saved + +--- + +## Loading and Inspecting Metadata + +Load metadata to inspect or verify parameters: + +```python +import solweig + +# Load metadata from previous run +metadata = solweig.load_run_metadata("output/run_metadata.json") + +# Inspect key parameters +print(f"Calculation performed: {metadata['timestamp']}") +print(f"SOLWEIG version: {metadata['solweig_version']}") +print(f"Location: {metadata['location']['latitude']:.2f}°N") +print(f"Human posture: {metadata['human_params']['posture']}") +print(f"Anisotropic sky: {metadata['model_flags']['use_anisotropic_sky']}") +print(f"Weather period: {metadata['weather']['date_range']}") +``` + +--- + +## Manual Metadata Creation + +For custom workflows, create metadata manually: + +```python +import solweig + +# Prepare your calculation +surface = solweig.SurfaceData.prepare(dsm="dsm.tif", working_dir="cache/") +weather = solweig.Weather.from_epw("weather.epw") +location = solweig.Location.from_surface(surface) +human = solweig.HumanParams(weight=70) + +# Create metadata +metadata = solweig.create_run_metadata( + surface=surface, + location=location, + weather_series=weather, + weather_source_path="weather.epw", + human=human, + use_anisotropic_sky=True, + output_dir="output/", + outputs=["tmrt", "shadow"], +) + +# Save to custom location +solweig.save_run_metadata(metadata, output_dir="custom_dir/", filename="my_metadata.json") +``` + +--- + +## Example Metadata File + +Here's what a typical `run_metadata.json` looks like: + +```json +{ + "timestamp": "2024-07-15T14:30:22.123456", + "solweig_version": "0.0.1a1", + "compute_backend": "cpu", + "location": { + "latitude": 37.98, + "longitude": 23.73, + "altitude": 0.0, + "utc_offset": 2 + }, + "model_flags": { + "use_anisotropic_sky": true, + "conifer": false, + "use_legacy_kelvin_offset": false 
+ }, + "human_params": { + "posture": "standing", + "abs_k": 0.7, + "abs_l": 0.95, + "age": 35, + "weight": 75, + "height": 180, + "activity": 80, + "clothing": 0.9 + }, + "physics": { + "custom": false, + "path": null + }, + "materials": { + "used": false, + "path": null + }, + "surface": { + "dsm_path": "/path/to/DSM.tif", + "cdsm_path": "/path/to/CDSM.tif", + "land_cover_path": null, + "bbox": [476800, 4205850, 477200, 4206250], + "pixel_size": 1.0, + "crs_wkt": "PROJCS[...]", + "shape": [400, 400] + }, + "weather": { + "source_path": "/path/to/athens_2023.epw", + "num_timesteps": 72, + "date_range": ["2023-07-01T00:00:00", "2023-07-03T23:00:00"] + }, + "outputs": { + "output_dir": "/path/to/output", + "outputs": ["tmrt", "shadow"] + } +} +``` + +--- + +## Use Cases + +### Research Publications + +Document exact parameters for reproducible science: + +```python +# Run calculation +results = solweig.calculate_timeseries( + surface, weather, + human=solweig.HumanParams(weight=75, height=1.80), + use_anisotropic_sky=True, + output_dir="paper_results/", +) + +# Metadata is saved automatically - include it in supplementary materials +# Readers can reproduce your exact calculation +``` + +### Comparing Runs + +Compare metadata from different runs to understand differences: + +```python +# Load metadata from two runs +meta_run1 = solweig.load_run_metadata("run1/run_metadata.json") +meta_run2 = solweig.load_run_metadata("run2/run_metadata.json") + +# Compare key parameters +print("Run 1 posture:", meta_run1['human_params']['posture']) +print("Run 2 posture:", meta_run2['human_params']['posture']) + +print("Run 1 sky model:", meta_run1['model_flags']['use_anisotropic_sky']) +print("Run 2 sky model:", meta_run2['model_flags']['use_anisotropic_sky']) +``` + +### Archival and Documentation + +Save complete experimental setup alongside results: + +```python +# Your calculation produces: +# output/ +# ├── run_metadata.json <-- Complete parameter record +# ├── 
tmrt_2023-07-01_1200.tif +# ├── tmrt_2023-07-01_1300.tif +# └── ... + +# Archive the entire directory - everything needed to reproduce the calculation +``` + +### Debugging + +Verify parameters when results seem unexpected: + +```python +metadata = solweig.load_run_metadata("output/run_metadata.json") + +# Check if anisotropic sky was actually enabled +if not metadata['model_flags']['use_anisotropic_sky']: + print("Warning: Anisotropic sky was disabled!") + +# Check human parameters +if metadata['human_params']['posture'] != 'standing': + print(f"Note: Results are for {metadata['human_params']['posture']} posture") +``` + +--- + +## Custom Physics and Materials + +When using custom physics or materials files, the **full parameters are saved** in the metadata: + +```python +# Load custom physics +physics = solweig.load_physics("custom_trees.json") + +# Calculate with custom physics +results = solweig.calculate_timeseries( + surface, weather, + physics=physics, + output_dir="output/", +) + +# Metadata now includes: +# - physics.custom: true +# - physics.path: "custom_trees.json" +# - physics.full_params: {...complete physics parameters...} +``` + +This ensures the metadata is **self-contained** - you don't need to keep track of the separate physics file. + +--- + +## Backward Compatibility Notes + +The metadata system is designed to **complement**, not replace, the old config file approach. + +**Old workflow (still supported):** +```python +# Legacy API with config files +SRR = solweig.SolweigRunRust( + "configsolweig.ini", + "parametersforsolweig.json" +) +SRR.run() +# No automatic metadata saved +``` + +**New workflow:** +```python +# Simplified API with automatic metadata +results = solweig.calculate_timeseries( + surface, weather, + output_dir="output/", +) +# Metadata automatically saved to output/run_metadata.json +``` + +The metadata format is JSON-based and **not intended** to be a drop-in replacement for the old `.ini` config format. 
Instead, it provides a **more complete** record that includes: +- Runtime information (timestamp, version) +- Derived values (auto-extracted location) +- Complete parameter sets (physics, materials) + +--- + +## API Reference + +### `create_run_metadata()` + +Create a metadata dictionary for a SOLWEIG run. + +**Parameters:** +- `surface`: SurfaceData object +- `location`: Location object +- `weather_series`: List of Weather objects (optional) +- `weather_source_path`: Path to EPW file (optional) +- `human`: HumanParams object (optional, uses defaults if None) +- `physics`: Physics parameters from load_physics() (optional) +- `physics_path`: Path to custom physics file (optional) +- `materials`: Materials from load_materials() (optional) +- `materials_path`: Path to materials file (optional) +- `use_anisotropic_sky`: Anisotropic sky flag +- `conifer`: Conifer mode flag +- `output_dir`: Output directory path (optional) +- `outputs`: List of output types (optional) +- `use_legacy_kelvin_offset`: Backward compatibility flag + +**Returns:** Dictionary containing complete metadata + +--- + +### `save_run_metadata()` + +Save metadata dictionary to JSON file. + +**Parameters:** +- `metadata`: Metadata dict from create_run_metadata() +- `output_dir`: Directory to save metadata file +- `filename`: Filename (default: "run_metadata.json") + +**Returns:** Path to saved metadata file + +--- + +### `load_run_metadata()` + +Load metadata from JSON file. 
+ +**Parameters:** +- `metadata_path`: Path to metadata JSON file + +**Returns:** Metadata dictionary + +--- + +## Summary + +**Automatic:** Metadata is saved automatically when you use `output_dir` + +**Complete:** Captures all parameters, inputs, and configuration + +**Reproducible:** Contains everything needed to re-run the exact calculation + +**Self-contained:** Includes full custom physics/materials (not just paths) + +**Future-proof:** Version information enables backward compatibility + +**The point:** Perfect reproducibility with zero extra effort. diff --git a/docs/SOLWEIG_VALIDATION_RESEARCH.md b/docs/SOLWEIG_VALIDATION_RESEARCH.md new file mode 100644 index 0000000..6045dd5 --- /dev/null +++ b/docs/SOLWEIG_VALIDATION_RESEARCH.md @@ -0,0 +1,252 @@ +# SOLWEIG Validation Research Summary + +*Research conducted: January 2026* + +## Overview + +This document summarizes research into the academic background of the SOLWEIG model, its validation methodology, and available datasets for replicating first-principles validation. + +--- + +## 1. Academic Background + +### Original Paper + +**Lindberg, F., Holmer, B. & Thorsson, S. (2008)** +"SOLWEIG 1.0 – Modelling spatial variations of 3D radiant fluxes and mean radiant temperature in complex urban settings" +*International Journal of Biometeorology* 52, 697–713 +DOI: [10.1007/s00484-008-0162-7](https://doi.org/10.1007/s00484-008-0162-7) + +### Key Subsequent Papers + +| Year | Authors | Focus | Journal | +|------|---------|-------|---------| +| 2011 | Lindberg & Grimmond | Vegetation scheme | Theoretical and Applied Climatology | +| 2016 | Lindberg, Onomura & Grimmond | Ground surface characteristics | Int. J. Biometeorology | +| 2025 | Lindberg et al. | Wall surface temperature scheme | EGUsphere (preprint) | + +### Research Group + +**Göteborg Urban Climate Group (GUCG)** +Department of Earth Sciences, University of Gothenburg, Sweden +Website: https://www.gu.se/en/research/gucg + +--- + +## 2. 
Original Validation Methodology (2008) + +### Study Design + +| Aspect | Details | +|--------|---------| +| **Location** | Göteborg, Sweden (57°N) | +| **Sites** | Large open square + small courtyard | +| **Duration** | 7 days across multiple seasons | +| **Periods** | October 2005, July/August 2006 | +| **Conditions** | Clear to overcast weather | + +### Measurement Method + +The **six-directional integral radiation method** (ISO 7726 standard): + +- Measures shortwave and longwave radiation from 6 directions (up, down, N, S, E, W) +- Angular weighting factors: **0.22** for cardinal directions, **0.06** for up/down +- Instruments positioned at **1.1m height** (center of gravity for standing person) +- Requires pyranometers (shortwave) and pyrgeometers (longwave) + +### Validation Results + +| Metric | Value | +|--------|-------| +| R² | 0.94 | +| RMSE | 4.8 K | +| p-value | < 0.01 | + +### Comparative Performance + +Studies comparing SOLWEIG against other models (RayMan, ENVI-met) consistently show SOLWEIG performs best for Tmrt estimation: + +- Hong Kong study (670 sites): SOLWEIG showed best correlation with six-directional measurements +- Cold region study: SOLWEIG exhibited better determination performance than RayMan and ENVI-met + +--- + +## 3. 
Available Public Datasets + +### 3.1 UMEP Göteborg Tutorial Dataset + +**Source:** [GitHub - Urban Meteorology Reading](https://github.com/Urban-Meteorology-Reading/Urban-Meteorology-Reading.github.io/tree/master/other%20files/Goteborg_SWEREF99_1200.zip) + +| Contents | Format | +|----------|--------| +| DSM (Digital Surface Model) | GeoTIFF | +| CDSM (Canopy DSM) | GeoTIFF | +| DEM (Digital Elevation Model) | GeoTIFF | +| Land cover / ground cover | GeoTIFF | +| Study area boundary | Shapefile | + +**Coordinate System:** SWEREF99 1200 (EPSG:3007) + +**Use Case:** Running SOLWEIG simulations (model inputs only, no validation measurements) + +--- + +### 3.2 Swedish National Data Service - Gothenburg Climate Data + +**Source:** [researchdata.se](https://researchdata.se/en/catalogue/dataset/2021-253-1) +**DOI:** 10.5878/a2h2-4s63 + +| Variable | Unit | +|----------|------| +| Air temperature | °C | +| Wind speed (average) | m/s | +| Wind direction | degrees | +| Relative humidity | % | +| Global radiation | W/m² | +| Diffuse radiation | W/m² | +| Direct-beam radiation | W/m² | +| Mean sea-level pressure | hPa | + +**Period:** September 1986 – December 2020 +**Resolution:** Hourly +**Format:** CSV, NetCDF + +**Use Case:** Meteorological forcing data for SOLWEIG runs + +**Note:** Does NOT include Tmrt or six-directional radiation measurements + +--- + +### 3.3 Zenodo SOLWEIG v2025 Validation Dataset + +**Source:** [Zenodo Record 15309445](https://zenodo.org/records/15309445) + +#### Files Available + +| File | Size | Contents | +|------|------|----------| +| `geodata.zip` | 86.7 kB | Urban geometry for validation site | +| `kolumbus.csv` | 1.2 MB | **Wall surface temperature validation data** | +| `metdata_10min_may.txt` | ~700 kB | Meteorological forcing | +| `metdata_10min_june.txt` | ~668 kB | Meteorological forcing | +| `metdata_10min_july.txt` | ~743 kB | Meteorological forcing | +| `metdata_10min_august.txt` | ~743 kB | Meteorological forcing | + +#### 
kolumbus.csv Details + +| Aspect | Details | +|--------|---------| +| **Variable** | Wall surface temperature (Ts) | +| **Period** | 2023-05-15 to 2023-08-31 | +| **Resolution** | 10-minute intervals | +| **Observations** | ~15,400 measurements | +| **Surfaces** | Wooden wall + plastered brick wall (albedo ≈ 0.5) | +| **Instrument** | Apogee SI-111 infrared radiometer at 10cm from wall | +| **Reported accuracy** | R² = 0.93-0.94, RMSE = 1.94-2.09°C | + +**Use Case:** Validation of wall surface temperature calculation (intermediate variable in SOLWEIG) + +--- + +### 3.4 Datasets NOT Publicly Available + +| Dataset | Status | How to Obtain | +|---------|--------|---------------| +| Original 2008 Göteborg Tmrt measurements | Not archived | Contact authors | +| Six-directional radiation data (2005-2006) | Not archived | Contact authors | +| Hong Kong 670-site validation data | On request | Contact paper authors | +| Singapore thermal comfort data | On request | Singapore-ETH Centre | + +--- + +## 4. Gap Analysis + +### What First-Principles Validation Requires + +1. **Urban geometry** (DSMs, land cover) ✅ Available +2. **Meteorological forcing** (radiation, Ta, RH) ✅ Available +3. **Ground-truth Tmrt measurements** ❌ Not publicly available + +### Current Test Strategy (This Repository) + +| Layer | Purpose | Data Source | +|-------|---------|-------------| +| Spec property tests | Physical invariants | Synthetic data | +| Golden regression tests | Numerical drift detection | UMEP reference outputs | +| Parity tests | API vs runner match | UMEP implementation | + +**Target:** Tmrt bias < 0.5°C against reference implementation + +This validates **implementation correctness** but not **physical accuracy** against real-world observations. + +--- + +## 5. Recommendations + +### Short-term (No external data needed) + +1. 
**Physics-based unit tests** + - Verify Tmrt formula: `Tmrt⁴ = (1/(αL·σ)) × Σ[αk·Ki·Fi + αL·Li·Fi]` (Sstr divided by absorption coefficient × Stefan–Boltzmann constant, per Thorsson et al. 2007) + - Verify angular weighting factors (0.22/0.06) + - Verify clear-sky radiation (I₀) against astronomical calculations + +2. **UTCI reference validation** + - Compare against [Bröde et al. reference implementation](https://utci.org/) + +3. **Wall Ts validation** + - Download kolumbus.csv from Zenodo + - Run SOLWEIG with 2023 Gothenburg data + - Compare wall temperatures (target: R² > 0.9, RMSE < 2.5°C) + +### Medium-term (Requires author contact) + +4. **Request original validation data** + - Contact Fredrik Lindberg (University of Gothenburg) + - Request 2005-2006 six-directional radiation measurements + - Academic researchers often share data for reproducibility + +### Long-term (If resources available) + +5. **Conduct independent validation campaign** + - Six-directional radiation measurements at known location + - Paired with high-resolution DSM from LiDAR + - Would provide fully independent validation + +--- + +## 6. References + +### Primary SOLWEIG Papers + +1. Lindberg, F., Holmer, B. & Thorsson, S. (2008). SOLWEIG 1.0 – Modelling spatial variations of 3D radiant fluxes and mean radiant temperature in complex urban settings. *Int. J. Biometeorol.* 52, 697–713. + +2. Lindberg, F. & Grimmond, C.S.B. (2011). The influence of vegetation and building morphology on shadow patterns and mean radiant temperatures in urban areas. *Theor. Appl. Climatol.* 105, 311–323. + +3. Lindberg, F., Onomura, S. & Grimmond, C.S.B. (2016). Influence of ground surface characteristics on the mean radiant temperature in urban areas. *Int. J. Biometeorol.* 60, 1439–1452. + +### Validation Methodology + +4. Thorsson, S., Lindberg, F., Eliasson, I. & Holmer, B. (2007). Different methods for estimating the mean radiant temperature in an outdoor urban setting. *Int. J. Climatol.* 27, 1983–1993. + +### Comparative Studies + +5. Chen, L. et al. (2024). 
Estimation of mean radiant temperature across diverse outdoor spaces: A comparative study of different modeling approaches. *Energy and Buildings* 308, 113999. + +### Standards + +6. ISO 7726:1998. Ergonomics of the thermal environment — Instruments for measuring physical quantities. + +--- + +## 7. Contact Information + +**For original validation data:** + +- Fredrik Lindberg - Department of Earth Sciences, University of Gothenburg +- Sofia Thorsson - Department of Earth Sciences, University of Gothenburg + +**UMEP/SOLWEIG resources:** + +- Documentation: https://umep-docs.readthedocs.io/ +- GitHub: https://github.com/UMEP-dev/UMEP +- Zenodo (v2025): https://zenodo.org/records/15309384 diff --git a/docs/api/dataclasses.md b/docs/api/dataclasses.md new file mode 100644 index 0000000..4623859 --- /dev/null +++ b/docs/api/dataclasses.md @@ -0,0 +1,98 @@ +# Data Classes + +## SurfaceData + +::: solweig.SurfaceData + options: + show_source: false + heading_level: 3 + +--- + +## Location + +::: solweig.Location + options: + show_source: false + heading_level: 3 + +--- + +## Weather + +::: solweig.Weather + options: + show_source: false + heading_level: 3 + +--- + +## HumanParams + +::: solweig.HumanParams + options: + show_source: false + heading_level: 3 + +--- + +## ModelConfig + +::: solweig.ModelConfig + options: + show_source: false + heading_level: 3 + +--- + +## SolweigResult + +::: solweig.SolweigResult + options: + show_source: false + heading_level: 3 + +--- + +## TimeseriesSummary + +::: solweig.TimeseriesSummary + options: + show_source: false + heading_level: 3 + +--- + +## Timeseries + +::: solweig.Timeseries + options: + show_source: false + heading_level: 3 + +--- + +## PrecomputedData + +::: solweig.PrecomputedData + options: + show_source: false + heading_level: 3 + +--- + +## ThermalState + +::: solweig.models.state.ThermalState + options: + show_source: false + heading_level: 3 + +--- + +## TileSpec + +::: solweig.TileSpec + options: + show_source: 
false + heading_level: 3 diff --git a/docs/api/errors.md b/docs/api/errors.md new file mode 100644 index 0000000..966e1d6 --- /dev/null +++ b/docs/api/errors.md @@ -0,0 +1,113 @@ +# Error Handling + +SOLWEIG provides structured exceptions for clear error messages and easy handling. + +## Exception Hierarchy + +``` +SolweigError (base) +├── InvalidSurfaceData +├── GridShapeMismatch +├── MissingPrecomputedData +├── WeatherDataError +└── ConfigurationError +``` + +## Catching Errors + +```python +import solweig +from solweig.errors import GridShapeMismatch, MissingPrecomputedData, SolweigError + +try: + result = solweig.calculate(surface, location, weather) +except GridShapeMismatch as e: + print(f"Grid mismatch: {e.field}") + print(f" Expected: {e.expected}") + print(f" Got: {e.got}") +except MissingPrecomputedData as e: + print(f"Missing data: {e}") + print(f" Hint: {e.hint}") +except SolweigError as e: + # Catch any SOLWEIG error + print(f"Error: {e}") +``` + +--- + +## SolweigError + +::: solweig.errors.SolweigError + options: + show_source: false + heading_level: 3 + +--- + +## GridShapeMismatch + +::: solweig.errors.GridShapeMismatch + options: + show_source: false + heading_level: 3 + +--- + +## MissingPrecomputedData + +::: solweig.errors.MissingPrecomputedData + options: + show_source: false + heading_level: 3 + +--- + +## InvalidSurfaceData + +::: solweig.errors.InvalidSurfaceData + options: + show_source: false + heading_level: 3 + +--- + +## WeatherDataError + +::: solweig.errors.WeatherDataError + options: + show_source: false + heading_level: 3 + +--- + +## ConfigurationError + +::: solweig.errors.ConfigurationError + options: + show_source: false + heading_level: 3 + +--- + +## Pre-flight Validation + +Use `validate_inputs()` to catch errors before expensive computations: + +```python +from solweig.errors import GridShapeMismatch, MissingPrecomputedData + +try: + warnings = solweig.validate_inputs(surface, location, weather) + for w in warnings: + 
print(f"Warning: {w}") + + # Now safe to run expensive calculation + result = solweig.calculate(surface, location, weather) + +except GridShapeMismatch as e: + print(f"Fix grid shapes before proceeding: {e.field}") +except MissingPrecomputedData as e: + print(f"Missing required data: {e}") +``` + +This catches shape mismatches, missing data, and other issues *before* SVF computation. diff --git a/docs/api/functions.md b/docs/api/functions.md new file mode 100644 index 0000000..8c3aad9 --- /dev/null +++ b/docs/api/functions.md @@ -0,0 +1,36 @@ +# Core Functions + +## calculate + +::: solweig.calculate + options: + show_source: false + heading_level: 3 + +--- + +## calculate_timeseries + +::: solweig.calculate_timeseries + options: + show_source: false + heading_level: 3 + +--- + +## calculate_tiled + +::: solweig.calculate_tiled + options: + show_source: false + heading_level: 3 + +--- + +## validate_inputs + +::: solweig.api.validate_inputs + options: + show_source: false + heading_level: 3 + diff --git a/docs/api/index.md b/docs/api/index.md new file mode 100644 index 0000000..2f79258 --- /dev/null +++ b/docs/api/index.md @@ -0,0 +1,59 @@ +# API Reference + +SOLWEIG provides a clean, minimal API for urban microclimate calculations. 
+ +## Quick Overview + +### Core Functions + +| Function | Description | +|----------|-------------| +| [`calculate()`](functions.md#calculate) | Single timestep Tmrt calculation | +| [`calculate_timeseries()`](functions.md#calculate_timeseries) | Multi-timestep with thermal state | +| [`calculate_tiled()`](functions.md#calculate_tiled) | Large raster processing | +| [`validate_inputs()`](functions.md#validate_inputs) | Pre-flight input validation | + +### Data Classes + +| Class | Description | +|-------|-------------| +| [`SurfaceData`](dataclasses.md#surfacedata) | Terrain data (DSM, CDSM, walls, SVF) | +| [`Location`](dataclasses.md#location) | Geographic coordinates | +| [`Weather`](dataclasses.md#weather) | Meteorological conditions | +| [`HumanParams`](dataclasses.md#humanparams) | Human body parameters | +| [`SolweigResult`](dataclasses.md#solweigresult) | Calculation output | +| [`TimeseriesSummary`](dataclasses.md#timeseriessummary) | Aggregated timeseries output | +| [`Timeseries`](dataclasses.md#timeseries) | Per-timestep scalar timeseries | +| [`ModelConfig`](dataclasses.md#modelconfig) | Model configuration | + +### GPU Utilities + +| Function | Description | +|----------|-------------| +| `is_gpu_available()` | Check if GPU acceleration is available | +| `get_compute_backend()` | Returns `"gpu"` or `"cpu"` | +| `disable_gpu()` | Disable GPU, fall back to CPU | + +## Import Pattern + +```python +import solweig + +# All public API is available at the top level +surface = solweig.SurfaceData(dsm=my_dsm, pixel_size=1.0) +result = solweig.calculate(surface, location, weather) +``` + +## Type Annotations + +SOLWEIG is fully typed. 
Enable type checking in your IDE for the best experience: + +```python +from solweig import SurfaceData, Location, Weather, SolweigResult + +def process_area(dsm: np.ndarray) -> SolweigResult: + surface: SurfaceData = SurfaceData(dsm=dsm, pixel_size=1.0) + location: Location = Location(latitude=57.7, longitude=12.0, utc_offset=1) + weather: Weather = Weather(...) + return solweig.calculate(surface, location, weather) +``` diff --git a/docs/development/architecture.md b/docs/development/architecture.md new file mode 100644 index 0000000..f35e588 --- /dev/null +++ b/docs/development/architecture.md @@ -0,0 +1,166 @@ +# Architecture + +SOLWEIG follows a 4-layer architecture separating concerns cleanly. + +## Layer Overview + +``` +┌─────────────────────────────────────────────┐ +│ Layer 1: User API (api.py) │ +│ calculate(), SurfaceData, Weather, etc. │ +├─────────────────────────────────────────────┤ +│ Layer 2: Orchestration │ +│ computation.py, timeseries.py, tiling.py │ +├─────────────────────────────────────────────┤ +│ Layer 3: Component Functions │ +│ shadows.py, svf.py, radiation.py, etc. │ +├─────────────────────────────────────────────┤ +│ Layer 4: Rust Computation │ +│ rustalgos (via maturin/PyO3) │ +└─────────────────────────────────────────────┘ +``` + +## Layer 1: User API + +**File**: `api.py` (~244 lines) + +Public interface that users import: + +```python +import solweig + +result = solweig.calculate(surface, location, weather) +``` + +Responsibilities: + +- Re-export public classes and functions +- Input validation +- Documentation (docstrings) + +## Layer 2: Orchestration + +**Files**: `computation.py`, `timeseries.py`, `tiling.py` + +Coordinates component functions and manages state: + +```python +# computation.py +def _compute_single_timestep(surface, location, weather, state): + shadows = compute_shadows(...) + svf = resolve_svf(...) + ground = compute_ground_temperature(...) + gvf = compute_gvf(...) + radiation = compute_radiation(...) 
+ tmrt = compute_tmrt(...) + return SolweigResult(...) +``` + +Responsibilities: + +- Call components in correct order +- Manage thermal state across timesteps +- Handle caching and buffer pools +- Coordinate parallel processing + +## Layer 3: Component Functions + +**Directory**: `components/` + +Pure functions that implement physical models: + +| Module | Function | Output | +|--------|----------|--------| +| `shadows.py` | `compute_shadows()` | ShadowBundle | +| `svf_resolution.py` | `resolve_svf()` | SvfBundle | +| `ground.py` | `compute_ground_temperature()` | GroundBundle | +| `gvf.py` | `compute_gvf()` | GvfBundle | +| `radiation.py` | `compute_radiation()` | RadiationBundle | +| `tmrt.py` | `compute_tmrt()` | TmrtResult | + +Design principles: + +- Pure functions (no side effects) +- Explicit inputs and outputs +- Bundle classes for multiple return values +- Testable in isolation + +## Layer 4: Rust Computation + +**Directory**: `rust/` + +Performance-critical algorithms in Rust: + +- `shadowing` - Ray-traced shadow computation +- `skyview` - Sky View Factor calculation +- `gvf` - Ground View Factor +- `vegetation` - Vegetation transmissivity +- `utci` - UTCI polynomial +- `pet` - PET iterative solver + +Exposed to Python via maturin/PyO3: + +```python +from solweig import rustalgos +shadows = rustalgos.compute_shadows(dsm, sun_altitude, sun_azimuth) +``` + +## Data Flow + +``` +SurfaceData ──┐ + │ +Location ─────┼──► calculate() ──► SolweigResult + │ │ │ +Weather ──────┘ │ ├── tmrt + │ ├── shadow + ▼ ├── kdown + Component ├── kup + Functions ├── ldown + │ └── lup + ▼ + Rust Algorithms +``` + +## Bundle Classes + +Components communicate via typed bundles: + +```python +@dataclass +class ShadowBundle: + shadow: np.ndarray # Combined shadow fraction + shadow_building: np.ndarray + shadow_vegetation: np.ndarray + +@dataclass +class RadiationBundle: + kdown: np.ndarray # Downwelling shortwave + kup: np.ndarray # Upwelling shortwave + ldown: np.ndarray # 
Downwelling longwave + lup: np.ndarray # Upwelling longwave + kside: DirectionalData # Lateral shortwave + lside: DirectionalData # Lateral longwave +``` + +## Caching Strategy + +Expensive computations are cached: + +| Data | Cached Where | Invalidation | +|------|-------------|--------------| +| SVF | `PrecomputedData` | DSM hash change | +| Wall heights | `working_dir/walls/` | DSM change | +| Shadow matrices | `PrecomputedData` | DSM change | + +## Dual Environment Support + +SOLWEIG runs in both standalone Python and QGIS: + +| Component | Python | QGIS/OSGeo4W | +|-----------|--------|--------------| +| Raster I/O | rasterio | GDAL | +| Progress | tqdm | QgsProcessingFeedback | +| Logging | logging | QgsProcessingFeedback | + +Backend detection is automatic in `io.py`. diff --git a/docs/development/contributing.md b/docs/development/contributing.md new file mode 100644 index 0000000..c293903 --- /dev/null +++ b/docs/development/contributing.md @@ -0,0 +1,131 @@ +# Contributing + +Thank you for your interest in contributing to SOLWEIG! + +## Development Setup + +### Prerequisites + +- Python 3.10+ +- Rust toolchain (for building extensions) +- uv (package manager) + +### Clone and Install + +```bash +git clone https://github.com/UMEP-dev/solweig.git +cd solweig + +# Install dependencies +uv sync + +# Build Rust extension +maturin develop +``` + +### Verify Installation + +```bash +# Run tests +pytest ./tests + +# Full verification (format, lint, typecheck, test) +poe verify_project +``` + +## Development Workflow + +### Making Changes + +1. Create a feature branch: `git checkout -b feature/my-feature` +2. Make your changes +3. Run verification: `poe verify_project` +4. Commit with clear messages +5. 
Open a pull request + +### Code Style + +We use these tools (configured in `pyproject.toml`): + +| Tool | Purpose | +|------|---------| +| **ruff** | Linting and formatting | +| **ty** | Type checking | +| **pytest** | Testing | + +Run all checks: + +```bash +poe verify_project +``` + +### Testing + +Tests are in `tests/`: + +- `tests/spec/` - Physical property tests (shadows, SVF, radiation) +- `tests/golden/` - Reference data validation +- `tests/test_api.py` - Integration tests + +Add tests for new functionality: + +```bash +# Run specific test file +pytest tests/test_api.py + +# Run with coverage +pytest --cov=solweig tests/ +``` + +## Project Structure + +``` +pysrc/solweig/ # Python source + api.py # Public API + models/ # Dataclasses (SurfaceData, Weather, etc.) + components/ # Modular calculation functions + computation.py # Core orchestration +rust/ # Rust extensions +specs/ # Module specifications +tests/ # Test suite +docs/ # Documentation (MkDocs) +``` + +## Types of Contributions + +### Bug Reports + +Open an issue with: + +- Clear description of the bug +- Steps to reproduce +- Expected vs actual behavior +- Version information + +### Feature Requests + +Open an issue describing: + +- The use case +- Proposed solution +- Alternatives considered + +### Code Contributions + +1. Check existing issues for related work +2. Discuss major changes in an issue first +3. Follow the code style guidelines +4. Add tests for new functionality +5. Update documentation as needed + +## Acknowledgements + +SOLWEIG is adapted from the original UMEP (Urban Multi-scale Environmental Predictor) code by Fredrik Lindberg, Ting Sun, Sue Grimmond, Yihao Tang, and Nils Wallenberg. 
 + +If you use SOLWEIG in research, please cite: + +> Lindberg F, Grimmond CSB, Gabey A, Huang B, Kent CW, Sun T, Theeuwes N, Järvi L, Ward H, Capel-Timms I, Chang YY, Jonsson P, Krave N, Liu D, Meyer D, Olofson F, Tan JG, Wästberg D, Xue L, Zhang Z (2018) Urban Multi-scale Environmental Predictor (UMEP) - An integrated tool for city-based climate services. Environmental Modelling and Software 99, 70-87 [doi:10.1016/j.envsoft.2017.09.020](https://doi.org/10.1016/j.envsoft.2017.09.020) + +## License + +By contributing, you agree that your contributions will be licensed under the GNU General Public License v3.0. diff --git a/docs/development/gpu-execution-plan.md b/docs/development/gpu-execution-plan.md new file mode 100644 index 0000000..96f13cd --- /dev/null +++ b/docs/development/gpu-execution-plan.md @@ -0,0 +1,247 @@ +# GPU Execution Plan + +This document defines a methodical process for GPU work in SOLWEIG across: + +- SVF preprocessing (`skyview`, tiled and non-tiled) +- Timestep and timeseries runtime (`shadowing`, anisotropic sky, tiled orchestration) + +The goal is to make GPU changes **predictable, measurable, and reversible**. + +## Objectives + +1. Improve runtime and scalability without silent correctness regressions. +2. Keep fallback behavior explicit and diagnosable. +3. Guarantee large-grid behavior on all supported backends. + +## Core Policy + +### 1) No silent fallback + +If a GPU stage fails and CPU fallback is used, we must log a clear reason. + +Required log pattern: + +- `"[GPU] <stage> failed: <reason>. CPU fallback."` + +### 2) Kernel geometry must be size-safe + +For grid kernels, prefer 2D dispatch (`x`, `y`) over 1D dispatch over `total_pixels`. + +Reason: dispatch dimensions are limited by backend (e.g., 65535 per dimension in wgpu validation). + +### 3) GPU changes must pass parity + performance gates + +No GPU change is complete without both functional parity and benchmark evidence. 
+ +## Acceptance Criteria + +A GPU change is accepted only if all items pass. + +### A. Correctness gates + +1. SVF kernel/property tests: + +```bash +uv run pytest tests/spec/test_svf.py -q +``` + +2. SVF core-path regression (Rust core output must match full-tile slicing): + +```bash +uv run pytest tests/spec/test_svf_core_api.py -q +``` + +3. Anisotropic GPU/CPU parity: + +```bash +uv run pytest tests/spec/test_aniso_gpu_parity.py -q +``` + +4. Tiled parity checks: + +```bash +uv run pytest tests/test_tiling_integration.py -k "multitile_vs_nontiled_comparison or anisotropic_tiled_vs_nontiled" -q +``` + +### B. Runtime gates + +Run the CI-stable tiled performance benchmark: + +```bash +uv run pytest tests/benchmarks/test_tiling_benchmark.py -q +``` + +Expected: + +- Worker-scaling sanity passes +- Bounded in-flight scheduling passes +- Anisotropic tiled runtime smoke passes + +For deeper diagnostics (including API vs plugin matrix ratios), run: + +```bash +uv run pytest tests/benchmarks/test_performance_matrix_benchmark.py -q +``` + +### C. Build gate + +Rebuild release extension with GPU features and retest: + +```bash +uv run maturin develop --release --manifest-path rust/Cargo.toml --features "pyo3/extension-module,pyo3/abi3-py39,gpu" +``` + +## Metrics To Track + +Track both absolute and relative metrics. 
+ +### SVF metrics + +- `svf_tile_wall_time_s` +- `svf_patch_progress_rate` (patches/s) +- `svf_stitch_copy_time_ms` +- `svf_gpu_fallback_count` + +### Timeseries metrics + +- `timestep_wall_time_s` +- `tile_turnaround_ms` (mean, p95) +- `gpu_stage_time_ms` (shadow + anisotropic when active) +- `queue_depth_peak` +- `gpu_fallback_count` per stage + +### Quality metrics + +- `mean_abs_diff` and `max_abs_diff` vs CPU reference for parity fixtures +- `% finite pixels` equality checks where applicable + +## Standard Profiling Modes + +### Lightweight timing (developer) + +```bash +SOLWEIG_TIMING=1 uv run pytest tests/spec/test_aniso_gpu_parity.py -q +``` + +### Anisotropic overlap mode experimentation + +```bash +SOLWEIG_ANISO_GPU_OVERLAP=1 uv run pytest tests/spec/test_aniso_gpu_parity.py -q +``` + +### Full benchmark matrix + +```bash +uv run pytest tests/benchmarks/test_performance_matrix_benchmark.py -q +``` + +## Workstreams + +## 1) Kernel Safety + +Scope: + +- Keep all grid kernels dispatch-safe for large tiles. +- Ensure host/shader uniform structs stay layout-aligned. + +Checklist: + +1. Dispatch dimensions are bounded by `rows/cols`, not `total_pixels` on one axis. +2. Shader bounds checks match host-provided dimensions. +3. Test a case where `rows * cols / workgroup_size > 65535` would have failed in 1D mode. + +## 2) Data Movement Reduction + +Scope: + +- Minimize GPU↔CPU transfers and Python-side copies. + +Checklist: + +1. No per-patch readback when accumulation can remain on GPU. +2. Prefer core-window outputs for tiled stitching paths. +3. Measure and report copy/stitch time explicitly. + +## 3) Tiled Orchestration Efficiency + +Scope: + +- Keep GPU fed while avoiding CPU-side queue/mem thrash. + +Checklist: + +1. Validate `tile_workers` and `inflight_limit` under realistic memory pressure. +2. Keep telemetry for `mean_turnaround`, `max_queue`. +3. Verify parity for tiled vs non-tiled after scheduling changes. 
+ +## 4) Backend-Aware Resource Policy + +Scope: + +- Keep tile sizing stable across Metal/DX12/Vulkan/GL behavior differences. + +Checklist: + +1. Use backend metadata in sizing decisions. +2. Keep total-memory vs single-buffer heuristics documented and tested. +3. Preserve CPU fallback when limits are exceeded unexpectedly. + +## 5) Observability and Operations + +Scope: + +- Make performance and fallback behavior visible in user logs. + +Checklist: + +1. Every fallback path logs stage + reason. +2. Progress bars use bounded ranges when embedded in multi-phase workflows. +3. Provide concise per-run telemetry summaries. + +## Change Workflow (Required) + +For each GPU PR: + +1. Describe expected bottleneck shift (e.g., readback -> compute). +2. Attach before/after metrics from benchmark matrix and one realistic dataset. +3. Run all correctness gates. +4. Run release build + spot parity check. +5. Document fallback behavior and any new env flags. + +## Immediate Priorities + +1. Keep dispatch geometry audits active for all remaining GPU kernels. +2. Add explicit fallback counters (not only logs) for shadow/SVF/anisotropic stages. +3. Add one large-grid stress test for anisotropic GPU dispatch limits. +4. Add a CI lane for GPU parity + tiled parity (nightly if runtime is high). + +## Quick Command Bundle + +```bash +# Correctness gates +uv run pytest tests/spec/test_svf.py tests/spec/test_svf_core_api.py tests/spec/test_aniso_gpu_parity.py -q +uv run pytest tests/test_tiling_integration.py -k "multitile_vs_nontiled_comparison or anisotropic_tiled_vs_nontiled" -q + +# Performance gates +uv run pytest tests/benchmarks/test_tiling_benchmark.py -q + +# Release build +uv run maturin develop --release --manifest-path rust/Cargo.toml --features "pyo3/extension-module,pyo3/abi3-py39,gpu" +``` + +## Poe Shortcuts + +```bash +uv run poe test_gpu_gates +uv run poe test_gpu_perf_gate +``` + +## Definition of Done + +A GPU optimization is done when: + +1. Correctness gates pass. 
+2. Performance gates pass or regressions are explicitly accepted with rationale. +3. No silent fallback remains in changed paths. +4. Release build is validated. +5. Documentation is updated here if behavior/policy changed. diff --git a/docs/development/roadmap.md b/docs/development/roadmap.md new file mode 100644 index 0000000..1aed1bf --- /dev/null +++ b/docs/development/roadmap.md @@ -0,0 +1,47 @@ +# Roadmap + +The canonical roadmap lives at **[ROADMAP.md](https://github.com/gushogg-blake/solweig/blob/main/ROADMAP.md)** in the project root. + +This page summarizes current status. For full details including session logs, phase breakdowns, and the wish list, see the root file. + +## Current Status (February 2026) + +**Phases A, B, E complete.** Code quality sweep done. GPU/Rust plan written. + +| Phase | Description | Status | +| ----- | ----------- | ------ | +| 1-2 | API simplification | Complete | +| 5 | Middle layer refactoring | Complete | +| A | Scientific rigor & validation | Complete | +| B | Memory & computational improvements | Complete | +| E | API improvements | Complete | +| D | Documentation & integration | **In progress** | +| F | Test coverage | **In progress** | +| G | GPU & Rust-Python interface | **Planned** | +| H | Field-data validation | **Planned** | +| C | Performance (POI mode) | Deferred | + +## Next Tasks + +| # | Task | Impact | Status | +| - | ---- | ------ | ------ | +| 1 | Move `cylindric_wedge` to Rust | HIGH - per-timestep hotspot | Pending | +| 2 | GPU context persistence | HIGH - eliminates init overhead | Pending | +| 3 | QGIS plugin testing (Phase 11) | HIGH - blocks plugin adoption | Pending | +| 4 | Field-data validation | HIGH - scientific credibility | Pending | +| 5 | Orchestration layer unit tests | MEDIUM - regression safety | Pending | +| 6 | API reference with mkdocstrings | MEDIUM - user adoption | Pending | +| 7 | POI mode | HIGH - 10-100x speedup | Deferred | + +## Test Suite + +353 tests across 4 categories: + +- 
**Spec property tests** (`tests/spec/`) - Physical invariants +- **Golden regression tests** (`tests/golden/`) - Reference output comparison +- **Integration tests** (`tests/test_*.py`) - API and feature tests +- **Benchmarks** (`tests/benchmarks/`) - Memory profiling + +## Contributing + +See [Contributing](contributing.md) for how to help with these priorities. diff --git a/docs/getting-started/installation.md b/docs/getting-started/installation.md new file mode 100644 index 0000000..e472c0b --- /dev/null +++ b/docs/getting-started/installation.md @@ -0,0 +1,86 @@ +# Installation + +## Install from PyPI + +```bash +pip install solweig +``` + +Verify it worked: + +```bash +python -c "import solweig; print(solweig.__version__)" +``` + +## Install from source (for development) + +If you want to modify the code or contribute: + +- **Python 3.10+** +- **Rust toolchain** — needed to compile the high-performance core ([install Rust](https://rustup.rs/)) +- **uv** — fast Python package manager ([install uv](https://docs.astral.sh/uv/getting-started/installation/)) + +```bash +git clone https://github.com/UMEP-dev/solweig.git +cd solweig +uv sync # Install Python dependencies +maturin develop # Compile Rust extension and link it +``` + +## Optional dependencies + +SOLWEIG works with just numpy arrays, but file-based workflows benefit from these extras: + +| Package | What it enables | +| ------- | --------------- | +| `rasterio` | Loading/saving GeoTIFF rasters (installed by default) | +| `geopandas` | Rasterising vector data (e.g. tree polygons to a canopy grid) | +| `affine` | Geospatial coordinate transforms (installed by default) | +| `pyproj` | CRS handling and coordinate conversion (installed by default) | + +If you only work with numpy arrays, `rasterio` and `geopandas` are not needed. + +## GPU acceleration + +SOLWEIG automatically uses GPU acceleration (via wgpu/Metal/Vulkan) when available. No extra setup is needed. 
+ +```python +import solweig + +print(f"GPU available: {solweig.is_gpu_available()}") +print(f"Backend: {solweig.get_compute_backend()}") # "gpu" or "cpu" +``` + +If no GPU is found, it falls back to CPU transparently. To force CPU mode: + +```python +solweig.disable_gpu() +``` + +## Troubleshooting + +### `maturin: command not found` + +Install it via uv or pip: + +```bash +uv tool install maturin +# or +pip install maturin +``` + +### Build errors on macOS + +Ensure Xcode command line tools are installed: + +```bash +xcode-select --install +``` + +### `import solweig` fails after `maturin develop` + +Make sure you're using the same Python environment that `uv sync` created. If using uv: + +```bash +uv run python -c "import solweig; print('OK')" +``` diff --git a/docs/getting-started/quick-start.md b/docs/getting-started/quick-start.md new file mode 100644 index 0000000..9780bb7 --- /dev/null +++ b/docs/getting-started/quick-start.md @@ -0,0 +1,336 @@ +# Quick Start + +This guide walks you through your first SOLWEIG calculation — from raw inputs to a Tmrt map. + +## What you'll do + +1. Create a surface with buildings +2. Define where and when +3. Calculate Mean Radiant Temperature +4. Interpret the results + +## Option A: From numpy arrays (no files needed) + +Use this when you want to experiment quickly or don't have GeoTIFF data yet. + +```python +import numpy as np +import solweig +from datetime import datetime + +# --- 1. Create a surface --- +# A 200×200 m flat area at 2 m elevation, with a 15 m tall building in the centre +dsm = np.full((200, 200), 2.0, dtype=np.float32) +dsm[80:120, 80:120] = 15.0 # 40×40 m building + +surface = solweig.SurfaceData(dsm=dsm, pixel_size=1.0) # 1 pixel = 1 metre +# SVF is required before calculate(); compute once and reuse on this surface +surface.compute_svf() + +# --- 2. 
Define location and weather --- +location = solweig.Location( + latitude=48.8, # Paris + longitude=2.3, + utc_offset=1, # Central European Time (UTC+1) +) + +weather = solweig.Weather( + datetime=datetime(2025, 7, 15, 14, 0), # 2pm, July 15 + ta=32.0, # Air temperature (°C) + rh=40.0, # Relative humidity (%) + global_rad=850.0, # Global horizontal irradiance (W/m²) +) + +# --- 3. Calculate --- +result = solweig.calculate(surface, location, weather) + +# --- 4. Inspect results --- +print(f"Mean Tmrt: {result.tmrt.mean():.1f}°C") +print(f"Sunlit Tmrt: {result.tmrt[result.shadow > 0.5].mean():.1f}°C") +print(f"Shaded Tmrt: {result.tmrt[result.shadow < 0.5].mean():.1f}°C") +``` + +!!! note "SVF is explicit" + `calculate()` requires SVF to already be available. For array-based workflows, call `surface.compute_svf()` once before the first calculation. For GeoTIFF workflows, `SurfaceData.prepare()` computes/caches SVF for you. + If you explicitly set `use_anisotropic_sky=True`, shadow matrices must also already be available (prepared via the same preprocessing step). + +## Option B: From GeoTIFF files (real-world data) + +This is the typical workflow for real projects. You need: + +- A **DSM** GeoTIFF (Digital Surface Model — building/terrain heights) +- An **EPW** weather file (standard format, downloadable from climate databases) + +```python +import solweig + +# --- 1. Load and prepare surface --- +# Walls, sky view factors, and NaN handling are all automatic +surface = solweig.SurfaceData.prepare( + dsm="data/dsm.tif", + working_dir="cache/", # Preprocessing cached here for reuse + cdsm="data/trees.tif", # Optional: vegetation canopy heights +) + +# --- 2. 
Load weather and location from EPW --- +weather_list = solweig.Weather.from_epw( + "data/weather.epw", + start="2025-07-01", # Date range to simulate + end="2025-07-03", +) +location = solweig.Location.from_epw("data/weather.epw") + +print(f"Location: {location.latitude:.1f}°N, {location.longitude:.1f}°E") +print(f"Loaded {len(weather_list)} hourly timesteps") + +# --- 3. Run timeseries --- +# Results saved as GeoTIFFs; thermal state carried between timesteps +results = solweig.calculate_timeseries( + surface=surface, + weather_series=weather_list, + location=location, + output_dir="output/", + outputs=["tmrt", "shadow"], +) + +print(f"Done — {len(results)} timesteps saved to output/") +``` + +If disk space is limited, omit `output_dir` — the returned `TimeseriesSummary` +contains aggregated grids (mean/max/min Tmrt, UTCI, sun hours, etc.). See +[Timeseries](../guide/timeseries.md#choose-an-output-strategy). + +### What `prepare()` does behind the scenes + +When you call `SurfaceData.prepare()`, it automatically: + +1. Loads the DSM (and optional CDSM, DEM, land cover) +2. Fills NaN/nodata values using the ground reference +3. Computes **wall heights and aspects** from the DSM edges +4. Computes **Sky View Factors** (15 directional grids) +5. Caches everything to `working_dir/` so the next run is instant + +## Adding thermal comfort + +Tmrt tells you how much radiation a person absorbs, but thermal comfort also depends on air temperature, humidity, and wind. UTCI and PET combine all of these. + +UTCI and PET summary grids are included in the `TimeseriesSummary` by default. 
+For per-timestep arrays or saved files, include `"utci"` or `"pet"` in +`timestep_outputs` or `outputs`: + +```python +summary = solweig.calculate_timeseries( + surface=surface, + weather_series=weather_list, + outputs=["tmrt", "utci"], # save per-timestep GeoTIFFs + timestep_outputs=["tmrt", "utci"], # keep per-timestep arrays in memory + output_dir="output/", +) +print(summary.report()) # Full summary with Tmrt, UTCI, sun hours, thresholds +``` + +| UTCI range | Meaning | +| ---------- | ------- | +| > 46°C | Extreme heat stress | +| 38–46°C | Very strong heat stress | +| 32–38°C | Strong heat stress | +| 26–32°C | Moderate heat stress | +| 9–26°C | No thermal stress | +| < 9°C | Cold stress categories | + +## Common setup patterns + +Use these patterns when your input data and workflow differ from the basic examples above. + +### Surface setup patterns + +#### Pattern 1: GeoTIFF workflow (recommended) + +```python +surface = solweig.SurfaceData.prepare( + dsm="data/dsm.tif", + cdsm="data/trees.tif", # Optional + dem="data/dem.tif", # Optional + working_dir="cache/", +) +``` + +`prepare()` computes/caches walls and SVF automatically. + +#### Pattern 2: In-memory arrays with absolute heights + +```python +import numpy as np + +dsm_abs = np.array(...) # Absolute elevation (e.g., m above sea level) +cdsm_abs = np.array(...) # Optional canopy elevation (absolute) + +surface = solweig.SurfaceData( + dsm=dsm_abs, + cdsm=cdsm_abs, # Optional + dsm_relative=False, + cdsm_relative=False, + pixel_size=1.0, +) +surface.compute_svf() # Required before calculate() +``` + +#### Pattern 3: In-memory arrays with relative heights + +```python +import numpy as np + +dsm_rel = np.array(...) # Height above ground +cdsm_rel = np.array(...) # Optional canopy height above ground +dem = np.array(...) 
# Ground elevation + +surface = solweig.SurfaceData( + dsm=dsm_rel, + dem=dem, + cdsm=cdsm_rel, # Optional + dsm_relative=True, + cdsm_relative=True, + pixel_size=1.0, +) +surface.preprocess() # Converts relative -> absolute +surface.compute_svf() # Required before calculate() +``` + +### Weather setup patterns + +#### Pattern 1: Existing EPW file + +```python +weather_list = solweig.Weather.from_epw( + "data/weather.epw", + start="2025-07-01", + end="2025-07-03", +) +``` + +#### Pattern 2: Download EPW, then load + +```python +epw_path = solweig.download_epw( + latitude=37.98, + longitude=23.73, + output_path="athens.epw", +) +weather_list = solweig.Weather.from_epw(epw_path) +``` + +#### Pattern 3: Manually create one timestep + +```python +from datetime import datetime + +weather = solweig.Weather( + datetime=datetime(2025, 7, 15, 14, 0), + ta=32.0, + rh=40.0, + global_rad=850.0, + wind_speed=2.0, # Optional but useful for UTCI/PET +) +``` + +### Location setup patterns + +#### Pattern 1: From EPW metadata (recommended with EPW weather) + +```python +location = solweig.Location.from_epw("data/weather.epw") +``` + +#### Pattern 2: From surface CRS + +```python +location = solweig.Location.from_surface(surface, utc_offset=1) +``` + +#### Pattern 3: Manual coordinates + +```python +location = solweig.Location( + latitude=48.8, + longitude=2.3, + utc_offset=1, +) +``` + +!!! warning "Always set `utc_offset` correctly" + UTC offset directly affects sun position timing and therefore shadows and Tmrt. + +## Where to get input data + +### DSM (Digital Surface Model) + +A raster grid where each pixel contains the height in metres (including buildings and terrain). Common sources: + +- **LiDAR point clouds** processed to raster (national mapping agencies often provide these) +- **Photogrammetry** from drone surveys +- **OpenStreetMap building footprints** extruded to heights + +### EPW (EnergyPlus Weather) + +Hourly weather data in a standard format. 
Free sources: + +- [Climate.OneBuilding.Org](https://climate.onebuilding.org/) — global coverage +- [PVGIS](https://re.jrc.ec.europa.eu/pvg_tools/en/) — European Commission tool + +You can also download an EPW directly from PVGIS (no API key needed): + +```python +# Download weather data for any location +epw_path = solweig.download_epw( + latitude=37.98, + longitude=23.73, + output_path="athens.epw", +) + +# Then load it +weather_list = solweig.Weather.from_epw(epw_path) +location = solweig.Location.from_epw(epw_path) +``` + +!!! note "Data attribution" + PVGIS weather data is derived from ERA5 reanalysis and contains modified Copernicus Climate Change Service information. Neither the European Commission nor ECMWF is responsible for any use that may be made of the Copernicus information or data it contains. + +### CDSM (Canopy Digital Surface Model) + +Vegetation canopy heights, either from LiDAR or by rasterising tree survey data: + +```python +import geopandas as gpd + +trees = gpd.read_file("trees.gpkg") +cdsm, transform = solweig.io.rasterise_gdf( + trees, "geometry", "height", + bbox=[minx, miny, maxx, maxy], + pixel_size=1.0, +) +``` + +## Key classes at a glance + +| Class | What it holds | +| ----- | ------------- | +| `SurfaceData` | DSM, optional vegetation/DEM/land cover, preprocessed walls and SVF | +| `Location` | Latitude, longitude, altitude, UTC offset | +| `Weather` | Air temperature, humidity, radiation for one timestep | +| `HumanParams` | Body parameters for Tmrt/PET (optional — sensible defaults provided) | +| `SolweigResult` | Output grids: Tmrt, shadow, radiation components | + +## Complete working demos + +The repository includes full end-to-end demos you can run directly: + +- **[demos/athens-demo.py](https://github.com/UMEP-dev/solweig/blob/main/demos/athens-demo.py)** — Full workflow: rasterise tree vectors, load GeoTIFFs, run a multi-day timeseries, post-process UTCI. The best starting point for real projects. 
+- **[demos/solweig_gbg_test.py](https://github.com/UMEP-dev/solweig/blob/main/demos/solweig_gbg_test.py)** — Gothenburg test data: surface preparation, SVF caching, and timeseries calculation. + +## Next steps + +- [Basic Usage](../guide/basic-usage.md) — Vegetation, height conventions, custom parameters, validation +- [Working with GeoTIFFs](../guide/geotiffs.md) — File loading, caching, saving results +- [Timeseries](../guide/timeseries.md) — Multi-day simulations with thermal state +- [Thermal Comfort](../guide/thermal-comfort.md) — UTCI and PET in depth +- [API Reference](../api/index.md) — All classes and functions diff --git a/docs/guide/basic-usage.md b/docs/guide/basic-usage.md new file mode 100644 index 0000000..158d095 --- /dev/null +++ b/docs/guide/basic-usage.md @@ -0,0 +1,294 @@ +# Basic Usage + +This guide covers the core concepts and common patterns for day-to-day use of SOLWEIG. + +## The three required inputs + +Every SOLWEIG calculation needs exactly three things: + +### 1. Surface — the physical environment + +A `SurfaceData` object holds the building/terrain heights and optional vegetation. The only required field is a DSM (Digital Surface Model). + +**From numpy arrays:** + +```python +import numpy as np +import solweig + +# Minimum: just a height grid +dsm = np.full((200, 200), 2.0, dtype=np.float32) +dsm[80:120, 80:120] = 15.0 # A building + +surface = solweig.SurfaceData(dsm=dsm, pixel_size=1.0) +# SVF is required before calculate(); compute once for this surface +surface.compute_svf() +``` + +**From GeoTIFF files:** + +```python +surface = solweig.SurfaceData.prepare( + dsm="data/dsm.tif", + working_dir="cache/", +) +``` + +### 2. Location — where on Earth + +A `Location` tells SOLWEIG where the site is, so it can compute sun position correctly. 
+ +```python +# Manual +location = solweig.Location(latitude=48.8, longitude=2.3, utc_offset=1) + +# From a GeoTIFF's coordinate system +location = solweig.Location.from_surface(surface, utc_offset=1) + +# From an EPW weather file header +location = solweig.Location.from_epw("weather.epw") +``` + +!!! warning "Always set `utc_offset`" + The UTC offset determines how clock time maps to sun position. Getting it wrong shifts shadows by hours. When creating a `Location` manually, always provide `utc_offset` explicitly. + +### 3. Weather — atmospheric conditions + +A `Weather` object holds the meteorological data for one point in time. + +```python +from datetime import datetime + +weather = solweig.Weather( + datetime=datetime(2025, 7, 15, 14, 0), + ta=32.0, # Air temperature (°C) + rh=40.0, # Relative humidity (%) + global_rad=850.0, # Global horizontal irradiance (W/m²) + wind_speed=2.0, # Wind speed (m/s) — used for UTCI/PET +) +``` + +For timeseries, load from an EPW file: + +```python +weather_list = solweig.Weather.from_epw( + "weather.epw", + start="2025-07-01", + end="2025-07-03", +) +``` + +### Downloading weather data + +Don't have an EPW file? 
Download one directly from PVGIS (no API key needed): + +```python +epw_path = solweig.download_epw( + latitude=37.98, # Athens + longitude=23.73, + output_path="athens.epw", +) +weather_list = solweig.Weather.from_epw(epw_path) +location = solweig.Location.from_epw(epw_path) +``` + +## Running a calculation + +### Single timestep + +```python +result = solweig.calculate(surface, location, weather) +``` + +### Multiple timesteps (timeseries) + +```python +results = solweig.calculate_timeseries( + surface=surface, + weather_series=weather_list, + location=location, + output_dir="output/", # Save GeoTIFFs as they're computed + outputs=["tmrt", "shadow"], # Which outputs to save +) +``` + +`calculate_timeseries` automatically carries **thermal state** between timesteps — ground and wall temperatures from one hour affect the next. This matters for accuracy; avoid looping over `calculate()` manually. + +Two common output patterns: + +1. Stream outputs to disk as they are computed (`output_dir=...`) for long runs and low RAM. +2. Keep outputs in memory (no `output_dir`), aggregate manually, and save only final products to minimize disk usage. + +See [Timeseries](timeseries.md#choose-an-output-strategy) for full examples of both patterns. + +## Understanding the output + +`SolweigResult` contains 2D grids with the same shape as your DSM: + +| Field | Unit | What it means | +| ----- | ---- | ------------- | +| `tmrt` | °C | **Mean Radiant Temperature** — how much radiation a person absorbs. The main output. | +| `shadow` | 0–1 | Shadow fraction. 1 = fully sunlit, 0 = fully shaded. | +| `kdown` | W/m² | Incoming shortwave radiation (sun + diffuse sky). | +| `kup` | W/m² | Reflected shortwave radiation from the ground. | +| `ldown` | W/m² | Incoming longwave radiation (thermal, from sky + walls). | +| `lup` | W/m² | Emitted longwave radiation from the ground. 
| + +```python +result = solweig.calculate(surface, location, weather) + +# Tmrt difference between sun and shade +sunlit = result.tmrt[result.shadow > 0.5].mean() +shaded = result.tmrt[result.shadow < 0.5].mean() +print(f"Sun-shade Tmrt difference: {sunlit - shaded:.0f}°C") +``` + +## Adding vegetation + +Trees reduce Tmrt significantly through shading. Provide a Canopy DSM (CDSM) — a grid of vegetation canopy heights above ground. + +```python +# From arrays +cdsm = np.zeros_like(dsm) +cdsm[10:40, 50:80] = 8.0 # 8 m tall trees + +surface = solweig.SurfaceData(dsm=dsm, cdsm=cdsm, pixel_size=1.0) + +# From GeoTIFF +surface = solweig.SurfaceData.prepare( + dsm="data/dsm.tif", + cdsm="data/trees.tif", + working_dir="cache/", +) +``` + +### Relative vs. absolute heights + +CDSM (and TDSM) heights can be either **relative** (height above ground) or **absolute** (elevation above sea level). By default, SOLWEIG assumes CDSM values are relative. + +```python +# Relative CDSM (default): values are height above ground +# e.g. 8.0 = an 8 m tall tree +surface = solweig.SurfaceData( + dsm=dsm, + cdsm=cdsm, + cdsm_relative=True, # Default — SOLWEIG adds ground elevation automatically + pixel_size=1.0, +) +surface.preprocess() # Converts relative → absolute using DEM or DSM as base + +# Absolute CDSM: values are elevation above sea level +# e.g. 135.0 = tree canopy is at 135 m elevation +surface = solweig.SurfaceData( + dsm=dsm, + cdsm=cdsm_absolute, + cdsm_relative=False, # Already absolute — no conversion needed + pixel_size=1.0, +) +``` + +When using `SurfaceData.prepare()` with GeoTIFFs, this conversion happens automatically. + +The same flags exist for DSM and TDSM: + +```python +# DSM with relative heights (height above ground) — requires a DEM +surface = solweig.SurfaceData( + dsm=dsm_relative, + dem=dem, + dsm_relative=True, + pixel_size=1.0, +) +surface.preprocess() # dsm_absolute = dem + dsm_relative +``` + +### Deciduous vs. 
evergreen trees + +By default, SOLWEIG uses seasonal leaf-on/leaf-off based on the date. For evergreen trees (conifers), set: + +```python +results = solweig.calculate_timeseries( + surface=surface, + weather_series=weather_list, + location=location, + conifer=True, # Trees always have full canopy +) +``` + +## Customising human parameters + +The default person is standing, 75 kg, 1.75 m tall, 35 years old. You can change this: + +```python +result = solweig.calculate( + surface, location, weather, + human=solweig.HumanParams( + posture="sitting", # or "standing" (default) + abs_k=0.7, # Shortwave absorption (0–1) + abs_l=0.97, # Longwave absorption (0–1) + weight=65, # kg (affects PET only) + height=1.65, # m (affects PET only) + ), +) +``` + +## Anisotropic sky model + +SOLWEIG supports both isotropic and anisotropic sky models. For reproducible +behavior, set `use_anisotropic_sky` explicitly in your call: + +```python +results = solweig.calculate_timeseries( + surface=surface, + weather_series=weather_list, + location=location, + use_anisotropic_sky=True, # More accurate, slightly slower +) +``` + +If you explicitly set `use_anisotropic_sky=True`, shadow matrices must already +be available. They are prepared alongside SVF via `SurfaceData.prepare(...)` +or `surface.compute_svf()`. Otherwise, `calculate*()` raises +`MissingPrecomputedData`. 
+ +## Input validation + +Catch problems before the expensive SVF computation: + +```python +try: + warnings = solweig.validate_inputs(surface, location, weather) + for w in warnings: + print(f"Warning: {w}") + result = solweig.calculate(surface, location, weather) +except solweig.GridShapeMismatch as e: + print(f"Grid size mismatch: {e.field} expected {e.expected}, got {e.got}") +except solweig.MissingPrecomputedData as e: + print(f"Missing data: {e}") +``` + +## Common issues + +### NaN pixels in output + +NaN values in DSM/CDSM are automatically filled with the ground reference (DEM, or the DSM itself if no DEM is provided). If output still contains NaN, provide a DEM to maximise valid coverage. + +### Slow first calculation + +SVF must be prepared before `calculate()`. For in-memory array workflows, call `surface.compute_svf()` once and reuse the surface. For production file workflows, use `SurfaceData.prepare()` with a persistent `working_dir` so SVF is cached and reused across runs. + +### GPU not detected + +```python +print(f"GPU: {solweig.is_gpu_available()}") +print(f"Backend: {solweig.get_compute_backend()}") +``` + +The package falls back to CPU automatically. GPU gives ~5–10x speedup for shadow and SVF computation. + +## Complete working demos + +For end-to-end examples you can run directly: + +- **[demos/athens-demo.py](https://github.com/UMEP-dev/solweig/blob/main/demos/athens-demo.py)** — Full GeoTIFF workflow with tree vectors, multi-day timeseries, and UTCI post-processing. +- **[demos/solweig_gbg_test.py](https://github.com/UMEP-dev/solweig/blob/main/demos/solweig_gbg_test.py)** — Gothenburg test data: surface preparation, SVF caching, and timeseries. diff --git a/docs/guide/geotiffs.md b/docs/guide/geotiffs.md new file mode 100644 index 0000000..3762d7d --- /dev/null +++ b/docs/guide/geotiffs.md @@ -0,0 +1,166 @@ +# Working with GeoTIFFs + +Most real-world SOLWEIG projects start from GeoTIFF raster files. 
This guide covers loading, caching, and saving. + +## Loading surface data + +`SurfaceData.prepare()` is the recommended way to load GeoTIFFs. It handles everything: loading, NaN filling, wall computation, SVF computation, and caching. + +```python +import solweig + +surface = solweig.SurfaceData.prepare( + dsm="data/dsm.tif", # Required: building/terrain heights + working_dir="cache/", # Required: where to cache preprocessing + cdsm="data/trees.tif", # Optional: vegetation heights + dem="data/dem.tif", # Optional: bare ground elevation + land_cover="data/lc.tif", # Optional: surface type classification +) +``` + +### Cropping to a bounding box + +Process only part of a large raster: + +```python +surface = solweig.SurfaceData.prepare( + dsm="data/dsm.tif", + working_dir="cache/", + bbox=[476800, 4205850, 477200, 4206250], # [minx, miny, maxx, maxy] + pixel_size=1.0, # Resample to 1 m resolution +) +``` + +Coordinates are in the DSM's native CRS (e.g. UTM metres). + +## What gets cached + +The `working_dir` stores expensive preprocessing so subsequent runs are instant: + +```text +cache/ +├── walls/ +│ ├── wall_hts.tif # Wall heights derived from DSM +│ └── wall_aspects.tif # Wall compass directions +└── svf/ + └── memmap/ + ├── svf.npy # Total Sky View Factor + ├── svf_north.npy # Directional SVF (4 cardinal directions) + └── ... # 15 SVF grids total +``` + +### Force recomputation + +If you change the DSM or want to regenerate everything: + +```python +surface = solweig.SurfaceData.prepare( + dsm="data/dsm.tif", + working_dir="cache/", + force_recompute=True, +) +``` + +SOLWEIG also validates cached data against the current DSM — if the dimensions or extent change, the cache is automatically invalidated. + +## Extracting location from CRS + +When the DSM has a projected CRS (e.g. 
UTM), you can extract lat/lon automatically: + +```python +location = solweig.Location.from_surface(surface, utc_offset=2) +``` + +Or from the EPW file, which also includes UTC offset: + +```python +location = solweig.Location.from_epw("data/weather.epw") +``` + +## Saving results as GeoTIFFs + +### During timeseries + +The simplest approach — results are saved as they're computed: + +```python +results = solweig.calculate_timeseries( + surface=surface, + weather_series=weather_list, + location=location, + output_dir="output/", + outputs=["tmrt", "shadow"], +) +``` + +Creates files like: + +```text +output/ +├── tmrt/ +│ ├── tmrt_20250701_0000.tif +│ ├── tmrt_20250701_0100.tif +│ └── ... +└── shadow/ + ├── shadow_20250701_0000.tif + └── ... +``` + +### Loading saved results + +```python +# Load a single output raster +arr, transform, crs, nodata = solweig.io.load_raster("output/tmrt/tmrt_20250701_1200.tif") +``` + +## NaN and nodata handling + +SOLWEIG handles missing data automatically at every stage: + +**At load time:** Negative nodata sentinel values (e.g. -9999) are converted to NaN. Zero-valued pixels are preserved as valid data. + +**Before calculation:** NaN pixels in DSM, CDSM, and TDSM are filled with the ground reference (DEM if provided, otherwise the DSM itself). Pixels within 0.1 m of the ground reference are clamped to exactly the ground value to prevent shadow artefacts from resampling noise. + +DEM NaN pixels are never filled — they represent truly missing ground data. + +When using `SurfaceData.prepare()`, this is all handled automatically. When constructing from arrays, `fill_nan()` runs inside `calculate()`. 
+ +## Rasterising vector data + +Convert tree polygons (GeoDataFrame) to a raster grid: + +```python +import geopandas as gpd + +trees = gpd.read_file("trees.gpkg") +trees = trees.to_crs(2154) # Match DSM CRS + +cdsm, transform = solweig.io.rasterise_gdf( + trees, + geom_col="geometry", + ht_col="height", + bbox=[476800, 4205850, 477200, 4206250], + pixel_size=1.0, +) + +# Optionally save to disk +from pyproj import CRS +solweig.io.save_raster( + "data/cdsm.tif", cdsm, transform.to_gdal(), CRS.from_epsg(2154).to_wkt() +) +``` + +## Large rasters + +For rasters too large to fit in memory, SOLWEIG supports tiled processing: + +```python +results = solweig.calculate_tiled( + surface=surface, + location=location, + weather=weather, + tile_size=500, # Process in 500×500 pixel tiles +) +``` + +See [Timeseries](timeseries.md) for combining tiled processing with multi-timestep simulations. diff --git a/docs/guide/thermal-comfort.md b/docs/guide/thermal-comfort.md new file mode 100644 index 0000000..4bd5fec --- /dev/null +++ b/docs/guide/thermal-comfort.md @@ -0,0 +1,158 @@ +# Thermal Comfort Indices + +Tmrt tells you how much radiation a person absorbs, but thermal comfort also depends on air temperature, humidity, and wind. SOLWEIG computes two standard indices that combine all these factors into a single "feels like" temperature. + +## UTCI (Universal Thermal Climate Index) + +UTCI is the most widely used outdoor thermal comfort index. It represents the air temperature of a reference environment that would produce the same thermal strain as the actual conditions. + +**Use UTCI when:** You want a standardised, fast metric for heat stress mapping, urban planning, or public health applications. 
+### Summary grids (default)
+
+UTCI summary grids (mean, max, min, day/night averages) are always computed
+as part of `TimeseriesSummary`:
+
+```python
+summary = solweig.calculate_timeseries(surface=surface, weather_series=weather_list)
+print(summary.report())  # Includes Tmrt, UTCI, sun hours, threshold exceedance
+```
+
+### Per-timestep arrays
+
+Include `"utci"` in `timestep_outputs` to retain per-timestep UTCI grids:
+
+```python
+summary = solweig.calculate_timeseries(
+    surface=surface,
+    weather_series=weather_list,
+    timestep_outputs=["tmrt", "utci"],
+)
+for r in summary.results:
+    print(f"UTCI range: {r.utci.min():.1f} – {r.utci.max():.1f}°C")
+```
+
+### Per-timestep GeoTIFF files
+
+Include `"utci"` in `outputs` to save per-timestep UTCI GeoTIFFs:
+
+```python
+summary = solweig.calculate_timeseries(
+    surface=surface,
+    weather_series=weather_list,
+    output_dir="output/",
+    outputs=["tmrt", "utci"],
+)
+```
+
+### From a single result
+
+```python
+result = solweig.calculate(surface, location, weather)
+utci = result.compute_utci(weather)
+print(f"Mean UTCI: {utci.mean():.1f}°C")
+```
+
+### UTCI stress categories
+
+| UTCI (°C) | Thermal stress |
+| ---------- | -------------- |
+| > 46 | Extreme heat stress |
+| 38 to 46 | Very strong heat stress |
+| 32 to 38 | Strong heat stress |
+| 26 to 32 | Moderate heat stress |
+| 9 to 26 | No thermal stress |
+| 0 to 9 | Slight cold stress |
+| -13 to 0 | Moderate cold stress |
+| -27 to -13 | Strong cold stress |
+| -40 to -27 | Very strong cold stress |
+| < -40 | Extreme cold stress |
+
+### Performance
+
+UTCI uses a fast polynomial approximation (~200 terms). Processing time is negligible compared to the main Tmrt calculation:
+
+- Single grid: ~1 ms
+- 72 timesteps: ~1 s
+
+---
+
+## PET (Physiological Equivalent Temperature)
+
+PET is the air temperature of a reference indoor environment at which the human heat balance equals the actual outdoor conditions. Unlike UTCI, PET allows you to customise body parameters.
+ +**Use PET when:** You need thermal comfort for specific populations (elderly, children, athletes) or when research requires the physiological model. + +### Per-timestep PET + +Include `"pet"` in `timestep_outputs` or `outputs`: + +```python +summary = solweig.calculate_timeseries( + surface=surface, + weather_series=weather_list, + timestep_outputs=["tmrt", "pet"], + human=solweig.HumanParams(weight=60, height=1.65, age=70), +) +``` + +### Single-result PET + +```python +result = solweig.calculate(surface, location, weather) +pet = result.compute_pet(weather) +print(f"Mean PET: {pet.mean():.1f}°C") +``` + +### With custom human parameters + +```python +pet = result.compute_pet( + weather, + human=solweig.HumanParams( + weight=60, # kg + height=1.65, # m + age=70, # years + sex=0, # 0 = female, 1 = male + activity=80.0, # metabolic rate (W) + clothing=0.5, # clothing insulation (clo) + posture="standing", + ), +) +``` + +### PET thermal sensation + +| PET (°C) | Perception | Physiological stress | +| --------- | ---------- | -------------------- | +| > 41 | Very hot | Extreme heat stress | +| 35 to 41 | Hot | Strong heat stress | +| 29 to 35 | Warm | Moderate heat stress | +| 23 to 29 | Slightly warm | Slight heat stress | +| 18 to 23 | Comfortable | No thermal stress | +| 13 to 18 | Slightly cool | Slight cold stress | +| 8 to 13 | Cool | Moderate cold stress | +| 4 to 8 | Cold | Strong cold stress | +| < 4 | Very cold | Extreme cold stress | + +### PET performance + +PET uses an iterative solver and is significantly slower than UTCI: + +- Single grid: ~50 ms +- 72 timesteps: ~1 minute + +!!! warning "PET is ~50x slower than UTCI" + For large-scale studies, use UTCI unless PET's customisable body parameters are specifically needed. + +--- + +## Choosing between UTCI and PET + +| | UTCI | PET | +| - | ---- | --- | +| **Speed** | Fast (polynomial) | Slow (iterative) | +| **Human parameters** | Fixed reference person | Customisable (age, weight, clothing, etc.) 
| +| **Best for** | Heat warnings, urban planning, large-scale mapping | Detailed comfort studies, vulnerable populations | +| **Common in** | European heat action plans, WMO guidelines | German VDI guidelines, bioclimatology research | + +For most day-to-day urban microclimate work, **UTCI is the recommended default**. It's fast, widely understood, and has standardised stress categories used in public health guidance. diff --git a/docs/guide/timeseries.md b/docs/guide/timeseries.md new file mode 100644 index 0000000..18cb9ce --- /dev/null +++ b/docs/guide/timeseries.md @@ -0,0 +1,225 @@ +# Timeseries Calculations + +For multi-timestep simulations (hours, days, or longer), use `calculate_timeseries()`. It properly carries thermal state between timesteps and optionally saves results to disk as they're computed. + +## Why not loop over `calculate()`? + +Ground and wall temperatures depend on accumulated heating from previous hours (thermal inertia). `calculate_timeseries()` manages this automatically via a `ThermalState` object. Looping over `calculate()` yourself loses this state, producing less accurate results — especially for ground-level longwave radiation. 
+ +```python +# Don't do this — loses thermal state between timesteps +for weather in weather_list: + result = solweig.calculate(surface, location, weather) + +# Do this instead +results = solweig.calculate_timeseries( + surface=surface, + location=location, + weather_series=weather_list, +) +``` + +## Basic timeseries + +```python +import solweig + +# Load surface +surface = solweig.SurfaceData.prepare( + dsm="data/dsm.tif", + working_dir="cache/", +) + +# Load weather from EPW file +weather_list = solweig.Weather.from_epw( + "data/weather.epw", + start="2025-07-01", + end="2025-07-03", +) +location = solweig.Location.from_epw("data/weather.epw") + +# Calculate all timesteps +results = solweig.calculate_timeseries( + surface=surface, + location=location, + weather_series=weather_list, +) + +print(f"Processed {len(results)} timesteps") +``` + +## Saving results to disk + +For long simulations, save results as GeoTIFFs as they're computed rather than keeping them all in memory: + +```python +results = solweig.calculate_timeseries( + surface=surface, + location=location, + weather_series=weather_list, + output_dir="output/", + outputs=["tmrt", "shadow"], # Which outputs to save +) +``` + +This creates timestamped GeoTIFFs: + +```text +output/ +├── tmrt/ +│ ├── tmrt_20250701_0000.tif +│ ├── tmrt_20250701_0100.tif +│ └── ... +├── shadow/ +│ ├── shadow_20250701_0000.tif +│ └── ... +└── run_metadata.json # All parameters for reproducibility +``` + +By default, only summary grids are returned (no per-timestep arrays in memory). +Use `timestep_outputs=["tmrt", "shadow"]` to retain specific per-timestep arrays. 
+ +## Inspecting results + +### Summary report + +`report()` returns a human-readable text summary of the run: + +```python +summary = solweig.calculate_timeseries( + surface=surface, + weather_series=weather_list, + output_dir="output/", + outputs=["tmrt", "shadow"], +) +print(summary.report()) +``` + +In Jupyter notebooks, placing `summary` as the last expression in a cell +renders the report automatically (via `_repr_html_`). + +### Per-timestep timeseries + +`summary.timeseries` contains 1-D arrays of spatial means at each timestep +— useful for understanding how conditions evolved over the simulation: + +```python +ts = summary.timeseries +print(ts.datetime) # timestamps +print(ts.ta) # air temperature per step +print(ts.tmrt_mean) # spatial mean Tmrt per step +print(ts.utci_mean) # spatial mean UTCI per step +print(ts.sun_fraction) # fraction of sunlit pixels per step +``` + +### Plotting + +`plot()` produces a multi-panel figure showing temperature, radiation, +sun exposure, and meteorological inputs over time: + +```python +summary.plot() # interactive display +summary.plot(save_path="output/timeseries.png") # save to file +``` + +Requires `matplotlib` (`pip install matplotlib`). + +## Choose an output strategy + +### Strategy A: Stream to disk during computation + +Use this for long runs and limited RAM. + +```python +summary = solweig.calculate_timeseries( + surface=surface, + location=location, + weather_series=weather_list, + output_dir="output/", + outputs=["tmrt", "shadow"], +) +``` + +- Pros: lowest memory use, immediate GeoTIFF outputs, restart-friendly +- Cons: more disk I/O/storage + +### Strategy B: Summary only (no file output) + +Use this when disk space is tight and you only need summary products. +`TimeseriesSummary` aggregates per-pixel statistics (mean/max/min Tmrt and +UTCI, sun/shade hours, threshold exceedance) incrementally during the loop, +so per-timestep arrays are freed immediately. 
+
+```python
+summary = solweig.calculate_timeseries(
+    surface=surface,
+    location=location,
+    weather_series=weather_list,
+    # No output_dir -> summary-only, minimal memory
+)
+print(summary.report())
+summary.to_geotiff("output/")  # Save summary grids only
+```
+
+- Pros: minimal disk usage and memory, automatic aggregation
+- Cons: no per-timestep files on disk
+
+## Per-timestep UTCI and PET
+
+UTCI and PET summary grids (mean, max, min, day/night averages) are always
+included in the returned `TimeseriesSummary`. To also retain per-timestep
+UTCI or PET arrays, include them in `timestep_outputs`:
+
+```python
+summary = solweig.calculate_timeseries(
+    surface=surface,
+    weather_series=weather_list,
+    timestep_outputs=["tmrt", "utci"],  # per-timestep Tmrt + UTCI
+    output_dir="output/",
+    outputs=["tmrt", "utci"],  # save both as GeoTIFF files
+)
+for r in summary.results:
+    print(f"UTCI range: {r.utci.min():.1f} – {r.utci.max():.1f}°C")
+```
+
+To also save per-timestep files to disk, add `"utci"` or `"pet"` to the
+`outputs` parameter. The indices are computed inline during the main loop
+(UTCI uses a fast Rust polynomial; PET uses an iterative solver).
+
+## Memory management for long simulations
+
+### Processing in chunks
+
+For very long simulations (weeks or months), process in daily chunks:
+
+```python
+for chunk_start in range(0, len(weather_list), 24):
+    chunk = weather_list[chunk_start:chunk_start + 24]
+    results = solweig.calculate_timeseries(
+        surface=surface,
+        location=location,
+        weather_series=chunk,
+        output_dir="output/",  # timestamped filenames keep chunks from colliding
+    )
+```
+
+## Performance
+
+| Grid size | Surface prep (SVF) | Per timestep | 72 timesteps |
+| --------- | -------------------- | -------------------- | ------------ |
+| 100x100 | ~5 s | ~10 ms | ~1 s |
+| 200x200 | ~67 s | ~20 ms | ~2 s |
+| 500x500 | ~10 min | ~100 ms | ~8 s |
+
+SVF is prepared explicitly (via `SurfaceData.prepare()` or `surface.compute_svf()`).
Use a persistent `working_dir` with `prepare()` to avoid recomputing SVF on every run. + +## Run metadata + +`calculate_timeseries()` automatically saves a `run_metadata.json` file capturing all parameters used. Load it later for reproducibility: + +```python +metadata = solweig.load_run_metadata("output/run_metadata.json") +print(f"Version: {metadata['solweig_version']}") +print(f"Timesteps: {metadata['timeseries']['timesteps']}") +print(f"Date range: {metadata['timeseries']['start']} to {metadata['timeseries']['end']}") +``` diff --git a/docs/index.md b/docs/index.md new file mode 100644 index 0000000..8af0aa6 --- /dev/null +++ b/docs/index.md @@ -0,0 +1,107 @@ +# SOLWEIG + +**Map how hot it *feels* across a city — pixel by pixel.** + +SOLWEIG computes **Mean Radiant Temperature (Tmrt)** and thermal comfort indices (**UTCI**, **PET**) for urban environments. Give it a building height model and weather data, and it produces high-resolution maps showing where people experience heat stress — and where trees, shade, and cool surfaces make a difference. + +## Who is this for? + +- **Urban planners** comparing street designs, tree planting, or cool-roof strategies +- **Researchers** running controlled microclimate experiments at 1 m resolution +- **Climate service providers** generating heat-risk maps for public health or events +- **Students** learning about urban radiation and thermal comfort + +## The 30-second version + +```python +import solweig + +# Load your building heights and weather +surface = solweig.SurfaceData.prepare(dsm="dsm.tif", working_dir="cache/") +weather_list = solweig.Weather.from_epw("weather.epw", start="2025-07-01", end="2025-07-03") +location = solweig.Location.from_epw("weather.epw") + +# Run — results saved as GeoTIFFs +solweig.calculate_timeseries( + surface=surface, + weather_series=weather_list, + location=location, + output_dir="output/", +) +``` + +That's it. 
`SurfaceData.prepare()` computes/caches walls and SVF; then `calculate_timeseries()` computes shadows, radiation, and Tmrt. + +!!! note "SVF Rule" + `calculate()` / `calculate_timeseries()` require SVF to already be available on `surface` (or via `precomputed.svf`). + Use `SurfaceData.prepare(...)` for automatic SVF preparation/caching, or call `surface.compute_svf()` explicitly for in-memory/manual surfaces. + +!!! note "Anisotropic Rule" + If you explicitly set `use_anisotropic_sky=True`, shadow matrices must already be available + (`surface.shadow_matrices` or `precomputed.shadow_matrices`), typically prepared via + `SurfaceData.prepare(...)` or `surface.compute_svf()`. + +## How it works + +SOLWEIG models the complete radiation budget experienced by a person standing outdoors: + +1. **Shadows** — Which pixels are shaded by buildings and trees? +2. **Sky View Factor** — How much open sky does each point see? +3. **Surface temperatures** — How hot are the ground and walls? +4. **Radiation balance** — Sum shortwave (sun) and longwave (heat) from all directions +5. **Tmrt** — Convert absorbed radiation into a single "felt temperature" +6. **Thermal comfort** — Optionally derive UTCI or PET indices + +## Documentation + +
+ +- :material-rocket-launch:{ .lg .middle } **Getting Started** + + --- + + Install SOLWEIG and run your first calculation in minutes + + [:octicons-arrow-right-24: Installation](getting-started/installation.md) + [:octicons-arrow-right-24: Quick Start](getting-started/quick-start.md) + +- :material-book-open-variant:{ .lg .middle } **User Guide** + + --- + + Common workflows: loading GeoTIFFs, running timeseries, thermal comfort + + [:octicons-arrow-right-24: Basic Usage](guide/basic-usage.md) + [:octicons-arrow-right-24: Working with GeoTIFFs](guide/geotiffs.md) + [:octicons-arrow-right-24: Timeseries](guide/timeseries.md) + [:octicons-arrow-right-24: Thermal Comfort](guide/thermal-comfort.md) + +- :material-api:{ .lg .middle } **API Reference** + + --- + + Complete reference for all classes and functions + + [:octicons-arrow-right-24: API Reference](api/index.md) + +- :material-flask:{ .lg .middle } **Physics** + + --- + + Scientific documentation of the radiation model + + [:octicons-arrow-right-24: Physics](physics/index.md) + +
+ +## Citation + +If you use SOLWEIG in your research, please cite the original model paper and the UMEP platform: + +1. Lindberg F, Holmer B, Thorsson S (2008) SOLWEIG 1.0 – Modelling spatial variations of 3D radiant fluxes and mean radiant temperature in complex urban settings. *International Journal of Biometeorology* 52, 697–713 [doi:10.1007/s00484-008-0162-7](https://doi.org/10.1007/s00484-008-0162-7) + +2. Lindberg F, Grimmond CSB, Gabey A, Huang B, Kent CW, Sun T, Theeuwes N, Järvi L, Ward H, Capel-Timms I, Chang YY, Jonsson P, Krave N, Liu D, Meyer D, Olofson F, Tan JG, Wästberg D, Xue L, Zhang Z (2018) Urban Multi-scale Environmental Predictor (UMEP) – An integrated tool for city-based climate services. *Environmental Modelling and Software* 99, 70-87 [doi:10.1016/j.envsoft.2017.09.020](https://doi.org/10.1016/j.envsoft.2017.09.020) + +## License + +GNU Affero General Public License v3.0. See [LICENSE](https://github.com/UMEP-dev/solweig/blob/main/LICENSE) for details. diff --git a/docs/javascripts/mathjax.js b/docs/javascripts/mathjax.js new file mode 100644 index 0000000..06dbf38 --- /dev/null +++ b/docs/javascripts/mathjax.js @@ -0,0 +1,16 @@ +window.MathJax = { + tex: { + inlineMath: [["\\(", "\\)"]], + displayMath: [["\\[", "\\]"]], + processEscapes: true, + processEnvironments: true + }, + options: { + ignoreHtmlClass: ".*|", + processHtmlClass: "arithmatex" + } +}; + +document$.subscribe(() => { + MathJax.typesetPromise() +}) diff --git a/docs/physics/ground-temperature.md b/docs/physics/ground-temperature.md new file mode 100644 index 0000000..694c080 --- /dev/null +++ b/docs/physics/ground-temperature.md @@ -0,0 +1,69 @@ +# Ground Temperature + +Ground surface temperature significantly affects upwelling longwave radiation and thermal comfort. + +## TsWaveDelay Model + +SOLWEIG uses a simplified thermal mass model that accounts for: + +1. **Solar heating**: Ground absorbs shortwave radiation +2. 
**Thermal inertia**: Temperature responds slowly to forcing +3. **Phase lag**: Peak temperature lags peak radiation + +## Governing Equation + +Ground temperature evolution: + +$$T_g(t) = T_{air} + \Delta T_{max} \cdot f(t - \phi)$$ + +Where: + +- $T_{air}$ = Air temperature +- $\Delta T_{max}$ = Maximum ground-air temperature difference +- $\phi$ = Phase lag (thermal delay) +- $f(t)$ = Diurnal temperature wave function + +## Land Cover Dependency + +Different surfaces have different thermal properties: + +| Surface | Thermal Admittance | Typical $\Delta T_{max}$ | +|---------|-------------------|-------------------------| +| Asphalt | High | 15-25°C | +| Concrete | High | 12-20°C | +| Grass | Low | 5-10°C | +| Water | Very High | 2-5°C | + +## Shading Effects + +Shaded ground has reduced temperature: + +$$T_{g,shade} = T_{g,sun} - \Delta T_{shade}$$ + +Where $\Delta T_{shade}$ depends on shadow duration and surface properties. + +## Timeseries Considerations + +Ground temperature requires previous timesteps for accurate modeling: + +```python +# CORRECT: Full timeseries preserves thermal state +results = solweig.calculate_timeseries( + surface=surface, + weather_series=weather_list, +) + +# WRONG: Single timestep loses thermal history +result = solweig.calculate(surface, location, weather_noon) +``` + +## Implementation + +Ground temperature is computed in `components/ground.py` using the TsWaveDelay algorithm from UMEP. + +## References + +- Lindberg, F., Holmer, B., & Thorsson, S. (2008). SOLWEIG 1.0–Modelling spatial variations of 3D radiant fluxes and mean radiant temperature in complex urban settings. *International Journal of Biometeorology*, 52(7), 697-713. +- Lindberg, F., Onomura, S., & Grimmond, C. S. B. (2016). Influence of ground surface characteristics on the mean radiant temperature in urban areas. *International Journal of Biometeorology*, 60(9), 1439-1452. +- Lindberg, F., Grimmond, C. S. B., & Martilli, A. (2015). 
Sunlit fractions on urban facets–Impact of spatial resolution and approach. *Urban Climate*, 12, 65-84.
+- Offerle, B., Grimmond, C. S. B., & Oke, T. R. (2003). Parameterization of net all-wave radiation for urban areas. *Journal of Applied Meteorology*, 42(8), 1157-1173.
diff --git a/docs/physics/gvf.md b/docs/physics/gvf.md
new file mode 100644
index 0000000..dcb2cf5
--- /dev/null
+++ b/docs/physics/gvf.md
@@ -0,0 +1,52 @@
+# Ground View Factor (GVF)
+
+The Ground View Factor quantifies the fraction of surrounding ground visible from a point, important for reflected radiation calculations.
+
+## Definition
+
+GVF represents the hemispherical view of ground surfaces:
+
+$$GVF = 1 - SVF - WVF$$
+
+Where:
+
+- $SVF$ = Sky View Factor
+- $WVF$ = Wall View Factor
+
+## Components
+
+| Component | Description |
+|-----------|-------------|
+| `gvf` | Total ground view factor |
+| `gvf_norm` | Normalized GVF for reflected radiation |
+
+## Role in Radiation
+
+GVF affects upwelling radiation calculations:
+
+1. **Reflected shortwave**: Ground reflects incoming solar radiation
+2. **Emitted longwave**: Ground emits thermal radiation based on temperature
+
+```python
+# Upwelling shortwave from ground
+Kup = albedo * Kdown * gvf
+
+# Upwelling longwave from ground
+Lup = emissivity * stefan_boltzmann * T_ground**4 * gvf
+```
+
+## Computation
+
+GVF is computed during SVF calculation by tracking rays that hit ground instead of sky:
+
+1. Cast rays from each point
+2. Rays not blocked by buildings/vegetation that hit ground contribute to GVF
+3. Weight by solid angle
+
+## Performance
+
+GVF is computed alongside SVF with minimal additional cost.
+
+## References
+
+- Lindberg, F., & Grimmond, C. S. B. (2011). The influence of vegetation and building morphology on shadow patterns and mean radiant temperatures in urban areas. *Theoretical and Applied Climatology*, 105(3), 311-323. 
diff --git a/docs/physics/index.md b/docs/physics/index.md new file mode 100644 index 0000000..71dc7b0 --- /dev/null +++ b/docs/physics/index.md @@ -0,0 +1,97 @@ +# Physics Overview + +SOLWEIG (Solar and Longwave Environmental Irradiance Geometry) calculates the mean radiant temperature (Tmrt) by modeling the complete radiation budget at a point in an urban environment. + +## Radiation Budget + +The total radiation received by a human body is: + +$$ +R_{total} = R_{short} + R_{long} +$$ + +Where: + +- **Shortwave** ($R_{short}$): Direct and diffuse solar radiation +- **Longwave** ($R_{long}$): Thermal radiation from sky, ground, and walls + +## Calculation Pipeline + +```mermaid +graph LR + DSM[DSM] --> SVF[Sky View Factor] + DSM --> Shadows[Shadow Masks] + SVF --> GVF[Ground View Factor] + Shadows --> Radiation + GVF --> Radiation + Weather --> Radiation + Radiation --> Tmrt[Mean Radiant Temp] + Tmrt --> UTCI + Tmrt --> PET +``` + +## Component Models + +### 1. Sky View Factor (SVF) + +Fraction of sky visible from each point, accounting for buildings and vegetation. + +[:octicons-arrow-right-24: SVF Details](svf.md) + +### 2. Shadow Calculation + +Sun position and ray tracing to determine shadow patterns. + +[:octicons-arrow-right-24: Shadow Details](shadows.md) + +### 3. Ground View Factor (GVF) + +View factor from point to ground surface, with albedo weighting. + +[:octicons-arrow-right-24: GVF Details](gvf.md) + +### 4. Radiation Model + +Complete shortwave and longwave radiation budget. + +[:octicons-arrow-right-24: Radiation Details](radiation.md) + +### 5. Ground Temperature + +Surface temperature model with thermal inertia. + +[:octicons-arrow-right-24: Ground Temp Details](ground-temperature.md) + +### 6. Mean Radiant Temperature + +Integration of all radiation fluxes into Tmrt. 
+ +[:octicons-arrow-right-24: Tmrt Details](tmrt.md) + +## Thermal Comfort Indices + +### UTCI (Universal Thermal Climate Index) + +Fast polynomial approximation for outdoor thermal comfort. + +[:octicons-arrow-right-24: UTCI Details](utci.md) + +### PET (Physiological Equivalent Temperature) + +Iterative solver based on human energy balance. + +[:octicons-arrow-right-24: PET Details](pet.md) + +## References + +1. Lindberg F, Holmer B, Thorsson S (2008) SOLWEIG 1.0 – Modelling spatial variations of 3D radiant fluxes and mean radiant temperature in complex urban settings. *Int J Biometeorol* 52:697-713 [doi:10.1007/s00484-008-0162-7](https://doi.org/10.1007/s00484-008-0162-7) + +2. Lindberg F, Grimmond CSB (2011) The influence of vegetation and building morphology on shadow patterns and mean radiant temperatures in urban areas: model development and evaluation. *Theor Appl Climatol* 105:311-323 + +3. Lindberg F, Onomura S, Grimmond CSB (2016) Influence of ground surface characteristics on the mean radiant temperature in urban areas. *Int J Biometeorol* 60:1439-1452 + +4. Lindberg F, Grimmond CSB, Martilli A (2015) Sunlit fractions on urban facets – Impact of spatial resolution and approach. *Urban Climate* 12:65-84 + +5. Konarska J, Lindberg F, Larsson A, Thorsson S, Holmer B (2014) Transmissivity of solar radiation through crowns of single urban trees — application for outdoor thermal comfort modelling. *Theor Appl Climatol* 117:363-376 + +6. Lindberg F, Grimmond CSB, Gabey A, et al. (2018) Urban Multi-scale Environmental Predictor (UMEP) – An integrated tool for city-based climate services. 
*Environ Model Softw* 99:70-87 [doi:10.1016/j.envsoft.2017.09.020](https://doi.org/10.1016/j.envsoft.2017.09.020) diff --git a/docs/physics/pet.md b/docs/physics/pet.md new file mode 100644 index 0000000..ea48102 --- /dev/null +++ b/docs/physics/pet.md @@ -0,0 +1,108 @@ +# PET (Physiological Equivalent Temperature) + +PET is the air temperature in a reference environment at which the heat balance of the human body is maintained with core and skin temperatures equal to those under the actual conditions. + +## Definition + +PET is based on the Munich Energy Balance Model for Individuals (MEMI), which solves: + +$$M + W = R + C + E_{sk} + E_{res} + S$$ + +Where: + +- $M$ = Metabolic rate +- $W$ = Mechanical work +- $R$ = Net radiation +- $C$ = Convective heat loss +- $E_{sk}$ = Evaporative heat loss (skin) +- $E_{res}$ = Respiratory heat loss +- $S$ = Heat storage + +## Reference Conditions + +The reference environment for PET has: + +- Wind speed 0.1 m/s +- Water vapor pressure 12 hPa +- Tmrt = Air temperature + +## Input Variables + +| Variable | Symbol | Units | +|----------|--------|-------| +| Air temperature | $T_a$ | °C | +| Mean radiant temperature | $T_{mrt}$ | °C | +| Wind speed | $v$ | m/s | +| Relative humidity | $RH$ | % | + +## Human Parameters + +Unlike UTCI, PET allows customizable human parameters: + +```python +human = solweig.HumanParams( + weight=70, # kg + height=1.75, # m + age=35, # years + sex="male", # or "female" + posture="standing", # or "sitting" +) + +pet = result.compute_pet(weather, human=human) +``` + +## Thermal Perception Scale + +| PET (°C) | Thermal Perception | Physiological Stress | +|----------|-------------------|---------------------| +| > 41 | Very hot | Extreme heat stress | +| 35 to 41 | Hot | Strong heat stress | +| 29 to 35 | Warm | Moderate heat stress | +| 23 to 29 | Slightly warm | Slight heat stress | +| 18 to 23 | Comfortable | No thermal stress | +| 13 to 18 | Slightly cool | Slight cold stress | +| 8 to 13 | Cool | 
Moderate cold stress | +| 4 to 8 | Cold | Strong cold stress | +| < 4 | Very cold | Extreme cold stress | + +## Algorithm + +PET uses an iterative solver: + +1. Calculate body heat balance under actual conditions +2. Determine core and skin temperatures +3. Iteratively find reference air temperature that produces same thermal state +4. Convergence typically requires 20-50 iterations + +## Performance + +PET is significantly slower than UTCI due to the iterative solver: + +| Metric | UTCI | PET | +|--------|------|-----| +| Single point | ~0.01 ms | ~0.5 ms | +| 100×100 grid | ~1 ms | ~50 ms | +| 72 timesteps | ~1 s | ~1 min | + +!!! warning "PET is ~50× slower than UTCI" + For large-scale studies, consider using UTCI unless PET's physiological basis is specifically needed. + +## Usage + +```python +result = solweig.calculate(surface, location, weather) + +# Compute PET with default human +pet = result.compute_pet(weather) + +# Compute PET with custom parameters +pet = result.compute_pet( + weather, + human=solweig.HumanParams(weight=60, height=1.65) +) +``` + +## References + +- Höppe, P. (1999). The physiological equivalent temperature–a universal index for the biometeorological assessment of the thermal environment. *International Journal of Biometeorology*, 43(2), 71-75. +- Matzarakis, A., Rutz, F., & Mayer, H. (2007). Modelling radiation fluxes in simple and complex environments. *International Journal of Biometeorology*, 51(4), 323-334. diff --git a/docs/physics/radiation.md b/docs/physics/radiation.md new file mode 100644 index 0000000..82c0ec3 --- /dev/null +++ b/docs/physics/radiation.md @@ -0,0 +1,72 @@ +# Radiation Model + +SOLWEIG computes the complete 3D radiation environment for a standing human. 
+ +## Radiation Components + +### Shortwave (Solar) + +| Component | Symbol | Description | +|-----------|--------|-------------| +| Direct | $K_{dir}$ | Direct beam from sun | +| Diffuse | $K_{dif}$ | Scattered by atmosphere | +| Reflected | $K_{ref}$ | Reflected from surfaces | + +### Longwave (Thermal) + +| Component | Symbol | Description | +|-----------|--------|-------------| +| Downwelling | $L_{down}$ | From sky and atmosphere | +| Upwelling | $L_{up}$ | From ground | +| Lateral | $L_{side}$ | From walls and surfaces | + +## Six-Direction Model + +Radiation is computed for six directions around a standing person: + +- **Up**: Sky/canopy radiation +- **Down**: Ground radiation +- **North, South, East, West**: Lateral radiation from walls + +## Direct/Diffuse Split + +Global radiation is split into direct and diffuse components using the Reindl model: + +$$K_{dir} = K_{global} \times (1 - k_d)$$ +$$K_{dif} = K_{global} \times k_d$$ + +Where $k_d$ is the diffuse fraction, estimated from clearness index. + +## Anisotropic vs Isotropic Sky + +**Isotropic**: Assumes uniform sky radiance (faster) + +**Anisotropic**: Models non-uniform sky brightness (more accurate): + +- Circumsolar brightening near sun +- Horizon brightening +- Zenith darkening + +## Wall Radiation + +Walls contribute lateral radiation based on: + +- Wall temperature (function of orientation and solar exposure) +- Wall emissivity +- View factor from point to wall + +## Cylindric Weighting + +For a standing human (approximated as cylinder), radiation from different directions is weighted: + +$$K_{absorbed} = a_k \sum_i w_i K_i$$ + +Where $w_i$ are direction-dependent weighting factors. + +## References + +- Lindberg, F., Holmer, B., & Thorsson, S. (2008). SOLWEIG 1.0. *International Journal of Biometeorology*, 52(7), 697-713. +- Lindberg, F., Onomura, S., & Grimmond, C. S. B. (2016). Influence of ground surface characteristics on the mean radiant temperature in urban areas. 
*International Journal of Biometeorology*, 60(9), 1439-1452. +- Reindl, D. T., Beckman, W. A., & Duffie, J. A. (1990). Diffuse fraction correlations. *Solar Energy*, 45(1), 1-7. +- Perez, R., Seals, R., & Michalsky, J. (1993). All-weather model for sky luminance distribution. *Solar Energy*, 50(3), 235-245. +- Jonsson, P., Eliasson, I., Holmer, B., & Grimmond, C. S. B. (2006). Longwave incoming radiation in the Tropics: results from field work in three African cities. *Theoretical and Applied Climatology*, 85, 185-201. diff --git a/docs/physics/shadows.md b/docs/physics/shadows.md new file mode 100644 index 0000000..ab4d5de --- /dev/null +++ b/docs/physics/shadows.md @@ -0,0 +1,66 @@ +# Shadow Calculation + +Shadow computation determines which grid cells are shaded from direct solar radiation at a given sun position. + +## Sun Position + +Sun position is computed from: + +- **Latitude/Longitude**: Geographic location +- **DateTime**: Local time with UTC offset +- **Algorithm**: NREL Solar Position Algorithm (SPA) + +Outputs: + +- **Altitude** ($\alpha$): Angle above horizon (0-90°) +- **Azimuth** ($\psi$): Compass direction (0-360°, north=0°) + +## Shadow Algorithm + +SOLWEIG uses a shadow volume approach: + +1. For each building pixel, compute shadow projection based on sun angle +2. Project shadow along solar azimuth +3. Shadow length: $L = h / \tan(\alpha)$ where $h$ is building height + +```python +# Shadow projection distance +shadow_length = height / tan(sun_altitude) + +# Shadow direction (opposite to sun) +shadow_azimuth = (sun_azimuth + 180) % 360 +``` + +## Building Shadows + +Building shadows are binary (0 or 1): + +- **0** = Sunlit +- **1** = Shaded by building + +## Vegetation Shadows + +Vegetation provides partial shade with transmissivity: + +$$F_{sh,veg} = 1 - T_{veg}$$ + +Where $T_{veg}$ depends on leaf area index (LAI) and path length through canopy. 
+ +## Shadow Matrices + +For anisotropic sky calculations, SOLWEIG pre-computes shadow patterns for multiple sun positions covering the sky hemisphere. + +## Output + +The shadow calculation produces: + +| Output | Description | +|--------|-------------| +| `shadow` | Combined shadow fraction (0-1) | +| `shadow_building` | Building shadow mask | +| `shadow_vegetation` | Vegetation shadow fraction | + +## References + +- Lindberg, F., Holmer, B., & Thorsson, S. (2008). SOLWEIG 1.0–Modelling spatial variations of 3D radiant fluxes and mean radiant temperature in complex urban settings. *International Journal of Biometeorology*, 52(7), 697-713. +- Konarska, J., Lindberg, F., Larsson, A., Thorsson, S., & Holmer, B. (2014). Transmissivity of solar radiation through crowns of single urban trees—application for outdoor thermal comfort modelling. *Theoretical and Applied Climatology*, 117, 363-376. diff --git a/docs/physics/svf.md b/docs/physics/svf.md new file mode 100644 index 0000000..cd5e053 --- /dev/null +++ b/docs/physics/svf.md @@ -0,0 +1,63 @@ +# Sky View Factor (SVF) + +The Sky View Factor quantifies the fraction of sky visible from a point on the ground. + +## Definition + +$$SVF = \frac{1}{\pi} \int_0^{2\pi} \int_0^{\pi/2} \cos(\theta) \sin(\theta) \, d\theta \, d\phi$$ + +Where: + +- $\theta$ = zenith angle +- $\phi$ = azimuth angle + +SVF ranges from 0 (no sky visible) to 1 (open sky). + +## Directional Components + +SOLWEIG computes directional SVF for anisotropic radiation: + +| Component | Description | +|-----------|-------------| +| `svf` | Total sky view factor | +| `svf_north` | Northern hemisphere contribution | +| `svf_south` | Southern hemisphere contribution | +| `svf_east` | Eastern hemisphere contribution | +| `svf_west` | Western hemisphere contribution | + +## Algorithm + +SVF is computed using hemisphere sampling with configurable resolution: + +1. Cast rays from each ground point across the hemisphere +2. 
Check occlusion against DSM (buildings) and CDSM (vegetation) +3. Weight visible rays by solid angle +4. Sum contributions for total and directional components + +## Vegetation Handling + +Vegetation (CDSM) partially blocks sky view with transmissivity: + +- **Trans** = trunk zone transmissivity (~0.43 default) +- **TransVeg** = vegetation transmissivity function + +```python +# Effective SVF through vegetation +svf_veg = svf * trans_veg + svf_building * (1 - trans_veg) +``` + +## Performance + +SVF computation is expensive (O(n² × rays)): + +| Grid Size | Computation Time | +|-----------|-----------------| +| 100×100 | ~5 seconds | +| 200×200 | ~67 seconds | +| 500×500 | ~10 minutes | + +SVF only depends on geometry, so it's computed once and cached. + +## References + +- Lindberg, F., & Grimmond, C. S. B. (2011). The influence of vegetation and building morphology on shadow patterns and mean radiant temperatures in urban areas: model development and evaluation. *Theoretical and Applied Climatology*, 105(3), 311-323. diff --git a/docs/physics/tmrt.md b/docs/physics/tmrt.md new file mode 100644 index 0000000..7747e73 --- /dev/null +++ b/docs/physics/tmrt.md @@ -0,0 +1,71 @@ +# Mean Radiant Temperature (Tmrt) + +Mean Radiant Temperature is the uniform temperature of an imaginary black enclosure that would result in the same radiant heat exchange as the actual non-uniform environment. + +## Definition + +$$T_{mrt} = \sqrt[4]{\frac{\sum_i F_i T_i^4}{\sigma}}$$ + +Where: + +- $F_i$ = View factor to surface $i$ +- $T_i$ = Temperature of surface $i$ +- $\sigma$ = Stefan-Boltzmann constant + +## SOLWEIG Calculation + +SOLWEIG computes Tmrt from absorbed radiation: + +$$T_{mrt} = \sqrt[4]{\frac{S_{str}}{\varepsilon_p \sigma}} - 273.15$$ + +Where $S_{str}$ is the mean radiant flux density (W/m²). 
+
+## Mean Radiant Flux
+
+The mean radiant flux combines all radiation components:
+
+$$S_{str} = a_k (K_{down} + K_{up} + K_{side}) + a_l (L_{down} + L_{up} + L_{side})$$
+
+Where:
+
+- $a_k$ = Shortwave absorptivity (~0.7 for clothed human)
+- $a_l$ = Longwave absorptivity (~0.97 for clothed human)
+- $K$ = Shortwave radiation components
+- $L$ = Longwave radiation components
+
+## Directional Components
+
+For a standing human (cylinder approximation):
+
+| Direction | Weight Factor |
+|-----------|--------------|
+| Up | 0.06 |
+| Down | 0.06 |
+| North | 0.22 |
+| South | 0.22 |
+| East | 0.22 |
+| West | 0.22 |
+
+## Typical Values
+
+| Environment | Tmrt Range |
+|-------------|-----------|
+| Deep shade | ~Air temperature |
+| Open sky, summer noon | 50-70°C |
+| Near hot pavement | +10-20°C above air |
+| Near cool grass | 5-10°C below open |
+
+## Output
+
+```python
+result = solweig.calculate(surface, location, weather)
+
+# Tmrt grid (°C)
+tmrt = result.tmrt
+print(f"Mean Tmrt: {tmrt.mean():.1f}°C")
+print(f"Max Tmrt: {tmrt.max():.1f}°C")
+```
+
+## References
+
+- Thorsson, S., Lindberg, F., Eliasson, I., & Holmer, B. (2007). Different methods for estimating the mean radiant temperature in an outdoor urban setting. *International Journal of Climatology*, 27(14), 1983-1993.
diff --git a/docs/physics/utci.md b/docs/physics/utci.md
new file mode 100644
index 0000000..b94ffcc
--- /dev/null
+++ b/docs/physics/utci.md
@@ -0,0 +1,79 @@
+# UTCI (Universal Thermal Climate Index)
+
+UTCI represents the air temperature of a reference environment that produces the same thermal strain as the actual environment.
+
+## Definition
+
+UTCI is based on a multi-node thermophysiological model that simulates:
+
+- Heat exchange between body and environment
+- Thermoregulation (sweating, shivering, vasodilation)
+- Clothing adaptation
+
+## Reference Conditions
+
+The reference environment has:
+
+- 50% relative humidity (vapor pressure capped at 20 hPa)
+- Wind speed 0.5 m/s at 10m height
+- Tmrt = Air temperature
+- Metabolic rate 135 W/m² (walking 4 km/h)
+
+## Input Variables
+
+| Variable | Symbol | Units |
+|----------|--------|-------|
+| Air temperature | $T_a$ | °C |
+| Mean radiant temperature | $T_{mrt}$ | °C |
+| Wind speed (10m) | $v_{10}$ | m/s |
+| Relative humidity | $RH$ | % |
+
+## Polynomial Approximation
+
+SOLWEIG uses a fast polynomial approximation (~200 terms):
+
+$$UTCI = T_a + f(T_a, T_{mrt} - T_a, v_{10}, e)$$
+
+Where $e$ is water vapor pressure (hPa).
+
+## Validity Range
+
+| Variable | Min | Max |
+|----------|-----|-----|
+| $T_a$ | -50°C | +50°C |
+| $T_{mrt} - T_a$ | -30°C | +70°C |
+| $v_{10}$ | 0.5 m/s | 17 m/s |
+
+## Stress Categories
+
+| UTCI (°C) | Stress Category |
+|-----------|-----------------|
+| > 46 | Extreme heat stress |
+| 38 to 46 | Very strong heat stress |
+| 32 to 38 | Strong heat stress |
+| 26 to 32 | Moderate heat stress |
+| 9 to 26 | No thermal stress |
+| 0 to 9 | Slight cold stress |
+| -13 to 0 | Moderate cold stress |
+| -27 to -13 | Strong cold stress |
+| -40 to -27 | Very strong cold stress |
+| < -40 | Extreme cold stress |
+
+## Usage
+
+```python
+result = solweig.calculate(surface, location, weather)
+
+# Compute UTCI from Tmrt
+utci = result.compute_utci(weather)
+print(f"Mean UTCI: {utci.mean():.1f}°C")
+```
+
+## Performance
+
+UTCI computation is fast (~1ms per grid) due to the polynomial approximation.
+
+## References
+
+- Jendritzky, G., de Dear, R., & Havenith, G. (2012). UTCI—Why another thermal index? *International Journal of Biometeorology*, 56(3), 421-428.
+- Bröde, P., et al. (2012). 
Deriving the operational procedure for the Universal Thermal Climate Index (UTCI). *International Journal of Biometeorology*, 56(3), 481-494. diff --git a/mkdocs.yml b/mkdocs.yml new file mode 100644 index 0000000..632ce18 --- /dev/null +++ b/mkdocs.yml @@ -0,0 +1,99 @@ +site_name: SOLWEIG +site_description: High-performance urban microclimate model for Mean Radiant Temperature and thermal comfort +site_url: https://umep-dev.github.io/solweig/ +repo_url: https://github.com/UMEP-dev/solweig +repo_name: UMEP-dev/solweig + +theme: + name: material + palette: + - scheme: default + primary: teal + accent: amber + toggle: + icon: material/brightness-7 + name: Switch to dark mode + - scheme: slate + primary: teal + accent: amber + toggle: + icon: material/brightness-4 + name: Switch to light mode + features: + - navigation.instant + - navigation.tracking + - navigation.sections + - navigation.expand + - navigation.top + - search.highlight + - search.suggest + - content.code.copy + - content.code.annotate + icon: + repo: fontawesome/brands/github + +plugins: + - search + - mkdocstrings: + handlers: + python: + options: + docstring_style: google + show_source: true + show_root_heading: true + members_order: source + +markdown_extensions: + - admonition + - pymdownx.details + - pymdownx.superfences + - pymdownx.highlight: + anchor_linenums: true + - pymdownx.inlinehilite + - pymdownx.tabbed: + alternate_style: true + - pymdownx.arithmatex: + generic: true + - tables + - toc: + permalink: true + +extra_javascript: + - javascripts/mathjax.js + - https://unpkg.com/mathjax@3/es5/tex-mml-chtml.js + +nav: + - Home: index.md + - Getting Started: + - Quick Start: getting-started/quick-start.md + - Installation: getting-started/installation.md + - User Guide: + - Basic Usage: guide/basic-usage.md + - Working with GeoTIFFs: guide/geotiffs.md + - Timeseries Calculations: guide/timeseries.md + - Thermal Comfort (UTCI/PET): guide/thermal-comfort.md + - API Reference: + - Overview: 
api/index.md + - Core Functions: api/functions.md + - Data Classes: api/dataclasses.md + - Errors: api/errors.md + - Physics: + - Overview: physics/index.md + - Sky View Factor: physics/svf.md + - Shadows: physics/shadows.md + - Ground View Factor: physics/gvf.md + - Radiation: physics/radiation.md + - Ground Temperature: physics/ground-temperature.md + - Mean Radiant Temperature: physics/tmrt.md + - UTCI: physics/utci.md + - PET: physics/pet.md + - Development: + - Contributing: development/contributing.md + - Architecture: development/architecture.md + - GPU Execution Plan: development/gpu-execution-plan.md + - Roadmap: development/roadmap.md + +extra: + social: + - icon: fontawesome/brands/github + link: https://github.com/UMEP-dev/solweig diff --git a/pyproject.toml b/pyproject.toml index 7d60412..ac36b7d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,9 +1,9 @@ [project] -name = "umepr" -version = "0.0.1b61" -description = "rust implementation of urban multi-scale environmental predictor" +name = "solweig" +version = "0.1.0b47" +description = "High-performance SOLWEIG urban microclimate model (Rust + Python)" readme = "README.md" -requires-python = ">=3.9, <3.14" +requires-python = ">=3.11, <3.14" license = { text = "AGPL-3.0" } keywords = [ "python3", @@ -26,29 +26,34 @@ keywords = [ authors = [{ name = "UMEP Developers" }] maintainers = [{ name = "UMEP Developers" }] classifiers = [ - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", + "Development Status :: 3 - Alpha", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", + "Programming Language :: Rust", + "Topic :: Scientific/Engineering :: Atmospheric Science", + "Topic :: Scientific/Engineering :: GIS", ] dependencies = [ - "geopandas>=1.0.1", - "matplotlib>=3.9.4", - "momepy>=0.6.0", - "numpy>=2.0.2", - "pandas>=2.3.1", - "pvlib>=0.13.0", - "pyepw>=0.1", - "pyproj>=3.6.1", - "pytz>=2025.2", 
- "rasterio>=1.4.3", - "rioxarray>=0.15.0", - "scipy>=1.13.1", - "shapely>=2.0.7", - "tqdm>=4.67.1", - "umep==0.0.1a18", - "xarray>=2024.7.0", + "numpy>=1.26.0", # QGIS 4 master ships 1.26.4 + "pyproj>=3.7.0", # Bundled with QGIS + "shapely>=2.0.4", # Bundled with QGIS +] + +[project.optional-dependencies] +# Full installation with all features (recommended for standalone use) +full = [ + "geopandas>=1.0.1", # POI/WOI point-of-interest analysis + "rasterio>=1.3.0", # Fast raster I/O (pip-installable, falls back to GDAL in QGIS) + "tqdm>=4.67.1", # Terminal progress bars + "pillow>=9.0.0", # Preview PNG generation for GeoTIFFs +] +# QGIS plugin mode - uses bundled GDAL, QGIS progress bars +qgis = [ + # No additional dependencies - uses bundled OSGeo4W packages + # geopandas: optional, only for POI/WOI features + # rasterio: not needed, uses GDAL backend + # tqdm: not needed, uses QGIS QgsProcessingFeedback ] [build-system] @@ -58,11 +63,12 @@ build-backend = "maturin" [tool.maturin] manifest-path = "rust/Cargo.toml" python-source = "pysrc" -module-name = 'umepr.rustalgos' -features = ["pyo3/extension-module", "gpu"] +module-name = 'solweig.rustalgos' +features = ["pyo3/extension-module", "pyo3/abi3-py311", "gpu"] +include = ["LICENSE"] [tool.setuptools] -packages = ["umepr"] +packages = ["solweig"] [dependency-groups] dev = [ @@ -75,16 +81,53 @@ dev = [ "pip>=23.2", "ruff>=0.5.1", "poethepoet>=0.29.0", - "pyright>=1.1.398", + "umep>=0.0.1a18", # For cross-checking tests against reference implementation + "ty>=0.0.12", + # Include full optional dependencies for dev/testing + "geopandas>=1.0.1", # POI/WOI point-of-interest analysis + "rasterio>=1.3.0", # Fast raster I/O (falls back to GDAL) + "tqdm>=4.67.1", # Terminal progress bars + "pillow>=9.0.0", # Preview PNG generation for GeoTIFFs + "scipy>=1.13.0", # Golden tests (ndimage) + "pre-commit>=4.3.0", + # Documentation + "mkdocs>=1.6.0", + "mkdocs-material>=9.5.0", + "mkdocstrings[python]>=0.27.0", + 
"ipykernel>=6.31.0", +] +# QGIS 4 compatibility testing - uses GDAL backend, no rasterio/geopandas +# Tests the minimal dependency set that works in QGIS 4 / Qt6 Python environment +qgis-compat = [ + "numpy>=1.26.0", + "pandas>=2.2.2", + # Note: GDAL is a system package in QGIS/OSGeo4W, not pip-installed. + # In CI, install matching version separately: uv pip install gdal==$(gdal-config --version) + "pyproj>=3.7.0", + "shapely>=2.0.4", + # Note: scipy NOT bundled in QGIS — only needed for dev/golden tests + # Note: tqdm not bundled in OSGeo4W - progress.py falls back to QGIS progress bars + # Note: geopandas not bundled - POI/WOI features unavailable without manual install + "pytest>=7.2.0", # Testing + "maturin>=1.8.3", # Build Rust extension ] [project.urls] -homepage = "https://github.com/UMEP-dev/umep-rust" -documentation = "https://github.com/UMEP-dev/umep-rust" -repository = "https://github.com/UMEP-dev/umep-rust" +homepage = "https://github.com/UMEP-dev/solweig" +documentation = "https://github.com/UMEP-dev/solweig" +repository = "https://github.com/UMEP-dev/solweig" [tool.poe.tasks] -verify_project = { shell = "ruff format && ruff check && pyright . 
&& pytest ./tests" } +verify_project = { shell = "ruff format && ruff check --fix && ty check pysrc/ tests/ demos/ scripts/ qgis_plugin/ && pytest ./tests" } +lint = { shell = "ruff format && ruff check --fix" } +typecheck = { shell = "ty check pysrc/ tests/ demos/ scripts/ qgis_plugin/" } +test_quick = { shell = "pytest tests/ -m 'not slow' -x -q" } +test_full = { shell = "pytest tests/ -x -q" } +test_benchmarks = { shell = "pytest tests/benchmarks/ -x -q" } +test_gpu_gates = { shell = "pytest tests/spec/test_svf.py tests/spec/test_svf_core_api.py tests/spec/test_aniso_gpu_parity.py -q && pytest tests/test_tiling_integration.py -k 'multitile_vs_nontiled_comparison or anisotropic_tiled_vs_nontiled' -q" } +test_gpu_perf_gate = { shell = "pytest tests/benchmarks/test_tiling_benchmark.py -q" } +docs = { shell = "mkdocs serve" } +docs_build = { shell = "mkdocs build --strict" } [tool.ruff] line-length = 120 @@ -107,6 +150,10 @@ select = [ ] fixable = ["ALL"] +[tool.ruff.lint.per-file-ignores] +# EPW test data contains literal long lines matching real file format +"tests/test_io.py" = ["E501"] + [tool.ruff.format] quote-style = "double" indent-style = "space" @@ -115,30 +162,20 @@ line-ending = "auto" docstring-code-format = true docstring-code-line-length = "dynamic" -[tool.pyright] -pythonVersion = "3.11" -include = ["umepr"] -exclude = [ - "**/__pycache__", - "**/__pypackages__", - "build", - "docs", - "**/temp/", - "tests/", - "dist", - ".venv", -] -typeCheckingMode = "strict" -useLibraryCodeForTypes = true -reportUntypedFunctionDecorator = false -reportUntypedClassDecorator = false -reportUnknownMemberType = false -reportMissingTypeStubs = false -reportUnnecessaryIsInstance = false - [tool.pytest.ini_options] console_output_style = "count" log_cli = true log_cli_level = "INFO" testpaths = ["tests"] addopts = "-s" +markers = [ + "slow: marks tests as slow (full SOLWEIG computation, deselect with '-m \"not slow\"')", + "validation: marks tests requiring external 
validation datasets (deselect with '-m \"not validation\"')", +] + +[tool.ty.rules] +# Ignore unresolved imports for external/optional modules that ty cannot resolve +# (rustalgos is a Rust extension, osgeo/qgis are optional dependencies) +unresolved-import = "ignore" +# numpy stub overloads are too strict for np.nanmean / np.mean on ndarray +no-matching-overload = "ignore" diff --git a/pysrc/solweig/__init__.py b/pysrc/solweig/__init__.py new file mode 100644 index 0000000..8cf4dd8 --- /dev/null +++ b/pysrc/solweig/__init__.py @@ -0,0 +1,220 @@ +"""SOLWEIG - High-performance urban microclimate model. + +A Python package with Rust-accelerated algorithms for computing mean radiant +temperature (Tmrt) and thermal comfort indices (UTCI, PET) in complex urban +environments. + +Quick start:: + + import solweig + from datetime import datetime + + result = solweig.calculate( + surface=solweig.SurfaceData(dsm=my_dsm_array), + location=solweig.Location(latitude=57.7, longitude=12.0), + weather=solweig.Weather(datetime=datetime(2025, 7, 15, 12, 0), ta=25, rh=50, global_rad=800), + ) + print(f"Tmrt: {result.tmrt.mean():.1f} C") + +Time series:: + + results = solweig.calculate_timeseries( + surface=surface, + weather_series=[weather1, weather2, weather3], + location=location, + ) + +I/O helpers:: + + # Load raster data + dsm, transform, crs, nodata = solweig.io.load_raster("dsm.tif") + + # Generate wall heights and aspects + solweig.walls.generate_wall_hts(dsm_path, bbox, out_dir) +""" + +import contextlib +import logging +from importlib.metadata import PackageNotFoundError, version + +logger = logging.getLogger(__name__) + +# Version: single source of truth is pyproject.toml +try: + __version__ = version("solweig") +except PackageNotFoundError: + __version__ = "0.0.0.dev0" # Fallback for editable/source installs without metadata + +# Import simplified API +# Import utility modules +from . 
import io, progress, walls # noqa: E402 +from .api import ( # noqa: E402 + HumanParams, + Location, + ModelConfig, + PrecomputedData, + SolweigResult, + SurfaceData, + TileSpec, + Timeseries, + TimeseriesSummary, + Weather, + calculate, + # Tiled processing helpers + calculate_buffer_distance, + calculate_tiled, + calculate_timeseries, + calculate_timeseries_tiled, + compute_pet_grid, + # Post-processing: Thermal comfort indices + compute_utci_grid, + # Run metadata/provenance + create_run_metadata, + # I/O + download_epw, + generate_tiles, + load_materials, + load_params, + load_physics, + load_run_metadata, + save_run_metadata, + # Validation + validate_inputs, +) +from .errors import SolweigError # noqa: E402 + +# Try to import Rust algorithms +try: + from .rustalgos import GPU_ENABLED, RELEASE_BUILD, gvf, pet, shadowing, sky, skyview, utci, vegetation + + # Enable GPU by default if available + if GPU_ENABLED: + shadowing.enable_gpu() + logger.info("GPU acceleration enabled by default") + else: + logger.debug("GPU support not compiled in this build") + +except ImportError as e: + logger.warning(f"Failed to import Rust algorithms: {e}") + GPU_ENABLED = False + RELEASE_BUILD = False + shadowing = None + skyview = None + gvf = None + sky = None + vegetation = None + utci = None + pet = None + + +def is_gpu_available() -> bool: + """ + Check if GPU acceleration is available at runtime. + + Returns True if: + - GPU support was compiled into the Rust extension + - A GPU device was successfully detected and initialized + + Use this to check GPU status before running compute-intensive operations. + + Returns: + True if GPU acceleration is available, False otherwise. + """ + if not GPU_ENABLED: + return False + if shadowing is None: + return False + try: + return shadowing.is_gpu_enabled() + except (AttributeError, RuntimeError): + return False + + +def get_compute_backend() -> str: + """ + Get the current compute backend. 
+ + Returns: + "gpu" if GPU acceleration is available and enabled, "cpu" otherwise. + """ + return "gpu" if is_gpu_available() else "cpu" + + +def disable_gpu() -> None: + """ + Disable GPU acceleration, falling back to CPU. + + This can be useful for debugging or if GPU results differ from expected. + The change takes effect immediately for subsequent calculations. + """ + if shadowing is not None: + with contextlib.suppress(AttributeError): + shadowing.disable_gpu() + + +def get_gpu_limits() -> dict[str, int | str] | None: + """ + Query real GPU buffer limits from the wgpu adapter. + + Returns a dict with keys: + - ``max_buffer_size``: int — largest single GPU buffer in bytes + - ``backend``: str — GPU backend name (``"Metal"``, ``"Vulkan"``, ``"Dx12"``, ``"Gl"``, etc.) + + Returns ``None`` if GPU is not available or not compiled in. + Lazily initialises the GPU context on first call. + """ + if not GPU_ENABLED or shadowing is None: + return None + try: + return shadowing.gpu_limits() + except (AttributeError, RuntimeError): + return None + + +__all__ = [ + # Version + "__version__", + # Core API + "SurfaceData", + "PrecomputedData", + "Location", + "Weather", + "HumanParams", + "ModelConfig", + "SolweigResult", + "Timeseries", + "TimeseriesSummary", + "SolweigError", + "calculate", + "calculate_timeseries", + "calculate_tiled", + "calculate_timeseries_tiled", + "validate_inputs", + "load_params", + "load_physics", + "load_materials", + # Tiled processing + "calculate_buffer_distance", + "TileSpec", + "generate_tiles", + # Post-processing: Thermal comfort + "compute_utci_grid", + "compute_pet_grid", + # Run metadata/provenance + "create_run_metadata", + "save_run_metadata", + "load_run_metadata", + # I/O + "download_epw", + # Utility modules + "io", + "walls", + "progress", + # GPU utilities + "is_gpu_available", + "get_compute_backend", + "get_gpu_limits", + "disable_gpu", + "GPU_ENABLED", + "RELEASE_BUILD", +] diff --git a/pysrc/solweig/_compat.py 
b/pysrc/solweig/_compat.py new file mode 100644 index 0000000..9d932f0 --- /dev/null +++ b/pysrc/solweig/_compat.py @@ -0,0 +1,123 @@ +"""Geospatial backend detection — single source of truth. + +Determines whether to use rasterio or GDAL for raster I/O and geometric +utilities. In QGIS / OSGeo4W environments rasterio is never attempted +(it causes numpy binary-incompatibility crashes). + +Exported flags +-------------- +GDAL_ENV : bool + True → use GDAL for raster operations. + False → use rasterio. +RASTERIO_AVAILABLE : bool + True when rasterio was successfully imported. +GDAL_AVAILABLE : bool + True when GDAL (osgeo) was successfully imported. +""" + +from __future__ import annotations + +import logging +import os +import sys + +logger = logging.getLogger(__name__) + + +# --------------------------------------------------------------------------- +# Environment detection +# --------------------------------------------------------------------------- + + +def in_osgeo_environment() -> bool: + """Return True when running inside QGIS or OSGeo4W.""" + if "qgis" in sys.modules or "qgis.core" in sys.modules: + return True + if any(k in os.environ for k in ("QGIS_PREFIX_PATH", "QGIS_DEBUG", "OSGEO4W_ROOT")): + return True + exe = sys.executable.lower() + return any(m in exe for m in ("osgeo4w", "qgis")) + + +# --------------------------------------------------------------------------- +# Import probes +# --------------------------------------------------------------------------- + + +def _try_import_rasterio() -> bool: + try: + import pyproj # noqa: F401 + import rasterio # noqa: F401 + from rasterio.features import rasterize # noqa: F401 + from rasterio.mask import mask # noqa: F401 + from rasterio.transform import Affine, from_origin # noqa: F401 + from rasterio.windows import Window # noqa: F401 + from shapely import geometry # noqa: F401 + + return True + except (ImportError, OSError, RuntimeError) as e: + logger.debug("Rasterio import failed: %s", e) + return 
False + + +def _try_import_gdal() -> bool: + try: + from osgeo import gdal, osr # noqa: F401 + + return True + except (ImportError, OSError) as e: + logger.debug("GDAL import failed: %s", e) + return False + + +# --------------------------------------------------------------------------- +# Backend selection (runs once at first import) +# --------------------------------------------------------------------------- + + +def _setup_geospatial_backend() -> tuple[bool, bool, bool]: + """Choose the geospatial backend. + + Returns (gdal_env, rasterio_available, gdal_available). + """ + # 1. Forced via env-var + if os.environ.get("UMEP_USE_GDAL", "").lower() in ("1", "true", "yes"): + if _try_import_gdal(): + logger.info("Using GDAL for raster operations (forced via UMEP_USE_GDAL).") + return True, False, True + raise ImportError("UMEP_USE_GDAL is set but GDAL could not be imported. Install GDAL or unset UMEP_USE_GDAL.") + + # 2. QGIS / OSGeo4W — prefer GDAL, never try rasterio first + if in_osgeo_environment(): + logger.debug("Detected OSGeo4W/QGIS environment, preferring GDAL backend.") + if _try_import_gdal(): + logger.info("Using GDAL for raster operations (OSGeo4W/QGIS environment).") + return True, False, True + # Unexpected — GDAL should always be present here + logger.warning("GDAL import failed in OSGeo4W/QGIS environment, trying rasterio...") + if _try_import_rasterio(): + logger.info("Using rasterio for raster operations.") + return False, True, False + raise ImportError( + "Failed to import both GDAL and rasterio in OSGeo4W/QGIS environment.\n" + "This is unexpected — GDAL should be available. Check your installation." + ) + + # 3. 
Standard environment — prefer rasterio, fall back to GDAL + if _try_import_rasterio(): + logger.info("Using rasterio for raster operations.") + return False, True, False + + logger.warning("Rasterio import failed, trying GDAL...") + if _try_import_gdal(): + logger.info("Using GDAL for raster operations.") + return True, False, True + + raise ImportError( + "Neither rasterio nor GDAL could be imported.\n" + "Install with: pip install rasterio\n" + "Or for QGIS/OSGeo4W environments, ensure GDAL is properly configured." + ) + + +GDAL_ENV, RASTERIO_AVAILABLE, GDAL_AVAILABLE = _setup_geospatial_backend() diff --git a/pysrc/solweig/api.py b/pysrc/solweig/api.py new file mode 100644 index 0000000..22ae7fd --- /dev/null +++ b/pysrc/solweig/api.py @@ -0,0 +1,508 @@ +"""Public SOLWEIG API. + +This module re-exports all user-facing symbols and provides the top-level +:func:`calculate` and :func:`validate_inputs` entry points. Internal +machinery (sun position, radiation split, etc.) is handled automatically, +while SVF must be prepared ahead of time (for example via +``SurfaceData.prepare()`` or ``surface.compute_svf()``). 
+ +Example:: + + import solweig + from datetime import datetime + + result = solweig.calculate( + surface=solweig.SurfaceData(dsm=my_dsm_array), + location=solweig.Location(latitude=57.7, longitude=12.0), + weather=solweig.Weather(datetime=datetime(2025, 7, 15, 12, 0), ta=25.0, rh=50.0, global_rad=800.0), + ) + print(f"Tmrt: {result.tmrt.mean():.1f} C") +""" + +from __future__ import annotations + +from types import SimpleNamespace +from typing import TYPE_CHECKING + +import numpy as np + +from .computation import calculate_core_fused +from .errors import ( + ConfigurationError, + GridShapeMismatch, + InvalidSurfaceData, + MissingPrecomputedData, + SolweigError, + WeatherDataError, +) +from .io import download_epw + +# Import from extracted modules +from .loaders import load_materials, load_params, load_physics, resolve_wall_params +from .metadata import create_run_metadata, load_run_metadata, save_run_metadata +from .models import ( + HumanParams, + Location, + ModelConfig, + PrecomputedData, + ShadowArrays, + SolweigResult, + SurfaceData, + SvfArrays, + ThermalState, + TileSpec, + Weather, +) +from .postprocess import ( + compute_pet_grid, + compute_utci_grid, +) +from .summary import Timeseries, TimeseriesSummary +from .tiling import ( + calculate_buffer_distance, + calculate_tiled, + calculate_timeseries_tiled, + generate_tiles, + validate_tile_size, +) +from .timeseries import calculate_timeseries +from .utils import dict_to_namespace, extract_bounds, intersect_bounds, namespace_to_dict, resample_to_grid + +if TYPE_CHECKING: + pass + + +def validate_inputs( + surface: SurfaceData, + location: Location | None = None, + weather: Weather | list[Weather] | None = None, + use_anisotropic_sky: bool = False, + precomputed: PrecomputedData | None = None, +) -> list[str]: + """ + Validate inputs before calculation (preflight check). + + Call this before expensive operations to catch errors early. 
+ Raises exceptions for fatal errors, returns warnings for potential issues. + + Args: + surface: Surface data to validate. + location: Location to validate (optional). + weather: Weather data to validate (optional, can be single or list). + use_anisotropic_sky: Whether anisotropic sky will be used. + precomputed: Precomputed data to validate. + + Returns: + List of warning messages (empty if all valid). + + Raises: + GridShapeMismatch: If surface grid shapes don't match DSM. + MissingPrecomputedData: If required precomputed data is missing. + WeatherDataError: If weather data is invalid. + + Example: + try: + warnings = solweig.validate_inputs(surface, location, weather) + for w in warnings: + print(f"Warning: {w}") + result = solweig.calculate(surface, location, weather) + except solweig.GridShapeMismatch as e: + print(f"Grid mismatch: {e.field} expected {e.expected}, got {e.got}") + except solweig.MissingPrecomputedData as e: + print(f"Missing data: {e}") + """ + warnings = [] + dsm_shape = surface.dsm.shape + + # Check grid shapes match DSM + grids_to_check = [ + ("cdsm", surface.cdsm), + ("dem", surface.dem), + ("tdsm", surface.tdsm), + ("wall_height", surface.wall_height), + ("wall_aspect", surface.wall_aspect), + ("land_cover", surface.land_cover), + ("albedo", surface.albedo), + ("emissivity", surface.emissivity), + ] + for name, grid in grids_to_check: + if grid is not None and grid.shape != dsm_shape: + raise GridShapeMismatch(name, dsm_shape, grid.shape) + + # Check SVF arrays if present + if surface.svf is not None: + svf_grids = [ + ("svf.svf", surface.svf.svf), + ("svf.svf_north", surface.svf.svf_north), + ("svf.svf_east", surface.svf.svf_east), + ("svf.svf_south", surface.svf.svf_south), + ("svf.svf_west", surface.svf.svf_west), + ] + for name, grid in svf_grids: + if grid is not None and grid.shape != dsm_shape: + raise GridShapeMismatch(name, dsm_shape, grid.shape) + + # Check SVF is available (required for all calculations) + if surface.svf is 
None and (precomputed is None or precomputed.svf is None): + raise MissingPrecomputedData( + "Sky View Factor (SVF) data is required but not available.", + "Call surface.compute_svf() before calculate(), or use SurfaceData.prepare() " + "which computes SVF automatically.", + ) + + # Check anisotropic sky requirements + if use_anisotropic_sky: + has_shadow_matrices = (precomputed is not None and precomputed.shadow_matrices is not None) or ( + surface.shadow_matrices is not None + ) + if not has_shadow_matrices: + raise MissingPrecomputedData( + "shadow_matrices required for anisotropic sky model", + "Either set use_anisotropic_sky=False, or provide shadow matrices via " + "precomputed=PrecomputedData(shadow_matrices=...) or surface.shadow_matrices", + ) + + # Check for potential issues (warnings, not errors) + if surface.cdsm is not None and not surface._preprocessed and surface.cdsm_relative: + warnings.append( + "CDSM provided with cdsm_relative=True but preprocess() not called. " + "Vegetation heights may be incorrect. Call surface.preprocess() first." + ) + if surface.tdsm is not None and not surface._preprocessed and surface.tdsm_relative: + warnings.append( + "TDSM provided with tdsm_relative=True but preprocess() not called. " + "Trunk heights may be incorrect. Call surface.preprocess() first." + ) + + # DSM height sanity checks + dsm_max = float(np.nanmax(surface.dsm)) + dsm_min = float(np.nanmin(surface.dsm)) + height_range = dsm_max - dsm_min + + if height_range > 500: + warnings.append( + f"DSM height range is {height_range:.0f}m (max={dsm_max:.0f}m, min={dsm_min:.0f}m). " + "This is typical for mountainous terrain. Shadow rays will use full terrain " + "relief; consider increasing max_shadow_distance_m for wide valleys." + ) + + if surface.dem is None and dsm_min > 100: + warnings.append( + f"DSM minimum value is {dsm_min:.0f}m with no DEM provided. 
" + "If this is above-sea-level elevation, provide a DEM so SOLWEIG can " + "compute building heights correctly." + ) + + # Per-layer relative height mismatch detection + for grid_name, grid, is_relative in [ + ("CDSM", surface.cdsm, surface.cdsm_relative), + ("TDSM", surface.tdsm, surface.tdsm_relative), + ]: + if grid is not None and is_relative: + nonzero = grid[grid > 0] + if nonzero.size > 0: + grid_min_nz = float(np.nanmin(nonzero)) + if grid_min_nz > 50: + flag = f"{grid_name.lower()}_relative" + warnings.append( + f"{grid_name} minimum non-zero value is {grid_min_nz:.0f}m with " + f"{flag}=True. Relative vegetation heights are typically " + f"0-50m. If it contains absolute elevations, set {flag}=False." + ) + + if surface.cdsm is not None and not surface.cdsm_relative and surface._looks_like_relative_heights(): + cdsm_max = float(np.nanmax(surface.cdsm)) + warnings.append( + f"CDSM values (max={cdsm_max:.1f}m) are much smaller than DSM " + f"(min={dsm_min:.1f}m) with cdsm_relative=False. " + "If CDSM contains height-above-ground, set cdsm_relative=True " + "and call surface.preprocess()." + ) + + # Validate weather if provided + if weather is not None: + weather_list = weather if isinstance(weather, list) else [weather] + for i, w in enumerate(weather_list): + # Basic range checks (Weather.__post_init__ catches some, but we add more) + if w.ta < -100 or w.ta > 60: + warnings.append( + f"Weather[{i}].ta={w.ta}°C is outside typical range [-100, 60]. Verify this is correct." + ) + if w.global_rad > 1400: + warnings.append( + f"Weather[{i}].global_rad={w.global_rad} W/m² exceeds solar constant. Verify this is correct." + ) + if w.ws == 0: + warnings.append( + f"Weather[{i}].ws=0 m/s. UTCI is sensitive to wind speed near zero; " + "consider using a small positive value (e.g. 0.5 m/s) if calm conditions are not intended." 
+ ) + + return warnings + + +def calculate( + surface: SurfaceData, + location: Location, + weather: Weather, + config: ModelConfig | None = None, + human: HumanParams | None = None, + precomputed: PrecomputedData | None = None, + use_anisotropic_sky: bool | None = None, + conifer: bool = False, + poi_coords: list[tuple[int, int]] | None = None, + state: ThermalState | None = None, + physics: SimpleNamespace | None = None, + materials: SimpleNamespace | None = None, + wall_material: str | None = None, + max_shadow_distance_m: float | None = None, + return_state_copy: bool = True, + _requested_outputs: set[str] | None = None, +) -> SolweigResult: + """ + Calculate mean radiant temperature (Tmrt). + + This is the main entry point for SOLWEIG calculations. + + Args: + surface: Surface/terrain data (DSM required, CDSM/DEM optional). + location: Geographic location (lat, lon, UTC offset). + weather: Weather data (datetime, temperature, humidity, radiation). + config: Model configuration object providing base settings. + Explicit parameters (human, use_anisotropic_sky, etc.) override + config values when provided. + human: Human body parameters (absorption, posture, weight, height, etc.). + If None, uses config.human or HumanParams defaults. + precomputed: Pre-computed preprocessing data (walls, SVF, shadow matrices). Optional. + When provided, skips expensive preprocessing computations. + Use PrecomputedData.load() to load from directories. + use_anisotropic_sky: Use anisotropic sky model for radiation. + If None, uses config.use_anisotropic_sky (default True). + Requires precomputed.shadow_matrices to be provided. + Uses Perez diffuse model and patch-based longwave calculation. + conifer: Treat vegetation as evergreen conifers (always leaf-on). Default False. + When False, uses seasonal leaf on/off logic (deciduous trees). + When True, vegetation always has leaves (transmissivity constant). + Only relevant when CDSM (canopy) data is provided in surface. 
+ poi_coords: Optional list of (row, col) coordinates for POI mode. + If provided, only computes at these points (much faster). + state: Thermal state from previous timestep. Optional. + When provided, enables accurate multi-timestep simulation with + thermal inertia modeling (TsWaveDelay). The returned result + will include updated state for the next timestep. + physics: Physics parameters (Tree_settings, Posture geometry) from load_physics(). + Site-independent scientific constants. If None, uses config.physics or bundled defaults. + materials: Material properties (albedo, emissivity per landcover class) from load_materials(). + Site-specific landcover parameters. Only needed if surface has land_cover grid. + If None, uses config.materials. + wall_material: Wall material type for temperature model. + One of "brick", "concrete", "wood", "cobblestone" (case-insensitive). + If None (default), uses generic wall params from materials JSON. + return_state_copy: If True (default), return a deep-copied thermal state. + Set False in internal time-series loops to avoid per-step state copies. + + Returns: + SolweigResult with Tmrt and optionally UTCI/PET grids. + When state parameter is provided, result.state contains the + updated thermal state for the next timestep. 
+ + Example: + # Single timestep with all defaults + result = calculate( + surface=SurfaceData(dsm=my_dsm), + location=Location(latitude=57.7, longitude=12.0), + weather=Weather(datetime=dt, ta=25, rh=50, global_rad=800), + ) + + # Multi-timestep with state management + state = ThermalState.initial(dsm.shape) + for weather in weather_list: + result = calculate(surface, location, weather, state=state) + state = result.state # Carry forward to next timestep + + # With custom human parameters + result = calculate( + surface=surface, + location=location, + weather=weather, + human=HumanParams(abs_k=0.65, weight=70, height=1.65), + ) + + # With config as base, explicit param override + config = ModelConfig(use_anisotropic_sky=True) + result = calculate( + surface, location, weather, + config=config, + use_anisotropic_sky=False, # Explicit param wins + ) + """ + import logging + + logger = logging.getLogger(__name__) + + # Track whether anisotropic mode was explicitly requested by direct API arg. + # Config/default fallbacks intentionally do not trigger strict precondition + # failures because we cannot distinguish "config default" from a deliberate + # explicit request at runtime. 
+ anisotropic_requested_explicitly = use_anisotropic_sky is True + + # Build effective configuration: explicit params override config + # Config provides base values, explicit params take precedence + effective_aniso = use_anisotropic_sky + effective_human = human + effective_physics = physics + effective_materials = materials + effective_max_shadow = max_shadow_distance_m + + if config is not None: + # Use config values as fallback for None parameters + if effective_aniso is None: + effective_aniso = config.use_anisotropic_sky + if effective_human is None: + effective_human = config.human + if effective_physics is None: + effective_physics = config.physics + if effective_materials is None: + effective_materials = config.materials + if effective_max_shadow is None: + effective_max_shadow = config.max_shadow_distance_m + + # Debug log when explicit params override config + overrides = [] + if use_anisotropic_sky is not None and use_anisotropic_sky != config.use_anisotropic_sky: + overrides.append(f"use_anisotropic_sky={use_anisotropic_sky}") + if human is not None and config.human is not None: + overrides.append("human") + if physics is not None and config.physics is not None: + overrides.append("physics") + if materials is not None and config.materials is not None: + overrides.append("materials") + if overrides: + logger.debug(f"Explicit params override config: {', '.join(overrides)}") + + # Apply defaults for anything still None + if effective_aniso is None: + effective_aniso = True + if effective_human is None: + effective_human = HumanParams() + # Auto-load bundled UMEP JSON as default materials (single source of truth) + if effective_materials is None: + effective_materials = load_params() + + # Assign back to use in the rest of the function + use_anisotropic_sky = effective_aniso + human = effective_human + physics = effective_physics + materials = effective_materials + + # Use default human params if not provided + if human is None: + human = HumanParams() + 
+ # Load default physics if not provided + if physics is None: + physics = load_physics() + + # Compute derived weather values (sun position, radiation split) + if not weather._derived_computed: + weather.compute_derived(location) + + # Note: poi_coords parameter exists but POI mode not yet implemented + if poi_coords is not None: + raise NotImplementedError("POI mode (point-of-interest calculation) is planned for Phase 4") + + # Fill NaN in surface layers (idempotent — skipped if already done) + surface.fill_nan() + + # Explicit anisotropic requests must have shadow matrices available. + if anisotropic_requested_explicitly and use_anisotropic_sky: + has_shadow_matrices = (precomputed is not None and precomputed.shadow_matrices is not None) or ( + surface.shadow_matrices is not None + ) + if not has_shadow_matrices: + raise MissingPrecomputedData( + "shadow_matrices required for anisotropic sky model", + "Either set use_anisotropic_sky=False, or provide shadow matrices via " + "precomputed=PrecomputedData(shadow_matrices=...) or surface.shadow_matrices", + ) + + # Fused Rust pipeline — single FFI call per daytime timestep. + # Both isotropic and anisotropic sky models are supported. 
+ return calculate_core_fused( + surface=surface, + location=location, + weather=weather, + human=human, + precomputed=precomputed, + state=state, + physics=physics, + materials=materials, + conifer=conifer, + wall_material=wall_material, + use_anisotropic_sky=use_anisotropic_sky, + max_shadow_distance_m=effective_max_shadow, + return_state_copy=return_state_copy, + requested_outputs=_requested_outputs, + ) + + +# ============================================================================= +# Public API - All exports +# ============================================================================= + +__all__ = [ + # Main calculation functions + "calculate", + "calculate_timeseries", + "calculate_tiled", + "calculate_timeseries_tiled", + "validate_inputs", + # Dataclasses - Core inputs + "SurfaceData", + "Location", + "Weather", + "HumanParams", + # Dataclasses - Configuration + "ModelConfig", + "PrecomputedData", + "ThermalState", + "TileSpec", + # Dataclasses - Internal (for advanced use) + "SvfArrays", + "ShadowArrays", + # Results + "SolweigResult", + "Timeseries", + "TimeseriesSummary", + # Errors + "SolweigError", + "InvalidSurfaceData", + "GridShapeMismatch", + "MissingPrecomputedData", + "WeatherDataError", + "ConfigurationError", + # Post-processing + "compute_utci_grid", + "compute_pet_grid", + # Configuration loading + "load_params", + "load_physics", + "load_materials", + "resolve_wall_params", + # Metadata + "create_run_metadata", + "save_run_metadata", + "load_run_metadata", + # Tiling utilities + "calculate_buffer_distance", + "validate_tile_size", + "generate_tiles", + # I/O + "download_epw", + # Utilities + "dict_to_namespace", + "namespace_to_dict", + "extract_bounds", + "intersect_bounds", + "resample_to_grid", +] diff --git a/pysrc/solweig/buffers.py b/pysrc/solweig/buffers.py new file mode 100644 index 0000000..abf669b --- /dev/null +++ b/pysrc/solweig/buffers.py @@ -0,0 +1,246 @@ +""" +Pre-allocated buffer pools for reducing per-timestep memory 
class BufferPool:
    """
    Manages pre-allocated numpy arrays for reuse across timesteps.

    Reduces memory-allocation overhead during time series calculations by
    handing out the *same* named buffer on every request: each unique name
    maps to one persistent array of ``self.shape`` / ``self.dtype``.

    Attributes:
        shape: The 2D shape shared by all buffers in this pool.
        dtype: Data type for buffers (default: float32).
        _buffers: Mapping of buffer name -> pre-allocated array.

    Example:
        pool = BufferPool((1000, 1000))

        # First call allocates
        buf = pool.get_zeros("radiation_temp")

        # Second call reuses the same memory (zeroed)
        buf = pool.get_zeros("radiation_temp")
    """

    __slots__ = ("shape", "dtype", "_buffers")

    def __init__(
        self,
        shape: tuple[int, int],
        dtype: np.dtype | type = np.float32,
    ) -> None:
        """
        Initialize a buffer pool.

        Args:
            shape: 2D shape (rows, cols) for all buffers.
            dtype: NumPy dtype for buffers (default: float32).
        """
        self.shape = shape
        self.dtype = np.dtype(dtype)
        self._buffers: dict[str, NDArray[np.floating]] = {}

    def get(self, name: str) -> NDArray[np.floating]:
        """
        Get a buffer by name (uninitialized).

        Returns an uninitialized buffer - use this when you will overwrite
        all values anyway. Faster than get_zeros().

        Args:
            name: Unique identifier for this buffer.

        Returns:
            Pre-allocated array (contents undefined).
        """
        if name not in self._buffers:
            self._buffers[name] = np.empty(self.shape, dtype=self.dtype)
        return self._buffers[name]

    def get_zeros(self, name: str) -> NDArray[np.floating]:
        """
        Get a zeroed buffer by name.

        Use this when you need a clean slate for accumulation operations.

        Args:
            name: Unique identifier for this buffer.

        Returns:
            Pre-allocated array filled with zeros.
        """
        buf = self.get(name)
        buf.fill(0.0)
        return buf

    def get_full(self, name: str, fill_value: float) -> NDArray[np.floating]:
        """
        Get a buffer filled with a specific value.

        Args:
            name: Unique identifier for this buffer.
            fill_value: Value to fill the buffer with.

        Returns:
            Pre-allocated array filled with fill_value.
        """
        buf = self.get(name)
        buf.fill(fill_value)
        return buf

    def ensure_float32(
        self,
        arr: NDArray,
        name: str | None = None,
    ) -> NDArray[np.float32]:
        """
        Ensure array is float32, using a pooled buffer if conversion is needed.

        If the array is already float32, returns it unchanged (no copy).
        If conversion is needed and a name is provided, uses a pooled buffer;
        otherwise falls back to a regular astype() copy.

        Args:
            arr: Input array (any dtype).
            name: Optional buffer name for pooled conversion.

        Returns:
            Array with float32 dtype (may be the same object if already float32).
        """
        if arr.dtype == np.float32:
            return arr

        # The pooled fast path is only valid when the pool itself stores
        # float32; otherwise the returned buffer would silently carry the
        # pool's dtype and break this method's float32 contract.
        if name is not None and arr.shape == self.shape and self.dtype == np.float32:
            buf = self.get(name)
            np.copyto(buf, arr, casting="unsafe")
            return buf

        return arr.astype(np.float32)

    def clear(self) -> None:
        """
        Clear all buffers from the pool.

        Call this to release memory when done with a calculation series.
        """
        self._buffers.clear()

    @property
    def num_buffers(self) -> int:
        """Number of buffers currently allocated."""
        return len(self._buffers)

    @property
    def memory_bytes(self) -> int:
        """Total memory used by all buffers in bytes (all buffers share self.shape)."""
        if not self._buffers:
            return 0
        return len(self._buffers) * self.shape[0] * self.shape[1] * self.dtype.itemsize

    def __repr__(self) -> str:
        mb = self.memory_bytes / (1024 * 1024)
        return f"BufferPool(shape={self.shape}, dtype={self.dtype}, buffers={self.num_buffers}, memory={mb:.1f}MB)"


class TimestepBuffers:
    """
    Context manager for timestep-scoped buffer reuse.

    Provides a convenient way to reuse buffers within a single timestep
    calculation without polluting the namespace; the pool is cleared when
    the context exits.

    Usage:
        with TimestepBuffers((1000, 1000)) as buffers:
            temp1 = buffers.get_zeros("radiation")
            temp2 = buffers.get_zeros("shadow")
            # ... use buffers ...
        # Buffers are cleared when exiting context
    """

    __slots__ = ("pool",)

    def __init__(self, shape: tuple[int, int], dtype: np.dtype | type = np.float32):
        self.pool = BufferPool(shape, dtype)

    def __enter__(self) -> BufferPool:
        return self.pool

    def __exit__(self, exc_type, exc_val, exc_tb) -> None:
        # Release all pooled memory on scope exit; exceptions propagate.
        self.pool.clear()
        return None


def ensure_float32_inplace(arr: NDArray) -> NDArray[np.float32]:
    """
    Return a float32 view of the data without copying when possible.

    Args:
        arr: Input array.

    Returns:
        Float32 array (same object if already float32, new array otherwise).
    """
    if arr.dtype == np.float32:
        return arr
    return arr.astype(np.float32)


def as_float32(arr: NDArray) -> NDArray[np.float32]:
    """
    Ensure array is float32, avoiding a copy if already the correct dtype.

    Shorthand for ensure_float32_inplace() - use this in component code
    to replace `.astype(np.float32)` calls where the array might already
    be float32.

    Args:
        arr: Input array (any dtype).

    Returns:
        Float32 array (same object if already float32, copy otherwise).

    Example:
        # Instead of:
        svf.astype(np.float32)  # Always copies

        # Use:
        as_float32(svf)  # Only copies if needed
    """
    # Delegate to the canonical implementation so the two helpers never drift.
    return ensure_float32_inplace(arr)
@dataclass
class SvfBundle:
    """
    Sky View Factor computation results.

    Groups all SVF-related arrays to simplify passing to radiation calculations.

    Attributes:
        svf: Total sky view factor (0-1)
        svf_directional: Directional SVF components (N, E, S, W)
        svf_veg: Vegetation-only SVF
        svf_veg_directional: Directional vegetation SVF (N, E, S, W)
        svf_aveg: SVF above vegetation (building shadow on veg)
        svf_aveg_directional: Directional SVF above vegetation (N, E, S, W)
        svfbuveg: Combined SVF accounting for vegetation transmissivity
        svfalfa: Angular factor from SVF (for anisotropic calculations)
    """

    svf: NDArray[np.floating]
    svf_directional: DirectionalArrays
    svf_veg: NDArray[np.floating]
    svf_veg_directional: DirectionalArrays
    svf_aveg: NDArray[np.floating]
    svf_aveg_directional: DirectionalArrays
    svfbuveg: NDArray[np.floating]
    svfalfa: NDArray[np.floating]


@dataclass
class ShadowBundle:
    """
    Shadow computation results.

    Attributes:
        shadow: Combined shadow fraction (1=sunlit, 0=shaded)
        bldg_sh: Building shadow only
        veg_sh: Vegetation shadow only
        wallsun: Wall sun exposure (for wall temperature)
        psi: Vegetation transmissivity used (for reference)
    """

    shadow: NDArray[np.floating]
    bldg_sh: NDArray[np.floating]
    veg_sh: NDArray[np.floating]
    wallsun: NDArray[np.floating]
    psi: float


@dataclass
class GroundBundle:
    """
    Ground temperature model outputs.

    Results from the ground temperature computation, including
    spatially-varying surface properties.

    Attributes:
        tg: Ground temperature deviation from air temperature (K or °C)
        tg_wall: Wall temperature deviation from air temperature
        ci_tg: Clearness index correction factor
        alb_grid: Albedo per pixel (0-1)
        emis_grid: Emissivity per pixel (0-1)
    """

    tg: NDArray[np.floating]
    tg_wall: float
    ci_tg: float
    alb_grid: NDArray[np.floating]
    emis_grid: NDArray[np.floating]


@dataclass
class GvfBundle:
    """
    Ground View Factor computation results.

    Includes upwelling longwave radiation components before thermal delay
    and albedo view factors for reflected shortwave radiation.

    Attributes:
        lup: Upwelling longwave radiation (W/m²)
        lup_e: Upwelling longwave from east
        lup_s: Upwelling longwave from south
        lup_w: Upwelling longwave from west
        lup_n: Upwelling longwave from north
        gvfalb: Ground view factor × albedo (for Kup calculation)
        gvfalb_e: GVF × albedo from east
        gvfalb_s: GVF × albedo from south
        gvfalb_w: GVF × albedo from west
        gvfalb_n: GVF × albedo from north
        gvfalbnosh: GVF × albedo without shadow (for anisotropic)
        gvfalbnosh_e: GVF × albedo (no shadow) from east
        gvfalbnosh_s: GVF × albedo (no shadow) from south
        gvfalbnosh_w: GVF × albedo (no shadow) from west
        gvfalbnosh_n: GVF × albedo (no shadow) from north
    """

    lup: NDArray[np.floating]
    lup_e: NDArray[np.floating]
    lup_s: NDArray[np.floating]
    lup_w: NDArray[np.floating]
    lup_n: NDArray[np.floating]
    gvfalb: NDArray[np.floating]
    gvfalb_e: NDArray[np.floating]
    gvfalb_s: NDArray[np.floating]
    gvfalb_w: NDArray[np.floating]
    gvfalb_n: NDArray[np.floating]
    gvfalbnosh: NDArray[np.floating]
    gvfalbnosh_e: NDArray[np.floating]
    gvfalbnosh_s: NDArray[np.floating]
    gvfalbnosh_w: NDArray[np.floating]
    gvfalbnosh_n: NDArray[np.floating]


@dataclass
class LupBundle:
    """
    Upwelling longwave radiation with thermal state.

    Results after applying the TsWaveDelay thermal inertia model.
    Includes the updated thermal state for the next timestep.

    Attributes:
        lup: Final upwelling longwave (center view) after thermal delay
        lup_e: Final upwelling longwave from east
        lup_s: Final upwelling longwave from south
        lup_w: Final upwelling longwave from west
        lup_n: Final upwelling longwave from north
        state: Updated thermal state to carry forward to next timestep
    """

    lup: NDArray[np.floating]
    lup_e: NDArray[np.floating]
    lup_s: NDArray[np.floating]
    lup_w: NDArray[np.floating]
    lup_n: NDArray[np.floating]
    state: ThermalState | None  # Forward reference to avoid circular import


@dataclass
class RadiationBundle:
    """
    Radiation calculation outputs.

    Complete radiation budget including shortwave and longwave components.

    Attributes:
        kdown: Downwelling shortwave radiation (W/m²)
        kup: Upwelling shortwave radiation (W/m²)
        ldown: Downwelling longwave radiation (W/m²)
        lup: Upwelling longwave radiation (W/m²)
        kside: Shortwave radiation from 4 directions (W/m²)
        lside: Longwave radiation from 4 directions (W/m²)
        kside_total: Total shortwave on vertical surface (for anisotropic Tmrt)
        lside_total: Total longwave on vertical surface (for anisotropic Tmrt)
        drad: Diffuse radiation term (for Tmrt calculation)
    """

    kdown: NDArray[np.floating]
    kup: NDArray[np.floating]
    ldown: NDArray[np.floating]
    lup: NDArray[np.floating]
    kside: DirectionalArrays
    lside: DirectionalArrays
    kside_total: NDArray[np.floating]
    lside_total: NDArray[np.floating]
    drad: NDArray[np.floating]


@dataclass
class WallBundle:
    """
    Wall geometry data.

    Wall heights and aspects needed for shadow calculation and wall temperature.

    Attributes:
        wall_height: Wall height at each pixel (meters)
        wall_aspect: Wall orientation at each pixel (degrees, 0=North)
    """

    wall_height: NDArray[np.floating]
    wall_aspect: NDArray[np.floating]


@dataclass
class VegetationBundle:
    """
    Vegetation geometry data.

    Vegetation heights needed for shadow and SVF calculations.

    Attributes:
        cdsm: Canopy Digital Surface Model (vegetation heights)
        tdsm: Trunk Digital Surface Model (trunk zone heights)
        bush: Bush/shrub layer (boolean or height)
    """

    cdsm: NDArray[np.floating] | None
    tdsm: NDArray[np.floating] | None
    bush: NDArray[np.floating] | None


__all__ = [
    "DirectionalArrays",
    "SvfBundle",
    "ShadowBundle",
    "GroundBundle",
    "GvfBundle",
    "LupBundle",
    "RadiationBundle",
    "WallBundle",
    "VegetationBundle",
]
@dataclass
class CacheMetadata:
    """Fingerprint of the inputs a cache directory was built from."""

    dsm_hash: str
    dsm_shape: tuple[int, int]
    pixel_size: float
    cdsm_hash: str | None = None
    version: str = "1.0"

    def to_dict(self) -> dict:
        """Convert to dictionary for JSON serialization."""
        payload = {"version": self.version}
        payload["dsm_hash"] = self.dsm_hash
        payload["dsm_shape"] = list(self.dsm_shape)
        payload["pixel_size"] = self.pixel_size
        payload["cdsm_hash"] = self.cdsm_hash
        return payload

    @classmethod
    def from_dict(cls, data: dict) -> CacheMetadata:
        """Create from dictionary (raises KeyError on missing required keys)."""
        shape = tuple(data["dsm_shape"])
        return cls(
            dsm_hash=data["dsm_hash"],
            dsm_shape=shape,
            pixel_size=data["pixel_size"],
            cdsm_hash=data.get("cdsm_hash"),
            version=data.get("version", "1.0"),
        )

    @classmethod
    def from_arrays(
        cls,
        dsm: np.ndarray,
        pixel_size: float,
        cdsm: np.ndarray | None = None,
    ) -> CacheMetadata:
        """Create metadata by hashing the current input arrays."""
        cdsm_digest = None if cdsm is None else compute_array_hash(cdsm)
        return cls(
            dsm_hash=compute_array_hash(dsm),
            dsm_shape=(dsm.shape[0], dsm.shape[1]),
            pixel_size=pixel_size,
            cdsm_hash=cdsm_digest,
        )

    def matches(self, other: CacheMetadata) -> bool:
        """Check if this metadata matches another (pixel size within 1 mm)."""
        if self.dsm_hash != other.dsm_hash:
            return False
        if self.dsm_shape != other.dsm_shape:
            return False
        if abs(self.pixel_size - other.pixel_size) >= 0.001:
            return False
        return self.cdsm_hash == other.cdsm_hash

    def save(self, directory: Path) -> None:
        """Save metadata to the cache directory as JSON."""
        target = directory / CACHE_METADATA_FILE
        target.write_text(json.dumps(self.to_dict(), indent=2))

    @classmethod
    def load(cls, directory: Path) -> CacheMetadata | None:
        """Load metadata from a cache directory. Returns None if not found."""
        source = directory / CACHE_METADATA_FILE
        if not source.exists():
            return None
        try:
            return cls.from_dict(json.loads(source.read_text()))
        except (json.JSONDecodeError, KeyError) as e:
            logger.warning(f"Failed to load cache metadata: {e}")
            return None


def validate_cache(
    cache_dir: Path,
    dsm: np.ndarray,
    pixel_size: float,
    cdsm: np.ndarray | None = None,
) -> bool:
    """
    Validate that cached data matches current inputs.

    Args:
        cache_dir: Directory containing cached data.
        dsm: Current DSM array.
        pixel_size: Current pixel size.
        cdsm: Current CDSM array (optional).

    Returns:
        True if cache is valid, False if stale or missing.
    """
    cached = CacheMetadata.load(cache_dir)
    if cached is None:
        logger.debug(f"No cache metadata found in {cache_dir}")
        return False

    current = CacheMetadata.from_arrays(dsm, pixel_size, cdsm)
    if not cached.matches(current):
        # Log enough detail to diagnose why the cache was invalidated.
        logger.info(f"Cache stale (input changed): {cache_dir}")
        logger.debug(f"  Stored: dsm_hash={cached.dsm_hash}, shape={cached.dsm_shape}")
        logger.debug(f"  Current: dsm_hash={current.dsm_hash}, shape={current.dsm_shape}")
        return False

    logger.debug(f"Cache validated: {cache_dir}")
    return True


def clear_stale_cache(cache_dir: Path) -> None:
    """
    Remove stale cache files from a directory.

    Deletes all .npy files, the metadata file, and any subdirectories.
    """
    if not cache_dir.exists():
        return

    import shutil

    for entry in cache_dir.iterdir():
        if entry.is_dir():
            shutil.rmtree(entry)
        elif entry.is_file() and (entry.suffix == ".npy" or entry.name == CACHE_METADATA_FILE):
            entry.unlink()

    logger.info(f"Cleared stale cache: {cache_dir}")
def compute_ground_temperature(
    weather: Weather,
    location: Location,
    alb_grid: NDArray[np.floating],
    emis_grid: NDArray[np.floating],
    tgk_grid: NDArray[np.floating],
    tstart_grid: NDArray[np.floating],
    tmaxlst_grid: NDArray[np.floating],
    *,
    tgk_wall: float | None = None,
    tstart_wall: float | None = None,
    tmaxlst_wall: float | None = None,
) -> GroundBundle:
    """
    Compute ground and wall temperature deviations from air temperature.

    Uses the SOLWEIG TgMaps model with land-cover-specific parameterization.
    Temperature amplitude depends on max sun altitude and land cover type.
    Clearness index correction accounts for reduced heating under cloudy skies.

    Args:
        weather: Weather data including temperature, radiation, sun position
        location: Geographic location (latitude, longitude) for sunrise calculation
        alb_grid: Albedo per pixel (0-1) from land cover properties
        emis_grid: Emissivity per pixel (0-1) from land cover properties
        tgk_grid: TgK parameter per pixel (temperature gain coefficient)
        tstart_grid: Tstart parameter per pixel (temperature baseline offset)
        tmaxlst_grid: TmaxLST parameter per pixel (hour of maximum temperature)
        tgk_wall: Optional wall TgK parameter. If None, the Rust side applies
            its default (per the original docstring, cobblestone 0.37).
        tstart_wall: Optional wall Tstart parameter. If None, default -3.41.
        tmaxlst_wall: Optional wall TmaxLST parameter. If None, default 15.0.

    Returns:
        GroundBundle containing:
        - tg: Ground temperature deviation from air temperature (K)
        - tg_wall: Wall temperature deviation from air temperature (K)
        - ci_tg: Clearness index correction factor (0-1)
        - alb_grid: Albedo grid (passed through for convenience)
        - emis_grid: Emissivity grid (passed through for convenience)

    Reference:
        Lindberg et al. (2008): "Urban Multi-scale Environmental Predictor (UMEP)"
        Formula: Tgamp = TgK * altmax + Tstart
        Tg = Tgamp * sin(phase * pi/2) * CI_TgG
    """
    # Imported lazily: rustalgos is a compiled extension and importing it at
    # module load time would create a circular/heavyweight dependency.
    from ..rustalgos import ground as ground_rust

    # Day of year and sunrise time (snup) from the daylength helper
    jday = weather.datetime.timetuple().tm_yday
    _, _, _, snup = daylen(jday, location.latitude)

    # Maximum sun altitude for the day (computed in Weather.compute_derived())
    altmax = weather.altmax

    # Decimal time (fraction of day)
    dectime = (weather.datetime.hour + weather.datetime.minute / 60.0) / 24.0

    # CI_TgG correction for non-clear conditions (Lindberg et al. 2008, Reindl et al. 1990)
    # This accounts for reduced ground heating under cloudy skies
    # Full formula from solweig.py: CI_TgG = (radG / radG0) + (1 - corr)
    zen = (90.0 - weather.sun_altitude) * (np.pi / 180.0)  # zenith in radians
    deg2rad = np.pi / 180.0

    # Get clear sky radiation (I0) from clearnessindex function
    # NOTE(review): altitude is hard-coded to 0.0 here and the last positional
    # argument is a -999.0 sentinel — presumably "no value"; confirm against
    # clearnessindex_2013b's signature.
    location_dict = {"latitude": location.latitude, "longitude": location.longitude, "altitude": 0.0}
    i0, _, _, _, _ = clearnessindex_2013b(
        zen, jday, weather.ta, weather.rh / 100.0, weather.global_rad, location_dict, -999.0
    )

    # Calculate clear sky direct and diffuse components (only when the sun is up
    # and the clear-sky irradiance is positive; otherwise no ground heating term)
    if i0 > 0 and weather.sun_altitude > 0:
        rad_i0, rad_d0 = diffusefraction(i0, weather.sun_altitude, 1.0, weather.ta, weather.rh)
        # Clear sky global horizontal radiation = direct projected + diffuse
        rad_g0 = rad_i0 * np.sin(weather.sun_altitude * deg2rad) + rad_d0
    else:
        rad_g0 = 0.0

    # Zenith angle in degrees
    zen_deg = 90.0 - weather.sun_altitude

    # Call Rust implementation (positional args to match Rust signature —
    # do not reorder; float32 casts match the FFI's expected dtype)
    tg, tg_wall, ci_tg, alb_grid_out, emis_grid_out = ground_rust.compute_ground_temperature(
        weather.ta,
        weather.sun_altitude,
        altmax,
        dectime,
        snup,
        weather.global_rad,
        rad_g0,
        zen_deg,
        alb_grid.astype(np.float32),
        emis_grid.astype(np.float32),
        tgk_grid.astype(np.float32),
        tstart_grid.astype(np.float32),
        tmaxlst_grid.astype(np.float32),
        tgk_wall=tgk_wall,
        tstart_wall=tstart_wall,
        tmaxlst_wall=tmaxlst_wall,
    )

    return GroundBundle(
        tg=tg,
        tg_wall=float(tg_wall),
        ci_tg=float(ci_tg),
        alb_grid=alb_grid_out,
        emis_grid=emis_grid_out,
    )
+ +The GVF represents how much a person at a given height "sees" the ground and walls +versus the sky. This determines the thermal radiation received from below and sides. + +Reference: +- Lindberg et al. (2008) - SOLWEIG GVF model with wall radiation +""" + +from __future__ import annotations + +from typing import TYPE_CHECKING + +import numpy as np + +from ..buffers import as_float32 +from ..bundles import GvfBundle +from ..constants import KELVIN_OFFSET, SBC +from ..physics.morphology import generate_binary_structure + +try: + from ..rustalgos import morphology as _rust_morph + + def _binary_dilation(input_array, structure, iterations): + return _rust_morph.binary_dilation( + input_array.astype(np.uint8), + structure.astype(np.uint8), + iterations, + ).astype(bool) +except ImportError: + from ..physics.morphology import binary_dilation as _binary_dilation + +if TYPE_CHECKING: + from numpy.typing import NDArray + + from ..api import HumanParams, SurfaceData, Weather + + +def detect_building_mask( + dsm: NDArray[np.floating], + land_cover: NDArray[np.integer] | None, + wall_height: NDArray[np.floating] | None, + pixel_size: float, +) -> NDArray[np.floating]: + """ + Create a building mask for GVF calculation. + + GVF (Ground View Factor) expects: 0=building, 1=ground. + This is used to normalize GVF values over buildings where GVF doesn't apply. + + Args: + dsm: Digital Surface Model array. + land_cover: Optional land cover grid (UMEP standard: ID 2 = buildings). + wall_height: Optional wall height grid. + pixel_size: Pixel size in meters. + + Returns: + Building mask where 0=building pixels, 1=ground pixels. + + Detection strategy: + 1. If land_cover provided: Use ID 2 (buildings) directly. + 2. Elif wall_height provided: Dilate wall pixels to fill building + interiors, then combine with pixels elevated >2 m above the + 10th-percentile ground level (catches rooftops). + 3. Else: Assume all ground (no buildings). 
+ """ + if land_cover is not None: + # Use land cover directly: ID 2 = buildings + buildings = np.ones_like(dsm, dtype=np.float32) + buildings[land_cover == 2] = 0.0 + return buildings + + if wall_height is not None: + # Approximate building footprints from wall heights + # Wall pixels mark building edges; dilate to capture interiors + wall_mask = wall_height > 0 + + # Dilate to capture building interiors (typical building width up to 50m) + struct = generate_binary_structure(2, 2) # 8-connectivity + iterations = int(25 / pixel_size) + 1 + dilated = _binary_dilation(wall_mask, struct, iterations=iterations) + + # Also detect elevated areas (building roofs) + ground_level = np.nanpercentile(dsm[~wall_mask], 10) if np.any(~wall_mask) else np.nanmin(dsm) + elevated = dsm > (ground_level + 2.0) # At least 2m above ground + + # Combine: building pixels where either dilated walls OR elevated flat areas + is_building = dilated | (elevated & ~np.isnan(dsm)) + + # Invert: 0=building, 1=ground + return (~is_building).astype(np.float32) + + # No building info available - assume all ground + return np.ones_like(dsm, dtype=np.float32) + + +def compute_gvf( + surface: SurfaceData, + weather: Weather, + human: HumanParams, + tg: NDArray[np.floating], + tg_wall: float, + shadow: NDArray[np.floating], + wallsun: NDArray[np.floating], + alb_grid: NDArray[np.floating], + emis_grid: NDArray[np.floating], + svf: NDArray[np.floating], + pixel_size: float, + wall_ht: NDArray[np.floating] | None = None, + wall_asp: NDArray[np.floating] | None = None, +) -> GvfBundle: + """ + Compute Ground View Factor for upwelling longwave and albedo components. + + GVF represents how much a person "sees" the ground and walls from a given height. + This determines thermal radiation received from surrounding surfaces. 
def compute_gvf(
    surface: SurfaceData,
    weather: Weather,
    human: HumanParams,
    tg: NDArray[np.floating],
    tg_wall: float,
    shadow: NDArray[np.floating],
    wallsun: NDArray[np.floating],
    alb_grid: NDArray[np.floating],
    emis_grid: NDArray[np.floating],
    svf: NDArray[np.floating],
    pixel_size: float,
    wall_ht: NDArray[np.floating] | None = None,
    wall_asp: NDArray[np.floating] | None = None,
) -> GvfBundle:
    """
    Compute Ground View Factor for upwelling longwave and albedo components.

    GVF represents how much a person "sees" the ground and walls from a given height.
    This determines thermal radiation received from surrounding surfaces.

    Args:
        surface: Surface data (DSM, land cover)
        weather: Weather data (temperature)
        human: Human parameters (height, posture)
        tg: Ground temperature deviation from air temperature (K)
        tg_wall: Wall temperature deviation from air temperature (K)
        shadow: Combined shadow fraction (1=sunlit, 0=shaded)
        wallsun: Wall sun exposure (for wall temperature)
        alb_grid: Albedo per pixel (0-1)
        emis_grid: Emissivity per pixel (0-1)
        svf: Sky view factor (for simplified GVF when no walls)
        pixel_size: Grid resolution in meters
        wall_ht: Wall heights (optional, for full GVF with walls)
        wall_asp: Wall aspects in degrees (optional, for full GVF with walls)

    Returns:
        GvfBundle containing:
        - lup_*: Upwelling longwave from 5 directions (center, N, E, S, W)
        - gvfalb_*: Ground view factor × albedo (for reflected shortwave)
        - gvfalbnosh_*: GVF × albedo without shadow (for anisotropic)

    Reference:
        Lindberg et al. (2008) - SOLWEIG model equations for GVF calculation
    """
    # Import here to avoid circular dependency
    from ..rustalgos import gvf as gvf_module

    # Full wall-aware GVF needs both wall heights and aspects.
    has_walls = wall_ht is not None and wall_asp is not None

    # Human height parameters for GVF (matching runner: first=round(height), second=round(height*20))
    first = np.round(human.height)
    if first == 0.0:
        first = 1.0
    second = np.round(human.height * 20.0)

    # Building mask for GVF calculation (0=building, 1=ground)
    buildings = detect_building_mask(
        surface.dsm,
        surface.land_cover,
        wall_ht if has_walls else None,
        pixel_size,
    )

    # Wall properties (from SOLWEIG parameters)
    albedo_wall = 0.20
    emis_wall = 0.90

    # Land cover settings for gvf_calc
    use_landcover = surface.land_cover is not None
    lc_grid = surface.land_cover.astype(np.float32) if surface.land_cover is not None else None

    if has_walls:
        # Type narrowing - wall_ht and wall_asp are not None when has_walls is True
        assert wall_ht is not None
        assert wall_asp is not None
        # Use full GVF calculation with wall radiation

        # Create parameter struct (reduces 20 params to 11)
        gvf_params = gvf_module.GvfScalarParams(
            scale=pixel_size,
            first=first,
            second=second,
            tgwall=tg_wall,
            ta=weather.ta,
            ewall=emis_wall,
            sbc=SBC,
            albedo_b=albedo_wall,
            twater=weather.ta,  # Twater = Ta (approximation for water temperature)
            landcover=use_landcover,
        )
        # NOTE: positional order must match the Rust gvf_calc signature.
        gvf_result = gvf_module.gvf_calc(
            as_float32(wallsun),
            as_float32(wall_ht),
            as_float32(buildings),
            as_float32(shadow),
            as_float32(wall_asp),
            as_float32(tg),
            as_float32(emis_grid),
            as_float32(alb_grid),
            lc_grid,
            gvf_params,
        )

        # Extract GVF results (copy out of the FFI result object)
        lup = np.array(gvf_result.gvf_lup)
        lup_e = np.array(gvf_result.gvf_lup_e)
        lup_s = np.array(gvf_result.gvf_lup_s)
        lup_w = np.array(gvf_result.gvf_lup_w)
        lup_n = np.array(gvf_result.gvf_lup_n)
        gvfalb = np.array(gvf_result.gvfalb)
        gvfalb_e = np.array(gvf_result.gvfalb_e)
        gvfalb_s = np.array(gvf_result.gvfalb_s)
        gvfalb_w = np.array(gvf_result.gvfalb_w)
        gvfalb_n = np.array(gvf_result.gvfalb_n)
        gvfalbnosh = np.array(gvf_result.gvfalbnosh)
        gvfalbnosh_e = np.array(gvf_result.gvfalbnosh_e)
        gvfalbnosh_s = np.array(gvf_result.gvfalbnosh_s)
        gvfalbnosh_w = np.array(gvf_result.gvfalbnosh_w)
        gvfalbnosh_n = np.array(gvf_result.gvfalbnosh_n)
    else:
        # Simplified GVF (no walls)
        # Ground view factor is complement of sky view factor
        gvf_simple = 1.0 - svf

        # Ground temperature with shadow effect
        # Convention: shadow=1 for sunlit, shadow=0 for shaded
        # Sunlit areas get full ground temperature deviation; shaded areas get none
        tg_with_shadow = tg * shadow

        # Upwelling longwave: Stefan-Boltzmann law for ground emission
        # Lup = emissivity × SBC × T^4
        lup = emis_grid * SBC * np.power(weather.ta + tg_with_shadow + KELVIN_OFFSET, 4)

        # Simplified: assume isotropic (all directions same)
        lup_e = lup
        lup_s = lup
        lup_w = lup
        lup_n = lup

        # Albedo view factors for reflected shortwave
        gvfalb = alb_grid * gvf_simple
        gvfalb_e = gvfalb
        gvfalb_s = gvfalb
        gvfalb_w = gvfalb
        gvfalb_n = gvfalb

        # Without shadow (for anisotropic calculations)
        gvfalbnosh = alb_grid
        gvfalbnosh_e = alb_grid
        gvfalbnosh_s = alb_grid
        gvfalbnosh_w = alb_grid
        gvfalbnosh_n = alb_grid

    return GvfBundle(
        lup=as_float32(lup),
        lup_e=as_float32(lup_e),
        lup_s=as_float32(lup_s),
        lup_w=as_float32(lup_w),
        lup_n=as_float32(lup_n),
        gvfalb=as_float32(gvfalb),
        gvfalb_e=as_float32(gvfalb_e),
        gvfalb_s=as_float32(gvfalb_s),
        gvfalb_w=as_float32(gvfalb_w),
        gvfalb_n=as_float32(gvfalb_n),
        gvfalbnosh=as_float32(gvfalbnosh),
        gvfalbnosh_e=as_float32(gvfalbnosh_e),
        gvfalbnosh_s=as_float32(gvfalbnosh_s),
        gvfalbnosh_w=as_float32(gvfalbnosh_w),
        gvfalbnosh_n=as_float32(gvfalbnosh_n),
    )
def compute_radiation(
    weather: Weather,
    svf_bundle: SvfBundle,
    shadow_bundle: ShadowBundle,
    gvf_bundle: GvfBundle,
    lup_bundle: LupBundle,
    human: HumanParams,
    use_anisotropic_sky: bool,
    precomputed: PrecomputedData | None,
    albedo_wall: float = 0.20,
    emis_wall: float = 0.90,
    tg_wall: float = 0.0,
) -> RadiationBundle:
    """
    Compute radiation budget for Tmrt calculation.

    Computes complete shortwave and longwave radiation fluxes from all directions,
    accounting for sky, ground, walls, and vegetation effects.

    Args:
        weather: Weather data (temperature, humidity, radiation, sun position)
        svf_bundle: Sky view factors with directional components
        shadow_bundle: Shadow fractions and vegetation transmissivity
        gvf_bundle: Ground view factors and albedo components
        lup_bundle: Upwelling longwave after thermal delay
        human: Human parameters (height, posture, absorptivities)
        use_anisotropic_sky: Use anisotropic (Perez) diffuse model if shadow matrices available
        precomputed: Optional pre-computed shadow matrices for anisotropic model
        albedo_wall: Wall albedo (default 0.20 for cobblestone)
        emis_wall: Wall emissivity (default 0.90 for brick/concrete)
        tg_wall: Wall temperature deviation from air temperature (K)

    Returns:
        RadiationBundle containing:
        - kdown: Downwelling shortwave (W/m²)
        - kup: Upwelling shortwave (reflected from ground)
        - ldown: Downwelling longwave (sky + wall emission)
        - lup: Upwelling longwave (from lup_bundle after thermal delay)
        - kside: Directional shortwave components (N, E, S, W)
        - lside: Directional longwave components (N, E, S, W)
        - kside_direct: Direct beam on vertical surface (for anisotropic)
        - drad: Diffuse radiation term (anisotropic or isotropic)

    Reference:
        - Lindberg et al. (2008) - SOLWEIG radiation model equations
        - Perez et al. (1993) - Anisotropic sky model
        - Jonsson et al. (2006) - Longwave radiation formulation
    """
    # Import here to avoid circular dependency
    from ..rustalgos import sky, vegetation

    # Sky emissivity (Jonsson et al. 2006)
    # ea: Magnus-type saturation vapour pressure scaled by RH — presumably hPa
    # given the 6.107 constant; TODO confirm units against the reference.
    ta_k = weather.ta + KELVIN_OFFSET
    ea = 6.107 * 10 ** ((7.5 * weather.ta) / (237.3 + weather.ta)) * (weather.rh / 100.0)
    msteg = 46.5 * (ea / ta_k)
    esky = 1 - (1 + msteg) * np.exp(-np.sqrt(1.2 + 3.0 * msteg))

    # View factors depend on posture (standing vs sitting projection areas)
    # The _f_up/_f_side values are intentionally unused here (noqa: F841);
    # kept for parity with the reference implementation.
    cyl = human.posture == "standing"
    if cyl:
        _f_up = F_UP_STANDING  # noqa: F841
        _f_side = F_SIDE_STANDING  # noqa: F841
    else:
        _f_up = F_UP_SITTING  # noqa: F841
        _f_side = F_SIDE_SITTING  # noqa: F841

    # Shortwave radiation components
    sin_alt = np.sin(np.radians(weather.sun_altitude))
    rad_i = weather.direct_rad
    rad_d = weather.diffuse_rad
    rad_g = weather.global_rad

    # Extract SVF components
    svf = svf_bundle.svf
    svf_directional = svf_bundle.svf_directional
    svf_veg = svf_bundle.svf_veg
    svf_veg_directional = svf_bundle.svf_veg_directional
    svf_aveg = svf_bundle.svf_aveg
    svf_aveg_directional = svf_bundle.svf_aveg_directional
    svfbuveg = svf_bundle.svfbuveg
    svfalfa = svf_bundle.svfalfa

    # Extract shadow components
    shadow = shadow_bundle.shadow
    psi = shadow_bundle.psi

    # Check if anisotropic sky model should be used
    has_shadow_matrices = precomputed is not None and precomputed.shadow_matrices is not None
    use_aniso = use_anisotropic_sky and has_shadow_matrices

    # Compute F_sh (fraction shadow on building walls based on sun altitude and SVF)
    zen = weather.sun_zenith * (np.pi / 180.0)  # Convert to radians for cylindric_wedge
    f_sh = _sky.cylindric_wedge(float(zen), as_float32(svfalfa))
    f_sh = np.nan_to_num(f_sh, nan=0.5)

    # Compute Kup (ground-reflected shortwave) using full directional model
    # NOTE(review): Kup_veg_2015a (and Perez_v3 below) are None when the
    # optional `umep` package is absent — this call would raise TypeError.
    # Confirm callers require umep for this reference path.
    kup, kup_e, kup_s, kup_w, kup_n = Kup_veg_2015a(
        rad_i,
        rad_d,
        rad_g,
        weather.sun_altitude,
        svfbuveg,
        albedo_wall,
        f_sh,
        gvf_bundle.gvfalb,
        gvf_bundle.gvfalb_e,
        gvf_bundle.gvfalb_s,
        gvf_bundle.gvfalb_w,
        gvf_bundle.gvfalb_n,
        gvf_bundle.gvfalbnosh,
        gvf_bundle.gvfalbnosh_e,
        gvf_bundle.gvfalbnosh_s,
        gvf_bundle.gvfalbnosh_w,
        gvf_bundle.gvfalbnosh_n,
    )

    # Compute diffuse radiation and directional shortwave
    if use_aniso:
        # Type narrowing - precomputed and shadow_matrices are not None when use_aniso is True
        assert precomputed is not None
        assert precomputed.shadow_matrices is not None
        # Anisotropic Diffuse Radiation after Perez et al. 1993
        shadow_mats = precomputed.shadow_matrices
        patch_option = shadow_mats.patch_option
        jday = weather.datetime.timetuple().tm_yday

        # Get Perez luminance distribution
        lv, _, _ = Perez_v3(
            weather.sun_zenith,
            weather.sun_azimuth,
            rad_d,
            rad_i,
            jday,
            patchchoice=1,
            patch_option=patch_option,
        )

        # Get diffuse shadow matrix (accounts for vegetation transmissivity)
        diffsh = shadow_mats.diffsh(psi, use_vegetation=psi < 0.5)
        shadow_mats.release_float32_cache()  # Free unpacked float32; bitpacked still available

        # Total relative luminance from sky patches into each cell
        ani_lum = _sky.weighted_patch_sum(
            as_float32(diffsh),
            as_float32(lv[:, 2]),
        )

        drad = ani_lum * rad_d

        # Compute asvf (angle from SVF) for anisotropic calculations
        asvf = np.arccos(np.sqrt(np.clip(svf, 0.0, 1.0)))

        # Pass bitpacked shadow matrices directly to Rust
        shmat = np.ascontiguousarray(shadow_mats._shmat_u8)
        vegshmat = np.ascontiguousarray(shadow_mats._vegshmat_u8)
        vbshmat = np.ascontiguousarray(shadow_mats._vbshmat_u8)

        # Compute base Ldown first (needed for lside_veg)
        ldown_base = (
            (svf + svf_veg - 1) * esky * SBC * (ta_k**4)
            + (2 - svf_veg - svf_aveg) * emis_wall * SBC * (ta_k**4)
            + (svf_aveg - svf) * emis_wall * SBC * ((weather.ta + tg_wall + KELVIN_OFFSET) ** 4)
            + (2 - svf - svf_veg) * (1 - emis_wall) * esky * SBC * (ta_k**4)
        )

        # CI correction for non-clear conditions
        ci = weather.clearness_index
        if ci < 0.95:
            c = 1.0 - ci
            ldown_cloudy = (
                (svf + svf_veg - 1) * SBC * (ta_k**4)
                + (2 - svf_veg - svf_aveg) * emis_wall * SBC * (ta_k**4)
                + (svf_aveg - svf) * emis_wall * SBC * ((weather.ta + tg_wall + KELVIN_OFFSET) ** 4)
                + (2 - svf - svf_veg) * (1 - emis_wall) * SBC * (ta_k**4)
            )
            ldown_base = ldown_base * (1 - c) + ldown_cloudy * c

        # Call lside_veg for base directional longwave (Least, Lsouth, Lwest, Lnorth)
        lside_veg_result = vegetation.lside_veg(
            as_float32(svf_directional.south),
            as_float32(svf_directional.west),
            as_float32(svf_directional.north),
            as_float32(svf_directional.east),
            as_float32(svf_veg_directional.east),
            as_float32(svf_veg_directional.south),
            as_float32(svf_veg_directional.west),
            as_float32(svf_veg_directional.north),
            as_float32(svf_aveg_directional.east),
            as_float32(svf_aveg_directional.south),
            as_float32(svf_aveg_directional.west),
            as_float32(svf_aveg_directional.north),
            weather.sun_azimuth,
            weather.sun_altitude,
            weather.ta,
            tg_wall,
            SBC,
            emis_wall,
            as_float32(ldown_base),
            esky,
            0.0,  # t (instrument offset, matching reference)
            as_float32(f_sh),
            weather.clearness_index,
            as_float32(lup_bundle.lup_e),  # TsWaveDelay-processed values
            as_float32(lup_bundle.lup_s),
            as_float32(lup_bundle.lup_w),
            as_float32(lup_bundle.lup_n),
            True,  # anisotropic_sky flag
        )
        # Extract base directional longwave
        lside_e_base = np.array(lside_veg_result.least)
        lside_s_base = np.array(lside_veg_result.lsouth)
        lside_w_base = np.array(lside_veg_result.lwest)
        lside_n_base = np.array(lside_veg_result.lnorth)

        # Compute steradians for patches
        steradians, _, _ = patch_steradians(lv)

        # Create L_patches array for anisotropic sky (altitude, azimuth, luminance)
        l_patches = as_float32(lv)

        # Adjust sky emissivity for cloudy conditions (CI < 0.95)
        # This matches the reference implementation: esky = CI * esky + (1 - CI) * 1.0
        esky_aniso = esky
        ci = weather.clearness_index
        if ci < 0.95:
            esky_aniso = ci * esky + (1 - ci) * 1.0

        # Create parameter structs for cleaner function signature
        sun_params = sky.SunParams(
            altitude=weather.sun_altitude,
            azimuth=weather.sun_azimuth,
        )
        sky_params = sky.SkyParams(
            esky=esky_aniso,
            ta=weather.ta,
            cyl=bool(cyl),
            wall_scheme=False,
            albedo=albedo_wall,
        )
        surface_params = sky.SurfaceParams(
            tgwall=tg_wall,
            ewall=emis_wall,
            rad_i=rad_i,
            rad_d=rad_d,
        )

        # Call full Rust anisotropic sky function with structs
        ani_sky_result = sky.anisotropic_sky(
            shmat,
            vegshmat,
            vbshmat,
            sun_params,
            as_float32(asvf),
            sky_params,
            l_patches,
            None,  # voxelTable
            None,  # voxelMaps
            as_float32(steradians),
            surface_params,
            as_float32(lup_bundle.lup),  # TsWaveDelay-processed value
            as_float32(lv),
            as_float32(shadow),
            as_float32(kup_e),
            as_float32(kup_s),
            as_float32(kup_w),
            as_float32(kup_n),
        )

        # Extract results from anisotropic sky
        ldown = np.array(ani_sky_result.ldown)
        # For directional longwave, use lside_veg_result (base) values
        # ani_sky_result provides anisotropic additions, but for cyl=1, aniso=1
        # the Sstr formula uses base directional longwave from lside_veg
        lside_e = lside_e_base
        lside_s = lside_s_base
        lside_w = lside_w_base
        lside_n = lside_n_base
        # Shortwave from anisotropic sky result
        kside_e = np.array(ani_sky_result.keast)
        kside_s = np.array(ani_sky_result.ksouth)
        kside_w = np.array(ani_sky_result.kwest)
        kside_n = np.array(ani_sky_result.knorth)
        kside_i = np.array(ani_sky_result.kside_i)
        # Total radiation on vertical surfaces (for Tmrt f_cyl term)
        kside_total = np.array(ani_sky_result.kside)
        lside_total = np.array(ani_sky_result.lside)

    else:
        # Isotropic model - use Rust functions for kside and lside

        # Isotropic diffuse radiation (psi-adjusted: vegetation partially transparent)
        veg_block = np.maximum(1.0 - svf_veg, 0.0)
        svfbuveg_eff = np.clip(svfbuveg + veg_block * psi, 0.0, 1.0)
        drad = rad_d * svfbuveg_eff

        # Compute asvf for Rust functions (needed even for isotropic)
        asvf = np.arccos(np.sqrt(np.clip(svf, 0.0, 1.0)))

        # Use Rust kside_veg for directional shortwave (isotropic mode: no lv, no shadow matrices)
        kside_result = vegetation.kside_veg(
            rad_i,
            rad_d,
            rad_g,
            as_float32(shadow),
            as_float32(svf_directional.south),
            as_float32(svf_directional.west),
            as_float32(svf_directional.north),
            as_float32(svf_directional.east),
            as_float32(svf_veg_directional.east),
            as_float32(svf_veg_directional.south),
            as_float32(svf_veg_directional.west),
            as_float32(svf_veg_directional.north),
            weather.sun_azimuth,
            weather.sun_altitude,
            psi,
            0.0,  # t (instrument offset)
            albedo_wall,
            as_float32(f_sh),
            as_float32(kup_e),
            as_float32(kup_s),
            as_float32(kup_w),
            as_float32(kup_n),
            bool(cyl),
            None,  # lv (None for isotropic)
            False,  # anisotropic_sky
            None,  # diffsh (None for isotropic)
            as_float32(asvf),
            None,  # shmat (None for isotropic)
            None,  # vegshmat (None for isotropic)
            None,  # vbshvegshmat (None for isotropic)
        )
        kside_e = np.array(kside_result.keast)
        kside_s = np.array(kside_result.ksouth)
        kside_w = np.array(kside_result.kwest)
        kside_n = np.array(kside_result.knorth)
        kside_i = np.array(kside_result.kside_i)
        # Total radiation on vertical surfaces (for Tmrt f_cyl term)
        kside_total = kside_i  # Isotropic uses direct beam only
        lside_total = np.zeros_like(kside_i)  # Not used in isotropic Tmrt formula

        # Longwave: Ldown (from Jonsson et al. 2006)
        ldown = (
            (svf + svf_veg - 1) * esky * SBC * (ta_k**4)
            + (2 - svf_veg - svf_aveg) * emis_wall * SBC * (ta_k**4)
            + (svf_aveg - svf) * emis_wall * SBC * ((weather.ta + tg_wall + KELVIN_OFFSET) ** 4)
            + (2 - svf - svf_veg) * (1 - emis_wall) * esky * SBC * (ta_k**4)
        )

        # CI correction for non-clear conditions (reference: if CI < 0.95)
        # Under cloudy skies, effective sky emissivity approaches 1.0
        ci = weather.clearness_index
        if ci < 0.95:
            c = 1.0 - ci
            ldown_cloudy = (
                (svf + svf_veg - 1) * SBC * (ta_k**4)  # No esky for cloudy
                + (2 - svf_veg - svf_aveg) * emis_wall * SBC * (ta_k**4)
                + (svf_aveg - svf) * emis_wall * SBC * ((weather.ta + tg_wall + KELVIN_OFFSET) ** 4)
                + (2 - svf - svf_veg) * (1 - emis_wall) * SBC * (ta_k**4)  # No esky
            )
            ldown = ldown * (1 - c) + ldown_cloudy * c

        # Use Rust lside_veg for directional longwave
        lside_veg_result = vegetation.lside_veg(
            as_float32(svf_directional.south),
            as_float32(svf_directional.west),
            as_float32(svf_directional.north),
            as_float32(svf_directional.east),
            as_float32(svf_veg_directional.east),
            as_float32(svf_veg_directional.south),
            as_float32(svf_veg_directional.west),
            as_float32(svf_veg_directional.north),
            as_float32(svf_aveg_directional.east),
            as_float32(svf_aveg_directional.south),
            as_float32(svf_aveg_directional.west),
            as_float32(svf_aveg_directional.north),
            weather.sun_azimuth,
            weather.sun_altitude,
            weather.ta,
            tg_wall,
            SBC,
            emis_wall,
            as_float32(ldown),
            esky,
            0.0,  # t (instrument offset, matching reference)
            as_float32(f_sh),
            weather.clearness_index,
            as_float32(lup_bundle.lup_e),  # TsWaveDelay-processed values
            as_float32(lup_bundle.lup_s),
            as_float32(lup_bundle.lup_w),
            as_float32(lup_bundle.lup_n),
            False,  # anisotropic_sky
        )
        lside_e = np.array(lside_veg_result.least)
        lside_s = np.array(lside_veg_result.lsouth)
        lside_w = np.array(lside_veg_result.lwest)
        lside_n = np.array(lside_veg_result.lnorth)

    # Kdown (downwelling shortwave = direct on horizontal + diffuse sky + wall reflected)
    kdown = rad_i * shadow * sin_alt + drad + albedo_wall * (1 - svfbuveg) * (rad_g * (1 - f_sh) + rad_d * f_sh)

    return RadiationBundle(
        kdown=as_float32(kdown),
        kup=as_float32(kup),
        ldown=as_float32(ldown),
        lup=lup_bundle.lup,  # Already float32 from LupBundle
        kside=DirectionalArrays(
            north=as_float32(kside_n),
            east=as_float32(kside_e),
            south=as_float32(kside_s),
            west=as_float32(kside_w),
        ),
        lside=DirectionalArrays(
            north=as_float32(lside_n),
            east=as_float32(lside_e),
            south=as_float32(lside_s),
            west=as_float32(lside_w),
        ),
        kside_total=as_float32(kside_total),
        lside_total=as_float32(lside_total),
        drad=as_float32(drad),
    )
+ + Args: + doy: Day of year (1-366) + physics: Physics params from load_physics() containing Tree_settings. + If provided, reads Transmissivity, First_day_leaf, Last_day_leaf. + conifer: Override to treat vegetation as conifer (always leaf-on). + + Returns: + Transmissivity value: + - 0.03 (default) during leaf-on period + - 0.5 during leaf-off period (deciduous trees in winter) + + Reference: + Lindberg et al. (2008) - SOLWEIG vegetation transmissivity model + """ + # Default values for deciduous vegetation + transmissivity = 0.03 + transmissivity_leafoff = 0.5 + first_day = 100 # ~April 10 + last_day = 300 # ~October 27 + is_conifer = conifer + + # Override from physics params if provided + if physics is not None and hasattr(physics, "Tree_settings"): + ts = physics.Tree_settings.Value + transmissivity = getattr(ts, "Transmissivity", 0.03) + transmissivity_leafoff = getattr(ts, "Transmissivity_leafoff", 0.5) + first_day = int(getattr(ts, "First_day_leaf", 100)) + last_day = int(getattr(ts, "Last_day_leaf", 300)) + # Note: Conifer flag may not be in all params files + is_conifer = conifer or getattr(ts, "Conifer", False) + + # Determine leaf on/off + if is_conifer: + leaf_on = True + elif first_day > last_day: + # Wraps around year end (southern hemisphere or unusual dates) + leaf_on = doy > first_day or doy < last_day + else: + # Normal case: leaves on between first_day and last_day + leaf_on = first_day < doy < last_day + + return transmissivity if leaf_on else transmissivity_leafoff + + +def compute_shadows( + weather: Weather, + dsm: NDArray[np.floating], + pixel_size: float, + max_height: float, + use_veg: bool, + physics: SimpleNamespace | None, + conifer: bool, + cdsm: NDArray[np.floating] | None = None, + tdsm: NDArray[np.floating] | None = None, + bush: NDArray[np.floating] | None = None, + wall_ht: NDArray[np.floating] | None = None, + wall_asp_rad: NDArray[np.floating] | None = None, + max_shadow_distance_m: float = 0.0, +) -> ShadowBundle: + """ + 
def compute_shadows(
    weather: Weather,
    dsm: NDArray[np.floating],
    pixel_size: float,
    max_height: float,
    use_veg: bool,
    physics: SimpleNamespace | None,
    conifer: bool,
    cdsm: NDArray[np.floating] | None = None,
    tdsm: NDArray[np.floating] | None = None,
    bush: NDArray[np.floating] | None = None,
    wall_ht: NDArray[np.floating] | None = None,
    wall_asp_rad: NDArray[np.floating] | None = None,
    max_shadow_distance_m: float = 0.0,
) -> ShadowBundle:
    """
    Compute shadows cast by buildings and vegetation for the current sun position.

    Delegates ray tracing to the Rust backend, then combines building and
    vegetation shadows using the seasonal canopy transmissivity.

    Args:
        weather: Weather data including sun position (azimuth, altitude).
        dsm: Digital Surface Model (building heights).
        pixel_size: Grid resolution in meters.
        max_height: Maximum building height for shadow computation.
        use_veg: Whether to include vegetation shadows.
        physics: Physics parameters (for transmissivity calculation).
        conifer: Whether vegetation is coniferous (always leaf-on).
        cdsm: Canopy DSM (optional, for vegetation).
        tdsm: Trunk DSM (optional, for vegetation).
        bush: Bush/shrub layer (optional, for vegetation).
        wall_ht: Wall heights (optional, for wall sun exposure).
        wall_asp_rad: Wall aspects in radians (optional, for wall sun exposure).
        max_shadow_distance_m: Maximum shadow reach in metres (0 = unlimited).

    Returns:
        ShadowBundle with combined shadow (1=sunlit, 0=shaded), building-only
        and vegetation-only shadows, wall sun exposure, and the psi used.

    Reference:
        Lindberg et al. (2008) - SOLWEIG shadow model.
        Formula: shadow = bldg_sh - (1 - veg_sh) * (1 - psi)
    """
    # Local import avoids a circular dependency at module load time
    from ..rustalgos import shadowing

    walls_available = wall_ht is not None and wall_asp_rad is not None

    # Vegetation / wall layers are only forwarded when active
    canopy, trunks, shrubs = (cdsm, tdsm, bush) if use_veg else (None, None, None)
    heights, aspects = (wall_ht, wall_asp_rad) if walls_available else (None, None)

    raytrace = shadowing.calculate_shadows_wall_ht_25(
        weather.sun_azimuth,
        weather.sun_altitude,
        pixel_size,
        max_height,
        dsm,
        canopy,
        trunks,
        shrubs,
        heights,
        aspects,
        None,  # walls_scheme
        None,  # aspect_scheme
        3.0,  # min_sun_altitude
        max_shadow_distance_m,
    )

    # Seasonal vegetation transmissivity for this day of year
    day_of_year = weather.datetime.timetuple().tm_yday
    transmissivity = compute_transmissivity(day_of_year, physics, conifer)

    building_shadow = np.array(raytrace.bldg_sh)

    if use_veg:
        # Reference formula: shadow = bldg_sh - (1 - veg_sh) * (1 - psi).
        # Deliberately left unclipped to match UMEP exactly; veg_sh is
        # constrained by bldg_sh so the result stays within [0, 1] in practice.
        vegetation_shadow = np.array(raytrace.veg_sh)
        combined = building_shadow - (1 - vegetation_shadow) * (1 - transmissivity)
    else:
        vegetation_shadow = np.zeros_like(building_shadow)
        combined = building_shadow

    # Wall sun exposure feeds the wall temperature calculation
    sunlit_walls = np.array(raytrace.wall_sun) if walls_available else np.zeros_like(dsm)

    return ShadowBundle(
        shadow=combined.astype(np.float32),
        bldg_sh=building_shadow.astype(np.float32),
        veg_sh=vegetation_shadow.astype(np.float32),
        wallsun=sunlit_walls.astype(np.float32),
        psi=transmissivity,
    )
def resolve_svf(
    surface: SurfaceData,
    precomputed: PrecomputedData | None,
    dsm: NDArray[np.floating],
    cdsm: NDArray[np.floating] | None,
    tdsm: NDArray[np.floating] | None,
    pixel_size: float,
    use_veg: bool,
    max_height: float,
    psi: float | None = None,
) -> tuple[SvfBundle, bool]:
    """
    Resolve SVF data from available precomputed sources.

    Checks two sources in priority order:
    1. surface.svf (cached/prepared) - fastest
    2. precomputed.svf (legacy) - fast

    Args:
        surface: Surface data (may contain cached SVF)
        precomputed: Pre-computed data (may contain SVF)
        dsm: Digital Surface Model (reserved for compatibility; not used here)
        cdsm: Canopy DSM (reserved for compatibility; not used here)
        tdsm: Trunk DSM (reserved for compatibility; not used here)
        pixel_size: Grid resolution in meters
        use_veg: Whether vegetation is active (kept for signature compatibility)
        max_height: Maximum building height (reserved for compatibility; not used here)
        psi: Vegetation transmissivity (optional, for svfbuveg calculation)
            Reserved for compatibility; not used here.

    Returns:
        Tuple of (SvfBundle, needs_psi_adjustment):
        - SvfBundle: Complete SVF data with all directional components
        - needs_psi_adjustment: True if svfbuveg needs recalculation with psi
          (currently always False: both sources carry a prepared svfbuveg)

    Raises:
        MissingPrecomputedData: If neither source provides SVF data.

    Note:
        SVF is required input to runtime calculation and must be prepared ahead
        of time (e.g. with surface.compute_svf() or SurfaceData.prepare()).
        This function does not compute SVF.
    """
    needs_psi_adjustment = False

    # Source priority: surface cache (from prepare/compute_svf), then the
    # legacy precomputed container. Both expose the identical attribute
    # layout, so a single extraction path below serves either source
    # (previously this was ~40 lines duplicated per branch).
    if surface.svf is not None:
        svf_data = surface.svf
    elif precomputed is not None and precomputed.svf is not None:
        svf_data = precomputed.svf
    else:
        # No SVF available — require explicit computation
        from ..errors import MissingPrecomputedData

        raise MissingPrecomputedData(
            "Sky View Factor (SVF) data is required but not available.",
            "Call surface.compute_svf() before calculate(), or use SurfaceData.prepare() "
            "which computes SVF automatically.",
        )

    svf = svf_data.svf
    svf_directional = DirectionalArrays(
        north=svf_data.svf_north,
        east=svf_data.svf_east,
        south=svf_data.svf_south,
        west=svf_data.svf_west,
    )
    svf_veg = svf_data.svf_veg
    svf_veg_directional = DirectionalArrays(
        north=svf_data.svf_veg_north,
        east=svf_data.svf_veg_east,
        south=svf_data.svf_veg_south,
        west=svf_data.svf_veg_west,
    )
    svf_aveg = svf_data.svf_aveg
    svf_aveg_directional = DirectionalArrays(
        north=svf_data.svf_aveg_north,
        east=svf_data.svf_aveg_east,
        south=svf_data.svf_aveg_south,
        west=svf_data.svf_aveg_west,
    )
    # Geometric svfbuveg without psi — adjusted at calculation time
    svfbuveg = svf_data.svfbuveg

    # Compute svfalfa (SVF angle) from SVF values
    # Formula: svfalfa = arcsin(exp(log(1 - (svf + svf_veg - 1)) / 2))
    # Used in anisotropic sky calculations; the clamp to [tiny, 1] keeps
    # log() finite where svf + svf_veg approaches 2.
    tmp = np.clip(svf + svf_veg - 1.0, 0.0, 1.0)
    eps = np.finfo(np.float32).tiny
    safe_term = np.clip(1.0 - tmp, eps, 1.0)
    svfalfa = np.arcsin(np.exp(np.log(safe_term) / 2.0))

    bundle = SvfBundle(
        svf=svf,
        svf_directional=svf_directional,
        svf_veg=svf_veg,
        svf_veg_directional=svf_veg_directional,
        svf_aveg=svf_aveg,
        svf_aveg_directional=svf_aveg_directional,
        svfbuveg=svfbuveg,
        svfalfa=svfalfa,
    )

    return bundle, needs_psi_adjustment
def adjust_svfbuveg_with_psi(
    svf: NDArray[np.floating],
    svf_veg: NDArray[np.floating],
    psi: float,
    use_veg: bool,
) -> NDArray[np.floating]:
    """
    Fold vegetation transmissivity into the building+vegetation SVF.

    Needed when SVF was computed fresh without knowledge of psi.

    Args:
        svf: Total sky view factor.
        svf_veg: Vegetation-only SVF.
        psi: Vegetation transmissivity (0.03 typical for deciduous trees).
        use_veg: Whether vegetation is active.

    Returns:
        Adjusted svfbuveg array (float32), clipped to [0, 1] when vegetation
        is active; otherwise svf cast to float32 unchanged.

    Formula:
        svfbuveg = svf - (1 - svf_veg) * (1 - psi)
    """
    if not use_veg:
        # No vegetation: svfbuveg degenerates to the plain SVF
        return svf.astype(np.float32)

    # Fraction of sky blocked by canopy, discounted by what shines through
    canopy_block = (1.0 - svf_veg) * (1.0 - psi)
    return np.clip(svf - canopy_block, 0.0, 1.0).astype(np.float32)
def compute_tmrt(
    radiation: RadiationBundle,
    human: HumanParams,
    use_anisotropic_sky: bool,
) -> NDArray[np.floating]:
    """
    Compute Mean Radiant Temperature from the full radiation budget.

    Tmrt is the uniform enclosure temperature that would give the human body
    the same net radiant exchange as the actual non-uniform environment.
    The heavy lifting is done in Rust; this wrapper only marshals arrays
    (float32, copy-free via as_float32) and posture/absorptivity parameters.

    Args:
        radiation: Complete radiation budget from all directions.
        human: Human parameters (height, posture, absorptivities).
        use_anisotropic_sky: Whether the anisotropic sky model was used.

    Returns:
        Tmrt array in degrees Celsius, clipped to [-50, 80].

    Formula:
        Tmrt = (Sstr / (abs_l × SBC))^0.25 - 273.15
        where Sstr = absorbed shortwave + absorbed longwave

    Reference:
        Lindberg et al. (2008): "SOLWEIG 1.0 - modelling spatial variations
        of 3D radiant fluxes and mean radiant temperature in complex urban settings"
    """
    # Parameter struct carries the scalar configuration across the FFI boundary
    params = rustalgos.tmrt.TmrtParams(
        abs_k=human.abs_k,
        abs_l=human.abs_l,
        is_standing=(human.posture == "standing"),
        use_anisotropic_sky=use_anisotropic_sky,
    )

    shortwave_sides = radiation.kside
    longwave_sides = radiation.lside

    # Argument order is fixed by the Rust signature:
    # fluxes (down/up), then N/E/S/W shortwave, N/E/S/W longwave, totals, params.
    return rustalgos.tmrt.compute_tmrt(
        as_float32(radiation.kdown),
        as_float32(radiation.kup),
        as_float32(radiation.ldown),
        as_float32(radiation.lup),
        as_float32(shortwave_sides.north),
        as_float32(shortwave_sides.east),
        as_float32(shortwave_sides.south),
        as_float32(shortwave_sides.west),
        as_float32(longwave_sides.north),
        as_float32(longwave_sides.east),
        as_float32(longwave_sides.south),
        as_float32(longwave_sides.west),
        as_float32(radiation.kside_total),
        as_float32(radiation.lside_total),
        params,
    )
def _apply_thermal_delay(
    gvf_bundle: GvfBundle,
    ground_tg: NDArray[np.floating],
    shadow: NDArray[np.floating],
    weather: Weather,
    state: ThermalState | None,
) -> LupBundle:
    """
    Apply thermal inertia (TsWaveDelay) to upwelling longwave radiation.

    Models the thermal mass of ground and walls, smoothing rapid surface
    temperature swings across a time series. A single batched Rust call
    processes all five Lup components (total + E/S/W/N) at once, replacing
    six separate FFI round-trips.

    Args:
        gvf_bundle: Ground view factor results (raw Lup before delay).
        ground_tg: Ground temperature deviation from air temperature (K).
        shadow: Shadow fraction (weights the ground temperature deviation).
        weather: Weather data (air temperature, daytime flag).
        state: Thermal state carrying forward surface temperature history,
            or None for a single standalone timestep.

    Returns:
        LupBundle with thermally-delayed upwelling longwave (all float32)
        and the updated state (None when no input state was given).
    """
    from .buffers import as_float32

    if state is None:
        # No history to carry: single-timestep runs skip the delay entirely
        return LupBundle(
            lup=gvf_bundle.lup.astype(np.float32),
            lup_e=gvf_bundle.lup_e.astype(np.float32),
            lup_s=gvf_bundle.lup_s.astype(np.float32),
            lup_w=gvf_bundle.lup_w.astype(np.float32),
            lup_n=gvf_bundle.lup_n.astype(np.float32),
            state=None,
        )

    # Surface temperature seen by the delay filter: shadow-weighted Tg + Ta
    tg_shadowed = (ground_tg * shadow + weather.ta).astype(np.float32)

    # One batched FFI call applies TsWaveDelay to all five Lup components
    delayed = ground_rust.ts_wave_delay_batch(
        as_float32(gvf_bundle.lup),
        as_float32(gvf_bundle.lup_e),
        as_float32(gvf_bundle.lup_s),
        as_float32(gvf_bundle.lup_w),
        as_float32(gvf_bundle.lup_n),
        tg_shadowed,
        int(state.firstdaytime),
        state.timeadd,
        state.timestep_dec,
        as_float32(state.tgmap1),
        as_float32(state.tgmap1_e),
        as_float32(state.tgmap1_s),
        as_float32(state.tgmap1_w),
        as_float32(state.tgmap1_n),
        as_float32(state.tgout1),
    )

    # Carry the updated temperature history forward on a copy of the state
    # (copy first, then mutate the copy — never the caller's state)
    next_state = state.copy()
    next_state.timeadd = delayed.timeadd
    next_state.tgmap1 = np.asarray(delayed.tgmap1)
    next_state.tgmap1_e = np.asarray(delayed.tgmap1_e)
    next_state.tgmap1_s = np.asarray(delayed.tgmap1_s)
    next_state.tgmap1_w = np.asarray(delayed.tgmap1_w)
    next_state.tgmap1_n = np.asarray(delayed.tgmap1_n)
    next_state.tgout1 = np.asarray(delayed.tgout1)

    # Day/night bookkeeping for the next timestep
    if weather.is_daytime:
        next_state.firstdaytime = 0.0
    else:
        next_state.firstdaytime = 1.0
        next_state.timeadd = 0.0

    return LupBundle(
        lup=np.asarray(delayed.lup).astype(np.float32),
        lup_e=np.asarray(delayed.lup_e).astype(np.float32),
        lup_s=np.asarray(delayed.lup_s).astype(np.float32),
        lup_w=np.asarray(delayed.lup_w).astype(np.float32),
        lup_n=np.asarray(delayed.lup_n).astype(np.float32),
        state=next_state,
    )
output_state.firstdaytime = 1.0 + output_state.timeadd = 0.0 + else: + # Single timestep: use raw GVF values (no thermal delay) + lup = gvf_bundle.lup + lup_e = gvf_bundle.lup_e + lup_s = gvf_bundle.lup_s + lup_w = gvf_bundle.lup_w + lup_n = gvf_bundle.lup_n + + return LupBundle( + lup=lup.astype(np.float32), + lup_e=lup_e.astype(np.float32), + lup_s=lup_s.astype(np.float32), + lup_w=lup_w.astype(np.float32), + lup_n=lup_n.astype(np.float32), + state=output_state, + ) + + +def calculate_core_fused( + surface: SurfaceData, + location: Location, + weather: Weather, + human: HumanParams, + precomputed: PrecomputedData | None, + state: ThermalState | None, + physics: SimpleNamespace | None, + materials: SimpleNamespace | None, + conifer: bool = False, + wall_material: str | None = None, + use_anisotropic_sky: bool = False, + max_shadow_distance_m: float | None = None, + return_state_copy: bool = True, + requested_outputs: set[str] | None = None, +) -> SolweigResult: + """ + Fused SOLWEIG calculation — single Rust FFI call per daytime timestep. + + Functionally identical to calculate_core() but orchestrates shadows, ground + temperature, GVF, thermal delay, radiation, and Tmrt entirely within Rust, + eliminating intermediate numpy allocations and FFI round-trips. + + This is the primary compute path used by ``calculate()``. + Supports both isotropic and anisotropic (Perez) sky models. + + Args: + surface: Surface/terrain data (DSM, vegetation, walls, land cover). + location: Geographic location (latitude, longitude). + weather: Weather conditions with derived sun position. + human: Human parameters (height, posture, absorptivities). + precomputed: Optional pre-computed data (SVF, shadow matrices). + state: Optional thermal state for time-series (carries forward temperatures). + physics: Optional physics parameters (vegetation transmissivity, etc.). + materials: Optional material properties (albedo, emissivity by land cover). 
+ conifer: Treat vegetation as evergreen conifers (always leaf-on). + wall_material: Wall material type ("brick", "concrete", "wood", "cobblestone"). + use_anisotropic_sky: Use anisotropic (Perez) diffuse sky model. + max_shadow_distance_m: Maximum shadow reach in metres. + return_state_copy: If True, return a deep-copied thermal state. + requested_outputs: Set of output names to materialize (None = all). + + Returns: + SolweigResult with Tmrt, shadow, radiation components, and updated state. + """ + from .api import SolweigResult + from .buffers import as_float32 + from .components.gvf import detect_building_mask + from .components.shadows import compute_transmissivity + from .components.svf_resolution import resolve_svf + from .models.state import ThermalState + from .physics.clearnessindex_2013b import clearnessindex_2013b + from .physics.daylen import daylen + from .physics.diffusefraction import diffusefraction + from .rustalgos import pipeline + + # Ensure derived weather fields are computed (sun position, radiation split) + if not weather._derived_computed: + weather.compute_derived(location) + + # === Precompute (stays in Python) === + + rows, cols = surface.dsm.shape + pixel_size = surface.pixel_size + + # Valid pixel mask (True where all layers have finite data) + # Computed once by SurfaceData.prepare(), or derived from DSM if missing + valid_mask = surface.valid_mask + valid_source = valid_mask if valid_mask is not None else surface.dsm + valid_mask_key = (id(valid_source), valid_source.shape) + valid_mask_cache = getattr(surface, "_valid_mask_u8_cache", None) + if valid_mask_cache is not None and valid_mask_cache[0] == valid_mask_key: + valid_mask_u8 = valid_mask_cache[1] + else: + if valid_mask is None: + valid_mask = np.isfinite(surface.dsm) + valid_mask_u8 = np.ascontiguousarray(valid_mask, dtype=np.uint8) + surface._valid_mask_u8_cache = valid_mask_key, valid_mask_u8 + + # Valid-bounds crop (ported from old main implementation): + # trim heavy 
per-timestep compute to the minimal bounding rectangle of valid pixels. + bbox_cache = getattr(surface, "_valid_bbox_cache", None) + if bbox_cache is not None and bbox_cache[0] == valid_mask_key: + r0, r1, c0, c1 = bbox_cache[1] + else: + rows_any = np.any(valid_mask_u8 != 0, axis=1) + cols_any = np.any(valid_mask_u8 != 0, axis=0) + if not rows_any.any() or not cols_any.any(): + r0, r1, c0, c1 = 0, rows, 0, cols + else: + r_idx = np.flatnonzero(rows_any) + c_idx = np.flatnonzero(cols_any) + r0, r1 = int(r_idx[0]), int(r_idx[-1]) + 1 + c0, c1 = int(c_idx[0]), int(c_idx[-1]) + 1 + surface._valid_bbox_cache = valid_mask_key, (r0, r1, c0, c1) + + full_area = rows * cols + crop_area = (r1 - r0) * (c1 - c0) + use_crop = (r0, r1, c0, c1) != (0, rows, 0, cols) and crop_area < int(full_area * 0.98) + crop_slice = (slice(r0, r1), slice(c0, c1)) + + # Select which non-Tmrt outputs to materialize from Rust. + if requested_outputs is None: + output_mask = _OUT_ALL + else: + output_mask = 0 + if "shadow" in requested_outputs: + output_mask |= _OUT_SHADOW + if "kdown" in requested_outputs: + output_mask |= _OUT_KDOWN + if "kup" in requested_outputs: + output_mask |= _OUT_KUP + if "ldown" in requested_outputs: + output_mask |= _OUT_LDOWN + if "lup" in requested_outputs: + output_mask |= _OUT_LUP + + # Land cover properties + lc_props_key = (id(surface.land_cover), id(surface.albedo), id(surface.emissivity), id(materials)) + lc_props_cache = getattr(surface, "_land_cover_props_cache", None) + if lc_props_cache is not None and lc_props_cache[0] == lc_props_key: + alb_grid, emis_grid, tgk_grid, tstart_grid, tmaxlst_grid = lc_props_cache[1] + else: + alb_grid, emis_grid, tgk_grid, tstart_grid, tmaxlst_grid = surface.get_land_cover_properties(materials) + surface._land_cover_props_cache = lc_props_key, (alb_grid, emis_grid, tgk_grid, tstart_grid, tmaxlst_grid) + + # Vegetation inputs + use_veg = surface.cdsm is not None + cdsm = surface.cdsm if use_veg else None + tdsm = surface.tdsm 
if use_veg else None + if use_veg: + pool = surface.get_buffer_pool() + bush = pool.get_zeros("bush") + else: + bush = None + + # Wall inputs + has_walls = surface.wall_height is not None and surface.wall_aspect is not None + wall_ht = surface.wall_height if has_walls else None + wall_asp = surface.wall_aspect if has_walls else None + + # Use full terrain relief for shadow ray termination so that mountain + # ridges can correctly shadow valleys. The horizontal reach is still + # bounded by max_shadow_distance_m via max_index in Rust, so rays + # don't run forever — they just won't terminate prematurely on the + # vertical axis when terrain relief exceeds building heights. + max_height = surface.max_height + + # SVF resolution (cached between timesteps) + svf_bundle, _needs_psi_adjustment = resolve_svf( + surface=surface, + precomputed=precomputed, + dsm=surface.dsm, + cdsm=cdsm, + tdsm=tdsm, + pixel_size=pixel_size, + use_veg=use_veg, + max_height=max_height, + ) + + # Vegetation transmissivity + doy = weather.datetime.timetuple().tm_yday + psi = compute_transmissivity(doy, physics, conifer) + + # Adjust svfbuveg for vegetation transmissivity (shortwave sees through canopy) + # Without this, isotropic diffuse (drad), Kup, and Kdown treat vegetation as + # fully opaque. The anisotropic path already applies psi per sky patch via + # diffsh(psi), and kside_veg applies psi per direction, but the scalar svfbuveg + # used for isotropic diffuse and wall reflection was unadjusted. 
+ from .components.svf_resolution import adjust_svfbuveg_with_psi + + svf_bundle.svfbuveg = adjust_svfbuveg_with_psi(svf_bundle.svf, svf_bundle.svf_veg, psi, use_veg) + + # Wall material resolution + tgk_wall = 0.37 + tstart_wall = -3.41 + tmaxlst_wall = 15.0 + albedo_wall = 0.20 + emis_wall = 0.90 + if wall_material is not None: + from .loaders import resolve_wall_params + + tgk_wall, tstart_wall, tmaxlst_wall = resolve_wall_params(wall_material, materials) + elif materials is not None: + tgk_w = getattr(getattr(getattr(materials, "Ts_deg", None), "Value", None), "Walls", None) + tstart_w = getattr(getattr(getattr(materials, "Tstart", None), "Value", None), "Walls", None) + tmaxlst_w = getattr(getattr(getattr(materials, "TmaxLST", None), "Value", None), "Walls", None) + if tgk_w is not None: + tgk_wall = tgk_w + if tstart_w is not None: + tstart_wall = tstart_w + if tmaxlst_w is not None: + tmaxlst_wall = tmaxlst_w + + # Weather-derived scalars for ground temperature model + _, _, _, snup = daylen(doy, location.latitude) + dectime = (weather.datetime.hour + weather.datetime.minute / 60.0) / 24.0 + zen_deg = 90.0 - weather.sun_altitude + + # Clear-sky radiation for ground temperature CI correction + zen_rad = zen_deg * (np.pi / 180.0) + location_dict = { + "latitude": location.latitude, + "longitude": location.longitude, + "altitude": 0.0, + } + i0, _, _, _, _ = clearnessindex_2013b( + zen_rad, + doy, + weather.ta, + weather.rh / 100.0, + weather.global_rad, + location_dict, + -999.0, + ) + if i0 > 0 and weather.sun_altitude > 0: + rad_i0, rad_d0 = diffusefraction(i0, weather.sun_altitude, 1.0, weather.ta, weather.rh) + rad_g0 = rad_i0 * np.sin(weather.sun_altitude * np.pi / 180.0) + rad_d0 + else: + rad_g0 = 0.0 + + # === Build Rust input structs === + + ws = pipeline.WeatherScalars( + sun_azimuth=float(weather.sun_azimuth), + sun_altitude=float(weather.sun_altitude), + sun_zenith=float(weather.sun_zenith), + ta=float(weather.ta), + rh=float(weather.rh), + 
global_rad=float(weather.global_rad), + direct_rad=float(weather.direct_rad), + diffuse_rad=float(weather.diffuse_rad), + altmax=float(weather.altmax), + clearness_index=float(weather.clearness_index), + dectime=float(dectime), + snup=float(snup), + rad_g0=float(rad_g0), + zen_deg=float(zen_deg), + psi=float(psi), + is_daytime=weather.sun_altitude > 0, + jday=int(weather.datetime.timetuple().tm_yday) if weather.datetime is not None else 180, + patch_option=0, # Set below if anisotropic + ) + + hs = pipeline.HumanScalars( + height=float(human.height), + abs_k=float(human.abs_k), + abs_l=float(human.abs_l), + is_standing=human.posture == "standing", + ) + + cs = pipeline.ConfigScalars( + pixel_size=float(pixel_size), + max_height=float(max_height), + albedo_wall=float(albedo_wall), + emis_wall=float(emis_wall), + tgk_wall=float(tgk_wall), + tstart_wall=float(tstart_wall), + tmaxlst_wall=float(tmaxlst_wall), + use_veg=use_veg, + has_walls=has_walls, + conifer=conifer, + use_anisotropic=use_anisotropic_sky, + max_shadow_distance_m=float(max_shadow_distance_m or 1000.0), + ) + + # Buildings mask for GVF (computed from DSM/land_cover/walls) + buildings_key = (id(surface.dsm), id(surface.land_cover), id(wall_ht), float(pixel_size)) + buildings_cache = getattr(surface, "_buildings_mask_cache", None) + if buildings_cache is not None and buildings_cache[0] == buildings_key: + buildings = buildings_cache[1] + else: + buildings = detect_building_mask( + surface.dsm, + surface.land_cover, + wall_ht, + pixel_size, + ) + surface._buildings_mask_cache = buildings_key, buildings + + if surface.land_cover is not None: + lc_grid_key = id(surface.land_cover) + lc_grid_cache = getattr(surface, "_lc_grid_f32_cache", None) + if lc_grid_cache is not None and lc_grid_cache[0] == lc_grid_key: + lc_grid = lc_grid_cache[1] + else: + lc_grid = surface.land_cover.astype(np.float32) + surface._lc_grid_f32_cache = lc_grid_key, lc_grid + else: + lc_grid = None + + # GVF geometry cache: precompute 
on first daytime call, reuse on subsequent. + # Keep separate caches for full-grid and cropped-grid execution. + gvf_cache = None + if has_walls: + assert wall_asp is not None # guaranteed by has_walls + assert wall_ht is not None + if use_crop: + gvf_crop_key = ( + id(buildings), + id(wall_asp), + id(wall_ht), + id(alb_grid), + r0, + r1, + c0, + c1, + float(pixel_size), + float(human.height), + float(albedo_wall), + ) + gvf_crop_cache = getattr(surface, "_gvf_geometry_cache_crop", None) + if gvf_crop_cache is not None and gvf_crop_cache[0] == gvf_crop_key: + gvf_cache = gvf_crop_cache[1] + else: + gvf_cache = pipeline.precompute_gvf_cache( + as_float32(buildings[crop_slice]), + as_float32(wall_asp[crop_slice]), + as_float32(wall_ht[crop_slice]), + as_float32(alb_grid[crop_slice]), + float(pixel_size), + float(human.height), + float(albedo_wall), + ) + surface._gvf_geometry_cache_crop = gvf_crop_key, gvf_cache + else: + gvf_cache = getattr(surface, "_gvf_geometry_cache", None) + if gvf_cache is None: + gvf_cache = pipeline.precompute_gvf_cache( + as_float32(buildings), + as_float32(wall_asp), + as_float32(wall_ht), + as_float32(alb_grid), + float(pixel_size), + float(human.height), + float(albedo_wall), + ) + surface._gvf_geometry_cache = gvf_cache + + # Anisotropic sky: Perez luminance, steradians, ASVF, and esky are now + # computed inside the Rust pipeline (no Python round-trip). We only need + # the shadow matrices and the patch_option. 
+ aniso_shmat = None + aniso_vegshmat = None + aniso_vbshmat = None + + if use_anisotropic_sky: + shadow_mats = None + if precomputed is not None and precomputed.shadow_matrices is not None: + shadow_mats = precomputed.shadow_matrices + elif surface.shadow_matrices is not None: + shadow_mats = surface.shadow_matrices + + if shadow_mats is not None: + ws.patch_option = shadow_mats.patch_option + if use_crop: + aniso_crop_key = ( + id(shadow_mats._shmat_u8), + id(shadow_mats._vegshmat_u8), + id(shadow_mats._vbshmat_u8), + r0, + r1, + c0, + c1, + ) + aniso_crop_cache = getattr(surface, "_aniso_shadow_crop_cache", None) + if aniso_crop_cache is not None and aniso_crop_cache[0] == aniso_crop_key: + aniso_shmat, aniso_vegshmat, aniso_vbshmat = aniso_crop_cache[1] + else: + aniso_shmat = np.ascontiguousarray(shadow_mats._shmat_u8[crop_slice]) + aniso_vegshmat = np.ascontiguousarray(shadow_mats._vegshmat_u8[crop_slice]) + aniso_vbshmat = np.ascontiguousarray(shadow_mats._vbshmat_u8[crop_slice]) + surface._aniso_shadow_crop_cache = aniso_crop_key, (aniso_shmat, aniso_vegshmat, aniso_vbshmat) + else: + # Keep original arrays to preserve stable pointers across timesteps. 
+ aniso_shmat = shadow_mats._shmat_u8 + aniso_vegshmat = shadow_mats._vegshmat_u8 + aniso_vbshmat = shadow_mats._vbshmat_u8 + + # Thermal state (create initial if None) + if state is None: + state = ThermalState.initial((rows, cols)) + + firstdaytime_int = int(state.firstdaytime) + + def _sel(arr): + if arr is None: + return None + return arr[crop_slice] if use_crop else arr + + dsm_call = _sel(surface.dsm) + cdsm_call = _sel(cdsm) + tdsm_call = _sel(tdsm) + bush_call = _sel(bush) + wall_ht_call = _sel(wall_ht) + wall_asp_call = _sel(wall_asp) + svf_call = _sel(svf_bundle.svf) + svf_n_call = _sel(svf_bundle.svf_directional.north) + svf_e_call = _sel(svf_bundle.svf_directional.east) + svf_s_call = _sel(svf_bundle.svf_directional.south) + svf_w_call = _sel(svf_bundle.svf_directional.west) + svf_veg_call = _sel(svf_bundle.svf_veg) + svf_veg_n_call = _sel(svf_bundle.svf_veg_directional.north) + svf_veg_e_call = _sel(svf_bundle.svf_veg_directional.east) + svf_veg_s_call = _sel(svf_bundle.svf_veg_directional.south) + svf_veg_w_call = _sel(svf_bundle.svf_veg_directional.west) + svf_aveg_call = _sel(svf_bundle.svf_aveg) + svf_aveg_n_call = _sel(svf_bundle.svf_aveg_directional.north) + svf_aveg_e_call = _sel(svf_bundle.svf_aveg_directional.east) + svf_aveg_s_call = _sel(svf_bundle.svf_aveg_directional.south) + svf_aveg_w_call = _sel(svf_bundle.svf_aveg_directional.west) + svfbuveg_call = _sel(svf_bundle.svfbuveg) + svfalfa_call = _sel(svf_bundle.svfalfa) + alb_call = _sel(alb_grid) + emis_call = _sel(emis_grid) + tgk_call = _sel(tgk_grid) + tstart_call = _sel(tstart_grid) + tmaxlst_call = _sel(tmaxlst_grid) + buildings_call = _sel(buildings) + lc_grid_call = _sel(lc_grid) + valid_mask_call = _sel(valid_mask_u8) + tgmap1_call = _sel(state.tgmap1) + tgmap1_e_call = _sel(state.tgmap1_e) + tgmap1_s_call = _sel(state.tgmap1_s) + tgmap1_w_call = _sel(state.tgmap1_w) + tgmap1_n_call = _sel(state.tgmap1_n) + tgout1_call = _sel(state.tgout1) + + # === Call fused Rust pipeline === + 
+ result = pipeline.compute_timestep( + # Scalar structs + ws, + hs, + cs, + # GVF geometry cache (None on first call triggers full GVF, then cached) + gvf_cache, + # Surface arrays + as_float32(dsm_call), + as_float32(cdsm_call) if cdsm_call is not None else None, + as_float32(tdsm_call) if tdsm_call is not None else None, + as_float32(bush_call) if bush_call is not None else None, + as_float32(wall_ht_call) if wall_ht_call is not None else None, + as_float32(wall_asp_call) if wall_asp_call is not None else None, + # SVF arrays + as_float32(svf_call), + as_float32(svf_n_call), + as_float32(svf_e_call), + as_float32(svf_s_call), + as_float32(svf_w_call), + as_float32(svf_veg_call), + as_float32(svf_veg_n_call), + as_float32(svf_veg_e_call), + as_float32(svf_veg_s_call), + as_float32(svf_veg_w_call), + as_float32(svf_aveg_call), + as_float32(svf_aveg_n_call), + as_float32(svf_aveg_e_call), + as_float32(svf_aveg_s_call), + as_float32(svf_aveg_w_call), + as_float32(svfbuveg_call), + as_float32(svfalfa_call), + # Land cover property grids + as_float32(alb_call), + as_float32(emis_call), + as_float32(tgk_call), + as_float32(tstart_call), + as_float32(tmaxlst_call), + # Buildings mask + land cover + as_float32(buildings_call), + as_float32(lc_grid_call) if lc_grid_call is not None else None, + # Anisotropic sky inputs (None for isotropic; Perez computed in Rust) + aniso_shmat, + aniso_vegshmat, + aniso_vbshmat, + # Thermal state + firstdaytime_int, + float(state.timeadd), + float(state.timestep_dec), + as_float32(tgmap1_call), + as_float32(tgmap1_e_call), + as_float32(tgmap1_s_call), + as_float32(tgmap1_w_call), + as_float32(tgmap1_n_call), + as_float32(tgout1_call), + # Valid pixel mask for early NaN exit + valid_mask_call, + output_mask, + ) + + # === Unpack result and update thermal state === + + state.timeadd = result.timeadd + if use_crop: + state.tgmap1[crop_slice] = np.asarray(result.tgmap1) + state.tgmap1_e[crop_slice] = np.asarray(result.tgmap1_e) + 
state.tgmap1_s[crop_slice] = np.asarray(result.tgmap1_s) + state.tgmap1_w[crop_slice] = np.asarray(result.tgmap1_w) + state.tgmap1_n[crop_slice] = np.asarray(result.tgmap1_n) + state.tgout1[crop_slice] = np.asarray(result.tgout1) + else: + state.tgmap1 = np.asarray(result.tgmap1) + state.tgmap1_e = np.asarray(result.tgmap1_e) + state.tgmap1_s = np.asarray(result.tgmap1_s) + state.tgmap1_w = np.asarray(result.tgmap1_w) + state.tgmap1_n = np.asarray(result.tgmap1_n) + state.tgout1 = np.asarray(result.tgout1) + + if weather.is_daytime: + state.firstdaytime = 0.0 + else: + state.firstdaytime = 1.0 + state.timeadd = 0.0 + + output_state = state.copy() if return_state_copy else state + + tmrt = np.asarray(result.tmrt) + shadow = np.asarray(result.shadow) if result.shadow is not None else None + kdown = np.asarray(result.kdown) if result.kdown is not None else None + kup = np.asarray(result.kup) if result.kup is not None else None + ldown = np.asarray(result.ldown) if result.ldown is not None else None + lup = np.asarray(result.lup) if result.lup is not None else None + + if use_crop: + + def _uncrop(arr: np.ndarray | None) -> np.ndarray | None: + if arr is None: + return None + full = np.full((rows, cols), np.nan, dtype=np.float32) + full[crop_slice] = arr + return full + + tmrt = _uncrop(tmrt) + shadow = _uncrop(shadow) + kdown = _uncrop(kdown) + kup = _uncrop(kup) + ldown = _uncrop(ldown) + lup = _uncrop(lup) + + assert tmrt is not None # tmrt is always computed + return SolweigResult( + tmrt=tmrt, + shadow=shadow, + kdown=kdown, + kup=kup, + ldown=ldown, + lup=lup, + utci=None, + pet=None, + state=output_state, + ) diff --git a/pysrc/solweig/constants.py b/pysrc/solweig/constants.py new file mode 100644 index 0000000..dd3a893 --- /dev/null +++ b/pysrc/solweig/constants.py @@ -0,0 +1,82 @@ +""" +Physical constants and default parameters for SOLWEIG. 
+ +This module consolidates all physical constants and default parameter values +to eliminate duplication across the codebase and provide clear documentation +with proper references. +""" + +# ============================================================================= +# Physical Constants +# ============================================================================= + +# Stefan-Boltzmann constant (W/m²/K⁴) +# Used for blackbody radiation calculations: E = σ × T⁴ +# Reference: CODATA 2018 recommended value +SBC = 5.67e-8 + +# Kelvin to Celsius conversion offset +# Used for temperature unit conversions +KELVIN_OFFSET = 273.15 + +# Minimum sun elevation for shadow calculations (degrees) +# Below this threshold, shadows are not computed (negligible solar radiation) +MIN_SUN_ELEVATION_DEG = 3.0 + + +# ============================================================================= +# View Factor Constants +# ============================================================================= +# View factors represent the fraction of radiation leaving one surface +# that is intercepted by another surface. For a human body modeled as +# a cylinder or cube, these factors depend on posture. 
+# +# Reference: Höppe (1992) - "The physiological equivalent temperature" +# ============================================================================= + +# Standing posture view factors (cylindrical model) +# Human body modeled as a standing cylinder +F_UP_STANDING = 0.06 # View factor to sky/ground from top/bottom +F_SIDE_STANDING = 0.22 # View factor from each of 4 cardinal directions (N, E, S, W) +F_CYL_STANDING = 0.28 # Cylindrical projection factor for direct beam radiation + +# Sitting posture view factors (cubic model) +# Human body modeled as a sitting cube with equal area on all 6 sides +F_UP_SITTING = 0.166666 # View factor to sky/ground (1/6 per side) +F_SIDE_SITTING = 0.166666 # View factor from each of 4 cardinal directions (1/6 per side) +# Note: F_CYL is not used for sitting posture in current implementation + + +# ============================================================================= +# Default Physical Parameters +# ============================================================================= +# These are common default values used when not specified by the user +# or when materials/physics config is not provided. 
+# ============================================================================= + +# Default wall properties +DEFAULT_ALBEDO_WALL = 0.20 # Wall albedo (reflectance) +DEFAULT_EMIS_WALL = 0.90 # Wall emissivity (longwave radiation) +DEFAULT_TG_WALL = 0.0 # Wall temperature deviation from air temperature (K) + + +# ============================================================================= +# Public API +# ============================================================================= + +__all__ = [ + # Physical constants + "SBC", + "KELVIN_OFFSET", + "MIN_SUN_ELEVATION_DEG", + # View factors + "F_UP_STANDING", + "F_SIDE_STANDING", + "F_CYL_STANDING", + "F_UP_SITTING", + "F_SIDE_SITTING", + # Defaults + "DEFAULT_ALBEDO_WALL", + "DEFAULT_EMIS_WALL", + "DEFAULT_TG_WALL", +] diff --git a/pysrc/solweig/data/default_materials.json b/pysrc/solweig/data/default_materials.json new file mode 100644 index 0000000..263335b --- /dev/null +++ b/pysrc/solweig/data/default_materials.json @@ -0,0 +1,221 @@ +{ + "Names": { + "Value": { + "0": "Cobble_stone_2014a", + "1": "Dark_asphalt", + "2": "Roofs(buildings)", + "5": "Grass_unmanaged", + "6": "Bare_soil", + "7": "Water", + "99": "Walls", + "100": "Brick_wall", + "101": "Concrete_wall", + "102": "Wood_wall" + }, + "Comment": "Name of each respective land cover class in land cover data." + }, + "Code": { + "Value": { + "Cobble_stone_2014a": 0, + "Dark_asphalt": 1, + "Roofs(buildings)": 2, + "Grass_unmanaged": 5, + "Bare_soil": 6, + "Water": 7, + "Walls": 99, + "Brick_wall": 100, + "Concrete_wall": 101, + "Wood_wall": 102 + }, + "Comment": "Code for each land cover class name." + }, + "Albedo": { + "Effective": { + "Value": { + "Cobble_stone_2014a": 0.2, + "Dark_asphalt": 0.18, + "Roofs(buildings)": 0.18, + "Grass_unmanaged": 0.16, + "Bare_soil": 0.25, + "Water": 0.05, + "Walls": 0.2 + }, + "Comment": "Effective albedos according to Lindberg et al., 2008; 2016." 
+ }, + "Material": { + "Value": { + "Brick_wall": 0.2, + "Concrete_wall": 0.2, + "Wood_wall": 0.2 + }, + "Comment": "Material albedos according to Wallenberg et al., 2025." + } + }, + "Emissivity": { + "Value": { + "Cobble_stone_2014a": 0.95, + "Dark_asphalt": 0.95, + "Roofs(buildings)": 0.95, + "Grass_unmanaged": 0.94, + "Bare_soil": 0.94, + "Water": 0.98, + "Walls": 0.9, + "Brick_wall": 0.9, + "Concrete_wall": 0.9, + "Wood_wall": 0.9 + }, + "Comment": "Emissivity of each land cover class." + }, + "Specific_heat": { + "Value": { + "Cobble_stone_2014a": -9999.0, + "Dark_asphalt": -9999.0, + "Roofs(buildings)": -9999.0, + "Grass_unmanaged": -9999.0, + "Bare_soil": -9999.0, + "Water": -9999.0, + "Walls": -9999.0, + "Brick_wall": 800, + "Concrete_wall": 840, + "Wood_wall": 1880 + }, + "Comment": "Specific heat capacity, in units J kg-1 K-1, used for wall surface temperatures according to Wallenberg et al. 2025." + }, + "Thermal_conductivity": { + "Value": { + "Cobble_stone_2014a": -9999.0, + "Dark_asphalt": -9999.0, + "Roofs(buildings)": -9999.0, + "Grass_unmanaged": -9999.0, + "Bare_soil": -9999.0, + "Water": -9999.0, + "Walls": -9999.0, + "Brick_wall": 0.84, + "Concrete_wall": 1.7, + "Wood_wall": 0.17 + }, + "Comment": "Thermal conductivity of each land cover class, in units W m-1 K-1, used for wall surface temperatures according to Wallenberg et al. 2025." + }, + "Density": { + "Value": { + "Cobble_stone_2014a": -9999.0, + "Dark_asphalt": -9999.0, + "Roofs(buildings)": -9999.0, + "Grass_unmanaged": -9999.0, + "Bare_soil": -9999.0, + "Water": -9999.0, + "Walls": -9999.0, + "Brick_wall": 1700, + "Concrete_wall": 2200, + "Wood_wall": 700 + }, + "Comment": "Density of the material in units kg m-3, used for wall surface temperatures according to Wallenberg et al. 2025." 
+ }, + "Wall_thickness": { + "Value": { + "Brick_wall": 0.1, + "Concrete_wall": 0.2, + "Wood_wall": 0.03 + }, + "Comment": "Wall thickness in units meters, used to calculate characteristic time for wall surface temperatures (Wallenberg et al., 2025)." + }, + "TmaxLST": { + "Value": { + "Cobble_stone_2014a": 15.0, + "Dark_asphalt": 15.0, + "Roofs(buildings)": 15.0, + "Grass_unmanaged": 14.0, + "Bare_soil": 14.0, + "Water": 12.0, + "Walls": 15.0, + "Brick_wall": 15.0, + "Concrete_wall": 16.0, + "Wood_wall": 14.0 + }, + "Comment": "TmaxLST used for ground surface temperatures and wall surface temperatures according to Lindberg et al. 2008; 2016." + }, + "Ts_deg": { + "Value": { + "Cobble_stone_2014a": 0.37, + "Dark_asphalt": 0.58, + "Roofs(buildings)": 0.58, + "Grass_unmanaged": 0.21, + "Bare_soil": 0.33, + "Water": 0.0, + "Walls": 0.37, + "Brick_wall": 0.40, + "Concrete_wall": 0.35, + "Wood_wall": 0.50 + }, + "Comment": "Ts_deg used for ground surface temperatures and wall surface temperatures according to Lindberg et al. 2008; 2016." + }, + "Tstart": { + "Value": { + "Cobble_stone_2014a": -3.41, + "Dark_asphalt": -9.78, + "Roofs(buildings)": -9.78, + "Grass_unmanaged": -3.38, + "Bare_soil": -3.01, + "Water": 0.0, + "Walls": -3.41, + "Brick_wall": -4.0, + "Concrete_wall": -5.0, + "Wood_wall": -2.0 + }, + "Comment": "Tstart used for ground surface temperatures and wall surface temperatures according to Lindberg et al. 2008; 2016." + }, + "Tmrt_params": { + "Value": { + "absK": 0.70, + "absL": 0.97, + "posture": "Standing" + }, + "Comment": "Absorption coefficients per ISO 7726:1998. absK=0.70 (shortwave), absL=0.97 (longwave)." + }, + "PET_settings": { + "Value": { + "Age": 35, + "Weight": 75.0, + "Height": 180, + "Sex": "Male", + "Activity": 80.0, + "clo": 0.90 + }, + "Comment": "Settings to calculate Physiological Equivalent Temperature (PET). Sex is either Male or Female." 
+
+  "Wind_Height": {
+    "Value": {
+      "magl": 10.0
+    },
+    "Comment": "Height of wind sensor for PET and UTCI calculations."
+  },
+  "Tree_settings": {
+    "Value": {
+      "Transmissivity": 0.03,
+      "Trunk_ratio": 0.25,
+      "First_day_leaf": 97,
+      "Last_day_leaf": 300
+    },
+    "Comment": "Settings for trees. Shortwave transmissivity as a fraction (0-1) of incoming radiation. Trunk ratio as a fraction of total height."
+  },
+  "Posture": {
+    "Standing": {
+      "Value": {
+        "Fside": 0.22,
+        "Fup": 0.06,
+        "height": 1.1,
+        "Fcyl": 0.28
+      },
+      "Comment": "Standing posture of human body. Used in Tmrt calculations."
+    },
+    "Sitting": {
+      "Value": {
+        "Fside": 0.166666,
+        "Fup": 0.166666,
+        "height": 0.75,
+        "Fcyl": 0.2
+      },
+      "Comment": "Sitting posture of human body. Used in Tmrt calculations."
+    }
+  }
+}
diff --git a/pysrc/solweig/data/default_params.json b/pysrc/solweig/data/default_params.json
new file mode 100644
index 0000000..d67ce65
--- /dev/null
+++ b/pysrc/solweig/data/default_params.json
@@ -0,0 +1,56 @@
+{
+  "Tmrt_params": {
+    "Value": {
+      "absK": 0.7,
+      "absL": 0.97,
+      "posture": "Standing"
+    },
+    "Comment": "Absorption coefficients per ISO 7726:1998. absK=0.70 (shortwave, clothed human), absL=0.97 (longwave). Posture is Standing or Sitting."
+  },
+  "PET_settings": {
+    "Value": {
+      "Age": 35,
+      "Weight": 75.0,
+      "Height": 180,
+      "Sex": "Male",
+      "Activity": 80.0,
+      "clo": 0.9
+    },
+    "Comment": "Settings to calculate Physiological Equivalent Temperature (PET). Sex is either Male or Female."
+  },
+  "Wind_Height": {
+    "Value": {
+      "magl": 10.0
+    },
+    "Comment": "Height of wind sensor for PET and UTCI calculations."
+  },
+  "Tree_settings": {
+    "Value": {
+      "Transmissivity": 0.03,
+      "Trunk_ratio": 0.25,
+      "First_day_leaf": 97,
+      "Last_day_leaf": 300
+    },
+    "Comment": "Settings for trees. Shortwave transmissivity as a fraction (0-1) of incoming radiation. Trunk ratio as a fraction of total height."
+ }, + "Posture": { + "Standing": { + "Value": { + "Fside": 0.22, + "Fup": 0.06, + "height": 1.1, + "Fcyl": 0.28 + }, + "Comment": "Standing posture of human body. Used in Tmrt calculations." + }, + "Sitting": { + "Value": { + "Fside": 0.166666, + "Fup": 0.166666, + "height": 0.75, + "Fcyl": 0.2 + }, + "Comment": "Sitting posture of human body. Used in Tmrt calculations." + } + } +} diff --git a/pysrc/solweig/data/physics_defaults.json b/pysrc/solweig/data/physics_defaults.json new file mode 100644 index 0000000..8803794 --- /dev/null +++ b/pysrc/solweig/data/physics_defaults.json @@ -0,0 +1,32 @@ +{ + "Tree_settings": { + "Value": { + "Transmissivity": 0.03, + "Transmissivity_leafoff": 0.5, + "Trunk_ratio": 0.25, + "First_day_leaf": 97, + "Last_day_leaf": 300 + }, + "Comment": "Settings for trees. Shortwave transmissivity for leaf-on and leaf-off seasons. Trunk ratio as a fraction of total height. Seasonal dates (day of year)." + }, + "Posture": { + "Standing": { + "Value": { + "Fside": 0.22, + "Fup": 0.06, + "height": 1.1, + "Fcyl": 0.28 + }, + "Comment": "Standing posture geometry. Projected area fractions for human body. Used in Tmrt calculations." + }, + "Sitting": { + "Value": { + "Fside": 0.166666, + "Fup": 0.166666, + "height": 0.75, + "Fcyl": 0.2 + }, + "Comment": "Sitting posture geometry. Projected area fractions for human body. Used in Tmrt calculations." + } + } +} diff --git a/pysrc/solweig/errors.py b/pysrc/solweig/errors.py new file mode 100644 index 0000000..f026cb8 --- /dev/null +++ b/pysrc/solweig/errors.py @@ -0,0 +1,123 @@ +"""SOLWEIG error types for actionable error messages. + +These exceptions provide structured information about what went wrong +and how to fix it, rather than generic error messages. 
# (continuation of the errors.py module docstring opened in the previous chunk;
#  the usage example is reproduced here in comment form so this span parses)
#   Example:
#       try:
#           result = solweig.calculate(surface, location, weather)
#       except solweig.GridShapeMismatch as e:
#           print(f"Grid '{e.field}' has wrong shape: expected {e.expected}, got {e.got}")
#       except solweig.MissingPrecomputedData as e:
#           print(f"Missing data: {e}")

from __future__ import annotations


class SolweigError(Exception):
    """Base class for all SOLWEIG errors.

    Catching this type catches every error raised by the package.
    """
    # Fix: removed the redundant `pass` — a class body containing a docstring
    # needs no placeholder statement.


class InvalidSurfaceData(SolweigError):
    """Raised when surface data is invalid or inconsistent.

    Attributes:
        message: Human-readable error description.
        field: Name of the problematic field (e.g., "cdsm", "dem").
        expected: What was expected (optional).
        got: What was actually provided (optional).
    """

    def __init__(
        self,
        message: str,
        field: str | None = None,
        expected: str | None = None,
        got: str | None = None,
    ):
        # Structured fields let callers build their own messages without
        # parsing the human-readable one.
        self.field = field
        self.expected = expected
        self.got = got
        super().__init__(message)


class GridShapeMismatch(InvalidSurfaceData):
    """Raised when grid shapes don't match the DSM.

    All surface grids (CDSM, DEM, TDSM, land_cover, etc.) must have
    the same shape as the DSM.

    Attributes:
        expected_shape: The DSM shape every grid must match (tuple).
        actual_shape: The offending grid's shape (tuple).

    Example:
        >>> surface = SurfaceData(dsm=np.ones((100, 100)), cdsm=np.ones((50, 50)))
        GridShapeMismatch: Grid shape mismatch for 'cdsm':
            Expected: (100, 100) (matching DSM)
            Got: (50, 50)
    """

    def __init__(self, field: str, expected_shape: tuple, actual_shape: tuple):
        message = (
            f"Grid shape mismatch for '{field}':\n"
            f" Expected: {expected_shape} (matching DSM)\n"
            f" Got: {actual_shape}\n"
            "Ensure all surface grids have the same dimensions as the DSM."
        )
        # Base class stores string forms; also keep the raw tuples for
        # programmatic comparison.
        super().__init__(message, field=field, expected=str(expected_shape), got=str(actual_shape))
        self.expected_shape = expected_shape
        self.actual_shape = actual_shape


class MissingPrecomputedData(SolweigError):
    """Raised when required precomputed data is not available.

    Some features require precomputed data (e.g., anisotropic sky needs
    shadow matrices). This error explains what's missing and how to fix it.

    Attributes:
        what: Description of the missing data.
        suggestion: How to fix the issue (optional).
    """

    def __init__(self, what: str, suggestion: str | None = None):
        self.what = what
        self.suggestion = suggestion
        message = f"Missing precomputed data: {what}"
        if suggestion:
            # Append the remedy on its own line when one is known.
            message += f"\n{suggestion}"
        super().__init__(message)


class WeatherDataError(SolweigError):
    """Raised when weather data is invalid.

    Attributes:
        field: The problematic weather field (e.g., "ta", "rh").
        value: The invalid value.
        reason: Why the value is invalid.
    """

    def __init__(self, field: str, value: float | str, reason: str | None = None):
        self.field = field
        self.value = value
        self.reason = reason
        message = f"Invalid weather data for '{field}': {value}"
        if reason:
            message += f" ({reason})"
        super().__init__(message)


class ConfigurationError(SolweigError):
    """Raised when configuration is invalid or inconsistent.

    Attributes:
        parameter: The problematic parameter name.
        reason: Why the configuration is invalid.
    """

    def __init__(self, parameter: str, reason: str):
        self.parameter = parameter
        self.reason = reason
        message = f"Invalid configuration for '{parameter}': {reason}"
        super().__init__(message)

# --- diff continues with pysrc/solweig/io.py ---
# (module docstring of io.py, reproduced as comments; the module itself
#  continues in the next chunk)
#   Raster I/O and EPW weather file handling.
#   Provides functions for reading and writing GeoTIFF rasters, parsing
#   EnergyPlus Weather (EPW) files, and downloading TMY data from PVGIS.
#   Automatically selects between rasterio and GDAL backends (see ``_compat``).
# (closing quotes of the io.py module docstring belong to the previous chunk)

from __future__ import annotations

import logging
import math
from pathlib import Path

import numpy as np

from ._compat import GDAL_ENV
from .buffers import as_float32

# NOTE(review): calling logging.basicConfig() at import time in a library
# module configures the root logger as a side effect; the logging HOWTO
# recommends a NullHandler for libraries instead — confirm before changing.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# Conditional imports based on the backend chosen in _compat
if GDAL_ENV:
    from osgeo import gdal
else:
    import pyproj
    import rasterio
    from rasterio.features import rasterize
    from rasterio.mask import mask
    from rasterio.transform import Affine, from_origin
    from rasterio.windows import Window
    from shapely import geometry


FLOAT_TOLERANCE = 1e-9


def _assert_north_up(transform) -> None:
    """Ensure the raster transform describes a north-up raster."""
    if hasattr(transform, "b") and hasattr(transform, "d"):
        # rasterio Affine: b and d are the rotation/shear terms.
        rotation_terms = (transform.b, transform.d)
    else:
        # GDAL-style tuple (c, a, b, f, d, e): rotation lives at indices 2 and 4.
        if len(transform) < 6:
            raise ValueError("Transform must contain 6 elements.")
        rotation_terms = (transform[2], transform[4])
    for term in rotation_terms:
        if not math.isclose(term, 0.0, abs_tol=FLOAT_TOLERANCE):
            raise ValueError("Only north-up rasters (no rotation) are supported.")


def _shrink_axis_to_grid(min_val: float, max_val: float, origin: float, pixel_size: float) -> tuple[float, float]:
    """Snap one bbox axis inward onto the pixel grid anchored at ``origin``."""
    if pixel_size == 0:
        raise ValueError("Pixel size must be non-zero to shrink bbox to pixel grid.")
    cell = abs(pixel_size)
    # Tolerance keeps edges that already sit on the grid from being dropped.
    first_idx = math.ceil(((min_val - origin) / cell) - FLOAT_TOLERANCE)
    last_idx = math.floor(((max_val - origin) / cell) + FLOAT_TOLERANCE)
    snapped_lo = origin + first_idx * cell
    snapped_hi = origin + last_idx * cell
    if not snapped_hi > snapped_lo:
        raise ValueError("Bounding box collapsed after snapping to the pixel grid.")
    return snapped_lo, snapped_hi


def shrink_bbox_to_pixel_grid(
    bbox: tuple[float, float, float, float],
    origin_x: float,
    origin_y: float,
    pixel_width: float,
    pixel_height: float,
) -> tuple[float, float, float, float]:
    """Shrink bbox so its edges land on the pixel grid defined by the raster origin."""

    minx, miny, maxx, maxy = bbox
    if minx >= maxx or miny >= maxy:
        raise ValueError("Bounding box is invalid (min must be < max for both axes).")
    x_lo, x_hi = _shrink_axis_to_grid(minx, maxx, origin_x, pixel_width)
    y_lo, y_hi = _shrink_axis_to_grid(miny, maxy, origin_y, pixel_height)
    return x_lo, y_lo, x_hi, y_hi


def _bounds_to_tuple(bounds) -> tuple[float, float, float, float]:
    """Normalise a rasterio BoundingBox or plain sequence to (minx, miny, maxx, maxy)."""
    if hasattr(bounds, "left"):
        return bounds.left, bounds.bottom, bounds.right, bounds.top
    return tuple(bounds)


def _validate_bbox_within_bounds(
    bbox: tuple[float, float, float, float], bounds, *, tol: float = FLOAT_TOLERANCE
) -> None:
    """Raise ValueError when bbox pokes outside the dataset bounds (with tolerance)."""
    minx, miny, maxx, maxy = bbox
    left, bottom, right, top = _bounds_to_tuple(bounds)
    outside = minx < left - tol or maxx > right + tol or miny < bottom - tol or maxy > top + tol
    if outside:
        raise ValueError("Bounding box is not fully contained within the raster dataset bounds")


def _compute_bounds_from_transform(transform, width: int, height: int) -> tuple[float, float, float, float]:
    """Return raster bounds for a GDAL-style transform tuple."""
    x0, y0 = transform[0], transform[3]
    x1 = x0 + width * transform[1]
    y1 = y0 + height * transform[5]
    # min/max handle negative pixel sizes (south-up y axis) uniformly.
    return min(x0, x1), min(y0, y1), max(x0, x1), max(y0, y1)


def _normalise_bbox(bbox_sequence) -> tuple[float, float, float, float]:
    """Coerce a four-element sequence to a float bbox tuple with a clear error."""
    try:
        minx, miny, maxx, maxy = bbox_sequence
    except Exception as exc:  # noqa: BLE001
        raise ValueError("Bounding box must contain exactly four numeric values") from exc
    return float(minx), float(miny), float(maxx), float(maxy)


# (def rasterise_gdf(...) — definition continues in the next chunk)
rasterise_gdf(gdf, geom_col, ht_col, bbox=None, pixel_size: float = 1.0): + # Define raster parameters + if bbox is not None: + # Unpack bbox values + minx, miny, maxx, maxy = _normalise_bbox(bbox) + else: + # Use the total bounds of the GeoDataFrame + minx, miny, maxx, maxy = map(float, gdf.total_bounds) + if pixel_size <= 0: + raise ValueError("Pixel size must be a positive number.") + minx, miny, maxx, maxy = shrink_bbox_to_pixel_grid( + (minx, miny, maxx, maxy), + origin_x=minx, + origin_y=maxy, + pixel_width=pixel_size, + pixel_height=pixel_size, + ) + width = int(round((maxx - minx) / pixel_size)) + height = int(round((maxy - miny) / pixel_size)) + if width <= 0 or height <= 0: + raise ValueError("Bounding box collapsed after snapping to pixel grid.") + transform = from_origin(minx, maxy, pixel_size, pixel_size) + # Create a blank array for the raster + raster = np.zeros((height, width), dtype=np.float32) + # Burn geometries into the raster + shapes = ((geom, value) for geom, value in zip(gdf[geom_col], gdf[ht_col], strict=False)) + raster = rasterize(shapes, out_shape=raster.shape, transform=transform, fill=0, dtype=np.float32) + + return raster, transform + + +def check_path(path_str: str | Path, make_dir: bool = False) -> Path: + # Ensure path exists + path = Path(path_str).absolute() + if not path.parent.exists(): + if make_dir: + path.parent.mkdir(parents=True, exist_ok=True) + else: + raise OSError( + f"Parent directory {path.parent} does not exist for path {path}. Set make_dir=True to create it." + ) + if not path.exists() and not path.suffix: + if make_dir: + path.mkdir(parents=True, exist_ok=True) + else: + raise OSError(f"Path {path} does not exist. 
Set make_dir=True to create it.") + return path + + +# Default color scale ranges for preview images (ensures consistency across timesteps) +# Format: prefix -> (vmin, vmax) +_PREVIEW_RANGES: dict[str, tuple[float, float]] = { + "tmrt": (0, 80), # Mean radiant temperature (°C) + "utci": (-40, 50), # Universal Thermal Climate Index (°C) + "pet": (-40, 50), # Physiological Equivalent Temperature (°C) + "shadow": (0, 1), # Shadow fraction (1=sunlit, 0=shaded) + "kdown": (0, 1200), # Downwelling shortwave radiation (W/m²) + "kup": (0, 800), # Upwelling shortwave radiation (W/m²) + "ldown": (150, 550), # Downwelling longwave radiation (W/m²) + "lup": (250, 650), # Upwelling longwave radiation (W/m²) + "svf": (0, 1), # Sky view factor + "gvf": (0, 1), # Ground view factor +} + + +def _get_preview_range(filename: str) -> tuple[float, float] | None: + """Get the color scale range for a variable based on filename prefix.""" + name = filename.lower() + for prefix, range_vals in _PREVIEW_RANGES.items(): + if name.startswith(prefix): + return range_vals + return None + + +def _generate_preview_png(data_arr: np.ndarray, out_path: Path, max_size: int = 512, colormap: str = "turbo") -> None: + """ + Generate a color PNG preview image from raster data. + + Uses consistent color scales for known variable types (tmrt, utci, shadow, etc.) + to enable visual comparison across timesteps. Falls back to percentile-based + scaling for unknown variables. + + Args: + data_arr: 2D numpy array to visualize + out_path: Output file path (preview will be saved as .preview.png) + max_size: Maximum dimension for the preview image (maintains aspect ratio) + colormap: Matplotlib colormap name (default: 'turbo'). Falls back to grayscale if unavailable. 
+ Common options: 'turbo', 'viridis', 'plasma', 'inferno', 'magma', 'coolwarm' + """ + try: + from PIL import Image + + # Handle NaN values + valid_mask = ~np.isnan(data_arr) + if not np.any(valid_mask): + return # All NaN, skip preview + + # Use variable-specific range if available, otherwise fall back to percentiles + preset_range = _get_preview_range(out_path.stem) + if preset_range is not None: + vmin, vmax = preset_range + else: + # Fallback: use percentiles for unknown variables + valid_data = data_arr[valid_mask] + vmin, vmax = np.nanpercentile(valid_data, [2, 98]) + + if vmax <= vmin: + vmax = vmin + 1 # Avoid division by zero + + # Normalize to 0-1 + normalized = np.clip((data_arr - vmin) / (vmax - vmin), 0, 1) + normalized = np.nan_to_num(normalized, nan=0) + + # Try to apply matplotlib colormap for color output + try: + import matplotlib.pyplot as plt + + # Get colormap and apply + cmap = plt.get_cmap(colormap) + colored = cmap(normalized) # Returns RGBA in [0, 1] + + # Convert to RGB uint8 (drop alpha channel) + rgb = (colored[:, :, :3] * 255).astype(np.uint8) + img = Image.fromarray(rgb, mode="RGB") + except (ImportError, ValueError): + # Fallback to grayscale if matplotlib not available or colormap invalid + grayscale = (normalized * 255).astype(np.uint8) + img = Image.fromarray(grayscale, mode="L") + + # Resize to max_size while maintaining aspect ratio + if max(img.size) > max_size: + ratio = max_size / max(img.size) + new_size = (int(img.width * ratio), int(img.height * ratio)) + img = img.resize(new_size, Image.Resampling.LANCZOS) + + # Save preview + preview_path = out_path.with_suffix(".preview.png") + img.save(preview_path, "PNG") + logger.debug(f"Saved preview: {preview_path}") + except ImportError: + logger.debug("PIL not available, skipping preview generation") + except Exception as e: + logger.warning(f"Failed to generate preview: {e}") + + +def save_raster( + out_path_str: str, + data_arr: np.ndarray, + trf_arr: list[float], + crs_wkt: str 
| None, + no_data_val: float = -9999, + ensure_float32: bool = True, + use_cog: bool = True, + generate_preview: bool = True, +): + """ + Save raster to GeoTIFF (Cloud-Optimized by default). + + Args: + out_path_str: Output file path + data_arr: 2D numpy array to save + trf_arr: GDAL-style geotransform [top_left_x, pixel_width, rotation, top_left_y, rotation, pixel_height] + crs_wkt: CRS in WKT format + no_data_val: No-data value to use + ensure_float32: If True, ensure array is float32 before saving + (default: True — converts any non-float32 dtype) + use_cog: If True, save as Cloud-Optimized GeoTIFF with built-in overviews + (default: True for better OS thumbnail support) + generate_preview: If True, generate a sidecar .preview.png file for OS thumbnails + (default: True for float data that can't be previewed directly) + """ + if ensure_float32: + data_arr = as_float32(data_arr) + + attempts = 2 + while attempts > 0: + attempts -= 1 + try: + out_path = check_path(out_path_str, make_dir=True) + height, width = data_arr.shape + + if GDAL_ENV is False: + trf = Affine.from_gdal(*trf_arr) + crs = None + if crs_wkt: + crs = pyproj.CRS(crs_wkt) + + if use_cog: + # Write as Cloud-Optimized GeoTIFF + # COG driver creates overviews automatically + from rasterio.io import MemoryFile + + # Create in memory first, then write as COG + memfile = MemoryFile() + with memfile.open( + driver="GTiff", + height=height, + width=width, + count=1, + dtype=data_arr.dtype, + crs=crs, + transform=trf, + nodata=no_data_val, + ) as mem: + mem.write(data_arr, 1) + + # Now copy to COG format + from rasterio.shutil import copy + + copy( + memfile.open(), + out_path, + driver="COG", + overview_resampling="average", + ) + memfile.close() + logger.debug(f"Saved COG: {out_path}") + else: + # Standard GeoTIFF + with rasterio.open( + out_path, + "w", + driver="GTiff", + height=height, + width=width, + count=1, + dtype=data_arr.dtype, + crs=crs, + transform=trf, + nodata=no_data_val, + ) as dst: + 
dst.write(data_arr, 1) + else: + # GDAL backend + if use_cog: + # Use COG driver (GDAL 3.1+) + driver = gdal.GetDriverByName("COG") + if driver is None: + # Fallback to GTiff with overviews if COG driver not available + logger.warning("COG driver not available, using GTiff with overviews") + driver = gdal.GetDriverByName("GTiff") + options = ["TILED=YES"] + ds = driver.Create(str(out_path), width, height, 1, gdal.GDT_Float32, options) + ds.SetGeoTransform(trf_arr) + if crs_wkt: + ds.SetProjection(crs_wkt) + band = ds.GetRasterBand(1) + band.SetNoDataValue(no_data_val) + band.WriteArray(data_arr) + # Build overviews + if min(height, width) > 256: + overview_levels = [] + size = min(height, width) + level = 2 + while size // level > 128: + overview_levels.append(level) + level *= 2 + if overview_levels: + ds.BuildOverviews("AVERAGE", overview_levels) + ds = None + else: + # COG driver requires creating via CreateCopy from memory dataset + mem_driver = gdal.GetDriverByName("MEM") + mem_ds = mem_driver.Create("", width, height, 1, gdal.GDT_Float32) + mem_ds.SetGeoTransform(trf_arr) + if crs_wkt: + mem_ds.SetProjection(crs_wkt) + band = mem_ds.GetRasterBand(1) + band.SetNoDataValue(no_data_val) + band.WriteArray(data_arr) + + # Copy to COG + cog_options = ["OVERVIEW_RESAMPLING=AVERAGE"] + driver.CreateCopy(str(out_path), mem_ds, options=cog_options) + mem_ds = None + logger.debug(f"Saved COG: {out_path}") + else: + # Standard GeoTIFF + driver = gdal.GetDriverByName("GTiff") + ds = driver.Create(str(out_path), width, height, 1, gdal.GDT_Float32) + ds.SetGeoTransform(trf_arr) + if crs_wkt: + ds.SetProjection(crs_wkt) + band = ds.GetRasterBand(1) + band.SetNoDataValue(no_data_val) + band.WriteArray(data_arr) + ds = None + + # Generate sidecar preview PNG for float data (OS can't render float GeoTIFFs) + if generate_preview and np.issubdtype(data_arr.dtype, np.floating): + _generate_preview_png(data_arr, out_path) + + return + except Exception as e: + if attempts == 0: + 
raise e + logger.warning(f"Failed to save raster to {out_path_str}: {e}. Retrying...") + + +def get_raster_metadata(path_str: str | Path) -> dict: + """ + Get raster metadata without loading the whole file. + Returns dict with keys: rows, cols, transform, crs, nodata, res. + Transform is always a list [c, a, b, f, d, e] (GDAL-style). + CRS is always a WKT string (or None). + """ + path = check_path(path_str) + if GDAL_ENV is False: + with rasterio.open(path) as src: + # Convert Affine to GDAL-style list + trf = src.transform + transform_list = [trf.c, trf.a, trf.b, trf.f, trf.d, trf.e] + # Convert CRS to WKT string + crs_wkt = src.crs.to_wkt() if src.crs is not None else None + return { + "rows": src.height, + "cols": src.width, + "transform": transform_list, + "crs": crs_wkt, + "nodata": src.nodata, + "res": src.res, # (xres, yres) + "bounds": src.bounds, + } + else: + ds = gdal.Open(str(path)) + if ds is None: + raise OSError(f"Could not open {path}") + gt = ds.GetGeoTransform() + return { + "rows": ds.RasterYSize, + "cols": ds.RasterXSize, + "transform": gt, + "crs": ds.GetProjection() or None, + "nodata": ds.GetRasterBand(1).GetNoDataValue(), + "res": (gt[1], abs(gt[5])), # Approximate resolution + } + + +def read_raster_window(path_str: str | Path, window: tuple[slice, slice], band: int = 1) -> np.ndarray: + """ + Read a window from a raster file. + window is (row_slice, col_slice). + """ + path = check_path(path_str) + row_slice, col_slice = window + + # Handle None slices (read full dimension) + # This is tricky without knowing full shape, so we assume caller provides valid slices + # or we'd need to open file to check shape first. + # For now, assume valid integer slices. 
+ + if GDAL_ENV is False: + with rasterio.open(path) as src: + # rasterio Window(col_off, row_off, width, height) + # Slices are start:stop + r_start = row_slice.start if row_slice.start is not None else 0 + r_stop = row_slice.stop if row_slice.stop is not None else src.height + c_start = col_slice.start if col_slice.start is not None else 0 + c_stop = col_slice.stop if col_slice.stop is not None else src.width + + win = Window(c_start, r_start, c_stop - c_start, r_stop - r_start) # type: ignore[too-many-positional-arguments] + return src.read(band, window=win) + else: + ds = gdal.Open(str(path)) + if ds is None: + raise OSError(f"Could not open {path}") + + r_start = row_slice.start if row_slice.start is not None else 0 + r_stop = row_slice.stop if row_slice.stop is not None else ds.RasterYSize + c_start = col_slice.start if col_slice.start is not None else 0 + c_stop = col_slice.stop if col_slice.stop is not None else ds.RasterXSize + + xoff = c_start + yoff = r_start + xsize = c_stop - c_start + ysize = r_stop - r_start + + return ds.GetRasterBand(band).ReadAsArray(xoff, yoff, xsize, ysize) + + +def load_raster( + path_str: str, bbox: list[int] | None = None, band: int = 0, ensure_float32: bool = True +) -> tuple[np.ndarray, list[float], str | None, float | None]: + """ + Load raster, optionally crop to bbox. 
+ + Args: + path_str: Path to raster file + bbox: Optional bounding box [minx, miny, maxx, maxy] + band: Band index to read (0-based) + ensure_float32: If True, ensure output array is float32 + (default: True — converts any non-float32 dtype including integers) + + Returns: + Tuple of (array, transform, crs_wkt, no_data_value) + """ + # Load raster, optionally crop to bbox + path = check_path(path_str, make_dir=False) + if not path.exists(): + raise FileNotFoundError(f"Raster file {path} does not exist.") + if GDAL_ENV is False: + with rasterio.open(path) as dataset: + _assert_north_up(dataset.transform) + crs_wkt = dataset.crs.to_wkt() if dataset.crs is not None else None + no_data_val = dataset.nodata + transform = dataset.transform + if bbox is not None: + bbox_tuple = _normalise_bbox(bbox) + snapped_bbox = shrink_bbox_to_pixel_grid( + bbox_tuple, + origin_x=transform.c, + origin_y=transform.f, + pixel_width=transform.a, + pixel_height=transform.e, + ) + _validate_bbox_within_bounds(snapped_bbox, dataset.bounds) + bbox_geom = geometry.box(*snapped_bbox) + rast, trf = mask(dataset, [bbox_geom], crop=True) + else: + rast = dataset.read() + trf = transform + # Convert rasterio Affine to GDAL-style list + trf_arr = [trf.c, trf.a, trf.b, trf.f, trf.d, trf.e] + # rast shape: (bands, rows, cols) + if rast.ndim == 3: + if band < 0 or band >= rast.shape[0]: + raise IndexError(f"Requested band {band} out of range; raster has {rast.shape[0]} band(s)") + rast_arr = rast[band] + else: + rast_arr = rast + else: + dataset = gdal.Open(str(path)) + if dataset is None: + raise FileNotFoundError(f"Could not open {path}") + trf = dataset.GetGeoTransform() + _assert_north_up(trf) + # GetProjection returns WKT string (or empty string) + crs_wkt = dataset.GetProjection() or None + rb = dataset.GetRasterBand(band + 1) + if rb is None: + dataset = None + raise IndexError(f"Requested band {band} out of range in GDAL dataset") + rast_arr = rb.ReadAsArray() + no_data_val = 
rb.GetNoDataValue() + if bbox is not None: + bbox_tuple = _normalise_bbox(bbox) + snapped_bbox = shrink_bbox_to_pixel_grid( + bbox_tuple, + origin_x=trf[0], + origin_y=trf[3], + pixel_width=trf[1], + pixel_height=trf[5], + ) + bounds = _compute_bounds_from_transform(trf, dataset.RasterXSize, dataset.RasterYSize) + _validate_bbox_within_bounds(snapped_bbox, bounds) + min_x, min_y, max_x, max_y = snapped_bbox + pixel_width = trf[1] + pixel_height = abs(trf[5]) + xoff = int(round((min_x - trf[0]) / pixel_width)) + yoff = int(round((trf[3] - max_y) / pixel_height)) + xsize = int(round((max_x - min_x) / pixel_width)) + ysize = int(round((max_y - min_y) / pixel_height)) + # guard offsets/sizes + if xoff < 0 or yoff < 0 or xsize <= 0 or ysize <= 0: + dataset = None + raise ValueError("Computed window from bbox is out of raster bounds or invalid") + rast_arr = rast_arr[yoff : yoff + ysize, xoff : xoff + xsize] + trf_arr = [min_x, trf[1], 0, max_y, 0, trf[5]] + else: + trf_arr = [trf[0], trf[1], 0, trf[3], 0, trf[5]] + dataset = None # ensure dataset closed + if ensure_float32: + rast_arr = as_float32(rast_arr) + # Handle no-data (support NaN) + if no_data_val is not None and not np.isnan(no_data_val): + logger.info(f"No-data value is {no_data_val}, replacing with NaN") + rast_arr[rast_arr == no_data_val] = np.nan + if rast_arr.size == 0: + raise ValueError("Raster array is empty after loading/cropping") + return rast_arr, trf_arr, crs_wkt, no_data_val + + +def xy_to_lnglat(crs_wkt: str | None, x, y): + """Convert x, y coordinates to longitude and latitude. + + Accepts scalar or array-like x/y. If crs_wkt is None the inputs are + assumed already to be lon/lat and are returned unchanged. 
+ """ + if crs_wkt is None: + logger.info("No CRS provided, assuming coordinates are already in WGS84 (lon/lat).") + return x, y + + try: + if GDAL_ENV is False: + source_crs = pyproj.CRS(crs_wkt) + target_crs = pyproj.CRS(4326) # WGS84 + transformer = pyproj.Transformer.from_crs(source_crs, target_crs, always_xy=True) + lng, lat = transformer.transform(x, y) + else: + old_cs = gdal.osr.SpatialReference() + old_cs.ImportFromWkt(crs_wkt) + new_cs = gdal.osr.SpatialReference() + new_cs.ImportFromEPSG(4326) + transform = gdal.osr.CoordinateTransformation(old_cs, new_cs) + out = transform.TransformPoint(float(x), float(y)) + lng, lat = out[0], out[1] + + return lng, lat + + except Exception: + logger.exception("Failed to transform coordinates") + raise + + +def create_empty_raster( + path_str: str | Path, + rows: int, + cols: int, + transform: list[float], + crs_wkt: str, + dtype=np.float32, + nodata: float = -9999, + bands: int = 1, +): + """ + Create an empty GeoTIFF file initialized with nodata. 
+ """ + path = check_path(path_str, make_dir=True) + + if GDAL_ENV is False: + trf = Affine.from_gdal(*transform) + crs = None + if crs_wkt: + crs = pyproj.CRS(crs_wkt) + + with rasterio.open( + path, + "w", + driver="GTiff", + height=rows, + width=cols, + count=bands, + dtype=dtype, + crs=crs, + transform=trf, + nodata=nodata, + ): + pass # Just create empty raster + else: + driver = gdal.GetDriverByName("GTiff") + # Map numpy dtype to GDAL type + gdal_type = gdal.GDT_Float32 # Default + if dtype == np.float64: + gdal_type = gdal.GDT_Float64 + elif dtype == np.int32: + gdal_type = gdal.GDT_Int32 + elif dtype == np.int16: + gdal_type = gdal.GDT_Int16 + elif dtype == np.uint8: + gdal_type = gdal.GDT_Byte + + ds = driver.Create(str(path), cols, rows, bands, gdal_type) + ds.SetGeoTransform(transform) + if crs_wkt: + ds.SetProjection(crs_wkt) + for b in range(1, bands + 1): + band = ds.GetRasterBand(b) + band.SetNoDataValue(nodata) + band.Fill(nodata) + ds = None + + +def write_raster_window(path_str: str | Path, data: np.ndarray, window: tuple[slice, slice], band: int = 1): + """ + Write a data array to a specific window in an existing raster. + window is (row_slice, col_slice). 
+ """ + path = check_path(path_str) + row_slice, col_slice = window + + if GDAL_ENV is False: + from rasterio.windows import Window + + with rasterio.open(path, "r+") as dst: + win = Window( + col_slice.start, # type: ignore[too-many-positional-arguments] + row_slice.start, + col_slice.stop - col_slice.start, + row_slice.stop - row_slice.start, + ) + dst.write(data, band, window=win) + else: + ds = gdal.Open(str(path), gdal.GA_Update) + if ds is None: + raise OSError(f"Could not open {path} for update") + + xoff = col_slice.start + yoff = row_slice.start + + ds.GetRasterBand(band).WriteArray(data, xoff, yoff) + ds = None + + +class _EpwDataIndex: + """Lightweight index class mimicking pandas DatetimeIndex for EPW data.""" + + def __init__(self, timestamps: list): + self._timestamps = timestamps + self.tz = None + self.name = "datetime" + + def __len__(self): + return len(self._timestamps) + + def __getitem__(self, idx): + return self._timestamps[idx] + + def __iter__(self): + return iter(self._timestamps) + + def __ge__(self, other): + """Greater than or equal comparison, returns boolean array.""" + return _BooleanArray([t >= other for t in self._timestamps]) + + def __le__(self, other): + """Less than or equal comparison, returns boolean array.""" + return _BooleanArray([t <= other for t in self._timestamps]) + + def __gt__(self, other): + """Greater than comparison, returns boolean array.""" + return _BooleanArray([t > other for t in self._timestamps]) + + def __lt__(self, other): + """Less than comparison, returns boolean array.""" + return _BooleanArray([t < other for t in self._timestamps]) + + @property + def empty(self): + return len(self._timestamps) == 0 + + @property + def year(self): + return [t.year for t in self._timestamps] + + @property + def month(self): + return _IndexAccessor([t.month for t in self._timestamps]) + + @property + def day(self): + return _IndexAccessor([t.day for t in self._timestamps]) + + @property + def hour(self): + return 
_IndexAccessor([t.hour for t in self._timestamps]) + + def min(self): + return min(self._timestamps) if self._timestamps else None + + def max(self): + return max(self._timestamps) if self._timestamps else None + + def tz_localize(self, tz): + # Return self since we don't handle timezones in the fallback + return self + + +class _IndexAccessor: + """Helper for index property access like df.index.hour.""" + + def __init__(self, values: list): + self._values = values + + def __iter__(self): + return iter(self._values) + + def __gt__(self, other): + return _BooleanArray([v > other for v in self._values]) + + def __ge__(self, other): + return _BooleanArray([v >= other for v in self._values]) + + def __lt__(self, other): + return _BooleanArray([v < other for v in self._values]) + + def __le__(self, other): + return _BooleanArray([v <= other for v in self._values]) + + def __eq__(self, other): + return _BooleanArray([v == other for v in self._values]) + + def isin(self, values_set): + return [v in values_set for v in self._values] + + +class _BooleanArray: + """Helper for boolean array operations (& and |).""" + + def __init__(self, values: list): + self._values = values + + def __and__(self, other): + if isinstance(other, _BooleanArray): + return _BooleanArray([a and b for a, b in zip(self._values, other._values, strict=False)]) + return _BooleanArray([a and b for a, b in zip(self._values, other, strict=False)]) + + def __or__(self, other): + if isinstance(other, _BooleanArray): + return _BooleanArray([a or b for a, b in zip(self._values, other._values, strict=False)]) + return _BooleanArray([a or b for a, b in zip(self._values, other, strict=False)]) + + def __iter__(self): + return iter(self._values) + + def __getitem__(self, idx): + return self._values[idx] + + def __len__(self): + return len(self._values) + + def all(self): + return all(self._values) + + def any(self): + return any(self._values) + + def tolist(self): + return self._values + + +class _EpwRow: + 
"""Lightweight row class mimicking pandas Series for EPW data.""" + + def __init__(self, data: dict): + self._data = data + + def __getitem__(self, key): + return self._data.get(key, float("nan")) + + def get(self, key, default=None): + """Get value with default, like dict.get().""" + val = self._data.get(key, default) + if val is None or (isinstance(val, float) and val != val): # NaN check + return default + return val + + +class _EpwColumn: + """Lightweight column accessor mimicking a pandas Series for a single column.""" + + def __init__(self, values: list): + self._values = values + + def __getitem__(self, idx): + return self._values[idx] + + def __len__(self): + return len(self._values) + + def __iter__(self): + return iter(self._values) + + def min(self): + return min(v for v in self._values if v == v) # skip NaN + + def max(self): + return max(v for v in self._values if v == v) # skip NaN + + def __ge__(self, other): + return _BooleanArray([v >= other for v in self._values]) + + def __le__(self, other): + return _BooleanArray([v <= other for v in self._values]) + + def __gt__(self, other): + return _BooleanArray([v > other for v in self._values]) + + def __lt__(self, other): + return _BooleanArray([v < other for v in self._values]) + + def all(self): + return all(self._values) + + +class _EpwIloc: + """Positional indexing for _EpwDataFrame.""" + + def __init__(self, rows: list[dict]): + self._rows = rows + + def __getitem__(self, idx): + return _EpwRow(self._rows[idx]) + + +class _EpwDataFrame: + """Lightweight DataFrame-like class for EPW data without pandas dependency.""" + + def __init__(self, rows: list[dict], timestamps: list): + self._rows = rows + self._timestamps = timestamps + self.index = _EpwDataIndex(timestamps) + + def __len__(self): + return len(self._rows) + + @property + def columns(self): + """Column names from the first row.""" + if self._rows: + return list(self._rows[0].keys()) + return [] + + @property + def iloc(self): + """Positional 
indexing (returns _EpwRow objects).""" + return _EpwIloc(self._rows) + + def __getitem__(self, key): + """Access by column name (str) or filter by boolean mask.""" + if isinstance(key, str): + return _EpwColumn([row.get(key, float("nan")) for row in self._rows]) + if isinstance(key, _BooleanArray): + key = key._values + if isinstance(key, list): + filtered_rows = [r for r, m in zip(self._rows, key, strict=False) if m] + filtered_ts = [t for t, m in zip(self._timestamps, key, strict=False) if m] + return _EpwDataFrame(filtered_rows, filtered_ts) + raise TypeError(f"Unsupported indexing type: {type(key)}") + + @property + def empty(self): + return len(self._rows) == 0 + + def iterrows(self): + """Iterate over (timestamp, row) pairs.""" + for ts, row_data in zip(self._timestamps, self._rows, strict=False): + yield _EpwTimestamp(ts), _EpwRow(row_data) + + def to_dataframe(self): + """Convert to pandas DataFrame if pandas is available. + + Returns: + pd.DataFrame with DatetimeIndex, or self if pandas unavailable. 
+ """ + try: + import pandas as pd + + df = pd.DataFrame(self._rows) + df.index = pd.DatetimeIndex(self._timestamps, name="datetime") + return df + except ImportError: + return self + + +class _EpwTimestamp: + """Wrapper for datetime to provide pandas-like interface.""" + + def __init__(self, dt_obj): + self._dt = dt_obj + + def __getattr__(self, name): + return getattr(self._dt, name) + + def to_pydatetime(self): + return self._dt + + def replace(self, **kwargs): + return self._dt.replace(**kwargs) + + +def _parse_epw_metadata(path: Path) -> dict: + """Parse EPW header to extract metadata.""" + metadata = {} + with open(path, encoding="utf-8") as f: + location_line = f.readline().strip() + if not location_line.startswith("LOCATION"): + raise ValueError("Invalid EPW file: first line must start with 'LOCATION'") + + location_parts = location_line.split(",") + if len(location_parts) < 10: + raise ValueError(f"Invalid LOCATION line: expected at least 10 fields, got {len(location_parts)}") + + metadata["city"] = location_parts[1].strip() + metadata["state"] = location_parts[2].strip() + metadata["country"] = location_parts[3].strip() + metadata["latitude"] = float(location_parts[6]) + metadata["longitude"] = float(location_parts[7]) + metadata["tz_offset"] = float(location_parts[8]) + metadata["elevation"] = float(location_parts[9]) + + return metadata + + +def _read_epw_pure_python(path: Path) -> tuple: + """Pure Python EPW parser without pandas dependency.""" + import csv + from datetime import datetime as dt_class + from datetime import timedelta + + metadata = _parse_epw_metadata(path) + + # Column indices for the fields we need + # EPW format has 35 fields per line + col_indices = { + "year": 0, + "month": 1, + "day": 2, + "hour": 3, + "minute": 4, + "temp_air": 6, + "relative_humidity": 8, + "atmospheric_pressure": 9, + "ghi": 13, + "dni": 14, + "dhi": 15, + "wind_direction": 20, + "wind_speed": 21, + } + + na_values = {"99", "999", "9999", "99999", "999999999", 
""} + + rows = [] + timestamps = [] + + with open(path, encoding="utf-8") as f: + # Skip 8 header lines + for _ in range(8): + f.readline() + + reader = csv.reader(f) + for line in reader: + if len(line) < 22: + continue + + try: + year = int(line[col_indices["year"]]) + month = int(line[col_indices["month"]]) + day = int(line[col_indices["day"]]) + hour = int(line[col_indices["hour"]]) + minute = int(line[col_indices["minute"]]) + + # EPW uses 1-24 hour format; hour 24 means midnight of next day + if hour == 24: + timestamp = dt_class(year, month, day, 0, minute) + timedelta(days=1) + else: + timestamp = dt_class(year, month, day, hour, minute) + timestamps.append(timestamp) + + def parse_float(idx, row_data=line): + val = row_data[idx].strip() + if val in na_values: + return float("nan") + try: + return float(val) + except (ValueError, TypeError): + return float("nan") + + row = { + "temp_air": parse_float(col_indices["temp_air"]), + "relative_humidity": parse_float(col_indices["relative_humidity"]), + "atmospheric_pressure": parse_float(col_indices["atmospheric_pressure"]), + "ghi": parse_float(col_indices["ghi"]), + "dni": parse_float(col_indices["dni"]), + "dhi": parse_float(col_indices["dhi"]), + "wind_speed": parse_float(col_indices["wind_speed"]), + "wind_direction": parse_float(col_indices["wind_direction"]), + } + rows.append(row) + except (ValueError, IndexError): + continue + + if not rows: + raise ValueError("EPW file contains no valid data rows") + + df = _EpwDataFrame(rows, timestamps) + logger.info(f"Loaded EPW file: {metadata['city']}, {len(df)} timesteps (pure Python parser)") + + return df, metadata + + +def download_epw( + latitude: float, + longitude: float, + output_path: str | Path, + *, + timeout: int = 60, +) -> Path: + """ + Download a Typical Meteorological Year (TMY) EPW file from PVGIS. + + Uses the EU Joint Research Centre's PVGIS API (no API key required). 
+ Coverage is near-global (all continents except polar regions), + using ERA5 reanalysis data. + + The downloaded data contains modified Copernicus Climate Change Service + information. Neither the European Commission nor ECMWF is responsible + for any use that may be made of the Copernicus information or data it + contains. See https://cds.climate.copernicus.eu/disclaimer for the full + licence terms. + + Args: + latitude: Latitude in decimal degrees (-90 to 90). + longitude: Longitude in decimal degrees (-180 to 180). + output_path: Path where the EPW file will be saved. + timeout: HTTP request timeout in seconds (default 60). + + Returns: + Path to the saved EPW file. + + Raises: + ValueError: If coordinates are out of range. + ConnectionError: If the PVGIS server is unreachable. + RuntimeError: If the download fails (e.g. location over ocean). + + Example: + >>> from solweig.io import download_epw + >>> path = download_epw(37.98, 23.73, "athens.epw") + >>> data, metadata = read_epw(path) + """ + import urllib.error + import urllib.request + + if not -90 <= latitude <= 90: + raise ValueError(f"Latitude must be between -90 and 90, got {latitude}") + if not -180 <= longitude <= 180: + raise ValueError(f"Longitude must be between -180 and 180, got {longitude}") + + output_path = Path(output_path) + + url = f"https://re.jrc.ec.europa.eu/api/v5_3/tmy?lat={latitude}&lon={longitude}&outputformat=epw" + + logger.info(f"Downloading EPW from PVGIS for ({latitude:.4f}, {longitude:.4f})...") + + try: + req = urllib.request.Request(url) + with urllib.request.urlopen(req, timeout=timeout) as resp: + data = resp.read() + except urllib.error.HTTPError as e: + if e.code == 400: + raise RuntimeError( + f"PVGIS has no data for ({latitude}, {longitude}). The location may be over ocean or outside coverage." 
+ ) from e + raise RuntimeError(f"PVGIS download failed (HTTP {e.code}): {e.reason}") from e + except urllib.error.URLError as e: + raise ConnectionError(f"Cannot reach PVGIS server: {e.reason}") from e + + if len(data) < 1000: + # PVGIS returns a short error message for invalid locations + text = data.decode("utf-8", errors="replace") + raise RuntimeError(f"PVGIS returned an error: {text.strip()}") + + output_path.parent.mkdir(parents=True, exist_ok=True) + output_path.write_bytes(data) + + lines = data.decode("utf-8", errors="replace").split("\n") + n_data_lines = len(lines) - 8 # subtract header lines + logger.info(f"Saved EPW file: {output_path} ({n_data_lines} hourly records)") + + return output_path + + +def read_epw(path: str | Path) -> tuple: + """ + Read EnergyPlus Weather (EPW) file and return weather data with metadata. + + EPW files have 8 header lines followed by hourly weather data. + Uses pure Python parser (no pandas/scipy dependencies). + + Args: + path: Path to EPW file (string or Path) + + Returns: + Tuple of (data, metadata_dict): + - data: DataFrame-like object with datetime index and weather columns: + - temp_air: Dry bulb temperature (°C) + - relative_humidity: Relative humidity (%) + - atmospheric_pressure: Atmospheric pressure (Pa) + - ghi: Global horizontal irradiance (W/m²) + - dni: Direct normal irradiance (W/m²) + - dhi: Diffuse horizontal irradiance (W/m²) + - wind_speed: Wind speed (m/s) + - wind_direction: Wind direction (degrees) + - metadata_dict: Dictionary with keys: + - city: Location city name + - latitude: Latitude (degrees) + - longitude: Longitude (degrees) + - elevation: Elevation (m) + - tz_offset: Timezone offset (hours) + + Raises: + FileNotFoundError: If EPW file doesn't exist + ValueError: If EPW file is malformed + """ + path = Path(path) + if not path.exists(): + raise FileNotFoundError(f"EPW file not found: {path}") + + return _read_epw_pure_python(path) diff --git a/pysrc/solweig/loaders.py 
b/pysrc/solweig/loaders.py new file mode 100644 index 0000000..e079407 --- /dev/null +++ b/pysrc/solweig/loaders.py @@ -0,0 +1,257 @@ +"""Configuration and parameter loading from JSON files.""" + +from __future__ import annotations + +import json +from pathlib import Path +from types import SimpleNamespace +from typing import TYPE_CHECKING + +import numpy as np + +from .utils import dict_to_namespace + +if TYPE_CHECKING: + from numpy.typing import NDArray + + +def load_params(params_json_path: str | Path | None = None) -> SimpleNamespace: + """ + Load SOLWEIG parameters from a JSON file. + + Returns a mutable SimpleNamespace with all UMEP-standard parameters: + land cover properties, wall materials, Tmrt settings, PET settings, + tree settings, and posture geometry. + + Args: + params_json_path: Path to the parameters JSON file. + If None (default), loads the bundled default_materials.json + with all UMEP-standard values. + + Returns: + SimpleNamespace object with nested parameter values accessible via attributes. + The namespace is mutable — override individual values as needed. 
+ + Examples: + Load bundled defaults: + + >>> params = load_params() + >>> params.Tmrt_params.Value.absK # 0.7 + >>> params.Albedo.Effective.Value.Dark_asphalt # 0.18 + + Override a specific value: + + >>> params = load_params() + >>> params.Ts_deg.Value.Walls = 0.50 # Change wall TgK + """ + if params_json_path is None: + # Use bundled default parameters (full UMEP-format JSON with all sections) + params_path = Path(__file__).parent / "data" / "default_materials.json" + else: + params_path = Path(params_json_path) + + if not params_path.exists(): + raise FileNotFoundError(f"Parameters file not found: {params_path}") + + with open(params_path) as f: + params_dict = json.load(f) + + result = dict_to_namespace(params_dict) + assert isinstance(result, SimpleNamespace) + return result + + +def load_physics(physics_json_path: str | Path | None = None) -> SimpleNamespace: + """ + Load physics parameters (site-independent scientific constants). + + Physics parameters include: + - Tree_settings: Vegetation transmissivity, seasonal dates, trunk ratio + - Posture: Human posture geometry (Standing/Sitting projected area fractions) + + These are universal constants that rarely need customization. + + Args: + physics_json_path: Path to a custom physics JSON file. + If None (default), loads bundled physics_defaults.json with standard values. + + Returns: + SimpleNamespace object with physics parameters accessible via attributes. 
+ + Examples: + Load bundled defaults: + + >>> physics = load_physics() # Uses bundled physics_defaults.json + >>> physics.Tree_settings.Value.Transmissivity # 0.03 + >>> physics.Posture.Standing.Value.Fside # 0.22 + + Load custom physics (e.g., different tree transmissivity): + + >>> physics = load_physics("custom_trees.json") + """ + if physics_json_path is None: + # Use bundled physics defaults + physics_path = Path(__file__).parent / "data" / "physics_defaults.json" + else: + physics_path = Path(physics_json_path) + + if not physics_path.exists(): + raise FileNotFoundError(f"Physics parameters file not found: {physics_path}") + + with open(physics_path) as f: + physics_dict = json.load(f) + + result = dict_to_namespace(physics_dict) + assert isinstance(result, SimpleNamespace) + return result + + +def load_materials(materials_json_path: str | Path) -> SimpleNamespace: + """ + Load material properties (site-specific landcover parameters). + + Material properties include per-landcover-class values for: + - Names: Landcover class names (e.g., "Dark_asphalt", "Grass_unmanaged") + - Code: Landcover class IDs + - Albedo: Surface albedo per class + - Emissivity: Surface emissivity per class + - TmaxLST, Ts_deg, Tstart: Ground temperature model parameters per class + - Specific_heat, Thermal_conductivity, Density, Wall_thickness: Wall thermal properties + + These are site-specific and require a landcover grid (land_cover input). + + Args: + materials_json_path: Path to a materials JSON file. + This file must contain landcover-specific property definitions. + + Returns: + SimpleNamespace object with material parameters accessible via attributes. + + Examples: + Load site-specific materials: + + >>> materials = load_materials("site_materials.json") + >>> materials.Albedo.Effective.Value.Dark_asphalt # 0.18 + >>> materials.Emissivity.Value.Grass_unmanaged # 0.94 + + Notes: + Materials are ONLY used when a landcover grid is provided to SurfaceData. 
+ If no landcover grid, uniform default properties are used. + """ + materials_path = Path(materials_json_path) + + if not materials_path.exists(): + raise FileNotFoundError(f"Materials file not found: {materials_path}") + + with open(materials_path) as f: + materials_dict = json.load(f) + + result = dict_to_namespace(materials_dict) + assert isinstance(result, SimpleNamespace) + return result + + +def get_lc_properties_from_params( + land_cover: NDArray[np.integer], + params: SimpleNamespace, + shape: tuple[int, int], +) -> tuple[ + NDArray[np.floating], + NDArray[np.floating], + NDArray[np.floating], + NDArray[np.floating], + NDArray[np.floating], +]: + """ + Derive surface properties from land cover grid using loaded params. + + This mirrors the logic in configs.py TgMaps class. + + Args: + land_cover: Land cover classification grid (UMEP standard IDs). + params: Loaded parameters from JSON file. + shape: Output grid shape (rows, cols). + + Returns: + Tuple of (albedo_grid, emissivity_grid, tgk_grid, tstart_grid, tmaxlst_grid). 
+ """ + rows, cols = shape + alb_grid = np.full((rows, cols), 0.15, dtype=np.float32) + emis_grid = np.full((rows, cols), 0.95, dtype=np.float32) + tgk_grid = np.full((rows, cols), 0.37, dtype=np.float32) + tstart_grid = np.full((rows, cols), -3.41, dtype=np.float32) + tmaxlst_grid = np.full((rows, cols), 15.0, dtype=np.float32) + + # Get unique land cover IDs and filter to valid ones (0-7) + lc = np.copy(land_cover) + lc[lc >= 100] = 2 # Treat wall codes as buildings + unique_ids = np.unique(lc) + valid_ids = unique_ids[unique_ids <= 7].astype(int) + + # Build mappings from land cover ID to name to parameter values + for lc_id in valid_ids: + # Get land cover name from ID (e.g., 0 -> "Cobble_stone_2014a") + name = getattr(params.Names.Value, str(lc_id), None) + if name is None: + continue + + # Get parameter values for this land cover type + albedo = getattr(params.Albedo.Effective.Value, name, 0.15) + emissivity = getattr(params.Emissivity.Value, name, 0.95) + tgk = getattr(params.Ts_deg.Value, name, 0.37) + tstart = getattr(params.Tstart.Value, name, -3.41) + tmaxlst = getattr(params.TmaxLST.Value, name, 15.0) + + # Apply to grid where land cover matches + mask = lc == lc_id + if np.any(mask): + alb_grid[mask] = albedo + emis_grid[mask] = emissivity + tgk_grid[mask] = tgk + tstart_grid[mask] = tstart + tmaxlst_grid[mask] = tmaxlst + + return alb_grid, emis_grid, tgk_grid, tstart_grid, tmaxlst_grid + + +# Map user-facing wall material names to JSON keys in default_materials.json +WALL_MATERIAL_MAP: dict[str, str] = { + "brick": "Brick_wall", + "concrete": "Concrete_wall", + "wood": "Wood_wall", + "cobblestone": "Walls", +} + + +def resolve_wall_params( + wall_material: str, + materials: SimpleNamespace | None = None, +) -> tuple[float, float, float]: + """Resolve wall material name to (tgk_wall, tstart_wall, tmaxlst_wall). + + Args: + wall_material: Material name (case-insensitive). + One of: "brick", "concrete", "wood", "cobblestone". 
+ materials: Loaded materials namespace. If None, loads bundled defaults. + + Returns: + Tuple of (tgk_wall, tstart_wall, tmaxlst_wall) floats. + + Raises: + ValueError: If wall_material is not a recognized material name. + """ + key = wall_material.lower() + if key not in WALL_MATERIAL_MAP: + valid = ", ".join(sorted(WALL_MATERIAL_MAP)) + msg = f"Unknown wall material {wall_material!r}. Valid options: {valid}" + raise ValueError(msg) + + json_name = WALL_MATERIAL_MAP[key] + + if materials is None: + materials = load_params() + + tgk = float(getattr(materials.Ts_deg.Value, json_name)) + tstart = float(getattr(materials.Tstart.Value, json_name)) + tmaxlst = float(getattr(materials.TmaxLST.Value, json_name)) + return tgk, tstart, tmaxlst diff --git a/pysrc/solweig/metadata.py b/pysrc/solweig/metadata.py new file mode 100644 index 0000000..7cb898b --- /dev/null +++ b/pysrc/solweig/metadata.py @@ -0,0 +1,137 @@ +"""Run metadata and provenance tracking.""" + +from __future__ import annotations + +import json +from datetime import datetime as dt +from pathlib import Path +from types import SimpleNamespace +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from .models import HumanParams, Location, SurfaceData, Weather + + +def create_run_metadata( + surface: SurfaceData, + location: Location, + weather_series: list[Weather], + human: HumanParams | None, + physics: SimpleNamespace | None, + materials: SimpleNamespace | None, + use_anisotropic_sky: bool, + conifer: bool, + output_dir: str | Path, + outputs: list[str] | None, +) -> dict: + """ + Create run metadata dictionary for provenance tracking. + + Args: + surface: Surface data used in calculation. + location: Location parameters. + weather_series: List of Weather objects. + human: Human parameters (or None for defaults). + physics: Physics parameters (or None for defaults). + materials: Materials parameters (or None). + use_anisotropic_sky: Whether anisotropic sky model was used. 
+ conifer: Whether conifer mode was used. + output_dir: Output directory path. + outputs: List of output variables saved. + + Returns: + Dictionary containing run metadata. + """ + from .utils import namespace_to_dict + + metadata = { + "solweig_version": "0.0.1a1", + "run_timestamp": dt.now().isoformat(), + "grid": { + "rows": surface.shape[0], + "cols": surface.shape[1], + "pixel_size": surface.pixel_size, + "crs": surface.crs, + }, + "location": { + "latitude": location.latitude, + "longitude": location.longitude, + "utc_offset": location.utc_offset, + }, + "timeseries": { + "start": weather_series[0].datetime.isoformat(), + "end": weather_series[-1].datetime.isoformat(), + "timesteps": len(weather_series), + }, + "parameters": { + "use_anisotropic_sky": use_anisotropic_sky, + "conifer": conifer, + }, + "outputs": { + "directory": str(output_dir), + "variables": outputs or [], + }, + } + + # Add optional parameter info + if human is not None: + metadata["human"] = { + "abs_k": human.abs_k, + "abs_l": human.abs_l, + "posture": human.posture, + } + + if physics is not None: + physics_info = {} + try: + physics_info["full_params"] = namespace_to_dict(physics) + except Exception: + physics_info["note"] = "Physics parameters provided but not serializable" + metadata["physics"] = physics_info + + if materials is not None: + materials_info = {} + try: + materials_info["full_params"] = namespace_to_dict(materials) + except Exception: + materials_info["note"] = "Materials parameters provided but not serializable" + metadata["materials"] = materials_info + + return metadata + + +def save_run_metadata(metadata: dict, output_dir: str | Path, filename: str = "run_metadata.json") -> Path: + """ + Save run metadata to JSON file. + + Args: + metadata: Metadata dictionary from create_run_metadata(). + output_dir: Output directory. + filename: Filename for metadata JSON (default: run_metadata.json). + + Returns: + Path to saved metadata file. 
+ """ + output_path = Path(output_dir) + metadata_path = output_path / filename + + with open(metadata_path, "w") as f: + json.dump(metadata, f, indent=2) + + return metadata_path + + +def load_run_metadata(metadata_path: str | Path) -> dict: + """ + Load run metadata from JSON file. + + Args: + metadata_path: Path to metadata JSON file. + + Returns: + Metadata dictionary. + """ + with open(metadata_path) as f: + metadata = json.load(f) + + return metadata diff --git a/pysrc/solweig/models/__init__.py b/pysrc/solweig/models/__init__.py new file mode 100644 index 0000000..9e7ddee --- /dev/null +++ b/pysrc/solweig/models/__init__.py @@ -0,0 +1,46 @@ +"""Data models for SOLWEIG calculations. + +Modules +------- +state + ``ThermalState`` (thermal inertia carry-forward) and ``TileSpec`` + (tile geometry for large-raster processing). +surface + ``SurfaceData`` — DSM, CDSM, DEM, land cover, walls, and SVF. +weather + ``Location`` and ``Weather`` dataclasses. +precomputed + ``SvfArrays``, ``ShadowArrays``, ``PrecomputedData`` — cached + preprocessing results loaded from disk. +config + ``ModelConfig`` and ``HumanParams`` — run-time settings. +results + ``SolweigResult`` — output grids (Tmrt, radiation, shadow). 
+""" + +from .config import HumanParams, ModelConfig +from .precomputed import PrecomputedData, ShadowArrays, SvfArrays +from .results import SolweigResult +from .state import ThermalState, TileSpec +from .surface import SurfaceData +from .weather import Location, Weather + +__all__ = [ + # State management + "ThermalState", + "TileSpec", + # Surface data + "SurfaceData", + # Weather and location + "Location", + "Weather", + # Precomputed data + "SvfArrays", + "ShadowArrays", + "PrecomputedData", + # Configuration + "ModelConfig", + "HumanParams", + # Results + "SolweigResult", +] diff --git a/pysrc/solweig/models/config.py b/pysrc/solweig/models/config.py new file mode 100644 index 0000000..04c2898 --- /dev/null +++ b/pysrc/solweig/models/config.py @@ -0,0 +1,267 @@ +"""Model configuration and human-body parameter classes. + +Defines :class:`ModelConfig` (run-time settings such as sky model, +tiling, and shadow distance) and :class:`HumanParams` (posture, +absorption coefficients, and PET body parameters). +""" + +from __future__ import annotations + +import json +import logging +from dataclasses import dataclass, field +from pathlib import Path +from types import SimpleNamespace +from typing import TYPE_CHECKING + +logger = logging.getLogger(__name__) + +if TYPE_CHECKING: + pass + + +@dataclass +class ModelConfig: + """ + Model configuration for SOLWEIG calculations. + + Groups all computational settings in one typed object. + Pure configuration - no paths or data. + + Attributes: + use_anisotropic_sky: Use Perez anisotropic sky model for diffuse + radiation. Default True. Requires precomputed shadow matrices. + human: Human body parameters for Tmrt calculations. + material_params: Optional material properties from JSON file. + outputs: Which outputs to save in timeseries calculations. + Default ``["tmrt"]``. + physics: Physics parameters (vegetation, posture geometry). Optional. + materials: Material properties (albedo, emissivity). Optional. 
+ max_shadow_distance_m: Maximum shadow reach in metres. Default 1000.0. + Caps horizontal shadow ray distance and serves as tile overlap + buffer for automatic tiled processing. On mountainous terrain, + increase this to capture terrain shadows across valleys. + tile_workers: Number of workers for tiled orchestration. If None, + picks an adaptive default based on CPU count. + tile_queue_depth: Extra queued tile tasks beyond active workers. + If None, defaults to one queue slot per worker when prefetching + is enabled. + prefetch_tiles: Whether to prefetch tile tasks beyond active workers. + If None, runtime chooses automatically based on memory pressure. + + Note: + UTCI and PET are computed inline when requested via + ``timestep_outputs=["utci"]`` or ``outputs=["utci"]``. + + Examples: + Basic usage with defaults: + + >>> config = ModelConfig.defaults() + >>> config.save("my_config.json") + + Custom configuration: + + >>> config = ModelConfig( + ... use_anisotropic_sky=True, + ... human=HumanParams(abs_k=0.7, posture="standing"), + ... ) + + Load from legacy JSON: + + >>> config = ModelConfig.from_json("parametersforsolweig.json") + """ + + use_anisotropic_sky: bool = True + human: HumanParams | None = None + material_params: SimpleNamespace | None = None + outputs: list[str] = field(default_factory=lambda: ["tmrt"]) + physics: SimpleNamespace | None = None + materials: SimpleNamespace | None = None + max_shadow_distance_m: float = 1000.0 + tile_workers: int | None = None + tile_queue_depth: int | None = None + prefetch_tiles: bool | None = None + + def __post_init__(self): + """Validate configuration fields. + + Note: ``human`` is intentionally left as None here. + Default ``HumanParams()`` is instantiated by ``calculate()`` + and ``calculate_timeseries()`` when no human params are provided. + """ + pass + + @classmethod + def defaults(cls) -> ModelConfig: + """ + Create a ModelConfig with recommended defaults. 
+ + Returns: + ModelConfig with ``use_anisotropic_sky=True`` and all other + fields at their dataclass defaults. + """ + return cls( + use_anisotropic_sky=True, + ) + + @classmethod + def from_json(cls, path: str | Path) -> ModelConfig: + """ + Load configuration from legacy JSON parameters file. + + Args: + path: Path to parametersforsolweig.json + + Returns: + ModelConfig with settings extracted from JSON + + Example: + >>> config = ModelConfig.from_json("parametersforsolweig.json") + >>> config.human.abs_k # From Tmrt_params + 0.7 + """ + from ..loaders import load_params + + params = load_params(path) + + # Extract human parameters from JSON + human = HumanParams() + if hasattr(params, "Tmrt_params"): + human.abs_k = getattr(params.Tmrt_params, "absK", 0.7) + human.abs_l = getattr(params.Tmrt_params, "absL", 0.97) + posture_str = getattr(params.Tmrt_params, "posture", "Standing") + human.posture = posture_str.lower() + + if hasattr(params, "PET_settings"): + human.age = getattr(params.PET_settings, "Age", 35) + human.weight = getattr(params.PET_settings, "Weight", 75.0) + human.height = getattr(params.PET_settings, "Height", 1.75) + human.sex = getattr(params.PET_settings, "Sex", 1) + human.activity = getattr(params.PET_settings, "Activity", 80.0) + human.clothing = getattr(params.PET_settings, "clo", 0.9) + + return cls( + human=human, + material_params=params, + ) + + def save(self, path: str | Path): + """ + Save configuration to JSON file. 
+ + Args: + path: Output path for JSON file + + Example: + >>> config = ModelConfig.defaults() + >>> config.save("my_settings.json") + """ + path = Path(path) + path.parent.mkdir(parents=True, exist_ok=True) + + # Serialize to dict + data = { + "use_anisotropic_sky": self.use_anisotropic_sky, + "max_shadow_distance_m": self.max_shadow_distance_m, + "tile_workers": self.tile_workers, + "tile_queue_depth": self.tile_queue_depth, + "prefetch_tiles": self.prefetch_tiles, + "outputs": self.outputs, + "human": { + "posture": self.human.posture, + "abs_k": self.human.abs_k, + "abs_l": self.human.abs_l, + "age": self.human.age, + "weight": self.human.weight, + "height": self.human.height, + "sex": self.human.sex, + "activity": self.human.activity, + "clothing": self.human.clothing, + } + if self.human + else None, + } + + with open(path, "w") as f: + json.dump(data, f, indent=2) + + logger.info(f"Saved configuration to {path}") + + @classmethod + def load(cls, path: str | Path) -> ModelConfig: + """ + Load configuration from JSON file. + + Args: + path: Path to JSON configuration file + + Returns: + ModelConfig loaded from file + + Example: + >>> config = ModelConfig.load("my_settings.json") + >>> results = calculate_timeseries(surface, weather, config=config) + """ + path = Path(path) + + with open(path) as f: + data = json.load(f) + + # Deserialize human params + human = None + if data.get("human"): + human = HumanParams(**data["human"]) + + return cls( + use_anisotropic_sky=data.get("use_anisotropic_sky", False), + max_shadow_distance_m=data.get("max_shadow_distance_m", 1000.0), + tile_workers=data.get("tile_workers"), + tile_queue_depth=data.get("tile_queue_depth"), + prefetch_tiles=data.get("prefetch_tiles"), + human=human, + outputs=data.get("outputs", ["tmrt"]), + ) + + +@dataclass +class HumanParams: + """ + Human body parameters for thermal comfort calculations. + + These parameters affect how radiation is absorbed by a person. 
+ Default values represent a standard reference person. + + Attributes: + posture: Body posture ("standing" or "sitting"). Default "standing". + abs_k: Shortwave absorption coefficient. Default 0.7. + abs_l: Longwave absorption coefficient. Default 0.97. + + PET-specific parameters (used when ``"pet"`` is in timestep_outputs/outputs): + age: Age in years. Default 35. + weight: Body weight in kg. Default 75. + height: Body height in meters. Default 1.75. + sex: Biological sex (1=male, 2=female). Default 1. + activity: Metabolic activity in W. Default 80. + clothing: Clothing insulation in clo. Default 0.9. + """ + + posture: str = "standing" + abs_k: float = 0.7 + abs_l: float = 0.97 + + # PET-specific (optional) + age: int = 35 + weight: float = 75.0 + height: float = 1.75 + sex: int = 1 + activity: float = 80.0 + clothing: float = 0.9 + + def __post_init__(self): + valid_postures = ("standing", "sitting") + if self.posture not in valid_postures: + raise ValueError(f"Posture must be one of {valid_postures}, got {self.posture}") + if not 0 < self.abs_k <= 1: + raise ValueError(f"abs_k must be in (0, 1], got {self.abs_k}") + if not 0 < self.abs_l <= 1: + raise ValueError(f"abs_l must be in (0, 1], got {self.abs_l}") diff --git a/pysrc/solweig/models/precomputed.py b/pysrc/solweig/models/precomputed.py new file mode 100644 index 0000000..cece374 --- /dev/null +++ b/pysrc/solweig/models/precomputed.py @@ -0,0 +1,807 @@ +"""Precomputed preprocessing data (SVF arrays and shadow matrices). + +Defines :class:`SvfArrays` (15 directional sky view factor grids), +:class:`ShadowArrays` (bitpacked shadow matrices for the anisotropic +sky model), and :class:`PrecomputedData` (a convenience wrapper that +bundles both). These can be loaded from the ``svfs.zip`` / +``shadowmats.npz`` files produced by :meth:`SurfaceData.prepare`. 
+""" + +from __future__ import annotations + +import json +from dataclasses import dataclass, field +from pathlib import Path +from typing import TYPE_CHECKING, Literal + +import numpy as np + +from ..cache import CacheMetadata, pixel_size_tag +from ..solweig_logging import get_logger + +if TYPE_CHECKING: + from numpy.typing import NDArray + +logger = get_logger(__name__) + + +@dataclass +class SvfArrays: + """ + Pre-computed Sky View Factor arrays. + + Use this when you have already computed SVF and want to skip + re-computation. Can be loaded from SOLWEIG svfs.zip format. + + Attributes: + svf: Total sky view factor (0-1). + svf_north, svf_east, svf_south, svf_west: Directional SVF components. + svf_veg: Vegetation SVF (set to ones if no vegetation). + svf_veg_north, svf_veg_east, svf_veg_south, svf_veg_west: Directional veg SVF. + svf_aveg: Vegetation blocking building shadow. + svf_aveg_north, svf_aveg_east, svf_aveg_south, svf_aveg_west: Directional. + + Memory note: + All arrays are stored as float32. For a 768x768 grid with all 15 arrays, + total memory is approximately 35 MB. 
+ """ + + svf: NDArray[np.floating] + svf_north: NDArray[np.floating] + svf_east: NDArray[np.floating] + svf_south: NDArray[np.floating] + svf_west: NDArray[np.floating] + svf_veg: NDArray[np.floating] + svf_veg_north: NDArray[np.floating] + svf_veg_east: NDArray[np.floating] + svf_veg_south: NDArray[np.floating] + svf_veg_west: NDArray[np.floating] + svf_aveg: NDArray[np.floating] + svf_aveg_north: NDArray[np.floating] + svf_aveg_east: NDArray[np.floating] + svf_aveg_south: NDArray[np.floating] + svf_aveg_west: NDArray[np.floating] + + def __post_init__(self): + # Ensure all arrays are float32 for memory efficiency + # Note: np.asarray preserves memmap arrays (doesn't copy unless dtype changes) + def ensure_f32(arr): + if isinstance(arr, np.memmap): + # Preserve memmap - only convert dtype if needed + if arr.dtype != np.float32: + # This would load into memory - warn user + logger.warning("Memmap array has wrong dtype, loading into memory") + return np.asarray(arr, dtype=np.float32) + return arr + return np.asarray(arr, dtype=np.float32) + + self.svf = ensure_f32(self.svf) + self.svf_north = ensure_f32(self.svf_north) + self.svf_east = ensure_f32(self.svf_east) + self.svf_south = ensure_f32(self.svf_south) + self.svf_west = ensure_f32(self.svf_west) + self.svf_veg = ensure_f32(self.svf_veg) + self.svf_veg_north = ensure_f32(self.svf_veg_north) + self.svf_veg_east = ensure_f32(self.svf_veg_east) + self.svf_veg_south = ensure_f32(self.svf_veg_south) + self.svf_veg_west = ensure_f32(self.svf_veg_west) + self.svf_aveg = ensure_f32(self.svf_aveg) + self.svf_aveg_north = ensure_f32(self.svf_aveg_north) + self.svf_aveg_east = ensure_f32(self.svf_aveg_east) + self.svf_aveg_south = ensure_f32(self.svf_aveg_south) + self.svf_aveg_west = ensure_f32(self.svf_aveg_west) + + @property + def svfalfa(self) -> NDArray[np.floating]: + """Compute SVF alpha (angle) from SVF values. 
Computed on-demand.""" + tmp = self.svf + self.svf_veg - 1.0 + tmp = np.clip(tmp, 0.0, 1.0) + eps = np.finfo(np.float32).tiny + safe_term = np.clip(1.0 - tmp, eps, 1.0) + return np.arcsin(np.exp(np.log(safe_term) / 2.0)) + + @property + def svfbuveg(self) -> NDArray[np.floating]: + """Combined building + vegetation SVF. Computed on-demand.""" + return np.clip(self.svf + self.svf_veg - 1.0, 0.0, 1.0) + + def crop(self, r0: int, r1: int, c0: int, c1: int) -> SvfArrays: + """Crop all SVF arrays to [r0:r1, c0:c1].""" + return SvfArrays( + svf=self.svf[r0:r1, c0:c1].copy(), + svf_north=self.svf_north[r0:r1, c0:c1].copy(), + svf_east=self.svf_east[r0:r1, c0:c1].copy(), + svf_south=self.svf_south[r0:r1, c0:c1].copy(), + svf_west=self.svf_west[r0:r1, c0:c1].copy(), + svf_veg=self.svf_veg[r0:r1, c0:c1].copy(), + svf_veg_north=self.svf_veg_north[r0:r1, c0:c1].copy(), + svf_veg_east=self.svf_veg_east[r0:r1, c0:c1].copy(), + svf_veg_south=self.svf_veg_south[r0:r1, c0:c1].copy(), + svf_veg_west=self.svf_veg_west[r0:r1, c0:c1].copy(), + svf_aveg=self.svf_aveg[r0:r1, c0:c1].copy(), + svf_aveg_north=self.svf_aveg_north[r0:r1, c0:c1].copy(), + svf_aveg_east=self.svf_aveg_east[r0:r1, c0:c1].copy(), + svf_aveg_south=self.svf_aveg_south[r0:r1, c0:c1].copy(), + svf_aveg_west=self.svf_aveg_west[r0:r1, c0:c1].copy(), + ) + + @classmethod + def from_bundle(cls, bundle) -> SvfArrays: + """ + Create SvfArrays from a SvfBundle (computation result). + + This enables caching fresh-computed SVF back to surface.svf for reuse. 
+ + Args: + bundle: SvfBundle from resolve_svf() or skyview.calculate_svf() + + Returns: + SvfArrays instance suitable for caching on SurfaceData.svf + """ + return cls( + svf=bundle.svf, + svf_north=bundle.svf_directional.north, + svf_east=bundle.svf_directional.east, + svf_south=bundle.svf_directional.south, + svf_west=bundle.svf_directional.west, + svf_veg=bundle.svf_veg, + svf_veg_north=bundle.svf_veg_directional.north, + svf_veg_east=bundle.svf_veg_directional.east, + svf_veg_south=bundle.svf_veg_directional.south, + svf_veg_west=bundle.svf_veg_directional.west, + svf_aveg=bundle.svf_aveg, + svf_aveg_north=bundle.svf_aveg_directional.north, + svf_aveg_east=bundle.svf_aveg_directional.east, + svf_aveg_south=bundle.svf_aveg_directional.south, + svf_aveg_west=bundle.svf_aveg_directional.west, + ) + + @classmethod + def from_zip(cls, zip_path: str | Path, use_vegetation: bool = True) -> SvfArrays: + """ + Load SVF arrays from SOLWEIG svfs.zip format. + + Args: + zip_path: Path to svfs.zip file. + use_vegetation: Whether to load vegetation SVF arrays. Default True. + + Returns: + SvfArrays instance with loaded data. + + Memory note: + Files are extracted temporarily and loaded as float32 arrays. + The zip file contains GeoTIFF rasters. + """ + import tempfile + import zipfile + + from .. 
import io as common + + zip_path = Path(zip_path) + if not zip_path.exists(): + raise FileNotFoundError(f"SVF zip file not found: {zip_path}") + + with tempfile.TemporaryDirectory() as tmpdir: + with zipfile.ZipFile(str(zip_path), "r") as zf: + zf.extractall(tmpdir) + + tmppath = Path(tmpdir) + + def load(filename: str) -> NDArray[np.floating]: + filepath = tmppath / filename + if not filepath.exists(): + raise FileNotFoundError(f"Expected SVF file not found in zip: {filename}") + data, _, _, _ = common.load_raster(str(filepath), ensure_float32=True) + return data + + # Load basic SVF arrays + svf = load("svf.tif") + svf_n = load("svfN.tif") + svf_e = load("svfE.tif") + svf_s = load("svfS.tif") + svf_w = load("svfW.tif") + + # Load vegetation arrays or create defaults + if use_vegetation: + svf_veg = load("svfveg.tif") + svf_veg_n = load("svfNveg.tif") + svf_veg_e = load("svfEveg.tif") + svf_veg_s = load("svfSveg.tif") + svf_veg_w = load("svfWveg.tif") + svf_aveg = load("svfaveg.tif") + svf_aveg_n = load("svfNaveg.tif") + svf_aveg_e = load("svfEaveg.tif") + svf_aveg_s = load("svfSaveg.tif") + svf_aveg_w = load("svfWaveg.tif") + else: + ones = np.ones_like(svf) + svf_veg = ones + svf_veg_n = ones + svf_veg_e = ones + svf_veg_s = ones + svf_veg_w = ones + svf_aveg = ones + svf_aveg_n = ones + svf_aveg_e = ones + svf_aveg_s = ones + svf_aveg_w = ones + + return cls( + svf=svf, + svf_north=svf_n, + svf_east=svf_e, + svf_south=svf_s, + svf_west=svf_w, + svf_veg=svf_veg, + svf_veg_north=svf_veg_n, + svf_veg_east=svf_veg_e, + svf_veg_south=svf_veg_s, + svf_veg_west=svf_veg_w, + svf_aveg=svf_aveg, + svf_aveg_north=svf_aveg_n, + svf_aveg_east=svf_aveg_e, + svf_aveg_south=svf_aveg_s, + svf_aveg_west=svf_aveg_w, + ) + + def to_memmap(self, directory: str | Path, metadata: CacheMetadata | None = None) -> Path: + """ + Save SVF arrays as memory-mapped .npy files for efficient large-raster processing. 
+ + This enables processing of 10k×10k+ rasters without loading all SVF data into RAM. + The OS handles paging, loading only the needed regions into physical memory. + + Args: + directory: Directory to save memmap files. Created if doesn't exist. + metadata: Optional cache metadata for validation on reload. + When provided, enables automatic cache invalidation if inputs change. + + Returns: + Path to the directory containing memmap files. + + Memory note: + For a 10k×10k grid with 15 arrays: ~6 GB on disk, but only accessed + regions are loaded into RAM. Typical usage loads <100 MB. + + Example: + svf = SvfArrays.from_zip("svfs.zip") + svf.to_memmap("cache/svf_memmap") + # Later: + svf = SvfArrays.from_memmap("cache/svf_memmap") + """ + directory = Path(directory) + directory.mkdir(parents=True, exist_ok=True) + + # Save each array as .npy file + array_names = [ + "svf", + "svf_north", + "svf_east", + "svf_south", + "svf_west", + "svf_veg", + "svf_veg_north", + "svf_veg_east", + "svf_veg_south", + "svf_veg_west", + "svf_aveg", + "svf_aveg_north", + "svf_aveg_east", + "svf_aveg_south", + "svf_aveg_west", + ] + + for name in array_names: + arr = getattr(self, name) + np.save(directory / f"{name}.npy", arr) + + # Save metadata for cache validation + if metadata is not None: + metadata.save(directory) + + logger.info(f"Saved SVF memmap cache to {directory} ({len(array_names)} arrays)") + return directory + + @classmethod + def from_memmap(cls, directory: str | Path, mode: Literal["r", "r+", "c"] = "r") -> SvfArrays: + """ + Load SVF arrays as memory-mapped files for efficient large-raster processing. + + Memory-mapped arrays are not loaded into RAM until accessed. The OS handles + paging, making this suitable for rasters larger than available RAM. + + Args: + directory: Directory containing memmap .npy files (from to_memmap()). + mode: Memory-map mode. Default "r" (read-only). 
+ - "r": Read-only (safest, allows OS caching) + - "r+": Read-write (modifications saved to disk) + - "c": Copy-on-write (modifications not saved) + + Returns: + SvfArrays with memory-mapped backing. + + Memory note: + Only accessed regions are loaded into physical RAM. For tiled processing, + this dramatically reduces memory usage compared to loading full arrays. + + Example: + svf = SvfArrays.from_memmap("cache/svf_memmap") + # Arrays are loaded on-demand as tiles access them + """ + directory = Path(directory) + if not directory.exists(): + raise FileNotFoundError(f"SVF memmap directory not found: {directory}") + + def load_memmap(name: str) -> np.ndarray: + path = directory / f"{name}.npy" + if not path.exists(): + raise FileNotFoundError(f"SVF memmap file not found: {path}") + return np.load(path, mmap_mode=mode) + + return cls( + svf=load_memmap("svf"), + svf_north=load_memmap("svf_north"), + svf_east=load_memmap("svf_east"), + svf_south=load_memmap("svf_south"), + svf_west=load_memmap("svf_west"), + svf_veg=load_memmap("svf_veg"), + svf_veg_north=load_memmap("svf_veg_north"), + svf_veg_east=load_memmap("svf_veg_east"), + svf_veg_south=load_memmap("svf_veg_south"), + svf_veg_west=load_memmap("svf_veg_west"), + svf_aveg=load_memmap("svf_aveg"), + svf_aveg_north=load_memmap("svf_aveg_north"), + svf_aveg_east=load_memmap("svf_aveg_east"), + svf_aveg_south=load_memmap("svf_aveg_south"), + svf_aveg_west=load_memmap("svf_aveg_west"), + ) + + +def _unpack_bitpacked_to_float32(packed: NDArray[np.uint8], patch_count: int) -> NDArray[np.floating]: + """Unpack bitpacked shadow matrix to float32 (0.0 or 1.0). + + Args: + packed: Bitpacked array, shape (rows, cols, n_pack) where n_pack = ceil(patch_count/8). + patch_count: Number of actual patches. + + Returns: + Float32 array, shape (rows, cols, patch_count) with values 0.0 or 1.0. 
+ """ + rows, cols, _ = packed.shape + result = np.zeros((rows, cols, patch_count), dtype=np.float32) + for p in range(patch_count): + byte_idx = p >> 3 + bit_mask = np.uint8(1 << (p & 7)) + result[:, :, p] = ((packed[:, :, byte_idx] & bit_mask) != 0).astype(np.float32) + return result + + +def _pack_u8_to_bitpacked( + u8_data: NDArray[np.uint8], +) -> NDArray[np.uint8]: + """Pack u8 shadow matrix (0 or 255 per patch) to bitpacked format. + + Args: + u8_data: Array shape (rows, cols, patch_count) with values 0 or 255. + + Returns: + Bitpacked array, shape (rows, cols, n_pack) where n_pack = ceil(patch_count/8). + """ + rows, cols, patch_count = u8_data.shape + n_pack = (patch_count + 7) // 8 + packed = np.zeros((rows, cols, n_pack), dtype=np.uint8) + for p in range(patch_count): + byte_idx = p >> 3 + bit_mask = np.uint8(1 << (p & 7)) + packed[:, :, byte_idx] |= np.where(u8_data[:, :, p] >= 128, bit_mask, np.uint8(0)) + return packed + + +@dataclass +class ShadowArrays: + """ + Pre-computed anisotropic shadow matrices for sky patch calculations. + + Internally stored as bitpacked uint8 arrays of shape (rows, cols, n_pack) + where n_pack = ceil(patch_count / 8). Each bit represents one sky patch + (1 = sky visible / shadowed value was 255, 0 = blocked / was 0). + + Memory optimization: + Bitpacking stores 8 patches per byte instead of 1, reducing memory 7.6x. + For a 2500x2500 grid with 153 patches: 375 MB bitpacked vs 2.87 GB as uint8. + Converted to float32 only when accessed via properties (e.g. for diffsh). + + Attributes: + _shmat_u8: Building shadow matrix (bitpacked uint8). + _vegshmat_u8: Vegetation shadow matrix (bitpacked uint8). + _vbshmat_u8: Combined veg+building shadow matrix (bitpacked uint8). + patch_count: Number of sky patches (145, 153, 306, or 612). 
+ """ + + _shmat_u8: NDArray[np.uint8] + _vegshmat_u8: NDArray[np.uint8] + _vbshmat_u8: NDArray[np.uint8] + _n_patches: int = 153 + patch_count: int = field(init=False) + # Cache for converted float32 arrays (allocated on first access) + _shmat_f32: NDArray[np.floating] | None = field(init=False, default=None, repr=False) + _vegshmat_f32: NDArray[np.floating] | None = field(init=False, default=None, repr=False) + _vbshmat_f32: NDArray[np.floating] | None = field(init=False, default=None, repr=False) + _steradians: NDArray[np.float32] | None = field(init=False, default=None, repr=False) + + def __post_init__(self): + # Ensure uint8 dtype + if self._shmat_u8.dtype != np.uint8: + self._shmat_u8 = self._shmat_u8.astype(np.uint8) + if self._vegshmat_u8.dtype != np.uint8: + self._vegshmat_u8 = self._vegshmat_u8.astype(np.uint8) + if self._vbshmat_u8.dtype != np.uint8: + self._vbshmat_u8 = self._vbshmat_u8.astype(np.uint8) + + self.patch_count = self._n_patches + # Initialize cache as None (lazy allocation) + self._shmat_f32 = None + self._vegshmat_f32 = None + self._vbshmat_f32 = None + self._steradians = None + + @property + def shmat(self) -> NDArray[np.floating]: + """Building shadow matrix as float32 (0.0-1.0). Unpacked from bitpacked on demand.""" + if self._shmat_f32 is None: + self._shmat_f32 = _unpack_bitpacked_to_float32(self._shmat_u8, self.patch_count) + return self._shmat_f32 + + @property + def vegshmat(self) -> NDArray[np.floating]: + """Vegetation shadow matrix as float32 (0.0-1.0). Unpacked from bitpacked on demand.""" + if self._vegshmat_f32 is None: + self._vegshmat_f32 = _unpack_bitpacked_to_float32(self._vegshmat_u8, self.patch_count) + return self._vegshmat_f32 + + @property + def vbshmat(self) -> NDArray[np.floating]: + """Combined shadow matrix as float32 (0.0-1.0). 
Unpacked from bitpacked on demand.""" + if self._vbshmat_f32 is None: + self._vbshmat_f32 = _unpack_bitpacked_to_float32(self._vbshmat_u8, self.patch_count) + return self._vbshmat_f32 + + @property + def patch_option(self) -> int: + """Patch option code (1=145, 2=153, 3=306, 4=612 patches).""" + patch_map = {145: 1, 153: 2, 306: 3, 612: 4} + return patch_map.get(self.patch_count, 2) + + @property + def steradians(self) -> NDArray[np.float32]: + """Patch steradians (cached, depends only on patch layout).""" + if self._steradians is None: + from ..physics.create_patches import create_patches + from ..physics.patch_radiation import patch_steradians + + skyvaultalt, skyvaultazi, *_ = create_patches(self.patch_option) + # patch_steradians only uses column 0 (altitudes) + lv_stub = np.column_stack([skyvaultalt.ravel(), skyvaultazi.ravel(), np.zeros(skyvaultalt.size)]) + self._steradians, _, _ = patch_steradians(lv_stub) + return self._steradians + + def diffsh(self, transmissivity: float = 0.03, use_vegetation: bool = True) -> NDArray[np.floating]: + """ + Compute diffuse shadow matrix. + + Args: + transmissivity: Vegetation transmissivity (default 0.03). + use_vegetation: Whether to account for vegetation. + + Returns: + Diffuse shadow matrix as float32. + """ + shmat = self.shmat + if use_vegetation: + vegshmat = self.vegshmat + return (shmat - (1 - vegshmat) * (1 - transmissivity)).astype(np.float32) + return shmat + + def release_float32_cache(self) -> None: + """Release cached float32 shadow matrices to free memory. + + The bitpacked originals remain available. Future property access will + re-unpack as needed. 
+ """ + self._shmat_f32 = None + self._vegshmat_f32 = None + self._vbshmat_f32 = None + + def crop(self, r0: int, r1: int, c0: int, c1: int) -> ShadowArrays: + """Crop all shadow matrices to [r0:r1, c0:c1] (3D: rows, cols, n_pack).""" + return ShadowArrays( + _shmat_u8=self._shmat_u8[r0:r1, c0:c1, :].copy(), + _vegshmat_u8=self._vegshmat_u8[r0:r1, c0:c1, :].copy(), + _vbshmat_u8=self._vbshmat_u8[r0:r1, c0:c1, :].copy(), + _n_patches=self.patch_count, + ) + + @classmethod + def from_npz(cls, npz_path: str | Path) -> ShadowArrays: + """ + Load shadow matrices from SOLWEIG shadowmats.npz format. + + Handles both legacy u8-per-patch format and new bitpacked format. + Legacy files have shape[2] matching patch count (145/153/306/612). + New files include a 'patch_count' metadata key. + """ + npz_path = Path(npz_path) + if not npz_path.exists(): + raise FileNotFoundError(f"Shadow matrices file not found: {npz_path}") + + data = np.load(str(npz_path)) + + shmat = data["shadowmat"] + vegshmat = data["vegshadowmat"] + vbshmat = data["vbshmat"] + + # Detect format: new bitpacked files include 'patch_count' key + if "patch_count" in data: + patch_count = int(data["patch_count"]) + # Data is already bitpacked uint8 + return cls( + _shmat_u8=shmat.astype(np.uint8), + _vegshmat_u8=vegshmat.astype(np.uint8), + _vbshmat_u8=vbshmat.astype(np.uint8), + _n_patches=patch_count, + ) + + # Legacy format: shape[2] == patch_count, values are 0/255 uint8 or 0.0/1.0 float32 + # Convert float32 → uint8 first if needed + if shmat.dtype != np.uint8: + shmat = (np.clip(shmat, 0, 1) * 255).astype(np.uint8) + if vegshmat.dtype != np.uint8: + vegshmat = (np.clip(vegshmat, 0, 1) * 255).astype(np.uint8) + if vbshmat.dtype != np.uint8: + vbshmat = (np.clip(vbshmat, 0, 1) * 255).astype(np.uint8) + + patch_count = shmat.shape[2] + + # Pack u8 → bitpacked + return cls( + _shmat_u8=_pack_u8_to_bitpacked(shmat), + _vegshmat_u8=_pack_u8_to_bitpacked(vegshmat), + _vbshmat_u8=_pack_u8_to_bitpacked(vbshmat), + 
_n_patches=patch_count, + ) + + @classmethod + def from_memmap(cls, directory: str | Path, mode: Literal["r", "r+", "c"] = "r") -> ShadowArrays: + """ + Load bitpacked shadow matrices from a memmap directory. + + Expected files: + - metadata.json (shape, patch_count, file names) + - shmat.dat + - vegshmat.dat + - vbshmat.dat + """ + directory = Path(directory) + if not directory.exists(): + raise FileNotFoundError(f"Shadow memmap directory not found: {directory}") + + metadata_path = directory / "metadata.json" + if not metadata_path.exists(): + raise FileNotFoundError(f"Shadow memmap metadata not found: {metadata_path}") + + with metadata_path.open("r", encoding="utf-8") as f: + meta = json.load(f) + + shape_raw = meta.get("shape") + if not isinstance(shape_raw, (list, tuple)) or len(shape_raw) != 3: + raise ValueError(f"Invalid shadow memmap shape metadata in {metadata_path}: {shape_raw}") + shape = tuple(int(v) for v in shape_raw) + patch_count = int(meta.get("patch_count", 153)) + + sh_file = meta.get("shadowmat_file", "shmat.dat") + veg_file = meta.get("vegshadowmat_file", "vegshmat.dat") + vb_file = meta.get("vbshmat_file", "vbshmat.dat") + + sh_path = directory / sh_file + veg_path = directory / veg_file + vb_path = directory / vb_file + for path in (sh_path, veg_path, vb_path): + if not path.exists(): + raise FileNotFoundError(f"Expected shadow memmap file not found: {path}") + + return cls( + _shmat_u8=np.memmap(sh_path, dtype=np.uint8, mode=mode, shape=shape), + _vegshmat_u8=np.memmap(veg_path, dtype=np.uint8, mode=mode, shape=shape), + _vbshmat_u8=np.memmap(vb_path, dtype=np.uint8, mode=mode, shape=shape), + _n_patches=patch_count, + ) + + +@dataclass +class PrecomputedData: + """ + Container for pre-computed preprocessing data to skip expensive calculations. + + Use this to provide already-computed walls, SVF, and/or shadow matrices + to the calculate() function. 
This is useful when: + - Running multiple timesteps with the same geometry + - Using data generated by external tools + - Optimizing performance by pre-computing once + + Attributes: + wall_height: Pre-computed wall height grid (meters). If None, wall height + can be prepared from DSM via SurfaceData.prepare(). + wall_aspect: Pre-computed wall aspect grid (degrees, 0=N). If None, wall aspect + can be prepared from DSM via SurfaceData.prepare(). + svf: Pre-computed SVF arrays. Required for calculate(); if None, + calculate() raises MissingPrecomputedData. + shadow_matrices: Pre-computed anisotropic shadow matrices. + If None, isotropic sky model is used. + + Example: + # Load all preprocessing + precomputed = PrecomputedData.load( + walls_dir="preprocessed/walls", + svf_dir="preprocessed/svf", + ) + + # Or create manually + svf = SvfArrays.from_zip("path/to/svfs.zip") + shadows = ShadowArrays.from_npz("path/to/shadowmats.npz") + precomputed = PrecomputedData(svf=svf, shadow_matrices=shadows) + + result = calculate( + surface=surface, + location=location, + weather=weather, + precomputed=precomputed, + ) + """ + + wall_height: NDArray[np.floating] | None = None + wall_aspect: NDArray[np.floating] | None = None + svf: SvfArrays | None = None + shadow_matrices: ShadowArrays | None = None + + @classmethod + def prepare( + cls, + walls_dir: str | Path | None = None, + svf_dir: str | Path | None = None, + ) -> PrecomputedData: + """ + Prepare preprocessing data from directories. + + Loads preprocessing files if they exist. If files don't exist, + the corresponding data will be None. + + All parameters are optional. 
+ + Args: + walls_dir: Directory containing wall preprocessing files: + - wall_hts.tif: Wall heights (meters) + - wall_aspects.tif: Wall aspects (degrees, 0=N) + svf_dir: Directory containing SVF preprocessing files: + - svfs.zip: SVF arrays (required if svf_dir provided) + - shadowmats.npz: Shadow matrices for anisotropic sky (optional) + + Returns: + PrecomputedData with loaded arrays. Missing data is set to None. + + Example: + # Prepare all preprocessing + precomputed = PrecomputedData.prepare( + walls_dir="preprocessed/walls", + svf_dir="preprocessed/svf", + ) + + # Prepare only SVF + precomputed = PrecomputedData.prepare(svf_dir="preprocessed/svf") + + # Nothing prepared (SVF must be provided before calculate()) + precomputed = PrecomputedData.prepare() + """ + from .. import io + + wall_height_arr = None + wall_aspect_arr = None + svf_arrays = None + shadow_arrays = None + + def _load_svf_from_dir(base: Path) -> SvfArrays | None: + memmap_dir = base / "memmap" + svf_zip = base / "svfs.zip" + if memmap_dir.exists() and (memmap_dir / "svf.npy").exists(): + logger.info(f" Loaded SVF memmap cache from {memmap_dir}") + return SvfArrays.from_memmap(memmap_dir) + if svf_zip.exists(): + logger.info(f" Loaded SVF zip from {svf_zip}") + return SvfArrays.from_zip(str(svf_zip)) + return None + + def _load_shadow_from_dir(base: Path) -> ShadowArrays | None: + shadow_npz = base / "shadowmats.npz" + if shadow_npz.exists(): + logger.info(f" Loaded shadow matrices from {shadow_npz}") + return ShadowArrays.from_npz(str(shadow_npz)) + + shadow_memmap_dir = base / "shadow_memmaps" + metadata = shadow_memmap_dir / "metadata.json" + if shadow_memmap_dir.exists() and metadata.exists(): + logger.info(f" Loaded shadow memmaps from {shadow_memmap_dir}") + return ShadowArrays.from_memmap(shadow_memmap_dir) + return None + + # Load walls if directory provided + if walls_dir is not None: + walls_path = Path(walls_dir) + wall_height_path = walls_path / "wall_hts.tif" + wall_aspect_path = 
walls_path / "wall_aspects.tif" + + if wall_height_path.exists(): + wall_height_arr, _, _, _ = io.load_raster(str(wall_height_path)) + logger.info(f" Loaded wall heights from {walls_dir}") + else: + logger.debug(f" Wall heights not found: {wall_height_path}") + + if wall_aspect_path.exists(): + wall_aspect_arr, _, _, _ = io.load_raster(str(wall_aspect_path)) + logger.info(f" Loaded wall aspects from {walls_dir}") + else: + logger.debug(f" Wall aspects not found: {wall_aspect_path}") + + # Load SVF if directory provided + if svf_dir is not None: + svf_path = Path(svf_dir) + svf_arrays = _load_svf_from_dir(svf_path) + shadow_arrays = _load_shadow_from_dir(svf_path) + + # Fallback: look for pixel-size-keyed cache under svf/<pixel_size_tag>/ when + # caller points at a prepared surface directory root. + if svf_arrays is None or shadow_arrays is None: + candidate_dirs: list[Path] = [] + meta_path = svf_path / "metadata.json" + if meta_path.exists(): + try: + with meta_path.open("r", encoding="utf-8") as f: + meta = json.load(f) + px = meta.get("pixel_size") + if px is not None: + candidate_dirs.append(svf_path / "svf" / pixel_size_tag(float(px))) + except Exception: + pass + + svf_root = svf_path / "svf" + if svf_root.exists(): + for child in svf_root.iterdir(): + if child.is_dir(): + candidate_dirs.append(child) + + seen: set[Path] = set() + for candidate in candidate_dirs: + if candidate in seen: + continue + seen.add(candidate) + if svf_arrays is None: + svf_arrays = _load_svf_from_dir(candidate) + if shadow_arrays is None: + shadow_arrays = _load_shadow_from_dir(candidate) + if svf_arrays is not None and shadow_arrays is not None: + break + + if svf_arrays is None: + logger.debug(f" SVF not found in {svf_path}") + else: + logger.info(f" Loaded SVF data: {svf_arrays.svf.shape}") + + if shadow_arrays is None: + logger.debug(" No shadow matrices found (anisotropic sky will be slower)") + else: + logger.info(" Loaded shadow matrices for anisotropic sky") + + return cls( + 
wall_height=wall_height_arr, + wall_aspect=wall_aspect_arr, + svf=svf_arrays, + shadow_matrices=shadow_arrays, + ) diff --git a/pysrc/solweig/models/results.py b/pysrc/solweig/models/results.py new file mode 100644 index 0000000..e2751e9 --- /dev/null +++ b/pysrc/solweig/models/results.py @@ -0,0 +1,268 @@ +"""Result container returned by SOLWEIG calculations. + +Defines :class:`SolweigResult`, which holds the output grids (Tmrt, +radiation components, shadow mask) and provides convenience methods +for computing thermal comfort indices (UTCI, PET) and saving outputs +to GeoTIFF. +""" + +from __future__ import annotations + +from dataclasses import dataclass +from datetime import datetime as dt +from pathlib import Path +from typing import TYPE_CHECKING + +import numpy as np + +from ..solweig_logging import get_logger + +if TYPE_CHECKING: + from numpy.typing import NDArray + + from ..models import HumanParams + from .state import ThermalState + from .surface import SurfaceData + from .weather import Weather + +logger = get_logger(__name__) + + +@dataclass +class SolweigResult: + """ + Results from a SOLWEIG calculation. + + All output grids have the same shape as the input DSM. + + Attributes: + tmrt: Mean Radiant Temperature grid (°C). + utci: Universal Thermal Climate Index grid (°C). Optional. + pet: Physiological Equivalent Temperature grid (°C). Optional. + shadow: Shadow fraction (1.0=sunlit, 0.0=fully shaded, float). + kdown: Downwelling shortwave radiation (W/m²). + kup: Upwelling shortwave radiation (W/m²). + ldown: Downwelling longwave radiation (W/m²). + lup: Upwelling longwave radiation (W/m²). + state: Thermal state for multi-timestep chaining. Optional. + When state parameter was passed to calculate(), this contains + the updated state for the next timestep. 
+ """ + + tmrt: NDArray[np.floating] + shadow: NDArray[np.floating] | None = None + kdown: NDArray[np.floating] | None = None + kup: NDArray[np.floating] | None = None + ldown: NDArray[np.floating] | None = None + lup: NDArray[np.floating] | None = None + utci: NDArray[np.floating] | None = None + pet: NDArray[np.floating] | None = None + state: ThermalState | None = None + + def to_geotiff( + self, + output_dir: str | Path, + timestamp: dt | None = None, + outputs: list[str] | None = None, + surface: SurfaceData | None = None, + transform: list[float] | None = None, + crs_wkt: str | None = None, + ) -> None: + """ + Save results to GeoTIFF files. + + Creates one GeoTIFF file per output variable per timestep. + Filename pattern: {output}_{YYYYMMDD}_{HHMM}.tif + + Args: + output_dir: Directory to write GeoTIFF files. + timestamp: Timestamp for filename. If None, uses current time. + outputs: List of outputs to save. Options: "tmrt", "utci", "pet", + "shadow", "kdown", "kup", "ldown", "lup". + Default: ["tmrt"] (only save Mean Radiant Temperature). + surface: SurfaceData object (if loaded via from_geotiff, contains CRS/transform). + If provided and transform/crs_wkt not specified, uses surface metadata. + transform: GDAL-style geotransform [x_origin, pixel_width, 0, + y_origin, 0, -pixel_height]. If None, attempts to use surface metadata, + otherwise uses identity transform. + crs_wkt: Coordinate reference system in WKT format. If None, attempts to use + surface metadata, otherwise no CRS set. + + Example: + # With surface metadata (recommended when using from_geotiff) + >>> surface, precomputed = SurfaceData.from_geotiff("dsm.tif", svf_dir="svf/") + >>> result = solweig.calculate(surface, location, weather, precomputed=precomputed) + >>> result.to_geotiff("output/", timestamp=weather.datetime, surface=surface) + + # Without surface metadata (explicit transform/CRS) + >>> result.to_geotiff( + ... "output/", + ... timestamp=datetime(2025, 7, 15, 12, 0), + ... 
outputs=["tmrt", "utci", "pet"], + ... transform=[0, 1, 0, 0, 0, -1], + ... crs_wkt="EPSG:32633", + ... ) + """ + from .. import io + + output_dir = Path(output_dir) + output_dir.mkdir(parents=True, exist_ok=True) + + # Default outputs: just tmrt + if outputs is None: + outputs = ["tmrt"] + + # Default timestamp: current time + if timestamp is None: + timestamp = dt.now() + + # Format timestamp for filename + ts_str = timestamp.strftime("%Y%m%d_%H%M") + + # Use surface metadata if available and not overridden + if surface is not None: + if transform is None and surface._geotransform is not None: + transform = surface._geotransform + if crs_wkt is None and surface._crs_wkt is not None: + crs_wkt = surface._crs_wkt + + # Default transform: identity (top-left origin, 1m pixels) + if transform is None: + height, width = self.tmrt.shape + transform = [0.0, 1.0, 0.0, 0.0, 0.0, -1.0] + + # Default CRS: empty string (no CRS) + if crs_wkt is None: + crs_wkt = "" + + # Map output names to arrays + available_outputs = { + "tmrt": self.tmrt, + "utci": self.utci, + "pet": self.pet, + "shadow": self.shadow, + "kdown": self.kdown, + "kup": self.kup, + "ldown": self.ldown, + "lup": self.lup, + } + + # Save each requested output + for name in outputs: + if name not in available_outputs: + logger.warning(f"Unknown output '{name}', skipping. 
Valid: {list(available_outputs.keys())}") + continue + + array = available_outputs[name] + if array is None: + logger.warning(f"Output '{name}' is None (not computed), skipping.") + continue + + # Write to GeoTIFF in component subdirectory + comp_dir = output_dir / name + comp_dir.mkdir(parents=True, exist_ok=True) + filepath = comp_dir / f"{name}_{ts_str}.tif" + io.save_raster( + out_path_str=str(filepath), + data_arr=array, + trf_arr=transform, + crs_wkt=crs_wkt, + no_data_val=np.nan, + ) + logger.debug(f"Saved {name} to {filepath}") + + def compute_utci( + self, + weather_or_ta: Weather | float, + rh: float | None = None, + wind: float | None = None, + ) -> NDArray[np.floating]: + """ + Compute UTCI (Universal Thermal Climate Index) from this result's Tmrt. + + Can be called with either a Weather object or individual values: + utci = result.compute_utci(weather) + utci = result.compute_utci(ta=25.0, rh=50.0, wind=2.0) + + Args: + weather_or_ta: Either a Weather object, or air temperature in °C. + rh: Relative humidity in % (required if weather_or_ta is float). + wind: Wind speed at 10m height in m/s. Default 1.0 if not provided. + + Returns: + UTCI grid (°C) with same shape as tmrt. 
+ + Example: + result = solweig.calculate(surface, location, weather) + + # Pattern A: Pass weather object (convenient) + utci = result.compute_utci(weather) + + # Pattern B: Pass individual values (explicit) + utci = result.compute_utci(25.0, rh=50.0, wind=2.0) + """ + from ..postprocess import compute_utci_grid + from .weather import Weather as WeatherClass + + # Duck-type check avoids isinstance failure from dual module import paths + if hasattr(weather_or_ta, "ta") and hasattr(weather_or_ta, "rh"): + w: WeatherClass = weather_or_ta # type: ignore[assignment] + return compute_utci_grid(self.tmrt, w.ta, w.rh, w.ws) + else: + # Individual values + ta = float(weather_or_ta) + if rh is None: + raise ValueError("rh is required when ta is provided as a float") + return compute_utci_grid(self.tmrt, ta, rh, wind if wind is not None else 1.0) + + def compute_pet( + self, + weather_or_ta: Weather | float, + rh: float | None = None, + wind: float | None = None, + human: HumanParams | None = None, + ) -> NDArray[np.floating]: + """ + Compute PET (Physiological Equivalent Temperature) from this result's Tmrt. + + Can be called with either a Weather object or individual values: + pet = result.compute_pet(weather) + pet = result.compute_pet(ta=25.0, rh=50.0, wind=2.0) + + Args: + weather_or_ta: Either a Weather object, or air temperature in °C. + rh: Relative humidity in % (required if weather_or_ta is float). + wind: Wind speed at 10m height in m/s. Default 1.0 if not provided. + human: Human body parameters. Uses defaults if not provided. + + Returns: + PET grid (°C) with same shape as tmrt. + + Note: + PET uses an iterative solver and is ~50× slower than UTCI. 
+ + Example: + result = solweig.calculate(surface, location, weather) + + # Pattern A: Pass weather object (convenient) + pet = result.compute_pet(weather) + + # Pattern B: Pass individual values with custom human params + pet = result.compute_pet( + 25.0, rh=50.0, wind=2.0, + human=HumanParams(weight=70, height=1.65) + ) + """ + from ..postprocess import compute_pet_grid + from .weather import Weather as WeatherClass + + # Duck-type check avoids isinstance failure from dual module import paths + if hasattr(weather_or_ta, "ta") and hasattr(weather_or_ta, "rh"): + w: WeatherClass = weather_or_ta # type: ignore[assignment] + return compute_pet_grid(self.tmrt, w.ta, w.rh, w.ws, human) + else: + # Individual values + ta = float(weather_or_ta) + if rh is None: + raise ValueError("rh is required when ta is provided as a float") + return compute_pet_grid(self.tmrt, ta, rh, wind if wind is not None else 1.0, human) diff --git a/pysrc/solweig/models/state.py b/pysrc/solweig/models/state.py new file mode 100644 index 0000000..fef1b76 --- /dev/null +++ b/pysrc/solweig/models/state.py @@ -0,0 +1,158 @@ +"""Thermal state and tile specification models. + +:class:`ThermalState` carries surface temperature history between +timesteps (ground and wall thermal inertia via TsWaveDelay). +:class:`TileSpec` describes the geometry of a single tile used by the +large-raster tiling engine. +""" + +from __future__ import annotations + +from dataclasses import dataclass +from typing import TYPE_CHECKING + +import numpy as np + +if TYPE_CHECKING: + from numpy.typing import NDArray + + +@dataclass +class ThermalState: + """ + Thermal state for multi-timestep calculations. + + Carries forward surface temperature history between timesteps + to model thermal inertia of ground and walls (TsWaveDelay_2015a). + + This enables accurate time-series simulations where surface temperatures + depend on accumulated heating throughout the day. 
+ + Attributes: + tgmap1: Upwelling longwave history (center view). + tgmap1_e: Upwelling longwave history (east view). + tgmap1_s: Upwelling longwave history (south view). + tgmap1_w: Upwelling longwave history (west view). + tgmap1_n: Upwelling longwave history (north view). + tgout1: Ground temperature output history. + firstdaytime: Flag for first morning timestep (1.0=first, 0.0=subsequent). + timeadd: Accumulated time for thermal delay function. + timestep_dec: Decimal time between steps (fraction of day). + + Example: + # Manual state management for custom time loops + state = ThermalState.initial(dsm.shape) + for weather in weather_list: + result = calculate(..., state=state) + state = result.state + """ + + tgmap1: NDArray[np.floating] + tgmap1_e: NDArray[np.floating] + tgmap1_s: NDArray[np.floating] + tgmap1_w: NDArray[np.floating] + tgmap1_n: NDArray[np.floating] + tgout1: NDArray[np.floating] + firstdaytime: float = 1.0 + timeadd: float = 0.0 + timestep_dec: float = 0.0 + + @classmethod + def initial(cls, shape: tuple[int, int]) -> ThermalState: + """ + Create initial state for first timestep. + + Args: + shape: Grid shape (rows, cols) matching the DSM. + + Returns: + ThermalState with zero-initialized arrays. + """ + zeros = np.zeros(shape, dtype=np.float32) + return cls( + tgmap1=zeros.copy(), + tgmap1_e=zeros.copy(), + tgmap1_s=zeros.copy(), + tgmap1_w=zeros.copy(), + tgmap1_n=zeros.copy(), + tgout1=zeros.copy(), + firstdaytime=1.0, + timeadd=0.0, + timestep_dec=0.0, + ) + + def copy(self) -> ThermalState: + """Create a deep copy of this state.""" + return ThermalState( + tgmap1=self.tgmap1.copy(), + tgmap1_e=self.tgmap1_e.copy(), + tgmap1_s=self.tgmap1_s.copy(), + tgmap1_w=self.tgmap1_w.copy(), + tgmap1_n=self.tgmap1_n.copy(), + tgout1=self.tgout1.copy(), + firstdaytime=self.firstdaytime, + timeadd=self.timeadd, + timestep_dec=self.timestep_dec, + ) + + +@dataclass +class TileSpec: + """ + Specification for a single tile with overlap regions. 
+ + Attributes: + row_start, row_end: Core tile row bounds (without overlap). + col_start, col_end: Core tile column bounds (without overlap). + row_start_full, row_end_full: Full tile row bounds (with overlap). + col_start_full, col_end_full: Full tile column bounds (with overlap). + overlap_top, overlap_bottom: Vertical overlap in pixels. + overlap_left, overlap_right: Horizontal overlap in pixels. + """ + + row_start: int + row_end: int + col_start: int + col_end: int + row_start_full: int + row_end_full: int + col_start_full: int + col_end_full: int + overlap_top: int + overlap_bottom: int + overlap_left: int + overlap_right: int + + @property + def core_shape(self) -> tuple[int, int]: + """Shape of core tile (without overlap).""" + return (self.row_end - self.row_start, self.col_end - self.col_start) + + @property + def full_shape(self) -> tuple[int, int]: + """Shape of full tile (with overlap).""" + return (self.row_end_full - self.row_start_full, self.col_end_full - self.col_start_full) + + @property + def core_slice(self) -> tuple[slice, slice]: + """Slices for extracting core from full tile result.""" + return ( + slice(self.overlap_top, self.overlap_top + self.core_shape[0]), + slice(self.overlap_left, self.overlap_left + self.core_shape[1]), + ) + + @property + def write_slice(self) -> tuple[slice, slice]: + """Slices for writing core to global output.""" + return ( + slice(self.row_start, self.row_end), + slice(self.col_start, self.col_end), + ) + + @property + def read_slice(self) -> tuple[slice, slice]: + """Slices for reading full tile from global input.""" + return ( + slice(self.row_start_full, self.row_end_full), + slice(self.col_start_full, self.col_end_full), + ) diff --git a/pysrc/solweig/models/surface.py b/pysrc/solweig/models/surface.py new file mode 100644 index 0000000..7046f2f --- /dev/null +++ b/pysrc/solweig/models/surface.py @@ -0,0 +1,2247 @@ +"""Surface and terrain data model. 
+ +Defines :class:`SurfaceData`, the primary input container for SOLWEIG +calculations. Holds the DSM and optional rasters (CDSM, DEM, TDSM, +land cover, walls, SVF). The :meth:`SurfaceData.prepare` class method +loads GeoTIFFs from disk, aligns extents, and computes or caches +walls and sky view factors automatically. +""" + +from __future__ import annotations + +import json +import os +import shutil +from collections.abc import Callable +from dataclasses import dataclass, field +from pathlib import Path +from types import SimpleNamespace +from typing import TYPE_CHECKING, Any + +import numpy as np +from affine import Affine as AffineClass + +from .. import io +from .. import walls as walls_module +from ..buffers import BufferPool +from ..cache import CacheMetadata, clear_stale_cache, pixel_size_tag, validate_cache +from ..loaders import get_lc_properties_from_params +from ..rustalgos import skyview +from ..solweig_logging import get_logger +from ..utils import extract_bounds, intersect_bounds, resample_to_grid +from .precomputed import ShadowArrays, SvfArrays + +if TYPE_CHECKING: + from numpy.typing import NDArray + +logger = get_logger(__name__) + + +def _should_compress_svf_exports(n_pixels: int) -> bool: + """ + Return True when SVF/shadow exports should use compression. + + Large rasters spend a long post-GPU tail in single-threaded compression. + Default threshold can be overridden with SOLWEIG_COMPRESS_MAX_PIXELS. + """ + try: + limit = int(os.getenv("SOLWEIG_COMPRESS_MAX_PIXELS", "50000000")) + except ValueError: + limit = 50_000_000 + return n_pixels <= max(0, limit) + + +def _should_export_shadow_npz(n_pixels: int) -> bool: + """ + Return True when shadowmats.npz should be written. + + For very large grids, serializing 3 bitpacked matrices into one NPZ can + dominate runtime after GPU work completes. For those cases we keep the + memmap cache and skip NPZ export by default. 
+ """ + force = os.getenv("SOLWEIG_FORCE_SHADOW_NPZ", "").strip().lower() in ("1", "true") + if force: + return True + try: + limit = int(os.getenv("SOLWEIG_SHADOW_NPZ_MAX_PIXELS", "50000000")) + except ValueError: + limit = 50_000_000 + return n_pixels <= max(0, limit) + + +def _save_svfs_zip(svf_data: SvfArrays, svf_cache_dir: Path, aligned_rasters: dict, *, compress: bool = True) -> None: + """Save SVF arrays as svfs.zip for PrecomputedData.prepare() compatibility.""" + import tempfile + import zipfile + + geotransform = aligned_rasters.get("dsm_transform") + crs_wkt = aligned_rasters.get("dsm_crs") + + # If geotransform/CRS not available, skip zip (memmap still works) + if geotransform is None: + logger.debug(" Skipping svfs.zip (no geotransform available)") + return + + svf_files = { + "svf.tif": svf_data.svf, + "svfN.tif": svf_data.svf_north, + "svfE.tif": svf_data.svf_east, + "svfS.tif": svf_data.svf_south, + "svfW.tif": svf_data.svf_west, + "svfveg.tif": svf_data.svf_veg, + "svfNveg.tif": svf_data.svf_veg_north, + "svfEveg.tif": svf_data.svf_veg_east, + "svfSveg.tif": svf_data.svf_veg_south, + "svfWveg.tif": svf_data.svf_veg_west, + "svfaveg.tif": svf_data.svf_aveg, + "svfNaveg.tif": svf_data.svf_aveg_north, + "svfEaveg.tif": svf_data.svf_aveg_east, + "svfSaveg.tif": svf_data.svf_aveg_south, + "svfWaveg.tif": svf_data.svf_aveg_west, + } + + # Convert Affine to GDAL geotransform list if needed + if isinstance(geotransform, AffineClass): + geotransform = [geotransform.c, geotransform.a, geotransform.b, geotransform.f, geotransform.d, geotransform.e] + + svf_zip_path = svf_cache_dir / "svfs.zip" + with tempfile.TemporaryDirectory() as tmpdir: + for filename, arr in svf_files.items(): + if arr is not None: + tif_path = str(Path(tmpdir) / filename) + # Intermediate export for zip packaging: avoid COG/preview overhead. 
+ io.save_raster( + tif_path, + arr, + geotransform, + crs_wkt, + use_cog=False, + generate_preview=False, + ) + compression = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED + with zipfile.ZipFile(str(svf_zip_path), "w", compression=compression) as zf: + for filename in svf_files: + tif_file = Path(tmpdir) / filename + if tif_file.exists(): + zf.write(str(tif_file), filename) + + mode = "compressed" if compress else "stored (uncompressed)" + logger.info(f" ✓ SVF saved as {svf_zip_path} ({mode})") + + +def _save_shadow_matrices(svf_result, svf_cache_dir: Path, patch_count: int = 153, *, compress: bool = True) -> None: + """Save shadow matrices as shadowmats.npz for anisotropic sky model.""" + # Shadow matrices are bitpacked uint8 from Rust: shape (rows, cols, ceil(patches/8)) + shadow_path = svf_cache_dir / "shadowmats.npz" + save_fn = np.savez_compressed if compress else np.savez + save_fn( + str(shadow_path), + shadowmat=np.array(svf_result.bldg_sh_matrix), + vegshadowmat=np.array(svf_result.veg_sh_matrix), + vbshmat=np.array(svf_result.veg_blocks_bldg_sh_matrix), + patch_count=np.array(patch_count), + ) + + mode = "compressed" if compress else "uncompressed" + logger.info(f" ✓ Shadow matrices saved as {shadow_path} ({mode})") + + +def _max_shadow_height(dsm: np.ndarray, cdsm: np.ndarray | None = None, use_veg: bool = False) -> float: + """ + Estimate maximum casting height above local ground for shadow reach logic. + + Uses local relief (max - min) instead of absolute elevation so tiled buffer + sizing and SVF ray reach do not explode on high-elevation terrain. + + Vegetation is only considered when ``use_veg=True``. This differs from + ``SurfaceData.max_height``, which is intentionally conservative for buffer + sizing and always considers CDSM when present. 
+ """ + if dsm.size == 0 or not np.isfinite(dsm).any(): + return 0.0 + + dsm_max = float(np.nanmax(dsm)) + dsm_min = float(np.nanmin(dsm)) + max_elevation = dsm_max + if use_veg and cdsm is not None and cdsm.size > 0 and np.isfinite(cdsm).any(): + cdsm_max = float(np.nanmax(cdsm)) + if np.isfinite(cdsm_max): + max_elevation = max(max_elevation, cdsm_max) + height = max_elevation - dsm_min + if not np.isfinite(height) or height <= 0: + return 0.0 + return height + + +@dataclass +class SurfaceData: + """ + Surface/terrain data for SOLWEIG calculations. + + Only `dsm` is required. Other rasters are optional and will be + treated as absent if not provided. + + Attributes: + dsm: Digital Surface Model (elevation in meters). Required. + cdsm: Canopy Digital Surface Model (vegetation heights). Optional. + dem: Digital Elevation Model (ground elevation). Optional. + tdsm: Trunk Digital Surface Model (trunk zone heights). Optional. + land_cover: Land cover classification grid (UMEP standard IDs). Optional. + IDs: 0=paved, 1=asphalt, 2=buildings, 5=grass, 6=bare_soil, 7=water. + When provided, albedo and emissivity are derived from land cover. + wall_height: Preprocessed wall heights (meters). Optional. + If not provided, computed during preparation from DSM. + wall_aspect: Preprocessed wall aspects (degrees, 0=N). Optional. + If not provided, computed during preparation from DSM. + svf: Preprocessed Sky View Factor arrays. Optional. + If not provided, must be prepared explicitly before calculate() + (e.g. via SurfaceData.prepare() or compute_svf()). + shadow_matrices: Preprocessed shadow matrices for anisotropic sky. Optional. + pixel_size: Pixel size in meters. Default 1.0. + trunk_ratio: Ratio for auto-generating TDSM from CDSM. Default 0.25. + dsm_relative: Whether DSM contains relative heights (above ground) + rather than absolute elevations. Default False. If True, DEM is + required and preprocess() converts DSM to absolute via DSM + DEM. 
+ cdsm_relative: Whether CDSM contains relative heights. Default True. + If True and preprocess() is not called, a warning is issued. + tdsm_relative: Whether TDSM contains relative heights. Default True. + If True and preprocess() is not called, a warning is issued. + + Note: + Albedo and emissivity are derived internally from land_cover using + standard UMEP parameters. They cannot be directly specified. + + Note: + max_height is auto-computed from dsm as: np.nanmax(dsm) - np.nanmin(dsm) + + Height Conventions: + Each raster layer can independently use relative or absolute heights. + The per-layer flags (``dsm_relative``, ``cdsm_relative``, + ``tdsm_relative``) control the convention for each layer. + + **Relative Heights** (height above ground): + - CDSM/TDSM: vegetation height above ground (e.g., 6m tree) + - DSM: building/surface height above ground (requires DEM) + - Typical range: 0-40m for CDSM, 0-10m for TDSM + - Must call ``preprocess()`` before calculations + + **Absolute Heights** (elevation above sea level): + - Values in the same vertical reference system + - Example: DSM=127m, CDSM=133m means 6m vegetation + - No preprocessing needed + + The internal algorithms (Rust) always use **absolute heights**. The + ``preprocess()`` method converts relative → absolute using: + dsm_absolute = dem + dsm_relative (requires DEM) + cdsm_absolute = base + cdsm_relative + tdsm_absolute = base + tdsm_relative + where ``base = DEM`` if available, else ``base = DSM``. 
+ + Example: + # Relative CDSM (common case): + surface = SurfaceData(dsm=dsm, cdsm=cdsm_rel) + surface.preprocess() # Converts CDSM to absolute + + # Absolute CDSM: + surface = SurfaceData(dsm=dsm, cdsm=cdsm_abs, cdsm_relative=False) + + # Mixed: absolute DSM, relative CDSM, absolute TDSM: + surface = SurfaceData( + dsm=dsm, cdsm=cdsm, tdsm=tdsm, + cdsm_relative=True, tdsm_relative=False, + ) + surface.preprocess() # Only converts CDSM + + # Relative DSM (requires DEM): + surface = SurfaceData(dsm=ndsm, dem=dem, dsm_relative=True) + surface.preprocess() # Converts DSM to absolute via DEM + nDSM + """ + + # Surface rasters + dsm: NDArray[np.floating] + cdsm: NDArray[np.floating] | None = None + dem: NDArray[np.floating] | None = None + tdsm: NDArray[np.floating] | None = None + albedo: NDArray[np.floating] | None = None + emissivity: NDArray[np.floating] | None = None + land_cover: NDArray[np.integer] | None = None + + # Preprocessing data (walls, SVF, shadows) + wall_height: NDArray[np.floating] | None = None + wall_aspect: NDArray[np.floating] | None = None + svf: SvfArrays | None = None + shadow_matrices: ShadowArrays | None = None + + # Grid properties + pixel_size: float = 1.0 + trunk_ratio: float = 0.25 # Trunk zone ratio for auto-generating TDSM from CDSM + dsm_relative: bool = False # Whether DSM contains relative heights (requires DEM) + cdsm_relative: bool = True # Whether CDSM contains relative heights + tdsm_relative: bool = True # Whether TDSM contains relative heights + + # Internal state + _nan_filled: bool = field(default=False, init=False, repr=False) + _preprocessed: bool = field(default=False, init=False, repr=False) + _geotransform: list[float] | None = field(default=None, init=False, repr=False) # GDAL geotransform + _crs_wkt: str | None = field(default=None, init=False, repr=False) # CRS as WKT string + _buffer_pool: BufferPool | None = field(default=None, init=False, repr=False) # Reusable array pool + _gvf_geometry_cache: object = 
field(default=None, init=False, repr=False) # Rust GVF geometry cache + _valid_mask: NDArray[np.bool_] | None = field(default=None, init=False, repr=False) # Combined valid mask + # Per-timestep computation caches (set by computation.calculate_core_fused) + _valid_mask_u8_cache: object = field(default=None, init=False, repr=False) + _valid_bbox_cache: object = field(default=None, init=False, repr=False) + _land_cover_props_cache: object = field(default=None, init=False, repr=False) + _buildings_mask_cache: object = field(default=None, init=False, repr=False) + _lc_grid_f32_cache: object = field(default=None, init=False, repr=False) + _gvf_geometry_cache_crop: object = field(default=None, init=False, repr=False) + _aniso_shadow_crop_cache: object = field(default=None, init=False, repr=False) + + def __post_init__(self): + # Ensure dsm is float32 for memory efficiency + self.dsm = np.asarray(self.dsm, dtype=np.float32) + + # Convert optional surface arrays if provided + if self.cdsm is not None: + self.cdsm = np.asarray(self.cdsm, dtype=np.float32) + if self.dem is not None: + self.dem = np.asarray(self.dem, dtype=np.float32) + if self.tdsm is not None: + self.tdsm = np.asarray(self.tdsm, dtype=np.float32) + if self.albedo is not None: + self.albedo = np.asarray(self.albedo, dtype=np.float32) + if self.emissivity is not None: + self.emissivity = np.asarray(self.emissivity, dtype=np.float32) + if self.land_cover is not None: + self.land_cover = np.asarray(self.land_cover, dtype=np.uint8) + + # Convert optional preprocessing arrays if provided + if self.wall_height is not None: + self.wall_height = np.asarray(self.wall_height, dtype=np.float32) + if self.wall_aspect is not None: + self.wall_aspect = np.asarray(self.wall_aspect, dtype=np.float32) + + @classmethod + def prepare( + cls, + dsm: str | Path, + working_dir: str | Path, + cdsm: str | Path | None = None, + dem: str | Path | None = None, + tdsm: str | Path | None = None, + land_cover: str | Path | None = None, + 
wall_height: str | Path | None = None, + wall_aspect: str | Path | None = None, + svf_dir: str | Path | None = None, + bbox: list[float] | None = None, + pixel_size: float | None = None, + trunk_ratio: float = 0.25, + dsm_relative: bool = False, + cdsm_relative: bool = True, + tdsm_relative: bool = True, + force_recompute: bool = False, + feedback: Any = None, + ) -> SurfaceData: + """ + Prepare surface data and optional preprocessing from GeoTIFF files. + + Loads raster data from disk and prepares it for SOLWEIG calculations. + Optionally loads preprocessing data (walls, SVF) and automatically + aligns it to match the surface grid. + + Args: + dsm: Path to DSM GeoTIFF file (required). + working_dir: Working directory for caching computed/resampled data (required). + Computed walls/SVF and resampled rasters are auto-discovered here and + reused on subsequent runs. Structure: working_dir/walls/, working_dir/svf/, + working_dir/resampled/. All intermediate results saved for inspection. + To regenerate cached data, delete the working_dir. + cdsm: Path to CDSM GeoTIFF file (optional). + dem: Path to DEM GeoTIFF file (optional). + tdsm: Path to TDSM GeoTIFF file (optional). + land_cover: Path to land cover GeoTIFF file (optional). + Albedo and emissivity are derived from land cover internally. + wall_height: Path to wall height GeoTIFF file (optional). + If not provided, walls are auto-discovered in working_dir/walls/ or + computed from DSM and cached. + wall_aspect: Path to wall aspect GeoTIFF file (optional, degrees 0=N). + If not provided, walls are auto-discovered in working_dir/walls/ or + computed from DSM and cached. + svf_dir: Directory containing SVF preprocessing files (optional): + - svfs.zip: SVF arrays (required if svf_dir provided) + - shadowmats.npz: Shadow matrices for anisotropic sky (optional) + If not provided, SVF is auto-discovered in working_dir/svf/ or + computed and cached. + bbox: Explicit bounding box [minx, miny, maxx, maxy] (optional). 
+                If provided, all data is cropped/resampled to this extent.
+                If None, uses auto-intersection of all provided data.
+            pixel_size: Pixel size in meters. If None, computed from DSM geotransform.
+            trunk_ratio: Ratio for auto-generating TDSM from CDSM. Default 0.25.
+            dsm_relative: Whether DSM contains relative heights. Default False.
+            cdsm_relative: Whether CDSM contains relative heights. Default True.
+            tdsm_relative: Whether TDSM contains relative heights. Default True.
+            force_recompute: If True, skip cache and recompute walls/SVF even if they
+                exist in working_dir. Default False (use cached data when available).
+            feedback: Optional QGIS QgsProcessingFeedback for progress/cancellation.
+
+        Returns:
+            SurfaceData instance with loaded terrain and preprocessing data.
+
+        Note:
+            When preprocessing data (walls/SVF) has different extents or resolution
+            than the surface data, it is automatically resampled/cropped to match.
+            Use bbox parameter to explicitly control the output extent.
+
+        Example:
+            # Load surface with preprocessing
+            surface = SurfaceData.prepare(
+                dsm="data/dsm.tif", working_dir="cache/",
+                cdsm="data/cdsm.tif",
+                wall_height="preprocessed/walls/wall_hts.tif",
+                wall_aspect="preprocessed/walls/wall_aspects.tif",
+                svf_dir="preprocessed/svf",
+            )
+
+            # Minimal case - walls and SVF computed automatically
+            surface = SurfaceData.prepare(dsm="data/dsm.tif", working_dir="cache/")
+
+            # Mixed height conventions
+            surface = SurfaceData.prepare(
+                dsm="data/dsm.tif", working_dir="cache/",
+                cdsm="data/cdsm.tif",
+                tdsm="data/tdsm.tif",
+                cdsm_relative=True,
+                tdsm_relative=False,
+            )
+        """
+        logger.info("Preparing surface data from GeoTIFF files...")
+
+        # Load and validate DSM
+        dsm_arr, dsm_transform, dsm_crs, pixel_size = cls._load_and_validate_dsm(dsm, pixel_size)
+
+        # Load optional terrain rasters
+        terrain_rasters = cls._load_terrain_rasters(cdsm, dem, tdsm, land_cover, trunk_ratio)
+
+        # Load preprocessing data (walls, SVF)
+        working_path = Path(working_dir)
+        preprocess_data = cls._load_preprocessing_data(
+            
wall_height, wall_aspect, svf_dir, working_path, force_recompute, pixel_size=pixel_size + ) + + # Compute extent, validate bbox, and resample all rasters + aligned_rasters = cls._align_rasters( + dsm_arr, + dsm_transform, + dsm_crs, + pixel_size, + terrain_rasters, + preprocess_data, + bbox, + ) + + # Create SurfaceData instance + surface_data = cls._create_surface_instance( + aligned_rasters, + pixel_size, + trunk_ratio, + dsm_relative=dsm_relative, + cdsm_relative=cdsm_relative, + tdsm_relative=tdsm_relative, + ) + + # Validate cached SVF against current inputs (if SVF was loaded) + if preprocess_data["svf_data"] is not None and not force_recompute: + dsm_arr = aligned_rasters["dsm_arr"] + cdsm_arr = aligned_rasters.get("cdsm_arr") + svf_source = preprocess_data.get("svf_source", "none") + + # Resolve the SVF cache directory (pixel-size-keyed or legacy) + svf_base = working_path / "svf" / pixel_size_tag(pixel_size) + if not svf_base.exists(): + svf_base = working_path / "svf" # legacy fallback + + cache_valid = False + if svf_source == "memmap": + # Memmap has cache_meta.json — use hash-based validation + cache_valid = validate_cache(svf_base / "memmap", dsm_arr, pixel_size, cdsm_arr) + elif svf_source == "zip": + # Try metadata first, fall back to shape check + zip_meta_dir = svf_base + cache_valid = validate_cache(zip_meta_dir, dsm_arr, pixel_size, cdsm_arr) + if not cache_valid: + # Legacy zip without metadata — validate by shape only + svf_shape = preprocess_data["svf_data"].svf.shape + cache_valid = svf_shape == dsm_arr.shape + if not cache_valid: + logger.info(f" SVF shape {svf_shape} doesn't match DSM {dsm_arr.shape}") + + if not cache_valid: + logger.info(" → Cache stale, clearing and recomputing SVF...") + clear_stale_cache(svf_base / "memmap") + # Also remove zip/npz/memmaps so stale data doesn't persist + for stale_file in ("svfs.zip", "shadowmats.npz"): + stale_path = svf_base / stale_file + if stale_path.exists(): + stale_path.unlink() + 
stale_shadow_memmaps = svf_base / "shadow_memmaps" + if stale_shadow_memmaps.exists(): + shutil.rmtree(stale_shadow_memmaps, ignore_errors=True) + preprocess_data["svf_data"] = None + preprocess_data["compute_svf"] = True + surface_data.svf = None + + # Compute and cache walls if needed + if preprocess_data["compute_walls"]: + cls._compute_and_cache_walls(surface_data, aligned_rasters, working_path, pixel_size=pixel_size) + + # Compute and cache SVF if needed + if preprocess_data["compute_svf"]: + cls._compute_and_cache_svf(surface_data, aligned_rasters, working_path, trunk_ratio, feedback=feedback) + + # Preprocess layers with relative heights to absolute + needs_preprocess = ( + dsm_relative + or (cdsm_relative and surface_data.cdsm is not None) + or (tdsm_relative and surface_data.tdsm is not None) + ) + if needs_preprocess: + logger.debug(" Preprocessing relative heights → absolute") + surface_data.preprocess() + + # Compute unified valid mask, apply across all layers, crop to valid bbox + surface_data.compute_valid_mask() + surface_data.apply_valid_mask() + surface_data.crop_to_valid_bbox() + surface_data.save_cleaned(working_path) + + logger.info("✓ Surface data prepared successfully") + return surface_data + + @staticmethod + def _load_and_validate_dsm(dsm: str | Path, pixel_size: float | None) -> tuple: + """ + Load DSM raster and validate CRS. + + Args: + dsm: Path to DSM GeoTIFF file. + pixel_size: Optional pixel size in meters. If None, extracted from geotransform. + + Returns: + Tuple of (dsm_array, dsm_transform, dsm_crs, pixel_size). + + Raises: + ValueError: If DSM has no CRS or CRS is not projected. + """ + from .. 
import io + + # Load required DSM + dsm_arr, dsm_transform, dsm_crs, _ = io.load_raster(str(dsm)) + logger.info(f" DSM: {dsm_arr.shape[1]}×{dsm_arr.shape[0]} pixels") + + # Compute pixel size from geotransform if not provided + native_pixel_size = abs(dsm_transform[1]) # X pixel size from DSM + if pixel_size is None: + pixel_size = native_pixel_size + logger.info(f" Extracted pixel size from DSM: {pixel_size:.2f} m") + else: + # Validate against native resolution + if pixel_size < native_pixel_size - 0.01: + raise ValueError( + f"Specified pixel_size ({pixel_size:.2f} m) is finer than the DSM native " + f"resolution ({native_pixel_size:.2f} m). Upsampling creates false precision. " + f"Use pixel_size >= {native_pixel_size:.2f} or omit to use native resolution." + ) + if abs(pixel_size - native_pixel_size) > 0.01: + logger.warning( + f" ⚠ Specified pixel_size ({pixel_size:.2f} m) differs from DSM native " + f"resolution ({native_pixel_size:.2f} m) — all rasters will be resampled" + ) + logger.info(f" Using specified pixel size: {pixel_size:.2f} m") + + # Warn if pixel size is less than 1 meter + if pixel_size < 1.0: + logger.warning( + f" ⚠ Pixel size ({pixel_size:.2f} m) is less than 1 meter - calculations may be slow for large areas" + ) + + # Validate CRS is projected (required for distance calculations) + if dsm_crs is None: + raise ValueError("DSM file has no CRS information. SOLWEIG requires a projected coordinate system.") + + try: + from pyproj import CRS as pyproj_CRS + + crs_obj = pyproj_CRS.from_wkt(dsm_crs) + if not crs_obj.is_projected: + raise ValueError( + f"DSM CRS is geographic (lat/lon): {crs_obj.name}. " + f"SOLWEIG requires a projected coordinate system (e.g., UTM, State Plane) " + f"for accurate distance and area calculations. Please reproject your data." 
+ ) + logger.info(f" CRS validated: {crs_obj.name} (EPSG:{crs_obj.to_epsg() or 'custom'})") + except Exception as e: + logger.warning(f" ⚠ Could not validate CRS: {e}") + + return dsm_arr, dsm_transform, dsm_crs, pixel_size + + @staticmethod + def _load_terrain_rasters( + cdsm: str | Path | None, + dem: str | Path | None, + tdsm: str | Path | None, + land_cover: str | Path | None, + trunk_ratio: float, + ) -> dict: + """ + Load optional terrain rasters (CDSM, DEM, TDSM, land_cover). + + Args: + cdsm: Path to CDSM GeoTIFF file (optional). + dem: Path to DEM GeoTIFF file (optional). + tdsm: Path to TDSM GeoTIFF file (optional). + land_cover: Path to land cover GeoTIFF file (optional). + trunk_ratio: Trunk ratio for auto-generating TDSM from CDSM. + + Returns: + Dictionary with keys: cdsm_arr, cdsm_transform, dem_arr, dem_transform, + tdsm_arr, tdsm_transform, land_cover_arr, land_cover_transform. + """ + from .. import io + + result = {} + + # Load CDSM + if cdsm is not None: + result["cdsm_arr"], result["cdsm_transform"], _, _ = io.load_raster(str(cdsm)) + logger.info(" ✓ Canopy DSM (CDSM) provided") + else: + result["cdsm_arr"], result["cdsm_transform"] = None, None + logger.info(" → No vegetation data - simulation without trees/vegetation") + + # Load DEM + if dem is not None: + result["dem_arr"], result["dem_transform"], _, _ = io.load_raster(str(dem)) + logger.info(" ✓ Ground elevation (DEM) provided") + else: + result["dem_arr"], result["dem_transform"] = None, None + + # Load TDSM + if tdsm is not None: + result["tdsm_arr"], result["tdsm_transform"], _, _ = io.load_raster(str(tdsm)) + logger.info(" ✓ Trunk DSM (TDSM) provided") + elif result["cdsm_arr"] is not None: + result["tdsm_arr"], result["tdsm_transform"] = None, None + logger.info(f" → No TDSM provided - will auto-generate from CDSM (ratio={trunk_ratio})") + else: + result["tdsm_arr"], result["tdsm_transform"] = None, None + + # Load land cover + if land_cover is not None: + result["land_cover_arr"], 
result["land_cover_transform"], _, _ = io.load_raster(str(land_cover)) + logger.info(" ✓ Land cover provided (albedo/emissivity derived from classification)") + else: + result["land_cover_arr"], result["land_cover_transform"] = None, None + + return result + + @staticmethod + def _load_preprocessing_data( + wall_height: str | Path | None, + wall_aspect: str | Path | None, + svf_dir: str | Path | None, + working_path: Path, + force_recompute: bool, + pixel_size: float = 1.0, + ) -> dict: + """ + Load preprocessing data (walls, SVF) with auto-discovery. + + Args: + wall_height: Path to wall height GeoTIFF file (optional). + wall_aspect: Path to wall aspect GeoTIFF file (optional). + svf_dir: Directory containing SVF preprocessing files (optional). + working_path: Working directory for caching. + force_recompute: If True, skip cache and recompute. + pixel_size: Pixel size in metres (used for pixel-size-keyed cache paths). + + Returns: + Dictionary with keys: wall_height_arr, wall_height_transform, wall_aspect_arr, + wall_aspect_transform, svf_data, shadow_data, compute_walls, compute_svf. + """ + from .. 
import io + from .precomputed import ShadowArrays, SvfArrays + + logger.info("Checking for preprocessing data...") + px_tag = pixel_size_tag(pixel_size) + + result = { + "wall_height_arr": None, + "wall_height_transform": None, + "wall_aspect_arr": None, + "wall_aspect_transform": None, + "svf_data": None, + "svf_source": "none", # "memmap", "zip", or "none" + "shadow_data": None, + "compute_walls": False, + "compute_svf": False, + } + + # Load walls with auto-discovery + if wall_height is not None and wall_aspect is not None: + # Explicit paths provided - use them + result["wall_height_arr"], result["wall_height_transform"], _, _ = io.load_raster(str(wall_height)) + result["wall_aspect_arr"], result["wall_aspect_transform"], _, _ = io.load_raster(str(wall_aspect)) + logger.info(" ✓ Existing walls found (will use precomputed)") + + elif wall_height is not None or wall_aspect is not None: + logger.warning(" ⚠ Only one wall file provided - both wall_height and wall_aspect required") + logger.info(" → Walls will be computed from DSM and cached") + result["compute_walls"] = True + + else: + # Try to auto-discover walls in working_dir (unless force_recompute) + if force_recompute: + logger.info(" → force_recompute=True - will recompute walls from DSM and cache") + result["compute_walls"] = True + else: + walls_cache_dir = working_path / "walls" / px_tag + wall_hts_path = walls_cache_dir / "wall_hts.tif" + wall_aspects_path = walls_cache_dir / "wall_aspects.tif" + + # Legacy fallback: try flat working_dir/walls/ if keyed dir absent + if not wall_hts_path.exists(): + legacy_dir = working_path / "walls" + legacy_hts = legacy_dir / "wall_hts.tif" + legacy_asp = legacy_dir / "wall_aspects.tif" + if legacy_hts.exists() and legacy_asp.exists(): + logger.info( + f" ⚠ Legacy wall cache at {legacy_dir} — future runs will use pixel-size-keyed path" + ) + walls_cache_dir = legacy_dir + wall_hts_path = legacy_hts + wall_aspects_path = legacy_asp + + if wall_hts_path.exists() and 
wall_aspects_path.exists(): + # Files exist - load them + result["wall_height_arr"], result["wall_height_transform"], _, _ = io.load_raster( + str(wall_hts_path) + ) + result["wall_aspect_arr"], result["wall_aspect_transform"], _, _ = io.load_raster( + str(wall_aspects_path) + ) + logger.info(f" ✓ Walls found in working_dir: {walls_cache_dir}") + else: + # No cached walls - will compute and cache + logger.info(" → No walls found in working_dir - will compute from DSM and cache") + result["compute_walls"] = True + + # Helper to load SVF, preferring memmap for efficiency. + # Returns (SvfArrays | None, source: str) where source is "memmap", "zip", or "none". + def load_svf_from_dir(svf_path: Path) -> tuple[SvfArrays | None, str]: + memmap_dir = svf_path / "memmap" + svf_zip_path = svf_path / "svfs.zip" + + # Prefer memmap (more efficient for large rasters) + if memmap_dir.exists() and (memmap_dir / "svf.npy").exists(): + svf_data = SvfArrays.from_memmap(memmap_dir) + logger.info(" ✓ SVF loaded from memmap (memory-efficient)") + return svf_data, "memmap" + elif svf_zip_path.exists(): + svf_data = SvfArrays.from_zip(str(svf_zip_path)) + logger.info(" ✓ SVF loaded from zip") + return svf_data, "zip" + return None, "none" + + # Helper to load shadow matrices, preferring NPZ then memmap directory. + # Returns (ShadowArrays | None, source: str) where source is "npz", "memmap", or "none". 
+ def load_shadow_from_dir(base_path: Path) -> tuple[ShadowArrays | None, str]: + shadow_npz_path = base_path / "shadowmats.npz" + if shadow_npz_path.exists(): + shadow_data = ShadowArrays.from_npz(str(shadow_npz_path)) + logger.info(" ✓ Shadow matrices loaded from npz") + return shadow_data, "npz" + + shadow_mm_dir = base_path / "shadow_memmaps" + if shadow_mm_dir.exists() and (shadow_mm_dir / "metadata.json").exists(): + shadow_data = ShadowArrays.from_memmap(shadow_mm_dir) + logger.info(" ✓ Shadow matrices loaded from memmap cache") + return shadow_data, "memmap" + + return None, "none" + + # Load SVF with auto-discovery + if svf_dir is not None: + # Explicit SVF directory provided - use it + svf_path = Path(svf_dir) + + svf_data, svf_source = load_svf_from_dir(svf_path) + if svf_data is not None: + result["svf_data"] = svf_data + result["svf_source"] = svf_source + logger.info(" ✓ Existing SVF found (will use precomputed)") + + # First try direct shadow files in svf_dir + shadow_data, _ = load_shadow_from_dir(svf_path) + if shadow_data is None: + # Fallback for prepared surface roots: svf// + tagged_cache = svf_path / "svf" / px_tag + shadow_data, _ = load_shadow_from_dir(tagged_cache) + if shadow_data is not None: + result["shadow_data"] = shadow_data + logger.info(" ✓ Existing shadow matrices found (anisotropic sky enabled)") + else: + logger.info(f" → SVF directory provided but no SVF files found: {svf_path}") + logger.info(" → SVF will be computed and cached") + result["compute_svf"] = True + + else: + # Try to auto-discover SVF in working_dir (unless force_recompute) + if force_recompute: + logger.info(" → force_recompute=True - will recompute SVF and cache") + result["compute_svf"] = True + else: + svf_cache_dir = working_path / "svf" / px_tag + + # Legacy fallback: try flat working_dir/svf/ if keyed dir absent + if not svf_cache_dir.exists(): + legacy_svf_dir = working_path / "svf" + if (legacy_svf_dir / "memmap" / "svf.npy").exists() or (legacy_svf_dir 
/ "svfs.zip").exists(): + logger.info( + f" ⚠ Legacy SVF cache at {legacy_svf_dir} — future runs will use pixel-size-keyed path" + ) + svf_cache_dir = legacy_svf_dir + + svf_data, svf_source = load_svf_from_dir(svf_cache_dir) + if svf_data is not None: + result["svf_data"] = svf_data + result["svf_source"] = svf_source + logger.info(f" ✓ SVF found in working_dir: {svf_cache_dir}") + + shadow_data, _ = load_shadow_from_dir(svf_cache_dir) + if shadow_data is not None: + result["shadow_data"] = shadow_data + logger.info(" ✓ Shadow matrices found (anisotropic sky enabled)") + else: + # No cached SVF - will compute and cache + logger.info(" → No SVF found in working_dir - will compute and cache") + result["compute_svf"] = True + + return result + + @staticmethod + def _align_rasters( + dsm_arr, + dsm_transform, + dsm_crs, + pixel_size: float, + terrain_rasters: dict, + preprocess_data: dict, + bbox: list[float] | None, + ) -> dict: + """ + Compute extent, validate bbox, and resample all rasters to common grid. + + Args: + dsm_arr: DSM array. + dsm_transform: DSM geotransform. + dsm_crs: DSM CRS. + pixel_size: Target pixel size in meters. + terrain_rasters: Dictionary with terrain raster data. + preprocess_data: Dictionary with preprocessing data. + bbox: Optional explicit bounding box. + + Returns: + Dictionary with all aligned rasters and metadata. 
+ """ + logger.info("Computing spatial extent and resolution...") + + # Extract bounds from all loaded rasters + bounds_list = [extract_bounds(dsm_transform, dsm_arr.shape)] + + if terrain_rasters["cdsm_arr"] is not None and terrain_rasters["cdsm_transform"] is not None: + bounds_list.append(extract_bounds(terrain_rasters["cdsm_transform"], terrain_rasters["cdsm_arr"].shape)) + if terrain_rasters["dem_arr"] is not None and terrain_rasters["dem_transform"] is not None: + bounds_list.append(extract_bounds(terrain_rasters["dem_transform"], terrain_rasters["dem_arr"].shape)) + if terrain_rasters["tdsm_arr"] is not None and terrain_rasters["tdsm_transform"] is not None: + bounds_list.append(extract_bounds(terrain_rasters["tdsm_transform"], terrain_rasters["tdsm_arr"].shape)) + if terrain_rasters["land_cover_arr"] is not None and terrain_rasters["land_cover_transform"] is not None: + bounds_list.append( + extract_bounds(terrain_rasters["land_cover_transform"], terrain_rasters["land_cover_arr"].shape) + ) + if preprocess_data["wall_height_arr"] is not None and preprocess_data["wall_height_transform"] is not None: + bounds_list.append( + extract_bounds(preprocess_data["wall_height_transform"], preprocess_data["wall_height_arr"].shape) + ) + if preprocess_data["wall_aspect_arr"] is not None and preprocess_data["wall_aspect_transform"] is not None: + bounds_list.append( + extract_bounds(preprocess_data["wall_aspect_transform"], preprocess_data["wall_aspect_arr"].shape) + ) + + # Determine target bounding box + if bbox is not None: + # User provided explicit bbox - validate it's within intersection + computed_intersection = intersect_bounds(bounds_list) + user_minx, user_miny, user_maxx, user_maxy = bbox + int_minx, int_miny, int_maxx, int_maxy = computed_intersection + + # Check if user bbox is within or equal to intersection + if ( + user_minx < int_minx - 1e-6 + or user_maxx > int_maxx + 1e-6 + or user_miny < int_miny - 1e-6 + or user_maxy > int_maxy + 1e-6 + ): + raise 
ValueError( + f"Specified bbox {bbox} extends beyond the intersection of input rasters " + f"{computed_intersection}. Bbox must be within or equal to the intersection." + ) + + target_bbox = bbox + logger.info(f" Using user-specified extent: {target_bbox}") + else: + # Auto-compute intersection + target_bbox = intersect_bounds(bounds_list) + logger.info(f" Auto-computed extent from raster intersection: {target_bbox}") + + # Expected target dimensions (same formula as resample_to_grid) + expected_h = int(np.round((target_bbox[3] - target_bbox[1]) / pixel_size)) + expected_w = int(np.round((target_bbox[2] - target_bbox[0]) / pixel_size)) + expected_shape = (expected_h, expected_w) + + def _layer_needs_resample(arr, transform): + """Check if a single layer needs resampling (bounds, pixel size, or shape mismatch).""" + layer_bounds = extract_bounds(transform, arr.shape) + layer_px = abs(transform[1]) if isinstance(transform, list) else abs(transform.a) + return ( + abs(layer_bounds[0] - target_bbox[0]) > 1e-6 + or abs(layer_bounds[1] - target_bbox[1]) > 1e-6 + or abs(layer_bounds[2] - target_bbox[2]) > 1e-6 + or abs(layer_bounds[3] - target_bbox[3]) > 1e-6 + or abs(layer_px - pixel_size) > 1e-6 + or arr.shape != expected_shape + ) + + resampled_any = False + + # Resample DSM if needed + if _layer_needs_resample(dsm_arr, dsm_transform): + dsm_arr, dsm_transform = resample_to_grid( + dsm_arr, dsm_transform, target_bbox, pixel_size, method="bilinear", src_crs=dsm_crs + ) + resampled_any = True + + # Resample optional terrain rasters independently + for key, method in [("cdsm", "bilinear"), ("dem", "bilinear"), ("tdsm", "bilinear"), ("land_cover", "nearest")]: + arr_key, tf_key = f"{key}_arr", f"{key}_transform" + if ( + terrain_rasters[arr_key] is not None + and terrain_rasters[tf_key] is not None + and _layer_needs_resample(terrain_rasters[arr_key], terrain_rasters[tf_key]) + ): + terrain_rasters[arr_key], _ = resample_to_grid( + terrain_rasters[arr_key], + 
terrain_rasters[tf_key], + target_bbox, + pixel_size, + method=method, + src_crs=dsm_crs, + ) + resampled_any = True + + # Resample preprocessing data independently + for key in ["wall_height", "wall_aspect"]: + arr_key, tf_key = f"{key}_arr", f"{key}_transform" + if ( + preprocess_data[arr_key] is not None + and preprocess_data[tf_key] is not None + and _layer_needs_resample(preprocess_data[arr_key], preprocess_data[tf_key]) + ): + preprocess_data[arr_key], _ = resample_to_grid( + preprocess_data[arr_key], + preprocess_data[tf_key], + target_bbox, + pixel_size, + method="bilinear", + src_crs=dsm_crs, + ) + resampled_any = True + + # Note: SVF resampling is more complex (multiple arrays) - handled separately if needed + if preprocess_data["svf_data"] is not None and preprocess_data["svf_data"].svf.shape != dsm_arr.shape: + logger.warning( + f" ⚠ SVF shape {preprocess_data['svf_data'].svf.shape} doesn't match target shape " + f"{dsm_arr.shape} - SVF resampling not yet implemented. " + f"SVF cache will be dropped; recompute via SurfaceData.prepare() or compute_svf()." 
+ ) + preprocess_data["svf_data"] = None + preprocess_data["shadow_data"] = None + + if resampled_any: + logger.info(f" ✓ Resampled to {dsm_arr.shape[1]}×{dsm_arr.shape[0]} pixels") + else: + logger.info(" ✓ No resampling needed - all rasters match target grid") + + # Return all aligned data + return { + "dsm_arr": dsm_arr, + "dsm_transform": dsm_transform, + "dsm_crs": dsm_crs, + "pixel_size": pixel_size, + "cdsm_arr": terrain_rasters["cdsm_arr"], + "dem_arr": terrain_rasters["dem_arr"], + "tdsm_arr": terrain_rasters["tdsm_arr"], + "land_cover_arr": terrain_rasters["land_cover_arr"], + "wall_height_arr": preprocess_data["wall_height_arr"], + "wall_aspect_arr": preprocess_data["wall_aspect_arr"], + "svf_data": preprocess_data["svf_data"], + "shadow_data": preprocess_data["shadow_data"], + } + + @classmethod + def _create_surface_instance( + cls, + aligned_rasters: dict, + pixel_size: float, + trunk_ratio: float, + *, + dsm_relative: bool = False, + cdsm_relative: bool = True, + tdsm_relative: bool = True, + ) -> SurfaceData: + """ + Create SurfaceData instance from aligned rasters. + + Args: + aligned_rasters: Dictionary with all aligned rasters and metadata. + pixel_size: Pixel size in meters. + trunk_ratio: Trunk ratio for auto-generating TDSM from CDSM. + dsm_relative: Whether DSM contains relative heights. + cdsm_relative: Whether CDSM contains relative heights. + tdsm_relative: Whether TDSM contains relative heights. + + Returns: + SurfaceData instance with loaded terrain and preprocessing data. 
+ """ + from affine import Affine as AffineClass + + # Create SurfaceData instance + surface_data = cls( + dsm=aligned_rasters["dsm_arr"], + cdsm=aligned_rasters["cdsm_arr"], + dem=aligned_rasters["dem_arr"], + tdsm=aligned_rasters["tdsm_arr"], + land_cover=aligned_rasters["land_cover_arr"], + wall_height=aligned_rasters["wall_height_arr"], + wall_aspect=aligned_rasters["wall_aspect_arr"], + svf=aligned_rasters["svf_data"], + shadow_matrices=aligned_rasters["shadow_data"], + pixel_size=pixel_size, + trunk_ratio=trunk_ratio, + dsm_relative=dsm_relative, + cdsm_relative=cdsm_relative, + tdsm_relative=tdsm_relative, + ) + + # Store geotransform and CRS for later export + dsm_transform = aligned_rasters["dsm_transform"] + if isinstance(dsm_transform, AffineClass): + surface_data._geotransform = list(dsm_transform.to_gdal()) + else: + surface_data._geotransform = dsm_transform + surface_data._crs_wkt = aligned_rasters["dsm_crs"] + + # Log what was loaded + layers_loaded = ["DSM"] + if aligned_rasters["cdsm_arr"] is not None: + layers_loaded.append("CDSM") + if aligned_rasters["dem_arr"] is not None: + layers_loaded.append("DEM") + if aligned_rasters["tdsm_arr"] is not None: + layers_loaded.append("TDSM") + if aligned_rasters["land_cover_arr"] is not None: + layers_loaded.append("land_cover") + logger.info(f" Layers loaded: {', '.join(layers_loaded)}") + + return surface_data + + @staticmethod + def _compute_and_cache_walls( + surface_data: SurfaceData, + aligned_rasters: dict, + working_path: Path, + *, + pixel_size: float = 1.0, + ) -> None: + """ + Compute wall heights/aspects from DSM and cache to working_dir. + + Args: + surface_data: SurfaceData instance to update with computed walls. + aligned_rasters: Dictionary with aligned raster data. + working_path: Working directory for caching. + pixel_size: Pixel size in metres (for pixel-size-keyed cache path). 
+ """ + logger.info("Computing walls from DSM and caching to working_dir...") + walls_cache_dir = working_path / "walls" / pixel_size_tag(pixel_size) + + # Save resampled DSM to working_dir so wall computation can use it + resampled_dir = working_path / "resampled" + resampled_dir.mkdir(parents=True, exist_ok=True) + resampled_dsm_path = resampled_dir / "dsm_resampled.tif" + + dsm_transform = aligned_rasters["dsm_transform"] + io.save_raster( + str(resampled_dsm_path), + aligned_rasters["dsm_arr"], + list(dsm_transform.to_gdal()) if isinstance(dsm_transform, AffineClass) else dsm_transform, + aligned_rasters["dsm_crs"], + ) + + # Generate walls using the walls module + walls_module.generate_wall_hts( + dsm_path=str(resampled_dsm_path), + bbox=None, # Already resampled to target extent + out_dir=str(walls_cache_dir), + ) + + # Load the generated walls back into surface_data + wall_hts_path = walls_cache_dir / "wall_hts.tif" + wall_aspects_path = walls_cache_dir / "wall_aspects.tif" + + if wall_hts_path.exists() and wall_aspects_path.exists(): + wall_height_arr, _, _, _ = io.load_raster(str(wall_hts_path)) + wall_aspect_arr, _, _, _ = io.load_raster(str(wall_aspects_path)) + surface_data.wall_height = wall_height_arr + surface_data.wall_aspect = wall_aspect_arr + + # Save cache metadata for wall validation on future runs + dsm_arr = aligned_rasters["dsm_arr"] + cdsm_arr = aligned_rasters.get("cdsm_arr") + wall_pixel_size = aligned_rasters.get("pixel_size", pixel_size) + metadata = CacheMetadata.from_arrays(dsm_arr, wall_pixel_size, cdsm_arr) + metadata.save(walls_cache_dir) + + logger.info(f" ✓ Walls computed and cached to {walls_cache_dir}") + else: + logger.warning(" ⚠ Wall generation completed but files not found") + + @staticmethod + def _compute_and_cache_svf( + surface_data: SurfaceData, + aligned_rasters: dict, + working_path: Path, + trunk_ratio: float, + on_tile_complete: Callable | None = None, + feedback: Any = None, + progress_range: tuple[float, float] | 
None = None, + ) -> None: + """ + Compute SVF from DSM/CDSM/TDSM and cache to working_dir. + + Automatically tiles the computation for large grids to avoid GPU + buffer size limits. + + Saves cache artifacts: + - memmap/ for fast reload in Python API + - svfs.zip for PrecomputedData.prepare() compatibility + - shadowmats.npz for anisotropic sky model when export size is reasonable + (otherwise shadow_memmaps/ is used directly) + + Args: + surface_data: SurfaceData instance to update with computed SVF. + aligned_rasters: Dictionary with aligned raster data. + working_path: Working directory for caching. + trunk_ratio: Trunk ratio for SVF computation. + on_tile_complete: Optional callback(tile_idx, n_tiles) called after each tile + (only invoked when tiling is used for large grids). + feedback: Optional QGIS QgsProcessingFeedback for progress/cancellation. + progress_range: Optional (start_pct, end_pct) for QGIS progress sub-range. + """ + + dsm_arr = aligned_rasters["dsm_arr"] + cdsm_arr = aligned_rasters["cdsm_arr"] + tdsm_arr = aligned_rasters["tdsm_arr"] + pixel_size = aligned_rasters.get("pixel_size", 1.0) + + rows, cols = dsm_arr.shape + use_veg = cdsm_arr is not None + if use_veg: + logger.info("Computing SVF from DSM/CDSM/TDSM...") + else: + logger.info("Computing SVF from DSM...") + + # Prepare vegetation arrays (Rust requires all three or none) + if use_veg: + cdsm_for_svf = cdsm_arr.astype(np.float32) + # Auto-generate TDSM if not provided + if tdsm_arr is not None: + tdsm_for_svf = tdsm_arr.astype(np.float32) + else: + tdsm_for_svf = (cdsm_arr * trunk_ratio).astype(np.float32) + else: + cdsm_for_svf = np.zeros_like(dsm_arr, dtype=np.float32) + tdsm_for_svf = np.zeros_like(dsm_arr, dtype=np.float32) + + # Height for shadow reach/buffer should be local relief, not absolute elevation. + max_height = _max_shadow_height(dsm_arr, cdsm_arr, use_veg=use_veg) + + # Auto-detect whether tiling is needed based on real GPU/RAM limits. 
+ from ..tiling import compute_max_tile_pixels + + _max_pixels = compute_max_tile_pixels(context="svf") + n_pixels = rows * cols + needs_tiling = n_pixels > _max_pixels + compress_exports = _should_compress_svf_exports(n_pixels) + export_shadow_npz = _should_export_shadow_npz(n_pixels) + if not compress_exports: + logger.info( + " Large SVF export detected; using uncompressed cache files to reduce post-GPU CPU tail " + "(set SOLWEIG_COMPRESS_MAX_PIXELS to tune)" + ) + if not export_shadow_npz: + logger.info( + " Large shadow cache detected; skipping shadowmats.npz export and keeping shadow_memmaps " + "(set SOLWEIG_FORCE_SHADOW_NPZ=1 to force NPZ export)" + ) + + svf_cache_dir = working_path / "svf" / pixel_size_tag(pixel_size) + svf_cache_dir.mkdir(parents=True, exist_ok=True) + metadata = CacheMetadata.from_arrays(dsm_arr, pixel_size, cdsm_arr) + + if needs_tiling: + svf_data, (shmat_mm, vegshmat_mm, vbshmat_mm) = SurfaceData._compute_svf_tiled( + dsm_arr.astype(np.float32), + cdsm_for_svf, + tdsm_for_svf, + pixel_size, + use_veg, + max_height, + svf_cache_dir, + on_tile_complete=on_tile_complete, + feedback=feedback, + progress_range=progress_range, + ) + if svf_data is None: + raise RuntimeError("SVF tiled computation returned None") + n_patches = 153 # patch_option=2 + + # Cache SVF arrays + if feedback is not None and hasattr(feedback, "setProgressText"): + feedback.setProgressText("Finalizing SVF cache...") + memmap_dir = svf_cache_dir / "memmap" + svf_data.to_memmap(memmap_dir, metadata=metadata) + _save_svfs_zip(svf_data, svf_cache_dir, aligned_rasters, compress=compress_exports) + metadata.save(svf_cache_dir) # also at svf dir level for zip validation + + # Save shadow matrices as npz for compatibility when affordable. + # For very large rasters, keep shadow_memmaps and skip expensive repacking. 
+ if export_shadow_npz: + if feedback is not None and hasattr(feedback, "setProgressText"): + feedback.setProgressText("Saving shadow matrices cache...") + shadow_path = svf_cache_dir / "shadowmats.npz" + save_fn = np.savez_compressed if compress_exports else np.savez + save_fn( + str(shadow_path), + shadowmat=np.asarray(shmat_mm), + vegshadowmat=np.asarray(vegshmat_mm), + vbshmat=np.asarray(vbshmat_mm), + patch_count=np.array(n_patches), + ) + mode = "compressed" if compress_exports else "uncompressed" + logger.info(f" ✓ Shadow matrices saved as {shadow_path} ({mode})") + else: + shadow_path = svf_cache_dir / "shadowmats.npz" + if shadow_path.exists(): + shadow_path.unlink() + logger.info(f" ✓ Shadow matrices cached as memmaps in {svf_cache_dir / 'shadow_memmaps'}") + + surface_data.svf = svf_data + # Shadow matrices assembled from tiled memmaps (bitpacked uint8, on disk) + surface_data.shadow_matrices = ShadowArrays( + _shmat_u8=shmat_mm, + _vegshmat_u8=vegshmat_mm, + _vbshmat_u8=vbshmat_mm, + _n_patches=n_patches, + ) + logger.info(f" ✓ SVF computed (tiled) and cached to {svf_cache_dir}") + else: + # Single-shot computation for grids that fit in GPU memory. + # Use SkyviewRunner with threading + polling for progress and cancel. 
+ import threading + + from ..progress import ProgressReporter + + n_patches = 153 # patch_option=2 + + runner = skyview.SkyviewRunner() + result_box: list = [None] + error_box: list = [None] + + def _run_svf(): + try: + result_box[0] = runner.calculate_svf( + dsm_arr.astype(np.float32), + cdsm_for_svf, + tdsm_for_svf, + pixel_size, + use_veg, + max_height, + 2, # patch_option + 3.0, # min_sun_elev_deg + ) + except BaseException as e: + error_box[0] = e + + thread = threading.Thread(target=_run_svf, daemon=True) + thread.start() + + # Poll progress (153 patches) + pbar = ProgressReporter( + total=n_patches, + desc="Computing Sky View Factor", + feedback=feedback, + progress_range=progress_range, + ) + last = 0 + while thread.is_alive(): + thread.join(timeout=0.05) + done = runner.progress() + if done > last: + pbar.update(done - last) + last = done + # Check QGIS cancellation + if feedback is not None and hasattr(feedback, "isCanceled") and feedback.isCanceled(): + runner.cancel() + thread.join(timeout=5.0) + pbar.close() + return + if last < n_patches: + pbar.update(n_patches - last) + pbar.close() + + thread.join() + if error_box[0] is not None: + raise RuntimeError(f"SVF computation failed: {error_box[0]}") from error_box[0] + svf_result = result_box[0] + if svf_result is None: + raise RuntimeError("SVF computation returned None (skyview.calculate_svf produced no result)") + + ones = np.ones_like(dsm_arr, dtype=np.float32) + + svf_data = SvfArrays( + svf=np.array(svf_result.svf), + svf_north=np.array(svf_result.svf_north), + svf_east=np.array(svf_result.svf_east), + svf_south=np.array(svf_result.svf_south), + svf_west=np.array(svf_result.svf_west), + svf_veg=np.array(svf_result.svf_veg) if use_veg else ones.copy(), + svf_veg_north=np.array(svf_result.svf_veg_north) if use_veg else ones.copy(), + svf_veg_east=np.array(svf_result.svf_veg_east) if use_veg else ones.copy(), + svf_veg_south=np.array(svf_result.svf_veg_south) if use_veg else ones.copy(), + 
svf_veg_west=np.array(svf_result.svf_veg_west) if use_veg else ones.copy(), + svf_aveg=np.array(svf_result.svf_veg_blocks_bldg_sh) if use_veg else ones.copy(), + svf_aveg_north=np.array(svf_result.svf_veg_blocks_bldg_sh_north) if use_veg else ones.copy(), + svf_aveg_east=np.array(svf_result.svf_veg_blocks_bldg_sh_east) if use_veg else ones.copy(), + svf_aveg_south=np.array(svf_result.svf_veg_blocks_bldg_sh_south) if use_veg else ones.copy(), + svf_aveg_west=np.array(svf_result.svf_veg_blocks_bldg_sh_west) if use_veg else ones.copy(), + ) + + # Cache SVF arrays + memmap_dir = svf_cache_dir / "memmap" + svf_data.to_memmap(memmap_dir, metadata=metadata) + _save_svfs_zip(svf_data, svf_cache_dir, aligned_rasters, compress=compress_exports) + metadata.save(svf_cache_dir) # also at svf dir level for zip validation + + # Save shadow matrices (only available in non-tiled mode) + _save_shadow_matrices(svf_result, svf_cache_dir, compress=compress_exports) + + surface_data.svf = svf_data + + # Shadow matrices are bitpacked uint8 from Rust + surface_data.shadow_matrices = ShadowArrays( + _shmat_u8=np.array(svf_result.bldg_sh_matrix), + _vegshmat_u8=np.array(svf_result.veg_sh_matrix), + _vbshmat_u8=np.array(svf_result.veg_blocks_bldg_sh_matrix), + _n_patches=n_patches, + ) + + logger.info(f" ✓ SVF computed and cached to {svf_cache_dir}") + + @staticmethod + def _compute_svf_tiled( + dsm_f32: np.ndarray, + cdsm_f32: np.ndarray, + tdsm_f32: np.ndarray, + pixel_size: float, + use_veg: bool, + max_height: float, + working_path: Path, + on_tile_complete: Callable | None = None, + feedback: Any = None, + progress_range: tuple[float, float] | None = None, + ) -> tuple[SvfArrays, tuple[np.ndarray, np.ndarray, np.ndarray]]: + """ + Compute SVF using tiled processing for large grids. + + Automatically determines the largest safe tile size from the GPU + buffer limit, divides the grid into overlapping tiles, computes + SVF per tile, and stitches the core regions into full-size arrays. 
+ + Shadow matrices are assembled into memory-mapped bitpacked uint8 files to + avoid holding the full 3D arrays in RAM. + + Args: + dsm_f32: DSM array (float32). + cdsm_f32: Canopy DSM array (float32, zeros if no veg). + tdsm_f32: Trunk DSM array (float32, zeros if no veg). + pixel_size: Pixel size in meters. + use_veg: Whether vegetation is present. + max_height: Maximum height in the DSM (for buffer calculation). + working_path: Directory for memmap files. + on_tile_complete: Optional callback(tile_idx, n_tiles) called after each tile. + + Returns: + Tuple of (SvfArrays, (shmat_mm, vegshmat_mm, vbshmat_mm)) + where the shadow matrix memmaps are bitpacked uint8 (rows, cols, n_pack). + """ + from ..progress import ProgressReporter + from ..tiling import calculate_buffer_distance, generate_tiles, validate_tile_size + + rows, cols = dsm_f32.shape + + buffer_m = calculate_buffer_distance(max_height) + buffer_pixels = int(np.ceil(buffer_m / pixel_size)) + + # Compute the largest safe tile size from real GPU/RAM limits. + # The full tile (core + 2*buffer) must fit, so subtract buffer from max side. 
+ from ..tiling import MIN_TILE_SIZE, compute_max_tile_side + + max_full_side = compute_max_tile_side(context="svf") + tile_size = max(MIN_TILE_SIZE, max_full_side - 2 * buffer_pixels) + + adjusted_tile_size, warning = validate_tile_size(tile_size, buffer_pixels, pixel_size, context="svf") + if warning: + logger.warning(warning) + + tiles = generate_tiles(rows, cols, adjusted_tile_size, buffer_pixels) + n_tiles = len(tiles) + + # Determine patch count from a small probe (patch_option=2 → 153 patches) + n_patches = 153 + + logger.info( + f" Tiled SVF: {rows}x{cols} raster, {n_tiles} tiles, " + f"tile_size={adjusted_tile_size}, buffer={buffer_m:.0f}m ({buffer_pixels}px)" + ) + + # SVF field names on the Rust result object + svf_fields = ["svf", "svf_north", "svf_east", "svf_south", "svf_west"] + veg_fields = [ + "svf_veg", + "svf_veg_north", + "svf_veg_east", + "svf_veg_south", + "svf_veg_west", + "svf_veg_blocks_bldg_sh", + "svf_veg_blocks_bldg_sh_north", + "svf_veg_blocks_bldg_sh_east", + "svf_veg_blocks_bldg_sh_south", + "svf_veg_blocks_bldg_sh_west", + ] + all_fields = svf_fields + veg_fields if use_veg else svf_fields + + # Pre-allocate output arrays as memmaps on disk to avoid massive RAM + # use for very large rasters (e.g. >100M pixels). 
+ outputs: dict[str, np.ndarray] = {} + svf_memmap_dir = working_path / "svf_memmaps" + svf_memmap_dir.mkdir(parents=True, exist_ok=True) + for name in all_fields: + mm = np.memmap( + svf_memmap_dir / f"{name}.dat", + dtype=np.float32, + mode="w+", + shape=(rows, cols), + ) + mm[:] = 1.0 # default for untouched pixels / masked edges + outputs[name] = mm + + # Pre-allocate memmap files for shadow matrices (bitpacked uint8, on disk) + memmap_dir = working_path / "shadow_memmaps" + memmap_dir.mkdir(parents=True, exist_ok=True) + n_pack = (n_patches + 7) // 8 # ceil(153/8) = 20 + sh_shape = (rows, cols, n_pack) + shadow_meta = { + "shape": [rows, cols, n_pack], + "patch_count": n_patches, + "shadowmat_file": "shmat.dat", + "vegshadowmat_file": "vegshmat.dat", + "vbshmat_file": "vbshmat.dat", + } + with (memmap_dir / "metadata.json").open("w", encoding="utf-8") as f: + json.dump(shadow_meta, f, indent=2) + shmat_mm = np.memmap( + memmap_dir / "shmat.dat", + dtype=np.uint8, + mode="w+", + shape=sh_shape, + ) + vegshmat_mm = np.memmap( + memmap_dir / "vegshmat.dat", + dtype=np.uint8, + mode="w+", + shape=sh_shape, + ) + vbshmat_mm = np.memmap( + memmap_dir / "vbshmat.dat", + dtype=np.uint8, + mode="w+", + shape=sh_shape, + ) + if not use_veg: + vegshmat_mm[:] = 0 + vbshmat_mm[:] = 0 + + # Progress: n_tiles × n_patches gives fine-grained per-patch visibility. + pbar = ProgressReporter( + total=n_tiles * n_patches, + desc="Computing SVF (tiled)", + feedback=feedback, + progress_range=progress_range, + ) + + # Pipeline: overlap GPU computation of tile N+1 with CPU + # result-copying of tile N. SkyviewRunner.calculate_svf releases the + # GIL inside py.allow_threads(), so a background thread can drive the + # GPU while the main thread polls progress and does numpy bookkeeping. 
+ import threading + + def _submit_tile(tile): + """Prepare inputs and run SVF on background thread with progress.""" + rs = tile.read_slice + cs = tile.core_slice + core_row_start = int(cs[0].start or 0) + core_row_end = int(cs[0].stop or 0) + core_col_start = int(cs[1].start or 0) + core_col_end = int(cs[1].stop or 0) + td = dsm_f32[rs].copy() + tc = cdsm_f32[rs].copy() + tt = tdsm_f32[rs].copy() + mh = _max_shadow_height(td, tc, use_veg=use_veg) + runner = skyview.SkyviewRunner() + box = [None, None] # [result, error] + core_only = hasattr(runner, "calculate_svf_core") + + def _run(): + try: + if core_only: + box[0] = runner.calculate_svf_core( + td, + tc, + tt, + pixel_size, + use_veg, + mh, + 2, # patch_option + 3.0, # min_sun_elev_deg + core_row_start, + core_row_end, + core_col_start, + core_col_end, + ) + else: + box[0] = runner.calculate_svf( + td, + tc, + tt, + pixel_size, + use_veg, + mh, + 2, # patch_option + 3.0, # min_sun_elev_deg + ) + except BaseException as e: + box[1] = e + + t = threading.Thread(target=_run, daemon=True) + t.start() + return t, box, runner, core_only + + def _process_result(tile_result, tile, core_only): + """Copy SVF + shadow matrices from a completed tile.""" + cs = tile.core_slice + ws = tile.write_slice + + # Avoid redundant array copies: Rust returns numpy-backed arrays. 
+ svf_arrays = {name: np.asarray(getattr(tile_result, name)) for name in svf_fields} + for name in svf_fields: + outputs[name][ws] = svf_arrays[name] if core_only else svf_arrays[name][cs] + if use_veg: + veg_arrays = {name: np.asarray(getattr(tile_result, name)) for name in veg_fields} + for name in veg_fields: + outputs[name][ws] = veg_arrays[name] if core_only else veg_arrays[name][cs] + # Shadow matrices are already bitpacked uint8 from Rust + bldg = np.asarray(tile_result.bldg_sh_matrix) + shmat_mm[ws] = bldg if core_only else bldg[cs] + if use_veg: + veg = np.asarray(tile_result.veg_sh_matrix) + vb = np.asarray(tile_result.veg_blocks_bldg_sh_matrix) + vegshmat_mm[ws] = veg if core_only else veg[cs] + vbshmat_mm[ws] = vb if core_only else vb[cs] + + # Kick off first tile + thread, box, runner, core_only = _submit_tile(tiles[0]) + + try: + for tile_idx in range(n_tiles): + pbar.set_description(f"SVF tile {tile_idx + 1}/{n_tiles}") + pbar.set_text(f"Computing SVF — Tile {tile_idx + 1}/{n_tiles}") + + # Poll per-patch progress while tile runs + last_patch = 0 + cancelled = False + while thread.is_alive(): + thread.join(timeout=0.05) + done = runner.progress() + if done > last_patch: + pbar.update(done - last_patch) + last_patch = done + # Check QGIS cancellation within tile + if pbar.is_cancelled(): + runner.cancel() + thread.join(timeout=5.0) + cancelled = True + break + if cancelled: + logger.info(" SVF computation cancelled by user") + break + + # Ensure progress accounts for all patches in this tile + if last_patch < n_patches: + pbar.update(n_patches - last_patch) + + # Check for errors + if box[1] is not None: + tile = tiles[tile_idx] + raise RuntimeError( + f"SVF tile {tile_idx + 1}/{n_tiles} failed (read_slice={tile.read_slice}): {box[1]}" + ) from box[1] + cur_result = box[0] + if cur_result is None: + raise RuntimeError( + f"SVF tile {tile_idx + 1}/{n_tiles} returned None (skyview.calculate_svf produced no result)" + ) + cur_core_only = core_only + + # 
Submit next tile (GPU starts while we copy results below) + if tile_idx + 1 < n_tiles: + thread, box, runner, core_only = _submit_tile(tiles[tile_idx + 1]) + + # Copy results on main thread (overlaps with next GPU computation) + _process_result(cur_result, tiles[tile_idx], cur_core_only) + if on_tile_complete is not None: + on_tile_complete(tile_idx, n_tiles) + except BaseException: + # Clean up partial memmap files so stale data doesn't persist + import shutil + + for d in (svf_memmap_dir, memmap_dir): + if d.exists(): + shutil.rmtree(d, ignore_errors=True) + raise + finally: + pbar.close() + # Flush memmaps to disk + shmat_mm.flush() + vegshmat_mm.flush() + vbshmat_mm.flush() + + # Shared ones memmap for non-vegetation cases (avoids 5x full-size copies). + # When use_veg is True the veg outputs come from the real computation and + # ``ones`` is unused, but we still need a valid ndarray for the type checker. + if use_veg: + ones = np.ones((1, 1), dtype=np.float32) + else: + ones = np.memmap( + svf_memmap_dir / "ones.dat", + dtype=np.float32, + mode="w+", + shape=(rows, cols), + ) + ones[:] = 1.0 + + svf_data = SvfArrays( + svf=outputs["svf"], + svf_north=outputs["svf_north"], + svf_east=outputs["svf_east"], + svf_south=outputs["svf_south"], + svf_west=outputs["svf_west"], + svf_veg=outputs["svf_veg"] if use_veg else ones, + svf_veg_north=outputs["svf_veg_north"] if use_veg else ones, + svf_veg_east=outputs["svf_veg_east"] if use_veg else ones, + svf_veg_south=outputs["svf_veg_south"] if use_veg else ones, + svf_veg_west=outputs["svf_veg_west"] if use_veg else ones, + svf_aveg=outputs["svf_veg_blocks_bldg_sh"] if use_veg else ones, + svf_aveg_north=outputs["svf_veg_blocks_bldg_sh_north"] if use_veg else ones, + svf_aveg_east=outputs["svf_veg_blocks_bldg_sh_east"] if use_veg else ones, + svf_aveg_south=outputs["svf_veg_blocks_bldg_sh_south"] if use_veg else ones, + svf_aveg_west=outputs["svf_veg_blocks_bldg_sh_west"] if use_veg else ones, + ) + + # Flush all SVF 
memmaps to disk + for arr in outputs.values(): + if hasattr(arr, "flush"): + arr.flush() # type: ignore[union-attr] + if hasattr(ones, "flush"): + ones.flush() # type: ignore[union-attr] + + return svf_data, (shmat_mm, vegshmat_mm, vbshmat_mm) + + def preprocess(self) -> None: + """ + Convert layers from relative to absolute heights based on per-layer flags. + + Converts each layer that is flagged as relative (``dsm_relative``, + ``cdsm_relative``, ``tdsm_relative``) to absolute heights. Layers + already flagged as absolute are left unchanged. + + This method: + 1. Converts DSM from relative to absolute if ``dsm_relative=True`` + (requires DEM: ``dsm_absolute = dem + dsm_relative``) + 2. Auto-generates TDSM from CDSM * trunk_ratio if TDSM is not provided + 3. Converts CDSM from relative to absolute if ``cdsm_relative=True`` + 4. Converts TDSM from relative to absolute if ``tdsm_relative=True`` + 5. Zeros out vegetation pixels with height < 0.1m + + Note: + This method modifies arrays in-place and clears the per-layer + relative flags once conversion is done. + """ + if self._preprocessed: + return + + # Fill NaN in surface layers before any height conversion + self.fill_nan() + + threshold = np.float32(0.1) + zero32 = np.float32(0.0) + nan32 = np.float32(np.nan) + + # Step 1: Convert DSM from relative to absolute (requires DEM) + if self.dsm_relative: + if self.dem is None: + raise ValueError( + "DSM is flagged as relative (dsm_relative=True) but no DEM " + "is provided. A DEM is required to convert relative DSM " + "(height above ground) to absolute elevations." 
+ ) + logger.info("Converting relative DSM to absolute: DSM = DEM + nDSM") + self.dsm = (self.dem + self.dsm).astype(np.float32) + self.dsm_relative = False + + # Step 2: Auto-generate TDSM from trunk ratio if CDSM provided but not TDSM + if self.cdsm is not None and self.tdsm is None: + logger.info(f"Auto-generating TDSM from CDSM using trunk_ratio={self.trunk_ratio}") + self.tdsm = (self.cdsm * self.trunk_ratio).astype(np.float32) + self.tdsm_relative = self.cdsm_relative + + # Use DEM as base if available, otherwise DSM (now absolute after step 1) + base = self.dem if self.dem is not None else self.dsm + + # Step 3: Convert CDSM from relative to absolute + if self.cdsm_relative and self.cdsm is not None: + cdsm_rel = np.where(np.isnan(self.cdsm), zero32, self.cdsm) + cdsm_abs = np.where(~np.isnan(base), base + cdsm_rel, nan32) + cdsm_abs = np.where(cdsm_abs - base < threshold, base, cdsm_abs) + self.cdsm = cdsm_abs.astype(np.float32) + self.cdsm_relative = False + logger.info(f"Converted relative CDSM to absolute (base: {'DEM' if self.dem is not None else 'DSM'})") + + # Step 4: Convert TDSM from relative to absolute + if self.tdsm_relative and self.tdsm is not None: + tdsm_rel = np.where(np.isnan(self.tdsm), zero32, self.tdsm) + tdsm_abs = np.where(~np.isnan(base), base + tdsm_rel, nan32) + tdsm_abs = np.where(tdsm_abs - base < threshold, base, tdsm_abs) + self.tdsm = tdsm_abs.astype(np.float32) + self.tdsm_relative = False + logger.info(f"Converted relative TDSM to absolute (base: {'DEM' if self.dem is not None else 'DSM'})") + + self._preprocessed = True + + def compute_svf(self) -> None: + """ + Compute Sky View Factor (SVF) and store in self.svf. + + This must be called before calculate() or calculate_timeseries() + when constructing SurfaceData manually (not via prepare()). + + SVF is stored without psi (vegetation transmissivity) adjustment, + since psi depends on day-of-year and conifer flag which are not + known at SVF computation time. 
The adjustment is applied automatically + during calculation. + + Also computes and stores shadow matrices in self.shadow_matrices + (required for anisotropic sky model). + + Example: + surface = SurfaceData(dsm=dsm, cdsm=cdsm) + surface.preprocess() + surface.compute_svf() + result = calculate(surface, location, weather) + """ + if self.svf is not None: + return # Already computed + + use_veg = self.cdsm is not None + dsm_f32 = self.dsm.astype(np.float32) + + if use_veg: + assert self.cdsm is not None # Type narrowing for type checker + cdsm_f32 = self.cdsm.astype(np.float32) + if self.tdsm is not None: + tdsm_f32 = self.tdsm.astype(np.float32) + else: + tdsm_f32 = (self.cdsm * self.trunk_ratio).astype(np.float32) + else: + cdsm_f32 = np.zeros_like(dsm_f32) + tdsm_f32 = np.zeros_like(dsm_f32) + + max_height = _max_shadow_height(dsm_f32, cdsm_f32 if use_veg else None, use_veg=use_veg) + + logger.info("Computing Sky View Factor...") + svf_result = skyview.calculate_svf( + dsm_f32, + cdsm_f32, + tdsm_f32, + self.pixel_size, + use_veg, + max_height, + 2, # patch_option (153 patches) + 3.0, # min_sun_elev_deg + None, # progress callback + ) + + ones = np.ones_like(dsm_f32) + self.svf = SvfArrays( + svf=np.array(svf_result.svf), + svf_north=np.array(svf_result.svf_north), + svf_east=np.array(svf_result.svf_east), + svf_south=np.array(svf_result.svf_south), + svf_west=np.array(svf_result.svf_west), + svf_veg=np.array(svf_result.svf_veg) if use_veg else ones.copy(), + svf_veg_north=np.array(svf_result.svf_veg_north) if use_veg else ones.copy(), + svf_veg_east=np.array(svf_result.svf_veg_east) if use_veg else ones.copy(), + svf_veg_south=np.array(svf_result.svf_veg_south) if use_veg else ones.copy(), + svf_veg_west=np.array(svf_result.svf_veg_west) if use_veg else ones.copy(), + svf_aveg=np.array(svf_result.svf_veg_blocks_bldg_sh) if use_veg else ones.copy(), + svf_aveg_north=np.array(svf_result.svf_veg_blocks_bldg_sh_north) if use_veg else ones.copy(), + 
svf_aveg_east=np.array(svf_result.svf_veg_blocks_bldg_sh_east) if use_veg else ones.copy(), + svf_aveg_south=np.array(svf_result.svf_veg_blocks_bldg_sh_south) if use_veg else ones.copy(), + svf_aveg_west=np.array(svf_result.svf_veg_blocks_bldg_sh_west) if use_veg else ones.copy(), + ) + + # Store shadow matrices for anisotropic sky model + # Shadow matrices are bitpacked uint8 from Rust + self.shadow_matrices = ShadowArrays( + _shmat_u8=np.array(svf_result.bldg_sh_matrix), + _vegshmat_u8=np.array(svf_result.veg_sh_matrix), + _vbshmat_u8=np.array(svf_result.veg_blocks_bldg_sh_matrix), + _n_patches=153, # patch_option=2 + ) + + logger.info(" SVF computed successfully") + + @property + def max_height(self) -> float: + """Auto-compute maximum height difference for shadow buffer calculation. + + Considers both DSM (buildings) and CDSM (vegetation) since both cast shadows. + Returns max elevation minus ground level. + + This property is conservative by design for shadow buffer sizing: + CDSM is included whenever present, independent of current per-call + vegetation switches. 
+ """ + if self.dsm.size == 0 or not np.isfinite(self.dsm).any(): + return 0.0 + + dsm_max = float(np.nanmax(self.dsm)) + ground_min = float(np.nanmin(self.dsm)) + + # Also consider vegetation if present (CDSM may be taller than buildings) + if self.cdsm is not None and self.cdsm.size > 0 and np.isfinite(self.cdsm).any(): + cdsm_max = float(np.nanmax(self.cdsm)) + # After preprocessing, CDSM contains absolute elevations + # Use the higher of DSM or CDSM + max_elevation = max(dsm_max, cdsm_max) + else: + max_elevation = dsm_max + + height = max_elevation - ground_min + if not np.isfinite(height) or height <= 0: + return 0.0 + return height + + @property + def shape(self) -> tuple[int, int]: + """Return DSM shape (rows, cols).""" + rows, cols = self.dsm.shape + return (rows, cols) + + @property + def crs(self) -> str | None: + """Return CRS as WKT string, or None if not set.""" + return self._crs_wkt + + @property + def valid_mask(self) -> NDArray[np.bool_] | None: + """Return computed valid mask, or None if not yet computed.""" + return self._valid_mask + + def fill_nan(self, tolerance: float = 0.1) -> None: + """Fill NaN in surface layers using DEM as ground reference. + + NaN in DSM/CDSM/TDSM means "no data, assume ground level." + After filling, values within *tolerance* of ground are clamped + to exactly the ground value to avoid shadow/SVF noise from + resampling jitter. + + Fill rules: + - DSM NaN → DEM value (if DEM provided, else left as NaN) + - CDSM NaN → base value (DEM if available, else DSM) + - TDSM NaN → base value (DEM if available, else DSM) + - DEM NaN → not filled (DEM is the ground-truth baseline) + + Works identically for relative and absolute height conventions. + + Args: + tolerance: Height difference (m) below which a surface pixel + is considered "at ground" and clamped. Default 0.1 m. 
+ """ + if self._nan_filled: + return + + tol = np.float32(tolerance) + + # DSM: fill with DEM where available + if self.dem is not None: + dsm_nan = np.isnan(self.dsm) + if np.any(dsm_nan): + n = int(dsm_nan.sum()) + self.dsm = np.where(dsm_nan, self.dem, self.dsm).astype(np.float32) + logger.info(f" Filled {n} NaN DSM pixels with DEM") + + base = self.dem if self.dem is not None else self.dsm + base_label = "DEM" if self.dem is not None else "DSM" + + # CDSM: fill NaN with base, clamp near-ground noise + if self.cdsm is not None: + cdsm_nan = np.isnan(self.cdsm) + if np.any(cdsm_nan): + n = int(cdsm_nan.sum()) + self.cdsm = np.where(cdsm_nan, base, self.cdsm).astype(np.float32) + logger.info(f" Filled {n} NaN CDSM pixels with {base_label}") + near_ground = np.abs(self.cdsm - base) < tol + if np.any(near_ground): + self.cdsm = np.where(near_ground, base, self.cdsm).astype(np.float32) + + # TDSM: same treatment as CDSM + if self.tdsm is not None: + tdsm_nan = np.isnan(self.tdsm) + if np.any(tdsm_nan): + n = int(tdsm_nan.sum()) + self.tdsm = np.where(tdsm_nan, base, self.tdsm).astype(np.float32) + logger.info(f" Filled {n} NaN TDSM pixels with {base_label}") + near_ground = np.abs(self.tdsm - base) < tol + if np.any(near_ground): + self.tdsm = np.where(near_ground, base, self.tdsm).astype(np.float32) + + self._nan_filled = True + + def compute_valid_mask(self) -> NDArray[np.bool_]: + """Compute combined valid mask: True where ALL ground-reference layers have finite data. + + A pixel is valid only if DSM (and DEM/walls if provided) have finite values. + CDSM/TDSM are excluded — NaN vegetation means "at ground", not "invalid pixel". + Call fill_nan() before this to fill vegetation NaN with ground values. + + Returns: + Boolean array with same shape as DSM. True = valid pixel. 
+ """ + valid = np.isfinite(self.dsm) + for arr in [self.dem, self.wall_height, self.wall_aspect]: + if arr is not None: + valid &= np.isfinite(arr) + if self.land_cover is not None: + valid &= self.land_cover != 255 + self._valid_mask = valid + n_invalid = int(np.sum(~valid)) + if n_invalid > 0: + pct = 100.0 * n_invalid / valid.size + logger.info(f" Valid mask: {n_invalid} invalid pixels ({pct:.1f}%)") + else: + logger.info(" Valid mask: all pixels valid") + return valid + + def apply_valid_mask(self) -> None: + """Set NaN in ALL layers where ANY layer has nodata. + + Ensures consistent nodata across all surface arrays. + Must call compute_valid_mask() first (or it will be called automatically). + """ + if self._valid_mask is None: + self.compute_valid_mask() + assert self._valid_mask is not None # set by compute_valid_mask + invalid = ~self._valid_mask + if not np.any(invalid): + return + self.dsm[invalid] = np.nan + for attr in ("cdsm", "dem", "tdsm", "wall_height", "wall_aspect", "albedo", "emissivity"): + arr = getattr(self, attr) + if arr is not None: + arr[invalid] = np.nan + if self.land_cover is not None: + self.land_cover[invalid] = 255 + + def crop_to_valid_bbox(self) -> tuple[int, int, int, int]: + """Crop all arrays to minimum bounding box of valid pixels. + + Eliminates edge NaN bands to reduce wasted computation. + Updates geotransform to reflect the new origin. + + Returns: + (row_start, row_end, col_start, col_end) of the crop window. 
+ """ + if self._valid_mask is None: + self.compute_valid_mask() + assert self._valid_mask is not None # set by compute_valid_mask + rows_any = np.any(self._valid_mask, axis=1) + cols_any = np.any(self._valid_mask, axis=0) + if not np.any(rows_any): + logger.warning(" No valid pixels found — cannot crop") + return (0, self.dsm.shape[0], 0, self.dsm.shape[1]) + r0 = int(np.argmax(rows_any)) + r1 = len(rows_any) - int(np.argmax(rows_any[::-1])) + c0 = int(np.argmax(cols_any)) + c1 = len(cols_any) - int(np.argmax(cols_any[::-1])) + + if r0 == 0 and r1 == self.dsm.shape[0] and c0 == 0 and c1 == self.dsm.shape[1]: + logger.info(" Crop: no trimming needed (valid bbox = full extent)") + return (r0, r1, c0, c1) + + old_shape = self.dsm.shape + self.dsm = self.dsm[r0:r1, c0:c1].copy() + self._valid_mask = self._valid_mask[r0:r1, c0:c1].copy() + for attr in ("cdsm", "dem", "tdsm", "wall_height", "wall_aspect", "albedo", "emissivity", "land_cover"): + arr = getattr(self, attr) + if arr is not None: + setattr(self, attr, arr[r0:r1, c0:c1].copy()) + + # Update geotransform to reflect new origin + if self._geotransform is not None: + gt = self._geotransform + self._geotransform = [ + gt[0] + c0 * gt[1] + r0 * gt[2], # new origin X + gt[1], + gt[2], + gt[3] + c0 * gt[4] + r0 * gt[5], # new origin Y + gt[4], + gt[5], + ] + + # Crop SVF arrays if present + if self.svf is not None: + self.svf = self.svf.crop(r0, r1, c0, c1) + if self.shadow_matrices is not None: + self.shadow_matrices = self.shadow_matrices.crop(r0, r1, c0, c1) + + # Clear buffer pool (shape changed) + self.clear_buffers() + + logger.info(f" Cropped: {old_shape[1]}x{old_shape[0]} → {c1 - c0}x{r1 - r0} pixels") + return (r0, r1, c0, c1) + + def save_cleaned(self, output_dir: str | Path) -> None: + """Save cleaned, aligned rasters to disk for inspection. + + Writes all present layers to output_dir/cleaned/ as GeoTIFFs. + + Args: + output_dir: Parent directory. Files are saved under output_dir/cleaned/. 
+ """ + out = Path(output_dir) / "cleaned" + out.mkdir(parents=True, exist_ok=True) + gt = self._geotransform or [0, self.pixel_size, 0, 0, 0, -self.pixel_size] + crs = self._crs_wkt or "" + io.save_raster(str(out / "dsm.tif"), self.dsm, gt, crs) + for name, arr in [ + ("cdsm", self.cdsm), + ("dem", self.dem), + ("tdsm", self.tdsm), + ("wall_height", self.wall_height), + ("wall_aspect", self.wall_aspect), + ]: + if arr is not None: + io.save_raster(str(out / f"{name}.tif"), arr, gt, crs) + if self.land_cover is not None: + io.save_raster(str(out / "land_cover.tif"), self.land_cover.astype(np.float32), gt, crs) + if self._valid_mask is not None: + io.save_raster(str(out / "valid_mask.tif"), self._valid_mask.astype(np.float32), gt, crs) + logger.info(f" Cleaned rasters saved to {out}") + + def get_buffer_pool(self) -> BufferPool: + """Get or create a buffer pool for this surface. + + The buffer pool provides pre-allocated numpy arrays that can be + reused across timesteps during timeseries calculations. This + reduces memory allocation overhead and GC pressure. + + Returns: + BufferPool sized to this surface's grid dimensions. + + Example: + pool = surface.get_buffer_pool() + temp = pool.get_zeros("ani_lum") # First call allocates + temp = pool.get_zeros("ani_lum") # Second call reuses same memory + """ + if self._buffer_pool is None: + self._buffer_pool = BufferPool(self.shape) + return self._buffer_pool + + def clear_buffers(self) -> None: + """Clear the buffer pool to free memory. + + Call this after completing a timeseries calculation to release + the pre-allocated arrays. + """ + if self._buffer_pool is not None: + self._buffer_pool.clear() + self._buffer_pool = None + # Clear runtime compute caches tied to this surface. + # These are lazily rebuilt on demand in computation.calculate_core_fused(). 
+ for attr in ( + "_valid_mask_u8_cache", + "_valid_bbox_cache", + "_land_cover_props_cache", + "_buildings_mask_cache", + "_lc_grid_f32_cache", + "_gvf_geometry_cache", + "_gvf_geometry_cache_crop", + "_aniso_shadow_crop_cache", + ): + if hasattr(self, attr): + setattr(self, attr, None) + + def _looks_like_relative_heights(self) -> bool: + """ + Heuristic check if CDSM appears to contain relative heights. + + Returns True if max(CDSM) is much smaller than min(DSM), suggesting + CDSM contains height-above-ground values rather than absolute elevations. + + This is used to warn users who may have forgotten to call preprocess(). + """ + if self.cdsm is None: + return False + + cdsm_max = np.nanmax(self.cdsm) + dsm_min = np.nanmin(self.dsm) + + # If CDSM max is much smaller than DSM min, it's likely relative heights + # Typical case: DSM min ~100m elevation, CDSM max ~30m tree height + # Exception: coastal areas where DSM min could be near 0 + if dsm_min > 10 and cdsm_max < dsm_min * 0.5: + return True + + # Also check if CDSM values are typical vegetation heights (0-50m range) + # while DSM has larger values + return bool(cdsm_max < 60 and dsm_min > cdsm_max + 20) + + def _check_preprocessing_needed(self) -> None: + """ + Warn if CDSM appears to need preprocessing but wasn't preprocessed. + + Called internally before calculations to alert users. + """ + if self.cdsm is None: + return + + if self.cdsm_relative and not self._preprocessed and self._looks_like_relative_heights(): + logger.warning( + f"CDSM appears to contain relative vegetation heights " + f"(max CDSM={np.nanmax(self.cdsm):.1f}m < min DSM={np.nanmin(self.dsm):.1f}m), " + f"but preprocess() was not called. " + f"Call surface.preprocess() to convert to absolute heights, " + f"or set cdsm_relative=False if CDSM already contains absolute elevations." 
+ ) + + def get_land_cover_properties( + self, + params: SimpleNamespace | None = None, + ) -> tuple[ + NDArray[np.floating], + NDArray[np.floating], + NDArray[np.floating], + NDArray[np.floating], + NDArray[np.floating], + ]: + """ + Derive surface properties from land cover grid. + + Args: + params: Optional loaded parameters from JSON file (via load_params()). + When provided, land cover properties are read from the params. + When None, uses built-in defaults matching parametersforsolweig.json. + + Returns: + Tuple of (albedo_grid, emissivity_grid, tgk_grid, tstart_grid, tmaxlst_grid). + If land_cover is None, returns defaults. + + Land cover parameters from Lindberg et al. 2008, 2016 (parametersforsolweig.json): + - TgK (Ts_deg): Temperature coefficient for surface heating + - Tstart: Temperature offset at sunrise + - TmaxLST: Hour of maximum local surface temperature + """ + if self.land_cover is None: + # Use provided grids or defaults + alb = self.albedo if self.albedo is not None else np.full_like(self.dsm, 0.15) + emis = self.emissivity if self.emissivity is not None else np.full_like(self.dsm, 0.95) + tgk = np.full_like(self.dsm, 0.37) # Default TgK (cobblestone) + tstart = np.full_like(self.dsm, -3.41) # Default Tstart (cobblestone) + tmaxlst = np.full_like(self.dsm, 15.0) # Default TmaxLST (cobblestone) + return alb, emis, tgk, tstart, tmaxlst + + # If params provided, use the helper function to extract from JSON + if params is not None: + return get_lc_properties_from_params(self.land_cover, params, self.shape) + + # UMEP standard land cover properties from parametersforsolweig.json + # ID: (albedo, emissivity, TgK, Tstart, TmaxLST) + # Values must match the JSON parameters file for parity with runner + lc_properties = { + 0: (0.20, 0.95, 0.37, -3.41, 15.0), # Paved/cobblestone (Cobble_stone_2014a) + 1: (0.18, 0.95, 0.58, -9.78, 15.0), # Dark asphalt (albedo from JSON) + 2: (0.18, 0.95, 0.58, -9.78, 15.0), # Buildings/roofs (emissivity=0.95, 
albedo=0.18) + 3: (0.20, 0.95, 0.37, -3.41, 15.0), # Undefined (use paved defaults) + 4: (0.20, 0.95, 0.37, -3.41, 15.0), # Undefined (use paved defaults) + 5: (0.16, 0.94, 0.21, -3.38, 14.0), # Grass (Grass_unmanaged) - albedo=0.16, emis=0.94 + 6: (0.25, 0.94, 0.33, -3.01, 14.0), # Bare soil - emis=0.94 + 7: (0.05, 0.98, 0.00, 0.00, 12.0), # Water - albedo=0.05 + } + + rows, cols = self.shape + alb_grid = np.full((rows, cols), 0.15, dtype=np.float32) + emis_grid = np.full((rows, cols), 0.95, dtype=np.float32) + tgk_grid = np.full((rows, cols), 0.37, dtype=np.float32) + tstart_grid = np.full((rows, cols), -3.41, dtype=np.float32) + tmaxlst_grid = np.full((rows, cols), 15.0, dtype=np.float32) + + lc = self.land_cover + for lc_id, (alb, emis, tgk, tstart, tmaxlst) in lc_properties.items(): + mask = lc == lc_id + if np.any(mask): + alb_grid[mask] = alb + emis_grid[mask] = emis + tgk_grid[mask] = tgk + tstart_grid[mask] = tstart + tmaxlst_grid[mask] = tmaxlst + + return alb_grid, emis_grid, tgk_grid, tstart_grid, tmaxlst_grid diff --git a/pysrc/solweig/models/weather.py b/pysrc/solweig/models/weather.py new file mode 100644 index 0000000..efa4ffe --- /dev/null +++ b/pysrc/solweig/models/weather.py @@ -0,0 +1,790 @@ +"""Weather and location data models. + +Defines :class:`Location` (geographic coordinates and UTC offset) and +:class:`Weather` (per-timestep meteorological observations). Derived +fields such as sun position and the direct/diffuse radiation split are +computed lazily via :meth:`Weather.compute_derived`. 
+""" + +from __future__ import annotations + +from dataclasses import dataclass, field +from datetime import datetime as dt +from pathlib import Path +from typing import TYPE_CHECKING, Any + +import numpy as np + +from ..physics import sun_position as sp +from ..physics.clearnessindex_2013b import clearnessindex_2013b +from ..physics.diffusefraction import diffusefraction +from ..solweig_logging import get_logger + +if TYPE_CHECKING: + from .surface import SurfaceData + +logger = get_logger(__name__) + + +@dataclass +class Location: + """ + Geographic location for sun position calculations. + + Attributes: + latitude: Latitude in degrees (north positive). + longitude: Longitude in degrees (east positive). + altitude: Altitude above sea level in meters. Default 0. + utc_offset: UTC offset in hours. Default 0. + """ + + latitude: float + longitude: float + altitude: float = 0.0 + utc_offset: int = 0 + + def __post_init__(self): + if not -90 <= self.latitude <= 90: + raise ValueError(f"Latitude must be in [-90, 90], got {self.latitude}") + if not -180 <= self.longitude <= 180: + raise ValueError(f"Longitude must be in [-180, 180], got {self.longitude}") + + @classmethod + def from_dsm_crs(cls, dsm_path: str | Path, utc_offset: int = 0, altitude: float = 0.0) -> Location: + """ + Extract location from DSM raster's CRS by converting center point to WGS84. + + Args: + dsm_path: Path to DSM GeoTIFF file with valid CRS. + utc_offset: UTC offset in hours. Must be provided by user. + altitude: Altitude above sea level in meters. Default 0. + + Returns: + Location object with lat/lon from DSM center point. + + Raises: + ValueError: If DSM has no CRS or CRS conversion fails. + + Example: + location = Location.from_dsm_crs("dsm.tif", utc_offset=2) + """ + from .. import io + + try: + from pyproj import Transformer + except ImportError as err: + raise ImportError("pyproj is required for CRS extraction. 
Install with: pip install pyproj") from err + + # Load DSM to get CRS and bounds + _, transform, crs_wkt, _ = io.load_raster(str(dsm_path)) + + if not crs_wkt: + raise ValueError( + f"DSM has no CRS metadata: {dsm_path}\n" + f"Either:\n" + f" 1. Add CRS to GeoTIFF: gdal_edit.py -a_srs EPSG:XXXXX {dsm_path}\n" + f" 2. Provide location manually: Location(latitude=X, longitude=Y, utc_offset={utc_offset})" + ) + + # Get center point from geotransform + # Transform is [x_origin, x_pixel_size, x_rotation, y_origin, y_rotation, y_pixel_size] + # We need the raster dimensions to find center - load again to get shape + dsm_array, _, _, _ = io.load_raster(str(dsm_path)) + rows, cols = dsm_array.shape + + center_x = transform[0] + (cols / 2) * transform[1] + center_y = transform[3] + (rows / 2) * transform[5] + + # Convert to WGS84 + transformer = Transformer.from_crs(crs_wkt, "EPSG:4326", always_xy=True) + lon, lat = transformer.transform(center_x, center_y) + + logger.info(f"Extracted location from DSM CRS: {lat:.4f}°N, {lon:.4f}°E (UTC{utc_offset:+d})") + return cls(latitude=lat, longitude=lon, altitude=altitude, utc_offset=utc_offset) + + @classmethod + def from_surface(cls, surface: SurfaceData, utc_offset: int | None = None, altitude: float = 0.0) -> Location: + """ + Extract location from SurfaceData's CRS by converting center point to WGS84. + + This avoids reloading the DSM raster when you already have loaded SurfaceData. + + Args: + surface: SurfaceData instance loaded from GeoTIFF. + utc_offset: UTC offset in hours. If not provided, defaults to 0 with a warning. + Always provide this explicitly for correct sun position calculations. + altitude: Altitude above sea level in meters. Default 0. + + Returns: + Location object with lat/lon from DSM center point. + + Raises: + ValueError: If surface has no CRS metadata. + ImportError: If pyproj is not installed. 
+ + Example: + surface = SurfaceData.from_geotiff("dsm.tif") + location = Location.from_surface(surface, utc_offset=2) # Athens: UTC+2 + """ + import warnings + + try: + from pyproj import Transformer + except ImportError as err: + raise ImportError("pyproj is required for CRS extraction. Install with: pip install pyproj") from err + + # Check if geotransform and CRS are available + if not hasattr(surface, "_geotransform") or surface._geotransform is None: + raise ValueError( + "Surface data has no geotransform metadata.\n" + "Load surface with SurfaceData.from_geotiff() or provide location manually." + ) + if not hasattr(surface, "_crs_wkt") or surface._crs_wkt is None: + raise ValueError( + "Surface data has no CRS metadata.\n" + "Provide location manually: Location(latitude=X, longitude=Y, utc_offset=0)" + ) + + transform = surface._geotransform + crs_wkt = surface._crs_wkt + rows, cols = surface.dsm.shape + + # Get center point from geotransform + # Transform is [x_origin, x_pixel_size, x_rotation, y_origin, y_rotation, y_pixel_size] + center_x = transform[0] + (cols / 2) * transform[1] + center_y = transform[3] + (rows / 2) * transform[5] + + # Convert to WGS84 + transformer = Transformer.from_crs(crs_wkt, "EPSG:4326", always_xy=True) + lon, lat = transformer.transform(center_x, center_y) + + # Warn if utc_offset not explicitly provided + if utc_offset is None: + warnings.warn( + f"UTC offset not specified for auto-extracted location ({lat:.4f}°N, {lon:.4f}°E).\n" + f"Defaulting to UTC+0, which may cause incorrect sun positions.\n" + f"Fix: Location.from_surface(surface, utc_offset=YOUR_OFFSET) or\n" + f" Location(latitude={lat:.4f}, longitude={lon:.4f}, utc_offset=YOUR_OFFSET)", + UserWarning, + stacklevel=2, + ) + utc_offset = 0 + + logger.debug(f"Auto-extracted location: {lat:.4f}°N, {lon:.4f}°E (UTC{utc_offset:+d})") + return cls(latitude=lat, longitude=lon, altitude=altitude, utc_offset=utc_offset) + + @classmethod + def from_epw(cls, path: str | Path) -> 
Location: + """ + Extract location from an EPW weather file header. + + The EPW LOCATION line contains latitude, longitude, timezone offset, + and elevation — everything needed for a complete Location. + + Args: + path: Path to the EPW file. + + Returns: + Location with lat, lon, utc_offset, and altitude from the EPW header. + + Raises: + FileNotFoundError: If the EPW file doesn't exist. + ValueError: If the EPW header is malformed. + + Example: + location = Location.from_epw("madrid.epw") + # Location(latitude=40.45, longitude=-3.55, altitude=667.0, utc_offset=1) + """ + from .. import io as common + + metadata = common._parse_epw_metadata(Path(path)) + utc_offset = int(metadata["tz_offset"]) + + logger.info( + f"Location from EPW: {metadata['city']} — " + f"{metadata['latitude']:.4f}°N, {metadata['longitude']:.4f}°E " + f"(UTC{utc_offset:+d}, {metadata['elevation']:.0f}m)" + ) + return cls( + latitude=metadata["latitude"], + longitude=metadata["longitude"], + altitude=metadata["elevation"], + utc_offset=utc_offset, + ) + + def to_sun_position_dict(self) -> dict: + """Convert to dict format expected by sun_position module.""" + return { + "latitude": self.latitude, + "longitude": self.longitude, + "altitude": self.altitude, + } + + +@dataclass +class Weather: + """ + Weather/meteorological data for a single timestep. + + Only basic measurements are required. Derived values (sun position, + direct/diffuse radiation split) are computed automatically. + + Attributes: + datetime: Date and time of measurement (end of interval). + ta: Air temperature in °C. + rh: Relative humidity in % (0-100). + global_rad: Global solar radiation in W/m². + ws: Wind speed in m/s. Default 1.0. + pressure: Atmospheric pressure in hPa. Default 1013.25. + timestep_minutes: Data timestep in minutes. Default 60.0. + Sun position is computed at datetime - timestep/2 to represent + the center of the measurement interval. + measured_direct_rad: Optional measured direct beam radiation in W/m². 
+ If provided with measured_diffuse_rad, these override the computed values. + measured_diffuse_rad: Optional measured diffuse radiation in W/m². + If provided with measured_direct_rad, these override the computed values. + + Auto-computed (after calling compute_derived()): + sun_altitude: Sun altitude angle in degrees. Initial: 0.0. + sun_azimuth: Sun azimuth angle in degrees. Initial: 0.0. + sun_zenith: Sun zenith angle in degrees. Initial: 90.0. + direct_rad: Direct beam radiation in W/m². Initial: 0.0. + diffuse_rad: Diffuse radiation in W/m². Initial: 0.0. + clearness_index: Clearness index (0-1). Initial: 1.0. + altmax: Maximum sun altitude for the day in degrees. Initial: 45.0. + """ + + datetime: dt + ta: float + rh: float + global_rad: float + ws: float = 1.0 + pressure: float = 1013.25 + timestep_minutes: float = 60.0 # Timestep in minutes (for half-timestep sun position offset) + measured_direct_rad: float | None = None # Optional measured direct beam radiation + measured_diffuse_rad: float | None = None # Optional measured diffuse radiation + precomputed_sun_altitude: float | None = None # Optional pre-computed sun altitude + precomputed_sun_azimuth: float | None = None # Optional pre-computed sun azimuth + precomputed_altmax: float | None = None # Optional pre-computed max sun altitude for day + + # Auto-computed values (set by compute_derived) + sun_altitude: float = field(default=0.0, init=False) + sun_azimuth: float = field(default=0.0, init=False) + sun_zenith: float = field(default=90.0, init=False) + direct_rad: float = field(default=0.0, init=False) + diffuse_rad: float = field(default=0.0, init=False) + clearness_index: float = field(default=1.0, init=False) + altmax: float = field(default=45.0, init=False) # Maximum sun altitude for the day + + _derived_computed: bool = field(default=False, init=False, repr=False) + + def __post_init__(self): + if not 0 <= self.rh <= 100: + raise ValueError(f"Relative humidity must be in [0, 100], got 
{self.rh}") + if self.global_rad < 0: + raise ValueError(f"Global radiation must be >= 0, got {self.global_rad}") + + def compute_derived(self, location: Location) -> None: + """ + Compute derived values: sun position and radiation split. + + Must be called before using sun_altitude, sun_azimuth, direct_rad, + or diffuse_rad. + + Sun position is calculated at the center of the measurement interval + (datetime - timestep/2), which is standard for meteorological data + where measurements are averaged over the interval. + + Args: + location: Geographic location for sun position calculation. + """ + # Always create location_dict (needed for clearness index calculation) + location_dict = location.to_sun_position_dict() + + # Use pre-computed sun position if provided, otherwise compute + if self.precomputed_sun_altitude is not None and self.precomputed_sun_azimuth is not None: + self.sun_altitude = self.precomputed_sun_altitude + self.sun_azimuth = self.precomputed_sun_azimuth + self.sun_zenith = 90.0 - self.sun_altitude + self.altmax = self.precomputed_altmax if self.precomputed_altmax is not None else self.sun_altitude + else: + # Apply half-timestep offset for sun position + # Meteorological data timestamps typically represent the end of an interval, + # so we compute sun position at the center of the interval to match SOLWEIG runner + from datetime import timedelta + + half_timestep = timedelta(minutes=self.timestep_minutes / 2.0) + sun_time = self.datetime - half_timestep + + # Compute sun position using NREL algorithm + time_dict = { + "year": sun_time.year, + "month": sun_time.month, + "day": sun_time.day, + "hour": sun_time.hour, + "min": sun_time.minute, + "sec": sun_time.second, + "UTC": location.utc_offset, + } + location_dict = location.to_sun_position_dict() + + sun = sp.sun_position(time_dict, location_dict) + + # Extract scalar values (sun_position may return 0-d arrays) + zenith = sun["zenith"] + azimuth = sun["azimuth"] + self.sun_zenith = 
float(np.asarray(zenith).flat[0]) if hasattr(zenith, "__iter__") else float(zenith) + self.sun_azimuth = float(np.asarray(azimuth).flat[0]) if hasattr(azimuth, "__iter__") else float(azimuth) + self.sun_altitude = 90.0 - self.sun_zenith + + # Use pre-computed altmax if available (avoids expensive 96-iteration loop) + if self.precomputed_altmax is not None: + self.altmax = self.precomputed_altmax + else: + # Calculate maximum sun altitude for the day (iterate in 15-min intervals) + from datetime import timedelta + + ymd = self.datetime.replace(hour=0, minute=0, second=0, microsecond=0) + sunmaximum = -90.0 + fifteen_min = 15.0 / 1440.0 # 15 minutes as fraction of day + + for step in range(96): # 24 hours * 4 (15-min intervals) + step_time = ymd + timedelta(days=step * fifteen_min) + time_dict_step = { + "year": step_time.year, + "month": step_time.month, + "day": step_time.day, + "hour": step_time.hour, + "min": step_time.minute, + "sec": 0, + "UTC": location.utc_offset, + } + sun_step = sp.sun_position(time_dict_step, location_dict) + zenith_step = sun_step["zenith"] + zenith_val = ( + float(np.asarray(zenith_step).flat[0]) + if hasattr(zenith_step, "__iter__") + else float(zenith_step) + ) + alt_step = 90.0 - zenith_val + if alt_step > sunmaximum: + sunmaximum = alt_step + + self.altmax = max(sunmaximum, 0.0) # Ensure non-negative + + # Use measured radiation values if provided, otherwise compute + if self.measured_direct_rad is not None and self.measured_diffuse_rad is not None: + # Use pre-measured direct and diffuse radiation + self.direct_rad = self.measured_direct_rad + self.diffuse_rad = self.measured_diffuse_rad + # Still compute clearness index for diagnostics/plotting + if self.sun_altitude > 0 and self.global_rad > 0: + zen_rad = self.sun_zenith * (np.pi / 180.0) + result = clearnessindex_2013b( + zen_rad, + self.datetime.timetuple().tm_yday, + self.ta, + self.rh / 100.0, + self.global_rad, + location_dict, + self.pressure, + ) + _, self.clearness_index, 
_, _, _ = result + else: + self.clearness_index = 0.0 + elif self.sun_altitude > 0 and self.global_rad > 0: + # Compute clearness index + zen_rad = self.sun_zenith * (np.pi / 180.0) + result = clearnessindex_2013b( + zen_rad, + self.datetime.timetuple().tm_yday, + self.ta, + self.rh / 100.0, + self.global_rad, + location_dict, + self.pressure, + ) + # clearnessindex_2013b returns: (I0, CI, Kt, I0_et, diff_et) + _, self.clearness_index, kt, _, _ = result + + # Use Reindl model for diffuse fraction + self.direct_rad, self.diffuse_rad = diffusefraction( + self.global_rad, self.sun_altitude, kt, self.ta, self.rh + ) + else: + # Night or no radiation + self.direct_rad = 0.0 + self.diffuse_rad = self.global_rad + self.clearness_index = 0.0 + + self._derived_computed = True + + @property + def is_daytime(self) -> bool: + """Check if sun is above horizon.""" + return self.sun_altitude > 0 + + @classmethod + def from_values( + cls, + ta: float, + rh: float, + global_rad: float, + datetime: dt | None = None, + ws: float = 1.0, + **kwargs: Any, + ) -> Weather: + """ + Quick factory for creating Weather with minimal required values. + + Useful for testing and single-timestep calculations where you + just need to specify the essential parameters. + + Args: + ta: Air temperature in °C. + rh: Relative humidity in % (0-100). + global_rad: Global solar radiation in W/m². + datetime: Date and time. If None, uses current time. + ws: Wind speed in m/s. Default 1.0. + **kwargs: Additional Weather parameters (pressure, etc.) + + Returns: + Weather object ready for calculation. 

        Example:
            # Quick weather for testing
            weather = Weather.from_values(ta=25, rh=50, global_rad=800)

            # With specific datetime
            weather = Weather.from_values(
                ta=30, rh=60, global_rad=900,
                datetime=datetime(2025, 7, 15, 14, 0)
            )
        """
        if datetime is None:
            datetime = dt.now()
        return cls(datetime=datetime, ta=ta, rh=rh, global_rad=global_rad, ws=ws, **kwargs)

    @classmethod
    def from_epw(
        cls,
        path: str | Path,
        start: str | dt | None = None,
        end: str | dt | None = None,
        hours: list[int] | None = None,
        year: int | None = None,
    ) -> list[Weather]:
        """
        Load weather data from an EnergyPlus Weather (EPW) file.

        Args:
            path: Path to the EPW file.
            start: Start date/datetime. Can be:
                - ISO date string "YYYY-MM-DD" or "MM-DD" (for TMY with year=None)
                - datetime object
                If None, uses first date in file.
            end: End date/datetime (inclusive). Same format as start.
                If None, uses same as start (single day).
            hours: List of hours to include (0-23). If None, includes all hours.
            year: Year override for TMY files. If None and start/end use MM-DD format,
                matches any year in the file.

        Returns:
            List of Weather objects for each timestep in the requested range.

        Raises:
            FileNotFoundError: If the EPW file doesn't exist.
            ValueError: If requested dates are outside the EPW file's date range.

        Example:
            # Load a single day
            weather_list = Weather.from_epw("weather.epw", start="2023-07-15", end="2023-07-15")

            # Load with specific hours only (daylight hours)
            weather_list = Weather.from_epw(
                "weather.epw",
                start="2023-07-15",
                end="2023-07-16",
                hours=[6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18]
            )

            # TMY file (year-agnostic)
            weather_list = Weather.from_epw("tmy.epw", start="07-15", end="07-15")
        """
        # Local import to avoid a circular dependency between models and io.
        from .. import io as common

        # Parse EPW file
        df, epw_info = common.read_epw(path)

        # Parse start/end dates
        def parse_date(date_val, is_tmy: bool, default_year: int):
            # Accepts None, datetime, "YYYY-MM-DD", or "MM-DD" (TMY).
            # NOTE(review): the is_tmy argument is not used in this helper —
            # confirm whether it was meant to drive the MM-DD branch.
            if date_val is None:
                return None
            if isinstance(date_val, dt):
                return date_val
            # String parsing
            date_str = str(date_val)
            if "-" in date_str:
                parts = date_str.split("-")
                if len(parts) == 2:
                    # MM-DD format (TMY)
                    month, day = int(parts[0]), int(parts[1])
                    return dt(default_year, month, day)
                elif len(parts) == 3:
                    # YYYY-MM-DD format
                    return dt.fromisoformat(date_str)
            raise ValueError(f"Cannot parse date: {date_val}. Use 'YYYY-MM-DD' or 'MM-DD' format.")

        # Determine if using TMY mode (year-agnostic)
        is_tmy = year is None and start is not None and isinstance(start, str) and len(start.split("-")) == 2

        # Get default year from EPW data
        if df.index.empty:
            raise ValueError("EPW file contains no data")
        # NOTE(review): the first row's year is used as the default — for TMY
        # files whose rows span multiple source years this only affects the
        # placeholder year of parsed MM-DD dates, not the matching itself.
        default_year = df.index[0].year if year is None else year

        # Parse dates
        start_dt = parse_date(start, is_tmy, default_year)
        end_dt = parse_date(end, is_tmy, default_year)

        if start_dt is None:
            start_dt = df.index[0].replace(tzinfo=None)
        if end_dt is None:
            end_dt = start_dt

        # Make end_dt inclusive of the full day
        if end_dt.hour == 0 and end_dt.minute == 0:
            end_dt = end_dt.replace(hour=23, minute=59, second=59)

        # Filter by date range
        # Remove timezone from index for comparison if needed
        df_idx = df.index.tz_localize(None) if df.index.tz is not None else df.index

        if is_tmy:
            # TMY mode: match month and day, ignore year
            # Build (month, day) tuples for comparison — handles year-crossing ranges
            # like Dec 15 → Jan 15 correctly.
            start_md = (start_dt.month, start_dt.day)
            end_md = (end_dt.month, end_dt.day)
            idx_md = list(zip(df_idx.month, df_idx.day, strict=False))

            if start_md <= end_md:
                # Normal range (e.g., Feb 7 → Feb 8)
                mask = [(start_md <= md <= end_md) for md in idx_md]
            else:
                # Year-crossing range (e.g., Dec 15 → Jan 15)
                mask = [(md >= start_md or md <= end_md) for md in idx_md]
        else:
            # Normal mode: match full datetime
            mask = (df_idx >= start_dt) & (df_idx <= end_dt)

        df_filtered = df[mask]

        if df_filtered.empty:
            # Build helpful error message
            avail_start = df_idx.min()
            avail_end = df_idx.max()
            raise ValueError(
                f"Requested dates {start_dt.date()} to {end_dt.date()} not found in EPW file.\n"
                f"EPW file '{path}' contains data for: {avail_start.date()} to {avail_end.date()}\n"
                "Suggestions:\n"
                "  - Use dates within the available range\n"
                "  - For TMY files, use 'MM-DD' format (e.g., '07-15') to match any year"
            )

        # Filter by hours if specified
        if hours is not None:
            hours_set = set(hours)
            hour_mask = df_filtered.index.hour.isin(hours_set)
            df_filtered = df_filtered[hour_mask]

        # Create Weather objects
        weather_list = []
        for timestamp, row in df_filtered.iterrows():
            # Create Weather object with available data
            # EPW has dni/dhi which we can use as measured values
            # NaN columns fall back to benign defaults (20 °C, 50 %, calm, std pressure).
            w = cls(
                datetime=timestamp.to_pydatetime().replace(tzinfo=None),
                ta=float(row["temp_air"]) if not np.isnan(row["temp_air"]) else 20.0,
                rh=float(row["relative_humidity"]) if not np.isnan(row["relative_humidity"]) else 50.0,
                global_rad=float(row["ghi"]) if not np.isnan(row["ghi"]) else 0.0,
                ws=float(row["wind_speed"]) if not np.isnan(row["wind_speed"]) else 1.0,
                pressure=(float(row["atmospheric_pressure"]) / 100.0)
                if not np.isnan(row["atmospheric_pressure"])
                else 1013.25,  # Convert Pa to hPa
                measured_direct_rad=float(row["dni"]) if not np.isnan(row["dni"]) else None,
                measured_diffuse_rad=float(row["dhi"]) if not np.isnan(row["dhi"]) else
None,
            )
            weather_list.append(w)

        if weather_list:
            logger.info(
                f"Loaded {len(weather_list)} timesteps from EPW: "
                f"{weather_list[0].datetime.strftime('%Y-%m-%d %H:%M')} → "
                f"{weather_list[-1].datetime.strftime('%Y-%m-%d %H:%M')}"
            )
        else:
            logger.warning(f"No timesteps found in EPW file for date range {start_dt} to {end_dt}")

        return weather_list

    @classmethod
    def from_umep_met(
        cls,
        paths: str | Path | list[str | Path],
        resample_hourly: bool = True,
        start: str | dt | None = None,
        end: str | dt | None = None,
    ) -> list[Weather]:
        """
        Load weather data from UMEP/SUEWS meteorological forcing files.

        The UMEP met format is space-separated with columns:
        %iy, id, it, imin, Q*, QH, QE, Qs, Qf, Wind, RH, Td, press,
        rain, Kdn, snow, ldown, fcld, wuh, xsmd, lai_hr, Kdiff, Kdir, Wd

        Missing values are encoded as -999.

        Args:
            paths: Path(s) to UMEP met file(s). Multiple files are
                concatenated (e.g., one per month).
            resample_hourly: If True, keep only on-the-hour rows (imin=0).
                Default True since SOLWEIG works best with hourly data.
            start: Start date filter as "YYYY-MM-DD" or datetime. Optional.
            end: End date filter (inclusive) as "YYYY-MM-DD" or datetime. Optional.

        Returns:
            List of Weather objects sorted by datetime.

        Example:
            # Single file
            weather = Weather.from_umep_met("metdata_10min_july.txt")

            # Multiple monthly files
            weather = Weather.from_umep_met([
                "metdata_10min_may.txt",
                "metdata_10min_june.txt",
            ])
        """
        from datetime import timedelta

        if isinstance(paths, (str, Path)):
            paths = [paths]

        rows: list[dict[str, float]] = []
        for path in paths:
            path = Path(path)
            if not path.exists():
                raise FileNotFoundError(f"UMEP met file not found: {path}")

            with open(path) as f:
                for line in f:
                    line = line.strip()
                    # Skip blanks and header/comment lines ("%" or "#").
                    if not line or line.startswith("%") or line.startswith("#"):
                        continue
                    parts = line.split()
                    # Rows shorter than the 24-column UMEP layout are ignored.
                    if len(parts) < 24:
                        continue
                    try:
                        # Column positions follow the UMEP met layout listed
                        # in the docstring (0-based indices).
                        rows.append(
                            {
                                "year": int(parts[0]),
                                "doy": int(parts[1]),
                                "hour": int(parts[2]),
                                "minute": int(parts[3]),
                                "wind": float(parts[9]),
                                "rh": float(parts[10]),
                                "ta": float(parts[11]),
                                "press_kpa": float(parts[12]),
                                "kdn": float(parts[14]),
                                "kdiff": float(parts[21]),
                                "kdir": float(parts[22]),
                            }
                        )
                    except (ValueError, IndexError):
                        # Malformed row — best-effort parse, skip silently.
                        continue

        if not rows:
            raise ValueError(f"No valid data rows found in UMEP met files: {paths}")

        # Filter hourly if requested
        if resample_hourly:
            rows = [r for r in rows if r["minute"] == 0]

        # Convert to Weather objects
        weather_list = []
        for r in rows:
            # Timestamp from (year, day-of-year, hour, minute).
            timestamp = dt(int(r["year"]), 1, 1) + timedelta(
                days=int(r["doy"]) - 1, hours=int(r["hour"]), minutes=int(r["minute"])
            )

            # Skip rows with missing critical data
            if r["ta"] <= -998 or r["rh"] <= -998 or r["kdn"] <= -998:
                continue

            # Convert pressure from kPa to hPa (-999 means missing)
            pressure = r["press_kpa"] * 10.0 if r["press_kpa"] > -998 else 1013.25

            # Direct/diffuse radiation (-999 means missing)
            kdir = r["kdir"] if r["kdir"] > -998 else None
            kdiff = r["kdiff"] if r["kdiff"] > -998 else None

            # Wind speed (-999 means missing)
            ws = r["wind"] if r["wind"] > -998 else 1.0

            # UMEP forcing files are 10-minute; after resampling they are hourly.
            timestep = 60.0 if resample_hourly else 10.0

            w = cls(
                datetime=timestamp,
                ta=r["ta"],
                rh=r["rh"],
                global_rad=max(r["kdn"], 0.0),
                ws=ws,
                pressure=pressure,
                timestep_minutes=timestep,
                measured_direct_rad=kdir,
                measured_diffuse_rad=kdiff,
            )
            weather_list.append(w)

        # Sort by datetime
        weather_list.sort(key=lambda w: w.datetime)

        # Apply date filters
        if start is not None:
            start_dt = dt.fromisoformat(start) if isinstance(start, str) else start
            weather_list = [w for w in weather_list if w.datetime >= start_dt]
        if end is not None:
            end_dt = dt.fromisoformat(end) if isinstance(end, str) else end
            # A bare date means "through the end of that day".
            if end_dt.hour == 0 and end_dt.minute == 0:
                end_dt = end_dt.replace(hour=23, minute=59, second=59)
            weather_list = [w for w in weather_list if w.datetime <= end_dt]

        if weather_list:
            logger.info(
                f"Loaded {len(weather_list)} timesteps from UMEP met: "
                f"{weather_list[0].datetime.strftime('%Y-%m-%d %H:%M')} → "
                f"{weather_list[-1].datetime.strftime('%Y-%m-%d %H:%M')}"
            )
        elif start is not None or end is not None:
            # Warn when date filter produces no results — usually a year mismatch
            # NOTE(review): this re-derives timestamps from the raw rows (before
            # the hourly resample/missing-data filters) to report availability.
            all_weather = []
            for r in rows:
                from datetime import timedelta as _td

                ts = dt(int(r["year"]), 1, 1) + _td(
                    days=int(r["doy"]) - 1, hours=int(r["hour"]), minutes=int(r["minute"])
                )
                all_weather.append(ts)
            if all_weather:
                avail_start = min(all_weather)
                avail_end = max(all_weather)
                logger.warning(
                    f"No timesteps found for requested range "
                    f"{start} to {end}.\n"
                    f"  File contains data for: "
                    f"{avail_start.strftime('%Y-%m-%d')} to {avail_end.strftime('%Y-%m-%d')}\n"
                    f"  Check that start/end dates match the year in the file."
+ ) + + return weather_list diff --git a/pysrc/solweig/output_async.py b/pysrc/solweig/output_async.py new file mode 100644 index 0000000..e72bc07 --- /dev/null +++ b/pysrc/solweig/output_async.py @@ -0,0 +1,143 @@ +"""Asynchronous GeoTIFF writing helpers for timeseries workflows.""" + +from __future__ import annotations + +import os +from collections import deque +from concurrent.futures import Future, ThreadPoolExecutor +from datetime import datetime as dt +from pathlib import Path +from typing import TYPE_CHECKING + +import numpy as np + +from .solweig_logging import get_logger + +if TYPE_CHECKING: + from numpy.typing import NDArray + + from .models import SolweigResult, SurfaceData + +logger = get_logger(__name__) + + +def async_output_enabled() -> bool: + """Return whether asynchronous output writing is enabled.""" + raw = os.environ.get("SOLWEIG_ASYNC_OUTPUT", "1").strip().lower() + return raw not in {"0", "false", "no", "off"} + + +def collect_output_arrays(result: SolweigResult, outputs: list[str]) -> dict[str, NDArray[np.floating]]: + """Collect requested output arrays from a result object.""" + available_outputs = { + "tmrt": result.tmrt, + "utci": result.utci, + "pet": result.pet, + "shadow": result.shadow, + "kdown": result.kdown, + "kup": result.kup, + "ldown": result.ldown, + "lup": result.lup, + } + + selected: dict[str, NDArray[np.floating]] = {} + for name in outputs: + if name not in available_outputs: + logger.warning(f"Unknown output '{name}', skipping. Valid: {list(available_outputs.keys())}") + continue + array = available_outputs[name] + if array is None: + logger.warning(f"Output '{name}' is None (not computed), skipping.") + continue + selected[name] = array + return selected + + +class AsyncGeoTiffWriter: + """ + Single-threaded async writer with bounded in-flight tasks. + + Writing runs on one background thread so compute can continue while I/O + proceeds. ``max_pending`` provides backpressure and bounds memory use. 
+ """ + + def __init__( + self, + output_dir: str | Path, + *, + surface: SurfaceData | None = None, + max_pending: int = 2, + ) -> None: + self.output_dir = Path(output_dir) + self.output_dir.mkdir(parents=True, exist_ok=True) + self.max_pending = max(1, int(max_pending)) + self._executor = ThreadPoolExecutor(max_workers=1, thread_name_prefix="solweig-geotiff") + self._pending: deque[Future[None]] = deque() + + self.transform: list[float] | None = None + self.crs_wkt: str = "" + if surface is not None: + if surface._geotransform is not None: + self.transform = surface._geotransform + if surface._crs_wkt is not None: + self.crs_wkt = surface._crs_wkt + + def submit(self, *, timestamp: dt, arrays: dict[str, NDArray[np.floating]]) -> None: + """Queue one timestep worth of outputs for writing.""" + if not arrays: + return + + self._drain_completed() + while len(self._pending) >= self.max_pending: + self._pending.popleft().result() + + ts_str = timestamp.strftime("%Y%m%d_%H%M") + future = self._executor.submit( + _write_outputs, + output_dir=self.output_dir, + ts_str=ts_str, + arrays=arrays, + transform=self.transform, + crs_wkt=self.crs_wkt, + ) + self._pending.append(future) + + def close(self) -> None: + """Wait for all queued writes and stop background worker.""" + try: + while self._pending: + self._pending.popleft().result() + finally: + self._executor.shutdown(wait=True) + + def _drain_completed(self) -> None: + while self._pending and self._pending[0].done(): + self._pending.popleft().result() + + +def _write_outputs( + *, + output_dir: Path, + ts_str: str, + arrays: dict[str, NDArray[np.floating]], + transform: list[float] | None, + crs_wkt: str, +) -> None: + from . 
import io + + if not arrays: + return + + write_transform = transform if transform is not None else [0.0, 1.0, 0.0, 0.0, 0.0, -1.0] + + for name, array in arrays.items(): + comp_dir = output_dir / name + comp_dir.mkdir(parents=True, exist_ok=True) + filepath = comp_dir / f"{name}_{ts_str}.tif" + io.save_raster( + out_path_str=str(filepath), + data_arr=array, + trf_arr=write_transform, + crs_wkt=crs_wkt, + no_data_val=np.nan, + ) diff --git a/pysrc/umepr/parametersforsolweig.json b/pysrc/solweig/parametersforsolweig.json similarity index 97% rename from pysrc/umepr/parametersforsolweig.json rename to pysrc/solweig/parametersforsolweig.json index 17a8cbe..bb327de 100644 --- a/pysrc/umepr/parametersforsolweig.json +++ b/pysrc/solweig/parametersforsolweig.json @@ -167,10 +167,10 @@ "Tmrt_params": { "Value": { "absK": 0.70, - "absL": 0.95, + "absL": 0.97, "posture": "Standing" }, - "Comment": "Absorption coefficients for mean radiant temperature (Tmrt) and posture. Posture is either standing or sitting." + "Comment": "Absorption coefficients per ISO 7726:1998. absK=0.70 (shortwave), absL=0.97 (longwave)." }, "PET_settings": { "Value": { diff --git a/temp/.gitkeep b/pysrc/solweig/physics/__init__.py similarity index 100% rename from temp/.gitkeep rename to pysrc/solweig/physics/__init__.py diff --git a/pysrc/solweig/physics/clearnessindex_2013b.py b/pysrc/solweig/physics/clearnessindex_2013b.py new file mode 100644 index 0000000..b0ebbd2 --- /dev/null +++ b/pysrc/solweig/physics/clearnessindex_2013b.py @@ -0,0 +1,97 @@ +import math + +import numpy as np + +from . 
import sun_distance
+
+__author__ = "xlinfr"
+
+
+def clearnessindex_2013b(zen, jday, Ta, RH, radG, location, P):
+    """Clearness Index at the Earth's surface calculated from Crawford and Duchon 1999
+
+    :param zen: zenith angle in radians
+    :param jday: day of year
+    :param Ta: air temperature in deg C (Magnus dewpoint constants below assume Celsius)
+    :param RH: relative humidity as a 0-1 fraction (np.log(RH) below expects a fraction, not percent)
+    :param radG: global shortwave radiation
+    :param location: dictionary with 'latitude' in degrees (only latitude is read here)
+    :param P: pressure in kPa (-999.0 means missing; standard 1013 mb is then assumed)
+    :return: (I0, CI, Kt, I0et, CIuncorr) — clear-sky irradiance, corrected clearness index, ToA clearness index, extraterrestrial irradiance, uncorrected CI
+    """
+
+    p = 1013.0 if P == -999.0 else P * 10.0  # Pressure in millibars (input kPa x 10 -> mb)
+
+    Itoa = 1370.0  # Effective solar constant
+    D = sun_distance.sun_distance(jday)  # irradiance differences due to Sun-Earth distances
+    m = 35.0 * np.cos(zen) * ((1224.0 * (np.cos(zen) ** 2) + 1) ** (-1 / 2.0))  # optical air mass at p=1013
+    Trpg = (
+        1.021 - 0.084 * (m * (0.000949 * p + 0.051)) ** 0.5
+    )  # Transmission coefficient for Rayleigh scattering and permanent gases
+
+    # empirical constant G depending on latitude (used below in the precipitable-water estimate)
+    if location["latitude"] < 10.0:
+        G_coeffs = [3.37, 2.85, 2.80, 2.64]
+    elif location["latitude"] >= 10.0 and location["latitude"] < 20.0:
+        G_coeffs = [2.99, 3.02, 2.70, 2.93]
+    elif location["latitude"] >= 20.0 and location["latitude"] < 30.0:
+        G_coeffs = [3.60, 3.00, 2.98, 2.93]
+    elif location["latitude"] >= 30.0 and location["latitude"] < 40.0:
+        G_coeffs = [3.04, 3.11, 2.92, 2.94]
+    elif location["latitude"] >= 40.0 and location["latitude"] < 50.0:
+        G_coeffs = [2.70, 2.95, 2.77, 2.71]
+    elif location["latitude"] >= 50.0 and location["latitude"] < 60.0:
+        G_coeffs = [2.52, 3.07, 2.67, 2.93]
+    elif location["latitude"] >= 60.0 and location["latitude"] < 70.0:
+        G_coeffs = [1.76, 2.69, 2.61, 2.61]
+    elif location["latitude"] >= 70.0 and location["latitude"] < 80.0:
+        G_coeffs = [1.60, 1.67, 2.24, 2.63]
+    else:  # latitude >= 80.0
+        G_coeffs = [1.11, 1.44, 1.94, 2.02]
+
+    if jday > 335 or jday <= 60:
+        G: float = G_coeffs[0]
+    elif jday > 60 and jday <= 152:
+        G = G_coeffs[1]
+    elif jday > 152 and jday <= 244:
+        G = G_coeffs[2]
+    else:  # jday > 244 and jday <= 335
+        G = G_coeffs[3]
+
+    # dewpoint calculation
+    a2 = 17.27
+    b2 = 237.7
+    Td = (b2 * (((a2 * Ta) / (b2 + Ta)) + np.log(RH))) / (a2 - (((a2 * Ta) / (b2 + Ta)) + np.log(RH)))
+    Td = (Td * 1.8) + 32  # Dewpoint (F)
+    u = np.exp(0.1133 - np.log(G + 1) + 0.0393 * Td)  # Precipitable water
+    Tw = 1 - 0.077 * ((u * m) ** 0.3)  # Transmission coefficient for water vapor
+    Tar = 0.935**m  # Transmission coefficient for aerosols
+
+    I0 = Itoa * np.cos(zen) * Trpg * Tw * D * Tar
+    if abs(zen) > np.pi / 2:
+        I0 = 0
+    # b=I0==abs(zen)>np.pi/2
+    # I0(b==1)=0
+    # clear b;
+    if not (np.isreal(I0)):
+        I0 = 0
+
+    zen_deg = zen / np.pi * 180
+    log_arg = 90 - zen_deg
+    if log_arg < 0.01:
+        # Sun at or below horizon — clearness index undefined
+        return 0.0, float("Inf"), 0.0, 0.0, 0.0
+
+    corr = 0.1473 * np.log(log_arg) + 0.3454  # 20070329
+
+    if I0 == 0:
+        return 0.0, float("Inf"), 0.0, 0.0, 0.0
+
+    CIuncorr = radG / I0
+    CI = CIuncorr + (1 - corr)
+    I0et = Itoa * np.cos(zen) * D  # extraterrestrial solar radiation
+    Kt = radG / I0et if I0et != 0 else 0.0
+    if math.isnan(CI):
+        CI = float("Inf")
+
+    return I0, CI, Kt, I0et, CIuncorr
diff --git a/pysrc/solweig/physics/create_patches.py b/pysrc/solweig/physics/create_patches.py
new file mode 100644
index 0000000..ffa0f43
--- /dev/null
+++ b/pysrc/solweig/physics/create_patches.py
@@ -0,0 +1,77 @@
+"""
+Sky vault patch geometry — **reference implementation only**.
+
+Not called by the production ``calculate()`` API. The fused Rust pipeline
+constructs patch geometry internally.
+
+Retained for readability, tests, and validation against UMEP.
+""" + +import numpy as np + + +def create_patches(patch_option): + # patch_option = 1 = 145 patches (Robinson & Stone, 2004) + # patch_option = 2 = 153 patches (Wallenberg et al., 2022) + # patch_option = 3 = 306 patches -> test + # patch_option = 4 = 612 patches -> test + + skyvaultalt = np.atleast_2d([]) + skyvaultazi = np.atleast_2d([]) + + # Creating skyvault of patches of constant radians (Tregeneza and Sharples, 1993) + # Patch option 1, 145 patches, Original Robinson & Stone (2004) after Tregenza (1987)/Tregenza & Sharples (1993) + if patch_option == 1: + annulino = np.array([0, 12, 24, 36, 48, 60, 72, 84, 90]) + skyvaultaltint = np.array([6, 18, 30, 42, 54, 66, 78, 90]) # Robinson & Stone (2004) + azistart = np.array([0, 4, 2, 5, 8, 0, 10, 0]) # Fredrik/Nils + patches_in_band = np.array([30, 30, 24, 24, 18, 12, 6, 1]) # Robinson & Stone (2004) + # Patch option 2, 153 patches, Wallenberg et al. (2022) + elif patch_option == 2: + annulino = np.array([0, 12, 24, 36, 48, 60, 72, 84, 90]) + skyvaultaltint = np.array([6, 18, 30, 42, 54, 66, 78, 90]) # Robinson & Stone (2004) + azistart = np.array([0, 4, 2, 5, 8, 0, 10, 0]) # Fredrik/Nils + patches_in_band = np.array([31, 30, 28, 24, 19, 13, 7, 1]) # Nils + # Patch option 3, 306 patches, test + elif patch_option == 3: + annulino = np.array([0, 12, 24, 36, 48, 60, 72, 84, 90]) + skyvaultaltint = np.array([6, 18, 30, 42, 54, 66, 78, 90]) # Robinson & Stone (2004) + azistart = np.array([0, 4, 2, 5, 8, 0, 10, 0]) # Fredrik/Nils + patches_in_band = np.array([31 * 2, 30 * 2, 28 * 2, 24 * 2, 19 * 2, 13 * 2, 7 * 2, 1]) # Nils + # Patch option 4, 612 patches, test + elif patch_option == 4: + annulino = np.array([0, 4.5, 9, 15, 21, 27, 33, 39, 45, 51, 57, 63, 69, 75, 81, 90]) # Nils + skyvaultaltint = np.array([3, 9, 15, 21, 27, 33, 39, 45, 51, 57, 63, 69, 75, 81, 90]) # Nils + patches_in_band = np.array( + [ + 31 * 2, + 31 * 2, + 30 * 2, + 30 * 2, + 28 * 2, + 28 * 2, + 24 * 2, + 24 * 2, + 19 * 2, + 19 * 2, + 13 * 2, + 13 
* 2, + 7 * 2, + 7 * 2, + 1, + ] + ) # Nils + azistart = np.array([0, 0, 4, 4, 2, 2, 5, 5, 8, 8, 0, 0, 10, 10, 0]) # Nils + + skyvaultaziint = np.array([360 / patches for patches in patches_in_band]) + + for j in range(0, skyvaultaltint.shape[0]): + for k in range(0, patches_in_band[j]): + skyvaultalt = np.append(skyvaultalt, skyvaultaltint[j]) + skyvaultazi = np.append(skyvaultazi, k * skyvaultaziint[j] + azistart[j]) + + # skyvaultzen = (90 - skyvaultalt) * deg2rad + # skyvaultalt = skyvaultalt * deg2rad + # skyvaultazi = skyvaultazi * deg2rad + + return skyvaultalt, skyvaultazi, annulino, skyvaultaltint, patches_in_band, skyvaultaziint, azistart diff --git a/pysrc/solweig/physics/cylindric_wedge.py b/pysrc/solweig/physics/cylindric_wedge.py new file mode 100644 index 0000000..3403ef8 --- /dev/null +++ b/pysrc/solweig/physics/cylindric_wedge.py @@ -0,0 +1,109 @@ +import numpy as np + +from ..constants import MIN_SUN_ELEVATION_DEG + +# Convert to radians for internal use +_MIN_SUN_ALTITUDE_RAD = MIN_SUN_ELEVATION_DEG * (np.pi / 180.0) + + +def cylindric_wedge(zen, svfalfa, rows, cols): + """ + Fraction of sunlit walls based on sun altitude and SVF-weighted building angles. + + Args: + zen: Sun zenith angle (radians) + svfalfa: SVF-related angle grid (2D array, radians) + rows, cols: Grid dimensions (unused, kept for API compatibility) + + Returns: + F_sh: Shadow fraction grid (0 = fully sunlit, 1 = fully shaded) + + Note: + At very low sun altitudes (< 3°), returns F_sh = 1.0 to avoid + numerical instability from tan(zen) approaching infinity. 
+ """ + # Guard against low sun angles where tan(zen) → infinity + # zenith = 90° - altitude, so zen > 87° means altitude < 3° + altitude_rad = (np.pi / 2.0) - zen + if altitude_rad < _MIN_SUN_ALTITUDE_RAD: + # Sun too low - walls fully shaded + return np.ones_like(svfalfa, dtype=np.float32) + + # Pre-compute trigonometric values once (1.7x speedup) + tan_zen = np.tan(zen) + tan_alfa = np.tan(svfalfa) + + # Guard against very small tan_alfa (near-horizontal surfaces) + tan_alfa = np.maximum(tan_alfa, 1e-6) + + ba = 1.0 / tan_alfa + tan_product = tan_alfa * tan_zen + + # Guard against division by very small values + tan_product = np.maximum(tan_product, 1e-6) + + xa = 1 - 2.0 / tan_product + ha = 2.0 / tan_product + hkil = 2.0 * ba * ha + + # Use np.where for vectorized conditionals (avoids index assignment overhead) + mask = xa < 0 + qa = np.where(mask, tan_zen / 2, 0.0).astype(np.float32) + + # Compute Za with safe sqrt + ba_sq = ba**2 + Za_sq = np.maximum(ba_sq - (qa**2) / 4, 0) + Za = np.where(mask, np.sqrt(Za_sq), 0.0).astype(np.float32) + + # Safe arctan (avoid division by zero) + phi = np.where(mask & (qa > 1e-10), np.arctan(Za / np.maximum(qa, 1e-10)), 0.0).astype(np.float32) + + # Compute A with safe denominator + cos_phi = np.cos(phi) + sin_phi = np.sin(phi) + denom = np.maximum(1 - cos_phi, 1e-10) + A = np.where(mask, (sin_phi - phi * cos_phi) / denom, 0.0).astype(np.float32) + + ukil = np.where(mask, 2 * ba * xa * A, 0.0).astype(np.float32) + + Ssurf = hkil + ukil + F_sh = (2 * np.pi * ba - Ssurf) / (2 * np.pi * ba) + + return F_sh.astype(np.float32) + + +def cylindric_wedge_voxel(zen, svfalfa): + np.seterr(divide="ignore", invalid="ignore") + + # Fraction of sunlit walls based on sun altitude and svf wieghted building angles + # input: + # sun zenith angle "beta" + # svf related angle "alfa" + + beta = zen + + xa = 1 - 2.0 / (np.tan(svfalfa) * np.tan(beta)) + ha = 2.0 / (np.tan(svfalfa) * np.tan(beta)) + ba = 1.0 / np.tan(svfalfa) + hkil = 2.0 * ba * ha 
+ + qa = np.zeros((svfalfa.shape[0]), dtype=np.float32) + qa[xa < 0] = np.tan(beta) / 2 + + Za = np.zeros((svfalfa.shape[0]), dtype=np.float32) + Za[xa < 0] = ((ba[xa < 0] ** 2) - ((qa[xa < 0] ** 2) / 4)) ** 0.5 + + phi = np.zeros((svfalfa.shape[0]), dtype=np.float32) + phi[xa < 0] = np.arctan(Za[xa < 0] / qa[xa < 0]) + + A = np.zeros((svfalfa.shape[0]), dtype=np.float32) + A[xa < 0] = (np.sin(phi[xa < 0]) - phi[xa < 0] * np.cos(phi[xa < 0])) / (1 - np.cos(phi[xa < 0])) + + ukil = np.zeros((svfalfa.shape[0]), dtype=np.float32) + ukil[xa < 0] = 2 * ba[xa < 0] * xa[xa < 0] * A[xa < 0] + + Ssurf = hkil + ukil + + F_sh = (2 * np.pi * ba - Ssurf) / (2 * np.pi * ba) + + return F_sh diff --git a/pysrc/solweig/physics/daylen.py b/pysrc/solweig/physics/daylen.py new file mode 100644 index 0000000..442185e --- /dev/null +++ b/pysrc/solweig/physics/daylen.py @@ -0,0 +1,22 @@ +import numpy as np + + +def daylen(DOY, XLAT): + # Calculation of declination of sun (Eqn. 16). Amplitude= +/-23.45 + # deg. Minimum = DOY 355 (DEC 21), maximum = DOY 172.5 (JUN 21/22). + # Sun angles. SOC limited for latitudes above polar circles. + # Calculate daylength, sunrise and sunset (Eqn. 
17) + + RAD = np.pi / 180.0 + + DEC = -23.45 * np.cos(2.0 * np.pi * (DOY + 10.0) / 365.0) + + SOC = np.tan(RAD * DEC) * np.tan(RAD * XLAT) + SOC = min(max(SOC, -1.0), 1.0) + # SOC=alt + + DAYL = 12.0 + 24.0 * np.arcsin(SOC) / np.pi + SNUP = 12.0 - DAYL / 2.0 + SNDN = 12.0 + DAYL / 2.0 + + return DAYL, DEC, SNDN, SNUP diff --git a/pysrc/solweig/physics/diffusefraction.py b/pysrc/solweig/physics/diffusefraction.py new file mode 100644 index 0000000..a01e1f3 --- /dev/null +++ b/pysrc/solweig/physics/diffusefraction.py @@ -0,0 +1,48 @@ +import numpy as np + + +def diffusefraction(radG, altitude, Kt, Ta, RH): + """ + This function estimates diffuse and directbeam radiation according to + Reindl et al (1990), Solar Energy 45:1 + + :param radG: + :param altitude: + :param Kt: # radiation at the top of the atmosphere + :param Ta: + :param RH: + :return: + """ + + alfa = altitude * (np.pi / 180) + + if Ta <= -999.00 or RH <= -999.00 or np.isnan(Ta) or np.isnan(RH): + if Kt <= 0.3: + radD = radG * (1.020 - 0.248 * Kt) + elif Kt > 0.3 and Kt < 0.78: + radD = radG * (1.45 - 1.67 * Kt) + else: + radD = radG * 0.147 + else: + RH = RH / 100 + if Kt <= 0.3: + radD = radG * (1 - 0.232 * Kt + 0.0239 * np.sin(alfa) - 0.000682 * Ta + 0.0195 * RH) + elif Kt > 0.3 and Kt < 0.78: + radD = radG * (1.329 - 1.716 * Kt + 0.267 * np.sin(alfa) - 0.00357 * Ta + 0.106 * RH) + else: + radD = radG * (0.426 * Kt - 0.256 * np.sin(alfa) + 0.00349 * Ta + 0.0734 * RH) + + sin_alfa = np.sin(alfa) + radI = 0.0 if sin_alfa < 0.01 else (radG - radD) / sin_alfa + + # Corrections for low sun altitudes (20130307) + if radI < 0: + radI = 0 + + if altitude < 1 and radI > radG: + radI = radG + + if radD > radG: + radD = radG + + return radI, radD diff --git a/pysrc/solweig/physics/morphology.py b/pysrc/solweig/physics/morphology.py new file mode 100644 index 0000000..df4577f --- /dev/null +++ b/pysrc/solweig/physics/morphology.py @@ -0,0 +1,192 @@ +""" +Pure numpy implementations of morphological operations — 
**reference implementation only**. + +Not called by the production ``calculate()`` API. The fused Rust pipeline +uses ``crate::morphology`` internally. + +Retained for readability, tests, and validation against UMEP. +Originally replaced scipy.ndimage functions to eliminate the scipy dependency, +making the package lighter for QGIS plugin distribution. +""" + +from __future__ import annotations + +import numpy as np +from numpy.typing import NDArray + + +def rotate_array( + array: NDArray[np.floating], + angle: float, + order: int = 1, + reshape: bool = False, + mode: str = "nearest", +) -> NDArray[np.floating]: + """ + Rotate a 2D array by the given angle (in degrees). + + Pure numpy implementation replacing scipy.ndimage.interpolation.rotate. + + Args: + array: 2D input array to rotate. + angle: Rotation angle in degrees (counter-clockwise). + order: Interpolation order (0=nearest, 1=bilinear). + reshape: If True, output shape is adjusted to contain the whole rotated array. + If False (default), output has same shape as input. + mode: How to handle boundaries ('nearest', 'constant'). + + Returns: + Rotated array. 
+ """ + if reshape: + raise NotImplementedError("reshape=True not implemented") + + rows, cols = array.shape + # scipy uses pixel-centered coordinates: center is at (n-1)/2 for n pixels + center_y, center_x = (rows - 1) / 2, (cols - 1) / 2 + + # Convert angle to radians + theta = np.radians(angle) + cos_t, sin_t = np.cos(theta), np.sin(theta) + + # Create output array + output = np.zeros_like(array) + + # Create coordinate grids + y_indices, x_indices = np.mgrid[0:rows, 0:cols] + + # Translate to center, rotate, translate back (inverse mapping) + # For each output pixel, find the corresponding input pixel + x_centered = x_indices - center_x + y_centered = y_indices - center_y + + # Inverse rotation to find source coordinates + # scipy.ndimage.rotate uses counter-clockwise in image coordinates (y pointing down) + # For inverse mapping, we apply the transpose of the rotation matrix + src_x = cos_t * x_centered - sin_t * y_centered + center_x + src_y = sin_t * x_centered + cos_t * y_centered + center_y + + if order == 0: + # Nearest neighbor interpolation + src_x_int = np.round(src_x).astype(np.int32) + src_y_int = np.round(src_y).astype(np.int32) + + # Clip to valid range + src_x_int = np.clip(src_x_int, 0, cols - 1) + src_y_int = np.clip(src_y_int, 0, rows - 1) + + output = array[src_y_int, src_x_int] + + elif order == 1: + # Bilinear interpolation + x0 = np.floor(src_x).astype(np.int32) + y0 = np.floor(src_y).astype(np.int32) + x1 = x0 + 1 + y1 = y0 + 1 + + # Clip coordinates + x0_clipped = np.clip(x0, 0, cols - 1) + x1_clipped = np.clip(x1, 0, cols - 1) + y0_clipped = np.clip(y0, 0, rows - 1) + y1_clipped = np.clip(y1, 0, rows - 1) + + # Weights + wx = src_x - x0 + wy = src_y - y0 + wx = np.clip(wx, 0, 1) + wy = np.clip(wy, 0, 1) + + # Bilinear interpolation + output = ( + array[y0_clipped, x0_clipped] * (1 - wx) * (1 - wy) + + array[y0_clipped, x1_clipped] * wx * (1 - wy) + + array[y1_clipped, x0_clipped] * (1 - wx) * wy + + array[y1_clipped, x1_clipped] * wx * 
wy + ) + else: + raise ValueError(f"order must be 0 or 1, got {order}") + + return output.astype(array.dtype) + + +def binary_dilation( + input_array: NDArray[np.bool_], + structure: NDArray[np.bool_] | None = None, + iterations: int = 1, +) -> NDArray[np.bool_]: + """ + Perform binary dilation on a 2D boolean array. + + Pure numpy implementation replacing scipy.ndimage.binary_dilation. + + Args: + input_array: 2D boolean array to dilate. + structure: Structuring element (3x3 boolean array). + If None, uses 8-connectivity (all neighbors). + iterations: Number of times to apply dilation. + + Returns: + Dilated boolean array. + """ + if structure is None: + # Default: 8-connectivity (3x3 all ones) + structure = np.ones((3, 3), dtype=bool) + + result = input_array.copy() + + for _ in range(iterations): + # Pad the array + padded = np.pad(result, 1, mode="constant", constant_values=False) + new_result = np.zeros_like(result) + + # Apply structuring element + rows, cols = result.shape + struct_rows, struct_cols = structure.shape + offset_r = struct_rows // 2 + offset_c = struct_cols // 2 + + for dr in range(struct_rows): + for dc in range(struct_cols): + if structure[dr, dc]: + shifted = padded[ + 1 + dr - offset_r : 1 + rows + dr - offset_r, + 1 + dc - offset_c : 1 + cols + dc - offset_c, + ] + new_result |= shifted + + result = new_result + + return result + + +def generate_binary_structure(rank: int, connectivity: int) -> NDArray[np.bool_]: + """ + Generate a binary structuring element for morphological operations. + + Pure numpy implementation replacing scipy.ndimage.generate_binary_structure. + + Args: + rank: Number of dimensions (must be 2). + connectivity: 1 for 4-connectivity (cross), 2 for 8-connectivity (square). + + Returns: + 3x3 boolean structuring element. 
+ """ + if rank != 2: + raise ValueError(f"Only rank=2 supported, got {rank}") + + if connectivity == 1: + # 4-connectivity (cross pattern) + return np.array( + [ + [False, True, False], + [True, True, True], + [False, True, False], + ], + dtype=bool, + ) + elif connectivity == 2: + # 8-connectivity (all neighbors) + return np.ones((3, 3), dtype=bool) + else: + raise ValueError(f"connectivity must be 1 or 2, got {connectivity}") diff --git a/pysrc/solweig/physics/patch_radiation.py b/pysrc/solweig/physics/patch_radiation.py new file mode 100644 index 0000000..e8a2d12 --- /dev/null +++ b/pysrc/solweig/physics/patch_radiation.py @@ -0,0 +1,385 @@ +""" +Patch-level radiation helpers — **reference implementation only**. + +Not called by the production ``calculate()`` API. The fused Rust pipeline +computes patch radiation internally. + +Retained for readability, tests, and validation against UMEP. +""" + +import numpy as np + +from ..constants import KELVIN_OFFSET, SBC + + +def shortwave_from_sky(sky, angle_of_incidence, lumChi, steradian, patch_azimuth, cyl): + """Calculates the amount of diffuse shortwave radiation from the sky for a patch with: + angle of incidence = angle_of_incidence + luminance = lumChi + steradian = steradian""" + + # Diffuse vertical radiation + diffuse_shortwave_radiation = sky * lumChi * angle_of_incidence * steradian + + return diffuse_shortwave_radiation + + +def longwave_from_sky(sky, Lsky_side, Lsky_down, patch_azimuth): + # Degrees to radians + deg2rad = np.pi / 180 + + # Longwave radiation from sky to vertical surface + Ldown_sky = sky * Lsky_down + + # Longwave radiation from sky to horizontal surface + Lside_sky = sky * Lsky_side + + # + Least = np.zeros((sky.shape[0], sky.shape[1]), dtype=np.float32) + Lsouth = np.zeros((sky.shape[0], sky.shape[1]), dtype=np.float32) + Lwest = np.zeros((sky.shape[0], sky.shape[1]), dtype=np.float32) + Lnorth = np.zeros((sky.shape[0], sky.shape[1]), dtype=np.float32) + + # Portion into cardinal 
directions to be used for standing box or POI output + if (patch_azimuth > 360) or (patch_azimuth < 180): + Least = sky * Lsky_side * np.cos((90 - patch_azimuth) * deg2rad) + if (patch_azimuth > 90) and (patch_azimuth < 270): + Lsouth = sky * Lsky_side * np.cos((180 - patch_azimuth) * deg2rad) + if (patch_azimuth > 180) and (patch_azimuth < 360): + Lwest = sky * Lsky_side * np.cos((270 - patch_azimuth) * deg2rad) + if (patch_azimuth > 270) or (patch_azimuth < 90): + Lnorth = sky * Lsky_side * np.cos((0 - patch_azimuth) * deg2rad) + + return Lside_sky, Ldown_sky, Least, Lsouth, Lwest, Lnorth + + +def longwave_from_veg( + vegetation, steradian, angle_of_incidence, angle_of_incidence_h, patch_altitude, patch_azimuth, ewall, Ta +): + """Calculates the amount of longwave radiation from vegetation for a patch with: + angle of incidence = angle_of_incidence + steradian = steradian + if a patch is vegetation = vegetation + amount of radiation from vegetated patch = vegetation_surface""" + + # Degrees to radians + deg2rad = np.pi / 180 + + # Longwave radiation from vegetation surface (considered vertical) + vegetation_surface = (ewall * SBC * ((Ta + KELVIN_OFFSET) ** 4)) / np.pi + + # Longwave radiation reaching a vertical surface + Lside_veg = vegetation_surface * steradian * angle_of_incidence * vegetation + + # Longwave radiation reaching a horizontal surface + Ldown_veg = vegetation_surface * steradian * angle_of_incidence_h * vegetation + + # + Least = np.zeros((vegetation.shape[0], vegetation.shape[1]), dtype=np.float32) + Lsouth = np.zeros((vegetation.shape[0], vegetation.shape[1]), dtype=np.float32) + Lwest = np.zeros((vegetation.shape[0], vegetation.shape[1]), dtype=np.float32) + Lnorth = np.zeros((vegetation.shape[0], vegetation.shape[1]), dtype=np.float32) + + # Portion into cardinal directions to be used for standing box or POI output + if (patch_azimuth > 360) or (patch_azimuth < 180): + Least = ( + vegetation_surface + * steradian + * np.cos(patch_altitude * 
deg2rad) + * vegetation + * np.cos((90 - patch_azimuth) * deg2rad) + ) + if (patch_azimuth > 90) and (patch_azimuth < 270): + Lsouth = ( + vegetation_surface + * steradian + * np.cos(patch_altitude * deg2rad) + * vegetation + * np.cos((180 - patch_azimuth) * deg2rad) + ) + if (patch_azimuth > 180) and (patch_azimuth < 360): + Lwest = ( + vegetation_surface + * steradian + * np.cos(patch_altitude * deg2rad) + * vegetation + * np.cos((270 - patch_azimuth) * deg2rad) + ) + if (patch_azimuth > 270) or (patch_azimuth < 90): + Lnorth = ( + vegetation_surface + * steradian + * np.cos(patch_altitude * deg2rad) + * vegetation + * np.cos((0 - patch_azimuth) * deg2rad) + ) + + return Lside_veg, Ldown_veg, Least, Lsouth, Lwest, Lnorth + + +def longwave_from_buildings( + building, + steradian, + angle_of_incidence, + angle_of_incidence_h, + patch_azimuth, + sunlit_patches, + shaded_patches, + azimuth_difference, + solar_altitude, + ewall, + Ta, + Tgwall, +): + # Degrees to radians + deg2rad = np.pi / 180 + + # + Least = np.zeros((building.shape[0], building.shape[1]), dtype=np.float32) + Lsouth = np.zeros((building.shape[0], building.shape[1]), dtype=np.float32) + Lwest = np.zeros((building.shape[0], building.shape[1]), dtype=np.float32) + Lnorth = np.zeros((building.shape[0], building.shape[1]), dtype=np.float32) + + # Longwave radiation from sunlit surfaces + sunlit_surface = (ewall * SBC * ((Ta + Tgwall + KELVIN_OFFSET) ** 4)) / np.pi + # Longwave radiation from shaded surfaces + shaded_surface = (ewall * SBC * ((Ta + KELVIN_OFFSET) ** 4)) / np.pi + if (azimuth_difference > 90) and (azimuth_difference < 270) and (solar_altitude > 0): + # Calculate which patches defined as buildings that are sunlit or shaded + # sunlit_patches, shaded_patches = sunlit_shaded_patches.shaded_or_sunlit( + # solar_altitude, solar_azimuth, patch_altitude, patch_azimuth, asvf + # ) + + # Calculate longwave radiation from sunlit walls to vertical surface + Lside_sun = sunlit_surface * sunlit_patches 
* steradian * angle_of_incidence * building + # Calculate longwave radiation from shaded walls to vertical surface + Lside_sh = shaded_surface * shaded_patches * steradian * angle_of_incidence * building + + # Calculate longwave radiation from sunlit walls to horizontal surface + Ldown_sun = sunlit_surface * sunlit_patches * steradian * angle_of_incidence_h * building + # Calculate longwave radiation from shaded walls to horizontal surface + Ldown_sh = shaded_surface * shaded_patches * steradian * angle_of_incidence_h * building + + # Portion into cardinal directions to be used for standing box or POI output + if (patch_azimuth > 360) or (patch_azimuth < 180): + Least = ( + sunlit_surface + * sunlit_patches + * steradian + * angle_of_incidence + * building + * np.cos((90 - patch_azimuth) * deg2rad) + ) + Least += ( + shaded_surface + * shaded_patches + * steradian + * angle_of_incidence + * building + * np.cos((90 - patch_azimuth) * deg2rad) + ) + if (patch_azimuth > 90) and (patch_azimuth < 270): + Lsouth = ( + sunlit_surface + * sunlit_patches + * steradian + * angle_of_incidence + * building + * np.cos((180 - patch_azimuth) * deg2rad) + ) + Lsouth += ( + shaded_surface + * shaded_patches + * steradian + * angle_of_incidence + * building + * np.cos((180 - patch_azimuth) * deg2rad) + ) + if (patch_azimuth > 180) and (patch_azimuth < 360): + Lwest = ( + sunlit_surface + * sunlit_patches + * steradian + * angle_of_incidence + * building + * np.cos((270 - patch_azimuth) * deg2rad) + ) + Lwest += ( + shaded_surface + * shaded_patches + * steradian + * angle_of_incidence + * building + * np.cos((270 - patch_azimuth) * deg2rad) + ) + if (patch_azimuth > 270) or (patch_azimuth < 90): + Lnorth = ( + sunlit_surface + * sunlit_patches + * steradian + * angle_of_incidence + * building + * np.cos((0 - patch_azimuth) * deg2rad) + ) + Lnorth += ( + shaded_surface + * shaded_patches + * steradian + * angle_of_incidence + * building + * np.cos((0 - patch_azimuth) * deg2rad) + ) + 
+ else: + # Calculate longwave radiation from shaded walls reaching a vertical surface + Lside_sh = shaded_surface * steradian * angle_of_incidence * building + Lside_sun = np.zeros((Lside_sh.shape[0], Lside_sh.shape[1]), dtype=np.float32) + + # Calculate longwave radiation from shaded walls reaching a horizontal surface + Ldown_sh = shaded_surface * steradian * angle_of_incidence_h * building + Ldown_sun = np.zeros((Lside_sh.shape[0], Lside_sh.shape[1]), dtype=np.float32) + + # Portion into cardinal directions to be used for standing box or POI output + if (patch_azimuth > 360) or (patch_azimuth < 180): + Least = shaded_surface * steradian * angle_of_incidence * building * np.cos((90 - patch_azimuth) * deg2rad) + if (patch_azimuth > 90) and (patch_azimuth < 270): + Lsouth = ( + shaded_surface * steradian * angle_of_incidence * building * np.cos((180 - patch_azimuth) * deg2rad) + ) + if (patch_azimuth > 180) and (patch_azimuth < 360): + Lwest = shaded_surface * steradian * angle_of_incidence * building * np.cos((270 - patch_azimuth) * deg2rad) + if (patch_azimuth > 270) or (patch_azimuth < 90): + Lnorth = shaded_surface * steradian * angle_of_incidence * building * np.cos((0 - patch_azimuth) * deg2rad) + + return Lside_sun, Lside_sh, Ldown_sun, Ldown_sh, Least, Lsouth, Lwest, Lnorth + + +def longwave_from_buildings_wallScheme( + voxelMaps, voxelTable, steradian, angle_of_incidence, angle_of_incidence_h, patch_azimuth +): + # Degrees to radians + deg2rad = np.pi / 180 + + # + Lside = np.zeros((voxelMaps.shape[0], voxelMaps.shape[1]), dtype=np.float32) + Lside_sh = np.zeros((voxelMaps.shape[0], voxelMaps.shape[1]), dtype=np.float32) + Ldown = np.zeros((voxelMaps.shape[0], voxelMaps.shape[1]), dtype=np.float32) + Ldown_sh = np.zeros((voxelMaps.shape[0], voxelMaps.shape[1]), dtype=np.float32) + Least = np.zeros((voxelMaps.shape[0], voxelMaps.shape[1]), dtype=np.float32) + Lsouth = np.zeros((voxelMaps.shape[0], voxelMaps.shape[1]), dtype=np.float32) + Lwest = 
np.zeros((voxelMaps.shape[0], voxelMaps.shape[1]), dtype=np.float32) + Lnorth = np.zeros((voxelMaps.shape[0], voxelMaps.shape[1]), dtype=np.float32) + + # print(voxelMaps) + # print(voxelTable.head()) + unique_ids = list(np.unique(voxelMaps)[1:]) + # print(unique_ids) + lw_rad_dict = dict(voxelTable.loc[unique_ids, "LongwaveRadiation"]) + # print(lw_rad_dict) + patch_radiation = np.vectorize(lw_rad_dict.get)(voxelMaps).astype(float) + patch_radiation[np.isnan(patch_radiation)] = 0 + Lside += patch_radiation * steradian * angle_of_incidence + Ldown += patch_radiation * steradian * angle_of_incidence_h + + # Portion into cardinal directions to be used for standing box or POI output + if (patch_azimuth > 360) or (patch_azimuth < 180): + Least = patch_radiation * steradian * angle_of_incidence * np.cos((90 - patch_azimuth) * deg2rad) + if (patch_azimuth > 90) and (patch_azimuth < 270): + Lsouth = patch_radiation * steradian * angle_of_incidence * np.cos((180 - patch_azimuth) * deg2rad) + if (patch_azimuth > 180) and (patch_azimuth < 360): + Lwest = patch_radiation * steradian * angle_of_incidence * np.cos((270 - patch_azimuth) * deg2rad) + if (patch_azimuth > 270) or (patch_azimuth < 90): + Lnorth = patch_radiation * steradian * angle_of_incidence * np.cos((0 - patch_azimuth) * deg2rad) + + return Lside, Lside_sh, Ldown, Ldown_sh, Least, Lsouth, Lwest, Lnorth + + +def reflected_longwave( + reflecting_surface, steradian, angle_of_incidence, angle_of_incidence_h, patch_azimuth, Ldown_sky, Lup, ewall +): + # Degrees to radians + deg2rad = np.pi / 180 + + # Calculate reflected longwave in each patch + reflected_radiation = ((Ldown_sky + Lup) * (1 - ewall) * 0.5) / np.pi + + # Reflected longwave radiation reaching vertical surfaces + Lside_ref = reflected_radiation * steradian * angle_of_incidence * reflecting_surface + + # Reflected longwave radiation reaching horizontal surfaces + Ldown_ref = reflected_radiation * steradian * angle_of_incidence_h * reflecting_surface + + 
# + Least = np.zeros((reflecting_surface.shape[0], reflecting_surface.shape[1]), dtype=np.float32) + Lsouth = np.zeros((reflecting_surface.shape[0], reflecting_surface.shape[1]), dtype=np.float32) + Lwest = np.zeros((reflecting_surface.shape[0], reflecting_surface.shape[1]), dtype=np.float32) + Lnorth = np.zeros((reflecting_surface.shape[0], reflecting_surface.shape[1]), dtype=np.float32) + + # Portion into cardinal directions to be used for standing box or POI output + if (patch_azimuth > 360) or (patch_azimuth < 180): + Least = ( + reflected_radiation + * steradian + * angle_of_incidence + * reflecting_surface + * np.cos((90 - patch_azimuth) * deg2rad) + ) + if (patch_azimuth > 90) and (patch_azimuth < 270): + Lsouth = ( + reflected_radiation + * steradian + * angle_of_incidence + * reflecting_surface + * np.cos((180 - patch_azimuth) * deg2rad) + ) + if (patch_azimuth > 180) and (patch_azimuth < 360): + Lwest = ( + reflected_radiation + * steradian + * angle_of_incidence + * reflecting_surface + * np.cos((270 - patch_azimuth) * deg2rad) + ) + if (patch_azimuth > 270) or (patch_azimuth < 90): + Lnorth = ( + reflected_radiation + * steradian + * angle_of_incidence + * reflecting_surface + * np.cos((0 - patch_azimuth) * deg2rad) + ) + + return Lside_ref, Ldown_ref, Least, Lsouth, Lwest, Lnorth + + +def patch_steradians(L_patches): + """'This function calculates the steradians of the patches""" + + # Degrees to radians + deg2rad = np.pi / 180 + + # Unique altitudes for patches + skyalt, skyalt_c = np.unique(L_patches[:, 0], return_counts=True) + + # Altitudes of the Robinson & Stone patches + patch_altitude = L_patches[:, 0] + + # Calculation of steradian for each patch + # Build scalar lookup once to avoid array->scalar coercion warnings. 
+ count_by_altitude = {float(alt): float(count) for alt, count in zip(skyalt, skyalt_c, strict=False)} + steradian = np.zeros((patch_altitude.shape[0]), dtype=np.float32) + for i in range(patch_altitude.shape[0]): + band_count = count_by_altitude[float(patch_altitude[i])] + # If there are more than one patch in a band + if band_count > 1: + steradian[i] = ((360 / band_count) * deg2rad) * ( + np.sin((patch_altitude[i] + patch_altitude[0]) * deg2rad) + - np.sin((patch_altitude[i] - patch_altitude[0]) * deg2rad) + ) + # If there is only one patch in band, i.e. 90 degrees + else: + steradian[i] = ((360 / band_count) * deg2rad) * ( + np.sin((patch_altitude[i]) * deg2rad) - np.sin((patch_altitude[i - 1] + patch_altitude[0]) * deg2rad) + ) + + return steradian, skyalt, patch_altitude diff --git a/pysrc/solweig/physics/sun_distance.py b/pysrc/solweig/physics/sun_distance.py new file mode 100644 index 0000000..68c2034 --- /dev/null +++ b/pysrc/solweig/physics/sun_distance.py @@ -0,0 +1,20 @@ +__author__ = "xlinfr" +import numpy as np + + +def sun_distance(jday): + """ + + #% Calculatesrelative earth sun distance + #% with day of year as input. + #% Partridge and Platt, 1975 + """ + b = 2.0 * np.pi * jday / 365.0 + D = np.sqrt( + 1.00011 + + np.dot(0.034221, np.cos(b)) + + np.dot(0.001280, np.sin(b)) + + np.dot(0.000719, np.cos(2.0 * b)) + + np.dot(0.000077, np.sin(2.0 * b)) + ) + return D diff --git a/pysrc/solweig/physics/sun_position.py b/pysrc/solweig/physics/sun_position.py new file mode 100644 index 0000000..3875546 --- /dev/null +++ b/pysrc/solweig/physics/sun_position.py @@ -0,0 +1,1061 @@ +import numpy as np + + +def sun_position(time, location): + """ + % sun = sun_position(time, location) + % + % This function compute the sun position (zenith and azimuth angle at the observer + % location) as a function of the observer local time and position. + % + % It is an implementation of the algorithm presented by Reda et Andreas in: + % Reda, I., Andreas, A. 
(2003) Solar position algorithm for solar + % radiation application. National Renewable Energy Laboratory (NREL) + % Technical report NREL/TP-560-34302. + % This document is avalaible at www.osti.gov/bridge + % + % This algorithm is based on numerical approximation of the exact equations. + % The authors of the original paper state that this algorithm should be + % precise at +/- 0.0003 degrees. I have compared it to NOAA solar table + % (http://www.srrb.noaa.gov/highlights/sunrise/azel.html) and to USNO solar + % table (http://aa.usno.navy.mil/data/docs/AltAz.html) and found very good + % correspondance (up to the precision of those tables), except for large + % zenith angle, where the refraction by the atmosphere is significant + % (difference of about 1 degree). Note that in this code the correction + % for refraction in the atmosphere as been implemented for a temperature + % of 10C (283 kelvins) and a pressure of 1010 mbar. See the subfunction + % �sun_topocentric_zenith_angle_calculation� for a possible modification + % to explicitely model the effect of temperature and pressure as describe + % in Reda & Andreas (2003). + % + % Input parameters: + % time: a structure that specify the time when the sun position is + % calculated. + % time.year: year. Valid for [-2000, 6000] + % time.month: month [1-12] + % time.day: calendar day [1-31] + % time.hour: local hour [0-23] + % time.min: minute [0-59] + % time.sec: second [0-59] + % time.UTC: offset hour from UTC. Local time = Greenwich time + time.UTC + % This input can also be passed using the Matlab time format ('dd-mmm-yyyy HH:MM:SS'). 
+ % In that case, the time has to be specified as UTC time (time.UTC = 0) + % + % location: a structure that specify the location of the observer + % location.latitude: latitude (in degrees, north of equator is + % positive) + % location.longitude: longitude (in degrees, positive for east of + % Greenwich) + % location.altitude: altitude above mean sea level (in meters) + % + % Output parameters + % sun: a structure with the calculated sun position + % sun.zenith = zenith angle in degrees (angle from the vertical) + % sun.azimuth = azimuth angle in degrees, eastward from the north. + % Only the sun zenith and azimuth angles are returned as output, but a lot + % of other parameters are calculated that could also extracted as output of + % this function. + % + % Exemple of use + % + % location.longitude = -105.1786; + % location.latitude = 39.742476; + % location.altitude = 1830.14; + % time.year = 2005; + % time.month = 10; + % time.day = 17; + % time.hour = 6; + % time.min = 30; + % time.sec = 30; + % time.UTC = -7; + % % + % location.longitude = 11.94; + % location.latitude = 57.70; + % location.altitude = 3.0; + % time.UTC = 1; + % sun = sun_position(time, location); + % + % sun = + % + % zenith: 50.1080438859849 + % azimuth: 194.341174010338 + % + % History + % 09/03/2004 Original creation by Vincent Roy (vincent.roy@drdc-rddc.gc.ca) + % 10/03/2004 Fixed a bug in julian_calculation subfunction (was + % incorrect for year 1582 only), Vincent Roy + % 18/03/2004 Correction to the header (help display) only. No changes to + % the code. (changed the �elevation� field in �location� structure + % information to �altitude�), Vincent Roy + % 13/04/2004 Following a suggestion from Jody Klymak (jklymak@ucsd.edu), + % allowed the 'time' input to be passed as a Matlab time string. + % 22/08/2005 Following a bug report from Bruce Bowler + % (bbowler@bigelow.org), modified the julian_calculation function. 
Bug + % was 'MATLAB has allowed structure assignment to a non-empty non-structure + % to overwrite the previous value. This behavior will continue in this release, + % but will be an error in a future version of MATLAB. For advice on how to + % write code that will both avoid this warning and work in future versions of + % MATLAB, see R14SP2 Release Notes'. Script should now be + % compliant with futher release of Matlab... + """ + + # 1. Calculate the Julian Day, and Century. Julian Ephemeris day, century + # and millenium are calculated using a mean delta_t of 33.184 seconds. + julian = julian_calculation(time) + # print(julian) + + # 2. Calculate the Earth heliocentric longitude, latitude, and radius + # vector (L, B, and R) + earth_heliocentric_position = earth_heliocentric_position_calculation(julian) + + # 3. Calculate the geocentric longitude and latitude + sun_geocentric_position = sun_geocentric_position_calculation(earth_heliocentric_position) + + # 4. Calculate the nutation in longitude and obliquity (in degrees). + nutation = nutation_calculation(julian) + + # 5. Calculate the true obliquity of the ecliptic (in degrees). + true_obliquity = true_obliquity_calculation(julian, nutation) + + # 6. Calculate the aberration correction (in degrees) + aberration_correction = abberation_correction_calculation(earth_heliocentric_position) + + # 7. Calculate the apparent sun longitude in degrees) + apparent_sun_longitude = apparent_sun_longitude_calculation( + sun_geocentric_position, nutation, aberration_correction + ) + + # 8. Calculate the apparent sideral time at Greenwich (in degrees) + apparent_stime_at_greenwich = apparent_stime_at_greenwich_calculation(julian, nutation, true_obliquity) + + # 9. Calculate the sun rigth ascension (in degrees) + sun_rigth_ascension = sun_rigth_ascension_calculation( + apparent_sun_longitude, true_obliquity, sun_geocentric_position + ) + + # 10. Calculate the geocentric sun declination (in degrees). 
Positive or + # negative if the sun is north or south of the celestial equator. + sun_geocentric_declination = sun_geocentric_declination_calculation( + apparent_sun_longitude, true_obliquity, sun_geocentric_position + ) + + # 11. Calculate the observer local hour angle (in degrees, westward from south). + observer_local_hour = observer_local_hour_calculation(apparent_stime_at_greenwich, location, sun_rigth_ascension) + + # 12. Calculate the topocentric sun position (rigth ascension, declination and + # rigth ascension parallax in degrees) + topocentric_sun_position = topocentric_sun_position_calculate( + earth_heliocentric_position, location, observer_local_hour, sun_rigth_ascension, sun_geocentric_declination + ) + + # 13. Calculate the topocentric local hour angle (in degrees) + topocentric_local_hour = topocentric_local_hour_calculate(observer_local_hour, topocentric_sun_position) + + # 14. Calculate the topocentric zenith and azimuth angle (in degrees) + sun = sun_topocentric_zenith_angle_calculate(location, topocentric_sun_position, topocentric_local_hour) + + return sun + + +def julian_calculation(t_input): + """ + % This function compute the julian day and julian century from the local + % time and timezone information. Ephemeris are calculated with a delta_t=0 + % seconds. + + % If time input is a Matlab time string, extract the information from + % this string and create the structure as defined in the main header of + % this script. 
+ """ + if not isinstance(t_input, dict): + # tt = datetime.datetime.strptime(t_input, "%Y-%m-%d %H:%M:%S.%f") # if t_input is a string of this format + # t_input should be a datetime object + time = dict() + time["UTC"] = 0 + time["year"] = t_input.year + time["month"] = t_input.month + time["day"] = t_input.day + time["hour"] = t_input.hour + time["min"] = t_input.minute + time["sec"] = t_input.second + else: + time = t_input + + if time["month"] == 1 or time["month"] == 2: + Y = time["year"] - 1 + M = time["month"] + 12 + else: + Y = time["year"] + M = time["month"] + + ut_time = ( + ((time["hour"] - time["UTC"]) / 24) + (time["min"] / (60 * 24)) + (time["sec"] / (60 * 60 * 24)) + ) # time of day in UT time. + D = time["day"] + ut_time # Day of month in decimal time, ex. 2sd day of month at 12:30:30UT, D=2.521180556 + + # In 1582, the gregorian calendar was adopted + if time["year"] == 1582: + if time["month"] == 10: + if time["day"] <= 4: # The Julian calendar ended on October 4, 1582 + B = 0 + elif time["day"] >= 15: # The Gregorian calendar started on October 15, 1582 + A = np.floor(Y / 100) + B = 2 - A + np.floor(A / 4) + else: + print("This date never existed!. 
Date automatically set to October 4, 1582") + time["month"] = 10 + time["day"] = 4 + B = 0 + elif time["month"] < 10: # Julian calendar + B = 0 + else: # Gregorian calendar + A = np.floor(Y / 100) + B = 2 - A + np.floor(A / 4) + elif time["year"] < 1582: # Julian calendar + B = 0 + else: + A = np.floor(Y / 100) # Gregorian calendar + B = 2 - A + np.floor(A / 4) + + julian = dict() + julian["day"] = D + B + np.floor(365.25 * (Y + 4716)) + np.floor(30.6001 * (M + 1)) - 1524.5 + + delta_t = 0 # 33.184; + julian["ephemeris_day"] = (julian["day"]) + (delta_t / 86400) + julian["century"] = (julian["day"] - 2451545) / 36525 + julian["ephemeris_century"] = (julian["ephemeris_day"] - 2451545) / 36525 + julian["ephemeris_millenium"] = (julian["ephemeris_century"]) / 10 + + return julian + + +def earth_heliocentric_position_calculation(julian): + """ + % This function compute the earth position relative to the sun, using + % tabulated values. + + % Tabulated values for the longitude calculation + % L terms from the original code. + """ + # Tabulated values for the longitude calculation + # L terms from the original code. 
+ L0_terms = np.array( + [ + [175347046.0, 0, 0], + [3341656.0, 4.6692568, 6283.07585], + [34894.0, 4.6261, 12566.1517], + [3497.0, 2.7441, 5753.3849], + [3418.0, 2.8289, 3.5231], + [3136.0, 3.6277, 77713.7715], + [2676.0, 4.4181, 7860.4194], + [2343.0, 6.1352, 3930.2097], + [1324.0, 0.7425, 11506.7698], + [1273.0, 2.0371, 529.691], + [1199.0, 1.1096, 1577.3435], + [990, 5.233, 5884.927], + [902, 2.045, 26.298], + [857, 3.508, 398.149], + [780, 1.179, 5223.694], + [753, 2.533, 5507.553], + [505, 4.583, 18849.228], + [492, 4.205, 775.523], + [357, 2.92, 0.067], + [317, 5.849, 11790.629], + [284, 1.899, 796.298], + [271, 0.315, 10977.079], + [243, 0.345, 5486.778], + [206, 4.806, 2544.314], + [205, 1.869, 5573.143], + [202, 2.4458, 6069.777], + [156, 0.833, 213.299], + [132, 3.411, 2942.463], + [126, 1.083, 20.775], + [115, 0.645, 0.98], + [103, 0.636, 4694.003], + [102, 0.976, 15720.839], + [102, 4.267, 7.114], + [99, 6.21, 2146.17], + [98, 0.68, 155.42], + [86, 5.98, 161000.69], + [85, 1.3, 6275.96], + [85, 3.67, 71430.7], + [80, 1.81, 17260.15], + [79, 3.04, 12036.46], + [71, 1.76, 5088.63], + [74, 3.5, 3154.69], + [74, 4.68, 801.82], + [70, 0.83, 9437.76], + [62, 3.98, 8827.39], + [61, 1.82, 7084.9], + [57, 2.78, 6286.6], + [56, 4.39, 14143.5], + [56, 3.47, 6279.55], + [52, 0.19, 12139.55], + [52, 1.33, 1748.02], + [51, 0.28, 5856.48], + [49, 0.49, 1194.45], + [41, 5.37, 8429.24], + [41, 2.4, 19651.05], + [39, 6.17, 10447.39], + [37, 6.04, 10213.29], + [37, 2.57, 1059.38], + [36, 1.71, 2352.87], + [36, 1.78, 6812.77], + [33, 0.59, 17789.85], + [30, 0.44, 83996.85], + [30, 2.74, 1349.87], + [25, 3.16, 4690.48], + ] + ) + + L1_terms = np.array( + [ + [628331966747.0, 0, 0], + [206059.0, 2.678235, 6283.07585], + [4303.0, 2.6351, 12566.1517], + [425.0, 1.59, 3.523], + [119.0, 5.796, 26.298], + [109.0, 2.966, 1577.344], + [93, 2.59, 18849.23], + [72, 1.14, 529.69], + [68, 1.87, 398.15], + [67, 4.41, 5507.55], + [59, 2.89, 5223.69], + [56, 2.17, 155.42], + [45, 0.4, 
796.3], + [36, 0.47, 775.52], + [29, 2.65, 7.11], + [21, 5.34, 0.98], + [19, 1.85, 5486.78], + [19, 4.97, 213.3], + [17, 2.99, 6275.96], + [16, 0.03, 2544.31], + [16, 1.43, 2146.17], + [15, 1.21, 10977.08], + [12, 2.83, 1748.02], + [12, 3.26, 5088.63], + [12, 5.27, 1194.45], + [12, 2.08, 4694], + [11, 0.77, 553.57], + [10, 1.3, 3286.6], + [10, 4.24, 1349.87], + [9, 2.7, 242.73], + [9, 5.64, 951.72], + [8, 5.3, 2352.87], + [6, 2.65, 9437.76], + [6, 4.67, 4690.48], + ] + ) + + L2_terms = np.array( + [ + [52919.0, 0, 0], + [8720.0, 1.0721, 6283.0758], + [309.0, 0.867, 12566.152], + [27, 0.05, 3.52], + [16, 5.19, 26.3], + [16, 3.68, 155.42], + [10, 0.76, 18849.23], + [9, 2.06, 77713.77], + [7, 0.83, 775.52], + [5, 4.66, 1577.34], + [4, 1.03, 7.11], + [4, 3.44, 5573.14], + [3, 5.14, 796.3], + [3, 6.05, 5507.55], + [3, 1.19, 242.73], + [3, 6.12, 529.69], + [3, 0.31, 398.15], + [3, 2.28, 553.57], + [2, 4.38, 5223.69], + [2, 3.75, 0.98], + ] + ) + + L3_terms = np.array( + [ + [289.0, 5.844, 6283.076], + [35, 0, 0], + [17, 5.49, 12566.15], + [3, 5.2, 155.42], + [1, 4.72, 3.52], + [1, 5.3, 18849.23], + [1, 5.97, 242.73], + ] + ) + L4_terms = np.array([[114.0, 3.142, 0], [8, 4.13, 6283.08], [1, 3.84, 12566.15]]) + + L5_terms = np.array([1, 3.14, 0]) + L5_terms = np.atleast_2d(L5_terms) # since L5_terms is 1D, we have to convert it to 2D to avoid indexErrors + + A0 = L0_terms[:, 0] + B0 = L0_terms[:, 1] + C0 = L0_terms[:, 2] + + A1 = L1_terms[:, 0] + B1 = L1_terms[:, 1] + C1 = L1_terms[:, 2] + + A2 = L2_terms[:, 0] + B2 = L2_terms[:, 1] + C2 = L2_terms[:, 2] + + A3 = L3_terms[:, 0] + B3 = L3_terms[:, 1] + C3 = L3_terms[:, 2] + + A4 = L4_terms[:, 0] + B4 = L4_terms[:, 1] + C4 = L4_terms[:, 2] + + A5 = L5_terms[:, 0] + B5 = L5_terms[:, 1] + C5 = L5_terms[:, 2] + + JME = julian["ephemeris_millenium"] + + # Compute the Earth Heliochentric longitude from the tabulated values. 
+ L0 = np.sum(A0 * np.cos(B0 + (C0 * JME))) + L1 = np.sum(A1 * np.cos(B1 + (C1 * JME))) + L2 = np.sum(A2 * np.cos(B2 + (C2 * JME))) + L3 = np.sum(A3 * np.cos(B3 + (C3 * JME))) + L4 = np.sum(A4 * np.cos(B4 + (C4 * JME))) + L5 = A5 * np.cos(B5 + (C5 * JME)) + + earth_heliocentric_position = dict() + earth_heliocentric_position["longitude"] = ( + L0 + + (L1 * JME) + + (L2 * np.power(JME, 2)) + + (L3 * np.power(JME, 3)) + + (L4 * np.power(JME, 4)) + + (L5 * np.power(JME, 5)) + ) / 1e8 + # Convert the longitude to degrees. + earth_heliocentric_position["longitude"] = earth_heliocentric_position["longitude"] * 180 / np.pi + + # Limit the range to [0,360] + earth_heliocentric_position["longitude"] = set_to_range(earth_heliocentric_position["longitude"], 0, 360) + + # Tabulated values for the earth heliocentric latitude. + # B terms from the original code. + B0_terms = np.array( + [[280.0, 3.199, 84334.662], [102.0, 5.422, 5507.553], [80, 3.88, 5223.69], [44, 3.7, 2352.87], [32, 4, 1577.34]] + ) + + B1_terms = np.array([[9, 3.9, 5507.55], [6, 1.73, 5223.69]]) + + A0 = B0_terms[:, 0] + B0 = B0_terms[:, 1] + C0 = B0_terms[:, 2] + + A1 = B1_terms[:, 0] + B1 = B1_terms[:, 1] + C1 = B1_terms[:, 2] + + L0 = np.sum(A0 * np.cos(B0 + (C0 * JME))) + L1 = np.sum(A1 * np.cos(B1 + (C1 * JME))) + + earth_heliocentric_position["latitude"] = (L0 + (L1 * JME)) / 1e8 + + # Convert the latitude to degrees. + earth_heliocentric_position["latitude"] = earth_heliocentric_position["latitude"] * 180 / np.pi + + # Limit the range to [0,360]; + earth_heliocentric_position["latitude"] = set_to_range(earth_heliocentric_position["latitude"], 0, 360) + + # Tabulated values for radius vector. 
+ # R terms from the original code + R0_terms = np.array( + [ + [100013989.0, 0, 0], + [1670700.0, 3.0984635, 6283.07585], + [13956.0, 3.05525, 12566.1517], + [3084.0, 5.1985, 77713.7715], + [1628.0, 1.1739, 5753.3849], + [1576.0, 2.8469, 7860.4194], + [925.0, 5.453, 11506.77], + [542.0, 4.564, 3930.21], + [472.0, 3.661, 5884.927], + [346.0, 0.964, 5507.553], + [329.0, 5.9, 5223.694], + [307.0, 0.299, 5573.143], + [243.0, 4.273, 11790.629], + [212.0, 5.847, 1577.344], + [186.0, 5.022, 10977.079], + [175.0, 3.012, 18849.228], + [110.0, 5.055, 5486.778], + [98, 0.89, 6069.78], + [86, 5.69, 15720.84], + [86, 1.27, 161000.69], + [85, 0.27, 17260.15], + [63, 0.92, 529.69], + [57, 2.01, 83996.85], + [56, 5.24, 71430.7], + [49, 3.25, 2544.31], + [47, 2.58, 775.52], + [45, 5.54, 9437.76], + [43, 6.01, 6275.96], + [39, 5.36, 4694], + [38, 2.39, 8827.39], + [37, 0.83, 19651.05], + [37, 4.9, 12139.55], + [36, 1.67, 12036.46], + [35, 1.84, 2942.46], + [33, 0.24, 7084.9], + [32, 0.18, 5088.63], + [32, 1.78, 398.15], + [28, 1.21, 6286.6], + [28, 1.9, 6279.55], + [26, 4.59, 10447.39], + ] + ) + + R1_terms = np.array( + [ + [103019.0, 1.10749, 6283.07585], + [1721.0, 1.0644, 12566.1517], + [702.0, 3.142, 0], + [32, 1.02, 18849.23], + [31, 2.84, 5507.55], + [25, 1.32, 5223.69], + [18, 1.42, 1577.34], + [10, 5.91, 10977.08], + [9, 1.42, 6275.96], + [9, 0.27, 5486.78], + ] + ) + + R2_terms = np.array( + [ + [4359.0, 5.7846, 6283.0758], + [124.0, 5.579, 12566.152], + [12, 3.14, 0], + [9, 3.63, 77713.77], + [6, 1.87, 5573.14], + [3, 5.47, 18849], + ] + ) + + R3_terms = np.array([[145.0, 4.273, 6283.076], [7, 3.92, 12566.15]]) + + R4_terms = [4, 2.56, 6283.08] + R4_terms = np.atleast_2d(R4_terms) # since L5_terms is 1D, we have to convert it to 2D to avoid indexErrors + + A0 = R0_terms[:, 0] + B0 = R0_terms[:, 1] + C0 = R0_terms[:, 2] + + A1 = R1_terms[:, 0] + B1 = R1_terms[:, 1] + C1 = R1_terms[:, 2] + + A2 = R2_terms[:, 0] + B2 = R2_terms[:, 1] + C2 = R2_terms[:, 2] + + A3 = 
R3_terms[:, 0] + B3 = R3_terms[:, 1] + C3 = R3_terms[:, 2] + + A4 = R4_terms[:, 0] + B4 = R4_terms[:, 1] + C4 = R4_terms[:, 2] + + # Compute the Earth heliocentric radius vector + L0 = np.sum(A0 * np.cos(B0 + (C0 * JME))) + L1 = np.sum(A1 * np.cos(B1 + (C1 * JME))) + L2 = np.sum(A2 * np.cos(B2 + (C2 * JME))) + L3 = np.sum(A3 * np.cos(B3 + (C3 * JME))) + L4 = A4 * np.cos(B4 + (C4 * JME)) + + # Units are in AU + earth_heliocentric_position["radius"] = ( + L0 + (L1 * JME) + (L2 * np.power(JME, 2)) + (L3 * np.power(JME, 3)) + (L4 * np.power(JME, 4)) + ) / 1e8 + + return earth_heliocentric_position + + +def sun_geocentric_position_calculation(earth_heliocentric_position): + """ + % This function compute the sun position relative to the earth. + """ + sun_geocentric_position = dict() + sun_geocentric_position["longitude"] = earth_heliocentric_position["longitude"] + 180 + # Limit the range to [0,360]; + sun_geocentric_position["longitude"] = set_to_range(sun_geocentric_position["longitude"], 0, 360) + + sun_geocentric_position["latitude"] = -earth_heliocentric_position["latitude"] + # Limit the range to [0,360] + sun_geocentric_position["latitude"] = set_to_range(sun_geocentric_position["latitude"], 0, 360) + return sun_geocentric_position + + +def nutation_calculation(julian): + """ + % This function compute the nutation in longtitude and in obliquity, in + % degrees. + :param julian: + :return: nutation + """ + + # All Xi are in degrees. + JCE = julian["ephemeris_century"] + + # 1. Mean elongation of the moon from the sun + p = np.atleast_2d([(1 / 189474), -0.0019142, 445267.11148, 297.85036]) + + # X0 = polyval(p, JCE); + X0 = ( + p[0, 0] * np.power(JCE, 3) + p[0, 1] * np.power(JCE, 2) + p[0, 2] * JCE + p[0, 3] + ) # This is faster than polyval... + + # 2. 
Mean anomaly of the sun (earth) + p = np.atleast_2d([-(1 / 300000), -0.0001603, 35999.05034, 357.52772]) + + # X1 = polyval(p, JCE) + X1 = p[0, 0] * np.power(JCE, 3) + p[0, 1] * np.power(JCE, 2) + p[0, 2] * JCE + p[0, 3] + + # 3. Mean anomaly of the moon + p = np.atleast_2d([(1 / 56250), 0.0086972, 477198.867398, 134.96298]) + + # X2 = polyval(p, JCE); + X2 = p[0, 0] * np.power(JCE, 3) + p[0, 1] * np.power(JCE, 2) + p[0, 2] * JCE + p[0, 3] + + # 4. Moon argument of latitude + p = np.atleast_2d([(1 / 327270), -0.0036825, 483202.017538, 93.27191]) + + # X3 = polyval(p, JCE) + X3 = p[0, 0] * np.power(JCE, 3) + p[0, 1] * np.power(JCE, 2) + p[0, 2] * JCE + p[0, 3] + + # 5. Longitude of the ascending node of the moon's mean orbit on the + # ecliptic, measured from the mean equinox of the date + p = np.atleast_2d([(1 / 450000), 0.0020708, -1934.136261, 125.04452]) + + # X4 = polyval(p, JCE); + X4 = p[0, 0] * np.power(JCE, 3) + p[0, 1] * np.power(JCE, 2) + p[0, 2] * JCE + p[0, 3] + + # Y tabulated terms from the original code + Y_terms = np.array( + [ + [0, 0, 0, 0, 1], + [-2, 0, 0, 2, 2], + [0, 0, 0, 2, 2], + [0, 0, 0, 0, 2], + [0, 1, 0, 0, 0], + [0, 0, 1, 0, 0], + [-2, 1, 0, 2, 2], + [0, 0, 0, 2, 1], + [0, 0, 1, 2, 2], + [-2, -1, 0, 2, 2], + [-2, 0, 1, 0, 0], + [-2, 0, 0, 2, 1], + [0, 0, -1, 2, 2], + [2, 0, 0, 0, 0], + [0, 0, 1, 0, 1], + [2, 0, -1, 2, 2], + [0, 0, -1, 0, 1], + [0, 0, 1, 2, 1], + [-2, 0, 2, 0, 0], + [0, 0, -2, 2, 1], + [2, 0, 0, 2, 2], + [0, 0, 2, 2, 2], + [0, 0, 2, 0, 0], + [-2, 0, 1, 2, 2], + [0, 0, 0, 2, 0], + [-2, 0, 0, 2, 0], + [0, 0, -1, 2, 1], + [0, 2, 0, 0, 0], + [2, 0, -1, 0, 1], + [-2, 2, 0, 2, 2], + [0, 1, 0, 0, 1], + [-2, 0, 1, 0, 1], + [0, -1, 0, 0, 1], + [0, 0, 2, -2, 0], + [2, 0, -1, 2, 1], + [2, 0, 1, 2, 2], + [0, 1, 0, 2, 2], + [-2, 1, 1, 0, 0], + [0, -1, 0, 2, 2], + [2, 0, 0, 2, 1], + [2, 0, 1, 0, 0], + [-2, 0, 2, 2, 2], + [-2, 0, 1, 2, 1], + [2, 0, -2, 0, 1], + [2, 0, 0, 0, 1], + [0, -1, 1, 0, 0], + [-2, -1, 0, 2, 1], + [-2, 0, 0, 0, 
1], + [0, 0, 2, 2, 1], + [-2, 0, 2, 0, 1], + [-2, 1, 0, 2, 1], + [0, 0, 1, -2, 0], + [-1, 0, 1, 0, 0], + [-2, 1, 0, 0, 0], + [1, 0, 0, 0, 0], + [0, 0, 1, 2, 0], + [0, 0, -2, 2, 2], + [-1, -1, 1, 0, 0], + [0, 1, 1, 0, 0], + [0, -1, 1, 2, 2], + [2, -1, -1, 2, 2], + [0, 0, 3, 2, 2], + [2, -1, 0, 2, 2], + ] + ) + + nutation_terms = np.array( + [ + [-171996, -174.2, 92025, 8.9], + [-13187, -1.6, 5736, -3.1], + [-2274, -0.2, 977, -0.5], + [2062, 0.2, -895, 0.5], + [1426, -3.4, 54, -0.1], + [712, 0.1, -7, 0], + [-517, 1.2, 224, -0.6], + [-386, -0.4, 200, 0], + [-301, 0, 129, -0.1], + [217, -0.5, -95, 0.3], + [-158, 0, 0, 0], + [129, 0.1, -70, 0], + [123, 0, -53, 0], + [63, 0, 0, 0], + [63, 0.1, -33, 0], + [-59, 0, 26, 0], + [-58, -0.1, 32, 0], + [-51, 0, 27, 0], + [48, 0, 0, 0], + [46, 0, -24, 0], + [-38, 0, 16, 0], + [-31, 0, 13, 0], + [29, 0, 0, 0], + [29, 0, -12, 0], + [26, 0, 0, 0], + [-22, 0, 0, 0], + [21, 0, -10, 0], + [17, -0.1, 0, 0], + [16, 0, -8, 0], + [-16, 0.1, 7, 0], + [-15, 0, 9, 0], + [-13, 0, 7, 0], + [-12, 0, 6, 0], + [11, 0, 0, 0], + [-10, 0, 5, 0], + [-8, 0, 3, 0], + [7, 0, -3, 0], + [-7, 0, 0, 0], + [-7, 0, 3, 0], + [-7, 0, 3, 0], + [6, 0, 0, 0], + [6, 0, -3, 0], + [6, 0, -3, 0], + [-6, 0, 3, 0], + [-6, 0, 3, 0], + [5, 0, 0, 0], + [-5, 0, 3, 0], + [-5, 0, 3, 0], + [-5, 0, 3, 0], + [4, 0, 0, 0], + [4, 0, 0, 0], + [4, 0, 0, 0], + [-4, 0, 0, 0], + [-4, 0, 0, 0], + [-4, 0, 0, 0], + [3, 0, 0, 0], + [-3, 0, 0, 0], + [-3, 0, 0, 0], + [-3, 0, 0, 0], + [-3, 0, 0, 0], + [-3, 0, 0, 0], + [-3, 0, 0, 0], + [-3, 0, 0, 0], + ] + ) + + # Using the tabulated values, compute the delta_longitude and + # delta_obliquity. 
+ Xi = np.array([X0, X1, X2, X3, X4]) # a col mat in octave + + tabulated_argument = Y_terms.dot(np.transpose(Xi)) * (np.pi / 180) + + delta_longitude = (nutation_terms[:, 0] + (nutation_terms[:, 1] * JCE)) * np.sin(tabulated_argument) + delta_obliquity = (nutation_terms[:, 2] + (nutation_terms[:, 3] * JCE)) * np.cos(tabulated_argument) + + nutation = dict() # init nutation dictionary + # Nutation in longitude + nutation["longitude"] = np.sum(delta_longitude) / 36000000 + + # Nutation in obliquity + nutation["obliquity"] = np.sum(delta_obliquity) / 36000000 + + return nutation + + +def true_obliquity_calculation(julian, nutation): + """ + This function compute the true obliquity of the ecliptic. + + :param julian: + :param nutation: + :return: + """ + + p = np.atleast_2d([2.45, 5.79, 27.87, 7.12, -39.05, -249.67, -51.38, 1999.25, -1.55, -4680.93, 84381.448]) + + # mean_obliquity = polyval(p, julian.ephemeris_millenium/10); + U = julian["ephemeris_millenium"] / 10 + mean_obliquity = ( + p[0, 0] * np.power(U, 10) + + p[0, 1] * np.power(U, 9) + + p[0, 2] * np.power(U, 8) + + p[0, 3] * np.power(U, 7) + + p[0, 4] * np.power(U, 6) + + p[0, 5] * np.power(U, 5) + + p[0, 6] * np.power(U, 4) + + p[0, 7] * np.power(U, 3) + + p[0, 8] * np.power(U, 2) + + p[0, 9] * U + + p[0, 10] + ) + + true_obliquity = (mean_obliquity / 3600) + nutation["obliquity"] + + return true_obliquity + + +def abberation_correction_calculation(earth_heliocentric_position): + """ + This function compute the aberration_correction, as a function of the + earth-sun distance. 
+ + :param earth_heliocentric_position: + :return: + """ + aberration_correction = -20.4898 / (3600 * earth_heliocentric_position["radius"]) + return aberration_correction + + +def apparent_sun_longitude_calculation(sun_geocentric_position, nutation, aberration_correction): + """ + This function compute the sun apparent longitude + + :param sun_geocentric_position: + :param nutation: + :param aberration_correction: + :return: + """ + apparent_sun_longitude = sun_geocentric_position["longitude"] + nutation["longitude"] + aberration_correction + return apparent_sun_longitude + + +def apparent_stime_at_greenwich_calculation(julian, nutation, true_obliquity): + """ + This function compute the apparent sideral time at Greenwich. + + :param julian: + :param nutation: + :param true_obliquity: + :return: + """ + + JD = julian["day"] + JC = julian["century"] + + # Mean sideral time, in degrees + mean_stime = ( + 280.46061837 + + (360.98564736629 * (JD - 2451545)) + + (0.000387933 * np.power(JC, 2)) + - (np.power(JC, 3) / 38710000) + ) + + # Limit the range to [0-360]; + mean_stime = set_to_range(mean_stime, 0, 360) + + apparent_stime_at_greenwich = mean_stime + (nutation["longitude"] * np.cos(true_obliquity * np.pi / 180)) + return apparent_stime_at_greenwich + + +def sun_rigth_ascension_calculation(apparent_sun_longitude, true_obliquity, sun_geocentric_position): + """ + This function compute the sun rigth ascension. 
+ :param apparent_sun_longitude: + :param true_obliquity: + :param sun_geocentric_position: + :return: + """ + + argument_numerator = (np.sin(apparent_sun_longitude * np.pi / 180) * np.cos(true_obliquity * np.pi / 180)) - ( + np.tan(sun_geocentric_position["latitude"] * np.pi / 180) * np.sin(true_obliquity * np.pi / 180) + ) + argument_denominator = np.cos(apparent_sun_longitude * np.pi / 180) + sun_rigth_ascension = np.arctan2(argument_numerator, argument_denominator) * 180 / np.pi + # Limit the range to [0,360]; + sun_rigth_ascension = set_to_range(sun_rigth_ascension, 0, 360) + return sun_rigth_ascension + + +def sun_geocentric_declination_calculation(apparent_sun_longitude, true_obliquity, sun_geocentric_position): + """ + + :param apparent_sun_longitude: + :param true_obliquity: + :param sun_geocentric_position: + :return: + """ + + argument = (np.sin(sun_geocentric_position["latitude"] * np.pi / 180) * np.cos(true_obliquity * np.pi / 180)) + ( + np.cos(sun_geocentric_position["latitude"] * np.pi / 180) + * np.sin(true_obliquity * np.pi / 180) + * np.sin(apparent_sun_longitude * np.pi / 180) + ) + + sun_geocentric_declination = np.arcsin(argument) * 180 / np.pi + return sun_geocentric_declination + + +def observer_local_hour_calculation(apparent_stime_at_greenwich, location, sun_rigth_ascension): + """ + This function computes observer local hour. 
+ + :param apparent_stime_at_greenwich: + :param location: + :param sun_rigth_ascension: + :return: + """ + + observer_local_hour = apparent_stime_at_greenwich + location["longitude"] - sun_rigth_ascension + # Set the range to [0-360] + observer_local_hour = set_to_range(observer_local_hour, 0, 360) + return observer_local_hour + + +def topocentric_sun_position_calculate( + earth_heliocentric_position, location, observer_local_hour, sun_rigth_ascension, sun_geocentric_declination +): + """ + This function compute the sun position (rigth ascension and declination) + with respect to the observer local position at the Earth surface. + + :param earth_heliocentric_position: + :param location: + :param observer_local_hour: + :param sun_rigth_ascension: + :param sun_geocentric_declination: + :return: + """ + + # Equatorial horizontal parallax of the sun in degrees + eq_horizontal_parallax = 8.794 / (3600 * earth_heliocentric_position["radius"]) + + # Term u, used in the following calculations (in radians) + u = np.arctan(0.99664719 * np.tan(location["latitude"] * np.pi / 180)) + + # Term x, used in the following calculations + x = np.cos(u) + ((location["altitude"] / 6378140) * np.cos(location["latitude"] * np.pi / 180)) + + # Term y, used in the following calculations + y = (0.99664719 * np.sin(u)) + ((location["altitude"] / 6378140) * np.sin(location["latitude"] * np.pi / 180)) + + # Parallax in the sun rigth ascension (in radians) + nominator = -x * np.sin(eq_horizontal_parallax * np.pi / 180) * np.sin(observer_local_hour * np.pi / 180) + denominator = np.cos(sun_geocentric_declination * np.pi / 180) - ( + x * np.sin(eq_horizontal_parallax * np.pi / 180) * np.cos(observer_local_hour * np.pi / 180) + ) + sun_rigth_ascension_parallax = np.arctan2(nominator, denominator) + # Conversion to degrees. 
+ topocentric_sun_position = dict() + topocentric_sun_position["rigth_ascension_parallax"] = sun_rigth_ascension_parallax * 180 / np.pi + + # Topocentric sun rigth ascension (in degrees) + topocentric_sun_position["rigth_ascension"] = sun_rigth_ascension + (sun_rigth_ascension_parallax * 180 / np.pi) + + # Topocentric sun declination (in degrees) + nominator = ( + np.sin(sun_geocentric_declination * np.pi / 180) - (y * np.sin(eq_horizontal_parallax * np.pi / 180)) + ) * np.cos(sun_rigth_ascension_parallax) + denominator = np.cos(sun_geocentric_declination * np.pi / 180) - ( + y * np.sin(eq_horizontal_parallax * np.pi / 180) + ) * np.cos(observer_local_hour * np.pi / 180) + topocentric_sun_position["declination"] = np.arctan2(nominator, denominator) * 180 / np.pi + return topocentric_sun_position + + +def topocentric_local_hour_calculate(observer_local_hour, topocentric_sun_position): + """ + This function compute the topocentric local jour angle in degrees + + :param observer_local_hour: + :param topocentric_sun_position: + :return: + """ + + topocentric_local_hour = observer_local_hour - topocentric_sun_position["rigth_ascension_parallax"] + return topocentric_local_hour + + +def sun_topocentric_zenith_angle_calculate(location, topocentric_sun_position, topocentric_local_hour): + """ + This function compute the sun zenith angle, taking into account the + atmospheric refraction. A default temperature of 283K and a + default pressure of 1010 mbar are used. 
+ + :param location: + :param topocentric_sun_position: + :param topocentric_local_hour: + :return: + """ + + # Topocentric elevation, without atmospheric refraction + argument = ( + np.sin(location["latitude"] * np.pi / 180) * np.sin(topocentric_sun_position["declination"] * np.pi / 180) + ) + ( + np.cos(location["latitude"] * np.pi / 180) + * np.cos(topocentric_sun_position["declination"] * np.pi / 180) + * np.cos(topocentric_local_hour * np.pi / 180) + ) + true_elevation = np.arcsin(argument) * 180 / np.pi + + # Atmospheric refraction correction (in degrees) + argument = true_elevation + (10.3 / (true_elevation + 5.11)) + refraction_corr = 1.02 / (60 * np.tan(argument * np.pi / 180)) + + # For exact pressure and temperature correction, use this, + # with P the pressure in mbar amd T the temperature in Kelvins: + # refraction_corr = (P/1010) * (283/T) * 1.02 / (60 * tan(argument * pi/180)); + + # Apparent elevation + apparent_elevation = true_elevation + refraction_corr + + sun = dict() + sun["zenith"] = 90 - apparent_elevation + + # Topocentric azimuth angle. 
The +180 conversion is to pass from astronomer + # notation (westward from south) to navigation notation (eastward from + # north); + nominator = np.sin(topocentric_local_hour * np.pi / 180) + denominator = (np.cos(topocentric_local_hour * np.pi / 180) * np.sin(location["latitude"] * np.pi / 180)) - ( + np.tan(topocentric_sun_position["declination"] * np.pi / 180) * np.cos(location["latitude"] * np.pi / 180) + ) + sun["azimuth"] = (np.arctan2(nominator, denominator) * 180 / np.pi) + 180 + + # Set the range to [0-360] + sun["azimuth"] = set_to_range(sun["azimuth"], 0, 360) + return sun + + +def set_to_range(var, min_interval, max_interval): + """ + Sets a variable in range min_interval and max_interval + + :param var: + :param min_interval: + :param max_interval: + :return: + """ + var = var - max_interval * np.floor(var / max_interval) + + if var < min_interval: + var = var + max_interval + return var diff --git a/pysrc/solweig/physics/wallalgorithms.py b/pysrc/solweig/physics/wallalgorithms.py new file mode 100644 index 0000000..6275619 --- /dev/null +++ b/pysrc/solweig/physics/wallalgorithms.py @@ -0,0 +1,214 @@ +__author__ = "xlinfr" + +import math + +import numpy as np + +from ..progress import get_progress_iterator +from .morphology import rotate_array + + +def findwalls(a, walllimit): + # This function identifies walls based on a DSM and a wall-height limit + # Walls are represented by outer pixels within building footprints + # + # Fredrik Lindberg, Goteborg Urban Climate Group + # fredrikl@gvc.gu.se + # 20150625 + # + # For each pixel, find the max of its 4 cardinal neighbors (cross kernel). + # Wall height = max_neighbor - self, clipped to walllimit. 
+ + walls = np.zeros_like(a, dtype=np.float32) + + # Max of 4 cardinal neighbors for all interior pixels + max_neighbors = np.maximum.reduce( + [ + a[:-2, 1:-1], # north + a[2:, 1:-1], # south + a[1:-1, :-2], # west + a[1:-1, 2:], # east + ] + ) + walls[1:-1, 1:-1] = max_neighbors + + walls = walls - a + walls[walls < walllimit] = 0 + + # Zero borders + walls[0, :] = 0 + walls[-1, :] = 0 + walls[:, 0] = 0 + walls[:, -1] = 0 + + return walls + + +def filter1Goodwin_as_aspect_v3(walls, scale, a, feedback=None): + """ + tThis function applies the filter processing presented in Goodwin et al (2010) but instead for removing + linear fetures it calculates wall aspect based on a wall pixels grid, a dsm (a) and a scale factor + + Fredrik Lindberg, 2012-02-14 + fredrikl@gvc.gu.se + + Translated: 2015-09-15 + + :param walls: + :param scale: + :param a: + :return: dirwalls + """ + # Try Rust implementation first (much faster) + try: + import threading + + from ..progress import ProgressReporter + from ..rustalgos import wall_aspect as _wa_rust + + walls_f32 = np.asarray(walls, dtype=np.float32) + dsm_f32 = np.asarray(a, dtype=np.float32) + + runner = _wa_rust.WallAspectRunner() + result = [None] + error = [None] + + def _run(): + try: + result[0] = runner.compute(walls_f32, float(scale), dsm_f32) + except Exception as e: + error[0] = e + + thread = threading.Thread(target=_run, daemon=True) + thread.start() + + # Poll progress (180 angle iterations) + total = 180 + pbar = ProgressReporter(total=total, desc="Computing wall aspects", feedback=feedback) + last = 0 + while thread.is_alive(): + thread.join(timeout=0.05) + done = runner.progress() + if done > last: + pbar.update(done - last) + last = done + # Check QGIS cancellation + if feedback is not None and hasattr(feedback, "isCanceled") and feedback.isCanceled(): + runner.cancel() + thread.join(timeout=5.0) + pbar.close() + return np.zeros_like(walls_f32) + if last < total: + pbar.update(total - last) + pbar.close() + + 
thread.join() + if error[0] is not None: + raise error[0] + return np.asarray(result[0]) + except ImportError: + pass + + # Python fallback + row = a.shape[0] + col = a.shape[1] + + filtersize = np.floor((scale + 0.0000000001) * 9) + if filtersize <= 2: + filtersize = 3 + elif filtersize != 9 and filtersize % 2 == 0: + filtersize = filtersize + 1 + + filthalveceil = int(np.ceil(filtersize / 2.0)) + filthalvefloor = int(np.floor(filtersize / 2.0)) + + filtmatrix = np.zeros((int(filtersize), int(filtersize)), dtype=np.float32) + buildfilt = np.zeros((int(filtersize), int(filtersize)), dtype=np.float32) + + filtmatrix[:, filthalveceil - 1] = 1 + n = filtmatrix.shape[0] - 1 + buildfilt[filthalveceil - 1, 0:filthalvefloor] = 1 + buildfilt[filthalveceil - 1, filthalveceil : int(filtersize)] = 2 + + y = np.zeros((row, col), dtype=np.float32) # final direction + z = np.zeros((row, col), dtype=np.float32) # temporary direction + x = np.zeros((row, col), dtype=np.float32) # building side + walls[walls > 0.5] = 1 + + for h in get_progress_iterator( + range(0, 180), desc="Computing wall aspects", feedback=feedback + ): # =0:1:180 #%increased resolution to 1 deg 20140911 + filtmatrix1temp = rotate_array(filtmatrix, h, order=1, reshape=False, mode="nearest") # bilinear + filtmatrix1 = np.round(filtmatrix1temp) + filtmatrixbuildtemp = rotate_array(buildfilt, h, order=0, reshape=False, mode="nearest") # Nearest neighbor + # filtmatrixbuild = np.round(filtmatrixbuildtemp / 127.) 
+ filtmatrixbuild = np.round(filtmatrixbuildtemp) + index = 270 - h + if h == 150: + filtmatrixbuild[:, n] = 0 + if h == 30: + filtmatrixbuild[:, n] = 0 + if index == 225: + # n = filtmatrix.shape[0] - 1 # length(filtmatrix); + filtmatrix1[0, 0] = 1 + filtmatrix1[n, n] = 1 + if index == 135: + # n = filtmatrix.shape[0] - 1 # length(filtmatrix); + filtmatrix1[0, n] = 1 + filtmatrix1[n, 0] = 1 + + for i in range(int(filthalveceil) - 1, row - int(filthalveceil) - 1): # i=filthalveceil:sizey-filthalveceil + for j in range( + int(filthalveceil) - 1, col - int(filthalveceil) - 1 + ): # (j=filthalveceil:sizex-filthalveceil + if walls[i, j] == 1: + wallscut = ( + walls[ + i - filthalvefloor : i + filthalvefloor + 1, + j - filthalvefloor : j + filthalvefloor + 1, + ] + * filtmatrix1 + ) + dsmcut = a[ + i - filthalvefloor : i + filthalvefloor + 1, + j - filthalvefloor : j + filthalvefloor + 1, + ] + if z[i, j] < wallscut.sum(): # sum(sum(wallscut)) + z[i, j] = wallscut.sum() # sum(sum(wallscut)); + if np.sum(dsmcut[filtmatrixbuild == 1]) > np.sum(dsmcut[filtmatrixbuild == 2]): + x[i, j] = 1 + else: + x[i, j] = 2 + + y[i, j] = index + + y[(x == 1)] = y[(x == 1)] - 180 + y[(y < 0)] = y[(y < 0)] + 360 + + grad, asp = get_ders(a, scale) + + y = y + ((walls == 1) * 1) * ((y == 0) * 1) * (asp / (math.pi / 180.0)) + + dirwalls = y + + return dirwalls + + +def cart2pol(x, y, units="deg"): + radius = np.sqrt(x**2 + y**2) + theta = np.arctan2(y, x) + if units in ["deg", "degs"]: + theta = theta * 180 / np.pi + return theta, radius + + +def get_ders(dsm, scale): + # dem,_,_=read_dem_grid(dem_file) + dx = 1 / scale + # dx=0.5 + fy, fx = np.gradient(dsm, dx, dx) + asp, grad = cart2pol(fy, fx, "rad") + grad = np.arctan(grad) + asp = asp * -1 + asp = asp + (asp < 0) * (np.pi * 2) + return grad, asp diff --git a/pysrc/solweig/postprocess.py b/pysrc/solweig/postprocess.py new file mode 100644 index 0000000..5555c32 --- /dev/null +++ b/pysrc/solweig/postprocess.py @@ -0,0 +1,107 @@ 
+"""Thermal comfort index computation: UTCI and PET grid functions.""" + +from __future__ import annotations + +import logging +from typing import TYPE_CHECKING + +import numpy as np + +from .models import HumanParams +from .rustalgos import pet as pet_rust +from .rustalgos import utci as utci_rust + +logger = logging.getLogger(__name__) + +if TYPE_CHECKING: + from numpy.typing import NDArray + + +# ============================================================================= +# Post-Processing: Thermal Comfort Indices +# ============================================================================= + + +def compute_utci_grid( + tmrt: NDArray[np.floating], + ta: float, + rh: float, + wind: float, +) -> NDArray[np.floating]: + """ + Compute UTCI (Universal Thermal Climate Index) for a single grid. + + Thin wrapper around the Rust UTCI polynomial implementation. + + Args: + tmrt: Mean Radiant Temperature grid (°C). + ta: Air temperature (°C). + rh: Relative humidity (%). + wind: Wind speed at 10m height (m/s). + + Returns: + UTCI grid (°C). + + Example: + # Compute UTCI for a single result + utci = compute_utci_grid( + tmrt=result.tmrt, + ta=25.0, + rh=60.0, + wind=2.0, + ) + """ + + wind_grid = np.full_like(tmrt, wind, dtype=np.float32) + return utci_rust.utci_grid(ta, rh, tmrt, wind_grid) + + +def compute_pet_grid( + tmrt: NDArray[np.floating], + ta: float, + rh: float, + wind: float, + human: HumanParams | None = None, +) -> NDArray[np.floating]: + """ + Compute PET (Physiological Equivalent Temperature) for a single grid. + + Thin wrapper around the Rust PET iterative solver. + + Args: + tmrt: Mean Radiant Temperature grid (°C). + ta: Air temperature (°C). + rh: Relative humidity (%). + wind: Wind speed at 10m height (m/s). + human: Human body parameters. Uses defaults if not provided. + + Returns: + PET grid (°C). 
+ + Example: + # Compute PET for a single result + pet = compute_pet_grid( + tmrt=result.tmrt, + ta=25.0, + rh=60.0, + wind=2.0, + human=HumanParams(weight=75, height=1.75), + ) + """ + + if human is None: + human = HumanParams() + + wind_grid = np.full_like(tmrt, wind, dtype=np.float32) + return pet_rust.pet_grid( + ta, + rh, + tmrt, + wind_grid, + human.weight, + float(human.age), + human.height, + human.activity, + human.clothing, + human.sex, + ) diff --git a/pysrc/solweig/progress.py b/pysrc/solweig/progress.py new file mode 100644 index 0000000..4099e78 --- /dev/null +++ b/pysrc/solweig/progress.py @@ -0,0 +1,250 @@ +""" +Progress reporting abstraction for SOLWEIG. + +Automatically uses the appropriate progress mechanism: +- QGIS: QgsProcessingFeedback (native progress bar) +- Python: tqdm (terminal progress bar) +- Fallback: no-op (silent iteration) + +Usage: + from solweig.progress import get_progress_iterator, ProgressReporter + + # Simple iteration with progress + for item in get_progress_iterator(items, desc="Processing"): + process(item) + + # Manual progress control + progress = ProgressReporter(total=100, desc="Computing") + for i in range(100): + do_work(i) + progress.update(1) + progress.close() +""" + +from __future__ import annotations + +import logging +import sys +from collections.abc import Iterable, Iterator +from typing import Any, TypeVar + +logger = logging.getLogger(__name__) + +T = TypeVar("T") + +# Detect environment once at module load +_QGIS_AVAILABLE = False +_TQDM_AVAILABLE = False +_qgis_feedback_class = None + +# Check for QGIS +try: + if "qgis" in sys.modules or "qgis.core" in sys.modules: + from qgis.core import QgsProcessingFeedback + + _QGIS_AVAILABLE = True + _qgis_feedback_class = QgsProcessingFeedback + logger.debug("QGIS environment detected, will use QgsProcessingFeedback") +except ImportError: + pass + +# Check for tqdm +_tqdm: type | None = None +try: + from tqdm import tqdm as _tqdm + + _TQDM_AVAILABLE = True +except 
ImportError: + pass + + +class ProgressReporter: + """ + Unified progress reporter that works in QGIS, terminal, or silently. + + Args: + total: Total number of steps (required for percentage calculation). + desc: Description shown in progress bar. + feedback: Optional QGIS QgsProcessingFeedback object. If provided, + uses QGIS progress. Otherwise auto-detects environment. + disable: If True, disable all progress output. + """ + + def __init__( + self, + total: int, + desc: str = "", + feedback: Any = None, + disable: bool = False, + progress_range: tuple[float, float] | None = None, + ): + self.total = total + self.desc = desc + self.current = 0 + self.disable = disable + self._closed = False + + # Optional QGIS percentage sub-range mapping. + # When set, maps internal 0..total to progress_range[0]..progress_range[1] + # instead of the default 0..100. Useful when SVF is one phase of a + # larger QGIS algorithm (e.g. surface preprocessing maps SVF to 35-74%). + self._progress_range = progress_range + + # Determine which backend to use + self._qgis_feedback = None + self._tqdm_bar = None + + if disable: + return + + # If explicit QGIS feedback provided, use it + if feedback is not None: + self._qgis_feedback = feedback + return + + # Use tqdm if available (even when qgis.core is importable — without + # a feedback object there's no QGIS progress bar to use) + if _tqdm is not None: + self._tqdm_bar = _tqdm(total=total, desc=desc) + return + + # Fallback: silent operation + logger.debug(f"No progress backend available for: {desc}") + + def update(self, n: int = 1) -> None: + """Update progress by n steps.""" + if self._closed: + return + + self.current += n + + if self.disable: + return + + if self._qgis_feedback is not None: + # QGIS expects percentage 0-100 + frac = min(1.0, self.current / self.total) if self.total > 0 else 0.0 + if self._progress_range is not None: + lo, hi = self._progress_range + percent = int(lo + frac * (hi - lo)) + else: + percent = int(100 * 
frac) + self._qgis_feedback.setProgress(percent) + elif self._tqdm_bar is not None: + self._tqdm_bar.update(n) + + def set_description(self, desc: str) -> None: + """Update the progress bar description (tqdm only, no QGIS log output).""" + self.desc = desc + if self._tqdm_bar is not None: + self._tqdm_bar.set_description(desc) + + def set_text(self, text: str) -> None: + """Update status text above the progress bar (QGIS only). + + In QGIS, calls ``feedback.setProgressText()`` to update the label + shown above the progress bar. In tqdm/fallback mode this is a no-op + (use :meth:`set_description` for tqdm bar text instead). + """ + if self._qgis_feedback is not None: + self._qgis_feedback.setProgressText(text) + + def is_cancelled(self) -> bool: + """Check if user requested cancellation (QGIS only).""" + if self._qgis_feedback is not None: + return self._qgis_feedback.isCanceled() + return False + + def close(self) -> None: + """Close the progress bar.""" + if self._closed: + return + self._closed = True + + if self._tqdm_bar is not None: + self._tqdm_bar.close() + + +class _ProgressIterator(Iterator[T]): + """Iterator wrapper that reports progress.""" + + def __init__(self, iterable: Iterable[T], reporter: ProgressReporter): + self._iterator = iter(iterable) + self._reporter = reporter + + def __iter__(self) -> _ProgressIterator[T]: + return self + + def __next__(self) -> T: + try: + item = next(self._iterator) + self._reporter.update(1) + return item + except StopIteration: + self._reporter.close() + raise + + +def get_progress_iterator( + iterable: Iterable[T], + desc: str = "", + total: int | None = None, + feedback: Any = None, + disable: bool = False, +) -> Iterator[T]: + """ + Wrap an iterable with automatic progress reporting. + + Automatically uses the appropriate progress mechanism: + - QGIS environment with feedback: QgsProcessingFeedback + - Terminal: tqdm progress bar + - Fallback: silent iteration + + Args: + iterable: The iterable to wrap. 
+ desc: Description for the progress bar. + total: Total number of items (computed from len() if not provided). + feedback: Optional QGIS QgsProcessingFeedback for progress reporting. + disable: If True, disable progress output entirely. + + Returns: + Iterator that reports progress as items are consumed. + + Example: + # Simple usage + for item in get_progress_iterator(items, desc="Processing"): + process(item) + + # With QGIS feedback (in processing algorithm) + for item in get_progress_iterator(items, feedback=self.feedback): + process(item) + """ + if total is None: + try: + total = len(iterable) # type: ignore + except TypeError: + # Iterable doesn't have len(), estimate or use 0 + total = 0 + + reporter = ProgressReporter(total=total, desc=desc, feedback=feedback, disable=disable) + return _ProgressIterator(iterable, reporter) + + +# Convenience function that matches tqdm signature for easy migration +def progress( + iterable: Iterable[T], + desc: str = "", + total: int | None = None, + **kwargs, +) -> Iterator[T]: + """ + Drop-in replacement for tqdm that auto-detects environment. + + This function has a similar signature to tqdm for easy migration. + Additional kwargs are ignored for compatibility. + + Example: + # Replace: for item in tqdm(items, desc="Processing"): + # With: for item in progress(items, desc="Processing"): + """ + return get_progress_iterator(iterable, desc=desc, total=total) diff --git a/pysrc/solweig/solweig_logging.py b/pysrc/solweig/solweig_logging.py new file mode 100644 index 0000000..4083e85 --- /dev/null +++ b/pysrc/solweig/solweig_logging.py @@ -0,0 +1,190 @@ +""" +QGIS-compatible logging for SOLWEIG. 
+ +Provides automatic environment detection and uses appropriate logging backend: +- QGIS: QgsProcessingFeedback.pushInfo() / pushDebugInfo() +- Python: Standard logging module +- Fallback: Print to stdout + +Usage: + from solweig.solweig_logging import get_logger + + logger = get_logger(__name__) + logger.info("Surface data loaded: 400×400 pixels") + logger.debug(f"Using {len(weather_list)} timesteps") + logger.warning("SVF not provided; calculate() will raise MissingPrecomputedData") +""" + +from __future__ import annotations + +import logging +import sys +from enum import IntEnum +from typing import Any + + +class LogLevel(IntEnum): + """Log levels matching Python logging.""" + + DEBUG = 10 + INFO = 20 + WARNING = 30 + ERROR = 40 + + +class SolweigLogger: + """ + Unified logger that works in both QGIS and Python environments. + + Auto-detects environment and uses appropriate backend: + - QGIS: Uses QgsProcessingFeedback if available + - Python: Uses standard logging module + - Fallback: Prints to stdout + """ + + def __init__(self, name: str, level: LogLevel = LogLevel.INFO): + """ + Initialize logger. + + Args: + name: Logger name (usually module name) + level: Minimum log level to display + """ + self.name = name + self.level = level + self._feedback = None + self._backend = self._detect_backend() + + def _detect_backend(self) -> str: + """Detect which logging backend to use.""" + # Check if running in QGIS + try: + from qgis.core import QgsProcessingFeedback # noqa: F401 + + # QGIS is available, but we need a feedback object to be set + # This will be set via set_feedback() when running as QGIS processing algorithm + return "qgis" + except ImportError: + pass + + # Use standard Python logging + return "logging" + + def set_feedback(self, feedback: Any) -> None: + """ + Set QGIS feedback object for logging. 
+ + Args: + feedback: QgsProcessingFeedback object + """ + self._feedback = feedback + + def _log(self, level: LogLevel, message: str) -> None: + """Internal logging method.""" + if level < self.level: + return # Below minimum level + + if self._backend == "qgis" and self._feedback is not None: + # Use QGIS feedback + if level >= LogLevel.ERROR: + self._feedback.reportError(message) + elif level >= LogLevel.WARNING: + self._feedback.pushInfo(f"WARNING: {message}") + elif level >= LogLevel.INFO: + self._feedback.pushInfo(message) + else: # DEBUG + self._feedback.pushDebugInfo(message) + elif self._backend in ("logging", "qgis"): + # Use Python logging (also used as fallback when QGIS backend + # is detected but no feedback object has been set) + logger = logging.getLogger(self.name) + logger.log(level, message) + else: + # Fallback: print to stdout + prefix = { + LogLevel.DEBUG: "DEBUG", + LogLevel.INFO: "INFO", + LogLevel.WARNING: "WARNING", + LogLevel.ERROR: "ERROR", + }.get(level, "INFO") + print(f"[{prefix}] {self.name}: {message}", file=sys.stderr if level >= LogLevel.WARNING else sys.stdout) + + def debug(self, message: str) -> None: + """Log debug message.""" + self._log(LogLevel.DEBUG, message) + + def info(self, message: str) -> None: + """Log info message.""" + self._log(LogLevel.INFO, message) + + def warning(self, message: str) -> None: + """Log warning message.""" + self._log(LogLevel.WARNING, message) + + def error(self, message: str) -> None: + """Log error message.""" + self._log(LogLevel.ERROR, message) + + def set_level(self, level: LogLevel | int) -> None: + """Set minimum log level.""" + self.level = LogLevel(level) if isinstance(level, int) else level + + +# Global logger registry +_loggers: dict[str, SolweigLogger] = {} + + +def get_logger(name: str, level: LogLevel | int = LogLevel.INFO) -> SolweigLogger: + """ + Get or create a logger for the given name. 
+ + Args: + name: Logger name (usually module name or __name__) + level: Minimum log level (default: INFO) + + Returns: + SolweigLogger instance + + Example: + >>> logger = get_logger(__name__) + >>> logger.info("Processing started") + >>> logger.debug(f"Grid size: {rows}×{cols}") + """ + if name not in _loggers: + _loggers[name] = SolweigLogger(name, LogLevel(level) if isinstance(level, int) else level) + return _loggers[name] + + +def set_global_level(level: LogLevel | int) -> None: + """ + Set log level for all existing loggers. + + Args: + level: Minimum log level (DEBUG, INFO, WARNING, ERROR) + + Example: + >>> import solweig.solweig_logging as slog + >>> slog.set_global_level(slog.LogLevel.DEBUG) # Show debug messages + """ + level = LogLevel(level) if isinstance(level, int) else level + for logger in _loggers.values(): + logger.set_level(level) + + +def set_global_feedback(feedback: Any) -> None: + """ + Set QGIS feedback object for all loggers. + + Args: + feedback: QgsProcessingFeedback object + """ + for logger in _loggers.values(): + logger.set_feedback(feedback) + + +# Configure Python logging to be less verbose by default +logging.basicConfig( + level=logging.INFO, + format="%(name)s: %(message)s", + stream=sys.stdout, +) diff --git a/pysrc/solweig/summary.py b/pysrc/solweig/summary.py new file mode 100644 index 0000000..66f410f --- /dev/null +++ b/pysrc/solweig/summary.py @@ -0,0 +1,718 @@ +"""Timeseries summary and grid accumulation. + +Defines :class:`TimeseriesSummary` (the default return type of +:func:`calculate_timeseries`) and :class:`GridAccumulator` (the +internal helper that builds it incrementally during the loop). 
+""" + +from __future__ import annotations + +import datetime as _dt +from collections.abc import Callable +from dataclasses import dataclass, field +from pathlib import Path +from typing import TYPE_CHECKING + +import numpy as np + +if TYPE_CHECKING: + from numpy.typing import NDArray + + from .models.results import SolweigResult + from .models.surface import SurfaceData + from .models.weather import Weather + + +@dataclass +class Timeseries: + """Per-timestep scalar timeseries extracted during the calculation loop. + + Each field is a 1-D array of length ``n_timesteps``, holding the spatial + mean (or fraction) for that metric at each timestep. Useful for plotting + how conditions evolve over the simulation period. + + Attributes: + datetime: Timestamp per step. + ta: Air temperature per step (°C) — from weather input. + rh: Relative humidity per step (%) — from weather input. + ws: Wind speed per step (m/s) — from weather input. + global_rad: Global solar radiation per step (W/m²) — from weather input. + direct_rad: Direct beam radiation per step (W/m²). + diffuse_rad: Diffuse radiation per step (W/m²). + sun_altitude: Sun altitude angle per step (°). + tmrt_mean: Spatial mean Tmrt per step (°C). + utci_mean: Spatial mean UTCI per step (°C). + sun_fraction: Fraction of sunlit pixels per step (0–1). NaN when shadow unavailable. + diffuse_fraction: Diffuse fraction per step (0–1). 0 = clear sky, 1 = fully overcast. + clearness_index: Clearness index per step. Higher = clearer sky. 0 at night. + is_daytime: Day/night flag per step. 
+ """ + + datetime: list[_dt.datetime] + ta: NDArray[np.floating] + rh: NDArray[np.floating] + ws: NDArray[np.floating] + global_rad: NDArray[np.floating] + direct_rad: NDArray[np.floating] + diffuse_rad: NDArray[np.floating] + sun_altitude: NDArray[np.floating] + tmrt_mean: NDArray[np.floating] + utci_mean: NDArray[np.floating] + sun_fraction: NDArray[np.floating] + diffuse_fraction: NDArray[np.floating] + clearness_index: NDArray[np.floating] + is_daytime: NDArray[np.bool_] + + +@dataclass +class TimeseriesSummary: + """Aggregated summary from a SOLWEIG timeseries calculation. + + All grids have the same shape as the input DSM (rows, cols). + + Attributes: + tmrt_mean: Mean Tmrt across all timesteps (°C). + tmrt_max: Per-pixel maximum Tmrt (°C). + tmrt_min: Per-pixel minimum Tmrt (°C). + tmrt_day_mean: Mean Tmrt during daytime (°C). NaN where no daytime data. + tmrt_night_mean: Mean Tmrt during nighttime (°C). NaN where no nighttime data. + utci_mean: Mean UTCI across all timesteps (°C). + utci_max: Per-pixel maximum UTCI (°C). + utci_min: Per-pixel minimum UTCI (°C). + utci_day_mean: Mean UTCI during daytime (°C). NaN where no daytime data. + utci_night_mean: Mean UTCI during nighttime (°C). NaN where no nighttime data. + sun_hours: Hours of direct sun per pixel. + shade_hours: Hours of shade per pixel. + utci_hours_above: Threshold (°C) → grid of hours exceeding that UTCI value. + n_timesteps: Total number of timesteps processed. + n_daytime: Number of daytime timesteps. + n_nighttime: Number of nighttime timesteps. + shadow_available: Whether shadow data was available for sun/shade hours. + heat_thresholds_day: Daytime UTCI thresholds used. + heat_thresholds_night: Nighttime UTCI thresholds used. + timeseries: Per-timestep scalar timeseries (spatial means over time). + results: Per-timestep results (only populated when ``timestep_outputs`` is provided). 
+ """ + + # Tmrt summary grids + tmrt_mean: NDArray[np.floating] + tmrt_max: NDArray[np.floating] + tmrt_min: NDArray[np.floating] + tmrt_day_mean: NDArray[np.floating] + tmrt_night_mean: NDArray[np.floating] + + # UTCI summary grids + utci_mean: NDArray[np.floating] + utci_max: NDArray[np.floating] + utci_min: NDArray[np.floating] + utci_day_mean: NDArray[np.floating] + utci_night_mean: NDArray[np.floating] + + # Sun/shade + sun_hours: NDArray[np.floating] + shade_hours: NDArray[np.floating] + + # UTCI threshold exceedance + utci_hours_above: dict[float, NDArray[np.floating]] + + # Metadata + n_timesteps: int + n_daytime: int + n_nighttime: int + shadow_available: bool + heat_thresholds_day: list[float] + heat_thresholds_night: list[float] + + # Per-timestep scalar timeseries + timeseries: Timeseries | None = None + + # Per-timestep results (opt-in) + results: list[SolweigResult] = field(default_factory=list) + + # Surface reference for GeoTIFF output (not shown in repr) + _surface: SurfaceData | None = field(default=None, repr=False) + + # Output directory where summary GeoTIFFs were saved (not shown in repr) + _output_dir: Path | None = field(default=None, repr=False) + + def __len__(self) -> int: + """Return number of timesteps processed.""" + return self.n_timesteps + + def report(self) -> str: + """Return a human-readable summary report. + + Includes spatial statistics, threshold exceedance, timeseries ranges, + and links to saved GeoTIFF files when available. + + Returns: + Multi-line report string. 
+ """ + if self.n_timesteps == 0: + return "TimeseriesSummary: 0 timesteps (empty)" + + lines: list[str] = [] + + # Period header from timeseries datetimes + if self.timeseries is not None and self.timeseries.datetime: + dt0 = self.timeseries.datetime[0] + dt1 = self.timeseries.datetime[-1] + lines.append( + f"SOLWEIG Summary: {self.n_timesteps} timesteps ({self.n_daytime} day, {self.n_nighttime} night)" + ) + lines.append(f" Period: {dt0:%Y-%m-%d %H:%M} — {dt1:%Y-%m-%d %H:%M}") + else: + lines.append( + f"SOLWEIG Summary: {self.n_timesteps} timesteps ({self.n_daytime} day, {self.n_nighttime} night)", + ) + + # Tmrt stats + with np.errstate(invalid="ignore"): + tmrt_vals = self.tmrt_mean[np.isfinite(self.tmrt_mean)] + if tmrt_vals.size > 0: + lines.append( + f" Tmrt — mean: {tmrt_vals.mean():.1f}°C, " + f"range: {np.nanmin(self.tmrt_min):.1f} – {np.nanmax(self.tmrt_max):.1f}°C" + ) + + # UTCI stats + utci_vals = self.utci_mean[np.isfinite(self.utci_mean)] + if utci_vals.size > 0: + lines.append( + f" UTCI — mean: {utci_vals.mean():.1f}°C, " + f"range: {np.nanmin(self.utci_min):.1f} – {np.nanmax(self.utci_max):.1f}°C" + ) + + # Sun/shade + if self.shadow_available: + sun_valid = self.sun_hours[np.isfinite(self.sun_hours)] + if sun_valid.size > 0: + lines.append(f" Sun — {sun_valid.min():.1f} – {sun_valid.max():.1f} hours") + + # UTCI threshold exceedance (labelled day/night) + day_set = set(self.heat_thresholds_day) + night_set = set(self.heat_thresholds_night) + for threshold, grid in sorted(self.utci_hours_above.items()): + valid = grid[np.isfinite(grid)] + if valid.size > 0 and valid.max() > 0: + label = "" + if threshold in day_set and threshold not in night_set: + label = " (day)" + elif threshold in night_set and threshold not in day_set: + label = " (night)" + lines.append(f" UTCI > {threshold:g}°C{label} — max {valid.max():.1f}h") + + # Timeseries summary + if self.timeseries is not None: + ts = self.timeseries + ta_range = f"{np.nanmin(ts.ta):.1f} – 
{np.nanmax(ts.ta):.1f}°C" + lines.append(f" Ta — range: {ta_range}") + + # Per-timestep results + if self.results: + lines.append(f" Per-timestep results: {len(self.results)} SolweigResult objects") + + # Output file links + if self._output_dir is not None: + summary_dir = self._output_dir / "summary" + if summary_dir.exists(): + tifs = sorted(summary_dir.glob("*.tif")) + if tifs: + lines.append(f" Summary GeoTIFFs: {summary_dir}/") + for tif in tifs: + lines.append(f" {tif.name}") + + # Access hint + lines.append("") + lines.append("Tip: per-timestep arrays are in summary.timeseries (e.g. .ta, .tmrt_mean, .utci_mean).") + lines.append(" Spatial grids are on the summary itself (e.g. .tmrt_mean, .utci_max).") + if self._output_dir is not None: + lines.append(" Summary grids are saved as GeoTIFFs above; timeseries arrays are in memory only.") + else: + lines.append(" Call summary.to_geotiff(output_dir) to save spatial grids.") + + return "\n".join(lines) + + def _repr_html_(self) -> str: + """Rich HTML rendering for Jupyter notebooks.""" + return "
" + self.report().replace("&", "&").replace("<", "<") + "
" + + @classmethod + def empty(cls) -> TimeseriesSummary: + """Create an empty summary for zero-timestep runs.""" + z = np.empty((0, 0), dtype=np.float32) + return cls( + tmrt_mean=z, + tmrt_max=z, + tmrt_min=z, + tmrt_day_mean=z, + tmrt_night_mean=z, + utci_mean=z, + utci_max=z, + utci_min=z, + utci_day_mean=z, + utci_night_mean=z, + sun_hours=z, + shade_hours=z, + utci_hours_above={}, + n_timesteps=0, + n_daytime=0, + n_nighttime=0, + shadow_available=False, + heat_thresholds_day=[], + heat_thresholds_night=[], + ) + + def plot( + self, + save_path: str | Path | None = None, + figsize: tuple[float, float] = (14, 10), + max_days: int = 5, + ) -> None: + """Plot the per-timestep timeseries as a multi-panel figure. + + Requires ``matplotlib``. If ``save_path`` is provided, the figure + is saved to that path instead of being shown interactively. + + For long simulations the plot is truncated to the first ``max_days`` + days so that individual diurnal cycles remain readable. + + Args: + save_path: File path to save the figure (e.g. ``"summary.png"``). + If None, calls ``plt.show()``. + figsize: Figure size as ``(width, height)`` in inches. + max_days: Maximum number of days to display. If the timeseries + spans more than this many days, only the first ``max_days`` + are plotted. Set to ``0`` to plot all data. + + Raises: + RuntimeError: If no timeseries data is available. + """ + if self.timeseries is None or self.n_timesteps == 0: + raise RuntimeError( + "No timeseries data to plot. Timeseries data is populated automatically by calculate_timeseries()." + ) + + try: + import matplotlib.dates as mdates + import matplotlib.pyplot as plt + except ImportError: + raise ImportError("matplotlib is required for plotting. 
Install it with: pip install matplotlib") from None + + ts = self.timeseries + dates = ts.datetime + + # Truncate to max_days if the timeseries is long + n = len(dates) + truncated = False + if max_days > 0 and n > 1: + import datetime as _dtmod + + span = dates[-1] - dates[0] + if span > _dtmod.timedelta(days=max_days): + cutoff = dates[0] + _dtmod.timedelta(days=max_days) + # Find the first index past the cutoff + n = next((i for i, d in enumerate(dates) if d > cutoff), len(dates)) + truncated = True + + if truncated: + dates = dates[:n] + ts = Timeseries( + datetime=dates, + ta=ts.ta[:n], + rh=ts.rh[:n], + ws=ts.ws[:n], + global_rad=ts.global_rad[:n], + direct_rad=ts.direct_rad[:n], + diffuse_rad=ts.diffuse_rad[:n], + sun_altitude=ts.sun_altitude[:n], + tmrt_mean=ts.tmrt_mean[:n], + utci_mean=ts.utci_mean[:n], + sun_fraction=ts.sun_fraction[:n], + diffuse_fraction=ts.diffuse_fraction[:n], + clearness_index=ts.clearness_index[:n], + is_daytime=ts.is_daytime[:n], + ) + + fig, axes = plt.subplots(4, 1, figsize=figsize, sharex=True) + + # Helper: shade nighttime regions on any axis + def _shade_night(ax): + """Add light grey background for nighttime periods.""" + in_night = False + night_start = None + for i, d in enumerate(dates): + if not ts.is_daytime[i]: + if not in_night: + night_start = d + in_night = True + else: + if in_night: + ax.axvspan(night_start, d, alpha=0.08, color="grey", linewidth=0) + in_night = False + # Close final night span + if in_night and night_start is not None: + ax.axvspan(night_start, dates[-1], alpha=0.08, color="grey", linewidth=0) + + # Panel 1: Temperature (Ta + Tmrt spatial mean + UTCI spatial mean) + ax = axes[0] + _shade_night(ax) + ax.plot(dates, ts.ta, label="Ta", color="#2196F3", linewidth=1) + ax.plot(dates, ts.tmrt_mean, label="Tmrt (spatial mean)", color="#F44336", linewidth=1) + ax.plot(dates, ts.utci_mean, label="UTCI (spatial mean)", color="#FF9800", linewidth=1) + ax.set_ylabel("Temperature (°C)") + ax.legend(loc="upper 
right", fontsize=8) + title = "Temperature and Thermal Comfort" + if truncated: + title += f" (showing first {max_days} of {self.n_timesteps} timesteps)" + ax.set_title(title) + ax.grid(True, alpha=0.3) + + # Panel 2: Solar radiation + ax = axes[1] + _shade_night(ax) + ax.plot(dates, ts.global_rad, label="Global", color="#FFC107", linewidth=1) + ax.plot(dates, ts.direct_rad, label="Direct", color="#FF5722", linewidth=1) + ax.plot(dates, ts.diffuse_rad, label="Diffuse", color="#03A9F4", linewidth=1) + ax.set_ylabel("Radiation (W/m²)") + ax.legend(loc="upper right", fontsize=8) + ax.set_title("Solar Radiation") + ax.grid(True, alpha=0.3) + + # Panel 3: Sun fraction + sun altitude + clearness index + ax = axes[2] + _shade_night(ax) + ax2 = ax.twinx() + if self.shadow_available: + ax.fill_between(dates, ts.sun_fraction, alpha=0.3, color="#FFEB3B", label="Sun fraction") + ax.plot(dates, ts.clearness_index, label="Clearness index", color="#FF9800", linewidth=1, alpha=0.8) + ax.set_ylabel("Fraction / Index") + ax.set_ylim(0, max(1.05, float(np.nanmax(ts.clearness_index)) * 1.1)) + ax2.plot(dates, ts.sun_altitude, label="Sun altitude", color="#9C27B0", linewidth=1, alpha=0.7) + ax2.set_ylabel("Sun altitude (°)") + ax.set_title("Sun Exposure, Clearness and Solar Geometry") + # Combine legends + handles1, labels1 = ax.get_legend_handles_labels() + handles2, labels2 = ax2.get_legend_handles_labels() + ax.legend(handles1 + handles2, labels1 + labels2, loc="upper right", fontsize=8) + ax.grid(True, alpha=0.3) + + # Panel 4: Weather inputs (RH + wind speed) + ax = axes[3] + _shade_night(ax) + ax.plot(dates, ts.rh, label="RH", color="#4CAF50", linewidth=1) + ax.set_ylabel("Relative Humidity (%)") + ax.set_ylim(0, 105) + ax3 = ax.twinx() + ax3.plot(dates, ts.ws, label="Wind speed", color="#607D8B", linewidth=1) + ax3.set_ylabel("Wind speed (m/s)") + handles1, labels1 = ax.get_legend_handles_labels() + handles2, labels2 = ax3.get_legend_handles_labels() + ax.legend(handles1 + 
handles2, labels1 + labels2, loc="upper right", fontsize=8) + ax.set_title("Meteorological Inputs") + ax.grid(True, alpha=0.3) + + # Format x-axis dates + ax.xaxis.set_major_formatter(mdates.DateFormatter("%Y-%m-%d %H:%M")) + fig.autofmt_xdate(rotation=30, ha="right") + + fig.tight_layout() + + if save_path is not None: + fig.savefig(str(save_path), dpi=150, bbox_inches="tight") + plt.close(fig) + else: + plt.show() + + def to_geotiff( + self, + output_dir: str | Path, + surface: SurfaceData | None = None, + ) -> None: + """Save all summary grids to GeoTIFF files in ``output_dir/summary/``. + + Args: + output_dir: Base output directory. + surface: SurfaceData for CRS/transform metadata. Falls back to + the internal ``_surface`` reference if not provided. + """ + from . import io + + surface = surface or self._surface + + summary_dir = Path(output_dir) / "summary" + summary_dir.mkdir(parents=True, exist_ok=True) + + # Resolve geo-referencing from surface + transform: list[float] | None = None + crs_wkt: str = "" + if surface is not None: + if surface._geotransform is not None: + transform = surface._geotransform + if surface._crs_wkt is not None: + crs_wkt = surface._crs_wkt + if transform is None: + transform = [0.0, 1.0, 0.0, 0.0, 0.0, -1.0] + + def _save(name: str, arr: NDArray[np.floating]) -> None: + if arr.size == 0: + return + io.save_raster( + out_path_str=str(summary_dir / f"{name}.tif"), + data_arr=arr, + trf_arr=transform, + crs_wkt=crs_wkt, + no_data_val=np.nan, + ) + + # Tmrt grids + _save("tmrt_mean", self.tmrt_mean) + _save("tmrt_max", self.tmrt_max) + _save("tmrt_min", self.tmrt_min) + _save("tmrt_day_mean", self.tmrt_day_mean) + _save("tmrt_night_mean", self.tmrt_night_mean) + + # UTCI grids + _save("utci_mean", self.utci_mean) + _save("utci_max", self.utci_max) + _save("utci_min", self.utci_min) + _save("utci_day_mean", self.utci_day_mean) + _save("utci_night_mean", self.utci_night_mean) + + # Sun/shade + _save("sun_hours", self.sun_hours) + 
_save("shade_hours", self.shade_hours) + + # UTCI threshold exceedance (labelled day/night in filename) + day_set = set(self.heat_thresholds_day) + night_set = set(self.heat_thresholds_night) + for threshold, arr in sorted(self.utci_hours_above.items()): + suffix = "" + if threshold in day_set and threshold not in night_set: + suffix = "_day" + elif threshold in night_set and threshold not in day_set: + suffix = "_night" + _save(f"utci_hours_above_{threshold:g}{suffix}", arr) + + +class GridAccumulator: + """Accumulates per-pixel summary grids during the timeseries loop. + + Used identically by both ``timeseries.py`` and ``tiling.py``. + All internal accumulators use float64 for numerical stability. + """ + + def __init__( + self, + shape: tuple[int, int], + heat_thresholds_day: list[float], + heat_thresholds_night: list[float], + timestep_hours: float, + ) -> None: + self.shape = shape + self.heat_thresholds_day = list(heat_thresholds_day) + self.heat_thresholds_night = list(heat_thresholds_night) + self.timestep_hours = timestep_hours + + # Tmrt accumulators + self._tmrt_sum = np.zeros(shape, dtype=np.float64) + self._tmrt_count = np.zeros(shape, dtype=np.int32) + self._tmrt_max = np.full(shape, -np.inf, dtype=np.float64) + self._tmrt_min = np.full(shape, np.inf, dtype=np.float64) + self._tmrt_day_sum = np.zeros(shape, dtype=np.float64) + self._tmrt_day_count = np.zeros(shape, dtype=np.int32) + self._tmrt_night_sum = np.zeros(shape, dtype=np.float64) + self._tmrt_night_count = np.zeros(shape, dtype=np.int32) + + # UTCI accumulators + self._utci_sum = np.zeros(shape, dtype=np.float64) + self._utci_count = np.zeros(shape, dtype=np.int32) + self._utci_max = np.full(shape, -np.inf, dtype=np.float64) + self._utci_min = np.full(shape, np.inf, dtype=np.float64) + self._utci_day_sum = np.zeros(shape, dtype=np.float64) + self._utci_day_count = np.zeros(shape, dtype=np.int32) + self._utci_night_sum = np.zeros(shape, dtype=np.float64) + self._utci_night_count = 
np.zeros(shape, dtype=np.int32) + + # Sun/shade + self._sun_hours = np.zeros(shape, dtype=np.float64) + self._shade_hours = np.zeros(shape, dtype=np.float64) + self._shadow_seen = False + + # UTCI threshold exceedance — combine all unique thresholds + all_thresholds = sorted(set(heat_thresholds_day) | set(heat_thresholds_night)) + self._utci_hours_above: dict[float, NDArray] = {t: np.zeros(shape, dtype=np.float64) for t in all_thresholds} + self._day_thresholds_set = set(heat_thresholds_day) + self._night_thresholds_set = set(heat_thresholds_night) + + # Counters + self._n_timesteps = 0 + self._n_daytime = 0 + self._n_nighttime = 0 + + # Per-timestep scalar accumulators (lists, finalized to arrays) + self._ts_datetime: list[_dt.datetime] = [] + self._ts_ta: list[float] = [] + self._ts_rh: list[float] = [] + self._ts_ws: list[float] = [] + self._ts_global_rad: list[float] = [] + self._ts_direct_rad: list[float] = [] + self._ts_diffuse_rad: list[float] = [] + self._ts_sun_altitude: list[float] = [] + self._ts_tmrt_mean: list[float] = [] + self._ts_utci_mean: list[float] = [] + self._ts_sun_fraction: list[float] = [] + self._ts_diffuse_fraction: list[float] = [] + self._ts_clearness_index: list[float] = [] + self._ts_is_daytime: list[bool] = [] + + def update( + self, + result: SolweigResult, + weather: Weather, + compute_utci_fn: Callable, + ) -> None: + """Ingest one timestep. 
Must be called BEFORE arrays are freed.""" + tmrt = result.tmrt + valid = np.isfinite(tmrt) + is_day = weather.is_daytime + + # --- Tmrt stats --- + self._tmrt_sum += np.where(valid, tmrt, 0.0) + self._tmrt_count += valid.astype(np.int32) + np.fmax(self._tmrt_max, np.where(valid, tmrt, -np.inf), out=self._tmrt_max) + np.fmin(self._tmrt_min, np.where(valid, tmrt, np.inf), out=self._tmrt_min) + + if is_day: + self._tmrt_day_sum += np.where(valid, tmrt, 0.0) + self._tmrt_day_count += valid.astype(np.int32) + else: + self._tmrt_night_sum += np.where(valid, tmrt, 0.0) + self._tmrt_night_count += valid.astype(np.int32) + + # --- UTCI --- + utci = compute_utci_fn(tmrt, weather.ta, weather.rh, weather.ws) + utci_valid = valid & np.isfinite(utci) + + self._utci_sum += np.where(utci_valid, utci, 0.0) + self._utci_count += utci_valid.astype(np.int32) + np.fmax(self._utci_max, np.where(utci_valid, utci, -np.inf), out=self._utci_max) + np.fmin(self._utci_min, np.where(utci_valid, utci, np.inf), out=self._utci_min) + + if is_day: + self._utci_day_sum += np.where(utci_valid, utci, 0.0) + self._utci_day_count += utci_valid.astype(np.int32) + else: + self._utci_night_sum += np.where(utci_valid, utci, 0.0) + self._utci_night_count += utci_valid.astype(np.int32) + + # --- Sun/shade hours --- + sun_fraction = np.nan + if result.shadow is not None: + self._shadow_seen = True + self._sun_hours += np.where(valid, result.shadow * self.timestep_hours, 0.0) + self._shade_hours += np.where(valid, (1.0 - result.shadow) * self.timestep_hours, 0.0) + n_valid = valid.sum() + sun_fraction = float(result.shadow[valid].sum() / n_valid) if n_valid > 0 else np.nan + + # --- UTCI threshold exceedance --- + active_thresholds = self._day_thresholds_set if is_day else self._night_thresholds_set + for threshold in active_thresholds: + acc = self._utci_hours_above[threshold] + acc += np.where(utci_valid & (utci > threshold), self.timestep_hours, 0.0) + + self._n_timesteps += 1 + if is_day: + 
self._n_daytime += 1 + else: + self._n_nighttime += 1 + + # --- Per-timestep scalar tracking --- + self._ts_datetime.append(weather.datetime) + self._ts_ta.append(weather.ta) + self._ts_rh.append(weather.rh) + self._ts_ws.append(weather.ws) + self._ts_global_rad.append(weather.global_rad) + self._ts_direct_rad.append(weather.direct_rad) + self._ts_diffuse_rad.append(weather.diffuse_rad) + self._ts_sun_altitude.append(weather.sun_altitude) + self._ts_is_daytime.append(is_day) + + # Spatial means (over valid pixels) + n_valid_tmrt = valid.sum() + self._ts_tmrt_mean.append(float(tmrt[valid].mean()) if n_valid_tmrt > 0 else np.nan) + n_valid_utci = utci_valid.sum() + self._ts_utci_mean.append(float(utci[utci_valid].mean()) if n_valid_utci > 0 else np.nan) + self._ts_sun_fraction.append(sun_fraction) + # Diffuse fraction: 0 = clear, 1 = overcast (NaN at night) + if weather.global_rad > 0: + self._ts_diffuse_fraction.append(weather.diffuse_rad / weather.global_rad) + else: + self._ts_diffuse_fraction.append(np.nan) + self._ts_clearness_index.append(weather.clearness_index) + + def finalize(self) -> TimeseriesSummary: + """Compute final summary grids from accumulated state.""" + + def _safe_mean(total: NDArray, count: NDArray) -> NDArray[np.floating]: + with np.errstate(invalid="ignore"): + out = np.where(count > 0, total / count, np.nan) + return out.astype(np.float32) + + def _safe_extrema(arr: NDArray, count: NDArray) -> NDArray[np.floating]: + out = np.where(count > 0, arr, np.nan) + return out.astype(np.float32) + + sun_hours = ( + self._sun_hours.astype(np.float32) if self._shadow_seen else np.full(self.shape, np.nan, dtype=np.float32) + ) + shade_hours = ( + self._shade_hours.astype(np.float32) if self._shadow_seen else np.full(self.shape, np.nan, dtype=np.float32) + ) + + utci_hours = {t: arr.astype(np.float32) for t, arr in sorted(self._utci_hours_above.items())} + + # Build per-timestep timeseries + timeseries = ( + Timeseries( + 
datetime=list(self._ts_datetime), + ta=np.array(self._ts_ta, dtype=np.float32), + rh=np.array(self._ts_rh, dtype=np.float32), + ws=np.array(self._ts_ws, dtype=np.float32), + global_rad=np.array(self._ts_global_rad, dtype=np.float32), + direct_rad=np.array(self._ts_direct_rad, dtype=np.float32), + diffuse_rad=np.array(self._ts_diffuse_rad, dtype=np.float32), + sun_altitude=np.array(self._ts_sun_altitude, dtype=np.float32), + tmrt_mean=np.array(self._ts_tmrt_mean, dtype=np.float32), + utci_mean=np.array(self._ts_utci_mean, dtype=np.float32), + sun_fraction=np.array(self._ts_sun_fraction, dtype=np.float32), + diffuse_fraction=np.array(self._ts_diffuse_fraction, dtype=np.float32), + clearness_index=np.array(self._ts_clearness_index, dtype=np.float32), + is_daytime=np.array(self._ts_is_daytime, dtype=np.bool_), + ) + if self._n_timesteps > 0 + else None + ) + + return TimeseriesSummary( + tmrt_mean=_safe_mean(self._tmrt_sum, self._tmrt_count), + tmrt_max=_safe_extrema(self._tmrt_max, self._tmrt_count), + tmrt_min=_safe_extrema(self._tmrt_min, self._tmrt_count), + tmrt_day_mean=_safe_mean(self._tmrt_day_sum, self._tmrt_day_count), + tmrt_night_mean=_safe_mean(self._tmrt_night_sum, self._tmrt_night_count), + utci_mean=_safe_mean(self._utci_sum, self._utci_count), + utci_max=_safe_extrema(self._utci_max, self._utci_count), + utci_min=_safe_extrema(self._utci_min, self._utci_count), + utci_day_mean=_safe_mean(self._utci_day_sum, self._utci_day_count), + utci_night_mean=_safe_mean(self._utci_night_sum, self._utci_night_count), + sun_hours=sun_hours, + shade_hours=shade_hours, + utci_hours_above=utci_hours, + n_timesteps=self._n_timesteps, + n_daytime=self._n_daytime, + n_nighttime=self._n_nighttime, + shadow_available=self._shadow_seen, + heat_thresholds_day=self.heat_thresholds_day, + heat_thresholds_night=self.heat_thresholds_night, + timeseries=timeseries, + ) diff --git a/pysrc/solweig/tiling.py b/pysrc/solweig/tiling.py new file mode 100644 index 0000000..e2b3060 --- 
/dev/null +++ b/pysrc/solweig/tiling.py @@ -0,0 +1,1510 @@ +""" +Tiled processing for large rasters. + +This module provides automatic and manual tiling for SOLWEIG calculations, +supporting both single-timestep and timeseries modes. Large rasters are +automatically divided into overlapping tiles with buffers sized to capture +shadows from the tallest buildings at low sun angles. + +Timeseries mode preserves thermal state accumulation across tiles and timesteps, +ensuring physically accurate ground temperature modeling with thermal inertia. +""" + +from __future__ import annotations + +import math +import os +import sys +import time +from collections.abc import Callable +from pathlib import Path +from types import SimpleNamespace +from typing import TYPE_CHECKING, Any + +import numpy as np + +from .errors import MissingPrecomputedData +from .models import HumanParams, PrecomputedData, SolweigResult, SurfaceData, ThermalState, TileSpec +from .output_async import AsyncGeoTiffWriter, async_output_enabled, collect_output_arrays +from .postprocess import compute_pet_grid, compute_utci_grid +from .solweig_logging import get_logger +from .summary import GridAccumulator, TimeseriesSummary + +logger = get_logger(__name__) + +_TIMING_ENABLED = os.environ.get("SOLWEIG_TIMING", "").lower() in ("1", "true") + +if TYPE_CHECKING: + from .models import ( + Location, + ModelConfig, + Weather, + ) + + +# ============================================================================= +# Constants +# ============================================================================= + +MIN_TILE_SIZE = 256 # Minimum tile size in pixels +_FALLBACK_MAX_TILE_SIZE = 2500 # Used when GPU + RAM detection both fail +MIN_SUN_ELEVATION_DEG = 3.0 # Minimum sun elevation for shadow calculations +MAX_BUFFER_M = 1000.0 # Default maximum buffer / shadow distance in meters + +# Backward-compat alias (imported by tests and calculate_timeseries_tiled docstring) +MAX_TILE_SIZE = _FALLBACK_MAX_TILE_SIZE + +# 
Resource estimation constants +_RAM_FRACTION = 0.50 # Use at most 50% of total physical RAM for tile arrays + +# GPU memory constants — two sets per context, derived from +# rust/src/gpu/shadow_gpu.rs allocate_buffers() + init_svf_accumulation(). +# +# *_TOTAL: aggregate footprint of ALL wgpu buffers alive simultaneously. +# Used when max_buffer_size ≈ total GPU memory (Metal/DX12). +# *_SINGLE: largest single buffer per pixel. +# Used when max_buffer_size is a per-buffer cap (Vulkan/GL). +# +# Shadow context ("solweig" timestep): allocate_buffers() only +# 16 f32 storage buffers (64 B/px) + staging 10× (40 B/px) + overhead/headroom +_SHADOW_GPU_TOTAL_BPP = 120 +_SHADOW_GPU_SINGLE_BPP = 40 # staging_buffer = 10 × buffer_size +# +# SVF context: allocate_buffers() + init_svf_accumulation() +# shadow (104) + svf_data 15× (60) + svf_staging (60) +# + bitpack 3×20 output (60) + bitpack staging (60) +_SVF_GPU_TOTAL_BPP = 384 +_SVF_GPU_SINGLE_BPP = 60 # svf_data_buffer = 15 × buffer_size +# +# SVF CPU/RAM: Rust SvfIntermediate::zeros() = 15 f32 arrays (60 B/px) +# + 3 bitpacked u8 arrays at 20 B each (60 B/px) + memmap overhead (~30 B/px). +_SVF_RAM_BYTES_PER_PIXEL = 150 + +_SOLWEIG_BYTES_PER_PIXEL = 400 # Peak Python-side bytes per pixel (benchmarked ~370) +_GPU_HEADROOM = 0.80 # Use 80% of GPU max buffer to leave headroom + +# Backends where max_buffer_size ≈ total GPU/unified memory. +# On these backends we constrain by total allocation across all buffers. +# On others (Vulkan, GL) max_buffer_size is a per-buffer cap, so we +# constrain by the largest single buffer and rely on init_svf_accumulation() +# falling back to CPU if total VRAM is exceeded. 
+_TOTAL_MEMORY_BACKENDS = {"Metal", "Dx12"} +_MAX_AUTO_TILE_WORKERS = 6 # Hard cap to avoid bandwidth/cache thrash on many-core CPUs + +# Cache for computed tile limits (populated once per context on first call) +_cached_max_tile_side: dict[str, int] = {} + + +# ============================================================================= +# Resource detection +# ============================================================================= + + +def _get_total_ram_bytes() -> int | None: + """ + Detect total physical RAM in bytes. + + Uses ``os.sysconf`` on POSIX (macOS/Linux) and ``ctypes`` on Windows. + Returns ``None`` if detection fails. No external dependencies. + """ + import os + import sys + + try: + if sys.platform == "win32": + import ctypes + + class MEMORYSTATUSEX(ctypes.Structure): + _fields_ = [ + ("dwLength", ctypes.c_ulong), + ("dwMemoryLoad", ctypes.c_ulong), + ("ullTotalPhys", ctypes.c_ulonglong), + ("ullAvailPhys", ctypes.c_ulonglong), + ("ullTotalPageFile", ctypes.c_ulonglong), + ("ullAvailPageFile", ctypes.c_ulonglong), + ("ullTotalVirtual", ctypes.c_ulonglong), + ("ullAvailVirtual", ctypes.c_ulonglong), + ("ullAvailExtendedVirtual", ctypes.c_ulonglong), + ] + + stat = MEMORYSTATUSEX() + stat.dwLength = ctypes.sizeof(stat) + ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(stat)) + return stat.ullTotalPhys + else: + pages = os.sysconf("SC_PHYS_PAGES") + page_size = os.sysconf("SC_PAGE_SIZE") + if pages > 0 and page_size > 0: + return pages * page_size + except (OSError, ValueError, AttributeError): + pass + return None + + +def compute_max_tile_pixels(*, context: str = "solweig") -> int: + """ + Compute the maximum number of pixels that fit in a single tile, + based on real GPU buffer limits and system RAM. + + Args: + context: ``"solweig"`` for timestep tiling, or ``"svf"`` for SVF-only + tiling. Affects the bytes-per-pixel estimate used for the RAM + constraint. + + Returns: + Maximum pixel count for a tile (rows * cols). + """ + from . 
import get_gpu_limits + + if context == "svf": + ram_bytes_per_pixel = _SVF_RAM_BYTES_PER_PIXEL + gpu_total_bpp = _SVF_GPU_TOTAL_BPP + gpu_single_bpp = _SVF_GPU_SINGLE_BPP + else: + ram_bytes_per_pixel = _SOLWEIG_BYTES_PER_PIXEL + gpu_total_bpp = _SHADOW_GPU_TOTAL_BPP + gpu_single_bpp = _SHADOW_GPU_SINGLE_BPP + + # GPU constraint: total GPU allocation for all buffers must fit in memory. + # On Metal (Apple Silicon), max_buffer_size ≈ total GPU/unified memory, + # so this effectively constrains the aggregate footprint per tile. + # - "solweig": shadow buffers only (~104 B/px) + # - "svf": shadow + SVF accumulation + bitpack (~344 B/px with veg) + gpu_max_pixels = None + limits = get_gpu_limits() + if limits is not None: + max_buf = limits["max_buffer_size"] + backend = str(limits.get("backend", "")) + # Metal/DX12 backends generally report max_buffer_size close to total + # GPU/unified memory, so constrain by aggregate per-tile working set. + # Other backends may expose per-buffer caps; for them use largest single + # buffer estimate. + bpp = gpu_total_bpp if backend in _TOTAL_MEMORY_BACKENDS else gpu_single_bpp + gpu_max_pixels = int(int(max_buf) * _GPU_HEADROOM) // bpp + + # RAM constraint: total physical RAM × fraction / bytes per pixel. + # - "svf": ~150 B/px (Rust SvfIntermediate + memmap overhead) + # - "solweig": ~400 B/px (timestep with radiation grids, state arrays, etc). 
+ ram_max_pixels = None + total_ram = _get_total_ram_bytes() + if total_ram is not None: + usable_ram = int(total_ram * _RAM_FRACTION) + ram_max_pixels = usable_ram // ram_bytes_per_pixel + + # Use the tighter of the two constraints + candidates = [c for c in [gpu_max_pixels, ram_max_pixels] if c is not None] + if candidates: + return max(MIN_TILE_SIZE**2, min(candidates)) + + # Fallback: no detection succeeded + return _FALLBACK_MAX_TILE_SIZE**2 + + +def compute_max_tile_side(*, context: str = "solweig") -> int: + """ + Compute the maximum tile side length (square tiles) from resource limits. + + The result is cached per *context* for the lifetime of the process. + + Returns: + Maximum tile side in pixels (at least ``MIN_TILE_SIZE``). + """ + import math + + if context in _cached_max_tile_side: + return _cached_max_tile_side[context] + + max_pixels = compute_max_tile_pixels(context=context) + side = max(MIN_TILE_SIZE, int(math.isqrt(max_pixels))) + + # Log once so the user can see what limits are driving tile sizing + from . 
import get_gpu_limits + + limits = get_gpu_limits() + total_ram = _get_total_ram_bytes() + gpu_str = f"{limits['max_buffer_size']:,} bytes" if limits else "N/A" + ram_str = f"{total_ram:,} bytes" if total_ram else "N/A" + logger.info( + f"Resource-aware tile sizing (context={context}): " + f"GPU max_buffer={gpu_str}, system RAM={ram_str}, max_tile_side={side} px" + ) + + _cached_max_tile_side[context] = side + return side + + +# ============================================================================= +# Helper Functions +# ============================================================================= + + +def _should_use_tiling(rows: int, cols: int) -> bool: + """Check if raster size exceeds resource limits and requires tiling.""" + max_side = compute_max_tile_side(context="solweig") + return rows > max_side or cols > max_side + + +def _calculate_auto_tile_size(rows: int, cols: int) -> int: + """ + Calculate optimal core tile size based on raster dimensions and resources. + + Returns the resource-derived maximum tile side as the core size. + ``validate_tile_size()`` will further adjust to ensure the full tile + (core + 2 × overlap) fits within resource limits. + + Returns: + Core tile size in pixels. + """ + return compute_max_tile_side(context="solweig") + + +def _resolve_tile_workers(tile_workers: int | None, n_tiles: int) -> int: + """Resolve worker count for tiled orchestration.""" + if n_tiles <= 0: + return 1 + if tile_workers is not None and tile_workers < 1: + raise ValueError(f"tile_workers must be >= 1, got {tile_workers}") + if tile_workers is None: + cpu_count = os.cpu_count() or 2 + tile_workers = max(2, min(_MAX_AUTO_TILE_WORKERS, cpu_count // 2)) + return max(1, min(tile_workers, n_tiles)) + + +def _resolve_inflight_limit( + n_workers: int, + n_tiles: int, + tile_queue_depth: int | None, + prefetch_tiles: bool, +) -> int: + """ + Resolve max number of in-flight tile tasks. + + ``tile_queue_depth`` controls queued tasks beyond active workers. 
+ Effective in-flight task limit is ``n_workers + queue_depth``. + """ + if tile_queue_depth is not None and tile_queue_depth < 0: + raise ValueError(f"tile_queue_depth must be >= 0, got {tile_queue_depth}") + + queue_depth = (n_workers if prefetch_tiles else 0) if tile_queue_depth is None else tile_queue_depth + + return max(1, min(n_tiles, n_workers + queue_depth)) + + +def _resolve_prefetch_default( + n_workers: int, + n_tiles: int, + core_tile_size: int, + buffer_pixels: int, +) -> bool: + """ + Decide default prefetch behavior based on estimated in-flight memory pressure. + + Uses a conservative estimate of per-tile Python-side working memory. + Prefetch is enabled only when estimated in-flight bytes are comfortably + below the usable RAM budget. + """ + if n_tiles <= 0: + return False + + total_ram = _get_total_ram_bytes() + if total_ram is None: + return True + + full_side = core_tile_size + 2 * buffer_pixels + tile_pixels = max(MIN_TILE_SIZE**2, full_side * full_side) + estimated_tile_bytes = tile_pixels * _SOLWEIG_BYTES_PER_PIXEL + + # Default prefetch queues up to n_workers extra tasks. + estimated_inflight_tiles = min(n_tiles, n_workers * 2) + estimated_inflight_bytes = estimated_inflight_tiles * estimated_tile_bytes + + usable_ram = int(total_ram * _RAM_FRACTION) + return estimated_inflight_bytes <= int(usable_ram * 0.5) + + +def _maybe_subdivide_single_tile_for_timeseries( + rows: int, + cols: int, + tile_size: int, + buffer_pixels: int, + pixel_size: float, + requested_workers: int | None, +) -> int: + """ + Optionally reduce tile size for large single-tile timeseries runs. + + Motivation: + Resource-aware sizing often yields one very large tile that fits memory, but + that can leave CPU/GPU orchestration under-utilized in timeseries mode. + Splitting into a few tiles enables overlapping GPU and CPU work across workers. 
+ """ + if rows * cols < 4_000_000: + return tile_size + + env_target = os.getenv("SOLWEIG_TIMESERIES_TARGET_TILES", "").strip() + try: + target_tiles = int(env_target) if env_target else 0 + except ValueError: + target_tiles = 0 + + if target_tiles <= 1: + if requested_workers is not None: + target_tiles = max(2, min(16, requested_workers)) + else: + cpu_count = os.cpu_count() or 2 + target_tiles = max(2, min(16, cpu_count // 2)) + + splits = max(2, int(math.ceil(math.sqrt(target_tiles)))) + candidate_core = int(math.ceil(max(rows / splits, cols / splits))) + candidate_core = max(MIN_TILE_SIZE, min(candidate_core, tile_size)) + + if candidate_core >= tile_size: + return tile_size + + adjusted_candidate, warning = validate_tile_size(candidate_core, buffer_pixels, pixel_size) + if warning: + logger.warning(warning) + if adjusted_candidate >= tile_size: + return tile_size + + candidate_tiles = generate_tiles(rows, cols, adjusted_candidate, buffer_pixels) + if len(candidate_tiles) <= 1: + return tile_size + + logger.info( + "Timeseries CPU parallelization: splitting single tile into " + f"{len(candidate_tiles)} tiles (core {tile_size} -> {adjusted_candidate})" + ) + return adjusted_candidate + + +def _extract_tile_surface( + surface: SurfaceData, + tile: TileSpec, + pixel_size: float, + precomputed: PrecomputedData | None = None, +) -> SurfaceData: + """ + Extract tile slice from full surface, reusing precomputed SVF when available. + + Creates a new SurfaceData with sliced arrays (DSM, CDSM, etc.). + If the global surface has precomputed SVF (via prepare() or compute_svf()), + the SVF is sliced to the tile bounds — avoiding expensive per-tile + recomputation. If surface.svf is absent but precomputed.svf is provided, + that precomputed SVF is sliced instead. When neither source exists, SVF + remains unset and callers must fail fast before computation. + + Args: + surface: Full raster surface data. + tile: Tile specification with slice bounds. 
+ pixel_size: Pixel size in meters. + precomputed: Optional precomputed data containing SVF. + + Returns: + SurfaceData for this tile. + """ + read_slice = tile.read_slice + + tile_dsm = surface.dsm[read_slice].copy() + tile_cdsm = surface.cdsm[read_slice].copy() if surface.cdsm is not None else None + tile_tdsm = surface.tdsm[read_slice].copy() if surface.tdsm is not None else None + tile_dem = surface.dem[read_slice].copy() if surface.dem is not None else None + tile_lc = surface.land_cover[read_slice].copy() if surface.land_cover is not None else None + tile_albedo = surface.albedo[read_slice].copy() if surface.albedo is not None else None + tile_emis = surface.emissivity[read_slice].copy() if surface.emissivity is not None else None + + # Slice precomputed SVF if available (avoids per-tile recomputation) + tile_svf = None + if surface.svf is not None: + tile_svf = surface.svf.crop( + tile.row_start_full, + tile.row_end_full, + tile.col_start_full, + tile.col_end_full, + ) + elif precomputed is not None and precomputed.svf is not None: + tile_svf = precomputed.svf.crop( + tile.row_start_full, + tile.row_end_full, + tile.col_start_full, + tile.col_end_full, + ) + + # Slice shadow matrices if available (required for anisotropic sky in tiled mode) + tile_shadow_matrices = None + if surface.shadow_matrices is not None: + tile_shadow_matrices = surface.shadow_matrices.crop( + tile.row_start_full, + tile.row_end_full, + tile.col_start_full, + tile.col_end_full, + ) + + tile_surface = SurfaceData( + dsm=tile_dsm, + cdsm=tile_cdsm, + tdsm=tile_tdsm, + dem=tile_dem, + land_cover=tile_lc, + albedo=tile_albedo, + emissivity=tile_emis, + pixel_size=pixel_size, + svf=tile_svf, + shadow_matrices=tile_shadow_matrices, + ) + return tile_surface + + +def _slice_tile_precomputed( + precomputed: PrecomputedData | None, + tile: TileSpec, +) -> PrecomputedData | None: + """ + Slice walls and shadow matrices from precomputed data for a tile. 
+ + SVF is handled via surface.svf (sliced in _extract_tile_surface). + Shadow matrices are spatially cropped to the tile bounds for + anisotropic sky support in tiled mode. + + Args: + precomputed: Full raster precomputed data (or None). + tile: Tile specification with slice bounds. + + Returns: + PrecomputedData with sliced walls and shadow matrices, or None. + """ + if precomputed is None: + return None + + read_slice = tile.read_slice + + tile_wall_ht = None + tile_wall_asp = None + tile_shadow_matrices = None + + if precomputed.wall_height is not None: + tile_wall_ht = precomputed.wall_height[read_slice].copy() + if precomputed.wall_aspect is not None: + tile_wall_asp = precomputed.wall_aspect[read_slice].copy() + if precomputed.shadow_matrices is not None: + tile_shadow_matrices = precomputed.shadow_matrices.crop( + tile.row_start_full, + tile.row_end_full, + tile.col_start_full, + tile.col_end_full, + ) + + if tile_wall_ht is None and tile_wall_asp is None and tile_shadow_matrices is None: + return None + + return PrecomputedData( + wall_height=tile_wall_ht, + wall_aspect=tile_wall_asp, + svf=None, + shadow_matrices=tile_shadow_matrices, + ) + + +def _write_tile_result( + tile_result: SolweigResult, + tile: TileSpec, + tmrt_out: np.ndarray, + shadow_out: np.ndarray | None, + kdown_out: np.ndarray | None, + kup_out: np.ndarray | None, + ldown_out: np.ndarray | None, + lup_out: np.ndarray | None, +) -> None: + """Write core region of tile result to global output arrays.""" + core_slice = tile.core_slice + write_slice = tile.write_slice + + tmrt_out[write_slice] = tile_result.tmrt[core_slice] + if shadow_out is not None and tile_result.shadow is not None: + shadow_out[write_slice] = tile_result.shadow[core_slice] + if kdown_out is not None and tile_result.kdown is not None: + kdown_out[write_slice] = tile_result.kdown[core_slice] + if kup_out is not None and tile_result.kup is not None: + kup_out[write_slice] = tile_result.kup[core_slice] + if ldown_out is not 
None and tile_result.ldown is not None: + ldown_out[write_slice] = tile_result.ldown[core_slice] + if lup_out is not None and tile_result.lup is not None: + lup_out[write_slice] = tile_result.lup[core_slice] + + +def _slice_tile_state(state: ThermalState, tile: TileSpec) -> ThermalState: + """ + Slice thermal state arrays for a tile. + + Spatial arrays are sliced using tile.read_slice (full tile with overlap). + Scalar values are copied as-is (they're global, not spatial). + + Args: + state: Global thermal state for full raster. + tile: Tile specification with slice bounds. + + Returns: + ThermalState for this tile. + """ + read_slice = tile.read_slice + + return ThermalState( + tgmap1=state.tgmap1[read_slice].copy(), + tgmap1_e=state.tgmap1_e[read_slice].copy(), + tgmap1_s=state.tgmap1_s[read_slice].copy(), + tgmap1_w=state.tgmap1_w[read_slice].copy(), + tgmap1_n=state.tgmap1_n[read_slice].copy(), + tgout1=state.tgout1[read_slice].copy(), + firstdaytime=state.firstdaytime, + timeadd=state.timeadd, + timestep_dec=state.timestep_dec, + ) + + +def _refresh_tile_state(tile_state: ThermalState, global_state: ThermalState, tile: TileSpec) -> None: + """ + Refresh a preallocated tile state from the global state in-place. + + This avoids reallocating ThermalState objects/arrays each timestep while + still ensuring overlap regions are synchronized from the latest global state. 
+ """ + read_slice = tile.read_slice + np.copyto(tile_state.tgmap1, global_state.tgmap1[read_slice]) + np.copyto(tile_state.tgmap1_e, global_state.tgmap1_e[read_slice]) + np.copyto(tile_state.tgmap1_s, global_state.tgmap1_s[read_slice]) + np.copyto(tile_state.tgmap1_w, global_state.tgmap1_w[read_slice]) + np.copyto(tile_state.tgmap1_n, global_state.tgmap1_n[read_slice]) + np.copyto(tile_state.tgout1, global_state.tgout1[read_slice]) + tile_state.firstdaytime = global_state.firstdaytime + tile_state.timeadd = global_state.timeadd + tile_state.timestep_dec = global_state.timestep_dec + + +def _merge_tile_state( + tile_state: ThermalState, + tile: TileSpec, + global_state: ThermalState, +) -> None: + """ + Merge tile state arrays back into global state (in-place). + + Writes core region (tile.core_slice) of tile state arrays to the + corresponding region (tile.write_slice) in global state. Updates + global scalar values from tile state (identical across all tiles + for a given timestep). + + Args: + tile_state: Computed state for this tile. + tile: Tile specification with slice bounds. + global_state: Global state to update (modified in-place). 
+ """ + core_slice = tile.core_slice + write_slice = tile.write_slice + + global_state.tgmap1[write_slice] = tile_state.tgmap1[core_slice] + global_state.tgmap1_e[write_slice] = tile_state.tgmap1_e[core_slice] + global_state.tgmap1_s[write_slice] = tile_state.tgmap1_s[core_slice] + global_state.tgmap1_w[write_slice] = tile_state.tgmap1_w[core_slice] + global_state.tgmap1_n[write_slice] = tile_state.tgmap1_n[core_slice] + global_state.tgout1[write_slice] = tile_state.tgout1[core_slice] + + # Scalars are the same across all tiles for a given timestep + global_state.firstdaytime = tile_state.firstdaytime + global_state.timeadd = tile_state.timeadd + + +# ============================================================================= +# Public Functions +# ============================================================================= + + +def calculate_buffer_distance( + max_height: float, + min_sun_elev_deg: float = MIN_SUN_ELEVATION_DEG, + max_shadow_distance_m: float = MAX_BUFFER_M, +) -> float: + """ + Calculate required buffer distance for tiled processing based on max building height. + + The buffer must be large enough to capture shadows cast by the tallest buildings + at the lowest sun elevation angle. + + Formula: buffer = min(max_height / tan(min_sun_elevation), max_shadow_distance_m) + + Args: + max_height: Maximum building/DSM height in meters. + min_sun_elev_deg: Minimum sun elevation angle in degrees. Default 3.0. + max_shadow_distance_m: Maximum buffer distance in meters. Default 500.0. + + Returns: + Buffer distance in meters, capped at max_shadow_distance_m. 
+ + Example: + >>> calculate_buffer_distance(30.0) # 30m building + 500.0 # Capped (actual would be 573m) + >>> calculate_buffer_distance(10.0) # 10m building + 190.8 # 10m / tan(3) + """ + if max_height <= 0: + return 0.0 + + tan_elev = np.tan(np.radians(min_sun_elev_deg)) + if tan_elev <= 0: + return max_shadow_distance_m + + buffer = max_height / tan_elev + return min(buffer, max_shadow_distance_m) + + +def validate_tile_size( + tile_size: int, + buffer_pixels: int, + pixel_size: float, + context: str = "solweig", +) -> tuple[int, str | None]: + """ + Validate and adjust core tile size for tiled processing. + + ``tile_size`` is the **core** tile side (the region whose results are + kept). The actual tile in memory is ``core + 2 × buffer_pixels``. + This function ensures the full tile fits within resource-derived limits. + + Args: + tile_size: Requested core tile size in pixels. + buffer_pixels: Overlap buffer size in pixels. + pixel_size: Pixel size in meters. + context: Resource context for limit detection. Use ``"svf"`` for + SVF preprocessing tiles and ``"solweig"`` for timestep tiles. + + Returns: + Tuple of (adjusted_core_size, warning_message or None). + + Constraints: + - core >= MIN_TILE_SIZE (preferred) + - core >= 1 when large overlap leaves less than MIN_TILE_SIZE + - core + 2 * buffer_pixels <= resource-derived maximum + """ + max_full = compute_max_tile_side(context=context) + warning = None + core = tile_size + + # Enforce maximum: full tile (core + 2*buffer) must fit resource limit + max_core = max_full - 2 * buffer_pixels + if max_core < 1: + warning = f"Buffer {buffer_pixels}px too large for resource limit ({max_full}px). 
Using minimum feasible core=1" + return 1, warning + + # Enforce minimum core size (prefer MIN_TILE_SIZE, but allow smaller when overlap is large) + min_core = MIN_TILE_SIZE if max_core >= MIN_TILE_SIZE else 1 + if core < min_core: + warning = f"Tile core size {tile_size} below minimum, using {min_core}" + core = min_core + + if core > max_core: + core = max_core + warning = ( + f"Tile core {tile_size} + 2x{buffer_pixels}px buffer exceeds resource limit " + f"({max_full}px). Using core={core}" + ) + + return core, warning + + +def generate_tiles( + rows: int, + cols: int, + tile_size: int, + overlap: int, +) -> list[TileSpec]: + """ + Generate tile specifications with overlaps for tiled processing. + + Args: + rows: Total number of rows in raster. + cols: Total number of columns in raster. + tile_size: Core tile size in pixels (without overlap). + overlap: Overlap size in pixels. + + Returns: + List of TileSpec objects covering the entire raster. + """ + tiles = [] + n_tiles_row = int(np.ceil(rows / tile_size)) + n_tiles_col = int(np.ceil(cols / tile_size)) + + for i in range(n_tiles_row): + for j in range(n_tiles_col): + # Core tile bounds + row_start = i * tile_size + row_end = min((i + 1) * tile_size, rows) + col_start = j * tile_size + col_end = min((j + 1) * tile_size, cols) + + # Calculate overlaps (bounded by raster edges) + overlap_top = overlap if i > 0 else 0 + overlap_bottom = overlap if row_end < rows else 0 + overlap_left = overlap if j > 0 else 0 + overlap_right = overlap if col_end < cols else 0 + + # Full tile bounds with overlap + row_start_full = max(0, row_start - overlap_top) + row_end_full = min(rows, row_end + overlap_bottom) + col_start_full = max(0, col_start - overlap_left) + col_end_full = min(cols, col_end + overlap_right) + + tiles.append( + TileSpec( + row_start=row_start, + row_end=row_end, + col_start=col_start, + col_end=col_end, + row_start_full=row_start_full, + row_end_full=row_end_full, + col_start_full=col_start_full, + 
                    col_end_full=col_end_full,
                    overlap_top=overlap_top,
                    overlap_bottom=overlap_bottom,
                    overlap_left=overlap_left,
                    overlap_right=overlap_right,
                )
            )

    return tiles


def calculate_tiled(
    surface: SurfaceData,
    location: Location,
    weather: Weather,
    human: HumanParams | None = None,
    precomputed: PrecomputedData | None = None,
    tile_size: int = 1024,
    use_anisotropic_sky: bool | None = None,
    conifer: bool = False,
    physics: SimpleNamespace | None = None,
    materials: SimpleNamespace | None = None,
    max_shadow_distance_m: float = MAX_BUFFER_M,
    tile_workers: int | None = None,
    tile_queue_depth: int | None = None,
    prefetch_tiles: bool | None = None,
    progress_callback: Callable[..., Any] | None = None,
) -> SolweigResult:
    """
    Calculate mean radiant temperature using tiled processing for large rasters.

    Processes the raster in tiles with overlapping buffers to ensure accurate
    shadow calculations at tile boundaries.

    Args:
        surface: Surface/terrain data (DSM required).
        location: Geographic location (lat, lon, UTC offset).
        weather: Weather data for a single timestep.
        human: Human body parameters. Uses defaults if not provided.
        precomputed: Optional pre-computed SVF/walls/shadow matrices.
        tile_size: Core tile size in pixels (default 1024).
        use_anisotropic_sky: Use anisotropic sky model.
            If None, follows calculate() default behavior.
        conifer: Treat vegetation as evergreen conifers. Default False.
        physics: Physics parameters. If None, uses bundled defaults.
        materials: Material properties. If None, uses bundled defaults.
        max_shadow_distance_m: Upper bound on shadow reach in meters (default 500.0).
            The actual buffer is computed from the tallest DSM pixel via
            calculate_buffer_distance(), capped at this value.
        tile_workers: Number of worker threads for tile execution. If None,
            uses adaptive default based on CPU count.
        tile_queue_depth: Extra queued tile tasks beyond active workers.
            If None, defaults to one queue slot per worker when prefetching.
        prefetch_tiles: Whether to prefetch queued tile tasks. If None,
            chooses automatically based on estimated memory pressure.
        progress_callback: Optional callback(tile_idx, total_tiles).

    Returns:
        SolweigResult with Tmrt grid. State is not returned for single-timestep
        tiled mode.
    """

    if human is None:
        human = HumanParams()

    # Fail fast: SVF must come from surface.svf or precomputed.svf.
    if surface.svf is None and (precomputed is None or precomputed.svf is None):
        raise MissingPrecomputedData(
            "Sky View Factor (SVF) data is required but not available.",
            "Call surface.compute_svf() before calculate_tiled(), or use SurfaceData.prepare() "
            "which computes SVF automatically.",
        )

    # Anisotropic sky needs shadow matrices from either source.
    if use_anisotropic_sky:
        has_shadow_matrices = (precomputed is not None and precomputed.shadow_matrices is not None) or (
            surface.shadow_matrices is not None
        )
        if not has_shadow_matrices:
            raise MissingPrecomputedData(
                "shadow_matrices required for anisotropic sky model",
                "Either set use_anisotropic_sky=False, or provide shadow matrices via "
                "precomputed=PrecomputedData(shadow_matrices=...) or surface.shadow_matrices",
            )

    # Compute derived weather values
    if not weather._derived_computed:
        weather.compute_derived(location)

    rows, cols = surface.shape
    pixel_size = surface.pixel_size

    # Height-aware buffer: use relative max building height (not absolute elevation)
    max_height = surface.max_height
    buffer_m = calculate_buffer_distance(max_height, max_shadow_distance_m=max_shadow_distance_m)
    buffer_pixels = int(np.ceil(buffer_m / pixel_size))
    logger.info(f"Buffer: {buffer_m:.0f}m ({buffer_pixels}px) from max height {max_height:.1f}m")

    # Validate and adjust tile size
    adjusted_tile_size, warning = validate_tile_size(tile_size, buffer_pixels, pixel_size)
    if warning:
        logger.warning(warning)

    # Check if tiling is actually needed
    if rows <= adjusted_tile_size and cols <= adjusted_tile_size:
        logger.info(f"Raster {rows}x{cols} fits in single tile, using non-tiled calculation")
        from .api import calculate

        return calculate(
            surface=surface,
            location=location,
            weather=weather,
            human=human,
            precomputed=precomputed,
            use_anisotropic_sky=use_anisotropic_sky,
            conifer=conifer,
            physics=physics,
            materials=materials,
            max_shadow_distance_m=max_shadow_distance_m,
        )

    # Generate tiles
    tiles = generate_tiles(rows, cols, adjusted_tile_size, buffer_pixels)
    n_tiles = len(tiles)

    from .api import calculate

    logger.info(
        f"Tiled processing: {rows}x{cols} raster, {n_tiles} tiles, "
        f"tile_size={adjusted_tile_size}, buffer={buffer_m:.0f}m ({buffer_pixels}px) from max height {max_height:.1f}m"
    )

    # Initialize output arrays (NaN where no tile result is written)
    tmrt_out = np.full((rows, cols), np.nan, dtype=np.float32)
    shadow_out = np.full((rows, cols), np.nan, dtype=np.float32)
    kdown_out = np.full((rows, cols), np.nan, dtype=np.float32)
    kup_out = np.full((rows, cols), np.nan, dtype=np.float32)
    ldown_out = np.full((rows, cols), np.nan, dtype=np.float32)
    lup_out = np.full((rows, cols), np.nan, dtype=np.float32)

    # Set up progress reporting (internal reporter only when no callback given)
    from .progress import ProgressReporter

    _progress = None if progress_callback is not None else ProgressReporter(total=n_tiles, desc="SOLWEIG tiled")

    # Submit tiles in parallel — Rust releases the GIL during compute_timestep.
    from concurrent.futures import ThreadPoolExecutor, as_completed

    n_workers = _resolve_tile_workers(tile_workers, n_tiles)
    effective_prefetch = (
        prefetch_tiles
        if prefetch_tiles is not None
        else _resolve_prefetch_default(n_workers, n_tiles, adjusted_tile_size, buffer_pixels)
    )
    inflight_limit = _resolve_inflight_limit(n_workers, n_tiles, tile_queue_depth, effective_prefetch)
    logger.info(f"Tiled runtime: workers={n_workers}, inflight_limit={inflight_limit}, prefetch={effective_prefetch}")
    completed = 0

    with ThreadPoolExecutor(max_workers=n_workers) as executor:
        futures: dict[Any, tuple[int, TileSpec]] = {}
        submit_times: dict[Any, float] = {}
        max_queue = 0
        turnaround_sum = 0.0

        def _submit_tile(tile_idx: int, tile: TileSpec) -> None:
            # Slicing happens on the submitting thread; compute runs in the pool.
            tile_surface = _extract_tile_surface(surface, tile, pixel_size, precomputed=precomputed)
            tile_precomputed = _slice_tile_precomputed(precomputed, tile)

            future = executor.submit(
                calculate,
                surface=tile_surface,
                location=location,
                weather=weather,
                human=human,
                precomputed=tile_precomputed,
                use_anisotropic_sky=use_anisotropic_sky,
                conifer=conifer,
                state=None,
                physics=physics,
                materials=materials,
                max_shadow_distance_m=max_shadow_distance_m,
                return_state_copy=False,
            )
            futures[future] = (tile_idx, tile)
            submit_times[future] = time.perf_counter()

        # Initial fill up to the in-flight limit.
        next_tile = 0
        while next_tile < n_tiles and len(futures) < inflight_limit:
            tile = tiles[next_tile]
            _submit_tile(next_tile, tile)
            next_tile += 1

        # Drain completions; refill the window after each one.
        while futures:
            future = next(as_completed(futures))
            tile_idx, tile = futures.pop(future)
            submit_t = submit_times.pop(future)
            tile_result = future.result()
            _write_tile_result(tile_result, tile, tmrt_out, shadow_out, kdown_out, kup_out, ldown_out, lup_out)

            turnaround_sum += time.perf_counter() - submit_t
            completed += 1
            if _progress is not None:
                _progress.set_text(f"Tile {completed}/{n_tiles}")
                _progress.update(1)
            if progress_callback:
                progress_callback(completed, n_tiles)

            while next_tile < n_tiles and len(futures) < inflight_limit:
                tile = tiles[next_tile]
                _submit_tile(next_tile, tile)
                next_tile += 1
            # NOTE(review): max_queue is first sampled only after a completion
            # and refill, so the initial fill's queue depth is not recorded.
            max_queue = max(max_queue, max(0, len(futures) - n_workers))

    if _progress is not None:
        _progress.close()
    if completed > 0:
        mean_turnaround_ms = (turnaround_sum / completed) * 1000.0
        logger.info(f"Tiled telemetry: mean_turnaround={mean_turnaround_ms:.1f}ms, max_queue={max_queue}")

    return SolweigResult(
        tmrt=tmrt_out,
        shadow=shadow_out,
        kdown=kdown_out,
        kup=kup_out,
        ldown=ldown_out,
        lup=lup_out,
        utci=None,
        pet=None,
        state=None,
    )


def calculate_timeseries_tiled(
    surface: SurfaceData,
    weather_series: list[Weather],
    location: Location,
    config: ModelConfig | None = None,
    human: HumanParams | None = None,
    precomputed: PrecomputedData | None = None,
    use_anisotropic_sky: bool | None = None,
    conifer: bool = False,
    physics: SimpleNamespace | None = None,
    materials: SimpleNamespace | None = None,
    wall_material: str | None = None,
    max_shadow_distance_m: float | None = None,
    tile_workers: int | None = None,
    tile_queue_depth: int | None = None,
    prefetch_tiles: bool | None = None,
    output_dir: str | Path | None = None,
    outputs: list[str] | None = None,
    timestep_outputs: list[str] | None = None,
    heat_thresholds_day: list[float] | None = None,
    heat_thresholds_night: list[float] | None = None,
    progress_callback: Callable[[int, int], None] | None = None,
) -> TimeseriesSummary:
    """
    Calculate Tmrt timeseries using tiled processing for large rasters.
+ + Automatically divides large rasters into overlapping tiles and processes + each timestep tile-by-tile, preserving thermal state accumulation across + both tiles and timesteps. + + This function is called automatically by calculate_timeseries() when the + raster exceeds the resource-derived maximum tile side in either dimension. + + Args: + surface: Surface/terrain data (DSM required). + weather_series: List of Weather objects in chronological order. + location: Geographic location (lat, lon, UTC offset). + config: Model configuration (provides defaults for None params). + human: Human body parameters. If None, uses config or defaults. + precomputed: Optional pre-computed SVF/walls/shadow matrices. + use_anisotropic_sky: Use anisotropic sky model. + Shadow matrices are spatially sliced per tile. + conifer: Treat vegetation as evergreen conifers. Default False. + physics: Physics parameters. If None, uses config or bundled defaults. + materials: Material properties. If None, uses config or bundled defaults. + wall_material: Wall material type for temperature model. + max_shadow_distance_m: Upper bound on shadow reach in meters. + If None, uses config or default (500.0). The actual buffer is + computed from the tallest DSM pixel via calculate_buffer_distance(). + tile_workers: Number of worker threads for tile execution. If None, + uses config.tile_workers or adaptive default. + tile_queue_depth: Extra queued tile tasks beyond active workers. + If None, uses config.tile_queue_depth or runtime default. + prefetch_tiles: Whether to prefetch queued tile tasks. If None, + uses config.prefetch_tiles or defaults to True. + output_dir: Directory to save results incrementally as GeoTIFF. + outputs: Which outputs to save (e.g., ["tmrt", "shadow"]). + timestep_outputs: Which per-timestep arrays to retain in memory + (e.g., ``["tmrt", "shadow"]``). Default None (summary-only). + heat_thresholds_day: UTCI thresholds (°C) for daytime exceedance hours. + Default ``[32, 38]``. 
+ heat_thresholds_night: UTCI thresholds (°C) for nighttime exceedance hours. + Default ``[26]``. + progress_callback: Optional callback(current_step, total_steps). + + Returns: + :class:`TimeseriesSummary` with aggregated grids and metadata. + """ + if not weather_series: + return TimeseriesSummary.empty() + + anisotropic_requested_explicitly = use_anisotropic_sky is True + + # Resolve effective parameters from config + effective_aniso = use_anisotropic_sky + effective_human = human + effective_physics = physics + effective_materials = materials + effective_outputs = outputs + effective_max_shadow = max_shadow_distance_m + effective_tile_workers = tile_workers + effective_tile_queue_depth = tile_queue_depth + effective_prefetch_tiles = prefetch_tiles + + if config is not None: + if effective_aniso is None: + effective_aniso = config.use_anisotropic_sky + if effective_human is None: + effective_human = config.human + if effective_physics is None: + effective_physics = config.physics + if effective_materials is None: + effective_materials = config.materials + if effective_outputs is None and config.outputs: + effective_outputs = config.outputs + if effective_max_shadow is None: + effective_max_shadow = config.max_shadow_distance_m + if effective_tile_workers is None: + effective_tile_workers = config.tile_workers + if effective_tile_queue_depth is None: + effective_tile_queue_depth = config.tile_queue_depth + if effective_prefetch_tiles is None: + effective_prefetch_tiles = config.prefetch_tiles + + if effective_aniso is None: + # Keep behavior aligned with calculate() and ModelConfig defaults. 
+ effective_aniso = True + if effective_human is None: + effective_human = HumanParams() + if effective_max_shadow is None: + effective_max_shadow = MAX_BUFFER_M + if effective_materials is None: + from .loaders import load_params + + effective_materials = load_params() + if effective_physics is None: + from .loaders import load_physics + + effective_physics = load_physics() + anisotropic_arg = effective_aniso if (anisotropic_requested_explicitly or effective_aniso is False) else None + + requested_outputs = None + if output_dir is not None and effective_outputs: + requested_outputs = {"tmrt", "shadow"} | set(effective_outputs) + elif timestep_outputs is not None: + # Keep specific per-timestep arrays; always include tmrt + shadow for accumulator. + requested_outputs = {"tmrt", "shadow"} | set(timestep_outputs) + else: + # Summary-only mode: only need tmrt + shadow for accumulation. + requested_outputs = {"tmrt", "shadow"} + + need_shadow = requested_outputs is None or "shadow" in requested_outputs + need_kdown = requested_outputs is None or "kdown" in requested_outputs + need_kup = requested_outputs is None or "kup" in requested_outputs + need_ldown = requested_outputs is None or "ldown" in requested_outputs + need_lup = requested_outputs is None or "lup" in requested_outputs + + # Fill NaN in surface layers + surface.fill_nan() + + if surface.svf is None and (precomputed is None or precomputed.svf is None): + raise MissingPrecomputedData( + "Sky View Factor (SVF) data is required but not available.", + "Call surface.compute_svf() before calculate_timeseries_tiled(), or use SurfaceData.prepare() " + "which computes SVF automatically.", + ) + + if anisotropic_requested_explicitly and effective_aniso: + has_shadow_matrices = (precomputed is not None and precomputed.shadow_matrices is not None) or ( + surface.shadow_matrices is not None + ) + if not has_shadow_matrices: + raise MissingPrecomputedData( + "shadow_matrices required for anisotropic sky model", + "Either 
set use_anisotropic_sky=False, or provide shadow matrices via " + "precomputed=PrecomputedData(shadow_matrices=...) or surface.shadow_matrices", + ) + + rows, cols = surface.shape + pixel_size = surface.pixel_size + + # Height-aware buffer: use relative max building height (not absolute elevation) + max_height = surface.max_height + buffer_m = calculate_buffer_distance(max_height, max_shadow_distance_m=effective_max_shadow) + buffer_pixels = int(np.ceil(buffer_m / pixel_size)) + logger.info(f"Buffer: {buffer_m:.0f}m ({buffer_pixels}px) from max height {max_height:.1f}m") + + # Determine tile size + tile_size = _calculate_auto_tile_size(rows, cols) + adjusted_tile_size, warning = validate_tile_size(tile_size, buffer_pixels, pixel_size) + if warning: + logger.warning(warning) + + # Generate tiles + tiles = generate_tiles(rows, cols, adjusted_tile_size, buffer_pixels) + n_tiles = len(tiles) + n_steps = len(weather_series) + + # Large one-tile runs can underutilize CPU/GPU in timeseries mode. + # Optionally split into several tiles to increase overlap and throughput. 
+ if n_tiles == 1: + adjusted_parallel = _maybe_subdivide_single_tile_for_timeseries( + rows, + cols, + adjusted_tile_size, + buffer_pixels, + pixel_size, + effective_tile_workers, + ) + if adjusted_parallel < adjusted_tile_size: + adjusted_tile_size = adjusted_parallel + tiles = generate_tiles(rows, cols, adjusted_tile_size, buffer_pixels) + n_tiles = len(tiles) + + # Pre-compute weather (sun positions, radiation) + from .timeseries import _precompute_weather + + logger.info("=" * 60) + logger.info("Starting SOLWEIG tiled timeseries calculation") + logger.info(f" Grid size: {cols}x{rows} pixels") + logger.info(f" Timesteps: {n_steps}") + start_str = weather_series[0].datetime.strftime("%Y-%m-%d %H:%M") + end_str = weather_series[-1].datetime.strftime("%Y-%m-%d %H:%M") + logger.info(f" Period: {start_str} -> {end_str}") + logger.info(f" Location: {location.latitude:.2f}N, {location.longitude:.2f}E") + logger.info( + f" Tiles: {n_tiles} (size={adjusted_tile_size}, buffer={buffer_m:.0f}m from max height {max_height:.1f}m)" + ) + logger.info("=" * 60) + + logger.info("Pre-computing sun positions and radiation splits...") + precompute_start = time.time() + _precompute_weather(weather_series, location) + precompute_time = time.time() - precompute_start + logger.info(f" Pre-computed {n_steps} timesteps in {precompute_time:.1f}s") + + output_path: Path | None = None + # Create output directory if needed + if output_dir is not None: + output_path = Path(output_dir) + output_path.mkdir(parents=True, exist_ok=True) + use_async_output = output_dir is not None and effective_outputs and async_output_enabled() + _writer = ( + AsyncGeoTiffWriter(output_dir=output_path, surface=surface) + if use_async_output and output_path is not None + else None + ) + + # Import calculate + from .api import calculate + + n_workers = _resolve_tile_workers(effective_tile_workers, n_tiles) + if effective_prefetch_tiles is None: + effective_prefetch_tiles = _resolve_prefetch_default(n_workers, 
n_tiles, adjusted_tile_size, buffer_pixels) + inflight_limit = _resolve_inflight_limit( + n_workers, + n_tiles, + effective_tile_queue_depth, + effective_prefetch_tiles, + ) + logger.info( + f"Tiled runtime: workers={n_workers}, inflight_limit={inflight_limit}, prefetch={effective_prefetch_tiles}" + ) + + # Initialize global state + state = ThermalState.initial(surface.shape) + if len(weather_series) >= 2: + dt0 = weather_series[0].datetime + dt1 = weather_series[1].datetime + state.timestep_dec = (dt1 - dt0).total_seconds() / 86400.0 + _timestep_hours = (dt1 - dt0).total_seconds() / 3600.0 + else: + _timestep_hours = 1.0 + + # Grid accumulator for summary statistics + _accumulator = GridAccumulator( + shape=surface.shape, + heat_thresholds_day=heat_thresholds_day if heat_thresholds_day is not None else [32.0, 38.0], + heat_thresholds_night=heat_thresholds_night if heat_thresholds_night is not None else [26.0], + timestep_hours=_timestep_hours, + ) + + results = [] + processed_steps = 0 + total_work = n_steps * n_tiles + start_time = time.time() + + # Set up progress reporting + from .progress import ProgressReporter + + _progress = ( + None if progress_callback is not None else ProgressReporter(total=total_work, desc="SOLWEIG tiled timeseries") + ) + + # Pre-create tile data once — surfaces and precomputed data don't change + # between timesteps. This eliminates N_timesteps × N_tiles redundant copies + # and allows GVF geometry cache + buffer pool to persist across timesteps. 
+ tile_surfaces = [_extract_tile_surface(surface, tile, pixel_size, precomputed=precomputed) for tile in tiles] + tile_precomputeds = [_slice_tile_precomputed(precomputed, tile) for tile in tiles] + tile_states = [_slice_tile_state(state, tile) for tile in tiles] + + from concurrent.futures import FIRST_COMPLETED, ThreadPoolExecutor, wait + + try: + with ThreadPoolExecutor(max_workers=n_workers) as executor: + for t_idx, weather in enumerate(weather_series): + # Initialize output arrays for this timestep + tmrt_out = np.full((rows, cols), np.nan, dtype=np.float32) + shadow_out = np.full((rows, cols), np.nan, dtype=np.float32) if need_shadow else None + kdown_out = np.full((rows, cols), np.nan, dtype=np.float32) if need_kdown else None + kup_out = np.full((rows, cols), np.nan, dtype=np.float32) if need_kup else None + ldown_out = np.full((rows, cols), np.nan, dtype=np.float32) if need_ldown else None + lup_out = np.full((rows, cols), np.nan, dtype=np.float32) if need_lup else None + + # Refresh preallocated tile states from the current global state. + for tile_idx, tile in enumerate(tiles): + _refresh_tile_state(tile_states[tile_idx], state, tile) + + # Submit tiles in parallel and drain by completion order. + # This avoids head-of-line blocking when one tile is slower. + futures: dict[Any, int] = {} + submit_times: dict[int, float] = {} + next_submit = 0 + completed_tiles = 0 + max_queue = 0 + turnaround_sum = 0.0 + + # Keep only a bounded number of tile tasks in flight. 
+ while completed_tiles < n_tiles: + while next_submit < n_tiles and len(futures) < inflight_limit: + future = executor.submit( + calculate, + surface=tile_surfaces[next_submit], + location=location, + weather=weather, + human=effective_human, + precomputed=tile_precomputeds[next_submit], + use_anisotropic_sky=anisotropic_arg, + conifer=conifer, + state=tile_states[next_submit], + physics=effective_physics, + materials=effective_materials, + wall_material=wall_material, + max_shadow_distance_m=effective_max_shadow, + return_state_copy=False, + _requested_outputs=requested_outputs, + ) + futures[future] = next_submit + submit_times[next_submit] = time.perf_counter() + next_submit += 1 + max_queue = max(max_queue, max(0, len(futures) - n_workers)) + + done, _ = wait(futures, return_when=FIRST_COMPLETED) + if not done: + continue + for future in done: + tile_idx = futures.pop(future) + tile = tiles[tile_idx] + submit_t = submit_times.pop(tile_idx) + + if _TIMING_ENABLED: + _t0 = time.perf_counter() + + tile_result = future.result() + turnaround_sum += time.perf_counter() - submit_t + + if _TIMING_ENABLED: + _t_ffi = time.perf_counter() - _t0 + _t1 = time.perf_counter() + + # Write core results to global arrays (non-overlapping write_slice) + _write_tile_result( + tile_result, + tile, + tmrt_out, + shadow_out, + kdown_out, + kup_out, + ldown_out, + lup_out, + ) + + # Merge tile state back to global state (non-overlapping write_slice) + if tile_result.state is not None: + _merge_tile_state(tile_result.state, tile, state) + + if _TIMING_ENABLED: + _t_merge = time.perf_counter() - _t1 + print( + f"[TIMING] tile {tile_idx + 1}/{n_tiles} " + f"ffi={_t_ffi * 1000:.1f}ms " + f"merge={_t_merge * 1000:.1f}ms", + file=sys.stderr, + ) + + # Report progress + completed_tiles += 1 + step = t_idx * n_tiles + completed_tiles + if progress_callback is not None: + progress_callback(step, total_work) + elif _progress is not None: + _progress.update(1) + + mean_turnaround_ms = 
(turnaround_sum / n_tiles) * 1000.0 if n_tiles > 0 else 0.0 + logger.debug( + f"Tiled timestep telemetry: step={t_idx + 1}/{n_steps}, " + f"mean_turnaround={mean_turnaround_ms:.1f}ms, max_queue={max_queue}" + ) + + # Log timestep completion + elapsed = time.time() - start_time + rate = (t_idx + 1) / elapsed if elapsed > 0 else 0 + logger.info(f" Timestep {t_idx + 1}/{n_steps} complete ({rate:.2f} steps/s)") + + # Create result for this timestep + result = SolweigResult( + tmrt=tmrt_out, + shadow=shadow_out, + kdown=kdown_out, + kup=kup_out, + ldown=ldown_out, + lup=lup_out, + utci=None, + pet=None, + state=None, # State managed externally + ) + + # Update grid accumulator (before potential array release) + _accumulator.update(result, weather, compute_utci_fn=compute_utci_grid) + + # Compute per-timestep UTCI/PET if requested (for in-memory or file output) + _need_utci = (timestep_outputs is not None and "utci" in timestep_outputs) or ( + effective_outputs is not None and "utci" in effective_outputs + ) + _need_pet = (timestep_outputs is not None and "pet" in timestep_outputs) or ( + effective_outputs is not None and "pet" in effective_outputs + ) + if _need_utci and result.utci is None: + result.utci = compute_utci_grid(result.tmrt, weather.ta, weather.rh, weather.ws) + if _need_pet and result.pet is None: + result.pet = compute_pet_grid(result.tmrt, weather.ta, weather.rh, weather.ws, effective_human) + + # Save per-timestep outputs if output_dir and outputs are provided + if _writer is not None and effective_outputs: + _writer.submit( + timestamp=weather.datetime, + arrays=collect_output_arrays(result, effective_outputs), + ) + elif output_dir is not None and effective_outputs: + result.to_geotiff( + output_dir=output_dir, + timestamp=weather.datetime, + outputs=effective_outputs, + surface=surface, + ) + + if timestep_outputs is not None: + # Keep only requested fields; free the rest. 
+ _keep = set(timestep_outputs) + if "tmrt" not in _keep: + result.tmrt = None # type: ignore[assignment] + if "shadow" not in _keep: + result.shadow = None + if "kdown" not in _keep: + result.kdown = None + if "kup" not in _keep: + result.kup = None + if "ldown" not in _keep: + result.ldown = None + if "lup" not in _keep: + result.lup = None + if "utci" not in _keep: + result.utci = None + if "pet" not in _keep: + result.pet = None + results.append(result) + else: + # Summary-only: free all large arrays. + result.tmrt = None # type: ignore[assignment] + result.shadow = None + result.kdown = None + result.kup = None + result.ldown = None + result.lup = None + result.utci = None + result.pet = None + processed_steps += 1 + finally: + if _progress is not None: + _progress.close() + if _writer is not None: + _writer.close() + + # Finalize summary + summary = _accumulator.finalize() + summary.results = results # empty list when timestep_outputs=None + summary._surface = surface + + # Log summary + total_time = time.time() - start_time + overall_rate = processed_steps / total_time if total_time > 0 else 0 + + logger.info("=" * 60) + logger.info(f"Calculation complete: {processed_steps} timesteps processed (tiled)") + logger.info(f" Total time: {total_time:.1f}s ({overall_rate:.2f} steps/s)") + if summary.n_timesteps > 0: + _valid_mean = np.nanmean(summary.tmrt_mean) + _valid_min = np.nanmin(summary.tmrt_min) + _valid_max = np.nanmax(summary.tmrt_max) + logger.info(f" Tmrt range: {_valid_min:.1f}C - {_valid_max:.1f}C (mean: {_valid_mean:.1f}C)") + + if output_dir is not None and effective_outputs is not None: + file_count = processed_steps * len(effective_outputs) + logger.info(f" Files saved: {file_count} GeoTIFFs in {output_dir}") + logger.info("=" * 60) + + # Save summary grids and run metadata if output_dir provided + if output_dir is not None: + summary.to_geotiff(output_dir, surface=surface) + summary._output_dir = Path(output_dir) + from .metadata import 
create_run_metadata, save_run_metadata + + metadata = create_run_metadata( + surface=surface, + location=location, + weather_series=weather_series, + human=effective_human, + physics=effective_physics, + materials=effective_materials, + use_anisotropic_sky=effective_aniso, + conifer=conifer, + output_dir=output_dir, + outputs=effective_outputs, + ) + save_run_metadata(metadata, output_dir) + + return summary diff --git a/pysrc/solweig/timeseries.py b/pysrc/solweig/timeseries.py new file mode 100644 index 0000000..9299480 --- /dev/null +++ b/pysrc/solweig/timeseries.py @@ -0,0 +1,585 @@ +"""Time-series SOLWEIG calculation with thermal state management. + +Provides :func:`calculate_timeseries`, a convenience wrapper around +:func:`~solweig.api.calculate` that iterates over a list of +:class:`~solweig.Weather` objects, carrying thermal state (ground and +wall temperatures) forward between timesteps. Large rasters are +transparently routed to the tiled processing path. +""" + +from __future__ import annotations + +import time +from collections.abc import Callable +from pathlib import Path +from types import SimpleNamespace +from typing import TYPE_CHECKING + +import numpy as np + +from .metadata import create_run_metadata, save_run_metadata +from .models import HumanParams, Location, ThermalState +from .output_async import AsyncGeoTiffWriter, async_output_enabled, collect_output_arrays +from .postprocess import compute_pet_grid, compute_utci_grid +from .progress import ProgressReporter +from .solweig_logging import get_logger +from .summary import GridAccumulator, TimeseriesSummary + +logger = get_logger(__name__) + + +def _precompute_weather(weather_series: list, location: Location) -> None: + """ + Pre-compute derived weather values for all timesteps efficiently. + + Optimizations: + 1. Compute max sun altitude (altmax) only once per unique day + 2. 
Pre-assign altmax to Weather objects to skip the 96-iteration loop + + This reduces compute_derived() from O(96) iterations to O(1) per timestep + when multiple timesteps share the same day. + + Args: + weather_series: List of Weather objects to process + location: Geographic location for sun position calculations + """ + if not weather_series: + return + + from datetime import timedelta + + import numpy as np + + from .physics import sun_position as sp + + location_dict = location.to_sun_position_dict() + + # Step 1: Compute altmax once per unique day + altmax_cache = {} # date -> altmax + + for weather in weather_series: + day = weather.datetime.date() + if day not in altmax_cache: + # Compute max sun altitude for this day (iterate in 15-min intervals) + ymd = weather.datetime.replace(hour=0, minute=0, second=0, microsecond=0) + sunmaximum = -90.0 + fifteen_min = 15.0 / 1440.0 # 15 minutes as fraction of day + + for step in range(96): # 24 hours * 4 (15-min intervals) + step_time = ymd + timedelta(days=step * fifteen_min) + time_dict_step = { + "year": step_time.year, + "month": step_time.month, + "day": step_time.day, + "hour": step_time.hour, + "min": step_time.minute, + "sec": 0, + "UTC": location.utc_offset, + } + sun_step = sp.sun_position(time_dict_step, location_dict) + zenith_step = sun_step["zenith"] + zenith_val = ( + float(np.asarray(zenith_step).flat[0]) if hasattr(zenith_step, "__iter__") else float(zenith_step) + ) + altitude_step = 90.0 - zenith_val + if altitude_step > sunmaximum: + sunmaximum = altitude_step + + altmax_cache[day] = max(sunmaximum, 0.0) + + # Step 2: Pre-assign altmax to each weather object + for weather in weather_series: + day = weather.datetime.date() + weather.precomputed_altmax = altmax_cache[day] + + # Step 3: Compute derived values (now fast since altmax is cached) + for weather in weather_series: + if not weather._derived_computed: + weather.compute_derived(location) + + +if TYPE_CHECKING: + from .models import ( + 
ModelConfig, + PrecomputedData, + SurfaceData, + Weather, + ) + + +def calculate_timeseries( + surface: SurfaceData, + weather_series: list[Weather], + location: Location | None = None, + config: ModelConfig | None = None, + human: HumanParams | None = None, + precomputed: PrecomputedData | None = None, + use_anisotropic_sky: bool | None = None, + conifer: bool = False, + physics: SimpleNamespace | None = None, + materials: SimpleNamespace | None = None, + wall_material: str | None = None, + max_shadow_distance_m: float | None = None, + tile_workers: int | None = None, + tile_queue_depth: int | None = None, + prefetch_tiles: bool | None = None, + output_dir: str | Path | None = None, + outputs: list[str] | None = None, + timestep_outputs: list[str] | None = None, + heat_thresholds_day: list[float] | None = None, + heat_thresholds_night: list[float] | None = None, + progress_callback: Callable[[int, int], None] | None = None, +) -> TimeseriesSummary: + """ + Calculate Tmrt for a time series of weather data. + + Returns a :class:`TimeseriesSummary` with aggregated per-pixel grids + (mean/max/min Tmrt and UTCI, sun/shade hours, heat-stress exceedance). + Per-timestep arrays are only retained when ``timestep_outputs`` is provided. + + Maintains thermal state across timesteps for accurate surface temperature + modeling with thermal inertia (TsWaveDelay_2015a). + + Large rasters are automatically processed using overlapping tiles to + manage memory. The tile size and buffer distance are computed dynamically + from available GPU/RAM resources and the maximum building height in the DSM, + ensuring accurate shadows at tile boundaries without wasting overlap on + short buildings. + + This is a convenience function that manages state automatically. For custom + control over state, use calculate() directly with the state parameter. + + Args: + surface: Surface/terrain data (DSM required, CDSM/DEM optional). + weather_series: List of Weather objects in chronological order. 
+ The datetime of each Weather object determines the timestep size. + location: Geographic location (lat, lon, UTC offset). If None, automatically + extracted from surface's CRS metadata. + config: Model configuration object providing base settings. + Explicit parameters override config values when provided. + human: Human body parameters (absorption, posture, weight, height, etc.). + If None, uses config.human or HumanParams defaults. + precomputed: Pre-computed SVF and/or shadow matrices. Optional. + use_anisotropic_sky: Use anisotropic sky model. + If None, uses config.use_anisotropic_sky (default True). + conifer: Treat vegetation as evergreen conifers (always leaf-on). Default False. + physics: Physics parameters (Tree_settings, Posture geometry) from load_physics(). + Site-independent scientific constants. If None, uses config.physics or bundled defaults. + materials: Material properties (albedo, emissivity per landcover class) from load_materials(). + Site-specific landcover parameters. Only needed if surface has land_cover grid. + If None, uses config.materials. + wall_material: Wall material type for temperature model. + One of "brick", "concrete", "wood", "cobblestone" (case-insensitive). + If None (default), uses generic wall params from materials JSON. + max_shadow_distance_m: Maximum shadow reach in metres (default 1000.0). + Caps horizontal shadow ray distance and serves as the tile overlap + buffer for automatic tiled processing of large rasters. If None, + uses config.max_shadow_distance_m or 1000.0. + tile_workers: Number of workers for tiled orchestration. If None, uses + config.tile_workers or adaptive default. + tile_queue_depth: Extra queued tile tasks beyond active workers. If None, + uses config.tile_queue_depth or a runtime default. + prefetch_tiles: Whether to prefetch extra tile tasks beyond active workers. + If None, uses config.prefetch_tiles or runtime auto-selection. + output_dir: Directory to save results. 
If provided, per-timestep results + are saved incrementally as GeoTIFF files during calculation. + Summary grids are always saved to ``output_dir/summary/``. + outputs: Which per-timestep outputs to save as GeoTIFFs (e.g., ["tmrt", "shadow"]). + Only used if output_dir is provided. If None, no per-timestep files are written. + timestep_outputs: Which per-timestep arrays to retain in memory + (e.g., ``["tmrt", "shadow"]``). When provided, ``summary.results`` + contains a list of SolweigResult with those fields populated. + Default None (summary-only, no per-timestep arrays kept). + heat_thresholds_day: UTCI thresholds (°C) for daytime exceedance hours. + Default ``[32, 38]`` (strong / very strong heat stress). + heat_thresholds_night: UTCI thresholds (°C) for nighttime exceedance hours. + Default ``[26]`` (tropical night threshold). + progress_callback: Optional callback(current_step, total_steps) called after + each timestep. If None, a tqdm progress bar is shown automatically. + + Returns: + :class:`TimeseriesSummary` with aggregated grids and metadata. + Access ``summary.results`` for per-timestep arrays (requires + ``timestep_outputs``). 
+ + Example: + # Summary only (default) + summary = calculate_timeseries( + surface=surface, + weather_series=weather_list, + ) + print(summary.tmrt_mean, summary.utci_hours_above[32]) + + # With per-timestep arrays retained + summary = calculate_timeseries( + surface=surface, + weather_series=weather_list, + timestep_outputs=["tmrt", "shadow"], + output_dir="output/", + ) + for r in summary.results: + print(r.tmrt.mean()) + """ + if not weather_series: + return TimeseriesSummary.empty() + + anisotropic_requested_explicitly = use_anisotropic_sky is True + + # Auto-extract location from surface if not provided + if location is None: + logger.warning( + "Location not provided - auto-extracting from surface CRS.\n" + "⚠️ UTC offset will default to 0 if not specified, which may cause incorrect sun positions.\n" + " Recommend: provide location explicitly with correct UTC offset." + ) + location = Location.from_surface(surface) + + # Build effective configuration: explicit params override config + effective_aniso = use_anisotropic_sky + effective_human = human + effective_physics = physics + effective_materials = materials + effective_outputs = outputs + effective_max_shadow = max_shadow_distance_m + effective_tile_workers = tile_workers + effective_tile_queue_depth = tile_queue_depth + effective_prefetch_tiles = prefetch_tiles + + if config is not None: + # Use config values as fallback for None parameters + if effective_aniso is None: + effective_aniso = config.use_anisotropic_sky + if effective_human is None: + effective_human = config.human + if effective_physics is None: + effective_physics = config.physics + if effective_materials is None: + effective_materials = config.materials + if effective_outputs is None and config.outputs: + effective_outputs = config.outputs + if effective_max_shadow is None: + effective_max_shadow = config.max_shadow_distance_m + if effective_tile_workers is None: + effective_tile_workers = config.tile_workers + if 
effective_tile_queue_depth is None: + effective_tile_queue_depth = config.tile_queue_depth + if effective_prefetch_tiles is None: + effective_prefetch_tiles = config.prefetch_tiles + + # Debug log when explicit params override config + overrides = [] + if use_anisotropic_sky is not None and use_anisotropic_sky != config.use_anisotropic_sky: + overrides.append(f"use_anisotropic_sky={use_anisotropic_sky}") + if human is not None and config.human is not None: + overrides.append("human") + if physics is not None and config.physics is not None: + overrides.append("physics") + if materials is not None and config.materials is not None: + overrides.append("materials") + if outputs is not None and config.outputs: + overrides.append("outputs") + if max_shadow_distance_m is not None and max_shadow_distance_m != config.max_shadow_distance_m: + overrides.append(f"max_shadow_distance_m={max_shadow_distance_m}") + if tile_workers is not None and config.tile_workers is not None: + overrides.append(f"tile_workers={tile_workers}") + if tile_queue_depth is not None and config.tile_queue_depth is not None: + overrides.append(f"tile_queue_depth={tile_queue_depth}") + if prefetch_tiles is not None and prefetch_tiles != config.prefetch_tiles: + overrides.append(f"prefetch_tiles={prefetch_tiles}") + if overrides: + logger.debug(f"Explicit params override config: {', '.join(overrides)}") + + # Apply defaults for anything still None + if effective_aniso is None: + # Keep behavior aligned with calculate() and ModelConfig defaults. 
+ effective_aniso = True + if effective_physics is None: + from .loaders import load_physics + + effective_physics = load_physics() + # Auto-load bundled UMEP JSON as default materials (single source of truth) + if effective_materials is None: + from .loaders import load_params + + effective_materials = load_params() + + # Assign back for use in the rest of the function + use_anisotropic_sky = effective_aniso + human = effective_human + physics = effective_physics + materials = effective_materials + outputs = effective_outputs + tile_workers = effective_tile_workers + tile_queue_depth = effective_tile_queue_depth + prefetch_tiles = effective_prefetch_tiles + anisotropic_arg = ( + use_anisotropic_sky if (anisotropic_requested_explicitly or use_anisotropic_sky is False) else None + ) + + # Fill NaN in surface layers (idempotent — skipped if already done) + surface.fill_nan() + + # Auto-tile large rasters transparently + from .tiling import _should_use_tiling + + if _should_use_tiling(surface.shape[0], surface.shape[1]): + from .tiling import calculate_timeseries_tiled + + logger.info( + f"Raster size {surface.dsm.shape[1]}×{surface.dsm.shape[0]} exceeds tiling threshold — " + "switching to tiled processing." 
+ ) + return calculate_timeseries_tiled( + surface=surface, + weather_series=weather_series, + location=location, + human=human, + precomputed=precomputed, + use_anisotropic_sky=anisotropic_arg, + conifer=conifer, + physics=physics, + materials=materials, + wall_material=wall_material, + max_shadow_distance_m=effective_max_shadow, + tile_workers=tile_workers, + tile_queue_depth=tile_queue_depth, + prefetch_tiles=prefetch_tiles, + output_dir=output_dir, + outputs=outputs, + timestep_outputs=timestep_outputs, + heat_thresholds_day=heat_thresholds_day, + heat_thresholds_night=heat_thresholds_night, + progress_callback=progress_callback, + ) + + # Log configuration summary + logger.info("=" * 60) + logger.info("Starting SOLWEIG timeseries calculation") + logger.info(f" Grid size: {surface.dsm.shape[1]}×{surface.dsm.shape[0]} pixels") + logger.info(f" Timesteps: {len(weather_series)}") + start_str = weather_series[0].datetime.strftime("%Y-%m-%d %H:%M") + end_str = weather_series[-1].datetime.strftime("%Y-%m-%d %H:%M") + logger.info(f" Period: {start_str} → {end_str}") + logger.info(f" Location: {location.latitude:.2f}°N, {location.longitude:.2f}°E") + + options = [] + if use_anisotropic_sky: + options.append("anisotropic sky") + if precomputed is not None: + options.append("precomputed SVF") + if options: + logger.info(f" Options: {', '.join(options)}") + + if output_dir is not None and outputs: + logger.info(f" Auto-save: {output_dir} ({', '.join(outputs)})") + elif output_dir is not None: + logger.info(f" Output dir: {output_dir} (summary only)") + logger.info("=" * 60) + + output_path: Path | None = None + # Create output directory if needed + if output_dir is not None: + output_path = Path(output_dir) + output_path.mkdir(parents=True, exist_ok=True) + + # Determine which arrays the Rust compute needs to return. + # tmrt + shadow are always needed for the summary accumulator. 
+ requested_outputs = None + if output_dir is not None and outputs: + requested_outputs = {"tmrt", "shadow"} | set(outputs) + elif timestep_outputs is not None: + # Keep specific per-timestep arrays; always include tmrt + shadow for accumulator. + requested_outputs = {"tmrt", "shadow"} | set(timestep_outputs) + else: + # Summary-only mode: only need tmrt + shadow for accumulation. + requested_outputs = {"tmrt", "shadow"} + + # Import calculate here to avoid circular import + from .api import calculate + + # Pre-compute derived weather values up front (sun position, radiation split) + # Caching altmax per unique day makes this much faster than computing per-step in the main loop + logger.info("Pre-computing sun positions and radiation splits...") + precompute_start = time.time() + _precompute_weather(weather_series, location) + precompute_time = time.time() - precompute_start + logger.info(f" Pre-computed {len(weather_series)} timesteps in {precompute_time:.1f}s") + + results = [] + processed_steps = 0 + state = ThermalState.initial(surface.shape) + + # Pre-calculate timestep size from first two entries (matching runner behavior) + # The runner uses a fixed timestep_dec for all iterations, calculated upfront + if len(weather_series) >= 2: + dt0 = weather_series[0].datetime + dt1 = weather_series[1].datetime + state.timestep_dec = (dt1 - dt0).total_seconds() / 86400.0 + _timestep_hours = (dt1 - dt0).total_seconds() / 3600.0 + else: + _timestep_hours = 1.0 + + # Grid accumulator for summary statistics + _accumulator = GridAccumulator( + shape=surface.shape, + heat_thresholds_day=heat_thresholds_day if heat_thresholds_day is not None else [32.0, 38.0], + heat_thresholds_night=heat_thresholds_night if heat_thresholds_night is not None else [26.0], + timestep_hours=_timestep_hours, + ) + + # Pre-create buffer pool for array reuse across timesteps + _ = surface.get_buffer_pool() + + # Set up progress reporting (caller callback suppresses tqdm) + n_steps = len(weather_series) + _progress = None if 
progress_callback is not None else ProgressReporter(total=n_steps, desc="SOLWEIG timeseries") + use_async_output = output_dir is not None and outputs and async_output_enabled() + _writer = ( + AsyncGeoTiffWriter(output_dir=output_path, surface=surface) + if use_async_output and output_path is not None + else None + ) + + # Start timing + start_time = time.time() + + try: + for i, weather in enumerate(weather_series): + # Process timestep + result = calculate( + surface=surface, + location=location, + weather=weather, + human=human, + precomputed=precomputed, + use_anisotropic_sky=anisotropic_arg, + conifer=conifer, + state=state, + physics=physics, + materials=materials, + wall_material=wall_material, + max_shadow_distance_m=effective_max_shadow, + return_state_copy=False, + _requested_outputs=requested_outputs, + ) + + # Carry forward state to next timestep + if result.state is not None: + state = result.state + result.state = None # Free state arrays (~23 MB); state managed externally + + # Update grid accumulator (before potential array release) + _accumulator.update(result, weather, compute_utci_fn=compute_utci_grid) + + # Compute per-timestep UTCI/PET if requested (for in-memory or file output) + _need_utci = (timestep_outputs is not None and "utci" in timestep_outputs) or ( + outputs is not None and "utci" in outputs + ) + _need_pet = (timestep_outputs is not None and "pet" in timestep_outputs) or ( + outputs is not None and "pet" in outputs + ) + if _need_utci and result.utci is None: + result.utci = compute_utci_grid(result.tmrt, weather.ta, weather.rh, weather.ws) + if _need_pet and result.pet is None: + result.pet = compute_pet_grid(result.tmrt, weather.ta, weather.rh, weather.ws, human) + + # Save per-timestep outputs if output_dir and outputs are provided + if _writer is not None and outputs: + _writer.submit( + timestamp=weather.datetime, + arrays=collect_output_arrays(result, outputs), + ) + elif output_dir is not None and outputs: + 
result.to_geotiff( + output_dir=output_dir, + timestamp=weather.datetime, + outputs=outputs, + surface=surface, + ) + + if timestep_outputs is not None: + # Keep only requested fields; free the rest. + _keep = set(timestep_outputs) + if "tmrt" not in _keep: + result.tmrt = None # type: ignore[assignment] + if "shadow" not in _keep: + result.shadow = None + if "kdown" not in _keep: + result.kdown = None + if "kup" not in _keep: + result.kup = None + if "ldown" not in _keep: + result.ldown = None + if "lup" not in _keep: + result.lup = None + if "utci" not in _keep: + result.utci = None + if "pet" not in _keep: + result.pet = None + results.append(result) + else: + # Summary-only: free all large arrays. + result.tmrt = None # type: ignore[assignment] + result.shadow = None + result.kdown = None + result.kup = None + result.ldown = None + result.lup = None + result.utci = None + result.pet = None + processed_steps += 1 + + # Report progress + if progress_callback is not None: + progress_callback(i + 1, n_steps) + elif _progress is not None: + _progress.update(1) + finally: + if _progress is not None: + _progress.close() + if _writer is not None: + _writer.close() + + # Finalize summary + summary = _accumulator.finalize() + summary.results = results # empty list when timestep_outputs=None + summary._surface = surface + + # Calculate total elapsed time + total_time = time.time() - start_time + overall_rate = processed_steps / total_time if total_time > 0 else 0 + + # Log summary statistics + logger.info("=" * 60) + logger.info(f"✓ Calculation complete: {processed_steps} timesteps processed") + logger.info(f" Total time: {total_time:.1f}s ({overall_rate:.2f} steps/s)") + if summary.n_timesteps > 0: + _valid_mean = np.nanmean(summary.tmrt_mean) + _valid_min = np.nanmin(summary.tmrt_min) + _valid_max = np.nanmax(summary.tmrt_max) + logger.info(f" Tmrt range: {_valid_min:.1f}°C - {_valid_max:.1f}°C (mean: {_valid_mean:.1f}°C)") + + if output_dir is not None and outputs is 
not None: + file_count = processed_steps * len(outputs) + logger.info(f" Files saved: {file_count} GeoTIFFs in {output_dir}") + logger.info("=" * 60) + + # Save summary grids and run metadata if output_dir is provided + if output_dir is not None: + summary.to_geotiff(output_dir, surface=surface) + summary._output_dir = Path(output_dir) + metadata = create_run_metadata( + surface=surface, + location=location, + weather_series=weather_series, + human=human, + physics=physics, + materials=materials, + use_anisotropic_sky=use_anisotropic_sky, + conifer=conifer, + output_dir=output_dir, + outputs=outputs, + ) + save_run_metadata(metadata, output_dir) + + return summary diff --git a/pysrc/solweig/utils.py b/pysrc/solweig/utils.py new file mode 100644 index 0000000..cf7c36e --- /dev/null +++ b/pysrc/solweig/utils.py @@ -0,0 +1,285 @@ +"""Utility functions for geometry and namespace conversion.""" + +from __future__ import annotations + +import logging +from types import SimpleNamespace +from typing import TYPE_CHECKING, Any + +import numpy as np + +from ._compat import GDAL_AVAILABLE, RASTERIO_AVAILABLE + +if TYPE_CHECKING: + from affine import Affine + from numpy.typing import NDArray + +logger = logging.getLogger(__name__) + +if RASTERIO_AVAILABLE: + from rasterio.transform import array_bounds, from_bounds # noqa: F401 + from rasterio.warp import Resampling, reproject # noqa: F401 +elif GDAL_AVAILABLE: + from osgeo import gdal, gdalconst # noqa: F401 + + +# ============================================================================= +# Namespace Conversion (for JSON parameter loading) +# ============================================================================= + + +def dict_to_namespace(d: dict[str, Any] | list | Any) -> SimpleNamespace | list | Any: + """ + Recursively convert dicts to SimpleNamespace. + + This matches the runner's dict_to_namespace function for loading JSON parameters. 
+ + Args: + d: Dictionary, list, or scalar value to convert + + Returns: + SimpleNamespace for dicts, list of converted items for lists, or original value for scalars + """ + if isinstance(d, dict): + # NOTE(review): ** unpacking requires string keys; a dict with non-string keys raises TypeError here + return SimpleNamespace(**{k: dict_to_namespace(v) for k, v in d.items()}) + elif isinstance(d, list): + return [dict_to_namespace(i) for i in d] + else: + return d + + +def namespace_to_dict(ns: SimpleNamespace | Any) -> dict | list | Any: + """ + Recursively convert SimpleNamespace to dict for JSON serialization. + + Inverse of dict_to_namespace. + + Args: + ns: SimpleNamespace, list, or scalar value to convert + + Returns: + Dict for SimpleNamespace, list of converted items for lists, or original value for scalars + """ + if isinstance(ns, SimpleNamespace): + return {k: namespace_to_dict(v) for k, v in vars(ns).items()} + elif isinstance(ns, list): + return [namespace_to_dict(i) for i in ns] + else: + return ns + + +# ============================================================================= +# Geometric Utilities (for raster operations) +# ============================================================================= + + +def extract_bounds(transform: list[float] | Affine, shape: tuple[int, ...]) -> list[float]: + """ + Extract bounding box [minx, miny, maxx, maxy] from affine transform and array shape. + + Works with either rasterio or GDAL backend. 
+ + Args: + transform: Affine transformation matrix (Affine object or GDAL list) + shape: Array shape (rows, cols) + + Returns: + Bounding box as [minx, miny, maxx, maxy] + """ + rows, cols = shape + + if RASTERIO_AVAILABLE: + from affine import Affine as AffineClass + from rasterio.transform import array_bounds + + # Convert list to Affine if needed + if isinstance(transform, list): + transform = AffineClass.from_gdal(*transform) + + bounds = array_bounds(rows, cols, transform) + # array_bounds returns (left, bottom, right, top) + return [bounds[0], bounds[1], bounds[2], bounds[3]] + + elif GDAL_AVAILABLE: + # GDAL geotransform: [x_origin, x_pixel_size, x_rotation, y_origin, y_rotation, y_pixel_size] + # Convert Affine to GDAL list if needed (Affine has .to_gdal() method) + gt = transform if isinstance(transform, list) else list(transform.to_gdal()) + + # NOTE(review): rotation terms are discarded below, so bounds are exact only for axis-aligned (north-up) transforms + x_origin, x_pixel_size, _, y_origin, _, y_pixel_size = gt + + # Calculate bounds + minx = x_origin + maxx = x_origin + cols * x_pixel_size + maxy = y_origin # y_origin is typically top-left (north) + miny = y_origin + rows * y_pixel_size # y_pixel_size is typically negative + + # Ensure correct order (miny < maxy) + if miny > maxy: + miny, maxy = maxy, miny + + return [minx, miny, maxx, maxy] + + else: + raise ImportError( + "Neither rasterio nor GDAL available. Install rasterio (pip install rasterio) " + "or run in OSGeo4W/QGIS environment." + ) + + +def intersect_bounds(bounds_list: list[list[float]]) -> list[float]: + """ + Compute intersection of multiple bounding boxes. 
+ + Args: + bounds_list: List of bounding boxes, each as [minx, miny, maxx, maxy] + + Returns: + Intersection bounding box as [minx, miny, maxx, maxy] + + Raises: + ValueError: If bounding boxes don't intersect + """ + if not bounds_list: + raise ValueError("No bounding boxes provided") + + # Start with first bounds + minx = bounds_list[0][0] + miny = bounds_list[0][1] + maxx = bounds_list[0][2] + maxy = bounds_list[0][3] + + # Compute intersection with remaining bounds + for bounds in bounds_list[1:]: + minx = max(minx, bounds[0]) + miny = max(miny, bounds[1]) + maxx = min(maxx, bounds[2]) + maxy = min(maxy, bounds[3]) + + # Check if intersection is valid; boxes that merely touch (zero width or height) are rejected by >= + if minx >= maxx or miny >= maxy: + raise ValueError(f"Bounding boxes don't intersect: intersection would be [{minx}, {miny}, {maxx}, {maxy}]") + + return [minx, miny, maxx, maxy] + + +def resample_to_grid( + array: NDArray, + src_transform: list[float] | Affine, + target_bbox: list[float], + target_pixel_size: float, + method: str = "bilinear", + src_crs: str | None = None, +) -> tuple[NDArray, Affine]: + """ + Resample array to match target grid specification. + + Works with either rasterio or GDAL backend. 
+ + Args: + array: Source array to resample + src_transform: Source affine transformation (Affine object or GDAL list) + target_bbox: Target bounding box [minx, miny, maxx, maxy] + target_pixel_size: Target pixel size in map units + method: Resampling method ("bilinear" or "nearest") + src_crs: Source CRS (WKT string), required for rasterio reproject + + Returns: + Tuple of (resampled_array, target_transform as Affine) + """ + from affine import Affine as AffineClass + + minx, miny, maxx, maxy = target_bbox + + # Calculate target dimensions + width = int(np.round((maxx - minx) / target_pixel_size)) + height = int(np.round((maxy - miny) / target_pixel_size)) + + if RASTERIO_AVAILABLE: + from rasterio.transform import from_bounds + from rasterio.warp import Resampling, reproject + + # Convert list to Affine if needed + if isinstance(src_transform, list): + src_transform = AffineClass.from_gdal(*src_transform) + + # Create target transform + target_transform = from_bounds(minx, miny, maxx, maxy, width, height) + + # Create destination array + destination = np.zeros((height, width), dtype=array.dtype) + + # Select resampling method + resampling_method = Resampling.nearest if method == "nearest" else Resampling.bilinear + + # NOTE(review): rasterio's reproject fails when src_crs is None — confirm all callers on this path supply a CRS + # Reproject (same CRS, just resampling) + reproject( + source=array, + destination=destination, + src_transform=src_transform, + dst_transform=target_transform, + src_crs=src_crs, # Pass through CRS for rasterio + dst_crs=src_crs, # Same CRS (no reprojection, just resampling) + resampling=resampling_method, + ) + + return destination, target_transform + + elif GDAL_AVAILABLE: + from osgeo import gdal, gdalconst + + # Convert Affine to GDAL geotransform if needed (Affine has .to_gdal() method) + src_gt = src_transform if isinstance(src_transform, list) else list(src_transform.to_gdal()) + + # Create target geotransform (top-left origin, positive x, negative y) + target_gt = [minx, target_pixel_size, 0, maxy, 0, -target_pixel_size] + + # Map numpy 
dtype to GDAL type + dtype_map = { + np.float32: gdalconst.GDT_Float32, + np.float64: gdalconst.GDT_Float64, + np.int32: gdalconst.GDT_Int32, + np.int16: gdalconst.GDT_Int16, + np.uint8: gdalconst.GDT_Byte, + np.uint16: gdalconst.GDT_UInt16, + np.uint32: gdalconst.GDT_UInt32, + } + # NOTE(review): dtypes absent from the map (e.g. int64, bool) silently fall back to Float32 via the .get default + gdal_dtype = dtype_map.get(array.dtype.type, gdalconst.GDT_Float32) + + # Select resampling method + resample_alg = gdalconst.GRA_NearestNeighbour if method == "nearest" else gdalconst.GRA_Bilinear + + # Create in-memory source dataset + src_rows, src_cols = array.shape + mem_driver = gdal.GetDriverByName("MEM") + src_ds = mem_driver.Create("", src_cols, src_rows, 1, gdal_dtype) + src_ds.SetGeoTransform(src_gt) + if src_crs: + src_ds.SetProjection(src_crs) + src_ds.GetRasterBand(1).WriteArray(array) + + # Create in-memory destination dataset + dst_ds = mem_driver.Create("", width, height, 1, gdal_dtype) + dst_ds.SetGeoTransform(target_gt) + if src_crs: + dst_ds.SetProjection(src_crs) + + # Perform resampling + gdal.ReprojectImage(src_ds, dst_ds, src_crs, src_crs, resample_alg) + + # Read result + destination = dst_ds.GetRasterBand(1).ReadAsArray() + + # Clean up + src_ds = None + dst_ds = None + + # Create Affine transform for return value + target_transform = AffineClass.from_gdal(*target_gt) + + return destination, target_transform + + else: + raise ImportError( + "Neither rasterio nor GDAL available. Install rasterio (pip install rasterio) " + "or run in OSGeo4W/QGIS environment." + ) diff --git a/pysrc/solweig/walls.py b/pysrc/solweig/walls.py new file mode 100644 index 0000000..78961db --- /dev/null +++ b/pysrc/solweig/walls.py @@ -0,0 +1,59 @@ +""" +Wall height and aspect generation from DSM. + +This algorithm identifies wall pixels and their height from ground and building +digital surface models (DSM) using filters as presented by Lindberg et al. (2015a). +Wall aspect is estimated using a specific linear filter as presented by +Goodwin et al. 
(2009) and further developed by Lindberg et al. (2015b). + +References: +- Goodwin NR, Coops NC, Tooke TR, Christen A, Voogt JA (2009) + Characterizing urban surface cover and structure with airborne lidar technology. + Can J Remote Sens 35:297–309 +- Lindberg F., Grimmond, C.S.B. and Martilli, A. (2015a) + Sunlit fractions on urban facets - Impact of spatial resolution and approach + Urban Climate DOI: 10.1016/j.uclim.2014.11.006 +- Lindberg F., Jonsson, P. & Honjo, T. and Wästberg, D. (2015b) + Solar energy on building envelopes - 3D modelling in a 2D environment + Solar Energy 115 369–378 +""" + +from __future__ import annotations + +from pathlib import Path + +from . import io as common +from .physics import wallalgorithms as wa + + +def generate_wall_hts( + dsm_path: str, + bbox: list[int] | None, + out_dir: str, + wall_limit: float = 1, +): + """ + Generate wall height and aspect rasters from a DSM. + + Args: + dsm_path: Path to the Digital Surface Model raster + bbox: Bounding box [minx, miny, maxx, maxy] or None for full extent + out_dir: Output directory for wall_hts.tif and wall_aspects.tif + wall_limit: Minimum height to be considered a wall (default: 1m) + + Outputs: + wall_hts.tif: Wall heights in meters + wall_aspects.tif: Wall aspect in degrees (0 = North) + """ + dsm_rast, dsm_transf, dsm_crs, _dsm_nd = common.load_raster(dsm_path, bbox, ensure_float32=True) + # NOTE(review): assumes dsm_transf[1] is the pixel width (GDAL geotransform order); if load_raster returns an Affine, index 1 is the rotation term — verify against io.load_raster + dsm_scale = 1 / dsm_transf[1] + + out_path = Path(out_dir) + out_path.mkdir(parents=True, exist_ok=True) + out_path_str = str(out_path) + + walls = wa.findwalls(dsm_rast, wall_limit) + common.save_raster(out_path_str + "/" + "wall_hts.tif", walls, dsm_transf, dsm_crs, ensure_float32=True) + + dirwalls = wa.filter1Goodwin_as_aspect_v3(walls, dsm_scale, dsm_rast) + common.save_raster(out_path_str + "/" + "wall_aspects.tif", dirwalls, dsm_transf, dsm_crs, ensure_float32=True) diff --git a/pysrc/umepr/__init__.py b/pysrc/umepr/__init__.py deleted file mode 100644 index f52e4a5..0000000 --- 
a/pysrc/umepr/__init__.py +++ /dev/null @@ -1,26 +0,0 @@ -""" -UMEP-Rust: Urban Multi-scale Environmental Predictor (Rust implementation) -""" - -import logging - -logger = logging.getLogger(__name__) - -try: - from .rustalgos import GPU_ENABLED, shadowing - - # Export GPU functions at package level - __all__ = ["GPU_ENABLED", "shadowing"] - - # Enable GPU by default if available - if GPU_ENABLED: - shadowing.enable_gpu() - logger.info("GPU acceleration enabled by default") - else: - logger.info("GPU support not compiled in this build") - -except ImportError as e: - # If rustalgos is not available or GPU feature not compiled - logger.warning(f"Failed to import rustalgos GPU functions: {e}") - GPU_ENABLED = False - __all__ = ["GPU_ENABLED"] diff --git a/pysrc/umepr/functions/daily_shading.py b/pysrc/umepr/functions/daily_shading.py deleted file mode 100644 index 0e9a851..0000000 --- a/pysrc/umepr/functions/daily_shading.py +++ /dev/null @@ -1,235 +0,0 @@ -""" -Daily shading calculations for a given DSM and vegetation DSM. -Uses Rust algorithms for shadow calculations. 
-""" - -import datetime as dt -from builtins import range - -import numpy as np -from tqdm import tqdm -from umep import common -from umep.util.SEBESOLWEIGCommonFiles import sun_position as sp - -from ..rustalgos import shadowing - - -def daily_shading( - dsm, - vegdsm, - vegdsm2, - scale, - lon, - lat, - dsm_width, - dsm_height, - tv, - UTC, - usevegdem, - timeInterval, - onetime, - folder, - dsm_transf, - dsm_crs, - trans, - dst, - wallshadow, - wheight, - waspect, -): - # lon = lonlat[0] - # lat = lonlat[1] - year = tv[0] - month = tv[1] - day = tv[2] - - alt = np.median(dsm) - location = {"longitude": lon, "latitude": lat, "altitude": alt} - if usevegdem == 1: - psi = trans - # amaxvalue - vegmax = vegdsm.max() - amaxvalue = dsm.max() - dsm.min() - amaxvalue = np.maximum(amaxvalue, vegmax) - - # Elevation vegdsms if buildingDSM includes ground heights - vegdem = vegdsm + dsm - vegdem[vegdem == dsm] = 0 - vegdem2 = vegdsm2 + dsm - vegdem2[vegdem2 == dsm] = 0 - - # Bush separation - bush = np.logical_not(vegdem2 * vegdem) * vegdem - else: - psi = 1.0 - vegdem = np.zeros_like(dsm) - vegdem2 = np.zeros_like(dsm) - amaxvalue = dsm.max() - dsm.min() - bush = np.zeros_like(dsm) - - shtot = np.zeros((dsm_height, dsm_width)) - - if onetime == 1: - itera = 1 - else: - itera = int(1440 / timeInterval) - - alt = np.zeros(itera) - azi = np.zeros(itera) - hour = 0 - index = 0 - time = dict() - time["UTC"] = UTC - - if wallshadow == 1: - walls = wheight - dirwalls = waspect - else: - walls = np.zeros((dsm_height, dsm_width)) - dirwalls = np.zeros((dsm_height, dsm_width)) - - for i in tqdm(range(0, itera)): - if onetime == 0: - minu = int(timeInterval * i) - if minu >= 60: - hour = int(np.floor(minu / 60)) - minu = int(minu - hour * 60) - else: - minu = tv[4] - hour = tv[3] - - doy = day_of_year(year, month, day) - - ut_time = doy - 1.0 + ((hour - dst) / 24.0) + (minu / (60.0 * 24.0)) + (0.0 / (60.0 * 60.0 * 24.0)) - - if ut_time < 0: - year = year - 1 - month = 12 - day = 31 
- doy = day_of_year(year, month, day) - ut_time = ut_time + doy - 1 - - HHMMSS = dectime_to_timevec(ut_time) - time["year"] = year - time["month"] = month - time["day"] = day - time["hour"] = HHMMSS[0] - time["min"] = HHMMSS[1] - time["sec"] = HHMMSS[2] - - sun = sp.sun_position(time, location) - alt[i] = 90.0 - sun["zenith"] - azi[i] = sun["azimuth"] - - if time["sec"] == 59: # issue 228 and 256 - time["sec"] = 0 - time["min"] = time["min"] + 1 - if time["min"] == 60: - time["min"] = 0 - time["hour"] = time["hour"] + 1 - if time["hour"] == 24: - time["hour"] = 0 - - time_vector = dt.datetime(year, month, day, time["hour"], time["min"], time["sec"]) - timestr = time_vector.strftime("%Y%m%d_%H%M") - if alt[i] > 0: - if wallshadow == 1: # Include wall shadows (Issue #121) - result = shadowing.calculate_shadows_wall_ht_25( - azi[i], - alt[i], - scale, - amaxvalue, - dsm, - vegdem, - vegdem2, - bush, - wheight if wallshadow == 1 else np.zeros((dsm_height, dsm_width)), - waspect * np.pi / 180.0 if wallshadow == 1 else np.zeros((dsm_height, dsm_width)), - None, - None, - None, - ) - sh = result.bldg_sh - (1 - result.veg_sh) * (1 - psi) - if onetime == 0: - filenamewallshve = folder + "/facade_shdw_veg/facade_shdw_veg_" + timestr + "_LST.tif" - common.save_raster(filenamewallshve, result.wall_sh_veg, dsm_transf, dsm_crs) - if onetime == 0: - filename = folder + "/shadow_ground/shadow_ground_" + timestr + "_LST.tif" - common.save_raster(filename, sh, dsm_transf, dsm_crs) - filenamewallsh = folder + "/facade_shdw_bldgs/facade_shdw_bldgs_" + timestr + "_LST.tif" - common.save_raster(filenamewallsh, result.wall_sh, dsm_transf, dsm_crs) - else: - result = shadowing.calculate_shadows_wall_ht_25( - azi[i], - alt[i], - scale, - amaxvalue, - dsm, - vegdem, - vegdem2, - bush, - np.zeros((dsm_height, dsm_width)), - np.zeros((dsm_height, dsm_width)), - None, - None, - None, - ) - sh = result.bldg_sh - (1 - result.veg_sh) * (1 - psi) - if onetime == 0: - filename = folder + "/Shadow_" 
+ timestr + "_LST.tif" - common.save_raster(filename, sh, dsm_transf, dsm_crs) - - shtot = shtot + sh - index += 1 - - shfinal = shtot / index - - if wallshadow == 1: - if onetime == 1: - filenamewallsh = folder + "/facade_shdw_bldgs/facade_shdw_bldgs_" + timestr + "_LST.tif" - common.save_raster(filenamewallsh, result.wall_sh, dsm_transf, dsm_crs) - filenamewallshve = folder + "/facade_shdw_veg/facade_shdw_veg_" + timestr + "_LST.tif" - common.save_raster(filenamewallshve, result.wall_sh_veg, dsm_transf, dsm_crs) - - shadowresult = {"shfinal": shfinal, "time_vector": time_vector} - - return shadowresult - - -def day_of_year(yy, month, day): - if (yy % 4) == 0: - if (yy % 100) == 0: - if (yy % 400) == 0: - leapyear = 1 - else: - leapyear = 0 - else: - leapyear = 1 - else: - leapyear = 0 - - if leapyear == 1: - dayspermonth = [31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31] - else: - dayspermonth = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31] - - doy = np.sum(dayspermonth[0 : month - 1]) + day - - return doy - - -def dectime_to_timevec(dectime): - # This subroutine converts dectime to individual hours, minutes and seconds - - doy = np.floor(dectime) - - DH = dectime - doy - HOURS = int(24 * DH) - - DM = 24 * DH - HOURS - MINS = int(60 * DM) - - DS = 60 * DM - MINS - SECS = int(60 * DS) - - return (HOURS, MINS, SECS) diff --git a/pysrc/umepr/functions/solweig.py b/pysrc/umepr/functions/solweig.py deleted file mode 100644 index 25b02d0..0000000 --- a/pysrc/umepr/functions/solweig.py +++ /dev/null @@ -1,883 +0,0 @@ -""" -Solweig model in Python which calls shadowing and GVF calculations implemented in Rust. - -Implemented from SolweigRunRust class, which inherits from SolweigRunCore. - -This version is a copy except for the changes made to call the Rust functions directly. 
-""" - -from copy import deepcopy - -import numpy as np -from umep.functions.SOLWEIGpython.cylindric_wedge import cylindric_wedge -from umep.functions.SOLWEIGpython.daylen import daylen -from umep.functions.SOLWEIGpython.Kup_veg_2015a import Kup_veg_2015a - -# Anisotropic longwave -from umep.functions.SOLWEIGpython.patch_radiation import patch_steradians -from umep.functions.SOLWEIGpython.TsWaveDelay_2015a import TsWaveDelay_2015a - -# Wall surface temperature scheme -from umep.functions.SOLWEIGpython.wall_surface_temperature import wall_surface_temperature -from umep.util.SEBESOLWEIGCommonFiles.clearnessindex_2013b import clearnessindex_2013b -from umep.util.SEBESOLWEIGCommonFiles.create_patches import create_patches -from umep.util.SEBESOLWEIGCommonFiles.diffusefraction import diffusefraction -from umep.util.SEBESOLWEIGCommonFiles.Perez_v3 import Perez_v3 - -from ..rustalgos import gvf, shadowing, sky, vegetation - - -def Solweig_2025a_calc( - i, - dsm, - scale, - rows, - cols, - svf, - svfN, - svfW, - svfE, - svfS, - svfveg, - svfNveg, - svfEveg, - svfSveg, - svfWveg, - svfaveg, - svfEaveg, - svfSaveg, - svfWaveg, - svfNaveg, - vegdem, - vegdem2, - albedo_b, - absK, - absL, - ewall, - Fside, - Fup, - Fcyl, - altitude, - azimuth, - zen, - jday, - usevegdem, - onlyglobal, - buildings, - location, - psi, - landcover, - lc_grid, - dectime, - altmax, - dirwalls, - walls, - cyl, - elvis, - Ta, - RH, - radG, - radD, - radI, - P, - amaxvalue, - bush, - Twater, - TgK, - Tstart, - alb_grid, - emis_grid, - TgK_wall, - Tstart_wall, - TmaxLST, - TmaxLST_wall, - first, - second, - svfalfa, - svfbuveg, - firstdaytime, - timeadd, - timestepdec, - Tgmap1, - Tgmap1E, - Tgmap1S, - Tgmap1W, - Tgmap1N, - CI, - TgOut1, - diffsh, - shmat, - vegshmat, - vbshvegshmat, - anisotropic_sky, - asvf, - patch_option, - voxelMaps, - voxelTable, - ws, - wallScheme, - timeStep, - steradians, - walls_scheme, - dirwalls_scheme, -): - # def Solweig_2021a_calc(i, dsm, scale, rows, cols, svf, svfN, 
svfW, svfE, svfS, svfveg, svfNveg, svfEveg, svfSveg, - # svfWveg, svfaveg, svfEaveg, svfSaveg, svfWaveg, svfNaveg, vegdem, vegdem2, albedo_b, absK, absL, - # ewall, Fside, Fup, Fcyl, altitude, azimuth, zen, jday, usevegdem, onlyglobal, buildings, location, psi, - # landcover, lc_grid, dectime, altmax, dirwalls, walls, cyl, elvis, Ta, RH, radG, radD, radI, P, - # amaxvalue, bush, Twater, TgK, Tstart, alb_grid, emis_grid, TgK_wall, Tstart_wall, TmaxLST, - # TmaxLST_wall, first, second, svfalfa, svfbuveg, firstdaytime, timeadd, timestepdec, Tgmap1, - # Tgmap1E, Tgmap1S, Tgmap1W, Tgmap1N, CI, TgOut1, diffsh, ani): - - # This is the core function of the SOLWEIG model - # 2016-Aug-28 - # Fredrik Lindberg, fredrikl@gvc.gu.se - # Goteborg Urban Climate Group - # Gothenburg University - # - # Input variables: - # dsm = digital surface model - # scale = height to pixel size (2m pixel gives scale = 0.5) - # svf,svfN,svfW,svfE,svfS = SVFs for building and ground - # svfveg,svfNveg,svfEveg,svfSveg,svfWveg = Veg SVFs blocking sky - # svfaveg,svfEaveg,svfSaveg,svfWaveg,svfNaveg = Veg SVFs blocking buildings - # vegdem = Vegetation canopy DSM - # vegdem2 = Vegetation trunk zone DSM - # albedo_b = building wall albedo - # absK = human absorption coefficient for shortwave radiation - # absL = human absorption coefficient for longwave radiation - # ewall = Emissivity of building walls - # Fside = The angular factors between a person and the surrounding surfaces - # Fup = The angular factors between a person and the surrounding surfaces - # Fcyl = The angular factors between a culidric person and the surrounding surfaces - # altitude = Sun altitude (degree) - # azimuth = Sun azimuth (degree) - # zen = Sun zenith angle (radians) - # jday = day of year - # usevegdem = use vegetation scheme - # onlyglobal = calculate dir and diff from global shortwave (Reindl et al. 
1990) - # buildings = Boolena grid to identify building pixels - # location = geographic location - # height = height of measurements point (center of gravity of human) - # psi = 1 - Transmissivity of shortwave through vegetation - # landcover = use landcover scheme !!!NEW IN 2015a!!! - # lc_grid = grid with landcoverclasses - # lc_class = table with landcover properties - # dectime = decimal time - # altmax = maximum sun altitude - # dirwalls = aspect of walls - # walls = one pixel row outside building footprint. height of building walls - # cyl = consider man as cylinder instead of cude - # elvis = dummy - # Ta = air temp - # RH - # radG = global radiation - # radD = diffuse - # radI = direct - # P = pressure - # amaxvalue = max height of buildings - # bush = grid representing bushes - # Twater = temperature of water (daily) - # TgK, Tstart, TgK_wall, Tstart_wall, TmaxLST,TmaxLST_wall, - # alb_grid, emis_grid = albedo and emmissivity on ground - # first, second = conneted to old Ts model (source area based on Smidt et al.) - # svfalfa = SVF recalculated to angle - # svfbuveg = complete SVF - # firstdaytime, timeadd, timestepdec, Tgmap1, Tgmap1E, Tgmap1S, Tgmap1W, Tgmap1N, - # CI = Clearness index - # TgOut1 = old Ts model - # diffsh, ani = Used in anisotrpic models (Wallenberg et al. 
2019, 2022) - - # # # Core program start # # # - - # Optimization: Crop to valid region to avoid needless computation on NaN boundaries - valid_mask = ~(np.isnan(dsm) | np.isnan(svf)) - - if not np.any(valid_mask): - # Return all NaN arrays if no valid pixels - nan_array = np.full((rows, cols), np.nan) - return ( - nan_array.copy(), - nan_array.copy(), - nan_array.copy(), - nan_array.copy(), - nan_array.copy(), - nan_array.copy(), - np.nan, - np.nan, - 0, - CI, - nan_array.copy(), - firstdaytime, - timestepdec, - timeadd, - nan_array.copy(), - nan_array.copy(), - nan_array.copy(), - nan_array.copy(), - nan_array.copy(), - nan_array.copy(), - nan_array.copy(), - nan_array.copy(), - nan_array.copy(), - nan_array.copy(), - nan_array.copy(), - nan_array.copy(), - nan_array.copy(), - nan_array.copy(), - TgOut1, - nan_array.copy(), - radI, - radD, - nan_array.copy(), - None, - CI, - CI, - nan_array.copy(), - nan_array.copy(), - nan_array.copy(), - steradians, - voxelTable, - ) - - rows_valid = np.any(valid_mask, axis=1) - cols_valid = np.any(valid_mask, axis=0) - rmin, rmax = np.where(rows_valid)[0][[0, -1]] - cmin, cmax = np.where(cols_valid)[0][[0, -1]] - rmax += 1 - cmax += 1 - - orig_rows, orig_cols = rows, cols - is_cropped = (rmin > 0) or (rmax < rows) or (cmin > 0) or (cmax < cols) - - if is_cropped: - sl = (slice(rmin, rmax), slice(cmin, cmax)) - rows = rmax - rmin - cols = cmax - cmin - - # Crop inputs - dsm = dsm[sl] - svf = svf[sl] - svfN = svfN[sl] - svfW = svfW[sl] - svfE = svfE[sl] - svfS = svfS[sl] - svfveg = svfveg[sl] - svfNveg = svfNveg[sl] - svfEveg = svfEveg[sl] - svfSveg = svfSveg[sl] - svfWveg = svfWveg[sl] - svfaveg = svfaveg[sl] - svfEaveg = svfEaveg[sl] - svfSaveg = svfSaveg[sl] - svfWaveg = svfWaveg[sl] - svfNaveg = svfNaveg[sl] - vegdem = vegdem[sl] - vegdem2 = vegdem2[sl] - buildings = buildings[sl] - if lc_grid is not None: - lc_grid = lc_grid[sl] - dirwalls = dirwalls[sl] - walls = walls[sl] - bush = bush[sl] - alb_grid = alb_grid[sl] - 
emis_grid = emis_grid[sl] - TgK = TgK[sl] - Tstart = Tstart[sl] - TmaxLST = TmaxLST[sl] - # Note: TgK_wall, Tstart_wall, TmaxLST_wall are scalars, not arrays - svfalfa = svfalfa[sl] - svfbuveg = svfbuveg[sl] - Tgmap1 = Tgmap1[sl] - Tgmap1E = Tgmap1E[sl] - Tgmap1S = Tgmap1S[sl] - Tgmap1W = Tgmap1W[sl] - Tgmap1N = Tgmap1N[sl] - if np.ndim(TgOut1) >= 2: - TgOut1 = TgOut1[sl] - if diffsh is not None: - diffsh = diffsh[sl] - if shmat is not None: - shmat = shmat[sl] - if vegshmat is not None: - vegshmat = vegshmat[sl] - if vbshvegshmat is not None: - vbshvegshmat = vbshvegshmat[sl] - asvf = asvf[sl] - if voxelMaps is not None: - voxelMaps = voxelMaps[sl] - if voxelTable is not None: - voxelTable = voxelTable[sl] - walls_scheme = walls_scheme[sl] - dirwalls_scheme = dirwalls_scheme[sl] - - # Instrument offset in degrees - t = 0.0 - - # Stefan Bolzmans Constant - SBC = 5.67051e-8 - - # Degrees to radians - deg2rad = np.pi / 180 - - # Find sunrise decimal hour - new from 2014a - _, _, _, SNUP = daylen(jday, location["latitude"]) - - # Vapor pressure - ea = 6.107 * 10 ** ((7.5 * Ta) / (237.3 + Ta)) * (RH / 100.0) - - # Determination of clear - sky emissivity from Prata (1996) - msteg = 46.5 * (ea / (Ta + 273.15)) - esky = (1 - (1 + msteg) * np.exp(-((1.2 + 3.0 * msteg) ** 0.5))) + elvis # -0.04 old error from Jonsson et al.2006 - - if altitude > 0: # # # # # # DAYTIME # # # # # # - # Clearness Index on Earth's surface after Crawford and Dunchon (1999) with a correction - # factor for low sun elevations after Lindberg et al.(2008) - I0, CI, Kt, I0et, CIuncorr = clearnessindex_2013b(zen, jday, Ta, RH / 100.0, radG, location, P) - if (CI > 1) or (np.inf == CI): - CI = 1 - - # Estimation of radD and radI if not measured after Reindl et al.(1990) - if onlyglobal == 1: - I0, CI, Kt, I0et, CIuncorr = clearnessindex_2013b(zen, jday, Ta, RH / 100.0, radG, location, P) - if (CI > 1) or (np.inf == CI): - CI = 1 - - radI, radD = diffusefraction(radG, altitude, Kt, Ta, RH) - - # Diffuse 
Radiation - # Anisotropic Diffuse Radiation after Perez et al. 1993 - if anisotropic_sky == 1: - patchchoice = 1 - zenDeg = zen * (180 / np.pi) - # Relative luminance - lv, pc_, pb_ = Perez_v3(zenDeg, azimuth, radD, radI, jday, patchchoice, patch_option) - # Total relative luminance from sky, i.e. from each patch, into each cell - aniLum = np.zeros((rows, cols)) - for idx in range(lv.shape[0]): - aniLum += diffsh[:, :, idx] * lv[idx, 2] - - dRad = aniLum * radD # Total diffuse radiation from sky into each cell - else: - dRad = radD * svfbuveg - patchchoice = 1 - lv = None - - # Shadow images - if usevegdem == 1: - result = shadowing.calculate_shadows_wall_ht_25( - azimuth, - altitude, - scale, - amaxvalue, - dsm.astype(np.float32), - vegdem.astype(np.float32), - vegdem2.astype(np.float32), - bush.astype(np.float32), - walls.astype(np.float32), - (dirwalls * np.pi / 180.0).astype(np.float32), - walls_scheme.astype(np.float32), - (dirwalls_scheme * np.pi / 180.0).astype(np.float32), - None, - ) - vegsh = result.veg_sh - sh = result.bldg_sh - wallsh = result.wall_sh - wallsun = result.wall_sun - wallshve = result.wall_sh_veg - facesun = result.face_sun - wallsh_ = result.face_sh - shadow = result.bldg_sh - (1 - result.veg_sh) * (1 - psi) - else: - result = shadowing.calculate_shadows_wall_ht_25( - azimuth, - altitude, - scale, - dsm.astype(np.float32), - None, - None, - None, - walls.astype(np.float32), - (dirwalls * np.pi / 180.0).astype(np.float32), - None, - None, - None, - ) - sh = result.bldg_sh - wallsh = result.wall_sh - wallsun = result.wall_sun - facesh = result.face_sh - facesun = result.face_sun - shadow = result.bldg_sh - - # # # Surface temperature parameterisation during daytime # # # # - # new using max sun alt.instead of dfm - # Tgamp = (TgK * altmax - Tstart) + Tstart # Old - Tgamp = TgK * altmax + Tstart # Fixed 2021 - # Tgampwall = (TgK_wall * altmax - (Tstart_wall)) + (Tstart_wall) # Old - Tgampwall = TgK_wall * altmax + Tstart_wall - Tg = Tgamp * 
np.sin( - (((dectime - np.floor(dectime)) - SNUP / 24) / (TmaxLST / 24 - SNUP / 24)) * np.pi / 2 - ) # 2015 a, based on max sun altitude - Tgwall = Tgampwall * np.sin( - (((dectime - np.floor(dectime)) - SNUP / 24) / (TmaxLST_wall / 24 - SNUP / 24)) * np.pi / 2 - ) # 2015a, based on max sun altitude - - if Tgwall < 0: # temporary for removing low Tg during morning 20130205 - # Tg = 0 - Tgwall = 0 - - # New estimation of Tg reduction for non - clear situation based on Reindl et al.1990 - radI0, _ = diffusefraction(I0, altitude, 1.0, Ta, RH) - corr = 0.1473 * np.log(90 - (zen / np.pi * 180)) + 0.3454 # 20070329 correction of lat, Lindberg et al. 2008 - CI_Tg = (radG / radI0) + (1 - corr) - if (CI_Tg > 1) or (CI_Tg == np.inf): - CI_Tg = 1 - - radG0 = radI0 * (np.sin(altitude * deg2rad)) + _ - CI_TgG = (radG / radG0) + (1 - corr) - if (CI_TgG > 1) or (CI_TgG == np.inf): - CI_TgG = 1 - - # Tg = Tg * CI_Tg # new estimation - # Tgwall = Tgwall * CI_Tg - Tg = Tg * CI_TgG # new estimation - Tgwall = Tgwall * CI_TgG - if landcover == 1: - Tg[Tg < 0] = 0 # temporary for removing low Tg during morning 20130205 - - # # # # Ground View Factors # # # # - gvf_result = gvf.gvf_calc( - wallsun.astype(np.float32), - walls.astype(np.float32), - buildings.astype(np.float32), - scale, - shadow.astype(np.float32), - first, - second, - dirwalls.astype(np.float32), - Tg.astype(np.float32), - Tgwall, - Ta, - emis_grid.astype(np.float32), - ewall, - alb_grid.astype(np.float32), - SBC, - albedo_b, - Twater, - lc_grid.astype(np.float32) if lc_grid is not None else None, - landcover, - ) - - # # # # Lup, daytime # # # # - # Surface temperature wave delay - new as from 2014a - Lup, timeaddnotused, Tgmap1 = TsWaveDelay_2015a(gvf_result.gvf_lup, firstdaytime, timeadd, timestepdec, Tgmap1) - LupE, timeaddnotused, Tgmap1E = TsWaveDelay_2015a( - gvf_result.gvf_lup_e, firstdaytime, timeadd, timestepdec, Tgmap1E - ) - LupS, timeaddnotused, Tgmap1S = TsWaveDelay_2015a( - gvf_result.gvf_lup_s, 
firstdaytime, timeadd, timestepdec, Tgmap1S - ) - LupW, timeaddnotused, Tgmap1W = TsWaveDelay_2015a( - gvf_result.gvf_lup_w, firstdaytime, timeadd, timestepdec, Tgmap1W - ) - LupN, timeaddnotused, Tgmap1N = TsWaveDelay_2015a( - gvf_result.gvf_lup_n, firstdaytime, timeadd, timestepdec, Tgmap1N - ) - - # # For Tg output in POIs - TgTemp = Tg * shadow + Ta - TgOut, timeadd, TgOut1 = TsWaveDelay_2015a( - TgTemp, firstdaytime, timeadd, timestepdec, TgOut1 - ) # timeadd only here v2021a - - # Building height angle from svf - F_sh = cylindric_wedge(zen, svfalfa, rows, cols) # Fraction shadow on building walls based on sun alt and svf - F_sh[np.isnan(F_sh)] = 0.5 - - # # # # # # # Calculation of shortwave daytime radiative fluxes # # # # # # # - Kdown = ( - radI * shadow * np.sin(altitude * (np.pi / 180)) - + dRad - + albedo_b * (1 - svfbuveg) * (radG * (1 - F_sh) + radD * F_sh) - ) # *sin(altitude(i) * (pi / 180)) - - Kup, KupE, KupS, KupW, KupN = Kup_veg_2015a( - radI, - radD, - radG, - altitude, - svfbuveg, - albedo_b, - F_sh, - gvf_result.gvfalb, - gvf_result.gvfalb_e, - gvf_result.gvfalb_s, - gvf_result.gvfalb_w, - gvf_result.gvfalb_n, - gvf_result.gvfalbnosh, - gvf_result.gvfalbnosh_e, - gvf_result.gvfalbnosh_s, - gvf_result.gvfalbnosh_w, - gvf_result.gvfalbnosh_n, - ) - - kside_result = vegetation.kside_veg( - radI, - radD, - radG, - shadow.astype(np.float32), - svfS, - svfW, - svfN, - svfE, - svfEveg, - svfSveg, - svfWveg, - svfNveg, - azimuth, - altitude, - psi, - t, - albedo_b, - F_sh.astype(np.float32), - KupE.astype(np.float32), - KupS.astype(np.float32), - KupW.astype(np.float32), - KupN.astype(np.float32), - bool(cyl), - lv.astype(np.float32) if lv is not None else None, - bool(anisotropic_sky), - diffsh, - asvf, - shmat, - vegshmat, - vbshvegshmat, - ) - Keast = kside_result.keast - Ksouth = kside_result.ksouth - Kwest = kside_result.kwest - Knorth = kside_result.knorth - KsideI = kside_result.kside_i - KsideD = kside_result.kside_d - Kside = 
kside_result.kside - - firstdaytime = 0 - - else: # # # # # # # NIGHTTIME # # # # # # # # - Tgwall = 0 - # CI_Tg = -999 # F_sh = [] - - # Nocturnal K fluxes set to 0 - Knight = np.zeros((rows, cols)) - Kdown = np.zeros((rows, cols)) - Kwest = np.zeros((rows, cols)) - Kup = np.zeros((rows, cols)) - Keast = np.zeros((rows, cols)) - Ksouth = np.zeros((rows, cols)) - Knorth = np.zeros((rows, cols)) - KsideI = np.zeros((rows, cols)) - KsideD = np.zeros((rows, cols)) - F_sh = np.zeros((rows, cols)) - Tg = np.zeros((rows, cols)) - shadow = np.zeros((rows, cols)) - CI_Tg = deepcopy(CI) - CI_TgG = deepcopy(CI) - dRad = np.zeros((rows, cols)) - Kside = np.zeros((rows, cols)) - - # # # # Lup # # # # - Lup = SBC * emis_grid * ((Knight + Ta + Tg + 273.15) ** 4) - if landcover == 1: - Lup[lc_grid == 3] = SBC * 0.98 * (Twater + 273.15) ** 4 # nocturnal Water temp - - LupE = Lup - LupS = Lup - LupW = Lup - LupN = Lup - - # # For Tg output in POIs - TgOut = Ta + Tg - - I0 = 0 - timeadd = 0 - firstdaytime = 1 - - # # # # Ldown # # # # - Ldown = ( - (svf + svfveg - 1) * esky * SBC * ((Ta + 273.15) ** 4) - + (2 - svfveg - svfaveg) * ewall * SBC * ((Ta + 273.15) ** 4) - + (svfaveg - svf) * ewall * SBC * ((Ta + 273.15 + Tgwall) ** 4) - + (2 - svf - svfveg) * (1 - ewall) * esky * SBC * ((Ta + 273.15) ** 4) - ) # Jonsson et al.(2006) - # Ldown = Ldown - 25 # Shown by Jonsson et al.(2006) and Duarte et al.(2006) - - if CI < 0.95: # non - clear conditions - c = 1 - CI - Ldown = Ldown * (1 - c) + c * ( - (svf + svfveg - 1) * SBC * ((Ta + 273.15) ** 4) - + (2 - svfveg - svfaveg) * ewall * SBC * ((Ta + 273.15) ** 4) - + (svfaveg - svf) * ewall * SBC * ((Ta + 273.15 + Tgwall) ** 4) - + (2 - svf - svfveg) * (1 - ewall) * SBC * ((Ta + 273.15) ** 4) - ) # NOT REALLY TESTED!!! BUT MORE CORRECT? 
- - # # # # Lside # # # # - lside_veg_result = vegetation.lside_veg( - svfS, - svfW, - svfN, - svfE, - svfEveg, - svfSveg, - svfWveg, - svfNveg, - svfEaveg, - svfSaveg, - svfWaveg, - svfNaveg, - azimuth, - altitude, - Ta, - Tgwall, - SBC, - ewall, - Ldown.astype(np.float32), - esky, - t, - F_sh.astype(np.float32), - CI, - LupE.astype(np.float32), - LupS.astype(np.float32), - LupW.astype(np.float32), - LupN.astype(np.float32), - bool(anisotropic_sky), - ) - Least = lside_veg_result.least - Lsouth = lside_veg_result.lsouth - Lwest = lside_veg_result.lwest - Lnorth = lside_veg_result.lnorth - - # New parameterization scheme for wall temperatures - if wallScheme == 1: - # albedo_g = 0.15 #TODO Change to correct - if altitude < 0: - wallsh_ = 0 - voxelTable = wall_surface_temperature( - voxelTable, wallsh_, altitude, azimuth, timeStep, radI, radD, radG, Ldown, Lup, Ta, esky - ) - # Anisotropic sky - if anisotropic_sky == 1: - if "lv" not in locals(): - # Creating skyvault of patches of constant radians (Tregeneza and Sharples, 1993) - skyvaultalt, skyvaultazi, _, _, _, _, _ = create_patches(patch_option) - - patch_emissivities = np.zeros(skyvaultalt.shape[0]) - - x = np.transpose(np.atleast_2d(skyvaultalt)) - y = np.transpose(np.atleast_2d(skyvaultazi)) - z = np.transpose(np.atleast_2d(patch_emissivities)) - - L_patches = np.append(np.append(x, y, axis=1), z, axis=1) - - else: - L_patches = deepcopy(lv) - - # Calculate steradians for patches if it is the first model iteration - if i == 0: - steradians, skyalt, patch_altitude = patch_steradians(L_patches) - - # Create lv from L_patches if nighttime, i.e. lv does not exist - if altitude < 0: - # CI = deepcopy(CI) - lv = deepcopy(L_patches) - KupE = np.zeros_like(lv) - KupS = np.zeros_like(lv) - KupW = np.zeros_like(lv) - KupN = np.zeros_like(lv) - - # Adjust sky emissivity under semi-cloudy/hazy/cloudy/overcast conditions, i.e. 
CI lower than 0.95 - if CI < 0.95: - esky_c = CI * esky + (1 - CI) * 1.0 - esky = esky_c - - ani_sky_result = sky.anisotropic_sky( - shmat, - vegshmat, - vbshvegshmat, - altitude, - azimuth, - asvf, - bool(cyl), - esky, - L_patches.astype(np.float32), - bool(wallScheme), - voxelTable.astype(np.float32) if voxelTable is not None else None, - voxelMaps.astype(np.float32) if voxelMaps is not None else None, - steradians.astype(np.float32), - Ta, - Tgwall, - ewall, - Lup.astype(np.float32), - radI, - radD, - radG, - lv.astype(np.float32), - albedo_b, - False, - diffsh, - shadow.astype(np.float32), - KupE.astype(np.float32), - KupS.astype(np.float32), - KupW.astype(np.float32), - KupN.astype(np.float32), - i, - ) - Ldown = ani_sky_result.ldown - Lside = ani_sky_result.lside - Lside_sky = ani_sky_result.lside_sky - Lside_veg = ani_sky_result.lside_veg - Lside_sh = ani_sky_result.lside_sh - Lside_sun = ani_sky_result.lside_sun - Lside_ref = ani_sky_result.lside_ref - Least_ = ani_sky_result.least - Lwest_ = ani_sky_result.lwest - Lnorth_ = ani_sky_result.lnorth - Lsouth_ = ani_sky_result.lsouth - Keast = ani_sky_result.keast - Ksouth = ani_sky_result.ksouth - Kwest = ani_sky_result.kwest - Knorth = ani_sky_result.knorth - KsideI = ani_sky_result.kside_i - KsideD = ani_sky_result.kside_d - Kside = ani_sky_result.kside - steradians = ani_sky_result.steradians - skyalt = ani_sky_result.skyalt - else: - Lside = np.zeros((rows, cols)) - L_patches = None - - # Box and anisotropic longwave - if cyl == 0 and anisotropic_sky == 1: - Least += Least_ - Lwest += Lwest_ - Lnorth += Lnorth_ - Lsouth += Lsouth_ - - # # # # Calculation of radiant flux density and Tmrt # # # # - # Human body considered as a cylinder with isotropic all-sky diffuse - if cyl == 1 and anisotropic_sky == 0: - Sstr = absK * (KsideI * Fcyl + (Kdown + Kup) * Fup + (Knorth + Keast + Ksouth + Kwest) * Fside) + absL * ( - (Ldown + Lup) * Fup + (Lnorth + Least + Lsouth + Lwest) * Fside - ) - # Human body considered 
as a cylinder with Perez et al. (1993) (anisotropic sky diffuse) - # and Martin and Berdahl (1984) (anisotropic sky longwave) - elif cyl == 1 and anisotropic_sky == 1: - Sstr = absK * (Kside * Fcyl + (Kdown + Kup) * Fup + (Knorth + Keast + Ksouth + Kwest) * Fside) + absL * ( - (Ldown + Lup) * Fup + Lside * Fcyl + (Lnorth + Least + Lsouth + Lwest) * Fside - ) - # Knorth = nan Ksouth = nan Kwest = nan Keast = nan - else: # Human body considered as a standing cube - Sstr = absK * ((Kdown + Kup) * Fup + (Knorth + Keast + Ksouth + Kwest) * Fside) + absL * ( - (Ldown + Lup) * Fup + (Lnorth + Least + Lsouth + Lwest) * Fside - ) - - Tmrt = np.sqrt(np.sqrt(Sstr / (absL * SBC))) - 273.2 - - # Add longwave to cardinal directions for output in POI - if (cyl == 1) and (anisotropic_sky == 1): - Least += Least_ - Lwest += Lwest_ - Lnorth += Lnorth_ - Lsouth += Lsouth_ - - if is_cropped: - - def uncrop(arr): - if arr is None: - return None - if np.isscalar(arr): - return arr - arr = np.asarray(arr) - if arr.ndim < 2: - return arr - # Check if it matches the cropped shape (rows, cols) - if arr.shape[0] != rows or arr.shape[1] != cols: - return arr - - new_shape = (orig_rows, orig_cols) + arr.shape[2:] - full = np.full(new_shape, np.nan, dtype=arr.dtype) - full[sl] = arr - return full - - Tmrt = uncrop(Tmrt) - Kdown = uncrop(Kdown) - Kup = uncrop(Kup) - Ldown = uncrop(Ldown) - Lup = uncrop(Lup) - Tg = uncrop(Tg) - shadow = uncrop(shadow) - Tgmap1 = uncrop(Tgmap1) - Tgmap1E = uncrop(Tgmap1E) - Tgmap1S = uncrop(Tgmap1S) - Tgmap1W = uncrop(Tgmap1W) - Tgmap1N = uncrop(Tgmap1N) - Keast = uncrop(Keast) - Ksouth = uncrop(Ksouth) - Kwest = uncrop(Kwest) - Knorth = uncrop(Knorth) - Least = uncrop(Least) - Lsouth = uncrop(Lsouth) - Lwest = uncrop(Lwest) - Lnorth = uncrop(Lnorth) - KsideI = uncrop(KsideI) - TgOut1 = uncrop(TgOut1) - TgOut = uncrop(TgOut) - Lside = uncrop(Lside) - KsideD = uncrop(KsideD) - dRad = uncrop(dRad) - Kside = uncrop(Kside) - voxelTable = uncrop(voxelTable) - - return 
( - Tmrt, - Kdown, - Kup, - Ldown, - Lup, - Tg, - ea, - esky, - I0, - CI, - shadow, - firstdaytime, - timestepdec, - timeadd, - Tgmap1, - Tgmap1E, - Tgmap1S, - Tgmap1W, - Tgmap1N, - Keast, - Ksouth, - Kwest, - Knorth, - Least, - Lsouth, - Lwest, - Lnorth, - KsideI, - TgOut1, - TgOut, - radI, - radD, - Lside, - L_patches, - CI_Tg, - CI_TgG, - KsideD, - dRad, - Kside, - steradians, - voxelTable, - ) diff --git a/pysrc/umepr/hybrid/svf.py b/pysrc/umepr/hybrid/svf.py deleted file mode 100644 index 9aa5f61..0000000 --- a/pysrc/umepr/hybrid/svf.py +++ /dev/null @@ -1,220 +0,0 @@ -""" -SVF hybrid - Python implementation of the SVF algorithm which calls Rust for the shadowing calculations. - -NOTE: Used for testing. -There is a full Rust implementation which should be used instead. -""" - -import numpy as np -from tqdm import tqdm -from umep.util import shadowingfunctions as shadow -from umep.util.SEBESOLWEIGCommonFiles.create_patches import create_patches - -from ..rustalgos import shadowing - - -def annulus_weight(altitude, aziinterval): - n = 90.0 - steprad = (360.0 / aziinterval) * (np.pi / 180.0) - annulus = 91.0 - altitude - w = (1.0 / (2.0 * np.pi)) * np.sin(np.pi / (2.0 * n)) * np.sin((np.pi * (2.0 * annulus - 1.0)) / (2.0 * n)) - weight = steprad * w - - return weight - - -def svfForProcessing153_rust_shdw(dsm, vegdem, vegdem2, scale, usevegdem, amax): - # setup - rows = dsm.shape[0] - cols = dsm.shape[1] - svf = np.zeros([rows, cols], dtype=np.float32) - svfE = np.zeros([rows, cols], dtype=np.float32) - svfS = np.zeros([rows, cols], dtype=np.float32) - svfW = np.zeros([rows, cols], dtype=np.float32) - svfN = np.zeros([rows, cols], dtype=np.float32) - svfveg = np.zeros((rows, cols), dtype=np.float32) - svfEveg = np.zeros((rows, cols), dtype=np.float32) - svfSveg = np.zeros((rows, cols), dtype=np.float32) - svfWveg = np.zeros((rows, cols), dtype=np.float32) - svfNveg = np.zeros((rows, cols), dtype=np.float32) - svfaveg = np.zeros((rows, cols), dtype=np.float32) - 
svfEaveg = np.zeros((rows, cols), dtype=np.float32) - svfSaveg = np.zeros((rows, cols), dtype=np.float32) - svfWaveg = np.zeros((rows, cols), dtype=np.float32) - svfNaveg = np.zeros((rows, cols), dtype=np.float32) - - # % Elevation vegdems if buildingDSM inclused ground heights - # vegdem = vegdem + dsm - # vegdem[vegdem == dsm] = 0 - # vegdem2 = vegdem2 + dsm - # vegdem2[vegdem2 == dsm] = 0 - - # % Bush separation - # bush = np.logical_not(vegdem2 * vegdem) * vegdem - bush = np.copy(vegdem) - bush[vegdem2 > 0] = 0.0 - - index = 0 - - # patch_option = 1 # 145 patches - patch_option = 2 # 153 patches - # patch_option = 3 # 306 patches - # patch_option = 4 # 612 patches - - # Create patches based on patch_option - ( - skyvaultalt, - skyvaultazi, - annulino, - skyvaultaltint, - aziinterval, - skyvaultaziint, - azistart, - ) = create_patches(patch_option) - - skyvaultaziint = np.array([360 / patches for patches in aziinterval]) - iazimuth = np.hstack(np.zeros((1, np.sum(aziinterval)))) # Nils - - # float 32 for memory - shmat = np.zeros((rows, cols, np.sum(aziinterval)), dtype=np.float32) - vegshmat = np.zeros((rows, cols, np.sum(aziinterval)), dtype=np.float32) - vbshvegshmat = np.zeros((rows, cols, np.sum(aziinterval)), dtype=np.float32) - - for j in range(0, skyvaultaltint.shape[0]): - for k in range(0, int(360 / skyvaultaziint[j])): - iazimuth[index] = k * skyvaultaziint[j] + azistart[j] - if iazimuth[index] > 360.0: - iazimuth[index] = iazimuth[index] - 360.0 - index = index + 1 - - # NOTE: total for progress - total = 0 - for i in range(0, skyvaultaltint.shape[0]): - for j in np.arange(0, aziinterval[int(i)]): - total += 1 - progress = tqdm(total=total) - # - aziintervalaniso = np.ceil(aziinterval / 2.0) - index = 0 - for i in range(0, skyvaultaltint.shape[0]): - for j in np.arange(0, aziinterval[int(i)]): - altitude = skyvaultaltint[int(i)] - azimuth = iazimuth[int(index)] - - # Casting shadow - if usevegdem == 1: - # numba doesn't seem to offer notable gains in 
this instance - result = shadowing.calculate_shadows_wall_ht_25( - azimuth, - altitude, - scale, - amax, - dsm, - vegdem, - vegdem2, - bush, - None, - None, - None, - None, - None, - ) - vegsh = result.veg_sh - vbshvegsh = result.veg_blocks_bldg_sh - sh = result.bldg_sh - vegshmat[:, :, index] = vegsh - vbshvegshmat[:, :, index] = vbshvegsh - else: - sh = shadow.shadowingfunctionglobalradiation(dsm, azimuth, altitude, scale, 1) - shmat[:, :, index] = sh - - # Calculate svfs - for k in np.arange(annulino[int(i)] + 1, (annulino[int(i + 1.0)]) + 1): - weight = annulus_weight(k, aziinterval[i]) * sh - svf = svf + weight - weight = annulus_weight(k, aziintervalaniso[i]) * sh - if (azimuth >= 0) and (azimuth < 180): - svfE = svfE + weight - if (azimuth >= 90) and (azimuth < 270): - svfS = svfS + weight - if (azimuth >= 180) and (azimuth < 360): - svfW = svfW + weight - if (azimuth >= 270) or (azimuth < 90): - svfN = svfN + weight - - if usevegdem == 1: - for k in np.arange(annulino[int(i)] + 1, (annulino[int(i + 1.0)]) + 1): - # % changed to include 90 - weight = annulus_weight(k, aziinterval[i]) - svfveg = svfveg + weight * vegsh - svfaveg = svfaveg + weight * vbshvegsh - weight = annulus_weight(k, aziintervalaniso[i]) - if (azimuth >= 0) and (azimuth < 180): - svfEveg = svfEveg + weight * vegsh - svfEaveg = svfEaveg + weight * vbshvegsh - if (azimuth >= 90) and (azimuth < 270): - svfSveg = svfSveg + weight * vegsh - svfSaveg = svfSaveg + weight * vbshvegsh - if (azimuth >= 180) and (azimuth < 360): - svfWveg = svfWveg + weight * vegsh - svfWaveg = svfWaveg + weight * vbshvegsh - if (azimuth >= 270) or (azimuth < 90): - svfNveg = svfNveg + weight * vegsh - svfNaveg = svfNaveg + weight * vbshvegsh - - index += 1 - - # track progress - progress.update(1) - - svfS = svfS + 3.0459e-004 - svfW = svfW + 3.0459e-004 - # % Last azimuth is 90. 
Hence, manual add of last annuli for svfS and SVFW - # %Forcing svf not be greater than 1 (some MATLAB crazyness) - svf[(svf > 1.0)] = 1.0 - svfE[(svfE > 1.0)] = 1.0 - svfS[(svfS > 1.0)] = 1.0 - svfW[(svfW > 1.0)] = 1.0 - svfN[(svfN > 1.0)] = 1.0 - - if usevegdem == 1: - last = np.zeros((rows, cols)) - last[(vegdem2 == 0.0)] = 3.0459e-004 - svfSveg = svfSveg + last - svfWveg = svfWveg + last - svfSaveg = svfSaveg + last - svfWaveg = svfWaveg + last - # %Forcing svf not be greater than 1 (some MATLAB crazyness) - svfveg[(svfveg > 1.0)] = 1.0 - svfEveg[(svfEveg > 1.0)] = 1.0 - svfSveg[(svfSveg > 1.0)] = 1.0 - svfWveg[(svfWveg > 1.0)] = 1.0 - svfNveg[(svfNveg > 1.0)] = 1.0 - svfaveg[(svfaveg > 1.0)] = 1.0 - svfEaveg[(svfEaveg > 1.0)] = 1.0 - svfSaveg[(svfSaveg > 1.0)] = 1.0 - svfWaveg[(svfWaveg > 1.0)] = 1.0 - svfNaveg[(svfNaveg > 1.0)] = 1.0 - - svfresult = { - "svf": svf, - "svfE": svfE, - "svfS": svfS, - "svfW": svfW, - "svfN": svfN, - "svfveg": svfveg, - "svfEveg": svfEveg, - "svfSveg": svfSveg, - "svfWveg": svfWveg, - "svfNveg": svfNveg, - "svfaveg": svfaveg, - "svfEaveg": svfEaveg, - "svfSaveg": svfSaveg, - "svfWaveg": svfWaveg, - "svfNaveg": svfNaveg, - "shmat": shmat, - "vegshmat": vegshmat, - "vbshvegshmat": vbshvegshmat, - } - - return svfresult diff --git a/pysrc/umepr/shadows.py b/pysrc/umepr/shadows.py deleted file mode 100644 index c23c55b..0000000 --- a/pysrc/umepr/shadows.py +++ /dev/null @@ -1,133 +0,0 @@ -""" -Shadow wrapper for Python. - -daily_shading import internally calls the Rust implementation for shadow calculations. 
-""" - -import datetime -from pathlib import Path - -import pyproj -from rasterio.transform import Affine, xy - -from umep import common -from .functions import daily_shading as dsh - - -def generate_shadows( - dsm_path: str, - shadow_date_Ymd: str, # %Y-%m-%d" - wall_ht_path: str, - wall_aspect_path: str, - bbox: list[int, int, int, int], - out_dir: str, - shadow_time_HM: int | None = None, # "%H:%M" - time_interval_M=30, - veg_dsm_path: str | None = None, - trans_veg: float = 3, - trunk_zone_ht_perc: float = 0.25, -): - dsm, dsm_transf, dsm_crs, _dsm_nd = common.load_raster(dsm_path, bbox) - dsm_height, dsm_width = dsm.shape # y rows by x cols - dsm_scale = 1 / dsm_transf[1] - # y is flipped - so return max for lower row - minx, miny = xy(Affine.from_gdal(*dsm_transf), dsm.shape[0], 0) - # Define the source and target CRS - source_crs = pyproj.CRS(dsm_crs) - target_crs = pyproj.CRS(4326) # WGS 84 - # Create a transformer object - transformer = pyproj.Transformer.from_crs(source_crs, target_crs, always_xy=True) - # Perform the transformation - lon, lat = transformer.transform(minx, miny) - - # veg transmissivity as percentage - if not trans_veg >= 0 and trans_veg <= 100: - raise ValueError("Vegetation transmissivity should be a number between 0 and 100") - trans = trans_veg / 100.0 - - if veg_dsm_path is not None: - usevegdem = 1 - veg_dsm, veg_dsm_transf, veg_dsm_crs, _veg_dsm_nd = common.load_raster(veg_dsm_path, bbox) - veg_dsm_height, veg_dsm_width = veg_dsm.shape - if not (veg_dsm_width == dsm_width) & (veg_dsm_height == dsm_height): - raise ValueError("Error in Vegetation Canopy DSM: All rasters must be of same extent and resolution") - trunkratio = trunk_zone_ht_perc / 100.0 - veg_dsm_2 = veg_dsm * trunkratio - veg_dsm_2_height, veg_dsm_2_width = veg_dsm_2.shape - if not (veg_dsm_2_width == dsm_width) & (veg_dsm_2_height == dsm_height): - raise ValueError("Error in Trunk Zone DSM: All rasters must be of same extent and resolution") - else: - usevegdem = 0 - 
veg_dsm = 0 - veg_dsm_2 = 0 - - if wall_aspect_path and wall_ht_path: - print("Facade shadow scheme activated") - wallsh = 1 - wh_rast, wh_transf, wh_crs, _wh_nd = common.load_raster(wall_ht_path, bbox) - wh_height, wh_width = wh_rast.shape - if not (wh_width == dsm_width) & (wh_height == dsm_height): - raise ValueError("Error in Wall height raster: All rasters must be of same extent and resolution") - wa_rast, wa_transf, wa_crs, _wa_nd = common.load_raster(wall_aspect_path, bbox) - wa_height, wa_width = wa_rast.shape - if not (wa_width == dsm_width) & (wa_height == dsm_height): - raise ValueError("Error in Wall aspect raster: All rasters must be of same extent and resolution") - else: - wallsh = 0 - wh_rast = 0 - wa_height = 0 - - dst = 0 - UTC = 0 - target_date = datetime.datetime.strptime(shadow_date_Ymd, "%Y-%m-%d").date() - year = target_date.year - month = target_date.month - day = target_date.day - if shadow_time_HM is not None: - onetime = 1 - onetimetime = datetime.datetime.strptime(shadow_time_HM, "%H:%M") - hour = onetimetime.hour - minu = onetimetime.minute - sec = onetimetime.second - else: - onetime = 0 - hour = 0 - minu = 0 - sec = 0 - - tv = [year, month, day, hour, minu, sec] - - out_path = Path(out_dir) - out_path.mkdir(parents=True, exist_ok=True) - out_path_str = str(out_path) - - Path.mkdir(out_path / "facade_shdw_bldgs", parents=True, exist_ok=True) - Path.mkdir(out_path / "facade_shdw_veg", parents=True, exist_ok=True) - Path.mkdir(out_path / "shadow_ground", parents=True, exist_ok=True) - - shadowresult = dsh.daily_shading( - dsm.astype("float32"), - veg_dsm.astype("float32"), - veg_dsm_2.astype("float32"), - dsm_scale, - lon, - lat, - dsm_width, - dsm_height, - tv, - UTC, - usevegdem, - time_interval_M, - onetime, - out_path_str, - dsm_transf, - dsm_crs, - trans, - dst, - wallsh, - wh_rast.astype("float32"), - wa_rast.astype("float32"), - ) - - shfinal = shadowresult["shfinal"] - common.save_raster(out_path_str + "/shadow_composite.tif", 
shfinal, dsm_transf, dsm_crs) diff --git a/pysrc/umepr/solweig_runner_rust.py b/pysrc/umepr/solweig_runner_rust.py deleted file mode 100644 index 2971486..0000000 --- a/pysrc/umepr/solweig_runner_rust.py +++ /dev/null @@ -1,123 +0,0 @@ -""" -Subclasses SolweigRunCore - swaps in solweig function which calls Rust implementations of shadowing and GVF calculations -""" - -from typing import Any - -from umep.functions.SOLWEIGpython.solweig_runner_core import SolweigRunCore - -from .functions.solweig import Solweig_2025a_calc as Solweig_2025a_calc_hybrid - - -class SolweigRunRust(SolweigRunCore): - """Class to run the SOLWEIG algorithm with Rust optimisations.""" - - def calc_solweig( - self, - iter: int, - elvis: float, - first: float, - second: float, - firstdaytime: float, - timeadd: float, - timestepdec: float, - posture: Any, - ): - """ - Calculate SOLWEIG results for the given iteration. - Uses variant with GVF and Shadows rust optimisations. - """ - return Solweig_2025a_calc_hybrid( # type: ignore - iter, - self.raster_data.dsm, - self.raster_data.scale, - self.raster_data.rows, - self.raster_data.cols, - self.svf_data.svf, - self.svf_data.svf_north, - self.svf_data.svf_west, - self.svf_data.svf_east, - self.svf_data.svf_south, - self.svf_data.svf_veg, - self.svf_data.svf_veg_north, - self.svf_data.svf_veg_east, - self.svf_data.svf_veg_south, - self.svf_data.svf_veg_west, - self.svf_data.svf_veg_blocks_bldg_sh, - self.svf_data.svf_veg_blocks_bldg_sh_east, - self.svf_data.svf_veg_blocks_bldg_sh_south, - self.svf_data.svf_veg_blocks_bldg_sh_west, - self.svf_data.svf_veg_blocks_bldg_sh_north, - self.raster_data.cdsm, - self.raster_data.tdsm, - self.params.Albedo.Effective.Value.Walls, - self.params.Tmrt_params.Value.absK, - self.params.Tmrt_params.Value.absL, - self.params.Emissivity.Value.Walls, - posture.Fside, - posture.Fup, - posture.Fcyl, - self.environ_data.altitude[iter], - self.environ_data.azimuth[iter], - self.environ_data.zen[iter], - 
self.environ_data.jday[iter], - self.config.use_veg_dem, - self.config.only_global, - self.raster_data.buildings, - self.location, - self.environ_data.psi[iter], - self.config.use_landcover, - self.raster_data.lcgrid, - self.environ_data.dectime[iter], - self.environ_data.altmax[iter], - self.raster_data.wallaspect, - self.raster_data.wallheight, - int(self.config.person_cylinder), # expects int though should work either way - elvis, - self.environ_data.Ta[iter], - self.environ_data.RH[iter], - self.environ_data.radG[iter], - self.environ_data.radD[iter], - self.environ_data.radI[iter], - self.environ_data.P[iter], - self.raster_data.amaxvalue, - self.raster_data.bush, - self.environ_data.Twater[iter], - self.tg_maps.TgK, - self.tg_maps.Tstart, - self.tg_maps.alb_grid, - self.tg_maps.emis_grid, - self.tg_maps.TgK_wall, - self.tg_maps.Tstart_wall, - self.tg_maps.TmaxLST, - self.tg_maps.TmaxLST_wall, - first, - second, - self.svf_data.svfalfa, - self.raster_data.svfbuveg, - firstdaytime, - timeadd, - timestepdec, - self.tg_maps.Tgmap1, - self.tg_maps.Tgmap1E, - self.tg_maps.Tgmap1S, - self.tg_maps.Tgmap1W, - self.tg_maps.Tgmap1N, - self.environ_data.CI[iter], - self.tg_maps.TgOut1, - self.shadow_mats.diffsh, - self.shadow_mats.shmat, - self.shadow_mats.vegshmat, - self.shadow_mats.vbshvegshmat, - int(self.config.use_aniso), # expects int though should work either way - self.shadow_mats.asvf, - self.shadow_mats.patch_option, - self.walls_data.voxelMaps, - self.walls_data.voxelTable, - self.environ_data.Ws[iter], - self.config.use_wall_scheme, - self.walls_data.timeStep, - self.shadow_mats.steradians, - self.walls_data.walls_scheme, - self.walls_data.dirwalls_scheme, - ) diff --git a/pysrc/umepr/svf.py b/pysrc/umepr/svf.py deleted file mode 100644 index 8955c92..0000000 --- a/pysrc/umepr/svf.py +++ /dev/null @@ -1,526 +0,0 @@ -""" -SVF wrapper for Python - calls full Rust SVF via skyview rust module. 
-""" - -# %% -import logging -import os -import shutil -import tempfile -import zipfile -from pathlib import Path - -import numpy as np -from umep import class_configs, common -from umep.tile_manager import TileManager - -from .rustalgos import skyview - -logging.basicConfig(level=logging.INFO) -logger = logging.getLogger(__name__) - - -# %% -def generate_svf( - dsm_path: str, - bbox: list[int], - out_dir: str, - dem_path: str | None = None, - cdsm_path: str | None = None, - trans_veg_perc: float = 3, - trunk_ratio_perc: float = 25, - amax_local_window_m: int = 100, - amax_local_perc: float = 99.9, - use_tiled_loading: bool = False, - tile_size: int = 1000, - save_shadowmats: bool = True, -): - """ - Generate Sky View Factor outputs. - - Args: - save_shadowmats: Save shadow matrices (required for SOLWEIG anisotropic sky). - Saved as uint8 (75% smaller than float32). Set to False only - if you don't need SOLWEIG's anisotropic modeling. - """ - out_path = Path(out_dir) - out_path.mkdir(parents=True, exist_ok=True) - out_path_str = str(out_path) - - # Open the DSM file to get metadata - # If tiled, we only load metadata first - if use_tiled_loading: - dsm_meta = common.get_raster_metadata(dsm_path) - dsm_trf = dsm_meta["transform"] - dsm_crs = dsm_meta["crs"] - dsm_nd = dsm_meta["nodata"] - rows = dsm_meta["rows"] - cols = dsm_meta["cols"] - - # Handle rasterio vs GDAL transform - if "res" in dsm_meta: - # Convert Rasterio Affine to GDAL transform - # Affine: (a, b, c, d, e, f) -> GDAL: (c, a, b, f, d, e) - t = dsm_trf - dsm_trf = [t.c, t.a, t.b, t.f, t.d, t.e] - - dsm_pix_size = dsm_trf[1] - dsm_scale = 1 / dsm_pix_size - - # Calculate conservative global amax for buffer estimation - # Load sample data to estimate terrain complexity - sample_dsm, _, _, _ = common.load_raster(dsm_path, bbox=None, coerce_f64_to_f32=True) - - if dem_path is None: - # Without DEM, use DSM range as conservative estimate - global_amax = float(np.nanmax(sample_dsm) - np.nanmin(sample_dsm)) 
- else: - # With DEM, estimate from height differences - sample_dem, _, _, _ = common.load_raster(dem_path, bbox=None, coerce_f64_to_f32=True) - height_diff = sample_dsm - sample_dem - global_amax = float(np.nanpercentile(height_diff[~np.isnan(height_diff)], 99.9)) - del sample_dem - - # Add safety margin and cap at reasonable maximum - global_amax = min(global_amax * 1.2, 200.0) # 20% safety margin, max 200m - del sample_dsm - - logger.info(f"Estimated global amax: {global_amax:.1f}m for buffer calculation") - - # Initialize TileManager with calculated buffer - tile_manager = TileManager(rows, cols, tile_size, dsm_pix_size, buffer_dist=global_amax) - - if len(tile_manager.tiles) == 0: - raise ValueError(f"TileManager generated 0 tiles for {rows}x{cols} raster with tile_size={tile_size}") - - logger.info(f"Initialized TileManager with {len(tile_manager.tiles)} tiles.") - - # Initialize output rasters - # We need to create empty rasters for all outputs - output_files = ["input-dsm.tif", "svf.tif", "svfE.tif", "svfS.tif", "svfW.tif", "svfN.tif"] - if dem_path: - output_files.append("input-dem.tif") - if cdsm_path: - output_files.extend( - [ - "input-cdsm.tif", - "input-tdsm.tif", - "svfveg.tif", - "svfEveg.tif", - "svfSveg.tif", - "svfWveg.tif", - "svfNveg.tif", - "svfaveg.tif", - "svfEaveg.tif", - "svfSaveg.tif", - "svfWaveg.tif", - "svfNaveg.tif", - "svf_total.tif", - ] - ) - - for fname in output_files: - common.create_empty_raster(out_path_str + "/" + fname, rows, cols, dsm_trf, dsm_crs, nodata=-9999.0) - - # Initialize memory-mapped arrays for shadow matrices (only if needed) - if save_shadowmats: - # 153 patches is standard for this algorithm - patches = 153 - shmat_shape = (rows, cols, patches) - - # Create temp file paths - temp_dir = tempfile.mkdtemp(dir=out_path_str) - shmat_path = os.path.join(temp_dir, "shmat.dat") - vegshmat_path = os.path.join(temp_dir, "vegshmat.dat") - vbshvegshmat_path = os.path.join(temp_dir, "vbshvegshmat.dat") - - # Use uint8 
instead of float32 for 75% space savings (shadow mats are binary 0/1) - # Calculate memory requirements - memmap_size_mb = (shmat_shape[0] * shmat_shape[1] * shmat_shape[2] * 1) / (1024 * 1024) - logger.info(f"Creating memory-mapped arrays: {memmap_size_mb * 3:.1f} MB total (3 arrays, uint8)") - - # Create memmapped arrays with error handling - try: - shmat_mem = np.memmap(shmat_path, dtype="uint8", mode="w+", shape=shmat_shape) - vegshmat_mem = np.memmap(vegshmat_path, dtype="uint8", mode="w+", shape=shmat_shape) - vbshvegshmat_mem = np.memmap(vbshvegshmat_path, dtype="uint8", mode="w+", shape=shmat_shape) - except OSError as e: - shutil.rmtree(temp_dir, ignore_errors=True) - raise OSError( - f"Failed to create memory-mapped arrays ({memmap_size_mb * 3:.1f} MB). " - f"Check disk space and permissions in {out_path_str}. Error: {e}" - ) from e - - trans_veg = trans_veg_perc / 100.0 - trunk_ratio = trunk_ratio_perc / 100.0 - - # Iterate over tiles - for i, tile in enumerate(tile_manager.get_tiles()): - logger.info(f"Processing tile {i + 1}/{len(tile_manager.tiles)}") - - # Load inputs for tile (with overlap) - dsm_tile = common.read_raster_window(dsm_path, tile.full_slice, band=1) - - dem_tile = None - if dem_path: - dem_tile = common.read_raster_window(dem_path, tile.full_slice, band=1) - - cdsm_tile = None - if cdsm_path: - cdsm_tile = common.read_raster_window(cdsm_path, tile.full_slice, band=1) - - # Preprocess - dsm_tile, dem_tile, cdsm_tile, tdsm_tile, amax = class_configs.raster_preprocessing( - dsm_tile, - dem_tile, - cdsm_tile, - None, - trunk_ratio, - dsm_pix_size, - amax_local_window_m=amax_local_window_m, - amax_local_perc=amax_local_perc, - quiet=True, - ) - - # Compute SVF using Rust skyview module - use_cdsm_bool = cdsm_path is not None - runner = skyview.SkyviewRunner() - ret = runner.calculate_svf( - dsm_tile.astype(np.float32), - cdsm_tile.astype(np.float32) if cdsm_tile is not None else None, - tdsm_tile.astype(np.float32) if tdsm_tile is not None 
else None, - dsm_scale, - use_cdsm_bool, - amax, - 2, # 153 patches - 5.0, # min_sun_elev_deg - ) - - # Write outputs (core only) - core_slice = tile.core_slice() - write_win = tile.write_window.to_slices() - - # Helper to write core - bind loop vars with default args - def write_core(fname, data, cs=core_slice, ww=write_win): - core_data = data[cs] - common.write_raster_window(out_path_str + "/" + fname, core_data, ww) - - write_core("input-dsm.tif", dsm_tile) - if dem_tile is not None: - write_core("input-dem.tif", dem_tile) - if cdsm_tile is not None: - write_core("input-cdsm.tif", cdsm_tile) - write_core("input-tdsm.tif", tdsm_tile) - - write_core("svf.tif", ret.svf) - write_core("svfE.tif", ret.svf_east) - write_core("svfS.tif", ret.svf_south) - write_core("svfW.tif", ret.svf_west) - write_core("svfN.tif", ret.svf_north) - - if use_cdsm_bool: - write_core("svfveg.tif", ret.svf_veg) - write_core("svfEveg.tif", ret.svf_veg_east) - write_core("svfSveg.tif", ret.svf_veg_south) - write_core("svfWveg.tif", ret.svf_veg_west) - write_core("svfNveg.tif", ret.svf_veg_north) - write_core("svfaveg.tif", ret.svf_veg_blocks_bldg_sh) - write_core("svfEaveg.tif", ret.svf_veg_blocks_bldg_sh_east) - write_core("svfSaveg.tif", ret.svf_veg_blocks_bldg_sh_south) - write_core("svfWaveg.tif", ret.svf_veg_blocks_bldg_sh_west) - write_core("svfNaveg.tif", ret.svf_veg_blocks_bldg_sh_north) - - # Calculate total SVF - svftotal_tile = ret.svf - (1 - ret.svf_veg) * (1 - trans_veg) - write_core("svf_total.tif", svftotal_tile) - - # Write shadow matrices to memmap (if saving) - if save_shadowmats: - # Extract core for 3D arrays - use core_slice with added dimension - core_slice_3d = core_slice + (slice(None),) - - # Destination slice in memmap - use write_window directly - write_slice_3d = tile.write_window.to_slices() + (slice(None),) - - # Convert to uint8 (shadow matrices are binary 0/1) - shmat_mem[write_slice_3d] = (ret.bldg_sh_matrix[core_slice_3d] * 255).astype(np.uint8) - 
vegshmat_mem[write_slice_3d] = (ret.veg_sh_matrix[core_slice_3d] * 255).astype(np.uint8) - vbshvegshmat_mem[write_slice_3d] = (ret.veg_blocks_bldg_sh_matrix[core_slice_3d] * 255).astype(np.uint8) - - # Flush memmaps periodically? - if i % 10 == 0: - shmat_mem.flush() - vegshmat_mem.flush() - vbshvegshmat_mem.flush() - - # Save shadow matrices (if requested) - if save_shadowmats: - # Flush final - shmat_mem.flush() - vegshmat_mem.flush() - vbshvegshmat_mem.flush() - - # Save shadow matrices as compressed npz (uint8 format) - # We read from the memmapped files - logger.info("Saving shadow matrices to npz (uint8 format, 75% smaller)...") - np.savez_compressed( - out_path_str + "/" + "shadowmats.npz", - shadowmat=shmat_mem, - vegshadowmat=vegshmat_mem, - vbshmat=vbshvegshmat_mem, - dtype="uint8", # Store metadata about dtype - ) - - # Cleanup temp - del shmat_mem - del vegshmat_mem - del vbshvegshmat_mem - shutil.rmtree(temp_dir) - else: - logger.info("Skipping shadow matrix save (not needed for this workflow)") - - # Zip SVF files (same as standard) - zip_filepath = out_path_str + "/" + "svfs.zip" - if os.path.isfile(zip_filepath): - os.remove(zip_filepath) - - with zipfile.ZipFile(zip_filepath, "a") as zippo: - zippo.write(out_path_str + "/" + "svf.tif", "svf.tif") - zippo.write(out_path_str + "/" + "svfE.tif", "svfE.tif") - zippo.write(out_path_str + "/" + "svfS.tif", "svfS.tif") - zippo.write(out_path_str + "/" + "svfW.tif", "svfW.tif") - zippo.write(out_path_str + "/" + "svfN.tif", "svfN.tif") - - if cdsm_path: - zippo.write(out_path_str + "/" + "svfveg.tif", "svfveg.tif") - zippo.write(out_path_str + "/" + "svfEveg.tif", "svfEveg.tif") - zippo.write(out_path_str + "/" + "svfSveg.tif", "svfSveg.tif") - zippo.write(out_path_str + "/" + "svfWveg.tif", "svfWveg.tif") - zippo.write(out_path_str + "/" + "svfNveg.tif", "svfNveg.tif") - zippo.write(out_path_str + "/" + "svfaveg.tif", "svfaveg.tif") - zippo.write(out_path_str + "/" + "svfEaveg.tif", "svfEaveg.tif") - 
zippo.write(out_path_str + "/" + "svfSaveg.tif", "svfSaveg.tif") - zippo.write(out_path_str + "/" + "svfWaveg.tif", "svfWaveg.tif") - zippo.write(out_path_str + "/" + "svfNaveg.tif", "svfNaveg.tif") - - # Remove individual files - files_to_remove = ["svf.tif", "svfE.tif", "svfS.tif", "svfW.tif", "svfN.tif"] - if cdsm_path: - files_to_remove.extend( - [ - "svfveg.tif", - "svfEveg.tif", - "svfSveg.tif", - "svfWveg.tif", - "svfNveg.tif", - "svfaveg.tif", - "svfEaveg.tif", - "svfSaveg.tif", - "svfWaveg.tif", - "svfNaveg.tif", - ] - ) - - for f in files_to_remove: - try: - os.remove(out_path_str + "/" + f) - except OSError as e: - logger.warning(f"Could not remove temporary file {f}: {e}") - - return - - # Standard execution (non-tiled) - # Open the DSM file - dsm, dsm_trf, dsm_crs, dsm_nd = common.load_raster(dsm_path, bbox, coerce_f64_to_f32=True) - dsm_pix_size = dsm_trf[1] - dsm_scale = 1 / dsm_pix_size - - dem = None - if dem_path is not None: - dem, dem_trf, dem_crs, _dem_nd = common.load_raster(dem_path, bbox, coerce_f64_to_f32=True) - assert dem.shape == dsm.shape, "Mismatching raster shapes for DSM and DEM." - assert np.allclose(dsm_trf, dem_trf), "Mismatching spatial transform for DSM and DEM." - assert dem_crs == dsm_crs, "Mismatching CRS for DSM and DEM." - - use_cdsm = False - cdsm = None - if cdsm_path is not None: - use_cdsm = True - cdsm, cdsm_trf, cdsm_crs, _cdsm_nd = common.load_raster(cdsm_path, bbox, coerce_f64_to_f32=True) - assert cdsm.shape == dsm.shape, "Mismatching raster shapes for DSM and CDSM." - assert np.allclose(dsm_trf, cdsm_trf), "Mismatching spatial transform for DSM and CDSM." - assert cdsm_crs == dsm_crs, "Mismatching CRS for DSM and CDSM." 
- - # veg transmissivity as percentage - if not (0 <= trans_veg_perc <= 100): - raise ValueError("Vegetation transmissivity should be a number between 0 and 100") - - trans_veg = trans_veg_perc / 100.0 - trunk_ratio = trunk_ratio_perc / 100.0 - - dsm, dem, cdsm, tdsm, amax = class_configs.raster_preprocessing( - dsm, - dem, - cdsm, - None, - trunk_ratio, - dsm_pix_size, - amax_local_window_m=amax_local_window_m, - amax_local_perc=amax_local_perc, - ) - - common.save_raster( - out_path_str + "/input-dsm.tif", - dsm, - dsm_trf, - dsm_crs, - dsm_nd, - coerce_f64_to_f32=True, - ) - if dem is not None: - common.save_raster( - out_path_str + "/input-dem.tif", - dem, - dsm_trf, - dsm_crs, - dsm_nd, - coerce_f64_to_f32=True, - ) - if use_cdsm: - common.save_raster( - out_path_str + "/input-cdsm.tif", - cdsm, - dsm_trf, - dsm_crs, - dsm_nd, - coerce_f64_to_f32=True, - ) - common.save_raster( - out_path_str + "/input-tdsm.tif", - tdsm, - dsm_trf, - dsm_crs, - dsm_nd, - coerce_f64_to_f32=True, - ) - - # compute using Rust skyview module - runner = skyview.SkyviewRunner() - ret = runner.calculate_svf( - dsm.astype(np.float32), - cdsm.astype(np.float32) if cdsm is not None else None, - tdsm.astype(np.float32) if tdsm is not None else None, - dsm_scale, - use_cdsm, - amax, - 2, # 153 patches - 5.0, # min_sun_elev_deg - ) - - svfbu = ret.svf - svfbuE = ret.svf_east - svfbuS = ret.svf_south - svfbuW = ret.svf_west - svfbuN = ret.svf_north - - # Save the rasters using rasterio - common.save_raster(out_path_str + "/" + "svf.tif", svfbu, dsm_trf, dsm_crs, coerce_f64_to_f32=True) - common.save_raster(out_path_str + "/" + "svfE.tif", svfbuE, dsm_trf, dsm_crs, coerce_f64_to_f32=True) - common.save_raster(out_path_str + "/" + "svfS.tif", svfbuS, dsm_trf, dsm_crs, coerce_f64_to_f32=True) - common.save_raster(out_path_str + "/" + "svfW.tif", svfbuW, dsm_trf, dsm_crs, coerce_f64_to_f32=True) - common.save_raster(out_path_str + "/" + "svfN.tif", svfbuN, dsm_trf, dsm_crs, 
coerce_f64_to_f32=True) - - # Create or update the ZIP file - zip_filepath = out_path_str + "/" + "svfs.zip" - if os.path.isfile(zip_filepath): - os.remove(zip_filepath) - - with zipfile.ZipFile(zip_filepath, "a") as zippo: - zippo.write(out_path_str + "/" + "svf.tif", "svf.tif") - zippo.write(out_path_str + "/" + "svfE.tif", "svfE.tif") - zippo.write(out_path_str + "/" + "svfS.tif", "svfS.tif") - zippo.write(out_path_str + "/" + "svfW.tif", "svfW.tif") - zippo.write(out_path_str + "/" + "svfN.tif", "svfN.tif") - - # Remove the individual TIFF files after zipping - os.remove(out_path_str + "/" + "svf.tif") - os.remove(out_path_str + "/" + "svfE.tif") - os.remove(out_path_str + "/" + "svfS.tif") - os.remove(out_path_str + "/" + "svfW.tif") - os.remove(out_path_str + "/" + "svfN.tif") - - if use_cdsm == 0: - svftotal = svfbu - else: - # Report the vegetation-related results - svfveg = ret.svf_veg - svfEveg = ret.svf_veg_east - svfSveg = ret.svf_veg_south - svfWveg = ret.svf_veg_west - svfNveg = ret.svf_veg_north - svfaveg = ret.svf_veg_blocks_bldg_sh - svfEaveg = ret.svf_veg_blocks_bldg_sh_east - svfSaveg = ret.svf_veg_blocks_bldg_sh_south - svfWaveg = ret.svf_veg_blocks_bldg_sh_west - svfNaveg = ret.svf_veg_blocks_bldg_sh_north - - # Save vegetation rasters - common.save_raster(out_path_str + "/" + "svfveg.tif", svfveg, dsm_trf, dsm_crs, coerce_f64_to_f32=True) - common.save_raster(out_path_str + "/" + "svfEveg.tif", svfEveg, dsm_trf, dsm_crs, coerce_f64_to_f32=True) - common.save_raster(out_path_str + "/" + "svfSveg.tif", svfSveg, dsm_trf, dsm_crs, coerce_f64_to_f32=True) - common.save_raster(out_path_str + "/" + "svfWveg.tif", svfWveg, dsm_trf, dsm_crs, coerce_f64_to_f32=True) - common.save_raster(out_path_str + "/" + "svfNveg.tif", svfNveg, dsm_trf, dsm_crs, coerce_f64_to_f32=True) - common.save_raster(out_path_str + "/" + "svfaveg.tif", svfaveg, dsm_trf, dsm_crs, coerce_f64_to_f32=True) - common.save_raster(out_path_str + "/" + "svfEaveg.tif", svfEaveg, dsm_trf, 
dsm_crs, coerce_f64_to_f32=True) - common.save_raster(out_path_str + "/" + "svfSaveg.tif", svfSaveg, dsm_trf, dsm_crs, coerce_f64_to_f32=True) - common.save_raster(out_path_str + "/" + "svfWaveg.tif", svfWaveg, dsm_trf, dsm_crs, coerce_f64_to_f32=True) - common.save_raster(out_path_str + "/" + "svfNaveg.tif", svfNaveg, dsm_trf, dsm_crs, coerce_f64_to_f32=True) - - # Add vegetation rasters to the ZIP file - with zipfile.ZipFile(zip_filepath, "a") as zippo: - zippo.write(out_path_str + "/" + "svfveg.tif", "svfveg.tif") - zippo.write(out_path_str + "/" + "svfEveg.tif", "svfEveg.tif") - zippo.write(out_path_str + "/" + "svfSveg.tif", "svfSveg.tif") - zippo.write(out_path_str + "/" + "svfWveg.tif", "svfWveg.tif") - zippo.write(out_path_str + "/" + "svfNveg.tif", "svfNveg.tif") - zippo.write(out_path_str + "/" + "svfaveg.tif", "svfaveg.tif") - zippo.write(out_path_str + "/" + "svfEaveg.tif", "svfEaveg.tif") - zippo.write(out_path_str + "/" + "svfSaveg.tif", "svfSaveg.tif") - zippo.write(out_path_str + "/" + "svfWaveg.tif", "svfWaveg.tif") - zippo.write(out_path_str + "/" + "svfNaveg.tif", "svfNaveg.tif") - - # Remove the individual TIFF files after zipping - os.remove(out_path_str + "/" + "svfveg.tif") - os.remove(out_path_str + "/" + "svfEveg.tif") - os.remove(out_path_str + "/" + "svfSveg.tif") - os.remove(out_path_str + "/" + "svfWveg.tif") - os.remove(out_path_str + "/" + "svfNveg.tif") - os.remove(out_path_str + "/" + "svfaveg.tif") - os.remove(out_path_str + "/" + "svfEaveg.tif") - os.remove(out_path_str + "/" + "svfSaveg.tif") - os.remove(out_path_str + "/" + "svfWaveg.tif") - os.remove(out_path_str + "/" + "svfNaveg.tif") - - # Calculate final total SVF - svftotal = svfbu - (1 - svfveg) * (1 - trans_veg) - - # Save the final svftotal raster - common.save_raster(out_path_str + "/" + "svf_total.tif", svftotal, dsm_trf, dsm_crs, coerce_f64_to_f32=True) - - # Save shadow matrices as compressed npz (only if requested) - if save_shadowmats: - shmat = ret.bldg_sh_matrix 
- vegshmat = ret.veg_sh_matrix - vbshvegshmat = ret.veg_blocks_bldg_sh_matrix - - # Convert to uint8 for 75% space savings (shadow matrices are binary 0/1) - logger.info("Saving shadow matrices to npz (uint8 format, 75% smaller)...") - np.savez_compressed( - out_path_str + "/" + "shadowmats.npz", - shadowmat=(shmat * 255).astype(np.uint8), - vegshadowmat=(vegshmat * 255).astype(np.uint8), - vbshmat=(vbshvegshmat * 255).astype(np.uint8), - dtype="uint8", # Store metadata about dtype - ) - else: - logger.info("Skipping shadow matrix save (not needed for this workflow)") diff --git a/qgis_plugin/README.md b/qgis_plugin/README.md new file mode 100644 index 0000000..ae51bd3 --- /dev/null +++ b/qgis_plugin/README.md @@ -0,0 +1,353 @@ +# SOLWEIG QGIS Plugin + +QGIS Processing plugin for SOLWEIG (Solar and Longwave Environmental Irradiance Geometry model). + +## Overview + +This plugin wraps SOLWEIG's Python API to provide native QGIS Processing framework integration. It enables calculation of Mean Radiant Temperature (Tmrt), UTCI, and PET thermal comfort indices directly within QGIS. + +**Key Features:** + +- Native QGIS Processing Toolbox integration +- Model Builder and batch mode support +- Auto-detects GDAL backend (no rasterio required in QGIS/OSGeo4W) +- Progress reporting via QgsProcessingFeedback +- Outputs auto-load to canvas with thermal comfort color ramps + +## Installation + +1. Copy the `solweig_qgis/` directory to your QGIS plugins folder: + - Windows: `%APPDATA%\QGIS\QGIS3\profiles\default\python\plugins\` + - macOS: `~/Library/Application Support/QGIS/QGIS3/profiles/default/python/plugins/` + - Linux: `~/.local/share/QGIS/QGIS3/profiles/default/python/plugins/` + +2. Enable the plugin in QGIS: Plugins → Manage and Install Plugins → Installed → SOLWEIG + +3. 
Access algorithms via Processing Toolbox → SOLWEIG + +## Algorithms + +### Preprocessing + +| Algorithm | Description | +| --------------------------- | ------------------------------------------------- | +| **Compute Sky View Factor** | Pre-compute SVF arrays for reuse across timesteps | + +### Calculation + +| Algorithm | Description | +| ------------------------------------ | ---------------------------------------------------------- | +| **Calculate Tmrt (Single Timestep)** | Calculate Mean Radiant Temperature for one datetime | +| **Calculate Tmrt (Timeseries)** | Multi-timestep calculation with thermal state accumulation | +| **Calculate Tmrt (Large Rasters)** | Memory-efficient tiled processing for large areas | + +### Post-Processing + +| Algorithm | Description | +| ---------------- | ----------------------------------------------------- | +| **Compute UTCI** | Universal Thermal Climate Index (fast polynomial) | +| **Compute PET** | Physiological Equivalent Temperature (detailed model) | + +### Utilities + +| Algorithm | Description | +| --------------------------- | --------------------------------------------- | +| **Import EPW Weather File** | Preview and validate EnergyPlus weather files | + +## Directory Structure + +``` +qgis_plugin/ +├── README.md # This file +├── build_plugin.py # Build script for bundled distribution +│ +└── solweig_qgis/ # Plugin package (install this to QGIS) + ├── __init__.py # Plugin entry point with classFactory() + ├── metadata.txt # QGIS plugin metadata + ├── provider.py # SolweigProvider (registers algorithms) + ├── _bundled/ # Bundled SOLWEIG library (for distribution) + │ + ├── algorithms/ + │ ├── __init__.py + │ ├── base.py # SolweigAlgorithmBase (shared utilities) + │ │ + │ ├── preprocess/ + │ │ ├── __init__.py + │ │ └── svf_preprocessing.py # "Compute Sky View Factor" + │ │ + │ ├── calculation/ + │ │ ├── __init__.py + │ │ ├── single_timestep.py # "Calculate Tmrt (Single Timestep)" + │ │ ├── timeseries.py # "Calculate 
Tmrt (Timeseries)" + │ │ └── tiled_processing.py # "Calculate Tmrt (Large Rasters)" + │ │ + │ ├── postprocess/ + │ │ ├── __init__.py + │ │ ├── utci.py # "Compute UTCI" + │ │ └── pet.py # "Compute PET" + │ │ + │ └── utilities/ + │ ├── __init__.py + │ └── epw_import.py # "Import EPW Weather File" + │ + └── utils/ + ├── __init__.py + ├── parameters.py # Common parameter builders + └── converters.py # QGIS ↔ solweig dataclass conversion +``` + +--- + +## Implementation Checklist + +### Phase 1: Plugin Skeleton ✅ + +- [x] **1.1** Create `__init__.py` with `classFactory()` entry point +- [x] **1.2** Create `metadata.txt` with plugin metadata +- [x] **1.3** Create `provider.py` with `SolweigProvider` class +- [ ] **1.4** Create placeholder icon.png +- [ ] **1.5** Test plugin loads in QGIS (empty provider) + +### Phase 2: Shared Utilities ✅ + +- [x] **2.1** Create `algorithms/__init__.py` +- [x] **2.2** Create `algorithms/base.py` with `SolweigAlgorithmBase`: + - [x] `load_raster_from_layer()` - QGIS layer → numpy array via GDAL + - [x] `load_optional_raster()` - Handle optional raster parameters + - [x] `save_georeferenced_output()` - Save with CRS/transform via solweig.io + - [x] `add_raster_to_canvas()` - Add layer to QGIS project + - [x] `apply_thermal_comfort_style()` - Apply UTCI/Tmrt color ramps +- [x] **2.3** Create `utils/__init__.py` +- [x] **2.4** Create `utils/parameters.py` with common parameter builders: + - [x] `add_surface_parameters()` - DSM, CDSM, DEM, TDSM, LAND_COVER + - [x] `add_location_parameters()` - LAT, LON, UTC_OFFSET, AUTO_EXTRACT + - [x] `add_weather_parameters()` - DATETIME, TA, RH, RAD, WIND + - [x] `add_human_parameters()` - POSTURE, ABS_K +- [x] **2.5** Create `utils/converters.py`: + - [x] `create_surface_from_parameters()` - Build SurfaceData from QGIS params + - [x] `create_location_from_parameters()` - Build Location from params + - [x] `create_weather_from_parameters()` - Build Weather from params + +### Phase 3: Single Timestep 
Algorithm ✅ + +- [x] **3.1** Create `algorithms/calculation/__init__.py` +- [x] **3.2** Create `algorithms/calculation/single_timestep.py`: + - [x] Define all input parameters (surface, location, weather, human, options) + - [x] Define output parameters (TMRT, optional SHADOW, KDOWN) + - [x] Implement `processAlgorithm()`: + - [x] Load rasters from QGIS layers + - [x] Create SurfaceData, Location, Weather, HumanParams + - [x] Handle height conversion (relative → absolute) + - [x] Call `solweig.calculate()` + - [x] Save output GeoTIFF + - [x] Add to canvas with styling +- [x] **3.3** Register in provider +- [ ] **3.4** Test in QGIS with Gothenburg test data + +### Phase 4: SVF Preprocessing Algorithm ✅ + +- [x] **4.1** Create `algorithms/preprocess/__init__.py` +- [x] **4.2** Create `algorithms/preprocess/svf_preprocessing.py`: + - [x] Define input parameters (DSM, CDSM, DEM, TDSM, TRANS_VEG, OUTPUT_DIR) + - [x] Define output parameters (SVF_DIR, SVF_FILE) + - [x] Implement `processAlgorithm()`: + - [x] Load rasters + - [x] Create SurfaceData + - [x] Call `surface.prepare()` with working_dir + - [x] Report progress via feedback +- [x] **4.3** Register in provider +- [ ] **4.4** Test SVF computation and caching + +### Phase 5: Timeseries Algorithm ✅ + +- [x] **5.1** Create `algorithms/calculation/timeseries.py`: + - [x] Add EPW_FILE, START_DATE, END_DATE, HOURS_FILTER parameters + - [x] Add OUTPUT_DIR, OUTPUTS selection parameters + - [x] Implement `processAlgorithm()`: + - [x] Load and filter weather from EPW + - [x] Create surface and location + - [x] Call `solweig.calculate_timeseries()` + - [x] Report progress per timestep + - [x] Handle cancellation +- [x] **5.2** Register in provider +- [ ] **5.3** Test with multi-day EPW data + +### Phase 6: UTCI Algorithm ✅ + +- [x] **6.1** Create `algorithms/postprocess/__init__.py` +- [x] **6.2** Create `algorithms/postprocess/utci.py`: + - [x] Define TMRT_DIR, EPW_FILE, OUTPUT_DIR parameters + - [x] Implement 
`processAlgorithm()`: + - [x] Load weather series from EPW + - [x] Call `solweig.compute_utci()` + - [x] Report file count +- [x] **6.3** Register in provider +- [ ] **6.4** Test UTCI computation + +### Phase 7: PET Algorithm ✅ + +- [x] **7.1** Create `algorithms/postprocess/pet.py`: + - [x] Add human body parameters (AGE, WEIGHT, HEIGHT, SEX, ACTIVITY, CLOTHING) + - [x] Implement `processAlgorithm()` calling `solweig.compute_pet()` +- [x] **7.2** Register in provider +- [ ] **7.3** Test PET computation + +### Phase 8: EPW Import Utility ✅ + +- [x] **8.1** Create `algorithms/utilities/__init__.py` +- [x] **8.2** Create `algorithms/utilities/epw_import.py`: + - [x] Define EPW_FILE input parameter + - [x] Implement `processAlgorithm()`: + - [x] Parse EPW with `solweig.io.read_epw()` + - [x] Generate HTML report with location, date range, statistics +- [x] **8.3** Register in provider +- [ ] **8.4** Test with sample EPW files + +### Phase 9: Tiled Processing Algorithm ✅ + +- [x] **9.1** Create `algorithms/calculation/tiled_processing.py`: + - [x] Add TILE_SIZE, AUTO_TILE_SIZE parameters + - [x] Implement `processAlgorithm()` calling `solweig.calculate_tiled()` +- [x] **9.2** Register in provider +- [ ] **9.3** Test with large raster + +### Phase 10: Build & Distribution ✅ + +- [x] **10.1** Create `build_plugin.py` build script +- [x] **10.2** Set up `_bundled/` directory support in `__init__.py` +- [x] **10.3** Create GitHub Actions workflow for cross-platform builds +- [x] **10.4** Update README with build instructions + +### Phase 11: Testing & Polish (Pending) + +- [ ] **11.1** Add docstrings to all algorithms +- [ ] **11.2** Create help strings for QGIS Help panel +- [ ] **11.3** Test full workflow in QGIS +- [ ] **11.4** Verify outputs match standalone Python execution +- [ ] **11.5** Create icon.png +- [ ] **11.6** Update this README with usage examples + +--- + +## Building & Distribution + +The plugin can be distributed in two ways: + +### Option A: Bundled 
Distribution (Recommended for Users) + +This bundles the compiled Rust extension and Python modules into the plugin, so users don't need to install anything separately. + +```bash +# Build for your current platform +cd qgis_plugin +python build_plugin.py + +# Create distributable ZIP +python build_plugin.py --package --version 0.1.0 + +# Clean build artifacts +python build_plugin.py --clean +``` + +This creates a platform-specific ZIP file (e.g., `solweig-qgis-0.1.0-linux_x86_64.zip`) that can be installed directly in QGIS. + +**Supported platforms (CI builds):** + +- Linux x86_64 +- Windows x86_64 +- macOS x86_64 +- macOS aarch64 (Apple Silicon) + +### Option B: Development Setup + +For development or if you have SOLWEIG installed via pip: + +1. Install SOLWEIG in your Python environment: + + ```bash + pip install solweig + # or for development + cd /path/to/solweig && pip install -e . + ``` + +2. Symlink the plugin to QGIS: + + ```bash + # Linux + ln -s /path/to/solweig/qgis_plugin/solweig_qgis ~/.local/share/QGIS/QGIS3/profiles/default/python/plugins/solweig_qgis + + # macOS + ln -s /path/to/solweig/qgis_plugin/solweig_qgis ~/Library/Application\ Support/QGIS/QGIS3/profiles/default/python/plugins/solweig_qgis + + # Windows (run as admin) + mklink /D "%APPDATA%\QGIS\QGIS3\profiles\default\python\plugins\solweig_qgis" "C:\path\to\solweig\qgis_plugin\solweig_qgis" + ``` + +The plugin auto-detects SOLWEIG in this order: + +1. Bundled (`_bundled/` directory) +2. System-installed (via pip) +3. Development path (`../pysrc/solweig`) + +### CI/CD Automated Builds (Universal Plugin) + +The GitHub Actions workflow (`.github/workflows/build-qgis-plugin.yml`) automatically builds a **universal multi-platform plugin**: + +**Triggers:** + +- Version tags (e.g., `v0.1.0`) +- Manual workflow dispatch + +**Build process:** + +1. Builds Rust wheels for all 4 platforms (Linux, Windows, macOS Intel, macOS ARM) +2. 
Extracts Python modules from one wheel (identical across platforms) +3. Extracts platform-specific `rustalgos` binaries to `_native//` +4. Creates single ZIP: `solweig-qgis-{version}-universal.zip` + +**At runtime**, the plugin auto-detects the platform and loads the correct binary from `_native/` + +**Result:** One ZIP works on all platforms - no need for separate downloads per OS + +## Dependencies + +**For bundled distribution:** No external dependencies required. + +**For development:** The plugin requires the SOLWEIG Python package: + +```bash +pip install solweig +``` + +Or point to development source: + +```python +import sys +sys.path.insert(0, '/path/to/solweig/pysrc') +``` + +## Core Library Files Referenced + +| File | Purpose | +| --------------------------------- | ----------------------------------------------------- | +| `pysrc/solweig/api.py` | Entry points: `calculate()`, `calculate_timeseries()` | +| `pysrc/solweig/progress.py` | QgsProcessingFeedback integration | +| `pysrc/solweig/io.py` | GDAL backend, EPW parser | +| `pysrc/solweig/models/surface.py` | SurfaceData with height conversion | +| `pysrc/solweig/models/weather.py` | Weather.from_epw() | + +## Citation + +If you use SOLWEIG in your research, please cite the original UMEP paper: + +> Lindberg F, Grimmond CSB, Gabey A, Huang B, Kent CW, Sun T, Theeuwes N, Järvi L, Ward H, Capel-Timms I, Chang YY, Jonsson P, Krave N, Liu D, Meyer D, Olofson F, Tan JG, Wästberg D, Xue L, Zhang Z (2018) Urban Multi-scale Environmental Predictor (UMEP) - An integrated tool for city-based climate services. Environmental Modelling and Software 99, 70-87 [doi:10.1016/j.envsoft.2017.09.020](https://doi.org/10.1016/j.envsoft.2017.09.020) + +## Original Code + +This plugin is adapted from the GPLv3-licensed [UMEP-processing](https://github.com/UMEP-dev/UMEP-processing) by Fredrik Lindberg, Ting Sun, Sue Grimmond, Yihao Tang, and Nils Wallenberg. 
+ +SOLWEIG plugin maintained by Gareth Simons and the SOLWEIG Development Team. + +## License + +GNU General Public License v3.0. Same license as SOLWEIG core library and original UMEP code. diff --git a/qgis_plugin/build_plugin.py b/qgis_plugin/build_plugin.py new file mode 100644 index 0000000..84d22f1 --- /dev/null +++ b/qgis_plugin/build_plugin.py @@ -0,0 +1,140 @@ +#!/usr/bin/env python3 +""" +Build script for SOLWEIG QGIS plugin. + +Packages the plugin into a distributable ZIP for the QGIS Plugin Repository. +The solweig library itself is installed separately via pip (auto-prompted on +first use, or manually with ``pip install solweig``). + +The version is read from pyproject.toml (single source of truth) and stamped +into metadata.txt before packaging. + +Usage: + python build_plugin.py # Create distributable ZIP + python build_plugin.py --version 0.2.0 # Override version +""" + +from __future__ import annotations + +import argparse +import re +import shutil +import zipfile +from pathlib import Path + +# Paths +SCRIPT_DIR = Path(__file__).parent +PROJECT_ROOT = SCRIPT_DIR.parent +PLUGIN_DIR = SCRIPT_DIR / "solweig_qgis" +METADATA_PATH = PLUGIN_DIR / "metadata.txt" + + +def read_pyproject_version() -> str: + """Read the version from pyproject.toml (single source of truth).""" + pyproject = PROJECT_ROOT / "pyproject.toml" + text = pyproject.read_text() + match = re.search(r'^version\s*=\s*"([^"]+)"', text, re.MULTILINE) + if not match: + raise RuntimeError("Could not find version in pyproject.toml") + return match.group(1) + + +def pep440_to_qgis(version: str) -> str: + """Convert PEP 440 version (0.1.0b5) to QGIS metadata format (0.1.0-beta5).""" + version = re.sub(r"a(\d+)", r"-alpha\1", version) + version = re.sub(r"b(\d+)", r"-beta\1", version) + version = re.sub(r"rc(\d+)", r"-rc\1", version) + return version + + +def stamp_metadata_version(version: str): + """Update the version in metadata.txt to match pyproject.toml.""" + qgis_version = 
pep440_to_qgis(version) + text = METADATA_PATH.read_text() + new_text = re.sub(r"^version=.*$", f"version={qgis_version}", text, flags=re.MULTILINE) + if new_text == text and f"version={qgis_version}" not in text: + raise RuntimeError(f"Failed to update version in {METADATA_PATH}") + METADATA_PATH.write_text(new_text) + print(f" Stamped metadata.txt version={qgis_version}") + + # Warn if changelog doesn't mention this version + if qgis_version not in new_text.split("changelog=")[-1]: + print(f" WARNING: changelog in metadata.txt has no entry for {qgis_version}") + + +def copy_license(): + """Copy LICENSE from project root into the plugin directory (required by QGIS repo).""" + src = PROJECT_ROOT / "LICENSE" + dest = PLUGIN_DIR / "LICENSE" + if src.exists(): + shutil.copy2(src, dest) + print(f" Copied LICENSE into {PLUGIN_DIR.name}/") + else: + print(" WARNING: No LICENSE file found at project root") + + +def create_package_zip(version: str) -> Path: + """Create distributable ZIP file for QGIS Plugin Repository.""" + qgis_version = pep440_to_qgis(version) + zip_name = f"solweig-qgis-{qgis_version}.zip" + zip_path = SCRIPT_DIR / zip_name + + print(f"\nCreating {zip_name}...") + + with zipfile.ZipFile(zip_path, "w", zipfile.ZIP_DEFLATED) as zf: + for file_path in PLUGIN_DIR.rglob("*"): + if file_path.is_file(): + # Skip __pycache__, .pyc, and macOS metadata files + if "__pycache__" in str(file_path) or file_path.suffix == ".pyc": + continue + if file_path.name in (".DS_Store", "._DS_Store"): + continue + arcname = file_path.relative_to(SCRIPT_DIR) + zf.write(file_path, arcname) + + size_kb = zip_path.stat().st_size / 1024 + print(f" Created: {zip_path.name} ({size_kb:.0f} KB)") + return zip_path + + +def main(): + parser = argparse.ArgumentParser(description="Build SOLWEIG QGIS plugin") + parser.add_argument( + "--version", + default=None, + help="Override version (default: read from pyproject.toml)", + ) + parser.add_argument("--clean", action="store_true", 
help="Clean old ZIP artifacts") + args = parser.parse_args() + + print("=" * 60) + print("SOLWEIG QGIS Plugin Builder") + print("=" * 60) + + if args.clean: + print("\nCleaning build artifacts...") + for zip_file in SCRIPT_DIR.glob("solweig-qgis-*.zip"): + zip_file.unlink() + print(f" Removed: {zip_file.name}") + print("Done!") + return + + version = args.version or read_pyproject_version() + print(f"\n Version: {version} (from {'--version flag' if args.version else 'pyproject.toml'})") + + stamp_metadata_version(version) + copy_license() + zip_path = create_package_zip(version) + + print("\n" + "=" * 60) + print("Build complete!") + print("=" * 60) + print(f"\nPackage: {zip_path}") + print("\nTo install in QGIS:") + print(" 1. Plugins > Manage and Install Plugins > Install from ZIP") + print(f" 2. Select {zip_path.name}") + print(" 3. The plugin will prompt to install the solweig library on first use") + + +if __name__ == "__main__": + main() diff --git a/qgis_plugin/solweig_qgis/LICENSE b/qgis_plugin/solweig_qgis/LICENSE new file mode 100644 index 0000000..e72bfdd --- /dev/null +++ b/qgis_plugin/solweig_qgis/LICENSE @@ -0,0 +1,674 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. 
You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. 
Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. 
+ + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. 
+ + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. 
Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. 
+ + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. 
+ + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. 
If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. 
+ + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. 
+ + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. + + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the 
material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. 
+ + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. 
If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. 
+ + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. 
You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. 
The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. 
THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. 
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+    This program is free software: you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation, either version 3 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+  If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+    <program>  Copyright (C) <year>  <name of author>
+    This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+    This is free software, and you are welcome to redistribute it
+    under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License.  Of course, your program's commands
+might be different; for a GUI interface, you would use an "about box".
+
+  You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU GPL, see
+<https://www.gnu.org/licenses/>.
+
+  The GNU General Public License does not permit incorporating your program
+into proprietary programs.  If your program is a subroutine library, you
+may consider it more useful to permit linking proprietary applications with
+the library.  If this is what you want to do, use the GNU Lesser General
+Public License instead of this License.  But first, please read
+<https://www.gnu.org/licenses/why-not-lgpl.html>.
\ No newline at end of file diff --git a/qgis_plugin/solweig_qgis/__init__.py b/qgis_plugin/solweig_qgis/__init__.py new file mode 100644 index 0000000..8ffcbde --- /dev/null +++ b/qgis_plugin/solweig_qgis/__init__.py @@ -0,0 +1,480 @@ +""" +SOLWEIG QGIS Plugin + +Provides QGIS Processing algorithms for calculating Mean Radiant Temperature (Tmrt), +UTCI, and PET thermal comfort indices using the SOLWEIG model. + +Adapted from UMEP (Urban Multi-scale Environmental Predictor). +Original code by Fredrik Lindberg, Ting Sun, Sue Grimmond, Yihao Tang, and Nils Wallenberg. + +Citation: + Lindberg F, Grimmond CSB, Gabey A, Huang B, Kent CW, Sun T, Theeuwes N, Järvi L, + Ward H, Capel-Timms I, Chang YY, Jonsson P, Krave N, Liu D, Meyer D, Olofson F, + Tan JG, Wästberg D, Xue L, Zhang Z (2018) Urban Multi-scale Environmental Predictor + (UMEP) - An integrated tool for city-based climate services. + Environmental Modelling and Software 99, 70-87 + https://doi.org/10.1016/j.envsoft.2017.09.020 +""" + +from __future__ import annotations + +import os +import sys +from pathlib import Path + +# --------------------------------------------------------------------------- +# Dependency management +# --------------------------------------------------------------------------- + +_PLUGIN_DIR = Path(__file__).resolve().parent + + +def _read_required_version() -> str: + """ + Read the required solweig version from metadata.txt. + + The plugin version in metadata.txt is kept in sync with the solweig library + version by build_plugin.py, which reads pyproject.toml as the single source + of truth. The QGIS metadata format uses hyphens (0.1.0-beta5) while PEP 440 + uses letters (0.1.0b5), so we normalize here. 
+ """ + import configparser + + metadata_path = _PLUGIN_DIR / "metadata.txt" + config = configparser.ConfigParser() + config.read(metadata_path) + qgis_version = config.get("general", "version", fallback="0.0.0") + + # Normalize QGIS format (0.1.0-beta5) to PEP 440 (0.1.0b5) + import re + + normalized = re.sub(r"-?alpha", "a", qgis_version) + normalized = re.sub(r"-?beta", "b", normalized) + normalized = re.sub(r"-?rc", "rc", normalized) + return normalized + + +_REQUIRED_SOLWEIG_VERSION = _read_required_version() +_SOLWEIG_AVAILABLE = False +_SOLWEIG_OUTDATED = False # True when installed but too old +_SOLWEIG_SOURCE = None # "system", "development", or None +_SOLWEIG_IMPORT_ERROR = None +_SOLWEIG_INSTALLED_VERSION = None + + +def _parse_version(version_str: str) -> tuple: + """ + Parse a PEP 440 version string into a comparable tuple. + + Handles release versions (0.1.0) and pre-release versions (0.1.0b5, 0.1.0a1, 0.1.0rc1). + Pre-release versions sort before their release (0.1.0b5 < 0.1.0). + """ + import re + + match = re.match(r"^(\d+(?:\.\d+)*)(?:(a|b|rc)(\d+))?", version_str) + if not match: + return (0, 0, 0, "z", 0) # unparseable sorts high to avoid false outdated + + release = tuple(int(x) for x in match.group(1).split(".")) + pre_type = match.group(2) # "a", "b", "rc", or None + pre_num = int(match.group(3)) if match.group(3) else 0 + + # "z" sorts after "a", "b", "rc" — so final releases are higher than pre-releases + pre_key = pre_type if pre_type else "z" + return release + (pre_key, pre_num) + + +def _check_version(solweig_module) -> bool: + """ + Check if the imported solweig module meets the minimum version requirement. + + Sets _SOLWEIG_OUTDATED and _SOLWEIG_IMPORT_ERROR if the version is too old. + + Returns: + True if version is acceptable, False if outdated. 
+ """ + global _SOLWEIG_OUTDATED, _SOLWEIG_IMPORT_ERROR, _SOLWEIG_INSTALLED_VERSION + + installed = getattr(solweig_module, "__version__", None) or "0.0.0" + _SOLWEIG_INSTALLED_VERSION = installed + + # Version check (prefer robust PEP 440 parsing when available) + try: + from packaging.version import Version + + if Version(installed) < Version(_REQUIRED_SOLWEIG_VERSION): + _SOLWEIG_OUTDATED = True + _SOLWEIG_IMPORT_ERROR = ( + f"solweig {installed} is installed but this plugin requires >= {_REQUIRED_SOLWEIG_VERSION}" + ) + return False + except Exception: + # Fallback for minimal environments (should be rare) + if _parse_version(installed) < _parse_version(_REQUIRED_SOLWEIG_VERSION): + _SOLWEIG_OUTDATED = True + _SOLWEIG_IMPORT_ERROR = ( + f"solweig {installed} is installed but this plugin requires >= {_REQUIRED_SOLWEIG_VERSION}" + ) + return False + + # Feature check: ensure the imported SurfaceData supports the API used by this plugin. + # This guards against environments where a different/old `solweig` package is importable + # (or where version strings are missing/non-standard). + missing: list[str] = [] + surface_cls = getattr(solweig_module, "SurfaceData", None) + if surface_cls is None: + missing.append("SurfaceData") + else: + for method_name in ("preprocess", "fill_nan", "compute_valid_mask", "apply_valid_mask", "crop_to_valid_bbox"): + if not hasattr(surface_cls, method_name): + missing.append(f"SurfaceData.{method_name}()") + + if missing: + _SOLWEIG_OUTDATED = True + _SOLWEIG_IMPORT_ERROR = ( + "The imported solweig package is missing required APIs: " + + ", ".join(missing) + + f". Please upgrade solweig to >= {_REQUIRED_SOLWEIG_VERSION} and restart QGIS." + ) + return False + + return True + + +def _setup_solweig_path(): + """ + Set up the import path for solweig library. + + Priority: + 1. System-installed solweig (via pip) + 2. 
Development path (for local development) + """ + global _SOLWEIG_AVAILABLE, _SOLWEIG_OUTDATED, _SOLWEIG_SOURCE, _SOLWEIG_IMPORT_ERROR, _SOLWEIG_INSTALLED_VERSION + + # Already found in a previous call + if _SOLWEIG_AVAILABLE: + return + + def _try_import_system() -> bool: + global _SOLWEIG_AVAILABLE, _SOLWEIG_SOURCE, _SOLWEIG_IMPORT_ERROR + try: + import solweig # noqa: F401 + + if not _check_version(solweig): + return False + _SOLWEIG_AVAILABLE = True + _SOLWEIG_SOURCE = "system" + _SOLWEIG_IMPORT_ERROR = None + return True + except Exception as exc: + # Catch all exceptions, not just ImportError — older solweig versions + # may crash on import (e.g. rasterio incompatibility) and we must + # still load the plugin so the upgrade prompt can appear. + _SOLWEIG_IMPORT_ERROR = f"system import failed: {exc}" + return False + + def _try_import_dev(dev_path: Path) -> bool: + global _SOLWEIG_AVAILABLE, _SOLWEIG_SOURCE, _SOLWEIG_IMPORT_ERROR + if not (dev_path.exists() and (dev_path / "solweig").exists()): + return False + inserted = False + if str(dev_path) not in sys.path: + sys.path.insert(0, str(dev_path)) + inserted = True + try: + # If solweig was already imported (e.g. from a system install), + # remove the cached module so Python re-discovers it from pysrc/. + if "solweig" in sys.modules: + # Remove the main module and all submodules so the fresh + # import picks up the development source tree. + stale = [k for k in sys.modules if k == "solweig" or k.startswith("solweig.")] + for k in stale: + del sys.modules[k] + + import solweig # noqa: F401 + + if not _check_version(solweig): + return False + _SOLWEIG_AVAILABLE = True + _SOLWEIG_SOURCE = "development" + _SOLWEIG_IMPORT_ERROR = None + return True + except Exception as exc: + _SOLWEIG_IMPORT_ERROR = f"development import failed: {exc}" + return False + finally: + # If dev import didn't succeed, keep sys.path clean. 
+ if not _SOLWEIG_AVAILABLE and inserted and str(dev_path) in sys.path: + sys.path.remove(str(dev_path)) + + # Development mode - look for pysrc in parent directories + dev_paths = [ + _PLUGIN_DIR.parent.parent / "pysrc", # repo_root/pysrc + _PLUGIN_DIR.parent.parent.parent / "pysrc", # One more level up + ] + + # If we're running from a repository checkout (symlinked plugin), prefer local pysrc + # to avoid accidentally using an older system-installed solweig. + repo_root = _PLUGIN_DIR.parent.parent + prefer_dev = (repo_root / "pyproject.toml").exists() and (repo_root / "pysrc" / "solweig").exists() + + if prefer_dev: + for dev_path in dev_paths: + if _try_import_dev(dev_path): + return + if _try_import_system(): + return + else: + if _try_import_system(): + return + for dev_path in dev_paths: + if _try_import_dev(dev_path): + return + + # No solweig found + _SOLWEIG_AVAILABLE = False + _SOLWEIG_SOURCE = None + if _SOLWEIG_IMPORT_ERROR is None: + _SOLWEIG_IMPORT_ERROR = "solweig package not installed" + + +def _install_solweig() -> tuple[bool, str]: + """ + Install or upgrade solweig via pip in-process. + + Uses pip's internal API rather than subprocess because QGIS embeds Python + and sys.executable points to the QGIS binary, not a usable Python interpreter. + See: https://github.com/qgis/QGIS/issues/45646 + + Returns: + Tuple of (success, message). + """ + import contextlib + import io + + try: + from pip._internal.cli.main import main as pip_main + except ImportError: + return False, "pip is not available in this QGIS Python environment." + + try: + output = io.StringIO() + with contextlib.redirect_stdout(output), contextlib.redirect_stderr(output): + exit_code = pip_main(["install", "--upgrade", "--no-deps", "solweig"]) + if exit_code == 0: + return True, "SOLWEIG installed successfully." 
+ return False, f"pip install failed (exit code {exit_code}):\n{output.getvalue()}" + except Exception as e: + return False, f"Installation failed: {e}" + + +# Force GDAL backend before importing solweig — we know we're in QGIS, +# so bypass _compat.py heuristic detection to avoid rasterio/numpy crashes. +os.environ["UMEP_USE_GDAL"] = "1" + +# Run setup on module load +_setup_solweig_path() + + +def check_dependencies() -> tuple[bool, str]: + """ + Check if all required dependencies are available. + + Returns: + Tuple of (success, message) + """ + if _SOLWEIG_AVAILABLE: + return True, f"SOLWEIG library loaded ({_SOLWEIG_SOURCE})" + + if _SOLWEIG_OUTDATED: + msg = ( + f"SOLWEIG {_SOLWEIG_INSTALLED_VERSION} is installed but this plugin " + f"requires >= {_REQUIRED_SOLWEIG_VERSION}.\n\n" + "To upgrade manually:\n\n" + " In OSGeo4W Shell (Windows) or Terminal (macOS/Linux):\n" + " pip install --upgrade solweig\n\n" + "After upgrading, restart QGIS." + ) + return False, msg + + error_hint = f"\nLast import error: {_SOLWEIG_IMPORT_ERROR}\n" if _SOLWEIG_IMPORT_ERROR else "" + + msg = f"""SOLWEIG library not found.{error_hint} + +To install SOLWEIG manually: + + In OSGeo4W Shell (Windows) or Terminal (macOS/Linux): + pip install solweig + +After installation, restart QGIS and re-enable the plugin. 
+""" + return False, msg + + +def _prompt_install(): + """Offer to auto-install or upgrade solweig if it's missing or outdated.""" + global _SOLWEIG_AVAILABLE, _SOLWEIG_OUTDATED, _SOLWEIG_SOURCE, _SOLWEIG_IMPORT_ERROR, _SOLWEIG_INSTALLED_VERSION + + success, message = check_dependencies() + if success: + return + + try: + from qgis.PyQt.QtWidgets import QMessageBox + + if _SOLWEIG_OUTDATED: + title = "SOLWEIG Plugin - Update Required" + prompt = ( + f"SOLWEIG {_SOLWEIG_INSTALLED_VERSION} is installed but this plugin " + f"requires >= {_REQUIRED_SOLWEIG_VERSION}.\n\n" + "Would you like to upgrade now?\n\n" + "This will run: pip install --upgrade solweig" + ) + decline_msg = ( + "SOLWEIG was not upgraded. You can upgrade manually:\n\n" + " pip install --upgrade solweig\n\n" + "Then restart QGIS." + ) + else: + title = "SOLWEIG Plugin - Install Dependencies" + prompt = ( + "The SOLWEIG library is required but not installed.\n\n" + "Would you like to install it now?\n\n" + "This will run: pip install solweig" + ) + decline_msg = ( + "SOLWEIG was not installed. You can install it manually:\n\n pip install solweig\n\nThen restart QGIS." 
+ ) + + reply = QMessageBox.question( + None, + title, + prompt, + QMessageBox.StandardButton.Yes | QMessageBox.StandardButton.No, + QMessageBox.StandardButton.Yes, + ) + + if reply != QMessageBox.StandardButton.Yes: + QMessageBox.information(None, "SOLWEIG Plugin", decline_msg) + return + + # Show a wait cursor while installing + from qgis.PyQt.QtCore import Qt + from qgis.PyQt.QtWidgets import QApplication + + QApplication.setOverrideCursor(Qt.CursorShape.WaitCursor) + try: + ok, install_msg = _install_solweig() + finally: + QApplication.restoreOverrideCursor() + + if ok: + # Reset state so _setup_solweig_path() can re-check + _SOLWEIG_AVAILABLE = False + _SOLWEIG_OUTDATED = False + _SOLWEIG_IMPORT_ERROR = None + _SOLWEIG_INSTALLED_VERSION = None + + # Reload the module if it was already imported (upgrade case) + if "solweig" in sys.modules: + import importlib + + importlib.reload(sys.modules["solweig"]) + + _setup_solweig_path() + if _SOLWEIG_AVAILABLE: + QMessageBox.information( + None, + "SOLWEIG Plugin", + "SOLWEIG installed successfully! The plugin is ready to use.", + ) + else: + QMessageBox.information( + None, + "SOLWEIG Plugin", + "SOLWEIG installed successfully.\n\nPlease restart QGIS to complete setup.", + ) + else: + QMessageBox.warning( + None, + "SOLWEIG Plugin - Installation Failed", + f"{install_msg}\n\nYou can try installing manually:\n\n pip install solweig\n\nThen restart QGIS.", + ) + + except ImportError: + # Not in QGIS environment + print(f"WARNING: {message}") + + +# --------------------------------------------------------------------------- +# Plugin entry point +# --------------------------------------------------------------------------- + +from .provider import SolweigProvider # noqa: E402 + + +def classFactory(iface): + """ + QGIS plugin entry point. + + Called by QGIS when the plugin is loaded. Returns the provider instance + that will register all processing algorithms. 
+ + Args: + iface: QgisInterface instance providing access to QGIS components. + + Returns: + SolweigPlugin instance that manages the processing provider. + """ + return SolweigPlugin(iface) + + +class SolweigPlugin: + """ + Main plugin class that manages the SOLWEIG processing provider. + + This class handles plugin lifecycle (load/unload) and registers + the SolweigProvider with QGIS Processing framework. + """ + + def __init__(self, iface): + self.iface = iface + self.provider = None + + def initProcessing(self): + """Initialize the processing provider.""" + from qgis.core import QgsApplication + + self.provider = SolweigProvider() + QgsApplication.processingRegistry().addProvider(self.provider) + + def initGui(self): + """Initialize the plugin GUI (called when plugin is activated).""" + # Register the Processing provider first — unconditionally — so + # SOLWEIG always appears in the Processing Toolbox even when the + # library isn't installed yet. Showing a QMessageBox during + # initGui() can fail or block on some platforms (especially macOS), + # which would prevent initProcessing() from ever being called. + self.initProcessing() + + if not _SOLWEIG_AVAILABLE or _SOLWEIG_OUTDATED: + # Defer the install prompt to after the event loop starts, + # so it doesn't block plugin registration. 
+ from qgis.PyQt.QtCore import QTimer + + QTimer.singleShot(500, _prompt_install) + + def unload(self): + """Unload the plugin (called when plugin is deactivated).""" + from qgis.core import QgsApplication + + if self.provider: + QgsApplication.processingRegistry().removeProvider(self.provider) + + +# --------------------------------------------------------------------------- +# Module-level info for debugging +# --------------------------------------------------------------------------- + +__solweig_available__ = _SOLWEIG_AVAILABLE +__solweig_source__ = _SOLWEIG_SOURCE diff --git a/qgis_plugin/solweig_qgis/_make_icon.py b/qgis_plugin/solweig_qgis/_make_icon.py new file mode 100644 index 0000000..12c1ac3 --- /dev/null +++ b/qgis_plugin/solweig_qgis/_make_icon.py @@ -0,0 +1,228 @@ +"""Generate the SOLWEIG QGIS plugin icon using Pillow. + +A fun sun-with-sunglasses over a city skyline with a thermometer. +Palette: dark charcoal buildings, golden sun, red thermometer accent, white details. +Run once to produce icon.png, then delete this script. 
+""" + +import math +import os + +from PIL import Image, ImageDraw + +# ============================================================================= +# Palette — at most 4 colours + sky gradient +# ============================================================================= +DARK = (38, 42, 48) # charcoal — buildings, ground, sunglasses +MID = (58, 64, 72) # lighter charcoal — alternate buildings +GOLD = (255, 210, 50) # sun, rays, window glow +RED = (230, 60, 50) # thermometer, antenna blink, grin +WHITE = (255, 255, 255) # thermometer body, text, highlights + +SKY_TOP = (255, 107, 53) # warm orange (gradient only) +SKY_MID = (247, 201, 72) # golden (gradient only) +SKY_BOT = (135, 206, 235) # light blue (gradient only) + + +def lerp_color(c1, c2, t): + return tuple(int(a + (b - a) * t) for a, b in zip(c1, c2, strict=False)) + + +def draw_icon(size=128): + img = Image.new("RGBA", (size, size), (0, 0, 0, 0)) + d = ImageDraw.Draw(img) + s = size / 64 # scale factor relative to 64px design + + # --- Sky gradient background --- + for y in range(size): + t = y / size + color = lerp_color(SKY_TOP, SKY_MID, t / 0.5) if t < 0.5 else lerp_color(SKY_MID, SKY_BOT, (t - 0.5) / 0.5) + d.line([(0, y), (size - 1, y)], fill=color) + + # Rounded corners mask + mask = Image.new("L", (size, size), 0) + mask_d = ImageDraw.Draw(mask) + mask_d.rounded_rectangle([0, 0, size - 1, size - 1], radius=int(8 * s), fill=255) + img.putalpha(mask) + d = ImageDraw.Draw(img) + + # --- Sun glow --- + glow_img = Image.new("RGBA", (size, size), (0, 0, 0, 0)) + glow_d = ImageDraw.Draw(glow_img) + cx_sun, cy_sun = int(32 * s), int(13 * s) + for radius in range(int(22 * s), 0, -1): + t = radius / (22 * s) + alpha = int(55 * (1 - t)) + glow_d.ellipse( + [cx_sun - radius, cy_sun - radius, cx_sun + radius, cy_sun + radius], + fill=(*GOLD[:3], alpha), + ) + img = Image.alpha_composite(img, glow_img) + d = ImageDraw.Draw(img) + + # --- Sun body --- + sr = int(9 * s) + d.ellipse( + [cx_sun - sr, cy_sun - 
sr, cx_sun + sr, cy_sun + sr], + fill=GOLD, + outline=(220, 180, 30), + width=max(1, int(s)), + ) + + # --- Sun rays --- + ray_inner = int(10.5 * s) + ray_outer = int(13 * s) + ray_w = max(1, int(1.8 * s)) + for angle_deg in range(0, 360, 45): + if angle_deg == 180: + continue + a = math.radians(angle_deg) + x1 = cx_sun + int(ray_inner * math.sin(a)) + y1 = cy_sun - int(ray_inner * math.cos(a)) + x2 = cx_sun + int(ray_outer * math.sin(a)) + y2 = cy_sun - int(ray_outer * math.cos(a)) + d.line([(x1, y1), (x2, y2)], fill=GOLD, width=ray_w) + + # --- Sunglasses (DARK) --- + gw, gh = int(3.2 * s), int(2.4 * s) + lx, ly = int(29 * s), int(12 * s) + rx, ry = int(35 * s), int(12 * s) + d.ellipse([lx - gw, ly - gh, lx + gw, ly + gh], fill=DARK) + d.ellipse([rx - gw, ry - gh, rx + gw, ry + gh], fill=DARK) + # Bridge + arms + bw = max(1, int(1.2 * s)) + d.line([(lx + gw - int(s), ly), (rx - gw + int(s), ry)], fill=DARK, width=bw) + aw = max(1, int(s)) + d.line([(lx - gw, ly - int(0.5 * s)), (lx - gw - int(2.5 * s), ly - int(2 * s))], fill=DARK, width=aw) + d.line([(rx + gw, ry - int(0.5 * s)), (rx + gw + int(2.5 * s), ry - int(2 * s))], fill=DARK, width=aw) + # Lens shine + shr = int(1.2 * s) + d.ellipse( + [lx - int(1.5 * s) - shr, ly - int(0.8 * s) - shr, lx - int(1.5 * s) + shr, ly - int(0.8 * s) + shr], + fill=(255, 255, 255, 70), + ) + d.ellipse( + [rx - int(1.5 * s) - shr, ry - int(0.8 * s) - shr, rx - int(1.5 * s) + shr, ry - int(0.8 * s) + shr], + fill=(255, 255, 255, 70), + ) + + # --- Cheeky grin (RED) --- + d.arc([int(28.5 * s), int(14.5 * s), int(35.5 * s), int(19 * s)], 0, 180, fill=RED, width=max(1, int(1.3 * s))) + + # --- Heat shimmer wavy lines (RED, semi-transparent) --- + heat_color = (*RED[:3], 90) + for hx in [int(15 * s), int(38 * s), int(52 * s)]: + for yy in range(int(22 * s), int(37 * s), int(1.5 * s) or 1): + offset = int(2 * s * math.sin(yy * 0.25)) + for dx in range(max(1, int(0.8 * s))): + d.point((hx + offset + dx, yy), fill=heat_color) + + # --- 
City skyline (DARK / MID only, GOLD windows) --- + # Buildings extend to y=64 (full bottom) so rounded mask clips them cleanly + buildings = [ + # (x, y, w, h, color) + (4, 32, 8, 32, DARK), + (13, 37, 10, 27, MID), + (31, 30, 9, 34, DARK), + (41, 42, 10, 22, MID), + (52, 35, 8, 29, DARK), + ] + + for bx, by, bw, bh, color in buildings: + x1, y1 = int(bx * s), int(by * s) + x2, y2 = int((bx + bw) * s), int((by + bh) * s) + d.rectangle([x1, y1, x2, y2], fill=color) + + # Windows — GOLD only, varying alpha for life + wy = y1 + int(2 * s) + win_idx = 0 + while wy + int(2 * s) < y2 - int(1 * s): + wx = x1 + int(1.5 * s) + while wx + int(2 * s) < x2 - int(0.5 * s): + alpha = [210, 140, 180, 100, 220, 160][win_idx % 6] + d.rectangle( + [wx, wy, wx + int(2 * s), wy + int(2 * s)], + fill=(*GOLD[:3], alpha), + ) + wx += int(3.5 * s) + win_idx += 1 + wy += int(4 * s) + + # --- Tree (DARK trunk, MID canopy — stays monochromatic) --- + trunk_x = int(25.5 * s) + trunk_w = max(1, int(1 * s)) + d.rectangle( + [trunk_x - trunk_w, int(48 * s), trunk_x + trunk_w, int(64 * s)], + fill=DARK, + ) + tree_r = int(4.5 * s) + d.ellipse( + [trunk_x - tree_r, int(45 * s) - tree_r, trunk_x + tree_r, int(45 * s) + tree_r], + fill=MID, + ) + d.ellipse( + [trunk_x - int(3 * s), int(46.5 * s) - int(3 * s), trunk_x + int(1 * s), int(46.5 * s) + int(3 * s)], + fill=(68, 75, 84), # slightly lighter MID variant + ) + d.ellipse( + [trunk_x - int(1 * s), int(46.5 * s) - int(3 * s), trunk_x + int(3.5 * s), int(46.5 * s) + int(3 * s)], + fill=MID, + ) + + # --- Antenna on building 3 (DARK pole, RED blink) --- + ant_x = int(35.5 * s) + d.line([(ant_x, int(30 * s)), (ant_x, int(25 * s))], fill=MID, width=max(1, int(1.2 * s))) + br = int(1.2 * s) + d.ellipse([ant_x - br, int(25 * s) - br, ant_x + br, int(25 * s) + br], fill=RED) + + # --- Ground strip (DARK, semi-transparent) — full width to bottom edge --- + d.rectangle([0, int(56 * s), size - 1, size - 1], fill=(*DARK[:3], 100)) + + # --- Thermometer 
(WHITE body, RED mercury) --- + tx = int(57 * s) + ty_top, ty_bot = int(3 * s), int(19 * s) + tw = int(1.8 * s) + bulb_r = int(3 * s) + + # White body + d.rounded_rectangle( + [tx - tw, ty_top, tx + tw, ty_bot], + radius=tw, + fill=WHITE, + outline=(*DARK[:3], 120), + width=max(1, int(0.5 * s)), + ) + # Red mercury + d.rounded_rectangle( + [tx - int(1 * s), int(6 * s), tx + int(1 * s), ty_bot], + radius=int(1 * s), + fill=RED, + ) + # Red bulb + d.ellipse( + [tx - bulb_r, ty_bot - int(1 * s), tx + bulb_r, ty_bot + bulb_r + int(1 * s)], + fill=RED, + outline=(*DARK[:3], 120), + width=max(1, int(0.5 * s)), + ) + # Tick marks (DARK) + for tick_y in [int(6 * s), int(9 * s), int(12 * s), int(15 * s)]: + d.line( + [(tx - tw - int(1 * s), tick_y), (tx - tw + int(0.5 * s), tick_y)], + fill=(*DARK[:3], 150), + width=max(1, int(0.5 * s)), + ) + + return img + + +if __name__ == "__main__": + here = os.path.dirname(os.path.abspath(__file__)) + + icon_128 = draw_icon(128) + icon_128.save(os.path.join(here, "icon_128.png")) + print("Saved icon_128.png") + + icon_64 = icon_128.resize((64, 64), Image.Resampling.LANCZOS) + icon_64.save(os.path.join(here, "icon.png")) + print("Saved icon.png (64x64)") diff --git a/qgis_plugin/solweig_qgis/algorithms/__init__.py b/qgis_plugin/solweig_qgis/algorithms/__init__.py new file mode 100644 index 0000000..ef34e4d --- /dev/null +++ b/qgis_plugin/solweig_qgis/algorithms/__init__.py @@ -0,0 +1 @@ +"""SOLWEIG Processing Algorithms.""" diff --git a/qgis_plugin/solweig_qgis/algorithms/base.py b/qgis_plugin/solweig_qgis/algorithms/base.py new file mode 100644 index 0000000..51439be --- /dev/null +++ b/qgis_plugin/solweig_qgis/algorithms/base.py @@ -0,0 +1,438 @@ +""" +Base algorithm class for SOLWEIG processing algorithms. + +Provides shared utilities for loading rasters, saving outputs, +and integrating with QGIS. 
+""" + +from __future__ import annotations + +import os +import tempfile +from pathlib import Path +from typing import TYPE_CHECKING, Any + +import numpy as np +from osgeo import gdal +from qgis.core import ( + QgsProcessingAlgorithm, + QgsProcessingContext, + QgsProcessingException, + QgsProcessingFeedback, + QgsProject, + QgsRasterLayer, +) +from qgis.PyQt.QtCore import QCoreApplication + +if TYPE_CHECKING: + from numpy.typing import NDArray + + +class SolweigAlgorithmBase(QgsProcessingAlgorithm): + """ + Base class for all SOLWEIG processing algorithms. + + Provides common functionality: + - Raster loading via GDAL + - Georeferenced output saving + - Thermal comfort styling for outputs + - Translation support + """ + + def tr(self, string: str) -> str: + """Translate string to current locale.""" + return QCoreApplication.translate("SolweigProcessing", string) + + def createInstance(self): + """Return new instance of algorithm.""" + return self.__class__() + + def group(self) -> str: + """Return algorithm group name (empty = directly under provider).""" + return "" + + def groupId(self) -> str: + """Return algorithm group ID (empty = directly under provider).""" + return "" + + def helpUrl(self) -> str: + """Return URL to algorithm documentation.""" + return "https://umep-docs.readthedocs.io/" + + # ------------------------------------------------------------------------- + # SOLWEIG Import Helper + # ------------------------------------------------------------------------- + + def import_solweig(self): + """ + Import the solweig library. + + Returns: + The imported solweig module. + + Raises: + QgsProcessingException: If solweig cannot be imported. + """ + try: + from .. import check_dependencies + + success, message = check_dependencies() + if not success: + raise QgsProcessingException(message) + + import solweig + + return solweig + except QgsProcessingException: + raise + except Exception as e: + raise QgsProcessingException("SOLWEIG library not found. 
Install it with: pip install solweig") from e + + # ------------------------------------------------------------------------- + # Raster Loading + # ------------------------------------------------------------------------- + + def load_raster_from_layer(self, layer: QgsRasterLayer) -> tuple[NDArray[np.floating], list[float], str]: + """ + Load QGIS raster layer to numpy array using GDAL. + + Args: + layer: QGIS raster layer to load. + + Returns: + tuple of (array, geotransform, crs_wkt): + - array: 2D numpy float32 array + - geotransform: GDAL 6-tuple [x_origin, x_res, 0, y_origin, 0, -y_res] + - crs_wkt: Coordinate reference system as WKT string + + Raises: + QgsProcessingException: If raster cannot be opened. + """ + source = layer.source() + ds = gdal.Open(source, gdal.GA_ReadOnly) + if ds is None: + raise QgsProcessingException(f"Cannot open raster: {source}") + + try: + band = ds.GetRasterBand(1) + array = band.ReadAsArray().astype(np.float32) + + # Handle nodata — only honor negative sentinel values (e.g. -9999) + # to avoid converting valid zero-height pixels to NaN + nodata = band.GetNoDataValue() + if nodata is not None and nodata < 0: + array = np.where(array == nodata, np.nan, array) + + geotransform = list(ds.GetGeoTransform()) + crs_wkt = ds.GetProjection() + + return array, geotransform, crs_wkt + finally: + ds = None # Close dataset + + def load_optional_raster( + self, + parameters: dict[str, Any], + param_name: str, + context: QgsProcessingContext, + ) -> NDArray[np.floating] | None: + """ + Load optional raster parameter, return None if not provided. + + Args: + parameters: Algorithm parameters dict. + param_name: Name of the raster parameter. + context: Processing context. + + Returns: + Numpy array if parameter provided, None otherwise. 
+ """ + if param_name not in parameters or not parameters[param_name]: + return None + + layer = self.parameterAsRasterLayer(parameters, param_name, context) + if layer is None: + return None + + array, _, _ = self.load_raster_from_layer(layer) + return array + + def get_pixel_size_from_layer(self, layer: QgsRasterLayer) -> float: + """ + Extract pixel size from raster layer. + + Args: + layer: QGIS raster layer. + + Returns: + Pixel size in meters (assumes square pixels). + """ + source = layer.source() + ds = gdal.Open(source, gdal.GA_ReadOnly) + if ds is None: + raise QgsProcessingException(f"Cannot open raster: {source}") + + try: + gt = ds.GetGeoTransform() + # gt[1] is x pixel size, gt[5] is y pixel size (negative) + pixel_size = abs(gt[1]) + return pixel_size + finally: + ds = None + + # ------------------------------------------------------------------------- + # Output Saving + # ------------------------------------------------------------------------- + + def save_georeferenced_output( + self, + array: NDArray[np.floating], + output_path: str | Path, + geotransform: list[float], + crs_wkt: str, + nodata: float = -9999.0, + feedback: QgsProcessingFeedback | None = None, + ) -> str: + """ + Save numpy array to GeoTIFF with proper georeferencing. + + Uses Cloud-Optimized GeoTIFF (COG) format with LZW compression. + + Args: + array: 2D numpy array to save. + output_path: Path for output GeoTIFF. + geotransform: GDAL geotransform [x_origin, x_res, 0, y_origin, 0, -y_res]. + crs_wkt: Coordinate reference system as WKT string. + nodata: NoData value to use. Default -9999. + feedback: Optional feedback for progress reporting. + + Returns: + Path to saved file. 
+ """ + output_path = str(output_path) + + # Replace NaN with nodata + array_out = np.where(np.isnan(array), nodata, array).astype(np.float32) + + # Create output directory if needed + os.makedirs(os.path.dirname(output_path) or ".", exist_ok=True) + + # Create GeoTIFF + driver = gdal.GetDriverByName("GTiff") + rows, cols = array_out.shape + ds = driver.Create( + output_path, + cols, + rows, + 1, # bands + gdal.GDT_Float32, + options=["COMPRESS=LZW", "TILED=YES"], + ) + + if ds is None: + raise QgsProcessingException(f"Cannot create output raster: {output_path}") + + try: + ds.SetGeoTransform(geotransform) + ds.SetProjection(crs_wkt) + + band = ds.GetRasterBand(1) + band.WriteArray(array_out) + band.SetNoDataValue(nodata) + band.FlushCache() + finally: + ds = None # Close and flush + + if feedback: + feedback.pushInfo(f"Saved: {output_path}") + + return output_path + + def get_output_path( + self, + parameters: dict[str, Any], + param_name: str, + default_name: str, + context: QgsProcessingContext, + ) -> str: + """ + Get output path from parameter or create temp file. + + Args: + parameters: Algorithm parameters. + param_name: Output parameter name. + default_name: Default filename if not specified. + context: Processing context. + + Returns: + Path for output file. 
+ """ + if param_name in parameters and parameters[param_name]: + output_dest = self.parameterAsOutputLayer(parameters, param_name, context) + if output_dest: + return output_dest + + # Create temp file + temp_dir = Path(tempfile.gettempdir()) / "solweig_qgis_output" + temp_dir.mkdir(parents=True, exist_ok=True) + return str(temp_dir / default_name) + + # ------------------------------------------------------------------------- + # Canvas Integration + # ------------------------------------------------------------------------- + + def add_raster_to_canvas( + self, + path: str, + layer_name: str, + style: str | None = None, + context: QgsProcessingContext | None = None, + ) -> QgsRasterLayer: + """ + Add raster layer to QGIS canvas with optional styling. + + Args: + path: Path to raster file. + layer_name: Display name in layer panel. + style: Style preset ('tmrt', 'utci', 'pet', 'shadow', or None). + context: Processing context. + + Returns: + The created QgsRasterLayer. + + Raises: + QgsProcessingException: If layer cannot be loaded. + """ + layer = QgsRasterLayer(path, layer_name) + if not layer.isValid(): + raise QgsProcessingException(f"Cannot load output layer: {path}") + + # Apply thermal comfort color ramp if requested + if style in ("tmrt", "utci", "pet"): + self.apply_thermal_comfort_style(layer, style) + elif style == "shadow": + self.apply_shadow_style(layer) + + # Add to project + QgsProject.instance().addMapLayer(layer) + + return layer + + def apply_thermal_comfort_style(self, layer: QgsRasterLayer, style_type: str) -> None: + """ + Apply thermal comfort color ramp for visualization. + + Args: + layer: QgsRasterLayer to style. + style_type: 'tmrt', 'utci', or 'pet'. + """ + from qgis.core import ( + QgsColorRampShader, + QgsRasterShader, + QgsSingleBandPseudoColorRenderer, + ) + from qgis.PyQt.QtGui import QColor + + # Define color ramps based on style type + if style_type == "utci": + # UTCI thermal stress categories (ISO 7730 / Jendritzky et al. 
2012) + color_points = [ + (-40, QColor(0, 0, 128), "Extreme cold stress"), + (-27, QColor(0, 100, 200), "Very strong cold stress"), + (-13, QColor(51, 153, 255), "Strong cold stress"), + (0, QColor(153, 204, 255), "Moderate cold stress"), + (9, QColor(204, 255, 204), "Slight cold stress"), + (26, QColor(255, 255, 102), "No thermal stress"), + (32, QColor(255, 204, 51), "Moderate heat stress"), + (38, QColor(255, 128, 0), "Strong heat stress"), + (46, QColor(255, 51, 51), "Very strong heat stress"), + (60, QColor(128, 0, 0), "Extreme heat stress"), + ] + else: # tmrt, pet - use generic thermal ramp + color_points = [ + (0, QColor(0, 0, 200), "Cold"), + (15, QColor(51, 153, 255), "Cool"), + (25, QColor(153, 255, 153), "Comfortable"), + (35, QColor(255, 255, 102), "Warm"), + (45, QColor(255, 153, 51), "Hot"), + (55, QColor(255, 51, 51), "Very hot"), + (70, QColor(128, 0, 0), "Extreme"), + ] + + # Create shader + shader = QgsRasterShader() + ramp_shader = QgsColorRampShader() + ramp_shader.setColorRampType(QgsColorRampShader.Type.Interpolated) + + items = [] + for value, color, label in color_points: + items.append(QgsColorRampShader.ColorRampItem(value, color, label)) + + ramp_shader.setColorRampItemList(items) + shader.setRasterShaderFunction(ramp_shader) + + # Apply renderer + renderer = QgsSingleBandPseudoColorRenderer( + layer.dataProvider(), + 1, # band + shader, + ) + layer.setRenderer(renderer) + layer.triggerRepaint() + + def apply_shadow_style(self, layer: QgsRasterLayer) -> None: + """ + Apply shadow mask styling (binary: sunlit/shadow). + + Args: + layer: QgsRasterLayer to style. 
+ """ + from qgis.core import ( + QgsColorRampShader, + QgsRasterShader, + QgsSingleBandPseudoColorRenderer, + ) + from qgis.PyQt.QtGui import QColor + + shader = QgsRasterShader() + ramp_shader = QgsColorRampShader() + ramp_shader.setColorRampType(QgsColorRampShader.Type.Interpolated) + + items = [ + QgsColorRampShader.ColorRampItem(0, QColor(255, 255, 153), "Sunlit"), + QgsColorRampShader.ColorRampItem(1, QColor(102, 102, 102), "Shadow"), + ] + + ramp_shader.setColorRampItemList(items) + shader.setRasterShaderFunction(ramp_shader) + + renderer = QgsSingleBandPseudoColorRenderer(layer.dataProvider(), 1, shader) + layer.setRenderer(renderer) + layer.triggerRepaint() + + # ------------------------------------------------------------------------- + # Validation Helpers + # ------------------------------------------------------------------------- + + def check_grid_shapes_match( + self, + reference_shape: tuple[int, int], + arrays: dict[str, NDArray | None], + feedback: QgsProcessingFeedback, + ) -> None: + """ + Verify all provided arrays match reference shape. + + Args: + reference_shape: Expected (rows, cols) shape. + arrays: Dict of {name: array} to check (None values skipped). + feedback: For reporting errors. + + Raises: + QgsProcessingException: If shapes don't match. 
+ """ + for name, arr in arrays.items(): + if arr is not None and arr.shape != reference_shape: + raise QgsProcessingException( + f"Grid shape mismatch: {name} has shape {arr.shape}, expected {reference_shape} (matching DSM)" + ) diff --git a/qgis_plugin/solweig_qgis/algorithms/calculation/__init__.py b/qgis_plugin/solweig_qgis/algorithms/calculation/__init__.py new file mode 100644 index 0000000..4567fce --- /dev/null +++ b/qgis_plugin/solweig_qgis/algorithms/calculation/__init__.py @@ -0,0 +1 @@ +"""Unified SOLWEIG calculation algorithm.""" diff --git a/qgis_plugin/solweig_qgis/algorithms/calculation/solweig_calculation.py b/qgis_plugin/solweig_qgis/algorithms/calculation/solweig_calculation.py new file mode 100644 index 0000000..c0207d3 --- /dev/null +++ b/qgis_plugin/solweig_qgis/algorithms/calculation/solweig_calculation.py @@ -0,0 +1,817 @@ +""" +Unified SOLWEIG Calculation Algorithm + +Supports single timestep, EPW timeseries, or UMEP met timeseries, +with optional tiled processing and UTCI/PET post-processing. 
+""" + +from __future__ import annotations + +import contextlib +import os +import time +from pathlib import Path + +import numpy as np +from osgeo import gdal +from qgis.core import ( + QgsProcessingContext, + QgsProcessingException, + QgsProcessingFeedback, + QgsProcessingOutputFolder, + QgsProcessingOutputNumber, + QgsProcessingParameterBoolean, + QgsProcessingParameterDefinition, + QgsProcessingParameterEnum, + QgsProcessingParameterFile, + QgsProcessingParameterFolderDestination, +) + +from ...utils.converters import ( + build_materials_from_lc_mapping, + create_human_params_from_parameters, + create_location_from_parameters, + create_physics_from_parameters, + create_weather_from_parameters, + load_prepared_surface, + load_weather_from_epw, + load_weather_from_umep_met, +) +from ...utils.parameters import ( + add_date_filter_parameters, + add_epw_parameters, + add_heat_threshold_parameters, + add_human_body_parameters, + add_human_parameters, + add_location_parameters, + add_options_parameters, + add_umep_met_parameters, + add_vegetation_parameters, + add_weather_parameters, +) +from ..base import SolweigAlgorithmBase + + +def _apply_saved_surface_settings( + prepared_dir: str, + parameters: dict, + feedback: QgsProcessingFeedback, +) -> None: + """Use ``parametersforsolweig.json`` saved during surface preparation. + + If the prepared surface directory contains a ``parametersforsolweig.json`` + and the user has not supplied a custom materials file, point the + ``CUSTOM_MATERIALS_FILE`` parameter at it so that ``build_materials_from_lc_mapping`` + picks it up automatically. 
+ """ + params_path = os.path.join(prepared_dir, "parametersforsolweig.json") + if os.path.exists(params_path): + # Only apply if the user hasn't explicitly provided a custom file + custom = parameters.get("CUSTOM_MATERIALS_FILE") + if not custom: + parameters["CUSTOM_MATERIALS_FILE"] = params_path + feedback.pushInfo("Using saved parametersforsolweig.json from prepared surface") + + +class SolweigCalculationAlgorithm(SolweigAlgorithmBase): + """ + Unified SOLWEIG calculation algorithm. + + Combines single timestep, timeseries, and optional UTCI/PET post-processing + into a single Processing algorithm. Large rasters are automatically tiled. + """ + + # Weather source enum values + WEATHER_SINGLE = 0 + WEATHER_EPW = 1 + WEATHER_UMEP = 2 + + def name(self) -> str: + return "solweig_calculation" + + def displayName(self) -> str: + return self.tr("3. SOLWEIG Calculation") + + def shortHelpString(self) -> str: + return self.tr( + """Calculate Mean Radiant Temperature (Tmrt) with SOLWEIG. + +Surface data: +Provide the prepared surface directory from "Prepare Surface Data". +All rasters (DSM, CDSM, DEM, walls) are loaded automatically. + +Weather modes: +
+  • Single timestep: Manual weather input for one date/time
+  • EPW weather file: Load hourly data from an EnergyPlus Weather file
+  • UMEP met file: Load from UMEP/SUEWS meteorological forcing files
+For timeseries modes, thermal state (ground heating/cooling) accumulates +across timesteps for physically accurate results. Large rasters are +automatically processed using overlapping tiles to manage memory. + +Post-processing (optional): +
+  • UTCI - fast polynomial (~200 timesteps/sec)
+  • PET - iterative heat balance (~4 timesteps/sec, ~50x slower than UTCI)
+ +Outputs: +GeoTIFF files organised into subfolders of the output directory: +
+  output_dir/
+    tmrt/        tmrt_YYYYMMDD_HHMM.tif  (always)
+    shadow/      shadow_...              (if selected)
+    kdown/       kdown_...               (if selected)
+    utci/        utci_...                (if enabled)
+    pet/         pet_...                 (if enabled)
+
+ +Recommended workflow: +
+  1. Run "Prepare Surface Data" to align rasters and compute walls
+  2. Run "Compute Sky View Factor" on the prepared surface (optional, for anisotropic sky)
+  3. Run this algorithm with the prepared surface directory
""" + ) + + def group(self) -> str: + return "" + + def groupId(self) -> str: + return "" + + def initAlgorithm(self, config=None): + """Define algorithm parameters.""" + # --- Prepared surface directory (required) --- + self.addParameter( + QgsProcessingParameterFile( + "PREPARED_SURFACE_DIR", + self.tr("Prepared surface directory (from 'Prepare Surface Data')"), + behavior=QgsProcessingParameterFile.Behavior.Folder, + ) + ) + + # --- Location --- + add_location_parameters(self) + + # --- Weather source selector --- + self.addParameter( + QgsProcessingParameterEnum( + "WEATHER_SOURCE", + self.tr("Weather data source"), + options=[ + "Single timestep (manual entry)", + "EPW weather file (timeseries)", + "UMEP met file (timeseries)", + ], + defaultValue=self.WEATHER_EPW, + ) + ) + + # --- Single timestep weather (advanced - collapsed by default) --- + add_weather_parameters(self) + for name in ("DATETIME", "TEMPERATURE", "HUMIDITY", "GLOBAL_RADIATION", "WIND_SPEED", "PRESSURE"): + param = self.parameterDefinition(name) + if param: + param.setFlags(param.flags() | QgsProcessingParameterDefinition.Flag.FlagAdvanced) + + # --- EPW weather --- + add_epw_parameters(self) + + # --- UMEP met weather --- + add_umep_met_parameters(self) + + # --- Date/time filtering (shared by EPW and UMEP) --- + add_date_filter_parameters(self) + + # --- Human parameters --- + add_human_parameters(self) + + # --- Options --- + add_options_parameters(self) + + # --- Vegetation (advanced) --- + add_vegetation_parameters(self) + + # --- Post-processing --- + self.addParameter( + QgsProcessingParameterBoolean( + "COMPUTE_UTCI", + self.tr("Compute UTCI (Universal Thermal Climate Index)"), + defaultValue=False, + ) + ) + + self.addParameter( + QgsProcessingParameterBoolean( + "COMPUTE_PET", + self.tr("Compute PET (Physiological Equivalent Temperature) - ~50x slower than UTCI"), + defaultValue=False, + ) + ) + + # PET body parameters (advanced) + add_human_body_parameters(self) + for name in 
("AGE", "WEIGHT", "HEIGHT", "SEX", "ACTIVITY", "CLOTHING"): + param = self.parameterDefinition(name) + if param: + param.setFlags(param.flags() | QgsProcessingParameterDefinition.Flag.FlagAdvanced) + + # --- Heat-stress thresholds (for UTCI exceedance summary grids) --- + add_heat_threshold_parameters(self) + + # --- Output selection --- + self.addParameter( + QgsProcessingParameterBoolean( + "OUTPUT_TMRT", + self.tr("Save Tmrt (Mean Radiant Temperature) per timestep"), + defaultValue=True, + ) + ) + self.addParameter( + QgsProcessingParameterBoolean( + "OUTPUT_SHADOW", + self.tr("Save shadow fraction"), + defaultValue=False, + ) + ) + self.addParameter( + QgsProcessingParameterBoolean( + "OUTPUT_KDOWN", + self.tr("Save Kdown (incoming shortwave)"), + defaultValue=False, + ) + ) + self.addParameter( + QgsProcessingParameterBoolean( + "OUTPUT_KUP", + self.tr("Save Kup (reflected shortwave)"), + defaultValue=False, + ) + ) + self.addParameter( + QgsProcessingParameterBoolean( + "OUTPUT_LDOWN", + self.tr("Save Ldown (incoming longwave)"), + defaultValue=False, + ) + ) + self.addParameter( + QgsProcessingParameterBoolean( + "OUTPUT_LUP", + self.tr("Save Lup (emitted longwave)"), + defaultValue=False, + ) + ) + + # --- Output directory --- + self.addParameter( + QgsProcessingParameterFolderDestination( + "OUTPUT_DIR", + self.tr("Output directory (defaults to 'results' inside prepared surface dir)"), + optional=True, + ) + ) + + # --- Output metadata --- + self.addOutput( + QgsProcessingOutputFolder( + "OUTPUT_FOLDER", + self.tr("Output folder"), + ) + ) + + self.addOutput( + QgsProcessingOutputNumber( + "TIMESTEP_COUNT", + self.tr("Number of timesteps processed"), + ) + ) + + self.addOutput( + QgsProcessingOutputNumber( + "UTCI_COUNT", + self.tr("Number of UTCI files created"), + ) + ) + + self.addOutput( + QgsProcessingOutputNumber( + "PET_COUNT", + self.tr("Number of PET files created"), + ) + ) + + def processAlgorithm( + self, + parameters: dict, + context: 
QgsProcessingContext, + feedback: QgsProcessingFeedback, + ) -> dict: + """Execute the algorithm.""" + feedback.pushInfo("=" * 60) + feedback.pushInfo("SOLWEIG Calculation") + feedback.pushInfo("=" * 60) + + start_time = time.time() + + # Import solweig + solweig = self.import_solweig() + + # Determine weather mode + weather_mode = self.parameterAsEnum(parameters, "WEATHER_SOURCE", context) + is_single = weather_mode == self.WEATHER_SINGLE + mode_names = {0: "Single timestep", 1: "EPW timeseries", 2: "UMEP met timeseries"} + feedback.pushInfo(f"Mode: {mode_names.get(weather_mode, 'Unknown')}") + + # Step 1: Load surface data from prepared directory + feedback.setProgressText("Loading surface data...") + feedback.setProgress(5) + + prepared_dir = self.parameterAsFile(parameters, "PREPARED_SURFACE_DIR", context) + surface = load_prepared_surface(prepared_dir, feedback) + + # Apply saved settings from surface preparation as parameter defaults + _apply_saved_surface_settings(prepared_dir, parameters, feedback) + + if feedback.isCanceled(): + return {} + + # Step 2: Create Location + feedback.setProgressText("Setting up location...") + feedback.setProgress(10) + + location = create_location_from_parameters(parameters, surface, feedback) + + if feedback.isCanceled(): + return {} + + # Step 3: Load weather + feedback.setProgressText("Loading weather data...") + feedback.setProgress(15) + + # Parse shared date/hour filters (used by both EPW and UMEP) + start_qdt = self.parameterAsDateTime(parameters, "START_DATE", context) + end_qdt = self.parameterAsDateTime(parameters, "END_DATE", context) + start_dt = start_qdt if start_qdt.isValid() else None + end_dt = end_qdt if end_qdt.isValid() else None + hours_filter = self.parameterAsString(parameters, "HOURS_FILTER", context) + + if is_single: + weather = create_weather_from_parameters(parameters, feedback) + weather_series = [weather] + elif weather_mode == self.WEATHER_EPW: + epw_path = self.parameterAsFile(parameters, 
"EPW_FILE", context) + weather_series = load_weather_from_epw( + epw_path=epw_path, + start_dt=start_dt, + end_dt=end_dt, + hours_filter=hours_filter, + feedback=feedback, + ) + if not weather_series: + raise QgsProcessingException("No timesteps found in specified date range") + elif weather_mode == self.WEATHER_UMEP: + umep_path = self.parameterAsFile(parameters, "UMEP_MET_FILE", context) + weather_series = load_weather_from_umep_met( + met_path=umep_path, + start_dt=start_dt, + end_dt=end_dt, + hours_filter=hours_filter, + feedback=feedback, + ) + if not weather_series: + raise QgsProcessingException("No timesteps found in UMEP met file") + + if feedback.isCanceled(): + return {} + + # Step 4: Get options + human = create_human_params_from_parameters(parameters) + physics = create_physics_from_parameters(parameters) + materials = build_materials_from_lc_mapping(parameters, context, self, feedback) + use_anisotropic_sky = self.parameterAsBool(parameters, "USE_ANISOTROPIC_SKY", context) + conifer = self.parameterAsBool(parameters, "CONIFER", context) + max_shadow_distance_m = self.parameterAsDouble(parameters, "MAX_SHADOW_DISTANCE", context) + output_dir = self.parameterAsString(parameters, "OUTPUT_DIR", context) + + # Default output to 'results/' inside prepared surface directory + if not output_dir or output_dir.rstrip("/").endswith("OUTPUT_DIR"): + output_dir = os.path.join(prepared_dir, "results") + feedback.pushInfo(f"Output directory: {output_dir} (inside prepared surface dir)") + + # Parse heat-stress thresholds + heat_thresholds_day = self._parse_thresholds(self.parameterAsString(parameters, "HEAT_THRESHOLDS_DAY", context)) + heat_thresholds_night = self._parse_thresholds( + self.parameterAsString(parameters, "HEAT_THRESHOLDS_NIGHT", context) + ) + if heat_thresholds_day: + feedback.pushInfo(f"Daytime UTCI thresholds: {heat_thresholds_day}") + if heat_thresholds_night: + feedback.pushInfo(f"Nighttime UTCI thresholds: {heat_thresholds_night}") + + # Parse 
output components + selected_outputs = [] + if self.parameterAsBool(parameters, "OUTPUT_TMRT", context): + selected_outputs.append("tmrt") + for comp in ["shadow", "kdown", "kup", "ldown", "lup"]: + if self.parameterAsBool(parameters, f"OUTPUT_{comp.upper()}", context): + selected_outputs.append(comp) + # UTCI/PET are now computed inline during the main calculation + if self.parameterAsBool(parameters, "COMPUTE_UTCI", context): + selected_outputs.append("utci") + if self.parameterAsBool(parameters, "COMPUTE_PET", context): + selected_outputs.append("pet") + feedback.pushInfo(f"Outputs: {', '.join(selected_outputs)}") + + # Load precomputed SVF — check explicit SVF_DIR, then prepared surface dir + precomputed = None + svf_dir = parameters.get("SVF_DIR") or None + if not svf_dir: + # Auto-detect SVF in prepared surface directory + svfs_path = os.path.join(prepared_dir, "svfs.zip") + if os.path.exists(svfs_path): + svf_dir = prepared_dir + feedback.pushInfo("Auto-detected SVF in prepared surface directory") + + if svf_dir: + feedback.pushInfo(f"Loading pre-computed SVF from {svf_dir}") + try: + precomputed = solweig.PrecomputedData.prepare(svf_dir=svf_dir) + except Exception as e: + feedback.reportError( + f"Could not load SVF from {svf_dir}: {e}", + fatalError=False, + ) + + if feedback.isCanceled(): + return {} + + # Auto-fallback: anisotropic sky requires precomputed shadow matrices + if use_anisotropic_sky: + has_shadow = (precomputed is not None and precomputed.shadow_matrices is not None) or ( + surface.shadow_matrices is not None + ) + if not has_shadow: + feedback.reportError( + "Anisotropic sky requires pre-computed SVF with shadow matrices. " + "Falling back to isotropic sky model. 
To use anisotropic sky, " + "first run 'Compute Sky View Factor' and provide the SVF directory.", + fatalError=False, + ) + use_anisotropic_sky = False + + # Step 5: Validate inputs + feedback.setProgressText("Validating inputs...") + feedback.setProgress(20) + + try: + warnings = solweig.validate_inputs( + surface=surface, + location=location, + weather=weather_series[0], + use_anisotropic_sky=use_anisotropic_sky, + precomputed=precomputed, + ) + for warning in warnings: + feedback.reportError(f"Warning: {warning}", fatalError=False) + except solweig.SolweigError as e: + raise QgsProcessingException(f"Validation failed: {e}") from e + + if feedback.isCanceled(): + return {} + + # Step 6: Run calculation + os.makedirs(output_dir, exist_ok=True) + + # results is used for single/tiled paths; timeseries uses n_results + tmrt_stats + results = None + n_results = 0 + tmrt_stats = {} + + if is_single: + results = self._run_single( + solweig, + surface, + location, + weather_series[0], + human, + use_anisotropic_sky, + conifer, + physics, + precomputed, + output_dir, + selected_outputs, + max_shadow_distance_m, + materials, + feedback, + ) + else: + n_results, tmrt_stats = self._run_timeseries( + solweig, + surface, + location, + weather_series, + human, + use_anisotropic_sky, + conifer, + physics, + precomputed, + output_dir, + selected_outputs, + max_shadow_distance_m, + materials, + heat_thresholds_day, + heat_thresholds_night, + feedback, + ) + + if feedback.isCanceled(): + return {} + + n_timesteps = n_results if results is None else len(results) + calc_elapsed = time.time() - start_time + feedback.pushInfo(f"Calculation complete: {n_timesteps} timestep(s) in {calc_elapsed:.1f}s") + + # Step 7: Add first Tmrt to canvas (single timestep only) + if is_single: + tmrt_files = sorted(Path(output_dir, "tmrt").glob("tmrt_*.tif")) + if tmrt_files: + timestamp_str = weather_series[0].datetime.strftime("%Y-%m-%d %H:%M") + self.add_raster_to_canvas( + path=str(tmrt_files[0]), + 
layer_name=f"Tmrt {timestamp_str}", + style="tmrt", + context=context, + ) + + # Report summary + total_elapsed = time.time() - start_time + utci_count = n_results if "utci" in selected_outputs else 0 + pet_count = n_results if "pet" in selected_outputs else 0 + if results is None: + # Timeseries path: use incremental stats + self._report_summary(n_results, total_elapsed, utci_count, pet_count, output_dir, feedback, tmrt_stats) + else: + # Single/tiled path: compute stats from results list + stats = {} + all_valid = [r.tmrt[~np.isnan(r.tmrt)] for r in results if r.tmrt is not None] + if all_valid: + stats = { + "mean": np.mean([arr.mean() for arr in all_valid]), + "min": float(min(arr.min() for arr in all_valid)), + "max": float(max(arr.max() for arr in all_valid)), + } + self._report_summary(len(results), total_elapsed, utci_count, pet_count, output_dir, feedback, stats) + + return { + "OUTPUT_FOLDER": output_dir, + "TIMESTEP_COUNT": n_timesteps, + "UTCI_COUNT": utci_count, + "PET_COUNT": pet_count, + } + + # ------------------------------------------------------------------------- + # Calculation helpers + # ------------------------------------------------------------------------- + + def _run_single( + self, + solweig, + surface, + location, + weather, + human, + use_anisotropic_sky, + conifer, + physics, + precomputed, + output_dir, + selected_outputs, + max_shadow_distance_m, + materials, + feedback, + ) -> list: + """Run single timestep with standard processing.""" + feedback.setProgressText("Calculating Mean Radiant Temperature...") + feedback.setProgress(25) + + try: + result = solweig.calculate( + surface=surface, + location=location, + weather=weather, + human=human, + precomputed=precomputed, + use_anisotropic_sky=use_anisotropic_sky, + conifer=conifer, + physics=physics, + materials=materials, + max_shadow_distance_m=max_shadow_distance_m, + ) + except Exception as e: + raise QgsProcessingException(f"Calculation failed: {e}") from e + + 
feedback.setProgress(80) + + # Save selected outputs to component subdirectories + timestamp = weather.datetime.strftime("%Y%m%d_%H%M") + for component in selected_outputs: + if hasattr(result, component): + array = getattr(result, component) + if array is not None: + comp_dir = os.path.join(output_dir, component) + os.makedirs(comp_dir, exist_ok=True) + filepath = os.path.join(comp_dir, f"{component}_{timestamp}.tif") + self.save_georeferenced_output( + array=array, + output_path=filepath, + geotransform=surface._geotransform, + crs_wkt=surface._crs_wkt, + feedback=feedback, + ) + + feedback.setProgress(90) + return [result] + + def _run_timeseries( + self, + solweig, + surface, + location, + weather_series, + human, + use_anisotropic_sky, + conifer, + physics, + precomputed, + output_dir, + selected_outputs, + max_shadow_distance_m, + materials, + heat_thresholds_day, + heat_thresholds_night, + feedback, + ) -> tuple[int, dict]: + """Run multi-timestep timeseries with per-timestep progress. + + Delegates to solweig.calculate_timeseries() so QGIS and direct Python + usage follow the exact same execution path and options. + + Returns (n_results, tmrt_stats). 
+ """ + n_steps = len(weather_series) + feedback.setProgressText(f"Running timeseries ({n_steps} timesteps)...") + feedback.setProgress(25) + progress_state = {"completed": 0} + + def _qgis_progress(current: int, total: int) -> None: + progress_state["completed"] = current + if feedback.isCanceled(): + raise KeyboardInterrupt + if current > 0 and weather_series: + idx = min(current - 1, len(weather_series) - 1) + timestamp_str = weather_series[idx].datetime.strftime("%Y-%m-%d %H:%M") + feedback.setProgressText(f"Timestep {current}/{total} \u2014 {timestamp_str}") + total_safe = max(total, 1) + pct = 25 + int(55 * current / total_safe) + feedback.setProgress(pct) + + summary = None + try: + summary = solweig.calculate_timeseries( + surface=surface, + weather_series=weather_series, + location=location, + human=human, + precomputed=precomputed, + use_anisotropic_sky=use_anisotropic_sky, + conifer=conifer, + physics=physics, + materials=materials, + max_shadow_distance_m=max_shadow_distance_m, + output_dir=output_dir, + outputs=selected_outputs, + heat_thresholds_day=heat_thresholds_day or None, + heat_thresholds_night=heat_thresholds_night or None, + progress_callback=_qgis_progress, + ) + except KeyboardInterrupt: + feedback.pushInfo("Timeseries cancelled by user.") + except Exception as e: + raise QgsProcessingException(f"Timeseries calculation failed: {e}") from e + + feedback.setProgress(80) + n_results = ( + progress_state["completed"] + if progress_state["completed"] > 0 + else (0 if feedback.isCanceled() else n_steps) + ) + + # Log the summary report + if summary is not None and n_results > 0: + feedback.pushInfo("") + for line in summary.report().splitlines(): + feedback.pushInfo(line) + + tmrt_stats = self._compute_tmrt_stats_from_outputs(output_dir) if output_dir and n_results > 0 else {} + return n_results, tmrt_stats + + @staticmethod + def _compute_tmrt_stats_from_outputs(output_dir: str) -> dict: + """Compute summary stats from saved Tmrt rasters (mean 
of per-file means).""" + tmrt_dir = Path(output_dir) / "tmrt" + tmrt_files = sorted(tmrt_dir.glob("tmrt_*.tif")) + if not tmrt_files: + return {} + + mean_sum = 0.0 + mean_count = 0 + tmrt_min = np.inf + tmrt_max = -np.inf + + for tif_path in tmrt_files: + ds = gdal.Open(str(tif_path), gdal.GA_ReadOnly) + if ds is None: + continue + try: + band = ds.GetRasterBand(1) + stats = band.GetStatistics(False, True) + if stats is None or len(stats) < 3: + continue + min_v, max_v, mean_v = float(stats[0]), float(stats[1]), float(stats[2]) + if not (np.isfinite(min_v) and np.isfinite(max_v) and np.isfinite(mean_v)): + continue + mean_sum += mean_v + mean_count += 1 + tmrt_min = min(tmrt_min, min_v) + tmrt_max = max(tmrt_max, max_v) + finally: + ds = None + + if mean_count == 0: + return {} + + return {"mean": mean_sum / mean_count, "min": float(tmrt_min), "max": float(tmrt_max)} + + # ------------------------------------------------------------------------- + # Utility helpers + # ------------------------------------------------------------------------- + + @staticmethod + def _parse_thresholds(raw: str | None) -> list[float]: + """Parse a comma-separated string of temperature thresholds into a list of floats.""" + if not raw or not raw.strip(): + return [] + values = [] + for part in raw.split(","): + part = part.strip() + if part: + with contextlib.suppress(ValueError): + values.append(float(part)) + return values + + @staticmethod + def _report_summary( + n_timesteps, + elapsed, + utci_count, + pet_count, + output_dir, + feedback, + tmrt_stats=None, + ) -> None: + """Report calculation summary statistics. + + Args: + n_timesteps: Number of timesteps processed. + elapsed: Total elapsed time in seconds. + utci_count: Number of UTCI files created. + pet_count: Number of PET files created. + output_dir: Output directory path. + feedback: QGIS feedback object. + tmrt_stats: Dict with 'mean', 'min', 'max' Tmrt values (optional). 
+ """ + feedback.pushInfo("") + feedback.pushInfo("=" * 60) + feedback.pushInfo("Calculation complete!") + feedback.pushInfo(f" Timesteps: {n_timesteps}") + feedback.pushInfo(f" Total time: {elapsed:.1f} seconds") + + if n_timesteps > 1: + feedback.pushInfo(f" Per timestep: {elapsed / n_timesteps:.2f} seconds") + + if tmrt_stats: + feedback.pushInfo(f" Tmrt range: {tmrt_stats['min']:.1f}C - {tmrt_stats['max']:.1f}C") + feedback.pushInfo(f" Mean Tmrt: {tmrt_stats['mean']:.1f}C") + + if utci_count > 0: + feedback.pushInfo(f" UTCI files: {utci_count}") + if pet_count > 0: + feedback.pushInfo(f" PET files: {pet_count}") + + feedback.pushInfo(f" Output: {output_dir}") + feedback.pushInfo("=" * 60) + + feedback.setProgress(100) diff --git a/qgis_plugin/solweig_qgis/algorithms/preprocess/__init__.py b/qgis_plugin/solweig_qgis/algorithms/preprocess/__init__.py new file mode 100644 index 0000000..cb69f32 --- /dev/null +++ b/qgis_plugin/solweig_qgis/algorithms/preprocess/__init__.py @@ -0,0 +1 @@ +"""Preprocessing algorithms for SOLWEIG.""" diff --git a/qgis_plugin/solweig_qgis/algorithms/preprocess/surface_preprocessing.py b/qgis_plugin/solweig_qgis/algorithms/preprocess/surface_preprocessing.py new file mode 100644 index 0000000..5404de1 --- /dev/null +++ b/qgis_plugin/solweig_qgis/algorithms/preprocess/surface_preprocessing.py @@ -0,0 +1,598 @@ +""" +Surface Data Preprocessing Algorithm + +Aligns rasters, computes valid mask, crops, computes walls and SVF, +and saves cleaned GeoTIFFs to a prepared surface directory. This +directory can then be loaded directly by the SOLWEIG Calculation +algorithm, avoiding repeated preprocessing and making intermediate +results transparent. 
+""" + +from __future__ import annotations + +import json +import os +import shutil +import tempfile +import time +import zipfile +from pathlib import Path + +import numpy as np +from qgis.core import ( + QgsProcessingContext, + QgsProcessingException, + QgsProcessingFeedback, + QgsProcessingOutputFolder, + QgsProcessingOutputNumber, + QgsProcessingParameterExtent, + QgsProcessingParameterFolderDestination, + QgsProcessingParameterNumber, +) + +from ...utils.converters import _align_layer, _load_optional_raster, load_raster_from_layer +from ...utils.parameters import ( + add_land_cover_mapping_parameters, + add_surface_parameters, + add_vegetation_parameters, +) +from ..base import SolweigAlgorithmBase + + +class SurfacePreprocessingAlgorithm(SolweigAlgorithmBase): + """ + Prepare surface data for SOLWEIG calculation. + + Loads all surface rasters, aligns them to a common grid, computes + a unified valid mask, crops to the valid bounding box, computes + wall heights and aspects, and saves cleaned GeoTIFFs. + + The output directory can be loaded directly by the SOLWEIG Calculation + algorithm, skipping all preprocessing steps. + """ + + def name(self) -> str: + return "surface_preprocessing" + + def displayName(self) -> str: + return self.tr("2. Prepare Surface Data (align, walls, SVF)") + + def shortHelpString(self) -> str: + return self.tr( + """Prepare surface data for SOLWEIG calculation. + +Aligns all rasters, computes walls and Sky View Factor, and saves +everything needed to run SOLWEIG Calculation directly. + +What this does: +
+  1. Loads all surface rasters (DSM, CDSM, DEM, TDSM, Land cover)
+  2. Aligns all rasters to a common grid (intersection of extents)
+  3. Converts vegetation heights from relative to absolute (if needed)
+  4. Computes a unified valid mask (removes NaN borders)
+  5. Crops all rasters to the valid bounding box
+  6. Computes wall heights and wall aspects from the DSM
+  7. Computes Sky View Factor (SVF) and shadow matrices
+  8. Saves all cleaned rasters as GeoTIFFs
+ +Outputs: +
+  output_dir/
+    dsm.tif
+    wall_height.tif
+    wall_aspect.tif
+    svfs.zip          (Sky View Factor arrays)
+    shadowmats.npz    (shadow matrices for anisotropic sky, when export size is manageable)
+    svf/<pixel>/shadow_memmaps/ (large-grid shadow cache fallback)
+    cdsm.tif         (if CDSM provided)
+    dem.tif           (if DEM provided)
+    tdsm.tif          (if TDSM provided)
+    land_cover.tif    (if land cover provided)
+    metadata.json     (pixel size, CRS, etc.)
+
+ +Next step: +Run "SOLWEIG Calculation" with the prepared surface directory.""" + ) + + def initAlgorithm(self, config=None): + """Define algorithm parameters.""" + # Surface inputs (DSM, CDSM, DEM, TDSM, Land cover + per-layer height modes) + add_surface_parameters(self) + + # Processing extent (optional) + self.addParameter( + QgsProcessingParameterExtent( + "EXTENT", + self.tr("Processing extent (leave empty to use intersection of inputs)"), + optional=True, + ) + ) + + # Output pixel size (optional — coarser than native for faster processing) + pixel_size_param = QgsProcessingParameterNumber( + "PIXEL_SIZE", + self.tr("Output pixel size (m) — leave 0 to use native DSM resolution"), + type=QgsProcessingParameterNumber.Type.Double, + defaultValue=0.0, + minValue=0.0, + maxValue=100.0, + optional=True, + ) + self.addParameter(pixel_size_param) + + # Wall limit (advanced) + wall_limit = QgsProcessingParameterNumber( + "WALL_LIMIT", + self.tr("Minimum wall height (m)"), + type=QgsProcessingParameterNumber.Type.Double, + defaultValue=1.0, + minValue=0.0, + maxValue=10.0, + ) + from qgis.core import QgsProcessingParameterDefinition + + wall_limit.setFlags(wall_limit.flags() | QgsProcessingParameterDefinition.Flag.FlagAdvanced) + self.addParameter(wall_limit) + + # --- Vegetation (advanced) --- + add_vegetation_parameters(self) + + # --- Land cover mapping (advanced) --- + add_land_cover_mapping_parameters(self) + + # Output directory + self.addParameter( + QgsProcessingParameterFolderDestination( + "OUTPUT_DIR", + self.tr("Output directory for prepared surface"), + ) + ) + + # Outputs + self.addOutput( + QgsProcessingOutputFolder( + "SURFACE_DIR", + self.tr("Prepared surface directory"), + ) + ) + self.addOutput( + QgsProcessingOutputNumber( + "COMPUTATION_TIME", + self.tr("Computation time (seconds)"), + ) + ) + + def processAlgorithm( + self, + parameters: dict, + context: QgsProcessingContext, + feedback: QgsProcessingFeedback, + ) -> dict: + """Execute the 
algorithm.""" + feedback.pushInfo("=" * 60) + feedback.pushInfo("SOLWEIG Surface Preprocessing") + feedback.pushInfo("=" * 60) + + start_time = time.time() + + # Import solweig + solweig = self.import_solweig() + from solweig.utils import extract_bounds, intersect_bounds + + # Step 1: Load DSM + feedback.setProgressText("Loading surface data...") + feedback.setProgress(5) + + dsm_layer = self.parameterAsRasterLayer(parameters, "DSM", context) + if dsm_layer is None: + raise QgsProcessingException("DSM layer is required") + + dsm, dsm_gt, crs_wkt = load_raster_from_layer(dsm_layer) + native_pixel_size = abs(dsm_gt[1]) + feedback.pushInfo(f"DSM: {dsm.shape[1]}x{dsm.shape[0]} pixels") + feedback.pushInfo(f" range: {float(np.nanmin(dsm)):.1f} – {float(np.nanmax(dsm)):.1f} m") + feedback.pushInfo(f"Native pixel size: {native_pixel_size:.2f} m") + + # Resolve output pixel size + requested_pixel_size = self.parameterAsDouble(parameters, "PIXEL_SIZE", context) + if requested_pixel_size > 0: + if requested_pixel_size < native_pixel_size - 1e-6: + raise QgsProcessingException( + f"Requested pixel size ({requested_pixel_size:.2f} m) is finer than the DSM " + f"native resolution ({native_pixel_size:.2f} m). Upsampling creates false " + f"precision. Use a value >= {native_pixel_size:.2f} or leave at 0 for native." 
+ ) + pixel_size = requested_pixel_size + if abs(pixel_size - native_pixel_size) > 1e-6: + feedback.pushInfo(f"Resampling all rasters from {native_pixel_size:.2f} m to {pixel_size:.2f} m") + else: + pixel_size = native_pixel_size + + feedback.pushInfo(f"Output pixel size: {pixel_size:.2f} m") + + # Load optional rasters + cdsm, cdsm_gt = _load_optional_raster(parameters, "CDSM", context, self) + if cdsm is not None: + feedback.pushInfo( + f"Loaded CDSM (vegetation), range: {float(np.nanmin(cdsm)):.1f} – {float(np.nanmax(cdsm)):.1f} m" + ) + + dem, dem_gt = _load_optional_raster(parameters, "DEM", context, self) + if dem is not None: + feedback.pushInfo( + f"Loaded DEM (ground elevation), range: {float(np.nanmin(dem)):.1f} – {float(np.nanmax(dem)):.1f} m" + ) + + tdsm, tdsm_gt = _load_optional_raster(parameters, "TDSM", context, self) + if tdsm is not None: + feedback.pushInfo( + f"Loaded TDSM (trunk zone), range: {float(np.nanmin(tdsm)):.1f} – {float(np.nanmax(tdsm)):.1f} m" + ) + + lc_arr, lc_gt = _load_optional_raster(parameters, "LAND_COVER", context, self) + land_cover = lc_arr.astype(np.uint8) if lc_arr is not None else None + if land_cover is not None: + feedback.pushInfo("Loaded land cover classification") + + if feedback.isCanceled(): + return {} + + # Step 2: Compute extent intersection + feedback.setProgressText("Aligning rasters...") + feedback.setProgress(15) + + bounds_list = [extract_bounds(dsm_gt, dsm.shape)] + for arr, gt in [(cdsm, cdsm_gt), (dem, dem_gt), (tdsm, tdsm_gt), (lc_arr, lc_gt)]: + if arr is not None and gt is not None: + bounds_list.append(extract_bounds(gt, arr.shape)) + + extent_rect = self.parameterAsExtent(parameters, "EXTENT", context) + if not extent_rect.isNull(): + target_bbox = [ + extent_rect.xMinimum(), + extent_rect.yMinimum(), + extent_rect.xMaximum(), + extent_rect.yMaximum(), + ] + feedback.pushInfo(f"Using custom extent: {target_bbox}") + elif len(bounds_list) > 1: + target_bbox = intersect_bounds(bounds_list) + 
feedback.pushInfo(f"Auto-computed intersection extent: {target_bbox}") + else: + target_bbox = bounds_list[0] + + # Align all layers + dsm = _align_layer(dsm, dsm_gt, target_bbox, pixel_size, "bilinear", crs_wkt) + if cdsm is not None and cdsm_gt is not None: + cdsm = _align_layer(cdsm, cdsm_gt, target_bbox, pixel_size, "bilinear", crs_wkt) + if dem is not None and dem_gt is not None: + dem = _align_layer(dem, dem_gt, target_bbox, pixel_size, "bilinear", crs_wkt) + if tdsm is not None and tdsm_gt is not None: + tdsm = _align_layer(tdsm, tdsm_gt, target_bbox, pixel_size, "bilinear", crs_wkt) + if land_cover is not None and lc_gt is not None: + land_cover = _align_layer( + land_cover.astype(np.float32), + lc_gt, + target_bbox, + pixel_size, + "nearest", + crs_wkt, + ).astype(np.uint8) + + aligned_gt = [target_bbox[0], pixel_size, 0, target_bbox[3], 0, -pixel_size] + feedback.pushInfo(f"Aligned grid: {dsm.shape[1]}x{dsm.shape[0]} pixels") + + if feedback.isCanceled(): + return {} + + # Step 3: Create SurfaceData, preprocess, mask, crop + feedback.setProgressText("Computing valid mask and cropping...") + feedback.setProgress(25) + + dsm_relative = self.parameterAsEnum(parameters, "DSM_HEIGHT_MODE", context) == 0 + cdsm_relative = self.parameterAsEnum(parameters, "CDSM_HEIGHT_MODE", context) == 0 + tdsm_relative = self.parameterAsEnum(parameters, "TDSM_HEIGHT_MODE", context) == 0 + + surface = solweig.SurfaceData( + dsm=dsm, + cdsm=cdsm, + dem=dem, + tdsm=tdsm, + land_cover=land_cover, + pixel_size=pixel_size, + dsm_relative=dsm_relative, + cdsm_relative=cdsm_relative, + tdsm_relative=tdsm_relative, + ) + surface._geotransform = aligned_gt + surface._crs_wkt = crs_wkt + + # Convert relative heights to absolute where needed + needs_preprocess = dsm_relative or (cdsm_relative and cdsm is not None) or (tdsm_relative and tdsm is not None) + if needs_preprocess: + feedback.pushInfo("Converting relative heights to absolute...") + surface.preprocess() + + # Fill NaN with 
ground reference, mask invalid pixels, crop to valid bbox + # (uses SurfaceData library methods — single source of truth) + surface.fill_nan() + surface.compute_valid_mask() + surface.apply_valid_mask() + surface.crop_to_valid_bbox() + + # Update local geotransform reference after crop + aligned_gt = surface._geotransform + + feedback.pushInfo(f"After NaN fill + mask + crop: {surface.dsm.shape[1]}x{surface.dsm.shape[0]} pixels") + + if feedback.isCanceled(): + return {} + + # Create output directory early so we can write incrementally + output_dir = self.parameterAsString(parameters, "OUTPUT_DIR", context) + os.makedirs(output_dir, exist_ok=True) + gt = surface._geotransform or aligned_gt + crs = surface._crs_wkt or crs_wkt + + # Save aligned/cropped surface rasters immediately + feedback.setProgressText("Saving aligned surface rasters...") + self.save_georeferenced_output(surface.dsm, os.path.join(output_dir, "dsm.tif"), gt, crs) + feedback.pushInfo("Saved dsm.tif") + + if surface.cdsm is not None: + self.save_georeferenced_output(surface.cdsm, os.path.join(output_dir, "cdsm.tif"), gt, crs) + feedback.pushInfo("Saved cdsm.tif") + + if surface.dem is not None: + self.save_georeferenced_output(surface.dem, os.path.join(output_dir, "dem.tif"), gt, crs) + feedback.pushInfo("Saved dem.tif") + + if surface.tdsm is not None: + self.save_georeferenced_output(surface.tdsm, os.path.join(output_dir, "tdsm.tif"), gt, crs) + feedback.pushInfo("Saved tdsm.tif") + + if surface.land_cover is not None: + self.save_georeferenced_output( + surface.land_cover.astype(np.float32), + os.path.join(output_dir, "land_cover.tif"), + gt, + crs, + ) + feedback.pushInfo("Saved land_cover.tif") + + # Step 4: Compute walls and save immediately + feedback.setProgressText("Computing wall heights...") + feedback.setProgress(25) + + from solweig.physics import wallalgorithms as wa + + wall_limit = self.parameterAsDouble(parameters, "WALL_LIMIT", context) + feedback.pushInfo(f"Computing walls (min 
height: {wall_limit:.1f} m)...") + + walls = wa.findwalls(surface.dsm, wall_limit) + feedback.pushInfo("Wall heights computed") + + feedback.setProgressText("Computing wall aspects...") + feedback.setProgress(30) + + dsm_scale = 1.0 / pixel_size + dirwalls = wa.filter1Goodwin_as_aspect_v3(walls, dsm_scale, surface.dsm, feedback=feedback) + feedback.pushInfo("Wall aspects computed") + + surface.wall_height = walls + surface.wall_aspect = dirwalls + + # Save walls immediately + self.save_georeferenced_output(walls, os.path.join(output_dir, "wall_height.tif"), gt, crs) + feedback.pushInfo("Saved wall_height.tif") + self.save_georeferenced_output(dirwalls, os.path.join(output_dir, "wall_aspect.tif"), gt, crs) + feedback.pushInfo("Saved wall_aspect.tif") + + if feedback.isCanceled(): + return {} + + # Step 5: Compute Sky View Factor + # Uses the same Python API as SurfaceData.prepare() — automatically + # tiles large grids to stay within GPU buffer limits. + feedback.setProgressText("Computing Sky View Factor (this may take a while)...") + feedback.setProgress(35) + + from solweig.models.surface import SurfaceData as SD + + use_veg = surface.cdsm is not None + dsm_f32 = surface.dsm.astype(np.float32) + + aligned_rasters = { + "dsm_arr": dsm_f32, + "cdsm_arr": surface.cdsm.astype(np.float32) if use_veg else None, + "tdsm_arr": ( + surface.tdsm.astype(np.float32) + if surface.tdsm is not None + else (surface.cdsm * 0.25).astype(np.float32) + if use_veg + else None + ), + "pixel_size": pixel_size, + "dsm_transform": gt, + "dsm_crs": crs, + } + + rows, cols = dsm_f32.shape + from solweig.tiling import compute_max_tile_pixels + + _max_px = compute_max_tile_pixels(context="svf") + n_pixels = rows * cols + if n_pixels > _max_px: + feedback.pushInfo( + f"Large grid ({rows}x{cols} = {n_pixels:,} px, limit {_max_px:,}) — using tiled computation" + ) + else: + feedback.pushInfo(f"Grid {rows}x{cols} = {n_pixels:,} px — single-pass computation") + + try: + SD._compute_and_cache_svf( 
+ surface, + aligned_rasters, + Path(output_dir), + trunk_ratio=0.25, + feedback=feedback, + progress_range=(35.0, 75.0), + ) + except Exception as e: + raise QgsProcessingException(f"SVF computation failed: {e}") from e + if surface.svf is None: + raise QgsProcessingException( + "SVF computation completed without producing SVF arrays " + "(surface.svf is None). Check that the active solweig build " + "matches the current plugin code." + ) + + feedback.pushInfo("Sky View Factor computed") + feedback.setProgress(75) + + if feedback.isCanceled(): + return {} + + # Save SVF outputs (extract from surface object populated by _compute_and_cache_svf) + feedback.setProgressText("Saving SVF and shadow matrices...") + feedback.setProgress(80) + + # Reuse cache artifacts from _compute_and_cache_svf to avoid re-running + # large single-threaded SVF/shadow serialization. + from solweig.cache import pixel_size_tag + + cache_dir = Path(output_dir) / "svf" / pixel_size_tag(pixel_size) + cache_zip = cache_dir / "svfs.zip" + cache_shadow = cache_dir / "shadowmats.npz" + cache_shadow_memmaps = cache_dir / "shadow_memmaps" + svf_zip_path = Path(output_dir) / "svfs.zip" + shadow_path = Path(output_dir) / "shadowmats.npz" + + copied_zip = False + if cache_zip.exists(): + shutil.copy2(cache_zip, svf_zip_path) + feedback.pushInfo(f"Copied svfs.zip from cache: {svf_zip_path}") + copied_zip = True + else: + feedback.pushInfo("SVF cache zip not found; generating svfs.zip from in-memory arrays") + + if not copied_zip: + svf_data = surface.svf + svf_files = { + "svf.tif": svf_data.svf, + "svfN.tif": svf_data.svf_north, + "svfE.tif": svf_data.svf_east, + "svfS.tif": svf_data.svf_south, + "svfW.tif": svf_data.svf_west, + "svfveg.tif": svf_data.svf_veg, + "svfNveg.tif": svf_data.svf_veg_north, + "svfEveg.tif": svf_data.svf_veg_east, + "svfSveg.tif": svf_data.svf_veg_south, + "svfWveg.tif": svf_data.svf_veg_west, + "svfaveg.tif": svf_data.svf_aveg, + "svfNaveg.tif": svf_data.svf_aveg_north, + 
"svfEaveg.tif": svf_data.svf_aveg_east, + "svfSaveg.tif": svf_data.svf_aveg_south, + "svfWaveg.tif": svf_data.svf_aveg_west, + } + with tempfile.TemporaryDirectory() as tmpdir: + for filename, arr in svf_files.items(): + self.save_georeferenced_output(arr, os.path.join(tmpdir, filename), gt, crs) + with zipfile.ZipFile(str(svf_zip_path), "w", zipfile.ZIP_DEFLATED) as zf: + for filename in svf_files: + zf.write(os.path.join(tmpdir, filename), filename) + feedback.pushInfo("Saved svfs.zip") + + copied_shadow = False + if cache_shadow.exists(): + shutil.copy2(cache_shadow, shadow_path) + feedback.pushInfo(f"Copied shadowmats.npz from cache: {shadow_path}") + copied_shadow = True + elif cache_shadow_memmaps.exists() and (cache_shadow_memmaps / "metadata.json").exists(): + feedback.pushInfo( + f"Using shadow memmap cache (skipping shadowmats.npz export for large grid): {cache_shadow_memmaps}" + ) + copied_shadow = True + else: + feedback.pushInfo("Shadow cache not found; generating shadowmats.npz from in-memory arrays") + + if not copied_shadow: + sm = surface.shadow_matrices + shmat_u8 = np.array(sm._shmat_u8) + vegshmat_u8 = np.array(sm._vegshmat_u8) + vbshmat_u8 = np.array(sm._vbshmat_u8) + np.savez_compressed( + str(shadow_path), + shadowmat=shmat_u8, + vegshadowmat=vegshmat_u8, + vbshmat=vbshmat_u8, + patch_count=np.array(sm.patch_count), + ) + feedback.pushInfo("Saved shadowmats.npz") + + # Save UMEP-compatible parametersforsolweig.json with user's LC mapping, + # vegetation settings, and any matrix overrides applied. 
+ from ...utils.converters import build_materials_from_lc_mapping + + materials = build_materials_from_lc_mapping(parameters, context, self, feedback) + # Apply vegetation settings into the materials namespace + ts = materials.Tree_settings.Value + ts.Transmissivity = parameters.get("TRANSMISSIVITY", 0.03) + ts.Transmissivity_leafoff = parameters.get("TRANSMISSIVITY_LEAFOFF", 0.5) + ts.First_day_leaf = int(parameters.get("LEAF_START", 97)) + ts.Last_day_leaf = int(parameters.get("LEAF_END", 300)) + + try: + from solweig.utils import namespace_to_dict + + params_path = os.path.join(output_dir, "parametersforsolweig.json") + with open(params_path, "w") as f: + json.dump(namespace_to_dict(materials), f, indent=2) + feedback.pushInfo("Saved parametersforsolweig.json (UMEP-compatible)") + except ImportError: + pass + + # Save metadata last (acts as a completion marker) + metadata = { + "pixel_size": pixel_size, + "geotransform": list(gt), + "crs_wkt": crs, + "shape": list(surface.dsm.shape), + "dsm_relative": False, # Always absolute after preprocessing + "cdsm_relative": False, + "tdsm_relative": False, + "has_cdsm": surface.cdsm is not None, + "has_dem": surface.dem is not None, + "has_tdsm": surface.tdsm is not None, + "has_land_cover": surface.land_cover is not None, + "has_walls": True, + "has_svf": True, + } + metadata_path = os.path.join(output_dir, "metadata.json") + with open(metadata_path, "w") as f: + json.dump(metadata, f, indent=2) + feedback.pushInfo("Saved metadata.json") + + feedback.setProgress(95) + + # Report summary + computation_time = time.time() - start_time + feedback.pushInfo("") + feedback.pushInfo("=" * 60) + feedback.pushInfo("Surface preprocessing complete!") + feedback.pushInfo(f" Grid size: {surface.dsm.shape[1]}x{surface.dsm.shape[0]} pixels") + feedback.pushInfo(f" Pixel size: {pixel_size:.2f} m") + feedback.pushInfo(" Walls computed: yes") + feedback.pushInfo(" SVF computed: yes") + feedback.pushInfo(f" Computation time: 
{computation_time:.1f} seconds") + feedback.pushInfo(f" Output directory: {output_dir}") + feedback.pushInfo("=" * 60) + + feedback.setProgress(100) + + return { + "SURFACE_DIR": output_dir, + "COMPUTATION_TIME": computation_time, + } diff --git a/qgis_plugin/solweig_qgis/algorithms/preprocess/svf_preprocessing.py b/qgis_plugin/solweig_qgis/algorithms/preprocess/svf_preprocessing.py new file mode 100644 index 0000000..531946b --- /dev/null +++ b/qgis_plugin/solweig_qgis/algorithms/preprocess/svf_preprocessing.py @@ -0,0 +1,340 @@ +""" +SVF Preprocessing Algorithm + +Pre-computes Sky View Factor (SVF) arrays for reuse across timesteps. +""" + +from __future__ import annotations + +from qgis.core import ( + QgsProcessingContext, + QgsProcessingException, + QgsProcessingFeedback, + QgsProcessingOutputFolder, + QgsProcessingOutputNumber, + QgsProcessingParameterDefinition, + QgsProcessingParameterFile, + QgsProcessingParameterFolderDestination, + QgsProcessingParameterNumber, +) + +from ...utils.converters import load_prepared_surface +from ..base import SolweigAlgorithmBase + + +class SvfPreprocessingAlgorithm(SolweigAlgorithmBase): + """ + Pre-compute Sky View Factor (SVF) arrays. + + SVF computation is expensive (~30-120s for 1000x1000 grid). + Pre-computing allows reuse across multiple timesteps, providing + significant speedup (from 60s to 0.3s per timestep). + """ + + TRUNK_RATIO = "TRUNK_RATIO" + OUTPUT_DIR = "OUTPUT_DIR" + + def name(self) -> str: + return "svf_preprocessing" + + def displayName(self) -> str: + return self.tr("3. Recompute Sky View Factor (advanced)") + + def shortHelpString(self) -> str: + return self.tr( + """Recompute Sky View Factor (SVF) with custom parameters. + +Note: SVF is already computed during "Prepare Surface Data" (step 2). +Use this tool only if you need to recompute SVF with different parameters +(e.g., different trunk ratio) without re-running the full surface preparation. 
+ +Input: +Provide the prepared surface directory from "Prepare Surface Data". +DSM, CDSM, and TDSM are loaded automatically. + +Output: +SVF arrays are saved into the prepared surface directory (`svfs.zip`). +Shadow matrices are saved as `shadowmats.npz` when manageable, or kept +as `svf/<pixel>/shadow_memmaps/` for very large grids. The SOLWEIG +Calculation algorithm loads these automatically. + +Typical runtime: +- 1000x1000 grid: 30-120 seconds""" + ) + + def group(self) -> str: + return "" + + def groupId(self) -> str: + return "" + + def initAlgorithm(self, config=None): + """Define algorithm parameters.""" + self.addParameter( + QgsProcessingParameterFile( + "PREPARED_SURFACE_DIR", + self.tr("Prepared surface directory (from 'Prepare Surface Data')"), + behavior=QgsProcessingParameterFile.Behavior.Folder, + ) + ) + + trunk_ratio = QgsProcessingParameterNumber( + self.TRUNK_RATIO, + self.tr("Trunk ratio (fraction of canopy height, used when no TDSM provided)"), + type=QgsProcessingParameterNumber.Type.Double, + defaultValue=0.25, + minValue=0.0, + maxValue=1.0, + ) + trunk_ratio.setFlags(trunk_ratio.flags() | QgsProcessingParameterDefinition.Flag.FlagAdvanced) + self.addParameter(trunk_ratio) + + self.addParameter( + QgsProcessingParameterFolderDestination( + self.OUTPUT_DIR, + self.tr("Output directory for SVF arrays (defaults to prepared surface directory)"), + optional=True, + ) + ) + + # Outputs + self.addOutput( + QgsProcessingOutputFolder( + "SVF_DIR", + self.tr("SVF output directory"), + ) + ) + + self.addOutput( + QgsProcessingOutputNumber( + "COMPUTATION_TIME", + self.tr("Computation time (seconds)"), + ) + ) + + def processAlgorithm( + self, + parameters: dict, + context: QgsProcessingContext, + feedback: QgsProcessingFeedback, + ) -> dict: + """Execute the algorithm.""" + import time + + feedback.pushInfo("=" * 60) + feedback.pushInfo("SOLWEIG SVF Preprocessing") + feedback.pushInfo("=" * 60) + + start_time = time.time() + + # Import solweig + 
self.import_solweig() + + # Load surface data from prepared directory + feedback.setProgressText("Loading surface data...") + feedback.setProgress(5) + + prepared_dir = self.parameterAsFile(parameters, "PREPARED_SURFACE_DIR", context) + surface = load_prepared_surface(prepared_dir, feedback) + dsm = surface.dsm + cdsm = surface.cdsm + tdsm = surface.tdsm + geotransform = surface._geotransform + crs_wkt = surface._crs_wkt + pixel_size = surface.pixel_size + + feedback.pushInfo(f"DSM: {dsm.shape[1]}x{dsm.shape[0]} pixels") + feedback.pushInfo(f"Pixel size: {pixel_size:.2f} m") + + if feedback.isCanceled(): + return {} + + trunk_ratio = self.parameterAsDouble(parameters, self.TRUNK_RATIO, context) + + import os + import shutil + import tempfile + import zipfile + + import numpy as np + + output_dir = self.parameterAsString(parameters, self.OUTPUT_DIR, context) + # QGIS auto-generates a temp path ending in the parameter name when left blank + if not output_dir or output_dir.rstrip("/").endswith("OUTPUT_DIR"): + output_dir = prepared_dir + feedback.pushInfo(f"SVF output will be saved to prepared surface directory: {output_dir}") + os.makedirs(output_dir, exist_ok=True) + + # Compute SVF using the same Python API as SurfaceData.prepare() — + # automatically tiles large grids to stay within GPU buffer limits. 
+ feedback.setProgressText("Computing Sky View Factor...") + feedback.setProgress(20) + + from pathlib import Path + + from solweig.models.surface import SurfaceData as SD + + use_veg = cdsm is not None + dsm_f32 = dsm.astype(np.float32) + + aligned_rasters = { + "dsm_arr": dsm_f32, + "cdsm_arr": cdsm.astype(np.float32) if use_veg else None, + "tdsm_arr": ( + tdsm.astype(np.float32) + if tdsm is not None + else (cdsm * trunk_ratio).astype(np.float32) + if use_veg + else None + ), + "pixel_size": pixel_size, + "dsm_transform": geotransform, + "dsm_crs": crs_wkt, + } + + rows, cols = dsm_f32.shape + from solweig.tiling import compute_max_tile_pixels + + _max_px = compute_max_tile_pixels(context="svf") + n_pixels = rows * cols + if n_pixels > _max_px: + feedback.pushInfo( + f"Large grid ({rows}x{cols} = {n_pixels:,} px, limit {_max_px:,}) — using tiled computation" + ) + else: + feedback.pushInfo(f"Grid {rows}x{cols} = {n_pixels:,} px — single-pass computation") + + try: + SD._compute_and_cache_svf( + surface, + aligned_rasters, + Path(output_dir), + trunk_ratio=trunk_ratio, + feedback=feedback, + progress_range=(20.0, 90.0), + ) + except Exception as e: + raise QgsProcessingException(f"SVF computation failed: {e}") from e + if surface.svf is None: + raise QgsProcessingException( + "SVF computation completed without producing SVF arrays " + "(surface.svf is None). Check that the active solweig build " + "matches the current plugin code." + ) + + if feedback.isCanceled(): + return {} + + feedback.setProgress(90) + + # Reuse exports created by SurfaceData._compute_and_cache_svf when available + # to avoid re-running CPU-heavy serialization/compression in this QGIS step. 
+ from solweig.cache import pixel_size_tag + + cache_dir = Path(output_dir) / "svf" / pixel_size_tag(pixel_size) + cache_zip = cache_dir / "svfs.zip" + cache_shadow = cache_dir / "shadowmats.npz" + cache_shadow_memmaps = cache_dir / "shadow_memmaps" + svf_zip_path = Path(output_dir) / "svfs.zip" + shadow_path = Path(output_dir) / "shadowmats.npz" + + copied_zip = False + if cache_zip.exists(): + feedback.setProgressText("Finalizing SVF arrays...") + shutil.copy2(cache_zip, svf_zip_path) + feedback.pushInfo(f"Copied SVF arrays from cache: {svf_zip_path}") + copied_zip = True + else: + feedback.pushInfo("SVF cache zip not found; generating svfs.zip from in-memory arrays") + + if not copied_zip: + # Save SVF as svfs.zip (format expected by PrecomputedData.prepare()) + feedback.setProgressText("Saving SVF arrays...") + + svf_data = surface.svf + svf_files = { + "svf.tif": svf_data.svf, + "svfN.tif": svf_data.svf_north, + "svfE.tif": svf_data.svf_east, + "svfS.tif": svf_data.svf_south, + "svfW.tif": svf_data.svf_west, + "svfveg.tif": svf_data.svf_veg, + "svfNveg.tif": svf_data.svf_veg_north, + "svfEveg.tif": svf_data.svf_veg_east, + "svfSveg.tif": svf_data.svf_veg_south, + "svfWveg.tif": svf_data.svf_veg_west, + "svfaveg.tif": svf_data.svf_aveg, + "svfNaveg.tif": svf_data.svf_aveg_north, + "svfEaveg.tif": svf_data.svf_aveg_east, + "svfSaveg.tif": svf_data.svf_aveg_south, + "svfWaveg.tif": svf_data.svf_aveg_west, + } + + with tempfile.TemporaryDirectory() as tmpdir: + for filename, arr in svf_files.items(): + tif_path = os.path.join(tmpdir, filename) + self.save_georeferenced_output( + array=arr, + output_path=tif_path, + geotransform=geotransform, + crs_wkt=crs_wkt, + feedback=feedback, + ) + with zipfile.ZipFile(str(svf_zip_path), "w", zipfile.ZIP_DEFLATED) as zf: + for filename in svf_files: + zf.write(os.path.join(tmpdir, filename), filename) + + feedback.pushInfo(f"Saved SVF arrays: {svf_zip_path}") + + copied_shadow = False + if cache_shadow.exists(): + 
feedback.setProgressText("Finalizing shadow matrices...") + shutil.copy2(cache_shadow, shadow_path) + feedback.pushInfo(f"Copied shadow matrices from cache: {shadow_path}") + copied_shadow = True + elif cache_shadow_memmaps.exists() and (cache_shadow_memmaps / "metadata.json").exists(): + feedback.pushInfo( + f"Using shadow memmap cache (skipping shadowmats.npz export for large grid): {cache_shadow_memmaps}" + ) + copied_shadow = True + else: + feedback.pushInfo("Shadow cache not found; generating shadowmats.npz from in-memory arrays") + + if not copied_shadow: + # Save shadow matrices as shadowmats.npz (for anisotropic sky) + feedback.setProgressText("Saving shadow matrices...") + + sm = surface.shadow_matrices + shmat_u8 = np.array(sm._shmat_u8) + vegshmat_u8 = np.array(sm._vegshmat_u8) + vbshmat_u8 = np.array(sm._vbshmat_u8) + + np.savez_compressed( + str(shadow_path), + shadowmat=shmat_u8, + vegshadowmat=vegshmat_u8, + vbshmat=vbshmat_u8, + patch_count=np.array(sm.patch_count), + ) + + feedback.pushInfo(f"Saved shadow matrices: {shadow_path}") + + computation_time = time.time() - start_time + feedback.setProgress(100) + + # Report results + feedback.pushInfo("") + feedback.pushInfo("=" * 60) + feedback.pushInfo("SVF preprocessing complete!") + feedback.pushInfo(f" Computation time: {computation_time:.1f} seconds") + feedback.pushInfo(f" Output directory: {output_dir}") + feedback.pushInfo("") + feedback.pushInfo( + "Use this directory as 'Pre-computed SVF directory' in calculation algorithms for ~200x speedup." 
+ ) + feedback.pushInfo("=" * 60) + + return { + "SVF_DIR": output_dir, + "COMPUTATION_TIME": computation_time, + } diff --git a/qgis_plugin/solweig_qgis/algorithms/utilities/__init__.py b/qgis_plugin/solweig_qgis/algorithms/utilities/__init__.py new file mode 100644 index 0000000..5c20a24 --- /dev/null +++ b/qgis_plugin/solweig_qgis/algorithms/utilities/__init__.py @@ -0,0 +1 @@ +"""Utility algorithms for SOLWEIG.""" diff --git a/qgis_plugin/solweig_qgis/algorithms/utilities/epw_import.py b/qgis_plugin/solweig_qgis/algorithms/utilities/epw_import.py new file mode 100644 index 0000000..631a58f --- /dev/null +++ b/qgis_plugin/solweig_qgis/algorithms/utilities/epw_import.py @@ -0,0 +1,514 @@ +""" +EPW Weather File Tool + +Download EPW files from PVGIS or preview/validate existing EPW files. +""" + +from __future__ import annotations + +import tempfile +from pathlib import Path + +from qgis.core import ( + QgsProcessingContext, + QgsProcessingException, + QgsProcessingFeedback, + QgsProcessingOutputFile, + QgsProcessingOutputHtml, + QgsProcessingParameterEnum, + QgsProcessingParameterFile, + QgsProcessingParameterFileDestination, + QgsProcessingParameterNumber, +) + +from ...utils.parameters import _canvas_center_latlon +from ..base import SolweigAlgorithmBase + + +class EpwImportAlgorithm(SolweigAlgorithmBase): + """ + Download EPW files from PVGIS or preview existing EPW files. + + In download mode, fetches a Typical Meteorological Year (TMY) EPW + file from the EU PVGIS service for any location (no API key needed). + + In preview mode, displays location, date range, and data statistics + for an existing EPW file. + """ + + # Mode enum values + MODE_DOWNLOAD = 0 + MODE_PREVIEW = 1 + + def name(self) -> str: + return "epw_import" + + def displayName(self) -> str: + return self.tr("1. Download / Preview Weather File") + + def shortHelpString(self) -> str: + return self.tr( + """Download or preview EnergyPlus Weather (EPW) files. 
+ +Download mode: +Downloads a Typical Meteorological Year (TMY) EPW file from the EU +PVGIS service (no API key required). Near-global coverage using +ERA5 reanalysis data. + +Enter latitude and longitude, and the file will be downloaded and +saved to the specified output path. + +Preview mode: +Inspect an existing EPW file before running SOLWEIG calculations. +Generates an HTML report with location, date range, and data statistics. + +EPW files contain hourly data including: +
+
+  • Air temperature, relative humidity
+  • Wind speed and direction
+  • Solar radiation (global, direct, diffuse)
+  • Atmospheric pressure
+ +Data source: +PVGIS (Photovoltaic Geographical Information System) by the +EU Joint Research Centre. Data derived from ERA5 reanalysis.""" + ) + + def group(self) -> str: + return "" + + def groupId(self) -> str: + return "" + + def initAlgorithm(self, config=None): + """Define algorithm parameters.""" + # Mode selector + self.addParameter( + QgsProcessingParameterEnum( + "MODE", + self.tr("Mode"), + options=["Download EPW from PVGIS", "Preview existing EPW file"], + defaultValue=self.MODE_DOWNLOAD, + ) + ) + + # Download parameters — default to map canvas centre so users + # don't accidentally download weather for the wrong location. + canvas_lat, canvas_lon = _canvas_center_latlon() + + self.addParameter( + QgsProcessingParameterNumber( + "LATITUDE", + self.tr("Latitude (for download)"), + type=QgsProcessingParameterNumber.Type.Double, + defaultValue=canvas_lat, + minValue=-90.0, + maxValue=90.0, + optional=True, + ) + ) + + self.addParameter( + QgsProcessingParameterNumber( + "LONGITUDE", + self.tr("Longitude (for download)"), + type=QgsProcessingParameterNumber.Type.Double, + defaultValue=canvas_lon, + minValue=-180.0, + maxValue=180.0, + optional=True, + ) + ) + + self.addParameter( + QgsProcessingParameterFileDestination( + "OUTPUT_EPW", + self.tr("Save EPW file to (for download)"), + fileFilter="EPW files (*.epw)", + optional=True, + ) + ) + + # Preview parameters + self.addParameter( + QgsProcessingParameterFile( + "EPW_FILE", + self.tr("EPW weather file (for preview)"), + extension="epw", + optional=True, + ) + ) + + # Outputs + self.addOutput( + QgsProcessingOutputFile( + "DOWNLOADED_FILE", + self.tr("Downloaded EPW file"), + ) + ) + + self.addOutput( + QgsProcessingOutputHtml( + "OUTPUT_HTML", + self.tr("EPW Information Report"), + ) + ) + + def processAlgorithm( + self, + parameters: dict, + context: QgsProcessingContext, + feedback: QgsProcessingFeedback, + ) -> dict: + """Execute the algorithm.""" + mode = self.parameterAsEnum(parameters, "MODE", 
context) + + if mode == self.MODE_DOWNLOAD: + return self._download_epw(parameters, context, feedback) + else: + return self._preview_epw(parameters, context, feedback) + + def _download_epw( + self, + parameters: dict, + context: QgsProcessingContext, + feedback: QgsProcessingFeedback, + ) -> dict: + """Download EPW from PVGIS and generate preview report.""" + feedback.pushInfo("=" * 60) + feedback.pushInfo("EPW Download from PVGIS") + feedback.pushInfo("=" * 60) + + # Import solweig + self.import_solweig() + from solweig.io import read_epw + + # Get parameters + latitude = self.parameterAsDouble(parameters, "LATITUDE", context) + longitude = self.parameterAsDouble(parameters, "LONGITUDE", context) + output_path = self.parameterAsFileOutput(parameters, "OUTPUT_EPW", context) + + if not output_path: + output_path = str(Path(tempfile.gettempdir()) / f"pvgis_{latitude:.2f}_{longitude:.2f}.epw") + + if not -90 <= latitude <= 90: + raise QgsProcessingException(f"Latitude must be between -90 and 90, got {latitude}") + if not -180 <= longitude <= 180: + raise QgsProcessingException(f"Longitude must be between -180 and 180, got {longitude}") + + feedback.pushInfo(f"Location: {latitude:.4f}N, {longitude:.4f}E") + feedback.pushInfo(f"Output: {output_path}") + feedback.pushInfo("") + feedback.setProgressText("Downloading from PVGIS...") + feedback.setProgress(10) + + # Use QgsNetworkAccessManager instead of urllib to respect QGIS proxy settings + from qgis.core import QgsNetworkAccessManager + from qgis.PyQt.QtCore import QUrl + from qgis.PyQt.QtNetwork import QNetworkRequest + + url = f"https://re.jrc.ec.europa.eu/api/v5_3/tmy?lat={latitude}&lon={longitude}&outputformat=epw" + request = QNetworkRequest(QUrl(url)) + reply = QgsNetworkAccessManager.instance().blockingGet(request) + + # Check for network errors + error_code = reply.error() + if error_code != 0: + error_msg = reply.errorString() + raise QgsProcessingException(f"Cannot reach PVGIS server. 
Check your internet connection.\n{error_msg}") + + http_status = reply.attribute(QNetworkRequest.Attribute.HttpStatusCodeAttribute) + data = bytes(reply.content()) + + if http_status == 400: + raise QgsProcessingException( + f"PVGIS has no data for ({latitude}, {longitude}). The location may be over ocean or outside coverage." + ) + if http_status and http_status != 200: + raise QgsProcessingException(f"PVGIS download failed (HTTP {http_status})") + + if len(data) < 1000: + text = data.decode("utf-8", errors="replace") + raise QgsProcessingException(f"PVGIS returned an error: {text.strip()}") + + output = Path(output_path) + output.parent.mkdir(parents=True, exist_ok=True) + output.write_bytes(data) + + feedback.setProgress(60) + feedback.pushInfo(f"Downloaded EPW file: {output_path}") + + # Generate preview report + feedback.setProgressText("Generating report...") + try: + df, metadata = read_epw(output_path) + except Exception as e: + raise QgsProcessingException(f"Error reading downloaded EPW: {e}") from e + + feedback.pushInfo("") + feedback.pushInfo(f"Location: {metadata.get('city', 'Unknown')}") + feedback.pushInfo(f"Coordinates: {metadata.get('latitude', 'N/A')}N, {metadata.get('longitude', 'N/A')}E") + feedback.pushInfo(f"Data range: {df.index.min()} to {df.index.max()}") + feedback.pushInfo(f"Timesteps: {len(df)}") + + html = self._generate_html_report(df, metadata, output_path) + html_path = str(Path(tempfile.gettempdir()) / f"epw_report_{Path(output_path).stem}.html") + with open(html_path, "w", encoding="utf-8") as f: + f.write(html) + + feedback.setProgress(100) + feedback.pushInfo("") + feedback.pushInfo("=" * 60) + feedback.pushInfo("Download complete!") + feedback.pushInfo(f" EPW file: {output_path}") + feedback.pushInfo(f" Report: {html_path}") + feedback.pushInfo("=" * 60) + + return { + "DOWNLOADED_FILE": output_path, + "OUTPUT_HTML": html_path, + } + + def _preview_epw( + self, + parameters: dict, + context: QgsProcessingContext, + feedback: 
QgsProcessingFeedback, + ) -> dict: + """Preview an existing EPW file.""" + feedback.pushInfo("=" * 60) + feedback.pushInfo("EPW Weather File Preview") + feedback.pushInfo("=" * 60) + + # Import solweig + self.import_solweig() + from solweig.io import read_epw + + # Get parameters + epw_path = self.parameterAsFile(parameters, "EPW_FILE", context) + if not epw_path: + raise QgsProcessingException("No EPW file specified for preview mode") + + feedback.pushInfo(f"Reading: {epw_path}") + + # Read EPW file + try: + df, metadata = read_epw(epw_path) + except FileNotFoundError as e: + raise QgsProcessingException(f"EPW file not found: {epw_path}") from e + except Exception as e: + raise QgsProcessingException(f"Error reading EPW file: {e}") from e + + # Display key info + feedback.pushInfo("") + feedback.pushInfo(f"Location: {metadata.get('city', 'Unknown')}") + feedback.pushInfo(f"Coordinates: {metadata.get('latitude', 'N/A')}N, {metadata.get('longitude', 'N/A')}E") + feedback.pushInfo(f"Elevation: {metadata.get('elevation', 'N/A')} m") + feedback.pushInfo(f"UTC offset: {metadata.get('tz_offset', 'N/A')} hours") + feedback.pushInfo("") + feedback.pushInfo(f"Data range: {df.index.min()} to {df.index.max()}") + feedback.pushInfo(f"Timesteps: {len(df)}") + + # Generate HTML report + html = self._generate_html_report(df, metadata, epw_path) + + # Save to temp file + output_html = str(Path(tempfile.gettempdir()) / f"epw_report_{Path(epw_path).stem}.html") + with open(output_html, "w", encoding="utf-8") as f: + f.write(html) + + feedback.pushInfo("") + feedback.pushInfo(f"Report saved: {output_html}") + + return {"OUTPUT_HTML": output_html} + + @staticmethod + def _column_stats(df, col: str) -> tuple: + """Compute (min, max, mean, missing_count) for a column. + + Works with both pandas DataFrames and the lightweight _EpwDataFrame. 
+ """ + import math + + values = df[col] + valid = [v for v in values if isinstance(v, (int, float)) and not math.isnan(v)] + n_missing = len(values) - len(valid) + if valid: + return min(valid), max(valid), sum(valid) / len(valid), n_missing + return 0.0, 0.0, 0.0, n_missing + + def _generate_html_report(self, df, metadata: dict, epw_path: str) -> str: + """Generate HTML report for EPW file.""" + # Map column names to friendly names + column_names = { + "temp_air": "Air Temperature (°C)", + "relative_humidity": "Relative Humidity (%)", + "wind_speed": "Wind Speed (m/s)", + "ghi": "Global Horizontal Irradiance (W/m²)", + "dni": "Direct Normal Irradiance (W/m²)", + "dhi": "Diffuse Horizontal Irradiance (W/m²)", + "atmospheric_pressure": "Atmospheric Pressure (Pa)", + } + + # Build statistics table rows + stats_rows = "" + has_missing = False + for col in ["temp_air", "relative_humidity", "wind_speed", "ghi"]: + if col in df.columns: + friendly_name = column_names.get(col, col) + col_min, col_max, col_mean, col_missing = self._column_stats(df, col) + if col_missing > 0: + has_missing = True + stats_rows += f""" + + {friendly_name} + {col_min:.1f} + {col_max:.1f} + {col_mean:.1f} + {col_missing} + + """ + + # Build HTML + html = f""" + + + + + EPW Weather File Report + + + +

EPW Weather File Report

+ +
+

File Information

+
{epw_path}
+
+ +
+

Location

+ + + + + + + + + +
PropertyValue
City{metadata.get("city", "Unknown")}
State/Province{metadata.get("state", "-")}
Country{metadata.get("country", "Unknown")}
Latitude{metadata.get("latitude", "N/A")}° N
Longitude{metadata.get("longitude", "N/A")}° E
Elevation{metadata.get("elevation", "N/A")} m
UTC OffsetUTC{metadata.get("tz_offset", 0):+.0f}
+
+ +
+

Data Range

+ + + + + + +
PropertyValue
Start Date{df.index.min()}
End Date{df.index.max()}
Total Timesteps{len(df):,}
Timestep IntervalHourly
+
+ +
+

Data Statistics

+ + + + + + + + + {stats_rows} +
VariableMinMaxMeanMissing
+
+ + { + "
Warning: " + "Some variables have missing values. Check the Missing column above.
" + if has_missing + else "" + } + +
+ Next Steps: +
    +
  • Use this EPW file with the "SOLWEIG Calculation" algorithm
  • +
  • Set weather source to "EPW weather file"
  • +
  • UTC offset is important for accurate sun position calculation
  • +
  • Consider filtering hours (e.g., 9-17) for daylight-only analysis
  • +
+
+ +
+

SOLWEIG Compatibility

+

This EPW file is compatible with SOLWEIG calculations. The following + variables will be used:

+
    +
  • temp_air: Air temperature for Tmrt and thermal comfort
  • +
  • relative_humidity: For UTCI and PET calculations
  • +
  • ghi: Global solar radiation for shortwave radiation
  • +
  • wind_speed: For UTCI and PET calculations
  • +
+
+ + + + """ + + return html diff --git a/qgis_plugin/solweig_qgis/icon.png b/qgis_plugin/solweig_qgis/icon.png new file mode 100644 index 0000000..cd837e9 Binary files /dev/null and b/qgis_plugin/solweig_qgis/icon.png differ diff --git a/qgis_plugin/solweig_qgis/icon.svg b/qgis_plugin/solweig_qgis/icon.svg new file mode 100644 index 0000000..54ca6f2 --- /dev/null +++ b/qgis_plugin/solweig_qgis/icon.svg @@ -0,0 +1,134 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Tmrt + diff --git a/qgis_plugin/solweig_qgis/icon_128.png b/qgis_plugin/solweig_qgis/icon_128.png new file mode 100644 index 0000000..9771026 Binary files /dev/null and b/qgis_plugin/solweig_qgis/icon_128.png differ diff --git a/qgis_plugin/solweig_qgis/metadata.txt b/qgis_plugin/solweig_qgis/metadata.txt new file mode 100644 index 0000000..196748e --- /dev/null +++ b/qgis_plugin/solweig_qgis/metadata.txt @@ -0,0 +1,143 @@ +[general] +name=SOLWEIG +qgisMinimumVersion=4.0 +description=Calculate Mean Radiant Temperature & UTCI thermal comfort using the SOLWEIG model. +version=0.1.0-beta47 +author=SOLWEIG Development Team +email=gareth.simons@ucl.ac.uk + +about=SOLWEIG (Solar and Longwave Environmental Irradiance Geometry) is a high-performance urban microclimate model for calculating Mean Radiant Temperature and thermal comfort indices. + + Features: + - Single timestep and timeseries Tmrt calculations + - UTCI (Universal Thermal Climate Index) computation + - PET (Physiological Equivalent Temperature) computation + - Sky View Factor (SVF) preprocessing + - EPW & SUEWS weather file import + - Support for large rasters via tiled processing and GPU + + Requirements: + - The SOLWEIG Python library must be installed separately. + - The plugin will offer to install it automatically on first use. 
+ - To install manually, run this in the QGIS Python Console: + import pip; pip.main(["install", "solweig"]) + + This plugin provides QGIS Processing algorithms that wrap the SOLWEIG Python library. This version of Solweig is currently in testing as a proof of concept for Rust + GPU + tiled processing to handle large rasters. + + Adapted from UMEP (Urban Multi-scale Environmental Predictor) by Fredrik Lindberg, Sue Grimmond, and contributors. If you use this plugin in research, please cite: + + Lindberg F, Grimmond CSB, Gabey A, Huang B, Kent CW, Sun T, Theeuwes N, Järvi L, Ward H, Capel-Timms I, Chang YY, Jonsson P, Krave N, Liu D, Meyer D, Olofson F, Tan JG, Wästberg D, Xue L, Zhang Z (2018) Urban Multi-scale Environmental Predictor (UMEP) - An integrated tool for city-based climate services. Environmental Modelling and Software 99, 70-87 https://doi.org/10.1016/j.envsoft.2017.09.020 + +tracker=https://github.com/UMEP-dev/solweig/issues +repository=https://github.com/UMEP-dev/solweig +homepage=https://umep-docs.readthedocs.io/ + +hasProcessingProvider=yes +tags=urban climate, thermal comfort, mean radiant temperature, UTCI, PET, microclimate, heat stress, urban planning + +category=Analysis +icon=icon.png + +experimental=True +deprecated=False +supportsQt6=True + +changelog= + 0.1.0-beta46 + - Support QGIS 4 (Qt6); raise minimum to QGIS 4.0, Python 3.11 + 0.1.0-beta45 + - Fix mountain shadow reach; simplify QGIS tiling options; minor bug fixes + 0.1.0-beta44 + - Expose day/night UTCI heat-stress thresholds; optional per-timestep Tmrt output; log TimeseriesSummary report + 0.1.0-beta43 + - TimeseriesSummary with aggregated grids and inline UTCI/PET; remove batch postprocess + 0.1.0-beta42 + - Remove percent symbol from changelog (fixes ini-style metadata parsing) + 0.1.0-beta41 + - Configurable land cover materials table in Surface Preparation with UMEP defaults + - Save/load UMEP-compatible parametersforsolweig.json from surface directory + 0.1.0-beta40 + - 
Tighten golden test tolerances (SVF, wall temp, Tmrt, PET, ground temp) + 0.1.0-beta39 + - Fix nighttime Tmrt: full longwave radiation balance via Rust pipeline + - Remove deprecated Python physics; all paths use fused Rust pipeline + 0.1.0-beta38 + - SVF must be prepared before calculate(); fix shadow spec convention + - Documentation and quick-start improvements + 0.1.0-beta37 + - Fix integer raster crash on NaN assignment; rename coerce_f64_to_f32 to ensure_float32 + 0.1.0-beta36 + - GPU shadow-to-bitpack shader; anisotropic sky GPU cleanup + - SVF core API tests; Python 3.9 fix; surface model type-safety fixes + 0.1.0-beta35 + - Fall back to Python logging/tqdm when QGIS is importable but inactive + 0.1.0-beta34 + - Use local relief for max_height; harden NaN handling; fix SVF tile-size context + 0.1.0-beta33 + - GPU and processing performance improvements; multi-resolution SVF/wall cache + 0.1.0-beta32 - Bump + 0.1.0-beta31 - Bump + 0.1.0-beta30 + - Reuse precomputed SVF and shadow matrices in tiled timeseries + 0.1.0-beta29 + - Configurable tile orchestration (workers, queue depth, prefetch) + - Backpressure-controlled dispatch; memory-aware prefetch; adaptive worker count + 0.1.0-beta28 + - Optimized tiled timeseries; parallel tile execution via ThreadPoolExecutor + 0.1.0-beta27 + - Fix 4 tiling bugs (pipelining split, tile-size semantics, anisotropic sky, overlap buffer) + - Free result arrays after writing to disk in timeseries mode + 0.1.0-beta26 + - Vegetation transmissivity for diffuse SVF; configurable leaf-off transmissivity + - Expose vegetation parameters in QGIS (transmissivity, seasonal leaf dates) + 0.1.0-beta25 + - Fix multiple bugs: GVF shadow inversion, CDSM/TDSM clamping, thermal delay state mutation, nighttime NaN propagation, diffuse fraction division by zero, clearness index log(0), TMY date filter, polar winter altmax, missing conifer params + 0.1.0-beta24 + - Skip tiling for nighttime timesteps (single-pass full grid) + 0.1.0-beta23 + - 
Fix EPW hour-24 timestamp bug; fix hardcoded Gothenburg lat/lon defaults + 0.1.0-beta22 + - GPU pipelining for sustained utilisation; opt-in timing instrumentation + 0.1.0-beta21 + - Height-aware tile buffer (reduces tile count for short buildings) + 0.1.0-beta20 + - QGIS progress monitor for timeseries; auto-tile large rasters + - Resource-aware tile sizing from real GPU/RAM limits + 0.1.0-beta19 + - Fix import sorting (ruff formatting) + 0.1.0-beta18 + - Enable anisotropic sky by default; port Perez model to Rust + - Fix bitpacked shadow extraction; fix veg shadow init; release GIL + 0.1.0-beta17 + - Install solweig with --no-deps in QGIS (fixes gdal_array ABI crash) + 0.1.0-beta16 + - Catch all exceptions on import so upgrade prompt still appears + 0.1.0-beta15 + - Force GDAL backend via env var (fixes rasterio detection in QGIS) + 0.1.0-beta14 + - Gate plugin build on passing tests; fix CI test reload failure + 0.1.0-beta13 + - Add Location.from_epw(); enable anisotropic sky in tiled mode + - Fix SVF cache reuse and validation; enforce release-mode Rust builds + 0.1.0-beta12 + - CI: create GitHub Release with wheels on tag push + 0.1.0-beta11 + - Fix PyPI upload: include LICENSE in sdist + 0.1.0-beta10 + - Auto-fill NaN with DEM ground reference; clamp near-ground noise + 0.1.0-beta9 + - Fix numpy dtype crash in QGIS; consolidate backend detection + 0.1.0-beta8 + - Fix plugin not appearing in Processing Toolbox; defer install prompt + 0.1.0-beta7 + - Publish workflow gates on passing tests + 0.1.0-beta6 + - Auto-detect outdated library and prompt to upgrade + 0.1.0-beta5 + - CI fixes: simplified build workflow; tags no longer trigger redundant workflows + 0.1.0-beta4 + - Library installed via pip (auto-prompted); EPW proxy support; remove bundled binaries + 0.1.0-beta1 - First public beta + - Tmrt and thermal comfort; SVF preprocessing; anisotropic sky; GPU shadows + 0.1.0-alpha1 - Internal alpha + - Single timestep and timeseries Tmrt; SVF; UTCI/PET; EPW 
import; tiled processing diff --git a/qgis_plugin/solweig_qgis/provider.py b/qgis_plugin/solweig_qgis/provider.py new file mode 100644 index 0000000..6c3fb4f --- /dev/null +++ b/qgis_plugin/solweig_qgis/provider.py @@ -0,0 +1,61 @@ +""" +SOLWEIG Processing Provider + +Registers all SOLWEIG algorithms with the QGIS Processing framework. +""" + +import os + +from qgis.core import QgsProcessingProvider +from qgis.PyQt.QtGui import QIcon + + +class SolweigProvider(QgsProcessingProvider): + """ + QGIS Processing provider for SOLWEIG algorithms. + + Algorithms (in workflow order): + 1. Download EPW weather file + 2. Prepare Surface Data (align, walls, SVF) + 3. SOLWEIG Calculation + """ + + def id(self): + """Unique provider ID used in processing scripts.""" + return "solweig" + + def name(self): + """Display name shown in Processing Toolbox.""" + return "SOLWEIG" + + def longName(self): + """Extended name for provider description.""" + return "SOLWEIG - Solar and Longwave Environmental Irradiance Geometry" + + def icon(self): + """Provider icon shown in Processing Toolbox.""" + icon_path = os.path.join(os.path.dirname(__file__), "icon.png") + if os.path.exists(icon_path): + return QIcon(icon_path) + return QgsProcessingProvider.icon(self) + + def loadAlgorithms(self): + """ + Load and register all SOLWEIG algorithms. + + Called by QGIS when the provider is initialized. + """ + # 1. Download EPW weather file + from .algorithms.utilities.epw_import import EpwImportAlgorithm + + self.addAlgorithm(EpwImportAlgorithm()) + + # 2. Prepare Surface Data (align, walls, SVF) + from .algorithms.preprocess.surface_preprocessing import SurfacePreprocessingAlgorithm + + self.addAlgorithm(SurfacePreprocessingAlgorithm()) + + # 3. 
SOLWEIG Calculation + from .algorithms.calculation.solweig_calculation import SolweigCalculationAlgorithm + + self.addAlgorithm(SolweigCalculationAlgorithm()) diff --git a/qgis_plugin/solweig_qgis/utils/__init__.py b/qgis_plugin/solweig_qgis/utils/__init__.py new file mode 100644 index 0000000..aaeb5f7 --- /dev/null +++ b/qgis_plugin/solweig_qgis/utils/__init__.py @@ -0,0 +1,25 @@ +"""Utility functions for SOLWEIG QGIS plugin.""" + +from .converters import ( + create_location_from_parameters, + create_surface_from_parameters, + create_weather_from_parameters, + load_raster_from_layer, +) +from .parameters import ( + add_human_parameters, + add_location_parameters, + add_surface_parameters, + add_weather_parameters, +) + +__all__ = [ + "create_surface_from_parameters", + "create_location_from_parameters", + "create_weather_from_parameters", + "load_raster_from_layer", + "add_surface_parameters", + "add_location_parameters", + "add_weather_parameters", + "add_human_parameters", +] diff --git a/qgis_plugin/solweig_qgis/utils/converters.py b/qgis_plugin/solweig_qgis/utils/converters.py new file mode 100644 index 0000000..68eb5e9 --- /dev/null +++ b/qgis_plugin/solweig_qgis/utils/converters.py @@ -0,0 +1,981 @@ +""" +Converters between QGIS parameters and SOLWEIG dataclasses. + +Handles translation of QGIS Processing parameters into the dataclasses +expected by the solweig library API. +""" + +from __future__ import annotations + +from datetime import datetime +from typing import TYPE_CHECKING, Any + +import numpy as np +from osgeo import gdal, osr +from qgis.core import ( + QgsProcessingContext, + QgsProcessingException, + QgsProcessingFeedback, + QgsRasterLayer, +) + +if TYPE_CHECKING: + from numpy.typing import NDArray + + +def load_raster_from_layer( + layer: QgsRasterLayer, +) -> tuple[NDArray[np.floating], list[float], str]: + """ + Load QGIS raster layer to numpy array using GDAL. + + Args: + layer: QGIS raster layer to load. 
+ + Returns: + tuple of (array, geotransform, crs_wkt): + - array: 2D numpy float32 array + - geotransform: GDAL 6-tuple + - crs_wkt: CRS as WKT string + + Raises: + QgsProcessingException: If raster cannot be opened. + """ + source = layer.source() + ds = gdal.Open(source, gdal.GA_ReadOnly) + if ds is None: + raise QgsProcessingException(f"Cannot open raster: {source}") + + try: + band = ds.GetRasterBand(1) + array = band.ReadAsArray().astype(np.float32) + + # Handle nodata — only honor negative sentinel values (e.g. -9999) + # to avoid converting valid zero-height pixels to NaN + nodata = band.GetNoDataValue() + if nodata is not None and nodata < 0: + array = np.where(array == nodata, np.nan, array) + + geotransform = list(ds.GetGeoTransform()) + crs_wkt = ds.GetProjection() + + return array, geotransform, crs_wkt + finally: + ds = None + + +def _read_height_mode( + parameters: dict[str, Any], + param_name: str, + default_absolute: bool = True, +) -> bool: + """Read a per-layer height mode enum and return True if relative. + + Args: + parameters: Algorithm parameters dict. + param_name: Enum parameter name (e.g. "DSM_HEIGHT_MODE"). + default_absolute: If True, default is absolute (enum 1); if False, default is relative (enum 0). + + Returns: + True if the layer uses relative heights, False if absolute. 
+ """ + default = 1 if default_absolute else 0 + value = parameters.get(param_name, default) + return (int(value) if isinstance(value, (int, float)) else default) == 0 + + +def _load_optional_raster( + parameters: dict[str, Any], + param_name: str, + context: QgsProcessingContext, + param_handler: Any, +) -> tuple[NDArray[np.floating] | None, list[float] | None]: + """Load optional raster, returning (array, geotransform) or (None, None).""" + if param_name not in parameters or not parameters[param_name]: + return None, None + layer = param_handler.parameterAsRasterLayer(parameters, param_name, context) + if layer is None: + return None, None + arr, gt, _ = load_raster_from_layer(layer) + return arr, gt + + +def _align_layer( + arr: NDArray[np.floating], + gt: list[float], + target_bbox: list[float], + pixel_size: float, + method: str, + crs_wkt: str, +) -> NDArray[np.floating]: + """Resample a raster to the target grid if extents or shape differ.""" + from solweig.utils import extract_bounds, resample_to_grid + + # Expected target dimensions (same formula as resample_to_grid) + expected_h = int(np.round((target_bbox[3] - target_bbox[1]) / pixel_size)) + expected_w = int(np.round((target_bbox[2] - target_bbox[0]) / pixel_size)) + + bounds = extract_bounds(gt, arr.shape) + needs_resample = ( + abs(bounds[0] - target_bbox[0]) > 1e-6 + or abs(bounds[1] - target_bbox[1]) > 1e-6 + or abs(bounds[2] - target_bbox[2]) > 1e-6 + or abs(bounds[3] - target_bbox[3]) > 1e-6 + or abs(abs(gt[1]) - pixel_size) > 1e-6 + or arr.shape != (expected_h, expected_w) + ) + if needs_resample: + arr, _ = resample_to_grid(arr, gt, target_bbox, pixel_size, method=method, src_crs=crs_wkt) + return arr + + +def create_surface_from_parameters( + parameters: dict[str, Any], + context: QgsProcessingContext, + param_handler: Any, # Algorithm instance with parameterAsRasterLayer + feedback: QgsProcessingFeedback, + bbox: list[float] | None = None, + output_dir: str | None = None, +) -> Any: # Returns 
solweig.SurfaceData + """ + Create SurfaceData from QGIS processing parameters. + + Loads all surface rasters, aligns them to a common grid (intersection + of all extents or user-specified bbox), computes a unified valid mask, + and saves cleaned rasters to disk. + + Args: + parameters: Algorithm parameters dict. + context: Processing context. + param_handler: Object with parameterAsRasterLayer method. + feedback: Processing feedback. + bbox: Optional explicit bounding box [minx, miny, maxx, maxy]. + output_dir: Optional directory for saving cleaned rasters. + + Returns: + solweig.SurfaceData instance with aligned, masked arrays. + + Raises: + QgsProcessingException: If required DSM is missing or invalid. + """ + try: + import solweig + from solweig.utils import extract_bounds, intersect_bounds + except ImportError as e: + raise QgsProcessingException("SOLWEIG library not found. Please install solweig package.") from e + + # Load required DSM (with geotransform) + dsm_layer = param_handler.parameterAsRasterLayer(parameters, "DSM", context) + if dsm_layer is None: + raise QgsProcessingException("DSM layer is required") + + dsm, dsm_gt, crs_wkt = load_raster_from_layer(dsm_layer) + lo, hi = float(np.nanmin(dsm)), float(np.nanmax(dsm)) + feedback.pushInfo(f"Loaded DSM: {dsm.shape[1]}x{dsm.shape[0]} pixels, range: {lo:.1f} – {hi:.1f} m") + + pixel_size = abs(dsm_gt[1]) + feedback.pushInfo(f"Pixel size: {pixel_size:.2f} m") + + # Load optional rasters (keeping geotransforms) + cdsm, cdsm_gt = _load_optional_raster(parameters, "CDSM", context, param_handler) + if cdsm is not None: + feedback.pushInfo( + f"Loaded CDSM (vegetation), range: {float(np.nanmin(cdsm)):.1f} – {float(np.nanmax(cdsm)):.1f} m" + ) + + dem, dem_gt = _load_optional_raster(parameters, "DEM", context, param_handler) + if dem is not None: + feedback.pushInfo( + f"Loaded DEM (ground elevation), range: {float(np.nanmin(dem)):.1f} – {float(np.nanmax(dem)):.1f} m" + ) + + tdsm, tdsm_gt = 
_load_optional_raster(parameters, "TDSM", context, param_handler) + if tdsm is not None: + feedback.pushInfo( + f"Loaded TDSM (trunk zone), range: {float(np.nanmin(tdsm)):.1f} – {float(np.nanmax(tdsm)):.1f} m" + ) + + lc_arr, lc_gt = _load_optional_raster(parameters, "LAND_COVER", context, param_handler) + land_cover = lc_arr.astype(np.uint8) if lc_arr is not None else None + if land_cover is not None: + feedback.pushInfo("Loaded land cover classification") + + # Compute extent intersection of all loaded layers + bounds_list = [extract_bounds(dsm_gt, dsm.shape)] + for arr, gt in [(cdsm, cdsm_gt), (dem, dem_gt), (tdsm, tdsm_gt), (lc_arr, lc_gt)]: + if arr is not None and gt is not None: + bounds_list.append(extract_bounds(gt, arr.shape)) + + if bbox is not None: + target_bbox = bbox + elif len(bounds_list) > 1: + target_bbox = intersect_bounds(bounds_list) + feedback.pushInfo(f"Auto-computed intersection extent: {target_bbox}") + else: + target_bbox = bounds_list[0] + + # Align all layers to the target grid + dsm = _align_layer(dsm, dsm_gt, target_bbox, pixel_size, "bilinear", crs_wkt) + if cdsm is not None and cdsm_gt is not None: + cdsm = _align_layer(cdsm, cdsm_gt, target_bbox, pixel_size, "bilinear", crs_wkt) + if dem is not None and dem_gt is not None: + dem = _align_layer(dem, dem_gt, target_bbox, pixel_size, "bilinear", crs_wkt) + if tdsm is not None and tdsm_gt is not None: + tdsm = _align_layer(tdsm, tdsm_gt, target_bbox, pixel_size, "bilinear", crs_wkt) + if land_cover is not None and lc_gt is not None: + land_cover = _align_layer( + land_cover.astype(np.float32), + lc_gt, + target_bbox, + pixel_size, + "nearest", + crs_wkt, + ).astype(np.uint8) + + feedback.pushInfo(f"Aligned grid: {dsm.shape[1]}x{dsm.shape[0]} pixels") + + # Build aligned geotransform for the target bbox + aligned_gt = [target_bbox[0], pixel_size, 0, target_bbox[3], 0, -pixel_size] + + # Get per-layer height convention flags (enum: 0=relative, 1=absolute) + dsm_relative = 
_read_height_mode(parameters, "DSM_HEIGHT_MODE", default_absolute=True) + cdsm_relative = _read_height_mode(parameters, "CDSM_HEIGHT_MODE", default_absolute=False) + tdsm_relative = _read_height_mode(parameters, "TDSM_HEIGHT_MODE", default_absolute=False) + + # Create SurfaceData + surface = solweig.SurfaceData( + dsm=dsm, + cdsm=cdsm, + dem=dem, + tdsm=tdsm, + land_cover=land_cover, + pixel_size=pixel_size, + dsm_relative=dsm_relative, + cdsm_relative=cdsm_relative, + tdsm_relative=tdsm_relative, + ) + + # Store geospatial metadata for output georeferencing + surface._geotransform = aligned_gt + surface._crs_wkt = crs_wkt + + # Convert relative heights to absolute where needed + needs_preprocess = dsm_relative or (cdsm_relative and cdsm is not None) or (tdsm_relative and tdsm is not None) + if needs_preprocess: + feedback.pushInfo("Converting relative heights to absolute...") + surface.preprocess() + + # Fill NaN with ground reference, mask invalid pixels, crop to valid bbox + # (uses SurfaceData library methods — single source of truth) + surface.fill_nan() + surface.compute_valid_mask() + surface.apply_valid_mask() + surface.crop_to_valid_bbox() + + feedback.pushInfo(f"After NaN fill + mask + crop: {surface.dsm.shape[1]}x{surface.dsm.shape[0]} pixels") + + # Compute wall heights and aspects from DSM + feedback.setProgressText("Computing wall heights...") + feedback.pushInfo("Computing walls from DSM...") + from solweig.physics import wallalgorithms as wa + + walls = wa.findwalls(surface.dsm, 1.0) + feedback.pushInfo("Computing wall aspects...") + feedback.setProgressText("Computing wall aspects...") + dsm_scale = 1.0 / pixel_size + dirwalls = wa.filter1Goodwin_as_aspect_v3(walls, dsm_scale, surface.dsm, feedback=feedback) + surface.wall_height = walls + surface.wall_aspect = dirwalls + feedback.pushInfo("Wall computation complete") + + # Save cleaned rasters + if output_dir: + surface.save_cleaned(output_dir) + + return surface + + +def load_prepared_surface( + 
surface_dir: str, + feedback: QgsProcessingFeedback, +) -> Any: # Returns solweig.SurfaceData + """ + Load a prepared surface directory into SurfaceData. + + Reads GeoTIFFs and metadata saved by the Surface Preprocessing algorithm. + + Args: + surface_dir: Path to prepared surface directory. + feedback: Processing feedback. + + Returns: + solweig.SurfaceData instance with all arrays loaded. + + Raises: + QgsProcessingException: If required files are missing. + """ + import json + import os + + try: + import solweig + except ImportError as e: + raise QgsProcessingException("SOLWEIG library not found.") from e + + # Load metadata + metadata_path = os.path.join(surface_dir, "metadata.json") + if not os.path.exists(metadata_path): + raise QgsProcessingException( + f"Not a valid prepared surface directory: {surface_dir}\n" + "Missing metadata.json. Run 'Prepare Surface Data' first." + ) + + with open(metadata_path) as f: + metadata = json.load(f) + + feedback.pushInfo(f"Loading prepared surface from {surface_dir}") + + # Load DSM (required) + dsm_path = os.path.join(surface_dir, "dsm.tif") + if not os.path.exists(dsm_path): + raise QgsProcessingException(f"Missing required file: {dsm_path}") + + dsm, gt, crs_wkt = _load_geotiff(dsm_path) + feedback.pushInfo(f"DSM: {dsm.shape[1]}x{dsm.shape[0]} pixels") + + # Load optional rasters + cdsm = _load_geotiff_if_exists(os.path.join(surface_dir, "cdsm.tif")) + dem = _load_geotiff_if_exists(os.path.join(surface_dir, "dem.tif")) + tdsm = _load_geotiff_if_exists(os.path.join(surface_dir, "tdsm.tif")) + lc = _load_geotiff_if_exists(os.path.join(surface_dir, "land_cover.tif")) + land_cover = lc.astype(np.uint8) if lc is not None else None + wall_height = _load_geotiff_if_exists(os.path.join(surface_dir, "wall_height.tif")) + wall_aspect = _load_geotiff_if_exists(os.path.join(surface_dir, "wall_aspect.tif")) + + pixel_size = metadata.get("pixel_size", abs(gt[1])) + + surface = solweig.SurfaceData( + dsm=dsm, + cdsm=cdsm, + dem=dem, + 
tdsm=tdsm, + land_cover=land_cover, + pixel_size=pixel_size, + dsm_relative=False, # Always absolute after preprocessing + cdsm_relative=False, + tdsm_relative=False, + ) + surface._geotransform = gt + surface._crs_wkt = crs_wkt + surface.wall_height = wall_height + surface.wall_aspect = wall_aspect + + layers = ["dsm"] + if cdsm is not None: + layers.append("cdsm") + if dem is not None: + layers.append("dem") + if tdsm is not None: + layers.append("tdsm") + if land_cover is not None: + layers.append("land_cover") + if wall_height is not None: + layers.append("walls") + feedback.pushInfo(f"Loaded layers: {', '.join(layers)}") + + return surface + + +def _load_geotiff(path: str) -> tuple[NDArray[np.floating], list[float], str]: + """Load a GeoTIFF file, returning (array, geotransform, crs_wkt).""" + ds = gdal.Open(path, gdal.GA_ReadOnly) + if ds is None: + raise QgsProcessingException(f"Cannot open raster: {path}") + try: + band = ds.GetRasterBand(1) + array = band.ReadAsArray().astype(np.float32) + nodata = band.GetNoDataValue() + if nodata is not None and nodata < 0: + array = np.where(array == nodata, np.nan, array) + geotransform = list(ds.GetGeoTransform()) + crs_wkt = ds.GetProjection() + return array, geotransform, crs_wkt + finally: + ds = None + + +def _load_geotiff_if_exists(path: str) -> NDArray[np.floating] | None: + """Load a GeoTIFF if it exists, return None otherwise.""" + import os + + if not os.path.exists(path): + return None + arr, _, _ = _load_geotiff(path) + return arr + + +def create_location_from_parameters( + parameters: dict[str, Any], + surface: Any, # solweig.SurfaceData + feedback: QgsProcessingFeedback, +) -> Any: # Returns solweig.Location + """ + Create Location from QGIS processing parameters. + + Supports auto-extraction from DSM CRS or manual input. + + Args: + parameters: Algorithm parameters dict. + surface: SurfaceData instance (for auto-extraction). + feedback: Processing feedback. + + Returns: + solweig.Location instance. 
+ + Raises: + QgsProcessingException: If location cannot be determined. + """ + try: + import solweig + except ImportError as e: + raise QgsProcessingException("SOLWEIG library not found. Please install solweig package.") from e + + utc_offset = parameters.get("UTC_OFFSET", 0) + + if parameters.get("AUTO_EXTRACT_LOCATION", False): + # Extract from DSM CRS + feedback.pushInfo("Auto-extracting location from DSM CRS...") + + if surface._crs_wkt is None: + raise QgsProcessingException("Cannot auto-extract location: DSM has no CRS information") + + # Get center point of raster + gt = surface._geotransform + rows, cols = surface.dsm.shape + center_x = gt[0] + cols * gt[1] / 2 + center_y = gt[3] + rows * gt[5] / 2 + + # Transform to WGS84 + source_srs = osr.SpatialReference() + source_srs.ImportFromWkt(surface._crs_wkt) + + target_srs = osr.SpatialReference() + target_srs.ImportFromEPSG(4326) # WGS84 + + transform = osr.CoordinateTransformation(source_srs, target_srs) + lon, lat, _ = transform.TransformPoint(center_x, center_y) + + feedback.pushInfo(f"Location: {lat:.4f}N, {lon:.4f}E") + + location = solweig.Location( + latitude=lat, + longitude=lon, + utc_offset=utc_offset, + ) + else: + # Use manual input + latitude = parameters.get("LATITUDE") + longitude = parameters.get("LONGITUDE") + + if latitude is None or longitude is None: + raise QgsProcessingException("Latitude and longitude are required when auto-extract is disabled") + + location = solweig.Location( + latitude=latitude, + longitude=longitude, + utc_offset=utc_offset, + ) + feedback.pushInfo(f"Location: {latitude:.4f}N, {longitude:.4f}E") + + return location + + +def create_weather_from_parameters( + parameters: dict[str, Any], + feedback: QgsProcessingFeedback, +) -> Any: # Returns solweig.Weather + """ + Create Weather from QGIS processing parameters. + + Args: + parameters: Algorithm parameters dict. + feedback: Processing feedback. + + Returns: + solweig.Weather instance. 
+ """ + try: + import solweig + except ImportError as e: + raise QgsProcessingException("SOLWEIG library not found. Please install solweig package.") from e + + # Get datetime from QDateTime parameter + qdt = parameters["DATETIME"] + dt = qdt.toPyDateTime() + + weather = solweig.Weather( + datetime=dt, + ta=parameters.get("TEMPERATURE", 25.0), + rh=parameters.get("HUMIDITY", 50.0), + global_rad=parameters.get("GLOBAL_RADIATION", 800.0), + ws=parameters.get("WIND_SPEED", 1.0), + pressure=parameters.get("PRESSURE", 1013.25), + ) + + feedback.pushInfo( + f"Weather: {dt.strftime('%Y-%m-%d %H:%M')}, " + f"Ta={weather.ta:.1f}C, RH={weather.rh:.0f}%, " + f"G={weather.global_rad:.0f}W/m2" + ) + + return weather + + +def create_human_params_from_parameters( + parameters: dict[str, Any], +) -> Any: # Returns solweig.HumanParams + """ + Create HumanParams from QGIS processing parameters. + + Args: + parameters: Algorithm parameters dict. + + Returns: + solweig.HumanParams instance. + """ + try: + import solweig + except ImportError as e: + raise QgsProcessingException("SOLWEIG library not found. 
Please install solweig package.") from e + + # Map posture enum to string + posture_map = {0: "standing", 1: "sitting"} + posture = posture_map.get(parameters.get("POSTURE", 0), "standing") + + # Basic human params + human = solweig.HumanParams( + posture=posture, + abs_k=parameters.get("ABS_K", 0.7), + ) + + # Add detailed body params if present (for PET) + if "WEIGHT" in parameters: + human.weight = parameters["WEIGHT"] + if "HEIGHT" in parameters: + human.height = parameters["HEIGHT"] + if "AGE" in parameters: + human.age = parameters["AGE"] + if "ACTIVITY" in parameters: + human.activity = parameters["ACTIVITY"] + if "CLOTHING" in parameters: + human.clothing = parameters["CLOTHING"] + if "SEX" in parameters: + sex_map = {0: 1, 1: 2} + human.sex = sex_map.get(parameters["SEX"], 1) + + return human + + +def create_physics_from_parameters( + parameters: dict[str, Any], +) -> Any: # Returns types.SimpleNamespace + """ + Create a physics namespace from QGIS vegetation parameters. + + Loads default physics and overrides Tree_settings with user-supplied + transmissivity and seasonal date values. + + Args: + parameters: Algorithm parameters dict. + + Returns: + SimpleNamespace with Tree_settings overridden by QGIS parameters. + """ + try: + from solweig.loaders import load_physics + except ImportError as e: + raise QgsProcessingException("SOLWEIG library not found. 
Please install solweig package.") from e + + physics = load_physics() + ts = physics.Tree_settings.Value + ts.Transmissivity = parameters.get("TRANSMISSIVITY", 0.03) + ts.Transmissivity_leafoff = parameters.get("TRANSMISSIVITY_LEAFOFF", 0.5) + ts.First_day_leaf = int(parameters.get("LEAF_START", 97)) + ts.Last_day_leaf = int(parameters.get("LEAF_END", 300)) + + return physics + + +def build_materials_from_lc_mapping( + parameters: dict[str, Any], + context: QgsProcessingContext, + param_handler: Any, + feedback: QgsProcessingFeedback, +) -> Any: # Returns types.SimpleNamespace + """ + Build a materials namespace from QGIS land cover mapping parameters. + + Priority (highest first): + 1. CUSTOM_MATERIALS_FILE — loads a full JSON override. + 2. LC_MATERIALS matrix table — per-code properties + (Code, Name, Albedo, Emissivity, TgK, Tstart, TmaxLST). + 3. Bundled UMEP defaults. + + Args: + parameters: Algorithm parameters dict. + context: Processing context. + param_handler: Algorithm instance (for parameterAs* methods). + feedback: Processing feedback for logging. + + Returns: + SimpleNamespace compatible with ``solweig.calculate(materials=...)``. + """ + try: + from solweig.loaders import load_params + except ImportError as e: + raise QgsProcessingException("SOLWEIG library not found. 
Please install solweig package.") from e + + # Custom JSON takes priority over everything + custom_path = param_handler.parameterAsFile(parameters, "CUSTOM_MATERIALS_FILE", context) + if custom_path: + feedback.pushInfo(f"Using custom materials file: {custom_path}") + return load_params(custom_path) + + # Load bundled defaults as base + materials = load_params() + + # Apply material properties from the table + _apply_lc_materials(parameters, context, param_handler, materials, feedback) + + return materials + + +def _apply_lc_materials( + parameters: dict[str, Any], + context: QgsProcessingContext, + param_handler: Any, + materials: Any, + feedback: QgsProcessingFeedback, +) -> None: + """Parse LC_MATERIALS matrix and set material properties in-place. + + Each row has 7 values: Code, Name, Albedo, Emissivity, TgK, Tstart, TmaxLST. + A surface type name is registered in ``materials.Names.Value`` for each code + so the standard lookup chain resolves correctly. + """ + raw = parameters.get("LC_MATERIALS") + if not raw: + return + + # QgsProcessingParameterMatrix stores values as a flat list + flat: list[str] = [ + str(v) + for v in ( + param_handler.parameterAsMatrix(parameters, "LC_MATERIALS", context) + if hasattr(param_handler, "parameterAsMatrix") + else raw + ) + ] + n_cols = 7 # Code, Name, Albedo, Emissivity, TgK, Tstart, TmaxLST + if len(flat) < n_cols: + return + + prop_sections = ["Albedo.Effective", "Emissivity", "Ts_deg", "Tstart", "TmaxLST"] + + for row_start in range(0, len(flat) - n_cols + 1, n_cols): + row = flat[row_start : row_start + n_cols] + try: + code = int(float(row[0])) + except (ValueError, TypeError): + continue + + name = row[1].strip() if row[1].strip() else f"LC_{code}" + + # Parse the 5 property columns + values: list[float | None] = [] + for cell in row[2:]: + cell = cell.strip() if isinstance(cell, str) else str(cell).strip() + if not cell: + values.append(None) + else: + try: + values.append(float(cell)) + except (ValueError, 
TypeError): + values.append(None) + + if all(v is None for v in values): + continue + + # Register the type name for this code + type_name = f"LC_{code}_{name.replace(' ', '_')}" + setattr(materials.Names.Value, str(code), type_name) + + # Resolve base values from the UMEP default for this code (if any) + # so that empty cells inherit sensible defaults + default_name = getattr(materials.Names.Value, str(code), None) + if default_name == type_name: + default_name = "Cobble_stone_2014a" # fallback + + for i, section_path in enumerate(prop_sections): + parts = section_path.split(".") + ns = materials + for part in parts: + ns = getattr(ns, part, ns) + ns = getattr(ns, "Value", ns) + + base_val = getattr(ns, default_name, None) if default_name else None + final_val = values[i] if values[i] is not None else base_val + if final_val is not None: + setattr(ns, type_name, final_val) + + feedback.pushInfo( + f" LC code {code} ({name}): " + f"albedo={values[0]}, emis={values[1]}, TgK={values[2]}, " + f"Tstart={values[3]}, TmaxLST={values[4]}" + ) + + +def load_weather_from_epw( + epw_path: str, + start_dt: Any | None, # QDateTime, datetime, or None + end_dt: Any | None, # QDateTime, datetime, or None + hours_filter: str | None, + feedback: QgsProcessingFeedback, +) -> list: # Returns list[solweig.Weather] + """ + Load weather data from EPW file with optional filtering. + + Args: + epw_path: Path to EPW file. + start_dt: Start datetime (inclusive), or None for EPW start. + end_dt: End datetime (inclusive), or None for EPW end. + hours_filter: Comma-separated hours to include (e.g., "9,10,11,12"). + feedback: Processing feedback. + + Returns: + List of solweig.Weather objects. + + Raises: + QgsProcessingException: If EPW file cannot be read or dates don't overlap. + """ + try: + import solweig + from solweig.io import read_epw + except ImportError as e: + raise QgsProcessingException("SOLWEIG library not found. 
Please install solweig package.") from e + + # Convert QDateTime to Python naive datetime + if start_dt is not None and hasattr(start_dt, "toPyDateTime"): + start_dt = start_dt.toPyDateTime() + if end_dt is not None and hasattr(end_dt, "toPyDateTime"): + end_dt = end_dt.toPyDateTime() + # Strip timezone info to avoid aware/naive comparison errors + if start_dt is not None and start_dt.tzinfo is not None: + start_dt = start_dt.replace(tzinfo=None) + if end_dt is not None and end_dt.tzinfo is not None: + end_dt = end_dt.replace(tzinfo=None) + + # Parse hours filter + hours_list = None + if hours_filter: + try: + hours_str = hours_filter.replace(" ", "") + hours_list = [int(h) for h in hours_str.split(",")] + feedback.pushInfo(f"Hour filter: {hours_list}") + except ValueError: + feedback.reportError( + f"Invalid hours filter: {hours_filter}. Using all hours.", + fatalError=False, + ) + + # Read EPW file + try: + df, metadata = read_epw(epw_path) + except FileNotFoundError as e: + raise QgsProcessingException(f"EPW file not found: {epw_path}") from e + except Exception as e: + raise QgsProcessingException(f"Error reading EPW file: {e}") from e + + feedback.pushInfo( + f"EPW location: {metadata.get('city', 'Unknown')}, " + f"lat={metadata.get('latitude', 'N/A')}, lon={metadata.get('longitude', 'N/A')}" + ) + + # Report EPW date range + epw_start = df.index.min() + epw_end = df.index.max() + feedback.pushInfo(f"EPW date range: {epw_start} to {epw_end}") + + # Default to full EPW range when dates not provided + if start_dt is None: + start_dt = epw_start if isinstance(epw_start, datetime) else epw_start.to_pydatetime() + feedback.pushInfo("No start date specified — using EPW start") + if end_dt is None: + end_dt = epw_end if isinstance(epw_end, datetime) else epw_end.to_pydatetime() + feedback.pushInfo("No end date specified — using EPW end") + + # Filter by date range + mask = (df.index >= start_dt) & (df.index <= end_dt) + df_filtered = df[mask] + + # TMY EPW files mix 
years (e.g., Jan from 2015, Feb from 2009). + # If exact date filtering yields nothing, match by month-day-hour instead. + if len(df_filtered) == 0: + feedback.pushInfo( + "No exact date matches — trying month/day filter " + "(EPW may be a Typical Meteorological Year with mixed years)" + ) + start_md = (start_dt.month, start_dt.day, start_dt.hour) + end_md = (end_dt.month, end_dt.day, end_dt.hour) + + def _md_tuple(ts): + return (ts.month, ts.day, ts.hour) + + if start_md <= end_md: + # Same-year range (e.g., Feb 1 – Feb 7) + mask = [start_md <= _md_tuple(t) <= end_md for t in df.index] + else: + # Cross-year range (e.g., Dec 15 – Jan 15) + mask = [_md_tuple(t) >= start_md or _md_tuple(t) <= end_md for t in df.index] + + df_filtered = df[mask] + + # Filter by hours if specified + if hours_list: + df_filtered = df_filtered[df_filtered.index.hour.isin(hours_list)] + + if len(df_filtered) == 0: + raise QgsProcessingException( + f"No timesteps found between {start_dt} and {end_dt}.\n" + f"The EPW file contains data from {epw_start} to {epw_end}.\n" + f"Please adjust the date range to overlap with the EPW data." 
+ ) + + # Convert to Weather objects — normalize timestamps to requested year + target_year = start_dt.year + weather_series = [] + for timestamp, row in df_filtered.iterrows(): + dt = timestamp.to_pydatetime() if hasattr(timestamp, "to_pydatetime") else timestamp + # Remap to target year so timestamps are contiguous + try: + dt = dt.replace(year=target_year) + except ValueError: + # Feb 29 in a non-leap target year → skip + continue + w = solweig.Weather( + datetime=dt, + ta=float(row["temp_air"]) if not np.isnan(row["temp_air"]) else 20.0, + rh=float(row["relative_humidity"]) if not np.isnan(row["relative_humidity"]) else 50.0, + global_rad=float(row["ghi"]) if not np.isnan(row["ghi"]) else 0.0, + ws=float(row["wind_speed"]) if not np.isnan(row["wind_speed"]) else 1.0, + pressure=(float(row["atmospheric_pressure"]) / 100.0) # Pa → hPa + if not np.isnan(row["atmospheric_pressure"]) + else 1013.25, + measured_direct_rad=float(row["dni"]) if not np.isnan(row["dni"]) else None, + measured_diffuse_rad=float(row["dhi"]) if not np.isnan(row["dhi"]) else None, + ) + weather_series.append(w) + + if not weather_series: + raise QgsProcessingException( + f"No timesteps found between {start_dt} and {end_dt}.\n" + f"The EPW file contains data from {epw_start} to {epw_end}.\n" + f"Please adjust the date range to overlap with the EPW data." + ) + + feedback.pushInfo(f"Loaded {len(weather_series)} timesteps from EPW") + feedback.pushInfo(f"Period: {weather_series[0].datetime} to {weather_series[-1].datetime}") + + return weather_series + + +def load_weather_from_umep_met( + met_path: str, + start_dt: Any | None, + end_dt: Any | None, + hours_filter: str | None, + feedback: QgsProcessingFeedback, +) -> list: # Returns list[solweig.Weather] + """ + Load weather data from a UMEP/SUEWS meteorological forcing file. + + Args: + met_path: Path to UMEP met file. + start_dt: Start datetime (inclusive), or None for full range. + end_dt: End datetime (inclusive), or None for full range. 
+ hours_filter: Comma-separated hours to include (e.g., "9,10,11,12"). + feedback: Processing feedback. + + Returns: + List of solweig.Weather objects. + + Raises: + QgsProcessingException: If file cannot be read or no data found. + """ + try: + from solweig.models.weather import Weather + except ImportError as e: + raise QgsProcessingException("SOLWEIG library not found. Please install solweig package.") from e + + if not met_path: + raise QgsProcessingException("No UMEP met file specified.") + + feedback.pushInfo(f"Loading UMEP met file: {met_path}") + + # Convert QDateTime to Python naive datetime + if start_dt is not None and hasattr(start_dt, "toPyDateTime"): + start_dt = start_dt.toPyDateTime() + if end_dt is not None and hasattr(end_dt, "toPyDateTime"): + end_dt = end_dt.toPyDateTime() + if start_dt is not None and start_dt.tzinfo is not None: + start_dt = start_dt.replace(tzinfo=None) + if end_dt is not None and end_dt.tzinfo is not None: + end_dt = end_dt.replace(tzinfo=None) + + # Parse hours filter + hours_list = None + if hours_filter: + try: + hours_str = hours_filter.replace(" ", "") + hours_list = [int(h) for h in hours_str.split(",")] + feedback.pushInfo(f"Hour filter: {hours_list}") + except ValueError: + feedback.reportError( + f"Invalid hours filter: {hours_filter}. 
Using all hours.", + fatalError=False, + ) + + # Load via Weather.from_umep_met() + try: + weather_series = Weather.from_umep_met( + paths=[met_path], + resample_hourly=True, + start=start_dt, + end=end_dt, + ) + except FileNotFoundError as e: + raise QgsProcessingException(f"UMEP met file not found: {e}") from e + except ValueError as e: + raise QgsProcessingException(f"Error reading UMEP met file: {e}") from e + + if not weather_series: + raise QgsProcessingException("No valid timesteps found in UMEP met file.") + + # Report date range + met_start = weather_series[0].datetime + met_end = weather_series[-1].datetime + feedback.pushInfo(f"UMEP met date range: {met_start} to {met_end}") + + # Apply hours filter if specified + if hours_list: + weather_series = [w for w in weather_series if w.datetime.hour in hours_list] + if not weather_series: + raise QgsProcessingException( + f"No timesteps remaining after hour filter {hours_list}.\n" + f"The data contains hours from {met_start} to {met_end}." + ) + + feedback.pushInfo(f"Loaded {len(weather_series)} timesteps from UMEP met") + feedback.pushInfo(f"Period: {weather_series[0].datetime} to {weather_series[-1].datetime}") + + return weather_series diff --git a/qgis_plugin/solweig_qgis/utils/parameters.py b/qgis_plugin/solweig_qgis/utils/parameters.py new file mode 100644 index 0000000..331a428 --- /dev/null +++ b/qgis_plugin/solweig_qgis/utils/parameters.py @@ -0,0 +1,640 @@ +""" +Common parameter definitions for SOLWEIG algorithms. + +Provides reusable parameter builders for consistent UI across algorithms. 
+""" + +from __future__ import annotations + +from typing import TYPE_CHECKING + +from qgis.core import ( + QgsProcessingParameterBoolean, + QgsProcessingParameterDateTime, + QgsProcessingParameterDefinition, + QgsProcessingParameterEnum, + QgsProcessingParameterFile, + QgsProcessingParameterFolderDestination, + QgsProcessingParameterMatrix, + QgsProcessingParameterNumber, + QgsProcessingParameterRasterDestination, + QgsProcessingParameterRasterLayer, + QgsProcessingParameterString, +) + +if TYPE_CHECKING: + from qgis.core import QgsProcessingAlgorithm + + +def _canvas_center_latlon() -> tuple[float, float]: + """Return (lat, lon) of the current map canvas centre in WGS 84. + + Falls back to (0, 0) when the canvas is not available (e.g. headless). + """ + try: + from qgis.core import QgsCoordinateReferenceSystem, QgsCoordinateTransform, QgsProject + from qgis.utils import iface + + canvas = iface.mapCanvas() + center = canvas.center() + project_crs = canvas.mapSettings().destinationCrs() + wgs84 = QgsCoordinateReferenceSystem("EPSG:4326") + + if project_crs != wgs84: + xform = QgsCoordinateTransform(project_crs, wgs84, QgsProject.instance()) + center = xform.transform(center) + + return round(center.y(), 4), round(center.x(), 4) + except Exception: + return 0.0, 0.0 + + +def add_surface_parameters(algorithm: QgsProcessingAlgorithm) -> None: + """ + Add standard surface data input parameters. 
+ + Parameters added: + DSM (required): Digital Surface Model + DSM_HEIGHT_MODE: DSM height convention (0=relative, 1=absolute) + CDSM (optional): Canopy DSM (vegetation heights) + CDSM_HEIGHT_MODE: CDSM height convention (0=relative, 1=absolute) + DEM (optional): Digital Elevation Model (ground) + TDSM (optional): Trunk zone DSM + TDSM_HEIGHT_MODE: TDSM height convention (0=relative, 1=absolute) + LAND_COVER (optional): Land cover classification + """ + _height_options = [ + "Relative — above ground", + "Absolute — above sea level", + ] + + algorithm.addParameter( + QgsProcessingParameterRasterLayer( + "DSM", + algorithm.tr("Digital Surface Model (DSM)"), + optional=False, + ) + ) + algorithm.addParameter( + QgsProcessingParameterEnum( + "DSM_HEIGHT_MODE", + algorithm.tr("DSM height convention"), + options=_height_options, + defaultValue=1, # Absolute (most common for DSM) + ) + ) + + algorithm.addParameter( + QgsProcessingParameterRasterLayer( + "CDSM", + algorithm.tr("Canopy DSM (vegetation heights)"), + optional=True, + ) + ) + algorithm.addParameter( + QgsProcessingParameterEnum( + "CDSM_HEIGHT_MODE", + algorithm.tr("CDSM height convention"), + options=_height_options, + defaultValue=0, # Relative (most common for CDSM) + ) + ) + + algorithm.addParameter( + QgsProcessingParameterRasterLayer( + "DEM", + algorithm.tr("Digital Elevation Model (ground)"), + optional=True, + ) + ) + + algorithm.addParameter( + QgsProcessingParameterRasterLayer( + "TDSM", + algorithm.tr("Trunk zone DSM"), + optional=True, + ) + ) + algorithm.addParameter( + QgsProcessingParameterEnum( + "TDSM_HEIGHT_MODE", + algorithm.tr("TDSM height convention"), + options=_height_options, + defaultValue=0, # Relative (most common for TDSM) + ) + ) + + algorithm.addParameter( + QgsProcessingParameterRasterLayer( + "LAND_COVER", + algorithm.tr("Land cover classification (UMEP IDs)"), + optional=True, + ) + ) + + +def add_location_parameters(algorithm: QgsProcessingAlgorithm) -> None: + """ + Add 
location parameters with auto-extraction option. + + Parameters added: + AUTO_EXTRACT_LOCATION: Extract lat/lon from DSM CRS + LATITUDE: Manual latitude input + LONGITUDE: Manual longitude input + UTC_OFFSET: UTC timezone offset + """ + algorithm.addParameter( + QgsProcessingParameterBoolean( + "AUTO_EXTRACT_LOCATION", + algorithm.tr("Auto-extract location from DSM CRS"), + defaultValue=False, + ) + ) + + canvas_lat, canvas_lon = _canvas_center_latlon() + + algorithm.addParameter( + QgsProcessingParameterNumber( + "LATITUDE", + algorithm.tr("Latitude (degrees)"), + type=QgsProcessingParameterNumber.Type.Double, + defaultValue=canvas_lat, + minValue=-90.0, + maxValue=90.0, + optional=True, + ) + ) + + algorithm.addParameter( + QgsProcessingParameterNumber( + "LONGITUDE", + algorithm.tr("Longitude (degrees)"), + type=QgsProcessingParameterNumber.Type.Double, + defaultValue=canvas_lon, + minValue=-180.0, + maxValue=180.0, + optional=True, + ) + ) + + algorithm.addParameter( + QgsProcessingParameterNumber( + "UTC_OFFSET", + algorithm.tr("UTC offset (hours)"), + type=QgsProcessingParameterNumber.Type.Double, + defaultValue=0, + minValue=-12, + maxValue=14, + ) + ) + + +def add_weather_parameters(algorithm: QgsProcessingAlgorithm) -> None: + """ + Add weather parameters for single timestep calculation. 
+ + Parameters added: + DATETIME: Date and time of calculation + TEMPERATURE: Air temperature (°C) + HUMIDITY: Relative humidity (%) + GLOBAL_RADIATION: Global solar radiation (W/m²) + WIND_SPEED: Wind speed (m/s) + PRESSURE: Atmospheric pressure (hPa) + """ + algorithm.addParameter( + QgsProcessingParameterDateTime( + "DATETIME", + algorithm.tr("Date and time"), + type=QgsProcessingParameterDateTime.Type.DateTime, + ) + ) + + algorithm.addParameter( + QgsProcessingParameterNumber( + "TEMPERATURE", + algorithm.tr("Air temperature (°C)"), + type=QgsProcessingParameterNumber.Type.Double, + defaultValue=25.0, + minValue=-50.0, + maxValue=60.0, + ) + ) + + algorithm.addParameter( + QgsProcessingParameterNumber( + "HUMIDITY", + algorithm.tr("Relative humidity (%)"), + type=QgsProcessingParameterNumber.Type.Double, + defaultValue=50.0, + minValue=0.0, + maxValue=100.0, + ) + ) + + algorithm.addParameter( + QgsProcessingParameterNumber( + "GLOBAL_RADIATION", + algorithm.tr("Global solar radiation (W/m²)"), + type=QgsProcessingParameterNumber.Type.Double, + defaultValue=800.0, + minValue=0.0, + maxValue=1400.0, + ) + ) + + algorithm.addParameter( + QgsProcessingParameterNumber( + "WIND_SPEED", + algorithm.tr("Wind speed (m/s)"), + type=QgsProcessingParameterNumber.Type.Double, + defaultValue=1.0, + minValue=0.0, + maxValue=50.0, + ) + ) + + algorithm.addParameter( + QgsProcessingParameterNumber( + "PRESSURE", + algorithm.tr("Atmospheric pressure (hPa)"), + type=QgsProcessingParameterNumber.Type.Double, + defaultValue=1013.25, + minValue=800.0, + maxValue=1100.0, + ) + ) + + +def add_human_parameters(algorithm: QgsProcessingAlgorithm) -> None: + """ + Add human body parameters. 
+ + Parameters added: + POSTURE: Standing or sitting + ABS_K: Shortwave absorption coefficient + """ + algorithm.addParameter( + QgsProcessingParameterEnum( + "POSTURE", + algorithm.tr("Body posture"), + options=["Standing", "Sitting"], + defaultValue=0, # Standing + ) + ) + + algorithm.addParameter( + QgsProcessingParameterNumber( + "ABS_K", + algorithm.tr("Shortwave absorption coefficient"), + type=QgsProcessingParameterNumber.Type.Double, + defaultValue=0.7, + minValue=0.0, + maxValue=1.0, + ) + ) + + +def add_human_body_parameters(algorithm: QgsProcessingAlgorithm) -> None: + """ + Add detailed human body parameters for PET calculation. + + Parameters added: + AGE, WEIGHT, HEIGHT, SEX, ACTIVITY, CLOTHING + """ + algorithm.addParameter( + QgsProcessingParameterNumber( + "AGE", + algorithm.tr("Age (years)"), + type=QgsProcessingParameterNumber.Type.Integer, + defaultValue=35, + minValue=1, + maxValue=120, + ) + ) + + algorithm.addParameter( + QgsProcessingParameterNumber( + "WEIGHT", + algorithm.tr("Body weight (kg)"), + type=QgsProcessingParameterNumber.Type.Double, + defaultValue=75.0, + minValue=20.0, + maxValue=200.0, + ) + ) + + algorithm.addParameter( + QgsProcessingParameterNumber( + "HEIGHT", + algorithm.tr("Body height (m)"), + type=QgsProcessingParameterNumber.Type.Double, + defaultValue=1.75, + minValue=1.0, + maxValue=2.5, + ) + ) + + algorithm.addParameter( + QgsProcessingParameterEnum( + "SEX", + algorithm.tr("Sex"), + options=["Male", "Female"], + defaultValue=0, + ) + ) + + algorithm.addParameter( + QgsProcessingParameterNumber( + "ACTIVITY", + algorithm.tr("Metabolic activity (W)"), + type=QgsProcessingParameterNumber.Type.Double, + defaultValue=80.0, + minValue=40.0, + maxValue=500.0, + ) + ) + + algorithm.addParameter( + QgsProcessingParameterNumber( + "CLOTHING", + algorithm.tr("Clothing insulation (clo)"), + type=QgsProcessingParameterNumber.Type.Double, + defaultValue=0.9, + minValue=0.0, + maxValue=2.0, + ) + ) + + +def 
add_options_parameters(algorithm: QgsProcessingAlgorithm) -> None: + """ + Add calculation options parameters. + + Parameters added: + USE_ANISOTROPIC_SKY: Enable anisotropic sky model + CONIFER: Treat vegetation as evergreen + SVF_DIR: Override SVF directory (optional) + MAX_SHADOW_DISTANCE: Maximum horizontal shadow distance in metres + """ + + algorithm.addParameter( + QgsProcessingParameterBoolean( + "USE_ANISOTROPIC_SKY", + algorithm.tr("Use anisotropic sky model"), + defaultValue=True, + ) + ) + + algorithm.addParameter( + QgsProcessingParameterBoolean( + "CONIFER", + algorithm.tr("Treat vegetation as evergreen (conifer)"), + defaultValue=False, + ) + ) + + algorithm.addParameter( + QgsProcessingParameterFile( + "SVF_DIR", + algorithm.tr("Override SVF directory (SVF is included in prepared surface by default)"), + behavior=QgsProcessingParameterFile.Behavior.Folder, + optional=True, + ) + ) + + max_shadow = QgsProcessingParameterNumber( + "MAX_SHADOW_DISTANCE", + algorithm.tr("Maximum shadow distance (m) — caps horizontal shadow ray reach"), + type=QgsProcessingParameterNumber.Type.Double, + defaultValue=1000.0, + minValue=50.0, + maxValue=5000.0, + ) + max_shadow.setFlags(max_shadow.flags() | QgsProcessingParameterDefinition.Flag.FlagAdvanced) + algorithm.addParameter(max_shadow) + + +def add_vegetation_parameters(algorithm: QgsProcessingAlgorithm) -> None: + """Add vegetation transmissivity parameters (advanced). 
+ + Parameters added: + TRANSMISSIVITY: Leaf-on canopy transmissivity (0-1) + TRANSMISSIVITY_LEAFOFF: Leaf-off (winter) transmissivity (0-1) + LEAF_START: First day of year with leaves (1-366) + LEAF_END: Last day of year with leaves (1-366) + """ + + trans_on = QgsProcessingParameterNumber( + "TRANSMISSIVITY", + algorithm.tr("Vegetation transmissivity — leaf-on season (0 = opaque, 1 = transparent)"), + type=QgsProcessingParameterNumber.Type.Double, + defaultValue=0.03, + minValue=0.0, + maxValue=1.0, + ) + trans_on.setFlags(trans_on.flags() | QgsProcessingParameterDefinition.Flag.FlagAdvanced) + algorithm.addParameter(trans_on) + + trans_off = QgsProcessingParameterNumber( + "TRANSMISSIVITY_LEAFOFF", + algorithm.tr("Vegetation transmissivity — leaf-off season (bare branches)"), + type=QgsProcessingParameterNumber.Type.Double, + defaultValue=0.5, + minValue=0.0, + maxValue=1.0, + ) + trans_off.setFlags(trans_off.flags() | QgsProcessingParameterDefinition.Flag.FlagAdvanced) + algorithm.addParameter(trans_off) + + leaf_start = QgsProcessingParameterNumber( + "LEAF_START", + algorithm.tr("First day of year with leaves (1–366)"), + type=QgsProcessingParameterNumber.Type.Integer, + defaultValue=97, + minValue=1, + maxValue=366, + ) + leaf_start.setFlags(leaf_start.flags() | QgsProcessingParameterDefinition.Flag.FlagAdvanced) + algorithm.addParameter(leaf_start) + + leaf_end = QgsProcessingParameterNumber( + "LEAF_END", + algorithm.tr("Last day of year with leaves (1–366)"), + type=QgsProcessingParameterNumber.Type.Integer, + defaultValue=300, + minValue=1, + maxValue=366, + ) + leaf_end.setFlags(leaf_end.flags() | QgsProcessingParameterDefinition.Flag.FlagAdvanced) + algorithm.addParameter(leaf_end) + + +def add_land_cover_mapping_parameters(algorithm: QgsProcessingAlgorithm) -> None: + """Add land cover material properties table (advanced). + + Creates a pre-populated matrix table mapping integer land cover codes to + surface material properties. 
Defaults match the UMEP standard. Users can + edit values in-place, add rows for additional codes, or provide a full + custom ``parametersforsolweig.json`` file. + + Parameters added: + LC_MATERIALS: Matrix table (Code, Name, Albedo, Emissivity, TgK, Tstart, TmaxLST) + CUSTOM_MATERIALS_FILE: Optional custom materials JSON (overrides table) + """ + + # UMEP standard defaults as flat list (7 columns per row) + # fmt: off + umep_defaults = [ + 0, "Paved", 0.20, 0.95, 0.37, -3.41, 15.0, + 1, "Asphalt", 0.18, 0.95, 0.58, -9.78, 15.0, + 2, "Buildings", 0.18, 0.95, 0.58, -9.78, 15.0, + 5, "Grass", 0.16, 0.94, 0.21, -3.38, 14.0, + 6, "Bare soil", 0.25, 0.94, 0.33, -3.01, 14.0, + 7, "Water", 0.05, 0.98, 0.00, 0.00, 12.0, + ] + # fmt: on + + materials = QgsProcessingParameterMatrix( + "LC_MATERIALS", + algorithm.tr("Land cover material properties"), + headers=[ + "Code", + "Name", + "Albedo", + "Emissivity", + "TgK (Ts_deg)", + "Tstart", + "TmaxLST", + ], + hasFixedNumberRows=False, + numberRows=6, + defaultValue=umep_defaults, + optional=True, + ) + materials.setFlags(materials.flags() | QgsProcessingParameterDefinition.Flag.FlagAdvanced) + algorithm.addParameter(materials) + + custom_file = QgsProcessingParameterFile( + "CUSTOM_MATERIALS_FILE", + algorithm.tr("Custom materials JSON (overrides table)"), + extension="json", + optional=True, + ) + custom_file.setFlags(custom_file.flags() | QgsProcessingParameterDefinition.Flag.FlagAdvanced) + algorithm.addParameter(custom_file) + + +def add_heat_threshold_parameters(algorithm: QgsProcessingAlgorithm) -> None: + """Add UTCI heat-stress threshold parameters for timeseries summary. 
+ + Parameters added: + HEAT_THRESHOLDS_DAY: Comma-separated UTCI thresholds for daytime (°C) + HEAT_THRESHOLDS_NIGHT: Comma-separated UTCI thresholds for nighttime (°C) + """ + algorithm.addParameter( + QgsProcessingParameterString( + "HEAT_THRESHOLDS_DAY", + algorithm.tr("Daytime UTCI thresholds (°C, comma-separated)"), + defaultValue="32, 38", + optional=True, + ) + ) + + algorithm.addParameter( + QgsProcessingParameterString( + "HEAT_THRESHOLDS_NIGHT", + algorithm.tr("Nighttime UTCI thresholds (°C, comma-separated)"), + defaultValue="26", + optional=True, + ) + ) + + +def add_output_tmrt_parameter(algorithm: QgsProcessingAlgorithm) -> None: + """Add Tmrt output raster parameter.""" + algorithm.addParameter( + QgsProcessingParameterRasterDestination( + "OUTPUT_TMRT", + algorithm.tr("Mean Radiant Temperature (Tmrt)"), + ) + ) + + +def add_output_dir_parameter(algorithm: QgsProcessingAlgorithm) -> None: + """Add output directory parameter.""" + algorithm.addParameter( + QgsProcessingParameterFolderDestination( + "OUTPUT_DIR", + algorithm.tr("Output directory"), + ) + ) + + +def add_epw_parameters(algorithm: QgsProcessingAlgorithm) -> None: + """ + Add EPW weather file parameter. + + Parameters added: + EPW_FILE: Path to EPW file + """ + algorithm.addParameter( + QgsProcessingParameterFile( + "EPW_FILE", + algorithm.tr("EPW weather file"), + extension="epw", + optional=True, + ) + ) + + +def add_umep_met_parameters(algorithm: QgsProcessingAlgorithm) -> None: + """ + Add UMEP/SUEWS meteorological file parameter. + + Parameters added: + UMEP_MET_FILE: Path to UMEP/SUEWS met file + """ + algorithm.addParameter( + QgsProcessingParameterFile( + "UMEP_MET_FILE", + algorithm.tr("UMEP/SUEWS meteorological forcing file"), + extension="txt", + optional=True, + ) + ) + + +def add_date_filter_parameters(algorithm: QgsProcessingAlgorithm) -> None: + """ + Add shared date/time filter parameters (used by EPW and UMEP modes). 
+ + Parameters added: + START_DATE: Start date for filtering + END_DATE: End date for filtering + HOURS_FILTER: Comma-separated hours to include + """ + algorithm.addParameter( + QgsProcessingParameterDateTime( + "START_DATE", + algorithm.tr("Start date (leave empty for full range)"), + type=QgsProcessingParameterDateTime.Type.DateTime, + optional=True, + ) + ) + + algorithm.addParameter( + QgsProcessingParameterDateTime( + "END_DATE", + algorithm.tr("End date (leave empty for full range)"), + type=QgsProcessingParameterDateTime.Type.DateTime, + optional=True, + ) + ) + + algorithm.addParameter( + QgsProcessingParameterString( + "HOURS_FILTER", + algorithm.tr("Hours to include (comma-separated, e.g., 9,10,11,12)"), + optional=True, + ) + ) diff --git a/rust/Cargo.toml b/rust/Cargo.toml index aed9439..8907ec6 100644 --- a/rust/Cargo.toml +++ b/rust/Cargo.toml @@ -7,7 +7,7 @@ edition = "2021" crate-type = ["cdylib"] [dependencies] -pyo3 = { version = "0.24.2", features = ["extension-module"] } +pyo3 = { version = "0.24.2", features = ["extension-module", "abi3-py39"] } numpy = "0.24.0" ndarray = { version = "0.16.1", features = ["rayon"] } rayon = "1.10.0" diff --git a/rust/src/gpu/aniso_gpu.rs b/rust/src/gpu/aniso_gpu.rs new file mode 100644 index 0000000..eea89d0 --- /dev/null +++ b/rust/src/gpu/aniso_gpu.rs @@ -0,0 +1,1052 @@ +//! GPU-accelerated anisotropic sky radiation computation. +//! +//! Fuses the per-pixel × per-patch loop from `anisotropic_sky_pure()` onto a +//! single GPU compute dispatch. Each thread handles one pixel and iterates +//! over all sky patches, accumulating longwave and shortwave radiation. +//! +//! Shares `Arc` and `Arc` with `ShadowGpuContext`. + +use ndarray::{Array2, ArrayView1, ArrayView2, ArrayView3}; +use std::sync::mpsc; +use std::sync::{Arc, Mutex}; + +/// Ensures mapped staging buffers are always unmapped on scope exit. 
+struct MappedBufferGuard<'a> { + buffer: &'a wgpu::Buffer, +} + +impl<'a> MappedBufferGuard<'a> { + fn new(buffer: &'a wgpu::Buffer) -> Self { + Self { buffer } + } +} + +impl Drop for MappedBufferGuard<'_> { + fn drop(&mut self) { + self.buffer.unmap(); + } +} + +// ── Uniform buffer (must match Params in anisotropic_sky.wgsl) ─────────── + +#[repr(C)] +#[derive(Copy, Clone, bytemuck::Pod, bytemuck::Zeroable)] +struct AnisoParams { + total_pixels: u32, + cols: u32, + rows: u32, + n_patches: u32, + n_pack: u32, + cyl: u32, + solar_altitude: f32, + solar_azimuth: f32, + ta: f32, + albedo: f32, + tgwall: f32, + ewall: f32, + rad_i: f32, + rad_d: f32, + psi: f32, + rad_tot: f32, +} + +// ── Cached GPU buffers ─────────────────────────────────────────────────── + +#[derive(Clone, Copy, PartialEq, Eq)] +struct ViewSig3D { + ptr: usize, + d0: usize, + d1: usize, + d2: usize, + s0: isize, + s1: isize, + s2: isize, + sample_hash: u64, +} + +#[derive(Clone, Copy, PartialEq, Eq)] +struct ViewSig2D { + ptr: usize, + d0: usize, + d1: usize, + s0: isize, + s1: isize, + sample_hash: u64, +} + +#[derive(Clone, Copy, PartialEq, Eq)] +struct ViewSig1D { + ptr: usize, + d0: usize, + s0: isize, + sample_hash: u64, +} + +#[derive(Clone, Copy, PartialEq, Eq)] +struct StaticInputSig { + sh_ptr: usize, + veg_ptr: usize, + vb_ptr: usize, + asvf_ptr: usize, + valid_ptr: usize, + patch_alt_ptr: usize, + patch_azi_ptr: usize, + ster_ptr: usize, + rows: usize, + cols: usize, + n_pack: usize, + n_patches: usize, +} + +struct CachedBuffers { + rows: usize, + cols: usize, + n_pack: usize, + n_patches: usize, + // Uniform + params_buffer: wgpu::Buffer, + // Per-pixel inputs + shmat_buffer: wgpu::Buffer, + vegshmat_buffer: wgpu::Buffer, + vbshvegshmat_buffer: wgpu::Buffer, + asvf_buffer: wgpu::Buffer, + lup_buffer: wgpu::Buffer, + valid_buffer: wgpu::Buffer, + // Per-patch LUTs + patch_alt_buffer: wgpu::Buffer, + patch_azi_buffer: wgpu::Buffer, + steradians_buffer: wgpu::Buffer, + 
esky_band_buffer: wgpu::Buffer,
+    lum_chi_buffer: wgpu::Buffer,
+    // Outputs
+    out_ldown_buffer: wgpu::Buffer,
+    out_lside_buffer: wgpu::Buffer,
+    out_kside_partial_buffer: wgpu::Buffer,
+    out_drad_buffer: wgpu::Buffer,
+    // Staging for readback
+    staging_buffer: wgpu::Buffer,
+    // Bind groups
+    bind_group: wgpu::BindGroup,
+    // Signatures for static inputs (skip uploads when unchanged)
+    last_shmat_sig: Option<ViewSig3D>,
+    last_vegshmat_sig: Option<ViewSig3D>,
+    last_vbshmat_sig: Option<ViewSig3D>,
+    last_asvf_sig: Option<ViewSig2D>,
+    last_valid_sig: Option<ViewSig2D>,
+    last_patch_alt_sig: Option<ViewSig1D>,
+    last_patch_azi_sig: Option<ViewSig1D>,
+    last_steradians_sig: Option<ViewSig1D>,
+    last_static_input_sig: Option<StaticInputSig>,
+    readback_inflight: bool,
+}
+
+// ── Public context ───────────────────────────────────────────────────────
+
+pub struct AnisoGpuContext {
+    device: Arc<wgpu::Device>,
+    queue: Arc<wgpu::Queue>,
+    max_compute_workgroups_per_dimension: u32,
+    pipeline: wgpu::ComputePipeline,
+    bind_group_layout: wgpu::BindGroupLayout,
+    cached: Mutex<Option<CachedBuffers>>,
+}
+
+/// Result of GPU anisotropic sky computation.
+pub struct AnisoGpuResult {
+    pub ldown: Array2<f32>,
+    pub lside: Array2<f32>,
+    pub kside_partial: Array2<f32>,
+    pub drad: Array2<f32>,
+}
+
+/// In-flight GPU dispatch token for overlapping CPU work with GPU readback.
+pub struct AnisoGpuPending {
+    rows: usize,
+    cols: usize,
+    total_pixels: usize,
+    staging_size: u64,
+    submission_index: wgpu::SubmissionIndex,
+    map_rx: mpsc::Receiver<Result<(), wgpu::BufferAsyncError>>,
+}
+
+impl AnisoGpuContext {
+    /// Create a new context, sharing device/queue from the shadow GPU context.
+    pub fn new(device: Arc<wgpu::Device>, queue: Arc<wgpu::Queue>) -> Self {
+        let shader = device.create_shader_module(wgpu::ShaderModuleDescriptor {
+            label: Some("Anisotropic Sky Shader"),
+            source: wgpu::ShaderSource::Wgsl(include_str!("anisotropic_sky.wgsl").into()),
+        });
+
+        let bind_group_layout = device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {
+            label: Some("Aniso Bind Group Layout"),
+            entries: &Self::bind_group_layout_entries(),
+        });
+
+        let pipeline_layout = device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor {
+            label: Some("Aniso Pipeline Layout"),
+            bind_group_layouts: &[&bind_group_layout],
+            push_constant_ranges: &[],
+        });
+
+        let pipeline = device.create_compute_pipeline(&wgpu::ComputePipelineDescriptor {
+            label: Some("Aniso Compute Pipeline"),
+            layout: Some(&pipeline_layout),
+            module: &shader,
+            entry_point: Some("main"),
+            compilation_options: Default::default(),
+            cache: None,
+        });
+
+        let max_compute_workgroups_per_dimension =
+            device.limits().max_compute_workgroups_per_dimension;
+
+        Self {
+            device,
+            queue,
+            max_compute_workgroups_per_dimension,
+            pipeline,
+            bind_group_layout,
+            cached: Mutex::new(None),
+        }
+    }
+
+    fn checked_workgroups_2d(
+        &self,
+        rows: usize,
+        cols: usize,
+        workgroup_x: u32,
+        workgroup_y: u32,
+        label: &str,
+    ) -> Result<(u32, u32), String> {
+        let workgroups_x = (cols as u32).div_ceil(workgroup_x);
+        let workgroups_y = (rows as u32).div_ceil(workgroup_y);
+        let limit = self.max_compute_workgroups_per_dimension;
+        if workgroups_x > limit || workgroups_y > limit {
+            return Err(format!(
+                "{} dispatch exceeds GPU workgroup limit {}: got ({}, {}) for grid {}x{} and workgroup {}x{}",
+                label, limit, workgroups_x, workgroups_y, rows, cols, workgroup_x, workgroup_y
+            ));
+        }
+        Ok((workgroups_x, workgroups_y))
+    }
+
+    /// Begin anisotropic GPU dispatch and start asynchronous readback mapping.
+    ///
+    /// Call `dispatch_end()` to complete and retrieve arrays.
+    #[allow(clippy::too_many_arguments)]
+    pub fn dispatch_begin(
+        &self,
+        // Shadow matrices (bitpacked u8, shape rows×cols×n_pack)
+        shmat: ArrayView3<u8>,
+        vegshmat: ArrayView3<u8>,
+        vbshvegshmat: ArrayView3<u8>,
+        // Per-pixel arrays (shape rows×cols)
+        asvf: ArrayView2<f32>,
+        lup: ArrayView2<f32>,
+        valid: ArrayView2<u8>,
+        // Per-patch LUTs (length n_patches)
+        patch_alt: ArrayView1<f32>,
+        patch_azi: ArrayView1<f32>,
+        steradians: ArrayView1<f32>,
+        esky_band: ArrayView1<f32>,
+        lum_chi: ArrayView1<f32>,
+        // Scalar parameters
+        solar_altitude: f32,
+        solar_azimuth: f32,
+        ta: f32,
+        cyl: bool,
+        albedo: f32,
+        tgwall: f32,
+        ewall: f32,
+        rad_i: f32,
+        rad_d: f32,
+        psi: f32,
+        rad_tot: f32,
+    ) -> Result<AnisoGpuPending, String> {
+        let rows = shmat.shape()[0];
+        let cols = shmat.shape()[1];
+        let n_pack = shmat.shape()[2];
+        let n_patches = patch_alt.len();
+        let total_pixels = rows * cols;
+
+        let mut cache = self
+            .cached
+            .lock()
+            .map_err(|e| format!("Failed to lock anisotropic buffer cache: {}", e))?;
+        if cache.as_ref().map(|b| b.readback_inflight).unwrap_or(false) {
+            return Err("anisotropic GPU readback already in flight".to_string());
+        }
+        // Ensure buffers are allocated for this grid size while holding the cache lock,
+        // so dimensions cannot change between the inflight check and allocation.
+        self.ensure_buffers_locked(&mut cache, rows, cols, n_pack, n_patches);
+        let buffers = cache
+            .as_mut()
+            .ok_or_else(|| "anisotropic GPU buffers missing after allocation".to_string())?;
+        buffers.readback_inflight = true;
+
+        // Static inputs are expected to stay stable during a run. If the backing
+        // arrays change (new surface/tile), force re-upload of all cached static
+        // buffers before signature checks below.
+ let static_sig = StaticInputSig { + sh_ptr: shmat.as_ptr() as usize, + veg_ptr: vegshmat.as_ptr() as usize, + vb_ptr: vbshvegshmat.as_ptr() as usize, + asvf_ptr: asvf.as_ptr() as usize, + valid_ptr: valid.as_ptr() as usize, + patch_alt_ptr: patch_alt.as_ptr() as usize, + patch_azi_ptr: patch_azi.as_ptr() as usize, + ster_ptr: steradians.as_ptr() as usize, + rows, + cols, + n_pack, + n_patches, + }; + if buffers.last_static_input_sig != Some(static_sig) { + buffers.last_shmat_sig = None; + buffers.last_vegshmat_sig = None; + buffers.last_vbshmat_sig = None; + buffers.last_asvf_sig = None; + buffers.last_valid_sig = None; + buffers.last_patch_alt_sig = None; + buffers.last_patch_azi_sig = None; + buffers.last_steradians_sig = None; + buffers.last_static_input_sig = Some(static_sig); + } + + // ── Upload uniform params ──────────────────────────────────────── + let params = AnisoParams { + total_pixels: total_pixels as u32, + cols: cols as u32, + rows: rows as u32, + n_patches: n_patches as u32, + n_pack: n_pack as u32, + cyl: if cyl { 1 } else { 0 }, + solar_altitude, + solar_azimuth, + ta, + albedo, + tgwall, + ewall, + rad_i, + rad_d, + psi, + rad_tot, + }; + self.queue + .write_buffer(&buffers.params_buffer, 0, bytemuck::bytes_of(¶ms)); + + // ── Upload shadow matrices ─────────────────────────────────────── + // Contiguous u8 data → reinterpreted as u32 on GPU (little-endian) + let sh_sig = Self::sig_3d_u8(&shmat); + if buffers.last_shmat_sig != Some(sh_sig) { + Self::write_3d_u8_padded(&self.queue, &buffers.shmat_buffer, &shmat); + buffers.last_shmat_sig = Some(sh_sig); + } + + let veg_sig = Self::sig_3d_u8(&vegshmat); + if buffers.last_vegshmat_sig != Some(veg_sig) { + Self::write_3d_u8_padded(&self.queue, &buffers.vegshmat_buffer, &vegshmat); + buffers.last_vegshmat_sig = Some(veg_sig); + } + + let vb_sig = Self::sig_3d_u8(&vbshvegshmat); + if buffers.last_vbshmat_sig != Some(vb_sig) { + Self::write_3d_u8_padded(&self.queue, &buffers.vbshvegshmat_buffer, 
&vbshvegshmat); + buffers.last_vbshmat_sig = Some(vb_sig); + } + + // ── Upload per-pixel arrays ────────────────────────────────────── + let asvf_sig = Self::sig_2d_f32(&asvf); + if buffers.last_asvf_sig != Some(asvf_sig) { + Self::write_2d_f32(&self.queue, &buffers.asvf_buffer, &asvf); + buffers.last_asvf_sig = Some(asvf_sig); + } + + Self::write_2d_f32(&self.queue, &buffers.lup_buffer, &lup); + + // Valid mask: u8 → pad to u32 alignment + let valid_sig = Self::sig_2d_u8(&valid); + if buffers.last_valid_sig != Some(valid_sig) { + Self::write_2d_u8_padded(&self.queue, &buffers.valid_buffer, &valid); + buffers.last_valid_sig = Some(valid_sig); + } + + // ── Upload per-patch LUTs ──────────────────────────────────────── + let patch_alt_sig = Self::sig_1d_f32(&patch_alt); + if buffers.last_patch_alt_sig != Some(patch_alt_sig) { + Self::write_1d_f32(&self.queue, &buffers.patch_alt_buffer, &patch_alt); + buffers.last_patch_alt_sig = Some(patch_alt_sig); + } + + let patch_azi_sig = Self::sig_1d_f32(&patch_azi); + if buffers.last_patch_azi_sig != Some(patch_azi_sig) { + Self::write_1d_f32(&self.queue, &buffers.patch_azi_buffer, &patch_azi); + buffers.last_patch_azi_sig = Some(patch_azi_sig); + } + + let ster_sig = Self::sig_1d_f32(&steradians); + if buffers.last_steradians_sig != Some(ster_sig) { + Self::write_1d_f32(&self.queue, &buffers.steradians_buffer, &steradians); + buffers.last_steradians_sig = Some(ster_sig); + } + + Self::write_1d_f32(&self.queue, &buffers.esky_band_buffer, &esky_band); + + Self::write_1d_f32(&self.queue, &buffers.lum_chi_buffer, &lum_chi); + + // ── Dispatch compute shader ────────────────────────────────────── + let mut encoder = self + .device + .create_command_encoder(&wgpu::CommandEncoderDescriptor { + label: Some("Aniso Sky Encoder"), + }); + + { + let mut pass = encoder.begin_compute_pass(&wgpu::ComputePassDescriptor { + label: Some("Aniso Sky Pass"), + timestamp_writes: None, + }); + pass.set_pipeline(&self.pipeline); + 
pass.set_bind_group(0, &buffers.bind_group, &[]); + let workgroup_x = 16u32; + let workgroup_y = 16u32; + let (workgroups_x, workgroups_y) = self.checked_workgroups_2d( + rows, + cols, + workgroup_x, + workgroup_y, + "anisotropic sky", + )?; + pass.dispatch_workgroups(workgroups_x, workgroups_y, 1); + } + // ── Copy outputs to staging buffer ─────────────────────────────── + let pixel_bytes = (total_pixels * 4) as u64; // f32 per pixel + encoder.copy_buffer_to_buffer( + &buffers.out_ldown_buffer, + 0, + &buffers.staging_buffer, + 0, + pixel_bytes, + ); + encoder.copy_buffer_to_buffer( + &buffers.out_lside_buffer, + 0, + &buffers.staging_buffer, + pixel_bytes, + pixel_bytes, + ); + encoder.copy_buffer_to_buffer( + &buffers.out_kside_partial_buffer, + 0, + &buffers.staging_buffer, + pixel_bytes * 2, + pixel_bytes, + ); + encoder.copy_buffer_to_buffer( + &buffers.out_drad_buffer, + 0, + &buffers.staging_buffer, + pixel_bytes * 3, + pixel_bytes, + ); + // Wait only for this dispatch submission, not the whole device queue. + let submission_index = self.queue.submit(Some(encoder.finish())); + + let staging_size = pixel_bytes * 4; + let buffer_slice = buffers.staging_buffer.slice(..staging_size); + let (sender, receiver) = mpsc::channel(); + buffer_slice.map_async(wgpu::MapMode::Read, move |result| { + let _ = sender.send(result); + }); + + Ok(AnisoGpuPending { + rows, + cols, + total_pixels, + staging_size, + submission_index, + map_rx: receiver, + }) + } + + /// Complete an in-flight anisotropic GPU dispatch and read back arrays. + pub fn dispatch_end(&self, pending: AnisoGpuPending) -> Result { + let result = (|| { + self.device + .poll(wgpu::PollType::Wait { + submission_index: Some(pending.submission_index), + timeout: None, + }) + .map_err(|e| format!("GPU poll failed: {:?}", e))?; + + pending + .map_rx + .recv() + .map_err(|e| format!("Channel recv failed: {}", e))? 
+ .map_err(|e| format!("Failed to map staging buffer: {:?}", e))?; + + let cache = self + .cached + .lock() + .map_err(|e| format!("Failed to lock anisotropic buffer cache: {}", e))?; + let buffers = cache + .as_ref() + .ok_or_else(|| "anisotropic GPU buffers missing".to_string())?; + let buffer_slice = buffers.staging_buffer.slice(..pending.staging_size); + let _unmap_guard = MappedBufferGuard::new(&buffers.staging_buffer); + let data = buffer_slice.get_mapped_range(); + let all_f32: &[f32] = bytemuck::cast_slice(&data); + + let ldown = Array2::from_shape_vec( + (pending.rows, pending.cols), + all_f32[..pending.total_pixels].to_vec(), + ) + .map_err(|e| format!("ldown array: {}", e))?; + + let lside = Array2::from_shape_vec( + (pending.rows, pending.cols), + all_f32[pending.total_pixels..pending.total_pixels * 2].to_vec(), + ) + .map_err(|e| format!("lside array: {}", e))?; + + let kside_partial = Array2::from_shape_vec( + (pending.rows, pending.cols), + all_f32[pending.total_pixels * 2..pending.total_pixels * 3].to_vec(), + ) + .map_err(|e| format!("kside_partial array: {}", e))?; + let drad = Array2::from_shape_vec( + (pending.rows, pending.cols), + all_f32[pending.total_pixels * 3..pending.total_pixels * 4].to_vec(), + ) + .map_err(|e| format!("drad array: {}", e))?; + + Ok(AnisoGpuResult { + ldown, + lside, + kside_partial, + drad, + }) + })(); + + if let Ok(mut cache) = self.cached.lock() { + if let Some(buffers) = cache.as_mut() { + buffers.readback_inflight = false; + } + } + + result + } + + // ── Buffer management ──────────────────────────────────────────────── + + fn ensure_buffers_locked( + &self, + cache: &mut Option, + rows: usize, + cols: usize, + n_pack: usize, + n_patches: usize, + ) { + // Reuse if dimensions match + if let Some(ref c) = *cache { + if c.rows == rows && c.cols == cols && c.n_pack == n_pack && c.n_patches == n_patches { + return; + } + } + + let total_pixels = rows * cols; + let pixel_bytes = (total_pixels * 4) as u64; + let 
shadow_bytes = (total_pixels * n_pack) as u64;
+        // Pad shadow buffer to u32 alignment
+        let shadow_bytes_aligned = (shadow_bytes + 3) & !3;
+        let patch_bytes = (n_patches * 4) as u64;
+        // Valid mask: 1 byte per pixel, padded to u32
+        let valid_bytes = ((total_pixels + 3) & !3) as u64;
+
+        let make = |label: &str, size: u64, usage: wgpu::BufferUsages| -> wgpu::Buffer {
+            self.device.create_buffer(&wgpu::BufferDescriptor {
+                label: Some(label),
+                size,
+                usage,
+                mapped_at_creation: false,
+            })
+        };
+
+        let input = wgpu::BufferUsages::STORAGE | wgpu::BufferUsages::COPY_DST;
+        let output = wgpu::BufferUsages::STORAGE | wgpu::BufferUsages::COPY_SRC;
+
+        let params_buffer = make(
+            "Aniso Params",
+            std::mem::size_of::<AnisoParams>() as u64,
+            wgpu::BufferUsages::UNIFORM | wgpu::BufferUsages::COPY_DST,
+        );
+
+        let shmat_buffer = make("Aniso shmat", shadow_bytes_aligned, input);
+        let vegshmat_buffer = make("Aniso vegshmat", shadow_bytes_aligned, input);
+        let vbshvegshmat_buffer = make("Aniso vbshvegshmat", shadow_bytes_aligned, input);
+        let asvf_buffer = make("Aniso asvf", pixel_bytes, input);
+        let lup_buffer = make("Aniso lup", pixel_bytes, input);
+        let valid_buffer = make("Aniso valid", valid_bytes, input);
+
+        let patch_alt_buffer = make("Aniso patch_alt", patch_bytes, input);
+        let patch_azi_buffer = make("Aniso patch_azi", patch_bytes, input);
+        let steradians_buffer = make("Aniso steradians", patch_bytes, input);
+        let esky_band_buffer = make("Aniso esky_band", patch_bytes, input);
+        let lum_chi_buffer = make("Aniso lum_chi", patch_bytes, input);
+
+        let out_ldown_buffer = make("Aniso out_ldown", pixel_bytes, output);
+        let out_lside_buffer = make("Aniso out_lside", pixel_bytes, output);
+        let out_kside_partial_buffer = make("Aniso out_kside_partial", pixel_bytes, output);
+        let out_drad_buffer = make("Aniso out_drad", pixel_bytes, output);
+
+        let staging_buffer = make(
+            "Aniso Staging",
+            pixel_bytes * 4,
+            wgpu::BufferUsages::MAP_READ | wgpu::BufferUsages::COPY_DST,
+ ); + + let bind_group = self.device.create_bind_group(&wgpu::BindGroupDescriptor { + label: Some("Aniso Bind Group"), + layout: &self.bind_group_layout, + entries: &[ + wgpu::BindGroupEntry { + binding: 0, + resource: params_buffer.as_entire_binding(), + }, + wgpu::BindGroupEntry { + binding: 1, + resource: shmat_buffer.as_entire_binding(), + }, + wgpu::BindGroupEntry { + binding: 2, + resource: vegshmat_buffer.as_entire_binding(), + }, + wgpu::BindGroupEntry { + binding: 3, + resource: vbshvegshmat_buffer.as_entire_binding(), + }, + wgpu::BindGroupEntry { + binding: 4, + resource: asvf_buffer.as_entire_binding(), + }, + wgpu::BindGroupEntry { + binding: 5, + resource: lup_buffer.as_entire_binding(), + }, + wgpu::BindGroupEntry { + binding: 6, + resource: valid_buffer.as_entire_binding(), + }, + wgpu::BindGroupEntry { + binding: 7, + resource: patch_alt_buffer.as_entire_binding(), + }, + wgpu::BindGroupEntry { + binding: 8, + resource: patch_azi_buffer.as_entire_binding(), + }, + wgpu::BindGroupEntry { + binding: 9, + resource: steradians_buffer.as_entire_binding(), + }, + wgpu::BindGroupEntry { + binding: 10, + resource: esky_band_buffer.as_entire_binding(), + }, + wgpu::BindGroupEntry { + binding: 11, + resource: lum_chi_buffer.as_entire_binding(), + }, + wgpu::BindGroupEntry { + binding: 12, + resource: out_ldown_buffer.as_entire_binding(), + }, + wgpu::BindGroupEntry { + binding: 13, + resource: out_lside_buffer.as_entire_binding(), + }, + wgpu::BindGroupEntry { + binding: 14, + resource: out_kside_partial_buffer.as_entire_binding(), + }, + wgpu::BindGroupEntry { + binding: 15, + resource: out_drad_buffer.as_entire_binding(), + }, + ], + }); + + *cache = Some(CachedBuffers { + rows, + cols, + n_pack, + n_patches, + params_buffer, + shmat_buffer, + vegshmat_buffer, + vbshvegshmat_buffer, + asvf_buffer, + lup_buffer, + valid_buffer, + patch_alt_buffer, + patch_azi_buffer, + steradians_buffer, + esky_band_buffer, + lum_chi_buffer, + out_ldown_buffer, + 
out_lside_buffer, + out_kside_partial_buffer, + out_drad_buffer, + staging_buffer, + bind_group, + last_shmat_sig: None, + last_vegshmat_sig: None, + last_vbshmat_sig: None, + last_asvf_sig: None, + last_valid_sig: None, + last_patch_alt_sig: None, + last_patch_azi_sig: None, + last_steradians_sig: None, + last_static_input_sig: None, + readback_inflight: false, + }); + } + + // ── Bind group layout ──────────────────────────────────────────────── + + fn bind_group_layout_entries() -> Vec { + let uniform_entry = |binding: u32| wgpu::BindGroupLayoutEntry { + binding, + visibility: wgpu::ShaderStages::COMPUTE, + ty: wgpu::BindingType::Buffer { + ty: wgpu::BufferBindingType::Uniform, + has_dynamic_offset: false, + min_binding_size: None, + }, + count: None, + }; + let storage_ro = |binding: u32| wgpu::BindGroupLayoutEntry { + binding, + visibility: wgpu::ShaderStages::COMPUTE, + ty: wgpu::BindingType::Buffer { + ty: wgpu::BufferBindingType::Storage { read_only: true }, + has_dynamic_offset: false, + min_binding_size: None, + }, + count: None, + }; + let storage_rw = |binding: u32| wgpu::BindGroupLayoutEntry { + binding, + visibility: wgpu::ShaderStages::COMPUTE, + ty: wgpu::BindingType::Buffer { + ty: wgpu::BufferBindingType::Storage { read_only: false }, + has_dynamic_offset: false, + min_binding_size: None, + }, + count: None, + }; + + vec![ + uniform_entry(0), // params + storage_ro(1), // shmat + storage_ro(2), // vegshmat + storage_ro(3), // vbshvegshmat + storage_ro(4), // asvf + storage_ro(5), // lup + storage_ro(6), // valid_mask + storage_ro(7), // patch_alt + storage_ro(8), // patch_azi + storage_ro(9), // steradians + storage_ro(10), // esky_band + storage_ro(11), // lum_chi + storage_rw(12), // out_ldown + storage_rw(13), // out_lside + storage_rw(14), // out_kside_partial + storage_rw(15), // out_drad + ] + } + + fn sig_3d_u8(arr: &ArrayView3) -> ViewSig3D { + let sh = arr.shape(); + let st = arr.strides(); + let sample_hash = 
Self::sample_hash_3d_u8(arr); + ViewSig3D { + ptr: arr.as_ptr() as usize, + d0: sh[0], + d1: sh[1], + d2: sh[2], + s0: st[0], + s1: st[1], + s2: st[2], + sample_hash, + } + } + + fn sig_2d_f32(arr: &ArrayView2) -> ViewSig2D { + let sh = arr.shape(); + let st = arr.strides(); + let sample_hash = Self::sample_hash_2d_f32(arr); + ViewSig2D { + ptr: arr.as_ptr() as usize, + d0: sh[0], + d1: sh[1], + s0: st[0], + s1: st[1], + sample_hash, + } + } + + fn sig_2d_u8(arr: &ArrayView2) -> ViewSig2D { + let sh = arr.shape(); + let st = arr.strides(); + let sample_hash = Self::sample_hash_2d_u8(arr); + ViewSig2D { + ptr: arr.as_ptr() as usize, + d0: sh[0], + d1: sh[1], + s0: st[0], + s1: st[1], + sample_hash, + } + } + + fn sig_1d_f32(arr: &ArrayView1) -> ViewSig1D { + let sh = arr.shape(); + let st = arr.strides(); + let sample_hash = Self::sample_hash_1d_f32(arr); + ViewSig1D { + ptr: arr.as_ptr() as usize, + d0: sh[0], + s0: st[0], + sample_hash, + } + } + + #[inline] + fn hash_mix_u64(mut hash: u64, v: u64) -> u64 { + // FNV-1a style mixing over 64-bit words. 
+ const FNV_PRIME: u64 = 0x0000_0100_0000_01B3; + for b in v.to_le_bytes() { + hash ^= b as u64; + hash = hash.wrapping_mul(FNV_PRIME); + } + hash + } + + fn sample_hash_3d_u8(arr: &ArrayView3) -> u64 { + let sh = arr.shape(); + let r_last = sh[0] - 1; + let c_last = sh[1] - 1; + let k_last = sh[2] - 1; + let coords = [ + (0, 0, 0), + (r_last / 2, c_last / 2, k_last / 2), + (r_last, c_last, k_last), + (r_last / 4, c_last / 4, k_last / 4), + ((3 * r_last) / 4, (3 * c_last) / 4, (3 * k_last) / 4), + (r_last, 0, 0), + (0, c_last, 0), + (0, 0, k_last), + (r_last, c_last, 0), + (r_last, 0, k_last), + (0, c_last, k_last), + ]; + let mut hash = 0xCBF2_9CE4_8422_2325u64; + hash = Self::hash_mix_u64(hash, sh[0] as u64); + hash = Self::hash_mix_u64(hash, sh[1] as u64); + hash = Self::hash_mix_u64(hash, sh[2] as u64); + for &(r, c, k) in &coords { + hash = Self::hash_mix_u64(hash, arr[[r, c, k]] as u64); + } + hash + } + + fn sample_hash_2d_f32(arr: &ArrayView2) -> u64 { + let sh = arr.shape(); + let r_last = sh[0] - 1; + let c_last = sh[1] - 1; + let coords = [ + (0, 0), + (r_last / 2, c_last / 2), + (r_last, c_last), + (r_last / 4, c_last / 4), + ((3 * r_last) / 4, (3 * c_last) / 4), + (r_last, 0), + (0, c_last), + ]; + let mut hash = 0xCBF2_9CE4_8422_2325u64; + hash = Self::hash_mix_u64(hash, sh[0] as u64); + hash = Self::hash_mix_u64(hash, sh[1] as u64); + for &(r, c) in &coords { + hash = Self::hash_mix_u64(hash, arr[[r, c]].to_bits() as u64); + } + hash + } + + fn sample_hash_2d_u8(arr: &ArrayView2) -> u64 { + let sh = arr.shape(); + let r_last = sh[0] - 1; + let c_last = sh[1] - 1; + let coords = [ + (0, 0), + (r_last / 2, c_last / 2), + (r_last, c_last), + (r_last / 4, c_last / 4), + ((3 * r_last) / 4, (3 * c_last) / 4), + (r_last, 0), + (0, c_last), + ]; + let mut hash = 0xCBF2_9CE4_8422_2325u64; + hash = Self::hash_mix_u64(hash, sh[0] as u64); + hash = Self::hash_mix_u64(hash, sh[1] as u64); + for &(r, c) in &coords { + hash = Self::hash_mix_u64(hash, arr[[r, c]] as 
u64); + } + hash + } + + fn sample_hash_1d_f32(arr: &ArrayView1) -> u64 { + let sh = arr.shape(); + let last = sh[0] - 1; + let idx = [0, last / 4, last / 2, (3 * last) / 4, last]; + let mut hash = 0xCBF2_9CE4_8422_2325u64; + hash = Self::hash_mix_u64(hash, sh[0] as u64); + for &i in &idx { + hash = Self::hash_mix_u64(hash, arr[i].to_bits() as u64); + } + hash + } + + // ── Data conversion helpers ────────────────────────────────────────── + + #[inline] + fn write_2d_f32(queue: &wgpu::Queue, buffer: &wgpu::Buffer, arr: &ArrayView2) { + if let Some(slice) = arr.as_slice() { + queue.write_buffer(buffer, 0, bytemuck::cast_slice(slice)); + } else { + let packed = Self::contiguous_f32_2d(arr); + queue.write_buffer(buffer, 0, bytemuck::cast_slice(&packed)); + } + } + + #[inline] + fn write_1d_f32(queue: &wgpu::Queue, buffer: &wgpu::Buffer, arr: &ArrayView1) { + if let Some(slice) = arr.as_slice() { + queue.write_buffer(buffer, 0, bytemuck::cast_slice(slice)); + } else { + let packed = Self::contiguous_f32_1d(arr); + queue.write_buffer(buffer, 0, bytemuck::cast_slice(&packed)); + } + } + + #[inline] + fn write_3d_u8_padded(queue: &wgpu::Queue, buffer: &wgpu::Buffer, arr: &ArrayView3) { + if let Some(slice) = arr.as_slice() { + if slice.len() % 4 == 0 { + queue.write_buffer(buffer, 0, slice); + } else { + let mut packed = Vec::with_capacity((slice.len() + 3) & !3); + packed.extend_from_slice(slice); + while packed.len() % 4 != 0 { + packed.push(0); + } + queue.write_buffer(buffer, 0, &packed); + } + } else { + let packed = Self::contiguous_bytes_3d(arr); + queue.write_buffer(buffer, 0, &packed); + } + } + + #[inline] + fn write_2d_u8_padded(queue: &wgpu::Queue, buffer: &wgpu::Buffer, arr: &ArrayView2) { + if let Some(slice) = arr.as_slice() { + if slice.len() % 4 == 0 { + queue.write_buffer(buffer, 0, slice); + } else { + let mut packed = Vec::with_capacity((slice.len() + 3) & !3); + packed.extend_from_slice(slice); + while packed.len() % 4 != 0 { + packed.push(0); + } + 
queue.write_buffer(buffer, 0, &packed); + } + } else { + let packed = Self::contiguous_bytes_2d_u8(arr); + queue.write_buffer(buffer, 0, &packed); + } + } + + /// Convert a 3D u8 ndarray to a contiguous byte Vec (row-major). + fn contiguous_bytes_3d(arr: &ArrayView3) -> Vec { + if let Some(slice) = arr.as_slice() { + // Already contiguous — pad to u32 alignment + let mut v = slice.to_vec(); + while v.len() % 4 != 0 { + v.push(0); + } + v + } else { + let shape = arr.shape(); + let total = shape[0] * shape[1] * shape[2]; + let mut v = Vec::with_capacity((total + 3) & !3); + for r in 0..shape[0] { + for c in 0..shape[1] { + for k in 0..shape[2] { + v.push(arr[[r, c, k]]); + } + } + } + while v.len() % 4 != 0 { + v.push(0); + } + v + } + } + + /// Convert a 2D f32 ndarray to a contiguous Vec. + fn contiguous_f32_2d(arr: &ArrayView2) -> Vec { + if let Some(slice) = arr.as_slice() { + slice.to_vec() + } else { + let shape = arr.shape(); + let mut v = Vec::with_capacity(shape[0] * shape[1]); + for r in 0..shape[0] { + for c in 0..shape[1] { + v.push(arr[[r, c]]); + } + } + v + } + } + + /// Convert a 2D u8 ndarray to a contiguous byte Vec, padded to u32. + fn contiguous_bytes_2d_u8(arr: &ArrayView2) -> Vec { + let shape = arr.shape(); + let total = shape[0] * shape[1]; + let mut v = if let Some(slice) = arr.as_slice() { + slice.to_vec() + } else { + let mut v = Vec::with_capacity(total); + for r in 0..shape[0] { + for c in 0..shape[1] { + v.push(arr[[r, c]]); + } + } + v + }; + while v.len() % 4 != 0 { + v.push(0); + } + v + } + + /// Convert a 1D f32 ndarray to a contiguous Vec. 
+ fn contiguous_f32_1d(arr: &ArrayView1) -> Vec { + if let Some(slice) = arr.as_slice() { + slice.to_vec() + } else { + arr.iter().cloned().collect() + } + } +} diff --git a/rust/src/gpu/anisotropic_sky.wgsl b/rust/src/gpu/anisotropic_sky.wgsl new file mode 100644 index 0000000..c0f92fc --- /dev/null +++ b/rust/src/gpu/anisotropic_sky.wgsl @@ -0,0 +1,311 @@ +// Anisotropic sky radiation — GPU compute shader. +// +// Fuses the per-pixel, per-patch loop from anisotropic_sky_pure() (sky.rs) +// onto the GPU. Each thread handles one pixel and iterates over all sky +// patches, accumulating longwave and shortwave radiation. +// +// Two passes: +// Pass 1 — sky, vegetation, and building longwave + diffuse shortwave +// Pass 2 — reflected longwave (depends on ldown_sky from pass 1) +// +// Outputs 4 accumulated arrays consumed by pipeline.rs: +// out_ldown — total downwelling longwave (sky+veg+bldg+ref) +// out_lside — total side longwave (sky+veg+bldg+ref) +// out_kside_partial — diffuse shortwave + reflected shortwave (kside_d+kref) +// out_drad — anisotropic diffuse shortwave for Kdown +// +// The pipeline adds kside_i = shadow * rad_i * cos(alt) on the CPU. 
+
+const PI: f32 = 3.14159265358979323846;
+const SBC: f32 = 5.67051e-8; // Stefan-Boltzmann constant (W m⁻² K⁻⁴)
+const DEG2RAD: f32 = 0.017453292519943295; // PI / 180
+const RAD2DEG: f32 = 57.29577951308232; // 180 / PI
+const NAN_BITS: u32 = 0x7FC00000u; // quiet NaN in IEEE 754
+
+// ── Uniform parameters (one per dispatch) ────────────────────────────────
+
+struct Params {
+    total_pixels: u32,
+    cols: u32,
+    rows: u32,
+    n_patches: u32,
+    n_pack: u32, // ceil(n_patches / 8)
+    cyl: u32, // 1 = standing (cylindric), 0 = lying
+    solar_altitude: f32, // degrees
+    solar_azimuth: f32, // degrees
+    ta: f32, // air temperature (°C)
+    albedo: f32, // ground albedo
+    tgwall: f32, // wall temperature excess (°C)
+    ewall: f32, // wall emissivity
+    rad_i: f32, // direct radiation (W m⁻²)
+    rad_d: f32, // diffuse radiation (W m⁻²)
+    psi: f32, // vegetation transmissivity factor for diffsh
+    rad_tot: f32, // ∑(lv * steradians * sin(alt)) for drad recovery
+};
+
+@group(0) @binding(0) var<uniform> params: Params;
+
+// ── Per-pixel inputs (flattened row-major) ───────────────────────────────
+
+// Bitpacked shadow matrices — 1 bit per patch, 8 per byte, read as u32.
+// Layout: pixel_idx * n_pack_u32 + word, where n_pack_u32 = ceil(n_pack/4).
+// Byte within word: little-endian (byte 0 = bits 0..7 of u32).
+@group(0) @binding(1) var<storage, read> shmat: array<u32>;
+@group(0) @binding(2) var<storage, read> vegshmat: array<u32>;
+@group(0) @binding(3) var<storage, read> vbshvegshmat: array<u32>;
+
+@group(0) @binding(4) var<storage, read> asvf: array<f32>;
+@group(0) @binding(5) var<storage, read> lup: array<f32>;
+// Valid mask — packed u8 as u32 (4 pixels per word). 0 = invalid.
+@group(0) @binding(6) var<storage, read> valid_mask: array<u32>;
+
+// ── Per-patch look-up tables (length = n_patches) ────────────────────────
+
+@group(0) @binding(7) var<storage, read> patch_alt: array<f32>;
+@group(0) @binding(8) var<storage, read> patch_azi: array<f32>;
+@group(0) @binding(9) var<storage, read> steradians_buf: array<f32>;
+@group(0) @binding(10) var<storage, read> esky_band_buf: array<f32>;
+@group(0) @binding(11) var<storage, read> lum_chi_buf: array<f32>;
+
+// ── Outputs (per-pixel, flattened row-major) ─────────────────────────────
+
+@group(0) @binding(12) var<storage, read_write> out_ldown: array<f32>;
+@group(0) @binding(13) var<storage, read_write> out_lside: array<f32>;
+@group(0) @binding(14) var<storage, read_write> out_kside_partial: array<f32>;
+@group(0) @binding(15) var<storage, read_write> out_drad: array<f32>;
+
+// ── Bit extraction ───────────────────────────────────────────────────────
+//
+// Shadow matrices are stored as u8 arrays bitpacked with 1 bit per patch.
+// On the GPU they are uploaded as &[u8] reinterpreted as array<u32>.
+// Byte order is little-endian (WGSL storage buffer default).
+
+fn sh_bit(pixel: u32, p: u32) -> bool {
+    let byte_idx: u32 = pixel * params.n_pack + (p >> 3u);
+    let word_idx: u32 = byte_idx >> 2u;
+    let byte_in_word: u32 = byte_idx & 3u;
+    let byte_val: u32 = (shmat[word_idx] >> (byte_in_word * 8u)) & 0xFFu;
+    return ((byte_val >> (p & 7u)) & 1u) == 1u;
+}
+
+fn veg_bit(pixel: u32, p: u32) -> bool {
+    let byte_idx: u32 = pixel * params.n_pack + (p >> 3u);
+    let word_idx: u32 = byte_idx >> 2u;
+    let byte_in_word: u32 = byte_idx & 3u;
+    let byte_val: u32 = (vegshmat[word_idx] >> (byte_in_word * 8u)) & 0xFFu;
+    return ((byte_val >> (p & 7u)) & 1u) == 1u;
+}
+
+fn vb_bit(pixel: u32, p: u32) -> bool {
+    let byte_idx: u32 = pixel * params.n_pack + (p >> 3u);
+    let word_idx: u32 = byte_idx >> 2u;
+    let byte_in_word: u32 = byte_idx & 3u;
+    let byte_val: u32 = (vbshvegshmat[word_idx] >> (byte_in_word * 8u)) & 0xFFu;
+    return ((byte_val >> (p & 7u)) & 1u) == 1u;
+}
+
+// ── Sunlit / shaded classification ───────────────────────────────────────
+// Matches sunlit_shaded_patches::shaded_or_sunlit_pixel in Rust.
+
+// Degrees of sunlitness for a patch; compared against patch altitude to
+// classify a building patch as sunlit or shaded.
+fn compute_sunlit_degrees(p_alt: f32, p_azi: f32, pixel_asvf: f32) -> f32 {
+    let patch_to_sun_azi: f32 = abs(params.solar_azimuth - p_azi);
+    let xi: f32 = cos(patch_to_sun_azi * DEG2RAD);
+    let yi: f32 = 2.0 * xi * tan(params.solar_altitude * DEG2RAD);
+    let hsvf: f32 = tan(pixel_asvf);
+    let yi_: f32 = select(yi, 0.0, yi > 0.0);
+    let tan_delta: f32 = hsvf + yi_;
+    return atan(tan_delta) * RAD2DEG;
+}
+
+// ── Main kernel ──────────────────────────────────────────────────────────
+
+@compute @workgroup_size(16, 16, 1)
+fn main(@builtin(global_invocation_id) gid: vec3<u32>) {
+    let x: u32 = gid.x;
+    let y: u32 = gid.y;
+    if (x >= params.cols || y >= params.rows) {
+        return;
+    }
+    let idx: u32 = y * params.cols + x;
+    if (idx >= params.total_pixels) {
+        return;
+    }
+
+    // ── Valid mask check ─────────────────────────────────────────────────
+    // valid_mask is a u8 mask uploaded as array<u32>; unpack the byte.
+    let valid_word: u32 = valid_mask[idx >> 2u];
+    let valid_byte: u32 = (valid_word >> ((idx & 3u) * 8u)) & 0xFFu;
+    if (valid_byte == 0u) {
+        out_ldown[idx] = bitcast<f32>(NAN_BITS);
+        out_lside[idx] = bitcast<f32>(NAN_BITS);
+        out_kside_partial[idx] = bitcast<f32>(NAN_BITS);
+        out_drad[idx] = bitcast<f32>(NAN_BITS);
+        return;
+    }
+
+    // ── Short-circuit when not cylindric (cyl == 0) ─────────────────────
+    if (params.cyl == 0u) {
+        out_ldown[idx] = 0.0;
+        out_lside[idx] = 0.0;
+        out_kside_partial[idx] = 0.0;
+        out_drad[idx] = 0.0;
+        return;
+    }
+
+    // ── Pre-compute scalars ──────────────────────────────────────────────
+    let ta_k: f32 = params.ta + 273.15;
+    let pixel_asvf: f32 = asvf[idx];
+    let pixel_lup: f32 = lup[idx];
+    let sun_above: bool = params.solar_altitude > 0.0;
+
+    // Surfaces for building longwave
+    let sunlit_surface: f32 = (params.ewall * SBC * pow(ta_k + params.tgwall, 4.0)) / PI;
+    let shaded_surface: f32 = (params.ewall * SBC * pow(ta_k, 4.0)) / PI;
+    // Vegetation surface (same emissivity model as shaded buildings)
+    let veg_surface: f32 = shaded_surface;
+
+    // Reflected shortwave surfaces (only when sun is up)
+    // CPU formula: (albedo * rad_i * cos(alt) + rad_d * 0.5) / PI
+    // Note: albedo only multiplies the direct component, not the diffuse half
+    let k_sunlit_surface: f32 = select(
+        0.0,
+        (params.albedo * params.rad_i * cos(params.solar_altitude * DEG2RAD) + params.rad_d * 0.5) / PI,
+        sun_above
+    );
+    let k_shaded_surface: f32 = select(
+        0.0,
+        (params.albedo * params.rad_d * 0.5) / PI,
+        sun_above
+    );
+
+    // ── Accumulators ─────────────────────────────────────────────────────
+    var ldown_sky: f32 = 0.0;
+    var lside_sky: f32 = 0.0;
+    var ldown_veg: f32 = 0.0;
+    var lside_veg: f32 = 0.0;
+    var ldown_sun: f32 = 0.0;
+    var lside_sun: f32 = 0.0;
+    var ldown_sh: f32 = 0.0;
+    var lside_sh: f32 = 0.0;
+    var kside_d_acc: f32 = 0.0;
+    var kref_acc: f32 = 0.0; // combined kref_sun + kref_sh + kref_veg
+    var drad_norm_acc: f32 = 0.0; // Σ(diffsh * lum_chi); multiply by rad_tot at end
+
+    // ── Pass 1: Main patch loop ──────────────────────────────────────────
+    for (var i: u32 = 0u; i < params.n_patches; i++) {
+        let sh: bool = sh_bit(idx, i);
+        let vsh: bool = veg_bit(idx, i);
+        let vbsh: bool = vb_bit(idx, i);
+        let sh_f: f32 = select(0.0, 1.0, sh);
+        let vsh_f: f32 = select(0.0, 1.0, vsh);
+
+        // Classification (matches sky.rs)
+        let temp_sky: bool = sh && vsh;
+        let temp_vegsh: bool = !vsh || !vbsh;
+        let temp_sh: bool = !sh && vbsh;
+
+        let p_alt: f32 = patch_alt[i];
+        let p_azi: f32 = patch_azi[i];
+        let steradian: f32 = steradians_buf[i];
+        let alt_rad: f32 = p_alt * DEG2RAD;
+        let aoi: f32 = cos(alt_rad); // angle of incidence (vertical surface)
+        let aoi_h: f32 = sin(alt_rad); // horizontal surface projection
+
+        // Diffuse shadow term used by Kdown:
+        // diffsh = sh - (1 - vegsh) * (1 - psi)
+        let diffsh: f32 = sh_f - (1.0 - vsh_f) * (1.0 - params.psi);
+        if (sun_above && params.rad_tot > 0.0) {
+            drad_norm_acc += diffsh * lum_chi_buf[i];
+        }
+
+        // ── Sky longwave ──────────────────────────────────────────
+        if (temp_sky) {
+            let esky_i: f32 = esky_band_buf[i];
+            let lval: f32 = (esky_i * SBC * pow(ta_k, 4.0)) / PI;
+            lside_sky += lval * steradian * aoi;
+            ldown_sky += lval * steradian * aoi_h;
+
+            // Diffuse shortwave
+            if (sun_above) {
+                kside_d_acc += lum_chi_buf[i] * aoi * steradian;
+            }
+        }
+
+        // ── Vegetation longwave ───────────────────────────────────
+        if (temp_vegsh) {
+            lside_veg += veg_surface * steradian * aoi;
+            ldown_veg += veg_surface * steradian * aoi_h;
+
+            // Vegetation reflected shortwave
+            if (sun_above) {
+                kref_acc += k_shaded_surface * steradian * aoi;
+            }
+        }
+
+        // ── Building longwave ─────────────────────────────────────
+        if (temp_sh) {
+            let sunlit_deg: f32 = compute_sunlit_degrees(p_alt, p_azi, pixel_asvf);
+            let is_sunlit: bool = sunlit_deg < p_alt;
+            let is_shaded: bool = sunlit_deg > p_alt;
+
+            let azimuth_difference: f32 = abs(params.solar_azimuth - p_azi);
+            let facing_sun: bool = azimuth_difference > 90.0
+                && azimuth_difference < 270.0
+                && sun_above;
+
+            if (facing_sun) {
+                let sf: f32 = select(0.0, 1.0, is_sunlit);
+                let shf: f32 = select(0.0, 1.0, is_shaded);
+                lside_sun += sf * sunlit_surface * steradian * aoi;
+                lside_sh += shf * shaded_surface * steradian * aoi;
+                ldown_sun += sf * sunlit_surface * steradian * aoi_h;
+                ldown_sh += shf * shaded_surface * steradian * aoi_h;
+            } else {
+                // Not facing sun → all shaded
+                lside_sh += shaded_surface * steradian * aoi;
+                ldown_sh += shaded_surface * steradian * aoi_h;
+            }
+
+            // Reflected shortwave from buildings
+            if (sun_above) {
+                if (is_sunlit) {
+                    kref_acc += k_sunlit_surface * steradian * aoi;
+                }
+                if (is_shaded) {
+                    kref_acc += k_shaded_surface * steradian * aoi;
+                }
+            }
+        }
+    }
+
+    // ── Pass 2: Reflected longwave ───────────────────────────────────────
+    // Reflected radiation depends on ldown_sky from pass 1.
+    var lside_ref: f32 = 0.0;
+    var ldown_ref: f32 = 0.0;
+
+    let reflected_base: f32 = (ldown_sky + pixel_lup) * (1.0 - params.ewall) * 0.5 / PI;
+
+    for (var i: u32 = 0u; i < params.n_patches; i++) {
+        let sh: bool = sh_bit(idx, i);
+        let vsh: bool = veg_bit(idx, i);
+        let vbsh: bool = vb_bit(idx, i);
+
+        // Any obstruction → reflected longwave
+        let is_obstructed: bool = !sh || !vsh || !vbsh;
+
+        if (is_obstructed) {
+            let alt_rad: f32 = patch_alt[i] * DEG2RAD;
+            let aoi: f32 = cos(alt_rad);
+            let aoi_h: f32 = sin(alt_rad);
+            let steradian: f32 = steradians_buf[i];
+
+            lside_ref += reflected_base * steradian * aoi;
+            ldown_ref += reflected_base * steradian * aoi_h;
+        }
+    }
+
+    // ── Write combined outputs ───────────────────────────────────────────
+    out_ldown[idx] = ldown_sky + ldown_veg + ldown_sh + ldown_sun + ldown_ref;
+    out_lside[idx] = lside_sky + lside_veg + lside_sh + lside_sun + lside_ref;
+    out_kside_partial[idx] = kside_d_acc + kref_acc;
+    out_drad[idx] = drad_norm_acc * params.rad_tot;
+}
diff --git a/rust/src/gpu/mod.rs b/rust/src/gpu/mod.rs
index 9364b9f..9e48ae6 100644
--- a/rust/src/gpu/mod.rs
+++ b/rust/src/gpu/mod.rs
@@ -1,6 +1,10 @@
-// GPU acceleration module for shadow propagation
+// GPU acceleration modules
+#[cfg(feature = "gpu")]
+pub mod aniso_gpu;
 #[cfg(feature = "gpu")]
 pub mod shadow_gpu;
+#[cfg(feature = "gpu")]
+pub use aniso_gpu::AnisoGpuContext;
 #[cfg(feature = "gpu")]
 pub use shadow_gpu::{create_shadow_gpu_context, ShadowGpuContext};
diff --git a/rust/src/gpu/shadow_gpu.rs b/rust/src/gpu/shadow_gpu.rs
index 036a686..008f0d4 100644
--- a/rust/src/gpu/shadow_gpu.rs
+++ b/rust/src/gpu/shadow_gpu.rs
@@ -1,6 +1,22 @@
-use ndarray::{Array2, ArrayView2};
+use ndarray::{Array2, Array3, ArrayView2};
 use std::sync::Arc;
-use wgpu::util::DeviceExt;
+
+/// Ensures mapped staging buffers are always unmapped on scope exit.
+struct MappedBufferGuard<'a> {
+    buffer: &'a wgpu::Buffer,
+}
+
+impl<'a> MappedBufferGuard<'a> {
+    fn new(buffer: &'a wgpu::Buffer) -> Self {
+        Self { buffer }
+    }
+}
+
+impl Drop for MappedBufferGuard<'_> {
+    // Unmap on drop so early returns / errors never leave a buffer mapped.
+    fn drop(&mut self) {
+        self.buffer.unmap();
+    }
+}
 
 /// Result struct for GPU shadow calculations
 pub struct GpuShadowResult {
@@ -15,13 +31,183 @@ pub struct GpuShadowResult {
     pub face_sun: Option<Array2<f32>>,
 }
 
+/// SVF-specific shadow result: only the 3 arrays needed for SVF computation.
+/// Skips wall outputs entirely, reducing staging bandwidth by ~70%.
+pub struct SvfShadowResult {
+    pub bldg_sh: Array2<f32>,
+    pub veg_sh: Option<Array2<f32>>,
+    pub veg_blocks_bldg_sh: Option<Array2<f32>>,
+}
+
+/// Uniform buffer matching the SvfAccumParams struct in svf_accumulation.wgsl.
+#[repr(C)]
+#[derive(Copy, Clone, bytemuck::Pod, bytemuck::Zeroable)]
+struct SvfAccumParams {
+    total_pixels: u32,
+    cols: u32,
+    rows: u32,
+    weight_iso: f32,
+    weight_n: f32,
+    weight_e: f32,
+    weight_s: f32,
+    weight_w: f32,
+    has_veg: u32,
+    // Padding keeps the struct 16-byte aligned for WGSL uniform layout.
+    _pad0: u32,
+    _pad1: u32,
+}
+
+/// Uniform buffer matching the U8PackParams struct in shadow_to_bitpack.wgsl.
+#[repr(C)]
+#[derive(Copy, Clone, bytemuck::Pod, bytemuck::Zeroable)]
+struct U8PackParams {
+    total_pixels: u32,
+    cols: u32,
+    rows: u32,
+    n_pack: u32,
+    matrix_words: u32,
+    has_veg: u32,
+    patch_byte_idx: u32,
+    patch_bit_mask: u32,
+    _pad0: u32,
+    _pad1: u32,
+    _pad2: u32,
+    _pad3: u32,
+}
+
+/// Bitpacked shadow matrices produced by GPU SVF path.
+pub struct SvfBitpackedShadowResult {
+    pub bldg_sh_matrix: Array3<u8>,
+    pub veg_sh_matrix: Array3<u8>,
+    pub veg_blocks_bldg_sh_matrix: Array3<u8>,
+}
+
+/// Result of GPU SVF accumulation — 15 arrays (5 building + 5 veg + 5 aveg).
+pub struct SvfAccumResult { + pub svf: Array2, + pub svf_n: Array2, + pub svf_e: Array2, + pub svf_s: Array2, + pub svf_w: Array2, + pub svf_veg: Option>, + pub svf_veg_n: Option>, + pub svf_veg_e: Option>, + pub svf_veg_s: Option>, + pub svf_veg_w: Option>, + pub svf_aveg: Option>, + pub svf_aveg_n: Option>, + pub svf_aveg_e: Option>, + pub svf_aveg_s: Option>, + pub svf_aveg_w: Option>, +} + +/// Cached GPU buffers for shadow calculations. +/// Reused across calls when grid dimensions remain constant. +struct CachedBuffers { + rows: usize, + cols: usize, + // Binding 0: Params (UNIFORM | COPY_DST) + params_buffer: wgpu::Buffer, + // Binding 1: DSM input (STORAGE | COPY_DST) + dsm_buffer: wgpu::Buffer, + // Binding 2: Building shadow output (STORAGE | COPY_SRC) + bldg_shadow_buffer: wgpu::Buffer, + // Binding 3: Propagated building height (STORAGE | COPY_SRC | COPY_DST) + propagated_bldg_height_buffer: wgpu::Buffer, + // Bindings 4-6: Vegetation inputs (STORAGE | COPY_DST) + veg_canopy_buffer: wgpu::Buffer, + veg_trunk_buffer: wgpu::Buffer, + bush_buffer: wgpu::Buffer, + // Bindings 7-9: Vegetation outputs (STORAGE | COPY_SRC) + veg_shadow_buffer: wgpu::Buffer, + propagated_veg_height_buffer: wgpu::Buffer, + veg_blocks_bldg_shadow_buffer: wgpu::Buffer, + // Bindings 10-11: Wall inputs (STORAGE | COPY_DST) + walls_buffer: wgpu::Buffer, + aspect_buffer: wgpu::Buffer, + // Bindings 12-16: Wall outputs (STORAGE | COPY_SRC) + wall_sh_buffer: wgpu::Buffer, + wall_sun_buffer: wgpu::Buffer, + wall_sh_veg_buffer: wgpu::Buffer, + face_sh_buffer: wgpu::Buffer, + face_sun_buffer: wgpu::Buffer, + // Staging buffer for GPU -> CPU readback (MAP_READ | COPY_DST) + staging_buffer: wgpu::Buffer, + // Bind group (references all buffer handles) + bind_group: wgpu::BindGroup, + // --- SVF accumulation (populated by init_svf_accumulation) --- + svf_params_buffer: Option, + svf_data_buffer: Option, + svf_result_staging: Option, + svf_bind_group: Option, + svf_has_veg: bool, + 
svf_num_arrays: usize, // 5 (no veg) or 15 (with veg) + // --- Shadow bitpack accumulation (GPU-side across patches) --- + shadow_u8_params_buffer: Option, + shadow_u8_output_buffer: Option, + shadow_u8_staging: Option, + shadow_u8_bind_group: Option, + shadow_u8_packed_size: u64, // total bytes in packed output + shadow_u8_n_pack: usize, + shadow_u8_matrix_bytes: usize, + shadow_u8_matrix_words: usize, + shadow_u8_num_matrices: usize, + // Signature of static inputs currently uploaded to GPU. + last_static_input_sig: Option, +} + +#[derive(Clone, Copy, PartialEq, Eq)] +struct StaticShadowInputSig { + dsm_ptr: usize, + veg_canopy_ptr: usize, + veg_trunk_ptr: usize, + bush_ptr: usize, + walls_ptr: usize, + aspect_ptr: usize, + rows: usize, + cols: usize, + has_veg: bool, + has_walls: bool, +} + /// GPU context for shadow calculations - maintains GPU resources across multiple calls pub struct ShadowGpuContext { - device: Arc, - queue: Arc, + pub(crate) device: Arc, + pub(crate) queue: Arc, + /// Adapter-reported maximum single buffer size in bytes. + pub(crate) max_buffer_size: u64, + /// Adapter-reported maximum workgroups per dispatch dimension. + max_compute_workgroups_per_dimension: u32, + /// GPU backend (Metal, Vulkan, Dx12, Gl, etc.). 
+ pub(crate) backend: wgpu::Backend, pipeline: wgpu::ComputePipeline, wall_pipeline: wgpu::ComputePipeline, bind_group_layout: wgpu::BindGroupLayout, + svf_pipeline: wgpu::ComputePipeline, + svf_bind_group_layout: wgpu::BindGroupLayout, + shadow_u8_pipeline: wgpu::ComputePipeline, + shadow_u8_bind_group_layout: wgpu::BindGroupLayout, + /// Cached buffers reused across calls with same grid dimensions + cached: std::sync::Mutex>, +} + +/// Uniform buffer struct for shadow shader parameters +#[repr(C)] +#[derive(Copy, Clone, bytemuck::Pod, bytemuck::Zeroable)] +struct ShadowParams { + rows: u32, + cols: u32, + azimuth_rad: f32, + altitude_rad: f32, + sin_azimuth: f32, + cos_azimuth: f32, + tan_azimuth: f32, + tan_altitude_by_scale: f32, + scale: f32, + max_index: f32, + max_local_dsm_ht: f32, + has_veg: u32, + has_walls: u32, + _padding: u32, } impl ShadowGpuContext { @@ -42,9 +228,14 @@ impl ShadowGpuContext { .await .map_err(|e| format!("Failed to find suitable GPU adapter: {:?}", e))?; - // Request higher limits for storage buffers + // Request higher limits for storage buffers and buffer sizes + let adapter_limits = adapter.limits(); let mut limits = wgpu::Limits::default(); limits.max_storage_buffers_per_shader_stage = 16; // We need 16 storage buffers + // Request native max buffer sizes for large SVF accumulation buffers + // (default 256 MiB is too small for packed 15-array SVF at 6.7M pixels) + limits.max_buffer_size = adapter_limits.max_buffer_size; + limits.max_storage_buffer_binding_size = adapter_limits.max_storage_buffer_binding_size; let (device, queue) = adapter .request_device(&wgpu::DeviceDescriptor { @@ -274,269 +465,271 @@ impl ShadowGpuContext { cache: None, }); - Ok(Self { - device, - queue, - pipeline, - wall_pipeline, - bind_group_layout, - }) - } - - /// Optimized version accepting ArrayView to avoid unnecessary copies - #[allow(clippy::too_many_arguments)] - pub fn compute_all_shadows_view( - &self, - dsm: ArrayView2, - veg_canopy_dsm_opt: 
Option>, - veg_trunk_dsm_opt: Option>, - bush_opt: Option>, - walls_opt: Option>, - aspect_opt: Option>, - azimuth_deg: f32, - altitude_deg: f32, - scale: f32, - max_local_dsm_ht: f32, - min_sun_elev_deg: f32, - ) -> Result { - let (rows, cols) = dsm.dim(); - let total_pixels = rows * cols; - - // Check if vegetation inputs are provided - let has_veg = - veg_canopy_dsm_opt.is_some() && veg_trunk_dsm_opt.is_some() && bush_opt.is_some(); - let has_walls = walls_opt.is_some() && aspect_opt.is_some(); - - // Helper to get contiguous slice or allocate temp buffer - let get_slice = |view: ArrayView2| -> Vec { - if view.is_standard_layout() { - view.as_slice().unwrap().to_vec() - } else { - view.iter().copied().collect() - } - }; - - // Use slice directly when contiguous, otherwise allocate - let dsm_data = get_slice(dsm); - let veg_canopy_data = veg_canopy_dsm_opt - .map(get_slice) - .unwrap_or_else(|| vec![0.0; total_pixels]); - let veg_trunk_data = veg_trunk_dsm_opt - .map(get_slice) - .unwrap_or_else(|| vec![0.0; total_pixels]); - let bush_data = bush_opt - .map(get_slice) - .unwrap_or_else(|| vec![0.0; total_pixels]); - let walls_data = walls_opt - .map(get_slice) - .unwrap_or_else(|| vec![0.0; total_pixels]); - let aspect_data = aspect_opt - .map(get_slice) - .unwrap_or_else(|| vec![0.0; total_pixels]); - - // Precompute trigonometric values - let azimuth_rad = azimuth_deg.to_radians(); - let altitude_rad = altitude_deg.to_radians(); - let sin_azimuth = azimuth_rad.sin(); - let cos_azimuth = azimuth_rad.cos(); - let tan_azimuth = azimuth_rad.tan(); - let tan_altitude_by_scale = altitude_rad.tan() / scale; - let min_sun_elev_rad = min_sun_elev_deg.to_radians(); - let max_reach_m = max_local_dsm_ht / min_sun_elev_rad.tan(); - let max_index = (max_reach_m / scale).ceil(); - - // Create uniform buffer with parameters - #[repr(C)] - #[derive(Copy, Clone, bytemuck::Pod, bytemuck::Zeroable)] - struct ShadowParams { - rows: u32, - cols: u32, - azimuth_rad: f32, - 
altitude_rad: f32, - sin_azimuth: f32, - cos_azimuth: f32, - tan_azimuth: f32, - tan_altitude_by_scale: f32, - scale: f32, - max_index: f32, - max_local_dsm_ht: f32, - has_veg: u32, - has_walls: u32, - _padding: u32, - } - - let params = ShadowParams { - rows: rows as u32, - cols: cols as u32, - azimuth_rad, - altitude_rad, - sin_azimuth, - cos_azimuth, - tan_azimuth, - tan_altitude_by_scale, - scale, - max_index, - max_local_dsm_ht, - has_veg: if has_veg { 1 } else { 0 }, - has_walls: if has_walls { 1 } else { 0 }, - _padding: 0, - }; - - // Create GPU buffers - let buffer_size = (total_pixels * std::mem::size_of::()) as u64; - - let params_buffer = self - .device - .create_buffer_init(&wgpu::util::BufferInitDescriptor { - label: Some("Shadow Params Buffer"), - contents: bytemuck::cast_slice(&[params]), - usage: wgpu::BufferUsages::UNIFORM | wgpu::BufferUsages::COPY_DST, - }); + // --- SVF accumulation pipeline --- + let svf_shader = device.create_shader_module(wgpu::ShaderModuleDescriptor { + label: Some("SVF Accumulation Shader"), + source: wgpu::ShaderSource::Wgsl(include_str!("svf_accumulation.wgsl").into()), + }); - // Binding 1: DSM - let dsm_buffer = self - .device - .create_buffer_init(&wgpu::util::BufferInitDescriptor { - label: Some("DSM Buffer"), - contents: bytemuck::cast_slice(&dsm_data), - usage: wgpu::BufferUsages::STORAGE | wgpu::BufferUsages::COPY_DST, + let svf_bind_group_layout = + device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor { + label: Some("SVF Accumulation Bind Group Layout"), + entries: &[ + // Binding 0: Uniform params + wgpu::BindGroupLayoutEntry { + binding: 0, + visibility: wgpu::ShaderStages::COMPUTE, + ty: wgpu::BindingType::Buffer { + ty: wgpu::BufferBindingType::Uniform, + has_dynamic_offset: false, + min_binding_size: None, + }, + count: None, + }, + // Binding 1: bldg_sh (read) + wgpu::BindGroupLayoutEntry { + binding: 1, + visibility: wgpu::ShaderStages::COMPUTE, + ty: wgpu::BindingType::Buffer { + ty: 
wgpu::BufferBindingType::Storage { read_only: true }, + has_dynamic_offset: false, + min_binding_size: None, + }, + count: None, + }, + // Binding 2: veg_sh (read) + wgpu::BindGroupLayoutEntry { + binding: 2, + visibility: wgpu::ShaderStages::COMPUTE, + ty: wgpu::BindingType::Buffer { + ty: wgpu::BufferBindingType::Storage { read_only: true }, + has_dynamic_offset: false, + min_binding_size: None, + }, + count: None, + }, + // Binding 3: veg_blocks_bldg_sh (read) + wgpu::BindGroupLayoutEntry { + binding: 3, + visibility: wgpu::ShaderStages::COMPUTE, + ty: wgpu::BindingType::Buffer { + ty: wgpu::BufferBindingType::Storage { read_only: true }, + has_dynamic_offset: false, + min_binding_size: None, + }, + count: None, + }, + // Binding 4: svf_data (read_write) + wgpu::BindGroupLayoutEntry { + binding: 4, + visibility: wgpu::ShaderStages::COMPUTE, + ty: wgpu::BindingType::Buffer { + ty: wgpu::BufferBindingType::Storage { read_only: false }, + has_dynamic_offset: false, + min_binding_size: None, + }, + count: None, + }, + ], }); - // Binding 2: Building shadow output - let bldg_shadow_buffer = self.device.create_buffer(&wgpu::BufferDescriptor { - label: Some("Building Shadow Buffer"), - size: buffer_size, - usage: wgpu::BufferUsages::STORAGE | wgpu::BufferUsages::COPY_SRC, - mapped_at_creation: false, + let svf_pipeline_layout = device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor { + label: Some("SVF Accumulation Pipeline Layout"), + bind_group_layouts: &[&svf_bind_group_layout], + push_constant_ranges: &[], }); - // Binding 3: Propagated building height - let propagated_bldg_height_buffer = self.device.create_buffer(&wgpu::BufferDescriptor { - label: Some("Propagated Building Height Buffer"), - size: buffer_size, - usage: wgpu::BufferUsages::STORAGE - | wgpu::BufferUsages::COPY_SRC - | wgpu::BufferUsages::COPY_DST, - mapped_at_creation: false, + let svf_pipeline = device.create_compute_pipeline(&wgpu::ComputePipelineDescriptor { + label: Some("SVF 
Accumulation Pipeline"), + layout: Some(&svf_pipeline_layout), + module: &svf_shader, + entry_point: Some("accumulate_svf"), + compilation_options: Default::default(), + cache: None, }); - self.queue.write_buffer( - &propagated_bldg_height_buffer, - 0, - bytemuck::cast_slice(&dsm_data), - ); - - // Bindings 4-6: Vegetation input buffers - let veg_canopy_buffer = self - .device - .create_buffer_init(&wgpu::util::BufferInitDescriptor { - label: Some("Veg Canopy Buffer"), - contents: bytemuck::cast_slice(&veg_canopy_data), - usage: wgpu::BufferUsages::STORAGE | wgpu::BufferUsages::COPY_DST, - }); + // --- Shadow bitpack accumulation pipeline --- + let shadow_u8_shader = device.create_shader_module(wgpu::ShaderModuleDescriptor { + label: Some("Shadow Bitpack Shader"), + source: wgpu::ShaderSource::Wgsl(include_str!("shadow_to_bitpack.wgsl").into()), + }); - let veg_trunk_buffer = self - .device - .create_buffer_init(&wgpu::util::BufferInitDescriptor { - label: Some("Veg Trunk Buffer"), - contents: bytemuck::cast_slice(&veg_trunk_data), - usage: wgpu::BufferUsages::STORAGE | wgpu::BufferUsages::COPY_DST, + let shadow_u8_bind_group_layout = + device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor { + label: Some("Shadow Bitpack Bind Group Layout"), + entries: &[ + // Binding 0: Uniform params + wgpu::BindGroupLayoutEntry { + binding: 0, + visibility: wgpu::ShaderStages::COMPUTE, + ty: wgpu::BindingType::Buffer { + ty: wgpu::BufferBindingType::Uniform, + has_dynamic_offset: false, + min_binding_size: None, + }, + count: None, + }, + // Binding 1: bldg_sh (read) + wgpu::BindGroupLayoutEntry { + binding: 1, + visibility: wgpu::ShaderStages::COMPUTE, + ty: wgpu::BindingType::Buffer { + ty: wgpu::BufferBindingType::Storage { read_only: true }, + has_dynamic_offset: false, + min_binding_size: None, + }, + count: None, + }, + // Binding 2: veg_sh (read) + wgpu::BindGroupLayoutEntry { + binding: 2, + visibility: wgpu::ShaderStages::COMPUTE, + ty: 
wgpu::BindingType::Buffer { + ty: wgpu::BufferBindingType::Storage { read_only: true }, + has_dynamic_offset: false, + min_binding_size: None, + }, + count: None, + }, + // Binding 3: veg_blocks_bldg_sh (read) + wgpu::BindGroupLayoutEntry { + binding: 3, + visibility: wgpu::ShaderStages::COMPUTE, + ty: wgpu::BindingType::Buffer { + ty: wgpu::BufferBindingType::Storage { read_only: true }, + has_dynamic_offset: false, + min_binding_size: None, + }, + count: None, + }, + // Binding 4: packed_output bit matrices (read_write) + wgpu::BindGroupLayoutEntry { + binding: 4, + visibility: wgpu::ShaderStages::COMPUTE, + ty: wgpu::BindingType::Buffer { + ty: wgpu::BufferBindingType::Storage { read_only: false }, + has_dynamic_offset: false, + min_binding_size: None, + }, + count: None, + }, + ], }); - let bush_buffer = self - .device - .create_buffer_init(&wgpu::util::BufferInitDescriptor { - label: Some("Bush Buffer"), - contents: bytemuck::cast_slice(&bush_data), - usage: wgpu::BufferUsages::STORAGE | wgpu::BufferUsages::COPY_DST, + let shadow_u8_pipeline_layout = + device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor { + label: Some("Shadow Bitpack Pipeline Layout"), + bind_group_layouts: &[&shadow_u8_bind_group_layout], + push_constant_ranges: &[], }); - // Bindings 7-9: Vegetation output buffers - let veg_shadow_buffer = self.device.create_buffer(&wgpu::BufferDescriptor { - label: Some("Veg Shadow Buffer"), - size: buffer_size, - usage: wgpu::BufferUsages::STORAGE | wgpu::BufferUsages::COPY_SRC, - mapped_at_creation: false, - }); - - let propagated_veg_height_buffer = self.device.create_buffer(&wgpu::BufferDescriptor { - label: Some("Propagated Veg Height Buffer"), - size: buffer_size, - usage: wgpu::BufferUsages::STORAGE - | wgpu::BufferUsages::COPY_SRC - | wgpu::BufferUsages::COPY_DST, - mapped_at_creation: false, + let shadow_u8_pipeline = device.create_compute_pipeline(&wgpu::ComputePipelineDescriptor { + label: Some("Shadow Bitpack Pipeline"), + layout: 
Some(&shadow_u8_pipeline_layout), + module: &shadow_u8_shader, + entry_point: Some("shadow_to_bitpack"), + compilation_options: Default::default(), + cache: None, }); - if has_veg { - self.queue.write_buffer( - &propagated_veg_height_buffer, - 0, - bytemuck::cast_slice(&veg_canopy_data), - ); - } - - let veg_blocks_bldg_shadow_buffer = self.device.create_buffer(&wgpu::BufferDescriptor { - label: Some("Veg Blocks Bldg Shadow Buffer"), - size: buffer_size, - usage: wgpu::BufferUsages::STORAGE | wgpu::BufferUsages::COPY_SRC, - mapped_at_creation: false, - }); + let backend = adapter.get_info().backend; - // Bindings 10-11: Wall input buffers - let walls_buffer = self - .device - .create_buffer_init(&wgpu::util::BufferInitDescriptor { - label: Some("Walls Buffer"), - contents: bytemuck::cast_slice(&walls_data), - usage: wgpu::BufferUsages::STORAGE | wgpu::BufferUsages::COPY_DST, - }); + Ok(Self { + device, + queue, + max_buffer_size: adapter_limits.max_buffer_size, + max_compute_workgroups_per_dimension: adapter_limits + .max_compute_workgroups_per_dimension, + backend, + pipeline, + wall_pipeline, + bind_group_layout, + svf_pipeline, + svf_bind_group_layout, + shadow_u8_pipeline, + shadow_u8_bind_group_layout, + cached: std::sync::Mutex::new(None), + }) + } - let aspect_buffer = self - .device - .create_buffer_init(&wgpu::util::BufferInitDescriptor { - label: Some("Aspect Buffer"), - contents: bytemuck::cast_slice(&aspect_data), - usage: wgpu::BufferUsages::STORAGE | wgpu::BufferUsages::COPY_DST, - }); + fn checked_workgroups_2d( + &self, + rows: usize, + cols: usize, + workgroup_x: u32, + workgroup_y: u32, + label: &str, + ) -> Result<(u32, u32), String> { + let workgroups_x = (cols as u32).div_ceil(workgroup_x); + let workgroups_y = (rows as u32).div_ceil(workgroup_y); + let limit = self.max_compute_workgroups_per_dimension; + if workgroups_x > limit || workgroups_y > limit { + return Err(format!( + "{} dispatch exceeds GPU workgroup limit {}: got ({}, {}) for grid 
{}x{} and workgroup {}x{}", + label, limit, workgroups_x, workgroups_y, rows, cols, workgroup_x, workgroup_y + )); + } + Ok((workgroups_x, workgroups_y)) + } - // Bindings 12-16: Wall output buffers - let wall_sh_buffer = self.device.create_buffer(&wgpu::BufferDescriptor { - label: Some("Wall Shadow Buffer"), - size: buffer_size, - usage: wgpu::BufferUsages::STORAGE | wgpu::BufferUsages::COPY_SRC, - mapped_at_creation: false, - }); + /// Allocate a fresh set of GPU buffers for the given grid dimensions. + fn allocate_buffers(&self, rows: usize, cols: usize) -> CachedBuffers { + let total_pixels = rows * cols; + let buffer_size = (total_pixels * std::mem::size_of::()) as u64; + let params_size = std::mem::size_of::() as u64; - let wall_sun_buffer = self.device.create_buffer(&wgpu::BufferDescriptor { - label: Some("Wall Sun Buffer"), - size: buffer_size, - usage: wgpu::BufferUsages::STORAGE | wgpu::BufferUsages::COPY_SRC, - mapped_at_creation: false, - }); + // Helper to create a storage buffer with given usage flags + let make_buffer = |label: &str, size: u64, usage: wgpu::BufferUsages| -> wgpu::Buffer { + self.device.create_buffer(&wgpu::BufferDescriptor { + label: Some(label), + size, + usage, + mapped_at_creation: false, + }) + }; - let wall_sh_veg_buffer = self.device.create_buffer(&wgpu::BufferDescriptor { - label: Some("Wall Shadow Veg Buffer"), - size: buffer_size, - usage: wgpu::BufferUsages::STORAGE | wgpu::BufferUsages::COPY_SRC, - mapped_at_creation: false, - }); + let input_usage = wgpu::BufferUsages::STORAGE + | wgpu::BufferUsages::COPY_DST + | wgpu::BufferUsages::COPY_SRC; + let output_usage = wgpu::BufferUsages::STORAGE | wgpu::BufferUsages::COPY_SRC; + let working_usage = wgpu::BufferUsages::STORAGE + | wgpu::BufferUsages::COPY_SRC + | wgpu::BufferUsages::COPY_DST; - let face_sh_buffer = self.device.create_buffer(&wgpu::BufferDescriptor { - label: Some("Face Shadow Buffer"), - size: buffer_size, - usage: wgpu::BufferUsages::STORAGE | 
wgpu::BufferUsages::COPY_SRC, - mapped_at_creation: false, - }); + let params_buffer = make_buffer( + "Shadow Params Buffer", + params_size, + wgpu::BufferUsages::UNIFORM | wgpu::BufferUsages::COPY_DST, + ); + let dsm_buffer = make_buffer("DSM Buffer", buffer_size, input_usage); + let bldg_shadow_buffer = make_buffer("Building Shadow Buffer", buffer_size, output_usage); + let propagated_bldg_height_buffer = make_buffer( + "Propagated Building Height Buffer", + buffer_size, + working_usage, + ); + let veg_canopy_buffer = make_buffer("Veg Canopy Buffer", buffer_size, input_usage); + let veg_trunk_buffer = make_buffer("Veg Trunk Buffer", buffer_size, input_usage); + let bush_buffer = make_buffer("Bush Buffer", buffer_size, input_usage); + let veg_shadow_buffer = make_buffer("Veg Shadow Buffer", buffer_size, output_usage); + let propagated_veg_height_buffer = + make_buffer("Propagated Veg Height Buffer", buffer_size, working_usage); + let veg_blocks_bldg_shadow_buffer = + make_buffer("Veg Blocks Bldg Shadow Buffer", buffer_size, output_usage); + let walls_buffer = make_buffer("Walls Buffer", buffer_size, input_usage); + let aspect_buffer = make_buffer("Aspect Buffer", buffer_size, input_usage); + let wall_sh_buffer = make_buffer("Wall Shadow Buffer", buffer_size, output_usage); + let wall_sun_buffer = make_buffer("Wall Sun Buffer", buffer_size, output_usage); + let wall_sh_veg_buffer = make_buffer("Wall Shadow Veg Buffer", buffer_size, output_usage); + let face_sh_buffer = make_buffer("Face Shadow Buffer", buffer_size, output_usage); + let face_sun_buffer = make_buffer("Face Sun Buffer", buffer_size, output_usage); - let face_sun_buffer = self.device.create_buffer(&wgpu::BufferDescriptor { - label: Some("Face Sun Buffer"), - size: buffer_size, - usage: wgpu::BufferUsages::STORAGE | wgpu::BufferUsages::COPY_SRC, - mapped_at_creation: false, - }); + let staging_buffer = make_buffer( + "Staging Buffer", + buffer_size * 10, + wgpu::BufferUsages::MAP_READ | 
wgpu::BufferUsages::COPY_DST, + ); - // Create bind group with all buffers let bind_group = self.device.create_bind_group(&wgpu::BindGroupDescriptor { label: Some("Shadow Bind Group"), layout: &self.bind_group_layout, @@ -612,6 +805,173 @@ impl ShadowGpuContext { ], }); + eprintln!( + "[GPU] Allocated buffer cache for {}x{} grid ({:.1} MB)", + rows, + cols, + (buffer_size * 17 + buffer_size * 10) as f64 / 1_048_576.0 + ); + + CachedBuffers { + rows, + cols, + params_buffer, + dsm_buffer, + bldg_shadow_buffer, + propagated_bldg_height_buffer, + veg_canopy_buffer, + veg_trunk_buffer, + bush_buffer, + veg_shadow_buffer, + propagated_veg_height_buffer, + veg_blocks_bldg_shadow_buffer, + walls_buffer, + aspect_buffer, + wall_sh_buffer, + wall_sun_buffer, + wall_sh_veg_buffer, + face_sh_buffer, + face_sun_buffer, + staging_buffer, + bind_group, + svf_params_buffer: None, + svf_data_buffer: None, + svf_result_staging: None, + svf_bind_group: None, + svf_has_veg: false, + svf_num_arrays: 0, + shadow_u8_params_buffer: None, + shadow_u8_output_buffer: None, + shadow_u8_staging: None, + shadow_u8_bind_group: None, + shadow_u8_packed_size: 0, + shadow_u8_n_pack: 0, + shadow_u8_matrix_bytes: 0, + shadow_u8_matrix_words: 0, + shadow_u8_num_matrices: 0, + last_static_input_sig: None, + } + } + + /// Optimized version accepting ArrayView to avoid unnecessary copies + #[allow(clippy::too_many_arguments)] + pub fn compute_all_shadows_view( + &self, + dsm: ArrayView2, + veg_canopy_dsm_opt: Option>, + veg_trunk_dsm_opt: Option>, + bush_opt: Option>, + walls_opt: Option>, + aspect_opt: Option>, + need_propagated_veg_height: bool, + need_full_wall_outputs: bool, + azimuth_deg: f32, + altitude_deg: f32, + scale: f32, + max_local_dsm_ht: f32, + min_sun_elev_deg: f32, + max_shadow_distance_m: f32, + ) -> Result { + let (rows, cols) = dsm.dim(); + let total_pixels = rows * cols; + + // Check if vegetation inputs are provided + let has_veg = + veg_canopy_dsm_opt.is_some() && 
veg_trunk_dsm_opt.is_some() && bush_opt.is_some(); + let has_walls = walls_opt.is_some() && aspect_opt.is_some(); + + // Precompute trigonometric values + let azimuth_rad = azimuth_deg.to_radians(); + let altitude_rad = altitude_deg.to_radians(); + let sin_azimuth = azimuth_rad.sin(); + let cos_azimuth = azimuth_rad.cos(); + let tan_azimuth = azimuth_rad.tan(); + let tan_altitude_by_scale = altitude_rad.tan() / scale; + let min_sun_elev_rad = min_sun_elev_deg.to_radians(); + let height_reach_m = max_local_dsm_ht / min_sun_elev_rad.tan(); + let max_reach_m = if max_shadow_distance_m > 0.0 { height_reach_m.min(max_shadow_distance_m) } else { height_reach_m }; + let max_index = (max_reach_m / scale).ceil(); + + let params = ShadowParams { + rows: rows as u32, + cols: cols as u32, + azimuth_rad, + altitude_rad, + sin_azimuth, + cos_azimuth, + tan_azimuth, + tan_altitude_by_scale, + scale, + max_index, + max_local_dsm_ht, + has_veg: if has_veg { 1 } else { 0 }, + has_walls: if has_walls { 1 } else { 0 }, + _padding: 0, + }; + + // Get or create cached buffers for this grid size + let mut cache_guard = self + .cached + .lock() + .map_err(|e| format!("Failed to lock buffer cache: {}", e))?; + + let needs_realloc = match cache_guard.as_ref() { + Some(c) => c.rows != rows || c.cols != cols, + None => true, + }; + if needs_realloc { + *cache_guard = Some(self.allocate_buffers(rows, cols)); + } + + let buffers = cache_guard + .as_mut() + .ok_or_else(|| "Buffer cache unexpectedly empty".to_string())?; + + let buffer_size = (total_pixels * std::mem::size_of::()) as u64; + + // Dynamic params change every timestep. + self.queue + .write_buffer(&buffers.params_buffer, 0, bytemuck::cast_slice(&[params])); + + // Static inputs are invariant across timesteps for a tile; avoid + // re-uploading when backing arrays are unchanged. 
+ let static_sig = StaticShadowInputSig { + dsm_ptr: dsm.as_ptr() as usize, + veg_canopy_ptr: veg_canopy_dsm_opt.map_or(0, |a| a.as_ptr() as usize), + veg_trunk_ptr: veg_trunk_dsm_opt.map_or(0, |a| a.as_ptr() as usize), + bush_ptr: bush_opt.map_or(0, |a| a.as_ptr() as usize), + walls_ptr: walls_opt.map_or(0, |a| a.as_ptr() as usize), + aspect_ptr: aspect_opt.map_or(0, |a| a.as_ptr() as usize), + rows, + cols, + has_veg, + has_walls, + }; + + if buffers.last_static_input_sig != Some(static_sig) { + Self::write_2d_f32(&self.queue, &buffers.dsm_buffer, &dsm); + if has_veg { + let veg_canopy = veg_canopy_dsm_opt + .ok_or_else(|| "Vegetation canopy missing despite has_veg=true".to_string())?; + let veg_trunk = veg_trunk_dsm_opt + .ok_or_else(|| "Vegetation trunk missing despite has_veg=true".to_string())?; + let bush = bush_opt + .ok_or_else(|| "Bush raster missing despite has_veg=true".to_string())?; + Self::write_2d_f32(&self.queue, &buffers.veg_canopy_buffer, &veg_canopy); + Self::write_2d_f32(&self.queue, &buffers.veg_trunk_buffer, &veg_trunk); + Self::write_2d_f32(&self.queue, &buffers.bush_buffer, &bush); + } + if has_walls { + let walls = + walls_opt.ok_or_else(|| "Walls missing despite has_walls=true".to_string())?; + let aspect = aspect_opt + .ok_or_else(|| "Aspect missing despite has_walls=true".to_string())?; + Self::write_2d_f32(&self.queue, &buffers.walls_buffer, &walls); + Self::write_2d_f32(&self.queue, &buffers.aspect_buffer, &aspect); + } + buffers.last_static_input_sig = Some(static_sig); + } + // Encode and submit compute passes let mut encoder = self .device @@ -619,10 +979,33 @@ impl ShadowGpuContext { label: Some("Shadow Compute Encoder"), }); + // Reset mutable propagation buffers from static input buffers on-GPU. 
+ encoder.copy_buffer_to_buffer( + &buffers.dsm_buffer, + 0, + &buffers.propagated_bldg_height_buffer, + 0, + buffer_size, + ); + if has_veg { + encoder.copy_buffer_to_buffer( + &buffers.veg_canopy_buffer, + 0, + &buffers.propagated_veg_height_buffer, + 0, + buffer_size, + ); + } + let workgroup_size_x = 16; let workgroup_size_y = 16; - let num_workgroups_x = (cols as u32 + workgroup_size_x - 1) / workgroup_size_x; - let num_workgroups_y = (rows as u32 + workgroup_size_y - 1) / workgroup_size_y; + let (num_workgroups_x, num_workgroups_y) = self.checked_workgroups_2d( + rows, + cols, + workgroup_size_x, + workgroup_size_y, + "shadow propagation", + )?; // First pass: Main shadow propagation { @@ -631,7 +1014,7 @@ impl ShadowGpuContext { timestamp_writes: None, }); compute_pass.set_pipeline(&self.pipeline); - compute_pass.set_bind_group(0, &bind_group, &[]); + compute_pass.set_bind_group(0, &buffers.bind_group, &[]); compute_pass.dispatch_workgroups(num_workgroups_x, num_workgroups_y, 1); } @@ -642,177 +1025,215 @@ impl ShadowGpuContext { timestamp_writes: None, }); compute_pass.set_pipeline(&self.wall_pipeline); - compute_pass.set_bind_group(0, &bind_group, &[]); + compute_pass.set_bind_group(0, &buffers.bind_group, &[]); compute_pass.dispatch_workgroups(num_workgroups_x, num_workgroups_y, 1); } - // Create staging buffers and copy results - let staging_buffer = self.device.create_buffer(&wgpu::BufferDescriptor { - label: Some("Staging Buffer"), - size: buffer_size * 10, // Large enough for multiple outputs - usage: wgpu::BufferUsages::MAP_READ | wgpu::BufferUsages::COPY_DST, - mapped_at_creation: false, - }); - - // Copy building shadow - encoder.copy_buffer_to_buffer(&bldg_shadow_buffer, 0, &staging_buffer, 0, buffer_size); + // Copy only required outputs to staging to reduce readback bandwidth. 
+ let include_prop_veg = has_veg && need_propagated_veg_height; + let mut write_offset = 0u64; + encoder.copy_buffer_to_buffer( + &buffers.bldg_shadow_buffer, + 0, + &buffers.staging_buffer, + write_offset, + buffer_size, + ); + write_offset += buffer_size; - // Copy vegetation outputs if enabled - let veg_offset = buffer_size; if has_veg { encoder.copy_buffer_to_buffer( - &veg_shadow_buffer, + &buffers.veg_shadow_buffer, 0, - &staging_buffer, - veg_offset, + &buffers.staging_buffer, + write_offset, buffer_size, ); + write_offset += buffer_size; encoder.copy_buffer_to_buffer( - &veg_blocks_bldg_shadow_buffer, + &buffers.veg_blocks_bldg_shadow_buffer, 0, - &staging_buffer, - veg_offset + buffer_size, - buffer_size, - ); - encoder.copy_buffer_to_buffer( - &propagated_veg_height_buffer, - 0, - &staging_buffer, - veg_offset + buffer_size * 2, + &buffers.staging_buffer, + write_offset, buffer_size, ); + write_offset += buffer_size; + if include_prop_veg { + encoder.copy_buffer_to_buffer( + &buffers.propagated_veg_height_buffer, + 0, + &buffers.staging_buffer, + write_offset, + buffer_size, + ); + write_offset += buffer_size; + } } - // Copy wall outputs if enabled - let wall_offset = buffer_size * 4; if has_walls { + if need_full_wall_outputs { + encoder.copy_buffer_to_buffer( + &buffers.wall_sh_buffer, + 0, + &buffers.staging_buffer, + write_offset, + buffer_size, + ); + write_offset += buffer_size; + } encoder.copy_buffer_to_buffer( - &wall_sh_buffer, - 0, - &staging_buffer, - wall_offset, - buffer_size, - ); - encoder.copy_buffer_to_buffer( - &wall_sun_buffer, - 0, - &staging_buffer, - wall_offset + buffer_size, - buffer_size, - ); - encoder.copy_buffer_to_buffer( - &wall_sh_veg_buffer, - 0, - &staging_buffer, - wall_offset + buffer_size * 2, - buffer_size, - ); - encoder.copy_buffer_to_buffer( - &face_sh_buffer, - 0, - &staging_buffer, - wall_offset + buffer_size * 3, - buffer_size, - ); - encoder.copy_buffer_to_buffer( - &face_sun_buffer, + 
&buffers.wall_sun_buffer, 0, - &staging_buffer, - wall_offset + buffer_size * 4, + &buffers.staging_buffer, + write_offset, buffer_size, ); + write_offset += buffer_size; + if need_full_wall_outputs { + encoder.copy_buffer_to_buffer( + &buffers.wall_sh_veg_buffer, + 0, + &buffers.staging_buffer, + write_offset, + buffer_size, + ); + write_offset += buffer_size; + encoder.copy_buffer_to_buffer( + &buffers.face_sh_buffer, + 0, + &buffers.staging_buffer, + write_offset, + buffer_size, + ); + write_offset += buffer_size; + encoder.copy_buffer_to_buffer( + &buffers.face_sun_buffer, + 0, + &buffers.staging_buffer, + write_offset, + buffer_size, + ); + write_offset += buffer_size; + } } + let read_size = write_offset; - self.queue.submit(Some(encoder.finish())); + let submission_index = self.queue.submit(Some(encoder.finish())); - // Read back all results - let buffer_slice = staging_buffer.slice(..); + // Read back only populated bytes from staging buffer. + let buffer_slice = buffers.staging_buffer.slice(..read_size); let (sender, receiver) = std::sync::mpsc::channel(); buffer_slice.map_async(wgpu::MapMode::Read, move |result| { - sender.send(result).unwrap(); + let _ = sender.send(result); }); self.device .poll(wgpu::PollType::Wait { - submission_index: None, + submission_index: Some(submission_index), timeout: None, }) - .unwrap(); + .map_err(|e| format!("GPU poll failed while reading shadow buffers: {:?}", e))?; receiver .recv() - .unwrap() + .map_err(|e| format!("Failed waiting for shadow buffer mapping: {}", e))? 
.map_err(|e| format!("Failed to map buffer: {:?}", e))?; + let _unmap_guard = MappedBufferGuard::new(&buffers.staging_buffer); let data = buffer_slice.get_mapped_range(); let all_data: &[f32] = bytemuck::cast_slice(&data); // Extract building shadow - let bldg_sh = Array2::from_shape_vec((rows, cols), all_data[..total_pixels].to_vec()) - .map_err(|e| format!("Failed to create building shadow array: {}", e))?; + let mut read_offset_px = 0usize; + let bldg_sh = Array2::from_shape_vec( + (rows, cols), + all_data[read_offset_px..read_offset_px + total_pixels].to_vec(), + ) + .map_err(|e| format!("Failed to create building shadow array: {}", e))?; + read_offset_px += total_pixels; // Extract vegetation results if enabled let (veg_sh, veg_blocks_bldg_sh, propagated_veg_height) = if has_veg { - let veg_offset_px = total_pixels; let veg_sh = Array2::from_shape_vec( (rows, cols), - all_data[veg_offset_px..veg_offset_px + total_pixels].to_vec(), + all_data[read_offset_px..read_offset_px + total_pixels].to_vec(), ) - .ok(); + .map_err(|e| format!("Failed to create vegetation shadow array: {}", e))?; + read_offset_px += total_pixels; let veg_blocks = Array2::from_shape_vec( (rows, cols), - all_data[veg_offset_px + total_pixels..veg_offset_px + total_pixels * 2].to_vec(), - ) - .ok(); - let prop_veg = Array2::from_shape_vec( - (rows, cols), - all_data[veg_offset_px + total_pixels * 2..veg_offset_px + total_pixels * 3] - .to_vec(), + all_data[read_offset_px..read_offset_px + total_pixels].to_vec(), ) - .ok(); - (veg_sh, veg_blocks, prop_veg) + .map_err(|e| format!("Failed to create vegetation-blocking shadow array: {}", e))?; + read_offset_px += total_pixels; + let prop_veg = if include_prop_veg { + let arr = Array2::from_shape_vec( + (rows, cols), + all_data[read_offset_px..read_offset_px + total_pixels].to_vec(), + ) + .map_err(|e| { + format!("Failed to create propagated vegetation height array: {}", e) + })?; + read_offset_px += total_pixels; + Some(arr) + } else { + None + 
}; + (Some(veg_sh), Some(veg_blocks), prop_veg) } else { (None, None, None) }; // Extract wall results if enabled let (wall_sh, wall_sun, wall_sh_veg, face_sh, face_sun) = if has_walls { - let wall_offset_px = total_pixels * 4; - let wall_sh = Array2::from_shape_vec( - (rows, cols), - all_data[wall_offset_px..wall_offset_px + total_pixels].to_vec(), - ) - .ok(); + let wall_sh = if need_full_wall_outputs { + let arr = Array2::from_shape_vec( + (rows, cols), + all_data[read_offset_px..read_offset_px + total_pixels].to_vec(), + ) + .map_err(|e| format!("Failed to create wall shadow array: {}", e))?; + read_offset_px += total_pixels; + Some(arr) + } else { + None + }; let wall_sun = Array2::from_shape_vec( (rows, cols), - all_data[wall_offset_px + total_pixels..wall_offset_px + total_pixels * 2].to_vec(), - ) - .ok(); - let wall_sh_veg = Array2::from_shape_vec( - (rows, cols), - all_data[wall_offset_px + total_pixels * 2..wall_offset_px + total_pixels * 3] - .to_vec(), - ) - .ok(); - let face_sh = Array2::from_shape_vec( - (rows, cols), - all_data[wall_offset_px + total_pixels * 3..wall_offset_px + total_pixels * 4] - .to_vec(), - ) - .ok(); - let face_sun = Array2::from_shape_vec( - (rows, cols), - all_data[wall_offset_px + total_pixels * 4..wall_offset_px + total_pixels * 5] - .to_vec(), + all_data[read_offset_px..read_offset_px + total_pixels].to_vec(), ) - .ok(); - (wall_sh, wall_sun, wall_sh_veg, face_sh, face_sun) + .map_err(|e| format!("Failed to create wall sunlit array: {}", e))?; + read_offset_px += total_pixels; + if need_full_wall_outputs { + let wall_sh_veg = Array2::from_shape_vec( + (rows, cols), + all_data[read_offset_px..read_offset_px + total_pixels].to_vec(), + ) + .map_err(|e| format!("Failed to create wall vegetation-shadow array: {}", e))?; + read_offset_px += total_pixels; + let face_sh = Array2::from_shape_vec( + (rows, cols), + all_data[read_offset_px..read_offset_px + total_pixels].to_vec(), + ) + .map_err(|e| format!("Failed to create wall 
face-shadow array: {}", e))?; + read_offset_px += total_pixels; + let face_sun = Array2::from_shape_vec( + (rows, cols), + all_data[read_offset_px..read_offset_px + total_pixels].to_vec(), + ) + .map_err(|e| format!("Failed to create wall face-sun array: {}", e))?; + ( + wall_sh, + Some(wall_sun), + Some(wall_sh_veg), + Some(face_sh), + Some(face_sun), + ) + } else { + (wall_sh, Some(wall_sun), None, None, None) + } } else { (None, None, None, None, None) }; - drop(data); - staging_buffer.unmap(); - Ok(GpuShadowResult { bldg_sh, veg_sh, @@ -825,6 +1246,949 @@ impl ShadowGpuContext { face_sun, }) } + + /// SVF-optimized shadow computation. + /// + /// Compared to `compute_all_shadows_view()`, this: + /// - Skips the wall shader dispatch entirely + /// - Skips writing wall/aspect input buffers (saves ~50MB/call) + /// - Copies only 3 arrays to staging instead of 10 (~70% less readback) + #[allow(clippy::too_many_arguments)] + pub fn compute_shadows_for_svf( + &self, + dsm: ArrayView2, + veg_canopy_dsm_opt: Option>, + veg_trunk_dsm_opt: Option>, + bush_opt: Option>, + azimuth_deg: f32, + altitude_deg: f32, + scale: f32, + max_local_dsm_ht: f32, + min_sun_elev_deg: f32, + max_shadow_distance_m: f32, + ) -> Result { + let (rows, cols) = dsm.dim(); + let total_pixels = rows * cols; + + // Handle zenith case (altitude >= 89.5°): no shadows from directly overhead + if altitude_deg >= 89.5 { + let dim = (rows, cols); + return Ok(SvfShadowResult { + bldg_sh: Array2::ones(dim), + veg_sh: if veg_canopy_dsm_opt.is_some() { + Some(Array2::ones(dim)) + } else { + None + }, + veg_blocks_bldg_sh: if veg_canopy_dsm_opt.is_some() { + Some(Array2::ones(dim)) + } else { + None + }, + }); + } + + let has_veg = + veg_canopy_dsm_opt.is_some() && veg_trunk_dsm_opt.is_some() && bush_opt.is_some(); + + let get_slice = |view: ArrayView2| -> Vec { + if let Some(slice) = view.as_slice() { + slice.to_vec() + } else { + view.iter().copied().collect() + } + }; + + let dsm_data = get_slice(dsm); + 
+ // Precompute trigonometric values + let azimuth_rad = azimuth_deg.to_radians(); + let altitude_rad = altitude_deg.to_radians(); + let min_sun_elev_rad = min_sun_elev_deg.to_radians(); + let height_reach_m = max_local_dsm_ht / min_sun_elev_rad.tan(); + let max_reach_m = if max_shadow_distance_m > 0.0 { height_reach_m.min(max_shadow_distance_m) } else { height_reach_m }; + let max_index = (max_reach_m / scale).ceil(); + + let params = ShadowParams { + rows: rows as u32, + cols: cols as u32, + azimuth_rad, + altitude_rad, + sin_azimuth: azimuth_rad.sin(), + cos_azimuth: azimuth_rad.cos(), + tan_azimuth: azimuth_rad.tan(), + tan_altitude_by_scale: altitude_rad.tan() / scale, + scale, + max_index, + max_local_dsm_ht, + has_veg: if has_veg { 1 } else { 0 }, + has_walls: 0, // SVF never uses walls + _padding: 0, + }; + + // Get or create cached buffers + let mut cache_guard = self + .cached + .lock() + .map_err(|e| format!("Failed to lock buffer cache: {}", e))?; + + let needs_realloc = match cache_guard.as_ref() { + Some(c) => c.rows != rows || c.cols != cols, + None => true, + }; + if needs_realloc { + *cache_guard = Some(self.allocate_buffers(rows, cols)); + } + + let buffers = cache_guard + .as_ref() + .ok_or_else(|| "Buffer cache unexpectedly empty".to_string())?; + + let buffer_size = (total_pixels * std::mem::size_of::()) as u64; + + // Write only needed inputs (skip walls/aspect entirely) + self.queue + .write_buffer(&buffers.params_buffer, 0, bytemuck::cast_slice(&[params])); + self.queue + .write_buffer(&buffers.dsm_buffer, 0, bytemuck::cast_slice(&dsm_data)); + self.queue.write_buffer( + &buffers.propagated_bldg_height_buffer, + 0, + bytemuck::cast_slice(&dsm_data), + ); + + if has_veg { + let veg_canopy = veg_canopy_dsm_opt + .ok_or_else(|| "Vegetation canopy DSM missing despite has_veg=true".to_string())?; + let veg_trunk = veg_trunk_dsm_opt + .ok_or_else(|| "Vegetation trunk DSM missing despite has_veg=true".to_string())?; + let bush = + 
bush_opt.ok_or_else(|| "Bush raster missing despite has_veg=true".to_string())?; + let veg_canopy_data = get_slice(veg_canopy); + let veg_trunk_data = get_slice(veg_trunk); + let bush_data = get_slice(bush); + self.queue.write_buffer( + &buffers.veg_canopy_buffer, + 0, + bytemuck::cast_slice(&veg_canopy_data), + ); + self.queue.write_buffer( + &buffers.veg_trunk_buffer, + 0, + bytemuck::cast_slice(&veg_trunk_data), + ); + self.queue + .write_buffer(&buffers.bush_buffer, 0, bytemuck::cast_slice(&bush_data)); + self.queue.write_buffer( + &buffers.propagated_veg_height_buffer, + 0, + bytemuck::cast_slice(&veg_canopy_data), + ); + } + + // Encode: shadow propagation only (no wall pass) + let mut encoder = self + .device + .create_command_encoder(&wgpu::CommandEncoderDescriptor { + label: Some("SVF Shadow Compute Encoder"), + }); + + let workgroup_size_x = 16; + let workgroup_size_y = 16; + let (num_workgroups_x, num_workgroups_y) = self.checked_workgroups_2d( + rows, + cols, + workgroup_size_x, + workgroup_size_y, + "svf shadow propagation", + )?; + + { + let mut compute_pass = encoder.begin_compute_pass(&wgpu::ComputePassDescriptor { + label: Some("SVF Shadow Propagation Pass"), + timestamp_writes: None, + }); + compute_pass.set_pipeline(&self.pipeline); + compute_pass.set_bind_group(0, &buffers.bind_group, &[]); + compute_pass.dispatch_workgroups(num_workgroups_x, num_workgroups_y, 1); + } + // No wall pass — SVF never uses walls + + // Copy only 3 arrays to staging (instead of up to 10) + encoder.copy_buffer_to_buffer( + &buffers.bldg_shadow_buffer, + 0, + &buffers.staging_buffer, + 0, + buffer_size, + ); + if has_veg { + encoder.copy_buffer_to_buffer( + &buffers.veg_shadow_buffer, + 0, + &buffers.staging_buffer, + buffer_size, + buffer_size, + ); + encoder.copy_buffer_to_buffer( + &buffers.veg_blocks_bldg_shadow_buffer, + 0, + &buffers.staging_buffer, + buffer_size * 2, + buffer_size, + ); + } + + let submission_index = self.queue.submit(Some(encoder.finish())); + 
+ // Map only what we need (1 or 3 arrays) + let read_size = if has_veg { + buffer_size * 3 + } else { + buffer_size + }; + let buffer_slice = buffers.staging_buffer.slice(..read_size); + let (sender, receiver) = std::sync::mpsc::channel(); + buffer_slice.map_async(wgpu::MapMode::Read, move |result| { + let _ = sender.send(result); + }); + + self.device + .poll(wgpu::PollType::Wait { + submission_index: Some(submission_index), + timeout: None, + }) + .map_err(|e| format!("GPU poll failed while reading SVF shadow buffers: {:?}", e))?; + receiver + .recv() + .map_err(|e| format!("Failed waiting for SVF shadow buffer mapping: {}", e))? + .map_err(|e| format!("Failed to map buffer: {:?}", e))?; + + let _unmap_guard = MappedBufferGuard::new(&buffers.staging_buffer); + let data = buffer_slice.get_mapped_range(); + let all_data: &[f32] = bytemuck::cast_slice(&data); + + let bldg_sh = Array2::from_shape_vec((rows, cols), all_data[..total_pixels].to_vec()) + .map_err(|e| format!("Failed to create bldg_sh array: {}", e))?; + + let (veg_sh, veg_blocks_bldg_sh) = if has_veg { + let veg = Array2::from_shape_vec( + (rows, cols), + all_data[total_pixels..total_pixels * 2].to_vec(), + ) + .map_err(|e| format!("Failed to create SVF vegetation shadow array: {}", e))?; + let veg_blocks = Array2::from_shape_vec( + (rows, cols), + all_data[total_pixels * 2..total_pixels * 3].to_vec(), + ) + .map_err(|e| { + format!( + "Failed to create SVF vegetation-blocking shadow array: {}", + e + ) + })?; + (Some(veg), Some(veg_blocks)) + } else { + (None, None) + }; + + Ok(SvfShadowResult { + bldg_sh, + veg_sh, + veg_blocks_bldg_sh, + }) + } + + /// Initialize SVF accumulation buffers. Call once before the 153-patch loop. + /// + /// Allocates the packed SVF data buffer (15 × pixels for veg, 5 × for no-veg), + /// zeroes it, and creates the bind group referencing shadow output buffers. + /// Also writes static inputs (DSM, veg) to shadow buffers once. 
+ pub fn init_svf_accumulation( + &self, + rows: usize, + cols: usize, + has_veg: bool, + total_patches: usize, + dsm: ArrayView2, + veg_canopy_dsm_opt: Option>, + veg_trunk_dsm_opt: Option>, + bush_opt: Option>, + ) -> Result<(), String> { + let total_pixels = rows * cols; + let buffer_size = (total_pixels * std::mem::size_of::()) as u64; + let num_arrays: usize = if has_veg { 15 } else { 5 }; + let svf_data_size = buffer_size * num_arrays as u64; + + let mut cache_guard = self + .cached + .lock() + .map_err(|e| format!("Failed to lock buffer cache: {}", e))?; + + // Ensure shadow buffers are allocated + let needs_realloc = match cache_guard.as_ref() { + Some(c) => c.rows != rows || c.cols != cols, + None => true, + }; + if needs_realloc { + *cache_guard = Some(self.allocate_buffers(rows, cols)); + } + + let buffers = cache_guard + .as_mut() + .ok_or_else(|| "Buffer cache unexpectedly empty".to_string())?; + + // Write static inputs to shadow buffers once (avoids re-uploading per patch) + let get_slice = |view: ArrayView2| -> Vec { + if let Some(slice) = view.as_slice() { + slice.to_vec() + } else { + view.iter().copied().collect() + } + }; + + let dsm_data = get_slice(dsm); + self.queue + .write_buffer(&buffers.dsm_buffer, 0, bytemuck::cast_slice(&dsm_data)); + + if has_veg { + if let (Some(vc), Some(vt), Some(b)) = (veg_canopy_dsm_opt, veg_trunk_dsm_opt, bush_opt) + { + let vc_data = get_slice(vc); + let vt_data = get_slice(vt); + let b_data = get_slice(b); + self.queue.write_buffer( + &buffers.veg_canopy_buffer, + 0, + bytemuck::cast_slice(&vc_data), + ); + self.queue.write_buffer( + &buffers.veg_trunk_buffer, + 0, + bytemuck::cast_slice(&vt_data), + ); + self.queue + .write_buffer(&buffers.bush_buffer, 0, bytemuck::cast_slice(&b_data)); + } + } + + // Create SVF-specific buffers + let make_buffer = |label: &str, size: u64, usage: wgpu::BufferUsages| -> wgpu::Buffer { + self.device.create_buffer(&wgpu::BufferDescriptor { + label: Some(label), + size, + usage, + 
mapped_at_creation: false, + }) + }; + + let svf_params_buffer = make_buffer( + "SVF Accum Params", + std::mem::size_of::() as u64, + wgpu::BufferUsages::UNIFORM | wgpu::BufferUsages::COPY_DST, + ); + let svf_data_buffer = make_buffer( + "SVF Data Buffer", + svf_data_size, + wgpu::BufferUsages::STORAGE + | wgpu::BufferUsages::COPY_SRC + | wgpu::BufferUsages::COPY_DST, + ); + let svf_result_staging = make_buffer( + "SVF Result Staging", + svf_data_size, + wgpu::BufferUsages::MAP_READ | wgpu::BufferUsages::COPY_DST, + ); + + // Zero-initialize SVF data buffer + let mut encoder = self + .device + .create_command_encoder(&wgpu::CommandEncoderDescriptor { + label: Some("SVF Init Encoder"), + }); + encoder.clear_buffer(&svf_data_buffer, 0, None); + self.queue.submit(Some(encoder.finish())); + + // Create SVF bind group referencing shadow output buffers + SVF buffers + let svf_bind_group = self.device.create_bind_group(&wgpu::BindGroupDescriptor { + label: Some("SVF Accumulation Bind Group"), + layout: &self.svf_bind_group_layout, + entries: &[ + wgpu::BindGroupEntry { + binding: 0, + resource: svf_params_buffer.as_entire_binding(), + }, + wgpu::BindGroupEntry { + binding: 1, + resource: buffers.bldg_shadow_buffer.as_entire_binding(), + }, + wgpu::BindGroupEntry { + binding: 2, + resource: buffers.veg_shadow_buffer.as_entire_binding(), + }, + wgpu::BindGroupEntry { + binding: 3, + resource: buffers.veg_blocks_bldg_shadow_buffer.as_entire_binding(), + }, + wgpu::BindGroupEntry { + binding: 4, + resource: svf_data_buffer.as_entire_binding(), + }, + ], + }); + + // --- Shadow bitpack buffers (persist across all patch dispatches) --- + let n_pack = (total_patches + 7) / 8; // ceil(n_patches/8) + let matrix_bytes = total_pixels * n_pack; + let matrix_words = (matrix_bytes + 3) / 4; // u32 words + let num_matrices = if has_veg { 3usize } else { 1usize }; + let packed_output_size = (matrix_words * num_matrices) as u64 * 4; // bytes + + let shadow_u8_params_buffer = make_buffer( 
+ "Shadow U8 Params", + std::mem::size_of::() as u64, + wgpu::BufferUsages::UNIFORM | wgpu::BufferUsages::COPY_DST, + ); + let shadow_u8_output_buffer = make_buffer( + "Shadow U8 Output", + packed_output_size, + wgpu::BufferUsages::STORAGE + | wgpu::BufferUsages::COPY_SRC + | wgpu::BufferUsages::COPY_DST, + ); + let shadow_u8_staging = make_buffer( + "Shadow U8 Staging", + packed_output_size, + wgpu::BufferUsages::MAP_READ | wgpu::BufferUsages::COPY_DST, + ); + + // Write static U8 pack params; patch fields are updated per dispatch. + let u8_params = U8PackParams { + total_pixels: total_pixels as u32, + cols: cols as u32, + rows: rows as u32, + n_pack: n_pack as u32, + matrix_words: matrix_words as u32, + has_veg: if has_veg { 1 } else { 0 }, + patch_byte_idx: 0, + patch_bit_mask: 0, + _pad0: 0, + _pad1: 0, + _pad2: 0, + _pad3: 0, + }; + self.queue.write_buffer( + &shadow_u8_params_buffer, + 0, + bytemuck::cast_slice(&[u8_params]), + ); + + // Zero-initialize bitpacked output buffer once before patch loop. 
+ let mut bitpack_init = + self.device + .create_command_encoder(&wgpu::CommandEncoderDescriptor { + label: Some("SVF Bitpack Init Encoder"), + }); + bitpack_init.clear_buffer(&shadow_u8_output_buffer, 0, None); + self.queue.submit(Some(bitpack_init.finish())); + + let shadow_u8_bind_group = self.device.create_bind_group(&wgpu::BindGroupDescriptor { + label: Some("Shadow U8 Pack Bind Group"), + layout: &self.shadow_u8_bind_group_layout, + entries: &[ + wgpu::BindGroupEntry { + binding: 0, + resource: shadow_u8_params_buffer.as_entire_binding(), + }, + wgpu::BindGroupEntry { + binding: 1, + resource: buffers.bldg_shadow_buffer.as_entire_binding(), + }, + wgpu::BindGroupEntry { + binding: 2, + resource: buffers.veg_shadow_buffer.as_entire_binding(), + }, + wgpu::BindGroupEntry { + binding: 3, + resource: buffers.veg_blocks_bldg_shadow_buffer.as_entire_binding(), + }, + wgpu::BindGroupEntry { + binding: 4, + resource: shadow_u8_output_buffer.as_entire_binding(), + }, + ], + }); + + eprintln!( + "[GPU] SVF accumulation initialized: {}x{} grid, {} SVF arrays ({:.1} MB), bitpack ({:.1} MB)", + rows, + cols, + num_arrays, + svf_data_size as f64 / 1_048_576.0, + packed_output_size as f64 / 1_048_576.0 + ); + + buffers.svf_params_buffer = Some(svf_params_buffer); + buffers.svf_data_buffer = Some(svf_data_buffer); + buffers.svf_result_staging = Some(svf_result_staging); + buffers.svf_bind_group = Some(svf_bind_group); + buffers.svf_has_veg = has_veg; + buffers.svf_num_arrays = num_arrays; + buffers.shadow_u8_params_buffer = Some(shadow_u8_params_buffer); + buffers.shadow_u8_output_buffer = Some(shadow_u8_output_buffer); + buffers.shadow_u8_staging = Some(shadow_u8_staging); + buffers.shadow_u8_bind_group = Some(shadow_u8_bind_group); + buffers.shadow_u8_packed_size = packed_output_size; + buffers.shadow_u8_n_pack = n_pack; + buffers.shadow_u8_matrix_bytes = matrix_bytes; + buffers.shadow_u8_matrix_words = matrix_words; + buffers.shadow_u8_num_matrices = num_matrices; + + 
Ok(()) + } + + /// Per-patch: dispatch shadow + SVF accumulate + bitpack update on GPU (non-blocking). + /// + /// Shadow matrices and SVF accumulators stay on GPU for the full patch loop. + #[allow(clippy::too_many_arguments)] + pub fn dispatch_shadow_and_accumulate_svf( + &self, + patch_idx: usize, + azimuth_deg: f32, + altitude_deg: f32, + scale: f32, + max_local_dsm_ht: f32, + min_sun_elev_deg: f32, + max_shadow_distance_m: f32, + weight_iso: f32, + weight_n: f32, + weight_e: f32, + weight_s: f32, + weight_w: f32, + ) -> Result { + let mut cache_guard = self + .cached + .lock() + .map_err(|e| format!("Failed to lock buffer cache: {}", e))?; + + let buffers = cache_guard + .as_mut() + .ok_or_else(|| "Buffer cache empty — call init_svf_accumulation first".to_string())?; + + let svf_params_buf = buffers + .svf_params_buffer + .as_ref() + .ok_or_else(|| "SVF not initialized".to_string())?; + let u8_params_buf = buffers + .shadow_u8_params_buffer + .as_ref() + .ok_or_else(|| "Shadow U8 params missing".to_string())?; + let has_veg = buffers.svf_has_veg; + let rows = buffers.rows; + let cols = buffers.cols; + let total_pixels = rows * cols; + let buffer_size = (total_pixels * std::mem::size_of::()) as u64; + + // Write shadow params (only thing that changes per patch) + let azimuth_rad = azimuth_deg.to_radians(); + let altitude_rad = altitude_deg.to_radians(); + let min_sun_elev_rad = min_sun_elev_deg.to_radians(); + let height_reach_m = max_local_dsm_ht / min_sun_elev_rad.tan(); + let max_reach_m = if max_shadow_distance_m > 0.0 { height_reach_m.min(max_shadow_distance_m) } else { height_reach_m }; + let max_index = (max_reach_m / scale).ceil(); + + let shadow_params = ShadowParams { + rows: rows as u32, + cols: cols as u32, + azimuth_rad, + altitude_rad, + sin_azimuth: azimuth_rad.sin(), + cos_azimuth: azimuth_rad.cos(), + tan_azimuth: azimuth_rad.tan(), + tan_altitude_by_scale: altitude_rad.tan() / scale, + scale, + max_index, + max_local_dsm_ht, + has_veg: if 
has_veg { 1 } else { 0 }, + has_walls: 0, + _padding: 0, + }; + + self.queue.write_buffer( + &buffers.params_buffer, + 0, + bytemuck::cast_slice(&[shadow_params]), + ); + + // Write SVF accumulation params + let svf_params = SvfAccumParams { + total_pixels: total_pixels as u32, + cols: cols as u32, + rows: rows as u32, + weight_iso, + weight_n, + weight_e, + weight_s, + weight_w, + has_veg: if has_veg { 1 } else { 0 }, + _pad0: 0, + _pad1: 0, + }; + + self.queue + .write_buffer(svf_params_buf, 0, bytemuck::cast_slice(&[svf_params])); + + let u8_params = U8PackParams { + total_pixels: total_pixels as u32, + cols: cols as u32, + rows: rows as u32, + n_pack: buffers.shadow_u8_n_pack as u32, + matrix_words: buffers.shadow_u8_matrix_words as u32, + has_veg: if has_veg { 1 } else { 0 }, + patch_byte_idx: (patch_idx >> 3) as u32, + patch_bit_mask: (1u32 << (patch_idx & 7)), + _pad0: 0, + _pad1: 0, + _pad2: 0, + _pad3: 0, + }; + self.queue + .write_buffer(u8_params_buf, 0, bytemuck::cast_slice(&[u8_params])); + + // Build command encoder with 3 passes + staging copy + let mut encoder = self + .device + .create_command_encoder(&wgpu::CommandEncoderDescriptor { + label: Some("SVF Shadow+Accum+U8 Encoder"), + }); + + // Reset propagated height buffers (GPU→GPU copy from static inputs) + encoder.copy_buffer_to_buffer( + &buffers.dsm_buffer, + 0, + &buffers.propagated_bldg_height_buffer, + 0, + buffer_size, + ); + if has_veg { + encoder.copy_buffer_to_buffer( + &buffers.veg_canopy_buffer, + 0, + &buffers.propagated_veg_height_buffer, + 0, + buffer_size, + ); + } + + let workgroup_size_x = 16; + let workgroup_size_y = 16; + let (num_workgroups_x, num_workgroups_y) = self.checked_workgroups_2d( + rows, + cols, + workgroup_size_x, + workgroup_size_y, + "svf shadow propagation update", + )?; + + // Pass 1: Shadow propagation + { + let mut pass = encoder.begin_compute_pass(&wgpu::ComputePassDescriptor { + label: Some("Shadow Propagation (SVF)"), + timestamp_writes: None, + }); + 
pass.set_pipeline(&self.pipeline); + pass.set_bind_group(0, &buffers.bind_group, &[]); + pass.dispatch_workgroups(num_workgroups_x, num_workgroups_y, 1); + } + + // Pass 2: SVF accumulation (reads shadow outputs, accumulates into svf_data) + { + let svf_bg = buffers + .svf_bind_group + .as_ref() + .ok_or("SVF bind group missing")?; + let svf_workgroup_x = 16u32; + let svf_workgroup_y = 16u32; + let (svf_workgroups_x, svf_workgroups_y) = self.checked_workgroups_2d( + rows, + cols, + svf_workgroup_x, + svf_workgroup_y, + "svf accumulation", + )?; + let mut pass = encoder.begin_compute_pass(&wgpu::ComputePassDescriptor { + label: Some("SVF Accumulation"), + timestamp_writes: None, + }); + pass.set_pipeline(&self.svf_pipeline); + pass.set_bind_group(0, svf_bg, &[]); + pass.dispatch_workgroups(svf_workgroups_x, svf_workgroups_y, 1); + } + + // Pass 3: Update bitpacked shadow matrices for this patch. + { + let u8_bg = buffers + .shadow_u8_bind_group + .as_ref() + .ok_or("Shadow U8 bind group missing")?; + let u8_workgroup_x = 16u32; + let u8_workgroup_y = 16u32; + let (u8_workgroups_x, u8_workgroups_y) = self.checked_workgroups_2d( + rows, + cols, + u8_workgroup_x, + u8_workgroup_y, + "svf shadow bitpack update", + )?; + let mut pass = encoder.begin_compute_pass(&wgpu::ComputePassDescriptor { + label: Some("Shadow Bitpack Update"), + timestamp_writes: None, + }); + pass.set_pipeline(&self.shadow_u8_pipeline); + pass.set_bind_group(0, u8_bg, &[]); + pass.dispatch_workgroups(u8_workgroups_x, u8_workgroups_y, 1); + } + + // Submit — no per-patch synchronization; read back once after all patches. + let submission_index = self.queue.submit(Some(encoder.finish())); + Ok(submission_index) + } + + /// Wait for a specific submitted GPU workload to complete. 
+ pub fn wait_for_submission( + &self, + submission_index: wgpu::SubmissionIndex, + ) -> Result<(), String> { + self.device + .poll(wgpu::PollType::Wait { + submission_index: Some(submission_index), + timeout: None, + }) + .map(|_| ()) + .map_err(|e| format!("GPU poll failed while waiting for SVF dispatch: {:?}", e)) + } + + /// After all patches: read back GPU-built bitpacked shadow matrices. + pub fn read_svf_bitpacked_shadows(&self) -> Result { + let mut cache_guard = self + .cached + .lock() + .map_err(|e| format!("Failed to lock buffer cache: {}", e))?; + + let buffers = cache_guard + .as_mut() + .ok_or_else(|| "Buffer cache empty".to_string())?; + + let output_buf = buffers + .shadow_u8_output_buffer + .as_ref() + .ok_or_else(|| "Shadow U8 output buffer missing".to_string())?; + let staging = buffers + .shadow_u8_staging + .as_ref() + .ok_or_else(|| "Shadow U8 staging not initialized".to_string())?; + + let packed_size = buffers.shadow_u8_packed_size; + let matrix_bytes = buffers.shadow_u8_matrix_bytes; + let n_pack = buffers.shadow_u8_n_pack; + let rows = buffers.rows; + let cols = buffers.cols; + let has_veg = buffers.shadow_u8_num_matrices > 1; + + let mut encoder = self + .device + .create_command_encoder(&wgpu::CommandEncoderDescriptor { + label: Some("SVF Read Bitpacked Shadows Encoder"), + }); + encoder.copy_buffer_to_buffer(output_buf, 0, staging, 0, packed_size); + let submission_index = self.queue.submit(Some(encoder.finish())); + + let slice = staging.slice(..packed_size); + let (sender, receiver) = std::sync::mpsc::channel(); + slice.map_async(wgpu::MapMode::Read, move |result| { + let _ = sender.send(result); + }); + + self.device + .poll(wgpu::PollType::Wait { + submission_index: Some(submission_index), + timeout: None, + }) + .map_err(|e| format!("Poll failed while reading bitpacked shadows: {:?}", e))?; + receiver + .recv() + .map_err(|e| format!("Failed waiting for bitpacked shadow mapping: {}", e))? 
+ .map_err(|e| format!("Failed to map bitpacked shadow staging: {:?}", e))?; + + let _unmap_guard = MappedBufferGuard::new(staging); + let data = slice.get_mapped_range(); + let all_bytes: &[u8] = bytemuck::cast_slice(&data); + let expected = if has_veg { + matrix_bytes * 3 + } else { + matrix_bytes + }; + if all_bytes.len() < expected { + return Err(format!( + "Bitpacked shadow buffer too small: got {} bytes, need at least {}", + all_bytes.len(), + expected + )); + } + + let bldg = + Array3::from_shape_vec((rows, cols, n_pack), all_bytes[0..matrix_bytes].to_vec()) + .map_err(|e| format!("Failed to reshape bldg shadow matrix: {}", e))?; + + let (veg, vb) = if has_veg { + let veg_start = matrix_bytes; + let vb_start = matrix_bytes * 2; + let veg_arr = Array3::from_shape_vec( + (rows, cols, n_pack), + all_bytes[veg_start..vb_start].to_vec(), + ) + .map_err(|e| format!("Failed to reshape veg shadow matrix: {}", e))?; + let vb_arr = Array3::from_shape_vec( + (rows, cols, n_pack), + all_bytes[vb_start..vb_start + matrix_bytes].to_vec(), + ) + .map_err(|e| format!("Failed to reshape vb shadow matrix: {}", e))?; + (veg_arr, vb_arr) + } else { + let shape = (rows, cols, n_pack); + (Array3::::zeros(shape), Array3::::zeros(shape)) + }; + + Ok(SvfBitpackedShadowResult { + bldg_sh_matrix: bldg, + veg_sh_matrix: veg, + veg_blocks_bldg_sh_matrix: vb, + }) + } + + /// After all patches: read back accumulated SVF values from GPU. 
+ pub fn read_svf_results(&self) -> Result { + let mut cache_guard = self + .cached + .lock() + .map_err(|e| format!("Failed to lock buffer cache: {}", e))?; + + let buffers = cache_guard + .as_mut() + .ok_or_else(|| "Buffer cache empty".to_string())?; + + let svf_data_buf = buffers + .svf_data_buffer + .as_ref() + .ok_or_else(|| "SVF not initialized".to_string())?; + let svf_staging = buffers + .svf_result_staging + .as_ref() + .ok_or_else(|| "SVF staging not initialized".to_string())?; + + let rows = buffers.rows; + let cols = buffers.cols; + let total_pixels = rows * cols; + let has_veg = buffers.svf_has_veg; + let num_arrays = buffers.svf_num_arrays; + let buffer_size = (total_pixels * std::mem::size_of::()) as u64; + let svf_data_size = buffer_size * num_arrays as u64; + + // Copy svf_data to staging + let mut encoder = self + .device + .create_command_encoder(&wgpu::CommandEncoderDescriptor { + label: Some("SVF Read Results Encoder"), + }); + encoder.copy_buffer_to_buffer(svf_data_buf, 0, svf_staging, 0, svf_data_size); + let submission_index = self.queue.submit(Some(encoder.finish())); + + // Map and read + let slice = svf_staging.slice(..svf_data_size); + let (sender, receiver) = std::sync::mpsc::channel(); + slice.map_async(wgpu::MapMode::Read, move |result| { + let _ = sender.send(result); + }); + + self.device + .poll(wgpu::PollType::Wait { + submission_index: Some(submission_index), + timeout: None, + }) + .map_err(|e| format!("GPU poll failed while reading SVF results: {:?}", e))?; + receiver + .recv() + .map_err(|e| format!("Failed waiting for SVF result mapping: {}", e))? 
+ .map_err(|e| format!("Failed to map SVF staging: {:?}", e))?; + + let _unmap_guard = MappedBufferGuard::new(svf_staging); + let data = slice.get_mapped_range(); + let all: &[f32] = bytemuck::cast_slice(&data); + let n = total_pixels; + + let extract = |offset: usize, label: &str| -> Result, String> { + Array2::from_shape_vec((rows, cols), all[offset..offset + n].to_vec()) + .map_err(|e| format!("Failed to reshape {} array: {}", label, e)) + }; + + let svf = extract(0, "svf")?; + let svf_n = extract(n, "svf_n")?; + let svf_e = extract(2 * n, "svf_e")?; + let svf_s = extract(3 * n, "svf_s")?; + let svf_w = extract(4 * n, "svf_w")?; + + let (svf_veg, svf_veg_n, svf_veg_e, svf_veg_s, svf_veg_w) = if has_veg { + ( + Some(extract(5 * n, "svf_veg")?), + Some(extract(6 * n, "svf_veg_n")?), + Some(extract(7 * n, "svf_veg_e")?), + Some(extract(8 * n, "svf_veg_s")?), + Some(extract(9 * n, "svf_veg_w")?), + ) + } else { + (None, None, None, None, None) + }; + + let (svf_aveg, svf_aveg_n, svf_aveg_e, svf_aveg_s, svf_aveg_w) = if has_veg { + ( + Some(extract(10 * n, "svf_aveg")?), + Some(extract(11 * n, "svf_aveg_n")?), + Some(extract(12 * n, "svf_aveg_e")?), + Some(extract(13 * n, "svf_aveg_s")?), + Some(extract(14 * n, "svf_aveg_w")?), + ) + } else { + (None, None, None, None, None) + }; + + Ok(SvfAccumResult { + svf, + svf_n, + svf_e, + svf_s, + svf_w, + svf_veg, + svf_veg_n, + svf_veg_e, + svf_veg_s, + svf_veg_w, + svf_aveg, + svf_aveg_n, + svf_aveg_e, + svf_aveg_s, + svf_aveg_w, + }) + } + + #[inline] + fn write_2d_f32(queue: &wgpu::Queue, buffer: &wgpu::Buffer, arr: &ArrayView2) { + if let Some(slice) = arr.as_slice() { + queue.write_buffer(buffer, 0, bytemuck::cast_slice(slice)); + } else { + let packed: Vec = arr.iter().copied().collect(); + queue.write_buffer(buffer, 0, bytemuck::cast_slice(&packed)); + } + } } /// Synchronous wrapper that blocks on async GPU initialization diff --git a/rust/src/gpu/shadow_to_bitpack.wgsl b/rust/src/gpu/shadow_to_bitpack.wgsl new file 
mode 100644 index 0000000..f98c069 --- /dev/null +++ b/rust/src/gpu/shadow_to_bitpack.wgsl @@ -0,0 +1,72 @@ +// Shadow float32 -> bitpacked matrix update shader. +// +// For each patch dispatch, this shader reads the 3 shadow output buffers and +// sets a single bit (patch-specific) in bitpacked shadow matrices. +// +// Layout of packed_output (byte-addressed): +// matrix 0: bldg_sh bits [0 .. matrix_bytes) +// matrix 1: veg_sh bits [matrix_bytes .. 2*matrix_bytes) (if has_veg) +// matrix 2: vbsh bits [2*matrix_bytes .. 3*matrix_bytes) (if has_veg) +// +// matrix_bytes = total_pixels * n_pack, where n_pack = ceil(n_patches / 8) +// Each matrix byte stores up to 8 patch bits for one pixel. +// Storage buffer uses u32 words; bytes are read/updated via atomic bit operations. + +struct U8PackParams { + total_pixels: u32, + cols: u32, + rows: u32, + n_pack: u32, + matrix_words: u32, + has_veg: u32, + patch_byte_idx: u32, + patch_bit_mask: u32, + _pad0: u32, + _pad1: u32, + _pad2: u32, + _pad3: u32, +} + +@group(0) @binding(0) var params: U8PackParams; +@group(0) @binding(1) var bldg_sh: array; +@group(0) @binding(2) var veg_sh: array; +@group(0) @binding(3) var veg_blocks_bldg_sh: array; +@group(0) @binding(4) var packed_output: array>; + +fn set_shadow_bit(matrix_base_words: u32, pixel_idx: u32) { + let byte_idx = pixel_idx * params.n_pack + params.patch_byte_idx; + let word_idx = matrix_base_words + (byte_idx >> 2u); + let shift = (byte_idx & 3u) * 8u; + let bit = (params.patch_bit_mask & 0xFFu) << shift; + atomicOr(&packed_output[word_idx], bit); +} + +@compute @workgroup_size(16, 16, 1) +fn shadow_to_bitpack(@builtin(global_invocation_id) id: vec3) { + let x = id.x; + let y = id.y; + if (x >= params.cols || y >= params.rows) { + return; + } + let idx = y * params.cols + x; + if (idx >= params.total_pixels) { + return; + } + + // Matrix 0: building shadow + if (bldg_sh[idx] >= 0.5) { + set_shadow_bit(0u, idx); + } + + if (params.has_veg == 1u) { + let matrix_words = 
params.matrix_words; + // Matrix 1: vegetation shadow + if (veg_sh[idx] >= 0.5) { + set_shadow_bit(matrix_words, idx); + } + // Matrix 2: veg-blocks-building shadow + if (veg_blocks_bldg_sh[idx] >= 0.5) { + set_shadow_bit(2u * matrix_words, idx); + } + } +} diff --git a/rust/src/gpu/shadow_to_u8.wgsl b/rust/src/gpu/shadow_to_u8.wgsl new file mode 100644 index 0000000..91bfe7f --- /dev/null +++ b/rust/src/gpu/shadow_to_u8.wgsl @@ -0,0 +1,69 @@ +// Shadow float32 → uint8 quantization shader. +// +// Reads 3 float32 shadow output buffers and packs them into a compact u32 array +// where each u32 holds 4 consecutive uint8 values (little-endian byte order). +// +// Layout of packed_output: +// [0..Q) bldg_sh packed (Q = ceil(total_pixels / 4)) +// [Q..2Q) veg_sh packed (only if has_veg) +// [2Q..3Q) vbsh packed (only if has_veg) + +struct U8PackParams { + total_pixels: u32, + num_quads: u32, // ceil(total_pixels / 4) + has_veg: u32, + _padding: u32, +} + +@group(0) @binding(0) var params: U8PackParams; +@group(0) @binding(1) var bldg_sh: array; +@group(0) @binding(2) var veg_sh: array; +@group(0) @binding(3) var veg_blocks_bldg_sh: array; +@group(0) @binding(4) var packed_output: array; + +@compute @workgroup_size(256) +fn shadow_to_u8(@builtin(global_invocation_id) id: vec3) { + let idx = id.x; + if (idx >= params.num_quads) { + return; + } + + let base = idx * 4u; + let n = params.total_pixels; + let q = params.num_quads; + + // Pack 4 consecutive building shadow float32 values into one u32 as 4×uint8 + var bldg_packed: u32 = 0u; + for (var i = 0u; i < 4u; i++) { + let px = base + i; + if (px < n) { + let val = u32(clamp(bldg_sh[px], 0.0, 1.0) * 255.0); + bldg_packed |= (val & 0xFFu) << (i * 8u); + } + } + packed_output[idx] = bldg_packed; + + if (params.has_veg == 1u) { + // Pack vegetation shadow + var veg_packed: u32 = 0u; + for (var i = 0u; i < 4u; i++) { + let px = base + i; + if (px < n) { + let val = u32(clamp(veg_sh[px], 0.0, 1.0) * 255.0); + veg_packed |= 
(val & 0xFFu) << (i * 8u); + } + } + packed_output[idx + q] = veg_packed; + + // Pack veg-blocks-building shadow + var vbsh_packed: u32 = 0u; + for (var i = 0u; i < 4u; i++) { + let px = base + i; + if (px < n) { + let val = u32(clamp(veg_blocks_bldg_sh[px], 0.0, 1.0) * 255.0); + vbsh_packed |= (val & 0xFFu) << (i * 8u); + } + } + packed_output[idx + 2u * q] = vbsh_packed; + } +} diff --git a/rust/src/gpu/svf_accumulation.wgsl b/rust/src/gpu/svf_accumulation.wgsl new file mode 100644 index 0000000..6924f52 --- /dev/null +++ b/rust/src/gpu/svf_accumulation.wgsl @@ -0,0 +1,84 @@ +// SVF accumulation shader — runs after shadow propagation for each sky patch. +// +// Reads the 3 shadow output buffers (bldg_sh, veg_sh, veg_blocks_bldg_sh) +// and accumulates weighted values into a packed SVF data buffer. +// +// Layout of svf_data (15 arrays × total_pixels, all contiguous): +// [0..N) svf (isotropic building) +// [N..2N) svf_n (north) +// [2N..3N) svf_e (east) +// [3N..4N) svf_s (south) +// [4N..5N) svf_w (west) +// [5N..6N) svf_veg (isotropic vegetation) +// [6N..7N) svf_veg_n +// [7N..8N) svf_veg_e +// [8N..9N) svf_veg_s +// [9N..10N) svf_veg_w +// [10N..11N) svf_aveg (isotropic veg-blocks-bldg) +// [11N..12N) svf_aveg_n +// [12N..13N) svf_aveg_e +// [13N..14N) svf_aveg_s +// [14N..15N) svf_aveg_w + +struct SvfAccumParams { + total_pixels: u32, + cols: u32, + rows: u32, + weight_iso: f32, + weight_n: f32, + weight_e: f32, + weight_s: f32, + weight_w: f32, + has_veg: u32, + _pad0: u32, + _pad1: u32, +} + +@group(0) @binding(0) var params: SvfAccumParams; +@group(0) @binding(1) var bldg_sh: array; +@group(0) @binding(2) var veg_sh: array; +@group(0) @binding(3) var veg_blocks_bldg_sh: array; +@group(0) @binding(4) var svf_data: array; + +@compute @workgroup_size(16, 16, 1) +fn accumulate_svf(@builtin(global_invocation_id) id: vec3) { + let x = id.x; + let y = id.y; + if (x >= params.cols || y >= params.rows) { + return; + } + let idx = y * params.cols + x; + if (idx >= 
params.total_pixels) { + return; + } + + let n = params.total_pixels; + let b = bldg_sh[idx]; + + // Accumulate building shadow into SVF (5 directional components) + svf_data[idx] += params.weight_iso * b; // svf + svf_data[idx + n] += params.weight_n * b; // svf_n + svf_data[idx + 2u * n] += params.weight_e * b; // svf_e + svf_data[idx + 3u * n] += params.weight_s * b; // svf_s + svf_data[idx + 4u * n] += params.weight_w * b; // svf_w + + if (params.has_veg == 1u) { + let v = veg_sh[idx]; + + // Accumulate vegetation shadow into SVF + svf_data[idx + 5u * n] += params.weight_iso * v; // svf_veg + svf_data[idx + 6u * n] += params.weight_n * v; // svf_veg_n + svf_data[idx + 7u * n] += params.weight_e * v; // svf_veg_e + svf_data[idx + 8u * n] += params.weight_s * v; // svf_veg_s + svf_data[idx + 9u * n] += params.weight_w * v; // svf_veg_w + + let a = veg_blocks_bldg_sh[idx]; + + // Accumulate veg-blocks-building shadow into SVF + svf_data[idx + 10u * n] += params.weight_iso * a; // svf_aveg + svf_data[idx + 11u * n] += params.weight_n * a; // svf_aveg_n + svf_data[idx + 12u * n] += params.weight_e * a; // svf_aveg_e + svf_data[idx + 13u * n] += params.weight_s * a; // svf_aveg_s + svf_data[idx + 14u * n] += params.weight_w * a; // svf_aveg_w + } +} diff --git a/rust/src/ground.rs b/rust/src/ground.rs new file mode 100644 index 0000000..b78b4c6 --- /dev/null +++ b/rust/src/ground.rs @@ -0,0 +1,498 @@ +use ndarray::{Array2, ArrayView2, Zip}; +use numpy::{PyArray2, PyReadonlyArray2}; +use pyo3::prelude::*; +use rayon::prelude::*; + +/// Ground temperature calculation parameters +const PI: f32 = std::f32::consts::PI; + +/// Pure result type for ground temperature (no PyO3 dependency). +pub(crate) struct GroundTempResult { + pub tg: Array2, + pub tg_wall: f32, + pub ci_tg: f32, +} + +/// Pure-ndarray implementation of ground temperature calculation. +/// Callable from pipeline.rs (fused path) or from the PyO3 wrapper (modular path). 
+#[allow(clippy::too_many_arguments)] +pub(crate) fn compute_ground_temperature_pure( + sun_altitude: f32, + altmax: f32, + dectime: f32, + snup: f32, + global_rad: f32, + rad_g0: f32, + zen_deg: f32, + tgk_grid: ArrayView2, + tstart_grid: ArrayView2, + tmaxlst_grid: ArrayView2, + tgk_wall: f32, + tstart_wall: f32, + tmaxlst_wall: f32, +) -> GroundTempResult { + let shape = tgk_grid.dim(); + + // Temperature amplitude based on max sun altitude (per-pixel from land cover) + let tgamp = &tgk_grid * altmax + &tstart_grid; + + // Wall temperature amplitude + let tgamp_wall = tgk_wall * altmax + tstart_wall; + + // Phase calculation for ground (per-pixel) + let snup_frac = snup / 24.0; + let tmaxlst_frac = &tmaxlst_grid / 24.0; + let tmaxlst_wall_frac = tmaxlst_wall / 24.0; + + let mut tg = Array2::::zeros(shape); + + tg.as_slice_mut() + .unwrap() + .par_iter_mut() + .enumerate() + .for_each(|(idx, out)| { + let row = idx / shape.1; + let col = idx % shape.1; + + let tgamp_val = tgamp[[row, col]]; + if !tgamp_val.is_finite() { + *out = f32::NAN; + return; + } + let tmaxlst_frac_val = tmaxlst_frac[[row, col]]; + + if dectime > snup_frac { + let denom = tmaxlst_frac_val - snup_frac; + let denom = if denom > 0.0 { denom } else { 1.0 }; + let phase = (dectime - snup_frac) / denom; + *out = tgamp_val * (phase * PI / 2.0).sin(); + } else { + *out = 0.0; + } + }); + + // Wall phase (scalar) + let tg_wall = if dectime > snup_frac && tmaxlst_wall_frac > snup_frac { + let denom_wall = tmaxlst_wall_frac - snup_frac; + let denom_wall = if denom_wall > 0.0 { denom_wall } else { 1.0 }; + let phase_wall = (dectime - snup_frac) / denom_wall; + tgamp_wall * (phase_wall * PI / 2.0).sin() + } else { + 0.0 + }; + + // CI_TgG correction for non-clear conditions + let ci_tg = if sun_altitude > 0.0 && rad_g0 > 0.0 { + let corr = if zen_deg > 0.0 && zen_deg < 90.0 { + 0.1473 * (90.0 - zen_deg).ln() + 0.3454 + } else { + 0.3454 + }; + let mut ci = (global_rad / rad_g0) + (1.0 - corr); + ci = 
ci.min(1.0); + if ci.is_infinite() || ci.is_nan() { + 1.0 + } else { + ci + } + } else { + 1.0 + }; + + // Apply clearness correction + tg.par_mapv_inplace(|v| (v * ci_tg).max(0.0)); + let tg_wall_corrected = (tg_wall * ci_tg).max(0.0); + + GroundTempResult { + tg, + tg_wall: tg_wall_corrected, + ci_tg, + } +} + +/// Pure result type for batched thermal delay (no PyO3 dependency). +pub(crate) struct TsWaveDelayBatchPureResult { + pub lup: Array2, + pub lup_e: Array2, + pub lup_s: Array2, + pub lup_w: Array2, + pub lup_n: Array2, + pub tg_out: Array2, + pub timeadd: f32, + pub tgmap1: Array2, + pub tgmap1_e: Array2, + pub tgmap1_s: Array2, + pub tgmap1_w: Array2, + pub tgmap1_n: Array2, + pub tgout1: Array2, +} + +/// Pure-ndarray implementation of batched thermal delay. +/// Callable from pipeline.rs (fused path) or from the PyO3 wrapper (modular path). +#[allow(clippy::too_many_arguments)] +pub(crate) fn ts_wave_delay_batch_pure( + lup: ArrayView2, + lup_e: ArrayView2, + lup_s: ArrayView2, + lup_w: ArrayView2, + lup_n: ArrayView2, + tg_temp: ArrayView2, + firstdaytime: i32, + timeadd: f32, + timestepdec: f32, + tgmap1: ArrayView2, + tgmap1_e: ArrayView2, + tgmap1_s: ArrayView2, + tgmap1_w: ArrayView2, + tgmap1_n: ArrayView2, + tgout1: ArrayView2, +) -> TsWaveDelayBatchPureResult { + let mut tgmap1_arr = tgmap1.to_owned(); + let mut tgmap1_e_arr = tgmap1_e.to_owned(); + let mut tgmap1_s_arr = tgmap1_s.to_owned(); + let mut tgmap1_w_arr = tgmap1_w.to_owned(); + let mut tgmap1_n_arr = tgmap1_n.to_owned(); + let mut tgout1_arr = tgout1.to_owned(); + + // First morning: reset previous temperatures + if firstdaytime == 1 { + tgmap1_arr.assign(&lup); + tgmap1_e_arr.assign(&lup_e); + tgmap1_s_arr.assign(&lup_s); + tgmap1_w_arr.assign(&lup_w); + tgmap1_n_arr.assign(&lup_n); + tgout1_arr.assign(&tg_temp); + } + + let threshold = 59.0 / 1440.0; + let decay_constant = -33.27f32; + + if timeadd >= threshold { + let weight1 = (decay_constant * timeadd).exp(); + let new_timeadd 
= if timestepdec > threshold { + timestepdec + } else { + 0.0 + }; + + let m = lerp_par(lup, tgmap1_arr.view(), weight1); + let me = lerp_par(lup_e, tgmap1_e_arr.view(), weight1); + let ms = lerp_par(lup_s, tgmap1_s_arr.view(), weight1); + let mw = lerp_par(lup_w, tgmap1_w_arr.view(), weight1); + let mn = lerp_par(lup_n, tgmap1_n_arr.view(), weight1); + let mt = lerp_par(tg_temp, tgout1_arr.view(), weight1); + + TsWaveDelayBatchPureResult { + lup: m.clone(), + lup_e: me.clone(), + lup_s: ms.clone(), + lup_w: mw.clone(), + lup_n: mn.clone(), + tg_out: mt.clone(), + timeadd: new_timeadd, + tgmap1: m, + tgmap1_e: me, + tgmap1_s: ms, + tgmap1_w: mw, + tgmap1_n: mn, + tgout1: mt, + } + } else { + let new_timeadd = timeadd + timestepdec; + let weight1 = (decay_constant * new_timeadd).exp(); + + TsWaveDelayBatchPureResult { + lup: lerp_par(lup, tgmap1_arr.view(), weight1), + lup_e: lerp_par(lup_e, tgmap1_e_arr.view(), weight1), + lup_s: lerp_par(lup_s, tgmap1_s_arr.view(), weight1), + lup_w: lerp_par(lup_w, tgmap1_w_arr.view(), weight1), + lup_n: lerp_par(lup_n, tgmap1_n_arr.view(), weight1), + tg_out: lerp_par(tg_temp, tgout1_arr.view(), weight1), + timeadd: new_timeadd, + tgmap1: tgmap1_arr, + tgmap1_e: tgmap1_e_arr, + tgmap1_s: tgmap1_s_arr, + tgmap1_w: tgmap1_w_arr, + tgmap1_n: tgmap1_n_arr, + tgout1: tgout1_arr, + } + } +} + +/// Parallel weighted average: out[i] = curr[i] * (1-w) + prev[i] * w +fn lerp_par(curr: ArrayView2, prev: ArrayView2, w: f32) -> Array2 { + let w1 = 1.0 - w; + let mut out = Array2::zeros(curr.dim()); + Zip::from(&mut out) + .and(&curr) + .and(&prev) + .par_for_each(|o, &c, &p| { + *o = c * w1 + p * w; + }); + out +} + +/// Calculate ground and wall temperature deviations from air temperature. +/// +/// Implements the SOLWEIG TgMaps model with land-cover-specific parameterization. +/// Temperature amplitude depends on max sun altitude and land cover type. +/// Clearness index correction accounts for reduced heating under cloudy skies. 
+/// +/// Parameters: +/// - ta: Air temperature (°C) +/// - sun_altitude: Sun altitude/elevation (degrees) +/// - altmax: Maximum sun altitude for the day (degrees) +/// - dectime: Decimal time (fraction of day, 0-1) +/// - snup: Sunrise time (hours, 0-24) +/// - global_rad: Global horizontal radiation (W/m²) +/// - rad_g0: Clear sky global horizontal radiation (W/m²) +/// - zen_deg: Solar zenith angle (degrees) +/// - alb_grid: Albedo per pixel (0-1) from land cover properties +/// - emis_grid: Emissivity per pixel (0-1) from land cover properties +/// - tgk_grid: TgK parameter per pixel (temperature gain coefficient) +/// - tstart_grid: Tstart parameter per pixel (temperature baseline offset) +/// - tmaxlst_grid: TmaxLST parameter per pixel (hour of maximum temperature, 0-24) +/// - tgk_wall: Optional wall TgK parameter (default: 0.37, cobblestone) +/// - tstart_wall: Optional wall Tstart parameter (default: -3.41, cobblestone) +/// - tmaxlst_wall: Optional wall TmaxLST parameter (default: 15.0, cobblestone) +/// +/// Returns tuple: +/// - tg: Ground temperature deviation from air temperature (K) +/// - tg_wall: Wall temperature deviation from air temperature (K) +/// - ci_tg: Clearness index correction factor (0-1) +#[pyfunction] +#[pyo3(signature = ( + _ta, sun_altitude, altmax, dectime, snup, global_rad, rad_g0, zen_deg, + alb_grid, emis_grid, tgk_grid, tstart_grid, tmaxlst_grid, + tgk_wall=None, tstart_wall=None, tmaxlst_wall=None, +))] +pub fn compute_ground_temperature<'py>( + py: Python<'py>, + _ta: f32, + sun_altitude: f32, + altmax: f32, + dectime: f32, + snup: f32, + global_rad: f32, + rad_g0: f32, + zen_deg: f32, + alb_grid: PyReadonlyArray2<'py, f32>, + emis_grid: PyReadonlyArray2<'py, f32>, + tgk_grid: PyReadonlyArray2<'py, f32>, + tstart_grid: PyReadonlyArray2<'py, f32>, + tmaxlst_grid: PyReadonlyArray2<'py, f32>, + tgk_wall: Option, + tstart_wall: Option, + tmaxlst_wall: Option, +) -> PyResult<( + Bound<'py, PyArray2>, + f32, + f32, + Bound<'py, 
PyArray2>, + Bound<'py, PyArray2>, +)> { + let alb_arr = alb_grid.as_array(); + let emis_arr = emis_grid.as_array(); + + let result = compute_ground_temperature_pure( + sun_altitude, + altmax, + dectime, + snup, + global_rad, + rad_g0, + zen_deg, + tgk_grid.as_array(), + tstart_grid.as_array(), + tmaxlst_grid.as_array(), + tgk_wall.unwrap_or(0.37), + tstart_wall.unwrap_or(-3.41), + tmaxlst_wall.unwrap_or(15.0), + ); + + let tg_py = PyArray2::from_owned_array(py, result.tg); + let alb_py = PyArray2::from_owned_array(py, alb_arr.to_owned()); + let emis_py = PyArray2::from_owned_array(py, emis_arr.to_owned()); + + Ok((tg_py, result.tg_wall, result.ci_tg, alb_py, emis_py)) +} + +/// Apply thermal delay to ground temperature using TsWaveDelay model. +/// +/// The thermal delay model simulates ground temperature response to changing +/// radiation conditions using an exponential decay function with a decay constant +/// of 33.27 day⁻¹ (time constant ≈ 43 minutes). +/// +/// Parameters: +/// - gvfLup: Current radiative equilibrium temperature (2D array) +/// - firstdaytime: True (1) if first timestep after sunrise, False (0) otherwise +/// - timeadd: Time since last full update (fraction of day) +/// - timestepdec: Current timestep duration (fraction of day) +/// - Tgmap1: Previous delayed temperature (2D array) +/// +/// Returns tuple: +/// - Lup: Temperature with thermal inertia applied (2D array) +/// - timeadd: Updated time accumulator (fraction of day) +/// - Tgmap1: Updated previous temperature for next iteration (2D array) +#[pyfunction] +pub fn ts_wave_delay<'py>( + py: Python<'py>, + gvf_lup: PyReadonlyArray2<'py, f32>, + firstdaytime: i32, + timeadd: f32, + timestepdec: f32, + tgmap1: PyReadonlyArray2<'py, f32>, +) -> PyResult<(Bound<'py, PyArray2>, f32, Bound<'py, PyArray2>)> { + let gvf_lup_arr = gvf_lup.as_array(); + let mut tgmap1_arr = tgmap1.as_array().to_owned(); + + let tgmap0 = &gvf_lup_arr; // current timestep + + // First morning: reset previous 
temperature + if firstdaytime == 1 { + tgmap1_arr.assign(tgmap0); + } + + let threshold = 59.0 / 1440.0; // ~59 minutes threshold + let decay_constant = -33.27f32; + + let (lup, new_timeadd, new_tgmap1) = if timeadd >= threshold { + // More or equal to 59 min + let weight1 = (decay_constant * timeadd).exp(); + let new_tgmap1 = tgmap0 * (1.0 - weight1) + &tgmap1_arr * weight1; + let lup = new_tgmap1.clone(); + + let new_timeadd = if timestepdec > threshold { + timestepdec + } else { + 0.0 + }; + + (lup, new_timeadd, new_tgmap1) + } else { + // Accumulate time + let new_timeadd = timeadd + timestepdec; + let weight1 = (decay_constant * new_timeadd).exp(); + let lup = tgmap0 * (1.0 - weight1) + &tgmap1_arr * weight1; + + (lup, new_timeadd, tgmap1_arr.clone()) + }; + + let lup_py = PyArray2::from_owned_array(py, lup); + let tgmap1_py = PyArray2::from_owned_array(py, new_tgmap1); + + Ok((lup_py, new_timeadd, tgmap1_py)) +} + +/// Result struct for batched thermal delay +#[pyclass] +pub struct TsWaveDelayBatchResult { + /// Delayed lup (center) + #[pyo3(get)] + pub lup: Py>, + /// Delayed lup_e (east) + #[pyo3(get)] + pub lup_e: Py>, + /// Delayed lup_s (south) + #[pyo3(get)] + pub lup_s: Py>, + /// Delayed lup_w (west) + #[pyo3(get)] + pub lup_w: Py>, + /// Delayed lup_n (north) + #[pyo3(get)] + pub lup_n: Py>, + /// Delayed ground temperature + #[pyo3(get)] + pub tg_out: Py>, + /// Updated time accumulator + #[pyo3(get)] + pub timeadd: f32, + /// Updated tgmap1 (center) + #[pyo3(get)] + pub tgmap1: Py>, + /// Updated tgmap1_e (east) + #[pyo3(get)] + pub tgmap1_e: Py>, + /// Updated tgmap1_s (south) + #[pyo3(get)] + pub tgmap1_s: Py>, + /// Updated tgmap1_w (west) + #[pyo3(get)] + pub tgmap1_w: Py>, + /// Updated tgmap1_n (north) + #[pyo3(get)] + pub tgmap1_n: Py>, + /// Updated tgout1 (ground temperature) + #[pyo3(get)] + pub tgout1: Py>, +} + +/// Apply thermal delay to ground temperature for all 6 directional components. 
+/// +/// Batched version of ts_wave_delay that processes lup, lup_e/s/w/n, and tg_temp +/// in a single FFI call, reducing Python/Rust crossing overhead from 6 calls to 1. +/// +/// Parameters: +/// - lup, lup_e, lup_s, lup_w, lup_n: Current radiative equilibrium for each direction +/// - tg_temp: Ground temperature (tg * shadow + ta) +/// - firstdaytime: True (1) if first timestep after sunrise, False (0) otherwise +/// - timeadd: Time since last full update (fraction of day) +/// - timestepdec: Current timestep duration (fraction of day) +/// - tgmap1, tgmap1_e, tgmap1_s, tgmap1_w, tgmap1_n: Previous delayed temperatures +/// - tgout1: Previous delayed ground temperature +/// +/// Returns TsWaveDelayBatchResult with all delayed outputs and updated state +#[pyfunction] +pub fn ts_wave_delay_batch<'py>( + py: Python<'py>, + lup: PyReadonlyArray2<'py, f32>, + lup_e: PyReadonlyArray2<'py, f32>, + lup_s: PyReadonlyArray2<'py, f32>, + lup_w: PyReadonlyArray2<'py, f32>, + lup_n: PyReadonlyArray2<'py, f32>, + tg_temp: PyReadonlyArray2<'py, f32>, + firstdaytime: i32, + timeadd: f32, + timestepdec: f32, + tgmap1: PyReadonlyArray2<'py, f32>, + tgmap1_e: PyReadonlyArray2<'py, f32>, + tgmap1_s: PyReadonlyArray2<'py, f32>, + tgmap1_w: PyReadonlyArray2<'py, f32>, + tgmap1_n: PyReadonlyArray2<'py, f32>, + tgout1: PyReadonlyArray2<'py, f32>, +) -> PyResult { + let result = ts_wave_delay_batch_pure( + lup.as_array(), + lup_e.as_array(), + lup_s.as_array(), + lup_w.as_array(), + lup_n.as_array(), + tg_temp.as_array(), + firstdaytime, + timeadd, + timestepdec, + tgmap1.as_array(), + tgmap1_e.as_array(), + tgmap1_s.as_array(), + tgmap1_w.as_array(), + tgmap1_n.as_array(), + tgout1.as_array(), + ); + + Ok(TsWaveDelayBatchResult { + lup: PyArray2::from_owned_array(py, result.lup).unbind(), + lup_e: PyArray2::from_owned_array(py, result.lup_e).unbind(), + lup_s: PyArray2::from_owned_array(py, result.lup_s).unbind(), + lup_w: PyArray2::from_owned_array(py, result.lup_w).unbind(), + 
lup_n: PyArray2::from_owned_array(py, result.lup_n).unbind(), + tg_out: PyArray2::from_owned_array(py, result.tg_out).unbind(), + timeadd: result.timeadd, + tgmap1: PyArray2::from_owned_array(py, result.tgmap1).unbind(), + tgmap1_e: PyArray2::from_owned_array(py, result.tgmap1_e).unbind(), + tgmap1_s: PyArray2::from_owned_array(py, result.tgmap1_s).unbind(), + tgmap1_w: PyArray2::from_owned_array(py, result.tgmap1_w).unbind(), + tgmap1_n: PyArray2::from_owned_array(py, result.tgmap1_n).unbind(), + tgout1: PyArray2::from_owned_array(py, result.tgout1).unbind(), + }) +} diff --git a/rust/src/gvf.rs b/rust/src/gvf.rs index 78d3562..5cf0c10 100644 --- a/rust/src/gvf.rs +++ b/rust/src/gvf.rs @@ -1,10 +1,79 @@ -use ndarray::{Array1, Array2, Zip}; +use ndarray::{Array1, Array2, ArrayView2, Zip}; use numpy::{IntoPyArray, PyArray2, PyReadonlyArray2}; use pyo3::prelude::*; use rayon::prelude::*; const PI: f32 = std::f32::consts::PI; +/// Scalar parameters for GVF calculation. +/// +/// Groups all scalar (non-array) parameters to reduce function signature complexity. 
+#[pyclass] +#[derive(Clone)] +pub struct GvfScalarParams { + /// Pixel scale (meters per pixel) + #[pyo3(get, set)] + pub scale: f32, + /// First threshold for wall/building ratio + #[pyo3(get, set)] + pub first: f32, + /// Second threshold for wall/building ratio + #[pyo3(get, set)] + pub second: f32, + /// Wall temperature deviation from air temperature (K) + #[pyo3(get, set)] + pub tgwall: f32, + /// Air temperature (°C) + #[pyo3(get, set)] + pub ta: f32, + /// Wall emissivity + #[pyo3(get, set)] + pub ewall: f32, + /// Stefan-Boltzmann constant (W/m²/K⁴) + #[pyo3(get, set)] + pub sbc: f32, + /// Building albedo + #[pyo3(get, set)] + pub albedo_b: f32, + /// Water temperature (°C) + #[pyo3(get, set)] + pub twater: f32, + /// Whether land cover data is available + #[pyo3(get, set)] + pub landcover: bool, +} + +#[pymethods] +impl GvfScalarParams { + #[new] + #[allow(clippy::too_many_arguments)] + pub fn new( + scale: f32, + first: f32, + second: f32, + tgwall: f32, + ta: f32, + ewall: f32, + sbc: f32, + albedo_b: f32, + twater: f32, + landcover: bool, + ) -> Self { + Self { + scale, + first, + second, + tgwall, + ta, + ewall, + sbc, + albedo_b, + twater, + landcover, + } + } +} + #[pyclass] pub struct GvfResult { #[pyo3(get)] @@ -43,103 +112,61 @@ pub struct GvfResult { pub gvf_norm: Py>, } -#[pyfunction] +/// Pure result type for GVF calculation (no PyO3 dependency). +pub(crate) struct GvfResultPure { + pub gvf_lup: Array2, + pub gvfalb: Array2, + /// Purely geometric albedo term. `None` when read from `GvfGeometryCache`. + pub gvfalbnosh: Option>, + pub gvf_lup_e: Array2, + pub gvfalb_e: Array2, + /// Purely geometric albedo term. `None` when read from `GvfGeometryCache`. + pub gvfalbnosh_e: Option>, + pub gvf_lup_s: Array2, + pub gvfalb_s: Array2, + /// Purely geometric albedo term. `None` when read from `GvfGeometryCache`. + pub gvfalbnosh_s: Option>, + pub gvf_lup_w: Array2, + pub gvfalb_w: Array2, + /// Purely geometric albedo term. 
`None` when read from `GvfGeometryCache`. + pub gvfalbnosh_w: Option>, + pub gvf_lup_n: Array2, + pub gvfalb_n: Array2, + /// Purely geometric albedo term. `None` when read from `GvfGeometryCache`. + pub gvfalbnosh_n: Option>, + /// Purely geometric normalization term. `None` when read from `GvfGeometryCache`. + pub gvf_sum: Option>, + /// Purely geometric normalization term. `None` when read from `GvfGeometryCache`. + pub gvf_norm: Option>, +} + +/// Pure-ndarray implementation of GVF calculation. +/// Callable from pipeline.rs (fused path) or from the PyO3 wrapper (modular path). #[allow(clippy::too_many_arguments)] #[allow(non_snake_case)] -pub fn gvf_calc( - py: Python, - wallsun: PyReadonlyArray2, - walls: PyReadonlyArray2, - buildings: PyReadonlyArray2, +pub(crate) fn gvf_calc_pure( + wallsun: ArrayView2, + walls: ArrayView2, + buildings: ArrayView2, scale: f32, - shadow: PyReadonlyArray2, + shadow: ArrayView2, first: f32, second: f32, - dirwalls: PyReadonlyArray2, - tg: PyReadonlyArray2, + dirwalls: ArrayView2, + tg: ArrayView2, tgwall: f32, ta: f32, - emis_grid: PyReadonlyArray2, + emis_grid: ArrayView2, ewall: f32, - alb_grid: PyReadonlyArray2, + alb_grid: ArrayView2, sbc: f32, albedo_b: f32, twater: f32, - lc_grid: Option>, + lc_grid: Option>, landcover: bool, -) -> PyResult> { - let wallsun = wallsun.as_array(); - let walls = walls.as_array(); - let buildings = buildings.as_array(); - let shadow = shadow.as_array(); - let dirwalls = dirwalls.as_array(); - let tg = tg.as_array(); - let emis_grid = emis_grid.as_array(); - let alb_grid = alb_grid.as_array(); - let lc_grid_arr = lc_grid.as_ref().map(|arr| arr.as_array()); - +) -> GvfResultPure { let (rows, cols) = (buildings.shape()[0], buildings.shape()[1]); - // Validate that all input arrays have the same shape - let expected_shape = [rows, cols]; - if wallsun.shape() != expected_shape { - return Err(pyo3::exceptions::PyValueError::new_err(format!( - "wallsun shape {:?} does not match buildings shape 
{:?}", - wallsun.shape(), - expected_shape - ))); - } - if walls.shape() != expected_shape { - return Err(pyo3::exceptions::PyValueError::new_err(format!( - "walls shape {:?} does not match buildings shape {:?}", - walls.shape(), - expected_shape - ))); - } - if shadow.shape() != expected_shape { - return Err(pyo3::exceptions::PyValueError::new_err(format!( - "shadow shape {:?} does not match buildings shape {:?}", - shadow.shape(), - expected_shape - ))); - } - if dirwalls.shape() != expected_shape { - return Err(pyo3::exceptions::PyValueError::new_err(format!( - "dirwalls shape {:?} does not match buildings shape {:?}", - dirwalls.shape(), - expected_shape - ))); - } - if tg.shape() != expected_shape { - return Err(pyo3::exceptions::PyValueError::new_err(format!( - "tg shape {:?} does not match buildings shape {:?}", - tg.shape(), - expected_shape - ))); - } - if emis_grid.shape() != expected_shape { - return Err(pyo3::exceptions::PyValueError::new_err(format!( - "emis_grid shape {:?} does not match buildings shape {:?}", - emis_grid.shape(), - expected_shape - ))); - } - if alb_grid.shape() != expected_shape { - return Err(pyo3::exceptions::PyValueError::new_err(format!( - "alb_grid shape {:?} does not match buildings shape {:?}", - alb_grid.shape(), - expected_shape - ))); - } - if let Some(lc) = lc_grid_arr.as_ref() { - if lc.shape() != expected_shape { - return Err(pyo3::exceptions::PyValueError::new_err(format!( - "lc_grid shape {:?} does not match buildings shape {:?}", - lc.shape(), - expected_shape - ))); - } - } let azimuth_a: Array1 = Array1::range(5.0, 359.0, 20.0); let num_azimuths = azimuth_a.len() as f32; let num_azimuths_half = num_azimuths / 2.0; @@ -220,7 +247,7 @@ pub fn gvf_calc( sbc, albedo_b, twater, - lc_grid_arr.as_ref().map(|a| a.view()), + lc_grid, landcover, ); a.lup.zip_mut_with(&gvf_lup_i, |x, &y| *x += y); @@ -289,7 +316,7 @@ pub fn gvf_calc( let gvf_lup_n = accum.lup_n.mapv(|v| v * scale_half) + &emis_add; let gvfalb_n = 
accum.alb_n.mapv(|v| v * scale_half); let gvfalbnosh_n = accum.albnosh_n.mapv(|v| v * scale_half); - let gvf_sum = accum.sum; // raw sum + let gvf_sum = accum.sum; let mut gvf_norm = gvf_sum.mapv(|v| v * scale_all); Zip::from(&mut gvf_norm) .and(&buildings) @@ -299,26 +326,316 @@ pub fn gvf_calc( } }); + GvfResultPure { + gvf_lup, + gvfalb, + gvfalbnosh: Some(gvfalbnosh), + gvf_lup_e, + gvfalb_e, + gvfalbnosh_e: Some(gvfalbnosh_e), + gvf_lup_s, + gvfalb_s, + gvfalbnosh_s: Some(gvfalbnosh_s), + gvf_lup_w, + gvfalb_w, + gvfalbnosh_w: Some(gvfalbnosh_w), + gvf_lup_n, + gvfalb_n, + gvfalbnosh_n: Some(gvfalbnosh_n), + gvf_sum: Some(gvf_sum), + gvf_norm: Some(gvf_norm), + } +} + +/// GVF calculation using precomputed geometry cache (thermal-only pass). +/// +/// Skips all building ray-tracing. Uses cached blocking distances and geometric outputs. +/// Returns identical results to `gvf_calc_pure` but faster on subsequent timesteps. +#[allow(clippy::too_many_arguments)] +#[allow(non_snake_case)] +pub(crate) fn gvf_calc_with_cache( + cache: &crate::gvf_geometry::GvfGeometryCache, + wallsun: ArrayView2, + buildings: ArrayView2, + shadow: ArrayView2, + tg: ArrayView2, + tgwall: f32, + ta: f32, + emis_grid: ArrayView2, + ewall: f32, + alb_grid: ArrayView2, + sbc: f32, + albedo_b: f32, + twater: f32, + lc_grid: Option>, + landcover: bool, +) -> GvfResultPure { + let (rows, cols) = (buildings.shape()[0], buildings.shape()[1]); + + let num_azimuths = cache.azimuths.len() as f32; + let num_azimuths_half = num_azimuths / 2.0; + let ta_k = ta + 273.15; + let ta_k_pow4 = ta_k.powi(4); + + let mut sunwall_mask = wallsun.to_owned(); + sunwall_mask.mapv_inplace(|x| if x > 0. { 1. 
} else { x }); + + let lup = Zip::from(emis_grid) + .and(tg) + .and(shadow) + .map_collect(|&emis, &tg, &sh| { + sbc * emis * (tg * sh + ta_k).powi(4) - sbc * emis * ta_k_pow4 + }); + let albshadow = &alb_grid * &shadow; + + let mut tg_for_lup_final = tg.to_owned(); + if landcover { + if let Some(lc_grid) = lc_grid { + Zip::from(&mut tg_for_lup_final) + .and(lc_grid) + .for_each(|tg, &lc| { + if lc == 3. { + *tg = twater - ta; + } + }); + } + } + let lup_final = Zip::from(emis_grid) + .and(&tg_for_lup_final) + .and(shadow) + .map_collect(|&emis, &tg, &sh| { + sbc * emis * (tg * sh + ta_k).powi(4) - sbc * emis * ta_k_pow4 + }); + let buildings_inv = buildings.mapv(|x| 1.0 - x); + let lup_base = &lup_final * &buildings_inv; + let alb_base = &alb_grid * &buildings_inv * &shadow; + let lwall = sbc * ewall * (tgwall + ta_k).powi(4) - sbc * ewall * ta_k_pow4; + + let first = cache.first; + let second = cache.second; + + struct ThermalAccum { + lup: Array2, + alb: Array2, + lup_e: Array2, + alb_e: Array2, + lup_s: Array2, + alb_s: Array2, + lup_w: Array2, + alb_w: Array2, + lup_n: Array2, + alb_n: Array2, + } + let init_accum = || ThermalAccum { + lup: Array2::zeros((rows, cols)), + alb: Array2::zeros((rows, cols)), + lup_e: Array2::zeros((rows, cols)), + alb_e: Array2::zeros((rows, cols)), + lup_s: Array2::zeros((rows, cols)), + alb_s: Array2::zeros((rows, cols)), + lup_w: Array2::zeros((rows, cols)), + alb_w: Array2::zeros((rows, cols)), + lup_n: Array2::zeros((rows, cols)), + alb_n: Array2::zeros((rows, cols)), + }; + + let accum = cache + .azimuths + .par_iter() + .fold(init_accum, |mut a, geom| { + let (gvf_lup_i, gvfalb_i) = crate::sun::sun_on_surface_cached( + geom, + sunwall_mask.view(), + lup.view(), + albshadow.view(), + lwall, + albedo_b, + first, + second, + ); + + a.lup.zip_mut_with(&gvf_lup_i, |x, &y| *x += y); + a.alb.zip_mut_with(&gvfalb_i, |x, &y| *x += y); + let azimuth = geom.azimuth_deg; + if (0.0..180.0).contains(&azimuth) { + 
a.lup_e.zip_mut_with(&gvf_lup_i, |x, &y| *x += y); + a.alb_e.zip_mut_with(&gvfalb_i, |x, &y| *x += y); + } + if (90.0..270.0).contains(&azimuth) { + a.lup_s.zip_mut_with(&gvf_lup_i, |x, &y| *x += y); + a.alb_s.zip_mut_with(&gvfalb_i, |x, &y| *x += y); + } + if (180.0..360.0).contains(&azimuth) { + a.lup_w.zip_mut_with(&gvf_lup_i, |x, &y| *x += y); + a.alb_w.zip_mut_with(&gvfalb_i, |x, &y| *x += y); + } + if !(90.0..270.0).contains(&azimuth) { + a.lup_n.zip_mut_with(&gvf_lup_i, |x, &y| *x += y); + a.alb_n.zip_mut_with(&gvfalb_i, |x, &y| *x += y); + } + a + }) + .reduce(init_accum, |mut a, b| { + a.lup.zip_mut_with(&b.lup, |x, &y| *x += y); + a.alb.zip_mut_with(&b.alb, |x, &y| *x += y); + a.lup_e.zip_mut_with(&b.lup_e, |x, &y| *x += y); + a.alb_e.zip_mut_with(&b.alb_e, |x, &y| *x += y); + a.lup_s.zip_mut_with(&b.lup_s, |x, &y| *x += y); + a.alb_s.zip_mut_with(&b.alb_s, |x, &y| *x += y); + a.lup_w.zip_mut_with(&b.lup_w, |x, &y| *x += y); + a.alb_w.zip_mut_with(&b.alb_w, |x, &y| *x += y); + a.lup_n.zip_mut_with(&b.lup_n, |x, &y| *x += y); + a.alb_n.zip_mut_with(&b.alb_n, |x, &y| *x += y); + a + }); + + let scale_all = 1.0 / num_azimuths; + let scale_half = 1.0 / num_azimuths_half; + + // Ambient baseline: SBC × emis × Ta_K^4 (UMEP gvf_2018a.py line 64) + // The per-azimuth accumulation uses *differential* Lup (excess above ambient). + // After averaging, the ambient baseline must be added back to produce total Lup. 
+ let ambient_lup = emis_grid.mapv(|emis| sbc * emis * ta_k_pow4); + + let gvf_lup = accum.lup.mapv(|v| v * scale_all) + &lup_base + &ambient_lup; + let gvfalb = accum.alb.mapv(|v| v * scale_all) + &alb_base; + let gvf_lup_e = accum.lup_e.mapv(|v| v * scale_half) + &lup_base + &ambient_lup; + let gvfalb_e = accum.alb_e.mapv(|v| v * scale_half) + &alb_base; + let gvf_lup_s = accum.lup_s.mapv(|v| v * scale_half) + &lup_base + &ambient_lup; + let gvfalb_s = accum.alb_s.mapv(|v| v * scale_half) + &alb_base; + let gvf_lup_w = accum.lup_w.mapv(|v| v * scale_half) + &lup_base + &ambient_lup; + let gvfalb_w = accum.alb_w.mapv(|v| v * scale_half) + &alb_base; + let gvf_lup_n = accum.lup_n.mapv(|v| v * scale_half) + &lup_base + &ambient_lup; + let gvfalb_n = accum.alb_n.mapv(|v| v * scale_half) + &alb_base; + + // `gvfalbnosh*` and GVF normalization terms are purely geometric and already + // cached in `GvfGeometryCache`. Return `None` for those fields so accidental + // access is explicit instead of silently carrying invalid sentinel arrays. + GvfResultPure { + gvf_lup, + gvfalb, + gvfalbnosh: None, + gvf_lup_e, + gvfalb_e, + gvfalbnosh_e: None, + gvf_lup_s, + gvfalb_s, + gvfalbnosh_s: None, + gvf_lup_w, + gvfalb_w, + gvfalbnosh_w: None, + gvf_lup_n, + gvfalb_n, + gvfalbnosh_n: None, + gvf_sum: None, + gvf_norm: None, + } +} + +/// Compute Ground View Factor (GVF) for upwelling longwave and albedo components. +/// +/// GVF represents how much a person "sees" the ground and walls from a given height. +/// This determines thermal radiation received from surrounding surfaces. 
+/// +/// Parameters: +/// - wallsun: Wall sun exposure grid +/// - walls: Wall height grid +/// - buildings: Building mask (0=building, 1=ground) +/// - shadow: Combined shadow fraction +/// - dirwalls: Wall direction/aspect in degrees +/// - tg: Ground temperature deviation from air temperature (K) +/// - emis_grid: Emissivity per pixel +/// - alb_grid: Albedo per pixel +/// - lc_grid: Optional land cover grid +/// - params: Scalar parameters (scale, thresholds, temperatures, etc.) +/// +/// Returns GvfResult with upwelling longwave and albedo view factors for all directions. +#[pyfunction] +#[allow(non_snake_case)] +pub fn gvf_calc( + py: Python, + wallsun: PyReadonlyArray2, + walls: PyReadonlyArray2, + buildings: PyReadonlyArray2, + shadow: PyReadonlyArray2, + dirwalls: PyReadonlyArray2, + tg: PyReadonlyArray2, + emis_grid: PyReadonlyArray2, + alb_grid: PyReadonlyArray2, + lc_grid: Option>, + params: &GvfScalarParams, +) -> PyResult> { + let lc_grid_arr = lc_grid.as_ref().map(|arr| arr.as_array()); + let result = gvf_calc_pure( + wallsun.as_array(), + walls.as_array(), + buildings.as_array(), + params.scale, + shadow.as_array(), + params.first, + params.second, + dirwalls.as_array(), + tg.as_array(), + params.tgwall, + params.ta, + emis_grid.as_array(), + params.ewall, + alb_grid.as_array(), + params.sbc, + params.albedo_b, + params.twater, + lc_grid_arr, + params.landcover, + ); + Py::new( py, GvfResult { - gvf_lup: gvf_lup.into_pyarray(py).unbind(), - gvfalb: gvfalb.into_pyarray(py).unbind(), - gvfalbnosh: gvfalbnosh.into_pyarray(py).unbind(), - gvf_lup_e: gvf_lup_e.into_pyarray(py).unbind(), - gvfalb_e: gvfalb_e.into_pyarray(py).unbind(), - gvfalbnosh_e: gvfalbnosh_e.into_pyarray(py).unbind(), - gvf_lup_s: gvf_lup_s.into_pyarray(py).unbind(), - gvfalb_s: gvfalb_s.into_pyarray(py).unbind(), - gvfalbnosh_s: gvfalbnosh_s.into_pyarray(py).unbind(), - gvf_lup_w: gvf_lup_w.into_pyarray(py).unbind(), - gvfalb_w: gvfalb_w.into_pyarray(py).unbind(), - gvfalbnosh_w: 
gvfalbnosh_w.into_pyarray(py).unbind(), - gvf_lup_n: gvf_lup_n.into_pyarray(py).unbind(), - gvfalb_n: gvfalb_n.into_pyarray(py).unbind(), - gvfalbnosh_n: gvfalbnosh_n.into_pyarray(py).unbind(), - gvf_sum: gvf_sum.into_pyarray(py).unbind(), - gvf_norm: gvf_norm.into_pyarray(py).unbind(), + gvf_lup: result.gvf_lup.into_pyarray(py).unbind(), + gvfalb: result.gvfalb.into_pyarray(py).unbind(), + gvfalbnosh: result + .gvfalbnosh + .expect("gvfalbnosh is required for gvf_calc()") + .into_pyarray(py) + .unbind(), + gvf_lup_e: result.gvf_lup_e.into_pyarray(py).unbind(), + gvfalb_e: result.gvfalb_e.into_pyarray(py).unbind(), + gvfalbnosh_e: result + .gvfalbnosh_e + .expect("gvfalbnosh_e is required for gvf_calc()") + .into_pyarray(py) + .unbind(), + gvf_lup_s: result.gvf_lup_s.into_pyarray(py).unbind(), + gvfalb_s: result.gvfalb_s.into_pyarray(py).unbind(), + gvfalbnosh_s: result + .gvfalbnosh_s + .expect("gvfalbnosh_s is required for gvf_calc()") + .into_pyarray(py) + .unbind(), + gvf_lup_w: result.gvf_lup_w.into_pyarray(py).unbind(), + gvfalb_w: result.gvfalb_w.into_pyarray(py).unbind(), + gvfalbnosh_w: result + .gvfalbnosh_w + .expect("gvfalbnosh_w is required for gvf_calc()") + .into_pyarray(py) + .unbind(), + gvf_lup_n: result.gvf_lup_n.into_pyarray(py).unbind(), + gvfalb_n: result.gvfalb_n.into_pyarray(py).unbind(), + gvfalbnosh_n: result + .gvfalbnosh_n + .expect("gvfalbnosh_n is required for gvf_calc()") + .into_pyarray(py) + .unbind(), + gvf_sum: result + .gvf_sum + .expect("gvf_sum is required for gvf_calc()") + .into_pyarray(py) + .unbind(), + gvf_norm: result + .gvf_norm + .expect("gvf_norm is required for gvf_calc()") + .into_pyarray(py) + .unbind(), }, ) } diff --git a/rust/src/gvf_geometry.rs b/rust/src/gvf_geometry.rs new file mode 100644 index 0000000..6ba0cb3 --- /dev/null +++ b/rust/src/gvf_geometry.rs @@ -0,0 +1,354 @@ +//! GVF geometry caching — precompute building ray-trace once per DSM. +//! +//! 
`f` (building occlusion) is binary (0/1) and monotonically descending. +//! We represent it as a blocking distance (u16) per pixel per azimuth. +//! All purely-geometric accumulators are precomputed and cached. + +use ndarray::{s, Array1, Array2, ArrayView2, Zip}; +use rayon::prelude::*; + +const PI: f32 = std::f32::consts::PI; + +/// Per-azimuth precomputed geometry. +pub(crate) struct AzimuthGeometry { + /// Azimuth angle in degrees for this geometry snapshot. + pub azimuth_deg: f32, + /// Step at which each pixel gets blocked (f→0). `second` if never blocked. + pub blocking_distance: Array2, + /// (dx, dy) shift offsets per step. + pub shifts: Vec<(isize, isize)>, + /// Wall-facing mask for this azimuth direction. + pub facesh: Array2, + /// Accumulated albedo (no shadow) through occlusion — snapshot at `first` threshold. + pub albnosh_accum_first: Array2, + /// Accumulated albedo (no shadow) through occlusion — full range. + pub albnosh_accum: Array2, + /// Wall albedo (no shadow) weighted by geometric wall visibility — snapshot at `first`. + pub wallnosh_accum_first: Array2, + /// Wall albedo (no shadow) weighted by geometric wall visibility — full range. + pub wallnosh_accum: Array2, + /// Whether any wall is geometrically visible within `first` height. + pub wall_influence_first: Array2, + /// Whether any wall is geometrically visible within full height. + pub wall_influence: Array2, +} + +/// Full GVF geometry cache for all 18 azimuths. +pub(crate) struct GvfGeometryCache { + pub azimuths: Vec, + pub first: f32, + pub second: f32, + /// Cached gvfalbnosh outputs (purely geometric): center, E, S, W, N. + pub cached_albnosh: Array2, + pub cached_albnosh_e: Array2, + pub cached_albnosh_s: Array2, + pub cached_albnosh_w: Array2, + pub cached_albnosh_n: Array2, +} + +/// Compute (dx, dy) shift for a given azimuth and step index. +fn compute_shift(azimuth_rad: f32, index: f32) -> (isize, isize) { + let pibyfour = PI / 4.; + let threetimespibyfour = 3. 
* pibyfour; + let fivetimespibyfour = 5. * pibyfour; + let seventimespibyfour = 7. * pibyfour; + let sinazimuth = azimuth_rad.sin(); + let cosazimuth = azimuth_rad.cos(); + let tanazimuth = azimuth_rad.tan(); + let signsinazimuth = sinazimuth.signum(); + let signcosazimuth = cosazimuth.signum(); + + let (dx, dy) = if (pibyfour..threetimespibyfour).contains(&azimuth_rad) + || (fivetimespibyfour..seventimespibyfour).contains(&azimuth_rad) + { + ( + -1. * signcosazimuth * (index / tanazimuth).abs().round(), + signsinazimuth * index, + ) + } else { + ( + -1. * signcosazimuth * index, + signsinazimuth * (index * tanazimuth).abs().round(), + ) + }; + + (dx as isize, dy as isize) +} + +/// Compute slice bounds for a shift (dx, dy) on a grid of size (sizex, sizey). +/// Returns (x_c_slice, x_p_slice) as ((xc1,xc2,yc1,yc2), (xp1,xp2,yp1,yp2)). +fn compute_slices( + dx: isize, + dy: isize, + sizex: usize, + sizey: usize, +) -> ((isize, isize, isize, isize), (isize, isize, isize, isize)) { + let absdx = dx.abs(); + let absdy = dy.abs(); + + let xc1 = (dx + absdx) / 2; + let xc2 = sizex as isize + (dx - absdx) / 2; + let yc1 = (dy + absdy) / 2; + let yc2 = sizey as isize + (dy - absdy) / 2; + + let xp1 = -(dx - absdx) / 2; + let xp2 = sizex as isize - (dx + absdx) / 2; + let yp1 = -(dy - absdy) / 2; + let yp2 = sizey as isize - (dy + absdy) / 2; + + ((xc1, xc2, yc1, yc2), (xp1, xp2, yp1, yp2)) +} + +/// Compute facesh mask for a given azimuth vs wall aspects. +fn compute_facesh( + azimuth_rad: f32, + wall_aspect: ArrayView2, + wall_ht: ArrayView2, +) -> Array2 { + let azilow = azimuth_rad - PI / 2.; + let azihigh = azimuth_rad + PI / 2.; + let wallbol = wall_ht.mapv(|x| if x > 0. { 1. } else { 0. }); + + if azilow >= 0. && azihigh < 2. * PI { + let mut facesh = Zip::from(wall_aspect).map_collect(|&aspect| { + if aspect < azilow || aspect >= azihigh { + 1. + } else { + 0. + } + }); + facesh = facesh - &wallbol + 1.; + facesh + } else if azilow < 0. && azihigh <= 2. 
* PI { + let azilow_adj = azilow + 2. * PI; + let mut facesh = Zip::from(wall_aspect).map_collect(|&aspect| { + if aspect > azilow_adj || aspect <= azihigh { + -1. + } else { + 0. + } + }); + facesh.mapv_inplace(|x| x + 1.); + facesh + } else { + let azihigh_adj = azihigh - 2. * PI; + let mut facesh = Zip::from(wall_aspect).map_collect(|&aspect| { + if aspect > azilow || aspect <= azihigh_adj { + -1. + } else { + 0. + } + }); + facesh.mapv_inplace(|x| x + 1.); + facesh + } +} + +/// Precompute geometry for a single azimuth direction. +fn precompute_azimuth_geometry( + azimuth_deg: f32, + buildings: ArrayView2, + wall_aspect: ArrayView2, + wall_ht: ArrayView2, + alb_grid: ArrayView2, + wall_albedo: f32, + first: f32, + second: f32, + pixel_scale: f32, +) -> AzimuthGeometry { + let (sizex, sizey) = (buildings.nrows(), buildings.ncols()); + let azimuth_rad = azimuth_deg * (PI / 180.); + + // Precompute shifts + let num_steps = second as usize; + let mut shifts = Vec::with_capacity(num_steps); + for n in 0..num_steps { + shifts.push(compute_shift(azimuth_rad, n as f32)); + } + + // Ray-trace: compute blocking distances and geometric accumulators + let mut f = buildings.to_owned(); + let mut blocking_distance = Array2::::from_elem((sizex, sizey), second as u16); + let mut tempbu = Array2::::zeros((sizex, sizey)); + let mut tempalbnosh = Array2::::zeros((sizex, sizey)); + let mut tempbubwall = Array2::::zeros((sizex, sizey)); + + let mut weightsumalbnosh = Array2::::zeros((sizex, sizey)); + let mut weightsumalbwallnosh = Array2::::zeros((sizex, sizey)); + + let mut weightsumalbnosh_first = Array2::::zeros((sizex, sizey)); + let mut weightsumalbwallnosh_first = Array2::::zeros((sizex, sizey)); + + let _first_threshold = (first as f32 * pixel_scale).round().max(1.); + + for (n, &(dx, dy)) in shifts.iter().enumerate() { + let ((xc1, xc2, yc1, yc2), (xp1, xp2, yp1, yp2)) = compute_slices(dx, dy, sizex, sizey); + let x_c_slice = s![xc1..xc2, yc1..yc2]; + let x_p_slice = 
s![xp1..xp2, yp1..yp2]; + + // Shift buildings and update occlusion + tempbu + .slice_mut(x_p_slice) + .assign(&buildings.slice(x_c_slice)); + Zip::from(f.view_mut()) + .and(tempbu.view()) + .for_each(|f_val, &tb| { + *f_val = f_val.min(tb); + }); + + // Record blocking distance: first step where f drops to 0 + Zip::from(&mut blocking_distance) + .and(&f) + .for_each(|bd, &fv| { + // Only update if not already blocked (bd still at initial value or higher) + if fv == 0. && *bd > n as u16 { + *bd = n as u16; + } + }); + + // Accumulate albedo (no shadow) weighted by f + tempalbnosh + .slice_mut(x_p_slice) + .assign(&alb_grid.slice(x_c_slice)); + Zip::from(&mut weightsumalbnosh) + .and(&tempalbnosh) + .and(&f) + .for_each(|w, &a, &fv| *w += a * fv); + + // Wall tracking: tempbubwall = "have we seen any wall?" latch + Zip::from(&mut tempbubwall).and(&f).for_each(|bubw, &fv| { + let bwall = 1. - fv; + *bubw = if *bubw + bwall > 0. { 1. } else { 0. }; + }); + weightsumalbwallnosh.zip_mut_with(&tempbubwall, |w, &b| *w += b * wall_albedo); + + // Snapshot at first-height threshold + if (n + 1) as f32 <= first { + weightsumalbnosh_first.assign(&weightsumalbnosh); + weightsumalbwallnosh_first.assign(&weightsumalbwallnosh); + } + } + + // Wall influence masks + let wall_influence_first = weightsumalbwallnosh_first.mapv(|x| (x > 0.) as i32 as f32); + let wall_influence = weightsumalbwallnosh.mapv(|x| (x > 0.) as i32 as f32); + + // Facesh mask + let facesh = compute_facesh(azimuth_rad, wall_aspect, wall_ht); + + AzimuthGeometry { + azimuth_deg, + blocking_distance, + shifts, + facesh, + albnosh_accum_first: weightsumalbnosh_first, + albnosh_accum: weightsumalbnosh, + wallnosh_accum_first: weightsumalbwallnosh_first, + wallnosh_accum: weightsumalbwallnosh, + wall_influence_first, + wall_influence, + } +} + +/// Precompute GVF geometry cache for all 18 azimuths. +/// +/// This runs the building ray-trace once and caches the results. 
+/// Subsequent timesteps skip the geometry and only compute thermal quantities. +#[allow(clippy::too_many_arguments)] +pub(crate) fn precompute_gvf_geometry( + buildings: ArrayView2, + wall_aspect: ArrayView2, + wall_ht: ArrayView2, + alb_grid: ArrayView2, + pixel_scale: f32, + first_ht: f32, + second_ht: f32, + wall_albedo: f32, +) -> GvfGeometryCache { + let first = (first_ht * pixel_scale).round().max(1.); + let second = (second_ht * pixel_scale).round(); + let (rows, cols) = (buildings.nrows(), buildings.ncols()); + + let azimuth_a: Array1 = Array1::range(5.0, 359.0, 20.0); + let num_azimuths = azimuth_a.len() as f32; + let num_azimuths_half = num_azimuths / 2.0; + + // Precompute per-azimuth geometry in parallel + let az_geoms: Vec = azimuth_a + .iter() + .collect::>() + .par_iter() + .map(|&&az| { + precompute_azimuth_geometry( + az, + buildings, + wall_aspect, + wall_ht, + alb_grid, + wall_albedo, + first, + second, + pixel_scale, + ) + }) + .collect(); + + // Accumulate cached gvfalbnosh outputs (5 directions) from per-azimuth results + let buildings_inv = buildings.mapv(|x| 1. - x); + let scale_all = 1.0 / num_azimuths; + let scale_half = 1.0 / num_azimuths_half; + + let mut albnosh_center = Array2::::zeros((rows, cols)); + let mut albnosh_e = Array2::::zeros((rows, cols)); + let mut albnosh_s = Array2::::zeros((rows, cols)); + let mut albnosh_w = Array2::::zeros((rows, cols)); + let mut albnosh_n = Array2::::zeros((rows, cols)); + + for (i, geom) in az_geoms.iter().enumerate() { + let azimuth = azimuth_a[i]; + + // Per-azimuth gvfalbnosh (matches sun_on_surface post-loop logic) + let gvfalbnosh1 = (&geom.wallnosh_accum_first + &geom.albnosh_accum_first) / (first + 1.) + * &geom.wall_influence_first + + &geom.albnosh_accum_first / first * geom.wall_influence_first.mapv(|x| 1. - x); + let gvfalbnosh2 = (&geom.wallnosh_accum + &geom.albnosh_accum) / second + * &geom.wall_influence + + &geom.albnosh_accum / second * geom.wall_influence.mapv(|x| 1. 
- x); + + let gvfalbnosh_az = (&gvfalbnosh1 * 0.5 + &gvfalbnosh2 * 0.4) / 0.9 * &buildings + + &alb_grid * &buildings_inv; + + albnosh_center += &gvfalbnosh_az; + + if (0.0..180.0).contains(&azimuth) { + albnosh_e += &gvfalbnosh_az; + } + if (90.0..270.0).contains(&azimuth) { + albnosh_s += &gvfalbnosh_az; + } + if (180.0..360.0).contains(&azimuth) { + albnosh_w += &gvfalbnosh_az; + } + if !(90.0..270.0).contains(&azimuth) { + albnosh_n += &gvfalbnosh_az; + } + } + + // Scale by number of azimuths + albnosh_center.mapv_inplace(|v| v * scale_all); + albnosh_e.mapv_inplace(|v| v * scale_half); + albnosh_s.mapv_inplace(|v| v * scale_half); + albnosh_w.mapv_inplace(|v| v * scale_half); + albnosh_n.mapv_inplace(|v| v * scale_half); + + GvfGeometryCache { + azimuths: az_geoms, + first, + second, + cached_albnosh: albnosh_center, + cached_albnosh_e: albnosh_e, + cached_albnosh_s: albnosh_s, + cached_albnosh_w: albnosh_w, + cached_albnosh_n: albnosh_n, + } +} diff --git a/rust/src/lib.rs b/rust/src/lib.rs index 8bf7fbf..953c5b6 100644 --- a/rust/src/lib.rs +++ b/rust/src/lib.rs @@ -3,14 +3,23 @@ use pyo3::prelude::*; mod emissivity_models; #[cfg(feature = "gpu")] mod gpu; +mod ground; mod gvf; +mod gvf_geometry; +mod morphology; mod patch_radiation; +mod perez; +mod pet; +mod pipeline; mod shadowing; mod sky; mod skyview; mod sun; mod sunlit_shaded_patches; +mod tmrt; +mod utci; mod vegetation; +mod wall_aspect; #[pymodule] fn rustalgos(py_module: &Bound<'_, PyModule>) -> PyResult<()> { @@ -24,6 +33,13 @@ fn rustalgos(py_module: &Bound<'_, PyModule>) -> PyResult<()> { register_gvf_module(py_module)?; register_sky_module(py_module)?; register_vegetation_module(py_module)?; + register_utci_module(py_module)?; + register_pet_module(py_module)?; + register_ground_module(py_module)?; + register_tmrt_module(py_module)?; + register_pipeline_module(py_module)?; + register_morphology_module(py_module)?; + register_wall_aspect_module(py_module)?; // Add GPU feature flag 
#[cfg(feature = "gpu")] @@ -31,7 +47,13 @@ fn rustalgos(py_module: &Bound<'_, PyModule>) -> PyResult<()> { #[cfg(not(feature = "gpu"))] py_module.add("GPU_ENABLED", false)?; - py_module.add("__doc__", "UMEP algorithms implemented in Rust.")?; + // Add build profile flag (false in debug builds, true in release builds) + py_module.add("RELEASE_BUILD", !cfg!(debug_assertions))?; + + py_module.add( + "__doc__", + "SOLWEIG urban microclimate algorithms implemented in Rust.", + )?; Ok(()) } @@ -50,6 +72,7 @@ fn register_shadowing_module(py_module: &Bound<'_, PyModule>) -> PyResult<()> { submodule.add_function(wrap_pyfunction!(shadowing::enable_gpu, &submodule)?)?; submodule.add_function(wrap_pyfunction!(shadowing::disable_gpu, &submodule)?)?; submodule.add_function(wrap_pyfunction!(shadowing::is_gpu_enabled, &submodule)?)?; + submodule.add_function(wrap_pyfunction!(shadowing::gpu_limits, &submodule)?)?; } py_module.add_submodule(&submodule)?; @@ -69,6 +92,7 @@ fn register_skyview_module(py_module: &Bound<'_, PyModule>) -> PyResult<()> { fn register_gvf_module(py_module: &Bound<'_, PyModule>) -> PyResult<()> { let submodule = PyModule::new(py_module.py(), "gvf")?; submodule.add("__doc__", "Ground View Factor calculation.")?; + submodule.add_class::()?; submodule.add_function(wrap_pyfunction!(gvf::gvf_calc, &submodule)?)?; py_module.add_submodule(&submodule)?; Ok(()) @@ -77,7 +101,13 @@ fn register_gvf_module(py_module: &Bound<'_, PyModule>) -> PyResult<()> { fn register_sky_module(py_module: &Bound<'_, PyModule>) -> PyResult<()> { let submodule = PyModule::new(py_module.py(), "sky")?; submodule.add("__doc__", "Anisotropic sky radiation calculations.")?; + submodule.add_class::()?; + submodule.add_class::()?; + submodule.add_class::()?; + submodule.add_class::()?; submodule.add_function(wrap_pyfunction!(sky::anisotropic_sky, &submodule)?)?; + submodule.add_function(wrap_pyfunction!(sky::cylindric_wedge, &submodule)?)?; + 
submodule.add_function(wrap_pyfunction!(sky::weighted_patch_sum, &submodule)?)?; py_module.add_submodule(&submodule)?; Ok(()) } @@ -92,3 +122,110 @@ fn register_vegetation_module(py_module: &Bound<'_, PyModule>) -> PyResult<()> { py_module.add_submodule(&submodule)?; Ok(()) } + +fn register_utci_module(py_module: &Bound<'_, PyModule>) -> PyResult<()> { + let submodule = PyModule::new(py_module.py(), "utci")?; + submodule.add( + "__doc__", + "UTCI (Universal Thermal Climate Index) calculations.", + )?; + submodule.add_function(wrap_pyfunction!(utci::utci_single, &submodule)?)?; + submodule.add_function(wrap_pyfunction!(utci::utci_grid, &submodule)?)?; + py_module.add_submodule(&submodule)?; + Ok(()) +} + +fn register_pet_module(py_module: &Bound<'_, PyModule>) -> PyResult<()> { + let submodule = PyModule::new(py_module.py(), "pet")?; + submodule.add( + "__doc__", + "PET (Physiological Equivalent Temperature) calculations.", + )?; + submodule.add_function(wrap_pyfunction!(pet::pet_calculate, &submodule)?)?; + submodule.add_function(wrap_pyfunction!(pet::pet_grid, &submodule)?)?; + py_module.add_submodule(&submodule)?; + Ok(()) +} + +fn register_ground_module(py_module: &Bound<'_, PyModule>) -> PyResult<()> { + let submodule = PyModule::new(py_module.py(), "ground")?; + submodule.add( + "__doc__", + "Ground temperature and thermal delay calculations.", + )?; + submodule.add_function(wrap_pyfunction!( + ground::compute_ground_temperature, + &submodule + )?)?; + submodule.add_function(wrap_pyfunction!(ground::ts_wave_delay, &submodule)?)?; + submodule.add_function(wrap_pyfunction!(ground::ts_wave_delay_batch, &submodule)?)?; + submodule.add_class::()?; + py_module.add_submodule(&submodule)?; + Ok(()) +} + +fn register_tmrt_module(py_module: &Bound<'_, PyModule>) -> PyResult<()> { + let submodule = PyModule::new(py_module.py(), "tmrt")?; + submodule.add("__doc__", "Mean Radiant Temperature (Tmrt) calculations.")?; + submodule.add_class::()?; + 
submodule.add_function(wrap_pyfunction!(tmrt::compute_tmrt, &submodule)?)?; + py_module.add_submodule(&submodule)?; + Ok(()) +} + +fn register_pipeline_module(py_module: &Bound<'_, PyModule>) -> PyResult<()> { + let submodule = PyModule::new(py_module.py(), "pipeline")?; + submodule.add( + "__doc__", + "Fused timestep pipeline — single FFI call per timestep.", + )?; + submodule.add_class::()?; + submodule.add_class::()?; + submodule.add_class::()?; + submodule.add_class::()?; + submodule.add_class::()?; + submodule.add_function(wrap_pyfunction!(pipeline::compute_timestep, &submodule)?)?; + submodule.add_function(wrap_pyfunction!( + pipeline::precompute_gvf_cache, + &submodule + )?)?; + submodule.add_function(wrap_pyfunction!(perez::perez_v3_py, &submodule)?)?; + submodule.add_function(wrap_pyfunction!(perez::compute_steradians_py, &submodule)?)?; + + // Anisotropic sky GPU control functions + #[cfg(feature = "gpu")] + { + submodule.add_function(wrap_pyfunction!(pipeline::enable_aniso_gpu, &submodule)?)?; + submodule.add_function(wrap_pyfunction!(pipeline::disable_aniso_gpu, &submodule)?)?; + submodule.add_function(wrap_pyfunction!( + pipeline::is_aniso_gpu_enabled, + &submodule + )?)?; + } + + py_module.add_submodule(&submodule)?; + Ok(()) +} + +fn register_morphology_module(py_module: &Bound<'_, PyModule>) -> PyResult<()> { + let submodule = PyModule::new(py_module.py(), "morphology")?; + submodule.add("__doc__", "Morphological operations (binary dilation).")?; + submodule.add_function(wrap_pyfunction!(morphology::binary_dilation, &submodule)?)?; + py_module.add_submodule(&submodule)?; + Ok(()) +} + +fn register_wall_aspect_module(py_module: &Bound<'_, PyModule>) -> PyResult<()> { + let submodule = PyModule::new(py_module.py(), "wall_aspect")?; + submodule.add( + "__doc__", + "Wall aspect (orientation) detection using the Goodwin filter algorithm.", + )?; + submodule.add_function(wrap_pyfunction!( + wall_aspect::compute_wall_aspect, + &submodule + )?)?; + 
submodule.add_class::()?; + py_module.add_submodule(&submodule)?; + Ok(()) +} diff --git a/rust/src/morphology.rs b/rust/src/morphology.rs new file mode 100644 index 0000000..fbe8b8e --- /dev/null +++ b/rust/src/morphology.rs @@ -0,0 +1,100 @@ +//! Morphological operations (binary dilation). +//! +//! Replaces the pure-Python implementation in `physics/morphology.py` +//! with an optimized Rust version using slice-based shift-and-OR. + +use ndarray::{s, Array2, ArrayView2, Zip}; +use numpy::{IntoPyArray, PyArray2, PyReadonlyArray2}; +use pyo3::prelude::*; + +/// Binary dilation — pure Rust, no PyO3. +/// +/// Uses the same shift-and-OR strategy as numpy: for each active position +/// in the structuring element, shift the entire grid and OR into the result. +/// This is cache-friendly and SIMD-vectorizable via ndarray's Zip. +pub(crate) fn binary_dilation_pure( + input: ArrayView2, + structure: ArrayView2, + iterations: usize, +) -> Array2 { + let (rows, cols) = input.dim(); + let (sr, sc) = structure.dim(); + let offset_r = (sr / 2) as i32; + let offset_c = (sc / 2) as i32; + + // Collect active structuring element offsets (relative to center) + let offsets: Vec<(i32, i32)> = (0..sr) + .flat_map(|dr| { + (0..sc).filter_map(move |dc| { + if structure[[dr, dc]] != 0 { + Some((dr as i32 - offset_r, dc as i32 - offset_c)) + } else { + None + } + }) + }) + .collect(); + + let mut current = input.to_owned(); + + for _ in 0..iterations { + let mut new_result = Array2::::zeros((rows, cols)); + + for &(dr, dc) in &offsets { + // Compute overlapping ranges for source and destination + let (src_r, dst_r, h) = shift_range(dr, rows); + let (src_c, dst_c, w) = shift_range(dc, cols); + + if h == 0 || w == 0 { + continue; + } + + // Slice-based OR: cache-friendly, SIMD-vectorizable + Zip::from(new_result.slice_mut(s![dst_r..dst_r + h, dst_c..dst_c + w])) + .and(current.slice(s![src_r..src_r + h, src_c..src_c + w])) + .for_each(|dst, &src| *dst |= src); + } + + current = 
new_result; + } + + current +} + +/// Compute source start, destination start, and length for a shift offset. +#[inline] +fn shift_range(offset: i32, size: usize) -> (usize, usize, usize) { + let n = size as i32; + if offset >= 0 { + // Shift right/down: source starts at 0, dest starts at offset + let len = (n - offset) as usize; + (0, offset as usize, len) + } else { + // Shift left/up: source starts at -offset, dest starts at 0 + let len = (n + offset) as usize; + ((-offset) as usize, 0, len) + } +} + +/// Binary dilation (PyO3 wrapper). +/// +/// Args: +/// input: 2D array (uint8, 0/1). +/// structure: 3×3 structuring element (uint8, 0/1). +/// iterations: Number of dilation passes. +/// +/// Returns: +/// Dilated 2D array (uint8, 0/1). +#[pyfunction] +pub fn binary_dilation( + py: Python<'_>, + input: PyReadonlyArray2, + structure: PyReadonlyArray2, + iterations: usize, +) -> PyResult>> { + let input_v = input.as_array(); + let struct_v = structure.as_array(); + + let result = binary_dilation_pure(input_v, struct_v, iterations); + Ok(result.into_pyarray(py).unbind()) +} diff --git a/rust/src/patch_radiation.rs b/rust/src/patch_radiation.rs index 3012b16..a1f2fa1 100644 --- a/rust/src/patch_radiation.rs +++ b/rust/src/patch_radiation.rs @@ -6,7 +6,6 @@ use std::f32::consts::PI; // Vectorized functions removed as they were unused. - pub fn longwave_from_sky_pixel( lsky_side: f32, lsky_down: f32, @@ -41,7 +40,6 @@ pub fn longwave_from_sky_pixel( // Vectorized functions removed as they were unused. - pub fn longwave_from_veg_pixel( steradian: f32, angle_of_incidence: f32, @@ -84,7 +82,6 @@ pub fn longwave_from_veg_pixel( // Vectorized functions removed as they were unused. - pub fn longwave_from_buildings_pixel( steradian: f32, angle_of_incidence: f32, @@ -150,7 +147,6 @@ pub fn longwave_from_buildings_pixel( // Vectorized functions removed as they were unused. 
- pub fn longwave_from_buildings_wall_scheme_pixel( voxel_table: ndarray::ArrayView2, voxel_map_val: usize, @@ -203,7 +199,6 @@ pub fn longwave_from_buildings_wall_scheme_pixel( // Vectorized functions removed as they were unused. - pub fn reflected_longwave_pixel( steradian: f32, angle_of_incidence: f32, diff --git a/rust/src/perez.rs b/rust/src/perez.rs new file mode 100644 index 0000000..a4bd62e --- /dev/null +++ b/rust/src/perez.rs @@ -0,0 +1,444 @@ +//! Perez sky luminance distribution model (all-weather). +//! +//! Ported from Python `Perez_v3.py` and `create_patches.py`. +//! +//! Reference: Perez, Seals & Michalsky (1993), Solar Energy 50(3), 235–245. + +use ndarray::{Array1, Array2}; +use numpy::IntoPyArray; +use pyo3::prelude::*; +use std::collections::HashMap; +use std::sync::{Arc, Mutex, OnceLock}; + +const PI: f32 = std::f32::consts::PI; +const DEG2RAD: f32 = PI / 180.0; +const RAD2DEG: f32 = 180.0 / PI; +const MIN_SUN_ELEVATION_DEG: f32 = 3.0; + +// ── Perez model coefficients (8 clearness bins × 4 polynomial terms) ──────── + +const M_A1: [f32; 8] = [ + 1.3525, -1.2219, -1.1000, -0.5484, -0.6000, -1.0156, -1.0000, -1.0500, +]; +const M_A2: [f32; 8] = [ + -0.2576, -0.7730, -0.2515, -0.6654, -0.3566, -0.3670, 0.0211, 0.0289, +]; +const M_A3: [f32; 8] = [ + -0.2690, 1.4148, 0.8952, -0.2672, -2.5000, 1.0078, 0.5025, 0.4260, +]; +const M_A4: [f32; 8] = [ + -1.4366, 1.1016, 0.0156, 0.7117, 2.3250, 1.4051, -0.5119, 0.3590, +]; + +const M_B1: [f32; 8] = [ + -0.7670, -0.2054, 0.2782, 0.7234, 0.2937, 0.2875, -0.3000, -0.3250, +]; +const M_B2: [f32; 8] = [ + 0.0007, 0.0367, -0.1812, -0.6219, 0.0496, -0.5328, 0.1922, 0.1156, +]; +const M_B3: [f32; 8] = [ + 1.2734, -3.9128, -4.5000, -5.6812, -5.6812, -3.8500, 0.7023, 0.7781, +]; +const M_B4: [f32; 8] = [ + -0.1233, 0.9156, 1.1766, 2.6297, 1.8415, 3.3750, -1.6317, 0.0025, +]; + +const M_C1: [f32; 8] = [ + 2.8000, 6.9750, 24.7219, 33.3389, 21.0000, 14.0000, 19.0000, 31.0625, +]; +const M_C2: [f32; 8] = [ + 0.6004, 
0.1774, -13.0812, -18.3000, -4.7656, -0.9999, -5.0000, -14.5000, +]; +const M_C3: [f32; 8] = [ + 1.2375, 6.4477, -37.7000, -62.2500, -21.5906, -7.1406, 1.2438, -46.1148, +]; +const M_C4: [f32; 8] = [ + 1.0000, -0.1239, 34.8438, 52.0781, 7.2492, 7.5469, -1.9094, 55.3750, +]; + +const M_D1: [f32; 8] = [ + 1.8734, -1.5798, -5.0000, -3.5000, -3.5000, -3.4000, -4.0000, -7.2312, +]; +const M_D2: [f32; 8] = [ + 0.6297, -0.5081, 1.5218, 0.0016, -0.1554, -0.1078, 0.0250, 0.4050, +]; +const M_D3: [f32; 8] = [ + 0.9738, -1.7812, 3.9229, 1.1477, 1.4062, -1.0750, 0.3844, 13.3500, +]; +const M_D4: [f32; 8] = [ + 0.2809, 0.1080, -2.6204, 0.1062, 0.3988, 1.5702, 0.2656, 0.6234, +]; + +const M_E1: [f32; 8] = [ + 0.0356, 0.2624, -0.0156, 0.4659, 0.0032, -0.0672, 1.0468, 1.5000, +]; +const M_E2: [f32; 8] = [ + -0.1246, 0.0672, 0.1597, -0.3296, 0.0766, 0.4016, -0.3788, -0.6426, +]; +const M_E3: [f32; 8] = [ + -0.5718, -0.2190, 0.4199, -0.0876, -0.0656, 0.3017, -2.4517, 1.8564, +]; +const M_E4: [f32; 8] = [ + 0.9938, -0.4285, -0.5562, -0.0329, -0.1294, -0.4844, 1.4656, 0.5636, +]; + +// ── Patch layout (Robinson & Stone sky vault decomposition) ───────────────── + +#[derive(Clone)] +struct PatchLayoutCache { + altitudes: Arc>, + azimuths: Arc>, + steradians: Arc>, +} + +/// Create sky vault patches for a given patch option. +/// +/// Returns `(altitudes_deg, azimuths_deg)` — each a `Vec` of length N patches. 
+pub(crate) fn create_patches(patch_option: i32) -> (Vec, Vec) { + let (skyvault_alt_int, azistart, patches_in_band): (&[f32], &[f32], &[i32]) = match patch_option + { + 1 => ( + &[6., 18., 30., 42., 54., 66., 78., 90.], + &[0., 4., 2., 5., 8., 0., 10., 0.], + &[30, 30, 24, 24, 18, 12, 6, 1], + ), + 2 => ( + &[6., 18., 30., 42., 54., 66., 78., 90.], + &[0., 4., 2., 5., 8., 0., 10., 0.], + &[31, 30, 28, 24, 19, 13, 7, 1], + ), + 3 => ( + &[6., 18., 30., 42., 54., 66., 78., 90.], + &[0., 4., 2., 5., 8., 0., 10., 0.], + &[62, 60, 56, 48, 38, 26, 14, 1], + ), + 4 => ( + &[ + 3., 9., 15., 21., 27., 33., 39., 45., 51., 57., 63., 69., 75., 81., 90., + ], + &[0., 0., 4., 4., 2., 2., 5., 5., 8., 8., 0., 0., 10., 10., 0.], + &[62, 62, 60, 60, 56, 56, 48, 48, 38, 38, 26, 26, 14, 14, 1], + ), + _ => ( + // Default to option 2 (153 patches) + &[6., 18., 30., 42., 54., 66., 78., 90.], + &[0., 4., 2., 5., 8., 0., 10., 0.], + &[31, 30, 28, 24, 19, 13, 7, 1], + ), + }; + + let total: usize = patches_in_band.iter().map(|&p| p as usize).sum(); + let mut altitudes = Vec::with_capacity(total); + let mut azimuths = Vec::with_capacity(total); + + for (j, &alt) in skyvault_alt_int.iter().enumerate() { + let n = patches_in_band[j] as usize; + let azi_step = 360.0 / patches_in_band[j] as f32; + let azi_off = azistart[j]; + for k in 0..n { + altitudes.push(alt); + azimuths.push(k as f32 * azi_step + azi_off); + } + } + + (altitudes, azimuths) +} + +/// Compute steradians for each sky patch. +/// +/// Only depends on the patch altitude layout (constant for a given patch_option). 
+pub(crate) fn compute_steradians(altitudes: &[f32]) -> Array1 { + let n = altitudes.len(); + let mut steradian = Array1::::zeros(n); + + // Unique altitudes and counts + let mut unique_alts: Vec = Vec::new(); + let mut counts: Vec = Vec::new(); + for &a in altitudes { + if let Some(pos) = unique_alts.iter().position(|&u| (u - a).abs() < 1e-6) { + counts[pos] += 1; + } else { + unique_alts.push(a); + counts.push(1); + } + } + + let first_alt = altitudes[0]; + for i in 0..n { + let alt_i = altitudes[i]; + let count = counts[unique_alts + .iter() + .position(|&u| (u - alt_i).abs() < 1e-6) + .unwrap()]; + if count > 1 { + steradian[i] = (360.0 / count as f32) + * DEG2RAD + * (((alt_i + first_alt) * DEG2RAD).sin() - ((alt_i - first_alt) * DEG2RAD).sin()); + } else { + // Single patch in band (e.g. 90°) + let prev_alt = altitudes[i - 1]; + steradian[i] = (360.0 / count as f32) + * DEG2RAD + * ((alt_i * DEG2RAD).sin() - ((prev_alt + first_alt) * DEG2RAD).sin()); + } + } + + steradian +} + +fn patch_layout_for_option(patch_option: i32) -> PatchLayoutCache { + static CACHE: OnceLock>> = OnceLock::new(); + let cache = CACHE.get_or_init(|| Mutex::new(HashMap::new())); + let mut guard = cache.lock().expect("patch layout cache mutex poisoned"); + + guard + .entry(patch_option) + .or_insert_with(|| { + let (alts, azis) = create_patches(patch_option); + let ster = compute_steradians(&alts).to_vec(); + PatchLayoutCache { + altitudes: Arc::new(alts), + azimuths: Arc::new(azis), + steradians: Arc::new(ster), + } + }) + .clone() +} + +/// Cached patch geometry for a patch option. +/// +/// Returns `(altitudes_deg, azimuths_deg, steradians)`, each backed by an `Arc>` +/// so callers can reuse stable buffers across timesteps without reallocating. 
+pub(crate) fn patch_alt_azi_steradians_for_patch_option( + patch_option: i32, +) -> (Arc>, Arc>, Arc>) { + let layout = patch_layout_for_option(patch_option); + (layout.altitudes, layout.azimuths, layout.steradians) +} + +/// Cached steradians for a patch layout option. +/// +/// Steradians depend only on patch geometry, which is fixed per `patch_option`. +/// Caching avoids recreating patch geometry and recomputing steradians every timestep. +pub(crate) fn steradians_for_patch_option(patch_option: i32) -> Array1 { + let layout = patch_layout_for_option(patch_option); + Array1::from(layout.steradians.as_ref().clone()) +} + +/// Perez all-weather sky luminance distribution. +/// +/// Returns an Nx3 array: `[altitude_deg, azimuth_deg, luminance]` per patch. +/// +/// Matches the Python `Perez_v3` with `patchchoice=1`. +pub(crate) fn perez_v3( + zen_deg: f32, + azimuth_deg: f32, + rad_d: f32, + rad_i: f32, + jday: i32, + patch_option: i32, +) -> Array2 { + let (altitudes, azimuths, _) = patch_alt_azi_steradians_for_patch_option(patch_option); + let n = altitudes.len(); + let altitude_deg = 90.0 - zen_deg; + + // Low sun or very low diffuse → uniform distribution + if altitude_deg < MIN_SUN_ELEVATION_DEG || rad_d < 10.0 { + let uniform_lv = 1.0 / n as f32; + let mut lv = Array2::::zeros((n, 3)); + for i in 0..n { + lv[[i, 0]] = altitudes[i]; + lv[[i, 1]] = azimuths[i]; + lv[[i, 2]] = uniform_lv; + } + return lv; + } + + let zen = zen_deg * DEG2RAD; + let azimuth = azimuth_deg * DEG2RAD; + let altitude = altitude_deg * DEG2RAD; + + // Sky clearness + let idh_safe = rad_d.max(1.0); + let perez_clearness = + ((idh_safe + rad_i) / idh_safe + 1.041 * zen.powi(3)) / (1.0 + 1.041 * zen.powi(3)); + + // Extra-terrestrial radiation (Robinson correction) + let day_angle = jday as f32 * 2.0 * PI / 365.0; + let i0 = 1367.0 + * (1.00011 + + 0.034221 * day_angle.cos() + + 0.00128 * day_angle.sin() + + 0.000719 * (2.0 * day_angle).cos() + + 0.000077 * (2.0 * day_angle).sin()); + + 
// Optical air mass (Kasten & Young 1989) + let air_mass = if altitude >= 10.0 * DEG2RAD { + 1.0 / altitude.sin() + } else if altitude > 0.0 { + let alt_deg = altitude * RAD2DEG; + 1.0 / (altitude.sin() + 0.50572 * (alt_deg + 6.07995_f32).powf(-1.6364)) + } else { + 40.0 + } + .min(40.0); + + // Sky brightness + let perez_brightness = if rad_d <= 10.0 { + 0.0 + } else { + air_mass * rad_d / i0 + }; + + // Clearness bin index (0–7) + let bin = if perez_clearness < 1.065 { + 0 + } else if perez_clearness < 1.230 { + 1 + } else if perez_clearness < 1.500 { + 2 + } else if perez_clearness < 1.950 { + 3 + } else if perez_clearness < 2.800 { + 4 + } else if perez_clearness < 4.500 { + 5 + } else if perez_clearness < 6.200 { + 6 + } else { + 7 + }; + + // Perez model parameters + let m_a = M_A1[bin] + M_A2[bin] * zen + perez_brightness * (M_A3[bin] + M_A4[bin] * zen); + let m_b = M_B1[bin] + M_B2[bin] * zen + perez_brightness * (M_B3[bin] + M_B4[bin] * zen); + let m_e = M_E1[bin] + M_E2[bin] * zen + perez_brightness * (M_E3[bin] + M_E4[bin] * zen); + + let (m_c, m_d) = if bin > 0 { + let c = M_C1[bin] + M_C2[bin] * zen + perez_brightness * (M_C3[bin] + M_C4[bin] * zen); + let d = M_D1[bin] + M_D2[bin] * zen + perez_brightness * (M_D3[bin] + M_D4[bin] * zen); + (c, d) + } else { + // Different equations for clearness bin 0 (Robinson) + let c = (perez_brightness * (M_C1[0] + M_C2[0] * zen)) + .powf(M_C3[0]) + .exp() + - 1.0; + let d = -(perez_brightness * (M_D1[0] + M_D2[0] * zen)).exp() + + M_D3[0] + + perez_brightness * M_D4[0]; + (c, d) + }; + + // Compute luminance for each patch + let sin_alt = altitude.sin(); + let cos_alt = altitude.cos(); + let mut lv_vals = Vec::with_capacity(n); + let mut lv_sum: f32 = 0.0; + + for i in 0..n { + let sv_alt = altitudes[i] * DEG2RAD; + let sv_azi = azimuths[i] * DEG2RAD; + let sv_zen = (90.0 - altitudes[i]) * DEG2RAD; + + // Angular distance from sun (Robinson formula) + let cos_sky_sun = + sv_alt.sin() * sin_alt + cos_alt * 
sv_alt.cos() * (sv_azi - azimuth).abs().cos(); + let cos_sv_zen = sv_zen.cos(); + + // Perez luminance + let horizon = 1.0 + m_a * (m_b / cos_sv_zen).exp(); + let ang = cos_sky_sun.clamp(-1.0, 1.0).acos(); + let circumsolar = 1.0 + m_c * (m_d * ang).exp() + m_e * cos_sky_sun * cos_sky_sun; + let val = horizon * circumsolar; + + lv_vals.push(val); + lv_sum += val; + } + + // Check for negative luminances → uniform fallback + let has_negative = lv_vals.iter().any(|&v| v < 0.0); + if has_negative || lv_sum <= 0.0 { + let uniform = 1.0 / n as f32; + for v in lv_vals.iter_mut() { + *v = uniform; + } + } else { + // Normalise + for v in lv_vals.iter_mut() { + *v /= lv_sum; + } + } + + // Build Nx3 output: [altitude_deg, azimuth_deg, luminance] + let mut lv = Array2::::zeros((n, 3)); + for i in 0..n { + lv[[i, 0]] = altitudes[i]; + lv[[i, 1]] = azimuths[i]; + lv[[i, 2]] = lv_vals[i]; + } + + lv +} + +// ── PyO3 wrappers (for testing parity with Python implementation) ─────────── + +#[pyfunction] +pub fn perez_v3_py( + py: Python, + zen_deg: f32, + azimuth_deg: f32, + rad_d: f32, + rad_i: f32, + jday: i32, + patch_option: i32, +) -> PyObject { + perez_v3(zen_deg, azimuth_deg, rad_d, rad_i, jday, patch_option) + .into_pyarray(py) + .into() +} + +#[pyfunction] +pub fn compute_steradians_py(py: Python, patch_option: i32) -> PyObject { + let (alts, _) = create_patches(patch_option); + compute_steradians(&alts).into_pyarray(py).into() +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_create_patches_option2_count() { + let (alts, azis) = create_patches(2); + assert_eq!(alts.len(), 153); + assert_eq!(azis.len(), 153); + } + + #[test] + fn test_perez_uniform_low_sun() { + let lv = perez_v3(89.0, 180.0, 50.0, 10.0, 180, 2); + assert_eq!(lv.shape(), &[153, 3]); + // Low altitude (1°) → uniform + let first_lum = lv[[0, 2]]; + let last_lum = lv[[152, 2]]; + assert!((first_lum - last_lum).abs() < 1e-10); + } + + #[test] + fn test_perez_normalised() { + let lv = 
perez_v3(30.0, 180.0, 200.0, 400.0, 180, 2); + let sum: f32 = lv.column(2).sum(); + assert!( + (sum - 1.0).abs() < 1e-5, + "luminance sum = {sum}, expected 1.0" + ); + } + + #[test] + fn test_steradians_length() { + let (alts, _) = create_patches(2); + let ster = compute_steradians(&alts); + assert_eq!(ster.len(), 153); + } +} diff --git a/rust/src/pet.rs b/rust/src/pet.rs new file mode 100644 index 0000000..bee96ad --- /dev/null +++ b/rust/src/pet.rs @@ -0,0 +1,400 @@ +use numpy::{PyArray2, PyReadonlyArray2}; +use pyo3::prelude::*; +use rayon::prelude::*; + +/// Physical constants for PET calculation +const PO: f32 = 1013.25; // Reference pressure (hPa) +const P: f32 = 1013.25; // Atmospheric pressure (hPa) +const ROB: f32 = 1.06; +const CB: f32 = 3.64 * 1000.0; +const EMSK: f32 = 0.99; +const EMCL: f32 = 0.95; +const EVAP: f32 = 2.42e6; +const SIGMA: f32 = 5.67e-8; +const CAIR: f32 = 1.01 * 1000.0; + +/// Calculate PET for a single point. +/// +/// Parameters: +/// - ta: Air temperature (°C) +/// - rh: Relative humidity (%) +/// - tmrt: Mean radiant temperature (°C) +/// - v: Wind speed at pedestrian height (m/s) +/// - mbody: Body mass (kg) +/// - age: Age (years) +/// - ht: Height (m) +/// - work: Activity level (W) +/// - icl: Clothing insulation (clo, 0-5) +/// - sex: 1=male, 2=female +#[inline] +fn pet_single( + ta: f32, + rh: f32, + tmrt: f32, + v: f32, + mbody: f32, + age: f32, + ht: f32, + work: f32, + icl: f32, + sex: i32, +) -> f32 { + // Humidity conversion + let vps = 6.107 * 10.0_f32.powf(7.5 * ta / (238.0 + ta)); + let vpa = rh * vps / 100.0; + + let eta = 0.0; + + // INBODY - metabolic rate calculation + let metbf = 3.19 + * mbody.powf(0.75) + * (1.0 + 0.004 * (30.0 - age) + 0.018 * ((ht * 100.0 / mbody.powf(1.0 / 3.0)) - 42.1)); + let metbm = 3.45 + * mbody.powf(0.75) + * (1.0 + 0.004 * (30.0 - age) + 0.010 * ((ht * 100.0 / mbody.powf(1.0 / 3.0)) - 43.4)); + + let met = if sex == 1 { metbm + work } else { metbf + work }; + + let h = met * (1.0 - 
eta); + let rtv = 1.44e-6 * met; + + // Sensible respiration energy + let tex = 0.47 * ta + 21.0; + let eres = CAIR * (ta - tex) * rtv; + + // Latent respiration energy + let vpex = 6.11 * 10.0_f32.powf(7.45 * tex / (235.0 + tex)); + let erel = 0.623 * EVAP / P * (vpa - vpex) * rtv; + let ere = eres + erel; + + // Calculation constants + let feff = 0.725; + let adu = 0.203 * mbody.powf(0.425) * ht.powf(0.725); + let mut facl = (-2.36 + 173.51 * icl - 100.76 * icl * icl + 19.28 * icl.powi(3)) / 100.0; + if facl > 1.0 { + facl = 1.0; + } + let rcl = (icl / 6.45) / facl; + + let mut y = 1.0; + if icl < 2.0 { + y = (ht - 0.2) / ht; + } + if icl <= 0.6 { + y = 0.5; + } + if icl <= 0.3 { + y = 0.1; + } + + let fcl = 1.0 + 0.15 * icl; + let r2 = adu * (fcl - 1.0 + facl) / (2.0 * std::f32::consts::PI * ht * y); + let r1 = facl * adu / (2.0 * std::f32::consts::PI * ht * y); + let di = r2 - r1; + let acl = adu * facl + adu * (fcl - 1.0); + + let mut tcore = [0.0_f32; 8]; + + let mut wetsk = 0.0; + let mut hc = 2.67 + 6.5 * v.powf(0.67); + hc *= (P / PO).powf(0.55); + + let c_1 = h + ere; + let he = 0.633 * hc / (P * CAIR); + let fec = 1.0 / (1.0 + 0.92 * hc * rcl); + let htcl = 6.28 * ht * y * di / (rcl * (r2 / r1).ln() * acl); + let aeff = adu * feff; + let c_2 = adu * ROB * CB; + let c_5 = 0.0208 * c_2; + let c_6 = 0.76075 * c_2; + let rdsk = 0.79e7; + let rdcl = 0.0; + + let mut count2 = 0; + let mut j = 1_usize; + + let mut tsk = 34.0_f32; + let mut tcl = (ta + tmrt + tsk) / 3.0; + let mut vb = 0.0_f32; + let mut esw = 0.0_f32; + let mut vpts = 0.0_f32; + let mut c_9 = 0.0_f32; + let mut c_11 = 0.0_f32; + + while count2 == 0 && j < 7 { + tsk = 34.0; + let mut count1 = 0; + tcl = (ta + tmrt + tsk) / 3.0; + let mut count3 = 1; + let mut enbal2 = 0.0_f32; + + while count1 <= 3 { + let mut enbal = 0.0_f32; + + while enbal * enbal2 >= 0.0 && count3 < 200 { + enbal2 = enbal; + + let rclo2 = EMCL * SIGMA * ((tcl + 273.2).powi(4) - (tmrt + 273.2).powi(4)) * feff; + tsk = 1.0 / 
htcl * (hc * (tcl - ta) + rclo2) + tcl; + + // Radiation balance + let rbare = aeff + * (1.0 - facl) + * EMSK + * SIGMA + * ((tmrt + 273.2).powi(4) - (tsk + 273.2).powi(4)); + let rclo = + feff * acl * EMCL * SIGMA * ((tmrt + 273.2).powi(4) - (tcl + 273.2).powi(4)); + let rsum = rbare + rclo; + + // Convection + let cbare = hc * (ta - tsk) * adu * (1.0 - facl); + let cclo = hc * (ta - tcl) * acl; + let csum = cbare + cclo; + + // Core temperature + let c_3 = 18.0 - 0.5 * tsk; + let c_4 = 5.28 * adu * c_3; + let c_7 = c_4 - c_6 - tsk * c_5; + let c_8 = -c_1 * c_3 - tsk * c_4 + tsk * c_6; + c_9 = c_7 * c_7 - 4.0 * c_5 * c_8; + let c_10 = 5.28 * adu - c_6 - c_5 * tsk; + c_11 = c_10 * c_10 - 4.0 * c_5 * (c_6 * tsk - c_1 - 5.28 * adu * tsk); + + let tsk_adj = if tsk == 36.0 { 36.01 } else { tsk }; + + tcore[7] = c_1 / (5.28 * adu + c_2 * 6.3 / 3600.0) + tsk_adj; + tcore[3] = c_1 + / (5.28 * adu + (c_2 * 6.3 / 3600.0) / (1.0 + 0.5 * (34.0 - tsk_adj))) + + tsk_adj; + + if c_11 >= 0.0 { + tcore[6] = (-c_10 - c_11.sqrt()) / (2.0 * c_5); + tcore[1] = (-c_10 + c_11.sqrt()) / (2.0 * c_5); + } + if c_9 >= 0.0 { + tcore[2] = (-c_7 + c_9.abs().sqrt()) / (2.0 * c_5); + tcore[5] = (-c_7 - c_9.abs().sqrt()) / (2.0 * c_5); + } + tcore[4] = c_1 / (5.28 * adu + c_2 * 1.0 / 40.0) + tsk_adj; + + // Transpiration + let tbody = 0.1 * tsk + 0.9 * tcore[j]; + let mut sw = 304.94 * (tbody - 36.6) * adu / 3600000.0; + vpts = 6.11 * 10.0_f32.powf(7.45 * tsk / (235.0 + tsk)); + + if tbody <= 36.6 { + sw = 0.0; + } + if sex == 2 { + sw *= 0.7; + } + let eswphy = -sw * EVAP; + + let eswpot = he * (vpa - vpts) * adu * EVAP * fec; + wetsk = eswphy / eswpot; + if wetsk > 1.0 { + wetsk = 1.0; + } + let eswdif = eswphy - eswpot; + esw = if eswdif <= 0.0 { eswpot } else { eswphy }; + if esw > 0.0 { + esw = 0.0; + } + + // Diffusion + let ed = EVAP / (rdsk + rdcl) * adu * (1.0 - wetsk) * (vpa - vpts); + + // MAX VB + let mut vb1 = 34.0 - tsk; + let mut vb2 = tcore[j] - 36.6; + if vb2 < 0.0 { + vb2 = 0.0; 
+ } + if vb1 < 0.0 { + vb1 = 0.0; + } + vb = (6.3 + 75.0 * vb2) / (1.0 + 0.5 * vb1); + + // Energy balance + enbal = h + ed + ere + esw + csum + rsum; + + // Clothing temperature iteration + let xx = match count1 { + 0 => 1.0, + 1 => 0.1, + 2 => 0.01, + _ => 0.001, + }; + + if enbal > 0.0 { + tcl += xx; + } else { + tcl -= xx; + } + + count3 += 1; + } + count1 += 1; + enbal2 = 0.0; + } + + // Check convergence conditions for different j modes + let converged = match j { + 2 | 5 => c_9 >= 0.0 && tcore[j] >= 36.6 && tsk <= 34.050, + 6 | 1 => c_11 > 0.0 && tcore[j] >= 36.6 && tsk > 33.850, + 3 => tcore[j] < 36.6 && tsk <= 34.000, + 7 => tcore[j] < 36.6 && tsk > 34.000, + 4 => true, + _ => false, + }; + + if converged { + let vb_check = (j != 4 && vb >= 91.0) || (j == 4 && vb < 89.0); + if !vb_check { + if vb > 90.0 { + vb = 90.0; + } + count2 = 1; + } + } + + j += 1; + } + + // PET calculation phase + let mut tx = ta; + let mut enbal2 = 0.0_f32; + let mut count1 = 0; + + hc = 2.67 + 6.5 * 0.1_f32.powf(0.67); + hc *= (P / PO).powf(0.55); + + while count1 <= 3 { + let mut enbal = 0.0_f32; + + while enbal * enbal2 >= 0.0 { + enbal2 = enbal; + + // Radiation balance + let rbare = + aeff * (1.0 - facl) * EMSK * SIGMA * ((tx + 273.2).powi(4) - (tsk + 273.2).powi(4)); + let rclo = feff * acl * EMCL * SIGMA * ((tx + 273.2).powi(4) - (tcl + 273.2).powi(4)); + let rsum = rbare + rclo; + + // Convection + let cbare = hc * (tx - tsk) * adu * (1.0 - facl); + let cclo = hc * (tx - tcl) * acl; + let csum = cbare + cclo; + + // Diffusion + let ed = EVAP / (rdsk + rdcl) * adu * (1.0 - wetsk) * (12.0 - vpts); + + // Respiration + let tex = 0.47 * tx + 21.0; + let eres = CAIR * (tx - tex) * rtv; + let vpex = 6.11 * 10.0_f32.powf(7.45 * tex / (235.0 + tex)); + let erel = 0.623 * EVAP / P * (12.0 - vpex) * rtv; + let ere = eres + erel; + + // Energy balance + enbal = h + ed + ere + esw + csum + rsum; + + // Iteration step + let xx = match count1 { + 0 => 1.0, + 1 => 0.1, + 2 => 0.01, + _ 
=> 0.001, + }; + + if enbal > 0.0 { + tx -= xx; + } else if enbal < 0.0 { + tx += xx; + } + } + count1 += 1; + enbal2 = 0.0; + } + + tx +} + +/// Calculate PET for a single point (Python interface). +#[pyfunction] +pub fn pet_calculate( + ta: f32, + rh: f32, + tmrt: f32, + va: f32, + mbody: f32, + age: f32, + height: f32, + activity: f32, + clo: f32, + sex: i32, +) -> f32 { + pet_single(ta, rh, tmrt, va, mbody, age, height, activity, clo, sex) +} + +/// Calculate PET for a 2D grid using parallel processing. +/// +/// Parameters: +/// - ta: Air temperature (°C) - scalar +/// - rh: Relative humidity (%) - scalar +/// - tmrt: Mean radiant temperature grid (°C) +/// - va: Wind speed grid (m/s) +/// - mbody: Body mass (kg) +/// - age: Age (years) +/// - height: Height (m) +/// - activity: Activity level (W) +/// - clo: Clothing insulation (clo) +/// - sex: 1=male, 2=female +#[pyfunction] +pub fn pet_grid<'py>( + py: Python<'py>, + ta: f32, + rh: f32, + tmrt: PyReadonlyArray2<'py, f32>, + va: PyReadonlyArray2<'py, f32>, + mbody: f32, + age: f32, + height: f32, + activity: f32, + clo: f32, + sex: i32, +) -> PyResult>> { + let tmrt_arr = tmrt.as_array(); + let va_arr = va.as_array(); + + let (rows, cols) = tmrt_arr.dim(); + + // Create output array + let mut result = ndarray::Array2::zeros((rows, cols)); + + // Process in parallel using rayon + result + .as_slice_mut() + .unwrap() + .par_iter_mut() + .enumerate() + .for_each(|(idx, out)| { + let row = idx / cols; + let col = idx % cols; + + let tmrt_val = tmrt_arr[[row, col]]; + let va_val = va_arr[[row, col]]; + + // Check for invalid pixel values (NaN, nodata, non-finite) + if !tmrt_val.is_finite() || !va_val.is_finite() || va_val <= 0.0 || tmrt_val <= -999.0 { + *out = f32::NAN; + } else { + *out = pet_single( + ta, rh, tmrt_val, va_val, mbody, age, height, activity, clo, sex, + ); + } + }); + + Ok(PyArray2::from_owned_array(py, result)) +} diff --git a/rust/src/pipeline.rs b/rust/src/pipeline.rs new file mode 100644 
index 0000000..bee066a --- /dev/null +++ b/rust/src/pipeline.rs @@ -0,0 +1,1704 @@ +//! Fused timestep pipeline — single FFI entrance/exit per timestep. +//! +//! Orchestrates: shadows → ground_temp → GVF → thermal_delay → radiation → Tmrt +//! All intermediate arrays stay as ndarray::Array2 — never cross FFI boundary. +//! +//! Supports both isotropic and anisotropic (Perez) sky models. + +use ndarray::{Array1, Array2, ArrayView1, ArrayView2, ArrayView3, Zip}; +use numpy::{IntoPyArray, PyArray2, PyReadonlyArray2, PyReadonlyArray3}; +use pyo3::prelude::*; +use rayon::prelude::*; +use std::collections::{HashMap, VecDeque}; +use std::sync::{Arc, Mutex, OnceLock}; + +use crate::ground::{compute_ground_temperature_pure, ts_wave_delay_batch_pure, GroundTempResult}; +use crate::gvf::{gvf_calc_pure, gvf_calc_with_cache, GvfResultPure}; +use crate::gvf_geometry::{precompute_gvf_geometry, GvfGeometryCache}; +use crate::shadowing::{calculate_shadows_rust, ShadowingResultRust}; +use crate::sky::{anisotropic_sky_pure, cylindric_wedge_pure_masked}; +use crate::tmrt::compute_tmrt_from_dir_sums_pure; +use crate::vegetation::{kside_veg_isotropic_pure, lside_veg_pure}; + +#[cfg(feature = "gpu")] +use crate::gpu::AnisoGpuContext; + +use std::time::Instant; + +const PI: f32 = std::f32::consts::PI; +const SBC: f32 = 5.67e-8; +const KELVIN_OFFSET: f32 = 273.15; + +/// Check once per process whether timing output is enabled (``SOLWEIG_TIMING=1``). 
+fn timing_enabled() -> bool { + static ENABLED: std::sync::OnceLock = std::sync::OnceLock::new(); + *ENABLED.get_or_init(|| { + std::env::var("SOLWEIG_TIMING") + .map(|v| v == "1" || v.eq_ignore_ascii_case("true")) + .unwrap_or(false) + }) +} + +// ── GPU anisotropic sky context (lazy-initialized, shares device with shadows) ── + +#[cfg(feature = "gpu")] +static ANISO_GPU_CONTEXT: OnceLock> = OnceLock::new(); + +#[cfg(feature = "gpu")] +static ANISO_GPU_ENABLED: std::sync::atomic::AtomicBool = std::sync::atomic::AtomicBool::new(true); + +#[cfg(feature = "gpu")] +fn get_aniso_gpu_context() -> Option<&'static AnisoGpuContext> { + if !ANISO_GPU_ENABLED.load(std::sync::atomic::Ordering::Relaxed) { + return None; + } + ANISO_GPU_CONTEXT + .get_or_init(|| { + // Share device/queue from the shadow GPU context + let shadow_ctx = crate::shadowing::get_gpu_context()?; + let device = shadow_ctx.device.clone(); + let queue = shadow_ctx.queue.clone(); + let ctx = AnisoGpuContext::new(device, queue); + eprintln!("[GPU] Anisotropic sky GPU context initialized"); + Some(ctx) + }) + .as_ref() +} + +#[cfg(feature = "gpu")] +#[pyfunction] +/// Enable GPU acceleration for anisotropic sky computation +pub fn enable_aniso_gpu() { + ANISO_GPU_ENABLED.store(true, std::sync::atomic::Ordering::Relaxed); + eprintln!("[GPU] Anisotropic sky GPU acceleration enabled"); +} + +#[cfg(feature = "gpu")] +#[pyfunction] +/// Disable GPU acceleration for anisotropic sky computation (CPU fallback) +pub fn disable_aniso_gpu() { + ANISO_GPU_ENABLED.store(false, std::sync::atomic::Ordering::Relaxed); + eprintln!("[GPU] Anisotropic sky GPU acceleration disabled"); +} + +#[cfg(feature = "gpu")] +#[pyfunction] +/// Check if GPU acceleration is enabled for anisotropic sky +pub fn is_aniso_gpu_enabled() -> bool { + ANISO_GPU_ENABLED.load(std::sync::atomic::Ordering::Relaxed) +} + +// ── Input structs (created once in Python, passed by reference) ─────────── + +/// Weather scalars for a single timestep. 
#[pyclass]
#[derive(Clone)]
pub struct WeatherScalars {
    // Sun position (azimuth/altitude/zenith); degrees presumed — confirm against caller
    #[pyo3(get, set)]
    pub sun_azimuth: f32,
    #[pyo3(get, set)]
    pub sun_altitude: f32,
    #[pyo3(get, set)]
    pub sun_zenith: f32,
    // Air temperature
    #[pyo3(get, set)]
    pub ta: f32,
    // Relative humidity
    #[pyo3(get, set)]
    pub rh: f32,
    // Global / direct / diffuse radiation components
    #[pyo3(get, set)]
    pub global_rad: f32,
    #[pyo3(get, set)]
    pub direct_rad: f32,
    #[pyo3(get, set)]
    pub diffuse_rad: f32,
    // Maximum solar altitude of the day
    #[pyo3(get, set)]
    pub altmax: f32,
    #[pyo3(get, set)]
    pub clearness_index: f32,
    // Decimal time of this timestep
    #[pyo3(get, set)]
    pub dectime: f32,
    // Sunrise time (decimal) — presumed from name; verify against caller
    #[pyo3(get, set)]
    pub snup: f32,
    #[pyo3(get, set)]
    pub rad_g0: f32,
    // Zenith angle in degrees (redundant with sun_zenith? kept as provided)
    #[pyo3(get, set)]
    pub zen_deg: f32,
    // Vegetation transmissivity
    #[pyo3(get, set)]
    pub psi: f32,
    #[pyo3(get, set)]
    pub is_daytime: bool,
    // Day of year
    #[pyo3(get, set)]
    pub jday: i32,
    // Sky patch layout selector (see perez module)
    #[pyo3(get, set)]
    pub patch_option: i32,
}

#[pymethods]
impl WeatherScalars {
    // Constructor exposed to Python; field order matches struct declaration.
    #[new]
    #[allow(clippy::too_many_arguments)]
    pub fn new(
        sun_azimuth: f32,
        sun_altitude: f32,
        sun_zenith: f32,
        ta: f32,
        rh: f32,
        global_rad: f32,
        direct_rad: f32,
        diffuse_rad: f32,
        altmax: f32,
        clearness_index: f32,
        dectime: f32,
        snup: f32,
        rad_g0: f32,
        zen_deg: f32,
        psi: f32,
        is_daytime: bool,
        jday: i32,
        patch_option: i32,
    ) -> Self {
        Self {
            sun_azimuth,
            sun_altitude,
            sun_zenith,
            ta,
            rh,
            global_rad,
            direct_rad,
            diffuse_rad,
            altmax,
            clearness_index,
            dectime,
            snup,
            rad_g0,
            zen_deg,
            psi,
            is_daytime,
            jday,
            patch_option,
        }
    }
}

/// Human body parameters.
#[pyclass]
#[derive(Clone)]
pub struct HumanScalars {
    // Body height (m presumed — confirm)
    #[pyo3(get, set)]
    pub height: f32,
    // Shortwave / longwave absorption coefficients
    #[pyo3(get, set)]
    pub abs_k: f32,
    #[pyo3(get, set)]
    pub abs_l: f32,
    // Posture flag: standing vs sitting
    #[pyo3(get, set)]
    pub is_standing: bool,
}

#[pymethods]
impl HumanScalars {
    #[new]
    pub fn new(height: f32, abs_k: f32, abs_l: f32, is_standing: bool) -> Self {
        Self {
            height,
            abs_k,
            abs_l,
            is_standing,
        }
    }
}

/// Configuration scalars (constant across timesteps).
+#[pyclass] +#[derive(Clone)] +pub struct ConfigScalars { + #[pyo3(get, set)] + pub pixel_size: f32, + #[pyo3(get, set)] + pub max_height: f32, + #[pyo3(get, set)] + pub albedo_wall: f32, + #[pyo3(get, set)] + pub emis_wall: f32, + #[pyo3(get, set)] + pub tgk_wall: f32, + #[pyo3(get, set)] + pub tstart_wall: f32, + #[pyo3(get, set)] + pub tmaxlst_wall: f32, + #[pyo3(get, set)] + pub use_veg: bool, + #[pyo3(get, set)] + pub has_walls: bool, + #[pyo3(get, set)] + pub conifer: bool, + #[pyo3(get, set)] + pub use_anisotropic: bool, + #[pyo3(get, set)] + pub max_shadow_distance_m: f32, +} + +#[pymethods] +impl ConfigScalars { + #[new] + #[allow(clippy::too_many_arguments)] + pub fn new( + pixel_size: f32, + max_height: f32, + albedo_wall: f32, + emis_wall: f32, + tgk_wall: f32, + tstart_wall: f32, + tmaxlst_wall: f32, + use_veg: bool, + has_walls: bool, + conifer: bool, + use_anisotropic: bool, + max_shadow_distance_m: f32, + ) -> Self { + Self { + pixel_size, + max_height, + albedo_wall, + emis_wall, + tgk_wall, + tstart_wall, + tmaxlst_wall, + use_veg, + has_walls, + conifer, + use_anisotropic, + max_shadow_distance_m, + } + } +} + +// ── Output struct ────────────────────────────────────────────────────────── + +/// Result from a single fused timestep. +#[pyclass] +pub struct TimestepResult { + #[pyo3(get)] + pub tmrt: Py>, + #[pyo3(get)] + pub shadow: Option>>, + #[pyo3(get)] + pub kdown: Option>>, + #[pyo3(get)] + pub kup: Option>>, + #[pyo3(get)] + pub ldown: Option>>, + #[pyo3(get)] + pub lup: Option>>, + // Updated thermal state arrays (Python extracts and passes back next timestep) + #[pyo3(get)] + pub timeadd: f32, + #[pyo3(get)] + pub tgmap1: Py>, + #[pyo3(get)] + pub tgmap1_e: Py>, + #[pyo3(get)] + pub tgmap1_s: Py>, + #[pyo3(get)] + pub tgmap1_w: Py>, + #[pyo3(get)] + pub tgmap1_n: Py>, + #[pyo3(get)] + pub tgout1: Py>, +} + +/// Raw result struct with owned arrays (no Python types — Send-safe). 
+struct TimestepResultRaw { + tmrt: Array2, + shadow: Option>, + kdown: Option>, + kup: Option>, + ldown: Option>, + lup: Option>, + timeadd: f32, + tgmap1: Array2, + tgmap1_e: Array2, + tgmap1_s: Array2, + tgmap1_w: Array2, + tgmap1_n: Array2, + tgout1: Array2, +} + +const OUT_SHADOW: u32 = 1 << 0; +const OUT_KDOWN: u32 = 1 << 1; +const OUT_KUP: u32 = 1 << 2; +const OUT_LDOWN: u32 = 1 << 3; +const OUT_LUP: u32 = 1 << 4; +const OUT_ALL: u32 = OUT_SHADOW | OUT_KDOWN | OUT_KUP | OUT_LDOWN | OUT_LUP; + +/// Release the GIL for a closure whose captured state may not be `Send`. +/// +/// # Safety +/// Caller must guarantee that all borrowed data remains alive for the duration +/// of the closure (i.e. the Python objects backing any `ArrayView` are not +/// deallocated while the GIL is released). +unsafe fn allow_threads_unchecked T>(py: Python, f: F) -> T { + // Move f to the heap and erase through usize so the auto-Send derivation + // for the closure sees only Send types (usize), not the non-Send F. + let raw = Box::into_raw(Box::new(f)); + let addr = raw as usize; + py.allow_threads(move || unsafe { + let f = *Box::from_raw(addr as *mut F); + f() + }) +} + +// ── Radiation helpers (ported from Python physics) ───────────────────────── + +/// Compute sky emissivity (Jonsson et al. 2006). +#[inline] +fn compute_esky(ta: f32, rh: f32) -> f32 { + let ta_k = ta + KELVIN_OFFSET; + let ea = 6.107 * 10.0_f32.powf((7.5 * ta) / (237.3 + ta)) * (rh / 100.0); + let msteg = 46.5 * (ea / ta_k); + 1.0 - (1.0 + msteg) * (-((1.2 + 3.0 * msteg) as f32).sqrt()).exp() +} + +/// Compute Kup (ground-reflected shortwave) — Kup_veg_2015a. +/// +/// Returns (kup, kup_e, kup_s, kup_w, kup_n) as owned arrays. 
+#[allow(non_snake_case)] +#[allow(clippy::too_many_arguments)] +fn compute_kup( + rad_i: f32, + rad_d: f32, + rad_g: f32, + altitude: f32, + svfbuveg: ArrayView2, + albedo_b: f32, + f_sh: ArrayView2, + gvfalb: ArrayView2, + gvfalb_e: ArrayView2, + gvfalb_s: ArrayView2, + gvfalb_w: ArrayView2, + gvfalb_n: ArrayView2, + gvfalbnosh: ArrayView2, + gvfalbnosh_e: ArrayView2, + gvfalbnosh_s: ArrayView2, + gvfalbnosh_w: ArrayView2, + gvfalbnosh_n: ArrayView2, + valid: ArrayView2, +) -> ( + Array2, + Array2, + Array2, + Array2, + Array2, +) { + let rad_i_sin_alt = rad_i * (altitude * PI / 180.0).sin(); + + let shape = svfbuveg.dim(); + let mut kup = Array2::::zeros(shape); + let mut kup_e = Array2::::zeros(shape); + let mut kup_s = Array2::::zeros(shape); + let mut kup_w = Array2::::zeros(shape); + let mut kup_n = Array2::::zeros(shape); + + Zip::indexed(&mut kup) + .and(&mut kup_e) + .and(&mut kup_s) + .and(&mut kup_w) + .and(&mut kup_n) + .par_for_each(|(r, c), k, ke, ks, kw, kn| { + if valid[[r, c]] == 0 { + *k = f32::NAN; + *ke = f32::NAN; + *ks = f32::NAN; + *kw = f32::NAN; + *kn = f32::NAN; + return; + } + let sv = svfbuveg[[r, c]]; + let fsh = f_sh[[r, c]]; + let ct = rad_d * sv + albedo_b * (1.0 - sv) * (rad_g * (1.0 - fsh) + rad_d * fsh); + *k = gvfalb[[r, c]] * rad_i_sin_alt + ct * gvfalbnosh[[r, c]]; + *ke = gvfalb_e[[r, c]] * rad_i_sin_alt + ct * gvfalbnosh_e[[r, c]]; + *ks = gvfalb_s[[r, c]] * rad_i_sin_alt + ct * gvfalbnosh_s[[r, c]]; + *kw = gvfalb_w[[r, c]] * rad_i_sin_alt + ct * gvfalbnosh_w[[r, c]]; + *kn = gvfalb_n[[r, c]] * rad_i_sin_alt + ct * gvfalbnosh_n[[r, c]]; + }); + + (kup, kup_e, kup_s, kup_w, kup_n) +} + +/// Compute Ldown (downwelling longwave) — Jonsson et al. 2006. 
+#[allow(clippy::too_many_arguments)] +fn compute_ldown( + esky: f32, + ta: f32, + tg_wall: f32, + svf: ArrayView2, + svf_veg: ArrayView2, + svf_aveg: ArrayView2, + emis_wall: f32, + ci: f32, + valid: ArrayView2, +) -> Array2 { + let ta_k = ta + KELVIN_OFFSET; + let ta_k4 = ta_k.powi(4); + let tg_wall_k4 = (ta + tg_wall + KELVIN_OFFSET).powi(4); + let shape = svf.dim(); + let mut ldown = Array2::::zeros(shape); + + Zip::indexed(&mut ldown).par_for_each(|(r, c), ld| { + if valid[[r, c]] == 0 { + *ld = f32::NAN; + return; + } + let sv = svf[[r, c]]; + let sv_veg = svf_veg[[r, c]]; + let sv_aveg = svf_aveg[[r, c]]; + + let val = (sv + sv_veg - 1.0) * esky * SBC * ta_k4 + + (2.0 - sv_veg - sv_aveg) * emis_wall * SBC * ta_k4 + + (sv_aveg - sv) * emis_wall * SBC * tg_wall_k4 + + (2.0 - sv - sv_veg) * (1.0 - emis_wall) * esky * SBC * ta_k4; + + if ci < 0.95 { + let c_cloud = 1.0 - ci; + let val_cloudy = (sv + sv_veg - 1.0) * SBC * ta_k4 + + (2.0 - sv_veg - sv_aveg) * emis_wall * SBC * ta_k4 + + (sv_aveg - sv) * emis_wall * SBC * tg_wall_k4 + + (2.0 - sv - sv_veg) * (1.0 - emis_wall) * SBC * ta_k4; + *ld = val * (1.0 - c_cloud) + val_cloudy * c_cloud; + } else { + *ld = val; + } + }); + + ldown +} + +/// Compute Kdown (downwelling shortwave). +#[allow(clippy::too_many_arguments)] +fn compute_kdown( + rad_i: f32, + rad_d: f32, + rad_g: f32, + shadow: ArrayView2, + sin_alt: f32, + svfbuveg: ArrayView2, + albedo_wall: f32, + f_sh: ArrayView2, + drad: ArrayView2, + valid: ArrayView2, +) -> Array2 { + let shape = shadow.dim(); + let mut kdown = Array2::::zeros(shape); + + Zip::indexed(&mut kdown).par_for_each(|(r, c), kd| { + if valid[[r, c]] == 0 { + *kd = f32::NAN; + return; + } + *kd = rad_i * shadow[[r, c]] * sin_alt + + drad[[r, c]] + + albedo_wall + * (1.0 - svfbuveg[[r, c]]) + * (rad_g * (1.0 - f_sh[[r, c]]) + rad_d * f_sh[[r, c]]); + }); + + kdown +} + +/// Cached ASVF (`acos(sqrt(clamp(svf, 0, 1)))`) for a static SVF raster. 
+/// +/// SVF is invariant across timesteps for a given surface/tile, so recomputing +/// ASVF every timestep is wasted work in anisotropic mode. +fn asvf_for_svf_cached(svf: ArrayView2) -> Arc> { + const MAX_ENTRIES: usize = 16; + + type AsvfKey = (usize, usize, u64); + #[derive(Default)] + struct AsvfCache { + map: HashMap>>, + lru: VecDeque, + } + + fn fnv1a_u64(mut hash: u64, word: u64) -> u64 { + const FNV_PRIME: u64 = 0x0000_0100_0000_01B3; + for b in word.to_le_bytes() { + hash ^= b as u64; + hash = hash.wrapping_mul(FNV_PRIME); + } + hash + } + + fn svf_key(svf: ArrayView2) -> AsvfKey { + const FNV_OFFSET: u64 = 0xCBF2_9CE4_8422_2325; + let mut hash = FNV_OFFSET; + hash = fnv1a_u64(hash, svf.nrows() as u64); + hash = fnv1a_u64(hash, svf.ncols() as u64); + for &v in svf.iter() { + hash = fnv1a_u64(hash, v.to_bits() as u64); + } + (svf.nrows(), svf.ncols(), hash) + } + + let key = svf_key(svf); + static CACHE: OnceLock> = OnceLock::new(); + let cache = CACHE.get_or_init(|| Mutex::new(AsvfCache::default())); + + let mut guard = cache.lock().expect("ASVF cache mutex poisoned"); + if let Some(hit) = guard.map.get(&key).cloned() { + if let Some(pos) = guard.lru.iter().position(|k| *k == key) { + guard.lru.remove(pos); + } + guard.lru.push_back(key); + return hit; + } + + let data = svf + .iter() + .map(|&v| v.clamp(0.0, 1.0).sqrt().acos()) + .collect::>(); + let entry = Arc::new(data); + + while guard.map.len() >= MAX_ENTRIES { + if let Some(oldest) = guard.lru.pop_front() { + guard.map.remove(&oldest); + } else { + break; + } + } + guard.map.insert(key, entry.clone()); + guard.lru.push_back(key); + entry +} + +/// Weighted sum of four directional side components with valid-mask handling. +/// +/// Used to avoid materializing per-direction arrays in Tmrt-only pathways. 
+fn weighted_side_sum_four( + a: ArrayView2, + b: ArrayView2, + c_arr: ArrayView2, + d_arr: ArrayView2, + valid: ArrayView2, + weight: f32, +) -> Array2 { + let shape = a.dim(); + let mut sum = Array2::::zeros(shape); + + Zip::indexed(&mut sum).par_for_each(|(r, c), out| { + if valid[[r, c]] == 0 { + *out = f32::NAN; + return; + } + *out = (a[[r, c]] + b[[r, c]] + c_arr[[r, c]] + d_arr[[r, c]]) * weight; + }); + + sum +} + +/// Directional longwave side sum for anisotropic mode. +/// +/// In anisotropic mode the directional longwave sides are `lup_dir * 0.5`; +/// only their sum is needed by Tmrt. +fn lside_dirs_sum_aniso_from_lup( + lup_e: ArrayView2, + lup_s: ArrayView2, + lup_w: ArrayView2, + lup_n: ArrayView2, + valid: ArrayView2, +) -> Array2 { + weighted_side_sum_four(lup_e, lup_s, lup_w, lup_n, valid, 0.5) +} + +/// Directional shortwave side sum for anisotropic mode. +/// +/// Current anisotropic directional kside terms are `kup_dir * 0.5`. +fn kside_dirs_sum_aniso_from_kup( + kup_e: ArrayView2, + kup_s: ArrayView2, + kup_w: ArrayView2, + kup_n: ArrayView2, + valid: ArrayView2, +) -> Array2 { + weighted_side_sum_four(kup_e, kup_s, kup_w, kup_n, valid, 0.5) +} + +fn side_sum_from_directional( + north: ArrayView2, + east: ArrayView2, + south: ArrayView2, + west: ArrayView2, + valid: ArrayView2, +) -> Array2 { + let shape = north.dim(); + let mut sum = Array2::::zeros(shape); + + Zip::indexed(&mut sum).par_for_each(|(r, c), out| { + if valid[[r, c]] == 0 { + *out = f32::NAN; + return; + } + *out = north[[r, c]] + east[[r, c]] + south[[r, c]] + west[[r, c]]; + }); + + sum +} + +struct PatchOptionLut { + altitudes: Arc>, + azimuths: Arc>, + steradians: Arc>, + altitude_sin: Arc>, +} + +/// Cached patch LUTs keyed by `patch_option`. +/// +/// Geometry is already cached in the Perez module; this cache adds derived +/// `sin(altitude)` values so anisotropic timesteps can skip per-patch trig. 
+fn patch_lut_for_option_cached(patch_option: i32) -> Arc { + static CACHE: OnceLock>>> = OnceLock::new(); + let cache = CACHE.get_or_init(|| Mutex::new(HashMap::new())); + + let mut guard = cache.lock().expect("patch LUT cache mutex poisoned"); + guard + .entry(patch_option) + .or_insert_with(|| { + let (altitudes, azimuths, steradians) = + crate::perez::patch_alt_azi_steradians_for_patch_option(patch_option); + let deg2rad = PI / 180.0; + let altitude_sin = Arc::new( + altitudes + .iter() + .map(|alt| (alt * deg2rad).sin()) + .collect::>(), + ); + Arc::new(PatchOptionLut { + altitudes, + azimuths, + steradians, + altitude_sin, + }) + }) + .clone() +} + +/// Compute anisotropic diffuse luminance sum directly from bitpacked shadow matrices. +/// +/// Equivalent to: +/// diffsh = shmat_bit - (1 - vegshmat_bit) * (1 - psi) +/// ani_lum = sum_i(diffsh[:,:,i] * lv_lum[i]) +/// +/// but avoids allocating a full (rows, cols, patches) float array. +fn compute_ani_lum_from_packed( + shmat: ArrayView3, + vegshmat: ArrayView3, + lv_lum: ArrayView1, + psi: f32, + valid: ArrayView2, +) -> Array2 { + let (rows, cols, _) = shmat.dim(); + let n_patches = lv_lum.len(); + let mut out = Array2::::zeros((rows, cols)); + + let ncols = cols; + if let Some(out_slice) = out.as_slice_mut() { + out_slice.par_iter_mut().enumerate().for_each(|(idx, v)| { + let r = idx / ncols; + let c = idx % ncols; + + if valid[[r, c]] == 0 { + *v = f32::NAN; + return; + } + + let mut sum = 0.0_f32; + for i in 0..n_patches { + let byte = i >> 3; + let bit = i & 7; + let sh = ((shmat[[r, c, byte]] >> bit) & 1) as f32; + let vsh = ((vegshmat[[r, c, byte]] >> bit) & 1) as f32; + let diff = sh - (1.0 - vsh) * (1.0 - psi); + sum += diff * lv_lum[i]; + } + *v = sum; + }); + } else { + for r in 0..rows { + for c in 0..cols { + if valid[[r, c]] == 0 { + out[[r, c]] = f32::NAN; + continue; + } + let mut sum = 0.0_f32; + for i in 0..n_patches { + let byte = i >> 3; + let bit = i & 7; + let sh = ((shmat[[r, c, byte]] 
>> bit) & 1) as f32; + let vsh = ((vegshmat[[r, c, byte]] >> bit) & 1) as f32; + let diff = sh - (1.0 - vsh) * (1.0 - psi); + sum += diff * lv_lum[i]; + } + out[[r, c]] = sum; + } + } + } + + out +} + +// ── GVF Geometry Cache (opaque handle for Python) ───────────────────────── + +/// Opaque handle to a precomputed GVF geometry cache. +/// +/// Created once per DSM via `precompute_gvf_cache()`, then passed to +/// `compute_timestep()` on subsequent calls to skip building ray-tracing. +#[pyclass] +pub struct PyGvfGeometryCache { + pub(crate) inner: GvfGeometryCache, +} + +/// Precompute GVF geometry cache for a given set of surface arrays. +/// +/// This runs the building ray-trace once (18 azimuths, parallelized). +/// The returned cache is passed to `compute_timestep()` to skip geometry +/// on subsequent timesteps with the same DSM. +#[pyfunction] +#[allow(clippy::too_many_arguments)] +pub fn precompute_gvf_cache( + buildings: PyReadonlyArray2, + wall_asp: PyReadonlyArray2, + wall_ht: PyReadonlyArray2, + alb_grid: PyReadonlyArray2, + pixel_size: f32, + human_height: f32, + wall_albedo: f32, +) -> PyResult { + let first_ht = human_height.round().max(1.0); + let second_ht = human_height * 20.0; + + let cache = precompute_gvf_geometry( + buildings.as_array(), + wall_asp.as_array(), + wall_ht.as_array(), + alb_grid.as_array(), + pixel_size, + first_ht, + second_ht, + wall_albedo, + ); + + Ok(PyGvfGeometryCache { inner: cache }) +} + +// ── Main fused timestep function ─────────────────────────────────────────── + +/// Compute a single daytime timestep entirely in Rust. +/// +/// All intermediate arrays stay as ndarray::Array2 — only the final +/// results cross back to Python. 
+/// +/// Parameters are grouped into structs to keep the signature manageable: +/// - weather: Per-timestep scalars (sun position, temperature, radiation) +/// - human: Body parameters (height, posture, absorptivities) +/// - config: Constants (pixel_size, wall materials) +/// - Surface/SVF arrays: Borrowed from Python (zero-copy on input) +/// - Thermal state: Carried forward between timesteps +#[pyfunction] +#[allow(clippy::too_many_arguments)] +pub fn compute_timestep( + py: Python, + // Scalar parameter structs + weather: &WeatherScalars, + human: &HumanScalars, + config: &ConfigScalars, + // Optional GVF geometry cache (skip building ray-tracing if provided) + gvf_cache: Option<&PyGvfGeometryCache>, + // Surface arrays (constant across timesteps, borrowed) + dsm: PyReadonlyArray2, + cdsm: Option>, + tdsm: Option>, + bush: Option>, + wall_ht: Option>, + wall_asp: Option>, + // SVF arrays (constant across timesteps, borrowed) + svf: PyReadonlyArray2, + svf_n: PyReadonlyArray2, + svf_e: PyReadonlyArray2, + svf_s: PyReadonlyArray2, + svf_w: PyReadonlyArray2, + svf_veg: PyReadonlyArray2, + svf_veg_n: PyReadonlyArray2, + svf_veg_e: PyReadonlyArray2, + svf_veg_s: PyReadonlyArray2, + svf_veg_w: PyReadonlyArray2, + svf_aveg: PyReadonlyArray2, + svf_aveg_n: PyReadonlyArray2, + svf_aveg_e: PyReadonlyArray2, + svf_aveg_s: PyReadonlyArray2, + svf_aveg_w: PyReadonlyArray2, + svfbuveg: PyReadonlyArray2, + svfalfa: PyReadonlyArray2, + // Land cover property grids (constant across timesteps, borrowed) + alb_grid: PyReadonlyArray2, + emis_grid: PyReadonlyArray2, + tgk_grid: PyReadonlyArray2, + tstart_grid: PyReadonlyArray2, + tmaxlst_grid: PyReadonlyArray2, + // Buildings mask for GVF + buildings: PyReadonlyArray2, + lc_grid: Option>, + // Anisotropic sky inputs (None for isotropic) + shmat: Option>, + vegshmat: Option>, + vbshmat: Option>, + // Thermal state (mutable, updated each timestep) + firstdaytime: i32, + timeadd: f32, + timestep_dec: f32, + tgmap1: PyReadonlyArray2, 
+ tgmap1_e: PyReadonlyArray2, + tgmap1_s: PyReadonlyArray2, + tgmap1_w: PyReadonlyArray2, + tgmap1_n: PyReadonlyArray2, + tgout1: PyReadonlyArray2, + // Valid pixel mask (1=valid, 0=NaN/nodata — skip computation for invalid pixels) + valid_mask: PyReadonlyArray2, + // Optional output selection bitmask for Python conversion (tmrt always returned) + output_mask: Option, +) -> PyResult { + // Borrow all arrays (zero-copy from numpy) + let valid_v = valid_mask.as_array(); + let dsm_v = dsm.as_array(); + let cdsm_v = cdsm.as_ref().map(|a| a.as_array()); + let tdsm_v = tdsm.as_ref().map(|a| a.as_array()); + let bush_v = bush.as_ref().map(|a| a.as_array()); + let wall_ht_v = wall_ht.as_ref().map(|a| a.as_array()); + let wall_asp_v = wall_asp.as_ref().map(|a| a.as_array()); + let svf_v = svf.as_array(); + let svf_n_v = svf_n.as_array(); + let svf_e_v = svf_e.as_array(); + let svf_s_v = svf_s.as_array(); + let svf_w_v = svf_w.as_array(); + let svf_veg_v = svf_veg.as_array(); + let svf_veg_n_v = svf_veg_n.as_array(); + let svf_veg_e_v = svf_veg_e.as_array(); + let svf_veg_s_v = svf_veg_s.as_array(); + let svf_veg_w_v = svf_veg_w.as_array(); + let svf_aveg_v = svf_aveg.as_array(); + let svf_aveg_n_v = svf_aveg_n.as_array(); + let svf_aveg_e_v = svf_aveg_e.as_array(); + let svf_aveg_s_v = svf_aveg_s.as_array(); + let svf_aveg_w_v = svf_aveg_w.as_array(); + let svfbuveg_v = svfbuveg.as_array(); + let svfalfa_v = svfalfa.as_array(); + let alb_grid_v = alb_grid.as_array(); + let emis_grid_v = emis_grid.as_array(); + let tgk_grid_v = tgk_grid.as_array(); + let tstart_grid_v = tstart_grid.as_array(); + let tmaxlst_grid_v = tmaxlst_grid.as_array(); + let buildings_v = buildings.as_array(); + let lc_grid_v = lc_grid.as_ref().map(|a| a.as_array()); + let tgmap1_v = tgmap1.as_array(); + let tgmap1_e_v = tgmap1_e.as_array(); + let tgmap1_s_v = tgmap1_s.as_array(); + let tgmap1_w_v = tgmap1_w.as_array(); + let tgmap1_n_v = tgmap1_n.as_array(); + let tgout1_v = tgout1.as_array(); + + // 
Borrow anisotropic arrays (if provided) + let shmat_v = shmat.as_ref().map(|a| a.as_array()); + let vegshmat_v = vegshmat.as_ref().map(|a| a.as_array()); + let vbshmat_v = vbshmat.as_ref().map(|a| a.as_array()); + let output_mask_bits = output_mask.unwrap_or(OUT_ALL); + let want_shadow = (output_mask_bits & OUT_SHADOW) != 0; + let want_kdown = (output_mask_bits & OUT_KDOWN) != 0; + let want_kup = (output_mask_bits & OUT_KUP) != 0; + let want_ldown = (output_mask_bits & OUT_LDOWN) != 0; + let want_lup = (output_mask_bits & OUT_LUP) != 0; + + if config.has_walls && (wall_ht_v.is_none() || wall_asp_v.is_none()) { + return Err(pyo3::exceptions::PyValueError::new_err( + "config.has_walls=true requires both wall_ht and wall_asp inputs", + )); + } + // Extract GVF cache reference (pure Rust data) before releasing the GIL + let gvf_cache_inner = gvf_cache.map(|c| &c.inner); + + // SAFETY: All array views borrow from PyReadonlyArray parameters that are alive + // for the entire function call. Releasing the GIL only allows other Python + // threads to run — it does not invalidate our borrows or trigger GC of the + // backing numpy arrays. 
+ let raw = unsafe { + allow_threads_unchecked(py, || { + let shape = dsm_v.dim(); + + // Wall aspect in radians for shadows + let wall_asp_rad: Option> = wall_asp_v.map(|a| a.mapv(|d| d * PI / 180.0)); + let wall_asp_rad_view = wall_asp_rad.as_ref().map(|a| a.view()); + + // ── Step 1: Shadows ────────────────────────────────────────────────── + let t_shadow = Instant::now(); + let shadow_result: ShadowingResultRust = calculate_shadows_rust( + weather.sun_azimuth, + weather.sun_altitude, + config.pixel_size, + config.max_height, + dsm_v, + if config.use_veg { cdsm_v } else { None }, + if config.use_veg { tdsm_v } else { None }, + if config.use_veg { bush_v } else { None }, + if config.has_walls { wall_ht_v } else { None }, + if config.has_walls { + wall_asp_rad_view + } else { + None + }, + None, // walls_scheme + None, // aspect_scheme + false, // need_full_wall_outputs (pipeline only needs wall_sun) + 3.0, // min_sun_altitude + config.max_shadow_distance_m, + ); + + // Combine shadows with vegetation transmissivity + let bldg_sh = &shadow_result.bldg_sh; + let shadow = if config.use_veg { + let veg_sh = &shadow_result.veg_sh; + bldg_sh - &((1.0 - veg_sh) * (1.0 - weather.psi)) + } else { + bldg_sh.clone() + }; + let shadow_f32 = shadow; + + let wallsun = shadow_result + .wall_sun + .unwrap_or_else(|| Array2::zeros(shape)); + + let shadow_dur = t_shadow.elapsed(); + + // ── Step 2: Ground Temperature ─────────────────────────────────────── + let t_ground = Instant::now(); + let ground: GroundTempResult = compute_ground_temperature_pure( + weather.sun_altitude, + weather.altmax, + weather.dectime, + weather.snup, + weather.global_rad, + weather.rad_g0, + weather.zen_deg, + tgk_grid_v, + tstart_grid_v, + tmaxlst_grid_v, + config.tgk_wall, + config.tstart_wall, + config.tmaxlst_wall, + ); + + let ground_dur = t_ground.elapsed(); + + // ── Step 3: GVF ───────────────────────────────────────────────────── + let t_gvf = Instant::now(); + let first = { + let h = 
human.height.round(); + if h == 0.0 { + 1.0 + } else { + h + } + }; + let second = (human.height * 20.0).round(); + + let gvf: GvfResultPure = if config.has_walls { + if let Some(cache) = gvf_cache_inner { + // Use cached geometry — thermal-only pass + gvf_calc_with_cache( + cache, + wallsun.view(), + buildings_v, + shadow_f32.view(), + ground.tg.view(), + ground.tg_wall, + weather.ta, + emis_grid_v, + config.emis_wall, + alb_grid_v, + SBC, + config.albedo_wall, + weather.ta, // twater = ta + lc_grid_v, + lc_grid_v.is_some(), + ) + } else { + // Full GVF (first timestep or no cache) + let wh = wall_ht_v.unwrap(); + gvf_calc_pure( + wallsun.view(), + wh, + buildings_v, + config.pixel_size, + shadow_f32.view(), + first, + second, + wall_asp_v.unwrap(), + ground.tg.view(), + ground.tg_wall, + weather.ta, + emis_grid_v, + config.emis_wall, + alb_grid_v, + SBC, + config.albedo_wall, + weather.ta, // twater = ta + lc_grid_v, + lc_grid_v.is_some(), + ) + } + } else { + // Simplified GVF (no walls) - compute inline + let gvf_simple = 1.0 - &svf_v; + let tg_with_shadow = &ground.tg * &shadow_f32; + // Lup = emis × SBC × (Ta + Tg_shadow + 273.15)^4 + let lup_simple = { + let mut arr = Array2::::zeros(shape); + Zip::indexed(&mut arr).par_for_each(|(r, c), out| { + if valid_v[[r, c]] == 0 { + *out = f32::NAN; + return; + } + let t = weather.ta + tg_with_shadow[[r, c]] + KELVIN_OFFSET; + *out = emis_grid_v[[r, c]] * SBC * t.powi(4); + }); + arr + }; + let gvfalb_simple = &alb_grid_v * &gvf_simple; + + GvfResultPure { + gvf_lup: lup_simple.clone(), + gvfalb: gvfalb_simple.clone(), + gvfalbnosh: Some(alb_grid_v.to_owned()), + gvf_lup_e: lup_simple.clone(), + gvfalb_e: gvfalb_simple.clone(), + gvfalbnosh_e: Some(alb_grid_v.to_owned()), + gvf_lup_s: lup_simple.clone(), + gvfalb_s: gvfalb_simple.clone(), + gvfalbnosh_s: Some(alb_grid_v.to_owned()), + gvf_lup_w: lup_simple.clone(), + gvfalb_w: gvfalb_simple.clone(), + gvfalbnosh_w: Some(alb_grid_v.to_owned()), + gvf_lup_n: 
lup_simple.clone(), + gvfalb_n: gvfalb_simple, + gvfalbnosh_n: Some(alb_grid_v.to_owned()), + gvf_sum: Some(Array2::zeros(shape)), + gvf_norm: Some(Array2::ones(shape)), + } + }; + + let gvf_dur = t_gvf.elapsed(); + + // ── Step 4: Thermal Delay ──────────────────────────────────────────── + let t_delay = Instant::now(); + let tg_temp = &ground.tg * &shadow_f32 + weather.ta; + + let delay = ts_wave_delay_batch_pure( + gvf.gvf_lup.view(), + gvf.gvf_lup_e.view(), + gvf.gvf_lup_s.view(), + gvf.gvf_lup_w.view(), + gvf.gvf_lup_n.view(), + tg_temp.view(), + firstdaytime, + timeadd, + timestep_dec, + tgmap1_v, + tgmap1_e_v, + tgmap1_s_v, + tgmap1_w_v, + tgmap1_n_v, + tgout1_v, + ); + + let delay_dur = t_delay.elapsed(); + + // ── Step 5: Radiation ───────────────────────────────────────────────── + let t_radiation = Instant::now(); + let esky = compute_esky(weather.ta, weather.rh); + let sin_alt = (weather.sun_altitude * PI / 180.0).sin(); + let rad_i = weather.direct_rad; + let rad_d = weather.diffuse_rad; + let rad_g = weather.global_rad; + let psi = weather.psi; + let cyl = human.is_standing; + + // F_sh (cylindric wedge shadow fraction) — shared by both paths + let zen_rad = weather.sun_zenith * PI / 180.0; + let f_sh = cylindric_wedge_pure_masked(zen_rad, svfalfa_v, Some(valid_v)); + + // Kup helper used in both isotropic and anisotropic paths. + // In cached-GVF mode, read gvfalbnosh* directly from geometry cache to + // avoid per-timestep cloning of those static arrays. 
+ let compute_kup_with = + |gvfalbnosh: ArrayView2, + gvfalbnosh_e: ArrayView2, + gvfalbnosh_s: ArrayView2, + gvfalbnosh_w: ArrayView2, + gvfalbnosh_n: ArrayView2| { + compute_kup( + rad_i, + rad_d, + rad_g, + weather.sun_altitude, + svfbuveg_v, + config.albedo_wall, + f_sh.view(), + gvf.gvfalb.view(), + gvf.gvfalb_e.view(), + gvf.gvfalb_s.view(), + gvf.gvfalb_w.view(), + gvf.gvfalb_n.view(), + gvfalbnosh, + gvfalbnosh_e, + gvfalbnosh_s, + gvfalbnosh_w, + gvfalbnosh_n, + valid_v, + ) + }; + let compute_kup_all = || { + if let Some(cache) = gvf_cache_inner { + compute_kup_with( + cache.cached_albnosh.view(), + cache.cached_albnosh_e.view(), + cache.cached_albnosh_s.view(), + cache.cached_albnosh_w.view(), + cache.cached_albnosh_n.view(), + ) + } else { + let gvfalbnosh = gvf + .gvfalbnosh + .as_ref() + .expect("gvfalbnosh missing without cache"); + let gvfalbnosh_e = gvf + .gvfalbnosh_e + .as_ref() + .expect("gvfalbnosh_e missing without cache"); + let gvfalbnosh_s = gvf + .gvfalbnosh_s + .as_ref() + .expect("gvfalbnosh_s missing without cache"); + let gvfalbnosh_w = gvf + .gvfalbnosh_w + .as_ref() + .expect("gvfalbnosh_w missing without cache"); + let gvfalbnosh_n = gvf + .gvfalbnosh_n + .as_ref() + .expect("gvfalbnosh_n missing without cache"); + compute_kup_with( + gvfalbnosh.view(), + gvfalbnosh_e.view(), + gvfalbnosh_s.view(), + gvfalbnosh_w.view(), + gvfalbnosh_n.view(), + ) + } + }; + + // Branch: anisotropic vs isotropic + let use_aniso = config.use_anisotropic + && shmat_v.is_some() + && vegshmat_v.is_some() + && vbshmat_v.is_some(); + + let (kup, kdown, ldown, kside_dirs_sum, lside_dirs_sum, kside_total, lside_total) = + if use_aniso { + // === Anisotropic sky === + let shmat_a = shmat_v.unwrap(); + let vegshmat_a = vegshmat_v.unwrap(); + let vbshmat_a = vbshmat_v.unwrap(); + + // Perez sky luminance distribution (computed in Rust — no Python round-trip) + let lv_arr = crate::perez::perez_v3( + weather.zen_deg, + weather.sun_azimuth, + weather.diffuse_rad, 
+ weather.direct_rad, + weather.jday, + weather.patch_option, + ); + let patch_lut = patch_lut_for_option_cached(weather.patch_option); + let patch_altitude_arr = ArrayView1::from(patch_lut.altitudes.as_slice()); + let patch_azimuth_arr = ArrayView1::from(patch_lut.azimuths.as_slice()); + let steradians_arr = ArrayView1::from(patch_lut.steradians.as_slice()); + let patch_altitude_sin_arr = + ArrayView1::from(patch_lut.altitude_sin.as_slice()); + + // ASVF from SVF (arccos(sqrt(clip(svf, 0, 1)))) cached by SVF buffer. + let asvf_cache = asvf_for_svf_cached(svf_v); + let asvf_arr = ArrayView2::from_shape(shape, asvf_cache.as_slice()) + .expect("ASVF cache shape mismatch"); + + // Esky anisotropic (Jonsson + CI correction) + let esky_a = { + let ci = weather.clearness_index; + if ci < 0.95 { + ci * esky + (1.0 - ci) + } else { + esky + } + }; + + // Full anisotropic sky calculation (ldown, kside, lside totals) + // Try GPU path first; fall back to CPU if unavailable. + let deg2rad = PI / 180.0; + #[allow(unused_variables)] + let (lum_chi, rad_tot) = if weather.sun_altitude > 0.0 { + let patch_luminance = lv_arr.column(2); + let mut rad_tot = 0.0f32; + let n_patches = patch_luminance.len(); + for i in 0..n_patches { + rad_tot += + patch_luminance[i] * steradians_arr[i] * patch_altitude_sin_arr[i]; + } + if rad_tot > 0.0 { + (patch_luminance.mapv(|lum| (lum * rad_d) / rad_tot), rad_tot) + } else { + (Array1::::zeros(n_patches), 0.0) + } + } else { + (Array1::::zeros(lv_arr.shape()[0]), 0.0) + }; + + #[allow(unused_variables)] + let (_, esky_band) = + crate::emissivity_models::model2(&lv_arr, esky_a, weather.ta); + + // Launch anisotropic GPU dispatch early, then compute Kup/lside_dirs + // while GPU work is in flight. 
+ #[cfg(feature = "gpu")] + let mut gpu_ctx = None; + #[cfg(feature = "gpu")] + let mut gpu_pending = None; + + #[cfg(feature = "gpu")] + if cyl { + if let Some(ctx) = get_aniso_gpu_context() { + match ctx.dispatch_begin( + shmat_a, + vegshmat_a, + vbshmat_a, + asvf_arr, + delay.lup.view(), + valid_v, + patch_altitude_arr, + patch_azimuth_arr, + steradians_arr, + esky_band.view(), + lum_chi.view(), + weather.sun_altitude, + weather.sun_azimuth, + weather.ta, + cyl, + config.albedo_wall, + ground.tg_wall, + config.emis_wall, + rad_i, + rad_d, + psi, + rad_tot, + ) { + Ok(pending) => { + gpu_ctx = Some(ctx); + gpu_pending = Some(pending); + } + Err(e) => { + eprintln!( + "[GPU] Anisotropic dispatch begin failed: {}. CPU fallback.", + e + ); + } + } + } + } + + // Shared thermal side inputs (always needed in anisotropic mode). + let (kup, kup_e, kup_s, kup_w, kup_n) = compute_kup_all(); + + let lside_dirs_sum = lside_dirs_sum_aniso_from_lup( + delay.lup_e.view(), + delay.lup_s.view(), + delay.lup_w.view(), + delay.lup_n.view(), + valid_v, + ); + + #[cfg(feature = "gpu")] + let gpu_result = if let (Some(ctx), Some(pending)) = (gpu_ctx, gpu_pending) { + match ctx.dispatch_end(pending) { + Ok(gpu) => Some(gpu), + Err(e) => { + eprintln!( + "[GPU] Anisotropic dispatch end failed: {}. 
CPU fallback.", + e + ); + None + } + } + } else { + None + }; + + // Compute anisotropic sky: GPU path + CPU fallback + let mut used_gpu = false; + #[allow(unused_mut)] + let mut ani_ldown = Array2::::zeros(shape); + #[allow(unused_mut)] + let mut ani_lside = Array2::::zeros(shape); + #[allow(unused_mut)] + let mut ani_kside = Array2::::zeros(shape); + #[allow(unused_mut)] + let mut drad = Array2::::zeros(shape); + #[allow(unused_mut)] + let mut ani_kside_dirs_sum = Array2::::zeros(shape); + + #[cfg(feature = "gpu")] + if let Some(gpu) = gpu_result { + // GPU path: derive kside and k-directional from GPU partial outputs + let kside_i = if cyl { + &shadow_f32 * rad_i * (weather.sun_altitude * deg2rad).cos() + } else { + Array2::::zeros(shape) + }; + if weather.sun_altitude > 0.0 { + ani_kside = kside_i + &gpu.kside_partial; + ani_kside_dirs_sum = kside_dirs_sum_aniso_from_kup( + kup_e.view(), + kup_s.view(), + kup_w.view(), + kup_n.view(), + valid_v, + ); + } + ani_ldown = gpu.ldown; + ani_lside = gpu.lside; + drad = gpu.drad; + used_gpu = true; + } + + if !used_gpu { + // drad via direct accumulation from packed shadow matrices. + // Shadow matrices are bitpacked: 1 bit per patch, 8 patches per byte. 
+ // ani_lum = sum_i((sh_i - (1 - veg_i) * (1 - psi)) * lv_i) + let lv_col2 = lv_arr.column(2); + let ani_lum = + compute_ani_lum_from_packed(shmat_a, vegshmat_a, lv_col2, psi, valid_v); + drad = ani_lum.mapv(|v| v * rad_d); + + let ani = anisotropic_sky_pure( + shmat_a, + vegshmat_a, + vbshmat_a, + weather.sun_altitude, + weather.sun_azimuth, + esky_a, + weather.ta, + cyl, + false, // wall_scheme + config.albedo_wall, + ground.tg_wall, + config.emis_wall, + rad_i, + rad_d, + asvf_arr, + lv_arr.view(), + steradians_arr, + delay.lup.view(), + lv_arr.view(), + shadow_f32.view(), + kup_e.view(), + kup_s.view(), + kup_w.view(), + kup_n.view(), + None, // voxel_table + None, // voxel_maps + Some(valid_v), + ); + ani_ldown = ani.ldown; + ani_lside = ani.lside; + ani_kside = ani.kside; + ani_kside_dirs_sum = side_sum_from_directional( + ani.knorth.view(), + ani.keast.view(), + ani.ksouth.view(), + ani.kwest.view(), + valid_v, + ); + } + + // Kdown (shared formula, but with anisotropic drad) + let kdown = compute_kdown( + rad_i, + rad_d, + rad_g, + shadow_f32.view(), + sin_alt, + svfbuveg_v, + config.albedo_wall, + f_sh.view(), + drad.view(), + valid_v, + ); + + // From anisotropic: ldown from ani_sky, lside from lside_veg, kside from ani_sky + ( + kup, + kdown, + ani_ldown, + ani_kside_dirs_sum, + lside_dirs_sum, + ani_kside, + ani_lside, + ) + } else { + // === Isotropic sky === + let (kup, kup_e, kup_s, kup_w, kup_n) = compute_kup_all(); + + // drad (isotropic diffuse) + let drad = svfbuveg_v.mapv(|sv| rad_d * sv); + + // Ldown + let ldown = compute_ldown( + esky, + weather.ta, + ground.tg_wall, + svf_v, + svf_veg_v, + svf_aveg_v, + config.emis_wall, + weather.clearness_index, + valid_v, + ); + + // kside_veg (isotropic) + let kside = kside_veg_isotropic_pure( + rad_i, + rad_d, + rad_g, + shadow_f32.view(), + svf_s_v, + svf_w_v, + svf_n_v, + svf_e_v, + svf_veg_e_v, + svf_veg_s_v, + svf_veg_w_v, + svf_veg_n_v, + weather.sun_azimuth, + weather.sun_altitude, + psi, + 0.0, 
// t (instrument offset) + config.albedo_wall, + f_sh.view(), + kup_e.view(), + kup_s.view(), + kup_w.view(), + kup_n.view(), + cyl, + Some(valid_v), + ); + + // lside_veg (isotropic) + let lside = lside_veg_pure( + svf_s_v, + svf_w_v, + svf_n_v, + svf_e_v, + svf_veg_e_v, + svf_veg_s_v, + svf_veg_w_v, + svf_veg_n_v, + svf_aveg_e_v, + svf_aveg_s_v, + svf_aveg_w_v, + svf_aveg_n_v, + weather.sun_azimuth, + weather.sun_altitude, + weather.ta, + ground.tg_wall, + SBC, + config.emis_wall, + ldown.view(), + esky, + 0.0, // t + f_sh.view(), + weather.clearness_index, + delay.lup_e.view(), + delay.lup_s.view(), + delay.lup_w.view(), + delay.lup_n.view(), + false, // isotropic + Some(valid_v), + ); + + // Kdown + let kdown = compute_kdown( + rad_i, + rad_d, + rad_g, + shadow_f32.view(), + sin_alt, + svfbuveg_v, + config.albedo_wall, + f_sh.view(), + drad.view(), + valid_v, + ); + + // Isotropic: kside_total = kside_i, lside_total = zeros + let kside_dirs_sum = side_sum_from_directional( + kside.knorth.view(), + kside.keast.view(), + kside.ksouth.view(), + kside.kwest.view(), + valid_v, + ); + let lside_dirs_sum = side_sum_from_directional( + lside.lnorth.view(), + lside.least.view(), + lside.lsouth.view(), + lside.lwest.view(), + valid_v, + ); + ( + kup, + kdown, + ldown, + kside_dirs_sum, + lside_dirs_sum, + kside.kside_i, + Array2::::zeros(shape), + ) + }; + + let radiation_dur = t_radiation.elapsed(); + + // ── Step 6: Tmrt ───────────────────────────────────────────────────── + let t_tmrt = Instant::now(); + let tmrt = compute_tmrt_from_dir_sums_pure( + kdown.view(), + kup.view(), + ldown.view(), + delay.lup.view(), + kside_dirs_sum.view(), + lside_dirs_sum.view(), + kside_total.view(), + lside_total.view(), + human.abs_k, + human.abs_l, + human.is_standing, + use_aniso, + ); + let tmrt_dur = t_tmrt.elapsed(); + + if timing_enabled() { + let total = + shadow_dur + ground_dur + gvf_dur + delay_dur + radiation_dur + tmrt_dur; + let total_ms = total.as_secs_f64() * 1000.0; 
+ let shadow_ms = shadow_dur.as_secs_f64() * 1000.0; + let ground_ms = ground_dur.as_secs_f64() * 1000.0; + let gvf_ms = gvf_dur.as_secs_f64() * 1000.0; + let delay_ms = delay_dur.as_secs_f64() * 1000.0; + let rad_ms = radiation_dur.as_secs_f64() * 1000.0; + let tmrt_ms = tmrt_dur.as_secs_f64() * 1000.0; + // GPU duty cycle: shadow always uses GPU (when available); + // radiation includes GPU aniso dispatch when anisotropic is active. + let gpu_ms = shadow_ms + if use_aniso { rad_ms } else { 0.0 }; + let duty = if total_ms > 0.0 { + gpu_ms / total_ms * 100.0 + } else { + 0.0 + }; + eprintln!( + "[TIMING] shadow={:.1}ms ground={:.1}ms gvf={:.1}ms delay={:.1}ms \ + radiation={:.1}ms tmrt={:.1}ms | total={:.1}ms gpu_duty={:.0}%", + shadow_ms, ground_ms, gvf_ms, delay_ms, rad_ms, tmrt_ms, total_ms, duty, + ); + } + + TimestepResultRaw { + tmrt, + shadow: if want_shadow { Some(shadow_f32) } else { None }, + kdown: if want_kdown { Some(kdown) } else { None }, + kup: if want_kup { Some(kup) } else { None }, + ldown: if want_ldown { Some(ldown) } else { None }, + lup: if want_lup { Some(delay.lup) } else { None }, + timeadd: delay.timeadd, + tgmap1: delay.tgmap1, + tgmap1_e: delay.tgmap1_e, + tgmap1_s: delay.tgmap1_s, + tgmap1_w: delay.tgmap1_w, + tgmap1_n: delay.tgmap1_n, + tgout1: delay.tgout1, + } + }) + }; // end allow_threads_unchecked + + // ── Convert final outputs to PyArrays (needs GIL) ──────────────────── + Ok(TimestepResult { + tmrt: raw.tmrt.into_pyarray(py).unbind(), + shadow: if want_shadow { + Some( + raw.shadow + .expect("shadow missing despite output mask") + .into_pyarray(py) + .unbind(), + ) + } else { + None + }, + kdown: if want_kdown { + Some( + raw.kdown + .expect("kdown missing despite output mask") + .into_pyarray(py) + .unbind(), + ) + } else { + None + }, + kup: if want_kup { + Some( + raw.kup + .expect("kup missing despite output mask") + .into_pyarray(py) + .unbind(), + ) + } else { + None + }, + ldown: if want_ldown { + Some( + raw.ldown + 
.expect("ldown missing despite output mask") + .into_pyarray(py) + .unbind(), + ) + } else { + None + }, + lup: if want_lup { + Some( + raw.lup + .expect("lup missing despite output mask") + .into_pyarray(py) + .unbind(), + ) + } else { + None + }, + timeadd: raw.timeadd, + tgmap1: raw.tgmap1.into_pyarray(py).unbind(), + tgmap1_e: raw.tgmap1_e.into_pyarray(py).unbind(), + tgmap1_s: raw.tgmap1_s.into_pyarray(py).unbind(), + tgmap1_w: raw.tgmap1_w.into_pyarray(py).unbind(), + tgmap1_n: raw.tgmap1_n.into_pyarray(py).unbind(), + tgout1: raw.tgout1.into_pyarray(py).unbind(), + }) +} diff --git a/rust/src/shadowing.rs b/rust/src/shadowing.rs index 7e6b969..051bd31 100644 --- a/rust/src/shadowing.rs +++ b/rust/src/shadowing.rs @@ -22,7 +22,7 @@ static GPU_CONTEXT: OnceLock> = OnceLock::new(); static GPU_ENABLED: std::sync::atomic::AtomicBool = std::sync::atomic::AtomicBool::new(true); #[cfg(feature = "gpu")] -fn get_gpu_context() -> Option<&'static ShadowGpuContext> { +pub(crate) fn get_gpu_context() -> Option<&'static ShadowGpuContext> { // Check if GPU is enabled if !GPU_ENABLED.load(std::sync::atomic::Ordering::Relaxed) { return None; @@ -68,6 +68,26 @@ pub fn is_gpu_enabled() -> bool { GPU_ENABLED.load(std::sync::atomic::Ordering::Relaxed) } +#[cfg(feature = "gpu")] +#[pyfunction] +/// Return GPU buffer limits as a dict, or None if GPU is unavailable. +/// +/// Keys: +/// - "max_buffer_size": u64 — largest single wgpu buffer in bytes +/// - "backend": str — GPU backend name ("Metal", "Vulkan", "Dx12", "Gl", etc.) +/// +/// Initialises the GPU context lazily on first call. +pub fn gpu_limits(py: Python<'_>) -> PyResult> { + let ctx = match get_gpu_context() { + Some(c) => c, + None => return Ok(None), + }; + let dict = pyo3::types::PyDict::new(py); + dict.set_item("max_buffer_size", ctx.max_buffer_size)?; + dict.set_item("backend", format!("{:?}", ctx.backend))?; + Ok(Some(dict.into())) +} + /// Rust-native result struct for internal shadow calculations. 
pub(crate) struct ShadowingResultRust { pub bldg_sh: Array2, @@ -122,13 +142,49 @@ pub(crate) fn calculate_shadows_rust( aspect_view_opt: Option>, walls_scheme_view_opt: Option>, aspect_scheme_view_opt: Option>, + need_full_wall_outputs: bool, min_sun_elev_deg: f32, + max_shadow_distance_m: f32, ) -> ShadowingResultRust { let shape = dsm_view.shape(); let num_rows = shape[0]; let num_cols = shape[1]; let dim = (num_rows, num_cols); + // Handle zenith case (altitude >= 89.5°): no shadows cast from directly overhead. + // This avoids tan(90°) = infinity which breaks the shadow propagation loop. + // For SVF calculations, zenith patches represent looking straight up - all points + // can see the sky in this direction (no obstruction). + if altitude_deg >= 89.5 { + return ShadowingResultRust { + bldg_sh: Array2::::ones(dim), + veg_sh: Array2::::ones(dim), + veg_blocks_bldg_sh: Array2::::ones(dim), + wall_sh: if need_full_wall_outputs { + walls_view_opt.map(|_| Array2::::zeros(dim)) + } else { + None + }, + wall_sun: walls_view_opt.map(|w| w.to_owned()), + wall_sh_veg: if need_full_wall_outputs { + walls_view_opt.map(|_| Array2::::zeros(dim)) + } else { + None + }, + face_sh: if need_full_wall_outputs { + walls_view_opt.map(|_| Array2::::zeros(dim)) + } else { + None + }, + face_sun: if need_full_wall_outputs { + walls_view_opt.map(|w| w.mapv(|v| if v > 0.0 { 1.0 } else { 0.0 })) + } else { + None + }, + sh_on_wall: walls_scheme_view_opt.map(|_| Array2::::zeros(dim)), + }; + } + // GPU acceleration path: use GPU if available for all shadow types #[cfg(feature = "gpu")] { @@ -141,11 +197,14 @@ pub(crate) fn calculate_shadows_rust( bush_view_opt, walls_view_opt, aspect_view_opt, + walls_scheme_view_opt.is_some() && aspect_scheme_view_opt.is_some(), + need_full_wall_outputs, azimuth_deg, altitude_deg, scale, max_local_dsm_ht, min_sun_elev_deg, + max_shadow_distance_m, ) { Ok(gpu_result) => { // Handle sh_on_wall if wall scheme is present @@ -222,25 +281,26 @@ pub(crate) fn 
calculate_shadows_rust( && bush_view_opt.is_some(); // Allocate arrays for vegetation only if all inputs are present - let (mut veg_sh, mut veg_blocks_bldg_sh, mut propagated_veg_sh_height) = if veg_inputs_present { - let bush_view = bush_view_opt.as_ref().unwrap(); - let veg_canopy_dsm_view = veg_canopy_dsm_view_opt.as_ref().unwrap(); - ( - bush_view.mapv(|v| if v > 1.0 { 1.0 } else { 0.0 }), - Array2::::zeros(dim), - { - let mut arr = Array2::::zeros(dim); - arr.assign(veg_canopy_dsm_view); - arr - }, - ) - } else { - ( - Array2::::zeros(dim), - Array2::::zeros(dim), - Array2::::zeros(dim), - ) - }; + let (mut veg_sh, mut veg_blocks_bldg_sh, mut propagated_veg_sh_height) = + if let (Some(bush_view), Some(veg_canopy_dsm_view)) = + (bush_view_opt, veg_canopy_dsm_view_opt) + { + ( + bush_view.mapv(|v| if v > 1.0 { 1.0 } else { 0.0 }), + Array2::::zeros(dim), + { + let mut arr = Array2::::zeros(dim); + arr.assign(&veg_canopy_dsm_view); + arr + }, + ) + } else { + ( + Array2::::zeros(dim), + Array2::::zeros(dim), + Array2::::zeros(dim), + ) + }; let mut bldg_sh = Array2::::zeros(dim); let mut propagated_bldg_sh_height = Array2::::zeros(dim); @@ -263,9 +323,17 @@ pub(crate) fn calculate_shadows_rust( let mut ds: f32; let mut index = 0.0; - // clamp elevation used for reach computation + // Horizontal reach: derived from height, optionally capped by max_shadow_distance_m. + // On mountainous terrain the height-derived reach can be huge, so the distance + // cap prevents tracing rays for kilometres while the dz guard (below) still + // uses the full terrain relief for correct vertical cutoff. 
let min_sun_elev_rad = min_sun_elev_deg.to_radians(); - let max_reach_m = max_local_dsm_ht / min_sun_elev_rad.tan(); + let height_reach_m = max_local_dsm_ht / min_sun_elev_rad.tan(); + let max_reach_m = if max_shadow_distance_m > 0.0 { + height_reach_m.min(max_shadow_distance_m) + } else { + height_reach_m + }; let max_radius_pixels = (max_reach_m / scale).ceil() as usize; let max_index = max_radius_pixels as f32; // index uses f32 @@ -326,27 +394,34 @@ pub(crate) fn calculate_shadows_rust( let mut bldg_sh_dst_slice = bldg_sh.slice_mut(s![xp1c..xp1c + minx, yp1c..yp1c + miny]); par_azip!((prop_h in &mut prop_bldg_h_dst_slice, &dsm_src in &dsm_src_slice) { - let shifted_dsm = dsm_src - dz; - *prop_h = prop_h.max(shifted_dsm); + if dsm_src.is_finite() { + let shifted_dsm = dsm_src - dz; + *prop_h = prop_h.max(shifted_dsm); + } }); par_azip!((bldg_sh_flag in &mut bldg_sh_dst_slice, &prop_h in &prop_bldg_h_dst_slice, &dsm_target in &dsm_dst_slice) { - *bldg_sh_flag = if prop_h > dsm_target { 1.0 } else { 0.0 }; + if dsm_target.is_finite() { + *bldg_sh_flag = if prop_h > dsm_target { 1.0 } else { 0.0 }; + } else { + *bldg_sh_flag = f32::NAN; + } }); // Vegetation shadow calculation on the slice - if veg_inputs_present { - let veg_canopy_dsm_view = veg_canopy_dsm_view_opt.as_ref().unwrap(); - let veg_trunk_dsm_view = veg_trunk_dsm_view_opt.as_ref().unwrap(); - + if let (Some(veg_canopy_dsm_view), Some(veg_trunk_dsm_view)) = + (veg_canopy_dsm_view_opt, veg_trunk_dsm_view_opt) + { let veg_canopy_src_slice = veg_canopy_dsm_view.slice(s![xc1c..xc1c + minx, yc1c..yc1c + miny]); let mut prop_veg_h_dst_slice = propagated_veg_sh_height.slice_mut(s![xp1c..xp1c + minx, yp1c..yp1c + miny]); par_azip!((prop_veg_h in &mut prop_veg_h_dst_slice, &source_veg_canopy in &veg_canopy_src_slice) { - let shifted_veg_canopy = source_veg_canopy - dz; - *prop_veg_h = prop_veg_h.max(shifted_veg_canopy); + if source_veg_canopy.is_finite() { + let shifted_veg_canopy = source_veg_canopy - dz; + 
*prop_veg_h = prop_veg_h.max(shifted_veg_canopy); + } }); let veg_trunk_src_slice = @@ -621,6 +696,7 @@ fn shade_on_walls( } #[pyfunction] +#[pyo3(signature = (azimuth_deg, altitude_deg, scale, max_local_dsm_ht, dsm, veg_canopy_dsm=None, veg_trunk_dsm=None, bush=None, walls=None, aspect=None, walls_scheme=None, aspect_scheme=None, min_sun_elev_deg=None, max_shadow_distance_m=None))] /// Calculates shadow maps for buildings, vegetation, and walls given DSM and sun position (Python wrapper). /// /// This function handles Python type conversions and calls the internal Rust shadow calculation logic. @@ -641,6 +717,7 @@ fn shade_on_walls( /// * `aspect` - Optional wall aspect/orientation layer (radians or degrees). Required if `walls` is provided. /// * `walls_scheme` - Optional alternative wall height layer for specific calculations /// * `aspect_scheme` - Optional alternative wall aspect layer +/// * `max_shadow_distance_m` - Optional: Maximum horizontal shadow distance in metres (0 = no cap) /// /// # Returns /// * `ShadowingResult` struct containing various shadow maps (ground, vegetation, walls) as PyArrays. @@ -659,6 +736,7 @@ pub fn calculate_shadows_wall_ht_25( walls_scheme: Option>, aspect_scheme: Option>, min_sun_elev_deg: Option, + max_shadow_distance_m: Option, ) -> PyResult { let dsm_view = dsm.as_array(); let shape = dsm_view.shape(); @@ -672,9 +750,18 @@ pub fn calculate_shadows_wall_ht_25( let num_veg_inputs = veg_inputs_provided.iter().filter(|&&x| x).count(); let (veg_canopy_dsm_view_opt, veg_trunk_dsm_view_opt, bush_view_opt) = if num_veg_inputs == 3 { - let veg_canopy_view = veg_canopy_dsm.as_ref().unwrap().as_array(); - let veg_trunk_view = veg_trunk_dsm.as_ref().unwrap().as_array(); - let bush_view = bush.as_ref().unwrap().as_array(); + let veg_canopy_view = veg_canopy_dsm + .as_ref() + .ok_or_else(|| pyo3::exceptions::PyValueError::new_err("veg_canopy_dsm is missing"))? 
+ .as_array(); + let veg_trunk_view = veg_trunk_dsm + .as_ref() + .ok_or_else(|| pyo3::exceptions::PyValueError::new_err("veg_trunk_dsm is missing"))? + .as_array(); + let bush_view = bush + .as_ref() + .ok_or_else(|| pyo3::exceptions::PyValueError::new_err("bush is missing"))? + .as_array(); if veg_canopy_view.shape() != shape { return Err(pyo3::exceptions::PyValueError::new_err( "veg_canopy_dsm must have the same shape as dsm.", @@ -756,7 +843,9 @@ pub fn calculate_shadows_wall_ht_25( aspect_view_opt, walls_scheme_view_opt, aspect_scheme_view_opt, + true, min_sun_elev_deg.unwrap_or(5.0_f32), + max_shadow_distance_m.unwrap_or(0.0_f32), ); let py_result = ShadowingResult { diff --git a/rust/src/sky.rs b/rust/src/sky.rs index 5f294e0..f2e4c51 100644 --- a/rust/src/sky.rs +++ b/rust/src/sky.rs @@ -1,5 +1,5 @@ use crate::{emissivity_models, patch_radiation, sunlit_shaded_patches}; -use ndarray::{Array1, Array2}; +use ndarray::{Array1, Array2, ArrayView1, ArrayView2, ArrayView3}; use numpy::{ IntoPyArray, PyArray1, PyArray2, PyReadonlyArray1, PyReadonlyArray2, PyReadonlyArray3, }; @@ -8,6 +8,90 @@ use rayon::prelude::*; const PI: f32 = std::f32::consts::PI; const SBC: f32 = 5.67051e-8; // Stefan-Boltzmann constant +const MIN_SUN_ELEVATION_RAD: f32 = 3.0 * PI / 180.0; // 3° threshold for low sun guard + +/// Extract a single shadow bit from a bitpacked shadow matrix. +/// Shape: (rows, cols, n_pack) where n_pack = ceil(n_patches / 8). +/// Returns true if the shadow bit is set (was 255 in the original u8 format). 
+#[inline(always)] +fn get_shadow_bit(packed: &ArrayView3, r: usize, c: usize, patch: usize) -> bool { + (packed[[r, c, patch >> 3]] >> (patch & 7)) & 1 == 1 +} + +/// Sun position parameters +#[pyclass] +#[derive(Clone)] +pub struct SunParams { + #[pyo3(get, set)] + pub altitude: f32, + #[pyo3(get, set)] + pub azimuth: f32, +} + +#[pymethods] +impl SunParams { + #[new] + pub fn new(altitude: f32, azimuth: f32) -> Self { + Self { altitude, azimuth } + } +} + +/// Sky model parameters +#[pyclass] +#[derive(Clone)] +pub struct SkyParams { + #[pyo3(get, set)] + pub esky: f32, + #[pyo3(get, set)] + pub ta: f32, + #[pyo3(get, set)] + pub cyl: bool, + #[pyo3(get, set)] + pub wall_scheme: bool, + #[pyo3(get, set)] + pub albedo: f32, +} + +#[pymethods] +impl SkyParams { + #[new] + pub fn new(esky: f32, ta: f32, cyl: bool, wall_scheme: bool, albedo: f32) -> Self { + Self { + esky, + ta, + cyl, + wall_scheme, + albedo, + } + } +} + +/// Surface radiation parameters +#[pyclass] +#[derive(Clone)] +pub struct SurfaceParams { + #[pyo3(get, set)] + pub tgwall: f32, + #[pyo3(get, set)] + pub ewall: f32, + #[pyo3(get, set)] + pub rad_i: f32, + #[pyo3(get, set)] + pub rad_d: f32, +} + +#[pymethods] +impl SurfaceParams { + #[new] + pub fn new(tgwall: f32, ewall: f32, rad_i: f32, rad_d: f32) -> Self { + Self { + tgwall, + ewall, + rad_i, + rad_d, + } + } +} #[pyclass] pub struct SkyResult { @@ -99,66 +183,89 @@ impl PixelResult { ldown_ref: 0.0, } } + + fn nan() -> Self { + Self { + lside_sky: f32::NAN, + ldown_sky: f32::NAN, + lside_veg: f32::NAN, + ldown_veg: f32::NAN, + lside_sun: f32::NAN, + lside_sh: f32::NAN, + ldown_sun: f32::NAN, + ldown_sh: f32::NAN, + kside_d: f32::NAN, + kref_sun: f32::NAN, + kref_sh: f32::NAN, + kref_veg: f32::NAN, + least: f32::NAN, + lsouth: f32::NAN, + lwest: f32::NAN, + lnorth: f32::NAN, + lside_ref: f32::NAN, + ldown_ref: f32::NAN, + } + } } -#[pyfunction] +/// Pure-ndarray result from anisotropic sky calculation (no PyO3 types). 
+pub(crate) struct SkyResultPure { + pub ldown: Array2, + pub lside: Array2, + pub lside_sky: Array2, + pub lside_veg: Array2, + pub lside_sh: Array2, + pub lside_sun: Array2, + pub lside_ref: Array2, + pub least: Array2, + pub lsouth: Array2, + pub lwest: Array2, + pub lnorth: Array2, + pub keast: Array2, + pub ksouth: Array2, + pub kwest: Array2, + pub knorth: Array2, + pub kside_i: Array2, + pub kside_d: Array2, + pub kside: Array2, +} + +/// Pure-ndarray anisotropic sky calculation, callable from pipeline.rs. #[allow(clippy::too_many_arguments)] #[allow(non_snake_case)] -pub fn anisotropic_sky( - py: Python, - shmat: PyReadonlyArray3, - vegshmat: PyReadonlyArray3, - vbshvegshmat: PyReadonlyArray3, +pub(crate) fn anisotropic_sky_pure( + shmat: ArrayView3, + vegshmat: ArrayView3, + vbshvegshmat: ArrayView3, solar_altitude: f32, solar_azimuth: f32, - asvf: PyReadonlyArray2, - cyl: bool, esky: f32, - l_patches: PyReadonlyArray2, - wall_scheme: bool, - voxel_table: Option>, - voxel_maps: Option>, - steradians: PyReadonlyArray1, ta: f32, + cyl: bool, + wall_scheme: bool, + albedo: f32, tgwall: f32, ewall: f32, - lup: PyReadonlyArray2, rad_i: f32, rad_d: f32, - _rad_g: f32, - lv: PyReadonlyArray2, - albedo: f32, - _anisotropic_diffuse: bool, - _diffsh: PyReadonlyArray3, - shadow: PyReadonlyArray2, - kup_e: PyReadonlyArray2, - kup_s: PyReadonlyArray2, - kup_w: PyReadonlyArray2, - kup_n: PyReadonlyArray2, - _current_step: i32, -) -> PyResult> { - // Convert PyReadonlyArray to ArrayView for easier manipulation - let shmat = shmat.as_array(); - let vegshmat = vegshmat.as_array(); - let vbshvegshmat = vbshvegshmat.as_array(); - let asvf = asvf.as_array(); - let l_patches = l_patches.as_array(); - let voxel_table = voxel_table.as_ref().map(|v| v.as_array()); - let voxel_maps = voxel_maps.as_ref().map(|v| v.as_array()); - let steradians = steradians.as_array(); - let lup = lup.as_array(); - let lv = lv.as_array(); - let shadow = shadow.as_array(); - let kup_e = 
kup_e.as_array(); - let kup_s = kup_s.as_array(); - let kup_w = kup_w.as_array(); - let kup_n = kup_n.as_array(); - + asvf: ArrayView2, + l_patches: ArrayView2, + steradians: ArrayView1, + lup: ArrayView2, + lv: ArrayView2, + shadow: ArrayView2, + kup_e: ArrayView2, + kup_s: ArrayView2, + kup_w: ArrayView2, + kup_n: ArrayView2, + voxel_table: Option>, + voxel_maps: Option>, + valid: Option>, +) -> SkyResultPure { let rows = shmat.shape()[0]; let cols = shmat.shape()[1]; let n_patches = l_patches.shape()[0]; - // Output arrays let mut lside_sky = Array2::::zeros((rows, cols)); let mut ldown_sky = Array2::::zeros((rows, cols)); let mut lside_veg = Array2::::zeros((rows, cols)); @@ -178,16 +285,9 @@ pub fn anisotropic_sky( let mut lside_ref = Array2::::zeros((rows, cols)); let mut ldown_ref = Array2::::zeros((rows, cols)); - // Patch altitudes and azimuths let patch_altitude = l_patches.column(0).to_owned(); let patch_azimuth = l_patches.column(1).to_owned(); - // Calculate unique altitudes for returning from function - let mut skyalt_vec: Vec = patch_altitude.iter().cloned().collect(); - skyalt_vec.sort_by(|a, b| a.partial_cmp(b).unwrap()); - skyalt_vec.dedup(); - let skyalt = Array1::::from(skyalt_vec); - let deg2rad = PI / 180.0; // Shortwave normalization @@ -205,15 +305,19 @@ pub fn anisotropic_sky( let (_patch_emissivity_normalized, esky_band) = emissivity_models::model2(&l_patches.to_owned(), esky, ta); - // Create a flat list of pixel indices to parallelize over + // Main parallel computation over pixels let pixel_indices: Vec<(usize, usize)> = (0..rows) .flat_map(|r| (0..cols).map(move |c| (r, c))) .collect(); - // Main parallel computation over pixels let pixel_results: Vec = pixel_indices .into_par_iter() .map(|(r, c)| { + if let Some(ref v) = valid { + if v[[r, c]] == 0 { + return PixelResult::nan(); + } + } let mut pres = PixelResult::new(); let pixel_asvf = asvf[[r, c]]; @@ -222,15 +326,17 @@ pub fn anisotropic_sky( let p_azi = patch_azimuth[i]; let 
steradian = steradians[i]; - let temp_sky = shmat[[r, c, i]] == 1.0 && vegshmat[[r, c, i]] == 1.0; - let temp_vegsh = vegshmat[[r, c, i]] == 0.0 || vbshvegshmat[[r, c, i]] == 0.0; - let temp_sh = (1.0 - shmat[[r, c, i]]) * vbshvegshmat[[r, c, i]] == 1.0; + let sh = get_shadow_bit(&shmat, r, c, i); + let vsh = get_shadow_bit(&vegshmat, r, c, i); + let vbsh = get_shadow_bit(&vbshvegshmat, r, c, i); + let temp_sky = sh && vsh; + let temp_vegsh = !vsh || !vbsh; + let temp_sh = !sh && vbsh; if cyl { let angle_of_incidence = (p_alt * deg2rad).cos(); let angle_of_incidence_h = (p_alt * deg2rad).sin(); - // Longwave from sky if temp_sky { let temp_emissivity = esky_band[i]; let ta_k = ta + 273.15; @@ -251,7 +357,6 @@ pub fn anisotropic_sky( pres.lnorth += ln; } - // Longwave from vegetation if temp_vegsh { let (ls, ld, le, lso, lw, ln) = patch_radiation::longwave_from_veg_pixel( steradian, @@ -270,7 +375,6 @@ pub fn anisotropic_sky( pres.lnorth += ln; } - // Longwave from buildings if temp_sh { let (sunlit_patch, shaded_patch) = sunlit_shaded_patches::shaded_or_sunlit_pixel( @@ -283,7 +387,7 @@ pub fn anisotropic_sky( if !wall_scheme { let azimuth_difference = (solar_azimuth - p_azi).abs(); - let (lside_sun, lside_sh, ldown_sun, ldown_sh, le, lso, lw, ln) = + let (ls_sun, ls_sh, ld_sun, ld_sh, le, lso, lw, ln) = patch_radiation::longwave_from_buildings_pixel( steradian, angle_of_incidence, @@ -297,10 +401,10 @@ pub fn anisotropic_sky( ta, tgwall, ); - pres.lside_sun += lside_sun; - pres.lside_sh += lside_sh; - pres.ldown_sun += ldown_sun; - pres.ldown_sh += ldown_sh; + pres.lside_sun += ls_sun; + pres.lside_sh += ls_sh; + pres.ldown_sun += ld_sun; + pres.ldown_sh += ld_sh; pres.least += le; pres.lsouth += lso; pres.lwest += lw; @@ -308,28 +412,26 @@ pub fn anisotropic_sky( } else { let voxel_map_val = voxel_maps.as_ref().unwrap()[[r, c, i]]; if voxel_map_val > 0.0 { - // Wall - let (lside_sun, lside_sh, ldown_sun, ldown_sh, le, lso, lw, ln) = + let (ls_sun, ls_sh, ld_sun, 
ld_sh, le, lso, lw, ln) = patch_radiation::longwave_from_buildings_wall_scheme_pixel( - voxel_table.as_ref().unwrap().view(), + *voxel_table.as_ref().unwrap(), voxel_map_val as usize, steradian, angle_of_incidence, angle_of_incidence_h, p_azi, ); - pres.lside_sun += lside_sun; - pres.lside_sh += lside_sh; - pres.ldown_sun += ldown_sun; - pres.ldown_sh += ldown_sh; + pres.lside_sun += ls_sun; + pres.lside_sh += ls_sh; + pres.ldown_sun += ld_sun; + pres.ldown_sh += ld_sh; pres.least += le; pres.lsouth += lso; pres.lwest += lw; pres.lnorth += ln; } else { - // Roof let azimuth_difference = (solar_azimuth - p_azi).abs(); - let (lside_sun, lside_sh, ldown_sun, ldown_sh, le, lso, lw, ln) = + let (ls_sun, ls_sh, ld_sun, ld_sh, le, lso, lw, ln) = patch_radiation::longwave_from_buildings_pixel( steradian, angle_of_incidence, @@ -343,10 +445,10 @@ pub fn anisotropic_sky( ta, tgwall, ); - pres.lside_sun += lside_sun; - pres.lside_sh += lside_sh; - pres.ldown_sun += ldown_sun; - pres.ldown_sh += ldown_sh; + pres.lside_sun += ls_sun; + pres.lside_sh += ls_sh; + pres.ldown_sun += ld_sun; + pres.ldown_sh += ld_sh; pres.least += le; pres.lsouth += lso; pres.lwest += lw; @@ -355,7 +457,6 @@ pub fn anisotropic_sky( } } - // Shortwave from sky if solar_altitude > 0.0 { if temp_sky { pres.kside_d += lum_chi[i] * angle_of_incidence * steradian; @@ -387,15 +488,15 @@ pub fn anisotropic_sky( } } - // Reflected longwave calculation (loop over patches again for this pixel) + // Reflected longwave let mut pres_with_reflection = pres; for i in 0..n_patches { let p_alt = patch_altitude[i]; let p_azi = patch_azimuth[i]; let steradian = steradians[i]; - let temp_sh = shmat[[r, c, i]] == 0.0 - || vegshmat[[r, c, i]] == 0.0 - || vbshvegshmat[[r, c, i]] == 0.0; + let temp_sh = !get_shadow_bit(&shmat, r, c, i) + || !get_shadow_bit(&vegshmat, r, c, i) + || !get_shadow_bit(&vbshvegshmat, r, c, i); if temp_sh { let angle_of_incidence = (p_alt * deg2rad).cos(); @@ -421,7 +522,7 @@ pub fn 
anisotropic_sky( }) .collect(); - // Populate the final 2D arrays from the results + // Populate final 2D arrays for (idx, pres) in pixel_results.into_iter().enumerate() { let r = idx / cols; let c = idx % cols; @@ -445,11 +546,9 @@ pub fn anisotropic_sky( ldown_ref[[r, c]] = pres.ldown_ref; } - // Sum of all Lside components (sky, vegetation, sunlit and shaded buildings, reflected) let lside = &lside_sky + &lside_veg + &lside_sh + &lside_sun + &lside_ref; let ldown = &ldown_sky + &ldown_veg + &ldown_sh + &ldown_sun + &ldown_ref; - // Direct radiation let mut kside_i = Array2::::zeros((rows, cols)); if cyl { kside_i = &shadow * rad_i * (solar_altitude * deg2rad).cos(); @@ -468,27 +567,278 @@ pub fn anisotropic_sky( ksouth = &kup_s * 0.5; } + SkyResultPure { + ldown, + lside, + lside_sky, + lside_veg, + lside_sh, + lside_sun, + lside_ref, + least, + lsouth, + lwest, + lnorth, + keast, + ksouth, + kwest, + knorth, + kside_i, + kside_d, + kside, + } +} + +/// Pure-ndarray weighted patch sum, callable from pipeline.rs. 
+pub(crate) fn weighted_patch_sum_pure( + patches: ArrayView3, + weights: ArrayView1, +) -> Array2 { + let rows = patches.shape()[0]; + let cols = patches.shape()[1]; + let n_patches = patches.shape()[2]; + + let pixel_results: Vec = (0..rows * cols) + .into_par_iter() + .map(|idx| { + let r = idx / cols; + let c = idx % cols; + let mut sum = 0.0f32; + for i in 0..n_patches { + sum += patches[[r, c, i]] * weights[[i]]; + } + sum + }) + .collect(); + + Array2::from_shape_vec((rows, cols), pixel_results).unwrap() +} + +#[pyfunction] +#[allow(clippy::too_many_arguments)] +#[allow(non_snake_case)] +pub fn anisotropic_sky( + py: Python, + shmat: PyReadonlyArray3, + vegshmat: PyReadonlyArray3, + vbshvegshmat: PyReadonlyArray3, + sun: &SunParams, + asvf: PyReadonlyArray2, + sky: &SkyParams, + l_patches: PyReadonlyArray2, + voxel_table: Option>, + voxel_maps: Option>, + steradians: PyReadonlyArray1, + surface: &SurfaceParams, + lup: PyReadonlyArray2, + lv: PyReadonlyArray2, + shadow: PyReadonlyArray2, + kup_e: PyReadonlyArray2, + kup_s: PyReadonlyArray2, + kup_w: PyReadonlyArray2, + kup_n: PyReadonlyArray2, +) -> PyResult> { + let voxel_table_view = voxel_table.as_ref().map(|v| v.as_array()); + let voxel_maps_view = voxel_maps.as_ref().map(|v| v.as_array()); + + // Compute unique altitudes for the PyO3 return value + let l_patches_v = l_patches.as_array(); + let patch_altitude = l_patches_v.column(0); + let mut skyalt_vec: Vec = patch_altitude.iter().cloned().collect(); + skyalt_vec.sort_by(|a, b| a.partial_cmp(b).unwrap()); + skyalt_vec.dedup(); + let skyalt = Array1::::from(skyalt_vec); + + let pure_result = anisotropic_sky_pure( + shmat.as_array(), + vegshmat.as_array(), + vbshvegshmat.as_array(), + sun.altitude, + sun.azimuth, + sky.esky, + sky.ta, + sky.cyl, + sky.wall_scheme, + sky.albedo, + surface.tgwall, + surface.ewall, + surface.rad_i, + surface.rad_d, + asvf.as_array(), + l_patches_v, + steradians.as_array(), + lup.as_array(), + lv.as_array(), + 
shadow.as_array(), + kup_e.as_array(), + kup_s.as_array(), + kup_w.as_array(), + kup_n.as_array(), + voxel_table_view, + voxel_maps_view, + None, + ); + + let steradians_owned = steradians.as_array().to_owned(); + let result = SkyResult { - ldown: ldown.into_pyarray(py).unbind(), - lside: lside.into_pyarray(py).unbind(), - lside_sky: lside_sky.into_pyarray(py).unbind(), - lside_veg: lside_veg.into_pyarray(py).unbind(), - lside_sh: lside_sh.into_pyarray(py).unbind(), - lside_sun: lside_sun.into_pyarray(py).unbind(), - lside_ref: lside_ref.into_pyarray(py).unbind(), - least: least.into_pyarray(py).unbind(), - lwest: lwest.into_pyarray(py).unbind(), - lnorth: lnorth.into_pyarray(py).unbind(), - lsouth: lsouth.into_pyarray(py).unbind(), - keast: keast.into_pyarray(py).unbind(), - ksouth: ksouth.into_pyarray(py).unbind(), - kwest: kwest.into_pyarray(py).unbind(), - knorth: knorth.into_pyarray(py).unbind(), - kside_i: kside_i.into_pyarray(py).unbind(), - kside_d: kside_d.into_pyarray(py).unbind(), - kside: kside.into_pyarray(py).unbind(), - steradians: steradians.mapv(|v| v).into_pyarray(py).unbind(), + ldown: pure_result.ldown.into_pyarray(py).unbind(), + lside: pure_result.lside.into_pyarray(py).unbind(), + lside_sky: pure_result.lside_sky.into_pyarray(py).unbind(), + lside_veg: pure_result.lside_veg.into_pyarray(py).unbind(), + lside_sh: pure_result.lside_sh.into_pyarray(py).unbind(), + lside_sun: pure_result.lside_sun.into_pyarray(py).unbind(), + lside_ref: pure_result.lside_ref.into_pyarray(py).unbind(), + least: pure_result.least.into_pyarray(py).unbind(), + lwest: pure_result.lwest.into_pyarray(py).unbind(), + lnorth: pure_result.lnorth.into_pyarray(py).unbind(), + lsouth: pure_result.lsouth.into_pyarray(py).unbind(), + keast: pure_result.keast.into_pyarray(py).unbind(), + ksouth: pure_result.ksouth.into_pyarray(py).unbind(), + kwest: pure_result.kwest.into_pyarray(py).unbind(), + knorth: pure_result.knorth.into_pyarray(py).unbind(), + kside_i: 
pure_result.kside_i.into_pyarray(py).unbind(), + kside_d: pure_result.kside_d.into_pyarray(py).unbind(), + kside: pure_result.kside.into_pyarray(py).unbind(), + steradians: steradians_owned.into_pyarray(py).unbind(), skyalt: skyalt.into_pyarray(py).unbind(), }; Py::new(py, result) } + +/// Per-pixel cylindric wedge shadow fraction calculation. +/// +/// Computes F_sh for a single pixel given pre-computed tan(zenith) and the +/// SVF-weighted building angle for that pixel. +#[allow(non_snake_case)] +fn cylindric_wedge_pixel(tan_zen: f32, svfalfa_val: f32) -> f32 { + let tan_alfa = svfalfa_val.tan().max(1e-6); + let ba = 1.0 / tan_alfa; + let tan_product = (tan_alfa * tan_zen).max(1e-6); + + let xa = 1.0 - 2.0 / tan_product; + let ha = 2.0 / tan_product; + let hkil = 2.0 * ba * ha; + + let ukil = if xa < 0.0 { + let qa = tan_zen / 2.0; + let za = (ba * ba - qa * qa / 4.0).max(0.0).sqrt(); + let phi = (za / qa.max(1e-10)).atan(); + let cos_phi = phi.cos(); + let sin_phi = phi.sin(); + let denom = (1.0 - cos_phi).max(1e-10); + let a = (sin_phi - phi * cos_phi) / denom; + 2.0 * ba * xa * a + } else { + 0.0 + }; + + let s_surf = hkil + ukil; + (2.0 * PI * ba - s_surf) / (2.0 * PI * ba) +} + +/// Pure-ndarray implementation of cylindric wedge shadow fraction. +/// Callable from pipeline.rs (fused path) or from the PyO3 wrapper (modular path). 
+#[allow(non_snake_case)] +pub(crate) fn cylindric_wedge_pure(zen: f32, svfalfa: ArrayView2) -> Array2 { + cylindric_wedge_pure_masked(zen, svfalfa, None) +} + +pub(crate) fn cylindric_wedge_pure_masked( + zen: f32, + svfalfa: ArrayView2, + valid: Option>, +) -> Array2 { + let rows = svfalfa.shape()[0]; + let cols = svfalfa.shape()[1]; + + // Guard against low sun angles where tan(zen) → infinity + let altitude_rad = PI / 2.0 - zen; + if altitude_rad < MIN_SUN_ELEVATION_RAD { + return Array2::::ones((rows, cols)); + } + + let tan_zen = zen.tan(); + + let pixel_results: Vec = (0..rows * cols) + .into_par_iter() + .map(|idx| { + let r = idx / cols; + let c = idx % cols; + if let Some(ref v) = valid { + if v[[r, c]] == 0 { + return f32::NAN; + } + } + cylindric_wedge_pixel(tan_zen, svfalfa[[r, c]]) + }) + .collect(); + + Array2::from_shape_vec((rows, cols), pixel_results).unwrap() +} + +/// Fraction of sunlit walls based on sun altitude and SVF-weighted building angles. +/// +/// Args: +/// zen: Sun zenith angle (radians, scalar) +/// svfalfa: SVF-related angle grid (2D array, radians) +/// +/// Returns: +/// F_sh: Shadow fraction grid (0 = fully sunlit, 1 = fully shaded) +/// +/// At very low sun altitudes (< 3°), returns F_sh = 1.0 to avoid +/// numerical instability from tan(zen) approaching infinity. +#[pyfunction] +#[allow(non_snake_case)] +pub fn cylindric_wedge( + py: Python, + zen: f32, + svfalfa: PyReadonlyArray2, +) -> PyResult>> { + let result = cylindric_wedge_pure(zen, svfalfa.as_array()); + Ok(result.into_pyarray(py).unbind()) +} + +/// Weighted sum over the patch dimension of a 3D array. +/// +/// Computes: result[r, c] = sum_i(patches[r, c, i] * weights[i]) +/// +/// This replaces the Python loop: +/// for idx in range(n_patches): +/// ani_lum += diffsh[:,:,idx] * lv[idx, 2] +/// +/// Args: +/// patches: 3D array (rows, cols, n_patches) - e.g. diffuse shadow matrix +/// weights: 1D array (n_patches,) - e.g. 
Perez luminance weights +/// +/// Returns: +/// 2D array (rows, cols) - weighted sum +#[pyfunction] +pub fn weighted_patch_sum( + py: Python, + patches: PyReadonlyArray3, + weights: PyReadonlyArray1, +) -> PyResult>> { + let patches = patches.as_array(); + let weights = weights.as_array(); + let rows = patches.shape()[0]; + let cols = patches.shape()[1]; + let n_patches = patches.shape()[2]; + + let pixel_results: Vec = (0..rows * cols) + .into_par_iter() + .map(|idx| { + let r = idx / cols; + let c = idx % cols; + let mut sum = 0.0f32; + for i in 0..n_patches { + sum += patches[[r, c, i]] * weights[[i]]; + } + sum + }) + .collect(); + + let result = Array2::from_shape_vec((rows, cols), pixel_results) + .map_err(|e| pyo3::exceptions::PyValueError::new_err(e.to_string()))?; + Ok(result.into_pyarray(py).unbind()) +} diff --git a/rust/src/skyview.rs b/rust/src/skyview.rs index 3fcab37..c650877 100644 --- a/rust/src/skyview.rs +++ b/rust/src/skyview.rs @@ -1,17 +1,147 @@ use core::f32; -use ndarray::{Array2, Array3, ArrayView2, Zip}; +use ndarray::{s, Array2, Array3, ArrayView2, Zip}; use numpy::{IntoPyArray, PyArray2, PyArray3, PyReadonlyArray2}; use pyo3::prelude::*; +#[cfg(feature = "gpu")] +use std::collections::VecDeque; use std::f32::consts::PI; -use std::sync::atomic::{AtomicUsize, Ordering}; +use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering}; use std::sync::Arc; // Import the correct result struct from shadowing use crate::shadowing::{calculate_shadows_rust, ShadowingResultRust}; +/// Compute shadows for SVF: uses GPU-optimized path when available, CPU fallback otherwise. +/// +/// The GPU path skips the wall shader and copies only 3 arrays (instead of 10), +/// saving ~70% staging bandwidth per patch. 
+fn compute_svf_shadows( + dsm: ArrayView2, + veg_canopy: Option>, + veg_trunk: Option>, + bush: Option>, + azimuth: f32, + altitude: f32, + scale: f32, + max_dsm_ht: f32, + min_sun_elev: f32, + max_shadow_distance_m: f32, +) -> ShadowingResultRust { + #[cfg(feature = "gpu")] + { + if let Some(gpu_ctx) = crate::shadowing::get_gpu_context() { + match gpu_ctx.compute_shadows_for_svf( + dsm, + veg_canopy, + veg_trunk, + bush, + azimuth, + altitude, + scale, + max_dsm_ht, + min_sun_elev, + max_shadow_distance_m, + ) { + Ok(r) => { + let dim = dsm.dim(); + return ShadowingResultRust { + bldg_sh: r.bldg_sh, + veg_sh: r.veg_sh.unwrap_or_else(|| Array2::ones(dim)), + veg_blocks_bldg_sh: r + .veg_blocks_bldg_sh + .unwrap_or_else(|| Array2::ones(dim)), + wall_sh: None, + wall_sun: None, + wall_sh_veg: None, + face_sh: None, + face_sun: None, + sh_on_wall: None, + }; + } + Err(e) => { + eprintln!("[GPU] SVF shadow failed: {}. Falling back to CPU.", e); + } + } + } + } + + // CPU fallback + calculate_shadows_rust( + azimuth, + altitude, + scale, + max_dsm_ht, + dsm, + veg_canopy, + veg_trunk, + bush, + None, + None, + None, + None, + false, + min_sun_elev, + max_shadow_distance_m, + ) +} + // Correction factor applied in finalize step const LAST_ANNULUS_CORRECTION: f32 = 3.0459e-4; +/// Pre-computed total weights for a single sky patch. +/// +/// Since the shadow array is identical across all annuli within a patch, the +/// accumulation `Σ(wᵢ × sh) = (Σwᵢ) × sh` allows collapsing the inner annulus +/// loop from ~10 iterations to a single weighted accumulation. +struct PatchWeights { + weight_iso: f32, + weight_n: f32, + weight_e: f32, + weight_s: f32, + weight_w: f32, +} + +/// Sum annulus weights for a patch, collapsing ~10 annuli to scalar totals. 
+fn precompute_patch_weights(patch: &PatchInfo) -> PatchWeights { + let n = 90.0_f32; + let common_w_factor = (1.0 / (2.0 * PI)) * (PI / (2.0 * n)).sin(); + let steprad_iso = (360.0 / patch.azimuth_patches) * (PI / 180.0); + let steprad_aniso = (360.0 / patch.azimuth_patches_aniso) * (PI / 180.0); + + let mut sin_term_sum = 0.0_f32; + for annulus_idx in patch.annulino_start..=patch.annulino_end { + let annulus = 91.0 - annulus_idx as f32; + sin_term_sum += ((PI * (2.0 * annulus - 1.0)) / (2.0 * n)).sin(); + } + + let total_iso = steprad_iso * common_w_factor * sin_term_sum; + let total_aniso = steprad_aniso * common_w_factor * sin_term_sum; + + PatchWeights { + weight_iso: total_iso, + weight_n: if patch.azimuth >= 270.0 || patch.azimuth < 90.0 { + total_aniso + } else { + 0.0 + }, + weight_e: if patch.azimuth >= 0.0 && patch.azimuth < 180.0 { + total_aniso + } else { + 0.0 + }, + weight_s: if patch.azimuth >= 90.0 && patch.azimuth < 270.0 { + total_aniso + } else { + 0.0 + }, + weight_w: if patch.azimuth >= 180.0 && patch.azimuth < 360.0 { + total_aniso + } else { + 0.0 + }, + } +} + // Struct to hold patch configurations pub struct PatchInfo { @@ -23,7 +153,7 @@ pub struct PatchInfo { pub annulino_end: i32, } -fn create_patches(option: u8) -> Vec { +fn create_patches(option: u8) -> PyResult> { let (annulino, altitudes, azi_starts, azimuth_patches) = match option { 1 => ( vec![0, 12, 24, 36, 48, 60, 72, 84, 90], @@ -49,7 +179,12 @@ fn create_patches(option: u8) -> Vec { vec![0, 0, 4, 4, 2, 2, 5, 5, 8, 8, 0, 0, 10, 10, 0], vec![62, 62, 60, 60, 56, 56, 48, 48, 38, 38, 26, 26, 14, 14, 2], ), - _ => panic!("Unsupported patch option: {}", option), + _ => { + return Err(pyo3::exceptions::PyValueError::new_err(format!( + "Unsupported patch option: {} (valid: 1, 2, 3, 4)", + option + ))); + } }; // Iterate over the patch configurations and create PatchInfo instances @@ -71,7 +206,7 @@ fn create_patches(option: u8) -> Vec { }); } } - patches + Ok(patches) } // Structure to 
hold SVF results for Python @@ -108,11 +243,11 @@ pub struct SvfResult { #[pyo3(get)] pub svf_veg_blocks_bldg_sh_west: Py>, #[pyo3(get)] - pub bldg_sh_matrix: Py>, + pub bldg_sh_matrix: Py>, #[pyo3(get)] - pub veg_sh_matrix: Py>, + pub veg_sh_matrix: Py>, #[pyo3(get)] - pub veg_blocks_bldg_sh_matrix: Py>, + pub veg_blocks_bldg_sh_matrix: Py>, } // Intermediate (pure Rust) SVF result used to avoid holding the GIL during compute @@ -132,16 +267,18 @@ pub struct SvfIntermediate { pub svf_veg_blocks_bldg_sh_e: Array2, pub svf_veg_blocks_bldg_sh_s: Array2, pub svf_veg_blocks_bldg_sh_w: Array2, - pub bldg_sh_matrix: Array3, - pub veg_sh_matrix: Array3, - pub veg_blocks_bldg_sh_matrix: Array3, + pub bldg_sh_matrix: Array3, + pub veg_sh_matrix: Array3, + pub veg_blocks_bldg_sh_matrix: Array3, } impl SvfIntermediate { /// Create a zero-initialized SvfIntermediate with the given dimensions. + /// Shadow matrices use bitpacked format: shape (rows, cols, ceil(patches/8)). pub fn zeros(num_rows: usize, num_cols: usize, total_patches: usize) -> Self { let shape2 = (num_rows, num_cols); - let shape3 = (num_rows, num_cols, total_patches); + let n_pack = pack_bytes(total_patches); + let shape3_packed = (num_rows, num_cols, n_pack); SvfIntermediate { svf: Array2::::zeros(shape2), @@ -159,13 +296,19 @@ impl SvfIntermediate { svf_veg_blocks_bldg_sh_e: Array2::::zeros(shape2), svf_veg_blocks_bldg_sh_s: Array2::::zeros(shape2), svf_veg_blocks_bldg_sh_w: Array2::::zeros(shape2), - bldg_sh_matrix: Array3::::zeros(shape3), - veg_sh_matrix: Array3::::zeros(shape3), - veg_blocks_bldg_sh_matrix: Array3::::zeros(shape3), + bldg_sh_matrix: Array3::::zeros(shape3_packed), + veg_sh_matrix: Array3::::zeros(shape3_packed), + veg_blocks_bldg_sh_matrix: Array3::::zeros(shape3_packed), } } } +/// Number of packed bytes needed for n_patches: ceil(n / 8). 
+#[inline(always)] +fn pack_bytes(n_patches: usize) -> usize { + (n_patches + 7) / 8 +} + fn prepare_bushes(vegdem: ArrayView2, vegdem2: ArrayView2) -> Array2 { // Allocate output array with same shape as input let mut bush_areas = Array2::::zeros(vegdem.raw_dim()); @@ -192,6 +335,8 @@ fn calculate_svf_inner( patch_option: u8, min_sun_elev_deg: Option, progress_counter: Option>, + cancel_flag: Option>, + max_shadow_distance_m: f32, ) -> PyResult { // Convert owned arrays to views for internal processing let dsm_f32 = dsm_owned.view(); @@ -205,19 +350,17 @@ fn calculate_svf_inner( let bush_f32 = prepare_bushes(vegdem_f32.view(), vegdem2_f32.view()); // Create sky patches (use patch_option argument) - let patches = create_patches(patch_option); + let patches = create_patches(patch_option)?; let total_patches = patches.len(); // Needed for 3D array dimensions // Create a single intermediate result and allocate all arrays there let mut inter = SvfIntermediate::zeros(num_rows, num_cols, total_patches); - // Process patches sequentially: compute shadows (may be parallel internally), - // immediately write shadow slices, then compute the per-patch contribution - // using local parallelism (row-chunked) and merge into accumulator. - for (patch_idx, patch) in patches.iter().enumerate() { - let dsm_view = dsm_f32.view(); - // Only pass vegetation views if usevegdem is true, otherwise pass None - let (vegdem_view, vegdem2_view, bush_view) = if usevegdem { + // Try GPU SVF accumulation path: shadow + accumulate in one GPU submission per patch, + // SVF values stay on GPU (no per-patch readback), read once at end. 
+ #[cfg(feature = "gpu")] + let use_gpu_svf = if let Some(gpu_ctx) = crate::shadowing::get_gpu_context() { + let (vc, vt, b) = if usevegdem { ( Some(vegdem_f32.view()), Some(vegdem2_f32.view()), @@ -226,76 +369,232 @@ fn calculate_svf_inner( } else { (None, None, None) }; - // Calculate shadows for this patch - let shadow_result: ShadowingResultRust = calculate_shadows_rust( - patch.azimuth, - patch.altitude, - scale, - max_local_dsm_ht, - dsm_view, - vegdem_view, - vegdem2_view, - bush_view, - None, - None, - None, - None, - min_sun_elev_deg.unwrap_or(5.0_f32), - ); - // --- Assign the shadow slices into the 3D matrices --- - inter - .bldg_sh_matrix - .slice_mut(ndarray::s![.., .., patch_idx]) - .assign(&shadow_result.bldg_sh); - if usevegdem { - inter - .veg_sh_matrix - .slice_mut(ndarray::s![.., .., patch_idx]) - .assign(&shadow_result.veg_sh); - inter - .veg_blocks_bldg_sh_matrix - .slice_mut(ndarray::s![.., .., patch_idx]) - .assign(&shadow_result.veg_blocks_bldg_sh); + match gpu_ctx.init_svf_accumulation( + num_rows, + num_cols, + usevegdem, + total_patches, + dsm_f32.view(), + vc, + vt, + b, + ) { + Ok(()) => true, + Err(e) => { + eprintln!("[GPU] SVF accumulation init failed: {}. 
CPU fallback.", e); + false + } } + } else { + false + }; + #[cfg(not(feature = "gpu"))] + let use_gpu_svf = false; - // --- Per-patch vectorized accumulation (per-pixel) --- - // --- Algorithmic block: Patch/annulus loop, weights, and accumulation --- - let n = 90.0; - let common_w_factor = (1.0 / (2.0 * PI)) * (PI / (2.0 * n)).sin(); - let steprad_iso = (360.0 / patch.azimuth_patches) * (PI / 180.0); - let steprad_aniso = (360.0 / patch.azimuth_patches_aniso) * (PI / 180.0); + // Process patches: GPU pipelined path or CPU fallback + if use_gpu_svf { + #[cfg(feature = "gpu")] + { + let gpu_ctx = crate::shadowing::get_gpu_context().ok_or_else(|| { + pyo3::exceptions::PyRuntimeError::new_err( + "GPU context became unavailable during SVF execution", + ) + })?; + let min_elev = min_sun_elev_deg.unwrap_or(5.0_f32); + const MAX_INFLIGHT_SVF_SUBMITS: usize = 8; + let progress_cap = patches.len().saturating_sub(1); + let mut inflight_submissions: VecDeque = VecDeque::new(); + let mut completed_patches: usize = 0; - for annulus_idx in patch.annulino_start..=patch.annulino_end { - let annulus = 91.0 - annulus_idx as f32; - let sin_term = ((PI * (2.0 * annulus - 1.0)) / (2.0 * n)).sin(); - let common_w_part = common_w_factor * sin_term; + for (patch_idx, patch) in patches.iter().enumerate() { + let pw = precompute_patch_weights(patch); + let submission_index = gpu_ctx + .dispatch_shadow_and_accumulate_svf( + patch_idx, + patch.azimuth, + patch.altitude, + scale, + max_local_dsm_ht, + min_elev, + max_shadow_distance_m, + pw.weight_iso, + pw.weight_n, + pw.weight_e, + pw.weight_s, + pw.weight_w, + ) + .map_err(|e| { + pyo3::exceptions::PyRuntimeError::new_err(format!( + "GPU SVF dispatch failed at patch {}: {}", + patch_idx, e + )) + })?; + inflight_submissions.push_back(submission_index); - let weight_iso = steprad_iso * common_w_part; - let weight_aniso = steprad_aniso * common_w_part; + // Dispatch is non-blocking. 
Update progress only after submitted + // GPU work is observed complete to avoid a progress bar that races ahead. + if inflight_submissions.len() >= MAX_INFLIGHT_SVF_SUBMITS { + if let Some(done_idx) = inflight_submissions.pop_front() { + gpu_ctx.wait_for_submission(done_idx).map_err(|e| { + pyo3::exceptions::PyRuntimeError::new_err(format!( + "GPU SVF synchronization failed at patch {}: {}", + patch_idx, e + )) + })?; + completed_patches += 1; + if let Some(ref counter) = progress_counter { + counter.store(completed_patches.min(progress_cap), Ordering::SeqCst); + } + } + } - // Precompute directional anisotropic weights for this patch - let weight_e = if patch.azimuth >= 0.0 && patch.azimuth < 180.0 { - weight_aniso - } else { - 0.0 - }; - let weight_s = if patch.azimuth >= 90.0 && patch.azimuth < 270.0 { - weight_aniso - } else { - 0.0 - }; - let weight_w = if patch.azimuth >= 180.0 && patch.azimuth < 360.0 { - weight_aniso - } else { - 0.0 - }; - let weight_n = if patch.azimuth >= 270.0 || patch.azimuth < 90.0 { - weight_aniso + // Check cancellation flag between patches + if let Some(ref flag) = cancel_flag { + if flag.load(Ordering::SeqCst) { + return Err(pyo3::exceptions::PyInterruptedError::new_err( + "SVF computation cancelled", + )); + } + } + } + + // Drain remaining submissions so progress advances smoothly during GPU work, + // not only at the final readback barrier. 
+ while let Some(done_idx) = inflight_submissions.pop_front() { + gpu_ctx.wait_for_submission(done_idx).map_err(|e| { + pyo3::exceptions::PyRuntimeError::new_err(format!( + "GPU SVF synchronization failed while draining submissions: {}", + e + )) + })?; + completed_patches += 1; + if let Some(ref counter) = progress_counter { + counter.store(completed_patches.min(progress_cap), Ordering::SeqCst); + } + if let Some(ref flag) = cancel_flag { + if flag.load(Ordering::SeqCst) { + return Err(pyo3::exceptions::PyInterruptedError::new_err( + "SVF computation cancelled", + )); + } + } + } + + // Read back final bitpacked shadow matrices once. + let bitpacked = gpu_ctx.read_svf_bitpacked_shadows().map_err(|e| { + pyo3::exceptions::PyRuntimeError::new_err(format!( + "GPU SVF bitpacked shadow readback failed: {}", + e + )) + })?; + inter.bldg_sh_matrix = bitpacked.bldg_sh_matrix; + inter.veg_sh_matrix = bitpacked.veg_sh_matrix; + inter.veg_blocks_bldg_sh_matrix = bitpacked.veg_blocks_bldg_sh_matrix; + + // Read back accumulated SVF values from GPU + let svf = gpu_ctx.read_svf_results().map_err(|e| { + pyo3::exceptions::PyRuntimeError::new_err(format!("GPU SVF readback failed: {}", e)) + })?; + + inter.svf = svf.svf; + inter.svf_n = svf.svf_n; + inter.svf_e = svf.svf_e; + inter.svf_s = svf.svf_s; + inter.svf_w = svf.svf_w; + + if usevegdem { + if let Some(v) = svf.svf_veg { + inter.svf_veg = v; + } + if let Some(v) = svf.svf_veg_n { + inter.svf_veg_n = v; + } + if let Some(v) = svf.svf_veg_e { + inter.svf_veg_e = v; + } + if let Some(v) = svf.svf_veg_s { + inter.svf_veg_s = v; + } + if let Some(v) = svf.svf_veg_w { + inter.svf_veg_w = v; + } + if let Some(v) = svf.svf_aveg { + inter.svf_veg_blocks_bldg_sh = v; + } + if let Some(v) = svf.svf_aveg_n { + inter.svf_veg_blocks_bldg_sh_n = v; + } + if let Some(v) = svf.svf_aveg_e { + inter.svf_veg_blocks_bldg_sh_e = v; + } + if let Some(v) = svf.svf_aveg_s { + inter.svf_veg_blocks_bldg_sh_s = v; + } + if let Some(v) = svf.svf_aveg_w 
{ + inter.svf_veg_blocks_bldg_sh_w = v; + } + } + + // Keep one progress step for finalize/packing so UI does not show + // 100% while CPU post-processing is still running. + if let Some(ref counter) = progress_counter { + counter.store(progress_cap, Ordering::SeqCst); + } + } + } else { + for (patch_idx, patch) in patches.iter().enumerate() { + // CPU fallback path + let dsm_view = dsm_f32.view(); + let (vegdem_view, vegdem2_view, bush_view) = if usevegdem { + ( + Some(vegdem_f32.view()), + Some(vegdem2_f32.view()), + Some(bush_f32.view()), + ) } else { - 0.0 + (None, None, None) }; - // Accumulate building shadows (parallel, SIMD-friendly) + let shadow_result = compute_svf_shadows( + dsm_view, + vegdem_view, + vegdem2_view, + bush_view, + patch.azimuth, + patch.altitude, + scale, + max_local_dsm_ht, + min_sun_elev_deg.unwrap_or(5.0_f32), + max_shadow_distance_m, + ); + + // Bitpack f32 shadows into matrices (bit=1 means shadow value >= 0.5) + { + let byte_idx = patch_idx >> 3; + let bit_mask = 1u8 << (patch_idx & 7); + for r in 0..num_rows { + for c in 0..num_cols { + if shadow_result.bldg_sh[[r, c]] >= 0.5 { + inter.bldg_sh_matrix[[r, c, byte_idx]] |= bit_mask; + } + } + } + if usevegdem { + for r in 0..num_rows { + for c in 0..num_cols { + if shadow_result.veg_sh[[r, c]] >= 0.5 { + inter.veg_sh_matrix[[r, c, byte_idx]] |= bit_mask; + } + if shadow_result.veg_blocks_bldg_sh[[r, c]] >= 0.5 { + inter.veg_blocks_bldg_sh_matrix[[r, c, byte_idx]] |= bit_mask; + } + } + } + } + } + + let pw = precompute_patch_weights(patch); + Zip::from(&shadow_result.bldg_sh) .and(&mut inter.svf) .and(&mut inter.svf_e) @@ -303,15 +602,14 @@ fn calculate_svf_inner( .and(&mut inter.svf_w) .and(&mut inter.svf_n) .par_for_each(|&b, svf, svf_e, svf_s, svf_w, svf_n| { - *svf += weight_iso * b; - *svf_e += weight_e * b; - *svf_s += weight_s * b; - *svf_w += weight_w * b; - *svf_n += weight_n * b; + *svf += pw.weight_iso * b; + *svf_e += pw.weight_e * b; + *svf_s += pw.weight_s * b; + *svf_w 
+= pw.weight_w * b; + *svf_n += pw.weight_n * b; }); if usevegdem { - // Accumulate vegetation shadows Zip::from(&shadow_result.veg_sh) .and(&mut inter.svf_veg) .and(&mut inter.svf_veg_e) @@ -319,14 +617,13 @@ fn calculate_svf_inner( .and(&mut inter.svf_veg_w) .and(&mut inter.svf_veg_n) .par_for_each(|&veg, svf_v, svf_v_e, svf_v_s, svf_v_w, svf_v_n| { - *svf_v += weight_iso * veg; - *svf_v_e += weight_e * veg; - *svf_v_s += weight_s * veg; - *svf_v_w += weight_w * veg; - *svf_v_n += weight_n * veg; + *svf_v += pw.weight_iso * veg; + *svf_v_e += pw.weight_e * veg; + *svf_v_s += pw.weight_s * veg; + *svf_v_w += pw.weight_w * veg; + *svf_v_n += pw.weight_n * veg; }); - // Accumulate veg-blocks-building shadows Zip::from(&shadow_result.veg_blocks_bldg_sh) .and(&mut inter.svf_veg_blocks_bldg_sh) .and(&mut inter.svf_veg_blocks_bldg_sh_e) @@ -335,124 +632,178 @@ fn calculate_svf_inner( .and(&mut inter.svf_veg_blocks_bldg_sh_n) .par_for_each( |&veg_bldg, svf_v_b, svf_v_be, svf_v_bs, svf_v_bw, svf_v_bn| { - *svf_v_b += weight_iso * veg_bldg; - *svf_v_be += weight_e * veg_bldg; - *svf_v_bs += weight_s * veg_bldg; - *svf_v_bw += weight_w * veg_bldg; - *svf_v_bn += weight_n * veg_bldg; + *svf_v_b += pw.weight_iso * veg_bldg; + *svf_v_be += pw.weight_e * veg_bldg; + *svf_v_bs += pw.weight_s * veg_bldg; + *svf_v_bw += pw.weight_w * veg_bldg; + *svf_v_bn += pw.weight_n * veg_bldg; }, ); - } // end if usevegdem - } // end annulus loop + } - // Update progress counter after this patch is fully processed - if let Some(ref counter) = progress_counter { - counter.fetch_add(1, Ordering::SeqCst); + // Update progress counter + if let Some(ref counter) = progress_counter { + counter.fetch_add(1, Ordering::SeqCst); + } + + // Check cancellation flag + if let Some(ref flag) = cancel_flag { + if flag.load(Ordering::SeqCst) { + return Err(pyo3::exceptions::PyInterruptedError::new_err( + "SVF computation cancelled", + )); + } + } } - } // end patch loop + } // Finalize: apply last-annulus 
correction and clamp values, same semantics as the previous finalize + let has_nan = dsm_f32.iter().any(|v| v.is_nan()); + inter.svf_s += LAST_ANNULUS_CORRECTION; inter.svf_w += LAST_ANNULUS_CORRECTION; - inter.svf.mapv_inplace(|x| x.min(1.0)); - inter.svf_n.mapv_inplace(|x| x.min(1.0)); - inter.svf_e.mapv_inplace(|x| x.min(1.0)); - inter.svf_s.mapv_inplace(|x| x.min(1.0)); - inter.svf_w.mapv_inplace(|x| x.min(1.0)); - - // Set NaN in outputs for NaN pixels in DSM - Zip::from(&mut inter.svf) - .and(&mut inter.svf_n) - .and(&mut inter.svf_e) - .and(&mut inter.svf_s) - .and(&mut inter.svf_w) - .and(&dsm_f32) - .for_each(|svf, svf_n, svf_e, svf_s, svf_w, &dsm_val| { - if dsm_val.is_nan() { - *svf = f32::NAN; - *svf_n = f32::NAN; - *svf_e = f32::NAN; - *svf_s = f32::NAN; - *svf_w = f32::NAN; - } - }); - - if usevegdem { - // Create correction array for veg components - let last_veg = Array2::from_shape_fn((num_rows, num_cols), |(row_idx, col_idx)| { - if vegdem2_f32[[row_idx, col_idx]] == 0.0 { - LAST_ANNULUS_CORRECTION - } else { - 0.0 - } - }); + Zip::from(&mut inter.svf).par_for_each(|x| *x = x.min(1.0)); + Zip::from(&mut inter.svf_n).par_for_each(|x| *x = x.min(1.0)); + Zip::from(&mut inter.svf_e).par_for_each(|x| *x = x.min(1.0)); + Zip::from(&mut inter.svf_s).par_for_each(|x| *x = x.min(1.0)); + Zip::from(&mut inter.svf_w).par_for_each(|x| *x = x.min(1.0)); - inter.svf_veg_s += &last_veg; - inter.svf_veg_w += &last_veg; - inter.svf_veg_blocks_bldg_sh_s += &last_veg; - inter.svf_veg_blocks_bldg_sh_w += &last_veg; - - inter.svf_veg.mapv_inplace(|x| x.min(1.0)); - inter.svf_veg_n.mapv_inplace(|x| x.min(1.0)); - inter.svf_veg_e.mapv_inplace(|x| x.min(1.0)); - inter.svf_veg_s.mapv_inplace(|x| x.min(1.0)); - inter.svf_veg_w.mapv_inplace(|x| x.min(1.0)); - inter.svf_veg_blocks_bldg_sh.mapv_inplace(|x| x.min(1.0)); - inter.svf_veg_blocks_bldg_sh_n.mapv_inplace(|x| x.min(1.0)); - inter.svf_veg_blocks_bldg_sh_e.mapv_inplace(|x| x.min(1.0)); - 
inter.svf_veg_blocks_bldg_sh_s.mapv_inplace(|x| x.min(1.0)); - inter.svf_veg_blocks_bldg_sh_w.mapv_inplace(|x| x.min(1.0)); - - // Set NaN in veg outputs for NaN pixels in DSM (split into two operations due to Zip limit) - Zip::from(&mut inter.svf_veg) - .and(&mut inter.svf_veg_n) - .and(&mut inter.svf_veg_e) - .and(&mut inter.svf_veg_s) - .and(&mut inter.svf_veg_w) + // Set NaN in outputs only when needed. + if has_nan { + Zip::from(&mut inter.svf) + .and(&mut inter.svf_n) + .and(&mut inter.svf_e) + .and(&mut inter.svf_s) + .and(&mut inter.svf_w) .and(&dsm_f32) - .for_each(|svf_veg, svf_veg_n, svf_veg_e, svf_veg_s, svf_veg_w, &dsm_val| { + .par_for_each(|svf, svf_n, svf_e, svf_s, svf_w, &dsm_val| { if dsm_val.is_nan() { - *svf_veg = f32::NAN; - *svf_veg_n = f32::NAN; - *svf_veg_e = f32::NAN; - *svf_veg_s = f32::NAN; - *svf_veg_w = f32::NAN; + *svf = f32::NAN; + *svf_n = f32::NAN; + *svf_e = f32::NAN; + *svf_s = f32::NAN; + *svf_w = f32::NAN; } }); + } - Zip::from(&mut inter.svf_veg_blocks_bldg_sh) - .and(&mut inter.svf_veg_blocks_bldg_sh_n) - .and(&mut inter.svf_veg_blocks_bldg_sh_e) - .and(&mut inter.svf_veg_blocks_bldg_sh_s) - .and(&mut inter.svf_veg_blocks_bldg_sh_w) - .and(&dsm_f32) - .for_each(|svf_vb, svf_vb_n, svf_vb_e, svf_vb_s, svf_vb_w, &dsm_val| { - if dsm_val.is_nan() { - *svf_vb = f32::NAN; - *svf_vb_n = f32::NAN; - *svf_vb_e = f32::NAN; - *svf_vb_s = f32::NAN; - *svf_vb_w = f32::NAN; + if usevegdem { + // Apply directional correction in-place without allocating a temporary full-grid array. 
+ Zip::from(&mut inter.svf_veg_s) + .and(&vegdem2_f32) + .par_for_each(|svf_veg_s, &veg2| { + if veg2 == 0.0 { + *svf_veg_s += LAST_ANNULUS_CORRECTION; + } + }); + Zip::from(&mut inter.svf_veg_w) + .and(&vegdem2_f32) + .par_for_each(|svf_veg_w, &veg2| { + if veg2 == 0.0 { + *svf_veg_w += LAST_ANNULUS_CORRECTION; + } + }); + Zip::from(&mut inter.svf_veg_blocks_bldg_sh_s) + .and(&vegdem2_f32) + .par_for_each(|svf_vb_s, &veg2| { + if veg2 == 0.0 { + *svf_vb_s += LAST_ANNULUS_CORRECTION; } }); + Zip::from(&mut inter.svf_veg_blocks_bldg_sh_w) + .and(&vegdem2_f32) + .par_for_each(|svf_vb_w, &veg2| { + if veg2 == 0.0 { + *svf_vb_w += LAST_ANNULUS_CORRECTION; + } + }); + + Zip::from(&mut inter.svf_veg).par_for_each(|x| *x = x.min(1.0)); + Zip::from(&mut inter.svf_veg_n).par_for_each(|x| *x = x.min(1.0)); + Zip::from(&mut inter.svf_veg_e).par_for_each(|x| *x = x.min(1.0)); + Zip::from(&mut inter.svf_veg_s).par_for_each(|x| *x = x.min(1.0)); + Zip::from(&mut inter.svf_veg_w).par_for_each(|x| *x = x.min(1.0)); + Zip::from(&mut inter.svf_veg_blocks_bldg_sh).par_for_each(|x| *x = x.min(1.0)); + Zip::from(&mut inter.svf_veg_blocks_bldg_sh_n).par_for_each(|x| *x = x.min(1.0)); + Zip::from(&mut inter.svf_veg_blocks_bldg_sh_e).par_for_each(|x| *x = x.min(1.0)); + Zip::from(&mut inter.svf_veg_blocks_bldg_sh_s).par_for_each(|x| *x = x.min(1.0)); + Zip::from(&mut inter.svf_veg_blocks_bldg_sh_w).par_for_each(|x| *x = x.min(1.0)); + + // Set NaN in veg outputs only when needed (split into two operations due to Zip limit) + if has_nan { + Zip::from(&mut inter.svf_veg) + .and(&mut inter.svf_veg_n) + .and(&mut inter.svf_veg_e) + .and(&mut inter.svf_veg_s) + .and(&mut inter.svf_veg_w) + .and(&dsm_f32) + .par_for_each( + |svf_veg, svf_veg_n, svf_veg_e, svf_veg_s, svf_veg_w, &dsm_val| { + if dsm_val.is_nan() { + *svf_veg = f32::NAN; + *svf_veg_n = f32::NAN; + *svf_veg_e = f32::NAN; + *svf_veg_s = f32::NAN; + *svf_veg_w = f32::NAN; + } + }, + ); + + Zip::from(&mut inter.svf_veg_blocks_bldg_sh) 
+ .and(&mut inter.svf_veg_blocks_bldg_sh_n) + .and(&mut inter.svf_veg_blocks_bldg_sh_e) + .and(&mut inter.svf_veg_blocks_bldg_sh_s) + .and(&mut inter.svf_veg_blocks_bldg_sh_w) + .and(&dsm_f32) + .par_for_each(|svf_vb, svf_vb_n, svf_vb_e, svf_vb_s, svf_vb_w, &dsm_val| { + if dsm_val.is_nan() { + *svf_vb = f32::NAN; + *svf_vb_n = f32::NAN; + *svf_vb_e = f32::NAN; + *svf_vb_s = f32::NAN; + *svf_vb_w = f32::NAN; + } + }); + } + } + + // When no vegetation, veg shadow matrices must indicate "no blocking": + // veg_sh_matrix: all bits = 1 (sky visible through vegetation at every patch) + // veg_blocks_bldg_sh_matrix: copy of bldg_sh_matrix (only buildings matter) + let n_pack = pack_bytes(total_patches); + if !usevegdem { + inter.veg_sh_matrix.fill(0xFF); + inter + .veg_blocks_bldg_sh_matrix + .assign(&inter.bldg_sh_matrix); } - // Set NaN in shadow matrices for NaN pixels in DSM - for row in 0..num_rows { - for col in 0..num_cols { - if dsm_f32[[row, col]].is_nan() { - for patch_idx in 0..total_patches { - inter.bldg_sh_matrix[[row, col, patch_idx]] = f32::NAN; - if usevegdem { - inter.veg_sh_matrix[[row, col, patch_idx]] = f32::NAN; - inter.veg_blocks_bldg_sh_matrix[[row, col, patch_idx]] = f32::NAN; + // Zero out bitpacked shadow matrices for NaN pixels only when needed. + // Run per-byte in parallel over pixels to avoid a long single-core tail. 
+ if has_nan { + for bi in 0..n_pack { + let mut bldg_plane = inter.bldg_sh_matrix.slice_mut(s![.., .., bi]); + let mut veg_plane = inter.veg_sh_matrix.slice_mut(s![.., .., bi]); + let mut vb_plane = inter.veg_blocks_bldg_sh_matrix.slice_mut(s![.., .., bi]); + Zip::from(&dsm_f32) + .and(&mut bldg_plane) + .and(&mut veg_plane) + .and(&mut vb_plane) + .par_for_each(|&dsm_val, bldg, veg, vb| { + if dsm_val.is_nan() { + *bldg = 0; + *veg = 0; + *vb = 0; } - } - } + }); } } + if let Some(ref counter) = progress_counter { + counter.store(patches.len(), Ordering::SeqCst); + } + Ok(inter) } @@ -483,8 +834,101 @@ fn svf_intermediate_to_py(py: Python, inter: SvfIntermediate) -> PyResult PyResult { + let rows = inter.svf.nrows(); + let cols = inter.svf.ncols(); + if row_start >= row_end || col_start >= col_end || row_end > rows || col_end > cols { + return Err(pyo3::exceptions::PyValueError::new_err(format!( + "Invalid core window: rows [{}, {}), cols [{}, {}) for tile {}x{}", + row_start, row_end, col_start, col_end, rows, cols + ))); + } + + Ok(SvfIntermediate { + svf: inter + .svf + .slice(s![row_start..row_end, col_start..col_end]) + .to_owned(), + svf_n: inter + .svf_n + .slice(s![row_start..row_end, col_start..col_end]) + .to_owned(), + svf_e: inter + .svf_e + .slice(s![row_start..row_end, col_start..col_end]) + .to_owned(), + svf_s: inter + .svf_s + .slice(s![row_start..row_end, col_start..col_end]) + .to_owned(), + svf_w: inter + .svf_w + .slice(s![row_start..row_end, col_start..col_end]) + .to_owned(), + svf_veg: inter + .svf_veg + .slice(s![row_start..row_end, col_start..col_end]) + .to_owned(), + svf_veg_n: inter + .svf_veg_n + .slice(s![row_start..row_end, col_start..col_end]) + .to_owned(), + svf_veg_e: inter + .svf_veg_e + .slice(s![row_start..row_end, col_start..col_end]) + .to_owned(), + svf_veg_s: inter + .svf_veg_s + .slice(s![row_start..row_end, col_start..col_end]) + .to_owned(), + svf_veg_w: inter + .svf_veg_w + .slice(s![row_start..row_end, 
col_start..col_end]) + .to_owned(), + svf_veg_blocks_bldg_sh: inter + .svf_veg_blocks_bldg_sh + .slice(s![row_start..row_end, col_start..col_end]) + .to_owned(), + svf_veg_blocks_bldg_sh_n: inter + .svf_veg_blocks_bldg_sh_n + .slice(s![row_start..row_end, col_start..col_end]) + .to_owned(), + svf_veg_blocks_bldg_sh_e: inter + .svf_veg_blocks_bldg_sh_e + .slice(s![row_start..row_end, col_start..col_end]) + .to_owned(), + svf_veg_blocks_bldg_sh_s: inter + .svf_veg_blocks_bldg_sh_s + .slice(s![row_start..row_end, col_start..col_end]) + .to_owned(), + svf_veg_blocks_bldg_sh_w: inter + .svf_veg_blocks_bldg_sh_w + .slice(s![row_start..row_end, col_start..col_end]) + .to_owned(), + bldg_sh_matrix: inter + .bldg_sh_matrix + .slice(s![row_start..row_end, col_start..col_end, ..]) + .to_owned(), + veg_sh_matrix: inter + .veg_sh_matrix + .slice(s![row_start..row_end, col_start..col_end, ..]) + .to_owned(), + veg_blocks_bldg_sh_matrix: inter + .veg_blocks_bldg_sh_matrix + .slice(s![row_start..row_end, col_start..col_end, ..]) + .to_owned(), + }) +} + // Keep existing pyfunction wrapper for backward compatibility (ignores progress) #[pyfunction] +#[pyo3(signature = (dsm_py, vegdem_py, vegdem2_py, scale, usevegdem, max_local_dsm_ht, patch_option=None, min_sun_elev_deg=None, _progress_callback=None, max_shadow_distance_m=None))] pub fn calculate_svf( py: Python, dsm_py: PyReadonlyArray2, @@ -496,8 +940,10 @@ pub fn calculate_svf( patch_option: Option, // New argument for patch option min_sun_elev_deg: Option, _progress_callback: Option, + max_shadow_distance_m: Option, ) -> PyResult> { let patch_option = patch_option.unwrap_or(2); + let max_shadow_dist = max_shadow_distance_m.unwrap_or(0.0); // Copy Python arrays into owned Rust arrays so computation can run without the GIL let dsm_owned = dsm_py.as_array().to_owned(); let vegdem_owned = vegdem_py.as_array().to_owned(); @@ -513,15 +959,18 @@ pub fn calculate_svf( patch_option, min_sun_elev_deg, None, + None, + max_shadow_dist, ) 
})?; svf_intermediate_to_py(py, inter) } -// New pyclass runner that exposes a progress() method and a calculate_svf that updates an internal counter +// New pyclass runner that exposes progress() and cancel() methods #[pyclass] pub struct SkyviewRunner { progress: Arc, + cancelled: Arc, } impl Default for SkyviewRunner { @@ -536,6 +985,7 @@ impl SkyviewRunner { pub fn new() -> Self { Self { progress: Arc::new(AtomicUsize::new(0)), + cancelled: Arc::new(AtomicBool::new(false)), } } @@ -543,6 +993,11 @@ impl SkyviewRunner { self.progress.load(Ordering::SeqCst) } + pub fn cancel(&self) { + self.cancelled.store(true, Ordering::SeqCst); + } + + #[pyo3(signature = (dsm_py, vegdem_py, vegdem2_py, scale, usevegdem, max_local_dsm_ht, patch_option=None, min_sun_elev_deg=None, max_shadow_distance_m=None))] pub fn calculate_svf( &self, py: Python, @@ -554,10 +1009,13 @@ impl SkyviewRunner { max_local_dsm_ht: f32, patch_option: Option, min_sun_elev_deg: Option, + max_shadow_distance_m: Option, ) -> PyResult> { let patch_option = patch_option.unwrap_or(2); - // reset progress + let max_shadow_dist = max_shadow_distance_m.unwrap_or(0.0); + // reset progress and cancel flag self.progress.store(0, Ordering::SeqCst); + self.cancelled.store(false, Ordering::SeqCst); // Copy arrays to owned buffers and run without the GIL so progress can be polled let dsm_owned = dsm_py.as_array().to_owned(); let vegdem_owned = vegdem_py.as_array().to_owned(); @@ -573,8 +1031,60 @@ impl SkyviewRunner { patch_option, min_sun_elev_deg, Some(self.progress.clone()), + Some(self.cancelled.clone()), + max_shadow_dist, ) })?; svf_intermediate_to_py(py, inter) } + + #[pyo3(signature = (dsm_py, vegdem_py, vegdem2_py, scale, usevegdem, max_local_dsm_ht, patch_option, min_sun_elev_deg, core_row_start, core_row_end, core_col_start, core_col_end, max_shadow_distance_m=None))] + pub fn calculate_svf_core( + &self, + py: Python, + dsm_py: PyReadonlyArray2, + vegdem_py: PyReadonlyArray2, + vegdem2_py: 
PyReadonlyArray2, + scale: f32, + usevegdem: bool, + max_local_dsm_ht: f32, + patch_option: Option, + min_sun_elev_deg: Option, + core_row_start: usize, + core_row_end: usize, + core_col_start: usize, + core_col_end: usize, + max_shadow_distance_m: Option, + ) -> PyResult> { + let patch_option = patch_option.unwrap_or(2); + let max_shadow_dist = max_shadow_distance_m.unwrap_or(0.0); + self.progress.store(0, Ordering::SeqCst); + self.cancelled.store(false, Ordering::SeqCst); + let dsm_owned = dsm_py.as_array().to_owned(); + let vegdem_owned = vegdem_py.as_array().to_owned(); + let vegdem2_owned = vegdem2_py.as_array().to_owned(); + let inter = py.allow_threads(|| { + calculate_svf_inner( + dsm_owned, + vegdem_owned, + vegdem2_owned, + scale, + usevegdem, + max_local_dsm_ht, + patch_option, + min_sun_elev_deg, + Some(self.progress.clone()), + Some(self.cancelled.clone()), + max_shadow_dist, + ) + })?; + let core_inter = crop_svf_intermediate( + inter, + core_row_start, + core_row_end, + core_col_start, + core_col_end, + )?; + svf_intermediate_to_py(py, core_inter) + } } diff --git a/rust/src/sun.rs b/rust/src/sun.rs index bd5f43a..69389ec 100644 --- a/rust/src/sun.rs +++ b/rust/src/sun.rs @@ -160,29 +160,38 @@ pub fn sun_on_surface( *f_val = f_val.min(tempbu_val); }); - let shadow2 = &tempsh * &f; - weightsumsh += &shadow2; - - let lupsh = &tempLupsh * &f; - weightsumLupsh += &lupsh; - - let albsh = &tempalbsh * &f; - weightsumalbsh += &albsh; - - let albnosh = &tempalbnosh * &f; - weightsumalbnosh += &albnosh; - + // Accumulate thermal quantities weighted by geometry (fused — no temp allocations) + Zip::from(&mut weightsumsh) + .and(&tempsh) + .and(&f) + .for_each(|w, &s, &fv| *w += s * fv); + Zip::from(&mut weightsumLupsh) + .and(&tempLupsh) + .and(&f) + .for_each(|w, &l, &fv| *w += l * fv); + Zip::from(&mut weightsumalbsh) + .and(&tempalbsh) + .and(&f) + .for_each(|w, &a, &fv| *w += a * fv); + Zip::from(&mut weightsumalbnosh) + .and(&tempalbnosh) + .and(&f) + 
.for_each(|w, &a, &fv| *w += a * fv); + + // Wall tracking (fused — eliminates tempb, tempbwall allocations) tempwallsun .slice_mut(x_p_slice) - .assign(&sunwall_mut.slice(x_c_slice)); - let tempb = &tempwallsun * &f; - let tempbwall = &f * -1. + 1.; - - tempbub.zip_mut_with(&tempb, |bub_val, &b| { - *bub_val = if *bub_val + b > 0. { 1. } else { 0. }; - }); - tempbubwall.zip_mut_with(&tempbwall, |bubwall_val, &bwall| { - *bubwall_val = if *bubwall_val + bwall > 0. { 1. } else { 0. }; + .assign(&sunwall.slice(x_c_slice)); + Zip::from(&mut tempbub) + .and(&tempwallsun) + .and(&f) + .for_each(|bub, &ws, &fv| { + let b = ws * fv; + *bub = if *bub + b > 0. { 1. } else { 0. }; + }); + Zip::from(&mut tempbubwall).and(&f).for_each(|bubw, &fv| { + let bwall = 1. - fv; + *bubw = if *bubw + bwall > 0. { 1. } else { 0. }; }); weightsumLwall.zip_mut_with(&tempbub, |w, &b| *w += b * lwall); @@ -326,3 +335,155 @@ pub fn sun_on_surface( (gvf, gvfLup, gvfalb, gvfalbnosh, gvf2) } + +/// Thermal-only version of `sun_on_surface` using cached geometry. +/// +/// Skips all building ray-tracing (f accumulation, buildings shifting, tempbubwall). +/// Uses precomputed `blocking_distance` to reconstruct f inline. +/// Returns (gvfLup, gvfalb) — the thermal-dependent outputs only. +/// `gvfalbnosh` is taken from the geometry cache by the caller. +#[allow(clippy::too_many_arguments)] +#[allow(non_snake_case)] +pub fn sun_on_surface_cached( + geom: &crate::gvf_geometry::AzimuthGeometry, + sunwall_mask: ArrayView2, + lup: ArrayView2, + albshadow: ArrayView2, + lwall: f32, + wall_albedo: f32, + first: f32, + second: f32, +) -> (Array2, Array2) { + let (sizex, sizey) = (lup.nrows(), lup.ncols()); + + // Only keep the wall latch state; shifted thermal terms are accumulated + // directly from source slices to avoid per-step full-grid temp buffers. 
+ let mut tempbub = Array2::::zeros((sizex, sizey)); + + // Thermal accumulators only + let mut weightsumwall = Array2::::zeros((sizex, sizey)); + let mut weightsumLupsh = Array2::::zeros((sizex, sizey)); + let mut weightsumLwall = Array2::::zeros((sizex, sizey)); + let mut weightsumalbsh = Array2::::zeros((sizex, sizey)); + let mut weightsumalbwall = Array2::::zeros((sizex, sizey)); + + let mut weightsumwall_first = Array2::::zeros((sizex, sizey)); + let mut weightsumLwall_first = Array2::::zeros((sizex, sizey)); + let mut weightsumLupsh_first = Array2::::zeros((sizex, sizey)); + let mut weightsumalbwall_first = Array2::::zeros((sizex, sizey)); + let mut weightsumalbsh_first = Array2::::zeros((sizex, sizey)); + + let bd = &geom.blocking_distance; + + for (n, &(dx, dy)) in geom.shifts.iter().enumerate() { + let n_u16 = n as u16; + + let absdx = dx.abs(); + let absdy = dy.abs(); + let xc1 = (dx + absdx) / 2; + let xc2 = sizex as isize + (dx - absdx) / 2; + let yc1 = (dy + absdy) / 2; + let yc2 = sizey as isize + (dy - absdy) / 2; + let xp1 = -(dx - absdx) / 2; + let xp2 = sizex as isize - (dx + absdx) / 2; + let yp1 = -(dy - absdy) / 2; + let yp2 = sizey as isize - (dy + absdy) / 2; + + let x_c_slice = s![xc1..xc2, yc1..yc2]; + let x_p_slice = s![xp1..xp2, yp1..yp2]; + + // Accumulate shifted thermal terms directly on active destination slice. + // f = 1 if n < blocking_distance[pixel], else 0. + Zip::from(weightsumLupsh.slice_mut(x_p_slice)) + .and(lup.slice(x_c_slice)) + .and(bd.slice(x_p_slice)) + .for_each(|w, &l, &b| { + if n_u16 < b { + *w += l; + } + }); + Zip::from(weightsumalbsh.slice_mut(x_p_slice)) + .and(albshadow.slice(x_c_slice)) + .and(bd.slice(x_p_slice)) + .for_each(|w, &a, &b| { + if n_u16 < b { + *w += a; + } + }); + + // Wall-sun latch update on active destination slice. + Zip::from(tempbub.slice_mut(x_p_slice)) + .and(sunwall_mask.slice(x_c_slice)) + .and(bd.slice(x_p_slice)) + .for_each(|bub, &ws, &b| { + if n_u16 < b && *bub + ws > 0. 
{ + *bub = 1.; + } + }); + + weightsumLwall.zip_mut_with(&tempbub, |w, &b| *w += b * lwall); + weightsumalbwall.zip_mut_with(&tempbub, |w, &b| *w += b * wall_albedo); + weightsumwall.zip_mut_with(&tempbub, |w, &b| *w += b); + + // Snapshot at first-height threshold + if (n + 1) as f32 <= first { + weightsumwall_first.assign(&weightsumwall); + weightsumLwall_first.assign(&weightsumLwall); + weightsumLupsh_first.assign(&weightsumLupsh); + weightsumalbwall_first.assign(&weightsumalbwall); + weightsumalbsh_first.assign(&weightsumalbsh); + } + } + + // Post-loop: compute thermal outputs (same math as original) + let wallsuninfluence_first = weightsumwall_first.mapv(|x| (x > 0.) as i32 as f32); + let wallsuninfluence_second = weightsumwall.mapv(|x| (x > 0.) as i32 as f32); + + // keep correction (uses facesh from cache) + let mut keep = Array2::::zeros((sizex, sizey)); + Zip::from(&mut keep) + .and(&weightsumwall) + .and(&geom.facesh) + .for_each(|k, &w, &fsh| { + let val = (if w == second { 1. } else { 0. }) - fsh; + *k = if val == -1. { 0. } else { val }; + }); + + // gvfLup + let gvfLup1 = ((&weightsumLwall_first + &weightsumLupsh_first) / (first + 1.)) + * &wallsuninfluence_first + + (&weightsumLupsh_first / first) * (wallsuninfluence_first.mapv(|x| 1. - x)); + + let mut weightsumLwall_mut = weightsumLwall.to_owned(); + weightsumLwall_mut.zip_mut_with(&keep, |w, &k| { + if k == 1. { + *w = 0.; + } + }); + + let gvfLup2 = ((&weightsumLwall_mut + &weightsumLupsh) / (second + 1.)) + * &wallsuninfluence_second + + (&weightsumLupsh / second) * (wallsuninfluence_second.mapv(|x| 1. - x)); + + let gvfLup = (&gvfLup1 * 0.5 + &gvfLup2 * 0.4) / 0.9; + + // gvfalb + let gvfalb1 = ((&weightsumalbwall_first + &weightsumalbsh_first) / (first + 1.)) + * &wallsuninfluence_first + + (&weightsumalbsh_first / first) * (wallsuninfluence_first.mapv(|x| 1. 
- x)); + + let mut weightsumalbwall_mut = weightsumalbwall.to_owned(); + weightsumalbwall_mut.zip_mut_with(&keep, |w, &k| { + if k == 1. { + *w = 0.; + } + }); + + let gvfalb2 = ((&weightsumalbwall_mut + &weightsumalbsh) / (second + 1.)) + * &wallsuninfluence_second + + (&weightsumalbsh / second) * (wallsuninfluence_second.mapv(|x| 1. - x)); + + let gvfalb = (&gvfalb1 * 0.5 + &gvfalb2 * 0.4) / 0.9; + + (gvfLup, gvfalb) +} diff --git a/rust/src/sunlit_shaded_patches.rs b/rust/src/sunlit_shaded_patches.rs index 468cc7d..98c4d38 100644 --- a/rust/src/sunlit_shaded_patches.rs +++ b/rust/src/sunlit_shaded_patches.rs @@ -4,7 +4,6 @@ // Vectorized function removed as it was unused. - /// Calculates whether a single patch is sunlit or shaded. /// This is a scalar version for use inside pixel-parallel loops. #[allow(dead_code)] diff --git a/rust/src/tmrt.rs b/rust/src/tmrt.rs new file mode 100644 index 0000000..987f9a8 --- /dev/null +++ b/rust/src/tmrt.rs @@ -0,0 +1,242 @@ +use ndarray::{Array2, ArrayView2}; +use numpy::{PyArray2, PyReadonlyArray2}; +use pyo3::prelude::*; +use rayon::prelude::*; + +/// Physical constants +const SBC: f32 = 5.67e-8; // Stefan-Boltzmann constant (W/m²/K⁴) +const KELVIN_OFFSET: f32 = 273.15; // Kelvin to Celsius conversion + +/// View factors for standing posture +const F_UP_STANDING: f32 = 0.06; +const F_SIDE_STANDING: f32 = 0.22; +const F_CYL_STANDING: f32 = 0.28; + +/// View factors for sitting posture +const F_UP_SITTING: f32 = 0.166666; +const F_SIDE_SITTING: f32 = 0.166666; +const F_CYL_SITTING: f32 = 0.20; + +/// Parameters for Tmrt calculation. +/// +/// Groups scalar parameters to reduce function signature complexity. 
+#[pyclass] +#[derive(Clone)] +pub struct TmrtParams { + /// Shortwave absorption coefficient (0.70 for clothed human) + #[pyo3(get, set)] + pub abs_k: f32, + /// Longwave absorption coefficient (0.97 for clothed human) + #[pyo3(get, set)] + pub abs_l: f32, + /// True for standing posture, False for sitting + #[pyo3(get, set)] + pub is_standing: bool, + /// Whether anisotropic sky model was used + #[pyo3(get, set)] + pub use_anisotropic_sky: bool, +} + +#[pymethods] +impl TmrtParams { + #[new] + pub fn new(abs_k: f32, abs_l: f32, is_standing: bool, use_anisotropic_sky: bool) -> Self { + Self { + abs_k, + abs_l, + is_standing, + use_anisotropic_sky, + } + } +} + +/// Pure-ndarray implementation of Tmrt calculation. +/// Callable from pipeline.rs (fused path) or from the PyO3 wrapper (modular path). +#[allow(clippy::too_many_arguments)] +pub(crate) fn compute_tmrt_pure( + kdown: ArrayView2, + kup: ArrayView2, + ldown: ArrayView2, + lup: ArrayView2, + kside_n: ArrayView2, + kside_e: ArrayView2, + kside_s: ArrayView2, + kside_w: ArrayView2, + lside_n: ArrayView2, + lside_e: ArrayView2, + lside_s: ArrayView2, + lside_w: ArrayView2, + kside_total: ArrayView2, + lside_total: ArrayView2, + abs_k: f32, + abs_l: f32, + is_standing: bool, + use_anisotropic_sky: bool, +) -> Array2 { + let kside_dirs_sum = &kside_n + &kside_e + &kside_s + &kside_w; + let lside_dirs_sum = &lside_n + &lside_e + &lside_s + &lside_w; + + compute_tmrt_from_dir_sums_pure( + kdown, + kup, + ldown, + lup, + kside_dirs_sum.view(), + lside_dirs_sum.view(), + kside_total, + lside_total, + abs_k, + abs_l, + is_standing, + use_anisotropic_sky, + ) +} + +/// Tmrt kernel using pre-summed directional side components. +/// +/// This avoids carrying four directional arrays through the pipeline when only +/// their sum is needed by the final Tmrt equation. 
+#[allow(clippy::too_many_arguments)] +pub(crate) fn compute_tmrt_from_dir_sums_pure( + kdown: ArrayView2, + kup: ArrayView2, + ldown: ArrayView2, + lup: ArrayView2, + kside_dirs_sum: ArrayView2, + lside_dirs_sum: ArrayView2, + kside_total: ArrayView2, + lside_total: ArrayView2, + abs_k: f32, + abs_l: f32, + is_standing: bool, + use_anisotropic_sky: bool, +) -> Array2 { + let shape = kdown.dim(); + + // Select view factors based on posture + let (f_up, f_side, f_cyl) = if is_standing { + (F_UP_STANDING, F_SIDE_STANDING, F_CYL_STANDING) + } else { + (F_UP_SITTING, F_SIDE_SITTING, F_CYL_SITTING) + }; + + // Allocate output array + let mut tmrt = Array2::::zeros(shape); + + // Compute Tmrt element-wise in parallel + tmrt.as_slice_mut() + .unwrap() + .par_iter_mut() + .enumerate() + .for_each(|(idx, out)| { + let row = idx / shape.1; + let col = idx % shape.1; + + // Skip NaN pixels — upstream NaN propagates as NaN output + let kdown_val = kdown[[row, col]]; + if !kdown_val.is_finite() { + *out = f32::NAN; + return; + } + let kup_val = kup[[row, col]]; + let ldown_val = ldown[[row, col]]; + let lup_val = lup[[row, col]]; + let kside_dirs_sum_val = kside_dirs_sum[[row, col]]; + let lside_dirs_sum_val = lside_dirs_sum[[row, col]]; + let kside_total_val = kside_total[[row, col]]; + let lside_total_val = lside_total[[row, col]]; + + // Compute absorbed shortwave radiation (same formula for both sky models) + let k_absorbed = abs_k + * (kside_total_val * f_cyl + + (kdown_val + kup_val) * f_up + + kside_dirs_sum_val * f_side); + + let l_absorbed = if use_anisotropic_sky { + abs_l + * ((ldown_val + lup_val) * f_up + + lside_total_val * f_cyl + + lside_dirs_sum_val * f_side) + } else { + abs_l * ((ldown_val + lup_val) * f_up + lside_dirs_sum_val * f_side) + }; + + // Total absorbed radiation (Sstr) + let sstr = k_absorbed + l_absorbed; + + // Convert to Tmrt using Stefan-Boltzmann law + // Tmrt = (Sstr / (abs_l × SBC))^0.25 - 273.15 + let tmrt_val = (sstr / (abs_l * 
SBC)).sqrt().sqrt() - KELVIN_OFFSET; + + // Clip to physically reasonable range + *out = tmrt_val.clamp(-50.0, 80.0); + }); + + tmrt +} + +/// Compute Mean Radiant Temperature (Tmrt) from radiation budget. +/// +/// Tmrt represents the uniform temperature of an imaginary enclosure where +/// the radiant heat exchange with the human body equals that in the actual +/// non-uniform radiant environment. +/// +/// Parameters: +/// - kdown/kup: Downwelling/upwelling shortwave radiation (W/m²) +/// - ldown/lup: Downwelling/upwelling longwave radiation (W/m²) +/// - kside_n/e/s/w: Directional shortwave radiation (W/m²) +/// - lside_n/e/s/w: Directional longwave radiation (W/m²) +/// - kside_total/lside_total: Total radiation on vertical surface (W/m²) +/// - params: TmrtParams with absorption coefficients and posture settings +/// +/// Returns: +/// - Tmrt array in degrees Celsius, clipped to [-50, 80] +/// +/// Formula: +/// Tmrt = (Sstr / (abs_l × SBC))^0.25 - 273.15 +/// where Sstr = absorbed shortwave + absorbed longwave +/// +/// Reference: +/// Lindberg et al. 
(2008): "SOLWEIG 1.0 - modelling spatial variations +/// of 3D radiant fluxes and mean radiant temperature in complex urban settings" +#[pyfunction] +pub fn compute_tmrt<'py>( + py: Python<'py>, + kdown: PyReadonlyArray2<'py, f32>, + kup: PyReadonlyArray2<'py, f32>, + ldown: PyReadonlyArray2<'py, f32>, + lup: PyReadonlyArray2<'py, f32>, + kside_n: PyReadonlyArray2<'py, f32>, + kside_e: PyReadonlyArray2<'py, f32>, + kside_s: PyReadonlyArray2<'py, f32>, + kside_w: PyReadonlyArray2<'py, f32>, + lside_n: PyReadonlyArray2<'py, f32>, + lside_e: PyReadonlyArray2<'py, f32>, + lside_s: PyReadonlyArray2<'py, f32>, + lside_w: PyReadonlyArray2<'py, f32>, + kside_total: PyReadonlyArray2<'py, f32>, + lside_total: PyReadonlyArray2<'py, f32>, + params: &TmrtParams, +) -> PyResult>> { + let result = compute_tmrt_pure( + kdown.as_array(), + kup.as_array(), + ldown.as_array(), + lup.as_array(), + kside_n.as_array(), + kside_e.as_array(), + kside_s.as_array(), + kside_w.as_array(), + lside_n.as_array(), + lside_e.as_array(), + lside_s.as_array(), + lside_w.as_array(), + kside_total.as_array(), + lside_total.as_array(), + params.abs_k, + params.abs_l, + params.is_standing, + params.use_anisotropic_sky, + ); + Ok(PyArray2::from_owned_array(py, result)) +} diff --git a/rust/src/utci.rs b/rust/src/utci.rs new file mode 100644 index 0000000..e9fedd1 --- /dev/null +++ b/rust/src/utci.rs @@ -0,0 +1,364 @@ +use numpy::{PyArray2, PyReadonlyArray2}; +use pyo3::prelude::*; +use rayon::prelude::*; + +/// Calculate UTCI polynomial approximation for a single point. +/// This is the 6th order polynomial from Bröde et al. 
+#[inline] +fn utci_polynomial(d_tmrt: f32, ta: f32, va: f32, pa: f32) -> f32 { + // Pre-compute powers to reduce redundant calculations + let ta2 = ta * ta; + let ta3 = ta2 * ta; + let ta4 = ta3 * ta; + let ta5 = ta4 * ta; + let ta6 = ta5 * ta; + + let va2 = va * va; + let va3 = va2 * va; + let va4 = va3 * va; + let va5 = va4 * va; + let va6 = va5 * va; + + let d2 = d_tmrt * d_tmrt; + let d3 = d2 * d_tmrt; + let d4 = d3 * d_tmrt; + let d5 = d4 * d_tmrt; + let d6 = d5 * d_tmrt; + + let pa2 = pa * pa; + let pa3 = pa2 * pa; + let pa4 = pa3 * pa; + let pa5 = pa4 * pa; + let pa6 = pa5 * pa; + + ta + 6.07562052e-01 + + (-2.27712343e-02) * ta + + (8.06470249e-04) * ta2 + + (-1.54271372e-04) * ta3 + + (-3.24651735e-06) * ta4 + + (7.32602852e-08) * ta5 + + (1.35959073e-09) * ta6 + + (-2.25836520e00) * va + + (8.80326035e-02) * ta * va + + (2.16844454e-03) * ta2 * va + + (-1.53347087e-05) * ta3 * va + + (-5.72983704e-07) * ta4 * va + + (-2.55090145e-09) * ta5 * va + + (-7.51269505e-01) * va2 + + (-4.08350271e-03) * ta * va2 + + (-5.21670675e-05) * ta2 * va2 + + (1.94544667e-06) * ta3 * va2 + + (1.14099531e-08) * ta4 * va2 + + (1.58137256e-01) * va3 + + (-6.57263143e-05) * ta * va3 + + (2.22697524e-07) * ta2 * va3 + + (-4.16117031e-08) * ta3 * va3 + + (-1.27762753e-02) * va4 + + (9.66891875e-06) * ta * va4 + + (2.52785852e-09) * ta2 * va4 + + (4.56306672e-04) * va5 + + (-1.74202546e-07) * ta * va5 + + (-5.91491269e-06) * va6 + + (3.98374029e-01) * d_tmrt + + (1.83945314e-04) * ta * d_tmrt + + (-1.73754510e-04) * ta2 * d_tmrt + + (-7.60781159e-07) * ta3 * d_tmrt + + (3.77830287e-08) * ta4 * d_tmrt + + (5.43079673e-10) * ta5 * d_tmrt + + (-2.00518269e-02) * va * d_tmrt + + (8.92859837e-04) * ta * va * d_tmrt + + (3.45433048e-06) * ta2 * va * d_tmrt + + (-3.77925774e-07) * ta3 * va * d_tmrt + + (-1.69699377e-09) * ta4 * va * d_tmrt + + (1.69992415e-04) * va2 * d_tmrt + + (-4.99204314e-05) * ta * va2 * d_tmrt + + (2.47417178e-07) * ta2 * va2 * d_tmrt + + (1.07596466e-08) * ta3 * 
va2 * d_tmrt + + (8.49242932e-05) * va3 * d_tmrt + + (1.35191328e-06) * ta * va3 * d_tmrt + + (-6.21531254e-09) * ta2 * va3 * d_tmrt + + (-4.99410301e-06) * va4 * d_tmrt + + (-1.89489258e-08) * ta * va4 * d_tmrt + + (8.15300114e-08) * va5 * d_tmrt + + (7.55043090e-04) * d2 + + (-5.65095215e-05) * ta * d2 + + (-4.52166564e-07) * ta2 * d2 + + (2.46688878e-08) * ta3 * d2 + + (2.42674348e-10) * ta4 * d2 + + (1.54547250e-04) * va * d2 + + (5.24110970e-06) * ta * va * d2 + + (-8.75874982e-08) * ta2 * va * d2 + + (-1.50743064e-09) * ta3 * va * d2 + + (-1.56236307e-05) * va2 * d2 + + (-1.33895614e-07) * ta * va2 * d2 + + (2.49709824e-09) * ta2 * va2 * d2 + + (6.51711721e-07) * va3 * d2 + + (1.94960053e-09) * ta * va3 * d2 + + (-1.00361113e-08) * va4 * d2 + + (-1.21206673e-05) * d3 + + (-2.18203660e-07) * ta * d3 + + (7.51269482e-09) * ta2 * d3 + + (9.79063848e-11) * ta3 * d3 + + (1.25006734e-06) * va * d3 + + (-1.81584736e-09) * ta * va * d3 + + (-3.52197671e-10) * ta2 * va * d3 + + (-3.36514630e-08) * va2 * d3 + + (1.35908359e-10) * ta * va2 * d3 + + (4.17032620e-10) * va3 * d3 + + (-1.30369025e-09) * d4 + + (4.13908461e-10) * ta * d4 + + (9.22652254e-12) * ta2 * d4 + + (-5.08220384e-09) * va * d4 + + (-2.24730961e-11) * ta * va * d4 + + (1.17139133e-10) * va2 * d4 + + (6.62154879e-10) * d5 + + (4.03863260e-13) * ta * d5 + + (1.95087203e-12) * va * d5 + + (-4.73602469e-12) * d6 + + (5.12733497e00) * pa + + (-3.12788561e-01) * ta * pa + + (-1.96701861e-02) * ta2 * pa + + (9.99690870e-04) * ta3 * pa + + (9.51738512e-06) * ta4 * pa + + (-4.66426341e-07) * ta5 * pa + + (5.48050612e-01) * va * pa + + (-3.30552823e-03) * ta * va * pa + + (-1.64119440e-03) * ta2 * va * pa + + (-5.16670694e-06) * ta3 * va * pa + + (9.52692432e-07) * ta4 * va * pa + + (-4.29223622e-02) * va2 * pa + + (5.00845667e-03) * ta * va2 * pa + + (1.00601257e-06) * ta2 * va2 * pa + + (-1.81748644e-06) * ta3 * va2 * pa + + (-1.25813502e-03) * va3 * pa + + (-1.79330391e-04) * ta * va3 * pa + + 
(2.34994441e-06) * ta2 * va3 * pa + + (1.29735808e-04) * va4 * pa + + (1.29064870e-06) * ta * va4 * pa + + (-2.28558686e-06) * va5 * pa + + (-3.69476348e-02) * d_tmrt * pa + + (1.62325322e-03) * ta * d_tmrt * pa + + (-3.14279680e-05) * ta2 * d_tmrt * pa + + (2.59835559e-06) * ta3 * d_tmrt * pa + + (-4.77136523e-08) * ta4 * d_tmrt * pa + + (8.64203390e-03) * va * d_tmrt * pa + + (-6.87405181e-04) * ta * va * d_tmrt * pa + + (-9.13863872e-06) * ta2 * va * d_tmrt * pa + + (5.15916806e-07) * ta3 * va * d_tmrt * pa + + (-3.59217476e-05) * va2 * d_tmrt * pa + + (3.28696511e-05) * ta * va2 * d_tmrt * pa + + (-7.10542454e-07) * ta2 * va2 * d_tmrt * pa + + (-1.24382300e-05) * va3 * d_tmrt * pa + + (-7.38584400e-09) * ta * va3 * d_tmrt * pa + + (2.20609296e-07) * va4 * d_tmrt * pa + + (-7.32469180e-04) * d2 * pa + + (-1.87381964e-05) * ta * d2 * pa + + (4.80925239e-06) * ta2 * d2 * pa + + (-8.75492040e-08) * ta3 * d2 * pa + + (2.77862930e-05) * va * d2 * pa + + (-5.06004592e-06) * ta * va * d2 * pa + + (1.14325367e-07) * ta2 * va * d2 * pa + + (2.53016723e-06) * va2 * d2 * pa + + (-1.72857035e-08) * ta * va2 * d2 * pa + + (-3.95079398e-08) * va3 * d2 * pa + + (-3.59413173e-07) * d3 * pa + + (7.04388046e-07) * ta * d3 * pa + + (-1.89309167e-08) * ta2 * d3 * pa + + (-4.79768731e-07) * va * d3 * pa + + (7.96079978e-09) * ta * va * d3 * pa + + (1.62897058e-09) * va2 * d3 * pa + + (3.94367674e-08) * d4 * pa + + (-1.18566247e-09) * ta * d4 * pa + + (3.34678041e-10) * va * d4 * pa + + (-1.15606447e-10) * d5 * pa + + (-2.80626406e00) * pa2 + + (5.48712484e-01) * ta * pa2 + + (-3.99428410e-03) * ta2 * pa2 + + (-9.54009191e-04) * ta3 * pa2 + + (1.93090978e-05) * ta4 * pa2 + + (-3.08806365e-01) * va * pa2 + + (1.16952364e-02) * ta * va * pa2 + + (4.95271903e-04) * ta2 * va * pa2 + + (-1.90710882e-05) * ta3 * va * pa2 + + (2.10787756e-03) * va2 * pa2 + + (-6.98445738e-04) * ta * va2 * pa2 + + (2.30109073e-05) * ta2 * va2 * pa2 + + (4.17856590e-04) * va3 * pa2 + + (-1.27043871e-05) * ta 
* va3 * pa2 + + (-3.04620472e-06) * va4 * pa2 + + (5.14507424e-02) * d_tmrt * pa2 + + (-4.32510997e-03) * ta * d_tmrt * pa2 + + (8.99281156e-05) * ta2 * d_tmrt * pa2 + + (-7.14663943e-07) * ta3 * d_tmrt * pa2 + + (-2.66016305e-04) * va * d_tmrt * pa2 + + (2.63789586e-04) * ta * va * d_tmrt * pa2 + + (-7.01199003e-06) * ta2 * va * d_tmrt * pa2 + + (-1.06823306e-04) * va2 * d_tmrt * pa2 + + (3.61341136e-06) * ta * va2 * d_tmrt * pa2 + + (2.29748967e-07) * va3 * d_tmrt * pa2 + + (3.04788893e-04) * d2 * pa2 + + (-6.42070836e-05) * ta * d2 * pa2 + + (1.16257971e-06) * ta2 * d2 * pa2 + + (7.68023384e-06) * va * d2 * pa2 + + (-5.47446896e-07) * ta * va * d2 * pa2 + + (-3.59937910e-08) * va2 * d2 * pa2 + + (-4.36497725e-06) * d3 * pa2 + + (1.68737969e-07) * ta * d3 * pa2 + + (2.67489271e-08) * va * d3 * pa2 + + (3.23926897e-09) * d4 * pa2 + + (-3.53874123e-02) * pa3 + + (-2.21201190e-01) * ta * pa3 + + (1.55126038e-02) * ta2 * pa3 + + (-2.63917279e-04) * ta3 * pa3 + + (4.53433455e-02) * va * pa3 + + (-4.32943862e-03) * ta * va * pa3 + + (1.45389826e-04) * ta2 * va * pa3 + + (2.17508610e-04) * va2 * pa3 + + (-6.66724702e-05) * ta * va2 * pa3 + + (3.33217140e-05) * va3 * pa3 + + (-2.26921615e-03) * d_tmrt * pa3 + + (3.80261982e-04) * ta * d_tmrt * pa3 + + (-5.45314314e-09) * ta2 * d_tmrt * pa3 + + (-7.96355448e-04) * va * d_tmrt * pa3 + + (2.53458034e-05) * ta * va * d_tmrt * pa3 + + (-6.31223658e-06) * va2 * d_tmrt * pa3 + + (3.02122035e-04) * d2 * pa3 + + (-4.77403547e-06) * ta * d2 * pa3 + + (1.73825715e-06) * va * d2 * pa3 + + (-4.09087898e-07) * d3 * pa3 + + (6.14155345e-01) * pa4 + + (-6.16755931e-02) * ta * pa4 + + (1.33374846e-03) * ta2 * pa4 + + (3.55375387e-03) * va * pa4 + + (-5.13027851e-04) * ta * va * pa4 + + (1.02449757e-04) * va2 * pa4 + + (-1.48526421e-03) * d_tmrt * pa4 + + (-4.11469183e-05) * ta * d_tmrt * pa4 + + (-6.80434415e-06) * va * d_tmrt * pa4 + + (-9.77675906e-06) * d2 * pa4 + + (8.82773108e-02) * pa5 + + (-3.01859306e-03) * ta * pa5 + + 
(1.04452989e-03) * va * pa5 + + (2.47090539e-04) * d_tmrt * pa5 + + (1.48348065e-03) * pa6 +} + +/// Calculate saturation vapor pressure using the polynomial from UTCI. +#[inline] +fn saturation_vapor_pressure(ta: f32) -> f32 { + const G: [f32; 8] = [ + -2.8365744e3, + -6.028076559e3, + 1.954263612e1, + -2.737830188e-2, + 1.6261698e-5, + 7.0229056e-10, + -1.8680009e-13, + 2.7150305, + ]; + + let tk = ta + 273.15; + let mut es = G[7] * tk.ln(); + + // Compute tk^(-2), tk^(-1), tk^0, tk^1, ..., tk^4 + let tk_inv2 = 1.0 / (tk * tk); + let tk_inv = 1.0 / tk; + + es += G[0] * tk_inv2; + es += G[1] * tk_inv; + es += G[2]; + es += G[3] * tk; + es += G[4] * tk * tk; + es += G[5] * tk * tk * tk; + es += G[6] * tk * tk * tk * tk; + + (es.exp()) * 0.01 +} + +/// Calculate UTCI for a single point. +/// +/// Parameters: +/// - ta: Air temperature (°C) +/// - rh: Relative humidity (%) +/// - tmrt: Mean radiant temperature (°C) +/// - va10m: Wind speed at 10m height (m/s) +/// +/// Returns: UTCI temperature (°C) or -999 for invalid inputs +#[pyfunction] +pub fn utci_single(ta: f32, rh: f32, tmrt: f32, va10m: f32) -> f32 { + if ta <= -999.0 || rh <= -999.0 || va10m <= -999.0 || tmrt <= -999.0 { + return -999.0; + } + + let es = saturation_vapor_pressure(ta); + let eh_pa = es * rh / 100.0; + let pa = eh_pa / 10.0; // vapor pressure in kPa + let d_tmrt = tmrt - ta; + + utci_polynomial(d_tmrt, ta, va10m, pa) +} + +/// Calculate UTCI for a 2D grid using parallel processing. 
+/// +/// Parameters: +/// - ta: Air temperature (°C) - scalar +/// - rh: Relative humidity (%) - scalar +/// - tmrt: Mean radiant temperature grid (°C) +/// - va10m: Wind speed grid at 10m height (m/s) +/// +/// Returns: UTCI temperature grid (°C), -9999 for invalid pixels +#[pyfunction] +pub fn utci_grid<'py>( + py: Python<'py>, + ta: f32, + rh: f32, + tmrt: PyReadonlyArray2<'py, f32>, + va10m: PyReadonlyArray2<'py, f32>, +) -> PyResult>> { + let tmrt_arr = tmrt.as_array(); + let va_arr = va10m.as_array(); + + let (rows, cols) = tmrt_arr.dim(); + + // Handle invalid scalar inputs + if ta <= -999.0 || rh <= -999.0 { + let result = ndarray::Array2::from_elem((rows, cols), -999.0f32); + return Ok(PyArray2::from_owned_array(py, result)); + } + + // Pre-compute vapor pressure (constant for entire grid) + let es = saturation_vapor_pressure(ta); + let eh_pa = es * rh / 100.0; + let pa = eh_pa / 10.0; + + // Create output array + let mut result = ndarray::Array2::zeros((rows, cols)); + + // Process in parallel using rayon + result + .as_slice_mut() + .unwrap() + .par_iter_mut() + .enumerate() + .for_each(|(idx, out)| { + let row = idx / cols; + let col = idx % cols; + + let tmrt_val = tmrt_arr[[row, col]]; + let va_val = va_arr[[row, col]]; + + // Check for invalid pixel values (NaN, nodata, non-finite) + if !tmrt_val.is_finite() || !va_val.is_finite() { + *out = f32::NAN; + } else if tmrt_val <= -999.0 || va_val <= -999.0 { + *out = f32::NAN; + } else if va_val > 0.0 { + let d_tmrt = tmrt_val - ta; + *out = utci_polynomial(d_tmrt, ta, va_val, pa); + } else { + *out = f32::NAN; // Invalid wind speed + } + }); + + Ok(PyArray2::from_owned_array(py, result)) +} diff --git a/rust/src/vegetation.rs b/rust/src/vegetation.rs index bffe7ea..49b955c 100644 --- a/rust/src/vegetation.rs +++ b/rust/src/vegetation.rs @@ -1,84 +1,56 @@ -use ndarray::Array2; +use ndarray::{Array2, ArrayView2}; use numpy::{IntoPyArray, PyArray2, PyReadonlyArray2}; use pyo3::prelude::*; use 
rayon::prelude::*; -/// Result container for lside_veg_v2022a direction-wise longwave fluxes. -#[pyclass] -pub struct LsideVegResult { - #[pyo3(get)] - pub least: Py>, - #[pyo3(get)] - pub lsouth: Py>, - #[pyo3(get)] - pub lwest: Py>, - #[pyo3(get)] - pub lnorth: Py>, +/// Pure result type for lside_veg (no PyO3 dependency). +pub(crate) struct LsideVegPureResult { + pub least: Array2, + pub lsouth: Array2, + pub lwest: Array2, + pub lnorth: Array2, } -/// Vectorized Rust port of Python `Lside_veg_v2022a` operating on grid arrays. -/// Returns a `LsideVegResult` pyclass with four 2D arrays (least, lsouth, lwest, lnorth). -#[pyfunction] +/// Pure-ndarray implementation of Lside_veg_v2022a. +/// Callable from pipeline.rs (fused path) or from the PyO3 wrapper (modular path). #[allow(non_snake_case)] #[allow(clippy::too_many_arguments)] -pub fn lside_veg( - py: Python, - svfS: PyReadonlyArray2, - svfW: PyReadonlyArray2, - svfN: PyReadonlyArray2, - svfE: PyReadonlyArray2, - svfEveg: PyReadonlyArray2, - svfSveg: PyReadonlyArray2, - svfWveg: PyReadonlyArray2, - svfNveg: PyReadonlyArray2, - svfEaveg: PyReadonlyArray2, - svfSaveg: PyReadonlyArray2, - svfWaveg: PyReadonlyArray2, - svfNaveg: PyReadonlyArray2, +pub(crate) fn lside_veg_pure( + svfS: ArrayView2, + svfW: ArrayView2, + svfN: ArrayView2, + svfE: ArrayView2, + svfEveg: ArrayView2, + svfSveg: ArrayView2, + svfWveg: ArrayView2, + svfNveg: ArrayView2, + svfEaveg: ArrayView2, + svfSaveg: ArrayView2, + svfWaveg: ArrayView2, + svfNaveg: ArrayView2, azimuth: f32, altitude: f32, Ta: f32, Tw: f32, SBC: f32, ewall: f32, - Ldown: PyReadonlyArray2, + Ldown: ArrayView2, esky: f32, t: f32, - F_sh: PyReadonlyArray2, + F_sh: ArrayView2, CI: f32, - LupE: PyReadonlyArray2, - LupS: PyReadonlyArray2, - LupW: PyReadonlyArray2, - LupN: PyReadonlyArray2, + LupE: ArrayView2, + LupS: ArrayView2, + LupW: ArrayView2, + LupN: ArrayView2, anisotropic_longwave: bool, -) -> PyResult> { - // Borrow arrays - let svfS = svfS.as_array(); - let svfW = 
svfW.as_array(); - let svfN = svfN.as_array(); - let svfE = svfE.as_array(); - let svfEveg = svfEveg.as_array(); - let svfSveg = svfSveg.as_array(); - let svfWveg = svfWveg.as_array(); - let svfNveg = svfNveg.as_array(); - let svfEaveg = svfEaveg.as_array(); - let svfSaveg = svfSaveg.as_array(); - let svfWaveg = svfWaveg.as_array(); - let svfNaveg = svfNaveg.as_array(); - let Ldown = Ldown.as_array(); - let LupE = LupE.as_array(); - let LupS = LupS.as_array(); - let LupW = LupW.as_array(); - let LupN = LupN.as_array(); - let F_sh = F_sh.as_array(); - - // Shape validation (all must match shape of svfE) + valid: Option>, +) -> LsideVegPureResult { let shape = svfE.shape(); let (rows, cols) = (shape[0], shape[1]); let vikttot: f32 = 4.4897; let TaK = Ta + 273.15; let TaK_pow4 = TaK.powi(4); - // F_sh is per-cell; scaling to -1..1 handled inside loop per original Python (2*F_sh -1). No global scalar. let c = 1.0 - CI; let Lsky_allsky = esky * SBC * TaK_pow4 * (1.0 - c) + c * SBC * TaK_pow4; let altitude_day = altitude > 0.0; @@ -88,13 +60,11 @@ pub fn lside_veg( let sun_west = azimuth > (360.0 - t) || azimuth <= (180.0 - t); let sun_north = azimuth > (90.0 - t) && azimuth <= (270.0 - t); - // Precompute azimuth temperature offsets (constant per grid) let temp_e = TaK + Tw * ((azimuth - 180.0 + t) * std::f32::consts::PI / 180.0).sin(); let temp_s = TaK + Tw * ((azimuth - 270.0 + t) * std::f32::consts::PI / 180.0).sin(); let temp_w = TaK + Tw * ((azimuth + t) * std::f32::consts::PI / 180.0).sin(); let temp_n = TaK + Tw * ((azimuth - 90.0 + t) * std::f32::consts::PI / 180.0).sin(); - // Polynomial from Lvikt_veg #[inline] fn poly(x: f32) -> f32 { 63.227 * x.powi(6) - 161.51 * x.powi(5) + 156.91 * x.powi(4) - 70.424 * x.powi(3) @@ -102,7 +72,6 @@ pub fn lside_veg( - 0.4863 * x } - // Pre-allocate flat Vecs for each direction let npix = rows * cols; let mut least_vec = vec![0.0f32; npix]; let mut lsouth_vec = vec![0.0f32; npix]; @@ -118,6 +87,15 @@ pub fn lside_veg( 
.for_each(|(idx, (((least, lsouth), lwest), lnorth))| { let r = idx / cols; let c = idx % cols; + if let Some(ref v) = valid { + if v[[r, c]] == 0 { + *least = f32::NAN; + *lsouth = f32::NAN; + *lwest = f32::NAN; + *lnorth = f32::NAN; + return; + } + } let compute = |svf: f32, svfveg: f32, svfaveg: f32, @@ -204,19 +182,285 @@ pub fn lside_veg( ); }); - // Convert flat Vecs to Array2s - let least = Array2::from_shape_vec((rows, cols), least_vec).unwrap(); - let lsouth = Array2::from_shape_vec((rows, cols), lsouth_vec).unwrap(); - let lwest = Array2::from_shape_vec((rows, cols), lwest_vec).unwrap(); - let lnorth = Array2::from_shape_vec((rows, cols), lnorth_vec).unwrap(); + LsideVegPureResult { + least: Array2::from_shape_vec((rows, cols), least_vec).unwrap(), + lsouth: Array2::from_shape_vec((rows, cols), lsouth_vec).unwrap(), + lwest: Array2::from_shape_vec((rows, cols), lwest_vec).unwrap(), + lnorth: Array2::from_shape_vec((rows, cols), lnorth_vec).unwrap(), + } +} + +/// Pure result type for kside_veg isotropic path (no PyO3 dependency). +pub(crate) struct KsideVegPureResult { + pub keast: Array2, + pub ksouth: Array2, + pub kwest: Array2, + pub knorth: Array2, + pub kside_i: Array2, +} + +/// Pure-ndarray implementation of Kside_veg_v2022a (isotropic path). +/// The anisotropic shortwave uses `anisotropic_sky_pure()` in the fused pipeline. 
+#[allow(non_snake_case)] +#[allow(clippy::too_many_arguments)] +pub(crate) fn kside_veg_isotropic_pure( + radI: f32, + radD: f32, + radG: f32, + shadow: ArrayView2, + svfS: ArrayView2, + svfW: ArrayView2, + svfN: ArrayView2, + svfE: ArrayView2, + svfEveg: ArrayView2, + svfSveg: ArrayView2, + svfWveg: ArrayView2, + svfNveg: ArrayView2, + azimuth: f32, + altitude: f32, + psi: f32, + t: f32, + albedo: f32, + F_sh: ArrayView2, + KupE: ArrayView2, + KupS: ArrayView2, + KupW: ArrayView2, + KupN: ArrayView2, + cyl: bool, + valid: Option>, +) -> KsideVegPureResult { + let shape = svfE.shape(); + let (rows, cols) = (shape[0], shape[1]); + let deg2rad = std::f32::consts::PI / 180.0; + let vikttot = 4.4897f32; + + let mut Keast = Array2::::zeros((rows, cols)); + let mut Ksouth = Array2::::zeros((rows, cols)); + let mut Kwest = Array2::::zeros((rows, cols)); + let mut Knorth = Array2::::zeros((rows, cols)); + let mut KsideI = Array2::::zeros((rows, cols)); + + #[inline] + fn kvikt_veg(svf: f32, svfveg: f32, vikttot: f32) -> (f32, f32) { + let poly = |x: f32| -> f32 { + 63.227 * x.powi(6) - 161.51 * x.powi(5) + 156.91 * x.powi(4) - 70.424 * x.powi(3) + + 16.773 * x.powi(2) + - 0.4863 * x + }; + let viktwall = (vikttot - poly(svf)) / vikttot; + let svfvegbu = svfveg + svf - 1.0; + let viktveg_tot = (vikttot - poly(svfvegbu)) / vikttot; + let viktveg = viktveg_tot - viktwall; + (viktveg, viktwall) + } + + // Precompute svfviktbuveg arrays + let mut svfviktbuvegE = Array2::::zeros((rows, cols)); + let mut svfviktbuvegS = Array2::::zeros((rows, cols)); + let mut svfviktbuvegW = Array2::::zeros((rows, cols)); + let mut svfviktbuvegN = Array2::::zeros((rows, cols)); + + for r in 0..rows { + for c in 0..cols { + if let Some(ref v) = valid { + if v[[r, c]] == 0 { + continue; + } + } + let (vveg, vwall) = kvikt_veg(svfE[(r, c)], svfEveg[(r, c)], vikttot); + svfviktbuvegE[(r, c)] = vwall + vveg * (1.0 - psi); + let (vveg, vwall) = kvikt_veg(svfS[(r, c)], svfSveg[(r, c)], vikttot); + 
svfviktbuvegS[(r, c)] = vwall + vveg * (1.0 - psi); + let (vveg, vwall) = kvikt_veg(svfW[(r, c)], svfWveg[(r, c)], vikttot); + svfviktbuvegW[(r, c)] = vwall + vveg * (1.0 - psi); + let (vveg, vwall) = kvikt_veg(svfN[(r, c)], svfNveg[(r, c)], vikttot); + svfviktbuvegN[(r, c)] = vwall + vveg * (1.0 - psi); + } + } + + // Direct radiation + if cyl { + for r in 0..rows { + for c in 0..cols { + KsideI[(r, c)] = shadow[(r, c)] * radI * (altitude * deg2rad).cos(); + } + } + } else { + for r in 0..rows { + for c in 0..cols { + let sh_val = shadow[(r, c)]; + if azimuth > (360.0 - t) || azimuth <= (180.0 - t) { + Keast[(r, c)] = radI + * sh_val + * (altitude * deg2rad).cos() + * ((azimuth + t) * deg2rad).sin(); + } + if azimuth > (90.0 - t) && azimuth <= (270.0 - t) { + Ksouth[(r, c)] = radI + * sh_val + * (altitude * deg2rad).cos() + * ((azimuth - 90.0 + t) * deg2rad).sin(); + } + if azimuth > (180.0 - t) && azimuth <= (360.0 - t) { + Kwest[(r, c)] = radI + * sh_val + * (altitude * deg2rad).cos() + * ((azimuth - 180.0 + t) * deg2rad).sin(); + } + if azimuth <= (90.0 - t) || azimuth > (270.0 - t) { + Knorth[(r, c)] = radI + * sh_val + * (altitude * deg2rad).cos() + * ((azimuth - 270.0 + t) * deg2rad).sin(); + } + } + } + } + + // Isotropic diffuse/reflected radiation + let ke_slice = Keast.as_slice_mut().unwrap(); + let ks_slice = Ksouth.as_slice_mut().unwrap(); + let kw_slice = Kwest.as_slice_mut().unwrap(); + let kn_slice = Knorth.as_slice_mut().unwrap(); + let fsh_slice = F_sh.as_slice().unwrap(); + let svf_e_slice = svfviktbuvegE.as_slice().unwrap(); + let svf_s_slice = svfviktbuvegS.as_slice().unwrap(); + let svf_w_slice = svfviktbuvegW.as_slice().unwrap(); + let svf_n_slice = svfviktbuvegN.as_slice().unwrap(); + let kup_e_slice = KupE.as_slice().unwrap(); + let kup_s_slice = KupS.as_slice().unwrap(); + let kup_w_slice = KupW.as_slice().unwrap(); + let kup_n_slice = KupN.as_slice().unwrap(); + let valid_slice = valid.as_ref().map(|v| v.as_slice().unwrap()); + ke_slice + 
.par_iter_mut() + .zip(ks_slice.par_iter_mut()) + .zip(kw_slice.par_iter_mut()) + .zip(kn_slice.par_iter_mut()) + .enumerate() + .for_each(|(idx, (((ke, ks), kw), kn))| { + if let Some(ref vs) = valid_slice { + if vs[idx] == 0 { + *ke = f32::NAN; + *ks = f32::NAN; + *kw = f32::NAN; + *kn = f32::NAN; + return; + } + } + let fsh = fsh_slice[idx]; + let svf_e = svf_e_slice[idx]; + let svf_s = svf_s_slice[idx]; + let svf_w = svf_w_slice[idx]; + let svf_n = svf_n_slice[idx]; + let kup_e = kup_e_slice[idx]; + let kup_s = kup_s_slice[idx]; + let kup_w = kup_w_slice[idx]; + let kup_n = kup_n_slice[idx]; + let mix = radG * (1.0 - fsh) + radD * fsh; + *ke += (radD * (1.0 - svf_e) + albedo * (svf_e * mix) + kup_e) * 0.5; + *ks += (radD * (1.0 - svf_s) + albedo * (svf_s * mix) + kup_s) * 0.5; + *kw += (radD * (1.0 - svf_w) + albedo * (svf_w * mix) + kup_w) * 0.5; + *kn += (radD * (1.0 - svf_n) + albedo * (svf_n * mix) + kup_n) * 0.5; + }); + + KsideVegPureResult { + keast: Keast, + ksouth: Ksouth, + kwest: Kwest, + knorth: Knorth, + kside_i: KsideI, + } +} + +/// Result container for lside_veg_v2022a direction-wise longwave fluxes. +#[pyclass] +pub struct LsideVegResult { + #[pyo3(get)] + pub least: Py>, + #[pyo3(get)] + pub lsouth: Py>, + #[pyo3(get)] + pub lwest: Py>, + #[pyo3(get)] + pub lnorth: Py>, +} + +/// Vectorized Rust port of Python `Lside_veg_v2022a` operating on grid arrays. +/// Returns a `LsideVegResult` pyclass with four 2D arrays (least, lsouth, lwest, lnorth). 
+#[pyfunction] +#[allow(non_snake_case)] +#[allow(clippy::too_many_arguments)] +pub fn lside_veg( + py: Python, + svfS: PyReadonlyArray2, + svfW: PyReadonlyArray2, + svfN: PyReadonlyArray2, + svfE: PyReadonlyArray2, + svfEveg: PyReadonlyArray2, + svfSveg: PyReadonlyArray2, + svfWveg: PyReadonlyArray2, + svfNveg: PyReadonlyArray2, + svfEaveg: PyReadonlyArray2, + svfSaveg: PyReadonlyArray2, + svfWaveg: PyReadonlyArray2, + svfNaveg: PyReadonlyArray2, + azimuth: f32, + altitude: f32, + Ta: f32, + Tw: f32, + SBC: f32, + ewall: f32, + Ldown: PyReadonlyArray2, + esky: f32, + t: f32, + F_sh: PyReadonlyArray2, + CI: f32, + LupE: PyReadonlyArray2, + LupS: PyReadonlyArray2, + LupW: PyReadonlyArray2, + LupN: PyReadonlyArray2, + anisotropic_longwave: bool, +) -> PyResult> { + let result = lside_veg_pure( + svfS.as_array(), + svfW.as_array(), + svfN.as_array(), + svfE.as_array(), + svfEveg.as_array(), + svfSveg.as_array(), + svfWveg.as_array(), + svfNveg.as_array(), + svfEaveg.as_array(), + svfSaveg.as_array(), + svfWaveg.as_array(), + svfNaveg.as_array(), + azimuth, + altitude, + Ta, + Tw, + SBC, + ewall, + Ldown.as_array(), + esky, + t, + F_sh.as_array(), + CI, + LupE.as_array(), + LupS.as_array(), + LupW.as_array(), + LupN.as_array(), + anisotropic_longwave, + None, + ); Py::new( py, LsideVegResult { - least: least.into_pyarray(py).unbind(), - lsouth: lsouth.into_pyarray(py).unbind(), - lwest: lwest.into_pyarray(py).unbind(), - lnorth: lnorth.into_pyarray(py).unbind(), + least: result.least.into_pyarray(py).unbind(), + lsouth: result.lsouth.into_pyarray(py).unbind(), + lwest: result.lwest.into_pyarray(py).unbind(), + lnorth: result.lnorth.into_pyarray(py).unbind(), }, ) } @@ -272,9 +516,9 @@ pub fn kside_veg( anisotropic_diffuse: bool, // 1 -> anisotropic diffsh: Option>, // (rows, cols, patches) asvf: Option>, // sky view factor angle per pixel - shmat: Option>, // building shading matrix (1 sky visible) - vegshmat: Option>, // vegetation shading matrix - vbshvegshmat: 
Option>, // veg+building shading matrix + shmat: Option>, // building shading matrix (uint8: 0=shadow, 255=sky) + vegshmat: Option>, // vegetation shading matrix + vbshvegshmat: Option>, // veg+building shading matrix ) -> PyResult> { // Borrow base 2D arrays let shadow = shadow.as_array(); @@ -517,12 +761,12 @@ pub fn kside_veg( let angle_inc = pc.cos_alt; let lum = lum_chi[i]; kside_d_loc += diffsh[(r, c, i)] * lum * angle_inc * pc.ster; - let veg_flag = vegshmat[(r, c, i)] == 0.0 || vbshvegshmat[(r, c, i)] == 0.0; + let veg_flag = vegshmat[(r, c, i)] == 0 || vbshvegshmat[(r, c, i)] == 0; if veg_flag { ref_veg += shaded_surface * pc.ster * angle_inc; } - let temp_vbsh = (1.0 - shmat[(r, c, i)]) * vbshvegshmat[(r, c, i)]; - if temp_vbsh == 1.0 { + let temp_vbsh = shmat[(r, c, i)] == 0 && vbshvegshmat[(r, c, i)] == 255; + if temp_vbsh { let (sunlit_patch, shaded_patch) = crate::sunlit_shaded_patches::shaded_or_sunlit_pixel( altitude, @@ -613,7 +857,7 @@ pub fn kside_veg( if pc.is_n { diff_n += diff_val * pc.w_n; } - let veg_flag = vegshmat[(r, c, i)] == 0.0 || vbshvegshmat[(r, c, i)] == 0.0; + let veg_flag = vegshmat[(r, c, i)] == 0 || vbshvegshmat[(r, c, i)] == 0; if veg_flag { if pc.is_e { ref_veg_e += shaded_surface * pc.ster * cos_alt * pc.w_e; @@ -628,8 +872,8 @@ pub fn kside_veg( ref_veg_n += shaded_surface * pc.ster * cos_alt * pc.w_n; } } - let temp_vbsh = (1.0 - shmat[(r, c, i)]) * vbshvegshmat[(r, c, i)]; - if temp_vbsh == 1.0 { + let temp_vbsh = shmat[(r, c, i)] == 0 && vbshvegshmat[(r, c, i)] == 255; + if temp_vbsh { let az_diff = (azimuth - patch_azi[i]).abs(); if az_diff > 90.0 && az_diff < 270.0 { let (sunlit_patch, shaded_patch) = diff --git a/rust/src/wall_aspect.rs b/rust/src/wall_aspect.rs new file mode 100644 index 0000000..9887c4f --- /dev/null +++ b/rust/src/wall_aspect.rs @@ -0,0 +1,434 @@ +//! Wall aspect (orientation) detection using the Goodwin filter algorithm. +//! +//! 
Determines wall orientation from a binary wall grid and DSM by rotating +//! a linear filter through 180 angles and finding the best alignment. +//! +//! References: +//! - Goodwin NR, Coops NC, Tooke TR, Christen A, Voogt JA (2009) +//! - Lindberg F., Jonsson, P. & Honjo, T. and Wästberg, D. (2015b) + +use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering}; +use std::sync::Arc; + +use ndarray::{Array2, ArrayView2, Zip}; +use numpy::{IntoPyArray, PyArray2, PyReadonlyArray2}; +use pyo3::prelude::*; +use rayon::prelude::*; + +/// Rotate a small 2D array by `angle` degrees (counter-clockwise). +/// +/// Supports order=0 (nearest neighbor) and order=1 (bilinear). +/// Uses inverse mapping: for each output pixel, find the source coordinate. +fn rotate_2d(arr: &Array2, angle_deg: f32, order: u8) -> Array2 { + let (rows, cols) = arr.dim(); + let center_y = (rows as f32 - 1.0) / 2.0; + let center_x = (cols as f32 - 1.0) / 2.0; + + let theta = angle_deg * std::f32::consts::PI / 180.0; + let cos_t = theta.cos(); + let sin_t = theta.sin(); + + let mut output = Array2::::zeros((rows, cols)); + + for r in 0..rows { + for c in 0..cols { + let xc = c as f32 - center_x; + let yc = r as f32 - center_y; + + // Inverse rotation to find source coordinates + let src_x = cos_t * xc - sin_t * yc + center_x; + let src_y = sin_t * xc + cos_t * yc + center_y; + + if order == 0 { + // Nearest neighbor + let sx = src_x.round() as i32; + let sy = src_y.round() as i32; + let sx = sx.clamp(0, cols as i32 - 1) as usize; + let sy = sy.clamp(0, rows as i32 - 1) as usize; + output[[r, c]] = arr[[sy, sx]]; + } else { + // Bilinear interpolation + let x0 = src_x.floor() as i32; + let y0 = src_y.floor() as i32; + let x1 = x0 + 1; + let y1 = y0 + 1; + + let x0c = x0.clamp(0, cols as i32 - 1) as usize; + let x1c = x1.clamp(0, cols as i32 - 1) as usize; + let y0c = y0.clamp(0, rows as i32 - 1) as usize; + let y1c = y1.clamp(0, rows as i32 - 1) as usize; + + let wx = (src_x - x0 as f32).clamp(0.0, 
1.0); + let wy = (src_y - y0 as f32).clamp(0.0, 1.0); + + output[[r, c]] = arr[[y0c, x0c]] * (1.0 - wx) * (1.0 - wy) + + arr[[y0c, x1c]] * wx * (1.0 - wy) + + arr[[y1c, x0c]] * (1.0 - wx) * wy + + arr[[y1c, x1c]] * wx * wy; + } + } + } + + output +} + +/// Precompute all 180 rotated filter pairs. +fn precompute_filters( + filtersize: usize, + half_ceil: usize, + half_floor: usize, +) -> Vec<(Array2, Array2)> { + let mut filtmatrix = Array2::::zeros((filtersize, filtersize)); + let mut buildfilt = Array2::::zeros((filtersize, filtersize)); + + // filtmatrix: vertical center column = 1 + for r in 0..filtersize { + filtmatrix[[r, half_ceil - 1]] = 1.0; + } + + let n = filtersize - 1; + + // buildfilt: center row, left half = 1, right half = 2 + for c in 0..half_floor { + buildfilt[[half_ceil - 1, c]] = 1.0; + } + for c in half_ceil..filtersize { + buildfilt[[half_ceil - 1, c]] = 2.0; + } + + (0..180) + .map(|h| { + let mut fm = rotate_2d(&filtmatrix, h as f32, 1); // bilinear + fm.mapv_inplace(|v| v.round()); + + let mut bf = rotate_2d(&buildfilt, h as f32, 0); // nearest + bf.mapv_inplace(|v| v.round()); + + let index = 270.0 - h as f32; + + // Special-case corrections matching original Python + if h == 150 || h == 30 { + for r in 0..filtersize { + bf[[r, n]] = 0.0; + } + } + if index == 225.0 { + fm[[0, 0]] = 1.0; + fm[[n, n]] = 1.0; + } + if index == 135.0 { + fm[[0, n]] = 1.0; + fm[[n, 0]] = 1.0; + } + + (fm, bf) + }) + .collect() +} + +/// Compute wall aspect using the Goodwin filter algorithm. +/// +/// Parallelized across wall pixels using Rayon. Each wall pixel independently +/// tests all 180 filter angles to find the best alignment. 
+pub(crate) fn compute_wall_aspect_pure( + walls_in: ArrayView2, + scale: f32, + dsm: ArrayView2, + progress_counter: Option>, + cancel_flag: Option>, +) -> Result, &'static str> { + let (rows, cols) = walls_in.dim(); + + // Binarize walls + let walls = walls_in.mapv(|v| if v > 0.5 { 1.0 } else { v }); + + // Compute filter size from scale + let filtersize_f = (scale + 1e-10) * 9.0; + let mut filtersize = filtersize_f.floor() as usize; + if filtersize <= 2 { + filtersize = 3; + } else if filtersize != 9 && filtersize % 2 == 0 { + filtersize += 1; + } + + let half_ceil = ((filtersize as f32) / 2.0).ceil() as usize; + let half_floor = ((filtersize as f32) / 2.0).floor() as usize; + + // Precompute all 180 rotated filter pairs (fast, filters are tiny ~9x9) + let filters = precompute_filters(filtersize, half_ceil, half_floor); + + // Iteration bounds (stay within filter radius of edges) + let i_start = half_ceil - 1; + let i_end = rows.saturating_sub(half_ceil + 1); + let j_start = half_ceil - 1; + let j_end = cols.saturating_sub(half_ceil + 1); + + // Collect wall pixel coordinates for parallel processing + let walls_view = walls.view(); + let wall_pixels: Vec<(usize, usize)> = (i_start..i_end) + .flat_map(|i| { + (j_start..j_end).filter_map(move |j| { + if walls_view[[i, j]] >= 0.5 { + Some((i, j)) + } else { + None + } + }) + }) + .collect(); + + let total_pixels = wall_pixels.len(); + + // Reset progress + if let Some(ref counter) = progress_counter { + counter.store(0, Ordering::Relaxed); + } + + // For each wall pixel, find the best angle across all 180 rotations. + // Returns (row, col, best_direction, building_side). 
+ let processed = AtomicUsize::new(0); + let walls_ref = &walls; + let dsm_ref = &dsm; + let filters_ref = &filters; + let progress_ref = &progress_counter; + let cancel_ref = &cancel_flag; + + let results: Vec<(usize, usize, f32, f32)> = wall_pixels + .par_iter() + .map(|&(i, j)| { + // Check cancellation early (skip remaining work) + if let Some(ref flag) = cancel_ref { + if flag.load(Ordering::Relaxed) { + return (i, j, 0.0, 0.0); + } + } + + let mut best_sum = 0.0f32; + let mut best_side = 0.0f32; + let mut best_dir = 0.0f32; + + for (h, (fm, bf)) in filters_ref.iter().enumerate() { + let index = 270.0 - h as f32; + + // Weighted sum of wall neighbors along the rotated filter line + let mut wallscut_sum = 0.0f32; + for di in 0..filtersize { + for dj in 0..filtersize { + wallscut_sum += + walls_ref[[i - half_floor + di, j - half_floor + dj]] * fm[[di, dj]]; + } + } + + if wallscut_sum > best_sum { + best_sum = wallscut_sum; + + // Determine which side of the wall is the building + let mut sum_side1 = 0.0f32; + let mut sum_side2 = 0.0f32; + for di in 0..filtersize { + for dj in 0..filtersize { + let dsm_val = dsm_ref[[i - half_floor + di, j - half_floor + dj]]; + let bf_val = bf[[di, dj]]; + if bf_val == 1.0 { + sum_side1 += dsm_val; + } else if bf_val == 2.0 { + sum_side2 += dsm_val; + } + } + } + + best_side = if sum_side1 > sum_side2 { 1.0 } else { 2.0 }; + best_dir = index; + } + } + + // Update progress (map pixel count to 0..180 range) + let count = processed.fetch_add(1, Ordering::Relaxed) + 1; + if let Some(ref counter) = progress_ref { + if total_pixels > 0 { + let pct = ((count as u64 * 180) / total_pixels as u64) as usize; + counter.store(pct.min(180), Ordering::Relaxed); + } + } + + (i, j, best_dir, best_side) + }) + .collect(); + + // Check cancellation after parallel work completes + if let Some(ref flag) = cancel_flag { + if flag.load(Ordering::Relaxed) { + return Err("Wall aspect computation cancelled"); + } + } + + // Scatter results into output 
arrays + let mut y = Array2::::zeros((rows, cols)); + let mut x = Array2::::zeros((rows, cols)); + + for &(i, j, dir, side) in &results { + y[[i, j]] = dir; + x[[i, j]] = side; + } + + // Post-processing: adjust angles based on building side + Zip::from(&mut y).and(&x).for_each(|y_val, &x_val| { + if x_val == 1.0 { + *y_val -= 180.0; + } + }); + y.mapv_inplace(|v| if v < 0.0 { v + 360.0 } else { v }); + + // DSM gradient fallback for walls with direction 0 + let dx = 1.0 / scale; + let asp = compute_dsm_aspect(&dsm, dx); + + Zip::from(&mut y) + .and(&walls) + .and(&asp) + .for_each(|y_val, &w, &a| { + if w >= 0.5 && *y_val == 0.0 { + *y_val = a / (std::f32::consts::PI / 180.0); + } + }); + + // Final progress + if let Some(ref counter) = progress_counter { + counter.store(180, Ordering::Relaxed); + } + + Ok(y) +} + +/// Compute DSM aspect (orientation of slope) using numpy.gradient equivalent. +/// +/// Returns aspect in radians matching the Python `get_ders` function. +fn compute_dsm_aspect(dsm: &ArrayView2, dx: f32) -> Array2 { + let (rows, cols) = dsm.dim(); + let mut fy = Array2::::zeros((rows, cols)); + let mut fx = Array2::::zeros((rows, cols)); + + // Compute gradients (matching numpy.gradient behavior) + for i in 0..rows { + for j in 0..cols { + // fy: gradient along axis 0 (rows) + fy[[i, j]] = if i == 0 { + (dsm[[1, j]] - dsm[[0, j]]) / dx + } else if i == rows - 1 { + (dsm[[rows - 1, j]] - dsm[[rows - 2, j]]) / dx + } else { + (dsm[[i + 1, j]] - dsm[[i - 1, j]]) / (2.0 * dx) + }; + + // fx: gradient along axis 1 (cols) + fx[[i, j]] = if j == 0 { + (dsm[[i, 1]] - dsm[[i, 0]]) / dx + } else if j == cols - 1 { + (dsm[[i, cols - 1]] - dsm[[i, cols - 2]]) / dx + } else { + (dsm[[i, j + 1]] - dsm[[i, j - 1]]) / (2.0 * dx) + }; + } + } + + // cart2pol: theta = atan2(fx, fy), then negate, then wrap to [0, 2pi) + // Matching Python: asp = atan2(fx, fy) * -1, then wrap negatives + let mut asp = Array2::::zeros((rows, cols)); + Zip::from(&mut asp) + .and(&fy) + 
.and(&fx) + .for_each(|a, &fy_val, &fx_val| { + let mut theta = fy_val.atan2(fx_val); + theta = -theta; + if theta < 0.0 { + theta += 2.0 * std::f32::consts::PI; + } + *a = theta; + }); + + asp +} + +/// PyO3 wrapper for wall aspect computation (no progress reporting). +#[pyfunction] +pub fn compute_wall_aspect( + py: Python<'_>, + walls: PyReadonlyArray2, + scale: f32, + dsm: PyReadonlyArray2, +) -> PyResult>> { + let walls_view = walls.as_array(); + let dsm_view = dsm.as_array(); + + let result = compute_wall_aspect_pure(walls_view, scale, dsm_view, None, None) + .map_err(|e| pyo3::exceptions::PyInterruptedError::new_err(e))?; + Ok(result.into_pyarray(py).unbind()) +} + +/// Runner that exposes pollable progress() and cancel() methods for wall aspect computation. +/// +/// Usage from Python: +/// runner = WallAspectRunner() +/// # launch runner.compute(...) in a thread +/// # poll runner.progress() from main thread (returns 0..180) +/// # call runner.cancel() to request early termination +#[pyclass] +pub struct WallAspectRunner { + progress: Arc, + cancelled: Arc, +} + +impl Default for WallAspectRunner { + fn default() -> Self { + Self::new() + } +} + +#[pymethods] +impl WallAspectRunner { + #[new] + pub fn new() -> Self { + Self { + progress: Arc::new(AtomicUsize::new(0)), + cancelled: Arc::new(AtomicBool::new(false)), + } + } + + /// Returns progress mapped to 0..180 range. + pub fn progress(&self) -> usize { + self.progress.load(Ordering::Relaxed) + } + + /// Request cancellation of the running computation. + pub fn cancel(&self) { + self.cancelled.store(true, Ordering::Relaxed); + } + + /// Compute wall aspect, releasing the GIL so progress()/cancel() can be called. 
+ pub fn compute( + &self, + py: Python<'_>, + walls: PyReadonlyArray2, + scale: f32, + dsm: PyReadonlyArray2, + ) -> PyResult>> { + // Reset progress and cancel flag + self.progress.store(0, Ordering::Relaxed); + self.cancelled.store(false, Ordering::Relaxed); + + // Copy to owned arrays so we can release the GIL + let walls_owned = walls.as_array().to_owned(); + let dsm_owned = dsm.as_array().to_owned(); + let counter = Some(self.progress.clone()); + let cancel = Some(self.cancelled.clone()); + + let result = py.allow_threads(|| { + compute_wall_aspect_pure(walls_owned.view(), scale, dsm_owned.view(), counter, cancel) + }); + + match result { + Ok(arr) => Ok(arr.into_pyarray(py).unbind()), + Err(msg) => Err(pyo3::exceptions::PyInterruptedError::new_err(msg)), + } + } +} diff --git a/scripts/profile_memory.py b/scripts/profile_memory.py new file mode 100644 index 0000000..b7947e6 --- /dev/null +++ b/scripts/profile_memory.py @@ -0,0 +1,164 @@ +#!/usr/bin/env python3 +"""Memory profiling script for SOLWEIG. + +Measures memory usage at various raster sizes to identify bottlenecks +and verify float32 optimization effectiveness. 
+ +Usage: + python scripts/profile_memory.py [--size 1000] +""" + +import argparse +import tracemalloc +from datetime import datetime + +import numpy as np + +# Start tracing before imports to capture module-level allocations +tracemalloc.start() + + +def format_size(size_bytes: float) -> str: + """Format bytes as human-readable string.""" + for unit in ["B", "KB", "MB", "GB"]: + if abs(size_bytes) < 1024.0: + return f"{size_bytes:.1f} {unit}" + size_bytes /= 1024.0 + return f"{size_bytes:.1f} TB" + + +def create_synthetic_surface(size: int): + """Create synthetic urban surface for testing.""" + from solweig import Location, SurfaceData + + # Create DSM with some buildings + np.random.seed(42) + dsm = np.ones((size, size), dtype=np.float32) * 10.0 # Ground at 10m + + # Add random buildings + n_buildings = size // 20 + for _ in range(n_buildings): + x, y = np.random.randint(10, size - 10, 2) + w, h = np.random.randint(5, 15, 2) + height = np.random.uniform(15, 40) + dsm[y : y + h, x : x + w] = height + + # Create vegetation DSM (relative heights) + cdsm = np.zeros((size, size), dtype=np.float32) + n_trees = size // 10 + for _ in range(n_trees): + x, y = np.random.randint(5, size - 5, 2) + if dsm[y, x] < 12: # Only place trees on ground + r = np.random.randint(2, 5) + h = np.random.uniform(3, 8) + y1, y2 = max(0, y - r), min(size, y + r) + x1, x2 = max(0, x - r), min(size, x + r) + cdsm[y1:y2, x1:x2] = np.maximum(cdsm[y1:y2, x1:x2], h) + + # Create land cover (integer array) + land_cover = np.ones((size, size), dtype=np.int32) * 5 # Default grass + land_cover[dsm > 12] = 2 # Buildings (ID 2 in UMEP standard) + + surface = SurfaceData(dsm=dsm, cdsm=cdsm, land_cover=land_cover, pixel_size=1.0) + + location = Location(latitude=37.98, longitude=23.73) # Athens + + return surface, location + + +def profile_calculation(size: int) -> dict: + """Run a single timestep and measure memory usage.""" + from solweig import Weather, calculate + + # Reset tracing + 
tracemalloc.reset_peak() + + # Create surface + surface, location = create_synthetic_surface(size) + surface.preprocess() + + after_surface = tracemalloc.get_traced_memory() + + # Create weather for noon + weather = Weather(datetime=datetime(2024, 7, 21, 12, 0), ta=30.0, rh=50.0, global_rad=800.0, ws=2.0) + + # Run calculation + result = calculate(surface, location, weather) + + after_calc = tracemalloc.get_traced_memory() + + return { + "size": size, + "pixels": size * size, + "surface_current": after_surface[0], + "surface_peak": after_surface[1], + "calc_current": after_calc[0], + "calc_peak": after_calc[1], + "tmrt_mean": float(np.nanmean(result.tmrt)), + } + + +def main(): + parser = argparse.ArgumentParser(description="Profile SOLWEIG memory usage") + parser.add_argument("--size", type=int, default=500, help="Grid size (default: 500)") + parser.add_argument("--scale", action="store_true", help="Test multiple sizes") + args = parser.parse_args() + + print("=" * 60) + print("SOLWEIG Memory Profiler") + print("=" * 60) + + if args.scale: + sizes = [100, 200, 400, 800] + if args.size > 800: + sizes.append(args.size) + else: + sizes = [args.size] + + results = [] + for size in sizes: + print(f"\nTesting {size}x{size} grid ({size * size:,} pixels)...") + try: + result = profile_calculation(size) + results.append(result) + + print(" Surface creation:") + print(f" Current: {format_size(result['surface_current'])}") + print(f" Peak: {format_size(result['surface_peak'])}") + print(" After calculation:") + print(f" Current: {format_size(result['calc_current'])}") + print(f" Peak: {format_size(result['calc_peak'])}") + print(f" Tmrt mean: {result['tmrt_mean']:.1f}°C") + + # Estimate bytes per pixel + bytes_per_pixel = result["calc_peak"] / (size * size) + print(f" Peak memory per pixel: {bytes_per_pixel:.1f} bytes") + + except MemoryError: + print(f" MemoryError at size {size}") + break + except Exception as e: + print(f" Error: {e}") + break + + # Summary + if 
len(results) > 1: + print("\n" + "=" * 60) + print("Summary (Peak Memory)") + print("=" * 60) + print(f"{'Size':>8} {'Pixels':>12} {'Peak Mem':>12} {'Per Pixel':>12}") + print("-" * 46) + for r in results: + bpp = r["calc_peak"] / r["pixels"] + print(f"{r['size']:>8} {r['pixels']:>12,} {format_size(r['calc_peak']):>12} {bpp:>10.1f} B") + + # Extrapolate to 10k x 10k + if len(results) >= 2: + last = results[-1] + bpp = last["calc_peak"] / last["pixels"] + est_10k = bpp * 10000 * 10000 + print(f"\nEstimated memory for 10k×10k: {format_size(est_10k)}") + + +if __name__ == "__main__": + main() diff --git a/specs/OVERVIEW.md b/specs/OVERVIEW.md new file mode 100644 index 0000000..0891e49 --- /dev/null +++ b/specs/OVERVIEW.md @@ -0,0 +1,165 @@ +# SOLWEIG Algorithm Overview + +SOLWEIG (Solar and Longwave Environmental Irradiance Geometry) calculates mean radiant temperature (Tmrt) and thermal comfort indices in complex urban environments. + +**Primary References:** + +- Lindberg F, Grimmond CSB, Gabey A, Huang B, Kent CW, Sun T, Theeuwes N, Järvi L, Ward H, Capel-Timms I, Chang YY, Jonsson P, Krave N, Liu D, Meyer D, Olofson F, Tan JG, Wästberg D, Xue L, Zhang Z (2018) "Urban Multi-scale Environmental Predictor (UMEP) - An integrated tool for city-based climate services." Environmental Modelling and Software 99, 70-87. [doi:10.1016/j.envsoft.2017.09.020](https://doi.org/10.1016/j.envsoft.2017.09.020) +- Lindberg F, Holmer B, Thorsson S (2008) "SOLWEIG 1.0 - Modelling spatial variations of 3D radiant fluxes and mean radiant temperature in complex urban settings." International Journal of Biometeorology 52(7), 697-713. +- Lindberg F, Grimmond CSB (2011) "The influence of vegetation and building morphology on shadow patterns and mean radiant temperatures in urban areas: model development and evaluation." Theoretical and Applied Climatology 105, 311-323. 
+- Lindberg F, Grimmond CSB, Martilli A (2015) "Sunlit fractions on urban facets - Impact of spatial resolution and approach." Urban Climate 12, 65-84. + +## Specification Files + +| File | Description | +| ---------------------------------------------- | ----------------------------------------------- | +| [OVERVIEW.md](OVERVIEW.md) | This file - pipeline and relationships | +| [shadows.md](shadows.md) | Shadow casting algorithm | +| [svf.md](svf.md) | Sky View Factor calculation | +| [gvf.md](gvf.md) | Ground View Factor calculation | +| [radiation.md](radiation.md) | Shortwave and longwave radiation | +| [ground_temperature.md](ground_temperature.md) | Surface temperature and thermal delay model | +| [tmrt.md](tmrt.md) | Mean Radiant Temperature | +| [utci.md](utci.md) | Universal Thermal Climate Index | +| [pet.md](pet.md) | Physiological Equivalent Temperature | +| [technical.md](technical.md) | Implementation details (tiling, precision, GPU) | +| [runtime-contract.md](runtime-contract.md) | Runtime API preconditions and output semantics | + +`runtime-contract.md` is the normative source for API/runtime behavior +(SVF/anisotropic preconditions, shadow convention, and return semantics). 
+ +## Pipeline + +``` +┌─────────────────────────────────────────────────────────────────────────┐ +│ INPUT DATA │ +├─────────────────────────────────────────────────────────────────────────┤ +│ Geometry │ Weather │ Time │ +│ - DSM │ - Air temp (Ta) │ - Date/time │ +│ - CDSM (veg) │ - Humidity (RH) │ - Location (lat/lon) │ +│ - Buildings │ - Wind speed │ │ +│ - Walls │ - Global rad (G) │ │ +└────────┬────────────────────┬─────────────────────┬────────────────────┘ + │ │ │ + ▼ │ │ +┌─────────────────┐ │ │ +│ SHADOWS │◄──────────┼─────────────────────┘ +│ (shadows.md) │ │ Sun position +└────────┬────────┘ │ + │ shadow mask │ + ▼ │ +┌─────────────────┐ │ +│ SVF │ │ +│ (svf.md) │ │ +└────────┬────────┘ │ + │ sky view factors │ + ▼ │ +┌─────────────────┐ │ +│ GVF │ │ +│ (gvf.md) │ │ +└────────┬────────┘ │ + │ ground view factors│ + ▼ ▼ +┌─────────────────────────────────────────┐ +│ RADIATION │ +│ (radiation.md) │ +│ ┌─────────────┐ ┌─────────────┐ │ +│ │ Shortwave K │ │ Longwave L │ │ +│ │ Kdown, Kup │ │ Ldown, Lup │ │ +│ │ Kside │ │ Lside │ │ +│ └─────────────┘ └─────────────┘ │ +└────────────────────┬────────────────────┘ + │ all radiation fluxes + ▼ + ┌─────────────────┐ + │ Tmrt │ + │ (tmrt.md) │ + └────────┬────────┘ + │ mean radiant temperature + ┌─────────┴─────────┐ + ▼ ▼ +┌─────────────────┐ ┌─────────────────┐ +│ UTCI │ │ PET │ +│ (utci.md) │ │ (pet.md) │ +└─────────────────┘ └─────────────────┘ + thermal comfort indices +``` + +## Module Dependencies + +| Module | Depends On | Produces | +| ------------- | -------------------------------- | -------------------------- | +| **Shadows** | DSM, sun position | Shadow mask (per timestep) | +| **SVF** | DSM, CDSM | Sky view factors (static) | +| **GVF** | SVF, walls, albedo | Ground view factors | +| **Radiation** | Shadows, SVF, GVF, weather | K and L fluxes | +| **Tmrt** | All radiation fluxes | Mean radiant temperature | +| **UTCI** | Tmrt, Ta, RH, wind | Thermal comfort index | +| **PET** | Tmrt, Ta, RH, wind, 
human params | Thermal comfort index | + +## Static vs Dynamic Calculations + +**Calculated Once (static geometry):** + +- SVF - depends only on DSM geometry +- GVF - depends on SVF and surface properties + +**Calculated Per Timestep:** + +- Shadows - sun position changes +- Radiation - sun position + weather changes +- Tmrt - radiation changes +- UTCI/PET - all inputs change + +## Key Physical Principles + +### 1. Shadow Casting + +Shadows are cast opposite to the sun direction. Shadow length depends on obstacle height and sun altitude: `L = h / tan(α)`. + +### 2. Sky View Factor + +SVF represents the fraction of sky visible from a point. In open terrain SVF=1, in deep canyons SVF<0.5. Affects how much sky radiation reaches the surface. + +### 3. Radiation Balance + +Total radiation at a point combines: + +- **Direct shortwave (I)** - blocked by shadows +- **Diffuse shortwave (D)** - reduced by low SVF +- **Reflected shortwave** - from ground and walls +- **Longwave from sky** - depends on SVF and cloud cover +- **Longwave from ground** - depends on ground temperature +- **Longwave from walls** - depends on wall temperature and view factor + +### 4. Mean Radiant Temperature + +Tmrt integrates radiation from all directions, weighted by human body geometry: + +``` +Tmrt = (Sstr / (ε × σ))^0.25 - 273.15 +``` + +Where Sstr = absorbed radiation from all 6 directions. + +### 5. Thermal Comfort + +UTCI and PET translate the physical environment (Tmrt, Ta, wind, humidity) into equivalent temperatures that represent physiological response. 
+ +## Coordinate Conventions + +- **DSM arrays**: Row 0 = North, increasing rows = South +- **Azimuth**: 0° = North, 90° = East, 180° = South, 270° = West +- **Altitude**: 0° = horizon, 90° = zenith (directly overhead) + +## Units + +| Quantity | Unit | +| ---------------- | -------------------- | +| Elevation/height | meters (m) | +| Temperature | degrees Celsius (°C) | +| Radiation | W/m² | +| Wind speed | m/s | +| Humidity | % (relative) | +| SVF/GVF | dimensionless (0-1) | +| Pixel size | meters | diff --git a/specs/ground_temperature.md b/specs/ground_temperature.md new file mode 100644 index 0000000..0505f3b --- /dev/null +++ b/specs/ground_temperature.md @@ -0,0 +1,180 @@ +# Ground Temperature Model + +Surface temperature parameterization for ground longwave emission calculations. + +**Primary References:** + +- Lindberg F, Onomura S, Grimmond CSB (2016) "Influence of ground surface characteristics on the mean radiant temperature in urban areas." International Journal of Biometeorology 60(9):1439-1452. +- Lindberg F, Grimmond CSB (2011) "The influence of vegetation and building morphology on shadow patterns and mean radiant temperatures in urban areas." Theoretical and Applied Climatology 105:311-323. + +## Overview + +Ground surface temperature directly affects upwelling longwave radiation (Lup), which contributes significantly to mean radiant temperature in urban environments. The model accounts for: + +1. **Solar heating** - Direct and diffuse radiation absorption +2. **Thermal inertia** - Delayed response due to material heat capacity +3. **Surface properties** - Albedo, emissivity, thermal conductivity + +## TsWaveDelay Model + +The thermal delay model simulates ground temperature response to changing radiation conditions using an exponential decay function. 
+ +### Equation + +```text +T_ground(t) = T_current × (1 - w) + T_previous × w + +where: + w = exp(-33.27 × Δt) + Δt = time since last update (fraction of day) +``` + +### Parameters + +| Parameter | Value | Description | +| --------- | ----- | ----------- | +| Decay constant | 33.27 | Thermal response rate (day⁻¹) | +| Time threshold | 59/1440 | Minimum time step (~59 minutes) | + +### Physical Interpretation + +The decay constant (33.27 day⁻¹) corresponds to a thermal time constant of approximately: + +```text +τ = 1 / 33.27 ≈ 0.030 days ≈ 43 minutes +``` + +This represents the characteristic time for surface temperature to respond to changes in radiative forcing. After one time constant: + +- 63% of adjustment to new equilibrium +- After 3τ (~2 hours): 95% adjustment + +### Algorithm + +```python +def TsWaveDelay(T_current, first_morning, time_accumulated, timestep, T_previous): + """ + Apply thermal delay to ground temperature. + + Args: + T_current: Current radiative equilibrium temperature + first_morning: True if first timestep after sunrise + time_accumulated: Time since last full update (fraction of day) + timestep: Current timestep duration (fraction of day) + T_previous: Previous delayed temperature + + Returns: + T_delayed: Temperature with thermal inertia applied + time_accumulated: Updated time accumulator + T_previous: Updated previous temperature for next iteration + """ + if first_morning: + T_previous = T_current + + if time_accumulated >= 59/1440: # ~59 minutes threshold + weight = exp(-33.27 * time_accumulated) + T_previous = T_current * (1 - weight) + T_previous * weight + T_delayed = T_previous + time_accumulated = timestep if timestep > 59/1440 else 0 + else: + time_accumulated += timestep + weight = exp(-33.27 * time_accumulated) + T_delayed = T_current * (1 - weight) + T_previous * weight + + return T_delayed, time_accumulated, T_previous +``` + +## Surface Temperature Parameterization + +For computing the instantaneous radiative 
equilibrium temperature, SOLWEIG uses a linear parameterization based on solar altitude. + +### Linear Model + +```text +T_surface = Tstart + k × α_max + +where: + Tstart = surface temperature at sunrise (°C offset from Ta) + k = temperature increase per degree of solar altitude (°C/°) + α_max = maximum solar altitude during the day (°) +``` + +### Land Cover Parameters + +| Surface Type | Tstart (°C) | k (°C/°) | TmaxLST | Source | +| ------------ | ----------- | -------- | ------- | ------ | +| Cobblestone | -3.41 | 0.37 | 15:00 | Lindberg et al. (2016) | +| Dark asphalt | -9.78 | 0.58 | 15:00 | Lindberg et al. (2016) | +| Grass | -3.38 | 0.21 | 14:00 | Lindberg et al. (2016) | +| Bare soil | -3.01 | 0.33 | 14:00 | Estimated | +| Water | 0.0 | 0.05 | 16:00 | Estimated | + +Note: Tstart is the temperature offset from air temperature at sunrise. Negative values indicate surfaces cooler than air at dawn. + +## Properties + +### Thermal Inertia Effects + +1. **Morning lag** - Surfaces warm slower than instantaneous equilibrium +2. **Afternoon persistence** - Surfaces remain warm after solar maximum +3. **Evening cooling** - Gradual temperature decrease after sunset + +### Material Dependence + +4. **High thermal mass** (concrete, stone): Slower response, τ > 1 hour +5. **Low thermal mass** (thin asphalt): Faster response, τ < 30 minutes +6. **Vegetation**: Complex due to evapotranspiration + +### Diurnal Pattern + +```text +Morning: T_ground < T_equilibrium (heating lag) +Midday: T_ground ≈ T_equilibrium (near steady state) +Afternoon: T_ground > T_equilibrium (cooling lag) +Night: T_ground slowly approaches T_air +``` + +## Implementation Notes + +### State Management + +The thermal delay model requires state to be carried between timesteps: + +- `T_previous`: Last computed delayed temperature +- `time_accumulated`: Time since last weight reset + +For accurate results, use `calculate_timeseries()` which automatically manages thermal state. 
Single-timestep calculations with `calculate()` will not capture thermal inertia effects. + +### Directional Components + +Ground temperature affects directional Lup components (Lup_E, Lup_S, Lup_W, Lup_N) which are computed using Ground View Factors in each direction. + +### Nighttime Behavior + +At night (sun_altitude ≤ 0): + +- No solar heating contribution +- Temperature decays toward air temperature +- Emissivity assumed constant (typically 0.95) + +## Validation Status + +The TsWaveDelay model parameters (decay constant 33.27) require validation against: + +- [ ] In-situ surface temperature measurements +- [ ] Comparison with force-restore energy balance models +- [ ] Sensitivity analysis for different surface types + +The current parameterization is empirical and may need adjustment for specific climates or surface materials. + +## References + +**Primary UMEP Citation:** + +- Lindberg F, Grimmond CSB, Gabey A, Huang B, Kent CW, Sun T, Theeuwes N, Järvi L, Ward H, Capel-Timms I, Chang YY, Jonsson P, Krave N, Liu D, Meyer D, Olofson F, Tan JG, Wästberg D, Xue L, Zhang Z (2018) "Urban Multi-scale Environmental Predictor (UMEP) - An integrated tool for city-based climate services." Environmental Modelling and Software 99, 70-87. [doi:10.1016/j.envsoft.2017.09.020](https://doi.org/10.1016/j.envsoft.2017.09.020) + +**Ground Temperature Model:** + +- Lindberg F, Holmer B, Thorsson S (2008) "SOLWEIG 1.0 - Modelling spatial variations of 3D radiant fluxes and mean radiant temperature in complex urban settings." International Journal of Biometeorology 52(7), 697-713. +- Lindberg F, Onomura S, Grimmond CSB (2016) "Influence of ground surface characteristics on the mean radiant temperature in urban areas." International Journal of Biometeorology 60(9), 1439-1452. +- Offerle B, Grimmond CSB, Oke TR (2003) "Parameterization of net all-wave radiation for urban areas." Journal of Applied Meteorology 42(8), 1157-1173. 
diff --git a/specs/gvf.md b/specs/gvf.md new file mode 100644 index 0000000..3e8b996 --- /dev/null +++ b/specs/gvf.md @@ -0,0 +1,167 @@ +# Ground View Factor (GVF) + +Fraction of the hemisphere occupied by ground and wall surfaces (as opposed to sky). Determines how much reflected shortwave and emitted longwave radiation from surfaces reaches a point. + +**Reference:** Lindberg et al. (2008) Section 2.3 + +## Equation + +GVF is complementary to SVF for an unobstructed point: + +```text +GVF = 1 - SVF (simplified, for flat ground) +``` + +In practice, GVF accounts for the actual ground and wall surfaces visible: + +```text +GVF = Σ (surface_area × view_factor × surface_property) +``` + +Where surface_property can be albedo (for reflected shortwave) or emissivity (for longwave). + +## Wall Integration Method + +**Reference:** Lindberg et al. (2008) Section 2.3, Holmer et al. (2015) "SOLWEIG-POI: a new model for estimating Tmrt at points of interest" + +When walls are present, GVF is computed using geometric integration of visible surfaces from a person's height above ground. The method considers: + +### Full GVF Calculation (with walls) + +The implementation in `gvf.py` calls the Rust `gvf_calc` function which: + +1. **Person height parameters**: Uses human height to determine view geometry + + - `first = round(height)` - primary height parameter + - `second = round(height × 20)` - finer height discretization + +2. **Wall visibility**: For each pixel, integrates visible wall surfaces in all directions + + - Wall heights (`wall_ht`) define vertical obstruction + - Wall aspects (`wall_asp`) define cardinal orientation + - Shadow fraction adjusts wall temperature contribution + +3. **Directional components**: Splits GVF into cardinal directions (N, E, S, W) + + - Ground contribution: Based on distance and elevation angle + - Wall contribution: Based on wall height, orientation, and temperature + +4. 
**Temperature-weighted emission**: Longwave GVF includes thermal emission + + ```text + Lup = ε_surface × σ × T_surface^4 × GVF + ``` + + Where: + + - Sunlit walls: T_wall = T_air + Tg_wall + - Shaded walls: T_wall = T_air + - Ground: T_ground = T_air + Tg (shadow-dependent) + +5. **Albedo weighting**: Shortwave GVF weighted by surface albedo + + ```text + GVF_alb = albedo × GVF + ``` + +### Simplified GVF (no walls) + +When wall data is unavailable, uses simplified calculation: + +```text +GVF_simple = 1 - SVF +Lup = ε_ground × σ × (T_air + Tg × shadow)^4 +GVF_alb = albedo_ground × GVF_simple +``` + +This assumes only ground surfaces contribute (no walls). + +## Inputs + +| Input | Type | Description | +| ----- | ---- | ----------- | +| SVF arrays | 2D arrays (0-1) | Sky view factors (overall + directional) | +| walls | 2D array (m) | Wall height grid | +| albedo | float or 2D array | Ground surface albedo (0-1) | +| emissivity | float or 2D array | Ground surface emissivity (~0.95) | + +## Outputs + +| Output | Type | Description | +| ------ | ---- | ----------- | +| gvf_lup | 2D array | Ground view factor for longwave up | +| gvf_alb | 2D array | Ground view factor weighted by albedo | +| gvf_east | 2D array | GVF from eastern direction | +| gvf_south | 2D array | GVF from southern direction | +| gvf_west | 2D array | GVF from western direction | +| gvf_north | 2D array | GVF from northern direction | + +## Properties + +### Range Properties + +1. **GVF in range [0, 1]** + - GVF = 0: no ground/walls visible (open sky above) + - GVF = 1: completely enclosed (no sky visible) + +2. **GVF + SVF ≈ 1** + - For horizontal surfaces: GVF ≈ 1 - SVF + - Small deviations due to wall contributions + +### Geometric Properties + +3. **Flat open terrain has GVF ≈ 0** + - No walls or elevated surfaces to reflect/emit + - Only ground below contributes + +4. **Urban canyon has high GVF** + - Walls on both sides increase GVF + - More reflected radiation in canyons + +5. 
**Higher walls increase GVF** + - Taller buildings → more wall surface visible + - More longwave emission from walls + +### Directional Properties + +6. **Directional GVF depends on wall orientation** + - East-facing wall contributes to gvf_west (the wall faces observers located to its east, so it appears in their western view) + - Asymmetric building layout → asymmetric directional GVF + +## Role in Radiation + +GVF determines how much radiation comes from surfaces vs sky: + +**Reflected Shortwave (Kup)**: +```text +Kup = (I × shadow + D) × GVF_alb +``` + +Note: GVF_alb already carries the ground albedo weighting (GVF_alb = albedo × GVF), so albedo is not applied a second time here. + +**Longwave from Ground (Lup)**: +```text +Lup = ε × σ × Tground^4 × GVF_lup +``` + +**Longwave from Walls**: +```text +Lwall = ε × σ × Twall^4 × wall_view_factor +``` + +## Relationship to SVF + +| Location | SVF | GVF | Characteristic | +| -------- | --- | --- | -------------- | +| Open field | ~1.0 | ~0.0 | Sky-dominated | +| Street canyon | ~0.4 | ~0.6 | Mixed | +| Courtyard | ~0.2 | ~0.8 | Surface-dominated | +| Under canopy | ~0.1 | ~0.9 | Enclosed | + +## References + +**Primary UMEP Citation:** + +- Lindberg F, Grimmond CSB, Gabey A, Huang B, Kent CW, Sun T, Theeuwes N, Järvi L, Ward H, Capel-Timms I, Chang YY, Jonsson P, Krave N, Liu D, Meyer D, Olofson F, Tan JG, Wästberg D, Xue L, Zhang Z (2018) "Urban Multi-scale Environmental Predictor (UMEP) - An integrated tool for city-based climate services." Environmental Modelling and Software 99, 70-87. [doi:10.1016/j.envsoft.2017.09.020](https://doi.org/10.1016/j.envsoft.2017.09.020) + +**GVF Model:** + +- Lindberg F, Holmer B, Thorsson S (2008) "SOLWEIG 1.0 - Modelling spatial variations of 3D radiant fluxes and mean radiant temperature in complex urban settings." International Journal of Biometeorology 52(7), 697-713. 
diff --git a/specs/pet.md b/specs/pet.md new file mode 100644 index 0000000..792abff --- /dev/null +++ b/specs/pet.md @@ -0,0 +1,233 @@ +# Physiological Equivalent Temperature (PET) + +The air temperature at which, in a typical indoor setting, the human energy budget is balanced with the same core and skin temperature as under the actual outdoor conditions. + +**Primary References:** + +- Höppe P (1999) "The physiological equivalent temperature - a universal index for the biometeorological assessment of the thermal environment." International Journal of Biometeorology 43:71-75. +- Mayer H, Höppe P (1987) "Thermal comfort of man in different urban environments." Theoretical and Applied Climatology 38:43-49. +- VDI 3787 Part 2 (2008) "Environmental Meteorology - Methods for the human biometeorological evaluation of climate and air quality for urban and regional planning." + +## MEMI Energy Balance Model + +**Reference:** Höppe P (1984) "Die Energiebilanz des Menschen." Wiss Mitt Meteorol Inst Univ München 49. + +PET is calculated using the Munich Energy Balance Model for Individuals (MEMI), a two-node model of human thermoregulation: + +```text +M + W = R + C + E_sk + E_re + S +``` + +Where: + +- M = metabolic rate (W) +- W = mechanical work (W), typically ~0 for sedentary activities +- R = net radiation heat flow (W) +- C = convective heat flow (W) +- E_sk = latent heat flow from skin evaporation (W) +- E_re = respiratory heat loss (latent + sensible) (W) +- S = body heat storage (W), positive = body warming + +**PET Definition:** The air temperature at which, in a reference indoor environment (Tmrt = Ta, v = 0.1 m/s, RH = 50%), the human body would have the same core and skin temperature as in the actual outdoor environment. + +### Metabolic Rate + +**Reference:** ISO 8996:2021 "Ergonomics of the thermal environment - Determination of metabolic rate." 
+ +| Activity | Metabolic Rate (W/m²) | Description | +|----------|----------------------|-------------| +| Resting | 58 | Lying quietly | +| Sitting | 65 | Office work | +| Standing relaxed | 70 | Standing still | +| Light walking | 80 | 2 km/h (SOLWEIG default) | +| Normal walking | 110 | 4 km/h | +| Brisk walking | 150 | 6 km/h | + +The default SOLWEIG value of 80 W/m² represents a person standing or slowly walking outdoors. + +## Inputs + +| Input | Type | Description | +| ----- | ---- | ----------- | +| Ta | float or 2D array (°C) | Air temperature | +| Tmrt | float or 2D array (°C) | Mean radiant temperature | +| v | float or 2D array (m/s) | Wind speed | +| RH | float or 2D array (%) | Relative humidity | +| age | float (years) | Person's age | +| height | float (m) | Person's height | +| weight | float (kg) | Person's weight | +| sex | int | 1=male, 2=female | +| activity | float (W/m²) | Metabolic activity level | +| clothing | float (clo) | Clothing insulation | + +## Outputs + +| Output | Type | Description | +| ------ | ---- | ----------- | +| PET | float or 2D array (°C) | Physiological Equivalent Temperature | + +## Default Human Parameters + +| Parameter | Default | Description | +| --------- | ------- | ----------- | +| age | 35 years | Middle-aged adult | +| height | 1.75 m | Average height | +| weight | 75 kg | Average weight | +| sex | 1 (male) | Reference person | +| activity | 80 W/m² | Light walking | +| clothing | 0.9 clo | Summer business attire | + +## Comfort Categories + +| PET (°C) | Thermal Perception | Grade of Stress | +| -------- | ------------------ | --------------- | +| > 41 | Very hot | Extreme heat stress | +| 35 to 41 | Hot | Strong heat stress | +| 29 to 35 | Warm | Moderate heat stress | +| 23 to 29 | Slightly warm | Slight heat stress | +| 18 to 23 | Comfortable | No thermal stress | +| 13 to 18 | Slightly cool | Slight cold stress | +| 8 to 13 | Cool | Moderate cold stress | +| 4 to 8 | Cold | Strong cold stress | +| < 4 | 
Very cold | Extreme cold stress | + +## Properties + +### Fundamental Properties + +1. **PET is person-specific** + - Varies with age, sex, fitness level + - Same environment can have different PET for different people + +2. **PET reference is indoor** + - Reference: Tmrt=Ta, v=0.1m/s, RH=50% + - PET=21°C is comfortable indoors + +### Radiation Properties + +3. **Higher Tmrt → higher PET** + - Radiation increases heat load + - Sun to shade: ΔPET ≈ 5-15°C + +4. **PET more sensitive to radiation than UTCI** + - Direct sun has larger effect on PET + - Better captures radiant heat stress + +### Personal Factor Properties + +5. **Activity increases PET** + - Higher metabolic rate → more heat generated + - Running vs standing: ΔPET ≈ 5-10°C + +6. **Clothing affects PET bidirectionally** + - In heat: more clothing → higher PET + - In cold: more clothing → lower PET (better insulated) + +7. **Age affects thermoregulation** + - Elderly have reduced sweating capacity + - Children have higher surface-to-mass ratio + +### Wind Properties + +8. **Wind generally reduces PET** + - Convective heat loss increases + - Less effective at high humidity + +## Comparison: PET vs UTCI + +| Aspect | PET | UTCI | +| ------ | --- | ---- | +| Reference | Indoor environment | Outdoor walking | +| Personal factors | Yes (age, sex, etc.) | No (fixed person) | +| Clothing | Variable input | Fixed (adaptive) | +| Activity | Variable input | Fixed (walking 4 km/h) | +| Computation | Iterative solver | Polynomial | +| Speed | Slower | Faster | + +## Typical Values + +| Condition | Ta | Tmrt | PET | Perception | +| --------- | -- | ---- | --- | ---------- | +| Hot sunny | 35 | 65 | 48 | Very hot | +| Hot shaded | 35 | 40 | 38 | Hot | +| Pleasant | 22 | 25 | 22 | Comfortable | +| Cool shade | 18 | 18 | 17 | Slightly cool | +| Cold | 5 | 5 | 5 | Cold | + +## Implementation Notes + +### Iterative Solution + +PET requires solving the energy balance iteratively to find the equivalent temperature. 
The algorithm: + +1. Initialize with Ta as first guess +2. Compute skin and core temperatures for actual conditions +3. Find indoor Ta that produces same temperatures +4. Convergence typically within 10-20 iterations (tolerance ~0.01°C) + +### Body Surface Area (DuBois Formula) + +**Reference:** DuBois D, DuBois EF (1916) "A formula to estimate the approximate surface area if height and weight be known." Archives of Internal Medicine 17:863-871. + +The body surface area A_body (m²) is calculated from height (m) and weight (kg): + +```text +A_body = 0.203 × height^0.725 × weight^0.425 +``` + +This empirical formula, derived from direct body surface measurements, remains the standard for thermoregulation calculations. For the default person (1.75m, 75kg): + +```text +A_body = 0.203 × 1.75^0.725 × 75^0.425 ≈ 1.90 m² +``` + +### Clothing Insulation + +**Reference:** ISO 9920:2007 "Ergonomics of the thermal environment - Estimation of thermal insulation and water vapour resistance of a clothing ensemble." + +Clothing insulation is measured in clo units (1 clo = 0.155 m²K/W): + +| Ensemble | Insulation (clo) | Description | +|----------|------------------|-------------| +| Shorts only | 0.1 | Minimal | +| Light summer | 0.5 | T-shirt, shorts | +| Summer business | 0.9 | Shirt, trousers (SOLWEIG default) | +| Winter indoor | 1.0 | Sweater, trousers | +| Winter outdoor | 1.5-2.0 | Coat, layers | + +The clothing area factor accounts for increased surface area due to clothing: + +```text +f_cl = 1 + 0.15 × I_cl +``` + +Where I_cl is clothing insulation in clo. + +### Convective Heat Transfer + +**Reference:** Fanger PO (1970) "Thermal Comfort: Analysis and Applications in Environmental Engineering." Danish Technical Press, Copenhagen. + +Convective heat transfer coefficient (W/m²K): + +```text +h_c = 2.38 × |T_skin - T_air|^0.25 (natural convection) +h_c = 12.1 × √v (forced convection, v in m/s) +``` + +The larger of the two values is used. 
+ +## References + +**Primary UMEP Citation:** + +- Lindberg F, Grimmond CSB, Gabey A, Huang B, Kent CW, Sun T, Theeuwes N, Järvi L, Ward H, Capel-Timms I, Chang YY, Jonsson P, Krave N, Liu D, Meyer D, Olofson F, Tan JG, Wästberg D, Xue L, Zhang Z (2018) "Urban Multi-scale Environmental Predictor (UMEP) - An integrated tool for city-based climate services." Environmental Modelling and Software 99, 70-87. [doi:10.1016/j.envsoft.2017.09.020](https://doi.org/10.1016/j.envsoft.2017.09.020) + +**PET Model:** + +- Höppe P (1999) "The physiological equivalent temperature - a universal index for the biometeorological assessment of the thermal environment." International Journal of Biometeorology 43(2), 71-75. +- Matzarakis A, Mayer H, Iziomon MG (1999) "Applications of a universal thermal index: physiological equivalent temperature." International Journal of Biometeorology 43(2), 76-84. + +**Human Thermal Balance:** + +- Fanger PO (1970) "Thermal Comfort: Analysis and Applications in Environmental Engineering." Danish Technical Press, Copenhagen. +- Gagge AP, Fobelets AP, Berglund LG (1986) "A standard predictive index of human response to the thermal environment." ASHRAE Transactions 92, 709-731. diff --git a/specs/radiation.md b/specs/radiation.md new file mode 100644 index 0000000..3026d85 --- /dev/null +++ b/specs/radiation.md @@ -0,0 +1,356 @@ +# Radiation + +Shortwave (solar) and longwave (thermal) radiation calculations. + +**Primary References:** + +- Lindberg et al. (2008) Sections 2.4-2.6 +- Jonsson et al. (2006) - Sky emissivity formulation +- Reindl et al. (1990) - Diffuse fraction correlations +- Perez et al. 
(1993) - Anisotropic sky luminance distribution + +## Overview + +Radiation at any point comes from six directions (up, down, and four cardinal sides), split into shortwave (K) and longwave (L) components: + +```text +Total radiation = Shortwave (K) + Longwave (L) + = (Kdown + Kup + Kside) + (Ldown + Lup + Lside) +``` + +## Shortwave Radiation (K) + +Solar radiation, wavelengths ~0.3-3 μm. + +### Diffuse Fraction (Reindl Model) + +Global radiation (G) is split into direct (I) and diffuse (D) using the Reindl et al. (1990) correlation. + +**Reference:** Reindl DT, Beckman WA, Duffie JA (1990) "Diffuse fraction correlations." Solar Energy 45(1):1-7. + +The model uses piecewise correlations based on clearness index (Kt): + +```text +G = I + D +Kt = G / I0_et (clearness index = ratio to extraterrestrial) + +If Kt ≤ 0.3 (overcast): + D/G = 1 - 0.232×Kt + 0.0239×sin(α) - 0.000682×Ta + 0.0195×RH + +If 0.3 < Kt < 0.78 (partly cloudy): + D/G = 1.329 - 1.716×Kt + 0.267×sin(α) - 0.00357×Ta + 0.106×RH + +If Kt ≥ 0.78 (clear): + D/G = 0.426×Kt - 0.256×sin(α) + 0.00349×Ta + 0.0734×RH +``` + +Where: + +- α = solar altitude angle (radians) +- Ta = air temperature (°C) +- RH = relative humidity (fraction, 0-1) + +When Ta and RH are unavailable, simplified correlations using only Kt are used. + +**Properties:** + +1. Clear sky: D/G ≈ 0.1-0.2 (mostly direct) +2. Overcast: D/G ≈ 0.9-1.0 (mostly diffuse) +3. D/G increases at low sun altitudes + +### Anisotropic Diffuse Sky (Perez Model) + +For improved accuracy, diffuse radiation can use anisotropic sky luminance distribution. + +**Reference:** Perez R, Seals R, Michalsky J (1993) "All-weather model for sky luminance distribution - Preliminary configuration and validation." Solar Energy 50(3):235-245. + +The Perez model divides the sky into three components: + +1. **Isotropic background** - uniform diffuse +2. **Circumsolar brightening** - enhanced near sun disk +3. 
**Horizon brightening** - enhanced near horizon + +#### Sky Luminance Distribution + +The relative luminance L at any sky element is given by: + +```text +L(θ,γ) = (1 + a×exp(b/cos(θ))) × (1 + c×exp(d×γ) + e×cos²(γ)) +``` + +Where: + +- θ = zenith angle of sky element (radians) +- γ = angular distance from sun (radians) +- a, b, c, d, e = coefficients from 8 sky clearness bins + +#### Sky Clearness Categories + +Sky clearness parameter ε determines coefficient bins: + +| Bin | ε Range | Description | Typical Condition | +| --- | ---------- | --------------- | -------------------- | +| 1 | ε < 1.065 | Very overcast | Heavy cloud cover | +| 2 | 1.065-1.23 | Overcast | Thick clouds | +| 3 | 1.23-1.50 | Cloudy | Medium clouds | +| 4 | 1.50-1.95 | Partly cloudy | Scattered clouds | +| 5 | 1.95-2.80 | Partly clear | Few clouds | +| 6 | 2.80-4.50 | Clear | Mostly clear | +| 7 | 4.50-6.20 | Very clear | Exceptionally clear | +| 8 | ε > 6.20 | Extremely clear | Desert/high altitude | + +The clearness parameter ε is computed from: + +```text +ε = ((D + I)/D + 5.535×10⁻⁶×θz³) / (1 + 5.535×10⁻⁶×θz³) +``` + +Where θz is the solar zenith angle in degrees. + +#### Implementation in SOLWEIG + +The Rust implementation (`Perez_v3` in `rust/src/sky.rs` and Python wrapper in `physics/Perez_v3.py`): + +1. Computes sky clearness bin from solar geometry and radiation +2. Retrieves coefficients (a, b, c, d, e) for the bin +3. Evaluates luminance L for each sky patch (altitude, azimuth) +4. Normalizes to ensure integration equals diffuse radiation +5. Returns patch luminance array for anisotropic radiation calculation + +The patch luminance is then used to weight diffuse radiation: + +```text +drad = Σ (D × L_patch × visibility_patch × steradian_patch) +``` + +This provides spatially-varying diffuse radiation accounting for sky luminance distribution. 
+ +### Kdown (Diffuse from Sky) + +Diffuse shortwave from the sky hemisphere: + +```text +Isotropic: Kdown = D × SVF +Anisotropic: Kdown = Σ(D × L_patch × SVF_patch) +``` + +**Properties:** + +1. Kdown proportional to SVF +2. Higher SVF → more diffuse radiation +3. Range: 0 to ~500 W/m² (typical clear sky diffuse) + +### Kup (Reflected from Ground) + +Shortwave reflected upward from ground: + +```text +Kup = (I × shadow + D) × albedo × GVF +``` + +**Properties:** + +1. Higher albedo → more reflection +2. Shaded areas reflect less (no direct component) +3. Range: 0 to ~200 W/m² (depends on albedo) + +### Kside (Direct + Reflected to Walls) + +Shortwave reaching vertical surfaces: + +```text +Kside = I × cos(incidence_angle) × shadow_factor + reflected +``` + +**Properties:** + +1. Depends on wall orientation relative to sun +2. South-facing walls receive more in Northern Hemisphere +3. Directional: Keast, Ksouth, Kwest, Knorth + +## Longwave Radiation (L) + +Thermal radiation, wavelengths ~3-100 μm. + +### Sky Emissivity + +**Reference:** Jonsson P, Eliasson I, Holmer B, Grimmond CSB (2006) "Longwave incoming radiation in the Tropics: Results from field work in three African cities." Theoretical and Applied Climatology 85:185-201. 
+ +Sky emissivity is computed from air temperature and humidity: + +```text +ea = 6.107 × 10^((7.5 × Ta) / (237.3 + Ta)) × (RH / 100) +msteg = 46.5 × (ea / Ta_K) +ε_sky = 1 - (1 + msteg) × exp(-√(1.2 + 3.0 × msteg)) +``` + +Where: + +- ea = water vapor pressure (hPa) +- Ta = air temperature (°C) +- Ta_K = air temperature (K) +- RH = relative humidity (%) + +**Typical values:** + +- Clear dry sky: ε_sky ≈ 0.60-0.75 +- Clear humid sky: ε_sky ≈ 0.75-0.85 +- Cloudy sky: ε_sky → 1.0 + +**Cloud correction:** +When clearness index CI < 0.95 (non-clear conditions): + +```text +ε_sky_effective = CI × ε_sky + (1 - CI) × 1.0 +``` + +### Ldown (Sky Longwave) + +Thermal emission from atmosphere: + +```text +Ldown = ε_sky × σ × T_air^4 × SVF + + wall_contribution + + vegetation_contribution +``` + +Where: + +- ε_sky = sky emissivity (computed above) +- σ = Stefan-Boltzmann constant (5.67 × 10⁻⁸ W/m²K⁴) +- T_air = air temperature (K) + +**Properties:** + +1. Increases with humidity and cloud cover +2. Clear sky: Ldown ≈ 250-350 W/m² +3. Overcast: Ldown ≈ 350-450 W/m² + +### Lup (Ground Longwave) + +Thermal emission from ground surface: + +```text +Lup = ε_ground × σ × T_ground^4 × GVF +``` + +**Properties:** + +1. Increases with ground temperature +2. Hot asphalt can emit >500 W/m² +3. ε_ground typically 0.90-0.98 + +### Lside (Wall Longwave) + +Thermal emission from building walls: + +```text +Lside = ε_wall × σ × T_wall^4 × wall_view_factor +``` + +**Properties:** + +1. Sun-heated walls emit more +2. Directional: Least, Lsouth, Lwest, Lnorth +3. Important in urban canyons + +## Properties Summary + +### Conservation + +1. **Energy conservation**: Total radiation balanced +2. **Reciprocity**: View factors are symmetric + +### Shadow Effects + +1. **Shadows block direct shortwave only** + - Diffuse and longwave unaffected by shadows + - Shaded areas still receive Kdown, Ldown, Lup + +2. 
**Shadow reduces total K significantly** + - Sun to shade: ΔK ≈ 200-800 W/m² (depending on direct beam) + +### SVF Effects + +1. **Low SVF reduces sky radiation** + - Both Kdown and Ldown reduced + - But Lside from walls increases + +2. **Urban canyon radiation balance** + - Lower Kdown, Ldown (less sky) + - Higher Kup, Lside (more surfaces) + +### Temperature Effects + +1. **Hot surfaces increase longwave** + - Lup increases with ground temperature + - Can dominate radiation budget on hot days + +### Typical Values + +| Component | Clear Day Noon | Shaded | Night | +| -------------- | -------------- | ------- | ------- | +| Kdown | 100-200 | 100-200 | 0 | +| Kup | 50-150 | 30-100 | 0 | +| Kside (sunlit) | 200-600 | 0 | 0 | +| I (direct) | 600-900 | 0 | 0 | +| Ldown | 300-400 | 300-400 | 250-350 | +| Lup | 400-600 | 350-500 | 300-450 | +| Lside | 350-550 | 350-500 | 300-450 | + +All values in W/m². + +## Implementation Notes + +### Clearness Index Calculation + +**Reference:** Crawford TM, Duchon CE (1999) "An improved parameterization for estimating effective atmospheric emissivity for use in calculating daytime downwelling longwave radiation." Journal of Applied Meteorology 38:474-480. + +The clearness index CI is computed from global radiation compared to theoretical clear-sky radiation: + +```text +I0 = Itoa × cos(zen) × Trpg × Tw × D × Tar +CI = G / I0 +``` + +Where transmission coefficients account for: + +- Trpg = Rayleigh scattering and permanent gases +- Tw = water vapor absorption +- Tar = aerosol attenuation +- D = sun-earth distance correction + +### Isotropic vs Anisotropic Mode + +The model supports two diffuse radiation modes: + +1. **Isotropic** (default): Uniform diffuse sky, faster computation +2. 
**Anisotropic** (Perez): Non-uniform sky luminance, requires shadow matrices + +Use anisotropic mode when: + +- High accuracy required near buildings +- Studying directional radiation effects +- SVF < 0.7 (urban canyons) + +## References + +**Primary UMEP Citation:** + +- Lindberg F, Grimmond CSB, Gabey A, Huang B, Kent CW, Sun T, Theeuwes N, Järvi L, Ward H, Capel-Timms I, Chang YY, Jonsson P, Krave N, Liu D, Meyer D, Olofson F, Tan JG, Wästberg D, Xue L, Zhang Z (2018) "Urban Multi-scale Environmental Predictor (UMEP) - An integrated tool for city-based climate services." Environmental Modelling and Software 99, 70-87. [doi:10.1016/j.envsoft.2017.09.020](https://doi.org/10.1016/j.envsoft.2017.09.020) + +**Radiation Model:** + +- Lindberg F, Holmer B, Thorsson S (2008) "SOLWEIG 1.0 - Modelling spatial variations of 3D radiant fluxes and mean radiant temperature in complex urban settings." International Journal of Biometeorology 52(7), 697-713. + +**Diffuse Fraction:** + +- Reindl DT, Beckman WA, Duffie JA (1990) "Diffuse fraction correlations." Solar Energy 45(1), 1-7. + +**Anisotropic Sky:** + +- Perez R, Seals R, Michalsky J (1993) "All-weather model for sky luminance distribution - Preliminary configuration and validation." Solar Energy 50(3), 235-245. + +**Sky Emissivity:** + +- Jonsson P, Eliasson I, Holmer B, Grimmond CSB (2006) "Longwave incoming radiation in the Tropics: Results from field work in three African cities." Theoretical and Applied Climatology 85, 185-201. diff --git a/specs/runtime-contract.md b/specs/runtime-contract.md new file mode 100644 index 0000000..9c55a88 --- /dev/null +++ b/specs/runtime-contract.md @@ -0,0 +1,49 @@ +# Runtime Contract + +This document defines the normative runtime behavior for the Python API and +its tiled/non-tiled execution paths. If implementation and documentation +conflict, this contract is the source of truth. + +## Preconditions + +1. 
**SVF availability is required for all `calculate*` calls** + - `calculate()`, `calculate_timeseries()`, `calculate_tiled()`, and + `calculate_timeseries_tiled()` require SVF data to already be present on + `surface.svf` or `precomputed.svf`. + - SVF may be prepared by: + - `SurfaceData.prepare(...)` (computes/caches SVF when missing), or + - `surface.compute_svf()` for in-memory/manual workflows. + - Runtime calculation paths must not silently compute SVF as a fallback. + +2. **Explicit anisotropic requests have strict input requirements** + - If anisotropic sky is explicitly requested via + `use_anisotropic_sky=True`, shadow matrices must already be available on + `surface.shadow_matrices` or `precomputed.shadow_matrices`. + - Missing shadow matrices must raise `MissingPrecomputedData`. + - Runtime must not silently downgrade to isotropic sky when anisotropic is + explicitly requested. + +## Output Conventions + +1. **Shadow convention** + - `shadow` uses `1.0 = sunlit`, `0.0 = shaded`. + - This convention applies to daytime and nighttime outputs. + +2. **Timeseries return semantics** + - `timestep_outputs=["tmrt", "shadow", ...]`: returned `SolweigResult` objects + in `summary.results` keep the requested arrays in memory. + - `timestep_outputs=None` (default): implementation frees arrays after + aggregation to minimize memory use; `summary.results` is empty. + +## Default Behavior + +1. Default anisotropic behavior is consistent across public entry points: + - `calculate()` and `calculate_tiled()` use the same anisotropic default. +2. Thermal state chaining for timeseries remains automatic and is unaffected by + output streaming mode. + +## Documentation Requirements + +1. User docs and examples must state SVF is explicit at `calculate*` runtime. +2. Docs must not claim anisotropic shadow matrices are auto-generated during + calculation; preparation must be explicit via preprocessing helpers. 
diff --git a/specs/shadows.md b/specs/shadows.md new file mode 100644 index 0000000..40deeeb --- /dev/null +++ b/specs/shadows.md @@ -0,0 +1,180 @@ +# Shadow Calculation + +Calculates where shadows fall based on sun position, buildings, and vegetation. + +**Reference:** Lindberg et al. (2008) Section 2.2 - Shadow casting algorithm + +## Equations + +### Shadow Length +``` +L = h / tan(α) +``` +- L = shadow length (meters) +- h = obstacle height above ground (meters) +- α = sun altitude angle (degrees) + +### Ray Marching +The algorithm traces rays from each ground pixel toward the sun: +``` +dx = -sign(cos(θ)) × step / tan(θ) # When E-W dominant +dy = sign(sin(θ)) × step # When E-W dominant +dz = (ds × step × tan(α)) / scale # Height gain per step +``` +- θ = sun azimuth (radians) +- ds = path length correction for diagonal movement + +### Shadow Condition +A pixel is sunlit if no obstacle along the ray to the sun is tall enough: +``` +sunlit[y,x] = 1 if propagated_height <= DSM[y,x] + = 0 otherwise +``` + +## Inputs + +| Input | Type | Description | +|-------|------|-------------| +| DSM | 2D array (m) | Digital Surface Model - elevation including buildings | +| sun_altitude | float (0-90°) | Sun elevation above horizon | +| sun_azimuth | float (0-360°) | Sun direction (0=N, 90=E, 180=S, 270=W) | +| pixel_size | float (m) | Resolution of DSM | +| CDSM | 2D array (m) | Optional: Canopy DSM for vegetation shadows | + +## Outputs + +| Output | Type | Description | +|--------|------|-------------| +| bldg_sh | 2D array | Building shadow mask (1=sunlit, 0=shadow) | +| veg_sh | 2D array | Vegetation shadow (accounts for transmissivity) | +| wall_sh | 2D array | Shadow height on walls | + +## Properties + +### Critical Properties + +1. **No shadows below horizon** + - When: sun_altitude ≤ 0° + - Then: all pixels are "sunlit" (no shadows cast) + - Reason: No direct beam radiation when sun below horizon + +2. 
**Flat terrain = no shadows** + - When: DSM is uniform (no elevation differences) + - Then: sunlit mask is all ones + - Reason: No obstacles to cast shadows + +3. **Binary shadow values** + - Building shadows are discrete: 0 or 1 + - No partial shadows (penumbra) in building shadow model + - Vegetation can have fractional values due to transmissivity + +### Geometric Properties + +4. **Shadows opposite sun direction** + - Sun from south (180°) → shadows extend north (toward row 0) + - Sun from east (90°) → shadows extend west (toward col 0) + +5. **Lower sun = longer shadows** + - As altitude decreases, shadow area increases + - At 45°: shadow length = obstacle height + - At 30°: shadow length ≈ 1.73 × height + - At 15°: shadow length ≈ 3.73 × height + +6. **Taller obstacles = longer shadows** + - Shadow length proportional to height: L ∝ h + +7. **Shadow length follows equation** + - Measured shadow length ≈ h / tan(α) within ±15% + - Tolerance accounts for pixel discretization + +### Rooftop Properties + +8. **Building tops are sunlit** + - Rooftops (local maxima) receive direct sun when altitude > 0 + - Unless shaded by taller neighboring buildings + +## Vegetation Shadows + +Vegetation shadows differ from building shadows due to partial light transmission through foliage. + +**Primary References:** + +- Konarska J, Lindberg F, Larsson A, Thorsson S, Holmer B (2014) "Transmissivity of solar radiation through crowns of single urban trees—application for outdoor thermal comfort modelling." Theoretical and Applied Climatology 117:363-376. +- Lindberg F, Grimmond CSB (2011) "The influence of vegetation and building morphology on shadow patterns and mean radiant temperatures in urban areas." Theoretical and Applied Climatology 105:311-323. + +### Canopy Transmissivity + +**Reference:** Konarska et al. 
(2014) + +Light transmission through tree canopies varies with species, leaf area index (LAI), and season: + +| Tree Type | Transmissivity | LAI | Description | +|-----------|----------------|-----|-------------| +| Dense deciduous (summer) | 0.02-0.05 | 5-7 | Oak, maple in full leaf | +| Medium deciduous | 0.05-0.15 | 3-5 | Typical urban trees | +| Open canopy | 0.15-0.30 | 2-3 | Young trees, sparse crown | +| Conifers | 0.10-0.20 | 4-6 | Year-round | +| Deciduous (winter) | 0.60-0.80 | 0-1 | Bare branches only | + +**SOLWEIG default:** 0.03 (3%) - represents dense summer canopy, conservative for shade provision studies. + +The transmitted radiation through vegetation: + +```text +I_transmitted = I_direct × transmissivity +``` + +### Trunk Zone Ratio + +**Reference:** Lindberg & Grimmond (2011) + +The trunk zone is the lower portion of the tree where only the solid trunk exists (no foliage). This zone casts solid shadows like buildings. + +```text +trunk_height = total_tree_height × trunk_ratio +canopy_height = total_tree_height × (1 - trunk_ratio) +``` + +**SOLWEIG default:** trunk_ratio = 0.25 (25%) + +This means for a 10m tree: +- Trunk zone: 0-2.5m (solid shadow) +- Canopy zone: 2.5-10m (transmissive shadow) + +Typical values by tree type: + +| Tree Form | Trunk Ratio | Example Species | +|-----------|-------------|-----------------| +| Street tree (pollarded) | 0.30-0.40 | Plane tree, linden | +| Natural form | 0.20-0.30 | Oak, beech | +| Conifer | 0.10-0.20 | Pine, spruce | +| Low-branching | 0.05-0.15 | Magnolia, ornamental | + +### Vegetation Shadow Formula + +Following the `1 = sunlit` convention used throughout this spec: + +```text +veg_sh = 0.0 if the ray intersects the trunk or canopy (shadowed) + = 1.0 if the ray is unobstructed (sunlit) +``` + +The trunk zone blocks light completely (solid shadow, like a building); canopy shadows are softened by the transmissivity factor applied in the combined formula below. + +The combined shadow (building + vegetation): + +```text +combined_sunlit = bldg_sh - (1 - veg_sh) × (1 - transmissivity) +``` + +## References + +**Primary UMEP Citation:** + +- Lindberg F, Grimmond CSB, Gabey A, Huang B, Kent CW, Sun T, Theeuwes N, 
Järvi L, Ward H, Capel-Timms I, Chang YY, Jonsson P, Krave N, Liu D, Meyer D, Olofson F, Tan JG, Wästberg D, Xue L, Zhang Z (2018) "Urban Multi-scale Environmental Predictor (UMEP) - An integrated tool for city-based climate services." Environmental Modelling and Software 99, 70-87. [doi:10.1016/j.envsoft.2017.09.020](https://doi.org/10.1016/j.envsoft.2017.09.020) + +**Shadow Algorithm:** + +- Lindberg F, Holmer B, Thorsson S (2008) "SOLWEIG 1.0 - Modelling spatial variations of 3D radiant fluxes and mean radiant temperature in complex urban settings." International Journal of Biometeorology 52(7), 697-713. + +**Vegetation Shadows:** + +- Lindberg F, Grimmond CSB (2011) "The influence of vegetation and building morphology on shadow patterns and mean radiant temperatures in urban areas: model development and evaluation." Theoretical and Applied Climatology 105, 311-323. +- Konarska J, Lindberg F, Larsson A, Thorsson S, Holmer B (2014) "Transmissivity of solar radiation through crowns of single urban trees—application for outdoor thermal comfort modelling." Theoretical and Applied Climatology 117, 363-376. diff --git a/specs/svf.md b/specs/svf.md new file mode 100644 index 0000000..f876bb7 --- /dev/null +++ b/specs/svf.md @@ -0,0 +1,184 @@ +# Sky View Factor (SVF) + +Fraction of the sky hemisphere visible from each point. Determines how much diffuse sky radiation and longwave sky emission reaches the surface. + +**Reference:** Lindberg et al. (2008) Section 2.1, Lindberg & Grimmond (2011) + +## Equation + +SVF is the ratio of radiation received from the sky to that from an unobstructed hemisphere: + +```text +SVF = Ω_sky / 2π +``` + +Where Ω_sky is the solid angle of visible sky (steradians). + +## Patch-Based Calculation Method + +**Reference:** Robinson & Stone (1990) "Solar Radiation Modelling in the Urban Context", Building and Environment 25(3):201-209. 
+ +SOLWEIG uses the patch-based method where the sky hemisphere is divided into discrete angular patches (annuli). SVF is computed by testing visibility to each patch and weighting by the patch's solid angle. + +### Patch Configuration + +The sky is divided into concentric annuli (altitude bands) from 0° to 90° elevation. Each annulus is further subdivided into azimuthal patches. Standard configurations: + +- **Option 1** (145 patches): Coarse for fast computation +- **Option 2** (153 patches - default): Balance of accuracy and speed +- **Option 3** (306 patches): Fine for high accuracy +- **Option 4** (930 patches): Research-grade resolution + +For option 2 (153 patches): + +- 8 altitude bands: 6°, 18°, 30°, 42°, 54°, 66°, 78°, 90° +- Azimuthal divisions per band: 31, 30, 28, 24, 19, 13, 7, 1 + +### Solid Angle Weight Calculation + +Each patch's contribution to SVF is weighted by its solid angle (steradian): + +```text +w_patch = Δφ × (sin(θ_max) - sin(θ_min)) +``` + +Where: +- Δφ = azimuthal width of patch (radians) +- θ_min, θ_max = altitude bounds of annulus (radians) + +For patch in annulus i with n_i azimuthal divisions: + +```text +Δφ_i = 2π / n_i +w_i = Δφ_i × (sin(θ_i + Δθ/2) - sin(θ_i - Δθ/2)) +``` + +### SVF Accumulation Formula + +```text +SVF = Σ_patches (w_patch × visibility_patch) +``` + +Where: +- w_patch = solid angle weight for the patch +- visibility_patch = 1 if patch center is unobstructed, 0 if blocked by DSM + +### Directional SVF + +Directional components split patches by azimuth quadrant: + +```text +SVF_east = Σ (w_patch × visibility) for 0° ≤ azimuth < 180° +SVF_south = Σ (w_patch × visibility) for 90° ≤ azimuth < 270° +SVF_west = Σ (w_patch × visibility) for 180° ≤ azimuth < 360° +SVF_north = Σ (w_patch × visibility) for 270° ≤ azimuth < 90° +``` + +### Algorithm Implementation + +The Rust implementation in `skyview.rs` computes SVF using: + +1. **Shadow casting**: For each patch (altitude, azimuth), cast shadows from the DSM +2. 
**Weight computation**: Calculate solid angle weight using annulus bounds +3. **Accumulation**: Sum weighted visibility across all patches per pixel +4. **Correction factor**: Apply final correction (3.0459e-4) for numerical stability + +## Inputs + +| Input | Type | Description | +| ----- | ---- | ----------- | +| DSM | 2D array (m) | Digital Surface Model | +| CDSM | 2D array (m) | Canopy DSM for vegetation (optional) | +| pixel_size | float (m) | Resolution | + +## Outputs + +| Output | Type | Description | +| ------ | ---- | ----------- | +| svf | 2D array (0-1) | Overall sky view factor | +| svf_north | 2D array (0-1) | SVF from northern sky quadrant | +| svf_east | 2D array (0-1) | SVF from eastern sky quadrant | +| svf_south | 2D array (0-1) | SVF from southern sky quadrant | +| svf_west | 2D array (0-1) | SVF from western sky quadrant | +| svf_veg | 2D array (0-1) | SVF accounting for vegetation | + +## Properties + +### Range Properties + +1. **SVF in range [0, 1]** + - SVF = 0: no sky visible (e.g., inside building) + - SVF = 1: full hemisphere visible (open field) + - All intermediate values valid + +2. **Directional SVF in range [0, 1]** + - Each directional component (N, E, S, W) also bounded by [0, 1] + +### Geometric Properties + +3. **Flat open terrain = SVF of 1** + - No obstructions → full sky visibility + - Tolerance: SVF > 0.95 for truly flat DSM + +4. **Deep canyon has low SVF** + - Urban canyon with H/W ratio > 2 → SVF < 0.5 + - H = building height, W = street width + +5. **Taller obstacles reduce SVF** + - Higher buildings nearby → lower ground-level SVF + - SVF decreases monotonically with obstacle height + +6. **Rooftops have high SVF** + - Building tops (local maxima) have SVF close to 1 + - Only reduced if taller buildings nearby + +7. **Building density reduces SVF** + - More buildings → lower ground-level SVF + - SVF is a measure of urban density/openness + +### Symmetry Properties + +8. 
**Symmetric obstacles give symmetric directional SVF** + - Square courtyard center has equal N/E/S/W SVF + - Asymmetric buildings create asymmetric directional SVF + +## Directional SVF + +Directional components split the sky into quadrants: + +```text + N (svf_north) + | + W ---+--- E + | + S +``` + +Used for calculating radiation from different sky directions, important for: + +- Anisotropic sky radiance (brighter near sun) +- Wall orientation effects +- Asymmetric shading + +## Vegetation Effects + +Trees reduce SVF but not completely (light passes through canopy): + +- **SVF_veg**: Sky view through vegetation canopy +- Accounts for leaf area index and transmissivity +- SVF_veg ≥ SVF (vegetation blocks less than buildings) + +## References + +**Primary UMEP Citation:** + +- Lindberg F, Grimmond CSB, Gabey A, Huang B, Kent CW, Sun T, Theeuwes N, Järvi L, Ward H, Capel-Timms I, Chang YY, Jonsson P, Krave N, Liu D, Meyer D, Olofson F, Tan JG, Wästberg D, Xue L, Zhang Z (2018) "Urban Multi-scale Environmental Predictor (UMEP) - An integrated tool for city-based climate services." Environmental Modelling and Software 99, 70-87. [doi:10.1016/j.envsoft.2017.09.020](https://doi.org/10.1016/j.envsoft.2017.09.020) + +**SVF Algorithm:** + +- Lindberg F, Holmer B, Thorsson S (2008) "SOLWEIG 1.0 - Modelling spatial variations of 3D radiant fluxes and mean radiant temperature in complex urban settings." International Journal of Biometeorology 52(7), 697-713. +- Lindberg F, Grimmond CSB (2011) "The influence of vegetation and building morphology on shadow patterns and mean radiant temperatures in urban areas: model development and evaluation." Theoretical and Applied Climatology 105, 311-323. + +**Patch-Based Method:** + +- Robinson D, Stone A (1990) "Solar Radiation Modelling in the Urban Context." Building and Environment 25(3), 201-209. 
diff --git a/specs/technical.md b/specs/technical.md new file mode 100644 index 0000000..ecf560f --- /dev/null +++ b/specs/technical.md @@ -0,0 +1,156 @@ +# Technical Implementation + +Implementation details, performance considerations, and computational requirements. + +Normative runtime/API behavior is specified in +[runtime-contract.md](runtime-contract.md). This file focuses on implementation +details rather than public precondition semantics. + +## Data Types + +### Float32 Precision + +All raster calculations use **float32** (single precision) rather than float64: + +- **Reason**: GPU compatibility, memory efficiency, sufficient precision +- **Precision**: ~7 significant digits +- **Range**: ±3.4 × 10³⁸ + +**Properties:** + +1. All DSM/CDSM values stored as float32 +2. All output rasters are float32 +3. Intermediate calculations may use higher precision internally +4. Results should match float64 within 0.1% for typical urban values + +### Integer Types + +- Shadow masks: uint8 or bool (0/1 values) +- Indices and counts: int32 or int64 + +## Tiling + +Large rasters are processed in tiles to manage memory: + +### Tile Properties + +1. **Tile size**: Configurable, typically 256×256 to 1024×1024 pixels +2. **Overlap**: Tiles overlap by `max_shadow_reach` to avoid edge artifacts +3. **Seamless output**: Stitched results should be identical to full-raster processing + +### Shadow Reach Calculation + +Overlap must accommodate the longest possible shadow: + +```text +max_shadow_reach = max_building_height / tan(min_sun_altitude) +``` + +At min_sun_altitude = 5°: +- 50m building → ~572m shadow → 572 pixels at 1m resolution + +### Tile Processing Order + +1. Tiles can be processed in parallel (independent) +2. Edge pixels use overlap region for context +3. Only interior pixels written to output + +## GPU Acceleration + +Optional GPU support for shadow and SVF calculations: + +### GPU Properties + +1. **Automatic fallback**: If GPU unavailable, uses CPU +2. 
**Equivalent results**: GPU and CPU produce identical outputs (within float32 precision) +3. **Memory management**: Large rasters automatically tiled for GPU memory limits + +### GPU-Accelerated Operations + +- Shadow casting (ray marching) +- SVF patch visibility checks +- Parallel pixel operations + +### CPU-Only Operations + +- File I/O +- Coordinate transformations +- Final result assembly + +## Coordinate Systems + +### Raster Coordinates + +- Row 0 = North edge of raster +- Column 0 = West edge of raster +- Increasing row index = moving South +- Increasing column index = moving East + +### Sun Position + +- Azimuth: 0° = North, 90° = East, 180° = South, 270° = West +- Altitude: 0° = horizon, 90° = zenith + +### Geographic Coordinates + +- Input rasters should have valid CRS (coordinate reference system) +- WGS84 (EPSG:4326) used for sun position calculations +- Local projected CRS used for distance calculations + +## Memory Management + +### Typical Memory Usage + +| Operation | Memory per megapixel | +| --------- | -------------------- | +| Single raster (float32) | ~4 MB | +| SVF calculation | ~50 MB (multiple arrays) | +| Full SOLWEIG run | ~200 MB | + +### Memory Properties + +1. Peak memory scales with tile size, not total raster size +2. Intermediate arrays released after use +3. Output written incrementally for large rasters + +## Numerical Stability + +### Edge Cases + +1. **Sun at horizon (altitude ≈ 0°)**: Shadow length approaches infinity + - Handled by clamping to max_shadow_reach + - No shadows computed when altitude ≤ 0° + +2. **Very tall buildings**: May exceed shadow reach + - Warning if buildings exceed reasonable height + +3. **Flat terrain**: Division by zero avoided + - max_height = 0 handled gracefully + +### NaN Handling + +1. Input NaN values indicate no-data (outside study area) +2. NaN propagates through calculations +3. 
Output NaN indicates invalid/missing result + +## Performance Targets + +| Operation | Target | Notes | +| --------- | ------ | ----- | +| Shadow (1 timestep) | <1s per megapixel | GPU | +| SVF | <30s per megapixel | GPU | +| Full day (48 timesteps) | <5 min per megapixel | GPU | + +## Reproducibility + +### Deterministic Results + +1. Same inputs → same outputs (bitwise identical) +2. No random number generation in core algorithms +3. Parallel processing order does not affect results + +### Version Compatibility + +1. Output format stable across minor versions +2. Algorithm changes documented in changelog +3. Regression tests verify consistency diff --git a/specs/tmrt.md b/specs/tmrt.md new file mode 100644 index 0000000..ed6ec2f --- /dev/null +++ b/specs/tmrt.md @@ -0,0 +1,325 @@ +# Mean Radiant Temperature (Tmrt) + +The uniform temperature of an imaginary black enclosure that would result in the same radiant heat exchange as the actual non-uniform environment. + +**Primary References:** + +- ISO 7726:1998 "Ergonomics of the thermal environment - Instruments for measuring physical quantities" +- Lindberg et al. (2008) Section 2.7 +- Höppe P (1992) "Ein neues Verfahren zur Bestimmung der mittleren Strahlungstemperatur im Freien." Wetter und Leben 44:147-151 + +## Equation + +### Absorbed Radiation (Sstr) + +Total radiation absorbed by a human body from all directions: + +```text +Sstr = absK × (Kside×Fside + (Kdown+Kup)×Fup) + + absL × (Lside×Fside + (Ldown+Lup)×Fup) +``` + +Where: + +- absK = shortwave absorption coefficient (0.70 for clothed human) +- absL = longwave absorption coefficient (0.97 for clothed human) +- Fside = view factor for sides (depends on posture) +- Fup = view factor for top/bottom (depends on posture) + +### Mean Radiant Temperature + +```text +Tmrt = (Sstr / (absL × σ))^0.25 - 273.15 +``` + +Where σ = Stefan-Boltzmann constant (5.67 × 10⁻⁸ W/m²K⁴). 
+ +## Absorption Coefficients + +**Reference:** ISO 7726:1998 "Ergonomics of the thermal environment - Instruments for measuring physical quantities" + +The human body absorbs radiation differently for shortwave (solar) and longwave (thermal) wavelengths: + +| Coefficient | Value | Description | Source | +| ----------- | ----- | -------------------------------- | ---------------- | +| absK | 0.70 | Shortwave (solar) absorption | ISO 7726 Table 4 | +| absL | 0.97 | Longwave (thermal) absorption | ISO 7726 Table 4 | + +### Physical Basis + +**Shortwave (absK = 0.70):** + +- Represents average absorption of clothed human body in solar spectrum (0.3-3 μm) +- Varies with clothing color and material: + - White clothing: absK ≈ 0.40-0.50 + - Medium grey clothing: absK ≈ 0.70 (standard reference) + - Dark clothing: absK ≈ 0.85-0.90 +- 0.70 is the ISO 7726 standard value for typical outdoor clothing +- Remaining (1 - absK) = 0.30 is reflected + +**Longwave (absL = 0.97):** + +- Human body absorption/emission in thermal infrared spectrum (3-100 μm) +- Based on Kirchhoff's law: absorptivity = emissivity at thermal equilibrium +- Physical basis: + - Human skin emissivity ≈ 0.98 (consistent across skin tones) + - Typical clothing emissivity ≈ 0.95-0.97 (most fabrics) + - Weighted average for clothed person ≈ 0.97 +- ISO 7726 standard value: 0.97 +- Nearly all thermal radiation is absorbed (only 3% reflected) + +### Standards and Implementation + +**ISO 7726:1998 Reference Values:** + +The ISO 7726 standard (Table 4, Section 4.2.3) specifies: + +- absK = 0.70 for solar radiation absorption +- absL = 0.97 for longwave radiation absorption + +These values are used for standardized Mean Radiant Temperature measurements. 
+ +**Implementation in SOLWEIG:** + +The default values in `HumanParams` (defined in `models.py`): + +```python +@dataclass +class HumanParams: + posture: str = "standing" + abs_k: float = 0.7 # ISO 7726 standard + abs_l: float = 0.97 # ISO 7726 standard +``` + +**Historical Note on absL Discrepancy:** + +Earlier SOLWEIG versions and some literature sources use absL = 0.95 instead of 0.97. Both values are physically reasonable: + +- 0.95: Conservative estimate, more common in early thermal comfort studies +- 0.97: ISO 7726 standard, more accurate for typical clothing + +This implementation follows ISO 7726 and uses 0.97 as the default. Users can override via `HumanParams(abs_l=0.95)` for compatibility with older studies. + +**Impact on Tmrt:** + +The difference between absL = 0.95 and 0.97 has minimal effect on calculated Tmrt: + +```text +Tmrt = (Sstr / (abs_l × σ))^0.25 - 273.15 + +For typical Sstr = 400 W/m²: + abs_l = 0.97 → Tmrt ≈ 40.5°C + abs_l = 0.95 → Tmrt ≈ 40.7°C + Difference: ~0.2°C (negligible for most applications) +``` + +## Inputs + +| Input | Type | Description | +| ----- | ---- | ----------- | +| Kdown | 2D array (W/m²) | Diffuse shortwave from sky | +| Kup | 2D array (W/m²) | Reflected shortwave from ground | +| Kside | 2D arrays (W/m²) | Direct + reflected shortwave (E,S,W,N) | +| Ldown | 2D array (W/m²) | Longwave from sky | +| Lup | 2D array (W/m²) | Longwave from ground | +| Lside | 2D arrays (W/m²) | Longwave from walls (E,S,W,N) | +| absK | float | Shortwave absorption (default 0.70) | +| absL | float | Longwave absorption (default 0.97) | +| posture | string | "standing" or "sitting" | + +## Outputs + +| Output | Type | Description | +| ------ | ---- | ----------- | +| Tmrt | 2D array (°C) | Mean radiant temperature grid | + +## Posture View Factors + +Human body geometry affects how radiation is received. View factors represent the fraction of radiation from each direction that is intercepted by the body. 
+ +**Primary Reference:** Mayer H, Höppe P (1987) "Thermal comfort of man in different urban environments." Theoretical and Applied Climatology 38:43-49. + +**Additional References:** + +- Fanger PO (1970) "Thermal Comfort", Danish Technical Press +- VDI 3787 Part 2 (2008) "Environmental Meteorology - Methods for the human biometeorological evaluation of climate and air quality" + +| Posture | Fup | Fside | Total | Model Description | +| -------- | ----- | ----- | -------------------------- | ----------------- | +| Standing | 0.06 | 0.22 | 0.06×2 + 0.22×4 = 1.00 | Vertical cylinder | +| Sitting | 0.166 | 0.166 | 0.166×2 + 0.166×4 = 1.00 | Modified cylinder | + +### Physical Derivation + +**Standing Posture (Vertical Cylinder Model):** + +The human body is approximated as a vertical cylinder with height H and diameter D, where H/D ≈ 8-10 (typical body proportions). + +View factor calculation: + +1. **Upward/downward view factor (Fup):** + - Circular cross-section area: A_horizontal = πD²/4 + - Total body surface area: A_total ≈ πDH (neglecting top/bottom caps) + - Projected area ratio: Fup ≈ (πD²/4) / (πDH/2) ≈ D/(2H) + - For H/D ≈ 8.5: Fup ≈ 1/17 ≈ 0.06 + +2. **Sideward view factor per direction (Fside):** + - Projected area per cardinal direction (E, S, W, N): A_side = H×D/2 + - View factor per direction: Fside ≈ (H×D/2) / (πDH/2) ≈ 1/π ≈ 0.318 + - Accounting for body curvature and posture: Fside ≈ 0.22 (empirically determined) + +3. **Validation:** + + ```text + Total = 2×Fup + 4×Fside + = 2×0.06 + 4×0.22 + = 0.12 + 0.88 + = 1.00 ✓ + ``` + +**Sitting Posture (Modified Cylinder):** + +For a sitting person, the body is more compact with increased horizontal cross-section: + +1. **Height reduction:** Effective height H_sitting ≈ 0.6×H_standing +2. **Width increase:** Effective width increases due to bent posture +3. 
**Equal distribution:** More uniform view factor distribution + - Fup = Fside = 0.166 (simplified model) + - Total = 6×0.166 ≈ 1.00 ✓ + +### Implementation Notes + +**Direct Beam Projection (f_cyl):** + +For direct solar radiation on vertical body surfaces, an additional projection factor f_cyl is used: + +| Posture | f_cyl | Description | +| -------- | ----- | --------------------------------------- | +| Standing | 0.28 | Projected area for cylinder from sun | +| Sitting | 0.20 | Reduced projection for compact posture | + +The f_cyl factor accounts for the cylindrical projection of direct beam radiation, distinct from the hemispherical view factors (Fup, Fside) used for diffuse radiation. + +**Source Code Reference:** + +View factors are defined in `components/tmrt.py` and `components/radiation.py`: + +```python +if posture == "standing": + f_up = 0.06 + f_side = 0.22 + f_cyl = 0.28 +else: # sitting + f_up = 0.166666 + f_side = 0.166666 + f_cyl = 0.20 +``` + +These values match the ISO 7726 and VDI 3787 standards for thermal comfort assessment. + +## Properties + +### Fundamental Properties + +1. **Tmrt defined for any radiation environment** + - Always computable if radiation inputs are valid + - Range typically -20°C to +80°C in urban environments + +2. **Tmrt = Ta when no radiation difference** + - In uniform temperature enclosure with no sun + - Night with overcast sky approaches this + +### Sun/Shade Properties + +3. **Sunlit Tmrt > Shaded Tmrt (daytime)** + - Direct sun adds 10-30°C to Tmrt + - Largest effect at midday, clear sky + +4. **Shadow reduces Tmrt significantly** + - Moving from sun to shade: ΔTmrt ≈ 10-30°C + - Most important thermal comfort intervention + +### SVF Properties + +5. **Higher SVF → higher Tmrt (daytime)** + - More sky radiation received + - Open areas warmer than canyons (radiation-wise) + +6. 
**Lower SVF → higher Tmrt (nighttime)** + - Less longwave loss to cold sky + - Urban heat island effect + +### Surface Temperature Properties + +7. **Hot ground increases Tmrt** + - Lup increases with ground temperature + - Asphalt vs grass: ΔTmrt ≈ 5-15°C + +8. **Hot walls increase Tmrt** + - Sun-heated walls emit more longwave + - South-facing walls hottest in afternoon + +### Temporal Properties + +9. **Tmrt peaks in early afternoon** + - Maximum direct radiation + - Ground and walls heated + +10. **Tmrt > Ta during day, Tmrt < Ta at night** + - Daytime: sun adds radiation + - Nighttime: surfaces cooler than air + +## Typical Values + +| Condition | Tmrt | Ta | ΔT | +| --------- | ---- | -- | -- | +| Clear day, sun | 55-70°C | 30°C | +25-40°C | +| Clear day, shade | 35-45°C | 30°C | +5-15°C | +| Overcast day | 25-35°C | 25°C | 0-10°C | +| Clear night | 10-20°C | 20°C | -10-0°C | +| Winter sun | 20-35°C | 5°C | +15-30°C | + +## Significance + +Tmrt is the key variable for outdoor thermal comfort: + +- Dominates heat stress in hot climates +- More important than air temperature for comfort +- Directly modifiable through shade provision +- Input to UTCI and PET calculations + +## Tmrt Calculation Implementation + +### Directional Radiation Summation + +For directional shortwave and longwave, the model computes separate fluxes for each cardinal direction (N, E, S, W) and sums them with appropriate view factors: + +```text +Kside = Keast + Ksouth + Kwest + Knorth +Lside = Least + Lsouth + Lwest + Lnorth +``` + +### Kelvin Offset + +The formula converts from Kelvin to Celsius using: + +```text +Tmrt_celsius = Tmrt_kelvin - 273.15 +``` + +Some legacy implementations used -273.2 (rounded). The modern implementation uses the exact value. + +### Numerical Stability + +When Sstr ≤ 0 (very rare, indicates model error), the implementation clamps to a minimum value to avoid invalid fourth-root operations. 
+ +## References + +**Primary UMEP Citation:** + +- Lindberg F, Grimmond CSB, Gabey A, Huang B, Kent CW, Sun T, Theeuwes N, Järvi L, Ward H, Capel-Timms I, Chang YY, Jonsson P, Krave N, Liu D, Meyer D, Olofson F, Tan JG, Wästberg D, Xue L, Zhang Z (2018) "Urban Multi-scale Environmental Predictor (UMEP) - An integrated tool for city-based climate services." Environmental Modelling and Software 99, 70-87. [doi:10.1016/j.envsoft.2017.09.020](https://doi.org/10.1016/j.envsoft.2017.09.020) + +**Tmrt Model:** + +- Lindberg F, Holmer B, Thorsson S (2008) "SOLWEIG 1.0 - Modelling spatial variations of 3D radiant fluxes and mean radiant temperature in complex urban settings." International Journal of Biometeorology 52(7), 697-713. +- Höppe P (1992) "A new procedure to determine the mean radiant temperature outdoors." Wetter und Leben 44, 147-151. diff --git a/specs/utci.md b/specs/utci.md new file mode 100644 index 0000000..d33445a --- /dev/null +++ b/specs/utci.md @@ -0,0 +1,138 @@ +# Universal Thermal Climate Index (UTCI) + +Equivalent temperature representing the physiological response to the thermal environment. Based on a multi-node human thermoregulation model. + +**Reference:** Bröde et al. (2012), Fiala et al. 
(2012) + +## Equation + +UTCI is computed from a 6th-order polynomial approximation of the Fiala model: + +```text +UTCI = Ta + offset(Ta, Tmrt-Ta, va, Pa) +``` + +The offset is a complex polynomial function (~200 terms) of: +- Ta = air temperature (°C) +- ΔTmrt = Tmrt - Ta (°C) +- va = wind speed at 10m (m/s) +- Pa = water vapor pressure (hPa) + +## Inputs + +| Input | Type | Description | +| ----- | ---- | ----------- | +| Ta | float or 2D array (°C) | Air temperature | +| Tmrt | float or 2D array (°C) | Mean radiant temperature | +| va | float or 2D array (m/s) | Wind speed at 10m height | +| RH | float or 2D array (%) | Relative humidity | + +## Outputs + +| Output | Type | Description | +| ------ | ---- | ----------- | +| UTCI | float or 2D array (°C) | Universal Thermal Climate Index | + +## Stress Categories + +| UTCI (°C) | Stress Category | Physiological Response | +| --------- | --------------- | ---------------------- | +| > 46 | Extreme heat stress | Heat stroke risk | +| 38 to 46 | Very strong heat stress | Failure of thermoregulation | +| 32 to 38 | Strong heat stress | Strong sweating, dehydration | +| 26 to 32 | Moderate heat stress | Sweating, discomfort | +| 9 to 26 | No thermal stress | Comfort zone | +| 0 to 9 | Slight cold stress | Vasoconstriction | +| -13 to 0 | Moderate cold stress | Shivering begins | +| -27 to -13 | Strong cold stress | Risk of frostbite | +| -40 to -27 | Very strong cold stress | Numbness, hypothermia risk | +| < -40 | Extreme cold stress | Frostbite in minutes | + +## Properties + +### Fundamental Properties + +1. **UTCI is an equivalent temperature** + - Units are °C + - Represents how the environment "feels" + - Reference: walking outdoors at 4 km/h + +2. **Valid input ranges** + - Ta: -50°C to +50°C + - Tmrt-Ta: -30°C to +70°C + - va: 0.5 to 17 m/s + - RH: 5% to 100% + +### Radiation Properties + +3. **Higher Tmrt → higher UTCI** + - Radiation increases thermal stress + - Sun to shade: ΔUTCI ≈ 5-20°C + +4. 
**UTCI ≈ Ta when Tmrt = Ta and low wind** + - No radiation difference, no wind chill + - Neutral reference condition + +### Wind Properties + +5. **Wind reduces UTCI in heat** + - Convective cooling + - Effect saturates at high wind speeds + +6. **Wind reduces UTCI in cold** + - Wind chill effect + - Stronger effect in cold conditions + +### Humidity Properties + +7. **Humidity effect small in cold** + - Water vapor pressure low at cold temperatures + - Main effect is in warm/hot conditions + +8. **High humidity increases UTCI in heat** + - Impairs evaporative cooling + - Tropical conditions feel hotter + +## Comparison with Other Indices + +| Index | Accounts for | Limitations | +| ----- | ------------ | ----------- | +| UTCI | Ta, Tmrt, wind, humidity | Fixed clothing/activity | +| PET | Ta, Tmrt, wind, humidity, person | More parameters needed | +| Heat Index | Ta, humidity | No radiation or wind | +| Wind Chill | Ta, wind | Cold only, no radiation | + +## Typical Values + +| Condition | Ta | Tmrt | Wind | UTCI | Category | +| --------- | -- | ---- | ---- | ---- | -------- | +| Hot sunny | 35 | 65 | 1 | 45 | Very strong heat | +| Hot shaded | 35 | 40 | 1 | 36 | Strong heat | +| Comfortable | 22 | 25 | 2 | 22 | No stress | +| Cold windy | -5 | -5 | 10 | -15 | Strong cold | +| Cold calm | -5 | -5 | 1 | -6 | Moderate cold | + +## Implementation Notes + +1. **Wind height adjustment** + - Input wind typically at 10m height + - Model assumes standard reference height + +2. **Polynomial approximation** + - ~200 coefficient polynomial + - Accurate within ±0.5°C of full model + +3. 
**Extrapolation warning** + - Results outside valid ranges may be unreliable + - Clamp or flag out-of-range inputs + +## References + +**Primary UMEP Citation:** + +- Lindberg F, Grimmond CSB, Gabey A, Huang B, Kent CW, Sun T, Theeuwes N, Järvi L, Ward H, Capel-Timms I, Chang YY, Jonsson P, Krave N, Liu D, Meyer D, Olofson F, Tan JG, Wästberg D, Xue L, Zhang Z (2018) "Urban Multi-scale Environmental Predictor (UMEP) - An integrated tool for city-based climate services." Environmental Modelling and Software 99, 70-87. [doi:10.1016/j.envsoft.2017.09.020](https://doi.org/10.1016/j.envsoft.2017.09.020) + +**UTCI Model:** + +- Błażejczyk K, Jendritzky G, Bröde P, Fiala D, Havenith G, Epstein Y, Psikuta A, Kampmann B (2013) "An introduction to the Universal Thermal Climate Index (UTCI)." Geographia Polonica 86(1), 5-10. +- Bröde P, Fiala D, Błażejczyk K, Holmér I, Jendritzky G, Kampmann B, Tinz B, Havenith G (2012) "Deriving the operational procedure for the Universal Thermal Climate Index (UTCI)." International Journal of Biometeorology 56(3), 481-494. diff --git a/tests/README.md b/tests/README.md new file mode 100644 index 0000000..5e73fe6 --- /dev/null +++ b/tests/README.md @@ -0,0 +1,154 @@ +# SOLWEIG Test Suite + +This document explains the testing strategy used during the SOLWEIG modernization effort. + +## Three-Layer Testing Strategy + +``` +┌─────────────────────────────────────────────────────────────────┐ +│ Layer 1: SPEC PROPERTY TESTS (mock data) │ +│ - Verify physical invariants from specs │ +│ - Fast, deterministic, easy to debug │ +│ - "Does the algorithm behave correctly?" │ +├─────────────────────────────────────────────────────────────────┤ +│ Layer 2: GOLDEN REGRESSION TESTS (demo data) │ +│ - Pre-computed outputs from known-good runs │ +│ - Catch any numerical drift during refactoring │ +│ - "Does output still match what we expect?" 
│ +├─────────────────────────────────────────────────────────────────┤ +│ Layer 3: RUST vs UMEP COMPARISON (demo data) │ +│ - Existing test_rustalgos.py │ +│ - Verify Rust matches original Python implementation │ +│ - "Does Rust produce same results as reference?" │ +└─────────────────────────────────────────────────────────────────┘ +``` + +## Layer 1: Spec Property Tests + +**Location:** `tests/spec/` + +**Purpose:** Verify that algorithms satisfy physical invariants defined in `specs/*.md`. + +**Data:** Synthetic/mock DSMs created in test fixtures (small, fast, deterministic). + +**Examples:** + +- "No shadows when sun altitude ≤ 0" +- "SVF always in range [0, 1]" +- "Flat terrain has SVF = 1" +- "Taller buildings cast longer shadows" + +**Rationale:** These tests verify the algorithm behaves correctly according to physics. They use simple synthetic data so failures are easy to diagnose. If a spec test fails, you know exactly which physical property was violated. + +**Files:** + +- `test_shadows.py` - 8 shadow properties +- `test_svf.py` - 8 SVF properties +- `test_tmrt.py` - 10 Tmrt properties (planned) +- `test_radiation.py` - 7 radiation properties (planned) +- `test_utci.py` - 8 UTCI properties (planned) +- `test_pet.py` - 8 PET properties (planned) + +## Layer 2: Golden Regression Tests + +**Location:** `tests/golden/` + +**Purpose:** Ensure outputs don't change unexpectedly during refactoring. + +**Data:** Real demo data (Athens, Gothenburg) with pre-computed reference outputs. + +**How it works:** + +1. Run the algorithm on demo data with current (known-good) code +2. Save outputs as `.npy` files (golden fixtures) +3. Future test runs compare new output against golden fixtures +4. Any numerical difference fails the test + +**Rationale:** During modernization, we'll change APIs, consolidate parameters, and refactor code. Golden tests catch any accidental changes to numerical output, even subtle floating-point differences. 
If a golden test fails, the output changed - investigate whether it's intentional. + +**Files:** + +- `fixtures/` - Pre-computed reference outputs +- `test_golden_shadows.py` - Shadow regression tests +- `test_golden_svf.py` - SVF regression tests +- `test_golden_tmrt.py` - Tmrt regression tests + +## Layer 3: Rust vs UMEP Comparison + +**Location:** `tests/rustalgos/` + +**Purpose:** Verify Rust implementations match the original Python UMEP code exactly. + +**Data:** Real demo data (Athens, Gothenburg). + +**How it works:** + +1. Run the original Python UMEP implementation +2. Run the Rust implementation with identical inputs +3. Compare outputs element-by-element +4. Report match percentage and mean difference + +**Rationale:** The Rust code is a performance-optimized rewrite of the original Python. This layer ensures the Rust code produces identical results to the reference Python implementation. These tests also measure speedup (typically 10-30x faster). + +**Files:** + +- `test_rustalgos.py` - Comprehensive Rust vs Python comparison + +## When to Run Each Layer + +| Situation | Layer 1 | Layer 2 | Layer 3 | +| ------------------------------ | ------- | ------- | ------- | +| Quick check during development | ✅ | | | +| Before committing changes | ✅ | ✅ | | +| Before merging PR | ✅ | ✅ | ✅ | +| After changing algorithm logic | ✅ | ✅ | ✅ | +| After Rust code changes | ✅ | | ✅ | + +## Running Tests + +```bash +# Run all spec tests (fast, ~10 seconds) +uv run pytest tests/spec/ -v + +# Run golden regression tests +uv run pytest tests/golden/ -v + +# Run Rust vs UMEP comparison (requires demo data) +uv run python -c "from tests.rustalgos.test_rustalgos import test_shadowing; test_shadowing()" + +# Run everything +uv run pytest tests/ -v + +# Run performance regression benchmarks +uv run pytest tests/benchmarks/ -v + +# If CI is slower, scale runtime budgets (example: +50% headroom) +SOLWEIG_PERF_BUDGET_SCALE=1.5 uv run pytest tests/benchmarks/ -v +``` + 
+Performance benchmarks are intended for local/reproducible environments and are not run in CI. +Each run appends logs to: +- `tests/benchmarks/logs/performance_matrix_history.csv` (long-form records) +- `tests/benchmarks/logs/performance_matrix_history.md` (matrix snapshot per run) +Logged metadata includes hardware context (CPU counts, RAM total/available, GPU availability/backend/max buffer size). + +## Adding New Tests + +### Adding a Spec Property Test + +1. Check the relevant spec file in `specs/*.md` +2. Identify the property to test +3. Create a test function with synthetic data that verifies the property +4. Name it `test_property_N_description` where N matches the spec + +### Adding a Golden Test + +1. Run the algorithm on demo data with current code +2. Save output: `np.save("tests/golden/fixtures/name.npy", output)` +3. Create test that loads fixture and compares with `np.testing.assert_allclose()` + +### Adding a Rust vs UMEP Test + +1. Follow the pattern in `test_rustalgos.py` +2. Run both Python and Rust implementations +3. Use `compare_results()` helper to check match percentage diff --git a/tests/benchmarks/__init__.py b/tests/benchmarks/__init__.py new file mode 100644 index 0000000..b428bc3 --- /dev/null +++ b/tests/benchmarks/__init__.py @@ -0,0 +1 @@ +"""Benchmark tests for performance and memory regression detection.""" diff --git a/tests/benchmarks/test_memory_benchmark.py b/tests/benchmarks/test_memory_benchmark.py new file mode 100644 index 0000000..dbac7ab --- /dev/null +++ b/tests/benchmarks/test_memory_benchmark.py @@ -0,0 +1,141 @@ +"""Memory benchmark tests for CI regression detection. + +These tests verify that memory usage stays within expected bounds. +They run on small grids to be fast in CI while still detecting regressions. 
+ +Memory target: ~370 bytes/pixel (measured Feb 2026 baseline) +Regression threshold: 500 bytes/pixel (35% headroom for variance) +""" + +import tracemalloc +from datetime import datetime + +import numpy as np +import pytest +from solweig import Location, SurfaceData, Weather, calculate + +pytestmark = pytest.mark.slow + + +class TestMemoryBenchmark: + """Memory usage benchmarks for CI.""" + + # Target: ~370 bytes/pixel (Feb 2026 baseline) + # Threshold: 500 bytes/pixel (35% headroom for CI variance) + MAX_BYTES_PER_PIXEL = 500 + + @pytest.fixture + def benchmark_surface(self): + """Create a 150x150 benchmark surface. + + Small enough to be fast in CI, large enough to amortize fixed overhead. + """ + size = 150 + np.random.seed(42) + + dsm = np.ones((size, size), dtype=np.float32) * 10.0 + + # Add a few low buildings (5m above ground to keep buffer small) + for _ in range(5): + x, y = np.random.randint(15, size - 15, 2) + w, h = np.random.randint(5, 10, 2) + dsm[y : y + h, x : x + w] = 15.0 # 5m above ground + + land_cover = np.ones((size, size), dtype=np.int32) * 5 + land_cover[dsm > 12] = 2 + + from conftest import make_mock_svf + + return SurfaceData( + dsm=dsm, + land_cover=land_cover, + pixel_size=1.0, + svf=make_mock_svf((size, size)), + ) + + @pytest.fixture + def benchmark_location(self): + """Athens, Greece - good sun angle for testing.""" + return Location(latitude=37.98, longitude=23.73, utc_offset=2) + + @pytest.fixture + def benchmark_weather(self): + """Summer noon conditions.""" + return Weather( + datetime=datetime(2024, 7, 21, 12, 0), + ta=30.0, + rh=50.0, + global_rad=800.0, + ws=2.0, + ) + + def test_memory_per_pixel_within_threshold(self, benchmark_surface, benchmark_location, benchmark_weather): + """Verify memory usage stays within acceptable bounds. + + This test catches memory regressions (e.g., accidental float64 usage, + leaked allocations, or inefficient intermediate arrays). 
+ """ + # Preprocess surface first (one-time cost not counted in per-timestep) + benchmark_surface.preprocess() + + # Start memory tracing + tracemalloc.start() + tracemalloc.reset_peak() + + # Run calculation + result = calculate(benchmark_surface, benchmark_location, benchmark_weather) + + # Get peak memory + _, peak = tracemalloc.get_traced_memory() + tracemalloc.stop() + + # Calculate bytes per pixel + n_pixels = benchmark_surface.shape[0] * benchmark_surface.shape[1] + bytes_per_pixel = peak / n_pixels + + # Verify result is valid (sanity check) + assert result.tmrt is not None + assert np.isfinite(result.tmrt).sum() > 0.8 * n_pixels + + # Verify memory within threshold + assert bytes_per_pixel < self.MAX_BYTES_PER_PIXEL, ( + f"Memory regression detected: {bytes_per_pixel:.1f} bytes/pixel " + f"exceeds threshold of {self.MAX_BYTES_PER_PIXEL} bytes/pixel. " + f"Peak memory: {peak / 1024 / 1024:.1f} MB for {n_pixels:,} pixels." + ) + + def test_float32_arrays_used(self, benchmark_surface, benchmark_location, benchmark_weather): + """Verify output arrays use float32 (not float64).""" + benchmark_surface.preprocess() + + result = calculate(benchmark_surface, benchmark_location, benchmark_weather) + + # All output arrays should be float32 + assert result.tmrt.dtype == np.float32, f"tmrt dtype is {result.tmrt.dtype}, expected float32" + if result.shadow is not None: + assert result.shadow.dtype == np.float32, f"shadow dtype is {result.shadow.dtype}" + if result.kdown is not None: + assert result.kdown.dtype == np.float32, f"kdown dtype is {result.kdown.dtype}" + if result.kup is not None: + assert result.kup.dtype == np.float32, f"kup dtype is {result.kup.dtype}" + if result.ldown is not None: + assert result.ldown.dtype == np.float32, f"ldown dtype is {result.ldown.dtype}" + if result.lup is not None: + assert result.lup.dtype == np.float32, f"lup dtype is {result.lup.dtype}" + + def test_surface_arrays_float32(self): + """Verify surface data arrays use float32.""" 
+ size = 100 + np.random.seed(42) + + dsm = np.ones((size, size), dtype=np.float32) * 10.0 + cdsm = np.ones((size, size), dtype=np.float32) * 5.0 + + surface = SurfaceData(dsm=dsm, cdsm=cdsm, pixel_size=1.0) + surface.preprocess() + + assert surface.dsm.dtype == np.float32 + if surface.cdsm is not None: + assert surface.cdsm.dtype == np.float32 + if surface.dem is not None: + assert surface.dem.dtype == np.float32 diff --git a/tests/benchmarks/test_performance_matrix_benchmark.py b/tests/benchmarks/test_performance_matrix_benchmark.py new file mode 100644 index 0000000..002d15a --- /dev/null +++ b/tests/benchmarks/test_performance_matrix_benchmark.py @@ -0,0 +1,564 @@ +"""Performance benchmark matrix for API and plugin regression detection. + +Matrix coverage: +- Frontend: API, QGIS plugin +- Execution mode: non-tiled, tiled +- Sky model: isotropic, anisotropic + +This catches regressions with: +1) absolute runtime budgets per scenario +2) relative ratio checks across paired scenarios +""" + +from __future__ import annotations + +import csv +import os +import platform +import re +import subprocess +import sys +import time +from datetime import UTC, datetime, timedelta +from pathlib import Path +from unittest.mock import MagicMock, patch + +import numpy as np +import pytest +import solweig +from conftest import make_mock_svf +from solweig import Location, SurfaceData, Weather +from solweig.models.precomputed import ShadowArrays + +from tests.qgis_mocks import install, install_osgeo, uninstall_osgeo + +install() # Must run before importing plugin modules. +install_osgeo() +from qgis_plugin.solweig_qgis.algorithms.calculation.solweig_calculation import ( # noqa: E402 + SolweigCalculationAlgorithm, +) + +uninstall_osgeo() + +pytestmark = pytest.mark.slow + +# Scale all absolute thresholds in slower CI/VMs: +# SOLWEIG_PERF_BUDGET_SCALE=1.5 pytest tests/benchmarks/... 
+PERF_BUDGET_SCALE = float(os.environ.get("SOLWEIG_PERF_BUDGET_SCALE", "1.0")) + +ABSOLUTE_BUDGET_SECONDS = { + "api_non_tiled_isotropic": 0.15, + "api_non_tiled_anisotropic": 0.30, + "api_tiled_isotropic": 0.30, + "api_tiled_anisotropic": 0.60, + "plugin_non_tiled_isotropic": 0.40, + "plugin_non_tiled_anisotropic": 0.70, + "plugin_tiled_isotropic": 0.80, + "plugin_tiled_anisotropic": 1.30, +} + +MAX_RATIO_ANISO_OVER_ISO = 4.0 +MAX_RATIO_TILED_OVER_NON_TILED = 4.0 +MAX_RATIO_PLUGIN_OVER_API = 6.0 + +_LOG_DIR = Path(__file__).resolve().parent / "logs" +_CSV_LOG_PATH = _LOG_DIR / "performance_matrix_history.csv" +_MD_LOG_PATH = _LOG_DIR / "performance_matrix_history.md" + + +def _scenario_id(frontend: str, tiled: bool, anisotropic: bool) -> str: + tiled_label = "tiled" if tiled else "non_tiled" + sky_label = "anisotropic" if anisotropic else "isotropic" + return f"{frontend}_{tiled_label}_{sky_label}" + + +def _make_surface(size: int = 320) -> SurfaceData: + """Create a synthetic surface that works for isotropic and anisotropic paths.""" + n_patches = 153 + n_pack = (n_patches + 7) // 8 + + dsm = np.ones((size, size), dtype=np.float32) * 5.0 + dsm[110:210, 120:220] = 10.0 # 5 m relative building + + surface = SurfaceData( + dsm=dsm, + pixel_size=1.0, + svf=make_mock_svf((size, size)), + ) + surface.shadow_matrices = ShadowArrays( + _shmat_u8=np.full((size, size, n_pack), 0xFF, dtype=np.uint8), + _vegshmat_u8=np.full((size, size, n_pack), 0xFF, dtype=np.uint8), + _vbshmat_u8=np.full((size, size, n_pack), 0xFF, dtype=np.uint8), + _n_patches=n_patches, + ) + # Plugin helper methods expect georeference metadata on the surface. 
+ surface._geotransform = [0.0, 1.0, 0.0, 0.0, 0.0, -1.0] + surface._crs_wkt = 'LOCAL_CS["benchmark"]' + return surface + + +def _make_location() -> Location: + return Location(latitude=57.7, longitude=12.0, utc_offset=1) + + +def _make_weather() -> Weather: + return Weather( + datetime=datetime(2024, 7, 15, 12, 0), + ta=27.0, + rh=45.0, + global_rad=800.0, + ws=2.0, + ) + + +def _make_weather_series() -> list[Weather]: + base = datetime(2024, 7, 15, 12, 0) + return [ + Weather( + datetime=base + timedelta(hours=i), + ta=27.0 + i, + rh=45.0, + global_rad=800.0, + ws=2.0, + ) + for i in range(2) + ] + + +def _assert_valid_tmrt(tmrt: np.ndarray) -> None: + assert tmrt is not None + finite = np.isfinite(tmrt) + assert finite.any(), "Expected finite Tmrt values" + assert finite.mean() > 0.8, "Too many invalid Tmrt pixels" + + +def _median_runtime_seconds(fn, repeats: int = 3) -> tuple[float, list[float]]: + """Warm up once, then return median runtime over repeated runs.""" + fn() # Warm-up pass for fairer timing. + samples: list[float] = [] + for _ in range(repeats): + t0 = time.perf_counter() + fn() + samples.append(time.perf_counter() - t0) + samples.sort() + return samples[len(samples) // 2], samples + + +def _run_api_case(tiled: bool, anisotropic: bool) -> None: + surface = _make_surface() + location = _make_location() + weather = _make_weather() + + if tiled: + result = solweig.calculate_tiled( + surface=surface, + location=location, + weather=weather, + tile_size=256, + use_anisotropic_sky=anisotropic, + tile_workers=2, + tile_queue_depth=1, + prefetch_tiles=True, + max_shadow_distance_m=80.0, + progress_callback=lambda *_args: None, # Disable tqdm in benchmark runs. 
+ ) + else: + result = solweig.calculate( + surface=surface, + location=location, + weather=weather, + use_anisotropic_sky=anisotropic, + max_shadow_distance_m=80.0, + ) + + _assert_valid_tmrt(result.tmrt) + + +def _run_plugin_case(tiled: bool, anisotropic: bool) -> None: + algo = SolweigCalculationAlgorithm() + + feedback = MagicMock() + feedback.isCanceled.return_value = False + + with ( + patch("solweig.tiling._should_use_tiling", return_value=tiled), + patch("solweig.tiling._calculate_auto_tile_size", return_value=256), + ): + n_results, tmrt_stats = algo._run_timeseries( + solweig=solweig, + surface=_make_surface(), + location=_make_location(), + weather_series=_make_weather_series(), + human=solweig.HumanParams(), + use_anisotropic_sky=anisotropic, + conifer=False, + physics=None, + precomputed=None, + output_dir="/tmp/solweig-benchmarks", + selected_outputs=["tmrt"], + max_shadow_distance_m=80.0, + materials=None, + heat_thresholds_day=[], + heat_thresholds_night=[], + feedback=feedback, + ) + + assert n_results == 2 + assert "mean" in tmrt_stats + assert np.isfinite(tmrt_stats["mean"]) + + +@pytest.fixture(scope="module") +def perf_matrix() -> dict[str, dict[str, float | list[float]]]: + """Measure median runtimes for all 8 benchmark scenarios.""" + measurements: dict[str, dict[str, float | list[float]]] = {} + + for frontend in ("api", "plugin"): + for tiled in (False, True): + for anisotropic in (False, True): + sid = _scenario_id(frontend, tiled, anisotropic) + runner = ( + (lambda t=tiled, a=anisotropic: _run_api_case(t, a)) + if frontend == "api" + else (lambda t=tiled, a=anisotropic: _run_plugin_case(t, a)) + ) + median_s, samples = _median_runtime_seconds(runner, repeats=3) + measurements[sid] = {"median_s": median_s, "samples_s": samples} + + _append_performance_log(measurements) + return measurements + + +def _runtime(perf_matrix: dict[str, dict[str, float | list[float]]], sid: str) -> float: + val = perf_matrix[sid]["median_s"] + assert 
isinstance(val, (int, float)) + return float(val) + + +def _runtime_matrix_rows( + perf_matrix: dict[str, dict[str, float | list[float]]], +) -> list[tuple[str, str, str, str, str]]: + def _fmt(frontend: str, tiled: bool, anisotropic: bool) -> str: + sid = _scenario_id(frontend, tiled, anisotropic) + return f"{_runtime(perf_matrix, sid):.4f}" + + return [ + ( + "api", + _fmt("api", False, False), + _fmt("api", False, True), + _fmt("api", True, False), + _fmt("api", True, True), + ), + ( + "plugin", + _fmt("plugin", False, False), + _fmt("plugin", False, True), + _fmt("plugin", True, False), + _fmt("plugin", True, True), + ), + ] + + +def _git_commit_short() -> str: + try: + return ( + subprocess.check_output(["git", "rev-parse", "--short", "HEAD"], stderr=subprocess.DEVNULL, text=True) + .strip() + .lower() + ) + except Exception: + return "unknown" + + +def _cpu_count_available() -> int | None: + """CPU count available to this process (affinity-aware when possible).""" + try: + return len(os.sched_getaffinity(0)) # type: ignore[attr-defined] + except Exception: + return None + + +def _ram_total_bytes() -> int | None: + """Best-effort total physical RAM detection.""" + try: + pages = os.sysconf("SC_PHYS_PAGES") + page_size = os.sysconf("SC_PAGE_SIZE") + if pages > 0 and page_size > 0: + return int(pages * page_size) + except Exception: + pass + + system = platform.system() + if system == "Darwin": + try: + out = subprocess.check_output(["sysctl", "-n", "hw.memsize"], text=True).strip() + return int(out) + except Exception: + return None + if system == "Windows": + try: + import ctypes + + class _MemoryStatusEx(ctypes.Structure): + _fields_ = [ + ("dwLength", ctypes.c_ulong), + ("dwMemoryLoad", ctypes.c_ulong), + ("ullTotalPhys", ctypes.c_ulonglong), + ("ullAvailPhys", ctypes.c_ulonglong), + ("ullTotalPageFile", ctypes.c_ulonglong), + ("ullAvailPageFile", ctypes.c_ulonglong), + ("ullTotalVirtual", ctypes.c_ulonglong), + ("ullAvailVirtual", ctypes.c_ulonglong), + 
("ullAvailExtendedVirtual", ctypes.c_ulonglong), + ] + + status = _MemoryStatusEx() + status.dwLength = ctypes.sizeof(status) + ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(status)) # type: ignore[attr-defined] + return int(status.ullTotalPhys) + except Exception: + return None + return None + + +def _ram_available_bytes() -> int | None: + """Best-effort available RAM detection at benchmark runtime.""" + system = platform.system() + if system == "Linux": + try: + meminfo = Path("/proc/meminfo").read_text(encoding="utf-8") + match = re.search(r"^MemAvailable:\s+(\d+)\s+kB$", meminfo, flags=re.MULTILINE) + if match: + return int(match.group(1)) * 1024 + except Exception: + return None + if system == "Darwin": + try: + page_size = int(subprocess.check_output(["sysctl", "-n", "hw.pagesize"], text=True).strip()) + vm_stat = subprocess.check_output(["vm_stat"], text=True) + page_counts = {} + for key in ("Pages free", "Pages inactive", "Pages speculative"): + match = re.search(rf"^{re.escape(key)}:\s+(\d+)\.$", vm_stat, flags=re.MULTILINE) + page_counts[key] = int(match.group(1)) if match else 0 + available_pages = ( + page_counts["Pages free"] + page_counts["Pages inactive"] + page_counts["Pages speculative"] + ) + return available_pages * page_size + except Exception: + return None + if system == "Windows": + try: + import ctypes + + class _MemoryStatusEx(ctypes.Structure): + _fields_ = [ + ("dwLength", ctypes.c_ulong), + ("dwMemoryLoad", ctypes.c_ulong), + ("ullTotalPhys", ctypes.c_ulonglong), + ("ullAvailPhys", ctypes.c_ulonglong), + ("ullTotalPageFile", ctypes.c_ulonglong), + ("ullAvailPageFile", ctypes.c_ulonglong), + ("ullTotalVirtual", ctypes.c_ulonglong), + ("ullAvailVirtual", ctypes.c_ulonglong), + ("ullAvailExtendedVirtual", ctypes.c_ulonglong), + ] + + status = _MemoryStatusEx() + status.dwLength = ctypes.sizeof(status) + ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(status)) # type: ignore[attr-defined] + return 
int(status.ullAvailPhys) + except Exception: + return None + return None + + +def _gpu_hardware_info() -> dict[str, str | int | bool | None]: + """Collect best-effort GPU capability metadata from solweig runtime.""" + gpu_compiled = bool(getattr(solweig, "GPU_ENABLED", False)) + try: + gpu_available = bool(solweig.is_gpu_available()) + except Exception: + gpu_available = False + try: + backend = solweig.get_compute_backend() + except Exception: + backend = "unknown" + try: + limits = solweig.get_gpu_limits() or {} + max_buffer_size = int(limits["max_buffer_size"]) if "max_buffer_size" in limits else None + except Exception: + max_buffer_size = None + + return { + "gpu_compiled": gpu_compiled, + "gpu_available": gpu_available, + "gpu_backend": backend, + "gpu_max_buffer_size": max_buffer_size, + } + + +def _format_bytes(value: int | None) -> str: + if value is None: + return "unknown" + gib = value / (1024**3) + return f"{value} ({gib:.2f} GiB)" + + +def _append_performance_log(perf_matrix: dict[str, dict[str, float | list[float]]]) -> None: + """Append a run record to benchmark logs (CSV + markdown matrix).""" + _LOG_DIR.mkdir(parents=True, exist_ok=True) + + now = datetime.now(UTC) + run_id = now.strftime("%Y%m%dT%H%M%SZ") + timestamp = now.isoformat(timespec="seconds") + py_ver = sys.version.split()[0] + commit = _git_commit_short() + system = f"{platform.system()} {platform.release()}" + machine = platform.machine() + cpu_count_logical = os.cpu_count() or 0 + cpu_count_available = _cpu_count_available() + ram_total = _ram_total_bytes() + ram_available = _ram_available_bytes() + gpu_info = _gpu_hardware_info() + + csv_fields = [ + "run_id", + "timestamp_utc", + "git_commit", + "python_version", + "system", + "machine", + "cpu_count_logical", + "cpu_count_available", + "ram_total_bytes", + "ram_available_bytes", + "gpu_compiled", + "gpu_available", + "gpu_backend", + "gpu_max_buffer_size", + "budget_scale", + "scenario", + "median_seconds", + "samples_seconds", + 
] + reset_header = False + if _CSV_LOG_PATH.exists(): + current_header = _CSV_LOG_PATH.read_text(encoding="utf-8").splitlines()[:1] + if not current_header or current_header[0] != ",".join(csv_fields): + reset_header = True + + if reset_header: + backup_name = _CSV_LOG_PATH.with_suffix(f".bak-{run_id}.csv") + _CSV_LOG_PATH.rename(backup_name) + + write_header = not _CSV_LOG_PATH.exists() + with _CSV_LOG_PATH.open("a", encoding="utf-8", newline="") as fh: + writer = csv.DictWriter(fh, fieldnames=csv_fields) + if write_header: + writer.writeheader() + for scenario, payload in sorted(perf_matrix.items()): + median_s = float(payload["median_s"]) # type: ignore[arg-type] + samples = ";".join(f"{float(x):.6f}" for x in payload["samples_s"]) # type: ignore[arg-type] + writer.writerow( + { + "run_id": run_id, + "timestamp_utc": timestamp, + "git_commit": commit, + "python_version": py_ver, + "system": system, + "machine": machine, + "cpu_count_logical": cpu_count_logical, + "cpu_count_available": cpu_count_available, + "ram_total_bytes": ram_total, + "ram_available_bytes": ram_available, + "gpu_compiled": gpu_info["gpu_compiled"], + "gpu_available": gpu_info["gpu_available"], + "gpu_backend": gpu_info["gpu_backend"], + "gpu_max_buffer_size": gpu_info["gpu_max_buffer_size"], + "budget_scale": f"{PERF_BUDGET_SCALE:.2f}", + "scenario": scenario, + "median_seconds": f"{median_s:.6f}", + "samples_seconds": samples, + } + ) + + lines = [ + f"## {run_id}", + f"- timestamp_utc: {timestamp}", + f"- git_commit: {commit}", + f"- python: {py_ver}", + f"- system: {system}", + f"- machine: {machine}", + f"- cpu_count_logical: {cpu_count_logical}", + f"- cpu_count_available: {cpu_count_available if cpu_count_available is not None else 'unknown'}", + f"- ram_total: {_format_bytes(ram_total)}", + f"- ram_available: {_format_bytes(ram_available)}", + f"- gpu_compiled: {gpu_info['gpu_compiled']}", + f"- gpu_available: {gpu_info['gpu_available']}", + f"- gpu_backend: 
{gpu_info['gpu_backend']}", + ( + "- gpu_max_buffer_size: " + + ( + _format_bytes(gpu_info["gpu_max_buffer_size"]) + if isinstance(gpu_info["gpu_max_buffer_size"], int) + else "unknown" + ) + ), + f"- budget_scale: {PERF_BUDGET_SCALE:.2f}", + "", + "| frontend | non_tiled_isotropic_s | non_tiled_anisotropic_s | tiled_isotropic_s | tiled_anisotropic_s |", + "|---|---:|---:|---:|---:|", + ] + for frontend, nti, nta, ti, ta in _runtime_matrix_rows(perf_matrix): + lines.append(f"| {frontend} | {nti} | {nta} | {ti} | {ta} |") + lines.append("") + + with _MD_LOG_PATH.open("a", encoding="utf-8") as fh: + fh.write("\n".join(lines)) + + +def test_performance_matrix_absolute_budgets(perf_matrix): + """Catch large runtime regressions in each benchmark scenario.""" + for sid, budget_s in ABSOLUTE_BUDGET_SECONDS.items(): + measured_s = _runtime(perf_matrix, sid) + threshold_s = budget_s * PERF_BUDGET_SCALE + assert measured_s <= threshold_s, ( + f"Performance regression in {sid}: " + f"{measured_s:.3f}s > budget {threshold_s:.3f}s " + f"(raw budget={budget_s:.3f}s, scale={PERF_BUDGET_SCALE:.2f})" + ) + + +def test_performance_matrix_relative_regressions(perf_matrix): + """Cross-check scenario ratios to catch path-specific slowdowns.""" + # anisotropic / isotropic ratios (same frontend + tiling mode) + for frontend in ("api", "plugin"): + for tiled in (False, True): + iso = _runtime(perf_matrix, _scenario_id(frontend, tiled, False)) + aniso = _runtime(perf_matrix, _scenario_id(frontend, tiled, True)) + assert aniso / iso <= MAX_RATIO_ANISO_OVER_ISO, ( + f"{frontend} {'tiled' if tiled else 'non-tiled'} anisotropic regression: " + f"ratio {aniso / iso:.2f} > {MAX_RATIO_ANISO_OVER_ISO:.2f}" + ) + + # tiled / non-tiled ratios (same frontend + sky mode) + for frontend in ("api", "plugin"): + for anisotropic in (False, True): + non_tiled = _runtime(perf_matrix, _scenario_id(frontend, False, anisotropic)) + tiled = _runtime(perf_matrix, _scenario_id(frontend, True, anisotropic)) + 
assert tiled / non_tiled <= MAX_RATIO_TILED_OVER_NON_TILED, ( + f"{frontend} {'anisotropic' if anisotropic else 'isotropic'} tiled regression: " + f"ratio {tiled / non_tiled:.2f} > {MAX_RATIO_TILED_OVER_NON_TILED:.2f}" + ) + + # plugin / API ratios (same tiling + sky mode) + for tiled in (False, True): + for anisotropic in (False, True): + api_rt = _runtime(perf_matrix, _scenario_id("api", tiled, anisotropic)) + plugin_rt = _runtime(perf_matrix, _scenario_id("plugin", tiled, anisotropic)) + assert plugin_rt / api_rt <= MAX_RATIO_PLUGIN_OVER_API, ( + f"Plugin overhead regression ({'tiled' if tiled else 'non-tiled'}, " + f"{'anisotropic' if anisotropic else 'isotropic'}): " + f"ratio {plugin_rt / api_rt:.2f} > {MAX_RATIO_PLUGIN_OVER_API:.2f}" + ) diff --git a/tests/benchmarks/test_tiling_benchmark.py b/tests/benchmarks/test_tiling_benchmark.py new file mode 100644 index 0000000..017ebdf --- /dev/null +++ b/tests/benchmarks/test_tiling_benchmark.py @@ -0,0 +1,132 @@ +"""Tiling performance benchmark scaffold for CI regression detection. 
+ +This suite focuses on orchestration-level regressions: +- worker-count scaling does not collapse +- bounded in-flight scheduling runs correctly +""" + +from __future__ import annotations + +import time +from datetime import datetime, timedelta + +import numpy as np +import pytest +from conftest import make_mock_svf +from solweig import Location, SurfaceData, Weather +from solweig.tiling import calculate_tiled, calculate_timeseries_tiled + +pytestmark = pytest.mark.slow + + +class TestTilingBenchmark: + """Benchmark scaffold for tiled orchestration performance.""" + + @pytest.fixture + def benchmark_surface(self): + size = 520 # 3x3 tiles with tile_size=256 and zero overlap + dsm = np.ones((size, size), dtype=np.float32) * 5.0 + return SurfaceData(dsm=dsm, pixel_size=1.0, svf=make_mock_svf((size, size))) + + @pytest.fixture + def benchmark_location(self): + return Location(latitude=57.7, longitude=12.0, utc_offset=1) + + @pytest.fixture + def benchmark_weather_series(self): + base = datetime(2024, 7, 15, 11, 0) + return [ + Weather( + datetime=base + timedelta(hours=i), + ta=24.0 + i, + rh=50.0, + global_rad=700.0, + ws=2.0, + ) + for i in range(3) + ] + + def test_tile_workers_scaling_sanity(self, benchmark_surface, benchmark_location, benchmark_weather_series): + """Two workers should not be significantly slower than one worker.""" + t0 = time.perf_counter() + summary_1w = calculate_timeseries_tiled( + benchmark_surface, + benchmark_weather_series, + benchmark_location, + tile_workers=1, + tile_queue_depth=0, + prefetch_tiles=False, + ) + t1 = time.perf_counter() + summary_2w = calculate_timeseries_tiled( + benchmark_surface, + benchmark_weather_series, + benchmark_location, + tile_workers=2, + tile_queue_depth=2, + prefetch_tiles=True, + ) + t2 = time.perf_counter() + + elapsed_1w = t1 - t0 + elapsed_2w = t2 - t1 + + assert len(summary_1w) == len(benchmark_weather_series) + assert len(summary_2w) == len(benchmark_weather_series) + assert elapsed_2w <= 
elapsed_1w * 1.25, ( + f"Tiled scaling regression: 2 workers too slow ({elapsed_2w:.3f}s vs {elapsed_1w:.3f}s)" + ) + + def test_bounded_inflight_runtime_controls(self, benchmark_surface, benchmark_location, benchmark_weather_series): + """Bounded in-flight scheduling executes correctly with small queue depth.""" + summary = calculate_timeseries_tiled( + benchmark_surface, + benchmark_weather_series, + benchmark_location, + tile_workers=2, + tile_queue_depth=0, + prefetch_tiles=False, + timestep_outputs=["tmrt"], + ) + assert len(summary) == len(benchmark_weather_series) + assert all(r.tmrt is not None for r in summary.results) + + def test_anisotropic_tiled_runtime_smoke(self, benchmark_location): + """Anisotropic tiled path runs with non-zero overlap/runtime controls.""" + from solweig.models.precomputed import ShadowArrays + + size = 300 + n_patches = 153 + n_pack = (n_patches + 7) // 8 + + dsm = np.ones((size, size), dtype=np.float32) * 5.0 + dsm[120:180, 120:180] = 10.0 # 5 m relative height -> non-zero overlap + surface = SurfaceData(dsm=dsm, pixel_size=1.0, svf=make_mock_svf((size, size))) + surface.shadow_matrices = ShadowArrays( + _shmat_u8=np.full((size, size, n_pack), 0xFF, dtype=np.uint8), + _vegshmat_u8=np.full((size, size, n_pack), 0xFF, dtype=np.uint8), + _vbshmat_u8=np.full((size, size, n_pack), 0xFF, dtype=np.uint8), + _n_patches=n_patches, + ) + + weather = Weather( + datetime=datetime(2024, 7, 15, 12, 0), + ta=27.0, + rh=45.0, + global_rad=800.0, + ws=2.0, + ) + + result = calculate_tiled( + surface=surface, + location=benchmark_location, + weather=weather, + tile_size=256, + use_anisotropic_sky=True, + tile_workers=2, + tile_queue_depth=1, + prefetch_tiles=True, + max_shadow_distance_m=80.0, + ) + assert result.tmrt is not None + assert np.isfinite(result.tmrt).sum() > 0 diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..926b3ec --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,53 @@ +"""Shared pytest 
configuration and path setup.""" + +import sys +from pathlib import Path + +import numpy as np +import pytest + +# Ensure the project root is on sys.path so that both `tests.qgis_mocks` +# and `qgis_plugin.*` imports work regardless of how pytest is invoked. +_project_root = str(Path(__file__).resolve().parent.parent) +if _project_root not in sys.path: + sys.path.insert(0, _project_root) + + +def pytest_configure(config: pytest.Config) -> None: + """Fail early if the Rust extension was built in debug mode. + + Debug builds are 5-20x slower and make the test suite impractically slow. + Rebuild with: ``maturin develop --release`` + """ + import solweig + + if not getattr(solweig, "RELEASE_BUILD", True): + pytest.exit( + "Rust extension was built in DEBUG mode — tests will be too slow.\n" + "Rebuild with: maturin develop --release", + returncode=1, + ) + + +def make_mock_svf(shape: tuple[int, ...]): + """Create a mock SvfArrays for tests (fully open sky).""" + from solweig.models.precomputed import SvfArrays + + ones = np.ones(shape, dtype=np.float32) + return SvfArrays( + svf=ones.copy(), + svf_north=ones.copy(), + svf_east=ones.copy(), + svf_south=ones.copy(), + svf_west=ones.copy(), + svf_veg=ones.copy(), + svf_veg_north=ones.copy(), + svf_veg_east=ones.copy(), + svf_veg_south=ones.copy(), + svf_veg_west=ones.copy(), + svf_aveg=ones.copy(), + svf_aveg_north=ones.copy(), + svf_aveg_east=ones.copy(), + svf_aveg_south=ones.copy(), + svf_aveg_west=ones.copy(), + ) diff --git a/tests/golden/README.md b/tests/golden/README.md new file mode 100644 index 0000000..43c8813 --- /dev/null +++ b/tests/golden/README.md @@ -0,0 +1,128 @@ +# Golden Tests Report + +This directory contains golden regression tests for SOLWEIG's Rust implementations. +These tests compare the Rust algorithms against pre-computed reference fixtures to +ensure consistency and catch regressions. 
+ +## Test Summary + +**Total Golden Tests: 100** + +| Module | Test File | Tests | Description | +|--------|-----------|-------|-------------| +| Anisotropic Sky | `test_golden_anisotropic_sky.py` | 16 | Direction-dependent sky radiation model | +| Ground Temp | `test_golden_ground_temp.py` | 6 | TsWaveDelay thermal inertia model | +| GVF | `test_golden_gvf.py` | 13 | Ground View Factor calculations | +| PET | `test_golden_pet.py` | 10 | Physiological Equivalent Temperature | +| Radiation | `test_golden_radiation.py` | 14 | Kside/Lside (shortwave/longwave) via vegetation module | +| Shadows | `test_golden_shadows.py` | 8 | Building and vegetation shadow calculations | +| SVF | `test_golden_svf.py` | 8 | Sky View Factor (total, directional, vegetation) | +| Tmrt | `test_golden_tmrt.py` | 6 | Mean Radiant Temperature | +| UTCI | `test_golden_utci.py` | 12 | Universal Thermal Climate Index | +| Wall Temp | `test_golden_walls.py` | 7 | Ground and wall temperature deviations | + +## Covered Rust Modules + +### Exposed via PyO3 (with Golden Tests) + +| Rust Module | Python Import | Golden Test | +|-------------|---------------|-------------| +| `shadowing.rs` | `solweig.rustalgos.shadowing` | `test_golden_shadows.py` | +| `skyview.rs` | `solweig.rustalgos.skyview` | `test_golden_svf.py` | +| `vegetation.rs` | `solweig.rustalgos.vegetation` | `test_golden_radiation.py` | +| `gvf.rs` | `solweig.rustalgos.gvf` | `test_golden_gvf.py` | +| `utci.rs` | `solweig.rustalgos.utci` | `test_golden_utci.py` | +| `pet.rs` | `solweig.rustalgos.pet` | `test_golden_pet.py` | +| `tmrt.rs` | `solweig.rustalgos.tmrt` | `test_golden_tmrt.py` | +| `ground.rs` | `solweig.rustalgos.ground` | `test_golden_walls.py`, `test_golden_ground_temp.py` | +| `sky.rs` | `solweig.rustalgos.sky` | `test_golden_anisotropic_sky.py` | + +### Internal Modules (Covered by Higher-Level Tests) + +These modules are internal implementation details not exposed to Python. 
+They are tested indirectly through the higher-level functions that use them. + +| Internal Module | Used By | Coverage | +|----------------|---------|----------| +| `sun.rs` | `sky.rs`, `vegetation.rs` | Covered by anisotropic_sky, radiation tests | +| `patch_radiation.rs` | `sky.rs` | Covered by anisotropic_sky tests | +| `sunlit_shaded_patches.rs` | `sky.rs`, `vegetation.rs` | Covered by anisotropic_sky, radiation tests | +| `emissivity_models.rs` | `sky.rs` | Covered by anisotropic_sky tests | + +## Ground Truth Sources + +Golden fixtures are generated from different sources depending on the algorithm: + +| Algorithm | Ground Truth | Rationale | +|-----------|--------------|-----------| +| Shadows | UMEP Python | Reference implementation, validated against field measurements | +| SVF | UMEP Python | Reference implementation | +| Radiation | UMEP Python | Reference formulas from Lindberg et al. (2008) | +| GVF | UMEP Python | Ground view factor calculations | +| UTCI | UMEP Python | 6th-order polynomial approximation | +| PET | UMEP Python | Iterative energy balance solver | +| Tmrt | Formula-based | Stefan-Boltzmann formula: `(Sstr / (abs_l * SBC))^0.25 - 273.15` | +| Ground Temp | UMEP Python | TsWaveDelay exponential decay model | +| Wall Temp | Rust (regression) | Numerical consistency checks | +| Anisotropic Sky | Rust (regression) | Complex model with numerical consistency checks | + +## Running Tests + +```bash +# Run all golden tests +uv run pytest tests/golden/ -v + +# Run specific module tests +uv run pytest tests/golden/test_golden_shadows.py -v + +# Run with coverage +uv run pytest tests/golden/ --cov=solweig.rustalgos +``` + +## Regenerating Fixtures + +Fixtures are generated from the reference implementations. To regenerate: + +```bash +uv run python tests/golden/generate_fixtures.py +``` + +**Warning:** Regenerating fixtures will overwrite existing reference data. +Only do this when intentionally updating the ground truth. 
+
+## Test Categories
+
+Each golden test file contains multiple test categories:
+
+1. **Golden Regression Tests**: Compare against pre-computed fixtures
+2. **Physical Property Tests**: Verify outputs satisfy physical constraints
+3. **Shape Consistency Tests**: Verify output dimensions match inputs
+4. **Behavioral Tests**: Verify correct response to input changes
+
+## Tolerance Settings
+
+| Algorithm | RTOL | ATOL | Notes |
+|-----------|------|------|-------|
+| Shadows | 1e-6 | 1e-6 | Binary masks, high precision |
+| SVF | 0.01 | 0.02 | 2% tolerance for complex geometry |
+| Radiation | 1e-4 | 1e-4 | Physical radiation values |
+| UTCI | 1e-3 | 0.05 | 0.05°C absolute tolerance |
+| PET | 0.01 | 0.1 | Iterative solver, 0.1°C tolerance |
+| Tmrt | 1e-4 | 0.01 | 0.01°C absolute tolerance |
+| Anisotropic Sky | 1e-4 | 0.1 | Complex radiation model |
+
+## Adding New Tests
+
+1. Add fixture generator in `generate_fixtures.py`
+2. Create test file `test_golden_<module>.py` (matching the module under test)
+3. Include physical property tests (not just regression)
+4. Document ground truth source
+5. Update this README
+
+## References
+
+- Lindberg et al. (2008): SOLWEIG 1.0 radiation model
+- Lindberg et al. (2016): SOLWEIG 2016a updates
+- Perez et al. (1993): Anisotropic sky luminance distribution
+- Jendritzky et al. (2012): UTCI formulation
+- Höppe (1999): PET energy balance model diff --git a/tests/golden/__init__.py b/tests/golden/__init__.py new file mode 100644 index 0000000..9028c6d --- /dev/null +++ b/tests/golden/__init__.py @@ -0,0 +1,6 @@ +""" +Golden regression tests for SOLWEIG algorithms. +
+These tests compare current algorithm outputs against pre-computed +reference outputs to catch numerical drift during refactoring. 
+""" diff --git a/tests/golden/fixtures/aniso_sky_output.npz b/tests/golden/fixtures/aniso_sky_output.npz new file mode 100644 index 0000000..01d3f8e Binary files /dev/null and b/tests/golden/fixtures/aniso_sky_output.npz differ diff --git a/tests/golden/fixtures/ground_temp_case1.npz b/tests/golden/fixtures/ground_temp_case1.npz new file mode 100644 index 0000000..a55c083 Binary files /dev/null and b/tests/golden/fixtures/ground_temp_case1.npz differ diff --git a/tests/golden/fixtures/ground_temp_case2.npz b/tests/golden/fixtures/ground_temp_case2.npz new file mode 100644 index 0000000..b4b41e2 Binary files /dev/null and b/tests/golden/fixtures/ground_temp_case2.npz differ diff --git a/tests/golden/fixtures/ground_temp_case3.npz b/tests/golden/fixtures/ground_temp_case3.npz new file mode 100644 index 0000000..030cc86 Binary files /dev/null and b/tests/golden/fixtures/ground_temp_case3.npz differ diff --git a/tests/golden/fixtures/ground_temp_input_gvflup.npy b/tests/golden/fixtures/ground_temp_input_gvflup.npy new file mode 100644 index 0000000..8bdc07a Binary files /dev/null and b/tests/golden/fixtures/ground_temp_input_gvflup.npy differ diff --git a/tests/golden/fixtures/ground_temp_input_tgmap1.npy b/tests/golden/fixtures/ground_temp_input_tgmap1.npy new file mode 100644 index 0000000..e7502cf Binary files /dev/null and b/tests/golden/fixtures/ground_temp_input_tgmap1.npy differ diff --git a/tests/golden/fixtures/gvf_alb.npy b/tests/golden/fixtures/gvf_alb.npy new file mode 100644 index 0000000..a2244f3 Binary files /dev/null and b/tests/golden/fixtures/gvf_alb.npy differ diff --git a/tests/golden/fixtures/gvf_input_tg.npy b/tests/golden/fixtures/gvf_input_tg.npy new file mode 100644 index 0000000..b3eca26 Binary files /dev/null and b/tests/golden/fixtures/gvf_input_tg.npy differ diff --git a/tests/golden/fixtures/gvf_lup.npy b/tests/golden/fixtures/gvf_lup.npy new file mode 100644 index 0000000..bee6cb1 Binary files /dev/null and 
b/tests/golden/fixtures/gvf_lup.npy differ diff --git a/tests/golden/fixtures/gvf_norm.npy b/tests/golden/fixtures/gvf_norm.npy new file mode 100644 index 0000000..fcca105 Binary files /dev/null and b/tests/golden/fixtures/gvf_norm.npy differ diff --git a/tests/golden/fixtures/input_bush.npy b/tests/golden/fixtures/input_bush.npy new file mode 100644 index 0000000..591924f Binary files /dev/null and b/tests/golden/fixtures/input_bush.npy differ diff --git a/tests/golden/fixtures/input_cdsm.npy b/tests/golden/fixtures/input_cdsm.npy new file mode 100644 index 0000000..f524275 Binary files /dev/null and b/tests/golden/fixtures/input_cdsm.npy differ diff --git a/tests/golden/fixtures/input_dsm.npy b/tests/golden/fixtures/input_dsm.npy new file mode 100644 index 0000000..4d90807 Binary files /dev/null and b/tests/golden/fixtures/input_dsm.npy differ diff --git a/tests/golden/fixtures/input_params.npz b/tests/golden/fixtures/input_params.npz new file mode 100644 index 0000000..93bbbc6 Binary files /dev/null and b/tests/golden/fixtures/input_params.npz differ diff --git a/tests/golden/fixtures/input_tdsm.npy b/tests/golden/fixtures/input_tdsm.npy new file mode 100644 index 0000000..3e11438 Binary files /dev/null and b/tests/golden/fixtures/input_tdsm.npy differ diff --git a/tests/golden/fixtures/input_wall_asp.npy b/tests/golden/fixtures/input_wall_asp.npy new file mode 100644 index 0000000..5c96fd7 Binary files /dev/null and b/tests/golden/fixtures/input_wall_asp.npy differ diff --git a/tests/golden/fixtures/input_wall_ht.npy b/tests/golden/fixtures/input_wall_ht.npy new file mode 100644 index 0000000..97498e1 Binary files /dev/null and b/tests/golden/fixtures/input_wall_ht.npy differ diff --git a/tests/golden/fixtures/pet_grid_output.npy b/tests/golden/fixtures/pet_grid_output.npy new file mode 100644 index 0000000..f1aa28d Binary files /dev/null and b/tests/golden/fixtures/pet_grid_output.npy differ diff --git a/tests/golden/fixtures/pet_grid_params.npz 
b/tests/golden/fixtures/pet_grid_params.npz new file mode 100644 index 0000000..a57988d Binary files /dev/null and b/tests/golden/fixtures/pet_grid_params.npz differ diff --git a/tests/golden/fixtures/pet_grid_tmrt.npy b/tests/golden/fixtures/pet_grid_tmrt.npy new file mode 100644 index 0000000..6f2bdfc Binary files /dev/null and b/tests/golden/fixtures/pet_grid_tmrt.npy differ diff --git a/tests/golden/fixtures/pet_grid_va.npy b/tests/golden/fixtures/pet_grid_va.npy new file mode 100644 index 0000000..57f37b8 Binary files /dev/null and b/tests/golden/fixtures/pet_grid_va.npy differ diff --git a/tests/golden/fixtures/pet_single_point.npz b/tests/golden/fixtures/pet_single_point.npz new file mode 100644 index 0000000..4e4803c Binary files /dev/null and b/tests/golden/fixtures/pet_single_point.npz differ diff --git a/tests/golden/fixtures/radiation_aniso_kside_d.npy b/tests/golden/fixtures/radiation_aniso_kside_d.npy new file mode 100644 index 0000000..7862a3d Binary files /dev/null and b/tests/golden/fixtures/radiation_aniso_kside_d.npy differ diff --git a/tests/golden/fixtures/radiation_aniso_kside_e.npy b/tests/golden/fixtures/radiation_aniso_kside_e.npy new file mode 100644 index 0000000..ce8c898 Binary files /dev/null and b/tests/golden/fixtures/radiation_aniso_kside_e.npy differ diff --git a/tests/golden/fixtures/radiation_aniso_kside_i.npy b/tests/golden/fixtures/radiation_aniso_kside_i.npy new file mode 100644 index 0000000..54f7d0e Binary files /dev/null and b/tests/golden/fixtures/radiation_aniso_kside_i.npy differ diff --git a/tests/golden/fixtures/radiation_aniso_kside_s.npy b/tests/golden/fixtures/radiation_aniso_kside_s.npy new file mode 100644 index 0000000..ce8c898 Binary files /dev/null and b/tests/golden/fixtures/radiation_aniso_kside_s.npy differ diff --git a/tests/golden/fixtures/radiation_aniso_lside_e.npy b/tests/golden/fixtures/radiation_aniso_lside_e.npy new file mode 100644 index 0000000..81771d5 Binary files /dev/null and 
b/tests/golden/fixtures/radiation_aniso_lside_e.npy differ diff --git a/tests/golden/fixtures/radiation_aniso_lside_s.npy b/tests/golden/fixtures/radiation_aniso_lside_s.npy new file mode 100644 index 0000000..81771d5 Binary files /dev/null and b/tests/golden/fixtures/radiation_aniso_lside_s.npy differ diff --git a/tests/golden/fixtures/radiation_kside_e.npy b/tests/golden/fixtures/radiation_kside_e.npy new file mode 100644 index 0000000..f705c6f Binary files /dev/null and b/tests/golden/fixtures/radiation_kside_e.npy differ diff --git a/tests/golden/fixtures/radiation_kside_s.npy b/tests/golden/fixtures/radiation_kside_s.npy new file mode 100644 index 0000000..d4350da Binary files /dev/null and b/tests/golden/fixtures/radiation_kside_s.npy differ diff --git a/tests/golden/fixtures/radiation_lside_e.npy b/tests/golden/fixtures/radiation_lside_e.npy new file mode 100644 index 0000000..8a678cb Binary files /dev/null and b/tests/golden/fixtures/radiation_lside_e.npy differ diff --git a/tests/golden/fixtures/radiation_lside_s.npy b/tests/golden/fixtures/radiation_lside_s.npy new file mode 100644 index 0000000..88deed3 Binary files /dev/null and b/tests/golden/fixtures/radiation_lside_s.npy differ diff --git a/tests/golden/fixtures/shadow_afternoon_bldg_sh.npy b/tests/golden/fixtures/shadow_afternoon_bldg_sh.npy new file mode 100644 index 0000000..fc04ca9 Binary files /dev/null and b/tests/golden/fixtures/shadow_afternoon_bldg_sh.npy differ diff --git a/tests/golden/fixtures/shadow_afternoon_veg_sh.npy b/tests/golden/fixtures/shadow_afternoon_veg_sh.npy new file mode 100644 index 0000000..a689a13 Binary files /dev/null and b/tests/golden/fixtures/shadow_afternoon_veg_sh.npy differ diff --git a/tests/golden/fixtures/shadow_afternoon_wall_sh.npy b/tests/golden/fixtures/shadow_afternoon_wall_sh.npy new file mode 100644 index 0000000..af2e4a4 Binary files /dev/null and b/tests/golden/fixtures/shadow_afternoon_wall_sh.npy differ diff --git 
a/tests/golden/fixtures/shadow_afternoon_wall_sun.npy b/tests/golden/fixtures/shadow_afternoon_wall_sun.npy new file mode 100644 index 0000000..b73646c Binary files /dev/null and b/tests/golden/fixtures/shadow_afternoon_wall_sun.npy differ diff --git a/tests/golden/fixtures/shadow_metadata.npz b/tests/golden/fixtures/shadow_metadata.npz new file mode 100644 index 0000000..6ffcff8 Binary files /dev/null and b/tests/golden/fixtures/shadow_metadata.npz differ diff --git a/tests/golden/fixtures/shadow_morning_bldg_sh.npy b/tests/golden/fixtures/shadow_morning_bldg_sh.npy new file mode 100644 index 0000000..7b9ec7b Binary files /dev/null and b/tests/golden/fixtures/shadow_morning_bldg_sh.npy differ diff --git a/tests/golden/fixtures/shadow_morning_veg_sh.npy b/tests/golden/fixtures/shadow_morning_veg_sh.npy new file mode 100644 index 0000000..251d2cc Binary files /dev/null and b/tests/golden/fixtures/shadow_morning_veg_sh.npy differ diff --git a/tests/golden/fixtures/shadow_morning_wall_sh.npy b/tests/golden/fixtures/shadow_morning_wall_sh.npy new file mode 100644 index 0000000..8e61e45 Binary files /dev/null and b/tests/golden/fixtures/shadow_morning_wall_sh.npy differ diff --git a/tests/golden/fixtures/shadow_morning_wall_sun.npy b/tests/golden/fixtures/shadow_morning_wall_sun.npy new file mode 100644 index 0000000..f7e8bd4 Binary files /dev/null and b/tests/golden/fixtures/shadow_morning_wall_sun.npy differ diff --git a/tests/golden/fixtures/shadow_noon_bldg_sh.npy b/tests/golden/fixtures/shadow_noon_bldg_sh.npy new file mode 100644 index 0000000..255b5db Binary files /dev/null and b/tests/golden/fixtures/shadow_noon_bldg_sh.npy differ diff --git a/tests/golden/fixtures/shadow_noon_veg_sh.npy b/tests/golden/fixtures/shadow_noon_veg_sh.npy new file mode 100644 index 0000000..bb6edb5 Binary files /dev/null and b/tests/golden/fixtures/shadow_noon_veg_sh.npy differ diff --git a/tests/golden/fixtures/shadow_noon_wall_sh.npy b/tests/golden/fixtures/shadow_noon_wall_sh.npy 
new file mode 100644 index 0000000..52c53b4 Binary files /dev/null and b/tests/golden/fixtures/shadow_noon_wall_sh.npy differ diff --git a/tests/golden/fixtures/shadow_noon_wall_sun.npy b/tests/golden/fixtures/shadow_noon_wall_sun.npy new file mode 100644 index 0000000..4dd2959 Binary files /dev/null and b/tests/golden/fixtures/shadow_noon_wall_sun.npy differ diff --git a/tests/golden/fixtures/svf_east.npy b/tests/golden/fixtures/svf_east.npy new file mode 100644 index 0000000..c192634 Binary files /dev/null and b/tests/golden/fixtures/svf_east.npy differ diff --git a/tests/golden/fixtures/svf_metadata.npz b/tests/golden/fixtures/svf_metadata.npz new file mode 100644 index 0000000..6ffcff8 Binary files /dev/null and b/tests/golden/fixtures/svf_metadata.npz differ diff --git a/tests/golden/fixtures/svf_north.npy b/tests/golden/fixtures/svf_north.npy new file mode 100644 index 0000000..a2f56bb Binary files /dev/null and b/tests/golden/fixtures/svf_north.npy differ diff --git a/tests/golden/fixtures/svf_south.npy b/tests/golden/fixtures/svf_south.npy new file mode 100644 index 0000000..528c197 Binary files /dev/null and b/tests/golden/fixtures/svf_south.npy differ diff --git a/tests/golden/fixtures/svf_total.npy b/tests/golden/fixtures/svf_total.npy new file mode 100644 index 0000000..e5a6e34 Binary files /dev/null and b/tests/golden/fixtures/svf_total.npy differ diff --git a/tests/golden/fixtures/svf_veg.npy b/tests/golden/fixtures/svf_veg.npy new file mode 100644 index 0000000..8f66270 Binary files /dev/null and b/tests/golden/fixtures/svf_veg.npy differ diff --git a/tests/golden/fixtures/svf_west.npy b/tests/golden/fixtures/svf_west.npy new file mode 100644 index 0000000..dc31c43 Binary files /dev/null and b/tests/golden/fixtures/svf_west.npy differ diff --git a/tests/golden/fixtures/tmrt_input_kdown.npy b/tests/golden/fixtures/tmrt_input_kdown.npy new file mode 100644 index 0000000..e73e85d Binary files /dev/null and b/tests/golden/fixtures/tmrt_input_kdown.npy 
differ diff --git a/tests/golden/fixtures/tmrt_input_kside_e.npy b/tests/golden/fixtures/tmrt_input_kside_e.npy new file mode 100644 index 0000000..501dabe Binary files /dev/null and b/tests/golden/fixtures/tmrt_input_kside_e.npy differ diff --git a/tests/golden/fixtures/tmrt_input_kside_n.npy b/tests/golden/fixtures/tmrt_input_kside_n.npy new file mode 100644 index 0000000..1b3a6bd Binary files /dev/null and b/tests/golden/fixtures/tmrt_input_kside_n.npy differ diff --git a/tests/golden/fixtures/tmrt_input_kside_s.npy b/tests/golden/fixtures/tmrt_input_kside_s.npy new file mode 100644 index 0000000..d0b1872 Binary files /dev/null and b/tests/golden/fixtures/tmrt_input_kside_s.npy differ diff --git a/tests/golden/fixtures/tmrt_input_kside_total.npy b/tests/golden/fixtures/tmrt_input_kside_total.npy new file mode 100644 index 0000000..a1e63c8 Binary files /dev/null and b/tests/golden/fixtures/tmrt_input_kside_total.npy differ diff --git a/tests/golden/fixtures/tmrt_input_kside_w.npy b/tests/golden/fixtures/tmrt_input_kside_w.npy new file mode 100644 index 0000000..9134427 Binary files /dev/null and b/tests/golden/fixtures/tmrt_input_kside_w.npy differ diff --git a/tests/golden/fixtures/tmrt_input_kup.npy b/tests/golden/fixtures/tmrt_input_kup.npy new file mode 100644 index 0000000..964c628 Binary files /dev/null and b/tests/golden/fixtures/tmrt_input_kup.npy differ diff --git a/tests/golden/fixtures/tmrt_input_ldown.npy b/tests/golden/fixtures/tmrt_input_ldown.npy new file mode 100644 index 0000000..28943c6 Binary files /dev/null and b/tests/golden/fixtures/tmrt_input_ldown.npy differ diff --git a/tests/golden/fixtures/tmrt_input_lside_e.npy b/tests/golden/fixtures/tmrt_input_lside_e.npy new file mode 100644 index 0000000..5cdbe65 Binary files /dev/null and b/tests/golden/fixtures/tmrt_input_lside_e.npy differ diff --git a/tests/golden/fixtures/tmrt_input_lside_n.npy b/tests/golden/fixtures/tmrt_input_lside_n.npy new file mode 100644 index 0000000..3b0aea9 Binary 
files /dev/null and b/tests/golden/fixtures/tmrt_input_lside_n.npy differ diff --git a/tests/golden/fixtures/tmrt_input_lside_s.npy b/tests/golden/fixtures/tmrt_input_lside_s.npy new file mode 100644 index 0000000..8f0c87b Binary files /dev/null and b/tests/golden/fixtures/tmrt_input_lside_s.npy differ diff --git a/tests/golden/fixtures/tmrt_input_lside_total.npy b/tests/golden/fixtures/tmrt_input_lside_total.npy new file mode 100644 index 0000000..0f8923f Binary files /dev/null and b/tests/golden/fixtures/tmrt_input_lside_total.npy differ diff --git a/tests/golden/fixtures/tmrt_input_lside_w.npy b/tests/golden/fixtures/tmrt_input_lside_w.npy new file mode 100644 index 0000000..fee811f Binary files /dev/null and b/tests/golden/fixtures/tmrt_input_lside_w.npy differ diff --git a/tests/golden/fixtures/tmrt_input_lup.npy b/tests/golden/fixtures/tmrt_input_lup.npy new file mode 100644 index 0000000..5e04be7 Binary files /dev/null and b/tests/golden/fixtures/tmrt_input_lup.npy differ diff --git a/tests/golden/fixtures/tmrt_output_aniso.npy b/tests/golden/fixtures/tmrt_output_aniso.npy new file mode 100644 index 0000000..03ee786 Binary files /dev/null and b/tests/golden/fixtures/tmrt_output_aniso.npy differ diff --git a/tests/golden/fixtures/tmrt_output_iso.npy b/tests/golden/fixtures/tmrt_output_iso.npy new file mode 100644 index 0000000..6d2d3db Binary files /dev/null and b/tests/golden/fixtures/tmrt_output_iso.npy differ diff --git a/tests/golden/fixtures/tmrt_params.npz b/tests/golden/fixtures/tmrt_params.npz new file mode 100644 index 0000000..ee8a06d Binary files /dev/null and b/tests/golden/fixtures/tmrt_params.npz differ diff --git a/tests/golden/fixtures/utci_grid_output.npy b/tests/golden/fixtures/utci_grid_output.npy new file mode 100644 index 0000000..9097ee6 Binary files /dev/null and b/tests/golden/fixtures/utci_grid_output.npy differ diff --git a/tests/golden/fixtures/utci_grid_params.npz b/tests/golden/fixtures/utci_grid_params.npz new file mode 100644 
index 0000000..e07c2a3 Binary files /dev/null and b/tests/golden/fixtures/utci_grid_params.npz differ diff --git a/tests/golden/fixtures/utci_grid_tmrt.npy b/tests/golden/fixtures/utci_grid_tmrt.npy new file mode 100644 index 0000000..b2232f2 Binary files /dev/null and b/tests/golden/fixtures/utci_grid_tmrt.npy differ diff --git a/tests/golden/fixtures/utci_grid_va.npy b/tests/golden/fixtures/utci_grid_va.npy new file mode 100644 index 0000000..8c721f6 Binary files /dev/null and b/tests/golden/fixtures/utci_grid_va.npy differ diff --git a/tests/golden/fixtures/utci_single_point.npz b/tests/golden/fixtures/utci_single_point.npz new file mode 100644 index 0000000..bc68037 Binary files /dev/null and b/tests/golden/fixtures/utci_single_point.npz differ diff --git a/tests/golden/fixtures/wall_temp_input_alb.npy b/tests/golden/fixtures/wall_temp_input_alb.npy new file mode 100644 index 0000000..888a689 Binary files /dev/null and b/tests/golden/fixtures/wall_temp_input_alb.npy differ diff --git a/tests/golden/fixtures/wall_temp_input_emis.npy b/tests/golden/fixtures/wall_temp_input_emis.npy new file mode 100644 index 0000000..cc02fb7 Binary files /dev/null and b/tests/golden/fixtures/wall_temp_input_emis.npy differ diff --git a/tests/golden/fixtures/wall_temp_input_tgk.npy b/tests/golden/fixtures/wall_temp_input_tgk.npy new file mode 100644 index 0000000..dd6d17f Binary files /dev/null and b/tests/golden/fixtures/wall_temp_input_tgk.npy differ diff --git a/tests/golden/fixtures/wall_temp_input_tmaxlst.npy b/tests/golden/fixtures/wall_temp_input_tmaxlst.npy new file mode 100644 index 0000000..8a64621 Binary files /dev/null and b/tests/golden/fixtures/wall_temp_input_tmaxlst.npy differ diff --git a/tests/golden/fixtures/wall_temp_input_tstart.npy b/tests/golden/fixtures/wall_temp_input_tstart.npy new file mode 100644 index 0000000..b4af5c6 Binary files /dev/null and b/tests/golden/fixtures/wall_temp_input_tstart.npy differ diff --git 
a/tests/golden/fixtures/wall_temp_output.npz b/tests/golden/fixtures/wall_temp_output.npz new file mode 100644 index 0000000..0f026de Binary files /dev/null and b/tests/golden/fixtures/wall_temp_output.npz differ diff --git a/tests/golden/generate_fixtures.py b/tests/golden/generate_fixtures.py new file mode 100644 index 0000000..80ef89e --- /dev/null +++ b/tests/golden/generate_fixtures.py @@ -0,0 +1,1294 @@ +""" +Golden Fixture Generator + +Run this script once to generate golden fixtures from the Athens demo data. +These fixtures serve as regression reference points during modernization. + +IMPORTANT: These fixtures are generated using the **original UMEP Python module** +as ground truth. This ensures we have a neutral reference that doesn't change +during Rust modernization. The tests then verify that Rust matches UMEP Python. + +Usage: + uv run python tests/golden/generate_fixtures.py +""" + +from pathlib import Path + +import numpy as np +from umep.functions.SOLWEIGpython.solweig_runner_core import SolweigRunCore +from umep.functions.svf_functions import svfForProcessing153 +from umep.util.SEBESOLWEIGCommonFiles.shadowingfunction_wallheight_23 import ( + shadowingfunction_wallheight_23, +) + +# Paths +FIXTURES_DIR = Path(__file__).parent / "fixtures" +CONFIG_PATH = "tests/rustalgos/test_config_shadows.ini" +PARAMS_PATH = "tests/rustalgos/test_params_solweig.json" + + +def ensure_fixtures_dir(): + """Create fixtures directory if it doesn't exist.""" + FIXTURES_DIR.mkdir(parents=True, exist_ok=True) + + +def generate_shadow_fixtures(): + """Generate golden fixtures for shadow calculations using UMEP Python.""" + print("Generating shadow fixtures (using UMEP Python as ground truth)...") + + # Load demo data using existing test infrastructure + SWC = SolweigRunCore( + config_path_str=CONFIG_PATH, + params_json_path=PARAMS_PATH, + ) + + dsm = SWC.raster_data.dsm.astype(np.float32) + assert SWC.raster_data.cdsm is not None + assert SWC.raster_data.tdsm is not None 
+ assert SWC.raster_data.bush is not None + cdsm = SWC.raster_data.cdsm.astype(np.float32) + tdsm = SWC.raster_data.tdsm.astype(np.float32) + bush = SWC.raster_data.bush.astype(np.float32) + wall_ht = SWC.raster_data.wallheight.astype(np.float32) + wall_asp = (SWC.raster_data.wallaspect * np.pi / 180.0).astype(np.float32) + + # Test with multiple sun positions + sun_positions = [ + {"name": "morning", "azimuth": 90.0, "altitude": 30.0}, + {"name": "noon", "azimuth": 180.0, "altitude": 60.0}, + {"name": "afternoon", "azimuth": 270.0, "altitude": 45.0}, + ] + + for pos in sun_positions: + print(f" Computing shadows for {pos['name']}...") + # Use UMEP Python shadowingfunction_wallheight_23 + # Returns: (veg_sh, bldg_sh, veg_blocks_bldg_sh, wall_sh, wall_sun, + # wall_sh_veg, face_sh, face_sun) + ( + veg_sh, + bldg_sh, + _veg_blocks_bldg_sh, + wall_sh, + wall_sun, + _wall_sh_veg, + _face_sh, + _face_sun, + ) = shadowingfunction_wallheight_23( + dsm, + cdsm, + tdsm, + pos["azimuth"], + pos["altitude"], + SWC.raster_data.scale, + SWC.raster_data.amaxvalue, + bush, + wall_ht, + wall_asp, + ) + + # Save each shadow component + prefix = f"shadow_{pos['name']}" + np.save(FIXTURES_DIR / f"{prefix}_bldg_sh.npy", np.array(bldg_sh)) + np.save(FIXTURES_DIR / f"{prefix}_veg_sh.npy", np.array(veg_sh)) + np.save(FIXTURES_DIR / f"{prefix}_wall_sh.npy", np.array(wall_sh)) + np.save(FIXTURES_DIR / f"{prefix}_wall_sun.npy", np.array(wall_sun)) + + # Save input metadata for reproducibility + np.savez( + FIXTURES_DIR / "shadow_metadata.npz", + dsm_shape=dsm.shape, + scale=SWC.raster_data.scale, + amaxvalue=SWC.raster_data.amaxvalue, + ) + + print(" Shadow fixtures saved.") + + +def generate_svf_fixtures(): + """Generate golden fixtures for SVF calculations using UMEP Python.""" + print("Generating SVF fixtures (using UMEP Python as ground truth)...") + + # Load from pre-saved input fixtures to avoid SolweigRunCore dependency + dsm = np.load(FIXTURES_DIR / 
"input_dsm.npy").astype(np.float32) + cdsm_abs = np.load(FIXTURES_DIR / "input_cdsm.npy").astype(np.float32) + tdsm_abs = np.load(FIXTURES_DIR / "input_tdsm.npy").astype(np.float32) + params = dict(np.load(FIXTURES_DIR / "input_params.npz")) + scale = float(params["scale"]) + amaxvalue = float(params["amaxvalue"]) + + # IMPORTANT: The installed UMEP svfForProcessing153 expects RELATIVE vegetation + # heights (height above ground), not absolute elevations. It internally adds DSM + # to convert to absolute heights. Our input data has absolute heights, so we must + # convert to relative before calling UMEP. + # + # Conversion: relative_height = absolute_height - DSM + # Where vegetation doesn't exist (CDSM <= DSM), set to 0. + cdsm_rel = np.maximum(cdsm_abs - dsm, 0).astype(np.float32) + tdsm_rel = np.maximum(tdsm_abs - dsm, 0).astype(np.float32) + cdsm_rel[cdsm_abs <= dsm] = 0 + tdsm_rel[tdsm_abs <= dsm] = 0 + + print(" Computing SVF (this may take a moment)...") + # Use UMEP Python svfForProcessing153 + # Returns a dictionary with keys: svf, svfE, svfS, svfW, svfN, svfveg, etc. + # Note: UMEP expects relative heights; Rust expects absolute heights. + # Both produce equivalent results when properly configured. 
+ result = svfForProcessing153( + dsm, + cdsm_rel, # Relative vegetation heights (UMEP expectation) + tdsm_rel, # Relative trunk heights (UMEP expectation) + scale, + 1, # usevegdem (1 = True) + ) + + # Save SVF components (mapping UMEP Python keys to fixture names) + np.save(FIXTURES_DIR / "svf_total.npy", np.array(result["svf"])) + np.save(FIXTURES_DIR / "svf_north.npy", np.array(result["svfN"])) + np.save(FIXTURES_DIR / "svf_east.npy", np.array(result["svfE"])) + np.save(FIXTURES_DIR / "svf_south.npy", np.array(result["svfS"])) + np.save(FIXTURES_DIR / "svf_west.npy", np.array(result["svfW"])) + np.save(FIXTURES_DIR / "svf_veg.npy", np.array(result["svfveg"])) + + # Save metadata + np.savez( + FIXTURES_DIR / "svf_metadata.npz", + dsm_shape=dsm.shape, + scale=scale, + amaxvalue=amaxvalue, + ) + + print(" SVF fixtures saved.") + + +def generate_input_fixtures(): + """Save input data as fixtures for test isolation.""" + print("Generating input fixtures...") + + SWC = SolweigRunCore( + config_path_str=CONFIG_PATH, + params_json_path=PARAMS_PATH, + ) + + # Save input rasters + assert SWC.raster_data.cdsm is not None + assert SWC.raster_data.tdsm is not None + assert SWC.raster_data.bush is not None + np.save(FIXTURES_DIR / "input_dsm.npy", SWC.raster_data.dsm.astype(np.float32)) + np.save(FIXTURES_DIR / "input_cdsm.npy", SWC.raster_data.cdsm.astype(np.float32)) + np.save(FIXTURES_DIR / "input_tdsm.npy", SWC.raster_data.tdsm.astype(np.float32)) + np.save(FIXTURES_DIR / "input_bush.npy", SWC.raster_data.bush.astype(np.float32)) + np.save(FIXTURES_DIR / "input_wall_ht.npy", SWC.raster_data.wallheight.astype(np.float32)) + np.save(FIXTURES_DIR / "input_wall_asp.npy", SWC.raster_data.wallaspect.astype(np.float32)) + + # Save scalar parameters + np.savez( + FIXTURES_DIR / "input_params.npz", + scale=SWC.raster_data.scale, + amaxvalue=SWC.raster_data.amaxvalue, + ) + + print(" Input fixtures saved.") + + +def generate_gvf_fixtures(): + """ + Generate golden fixtures for GVF 
calculations using UMEP Python as ground truth. + """ + print("Generating GVF fixtures (using UMEP Python as ground truth)...") + + from scipy import ndimage + from umep.functions.SOLWEIGpython.gvf_2018a import gvf_2018a + + SBC = 5.67e-8 # Stefan-Boltzmann constant + + # Load input data + dsm = np.load(FIXTURES_DIR / "input_dsm.npy") + wall_ht = np.load(FIXTURES_DIR / "input_wall_ht.npy") + wall_asp = np.load(FIXTURES_DIR / "input_wall_asp.npy") + shadow_noon_bldg = np.load(FIXTURES_DIR / "shadow_noon_bldg_sh.npy") + shadow_noon_veg = np.load(FIXTURES_DIR / "shadow_noon_veg_sh.npy") + wall_sun = np.load(FIXTURES_DIR / "shadow_noon_wall_sun.npy") + params = dict(np.load(FIXTURES_DIR / "input_params.npz")) + + rows, cols = dsm.shape + scale = float(params["scale"]) + + # Create building mask (same logic as Rust) + wall_mask = wall_ht > 0 + struct = ndimage.generate_binary_structure(2, 2) + iterations = int(25 / scale) + 1 + dilated = ndimage.binary_dilation(wall_mask, struct, iterations=iterations) + buildings = (~dilated).astype(np.float64) + + # Inputs matching the test parameters + shadow = (shadow_noon_bldg * shadow_noon_veg).astype(np.float64) + + # Create realistic spatially-varying ground temperature + # - Base: air temperature (25°C) + # - Sunlit areas: +8-12°C warmer (solar heating) + # - Shaded areas: +0-2°C above air temp + # - Add slight random variation for surface heterogeneity + ta = 25.0 + np.random.seed(42) # Reproducible + sun_heating = 10.0 * shadow # shadow=1 means sunlit, shadow=0 means shaded + random_variation = np.random.normal(0, 1.0, (rows, cols)) + tg = (ta + sun_heating + random_variation).astype(np.float64) + + emis_grid = np.full((rows, cols), 0.95, dtype=np.float64) + alb_grid = np.full((rows, cols), 0.15, dtype=np.float64) + lc_grid = None # No land cover grid + + # GVF parameters (matching test_golden_gvf.py) + first = 2.0 # round(height) + second = 36.0 # round(height * 20) for 1.8m + tgwall = 2.0 + ewall = 0.90 + albedo_b = 0.20 + 
twater = 25.0 + landcover = False + + # Call UMEP Python gvf_2018a + ( + gvfLup, + gvfalb, + gvfalbnosh, + gvfLupE, + gvfalbE, + gvfalbnoshE, + gvfLupS, + gvfalbS, + gvfalbnoshS, + gvfLupW, + gvfalbW, + gvfalbnoshW, + gvfLupN, + gvfalbN, + gvfalbnoshN, + gvfSum, + gvfNorm, + ) = gvf_2018a( + wall_sun.astype(np.float64), + wall_ht.astype(np.float64), + buildings, + scale, + shadow, + first, + second, + wall_asp.astype(np.float64), + tg, + tgwall, + ta, + emis_grid, + ewall, + alb_grid, + SBC, + albedo_b, + rows, + cols, + twater, + lc_grid, + landcover, + ) + + # Save fixtures + np.save(FIXTURES_DIR / "gvf_lup.npy", gvfLup.astype(np.float32)) + np.save(FIXTURES_DIR / "gvf_alb.npy", gvfalb.astype(np.float32)) + np.save(FIXTURES_DIR / "gvf_norm.npy", gvfNorm.astype(np.float32)) + np.save(FIXTURES_DIR / "gvf_input_tg.npy", tg.astype(np.float32)) # Ground temperature input + + print(" GVF fixtures saved (from UMEP Python).") + + +def generate_radiation_fixtures(): + """ + Generate golden fixtures for radiation (Kside/Lside) using UMEP Python as ground truth. + + Uses isotropic mode (anisotropic_diffuse=0, anisotropic_longwave=False) which + doesn't require the complex shadow matrices. 
+ """ + print("Generating radiation fixtures (using UMEP Python as ground truth)...") + + from umep.functions.SOLWEIGpython.Kside_veg_v2022a import Kside_veg_v2022a + from umep.functions.SOLWEIGpython.Lside_veg_v2022a import Lside_veg_v2022a + + SBC = 5.67e-8 # Stefan-Boltzmann constant + + # Load SVF data + svf = np.load(FIXTURES_DIR / "svf_total.npy").astype(np.float64) + svf_n = np.load(FIXTURES_DIR / "svf_north.npy").astype(np.float64) + svf_e = np.load(FIXTURES_DIR / "svf_east.npy").astype(np.float64) + svf_s = np.load(FIXTURES_DIR / "svf_south.npy").astype(np.float64) + svf_w = np.load(FIXTURES_DIR / "svf_west.npy").astype(np.float64) + svf_veg = np.load(FIXTURES_DIR / "svf_veg.npy").astype(np.float64) + + # Load shadow data + shadow_bldg = np.load(FIXTURES_DIR / "shadow_noon_bldg_sh.npy").astype(np.float64) + shadow_veg = np.load(FIXTURES_DIR / "shadow_noon_veg_sh.npy").astype(np.float64) + shadow = shadow_bldg * shadow_veg + + rows, cols = svf.shape + + # Weather parameters (matching test_golden_radiation.py) + ta = 25.0 + rad_i = 600.0 + rad_d = 200.0 + rad_g = 800.0 + esky = 0.75 + ci = 0.85 + azimuth = 180.0 # Solar noon + altitude = 60.0 + psi = 0.5 # Vegetation transmissivity + t = 0.0 # Orientation offset + albedo = 0.20 + tw = 2.0 # Wall temperature offset + ewall = 0.90 + + # Synthetic arrays + f_sh = np.full((rows, cols), 0.5, dtype=np.float64) + kup_base = np.full((rows, cols), 50.0, dtype=np.float64) + + # Kside calculation (isotropic mode: anisotropic_diffuse=0) + # In isotropic mode, lv, diffsh, asvf, shmat, vegshmat, vbshvegshmat are not used + Keast, Ksouth, Kwest, Knorth, KsideI, KsideD, Kside = Kside_veg_v2022a( + rad_i, + rad_d, + rad_g, + shadow, + svf_s, + svf_w, + svf_n, + svf_e, + svf_veg, + svf_veg, + svf_veg, + svf_veg, # svf_*_veg + azimuth, + altitude, + psi, + t, + albedo, + f_sh, + kup_base.copy(), + kup_base.copy(), + kup_base.copy(), + kup_base.copy(), + cyl=True, # Use cylinder model + lv=None, + anisotropic_diffuse=0, # 
Isotropic mode + diffsh=None, + rows=rows, + cols=cols, + asvf=None, + shmat=None, + vegshmat=None, + vbshvegshmat=None, + ) + + np.save(FIXTURES_DIR / "radiation_kside_e.npy", Keast.astype(np.float32)) + np.save(FIXTURES_DIR / "radiation_kside_s.npy", Ksouth.astype(np.float32)) + + # Lside calculation (isotropic mode: anisotropic_longwave=False) + ta_k = ta + 273.15 + ldown_base = esky * SBC * (ta_k**4) + ldown = np.full((rows, cols), ldown_base, dtype=np.float64) + lup_base = 0.95 * SBC * (ta_k**4) + lup = np.full((rows, cols), lup_base, dtype=np.float64) + + Least, Lsouth, Lwest, Lnorth = Lside_veg_v2022a( + svf_s, + svf_w, + svf_n, + svf_e, + svf_veg, + svf_veg, + svf_veg, + svf_veg, # svf_*_veg + svf_veg, + svf_veg, + svf_veg, + svf_veg, # svf_*_aveg + azimuth, + altitude, + ta, + tw, + SBC, + ewall, + ldown, + esky, + t, + f_sh, + ci, + lup.copy(), + lup.copy(), + lup.copy(), + lup.copy(), + anisotropic_longwave=False, + ) + + np.save(FIXTURES_DIR / "radiation_lside_e.npy", Least.astype(np.float32)) + np.save(FIXTURES_DIR / "radiation_lside_s.npy", Lsouth.astype(np.float32)) + + print(" Radiation fixtures saved (from UMEP Python).") + + +def generate_utci_fixtures(): + """ + Generate golden fixtures for UTCI calculations using UMEP Python as ground truth. + + Tests both single-point and grid calculations with various input combinations. 
+ """ + print("Generating UTCI fixtures (using UMEP Python as ground truth)...") + + from umep.functions.SOLWEIGpython.UTCI_calculations import utci_calculator + + # Test cases: (ta, rh, tmrt, va10m, description) + # Cover a range of realistic outdoor conditions + test_cases = [ + # Normal comfortable conditions + (20.0, 50.0, 22.0, 1.5, "comfortable"), + # Hot summer day + (35.0, 40.0, 55.0, 1.0, "hot_summer"), + # Cold winter day + (5.0, 70.0, 2.0, 3.0, "cold_winter"), + # High humidity tropical + (30.0, 85.0, 38.0, 0.5, "tropical"), + # Windy conditions + (25.0, 60.0, 30.0, 8.0, "windy"), + # High radiation (large Tmrt-Ta delta) + (25.0, 45.0, 60.0, 2.0, "high_radiation"), + # Low wind edge case (minimum valid wind) + (22.0, 55.0, 28.0, 0.1, "low_wind"), + ] + + # Calculate UTCI for each test case using UMEP Python + utci_inputs = [] + utci_outputs = [] + + for ta, rh, tmrt, va, desc in test_cases: + utci = utci_calculator(ta, rh, tmrt, va) + utci_inputs.append([ta, rh, tmrt, va]) + utci_outputs.append(utci) + print(f" {desc}: Ta={ta}°C, RH={rh}%, Tmrt={tmrt}°C, va={va}m/s -> UTCI={utci:.2f}°C") + + # Save fixtures + np.savez( + FIXTURES_DIR / "utci_single_point.npz", + inputs=np.array(utci_inputs, dtype=np.float32), # [n_tests, 4] -> [ta, rh, tmrt, va] + outputs=np.array(utci_outputs, dtype=np.float32), # [n_tests] + descriptions=[desc for _, _, _, _, desc in test_cases], + ) + + # Also generate a grid test case using existing Tmrt-like data + # Create a synthetic Tmrt grid (using SVF as spatial variability source) + svf = np.load(FIXTURES_DIR / "svf_total.npy") + rows, cols = svf.shape + + # Tmrt varies with SVF: lower SVF = more enclosed = warmer Tmrt + ta_grid = 25.0 + tmrt_grid = ta_grid + 15.0 * (1 - svf) + 10.0 * svf # Range: 25-40°C + tmrt_grid = tmrt_grid.astype(np.float32) + + # Wind speed also varies spatially (lower in enclosed areas) + va_grid = 0.5 + 3.0 * svf # Range: 0.5-3.5 m/s + va_grid = va_grid.astype(np.float32) + + # Calculate UTCI grid 
using UMEP Python (scalar Ta and RH) + from umep.functions.SOLWEIGpython.UTCI_calculations import utci_calculator_grid + + class DummyFeedback: + def isCanceled(self): + return False + + def setProgress(self, _): + pass + + def setProgressText(self, _): + pass + + utci_grid = utci_calculator_grid(ta_grid, 50.0, tmrt_grid, va_grid, DummyFeedback()) + + np.save(FIXTURES_DIR / "utci_grid_tmrt.npy", tmrt_grid) + np.save(FIXTURES_DIR / "utci_grid_va.npy", va_grid) + np.save(FIXTURES_DIR / "utci_grid_output.npy", utci_grid.astype(np.float32)) + np.savez( + FIXTURES_DIR / "utci_grid_params.npz", + ta=ta_grid, + rh=50.0, + ) + + print(" UTCI fixtures saved (from UMEP Python).") + + +def generate_pet_fixtures(): + """ + Generate golden fixtures for PET calculations using UMEP Python as ground truth. + + PET uses an iterative solver and is much slower than UTCI (~50x). + """ + print("Generating PET fixtures (using UMEP Python as ground truth)...") + + from umep.functions.SOLWEIGpython.PET_calculations import _PET + + # Default person parameters (standard adult) + # mbody, age, height, activity, clo, sex + mbody = 75.0 # kg + age = 35.0 # years + height = 1.80 # meters + activity = 80.0 # W (walking slowly) + clo = 0.9 # summer clothing + sex = 1 # male + + # Test cases: (ta, rh, tmrt, va, description) + test_cases = [ + # Comfortable conditions + (20.0, 50.0, 22.0, 1.0, "comfortable"), + # Hot summer day + (35.0, 40.0, 55.0, 1.0, "hot_summer"), + # Cold winter day + (5.0, 70.0, 2.0, 2.0, "cold_winter"), + # High humidity tropical + (30.0, 85.0, 38.0, 0.5, "tropical"), + # High radiation (large Tmrt-Ta delta) + (25.0, 45.0, 55.0, 1.5, "high_radiation"), + ] + + # Calculate PET for each test case using UMEP Python + pet_inputs = [] + pet_outputs = [] + + for ta, rh, tmrt, va, desc in test_cases: + pet_val = _PET(ta, rh, tmrt, va, mbody, age, height, activity, clo, sex) + pet_inputs.append([ta, rh, tmrt, va]) + pet_outputs.append(pet_val) + print(f" {desc}: Ta={ta}°C, RH={rh}%, 
Tmrt={tmrt}°C, va={va}m/s -> PET={pet_val:.2f}°C") + + # Save fixtures + np.savez( + FIXTURES_DIR / "pet_single_point.npz", + inputs=np.array(pet_inputs, dtype=np.float32), # [n_tests, 4] -> [ta, rh, tmrt, va] + outputs=np.array(pet_outputs, dtype=np.float32), # [n_tests] + descriptions=[desc for _, _, _, _, desc in test_cases], + # Person parameters + mbody=mbody, + age=age, + height=height, + activity=activity, + clo=clo, + sex=sex, + ) + + # Grid test - use small subset due to slow PET calculation + print(" Computing PET grid (small subset, this may take a moment)...") + from umep.functions.SOLWEIGpython.PET_calculations import PET_person, calculate_PET_grid + + # Use existing UTCI grid inputs but crop to smaller size + tmrt_full = np.load(FIXTURES_DIR / "utci_grid_tmrt.npy") + va_full = np.load(FIXTURES_DIR / "utci_grid_va.npy") + + # Crop to 20x20 for faster calculation + crop_size = 20 + tmrt_crop = tmrt_full[:crop_size, :crop_size].copy() + va_crop = va_full[:crop_size, :crop_size].copy() + + ta_grid = 25.0 + rh_grid = 50.0 + + pet_person = PET_person(mbody=mbody, age=age, height=height, activity=activity, sex=sex, clo=clo) + + class DummyFeedback: + def isCanceled(self): + return False + + def setProgress(self, _): + pass + + def setProgressText(self, _): + pass + + pet_grid = calculate_PET_grid(ta_grid, rh_grid, tmrt_crop, va_crop, pet_person, DummyFeedback()) + + np.save(FIXTURES_DIR / "pet_grid_tmrt.npy", tmrt_crop.astype(np.float32)) + np.save(FIXTURES_DIR / "pet_grid_va.npy", va_crop.astype(np.float32)) + np.save(FIXTURES_DIR / "pet_grid_output.npy", pet_grid.astype(np.float32)) + np.savez( + FIXTURES_DIR / "pet_grid_params.npz", + ta=ta_grid, + rh=rh_grid, + mbody=mbody, + age=age, + height=height, + activity=activity, + clo=clo, + sex=sex, + ) + + print(" PET fixtures saved (from UMEP Python).") + + +def generate_tmrt_fixtures(): + """ + Generate golden fixtures for Tmrt calculations. 
+ + Tmrt is computed from radiation budget using the Stefan-Boltzmann formula: + Tmrt = (Sstr / (abs_l * SBC))^0.25 - 273.15 + + We create synthetic but physically-consistent radiation inputs and compute + the expected Tmrt using the same formula as UMEP Python. + """ + print("Generating Tmrt fixtures...") + + SBC = 5.67e-8 # Stefan-Boltzmann constant + rows, cols = 30, 30 + + # Standard absorption coefficients + abs_k = 0.70 # shortwave + abs_l = 0.97 # longwave + + # View factors for standing posture (cylinder) + f_up = 0.06 + f_side = 0.22 + f_cyl = 0.28 + + # Create synthetic radiation inputs + # Use realistic values for summer day conditions + np.random.seed(42) + + # Base radiation values (W/m²) + kdown_base = 800.0 # Global shortwave + kup_base = 120.0 # Reflected shortwave + ldown_base = 380.0 # Atmospheric longwave + lup_base = 450.0 # Surface longwave + + # Create spatial variation (buildings cause shadows, lower Kdown) + svf = np.load(FIXTURES_DIR / "svf_total.npy")[:rows, :cols] # Use SVF for spatial variation + + # Radiation varies with SVF + kdown = (kdown_base * svf + np.random.uniform(0, 50, (rows, cols))).astype(np.float32) + kup = (kup_base * svf + np.random.uniform(0, 20, (rows, cols))).astype(np.float32) + ldown = np.full((rows, cols), ldown_base, dtype=np.float32) + np.random.uniform(-10, 10, (rows, cols)).astype( + np.float32 + ) + lup = (lup_base + 30 * (1 - svf) + np.random.uniform(-5, 5, (rows, cols))).astype(np.float32) + + # Directional radiation (simplified) + kside_n = (0.1 * kdown + np.random.uniform(0, 30, (rows, cols))).astype(np.float32) + kside_e = (0.15 * kdown + np.random.uniform(0, 30, (rows, cols))).astype(np.float32) + kside_s = (0.25 * kdown + np.random.uniform(0, 30, (rows, cols))).astype(np.float32) + kside_w = (0.12 * kdown + np.random.uniform(0, 30, (rows, cols))).astype(np.float32) + kside_total = (0.5 * kdown + np.random.uniform(0, 50, (rows, cols))).astype(np.float32) + + lside_n = (0.25 * ldown + np.random.uniform(0, 
20, (rows, cols))).astype(np.float32) + lside_e = (0.25 * ldown + np.random.uniform(0, 20, (rows, cols))).astype(np.float32) + lside_s = (0.25 * ldown + np.random.uniform(0, 20, (rows, cols))).astype(np.float32) + lside_w = (0.25 * ldown + np.random.uniform(0, 20, (rows, cols))).astype(np.float32) + lside_total = (0.6 * ldown + np.random.uniform(0, 30, (rows, cols))).astype(np.float32) + + # Compute expected Tmrt using UMEP formula (anisotropic mode) + # Sstr = absK * (Kside * Fcyl + (Kdown + Kup) * Fup + (Knorth + Keast + Ksouth + Kwest) * Fside) + # + absL * ((Ldown + Lup) * Fup + Lside * Fcyl + (Lnorth + Least + Lsouth + Lwest) * Fside) + + # Anisotropic (use_aniso=True) + sstr_aniso = abs_k * ( + kside_total * f_cyl + (kdown + kup) * f_up + (kside_n + kside_e + kside_s + kside_w) * f_side + ) + abs_l * ((ldown + lup) * f_up + lside_total * f_cyl + (lside_n + lside_e + lside_s + lside_w) * f_side) + tmrt_aniso = np.sqrt(np.sqrt(sstr_aniso / (abs_l * SBC))) - 273.15 + tmrt_aniso = np.clip(tmrt_aniso, -50, 80).astype(np.float32) + + # Isotropic (use_aniso=False) + # In isotropic mode, no Lside*Fcyl term for longwave (only directional components) + sstr_iso = abs_k * ( + kside_total * f_cyl + (kdown + kup) * f_up + (kside_n + kside_e + kside_s + kside_w) * f_side + ) + abs_l * ((ldown + lup) * f_up + (lside_n + lside_e + lside_s + lside_w) * f_side) + tmrt_iso = np.sqrt(np.sqrt(sstr_iso / (abs_l * SBC))) - 273.15 + tmrt_iso = np.clip(tmrt_iso, -50, 80).astype(np.float32) + + # Save inputs + np.save(FIXTURES_DIR / "tmrt_input_kdown.npy", kdown) + np.save(FIXTURES_DIR / "tmrt_input_kup.npy", kup) + np.save(FIXTURES_DIR / "tmrt_input_ldown.npy", ldown) + np.save(FIXTURES_DIR / "tmrt_input_lup.npy", lup) + np.save(FIXTURES_DIR / "tmrt_input_kside_n.npy", kside_n) + np.save(FIXTURES_DIR / "tmrt_input_kside_e.npy", kside_e) + np.save(FIXTURES_DIR / "tmrt_input_kside_s.npy", kside_s) + np.save(FIXTURES_DIR / "tmrt_input_kside_w.npy", kside_w) + np.save(FIXTURES_DIR / 
"tmrt_input_kside_total.npy", kside_total) + np.save(FIXTURES_DIR / "tmrt_input_lside_n.npy", lside_n) + np.save(FIXTURES_DIR / "tmrt_input_lside_e.npy", lside_e) + np.save(FIXTURES_DIR / "tmrt_input_lside_s.npy", lside_s) + np.save(FIXTURES_DIR / "tmrt_input_lside_w.npy", lside_w) + np.save(FIXTURES_DIR / "tmrt_input_lside_total.npy", lside_total) + + # Save expected outputs + np.save(FIXTURES_DIR / "tmrt_output_aniso.npy", tmrt_aniso) + np.save(FIXTURES_DIR / "tmrt_output_iso.npy", tmrt_iso) + + np.savez( + FIXTURES_DIR / "tmrt_params.npz", + abs_k=abs_k, + abs_l=abs_l, + f_up=f_up, + f_side=f_side, + f_cyl=f_cyl, + ) + + print(f" Tmrt range (aniso): {tmrt_aniso.min():.1f}°C to {tmrt_aniso.max():.1f}°C") + print(f" Tmrt range (iso): {tmrt_iso.min():.1f}°C to {tmrt_iso.max():.1f}°C") + print(" Tmrt fixtures saved.") + + +def generate_ground_temp_fixtures(): + """ + Generate golden fixtures for ground temperature (TsWaveDelay) calculations. + + TsWaveDelay implements thermal inertia for ground temperature using an + exponential decay model with decay constant 33.27 day⁻¹. 
+ + Formula: Lup = Tgmap0 * (1 - weight) + Tgmap1 * weight + where: weight = exp(-33.27 * timeadd) + """ + print("Generating ground temperature fixtures (using UMEP Python as ground truth)...") + + from umep.functions.SOLWEIGpython.TsWaveDelay_2015a import TsWaveDelay_2015a + + rows, cols = 20, 20 + + # Create synthetic gvfLup (current radiative equilibrium) and Tgmap1 (previous temp) + np.random.seed(42) + + # Current radiative equilibrium temperature (varies spatially) + gvfLup = (400 + np.random.uniform(-20, 20, (rows, cols))).astype(np.float64) + + # Previous temperature (slightly different) + Tgmap1_init = (380 + np.random.uniform(-15, 15, (rows, cols))).astype(np.float64) + + # Test case 1: First timestep of the day (firstdaytime=1) + Lup1, timeadd1, Tgmap1_1 = TsWaveDelay_2015a( + gvfLup=gvfLup.copy(), + firstdaytime=1, + timeadd=0.0, + timestepdec=30 / 1440, # 30 minutes + Tgmap1=Tgmap1_init.copy(), + ) + + # Test case 2: Short timestep accumulation (timeadd < 59 min) + Lup2, timeadd2, Tgmap1_2 = TsWaveDelay_2015a( + gvfLup=gvfLup.copy(), + firstdaytime=0, + timeadd=30 / 1440, # 30 minutes accumulated + timestepdec=30 / 1440, # 30 minute step + Tgmap1=Tgmap1_init.copy(), + ) + + # Test case 3: Long timestep (timeadd >= 59 min) + Lup3, timeadd3, Tgmap1_3 = TsWaveDelay_2015a( + gvfLup=gvfLup.copy(), + firstdaytime=0, + timeadd=60 / 1440, # 60 minutes accumulated (above threshold) + timestepdec=60 / 1440, # 60 minute step + Tgmap1=Tgmap1_init.copy(), + ) + + # Save inputs + np.save(FIXTURES_DIR / "ground_temp_input_gvflup.npy", gvfLup.astype(np.float32)) + np.save(FIXTURES_DIR / "ground_temp_input_tgmap1.npy", Tgmap1_init.astype(np.float32)) + + # Save outputs for each test case + np.savez( + FIXTURES_DIR / "ground_temp_case1.npz", + lup=Lup1.astype(np.float32), + timeadd=timeadd1, + tgmap1=Tgmap1_1.astype(np.float32), + input_firstdaytime=1, + input_timeadd=0.0, + input_timestepdec=30 / 1440, + ) + + np.savez( + FIXTURES_DIR / "ground_temp_case2.npz", + 
lup=Lup2.astype(np.float32), + timeadd=timeadd2, + tgmap1=Tgmap1_2.astype(np.float32), + input_firstdaytime=0, + input_timeadd=30 / 1440, + input_timestepdec=30 / 1440, + ) + + np.savez( + FIXTURES_DIR / "ground_temp_case3.npz", + lup=Lup3.astype(np.float32), + timeadd=timeadd3, + tgmap1=Tgmap1_3.astype(np.float32), + input_firstdaytime=0, + input_timeadd=60 / 1440, + input_timestepdec=60 / 1440, + ) + + print(f" Case 1 (first morning): Lup range {Lup1.min():.1f}-{Lup1.max():.1f}") + print(f" Case 2 (short step): Lup range {Lup2.min():.1f}-{Lup2.max():.1f}, timeadd={timeadd2:.4f}") + print(f" Case 3 (long step): Lup range {Lup3.min():.1f}-{Lup3.max():.1f}, timeadd={timeadd3:.4f}") + print(" Ground temperature fixtures saved (from UMEP Python).") + + +def generate_anisotropic_sky_fixtures(): + """ + Generate golden fixtures for anisotropic sky radiation model. + + Uses the Rust implementation to generate reference values for regression testing. + The anisotropic sky model computes direction-dependent longwave and shortwave + radiation from sky patches, vegetation, and buildings. 
+ """ + print("Generating anisotropic sky fixtures (using Rust implementation)...") + + from solweig.rustalgos import sky + + # Load base inputs + dsm = np.load(FIXTURES_DIR / "input_dsm.npy").astype(np.float32) + svf = np.load(FIXTURES_DIR / "svf_total.npy").astype(np.float32) + shadow_bldg = np.load(FIXTURES_DIR / "shadow_noon_bldg_sh.npy").astype(np.float32) + shadow_veg = np.load(FIXTURES_DIR / "shadow_noon_veg_sh.npy").astype(np.float32) + + rows, cols = dsm.shape + SBC = 5.67e-8 + + # Generate sky patches (simplified Tregenza-style) + def generate_sky_patches(n_alt_bands=4): + patches = [] + alt_bands = [6, 18, 30, 42] + azis_per_band = [30, 24, 24, 18] + for alt, n_azi in zip(alt_bands[:n_alt_bands], azis_per_band[:n_alt_bands], strict=False): + azi_step = 360.0 / n_azi if n_azi > 1 else 0 + for azi_idx in range(n_azi): + azi = azi_idx * azi_step + patches.append([alt, azi]) + return np.array(patches, dtype=np.float32) + + def compute_steradians(l_patches): + n_patches = len(l_patches) + steradians = np.zeros(n_patches, dtype=np.float32) + deg2rad = np.pi / 180.0 + altitudes = l_patches[:, 0] + unique_alts = np.unique(altitudes) + for i, alt in enumerate(unique_alts): + mask = altitudes == alt + count = np.sum(mask) + if i == 0: + ster = (360.0 / count * deg2rad) * np.sin(alt * deg2rad) + else: + prev_alt = unique_alts[i - 1] + delta_alt = (alt - prev_alt) / 2 + ster = (360.0 / count * deg2rad) * ( + np.sin((alt + delta_alt) * deg2rad) - np.sin((prev_alt + delta_alt) * deg2rad) + ) + steradians[mask] = ster + return steradians + + l_patches = generate_sky_patches(n_alt_bands=4) + n_patches = len(l_patches) + steradians = compute_steradians(l_patches) + + # Create 3D shadow matrices + svf_expanded = svf[:, :, np.newaxis] + base_visibility = np.broadcast_to(svf_expanded, (rows, cols, n_patches)).copy() + bldg_factor = shadow_bldg[:, :, np.newaxis] + veg_factor = shadow_veg[:, :, np.newaxis] + + shmat = (base_visibility * np.broadcast_to(bldg_factor, (rows, 
cols, n_patches))).astype(np.float32) + shmat = (shmat > 0.5).astype(np.float32) + vegshmat = (base_visibility * np.broadcast_to(veg_factor, (rows, cols, n_patches))).astype(np.float32) + vegshmat = (vegshmat > 0.3).astype(np.float32) + vbshvegshmat = (shmat * vegshmat).astype(np.float32) + + # Other inputs + asvf = svf.astype(np.float32) + luminance = 1000 + 500 * np.sin(l_patches[:, 0] * np.pi / 180) + lv = np.column_stack([l_patches, luminance]).astype(np.float32) + ta = 25.0 + ta_k = ta + 273.15 + lup_val = 0.95 * SBC * (ta_k**4) + lup = np.full((rows, cols), lup_val, dtype=np.float32) + shadow = (shadow_bldg * shadow_veg).astype(np.float32) + kup_base = np.full((rows, cols), 50.0, dtype=np.float32) + + # Create parameter objects + sun_params = sky.SunParams(altitude=60.0, azimuth=180.0) + sky_params = sky.SkyParams(esky=0.75, ta=25.0, cyl=True, wall_scheme=False, albedo=0.20) + surface_params = sky.SurfaceParams(tgwall=2.0, ewall=0.90, rad_i=600.0, rad_d=200.0) + + # Compute result + result = sky.anisotropic_sky( + shmat, + vegshmat, + vbshvegshmat, + sun_params, + asvf, + sky_params, + l_patches, + None, + None, + steradians, + surface_params, + lup, + lv, + shadow, + kup_base.copy(), + kup_base.copy(), + kup_base.copy(), + kup_base.copy(), + ) + + # Save outputs + np.savez( + FIXTURES_DIR / "aniso_sky_output.npz", + ldown=np.array(result.ldown), + lside=np.array(result.lside), + lside_sky=np.array(result.lside_sky), + lside_veg=np.array(result.lside_veg), + kside=np.array(result.kside), + kside_i=np.array(result.kside_i), + kside_d=np.array(result.kside_d), + # Input parameters for reproducibility + sun_altitude=60.0, + sun_azimuth=180.0, + ta=25.0, + esky=0.75, + ) + + print(f" Ldown range: {np.array(result.ldown).min():.1f}-{np.array(result.ldown).max():.1f} W/m²") + print(f" Lside range: {np.array(result.lside).min():.1f}-{np.array(result.lside).max():.1f} W/m²") + print(f" Kside range: {np.array(result.kside).min():.1f}-{np.array(result.kside).max():.1f} 
W/m²") + print(" Anisotropic sky fixtures saved (from Rust implementation).") + + +def generate_wall_temp_fixtures(): + """ + Generate golden fixtures for wall temperature deviation calculations. + + Uses the Rust implementation to generate reference values for regression testing. + """ + print("Generating wall temperature fixtures (using Rust implementation)...") + + from solweig.rustalgos import ground + + rows, cols = 20, 20 + np.random.seed(42) + + # Test parameters + ta = 25.0 + sun_altitude = 45.0 # Moderate sun + altmax = 65.0 # Max altitude for day + dectime = 0.5 # Noon (12:00 as fraction of day) + snup = 0.25 # Sunrise at 6:00 (6/24 = 0.25) + global_rad = 600.0 # W/m² + rad_g0 = 800.0 # Clear sky + zen_deg = 45.0 # = 90 - altitude + + # Land cover parameters (per-pixel grids) + alb_grid = np.full((rows, cols), 0.15, dtype=np.float32) + emis_grid = np.full((rows, cols), 0.95, dtype=np.float32) + + # TgK and Tstart vary by land cover type (grass, asphalt, concrete, etc.) + # Grass: TgK=0.37, Tstart=-3.41 + # Asphalt: TgK=0.50, Tstart=-2.0 + tgk_grid = np.full((rows, cols), 0.37, dtype=np.float32) + tgk_grid[:10, :] = 0.50 # Upper half is asphalt + + tstart_grid = np.full((rows, cols), -3.41, dtype=np.float32) + tstart_grid[:10, :] = -2.0 # Upper half is asphalt + + tmaxlst_grid = np.full((rows, cols), 15.0, dtype=np.float32) # Max temp at 15:00 + + # Use Rust implementation to generate expected values + tg, tg_wall, ci_tg, alb_out, emis_out = ground.compute_ground_temperature( + ta, + sun_altitude, + altmax, + dectime, + snup, + global_rad, + rad_g0, + zen_deg, + alb_grid, + emis_grid, + tgk_grid, + tstart_grid, + tmaxlst_grid, + ) + tg_expected = np.array(tg) + tg_wall_expected = float(tg_wall) + ci_tg_expected = float(ci_tg) + + # Save inputs + np.save(FIXTURES_DIR / "wall_temp_input_alb.npy", alb_grid) + np.save(FIXTURES_DIR / "wall_temp_input_emis.npy", emis_grid) + np.save(FIXTURES_DIR / "wall_temp_input_tgk.npy", tgk_grid) + np.save(FIXTURES_DIR / 
"wall_temp_input_tstart.npy", tstart_grid) + np.save(FIXTURES_DIR / "wall_temp_input_tmaxlst.npy", tmaxlst_grid) + + # Save expected outputs and parameters + np.savez( + FIXTURES_DIR / "wall_temp_output.npz", + tg=tg_expected, + tg_wall=tg_wall_expected, + ci_tg=ci_tg_expected, + # Input parameters + ta=ta, + sun_altitude=sun_altitude, + altmax=altmax, + dectime=dectime, + snup=snup, + global_rad=global_rad, + rad_g0=rad_g0, + zen_deg=zen_deg, + ) + + print(f" Tg range: {tg_expected.min():.2f}°C to {tg_expected.max():.2f}°C") + print(f" Tg_wall: {tg_wall_expected:.2f}°C") + print(f" CI_Tg: {ci_tg_expected:.4f}") + print(" Wall temperature fixtures saved.") + + +def generate_aniso_radiation_fixtures(): + """ + Generate golden fixtures for anisotropic radiation calculations. + + Uses the Rust implementation to generate reference values for regression testing. + Tests kside_veg and lside_veg in anisotropic mode. + """ + print("Generating anisotropic radiation fixtures (using Rust implementation)...") + + from solweig.constants import SBC + from solweig.rustalgos import vegetation + + # Load input data + dsm = np.load(FIXTURES_DIR / "input_dsm.npy").astype(np.float32) + svf = np.load(FIXTURES_DIR / "svf_total.npy").astype(np.float32) + svf_e = np.load(FIXTURES_DIR / "svf_east.npy").astype(np.float32) + svf_s = np.load(FIXTURES_DIR / "svf_south.npy").astype(np.float32) + svf_w = np.load(FIXTURES_DIR / "svf_west.npy").astype(np.float32) + svf_n = np.load(FIXTURES_DIR / "svf_north.npy").astype(np.float32) + svf_veg = np.load(FIXTURES_DIR / "svf_veg.npy").astype(np.float32) + shadow_bldg = np.load(FIXTURES_DIR / "shadow_noon_bldg_sh.npy").astype(np.float32) + shadow_veg = np.load(FIXTURES_DIR / "shadow_noon_veg_sh.npy").astype(np.float32) + + rows, cols = dsm.shape + shadow = (shadow_bldg * shadow_veg).astype(np.float32) + + ta, rad_i, rad_d, rad_g, esky, ci = 25.0, 600.0, 200.0, 800.0, 0.75, 0.85 + f_sh = np.full((rows, cols), 0.5, dtype=np.float32) + kup_base = 
np.full((rows, cols), 50.0, dtype=np.float32) + + # Generate sky patches (Tregenza-style) + def generate_sky_patches(n_alt_bands=4): + patches = [] + alt_bands = [6, 18, 30, 42] + azis_per_band = [30, 24, 24, 18] + for alt, n_azi in zip(alt_bands[:n_alt_bands], azis_per_band[:n_alt_bands], strict=False): + azi_step = 360.0 / n_azi if n_azi > 1 else 0 + for azi_idx in range(n_azi): + patches.append([alt, azi_idx * azi_step]) + return np.array(patches, dtype=np.float32) + + l_patches = generate_sky_patches(n_alt_bands=4) + n_patches = len(l_patches) + + # Create luminance values (Perez model simplified) + luminance = 1000 + 500 * np.sin(l_patches[:, 0] * np.pi / 180) + lv = np.column_stack([l_patches, luminance]).astype(np.float32) + + # Create 3D shadow matrices from SVF and shadows + svf_expanded = svf[:, :, np.newaxis] + base_visibility = np.broadcast_to(svf_expanded, (rows, cols, n_patches)).copy() + bldg_factor = shadow_bldg[:, :, np.newaxis] + veg_factor = shadow_veg[:, :, np.newaxis] + + shmat = (base_visibility * np.broadcast_to(bldg_factor, (rows, cols, n_patches))).astype(np.float32) + shmat = (shmat > 0.5).astype(np.float32) + vegshmat = (base_visibility * np.broadcast_to(veg_factor, (rows, cols, n_patches))).astype(np.float32) + vegshmat = (vegshmat > 0.3).astype(np.float32) + vbshvegshmat = (shmat * vegshmat).astype(np.float32) + + # Diffuse shadow (3D - same shape as shmat for diffuse sky patches) + diffsh = shmat.copy() # 3D array (rows, cols, patches) + asvf = svf.copy() + + # Compute anisotropic Kside + kside_result = vegetation.kside_veg( + rad_i, + rad_d, + rad_g, + shadow, + svf_s, + svf_w, + svf_n, + svf_e, + svf_veg, + svf_veg, + svf_veg, + svf_veg, + 180.0, + 60.0, # azimuth, altitude (noon) + 0.5, # psi (vegetation transmissivity) + 0.0, # t + 0.20, # albedo + f_sh, + kup_base.copy(), + kup_base.copy(), + kup_base.copy(), + kup_base.copy(), + True, # cyl + lv, # luminance values + True, # anisotropic_diffuse + diffsh, + asvf, + shmat, + 
vegshmat, + vbshvegshmat, + ) + + # Compute anisotropic Lside + ta_k = ta + 273.15 + ldown = np.full((rows, cols), esky * SBC * (ta_k**4), dtype=np.float32) + lup = np.full((rows, cols), 0.95 * SBC * (ta_k**4), dtype=np.float32) + + lside_result = vegetation.lside_veg( + svf_s, + svf_w, + svf_n, + svf_e, + svf_veg, + svf_veg, + svf_veg, + svf_veg, + svf_veg, + svf_veg, + svf_veg, + svf_veg, + 180.0, + 60.0, + ta, + 2.0, + SBC, + 0.90, + ldown, + esky, + 0.0, + f_sh, + ci, + lup.copy(), + lup.copy(), + lup.copy(), + lup.copy(), + True, # anisotropic_longwave + ) + + # Save Kside outputs + np.save(FIXTURES_DIR / "radiation_aniso_kside_e.npy", np.array(kside_result.keast)) + np.save(FIXTURES_DIR / "radiation_aniso_kside_s.npy", np.array(kside_result.ksouth)) + np.save(FIXTURES_DIR / "radiation_aniso_kside_i.npy", np.array(kside_result.kside_i)) + np.save(FIXTURES_DIR / "radiation_aniso_kside_d.npy", np.array(kside_result.kside_d)) + + # Save Lside outputs + np.save(FIXTURES_DIR / "radiation_aniso_lside_e.npy", np.array(lside_result.least)) + np.save(FIXTURES_DIR / "radiation_aniso_lside_s.npy", np.array(lside_result.lsouth)) + + print( + f" Kside East range: {np.array(kside_result.keast).min():.1f}-{np.array(kside_result.keast).max():.1f} W/m²" + ) + kside_i = np.array(kside_result.kside_i) + print(f" Kside Direct range: {kside_i.min():.1f}-{kside_i.max():.1f} W/m²") + print( + f" Lside East range: {np.array(lside_result.least).min():.1f}-{np.array(lside_result.least).max():.1f} W/m²" + ) + print(" Anisotropic radiation fixtures saved (from Rust implementation).") + + +def main(): + """Generate all golden fixtures using UMEP Python as ground truth.""" + print("=" * 60) + print("Golden Fixture Generator") + print("=" * 60) + print() + print("IMPORTANT: Shadow and SVF fixtures are generated using the original") + print("UMEP Python module as ground truth. 
GVF and radiation fixtures are") + print("generated from the current Rust implementation for regression testing.") + print() + print("The golden tests verify that implementations produce consistent,") + print("physically valid outputs.") + print("=" * 60) + + ensure_fixtures_dir() + + # Generate input fixtures first (for test isolation) + generate_input_fixtures() + + # Generate algorithm output fixtures (using UMEP Python) + generate_shadow_fixtures() + generate_svf_fixtures() + + # Generate GVF and radiation fixtures (using current Rust implementation) + # These are for regression testing - the overall Tmrt has been validated + try: + generate_gvf_fixtures() + generate_radiation_fixtures() + except ImportError as e: + print(f" Skipping GVF/radiation fixtures: {e}") + + # Generate UTCI fixtures (using UMEP Python as ground truth) + try: + generate_utci_fixtures() + except ImportError as e: + print(f" Skipping UTCI fixtures: {e}") + + # Generate PET fixtures (using UMEP Python as ground truth) + try: + generate_pet_fixtures() + except ImportError as e: + print(f" Skipping PET fixtures: {e}") + + # Generate Tmrt fixtures + try: + generate_tmrt_fixtures() + except Exception as e: + print(f" Skipping Tmrt fixtures: {e}") + + # Generate ground temperature fixtures (TsWaveDelay) + try: + generate_ground_temp_fixtures() + except ImportError as e: + print(f" Skipping ground temperature fixtures: {e}") + + # Generate wall temperature fixtures + try: + generate_wall_temp_fixtures() + except Exception as e: + print(f" Skipping wall temperature fixtures: {e}") + + # Generate anisotropic sky fixtures + try: + generate_anisotropic_sky_fixtures() + except Exception as e: + print(f" Skipping anisotropic sky fixtures: {e}") + + # Generate anisotropic radiation fixtures + try: + generate_aniso_radiation_fixtures() + except Exception as e: + print(f" Skipping anisotropic radiation fixtures: {e}") + + print("\n" + "=" * 60) + print("All fixtures generated successfully!") + 
print(f"Location: {FIXTURES_DIR}") + print("=" * 60) + + # List generated files + print("\nGenerated files:") + for f in sorted(FIXTURES_DIR.glob("*.npy")): + size_kb = f.stat().st_size / 1024 + print(f" {f.name}: {size_kb:.1f} KB") + for f in sorted(FIXTURES_DIR.glob("*.npz")): + size_kb = f.stat().st_size / 1024 + print(f" {f.name}: {size_kb:.1f} KB") + + +if __name__ == "__main__": + main() diff --git a/tests/golden/generate_report.py b/tests/golden/generate_report.py new file mode 100644 index 0000000..ec15d9f --- /dev/null +++ b/tests/golden/generate_report.py @@ -0,0 +1,1803 @@ +""" +Golden Test Visual Report Generator + +Generates a comprehensive Markdown report comparing current implementation +outputs against golden fixtures for regression testing. + +Usage: + uv run python tests/golden/generate_report.py + +Output: + temp/golden_report/golden_report.md + temp/golden_report/*.png +""" + +from datetime import datetime +from pathlib import Path + +import matplotlib.pyplot as plt +import numpy as np +from scipy import ndimage + +# Paths +FIXTURES_DIR = Path(__file__).parent / "fixtures" +REPORT_DIR = Path(__file__).parents[2] / "temp" / "golden_report" + + +def ensure_report_dir(): + """Create report directory if it doesn't exist.""" + REPORT_DIR.mkdir(parents=True, exist_ok=True) + + +def load_inputs(): + """Load all input fixtures.""" + return { + "dsm": np.load(FIXTURES_DIR / "input_dsm.npy"), + "cdsm": np.load(FIXTURES_DIR / "input_cdsm.npy"), + "tdsm": np.load(FIXTURES_DIR / "input_tdsm.npy"), + "bush": np.load(FIXTURES_DIR / "input_bush.npy"), + "wall_ht": np.load(FIXTURES_DIR / "input_wall_ht.npy"), + "wall_asp": np.load(FIXTURES_DIR / "input_wall_asp.npy"), + "params": dict(np.load(FIXTURES_DIR / "input_params.npz")), + } + + +def compute_shadows(inputs, azimuth, altitude): + """Compute shadows for given sun position.""" + from solweig.rustalgos import shadowing + + shadowing.disable_gpu() + return shadowing.calculate_shadows_wall_ht_25( + azimuth, + 
altitude, + float(inputs["params"]["scale"]), + float(inputs["params"]["amaxvalue"]), + inputs["dsm"].astype(np.float32), + inputs["cdsm"].astype(np.float32), + inputs["tdsm"].astype(np.float32), + inputs["bush"].astype(np.float32), + inputs["wall_ht"].astype(np.float32), + (inputs["wall_asp"] * np.pi / 180.0).astype(np.float32), + None, + None, + None, + ) + + +def compute_svf(inputs): + """Compute SVF.""" + from solweig.rustalgos import shadowing, skyview + + shadowing.disable_gpu() + return skyview.calculate_svf( + inputs["dsm"].astype(np.float32), + inputs["cdsm"].astype(np.float32), + inputs["tdsm"].astype(np.float32), + float(inputs["params"]["scale"]), + True, # usevegdem + float(inputs["params"]["amaxvalue"]), + 2, # patch_option + None, + None, + ) + + +def compute_gvf(inputs): + """Compute GVF.""" + from solweig.constants import SBC + from solweig.rustalgos import gvf as gvf_module + from solweig.rustalgos import shadowing + + shadowing.disable_gpu() + + rows, cols = inputs["dsm"].shape + scale = float(inputs["params"]["scale"]) + + # Building mask + wall_mask = inputs["wall_ht"] > 0 + struct = ndimage.generate_binary_structure(2, 2) + iterations = int(25 / scale) + 1 + dilated = ndimage.binary_dilation(wall_mask, struct, iterations=iterations) + buildings = (~dilated).astype(np.float32) + + # Load shadow data + shadow_bldg = np.load(FIXTURES_DIR / "shadow_noon_bldg_sh.npy") + shadow_veg = np.load(FIXTURES_DIR / "shadow_noon_veg_sh.npy") + wall_sun = np.load(FIXTURES_DIR / "shadow_noon_wall_sun.npy") + shadow = (shadow_bldg * shadow_veg).astype(np.float32) + + # Load ground temperature from fixture (spatially varying) + tg_path = FIXTURES_DIR / "gvf_input_tg.npy" + tg = np.load(tg_path).astype(np.float32) if tg_path.exists() else np.zeros((rows, cols), dtype=np.float32) + + emis_grid = np.full((rows, cols), 0.95, dtype=np.float32) + alb_grid = np.full((rows, cols), 0.15, dtype=np.float32) + + gvf_params = gvf_module.GvfScalarParams( + scale=scale, + 
first=2.0, + second=36.0, + tgwall=2.0, + ta=25.0, + ewall=0.90, + sbc=SBC, + albedo_b=0.20, + twater=25.0, + landcover=False, + ) + + return gvf_module.gvf_calc( + wall_sun.astype(np.float32), + inputs["wall_ht"].astype(np.float32), + buildings, + shadow, + inputs["wall_asp"].astype(np.float32), + tg, + emis_grid, + alb_grid, + None, + gvf_params, + ) + + +def compute_radiation(inputs): + """Compute Kside and Lside.""" + from solweig.constants import SBC + from solweig.rustalgos import shadowing, vegetation + + shadowing.disable_gpu() + + rows, cols = inputs["dsm"].shape + + # Load SVF and shadow data + svf_e = np.load(FIXTURES_DIR / "svf_east.npy").astype(np.float32) + svf_s = np.load(FIXTURES_DIR / "svf_south.npy").astype(np.float32) + svf_w = np.load(FIXTURES_DIR / "svf_west.npy").astype(np.float32) + svf_n = np.load(FIXTURES_DIR / "svf_north.npy").astype(np.float32) + svf_veg = np.load(FIXTURES_DIR / "svf_veg.npy").astype(np.float32) + shadow_bldg = np.load(FIXTURES_DIR / "shadow_noon_bldg_sh.npy") + shadow_veg = np.load(FIXTURES_DIR / "shadow_noon_veg_sh.npy") + shadow = (shadow_bldg * shadow_veg).astype(np.float32) + + ta, rad_i, rad_d, rad_g, esky, ci = 25.0, 600.0, 200.0, 800.0, 0.75, 0.85 + f_sh = np.full((rows, cols), 0.5, dtype=np.float32) + kup_base = np.full((rows, cols), 50.0, dtype=np.float32) + + kside = vegetation.kside_veg( + rad_i, + rad_d, + rad_g, + shadow, + svf_s, + svf_w, + svf_n, + svf_e, + svf_veg, + svf_veg, + svf_veg, + svf_veg, + 180.0, + 60.0, + 0.5, + 0.0, + 0.20, + f_sh, + kup_base.copy(), + kup_base.copy(), + kup_base.copy(), + kup_base.copy(), + True, + None, + False, + None, + None, + None, + None, + None, + ) + + ta_k = ta + 273.15 + ldown = np.full((rows, cols), esky * SBC * (ta_k**4), dtype=np.float32) + lup = np.full((rows, cols), 0.95 * SBC * (ta_k**4), dtype=np.float32) + + lside = vegetation.lside_veg( + svf_s, + svf_w, + svf_n, + svf_e, + svf_veg, + svf_veg, + svf_veg, + svf_veg, + svf_veg, + svf_veg, + svf_veg, + 
svf_veg, + 180.0, + 60.0, + ta, + 2.0, + SBC, + 0.90, + ldown, + esky, + 0.0, + f_sh, + ci, + lup.copy(), + lup.copy(), + lup.copy(), + lup.copy(), + False, + ) + + return kside, lside + + +def plot_context(inputs): + """Generate context plot showing input data.""" + fig, axes = plt.subplots(1, 4, figsize=(20, 4)) + fig.suptitle("Input Context", fontsize=12, fontweight="bold") + + dsm = inputs["dsm"] + cdsm = inputs["cdsm"] + wall_ht = inputs["wall_ht"] + + # Load SVF for context + svf = np.load(FIXTURES_DIR / "svf_total.npy") + + im0 = axes[0].imshow(dsm, cmap="terrain") + axes[0].set_title(f"DSM (m)\n[{dsm.min():.1f}, {dsm.max():.1f}]") + plt.colorbar(im0, ax=axes[0], shrink=0.8) + + im1 = axes[1].imshow(cdsm, cmap="Greens") + axes[1].set_title(f"Canopy DSM (m)\n[{cdsm.min():.1f}, {cdsm.max():.1f}]") + plt.colorbar(im1, ax=axes[1], shrink=0.8) + + im2 = axes[2].imshow(wall_ht, cmap="Oranges") + axes[2].set_title(f"Wall Heights (m)\n[{wall_ht.min():.1f}, {wall_ht.max():.1f}]") + plt.colorbar(im2, ax=axes[2], shrink=0.8) + + im3 = axes[3].imshow(svf, cmap="gray", vmin=0, vmax=1) + axes[3].set_title(f"Sky View Factor\n[{svf.min():.2f}, {svf.max():.2f}]") + plt.colorbar(im3, ax=axes[3], shrink=0.8) + + plt.tight_layout() + plt.savefig(REPORT_DIR / "context.png", dpi=120, bbox_inches="tight") + plt.close() + + +def plot_comparison(current, golden, title, filename, cmap="viridis"): + """Generate comparison plot: UMEP (golden) vs SOLWEIG Rust (current) vs residual.""" + diff = current - golden + + fig, axes = plt.subplots(1, 3, figsize=(15, 4)) + fig.suptitle(title, fontsize=12, fontweight="bold") + + vmax = max(abs(current.max()), abs(golden.max())) + vmin = min(current.min(), golden.min()) + + # Golden fixture = UMEP reference + im0 = axes[0].imshow(golden, cmap=cmap, vmin=vmin, vmax=vmax) + axes[0].set_title("UMEP (Reference)") + plt.colorbar(im0, ax=axes[0], shrink=0.8) + + # Current = SOLWEIG Rust + im1 = axes[1].imshow(current, cmap=cmap, vmin=vmin, vmax=vmax) + 
axes[1].set_title("SOLWEIG Rust") + plt.colorbar(im1, ax=axes[1], shrink=0.8) + + diff_max = max(abs(diff.min()), abs(diff.max()), 1e-10) + im2 = axes[2].imshow(diff, cmap="RdBu_r", vmin=-diff_max, vmax=diff_max) + axes[2].set_title(f"Residual (Rust - UMEP)\nmax|d|={diff_max:.2e}") + plt.colorbar(im2, ax=axes[2], shrink=0.8) + + plt.tight_layout() + plt.savefig(REPORT_DIR / filename, dpi=120, bbox_inches="tight") + plt.close() + + # Return stats without pass/fail - let caller decide threshold + return { + "max_abs_diff": float(np.abs(diff).max()), + "mean_diff": float(diff.mean()), + "std_diff": float(diff.std()), + "max_value": float(np.abs(golden).max()), # For relative comparisons + } + + +def plot_single_array(arr, title, filename, cmap="viridis"): + """Generate single array plot (for outputs without UMEP reference).""" + fig, ax = plt.subplots(1, 1, figsize=(8, 6)) + fig.suptitle(title, fontsize=12, fontweight="bold") + + im = ax.imshow(arr, cmap=cmap) + ax.set_title(f"[{arr.min():.2f}, {arr.max():.2f}]") + plt.colorbar(im, ax=ax, shrink=0.8) + + plt.tight_layout() + plt.savefig(REPORT_DIR / filename, dpi=120, bbox_inches="tight") + plt.close() + + return { + "min": float(arr.min()), + "max": float(arr.max()), + "mean": float(arr.mean()), + } + + +# --------------------------------------------------------------------------- +# Component generators (compute + compare) +# --------------------------------------------------------------------------- + + +def generate_shadow_comparisons(inputs): + """Generate shadow comparison plots.""" + results = {} + positions = [ + ("morning", 90.0, 30.0), + ("noon", 180.0, 60.0), + ("afternoon", 270.0, 45.0), + ] + + for name, azimuth, altitude in positions: + result = compute_shadows(inputs, azimuth, altitude) + + # Building shadows + golden = np.load(FIXTURES_DIR / f"shadow_{name}_bldg_sh.npy") + current = np.array(result.bldg_sh) + stats = plot_comparison( + current, + golden, + f"Building Shadows - {name.title()} 
(az={azimuth}, alt={altitude})", + f"shadow_{name}_bldg.png", + cmap="gray_r", + ) + results[f"shadow_{name}_bldg"] = stats + + # Vegetation shadows + golden = np.load(FIXTURES_DIR / f"shadow_{name}_veg_sh.npy") + current = np.array(result.veg_sh) + stats = plot_comparison( + current, + golden, + f"Vegetation Shadows - {name.title()} (az={azimuth}, alt={altitude})", + f"shadow_{name}_veg.png", + cmap="gray_r", + ) + results[f"shadow_{name}_veg"] = stats + + # Wall shadows (shadowed height) + wall_sh_path = FIXTURES_DIR / f"shadow_{name}_wall_sh.npy" + if wall_sh_path.exists() and result.wall_sh is not None: + golden = np.load(wall_sh_path) + current = np.array(result.wall_sh) + stats = plot_comparison( + current, + golden, + f"Wall Shadows - {name.title()} (az={azimuth}, alt={altitude})", + f"shadow_{name}_wall_sh.png", + cmap="Oranges", + ) + results[f"shadow_{name}_wall_sh"] = stats + + # Wall sun (sunlit height) + wall_sun_path = FIXTURES_DIR / f"shadow_{name}_wall_sun.npy" + if wall_sun_path.exists() and result.wall_sun is not None: + golden = np.load(wall_sun_path) + current = np.array(result.wall_sun) + stats = plot_comparison( + current, + golden, + f"Wall Sun - {name.title()} (az={azimuth}, alt={altitude})", + f"shadow_{name}_wall_sun.png", + cmap="YlOrRd", + ) + results[f"shadow_{name}_wall_sun"] = stats + + return results + + +def generate_svf_comparisons(inputs): + """Generate SVF comparison plots.""" + results = {} + result = compute_svf(inputs) + + components = [ + ("svf", "svf_total", "Total SVF"), + ("svf_north", "svf_north", "SVF North"), + ("svf_east", "svf_east", "SVF East"), + ("svf_south", "svf_south", "SVF South"), + ("svf_west", "svf_west", "SVF West"), + ("svf_veg", "svf_veg", "SVF Vegetation"), + ] + + for attr, golden_name, title in components: + golden = np.load(FIXTURES_DIR / f"{golden_name}.npy") + current = np.array(getattr(result, attr)) + stats = plot_comparison(current, golden, title, f"{golden_name}.png", cmap="gray") + 
results[golden_name] = stats + + return results + + +def generate_gvf_comparisons(inputs): + """Generate GVF comparison plots.""" + results = {} + result = compute_gvf(inputs) + + components = [ + ("gvf_lup", "gvf_lup", "GVF Lup (W/m2)", "hot"), + ("gvfalb", "gvf_alb", "GVF x Albedo", "viridis"), + ("gvf_norm", "gvf_norm", "GVF Normalization", "viridis"), + ] + + for attr, golden_name, title, cmap in components: + golden = np.load(FIXTURES_DIR / f"{golden_name}.npy") + current = np.array(getattr(result, attr)) + stats = plot_comparison(current, golden, title, f"{golden_name}.png", cmap=cmap) + results[golden_name] = stats + + return results + + +def generate_radiation_comparisons(inputs): + """Generate radiation comparison plots (isotropic mode).""" + results = {} + kside, lside = compute_radiation(inputs) + + components = [ + (kside, "keast", "radiation_kside_e", "Kside East - Isotropic (W/m2)", "YlOrRd"), + (kside, "ksouth", "radiation_kside_s", "Kside South - Isotropic (W/m2)", "YlOrRd"), + (lside, "least", "radiation_lside_e", "Lside East - Isotropic (W/m2)", "inferno"), + (lside, "lsouth", "radiation_lside_s", "Lside South - Isotropic (W/m2)", "inferno"), + ] + + for obj, attr, golden_name, title, cmap in components: + golden = np.load(FIXTURES_DIR / f"{golden_name}.npy") + current = np.array(getattr(obj, attr)) + stats = plot_comparison(current, golden, title, f"{golden_name}.png", cmap=cmap) + results[golden_name] = stats + + return results + + +def generate_aniso_radiation_comparisons(inputs): + """Generate anisotropic radiation comparison plots.""" + from solweig.constants import SBC + from solweig.rustalgos import shadowing, vegetation + + results = {} + + # Check if anisotropic fixtures exist + aniso_kside_path = FIXTURES_DIR / "radiation_aniso_kside_e.npy" + if not aniso_kside_path.exists(): + print(" Anisotropic radiation fixtures not found, skipping...") + return results + + shadowing.disable_gpu() + rows, cols = inputs["dsm"].shape + + # Load SVF and 
shadow data + svf = np.load(FIXTURES_DIR / "svf_total.npy").astype(np.float32) + svf_e = np.load(FIXTURES_DIR / "svf_east.npy").astype(np.float32) + svf_s = np.load(FIXTURES_DIR / "svf_south.npy").astype(np.float32) + svf_w = np.load(FIXTURES_DIR / "svf_west.npy").astype(np.float32) + svf_n = np.load(FIXTURES_DIR / "svf_north.npy").astype(np.float32) + svf_veg = np.load(FIXTURES_DIR / "svf_veg.npy").astype(np.float32) + shadow_bldg = np.load(FIXTURES_DIR / "shadow_noon_bldg_sh.npy").astype(np.float32) + shadow_veg = np.load(FIXTURES_DIR / "shadow_noon_veg_sh.npy").astype(np.float32) + shadow = (shadow_bldg * shadow_veg).astype(np.float32) + + ta, rad_i, rad_d, rad_g, esky, ci = 25.0, 600.0, 200.0, 800.0, 0.75, 0.85 + f_sh = np.full((rows, cols), 0.5, dtype=np.float32) + kup_base = np.full((rows, cols), 50.0, dtype=np.float32) + + # Generate sky patches for anisotropic mode + def generate_sky_patches(n_alt_bands=4): + patches = [] + alt_bands = [6, 18, 30, 42] + azis_per_band = [30, 24, 24, 18] + for alt, n_azi in zip(alt_bands[:n_alt_bands], azis_per_band[:n_alt_bands], strict=False): + azi_step = 360.0 / n_azi if n_azi > 1 else 0 + for azi_idx in range(n_azi): + patches.append([alt, azi_idx * azi_step]) + return np.array(patches, dtype=np.float32) + + l_patches = generate_sky_patches(n_alt_bands=4) + n_patches = len(l_patches) + + # Create luminance values (Perez model simplified) + luminance = 1000 + 500 * np.sin(l_patches[:, 0] * np.pi / 180) + lv = np.column_stack([l_patches, luminance]).astype(np.float32) + + # Create 3D shadow matrices from SVF and shadows + svf_expanded = svf[:, :, np.newaxis] + base_visibility = np.broadcast_to(svf_expanded, (rows, cols, n_patches)).copy() + bldg_factor = shadow_bldg[:, :, np.newaxis] + veg_factor = shadow_veg[:, :, np.newaxis] + + shmat = (base_visibility * np.broadcast_to(bldg_factor, (rows, cols, n_patches))).astype(np.float32) + shmat = (shmat > 0.5).astype(np.float32) + vegshmat = (base_visibility * 
np.broadcast_to(veg_factor, (rows, cols, n_patches))).astype(np.float32) + vegshmat = (vegshmat > 0.3).astype(np.float32) + vbshvegshmat = (shmat * vegshmat).astype(np.float32) + + # Diffuse shadow (3D - same shape as shmat for diffuse sky patches) + diffsh = shmat.copy() # 3D array (rows, cols, patches) + asvf = svf.copy() + + # Compute anisotropic Kside + kside_aniso = vegetation.kside_veg( + rad_i, + rad_d, + rad_g, + shadow, + svf_s, + svf_w, + svf_n, + svf_e, + svf_veg, + svf_veg, + svf_veg, + svf_veg, + 180.0, + 60.0, # azimuth, altitude + 0.5, # psi + 0.0, # t + 0.20, # albedo + f_sh, + kup_base.copy(), + kup_base.copy(), + kup_base.copy(), + kup_base.copy(), + True, # cyl + lv, # luminance values + True, # anisotropic_diffuse + diffsh, + asvf, + shmat, + vegshmat, + vbshvegshmat, + ) + + # Compare Kside anisotropic + components = [ + ("keast", "radiation_aniso_kside_e", "Kside East - Anisotropic (W/m2)", "YlOrRd"), + ("ksouth", "radiation_aniso_kside_s", "Kside South - Anisotropic (W/m2)", "YlOrRd"), + ("kside_i", "radiation_aniso_kside_i", "Kside Direct - Anisotropic (W/m2)", "YlOrRd"), + ("kside_d", "radiation_aniso_kside_d", "Kside Diffuse - Anisotropic (W/m2)", "YlOrRd"), + ] + + for attr, golden_name, title, cmap in components: + golden_path = FIXTURES_DIR / f"{golden_name}.npy" + if golden_path.exists(): + golden = np.load(golden_path) + current = np.array(getattr(kside_aniso, attr)) + stats = plot_comparison(current, golden, title, f"{golden_name}.png", cmap=cmap) + results[golden_name] = stats + + # Compute anisotropic Lside + ta_k = ta + 273.15 + ldown = np.full((rows, cols), esky * SBC * (ta_k**4), dtype=np.float32) + lup = np.full((rows, cols), 0.95 * SBC * (ta_k**4), dtype=np.float32) + + lside_aniso = vegetation.lside_veg( + svf_s, + svf_w, + svf_n, + svf_e, + svf_veg, + svf_veg, + svf_veg, + svf_veg, + svf_veg, + svf_veg, + svf_veg, + svf_veg, + 180.0, + 60.0, + ta, + 2.0, + SBC, + 0.90, + ldown, + esky, + 0.0, + f_sh, + ci, + lup.copy(), + 
lup.copy(), + lup.copy(), + lup.copy(), + True, # anisotropic_longwave + ) + + lside_components = [ + ("least", "radiation_aniso_lside_e", "Lside East - Anisotropic (W/m2)", "inferno"), + ("lsouth", "radiation_aniso_lside_s", "Lside South - Anisotropic (W/m2)", "inferno"), + ] + + for attr, golden_name, title, cmap in lside_components: + golden_path = FIXTURES_DIR / f"{golden_name}.npy" + if golden_path.exists(): + golden = np.load(golden_path) + current = np.array(getattr(lside_aniso, attr)) + stats = plot_comparison(current, golden, title, f"{golden_name}.png", cmap=cmap) + results[golden_name] = stats + + return results + + +def generate_utci_comparisons(): + """Generate UTCI comparison plots.""" + from solweig.rustalgos import utci + + results = {} + + # Load fixtures + params = dict(np.load(FIXTURES_DIR / "utci_grid_params.npz")) + tmrt = np.load(FIXTURES_DIR / "utci_grid_tmrt.npy") + va = np.load(FIXTURES_DIR / "utci_grid_va.npy") + golden = np.load(FIXTURES_DIR / "utci_grid_output.npy") + + # Compute current + current = np.array( + utci.utci_grid( + float(params["ta"]), + float(params["rh"]), + tmrt.astype(np.float32), + va.astype(np.float32), + ) + ) + + stats = plot_comparison( + current, + golden, + f"UTCI Grid (Ta={params['ta']}C, RH={params['rh']}%)", + "utci_grid.png", + cmap="RdYlBu_r", + ) + results["utci_grid"] = stats + + return results + + +def generate_pet_comparisons(): + """Generate PET comparison plots.""" + from solweig.rustalgos import pet + + results = {} + + # Load fixtures + params = dict(np.load(FIXTURES_DIR / "pet_grid_params.npz")) + tmrt = np.load(FIXTURES_DIR / "pet_grid_tmrt.npy") + va = np.load(FIXTURES_DIR / "pet_grid_va.npy") + golden = np.load(FIXTURES_DIR / "pet_grid_output.npy") + + # Compute current + current = np.array( + pet.pet_grid( + float(params["ta"]), + float(params["rh"]), + tmrt.astype(np.float32), + va.astype(np.float32), + float(params["mbody"]), + float(params["age"]), + float(params["height"]), + 
float(params["activity"]), + float(params["clo"]), + int(params["sex"]), + ) + ) + + # Mask invalid values + valid_mask = golden > -999 + current_masked = np.where(valid_mask, current, np.nan) + golden_masked = np.where(valid_mask, golden, np.nan) + + stats = plot_comparison( + current_masked, + golden_masked, + f"PET Grid (Ta={params['ta']}C, RH={params['rh']}%)", + "pet_grid.png", + cmap="RdYlBu_r", + ) + results["pet_grid"] = stats + + return results + + +def generate_tmrt_comparisons(): + """Generate Tmrt comparison plots.""" + from solweig.rustalgos import tmrt + + results = {} + + # Load fixtures + params = dict(np.load(FIXTURES_DIR / "tmrt_params.npz")) + kdown = np.load(FIXTURES_DIR / "tmrt_input_kdown.npy") + kup = np.load(FIXTURES_DIR / "tmrt_input_kup.npy") + ldown = np.load(FIXTURES_DIR / "tmrt_input_ldown.npy") + lup = np.load(FIXTURES_DIR / "tmrt_input_lup.npy") + kside_n = np.load(FIXTURES_DIR / "tmrt_input_kside_n.npy") + kside_e = np.load(FIXTURES_DIR / "tmrt_input_kside_e.npy") + kside_s = np.load(FIXTURES_DIR / "tmrt_input_kside_s.npy") + kside_w = np.load(FIXTURES_DIR / "tmrt_input_kside_w.npy") + kside_total = np.load(FIXTURES_DIR / "tmrt_input_kside_total.npy") + lside_n = np.load(FIXTURES_DIR / "tmrt_input_lside_n.npy") + lside_e = np.load(FIXTURES_DIR / "tmrt_input_lside_e.npy") + lside_s = np.load(FIXTURES_DIR / "tmrt_input_lside_s.npy") + lside_w = np.load(FIXTURES_DIR / "tmrt_input_lside_w.npy") + lside_total = np.load(FIXTURES_DIR / "tmrt_input_lside_total.npy") + + # Anisotropic mode + golden_aniso = np.load(FIXTURES_DIR / "tmrt_output_aniso.npy") + tmrt_params = tmrt.TmrtParams( + abs_k=float(params["abs_k"]), + abs_l=float(params["abs_l"]), + is_standing=True, + use_anisotropic_sky=True, + ) + current_aniso = np.array( + tmrt.compute_tmrt( + kdown, + kup, + ldown, + lup, + kside_n, + kside_e, + kside_s, + kside_w, + lside_n, + lside_e, + lside_s, + lside_w, + kside_total, + lside_total, + tmrt_params, + ) + ) + + stats = 
plot_comparison( + current_aniso, + golden_aniso, + "Tmrt Anisotropic (C)", + "tmrt_aniso.png", + cmap="RdYlBu_r", + ) + results["tmrt_aniso"] = stats + + # Isotropic mode + golden_iso = np.load(FIXTURES_DIR / "tmrt_output_iso.npy") + tmrt_params_iso = tmrt.TmrtParams( + abs_k=float(params["abs_k"]), + abs_l=float(params["abs_l"]), + is_standing=True, + use_anisotropic_sky=False, + ) + current_iso = np.array( + tmrt.compute_tmrt( + kdown, + kup, + ldown, + lup, + kside_n, + kside_e, + kside_s, + kside_w, + lside_n, + lside_e, + lside_s, + lside_w, + kside_total, + lside_total, + tmrt_params_iso, + ) + ) + + stats = plot_comparison( + current_iso, + golden_iso, + "Tmrt Isotropic (C)", + "tmrt_iso.png", + cmap="RdYlBu_r", + ) + results["tmrt_iso"] = stats + + return results + + +def generate_ground_temp_comparisons(): + """Generate ground temperature comparison plots (TsWaveDelay model).""" + from solweig.rustalgos import ground + + results = {} + + # Load common inputs + gvflup = np.load(FIXTURES_DIR / "ground_temp_input_gvflup.npy").astype(np.float32) + tgmap1_init = np.load(FIXTURES_DIR / "ground_temp_input_tgmap1.npy").astype(np.float32) + + case_configs = { + 1: {"firstdaytime": True, "timeadd": 0.0, "timestepdec": 30 / 1440, "name": "First Morning"}, + 2: {"firstdaytime": False, "timeadd": 30 / 1440, "timestepdec": 30 / 1440, "name": "Short Step"}, + 3: {"firstdaytime": False, "timeadd": 60 / 1440, "timestepdec": 60 / 1440, "name": "Long Step"}, + } + + for case_num, config in case_configs.items(): + case_path = FIXTURES_DIR / f"ground_temp_case{case_num}.npz" + if not case_path.exists(): + continue + + case_data = dict(np.load(case_path)) + golden_lup = case_data["lup"] + + # Compute current using Rust + current_lup, _, _ = ground.ts_wave_delay( + gvflup.copy(), + config["firstdaytime"], + config["timeadd"], + config["timestepdec"], + tgmap1_init.copy(), + ) + current_lup = np.array(current_lup) + + stats = plot_comparison( + current_lup, + golden_lup, + 
f"Ground Temp: {config['name']}", + f"ground_temp_case{case_num}.png", + cmap="hot", + ) + results[f"ground_temp_case{case_num}"] = stats + + return results + + +def generate_wall_temp_comparisons(): + """Generate wall temperature comparison plots.""" + from solweig.rustalgos import ground + + results = {} + + # Load fixtures + output = dict(np.load(FIXTURES_DIR / "wall_temp_output.npz")) + alb_grid = np.load(FIXTURES_DIR / "wall_temp_input_alb.npy") + emis_grid = np.load(FIXTURES_DIR / "wall_temp_input_emis.npy") + tgk_grid = np.load(FIXTURES_DIR / "wall_temp_input_tgk.npy") + tstart_grid = np.load(FIXTURES_DIR / "wall_temp_input_tstart.npy") + tmaxlst_grid = np.load(FIXTURES_DIR / "wall_temp_input_tmaxlst.npy") + + # Compute current + tg, tg_wall, ci_tg, _, _ = ground.compute_ground_temperature( + float(output["ta"]), + float(output["sun_altitude"]), + float(output["altmax"]), + float(output["dectime"]), + float(output["snup"]), + float(output["global_rad"]), + float(output["rad_g0"]), + float(output["zen_deg"]), + alb_grid, + emis_grid, + tgk_grid, + tstart_grid, + tmaxlst_grid, + ) + current_tg = np.array(tg) + golden_tg = output["tg"] + + stats = plot_comparison( + current_tg, + golden_tg, + "Ground Temperature Deviation (C)", + "wall_temp_tg.png", + cmap="RdYlBu_r", + ) + results["wall_temp_tg"] = stats + + return results + + +def generate_aniso_sky_comparisons(): + """Generate anisotropic sky comparison plots.""" + from solweig.rustalgos import sky + + results = {} + + # Check if fixtures exist + aniso_path = FIXTURES_DIR / "aniso_sky_output.npz" + if not aniso_path.exists(): + print(" Anisotropic sky fixtures not found, skipping...") + return results + + # Load golden fixtures + golden = dict(np.load(aniso_path)) + + # Load inputs and recompute + dsm = np.load(FIXTURES_DIR / "input_dsm.npy").astype(np.float32) + svf = np.load(FIXTURES_DIR / "svf_total.npy").astype(np.float32) + shadow_bldg = np.load(FIXTURES_DIR / 
"shadow_noon_bldg_sh.npy").astype(np.float32) + shadow_veg = np.load(FIXTURES_DIR / "shadow_noon_veg_sh.npy").astype(np.float32) + + rows, cols = dsm.shape + SBC = 5.67e-8 + + # Generate sky patches + def generate_sky_patches(n_alt_bands=4): + patches = [] + alt_bands = [6, 18, 30, 42] + azis_per_band = [30, 24, 24, 18] + for alt, n_azi in zip(alt_bands[:n_alt_bands], azis_per_band[:n_alt_bands], strict=False): + azi_step = 360.0 / n_azi if n_azi > 1 else 0 + for azi_idx in range(n_azi): + patches.append([alt, azi_idx * azi_step]) + return np.array(patches, dtype=np.float32) + + def compute_steradians(l_patches): + n_patches = len(l_patches) + steradians = np.zeros(n_patches, dtype=np.float32) + deg2rad = np.pi / 180.0 + altitudes = l_patches[:, 0] + unique_alts = np.unique(altitudes) + for i, alt in enumerate(unique_alts): + mask = altitudes == alt + count = np.sum(mask) + if i == 0: + ster = (360.0 / count * deg2rad) * np.sin(alt * deg2rad) + else: + prev_alt = unique_alts[i - 1] + delta_alt = (alt - prev_alt) / 2 + ster = (360.0 / count * deg2rad) * ( + np.sin((alt + delta_alt) * deg2rad) - np.sin((prev_alt + delta_alt) * deg2rad) + ) + steradians[mask] = ster + return steradians + + l_patches = generate_sky_patches(n_alt_bands=4) + n_patches = len(l_patches) + steradians = compute_steradians(l_patches) + + # Create shadow matrices + svf_expanded = svf[:, :, np.newaxis] + base_visibility = np.broadcast_to(svf_expanded, (rows, cols, n_patches)).copy() + bldg_factor = shadow_bldg[:, :, np.newaxis] + veg_factor = shadow_veg[:, :, np.newaxis] + + shmat = (base_visibility * np.broadcast_to(bldg_factor, (rows, cols, n_patches))).astype(np.float32) + shmat = (shmat > 0.5).astype(np.float32) + vegshmat = (base_visibility * np.broadcast_to(veg_factor, (rows, cols, n_patches))).astype(np.float32) + vegshmat = (vegshmat > 0.3).astype(np.float32) + vbshvegshmat = (shmat * vegshmat).astype(np.float32) + + asvf = svf.astype(np.float32) + luminance = 1000 + 500 * 
np.sin(l_patches[:, 0] * np.pi / 180) + lv = np.column_stack([l_patches, luminance]).astype(np.float32) + ta_k = 25.0 + 273.15 + lup = np.full((rows, cols), 0.95 * SBC * (ta_k**4), dtype=np.float32) + shadow = (shadow_bldg * shadow_veg).astype(np.float32) + kup_base = np.full((rows, cols), 50.0, dtype=np.float32) + + sun_params = sky.SunParams(altitude=60.0, azimuth=180.0) + sky_params = sky.SkyParams(esky=0.75, ta=25.0, cyl=True, wall_scheme=False, albedo=0.20) + surface_params = sky.SurfaceParams(tgwall=2.0, ewall=0.90, rad_i=600.0, rad_d=200.0) + + result = sky.anisotropic_sky( + shmat, + vegshmat, + vbshvegshmat, + sun_params, + asvf, + sky_params, + l_patches, + None, + None, + steradians, + surface_params, + lup, + lv, + shadow, + kup_base.copy(), + kup_base.copy(), + kup_base.copy(), + kup_base.copy(), + ) + + # Compare outputs + components = [ + ("ldown", "Ldown (W/m2)", "inferno"), + ("lside", "Lside (W/m2)", "inferno"), + ("kside", "Kside (W/m2)", "YlOrRd"), + ] + + for attr, title, cmap in components: + current = np.array(getattr(result, attr)) + golden_arr = golden[attr] + stats = plot_comparison( + current, + golden_arr, + f"Anisotropic Sky: {title}", + f"aniso_sky_{attr}.png", + cmap=cmap, + ) + results[f"aniso_sky_{attr}"] = stats + + return results + + +# --------------------------------------------------------------------------- +# Sinusoidal ground temperature model: Rust vs UMEP formula comparison +# --------------------------------------------------------------------------- + + +def _umep_ground_temp( + altmax, + dectime_frac, + snup_hours, + global_rad, + rad_g0, + zen_deg, + tgk, + tstart, + tmaxlst, + tgk_wall, + tstart_wall, + tmaxlst_wall, + sun_altitude, +): + """Pure-Python UMEP reference (Solweig_2025a lines 171-199).""" + Tgamp = tgk * altmax + tstart + Tgampwall = tgk_wall * altmax + tstart_wall + + snup_frac = snup_hours / 24.0 + if dectime_frac > snup_frac: + tmaxlst_frac = tmaxlst / 24.0 + phase = (dectime_frac - snup_frac) / 
(tmaxlst_frac - snup_frac) + Tg = Tgamp * np.sin(phase * np.pi / 2.0) + + tmaxlst_wall_frac = tmaxlst_wall / 24.0 + denom_wall = tmaxlst_wall_frac - snup_frac + phase_wall = (dectime_frac - snup_frac) / denom_wall if denom_wall > 0 else dectime_frac - snup_frac + Tgwall = Tgampwall * np.sin(phase_wall * np.pi / 2.0) + else: + Tg = 0.0 + Tgwall = 0.0 + + if Tgwall < 0: + Tgwall = 0.0 + + if sun_altitude > 0 and rad_g0 > 0: + corr = 0.1473 * np.log(90.0 - zen_deg) + 0.3454 + CI_TgG = (global_rad / rad_g0) + (1.0 - corr) + if CI_TgG > 1.0 or np.isinf(CI_TgG): + CI_TgG = 1.0 + else: + CI_TgG = 1.0 + + Tg = np.maximum(Tg * CI_TgG, 0.0) + Tgwall = Tgwall * CI_TgG + + return Tg, Tgwall, CI_TgG + + +def generate_sinusoidal_ground_temp(inputs=None): + """Generate sinusoidal ground temperature model comparisons. + + This section compares the Rust compute_ground_temperature() against the + UMEP Python reference formula for: + 1. A diurnal curve plot (UMEP vs Rust overlaid) + 2. Multiple scenarios covering all land covers and conditions + """ + from solweig.rustalgos import ground + + results = {} + + # --- Part 1: Diurnal curve comparison --- + shape = (3, 3) + altmax = 55.0 + snup = 5.0 + tgk_val = 0.37 + tstart_val = -3.41 + tmaxlst_val = 15.0 + + hours = np.arange(0, 24.5, 0.5) + rust_tg_curve = [] + umep_tg_curve = [] + rust_wall_curve = [] + umep_wall_curve = [] + + for h in hours: + dectime = h / 24.0 + # Sun altitude approximation (simple sinusoidal) + sun_alt = max(0.0, altmax * np.sin(np.pi * (h - snup) / (21 - snup))) if snup < h < 21 else 0.0 + zen = 90.0 - sun_alt if sun_alt > 0 else 90.0 + + # Global radiation proportional to sun altitude + if sun_alt > 2: + grad = 800.0 * np.sin(sun_alt * np.pi / 180.0) + grad0 = 900.0 * np.sin(sun_alt * np.pi / 180.0) + else: + grad = 0.0 + grad0 = 0.0 + + # Rust + tg, tg_wall, ci, _, _ = ground.compute_ground_temperature( + 20.0, # ta + sun_alt, + altmax, + dectime, + snup, + grad, + grad0, + zen, + np.full(shape, 0.2, 
dtype=np.float32), + np.full(shape, 0.95, dtype=np.float32), + np.full(shape, tgk_val, dtype=np.float32), + np.full(shape, tstart_val, dtype=np.float32), + np.full(shape, tmaxlst_val, dtype=np.float32), + ) + rust_tg_curve.append(float(np.array(tg)[0, 0])) + rust_wall_curve.append(float(tg_wall)) + + # UMEP + umep_tg, umep_wall, _ = _umep_ground_temp( + altmax, + dectime, + snup, + grad, + grad0, + zen, + tgk_val, + tstart_val, + tmaxlst_val, + tgk_val, + tstart_val, + tmaxlst_val, + sun_alt, + ) + if isinstance(umep_tg, np.ndarray): + umep_tg_curve.append(float(umep_tg.flat[0])) + else: + umep_tg_curve.append(float(umep_tg)) + umep_wall_curve.append(float(umep_wall)) + + rust_tg_curve = np.array(rust_tg_curve) + umep_tg_curve = np.array(umep_tg_curve) + rust_wall_curve = np.array(rust_wall_curve) + umep_wall_curve = np.array(umep_wall_curve) + + # Plot diurnal curves + fig, axes = plt.subplots(1, 2, figsize=(14, 5)) + fig.suptitle("Sinusoidal Ground Temperature: Rust vs UMEP", fontsize=12, fontweight="bold") + + # Ground + axes[0].plot(hours, umep_tg_curve, "b-", label="UMEP (Python)", linewidth=2) + axes[0].plot(hours, rust_tg_curve, "r--", label="Rust", linewidth=2) + axes[0].axvline(x=snup, color="orange", linestyle=":", alpha=0.7, label="Sunrise") + axes[0].axvline(x=tmaxlst_val, color="green", linestyle=":", alpha=0.7, label="TmaxLST") + axes[0].set_xlabel("Hour of day") + axes[0].set_ylabel("Tg (K above Ta)") + axes[0].set_title("Ground Temperature Deviation") + axes[0].legend() + axes[0].grid(True, alpha=0.3) + axes[0].set_xlim(0, 24) + + # Wall + axes[1].plot(hours, umep_wall_curve, "b-", label="UMEP (Python)", linewidth=2) + axes[1].plot(hours, rust_wall_curve, "r--", label="Rust", linewidth=2) + axes[1].axvline(x=snup, color="orange", linestyle=":", alpha=0.7, label="Sunrise") + axes[1].axvline(x=tmaxlst_val, color="green", linestyle=":", alpha=0.7, label="TmaxLST") + axes[1].set_xlabel("Hour of day") + axes[1].set_ylabel("Tg_wall (K above Ta)") + 
axes[1].set_title("Wall Temperature Deviation") + axes[1].legend() + axes[1].grid(True, alpha=0.3) + axes[1].set_xlim(0, 24) + + plt.tight_layout() + plt.savefig(REPORT_DIR / "sinusoidal_diurnal.png", dpi=120, bbox_inches="tight") + plt.close() + + # Compute stats for diurnal curve + ground_diff = np.max(np.abs(rust_tg_curve - umep_tg_curve)) + wall_diff = np.max(np.abs(rust_wall_curve - umep_wall_curve)) + results["sinusoidal_ground_diurnal"] = { + "max_abs_diff": ground_diff, + "mean_diff": float(np.mean(rust_tg_curve - umep_tg_curve)), + "std_diff": float(np.std(rust_tg_curve - umep_tg_curve)), + "max_value": float(np.max(np.abs(umep_tg_curve))), + } + results["sinusoidal_wall_diurnal"] = { + "max_abs_diff": wall_diff, + "mean_diff": float(np.mean(rust_wall_curve - umep_wall_curve)), + "std_diff": float(np.std(rust_wall_curve - umep_wall_curve)), + "max_value": float(np.max(np.abs(umep_wall_curve))), + } + + # --- Part 2: Multi-scenario formula agreement --- + _s = dict # shorthand + scenarios = [ + # fmt: off + ( + "Noon clear cobble", + _s( + altmax=55, + hour=12, + snup=5, + grad=600, + grad0=650, + zen=35, + sun_alt=55, + tgk=0.37, + tstart=-3.41, + tmaxlst=15, + tgk_w=0.37, + tstart_w=-3.41, + tmaxlst_w=15, + ), + ), + ( + "Noon clear asphalt", + _s( + altmax=55, + hour=12, + snup=5, + grad=600, + grad0=650, + zen=35, + sun_alt=55, + tgk=0.58, + tstart=-9.78, + tmaxlst=15, + tgk_w=0.37, + tstart_w=-3.41, + tmaxlst_w=15, + ), + ), + ( + "Afternoon 18h", + _s( + altmax=55, + hour=18, + snup=5, + grad=300, + grad0=400, + zen=60, + sun_alt=30, + tgk=0.37, + tstart=-3.41, + tmaxlst=15, + tgk_w=0.37, + tstart_w=-3.41, + tmaxlst_w=15, + ), + ), + ( + "Evening 22h", + _s( + altmax=55, + hour=22, + snup=5, + grad=0, + grad0=0, + zen=90, + sun_alt=0, + tgk=0.37, + tstart=-3.41, + tmaxlst=15, + tgk_w=0.37, + tstart_w=-3.41, + tmaxlst_w=15, + ), + ), + ( + "Before sunrise", + _s( + altmax=55, + hour=3, + snup=5, + grad=0, + grad0=0, + zen=90, + sun_alt=0, + tgk=0.37, + 
tstart=-3.41, + tmaxlst=15, + tgk_w=0.37, + tstart_w=-3.41, + tmaxlst_w=15, + ), + ), + ( + "Peak at TmaxLST", + _s( + altmax=55, + hour=15, + snup=5, + grad=500, + grad0=550, + zen=45, + sun_alt=45, + tgk=0.37, + tstart=-3.41, + tmaxlst=15, + tgk_w=0.37, + tstart_w=-3.41, + tmaxlst_w=15, + ), + ), + ( + "Cloudy CI low", + _s( + altmax=55, + hour=12, + snup=5, + grad=200, + grad0=650, + zen=35, + sun_alt=55, + tgk=0.37, + tstart=-3.41, + tmaxlst=15, + tgk_w=0.37, + tstart_w=-3.41, + tmaxlst_w=15, + ), + ), + ( + "Wood wall noon", + _s( + altmax=55, + hour=12, + snup=5, + grad=600, + grad0=650, + zen=35, + sun_alt=55, + tgk=0.37, + tstart=-3.41, + tmaxlst=15, + tgk_w=0.50, + tstart_w=-2.0, + tmaxlst_w=14, + ), + ), + ( + "Brick wall afternoon", + _s( + altmax=55, + hour=18, + snup=5, + grad=300, + grad0=400, + zen=60, + sun_alt=30, + tgk=0.37, + tstart=-3.41, + tmaxlst=15, + tgk_w=0.40, + tstart_w=-4.0, + tmaxlst_w=15, + ), + ), + ( + "Grass morning", + _s( + altmax=55, + hour=8, + snup=5, + grad=300, + grad0=320, + zen=60, + sun_alt=30, + tgk=0.21, + tstart=-3.38, + tmaxlst=14, + tgk_w=0.37, + tstart_w=-3.41, + tmaxlst_w=15, + ), + ), + ( + "Water", + _s( + altmax=55, + hour=12, + snup=5, + grad=600, + grad0=650, + zen=35, + sun_alt=55, + tgk=0.0, + tstart=0.0, + tmaxlst=12, + tgk_w=0.37, + tstart_w=-3.41, + tmaxlst_w=15, + ), + ), + ( + "High lat low sun", + _s( + altmax=15, + hour=12, + snup=9, + grad=100, + grad0=120, + zen=78, + sun_alt=12, + tgk=0.37, + tstart=-3.41, + tmaxlst=13, + tgk_w=0.37, + tstart_w=-3.41, + tmaxlst_w=13, + ), + ), + # fmt: on + ] + + scenario_results = [] + for name, s in scenarios: + dectime = s["hour"] / 24.0 + + # UMEP reference + umep_tg, umep_wall, umep_ci = _umep_ground_temp( + s["altmax"], + dectime, + s["snup"], + s["grad"], + s["grad0"], + s["zen"], + s["tgk"], + s["tstart"], + s["tmaxlst"], + s["tgk_w"], + s["tstart_w"], + s["tmaxlst_w"], + s["sun_alt"], + ) + + # Rust + tgk_grid = np.full(shape, s["tgk"], dtype=np.float32) + 
tstart_grid = np.full(shape, s["tstart"], dtype=np.float32) + tmaxlst_grid = np.full(shape, s["tmaxlst"], dtype=np.float32) + + rust_tg, rust_wall, rust_ci, _, _ = ground.compute_ground_temperature( + 20.0, + s["sun_alt"], + s["altmax"], + dectime, + s["snup"], + s["grad"], + s["grad0"], + s["zen"], + np.full(shape, 0.2, dtype=np.float32), + np.full(shape, 0.95, dtype=np.float32), + tgk_grid, + tstart_grid, + tmaxlst_grid, + tgk_wall=s["tgk_w"], + tstart_wall=s["tstart_w"], + tmaxlst_wall=s["tmaxlst_w"], + ) + rust_tg_val = float(np.array(rust_tg)[0, 0]) + umep_tg_val = float(umep_tg) if not isinstance(umep_tg, np.ndarray) else float(umep_tg.flat[0]) + + tg_diff = abs(rust_tg_val - umep_tg_val) + wall_diff = abs(float(rust_wall) - float(umep_wall)) + ci_diff = abs(float(rust_ci) - float(umep_ci)) + passed = tg_diff < 1e-4 and wall_diff < 1e-4 and ci_diff < 1e-5 + + scenario_results.append( + { + "name": name, + "rust_tg": rust_tg_val, + "umep_tg": umep_tg_val, + "tg_diff": tg_diff, + "rust_wall": float(rust_wall), + "umep_wall": float(umep_wall), + "wall_diff": wall_diff, + "rust_ci": float(rust_ci), + "umep_ci": float(umep_ci), + "ci_diff": ci_diff, + "passed": passed, + } + ) + + results["_sinusoidal_scenarios"] = scenario_results + return results + + +# --------------------------------------------------------------------------- +# Markdown report generation +# --------------------------------------------------------------------------- + + +def apply_thresholds(all_results): + """Apply component-specific pass/fail thresholds to results.""" + for name, stats in all_results.items(): + if name.startswith("_"): + continue # Skip metadata entries + if "pass" in stats: + continue # Already set + + max_diff = stats.get("max_abs_diff", 0) + max_val = stats.get("max_value", 1.0) + + if "svf_veg" in name: + stats["pass"] = max_diff < 0.02 + stats["threshold"] = "0.02 (1% arch diff)" + elif name.startswith("svf_") or name.startswith("shadow_"): + stats["pass"] = max_diff < 
1e-4 + stats["threshold"] = "1e-4" + elif name.startswith("gvf_"): + relative_diff = max_diff / max_val if max_val > 0 else max_diff + stats["pass"] = relative_diff < 1e-3 + stats["threshold"] = "0.1% relative" + elif name.startswith("radiation_aniso_"): + relative_diff = max_diff / max_val if max_val > 0 else max_diff + stats["pass"] = relative_diff < 5e-3 + stats["threshold"] = "0.5% relative" + elif name.startswith("radiation_"): + relative_diff = max_diff / max_val if max_val > 0 else max_diff + stats["pass"] = relative_diff < 1e-3 + stats["threshold"] = "0.1% relative" + elif name.startswith("utci_"): + stats["pass"] = max_diff < 0.1 + stats["threshold"] = "0.1 C" + elif name.startswith("pet_"): + stats["pass"] = max_diff < 0.2 + stats["threshold"] = "0.2 C" + elif name.startswith("tmrt_") or name.startswith("wall_temp_"): + stats["pass"] = max_diff < 0.1 + stats["threshold"] = "0.1 C" + elif name.startswith("aniso_sky_"): + stats["pass"] = max_diff < 0.5 + stats["threshold"] = "0.5 W/m2" + elif name.startswith("sinusoidal_"): + stats["pass"] = max_diff < 1e-3 + stats["threshold"] = "1e-3 C" + else: + stats["pass"] = max_diff < 1e-3 + stats["threshold"] = "1e-3" + + +def generate_markdown_report(all_results): + """Generate Markdown report.""" + timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S") + + # Count pass/fail (exclude metadata entries) + spatial_results = {k: v for k, v in all_results.items() if not k.startswith("_")} + total = len(spatial_results) + passed = sum(1 for r in spatial_results.values() if r.get("pass", False)) + + # Count sinusoidal scenarios + sinusoidal_scenarios = all_results.get("_sinusoidal_scenarios", []) + sin_total = len(sinusoidal_scenarios) + sin_passed = sum(1 for s in sinusoidal_scenarios if s["passed"]) + + status_icon = "PASS" if passed == total and sin_passed == sin_total else "FAIL" + + lines = [] + lines.append("# UMEP vs SOLWEIG Rust - Golden Test Report") + lines.append("") + lines.append(f"**Generated:** 
{timestamp}") + lines.append("**Comparison:** UMEP Python (Reference) vs SOLWEIG Rust Implementation") + lines.append(f"**Spatial tests:** {passed}/{total} pass") + if sin_total > 0: + lines.append(f"**Formula agreement:** {sin_passed}/{sin_total} scenarios pass") + lines.append(f"**Overall:** {status_icon}") + lines.append("") + lines.append("Each comparison shows: UMEP (Reference) | SOLWEIG Rust | Residual (Rust - UMEP)") + lines.append("") + + # --- Input context --- + lines.append("## Input Context") + lines.append("") + lines.append("![Input context: DSM, CDSM, Wall Heights, SVF](context.png)") + lines.append("") + + # --- Summary table --- + lines.append("## Spatial Comparison Summary") + lines.append("") + lines.append("| Component | Max |Diff| | Threshold | Mean Diff | Status |") + lines.append("|-----------|------------|-----------|-----------|--------|") + + for name, stats in spatial_results.items(): + status_str = "PASS" if stats.get("pass", False) else "FAIL" + threshold = stats.get("threshold", "1e-3") + max_diff = stats.get("max_abs_diff", 0) + mean_diff = stats.get("mean_diff", 0) + lines.append(f"| {name} | {max_diff:.2e} | {threshold} | {mean_diff:.2e} | {status_str} |") + + lines.append("") + + # --- Visual comparisons grouped by category --- + categories = { + "Shadows": [k for k in spatial_results if k.startswith("shadow_")], + "Sky View Factor": [k for k in spatial_results if k.startswith("svf_")], + "Ground View Factor": [k for k in spatial_results if k.startswith("gvf_")], + "Radiation (Isotropic)": [k for k in spatial_results if k.startswith("radiation_") and "aniso" not in k], + "Radiation (Anisotropic)": [k for k in spatial_results if k.startswith("radiation_aniso_")], + "UTCI": [k for k in spatial_results if k.startswith("utci_")], + "PET": [k for k in spatial_results if k.startswith("pet_")], + "Tmrt": [k for k in spatial_results if k.startswith("tmrt_")], + "Ground Temperature (TsWaveDelay)": [k for k in spatial_results if 
k.startswith("ground_temp_")], + "Wall Temperature": [k for k in spatial_results if k.startswith("wall_temp_")], + "Anisotropic Sky": [k for k in spatial_results if k.startswith("aniso_sky_")], + } + + lines.append("## Visual Comparisons") + lines.append("") + + for category, keys in categories.items(): + if keys: + lines.append(f"### {category}") + lines.append("") + for key in keys: + status_str = "PASS" if spatial_results[key].get("pass", False) else "FAIL" + lines.append(f"**{key}** ({status_str})") + lines.append("") + lines.append(f"![{key}]({key}.png)") + lines.append("") + + # --- Sinusoidal ground temperature section --- + # Diurnal curves + sinusoidal_ground = spatial_results.get("sinusoidal_ground_diurnal") + sinusoidal_wall = spatial_results.get("sinusoidal_wall_diurnal") + if sinusoidal_ground or sinusoidal_wall or sinusoidal_scenarios: + lines.append("## Sinusoidal Ground Temperature Model") + lines.append("") + lines.append("Compares `compute_ground_temperature()` (Rust) against the UMEP Python") + lines.append("formula from `Solweig_2025a_calc_forprocessing.py` (lines 171-199).") + lines.append("") + + if sinusoidal_ground: + lines.append("### Diurnal Curve (Rust vs UMEP)") + lines.append("") + lines.append("![Sinusoidal diurnal curve: Rust vs UMEP](sinusoidal_diurnal.png)") + lines.append("") + gnd_status = "PASS" if sinusoidal_ground.get("pass", False) else "FAIL" + wall_status = "PASS" if (sinusoidal_wall and sinusoidal_wall.get("pass", False)) else "FAIL" + lines.append(f"- Ground curve max |diff|: {sinusoidal_ground['max_abs_diff']:.2e} ({gnd_status})") + if sinusoidal_wall: + lines.append(f"- Wall curve max |diff|: {sinusoidal_wall['max_abs_diff']:.2e} ({wall_status})") + lines.append("") + + # Scenario table + if sinusoidal_scenarios: + lines.append("### Formula Agreement (12 Scenarios)") + lines.append("") + lines.append(f"**Result:** {sin_passed}/{sin_total} scenarios match within f32 tolerance (atol=1e-4)") + lines.append("") + 
lines.append("| Scenario | Rust Tg | UMEP Tg | |d Tg| | Rust Wall | UMEP Wall | |d Wall| | CI | Status |") + lines.append("|----------|---------|---------|--------|-----------|-----------|---------|------|--------|") + + for s in sinusoidal_scenarios: + status_str = "PASS" if s["passed"] else "FAIL" + lines.append( + f"| {s['name']} " + f"| {s['rust_tg']:.4f} " + f"| {s['umep_tg']:.4f} " + f"| {s['tg_diff']:.1e} " + f"| {s['rust_wall']:.4f} " + f"| {s['umep_wall']:.4f} " + f"| {s['wall_diff']:.1e} " + f"| {s['rust_ci']:.4f} " + f"| {status_str} |" + ) + + lines.append("") + + # Write file + report_path = REPORT_DIR / "golden_report.md" + with open(report_path, "w") as f: + f.write("\n".join(lines)) + + return report_path + + +# --------------------------------------------------------------------------- +# Main +# --------------------------------------------------------------------------- + + +def main(): + """Generate complete golden test report.""" + print("=" * 60) + print("Golden Test Visual Report Generator") + print("=" * 60) + + ensure_report_dir() + inputs = load_inputs() + + print("\nGenerating context plot...") + plot_context(inputs) + + all_results = {} + + print("Generating shadow comparisons...") + all_results.update(generate_shadow_comparisons(inputs)) + + print("Generating SVF comparisons...") + all_results.update(generate_svf_comparisons(inputs)) + + print("Generating GVF comparisons...") + all_results.update(generate_gvf_comparisons(inputs)) + + print("Generating radiation comparisons (isotropic)...") + all_results.update(generate_radiation_comparisons(inputs)) + + print("Generating radiation comparisons (anisotropic)...") + try: + all_results.update(generate_aniso_radiation_comparisons(inputs)) + except Exception as e: + print(f" Skipping anisotropic radiation: {e}") + + print("Generating UTCI comparisons...") + try: + all_results.update(generate_utci_comparisons()) + except Exception as e: + print(f" Skipping UTCI: {e}") + + print("Generating PET 
comparisons...") + try: + all_results.update(generate_pet_comparisons()) + except Exception as e: + print(f" Skipping PET: {e}") + + print("Generating Tmrt comparisons...") + try: + all_results.update(generate_tmrt_comparisons()) + except Exception as e: + print(f" Skipping Tmrt: {e}") + + print("Generating ground temperature comparisons (TsWaveDelay)...") + try: + all_results.update(generate_ground_temp_comparisons()) + except Exception as e: + print(f" Skipping ground temp: {e}") + + print("Generating wall temperature comparisons...") + try: + all_results.update(generate_wall_temp_comparisons()) + except Exception as e: + print(f" Skipping wall temp: {e}") + + print("Generating anisotropic sky comparisons...") + try: + all_results.update(generate_aniso_sky_comparisons()) + except Exception as e: + print(f" Skipping aniso sky: {e}") + + print("Generating sinusoidal ground temperature comparisons...") + try: + all_results.update(generate_sinusoidal_ground_temp()) + except Exception as e: + print(f" Skipping sinusoidal ground temp: {e}") + + # Apply thresholds + apply_thresholds(all_results) + + print("\nGenerating Markdown report...") + report_path = generate_markdown_report(all_results) + + # Print summary + spatial_results = {k: v for k, v in all_results.items() if not k.startswith("_")} + total = len(spatial_results) + passed = sum(1 for r in spatial_results.values() if r.get("pass", False)) + + sinusoidal_scenarios = all_results.get("_sinusoidal_scenarios", []) + sin_total = len(sinusoidal_scenarios) + sin_passed = sum(1 for s in sinusoidal_scenarios if s["passed"]) + + print("\n" + "=" * 60) + print(f"Report generated: {report_path}") + print(f"Spatial comparisons: {passed}/{total} pass") + if sin_total > 0: + print(f"Formula agreement: {sin_passed}/{sin_total} scenarios pass") + print("=" * 60) + + +if __name__ == "__main__": + main() diff --git a/tests/golden/test_golden_anisotropic_sky.py b/tests/golden/test_golden_anisotropic_sky.py new file mode 100644 
index 0000000..710398b --- /dev/null +++ b/tests/golden/test_golden_anisotropic_sky.py @@ -0,0 +1,476 @@ +""" +Golden Regression Tests for Anisotropic Sky Radiation Model + +These tests verify the Rust `anisotropic_sky` function produces physically valid +and consistent outputs. The anisotropic sky model computes direction-dependent +longwave and shortwave radiation from sky patches, vegetation, and buildings. + +The anisotropic_sky function is complex with many inputs: +- Shadow matrices (3D) for buildings, vegetation, and combined +- Sky patch geometry (altitude, azimuth, steradians) +- Sun position and radiation parameters +- Ground and wall temperatures + +Test strategy: +- Physical property tests: Verify output ranges and relationships +- Regression tests: Compare against pre-computed golden fixtures +""" + +from pathlib import Path + +import numpy as np +import pytest +from solweig.rustalgos import sky + +FIXTURES_DIR = Path(__file__).parent / "fixtures" + +# Physical constants +SBC = 5.67e-8 # Stefan-Boltzmann constant + + +def generate_sky_patches(n_alt_bands=6, n_azi_per_band=12): + """Generate a standard hemispherical sky patch grid. + + Uses Tregenza-style hemisphere division. + """ + patches = [] + + # Standard altitude bands (degrees from horizon) + alt_bands = [6, 18, 30, 42, 54, 66, 78, 90] + azis_per_band = [30, 24, 24, 18, 12, 6, 6, 1] # Patches per altitude band + + for _alt_idx, (alt, n_azi) in enumerate(zip(alt_bands[:n_alt_bands], azis_per_band[:n_alt_bands], strict=False)): + azi_step = 360.0 / n_azi if n_azi > 1 else 0 + for azi_idx in range(n_azi): + azi = azi_idx * azi_step + patches.append([alt, azi]) + + return np.array(patches, dtype=np.float32) + + +def compute_steradians(l_patches): + """Compute solid angle (steradians) for each sky patch. + + Based on hemisphere geometry. 
+ """ + n_patches = len(l_patches) + steradians = np.zeros(n_patches, dtype=np.float32) + deg2rad = np.pi / 180.0 + + # Group by altitude + altitudes = l_patches[:, 0] + unique_alts = np.unique(altitudes) + + for i, alt in enumerate(unique_alts): + mask = altitudes == alt + count = np.sum(mask) + + if i == 0: + # First band from horizon + ster = (360.0 / count * deg2rad) * np.sin(alt * deg2rad) + else: + prev_alt = unique_alts[i - 1] + delta_alt = (alt - prev_alt) / 2 + ster = (360.0 / count * deg2rad) * ( + np.sin((alt + delta_alt) * deg2rad) - np.sin((prev_alt + delta_alt) * deg2rad) + ) + + steradians[mask] = ster + + return steradians + + +@pytest.fixture(scope="module") +def input_data(): + """Load base input data from golden fixtures (shared across all tests in module).""" + params = dict(np.load(FIXTURES_DIR / "input_params.npz")) + return { + "dsm": np.load(FIXTURES_DIR / "input_dsm.npy").astype(np.float32), + "scale": float(params["scale"]), + } + + +@pytest.fixture(scope="module") +def svf_data(): + """Load SVF data from golden fixtures (shared across all tests in module).""" + return { + "svf": np.load(FIXTURES_DIR / "svf_total.npy").astype(np.float32), + } + + +@pytest.fixture(scope="module") +def shadow_data(): + """Load shadow data from golden fixtures (shared across all tests in module).""" + return { + "bldg_sh": np.load(FIXTURES_DIR / "shadow_noon_bldg_sh.npy").astype(np.float32), + "veg_sh": np.load(FIXTURES_DIR / "shadow_noon_veg_sh.npy").astype(np.float32), + } + + +@pytest.fixture(scope="module") +def aniso_sky_inputs(input_data, svf_data, shadow_data): + """Create inputs for anisotropic_sky calculation.""" + rows, cols = input_data["dsm"].shape + + # Generate sky patches + l_patches = generate_sky_patches(n_alt_bands=4, n_azi_per_band=8) # Simplified for testing + n_patches = len(l_patches) + steradians = compute_steradians(l_patches) + + # Create 3D shadow matrices (rows, cols, patches) + # shmat: 1 = sky visible, 0 = blocked by building + # 
For testing, use SVF to create approximate patch visibility + svf_expanded = svf_data["svf"][:, :, np.newaxis] + base_visibility = np.broadcast_to(svf_expanded, (rows, cols, n_patches)).copy() + + # Add some spatial variation based on building shadow + bldg_factor = shadow_data["bldg_sh"][:, :, np.newaxis] + veg_factor = shadow_data["veg_sh"][:, :, np.newaxis] + + # shmat: building shadow mask (uint8: 255 = sky visible, 0 = blocked) + shmat_f = base_visibility * np.broadcast_to(bldg_factor, (rows, cols, n_patches)) + shmat = np.where(shmat_f > 0.5, np.uint8(255), np.uint8(0)).astype(np.uint8) + + # vegshmat: vegetation shadow mask (uint8: 255 = sky visible, 0 = blocked) + vegshmat_f = base_visibility * np.broadcast_to(veg_factor, (rows, cols, n_patches)) + vegshmat = np.where(vegshmat_f > 0.3, np.uint8(255), np.uint8(0)).astype(np.uint8) + + # vbshvegshmat: combined building+vegetation shadow (uint8) + vbshvegshmat = np.where((shmat == 255) & (vegshmat == 255), np.uint8(255), np.uint8(0)).astype(np.uint8) + + # asvf: angular sky view factor (use base SVF as approximation) + asvf = svf_data["svf"].astype(np.float32) + + # lv: patch luminance array (alt, azi, luminance) + # luminance varies with altitude (higher = brighter) + luminance = 1000 + 500 * np.sin(l_patches[:, 0] * np.pi / 180) # Higher patches brighter + lv = np.column_stack([l_patches, luminance]).astype(np.float32) + + # Ground upwelling longwave + ta = 25.0 + ta_k = ta + 273.15 + lup_val = 0.95 * SBC * (ta_k**4) + lup = np.full((rows, cols), lup_val, dtype=np.float32) + + # Combined shadow (2D) + shadow = (shadow_data["bldg_sh"] * shadow_data["veg_sh"]).astype(np.float32) + + # Upwelling shortwave per direction + kup_base = np.full((rows, cols), 50.0, dtype=np.float32) # W/m² + + return { + "shmat": shmat, + "vegshmat": vegshmat, + "vbshvegshmat": vbshvegshmat, + "asvf": asvf, + "l_patches": l_patches, + "steradians": steradians, + "lv": lv, + "lup": lup, + "shadow": shadow, + "kup_e": kup_base.copy(), 
+ "kup_s": kup_base.copy(), + "kup_w": kup_base.copy(), + "kup_n": kup_base.copy(), + } + + +@pytest.fixture(scope="module") +def aniso_sky_result(aniso_sky_inputs): + """Compute anisotropic sky result (computed once per module).""" + inputs = aniso_sky_inputs + + # Create parameter objects + sun_params = sky.SunParams( + altitude=60.0, # High sun + azimuth=180.0, # Noon + ) + + sky_params = sky.SkyParams( + esky=0.75, + ta=25.0, + cyl=True, # Cylindrical body model + wall_scheme=False, # Simple wall model + albedo=0.20, + ) + + surface_params = sky.SurfaceParams( + tgwall=2.0, # Wall temperature deviation + ewall=0.90, # Wall emissivity + rad_i=600.0, # Direct radiation W/m² + rad_d=200.0, # Diffuse radiation W/m² + ) + + result = sky.anisotropic_sky( + inputs["shmat"], + inputs["vegshmat"], + inputs["vbshvegshmat"], + sun_params, + inputs["asvf"], + sky_params, + inputs["l_patches"], + None, # voxel_table (optional) + None, # voxel_maps (optional) + inputs["steradians"], + surface_params, + inputs["lup"], + inputs["lv"], + inputs["shadow"], + inputs["kup_e"], + inputs["kup_s"], + inputs["kup_w"], + inputs["kup_n"], + ) + + return result + + +class TestAnisotropicSkyPhysicalProperties: + """Verify anisotropic sky outputs satisfy physical constraints.""" + + def test_ldown_non_negative(self, aniso_sky_result): + """Downwelling longwave should be non-negative.""" + ldown = np.array(aniso_sky_result.ldown) + valid_mask = ~np.isnan(ldown) + assert np.all(ldown[valid_mask] >= 0), "ldown has negative values" + + def test_ldown_reasonable_range(self, aniso_sky_result): + """Downwelling longwave should be in reasonable range (100-600 W/m²).""" + ldown = np.array(aniso_sky_result.ldown) + valid_mask = ~np.isnan(ldown) & (ldown > 0) + if np.any(valid_mask): + mean_val = np.mean(ldown[valid_mask]) + # Typical range for mid-latitude summer + assert mean_val > 50, f"ldown mean {mean_val:.1f} too low" + assert mean_val < 800, f"ldown mean {mean_val:.1f} too high" + + def 
test_lside_non_negative(self, aniso_sky_result): + """Side longwave should be non-negative.""" + lside = np.array(aniso_sky_result.lside) + valid_mask = ~np.isnan(lside) + assert np.all(lside[valid_mask] >= 0), "lside has negative values" + + def test_lside_components_sum(self, aniso_sky_result): + """Lside should approximately equal sum of components.""" + lside = np.array(aniso_sky_result.lside) + lside_sky = np.array(aniso_sky_result.lside_sky) + lside_veg = np.array(aniso_sky_result.lside_veg) + lside_sh = np.array(aniso_sky_result.lside_sh) + lside_sun = np.array(aniso_sky_result.lside_sun) + lside_ref = np.array(aniso_sky_result.lside_ref) + + # Sum of components + lside_sum = lside_sky + lside_veg + lside_sh + lside_sun + lside_ref + + # Should match total + np.testing.assert_allclose( + lside, lside_sum, rtol=1e-4, atol=1e-4, err_msg="Lside doesn't match sum of components" + ) + + def test_kside_non_negative(self, aniso_sky_result): + """Side shortwave should be non-negative.""" + kside = np.array(aniso_sky_result.kside) + valid_mask = ~np.isnan(kside) + assert np.all(kside[valid_mask] >= 0), "kside has negative values" + + def test_directional_longwave_non_negative(self, aniso_sky_result): + """Directional longwave components should be non-negative.""" + for direction in ["least", "lsouth", "lwest", "lnorth"]: + arr = np.array(getattr(aniso_sky_result, direction)) + valid_mask = ~np.isnan(arr) + assert np.all(arr[valid_mask] >= 0), f"{direction} has negative values" + + def test_directional_shortwave_non_negative(self, aniso_sky_result): + """Directional shortwave components should be non-negative.""" + for direction in ["keast", "ksouth", "kwest", "knorth"]: + arr = np.array(getattr(aniso_sky_result, direction)) + valid_mask = ~np.isnan(arr) + assert np.all(arr[valid_mask] >= 0), f"{direction} has negative values" + + +class TestAnisotropicSkyOutputShape: + """Verify output shapes are correct.""" + + def test_ldown_shape_matches_input(self, 
aniso_sky_result, aniso_sky_inputs): + """Ldown should match input spatial dimensions.""" + expected_shape = aniso_sky_inputs["shadow"].shape + actual_shape = np.array(aniso_sky_result.ldown).shape + assert actual_shape == expected_shape, f"ldown shape {actual_shape} != {expected_shape}" + + def test_all_2d_outputs_match_input(self, aniso_sky_result, aniso_sky_inputs): + """All 2D outputs should match input spatial dimensions.""" + expected_shape = aniso_sky_inputs["shadow"].shape + + attrs_2d = [ + "ldown", + "lside", + "lside_sky", + "lside_veg", + "lside_sh", + "lside_sun", + "lside_ref", + "least", + "lwest", + "lnorth", + "lsouth", + "keast", + "ksouth", + "kwest", + "knorth", + "kside_i", + "kside_d", + "kside", + ] + + for attr in attrs_2d: + arr = np.array(getattr(aniso_sky_result, attr)) + assert arr.shape == expected_shape, f"{attr} shape {arr.shape} != {expected_shape}" + + def test_steradians_matches_patches(self, aniso_sky_result, aniso_sky_inputs): + """Steradians array should match number of patches.""" + n_patches = len(aniso_sky_inputs["l_patches"]) + steradians = np.array(aniso_sky_result.steradians) + assert len(steradians) == n_patches, f"steradians length {len(steradians)} != {n_patches}" + + +class TestAnisotropicSkySunPosition: + """Verify response to sun position changes.""" + + def test_kside_i_responds_to_altitude(self, aniso_sky_inputs): + """Direct shortwave should respond to sun altitude.""" + inputs = aniso_sky_inputs + + # High sun + sun_high = sky.SunParams(altitude=60.0, azimuth=180.0) + sky_params = sky.SkyParams(esky=0.75, ta=25.0, cyl=True, wall_scheme=False, albedo=0.20) + surface_params = sky.SurfaceParams(tgwall=2.0, ewall=0.90, rad_i=600.0, rad_d=200.0) + + result_high = sky.anisotropic_sky( + inputs["shmat"], + inputs["vegshmat"], + inputs["vbshvegshmat"], + sun_high, + inputs["asvf"], + sky_params, + inputs["l_patches"], + None, + None, + inputs["steradians"], + surface_params, + inputs["lup"], + inputs["lv"], + 
inputs["shadow"], + inputs["kup_e"], + inputs["kup_s"], + inputs["kup_w"], + inputs["kup_n"], + ) + + # Low sun + sun_low = sky.SunParams(altitude=20.0, azimuth=180.0) + result_low = sky.anisotropic_sky( + inputs["shmat"], + inputs["vegshmat"], + inputs["vbshvegshmat"], + sun_low, + inputs["asvf"], + sky_params, + inputs["l_patches"], + None, + None, + inputs["steradians"], + surface_params, + inputs["lup"], + inputs["lv"], + inputs["shadow"], + inputs["kup_e"], + inputs["kup_s"], + inputs["kup_w"], + inputs["kup_n"], + ) + + kside_i_high = np.nanmean(np.array(result_high.kside_i)) + kside_i_low = np.nanmean(np.array(result_low.kside_i)) + + # Lower sun -> more direct on vertical surface + assert kside_i_low > kside_i_high * 0.5, ( + f"kside_i at low sun ({kside_i_low:.1f}) should be > high sun ({kside_i_high:.1f})" + ) + + +class TestAnisotropicSkyRadiationBalance: + """Verify radiation balance relationships.""" + + def test_lside_components_positive(self, aniso_sky_result): + """Individual Lside components should be non-negative.""" + components = ["lside_sky", "lside_veg", "lside_sh", "lside_sun", "lside_ref"] + for comp in components: + arr = np.array(getattr(aniso_sky_result, comp)) + valid_mask = ~np.isnan(arr) + # Small negative values might occur due to numerical precision + assert np.all(arr[valid_mask] >= -1e-3), f"{comp} has significant negative values" + + def test_kside_components_sum(self, aniso_sky_result): + """Kside should equal kside_i + kside_d plus reflected terms.""" + kside = np.array(aniso_sky_result.kside) + kside_i = np.array(aniso_sky_result.kside_i) + kside_d = np.array(aniso_sky_result.kside_d) + + # Kside includes direct (i), diffuse (d), and reflected components + # The sum should be >= kside_i + kside_d + assert np.all(kside >= kside_i + kside_d - 1e-3), "kside should be >= kside_i + kside_d" + + +class TestAnisotropicSkyGoldenRegression: + """Golden regression tests for anisotropic sky model.""" + + @pytest.fixture + def 
aniso_golden(self): + """Load golden anisotropic sky fixtures if they exist.""" + golden_path = FIXTURES_DIR / "aniso_sky_output.npz" + if golden_path.exists(): + return dict(np.load(golden_path)) + return None + + def test_ldown_matches_golden(self, aniso_sky_result, aniso_golden): + """Ldown should match golden fixture.""" + if aniso_golden is None: + pytest.skip("Golden anisotropic sky fixtures not generated yet") + + np.testing.assert_allclose( + np.array(aniso_sky_result.ldown), + aniso_golden["ldown"], + rtol=1e-4, + atol=0.1, + err_msg="Ldown differs from golden fixture", + ) + + def test_lside_matches_golden(self, aniso_sky_result, aniso_golden): + """Lside should match golden fixture.""" + if aniso_golden is None: + pytest.skip("Golden anisotropic sky fixtures not generated yet") + + np.testing.assert_allclose( + np.array(aniso_sky_result.lside), + aniso_golden["lside"], + rtol=1e-4, + atol=0.1, + err_msg="Lside differs from golden fixture", + ) + + def test_kside_matches_golden(self, aniso_sky_result, aniso_golden): + """Kside should match golden fixture.""" + if aniso_golden is None: + pytest.skip("Golden anisotropic sky fixtures not generated yet") + + np.testing.assert_allclose( + np.array(aniso_sky_result.kside), + aniso_golden["kside"], + rtol=1e-4, + atol=0.1, + err_msg="Kside differs from golden fixture", + ) + + +if __name__ == "__main__": + pytest.main([__file__, "-v"]) diff --git a/tests/golden/test_golden_ground_temp.py b/tests/golden/test_golden_ground_temp.py new file mode 100644 index 0000000..0a00d71 --- /dev/null +++ b/tests/golden/test_golden_ground_temp.py @@ -0,0 +1,750 @@ +""" +Golden Regression Tests for Ground Temperature (TsWaveDelay) Calculations + +These tests verify that the Rust TsWaveDelay implementation correctly applies +thermal inertia to ground temperature using an exponential decay model. 
+ +Formula: Lup = Tgmap0 * (1 - weight) + Tgmap1 * weight +where: weight = exp(-33.27 * timeadd) + +The decay constant 33.27 day⁻¹ corresponds to a time constant of ~43 minutes. +""" + +from pathlib import Path + +import numpy as np +import pytest +from solweig.rustalgos import ground + +FIXTURES_DIR = Path(__file__).parent / "fixtures" + +# Tolerance for ground temperature calculations (max observed diff ~3.05e-5) +RTOL = 1e-4 +ATOL = 1e-3 + + +@pytest.fixture(scope="module") +def ground_temp_inputs(): + """Load ground temperature input fixtures.""" + return { + "gvflup": np.load(FIXTURES_DIR / "ground_temp_input_gvflup.npy"), + "tgmap1": np.load(FIXTURES_DIR / "ground_temp_input_tgmap1.npy"), + } + + +@pytest.fixture(scope="module") +def case1_data(): + """Load case 1: first timestep of the day.""" + return dict(np.load(FIXTURES_DIR / "ground_temp_case1.npz")) + + +@pytest.fixture(scope="module") +def case2_data(): + """Load case 2: short timestep accumulation.""" + return dict(np.load(FIXTURES_DIR / "ground_temp_case2.npz")) + + +@pytest.fixture(scope="module") +def case3_data(): + """Load case 3: long timestep.""" + return dict(np.load(FIXTURES_DIR / "ground_temp_case3.npz")) + + +class TestGoldenTsWaveDelay: + """Golden tests for TsWaveDelay thermal inertia model.""" + + def test_first_morning_timestep(self, ground_temp_inputs, case1_data): + """First timestep of the day should reset previous temperature.""" + lup, new_timeadd, new_tgmap1 = ground.ts_wave_delay( + ground_temp_inputs["gvflup"], + firstdaytime=int(case1_data["input_firstdaytime"]), + timeadd=float(case1_data["input_timeadd"]), + timestepdec=float(case1_data["input_timestepdec"]), + tgmap1=ground_temp_inputs["tgmap1"], + ) + + np.testing.assert_allclose( + np.array(lup), + case1_data["lup"], + rtol=RTOL, + atol=ATOL, + err_msg="Case 1 Lup differs from golden fixture", + ) + + np.testing.assert_allclose( + new_timeadd, + float(case1_data["timeadd"]), + rtol=RTOL, + atol=1e-6, + err_msg="Case 1 
timeadd differs from golden fixture", + ) + + np.testing.assert_allclose( + np.array(new_tgmap1), + case1_data["tgmap1"], + rtol=RTOL, + atol=ATOL, + err_msg="Case 1 Tgmap1 differs from golden fixture", + ) + + def test_short_timestep_accumulation(self, ground_temp_inputs, case2_data): + """Short timestep should accumulate time and blend temperatures.""" + lup, new_timeadd, new_tgmap1 = ground.ts_wave_delay( + ground_temp_inputs["gvflup"], + firstdaytime=int(case2_data["input_firstdaytime"]), + timeadd=float(case2_data["input_timeadd"]), + timestepdec=float(case2_data["input_timestepdec"]), + tgmap1=ground_temp_inputs["tgmap1"], + ) + + np.testing.assert_allclose( + np.array(lup), + case2_data["lup"], + rtol=RTOL, + atol=ATOL, + err_msg="Case 2 Lup differs from golden fixture", + ) + + np.testing.assert_allclose( + new_timeadd, + float(case2_data["timeadd"]), + rtol=RTOL, + atol=1e-6, + err_msg="Case 2 timeadd differs from golden fixture", + ) + + def test_long_timestep_update(self, ground_temp_inputs, case3_data): + """Long timestep (>=59 min) should update Tgmap1.""" + lup, new_timeadd, new_tgmap1 = ground.ts_wave_delay( + ground_temp_inputs["gvflup"], + firstdaytime=int(case3_data["input_firstdaytime"]), + timeadd=float(case3_data["input_timeadd"]), + timestepdec=float(case3_data["input_timestepdec"]), + tgmap1=ground_temp_inputs["tgmap1"], + ) + + np.testing.assert_allclose( + np.array(lup), + case3_data["lup"], + rtol=RTOL, + atol=ATOL, + err_msg="Case 3 Lup differs from golden fixture", + ) + + np.testing.assert_allclose( + new_timeadd, + float(case3_data["timeadd"]), + rtol=RTOL, + atol=1e-6, + err_msg="Case 3 timeadd differs from golden fixture", + ) + + np.testing.assert_allclose( + np.array(new_tgmap1), + case3_data["tgmap1"], + rtol=RTOL, + atol=ATOL, + err_msg="Case 3 Tgmap1 differs from golden fixture", + ) + + +class TestGoldenTsWaveDelayProperties: + """Verify physical properties of TsWaveDelay model.""" + + def test_thermal_inertia_effect(self, 
ground_temp_inputs, case2_data): + """Output should blend between current and previous temperature.""" + lup, _, _ = ground.ts_wave_delay( + ground_temp_inputs["gvflup"], + firstdaytime=0, + timeadd=float(case2_data["input_timeadd"]), + timestepdec=float(case2_data["input_timestepdec"]), + tgmap1=ground_temp_inputs["tgmap1"], + ) + lup_arr = np.array(lup) + + # Output should be between current and previous values + gvflup = ground_temp_inputs["gvflup"] + tgmap1 = ground_temp_inputs["tgmap1"] + + min_vals = np.minimum(gvflup, tgmap1) + max_vals = np.maximum(gvflup, tgmap1) + + # Allow small numerical tolerance + assert np.all(lup_arr >= min_vals - 0.1), "Output below minimum bound" + assert np.all(lup_arr <= max_vals + 0.1), "Output above maximum bound" + + def test_first_morning_resets_state(self, ground_temp_inputs): + """First morning timestep should set Tgmap1 = Tgmap0.""" + _, _, new_tgmap1 = ground.ts_wave_delay( + ground_temp_inputs["gvflup"], + firstdaytime=1, # First morning + timeadd=0.0, + timestepdec=30 / 1440, + tgmap1=ground_temp_inputs["tgmap1"], + ) + + # After first morning, Tgmap1 should equal current input + np.testing.assert_allclose( + np.array(new_tgmap1), + ground_temp_inputs["gvflup"], + rtol=1e-5, + err_msg="First morning should reset Tgmap1 to current value", + ) + + def test_exponential_decay_weight(self, ground_temp_inputs): + """Verify exponential decay weight is applied correctly.""" + timeadd = 0.05 # ~72 minutes (above threshold) + + lup, _, new_tgmap1 = ground.ts_wave_delay( + ground_temp_inputs["gvflup"], + firstdaytime=0, + timeadd=timeadd, + timestepdec=timeadd, + tgmap1=ground_temp_inputs["tgmap1"], + ) + + # Calculate expected weight + weight = np.exp(-33.27 * timeadd) + + # Expected output + expected = ground_temp_inputs["gvflup"] * (1 - weight) + ground_temp_inputs["tgmap1"] * weight + + np.testing.assert_allclose( + np.array(lup), + expected, + rtol=1e-5, + err_msg="Exponential decay weight not applied correctly", + ) + + 
+class TestGoldenGroundTemperatureSinusoidal: + """Tests for compute_ground_temperature sinusoidal model. + + Verifies the diurnal temperature curve shape: + - Rises from sunrise to TmaxLST + - Peaks near TmaxLST + - Declines after TmaxLST (afternoon cooling) + - Returns to ~0 by late evening + """ + + @pytest.fixture + def common_inputs(self): + """Common inputs for sinusoidal tests (cobblestone defaults).""" + shape = (3, 3) + return { + "ta": 20.0, + "sun_altitude": 45.0, + "altmax": 55.0, + "snup": 5.0, # Sunrise at 05:00 + "global_rad": 600.0, + "rad_g0": 650.0, + "zen_deg": 45.0, + # Cobblestone params + "alb_grid": np.full(shape, 0.2, dtype=np.float32), + "emis_grid": np.full(shape, 0.95, dtype=np.float32), + "tgk_grid": np.full(shape, 0.37, dtype=np.float32), + "tstart_grid": np.full(shape, -3.41, dtype=np.float32), + "tmaxlst_grid": np.full(shape, 15.0, dtype=np.float32), + } + + def _compute_tg_at_time(self, inputs, hour): + """Compute tg and tg_wall at a given hour.""" + dectime = hour / 24.0 + tg, tg_wall, ci, _, _ = ground.compute_ground_temperature( + inputs["ta"], + inputs["sun_altitude"], + inputs["altmax"], + dectime, + inputs["snup"], + inputs["global_rad"], + inputs["rad_g0"], + inputs["zen_deg"], + inputs["alb_grid"], + inputs["emis_grid"], + inputs["tgk_grid"], + inputs["tstart_grid"], + inputs["tmaxlst_grid"], + ) + return np.array(tg), tg_wall, ci + + def test_afternoon_cooling_ground(self, common_inputs): + """Ground temperature must decline after TmaxLST (15:00). + + This is the critical test — a phase clamping bug would keep tg + stuck at its peak value instead of declining. 
+ """ + tg_peak, _, _ = self._compute_tg_at_time(common_inputs, 15.0) + tg_after, _, _ = self._compute_tg_at_time(common_inputs, 18.0) + tg_evening, _, _ = self._compute_tg_at_time(common_inputs, 22.0) + + peak_val = tg_peak[0, 0] + after_val = tg_after[0, 0] + evening_val = tg_evening[0, 0] + + # Must decline: peak > 18:00 > 22:00 + assert after_val < peak_val, f"tg at 18:00 ({after_val:.2f}) should be less than peak at 15:00 ({peak_val:.2f})" + assert evening_val < after_val, ( + f"tg at 22:00 ({evening_val:.2f}) should be less than at 18:00 ({after_val:.2f})" + ) + + def test_afternoon_cooling_wall(self, common_inputs): + """Wall temperature must decline after TmaxLST (15:00).""" + _, wall_peak, _ = self._compute_tg_at_time(common_inputs, 15.0) + _, wall_after, _ = self._compute_tg_at_time(common_inputs, 18.0) + _, wall_evening, _ = self._compute_tg_at_time(common_inputs, 22.0) + + assert wall_after < wall_peak, ( + f"tg_wall at 18:00 ({wall_after:.2f}) should be less than peak at 15:00 ({wall_peak:.2f})" + ) + assert wall_evening < wall_after, ( + f"tg_wall at 22:00 ({wall_evening:.2f}) should be less than at 18:00 ({wall_after:.2f})" + ) + + def test_diurnal_curve_shape(self, common_inputs): + """Full diurnal curve should rise, peak, and decline.""" + hours = [6, 9, 12, 15, 18, 21, 23] + tg_vals = [] + for h in hours: + tg, _, _ = self._compute_tg_at_time(common_inputs, h) + tg_vals.append(tg[0, 0]) + + # Temperature should increase from 06:00 to 15:00 + assert tg_vals[1] > tg_vals[0], "09:00 > 06:00" + assert tg_vals[2] > tg_vals[1], "12:00 > 09:00" + assert tg_vals[3] > tg_vals[2], "15:00 > 12:00" + + # Peak at 15:00 (index 3) should be the maximum + assert tg_vals[3] == max(tg_vals), ( + f"Peak should be at 15:00 (TmaxLST), got max at index {tg_vals.index(max(tg_vals))}" + ) + + # Temperature should decrease after 15:00 + assert tg_vals[4] < tg_vals[3], "18:00 < 15:00" + assert tg_vals[5] < tg_vals[4], "21:00 < 18:00" + assert tg_vals[6] < tg_vals[5], 
"23:00 < 21:00" + + def test_before_sunrise_is_zero(self, common_inputs): + """Temperature before sunrise should be 0.""" + tg, tg_wall, _ = self._compute_tg_at_time(common_inputs, 4.0) + assert np.all(tg == 0.0), "Ground temp before sunrise should be 0" + assert tg_wall == 0.0, "Wall temp before sunrise should be 0" + + def test_wall_material_params_affect_output(self, common_inputs): + """Different wall material params should produce different temperatures.""" + dectime = 12.0 / 24.0 + # Default (cobblestone): tgk=0.37, tstart=-3.41, tmaxlst=15.0 + _, wall_default, _, _, _ = ground.compute_ground_temperature( + common_inputs["ta"], + common_inputs["sun_altitude"], + common_inputs["altmax"], + dectime, + common_inputs["snup"], + common_inputs["global_rad"], + common_inputs["rad_g0"], + common_inputs["zen_deg"], + common_inputs["alb_grid"], + common_inputs["emis_grid"], + common_inputs["tgk_grid"], + common_inputs["tstart_grid"], + common_inputs["tmaxlst_grid"], + ) + + # Wood wall: higher TgK, higher Tstart → different amplitude + _, wall_wood, _, _, _ = ground.compute_ground_temperature( + common_inputs["ta"], + common_inputs["sun_altitude"], + common_inputs["altmax"], + dectime, + common_inputs["snup"], + common_inputs["global_rad"], + common_inputs["rad_g0"], + common_inputs["zen_deg"], + common_inputs["alb_grid"], + common_inputs["emis_grid"], + common_inputs["tgk_grid"], + common_inputs["tstart_grid"], + common_inputs["tmaxlst_grid"], + tgk_wall=0.50, + tstart_wall=-2.0, + tmaxlst_wall=14.0, + ) + + assert wall_wood != wall_default, ( + "Wood wall params should produce different temperature than default cobblestone" + ) + + +class TestRustVsUMEPNumericalAgreement: + """Side-by-side numerical comparison of Rust vs UMEP Python formulas. 
+ + Reimplements the exact UMEP formulas from + Solweig_2025a_calc_forprocessing.py (lines 171-199) in pure NumPy, + then calls the Rust function with identical inputs and checks for + exact numerical agreement (within f32 precision). + + This catches any formula transcription errors, ordering differences, + or missing terms between our Rust implementation and the UMEP reference. + """ + + @staticmethod + def _umep_ground_temp( + altmax: float, + dectime_frac: float, + snup_hours: float, + global_rad: float, + rad_g0: float, + zen_deg: float, + tgk: np.ndarray, + tstart: np.ndarray, + tmaxlst: np.ndarray, + tgk_wall: float, + tstart_wall: float, + tmaxlst_wall: float, + sun_altitude: float, + ) -> tuple[np.ndarray, float, float]: + """Pure-Python UMEP reference implementation (lines 171-199). + + Returns (Tg, Tgwall, CI_TgG) matching UMEP exactly. + """ + # --- Tgamp (UMEP line 172, 174) --- + Tgamp = tgk * altmax + tstart + Tgampwall = tgk_wall * altmax + tstart_wall + + # --- Phase + sinusoidal (UMEP lines 175-176) --- + snup_frac = snup_hours / 24.0 + if dectime_frac > snup_frac: + # Ground (per-pixel) + tmaxlst_frac = tmaxlst / 24.0 + phase = (dectime_frac - snup_frac) / (tmaxlst_frac - snup_frac) + Tg = Tgamp * np.sin(phase * np.pi / 2.0) + + # Wall (scalar) + tmaxlst_wall_frac = tmaxlst_wall / 24.0 + denom_wall = tmaxlst_wall_frac - snup_frac + phase_wall = (dectime_frac - snup_frac) / denom_wall if denom_wall > 0 else dectime_frac - snup_frac + Tgwall = Tgampwall * np.sin(phase_wall * np.pi / 2.0) + else: + Tg = np.zeros_like(tgk) + Tgwall = 0.0 + + # --- Wall clip before CI (UMEP lines 178-180) --- + if Tgwall < 0: + Tgwall = 0.0 + + # --- CI_TgG (UMEP lines 184, 189-192) --- + if sun_altitude > 0 and rad_g0 > 0: + corr = 0.1473 * np.log(90.0 - zen_deg) + 0.3454 + CI_TgG = (global_rad / rad_g0) + (1.0 - corr) + if CI_TgG > 1.0 or np.isinf(CI_TgG): + CI_TgG = 1.0 + else: + CI_TgG = 1.0 + + # --- Apply CI (UMEP lines 196-197) --- + Tg = Tg * CI_TgG + Tgwall = 
Tgwall * CI_TgG + + # --- Ground clip (UMEP lines 198-199, with landcover=1) --- + Tg[Tg < 0] = 0.0 + + return Tg, Tgwall, CI_TgG + + # Test scenarios: (name, inputs_dict) + # Each scenario tests a specific aspect of the formula + SCENARIOS = [ + ( + "noon_clear_cobblestone", + dict( + altmax=55.0, + hour=12.0, + snup=5.0, + global_rad=600.0, + rad_g0=650.0, + zen_deg=35.0, + sun_altitude=55.0, + tgk_val=0.37, + tstart_val=-3.41, + tmaxlst_val=15.0, + tgk_wall=0.37, + tstart_wall=-3.41, + tmaxlst_wall=15.0, + ), + ), + ( + "noon_clear_asphalt", + dict( + altmax=55.0, + hour=12.0, + snup=5.0, + global_rad=600.0, + rad_g0=650.0, + zen_deg=35.0, + sun_altitude=55.0, + tgk_val=0.58, + tstart_val=-9.78, + tmaxlst_val=15.0, + tgk_wall=0.37, + tstart_wall=-3.41, + tmaxlst_wall=15.0, + ), + ), + ( + "afternoon_decline_15h", + dict( + altmax=55.0, + hour=18.0, + snup=5.0, + global_rad=300.0, + rad_g0=400.0, + zen_deg=60.0, + sun_altitude=30.0, + tgk_val=0.37, + tstart_val=-3.41, + tmaxlst_val=15.0, + tgk_wall=0.37, + tstart_wall=-3.41, + tmaxlst_wall=15.0, + ), + ), + ( + "evening_22h", + dict( + altmax=55.0, + hour=22.0, + snup=5.0, + global_rad=0.0, + rad_g0=0.0, + zen_deg=90.0, + sun_altitude=0.0, + tgk_val=0.37, + tstart_val=-3.41, + tmaxlst_val=15.0, + tgk_wall=0.37, + tstart_wall=-3.41, + tmaxlst_wall=15.0, + ), + ), + ( + "before_sunrise", + dict( + altmax=55.0, + hour=3.0, + snup=5.0, + global_rad=0.0, + rad_g0=0.0, + zen_deg=90.0, + sun_altitude=0.0, + tgk_val=0.37, + tstart_val=-3.41, + tmaxlst_val=15.0, + tgk_wall=0.37, + tstart_wall=-3.41, + tmaxlst_wall=15.0, + ), + ), + ( + "peak_at_tmaxlst", + dict( + altmax=55.0, + hour=15.0, + snup=5.0, + global_rad=500.0, + rad_g0=550.0, + zen_deg=45.0, + sun_altitude=45.0, + tgk_val=0.37, + tstart_val=-3.41, + tmaxlst_val=15.0, + tgk_wall=0.37, + tstart_wall=-3.41, + tmaxlst_wall=15.0, + ), + ), + ( + "cloudy_ci_low", + dict( + altmax=55.0, + hour=12.0, + snup=5.0, + global_rad=200.0, + rad_g0=650.0, + zen_deg=35.0, + 
sun_altitude=55.0, + tgk_val=0.37, + tstart_val=-3.41, + tmaxlst_val=15.0, + tgk_wall=0.37, + tstart_wall=-3.41, + tmaxlst_wall=15.0, + ), + ), + ( + "wood_wall_noon", + dict( + altmax=55.0, + hour=12.0, + snup=5.0, + global_rad=600.0, + rad_g0=650.0, + zen_deg=35.0, + sun_altitude=55.0, + tgk_val=0.37, + tstart_val=-3.41, + tmaxlst_val=15.0, + tgk_wall=0.50, + tstart_wall=-2.0, + tmaxlst_wall=14.0, + ), + ), + ( + "brick_wall_afternoon", + dict( + altmax=55.0, + hour=18.0, + snup=5.0, + global_rad=300.0, + rad_g0=400.0, + zen_deg=60.0, + sun_altitude=30.0, + tgk_val=0.37, + tstart_val=-3.41, + tmaxlst_val=15.0, + tgk_wall=0.40, + tstart_wall=-4.0, + tmaxlst_wall=15.0, + ), + ), + ( + "grass_morning", + dict( + altmax=55.0, + hour=8.0, + snup=5.0, + global_rad=300.0, + rad_g0=320.0, + zen_deg=60.0, + sun_altitude=30.0, + tgk_val=0.21, + tstart_val=-3.38, + tmaxlst_val=14.0, + tgk_wall=0.37, + tstart_wall=-3.41, + tmaxlst_wall=15.0, + ), + ), + ( + "water", + dict( + altmax=55.0, + hour=12.0, + snup=5.0, + global_rad=600.0, + rad_g0=650.0, + zen_deg=35.0, + sun_altitude=55.0, + tgk_val=0.0, + tstart_val=0.0, + tmaxlst_val=12.0, + tgk_wall=0.37, + tstart_wall=-3.41, + tmaxlst_wall=15.0, + ), + ), + ( + "high_latitude_low_sun", + dict( + altmax=15.0, + hour=12.0, + snup=9.0, + global_rad=100.0, + rad_g0=120.0, + zen_deg=78.0, + sun_altitude=12.0, + tgk_val=0.37, + tstart_val=-3.41, + tmaxlst_val=13.0, + tgk_wall=0.37, + tstart_wall=-3.41, + tmaxlst_wall=13.0, + ), + ), + ] + + @pytest.mark.parametrize("name,inputs", SCENARIOS, ids=[s[0] for s in SCENARIOS]) + def test_rust_matches_umep_formula(self, name, inputs): + """Rust output must match the UMEP Python formula exactly (within f32).""" + shape = (3, 3) + dectime_frac = inputs["hour"] / 24.0 + + # Build grid inputs + tgk_grid = np.full(shape, inputs["tgk_val"], dtype=np.float32) + tstart_grid = np.full(shape, inputs["tstart_val"], dtype=np.float32) + tmaxlst_grid = np.full(shape, inputs["tmaxlst_val"], 
dtype=np.float32) + alb_grid = np.full(shape, 0.2, dtype=np.float32) + emis_grid = np.full(shape, 0.95, dtype=np.float32) + + # UMEP reference (pure Python) + umep_tg, umep_tgwall, umep_ci = self._umep_ground_temp( + altmax=inputs["altmax"], + dectime_frac=dectime_frac, + snup_hours=inputs["snup"], + global_rad=inputs["global_rad"], + rad_g0=inputs["rad_g0"], + zen_deg=inputs["zen_deg"], + tgk=tgk_grid.astype(np.float64), + tstart=tstart_grid.astype(np.float64), + tmaxlst=tmaxlst_grid.astype(np.float64), + tgk_wall=inputs["tgk_wall"], + tstart_wall=inputs["tstart_wall"], + tmaxlst_wall=inputs["tmaxlst_wall"], + sun_altitude=inputs["sun_altitude"], + ) + + # Rust + rust_tg, rust_tgwall, rust_ci, _, _ = ground.compute_ground_temperature( + float(inputs["sun_altitude"]), # ta (unused but required) + inputs["sun_altitude"], + inputs["altmax"], + dectime_frac, + inputs["snup"], + inputs["global_rad"], + inputs["rad_g0"], + inputs["zen_deg"], + alb_grid, + emis_grid, + tgk_grid, + tstart_grid, + tmaxlst_grid, + tgk_wall=inputs["tgk_wall"], + tstart_wall=inputs["tstart_wall"], + tmaxlst_wall=inputs["tmaxlst_wall"], + ) + rust_tg = np.array(rust_tg) + + # Compare CI_TgG + np.testing.assert_allclose( + rust_ci, + umep_ci, + atol=1e-5, + err_msg=f"[{name}] CI_TgG differs", + ) + + # Compare ground temperature grid + np.testing.assert_allclose( + rust_tg, + umep_tg.astype(np.float32), + atol=1e-4, + err_msg=f"[{name}] Tg grid differs", + ) + + # Compare wall temperature + np.testing.assert_allclose( + rust_tgwall, + float(umep_tgwall), + atol=1e-4, + err_msg=f"[{name}] Tg_wall differs", + ) + + +if __name__ == "__main__": + pytest.main([__file__, "-v"]) diff --git a/tests/golden/test_golden_gvf.py b/tests/golden/test_golden_gvf.py new file mode 100644 index 0000000..59149a7 --- /dev/null +++ b/tests/golden/test_golden_gvf.py @@ -0,0 +1,347 @@ +""" +Golden Regression Tests for Ground View Factor (GVF) Calculations + +These tests verify the Rust GVF implementation produces 
"""
Golden Regression Tests for Ground View Factor (GVF) Calculations

These tests verify the Rust GVF implementation produces physically valid
and consistent outputs. GVF determines how much radiation a person receives
from ground and wall surfaces (as opposed to sky).

Test strategy:
- Physical property tests: Verify ranges, relationships, symmetry
- Regression tests: Compare against pre-computed golden fixtures (when available)

Reference: Lindberg et al. (2008) - SOLWEIG GVF model with wall radiation
"""

from pathlib import Path

import numpy as np
import pytest
from scipy import ndimage
from solweig.constants import SBC
from solweig.rustalgos import gvf as gvf_module
from solweig.rustalgos import shadowing

pytestmark = pytest.mark.slow

FIXTURES_DIR = Path(__file__).parent / "fixtures"

# Physical constants
KELVIN_OFFSET = 273.15
DEFAULT_ALBEDO = 0.15  # Typical urban ground albedo
DEFAULT_EMISSIVITY = 0.95  # Typical ground emissivity
DEFAULT_TA = 25.0  # Air temperature (°C)
DEFAULT_TGWALL = 2.0  # Wall temperature deviation (K)


@pytest.fixture(scope="module")
def input_data():
    """Load input data from golden fixtures (shared across all tests in module)."""
    return {
        "dsm": np.load(FIXTURES_DIR / "input_dsm.npy"),
        "cdsm": np.load(FIXTURES_DIR / "input_cdsm.npy"),
        "tdsm": np.load(FIXTURES_DIR / "input_tdsm.npy"),
        "bush": np.load(FIXTURES_DIR / "input_bush.npy"),
        "wall_ht": np.load(FIXTURES_DIR / "input_wall_ht.npy"),
        "wall_asp": np.load(FIXTURES_DIR / "input_wall_asp.npy"),
        "params": dict(np.load(FIXTURES_DIR / "input_params.npz")),
    }


@pytest.fixture(scope="module")
def shadow_data():
    """Load shadow data from golden fixtures (shared across all tests in module)."""
    return {
        "bldg_sh": np.load(FIXTURES_DIR / "shadow_noon_bldg_sh.npy"),
        "veg_sh": np.load(FIXTURES_DIR / "shadow_noon_veg_sh.npy"),
        "wall_sh": np.load(FIXTURES_DIR / "shadow_noon_wall_sh.npy"),
        "wall_sun": np.load(FIXTURES_DIR / "shadow_noon_wall_sun.npy"),
    }


def create_buildings_mask(wall_ht: np.ndarray, pixel_size: float, dilation_m: float = 25.0) -> np.ndarray:
    """
    Create building mask for GVF calculation.

    GVF expects: 0=building, 1=ground.

    Args:
        wall_ht: Wall-height raster; pixels > 0 are treated as wall locations.
        pixel_size: Raster resolution (metres per pixel).
        dilation_m: Metric distance to dilate the wall mask so that building
            footprints adjacent to walls are masked too. Defaults to 25.0,
            the previously hard-coded value (behavior unchanged for callers).
    """
    wall_mask = wall_ht > 0
    struct = ndimage.generate_binary_structure(2, 2)
    # Convert metric dilation distance to a pixel iteration count (>= 1).
    iterations = int(dilation_m / pixel_size) + 1
    dilated = ndimage.binary_dilation(wall_mask, struct, iterations=iterations)
    return (~dilated).astype(np.float32)


def create_gvf_inputs(input_data, shadow_data):
    """Create all inputs needed for GVF calculation."""
    rows, cols = input_data["dsm"].shape
    scale = float(input_data["params"]["scale"])

    # Load ground temperature from fixture (spatially varying based on shadow);
    # fall back to zeros when the optional fixture is missing
    tg_path = FIXTURES_DIR / "gvf_input_tg.npy"
    tg = np.load(tg_path).astype(np.float32) if tg_path.exists() else np.zeros((rows, cols), dtype=np.float32)

    emis_grid = np.full((rows, cols), DEFAULT_EMISSIVITY, dtype=np.float32)
    alb_grid = np.full((rows, cols), DEFAULT_ALBEDO, dtype=np.float32)

    # Building mask
    buildings = create_buildings_mask(input_data["wall_ht"], scale)

    # Combined shadow (bldg + veg)
    shadow = (shadow_data["bldg_sh"] * shadow_data["veg_sh"]).astype(np.float32)

    return {
        "wallsun": shadow_data["wall_sun"].astype(np.float32),
        "walls": input_data["wall_ht"].astype(np.float32),
        "buildings": buildings,
        "shadow": shadow,
        "dirwalls": input_data["wall_asp"].astype(np.float32),
        "tg": tg,
        "emis_grid": emis_grid,
        "alb_grid": alb_grid,
        "scale": scale,
    }


@pytest.fixture(scope="module")
def gvf_inputs(input_data, shadow_data):
    """Prepare all GVF inputs (shared across all tests in module)."""
    return create_gvf_inputs(input_data, shadow_data)


@pytest.fixture(scope="module")
def gvf_result(gvf_inputs):
    """Compute GVF result using Rust implementation (computed once per module)."""
    shadowing.disable_gpu()

    params = gvf_module.GvfScalarParams(
        scale=gvf_inputs["scale"],
        first=2.0,  # round(1.8m human height)
        second=36.0,  # round(1.8 * 20)
        tgwall=DEFAULT_TGWALL,
        ta=DEFAULT_TA,
        ewall=0.90,  # Wall emissivity
        sbc=SBC,
        albedo_b=0.20,  # Wall albedo
        twater=DEFAULT_TA,
        landcover=False,
    )

    return gvf_module.gvf_calc(
        gvf_inputs["wallsun"],
        gvf_inputs["walls"],
        gvf_inputs["buildings"],
        gvf_inputs["shadow"],
        gvf_inputs["dirwalls"],
        gvf_inputs["tg"],
        gvf_inputs["emis_grid"],
        gvf_inputs["alb_grid"],
        None,  # lc_grid
        params,
    )


class TestGvfPhysicalProperties:
    """Verify GVF outputs satisfy physical constraints."""

    def test_gvfalb_range(self, gvf_result):
        """GVF × albedo should be in range [0, albedo_max]."""
        gvfalb = np.array(gvf_result.gvfalb)
        valid_mask = ~np.isnan(gvfalb)
        assert np.all(gvfalb[valid_mask] >= 0), "gvfalb has negative values"
        # GVF × albedo cannot exceed albedo (GVF ≤ 1)
        assert np.all(gvfalb[valid_mask] <= 1.0), "gvfalb exceeds 1.0"

    def test_gvfalbnosh_range(self, gvf_result):
        """GVF × albedo (no shadow) should be in range [0, 1]."""
        gvfalbnosh = np.array(gvf_result.gvfalbnosh)
        valid_mask = ~np.isnan(gvfalbnosh)
        assert np.all(gvfalbnosh[valid_mask] >= 0), "gvfalbnosh has negative values"
        assert np.all(gvfalbnosh[valid_mask] <= 1.0), "gvfalbnosh exceeds 1.0"

    def test_gvf_lup_positive(self, gvf_result):
        """Upwelling longwave should be positive (thermal emission)."""
        lup = np.array(gvf_result.gvf_lup)
        valid_mask = ~np.isnan(lup)
        # Thermal emission is always positive
        assert np.all(lup[valid_mask] >= 0), "gvf_lup has negative values"

    def test_gvf_lup_reasonable_range(self, gvf_result):
        """Upwelling longwave should be in physically reasonable range."""
        lup = np.array(gvf_result.gvf_lup)
        valid_mask = ~np.isnan(lup) & (lup > 0)
        # Stefan-Boltzmann: at 25°C, blackbody emits ~448 W/m²
        # With emissivity and GVF, expect 100-600 W/m² range
        assert np.all(lup[valid_mask] < 1000), "gvf_lup exceeds 1000 W/m²"

    def test_gvf_norm_range(self, gvf_result):
        """GVF normalization factor should be in [0, 1]."""
        gvf_norm = np.array(gvf_result.gvf_norm)
        valid_mask = ~np.isnan(gvf_norm)
        assert np.all(gvf_norm[valid_mask] >= 0), "gvf_norm has negative values"
        assert np.all(gvf_norm[valid_mask] <= 1.0), "gvf_norm exceeds 1.0"


class TestGvfDirectionalConsistency:
    """Verify directional GVF components are consistent."""

    def test_directional_gvfalb_range(self, gvf_result):
        """All directional gvfalb should be in valid range."""
        for direction in ["e", "s", "w", "n"]:
            arr = np.array(getattr(gvf_result, f"gvfalb_{direction}"))
            valid_mask = ~np.isnan(arr)
            assert np.all(arr[valid_mask] >= 0), f"gvfalb_{direction} has negative values"
            assert np.all(arr[valid_mask] <= 1.0), f"gvfalb_{direction} exceeds 1.0"

    def test_directional_lup_positive(self, gvf_result):
        """All directional Lup should be positive."""
        for direction in ["e", "s", "w", "n"]:
            arr = np.array(getattr(gvf_result, f"gvf_lup_{direction}"))
            valid_mask = ~np.isnan(arr)
            assert np.all(arr[valid_mask] >= 0), f"gvf_lup_{direction} has negative values"

    def test_directional_symmetry_approximate(self, gvf_result):
        """For uniform inputs, directional components should be roughly similar."""
        # Get all directional Lup values
        lup_e = np.array(gvf_result.gvf_lup_e)
        lup_s = np.array(gvf_result.gvf_lup_s)
        lup_w = np.array(gvf_result.gvf_lup_w)
        lup_n = np.array(gvf_result.gvf_lup_n)

        # Compute mean of each direction (excluding NaN and building pixels)
        means = []
        for arr in [lup_e, lup_s, lup_w, lup_n]:
            valid = arr[~np.isnan(arr) & (arr > 0)]
            if len(valid) > 0:
                means.append(np.mean(valid))

        if len(means) >= 2:
            # Directional means should be within 50% of each other
            # (allowing for building asymmetry in test data)
            max_mean = max(means)
            min_mean = min(means)
            ratio = max_mean / min_mean if min_mean > 0 else 1
            assert ratio < 2.0, f"Directional Lup ratio {ratio:.2f} too large"


class TestGvfShapeConsistency:
    """Verify all GVF arrays have consistent shapes."""

    def test_all_outputs_same_shape(self, gvf_result, gvf_inputs):
        """All GVF output arrays should match input shape."""
        expected_shape = gvf_inputs["buildings"].shape

        output_names = [
            "gvf_lup",
            "gvfalb",
            "gvfalbnosh",
            "gvf_lup_e",
            "gvfalb_e",
            "gvfalbnosh_e",
            "gvf_lup_s",
            "gvfalb_s",
            "gvfalbnosh_s",
            "gvf_lup_w",
            "gvfalb_w",
            "gvfalbnosh_w",
            "gvf_lup_n",
            "gvfalb_n",
            "gvfalbnosh_n",
            "gvf_sum",
            "gvf_norm",
        ]

        for name in output_names:
            arr = np.array(getattr(gvf_result, name))
            assert arr.shape == expected_shape, f"{name} has wrong shape: {arr.shape} != {expected_shape}"


class TestGvfBuildingBehavior:
    """Verify GVF handles building pixels correctly."""

    def test_buildings_have_normalized_gvf(self, gvf_result, gvf_inputs):
        """Building pixels should have gvf_norm = 1.0 (normalized)."""
        gvf_norm = np.array(gvf_result.gvf_norm)
        buildings = gvf_inputs["buildings"]

        # Where buildings=0 (is a building), gvf_norm should be 1.0
        building_mask = buildings == 0
        if np.any(building_mask):
            building_gvf = gvf_norm[building_mask]
            assert np.allclose(building_gvf, 1.0, atol=1e-5), "Building pixels don't have gvf_norm=1.0"


class TestGvfWallEffects:
    """Verify GVF responds correctly to wall presence."""

    def test_wall_areas_have_nonzero_gvf(self, gvf_result, gvf_inputs):
        """Areas near walls should have non-zero GVF contribution."""
        gvf_sum = np.array(gvf_result.gvf_sum)
        walls = gvf_inputs["walls"]

        # Dilate wall mask to find nearby pixels
        wall_mask = walls > 0
        struct = ndimage.generate_binary_structure(2, 2)
        near_walls = ndimage.binary_dilation(wall_mask, struct, iterations=3)

        # Exclude building pixels themselves
        buildings = gvf_inputs["buildings"]
        near_walls_ground = near_walls & (buildings > 0)

        if np.any(near_walls_ground):
            wall_area_gvf = gvf_sum[near_walls_ground]
            # Mean GVF near walls should be positive
            assert np.mean(wall_area_gvf) > 0, "GVF near walls should be positive"


# Golden regression tests (compare against stored fixtures)
+class TestGvfGoldenRegression: + """ + Golden regression tests comparing current output against stored fixtures. + + These tests are skipped if golden fixtures don't exist yet. + Run generate_fixtures.py to create them. + """ + + @pytest.fixture + def gvf_golden(self): + """Load golden GVF fixtures if they exist.""" + fixtures = {} + golden_files = { + "gvf_lup": FIXTURES_DIR / "gvf_lup.npy", + "gvfalb": FIXTURES_DIR / "gvf_alb.npy", + "gvf_norm": FIXTURES_DIR / "gvf_norm.npy", + } + for name, path in golden_files.items(): + if path.exists(): + fixtures[name] = np.load(path) + return fixtures if fixtures else None + + def test_gvf_lup_matches_golden(self, gvf_result, gvf_golden): + """GVF Lup should match golden fixture.""" + if gvf_golden is None or "gvf_lup" not in gvf_golden: + pytest.skip("Golden GVF fixtures not generated yet") + + np.testing.assert_allclose( + np.array(gvf_result.gvf_lup), + gvf_golden["gvf_lup"], + rtol=1e-4, + atol=1e-4, + err_msg="GVF Lup differs from golden fixture", + ) + + def test_gvfalb_matches_golden(self, gvf_result, gvf_golden): + """GVF albedo should match golden fixture.""" + if gvf_golden is None or "gvfalb" not in gvf_golden: + pytest.skip("Golden GVF fixtures not generated yet") + + np.testing.assert_allclose( + np.array(gvf_result.gvfalb), + gvf_golden["gvfalb"], + rtol=1e-4, + atol=1e-4, + err_msg="GVF albedo differs from golden fixture", + ) + + +if __name__ == "__main__": + pytest.main([__file__, "-v"]) diff --git a/tests/golden/test_golden_pet.py b/tests/golden/test_golden_pet.py new file mode 100644 index 0000000..4ebbfaf --- /dev/null +++ b/tests/golden/test_golden_pet.py @@ -0,0 +1,193 @@ +""" +Golden Regression Tests for PET (Physiologically Equivalent Temperature) Calculations + +These tests compare the Rust PET implementation against pre-computed golden +fixtures generated from the UMEP Python module. 
"""
Golden Regression Tests for PET (Physiologically Equivalent Temperature) Calculations

These tests compare the Rust PET implementation against pre-computed golden
fixtures generated from the UMEP Python module.

PET uses an iterative energy balance solver, which is slower than UTCI but
provides a more physiologically-based thermal comfort index.
"""

from pathlib import Path

import numpy as np
import pytest
from solweig.rustalgos import pet

FIXTURES_DIR = Path(__file__).parent / "fixtures"

# Tolerance for PET calculations
# PET uses iterative solver; max observed: single 1.0e-2, grid 2.2e-2
RTOL = 0.005  # 0.5% relative tolerance
ATOL = 0.05  # 0.05°C absolute tolerance


@pytest.fixture(scope="module")
def single_point_data():
    """Load single-point PET test cases."""
    data = dict(np.load(FIXTURES_DIR / "pet_single_point.npz", allow_pickle=True))
    loaded = {
        "inputs": data["inputs"],  # [n_tests, 4] -> [ta, rh, tmrt, va]
        "outputs": data["outputs"],  # [n_tests]
        "descriptions": data["descriptions"],
        "sex": int(data["sex"]),
    }
    # Body-model scalars are stored as 0-d arrays; coerce to float
    for key in ("mbody", "age", "height", "activity", "clo"):
        loaded[key] = float(data[key])
    return loaded


@pytest.fixture(scope="module")
def grid_data():
    """Load grid PET test data."""
    params = dict(np.load(FIXTURES_DIR / "pet_grid_params.npz"))
    loaded = {
        "tmrt": np.load(FIXTURES_DIR / "pet_grid_tmrt.npy"),
        "va": np.load(FIXTURES_DIR / "pet_grid_va.npy"),
        "expected": np.load(FIXTURES_DIR / "pet_grid_output.npy"),
        "sex": int(params["sex"]),
    }
    # Weather and body-model scalars are stored as 0-d arrays; coerce to float
    for key in ("ta", "rh", "mbody", "age", "height", "activity", "clo"):
        loaded[key] = float(params[key])
    return loaded


class TestGoldenPetSinglePoint:
    """Golden tests for single-point PET calculations."""

    def test_pet_single_comfortable(self, single_point_data):
        """Test PET for comfortable conditions."""
        self._test_case(single_point_data, "comfortable")

    def test_pet_single_hot_summer(self, single_point_data):
        """Test PET for hot summer day."""
        self._test_case(single_point_data, "hot_summer")

    def test_pet_single_cold_winter(self, single_point_data):
        """Test PET for cold winter day."""
        self._test_case(single_point_data, "cold_winter")

    def test_pet_single_tropical(self, single_point_data):
        """Test PET for tropical high humidity conditions."""
        self._test_case(single_point_data, "tropical")

    def test_pet_single_high_radiation(self, single_point_data):
        """Test PET for high radiation (large Tmrt-Ta delta)."""
        self._test_case(single_point_data, "high_radiation")

    def _test_case(self, data, description):
        """Helper to test a specific case by description."""
        idx = list(data["descriptions"]).index(description)
        ta, rh, tmrt, va = data["inputs"][idx]
        expected = data["outputs"][idx]

        result = pet.pet_calculate(
            float(ta),
            float(rh),
            float(tmrt),
            float(va),
            data["mbody"],
            data["age"],
            data["height"],
            data["activity"],
            data["clo"],
            data["sex"],
        )

        np.testing.assert_allclose(
            result,
            expected,
            rtol=RTOL,
            atol=ATOL,
            err_msg=f"PET mismatch for {description}: got {result}, expected {expected}",
        )


class TestGoldenPetGrid:
    """Golden tests for grid PET calculations."""

    def test_pet_grid_matches_golden(self, grid_data):
        """Grid PET should match golden fixture."""
        result = pet.pet_grid(
            grid_data["ta"],
            grid_data["rh"],
            grid_data["tmrt"],
            grid_data["va"],
            grid_data["mbody"],
            grid_data["age"],
            grid_data["height"],
            grid_data["activity"],
            grid_data["clo"],
            grid_data["sex"],
        )
        result_arr = np.array(result)

        # Mask out invalid values (-9999)
        valid_mask = grid_data["expected"] > -999

        np.testing.assert_allclose(
            result_arr[valid_mask],
            grid_data["expected"][valid_mask],
            rtol=RTOL,
            atol=ATOL,
            err_msg="Grid PET differs from golden fixture",
        )

    def test_pet_grid_shape(self, grid_data):
        """Grid PET should have correct shape."""
        result = pet.pet_grid(
            grid_data["ta"],
            grid_data["rh"],
            grid_data["tmrt"],
            grid_data["va"],
            grid_data["mbody"],
            grid_data["age"],
            grid_data["height"],
            grid_data["activity"],
            grid_data["clo"],
            grid_data["sex"],
        )
        assert np.array(result).shape == grid_data["expected"].shape


class TestGoldenPetProperties:
    """Verify golden fixtures maintain expected physical properties."""

    def test_pet_range(self, single_point_data):
        """PET values should be in physically plausible range."""
        outputs = single_point_data["outputs"]
        # PET typically ranges from -20°C to +50°C for outdoor conditions
        assert np.all(outputs > -30), "PET values below plausible range"
        assert np.all(outputs < 60), "PET values above plausible range"

    def test_pet_responds_to_tmrt(self, single_point_data):
        """Higher Tmrt should generally increase PET."""
        # Compare comfortable (Tmrt=22) vs high_radiation (Tmrt=55)
        idx_comfort = list(single_point_data["descriptions"]).index("comfortable")
        idx_radiation = list(single_point_data["descriptions"]).index("high_radiation")

        pet_comfort = single_point_data["outputs"][idx_comfort]
        pet_radiation = single_point_data["outputs"][idx_radiation]

        # High radiation case has much higher Tmrt, so PET should be higher
        assert pet_radiation > pet_comfort, "PET should increase with higher Tmrt"

    def test_pet_hot_vs_cold(self, single_point_data):
        """Hot conditions should have higher PET than cold."""
        idx_cold = list(single_point_data["descriptions"]).index("cold_winter")
        idx_hot = list(single_point_data["descriptions"]).index("hot_summer")

        pet_cold = single_point_data["outputs"][idx_cold]
        pet_hot = single_point_data["outputs"][idx_hot]

        assert pet_hot > pet_cold, "Hot summer should have higher PET than cold winter"


if __name__ == "__main__":
    pytest.main([__file__, "-v"])
@@ -0,0 +1,587 @@ +""" +Golden Regression Tests for Radiation (Kside/Lside) Calculations + +These tests verify the Rust radiation implementations produce physically valid +and consistent outputs for directional shortwave (Kside) and longwave (Lside). + +Test strategy: +- Physical property tests: Verify ranges, relationships, direction dependence +- Isotropic mode tests: Test simpler computation path (no shadow matrices needed) +- Regression tests: Compare against pre-computed golden fixtures (when available) + +Reference: +- Lindberg et al. (2008, 2016) - SOLWEIG radiation model +- Perez et al. (1993) - Anisotropic sky luminance distribution +""" + +from pathlib import Path + +import numpy as np +import pytest +from solweig.constants import SBC +from solweig.rustalgos import shadowing, vegetation + +FIXTURES_DIR = Path(__file__).parent / "fixtures" + +# Physical constants +KELVIN_OFFSET = 273.15 + +# Typical weather conditions for testing +DEFAULT_TA = 25.0 # Air temperature (°C) +DEFAULT_RADG = 800.0 # Global radiation (W/m²) +DEFAULT_RADI = 600.0 # Direct radiation (W/m²) +DEFAULT_RADD = 200.0 # Diffuse radiation (W/m²) +DEFAULT_ESKY = 0.75 # Sky emissivity +DEFAULT_CI = 0.85 # Clearness index + + +@pytest.fixture(scope="module") +def input_data(): + """Load input data from golden fixtures (shared across all tests in module).""" + return { + "dsm": np.load(FIXTURES_DIR / "input_dsm.npy"), + "params": dict(np.load(FIXTURES_DIR / "input_params.npz")), + } + + +@pytest.fixture(scope="module") +def svf_data(): + """Load SVF data from golden fixtures (shared across all tests in module).""" + return { + "svf": np.load(FIXTURES_DIR / "svf_total.npy").astype(np.float32), + "svf_north": np.load(FIXTURES_DIR / "svf_north.npy").astype(np.float32), + "svf_east": np.load(FIXTURES_DIR / "svf_east.npy").astype(np.float32), + "svf_south": np.load(FIXTURES_DIR / "svf_south.npy").astype(np.float32), + "svf_west": np.load(FIXTURES_DIR / "svf_west.npy").astype(np.float32), + 
"svf_veg": np.load(FIXTURES_DIR / "svf_veg.npy").astype(np.float32), + } + + +@pytest.fixture(scope="module") +def shadow_data(): + """Load shadow data from golden fixtures (shared across all tests in module).""" + return { + "bldg_sh": np.load(FIXTURES_DIR / "shadow_noon_bldg_sh.npy").astype(np.float32), + "veg_sh": np.load(FIXTURES_DIR / "shadow_noon_veg_sh.npy").astype(np.float32), + } + + +def create_kside_inputs(svf_data, shadow_data): + """Create inputs for Kside calculation (isotropic mode).""" + shape = svf_data["svf"].shape + + # Create synthetic Kup arrays (ground-reflected shortwave) + kup_base = np.full(shape, 50.0, dtype=np.float32) # ~50 W/m² reflected + + # Create F_sh array (fraction of shadow on walls) + f_sh = np.full(shape, 0.5, dtype=np.float32) + + # Combined shadow + shadow = (shadow_data["bldg_sh"] * shadow_data["veg_sh"]).astype(np.float32) + + # SVF vegetation: Use the actual svf_veg from fixtures + # SVF_veg represents SVF accounting for vegetation transmissivity + # It should satisfy: SVF_veg >= SVF (vegetation reduces but doesn't increase sky view) + # And: svfvegbu = SVF_veg + SVF - 1 should be in [0, 1] + # Using the actual svf_veg ensures correct relationships + svf_veg = svf_data["svf_veg"] + + return { + "shadow": shadow, + "svf_s": svf_data["svf_south"], + "svf_w": svf_data["svf_west"], + "svf_n": svf_data["svf_north"], + "svf_e": svf_data["svf_east"], + # Use svf_veg for all directions (simplified - real code uses directional svf_veg) + "svf_e_veg": svf_veg.copy(), + "svf_s_veg": svf_veg.copy(), + "svf_w_veg": svf_veg.copy(), + "svf_n_veg": svf_veg.copy(), + "f_sh": f_sh, + "kup_e": kup_base.copy(), + "kup_s": kup_base.copy(), + "kup_w": kup_base.copy(), + "kup_n": kup_base.copy(), + } + + +def create_lside_inputs(svf_data, shadow_data): + """Create inputs for Lside calculation.""" + shape = svf_data["svf"].shape + + # Compute Ldown (sky longwave) + ta_k = DEFAULT_TA + KELVIN_OFFSET + ldown_base = DEFAULT_ESKY * SBC * (ta_k**4) + 
ldown = np.full(shape, ldown_base, dtype=np.float32) + + # Create F_sh array + f_sh = np.full(shape, 0.5, dtype=np.float32) + + # Create Lup arrays (upwelling longwave from ground) + lup_base = 0.95 * SBC * (ta_k**4) # Ground emission + lup = np.full(shape, lup_base, dtype=np.float32) + + # Use svf_veg for both SVF vegetation parameters + # This ensures correct relationships: svfvegbu = svf_veg + svf - 1 stays in valid range + svf_veg = svf_data["svf_veg"] + + return { + "svf_s": svf_data["svf_south"], + "svf_w": svf_data["svf_west"], + "svf_n": svf_data["svf_north"], + "svf_e": svf_data["svf_east"], + # Use svf_veg for vegetation SVF (proper relationship with base SVF) + "svf_e_veg": svf_veg.copy(), + "svf_s_veg": svf_veg.copy(), + "svf_w_veg": svf_veg.copy(), + "svf_n_veg": svf_veg.copy(), + # SVF_aveg (averaged vegetation SVF) - use same as svf_veg for testing + "svf_e_aveg": svf_veg.copy(), + "svf_s_aveg": svf_veg.copy(), + "svf_w_aveg": svf_veg.copy(), + "svf_n_aveg": svf_veg.copy(), + "ldown": ldown, + "f_sh": f_sh, + "lup_e": lup.copy(), + "lup_s": lup.copy(), + "lup_w": lup.copy(), + "lup_n": lup.copy(), + } + + +@pytest.fixture(scope="module") +def kside_inputs(svf_data, shadow_data): + """Prepare Kside inputs (shared across all tests in module).""" + return create_kside_inputs(svf_data, shadow_data) + + +@pytest.fixture(scope="module") +def lside_inputs(svf_data, shadow_data): + """Prepare Lside inputs (shared across all tests in module).""" + return create_lside_inputs(svf_data, shadow_data) + + +@pytest.fixture(scope="module") +def kside_result(kside_inputs): + """Compute Kside result using Rust implementation (computed once per module).""" + shadowing.disable_gpu() + + return vegetation.kside_veg( + DEFAULT_RADI, # radI + DEFAULT_RADD, # radD + DEFAULT_RADG, # radG + kside_inputs["shadow"], + kside_inputs["svf_s"], + kside_inputs["svf_w"], + kside_inputs["svf_n"], + kside_inputs["svf_e"], + kside_inputs["svf_e_veg"], + kside_inputs["svf_s_veg"], + 
kside_inputs["svf_w_veg"], + kside_inputs["svf_n_veg"], + 180.0, # azimuth (noon) + 60.0, # altitude (high sun) + 0.5, # psi (vegetation transmissivity) + 0.0, # t (instrument offset) + 0.20, # albedo + kside_inputs["f_sh"], + kside_inputs["kup_e"], + kside_inputs["kup_s"], + kside_inputs["kup_w"], + kside_inputs["kup_n"], + True, # cyl (cylindrical body model) + None, # lv (None for isotropic) + False, # anisotropic_diffuse + None, # diffsh + None, # asvf + None, # shmat + None, # vegshmat + None, # vbshvegshmat + ) + + +@pytest.fixture(scope="module") +def lside_result(lside_inputs): + """Compute Lside result using Rust implementation (computed once per module).""" + shadowing.disable_gpu() + + return vegetation.lside_veg( + lside_inputs["svf_s"], + lside_inputs["svf_w"], + lside_inputs["svf_n"], + lside_inputs["svf_e"], + lside_inputs["svf_e_veg"], + lside_inputs["svf_s_veg"], + lside_inputs["svf_w_veg"], + lside_inputs["svf_n_veg"], + lside_inputs["svf_e_aveg"], + lside_inputs["svf_s_aveg"], + lside_inputs["svf_w_aveg"], + lside_inputs["svf_n_aveg"], + 180.0, # azimuth + 60.0, # altitude + DEFAULT_TA, # Ta + 2.0, # Tw (wall temperature deviation) + SBC, # Stefan-Boltzmann constant + 0.90, # ewall + lside_inputs["ldown"], + DEFAULT_ESKY, # esky + 0.0, # t (instrument offset) + lside_inputs["f_sh"], + DEFAULT_CI, # CI + lside_inputs["lup_e"], + lside_inputs["lup_s"], + lside_inputs["lup_w"], + lside_inputs["lup_n"], + False, # anisotropic_longwave + ) + + +class TestKsidePhysicalProperties: + """Verify Kside outputs satisfy physical constraints.""" + + def test_kside_i_non_negative(self, kside_result): + """Direct component should be non-negative.""" + kside_i = np.array(kside_result.kside_i) + valid_mask = ~np.isnan(kside_i) + assert np.all(kside_i[valid_mask] >= 0), "kside_i has negative values" + + def test_kside_i_upper_bound(self, kside_result): + """Direct component limited by incident radiation.""" + kside_i = np.array(kside_result.kside_i) + valid_mask = 
~np.isnan(kside_i) + # Direct on vertical surface can't exceed I × cos(altitude) + max_direct = DEFAULT_RADI * np.cos(np.radians(60.0)) + assert np.all(kside_i[valid_mask] <= max_direct * 1.1), f"kside_i exceeds physical maximum {max_direct}" + + def test_directional_kside_non_negative(self, kside_result): + """All directional shortwave should be non-negative.""" + for direction in ["keast", "ksouth", "kwest", "knorth"]: + arr = np.array(getattr(kside_result, direction)) + valid_mask = ~np.isnan(arr) + assert np.all(arr[valid_mask] >= 0), f"{direction} has negative values" + + def test_directional_kside_reasonable_range(self, kside_result): + """Directional shortwave should be in reasonable range.""" + for direction in ["keast", "ksouth", "kwest", "knorth"]: + arr = np.array(getattr(kside_result, direction)) + valid_mask = ~np.isnan(arr) & (arr > 0) + if np.any(valid_mask): + # Shortwave on vertical surfaces can be high in areas with wall reflections + # Typical range 0-500 W/m², but can exceed 2000 W/m² in complex geometries + # Check median is reasonable (< 500 W/m²) rather than maximum + median_val = np.median(arr[valid_mask]) + assert median_val < 500, f"{direction} median {median_val:.1f} exceeds 500 W/m²" + + +class TestKsideSunPositionDependence: + """Verify Kside responds correctly to sun position.""" + + def test_noon_south_dominates(self, kside_inputs): + """At solar noon (azimuth=180), south-facing should receive most direct.""" + shadowing.disable_gpu() + + # Test at noon with high sun + result = vegetation.kside_veg( + DEFAULT_RADI, + DEFAULT_RADD, + DEFAULT_RADG, + kside_inputs["shadow"], + kside_inputs["svf_s"], + kside_inputs["svf_w"], + kside_inputs["svf_n"], + kside_inputs["svf_e"], + kside_inputs["svf_e_veg"], + kside_inputs["svf_s_veg"], + kside_inputs["svf_w_veg"], + kside_inputs["svf_n_veg"], + 180.0, # Noon + 60.0, + 0.5, + 0.0, + 0.20, + kside_inputs["f_sh"], + kside_inputs["kup_e"], + kside_inputs["kup_s"], + kside_inputs["kup_w"], + 
kside_inputs["kup_n"], + False, # box model to see directional differences + None, + False, + None, + None, + None, + None, + None, + ) + + ks = np.nanmean(np.array(result.ksouth)) + kn = np.nanmean(np.array(result.knorth)) + + # At noon in Northern Hemisphere, south receives more than north + # (with box model, direct beam goes to south-facing surfaces) + assert ks >= kn, f"South ({ks:.1f}) should receive >= North ({kn:.1f}) at noon" + + def test_morning_east_receives_direct(self, kside_inputs): + """In morning (azimuth=90), east-facing should receive direct.""" + shadowing.disable_gpu() + + result = vegetation.kside_veg( + DEFAULT_RADI, + DEFAULT_RADD, + DEFAULT_RADG, + kside_inputs["shadow"], + kside_inputs["svf_s"], + kside_inputs["svf_w"], + kside_inputs["svf_n"], + kside_inputs["svf_e"], + kside_inputs["svf_e_veg"], + kside_inputs["svf_s_veg"], + kside_inputs["svf_w_veg"], + kside_inputs["svf_n_veg"], + 90.0, # Morning + 30.0, # Lower sun + 0.5, + 0.0, + 0.20, + kside_inputs["f_sh"], + kside_inputs["kup_e"], + kside_inputs["kup_s"], + kside_inputs["kup_w"], + kside_inputs["kup_n"], + False, # box model + None, + False, + None, + None, + None, + None, + None, + ) + + ke = np.nanmean(np.array(result.keast)) + kw = np.nanmean(np.array(result.kwest)) + + # In morning, east receives more than west + assert ke >= kw, f"East ({ke:.1f}) should receive >= West ({kw:.1f}) in morning" + + +class TestLsidePhysicalProperties: + """Verify Lside outputs satisfy physical constraints.""" + + def test_lside_mostly_positive(self, lside_result): + """Longwave radiation should be mostly positive. + + Note: A small percentage of pixels may have negative values due to + numerical edge cases in the polynomial-based view factor calculation. + This is a known limitation when SVF values are near extreme bounds. 
+ """ + for direction in ["least", "lsouth", "lwest", "lnorth"]: + arr = np.array(getattr(lside_result, direction)) + valid_mask = ~np.isnan(arr) + valid_vals = arr[valid_mask] + negative_fraction = (valid_vals < 0).sum() / len(valid_vals) + # Allow up to 1% negative values (numerical edge cases) + assert negative_fraction < 0.01, ( + f"{direction} has {negative_fraction * 100:.1f}% negative values (max allowed: 1%)" + ) + # Mean should definitely be positive + assert np.mean(valid_vals) > 0, f"{direction} mean is negative" + + def test_lside_reasonable_range(self, lside_result): + """Longwave should be in physically reasonable range.""" + for direction in ["least", "lsouth", "lwest", "lnorth"]: + arr = np.array(getattr(lside_result, direction)) + valid_mask = ~np.isnan(arr) & (arr > 0) + if np.any(valid_mask): + # Longwave on vertical surfaces typically 100-600 W/m² + assert np.all(arr[valid_mask] < 1000), f"{direction} exceeds 1000 W/m²" + # Should be above freezing emission (~200 W/m² at 0°C) + mean_val = np.mean(arr[valid_mask]) + assert mean_val > 100, f"{direction} mean too low: {mean_val:.1f}" + + +class TestLsideDirectionalConsistency: + """Verify Lside directional components are consistent.""" + + def test_directional_means_similar(self, lside_result): + """Directional Lside means should be roughly similar (isotropic sky).""" + means = [] + for direction in ["least", "lsouth", "lwest", "lnorth"]: + arr = np.array(getattr(lside_result, direction)) + valid = arr[~np.isnan(arr) & (arr > 0)] + if len(valid) > 0: + means.append(np.mean(valid)) + + if len(means) >= 2: + # In isotropic mode, directional Lside should be similar + max_mean = max(means) + min_mean = min(means) + ratio = max_mean / min_mean if min_mean > 0 else 1 + # Allow some variation due to SVF differences + assert ratio < 2.0, f"Directional Lside ratio {ratio:.2f} too large" + + +class TestRadiationShapeConsistency: + """Verify radiation arrays have consistent shapes.""" + + def 
test_kside_shape_matches_input(self, kside_result, kside_inputs): + """All Kside outputs should match input shape.""" + expected_shape = kside_inputs["shadow"].shape + + for attr in ["keast", "ksouth", "kwest", "knorth", "kside_i", "kside_d", "kside"]: + arr = np.array(getattr(kside_result, attr)) + assert arr.shape == expected_shape, f"{attr} shape {arr.shape} != {expected_shape}" + + def test_lside_shape_matches_input(self, lside_result, lside_inputs): + """All Lside outputs should match input shape.""" + expected_shape = lside_inputs["svf_e"].shape + + for attr in ["least", "lsouth", "lwest", "lnorth"]: + arr = np.array(getattr(lside_result, attr)) + assert arr.shape == expected_shape, f"{attr} shape {arr.shape} != {expected_shape}" + + +class TestRadiationShadowEffects: + """Verify radiation responds correctly to shadow conditions.""" + + def test_shadow_reduces_direct(self, kside_inputs): + """Shadows should reduce direct shortwave component.""" + shadowing.disable_gpu() + + # Fully sunlit + kside_inputs_sunlit = kside_inputs.copy() + kside_inputs_sunlit["shadow"] = np.ones_like(kside_inputs["shadow"]) + + result_sunlit = vegetation.kside_veg( + DEFAULT_RADI, + DEFAULT_RADD, + DEFAULT_RADG, + kside_inputs_sunlit["shadow"], + kside_inputs["svf_s"], + kside_inputs["svf_w"], + kside_inputs["svf_n"], + kside_inputs["svf_e"], + kside_inputs["svf_e_veg"], + kside_inputs["svf_s_veg"], + kside_inputs["svf_w_veg"], + kside_inputs["svf_n_veg"], + 180.0, + 60.0, + 0.5, + 0.0, + 0.20, + kside_inputs["f_sh"], + kside_inputs["kup_e"], + kside_inputs["kup_s"], + kside_inputs["kup_w"], + kside_inputs["kup_n"], + True, + None, + False, + None, + None, + None, + None, + None, + ) + + # Fully shaded + kside_inputs_shaded = kside_inputs.copy() + kside_inputs_shaded["shadow"] = np.zeros_like(kside_inputs["shadow"]) + + result_shaded = vegetation.kside_veg( + DEFAULT_RADI, + DEFAULT_RADD, + DEFAULT_RADG, + kside_inputs_shaded["shadow"], + kside_inputs["svf_s"], + 
kside_inputs["svf_w"], + kside_inputs["svf_n"], + kside_inputs["svf_e"], + kside_inputs["svf_e_veg"], + kside_inputs["svf_s_veg"], + kside_inputs["svf_w_veg"], + kside_inputs["svf_n_veg"], + 180.0, + 60.0, + 0.5, + 0.0, + 0.20, + kside_inputs["f_sh"], + kside_inputs["kup_e"], + kside_inputs["kup_s"], + kside_inputs["kup_w"], + kside_inputs["kup_n"], + True, + None, + False, + None, + None, + None, + None, + None, + ) + + kside_i_sunlit = np.nanmean(np.array(result_sunlit.kside_i)) + kside_i_shaded = np.nanmean(np.array(result_shaded.kside_i)) + + # Shaded direct should be zero + assert kside_i_shaded < 1.0, f"Shaded kside_i should be ~0, got {kside_i_shaded:.1f}" + # Sunlit should be positive + assert kside_i_sunlit > 100, f"Sunlit kside_i should be significant, got {kside_i_sunlit:.1f}" + + +# Golden regression tests +class TestRadiationGoldenRegression: + """ + Golden regression tests comparing current output against stored fixtures. + + These tests are skipped if golden fixtures don't exist yet. + Run generate_fixtures.py to create them. 
+ """ + + @pytest.fixture + def radiation_golden(self): + """Load golden radiation fixtures if they exist.""" + fixtures = {} + golden_files = { + "kside_e": FIXTURES_DIR / "radiation_kside_e.npy", + "kside_s": FIXTURES_DIR / "radiation_kside_s.npy", + "lside_e": FIXTURES_DIR / "radiation_lside_e.npy", + "lside_s": FIXTURES_DIR / "radiation_lside_s.npy", + } + for name, path in golden_files.items(): + if path.exists(): + fixtures[name] = np.load(path) + return fixtures if fixtures else None + + def test_kside_matches_golden(self, kside_result, radiation_golden): + """Kside should match golden fixtures.""" + if radiation_golden is None or "kside_e" not in radiation_golden: + pytest.skip("Golden radiation fixtures not generated yet") + + np.testing.assert_allclose( + np.array(kside_result.keast), + radiation_golden["kside_e"], + rtol=1e-4, + atol=1e-4, + err_msg="Kside east differs from golden fixture", + ) + + def test_lside_matches_golden(self, lside_result, radiation_golden): + """Lside should match golden fixtures.""" + if radiation_golden is None or "lside_e" not in radiation_golden: + pytest.skip("Golden radiation fixtures not generated yet") + + np.testing.assert_allclose( + np.array(lside_result.least), + radiation_golden["lside_e"], + rtol=1e-4, + atol=1e-4, + err_msg="Lside east differs from golden fixture", + ) + + +if __name__ == "__main__": + pytest.main([__file__, "-v"]) diff --git a/tests/golden/test_golden_shadows.py b/tests/golden/test_golden_shadows.py new file mode 100644 index 0000000..70a8fb0 --- /dev/null +++ b/tests/golden/test_golden_shadows.py @@ -0,0 +1,190 @@ +""" +Golden Regression Tests for Shadow Calculations + +These tests compare the Rust shadow algorithm implementation against +pre-computed golden fixtures generated from the UMEP Python module. + +The fixtures are the ground truth (UMEP Python outputs), and these tests +verify that the Rust implementation produces equivalent results. 
+ +If these tests fail, it means: +- The Rust implementation differs from UMEP Python (investigate the difference) +- Or the fixtures were regenerated (rerun tests to confirm they pass) +""" + +from pathlib import Path + +import numpy as np +import pytest +from solweig.rustalgos import shadowing + +FIXTURES_DIR = Path(__file__).parent / "fixtures" + + +@pytest.fixture(scope="module") +def input_data(): + """Load input data from golden fixtures (shared across all tests in module).""" + return { + "dsm": np.load(FIXTURES_DIR / "input_dsm.npy"), + "cdsm": np.load(FIXTURES_DIR / "input_cdsm.npy"), + "tdsm": np.load(FIXTURES_DIR / "input_tdsm.npy"), + "bush": np.load(FIXTURES_DIR / "input_bush.npy"), + "wall_ht": np.load(FIXTURES_DIR / "input_wall_ht.npy"), + "wall_asp": np.load(FIXTURES_DIR / "input_wall_asp.npy") * np.pi / 180.0, + "params": dict(np.load(FIXTURES_DIR / "input_params.npz")), + } + + +def compute_shadows(input_data, azimuth, altitude): + """Compute shadows with given sun position.""" + shadowing.disable_gpu() + return shadowing.calculate_shadows_wall_ht_25( + azimuth, + altitude, + float(input_data["params"]["scale"]), + float(input_data["params"]["amaxvalue"]), + input_data["dsm"], + input_data["cdsm"], + input_data["tdsm"], + input_data["bush"], + input_data["wall_ht"], + input_data["wall_asp"].astype(np.float32), + None, + None, + None, + ) + + +class TestGoldenShadowsMorning: + """Golden tests for morning sun position (azimuth=90, altitude=30).""" + + @pytest.fixture(scope="class") + def morning_golden(self): + return { + "bldg_sh": np.load(FIXTURES_DIR / "shadow_morning_bldg_sh.npy"), + "veg_sh": np.load(FIXTURES_DIR / "shadow_morning_veg_sh.npy"), + "wall_sh": np.load(FIXTURES_DIR / "shadow_morning_wall_sh.npy"), + "wall_sun": np.load(FIXTURES_DIR / "shadow_morning_wall_sun.npy"), + } + + @pytest.fixture(scope="class") + def morning_result(self, input_data): + return compute_shadows(input_data, azimuth=90.0, altitude=30.0) + + def 
test_bldg_sh_matches_golden(self, morning_result, morning_golden): + """Building shadows should match golden fixture exactly.""" + np.testing.assert_allclose( + np.array(morning_result.bldg_sh), + morning_golden["bldg_sh"], + rtol=1e-5, + atol=1e-5, + err_msg="Building shadows differ from golden fixture", + ) + + def test_veg_sh_matches_golden(self, morning_result, morning_golden): + """Vegetation shadows should match golden fixture exactly.""" + np.testing.assert_allclose( + np.array(morning_result.veg_sh), + morning_golden["veg_sh"], + rtol=1e-5, + atol=1e-5, + err_msg="Vegetation shadows differ from golden fixture", + ) + + def test_wall_sh_matches_golden(self, morning_result, morning_golden): + """Wall shadows should match golden fixture exactly.""" + np.testing.assert_allclose( + np.array(morning_result.wall_sh), + morning_golden["wall_sh"], + rtol=1e-5, + atol=1e-5, + err_msg="Wall shadows differ from golden fixture", + ) + + def test_wall_sun_matches_golden(self, morning_result, morning_golden): + """Wall sun should match golden fixture exactly.""" + np.testing.assert_allclose( + np.array(morning_result.wall_sun), + morning_golden["wall_sun"], + rtol=1e-5, + atol=1e-5, + err_msg="Wall sun differs from golden fixture", + ) + + +class TestGoldenShadowsNoon: + """Golden tests for noon sun position (azimuth=180, altitude=60).""" + + @pytest.fixture(scope="class") + def noon_golden(self): + return { + "bldg_sh": np.load(FIXTURES_DIR / "shadow_noon_bldg_sh.npy"), + "veg_sh": np.load(FIXTURES_DIR / "shadow_noon_veg_sh.npy"), + "wall_sh": np.load(FIXTURES_DIR / "shadow_noon_wall_sh.npy"), + "wall_sun": np.load(FIXTURES_DIR / "shadow_noon_wall_sun.npy"), + } + + @pytest.fixture(scope="class") + def noon_result(self, input_data): + return compute_shadows(input_data, azimuth=180.0, altitude=60.0) + + def test_bldg_sh_matches_golden(self, noon_result, noon_golden): + """Building shadows should match golden fixture exactly.""" + np.testing.assert_allclose( + 
np.array(noon_result.bldg_sh), + noon_golden["bldg_sh"], + rtol=1e-5, + atol=1e-5, + err_msg="Building shadows differ from golden fixture", + ) + + def test_veg_sh_matches_golden(self, noon_result, noon_golden): + """Vegetation shadows should match golden fixture exactly.""" + np.testing.assert_allclose( + np.array(noon_result.veg_sh), + noon_golden["veg_sh"], + rtol=1e-5, + atol=1e-5, + err_msg="Vegetation shadows differ from golden fixture", + ) + + +class TestGoldenShadowsAfternoon: + """Golden tests for afternoon sun position (azimuth=270, altitude=45).""" + + @pytest.fixture(scope="class") + def afternoon_golden(self): + return { + "bldg_sh": np.load(FIXTURES_DIR / "shadow_afternoon_bldg_sh.npy"), + "veg_sh": np.load(FIXTURES_DIR / "shadow_afternoon_veg_sh.npy"), + "wall_sh": np.load(FIXTURES_DIR / "shadow_afternoon_wall_sh.npy"), + "wall_sun": np.load(FIXTURES_DIR / "shadow_afternoon_wall_sun.npy"), + } + + @pytest.fixture(scope="class") + def afternoon_result(self, input_data): + return compute_shadows(input_data, azimuth=270.0, altitude=45.0) + + def test_bldg_sh_matches_golden(self, afternoon_result, afternoon_golden): + """Building shadows should match golden fixture exactly.""" + np.testing.assert_allclose( + np.array(afternoon_result.bldg_sh), + afternoon_golden["bldg_sh"], + rtol=1e-5, + atol=1e-5, + err_msg="Building shadows differ from golden fixture", + ) + + def test_veg_sh_matches_golden(self, afternoon_result, afternoon_golden): + """Vegetation shadows should match golden fixture exactly.""" + np.testing.assert_allclose( + np.array(afternoon_result.veg_sh), + afternoon_golden["veg_sh"], + rtol=1e-5, + atol=1e-5, + err_msg="Vegetation shadows differ from golden fixture", + ) + + +if __name__ == "__main__": + pytest.main([__file__, "-v"]) diff --git a/tests/golden/test_golden_svf.py b/tests/golden/test_golden_svf.py new file mode 100644 index 0000000..de38667 --- /dev/null +++ b/tests/golden/test_golden_svf.py @@ -0,0 +1,153 @@ +""" +Golden 
Regression Tests for Sky View Factor (SVF) Calculations + +These tests compare the Rust SVF algorithm implementation against +pre-computed golden fixtures generated from the UMEP Python module. + +All non-vegetation SVF components match to within ~2e-6 (float32 precision). +Vegetation SVF has a known ~1.1% max difference due to different shadow +algorithm internals (shadowingfunction_20 vs shadowingfunction_wallheight_23). +""" + +from pathlib import Path + +import numpy as np +import pytest +from solweig.rustalgos import shadowing, skyview + +pytestmark = pytest.mark.slow + +FIXTURES_DIR = Path(__file__).parent / "fixtures" + +# All non-vegetation SVF components match within ~2e-6 (f32 precision) +STRICT_RTOL = 1e-5 +STRICT_ATOL = 1e-5 + +# Vegetation SVF: max observed diff ~1.09e-2; use 1.5% margin +VEG_RTOL = 0.015 +VEG_ATOL = 0.015 + + +@pytest.fixture(scope="module") +def input_data(): + """Load input data from golden fixtures (shared across all tests in module).""" + return { + "dsm": np.load(FIXTURES_DIR / "input_dsm.npy"), + "cdsm": np.load(FIXTURES_DIR / "input_cdsm.npy"), + "tdsm": np.load(FIXTURES_DIR / "input_tdsm.npy"), + "params": dict(np.load(FIXTURES_DIR / "input_params.npz")), + } + + +@pytest.fixture(scope="module") +def svf_golden(): + """Load golden SVF fixtures (shared across all tests in module).""" + return { + "svf": np.load(FIXTURES_DIR / "svf_total.npy"), + "svf_north": np.load(FIXTURES_DIR / "svf_north.npy"), + "svf_east": np.load(FIXTURES_DIR / "svf_east.npy"), + "svf_south": np.load(FIXTURES_DIR / "svf_south.npy"), + "svf_west": np.load(FIXTURES_DIR / "svf_west.npy"), + "svf_veg": np.load(FIXTURES_DIR / "svf_veg.npy"), + } + + +@pytest.fixture(scope="module") +def svf_result(input_data): + """Compute current SVF result (computed once per module).""" + shadowing.disable_gpu() + return skyview.calculate_svf( + input_data["dsm"], + input_data["cdsm"], + input_data["tdsm"], + float(input_data["params"]["scale"]), + True, # usevegdem + 
float(input_data["params"]["amaxvalue"]), + 2, # patch_option + None, # min_sun_elev + None, # progress_callback + ) + + +class TestGoldenSvf: + """Golden tests for SVF calculations.""" + + def test_svf_total_matches_golden(self, svf_result, svf_golden): + """Total SVF should match golden fixture (max diff ~2e-6).""" + np.testing.assert_allclose( + np.array(svf_result.svf), + svf_golden["svf"], + rtol=STRICT_RTOL, + atol=STRICT_ATOL, + err_msg="Total SVF differs from golden fixture", + ) + + def test_svf_north_matches_golden(self, svf_result, svf_golden): + """North SVF should match golden fixture (max diff ~1.3e-6).""" + np.testing.assert_allclose( + np.array(svf_result.svf_north), + svf_golden["svf_north"], + rtol=STRICT_RTOL, + atol=STRICT_ATOL, + err_msg="North SVF differs from golden fixture", + ) + + def test_svf_east_matches_golden(self, svf_result, svf_golden): + """East SVF should match golden fixture (max diff ~1.3e-6).""" + np.testing.assert_allclose( + np.array(svf_result.svf_east), + svf_golden["svf_east"], + rtol=STRICT_RTOL, + atol=STRICT_ATOL, + err_msg="East SVF differs from golden fixture", + ) + + def test_svf_south_matches_golden(self, svf_result, svf_golden): + """South SVF should match golden fixture (max diff ~1.3e-6).""" + np.testing.assert_allclose( + np.array(svf_result.svf_south), + svf_golden["svf_south"], + rtol=STRICT_RTOL, + atol=STRICT_ATOL, + err_msg="South SVF differs from golden fixture", + ) + + def test_svf_west_matches_golden(self, svf_result, svf_golden): + """West SVF should match golden fixture (max diff ~1.3e-6).""" + np.testing.assert_allclose( + np.array(svf_result.svf_west), + svf_golden["svf_west"], + rtol=STRICT_RTOL, + atol=STRICT_ATOL, + err_msg="West SVF differs from golden fixture", + ) + + def test_svf_veg_matches_golden(self, svf_result, svf_golden): + """Vegetation SVF should match golden fixture (max diff ~1.1e-2).""" + np.testing.assert_allclose( + np.array(svf_result.svf_veg), + svf_golden["svf_veg"], + 
rtol=VEG_RTOL, + atol=VEG_ATOL, + err_msg="Vegetation SVF differs from golden fixture beyond 1.5% tolerance", + ) + + +class TestGoldenSvfProperties: + """Verify golden fixtures maintain expected properties.""" + + def test_svf_range(self, svf_golden): + """Golden SVF values should be in valid range [0, 1].""" + for name, arr in svf_golden.items(): + valid_mask = ~np.isnan(arr) + assert np.all(arr[valid_mask] >= 0), f"{name} has values < 0" + assert np.all(arr[valid_mask] <= 1), f"{name} has values > 1" + + def test_svf_shape_consistency(self, svf_golden): + """All SVF arrays should have the same shape.""" + shapes = [arr.shape for arr in svf_golden.values()] + assert all(s == shapes[0] for s in shapes), "SVF arrays have inconsistent shapes" + + +if __name__ == "__main__": + pytest.main([__file__, "-v"]) diff --git a/tests/golden/test_golden_tmrt.py b/tests/golden/test_golden_tmrt.py new file mode 100644 index 0000000..6ce7d3f --- /dev/null +++ b/tests/golden/test_golden_tmrt.py @@ -0,0 +1,247 @@ +""" +Golden Regression Tests for Tmrt (Mean Radiant Temperature) Calculations + +These tests verify that the Rust Tmrt implementation correctly computes +Mean Radiant Temperature from radiation budget components using the +Stefan-Boltzmann formula: + + Tmrt = (Sstr / (abs_l * SBC))^0.25 - 273.15 + +where Sstr is the total absorbed shortwave and longwave radiation. 
+""" + +from pathlib import Path + +import numpy as np +import pytest +from solweig.rustalgos import tmrt + +FIXTURES_DIR = Path(__file__).parent / "fixtures" + +# Tolerance for Tmrt calculations (max observed diff ~3.05e-5) +RTOL = 1e-4 +ATOL = 1e-4 + + +@pytest.fixture(scope="module") +def tmrt_inputs(): + """Load Tmrt radiation input fixtures.""" + return { + "kdown": np.load(FIXTURES_DIR / "tmrt_input_kdown.npy"), + "kup": np.load(FIXTURES_DIR / "tmrt_input_kup.npy"), + "ldown": np.load(FIXTURES_DIR / "tmrt_input_ldown.npy"), + "lup": np.load(FIXTURES_DIR / "tmrt_input_lup.npy"), + "kside_n": np.load(FIXTURES_DIR / "tmrt_input_kside_n.npy"), + "kside_e": np.load(FIXTURES_DIR / "tmrt_input_kside_e.npy"), + "kside_s": np.load(FIXTURES_DIR / "tmrt_input_kside_s.npy"), + "kside_w": np.load(FIXTURES_DIR / "tmrt_input_kside_w.npy"), + "kside_total": np.load(FIXTURES_DIR / "tmrt_input_kside_total.npy"), + "lside_n": np.load(FIXTURES_DIR / "tmrt_input_lside_n.npy"), + "lside_e": np.load(FIXTURES_DIR / "tmrt_input_lside_e.npy"), + "lside_s": np.load(FIXTURES_DIR / "tmrt_input_lside_s.npy"), + "lside_w": np.load(FIXTURES_DIR / "tmrt_input_lside_w.npy"), + "lside_total": np.load(FIXTURES_DIR / "tmrt_input_lside_total.npy"), + } + + +@pytest.fixture(scope="module") +def tmrt_expected(): + """Load expected Tmrt outputs.""" + return { + "aniso": np.load(FIXTURES_DIR / "tmrt_output_aniso.npy"), + "iso": np.load(FIXTURES_DIR / "tmrt_output_iso.npy"), + } + + +@pytest.fixture(scope="module") +def tmrt_params(): + """Load Tmrt parameters.""" + data = dict(np.load(FIXTURES_DIR / "tmrt_params.npz")) + return { + "abs_k": float(data["abs_k"]), + "abs_l": float(data["abs_l"]), + } + + +class TestGoldenTmrt: + """Golden tests for Tmrt calculations.""" + + def test_tmrt_anisotropic_matches_golden(self, tmrt_inputs, tmrt_expected, tmrt_params): + """Tmrt with anisotropic sky model should match golden fixture.""" + params = tmrt.TmrtParams( + abs_k=tmrt_params["abs_k"], + 
abs_l=tmrt_params["abs_l"], + is_standing=True, + use_anisotropic_sky=True, + ) + + result = tmrt.compute_tmrt( + tmrt_inputs["kdown"], + tmrt_inputs["kup"], + tmrt_inputs["ldown"], + tmrt_inputs["lup"], + tmrt_inputs["kside_n"], + tmrt_inputs["kside_e"], + tmrt_inputs["kside_s"], + tmrt_inputs["kside_w"], + tmrt_inputs["lside_n"], + tmrt_inputs["lside_e"], + tmrt_inputs["lside_s"], + tmrt_inputs["lside_w"], + tmrt_inputs["kside_total"], + tmrt_inputs["lside_total"], + params, + ) + result_arr = np.array(result) + + np.testing.assert_allclose( + result_arr, + tmrt_expected["aniso"], + rtol=RTOL, + atol=ATOL, + err_msg="Anisotropic Tmrt differs from golden fixture", + ) + + def test_tmrt_isotropic_matches_golden(self, tmrt_inputs, tmrt_expected, tmrt_params): + """Tmrt with isotropic sky model should match golden fixture.""" + params = tmrt.TmrtParams( + abs_k=tmrt_params["abs_k"], + abs_l=tmrt_params["abs_l"], + is_standing=True, + use_anisotropic_sky=False, + ) + + result = tmrt.compute_tmrt( + tmrt_inputs["kdown"], + tmrt_inputs["kup"], + tmrt_inputs["ldown"], + tmrt_inputs["lup"], + tmrt_inputs["kside_n"], + tmrt_inputs["kside_e"], + tmrt_inputs["kside_s"], + tmrt_inputs["kside_w"], + tmrt_inputs["lside_n"], + tmrt_inputs["lside_e"], + tmrt_inputs["lside_s"], + tmrt_inputs["lside_w"], + tmrt_inputs["kside_total"], + tmrt_inputs["lside_total"], + params, + ) + result_arr = np.array(result) + + np.testing.assert_allclose( + result_arr, + tmrt_expected["iso"], + rtol=RTOL, + atol=ATOL, + err_msg="Isotropic Tmrt differs from golden fixture", + ) + + def test_tmrt_shape(self, tmrt_inputs, tmrt_params): + """Tmrt output should match input shape.""" + params = tmrt.TmrtParams( + abs_k=tmrt_params["abs_k"], + abs_l=tmrt_params["abs_l"], + is_standing=True, + use_anisotropic_sky=True, + ) + + result = tmrt.compute_tmrt( + tmrt_inputs["kdown"], + tmrt_inputs["kup"], + tmrt_inputs["ldown"], + tmrt_inputs["lup"], + tmrt_inputs["kside_n"], + tmrt_inputs["kside_e"], + 
tmrt_inputs["kside_s"], + tmrt_inputs["kside_w"], + tmrt_inputs["lside_n"], + tmrt_inputs["lside_e"], + tmrt_inputs["lside_s"], + tmrt_inputs["lside_w"], + tmrt_inputs["kside_total"], + tmrt_inputs["lside_total"], + params, + ) + + assert np.array(result).shape == tmrt_inputs["kdown"].shape + + +class TestGoldenTmrtProperties: + """Verify golden fixtures maintain expected physical properties.""" + + def test_tmrt_range(self, tmrt_expected): + """Tmrt values should be in physically plausible range.""" + for name, arr in tmrt_expected.items(): + assert np.all(arr >= -50), f"{name} Tmrt below -50°C" + assert np.all(arr <= 80), f"{name} Tmrt above 80°C" + + def test_aniso_vs_iso_difference(self, tmrt_expected): + """Anisotropic model should produce slightly different Tmrt than isotropic.""" + # Due to the additional Lside*Fcyl term in anisotropic mode + diff = np.abs(tmrt_expected["aniso"] - tmrt_expected["iso"]) + mean_diff = np.mean(diff) + + # Should be some difference (anisotropic adds more longwave) + assert mean_diff > 0.5, "Anisotropic and isotropic should differ" + # But not too large + assert mean_diff < 15, "Anisotropic/isotropic difference too large" + + def test_tmrt_increases_with_radiation(self, tmrt_inputs, tmrt_params): + """Higher radiation should produce higher Tmrt.""" + params = tmrt.TmrtParams( + abs_k=tmrt_params["abs_k"], + abs_l=tmrt_params["abs_l"], + is_standing=True, + use_anisotropic_sky=True, + ) + + # Compute Tmrt with normal inputs + result_normal = np.array( + tmrt.compute_tmrt( + tmrt_inputs["kdown"], + tmrt_inputs["kup"], + tmrt_inputs["ldown"], + tmrt_inputs["lup"], + tmrt_inputs["kside_n"], + tmrt_inputs["kside_e"], + tmrt_inputs["kside_s"], + tmrt_inputs["kside_w"], + tmrt_inputs["lside_n"], + tmrt_inputs["lside_e"], + tmrt_inputs["lside_s"], + tmrt_inputs["lside_w"], + tmrt_inputs["kside_total"], + tmrt_inputs["lside_total"], + params, + ) + ) + + # Compute with doubled shortwave + result_doubled = np.array( + tmrt.compute_tmrt( + 
tmrt_inputs["kdown"] * 2, + tmrt_inputs["kup"] * 2, + tmrt_inputs["ldown"], + tmrt_inputs["lup"], + tmrt_inputs["kside_n"] * 2, + tmrt_inputs["kside_e"] * 2, + tmrt_inputs["kside_s"] * 2, + tmrt_inputs["kside_w"] * 2, + tmrt_inputs["lside_n"], + tmrt_inputs["lside_e"], + tmrt_inputs["lside_s"], + tmrt_inputs["lside_w"], + tmrt_inputs["kside_total"] * 2, + tmrt_inputs["lside_total"], + params, + ) + ) + + # Mean Tmrt should be higher with more radiation + assert np.mean(result_doubled) > np.mean(result_normal), "Doubling shortwave radiation should increase Tmrt" + + +if __name__ == "__main__": + pytest.main([__file__, "-v"]) diff --git a/tests/golden/test_golden_utci.py b/tests/golden/test_golden_utci.py new file mode 100644 index 0000000..2fdb0ab --- /dev/null +++ b/tests/golden/test_golden_utci.py @@ -0,0 +1,171 @@ +""" +Golden Regression Tests for UTCI (Universal Thermal Climate Index) Calculations + +These tests compare the Rust UTCI implementation against pre-computed golden +fixtures generated from the UMEP Python module. + +Both implementations use the same 6th order polynomial approximation from +Bröde et al., so results should match to floating-point precision. 
+""" + +from pathlib import Path + +import numpy as np +import pytest +from solweig.rustalgos import utci + +FIXTURES_DIR = Path(__file__).parent / "fixtures" + +# Tolerance for UTCI calculations +# Both use identical polynomial, so should match very closely +RTOL = 1e-4 +ATOL = 1e-4 + + +@pytest.fixture(scope="module") +def single_point_data(): + """Load single-point UTCI test cases.""" + data = dict(np.load(FIXTURES_DIR / "utci_single_point.npz", allow_pickle=True)) + return { + "inputs": data["inputs"], # [n_tests, 4] -> [ta, rh, tmrt, va] + "outputs": data["outputs"], # [n_tests] + "descriptions": data["descriptions"], + } + + +@pytest.fixture(scope="module") +def grid_data(): + """Load grid UTCI test data.""" + params = dict(np.load(FIXTURES_DIR / "utci_grid_params.npz")) + return { + "ta": float(params["ta"]), + "rh": float(params["rh"]), + "tmrt": np.load(FIXTURES_DIR / "utci_grid_tmrt.npy"), + "va": np.load(FIXTURES_DIR / "utci_grid_va.npy"), + "expected": np.load(FIXTURES_DIR / "utci_grid_output.npy"), + } + + +class TestGoldenUtciSinglePoint: + """Golden tests for single-point UTCI calculations.""" + + def test_utci_single_comfortable(self, single_point_data): + """Test UTCI for comfortable conditions.""" + self._test_case(single_point_data, "comfortable") + + def test_utci_single_hot_summer(self, single_point_data): + """Test UTCI for hot summer day.""" + self._test_case(single_point_data, "hot_summer") + + def test_utci_single_cold_winter(self, single_point_data): + """Test UTCI for cold winter day.""" + self._test_case(single_point_data, "cold_winter") + + def test_utci_single_tropical(self, single_point_data): + """Test UTCI for tropical high humidity conditions.""" + self._test_case(single_point_data, "tropical") + + def test_utci_single_windy(self, single_point_data): + """Test UTCI for windy conditions.""" + self._test_case(single_point_data, "windy") + + def test_utci_single_high_radiation(self, single_point_data): + """Test UTCI for high 
radiation (large Tmrt-Ta delta).""" + self._test_case(single_point_data, "high_radiation") + + def test_utci_single_low_wind(self, single_point_data): + """Test UTCI for low wind edge case.""" + self._test_case(single_point_data, "low_wind") + + def _test_case(self, data, description): + """Helper to test a specific case by description.""" + idx = list(data["descriptions"]).index(description) + ta, rh, tmrt, va = data["inputs"][idx] + expected = data["outputs"][idx] + + result = utci.utci_single(float(ta), float(rh), float(tmrt), float(va)) + + np.testing.assert_allclose( + result, + expected, + rtol=RTOL, + atol=ATOL, + err_msg=f"UTCI mismatch for {description}: got {result}, expected {expected}", + ) + + +class TestGoldenUtciGrid: + """Golden tests for grid UTCI calculations.""" + + def test_utci_grid_matches_golden(self, grid_data): + """Grid UTCI should match golden fixture.""" + result = utci.utci_grid( + grid_data["ta"], + grid_data["rh"], + grid_data["tmrt"], + grid_data["va"], + ) + result_arr = np.array(result) + + # Mask out invalid values (-9999) + valid_mask = grid_data["expected"] > -999 + + np.testing.assert_allclose( + result_arr[valid_mask], + grid_data["expected"][valid_mask], + rtol=RTOL, + atol=ATOL, + err_msg="Grid UTCI differs from golden fixture", + ) + + def test_utci_grid_shape(self, grid_data): + """Grid UTCI should have correct shape.""" + result = utci.utci_grid( + grid_data["ta"], + grid_data["rh"], + grid_data["tmrt"], + grid_data["va"], + ) + assert np.array(result).shape == grid_data["expected"].shape + + +class TestGoldenUtciProperties: + """Verify golden fixtures maintain expected physical properties.""" + + def test_utci_range(self, single_point_data): + """UTCI values should be in physically plausible range.""" + outputs = single_point_data["outputs"] + # UTCI typically ranges from -50°C to +50°C for outdoor conditions + assert np.all(outputs > -60), "UTCI values below plausible range" + assert np.all(outputs < 60), "UTCI values 
above plausible range" + + def test_utci_responds_to_tmrt(self, single_point_data): + """Higher Tmrt should generally increase UTCI.""" + # Compare comfortable (Tmrt=22) vs high_radiation (Tmrt=60) + idx_comfort = list(single_point_data["descriptions"]).index("comfortable") + idx_radiation = list(single_point_data["descriptions"]).index("high_radiation") + + utci_comfort = single_point_data["outputs"][idx_comfort] + utci_radiation = single_point_data["outputs"][idx_radiation] + + # High radiation case has much higher Tmrt, so UTCI should be higher + assert utci_radiation > utci_comfort, "UTCI should increase with higher Tmrt" + + def test_utci_responds_to_wind(self, single_point_data): + """Higher wind speed should generally reduce UTCI in warm conditions.""" + # Compare hot_summer (va=1.0) vs windy (va=8.0) + # Windy has lower temp but high wind should still show cooling effect + idx_windy = list(single_point_data["descriptions"]).index("windy") + idx_hot = list(single_point_data["descriptions"]).index("hot_summer") + + # Windy case: Ta=25, Tmrt=30, va=8.0 + # Hot case: Ta=35, Tmrt=55, va=1.0 + # Hot case should have higher UTCI due to higher temp and Tmrt + utci_windy = single_point_data["outputs"][idx_windy] + utci_hot = single_point_data["outputs"][idx_hot] + + assert utci_hot > utci_windy, "Hot summer should have higher UTCI than windy" + + +if __name__ == "__main__": + pytest.main([__file__, "-v"]) diff --git a/tests/golden/test_golden_wall_geometry.py b/tests/golden/test_golden_wall_geometry.py new file mode 100644 index 0000000..d5005c7 --- /dev/null +++ b/tests/golden/test_golden_wall_geometry.py @@ -0,0 +1,265 @@ +""" +Golden Regression Tests for Wall Geometry Algorithms + +These tests verify that the pure numpy implementation of wall height and +wall aspect calculations (without scipy) produces results consistent with +the UMEP Python reference implementation (which uses scipy). 
+ +The wall algorithms use image rotation to detect wall orientations from the DSM. +Our `rotate_array` function replaces scipy.ndimage.rotate. + +Reference fixtures were generated using UMEP Python with scipy. +""" + +from pathlib import Path + +import numpy as np +import pytest + +pytestmark = pytest.mark.slow + +FIXTURES_DIR = Path(__file__).parent / "fixtures" + +# Tolerance for wall geometry calculations +# Wall heights are integers (pixel counts), aspects are in degrees +WALL_HT_RTOL = 0.05 # 5% relative tolerance +WALL_HT_ATOL = 0.5 # 0.5m absolute tolerance +WALL_ASP_ATOL = 10.0 # 10 degrees for aspect (rotation quantization) + + +@pytest.fixture(scope="module") +def dsm(): + """Load DSM input fixture.""" + return np.load(FIXTURES_DIR / "input_dsm.npy").astype(np.float32) + + +@pytest.fixture(scope="module") +def expected_wall_ht(): + """Load expected wall height from UMEP Python.""" + return np.load(FIXTURES_DIR / "input_wall_ht.npy").astype(np.float32) + + +@pytest.fixture(scope="module") +def expected_wall_asp(): + """Load expected wall aspect from UMEP Python.""" + return np.load(FIXTURES_DIR / "input_wall_asp.npy").astype(np.float32) + + +@pytest.fixture(scope="module") +def params(): + """Load input parameters.""" + return dict(np.load(FIXTURES_DIR / "input_params.npz")) + + +class TestGoldenWallHeight: + """Golden tests for wall height detection.""" + + def test_findwalls_produces_nonnegative_heights(self, dsm): + """Wall heights should be non-negative.""" + from solweig.physics.wallalgorithms import findwalls + + walllimit = 2.0 # Minimum wall height to detect + wall_ht = findwalls(dsm, walllimit) + + assert np.all(wall_ht >= 0), "Wall heights should be non-negative" + + def test_findwalls_shape_matches_dsm(self, dsm): + """Output shape should match input DSM.""" + from solweig.physics.wallalgorithms import findwalls + + walllimit = 2.0 + wall_ht = findwalls(dsm, walllimit) + + assert wall_ht.shape == dsm.shape, "Wall height shape should match DSM" 
+ + def test_findwalls_detects_walls_at_building_edges(self, dsm, expected_wall_ht): + """Walls should be detected where expected (building edges).""" + from solweig.physics.wallalgorithms import findwalls + + walllimit = 2.0 + wall_ht = findwalls(dsm, walllimit) + + # Compare number of wall pixels (within tolerance) + expected_wall_count = np.sum(expected_wall_ht > 0) + actual_wall_count = np.sum(wall_ht > 0) + + # Should detect similar number of walls (within 20%) + ratio = actual_wall_count / max(expected_wall_count, 1) + assert 0.8 <= ratio <= 1.2, ( + f"Wall pixel count differs too much: expected {expected_wall_count}, " + f"got {actual_wall_count} (ratio={ratio:.2f})" + ) + + def test_findwalls_heights_match_golden(self, dsm, expected_wall_ht): + """Wall heights should match golden reference within tolerance.""" + from solweig.physics.wallalgorithms import findwalls + + walllimit = 2.0 + wall_ht = findwalls(dsm, walllimit) + + # Only compare where both have walls + both_have_walls = (wall_ht > 0) & (expected_wall_ht > 0) + if np.any(both_have_walls): + actual_heights = wall_ht[both_have_walls] + expected_heights = expected_wall_ht[both_have_walls] + + np.testing.assert_allclose( + actual_heights, + expected_heights, + rtol=WALL_HT_RTOL, + atol=WALL_HT_ATOL, + err_msg="Wall heights differ from golden reference", + ) + + +class TestGoldenWallAspect: + """Golden tests for wall aspect (orientation) calculation.""" + + def test_wall_aspect_range(self, dsm, params): + """Wall aspects should be in valid range [0, 360).""" + from solweig.physics.wallalgorithms import filter1Goodwin_as_aspect_v3, findwalls + + scale = float(params["scale"]) + walllimit = 2.0 + wall_ht = findwalls(dsm, walllimit) + + # Only run aspect calculation if we have walls + if np.any(wall_ht > 0): + wall_asp = filter1Goodwin_as_aspect_v3(wall_ht, scale, dsm) + + # Check range only where walls exist + wall_mask = wall_ht > 0 + aspects_at_walls = wall_asp[wall_mask] + + # Aspects should be in [0, 
360) range + assert np.all(aspects_at_walls >= 0), "Wall aspects should be >= 0" + assert np.all(aspects_at_walls < 360), "Wall aspects should be < 360" + + def test_wall_aspect_shape_matches_dsm(self, dsm, params): + """Output shape should match input DSM.""" + from solweig.physics.wallalgorithms import filter1Goodwin_as_aspect_v3, findwalls + + scale = float(params["scale"]) + walllimit = 2.0 + wall_ht = findwalls(dsm, walllimit) + wall_asp = filter1Goodwin_as_aspect_v3(wall_ht, scale, dsm) + + assert wall_asp.shape == dsm.shape, "Wall aspect shape should match DSM" + + @pytest.mark.slow + def test_wall_aspect_matches_golden(self, dsm, expected_wall_ht, expected_wall_asp, params): + """Wall aspects should match golden reference within tolerance. + + Note: This test is marked slow because aspect calculation iterates + through 180 angles with filter convolutions. + """ + from solweig.physics.wallalgorithms import filter1Goodwin_as_aspect_v3, findwalls + + scale = float(params["scale"]) + walllimit = 2.0 + wall_ht = findwalls(dsm, walllimit) + wall_asp = filter1Goodwin_as_aspect_v3(wall_ht, scale, dsm) + + # Compare aspects where both reference and computed have walls + both_have_walls = (wall_ht > 0) & (expected_wall_ht > 0) + if np.any(both_have_walls): + actual_asp = wall_asp[both_have_walls] + expected_asp = expected_wall_asp[both_have_walls] + + # For angles, we need circular comparison + # Difference should be small, accounting for 360 wrap-around + diff = np.abs(actual_asp - expected_asp) + diff = np.minimum(diff, 360 - diff) # Handle wrap-around + + # Most aspects should match within tolerance + matching = diff <= WALL_ASP_ATOL + match_rate = np.mean(matching) + + assert match_rate >= 0.80, ( + f"Only {match_rate * 100:.1f}% of wall aspects match within " + f"{WALL_ASP_ATOL}° tolerance (expected >= 80%)" + ) + + +class TestRotateArrayConsistency: + """Tests for the pure numpy rotate_array implementation.""" + + def test_rotate_90_degrees(self): + """90 
degree rotation should transpose and flip.""" + from solweig.physics.morphology import rotate_array + + arr = np.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]], dtype=np.float32) + + result = rotate_array(arr, 90, order=0) + expected = np.array([[0, 0, 1], [0, 1, 0], [1, 0, 0]], dtype=np.float32) + + np.testing.assert_array_equal(result, expected) + + def test_rotate_180_degrees(self): + """180 degree rotation should flip both axes.""" + from solweig.physics.morphology import rotate_array + + arr = np.array([[1, 2], [3, 4]], dtype=np.float32) + + result = rotate_array(arr, 180, order=0) + expected = np.array([[4, 3], [2, 1]], dtype=np.float32) + + np.testing.assert_array_equal(result, expected) + + def test_rotate_360_degrees(self): + """360 degree rotation should return original.""" + from solweig.physics.morphology import rotate_array + + arr = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]], dtype=np.float32) + + result = rotate_array(arr, 360, order=0) + + np.testing.assert_array_equal(result, arr) + + def test_rotate_bilinear_smooth(self): + """Bilinear interpolation should produce smooth values.""" + from solweig.physics.morphology import rotate_array + + arr = np.array([[0, 0, 0], [0, 1, 0], [0, 0, 0]], dtype=np.float32) + + result = rotate_array(arr, 45, order=1) + + # Center should still have high value + assert result[1, 1] > 0.5, "Center should retain most of the value" + + # Should have some non-zero neighbors due to interpolation + assert np.any(result[result != result[1, 1]] > 0), "Interpolation should spread values" + + +class TestBinaryDilationConsistency: + """Tests for the pure numpy binary_dilation implementation.""" + + def test_single_pixel_dilation(self): + """Single pixel should expand to 3x3 with 8-connectivity.""" + from solweig.physics.morphology import binary_dilation + + arr = np.array([[False, False, False], [False, True, False], [False, False, False]], dtype=bool) + + result = binary_dilation(arr, iterations=1) + + # With 8-connectivity, should 
expand to all neighbors + expected = np.array([[True, True, True], [True, True, True], [True, True, True]], dtype=bool) + + np.testing.assert_array_equal(result, expected) + + def test_dilation_iterations(self): + """Multiple iterations should expand further.""" + from solweig.physics.morphology import binary_dilation + + arr = np.zeros((7, 7), dtype=bool) + arr[3, 3] = True + + result_1 = binary_dilation(arr, iterations=1) + result_2 = binary_dilation(arr, iterations=2) + + # More iterations = more expansion + assert np.sum(result_2) > np.sum(result_1) + + +if __name__ == "__main__": + pytest.main([__file__, "-v"]) diff --git a/tests/golden/test_golden_walls.py b/tests/golden/test_golden_walls.py new file mode 100644 index 0000000..832fbf3 --- /dev/null +++ b/tests/golden/test_golden_walls.py @@ -0,0 +1,253 @@ +""" +Golden Regression Tests for Wall Temperature Deviation Calculations + +These tests verify that the Rust compute_ground_temperature function correctly +calculates both ground (Tg) and wall (Tg_wall) temperature deviations from +air temperature based on sun position and land cover properties. 
+ +The model uses a sinusoidal daily pattern: + Tg = Tgamp * sin(phase * PI/2) * CI_correction +where: + Tgamp = TgK * altmax + Tstart + phase = (dectime - snup) / (tmaxlst - snup) +""" + +from pathlib import Path + +import numpy as np +import pytest +from solweig.rustalgos import ground + +FIXTURES_DIR = Path(__file__).parent / "fixtures" + +# Tolerance for wall temperature calculations (all diffs are exactly 0.0) +RTOL = 1e-5 +ATOL = 1e-5 + + +@pytest.fixture(scope="module") +def wall_temp_inputs(): + """Load wall temperature input fixtures.""" + return { + "alb": np.load(FIXTURES_DIR / "wall_temp_input_alb.npy"), + "emis": np.load(FIXTURES_DIR / "wall_temp_input_emis.npy"), + "tgk": np.load(FIXTURES_DIR / "wall_temp_input_tgk.npy"), + "tstart": np.load(FIXTURES_DIR / "wall_temp_input_tstart.npy"), + "tmaxlst": np.load(FIXTURES_DIR / "wall_temp_input_tmaxlst.npy"), + } + + +@pytest.fixture(scope="module") +def wall_temp_expected(): + """Load expected wall temperature outputs.""" + data = dict(np.load(FIXTURES_DIR / "wall_temp_output.npz")) + return { + "tg": data["tg"], + "tg_wall": float(data["tg_wall"]), + "ci_tg": float(data["ci_tg"]), + "ta": float(data["ta"]), + "sun_altitude": float(data["sun_altitude"]), + "altmax": float(data["altmax"]), + "dectime": float(data["dectime"]), + "snup": float(data["snup"]), + "global_rad": float(data["global_rad"]), + "rad_g0": float(data["rad_g0"]), + "zen_deg": float(data["zen_deg"]), + } + + +class TestGoldenWallTemperature: + """Golden tests for wall temperature deviation calculations.""" + + def test_ground_temp_matches_golden(self, wall_temp_inputs, wall_temp_expected): + """Ground temperature deviation should match golden fixture.""" + tg, tg_wall, ci_tg, alb_out, emis_out = ground.compute_ground_temperature( + wall_temp_expected["ta"], + wall_temp_expected["sun_altitude"], + wall_temp_expected["altmax"], + wall_temp_expected["dectime"], + wall_temp_expected["snup"], + wall_temp_expected["global_rad"], + 
wall_temp_expected["rad_g0"], + wall_temp_expected["zen_deg"], + wall_temp_inputs["alb"], + wall_temp_inputs["emis"], + wall_temp_inputs["tgk"], + wall_temp_inputs["tstart"], + wall_temp_inputs["tmaxlst"], + ) + + np.testing.assert_allclose( + np.array(tg), + wall_temp_expected["tg"], + rtol=RTOL, + atol=ATOL, + err_msg="Ground temperature (Tg) differs from golden fixture", + ) + + def test_wall_temp_matches_golden(self, wall_temp_inputs, wall_temp_expected): + """Wall temperature deviation should match golden fixture.""" + tg, tg_wall, ci_tg, alb_out, emis_out = ground.compute_ground_temperature( + wall_temp_expected["ta"], + wall_temp_expected["sun_altitude"], + wall_temp_expected["altmax"], + wall_temp_expected["dectime"], + wall_temp_expected["snup"], + wall_temp_expected["global_rad"], + wall_temp_expected["rad_g0"], + wall_temp_expected["zen_deg"], + wall_temp_inputs["alb"], + wall_temp_inputs["emis"], + wall_temp_inputs["tgk"], + wall_temp_inputs["tstart"], + wall_temp_inputs["tmaxlst"], + ) + + np.testing.assert_allclose( + tg_wall, + wall_temp_expected["tg_wall"], + rtol=RTOL, + atol=ATOL, + err_msg="Wall temperature (Tg_wall) differs from golden fixture", + ) + + def test_clearness_index_matches_golden(self, wall_temp_inputs, wall_temp_expected): + """Clearness index correction should match golden fixture.""" + tg, tg_wall, ci_tg, alb_out, emis_out = ground.compute_ground_temperature( + wall_temp_expected["ta"], + wall_temp_expected["sun_altitude"], + wall_temp_expected["altmax"], + wall_temp_expected["dectime"], + wall_temp_expected["snup"], + wall_temp_expected["global_rad"], + wall_temp_expected["rad_g0"], + wall_temp_expected["zen_deg"], + wall_temp_inputs["alb"], + wall_temp_inputs["emis"], + wall_temp_inputs["tgk"], + wall_temp_inputs["tstart"], + wall_temp_inputs["tmaxlst"], + ) + + np.testing.assert_allclose( + ci_tg, + wall_temp_expected["ci_tg"], + rtol=RTOL, + atol=ATOL, + err_msg="Clearness index (CI_Tg) differs from golden fixture", + ) + + 
def test_output_shape(self, wall_temp_inputs, wall_temp_expected): + """Output arrays should match input shape.""" + tg, tg_wall, ci_tg, alb_out, emis_out = ground.compute_ground_temperature( + wall_temp_expected["ta"], + wall_temp_expected["sun_altitude"], + wall_temp_expected["altmax"], + wall_temp_expected["dectime"], + wall_temp_expected["snup"], + wall_temp_expected["global_rad"], + wall_temp_expected["rad_g0"], + wall_temp_expected["zen_deg"], + wall_temp_inputs["alb"], + wall_temp_inputs["emis"], + wall_temp_inputs["tgk"], + wall_temp_inputs["tstart"], + wall_temp_inputs["tmaxlst"], + ) + + assert np.array(tg).shape == wall_temp_inputs["tgk"].shape + + +class TestGoldenWallTempProperties: + """Verify physical properties of wall temperature model.""" + + def test_temp_non_negative(self, wall_temp_inputs, wall_temp_expected): + """Temperature deviations should be non-negative during daytime.""" + tg, tg_wall, ci_tg, alb_out, emis_out = ground.compute_ground_temperature( + wall_temp_expected["ta"], + wall_temp_expected["sun_altitude"], + wall_temp_expected["altmax"], + wall_temp_expected["dectime"], + wall_temp_expected["snup"], + wall_temp_expected["global_rad"], + wall_temp_expected["rad_g0"], + wall_temp_expected["zen_deg"], + wall_temp_inputs["alb"], + wall_temp_inputs["emis"], + wall_temp_inputs["tgk"], + wall_temp_inputs["tstart"], + wall_temp_inputs["tmaxlst"], + ) + + assert np.all(np.array(tg) >= 0), "Ground Tg should be non-negative" + assert tg_wall >= 0, "Wall Tg should be non-negative" + + def test_land_cover_variation(self, wall_temp_inputs, wall_temp_expected): + """Different land covers (TgK) should produce different temperatures.""" + tg, tg_wall, ci_tg, alb_out, emis_out = ground.compute_ground_temperature( + wall_temp_expected["ta"], + wall_temp_expected["sun_altitude"], + wall_temp_expected["altmax"], + wall_temp_expected["dectime"], + wall_temp_expected["snup"], + wall_temp_expected["global_rad"], + wall_temp_expected["rad_g0"], + 
wall_temp_expected["zen_deg"], + wall_temp_inputs["alb"], + wall_temp_inputs["emis"], + wall_temp_inputs["tgk"], + wall_temp_inputs["tstart"], + wall_temp_inputs["tmaxlst"], + ) + tg_arr = np.array(tg) + + # Upper half has different TgK (asphalt) than lower half (grass) + mean_upper = np.mean(tg_arr[:10, :]) + mean_lower = np.mean(tg_arr[10:, :]) + + # Different land covers should produce different temperatures + assert abs(mean_upper - mean_lower) > 0.5, "Different land covers should produce different Tg values" + + def test_higher_altmax_higher_temp(self, wall_temp_inputs, wall_temp_expected): + """Higher max sun altitude should produce higher temperature amplitude.""" + # Calculate with normal altmax + tg_normal, _, _, _, _ = ground.compute_ground_temperature( + wall_temp_expected["ta"], + wall_temp_expected["sun_altitude"], + wall_temp_expected["altmax"], # 65° + wall_temp_expected["dectime"], + wall_temp_expected["snup"], + wall_temp_expected["global_rad"], + wall_temp_expected["rad_g0"], + wall_temp_expected["zen_deg"], + wall_temp_inputs["alb"], + wall_temp_inputs["emis"], + wall_temp_inputs["tgk"], + wall_temp_inputs["tstart"], + wall_temp_inputs["tmaxlst"], + ) + + # Calculate with higher altmax + tg_higher, _, _, _, _ = ground.compute_ground_temperature( + wall_temp_expected["ta"], + wall_temp_expected["sun_altitude"], + 80.0, # Higher max altitude + wall_temp_expected["dectime"], + wall_temp_expected["snup"], + wall_temp_expected["global_rad"], + wall_temp_expected["rad_g0"], + wall_temp_expected["zen_deg"], + wall_temp_inputs["alb"], + wall_temp_inputs["emis"], + wall_temp_inputs["tgk"], + wall_temp_inputs["tstart"], + wall_temp_inputs["tmaxlst"], + ) + + assert np.mean(np.array(tg_higher)) > np.mean(np.array(tg_normal)), ( + "Higher max altitude should produce higher Tg" + ) + + +if __name__ == "__main__": + pytest.main([__file__, "-v"]) diff --git a/tests/qgis_mocks.py b/tests/qgis_mocks.py new file mode 100644 index 0000000..f5eb99e --- /dev/null +++ 
b/tests/qgis_mocks.py @@ -0,0 +1,150 @@ +""" +Shared QGIS mock setup for testing plugin code without a QGIS installation. + +Import this module BEFORE any qgis_plugin imports to inject mocks into sys.modules. +All mock classes and stubs are defined here to ensure consistency across test files. +""" + +from __future__ import annotations + +import sys +from unittest.mock import MagicMock + +# --------------------------------------------------------------------------- +# Real exception/base classes (needed for isinstance/pytest.raises) +# --------------------------------------------------------------------------- + +QgsProcessingException = type("QgsProcessingException", (Exception,), {}) + + +class QgsProcessingAlgorithm: + """Stub for QgsProcessingAlgorithm - must be a real class for subclassing.""" + + def parameterAsRasterLayer(self, *a, **kw): + return None + + def parameterAsOutputLayer(self, *a, **kw): + return None + + def addParameter(self, *a, **kw): + pass + + +class QgsRasterLayer: + """Stub for QgsRasterLayer.""" + + def __init__(self, *a, **kw): + pass + + def isValid(self): + return True + + def source(self): + return "" + + def dataProvider(self): + return MagicMock() + + +class QgsProject: + """Stub for QgsProject with singleton pattern.""" + + _inst = MagicMock() + + @classmethod + def instance(cls): + return cls._inst + + +class QgsProcessingProvider: + """Stub for QgsProcessingProvider - must be a real class for subclassing.""" + + def addAlgorithm(self, *a, **kw): + pass + + def icon(self): + return None + + +# --------------------------------------------------------------------------- +# Build mock modules +# --------------------------------------------------------------------------- + +_mock_qgis_core = MagicMock() +_mock_qgis_core.QgsProcessingException = QgsProcessingException +_mock_qgis_core.QgsProcessingAlgorithm = QgsProcessingAlgorithm +_mock_qgis_core.QgsProcessingProvider = QgsProcessingProvider +_mock_qgis_core.QgsRasterLayer = 
QgsRasterLayer +_mock_qgis_core.QgsProject = QgsProject +_mock_qgis_core.QgsProcessingContext = MagicMock +_mock_qgis_core.QgsProcessingFeedback = MagicMock +_mock_qgis_core.QgsApplication = MagicMock() + +_mock_qgis_pyqt_core = MagicMock() +_mock_qgis_pyqt_gui = MagicMock() +_mock_qgis_pyqt_widgets = MagicMock() +_mock_qgis_pyqt = MagicMock() +_mock_qgis_pyqt.QtCore = _mock_qgis_pyqt_core +_mock_qgis_pyqt.QtGui = _mock_qgis_pyqt_gui +_mock_qgis_pyqt.QtWidgets = _mock_qgis_pyqt_widgets + +_mock_qgis = MagicMock() +_mock_qgis.core = _mock_qgis_core +_mock_qgis.PyQt = _mock_qgis_pyqt + +_mock_osgeo = MagicMock() + + +def _has_real_osgeo() -> bool: + """Check if a real (non-mock) osgeo package is available.""" + if "osgeo" in sys.modules: + mod = sys.modules["osgeo"] + return not isinstance(mod, MagicMock) and hasattr(mod, "__file__") + try: + import importlib.util + + return importlib.util.find_spec("osgeo") is not None + except (ImportError, ValueError): + return False + + +def install(): + """Install QGIS mocks into sys.modules. Idempotent - safe to call multiple times. + + Note: This only installs qgis module mocks. Call install_osgeo() separately + if you need osgeo mocks for imports, and uninstall_osgeo() immediately after + to avoid polluting other test modules. + """ + qgis_mocks = { + "qgis": _mock_qgis, + "qgis.core": _mock_qgis_core, + "qgis.PyQt": _mock_qgis_pyqt, + "qgis.PyQt.QtCore": _mock_qgis_pyqt_core, + "qgis.PyQt.QtGui": _mock_qgis_pyqt_gui, + "qgis.PyQt.QtWidgets": _mock_qgis_pyqt_widgets, + } + for name, mock in qgis_mocks.items(): + sys.modules[name] = mock + + +def install_osgeo(): + """Install osgeo mocks into sys.modules (only if osgeo is not really installed). + + Call uninstall_osgeo() immediately after the imports that need it to avoid + polluting other test modules (e.g. test_io.py's GeoTIFF tests). 
+ """ + if not _has_real_osgeo(): + osgeo_mocks = { + "osgeo": _mock_osgeo, + "osgeo.gdal": _mock_osgeo.gdal, + "osgeo.osr": _mock_osgeo.osr, + } + for name, mock in osgeo_mocks.items(): + sys.modules.setdefault(name, mock) + + +def uninstall_osgeo(): + """Remove osgeo mocks from sys.modules to avoid polluting other tests.""" + for name in ("osgeo", "osgeo.gdal", "osgeo.osr"): + if name in sys.modules and isinstance(sys.modules[name], MagicMock): + del sys.modules[name] diff --git a/tests/rustalgos/test_rustalgos.py b/tests/rustalgos/test_rustalgos.py deleted file mode 100644 index baf6167..0000000 --- a/tests/rustalgos/test_rustalgos.py +++ /dev/null @@ -1,1320 +0,0 @@ -""" -First run demo for Athens and Goteborg to generate temp rasters used in tests -""" - -import cProfile -import pstats -import timeit - -import matplotlib.pyplot as plt -import numpy as np -from memory_profiler import memory_usage -from umep.class_configs import ShadowMatrices, SvfData, TgMaps, WallsData -from umep.functions.SOLWEIGpython import Solweig_run -from umep.functions.SOLWEIGpython.anisotropic_sky import anisotropic_sky as ani_sky -from umep.functions.SOLWEIGpython.cylindric_wedge import cylindric_wedge -from umep.functions.SOLWEIGpython.daylen import daylen -from umep.functions.SOLWEIGpython.gvf_2018a import gvf_2018a -from umep.functions.SOLWEIGpython.Kside_veg_v2022a import Kside_veg_v2022a -from umep.functions.SOLWEIGpython.Kup_veg_2015a import Kup_veg_2015a -from umep.functions.SOLWEIGpython.Lside_veg_v2022a import Lside_veg_v2022a -from umep.functions.SOLWEIGpython.patch_radiation import patch_steradians -from umep.functions.SOLWEIGpython.solweig_runner_core import SolweigRunCore -from umep.functions.SOLWEIGpython.TsWaveDelay_2015a import TsWaveDelay_2015a -from umep.functions.svf_functions import svfForProcessing153 -from umep.util.SEBESOLWEIGCommonFiles.clearnessindex_2013b import clearnessindex_2013b -from umep.util.SEBESOLWEIGCommonFiles.create_patches import create_patches 
-from umep.util.SEBESOLWEIGCommonFiles.Perez_v3 import Perez_v3 -from umep.util.SEBESOLWEIGCommonFiles.shadowingfunction_wallheight_23 import shadowingfunction_wallheight_23 -from umepr.hybrid.svf import svfForProcessing153_rust_shdw -from umepr.rustalgos import gvf, shadowing, sky, skyview, vegetation -from umepr.solweig_runner_rust import SolweigRunRust - - -def make_large_tile(arr: np.ndarray) -> np.ndarray: - """Create a larger tile by tiling the input array 2x2.""" - return np.ascontiguousarray( - np.block([[arr, arr, arr, arr], [arr, arr, arr, arr], [arr, arr, arr, arr], [arr, arr, arr, arr]]), - dtype=np.float32, - ) - - -def test_shadowing(): - # Test shadowingfunction_wallheight_23 vs calculate_shadows_wall_ht_25 for speed and memory - repeats = 3 - azi = 45.0 - alt = 30.0 - SWC = SolweigRunCore( - config_path_str="tests/rustalgos/test_config_shadows.ini", - params_json_path="tests/rustalgos/test_params_solweig.json", - ) - - dsm = make_large_tile(SWC.raster_data.dsm) - cdsm = make_large_tile(SWC.raster_data.cdsm) - tdsm = make_large_tile(SWC.raster_data.tdsm) - bush = make_large_tile(SWC.raster_data.bush) - wall_ht = make_large_tile(SWC.raster_data.wallheight) - wall_asp = make_large_tile(SWC.raster_data.wallaspect) - - # --- Timing only (no memory profiling) --- - def run_py(): - return shadowingfunction_wallheight_23( # type: ignore - dsm, - cdsm, - tdsm, - azi, - alt, - SWC.raster_data.scale, - SWC.raster_data.amaxvalue, - bush, - wall_ht, - wall_asp * np.pi / 180.0, - ) - - def run_rust_cpu(): - shadowing.disable_gpu() - return shadowing.calculate_shadows_wall_ht_25( # type: ignore - azi, - alt, - SWC.raster_data.scale, - SWC.raster_data.amaxvalue, - dsm, - cdsm, - tdsm, - bush, - wall_ht, - wall_asp * np.pi / 180.0, - None, - None, - None, - ) - - def run_rust_gpu(): - shadowing.enable_gpu() - return shadowing.calculate_shadows_wall_ht_25( # type: ignore - azi, - alt, - SWC.raster_data.scale, - SWC.raster_data.amaxvalue, - dsm, - cdsm, - tdsm, - 
bush, - wall_ht, - wall_asp * np.pi / 180.0, - None, - None, - None, - ) - - py_timings = timeit.repeat(run_py, number=1, repeat=repeats) - print_timing_stats("shadowingfunction_wallheight_23", py_timings) - - print("\n--- Testing with GPU disabled (CPU only) ---") - rust_cpu_timings = timeit.repeat(run_rust_cpu, number=1, repeat=repeats) - print_timing_stats("shadowing.calculate_shadows_wall_ht_25 (CPU)", rust_cpu_timings) - - # Print relative speed as percentage - print("\n--- Python vs Rust comparison ---") - relative_speed(py_timings, rust_cpu_timings) - - print("\n--- Testing with GPU enabled ---") - rust_gpu_timings = timeit.repeat(run_rust_gpu, number=1, repeat=repeats) - print_timing_stats("shadowing.calculate_shadows_wall_ht_25 (GPU)", rust_gpu_timings) - - print("\n--- Python vs Rust + GPUcomparison ---") - relative_speed(py_timings, rust_gpu_timings) - - # --- Memory profiling only (no timing) --- - py_memory = memory_usage(run_py, max_usage=True) - print(f"shadowingfunction_wallheight_23: max memory usage: {py_memory:.2f} MiB") - - # Use the selected run_rust function for memory test - rust_without_gpu_memory = memory_usage(run_rust_cpu, max_usage=True) - print(f"shadowing.calculate_shadows_wall_ht_25: max memory usage (CPU): {rust_without_gpu_memory:.2f} MiB") - - # Use the selected run_rust function for memory test - rust_with_gpu_memory = memory_usage(run_rust_gpu, max_usage=True) - print(f"shadowing.calculate_shadows_wall_ht_25: max memory usage (GPU): {rust_with_gpu_memory:.2f} MiB") - - # Run Python version - veg_sh, bldg_sh, veg_blocks_bldg_sh, wall_sh, wall_sun, wall_sh_veg, face_sh, face_sun = run_py() - result_py = { - "veg_sh": veg_sh, - "bldg_sh": bldg_sh, - "veg_blocks_bldg_sh": veg_blocks_bldg_sh, - "wall_sh": wall_sh, - "wall_sun": wall_sun, - "wall_sh_veg": wall_sh_veg, - "face_sh": face_sh, - "face_sun": face_sun, - } - # Run Rust version (re-enable GPU if available) - for with_gpu in [True, False]: - if with_gpu: - print("\n--- Running 
Rust shadowing with GPU enabled for output comparison ---") - result_rust = run_rust_gpu() - append = " (GPU)" - else: - print("\n--- Running Rust shadowing with GPU disabled for output comparison ---") - result_rust = run_rust_cpu() - append = " (CPU)" - key_map = { - "veg_sh": "veg_sh", - "bldg_sh": "bldg_sh", - "veg_blocks_bldg_sh": "veg_blocks_bldg_sh", - "wall_sh": "wall_sh", - "wall_sun": "wall_sun", - "wall_sh_veg": "wall_sh_veg", - "face_sh": "face_sh", - "face_sun": "face_sun", - } - # Compare results - compare_results(result_py, result_rust, key_map) - # Plot visual residuals - plot_visual_residuals(bldg_sh, result_rust.bldg_sh, title_prefix="Building Shadows " + append) - plot_visual_residuals(veg_sh, result_rust.veg_sh, title_prefix="Vegetation Shadows" + append) - plot_visual_residuals( - veg_blocks_bldg_sh, result_rust.veg_blocks_bldg_sh, title_prefix="Veg Blocks Bldg Shadows" + append - ) - plot_visual_residuals(wall_sh, result_rust.wall_sh, title_prefix="Wall Shadows" + append) - plot_visual_residuals(wall_sun, result_rust.wall_sun, title_prefix="Wall Sun" + append) - plot_visual_residuals(wall_sh_veg, result_rust.wall_sh_veg, title_prefix="Wall Sh Veg" + append) - plot_visual_residuals(face_sh, result_rust.face_sh, title_prefix="Face Sh" + append) - plot_visual_residuals(face_sun, result_rust.face_sun, title_prefix="Face Sun" + append) - - -def test_svf(): - # Test svfForProcessing153 vs skyview.calculate_svf_153 for speed - repeats = 1 - SWC = SolweigRunCore( - config_path_str="tests/rustalgos/test_config_shadows.ini", - params_json_path="tests/rustalgos/test_params_solweig.json", - ) - - dsm = SWC.raster_data.dsm.astype(np.float32) - cdsm = SWC.raster_data.cdsm.astype(np.float32) - tdsm = SWC.raster_data.tdsm.astype(np.float32) - - # --- Timing only (no memory profiling) --- - def run_old_py(): - # uses older shadowingfunction_20 - return svfForProcessing153(dsm, cdsm, tdsm, SWC.raster_data.scale, 1, SWC.raster_data.amaxvalue) - - def 
run_hybrid(): - # uses rust shadowing based on shadowingfunction_wallheight_23 - shadowing.disable_gpu() - return svfForProcessing153_rust_shdw(dsm, cdsm, tdsm, SWC.raster_data.scale, 1, SWC.raster_data.amaxvalue) - - def run_rust_cpu(): - shadowing.disable_gpu() - return skyview.calculate_svf( - dsm, cdsm, tdsm, SWC.raster_data.scale, True, SWC.raster_data.amaxvalue, 2, None, None - ) - - def run_rust_gpu(): - shadowing.enable_gpu() - return skyview.calculate_svf( - dsm, cdsm, tdsm, SWC.raster_data.scale, True, SWC.raster_data.amaxvalue, 2, None, None - ) - - times_old_py = timeit.repeat(run_old_py, number=1, repeat=repeats) - print_timing_stats("svfForProcessing153 - (shadowingfunction_20)", times_old_py) - - times_hybrid = timeit.repeat(run_hybrid, number=1, repeat=repeats) - print_timing_stats("svfForProcessing153 - hybrid w. rust shadows", times_hybrid) - - # Print relative speed as percentage - print("\n--- Relative Speed shadowingfunction_20 - hybrid w. rust shadows vs. Python ---") - relative_speed(times_old_py, times_hybrid) - - print("\n--- SVF with GPU disabled (CPU only) ---") - times_rust_cpu = timeit.repeat(run_rust_cpu, number=1, repeat=repeats) - print_timing_stats("skyview.calculate_svf (CPU)", times_rust_cpu) - - # Print relative speed as percentage - print("\n--- Relative Speed shadowingfunction_20 - rust CPU SVF vs. Python ---") - relative_speed(times_old_py, times_rust_cpu) - - print("\n--- SVF with GPU enabled ---") - times_rust_gpu = timeit.repeat(run_rust_gpu, number=1, repeat=repeats) - print_timing_stats("skyview.calculate_svf (GPU)", times_rust_gpu) - - # Print relative speed as percentage - print("\n--- Relative Speed shadowingfunction_20 - rust GPU SVF vs. 
Python ---") - relative_speed(times_old_py, times_rust_gpu) - - # --- Memory profiling only (no timing) --- - old_py_memory = memory_usage(run_old_py, max_usage=True) - print(f"svfForProcessing153: old py max memory usage: {old_py_memory:.2f} MiB") - - hybrid_memory = memory_usage(run_hybrid, max_usage=True) - print(f"svfForProcessing153 - hybrid w. rust shadows: max memory usage: {hybrid_memory:.2f} MiB") - - rust_memory = memory_usage(run_rust_cpu, max_usage=True) - print(f"skyview.calculate_svf: max memory usage (CPU): {rust_memory:.2f} MiB") - - rust_memory = memory_usage(run_rust_gpu, max_usage=True) - print(f"skyview.calculate_svf: max memory usage (GPU): {rust_memory:.2f} MiB") - - # Errors - result_old_py = run_old_py() - result_hybrid = run_hybrid() - - # Compare results - key_map = { - "svf": "svf", - "svfE": "svfE", - "svfS": "svfS", - "svfW": "svfW", - "svfN": "svfN", - "svfveg": "svfveg", - "svfEveg": "svfEveg", - "svfSveg": "svfSveg", - "svfWveg": "svfWveg", - "svfNveg": "svfNveg", - "svfaveg": "svfaveg", - "svfEaveg": "svfEaveg", - "svfSaveg": "svfSaveg", - "svfWaveg": "svfWaveg", - "svfNaveg": "svfNaveg", - } - # Small diffs for N and E and totals - print("Small differences expected for N and E and totals due to different shadowing implementations") - compare_results(result_old_py, result_hybrid, key_map) - - print("\nGenerating residual plots...") - plot_visual_residuals(result_old_py["svf"], result_hybrid["svf"], title_prefix="Svf old py vs hybrid") - plot_visual_residuals(result_old_py["svfE"], result_hybrid["svfE"], title_prefix="Svf East old py vs hybrid") - plot_visual_residuals(result_old_py["svfS"], result_hybrid["svfS"], title_prefix="Svf South old py vs hybrid") - plot_visual_residuals(result_old_py["svfW"], result_hybrid["svfW"], title_prefix="Svf West old py vs hybrid") - plot_visual_residuals(result_old_py["svfN"], result_hybrid["svfN"], title_prefix="Svf North old py vs hybrid") - plot_visual_residuals(result_old_py["svfveg"], 
result_hybrid["svfveg"], title_prefix="Svf Veg old py vs hybrid") - plot_visual_residuals( - result_old_py["svfEveg"], result_hybrid["svfEveg"], title_prefix="Svf East Veg old py vs hybrid" - ) - plot_visual_residuals( - result_old_py["svfSveg"], result_hybrid["svfSveg"], title_prefix="Svf South Veg old py vs hybrid" - ) - plot_visual_residuals( - result_old_py["svfWveg"], result_hybrid["svfWveg"], title_prefix="Svf West Veg old py vs hybrid" - ) - plot_visual_residuals( - result_old_py["svfNveg"], result_hybrid["svfNveg"], title_prefix="Svf North Veg old py vs hybrid" - ) - plot_visual_residuals( - result_old_py["svfaveg"], result_hybrid["svfaveg"], title_prefix="Svf vbssh Veg old py vs hybrid" - ) - plot_visual_residuals( - result_old_py["svfEaveg"], result_hybrid["svfEaveg"], title_prefix="Svf East vbssh Veg old py vs hybrid" - ) - plot_visual_residuals( - result_old_py["svfSaveg"], result_hybrid["svfSaveg"], title_prefix="Svf South vbssh Veg old py vs hybrid" - ) - plot_visual_residuals( - result_old_py["svfWaveg"], result_hybrid["svfWaveg"], title_prefix="Svf West vbssh Veg old py vs hybrid" - ) - plot_visual_residuals( - result_old_py["svfNaveg"], result_hybrid["svfNaveg"], title_prefix="Svf North vbssh Veg old py vs hybrid" - ) - - # For testing outputs use hybrid version - shadowing is tested separately in above test - # (otherwise testing against outputs from underlying shadowingfunction_20 gives different results) - # Run Python version - for with_gpu in [True, False]: - if with_gpu: - print("\n--- Running Rust SVF with GPU enabled for output comparison ---") - result_rust = run_rust_gpu() - append = " (GPU)" - else: - print("\n--- Running Rust SVF with GPU disabled for output comparison ---") - result_rust = run_rust_cpu() - append = " (CPU)" - - # Compare results - key_map = { - "svf": "svf", - "svfE": "svf_east", - "svfS": "svf_south", - "svfW": "svf_west", - "svfN": "svf_north", - "svfveg": "svf_veg", - "svfEveg": "svf_veg_east", - "svfSveg": 
"svf_veg_south", - "svfWveg": "svf_veg_west", - "svfNveg": "svf_veg_north", - "svfaveg": "svf_veg_blocks_bldg_sh", - "svfEaveg": "svf_veg_blocks_bldg_sh_east", - "svfSaveg": "svf_veg_blocks_bldg_sh_south", - "svfWaveg": "svf_veg_blocks_bldg_sh_west", - "svfNaveg": "svf_veg_blocks_bldg_sh_north", - } - compare_results(result_hybrid, result_rust, key_map) - - # Plot visual residuals for all comparable SVF components explicitly - print("\nGenerating residual plots...") - plot_visual_residuals(result_hybrid["svf"], result_rust.svf, title_prefix="Svf " + append) - plot_visual_residuals(result_hybrid["svfE"], result_rust.svf_east, title_prefix="Svf East" + append) - plot_visual_residuals(result_hybrid["svfS"], result_rust.svf_south, title_prefix="Svf South" + append) - plot_visual_residuals(result_hybrid["svfW"], result_rust.svf_west, title_prefix="Svf West" + append) - plot_visual_residuals(result_hybrid["svfN"], result_rust.svf_north, title_prefix="Svf North" + append) - plot_visual_residuals(result_hybrid["svfveg"], result_rust.svf_veg, title_prefix="Svf Veg" + append) - plot_visual_residuals(result_hybrid["svfEveg"], result_rust.svf_veg_east, title_prefix="Svf East Veg" + append) - plot_visual_residuals( - result_hybrid["svfSveg"], result_rust.svf_veg_south, title_prefix="Svf South Veg" + append - ) - plot_visual_residuals(result_hybrid["svfWveg"], result_rust.svf_veg_west, title_prefix="Svf West Veg" + append) - plot_visual_residuals( - result_hybrid["svfNveg"], result_rust.svf_veg_north, title_prefix="Svf North Veg" + append - ) - plot_visual_residuals( - result_hybrid["svfaveg"], result_rust.svf_veg_blocks_bldg_sh, title_prefix="Svf vbssh Veg" + append - ) - plot_visual_residuals( - result_hybrid["svfEaveg"], - result_rust.svf_veg_blocks_bldg_sh_east, - title_prefix="Svf East vbssh Veg" + append, - ) - plot_visual_residuals( - result_hybrid["svfSaveg"], - result_rust.svf_veg_blocks_bldg_sh_south, - title_prefix="Svf South vbssh Veg" + append, - ) - 
plot_visual_residuals( - result_hybrid["svfWaveg"], - result_rust.svf_veg_blocks_bldg_sh_west, - title_prefix="Svf West vbssh Veg" + append, - ) - plot_visual_residuals( - result_hybrid["svfNaveg"], - result_rust.svf_veg_blocks_bldg_sh_north, - title_prefix="Svf North vbssh Veg" + append, - ) - - -def test_solweig(): - repeats = 1 - - # Origin - def run_ori(): - Solweig_run.solweig_run("tests/rustalgos/test_config_solweig_old_fmt.ini", None) - - ori_timings = timeit.repeat(run_ori, number=1, repeat=repeats) - print_timing_stats("solweig_run (old format)", ori_timings) - - # --- Timing only (no memory profiling) --- - SWC = SolweigRunCore( - config_path_str="tests/rustalgos/test_config_solweig.ini", - params_json_path="tests/rustalgos/test_params_solweig.json", - ) - SWC.config.output_dir = "temp/goteborg/test_py/" - # Set corner to NaN - SWC.raster_data.dsm[0:100, 0:100] = np.nan - - def run_py(): - SWC.run() - - py_timings = timeit.repeat(run_py, number=1, repeat=repeats) - print_timing_stats("solweig_run", py_timings) - - SWR = SolweigRunRust( - config_path_str="tests/rustalgos/test_config_solweig.ini", - params_json_path="tests/rustalgos/test_params_solweig.json", - ) - - def run_hybrid(): - SWR.run() - - hybrid_timings = timeit.repeat(run_hybrid, number=1, repeat=repeats) - print_timing_stats("solweig_run w rust shadows", hybrid_timings) - - # Print relative speed as percentage - print("\n--- Relative Speed Original vs. Python ---") - relative_speed(ori_timings, py_timings) - print("\n--- Relative Speed Original vs. Rust ---") - relative_speed(ori_timings, hybrid_timings) - # NO ANISO - ~2.5 - # WITH ANISO - ~2.5 - print("\n--- Relative Speed Core vs. 
GPU Rust ---") - relative_speed(py_timings, hybrid_timings) - - # --- Memory profiling only (no timing) --- - print("\n--- Memory Profiling ---") - # Memory profiling for original Solweig run - ori_memory = memory_usage(run_ori, max_usage=True) - print(f"\nsolweig_run (old format): max memory usage: {ori_memory:.2f} MiB") - - py_memory = memory_usage(run_py, max_usage=True) - print(f"\nsolweig_run: max memory usage: {py_memory:.2f} MiB") - - rust_memory = memory_usage(run_hybrid, max_usage=True) - print(f"\nsolweig_run w rust shadows: max memory usage: {rust_memory:.2f} MiB") - - -def test_profile_solweig(): - SWR = SolweigRunRust( - config_path_str="tests/rustalgos/test_config_solweig.ini", - params_json_path="tests/rustalgos/test_params_solweig.json", - ) - # ANI patch parallel 24 25.169 1.049 25.169 1.049 {built-in method sky.anisotropic_sky} - # ANI pixel parallel 24 2.019 0.084 2.019 0.084 {built-in method sky.anisotropic_sky} - # GVF 18 1.407 0.078 1.407 0.078 {built-in method gvf.gvf_calc} - # GVF pixel parallel 18 0.184 0.010 0.184 0.010 {built-in method gvf.gvf_calc} - # shadowing 18 0.918 0.051 0.918 0.051 {built-in method shadowing.calculate_shadows_wall_ht_25} - - """ - GVF time includes sun on surface!! 
- NO ANISO - Running SOLWEIG: 100%|██████████| 24/24 [00:09<00:00, 2.61step/s] 100276 function calls (100107 primitive calls) in 9.306 seconds - 18 5.285 0.294 5.285 0.294 {built-in method gvf.gvf_calc} - 169 1.455 0.009 2.228 0.013 /Users/gareth/dev/umep-rust/.venv/lib/python3.12/site-packages/umep/common.py:57(save_raster) - 18 1.095 0.061 1.095 0.061 {built-in method shadowing.calculate_shadows_wall_ht_25} - 169 0.558 0.003 0.596 0.004 /Users/gareth/dev/umep-rust/.venv/lib/python3.12/site-packages/rasterio/__init__.py:99(open) - 24 0.135 0.006 6.941 0.289 /Users/gareth/dev/umep-rust/pysrc/umepr/functions/solweig.py:30(Solweig_2025a_calc) - 18 0.126 0.007 0.129 0.007 /Users/gareth/dev/umep-rust/.venv/lib/python3.12/site-packages/umep/functions/SOLWEIGpython/cylindric_wedge.py:3(cylindric_wedge) - 24 0.090 0.004 0.090 0.004 {built-in method vegetation.lside_veg} - 2/1 0.070 0.035 9.290 9.290 /Users/gareth/dev/umep-rust/.venv/lib/python3.12/site-packages/umep/functions/SOLWEIGpython/solweig_runner.py:343(run) - 18 0.069 0.004 0.069 0.004 /Users/gareth/dev/umep-rust/.venv/lib/python3.12/site-packages/umep/functions/SOLWEIGpython/Kup_veg_2015a.py:3(Kup_veg_2015a) - 990 0.053 0.000 0.053 0.000 {method 'astype' of 'numpy.ndarray' objects} - 18 0.053 0.003 0.053 0.003 {built-in method vegetation.kside_veg} - WITH ANISO - CYLINDER - Running SOLWEIG: 100%|██████████| 24/24 [00:15<00:00, 1.54step/s] 171116 function calls (170947 primitive calls) in 16.461 seconds - ncalls tottime percall cumtime percall filename:lineno(function) - 18 5.073 0.282 5.073 0.282 {built-in method gvf.gvf_calc} - 24 4.182 0.174 4.182 0.174 {built-in method sky.anisotropic_sky} - 24 1.394 0.058 13.789 0.575 /Users/gareth/dev/umep-rust/pysrc/umepr/functions/solweig.py:30(Solweig_2025a_calc) - 169 1.285 0.008 2.079 0.012 /Users/gareth/dev/umep-rust/.venv/lib/python3.12/site-packages/umep/common.py:57(save_raster) - 18 1.031 0.057 1.031 0.057 {built-in method shadowing.calculate_shadows_wall_ht_25} - 
1436 0.938 0.001 0.938 0.001 {method 'astype' of 'numpy.ndarray' objects} - 18 0.892 0.050 0.892 0.050 {built-in method vegetation.kside_veg} - 169 0.588 0.003 0.624 0.004 /Users/gareth/dev/umep-rust/.venv/lib/python3.12/site-packages/rasterio/__init__.py:99(open) - 1 0.391 0.391 0.391 0.391 /Users/gareth/dev/umep-rust/.venv/lib/python3.12/site-packages/umep/functions/SOLWEIGpython/solweig_runner.py:185(hemispheric_image) - 18 0.104 0.006 0.107 0.006 /Users/gareth/dev/umep-rust/.venv/lib/python3.12/site-packages/umep/functions/SOLWEIGpython/cylindric_wedge.py:3(cylindric_wedge) - 2/1 0.077 0.038 16.005 16.005 /Users/gareth/dev/umep-rust/.venv/lib/python3.12/site-packages/umep/functions/SOLWEIGpython/solweig_runner.py:343(run) - 18 0.052 0.003 0.052 0.003 /Users/gareth/dev/umep-rust/.venv/lib/python3.12/site-packages/umep/functions/SOLWEIGpython/Kup_veg_2015a.py:3(Kup_veg_2015a) - 169 0.050 0.000 0.053 0.000 /Users/gareth/dev/umep-rust/.venv/lib/python3.12/site-packages/pyproj/crs/crs.py:185(__init__) - 24 0.048 0.002 13.838 0.577 /Users/gareth/dev/umep-rust/pysrc/umepr/solweig_runner_rust.py:15(calc_solweig) - 108 0.026 0.000 0.026 0.000 /Users/gareth/dev/umep-rust/.venv/lib/python3.12/site-packages/umep/functions/SOLWEIGpython/TsWaveDelay_2015a.py:4(TsWaveDelay_2015a) - 24 0.024 0.001 0.024 0.001 {built-in method vegetation.lside_veg} - WITH ANISO - BOX - Running SOLWEIG: 100%|██████████| 24/24 [00:13<00:00, 1.71step/s] 171116 function calls (170947 primitive calls) in 14.697 seconds - 18 5.305 0.295 5.305 0.295 {built-in method gvf.gvf_calc} - 24 2.288 0.095 2.288 0.095 {built-in method sky.anisotropic_sky} - 24 1.603 0.067 12.705 0.529 /Users/gareth/dev/umep-rust/pysrc/umepr/functions/solweig.py:30(Solweig_2025a_calc) - 18 1.483 0.082 1.483 0.082 {built-in method shadowing.calculate_shadows_wall_ht_25} - 169 1.457 0.009 2.292 0.014 /Users/gareth/dev/umep-rust/.venv/lib/python3.12/site-packages/umep/common.py:57(save_raster) - 1436 1.101 0.001 1.101 0.001 {method 
'astype' of 'numpy.ndarray' objects} - 18 0.940 0.052 0.940 0.052 {built-in method vegetation.kside_veg} - 169 0.610 0.004 0.649 0.004 /Users/gareth/dev/umep-rust/.venv/lib/python3.12/site-packages/rasterio/__init__.py:99(open) - 1 0.384 0.384 0.384 0.384 /Users/gareth/dev/umep-rust/.venv/lib/python3.12/site-packages/umep/functions/SOLWEIGpython/solweig_runner.py:185(hemispheric_image) - 18 0.125 0.007 0.128 0.007 /Users/gareth/dev/umep-rust/.venv/lib/python3.12/site-packages/umep/functions/SOLWEIGpython/cylindric_wedge.py:3(cylindric_wedge) - 12/6 0.106 0.009 0.553 0.092 {method 'acquire' of '_thread.lock' objects} - 24 0.099 0.004 0.099 0.004 {built-in method vegetation.lside_veg} - """ - profiler = cProfile.Profile() - profiler.enable() - SWR.run() - profiler.disable() - stats = pstats.Stats(profiler).sort_stats("tottime") - stats.print_stats(30) # Show top 30 lines - - SWC = SolweigRunCore( - config_path_str="tests/rustalgos/test_config_solweig.ini", - params_json_path="tests/rustalgos/test_params_solweig.json", - ) - """ - NO ANISO - 324 16.477 0.051 17.924 0.055 /Users/gareth/dev/umep-rust/.venv/lib/python3.12/site-packages/umep/functions/SOLWEIGpython/sunonsurface_2018a.py:3(sunonsurface_2018a) - 18 3.574 0.199 3.813 0.212 /Users/gareth/dev/umep-rust/.venv/lib/python3.12/site-packages/umep/util/SEBESOLWEIGCommonFiles/shadowingfunction_wallheight_23.py:42(shadowingfunction_wallheight_23) - 96 1.813 0.019 1.813 0.019 /Users/gareth/dev/umep-rust/.venv/lib/python3.12/site-packages/umep/functions/SOLWEIGpython/Lvikt_veg.py:1(Lvikt_veg) - 18 1.275 0.071 18.695 1.039 /Users/gareth/dev/umep-rust/.venv/lib/python3.12/site-packages/umep/functions/SOLWEIGpython/gvf_2018a.py:6(gvf_2018a) - WITH ANISO - 24 24.894 1.037 33.829 1.410 /Users/gareth/dev/umep-rust/.venv/lib/python3.12/site-packages/umep/functions/SOLWEIGpython/anisotropic_sky.py:11(anisotropic_sky) - 324 14.680 0.045 15.968 0.049 
/Users/gareth/dev/umep-rust/.venv/lib/python3.12/site-packages/umep/functions/SOLWEIGpython/sunonsurface_2018a.py:3(sunonsurface_2018a) - 18 9.770 0.543 12.326 0.685 /Users/gareth/dev/umep-rust/.venv/lib/python3.12/site-packages/umep/functions/SOLWEIGpython/Kside_veg_v2022a.py:6(Kside_veg_v2022a) - 6426 4.469 0.001 4.469 0.001 /Users/gareth/dev/umep-rust/.venv/lib/python3.12/site-packages/umep/functions/SOLWEIGpython/sunlit_shaded_patches.py:6(shaded_or_sunlit) - 18 3.817 0.212 4.071 0.226 /Users/gareth/dev/umep-rust/.venv/lib/python3.12/site-packages/umep/util/SEBESOLWEIGCommonFiles/shadowingfunction_wallheight_23.py:42(shadowingfunction_wallheight_23) - """ - profiler = cProfile.Profile() - profiler.enable() - SWC.run() - profiler.disable() - stats = pstats.Stats(profiler).sort_stats("tottime") - stats.print_stats(30) # Show top 30 lines - - -def test_solweig_sub_funcs(): - # prepare variables - SWC = SolweigRunCore( - config_path_str="tests/rustalgos/test_config_solweig.ini", - params_json_path="tests/rustalgos/test_params_solweig.json", - ) - idx = 12 - scale = 1 / SWC.raster_data.trf_arr[1] - SBC = 5.67051e-8 - if SWC.params.Tmrt_params.Value.posture == "Standing": - posture = SWC.params.Posture.Standing.Value - else: - posture = SWC.params.Posture.Sitting.Value - _, _, _, SNUP = daylen(SWC.environ_data.jday[idx], SWC.location["latitude"]) - first = np.round(posture.height) - if first == 0.0: - first = 1.0 - second = np.round(posture.height * 20.0) - dectime = SWC.environ_data.dectime[idx] - altmax = SWC.environ_data.altmax[idx] - Ta = SWC.environ_data.Ta[idx] - # tg_maps normally created in SolweigRunCore.run(), create here for testing - SWC.tg_maps = TgMaps(SWC.config.use_landcover, SWC.params, SWC.raster_data) - Tgamp = SWC.tg_maps.TgK * altmax + SWC.tg_maps.Tstart # Fixed 2021 - # Tgampwall = (TgK_wall * altmax - (Tstart_wall)) + (Tstart_wall) # Old - Tgampwall = SWC.tg_maps.TgK_wall * altmax + SWC.tg_maps.Tstart_wall - Tg = Tgamp * np.sin( - (((dectime - 
np.floor(dectime)) - SNUP / 24) / (SWC.tg_maps.TmaxLST / 24 - SNUP / 24)) * np.pi / 2 - ) # 2015 a, based on max sun altitude - Tgwall = Tgampwall * np.sin( - (((dectime - np.floor(dectime)) - SNUP / 24) / (SWC.tg_maps.TmaxLST_wall / 24 - SNUP / 24)) * np.pi / 2 - ) # 2015a, based on max sun altitude - if Tgwall < 0: # temporary for removing low Tg during morning 20130205 - # Tg = 0 - Tgwall = 0 - # shadow_mats normally created in SolweigRunCore.run(), create here for testing - svf_data = SvfData(SWC.config) - SWC.shadow_mats = ShadowMatrices(SWC.config, SWC.params, svf_data) - # walls_data normally created in SolweigRunCore.run(), create here for testing - SWC.walls_data = WallsData( - SWC.config, - SWC.params, - SWC.raster_data, - SWC.environ_data, - SWC.tg_maps, - ) - - sh_results = shadowing.calculate_shadows_wall_ht_25( - SWC.environ_data.azimuth[idx], - SWC.environ_data.altitude[idx], - scale, - SWC.raster_data.amaxvalue, - SWC.raster_data.dsm.astype(np.float32), - SWC.raster_data.cdsm.astype(np.float32), - SWC.raster_data.tdsm.astype(np.float32), - SWC.raster_data.bush.astype(np.float32), - SWC.raster_data.wallheight.astype(np.float32), - SWC.raster_data.wallaspect.astype(np.float32) * np.pi / 180.0, - None, - None, - None, - ) - shadow = sh_results.wall_sh - (1 - sh_results.veg_sh) * (1 - SWC.environ_data.psi[idx]) - - repeats = 3 - - def run_gvf_py(): - return gvf_2018a( # type: ignore - sh_results.wall_sun.astype(np.float32), - SWC.raster_data.wallheight.astype(np.float32), - SWC.raster_data.buildings.astype(np.float32), - scale, - shadow.astype(np.float32), - first, - second, - SWC.raster_data.wallaspect.astype(np.float32), - Tg.astype(np.float32), - Tgwall, - Ta, - SWC.tg_maps.emis_grid.astype(np.float32), - SWC.params.Emissivity.Value.Walls, - SWC.tg_maps.alb_grid.astype(np.float32), - SBC, - SWC.params.Albedo.Effective.Value.Walls, - SWC.raster_data.rows, - SWC.raster_data.cols, - SWC.environ_data.Twater[idx], - None, - False, - ) - - def 
run_gvf_rust(): - return gvf.gvf_calc( # type: ignore - sh_results.wall_sun.astype(np.float32), - SWC.raster_data.wallheight.astype(np.float32), - SWC.raster_data.buildings.astype(np.float32), - scale, - shadow.astype(np.float32), - first, - second, - SWC.raster_data.wallaspect.astype(np.float32), - Tg.astype(np.float32), - Tgwall, - Ta, - SWC.tg_maps.emis_grid.astype(np.float32), - SWC.params.Emissivity.Value.Walls, - SWC.tg_maps.alb_grid.astype(np.float32), - SBC, - SWC.params.Albedo.Effective.Value.Walls, - SWC.environ_data.Twater[idx], - None, - False, - ) - - py_gvf_timings = timeit.repeat(run_gvf_py, number=1, repeat=repeats) - print_timing_stats("gvf_2018a", py_gvf_timings) - - rust_gvf_timings = timeit.repeat(run_gvf_rust, number=1, repeat=repeats) - print_timing_stats("gvf.gvf_calc", rust_gvf_timings) - - # Print relative speed as percentage - relative_speed(py_gvf_timings, rust_gvf_timings) - - ( - gvfLup, - gvfalb, - gvfalbnosh, - gvfLupE, - gvfalbE, - gvfalbnoshE, - gvfLupS, - gvfalbS, - gvfalbnoshS, - gvfLupW, - gvfalbW, - gvfalbnoshW, - gvfLupN, - gvfalbN, - gvfalbnoshN, - gvfSum, - gvfNorm, - ) = run_gvf_py() - - result_gvf_py = { - "gvfLup": gvfLup, - "gvfalb": gvfalb, - "gvfalbnosh": gvfalbnosh, - "gvfLupE": gvfLupE, - "gvfalbE": gvfalbE, - "gvfalbnoshE": gvfalbnoshE, - "gvfLupS": gvfLupS, - "gvfalbS": gvfalbS, - "gvfalbnoshS": gvfalbnoshS, - "gvfLupW": gvfLupW, - "gvfalbW": gvfalbW, - "gvfalbnoshW": gvfalbnoshW, - "gvfLupN": gvfLupN, - "gvfalbN": gvfalbN, - "gvfalbnoshN": gvfalbnoshN, - "gvfSum": gvfSum, - "gvfNorm": gvfNorm, - } - - result_gvf_rust = run_gvf_rust() - - key_map = { - "gvfSum": "gvf_sum", - "gvfNorm": "gvf_norm", - "gvfLup": "gvf_lup", - "gvfLupN": "gvf_lup_n", - "gvfLupS": "gvf_lup_s", - "gvfLupE": "gvf_lup_e", - "gvfLupW": "gvf_lup_w", - "gvfalb": "gvfalb", - "gvfalbN": "gvfalb_n", - "gvfalbS": "gvfalb_s", - "gvfalbE": "gvfalb_e", - "gvfalbW": "gvfalb_w", - "gvfalbnosh": "gvfalbnosh", - "gvfalbnoshN": "gvfalbnosh_n", - 
"gvfalbnoshS": "gvfalbnosh_s", - "gvfalbnoshE": "gvfalbnosh_e", - "gvfalbnoshW": "gvfalbnosh_w", - } - # Compare results - compare_results(result_gvf_py, result_gvf_rust, key_map) - # Plot visual residuals - plot_visual_residuals(gvfSum, result_gvf_rust.gvf_sum, title_prefix="GVF Sum") - plot_visual_residuals(gvfNorm, result_gvf_rust.gvf_norm, title_prefix="GVF Norm") - plot_visual_residuals(gvfLup, result_gvf_rust.gvf_lup, title_prefix="GVF Lup") - plot_visual_residuals(gvfLupN, result_gvf_rust.gvf_lup_n, title_prefix="GVF Lup N") - plot_visual_residuals(gvfLupS, result_gvf_rust.gvf_lup_s, title_prefix="GVF Lup S") - plot_visual_residuals(gvfLupW, result_gvf_rust.gvf_lup_w, title_prefix="GVF Lup W") - plot_visual_residuals(gvfLupE, result_gvf_rust.gvf_lup_e, title_prefix="GVF Lup E") - plot_visual_residuals(gvfalb, result_gvf_rust.gvfalb, title_prefix="GVF Albedo") - plot_visual_residuals(gvfalbN, result_gvf_rust.gvfalb_n, title_prefix="GVF Albedo N") - plot_visual_residuals(gvfalbS, result_gvf_rust.gvfalb_s, title_prefix="GVF Albedo S") - plot_visual_residuals(gvfalbW, result_gvf_rust.gvfalb_w, title_prefix="GVF Albedo W") - plot_visual_residuals(gvfalbE, result_gvf_rust.gvfalb_e, title_prefix="GVF Albedo E") - plot_visual_residuals(gvfalbnosh, result_gvf_rust.gvfalbnosh, title_prefix="GVF Albedo No Shadow") - plot_visual_residuals(gvfalbnoshN, result_gvf_rust.gvfalbnosh_n, title_prefix="GVF Albedo No Shadow N") - plot_visual_residuals(gvfalbnoshS, result_gvf_rust.gvfalbnosh_s, title_prefix="GVF Albedo No Shadow S") - plot_visual_residuals(gvfalbnoshW, result_gvf_rust.gvfalbnosh_w, title_prefix="GVF Albedo No Shadow W") - plot_visual_residuals(gvfalbnoshE, result_gvf_rust.gvfalbnosh_e, title_prefix="GVF Albedo No Shadow E") - - ### KSIDE - t = 0.0 - F_sh = cylindric_wedge( - SWC.environ_data.zen[idx], - SWC.svf_data.svfalfa, - SWC.raster_data.rows, - SWC.raster_data.cols, - ) - Kup, KupE, KupS, KupW, KupN = Kup_veg_2015a( - SWC.environ_data.radI[idx], - 
SWC.environ_data.radD[idx], - SWC.environ_data.radG[idx], - SWC.environ_data.altitude[idx], - SWC.raster_data.svfbuveg, - SWC.params.Emissivity.Value.Walls, - F_sh, - result_gvf_rust.gvfalb, - result_gvf_rust.gvfalb_e, - result_gvf_rust.gvfalb_s, - result_gvf_rust.gvfalb_w, - result_gvf_rust.gvfalb_n, - result_gvf_rust.gvfalbnosh, - result_gvf_rust.gvfalbnosh_e, - result_gvf_rust.gvfalbnosh_s, - result_gvf_rust.gvfalbnosh_w, - result_gvf_rust.gvfalbnosh_n, - ) - zenDeg = SWC.environ_data.zen[idx] * (180 / np.pi) - lv, pc_, pb_ = Perez_v3( - zenDeg, - SWC.environ_data.azimuth[idx], - SWC.environ_data.radD[idx], - SWC.environ_data.radI[idx], - SWC.environ_data.jday[idx], - 1, - 2, - ) - - def run_kside_py(): - return Kside_veg_v2022a( # type: ignore - SWC.environ_data.radI[idx], - SWC.environ_data.radD[idx], - SWC.environ_data.radG[idx], - shadow.astype(np.float32), - SWC.svf_data.svf_south.astype(np.float32), - SWC.svf_data.svf_west.astype(np.float32), - SWC.svf_data.svf_north.astype(np.float32), - SWC.svf_data.svf_east.astype(np.float32), - SWC.svf_data.svf_veg_east.astype(np.float32), - SWC.svf_data.svf_veg_south.astype(np.float32), - SWC.svf_data.svf_veg_west.astype(np.float32), - SWC.svf_data.svf_veg_north.astype(np.float32), - SWC.environ_data.azimuth[idx], - SWC.environ_data.altitude[idx], - SWC.environ_data.psi[idx], - t, - SWC.params.Albedo.Effective.Value.Walls, - F_sh.astype(np.float32), - KupE.astype(np.float32), - KupS.astype(np.float32), - KupW.astype(np.float32), - KupN.astype(np.float32), - True, # cylindrical - lv.astype(np.float32) if lv is not None else None, - True, # anisotropic sky - SWC.shadow_mats.diffsh.astype(np.float32) if SWC.shadow_mats.diffsh is not None else None, - SWC.raster_data.rows, - SWC.raster_data.cols, - SWC.shadow_mats.asvf.astype(np.float32) if SWC.shadow_mats.asvf is not None else None, - SWC.shadow_mats.shmat.astype(np.float32) if SWC.shadow_mats.shmat is not None else None, - SWC.shadow_mats.vegshmat.astype(np.float32) if 
SWC.shadow_mats.vegshmat is not None else None, - SWC.shadow_mats.vbshvegshmat.astype(np.float32) if SWC.shadow_mats.vbshvegshmat is not None else None, - ) - - def run_kside_rust(): - return vegetation.kside_veg( # type: ignore - SWC.environ_data.radI[idx], - SWC.environ_data.radD[idx], - SWC.environ_data.radG[idx], - shadow.astype(np.float32), - SWC.svf_data.svf_south.astype(np.float32), - SWC.svf_data.svf_west.astype(np.float32), - SWC.svf_data.svf_north.astype(np.float32), - SWC.svf_data.svf_east.astype(np.float32), - SWC.svf_data.svf_veg_east.astype(np.float32), - SWC.svf_data.svf_veg_south.astype(np.float32), - SWC.svf_data.svf_veg_west.astype(np.float32), - SWC.svf_data.svf_veg_north.astype(np.float32), - SWC.environ_data.azimuth[idx], - SWC.environ_data.altitude[idx], - SWC.environ_data.psi[idx], - t, - SWC.params.Albedo.Effective.Value.Walls, - F_sh.astype(np.float32), - KupE.astype(np.float32), - KupS.astype(np.float32), - KupW.astype(np.float32), - KupN.astype(np.float32), - True, # cylindrical - lv.astype(np.float32) if lv is not None else None, - True, # anisotropic sky - SWC.shadow_mats.diffsh.astype(np.float32) if SWC.shadow_mats.diffsh is not None else None, - SWC.shadow_mats.asvf.astype(np.float32) if SWC.shadow_mats.asvf is not None else None, - SWC.shadow_mats.shmat.astype(np.float32) if SWC.shadow_mats.shmat is not None else None, - SWC.shadow_mats.vegshmat.astype(np.float32) if SWC.shadow_mats.vegshmat is not None else None, - SWC.shadow_mats.vbshvegshmat.astype(np.float32) if SWC.shadow_mats.vbshvegshmat is not None else None, - ) - - py_kside_timings = timeit.repeat(run_kside_py, number=1, repeat=repeats) - print_timing_stats("kside_veg_v2022a", py_kside_timings) - - rust_kside_timings = timeit.repeat(run_kside_rust, number=1, repeat=repeats) - print_timing_stats("vegetation.kside_veg", rust_kside_timings) - - # Print relative speed as percentage - relative_speed(py_kside_timings, rust_kside_timings) - - ( - Keast, - Ksouth, - Kwest, - 
Knorth, - KsideI, - KsideD, - Kside, - ) = run_kside_py() - - result_kside_py = { - "Keast": Keast, - "Ksouth": Ksouth, - "Kwest": Kwest, - "Knorth": Knorth, - "KsideI": KsideI, - "KsideD": KsideD, - "Kside": Kside, - } - - result_kside_rust = run_kside_rust() - - key_map = { - "Keast": "keast", - "Ksouth": "ksouth", - "Kwest": "kwest", - "Knorth": "knorth", - "KsideI": "kside_i", - "KsideD": "kside_d", - "Kside": "kside", - } - # Compare results - compare_results(result_kside_py, result_kside_rust, key_map) - # Plot visual residuals - plot_visual_residuals(Keast, result_kside_rust.keast, title_prefix="Keast_veg") - plot_visual_residuals(Ksouth, result_kside_rust.ksouth, title_prefix="Ksouth_veg") - plot_visual_residuals(Kwest, result_kside_rust.kwest, title_prefix="Kwest_veg") - plot_visual_residuals(Knorth, result_kside_rust.knorth, title_prefix="Knorth_veg") - plot_visual_residuals(KsideI, result_kside_rust.kside_i, title_prefix="KsideI_veg") - plot_visual_residuals(KsideD, result_kside_rust.kside_d, title_prefix="KsideD_veg") - plot_visual_residuals(Kside, result_kside_rust.kside, title_prefix="Kside_veg") - - ### LSIDE - elvis = 0.0 - ea = 6.107 * 10 ** ((7.5 * Ta) / (237.3 + Ta)) * (SWC.environ_data.RH[idx] / 100.0) - msteg = 46.5 * (ea / (Ta + 273.15)) - esky = (1 - (1 + msteg) * np.exp(-((1.2 + 3.0 * msteg) ** 0.5))) + elvis - I0, CI, Kt, I0et, CIuncorr = clearnessindex_2013b( - SWC.environ_data.zen[idx], - SWC.environ_data.jday[idx], - Ta, - SWC.environ_data.RH[idx] / 100.0, - SWC.environ_data.radG[idx], - SWC.location, - SWC.environ_data.P[idx], - ) - ewall = SWC.params.Albedo.Effective.Value.Walls - Ldown = ( - (SWC.svf_data.svf + SWC.svf_data.svf_veg - 1) * esky * SBC * ((Ta + 273.15) ** 4) - + (2 - SWC.svf_data.svf_veg - SWC.svf_data.svf_veg_blocks_bldg_sh) * ewall * SBC * ((Ta + 273.15) ** 4) - + (SWC.svf_data.svf_veg_blocks_bldg_sh - SWC.svf_data.svf) * ewall * SBC * ((Ta + 273.15 + Tgwall) ** 4) - + (2 - SWC.svf_data.svf - SWC.svf_data.svf_veg) * (1 
- ewall) * esky * SBC * ((Ta + 273.15) ** 4) - ) - if CI < 0.95: - c = 1 - CI - Ldown = Ldown * (1 - c) + c * ( - (SWC.svf_data.svf + SWC.svf_data.svf_veg - 1) * SBC * ((Ta + 273.15) ** 4) - + (2 - SWC.svf_data.svf_veg - SWC.svf_data.svf_veg_blocks_bldg_sh) * ewall * SBC * ((Ta + 273.15) ** 4) - + (SWC.svf_data.svf_veg_blocks_bldg_sh - SWC.svf_data.svf) * ewall * SBC * ((Ta + 273.15 + Tgwall) ** 4) - + (2 - SWC.svf_data.svf - SWC.svf_data.svf_veg) * (1 - ewall) * esky * SBC * ((Ta + 273.15) ** 4) - ) - timestepdec = 0 - timeadd = 0.0 - firstdaytime = 1.0 - Lup, timeaddnotused, Tgmap1 = TsWaveDelay_2015a(gvfLup, firstdaytime, timeadd, timestepdec, SWC.tg_maps.Tgmap1) - LupE, timeaddnotused, Tgmap1E = TsWaveDelay_2015a(gvfLupE, firstdaytime, timeadd, timestepdec, SWC.tg_maps.Tgmap1E) - LupS, timeaddnotused, Tgmap1S = TsWaveDelay_2015a(gvfLupS, firstdaytime, timeadd, timestepdec, SWC.tg_maps.Tgmap1S) - LupW, timeaddnotused, Tgmap1W = TsWaveDelay_2015a(gvfLupW, firstdaytime, timeadd, timestepdec, SWC.tg_maps.Tgmap1W) - LupN, timeaddnotused, Tgmap1N = TsWaveDelay_2015a(gvfLupN, firstdaytime, timeadd, timestepdec, SWC.tg_maps.Tgmap1N) - - def run_lside_py(): - return Lside_veg_v2022a( - SWC.svf_data.svf_south.astype(np.float32), - SWC.svf_data.svf_west.astype(np.float32), - SWC.svf_data.svf_north.astype(np.float32), - SWC.svf_data.svf_east.astype(np.float32), - SWC.svf_data.svf_veg_east.astype(np.float32), - SWC.svf_data.svf_veg_south.astype(np.float32), - SWC.svf_data.svf_veg_west.astype(np.float32), - SWC.svf_data.svf_veg_north.astype(np.float32), - SWC.svf_data.svf_veg_blocks_bldg_sh_east.astype(np.float32), - SWC.svf_data.svf_veg_blocks_bldg_sh_south.astype(np.float32), - SWC.svf_data.svf_veg_blocks_bldg_sh_west.astype(np.float32), - SWC.svf_data.svf_veg_blocks_bldg_sh_north.astype(np.float32), - SWC.environ_data.azimuth[idx], - SWC.environ_data.altitude[idx], - Ta, - Tgwall, - SBC, - SWC.params.Albedo.Effective.Value.Walls, - Ldown.astype(np.float32), - esky, - t, - 
F_sh.astype(np.float32), - CI, - LupE.astype(np.float32), - LupS.astype(np.float32), - LupW.astype(np.float32), - LupN.astype(np.float32), - 0, - ) - - def run_lside_rust(): - return vegetation.lside_veg( - SWC.svf_data.svf_south.astype(np.float32), - SWC.svf_data.svf_west.astype(np.float32), - SWC.svf_data.svf_north.astype(np.float32), - SWC.svf_data.svf_east.astype(np.float32), - SWC.svf_data.svf_veg_east.astype(np.float32), - SWC.svf_data.svf_veg_south.astype(np.float32), - SWC.svf_data.svf_veg_west.astype(np.float32), - SWC.svf_data.svf_veg_north.astype(np.float32), - SWC.svf_data.svf_veg_blocks_bldg_sh_east.astype(np.float32), - SWC.svf_data.svf_veg_blocks_bldg_sh_south.astype(np.float32), - SWC.svf_data.svf_veg_blocks_bldg_sh_west.astype(np.float32), - SWC.svf_data.svf_veg_blocks_bldg_sh_north.astype(np.float32), - SWC.environ_data.azimuth[idx], - SWC.environ_data.altitude[idx], - Ta, - Tgwall, - SBC, - SWC.params.Albedo.Effective.Value.Walls, - Ldown.astype(np.float32), - esky, - t, - F_sh.astype(np.float32), - CI, - LupE.astype(np.float32), - LupS.astype(np.float32), - LupW.astype(np.float32), - LupN.astype(np.float32), - False, - ) - - py_lside_timings = timeit.repeat(run_lside_py, number=1, repeat=repeats) - print_timing_stats("lside_veg_v2022a", py_lside_timings) - - rust_lside_timings = timeit.repeat(run_lside_rust, number=1, repeat=repeats) - print_timing_stats("vegetation.lside_veg", rust_lside_timings) - - # Print relative speed as percentage - relative_speed(py_lside_timings, rust_lside_timings) - - ( - Least, - Lsouth, - Lwest, - Lnorth, - ) = run_lside_py() - - result_lside_py = { - "Least": Least, - "Lsouth": Lsouth, - "Lwest": Lwest, - "Lnorth": Lnorth, - } - result_lside_rust = run_lside_rust() - - key_map = { - "Least": "least", - "Lsouth": "lsouth", - "Lwest": "lwest", - "Lnorth": "lnorth", - } - # Compare results - compare_results(result_lside_py, result_lside_rust, key_map) - # Plot visual residuals - plot_visual_residuals(Least, 
result_lside_rust.least, title_prefix="Least_veg") - plot_visual_residuals(Lsouth, result_lside_rust.lsouth, title_prefix="Lsouth_veg") - plot_visual_residuals(Lwest, result_lside_rust.lwest, title_prefix="Lwest_veg") - plot_visual_residuals(Lnorth, result_lside_rust.lnorth, title_prefix="Lnorth_veg") - - ### aniso - skyvaultalt, skyvaultazi, _, _, _, _, _ = create_patches(2) - patch_emissivities = np.zeros(skyvaultalt.shape[0]) - x = np.transpose(np.atleast_2d(skyvaultalt)) - y = np.transpose(np.atleast_2d(skyvaultazi)) - z = np.transpose(np.atleast_2d(patch_emissivities)) - L_patches = np.append(np.append(x, y, axis=1), z, axis=1) - steradians, skyalt, patch_altitude = patch_steradians(L_patches) - Lup = SBC * SWC.tg_maps.emis_grid * ((SWC.tg_maps.Knight + Ta + Tg + 273.15) ** 4) - - def run_ani_py(): - return ani_sky( - SWC.shadow_mats.shmat.astype(np.float32), - SWC.shadow_mats.vegshmat.astype(np.float32), - SWC.shadow_mats.vbshvegshmat.astype(np.float32), - SWC.environ_data.altitude[idx], - SWC.environ_data.azimuth[idx], - SWC.shadow_mats.asvf.astype(np.float32), - SWC.config.person_cylinder, - esky, - L_patches.astype(np.float32), - 0, # wall scheme, - SWC.walls_data.voxelTable.astype(np.float32) if SWC.walls_data.voxelTable is not None else None, - SWC.walls_data.voxelMaps.astype(np.float32) if SWC.walls_data.voxelMaps is not None else None, - steradians.astype(np.float32), - Ta, - Tgwall, - SWC.params.Emissivity.Value.Walls, - Lup.astype(np.float32), - SWC.environ_data.radI[idx], - SWC.environ_data.radD[idx], - SWC.environ_data.radG[idx], - lv.astype(np.float32), - SWC.params.Albedo.Effective.Value.Walls, - 0, - SWC.shadow_mats.diffsh.astype(np.float32), - shadow.astype(np.float32), - KupE.astype(np.float32), - KupS.astype(np.float32), - KupW.astype(np.float32), - KupN.astype(np.float32), - idx, - ) - - def run_ani_rust(): - return sky.anisotropic_sky( - SWC.shadow_mats.shmat.astype(np.float32), - SWC.shadow_mats.vegshmat.astype(np.float32), - 
SWC.shadow_mats.vbshvegshmat.astype(np.float32), - SWC.environ_data.altitude[idx], - SWC.environ_data.azimuth[idx], - SWC.shadow_mats.asvf.astype(np.float32), - SWC.config.person_cylinder, - esky, - L_patches.astype(np.float32), - False, # wall scheme, - SWC.walls_data.voxelTable.astype(np.float32) if SWC.walls_data.voxelTable is not None else None, - SWC.walls_data.voxelMaps.astype(np.float32) if SWC.walls_data.voxelMaps is not None else None, - steradians.astype(np.float32), - Ta, - Tgwall, - SWC.params.Emissivity.Value.Walls, - Lup.astype(np.float32), - SWC.environ_data.radI[idx], - SWC.environ_data.radD[idx], - SWC.environ_data.radG[idx], - lv.astype(np.float32), - SWC.params.Albedo.Effective.Value.Walls, - False, - SWC.shadow_mats.diffsh.astype(np.float32), - shadow.astype(np.float32), - KupE.astype(np.float32), - KupS.astype(np.float32), - KupW.astype(np.float32), - KupN.astype(np.float32), - idx, - ) - - py_ani_timings = timeit.repeat(run_ani_py, number=1, repeat=repeats) - print_timing_stats("anisotropic_sky", py_ani_timings) - - rust_ani_timings = timeit.repeat(run_ani_rust, number=1, repeat=repeats) - print_timing_stats("sky.anisotropic_sky", rust_ani_timings) - - # Print relative speed as percentage - relative_speed(py_ani_timings, rust_ani_timings) - - ( - Ldown, - Lside, - Lside_sky, - Lside_veg, - Lside_sh, - Lside_sun, - Lside_ref, - Least, - Lwest, - Lnorth, - Lsouth, - Keast, - Ksouth, - Kwest, - Knorth, - KsideI, - KsideD, - Kside, - steradians, - skyalt, - ) = run_ani_py() - - result_ani_py = { - "Ldown": Ldown, - "Lside": Lside, - "Lside_sky": Lside_sky, - "Lside_veg": Lside_veg, - "Lside_sh": Lside_sh, - "Lside_sun": Lside_sun, - "Lside_ref": Lside_ref, - "Least": Least, - "Lwest": Lwest, - "Lnorth": Lnorth, - "Lsouth": Lsouth, - "Keast": Keast, - "Ksouth": Ksouth, - "Kwest": Kwest, - "Knorth": Knorth, - "KsideI": KsideI, - "KsideD": KsideD, - "Kside": Kside, - "steradians": steradians, - "skyalt": skyalt, - } - - result_ani_rust = 
run_ani_rust() - - key_map = { - "Ldown": "ldown", - "Lside": "lside", - "Lside_sky": "lside_sky", - "Lside_veg": "lside_veg", - "Lside_sh": "lside_sh", - "Lside_sun": "lside_sun", - "Lside_ref": "lside_ref", - "Least": "least", - "Lwest": "lwest", - "Lnorth": "lnorth", - "Lsouth": "lsouth", - "Keast": "keast", - "Ksouth": "ksouth", - "Kwest": "kwest", - "Knorth": "knorth", - "KsideI": "kside_i", - "KsideD": "kside_d", - "Kside": "kside", - "steradians": "steradians", - "skyalt": "skyalt", - } - - # Compare results - compare_results(result_ani_py, result_ani_rust, key_map) - # Plot visual residuals - plot_visual_residuals(Ldown, result_ani_rust.ldown, title_prefix="Ldown") - plot_visual_residuals(Lside, result_ani_rust.lside, title_prefix="Lside") - plot_visual_residuals(Lside_sky, result_ani_rust.lside_sky, title_prefix="Lside_sky") - plot_visual_residuals(Lside_veg, result_ani_rust.lside_veg, title_prefix="Lside_veg") - plot_visual_residuals(Lside_sh, result_ani_rust.lside_sh, title_prefix="Lside_sh") - plot_visual_residuals(Lside_sun, result_ani_rust.lside_sun, title_prefix="Lside_sun") - plot_visual_residuals(Lside_ref, result_ani_rust.lside_ref, title_prefix="Lside_ref") - plot_visual_residuals(Least, result_ani_rust.least, title_prefix="Least") - plot_visual_residuals(Lwest, result_ani_rust.lwest, title_prefix="Lwest") - plot_visual_residuals(Lnorth, result_ani_rust.lnorth, title_prefix="Lnorth") - plot_visual_residuals(Lsouth, result_ani_rust.lsouth, title_prefix="Lsouth") - plot_visual_residuals(Keast, result_ani_rust.keast, title_prefix="Keast") - plot_visual_residuals(Ksouth, result_ani_rust.ksouth, title_prefix="Ksouth") - plot_visual_residuals(Kwest, result_ani_rust.kwest, title_prefix="Kwest") - plot_visual_residuals(Knorth, result_ani_rust.knorth, title_prefix="Knorth") - plot_visual_residuals(KsideI, result_ani_rust.kside_i, title_prefix="KsideI") - plot_visual_residuals(KsideD, result_ani_rust.kside_d, title_prefix="KsideD") - 
plot_visual_residuals(Kside, result_ani_rust.kside, title_prefix="Kside") - - -# Calculate and print per-array right percentage -def pct(a, b, atol, rtol): - if a is None or b is None: - return float("nan") - # Ensure shapes match before comparison - if a.shape != b.shape: - return f"Shape mismatch: {a.shape} vs {b.shape}" - return 100.0 * np.isclose(a, b, atol=atol, rtol=rtol, equal_nan=True).sum() / a.size - - -def compare_results(result_py, result_rust, key_map, atol=0.001, rtol=0.001): - print("\n--- Comparison ---") - for py_key, rust_key in key_map.items(): - py_val = result_py.get(py_key) - if isinstance(result_rust, dict): - rust_val = result_rust.get(rust_key) - else: - rust_val = getattr(result_rust, rust_key, None) - match_pct = pct(py_val, rust_val, atol=atol, rtol=rtol) - mean_diff = ( - np.nanmean(np.abs(py_val - rust_val)) if py_val is not None and rust_val is not None else float("nan") - ) - range_diff = np.nanmax(py_val) - np.nanmin(py_val) if py_val is not None else float("nan") - print( - f"{py_key:<20} vs {rust_key:<35} right: {match_pct:.2f} mean diff: {mean_diff:.3f} range: {range_diff:.2f}" - ) - - -def print_timing_stats(func_name, times): - """Prints the min, max, and average timing statistics for a function.""" - if not times: - print(f"\n{func_name}: No timing data available.") - return - min_time = min(times) - max_time = max(times) - avg_time = sum(times) / len(times) - print(f"\n{func_name}: min={min_time:.3f}s, max={max_time:.3f}s, avg={avg_time:.3f}s") - - -def relative_speed(times_py, times_rust): - """Calculates and prints how many times faster the Rust version is compared to Python.""" - rust_avg = sum(times_rust) / len(times_rust) - py_avg = sum(times_py) / len(times_py) - speedup_factor = py_avg / rust_avg - print(f"\nRelative speed: {speedup_factor:.2f} times faster for given data.") - - -def plot_visual_residuals( - py_array, - rust_array, - title_prefix="Visual", - cmap="viridis", - cmap_residuals="coolwarm", - 
tick_fontsize="xx-small", - colorbar_shrink=0.6, -): - # check shape - if py_array.shape != rust_array.shape: - print(f"Error: Input arrays have different shapes: {py_array.shape} vs {rust_array.shape}") - return - - fig, axes = plt.subplots(3, 1, figsize=(6, 12)) # 3 rows, 1 column - - # Plot Array 1 (Python) - im1 = axes[0].imshow(py_array, cmap=cmap) - cbar1 = fig.colorbar(im1, ax=axes[0], shrink=colorbar_shrink) - cbar1.ax.tick_params(labelsize=tick_fontsize) - axes[0].set_title(f"{title_prefix} - Array 1 (Python)") - axes[0].axis("off") - - # Plot Array 2 (Rust) - im2 = axes[1].imshow(rust_array, cmap=cmap) - cbar2 = fig.colorbar(im2, ax=axes[1], shrink=colorbar_shrink) - cbar2.ax.tick_params(labelsize=tick_fontsize) - axes[1].set_title(f"{title_prefix} - Array 2 (Rust)") - axes[1].axis("off") - - # Determine the symmetric range for the residuals colormap - min_extent = 0.001 - residuals = rust_array - py_array - max_abs_residual = max(np.abs(residuals).max(), min_extent) - - im3 = axes[2].imshow(residuals, cmap=cmap_residuals, vmin=-max_abs_residual, vmax=max_abs_residual) - cbar3 = fig.colorbar(im3, ax=axes[2], shrink=colorbar_shrink) - cbar3.ax.tick_params(labelsize=tick_fontsize) - axes[2].set_title(f"{title_prefix} - Residuals (Rust - Python)") - axes[2].axis("off") - - plt.tight_layout() # Adjust layout to prevent overlapping titles/labels - plt.savefig(f"temp/{title_prefix.lower().replace(' ', '_')}_residuals.png", dpi=150) diff --git a/tests/spec/__init__.py b/tests/spec/__init__.py new file mode 100644 index 0000000..bb2e0af --- /dev/null +++ b/tests/spec/__init__.py @@ -0,0 +1,2 @@ +# Specification-based tests +# Tests in this directory verify properties defined in specs/*.md diff --git a/tests/spec/test_aniso_gpu_parity.py b/tests/spec/test_aniso_gpu_parity.py new file mode 100644 index 0000000..75907a1 --- /dev/null +++ b/tests/spec/test_aniso_gpu_parity.py @@ -0,0 +1,410 @@ +"""GPU vs CPU parity tests for the anisotropic sky computation. 
+ +Verifies that the GPU (WGSL) anisotropic sky shader produces results +matching the CPU (Rayon) implementation within f32 accumulation tolerance. + +The GPU path outputs (ldown, lside, kside_partial) which the pipeline +combines with trivial CPU-side terms (kside_i, keast=kup*0.5, etc.). +""" + +from datetime import datetime + +import numpy as np +import pytest +from solweig.api import ( + Location, + SurfaceData, + Weather, + calculate, +) +from solweig.models.precomputed import ShadowArrays +from solweig.rustalgos import pipeline + +pytestmark = pytest.mark.slow + + +@pytest.fixture(scope="module") +def location(): + return Location(latitude=57.7, longitude=12.0, utc_offset=1) + + +@pytest.fixture(scope="module") +def noon_weather(): + return Weather( + datetime=datetime(2024, 7, 15, 12, 0), + ta=25.0, + rh=50.0, + global_rad=800.0, + ) + + +def _make_flat_surface_with_shadows(shape=(10, 10), n_patches=153): + """Create a flat surface with synthetic shadow matrices.""" + from conftest import make_mock_svf + + dsm = np.ones(shape, dtype=np.float32) * 2.0 + surface = SurfaceData(dsm=dsm, pixel_size=1.0, svf=make_mock_svf(shape)) + + n_pack = (n_patches + 7) // 8 + shmat_u8 = np.full((shape[0], shape[1], n_pack), 0xFF, dtype=np.uint8) + vegshmat_u8 = np.full((shape[0], shape[1], n_pack), 0xFF, dtype=np.uint8) + vbshmat_u8 = np.full((shape[0], shape[1], n_pack), 0xFF, dtype=np.uint8) + + surface.shadow_matrices = ShadowArrays( + _shmat_u8=shmat_u8, + _vegshmat_u8=vegshmat_u8, + _vbshmat_u8=vbshmat_u8, + _n_patches=n_patches, + ) + return surface + + +def _make_partial_shadow_surface(shape=(15, 15), n_patches=153): + """Surface with spatially varying shadow patterns for thorough parity testing.""" + from conftest import make_mock_svf + + dsm = np.ones(shape, dtype=np.float32) * 2.0 + surface = SurfaceData(dsm=dsm, pixel_size=1.0, svf=make_mock_svf(shape)) + + n_pack = (n_patches + 7) // 8 + rng = np.random.default_rng(42) + + shmat_u8 = np.full((shape[0], shape[1], 
n_pack), 0xFF, dtype=np.uint8) + vegshmat_u8 = np.full((shape[0], shape[1], n_pack), 0xFF, dtype=np.uint8) + vbshmat_u8 = np.full((shape[0], shape[1], n_pack), 0xFF, dtype=np.uint8) + + # Block some patches in the right half + for p in range(40): + byte_idx = p >> 3 + bit_mask = np.uint8(1 << (p & 7)) + shmat_u8[:, shape[1] // 2 :, byte_idx] &= ~bit_mask + + # Random veg blocking in top half + for p in range(20, 60): + byte_idx = p >> 3 + bit_mask = np.uint8(1 << (p & 7)) + mask = rng.integers(0, 2, (shape[0] // 2, shape[1]), dtype=np.uint8) + vegshmat_u8[: shape[0] // 2, :, byte_idx] &= ~(bit_mask * (1 - mask)) + + surface.shadow_matrices = ShadowArrays( + _shmat_u8=shmat_u8, + _vegshmat_u8=vegshmat_u8, + _vbshmat_u8=vbshmat_u8.copy(), + _n_patches=n_patches, + ) + return surface + + +@pytest.fixture(scope="module") +def gpu_available(): + """Check if GPU aniso is available (GPU feature compiled + hardware present).""" + try: + return pipeline.is_aniso_gpu_enabled() + except AttributeError: + return False + + +class TestAnisoGpuCpuParity: + """GPU and CPU anisotropic sky must produce matching results.""" + + def _run_with_gpu(self, surface, location, weather, *, gpu_on): + """Run calculate() with GPU enabled or disabled.""" + try: + if gpu_on: + pipeline.enable_aniso_gpu() + else: + pipeline.disable_aniso_gpu() + except AttributeError: + if gpu_on: + pytest.skip("GPU feature not compiled") + + result = calculate( + surface, + location, + weather, + use_anisotropic_sky=True, + ) + return result + + def test_open_sky_tmrt_parity(self, location, noon_weather, gpu_available): + """Open sky: GPU and CPU Tmrt match within f32 tolerance.""" + if not gpu_available: + pytest.skip("GPU not available") + + surface_gpu = _make_flat_surface_with_shadows() + surface_cpu = _make_flat_surface_with_shadows() + + result_gpu = self._run_with_gpu(surface_gpu, location, noon_weather, gpu_on=True) + result_cpu = self._run_with_gpu(surface_cpu, location, noon_weather, gpu_on=False) + + 
valid = ~np.isnan(result_gpu.tmrt) & ~np.isnan(result_cpu.tmrt) + assert np.any(valid), "Should have valid Tmrt values" + + np.testing.assert_allclose( + result_gpu.tmrt[valid], + result_cpu.tmrt[valid], + rtol=1e-3, + atol=0.5, + err_msg="GPU vs CPU Tmrt mismatch on open sky", + ) + + def test_open_sky_kdown_parity(self, location, noon_weather, gpu_available): + """Open sky: GPU and CPU kdown match within f32 tolerance.""" + if not gpu_available: + pytest.skip("GPU not available") + + surface_gpu = _make_flat_surface_with_shadows() + surface_cpu = _make_flat_surface_with_shadows() + + result_gpu = self._run_with_gpu(surface_gpu, location, noon_weather, gpu_on=True) + result_cpu = self._run_with_gpu(surface_cpu, location, noon_weather, gpu_on=False) + + valid = ~np.isnan(result_gpu.kdown) & ~np.isnan(result_cpu.kdown) + if np.any(valid): + np.testing.assert_allclose( + result_gpu.kdown[valid], + result_cpu.kdown[valid], + rtol=1e-3, + atol=1.0, + err_msg="GPU vs CPU kdown mismatch on open sky", + ) + + def test_partial_shadows_parity(self, location, noon_weather, gpu_available): + """Partial shadows: GPU and CPU Tmrt match within f32 tolerance.""" + if not gpu_available: + pytest.skip("GPU not available") + + surface_gpu = _make_partial_shadow_surface() + surface_cpu = _make_partial_shadow_surface() + + result_gpu = self._run_with_gpu(surface_gpu, location, noon_weather, gpu_on=True) + result_cpu = self._run_with_gpu(surface_cpu, location, noon_weather, gpu_on=False) + + valid = ~np.isnan(result_gpu.tmrt) & ~np.isnan(result_cpu.tmrt) + assert np.any(valid), "Should have valid Tmrt values" + + np.testing.assert_allclose( + result_gpu.tmrt[valid], + result_cpu.tmrt[valid], + rtol=1e-3, + atol=0.5, + err_msg="GPU vs CPU Tmrt mismatch with partial shadows", + ) + + def test_full_obstruction_parity(self, location, noon_weather, gpu_available): + """All patches blocked: GPU and CPU should produce matching low-radiation results.""" + if not gpu_available: + 
pytest.skip("GPU not available") + + shape = (10, 10) + n_patches = 153 + surface_gpu = _make_flat_surface_with_shadows(shape=shape, n_patches=n_patches) + surface_cpu = _make_flat_surface_with_shadows(shape=shape, n_patches=n_patches) + + # Zero out all shadow matrices — every patch blocked + n_pack = (n_patches + 7) // 8 + zeros = np.zeros((shape[0], shape[1], n_pack), dtype=np.uint8) + for s in (surface_gpu, surface_cpu): + s.shadow_matrices = ShadowArrays( + _shmat_u8=zeros.copy(), + _vegshmat_u8=zeros.copy(), + _vbshmat_u8=zeros.copy(), + _n_patches=n_patches, + ) + + result_gpu = self._run_with_gpu(surface_gpu, location, noon_weather, gpu_on=True) + result_cpu = self._run_with_gpu(surface_cpu, location, noon_weather, gpu_on=False) + + valid = ~np.isnan(result_gpu.tmrt) & ~np.isnan(result_cpu.tmrt) + assert np.any(valid), "Should have valid Tmrt values" + + np.testing.assert_allclose( + result_gpu.tmrt[valid], + result_cpu.tmrt[valid], + rtol=1e-3, + atol=0.5, + err_msg="GPU vs CPU Tmrt mismatch with full obstruction", + ) + + def test_night_time_parity(self, location, gpu_available): + """Night time (sun below horizon): GPU and CPU should match.""" + if not gpu_available: + pytest.skip("GPU not available") + + # January 2 AM — sun well below horizon at lat 57.7 + night_weather = Weather( + datetime=datetime(2024, 1, 15, 2, 0), + ta=2.0, + rh=80.0, + global_rad=0.0, + ) + + surface_gpu = _make_flat_surface_with_shadows() + surface_cpu = _make_flat_surface_with_shadows() + + result_gpu = self._run_with_gpu(surface_gpu, location, night_weather, gpu_on=True) + result_cpu = self._run_with_gpu(surface_cpu, location, night_weather, gpu_on=False) + + valid = ~np.isnan(result_gpu.tmrt) & ~np.isnan(result_cpu.tmrt) + assert np.any(valid), "Should have valid Tmrt values even at night" + + np.testing.assert_allclose( + result_gpu.tmrt[valid], + result_cpu.tmrt[valid], + rtol=1e-3, + atol=0.5, + err_msg="GPU vs CPU Tmrt mismatch at night time", + ) + + # Verify kdown is 
zero or near-zero at night + if result_gpu.kdown is not None: + kdown_valid = ~np.isnan(result_gpu.kdown) + if np.any(kdown_valid): + assert np.nanmax(result_gpu.kdown) < 1.0, "kdown should be ~0 at night" + + def test_sitting_posture_parity(self, location, noon_weather, gpu_available): + """Sitting posture (cyl=False): GPU short-circuits to zero, CPU should match.""" + if not gpu_available: + pytest.skip("GPU not available") + + from solweig.models.config import HumanParams + + surface_gpu = _make_flat_surface_with_shadows() + surface_cpu = _make_flat_surface_with_shadows() + + sitting = HumanParams(posture="sitting") + + for gpu_on, _surface in [(True, surface_gpu), (False, surface_cpu)]: + try: + if gpu_on: + pipeline.enable_aniso_gpu() + else: + pipeline.disable_aniso_gpu() + except AttributeError: + if gpu_on: + pytest.skip("GPU feature not compiled") + + result_gpu = calculate( + surface_gpu, + location, + noon_weather, + use_anisotropic_sky=True, + human=sitting, + ) + pipeline.disable_aniso_gpu() + result_cpu = calculate( + surface_cpu, + location, + noon_weather, + use_anisotropic_sky=True, + human=sitting, + ) + + valid = ~np.isnan(result_gpu.tmrt) & ~np.isnan(result_cpu.tmrt) + assert np.any(valid), "Should have valid Tmrt values" + + np.testing.assert_allclose( + result_gpu.tmrt[valid], + result_cpu.tmrt[valid], + rtol=1e-3, + atol=0.5, + err_msg="GPU vs CPU Tmrt mismatch with sitting posture (cyl=False)", + ) + + def test_zero_radiation_parity(self, location, gpu_available): + """Zero radiation input: GPU and CPU should match.""" + if not gpu_available: + pytest.skip("GPU not available") + + # Daytime sun position but zero radiation (overcast edge case) + zero_rad_weather = Weather( + datetime=datetime(2024, 7, 15, 12, 0), + ta=20.0, + rh=90.0, + global_rad=0.0, + ) + + surface_gpu = _make_flat_surface_with_shadows() + surface_cpu = _make_flat_surface_with_shadows() + + result_gpu = self._run_with_gpu(surface_gpu, location, zero_rad_weather, 
gpu_on=True) + result_cpu = self._run_with_gpu(surface_cpu, location, zero_rad_weather, gpu_on=False) + + valid = ~np.isnan(result_gpu.tmrt) & ~np.isnan(result_cpu.tmrt) + assert np.any(valid), "Should have valid Tmrt values" + + np.testing.assert_allclose( + result_gpu.tmrt[valid], + result_cpu.tmrt[valid], + rtol=1e-3, + atol=0.5, + err_msg="GPU vs CPU Tmrt mismatch with zero radiation", + ) + + def test_invalid_pixels_nan_parity(self, location, noon_weather, gpu_available): + """Invalid pixels (NaN DSM) produce NaN in both GPU and CPU paths.""" + if not gpu_available: + pytest.skip("GPU not available") + + from conftest import make_mock_svf + + shape = (10, 10) + n_patches = 153 + dsm = np.ones(shape, dtype=np.float32) * 2.0 + # Mark some pixels as invalid (NaN in DSM) + dsm[0:3, 0:3] = np.nan + + surface_gpu = SurfaceData(dsm=dsm.copy(), pixel_size=1.0, svf=make_mock_svf(shape)) + surface_cpu = SurfaceData(dsm=dsm.copy(), pixel_size=1.0, svf=make_mock_svf(shape)) + + n_pack = (n_patches + 7) // 8 + shmat = np.full((shape[0], shape[1], n_pack), 0xFF, dtype=np.uint8) + vegshmat = np.full((shape[0], shape[1], n_pack), 0xFF, dtype=np.uint8) + vbshmat = np.full((shape[0], shape[1], n_pack), 0xFF, dtype=np.uint8) + + for s in (surface_gpu, surface_cpu): + s.shadow_matrices = ShadowArrays( + _shmat_u8=shmat.copy(), + _vegshmat_u8=vegshmat.copy(), + _vbshmat_u8=vbshmat.copy(), + _n_patches=n_patches, + ) + + result_gpu = self._run_with_gpu(surface_gpu, location, noon_weather, gpu_on=True) + result_cpu = self._run_with_gpu(surface_cpu, location, noon_weather, gpu_on=False) + + # NaN pixels should be NaN in both + gpu_nan = np.isnan(result_gpu.tmrt) + cpu_nan = np.isnan(result_cpu.tmrt) + np.testing.assert_array_equal( + gpu_nan, + cpu_nan, + err_msg="GPU and CPU should produce NaN at the same pixels", + ) + + # Valid pixels should match + valid = ~gpu_nan & ~cpu_nan + if np.any(valid): + np.testing.assert_allclose( + result_gpu.tmrt[valid], + result_cpu.tmrt[valid], + 
rtol=1e-3, + atol=0.5, + err_msg="GPU vs CPU Tmrt mismatch on valid pixels with NaN neighbors", + ) + + def test_gpu_fallback_when_disabled(self, location, noon_weather): + """With GPU disabled, results are identical to CPU-only path.""" + surface_a = _make_flat_surface_with_shadows() + surface_b = _make_flat_surface_with_shadows() + + result_a = self._run_with_gpu(surface_a, location, noon_weather, gpu_on=False) + result_b = self._run_with_gpu(surface_b, location, noon_weather, gpu_on=False) + + valid = ~np.isnan(result_a.tmrt) & ~np.isnan(result_b.tmrt) + if np.any(valid): + np.testing.assert_array_equal( + result_a.tmrt[valid], + result_b.tmrt[valid], + err_msg="Two CPU-only runs should produce identical results", + ) diff --git a/tests/spec/test_anisotropic_pipeline.py b/tests/spec/test_anisotropic_pipeline.py new file mode 100644 index 0000000..9426e77 --- /dev/null +++ b/tests/spec/test_anisotropic_pipeline.py @@ -0,0 +1,355 @@ +"""End-to-end tests for the anisotropic sky pipeline. + +Covers the full path from calculate() → compute_core_fused → Rust pipeline +with use_anisotropic_sky=True. Tests critical fixes: + +1. Bitpacked shadow matrix extraction in pipeline.rs: + Patches are 1 bit each, 8 per byte. Pipeline must use (i >> 3, i & 7) + not read raw bytes as patch values. + +2. Vegetation shadow initialization in skyview.rs: + No-vegetation surfaces must have veg shadow = all 1s (0xFF), meaning + "vegetation doesn't block anything". Without this fix, psi=0.03 would + attenuate diffuse radiation by ~97%. + +3. Python ShadowArrays.diffsh() parity with Rust pipeline diffsh: + Both must produce identical diffuse shadow values for the same inputs. 
+""" + +from datetime import datetime + +import numpy as np +import pytest +from solweig.api import ( + Location, + SurfaceData, + Weather, + calculate, +) +from solweig.models.precomputed import ShadowArrays, _pack_u8_to_bitpacked + +pytestmark = pytest.mark.slow + + +@pytest.fixture(scope="module") +def location(): + return Location(latitude=57.7, longitude=12.0, utc_offset=1) + + +@pytest.fixture(scope="module") +def noon_weather(): + return Weather( + datetime=datetime(2024, 7, 15, 12, 0), + ta=25.0, + rh=50.0, + global_rad=800.0, + ) + + +def _make_surface_with_building(shape=(30, 30)): + """Create a surface with a building that triggers shadow computation.""" + from conftest import make_mock_svf + + dsm = np.zeros(shape, dtype=np.float32) + dsm[10:20, 10:20] = 10.0 # 10m building + return SurfaceData(dsm=dsm, pixel_size=1.0, svf=make_mock_svf(shape)) + + +def _make_flat_surface_with_shadows(shape=(10, 10), n_patches=153): + """Create a flat surface with synthetic shadow matrices for anisotropic sky. + + The shadow matrices are constructed to be physically plausible: + - All patches visible at all pixels (fully open sky) + - No vegetation blocking + """ + from conftest import make_mock_svf + + dsm = np.ones(shape, dtype=np.float32) * 2.0 + surface = SurfaceData(dsm=dsm, pixel_size=1.0, svf=make_mock_svf(shape)) + + # Create bitpacked shadow matrices: all patches visible + n_pack = (n_patches + 7) // 8 + shmat_u8 = np.full((shape[0], shape[1], n_pack), 0xFF, dtype=np.uint8) + vegshmat_u8 = np.full((shape[0], shape[1], n_pack), 0xFF, dtype=np.uint8) + vbshmat_u8 = np.full((shape[0], shape[1], n_pack), 0xFF, dtype=np.uint8) + + surface.shadow_matrices = ShadowArrays( + _shmat_u8=shmat_u8, + _vegshmat_u8=vegshmat_u8, + _vbshmat_u8=vbshmat_u8, + _n_patches=n_patches, + ) + return surface + + +class TestAnisotropicNoVegetation: + """Anisotropic sky on surfaces without vegetation. + + Critical regression test for the veg shadow initialization fix. 
+ When no vegetation is present, veg shadow bits must be all 1s (0xFF), + meaning vegetation doesn't block any sky patches. Without this fix, + diffuse radiation was attenuated by ~97% (psi=0.03). + """ + + def test_aniso_produces_valid_tmrt(self, location, noon_weather): + """Anisotropic sky on flat surface with shadows produces valid Tmrt.""" + surface = _make_flat_surface_with_shadows() + result = calculate( + surface, + location, + noon_weather, + use_anisotropic_sky=True, + ) + tmrt = result.tmrt + assert tmrt.shape == (10, 10) + # Should be in physically reasonable range (summer noon, open sky) + valid = ~np.isnan(tmrt) + assert np.any(valid), "Should have valid Tmrt values" + assert np.nanmin(tmrt) > 0, "Summer noon Tmrt should be positive" + assert np.nanmax(tmrt) < 80, "Tmrt should be < 80°C" + + def test_aniso_kdown_not_attenuated(self, location, noon_weather): + """With all-visible shadow matrices, kdown should be close to global_rad. + + Regression test: if veg shadow = all 0s instead of all 1s, + diffuse radiation would be attenuated to ~3%, causing kdown << global_rad. 
+ """ + surface = _make_flat_surface_with_shadows() + result = calculate( + surface, + location, + noon_weather, + use_anisotropic_sky=True, + ) + # kdown should be close to global radiation for open sky + # (800 W/m² minus some reflection, but should be > 200) + valid = ~np.isnan(result.kdown) + if np.any(valid): + assert np.nanmean(result.kdown) > 200, ( + f"kdown mean = {np.nanmean(result.kdown):.1f} — " + "suspiciously low, may indicate veg shadow attenuation bug" + ) + + +class TestAnisotropicVsIsotropic: + """Compare anisotropic and isotropic sky models on the same surface.""" + + def test_both_produce_valid_tmrt(self, location, noon_weather): + """Both models produce valid Tmrt for the same surface.""" + surface_aniso = _make_flat_surface_with_shadows() + surface_iso = _make_flat_surface_with_shadows() + + result_aniso = calculate( + surface_aniso, + location, + noon_weather, + use_anisotropic_sky=True, + ) + result_iso = calculate( + surface_iso, + location, + noon_weather, + use_anisotropic_sky=False, + ) + + for label, result in [("aniso", result_aniso), ("iso", result_iso)]: + valid = ~np.isnan(result.tmrt) + assert np.any(valid), f"{label} should have valid Tmrt" + assert np.nanmin(result.tmrt) > 0, f"{label} Tmrt should be positive" + assert np.nanmax(result.tmrt) < 80, f"{label} Tmrt should be < 80°C" + + def test_models_differ_but_correlate(self, location, noon_weather): + """Anisotropic and isotropic Tmrt should differ but be in the same ballpark.""" + surface_aniso = _make_flat_surface_with_shadows() + surface_iso = _make_flat_surface_with_shadows() + + result_aniso = calculate( + surface_aniso, + location, + noon_weather, + use_anisotropic_sky=True, + ) + result_iso = calculate( + surface_iso, + location, + noon_weather, + use_anisotropic_sky=False, + ) + + tmrt_a = result_aniso.tmrt + tmrt_i = result_iso.tmrt + valid = ~np.isnan(tmrt_a) & ~np.isnan(tmrt_i) + + if np.sum(valid) > 1: + diff = np.abs(tmrt_a[valid] - tmrt_i[valid]) + mean_diff = 
np.mean(diff) + # Models should produce somewhat different results (aniso adds Lside*Fcyl) + # but not wildly different on a flat surface + assert mean_diff < 20, f"Mean Tmrt difference = {mean_diff:.1f}°C — too large for flat surface" + + +class TestAnisotropicWithPartialShadows: + """Anisotropic sky with partially blocked shadow matrices.""" + + def test_partial_shadows_produce_spatial_variation(self, location, noon_weather): + """Shadow matrices with spatial variation produce Tmrt variation.""" + shape = (10, 10) + n_patches = 153 + from conftest import make_mock_svf + + dsm = np.ones(shape, dtype=np.float32) * 2.0 + surface = SurfaceData(dsm=dsm, pixel_size=1.0, svf=make_mock_svf(shape)) + + # Create shadow matrices with spatial pattern: + # Left half: all patches visible; right half: half blocked + n_pack = (n_patches + 7) // 8 + shmat_u8 = np.full((shape[0], shape[1], n_pack), 0xFF, dtype=np.uint8) + # Block low-altitude patches (first 31 patches) on right half + for p in range(31): + byte_idx = p >> 3 + bit_mask = np.uint8(1 << (p & 7)) + shmat_u8[:, 5:, byte_idx] &= ~bit_mask + + vegshmat_u8 = np.full((shape[0], shape[1], n_pack), 0xFF, dtype=np.uint8) + vbshmat_u8 = shmat_u8.copy() + + surface.shadow_matrices = ShadowArrays( + _shmat_u8=shmat_u8, + _vegshmat_u8=vegshmat_u8, + _vbshmat_u8=vbshmat_u8, + _n_patches=n_patches, + ) + + result = calculate( + surface, + location, + noon_weather, + use_anisotropic_sky=True, + ) + + tmrt = result.tmrt + valid = ~np.isnan(tmrt) + assert np.any(valid), "Should have valid Tmrt" + # Mean Tmrt should be valid + assert np.nanmin(tmrt) > -10 + assert np.nanmax(tmrt) < 80 + + +class TestShadowArraysDiffshParity: + """Python ShadowArrays.diffsh() must match the Rust pipeline's internal diffsh. 
+ + Both use the formula: diffsh[i] = sh_bit[i] - (1 - veg_bit[i]) * (1 - psi) + + We can't directly access the Rust pipeline's diffsh, but we can verify that + the Python diffsh fed into weighted_patch_sum produces the same result as + the Rust sky.weighted_patch_sum. + """ + + def test_diffsh_with_all_visible_equals_ones(self): + """All patches visible, no veg → diffsh = 1.0 everywhere.""" + n_patches = 153 + rows, cols = 3, 3 + n_pack = (n_patches + 7) // 8 + + sa = ShadowArrays( + _shmat_u8=np.full((rows, cols, n_pack), 0xFF, dtype=np.uint8), + _vegshmat_u8=np.full((rows, cols, n_pack), 0xFF, dtype=np.uint8), + _vbshmat_u8=np.full((rows, cols, n_pack), 0xFF, dtype=np.uint8), + _n_patches=n_patches, + ) + diffsh = sa.diffsh(transmissivity=0.03) + # sh=1, veg=1 → 1 - (1-1)*(1-0.03) = 1.0 + np.testing.assert_allclose(diffsh, 1.0, atol=1e-6) + + def test_diffsh_weighted_sum_matches_rust(self): + """Python diffsh → weighted_patch_sum matches Rust computation.""" + from solweig.rustalgos import sky + + rng = np.random.default_rng(42) + n_patches = 153 + rows, cols = 5, 5 + + # Random binary shadow patterns + sh_u8 = (rng.integers(0, 2, (rows, cols, n_patches)) * 255).astype(np.uint8) + veg_u8 = (rng.integers(0, 2, (rows, cols, n_patches)) * 255).astype(np.uint8) + + packed_sh = _pack_u8_to_bitpacked(sh_u8) + packed_veg = _pack_u8_to_bitpacked(veg_u8) + + sa = ShadowArrays( + _shmat_u8=packed_sh, + _vegshmat_u8=packed_veg, + _vbshmat_u8=packed_sh.copy(), + _n_patches=n_patches, + ) + + psi = 0.03 + py_diffsh = sa.diffsh(transmissivity=psi).astype(np.float32) + + # Use Perez luminance as weights (realistic test) + from solweig.rustalgos import pipeline + + lv = np.asarray(pipeline.perez_v3_py(30.0, 180.0, 200.0, 400.0, 180, 2)) + weights = lv[:, 2].astype(np.float32) # luminance column + + # Rust weighted_patch_sum + rs_result = np.asarray(sky.weighted_patch_sum(py_diffsh, weights)) + + # Python manual sum + py_result = np.sum( + py_diffsh * weights[np.newaxis, 
np.newaxis, :], + axis=2, + ) + + np.testing.assert_allclose( + rs_result, + py_result, + rtol=1e-5, + atol=1e-6, + err_msg="Rust and Python weighted_patch_sum differ on diffsh", + ) + + +class TestAnisotropicGoldenRegression: + """Freeze anisotropic pipeline output for regression detection. + + These golden values were captured after fixing: + - Bitpacked shadow extraction (pipeline.rs) + - Veg shadow initialization (skyview.rs) + """ + + @pytest.fixture(scope="class") + def golden_result(self, location, noon_weather): + """Compute anisotropic result for golden comparison.""" + surface = _make_flat_surface_with_shadows(shape=(5, 5)) + return calculate( + surface, + location, + noon_weather, + use_anisotropic_sky=True, + ) + + def test_tmrt_golden_mean(self, golden_result): + """Mean Tmrt should be stable across code changes.""" + tmrt = golden_result.tmrt + valid = ~np.isnan(tmrt) + mean_tmrt = np.nanmean(tmrt[valid]) + # Capture golden range (tight enough to catch regressions, loose enough + # for f32 variation across platforms) + assert 20 < mean_tmrt < 70, f"Mean aniso Tmrt = {mean_tmrt:.2f}°C — outside expected range" + + def test_kdown_golden_mean(self, golden_result): + """Mean kdown should be stable across code changes.""" + kdown = golden_result.kdown + valid = ~np.isnan(kdown) + mean_kdown = np.nanmean(kdown[valid]) + # Open sky, 800 W/m² global → kdown should be substantial + assert mean_kdown > 200, f"Mean kdown = {mean_kdown:.1f} — too low, may indicate attenuation bug" + assert mean_kdown < 900, f"Mean kdown = {mean_kdown:.1f} — too high for 800 W/m² global" + + def test_shadow_golden(self, golden_result): + """Shadow field should be all-sunlit for flat surface at noon.""" + shadow = golden_result.shadow + valid = ~np.isnan(shadow) + # Flat surface at noon → should be mostly sunlit (shadow = 1) + assert np.nanmean(shadow[valid]) > 0.9, "Flat surface at noon should be mostly sunlit" diff --git a/tests/spec/test_bitpacking.py 
b/tests/spec/test_bitpacking.py new file mode 100644 index 0000000..2150ea5 --- /dev/null +++ b/tests/spec/test_bitpacking.py @@ -0,0 +1,333 @@ +"""Unit tests for bitpacked shadow matrix encoding/decoding. + +The SVF computation stores shadow matrices as bitpacked uint8 arrays: + - 1 bit per sky patch, 8 patches per byte + - Bit layout: patch i is stored at byte (i >> 3), bit position (i & 7) + - Bit = 1 means "sky visible" (was 255 in original u8 format) + - Bit = 0 means "blocked" (was 0 in original u8 format) + +Both the Python ShadowArrays.diffsh() method and the Rust pipeline +extract bits using this scheme. These tests verify: + 1. Round-trip: pack → unpack preserves data for all bit positions + 2. diffsh formula correctness with known inputs + 3. Parity: Python diffsh matches Rust weighted_patch_sum on the same data +""" + +import numpy as np +import pytest +from solweig.models.precomputed import ( + ShadowArrays, + _pack_u8_to_bitpacked, + _unpack_bitpacked_to_float32, +) + + +class TestBitpackRoundTrip: + """Pack → unpack round-trip must preserve data for any patch count.""" + + @pytest.mark.parametrize("n_patches", [1, 5, 8, 9, 15, 16, 145, 153, 306]) + def test_round_trip_all_ones(self, n_patches): + """All-visible (255) round-trips to all 1.0.""" + rows, cols = 3, 4 + u8 = np.full((rows, cols, n_patches), 255, dtype=np.uint8) + packed = _pack_u8_to_bitpacked(u8) + unpacked = _unpack_bitpacked_to_float32(packed, n_patches) + np.testing.assert_array_equal(unpacked, 1.0) + + @pytest.mark.parametrize("n_patches", [1, 5, 8, 9, 15, 16, 145, 153, 306]) + def test_round_trip_all_zeros(self, n_patches): + """All-blocked (0) round-trips to all 0.0.""" + rows, cols = 3, 4 + u8 = np.zeros((rows, cols, n_patches), dtype=np.uint8) + packed = _pack_u8_to_bitpacked(u8) + unpacked = _unpack_bitpacked_to_float32(packed, n_patches) + np.testing.assert_array_equal(unpacked, 0.0) + + def test_round_trip_every_bit_position(self): + """Each of the 8 bit positions within a 
byte round-trips correctly.""" + # 16 patches = 2 bytes, so we test all 8 positions in both bytes + n_patches = 16 + rows, cols = 1, 1 + for p in range(n_patches): + u8 = np.zeros((rows, cols, n_patches), dtype=np.uint8) + u8[0, 0, p] = 255 # Set only one patch visible + packed = _pack_u8_to_bitpacked(u8) + unpacked = _unpack_bitpacked_to_float32(packed, n_patches) + for q in range(n_patches): + expected = 1.0 if q == p else 0.0 + assert unpacked[0, 0, q] == expected, f"Patch {q} should be {expected} when only patch {p} is set" + + def test_round_trip_alternating_pattern(self): + """Alternating on/off round-trips correctly.""" + n_patches = 153 + rows, cols = 2, 2 + u8 = np.zeros((rows, cols, n_patches), dtype=np.uint8) + # Set every other patch to visible + u8[:, :, ::2] = 255 + packed = _pack_u8_to_bitpacked(u8) + unpacked = _unpack_bitpacked_to_float32(packed, n_patches) + for p in range(n_patches): + expected = 1.0 if p % 2 == 0 else 0.0 + np.testing.assert_array_equal( + unpacked[:, :, p], + expected, + err_msg=f"Patch {p} expected {expected}", + ) + + def test_round_trip_random_pattern(self): + """Random binary pattern round-trips correctly.""" + rng = np.random.default_rng(42) + n_patches = 153 + rows, cols = 5, 5 + # Random binary: 0 or 255 + u8 = (rng.integers(0, 2, (rows, cols, n_patches)) * 255).astype(np.uint8) + packed = _pack_u8_to_bitpacked(u8) + unpacked = _unpack_bitpacked_to_float32(packed, n_patches) + expected = (u8 / 255.0).astype(np.float32) + np.testing.assert_array_equal(unpacked, expected) + + def test_non_byte_aligned_padding_bits_ignored(self): + """Unused bits in the last byte don't affect result. + + For 5 patches, only bits 0-4 of byte 0 matter. Bits 5-7 are padding. 
+ """ + n_patches = 5 + rows, cols = 1, 1 + u8 = np.full((rows, cols, n_patches), 255, dtype=np.uint8) + packed = _pack_u8_to_bitpacked(u8) + # Corrupt padding bits (bits 5, 6, 7 of byte 0) + packed[0, 0, 0] |= 0b11100000 + unpacked = _unpack_bitpacked_to_float32(packed, n_patches) + # Should still be 5 ones — padding bits are ignored + assert unpacked.shape == (1, 1, 5) + np.testing.assert_array_equal(unpacked, 1.0) + + +class TestDiffshFormula: + """ShadowArrays.diffsh() must implement: shmat - (1 - vegshmat) * (1 - psi).""" + + def _make_shadow_arrays(self, shmat_u8, vegshmat_u8, n_patches): + """Helper to create ShadowArrays from u8 per-patch arrays.""" + packed_sh = _pack_u8_to_bitpacked(shmat_u8) + packed_veg = _pack_u8_to_bitpacked(vegshmat_u8) + packed_vb = _pack_u8_to_bitpacked(shmat_u8) # simplified + return ShadowArrays( + _shmat_u8=packed_sh, + _vegshmat_u8=packed_veg, + _vbshmat_u8=packed_vb, + _n_patches=n_patches, + ) + + def test_no_vegetation_blocking_diffsh_equals_shmat(self): + """When vegshmat = all 1s (no vegetation), diffsh = shmat exactly.""" + n_patches = 10 + rows, cols = 2, 2 + rng = np.random.default_rng(99) + shmat_u8 = (rng.integers(0, 2, (rows, cols, n_patches)) * 255).astype(np.uint8) + vegshmat_u8 = np.full((rows, cols, n_patches), 255, dtype=np.uint8) + + sa = self._make_shadow_arrays(shmat_u8, vegshmat_u8, n_patches) + diffsh = sa.diffsh(transmissivity=0.03) + expected = (shmat_u8 / 255.0).astype(np.float32) + + np.testing.assert_allclose(diffsh, expected, atol=1e-6) + + def test_full_vegetation_blocking_diffsh_equals_psi(self): + """When vegshmat = all 0s (full veg block) and shmat = all 1s, diffsh = psi.""" + n_patches = 10 + rows, cols = 2, 2 + psi = 0.03 + shmat_u8 = np.full((rows, cols, n_patches), 255, dtype=np.uint8) + vegshmat_u8 = np.zeros((rows, cols, n_patches), dtype=np.uint8) + + sa = self._make_shadow_arrays(shmat_u8, vegshmat_u8, n_patches) + diffsh = sa.diffsh(transmissivity=psi) + # shmat=1, vegshmat=0: diffsh = 1 
- (1-0)*(1-0.03) = 1 - 0.97 = 0.03 + np.testing.assert_allclose(diffsh, psi, atol=1e-6) + + def test_building_blocked_always_zero(self): + """When shmat = 0 (building blocks), diffsh <= 0 regardless of veg.""" + n_patches = 10 + rows, cols = 2, 2 + shmat_u8 = np.zeros((rows, cols, n_patches), dtype=np.uint8) + vegshmat_u8 = np.full((rows, cols, n_patches), 255, dtype=np.uint8) + + sa = self._make_shadow_arrays(shmat_u8, vegshmat_u8, n_patches) + diffsh = sa.diffsh(transmissivity=0.03) + # shmat=0, vegshmat=1: diffsh = 0 - (1-1)*(1-0.03) = 0 + np.testing.assert_allclose(diffsh, 0.0, atol=1e-6) + + def test_mixed_pattern_matches_formula(self): + """Specific mixed pattern produces correct values per formula.""" + n_patches = 4 + psi = 0.05 + # patch 0: sh=1, veg=1 → 1 - 0*0.95 = 1.0 + # patch 1: sh=1, veg=0 → 1 - 1*0.95 = 0.05 + # patch 2: sh=0, veg=1 → 0 - 0*0.95 = 0.0 + # patch 3: sh=0, veg=0 → 0 - 1*0.95 = -0.95 + shmat_u8 = np.array([[[255, 255, 0, 0]]], dtype=np.uint8) + vegshmat_u8 = np.array([[[255, 0, 255, 0]]], dtype=np.uint8) + + sa = self._make_shadow_arrays(shmat_u8, vegshmat_u8, n_patches) + diffsh = sa.diffsh(transmissivity=psi) + + expected = np.array([[[1.0, psi, 0.0, -(1 - psi)]]], dtype=np.float32) + np.testing.assert_allclose(diffsh, expected, atol=1e-6) + + +class TestRustBitExtractionParity: + """Rust sky.weighted_patch_sum on Python-unpacked diffsh must match Python diffsh sum. + + This validates that the Rust bit extraction in pipeline.rs (i >> 3, i & 7) + produces the same results as Python's _unpack_bitpacked_to_float32. 
+ """ + + def test_weighted_sum_parity(self): + """Rust weighted_patch_sum on Python-unpacked data matches manual sum.""" + from solweig.rustalgos import sky + + rng = np.random.default_rng(123) + n_patches = 153 + rows, cols = 4, 4 + + # Create random bitpacked shadow matrices + shmat_u8 = (rng.integers(0, 2, (rows, cols, n_patches)) * 255).astype(np.uint8) + vegshmat_u8 = (rng.integers(0, 2, (rows, cols, n_patches)) * 255).astype(np.uint8) + packed_sh = _pack_u8_to_bitpacked(shmat_u8) + packed_veg = _pack_u8_to_bitpacked(vegshmat_u8) + + sa = ShadowArrays( + _shmat_u8=packed_sh, + _vegshmat_u8=packed_veg, + _vbshmat_u8=packed_sh.copy(), + _n_patches=n_patches, + ) + + # Python diffsh + psi = 0.03 + py_diffsh = sa.diffsh(transmissivity=psi) + + # Uniform weights + weights = np.ones(n_patches, dtype=np.float32) / n_patches + + # Rust weighted_patch_sum on Python-unpacked data + rs_result = np.asarray(sky.weighted_patch_sum(py_diffsh.astype(np.float32), weights)) + + # Python manual sum + py_result = np.sum(py_diffsh * weights[np.newaxis, np.newaxis, :], axis=2) + + np.testing.assert_allclose( + rs_result, + py_result, + rtol=1e-5, + atol=1e-6, + err_msg="Rust weighted_patch_sum differs from Python sum on diffsh", + ) + + def test_anisotropic_sky_uses_correct_bits(self): + """anisotropic_sky with known shadow patterns produces expected behavior. + + Creates a shadow matrix where only high-altitude patches are visible, + then verifies that ldown responds correctly (should be non-zero since + visible patches still contribute radiation). 
+ """ + from solweig.rustalgos import sky + + n_patches = 96 # 6 altitude bands + rows, cols = 3, 3 + + # Generate patches: 6 altitude bands + patches = [] + alt_bands = [6, 18, 30, 42, 54, 66] + azis_per_band = [30, 24, 24, 18, 12, 6] # total = 114, but we trim + count = 0 + for alt, n_azi in zip(alt_bands, azis_per_band, strict=False): + azi_step = 360.0 / n_azi + for j in range(n_azi): + if count >= n_patches: + break + patches.append([alt, j * azi_step]) + count += 1 + if count >= n_patches: + break + l_patches = np.array(patches[:n_patches], dtype=np.float32) + + # Steradians (simplified) + steradians = np.ones(n_patches, dtype=np.float32) / n_patches + + # Shadow matrices: all patches visible (uint8 per byte) + n_pack = (n_patches + 7) // 8 + shmat = np.full((rows, cols, n_pack), 0xFF, dtype=np.uint8) + vegshmat = np.full((rows, cols, n_pack), 0xFF, dtype=np.uint8) + vbshmat = np.full((rows, cols, n_pack), 0xFF, dtype=np.uint8) + + # Luminance: uniform + lum = np.ones(n_patches, dtype=np.float32) / n_patches + lv = np.column_stack([l_patches, lum]).astype(np.float32) + + asvf = np.zeros((rows, cols), dtype=np.float32) # arccos(sqrt(1)) = 0 + lup = np.full((rows, cols), 400.0, dtype=np.float32) + shadow = np.ones((rows, cols), dtype=np.float32) + kup = np.full((rows, cols), 50.0, dtype=np.float32) + + sun = sky.SunParams(altitude=45.0, azimuth=180.0) + sky_p = sky.SkyParams(esky=0.75, ta=25.0, cyl=True, wall_scheme=False, albedo=0.2) + surf_p = sky.SurfaceParams(tgwall=2.0, ewall=0.9, rad_i=600.0, rad_d=200.0) + + result = sky.anisotropic_sky( + shmat, + vegshmat, + vbshmat, + sun, + asvf, + sky_p, + l_patches, + None, + None, + steradians, + surf_p, + lup, + lv, + shadow, + kup, + kup, + kup, + kup, + ) + + ldown = np.asarray(result.ldown) + # All patches visible → ldown should be positive and reasonable + assert np.all(ldown > 0), "ldown should be positive when all patches visible" + assert np.all(ldown < 800), "ldown should be < 800 W/m²" + + # Now block 
ALL patches and verify ldown changes + # (blocking sky patches adds wall emission instead of sky emission, + # so ldown may increase or decrease depending on wall/sky temperatures) + shmat_blocked = np.zeros((rows, cols, n_pack), dtype=np.uint8) + result_blocked = sky.anisotropic_sky( + shmat_blocked, + vegshmat, + vbshmat, + sun, + asvf, + sky_p, + l_patches, + None, + None, + steradians, + surf_p, + lup, + lv, + shadow, + kup, + kup, + kup, + kup, + ) + ldown_blocked = np.asarray(result_blocked.ldown) + assert not np.allclose(ldown, ldown_blocked, atol=0.1), "Blocking all patches should change ldown" + + # Diffuse shortwave (kside_d) should definitely decrease when sky is blocked + kside_d_open = np.asarray(result.kside_d) + kside_d_blocked = np.asarray(result_blocked.kside_d) + assert np.all(kside_d_open >= kside_d_blocked - 1e-3), "Blocking sky should not increase diffuse shortwave" diff --git a/tests/spec/test_low_sun_angles.py b/tests/spec/test_low_sun_angles.py new file mode 100644 index 0000000..1fab2ce --- /dev/null +++ b/tests/spec/test_low_sun_angles.py @@ -0,0 +1,197 @@ +""" +Low Sun Angle Handling Tests + +Tests for numerical stability at sun altitudes < 3° where tan(zenith) → infinity. +Verifies the guards in the Rust cylindric_wedge and perez_v3 implementations. + +Reference: MIN_SUN_ELEVATION_DEG = 3.0 is the established UMEP/SOLWEIG threshold. 
+""" + +import warnings + +import numpy as np +import pytest +from solweig.constants import MIN_SUN_ELEVATION_DEG # noqa: F401 - used in test +from solweig.physics.cylindric_wedge import cylindric_wedge +from solweig.rustalgos import pipeline + + +class TestCylindricWedgeLowSun: + """Tests for cylindric_wedge at low sun angles.""" + + def test_returns_fully_shaded_below_threshold(self): + """Walls should be fully shaded (F_sh=1) when sun altitude < 3°.""" + rows, cols = 50, 50 + svfalfa = np.full((rows, cols), 0.5, dtype=np.float32) # Typical value + + # Test at various altitudes below threshold + for altitude in [0.1, 1.0, 2.0, 2.9]: + zenith_rad = (90 - altitude) * (np.pi / 180) + result = cylindric_wedge(zenith_rad, svfalfa, rows, cols) + + assert np.allclose(result, 1.0), f"At altitude {altitude}°, walls should be fully shaded (F_sh=1)" + + def test_normal_calculation_above_threshold(self): + """Normal calculation should occur when sun altitude >= 3°.""" + rows, cols = 50, 50 + svfalfa = np.full((rows, cols), 0.5, dtype=np.float32) + + # Test at altitudes above threshold + for altitude in [3.0, 5.0, 10.0, 45.0]: + zenith_rad = (90 - altitude) * (np.pi / 180) + result = cylindric_wedge(zenith_rad, svfalfa, rows, cols) + + # Should have values between 0 and 1, not all 1s + assert result.min() >= 0.0 + assert result.max() <= 1.0 + # At reasonable sun angles with uniform svfalfa, shouldn't be all ones + if altitude >= 10: + assert result.mean() < 0.99, f"At altitude {altitude}°, should have some sunlit walls" + + def test_no_overflow_warnings_at_edge(self): + """No overflow warnings should occur at the 3° boundary.""" + rows, cols = 100, 100 + svfalfa = np.random.uniform(0.1, 1.0, (rows, cols)).astype(np.float32) + + # Test at and near the threshold + for altitude in [2.9, 3.0, 3.1]: + zenith_rad = (90 - altitude) * (np.pi / 180) + + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter("always") + result = cylindric_wedge(zenith_rad, svfalfa, 
rows, cols) + + # Filter for overflow warnings + overflow_warnings = [x for x in w if "overflow" in str(x.message).lower()] + assert len(overflow_warnings) == 0, f"Overflow at altitude {altitude}°: {overflow_warnings}" + + # Result should be valid (no NaN or Inf) + assert np.all(np.isfinite(result)), f"Non-finite values at altitude {altitude}°" + + +class TestPerezLowSun: + """Tests for Rust perez_v3 at low sun angles. + + The Rust port includes guards that return a uniform distribution when + sun altitude < 3° or diffuse radiation < 10 W/m². These guards prevent + NaN/Inf from tan(zenith) → infinity near the horizon. + """ + + def test_returns_uniform_distribution_below_threshold(self): + """Rust Perez should return uniform sky distribution when altitude < 3°.""" + jday = 182 + + for altitude in [0.5, 1.0, 2.0, 2.9]: + zenith = 90 - altitude + rs_lv = np.asarray(pipeline.perez_v3_py(zenith, 180.0, 100.0, 500.0, jday, 1)) + std_dev = np.std(rs_lv[:, 2]) + assert std_dev < 1e-6, f"At altitude {altitude}°, distribution should be uniform (std={std_dev:.8f})" + + def test_normal_calculation_above_threshold(self): + """Normal Perez calculation should occur when altitude >= 3°.""" + zenith = 90 - 30 # 30° altitude + rs_lv = np.asarray(pipeline.perez_v3_py(zenith, 180.0, 200.0, 600.0, 182, 1)) + std_dev = np.std(rs_lv[:, 2]) + assert std_dev > 1e-6, ( + f"At 30° altitude with radiation, should have anisotropic distribution (std={std_dev:.8f})" + ) + + def test_no_nan_or_inf_at_boundary(self): + """No NaN or Inf values at the 3° boundary.""" + for altitude in [2.9, 3.0, 3.1]: + zenith = 90 - altitude + rs_lv = np.asarray(pipeline.perez_v3_py(zenith, 180.0, 100.0, 300.0, 182, 1)) + assert np.all(np.isfinite(rs_lv)), f"Non-finite values at altitude {altitude}°" + + def test_returns_uniform_for_very_low_diffuse(self): + """Rust Perez should return uniform when diffuse radiation < 10 W/m².""" + zenith = 90 - 45 # 45° altitude + rs_lv = np.asarray(pipeline.perez_v3_py(zenith, 
180.0, 5.0, 800.0, 182, 1)) + std_dev = np.std(rs_lv[:, 2]) + assert std_dev < 1e-6, f"With radD=5, distribution should be uniform (std={std_dev:.8f})" + + +class TestConstantConsistency: + """Tests that the MIN_SUN_ELEVATION_DEG constant is used consistently.""" + + def test_constant_value(self): + """MIN_SUN_ELEVATION_DEG should be 3.0 (established UMEP threshold).""" + assert MIN_SUN_ELEVATION_DEG == 3.0 + + def test_threshold_matches_constant(self): + """Both functions should use the same threshold from constants.""" + # Test just above and below 3° + altitude_below = 2.99 + altitude_above = 3.01 + + rows, cols = 10, 10 + svfalfa = np.full((rows, cols), 0.5, dtype=np.float32) + + # cylindric_wedge at 2.99° should return all 1s + zen_below = (90 - altitude_below) * (np.pi / 180) + result_below = cylindric_wedge(zen_below, svfalfa, rows, cols) + assert np.allclose(result_below, 1.0), "Should be fully shaded at 2.99°" + + # cylindric_wedge at 3.01° should calculate normally + zen_above = (90 - altitude_above) * (np.pi / 180) + _result_above = cylindric_wedge(zen_above, svfalfa, rows, cols) + # Not checking exact values, just verifying the function executes + # (the actual calculation happens without raising/returning all-1s) + + +class TestCylindricWedgeRust: + """Tests that Rust cylindric_wedge matches Python reference implementation.""" + + def test_matches_python_reference(self): + """Rust output matches Python for a range of sun angles.""" + from solweig.rustalgos import sky as rust_sky + + rows, cols = 50, 50 + rng = np.random.default_rng(42) + svfalfa = rng.uniform(0.1, 1.0, (rows, cols)).astype(np.float32) + + for altitude in [5.0, 15.0, 30.0, 45.0, 60.0, 85.0]: + zen_rad = (90 - altitude) * (np.pi / 180) + py_result = cylindric_wedge(zen_rad, svfalfa, rows, cols) + rs_result = rust_sky.cylindric_wedge(float(zen_rad), svfalfa) + + np.testing.assert_allclose( + rs_result, + py_result, + rtol=2e-5, + atol=1e-5, + err_msg=f"Rust/Python mismatch at altitude 
{altitude}°", + ) + + def test_low_sun_guard_matches(self): + """Rust returns all 1.0 below 3° threshold, same as Python.""" + from solweig.rustalgos import sky as rust_sky + + svfalfa = np.full((10, 10), 0.5, dtype=np.float32) + + for altitude in [0.1, 1.0, 2.0, 2.9]: + zen_rad = (90 - altitude) * (np.pi / 180) + rs_result = rust_sky.cylindric_wedge(float(zen_rad), svfalfa) + + assert np.allclose(rs_result, 1.0), f"Rust should return all 1.0 at altitude {altitude}°" + + def test_boundary_at_3_degrees(self): + """Rust transition at 3° matches Python.""" + from solweig.rustalgos import sky as rust_sky + + svfalfa = np.full((10, 10), 0.5, dtype=np.float32) + + # Just below threshold + zen_below = (90 - 2.99) * (np.pi / 180) + rs_below = rust_sky.cylindric_wedge(float(zen_below), svfalfa) + assert np.allclose(rs_below, 1.0) + + # Just above threshold + zen_above = (90 - 3.01) * (np.pi / 180) + rs_above = rust_sky.cylindric_wedge(float(zen_above), svfalfa) + py_above = cylindric_wedge(zen_above, svfalfa, 10, 10) + np.testing.assert_allclose(rs_above, py_above, rtol=1e-5) + + +if __name__ == "__main__": + pytest.main([__file__, "-v"]) diff --git a/tests/spec/test_perez_parity.py b/tests/spec/test_perez_parity.py new file mode 100644 index 0000000..ad60706 --- /dev/null +++ b/tests/spec/test_perez_parity.py @@ -0,0 +1,212 @@ +"""Parity tests: Rust Perez_v3 vs upstream UMEP Perez_v3 and steradians. + +Verifies that the Rust port produces results matching the upstream UMEP +implementation for a range of atmospheric conditions. 
+""" + +import numpy as np +import pytest +from solweig.physics.patch_radiation import patch_steradians +from solweig.rustalgos import pipeline + +umep_perez = pytest.importorskip( + "umep.util.SEBESOLWEIGCommonFiles.Perez_v3", + reason="UMEP package required for Perez parity tests", +) +Perez_v3 = umep_perez.Perez_v3 + +# ── Test parameters: representative atmospheric conditions ────────────────── + +# Cases where Rust and UMEP should agree (normal atmospheric conditions). +PEREZ_CASES = [ + # (zen_deg, azimuth_deg, rad_d, rad_i, jday, patch_option, label) + (30.0, 180.0, 200.0, 400.0, 180, 2, "clear_midday_summer"), + (60.0, 135.0, 150.0, 300.0, 80, 2, "morning_spring"), + (75.0, 250.0, 80.0, 100.0, 350, 2, "low_sun_winter"), + (45.0, 200.0, 300.0, 100.0, 180, 2, "overcast_summer"), + (20.0, 180.0, 50.0, 800.0, 180, 2, "very_clear_high_sun"), + (85.0, 90.0, 20.0, 30.0, 1, 2, "near_horizon"), + # Different patch options + (40.0, 180.0, 200.0, 400.0, 180, 1, "patch_option_1"), + (40.0, 180.0, 200.0, 400.0, 180, 3, "patch_option_3"), +] + +# Cases where Rust intentionally diverges from UMEP: the Rust port includes +# low-sun and low-diffuse guards (returning uniform distribution) that the +# upstream UMEP implementation lacks. These guards prevent NaN/Inf at edge cases. 
+PEREZ_EDGE_CASES = [ + (88.0, 180.0, 50.0, 10.0, 180, 2, "below_threshold"), + (30.0, 180.0, 5.0, 400.0, 180, 2, "low_diffuse"), +] + + +class TestPerez_v3Parity: + """Rust perez_v3 must match upstream UMEP Perez_v3 for the same inputs.""" + + @pytest.mark.parametrize( + "zen,azi,rad_d,rad_i,jday,patch_option,label", + PEREZ_CASES, + ids=[c[-1] for c in PEREZ_CASES], + ) + def test_luminance_parity(self, zen, azi, rad_d, rad_i, jday, patch_option, label): + """Rust luminance column matches UMEP within f32 tolerance.""" + # UMEP reference + py_lv, _, _ = Perez_v3(zen, azi, rad_d, rad_i, jday, patchchoice=1, patch_option=patch_option) + + # Rust implementation + rs_lv = np.asarray(pipeline.perez_v3_py(zen, azi, rad_d, rad_i, jday, patch_option)) + + # Shape must match + assert py_lv.shape == rs_lv.shape, f"shape mismatch: umep={py_lv.shape} rs={rs_lv.shape}" + + # Altitudes and azimuths (columns 0,1) come from create_patches — should match exactly + np.testing.assert_allclose( + rs_lv[:, 0], + py_lv[:, 0], + atol=0.01, + err_msg=f"[{label}] Patch altitudes differ", + ) + np.testing.assert_allclose( + rs_lv[:, 1], + py_lv[:, 1], + atol=0.01, + err_msg=f"[{label}] Patch azimuths differ", + ) + + # Luminances (column 2) — allow f32 precision tolerance + np.testing.assert_allclose( + rs_lv[:, 2], + py_lv[:, 2].astype(np.float32), + rtol=1e-3, + atol=1e-6, + err_msg=f"[{label}] Patch luminances differ", + ) + + @pytest.mark.parametrize( + "zen,azi,rad_d,rad_i,jday,patch_option,label", + PEREZ_CASES, + ids=[c[-1] for c in PEREZ_CASES], + ) + def test_luminance_normalised(self, zen, azi, rad_d, rad_i, jday, patch_option, label): + """Rust luminance sums to 1.0 (normalised probability distribution).""" + rs_lv = np.asarray(pipeline.perez_v3_py(zen, azi, rad_d, rad_i, jday, patch_option)) + lum_sum = rs_lv[:, 2].sum() + assert abs(lum_sum - 1.0) < 1e-4, f"[{label}] luminance sum = {lum_sum}" + + +class TestPerez_v3EdgeCases: + """Rust intentionally diverges from UMEP at edge 
cases (low sun, low diffuse). + + The Rust port includes guards that return a uniform distribution when + sun altitude < 3° or diffuse radiation < 10 W/m². UMEP lacks these + guards and can produce numerical instability at these conditions. + """ + + @pytest.mark.parametrize( + "zen,azi,rad_d,rad_i,jday,patch_option,label", + PEREZ_EDGE_CASES, + ids=[c[-1] for c in PEREZ_EDGE_CASES], + ) + def test_rust_returns_uniform(self, zen, azi, rad_d, rad_i, jday, patch_option, label): + """Rust returns uniform distribution at edge cases.""" + rs_lv = np.asarray(pipeline.perez_v3_py(zen, azi, rad_d, rad_i, jday, patch_option)) + std_dev = np.std(rs_lv[:, 2]) + assert std_dev < 1e-6, f"[{label}] Rust should return uniform (std={std_dev:.8f})" + + @pytest.mark.parametrize( + "zen,azi,rad_d,rad_i,jday,patch_option,label", + PEREZ_EDGE_CASES, + ids=[c[-1] for c in PEREZ_EDGE_CASES], + ) + def test_umep_does_not_return_uniform(self, zen, azi, rad_d, rad_i, jday, patch_option, label): + """UMEP does NOT return uniform at these edge cases (documenting divergence).""" + umep_lv, _, _ = Perez_v3(zen, azi, rad_d, rad_i, jday, patchchoice=1, patch_option=patch_option) + std_dev = np.std(umep_lv[:, 2]) + # UMEP produces non-uniform output here — our Rust port intentionally improves on this + assert std_dev > 1e-6, f"[{label}] UMEP unexpectedly returns uniform (std={std_dev:.8f})" + + +class TestSteradiansParity: + """Rust compute_steradians must match Python patch_steradians.""" + + @pytest.mark.parametrize("patch_option", [1, 2, 3]) + def test_steradians_match_python(self, patch_option): + """Rust steradians match Python for each patch option.""" + # Python reference: patch_steradians needs the lv array (uses column 0 only) + umep_lv, _, _ = Perez_v3(30.0, 180.0, 200.0, 400.0, 180, patchchoice=1, patch_option=patch_option) + py_ster, _, _ = patch_steradians(umep_lv) + + # Rust implementation + rs_ster = np.asarray(pipeline.compute_steradians_py(patch_option)) + + assert len(rs_ster) == 
len(py_ster), f"length mismatch: rs={len(rs_ster)} py={len(py_ster)}" + np.testing.assert_allclose( + rs_ster, + py_ster.astype(np.float32), + rtol=1e-4, + atol=1e-6, + err_msg=f"Steradians differ for patch_option={patch_option}", + ) + + @pytest.mark.parametrize("patch_option", [1, 2, 3]) + def test_steradians_positive(self, patch_option): + """All steradian values should be positive.""" + rs_ster = np.asarray(pipeline.compute_steradians_py(patch_option)) + assert np.all(rs_ster > 0), "Found non-positive steradians" + + +class TestSteradiansCaching: + """ShadowArrays.steradians cached property returns correct values.""" + + @pytest.mark.parametrize("patch_option", [1, 2]) + def test_cached_steradians_match_direct(self, patch_option): + """Cached steradians on ShadowArrays match direct computation.""" + from solweig.models.precomputed import ShadowArrays + + patch_map = {1: 145, 2: 153, 3: 306} + n_patches = patch_map[patch_option] + + # Create a minimal ShadowArrays with the right patch count + shape = (4, 4, n_patches) + dummy = np.zeros(shape, dtype=np.uint8) + sa = ShadowArrays( + _shmat_u8=dummy, + _vegshmat_u8=dummy, + _vbshmat_u8=dummy, + _n_patches=n_patches, + ) + + # Direct computation via UMEP + local patch_steradians + umep_lv, _, _ = Perez_v3(30.0, 180.0, 200.0, 400.0, 180, patchchoice=1, patch_option=patch_option) + py_ster, _, _ = patch_steradians(umep_lv) + + # Cached property + cached_ster = sa.steradians + + assert len(cached_ster) == n_patches + np.testing.assert_allclose( + cached_ster, + py_ster, + rtol=1e-5, + err_msg=f"Cached steradians differ from direct computation (patch_option={patch_option})", + ) + + def test_steradians_property_is_cached(self): + """Second access returns the same object (no recomputation).""" + from solweig.models.precomputed import ShadowArrays + + dummy = np.zeros((4, 4, 153), dtype=np.uint8) + sa = ShadowArrays(_shmat_u8=dummy, _vegshmat_u8=dummy, _vbshmat_u8=dummy, _n_patches=153) + first = sa.steradians + second = 
sa.steradians + assert first is second, "steradians property not cached — recomputed on second access" + + +class TestPatchCounts: + """Rust create_patches returns correct patch counts for each option.""" + + @pytest.mark.parametrize("patch_option,expected_count", [(1, 145), (2, 153), (3, 305)]) + def test_patch_count(self, patch_option, expected_count): + """Rust patch count matches expected for each option.""" + rs_lv = np.asarray(pipeline.perez_v3_py(30.0, 180.0, 200.0, 400.0, 180, patch_option)) + assert rs_lv.shape[0] == expected_count, f"Expected {expected_count}, got {rs_lv.shape[0]}" diff --git a/tests/spec/test_pet.py b/tests/spec/test_pet.py new file mode 100644 index 0000000..76d5464 --- /dev/null +++ b/tests/spec/test_pet.py @@ -0,0 +1,236 @@ +""" +Physiological Equivalent Temperature (PET) Tests + +Tests derived from specs/pet.md properties. + +Note: pet_grid takes (ta_scalar, rh_scalar, tmrt_grid, va_grid, ...) where +ta and rh are scalars applied to the whole grid. +""" + +import numpy as np +import pytest +from solweig.rustalgos import pet + +# ============================================================================= +# Test Fixtures - Default Human Parameters +# ============================================================================= + +DEFAULT_PERSON = { + "mbody": 75.0, # kg + "age": 35, # years + "height": 1.75, # m + "activity": 80.0, # W/m² (light walking) + "clo": 0.9, # clothing insulation + "sex": 1, # 1=male +} + + +def calculate_pet(ta, rh, tmrt, va, person=None): + """Calculate PET with default or custom person parameters.""" + if person is None: + person = DEFAULT_PERSON + return pet.pet_calculate( + ta, + rh, + tmrt, + va, + person["mbody"], + person["age"], + person["height"], + person["activity"], + person["clo"], + person["sex"], + ) + + +# ============================================================================= +# Property Tests (from specs/pet.md) +# 
============================================================================= + + +class TestPetProperties: + """Tests for PET calculation properties.""" + + def test_property_1_pet_is_person_specific(self): + """Property 1: PET varies with person characteristics.""" + ta = 25.0 + rh = 50.0 + tmrt = 30.0 + va = 1.0 + + # Young fit person + young_person = {**DEFAULT_PERSON, "age": 25, "mbody": 70.0} + pet_young = calculate_pet(ta, rh, tmrt, va, young_person) + + # Older person + old_person = {**DEFAULT_PERSON, "age": 65, "mbody": 80.0} + pet_old = calculate_pet(ta, rh, tmrt, va, old_person) + + # Both should produce valid results but may differ + assert not np.isnan(pet_young), "Young person PET should be valid" + assert not np.isnan(pet_old), "Older person PET should be valid" + + def test_property_2_pet_reference_is_indoor(self): + """Property 2: PET = ~21°C is comfortable indoors.""" + # Indoor reference conditions: Tmrt=Ta, v=0.1m/s, RH=50% + ta = 21.0 + tmrt = 21.0 + va = 0.1 + rh = 50.0 + + result = calculate_pet(ta, rh, tmrt, va) + + # In reference conditions, PET should be close to Ta + assert abs(result - ta) < 5.0, f"PET ({result:.1f}) should be close to Ta ({ta}) in reference conditions" + + def test_property_3_higher_tmrt_higher_pet(self): + """Property 3: Higher Tmrt → higher PET.""" + ta = 30.0 + rh = 50.0 + va = 1.0 + + # Shaded (Tmrt ≈ Ta) + pet_shade = calculate_pet(ta, rh, tmrt=ta, va=va) + + # Sunlit (high Tmrt) + pet_sun = calculate_pet(ta, rh, tmrt=ta + 30, va=va) + + assert pet_sun > pet_shade, f"Sunlit PET ({pet_sun:.1f}) should be > shaded ({pet_shade:.1f})" + + def test_property_5_activity_increases_pet(self): + """Property 5: Higher activity → higher PET in warm conditions.""" + ta = 30.0 + rh = 50.0 + tmrt = 35.0 + va = 1.0 + + # Light activity (standing) + rest_person = {**DEFAULT_PERSON, "activity": 58.0} # Resting + pet_rest = calculate_pet(ta, rh, tmrt, va, rest_person) + + # High activity (walking fast) + active_person = 
{**DEFAULT_PERSON, "activity": 135.0} # Walking 5 km/h + pet_active = calculate_pet(ta, rh, tmrt, va, active_person) + + # Higher activity should increase heat stress (higher PET) + assert pet_active > pet_rest, f"Active PET ({pet_active:.1f}) should be > resting ({pet_rest:.1f})" + + def test_property_8_wind_generally_reduces_pet(self): + """Property 8: Wind generally reduces PET.""" + ta = 30.0 + rh = 50.0 + tmrt = 40.0 + + # Calm + pet_calm = calculate_pet(ta, rh, tmrt, va=0.5) + + # Windy + pet_windy = calculate_pet(ta, rh, tmrt, va=5.0) + + assert pet_windy < pet_calm, f"Wind should reduce PET: calm={pet_calm:.1f}, windy={pet_windy:.1f}" + + +class TestPetComfortCategories: + """Test that PET produces expected comfort categories.""" + + def test_very_hot_conditions(self): + """PET should indicate heat stress in hot sunny conditions.""" + ta = 35.0 + rh = 50.0 + tmrt = 65.0 # Hot sunny + va = 1.0 + + result = calculate_pet(ta, rh, tmrt, va) + + # Should be in "hot" or "very hot" range (>35°C) + assert result > 30, f"Hot sunny PET ({result:.1f}) should indicate heat stress" + + def test_comfortable_conditions(self): + """PET should be in comfort range for moderate conditions.""" + ta = 22.0 + rh = 50.0 + tmrt = 25.0 + va = 1.0 + + result = calculate_pet(ta, rh, tmrt, va) + + # Comfort zone is 18-23°C for PET + assert 15 <= result <= 30, f"Comfortable conditions should give PET near comfort range, got {result:.1f}" + + def test_cold_conditions(self): + """PET should indicate cold stress in cold conditions.""" + ta = 5.0 + rh = 60.0 + tmrt = 5.0 + va = 2.0 + + result = calculate_pet(ta, rh, tmrt, va) + + # Should be in "cold" or "cool" range (<18°C) + assert result < 18, f"Cold PET ({result:.1f}) should indicate cold stress" + + +class TestPetGrid: + """Test grid-based PET calculation. + + Note: pet_grid signature is (ta_scalar, rh_scalar, tmrt_grid, va_grid, ...) 
+ """ + + def test_grid_calculation(self): + """Test that grid calculation works for 2D arrays.""" + shape = (10, 10) + ta = 25.0 # scalar + rh = 50.0 # scalar + tmrt = np.full(shape, 35.0, dtype=np.float32) + va = np.full(shape, 1.0, dtype=np.float32) + + result = pet.pet_grid( + ta, + rh, + tmrt, + va, + DEFAULT_PERSON["mbody"], + DEFAULT_PERSON["age"], + DEFAULT_PERSON["height"], + DEFAULT_PERSON["activity"], + DEFAULT_PERSON["clo"], + DEFAULT_PERSON["sex"], + ) + + assert result.shape == shape, f"Output shape {result.shape} should match input {shape}" + # -9999 is used for invalid pixels + valid_mask = result != -9999 + assert np.any(valid_mask), "Grid PET should have some valid values" + + def test_grid_consistent_values(self): + """Grid with uniform Tmrt/va should produce uniform output.""" + shape = (5, 5) + ta = 25.0 + rh = 50.0 + tmrt = np.full(shape, 35.0, dtype=np.float32) + va = np.full(shape, 1.0, dtype=np.float32) + + result = pet.pet_grid( + ta, + rh, + tmrt, + va, + DEFAULT_PERSON["mbody"], + DEFAULT_PERSON["age"], + DEFAULT_PERSON["height"], + DEFAULT_PERSON["activity"], + DEFAULT_PERSON["clo"], + DEFAULT_PERSON["sex"], + ) + + # All valid values should be the same + valid_mask = result != -9999 + valid_values = result[valid_mask] + if len(valid_values) > 1: + np.testing.assert_allclose( + valid_values, valid_values[0], rtol=1e-4, err_msg="Uniform inputs should produce uniform output" + ) + + +if __name__ == "__main__": + pytest.main([__file__, "-v"]) diff --git a/tests/spec/test_radiation_formulas.py b/tests/spec/test_radiation_formulas.py new file mode 100644 index 0000000..c955dae --- /dev/null +++ b/tests/spec/test_radiation_formulas.py @@ -0,0 +1,226 @@ +""" +Tests for radiation formula compliance with specifications. + +Validates that implementations match the formulas in specs/radiation.md: +- Sky emissivity (Jonsson et al. 2006) +- Diffuse fraction (Reindl et al. 
1990) +- Clearness index + +Reference: specs/radiation.md +""" + +import numpy as np + + +class TestSkyEmissivity: + """ + Test sky emissivity formula from Jonsson et al. (2006). + + Formula (from specs/radiation.md): + ea = 6.107 × 10^((7.5 × Ta) / (237.3 + Ta)) × (RH / 100) + msteg = 46.5 × (ea / Ta_K) + ε_sky = 1 - (1 + msteg) × exp(-√(1.2 + 3.0 × msteg)) + """ + + def compute_sky_emissivity(self, ta: float, rh: float) -> float: + """Compute sky emissivity using Jonsson et al. (2006) formula.""" + ta_k = ta + 273.15 + ea = 6.107 * 10 ** ((7.5 * ta) / (237.3 + ta)) * (rh / 100.0) + msteg = 46.5 * (ea / ta_k) + esky = 1 - (1 + msteg) * np.exp(-np.sqrt(1.2 + 3.0 * msteg)) + return esky + + def test_sky_emissivity_range(self): + """Sky emissivity should be in range [0.5, 1.0] for typical conditions.""" + # Cold dry: low emissivity + esky_cold_dry = self.compute_sky_emissivity(ta=0, rh=30) + assert 0.5 < esky_cold_dry < 0.8, f"Cold dry: {esky_cold_dry}" + + # Hot humid: high emissivity + esky_hot_humid = self.compute_sky_emissivity(ta=35, rh=90) + assert 0.8 < esky_hot_humid < 1.0, f"Hot humid: {esky_hot_humid}" + + def test_sky_emissivity_increases_with_humidity(self): + """Higher humidity should increase sky emissivity.""" + ta = 25 # Fixed temperature + esky_low_rh = self.compute_sky_emissivity(ta, rh=20) + esky_high_rh = self.compute_sky_emissivity(ta, rh=80) + + assert esky_high_rh > esky_low_rh, ( + f"Emissivity should increase with humidity: RH=20% → {esky_low_rh:.3f}, RH=80% → {esky_high_rh:.3f}" + ) + + def test_sky_emissivity_increases_with_temperature(self): + """Higher temperature should generally increase sky emissivity.""" + rh = 50 # Fixed humidity + esky_cold = self.compute_sky_emissivity(ta=5, rh=rh) + esky_warm = self.compute_sky_emissivity(ta=30, rh=rh) + + assert esky_warm > esky_cold, ( + f"Emissivity should increase with temperature: Ta=5°C → {esky_cold:.3f}, Ta=30°C → {esky_warm:.3f}" + ) + + def test_implementation_matches_spec(self): + 
"""Verify implementation in components/radiation.py uses same formula.""" + # Import the actual implementation + + # The formula is embedded in compute_radiation, lines 88-92: + # ta_k = weather.ta + 273.15 + # ea = 6.107 * 10 ** ((7.5 * weather.ta) / (237.3 + weather.ta)) * (weather.rh / 100.0) + # msteg = 46.5 * (ea / ta_k) + # esky = 1 - (1 + msteg) * np.exp(-np.sqrt(1.2 + 3.0 * msteg)) + + # Test with known values + ta, rh = 25, 60 + expected = self.compute_sky_emissivity(ta, rh) + + # Compute manually using the exact implementation formula + ta_k = ta + 273.15 + ea = 6.107 * 10 ** ((7.5 * ta) / (237.3 + ta)) * (rh / 100.0) + msteg = 46.5 * (ea / ta_k) + actual = 1 - (1 + msteg) * np.exp(-np.sqrt(1.2 + 3.0 * msteg)) + + assert abs(expected - actual) < 1e-10, f"Formula mismatch: expected {expected}, got {actual}" + + +class TestDiffuseFraction: + """ + Test diffuse fraction model from Reindl et al. (1990). + + Reference: specs/radiation.md, Diffuse Fraction (Reindl Model) + """ + + def test_diffuse_fraction_import(self): + """Verify diffusefraction module can be imported.""" + from solweig.physics.diffusefraction import diffusefraction + + assert callable(diffusefraction) + + def test_overcast_high_diffuse_fraction(self): + """Overcast conditions (low Kt) should have high diffuse fraction.""" + from solweig.physics.diffusefraction import diffusefraction + + # Kt <= 0.3: overcast + radG = 100 # Low global radiation + altitude = 30 # degrees + Kt = 0.2 # Very overcast + Ta = 20 + RH = 70 + + radI, radD = diffusefraction(radG, altitude, Kt, Ta, RH) + diffuse_fraction = radD / radG if radG > 0 else 0 + + assert diffuse_fraction > 0.7, ( + f"Overcast (Kt={Kt}) should have high diffuse fraction, got {diffuse_fraction:.2f}" + ) + + def test_clear_sky_low_diffuse_fraction(self): + """Clear conditions (high Kt) should have low diffuse fraction.""" + from solweig.physics.diffusefraction import diffusefraction + + # Kt >= 0.78: clear + radG = 800 # High global radiation + 
altitude = 60 # degrees + Kt = 0.85 # Clear sky + Ta = 25 + RH = 40 + + radI, radD = diffusefraction(radG, altitude, Kt, Ta, RH) + diffuse_fraction = radD / radG if radG > 0 else 0 + + assert diffuse_fraction < 0.3, f"Clear (Kt={Kt}) should have low diffuse fraction, got {diffuse_fraction:.2f}" + + def test_direct_plus_diffuse_equals_global(self): + """Direct + diffuse should approximately equal global radiation.""" + from solweig.physics.diffusefraction import diffusefraction + + radG = 500 + altitude = 45 + Kt = 0.5 + Ta = 22 + RH = 55 + + radI, radD = diffusefraction(radG, altitude, Kt, Ta, RH) + + # Direct on horizontal = radI * sin(altitude) + sin_alt = np.sin(np.radians(altitude)) + radI_horizontal = radI * sin_alt + + reconstructed = radI_horizontal + radD + + # Should be close to radG (some numerical error acceptable) + assert abs(reconstructed - radG) < radG * 0.05, f"I*sin(alt) + D = {reconstructed:.1f}, expected ~{radG}" + + +class TestAbsorptionCoefficients: + """ + Test absorption coefficients match ISO 7726:1998 standard. + + Reference: specs/tmrt.md + """ + + def test_default_abs_k_is_0_70(self): + """Default shortwave absorption should be 0.70 (ISO 7726).""" + from solweig.models import HumanParams + + human = HumanParams() + assert human.abs_k == 0.7, f"absK should be 0.70, got {human.abs_k}" + + def test_default_abs_l_is_0_97(self): + """Default longwave absorption should be 0.97 (ISO 7726).""" + from solweig.models import HumanParams + + human = HumanParams() + assert human.abs_l == 0.97, f"absL should be 0.97, got {human.abs_l}" + + def test_json_params_abs_l_is_0_97(self): + """JSON params should specify absL = 0.97 (ISO 7726).""" + from solweig.loaders import load_params + + params = load_params() + abs_l = params.Tmrt_params.Value.absL + assert abs_l == 0.97, f"params absL should be 0.97, got {abs_l}" + + +class TestViewFactors: + """ + Test posture view factors match specs. 
+ + Reference: specs/tmrt.md, Mayer & Höppe (1987) + """ + + def test_standing_view_factors(self): + """Standing posture: Fup=0.06, Fside=0.22.""" + from solweig.loaders import load_params + + params = load_params() + standing = params.Posture.Standing.Value + + assert standing.Fup == 0.06, f"Standing Fup should be 0.06, got {standing.Fup}" + assert standing.Fside == 0.22, f"Standing Fside should be 0.22, got {standing.Fside}" + + def test_sitting_view_factors(self): + """Sitting posture: Fup=0.166666, Fside=0.166666.""" + from solweig.loaders import load_params + + params = load_params() + sitting = params.Posture.Sitting.Value + + assert abs(sitting.Fup - 0.166666) < 0.001, f"Sitting Fup should be ~0.167, got {sitting.Fup}" + assert abs(sitting.Fside - 0.166666) < 0.001, f"Sitting Fside should be ~0.167, got {sitting.Fside}" + + def test_view_factors_sum_approximately_one(self): + """View factors should sum to approximately 1.0.""" + from solweig.loaders import load_params + + params = load_params() + + # Standing: 2*Fup + 4*Fside + standing = params.Posture.Standing.Value + standing_sum = 2 * standing.Fup + 4 * standing.Fside + assert 0.9 < standing_sum < 1.1, f"Standing factors sum to {standing_sum}, expected ~1.0" + + # Sitting: 2*Fup + 4*Fside + sitting = params.Posture.Sitting.Value + sitting_sum = 2 * sitting.Fup + 4 * sitting.Fside + assert 0.9 < sitting_sum < 1.1, f"Sitting factors sum to {sitting_sum}, expected ~1.0" diff --git a/tests/spec/test_shadows.py b/tests/spec/test_shadows.py new file mode 100644 index 0000000..e35f74f --- /dev/null +++ b/tests/spec/test_shadows.py @@ -0,0 +1,231 @@ +""" +Shadow Calculation Tests + +Tests derived from specs/shadows.md properties. +Each test verifies a physical property that must hold for the shadow algorithm. 
+""" + +import math + +import numpy as np +import pytest +from solweig import rustalgos + +# ============================================================================= +# Test Fixtures +# ============================================================================= + + +@pytest.fixture(autouse=True, scope="module") +def _disable_shadow_gpu(): + """Use CPU path for deterministic shadow property tests.""" + rustalgos.shadowing.disable_gpu() + + +def create_flat_dsm(size=(50, 50), elevation=10.0): + """Create completely flat DSM.""" + return np.full(size, elevation, dtype=np.float32) + + +def create_building_dsm(size=(100, 100), building_height=20.0, ground=0.0): + """Create DSM with single 10x10 building in center.""" + dsm = np.full(size, ground, dtype=np.float32) + cy, cx = size[0] // 2, size[1] // 2 + dsm[cy - 5 : cy + 5, cx - 5 : cx + 5] = ground + building_height + return dsm + + +def calculate_shadow(dsm, altitude, azimuth, pixel_size=1.0): + """ + Calculate shadows using Rust implementation. + + Returns sunlit mask: 1 = sunlit, 0 = shaded + """ + if altitude <= 0: + # Below horizon: no direct beam shadows, everything is sunlit by convention. 
+ return np.ones_like(dsm, dtype=np.float32) + + max_height = float(np.max(dsm) - np.min(dsm)) + result = rustalgos.shadowing.calculate_shadows_wall_ht_25( + float(azimuth), + float(altitude), + float(pixel_size), + max_height, + dsm.astype(np.float32), + None, # veg_canopy + None, # veg_trunk + None, # bush + None, # walls + None, # aspect + None, # walls_scheme + None, # aspect_scheme + None, # min_sun_elev + ) + return np.array(result.bldg_sh, dtype=np.float32) + + +# ============================================================================= +# Property Tests (from specs/shadows.md) +# ============================================================================= + + +class TestShadowProperties: + """Tests for shadow calculation properties.""" + + def test_property_1_no_shadows_below_horizon(self): + """Property 1: No shadows when sun altitude <= 0 (below horizon).""" + dsm = create_building_dsm() + + for altitude in [-10, -5, 0]: + sunlit = calculate_shadow(dsm, altitude=altitude, azimuth=180) + assert np.all(sunlit == 1), f"Non-sunlit pixels exist at altitude {altitude}°" + + def test_property_2_flat_terrain_no_shadows(self): + """Property 2: Flat terrain has no shadows.""" + dsm = create_flat_dsm() + sunlit = calculate_shadow(dsm, altitude=45, azimuth=180) + assert np.all(sunlit == 1), "Flat terrain should be fully sunlit" + + def test_property_3_lower_sun_longer_shadows(self): + """Property 3: Lower sun = longer shadows (more shadow area).""" + dsm = create_building_dsm() + + altitudes = [60, 45, 30, 15] + shadow_areas = [] + + for alt in altitudes: + sunlit = calculate_shadow(dsm, altitude=alt, azimuth=180) + shadow_areas.append(np.sum(1.0 - sunlit)) + + # Each lower altitude should have more shadow + for i in range(len(altitudes) - 1): + assert shadow_areas[i] < shadow_areas[i + 1], ( + f"Shadow at {altitudes[i]}° ({shadow_areas[i]}) should be less than " + f"at {altitudes[i + 1]}° ({shadow_areas[i + 1]})" + ) + + def 
test_property_4_shadows_opposite_sun_south(self): + """Property 4: Sun from south (180°) -> shadows extend north.""" + dsm = create_building_dsm() + sunlit = calculate_shadow(dsm, altitude=30, azimuth=180) + shaded = 1.0 - sunlit + + cy = dsm.shape[0] // 2 + north_shadow = np.sum(shaded[: cy - 5, :]) # Above building + south_shadow = np.sum(shaded[cy + 5 :, :]) # Below building + + assert north_shadow > south_shadow, "Shadows should extend north when sun is south" + + def test_property_4_shadows_opposite_sun_east(self): + """Property 4: Sun from east (90°) -> shadows extend west.""" + dsm = create_building_dsm() + sunlit = calculate_shadow(dsm, altitude=30, azimuth=90) + shaded = 1.0 - sunlit + + cx = dsm.shape[1] // 2 + west_shadow = np.sum(shaded[:, : cx - 5]) # Left of building + east_shadow = np.sum(shaded[:, cx + 5 :]) # Right of building + + assert west_shadow > east_shadow, "Shadows should extend west when sun is east" + + def test_property_5_taller_objects_longer_shadows(self): + """Property 5: Taller objects cast longer shadows.""" + dsm = np.zeros((100, 100), dtype=np.float32) + # Short building (10m) on left + dsm[45:55, 20:30] = 10.0 + # Tall building (30m) on right + dsm[45:55, 70:80] = 30.0 + + sunlit = calculate_shadow(dsm, altitude=45, azimuth=180) + shaded = 1.0 - sunlit + + short_shadow = np.sum(shaded[:45, 20:30]) # North of short building + tall_shadow = np.sum(shaded[:45, 70:80]) # North of tall building + + assert tall_shadow > short_shadow, "Taller building should cast longer shadow" + + def test_property_6_shadow_length_equation(self): + """Property 6: Shadow length ≈ height / tan(altitude) within 15%.""" + height = 20.0 + altitude = 45.0 + expected_length = height / math.tan(math.radians(altitude)) + + dsm = np.zeros((100, 100), dtype=np.float32) + dsm[50:60, 45:55] = height # Building from row 50-60 + + sunlit = calculate_shadow(dsm, altitude=altitude, azimuth=180) + shaded = 1.0 - sunlit + + # Find northernmost shadow pixel + shadow_north 
= shaded[:50, 45:55] + shadow_rows = np.where(np.any(shadow_north > 0, axis=1))[0] + + measured_length = 50 - shadow_rows[0] if len(shadow_rows) > 0 else 0 + + tolerance = expected_length * 0.15 + 3 # 15% or 3 pixels + assert abs(measured_length - expected_length) <= tolerance, ( + f"Shadow length {measured_length} should be ~{expected_length:.1f} (±15%)" + ) + + def test_property_7_building_tops_sunlit(self): + """Property 7: Building tops (rooftops) are sunlit when sun > 0.""" + dsm = create_building_dsm(building_height=30) + sunlit = calculate_shadow(dsm, altitude=45, azimuth=180) + + # Building top pixels + cy, cx = dsm.shape[0] // 2, dsm.shape[1] // 2 + rooftop = sunlit[cy - 5 : cy + 5, cx - 5 : cx + 5] + + sunlit_fraction = np.sum(rooftop == 1) / rooftop.size + assert sunlit_fraction > 0.9, f"Rooftop should be mostly sunlit, got {sunlit_fraction:.0%}" + + def test_property_8_binary_values(self): + """Property 8: Shadow mask contains only 0 or 1.""" + dsm = create_building_dsm() + sunlit = calculate_shadow(dsm, altitude=45, azimuth=180) + + unique = set(np.unique(sunlit)) + assert unique.issubset({0.0, 1.0}), f"Sunlit values should be binary, got {unique}" + + +# ============================================================================= +# Equation Tests +# ============================================================================= + + +class TestShadowEquation: + """Tests that verify shadow length matches L = h / tan(α).""" + + @pytest.mark.parametrize( + "altitude,expected_ratio", + [ + (60, 0.577), # tan(60°) ≈ 1.732, so L/h ≈ 0.577 + (45, 1.0), # tan(45°) = 1, so L/h = 1 + (30, 1.732), # tan(30°) ≈ 0.577, so L/h ≈ 1.732 + ], + ) + def test_shadow_length_ratio(self, altitude, expected_ratio): + """Shadow length / height should equal 1/tan(altitude).""" + height = 20.0 + theoretical_length = height / math.tan(math.radians(altitude)) + + dsm = np.zeros((200, 200), dtype=np.float32) + dsm[90:110, 90:110] = height + + sunlit = calculate_shadow(dsm, 
altitude=altitude, azimuth=180) + shaded = 1.0 - sunlit + + # Measure shadow north of building + shadow_north = shaded[:90, 90:110] + shadow_rows = np.where(np.any(shadow_north > 0, axis=1))[0] + + measured_length = 90 - shadow_rows[0] if len(shadow_rows) > 0 else 0 + tolerance = theoretical_length * 0.15 + 3 + + assert abs(measured_length - theoretical_length) <= tolerance, ( + f"At {altitude}°: expected ~{theoretical_length:.1f}m, got {measured_length}m" + ) + + +if __name__ == "__main__": + pytest.main([__file__, "-v"]) diff --git a/tests/spec/test_svf.py b/tests/spec/test_svf.py new file mode 100644 index 0000000..78cfdff --- /dev/null +++ b/tests/spec/test_svf.py @@ -0,0 +1,190 @@ +""" +Sky View Factor (SVF) Tests + +Tests derived from specs/svf.md properties. +""" + +import numpy as np +import pytest +from solweig import rustalgos + +# ============================================================================= +# Test Fixtures +# ============================================================================= + + +def create_flat_dsm(size=(50, 50), elevation=0.0): + """Create completely flat DSM.""" + return np.full(size, elevation, dtype=np.float32) + + +def create_canyon_dsm(size=(50, 50), wall_height=30.0, canyon_width=20): + """Create urban canyon DSM (walls on east and west sides).""" + dsm = np.zeros(size, dtype=np.float32) + # West wall + dsm[:, :10] = wall_height + # East wall + dsm[:, -10:] = wall_height + return dsm + + +def create_building_dsm(size=(50, 50), building_height=20.0): + """Create DSM with single building in center.""" + dsm = np.zeros(size, dtype=np.float32) + cy, cx = size[0] // 2, size[1] // 2 + dsm[cy - 5 : cy + 5, cx - 5 : cx + 5] = building_height + return dsm + + +def create_courtyard_dsm(size=(50, 50), wall_height=20.0, courtyard_size=20): + """Create square courtyard (walls on all sides, open center).""" + dsm = np.full(size, wall_height, dtype=np.float32) + cy, cx = size[0] // 2, size[1] // 2 + half = courtyard_size // 2 + 
dsm[cy - half : cy + half, cx - half : cx + half] = 0.0 + return dsm + + +def calculate_svf(dsm, pixel_size=1.0): + """ + Calculate SVF using Rust implementation. + + Returns SvfResult with svf, svf_north, svf_east, svf_south, svf_west. + """ + max_height = float(np.max(dsm) - np.min(dsm)) + if max_height == 0: + max_height = 1.0 # Avoid division by zero for flat terrain + + # No vegetation + vegdem = np.zeros_like(dsm, dtype=np.float32) + vegdem2 = np.zeros_like(dsm, dtype=np.float32) + + result = rustalgos.skyview.calculate_svf( + dsm.astype(np.float32), + vegdem, + vegdem2, + float(pixel_size), + False, # usevegdem + max_height, + None, # patch_option (default) + None, # min_sun_elev + None, # progress_callback + ) + return result + + +# ============================================================================= +# Property Tests (from specs/svf.md) +# ============================================================================= + + +class TestSvfProperties: + """Tests for SVF calculation properties.""" + + def test_property_1_range_0_to_1(self): + """Property 1: All SVF values must be between 0 and 1.""" + dsm = create_building_dsm() + result = calculate_svf(dsm) + + svf = np.array(result.svf) + assert np.all(svf >= 0), "SVF has values < 0" + assert np.all(svf <= 1), "SVF has values > 1" + + def test_property_2_flat_terrain_equals_1(self): + """Property 2: Flat open terrain has SVF = 1 everywhere.""" + dsm = create_flat_dsm(size=(50, 50)) + result = calculate_svf(dsm) + + svf = np.array(result.svf) + # Allow small tolerance for numerical precision + assert np.allclose(svf, 1.0, atol=0.05), f"Flat terrain SVF should be ~1, got {svf.mean():.3f}" + + def test_property_3_canyon_less_than_half(self): + """Property 3: Deep urban canyons have SVF < 0.5.""" + # Create very narrow, deep canyon (H/W ratio > 2) + # 60m walls, 10m wide canyon = H/W = 6 + dsm = np.zeros((50, 50), dtype=np.float32) + dsm[:, :20] = 60.0 # West wall + dsm[:, 30:] = 60.0 # East wall (only 10 
pixels wide canyon) + result = calculate_svf(dsm) + + svf = np.array(result.svf) + # Check canyon floor (center of the narrow gap) + canyon_floor_svf = svf[20:30, 23:27].mean() + assert canyon_floor_svf < 0.5, f"Deep canyon SVF should be < 0.5, got {canyon_floor_svf:.3f}" + + def test_property_4_taller_obstacles_lower_svf(self): + """Property 4: Points near taller obstacles have lower SVF.""" + # Low building + dsm_low = create_building_dsm(size=(50, 50), building_height=10.0) + result_low = calculate_svf(dsm_low) + svf_low = np.array(result_low.svf) + + # Tall building + dsm_tall = create_building_dsm(size=(50, 50), building_height=40.0) + result_tall = calculate_svf(dsm_tall) + svf_tall = np.array(result_tall.svf) + + # Check ground level near building + ground_svf_low = svf_low[30:35, 20:30].mean() # South of building + ground_svf_tall = svf_tall[30:35, 20:30].mean() + + assert ground_svf_tall < ground_svf_low, ( + f"Taller building should reduce SVF: low={ground_svf_low:.3f}, tall={ground_svf_tall:.3f}" + ) + + def test_property_6_rooftops_high_svf(self): + """Property 6: Building rooftops have SVF close to 1.""" + dsm = create_building_dsm(size=(50, 50), building_height=30.0) + result = calculate_svf(dsm) + + svf = np.array(result.svf) + # Check rooftop (center of grid where building is) + cy, cx = 25, 25 + rooftop_svf = svf[cy - 3 : cy + 3, cx - 3 : cx + 3].mean() + + assert rooftop_svf > 0.8, f"Rooftop SVF should be high (>0.8), got {rooftop_svf:.3f}" + + def test_property_7_more_buildings_lower_svf(self): + """Property 7: More buildings nearby = lower ground-level SVF.""" + # Single building + dsm_single = np.zeros((50, 50), dtype=np.float32) + dsm_single[20:30, 20:30] = 20.0 + result_single = calculate_svf(dsm_single) + svf_single = np.array(result_single.svf) + + # Multiple buildings + dsm_multi = np.zeros((50, 50), dtype=np.float32) + dsm_multi[5:15, 5:15] = 20.0 + dsm_multi[5:15, 35:45] = 20.0 + dsm_multi[35:45, 5:15] = 20.0 + dsm_multi[35:45, 35:45] = 
20.0 + dsm_multi[20:30, 20:30] = 20.0 # Center building + result_multi = calculate_svf(dsm_multi) + svf_multi = np.array(result_multi.svf) + + # Compare ground-level SVF at center (between buildings) + center_svf_single = svf_single[12:18, 12:18].mean() + center_svf_multi = svf_multi[12:18, 12:18].mean() + + assert center_svf_multi < center_svf_single, ( + f"More buildings should reduce SVF: single={center_svf_single:.3f}, multi={center_svf_multi:.3f}" + ) + + +class TestSvfDirectional: + """Tests for directional SVF components.""" + + def test_directional_svf_range(self): + """Directional SVF values should be in [0, 1].""" + dsm = create_building_dsm() + result = calculate_svf(dsm) + + for direction in ["svf_north", "svf_east", "svf_south", "svf_west"]: + arr = np.array(getattr(result, direction)) + assert np.all(arr >= 0), f"{direction} has values < 0" + assert np.all(arr <= 1), f"{direction} has values > 1" + + +if __name__ == "__main__": + pytest.main([__file__, "-v"]) diff --git a/tests/spec/test_svf_core_api.py b/tests/spec/test_svf_core_api.py new file mode 100644 index 0000000..f0538c0 --- /dev/null +++ b/tests/spec/test_svf_core_api.py @@ -0,0 +1,107 @@ +"""Regression tests for SkyviewRunner.calculate_svf_core.""" + +import numpy as np +from solweig import rustalgos + + +def _max_height(dsm: np.ndarray, cdsm: np.ndarray) -> float: + h = float(np.nanmax(np.maximum(dsm, cdsm)) - np.nanmin(dsm)) + return h if h > 0 else 1.0 + + +def _assert_core_match( + full_result, + core_result, + rs: slice, + cs: slice, + *, + use_veg: bool, +) -> None: + svf_fields = ["svf", "svf_north", "svf_east", "svf_south", "svf_west"] + veg_fields = [ + "svf_veg", + "svf_veg_north", + "svf_veg_east", + "svf_veg_south", + "svf_veg_west", + "svf_veg_blocks_bldg_sh", + "svf_veg_blocks_bldg_sh_north", + "svf_veg_blocks_bldg_sh_east", + "svf_veg_blocks_bldg_sh_south", + "svf_veg_blocks_bldg_sh_west", + ] + + for name in svf_fields: + full = np.asarray(getattr(full_result, name))[rs, cs] 
+ core = np.asarray(getattr(core_result, name)) + np.testing.assert_allclose(core, full, atol=0.0, rtol=0.0, err_msg=f"Mismatch in {name}") + + if use_veg: + for name in veg_fields: + full = np.asarray(getattr(full_result, name))[rs, cs] + core = np.asarray(getattr(core_result, name)) + np.testing.assert_allclose(core, full, atol=0.0, rtol=0.0, err_msg=f"Mismatch in {name}") + + sh_fields = ["bldg_sh_matrix", "veg_sh_matrix", "veg_blocks_bldg_sh_matrix"] + for name in sh_fields: + full = np.asarray(getattr(full_result, name))[rs, cs, :] + core = np.asarray(getattr(core_result, name)) + np.testing.assert_array_equal(core, full, err_msg=f"Mismatch in {name}") + + +def test_svf_core_matches_full_without_vegetation(): + rng = np.random.default_rng(42) + rows, cols = 80, 96 + dsm = (rng.random((rows, cols), dtype=np.float32) * 25.0).astype(np.float32) + veg = np.zeros_like(dsm, dtype=np.float32) + trunk = np.zeros_like(dsm, dtype=np.float32) + + runner = rustalgos.skyview.SkyviewRunner() + max_h = _max_height(dsm, veg) + + full = runner.calculate_svf(dsm, veg, trunk, 1.0, False, max_h, 2, 3.0) + core = runner.calculate_svf_core( + dsm, + veg, + trunk, + 1.0, + False, + max_h, + 2, + 3.0, + 7, + 71, + 11, + 83, + ) + _assert_core_match(full, core, slice(7, 71), slice(11, 83), use_veg=False) + + +def test_svf_core_matches_full_with_vegetation(): + rng = np.random.default_rng(7) + rows, cols = 72, 88 + dsm = (rng.random((rows, cols), dtype=np.float32) * 20.0).astype(np.float32) + canopy_rel = (rng.random((rows, cols), dtype=np.float32) * 8.0).astype(np.float32) + trunk_rel = (canopy_rel * 0.25).astype(np.float32) + canopy_abs = (dsm + canopy_rel).astype(np.float32) + trunk_abs = (dsm + trunk_rel).astype(np.float32) + + runner = rustalgos.skyview.SkyviewRunner() + max_h = _max_height(dsm, canopy_abs) + + full = runner.calculate_svf(dsm, canopy_abs, trunk_abs, 1.0, True, max_h, 2, 3.0) + core = runner.calculate_svf_core( + dsm, + canopy_abs, + trunk_abs, + 1.0, + True, + 
max_h, + 2, + 3.0, + 5, + 68, + 9, + 79, + ) + _assert_core_match(full, core, slice(5, 68), slice(9, 79), use_veg=True) diff --git a/tests/spec/test_umep_parity.py b/tests/spec/test_umep_parity.py new file mode 100644 index 0000000..73cf42a --- /dev/null +++ b/tests/spec/test_umep_parity.py @@ -0,0 +1,95 @@ +"""Parity tests: local Python modules vs upstream UMEP equivalents. + +Modules kept locally (because production code cannot depend on UMEP) +are validated here against the upstream UMEP implementations to ensure +they remain in sync. + +Local modules tested: +- physics/create_patches.py — used by precomputed.py (production) +- physics/patch_radiation.py — patch_steradians() used by precomputed.py (production) +""" + +import numpy as np +import pytest + +# ── UMEP imports (skip entire module if UMEP not installed) ────────────────── + +umep_patches = pytest.importorskip( + "umep.util.SEBESOLWEIGCommonFiles.create_patches", + reason="UMEP package required for parity tests", +) +umep_patch_rad = pytest.importorskip( + "umep.functions.SOLWEIGpython.patch_radiation", + reason="UMEP package required for parity tests", +) +umep_perez = pytest.importorskip( + "umep.util.SEBESOLWEIGCommonFiles.Perez_v3", + reason="UMEP package required for parity tests", +) + +from solweig.physics.create_patches import create_patches as local_create_patches # noqa: E402 +from solweig.physics.patch_radiation import patch_steradians as local_patch_steradians # noqa: E402 + + +class TestCreatePatchesParity: + """Local create_patches must exactly match UMEP create_patches.""" + + @pytest.mark.parametrize("patch_option", [1, 2, 3, 4]) + def test_all_outputs_match(self, patch_option): + """Every return value must be identical for all patch options.""" + local = local_create_patches(patch_option) + umep = umep_patches.create_patches(patch_option) + + assert len(local) == len(umep), "Different number of return values" + + names = [ + "skyvaultalt", + "skyvaultazi", + "annulino", + 
"skyvaultaltint", + "patches_in_band", + "skyvaultaziint", + "azistart", + ] + for i, name in enumerate(names): + np.testing.assert_array_equal( + np.asarray(local[i]), + np.asarray(umep[i]), + err_msg=f"create_patches({patch_option}): {name} differs", + ) + + @pytest.mark.parametrize("patch_option,expected_count", [(1, 145), (2, 153)]) + def test_patch_count(self, patch_option, expected_count): + """Number of patches matches expected for standard options.""" + alt, _, _, _, _, _, _ = local_create_patches(patch_option) + assert alt.size == expected_count + + +class TestPatchSteradiansParity: + """Local patch_steradians must match UMEP patch_steradians.""" + + @pytest.mark.parametrize("patch_option", [1, 2, 3]) + def test_steradians_match(self, patch_option): + """Steradian values must match UMEP for each patch option.""" + # Generate lv array via UMEP Perez (used as input to steradians) + lv, _, _ = umep_perez.Perez_v3(30.0, 180.0, 200.0, 400.0, 180, patchchoice=1, patch_option=patch_option) + + local_ster, _, _ = local_patch_steradians(lv) + umep_ster, _, _ = umep_patch_rad.patch_steradians(lv) + + np.testing.assert_allclose( + local_ster, + umep_ster, + rtol=1e-6, + atol=1e-8, + err_msg=f"patch_steradians differs for patch_option={patch_option}", + ) + + @pytest.mark.parametrize("patch_option", [1, 2, 3]) + def test_steradians_sum_to_2pi(self, patch_option): + """Steradians should sum to approximately 2*pi (hemisphere).""" + lv, _, _ = umep_perez.Perez_v3(30.0, 180.0, 200.0, 400.0, 180, patchchoice=1, patch_option=patch_option) + local_ster, _, _ = local_patch_steradians(lv) + ster_sum = local_ster.sum() + # Hemisphere = 2*pi steradians + np.testing.assert_allclose(ster_sum, 2 * np.pi, rtol=0.05, err_msg="Steradians don't sum to ~2*pi") diff --git a/tests/spec/test_utci.py b/tests/spec/test_utci.py new file mode 100644 index 0000000..110160d --- /dev/null +++ b/tests/spec/test_utci.py @@ -0,0 +1,129 @@ +""" +Universal Thermal Climate Index (UTCI) Tests + +Tests 
derived from specs/utci.md properties. + +Note: utci_grid takes (ta_scalar, rh_scalar, tmrt_grid, va_grid) where +ta and rh are scalars applied to the whole grid. +""" + +import numpy as np +import pytest +from solweig.rustalgos import utci + +# ============================================================================= +# Property Tests (from specs/utci.md) +# ============================================================================= + + +class TestUtciProperties: + """Tests for UTCI calculation properties.""" + + def test_property_1_utci_is_temperature(self): + """Property 1: UTCI is an equivalent temperature in °C.""" + # utci_single(ta, rh, tmrt, va10m) + result = utci.utci_single(25.0, 50.0, 30.0, 1.0) + + # UTCI should be a reasonable temperature value + assert isinstance(result, (int, float)), "UTCI should be numeric" + assert -60 < result < 70, f"UTCI {result}°C outside reasonable range" + + def test_property_2_valid_input_ranges(self): + """Property 2: UTCI handles valid input ranges.""" + # Test various valid combinations (ta, rh, tmrt, va) + test_cases = [ + (-10.0, 50.0, -5.0, 1.0), # Cold + (25.0, 50.0, 30.0, 2.0), # Moderate + (40.0, 30.0, 60.0, 1.0), # Hot + ] + + for ta, rh, tmrt, va in test_cases: + result = utci.utci_single(ta, rh, tmrt, va) + assert result != -999, f"UTCI returned invalid for ta={ta}, rh={rh}, tmrt={tmrt}, va={va}" + + def test_property_3_higher_tmrt_higher_utci(self): + """Property 3: Higher Tmrt → higher UTCI.""" + ta = 30.0 + rh = 50.0 + va = 1.0 + + # Low Tmrt (shaded) + utci_shade = utci.utci_single(ta, rh, ta, va) + + # High Tmrt (sunlit) - larger delta to see clear effect + utci_sun = utci.utci_single(ta, rh, ta + 35, va) + + assert utci_sun > utci_shade, f"Sunlit UTCI ({utci_sun:.1f}) should be > shaded ({utci_shade:.1f})" + + def test_property_8_high_humidity_increases_utci_in_heat(self): + """Property 8: High humidity increases UTCI in hot conditions.""" + ta = 35.0 + tmrt = 45.0 + va = 1.0 + + # Low humidity + 
utci_dry = utci.utci_single(ta, 30.0, tmrt, va) + + # High humidity + utci_humid = utci.utci_single(ta, 80.0, tmrt, va) + + assert utci_humid > utci_dry, ( + f"Humidity should increase UTCI in heat: dry={utci_dry:.1f}, humid={utci_humid:.1f}" + ) + + +class TestUtciStressCategories: + """Test that UTCI produces expected stress categories.""" + + def test_heat_stress_categories(self): + """UTCI should produce expected heat stress values.""" + # Hot sunny conditions (high Tmrt) + utci_val = utci.utci_single(35.0, 50.0, 65.0, 1.0) + + # Should indicate significant heat stress (> moderate threshold of 32) + assert utci_val > 32, f"Hot sunny UTCI ({utci_val:.1f}) should indicate heat stress" + + +class TestUtciGrid: + """Test grid-based UTCI calculation. + + Note: utci_grid signature is (ta_scalar, rh_scalar, tmrt_grid, va_grid) + Arrays must be float32. + """ + + def test_grid_calculation(self): + """Test that grid calculation works for 2D arrays.""" + shape = (10, 10) + ta = 25.0 # scalar + rh = 50.0 # scalar + tmrt = np.full(shape, 35.0, dtype=np.float32) + va = np.full(shape, 1.0, dtype=np.float32) + + result = utci.utci_grid(ta, rh, tmrt, va) + + assert result.shape == shape, f"Output shape {result.shape} should match input {shape}" + # -9999 is used for invalid pixels + valid_mask = result != -9999 + assert np.any(valid_mask), "Grid UTCI should have some valid values" + + def test_grid_consistent_values(self): + """Grid with uniform Tmrt/va should produce uniform output.""" + shape = (5, 5) + ta = 25.0 + rh = 50.0 + tmrt = np.full(shape, 35.0, dtype=np.float32) + va = np.full(shape, 1.0, dtype=np.float32) + + result = utci.utci_grid(ta, rh, tmrt, va) + + # All valid values should be the same + valid_mask = result != -9999 + valid_values = result[valid_mask] + if len(valid_values) > 1: + np.testing.assert_allclose( + valid_values, valid_values[0], rtol=1e-4, err_msg="Uniform inputs should produce uniform output" + ) + + +if __name__ == "__main__": + 
pytest.main([__file__, "-v"]) diff --git a/tests/test_api.py b/tests/test_api.py new file mode 100644 index 0000000..27fb9ea --- /dev/null +++ b/tests/test_api.py @@ -0,0 +1,1092 @@ +""" +Tests for the simplified SOLWEIG API. + +These tests verify that the new dataclasses work correctly and +compute derived values as expected. +""" + +from datetime import datetime + +import numpy as np +import pytest +from conftest import make_mock_svf +from solweig.api import ( + HumanParams, + Location, + ModelConfig, + SolweigResult, + SurfaceData, + Weather, + calculate, + calculate_buffer_distance, + calculate_tiled, + generate_tiles, +) +from solweig.errors import MissingPrecomputedData +from solweig.models.surface import _max_shadow_height + + +class TestSurfaceData: + """Tests for SurfaceData dataclass.""" + + def test_basic_creation(self): + """SurfaceData can be created with just a DSM.""" + dsm = np.ones((10, 10), dtype=np.float32) + surface = SurfaceData(dsm=dsm) + + assert surface.dsm.shape == (10, 10) + assert surface.cdsm is None + assert surface.dem is None + assert surface.pixel_size == 1.0 + + def test_with_optional_rasters(self): + """SurfaceData accepts optional CDSM, DEM, TDSM.""" + dsm = np.ones((10, 10)) * 100 + cdsm = np.ones((10, 10)) * 5 + dem = np.ones((10, 10)) * 50 + + surface = SurfaceData(dsm=dsm, cdsm=cdsm, dem=dem, pixel_size=2.0) + + assert surface.cdsm is not None + assert surface.dem is not None + assert surface.pixel_size == 2.0 + + def test_auto_converts_to_float32(self): + """SurfaceData converts arrays to float32.""" + dsm = np.ones((10, 10), dtype=np.float64) + surface = SurfaceData(dsm=dsm) + + assert surface.dsm.dtype == np.float32 + + def test_max_height_auto_computed(self): + """max_height is automatically computed from DSM.""" + dsm = np.zeros((10, 10)) + dsm[5, 5] = 100 # Building + dsm[2, 2] = 10 # Lower ground + + surface = SurfaceData(dsm=dsm) + + assert surface.max_height == 100.0 # max - min = 100 - 0 + + def 
test_max_height_with_terrain(self): + """max_height handles terrain variation.""" + dsm = np.zeros((10, 10)) + dsm[:, :] = 50 # Base terrain + dsm[5, 5] = 150 # Building on terrain + + surface = SurfaceData(dsm=dsm) + + # max_height = 150 - 50 = 100 + assert surface.max_height == 100.0 + + def test_max_height_all_nan_returns_zero(self): + """All-NaN DSM should safely report zero casting height.""" + dsm = np.full((5, 5), np.nan, dtype=np.float32) + surface = SurfaceData(dsm=dsm) + assert surface.max_height == 0.0 + + def test_max_height_conservatively_includes_cdsm(self): + """Buffer-oriented max_height includes CDSM whenever present.""" + dsm = np.ones((5, 5), dtype=np.float32) * 100.0 + cdsm = np.ones((5, 5), dtype=np.float32) * 130.0 + surface = SurfaceData(dsm=dsm, cdsm=cdsm, cdsm_relative=False) + assert surface.max_height == 30.0 + + def test_shape_property(self): + """shape property returns DSM dimensions.""" + dsm = np.ones((100, 200)) + surface = SurfaceData(dsm=dsm) + + assert surface.shape == (100, 200) + + +class TestMaxShadowHeightHelper: + """Tests for internal max shadow height helper semantics.""" + + def test_all_nan_returns_zero(self): + dsm = np.full((5, 5), np.nan, dtype=np.float32) + assert _max_shadow_height(dsm) == 0.0 + + def test_respects_use_veg_flag(self): + dsm = np.ones((5, 5), dtype=np.float32) * 100.0 + cdsm = np.ones((5, 5), dtype=np.float32) * 130.0 + assert _max_shadow_height(dsm, cdsm, use_veg=False) == 0.0 + assert _max_shadow_height(dsm, cdsm, use_veg=True) == 30.0 + + +class TestLocation: + """Tests for Location dataclass.""" + + def test_basic_creation(self): + """Location can be created with lat/lon.""" + loc = Location(latitude=57.7, longitude=12.0) + + assert loc.latitude == 57.7 + assert loc.longitude == 12.0 + assert loc.altitude == 0.0 + assert loc.utc_offset == 0 + + def test_with_altitude_and_utc(self): + """Location accepts altitude and UTC offset.""" + loc = Location(latitude=40.0, longitude=-74.0, altitude=100.0, 
utc_offset=-5) + + assert loc.altitude == 100.0 + assert loc.utc_offset == -5 + + def test_validates_latitude_range(self): + """Location validates latitude in [-90, 90].""" + with pytest.raises(ValueError, match="Latitude"): + Location(latitude=91.0, longitude=0.0) + + with pytest.raises(ValueError, match="Latitude"): + Location(latitude=-91.0, longitude=0.0) + + def test_validates_longitude_range(self): + """Location validates longitude in [-180, 180].""" + with pytest.raises(ValueError, match="Longitude"): + Location(latitude=0.0, longitude=181.0) + + with pytest.raises(ValueError, match="Longitude"): + Location(latitude=0.0, longitude=-181.0) + + def test_to_sun_position_dict(self): + """to_sun_position_dict returns correct format.""" + loc = Location(latitude=57.7, longitude=12.0, altitude=100.0) + d = loc.to_sun_position_dict() + + assert d["latitude"] == 57.7 + assert d["longitude"] == 12.0 + assert d["altitude"] == 100.0 + + def test_from_epw(self, tmp_path): + """Location.from_epw extracts lat, lon, tz_offset, and elevation from EPW header.""" + epw_content = ( + "LOCATION,Madrid,ESP,NA,Test Data,NA,40.45,-3.55,1.0,667.0\n" + "DESIGN CONDITIONS,0\n" + "TYPICAL/EXTREME PERIODS,0\n" + "GROUND TEMPERATURES,0\n" + "HOLIDAYS/DAYLIGHT SAVINGS,No,0,0,0\n" + "COMMENTS 1,Test\n" + "COMMENTS 2,Test\n" + "DATA PERIODS,1,1,Data,Sunday, 1/ 1,12/31\n" + "2023,1,1,1,0,?9?9?9?9E0?9?9?9?9?9?9?9?9?9?9?9?9?9?9*_*9*9*9*9*9," + "5.0,2.0,80,101325,0,0,0,0,0,0,0,0,0,0,180,3.0,5,5,10.0,77777,9,999999999,0,0.0,0,88,0.0,0.0,0.0\n" + ) + epw_path = tmp_path / "madrid.epw" + epw_path.write_text(epw_content) + + loc = Location.from_epw(epw_path) + + assert loc.latitude == pytest.approx(40.45) + assert loc.longitude == pytest.approx(-3.55) + assert loc.utc_offset == 1 + assert loc.altitude == pytest.approx(667.0) + + def test_from_epw_file_not_found(self): + """Location.from_epw raises FileNotFoundError for missing file.""" + with pytest.raises(FileNotFoundError): + 
Location.from_epw("/nonexistent/path.epw") + + +class TestWeather: + """Tests for Weather dataclass.""" + + def test_basic_creation(self): + """Weather can be created with required fields.""" + dt_obj = datetime(2024, 7, 15, 12, 0) + weather = Weather(datetime=dt_obj, ta=25.0, rh=50.0, global_rad=800.0) + + assert weather.ta == 25.0 + assert weather.rh == 50.0 + assert weather.global_rad == 800.0 + assert weather.ws == 1.0 # default + + def test_with_optional_fields(self): + """Weather accepts optional wind speed and pressure.""" + dt_obj = datetime(2024, 7, 15, 12, 0) + weather = Weather(datetime=dt_obj, ta=25.0, rh=50.0, global_rad=800.0, ws=3.5, pressure=1020.0) + + assert weather.ws == 3.5 + assert weather.pressure == 1020.0 + + def test_validates_rh_range(self): + """Weather validates RH in [0, 100].""" + dt_obj = datetime(2024, 7, 15, 12, 0) + + with pytest.raises(ValueError, match="humidity"): + Weather(datetime=dt_obj, ta=25.0, rh=101.0, global_rad=800.0) + + with pytest.raises(ValueError, match="humidity"): + Weather(datetime=dt_obj, ta=25.0, rh=-1.0, global_rad=800.0) + + def test_validates_global_rad_positive(self): + """Weather validates global_rad >= 0.""" + dt_obj = datetime(2024, 7, 15, 12, 0) + + with pytest.raises(ValueError, match="radiation"): + Weather(datetime=dt_obj, ta=25.0, rh=50.0, global_rad=-100.0) + + def test_compute_derived_sun_position(self): + """compute_derived calculates sun position.""" + # Summer noon in Gothenburg + dt_obj = datetime(2024, 7, 15, 12, 0) + weather = Weather(datetime=dt_obj, ta=25.0, rh=50.0, global_rad=800.0) + location = Location(latitude=57.7, longitude=12.0, utc_offset=2) + + weather.compute_derived(location) + + # Sun should be high in the sky at noon in summer + assert weather.sun_altitude > 40 + assert weather.sun_altitude < 70 + # Azimuth at clock noon varies with longitude/timezone + # At Gothenburg (12°E, UTC+2), clock noon is before solar noon + assert 100 < weather.sun_azimuth < 220 + + def 
test_compute_derived_radiation_split(self): + """compute_derived splits global into direct/diffuse.""" + dt_obj = datetime(2024, 7, 15, 12, 0) + weather = Weather(datetime=dt_obj, ta=25.0, rh=50.0, global_rad=800.0) + location = Location(latitude=57.7, longitude=12.0, utc_offset=2) + + weather.compute_derived(location) + + # Direct + diffuse should be close to global (not exact due to geometry) + assert weather.direct_rad > 0 + assert weather.diffuse_rad > 0 + # Diffuse fraction typically 10-40% on clear day + diffuse_fraction = weather.diffuse_rad / weather.global_rad + assert 0.1 < diffuse_fraction < 0.6 + + def test_compute_derived_night(self): + """compute_derived handles nighttime correctly.""" + # Midnight + dt_obj = datetime(2024, 7, 15, 0, 0) + weather = Weather(datetime=dt_obj, ta=15.0, rh=80.0, global_rad=0.0) + location = Location(latitude=57.7, longitude=12.0, utc_offset=2) + + weather.compute_derived(location) + + # Sun below horizon at midnight + assert weather.sun_altitude < 0 + assert weather.direct_rad == 0.0 + assert weather.diffuse_rad == 0.0 + + def test_is_daytime_property(self): + """is_daytime returns correct value.""" + location = Location(latitude=57.7, longitude=12.0, utc_offset=2) + + # Noon + weather_day = Weather(datetime=datetime(2024, 7, 15, 12, 0), ta=25.0, rh=50.0, global_rad=800.0) + weather_day.compute_derived(location) + assert weather_day.is_daytime is True + + # Midnight + weather_night = Weather(datetime=datetime(2024, 7, 15, 0, 0), ta=15.0, rh=80.0, global_rad=0.0) + weather_night.compute_derived(location) + assert weather_night.is_daytime is False + + +class TestHumanParams: + """Tests for HumanParams dataclass.""" + + def test_default_values(self): + """HumanParams has sensible defaults.""" + human = HumanParams() + + assert human.posture == "standing" + assert human.abs_k == 0.7 + assert human.abs_l == 0.97 + assert human.age == 35 + assert human.weight == 75.0 + assert human.height == 1.75 + + def 
test_custom_values(self): + """HumanParams accepts custom values.""" + human = HumanParams(posture="sitting", abs_k=0.6, abs_l=0.95, age=45, weight=80.0, height=1.80) + + assert human.posture == "sitting" + assert human.abs_k == 0.6 + assert human.age == 45 + + def test_validates_posture(self): + """HumanParams validates posture.""" + with pytest.raises(ValueError, match="Posture"): + HumanParams(posture="lying") + + def test_validates_abs_k_range(self): + """HumanParams validates abs_k in (0, 1].""" + with pytest.raises(ValueError, match="abs_k"): + HumanParams(abs_k=0.0) + + with pytest.raises(ValueError, match="abs_k"): + HumanParams(abs_k=1.5) + + def test_validates_abs_l_range(self): + """HumanParams validates abs_l in (0, 1].""" + with pytest.raises(ValueError, match="abs_l"): + HumanParams(abs_l=0.0) + + +class TestSolweigResult: + """Tests for SolweigResult dataclass.""" + + def test_basic_creation(self): + """SolweigResult can be created with Tmrt.""" + tmrt = np.ones((10, 10)) * 40.0 + result = SolweigResult(tmrt=tmrt) + + assert result.tmrt.shape == (10, 10) + assert result.utci is None + assert result.pet is None + + def test_with_all_outputs(self): + """SolweigResult can hold all output grids.""" + shape = (10, 10) + result = SolweigResult( + tmrt=np.ones(shape) * 40.0, + shadow=np.zeros(shape), + kdown=np.ones(shape) * 500.0, + kup=np.ones(shape) * 50.0, + ldown=np.ones(shape) * 350.0, + lup=np.ones(shape) * 400.0, + utci=np.ones(shape) * 30.0, + pet=np.ones(shape) * 28.0, + ) + + assert result.shadow is not None + assert result.kdown is not None + assert result.utci is not None + assert result.pet is not None + + +class TestSolweigResultMethods: + """Tests for SolweigResult.compute_utci() and compute_pet() methods.""" + + def test_compute_utci_with_weather_object(self): + """compute_utci() works with Weather object.""" + tmrt = np.ones((10, 10), dtype=np.float32) * 35.0 + result = SolweigResult(tmrt=tmrt) + + weather = Weather( + 
datetime=datetime(2024, 7, 15, 12, 0), + ta=25.0, + rh=50.0, + global_rad=800.0, + ws=2.0, + ) + + utci = result.compute_utci(weather) + + assert utci.shape == (10, 10) + # UTCI should be in reasonable range for these conditions + assert np.all(utci > 20) and np.all(utci < 50) + + def test_compute_utci_with_individual_values(self): + """compute_utci() works with individual values.""" + tmrt = np.ones((10, 10), dtype=np.float32) * 35.0 + result = SolweigResult(tmrt=tmrt) + + utci = result.compute_utci(25.0, rh=50.0, wind=2.0) + + assert utci.shape == (10, 10) + assert np.all(utci > 20) and np.all(utci < 50) + + def test_compute_utci_default_wind(self): + """compute_utci() uses default wind speed of 1.0 m/s.""" + tmrt = np.ones((10, 10), dtype=np.float32) * 35.0 + result = SolweigResult(tmrt=tmrt) + + # No wind provided - should default to 1.0 + utci = result.compute_utci(25.0, rh=50.0) + + assert utci.shape == (10, 10) + assert np.all(np.isfinite(utci)) + + def test_compute_utci_requires_rh_with_float(self): + """compute_utci() raises ValueError when rh not provided with float ta.""" + tmrt = np.ones((10, 10), dtype=np.float32) * 35.0 + result = SolweigResult(tmrt=tmrt) + + with pytest.raises(ValueError, match="rh is required"): + result.compute_utci(25.0) + + def test_compute_pet_with_weather_object(self): + """compute_pet() works with Weather object.""" + tmrt = np.ones((5, 5), dtype=np.float32) * 35.0 # Smaller grid for speed + result = SolweigResult(tmrt=tmrt) + + weather = Weather( + datetime=datetime(2024, 7, 15, 12, 0), + ta=25.0, + rh=50.0, + global_rad=800.0, + ws=2.0, + ) + + pet = result.compute_pet(weather) + + assert pet.shape == (5, 5) + # PET should be in reasonable range + assert np.all(pet > 10) and np.all(pet < 50) + + def test_compute_pet_with_individual_values(self): + """compute_pet() works with individual values.""" + tmrt = np.ones((5, 5), dtype=np.float32) * 35.0 + result = SolweigResult(tmrt=tmrt) + + pet = result.compute_pet(25.0, rh=50.0, 
wind=2.0) + + assert pet.shape == (5, 5) + assert np.all(np.isfinite(pet)) + + def test_compute_pet_with_custom_human_params(self): + """compute_pet() accepts custom HumanParams.""" + tmrt = np.ones((5, 5), dtype=np.float32) * 35.0 + result = SolweigResult(tmrt=tmrt) + + weather = Weather( + datetime=datetime(2024, 7, 15, 12, 0), + ta=25.0, + rh=50.0, + global_rad=800.0, + ) + + pet = result.compute_pet(weather, human=HumanParams(weight=60, height=1.60)) + + assert pet.shape == (5, 5) + assert np.all(np.isfinite(pet)) + + def test_compute_pet_requires_rh_with_float(self): + """compute_pet() raises ValueError when rh not provided with float ta.""" + tmrt = np.ones((5, 5), dtype=np.float32) * 35.0 + result = SolweigResult(tmrt=tmrt) + + with pytest.raises(ValueError, match="rh is required"): + result.compute_pet(25.0) + + +@pytest.mark.slow +class TestConfigPrecedence: + """Tests for config precedence - explicit parameters override config values.""" + + def test_explicit_anisotropic_overrides_config(self): + """Explicit use_anisotropic_sky=False overrides config.use_anisotropic_sky=True.""" + + dsm = np.ones((20, 20), dtype=np.float32) * 5.0 + surface = SurfaceData(dsm=dsm, pixel_size=1.0, svf=make_mock_svf(dsm.shape)) + location = Location(latitude=57.7, longitude=12.0, utc_offset=1) + weather = Weather( + datetime=datetime(2024, 7, 15, 12, 0), + ta=25.0, + rh=50.0, + global_rad=800.0, + ) + + # Config says use anisotropic, but explicit param says don't + # This should NOT raise MissingPrecomputedData since explicit False wins + config = ModelConfig(use_anisotropic_sky=True) + result = calculate( + surface, + location, + weather, + config=config, + use_anisotropic_sky=False, # Explicit wins + ) + + assert result.tmrt is not None + + def test_explicit_human_overrides_config(self): + """Explicit human params override config.human.""" + + dsm = np.ones((20, 20), dtype=np.float32) * 5.0 + surface = SurfaceData(dsm=dsm, pixel_size=1.0, svf=make_mock_svf(dsm.shape)) + 
location = Location(latitude=57.7, longitude=12.0, utc_offset=1) + weather = Weather( + datetime=datetime(2024, 7, 15, 12, 0), + ta=25.0, + rh=50.0, + global_rad=800.0, + ) + + config_human = HumanParams(posture="sitting", abs_k=0.6) + explicit_human = HumanParams(posture="standing", abs_k=0.8) + + config = ModelConfig(human=config_human) + result = calculate( + surface, + location, + weather, + config=config, + human=explicit_human, # Should use standing, abs_k=0.8 + ) + + # Result should exist (test doesn't crash) + assert result.tmrt is not None + + def test_none_param_uses_config_value(self): + """When explicit param is None, config value is used.""" + + dsm = np.ones((20, 20), dtype=np.float32) * 5.0 + surface = SurfaceData(dsm=dsm, pixel_size=1.0, svf=make_mock_svf(dsm.shape)) + location = Location(latitude=57.7, longitude=12.0, utc_offset=1) + weather = Weather( + datetime=datetime(2024, 7, 15, 12, 0), + ta=25.0, + rh=50.0, + global_rad=800.0, + ) + + config_human = HumanParams(posture="sitting") + config = ModelConfig(human=config_human) + + # human=None means use config's human + result = calculate( + surface, + location, + weather, + config=config, + human=None, # Should fall back to config.human + ) + + assert result.tmrt is not None + + def test_no_config_uses_defaults(self): + """When no config provided, defaults are used.""" + dsm = np.ones((20, 20), dtype=np.float32) * 5.0 + surface = SurfaceData(dsm=dsm, pixel_size=1.0, svf=make_mock_svf(dsm.shape)) + location = Location(latitude=57.7, longitude=12.0, utc_offset=1) + weather = Weather( + datetime=datetime(2024, 7, 15, 12, 0), + ta=25.0, + rh=50.0, + global_rad=800.0, + ) + + # No config, no explicit params - should use defaults + result = calculate(surface, location, weather) + + assert result.tmrt is not None + + +@pytest.mark.slow +class TestCalculateIntegration: + """Integration tests for the calculate() function.""" + + def test_basic_calculation(self): + """calculate() returns valid Tmrt for 
simple DSM.""" + + # Simple flat DSM with one building + dsm = np.zeros((30, 30), dtype=np.float32) + dsm[10:20, 10:20] = 10.0 # 10m building + + surface = SurfaceData(dsm=dsm, pixel_size=1.0, svf=make_mock_svf(dsm.shape)) + location = Location(latitude=57.7, longitude=12.0, utc_offset=1) + weather = Weather( + datetime=datetime(2024, 7, 15, 12, 0), + ta=25.0, + rh=50.0, + global_rad=800.0, + ) + + result = calculate(surface, location, weather) + + # Check output structure + assert result.tmrt.shape == (30, 30) + assert result.shadow is not None + assert result.shadow.shape == (30, 30) + # UTCI/PET are not auto-computed - use post-processing functions + assert result.utci is None + assert result.pet is None + + # Check Tmrt is in reasonable range (use nanmin/nanmax to handle NaN) + # -50 is used as a sentinel for invalid/building pixels + assert np.nanmin(result.tmrt) >= -50 + assert np.nanmax(result.tmrt) < 80 + + def test_nighttime_calculation(self): + """calculate() handles nighttime (sun below horizon).""" + dsm = np.ones((20, 20), dtype=np.float32) * 5.0 + surface = SurfaceData(dsm=dsm, pixel_size=1.0, svf=make_mock_svf(dsm.shape)) + location = Location(latitude=57.7, longitude=12.0, utc_offset=1) + + # Midnight - sun below horizon + weather = Weather( + datetime=datetime(2024, 7, 15, 0, 0), + ta=15.0, + rh=80.0, + global_rad=0.0, + ) + + result = calculate(surface, location, weather) + + # At night, Tmrt is computed from full longwave balance (no shortwave). + # Under open sky (SVF~1) the cold sky pulls Tmrt well below Ta — typically + # ~5-10 C lower. This matches UMEP behaviour; the old Python shortcut + # (Tmrt=Ta) was wrong. 
+ valid = result.tmrt[np.isfinite(result.tmrt)] + assert np.all(valid < 15.0), "Night Tmrt should be below Ta under open sky" + assert np.all(valid > -5.0), "Night Tmrt should not be unreasonably cold" + # Shortwave must be zero at night + assert result.kdown is not None and np.allclose(result.kdown[np.isfinite(result.kdown)], 0.0, atol=1e-3) + assert result.kup is not None and np.allclose(result.kup[np.isfinite(result.kup)], 0.0, atol=1e-3) + + def test_explicit_anisotropic_requires_shadow_matrices(self): + """Explicit anisotropic request must fail without shadow matrices.""" + dsm = np.ones((20, 20), dtype=np.float32) * 5.0 + surface = SurfaceData(dsm=dsm, pixel_size=1.0, svf=make_mock_svf(dsm.shape)) + location = Location(latitude=57.7, longitude=12.0, utc_offset=1) + weather = Weather( + datetime=datetime(2024, 7, 15, 12, 0), + ta=25.0, + rh=50.0, + global_rad=800.0, + ) + + with pytest.raises(MissingPrecomputedData): + calculate(surface, location, weather, use_anisotropic_sky=True) + + def test_shadows_exist(self): + """Shadows are cast by buildings during daytime.""" + # Tall building that should cast shadows + dsm = np.zeros((40, 40), dtype=np.float32) + dsm[15:25, 15:25] = 20.0 # 20m building + + surface = SurfaceData(dsm=dsm, pixel_size=1.0, svf=make_mock_svf(dsm.shape)) + location = Location(latitude=57.7, longitude=12.0, utc_offset=1) + weather = Weather( + datetime=datetime(2024, 7, 15, 10, 0), # Morning - shadows to west + ta=20.0, + rh=60.0, + global_rad=600.0, + ) + + result = calculate(surface, location, weather) + + # Should have some shadow pixels (not all 0 or all 1) + assert result.shadow is not None + shadow_fraction = result.shadow.sum() / result.shadow.size + assert 0.1 < shadow_fraction < 0.9, "Expected partial shadowing" + + def test_utci_postprocessing(self): + """UTCI is computed via post-processing, not by default.""" + dsm = np.ones((20, 20), dtype=np.float32) * 5.0 + surface = SurfaceData(dsm=dsm, pixel_size=1.0, 
svf=make_mock_svf(dsm.shape)) + location = Location(latitude=57.7, longitude=12.0, utc_offset=1) + weather = Weather( + datetime=datetime(2024, 7, 15, 12, 0), + ta=25.0, + rh=50.0, + global_rad=800.0, + ) + + # Calculate Tmrt (UTCI not computed by default) + result = calculate(surface, location, weather) + + assert result.tmrt is not None + assert result.utci is None # Not auto-computed - use compute_utci_grid() + + def test_with_custom_human_params(self): + """Custom human parameters affect calculation.""" + dsm = np.ones((20, 20), dtype=np.float32) * 5.0 + surface = SurfaceData(dsm=dsm, pixel_size=1.0, svf=make_mock_svf(dsm.shape)) + location = Location(latitude=57.7, longitude=12.0, utc_offset=1) + weather = Weather( + datetime=datetime(2024, 7, 15, 12, 0), + ta=25.0, + rh=50.0, + global_rad=800.0, + ) + + # Different postures should give slightly different results + result_standing = calculate(surface, location, weather, human=HumanParams(posture="standing")) + result_sitting = calculate(surface, location, weather, human=HumanParams(posture="sitting")) + + # Results should exist and be valid + assert result_standing.tmrt is not None + assert result_sitting.tmrt is not None + + +@pytest.mark.slow +class TestTiledProcessing: + """Tests for tiled processing functions.""" + + def test_calculate_buffer_distance_basic(self): + """Buffer distance scales with building height.""" + # 10m building at 3° sun elevation: buffer = 10 / tan(3°) ≈ 191m + buffer = calculate_buffer_distance(10.0) + assert 180 < buffer < 200 + + # 50m building: buffer = 50 / tan(3°) ≈ 954m, under 1000m cap + buffer = calculate_buffer_distance(50.0) + assert 940 < buffer < 960 + + # 60m building: buffer = 60 / tan(3°) ≈ 1145m, capped at 1000m + buffer = calculate_buffer_distance(60.0) + assert buffer == 1000.0 # MAX_BUFFER_M + + def test_calculate_buffer_distance_zero_height(self): + """Zero height returns zero buffer.""" + assert calculate_buffer_distance(0.0) == 0.0 + assert 
calculate_buffer_distance(-5.0) == 0.0 + + def test_calculate_buffer_distance_custom_sun_elevation(self): + """Buffer distance changes with sun elevation.""" + # Higher sun = shorter shadows + buffer_3deg = calculate_buffer_distance(10.0, min_sun_elev_deg=3.0) + buffer_10deg = calculate_buffer_distance(10.0, min_sun_elev_deg=10.0) + + assert buffer_10deg < buffer_3deg + + def test_generate_tiles_basic(self): + """generate_tiles creates correct tile specs.""" + # generate_tiles takes rows, cols, tile_size, overlap + tiles = generate_tiles(rows=100, cols=100, tile_size=50, overlap=10) + + # 100x100 with tile_size=50 should give 4 tiles (2x2 grid) + assert len(tiles) == 4 + + # Check first tile + tile0 = tiles[0] + assert tile0.row_start == 0 + assert tile0.col_start == 0 + assert tile0.core_shape == (50, 50) + + def test_generate_tiles_overlap(self): + """Tiles have correct overlap at edges.""" + tiles = generate_tiles(rows=100, cols=100, tile_size=50, overlap=10) + + # First tile (top-left corner) has no top/left overlap + tile0 = tiles[0] + assert tile0.overlap_top == 0 + assert tile0.overlap_left == 0 + assert tile0.overlap_bottom == 10 + assert tile0.overlap_right == 10 + + # Last tile (bottom-right corner) has no bottom/right overlap + tile3 = tiles[3] + assert tile3.overlap_top == 10 + assert tile3.overlap_left == 10 + assert tile3.overlap_bottom == 0 + assert tile3.overlap_right == 0 + + def test_generate_tiles_single_tile(self): + """Small raster generates single tile.""" + # 30x30 raster smaller than tile_size should give 1 tile + tiles = generate_tiles(rows=30, cols=30, tile_size=256, overlap=10) + + assert len(tiles) == 1 + assert tiles[0].core_shape == (30, 30) + + def test_tiled_vs_nontiled_parity(self): + """Tiled calculation produces same results as non-tiled.""" + # Create a test DSM with a building + dsm = np.zeros((60, 60), dtype=np.float32) + dsm[20:40, 20:40] = 15.0 # 15m building + + surface = SurfaceData(dsm=dsm, pixel_size=1.0, 
svf=make_mock_svf(dsm.shape)) + location = Location(latitude=57.7, longitude=12.0, utc_offset=1) + weather = Weather( + datetime=datetime(2024, 7, 15, 12, 0), + ta=25.0, + rh=50.0, + global_rad=800.0, + ) + + # Run both methods (UTCI/PET not auto-computed in new API) + result_nontiled = calculate(surface, location, weather) + result_tiled = calculate_tiled(surface, location, weather, tile_size=256) + + # Compare Tmrt + valid = np.isfinite(result_nontiled.tmrt) & np.isfinite(result_tiled.tmrt) + assert valid.sum() > 0, "No valid pixels to compare" + + diff = np.abs(result_tiled.tmrt[valid] - result_nontiled.tmrt[valid]) + mean_diff = diff.mean() + max_diff = diff.max() + + assert mean_diff < 0.01, f"Mean Tmrt diff {mean_diff:.4f}°C exceeds tolerance" + assert max_diff < 0.1, f"Max Tmrt diff {max_diff:.4f}°C exceeds tolerance" + + # Compare shadow (should be identical) + assert result_tiled.shadow is not None + assert result_nontiled.shadow is not None + shadow_match = np.allclose(result_tiled.shadow, result_nontiled.shadow, equal_nan=True) + assert shadow_match, "Shadow grids differ between tiled and non-tiled" + + def test_calculate_tiled_with_building(self): + """Tiled calculation handles buildings correctly.""" + # DSM with a tall building that casts shadows + dsm = np.zeros((80, 80), dtype=np.float32) + dsm[30:50, 30:50] = 20.0 # 20m building + + surface = SurfaceData(dsm=dsm, pixel_size=1.0, svf=make_mock_svf(dsm.shape)) + location = Location(latitude=57.7, longitude=12.0, utc_offset=1) + weather = Weather( + datetime=datetime(2024, 7, 15, 10, 0), # Morning + ta=22.0, + rh=55.0, + global_rad=600.0, + ) + + result = calculate_tiled(surface, location, weather, tile_size=256) + + # Check output structure + assert result.tmrt.shape == (80, 80) + assert result.shadow is not None + assert result.shadow.shape == (80, 80) + # UTCI not auto-computed - use post-processing if needed + assert result.utci is None + + # Check shadows exist - allow wider range since shadow 
fraction depends on + # sun position (morning sun creates longer shadows) + shadow_fraction = result.shadow.sum() / result.shadow.size + assert 0.05 < shadow_fraction < 0.95, f"Unexpected shadow fraction: {shadow_fraction}" + + def test_calculate_tiled_fallback_to_nontiled(self): + """Small rasters fall back to non-tiled calculation.""" + # Small DSM that fits in a single tile + dsm = np.ones((40, 40), dtype=np.float32) * 5.0 + surface = SurfaceData(dsm=dsm, pixel_size=1.0, svf=make_mock_svf(dsm.shape)) + location = Location(latitude=57.7, longitude=12.0, utc_offset=1) + weather = Weather( + datetime=datetime(2024, 7, 15, 12, 0), + ta=25.0, + rh=50.0, + global_rad=800.0, + ) + + # This should work without errors (falls back to non-tiled) + result = calculate_tiled(surface, location, weather, tile_size=256) + + assert result.tmrt.shape == (40, 40) + assert result.shadow is not None + assert result.shadow.shape == (40, 40) + + +class TestPreprocessing: + """Tests for CDSM/TDSM preprocessing and transmissivity calculation.""" + + def test_tdsm_auto_generation(self): + """TDSM is auto-generated from CDSM * trunk_ratio when not provided.""" + dsm = np.ones((10, 10), dtype=np.float32) * 100.0 + cdsm = np.ones((10, 10), dtype=np.float32) * 5.0 # 5m relative vegetation height + + surface = SurfaceData(dsm=dsm, cdsm=cdsm, trunk_ratio=0.25) + + # Before preprocessing, TDSM should be None + assert surface.tdsm is None + + # After preprocessing, TDSM should be auto-generated + surface.preprocess() + + assert surface.tdsm is not None + # TDSM should be boosted: base + (cdsm * trunk_ratio) = 100 + (5 * 0.25) = 101.25 + # But only where cdsm > threshold (0.1) + expected_tdsm = 100.0 + 5.0 * 0.25 # 101.25 + assert np.allclose(surface.tdsm, expected_tdsm, atol=0.01) + + def test_cdsm_boosting_with_dem(self): + """CDSM is boosted to absolute height using DEM as base.""" + dsm = np.ones((10, 10), dtype=np.float32) * 110.0 # DSM includes building + dem = np.ones((10, 10), 
dtype=np.float32) * 100.0 # Ground level + cdsm = np.ones((10, 10), dtype=np.float32) * 8.0 # 8m relative veg height + + surface = SurfaceData(dsm=dsm, cdsm=cdsm, dem=dem) + surface.preprocess() + + # CDSM should now be DEM + relative_cdsm = 100 + 8 = 108 + assert surface.cdsm is not None + assert np.allclose(surface.cdsm, 108.0, atol=0.01) + + def test_cdsm_boosting_without_dem(self): + """CDSM is boosted using DSM as base when DEM not provided.""" + dsm = np.ones((10, 10), dtype=np.float32) * 105.0 + cdsm = np.ones((10, 10), dtype=np.float32) * 6.0 # 6m relative veg height + + surface = SurfaceData(dsm=dsm, cdsm=cdsm) + surface.preprocess() + + # CDSM should now be DSM + relative_cdsm = 105 + 6 = 111 + assert surface.cdsm is not None + assert np.allclose(surface.cdsm, 111.0, atol=0.01) + + def test_preprocess_zeros_below_threshold(self): + """Preprocessing clamps vegetation below 0.1m threshold to base elevation.""" + dsm = np.ones((10, 10), dtype=np.float32) * 100.0 + cdsm = np.array([[0.05, 0.5], [1.0, 0.0]], dtype=np.float32) # Some below threshold + cdsm = np.pad(cdsm, ((0, 8), (0, 8)), constant_values=0.0) + + surface = SurfaceData(dsm=dsm, cdsm=cdsm) + surface.preprocess() + + # CDSM is now absolute elevation; below-threshold values clamped to base (DSM=100) + assert surface.cdsm is not None + assert surface.cdsm[0, 0] == 100.0 # Was 0.05 relative, below threshold → base + assert surface.cdsm[0, 1] > 100.0 # Was 0.5 relative, above threshold → 100.5 + assert surface.cdsm[1, 0] > 100.0 # Was 1.0 relative, above threshold → 101.0 + assert surface.cdsm[1, 1] == 100.0 # Was 0.0 relative, below threshold → base + + def test_preprocess_idempotent(self): + """Calling preprocess() multiple times has no effect after first call.""" + dsm = np.ones((10, 10), dtype=np.float32) * 100.0 + cdsm = np.ones((10, 10), dtype=np.float32) * 5.0 + + surface = SurfaceData(dsm=dsm, cdsm=cdsm) + surface.preprocess() + assert surface.cdsm is not None + cdsm_after_first = 
surface.cdsm.copy() + + surface.preprocess() # Second call + assert surface.cdsm is not None + assert np.array_equal(surface.cdsm, cdsm_after_first) + + def test_transmissivity_leaf_on_summer(self): + """Summer (leaf on) uses low transmissivity.""" + from solweig.components.shadows import compute_transmissivity + + # July is within typical leaf-on period (DOY 100-300) + psi = compute_transmissivity(doy=180) + assert psi == 0.03 + + def test_transmissivity_leaf_off_winter(self): + """Winter (leaf off) uses high transmissivity.""" + from solweig.components.shadows import compute_transmissivity + + # January is outside typical leaf-on period + psi = compute_transmissivity(doy=30) + assert psi == 0.5 + + def test_transmissivity_leaf_off_late_autumn(self): + """Late autumn (leaf off) uses high transmissivity.""" + from solweig.components.shadows import compute_transmissivity + + # December is outside typical leaf-on period + psi = compute_transmissivity(doy=350) + assert psi == 0.5 + + def test_transmissivity_conifer_always_leaf_on(self): + """Conifers always use leaf-on transmissivity regardless of season.""" + from solweig.components.shadows import compute_transmissivity + + # Winter with conifer flag should still use leaf-on value + psi = compute_transmissivity(doy=30, conifer=True) + assert psi == 0.03 + + # Summer with conifer should also be leaf-on + psi = compute_transmissivity(doy=180, conifer=True) + assert psi == 0.03 + + def test_transmissivity_boundary_days(self): + """Test behavior at leaf on/off boundary days.""" + from solweig.components.shadows import compute_transmissivity + + # Default boundaries are 100 and 300 + # Day 100 is NOT included (first_day < doy < last_day) + psi_day_100 = compute_transmissivity(doy=100) + assert psi_day_100 == 0.5 # Not yet leaf-on + + # Day 101 should be leaf-on + psi_day_101 = compute_transmissivity(doy=101) + assert psi_day_101 == 0.03 + + # Day 299 should be leaf-on + psi_day_299 = compute_transmissivity(doy=299) + 
assert psi_day_299 == 0.03 + + # Day 300 is NOT included + psi_day_300 = compute_transmissivity(doy=300) + assert psi_day_300 == 0.5 # No longer leaf-on + + def test_per_layer_height_defaults(self): + """Per-layer height flags have correct defaults.""" + dsm = np.ones((10, 10), dtype=np.float32) * 100.0 + surface = SurfaceData(dsm=dsm) + assert surface.dsm_relative is False + assert surface.cdsm_relative is True + assert surface.tdsm_relative is True + + def test_per_layer_height_explicit(self): + """Per-layer height flags can be set explicitly.""" + dsm = np.ones((10, 10), dtype=np.float32) * 100.0 + cdsm = np.ones((10, 10), dtype=np.float32) * 105.0 # Absolute heights + + surface = SurfaceData(dsm=dsm, cdsm=cdsm, cdsm_relative=False) + assert surface.cdsm_relative is False + + def test_looks_like_relative_heights_true(self): + """_looks_like_relative_heights returns True for typical relative data.""" + # DSM at ~100m elevation, CDSM with 5m vegetation (relative) + dsm = np.ones((10, 10), dtype=np.float32) * 100.0 + cdsm = np.ones((10, 10), dtype=np.float32) * 5.0 + + surface = SurfaceData(dsm=dsm, cdsm=cdsm) + assert surface._looks_like_relative_heights() is True + + def test_looks_like_relative_heights_false_absolute(self): + """_looks_like_relative_heights returns False for absolute heights.""" + # DSM at ~100m, CDSM at ~105m (absolute, trees on terrain) + dsm = np.ones((10, 10), dtype=np.float32) * 100.0 + cdsm = np.ones((10, 10), dtype=np.float32) * 105.0 + + surface = SurfaceData(dsm=dsm, cdsm=cdsm) + assert surface._looks_like_relative_heights() is False + + def test_looks_like_relative_heights_false_coastal(self): + """_looks_like_relative_heights handles coastal areas near sea level.""" + # DSM near sea level, CDSM with 5m vegetation (relative) + # This is a tricky case - low elevation could be absolute or relative + dsm = np.ones((10, 10), dtype=np.float32) * 5.0 + cdsm = np.ones((10, 10), dtype=np.float32) * 8.0 + + surface = SurfaceData(dsm=dsm, 
cdsm=cdsm) + # At low elevations, heuristic is inconclusive - returns False to avoid false positives + assert surface._looks_like_relative_heights() is False + + def test_preprocessing_warning_issued(self, caplog): + """Warning is issued when CDSM looks relative but preprocess not called.""" + import logging + + # DSM at ~100m elevation, CDSM with 5m relative vegetation + dsm = np.ones((10, 10), dtype=np.float32) * 100.0 + cdsm = np.ones((10, 10), dtype=np.float32) * 5.0 + + surface = SurfaceData(dsm=dsm, cdsm=cdsm, cdsm_relative=True) + + with caplog.at_level(logging.WARNING): + surface._check_preprocessing_needed() + + assert "preprocess() was not called" in caplog.text + assert "relative vegetation heights" in caplog.text + + def test_preprocessing_warning_not_issued_after_preprocess(self, caplog): + """No warning after preprocess() is called.""" + import logging + + dsm = np.ones((10, 10), dtype=np.float32) * 100.0 + cdsm = np.ones((10, 10), dtype=np.float32) * 5.0 + + surface = SurfaceData(dsm=dsm, cdsm=cdsm, cdsm_relative=True) + surface.preprocess() # This sets _preprocessed = True + + with caplog.at_level(logging.WARNING): + surface._check_preprocessing_needed() + + assert "preprocess() was not called" not in caplog.text + + def test_preprocessing_warning_not_issued_when_cdsm_relative_false(self, caplog): + """No warning when cdsm_relative=False (user says data is absolute).""" + import logging + + dsm = np.ones((10, 10), dtype=np.float32) * 100.0 + cdsm = np.ones((10, 10), dtype=np.float32) * 5.0 # Looks relative but user says no + + surface = SurfaceData(dsm=dsm, cdsm=cdsm, cdsm_relative=False) + + with caplog.at_level(logging.WARNING): + surface._check_preprocessing_needed() + + assert "preprocess() was not called" not in caplog.text + + +if __name__ == "__main__": + pytest.main([__file__, "-v"]) diff --git a/tests/test_buffer_pool.py b/tests/test_buffer_pool.py new file mode 100644 index 0000000..c62ca3e --- /dev/null +++ b/tests/test_buffer_pool.py @@ 
-0,0 +1,247 @@ +"""Tests for buffer pool functionality.""" + +import numpy as np +from solweig.buffers import BufferPool, TimestepBuffers, ensure_float32_inplace + + +class TestBufferPool: + """Tests for BufferPool class.""" + + def test_creates_buffer_on_first_get(self): + """First get() call allocates a new buffer.""" + pool = BufferPool((100, 100)) + buf = pool.get("test") + + assert buf.shape == (100, 100) + assert buf.dtype == np.float32 + assert pool.num_buffers == 1 + + def test_reuses_buffer_on_subsequent_get(self): + """Subsequent get() calls return the same buffer.""" + pool = BufferPool((100, 100)) + + buf1 = pool.get("test") + buf1[0, 0] = 42.0 + + buf2 = pool.get("test") + + # Should be the same underlying buffer + assert buf1 is buf2 + assert buf2[0, 0] == 42.0 + + def test_get_zeros_fills_with_zeros(self): + """get_zeros() returns a zeroed buffer.""" + pool = BufferPool((100, 100)) + + # First write some data + buf1 = pool.get("test") + buf1.fill(999.0) + + # get_zeros should zero it + buf2 = pool.get_zeros("test") + assert np.all(buf2 == 0.0) + + def test_get_full_fills_with_value(self): + """get_full() returns buffer filled with specified value.""" + pool = BufferPool((100, 100)) + + buf = pool.get_full("test", 3.14) + assert np.allclose(buf, 3.14) + + def test_different_names_get_different_buffers(self): + """Different buffer names get separate allocations.""" + pool = BufferPool((100, 100)) + + buf1 = pool.get("buffer_a") + buf2 = pool.get("buffer_b") + + assert buf1 is not buf2 + assert pool.num_buffers == 2 + + def test_ensure_float32_no_copy_when_already_float32(self): + """ensure_float32 returns same object if already float32.""" + pool = BufferPool((100, 100)) + + arr = np.zeros((100, 100), dtype=np.float32) + result = pool.ensure_float32(arr, "test") + + assert result is arr # Same object, no copy + + def test_ensure_float32_converts_other_dtypes(self): + """ensure_float32 converts non-float32 arrays.""" + pool = BufferPool((100, 100)) + + 
arr = np.zeros((100, 100), dtype=np.float64) + arr[0, 0] = 1.5 + + result = pool.ensure_float32(arr, "test") + + assert result.dtype == np.float32 + assert result[0, 0] == 1.5 + assert result is not arr # Different object + + def test_ensure_float32_uses_pooled_buffer(self): + """ensure_float32 reuses pooled buffer for conversion.""" + pool = BufferPool((100, 100)) + + arr1 = np.ones((100, 100), dtype=np.float64) + arr2 = np.ones((100, 100), dtype=np.float64) * 2 + + result1 = pool.ensure_float32(arr1, "conv") + result2 = pool.ensure_float32(arr2, "conv") + + # Should reuse the same pooled buffer + assert result1 is result2 + # Second call overwrote the values + assert np.all(result2 == 2.0) + + def test_memory_bytes_calculation(self): + """memory_bytes returns correct total.""" + pool = BufferPool((100, 100), dtype=np.float32) + + pool.get("a") + pool.get("b") + + # 2 buffers * 100 * 100 * 4 bytes + expected = 2 * 100 * 100 * 4 + assert pool.memory_bytes == expected + + def test_clear_removes_all_buffers(self): + """clear() removes all buffers from pool.""" + pool = BufferPool((100, 100)) + pool.get("a") + pool.get("b") + + assert pool.num_buffers == 2 + + pool.clear() + + assert pool.num_buffers == 0 + assert pool.memory_bytes == 0 + + def test_custom_dtype(self): + """Pool respects custom dtype.""" + pool = BufferPool((50, 50), dtype=np.float64) + buf = pool.get("test") + + assert buf.dtype == np.float64 + + def test_repr_shows_useful_info(self): + """repr() shows shape, buffers, and memory.""" + pool = BufferPool((100, 100)) + pool.get("test") + + repr_str = repr(pool) + assert "shape=(100, 100)" in repr_str + assert "buffers=1" in repr_str + assert "memory=" in repr_str + + +class TestTimestepBuffers: + """Tests for TimestepBuffers context manager.""" + + def test_provides_pool_in_context(self): + """Context manager provides BufferPool.""" + with TimestepBuffers((100, 100)) as pool: + assert isinstance(pool, BufferPool) + buf = pool.get_zeros("test") + assert 
buf.shape == (100, 100) + + def test_clears_buffers_on_exit(self): + """Buffers are cleared when exiting context.""" + buffers = TimestepBuffers((100, 100)) + + with buffers as pool: + pool.get("a") + pool.get("b") + assert pool.num_buffers == 2 + + # After context exit, pool should be cleared + assert buffers.pool.num_buffers == 0 + + +class TestEnsureFloat32Inplace: + """Tests for ensure_float32_inplace utility.""" + + def test_returns_same_if_float32(self): + """Returns same object if already float32.""" + arr = np.array([1.0, 2.0, 3.0], dtype=np.float32) + result = ensure_float32_inplace(arr) + + assert result is arr + + def test_converts_float64(self): + """Converts float64 to float32.""" + arr = np.array([1.0, 2.0, 3.0], dtype=np.float64) + result = ensure_float32_inplace(arr) + + assert result.dtype == np.float32 + assert result is not arr + + def test_converts_int(self): + """Converts integer arrays to float32.""" + arr = np.array([1, 2, 3], dtype=np.int32) + result = ensure_float32_inplace(arr) + + assert result.dtype == np.float32 + np.testing.assert_array_equal(result, [1.0, 2.0, 3.0]) + + +class TestBufferPoolPerformance: + """Performance-related tests for buffer pool.""" + + def test_pool_get_faster_than_empty_allocation(self): + """Pooled get() should be faster than repeated np.empty.""" + import time + + shape = (500, 500) + iterations = 100 + + # Time repeated empty allocations + start = time.perf_counter() + for _ in range(iterations): + arr = np.empty(shape, dtype=np.float32) + arr[0, 0] = 1.0 # Prevent optimization + alloc_time = time.perf_counter() - start + + # Time pooled buffers (get without zeroing) + pool = BufferPool(shape) + start = time.perf_counter() + for _ in range(iterations): + arr = pool.get("test") + arr[0, 0] = 1.0 # Prevent optimization + pool_time = time.perf_counter() - start + + # Pool should be faster since it avoids allocation + # But we use a generous margin since timing can vary + # The main benefit is reducing GC 
pressure, which is hard to measure + assert pool_time < alloc_time * 10.0, ( + f"Pool ({pool_time:.4f}s) should not be dramatically slower than allocation ({alloc_time:.4f}s)" + ) + + def test_ensure_float32_avoids_copy_when_possible(self): + """ensure_float32 should avoid copies for float32 input.""" + import time + + shape = (500, 500) + iterations = 100 + pool = BufferPool(shape) + + # Create float32 array + arr = np.zeros(shape, dtype=np.float32) + + # Time ensure_float32 (should be nearly instant - no copy) + start = time.perf_counter() + for _ in range(iterations): + result = pool.ensure_float32(arr, "test") + assert result is arr # Same object + no_copy_time = time.perf_counter() - start + + # Time astype (always copies) + start = time.perf_counter() + for _ in range(iterations): + result = arr.astype(np.float32) + copy_time = time.perf_counter() - start + + # No-copy should be much faster + assert no_copy_time < copy_time, f"No-copy ({no_copy_time:.4f}s) should be faster than copy ({copy_time:.4f}s)" diff --git a/tests/test_docs_consistency.py b/tests/test_docs_consistency.py new file mode 100644 index 0000000..32ff74b --- /dev/null +++ b/tests/test_docs_consistency.py @@ -0,0 +1,48 @@ +"""Lightweight checks for user-facing docs consistency.""" + +from pathlib import Path + +REPO_ROOT = Path(__file__).resolve().parents[1] + + +def _read(rel_path: str) -> str: + return (REPO_ROOT / rel_path).read_text(encoding="utf-8") + + +def test_basic_usage_does_not_claim_isotropic_default(): + """Basic usage should not claim isotropic is the default behavior.""" + text = _read("docs/guide/basic-usage.md") + assert "By default, SOLWEIG treats diffuse sky radiation as uniform (isotropic)." 
not in text + assert "use_anisotropic_sky=True" in text + assert "MissingPrecomputedData" in text + + +def test_timeseries_docs_describe_timestep_outputs(): + """Timeseries guide should document the timestep_outputs parameter.""" + text = _read("docs/guide/timeseries.md") + assert "timestep_outputs" in text + + +def test_timeseries_docs_describe_report_and_plot(): + """Timeseries guide should document report() and plot().""" + text = _read("docs/guide/timeseries.md") + assert ".report()" in text + assert ".plot(" in text + assert "summary.timeseries" in text + + +def test_readme_uses_report_method(): + """README should use summary.report() not print(summary).""" + text = _read("README.md") + assert "summary.report()" in text + + +def test_index_and_readme_state_svf_and_aniso_preconditions(): + """Landing docs should state SVF and explicit anisotropic prerequisites.""" + index_text = _read("docs/index.md") + readme_text = _read("README.md") + + assert "SVF Rule" in index_text + assert "Anisotropic Rule" in index_text + assert "requires SVF to already be prepared" in readme_text + assert "use_anisotropic_sky=True" in readme_text diff --git a/tests/test_errors.py b/tests/test_errors.py new file mode 100644 index 0000000..85806b0 --- /dev/null +++ b/tests/test_errors.py @@ -0,0 +1,335 @@ +""" +Tests for structured error handling and validate_inputs(). + +These tests verify that the error system provides actionable messages +and that validate_inputs() catches errors before expensive calculations. 
+""" + +from datetime import datetime + +import numpy as np +import pytest +from conftest import make_mock_svf +from solweig.api import ( + GridShapeMismatch, + InvalidSurfaceData, + MissingPrecomputedData, + SolweigError, + SurfaceData, + Weather, + validate_inputs, +) + + +class TestSolweigErrorHierarchy: + """Tests for the error class hierarchy.""" + + def test_solweig_error_is_base_exception(self): + """SolweigError can be used to catch all SOLWEIG errors.""" + error = SolweigError("Test error") + assert isinstance(error, Exception) + + def test_invalid_surface_data_has_fields(self): + """InvalidSurfaceData has optional field, expected, got attributes.""" + error = InvalidSurfaceData( + "Grid mismatch", + field="cdsm", + expected="(100, 100)", + got="(50, 50)", + ) + assert error.field == "cdsm" + assert error.expected == "(100, 100)" + assert error.got == "(50, 50)" + + def test_grid_shape_mismatch_is_invalid_surface_data(self): + """GridShapeMismatch is a subclass of InvalidSurfaceData.""" + error = GridShapeMismatch("cdsm", (100, 100), (50, 50)) + assert isinstance(error, InvalidSurfaceData) + assert isinstance(error, SolweigError) + + def test_grid_shape_mismatch_has_shapes(self): + """GridShapeMismatch provides expected_shape and actual_shape.""" + error = GridShapeMismatch("cdsm", (100, 100), (50, 50)) + assert error.field == "cdsm" + assert error.expected_shape == (100, 100) + assert error.actual_shape == (50, 50) + assert "(100, 100)" in str(error) + assert "(50, 50)" in str(error) + + def test_missing_precomputed_data_has_suggestion(self): + """MissingPrecomputedData can include a suggestion.""" + error = MissingPrecomputedData( + "shadow_matrices required for anisotropic sky", + suggestion="Set use_anisotropic_sky=False", + ) + assert error.what == "shadow_matrices required for anisotropic sky" + assert error.suggestion == "Set use_anisotropic_sky=False" + assert "shadow_matrices" in str(error) + assert "Set use_anisotropic_sky=False" in str(error) + 
+ +class TestValidateInputs: + """Tests for the validate_inputs() preflight function.""" + + def test_valid_surface_returns_empty_warnings(self): + """Valid surface data returns no warnings.""" + dsm = np.ones((100, 100), dtype=np.float32) + surface = SurfaceData(dsm=dsm, svf=make_mock_svf((100, 100))) + + warnings = validate_inputs(surface) + + assert warnings == [] + + def test_missing_svf_raises_error(self): + """Surface without SVF raises MissingPrecomputedData.""" + dsm = np.ones((50, 50), dtype=np.float32) + surface = SurfaceData(dsm=dsm) + + with pytest.raises(MissingPrecomputedData) as excinfo: + validate_inputs(surface) + + assert "SVF" in str(excinfo.value) + assert "compute_svf()" in str(excinfo.value) + + def test_mismatched_cdsm_raises_grid_shape_mismatch(self): + """CDSM with wrong shape raises GridShapeMismatch.""" + dsm = np.ones((100, 100), dtype=np.float32) + cdsm = np.ones((50, 50), dtype=np.float32) # Wrong shape + + surface = SurfaceData(dsm=dsm, cdsm=cdsm) + + with pytest.raises(GridShapeMismatch) as excinfo: + validate_inputs(surface) + + assert excinfo.value.field == "cdsm" + assert excinfo.value.expected_shape == (100, 100) + assert excinfo.value.actual_shape == (50, 50) + + def test_mismatched_dem_raises_grid_shape_mismatch(self): + """DEM with wrong shape raises GridShapeMismatch.""" + dsm = np.ones((100, 100), dtype=np.float32) + dem = np.ones((100, 50), dtype=np.float32) # Wrong shape + + surface = SurfaceData(dsm=dsm, dem=dem) + + with pytest.raises(GridShapeMismatch) as excinfo: + validate_inputs(surface) + + assert excinfo.value.field == "dem" + + def test_anisotropic_without_shadow_matrices_raises_error(self): + """Anisotropic sky without shadow matrices raises MissingPrecomputedData.""" + dsm = np.ones((50, 50), dtype=np.float32) + surface = SurfaceData(dsm=dsm, svf=make_mock_svf((50, 50))) + + with pytest.raises(MissingPrecomputedData) as excinfo: + validate_inputs(surface, use_anisotropic_sky=True) + + assert "shadow_matrices" in 
str(excinfo.value) + + def test_unpreprocessed_cdsm_warning(self): + """Warning issued for unpreprocessed CDSM with cdsm_relative=True.""" + dsm = np.ones((50, 50), dtype=np.float32) * 10.0 + cdsm = np.ones((50, 50), dtype=np.float32) * 5.0 # Relative heights + + surface = SurfaceData(dsm=dsm, cdsm=cdsm, cdsm_relative=True, svf=make_mock_svf((50, 50))) + + warnings = validate_inputs(surface) + + assert any("preprocess()" in w for w in warnings) + + def test_no_warning_after_preprocess(self): + """No warning when preprocess() has been called.""" + dsm = np.ones((50, 50), dtype=np.float32) * 10.0 + cdsm = np.ones((50, 50), dtype=np.float32) * 5.0 + + surface = SurfaceData(dsm=dsm, cdsm=cdsm, cdsm_relative=True, svf=make_mock_svf((50, 50))) + surface.preprocess() + + warnings = validate_inputs(surface) + + # No CDSM preprocessing warning + assert not any("preprocess()" in w for w in warnings) + + def test_extreme_temperature_warning(self): + """Warning issued for extreme temperature values.""" + dsm = np.ones((20, 20), dtype=np.float32) + surface = SurfaceData(dsm=dsm, svf=make_mock_svf((20, 20))) + weather = Weather( + datetime=datetime(2024, 7, 15, 12, 0), + ta=65.0, # Extreme temperature + rh=50.0, + global_rad=800.0, + ) + + warnings = validate_inputs(surface, weather=weather) + + assert any("ta=" in w and "outside typical range" in w for w in warnings) + + def test_excessive_radiation_warning(self): + """Warning issued for radiation exceeding solar constant.""" + dsm = np.ones((20, 20), dtype=np.float32) + surface = SurfaceData(dsm=dsm, svf=make_mock_svf((20, 20))) + weather = Weather( + datetime=datetime(2024, 7, 15, 12, 0), + ta=25.0, + rh=50.0, + global_rad=1500.0, # Exceeds solar constant (~1361 W/m²) + ) + + warnings = validate_inputs(surface, weather=weather) + + assert any("global_rad=" in w and "solar constant" in w for w in warnings) + + def test_validates_weather_list(self): + """validate_inputs() accepts a list of Weather objects.""" + dsm = 
np.ones((20, 20), dtype=np.float32) + surface = SurfaceData(dsm=dsm, svf=make_mock_svf((20, 20))) + weather_list = [ + Weather(datetime=datetime(2024, 7, 15, 12, 0), ta=25.0, rh=50.0, global_rad=800.0), + Weather(datetime=datetime(2024, 7, 15, 13, 0), ta=70.0, rh=50.0, global_rad=750.0), # Extreme + ] + + warnings = validate_inputs(surface, weather=weather_list) + + # Should warn about the second weather entry + assert any("[1]" in w and "ta=" in w for w in warnings) + + def test_no_warnings_for_normal_weather(self): + """No warnings for normal weather values.""" + dsm = np.ones((20, 20), dtype=np.float32) + surface = SurfaceData(dsm=dsm, svf=make_mock_svf((20, 20))) + weather = Weather( + datetime=datetime(2024, 7, 15, 12, 0), + ta=25.0, + rh=50.0, + global_rad=800.0, + ) + + warnings = validate_inputs(surface, weather=weather) + + assert warnings == [] + + +class TestHeightValidationWarnings: + """Tests for DSM/CDSM/TDSM height sanity warnings.""" + + def test_warns_dsm_extreme_height_range(self): + """DSM with >500m height range triggers warning.""" + dsm = np.ones((50, 50), dtype=np.float32) * 10.0 + dsm[0, 0] = 600.0 # Creates 590m range + surface = SurfaceData(dsm=dsm, svf=make_mock_svf((50, 50))) + + warnings = validate_inputs(surface) + + assert any("height range" in w and "590" in w for w in warnings) + + def test_warns_dsm_high_minimum_no_dem(self): + """DSM with min >100m and no DEM triggers warning.""" + dsm = np.ones((50, 50), dtype=np.float32) * 200.0 + dsm[25, 25] = 210.0 # Some buildings + surface = SurfaceData(dsm=dsm, svf=make_mock_svf((50, 50))) + + warnings = validate_inputs(surface) + + assert any("minimum value is 200m" in w and "no DEM" in w for w in warnings) + + def test_no_warning_dsm_high_minimum_with_dem(self): + """DSM with min >100m but DEM provided does not warn about elevation.""" + dsm = np.ones((50, 50), dtype=np.float32) * 200.0 + dsm[25, 25] = 210.0 + dem = np.ones((50, 50), dtype=np.float32) * 195.0 + surface = 
SurfaceData(dsm=dsm, dem=dem, svf=make_mock_svf((50, 50))) + + warnings = validate_inputs(surface) + + assert not any("no DEM" in w for w in warnings) + + def test_warns_cdsm_looks_absolute_with_relative_flag(self): + """CDSM with min non-zero >50m and cdsm_relative=True triggers warning.""" + dsm = np.ones((50, 50), dtype=np.float32) * 130.0 + cdsm = np.zeros((50, 50), dtype=np.float32) + cdsm[10:20, 10:20] = 120.0 # Looks like absolute elevation, not tree height + surface = SurfaceData(dsm=dsm, cdsm=cdsm, cdsm_relative=True, svf=make_mock_svf((50, 50))) + + warnings = validate_inputs(surface) + + assert any("CDSM minimum non-zero value is 120m" in w for w in warnings) + + def test_warns_cdsm_looks_relative_with_absolute_flag(self): + """CDSM with values much smaller than DSM and cdsm_relative=False triggers warning.""" + dsm = np.ones((50, 50), dtype=np.float32) * 150.0 + cdsm = np.zeros((50, 50), dtype=np.float32) + cdsm[10:20, 10:20] = 15.0 # Looks like relative tree heights + surface = SurfaceData(dsm=dsm, cdsm=cdsm, cdsm_relative=False, svf=make_mock_svf((50, 50))) + + warnings = validate_inputs(surface) + + assert any("much smaller than DSM" in w for w in warnings) + + def test_no_warning_normal_inputs(self): + """Typical urban inputs produce no height warnings.""" + dsm = np.ones((50, 50), dtype=np.float32) * 5.0 + dsm[20:30, 20:30] = 15.0 # Buildings 10m range + surface = SurfaceData(dsm=dsm, svf=make_mock_svf((50, 50))) + + warnings = validate_inputs(surface) + + assert warnings == [] + + def test_warns_tdsm_looks_absolute_with_relative_flag(self): + """TDSM with min non-zero >50m and tdsm_relative=True triggers warning.""" + dsm = np.ones((50, 50), dtype=np.float32) * 130.0 + cdsm = np.zeros((50, 50), dtype=np.float32) + cdsm[10:20, 10:20] = 120.0 + tdsm = np.zeros((50, 50), dtype=np.float32) + tdsm[10:20, 10:20] = 115.0 # Looks like absolute trunk elevation + surface = SurfaceData( + dsm=dsm, cdsm=cdsm, tdsm=tdsm, cdsm_relative=True, tdsm_relative=True, 
svf=make_mock_svf((50, 50)) + ) + + warnings = validate_inputs(surface) + + assert any("TDSM minimum non-zero value is 115m" in w for w in warnings) + + +class TestErrorCatching: + """Tests for catching errors with proper exception types.""" + + def test_catch_all_solweig_errors(self): + """SolweigError catches all SOLWEIG-specific errors.""" + caught = [] + + try: + raise GridShapeMismatch("test", (10, 10), (5, 5)) + except SolweigError: + caught.append("GridShapeMismatch") + + try: + raise MissingPrecomputedData("test") + except SolweigError: + caught.append("MissingPrecomputedData") + + assert caught == ["GridShapeMismatch", "MissingPrecomputedData"] + + def test_catch_specific_error_types(self): + """Specific error types can be caught individually.""" + dsm = np.ones((100, 100), dtype=np.float32) + cdsm = np.ones((50, 50), dtype=np.float32) + surface = SurfaceData(dsm=dsm, cdsm=cdsm) + + with pytest.raises(GridShapeMismatch): + validate_inputs(surface) + + # Should NOT raise InvalidSurfaceData (which is the parent) + # when we specifically want GridShapeMismatch + try: + validate_inputs(surface) + except GridShapeMismatch: + pass # Expected + except InvalidSurfaceData: + pytest.fail("Should have raised GridShapeMismatch, not generic InvalidSurfaceData") + + +if __name__ == "__main__": + pytest.main([__file__, "-v"]) diff --git a/tests/test_io.py b/tests/test_io.py new file mode 100644 index 0000000..a696f25 --- /dev/null +++ b/tests/test_io.py @@ -0,0 +1,267 @@ +""" +Tests for I/O functionality including EPW parser. + +Note: EPW parser is deliberately pandas-free for QGIS compatibility. +Tests must not assume pd.DataFrame - they test the _EpwDataFrame interface. 
+""" + +from pathlib import Path + +import numpy as np +import pytest +from solweig import io + + +class TestEPWParser: + """Test the standalone EPW parser (no pandas dependency).""" + + @pytest.fixture + def sample_epw_content(self): + """Create a minimal valid EPW file content.""" + # EPW header (8 lines) + data + # Timezone offset must be between -24 and +24 hours (field 8) + # EPW data lines must preserve exact format - long lines are intentional + return """LOCATION,Athens,GRC,NA,Shiny Weather Data,NA,37.90,23.73,2.0,107.0 +DESIGN CONDITIONS,1,Climate Design Data 2009 ASHRAE Handbook,,Heating,1,-2.1,-0.3,0.6,2.8,10.7,2.3,3.5,3.4,12.2,11.2,3.1,11.4,2.5,340,Cooling,8,35.2,23.7,33.2,23.3,31.4,23.0,29.7,24.1,27.2,32.8,26.1,31.1,25.2,29.6,4.2,330,23.5,18.5,27.8,22.7,17.8,27.1,22.0,17.2,26.4,68.2,32.9,64.8,31.2,62.0,29.7,951,Extremes,11.6,10.2,9.0,25.3,-3.9,37.5,2.7,1.7,-5.5,38.9,-7.0,39.9,-8.4,40.8,-10.1,42.2 +TYPICAL/EXTREME PERIODS,6,Summer - Week Nearest Max Temperature For Period,Extreme,7/ 9,7/15,Summer - Week Nearest Average Temperature For Period,Typical,7/30,8/ 5,Winter - Week Nearest Min Temperature For Period,Extreme,1/28,2/ 3,Winter - Week Nearest Average Temperature For Period,Typical,1/21,1/27,Autumn - Week Nearest Average Temperature For Period,Typical,11/11,11/17,Spring - Week Nearest Average Temperature For Period,Typical,4/22,4/28 +GROUND TEMPERATURES,3,.5,,,12.98,11.39,10.73,11.54,14.82,18.56,21.85,23.85,24.08,22.71,19.89,16.54,2,,,,,,,,,,,,,,,,4,,,,,,,,,,,,,, +HOLIDAYS/DAYLIGHT SAVINGS,No,0,0,0 +COMMENTS 1,Custom/IWEC Data +COMMENTS 2, -- Ground temps produced with a standard soil diffusivity of 2.3225760E-03 {m**2/day} +DATA PERIODS,1,1,Data,Sunday, 1/ 1,12/31 +2024,1,1,1,0,?9?9?9?9E0?9?9?9?9?9?9?9?9?9?9?9?9?9?9*_*9*9*9*9*9,9.0,3.9,65,101300,0,0,0,0,0,0,0,0,0,0,190,4.6,10,10,16.1,77777,9,999999999,0,0.0480,0,88,0.000,0.0,0.0 
+2024,1,1,2,0,?9?9?9?9E0?9?9?9?9?9?9?9?9?9?9?9?9?9?9*_*9*9*9*9*9,8.3,3.9,69,101300,0,0,0,0,0,0,0,0,0,0,190,4.1,10,10,16.1,77777,9,999999999,0,0.0480,0,88,0.000,0.0,0.0 +2024,1,1,3,0,?9?9?9?9E0?9?9?9?9?9?9?9?9?9?9?9?9?9?9*_*9*9*9*9*9,7.8,3.9,72,101300,0,0,0,0,0,0,0,0,0,0,200,3.6,10,10,16.1,77777,9,999999999,0,0.0480,0,88,0.000,0.0,0.0 +2024,1,1,4,0,?9?9?9?9E0?9?9?9?9?9?9?9?9?9?9?9?9?9?9*_*9*9*9*9*9,7.2,3.9,76,101300,0,0,0,0,0,0,0,0,0,0,200,3.1,10,10,16.1,77777,9,999999999,0,0.0480,0,88,0.000,0.0,0.0 +2024,1,1,5,0,?9?9?9?9E0?9?9?9?9?9?9?9?9?9?9?9?9?9?9*_*9*9*9*9*9,6.7,3.3,76,101300,0,0,0,0,0,0,0,0,0,0,200,3.1,10,10,16.1,77777,9,999999999,0,0.0480,0,88,0.000,0.0,0.0 +""" + + @pytest.fixture + def epw_file(self, sample_epw_content, tmp_path): + """Create a temporary EPW file.""" + epw_path = tmp_path / "test.epw" + epw_path.write_text(sample_epw_content) + return epw_path + + def test_read_epw_returns_data_and_metadata(self, epw_file): + """Test that read_epw returns a data object and metadata dict.""" + df, metadata = io.read_epw(epw_file) + + assert len(df) == 5 + assert isinstance(metadata, dict) + + def test_epw_metadata_parsing(self, epw_file): + """Test that EPW metadata is correctly parsed.""" + df, metadata = io.read_epw(epw_file) + + assert metadata["city"] == "Athens" + assert abs(metadata["latitude"] - 37.90) < 0.01 + assert abs(metadata["longitude"] - 23.73) < 0.01 + assert abs(metadata["elevation"] - 107.0) < 0.1 + + def test_epw_data_columns(self, epw_file): + """Test that EPW data has expected columns.""" + df, _ = io.read_epw(epw_file) + + # Check for essential weather columns + expected_cols = [ + "temp_air", + "relative_humidity", + "atmospheric_pressure", + "wind_speed", + "wind_direction", + "ghi", + ] + for col in expected_cols: + assert col in df.columns, f"Missing column: {col}" + + def test_epw_datetime_index(self, epw_file): + """Test that EPW data has proper datetime index.""" + df, _ = io.read_epw(epw_file) + + assert df.index.name == 
"datetime" + + # Check first timestamp + first_timestamp = df.index[0] + assert first_timestamp.year == 2024 + assert first_timestamp.month == 1 + assert first_timestamp.day == 1 + assert first_timestamp.hour == 1 + + def test_epw_temperature_values(self, epw_file): + """Test that temperature values are reasonable.""" + df, _ = io.read_epw(epw_file) + + # Temperature should be in Celsius + assert df["temp_air"].min() >= -50 # Reasonable minimum + assert df["temp_air"].max() <= 60 # Reasonable maximum + + # Check specific values from sample data + assert abs(df.iloc[0]["temp_air"] - 9.0) < 0.1 + + def test_epw_humidity_values(self, epw_file): + """Test that humidity values are in valid range.""" + df, _ = io.read_epw(epw_file) + + assert (df["relative_humidity"] >= 0).all() + assert (df["relative_humidity"] <= 100).all() + + # Check specific value from sample data + assert df.iloc[0]["relative_humidity"] == 65 + + def test_epw_pressure_values(self, epw_file): + """Test that pressure values are reasonable.""" + df, _ = io.read_epw(epw_file) + + # Pressure should be in Pa + assert (df["atmospheric_pressure"] > 50000).all() # > 500 hPa + assert (df["atmospheric_pressure"] < 110000).all() # < 1100 hPa + + def test_epw_handles_pathlib_path(self, epw_file): + """Test that read_epw accepts pathlib.Path.""" + df, metadata = io.read_epw(Path(epw_file)) + + assert len(df) == 5 + assert metadata["city"] == "Athens" + + def test_epw_handles_string_path(self, epw_file): + """Test that read_epw accepts string path.""" + df, metadata = io.read_epw(str(epw_file)) + + assert len(df) == 5 + assert metadata["city"] == "Athens" + + def test_epw_missing_file_raises_error(self): + """Test that reading non-existent EPW file raises error.""" + with pytest.raises(FileNotFoundError): + io.read_epw("nonexistent.epw") + + def test_to_dataframe_converts_when_pandas_available(self, epw_file): + """Test that to_dataframe() converts to pandas when available.""" + import pandas as pd + + df, _ = 
io.read_epw(epw_file) + pdf = df.to_dataframe() + + assert isinstance(pdf, pd.DataFrame) + assert isinstance(pdf.index, pd.DatetimeIndex) + assert len(pdf) == 5 + + +class TestRasterIO: + """Test raster I/O with GDAL backend fallback.""" + + def test_gdal_backend_env_variable(self, monkeypatch): + """Test that UMEP_USE_GDAL environment variable works.""" + # Skip if GDAL is not available + try: + from osgeo import gdal # noqa: F401 + + del gdal # Silence unused import warning + except ImportError: + pytest.skip("GDAL not available") + + # Set environment variable + monkeypatch.setenv("UMEP_USE_GDAL", "1") + + # Reload _compat (the source of truth for backend selection) + # to pick up the environment variable change. + import importlib + + from solweig import _compat + + importlib.reload(_compat) + + # Should use GDAL backend + assert _compat.GDAL_ENV + + def test_rasterio_backend_default(self, monkeypatch): + """Test that rasterio is the default backend in a standard environment.""" + import importlib + import sys + + from solweig import _compat + + # Ensure environment variable is not set + monkeypatch.delenv("UMEP_USE_GDAL", raising=False) + + # Remove any QGIS mocks that earlier tests may have injected, + # so _compat.in_osgeo_environment() returns False. 
+ qgis_keys = [k for k in sys.modules if k == "qgis" or k.startswith("qgis.")] + saved = {k: sys.modules.pop(k) for k in qgis_keys} + try: + importlib.reload(_compat) + finally: + sys.modules.update(saved) + + # In a standard environment with rasterio, GDAL_ENV should be False + assert _compat.RASTERIO_AVAILABLE is True + assert _compat.GDAL_ENV is False + + +class TestGeoTIFFLoading: + """Test GeoTIFF loading functionality.""" + + @pytest.fixture + def sample_geotiff(self, tmp_path): + """Create a minimal GeoTIFF file for testing.""" + try: + from osgeo import gdal, osr + + # Create a simple 10x10 raster + driver = gdal.GetDriverByName("GTiff") + ds = driver.Create( + str(tmp_path / "test.tif"), + 10, + 10, + 1, + gdal.GDT_Float32, + ) + + # Set geotransform + ds.SetGeoTransform([0, 1, 0, 0, 0, -1]) + + # Set projection (WGS84) + srs = osr.SpatialReference() + srs.ImportFromEPSG(4326) + ds.SetProjection(srs.ExportToWkt()) + + # Write data + band = ds.GetRasterBand(1) + data = np.arange(100, dtype=np.float32).reshape(10, 10) + band.WriteArray(data) + band.SetNoDataValue(-9999) + + # Close dataset + ds = None + + return tmp_path / "test.tif" + + except ImportError: + pytest.skip("GDAL not available for creating test file") + + def test_load_raster_returns_tuple(self, sample_geotiff): + """Test that load_raster returns expected tuple.""" + result = io.load_raster(str(sample_geotiff)) + + # Should return (array, transform, crs, nodata) + assert len(result) == 4 + + array, transform, crs, nodata = result + + assert isinstance(array, np.ndarray) + assert array.shape == (10, 10) + assert transform is not None + assert crs is not None + + def test_load_raster_preserves_data(self, sample_geotiff): + """Test that loaded data matches written data.""" + array, _, _, _ = io.load_raster(str(sample_geotiff)) + + # Should match the data we wrote + expected = np.arange(100, dtype=np.float32).reshape(10, 10) + np.testing.assert_array_almost_equal(array, expected) + + +if __name__ 
== "__main__": + pytest.main([__file__, "-v"]) diff --git a/tests/test_orchestration.py b/tests/test_orchestration.py new file mode 100644 index 0000000..1b03bba --- /dev/null +++ b/tests/test_orchestration.py @@ -0,0 +1,782 @@ +""" +Unit tests for the orchestration layer. + +Tests internal helper functions in computation.py, timeseries.py, and tiling.py +that aren't exercised by the higher-level integration tests. +""" + +from __future__ import annotations + +from datetime import datetime +from types import SimpleNamespace +from unittest.mock import MagicMock, patch + +import numpy as np +import pytest +import solweig +from solweig.bundles import GvfBundle, LupBundle +from solweig.computation import _apply_thermal_delay +from solweig.models.state import ThermalState, TileSpec +from solweig.tiling import ( + MAX_BUFFER_M, + MIN_TILE_SIZE, + _resolve_inflight_limit, + _resolve_tile_workers, + calculate_buffer_distance, + compute_max_tile_pixels, + compute_max_tile_side, + generate_tiles, + validate_tile_size, +) + +# --------------------------------------------------------------------------- +# Fixtures +# --------------------------------------------------------------------------- + + +@pytest.fixture() +def small_shape(): + return (10, 10) + + +@pytest.fixture() +def state_10x10(small_shape): + """Fresh ThermalState for a 10×10 grid.""" + return ThermalState.initial(small_shape) + + +@pytest.fixture() +def gvf_bundle_10x10(small_shape): + """GvfBundle with synthetic upwelling longwave values.""" + ones = np.ones(small_shape, dtype=np.float32) + return GvfBundle( + lup=ones * 400.0, + lup_e=ones * 390.0, + lup_s=ones * 395.0, + lup_w=ones * 385.0, + lup_n=ones * 380.0, + gvfalb=ones * 0.15, + gvfalb_e=ones * 0.15, + gvfalb_s=ones * 0.15, + gvfalb_w=ones * 0.15, + gvfalb_n=ones * 0.15, + gvfalbnosh=ones * 0.15, + gvfalbnosh_e=ones * 0.15, + gvfalbnosh_s=ones * 0.15, + gvfalbnosh_w=ones * 0.15, + gvfalbnosh_n=ones * 0.15, + ) + + +def _make_weather(*, ta: float = 
20.0, sun_altitude: float = -5.0, is_daytime: bool = False) -> MagicMock:
    """Create a mock Weather object with controllable attributes."""
    w = MagicMock()
    w.ta = ta
    w.sun_altitude = sun_altitude
    w.is_daytime = is_daytime
    return w


# ---------------------------------------------------------------------------
# _apply_thermal_delay
# ---------------------------------------------------------------------------


class TestApplyThermalDelay:
    """Tests for _apply_thermal_delay() in computation.py."""

    # NOTE(review): the SimpleNamespace mock_result field lists below mirror the
    # return object of ground_rust.ts_wave_delay_batch — keep them in sync with
    # the Rust binding if its result struct changes.

    def test_no_state_returns_raw_gvf(self, gvf_bundle_10x10):
        """Without state, raw GVF lup values are returned (no thermal delay)."""
        weather = _make_weather(ta=25.0, is_daytime=True)
        shadow = np.ones((10, 10), dtype=np.float32) * 0.5
        ground_tg = np.ones((10, 10), dtype=np.float32) * 2.0

        result = _apply_thermal_delay(gvf_bundle_10x10, ground_tg, shadow, weather, state=None)

        assert isinstance(result, LupBundle)
        np.testing.assert_array_equal(result.lup, gvf_bundle_10x10.lup)
        np.testing.assert_array_equal(result.lup_e, gvf_bundle_10x10.lup_e)
        np.testing.assert_array_equal(result.lup_n, gvf_bundle_10x10.lup_n)
        assert result.state is None

    def test_no_state_output_is_float32(self, gvf_bundle_10x10):
        """Output arrays are float32 even without state."""
        weather = _make_weather(ta=25.0)
        shadow = np.ones((10, 10), dtype=np.float32)
        ground_tg = np.ones((10, 10), dtype=np.float32)

        result = _apply_thermal_delay(gvf_bundle_10x10, ground_tg, shadow, weather, state=None)

        assert result.lup.dtype == np.float32
        assert result.lup_e.dtype == np.float32

    @patch("solweig.computation.ground_rust.ts_wave_delay_batch")
    def test_with_state_calls_rust(self, mock_ts_wave, gvf_bundle_10x10, state_10x10):
        """With state, calls Rust ts_wave_delay_batch and returns delayed values."""
        # Set up mock Rust result
        shape = (10, 10)
        mock_result = SimpleNamespace(
            lup=np.ones(shape, dtype=np.float32) * 410.0,
            lup_e=np.ones(shape, dtype=np.float32) * 405.0,
            lup_s=np.ones(shape, dtype=np.float32) * 400.0,
            lup_w=np.ones(shape, dtype=np.float32) * 395.0,
            lup_n=np.ones(shape, dtype=np.float32) * 390.0,
            timeadd=0.5,
            tgmap1=np.ones(shape, dtype=np.float32) * 1.0,
            tgmap1_e=np.ones(shape, dtype=np.float32) * 1.1,
            tgmap1_s=np.ones(shape, dtype=np.float32) * 1.2,
            tgmap1_w=np.ones(shape, dtype=np.float32) * 1.3,
            tgmap1_n=np.ones(shape, dtype=np.float32) * 1.4,
            tgout1=np.ones(shape, dtype=np.float32) * 2.0,
        )
        mock_ts_wave.return_value = mock_result

        weather = _make_weather(ta=25.0, is_daytime=True)
        shadow = np.ones(shape, dtype=np.float32) * 0.5
        ground_tg = np.ones(shape, dtype=np.float32) * 2.0

        result = _apply_thermal_delay(gvf_bundle_10x10, ground_tg, shadow, weather, state=state_10x10)

        mock_ts_wave.assert_called_once()
        np.testing.assert_allclose(result.lup, 410.0)
        np.testing.assert_allclose(result.lup_e, 405.0)
        assert result.state is not None

    @patch("solweig.computation.ground_rust.ts_wave_delay_batch")
    def test_state_updated_from_rust_result(self, mock_ts_wave, gvf_bundle_10x10, state_10x10):
        """State arrays are updated from Rust result."""
        shape = (10, 10)
        mock_result = SimpleNamespace(
            lup=np.ones(shape, dtype=np.float32),
            lup_e=np.ones(shape, dtype=np.float32),
            lup_s=np.ones(shape, dtype=np.float32),
            lup_w=np.ones(shape, dtype=np.float32),
            lup_n=np.ones(shape, dtype=np.float32),
            timeadd=0.75,
            tgmap1=np.full(shape, 3.0, dtype=np.float32),
            tgmap1_e=np.full(shape, 3.1, dtype=np.float32),
            tgmap1_s=np.full(shape, 3.2, dtype=np.float32),
            tgmap1_w=np.full(shape, 3.3, dtype=np.float32),
            tgmap1_n=np.full(shape, 3.4, dtype=np.float32),
            tgout1=np.full(shape, 4.0, dtype=np.float32),
        )
        mock_ts_wave.return_value = mock_result

        weather = _make_weather(ta=20.0, is_daytime=True)
        shadow = np.ones(shape, dtype=np.float32)
        ground_tg = np.zeros(shape, dtype=np.float32)

        result = _apply_thermal_delay(gvf_bundle_10x10, ground_tg, shadow, weather, state=state_10x10)

        # Output state has values from Rust result
        assert result.state is not None
        np.testing.assert_allclose(result.state.tgmap1, 3.0)
        np.testing.assert_allclose(result.state.tgmap1_e, 3.1)
        np.testing.assert_allclose(result.state.tgout1, 4.0)

    @patch("solweig.computation.ground_rust.ts_wave_delay_batch")
    def test_daytime_clears_firstdaytime(self, mock_ts_wave, gvf_bundle_10x10, state_10x10):
        """During daytime, firstdaytime is set to 0.0."""
        shape = (10, 10)
        mock_result = SimpleNamespace(
            lup=np.ones(shape, dtype=np.float32),
            lup_e=np.ones(shape, dtype=np.float32),
            lup_s=np.ones(shape, dtype=np.float32),
            lup_w=np.ones(shape, dtype=np.float32),
            lup_n=np.ones(shape, dtype=np.float32),
            timeadd=0.5,
            tgmap1=np.zeros(shape, dtype=np.float32),
            tgmap1_e=np.zeros(shape, dtype=np.float32),
            tgmap1_s=np.zeros(shape, dtype=np.float32),
            tgmap1_w=np.zeros(shape, dtype=np.float32),
            tgmap1_n=np.zeros(shape, dtype=np.float32),
            tgout1=np.zeros(shape, dtype=np.float32),
        )
        mock_ts_wave.return_value = mock_result

        state_10x10.firstdaytime = 1.0  # Morning state
        weather = _make_weather(ta=25.0, is_daytime=True)
        shadow = np.ones(shape, dtype=np.float32)
        ground_tg = np.zeros(shape, dtype=np.float32)

        result = _apply_thermal_delay(gvf_bundle_10x10, ground_tg, shadow, weather, state=state_10x10)

        assert result.state is not None
        assert result.state.firstdaytime == 0.0

    @patch("solweig.computation.ground_rust.ts_wave_delay_batch")
    def test_nighttime_resets_firstdaytime_and_timeadd(self, mock_ts_wave, gvf_bundle_10x10, state_10x10):
        """At night, firstdaytime resets to 1.0 and timeadd resets to 0.0."""
        shape = (10, 10)
        mock_result = SimpleNamespace(
            lup=np.ones(shape, dtype=np.float32),
            lup_e=np.ones(shape, dtype=np.float32),
            lup_s=np.ones(shape, dtype=np.float32),
            lup_w=np.ones(shape, dtype=np.float32),
            lup_n=np.ones(shape, dtype=np.float32),
            timeadd=0.75,
            tgmap1=np.zeros(shape, dtype=np.float32),
            tgmap1_e=np.zeros(shape, dtype=np.float32),
            tgmap1_s=np.zeros(shape, dtype=np.float32),
            tgmap1_w=np.zeros(shape, dtype=np.float32),
            tgmap1_n=np.zeros(shape, dtype=np.float32),
            tgout1=np.zeros(shape, dtype=np.float32),
        )
        mock_ts_wave.return_value = mock_result

        state_10x10.firstdaytime = 0.0
        state_10x10.timeadd = 5.0
        weather = _make_weather(ta=15.0, is_daytime=False)
        shadow = np.ones(shape, dtype=np.float32)
        ground_tg = np.zeros(shape, dtype=np.float32)

        result = _apply_thermal_delay(gvf_bundle_10x10, ground_tg, shadow, weather, state=state_10x10)

        assert result.state is not None
        assert result.state.firstdaytime == 1.0
        assert result.state.timeadd == 0.0

    @patch("solweig.computation.ground_rust.ts_wave_delay_batch")
    def test_output_state_is_copy(self, mock_ts_wave, gvf_bundle_10x10, state_10x10):
        """Output state is a deep copy (mutating doesn't affect internal state)."""
        shape = (10, 10)
        mock_result = SimpleNamespace(
            lup=np.ones(shape, dtype=np.float32),
            lup_e=np.ones(shape, dtype=np.float32),
            lup_s=np.ones(shape, dtype=np.float32),
            lup_w=np.ones(shape, dtype=np.float32),
            lup_n=np.ones(shape, dtype=np.float32),
            timeadd=0.5,
            tgmap1=np.ones(shape, dtype=np.float32),
            tgmap1_e=np.ones(shape, dtype=np.float32),
            tgmap1_s=np.ones(shape, dtype=np.float32),
            tgmap1_w=np.ones(shape, dtype=np.float32),
            tgmap1_n=np.ones(shape, dtype=np.float32),
            tgout1=np.ones(shape, dtype=np.float32),
        )
        mock_ts_wave.return_value = mock_result

        weather = _make_weather(ta=20.0, is_daytime=True)
        shadow = np.ones(shape, dtype=np.float32)
        ground_tg = np.zeros(shape, dtype=np.float32)

        result = _apply_thermal_delay(gvf_bundle_10x10, ground_tg, shadow, weather, state=state_10x10)

        # Mutating output state doesn't affect input
        assert result.state is not None
        result.state.tgmap1[:] = 999.0
        assert not np.any(state_10x10.tgmap1 == 999.0)


# ---------------------------------------------------------------------------
# _precompute_weather
# ---------------------------------------------------------------------------


class TestPrecomputeWeather:
    """Tests for _precompute_weather() in timeseries.py."""

    def test_empty_list_noop(self):
        """Empty weather series is a no-op."""
        from solweig.timeseries import _precompute_weather

        location = solweig.Location(latitude=57.7, longitude=12.0, utc_offset=2)
        _precompute_weather([], location)  # Should not raise

    def test_assigns_precomputed_altmax(self):
        """Each weather object gets precomputed_altmax assigned."""
        from solweig.timeseries import _precompute_weather

        location = solweig.Location(latitude=57.7, longitude=12.0, utc_offset=2)
        dt_noon = datetime(2024, 7, 15, 12, 0)
        w = solweig.Weather(datetime=dt_noon, ta=25.0, rh=50.0, global_rad=800.0)

        _precompute_weather([w], location)

        assert hasattr(w, "precomputed_altmax")
        assert w.precomputed_altmax is not None
        assert w.precomputed_altmax > 0  # Summer noon in Sweden: ~55°

    def test_same_day_shares_altmax(self):
        """Multiple timesteps on the same day share the same altmax value."""
        from solweig.timeseries import _precompute_weather

        location = solweig.Location(latitude=57.7, longitude=12.0, utc_offset=2)
        base = datetime(2024, 7, 15)

        weather_series = [
            solweig.Weather(datetime=base.replace(hour=h), ta=25.0, rh=50.0, global_rad=800.0) for h in range(6, 18)
        ]

        _precompute_weather(weather_series, location)

        altmaxes = [w.precomputed_altmax for w in weather_series]
        assert all(a == altmaxes[0] for a in altmaxes)

    def test_different_days_may_differ(self):
        """Different days may have different altmax (season effect)."""
        from solweig.timeseries import _precompute_weather

        location = solweig.Location(latitude=57.7, longitude=12.0, utc_offset=2)

        w_summer = solweig.Weather(datetime=datetime(2024, 6, 21, 12, 0), ta=25.0, rh=50.0, global_rad=800.0)
        w_winter = solweig.Weather(datetime=datetime(2024, 12, 21, 12, 0), ta=0.0, rh=70.0, global_rad=200.0)

        _precompute_weather([w_summer, w_winter], location)

        assert w_summer.precomputed_altmax is not None
        assert w_winter.precomputed_altmax is not None
        assert w_summer.precomputed_altmax > w_winter.precomputed_altmax

    def test_marks_derived_computed(self):
        """After precomputation, weather objects have derived values computed."""
        from solweig.timeseries import _precompute_weather

        location = solweig.Location(latitude=57.7, longitude=12.0, utc_offset=2)
        w = solweig.Weather(datetime=datetime(2024, 7, 15, 12, 0), ta=25.0, rh=50.0, global_rad=800.0)

        assert not w._derived_computed
        _precompute_weather([w], location)
        assert w._derived_computed


# ---------------------------------------------------------------------------
# ThermalState
# ---------------------------------------------------------------------------


class TestThermalState:
    """Tests for ThermalState creation and copying."""

    def test_initial_state_shape(self, small_shape):
        """Initial state arrays have the requested shape."""
        state = ThermalState.initial(small_shape)
        assert state.tgmap1.shape == small_shape
        assert state.tgmap1_e.shape == small_shape
        assert state.tgmap1_s.shape == small_shape
        assert state.tgmap1_w.shape == small_shape
        assert state.tgmap1_n.shape == small_shape
        assert state.tgout1.shape == small_shape

    def test_initial_state_zeros(self, small_shape):
        """Initial state arrays are all zeros."""
        state = ThermalState.initial(small_shape)
        np.testing.assert_array_equal(state.tgmap1, 0.0)
        np.testing.assert_array_equal(state.tgout1, 0.0)

    def test_initial_state_flags(self, small_shape):
        """Initial state has firstdaytime=1.0, timeadd=0.0."""
        state = ThermalState.initial(small_shape)
        assert state.firstdaytime == 1.0
        assert state.timeadd == 0.0
        assert state.timestep_dec == 0.0

    def test_copy_is_independent(self, small_shape):
+ """Copy creates independent arrays.""" + state = ThermalState.initial(small_shape) + state.tgmap1[:] = 5.0 + state.firstdaytime = 0.0 + + copy = state.copy() + copy.tgmap1[:] = 99.0 + copy.firstdaytime = 1.0 + + assert state.tgmap1[0, 0] == 5.0 # Original unchanged + assert state.firstdaytime == 0.0 # Original unchanged + + def test_copy_preserves_values(self, small_shape): + """Copy preserves all field values.""" + state = ThermalState.initial(small_shape) + state.tgmap1[:] = 3.0 + state.timeadd = 1.5 + state.timestep_dec = 0.042 + + copy = state.copy() + np.testing.assert_array_equal(copy.tgmap1, 3.0) + assert copy.timeadd == 1.5 + assert copy.timestep_dec == 0.042 + + +# --------------------------------------------------------------------------- +# TileSpec +# --------------------------------------------------------------------------- + + +class TestTileSpec: + """Tests for TileSpec properties.""" + + def test_core_shape(self): + tile = TileSpec( + row_start=0, + row_end=100, + col_start=0, + col_end=200, + row_start_full=0, + row_end_full=150, + col_start_full=0, + col_end_full=250, + overlap_top=0, + overlap_bottom=50, + overlap_left=0, + overlap_right=50, + ) + assert tile.core_shape == (100, 200) + + def test_full_shape(self): + tile = TileSpec( + row_start=0, + row_end=100, + col_start=0, + col_end=200, + row_start_full=0, + row_end_full=150, + col_start_full=0, + col_end_full=250, + overlap_top=0, + overlap_bottom=50, + overlap_left=0, + overlap_right=50, + ) + assert tile.full_shape == (150, 250) + + def test_core_slice_no_overlap(self): + """First tile (no top/left overlap) has core starting at (0, 0).""" + tile = TileSpec( + row_start=0, + row_end=100, + col_start=0, + col_end=100, + row_start_full=0, + row_end_full=150, + col_start_full=0, + col_end_full=150, + overlap_top=0, + overlap_bottom=50, + overlap_left=0, + overlap_right=50, + ) + rs, cs = tile.core_slice + assert rs == slice(0, 100) + assert cs == slice(0, 100) + + def 
test_core_slice_with_overlap(self): + """Middle tile has core offset by overlap.""" + tile = TileSpec( + row_start=100, + row_end=200, + col_start=100, + col_end=200, + row_start_full=50, + row_end_full=250, + col_start_full=50, + col_end_full=250, + overlap_top=50, + overlap_bottom=50, + overlap_left=50, + overlap_right=50, + ) + rs, cs = tile.core_slice + assert rs == slice(50, 150) + assert cs == slice(50, 150) + + def test_write_slice(self): + tile = TileSpec( + row_start=100, + row_end=200, + col_start=50, + col_end=150, + row_start_full=50, + row_end_full=250, + col_start_full=0, + col_end_full=200, + overlap_top=50, + overlap_bottom=50, + overlap_left=50, + overlap_right=50, + ) + rs, cs = tile.write_slice + assert rs == slice(100, 200) + assert cs == slice(50, 150) + + def test_read_slice(self): + tile = TileSpec( + row_start=100, + row_end=200, + col_start=50, + col_end=150, + row_start_full=50, + row_end_full=250, + col_start_full=0, + col_end_full=200, + overlap_top=50, + overlap_bottom=50, + overlap_left=50, + overlap_right=50, + ) + rs, cs = tile.read_slice + assert rs == slice(50, 250) + assert cs == slice(0, 200) + + +# --------------------------------------------------------------------------- +# calculate_buffer_distance +# --------------------------------------------------------------------------- + + +class TestCalculateBufferDistance: + """Tests for calculate_buffer_distance() in tiling.py.""" + + def test_zero_height(self): + assert calculate_buffer_distance(0.0) == 0.0 + + def test_negative_height(self): + assert calculate_buffer_distance(-5.0) == 0.0 + + def test_short_building(self): + """10m building: 10 / tan(3°) ≈ 190.8m.""" + result = calculate_buffer_distance(10.0) + assert 190 < result < 192 + + def test_tall_building_capped(self): + """60m building would need ~1145m, capped at 1000m.""" + result = calculate_buffer_distance(60.0) + assert result == MAX_BUFFER_M + + def test_custom_min_elevation(self): + """Higher min elevation reduces 
buffer distance.""" + buffer_3 = calculate_buffer_distance(10.0, min_sun_elev_deg=3.0) + buffer_10 = calculate_buffer_distance(10.0, min_sun_elev_deg=10.0) + assert buffer_10 < buffer_3 + + +# --------------------------------------------------------------------------- +# validate_tile_size +# --------------------------------------------------------------------------- + + +class TestComputeMaxTilePixels: + """Tests for compute_max_tile_pixels() in tiling.py.""" + + def test_backend_without_hint_uses_single_buffer_estimate(self, monkeypatch): + """Unknown backend should use largest-single-buffer bytes/pixel estimate.""" + max_buf = 1_000_000_000 # bytes (1 GB — large enough to exceed MIN_TILE_SIZE²) + headroom = compute_max_tile_pixels.__globals__["_GPU_HEADROOM"] + + monkeypatch.setattr(solweig, "get_gpu_limits", lambda: {"max_buffer_size": max_buf}) + monkeypatch.setitem(compute_max_tile_pixels.__globals__, "_get_total_ram_bytes", lambda: None) + + svf_pixels = compute_max_tile_pixels(context="svf") + solweig_pixels = compute_max_tile_pixels(context="solweig") + + expected_svf = int(max_buf * headroom) // 60 # _SVF_GPU_SINGLE_BPP + expected_solweig = int(max_buf * headroom) // 40 # _SHADOW_GPU_SINGLE_BPP + + assert svf_pixels == expected_svf + assert solweig_pixels == expected_solweig + assert svf_pixels < solweig_pixels + + def test_metal_backend_uses_total_working_set_estimate(self, monkeypatch): + """Metal backend should constrain by aggregate GPU working-set bytes/pixel.""" + max_buf = 1_000_000_000 # bytes (1 GB — large enough to exceed MIN_TILE_SIZE²) + headroom = compute_max_tile_pixels.__globals__["_GPU_HEADROOM"] + + monkeypatch.setattr(solweig, "get_gpu_limits", lambda: {"max_buffer_size": max_buf, "backend": "Metal"}) + monkeypatch.setitem(compute_max_tile_pixels.__globals__, "_get_total_ram_bytes", lambda: None) + + svf_pixels = compute_max_tile_pixels(context="svf") + solweig_pixels = compute_max_tile_pixels(context="solweig") + + expected_svf = 
int(max_buf * headroom) // 384 # _SVF_GPU_TOTAL_BPP + expected_solweig = int(max_buf * headroom) // 120 # _SHADOW_GPU_TOTAL_BPP + + assert svf_pixels == expected_svf + assert solweig_pixels == expected_solweig + assert svf_pixels < solweig_pixels + + +class TestValidateTileSize: + """Tests for validate_tile_size() — tile_size is core (excluding overlap).""" + + def test_valid_core_passes(self): + """Core size that fits with buffer within resource limit passes unchanged.""" + adjusted, warning = validate_tile_size(800, buffer_pixels=50, pixel_size=1.0) + assert adjusted == 800 + assert warning is None + + def test_below_minimum_adjusted(self): + adjusted, warning = validate_tile_size(100, buffer_pixels=10, pixel_size=1.0) + assert adjusted == MIN_TILE_SIZE + assert warning is not None + assert "below minimum" in warning + + def test_core_plus_buffer_exceeds_limit(self): + """Core is reduced so core + 2*buffer fits within resource limit.""" + max_full = compute_max_tile_side(context="solweig") + buffer_pixels = 50 + # Request core that with buffer would exceed limit + adjusted, warning = validate_tile_size(max_full, buffer_pixels=buffer_pixels, pixel_size=1.0) + assert adjusted == max_full - 2 * buffer_pixels + assert warning is not None + assert "exceeds resource limit" in warning + + def test_large_buffer_allows_subminimum_core_to_respect_limit(self): + """When overlap is huge, core may drop below MIN_TILE_SIZE to keep full tile valid.""" + max_full = compute_max_tile_side(context="solweig") + # Buffer so large that max_core < MIN_TILE_SIZE + huge_buffer = (max_full - MIN_TILE_SIZE) // 2 + 100 + adjusted, warning = validate_tile_size(800, buffer_pixels=huge_buffer, pixel_size=1.0) + max_core = max_full - 2 * huge_buffer + assert adjusted == max(1, max_core) + assert warning is not None + + def test_exact_minimum(self): + adjusted, warning = validate_tile_size(MIN_TILE_SIZE, buffer_pixels=10, pixel_size=1.0) + assert adjusted == MIN_TILE_SIZE + assert warning is 
None + + def test_small_buffer_allows_large_core(self): + """With small buffer, core can use nearly all of the resource limit.""" + max_full = compute_max_tile_side(context="solweig") + max_core = max_full - 2 * 10 + adjusted, warning = validate_tile_size(max_core, buffer_pixels=10, pixel_size=1.0) + assert adjusted == max_core + assert warning is None + + def test_context_uses_context_specific_limits(self, monkeypatch): + """SVF context should not be constrained by SOLWEIG tile limits.""" + + def _fake_max_side(*, context: str = "solweig"): + return 4000 if context == "svf" else 1000 + + # Patch directly on the function's globals dict. QGIS mock imports can + # cause a double-load of solweig.tiling so the module object in + # sys.modules may differ from the one validate_tile_size was defined in. + monkeypatch.setitem( + validate_tile_size.__globals__, + "compute_max_tile_side", + _fake_max_side, + ) + + svf_adjusted, _ = validate_tile_size(950, buffer_pixels=50, pixel_size=1.0, context="svf") + solweig_adjusted, _ = validate_tile_size(950, buffer_pixels=50, pixel_size=1.0, context="solweig") + + assert svf_adjusted == 950 + assert solweig_adjusted == 900 + + +# --------------------------------------------------------------------------- +# generate_tiles +# --------------------------------------------------------------------------- + + +class TestGenerateTiles: + """Tests for generate_tiles() in tiling.py.""" + + def test_single_tile(self): + """Small raster fits in one tile.""" + tiles = generate_tiles(100, 100, tile_size=256, overlap=50) + assert len(tiles) == 1 + assert tiles[0].row_start == 0 + assert tiles[0].row_end == 100 + assert tiles[0].col_start == 0 + assert tiles[0].col_end == 100 + + def test_single_tile_no_overlap(self): + """Single tile has no overlap (no neighbors).""" + tiles = generate_tiles(100, 100, tile_size=256, overlap=50) + assert tiles[0].overlap_top == 0 + assert tiles[0].overlap_bottom == 0 + assert tiles[0].overlap_left == 0 + assert 
tiles[0].overlap_right == 0 + + def test_2x2_tiles(self): + """500x500 raster with 256 tile size creates 2x2 grid.""" + tiles = generate_tiles(500, 500, tile_size=256, overlap=50) + assert len(tiles) == 4 + + def test_tiles_cover_entire_raster(self): + """All pixels are covered by at least one tile's core area.""" + rows, cols = 500, 700 + tiles = generate_tiles(rows, cols, tile_size=256, overlap=50) + + covered = np.zeros((rows, cols), dtype=bool) + for tile in tiles: + covered[tile.row_start : tile.row_end, tile.col_start : tile.col_end] = True + + assert np.all(covered) + + def test_overlap_only_on_inner_edges(self): + """Edge tiles don't extend beyond raster bounds.""" + tiles = generate_tiles(500, 500, tile_size=256, overlap=50) + + for tile in tiles: + assert tile.row_start_full >= 0 + assert tile.row_end_full <= 500 + assert tile.col_start_full >= 0 + assert tile.col_end_full <= 500 + + def test_first_tile_no_top_left_overlap(self): + """Top-left tile has no top or left overlap.""" + tiles = generate_tiles(500, 500, tile_size=256, overlap=50) + first = tiles[0] + assert first.overlap_top == 0 + assert first.overlap_left == 0 + + def test_last_tile_no_bottom_right_overlap(self): + """Bottom-right tile has no bottom or right overlap.""" + tiles = generate_tiles(500, 500, tile_size=256, overlap=50) + last = tiles[-1] + assert last.overlap_bottom == 0 + assert last.overlap_right == 0 + + def test_middle_tile_has_all_overlaps(self): + """Middle tile in a 3x3 grid has overlap on all sides.""" + tiles = generate_tiles(768, 768, tile_size=256, overlap=50) + assert len(tiles) == 9 # 3x3 + + middle = tiles[4] # center tile + assert middle.overlap_top == 50 + assert middle.overlap_bottom == 50 + assert middle.overlap_left == 50 + assert middle.overlap_right == 50 + + def test_non_square_raster(self): + """Non-square raster creates asymmetric tile grid.""" + tiles = generate_tiles(rows=200, cols=600, tile_size=256, overlap=30) + assert len(tiles) == 3 # 1 row × 3 cols + 
+ def test_exact_tile_size_fit(self): + """Raster exactly matching tile size creates exactly 1 tile.""" + tiles = generate_tiles(256, 256, tile_size=256, overlap=50) + assert len(tiles) == 1 + + +# --------------------------------------------------------------------------- +# Tiling runtime controls +# --------------------------------------------------------------------------- + + +class TestTilingRuntimeControls: + """Tests for worker and queue-depth resolution helpers.""" + + def test_resolve_tile_workers_clamps_to_tile_count(self): + assert _resolve_tile_workers(tile_workers=16, n_tiles=3) == 3 + + def test_resolve_tile_workers_zero_raises(self): + with pytest.raises(ValueError, match="tile_workers must be >= 1"): + _resolve_tile_workers(tile_workers=0, n_tiles=8) + + def test_resolve_inflight_limit_with_prefetch_default(self): + # queue_depth=None + prefetch=True => queue_depth = n_workers + assert _resolve_inflight_limit(4, n_tiles=20, tile_queue_depth=None, prefetch_tiles=True) == 8 + + def test_resolve_inflight_limit_no_prefetch(self): + # queue_depth=None + prefetch=False => no queued tasks + assert _resolve_inflight_limit(4, n_tiles=20, tile_queue_depth=None, prefetch_tiles=False) == 4 + + def test_resolve_inflight_limit_clamped_to_n_tiles(self): + assert _resolve_inflight_limit(4, n_tiles=5, tile_queue_depth=8, prefetch_tiles=True) == 5 + + def test_resolve_inflight_limit_negative_queue_depth_raises(self): + with pytest.raises(ValueError, match="tile_queue_depth must be >= 0"): + _resolve_inflight_limit(2, n_tiles=8, tile_queue_depth=-1, prefetch_tiles=True) diff --git a/tests/test_progress.py b/tests/test_progress.py new file mode 100644 index 0000000..12c5912 --- /dev/null +++ b/tests/test_progress.py @@ -0,0 +1,328 @@ +""" +Tests for progress reporting abstraction. 
+""" + +import sys +from unittest.mock import MagicMock, patch + +import pytest +from solweig.progress import ProgressReporter, _ProgressIterator, get_progress_iterator, progress + + +class TestProgressReporter: + """Test the ProgressReporter class.""" + + def test_progress_reporter_basic_usage(self): + """Test basic ProgressReporter usage.""" + reporter = ProgressReporter(total=10, desc="Test", disable=True) + + for _i in range(10): + reporter.update(1) + + reporter.close() + + assert reporter.current == 10 + + def test_progress_reporter_update_increments(self): + """Test that update() increments current count.""" + reporter = ProgressReporter(total=100, disable=True) + + reporter.update(5) + assert reporter.current == 5 + + reporter.update(10) + assert reporter.current == 15 + + reporter.close() + + def test_progress_reporter_disabled_mode(self): + """Test that disabled mode doesn't crash.""" + reporter = ProgressReporter(total=10, disable=True) + + # Should work without any backend + reporter.update(5) + reporter.set_description("New description") + reporter.close() + + assert reporter.current == 5 + + def test_progress_reporter_no_backend(self): + """Test ProgressReporter with no tqdm or QGIS available.""" + # Hide both tqdm and QGIS + with patch.dict(sys.modules, {"tqdm": None, "qgis.core": None}): + reporter = ProgressReporter(total=10, desc="Test") + + # Should work silently + reporter.update(5) + reporter.close() + + assert reporter.current == 5 + + def test_progress_reporter_with_tqdm(self): + """Test ProgressReporter with tqdm backend.""" + try: + from tqdm import tqdm # noqa: F401 + + del tqdm # Silence unused import warning + reporter = ProgressReporter(total=10, desc="Test with tqdm") + + # Should have tqdm bar + assert reporter._tqdm_bar is not None + + reporter.update(5) + reporter.close() + + assert reporter.current == 5 + + except ImportError: + pytest.skip("tqdm not available") + + def test_progress_reporter_qgis_feedback(self): + """Test 
ProgressReporter with QGIS feedback.""" + # Mock QGIS feedback + mock_feedback = MagicMock() + mock_feedback.isCanceled.return_value = False + + reporter = ProgressReporter(total=100, desc="Test", feedback=mock_feedback) + + # Should use QGIS feedback + assert reporter._qgis_feedback is mock_feedback + + reporter.update(50) + + # Should have called setProgress with 50% + mock_feedback.setProgress.assert_called_with(50) + + reporter.close() + + def test_progress_reporter_qgis_cancel(self): + """Test that QGIS cancellation is detected.""" + mock_feedback = MagicMock() + mock_feedback.isCanceled.return_value = True + + reporter = ProgressReporter(total=100, feedback=mock_feedback) + + assert reporter.is_cancelled() is True + + def test_progress_reporter_set_description(self): + """Test updating progress description.""" + reporter = ProgressReporter(total=10, desc="Initial", disable=True) + + reporter.set_description("Updated") + + assert reporter.desc == "Updated" + + reporter.close() + + def test_progress_reporter_close_idempotent(self): + """Test that close() can be called multiple times.""" + reporter = ProgressReporter(total=10, disable=True) + + reporter.close() + reporter.close() # Should not crash + + assert reporter._closed + + def test_progress_reporter_update_after_close(self): + """Test that update after close is ignored.""" + reporter = ProgressReporter(total=10, disable=True) + + reporter.update(5) + reporter.close() + reporter.update(5) # Should be ignored + + # Should still be 5, not 10 + assert reporter.current == 5 + + +class TestProgressIterator: + """Test the get_progress_iterator function.""" + + def test_progress_iterator_basic(self): + """Test basic progress iterator usage.""" + items = range(10) + count = 0 + + for _item in get_progress_iterator(items, desc="Test", disable=True): + count += 1 + + assert count == 10 + + def test_progress_iterator_with_list(self): + """Test progress iterator with list.""" + items = [1, 2, 3, 4, 5] + result = [] + 
+ for item in get_progress_iterator(items, desc="Test", disable=True): + result.append(item) + + assert result == items + + def test_progress_iterator_total_from_len(self): + """Test that total is computed from len() if not provided.""" + items = [1, 2, 3, 4, 5] + + iterator = get_progress_iterator(items, desc="Test", disable=True) + # Access the reporter through the iterator (implementation detail) + pi = iterator + assert isinstance(pi, _ProgressIterator) + reporter: ProgressReporter = pi._reporter # type: ignore[unresolved-attribute] + assert reporter.total == 5 + + def test_progress_iterator_explicit_total(self): + """Test providing explicit total.""" + items = range(10) + + iterator = get_progress_iterator(items, desc="Test", total=100, disable=True) + pi = iterator + assert isinstance(pi, _ProgressIterator) + reporter: ProgressReporter = pi._reporter # type: ignore[unresolved-attribute] + assert reporter.total == 100 + + def test_progress_iterator_generator(self): + """Test progress iterator with generator.""" + + def gen(): + yield from range(5) + + result = [] + for item in get_progress_iterator(gen(), desc="Test", total=5, disable=True): + result.append(item) + + assert result == [0, 1, 2, 3, 4] + + def test_progress_iterator_stops_at_end(self): + """Test that iterator properly stops at end.""" + items = [1, 2, 3] + result = [] + + for item in get_progress_iterator(items, desc="Test", disable=True): + result.append(item) + + assert len(result) == 3 + assert result == items + + +class TestProgressFunction: + """Test the progress() convenience function.""" + + def test_progress_function_drop_in_replacement(self): + """Test that progress() works like tqdm.""" + items = range(10) + count = 0 + + for _item in progress(items, desc="Test", disable=True): + count += 1 + + assert count == 10 + + def test_progress_function_kwargs_ignored(self): + """Test that extra kwargs are ignored (for tqdm compatibility).""" + items = range(5) + result = [] + + # These kwargs 
are tqdm-specific and should be ignored + for item in progress(items, desc="Test", disable=True, leave=False, ncols=80): + result.append(item) + + assert result == list(items) + + +class TestProgressQGISIntegration: + """Test QGIS integration scenarios.""" + + def test_qgis_environment_detection(self): + """Test that QGIS environment is detected.""" + # Mock QGIS being available + mock_qgis = MagicMock() + + with patch.dict("sys.modules", {"qgis.core": mock_qgis}): + # Reimport to trigger detection + from importlib import reload + + import solweig.progress + + reload(solweig.progress) + + # Should detect QGIS + assert solweig.progress._QGIS_AVAILABLE + + def test_tqdm_environment_detection(self): + """Test that tqdm environment is detected.""" + try: + # Reimport to trigger detection + from importlib import reload + + import solweig.progress + from tqdm import tqdm # noqa: F401 + + del tqdm # Silence unused import warning + reload(solweig.progress) + + # Should detect tqdm + assert solweig.progress._TQDM_AVAILABLE + + except ImportError: + pytest.skip("tqdm not available") + + def test_progress_without_dependencies(self): + """Test that progress works without tqdm or QGIS.""" + # Hide both dependencies + with patch.dict(sys.modules, {"tqdm": None, "qgis.core": None}): + # Should still work, just silently + items = range(5) + result = [] + + for item in get_progress_iterator(items): + result.append(item) + + assert result == list(items) + + +class TestProgressEdgeCases: + """Test edge cases and error conditions.""" + + def test_progress_zero_total(self): + """Test progress with zero total.""" + reporter = ProgressReporter(total=0, disable=True) + + reporter.update(1) + reporter.close() + + # Should handle gracefully + assert reporter.current == 1 + + def test_progress_negative_update(self): + """Test negative update values.""" + reporter = ProgressReporter(total=10, disable=True) + + reporter.update(-5) + + # Current should be negative + assert reporter.current == 
-5 + + reporter.close() + + def test_progress_large_update(self): + """Test update larger than total.""" + reporter = ProgressReporter(total=10, disable=True) + + reporter.update(100) + + assert reporter.current == 100 + + reporter.close() + + def test_empty_iterable(self): + """Test progress with empty iterable.""" + items = [] + result = [] + + for item in get_progress_iterator(items, disable=True): + result.append(item) + + assert result == [] + + +if __name__ == "__main__": + pytest.main([__file__, "-v"]) diff --git a/tests/test_qgis_base.py b/tests/test_qgis_base.py new file mode 100644 index 0000000..8baea1e --- /dev/null +++ b/tests/test_qgis_base.py @@ -0,0 +1,215 @@ +""" +Tests for QGIS plugin base algorithm class. + +Mocks QGIS and GDAL dependencies so these tests run without a QGIS installation. +Tests grid validation, output path logic, and georeferenced output saving. +""" + +from __future__ import annotations + +from unittest.mock import MagicMock, patch + +import numpy as np +import pytest + +from tests.qgis_mocks import QgsProcessingException, install, install_osgeo, uninstall_osgeo + +install() # Must be called before any qgis_plugin imports +install_osgeo() # Temporarily needed for osgeo imports in base.py + +from qgis_plugin.solweig_qgis.algorithms.base import SolweigAlgorithmBase # noqa: E402 + +uninstall_osgeo() # Clean up immediately after imports to avoid polluting other tests + + +# --------------------------------------------------------------------------- +# Fixtures +# --------------------------------------------------------------------------- + + +@pytest.fixture() +def algo(): + """Create a SolweigAlgorithmBase instance for testing.""" + return SolweigAlgorithmBase() + + +@pytest.fixture() +def feedback(): + """Create a mock QgsProcessingFeedback.""" + fb = MagicMock() + fb.pushInfo = MagicMock() + return fb + + +@pytest.fixture() +def gdal_mocks(): + """Create fresh GDAL mock chain for save operations (patched on the base module).""" + 
mock_band = MagicMock() + mock_ds = MagicMock() + mock_ds.GetRasterBand.return_value = mock_band + mock_driver = MagicMock() + mock_driver.Create.return_value = mock_ds + + mock_gdal = MagicMock() + mock_gdal.GetDriverByName.return_value = mock_driver + mock_gdal.GDT_Float32 = 6 + + with patch("qgis_plugin.solweig_qgis.algorithms.base.gdal", mock_gdal): + yield mock_driver, mock_ds, mock_band + + +# --------------------------------------------------------------------------- +# check_grid_shapes_match +# --------------------------------------------------------------------------- + + +class TestCheckGridShapesMatch: + """Tests for SolweigAlgorithmBase.check_grid_shapes_match.""" + + def test_matching_shapes_pass(self, algo, feedback): + """No exception when all arrays match reference shape.""" + ref = (100, 200) + arrays = { + "CDSM": np.zeros((100, 200)), + "DEM": np.ones((100, 200)), + } + algo.check_grid_shapes_match(ref, arrays, feedback) + + def test_none_arrays_skipped(self, algo, feedback): + """None values are silently skipped.""" + ref = (100, 200) + arrays = { + "CDSM": None, + "DEM": np.zeros((100, 200)), + "TDSM": None, + } + algo.check_grid_shapes_match(ref, arrays, feedback) + + def test_mismatched_shape_raises(self, algo, feedback): + """Mismatched array shape raises QgsProcessingException.""" + ref = (100, 200) + arrays = {"CDSM": np.zeros((50, 200))} + with pytest.raises(QgsProcessingException, match="Grid shape mismatch"): + algo.check_grid_shapes_match(ref, arrays, feedback) + + def test_error_message_includes_name(self, algo, feedback): + """Error message includes the array name.""" + ref = (100, 200) + arrays = {"DEM": np.zeros((100, 100))} + with pytest.raises(QgsProcessingException, match="DEM"): + algo.check_grid_shapes_match(ref, arrays, feedback) + + def test_empty_arrays_pass(self, algo, feedback): + """Empty arrays dict doesn't raise.""" + algo.check_grid_shapes_match((100, 200), {}, feedback) + + +# 
--------------------------------------------------------------------------- +# get_output_path +# --------------------------------------------------------------------------- + + +class TestGetOutputPath: + """Tests for SolweigAlgorithmBase.get_output_path.""" + + def test_temp_file_when_no_param(self, algo): + """Returns temp path when output parameter is empty.""" + context = MagicMock() + result = algo.get_output_path({}, "OUTPUT_TMRT", "tmrt.tif", context) + assert result.endswith("tmrt.tif") + assert "solweig_qgis_output" in result + + def test_temp_file_when_param_empty(self, algo): + """Returns temp path when parameter is empty string.""" + context = MagicMock() + result = algo.get_output_path({"OUTPUT_TMRT": ""}, "OUTPUT_TMRT", "tmrt.tif", context) + assert result.endswith("tmrt.tif") + + +# --------------------------------------------------------------------------- +# save_georeferenced_output +# --------------------------------------------------------------------------- + + +class TestSaveGeoreferencedOutput: + """Tests for SolweigAlgorithmBase.save_georeferenced_output.""" + + def test_creates_output_dir(self, algo, tmp_path, gdal_mocks): + """Output directory is created if it doesn't exist.""" + mock_driver, mock_ds, mock_band = gdal_mocks + output_path = tmp_path / "subdir" / "output.tif" + geotransform = [0.0, 1.0, 0.0, 10.0, 0.0, -1.0] + + algo.save_georeferenced_output(np.ones((10, 10)), output_path, geotransform, "WKT") + + assert (tmp_path / "subdir").exists() + mock_driver.Create.assert_called_once() + mock_ds.SetGeoTransform.assert_called_once_with(geotransform) + mock_ds.SetProjection.assert_called_once_with("WKT") + mock_band.WriteArray.assert_called_once() + + def test_nan_replaced_with_nodata(self, algo, tmp_path, gdal_mocks): + """NaN values are replaced with nodata value.""" + _, _, mock_band = gdal_mocks + array = np.array([[1.0, np.nan], [np.nan, 2.0]]) + + algo.save_georeferenced_output(array, tmp_path / "out.tif", [0, 1, 0, 2, 0, -1], 
"") + + written = mock_band.WriteArray.call_args[0][0] + assert not np.any(np.isnan(written)) + assert written[0, 1] == -9999.0 + assert written[1, 0] == -9999.0 + + def test_custom_nodata(self, algo, tmp_path, gdal_mocks): + """Custom nodata value is used.""" + _, _, mock_band = gdal_mocks + array = np.array([[1.0, np.nan]]) + + algo.save_georeferenced_output(array, tmp_path / "out.tif", [0, 1, 0, 1, 0, -1], "", nodata=-999.0) + + mock_band.SetNoDataValue.assert_called_once_with(-999.0) + + def test_feedback_message(self, algo, tmp_path, feedback, gdal_mocks): + """Feedback reports saved file path.""" + algo.save_georeferenced_output( + np.ones((2, 2)), tmp_path / "output.tif", [0, 1, 0, 2, 0, -1], "", feedback=feedback + ) + + feedback.pushInfo.assert_called_once() + assert "output.tif" in feedback.pushInfo.call_args[0][0] + + def test_driver_create_failure_raises(self, algo, tmp_path): + """Raises QgsProcessingException when GDAL cannot create output.""" + mock_driver = MagicMock() + mock_driver.Create.return_value = None + mock_gdal = MagicMock() + mock_gdal.GetDriverByName.return_value = mock_driver + mock_gdal.GDT_Float32 = 6 + + with ( + patch("qgis_plugin.solweig_qgis.algorithms.base.gdal", mock_gdal), + pytest.raises(QgsProcessingException, match="Cannot create output"), + ): + algo.save_georeferenced_output(np.ones((2, 2)), tmp_path / "out.tif", [0, 1, 0, 2, 0, -1], "") + + +# --------------------------------------------------------------------------- +# createInstance / group / groupId +# --------------------------------------------------------------------------- + + +class TestAlgorithmMeta: + """Tests for algorithm metadata methods.""" + + def test_create_instance_returns_same_class(self, algo): + """createInstance returns a new instance of the same class.""" + new = algo.createInstance() + assert type(new) is SolweigAlgorithmBase + + def test_group_id(self, algo): + """Group ID is empty (algorithms appear directly under provider).""" + assert 
algo.groupId() == "" + + def test_help_url(self, algo): + """Help URL points to UMEP docs.""" + assert "umep" in algo.helpUrl().lower() diff --git a/tests/test_qgis_converters.py b/tests/test_qgis_converters.py new file mode 100644 index 0000000..a9c626c --- /dev/null +++ b/tests/test_qgis_converters.py @@ -0,0 +1,322 @@ +""" +Tests for QGIS plugin converter functions. + +Mocks QGIS and GDAL dependencies so these tests run without a QGIS installation. +Tests the pure logic: parameter dict -> solweig dataclass conversion. +""" + +from __future__ import annotations + +import contextlib +from datetime import datetime +from unittest.mock import MagicMock + +import pytest +import solweig + +from tests.qgis_mocks import QgsProcessingException, install, install_osgeo, uninstall_osgeo + +install() # Must be called before any qgis_plugin imports +install_osgeo() # Temporarily needed for osgeo imports in converters.py + +from qgis_plugin.solweig_qgis.utils.converters import ( # noqa: E402 + create_human_params_from_parameters, + create_location_from_parameters, + create_weather_from_parameters, + load_weather_from_epw, +) + +uninstall_osgeo() # Clean up immediately after imports to avoid polluting other tests + + +# --------------------------------------------------------------------------- +# Fixtures +# --------------------------------------------------------------------------- + + +@pytest.fixture() +def feedback(): + """Create a mock QgsProcessingFeedback.""" + fb = MagicMock() + fb.pushInfo = MagicMock() + fb.reportError = MagicMock() + return fb + + +# --------------------------------------------------------------------------- +# create_human_params_from_parameters +# --------------------------------------------------------------------------- + + +class TestCreateHumanParams: + """Tests for create_human_params_from_parameters.""" + + def test_defaults(self): + """Default parameters produce standing posture with abs_k=0.7.""" + human = 
create_human_params_from_parameters({}) + assert human.posture == "standing" + assert human.abs_k == 0.7 + + def test_posture_standing(self): + """Posture enum 0 -> 'standing'.""" + human = create_human_params_from_parameters({"POSTURE": 0}) + assert human.posture == "standing" + + def test_posture_sitting(self): + """Posture enum 1 -> 'sitting'.""" + human = create_human_params_from_parameters({"POSTURE": 1}) + assert human.posture == "sitting" + + def test_posture_unknown_defaults_standing(self): + """Unknown posture enum falls back to 'standing'.""" + human = create_human_params_from_parameters({"POSTURE": 99}) + assert human.posture == "standing" + + def test_custom_abs_k(self): + """Custom absorption coefficient.""" + human = create_human_params_from_parameters({"ABS_K": 0.5}) + assert human.abs_k == 0.5 + + def test_pet_body_params(self): + """PET body parameters are set when provided.""" + params = { + "WEIGHT": 80.0, + "HEIGHT": 1.80, + "AGE": 40, + "ACTIVITY": 100.0, + "CLOTHING": 1.2, + } + human = create_human_params_from_parameters(params) + assert human.weight == 80.0 + assert human.height == 1.80 + assert human.age == 40 + assert human.activity == 100.0 + assert human.clothing == 1.2 + + def test_sex_mapping_male(self): + """QGIS enum 0 (Male) -> solweig sex 1.""" + human = create_human_params_from_parameters({"SEX": 0}) + assert human.sex == 1 + + def test_sex_mapping_female(self): + """QGIS enum 1 (Female) -> solweig sex 2.""" + human = create_human_params_from_parameters({"SEX": 1}) + assert human.sex == 2 + + def test_sex_unknown_defaults_male(self): + """Unknown sex enum defaults to male (1).""" + human = create_human_params_from_parameters({"SEX": 99}) + assert human.sex == 1 + + def test_returns_human_params_instance(self): + """Result is a solweig.HumanParams dataclass.""" + human = create_human_params_from_parameters({}) + assert isinstance(human, solweig.HumanParams) + + def test_partial_pet_params(self): + """Only provided PET params are 
set, others keep defaults.""" + human = create_human_params_from_parameters({"WEIGHT": 90.0}) + assert human.weight == 90.0 + assert human.height == 1.75 # default + assert human.age == 35 # default + + +# --------------------------------------------------------------------------- +# create_weather_from_parameters +# --------------------------------------------------------------------------- + + +def _make_qdt(dt_obj: datetime): + """Create a mock QDateTime that returns the given datetime.""" + qdt = MagicMock() + qdt.toPyDateTime.return_value = dt_obj + return qdt + + +class TestCreateWeather: + """Tests for create_weather_from_parameters.""" + + def test_basic_weather(self, feedback): + """Basic weather parameters produce correct Weather object.""" + dt_obj = datetime(2024, 7, 15, 12, 0) + params = { + "DATETIME": _make_qdt(dt_obj), + "TEMPERATURE": 30.0, + "HUMIDITY": 60.0, + "GLOBAL_RADIATION": 900.0, + "WIND_SPEED": 2.0, + "PRESSURE": 1010.0, + } + weather = create_weather_from_parameters(params, feedback) + assert weather.datetime == dt_obj + assert weather.ta == 30.0 + assert weather.rh == 60.0 + assert weather.global_rad == 900.0 + assert weather.ws == 2.0 + assert weather.pressure == 1010.0 + + def test_default_values(self, feedback): + """Missing parameters use default values.""" + dt_obj = datetime(2024, 1, 1, 12, 0) + params = {"DATETIME": _make_qdt(dt_obj)} + weather = create_weather_from_parameters(params, feedback) + assert weather.ta == 25.0 + assert weather.rh == 50.0 + assert weather.global_rad == 800.0 + assert weather.ws == 1.0 + assert weather.pressure == 1013.25 + + def test_feedback_message(self, feedback): + """Feedback receives info message about weather.""" + dt_obj = datetime(2024, 7, 15, 12, 0) + params = { + "DATETIME": _make_qdt(dt_obj), + "TEMPERATURE": 30.0, + "HUMIDITY": 60.0, + "GLOBAL_RADIATION": 900.0, + } + create_weather_from_parameters(params, feedback) + feedback.pushInfo.assert_called_once() + msg = 
feedback.pushInfo.call_args[0][0] + assert "30.0" in msg + assert "60" in msg + assert "900" in msg + + def test_returns_weather_instance(self, feedback): + """Result is a solweig.Weather dataclass.""" + dt_obj = datetime(2024, 7, 15, 12, 0) + weather = create_weather_from_parameters({"DATETIME": _make_qdt(dt_obj)}, feedback) + assert isinstance(weather, solweig.Weather) + + +# --------------------------------------------------------------------------- +# create_location_from_parameters +# --------------------------------------------------------------------------- + + +class TestCreateLocation: + """Tests for create_location_from_parameters.""" + + def test_manual_location(self, feedback): + """Manual lat/lon input creates correct Location.""" + surface = MagicMock() + params = { + "AUTO_EXTRACT_LOCATION": False, + "LATITUDE": 37.97, + "LONGITUDE": 23.73, + "UTC_OFFSET": 2, + } + location = create_location_from_parameters(params, surface, feedback) + assert location.latitude == 37.97 + assert location.longitude == 23.73 + assert location.utc_offset == 2 + + def test_manual_location_defaults_utc0(self, feedback): + """UTC offset defaults to 0.""" + surface = MagicMock() + params = { + "AUTO_EXTRACT_LOCATION": False, + "LATITUDE": 51.5, + "LONGITUDE": -0.1, + } + location = create_location_from_parameters(params, surface, feedback) + assert location.utc_offset == 0 + + def test_manual_missing_coords_raises(self, feedback): + """Missing lat/lon raises QgsProcessingException.""" + surface = MagicMock() + params = {"AUTO_EXTRACT_LOCATION": False} + with pytest.raises(QgsProcessingException, match="Latitude and longitude are required"): + create_location_from_parameters(params, surface, feedback) + + def test_auto_extract_no_crs_raises(self, feedback): + """Auto-extract with missing CRS raises QgsProcessingException.""" + surface = MagicMock() + surface._crs_wkt = None + params = {"AUTO_EXTRACT_LOCATION": True, "UTC_OFFSET": 0} + with 
pytest.raises(QgsProcessingException, match="Cannot auto-extract"): + create_location_from_parameters(params, surface, feedback) + + def test_returns_location_instance(self, feedback): + """Result is a solweig.Location dataclass.""" + surface = MagicMock() + params = {"LATITUDE": 57.7, "LONGITUDE": 12.0} + location = create_location_from_parameters(params, surface, feedback) + assert isinstance(location, solweig.Location) + + def test_feedback_for_manual_location(self, feedback): + """Feedback reports manual coordinates.""" + surface = MagicMock() + params = { + "AUTO_EXTRACT_LOCATION": False, + "LATITUDE": 57.7, + "LONGITUDE": 12.0, + } + create_location_from_parameters(params, surface, feedback) + feedback.pushInfo.assert_called() + msg = feedback.pushInfo.call_args[0][0] + assert "57.7" in msg + assert "12.0" in msg + + +# --------------------------------------------------------------------------- +# load_weather_from_epw +# --------------------------------------------------------------------------- + + +class TestLoadWeatherFromEpw: + """Tests for load_weather_from_epw.""" + + def test_invalid_epw_path_raises(self, feedback): + """Non-existent EPW file raises QgsProcessingException.""" + with pytest.raises(QgsProcessingException, match="EPW file not found"): + load_weather_from_epw( + "/nonexistent/file.epw", + start_dt=datetime(2024, 1, 1), + end_dt=datetime(2024, 12, 31), + hours_filter=None, + feedback=feedback, + ) + + def test_qdatetime_conversion(self, feedback): + """QDateTime objects are converted to Python datetime.""" + qdt_start = MagicMock() + qdt_start.toPyDateTime.return_value = datetime(2024, 1, 1) + qdt_end = MagicMock() + qdt_end.toPyDateTime.return_value = datetime(2024, 12, 31) + + with pytest.raises(QgsProcessingException): + load_weather_from_epw("/nonexistent.epw", qdt_start, qdt_end, None, feedback) + + # Verify toPyDateTime was called (conversion happened) + qdt_start.toPyDateTime.assert_called_once() + 
qdt_end.toPyDateTime.assert_called_once() + + def test_hours_filter_parsing(self, feedback): + """Valid hours filter string is parsed and reported to feedback.""" + with contextlib.suppress(Exception): + load_weather_from_epw( + "/nonexistent.epw", + start_dt=datetime(2024, 1, 1), + end_dt=datetime(2024, 12, 31), + hours_filter="9,10,11,12", + feedback=feedback, + ) + + found_hour_msg = any("9" in str(call) and "10" in str(call) for call in feedback.pushInfo.call_args_list) + assert found_hour_msg, "Expected hour filter info message" + + def test_invalid_hours_filter_warns(self, feedback): + """Invalid hours filter reports error via feedback.""" + with contextlib.suppress(Exception): + load_weather_from_epw( + "/nonexistent.epw", + start_dt=datetime(2024, 1, 1), + end_dt=datetime(2024, 12, 31), + hours_filter="abc", + feedback=feedback, + ) + + feedback.reportError.assert_called_once() + msg = feedback.reportError.call_args[0][0] + assert "Invalid hours filter" in msg diff --git a/tests/test_shadow_cache.py b/tests/test_shadow_cache.py new file mode 100644 index 0000000..2bcd452 --- /dev/null +++ b/tests/test_shadow_cache.py @@ -0,0 +1,93 @@ +"""Tests for ShadowArrays float32 cache release (memory optimization).""" + +from __future__ import annotations + +from typing import TYPE_CHECKING + +import numpy as np +import pytest +from solweig.models.precomputed import ShadowArrays + +if TYPE_CHECKING: + from numpy.typing import NDArray + + +@pytest.fixture() +def shadow_arrays(): + """Small ShadowArrays for testing cache behavior (bitpacked format).""" + n_patches = 5 + n_pack = (n_patches + 7) // 8 # 1 byte for 5 patches + shape = (10, 10, n_pack) + rng = np.random.default_rng(42) + # Random bitpacked data (each byte holds up to 8 patch bits) + shmat: NDArray[np.uint8] = rng.integers(0, 256, shape, dtype=np.uint8).astype(np.uint8) + vegshmat: NDArray[np.uint8] = rng.integers(0, 256, shape, dtype=np.uint8).astype(np.uint8) + vbshmat: NDArray[np.uint8] = rng.integers(0, 
256, shape, dtype=np.uint8).astype(np.uint8) + return ShadowArrays( + _shmat_u8=shmat, + _vegshmat_u8=vegshmat, + _vbshmat_u8=vbshmat, + _n_patches=n_patches, + ) + + +class TestReleaseFloat32Cache: + """Tests for release_float32_cache() method.""" + + def test_release_clears_cache(self, shadow_arrays): + """After release, cached float32 arrays are None.""" + # Populate cache + _ = shadow_arrays.shmat + _ = shadow_arrays.vegshmat + _ = shadow_arrays.vbshmat + assert shadow_arrays._shmat_f32 is not None + + shadow_arrays.release_float32_cache() + + assert shadow_arrays._shmat_f32 is None + assert shadow_arrays._vegshmat_f32 is None + assert shadow_arrays._vbshmat_f32 is None + + def test_uint8_unchanged_after_release(self, shadow_arrays): + """uint8 originals remain intact after cache release.""" + original_shmat = shadow_arrays._shmat_u8.copy() + + _ = shadow_arrays.shmat # Populate cache + shadow_arrays.release_float32_cache() + + np.testing.assert_array_equal(shadow_arrays._shmat_u8, original_shmat) + + def test_cache_recreated_on_reaccess(self, shadow_arrays): + """Accessing properties after release recreates the cache correctly.""" + # First access + shmat_before = shadow_arrays.shmat.copy() + + # Release and re-access + shadow_arrays.release_float32_cache() + shmat_after = shadow_arrays.shmat + + np.testing.assert_array_equal(shmat_before, shmat_after) + assert shadow_arrays._shmat_f32 is not None + + def test_safe_to_call_before_access(self, shadow_arrays): + """Calling release before any cache access is a no-op.""" + shadow_arrays.release_float32_cache() # Should not raise + + assert shadow_arrays._shmat_f32 is None + + def test_safe_to_call_multiple_times(self, shadow_arrays): + """Calling release multiple times is safe.""" + _ = shadow_arrays.shmat + shadow_arrays.release_float32_cache() + shadow_arrays.release_float32_cache() # Second call is a no-op + + assert shadow_arrays._shmat_f32 is None + + def test_diffsh_works_after_release(self, shadow_arrays): 
+ """diffsh() still works after cache release (re-converts from uint8).""" + diffsh_before = shadow_arrays.diffsh(transmissivity=0.03).copy() + + shadow_arrays.release_float32_cache() + diffsh_after = shadow_arrays.diffsh(transmissivity=0.03) + + np.testing.assert_array_equal(diffsh_before, diffsh_after) diff --git a/tests/test_svf_memmap.py b/tests/test_svf_memmap.py new file mode 100644 index 0000000..46136c4 --- /dev/null +++ b/tests/test_svf_memmap.py @@ -0,0 +1,138 @@ +"""Tests for SVF memmap caching functionality.""" + +import tempfile +from pathlib import Path + +import numpy as np +import pytest +from solweig.models.precomputed import SvfArrays + + +class TestSvfMemmap: + """Tests for memory-mapped SVF storage.""" + + @pytest.fixture + def sample_svf_data(self): + """Create sample SVF arrays for testing.""" + np.random.seed(42) + size = 100 + + return { + "svf": np.random.rand(size, size).astype(np.float32), + "svf_north": np.random.rand(size, size).astype(np.float32), + "svf_east": np.random.rand(size, size).astype(np.float32), + "svf_south": np.random.rand(size, size).astype(np.float32), + "svf_west": np.random.rand(size, size).astype(np.float32), + "svf_veg": np.random.rand(size, size).astype(np.float32), + "svf_veg_north": np.random.rand(size, size).astype(np.float32), + "svf_veg_east": np.random.rand(size, size).astype(np.float32), + "svf_veg_south": np.random.rand(size, size).astype(np.float32), + "svf_veg_west": np.random.rand(size, size).astype(np.float32), + "svf_aveg": np.random.rand(size, size).astype(np.float32), + "svf_aveg_north": np.random.rand(size, size).astype(np.float32), + "svf_aveg_east": np.random.rand(size, size).astype(np.float32), + "svf_aveg_south": np.random.rand(size, size).astype(np.float32), + "svf_aveg_west": np.random.rand(size, size).astype(np.float32), + } + + def test_save_and_load_memmap(self, sample_svf_data): + """Test saving and loading SVF arrays as memmap.""" + svf = SvfArrays(**sample_svf_data) + + with 
tempfile.TemporaryDirectory() as tmpdir: + cache_dir = Path(tmpdir) / "svf_cache" + + # Save to memmap + result_dir = svf.to_memmap(cache_dir) + assert result_dir.exists() + + # Verify files were created + assert (cache_dir / "svf.npy").exists() + assert (cache_dir / "svf_north.npy").exists() + + # Load from memmap + svf_loaded = SvfArrays.from_memmap(cache_dir) + + # Verify data matches + assert np.allclose(svf.svf, svf_loaded.svf) + assert np.allclose(svf.svf_north, svf_loaded.svf_north) + assert np.allclose(svf.svf_veg, svf_loaded.svf_veg) + + def test_memmap_preserves_dtype(self, sample_svf_data): + """Verify memmap arrays maintain float32 dtype.""" + svf = SvfArrays(**sample_svf_data) + + with tempfile.TemporaryDirectory() as tmpdir: + cache_dir = Path(tmpdir) / "svf_cache" + svf.to_memmap(cache_dir) + svf_loaded = SvfArrays.from_memmap(cache_dir) + + # Check dtype is preserved + assert svf_loaded.svf.dtype == np.float32 + assert svf_loaded.svf_veg.dtype == np.float32 + + def test_memmap_is_actually_memmap(self, sample_svf_data): + """Verify loaded arrays are actually memory-mapped.""" + svf = SvfArrays(**sample_svf_data) + + with tempfile.TemporaryDirectory() as tmpdir: + cache_dir = Path(tmpdir) / "svf_cache" + svf.to_memmap(cache_dir) + svf_loaded = SvfArrays.from_memmap(cache_dir) + + # Verify it's a memmap + assert isinstance(svf_loaded.svf, np.memmap) + assert isinstance(svf_loaded.svf_north, np.memmap) + + def test_memmap_slicing_works(self, sample_svf_data): + """Test that slicing memmap arrays works correctly.""" + svf = SvfArrays(**sample_svf_data) + + with tempfile.TemporaryDirectory() as tmpdir: + cache_dir = Path(tmpdir) / "svf_cache" + svf.to_memmap(cache_dir) + svf_loaded = SvfArrays.from_memmap(cache_dir) + + # Test slicing (simulates tiled access) + tile = svf_loaded.svf[20:40, 30:50] + assert tile.shape == (20, 20) + assert np.allclose(tile, svf.svf[20:40, 30:50]) + + def test_memmap_computed_properties_work(self, sample_svf_data): + """Test 
that computed properties (svfalfa, svfbuveg) work with memmap.""" + svf = SvfArrays(**sample_svf_data) + + with tempfile.TemporaryDirectory() as tmpdir: + cache_dir = Path(tmpdir) / "svf_cache" + svf.to_memmap(cache_dir) + svf_loaded = SvfArrays.from_memmap(cache_dir) + + # Computed properties should work + svfalfa = svf_loaded.svfalfa + svfbuveg = svf_loaded.svfbuveg + + assert svfalfa.shape == svf_loaded.svf.shape + assert svfbuveg.shape == svf_loaded.svf.shape + + # Values should match + assert np.allclose(svfalfa, svf.svfalfa) + assert np.allclose(svfbuveg, svf.svfbuveg) + + def test_from_memmap_nonexistent_raises(self): + """Test that loading from nonexistent directory raises error.""" + with pytest.raises(FileNotFoundError): + SvfArrays.from_memmap("/nonexistent/path") + + def test_from_memmap_missing_file_raises(self, sample_svf_data): + """Test that missing files raise error.""" + svf = SvfArrays(**sample_svf_data) + + with tempfile.TemporaryDirectory() as tmpdir: + cache_dir = Path(tmpdir) / "svf_cache" + svf.to_memmap(cache_dir) + + # Delete one file + (cache_dir / "svf.npy").unlink() + + with pytest.raises(FileNotFoundError): + SvfArrays.from_memmap(cache_dir) diff --git a/tests/test_tiling_integration.py b/tests/test_tiling_integration.py new file mode 100644 index 0000000..5c870e0 --- /dev/null +++ b/tests/test_tiling_integration.py @@ -0,0 +1,883 @@ +"""Integration tests for multi-tile processing. + +These tests use larger synthetic rasters to actually exercise multi-tile +processing rather than falling back to single-tile mode. 
+""" + +from datetime import datetime + +import numpy as np +import pytest +from conftest import make_mock_svf +from solweig import ( + Location, + PrecomputedData, + SurfaceData, + Weather, + calculate, + calculate_tiled, +) +from solweig.errors import MissingPrecomputedData +from solweig.models.state import ThermalState, TileSpec +from solweig.tiling import ( + _calculate_auto_tile_size, + _extract_tile_surface, + _merge_tile_state, + _should_use_tiling, + _slice_tile_state, + calculate_buffer_distance, + compute_max_tile_side, +) + +pytestmark = pytest.mark.slow + + +class TestMultiTileProcessing: + """Tests that actually exercise multi-tile processing.""" + + @pytest.fixture(scope="class") + def large_urban_surface(self): + """Create a 400x400 synthetic urban surface with low buildings. + + Uses LOW buildings (5m) so buffer requirement is small enough + to actually trigger multi-tile processing. + + Buffer formula: max_height / tan(3°) = 5 / 0.0524 ≈ 95m + With 95px buffer, tile_size=256 has 66px core which is too small. + But with tile_size=300, we get ~108px core (marginal). 
+ """ + np.random.seed(42) + size = 400 + + # Base terrain at 10m + dsm = np.ones((size, size), dtype=np.float32) * 10.0 + + # Add LOW buildings (5m above ground = 15m total) to keep buffer small + for _ in range(15): + x, y = np.random.randint(50, size - 50, 2) + w, h = np.random.randint(15, 30, 2) + dsm[y : y + h, x : x + w] = 15.0 # 5m above ground + + # Create land cover (grass=5, buildings=2) + land_cover = np.ones((size, size), dtype=np.int32) * 5 + land_cover[dsm > 12] = 2 + + from conftest import make_mock_svf + + surface = SurfaceData( + dsm=dsm, + land_cover=land_cover, + pixel_size=1.0, + svf=make_mock_svf((size, size)), + ) + + return surface + + @pytest.fixture(scope="class") + def weather_noon(self): + """Summer noon weather conditions.""" + return Weather( + datetime=datetime(2024, 7, 15, 12, 0), + ta=28.0, + rh=45.0, + global_rad=850.0, + ws=2.0, + ) + + @pytest.fixture(scope="class") + def location_gothenburg(self): + """Gothenburg, Sweden location.""" + return Location(latitude=57.7, longitude=12.0, utc_offset=2) + + def test_multitile_actually_tiles(self, large_urban_surface, location_gothenburg, weather_noon): + """Verify that large raster is actually processed in multiple tiles.""" + from unittest.mock import patch + + # 5m buildings (relative height = 15m DSM - 10m ground = 5m) + # buffer = 5 / tan(3°) = 95.4m → 96px (below cap of 100m) + # tile_size=350, buffer=96px → 4 tiles on a 400×400 raster + captured = {} + original_generate_tiles = __import__("solweig.tiling", fromlist=["generate_tiles"]).generate_tiles + + def spy_generate_tiles(rows, cols, tile_size, buffer_pixels): + captured["n_tiles"] = len(original_generate_tiles(rows, cols, tile_size, buffer_pixels)) + captured["buffer_pixels"] = buffer_pixels + return original_generate_tiles(rows, cols, tile_size, buffer_pixels) + + with patch("solweig.tiling.generate_tiles", side_effect=spy_generate_tiles): + result = calculate_tiled( + large_urban_surface, + location_gothenburg, + 
weather_noon, + tile_size=350, + max_shadow_distance_m=100.0, + ) + + # Check that multi-tile processing was used + assert captured.get("n_tiles", 0) > 1, f"Expected multiple tiles, got {captured.get('n_tiles', 0)}" + + # Buffer from 5m relative height: ceil(95.4 / 1.0) = 96px + assert captured["buffer_pixels"] == 96 + + # Verify output shape matches input + assert result.tmrt.shape == large_urban_surface.shape + + # Verify reasonable Tmrt values (not all NaN) + valid_pixels = np.isfinite(result.tmrt) + assert valid_pixels.sum() > 0.8 * result.tmrt.size, "Too many NaN values in Tmrt" + + # Tmrt should be in reasonable range for summer midday + valid_tmrt = result.tmrt[valid_pixels] + assert 20 < np.median(valid_tmrt) < 80, f"Median Tmrt {np.median(valid_tmrt):.1f}°C out of expected range" + + def test_multitile_vs_nontiled_comparison(self, location_gothenburg, weather_noon): + """Compare tiled vs non-tiled results on a moderate-size raster.""" + # Use 400x400 which can be processed either way + size = 400 + np.random.seed(123) + + dsm = np.ones((size, size), dtype=np.float32) * 5.0 + # Add a few small buildings + for _ in range(5): + x, y = np.random.randint(50, size - 50, 2) + dsm[y : y + 20, x : x + 20] = 15.0 + + surface = SurfaceData(dsm=dsm, pixel_size=2.0, svf=make_mock_svf((size, size))) # 2m pixels = 800m extent + + # Non-tiled reference + result_ref = calculate(surface, location_gothenburg, weather_noon) + + # Tiled with limited shadow distance to keep buffer manageable + # With max_shadow_distance_m=200 and 2m pixels: buffer = 100 pixels + result_tiled = calculate_tiled( + surface, + location_gothenburg, + weather_noon, + tile_size=300, + max_shadow_distance_m=200.0, + ) + + # Compare Tmrt where both are valid + both_valid = np.isfinite(result_ref.tmrt) & np.isfinite(result_tiled.tmrt) + + if both_valid.sum() > 0: + diff = np.abs(result_tiled.tmrt[both_valid] - result_ref.tmrt[both_valid]) + mean_diff = diff.mean() + max_diff = diff.max() + + # Both paths 
now use the same mock SVF (tiled path slices from global). + # Only shadow edge effects from tiling should cause small differences. + assert mean_diff < 0.01, f"Mean Tmrt diff {mean_diff:.2f}°C too large" + assert max_diff < 0.1, f"Max Tmrt diff {max_diff:.2f}°C too large (possible tile boundary issue)" + + def test_tile_boundary_continuity(self, location_gothenburg, weather_noon): + """Verify results are continuous across tile boundaries.""" + size = 300 + + # Uniform flat terrain - should have smooth Tmrt + dsm = np.ones((size, size), dtype=np.float32) * 5.0 + surface = SurfaceData(dsm=dsm, pixel_size=1.0, svf=make_mock_svf((size, size))) + + result = calculate_tiled( + surface, + location_gothenburg, + weather_noon, + tile_size=256, + max_shadow_distance_m=50.0, + ) + + valid_tmrt = result.tmrt[np.isfinite(result.tmrt)] + + # For flat terrain, Tmrt should be nearly uniform + std_dev = np.std(valid_tmrt) + assert std_dev < 0.5, f"Tmrt std dev {std_dev:.2f}°C too high for flat terrain" + + def test_progress_callback(self, large_urban_surface, location_gothenburg, weather_noon): + """Test that progress callback is called correctly.""" + progress_calls = [] + + def track_progress(tile_idx, total_tiles): + progress_calls.append((tile_idx, total_tiles)) + + _result = calculate_tiled( + large_urban_surface, + location_gothenburg, + weather_noon, + tile_size=350, + max_shadow_distance_m=100.0, + progress_callback=track_progress, + ) + + # Should have received progress updates + assert len(progress_calls) > 0, "No progress callbacks received" + + # Last call should indicate completion + last_idx, total = progress_calls[-1] + assert last_idx == total, f"Final callback should show completion: {last_idx}/{total}" + + +class TestTilingMemoryBehavior: + """Tests focused on memory behavior of tiled processing.""" + + def test_tile_isolation(self): + """Verify tiles don't share mutable state.""" + size = 300 + dsm = np.ones((size, size), dtype=np.float32) * 10.0 + dsm[200:300, 
200:300] = 25.0 # Building + + original_dsm = dsm.copy() + + surface = SurfaceData(dsm=dsm, pixel_size=1.0, svf=make_mock_svf((size, size))) + location = Location(latitude=57.7, longitude=12.0, utc_offset=2) + weather = Weather( + datetime=datetime(2024, 7, 15, 12, 0), + ta=25.0, + rh=50.0, + global_rad=800.0, + ) + + _ = calculate_tiled(surface, location, weather, tile_size=256, max_shadow_distance_m=50.0) + + # Original DSM should be unchanged + assert np.allclose(surface.dsm, original_dsm), "DSM was modified during tiled processing" + + +class TestTilingHelpers: + """Tests for tiling helper functions.""" + + def test_should_use_tiling_below_threshold(self): + """Rasters below resource limit should not trigger tiling.""" + assert not _should_use_tiling(100, 100) + assert not _should_use_tiling(400, 400) + + def test_should_use_tiling_above_threshold(self): + """Rasters exceeding resource-derived max should trigger tiling.""" + max_side = compute_max_tile_side(context="solweig") + assert _should_use_tiling(max_side + 1, max_side + 1) + assert _should_use_tiling(max_side + 1, 100) + assert _should_use_tiling(100, max_side + 1) + # Below resource limit — no tiling needed + assert not _should_use_tiling(max_side, max_side) + + def test_calculate_tiled_requires_svf(self): + """Tiled runtime must not implicitly compute missing SVF.""" + dsm = np.ones((320, 320), dtype=np.float32) * 5.0 + surface = SurfaceData(dsm=dsm, pixel_size=1.0) + location = Location(latitude=57.7, longitude=12.0, utc_offset=2) + weather = Weather( + datetime=datetime(2024, 7, 15, 12, 0), + ta=25.0, + rh=50.0, + global_rad=800.0, + ) + + with pytest.raises(MissingPrecomputedData): + calculate_tiled( + surface, + location, + weather, + tile_size=128, + use_anisotropic_sky=False, + ) + + def test_auto_tile_size_returns_resource_max(self): + """Auto tile size returns resource-derived maximum.""" + max_side = compute_max_tile_side(context="solweig") + assert _calculate_auto_tile_size(max_side + 1000, 
max_side + 1000) == max_side + assert _calculate_auto_tile_size(100, 100) == max_side + + def test_extract_tile_surface_reuses_svf(self): + """When surface has precomputed SVF, tile surface should get sliced SVF.""" + size = 100 + dsm = np.ones((size, size), dtype=np.float32) * 5.0 + mock_svf = make_mock_svf((size, size)) + surface = SurfaceData(dsm=dsm, pixel_size=1.0, svf=mock_svf) + + # Create a tile covering rows 10-60, cols 10-60 (with 10px overlap) + tile = TileSpec( + row_start=20, + row_end=50, + col_start=20, + col_end=50, + row_start_full=10, + row_end_full=60, + col_start_full=10, + col_end_full=60, + overlap_top=10, + overlap_bottom=10, + overlap_left=10, + overlap_right=10, + ) + + tile_surface = _extract_tile_surface(surface, tile, pixel_size=1.0) + + # SVF should be set (sliced from global, not recomputed) + assert tile_surface.svf is not None + assert tile_surface.svf.svf.shape == (50, 50) # 60-10 = 50 + + # Values should match the sliced region of the global SVF + np.testing.assert_array_equal( + tile_surface.svf.svf, + mock_svf.svf[10:60, 10:60], + ) + + def test_extract_tile_surface_leaves_svf_unset_when_missing(self): + """When SVF is unavailable globally, tile extraction must not compute it.""" + size = 50 + dsm = np.ones((size, size), dtype=np.float32) * 5.0 + surface = SurfaceData(dsm=dsm, pixel_size=1.0) + assert surface.svf is None + + tile = TileSpec( + row_start=10, + row_end=40, + col_start=10, + col_end=40, + row_start_full=0, + row_end_full=50, + col_start_full=0, + col_end_full=50, + overlap_top=10, + overlap_bottom=10, + overlap_left=10, + overlap_right=10, + ) + + tile_surface = _extract_tile_surface(surface, tile, pixel_size=1.0) + + # SVF remains unset; callers enforce the SVF precondition. 
+ assert tile_surface.svf is None + + def test_extract_tile_surface_uses_precomputed_svf_without_recompute(self): + """When surface.svf is missing, precomputed.svf should be sliced and reused.""" + from unittest.mock import patch + + size = 50 + dsm = np.ones((size, size), dtype=np.float32) * 5.0 + surface = SurfaceData(dsm=dsm, pixel_size=1.0) + precomputed = PrecomputedData(svf=make_mock_svf((size, size))) + + tile = TileSpec( + row_start=10, + row_end=40, + col_start=10, + col_end=40, + row_start_full=0, + row_end_full=50, + col_start_full=0, + col_end_full=50, + overlap_top=10, + overlap_bottom=10, + overlap_left=10, + overlap_right=10, + ) + + with patch.object(SurfaceData, "compute_svf", side_effect=AssertionError("compute_svf should not be called")): + tile_surface = _extract_tile_surface(surface, tile, pixel_size=1.0, precomputed=precomputed) + + assert tile_surface.svf is not None + assert tile_surface.svf.svf.shape == (50, 50) + assert precomputed.svf is not None + np.testing.assert_array_equal( + tile_surface.svf.svf, + precomputed.svf.svf[0:50, 0:50], + ) + + +class TestSliceMergeState: + """Tests for _slice_tile_state and _merge_tile_state.""" + + @pytest.fixture + def global_state(self): + """Create a global state with distinctive values.""" + shape = (100, 100) + state = ThermalState( + tgmap1=np.random.rand(*shape).astype(np.float32), + tgmap1_e=np.random.rand(*shape).astype(np.float32), + tgmap1_s=np.random.rand(*shape).astype(np.float32), + tgmap1_w=np.random.rand(*shape).astype(np.float32), + tgmap1_n=np.random.rand(*shape).astype(np.float32), + tgout1=np.random.rand(*shape).astype(np.float32), + firstdaytime=1.0, + timeadd=0.5, + timestep_dec=0.042, + ) + return state + + @pytest.fixture + def tile(self): + """Create a tile spec for the center of a 100x100 grid.""" + # Core: rows 20-60, cols 30-70 + # Full (with 10px overlap): rows 10-70, cols 20-80 + return TileSpec( + row_start=20, + row_end=60, + col_start=30, + col_end=70, + 
row_start_full=10, + row_end_full=70, + col_start_full=20, + col_end_full=80, + overlap_top=10, + overlap_bottom=10, + overlap_left=10, + overlap_right=10, + ) + + def test_slice_tile_state_shape(self, global_state, tile): + """Sliced state should have full tile shape.""" + sliced = _slice_tile_state(global_state, tile) + expected_shape = (60, 60) # rows 10-70, cols 20-80 + assert sliced.tgmap1.shape == expected_shape + assert sliced.tgmap1_e.shape == expected_shape + assert sliced.tgout1.shape == expected_shape + + def test_slice_tile_state_values(self, global_state, tile): + """Sliced state should contain correct values from global state.""" + sliced = _slice_tile_state(global_state, tile) + read_slice = tile.read_slice + np.testing.assert_array_equal(sliced.tgmap1, global_state.tgmap1[read_slice]) + + def test_slice_tile_state_scalars(self, global_state, tile): + """Sliced state should copy scalar values.""" + sliced = _slice_tile_state(global_state, tile) + assert sliced.firstdaytime == 1.0 + assert sliced.timeadd == 0.5 + assert sliced.timestep_dec == 0.042 + + def test_slice_tile_state_independent(self, global_state, tile): + """Sliced state should be a copy, not a view.""" + sliced = _slice_tile_state(global_state, tile) + original_val = sliced.tgmap1[0, 0] + sliced.tgmap1[0, 0] = -999.0 + assert global_state.tgmap1[tile.row_start_full, tile.col_start_full] == original_val + + def test_merge_tile_state_writes_core(self, global_state, tile): + """Merge should write tile core region to correct global position.""" + sliced = _slice_tile_state(global_state, tile) + + # Modify tile state values + sliced.tgmap1[:] = 42.0 + sliced.firstdaytime = 0.0 + sliced.timeadd = 1.5 + + _merge_tile_state(sliced, tile, global_state) + + # Core region should be updated + write_slice = tile.write_slice + np.testing.assert_array_equal(global_state.tgmap1[write_slice], 42.0) + + # Scalar values should be updated + assert global_state.firstdaytime == 0.0 + assert 
global_state.timeadd == 1.5 + + def test_merge_tile_state_preserves_outside(self, global_state, tile): + """Merge should not modify areas outside the tile's write region.""" + original_tgmap1 = global_state.tgmap1.copy() + sliced = _slice_tile_state(global_state, tile) + sliced.tgmap1[:] = 42.0 + + _merge_tile_state(sliced, tile, global_state) + + # Areas outside write_slice should be unchanged + # Check top-left corner (row 0, col 0) — outside tile + assert global_state.tgmap1[0, 0] == original_tgmap1[0, 0] + # Check bottom-right corner — outside tile + assert global_state.tgmap1[99, 99] == original_tgmap1[99, 99] + + +class TestTimeseriesTiledIntegration: + """Integration tests for tiled timeseries processing.""" + + @pytest.fixture(scope="class") + def small_surface(self): + """Small 50x50 surface for fast tests (below tiling threshold).""" + np.random.seed(42) + size = 50 + dsm = np.ones((size, size), dtype=np.float32) * 5.0 + dsm[20:30, 20:30] = 10.0 # Small building + return SurfaceData(dsm=dsm, pixel_size=1.0, svf=make_mock_svf((size, size))) + + @pytest.fixture(scope="class") + def location(self): + return Location(latitude=57.7, longitude=12.0, utc_offset=2) + + @pytest.fixture(scope="class") + def weather_pair(self): + """Two consecutive timesteps for minimal timeseries.""" + return [ + Weather(datetime=datetime(2024, 7, 15, 11, 0), ta=26.0, rh=50.0, global_rad=750.0, ws=2.0), + Weather(datetime=datetime(2024, 7, 15, 12, 0), ta=28.0, rh=45.0, global_rad=850.0, ws=2.0), + ] + + def test_timeseries_tiled_matches_nontiled(self, small_surface, location, weather_pair): + """Tiled timeseries should match non-tiled within numerical precision. + + Both paths use the same mock SVF from the surface (tiled path slices + the global SVF per tile instead of recomputing). 
+ """ + from solweig import calculate_timeseries, calculate_timeseries_tiled + + # Non-tiled (normal path — uses mock SVF from surface) + summary_ref = calculate_timeseries( + surface=small_surface, + weather_series=weather_pair, + location=location, + timestep_outputs=["tmrt"], + ) + + # Tiled (forced via direct call — slices mock SVF from surface) + summary_tiled = calculate_timeseries_tiled( + surface=small_surface, + weather_series=weather_pair, + location=location, + timestep_outputs=["tmrt"], + ) + + assert len(summary_ref) == len(summary_tiled) + + for i, (ref, tiled) in enumerate(zip(summary_ref.results, summary_tiled.results, strict=False)): + both_valid = np.isfinite(ref.tmrt) & np.isfinite(tiled.tmrt) + if both_valid.sum() > 0: + diff = np.abs(ref.tmrt[both_valid] - tiled.tmrt[both_valid]) + # Both paths now use the same mock SVF (tiled path slices from global). + assert diff.mean() < 0.01, f"Timestep {i}: mean Tmrt diff {diff.mean():.2f}°C too large" + assert diff.max() < 0.1, f"Timestep {i}: max Tmrt diff {diff.max():.2f}°C too large" + + def test_timeseries_tiled_state_accumulates(self, small_surface, location, weather_pair): + """Thermal state should evolve across timesteps in tiled mode.""" + from solweig import calculate_timeseries_tiled + + summary = calculate_timeseries_tiled( + surface=small_surface, + weather_series=weather_pair, + location=location, + timestep_outputs=["tmrt"], + ) + + # Both timesteps should produce valid results + assert len(summary) == 2 + for r in summary.results: + valid = np.isfinite(r.tmrt) + assert valid.sum() > 0, "Expected some valid Tmrt values" + + def test_timeseries_tiled_progress_callback(self, small_surface, location, weather_pair): + """Progress callback should be called for tiled timeseries.""" + from solweig import calculate_timeseries_tiled + + calls = [] + + def track(current, total): + calls.append((current, total)) + + calculate_timeseries_tiled( + surface=small_surface, + weather_series=weather_pair, + 
location=location, + progress_callback=track, + ) + + assert len(calls) > 0, "No progress callbacks received" + + def test_timeseries_tiled_default_no_timestep_outputs(self, small_surface, location, weather_pair): + """Default mode should not retain tiled timestep results.""" + from solweig import calculate_timeseries_tiled + + summary = calculate_timeseries_tiled( + surface=small_surface, + weather_series=weather_pair, + location=location, + ) + + assert summary.results == [] + assert len(summary) == 2 + + def test_timeseries_tiled_summary_only_requests_tmrt_and_shadow(self, small_surface, location, weather_pair): + """Summary-only mode should request tmrt and shadow from tiled per-tile calculations.""" + from solweig import SolweigResult, calculate_timeseries_tiled + + captured: list[set[str] | None] = [] + + def _fake_calculate(**kwargs): + captured.append(kwargs.get("_requested_outputs")) + shape = kwargs["surface"].dsm.shape + return SolweigResult( + tmrt=np.zeros(shape, dtype=np.float32), + shadow=np.zeros(shape, dtype=np.float32), + kdown=None, + kup=None, + ldown=None, + lup=None, + utci=None, + pet=None, + state=None, + ) + + monkeypatch = pytest.MonkeyPatch() + monkeypatch.setattr("solweig.api.calculate", _fake_calculate) + try: + summary = calculate_timeseries_tiled( + surface=small_surface, + weather_series=weather_pair, + location=location, + ) + finally: + monkeypatch.undo() + + assert summary.results == [] + assert captured and all(req == {"tmrt", "shadow"} for req in captured) + + def test_timeseries_tiled_precreates_tile_surfaces_once(self, small_surface, location, weather_pair): + """Tile surfaces should be extracted once per tile, not once per timestep.""" + from unittest.mock import patch + + from solweig import calculate_timeseries_tiled + from solweig import tiling as tiling_module + + extract_calls = 0 + original_extract = tiling_module._extract_tile_surface + + def spy_extract(*args, **kwargs): + nonlocal extract_calls + extract_calls += 1 + 
return original_extract(*args, **kwargs) + + rows, cols = small_surface.shape + pixel_size = small_surface.pixel_size + max_height = small_surface.max_height + buffer_m = calculate_buffer_distance(max_height) + buffer_pixels = int(np.ceil(buffer_m / pixel_size)) + tile_size = tiling_module._calculate_auto_tile_size(rows, cols) + adjusted_tile_size, _warning = tiling_module.validate_tile_size(tile_size, buffer_pixels, pixel_size) + expected_tiles = len(tiling_module.generate_tiles(rows, cols, adjusted_tile_size, buffer_pixels)) + + with patch("solweig.tiling._extract_tile_surface", side_effect=spy_extract): + calculate_timeseries_tiled( + surface=small_surface, + weather_series=weather_pair, + location=location, + ) + + assert extract_calls == expected_tiles, ( + f"Expected {expected_tiles} tile surface extractions, got {extract_calls} " + "(possible per-timestep recomputation regression)" + ) + + def test_timeseries_tiled_worker_parity(self, small_surface, location, weather_pair): + """Worker count should not materially change tiled timeseries outputs.""" + from solweig import calculate_timeseries_tiled + + summary_one = calculate_timeseries_tiled( + surface=small_surface, + weather_series=weather_pair, + location=location, + tile_workers=1, + tile_queue_depth=0, + prefetch_tiles=False, + timestep_outputs=["tmrt"], + ) + summary_multi = calculate_timeseries_tiled( + surface=small_surface, + weather_series=weather_pair, + location=location, + tile_workers=2, + tile_queue_depth=2, + prefetch_tiles=True, + timestep_outputs=["tmrt"], + ) + + assert len(summary_one) == len(summary_multi) + for ref, got in zip(summary_one.results, summary_multi.results, strict=False): + both_valid = np.isfinite(ref.tmrt) & np.isfinite(got.tmrt) + if both_valid.sum() > 0: + diff = np.abs(ref.tmrt[both_valid] - got.tmrt[both_valid]) + assert diff.mean() < 0.01 + assert diff.max() < 0.1 + + def test_invalid_tile_workers_raises(self, small_surface, location, weather_pair): + """Invalid 
tile_workers should raise clear ValueError.""" + from solweig import calculate_timeseries_tiled + + with pytest.raises(ValueError, match="tile_workers must be >= 1"): + calculate_timeseries_tiled( + surface=small_surface, + weather_series=weather_pair, + location=location, + tile_workers=0, + ) + + def test_invalid_tile_queue_depth_raises(self, small_surface, location, weather_pair): + """Invalid tile_queue_depth should raise clear ValueError.""" + from solweig import calculate_timeseries_tiled + + with pytest.raises(ValueError, match="tile_queue_depth must be >= 0"): + calculate_timeseries_tiled( + surface=small_surface, + weather_series=weather_pair, + location=location, + tile_queue_depth=-1, + ) + + +class TestTiledAnisotropicParity: + """Verify anisotropic sky produces matching results in tiled vs non-tiled mode. + + Shadow matrices are spatially sliced per tile, so anisotropic diffuse + radiation must agree between tiled and non-tiled paths. + + Uses a flat surface (no buildings → max_height=0 → zero buffer) so + tile boundaries introduce no shadow truncation artifacts, giving a + clean comparison of shadow matrix slicing. + """ + + @pytest.fixture(scope="class") + def aniso_surface(self): + """530x530 flat surface with synthetic shadow matrices (all visible). + + 530x530 at tile_size=256 → ceil(530/256)=3 → 9 tiles, ensuring + multi-tile processing is exercised. Flat terrain (max_height=0) + means zero overlap buffer so results should match exactly. 
+ """ + from solweig.models.precomputed import ShadowArrays + + size = 530 + n_patches = 153 + n_pack = (n_patches + 7) // 8 + + dsm = np.ones((size, size), dtype=np.float32) * 2.0 + + surface = SurfaceData(dsm=dsm, pixel_size=1.0, svf=make_mock_svf((size, size))) + + # All-visible shadow matrices (0xFF = every patch visible) + shmat = np.full((size, size, n_pack), 0xFF, dtype=np.uint8) + vegshmat = np.full((size, size, n_pack), 0xFF, dtype=np.uint8) + vbshmat = np.full((size, size, n_pack), 0xFF, dtype=np.uint8) + + surface.shadow_matrices = ShadowArrays( + _shmat_u8=shmat, + _vegshmat_u8=vegshmat, + _vbshmat_u8=vbshmat, + _n_patches=n_patches, + ) + return surface + + @pytest.fixture(scope="class") + def aniso_location(self): + return Location(latitude=57.7, longitude=12.0, utc_offset=2) + + @pytest.fixture(scope="class") + def aniso_weather(self): + return Weather( + datetime=datetime(2024, 7, 15, 12, 0), + ta=25.0, + rh=50.0, + global_rad=800.0, + ) + + def test_anisotropic_tiled_vs_nontiled(self, aniso_surface, aniso_location, aniso_weather): + """Tiled anisotropic sky matches non-tiled within numerical precision.""" + # Non-tiled reference + result_ref = calculate( + aniso_surface, + aniso_location, + aniso_weather, + use_anisotropic_sky=True, + ) + + # Tiled: tile_size=256 on 530×530 → 9 tiles (3×3) + result_tiled = calculate_tiled( + aniso_surface, + aniso_location, + aniso_weather, + tile_size=256, + use_anisotropic_sky=True, + ) + + both_valid = np.isfinite(result_ref.tmrt) & np.isfinite(result_tiled.tmrt) + assert both_valid.sum() > 0, "Expected valid Tmrt pixels" + + diff = np.abs(result_tiled.tmrt[both_valid] - result_ref.tmrt[both_valid]) + mean_diff = diff.mean() + max_diff = diff.max() + + assert mean_diff < 0.01, f"Mean Tmrt diff {mean_diff:.4f}°C too large (tiled vs non-tiled anisotropic)" + assert max_diff < 0.1, f"Max Tmrt diff {max_diff:.4f}°C too large (possible shadow matrix slicing issue)" + + +class TestHeightAwareBuffer: + """Verify tiling 
functions compute buffer from actual building heights.""" + + def test_short_buildings_get_small_buffer(self): + """With 5m relative height the buffer should be ~95m, not the 500m default.""" + from unittest.mock import patch + + size = 400 + ground = 10.0 + building_height = 5.0 # above ground + max_dsm = ground + building_height # 15m absolute, 5m relative + dsm = np.full((size, size), ground, dtype=np.float32) + dsm[100:120, 100:120] = max_dsm + + surface = SurfaceData(dsm=dsm, pixel_size=1.0, svf=make_mock_svf((size, size))) + location = Location(latitude=57.7, longitude=12.0, utc_offset=2) + weather = Weather(datetime=datetime(2024, 7, 15, 12, 0), ta=28.0, rh=45.0, global_rad=850.0, ws=2.0) + + # Buffer uses relative height (5m), not absolute elevation (15m) + # buffer = 5 / tan(3°) ≈ 95.4m — much less than default 500m cap + expected_buffer = calculate_buffer_distance(building_height) + assert 90 < expected_buffer < 100, f"Expected ~95m buffer, got {expected_buffer}" + + # Patch generate_tiles to capture the buffer_pixels that calculate_tiled passes + captured = {} + original_generate_tiles = __import__("solweig.tiling", fromlist=["generate_tiles"]).generate_tiles + + def spy_generate_tiles(rows, cols, tile_size, buffer_pixels): + captured["buffer_pixels"] = buffer_pixels + return original_generate_tiles(rows, cols, tile_size, buffer_pixels) + + with patch("solweig.tiling.generate_tiles", side_effect=spy_generate_tiles): + _ = calculate_tiled(surface, location, weather, tile_size=350) + + # If generate_tiles was called, buffer should match relative-height-derived value + if "buffer_pixels" in captured: + expected_px = int(np.ceil(expected_buffer / surface.pixel_size)) + assert captured["buffer_pixels"] == expected_px, ( + f"Expected {expected_px}px buffer from {building_height}m relative height, " + f"got {captured['buffer_pixels']}px" + ) + + def test_tall_buildings_capped_at_max(self): + """With 30m relative height the buffer should cap at 
max_shadow_distance_m.""" + from unittest.mock import patch + + size = 400 + ground = 10.0 + building_height = 30.0 + max_dsm = ground + building_height # 40m absolute, 30m relative + dsm = np.full((size, size), ground, dtype=np.float32) + dsm[100:120, 100:120] = max_dsm + + surface = SurfaceData(dsm=dsm, pixel_size=1.0, svf=make_mock_svf((size, size))) + location = Location(latitude=57.7, longitude=12.0, utc_offset=2) + weather = Weather(datetime=datetime(2024, 7, 15, 12, 0), ta=28.0, rh=45.0, global_rad=850.0, ws=2.0) + + cap = 200.0 + # 30m relative height: 30/tan(3°) ≈ 573m → capped at 200m + expected_buffer = calculate_buffer_distance(building_height, max_shadow_distance_m=cap) + assert expected_buffer == cap, f"Expected buffer capped at {cap}, got {expected_buffer}" + + captured = {} + original_generate_tiles = __import__("solweig.tiling", fromlist=["generate_tiles"]).generate_tiles + + def spy_generate_tiles(rows, cols, tile_size, buffer_pixels): + captured["buffer_pixels"] = buffer_pixels + return original_generate_tiles(rows, cols, tile_size, buffer_pixels) + + with patch("solweig.tiling.generate_tiles", side_effect=spy_generate_tiles): + _ = calculate_tiled(surface, location, weather, tile_size=350, max_shadow_distance_m=cap) + + if "buffer_pixels" in captured: + expected_px = int(np.ceil(cap / surface.pixel_size)) + assert captured["buffer_pixels"] == expected_px, ( + f"Expected {expected_px}px buffer (capped at {cap}m), got {captured['buffer_pixels']}px" + ) diff --git a/tests/test_timeseries.py b/tests/test_timeseries.py new file mode 100644 index 0000000..9e38b88 --- /dev/null +++ b/tests/test_timeseries.py @@ -0,0 +1,829 @@ +""" +Tests for calculate_timeseries() and validate_inputs(). + +These cover the critical gap: the primary user workflow (timeseries) had +zero dedicated tests, and validate_inputs() was untested. 
+""" + +import contextlib +from datetime import datetime, timedelta + +import numpy as np +import pytest +from solweig.api import ( + HumanParams, + Location, + ModelConfig, + SolweigResult, + SurfaceData, + TimeseriesSummary, + Weather, + calculate_timeseries, + validate_inputs, +) +from solweig.errors import GridShapeMismatch, MissingPrecomputedData + +pytestmark = pytest.mark.slow + +# --------------------------------------------------------------------------- +# Fixtures +# --------------------------------------------------------------------------- + + +@pytest.fixture(scope="module") +def flat_surface(): + """Simple flat DSM with one 10m building.""" + from conftest import make_mock_svf + + dsm = np.zeros((30, 30), dtype=np.float32) + dsm[10:20, 10:20] = 10.0 + return SurfaceData(dsm=dsm, pixel_size=1.0, svf=make_mock_svf((30, 30))) + + +@pytest.fixture(scope="module") +def location(): + return Location(latitude=57.7, longitude=12.0, utc_offset=1) + + +def _make_weather_series( + base_dt: datetime, + n_hours: int, + ta: float = 25.0, + rh: float = 50.0, + global_rad: float = 800.0, +) -> list[Weather]: + """Create a list of hourly Weather objects.""" + return [ + Weather( + datetime=base_dt + timedelta(hours=i), + ta=ta, + rh=rh, + global_rad=global_rad, + ) + for i in range(n_hours) + ] + + +# =========================================================================== +# calculate_timeseries() tests +# =========================================================================== + + +class TestCalculateTimeseries: + """Tests for the calculate_timeseries() function.""" + + def test_returns_summary(self, flat_surface, location): + """Returns a TimeseriesSummary with per-timestep results when requested.""" + weather_series = _make_weather_series(datetime(2024, 7, 15, 10, 0), n_hours=3) + + summary = calculate_timeseries(flat_surface, weather_series, location, timestep_outputs=["tmrt", "shadow"]) + + assert isinstance(summary, TimeseriesSummary) + assert 
len(summary) == 3 + assert len(summary.results) == 3 + for r in summary.results: + assert isinstance(r, SolweigResult) + + def test_result_shapes_match_surface(self, flat_surface, location): + """Each result has arrays matching the DSM shape.""" + weather_series = _make_weather_series(datetime(2024, 7, 15, 12, 0), n_hours=2) + + summary = calculate_timeseries(flat_surface, weather_series, location, timestep_outputs=["tmrt", "shadow"]) + + for r in summary.results: + assert r.tmrt.shape == (30, 30) + assert r.shadow is not None + assert r.shadow.shape == (30, 30) + + def test_empty_series_returns_empty_summary(self, flat_surface, location): + """Empty weather_series returns an empty TimeseriesSummary.""" + summary = calculate_timeseries(flat_surface, [], location) + assert isinstance(summary, TimeseriesSummary) + assert len(summary) == 0 + assert summary.results == [] + + def test_single_timestep(self, flat_surface, location): + """Works with a single-element weather_series.""" + weather_series = [Weather(datetime=datetime(2024, 7, 15, 12, 0), ta=25.0, rh=50.0, global_rad=800.0)] + + summary = calculate_timeseries(flat_surface, weather_series, location, timestep_outputs=["tmrt"]) + + assert len(summary) == 1 + assert summary.results[0].tmrt.shape == (30, 30) + + def test_tmrt_in_reasonable_range(self, flat_surface, location): + """Tmrt values are physically plausible across timesteps.""" + weather_series = _make_weather_series(datetime(2024, 7, 15, 10, 0), n_hours=3) + + summary = calculate_timeseries(flat_surface, weather_series, location, timestep_outputs=["tmrt"]) + + for r in summary.results: + assert np.nanmin(r.tmrt) >= -50 + assert np.nanmax(r.tmrt) < 80 + + def test_utci_pet_default_none(self, flat_surface, location): + """UTCI and PET are None when not requested via timestep_outputs.""" + weather_series = _make_weather_series(datetime(2024, 7, 15, 12, 0), n_hours=2) + + summary = calculate_timeseries(flat_surface, weather_series, location, 
timestep_outputs=["tmrt"]) + + for r in summary.results: + assert r.utci is None + assert r.pet is None + + def test_utci_per_timestep_when_requested(self, flat_surface, location): + """UTCI is computed per-timestep when included in timestep_outputs.""" + weather_series = _make_weather_series(datetime(2024, 7, 15, 12, 0), n_hours=2) + + summary = calculate_timeseries(flat_surface, weather_series, location, timestep_outputs=["tmrt", "utci"]) + + for r in summary.results: + assert r.tmrt is not None + assert r.utci is not None + assert r.utci.shape == (30, 30) + assert r.shadow is None # not requested + + def test_nighttime_series(self, flat_surface, location): + """Nighttime timesteps produce valid (low Tmrt) results.""" + weather_series = _make_weather_series( + datetime(2024, 7, 15, 0, 0), + n_hours=3, + ta=15.0, + global_rad=0.0, + ) + + summary = calculate_timeseries(flat_surface, weather_series, location, timestep_outputs=["tmrt"]) + + assert len(summary) == 3 + for r in summary.results: + # At night, Tmrt is computed from full longwave balance. Under open + # sky (SVF~1) the cold sky pulls Tmrt below Ta, typically by 5-10 C. + valid = r.tmrt[np.isfinite(r.tmrt)] + assert np.all(valid < 15.0 + 2.0), "Night Tmrt should not exceed Ta by much" + assert np.all(valid > -10.0), "Night Tmrt should not be unreasonably cold" + + def test_day_night_transition(self, flat_surface, location): + """Handles transition from night to day.""" + # 4am, 5am, ... 
9am + weather_series = [ + Weather( + datetime=datetime(2024, 7, 15, h, 0), + ta=15.0 + h, + rh=70.0, + global_rad=max(0.0, (h - 5) * 200.0), + ) + for h in range(4, 10) + ] + + summary = calculate_timeseries(flat_surface, weather_series, location, timestep_outputs=["tmrt"]) + + assert len(summary) == 6 + # Later timesteps (with sun up) should generally have higher Tmrt + # than early night timesteps + early_tmrt = np.nanmean(summary.results[0].tmrt) + late_tmrt = np.nanmean(summary.results[-1].tmrt) + assert late_tmrt > early_tmrt + + def test_location_auto_extracted_with_warning(self, flat_surface, caplog): + """When location is None, a warning is logged.""" + import logging + + weather_series = _make_weather_series(datetime(2024, 7, 15, 12, 0), n_hours=1) + + with caplog.at_level(logging.WARNING), contextlib.suppress(Exception): + # Should work but warn about auto-extraction + calculate_timeseries(flat_surface, weather_series, location=None) + # If it got past the location extraction, it should have warned + # (If it raised before logging, that's also acceptable for synthetic data) + + def test_config_precedence_explicit_wins(self, flat_surface, location): + """Explicit parameters override config values.""" + config = ModelConfig(use_anisotropic_sky=True) + weather_series = _make_weather_series(datetime(2024, 7, 15, 12, 0), n_hours=1) + + # use_anisotropic_sky=False should override config's True + # Since we don't have shadow matrices, aniso=True would fail. + # If explicit=False wins, this should succeed. 
+ summary = calculate_timeseries( + flat_surface, + weather_series, + location, + config=config, + use_anisotropic_sky=False, + ) + + assert len(summary) == 1 + + def test_custom_human_params(self, flat_surface, location): + """Custom HumanParams are accepted.""" + weather_series = _make_weather_series(datetime(2024, 7, 15, 12, 0), n_hours=1) + human = HumanParams(abs_k=0.7, abs_l=0.97, posture="standing") + + summary = calculate_timeseries(flat_surface, weather_series, location, human=human) + + assert len(summary) == 1 + + def test_results_differ_across_timesteps(self, flat_surface, location): + """Different hours produce different Tmrt patterns.""" + weather_series = [ + Weather(datetime=datetime(2024, 7, 15, 8, 0), ta=20.0, rh=60.0, global_rad=400.0), + Weather(datetime=datetime(2024, 7, 15, 12, 0), ta=28.0, rh=40.0, global_rad=800.0), + Weather(datetime=datetime(2024, 7, 15, 16, 0), ta=25.0, rh=50.0, global_rad=500.0), + ] + + summary = calculate_timeseries(flat_surface, weather_series, location, timestep_outputs=["tmrt"]) + + # The three timesteps should produce meaningfully different Tmrt + means = [np.nanmean(r.tmrt) for r in summary.results] + assert not all(np.isclose(m, means[0], atol=0.5) for m in means), ( + "Expected different Tmrt across timesteps with different conditions" + ) + + def test_output_dir_saves_files(self, flat_surface, location, tmp_path): + """When output_dir is provided, files are saved.""" + weather_series = _make_weather_series(datetime(2024, 7, 15, 12, 0), n_hours=2) + + summary = calculate_timeseries( + flat_surface, + weather_series, + location, + output_dir=str(tmp_path), + timestep_outputs=["tmrt", "shadow"], + ) + + assert len(summary) == 2 + # Check that some output files were created + output_files = list(tmp_path.iterdir()) + assert len(output_files) > 0 + # With timestep_outputs, arrays must remain available. 
+ assert all(r.tmrt is not None for r in summary.results) + + def test_explicit_anisotropic_requires_shadow_matrices(self, flat_surface, location): + """Explicit anisotropic request should fail without shadow matrices.""" + weather_series = _make_weather_series(datetime(2024, 7, 15, 12, 0), n_hours=1) + with pytest.raises(MissingPrecomputedData): + calculate_timeseries( + flat_surface, + weather_series, + location, + use_anisotropic_sky=True, + ) + + def test_default_no_timestep_outputs(self, flat_surface, location): + """Default mode (timestep_outputs=None) returns summary with empty results list.""" + weather_series = _make_weather_series(datetime(2024, 7, 15, 12, 0), n_hours=2) + + summary = calculate_timeseries( + flat_surface, + weather_series, + location, + ) + + assert isinstance(summary, TimeseriesSummary) + assert summary.results == [] + assert len(summary) == 2 + + def test_summary_only_requests_tmrt_and_shadow(self, flat_surface, location, monkeypatch): + """Summary-only mode should request tmrt and shadow from fused Rust path.""" + weather_series = _make_weather_series(datetime(2024, 7, 15, 12, 0), n_hours=2) + captured: list[set[str] | None] = [] + + def _fake_calculate(**kwargs): + captured.append(kwargs.get("_requested_outputs")) + shape = kwargs["surface"].dsm.shape + return SolweigResult( + tmrt=np.zeros(shape, dtype=np.float32), + shadow=np.zeros(shape, dtype=np.float32), + kdown=None, + kup=None, + ldown=None, + lup=None, + utci=None, + pet=None, + state=None, + ) + + monkeypatch.setattr("solweig.api.calculate", _fake_calculate) + + summary = calculate_timeseries( + flat_surface, + weather_series, + location, + ) + + assert summary.results == [] + assert captured and all(req == {"tmrt", "shadow"} for req in captured) + + def test_tiling_runtime_controls_forwarded_from_config(self, flat_surface, location, monkeypatch): + """ModelConfig tile runtime settings are forwarded to tiled runner.""" + weather_series = _make_weather_series(datetime(2024, 7, 
15, 12, 0), n_hours=1) + config = ModelConfig(tile_workers=3, tile_queue_depth=5, prefetch_tiles=False) + + captured: dict[str, object] = {} + + def _fake_tiled(**kwargs): + captured.update(kwargs) + return TimeseriesSummary.empty() + + monkeypatch.setattr("solweig.tiling._should_use_tiling", lambda _r, _c: True) + monkeypatch.setattr("solweig.tiling.calculate_timeseries_tiled", _fake_tiled) + + summary = calculate_timeseries(flat_surface, weather_series, location=location, config=config) + assert isinstance(summary, TimeseriesSummary) + assert captured["tile_workers"] == 3 + assert captured["tile_queue_depth"] == 5 + assert captured["prefetch_tiles"] is False + assert captured["timestep_outputs"] is None + + def test_explicit_tiling_runtime_controls_override_config(self, flat_surface, location, monkeypatch): + """Explicit tile runtime args override ModelConfig values.""" + weather_series = _make_weather_series(datetime(2024, 7, 15, 12, 0), n_hours=1) + config = ModelConfig(tile_workers=2, tile_queue_depth=1, prefetch_tiles=False) + + captured: dict[str, object] = {} + + def _fake_tiled(**kwargs): + captured.update(kwargs) + return TimeseriesSummary.empty() + + monkeypatch.setattr("solweig.tiling._should_use_tiling", lambda _r, _c: True) + monkeypatch.setattr("solweig.tiling.calculate_timeseries_tiled", _fake_tiled) + + summary = calculate_timeseries( + flat_surface, + weather_series, + location=location, + config=config, + tile_workers=6, + tile_queue_depth=9, + prefetch_tiles=True, + ) + assert isinstance(summary, TimeseriesSummary) + assert captured["tile_workers"] == 6 + assert captured["tile_queue_depth"] == 9 + assert captured["prefetch_tiles"] is True + + def test_timestep_outputs_forwarded_to_tiled_runner(self, flat_surface, location, monkeypatch): + """timestep_outputs should be forwarded when auto-tiling is selected.""" + weather_series = _make_weather_series(datetime(2024, 7, 15, 12, 0), n_hours=1) + captured: dict[str, object] = {} + + def 
_fake_tiled(**kwargs): + captured.update(kwargs) + return TimeseriesSummary.empty() + + monkeypatch.setattr("solweig.tiling._should_use_tiling", lambda _r, _c: True) + monkeypatch.setattr("solweig.tiling.calculate_timeseries_tiled", _fake_tiled) + + summary = calculate_timeseries( + flat_surface, + weather_series, + location=location, + timestep_outputs=["tmrt", "shadow"], + ) + assert isinstance(summary, TimeseriesSummary) + assert captured["timestep_outputs"] == ["tmrt", "shadow"] + + def test_invalid_tile_workers_raises_from_api(self, flat_surface, location, monkeypatch): + """calculate_timeseries surfaces invalid tile_workers when tiled path is used.""" + weather_series = _make_weather_series(datetime(2024, 7, 15, 12, 0), n_hours=1) + monkeypatch.setattr("solweig.tiling._should_use_tiling", lambda _r, _c: True) + + with pytest.raises(ValueError, match="tile_workers must be >= 1"): + calculate_timeseries( + flat_surface, + weather_series, + location=location, + tile_workers=0, + ) + + def test_invalid_tile_queue_depth_raises_from_api(self, flat_surface, location, monkeypatch): + """calculate_timeseries surfaces invalid tile_queue_depth when tiled path is used.""" + weather_series = _make_weather_series(datetime(2024, 7, 15, 12, 0), n_hours=1) + monkeypatch.setattr("solweig.tiling._should_use_tiling", lambda _r, _c: True) + + with pytest.raises(ValueError, match="tile_queue_depth must be >= 0"): + calculate_timeseries( + flat_surface, + weather_series, + location=location, + tile_queue_depth=-1, + ) + + +class TestModelConfigTilingRuntimeSerialization: + """Tests for tile runtime fields in ModelConfig save/load.""" + + def test_model_config_save_load_tiling_runtime_fields(self, tmp_path): + config = ModelConfig( + use_anisotropic_sky=True, + tile_workers=4, + tile_queue_depth=7, + prefetch_tiles=False, + ) + config_path = tmp_path / "config.json" + config.save(config_path) + + loaded = ModelConfig.load(config_path) + assert loaded.tile_workers == 4 + assert 
loaded.tile_queue_depth == 7 + assert loaded.prefetch_tiles is False + + +# =========================================================================== +# TimeseriesSummary tests +# =========================================================================== + + +class TestTimeseriesSummary: + """Tests for summary grids produced by calculate_timeseries().""" + + def test_summary_grids_shapes(self, flat_surface, location): + """All summary grids match DSM shape.""" + weather_series = _make_weather_series(datetime(2024, 7, 15, 10, 0), n_hours=3) + + summary = calculate_timeseries(flat_surface, weather_series, location) + + assert summary.tmrt_mean.shape == (30, 30) + assert summary.tmrt_max.shape == (30, 30) + assert summary.tmrt_min.shape == (30, 30) + assert summary.tmrt_day_mean.shape == (30, 30) + assert summary.tmrt_night_mean.shape == (30, 30) + assert summary.utci_mean.shape == (30, 30) + assert summary.utci_max.shape == (30, 30) + assert summary.utci_min.shape == (30, 30) + assert summary.sun_hours.shape == (30, 30) + assert summary.shade_hours.shape == (30, 30) + + def test_summary_tmrt_mean_consistent(self, flat_surface, location): + """Summary tmrt_mean matches manual mean of per-timestep results.""" + weather_series = _make_weather_series(datetime(2024, 7, 15, 10, 0), n_hours=3) + + summary = calculate_timeseries(flat_surface, weather_series, location, timestep_outputs=["tmrt"]) + + # Manual mean from per-timestep arrays + stacked = np.stack([r.tmrt for r in summary.results], axis=0) + manual_mean = np.nanmean(stacked, axis=0) + + np.testing.assert_allclose(summary.tmrt_mean, manual_mean, atol=0.1) + + def test_summary_utci_grids_populated(self, flat_surface, location): + """UTCI summary grids are computed and finite.""" + weather_series = _make_weather_series(datetime(2024, 7, 15, 10, 0), n_hours=3) + + summary = calculate_timeseries(flat_surface, weather_series, location) + + # UTCI mean should have some finite values + assert 
np.any(np.isfinite(summary.utci_mean)) + assert np.any(np.isfinite(summary.utci_max)) + assert np.any(np.isfinite(summary.utci_min)) + + def test_summary_default_heat_thresholds(self, flat_surface, location): + """Default thresholds produce expected dict keys.""" + weather_series = _make_weather_series(datetime(2024, 7, 15, 10, 0), n_hours=3) + + summary = calculate_timeseries(flat_surface, weather_series, location) + + assert summary.heat_thresholds_day == [32.0, 38.0] + assert summary.heat_thresholds_night == [26.0] + # All three thresholds should appear in utci_hours_above + assert 32.0 in summary.utci_hours_above + assert 38.0 in summary.utci_hours_above + assert 26.0 in summary.utci_hours_above + + def test_summary_custom_heat_thresholds(self, flat_surface, location): + """Custom thresholds produce matching dict keys.""" + weather_series = _make_weather_series(datetime(2024, 7, 15, 10, 0), n_hours=2) + + summary = calculate_timeseries( + flat_surface, + weather_series, + location, + heat_thresholds_day=[30, 35, 40], + heat_thresholds_night=[20], + ) + + assert set(summary.utci_hours_above.keys()) == {20, 30, 35, 40} + + def test_summary_sun_shade_hours(self, flat_surface, location): + """Sun + shade hours per pixel should sum to total hours.""" + weather_series = _make_weather_series(datetime(2024, 7, 15, 10, 0), n_hours=3) + + summary = calculate_timeseries(flat_surface, weather_series, location) + + assert summary.shadow_available + total = summary.sun_hours + summary.shade_hours + # Each daytime pixel should have total == n_hours * timestep_hours (3 hours) + valid = np.isfinite(total) + np.testing.assert_allclose(total[valid], 3.0, atol=0.01) + + def test_summary_day_night_counts(self, flat_surface, location): + """Day/night counts should sum to total timesteps.""" + # Mix day and night hours + weather_series = [ + Weather(datetime=datetime(2024, 7, 15, h, 0), ta=20.0, rh=50.0, global_rad=max(0.0, (h - 5) * 200.0)) + for h in range(2, 14) # 2am to 1pm + ] + 
+ summary = calculate_timeseries(flat_surface, weather_series, location) + + assert summary.n_daytime + summary.n_nighttime == summary.n_timesteps + assert summary.n_daytime > 0 + assert summary.n_nighttime > 0 + + def test_summary_to_geotiff(self, flat_surface, location, tmp_path): + """Summary grids can be saved to GeoTIFF.""" + weather_series = _make_weather_series(datetime(2024, 7, 15, 10, 0), n_hours=2) + + summary = calculate_timeseries(flat_surface, weather_series, location) + summary.to_geotiff(str(tmp_path), surface=flat_surface) + + summary_dir = tmp_path / "summary" + assert summary_dir.exists() + tif_files = list(summary_dir.glob("*.tif")) + assert len(tif_files) > 0 + # Should have tmrt + utci + sun/shade + threshold files + names = {f.stem for f in tif_files} + assert "tmrt_mean" in names + assert "utci_mean" in names + assert "sun_hours" in names + + def test_summary_geotiff_threshold_day_night_suffix(self, flat_surface, location, tmp_path): + """Threshold GeoTIFFs should have _day or _night suffix.""" + weather_series = _make_weather_series(datetime(2024, 7, 15, 10, 0), n_hours=2) + + summary = calculate_timeseries(flat_surface, weather_series, location) + summary.to_geotiff(str(tmp_path), surface=flat_surface) + + summary_dir = tmp_path / "summary" + names = {f.stem for f in summary_dir.glob("*.tif")} + # Default thresholds: day=[32, 38], night=[26] + assert "utci_hours_above_32_day" in names + assert "utci_hours_above_38_day" in names + assert "utci_hours_above_26_night" in names + + def test_summary_len(self, flat_surface, location): + """len(summary) returns n_timesteps.""" + weather_series = _make_weather_series(datetime(2024, 7, 15, 10, 0), n_hours=5) + + summary = calculate_timeseries(flat_surface, weather_series, location) + + assert len(summary) == 5 + + def test_timeseries_populated(self, flat_surface, location): + """summary.timeseries contains per-timestep scalar arrays.""" + weather_series = _make_weather_series(datetime(2024, 7, 15, 
10, 0), n_hours=4) + + summary = calculate_timeseries(flat_surface, weather_series, location) + + ts = summary.timeseries + assert ts is not None + assert len(ts.datetime) == 4 + assert ts.ta.shape == (4,) + assert ts.rh.shape == (4,) + assert ts.ws.shape == (4,) + assert ts.global_rad.shape == (4,) + assert ts.tmrt_mean.shape == (4,) + assert ts.utci_mean.shape == (4,) + assert ts.sun_fraction.shape == (4,) + assert ts.is_daytime.shape == (4,) + assert ts.sun_altitude.shape == (4,) + assert ts.direct_rad.shape == (4,) + assert ts.diffuse_rad.shape == (4,) + assert ts.diffuse_fraction.shape == (4,) + assert ts.clearness_index.shape == (4,) + + def test_timeseries_values_match_weather(self, flat_surface, location): + """Timeseries ta/rh/ws should match weather inputs.""" + weather_series = _make_weather_series( + datetime(2024, 7, 15, 10, 0), + n_hours=3, + ta=30.0, + rh=65.0, + ) + + summary = calculate_timeseries(flat_surface, weather_series, location) + + ts = summary.timeseries + assert ts is not None + np.testing.assert_allclose(ts.ta, 30.0, rtol=1e-5) + np.testing.assert_allclose(ts.rh, 65.0, rtol=1e-5) + + def test_timeseries_datetimes_match(self, flat_surface, location): + """Timeseries datetimes should match weather datetimes.""" + base = datetime(2024, 7, 15, 10, 0) + weather_series = _make_weather_series(base, n_hours=3) + + summary = calculate_timeseries(flat_surface, weather_series, location) + + ts = summary.timeseries + assert ts is not None + for i, w in enumerate(weather_series): + assert ts.datetime[i] == w.datetime + + def test_timeseries_tmrt_mean_finite(self, flat_surface, location): + """Spatial mean Tmrt should be finite for daytime steps.""" + weather_series = _make_weather_series(datetime(2024, 7, 15, 10, 0), n_hours=3) + + summary = calculate_timeseries(flat_surface, weather_series, location) + + ts = summary.timeseries + assert ts is not None + # At least daytime steps should have finite Tmrt + daytime = ts.is_daytime + if daytime.any(): + 
assert np.all(np.isfinite(ts.tmrt_mean[daytime])) + + def test_timeseries_none_for_empty(self, flat_surface, location): + """Empty series should have timeseries=None.""" + summary = calculate_timeseries(flat_surface, [], location) + + assert summary.timeseries is None + + def test_report_returns_string(self, flat_surface, location): + """report() returns a non-empty string.""" + weather_series = _make_weather_series(datetime(2024, 7, 15, 10, 0), n_hours=3) + + summary = calculate_timeseries(flat_surface, weather_series, location) + + report = summary.report() + assert isinstance(report, str) + assert "SOLWEIG Summary" in report + assert "Tmrt" in report + assert "UTCI" in report + + def test_report_empty_summary(self, flat_surface, location): + """report() on empty summary returns descriptive string.""" + summary = TimeseriesSummary.empty() + assert "0 timesteps" in summary.report() + + def test_report_includes_period(self, flat_surface, location): + """report() should include the simulation period.""" + weather_series = _make_weather_series(datetime(2024, 7, 15, 10, 0), n_hours=3) + + summary = calculate_timeseries(flat_surface, weather_series, location) + + report = summary.report() + assert "2024-07-15" in report + + def test_repr_html(self, flat_surface, location): + """_repr_html_ returns HTML for Jupyter rendering.""" + weather_series = _make_weather_series(datetime(2024, 7, 15, 10, 0), n_hours=3) + + summary = calculate_timeseries(flat_surface, weather_series, location) + + html = summary._repr_html_() + assert html.startswith("
<")
+        assert "SOLWEIG Summary" in html
+
+    def test_plot_raises_without_matplotlib(self, flat_surface, location, monkeypatch):
+        """plot() raises ImportError when matplotlib is not available."""
+        weather_series = _make_weather_series(datetime(2024, 7, 15, 10, 0), n_hours=3)
+        summary = calculate_timeseries(flat_surface, weather_series, location)
+
+        import builtins
+
+        real_import = builtins.__import__
+
+        def mock_import(name, *args, **kwargs):
+            if name.startswith("matplotlib"):
+                raise ImportError("mock")
+            return real_import(name, *args, **kwargs)
+
+        monkeypatch.setattr(builtins, "__import__", mock_import)
+
+        with pytest.raises(ImportError, match="matplotlib"):
+            summary.plot()
+
+    def test_plot_raises_on_empty_summary(self, flat_surface, location):
+        """plot() raises RuntimeError on empty summary."""
+        summary = TimeseriesSummary.empty()
+        with pytest.raises(RuntimeError, match="No timeseries data"):
+            summary.plot()
+
+    def test_plot_saves_to_file(self, flat_surface, location, tmp_path):
+        """plot(save_path=...) saves a figure to disk."""
+        pytest.importorskip("matplotlib")
+        weather_series = _make_weather_series(datetime(2024, 7, 15, 10, 0), n_hours=6)
+        summary = calculate_timeseries(flat_surface, weather_series, location)
+
+        out = tmp_path / "plot.png"
+        summary.plot(save_path=out)
+        assert out.exists()
+        assert out.stat().st_size > 0
+
+
+# ===========================================================================
+# validate_inputs() tests
+# ===========================================================================
+
+
+class TestValidateInputs:
+    """Tests for the validate_inputs() function."""
+
+    def test_valid_inputs_no_warnings(self, flat_surface, location):
+        """Valid inputs produce no warnings and don't raise."""
+        weather = Weather(datetime=datetime(2024, 7, 15, 12, 0), ta=25.0, rh=50.0, global_rad=800.0)
+
+        warnings = validate_inputs(flat_surface, location, weather)
+
+        assert isinstance(warnings, list)
+
+    def test_grid_shape_mismatch_raises(self):
+        """Mismatched grid shapes raise GridShapeMismatch."""
+        dsm = np.zeros((30, 30), dtype=np.float32)
+        cdsm = np.zeros((20, 20), dtype=np.float32)  # Wrong shape
+        surface = SurfaceData(dsm=dsm, cdsm=cdsm)
+
+        with pytest.raises(GridShapeMismatch) as exc_info:
+            validate_inputs(surface)
+
+        assert exc_info.value.field == "cdsm"
+        assert "(30, 30)" in str(exc_info.value.expected)
+        assert "(20, 20)" in str(exc_info.value.got)
+
+    def test_anisotropic_without_shadow_matrices(self, flat_surface):
+        """Anisotropic sky without shadow matrices raises."""
+        with pytest.raises(MissingPrecomputedData):
+            validate_inputs(flat_surface, use_anisotropic_sky=True)
+
+    def test_extreme_temperature_warning(self, flat_surface, location):
+        """Extreme temperatures produce warnings (ta > 60 triggers)."""
+        weather = Weather(datetime=datetime(2024, 7, 15, 12, 0), ta=65.0, rh=50.0, global_rad=800.0)
+
+        warnings = validate_inputs(flat_surface, location, weather)
+
+        assert any("ta=" in w for w in warnings)
+
+    def test_extreme_radiation_warning(self, flat_surface, location):
+        """Extreme radiation values produce warnings."""
+        weather = Weather(datetime=datetime(2024, 7, 15, 12, 0), ta=25.0, rh=50.0, global_rad=1500.0)
+
+        warnings = validate_inputs(flat_surface, location, weather)
+
+        assert any("global_rad" in w for w in warnings)
+
+    def test_validates_weather_list(self, flat_surface, location):
+        """Can validate a list of Weather objects."""
+        weather_list = _make_weather_series(datetime(2024, 7, 15, 10, 0), n_hours=5)
+
+        warnings = validate_inputs(flat_surface, location, weather_list)
+
+        assert isinstance(warnings, list)
+
+    def test_unpreprocessed_cdsm_warning(self):
+        """Warning when CDSM is relative but preprocess() not called."""
+        from conftest import make_mock_svf
+
+        dsm = np.zeros((20, 20), dtype=np.float32)
+        cdsm = np.ones((20, 20), dtype=np.float32) * 5.0
+        surface = SurfaceData(dsm=dsm, cdsm=cdsm, cdsm_relative=True, svf=make_mock_svf((20, 20)))
+
+        warnings = validate_inputs(surface)
+
+        assert any("preprocess" in w.lower() for w in warnings)
+
+    def test_surface_only_validation(self):
+        """Can validate with just a surface (no location/weather)."""
+        from conftest import make_mock_svf
+
+        dsm = np.zeros((20, 20), dtype=np.float32)
+        surface = SurfaceData(dsm=dsm, svf=make_mock_svf((20, 20)))
+
+        warnings = validate_inputs(surface)
+
+        assert isinstance(warnings, list)
+
+
+# ===========================================================================
+# Memory optimization tests
+# ===========================================================================
+
+
+class TestTimeseriesMemory:
+    """Tests for memory optimizations in calculate_timeseries()."""
+
+    def test_state_cleared_from_results(self, flat_surface, location):
+        """Returned results should have state=None to avoid ~23 MB waste per timestep."""
+        weather_series = _make_weather_series(datetime(2024, 7, 15, 10, 0), n_hours=3)
+
+        summary = calculate_timeseries(flat_surface, weather_series, location, timestep_outputs=["tmrt"])
+
+        assert len(summary) == 3
+        for r in summary.results:
+            assert r.state is None, "State should be cleared from results to save memory"
+
+    def test_state_still_propagates_correctly(self, flat_surface, location):
+        """Despite clearing state from results, thermal state should still propagate."""
+        # Night → day transition relies on state propagation for ground temperature
+        weather_series = [
+            Weather(datetime=datetime(2024, 7, 15, h, 0), ta=15.0 + h, rh=70.0, global_rad=max(0.0, (h - 5) * 200.0))
+            for h in range(4, 10)
+        ]
+
+        summary = calculate_timeseries(flat_surface, weather_series, location, timestep_outputs=["tmrt"])
+
+        # Later timesteps should have higher Tmrt (thermal state propagated correctly)
+        early_tmrt = np.nanmean(summary.results[0].tmrt)
+        late_tmrt = np.nanmean(summary.results[-1].tmrt)
+        assert late_tmrt > early_tmrt, "Thermal state should propagate despite being cleared from results"
diff --git a/tests/test_wall_material.py b/tests/test_wall_material.py
new file mode 100644
index 0000000..daf2664
--- /dev/null
+++ b/tests/test_wall_material.py
@@ -0,0 +1,177 @@
+"""Tests for wall material selection (scalar wall params from JSON)."""
+
+from __future__ import annotations
+
+import numpy as np
+import pytest
+
+
+class TestResolveWallParams:
+    """Test the resolve_wall_params() function directly."""
+
+    def test_brick_returns_correct_params(self):
+        from solweig.loaders import resolve_wall_params
+
+        tgk, tstart, tmaxlst = resolve_wall_params("brick")
+        assert tgk == pytest.approx(0.40)
+        assert tstart == pytest.approx(-4.0)
+        assert tmaxlst == pytest.approx(15.0)
+
+    def test_concrete_returns_correct_params(self):
+        from solweig.loaders import resolve_wall_params
+
+        tgk, tstart, tmaxlst = resolve_wall_params("concrete")
+        assert tgk == pytest.approx(0.35)
+        assert tstart == pytest.approx(-5.0)
+        assert tmaxlst == pytest.approx(16.0)
+
+    def test_wood_returns_correct_params(self):
+        from solweig.loaders import resolve_wall_params
+
+        tgk, tstart, tmaxlst = resolve_wall_params("wood")
+        assert tgk == pytest.approx(0.50)
+        assert tstart == pytest.approx(-2.0)
+        assert tmaxlst == pytest.approx(14.0)
+
+    def test_cobblestone_returns_default_params(self):
+        from solweig.loaders import resolve_wall_params
+
+        tgk, tstart, tmaxlst = resolve_wall_params("cobblestone")
+        assert tgk == pytest.approx(0.37)
+        assert tstart == pytest.approx(-3.41)
+        assert tmaxlst == pytest.approx(15.0)
+
+    def test_case_insensitive(self):
+        from solweig.loaders import resolve_wall_params
+
+        for name in ("Brick", "BRICK", "bRiCk"):
+            tgk, _, _ = resolve_wall_params(name)
+            assert tgk == pytest.approx(0.40), f"Failed for {name!r}"
+
+    def test_invalid_material_raises_valueerror(self):
+        from solweig.loaders import resolve_wall_params
+
+        with pytest.raises(ValueError, match="Unknown wall material"):
+            resolve_wall_params("marble")
+
+    def test_error_message_lists_valid_options(self):
+        from solweig.loaders import resolve_wall_params
+
+        with pytest.raises(ValueError, match="brick") as exc_info:
+            resolve_wall_params("unknown")
+        msg = str(exc_info.value)
+        assert "concrete" in msg
+        assert "wood" in msg
+        assert "cobblestone" in msg
+
+    def test_with_custom_materials(self):
+        from types import SimpleNamespace
+
+        from solweig.loaders import resolve_wall_params
+        from solweig.utils import dict_to_namespace
+
+        raw = dict_to_namespace(
+            {
+                "Ts_deg": {"Value": {"Brick_wall": 0.99}},
+                "Tstart": {"Value": {"Brick_wall": -1.0}},
+                "TmaxLST": {"Value": {"Brick_wall": 13.0}},
+            }
+        )
+        assert isinstance(raw, SimpleNamespace)
+        tgk, tstart, tmaxlst = resolve_wall_params("brick", materials=raw)
+        assert tgk == pytest.approx(0.99)
+        assert tstart == pytest.approx(-1.0)
+        assert tmaxlst == pytest.approx(13.0)
+
+
+class TestWallMaterialInCalculate:
+    """Test wall_material parameter in the full calculate() pipeline."""
+
+    @pytest.fixture
+    def simple_inputs(self):
+        """Minimal inputs for a daytime calculation."""
+        from datetime import datetime
+
+        from solweig import HumanParams, Location, SurfaceData, Weather
+
+        dsm = np.full((3, 3), 2.0, dtype=np.float32)
+        surface = SurfaceData(dsm=dsm)
+        surface.compute_svf()
+        location = Location(latitude=57.7, longitude=12.0, utc_offset=1)
+        weather = Weather(
+            datetime=datetime(2023, 7, 15, 12, 0),
+            ta=25.0,
+            rh=50.0,
+            global_rad=800.0,
+        )
+        human = HumanParams()
+        return surface, location, weather, human
+
+    def test_wall_material_none_uses_default(self, simple_inputs):
+        """wall_material=None should produce same result as no param."""
+        from solweig import calculate
+
+        surface, location, weather, human = simple_inputs
+        result_default = calculate(surface, location, weather, human=human, use_anisotropic_sky=False)
+        result_none = calculate(surface, location, weather, human=human, wall_material=None, use_anisotropic_sky=False)
+
+        np.testing.assert_array_equal(result_default.tmrt, result_none.tmrt)
+
+    def test_brick_differs_from_default(self, simple_inputs):
+        """Brick wall material should produce different Tmrt than default."""
+        from solweig import calculate
+
+        surface, location, weather, human = simple_inputs
+        # Use isotropic sky — this flat surface has no explicit wall pixels,
+        # but wall material parameters still affect ground temperature through
+        # the isotropic radiation pathway (tgk_wall / tstart_wall scalars).
+        result_default = calculate(surface, location, weather, human=human, use_anisotropic_sky=False)
+        result_brick = calculate(
+            surface, location, weather, human=human, wall_material="brick", use_anisotropic_sky=False
+        )
+
+        assert not np.array_equal(result_default.tmrt, result_brick.tmrt), (
+            "Brick wall material should produce different Tmrt than default"
+        )
+
+    def test_wood_higher_wall_temp_than_brick(self, simple_inputs):
+        """Wood (TgK=0.50) should produce higher wall temp than brick (TgK=0.40)."""
+        from solweig.components.ground import compute_ground_temperature
+
+        surface, location, weather, _ = simple_inputs
+        weather.compute_derived(location)
+
+        alb = np.full((3, 3), 0.15, dtype=np.float32)
+        emis = np.full((3, 3), 0.95, dtype=np.float32)
+        tgk = np.full((3, 3), 0.37, dtype=np.float32)
+        tstart = np.full((3, 3), -3.41, dtype=np.float32)
+        tmaxlst = np.full((3, 3), 15.0, dtype=np.float32)
+
+        gb_wood = compute_ground_temperature(
+            weather,
+            location,
+            alb,
+            emis,
+            tgk,
+            tstart,
+            tmaxlst,
+            tgk_wall=0.50,
+            tstart_wall=-2.0,
+            tmaxlst_wall=14.0,
+        )
+        gb_brick = compute_ground_temperature(
+            weather,
+            location,
+            alb,
+            emis,
+            tgk,
+            tstart,
+            tmaxlst,
+            tgk_wall=0.40,
+            tstart_wall=-4.0,
+            tmaxlst_wall=15.0,
+        )
+
+        assert gb_wood.tg_wall > gb_brick.tg_wall, (
+            f"Wood tg_wall ({gb_wood.tg_wall:.2f}) should exceed brick ({gb_brick.tg_wall:.2f})"
+        )
diff --git a/tests/validation/__init__.py b/tests/validation/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/validation/montpellier/presti_subset.csv b/tests/validation/montpellier/presti_subset.csv
new file mode 100644
index 0000000..e5dd64d
--- /dev/null
+++ b/tests/validation/montpellier/presti_subset.csv
@@ -0,0 +1,433 @@
+TIMESTAMP,SMn_TA,SMn_HR,SMn_Wspd,SMn_Wdir,G1_TA,G2_TA,G3_TA,G4_TA,G5_TA,G6_TA,G7_TA,G8_TA,G9_TA,GA_TA,GB_TA,GC_TA,GD_TA,GE_TA,GF_TA,SlrW_1,SlrW_2,SlrW_3,SlrW_4
+2023-08-03T00:00:00Z,26.79,55.63,0.98,94.6,26.78,26.59,26.84,26.53,26.89,26.59,26.83,26.51,26.72,26.84,27.18,27.44,27.44,28.26,27.27,0,0,0,0
+2023-08-03T00:10:00Z,26.69,55.93,1.026,108.4,26.66,26.42,26.67,26.49,26.8,26.54,26.75,26.51,26.51,26.84,27.18,27.31,27.27,28.18,27.19,0,0,0,0
+2023-08-03T00:20:00Z,26.56,56.37,1.173,103,26.5,26.21,26.67,26.32,26.59,26.42,26.66,26.34,26.47,26.68,26.93,27.1,27.1,28.01,27.14,0,0,0,0
+2023-08-03T00:30:00Z,26.25,55.24,0.451,264.9,26.2,26,26.17,26.07,26.21,26.16,26.24,25.92,26,26.34,26.42,26.76,26.72,27.59,26.68,0,0,0,0
+2023-08-03T00:40:00Z,25.52,54.34,2.218,94.3,25.52,25.28,25.66,25.35,25.54,25.57,25.65,25.32,25.49,25.66,25.84,26.09,26.09,26.92,26.08,0,0,0,0
+2023-08-03T00:50:00Z,25.14,52.66,1.421,6.725,25.19,24.9,25.2,24.89,25.22,24.99,25.15,24.9,25.07,25.24,25.41,25.66,25.66,26.58,25.74,0,0,0,0
+2023-08-03T01:00:00Z,24.9,47.93,1.421,83.8,24.98,24.78,25.07,24.89,25.04,24.82,25.14,24.81,24.9,25.03,25.33,25.66,25.49,26.45,25.58,0,0,0,0
+2023-08-03T01:10:00Z,24.86,39.64,1.754,81.4,25.01,24.65,25.07,24.8,25.09,24.82,24.98,24.81,24.82,25.07,25.33,25.49,25.49,26.41,25.49,0,0,0,0
+2023-08-03T01:20:00Z,24.75,38.97,0.493,298.7,24.88,24.53,24.9,24.8,25.05,24.73,24.98,24.77,24.77,24.99,25.33,25.58,25.49,26.41,25.49,0,0,0,0
+2023-08-03T01:30:00Z,24.63,37.94,1.271,105.3,24.75,24.41,24.83,24.55,24.8,24.62,24.72,24.48,24.6,24.83,25.09,25.32,25.28,26.2,25.32,0,0,0,0
+2023-08-03T01:40:00Z,24.41,38.53,1.212,81.3,24.4,24.09,24.5,24.21,24.55,24.33,24.55,24.22,24.22,24.51,24.76,24.99,24.99,25.82,25.03,0,0,0,0
+2023-08-03T01:50:00Z,24.28,39.09,0.47,331.6,24.32,24.06,24.31,24.13,24.38,24.26,24.38,24.05,24.22,24.44,24.61,24.86,24.9,25.69,24.86,0,0,0,0
+2023-08-03T02:00:00Z,24.15,39.22,1.274,86.5,24.23,23.98,24.28,24.13,24.29,24.03,24.38,24.1,24.1,24.37,24.54,24.78,24.74,25.65,24.82,0,0,0,0
+2023-08-03T02:10:00Z,24.1,39.83,0.84,78.33,24.14,23.92,24.21,23.96,24.2,23.96,24.22,23.89,23.98,24.21,24.47,24.7,24.74,25.56,24.66,0,0,0,0
+2023-08-03T02:20:00Z,23.94,40.95,0.163,273.2,23.93,23.76,24.01,23.84,24.08,23.8,24.13,23.85,23.89,24.14,24.31,24.57,24.53,25.48,24.57,0,0,0,0
+2023-08-03T02:30:00Z,23.86,41.48,0.405,340.8,23.9,23.6,23.93,23.79,24.03,23.77,24.05,23.65,23.73,24.06,24.36,24.5,24.5,25.31,24.41,0,0,0,0
+2023-08-03T02:40:00Z,23.79,42.87,0.902,68.7,23.9,23.65,23.86,23.79,23.95,23.73,24.05,23.7,23.78,24.07,24.16,24.5,24.42,25.4,24.5,0,0,0,0
+2023-08-03T02:50:00Z,23.7,43.89,0.477,95.8,23.82,23.53,23.78,23.62,23.86,23.57,23.88,23.57,23.61,23.99,24.08,24.42,24.42,25.23,24.33,0,0,0,0
+2023-08-03T03:00:00Z,23.56,45.49,0.51,331.4,23.69,23.44,23.69,23.5,23.78,23.65,23.88,23.56,23.56,23.78,23.99,24.33,24.24,25.14,24.41,0,0,0,0
+2023-08-03T03:10:00Z,23.33,47.02,0.091,52.63,23.4,23.19,23.48,23.37,23.53,23.39,23.46,23.35,23.35,23.53,23.74,24.07,24.07,24.89,24.07,0,0,0,0
+2023-08-03T03:20:00Z,23.14,46.96,2.375,273.8,23.22,23.02,23.39,23.12,23.36,23.14,23.29,23.14,23.14,23.4,23.61,23.82,23.73,24.64,23.82,0,0,0,0
+2023-08-03T03:30:00Z,22.94,47.85,2.257,74.28,23.06,22.85,23.1,22.87,23.11,23.02,23.12,22.8,23.05,23.15,23.32,23.65,23.56,24.51,23.6,0,0,0,0
+2023-08-03T03:40:00Z,22.74,49.37,1.777,63.13,22.82,22.6,22.93,22.7,22.86,22.72,22.95,22.64,22.85,22.98,23.19,23.4,23.36,24.3,23.4,0,0,0,0
+2023-08-03T03:50:00Z,22.55,50.99,0.928,84.5,22.66,22.34,22.72,22.53,22.77,22.63,22.78,22.47,22.64,22.72,23.06,23.32,23.32,24.05,23.23,0,0,0,0
+2023-08-03T04:00:00Z,22.43,52.03,0.637,270.1,22.49,22.3,22.68,22.45,22.6,22.47,22.62,22.38,22.38,22.73,22.94,23.14,23.14,23.97,23.1,0,0,0,0
+2023-08-03T04:10:00Z,22.36,52.66,0.758,30.17,22.37,22.09,22.43,22.36,22.6,22.22,22.62,22.33,22.29,22.56,22.73,23.06,23.01,23.97,22.97,0,0,0,0
+2023-08-03T04:20:00Z,22.24,52.77,1.264,86.5,22.33,22.01,22.26,22.07,22.35,22.05,22.37,22.05,22.22,22.39,22.56,22.81,22.9,23.71,22.81,0,0,0,0
+2023-08-03T04:30:00Z,22.19,52.35,2.44,73.13,22.33,22.01,22.22,22.11,22.35,22.05,22.33,22.22,22.18,22.39,22.56,22.82,22.82,23.76,22.86,0,0,0,0
+2023-08-03T04:40:00Z,22.17,51.63,2.172,353.5,22.33,22.09,22.42,22.2,22.43,22.22,22.37,22.17,22.3,22.43,22.64,22.98,22.89,23.84,22.94,0,0,0,0
+2023-08-03T04:50:00Z,22.18,51.44,1.509,78.72,22.41,22.18,22.43,22.24,22.52,22.22,22.54,22.21,22.38,22.48,22.81,22.98,23.06,23.88,23.06,0,0,0,0
+2023-08-03T05:00:00Z,22.27,50.62,0.405,181.5,22.29,22.18,22.38,22.28,22.52,22.26,22.53,22.3,22.35,22.6,22.64,23.07,22.98,23.88,23.02,0,0,0,0
+2023-08-03T05:10:00Z,22.18,49.98,0.817,0.024,22.41,22.05,22.47,22.28,22.53,22.26,22.41,22.23,22.35,22.43,22.64,22.99,22.99,23.84,22.95,0,0,0,0
+2023-08-03T05:20:00Z,22.15,49.39,2.724,123.2,22.25,22.05,22.34,22.19,22.44,22.26,22.45,22.14,22.35,22.39,22.72,22.98,22.82,23.8,22.94,0,0,0,0
+2023-08-03T05:30:00Z,22.18,49.1,0.817,58.95,22.5,22.09,22.42,22.28,22.54,22.29,22.53,22.22,22.39,22.51,22.72,23.03,22.99,23.92,22.98,0,0,0,0
+2023-08-03T05:40:00Z,22.21,48.25,1.062,84.9,22.33,22.09,22.42,22.23,22.45,22.26,22.45,22.17,22.38,22.51,22.72,22.94,22.94,23.75,23.02,0,0,0,0
+2023-08-03T05:50:00Z,22.19,47.59,1.326,88.5,22.41,22.17,22.51,22.2,22.53,22.3,22.45,22.21,22.38,22.47,22.77,23.06,23.02,23.8,23.02,0,0,0,0
+2023-08-03T06:00:00Z,22.19,47.35,1.447,86.2,22.41,22.09,22.46,22.19,22.61,22.25,22.45,22.13,22.38,22.47,22.64,22.98,22.94,23.84,22.93,0,0,0,0
+2023-08-03T06:10:00Z,22.18,47.4,1.267,91.2,22.33,22,22.34,22.15,22.44,22.21,22.45,22.14,22.26,22.43,22.72,22.98,22.98,23.79,22.98,0,0,0,0
+2023-08-03T06:20:00Z,22.15,47.29,2.359,58.21,22.41,22.09,22.42,22.2,22.48,22.25,22.45,22.14,22.39,22.51,22.64,22.98,22.81,23.84,22.98,0,0.335,0.335,0.335
+2023-08-03T06:30:00Z,22.17,47.45,0.372,68.51,22.41,22.08,22.37,22.28,22.53,22.21,22.45,22.18,22.31,22.46,22.68,22.99,22.94,23.84,22.94,0.335,0.335,0.335,0.335
+2023-08-03T06:40:00Z,22.11,48.17,1.379,81.9,22.33,21.92,22.26,22.11,22.36,22.05,22.36,22.22,22.18,22.34,22.6,22.86,22.9,23.67,22.86,0.67,1.005,1.005,1.005
+2023-08-03T06:50:00Z,22,49.95,0.745,75.7,22.16,21.84,22.17,22.03,22.27,22.04,22.2,22.05,22.17,22.26,22.47,22.81,22.77,23.55,22.77,1.005,1.675,1.675,1.675
+2023-08-03T07:00:00Z,21.86,51.1,1.379,71.25,22.07,21.75,22.13,21.78,22.11,21.88,22.12,21.88,22.05,22.17,22.3,22.6,22.6,23.46,22.64,2.011,2.011,2.346,2.011
+2023-08-03T07:10:00Z,21.76,52.1,1.166,68.63,21.99,21.67,22.04,21.86,22.11,21.79,22.12,21.8,21.92,22.05,22.3,22.64,22.56,23.42,22.56,3.351,4.021,4.356,4.021
+2023-08-03T07:20:00Z,21.67,52.85,0.637,64.67,21.91,21.58,21.88,21.78,22.02,21.75,21.95,21.71,21.84,21.96,22.14,22.47,22.47,23.29,22.47,4.356,5.362,5.362,5.026
+2023-08-03T07:30:00Z,21.66,53.11,1.294,81.7,21.82,21.58,21.83,21.61,21.94,21.62,21.94,21.75,21.79,22,22.17,22.43,22.47,23.29,22.51,5.026,6.032,6.367,5.697
+2023-08-03T07:40:00Z,21.64,53.56,0.467,89.8,21.91,21.66,21.87,21.77,22.03,21.62,21.94,21.79,21.79,22,22.13,22.47,22.47,23.29,22.55,10.39,12.73,13.4,12.4
+2023-08-03T07:50:00Z,21.63,53.56,0.902,68.87,21.91,21.67,21.92,21.82,22.11,21.79,22.03,21.79,21.91,22.01,22.3,22.55,22.55,23.46,22.59,17.43,21.11,22.79,21.11
+2023-08-03T08:00:00Z,21.66,53.74,1.503,82.7,22.08,21.83,22.08,21.94,22.24,21.83,22.2,21.95,22,22.17,22.21,22.63,22.59,23.46,22.8,30.83,35.52,37.53,35.52
+2023-08-03T08:10:00Z,21.72,53.8,0.395,108,22.2,21.91,22.21,NA,22.36,22,NA,NA,NA,22.38,22.42,NA,NA,NA,NA,25.13,31.5,33.51,30.49
+2023-08-03T08:20:00Z,21.77,53.63,0.764,289.1,22.41,22.17,22.42,NA,22.36,22,NA,NA,NA,22.47,22.64,NA,NA,NA,NA,24.13,29.49,33.51,31.16
+2023-08-03T08:30:00Z,21.91,52.48,0.621,61.35,22.58,22.25,22.59,NA,22.69,22.29,NA,NA,NA,22.72,22.63,NA,NA,NA,NA,49.26,54.62,57.97,53.95
+2023-08-03T08:40:00Z,22.04,50.45,0.454,85.9,22.75,22.5,22.71,22.7,22.86,22.54,22.79,22.73,22.73,22.89,22.89,23.32,23.45,24.14,23.45,45.24,54.29,58.31,53.28
+2023-08-03T08:50:00Z,22,51,0.833,254.5,22.67,22.34,22.72,22.62,22.77,22.55,22.79,22.64,22.73,22.81,22.81,23.24,23.28,24.13,23.4,48.92,57.97,61.99,57.64
+2023-08-03T09:00:00Z,22.12,50.28,0.376,54.92,23.09,22.72,22.93,22.78,23.02,22.55,22.87,22.81,22.85,23.14,23.14,23.49,23.53,24.26,23.57,57.3,65.68,70.37,64.67
+2023-08-03T09:10:00Z,22.15,51.27,0.918,15.42,23.26,22.93,23.23,23.12,23.36,22.93,23.21,23.24,23.24,23.4,23.4,23.75,23.83,24.6,23.92,78.41,91.8,98.2,89.8
+2023-08-03T09:20:00Z,22.19,52.12,0.163,71.59,23.94,23.73,23.86,23.58,23.82,23.35,23.46,23.49,23.53,24.03,24.07,24.09,24.34,24.81,24,95.8,110.9,117.6,107.2
+2023-08-03T09:30:00Z,22.29,51.76,0.843,329,23.64,23.36,23.61,23.33,23.57,23.27,23.21,23.39,23.48,23.65,23.53,23.78,24.07,24.72,24.03,101.2,116.3,121.3,107.6
+2023-08-03T09:40:00Z,22.34,50.83,2.355,65.7,23.35,23.19,23.52,23.25,23.44,23.4,23.38,23.52,23.48,23.74,23.36,23.82,23.9,24.77,24.28,93.2,106.6,112.6,100.2
+2023-08-03T09:50:00Z,22.29,51.41,0.007,122.7,24.53,24.11,24.4,24.43,24.45,23.9,24.05,24.16,24.25,24.53,24.41,24.67,25.01,25.44,24.88,122,141.4,149.1,137.4
+2023-08-03T10:00:00Z,22.73,47.68,1.339,73.46,24.78,24.49,24.78,24.68,24.87,24.53,24.47,24.84,24.84,25.17,24.58,25.1,25.44,26.03,25.64,148.8,174.2,183,165.5
+2023-08-03T10:10:00Z,22.89,47.78,0.755,116.5,25.2,25.2,25.45,25.02,25.21,24.78,24.51,24.76,24.93,25.33,25.42,25.36,25.7,25.95,25.36,153.8,182.3,192.3,173.9
+2023-08-03T10:20:00Z,23.21,45.4,1.254,93.4,26.47,26.18,26.3,26.41,26.26,25.96,25.94,26.46,26.25,26.6,25.93,26.54,26.8,27.59,27.09,251,284.5,294.2,262.4
+2023-08-03T10:30:00Z,23.56,44.94,0,354.3,28.71,28.16,28.2,28.77,28.26,26.64,26.87,27.6,26.45,28.46,27.57,28.4,28.45,28.39,27.77,784.1,823,827,783.1
+2023-08-03T10:40:00Z,24.28,44.05,0.114,259.7,29.46,28.71,28.58,29.24,28.52,27.65,27.51,28.35,27.37,29.55,27.95,28.95,28.86,29.32,28.81,270.1,294.9,301.9,251
+2023-08-03T10:50:00Z,24.86,40.91,0.431,267.8,29.23,29.02,29.44,29.49,28.74,28.18,28.48,29.02,27.91,30.04,28.35,29.27,29.19,29.96,29.31,375.6,395.4,397.1,340.1
+2023-08-03T11:00:00Z,25.62,39.57,1.235,52.79,32.19,31.57,31.95,32.19,30.9,30.64,31.22,31.22,29.61,33.39,30.52,31.98,31.56,32.91,32.7,601.5,618.9,611.9,351.2
+2023-08-03T11:10:00Z,25.62,39.79,0.82,344,29.4,29.27,29.9,29.15,29.11,29.39,29.07,29.77,29.6,30.07,28.38,29.44,29.9,30.38,29.9,358.9,387.4,391.4,330.4
+2023-08-03T11:20:00Z,25.01,40.51,0.862,62.47,28.26,27.67,28.38,28.56,28.49,28.13,28.22,28.71,28.58,28.6,27.67,28.84,29.05,29.62,29.3,511.4,548.6,556,399.5
+2023-08-03T11:30:00Z,25.8,39,1.261,100,30.55,30.29,30.42,30.59,29.84,29.83,30.51,30.62,29.48,31.69,29.54,30.92,30.45,32.41,31.77,373,402.2,405.5,318.4
+2023-08-03T11:40:00Z,25.34,40.77,0.97,54.19,29.46,28.53,28.82,29.41,28.62,28.49,29.11,29.22,28.41,29.8,28.24,29.73,29.26,30.89,30.2,501.7,513.4,515.5,325.4
+2023-08-03T11:50:00Z,25.59,39.98,0.346,69.85,29.67,28.51,28.3,29.78,28.29,28.42,29.07,28.88,28.16,29.86,28.22,29.9,29.14,30.67,29.98,223.9,231.3,224.9,151.5
+2023-08-03T12:00:00Z,25.66,39.44,0.095,77.58,33.45,30.7,30.82,33.67,31.45,31.07,32.79,32.36,30.96,32.35,31.25,33.97,32.11,35.54,34.53,630.4,632.8,626.4,514.5
+2023-08-03T12:10:00Z,26.34,38.58,0.699,120.1,32.17,30.95,31.24,32.78,30.95,31.37,32.11,32.1,30.83,32.51,30.87,33.17,31.77,34.18,33.68,886,883,880,480.6
+2023-08-03T12:20:00Z,26.8,36.11,2.715,74.41,33.52,32.38,32.42,32.87,31.31,33.35,32.66,32.66,31.85,34.28,31.92,33.47,32.15,34.81,34.87,894,889,885,849
+2023-08-03T12:30:00Z,27.18,33.67,0.457,87.9,34.29,32.71,32.5,34.52,32.56,33.05,33.84,33.34,32.61,34.61,32.5,34.7,33,35.7,35.12,906,902,898,313.7
+2023-08-03T12:40:00Z,27.2,34.79,0.892,70.42,33.17,32.18,32.47,33.24,32.21,33.45,33.37,33.08,33.04,34.21,32.23,33.34,33.21,34.89,34.44,922,914,915,779.5
+2023-08-03T12:50:00Z,27.38,34.98,1.82,72.47,33.94,32.35,32.77,34,32.44,33.91,33.83,33.59,33.89,34.47,32.61,34.14,33.5,35.69,35.88,978,963,966,944
+2023-08-03T13:00:00Z,27.59,33.98,1.297,270.2,33.8,32.5,32.42,33.45,32.56,33.35,33.03,32.74,32.65,34.28,32.17,33.34,33.46,34.38,34.14,953,945,949,931
+2023-08-03T13:10:00Z,27.65,35.32,0.513,167.9,36.31,34.65,34.05,35.14,34.77,34.73,34.34,34.1,34.44,35.62,34.91,35.67,35.54,36.12,35.75,978,969,982,970
+2023-08-03T13:20:00Z,27.93,35.49,0.921,60.62,36.69,34.98,34.34,36.24,34.52,34.77,35.32,34.61,34.48,36.21,34.1,34.99,35.2,36.46,35.46,961,953,959,939
+2023-08-03T13:30:00Z,28.31,32.55,0.487,204.4,34.9,33.5,33.41,34.6,33.21,33.92,34.39,33.88,32.99,34.98,33.12,34.47,34.22,35.57,34.98,954,939,947,937
+2023-08-03T13:40:00Z,28.34,31.07,1.398,79.41,35.79,33.88,33.66,35.77,33.93,34.38,35.99,34.76,34.21,35.61,33.92,35.53,34.85,37.17,36.63,957,938,950,938
+2023-08-03T13:50:00Z,27.95,33.05,1.124,19.12,34.73,33.96,33.66,34.03,32.92,33.03,33.44,33.02,32.46,34.72,33.5,33.23,33.53,34.28,33.35,963,941,951,942
+2023-08-03T14:00:00Z,28.24,31.01,0.8,130.4,35.28,33.7,34.08,35.56,34.1,34.04,35.01,33.65,32.88,35.31,33.58,35.05,34.67,36.19,35.22,962,940,951,943
+2023-08-03T14:10:00Z,28.3,31.8,0.663,57.31,34.86,33.16,33.66,35.51,34.05,33.58,35.38,34.07,33.61,34.93,33.42,35.1,34.75,36.44,35.65,960,934,949,939
+2023-08-03T14:20:00Z,28.68,30.99,0.637,320.3,36.94,34.17,34.71,36.19,34.35,34.42,35.98,35.01,34.54,36.79,33.41,35.18,35.35,37.24,36.2,989,968,980,973
+2023-08-03T14:30:00Z,28.69,31.34,0.307,230.1,36.22,34.04,34.29,35.84,34.77,34.12,35.89,35.01,34.5,35.94,33.79,35.6,35.35,36.86,35.73,966,942,958,946
+2023-08-03T14:40:00Z,29.14,29.02,1.839,64.01,36.3,34.8,34.71,34.66,33.97,34.67,34.07,33.68,34.49,36.19,34.59,34.87,34.92,35.3,34.62,958,930,948,938
+2023-08-03T14:50:00Z,29.3,30.13,0.725,279.2,36.81,34.93,35.26,37.15,35.53,36.15,36.65,35.33,36.27,36.96,35.06,36.57,36.82,37.92,36.44,941,913,933,935
+2023-08-03T15:00:00Z,29.1,29.63,0.467,309,32.39,32.3,32.76,31.15,30.81,32.85,30.98,31.3,32.19,32.77,31.42,31.17,32.06,32.04,31.17,142.1,148.8,156.2,148.8
+2023-08-03T15:10:00Z,28.2,30.93,2.858,86.9,29.94,29.98,29.85,29.2,29.29,30.1,29.66,29.64,29.85,30.44,29.43,29.85,29.6,30.97,30.06,186.7,196.7,202.1,181
+2023-08-03T15:20:00Z,29.47,30.96,0.474,328,35.24,34.67,34.12,34.48,33.34,34.5,33.98,33.51,35,35.85,34.38,34.32,34.7,34.99,34.61,249.3,252,257.4,236.6
+2023-08-03T15:30:00Z,28.86,30.86,1.862,318.3,34.3,33.87,33.36,33.6,32.92,34.5,34.32,33.61,35.22,34.84,33.45,34.29,34.42,35.67,35.43,393.5,394.1,411.9,397.8
+2023-08-03T15:40:00Z,27.82,33.46,0.408,145.9,29.85,29.4,29.32,29.29,29.03,28.98,29,29,29.08,29.66,29.45,29.55,29.68,30.18,29,143.8,153.5,158.8,136.7
+2023-08-03T15:50:00Z,28.21,33.96,0.366,94.9,34.77,33.33,32.78,34.65,33.78,32.95,34.36,33.54,34.14,34.6,34.52,35.67,34.48,35.97,35.07,930,898,925,914
+2023-08-03T16:00:00Z,28.72,29.67,1.803,83.5,33.62,32.83,32.78,33.56,32.65,33.46,33.98,32.99,34.52,33.84,33.25,33.85,33.89,34.74,33.97,867,836,864,855
+2023-08-03T16:10:00Z,29.02,29.79,0.474,49,30.19,30.25,30.63,30.3,29.92,30.8,30.35,30.92,31.55,30.6,29.96,30.58,30.88,31.44,30.49,175.6,184,194,182
+2023-08-03T16:20:00Z,28.32,32.16,0.425,300.3,30.7,30.42,30.34,30.39,29.86,30.04,29.8,30.11,30.79,30.85,30.09,30.54,30.79,31.02,30.28,228.2,233.9,245.3,230.6
+2023-08-03T16:30:00Z,27.9,32.27,0.242,261.4,29.85,29.42,29.25,29.46,29.27,28.87,29.46,29.3,29.26,29.72,29.63,29.94,30.11,30.51,29.43,541.5,521.4,541.5,522.4
+2023-08-03T16:40:00Z,28.77,30.48,1.447,1.873,35.71,35.22,34.54,34.95,34.86,34.75,34.86,34.02,37.29,35.98,36.83,36.31,36.44,36.3,36.44,858,829,865,856
+2023-08-03T16:50:00Z,28.86,31.38,2.672,280.9,34.56,34.81,34.85,34.48,33.52,34.51,34.02,33.63,37.54,34.6,35.4,35.04,35.42,35.16,35.46,804,772.9,807,802
+2023-08-03T17:00:00Z,29.41,30.38,2.803,279.5,32.61,32.74,33.16,31.91,31.48,32.4,31.74,32.49,34.53,32.91,32.03,32.19,32.36,32.75,32.36,244,257.4,270.1,248.7
+2023-08-03T17:10:00Z,29,31.24,1.506,277.6,34.05,33.99,34.58,34.31,33.83,33.53,34.35,33.72,37.03,34.25,34.84,35.21,35.08,35.37,35.04,773.5,744,779.8,772.1
+2023-08-03T17:20:00Z,28.8,31.67,2.117,261.7,31.59,31.87,32.79,31.56,31.31,31.78,31.43,31.89,33.76,31.83,32.33,32.11,32.28,32.61,32.44,229.6,244.7,257.7,236.3
+2023-08-03T17:30:00Z,29.75,30.62,1.81,72.35,35.58,34.94,35.45,35.62,35.6,34.86,35.58,34.91,37.71,35.92,35.75,36.7,36.7,36.85,36.82,731.6,708.1,747.3,739.9
+2023-08-03T17:40:00Z,29.37,28.97,3.766,94.3,32.49,32.54,33.25,32.27,32.38,33.04,32.74,32.78,34.61,33.05,32.84,33.08,33.64,33.84,33.76,614.6,589.1,630.3,626
+2023-08-03T17:50:00Z,29.02,30.62,1.836,73.83,32.84,32.41,33.21,32.86,32.98,33.34,33.37,33.25,35.54,33.01,33.18,33.59,34.23,34.35,34.31,535.2,514.4,554.3,553.3
+2023-08-03T18:00:00Z,28.7,32.4,2.038,89.2,32.5,32.13,33.02,32.61,32.56,32.76,32.95,33,35.25,32.77,32.9,33.34,33.89,34.09,34.02,542.9,510.1,546.2,398.8
+2023-08-03T18:10:00Z,28.49,33.8,3.466,128.9,31.83,31.93,32.77,NA,31.81,32.56,NA,NA,NA,32.23,32.36,NA,NA,NA,NA,524.5,490.3,526.5,510.4
+2023-08-03T18:20:00Z,27.73,35.39,3.077,77.99,30.48,30.42,31.47,NA,30.58,30.93,NA,NA,NA,30.85,30.93,NA,NA,NA,NA,441.7,413.5,446.7,408.5
+2023-08-03T18:30:00Z,27.59,35.4,2.114,90.6,30.52,30.24,31.5,NA,30.88,30.99,NA,NA,NA,30.62,31.25,NA,NA,NA,NA,144.1,403.2,438.7,443.7
+2023-08-03T18:40:00Z,27.44,34.93,1.748,89.1,29.97,30,31.43,30.33,30.29,30.54,30.58,30.88,32.74,30.42,30.59,30.62,31.6,31.89,31.38,330.4,369.6,405.2,412.2
+2023-08-03T18:50:00Z,27.13,34.33,0.954,291.4,30.73,30.56,31.52,30.75,30.87,30.51,30.84,31.38,32.91,30.69,31.53,31.17,31.98,32.4,31.43,49.93,339.8,375.3,430.6
+2023-08-03T19:00:00Z,27.44,33.34,1.937,66.2,31.16,30.98,32.25,31.09,31.73,31.49,31.48,31.89,34.57,31.45,32.13,31.43,32.87,33.08,32.4,46.58,311,346.5,355.9
+2023-08-03T19:10:00Z,27.03,33.66,1.8,66.7,30.18,30.15,31.5,29.79,29.45,30.61,30.42,31.09,33,30.45,30.83,30.2,31.77,32.15,31.13,43.57,279.5,313.7,324.4
+2023-08-03T19:20:00Z,26.99,33.26,0.836,93.1,29.5,29.92,31.43,27.21,29.76,30.09,27.21,30.32,32.23,29.71,30.89,29.94,31.22,31.01,30.07,40.55,246.7,282.5,294.9
+2023-08-03T19:30:00Z,26.91,32.59,2.702,87.2,28.45,29.56,30.78,26.57,29.64,29.9,27.04,29.94,31.47,29.82,29.99,28.97,30.41,29.74,29.86,38.21,111.6,250.7,262.4
+2023-08-03T19:40:00Z,26.55,33.63,1.682,68.72,26.92,29.07,30.46,26.45,29.13,29.45,26.66,29.39,31.04,29.07,29.79,28.93,30.07,28.73,29.18,35.53,42.9,219.5,231.2
+2023-08-03T19:50:00Z,26.3,33.76,1.385,263.3,26.28,29.05,30.15,25.69,29.39,29.18,26.11,29.18,31.34,29.01,29.9,26.81,30.45,27.38,29.6,32.17,38.88,187.7,200.4
+2023-08-03T20:00:00Z,25.83,34.38,2.29,70.98,25.9,27.93,29.4,25.6,27.04,28.22,25.9,25.83,29.77,28.01,28.61,26.51,28.71,27.33,28.46,28.49,34.86,141.4,169.3
+2023-08-03T20:10:00Z,25.55,35.68,1.97,74.89,25.56,27.22,28.56,25.27,25.77,27.93,25.65,25.41,28.88,27.26,27.81,26.25,26.76,27.04,26.21,24.8,30.83,36.53,134.4
+2023-08-03T20:20:00Z,25.31,37.09,0.206,42.75,25.25,25.24,25.83,25.1,25.37,26.97,25.31,25.07,25.87,25.58,25.92,25.83,26,26.7,25.83,21.11,26.47,31.5,81.4
+2023-08-03T20:30:00Z,25.17,38.02,1.699,77.8,24.9,24.78,25.03,24.86,25.14,25.07,25.11,24.73,24.94,25.2,25.41,25.58,25.54,26.46,25.49,15.08,18.43,20.44,20.78
+2023-08-03T20:40:00Z,24.91,39.22,0.83,84,24.6,24.44,24.69,24.62,24.79,24.56,24.87,24.48,24.56,24.9,25.2,25.32,25.32,26.18,25.15,11.06,14.07,15.75,15.75
+2023-08-03T20:50:00Z,24.72,39.97,0.568,212.5,24.3,24.1,24.4,24.29,24.54,24.23,24.54,24.14,24.22,24.53,24.86,24.99,24.99,25.85,24.9,6.702,8.71,9.72,9.72
+2023-08-03T21:00:00Z,24.56,40.66,0.924,39.68,23.97,23.78,23.99,23.96,24.14,23.74,24.21,23.8,23.76,24.04,24.5,24.65,24.65,25.56,24.56,3.686,4.356,5.027,5.027
+2023-08-03T21:10:00Z,24.34,41.6,0.823,80.9,23.94,23.71,24.09,23.96,24.22,23.83,24.13,23.67,23.76,24.13,24.56,24.65,24.69,25.52,24.48,1.34,1.676,2.011,2.011
+2023-08-03T21:20:00Z,24.11,43.41,3.75,88.9,23.78,23.55,23.93,23.79,24.05,23.76,23.96,23.63,23.68,24.02,24.27,24.61,24.44,25.39,24.4,0.335,0.335,0.335,0.335
+2023-08-03T21:30:00Z,23.84,44.3,0.634,42.47,23.15,22.99,23.36,23.33,23.45,23.28,23.46,23.09,23.13,23.45,23.79,23.98,23.98,24.89,23.89,0,0,0,0
+2023-08-03T21:40:00Z,23.42,45.7,0.464,265.7,22.76,22.66,22.87,22.87,23.02,22.79,22.99,22.68,22.72,23.04,23.34,23.49,23.49,24.43,23.48,0,0,0,0
+2023-08-03T21:50:00Z,23.17,46.65,0,106.2,22.37,22.26,22.46,22.36,22.6,22.38,22.53,22.18,22.3,22.64,22.81,22.98,23.07,23.92,22.98,0,0,0,0
+2023-08-03T22:00:00Z,22.93,47.83,0.637,290.9,22.38,22.18,22.55,22.44,22.6,22.3,22.53,22.18,22.13,22.77,22.98,23.11,23.07,23.96,23.02,0,0,0,0
+2023-08-03T22:10:00Z,22.76,48.6,0.676,187.5,22.3,22.09,22.34,22.32,22.52,22.26,22.49,22.13,22.18,22.56,22.9,23.11,23.07,23.84,22.94,0,0,0,0
+2023-08-03T22:20:00Z,22.6,49.09,0.343,289.8,22.1,21.8,22.09,22.28,22.18,21.96,22.2,21.71,21.84,22.39,22.56,22.73,22.64,23.54,22.6,0,0,0,0
+2023-08-03T22:30:00Z,22.45,49.35,0.928,91.6,22.06,21.8,22.09,22.07,22.19,21.88,22.16,21.79,21.96,22.22,22.48,22.72,22.72,23.47,22.72,0,0,0,0
+2023-08-03T22:40:00Z,22.32,49.94,0.255,68.35,21.73,21.5,21.8,21.82,22.03,21.63,21.95,21.45,21.62,22.1,22.27,22.47,22.55,23.3,22.38,0,0,0,0
+2023-08-03T22:50:00Z,22.15,50.71,0.248,19.1,21.39,21.29,21.58,21.44,21.65,21.41,21.7,21.36,21.45,21.67,21.97,22.13,22.21,23.04,22.17,0,0,0,0
+2023-08-03T23:00:00Z,21.97,51,0.546,84,21.57,21.33,21.54,21.61,21.83,21.54,21.7,21.33,21.45,21.76,22.09,22.17,22.22,23.13,22.13,0,0,0,0
+2023-08-03T23:10:00Z,21.84,50.64,1.735,87.8,21.48,21.16,21.58,21.52,21.8,21.46,21.65,21.29,21.5,21.63,21.97,22.23,22.23,23.08,22.14,0,0,0,0
+2023-08-03T23:20:00Z,21.77,50.36,1.996,57.66,21.48,21.16,21.58,21.52,21.84,21.45,21.69,21.31,21.47,21.62,22,22.24,22.15,23.12,22.07,0,0,0,0
+2023-08-03T23:30:00Z,21.64,50.5,1.365,43.79,21.44,21.12,21.54,21.35,21.63,21.37,21.52,21.15,21.4,21.55,21.93,22.16,22.16,22.96,22.08,0,0,0,0
+2023-08-03T23:40:00Z,21.48,51.47,1.699,39.35,21.15,20.84,21.26,21.18,21.5,21.21,21.35,21.06,21.23,21.3,21.64,21.87,21.95,22.7,21.82,0,0,0,0
+2023-08-03T23:50:00Z,21.35,51.84,1.558,65.32,21.15,20.76,21.26,21.06,21.37,21.01,21.27,20.98,21.15,21.23,21.56,21.83,21.79,22.57,21.75,0,0,0,0
+2023-08-04T00:00:00Z,21.25,52.36,1.914,81,20.89,20.69,20.98,20.93,21.15,20.81,21.1,20.74,20.87,21.03,21.33,21.63,21.68,22.45,21.59,0,0,0,0
+2023-08-04T00:10:00Z,21.14,52.76,1.793,95.9,20.81,20.57,20.86,20.76,21.01,20.82,21.02,20.67,20.79,20.95,21.25,21.6,21.6,22.28,21.51,0,0,0,0
+2023-08-04T00:20:00Z,21.04,52.89,1.039,83.8,20.6,20.44,20.61,20.6,20.76,20.4,20.77,20.45,20.54,20.78,20.99,21.3,21.34,22.07,21.17,0,0,0,0
+2023-08-04T00:30:00Z,20.94,53.26,2.016,83.9,20.54,20.44,20.7,20.65,20.85,20.49,20.78,20.54,20.63,20.7,21,21.31,21.31,22.21,21.26,0,0,0,0
+2023-08-04T00:40:00Z,20.86,53.5,2.009,43.24,20.46,20.37,20.62,20.52,20.77,20.41,20.69,20.42,20.59,20.71,21.09,21.27,21.27,22.08,21.18,0,0,0,0
+2023-08-04T00:50:00Z,20.79,54.07,1.614,67.99,20.42,20.29,20.58,20.44,20.72,20.37,20.61,20.34,20.51,20.54,20.92,21.15,21.15,21.92,21.06,0,0,0,0
+2023-08-04T01:00:00Z,20.67,54.43,1.254,92,20.37,20.21,20.46,20.36,20.6,20.33,20.53,20.26,20.43,20.51,20.84,21.06,21.1,21.88,21.1,0,0,0,0
+2023-08-04T01:10:00Z,20.56,55.16,1.63,78.7,20.28,20.12,20.5,20.29,20.51,20.16,20.46,20.26,20.34,20.38,20.76,20.98,21.06,21.8,20.93,0,0,0,0
+2023-08-04T01:20:00Z,20.4,56.05,1.98,77.02,19.94,19.71,20.09,19.96,20.22,19.92,20.21,19.92,20.13,20.13,20.51,20.72,20.72,21.47,20.6,0,0,0,0
+2023-08-04T01:30:00Z,20.4,56.4,0.454,307.1,20.11,19.87,20.12,20.13,20.26,19.91,20.21,19.88,20.09,20.25,20.55,20.77,20.85,21.52,20.76,0,0,0,0
+2023-08-04T01:40:00Z,20.4,56.82,1.94,83.8,20.03,19.83,20.21,20.04,20.35,20,20.3,19.92,20.05,20.25,20.59,20.81,20.85,21.65,20.68,0,0,0,0
+2023-08-04T01:50:00Z,20.33,57.1,0.895,91.6,19.86,19.63,19.97,19.96,20.09,19.92,20.13,19.67,19.88,20.06,20.35,20.64,20.64,21.43,20.51,0,0,0,0
+2023-08-04T02:00:00Z,20.28,57.22,0.647,106.2,19.81,19.73,19.94,19.87,20.01,19.77,20.04,19.75,19.88,19.98,20.32,20.6,20.6,21.39,20.51,0,0,0,0
+2023-08-04T02:10:00Z,20.22,57.29,1.797,98.2,19.94,19.81,20.06,20,20.18,19.94,20.17,19.84,20.09,20.11,20.53,20.77,20.77,21.48,20.68,0,0,0,0
+2023-08-04T02:20:00Z,20.14,57.71,1.829,72.38,19.77,19.6,19.94,19.79,20.05,19.77,20.05,19.75,19.92,19.9,20.32,20.56,20.6,21.39,20.47,0,0,0,0
+2023-08-04T02:30:00Z,20.04,57.71,1.921,65.87,19.69,19.52,19.94,19.79,20.01,19.69,19.96,19.75,19.79,19.82,20.2,20.55,20.47,21.35,20.43,0,0,0,0
+2023-08-04T02:40:00Z,20.1,57.47,2.342,96.3,19.82,19.61,19.98,19.91,20.09,19.77,20.04,19.83,19.92,19.94,20.32,20.68,20.6,21.43,20.51,0,0,0,0
+2023-08-04T02:50:00Z,20.04,57.43,1.369,46.85,19.6,19.32,19.86,19.66,19.84,19.61,19.79,19.5,19.71,19.7,20.08,20.3,20.43,21.14,20.34,0,0,0,0
+2023-08-04T03:00:00Z,19.94,57.6,1.026,99.7,19.51,19.41,19.74,19.62,19.84,19.45,19.79,19.46,19.54,19.75,20.09,20.34,20.34,21.1,20.26,0,0,0,0
+2023-08-04T03:10:00Z,19.83,57.84,0.523,77.02,19.35,19.24,19.45,19.37,19.72,19.11,19.63,19.33,19.41,19.53,19.92,20.13,20.17,20.89,20.17,0,0,0,0
+2023-08-04T03:20:00Z,19.88,57.59,0.376,95.5,19.39,19.31,19.44,19.42,19.59,19.27,19.55,19.33,19.54,19.49,19.95,20.17,20.13,20.89,20.09,0,0,0,0
+2023-08-04T03:30:00Z,19.83,57.76,1.091,75.1,19.34,19.19,19.6,19.38,19.72,19.31,19.63,19.33,19.45,19.57,19.91,20.17,20.13,20.89,20.09,0,0,0,0
+2023-08-04T03:40:00Z,19.8,57.98,1.738,76.39,19.42,19.31,19.65,19.54,19.72,19.4,19.71,19.41,19.58,19.57,19.99,20.26,20.22,20.98,20.17,0,0,0,0
+2023-08-04T03:50:00Z,19.78,58.37,1.888,83.8,19.5,19.39,19.73,19.55,19.89,19.48,19.71,19.5,19.67,19.65,20.03,20.34,20.3,21.14,20.26,0,0,0,0
+2023-08-04T04:00:00Z,19.81,57.66,2.509,82.7,19.59,19.4,19.74,19.63,19.89,19.57,19.8,19.41,19.67,19.66,20.08,20.39,20.3,21.19,20.26,0,0,0,0
+2023-08-04T04:10:00Z,19.72,58.33,2.323,63.78,19.38,19.15,19.65,19.46,19.72,19.44,19.71,19.33,19.54,19.49,19.95,20.22,20.26,20.98,20.21,0,0,0,0
+2023-08-04T04:20:00Z,19.74,57.5,2.342,87.6,19.51,19.4,19.77,19.55,19.89,19.48,19.8,19.41,19.67,19.69,20.08,20.39,20.3,21.23,20.26,0,0,0,0
+2023-08-04T04:30:00Z,19.7,57.32,1.424,89.2,19.25,19.1,19.52,19.29,19.63,19.23,19.46,19.24,19.41,19.4,19.82,20.09,20.05,20.81,20.09,0,0,0,0
+2023-08-04T04:40:00Z,19.65,57.21,0.14,54.99,19.25,19.06,19.48,19.29,19.51,19.27,19.46,19.16,19.41,19.44,19.78,19.96,20.01,20.81,20,0,0,0,0
+2023-08-04T04:50:00Z,19.8,56.12,1.777,84.8,19.59,19.31,19.73,19.67,19.84,19.48,19.8,19.5,19.71,19.65,20.07,20.43,20.34,21.19,20.3,0,0,0,0
+2023-08-04T05:00:00Z,19.84,55.65,3.159,95.7,19.8,19.48,19.98,19.8,20.05,19.69,19.97,19.75,19.92,19.9,20.2,20.56,20.6,21.4,20.51,0,0,0,0
+2023-08-04T05:10:00Z,19.78,56.21,1.496,62.65,19.33,19.06,19.36,19.42,19.63,19.27,19.5,19.16,19.41,19.4,19.82,20.17,20.05,20.81,20.09,0,0,0,0
+2023-08-04T05:20:00Z,19.73,56.06,0.679,67.89,19.16,19.14,19.39,19.29,19.42,19.14,19.34,18.99,19.2,19.35,19.73,19.88,19.96,20.64,19.84,0,0,0,0
+2023-08-04T05:30:00Z,19.72,55.97,1.509,339.2,19.38,19.23,19.56,19.38,19.59,19.27,19.55,19.24,19.37,19.48,19.91,20.17,20.09,20.93,20.09,0,0,0,0
+2023-08-04T05:40:00Z,19.59,56.6,1.607,65.1,19.12,18.93,19.39,19.21,19.42,19.1,19.29,18.99,19.2,19.36,19.61,19.88,20.01,20.64,19.84,0,0,0,0
+2023-08-04T05:50:00Z,19.4,57.45,3.061,66.83,19.21,18.9,19.4,19.21,19.59,19.15,19.34,19.24,19.29,19.36,19.7,20.05,20.01,20.81,19.92,0,0,0,0
+2023-08-04T06:00:00Z,19.26,58.27,2.169,90.9,18.74,18.6,19.06,18.96,19.13,18.81,19.13,18.82,18.99,18.98,19.36,19.63,19.63,20.47,19.58,0,0,0,0
+2023-08-04T06:10:00Z,19.19,58.52,1.767,89.8,18.83,18.64,19.06,18.96,19.17,18.85,19.13,18.86,19.07,19.06,19.4,19.67,19.67,20.51,19.58,0,0,0.335,0
+2023-08-04T06:20:00Z,19.22,58.37,0.575,66.26,18.49,18.48,18.85,18.79,18.83,18.68,18.79,18.57,18.86,18.81,19.15,19.41,19.46,20.22,19.37,0.335,0.335,0.335,0.335
+2023-08-04T06:30:00Z,19.15,58.63,1.065,69.67,18.74,18.56,18.98,18.83,19.12,18.68,19.04,18.74,18.86,18.94,19.32,19.54,19.62,20.3,19.5,1.34,1.675,1.675,1.675
+2023-08-04T06:40:00Z,19.07,58.88,1.519,74.86,18.74,18.55,18.97,18.79,19,18.6,18.96,18.65,18.9,18.85,19.23,19.54,19.58,20.3,19.5,3.351,4.356,4.691,4.356
+2023-08-04T06:50:00Z,19.08,57.68,1.156,71.52,18.78,18.59,18.97,18.83,19.12,18.76,19.04,18.74,18.86,18.89,19.23,19.62,19.54,20.3,19.5,6.702,8.71,9.38,8.71
+2023-08-04T07:00:00Z,19.12,56.65,1.473,126.1,18.99,18.81,19.02,19,19.11,18.81,19.13,18.9,19.12,19.1,19.4,19.75,19.75,20.47,19.71,11.39,14.41,15.75,14.07
+2023-08-04T07:10:00Z,19.18,55.71,1.702,94.1,18.91,18.68,19.14,18.96,19.16,18.93,19.08,18.82,19.12,19.06,19.44,19.67,19.63,20.47,19.58,13.07,16.75,17.76,15.41
+2023-08-04T07:20:00Z,19.14,55.1,0.823,64.45,19,18.72,19.14,18.96,19.28,18.97,19.13,18.9,19.07,19.14,19.44,19.75,19.75,20.47,19.71,15.41,19.1,20.11,18.09
+2023-08-04T07:30:00Z,19.14,54.77,1.284,96,18.91,18.81,19.1,18.96,19.21,18.93,19.13,18.9,19.03,19.15,19.36,19.75,19.67,20.51,19.62,17.76,22.12,23.46,20.78
+2023-08-04T07:40:00Z,19.08,55.03,1.411,72.84,18.91,18.64,19.06,19,19.12,18.97,19.13,18.9,18.99,19.19,19.4,19.71,19.75,20.47,19.62,20.11,24.8,26.14,23.12
+2023-08-04T07:50:00Z,19.03,55.34,0.039,24.6,18.87,18.72,18.97,18.91,19.25,18.89,18.96,18.78,18.91,19.14,19.36,19.54,19.62,20.39,19.54,24.46,30.16,31.83,28.15
+2023-08-04T08:00:00Z,19.28,54.66,1.447,73.75,19.42,19.15,19.56,19.5,19.66,19.31,19.55,19.33,19.54,19.7,19.95,20.13,20.13,20.94,20.13,29.49,37.19,38.54,34.18
+2023-08-04T08:10:00Z,19.69,53.52,1.607,92.9,21.36,23.22,22.89,NA,20.21,22.25,NA,19.75,20.26,23.35,23.57,20.64,22.29,NA,20.6,32.17,40.21,97.8,198.7
+2023-08-04T08:20:00Z,19.98,51.69,1.904,87.6,23.05,22.97,23.72,NA,23.16,22.93,NA,23.6,23.56,24.07,23.48,24.02,23.39,NA,25.29,215.1,243.3,237.9,69.03
+2023-08-04T08:30:00Z,20.42,51.35,1.48,73.29,24.11,24.11,25.08,NA,24.21,23.35,NA,24.91,24.61,25.25,24.92,25.42,24.7,NA,26.56,251,277.1,271.8,256
+2023-08-04T08:40:00Z,20.53,50.93,1.695,80.1,23.65,23.77,24.32,24.47,24.04,23.43,24.35,24.77,24.52,25.08,24.49,25.07,24.48,25.78,26.42,282.2,308,302.3,297.2
+2023-08-04T08:50:00Z,20.62,50.75,1.914,64.97,23.64,23.68,24.22,24.52,24.15,23.59,24.31,24.76,24.34,24.9,24.48,25.14,24.55,25.74,25.99,315.6,345.8,341.1,335.7
+2023-08-04T09:00:00Z,21.01,49.35,2.607,61.32,23.9,24.15,24.87,24.89,24.35,24.74,25.02,25.49,24.82,25.12,24.83,25.37,25.11,26.33,26.81,374.6,408.8,403.4,395.7
+2023-08-04T09:10:00Z,21.34,49.59,0.853,17.63,24.67,24.47,25.01,25.61,24.69,25.18,24.98,25.53,24.76,25.52,25.06,26.04,25.4,26.75,26.67,343.8,365.9,360.5,349.5
+2023-08-04T09:20:00Z,21,50.62,2.639,112.8,23.92,23.95,24.67,24.89,24.11,25.13,24.64,25.14,23.83,24.84,24.5,25.35,24.89,25.99,26.37,423.2,448,440.6,423.5
+2023-08-04T09:30:00Z,21.56,49.02,2.479,79.79,25.78,25.63,26.43,26.49,25.75,26.39,26.28,26.4,24.36,27.03,26.35,26.86,26.52,27.5,27.84,462.4,488.6,484.2,477.5
+2023-08-04T09:40:00Z,22.09,47.34,2.963,80,26.8,26.9,28.03,27.95,27.06,28.07,27.82,28.67,26.01,27.95,27.78,28.25,28.04,29.39,30.71,550.2,587.7,591.1,581.4
+2023-08-04T09:50:00Z,22.43,46.85,0.601,41,29.43,29.1,30.06,31.15,29.3,29.64,30.77,30.92,27.74,29.94,29.31,31.56,30.2,33.09,33.38,451.7,497.3,506,477.2
+2023-08-04T10:00:00Z,22.35,46.81,2.176,95.7,25.46,25.02,25.86,25.32,25.01,25.56,25.11,25.62,24.9,26.07,24.94,25.71,25.45,26.5,26.09,393.7,423.2,427.3,386.7
+2023-08-04T10:10:00Z,21.6,49.45,0.938,52.12,22.75,22.43,23.02,22.83,22.83,23.06,22.96,23.29,23.21,23.15,22.65,23.38,23.38,24.43,23.67,137.1,156.5,160.5,135
+2023-08-04T10:20:00Z,21.26,51.12,0.673,288.9,22.92,22.38,22.84,22.7,22.66,22.72,22.7,22.91,22.95,22.97,22.72,23.25,23.38,24.13,23.38,133,157.2,167.6,152.5
+2023-08-04T10:30:00Z,21.08,51.8,0.98,77.39,22.5,22.04,22.67,22.49,22.44,22.46,22.54,22.91,23.12,22.63,22.47,22.95,23.25,24.05,23.42,172.2,200,212.4,197.4
+2023-08-04T10:40:00Z,21.29,51.42,2.604,94.6,23.43,23.14,23.38,23.08,23.2,23.05,23.34,23.46,23.21,23.73,23.56,23.85,23.72,24.77,24.35,169.6,197.7,206.4,165.5
+2023-08-04T10:50:00Z,21.41,52.16,0.268,87.7,24.53,24.25,24.54,24.43,24.54,23.87,24.22,24.61,24.48,24.67,24.55,25.29,25.25,25.82,25.59,229.2,274.8,286.8,249.6
+2023-08-04T11:00:00Z,21.84,49.65,2.607,68.36,23.43,23.35,23.61,23.29,23.39,23.56,23.54,23.69,23.94,24.12,23.32,23.9,24.07,24.89,24.49,213.8,241.3,245.9,213.1
+2023-08-04T11:10:00Z,21.7,50.43,1.686,83.9,23.01,22.87,23.21,22.91,22.93,22.95,22.87,22.97,23.18,23.29,23.13,23.44,23.57,24.22,23.61,152.1,174.9,181.9,164.5
+2023-08-04T11:20:00Z,21.81,50.7,1.989,159.2,23.43,23.1,23.43,23.17,23.19,23.18,23.33,23.32,23.32,23.73,23.31,23.79,23.83,24.77,24.12,187.6,203.7,205.1,165.5
+2023-08-04T11:30:00Z,22.51,49.21,2.401,79.02,25.8,25.33,25.45,25.23,25.29,25.03,25.23,25.31,24.76,26.98,25.33,26.2,25.52,27,26.67,201.1,211.1,208.4,146.1
+2023-08-04T11:40:00Z,23.05,48.13,1.875,64.2,30.42,29.38,29.72,30.3,28.49,28.41,28.99,29.16,28.31,30.74,28.63,30.1,29.08,30.85,30.6,900,919,922,248
+2023-08-04T11:50:00Z,23.72,46.6,1.8,313.1,30.88,30.13,30.98,31.06,29.67,30.34,29.75,30.42,29.4,31.07,29.21,31.06,30.3,31.95,32.12,906,937,940,347.8
+2023-08-04T12:00:00Z,23.48,47.5,1.251,253.8,29.06,28.16,28.58,28.99,27.99,28.5,28.06,28.68,27.74,28.88,27.75,29.02,28.34,30.38,29.78,206.8,231.2,234.6,180.6
+2023-08-04T12:10:00Z,24.35,45.92,0.82,276.6,33.67,31.68,30.83,33.8,31.11,31.25,32.24,NA,NA,33.03,30.33,NA,NA,35.03,NA,957,963,958,625.3
+2023-08-04T12:20:00Z,24.67,43.3,1.143,49.19,32.22,31.48,30.76,31.81,30.67,31.22,30.42,NA,NA,32.28,30.55,NA,NA,32.24,NA,985,997,996,914
+2023-08-04T12:30:00Z,25.14,42.5,2.3,88.2,31.44,30.76,30.42,32.07,30.35,31.73,31.31,NA,NA,32.37,29.79,NA,NA,33,NA,1033,1054,1057,660.2
+2023-08-04T12:40:00Z,25.38,42.16,0.578,288.8,33.47,31.65,31.52,34.82,32.31,32.83,33.76,33.08,32.74,33.47,31.65,34.32,33.38,35.92,34.83,1046,1052,1056,858
+2023-08-04T12:50:00Z,25.52,41.01,1.006,113.8,32.92,31.21,31.33,33.89,31.63,32.6,33.29,32.45,32.28,33.15,30.79,33.08,32.74,34.78,34.48,1093,1092,1101,1028
+2023-08-04T13:00:00Z,25,42.91,2.218,152.6,28.51,28.06,28.15,28.26,27.42,27.94,28.01,27.99,27.87,28.61,27.86,28.16,28.16,28.94,28.03,324.4,341.8,349.9,304.3
+2023-08-04T13:10:00Z,24.69,43.2,2.375,88.7,29.83,28.43,28.76,30.33,29.11,29.14,30.5,29.64,29.18,29.9,28.43,30.24,29.69,31.43,31,693.7,680.3,675.6,615.6
+2023-08-04T13:20:00Z,24.54,43.19,2.855,108.1,28.18,27.32,27.66,28.38,27.94,27.61,28.47,28.5,27.82,28.25,27.41,28.63,28.25,29.86,28.67,709.8,708.8,717.2,688.7
+2023-08-04T13:30:00Z,25.86,41.03,1.058,259.1,34.07,32.09,31.63,33.16,31.91,31.42,31.77,30.79,30.54,33.83,31.51,32.32,32.62,32.86,32.27,1087,1069,1083,1050
+2023-08-04T13:40:00Z,25.62,41.17,1.179,329.4,32.84,30.55,30.5,32.36,30.96,30.76,31.98,30.71,30.96,32.32,30.38,31.85,31.56,32.74,32.36,1103,1084,1095,1056
+2023-08-04T13:50:00Z,25.56,41.18,1.215,234.4,30.85,29.54,29.74,31.56,30.35,29.87,31.26,30.41,29.81,31.06,29.75,31.39,31.09,32.49,31.17,775.2,778.5,802,780.2
+2023-08-04T14:00:00Z,25.59,40.2,4.152,128,30.39,29.4,29.74,30.42,29.54,30.29,30.42,29.81,29.94,31.01,29.24,30.37,30.58,31.48,31.55,1027,1005,1022,996
+2023-08-04T14:10:00Z,25.97,39.4,2.225,266.8,32.55,30.91,31.37,31.3,30.54,30.69,30.84,31,30.96,32.05,30.99,31.26,31.47,32.19,31.72,996,971,985,963
+2023-08-04T14:20:00Z,25.93,40.24,0.65,346.9,34.38,31.61,31.65,34.08,32.22,31.44,33.74,32.49,32.1,32.97,31.49,33.89,32.91,35.1,33.59,1078,1054,1069,1049
+2023-08-04T14:30:00Z,26.01,39.57,1.731,294.4,32.46,30.8,30.8,31.33,30.36,30.17,30.58,30.62,30.79,31.61,31.15,31.39,30.92,31.88,30.75,988,961,981,928
+2023-08-04T14:40:00Z,25.93,40.52,0.817,231,33.35,31.09,31.43,32.99,31.84,31.47,32.65,31.6,31.94,32.53,31.27,33.3,32.91,33.92,32.91,965,938,957,954
+2023-08-04T14:50:00Z,25.58,41.69,2.506,285.6,29.01,28.77,29.49,28.42,28.13,29.57,28.38,28.84,29.48,29.79,28.31,28.76,29.01,29.74,29.22,258.4,275.5,287.5,282.5
+2023-08-04T15:00:00Z,25.24,42.04,0.761,263.6,32.07,30.46,30.46,31.97,30.83,30.67,32.14,31.26,31.81,31.68,30.88,32.87,31.64,33.5,32.4,1031,1022,1048,1029
+2023-08-04T15:10:00Z,25.79,40.06,1.96,285.3,32.93,31.52,31.73,32.86,31.39,32.19,32.06,31.64,32.91,32.62,31.65,33.17,32.57,33.58,33.12,1055,1034,1062,1057
+2023-08-04T15:20:00Z,25.52,39.96,2.355,226.8,30.04,29.33,29.88,30.41,29.45,30.3,30.58,30.28,31.38,30.35,29.08,30.58,30.41,31.51,30.83,941,918,940,933
+2023-08-04T15:30:00Z,25.59,40.36,1.192,290.3,32.93,31.74,31.65,33.45,31.9,32.79,33.83,33.08,34.65,33.18,32.55,33.76,33.42,34.93,34.53,920,897,923,921
+2023-08-04T15:40:00Z,25.48,41.52,2.006,88.2,30.86,29.93,30.26,30.78,29.97,31.06,31.08,30.7,32.27,31.07,30.56,31.34,31.43,31.97,31.77,890,860,884,880
+2023-08-04T15:50:00Z,25.7,40.9,2.767,73.33,31.11,30.22,30.3,30.62,29.8,30.98,30.58,30.53,32.23,31.49,30.6,31.09,31.22,31.8,31.47,872,835,862,847
+2023-08-04T16:00:00Z,25.18,41.83,1.414,334.1,30.13,29.25,29.38,30.12,29.3,30.26,30.08,29.86,31.98,30.31,29.55,30.88,30.58,31.13,30.96,282.9,276.5,286.9,283.5
+2023-08-04T16:10:00Z,25.21,40.68,2.94,48.16,29.92,29.46,29.63,29.48,29.3,30.05,29.86,29.86,31.47,30.23,29.68,30.28,30.45,30.79,30.87,843,814,843,842
+2023-08-04T16:20:00Z,24.79,42.04,2.372,292.9,26.99,26.3,27.19,27.84,26.99,27.31,27.59,27.95,29.05,26.98,26.85,27.95,27.99,28.51,27.78,878,835,866,869
+2023-08-04T16:30:00Z,25.01,41.91,2.071,97.3,26.99,27.06,27.1,27.12,26.7,27.69,27.25,27.23,28.46,27.66,26.65,27.4,27.7,28.05,27.52,594.6,564.4,584.9,580.8
+2023-08-04T16:40:00Z,24.76,42.92,3.639,71.26,28.43,28.08,28.71,28.69,28.29,28.04,28.86,28.42,30.07,28.67,28.88,29.18,29.65,29.62,29.86,840,799.3,835,834
+2023-08-04T16:50:00Z,25.14,42.07,0.728,31.54,30.21,29.53,29.7,30.16,29.64,28.77,29.7,29.3,31.09,29.58,30.72,30.96,31.09,30.84,30.62,254.7,249.4,262.1,238.6
+2023-08-04T17:00:00Z,25.49,41.34,1.091,322,30.84,30.3,30.55,30.84,30.26,29.79,30.76,30.11,32.32,30.43,31.61,31.64,31.68,31.52,31.09,731.9,692.4,725.6,725.2
+2023-08-04T17:10:00Z,25.82,39.86,1.062,342.1,29.14,28.91,29.12,28.72,28.47,28.7,28.56,28.37,30.58,29.63,28.54,29.1,29.48,29.7,29.22,97.9,105.9,114.6,110.6
+2023-08-04T17:20:00Z,24.7,42.87,1.192,27.69,26.1,25.63,26.13,25.89,25.99,25.92,26.23,26.25,26.42,26.18,26.1,26.59,26.68,27.41,26.59,154.2,165.9,176.6,166.9
+2023-08-04T17:30:00Z,24.21,44.39,2.613,81.2,24.92,24.58,24.87,24.59,24.89,24.7,24.97,24.9,24.9,24.96,24.83,25.32,25.32,26.31,25.41,105.2,116.6,122.7,110.9
+2023-08-04T17:40:00Z,24.07,44.98,0.578,75.8,27.11,26.41,26.4,26.49,26.4,26.15,26.53,26.3,26.51,26.96,26.79,27.31,27.06,27.67,26.93,172.2,186.7,197.7,183
+2023-08-04T17:50:00Z,25.09,43.1,1.163,298.7,30.79,29.82,30.2,31.26,31.08,29.6,31.01,30.07,32.23,30.33,31.47,31.85,32.36,32.11,32.1,595.5,560.3,598.8,601.2
+2023-08-04T18:00:00Z,25.25,42.42,0.621,54.81,29.91,29.43,30.48,30.63,30.36,30.65,31.22,30.28,32.66,29.85,30.57,31.39,31.81,32.15,31.94,560,528.1,566,569.3
+2023-08-04T18:10:00Z,25.31,41.18,1.274,332.3,29.29,28.72,29.69,NA,29.56,29.73,NA,29.69,32.62,29.36,29.83,30.58,30.71,NA,30.58,509.4,468.5,503.3,510.4
+2023-08-04T18:20:00Z,24.83,41.53,0.317,298.9,29.16,28.77,29.7,NA,29.56,29.66,NA,29.81,31.6,28.86,30.59,30.66,30.96,NA,30.49,479.5,441,477.2,333.8
+2023-08-04T18:30:00Z,25.15,40.99,0.66,288.1,28.52,28.95,29.75,NA,28.72,29.11,NA,28.67,30.75,28.7,30.09,29.14,29.99,NA,28.58,275.5,413.2,450.7,459.8
+2023-08-04T18:40:00Z,24.72,40.83,1.601,93.8,25.93,25.79,26.46,25.43,25.58,26.25,25.64,26,26.93,26.04,26.21,26.17,26.34,26.87,26.34,90.2,103.6,112.6,106.9
+2023-08-04T18:50:00Z,24.38,41.48,3.241,85.1,25.66,25.62,26.16,25.42,25.79,25.83,25.71,25.75,26.3,26,26.04,26.09,26.51,26.89,26.42,114.9,149.5,163.9,160.9
+2023-08-04T19:00:00Z,24.13,42.66,3.561,77.53,25.62,25.37,26.21,25.75,25.91,25.41,26.05,26.42,26.72,25.66,25.96,26.21,26.72,26.93,26.42,140.1,440.7,488.3,492
+2023-08-04T19:10:00Z,24.7,40.56,1.385,71.22,28.07,28.23,29.24,27.44,27.34,27.93,27.32,28.42,30.07,28.02,29.25,28.51,29.82,28.79,28.5,62,306.3,341.1,346.5
+2023-08-04T19:20:00Z,24.55,41.63,1.643,65.86,26.85,26.84,28.65,24.88,26.99,27.22,24.96,28.08,29.48,26.84,27.56,27.06,28.5,27.96,27.78,35.52,246.6,282.8,293.9
+2023-08-04T19:30:00Z,24.45,42.07,1.496,73.5,25.84,26.81,28.32,24.33,27.07,27.23,24.71,27.78,29.09,26.77,27.57,26.81,28.16,27.03,27.53,34.18,93.8,251.7,265.4
+2023-08-04T19:40:00Z,24.28,43.35,0.343,38.67,25.03,27.12,28.84,24.62,27.83,27.58,25.04,28.5,30.2,26.78,28.09,27.48,28.93,26.52,28.25,64.68,80.1,263.4,275.8
+2023-08-04T19:50:00Z,24.25,43.33,1.281,36.24,24.56,26.71,28.18,24.36,26.95,27.09,24.66,27.4,28.84,26.38,27.43,25.24,27.99,25.97,27.36,51.61,62.33,219.8,231.9
+2023-08-04T20:00:00Z,24.14,43.62,0.934,79.48,24.68,25.04,25.88,24.53,24.73,25.29,24.79,24.65,25.83,25.3,25.38,25.24,25.75,26.01,25.53,47.92,56.97,74.73,74.06
+2023-08-04T20:10:00Z,23.81,43.89,2.225,55.36,24.09,25.35,26.15,23.94,24.22,25.81,24.2,23.88,26.17,25.31,25.73,24.73,25.16,25.54,24.69,31.84,37.87,41.89,93.2
+2023-08-04T20:20:00Z,23.55,44.56,1.957,93.2,23.58,23.46,23.92,23.53,23.7,24.72,23.74,23.38,23.8,23.84,24.13,24.23,24.27,25,24.18,21.11,26.81,31.16,57.64
+2023-08-04T20:30:00Z,23.25,45.31,2.241,66.24,23.2,23.09,23.42,23.07,23.36,23.46,23.37,23.13,23.3,23.51,23.64,23.85,23.81,24.63,23.85,17.09,23.46,25.13,25.13
+2023-08-04T20:40:00Z,23.1,45.82,0.206,6.053,23.29,23.01,23.3,23.2,23.35,23.3,23.41,23.13,23.18,23.44,23.73,23.81,23.94,24.72,23.81,13.4,16.75,18.43,17.76
+2023-08-04T20:50:00Z,22.9,45.84,0.637,75.33,22.91,22.68,22.97,22.86,22.94,22.81,23.03,22.64,22.85,23.15,23.4,23.49,23.57,24.34,23.49,8.04,9.72,10.39,10.05
+2023-08-04T21:00:00Z,22.67,46.08,0.745,245.1,22.49,22.35,22.72,22.53,22.69,22.47,22.7,22.4,22.53,22.81,22.98,23.25,23.25,24.05,23.24,3.351,4.021,4.691,4.691
+2023-08-04T21:10:00Z,22.53,46.79,0.653,57.65,22.2,21.96,22.26,22.19,22.27,22.05,22.36,21.97,22.06,22.39,22.72,22.86,22.82,23.63,22.74,1.675,2.011,2.011,2.011
+2023-08-04T21:20:00Z,22.28,47.74,1.022,48.17,21.92,21.76,21.96,21.94,22.01,21.88,22.19,21.64,21.81,22.14,22.31,22.57,22.57,23.46,22.48,0.335,0.335,0.335,0.67
+2023-08-04T21:30:00Z,22.07,48.76,0.96,93.1,21.63,21.42,21.71,21.69,21.76,21.59,21.86,21.47,21.55,21.85,22.06,22.31,22.31,23.2,22.27,0,0,0,0.335
+2023-08-04T21:40:00Z,21.87,49.7,2.326,63.16,21.56,21.34,21.59,21.52,21.68,21.42,21.77,21.37,21.29,21.72,21.97,22.22,22.14,23.04,22.05,0,0,0,0
+2023-08-04T21:50:00Z,21.7,50.22,1.722,264.2,21.48,21.16,21.33,21.4,21.51,21.16,21.61,21.12,21.16,21.5,21.84,22.05,22.05,22.87,21.92,0,0,0,0
+2023-08-04T22:00:00Z,21.6,50.9,0.206,331.8,20.97,20.79,21.04,20.89,21.09,20.91,21.19,20.7,20.95,21.21,21.46,21.55,21.72,22.41,21.59,0,0,0,0
+2023-08-04T22:10:00Z,21.34,51.9,1.303,79.2,21.06,20.83,21.12,21.02,21.2,20.87,21.19,20.8,20.92,21.17,21.51,21.73,21.64,22.58,21.64,0,0,0,0
+2023-08-04T22:20:00Z,21.18,52.55,1.022,27.6,20.81,20.58,20.91,20.76,21.02,20.7,20.97,20.47,20.73,20.92,21.21,21.49,21.53,22.24,21.36,0,0,0,0
+2023-08-04T22:30:00Z,21.04,52.62,0.412,98.3,20.72,20.34,20.76,20.68,20.85,20.55,20.85,20.48,20.48,20.97,21.14,21.41,21.41,22.24,21.41,0,0,0,0
+2023-08-04T22:40:00Z,21,51.99,0.242,51.87,20.46,20.31,20.6,20.56,20.68,20.44,20.69,20.28,20.45,20.73,20.99,21.17,21.34,22.04,21.12,0,0,0,0
+2023-08-04T22:50:00Z,20.93,51.71,1.607,67.7,20.54,20.2,20.58,20.66,20.77,20.41,20.79,20.41,20.45,20.75,21.01,21.34,21.3,22.18,21.17,0,0,0,0
+2023-08-04T23:00:00Z,20.9,52,0.124,330.1,20.45,20.16,20.45,20.62,20.68,20.37,20.79,20.42,20.38,20.75,20.92,21.35,21.18,22.09,21.26,0,0,0,0
+2023-08-04T23:10:00Z,20.91,52.1,0.807,288.5,20.54,20.21,20.54,20.62,20.76,20.42,20.88,20.51,20.51,20.76,20.93,21.44,21.27,22.31,21.4,0,0,0,0
+2023-08-04T23:20:00Z,20.81,52.59,0.069,349.7,20.37,20.12,20.46,20.42,20.6,20.25,20.46,20.26,20.43,20.63,20.84,21.02,21.19,21.94,21.06,0,0,0,0
+2023-08-04T23:30:00Z,20.72,53.3,1.967,74.5,20.41,19.87,20.29,20.51,20.6,20.25,20.64,20.34,20.34,20.59,20.8,21.19,21.1,22.07,21.15,0,0,0,0
+2023-08-04T23:40:00Z,20.7,53.49,0.696,266.6,20.11,19.88,20.21,20.3,20.43,20.08,20.47,20.09,20.17,20.42,20.76,20.85,21.02,21.69,20.89,0,0,0,0
+2023-08-04T23:50:00Z,20.64,53.4,0.853,297.5,20.11,19.72,20.09,20.34,20.34,19.96,20.47,20.13,20.05,20.35,20.56,20.94,20.85,21.94,20.98,0,0,0,0
+2023-08-14T00:00:00Z,23.6,87.8,1.238,285.4,24.07,23.86,24.03,23.76,23.86,23.82,24.1,23.89,24.06,24.16,24.24,24.57,24.57,25.36,24.69,0,0,0,0
+2023-08-14T00:10:00Z,23.62,88.2,0.546,272.1,24.02,23.9,24.15,23.93,23.95,23.9,24.02,23.97,24.14,24.16,24.41,24.65,24.69,25.41,24.69,0,0,0,0
+2023-08-14T00:20:00Z,23.58,88.7,0,283.8,24.15,23.81,23.9,23.93,23.9,23.89,24.18,23.88,23.89,24.24,24.32,24.69,24.56,25.45,24.65,0,0,0,0
+2023-08-14T00:30:00Z,23.53,89.1,1.039,255,24.06,23.77,24.02,23.93,23.9,23.9,24.06,23.89,24.05,24.15,24.4,24.65,24.65,25.45,24.77,0,0,0,0
+2023-08-14T00:40:00Z,23.55,89.3,0.872,279.4,23.81,23.6,23.89,23.68,23.73,23.77,23.93,23.71,23.8,23.9,24.07,24.48,24.48,25.2,24.56,0,0,0,0
+2023-08-14T00:50:00Z,23.6,88.9,0.238,260.8,24.06,23.94,24.24,23.8,23.86,23.98,24.1,23.84,23.88,24.12,24.33,24.68,24.6,25.45,24.73,0,0,0,0
+2023-08-14T01:00:00Z,23.49,89.3,0,286.7,23.97,23.77,23.94,23.85,23.77,23.77,24.02,23.79,23.79,24.07,24.28,24.47,24.47,25.36,24.56,0,0,0,0
+2023-08-14T01:10:00Z,23.52,89.3,0.519,268.5,23.98,23.72,23.89,23.85,23.78,23.64,24.1,23.75,23.79,24.06,24.15,24.47,24.47,25.28,24.56,0,0,0,0
+2023-08-14T01:20:00Z,23.46,89.5,0.258,269.7,23.89,23.69,23.98,23.85,23.78,23.73,24.02,23.8,23.8,23.98,24.15,24.48,24.48,25.28,24.6,0,0,0,0
+2023-08-14T01:30:00Z,23.55,89,0,263.8,24.15,23.94,24.11,24.01,23.9,23.86,24.18,23.88,23.88,24.32,24.41,24.65,24.6,25.45,24.64,0,0,0,0
+2023-08-14T01:40:00Z,23.5,89.3,0.562,300.4,23.9,23.78,23.94,23.85,23.77,23.82,24.1,23.8,23.8,24.07,24.24,24.56,24.48,25.28,24.56,0,0,0,0
+2023-08-14T01:50:00Z,23.44,89.7,0.418,276,23.72,23.61,23.86,23.68,23.61,23.65,23.93,23.71,23.8,23.78,23.99,24.39,24.31,25.2,24.39,0,0,0,0
+2023-08-14T02:00:00Z,23.46,89.7,0.039,331,23.89,23.86,24.03,23.8,23.86,23.9,24.02,23.8,23.88,24.03,24.24,24.56,24.56,25.32,24.6,0,0,0,0
+2023-08-14T02:10:00Z,23.53,89.6,0,291.7,24.06,23.86,24.07,23.85,23.86,23.73,24.18,23.88,23.8,24.11,24.41,24.65,24.56,25.45,24.56,0,0,0,0
+2023-08-14T02:20:00Z,23.45,89.7,0,196,23.81,23.61,23.9,23.68,23.61,23.73,23.85,23.72,23.72,23.9,24.07,24.39,24.35,25.24,24.52,0,0,0,0
+2023-08-14T02:30:00Z,23.44,90,0.176,217.9,23.81,23.61,23.86,23.68,23.61,23.73,23.93,23.71,23.71,23.86,23.99,24.31,24.31,25.15,24.39,0,0,0,0
+2023-08-14T02:40:00Z,23.49,89.9,0.634,289.7,23.89,23.69,24.07,23.64,23.78,23.78,23.93,23.72,23.8,23.99,24.29,24.4,24.44,25.2,24.48,0,0,0,0
+2023-08-14T02:50:00Z,23.46,89.9,0,94.7,23.81,23.57,23.95,23.51,23.69,23.78,23.85,23.72,23.8,23.91,24.16,24.4,24.48,25.2,24.56,0,0,0,0
+2023-08-14T03:00:00Z,23.45,89.8,0.657,280.4,23.98,23.7,23.86,23.68,23.78,23.65,23.93,23.72,23.8,23.99,24.16,24.48,24.48,25.28,24.48,0,0,0,0
+2023-08-14T03:10:00Z,23.48,89.8,0.702,273.5,23.85,23.65,23.9,23.76,23.78,23.73,23.85,23.8,23.8,23.99,24.2,24.48,24.48,25.24,24.56,0,0,0,0
+2023-08-14T03:20:00Z,23.44,89.8,0.833,253.5,23.72,23.53,23.73,23.55,23.53,23.65,23.72,23.67,23.55,23.87,24.08,24.31,24.22,25.11,24.31,0,0,0,0
+2023-08-14T03:30:00Z,23.39,90.4,0.709,265.5,23.89,23.61,23.86,23.68,23.78,23.65,24.02,23.8,23.8,23.95,24.07,24.48,24.48,25.28,24.48,0,0,0,0
+2023-08-14T03:40:00Z,23.42,90.2,0.448,299.8,23.81,23.52,23.69,23.68,23.57,23.56,23.93,23.55,23.59,23.9,23.91,24.31,24.27,25.2,24.31,0,0,0,0
+2023-08-14T03:50:00Z,23.42,90.4,0.062,78.22,23.73,23.57,23.78,23.55,23.61,23.61,23.76,23.63,23.63,23.82,23.99,24.31,24.23,25.2,24.35,0,0,0,0
+2023-08-14T04:00:00Z,23.46,90.6,0.056,280,23.73,23.61,23.99,23.68,23.78,23.69,23.85,23.71,23.71,23.91,24.16,24.52,24.39,25.28,24.47,0,0,0,0
+2023-08-14T04:10:00Z,23.44,90.5,0,277.4,23.9,23.65,23.9,23.76,23.69,23.57,23.93,23.71,23.71,23.99,24.08,24.47,24.39,25.2,24.47,0,0,0,0
+2023-08-14T04:20:00Z,23.39,90.4,0.287,246.3,23.48,23.32,23.65,23.34,23.44,23.44,23.68,23.38,23.38,23.61,23.83,24.14,24.14,24.94,24.14,0,0,0,0
+2023-08-14T04:30:00Z,23.26,90.7,0.167,285,23.47,23.27,23.57,23.34,23.36,23.44,23.55,23.39,23.39,23.53,23.74,24.15,24.07,24.86,24.23,0,0,0,0
+2023-08-14T04:40:00Z,23.22,91,0.572,299.4,23.43,23.15,23.57,23.26,23.36,23.44,23.59,23.39,23.47,23.53,23.74,24.15,24.15,24.86,24.23,0,0,0,0
+2023-08-14T04:50:00Z,23.18,90.9,0.621,264.5,23.48,23.11,23.44,23.26,23.36,23.31,23.51,23.3,23.39,23.49,23.66,24.07,24.07,24.77,24.11,0,0,0,0
+2023-08-14T05:00:00Z,23.05,91.2,0.869,276.1,23.23,22.98,23.19,23.17,23.19,23.06,23.34,23.05,23.13,23.36,23.49,23.9,23.81,24.61,23.81,0,0,0,0
+2023-08-14T05:10:00Z,23.04,91.5,0.572,319.3,23.31,23.06,23.39,23.17,23.19,23.22,23.34,23.14,23.14,23.44,23.61,23.86,23.99,24.65,23.9,0,0,0,0
+2023-08-14T05:20:00Z,22.97,91.6,0.562,312.2,23.15,22.81,23.23,22.96,23.02,22.98,23.17,22.97,22.97,23.23,23.4,23.73,23.73,24.52,23.82,0,0,0,0
+2023-08-14T05:30:00Z,22.93,91.9,0.062,268.9,23.07,22.85,23.23,23,23.02,23.02,23.09,22.97,22.97,23.23,23.4,23.77,23.73,24.52,23.73,0,0,0,0
+2023-08-14T05:40:00Z,22.89,91.9,0,323.2,22.9,22.68,22.97,22.83,22.81,22.72,23,22.71,22.8,23.06,23.23,23.56,23.56,24.27,23.56,0,0,0,0
+2023-08-14T05:50:00Z,22.87,91.8,0.627,291,23.45,23.15,23.4,23.18,23.19,23.06,23.51,23.21,23.13,23.44,23.66,23.98,23.97,24.82,23.93,0,0,0,0
+2023-08-14T06:00:00Z,22.93,91.6,0.225,268.9,23.24,22.98,23.23,23.09,23.02,23.06,23.22,23.05,23.05,23.27,23.4,23.82,23.77,24.65,23.81,0,0,0,0
+2023-08-14T06:10:00Z,22.9,91.5,0.846,279.3,23.4,23.02,23.27,23.17,23.19,23.19,23.34,23.23,23.31,23.4,23.57,23.95,23.99,24.61,23.99,0,0,0,0
+2023-08-14T06:20:00Z,22.93,91.2,0.095,285.5,23.32,23.06,23.36,23,23.19,23.14,23.26,23.14,23.22,23.4,23.53,23.9,23.9,24.61,23.9,0,0,0,0
+2023-08-14T06:30:00Z,22.86,91.3,0.457,252,23.24,22.89,23.23,23,23.02,22.98,23.09,23.05,23.14,23.23,23.4,23.73,23.73,24.52,23.81,0.335,0.335,0.335,0.335
+2023-08-14T06:40:00Z,22.86,91,0.526,293.6,23.21,23.02,23.27,23,23.11,23.06,23.26,23.14,23.14,23.32,23.49,23.82,23.82,24.61,23.9,0.67,1.005,1.005,1.005
+2023-08-14T06:50:00Z,22.9,90.9,0,86.1,23.25,23.02,23.31,23.13,23.19,23.06,23.34,23.19,23.15,23.4,23.57,23.91,23.91,24.61,23.91,2.011,2.346,2.346,2.011
+2023-08-14T07:00:00Z,22.87,90.4,0.369,291.6,23.25,22.98,23.27,23.09,23.11,23.14,23.26,23.18,23.22,23.28,23.49,23.81,23.81,24.56,23.9,4.691,5.361,6.032,5.361
+2023-08-14T07:10:00Z,22.86,90.2,0.644,298.9,23.3,23.02,23.31,23.13,23.19,23.06,23.43,23.22,23.22,23.4,23.61,23.9,23.85,24.69,23.9,5.026,6.032,6.367,6.032
+2023-08-14T07:20:00Z,22.83,89.9,0.918,277.4,23.33,23.1,23.36,23.13,23.15,23.23,23.34,23.22,23.22,23.4,23.61,23.9,23.9,24.69,23.98,4.691,5.361,5.697,5.026
+2023-08-14T07:30:00Z,22.73,90.1,1.477,272.9,23.25,23.02,23.31,23.09,23.19,23.14,23.26,23.22,23.35,23.32,23.49,23.9,24.03,24.69,24.11,12.06,13.74,14.07,12.06
+2023-08-14T07:40:00Z,22.89,89.5,0.173,280.2,23.46,23.15,23.52,23.34,23.27,23.23,23.51,23.31,23.39,23.57,23.82,24.03,24.07,24.82,24.11,10.05,12.06,12.73,11.39
+2023-08-14T07:50:00Z,22.89,89.1,1.428,286.7,23.42,23.23,23.57,23.26,23.19,23.23,23.34,23.27,23.48,23.45,23.66,23.91,23.99,24.69,24.07,15.08,17.09,18.09,16.08
+2023-08-14T08:00:00Z,22.83,89.1,0.49,276.8,23.33,23.11,23.44,23.26,23.27,23.23,23.43,23.32,23.49,23.45,23.66,24.04,24.04,24.77,24.12,12.06,14.74,15.75,14.41
+2023-08-14T08:10:00Z,22.85,88.9,0,267.3,23.46,23.31,23.61,NA,23.36,23.44,NA,NA,NA,23.57,23.82,NA,NA,NA,NA,24.13,27.14,28.48,25.13
+2023-08-14T08:20:00Z,22.91,88.5,0.954,285.9,23.5,23.31,23.61,NA,23.36,23.4,NA,NA,NA,23.57,23.74,NA,NA,NA,NA,23.12,28.82,30.49,27.81
+2023-08-14T08:30:00Z,22.94,88.1,1.016,284,23.54,23.44,23.69,NA,23.44,23.65,NA,NA,NA,23.78,23.86,NA,NA,NA,NA,38.54,44.57,46.58,42.22
+2023-08-14T08:40:00Z,23.04,87.9,0.552,271.9,23.96,23.86,24.11,23.85,23.82,23.94,24.02,24.12,24.25,23.95,24.16,24.5,24.71,25.37,24.88,55.62,64.34,67.35,60.32
+2023-08-14T08:50:00Z,23.21,86.7,0.199,284.4,24.26,24.16,24.49,24.06,24.11,24.16,24.23,24.41,24.49,24.41,24.5,24.88,24.92,25.7,25.09,64.34,74.05,77.41,69.7
+2023-08-14T09:00:00Z,23.39,86,0.085,73.6,24.68,24.7,25.04,24.57,24.62,24.74,24.78,24.92,25.01,24.92,25,25.35,25.43,26.21,25.6,75.39,89.1,93.8,86.5
+2023-08-14T09:10:00Z,23.59,85,0.376,282.2,24.84,24.87,25.25,24.61,24.7,24.91,24.9,25.09,25.26,25.09,25.17,25.39,25.52,26.3,25.69,86.5,98.2,102.9,92.1
+2023-08-14T09:20:00Z,23.69,84.1,0.101,250.2,25.47,25.46,25.67,25.24,25.28,25.38,25.37,25.64,25.68,25.47,25.85,25.89,26.27,26.8,26.27,98.8,113.9,119.3,108.2
+2023-08-14T09:30:00Z,23.91,83.1,1.506,270.6,25.3,25.21,25.46,25.11,24.94,25.17,25.16,25.41,25.63,25.42,25.34,25.59,25.92,26.67,26.01,90.1,103.2,108.2,97.2
+2023-08-14T09:40:00Z,24.04,82.3,0.777,114.5,25.63,25.67,25.84,25.15,25.23,25.42,25.24,25.66,25.92,25.68,25.93,25.96,26.26,26.8,26.26,93.1,111.9,118.6,109.2
+2023-08-14T09:50:00Z,24.18,82.3,0.924,289.7,25.63,25.79,26,25.45,25.35,25.7,25.45,25.87,26.25,25.83,25.83,26.09,26.26,27.01,26.55,114.3,133,139.4,124.3
+2023-08-14T10:00:00Z,24.18,82,1.561,290.6,25.88,26.02,26.23,25.58,25.51,25.81,25.74,26,26.25,26.11,25.94,26.21,26.46,27.18,26.55,125.3,144.4,151.4,141.1
+2023-08-14T10:10:00Z,24.45,80.9,0.389,280.5,26.01,26.09,26.34,25.74,25.86,25.97,26.04,26.42,26.59,26.14,26.22,26.46,26.8,27.52,27.01,104.5,123,129.3,115.9
+2023-08-14T10:20:00Z,24.76,79.61,0.02,31.08,28.97,29.92,30.08,29.12,28.86,29.11,28.74,29.85,28.83,29.67,29.37,30.02,30.11,30.69,30.83,231.2,264.7,277.4,249
+2023-08-14T10:30:00Z,24.87,78.6,0.559,93,26.64,26.91,26.87,26.42,26.38,26.79,26.67,27.1,27.22,26.79,26.88,27.06,27.52,28.11,27.52,129.7,143.1,149.1,133.4
+2023-08-14T10:40:00Z,24.93,78.64,0.581,282.4,27.15,27.32,27.36,27,26.76,26.9,27.05,27.65,27.6,27.28,27.2,27.65,27.82,28.61,27.94,188.3,209.4,217.5,190.3
+2023-08-14T10:50:00Z,25.37,77.21,0.072,319.6,29.32,30.06,30.05,29.83,29.2,28.83,29.58,30.91,29.68,30.06,29.68,30.53,30.36,31.65,30.91,632.3,693.3,716.7,306.3
+2023-08-14T11:00:00Z,26.34,72.31,0.098,101.5,34.28,34.91,34.31,34.94,33.88,31.44,33.93,35.12,31.93,35.55,33.18,35.29,34.86,36.55,37.03,640.7,673.9,677.9,293.2
+2023-08-14T11:10:00Z,26.64,69.9,0.568,263.9,31.77,32.62,32.71,32.56,32,30.98,32.4,34.18,32.19,33.18,31.95,33.12,33.33,34.81,35.29,788.8,793.9,759.3,560
+2023-08-14T11:20:00Z,26.68,69.99,0.924,94.2,30.19,30.11,30.53,30.58,30.09,30.1,30.92,31.72,30.66,31.29,29.77,30.96,31.08,32.9,32.44,588.2,589.2,572.4,264.4
+2023-08-14T11:30:00Z,26.94,69.65,0.598,77.05,33.16,33.02,32.89,32.86,31.81,31.33,31.97,32.61,31.63,33.65,32.18,32.95,33.16,34.38,34.18,234.3,253.4,255,200.1
+2023-08-14T11:40:00Z,27,68.87,0.987,260.1,33.71,33.25,33.84,33.44,32.85,31.81,32.68,33.72,31.97,34.47,32.83,33.76,34.23,35.6,35.71,768.7,808,819,505.4
+2023-08-14T11:50:00Z,26.45,71.63,0.291,335.6,30.7,30.94,30.77,30.52,30.4,30.18,29.93,30.91,29.94,31.66,30.95,31.34,31.64,32.08,31.97,370,385.4,387.4,294.9
+2023-08-14T12:00:00Z,26.31,70.45,0.604,74.6,29.93,30.12,30.16,29.63,29.43,29.83,29.17,30.32,29.77,30.93,30.13,30.07,30.53,31.24,31.08,341.5,357.6,359.2,293.9
+2023-08-14T12:10:00Z,26.44,68.96,0.608,274.1,29.59,29.81,29.68,29.42,29.05,29.22,29.42,29.94,30.15,29.73,29.52,29.85,30.19,30.85,30.32,261,290.2,297.2,265.1
+2023-08-14T12:20:00Z,26.72,68.59,0.774,64.27,31.88,31.48,31.1,32.08,31.63,31.1,31.79,32.27,31.25,33.13,31.4,32.44,32.78,34.62,34.35,746.3,774.4,781.1,579.7
+2023-08-14T12:30:00Z,27.17,66.3,0.885,267.1,32.18,32.04,30.94,31.61,30.42,30.44,31.23,31.63,31,32.3,30.86,31.34,31.38,32.59,32.14,434.3,449.4,449.4,354.6
+2023-08-14T12:40:00Z,27.08,66.97,0.307,112,31.72,31.79,31.91,32.29,31.84,32.42,32.08,32.91,32.23,33.31,32.05,32.65,33.16,34.23,34.27,846,863,871,649.8
+2023-08-14T12:50:00Z,27.06,67.24,1.212,39.61,34.05,33.81,32.67,34.19,33.48,33.51,33.77,33.67,32.48,35.16,34.53,34.14,34.91,35.84,36.18,925,930,935,859
+2023-08-14T13:00:00Z,27.35,66.63,0.467,124.8,31.59,32.61,31.68,31.15,30.95,31.76,30.9,32.61,32.06,32.87,31.18,31.34,32.23,32.88,33.04,478.5,496.3,496,405.1
+2023-08-14T13:10:00Z,27.35,65.61,0.111,276.8,30.79,31.11,30.82,30.26,30.11,30.73,30.47,31.04,31,31.2,30.65,30.74,31.13,31.78,31.34,257.7,268.4,270.8,224.9
+2023-08-14T13:20:00Z,26.85,64.71,0.862,103.9,31,30.81,30.63,31.02,30.57,30.47,31.32,31.51,30.62,31.44,30.73,31.34,31.55,33.09,32.27,880,878,891,834
+2023-08-14T13:30:00Z,27.59,61.64,0.493,5.524,34.22,33.51,32.95,34.48,33.23,33.21,34.61,34.48,32.99,33.84,33.42,33.59,35.03,36.39,35.8,907,893,897,882
+2023-08-14T13:40:00Z,27.75,60.78,0.232,212.7,35.5,33.84,33.88,35.16,34.16,34.26,36.05,34.99,33.42,35.28,34.52,34.61,35.67,37.58,36.94,899,880,890,878
+2023-08-14T13:50:00Z,28.08,55.4,0.624,269.4,37.02,35.32,34.72,36.39,35.14,35.32,37.24,36.01,34.9,36.38,35.95,35.24,37.33,38.98,38.22,906,886,895,889
+2023-08-14T14:00:00Z,28.01,55.4,0.849,64.88,33.16,32.61,32.86,33.18,32.58,33.66,33.48,33.96,33.49,34.51,32.49,32.94,34.18,35,35.11,900,879,889,822
+2023-08-14T14:10:00Z,28.34,57.9,0.938,76.38,36.17,34.63,34.8,35.04,35.03,34.97,36.18,35.56,34.88,36.75,35.01,34.88,36.5,37.75,37.35,903,880,890,422.6
+2023-08-14T14:20:00Z,28.07,58.39,0.183,26.69,36.94,34.96,34.7,36.94,35.66,34.37,38.13,35.85,35,36.27,36.06,36.19,36.96,39.44,37.13,896,874,886,693.1
+2023-08-14T14:30:00Z,28.01,57.52,1.009,290.6,34.69,33.51,33.05,34.61,33.13,33.51,34.78,34.86,35.33,33.81,33.73,33.67,35.08,36.22,35.71,891,864,878,749.4
+2023-08-14T14:40:00Z,28.28,57.13,1.055,284.2,34.69,34.1,33.08,34.57,33.42,33.76,34.83,34.81,35.92,34.23,34.01,33.71,35.5,36.14,35.58,886,861,875,873
+2023-08-14T14:50:00Z,28.14,61.63,0.755,126.7,34.47,34.37,33.94,34.57,34.27,35.38,35.55,35.24,36.77,35.21,35.76,34.14,36.64,36.82,36.98,870,845,857,856
+2023-08-14T15:00:00Z,27.97,63.5,0.281,80.4,34.86,34.28,33.39,35.04,34.52,34.66,35.17,34.98,37.41,34.87,35.85,34.6,36.56,35.97,36,900,868,882,866
+2023-08-14T15:10:00Z,27.55,65.79,0.973,262.3,34.13,33.64,33.09,33.85,33.04,34.61,33.22,34.34,36.98,33.77,34.87,33.41,35.24,35,35.66,906,876,893,876
+2023-08-14T15:20:00Z,27.46,59.99,0.448,253.4,35.45,34.31,33.92,35.63,34.61,35.62,35.84,35.36,39.06,34.82,36.47,35.54,36.85,37.2,36.85,875,844,860,849
+2023-08-14T15:30:00Z,27.87,57.42,1.326,79.26,34.44,34.22,33.46,33.6,33.17,34.09,33.48,34.22,37.11,33.84,34.35,33.71,35.11,35.04,35.24,854,825,847,845
+2023-08-14T15:40:00Z,28.01,60.25,0.225,153.9,34.39,33.92,33.25,34.07,33.13,34.39,33.94,34.05,36.81,34.56,34.77,34.09,34.98,35.04,34.73,876,852,883,877
+2023-08-14T15:50:00Z,28.18,60.78,0.529,309.5,35.83,35.74,35.4,35.89,35.79,36.2,36.27,35.88,39.49,35.95,37.05,36.68,37.75,37.96,37.83,904,882,914,914
+2023-08-14T16:00:00Z,28.28,62.71,0.794,294,36.94,36.25,35.79,36.35,35.58,36.51,36.6,36.09,40.94,36.55,37.1,36.68,37.96,37.96,38.34,841,818,847,850
+2023-08-14T16:10:00Z,28.44,63.32,1.045,230,34.48,34.4,35.07,34.74,34.14,35.41,34.83,35.24,39.62,34.19,35.46,34.94,36.13,36.64,36.77,795.6,764.5,786.6,785.6
+2023-08-14T16:20:00Z,27.85,64.03,0.124,291.2,33.03,33.25,33.97,33,33.09,34.01,33.3,33.41,37.36,33.25,34.82,33.58,35.49,34.32,34.98,811,790.6,818,820
+2023-08-14T16:30:00Z,27.93,63.88,0.235,251.4,36.91,35.54,36.17,37.28,36,35.49,37.37,36.09,41.28,35.92,38.12,37.66,38.9,38.26,38.85,737.6,744,775.5,777.5
+2023-08-14T16:40:00Z,27.56,65.57,0.595,246.8,34.78,33.3,34.23,34.79,33.34,33.68,35,34.47,39.74,33.13,34.61,34.73,35.92,36.4,36.6,704.1,677,708.5,709.5
+2023-08-14T16:50:00Z,27.86,65.58,0.18,11.38,34.23,33.51,34.9,33.85,33.63,34.01,34.15,33.71,38.81,33.34,35.41,34.52,36.3,35.08,36.13,677.3,645.5,675.3,678
+2023-08-14T17:00:00Z,28.37,64.09,0.268,229.8,34.49,34.57,36.08,34.62,34.86,34.44,35.38,34.6,38.6,34.69,36.64,35.28,36.34,36.1,36.51,648.8,614,641.5,642.5
+2023-08-14T17:10:00Z,27.78,64.65,0.565,112.2,32.7,32.52,33.86,32.5,32.62,32.35,33.01,33.2,37.88,32.27,34.29,33.2,34.56,34.07,34.73,626.7,588.8,618,620.3
+2023-08-14T17:20:00Z,27.56,64.45,0.67,91.6,33.43,32.87,35.06,33.43,33.64,31.94,33.86,33.5,38.22,32.62,35.57,33.8,35.97,35.08,35.67,597.6,562,589.5,594.2
+2023-08-14T17:30:00Z,27.17,65.69,1.852,87.1,31.14,31.07,31.74,30.73,31.05,31.28,31.62,31.94,35.26,31.62,31.45,31.44,32.79,32.63,33.17,562,527.2,556,559.3
+2023-08-14T17:40:00Z,27.33,64.63,0.624,350.2,33.77,32.55,33.86,33.43,33.03,32.55,33.73,33.7,37.35,32.47,34.25,33.79,35.31,35.04,35.14,535.9,501,531.5,535.2
+2023-08-14T17:50:00Z,27.21,64.16,1.614,84.4,31.41,31.17,32.31,31.27,31.42,32.39,31.66,31.97,34.94,31.55,32.1,31.84,32.9,32.92,32.9,504.4,470.2,501.3,505.4
+2023-08-14T18:00:00Z,27.7,59.35,0.372,269.3,33.99,32.95,34.72,33.64,33.53,33.71,33.68,33.54,38.39,32.99,35.02,34.27,35.8,35.08,35.5,470.8,438,469.8,474.2
+2023-08-14T18:10:00Z,27.62,58.51,0.418,287,32.68,32.2,33.71,32.33,32.73,33.29,32.33,33.08,37.33,31.82,34.48,33.04,34.61,33.89,33.84,441.4,408.9,441,445.4
+2023-08-14T18:20:00Z,27.8,56.85,0.232,117.8,31.91,31.75,33.31,31.56,32.11,31.92,31.78,32.48,36.56,31.83,33.23,32.23,34.1,33.17,33.5,405.5,375.7,407.9,412.5
+2023-08-14T18:30:00Z,27.56,56.33,1.882,99.2,30.85,31.22,31.68,30.46,30.96,32.14,30.68,31.76,34.69,31.64,31.31,30.96,32.44,31.95,32.02,370.6,342.8,374,224.5
+2023-08-14T18:40:00Z,27.61,57.53,0.31,112.5,32.92,32.29,33.56,32.45,33.12,33.05,32.67,32.91,36.39,32.17,34.62,33.08,34.82,34.4,33.84,279.5,310.7,341.8,348.5
+2023-08-14T18:50:00Z,27.7,59.15,0,0.168,32.06,31.85,32.69,32.11,32.36,32.82,31.99,32.91,36.09,31.47,33.84,32.74,34.18,33.47,33.25,93.8,280.8,311.3,320
+2023-08-14T19:00:00Z,27.61,59.52,0.261,70.95,33.07,32.41,33.21,31.99,32.27,33.08,31.95,32.78,36.56,32.2,34.02,32.61,34.23,33.72,32.95,48.93,247.3,276.8,286.2
+2023-08-14T19:10:00Z,27.14,60.42,0.137,52.96,31.03,30.85,32.03,30.3,29.46,31.44,30.26,31.76,34.27,30.65,31.83,31,32.91,32.16,31.72,44.57,214.2,244.3,253.4
+2023-08-14T19:20:00Z,26.68,63.22,0.271,200.5,30.27,30.36,31.45,27.81,27.77,30.44,27.38,30.53,33.38,29.9,31.42,29.6,31.85,31.06,30.74,40.89,152.5,214.5,223.9
+2023-08-14T19:30:00Z,26.51,63.94,0.209,83.3,29.46,29.67,30.72,26.11,28.93,29.75,26.33,29.98,32.57,29.12,31.11,28.62,31.17,28.27,29.89,37.2,59.65,181.3,190.7
+2023-08-14T19:40:00Z,26.21,66.4,1.539,254.7,28.62,28.89,29.9,26.11,28.5,28.93,26.41,29.43,32.06,28.3,29.4,28.84,29.85,27.84,29.6,33.18,41.89,151.5,160.9
+2023-08-14T19:50:00Z,26.03,66.97,0.392,87.3,26.12,28.6,30.2,25.78,28.73,29.02,26.24,27.48,31.93,28.48,29.49,26.84,29.34,27.51,29.81,29.16,36.53,119.6,129
+2023-08-14T20:00:00Z,25.65,69.23,0.238,314.9,25.61,27.61,29.08,25.32,27.11,27.95,25.66,25.91,30.57,25.97,28.67,26.25,28.33,27.01,28.11,25.14,31.17,41.56,101.2
+2023-08-14T20:10:00Z,25.2,72.21,0.265,248,25.04,25.27,25.9,24.73,24.84,25.6,24.9,25.06,26.25,25.23,25.48,25.49,25.74,26.17,25.61,19.1,23.46,26.81,27.82
+2023-08-14T20:20:00Z,24.82,74.59,0.196,262.1,24.71,24.81,25.06,24.64,24.67,24.85,24.73,24.64,24.98,24.98,25.07,25.32,25.32,26.08,25.23,14.08,17.76,19.77,20.44
+2023-08-14T20:30:00Z,24.76,75.12,0,279.9,24.5,24.4,24.65,24.48,24.44,24.39,24.36,24.3,24.22,24.65,24.86,25.06,25.19,25.75,24.98,9.05,11.39,12.73,12.73
+2023-08-14T20:40:00Z,24.45,76.6,0,287.4,24.06,23.93,24.06,23.89,23.88,23.93,24.06,23.79,23.54,24.23,24.4,24.56,24.56,25.33,24.55,4.356,5.697,6.032,5.697
+2023-08-14T20:50:00Z,24.18,77.59,0,258.3,23.76,23.68,23.68,23.65,23.7,23.39,23.65,23.5,23.07,23.9,24.06,24.3,24.26,25,24.22,2.011,2.681,2.681,2.681
+2023-08-14T21:00:00Z,24,77.58,0,82,23.37,23.22,23.47,23.23,23.29,23.13,23.49,23.12,22.78,23.47,23.56,23.96,24.01,24.67,23.83,0.67,1.005,1.005,1.005
+2023-08-14T21:10:00Z,23.79,78.57,0.183,273.4,23.37,23.22,23.13,23.08,23.05,22.63,23.25,22.86,22.35,23.39,23.56,23.71,23.62,24.47,23.58,0,0.335,0.335,0.335
+2023-08-14T21:20:00Z,23.49,78.59,0.222,83.4,22.82,22.84,23.01,23,22.96,22.8,22.87,22.52,22.18,23.18,23.31,23.54,23.62,24.18,23.37,0,0,0,0
+2023-08-14T21:30:00Z,23.19,79.59,0,251,22.62,22.52,22.81,22.58,22.44,22.39,22.54,22.27,21.93,22.82,22.82,23.12,23.12,24.01,22.99,0,0,0,0
+2023-08-14T21:40:00Z,22.91,80.5,0,91.9,22.38,22.33,22.5,22.41,22.35,22.16,22.58,22.11,21.69,22.5,22.84,22.88,23.04,23.8,22.92,0,0,0,0
+2023-08-14T21:50:00Z,22.59,81.6,0,268.5,22.13,22.1,22.23,21.99,22.01,21.77,22.16,21.86,21.43,22.23,22.53,22.7,22.7,23.34,22.53,0,0,0,0
+2023-08-14T22:00:00Z,22.48,81.7,0.02,121.9,21.83,21.91,22.16,21.87,21.85,21.82,22.08,21.69,21.48,22.08,22.33,22.54,22.54,23.26,22.37,0,0,0,0
+2023-08-14T22:10:00Z,22.29,83,0,262.1,21.86,21.75,21.92,21.82,21.76,21.45,21.78,21.53,21.28,21.96,22.13,22.38,22.34,23.13,22.33,0,0,0,0
+2023-08-14T22:20:00Z,22.09,83,0,104.5,21.53,21.42,21.88,21.57,21.51,21.46,21.7,21.57,21.19,21.72,22.05,22.21,22.12,22.92,22.21,0,0,0,0
+2023-08-14T22:30:00Z,21.91,84.7,0,302,21.46,21.42,21.46,21.49,21.35,21.04,21.66,21.28,21.03,21.63,21.97,22.17,22.13,23,21.96,0,0,0,0
+2023-08-14T22:40:00Z,21.7,84.3,0.02,103,21.17,21.17,21.46,21.19,21.18,21.21,21.49,21.12,21.03,21.47,21.72,21.96,21.96,22.67,21.88,0,0,0,0
+2023-08-14T22:50:00Z,21.57,85.5,0,207.1,21.29,21.21,21.25,21.15,21.12,20.87,21.4,20.91,20.69,21.38,21.68,21.75,21.71,22.5,21.62,0,0,0,0
+2023-08-14T23:00:00Z,21.38,84.8,0,23.4,21.2,21,21.25,21.02,21.04,20.91,21.15,20.78,20.57,21.26,21.51,21.71,21.71,22.41,21.58,0,0,0,0
+2023-08-14T23:10:00Z,21.29,84.5,0,87.5,20.96,20.66,20.66,20.85,20.88,20.28,21.02,20.48,20.22,20.96,21.13,21.45,21.54,22.25,21.24,0,0,0,0
+2023-08-14T23:20:00Z,21.22,84.4,0,277.8,20.97,20.75,20.91,20.81,20.91,20.54,20.81,20.6,20.35,21.04,21.21,21.45,21.45,22.24,21.45,0,0,0,0
+2023-08-14T23:30:00Z,21.01,85,0,86.6,20.42,20.41,20.7,20.48,20.53,20.37,20.73,20.48,20.31,20.66,20.83,21.2,21.16,21.91,21.11,0,0,0,0
+2023-08-14T23:40:00Z,20.95,85.4,0.379,99.7,20.59,20.49,20.7,20.65,20.78,20.53,20.78,20.43,20.18,20.75,21.04,21.28,21.28,22.08,21.28,0,0,0,0
+2023-08-14T23:50:00Z,20.87,85.7,0,116.3,20.72,20.5,20.66,20.57,20.53,20.24,20.65,20.26,20.09,20.79,20.92,21.2,21.36,21.91,20.98,0,0,0,0
diff --git a/tests/validation/test_validation_kolumbus.py b/tests/validation/test_validation_kolumbus.py
new file mode 100644
index 0000000..72f2a35
--- /dev/null
+++ b/tests/validation/test_validation_kolumbus.py
@@ -0,0 +1,591 @@
+"""
+Field-data validation tests using the Zenodo SOLWEIG v2025 Kolumbus dataset.
+
+Dataset: Wallenberg et al. (2025) - Wall surface temperature validation
+Source: https://zenodo.org/records/15309445
+Location: Gothenburg, Sweden (57.697°N, 11.930°E)
+Period: 2023-05-15 to 2023-08-31 (10-minute intervals)
+Measurements: IR radiometer wall surface temperatures (plastered brick + wood)
+
+These tests validate the SOLWEIG wall temperature model (tg_wall) against
+field measurements of wall surface temperature. With JSON-based wall params,
+material-specific parameters can be passed (e.g., brick vs wood).
+
+Tests are marked @pytest.mark.slow and @pytest.mark.validation since they
+require external data files and take significant time.
+"""
+
+from __future__ import annotations
+
+import csv
+from datetime import datetime
+from pathlib import Path
+
+import numpy as np
+import pytest
+
+# Skip all tests if validation data not present
+DATA_DIR = Path(__file__).parent.parent / "validation_data" / "zenodo_kolumbus"
+# Module-wide marks: every test in this file is skipped when the Zenodo
+# download is absent, and tagged "validation" so the suite can deselect
+# these long-running field-data tests (e.g. `pytest -m "not validation"`).
+pytestmark = [
+    pytest.mark.skipif(not DATA_DIR.exists(), reason="Zenodo validation data not downloaded"),
+    pytest.mark.validation,
+]
+
+
+# ---------------------------------------------------------------------------
+# Data loading helpers
+# ---------------------------------------------------------------------------
+
+
+def load_kolumbus_observations(
+    start: str | None = None,
+    end: str | None = None,
+) -> list[dict]:
+    """Load kolumbus.csv wall temperature observations.
+
+    Returns list of dicts with keys:
+        time, ta, ts_pb_sim, kin_pb_sim, lin_pb_sim, ts_pb_obs,
+        ts_wood_sim, kin_wood_sim, lin_wood_sim, ts_wood_obs
+    """
+    csv_path = DATA_DIR / "kolumbus.csv"
+    start_dt = datetime.fromisoformat(start) if start else None
+    end_dt = datetime.fromisoformat(end) if end else None
+    # Make end inclusive of the full day
+    if end_dt and end_dt.hour == 0 and end_dt.minute == 0:
+        end_dt = end_dt.replace(hour=23, minute=59, second=59)
+    rows = []
+    with open(csv_path) as f:
+        reader = csv.DictReader(f)
+        for row in reader:
+            timestamp = datetime.fromisoformat(row["Time"])
+            if start_dt and timestamp < start_dt:
+                continue
+            if end_dt and timestamp > end_dt:
+                continue
+            rows.append(
+                {
+                    "time": timestamp,
+                    "ta": float(row["Ta"]),
+                    "ts_pb_sim": float(row["Ts_pb_sim"]),
+                    "kin_pb_sim": float(row["Kin_pb_sim"]),
+                    "lin_pb_sim": float(row["Lin_pb_sim"]),
+                    "ts_pb_obs": float(row["Ts_pb_obs"]),
+                    "ts_wood_sim": float(row["Ts_wood_sim"]),
+                    "kin_wood_sim": float(row["Kin_wood_sim"]),
+                    "lin_wood_sim": float(row["Lin_wood_sim"]),
+                    "ts_wood_obs": float(row["Ts_wood_obs"]),
+                }
+            )
+    return rows
+
+
+def load_hourly_observations(start: str, end: str) -> list[dict]:
+    """Load kolumbus observations, keeping only on-the-hour rows."""
+    obs = load_kolumbus_observations(start=start, end=end)
+    return [o for o in obs if o["time"].minute == 0]
+
+
+# ---------------------------------------------------------------------------
+# Test: Data loading and sanity checks
+# ---------------------------------------------------------------------------
+
+
+class TestDataLoading:
+    """Verify that the validation data loads correctly."""
+
+    def test_kolumbus_csv_loads(self):
+        # Full dataset: 2023-05-15..2023-08-31 at 10-minute cadence.
+        obs = load_kolumbus_observations()
+        assert len(obs) > 15000, f"Expected >15000 rows, got {len(obs)}"
+
+    def test_kolumbus_date_range(self):
+        # First row is midnight on the documented start date; last row
+        # falls in August (the documented end month).
+        obs = load_kolumbus_observations()
+        assert obs[0]["time"] == datetime(2023, 5, 15, 0, 0)
+        assert obs[-1]["time"].month == 8
+
+    def test_kolumbus_no_nans(self):
+        """All numeric fields in the leading rows should be valid floats."""
+        obs = load_kolumbus_observations()
+        for row in obs[:100]:  # Spot-check first 100
+            for key, val in row.items():
+                if key == "time":
+                    continue
+                assert not np.isnan(val), f"NaN found in {key} at {row['time']}"
+
+    def test_kolumbus_daytime_wall_heating(self):
+        """During daytime, wall surface should be warmer than air."""
+        obs = load_kolumbus_observations(start="2023-07-01", end="2023-07-01")
+        # Midday window (local hours 11-14) where solar loading peaks.
+        noon = [o for o in obs if 11 <= o["time"].hour <= 14]
+        assert len(noon) > 0
+
+        for o in noon:
+            # At least one wall type should be warmer than air at noon
+            pb_excess = o["ts_pb_obs"] - o["ta"]
+            wood_excess = o["ts_wood_obs"] - o["ta"]
+            assert pb_excess > 0 or wood_excess > 0, (
+                f"Neither wall warmer than air at {o['time']}: pb={pb_excess:.1f}K, wood={wood_excess:.1f}K"
+            )
+
+    def test_umep_met_loader(self):
+        """Verify Weather.from_umep_met() loads the forcing data."""
+        from solweig import Weather
+
+        met = Weather.from_umep_met(
+            DATA_DIR / "metdata_10min_may.txt",
+            start="2023-05-15",
+            end="2023-05-15",
+        )
+        # NOTE(review): 24 rows for a full day of 10-minute data implies
+        # from_umep_met() resamples to hourly — confirm against the loader.
+        assert len(met) == 24
+        assert met[0].datetime == datetime(2023, 5, 15, 0, 0)
+        assert met[0].ta > 0  # Not -999
+        assert met[0].rh > 0
+        assert met[0].pressure > 900  # Valid pressure in hPa
+
+    def test_umep_met_multi_file(self):
+        """Load multiple monthly UMEP met files."""
+        from solweig import Weather
+
+        met = Weather.from_umep_met(
+            [DATA_DIR / "metdata_10min_may.txt", DATA_DIR / "metdata_10min_june.txt"],
+            start="2023-05-31",
+            end="2023-06-01",
+        )
+        # Should span the month boundary
+        assert met[0].datetime.month == 5
+        assert met[-1].datetime.month == 6
+
+    def test_geodata_loads(self):
+        """Verify DSM/DEM/CDSM/groundcover rasters load."""
+        import rasterio
+
+        # All four rasters must exist and share the 80x81 study-area grid.
+        for name in ["dsm", "dem", "cdsm", "groundcover"]:
+            path = DATA_DIR / "geodata" / f"{name}.tif"
+            assert path.exists(), f"Missing {name}.tif"
+            with rasterio.open(path) as src:
+                data = src.read(1)
+                assert data.shape == (80, 81), f"{name}.tif shape={data.shape}"
+
+
+# ---------------------------------------------------------------------------
+# Test: Wall temperature model validation
+# ---------------------------------------------------------------------------
+
+
+class TestWallTemperatureValidation:
+    """Validate SOLWEIG wall temperature (tg_wall) against field measurements.
+
+    This directly tests the ground temperature component without running the
+    full SOLWEIG pipeline (no SVF, shadows, or radiation budget needed).
+    """
+
+    @pytest.fixture
+    def gothenburg_location(self):
+        from solweig import Location
+
+        return Location(latitude=57.6966, longitude=11.9305, utc_offset=2, altitude=10.0)
+
+    def _compute_tg_wall_for_day(
+        self,
+        weather_list,
+        location,
+        tgk_wall=None,
+        tstart_wall=None,
+        tmaxlst_wall=None,
+    ):
+        """Compute tg_wall for each hourly Weather in a day.
+
+        Args:
+            weather_list: List of Weather objects for the day.
+            location: Location object.
+            tgk_wall: Optional wall TgK (temperature gain coefficient).
+            tstart_wall: Optional wall Tstart (baseline offset).
+            tmaxlst_wall: Optional wall TmaxLST (hour of max temperature).
+
+        Returns list of (datetime, ta, tg_wall) tuples.
+        """
+        from solweig.components.ground import compute_ground_temperature
+
+        # We need dummy grids for the ground temp model (any shape works
+        # since tg_wall is a scalar). Use 1x1 grids.
+        # Values are SOLWEIG's generic cobblestone ground parameters
+        # (tgk=0.37, tstart=-3.41); tmaxlst=15.0 is the local hour of
+        # maximum surface temperature.
+        alb = np.array([[0.15]], dtype=np.float32)
+        emis = np.array([[0.95]], dtype=np.float32)
+        tgk = np.array([[0.37]], dtype=np.float32)
+        tstart = np.array([[-3.41]], dtype=np.float32)
+        tmaxlst = np.array([[15.0]], dtype=np.float32)
+
+        results = []
+        for w in weather_list:
+            # Populates sun position (sun_altitude) for this timestamp.
+            w.compute_derived(location)
+            # Skip nighttime (clearness index has division-by-zero at night)
+            if w.sun_altitude <= 0:
+                # Placeholder tuple so the output keeps one entry per hour.
+                results.append((w.datetime, w.ta, 0.0))
+                continue
+            bundle = compute_ground_temperature(
+                weather=w,
+                location=location,
+                alb_grid=alb,
+                emis_grid=emis,
+                tgk_grid=tgk,
+                tstart_grid=tstart,
+                tmaxlst_grid=tmaxlst,
+                tgk_wall=tgk_wall,
+                tstart_wall=tstart_wall,
+                tmaxlst_wall=tmaxlst_wall,
+            )
+            results.append((w.datetime, w.ta, bundle.tg_wall))
+        return results
+
+    @pytest.mark.slow
+    def test_wall_temp_diurnal_pattern(self, gothenburg_location):
+        """Wall temperature deviation should follow a diurnal cycle."""
+        from solweig import Weather
+
+        met = Weather.from_umep_met(
+            DATA_DIR / "metdata_10min_july.txt",
+            start="2023-07-15",
+            end="2023-07-15",
+        )
+        results = self._compute_tg_wall_for_day(met, gothenburg_location)
+
+        # tg_wall should be 0 at night, positive during day
+        night_vals = [tg for dt, ta, tg in results if dt.hour < 4 or dt.hour > 22]
+        day_vals = [tg for dt, ta, tg in results if 10 <= dt.hour <= 16]
+
+        assert all(v == 0.0 for v in night_vals), "tg_wall should be 0 at night"
+        assert any(v > 0 for v in day_vals), "tg_wall should be positive during day"
+        assert max(day_vals) > 1.0, f"Expected peak tg_wall > 1K, got {max(day_vals):.2f}K"
+
+    @pytest.mark.slow
+    def test_wall_temp_vs_observations_summer(self, gothenburg_location):
+        """Compare model wall temperature against observations for a clear summer day.
+
+        This is the primary field-data validation test. We compare:
+        - Model: Ta + tg_wall (using SOLWEIG cobblestone parameters)
+        - Observed: Ts_pb_obs (plastered brick) and Ts_wood_obs (wood)
+
+        The model uses generic parameters so we expect moderate agreement.
+        """
+        from solweig import Weather
+
+        # Use a summer day (July 15, 2023)
+        met = Weather.from_umep_met(
+            DATA_DIR / "metdata_10min_july.txt",
+            start="2023-07-15",
+            end="2023-07-15",
+        )
+        model_results = self._compute_tg_wall_for_day(met, gothenburg_location)
+
+        # Load corresponding hourly observations
+        obs = load_hourly_observations("2023-07-15", "2023-07-15")
+
+        # Match timestamps
+        # NOTE(review): assumes observation timestamps and Weather.datetime
+        # are in the same (local) time base — confirm; a UTC/local mismatch
+        # would silently shrink the matched set.
+        model_dict = {dt: (ta, tg) for dt, ta, tg in model_results}
+        matched_pb = []
+        matched_wood = []
+
+        for o in obs:
+            if o["time"] in model_dict:
+                ta, tg_wall = model_dict[o["time"]]
+                # Modelled wall surface temp = air temp + wall deviation.
+                model_ts = ta + tg_wall
+                matched_pb.append((o["ts_pb_obs"], model_ts))
+                matched_wood.append((o["ts_wood_obs"], model_ts))
+
+        assert len(matched_pb) >= 20, f"Only {len(matched_pb)} matched timestamps"
+
+        # Compute statistics
+        # The same model series is paired with both wall materials, so
+        # `mod` (from matched_pb) also serves the wood comparison.
+        obs_pb = np.array([x[0] for x in matched_pb])
+        obs_wood = np.array([x[0] for x in matched_wood])
+        mod = np.array([x[1] for x in matched_pb])
+
+        rmse_pb = np.sqrt(np.mean((obs_pb - mod) ** 2))
+        rmse_wood = np.sqrt(np.mean((obs_wood - mod) ** 2))
+        mae_pb = np.mean(np.abs(obs_pb - mod))
+        mae_wood = np.mean(np.abs(obs_wood - mod))
+
+        # Report statistics (print for visibility in pytest -v output)
+        print("\n--- Wall Temperature Validation (2023-07-15) ---")
+        print(f"Plastered brick: RMSE={rmse_pb:.2f}°C, MAE={mae_pb:.2f}°C")
+        print(f"Wood:            RMSE={rmse_wood:.2f}°C, MAE={mae_wood:.2f}°C")
+        print(f"Model peak Ts:   {mod.max():.1f}°C")
+        print(f"PB obs peak Ts:  {obs_pb.max():.1f}°C")
+        print(f"Wood obs peak:   {obs_wood.max():.1f}°C")
+
+        # Acceptance criteria: generous thresholds since model uses generic
+        # cobblestone parameters (tgk=0.37, tstart=-3.41) rather than
+        # material-specific properties. The reference paper (Wallenberg et al.
+        # 2025) reports RMSE ~2°C with tuned per-material params.
+        # Single-day RMSE is more variable than monthly; use 15°C threshold.
+        assert rmse_pb < 15.0, f"Plastered brick RMSE={rmse_pb:.2f}°C exceeds 15°C threshold"
+        assert rmse_wood < 15.0, f"Wood RMSE={rmse_wood:.2f}°C exceeds 15°C threshold"
+
+    @pytest.mark.slow
+    def test_wall_temp_multi_day_statistics(self, gothenburg_location):
+        """Compute validation statistics across multiple days in July.
+
+        This provides a more robust assessment than a single day: errors are
+        pooled over every July day with sufficient met coverage before RMSE
+        and bias are computed.
+        """
+        from solweig import Weather
+
+        # Full month of 10-minute meteorological data (no start/end filter).
+        met_all = Weather.from_umep_met(
+            DATA_DIR / "metdata_10min_july.txt",
+        )
+
+        all_pb_errors = []
+        all_wood_errors = []
+
+        # Process each day
+        for day in range(1, 32):
+            day_str = f"2023-07-{day:02d}"
+            day_met = [w for w in met_all if w.datetime.date().isoformat() == day_str]
+            # Skip days with too few met records to resolve a diurnal cycle.
+            if len(day_met) < 20:
+                continue
+
+            model_results = self._compute_tg_wall_for_day(day_met, gothenburg_location)
+            obs = load_hourly_observations(day_str, day_str)
+
+            # Index model output by timestamp; modelled wall surface
+            # temperature is air temperature plus the wall departure (tg).
+            model_dict = {dt: (ta, tg) for dt, ta, tg in model_results}
+            for o in obs:
+                if o["time"] in model_dict:
+                    ta, tg_wall = model_dict[o["time"]]
+                    model_ts = ta + tg_wall
+                    all_pb_errors.append(o["ts_pb_obs"] - model_ts)
+                    all_wood_errors.append(o["ts_wood_obs"] - model_ts)
+
+        # Require a substantial sample before trusting monthly statistics.
+        assert len(all_pb_errors) > 500, f"Only {len(all_pb_errors)} matched points"
+
+        pb_errors = np.array(all_pb_errors)
+        wood_errors = np.array(all_wood_errors)
+
+        rmse_pb = np.sqrt(np.mean(pb_errors**2))
+        rmse_wood = np.sqrt(np.mean(wood_errors**2))
+        # Bias = mean(observed - model): positive means the model runs cold.
+        bias_pb = np.mean(pb_errors)
+        bias_wood = np.mean(wood_errors)
+
+        print("\n--- Wall Temperature Validation (July 2023, all days) ---")
+        print(f"Matched observations: {len(all_pb_errors)}")
+        print(f"Plastered brick: RMSE={rmse_pb:.2f}°C, Bias={bias_pb:+.2f}°C")
+        print(f"Wood:            RMSE={rmse_wood:.2f}°C, Bias={bias_wood:+.2f}°C")
+
+        # Multi-day statistics should be somewhat stable
+        assert rmse_pb < 10.0, f"Monthly RMSE PB={rmse_pb:.2f}°C too high"
+        assert rmse_wood < 10.0, f"Monthly RMSE wood={rmse_wood:.2f}°C too high"
+
+    @pytest.mark.slow
+    def test_wall_temp_brick_params(self, gothenburg_location):
+        """Validate with brick-specific wall params from JSON.
+
+        The Kolumbus plastered brick wall should be better modeled with
+        brick-appropriate thermal response parameters.
+        """
+        from solweig import Weather
+
+        met = Weather.from_umep_met(
+            DATA_DIR / "metdata_10min_july.txt",
+            start="2023-07-15",
+            end="2023-07-15",
+        )
+        # Brick wall params from default_materials.json
+        model_results = self._compute_tg_wall_for_day(
+            met,
+            gothenburg_location,
+            tgk_wall=0.40,
+            tstart_wall=-4.0,
+            tmaxlst_wall=15.0,
+        )
+
+        obs = load_hourly_observations("2023-07-15", "2023-07-15")
+        model_dict = {dt: (ta, tg) for dt, ta, tg in model_results}
+        matched = []
+        for o in obs:
+            if o["time"] in model_dict:
+                ta, tg_wall = model_dict[o["time"]]
+                matched.append((o["ts_pb_obs"], ta + tg_wall))
+
+        assert len(matched) >= 20
+        obs_arr = np.array([x[0] for x in matched])
+        mod_arr = np.array([x[1] for x in matched])
+        rmse = np.sqrt(np.mean((obs_arr - mod_arr) ** 2))
+
+        print(f"\n--- Brick params on PB wall (2023-07-15): RMSE={rmse:.2f}°C ---")
+        assert rmse < 15.0, f"Brick-param RMSE={rmse:.2f}°C exceeds threshold"
+
+    @pytest.mark.slow
+    def test_wall_temp_wood_params(self, gothenburg_location):
+        """Validate with wood-specific wall params from JSON.
+
+        The Kolumbus wood wall should be better modeled with
+        wood-appropriate thermal response parameters.
+        """
+        from solweig import Weather
+
+        # One clear summer day of 10-minute met data.
+        met = Weather.from_umep_met(
+            DATA_DIR / "metdata_10min_july.txt",
+            start="2023-07-15",
+            end="2023-07-15",
+        )
+        # Wood wall params from default_materials.json
+        model_results = self._compute_tg_wall_for_day(
+            met,
+            gothenburg_location,
+            tgk_wall=0.50,
+            tstart_wall=-2.0,
+            tmaxlst_wall=14.0,
+        )
+
+        obs = load_hourly_observations("2023-07-15", "2023-07-15")
+        # Index model output by timestamp for O(1) matching with obs.
+        model_dict = {dt: (ta, tg) for dt, ta, tg in model_results}
+        matched = []
+        for o in obs:
+            if o["time"] in model_dict:
+                ta, tg_wall = model_dict[o["time"]]
+                # Modelled wall surface temp = air temp + wall departure.
+                matched.append((o["ts_wood_obs"], ta + tg_wall))
+
+        # Need most of the day matched for a meaningful RMSE.
+        assert len(matched) >= 20
+        obs_arr = np.array([x[0] for x in matched])
+        mod_arr = np.array([x[1] for x in matched])
+        rmse = np.sqrt(np.mean((obs_arr - mod_arr) ** 2))
+
+        print(f"\n--- Wood params on wood wall (2023-07-15): RMSE={rmse:.2f}°C ---")
+        assert rmse < 15.0, f"Wood-param RMSE={rmse:.2f}°C exceeds threshold"
+
+    @pytest.mark.slow
+    def test_material_params_vs_default_comparison(self, gothenburg_location):
+        """Compare material-specific params against default cobblestone.
+
+        Material-specific params should produce different (ideally better)
+        results than the generic cobblestone default. Only relative peak
+        wall-temperature departures are compared here, not observations.
+        """
+        from solweig import Weather
+
+        met = Weather.from_umep_met(
+            DATA_DIR / "metdata_10min_july.txt",
+            start="2023-07-15",
+            end="2023-07-15",
+        )
+
+        # Default (cobblestone)
+        default_results = self._compute_tg_wall_for_day(met, gothenburg_location)
+        # Brick params
+        brick_results = self._compute_tg_wall_for_day(
+            met, gothenburg_location, tgk_wall=0.40, tstart_wall=-4.0, tmaxlst_wall=15.0
+        )
+        # Wood params
+        wood_results = self._compute_tg_wall_for_day(
+            met, gothenburg_location, tgk_wall=0.50, tstart_wall=-2.0, tmaxlst_wall=14.0
+        )
+
+        # The different params should produce different peak wall temperatures
+        # (results are (datetime, ta, tg) triples; tg is the wall departure).
+        default_peak = max(tg for _, _, tg in default_results)
+        brick_peak = max(tg for _, _, tg in brick_results)
+        wood_peak = max(tg for _, _, tg in wood_results)
+
+        print("\n--- Material comparison (2023-07-15 peak tg_wall) ---")
+        print(f"Default (cobblestone): {default_peak:.2f}K")
+        print(f"Brick:                 {brick_peak:.2f}K")
+        print(f"Wood:                  {wood_peak:.2f}K")
+
+        # Wood should have higher peak (faster response, higher TgK)
+        assert wood_peak > default_peak, "Wood should heat faster than cobblestone"
+        # Brick should also differ from default (different TgK/Tstart)
+        assert abs(brick_peak - default_peak) > 0.1, "Brick should differ from default"
+
+
+# ---------------------------------------------------------------------------
+# Test: Full SOLWEIG pipeline validation
+# ---------------------------------------------------------------------------
+
+
+class TestFullPipelineValidation:
+    """Run the full SOLWEIG pipeline on the Kolumbus site.
+
+    This validates that the complete model chain (shadows → SVF → radiation →
+    Tmrt) produces physically reasonable results with real-world inputs.
+    """
+
+    @pytest.fixture
+    def surface(self, tmp_path):
+        """Load SurfaceData from the Kolumbus GeoTIFFs."""
+        import solweig
+
+        geodata = DATA_DIR / "geodata"
+        # tmp_path keeps per-test intermediate rasters out of the repo tree.
+        surface = solweig.SurfaceData.prepare(
+            dsm=str(geodata / "dsm.tif"),
+            cdsm=str(geodata / "cdsm.tif"),
+            dem=str(geodata / "dem.tif"),
+            land_cover=str(geodata / "groundcover.tif"),
+            working_dir=str(tmp_path / "kolumbus_work"),
+        )
+        return surface
+
+    @pytest.fixture
+    def location(self):
+        """Gothenburg (Kolumbus site) location; UTC+2 = Swedish summer time."""
+        from solweig import Location
+
+        return Location(latitude=57.6966, longitude=11.9305, utc_offset=2, altitude=10.0)
+
+    @pytest.mark.slow
+    def test_single_timestep_noon(self, surface, location):
+        """Run SOLWEIG for a single noon timestep and check outputs are physical."""
+        import solweig
+
+        met = solweig.Weather.from_umep_met(
+            DATA_DIR / "metdata_10min_july.txt",
+            start="2023-07-15",
+            end="2023-07-15",
+        )
+        # Pick noon
+        noon = [w for w in met if w.datetime.hour == 12][0]
+
+        result = solweig.calculate(surface=surface, location=location, weather=noon)
+
+        # WOI pixel (row=22, col=28) - at ground level near building wall
+        woi_tmrt = result.tmrt[22, 28]
+
+        print("\n--- Single Timestep (2023-07-15 12:00) ---")
+        print(f"WOI Tmrt:    {woi_tmrt:.1f}°C")
+        print(f"Tmrt range:  {np.nanmin(result.tmrt):.1f} to {np.nanmax(result.tmrt):.1f}°C")
+        print(f"Air temp:    {noon.ta:.1f}°C")
+
+        # Tmrt should be reasonable (not NaN, not extreme)
+        assert not np.isnan(woi_tmrt), "Tmrt at WOI is NaN"
+        assert 10 < woi_tmrt < 80, f"Tmrt at WOI={woi_tmrt:.1f}°C outside physical range"
+
+        # At noon in summer, Tmrt should generally exceed Ta
+        assert woi_tmrt > noon.ta - 5, "Tmrt much lower than Ta at noon"
+
+    @pytest.mark.slow
+    def test_timeseries_one_day(self, surface, location):
+        """Run full timeseries for one day and verify diurnal Tmrt pattern."""
+        import solweig
+
+        met = solweig.Weather.from_umep_met(
+            DATA_DIR / "metdata_10min_july.txt",
+            start="2023-07-15",
+            end="2023-07-15",
+        )
+
+        summary = solweig.calculate_timeseries(
+            surface=surface,
+            location=location,
+            weather_series=met,
+            timestep_outputs=["tmrt"],
+        )
+        results = summary.results
+
+        # NOTE(review): expects 24 records from a 10-minute file — presumably
+        # from_umep_met resamples to hourly for a one-day window; confirm.
+        assert len(results) == 24
+
+        # Sample the WOI pixel (row=22, col=28) across the day.
+        woi_tmrt = [r.tmrt[22, 28] for r in results]
+        hours = [met[i].datetime.hour for i in range(len(met))]
+        ta_series = [met[i].ta for i in range(len(met))]
+
+        print("\n--- Timeseries (2023-07-15) ---")
+        print(f"{'Hour':>4s} {'Ta':>6s} {'Tmrt':>6s} {'Tmrt-Ta':>7s}")
+        for h, ta, tmrt in zip(hours, ta_series, woi_tmrt, strict=False):
+            print(f"{h:4d} {ta:6.1f} {tmrt:6.1f} {tmrt - ta:+7.1f}")
+
+        # Daytime Tmrt should exceed nighttime
+        day_tmrt = [t for h, t in zip(hours, woi_tmrt, strict=False) if 10 <= h <= 16]
+        night_tmrt = [t for h, t in zip(hours, woi_tmrt, strict=False) if h < 5 or h > 22]
+
+        if day_tmrt and night_tmrt:
+            assert np.mean(day_tmrt) > np.mean(night_tmrt), (
+                f"Daytime Tmrt ({np.mean(day_tmrt):.1f}) should exceed nighttime ({np.mean(night_tmrt):.1f})"
+            )
diff --git a/tests/validation/test_validation_montpellier.py b/tests/validation/test_validation_montpellier.py
new file mode 100644
index 0000000..553bf70
--- /dev/null
+++ b/tests/validation/test_validation_montpellier.py
@@ -0,0 +1,820 @@
+"""
+Field-data validation tests using the INRAE PRESTI Montpellier canyon dataset.
+
+Dataset: Garcia de Cezar et al. (2025) - Microclimate in Mediterranean urban canyon
+Source: https://entrepot.recherche.data.gouv.fr/dataset.xhtml?persistentId=doi:10.57745/0MYJU4
+Paper: https://rmets.onlinelibrary.wiley.com/doi/10.1002/gdj3.70033
+Location: INRAE Campus Lavalette, Montpellier, France (43.64°N, 3.87°E)
+Period: 2023-07-21 to 2024-07-31 (10-min intervals)
+Measurements: Grey globe thermometers (15 sensors), pyranometers, weather station
+
+Canyon geometry:
+    - Orientation: East-West
+    - Wall height: 2.3 m (concrete blocks)
+    - Length: 12 m (E-W axis)
+    - Width: 5 m (N-S axis, between inner wall faces)
+    - Globe thermometers at 1.3 m above ground
+    - 40 mm ping-pong ball grey globes (RAL 7001 silver grey, PT100 sensors)
+
+These tests validate SOLWEIG Tmrt against globe-thermometer-derived Tmrt
+in a controlled reduced-scale urban canyon with known geometry.
+
+Tests are marked @pytest.mark.slow and @pytest.mark.validation since they
+require external data files and take significant time.
+"""
+
+from __future__ import annotations
+
+import csv
+import math
+from datetime import datetime
+from pathlib import Path
+
+import numpy as np
+import pytest
+
+# ---------------------------------------------------------------------------
+# Constants and paths
+# ---------------------------------------------------------------------------
+
+DATA_DIR = Path(__file__).parent.parent / "validation" / "montpellier"
+SUBSET_CSV = DATA_DIR / "presti_subset.csv"
+
+# Skip the whole module when the external dataset has not been downloaded.
+pytestmark = [
+    pytest.mark.skipif(not SUBSET_CSV.exists(), reason="Montpellier validation data not present"),
+    pytest.mark.validation,
+]
+
+# Canyon geometry (metres)
+CANYON_LENGTH = 12.0  # E-W
+CANYON_WIDTH = 5.0  # N-S (between inner wall faces)
+WALL_HEIGHT = 2.3  # Concrete-block walls
+WALL_THICKNESS = 1.0  # Approximate thickness of concrete block walls
+
+# Globe thermometer constants
+GLOBE_DIAMETER = 0.040  # 40 mm ping-pong ball
+GLOBE_EMISSIVITY = 0.95  # Longwave emissivity of painted surface
+SBC = 5.67e-8  # Stefan-Boltzmann constant (W/m²K⁴)
+
+# Site location
+LATITUDE = 43.64
+LONGITUDE = 3.87
+UTC_OFFSET = 2  # CEST (Central European Summer Time) for summer 2023
+
+# DSM resolution
+RESOLUTION = 0.5  # metres per pixel
+
+# Linke turbidity for clear Mediterranean sky (typical summer value)
+LINKE_TURBIDITY = 3.5
+
+
+# ---------------------------------------------------------------------------
+# Clear-sky radiation model
+# ---------------------------------------------------------------------------
+
+
+def clear_sky_ghi(sun_altitude_deg: float, day_of_year: int = 216) -> float:
+    """Estimate clear-sky Global Horizontal Irradiance (GHI) from sun altitude.
+
+    Uses a simplified Ineichen clear-sky model with Linke turbidity for
+    Mediterranean climate. This replaces in-canyon pyranometer readings
+    which are contaminated by wall shading and reflections.
+
+    Args:
+        sun_altitude_deg: Sun altitude in degrees above horizon.
+        day_of_year: Day of year (1-365). Default 216 = Aug 4.
+
+    Returns:
+        Clear-sky GHI in W/m².
+    """
+    if sun_altitude_deg <= 0:
+        return 0.0
+    # Solar constant with eccentricity correction
+    I0 = 1361.0 * (1 + 0.033 * np.cos(2 * np.pi * day_of_year / 365))
+    zen_rad = np.radians(90 - sun_altitude_deg)
+    cos_zen = np.cos(zen_rad)
+    # Air mass (Kasten & Young 1989)
+    am = 1.0 / (cos_zen + 0.50572 * (96.07995 - (90 - sun_altitude_deg)) ** (-1.6364))
+    am = min(am, 40.0)
+    # Ineichen clear-sky model (altitude = 50m for Montpellier)
+    fh1 = np.exp(-0.00050 / 8.434)
+    cg1 = 5.09e-5 * 50 + 0.868
+    cg2 = 3.92e-5 * 50 + 0.0387
+    ghi = cg1 * I0 * cos_zen * np.exp(-cg2 * am * (fh1 * LINKE_TURBIDITY - 1.0))
+    return max(float(ghi), 0.0)
+
+
+def compute_sun_altitude(dt: datetime, lat: float = LATITUDE, lon: float = LONGITUDE) -> float:
+    """Compute solar altitude angle for a given datetime and location.
+
+    Simple astronomical formula (no refraction correction).
+    """
+    doy = dt.timetuple().tm_yday
+    # Solar declination (Spencer 1971)
+    B = 2 * np.pi * (doy - 1) / 365
+    decl = np.degrees(
+        0.006918 - 0.399912 * np.cos(B) + 0.070257 * np.sin(B) - 0.006758 * np.cos(2 * B) + 0.000907 * np.sin(2 * B)
+    )
+    # Equation of time (minutes)
+    eot = 229.18 * (
+        0.000075 + 0.001868 * np.cos(B) - 0.032077 * np.sin(B) - 0.014615 * np.cos(2 * B) - 0.04089 * np.sin(2 * B)
+    )
+    # Solar time
+    solar_time = dt.hour + dt.minute / 60 + (lon - 15 * UTC_OFFSET) * 4 / 60 + eot / 60
+    ha = 15 * (solar_time - 12)  # Hour angle
+    sin_alt = np.sin(np.radians(lat)) * np.sin(np.radians(decl)) + np.cos(np.radians(lat)) * np.cos(
+        np.radians(decl)
+    ) * np.cos(np.radians(ha))
+    return float(np.degrees(np.arcsin(max(-1, min(1, sin_alt)))))
+
+
+# ---------------------------------------------------------------------------
+# Globe temperature -> Tmrt conversion (ISO 7726)
+# ---------------------------------------------------------------------------
+
+
+def globe_to_tmrt(
+    tg: float,
+    ta: float,
+    va: float,
+    D: float = GLOBE_DIAMETER,
+    emis: float = GLOBE_EMISSIVITY,
+) -> float:
+    """Convert globe temperature to Tmrt using ISO 7726 forced convection.
+
+    Args:
+        tg: Globe temperature (°C).
+        ta: Air temperature (°C).
+        va: Wind speed (m/s). Clamped to min 0.1 m/s.
+        D: Globe diameter (m).
+        emis: Globe longwave emissivity.
+
+    Returns:
+        Mean radiant temperature (°C).
+    """
+    va = max(va, 0.1)  # Prevent division by zero at zero wind
+    # Forced convection heat transfer coefficient (ASHRAE)
+    hcg = 6.3 * (va**0.6) / (D**0.4)
+    # ISO 7726 formula
+    tmrt_k4 = (tg + 273.15) ** 4 + (hcg / (emis * SBC)) * (tg - ta)
+    if tmrt_k4 <= 0:
+        return ta  # Fallback for extreme conditions
+    return tmrt_k4**0.25 - 273.15
+
+
+# ---------------------------------------------------------------------------
+# Data loading
+# ---------------------------------------------------------------------------
+
+
+def load_presti_observations(
+    day: str | None = None,
+) -> list[dict]:
+    """Load PRESTI subset CSV observations.
+
+    Args:
+        day: ISO date string to filter (e.g. "2023-08-04"). If None, load all.
+
+    Returns:
+        List of dicts with timestamp, met data, globe temps, and radiation.
+    """
+    rows = []
+    with open(SUBSET_CSV) as f:
+        reader = csv.DictReader(f)
+        for row in reader:
+            dt = datetime.fromisoformat(row["TIMESTAMP"].replace("Z", ""))
+            if day and dt.date().isoformat() != day:
+                continue
+
+            def _float(val: str) -> float:
+                try:
+                    return float(val)
+                except (ValueError, TypeError):
+                    return float("nan")
+
+            rows.append(
+                {
+                    "time": dt,
+                    "ta": _float(row.get("SMn_TA", "")),
+                    "rh": _float(row.get("SMn_HR", "")),
+                    "wspd": _float(row.get("SMn_Wspd", "")),
+                    "wdir": _float(row.get("SMn_Wdir", "")),
+                    # Globe temperatures (15 sensors at 1.3m)
+                    "G1": _float(row.get("G1_TA", "")),
+                    "G2": _float(row.get("G2_TA", "")),
+                    "G3": _float(row.get("G3_TA", "")),
+                    "G4": _float(row.get("G4_TA", "")),
+                    "G5": _float(row.get("G5_TA", "")),
+                    "G6": _float(row.get("G6_TA", "")),
+                    "G7": _float(row.get("G7_TA", "")),
+                    "G8": _float(row.get("G8_TA", "")),
+                    "G9": _float(row.get("G9_TA", "")),
+                    "GA": _float(row.get("GA_TA", "")),
+                    "GB": _float(row.get("GB_TA", "")),
+                    "GC": _float(row.get("GC_TA", "")),
+                    "GD": _float(row.get("GD_TA", "")),
+                    "GE": _float(row.get("GE_TA", "")),
+                    "GF": _float(row.get("GF_TA", "")),
+                    # Pyranometer solar radiation
+                    "slr1": _float(row.get("SlrW_1", "")),
+                    "slr2": _float(row.get("SlrW_2", "")),
+                    "slr3": _float(row.get("SlrW_3", "")),
+                    "slr4": _float(row.get("SlrW_4", "")),
+                }
+            )
+    return rows
+
+
+def compute_observed_tmrt(obs: list[dict]) -> list[dict]:
+    """Add observed Tmrt derived from globe temperatures to observation dicts.
+
+    Computes Tmrt from each globe thermometer and adds mean/center values.
+    Center globes (G2, G5, G8 at y=2.6m) are most representative of the
+    open canyon floor away from wall influence.
+    """
+    results = []
+    for o in obs:
+        ta = o["ta"]
+        va = o["wspd"]
+        if math.isnan(ta) or math.isnan(va):
+            continue
+
+        # Convert center-canyon globes (y=2.6, sections A/B/C)
+        center_globes = ["G2", "G5", "G8"]
+        tmrt_center = []
+        for g in center_globes:
+            tg = o[g]
+            if not math.isnan(tg):
+                tmrt_center.append(globe_to_tmrt(tg, ta, va))
+
+        # Convert all 15 globes
+        all_globes = [f"G{i}" for i in range(1, 10)] + [f"G{c}" for c in "ABCDEF"]
+        tmrt_all = []
+        for g in all_globes:
+            tg = o[g]
+            if not math.isnan(tg):
+                tmrt_all.append(globe_to_tmrt(tg, ta, va))
+
+        if tmrt_center:
+            entry = dict(o)
+            entry["tmrt_center"] = np.mean(tmrt_center)
+            entry["tmrt_all_mean"] = np.mean(tmrt_all) if tmrt_all else float("nan")
+            entry["tmrt_center_std"] = np.std(tmrt_center) if len(tmrt_center) > 1 else 0.0
+            results.append(entry)
+    return results
+
+
+# ---------------------------------------------------------------------------
+# Synthetic DSM construction
+# ---------------------------------------------------------------------------
+
+
+def build_canyon_dsm() -> np.ndarray:
+    """Build a synthetic DSM for the PRESTI canyon.
+
+    The DSM represents the canyon geometry at 0.5m resolution:
+    - E-W canyon (long axis along columns)
+    - North and south concrete walls at 2.3m height
+    - Open ground (0m) everywhere else
+
+    Grid layout (rows = N-S, cols = E-W):
+        rows 0-7:   open ground north of canyon (0m)
+        rows 8-9:   north wall (2.3m), 2 pixels = 1m thick
+        rows 10-19: canyon floor (0m), 10 pixels = 5m wide
+        rows 20-21: south wall (2.3m), 2 pixels = 1m thick
+        rows 22-29: open ground south of canyon (0m)
+
+        cols 0-7:   open ground west of canyon (0m)
+        cols 8-31:  canyon extent (24 pixels = 12m)
+        cols 32-39: open ground east of canyon (0m)
+
+    Returns:
+        DSM array of shape (30, 40) at 0.5m resolution.
+    """
+    nrows, ncols = 30, 40
+    dsm = np.zeros((nrows, ncols), dtype=np.float32)
+
+    # Wall columns span the canyon length (cols 8-31)
+    wall_cols = slice(8, 32)
+
+    # North wall (rows 8-9)
+    dsm[8:10, wall_cols] = WALL_HEIGHT
+
+    # South wall (rows 20-21)
+    dsm[20:22, wall_cols] = WALL_HEIGHT
+
+    return dsm
+
+
+# Canyon center pixel coordinates (row, col) used to sample modelled Tmrt.
+# The canyon floor spans rows 10-19, cols 8-31 (see build_canyon_dsm);
+# the geometric center falls between rows 14-15 and cols 19-20.
+CANYON_CENTER_ROW = 15
+CANYON_CENTER_COL = 20
+
+
+# ---------------------------------------------------------------------------
+# Test: Data loading and sanity checks
+# ---------------------------------------------------------------------------
+
+
+class TestDataLoading:
+    """Verify that the validation data loads correctly."""
+
+    def test_subset_csv_loads(self):
+        """The unfiltered subset should contain well over 400 records."""
+        obs = load_presti_observations()
+        assert len(obs) > 400, f"Expected >400 rows, got {len(obs)}"
+
+    def test_aug04_has_data(self):
+        """Aug 4 should have a complete day of 10-minute records."""
+        obs = load_presti_observations(day="2023-08-04")
+        assert len(obs) == 144, f"Expected 144 rows (24h × 6/hr), got {len(obs)}"
+
+    def test_globe_temps_physical(self):
+        """Globe temperatures should be in a physical range."""
+        obs = load_presti_observations(day="2023-08-04")
+        for o in obs:
+            # Only the nine numbered in-canyon sensors are checked here.
+            for g in ["G1", "G2", "G3", "G4", "G5", "G6", "G7", "G8", "G9"]:
+                tg = o[g]
+                if not math.isnan(tg):
+                    assert 5 < tg < 70, f"{g}={tg}°C outside physical range at {o['time']}"
+
+    def test_globe_exceeds_air_temp_at_noon(self):
+        """Globe temperature should exceed air temperature during sunny hours."""
+        obs = load_presti_observations(day="2023-08-04")
+        noon_obs = [o for o in obs if 12 <= o["time"].hour <= 15]
+        assert len(noon_obs) > 0
+
+        for o in noon_obs:
+            ta = o["ta"]
+            # Center globe (G5) should be warmer than air during peak sun;
+            # 1°C slack tolerates sensor noise and brief cloud passages.
+            g5 = o["G5"]
+            if not math.isnan(g5) and not math.isnan(ta):
+                assert g5 >= ta - 1.0, f"Globe temp G5={g5:.1f}°C < Ta={ta:.1f}°C at {o['time']}"
+
+    def test_solar_radiation_diurnal(self):
+        """Solar radiation should show a clear diurnal pattern."""
+        obs = load_presti_observations(day="2023-08-04")
+        # Pyranometer #2 is used as the representative in-canyon sensor.
+        night_rad = [o["slr2"] for o in obs if o["time"].hour < 6 and not math.isnan(o["slr2"])]
+        day_rad = [o["slr2"] for o in obs if 10 <= o["time"].hour <= 16 and not math.isnan(o["slr2"])]
+
+        assert all(r <= 5.0 for r in night_rad), "Radiation should be ~0 at night"
+        assert max(day_rad) > 200, f"Peak daytime radiation={max(day_rad):.0f} too low"
+
+
+class TestGlobeToTmrt:
+    """Test the globe temperature to Tmrt conversion."""
+
+    def test_equilibrium(self):
+        """When globe temp equals air temp, Tmrt should equal air temp."""
+        tmrt = globe_to_tmrt(tg=25.0, ta=25.0, va=1.0)
+        assert abs(tmrt - 25.0) < 0.5, f"Tmrt={tmrt:.1f} should be ~25°C"
+
+    def test_globe_above_air(self):
+        """When globe > air, Tmrt should exceed both."""
+        # The convective-loss term adds to the radiant load when tg > ta.
+        tmrt = globe_to_tmrt(tg=35.0, ta=25.0, va=1.0)
+        assert tmrt > 35.0, f"Tmrt={tmrt:.1f} should exceed globe temp 35°C"
+
+    def test_globe_below_air(self):
+        """When globe < air (cold radiation), Tmrt should be below air."""
+        # Negative (tg - ta) pulls the estimate below the globe reading.
+        tmrt = globe_to_tmrt(tg=18.0, ta=25.0, va=1.0)
+        assert tmrt < 18.0, f"Tmrt={tmrt:.1f} should be below globe temp 18°C"
+
+    def test_wind_sensitivity(self):
+        """Higher wind increases convective coefficient, amplifying Tmrt."""
+        tmrt_calm = globe_to_tmrt(tg=35.0, ta=25.0, va=0.5)
+        tmrt_windy = globe_to_tmrt(tg=35.0, ta=25.0, va=3.0)
+        # At same globe and air temp, more wind means more convective
+        # heat loss needed to balance, so Tmrt must be higher
+        assert tmrt_windy > tmrt_calm
+
+    def test_observed_tmrt_computation(self):
+        """Compute Tmrt from observed globe temps for one timestep."""
+        obs = load_presti_observations(day="2023-08-04")
+        results = compute_observed_tmrt(obs)
+        # Most of the day's 144 records should yield a valid center Tmrt.
+        assert len(results) > 100
+
+        # At noon, observed Tmrt should be well above air temperature
+        noon = [r for r in results if 12 <= r["time"].hour <= 15]
+        for r in noon:
+            assert r["tmrt_center"] > r["ta"], f"Tmrt_center={r['tmrt_center']:.1f} < Ta={r['ta']:.1f} at {r['time']}"
+
+
+class TestSyntheticDSM:
+    """Verify the synthetic DSM geometry."""
+
+    def test_dsm_shape(self):
+        dsm = build_canyon_dsm()
+        assert dsm.shape == (30, 40)
+
+    def test_wall_height(self):
+        dsm = build_canyon_dsm()
+        assert dsm[8, 20] == pytest.approx(WALL_HEIGHT)  # North wall
+        assert dsm[20, 20] == pytest.approx(WALL_HEIGHT)  # South wall
+
+    def test_canyon_floor(self):
+        dsm = build_canyon_dsm()
+        assert dsm[15, 20] == pytest.approx(0.0)  # Canyon center
+
+    def test_open_ground(self):
+        dsm = build_canyon_dsm()
+        assert dsm[0, 0] == pytest.approx(0.0)  # Corner
+
+
+# ---------------------------------------------------------------------------
+# Test: SOLWEIG Tmrt validation against globe thermometer observations
+# ---------------------------------------------------------------------------
+
+
+class TestTmrtValidation:
+    """Validate SOLWEIG Tmrt against globe-derived Tmrt in the canyon."""
+
+    @pytest.fixture
+    def surface(self):
+        """Build SurfaceData from synthetic canyon DSM.
+
+        SVF is precomputed here so each test receives a ready-to-run surface.
+        """
+        from solweig import SurfaceData
+
+        dsm = build_canyon_dsm()
+        # 0.5 m pixels (RESOLUTION) matching the synthetic DSM grid.
+        surface = SurfaceData(dsm=dsm, pixel_size=RESOLUTION)
+        surface.compute_svf()
+        return surface
+
+    @pytest.fixture
+    def location(self):
+        """Montpellier site Location (CEST, ~50 m a.s.l.)."""
+        from solweig import Location
+
+        return Location(
+            latitude=LATITUDE,
+            longitude=LONGITUDE,
+            utc_offset=UTC_OFFSET,
+            altitude=50.0,
+        )
+
+    @pytest.fixture
+    def aug04_weather(self, location):
+        """Build hourly Weather objects for August 4, 2023.
+
+        Uses clear-sky GHI model instead of in-canyon pyranometers.
+        The in-canyon pyranometers are contaminated by wall shading and
+        reflections and cannot serve as open-sky radiation input.
+        """
+        # NOTE(review): the `location` fixture is requested but never used in
+        # this body — confirm whether it is needed for ordering or removable.
+        from solweig import Weather
+
+        obs = load_presti_observations(day="2023-08-04")
+
+        weather_list = []
+        for o in obs:
+            # Keep only on-the-hour records (10-min data -> hourly series).
+            if o["time"].minute != 0:
+                continue
+            if math.isnan(o["ta"]) or math.isnan(o["rh"]):
+                continue
+
+            # Clear-sky GHI from sun position (not in-canyon pyranometers)
+            sun_alt = compute_sun_altitude(o["time"])
+            doy = o["time"].timetuple().tm_yday
+            global_rad = clear_sky_ghi(sun_alt, doy)
+
+            # Wind: floor at 0.1 m/s; fall back to 1.0 m/s when missing.
+            w = Weather(
+                datetime=o["time"],
+                ta=o["ta"],
+                rh=o["rh"],
+                global_rad=global_rad,
+                ws=max(o["wspd"], 0.1) if not math.isnan(o["wspd"]) else 1.0,
+            )
+            weather_list.append(w)
+        return weather_list
+
+    @pytest.fixture
+    def aug04_observed_tmrt(self):
+        """Compute observed Tmrt from globe thermometers for Aug 4."""
+        obs = load_presti_observations(day="2023-08-04")
+        return compute_observed_tmrt(obs)
+
+    @pytest.mark.slow
+    def test_single_timestep_noon(self, surface, location, aug04_weather):
+        """Run SOLWEIG for noon and check Tmrt is physical.
+
+        Despite the name, the timestep used is 14:00 local — close to the
+        daily radiation peak in summer.
+        """
+        import solweig
+
+        noon = [w for w in aug04_weather if w.datetime.hour == 14][0]
+        result = solweig.calculate(
+            surface=surface,
+            location=location,
+            weather=noon,
+            wall_material="concrete",
+        )
+
+        # Sample the canyon-center pixel defined by the module constants.
+        tmrt_center = result.tmrt[CANYON_CENTER_ROW, CANYON_CENTER_COL]
+        print(f"\n--- Noon Tmrt at canyon center: {tmrt_center:.1f}°C (Ta={noon.ta:.1f}°C) ---")
+
+        assert not np.isnan(tmrt_center), "Tmrt at canyon center is NaN"
+        assert 10 < tmrt_center < 80, f"Tmrt={tmrt_center:.1f}°C outside physical range"
+        # At 14:00 in summer, Tmrt should exceed air temperature
+        assert tmrt_center > noon.ta, f"Tmrt={tmrt_center:.1f} should exceed Ta={noon.ta:.1f} at peak sun"
+
+    @pytest.mark.slow
+    def test_timeseries_diurnal_pattern(self, surface, location, aug04_weather):
+        """Run full-day timeseries and check diurnal Tmrt pattern."""
+        import solweig
+
+        summary = solweig.calculate_timeseries(
+            surface=surface,
+            location=location,
+            weather_series=aug04_weather,
+            wall_material="concrete",
+            timestep_outputs=["tmrt"],
+        )
+        results = summary.results
+
+        # One result per hourly Weather record.
+        assert len(results) == len(aug04_weather)
+
+        # Sample the canyon-center pixel across the day.
+        tmrt_series = [r.tmrt[CANYON_CENTER_ROW, CANYON_CENTER_COL] for r in results]
+        hours = [w.datetime.hour for w in aug04_weather]
+
+        print("\n--- Diurnal Tmrt at canyon center (Aug 4, 2023) ---")
+        print(f"{'Hour':>4s} {'Ta':>6s} {'Tmrt':>6s} {'Tmrt-Ta':>7s}")
+        for h, w, tmrt in zip(hours, aug04_weather, tmrt_series, strict=False):
+            print(f"{h:4d} {w.ta:6.1f} {tmrt:6.1f} {tmrt - w.ta:+7.1f}")
+
+        # Daytime Tmrt should exceed nighttime
+        day_tmrt = [t for h, t in zip(hours, tmrt_series, strict=False) if 10 <= h <= 16]
+        night_tmrt = [t for h, t in zip(hours, tmrt_series, strict=False) if h < 5 or h > 22]
+
+        # Guard: some hours may be missing after the NaN filter upstream.
+        if day_tmrt and night_tmrt:
+            assert np.mean(day_tmrt) > np.mean(night_tmrt), (
+                f"Daytime Tmrt ({np.mean(day_tmrt):.1f}) should exceed nighttime ({np.mean(night_tmrt):.1f})"
+            )
+
+    @pytest.mark.slow
+    def test_tmrt_vs_globe_observations(self, surface, location, aug04_weather, aug04_observed_tmrt):
+        """Compare SOLWEIG Tmrt against globe-derived Tmrt.
+
+        This is the primary validation test. We compare:
+        - Model: SOLWEIG Tmrt at canyon center pixel
+        - Observed: Tmrt derived from center globe thermometers (G2, G5, G8)
+
+        The comparison is at hourly resolution.
+        """
+        import solweig
+
+        summary = solweig.calculate_timeseries(
+            surface=surface,
+            location=location,
+            weather_series=aug04_weather,
+            wall_material="concrete",
+            timestep_outputs=["tmrt"],
+        )
+        results = summary.results
+
+        # Build model Tmrt dict by hour
+        model_tmrt = {}
+        for w, r in zip(aug04_weather, results, strict=False):
+            model_tmrt[w.datetime.hour] = r.tmrt[CANYON_CENTER_ROW, CANYON_CENTER_COL]
+
+        # Match with hourly observations
+        matched = []
+        for o in aug04_observed_tmrt:
+            h = o["time"].hour
+            if h in model_tmrt and o["time"].minute == 0:
+                matched.append(
+                    {
+                        "hour": h,
+                        "ta": o["ta"],
+                        "obs_tmrt": o["tmrt_center"],
+                        "mod_tmrt": model_tmrt[h],
+                    }
+                )
+
+        assert len(matched) >= 20, f"Only {len(matched)} matched hours"
+
+        obs_arr = np.array([m["obs_tmrt"] for m in matched])
+        mod_arr = np.array([m["mod_tmrt"] for m in matched])
+
+        rmse = np.sqrt(np.mean((obs_arr - mod_arr) ** 2))
+        mae = np.mean(np.abs(obs_arr - mod_arr))
+        bias = np.mean(mod_arr - obs_arr)
+        r_squared = np.corrcoef(obs_arr, mod_arr)[0, 1] ** 2
+
+        print("\n--- SOLWEIG vs Globe-Derived Tmrt (Aug 4, 2023) ---")
+        print(f"{'Hour':>4s} {'Ta':>6s} {'Obs':>6s} {'Model':>6s} {'Diff':>6s}")
+        for m in matched:
+            diff = m["mod_tmrt"] - m["obs_tmrt"]
+            print(f"{m['hour']:4d} {m['ta']:6.1f} {m['obs_tmrt']:6.1f} {m['mod_tmrt']:6.1f} {diff:+6.1f}")
+        print(f"\nRMSE:  {rmse:.2f}°C")
+        print(f"MAE:   {mae:.2f}°C")
+        print(f"Bias:  {bias:+.2f}°C")
+        print(f"R²:    {r_squared:.3f}")
+
+        # Acceptance criteria:
+        # Globe-derived Tmrt has ~5°C uncertainty (40mm globe accuracy).
+        # Combined model + measurement uncertainty allows generous thresholds.
+        # The canyon is simplified (no planters, no vegetation, uniform walls).
+        assert rmse < 20.0, f"RMSE={rmse:.2f}°C exceeds 20°C threshold"
+        # Model should at least correlate with observations
+        assert r_squared > 0.3, f"R²={r_squared:.3f} too low (no correlation)"
+
+    @pytest.mark.slow
+    def test_canyon_shading_spatial_pattern(self, surface, location, aug04_weather):
+        """Verify that the canyon shows spatial Tmrt variation from wall shading.
+
+        Near the south wall (shaded in morning), Tmrt should differ from
+        near the north wall (shaded in afternoon) at asymmetric sun angles.
+        """
+        import solweig
+
+        # Pick early afternoon (14:00) when sun is from the south
+        afternoon = [w for w in aug04_weather if w.datetime.hour == 14][0]
+        result = solweig.calculate(
+            surface=surface,
+            location=location,
+            weather=afternoon,
+            wall_material="concrete",
+        )
+
+        # Near-south-wall pixel (row 18) vs near-north-wall pixel (row 12)
+        # Avoid rows immediately adjacent to walls (may be NaN in SOLWEIG)
+        tmrt_near_south = result.tmrt[18, CANYON_CENTER_COL]
+        tmrt_near_north = result.tmrt[12, CANYON_CENTER_COL]
+        tmrt_center = result.tmrt[CANYON_CENTER_ROW, CANYON_CENTER_COL]
+
+        print("\n--- Canyon spatial Tmrt at 14:00 ---")
+        print(f"Near north wall (row 12): {tmrt_near_north:.1f}°C")
+        print(f"Canyon center (row 15):   {tmrt_center:.1f}°C")
+        print(f"Near south wall (row 18): {tmrt_near_south:.1f}°C")
+
+        # All should be physical
+        for val, label in [
+            (tmrt_near_north, "north"),
+            (tmrt_center, "center"),
+            (tmrt_near_south, "south"),
+        ]:
+            assert not np.isnan(val), f"Tmrt near {label} wall is NaN"
+            assert 5 < val < 80, f"Tmrt near {label} wall = {val:.1f}°C out of range"
+
+    @pytest.mark.slow
+    def test_multi_day_statistics(self, surface, location):
+        """Compute validation statistics across multiple clear-sky days."""
+        import solweig
+
+        all_errors = []
+
+        for day_str in ["2023-08-03", "2023-08-04", "2023-08-14"]:
+            obs = load_presti_observations(day=day_str)
+            obs_tmrt = compute_observed_tmrt(obs)
+            if not obs_tmrt:
+                continue
+
+            # Build hourly Weather with clear-sky GHI
+            weather_list = []
+            for o in obs:
+                if o["time"].minute != 0:
+                    continue
+                if math.isnan(o["ta"]) or math.isnan(o["rh"]):
+                    continue
+                sun_alt = compute_sun_altitude(o["time"])
+                doy = o["time"].timetuple().tm_yday
+                global_rad = clear_sky_ghi(sun_alt, doy)
+                w = solweig.Weather(
+                    datetime=o["time"],
+                    ta=o["ta"],
+                    rh=o["rh"],
+                    global_rad=global_rad,
+                    ws=max(o["wspd"], 0.1) if not math.isnan(o["wspd"]) else 1.0,
+                )
+                weather_list.append(w)
+
+            if len(weather_list) < 20:
+                continue
+
+            summary = solweig.calculate_timeseries(
+                surface=surface,
+                location=location,
+                weather_series=weather_list,
+                wall_material="concrete",
+                timestep_outputs=["tmrt"],
+            )
+            results = summary.results
+
+            model_tmrt = {}
+            for w, r in zip(weather_list, results, strict=False):
+                model_tmrt[w.datetime] = r.tmrt[CANYON_CENTER_ROW, CANYON_CENTER_COL]
+
+            for o in obs_tmrt:
+                # Match on-the-hour observations
+                if o["time"].minute == 0 and o["time"] in model_tmrt:
+                    all_errors.append(model_tmrt[o["time"]] - o["tmrt_center"])
+
+        assert len(all_errors) > 50, f"Only {len(all_errors)} matched points"
+
+        errors = np.array(all_errors)
+        rmse = np.sqrt(np.mean(errors**2))
+        bias = np.mean(errors)
+
+        print("\n--- Multi-day Tmrt validation (3 clear-sky days) ---")
+        print(f"Matched points: {len(all_errors)}")
+        print(f"RMSE: {rmse:.2f}°C")
+        print(f"Bias: {bias:+.2f}°C")
+
+        # Multi-day RMSE threshold (generous due to synthetic DSM + globe uncertainty)
+        assert rmse < 20.0, f"Multi-day RMSE={rmse:.2f}°C exceeds threshold"
+
+
+# ---------------------------------------------------------------------------
+# Test: Isotropic vs Anisotropic sky model comparison
+# ---------------------------------------------------------------------------
+
+
+class TestSkyModelComparison:
+    """Compare isotropic vs anisotropic sky radiation models against observations.
+
+    The anisotropic (Perez) sky model should better capture directional
+    diffuse radiation in the canyon geometry. This test quantifies the
+    accuracy improvement.
+    """
+
+    @pytest.fixture
+    def surface(self):
+        from solweig import SurfaceData
+
+        dsm = build_canyon_dsm()
+        surface = SurfaceData(dsm=dsm, pixel_size=RESOLUTION)
+        surface.compute_svf()
+        return surface
+
+    @pytest.fixture
+    def location(self):
+        from solweig import Location
+
+        return Location(
+            latitude=LATITUDE,
+            longitude=LONGITUDE,
+            utc_offset=UTC_OFFSET,
+            altitude=50.0,
+        )
+
+    def _build_weather(self, day: str) -> list:
+        from solweig import Weather
+
+        obs = load_presti_observations(day=day)
+        weather_list = []
+        for o in obs:
+            if o["time"].minute != 0:
+                continue
+            if math.isnan(o["ta"]) or math.isnan(o["rh"]):
+                continue
+            sun_alt = compute_sun_altitude(o["time"])
+            doy = o["time"].timetuple().tm_yday
+            global_rad = clear_sky_ghi(sun_alt, doy)
+            w = Weather(
+                datetime=o["time"],
+                ta=o["ta"],
+                rh=o["rh"],
+                global_rad=global_rad,
+                ws=max(o["wspd"], 0.1) if not math.isnan(o["wspd"]) else 1.0,
+            )
+            weather_list.append(w)
+        return weather_list
+
+    @pytest.mark.slow
+    def test_isotropic_sky_rmse(self, surface, location):
+        """Validate isotropic sky model RMSE against globe observations.
+
+        Note: The anisotropic (Perez) sky model requires precomputed shadow
+        matrices for 145 sky patches, which are only available when using
+        SurfaceData.prepare() with GeoTIFF inputs. With a synthetic DSM,
+        the model falls back to isotropic. A full anisotropic comparison
+        would require real DSM data (e.g., from IGN Lidar HD for Montpellier).
+        """
+        import solweig
+
+        day = "2023-08-04"
+        weather_list = self._build_weather(day)
+        obs_tmrt = compute_observed_tmrt(load_presti_observations(day=day))
+
+        summary_iso = solweig.calculate_timeseries(
+            surface=surface,
+            location=location,
+            weather_series=weather_list,
+            use_anisotropic_sky=False,
+            timestep_outputs=["tmrt"],
+        )
+        results_iso = summary_iso.results
+
+        tmrt_iso = {}
+        for w, r in zip(weather_list, results_iso, strict=False):
+            tmrt_iso[w.datetime.hour] = r.tmrt[CANYON_CENTER_ROW, CANYON_CENTER_COL]
+
+        errors = []
+        for o in obs_tmrt:
+            h = o["time"].hour
+            if h in tmrt_iso and o["time"].minute == 0:
+                errors.append(tmrt_iso[h] - o["tmrt_center"])
+
+        assert len(errors) >= 20
+
+        rmse = np.sqrt(np.mean(np.array(errors) ** 2))
+        bias = np.mean(errors)
+
+        print("\n--- Isotropic Sky Model Validation (Aug 4, 2023) ---")
+        print(f"RMSE: {rmse:.2f}°C, Bias: {bias:+.2f}°C")
+        print("Note: Anisotropic comparison requires precomputed shadow matrices")
+
+        assert rmse < 20.0, f"Isotropic RMSE={rmse:.2f}°C too high"
diff --git a/uv.lock b/uv.lock
new file mode 100644
index 0000000..96850aa
--- /dev/null
+++ b/uv.lock
@@ -0,0 +1,2899 @@
+version = 1
+revision = 3
+requires-python = ">=3.11, <3.14"
+resolution-markers = [
+    "sys_platform == 'win32'",
+    "sys_platform == 'emscripten'",
+    "sys_platform != 'emscripten' and sys_platform != 'win32'",
+]
+
+[[package]]
+name = "affine"
+version = "2.4.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/69/98/d2f0bb06385069e799fc7d2870d9e078cfa0fa396dc8a2b81227d0da08b9/affine-2.4.0.tar.gz", hash = "sha256:a24d818d6a836c131976d22f8c27b8d3ca32d0af64c1d8d29deb7bafa4da1eea", size = 17132, upload-time = "2023-01-19T23:44:30.696Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/0b/f7/85273299ab57117850cc0a936c64151171fac4da49bc6fba0dad984a7c5f/affine-2.4.0-py3-none-any.whl", hash = "sha256:8a3df80e2b2378aef598a83c1392efd47967afec4242021a0b06b4c7cbc61a92", size = 15662, upload-time = "2023-01-19T23:44:28.833Z" },
+]
+
+[[package]]
+name = "anyio"
+version = "4.12.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "idna" },
+    { name = "typing-extensions", marker = "python_full_version < '3.13'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/96/f0/5eb65b2bb0d09ac6776f2eb54adee6abe8228ea05b20a5ad0e4945de8aac/anyio-4.12.1.tar.gz", hash = "sha256:41cfcc3a4c85d3f05c932da7c26d0201ac36f72abd4435ba90d0464a3ffed703", size = 228685, upload-time = "2026-01-06T11:45:21.246Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/38/0e/27be9fdef66e72d64c0cdc3cc2823101b80585f8119b5c112c2e8f5f7dab/anyio-4.12.1-py3-none-any.whl", hash = "sha256:d405828884fc140aa80a3c667b8beed277f1dfedec42ba031bd6ac3db606ab6c", size = 113592, upload-time = "2026-01-06T11:45:19.497Z" },
+]
+
+[[package]]
+name = "appnope"
+version = "0.1.4"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/35/5d/752690df9ef5b76e169e68d6a129fa6d08a7100ca7f754c89495db3c6019/appnope-0.1.4.tar.gz", hash = "sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee", size = 4170, upload-time = "2024-02-06T09:43:11.258Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/81/29/5ecc3a15d5a33e31b26c11426c45c501e439cb865d0bff96315d86443b78/appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c", size = 4321, upload-time = "2024-02-06T09:43:09.663Z" },
+]
+
+[[package]]
+name = "argon2-cffi"
+version = "25.1.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "argon2-cffi-bindings" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/0e/89/ce5af8a7d472a67cc819d5d998aa8c82c5d860608c4db9f46f1162d7dab9/argon2_cffi-25.1.0.tar.gz", hash = "sha256:694ae5cc8a42f4c4e2bf2ca0e64e51e23a040c6a517a85074683d3959e1346c1", size = 45706, upload-time = "2025-06-03T06:55:32.073Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/4f/d3/a8b22fa575b297cd6e3e3b0155c7e25db170edf1c74783d6a31a2490b8d9/argon2_cffi-25.1.0-py3-none-any.whl", hash = "sha256:fdc8b074db390fccb6eb4a3604ae7231f219aa669a2652e0f20e16ba513d5741", size = 14657, upload-time = "2025-06-03T06:55:30.804Z" },
+]
+
+[[package]]
+name = "argon2-cffi-bindings"
+version = "25.1.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "cffi" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/5c/2d/db8af0df73c1cf454f71b2bbe5e356b8c1f8041c979f505b3d3186e520a9/argon2_cffi_bindings-25.1.0.tar.gz", hash = "sha256:b957f3e6ea4d55d820e40ff76f450952807013d361a65d7f28acc0acbf29229d", size = 1783441, upload-time = "2025-07-30T10:02:05.147Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/1d/57/96b8b9f93166147826da5f90376e784a10582dd39a393c99bb62cfcf52f0/argon2_cffi_bindings-25.1.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:aecba1723ae35330a008418a91ea6cfcedf6d31e5fbaa056a166462ff066d500", size = 54121, upload-time = "2025-07-30T10:01:50.815Z" },
+    { url = "https://files.pythonhosted.org/packages/0a/08/a9bebdb2e0e602dde230bdde8021b29f71f7841bd54801bcfd514acb5dcf/argon2_cffi_bindings-25.1.0-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:2630b6240b495dfab90aebe159ff784d08ea999aa4b0d17efa734055a07d2f44", size = 29177, upload-time = "2025-07-30T10:01:51.681Z" },
+    { url = "https://files.pythonhosted.org/packages/b6/02/d297943bcacf05e4f2a94ab6f462831dc20158614e5d067c35d4e63b9acb/argon2_cffi_bindings-25.1.0-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:7aef0c91e2c0fbca6fc68e7555aa60ef7008a739cbe045541e438373bc54d2b0", size = 31090, upload-time = "2025-07-30T10:01:53.184Z" },
+    { url = "https://files.pythonhosted.org/packages/c1/93/44365f3d75053e53893ec6d733e4a5e3147502663554b4d864587c7828a7/argon2_cffi_bindings-25.1.0-cp39-abi3-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1e021e87faa76ae0d413b619fe2b65ab9a037f24c60a1e6cc43457ae20de6dc6", size = 81246, upload-time = "2025-07-30T10:01:54.145Z" },
+    { url = "https://files.pythonhosted.org/packages/09/52/94108adfdd6e2ddf58be64f959a0b9c7d4ef2fa71086c38356d22dc501ea/argon2_cffi_bindings-25.1.0-cp39-abi3-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d3e924cfc503018a714f94a49a149fdc0b644eaead5d1f089330399134fa028a", size = 87126, upload-time = "2025-07-30T10:01:55.074Z" },
+    { url = "https://files.pythonhosted.org/packages/72/70/7a2993a12b0ffa2a9271259b79cc616e2389ed1a4d93842fac5a1f923ffd/argon2_cffi_bindings-25.1.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:c87b72589133f0346a1cb8d5ecca4b933e3c9b64656c9d175270a000e73b288d", size = 80343, upload-time = "2025-07-30T10:01:56.007Z" },
+    { url = "https://files.pythonhosted.org/packages/78/9a/4e5157d893ffc712b74dbd868c7f62365618266982b64accab26bab01edc/argon2_cffi_bindings-25.1.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1db89609c06afa1a214a69a462ea741cf735b29a57530478c06eb81dd403de99", size = 86777, upload-time = "2025-07-30T10:01:56.943Z" },
+    { url = "https://files.pythonhosted.org/packages/74/cd/15777dfde1c29d96de7f18edf4cc94c385646852e7c7b0320aa91ccca583/argon2_cffi_bindings-25.1.0-cp39-abi3-win32.whl", hash = "sha256:473bcb5f82924b1becbb637b63303ec8d10e84c8d241119419897a26116515d2", size = 27180, upload-time = "2025-07-30T10:01:57.759Z" },
+    { url = "https://files.pythonhosted.org/packages/e2/c6/a759ece8f1829d1f162261226fbfd2c6832b3ff7657384045286d2afa384/argon2_cffi_bindings-25.1.0-cp39-abi3-win_amd64.whl", hash = "sha256:a98cd7d17e9f7ce244c0803cad3c23a7d379c301ba618a5fa76a67d116618b98", size = 31715, upload-time = "2025-07-30T10:01:58.56Z" },
+    { url = "https://files.pythonhosted.org/packages/42/b9/f8d6fa329ab25128b7e98fd83a3cb34d9db5b059a9847eddb840a0af45dd/argon2_cffi_bindings-25.1.0-cp39-abi3-win_arm64.whl", hash = "sha256:b0fdbcf513833809c882823f98dc2f931cf659d9a1429616ac3adebb49f5db94", size = 27149, upload-time = "2025-07-30T10:01:59.329Z" },
+]
+
+[[package]]
+name = "arrow"
+version = "1.4.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "python-dateutil" },
+    { name = "tzdata" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/b9/33/032cdc44182491aa708d06a68b62434140d8c50820a087fac7af37703357/arrow-1.4.0.tar.gz", hash = "sha256:ed0cc050e98001b8779e84d461b0098c4ac597e88704a655582b21d116e526d7", size = 152931, upload-time = "2025-10-18T17:46:46.761Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/ed/c9/d7977eaacb9df673210491da99e6a247e93df98c715fc43fd136ce1d3d33/arrow-1.4.0-py3-none-any.whl", hash = "sha256:749f0769958ebdc79c173ff0b0670d59051a535fa26e8eba02953dc19eb43205", size = 68797, upload-time = "2025-10-18T17:46:45.663Z" },
+]
+
+[[package]]
+name = "asttokens"
+version = "3.0.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/be/a5/8e3f9b6771b0b408517c82d97aed8f2036509bc247d46114925e32fe33f0/asttokens-3.0.1.tar.gz", hash = "sha256:71a4ee5de0bde6a31d64f6b13f2293ac190344478f081c3d1bccfcf5eacb0cb7", size = 62308, upload-time = "2025-11-15T16:43:48.578Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/d2/39/e7eaf1799466a4aef85b6a4fe7bd175ad2b1c6345066aa33f1f58d4b18d0/asttokens-3.0.1-py3-none-any.whl", hash = "sha256:15a3ebc0f43c2d0a50eeafea25e19046c68398e487b9f1f5b517f7c0f40f976a", size = 27047, upload-time = "2025-11-15T16:43:16.109Z" },
+]
+
+[[package]]
+name = "async-lru"
+version = "2.1.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/ef/c3/bbf34f15ea88dfb649ab2c40f9d75081784a50573a9ea431563cab64adb8/async_lru-2.1.0.tar.gz", hash = "sha256:9eeb2fecd3fe42cc8a787fc32ead53a3a7158cc43d039c3c55ab3e4e5b2a80ed", size = 12041, upload-time = "2026-01-17T22:52:18.931Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/2e/e9/eb6a5db5ac505d5d45715388e92bced7a5bb556facc4d0865d192823f2d2/async_lru-2.1.0-py3-none-any.whl", hash = "sha256:fa12dcf99a42ac1280bc16c634bbaf06883809790f6304d85cdab3f666f33a7e", size = 6933, upload-time = "2026-01-17T22:52:17.389Z" },
+]
+
+[[package]]
+name = "attrs"
+version = "25.4.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/6b/5c/685e6633917e101e5dcb62b9dd76946cbb57c26e133bae9e0cd36033c0a9/attrs-25.4.0.tar.gz", hash = "sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11", size = 934251, upload-time = "2025-10-06T13:54:44.725Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373", size = 67615, upload-time = "2025-10-06T13:54:43.17Z" },
+]
+
+[[package]]
+name = "babel"
+version = "2.17.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/7d/6b/d52e42361e1aa00709585ecc30b3f9684b3ab62530771402248b1b1d6240/babel-2.17.0.tar.gz", hash = "sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d", size = 9951852, upload-time = "2025-02-01T15:17:41.026Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/b7/b8/3fe70c75fe32afc4bb507f75563d39bc5642255d1d94f1f23604725780bf/babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2", size = 10182537, upload-time = "2025-02-01T15:17:37.39Z" },
+]
+
+[[package]]
+name = "backrefs"
+version = "6.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/86/e3/bb3a439d5cb255c4774724810ad8073830fac9c9dee123555820c1bcc806/backrefs-6.1.tar.gz", hash = "sha256:3bba1749aafe1db9b915f00e0dd166cba613b6f788ffd63060ac3485dc9be231", size = 7011962, upload-time = "2025-11-15T14:52:08.323Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/3b/ee/c216d52f58ea75b5e1841022bbae24438b19834a29b163cb32aa3a2a7c6e/backrefs-6.1-py310-none-any.whl", hash = "sha256:2a2ccb96302337ce61ee4717ceacfbf26ba4efb1d55af86564b8bbaeda39cac1", size = 381059, upload-time = "2025-11-15T14:51:59.758Z" },
+    { url = "https://files.pythonhosted.org/packages/e6/9a/8da246d988ded941da96c7ed945d63e94a445637eaad985a0ed88787cb89/backrefs-6.1-py311-none-any.whl", hash = "sha256:e82bba3875ee4430f4de4b6db19429a27275d95a5f3773c57e9e18abc23fd2b7", size = 392854, upload-time = "2025-11-15T14:52:01.194Z" },
+    { url = "https://files.pythonhosted.org/packages/37/c9/fd117a6f9300c62bbc33bc337fd2b3c6bfe28b6e9701de336b52d7a797ad/backrefs-6.1-py312-none-any.whl", hash = "sha256:c64698c8d2269343d88947c0735cb4b78745bd3ba590e10313fbf3f78c34da5a", size = 398770, upload-time = "2025-11-15T14:52:02.584Z" },
+    { url = "https://files.pythonhosted.org/packages/eb/95/7118e935b0b0bd3f94dfec2d852fd4e4f4f9757bdb49850519acd245cd3a/backrefs-6.1-py313-none-any.whl", hash = "sha256:4c9d3dc1e2e558965202c012304f33d4e0e477e1c103663fd2c3cc9bb18b0d05", size = 400726, upload-time = "2025-11-15T14:52:04.093Z" },
+    { url = "https://files.pythonhosted.org/packages/02/e3/a4fa1946722c4c7b063cc25043a12d9ce9b4323777f89643be74cef2993c/backrefs-6.1-py39-none-any.whl", hash = "sha256:a9e99b8a4867852cad177a6430e31b0f6e495d65f8c6c134b68c14c3c95bf4b0", size = 381058, upload-time = "2025-11-15T14:52:06.698Z" },
+]
+
+[[package]]
+name = "beautifulsoup4"
+version = "4.14.3"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "soupsieve" },
+    { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/c3/b0/1c6a16426d389813b48d95e26898aff79abbde42ad353958ad95cc8c9b21/beautifulsoup4-4.14.3.tar.gz", hash = "sha256:6292b1c5186d356bba669ef9f7f051757099565ad9ada5dd630bd9de5fa7fb86", size = 627737, upload-time = "2025-11-30T15:08:26.084Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/1a/39/47f9197bdd44df24d67ac8893641e16f386c984a0619ef2ee4c51fbbc019/beautifulsoup4-4.14.3-py3-none-any.whl", hash = "sha256:0918bfe44902e6ad8d57732ba310582e98da931428d231a5ecb9e7c703a735bb", size = 107721, upload-time = "2025-11-30T15:08:24.087Z" },
+]
+
+[[package]]
+name = "bleach"
+version = "6.3.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "webencodings" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/07/18/3c8523962314be6bf4c8989c79ad9531c825210dd13a8669f6b84336e8bd/bleach-6.3.0.tar.gz", hash = "sha256:6f3b91b1c0a02bb9a78b5a454c92506aa0fdf197e1d5e114d2e00c6f64306d22", size = 203533, upload-time = "2025-10-27T17:57:39.211Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/cd/3a/577b549de0cc09d95f11087ee63c739bba856cd3952697eec4c4bb91350a/bleach-6.3.0-py3-none-any.whl", hash = "sha256:fe10ec77c93ddf3d13a73b035abaac7a9f5e436513864ccdad516693213c65d6", size = 164437, upload-time = "2025-10-27T17:57:37.538Z" },
+]
+
+[package.optional-dependencies]
+css = [
+    { name = "tinycss2" },
+]
+
+[[package]]
+name = "certifi"
+version = "2026.1.4"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/e0/2d/a891ca51311197f6ad14a7ef42e2399f36cf2f9bd44752b3dc4eab60fdc5/certifi-2026.1.4.tar.gz", hash = "sha256:ac726dd470482006e014ad384921ed6438c457018f4b3d204aea4281258b2120", size = 154268, upload-time = "2026-01-04T02:42:41.825Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/e6/ad/3cc14f097111b4de0040c83a525973216457bbeeb63739ef1ed275c1c021/certifi-2026.1.4-py3-none-any.whl", hash = "sha256:9943707519e4add1115f44c2bc244f782c0249876bf51b6599fee1ffbedd685c", size = 152900, upload-time = "2026-01-04T02:42:40.15Z" },
+]
+
+[[package]]
+name = "cffi"
+version = "2.0.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "pycparser", marker = "implementation_name != 'PyPy'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/12/4a/3dfd5f7850cbf0d06dc84ba9aa00db766b52ca38d8b86e3a38314d52498c/cffi-2.0.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:b4c854ef3adc177950a8dfc81a86f5115d2abd545751a304c5bcf2c2c7283cfe", size = 184344, upload-time = "2025-09-08T23:22:26.456Z" },
+    { url = "https://files.pythonhosted.org/packages/4f/8b/f0e4c441227ba756aafbe78f117485b25bb26b1c059d01f137fa6d14896b/cffi-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2de9a304e27f7596cd03d16f1b7c72219bd944e99cc52b84d0145aefb07cbd3c", size = 180560, upload-time = "2025-09-08T23:22:28.197Z" },
+    { url = "https://files.pythonhosted.org/packages/b1/b7/1200d354378ef52ec227395d95c2576330fd22a869f7a70e88e1447eb234/cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92", size = 209613, upload-time = "2025-09-08T23:22:29.475Z" },
+    { url = "https://files.pythonhosted.org/packages/b8/56/6033f5e86e8cc9bb629f0077ba71679508bdf54a9a5e112a3c0b91870332/cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93", size = 216476, upload-time = "2025-09-08T23:22:31.063Z" },
+    { url = "https://files.pythonhosted.org/packages/dc/7f/55fecd70f7ece178db2f26128ec41430d8720f2d12ca97bf8f0a628207d5/cffi-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5", size = 203374, upload-time = "2025-09-08T23:22:32.507Z" },
+    { url = "https://files.pythonhosted.org/packages/84/ef/a7b77c8bdc0f77adc3b46888f1ad54be8f3b7821697a7b89126e829e676a/cffi-2.0.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9de40a7b0323d889cf8d23d1ef214f565ab154443c42737dfe52ff82cf857664", size = 202597, upload-time = "2025-09-08T23:22:34.132Z" },
+    { url = "https://files.pythonhosted.org/packages/d7/91/500d892b2bf36529a75b77958edfcd5ad8e2ce4064ce2ecfeab2125d72d1/cffi-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8941aaadaf67246224cee8c3803777eed332a19d909b47e29c9842ef1e79ac26", size = 215574, upload-time = "2025-09-08T23:22:35.443Z" },
+    { url = "https://files.pythonhosted.org/packages/44/64/58f6255b62b101093d5df22dcb752596066c7e89dd725e0afaed242a61be/cffi-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9", size = 218971, upload-time = "2025-09-08T23:22:36.805Z" },
+    { url = "https://files.pythonhosted.org/packages/ab/49/fa72cebe2fd8a55fbe14956f9970fe8eb1ac59e5df042f603ef7c8ba0adc/cffi-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414", size = 211972, upload-time = "2025-09-08T23:22:38.436Z" },
+    { url = "https://files.pythonhosted.org/packages/0b/28/dd0967a76aab36731b6ebfe64dec4e981aff7e0608f60c2d46b46982607d/cffi-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743", size = 217078, upload-time = "2025-09-08T23:22:39.776Z" },
+    { url = "https://files.pythonhosted.org/packages/2b/c0/015b25184413d7ab0a410775fdb4a50fca20f5589b5dab1dbbfa3baad8ce/cffi-2.0.0-cp311-cp311-win32.whl", hash = "sha256:c649e3a33450ec82378822b3dad03cc228b8f5963c0c12fc3b1e0ab940f768a5", size = 172076, upload-time = "2025-09-08T23:22:40.95Z" },
+    { url = "https://files.pythonhosted.org/packages/ae/8f/dc5531155e7070361eb1b7e4c1a9d896d0cb21c49f807a6c03fd63fc877e/cffi-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:66f011380d0e49ed280c789fbd08ff0d40968ee7b665575489afa95c98196ab5", size = 182820, upload-time = "2025-09-08T23:22:42.463Z" },
+    { url = "https://files.pythonhosted.org/packages/95/5c/1b493356429f9aecfd56bc171285a4c4ac8697f76e9bbbbb105e537853a1/cffi-2.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:c6638687455baf640e37344fe26d37c404db8b80d037c3d29f58fe8d1c3b194d", size = 177635, upload-time = "2025-09-08T23:22:43.623Z" },
+    { url = "https://files.pythonhosted.org/packages/ea/47/4f61023ea636104d4f16ab488e268b93008c3d0bb76893b1b31db1f96802/cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d", size = 185271, upload-time = "2025-09-08T23:22:44.795Z" },
+    { url = "https://files.pythonhosted.org/packages/df/a2/781b623f57358e360d62cdd7a8c681f074a71d445418a776eef0aadb4ab4/cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c", size = 181048, upload-time = "2025-09-08T23:22:45.938Z" },
+    { url = "https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe", size = 212529, upload-time = "2025-09-08T23:22:47.349Z" },
+    { url = "https://files.pythonhosted.org/packages/d5/72/12b5f8d3865bf0f87cf1404d8c374e7487dcf097a1c91c436e72e6badd83/cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062", size = 220097, upload-time = "2025-09-08T23:22:48.677Z" },
+    { url = "https://files.pythonhosted.org/packages/c2/95/7a135d52a50dfa7c882ab0ac17e8dc11cec9d55d2c18dda414c051c5e69e/cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e", size = 207983, upload-time = "2025-09-08T23:22:50.06Z" },
+    { url = "https://files.pythonhosted.org/packages/3a/c8/15cb9ada8895957ea171c62dc78ff3e99159ee7adb13c0123c001a2546c1/cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037", size = 206519, upload-time = "2025-09-08T23:22:51.364Z" },
+    { url = "https://files.pythonhosted.org/packages/78/2d/7fa73dfa841b5ac06c7b8855cfc18622132e365f5b81d02230333ff26e9e/cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba", size = 219572, upload-time = "2025-09-08T23:22:52.902Z" },
+    { url = "https://files.pythonhosted.org/packages/07/e0/267e57e387b4ca276b90f0434ff88b2c2241ad72b16d31836adddfd6031b/cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94", size = 222963, upload-time = "2025-09-08T23:22:54.518Z" },
+    { url = "https://files.pythonhosted.org/packages/b6/75/1f2747525e06f53efbd878f4d03bac5b859cbc11c633d0fb81432d98a795/cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187", size = 221361, upload-time = "2025-09-08T23:22:55.867Z" },
+    { url = "https://files.pythonhosted.org/packages/7b/2b/2b6435f76bfeb6bbf055596976da087377ede68df465419d192acf00c437/cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18", size = 172932, upload-time = "2025-09-08T23:22:57.188Z" },
+    { url = "https://files.pythonhosted.org/packages/f8/ed/13bd4418627013bec4ed6e54283b1959cf6db888048c7cf4b4c3b5b36002/cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5", size = 183557, upload-time = "2025-09-08T23:22:58.351Z" },
+    { url = "https://files.pythonhosted.org/packages/95/31/9f7f93ad2f8eff1dbc1c3656d7ca5bfd8fb52c9d786b4dcf19b2d02217fa/cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6", size = 177762, upload-time = "2025-09-08T23:22:59.668Z" },
+    { url = "https://files.pythonhosted.org/packages/4b/8d/a0a47a0c9e413a658623d014e91e74a50cdd2c423f7ccfd44086ef767f90/cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb", size = 185230, upload-time = "2025-09-08T23:23:00.879Z" },
+    { url = "https://files.pythonhosted.org/packages/4a/d2/a6c0296814556c68ee32009d9c2ad4f85f2707cdecfd7727951ec228005d/cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca", size = 181043, upload-time = "2025-09-08T23:23:02.231Z" },
+    { url = "https://files.pythonhosted.org/packages/b0/1e/d22cc63332bd59b06481ceaac49d6c507598642e2230f201649058a7e704/cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b", size = 212446, upload-time = "2025-09-08T23:23:03.472Z" },
+    { url = "https://files.pythonhosted.org/packages/a9/f5/a2c23eb03b61a0b8747f211eb716446c826ad66818ddc7810cc2cc19b3f2/cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b", size = 220101, upload-time = "2025-09-08T23:23:04.792Z" },
+    { url = "https://files.pythonhosted.org/packages/f2/7f/e6647792fc5850d634695bc0e6ab4111ae88e89981d35ac269956605feba/cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2", size = 207948, upload-time = "2025-09-08T23:23:06.127Z" },
+    { url = "https://files.pythonhosted.org/packages/cb/1e/a5a1bd6f1fb30f22573f76533de12a00bf274abcdc55c8edab639078abb6/cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3", size = 206422, upload-time = "2025-09-08T23:23:07.753Z" },
+    { url = "https://files.pythonhosted.org/packages/98/df/0a1755e750013a2081e863e7cd37e0cdd02664372c754e5560099eb7aa44/cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26", size = 219499, upload-time = "2025-09-08T23:23:09.648Z" },
+    { url = "https://files.pythonhosted.org/packages/50/e1/a969e687fcf9ea58e6e2a928ad5e2dd88cc12f6f0ab477e9971f2309b57c/cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c", size = 222928, upload-time = "2025-09-08T23:23:10.928Z" },
+    { url = "https://files.pythonhosted.org/packages/36/54/0362578dd2c9e557a28ac77698ed67323ed5b9775ca9d3fe73fe191bb5d8/cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b", size = 221302, upload-time = "2025-09-08T23:23:12.42Z" },
+    { url = "https://files.pythonhosted.org/packages/eb/6d/bf9bda840d5f1dfdbf0feca87fbdb64a918a69bca42cfa0ba7b137c48cb8/cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27", size = 172909, upload-time = "2025-09-08T23:23:14.32Z" },
+    { url = "https://files.pythonhosted.org/packages/37/18/6519e1ee6f5a1e579e04b9ddb6f1676c17368a7aba48299c3759bbc3c8b3/cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75", size = 183402, upload-time = "2025-09-08T23:23:15.535Z" },
+    { url = "https://files.pythonhosted.org/packages/cb/0e/02ceeec9a7d6ee63bb596121c2c8e9b3a9e150936f4fbef6ca1943e6137c/cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91", size = 177780, upload-time = "2025-09-08T23:23:16.761Z" },
+]
+
+[[package]]
+name = "cfgv"
+version = "3.5.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/4e/b5/721b8799b04bf9afe054a3899c6cf4e880fcf8563cc71c15610242490a0c/cfgv-3.5.0.tar.gz", hash = "sha256:d5b1034354820651caa73ede66a6294d6e95c1b00acc5e9b098e917404669132", size = 7334, upload-time = "2025-11-19T20:55:51.612Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/db/3c/33bac158f8ab7f89b2e59426d5fe2e4f63f7ed25df84c036890172b412b5/cfgv-3.5.0-py2.py3-none-any.whl", hash = "sha256:a8dc6b26ad22ff227d2634a65cb388215ce6cc96bbcc5cfde7641ae87e8dacc0", size = 7445, upload-time = "2025-11-19T20:55:50.744Z" },
+]
+
+[[package]]
+name = "charset-normalizer"
+version = "3.4.4"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/13/69/33ddede1939fdd074bce5434295f38fae7136463422fe4fd3e0e89b98062/charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a", size = 129418, upload-time = "2025-10-14T04:42:32.879Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/ed/27/c6491ff4954e58a10f69ad90aca8a1b6fe9c5d3c6f380907af3c37435b59/charset_normalizer-3.4.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6e1fcf0720908f200cd21aa4e6750a48ff6ce4afe7ff5a79a90d5ed8a08296f8", size = 206988, upload-time = "2025-10-14T04:40:33.79Z" },
+    { url = "https://files.pythonhosted.org/packages/94/59/2e87300fe67ab820b5428580a53cad894272dbb97f38a7a814a2a1ac1011/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f819d5fe9234f9f82d75bdfa9aef3a3d72c4d24a6e57aeaebba32a704553aa0", size = 147324, upload-time = "2025-10-14T04:40:34.961Z" },
+    { url = "https://files.pythonhosted.org/packages/07/fb/0cf61dc84b2b088391830f6274cb57c82e4da8bbc2efeac8c025edb88772/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a59cb51917aa591b1c4e6a43c132f0cdc3c76dbad6155df4e28ee626cc77a0a3", size = 142742, upload-time = "2025-10-14T04:40:36.105Z" },
+    { url = "https://files.pythonhosted.org/packages/62/8b/171935adf2312cd745d290ed93cf16cf0dfe320863ab7cbeeae1dcd6535f/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8ef3c867360f88ac904fd3f5e1f902f13307af9052646963ee08ff4f131adafc", size = 160863, upload-time = "2025-10-14T04:40:37.188Z" },
+    { url = "https://files.pythonhosted.org/packages/09/73/ad875b192bda14f2173bfc1bc9a55e009808484a4b256748d931b6948442/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d9e45d7faa48ee908174d8fe84854479ef838fc6a705c9315372eacbc2f02897", size = 157837, upload-time = "2025-10-14T04:40:38.435Z" },
+    { url = "https://files.pythonhosted.org/packages/6d/fc/de9cce525b2c5b94b47c70a4b4fb19f871b24995c728e957ee68ab1671ea/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:840c25fb618a231545cbab0564a799f101b63b9901f2569faecd6b222ac72381", size = 151550, upload-time = "2025-10-14T04:40:40.053Z" },
+    { url = "https://files.pythonhosted.org/packages/55/c2/43edd615fdfba8c6f2dfbd459b25a6b3b551f24ea21981e23fb768503ce1/charset_normalizer-3.4.4-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ca5862d5b3928c4940729dacc329aa9102900382fea192fc5e52eb69d6093815", size = 149162, upload-time = "2025-10-14T04:40:41.163Z" },
+    { url = "https://files.pythonhosted.org/packages/03/86/bde4ad8b4d0e9429a4e82c1e8f5c659993a9a863ad62c7df05cf7b678d75/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d9c7f57c3d666a53421049053eaacdd14bbd0a528e2186fcb2e672effd053bb0", size = 150019, upload-time = "2025-10-14T04:40:42.276Z" },
+    { url = "https://files.pythonhosted.org/packages/1f/86/a151eb2af293a7e7bac3a739b81072585ce36ccfb4493039f49f1d3cae8c/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:277e970e750505ed74c832b4bf75dac7476262ee2a013f5574dd49075879e161", size = 143310, upload-time = "2025-10-14T04:40:43.439Z" },
+    { url = "https://files.pythonhosted.org/packages/b5/fe/43dae6144a7e07b87478fdfc4dbe9efd5defb0e7ec29f5f58a55aeef7bf7/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:31fd66405eaf47bb62e8cd575dc621c56c668f27d46a61d975a249930dd5e2a4", size = 162022, upload-time = "2025-10-14T04:40:44.547Z" },
+    { url = "https://files.pythonhosted.org/packages/80/e6/7aab83774f5d2bca81f42ac58d04caf44f0cc2b65fc6db2b3b2e8a05f3b3/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:0d3d8f15c07f86e9ff82319b3d9ef6f4bf907608f53fe9d92b28ea9ae3d1fd89", size = 149383, upload-time = "2025-10-14T04:40:46.018Z" },
+    { url = "https://files.pythonhosted.org/packages/4f/e8/b289173b4edae05c0dde07f69f8db476a0b511eac556dfe0d6bda3c43384/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:9f7fcd74d410a36883701fafa2482a6af2ff5ba96b9a620e9e0721e28ead5569", size = 159098, upload-time = "2025-10-14T04:40:47.081Z" },
+    { url = "https://files.pythonhosted.org/packages/d8/df/fe699727754cae3f8478493c7f45f777b17c3ef0600e28abfec8619eb49c/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ebf3e58c7ec8a8bed6d66a75d7fb37b55e5015b03ceae72a8e7c74495551e224", size = 152991, upload-time = "2025-10-14T04:40:48.246Z" },
+    { url = "https://files.pythonhosted.org/packages/1a/86/584869fe4ddb6ffa3bd9f491b87a01568797fb9bd8933f557dba9771beaf/charset_normalizer-3.4.4-cp311-cp311-win32.whl", hash = "sha256:eecbc200c7fd5ddb9a7f16c7decb07b566c29fa2161a16cf67b8d068bd21690a", size = 99456, upload-time = "2025-10-14T04:40:49.376Z" },
+    { url = "https://files.pythonhosted.org/packages/65/f6/62fdd5feb60530f50f7e38b4f6a1d5203f4d16ff4f9f0952962c044e919a/charset_normalizer-3.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:5ae497466c7901d54b639cf42d5b8c1b6a4fead55215500d2f486d34db48d016", size = 106978, upload-time = "2025-10-14T04:40:50.844Z" },
+    { url = "https://files.pythonhosted.org/packages/7a/9d/0710916e6c82948b3be62d9d398cb4fcf4e97b56d6a6aeccd66c4b2f2bd5/charset_normalizer-3.4.4-cp311-cp311-win_arm64.whl", hash = "sha256:65e2befcd84bc6f37095f5961e68a6f077bf44946771354a28ad434c2cce0ae1", size = 99969, upload-time = "2025-10-14T04:40:52.272Z" },
+    { url = "https://files.pythonhosted.org/packages/f3/85/1637cd4af66fa687396e757dec650f28025f2a2f5a5531a3208dc0ec43f2/charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394", size = 208425, upload-time = "2025-10-14T04:40:53.353Z" },
+    { url = "https://files.pythonhosted.org/packages/9d/6a/04130023fef2a0d9c62d0bae2649b69f7b7d8d24ea5536feef50551029df/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25", size = 148162, upload-time = "2025-10-14T04:40:54.558Z" },
+    { url = "https://files.pythonhosted.org/packages/78/29/62328d79aa60da22c9e0b9a66539feae06ca0f5a4171ac4f7dc285b83688/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef", size = 144558, upload-time = "2025-10-14T04:40:55.677Z" },
+    { url = "https://files.pythonhosted.org/packages/86/bb/b32194a4bf15b88403537c2e120b817c61cd4ecffa9b6876e941c3ee38fe/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d", size = 161497, upload-time = "2025-10-14T04:40:57.217Z" },
+    { url = "https://files.pythonhosted.org/packages/19/89/a54c82b253d5b9b111dc74aca196ba5ccfcca8242d0fb64146d4d3183ff1/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8", size = 159240, upload-time = "2025-10-14T04:40:58.358Z" },
+    { url = "https://files.pythonhosted.org/packages/c0/10/d20b513afe03acc89ec33948320a5544d31f21b05368436d580dec4e234d/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86", size = 153471, upload-time = "2025-10-14T04:40:59.468Z" },
+    { url = "https://files.pythonhosted.org/packages/61/fa/fbf177b55bdd727010f9c0a3c49eefa1d10f960e5f09d1d887bf93c2e698/charset_normalizer-3.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a", size = 150864, upload-time = "2025-10-14T04:41:00.623Z" },
+    { url = "https://files.pythonhosted.org/packages/05/12/9fbc6a4d39c0198adeebbde20b619790e9236557ca59fc40e0e3cebe6f40/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f", size = 150647, upload-time = "2025-10-14T04:41:01.754Z" },
+    { url = "https://files.pythonhosted.org/packages/ad/1f/6a9a593d52e3e8c5d2b167daf8c6b968808efb57ef4c210acb907c365bc4/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc", size = 145110, upload-time = "2025-10-14T04:41:03.231Z" },
+    { url = "https://files.pythonhosted.org/packages/30/42/9a52c609e72471b0fc54386dc63c3781a387bb4fe61c20231a4ebcd58bdd/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf", size = 162839, upload-time = "2025-10-14T04:41:04.715Z" },
+    { url = "https://files.pythonhosted.org/packages/c4/5b/c0682bbf9f11597073052628ddd38344a3d673fda35a36773f7d19344b23/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15", size = 150667, upload-time = "2025-10-14T04:41:05.827Z" },
+    { url = "https://files.pythonhosted.org/packages/e4/24/a41afeab6f990cf2daf6cb8c67419b63b48cf518e4f56022230840c9bfb2/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9", size = 160535, upload-time = "2025-10-14T04:41:06.938Z" },
+    { url = "https://files.pythonhosted.org/packages/2a/e5/6a4ce77ed243c4a50a1fecca6aaaab419628c818a49434be428fe24c9957/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0", size = 154816, upload-time = "2025-10-14T04:41:08.101Z" },
+    { url = "https://files.pythonhosted.org/packages/a8/ef/89297262b8092b312d29cdb2517cb1237e51db8ecef2e9af5edbe7b683b1/charset_normalizer-3.4.4-cp312-cp312-win32.whl", hash = "sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26", size = 99694, upload-time = "2025-10-14T04:41:09.23Z" },
+    { url = "https://files.pythonhosted.org/packages/3d/2d/1e5ed9dd3b3803994c155cd9aacb60c82c331bad84daf75bcb9c91b3295e/charset_normalizer-3.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525", size = 107131, upload-time = "2025-10-14T04:41:10.467Z" },
+    { url = "https://files.pythonhosted.org/packages/d0/d9/0ed4c7098a861482a7b6a95603edce4c0d9db2311af23da1fb2b75ec26fc/charset_normalizer-3.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3", size = 100390, upload-time = "2025-10-14T04:41:11.915Z" },
+    { url = "https://files.pythonhosted.org/packages/97/45/4b3a1239bbacd321068ea6e7ac28875b03ab8bc0aa0966452db17cd36714/charset_normalizer-3.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e1f185f86a6f3403aa2420e815904c67b2f9ebc443f045edd0de921108345794", size = 208091, upload-time = "2025-10-14T04:41:13.346Z" },
+    { url = "https://files.pythonhosted.org/packages/7d/62/73a6d7450829655a35bb88a88fca7d736f9882a27eacdca2c6d505b57e2e/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b39f987ae8ccdf0d2642338faf2abb1862340facc796048b604ef14919e55ed", size = 147936, upload-time = "2025-10-14T04:41:14.461Z" },
+    { url = "https://files.pythonhosted.org/packages/89/c5/adb8c8b3d6625bef6d88b251bbb0d95f8205831b987631ab0c8bb5d937c2/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3162d5d8ce1bb98dd51af660f2121c55d0fa541b46dff7bb9b9f86ea1d87de72", size = 144180, upload-time = "2025-10-14T04:41:15.588Z" },
+    { url = "https://files.pythonhosted.org/packages/91/ed/9706e4070682d1cc219050b6048bfd293ccf67b3d4f5a4f39207453d4b99/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:81d5eb2a312700f4ecaa977a8235b634ce853200e828fbadf3a9c50bab278328", size = 161346, upload-time = "2025-10-14T04:41:16.738Z" },
+    { url = "https://files.pythonhosted.org/packages/d5/0d/031f0d95e4972901a2f6f09ef055751805ff541511dc1252ba3ca1f80cf5/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5bd2293095d766545ec1a8f612559f6b40abc0eb18bb2f5d1171872d34036ede", size = 158874, upload-time = "2025-10-14T04:41:17.923Z" },
+    { url = "https://files.pythonhosted.org/packages/f5/83/6ab5883f57c9c801ce5e5677242328aa45592be8a00644310a008d04f922/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8a8b89589086a25749f471e6a900d3f662d1d3b6e2e59dcecf787b1cc3a1894", size = 153076, upload-time = "2025-10-14T04:41:19.106Z" },
+    { url = "https://files.pythonhosted.org/packages/75/1e/5ff781ddf5260e387d6419959ee89ef13878229732732ee73cdae01800f2/charset_normalizer-3.4.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc7637e2f80d8530ee4a78e878bce464f70087ce73cf7c1caf142416923b98f1", size = 150601, upload-time = "2025-10-14T04:41:20.245Z" },
+    { url = "https://files.pythonhosted.org/packages/d7/57/71be810965493d3510a6ca79b90c19e48696fb1ff964da319334b12677f0/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f8bf04158c6b607d747e93949aa60618b61312fe647a6369f88ce2ff16043490", size = 150376, upload-time = "2025-10-14T04:41:21.398Z" },
+    { url = "https://files.pythonhosted.org/packages/e5/d5/c3d057a78c181d007014feb7e9f2e65905a6c4ef182c0ddf0de2924edd65/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:554af85e960429cf30784dd47447d5125aaa3b99a6f0683589dbd27e2f45da44", size = 144825, upload-time = "2025-10-14T04:41:22.583Z" },
+    { url = "https://files.pythonhosted.org/packages/e6/8c/d0406294828d4976f275ffbe66f00266c4b3136b7506941d87c00cab5272/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:74018750915ee7ad843a774364e13a3db91682f26142baddf775342c3f5b1133", size = 162583, upload-time = "2025-10-14T04:41:23.754Z" },
+    { url = "https://files.pythonhosted.org/packages/d7/24/e2aa1f18c8f15c4c0e932d9287b8609dd30ad56dbe41d926bd846e22fb8d/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c0463276121fdee9c49b98908b3a89c39be45d86d1dbaa22957e38f6321d4ce3", size = 150366, upload-time = "2025-10-14T04:41:25.27Z" },
+    { url = "https://files.pythonhosted.org/packages/e4/5b/1e6160c7739aad1e2df054300cc618b06bf784a7a164b0f238360721ab86/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:362d61fd13843997c1c446760ef36f240cf81d3ebf74ac62652aebaf7838561e", size = 160300, upload-time = "2025-10-14T04:41:26.725Z" },
+    { url = "https://files.pythonhosted.org/packages/7a/10/f882167cd207fbdd743e55534d5d9620e095089d176d55cb22d5322f2afd/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a26f18905b8dd5d685d6d07b0cdf98a79f3c7a918906af7cc143ea2e164c8bc", size = 154465, upload-time = "2025-10-14T04:41:28.322Z" },
+    { url = "https://files.pythonhosted.org/packages/89/66/c7a9e1b7429be72123441bfdbaf2bc13faab3f90b933f664db506dea5915/charset_normalizer-3.4.4-cp313-cp313-win32.whl", hash = "sha256:9b35f4c90079ff2e2edc5b26c0c77925e5d2d255c42c74fdb70fb49b172726ac", size = 99404, upload-time = "2025-10-14T04:41:29.95Z" },
+    { url = "https://files.pythonhosted.org/packages/c4/26/b9924fa27db384bdcd97ab83b4f0a8058d96ad9626ead570674d5e737d90/charset_normalizer-3.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:b435cba5f4f750aa6c0a0d92c541fb79f69a387c91e61f1795227e4ed9cece14", size = 107092, upload-time = "2025-10-14T04:41:31.188Z" },
+    { url = "https://files.pythonhosted.org/packages/af/8f/3ed4bfa0c0c72a7ca17f0380cd9e4dd842b09f664e780c13cff1dcf2ef1b/charset_normalizer-3.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:542d2cee80be6f80247095cc36c418f7bddd14f4a6de45af91dfad36d817bba2", size = 100408, upload-time = "2025-10-14T04:41:32.624Z" },
+    { url = "https://files.pythonhosted.org/packages/0a/4c/925909008ed5a988ccbb72dcc897407e5d6d3bd72410d69e051fc0c14647/charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f", size = 53402, upload-time = "2025-10-14T04:42:31.76Z" },
+]
+
+[[package]]
+name = "click"
+version = "8.3.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "colorama", marker = "sys_platform == 'win32'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/3d/fa/656b739db8587d7b5dfa22e22ed02566950fbfbcdc20311993483657a5c0/click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a", size = 295065, upload-time = "2025-11-15T20:45:42.706Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6", size = 108274, upload-time = "2025-11-15T20:45:41.139Z" },
+]
+
+[[package]]
+name = "click-plugins"
+version = "1.1.1.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "click" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/c3/a4/34847b59150da33690a36da3681d6bbc2ec14ee9a846bc30a6746e5984e4/click_plugins-1.1.1.2.tar.gz", hash = "sha256:d7af3984a99d243c131aa1a828331e7630f4a88a9741fd05c927b204bcf92261", size = 8343, upload-time = "2025-06-25T00:47:37.555Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/3d/9a/2abecb28ae875e39c8cad711eb1186d8d14eab564705325e77e4e6ab9ae5/click_plugins-1.1.1.2-py2.py3-none-any.whl", hash = "sha256:008d65743833ffc1f5417bf0e78e8d2c23aab04d9745ba817bd3e71b0feb6aa6", size = 11051, upload-time = "2025-06-25T00:47:36.731Z" },
+]
+
+[[package]]
+name = "cligj"
+version = "0.7.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "click" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/ea/0d/837dbd5d8430fd0f01ed72c4cfb2f548180f4c68c635df84ce87956cff32/cligj-0.7.2.tar.gz", hash = "sha256:a4bc13d623356b373c2c27c53dbd9c68cae5d526270bfa71f6c6fa69669c6b27", size = 9803, upload-time = "2021-05-28T21:23:27.935Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/73/86/43fa9f15c5b9fb6e82620428827cd3c284aa933431405d1bcf5231ae3d3e/cligj-0.7.2-py3-none-any.whl", hash = "sha256:c1ca117dbce1fe20a5809dc96f01e1c2840f6dcc939b3ddbb1111bf330ba82df", size = 7069, upload-time = "2021-05-28T21:23:26.877Z" },
+]
+
+[[package]]
+name = "colorama"
+version = "0.4.6"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" },
+]
+
+[[package]]
+name = "comm"
+version = "0.2.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/4c/13/7d740c5849255756bc17888787313b61fd38a0a8304fc4f073dfc46122aa/comm-0.2.3.tar.gz", hash = "sha256:2dc8048c10962d55d7ad693be1e7045d891b7ce8d999c97963a5e3e99c055971", size = 6319, upload-time = "2025-07-25T14:02:04.452Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/60/97/891a0971e1e4a8c5d2b20bbe0e524dc04548d2307fee33cdeba148fd4fc7/comm-0.2.3-py3-none-any.whl", hash = "sha256:c615d91d75f7f04f095b30d1c1711babd43bdc6419c1be9886a85f2f4e489417", size = 7294, upload-time = "2025-07-25T14:02:02.896Z" },
+]
+
+[[package]]
+name = "contourpy"
+version = "1.3.3"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "numpy" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/58/01/1253e6698a07380cd31a736d248a3f2a50a7c88779a1813da27503cadc2a/contourpy-1.3.3.tar.gz", hash = "sha256:083e12155b210502d0bca491432bb04d56dc3432f95a979b429f2848c3dbe880", size = 13466174, upload-time = "2025-07-26T12:03:12.549Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/91/2e/c4390a31919d8a78b90e8ecf87cd4b4c4f05a5b48d05ec17db8e5404c6f4/contourpy-1.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:709a48ef9a690e1343202916450bc48b9e51c049b089c7f79a267b46cffcdaa1", size = 288773, upload-time = "2025-07-26T12:01:02.277Z" },
+    { url = "https://files.pythonhosted.org/packages/0d/44/c4b0b6095fef4dc9c420e041799591e3b63e9619e3044f7f4f6c21c0ab24/contourpy-1.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:23416f38bfd74d5d28ab8429cc4d63fa67d5068bd711a85edb1c3fb0c3e2f381", size = 270149, upload-time = "2025-07-26T12:01:04.072Z" },
+    { url = "https://files.pythonhosted.org/packages/30/2e/dd4ced42fefac8470661d7cb7e264808425e6c5d56d175291e93890cce09/contourpy-1.3.3-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:929ddf8c4c7f348e4c0a5a3a714b5c8542ffaa8c22954862a46ca1813b667ee7", size = 329222, upload-time = "2025-07-26T12:01:05.688Z" },
+    { url = "https://files.pythonhosted.org/packages/f2/74/cc6ec2548e3d276c71389ea4802a774b7aa3558223b7bade3f25787fafc2/contourpy-1.3.3-cp311-cp311-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:9e999574eddae35f1312c2b4b717b7885d4edd6cb46700e04f7f02db454e67c1", size = 377234, upload-time = "2025-07-26T12:01:07.054Z" },
+    { url = "https://files.pythonhosted.org/packages/03/b3/64ef723029f917410f75c09da54254c5f9ea90ef89b143ccadb09df14c15/contourpy-1.3.3-cp311-cp311-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0bf67e0e3f482cb69779dd3061b534eb35ac9b17f163d851e2a547d56dba0a3a", size = 380555, upload-time = "2025-07-26T12:01:08.801Z" },
+    { url = "https://files.pythonhosted.org/packages/5f/4b/6157f24ca425b89fe2eb7e7be642375711ab671135be21e6faa100f7448c/contourpy-1.3.3-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:51e79c1f7470158e838808d4a996fa9bac72c498e93d8ebe5119bc1e6becb0db", size = 355238, upload-time = "2025-07-26T12:01:10.319Z" },
+    { url = "https://files.pythonhosted.org/packages/98/56/f914f0dd678480708a04cfd2206e7c382533249bc5001eb9f58aa693e200/contourpy-1.3.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:598c3aaece21c503615fd59c92a3598b428b2f01bfb4b8ca9c4edeecc2438620", size = 1326218, upload-time = "2025-07-26T12:01:12.659Z" },
+    { url = "https://files.pythonhosted.org/packages/fb/d7/4a972334a0c971acd5172389671113ae82aa7527073980c38d5868ff1161/contourpy-1.3.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:322ab1c99b008dad206d406bb61d014cf0174df491ae9d9d0fac6a6fda4f977f", size = 1392867, upload-time = "2025-07-26T12:01:15.533Z" },
+    { url = "https://files.pythonhosted.org/packages/75/3e/f2cc6cd56dc8cff46b1a56232eabc6feea52720083ea71ab15523daab796/contourpy-1.3.3-cp311-cp311-win32.whl", hash = "sha256:fd907ae12cd483cd83e414b12941c632a969171bf90fc937d0c9f268a31cafff", size = 183677, upload-time = "2025-07-26T12:01:17.088Z" },
+    { url = "https://files.pythonhosted.org/packages/98/4b/9bd370b004b5c9d8045c6c33cf65bae018b27aca550a3f657cdc99acdbd8/contourpy-1.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:3519428f6be58431c56581f1694ba8e50626f2dd550af225f82fb5f5814d2a42", size = 225234, upload-time = "2025-07-26T12:01:18.256Z" },
+    { url = "https://files.pythonhosted.org/packages/d9/b6/71771e02c2e004450c12b1120a5f488cad2e4d5b590b1af8bad060360fe4/contourpy-1.3.3-cp311-cp311-win_arm64.whl", hash = "sha256:15ff10bfada4bf92ec8b31c62bf7c1834c244019b4a33095a68000d7075df470", size = 193123, upload-time = "2025-07-26T12:01:19.848Z" },
+    { url = "https://files.pythonhosted.org/packages/be/45/adfee365d9ea3d853550b2e735f9d66366701c65db7855cd07621732ccfc/contourpy-1.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b08a32ea2f8e42cf1d4be3169a98dd4be32bafe4f22b6c4cb4ba810fa9e5d2cb", size = 293419, upload-time = "2025-07-26T12:01:21.16Z" },
+    { url = "https://files.pythonhosted.org/packages/53/3e/405b59cfa13021a56bba395a6b3aca8cec012b45bf177b0eaf7a202cde2c/contourpy-1.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:556dba8fb6f5d8742f2923fe9457dbdd51e1049c4a43fd3986a0b14a1d815fc6", size = 273979, upload-time = "2025-07-26T12:01:22.448Z" },
+    { url = "https://files.pythonhosted.org/packages/d4/1c/a12359b9b2ca3a845e8f7f9ac08bdf776114eb931392fcad91743e2ea17b/contourpy-1.3.3-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:92d9abc807cf7d0e047b95ca5d957cf4792fcd04e920ca70d48add15c1a90ea7", size = 332653, upload-time = "2025-07-26T12:01:24.155Z" },
+    { url = "https://files.pythonhosted.org/packages/63/12/897aeebfb475b7748ea67b61e045accdfcf0d971f8a588b67108ed7f5512/contourpy-1.3.3-cp312-cp312-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b2e8faa0ed68cb29af51edd8e24798bb661eac3bd9f65420c1887b6ca89987c8", size = 379536, upload-time = "2025-07-26T12:01:25.91Z" },
+    { url = "https://files.pythonhosted.org/packages/43/8a/a8c584b82deb248930ce069e71576fc09bd7174bbd35183b7943fb1064fd/contourpy-1.3.3-cp312-cp312-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:626d60935cf668e70a5ce6ff184fd713e9683fb458898e4249b63be9e28286ea", size = 384397, upload-time = "2025-07-26T12:01:27.152Z" },
+    { url = "https://files.pythonhosted.org/packages/cc/8f/ec6289987824b29529d0dfda0d74a07cec60e54b9c92f3c9da4c0ac732de/contourpy-1.3.3-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4d00e655fcef08aba35ec9610536bfe90267d7ab5ba944f7032549c55a146da1", size = 362601, upload-time = "2025-07-26T12:01:28.808Z" },
+    { url = "https://files.pythonhosted.org/packages/05/0a/a3fe3be3ee2dceb3e615ebb4df97ae6f3828aa915d3e10549ce016302bd1/contourpy-1.3.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:451e71b5a7d597379ef572de31eeb909a87246974d960049a9848c3bc6c41bf7", size = 1331288, upload-time = "2025-07-26T12:01:31.198Z" },
+    { url = "https://files.pythonhosted.org/packages/33/1d/acad9bd4e97f13f3e2b18a3977fe1b4a37ecf3d38d815333980c6c72e963/contourpy-1.3.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:459c1f020cd59fcfe6650180678a9993932d80d44ccde1fa1868977438f0b411", size = 1403386, upload-time = "2025-07-26T12:01:33.947Z" },
+    { url = "https://files.pythonhosted.org/packages/cf/8f/5847f44a7fddf859704217a99a23a4f6417b10e5ab1256a179264561540e/contourpy-1.3.3-cp312-cp312-win32.whl", hash = "sha256:023b44101dfe49d7d53932be418477dba359649246075c996866106da069af69", size = 185018, upload-time = "2025-07-26T12:01:35.64Z" },
+    { url = "https://files.pythonhosted.org/packages/19/e8/6026ed58a64563186a9ee3f29f41261fd1828f527dd93d33b60feca63352/contourpy-1.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:8153b8bfc11e1e4d75bcb0bff1db232f9e10b274e0929de9d608027e0d34ff8b", size = 226567, upload-time = "2025-07-26T12:01:36.804Z" },
+    { url = "https://files.pythonhosted.org/packages/d1/e2/f05240d2c39a1ed228d8328a78b6f44cd695f7ef47beb3e684cf93604f86/contourpy-1.3.3-cp312-cp312-win_arm64.whl", hash = "sha256:07ce5ed73ecdc4a03ffe3e1b3e3c1166db35ae7584be76f65dbbe28a7791b0cc", size = 193655, upload-time = "2025-07-26T12:01:37.999Z" },
+    { url = "https://files.pythonhosted.org/packages/68/35/0167aad910bbdb9599272bd96d01a9ec6852f36b9455cf2ca67bd4cc2d23/contourpy-1.3.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:177fb367556747a686509d6fef71d221a4b198a3905fe824430e5ea0fda54eb5", size = 293257, upload-time = "2025-07-26T12:01:39.367Z" },
+    { url = "https://files.pythonhosted.org/packages/96/e4/7adcd9c8362745b2210728f209bfbcf7d91ba868a2c5f40d8b58f54c509b/contourpy-1.3.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d002b6f00d73d69333dac9d0b8d5e84d9724ff9ef044fd63c5986e62b7c9e1b1", size = 274034, upload-time = "2025-07-26T12:01:40.645Z" },
+    { url = "https://files.pythonhosted.org/packages/73/23/90e31ceeed1de63058a02cb04b12f2de4b40e3bef5e082a7c18d9c8ae281/contourpy-1.3.3-cp313-cp313-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:348ac1f5d4f1d66d3322420f01d42e43122f43616e0f194fc1c9f5d830c5b286", size = 334672, upload-time = "2025-07-26T12:01:41.942Z" },
+    { url = "https://files.pythonhosted.org/packages/ed/93/b43d8acbe67392e659e1d984700e79eb67e2acb2bd7f62012b583a7f1b55/contourpy-1.3.3-cp313-cp313-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:655456777ff65c2c548b7c454af9c6f33f16c8884f11083244b5819cc214f1b5", size = 381234, upload-time = "2025-07-26T12:01:43.499Z" },
+    { url = "https://files.pythonhosted.org/packages/46/3b/bec82a3ea06f66711520f75a40c8fc0b113b2a75edb36aa633eb11c4f50f/contourpy-1.3.3-cp313-cp313-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:644a6853d15b2512d67881586bd03f462c7ab755db95f16f14d7e238f2852c67", size = 385169, upload-time = "2025-07-26T12:01:45.219Z" },
+    { url = "https://files.pythonhosted.org/packages/4b/32/e0f13a1c5b0f8572d0ec6ae2f6c677b7991fafd95da523159c19eff0696a/contourpy-1.3.3-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4debd64f124ca62069f313a9cb86656ff087786016d76927ae2cf37846b006c9", size = 362859, upload-time = "2025-07-26T12:01:46.519Z" },
+    { url = "https://files.pythonhosted.org/packages/33/71/e2a7945b7de4e58af42d708a219f3b2f4cff7386e6b6ab0a0fa0033c49a9/contourpy-1.3.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a15459b0f4615b00bbd1e91f1b9e19b7e63aea7483d03d804186f278c0af2659", size = 1332062, upload-time = "2025-07-26T12:01:48.964Z" },
+    { url = "https://files.pythonhosted.org/packages/12/fc/4e87ac754220ccc0e807284f88e943d6d43b43843614f0a8afa469801db0/contourpy-1.3.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ca0fdcd73925568ca027e0b17ab07aad764be4706d0a925b89227e447d9737b7", size = 1403932, upload-time = "2025-07-26T12:01:51.979Z" },
+    { url = "https://files.pythonhosted.org/packages/a6/2e/adc197a37443f934594112222ac1aa7dc9a98faf9c3842884df9a9d8751d/contourpy-1.3.3-cp313-cp313-win32.whl", hash = "sha256:b20c7c9a3bf701366556e1b1984ed2d0cedf999903c51311417cf5f591d8c78d", size = 185024, upload-time = "2025-07-26T12:01:53.245Z" },
+    { url = "https://files.pythonhosted.org/packages/18/0b/0098c214843213759692cc638fce7de5c289200a830e5035d1791d7a2338/contourpy-1.3.3-cp313-cp313-win_amd64.whl", hash = "sha256:1cadd8b8969f060ba45ed7c1b714fe69185812ab43bd6b86a9123fe8f99c3263", size = 226578, upload-time = "2025-07-26T12:01:54.422Z" },
+    { url = "https://files.pythonhosted.org/packages/8a/9a/2f6024a0c5995243cd63afdeb3651c984f0d2bc727fd98066d40e141ad73/contourpy-1.3.3-cp313-cp313-win_arm64.whl", hash = "sha256:fd914713266421b7536de2bfa8181aa8c699432b6763a0ea64195ebe28bff6a9", size = 193524, upload-time = "2025-07-26T12:01:55.73Z" },
+    { url = "https://files.pythonhosted.org/packages/c0/b3/f8a1a86bd3298513f500e5b1f5fd92b69896449f6cab6a146a5d52715479/contourpy-1.3.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:88df9880d507169449d434c293467418b9f6cbe82edd19284aa0409e7fdb933d", size = 306730, upload-time = "2025-07-26T12:01:57.051Z" },
+    { url = "https://files.pythonhosted.org/packages/3f/11/4780db94ae62fc0c2053909b65dc3246bd7cecfc4f8a20d957ad43aa4ad8/contourpy-1.3.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:d06bb1f751ba5d417047db62bca3c8fde202b8c11fb50742ab3ab962c81e8216", size = 287897, upload-time = "2025-07-26T12:01:58.663Z" },
+    { url = "https://files.pythonhosted.org/packages/ae/15/e59f5f3ffdd6f3d4daa3e47114c53daabcb18574a26c21f03dc9e4e42ff0/contourpy-1.3.3-cp313-cp313t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e4e6b05a45525357e382909a4c1600444e2a45b4795163d3b22669285591c1ae", size = 326751, upload-time = "2025-07-26T12:02:00.343Z" },
+    { url = "https://files.pythonhosted.org/packages/0f/81/03b45cfad088e4770b1dcf72ea78d3802d04200009fb364d18a493857210/contourpy-1.3.3-cp313-cp313t-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ab3074b48c4e2cf1a960e6bbeb7f04566bf36b1861d5c9d4d8ac04b82e38ba20", size = 375486, upload-time = "2025-07-26T12:02:02.128Z" },
+    { url = "https://files.pythonhosted.org/packages/0c/ba/49923366492ffbdd4486e970d421b289a670ae8cf539c1ea9a09822b371a/contourpy-1.3.3-cp313-cp313t-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6c3d53c796f8647d6deb1abe867daeb66dcc8a97e8455efa729516b997b8ed99", size = 388106, upload-time = "2025-07-26T12:02:03.615Z" },
+    { url = "https://files.pythonhosted.org/packages/9f/52/5b00ea89525f8f143651f9f03a0df371d3cbd2fccd21ca9b768c7a6500c2/contourpy-1.3.3-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:50ed930df7289ff2a8d7afeb9603f8289e5704755c7e5c3bbd929c90c817164b", size = 352548, upload-time = "2025-07-26T12:02:05.165Z" },
+    { url = "https://files.pythonhosted.org/packages/32/1d/a209ec1a3a3452d490f6b14dd92e72280c99ae3d1e73da74f8277d4ee08f/contourpy-1.3.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4feffb6537d64b84877da813a5c30f1422ea5739566abf0bd18065ac040e120a", size = 1322297, upload-time = "2025-07-26T12:02:07.379Z" },
+    { url = "https://files.pythonhosted.org/packages/bc/9e/46f0e8ebdd884ca0e8877e46a3f4e633f6c9c8c4f3f6e72be3fe075994aa/contourpy-1.3.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2b7e9480ffe2b0cd2e787e4df64270e3a0440d9db8dc823312e2c940c167df7e", size = 1391023, upload-time = "2025-07-26T12:02:10.171Z" },
+    { url = "https://files.pythonhosted.org/packages/b9/70/f308384a3ae9cd2209e0849f33c913f658d3326900d0ff5d378d6a1422d2/contourpy-1.3.3-cp313-cp313t-win32.whl", hash = "sha256:283edd842a01e3dcd435b1c5116798d661378d83d36d337b8dde1d16a5fc9ba3", size = 196157, upload-time = "2025-07-26T12:02:11.488Z" },
+    { url = "https://files.pythonhosted.org/packages/b2/dd/880f890a6663b84d9e34a6f88cded89d78f0091e0045a284427cb6b18521/contourpy-1.3.3-cp313-cp313t-win_amd64.whl", hash = "sha256:87acf5963fc2b34825e5b6b048f40e3635dd547f590b04d2ab317c2619ef7ae8", size = 240570, upload-time = "2025-07-26T12:02:12.754Z" },
+    { url = "https://files.pythonhosted.org/packages/80/99/2adc7d8ffead633234817ef8e9a87115c8a11927a94478f6bb3d3f4d4f7d/contourpy-1.3.3-cp313-cp313t-win_arm64.whl", hash = "sha256:3c30273eb2a55024ff31ba7d052dde990d7d8e5450f4bbb6e913558b3d6c2301", size = 199713, upload-time = "2025-07-26T12:02:14.4Z" },
+    { url = "https://files.pythonhosted.org/packages/a5/29/8dcfe16f0107943fa92388c23f6e05cff0ba58058c4c95b00280d4c75a14/contourpy-1.3.3-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:cd5dfcaeb10f7b7f9dc8941717c6c2ade08f587be2226222c12b25f0483ed497", size = 278809, upload-time = "2025-07-26T12:02:52.74Z" },
+    { url = "https://files.pythonhosted.org/packages/85/a9/8b37ef4f7dafeb335daee3c8254645ef5725be4d9c6aa70b50ec46ef2f7e/contourpy-1.3.3-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:0c1fc238306b35f246d61a1d416a627348b5cf0648648a031e14bb8705fcdfe8", size = 261593, upload-time = "2025-07-26T12:02:54.037Z" },
+    { url = "https://files.pythonhosted.org/packages/0a/59/ebfb8c677c75605cc27f7122c90313fd2f375ff3c8d19a1694bda74aaa63/contourpy-1.3.3-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:70f9aad7de812d6541d29d2bbf8feb22ff7e1c299523db288004e3157ff4674e", size = 302202, upload-time = "2025-07-26T12:02:55.947Z" },
+    { url = "https://files.pythonhosted.org/packages/3c/37/21972a15834d90bfbfb009b9d004779bd5a07a0ec0234e5ba8f64d5736f4/contourpy-1.3.3-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5ed3657edf08512fc3fe81b510e35c2012fbd3081d2e26160f27ca28affec989", size = 329207, upload-time = "2025-07-26T12:02:57.468Z" },
+    { url = "https://files.pythonhosted.org/packages/0c/58/bd257695f39d05594ca4ad60df5bcb7e32247f9951fd09a9b8edb82d1daa/contourpy-1.3.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:3d1a3799d62d45c18bafd41c5fa05120b96a28079f2393af559b843d1a966a77", size = 225315, upload-time = "2025-07-26T12:02:58.801Z" },
+]
+
+[[package]]
+name = "cycler"
+version = "0.12.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/a9/95/a3dbbb5028f35eafb79008e7522a75244477d2838f38cbb722248dabc2a8/cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c", size = 7615, upload-time = "2023-10-07T05:32:18.335Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/e7/05/c19819d5e3d95294a6f5947fb9b9629efb316b96de511b418c53d245aae6/cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30", size = 8321, upload-time = "2023-10-07T05:32:16.783Z" },
+]
+
+[[package]]
+name = "debugpy"
+version = "1.8.19"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/73/75/9e12d4d42349b817cd545b89247696c67917aab907012ae5b64bbfea3199/debugpy-1.8.19.tar.gz", hash = "sha256:eea7e5987445ab0b5ed258093722d5ecb8bb72217c5c9b1e21f64efe23ddebdb", size = 1644590, upload-time = "2025-12-15T21:53:28.044Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/80/e2/48531a609b5a2aa94c6b6853afdfec8da05630ab9aaa96f1349e772119e9/debugpy-1.8.19-cp311-cp311-macosx_15_0_universal2.whl", hash = "sha256:c5dcfa21de1f735a4f7ced4556339a109aa0f618d366ede9da0a3600f2516d8b", size = 2207620, upload-time = "2025-12-15T21:53:37.1Z" },
+    { url = "https://files.pythonhosted.org/packages/1b/d4/97775c01d56071969f57d93928899e5616a4cfbbf4c8cc75390d3a51c4a4/debugpy-1.8.19-cp311-cp311-manylinux_2_34_x86_64.whl", hash = "sha256:806d6800246244004625d5222d7765874ab2d22f3ba5f615416cf1342d61c488", size = 3170796, upload-time = "2025-12-15T21:53:38.513Z" },
+    { url = "https://files.pythonhosted.org/packages/8d/7e/8c7681bdb05be9ec972bbb1245eb7c4c7b0679bb6a9e6408d808bc876d3d/debugpy-1.8.19-cp311-cp311-win32.whl", hash = "sha256:783a519e6dfb1f3cd773a9bda592f4887a65040cb0c7bd38dde410f4e53c40d4", size = 5164287, upload-time = "2025-12-15T21:53:40.857Z" },
+    { url = "https://files.pythonhosted.org/packages/f2/a8/aaac7ff12ddf5d68a39e13a423a8490426f5f661384f5ad8d9062761bd8e/debugpy-1.8.19-cp311-cp311-win_amd64.whl", hash = "sha256:14035cbdbb1fe4b642babcdcb5935c2da3b1067ac211c5c5a8fdc0bb31adbcaa", size = 5188269, upload-time = "2025-12-15T21:53:42.359Z" },
+    { url = "https://files.pythonhosted.org/packages/4a/15/d762e5263d9e25b763b78be72dc084c7a32113a0bac119e2f7acae7700ed/debugpy-1.8.19-cp312-cp312-macosx_15_0_universal2.whl", hash = "sha256:bccb1540a49cde77edc7ce7d9d075c1dbeb2414751bc0048c7a11e1b597a4c2e", size = 2549995, upload-time = "2025-12-15T21:53:43.773Z" },
+    { url = "https://files.pythonhosted.org/packages/a7/88/f7d25c68b18873b7c53d7c156ca7a7ffd8e77073aa0eac170a9b679cf786/debugpy-1.8.19-cp312-cp312-manylinux_2_34_x86_64.whl", hash = "sha256:e9c68d9a382ec754dc05ed1d1b4ed5bd824b9f7c1a8cd1083adb84b3c93501de", size = 4309891, upload-time = "2025-12-15T21:53:45.26Z" },
+    { url = "https://files.pythonhosted.org/packages/c5/4f/a65e973aba3865794da65f71971dca01ae66666132c7b2647182d5be0c5f/debugpy-1.8.19-cp312-cp312-win32.whl", hash = "sha256:6599cab8a783d1496ae9984c52cb13b7c4a3bd06a8e6c33446832a5d97ce0bee", size = 5286355, upload-time = "2025-12-15T21:53:46.763Z" },
+    { url = "https://files.pythonhosted.org/packages/d8/3a/d3d8b48fec96e3d824e404bf428276fb8419dfa766f78f10b08da1cb2986/debugpy-1.8.19-cp312-cp312-win_amd64.whl", hash = "sha256:66e3d2fd8f2035a8f111eb127fa508469dfa40928a89b460b41fd988684dc83d", size = 5328239, upload-time = "2025-12-15T21:53:48.868Z" },
+    { url = "https://files.pythonhosted.org/packages/71/3d/388035a31a59c26f1ecc8d86af607d0c42e20ef80074147cd07b180c4349/debugpy-1.8.19-cp313-cp313-macosx_15_0_universal2.whl", hash = "sha256:91e35db2672a0abaf325f4868fcac9c1674a0d9ad9bb8a8c849c03a5ebba3e6d", size = 2538859, upload-time = "2025-12-15T21:53:50.478Z" },
+    { url = "https://files.pythonhosted.org/packages/4a/19/c93a0772d0962294f083dbdb113af1a7427bb632d36e5314297068f55db7/debugpy-1.8.19-cp313-cp313-manylinux_2_34_x86_64.whl", hash = "sha256:85016a73ab84dea1c1f1dcd88ec692993bcbe4532d1b49ecb5f3c688ae50c606", size = 4292575, upload-time = "2025-12-15T21:53:51.821Z" },
+    { url = "https://files.pythonhosted.org/packages/5c/56/09e48ab796b0a77e3d7dc250f95251832b8bf6838c9632f6100c98bdf426/debugpy-1.8.19-cp313-cp313-win32.whl", hash = "sha256:b605f17e89ba0ecee994391194285fada89cee111cfcd29d6f2ee11cbdc40976", size = 5286209, upload-time = "2025-12-15T21:53:53.602Z" },
+    { url = "https://files.pythonhosted.org/packages/fb/4e/931480b9552c7d0feebe40c73725dd7703dcc578ba9efc14fe0e6d31cfd1/debugpy-1.8.19-cp313-cp313-win_amd64.whl", hash = "sha256:c30639998a9f9cd9699b4b621942c0179a6527f083c72351f95c6ab1728d5b73", size = 5328206, upload-time = "2025-12-15T21:53:55.433Z" },
+    { url = "https://files.pythonhosted.org/packages/25/3e/e27078370414ef35fafad2c06d182110073daaeb5d3bf734b0b1eeefe452/debugpy-1.8.19-py2.py3-none-any.whl", hash = "sha256:360ffd231a780abbc414ba0f005dad409e71c78637efe8f2bd75837132a41d38", size = 5292321, upload-time = "2025-12-15T21:54:16.024Z" },
+]
+
+[[package]]
+name = "decorator"
+version = "5.2.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/43/fa/6d96a0978d19e17b68d634497769987b16c8f4cd0a7a05048bec693caa6b/decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360", size = 56711, upload-time = "2025-02-24T04:41:34.073Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/4e/8c/f3147f5c4b73e7550fe5f9352eaa956ae838d5c51eb58e7a25b9f3e2643b/decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a", size = 9190, upload-time = "2025-02-24T04:41:32.565Z" },
+]
+
+[[package]]
+name = "defusedxml"
+version = "0.7.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/0f/d5/c66da9b79e5bdb124974bfe172b4daf3c984ebd9c2a06e2b8a4dc7331c72/defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69", size = 75520, upload-time = "2021-03-08T10:59:26.269Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/07/6c/aa3f2f849e01cb6a001cd8554a88d4c77c5c1a31c95bdf1cf9301e6d9ef4/defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61", size = 25604, upload-time = "2021-03-08T10:59:24.45Z" },
+]
+
+[[package]]
+name = "distlib"
+version = "0.4.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/96/8e/709914eb2b5749865801041647dc7f4e6d00b549cfe88b65ca192995f07c/distlib-0.4.0.tar.gz", hash = "sha256:feec40075be03a04501a973d81f633735b4b69f98b05450592310c0f401a4e0d", size = 614605, upload-time = "2025-07-17T16:52:00.465Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/33/6b/e0547afaf41bf2c42e52430072fa5658766e3d65bd4b03a563d1b6336f57/distlib-0.4.0-py2.py3-none-any.whl", hash = "sha256:9659f7d87e46584a30b5780e43ac7a2143098441670ff0a49d5f9034c54a6c16", size = 469047, upload-time = "2025-07-17T16:51:58.613Z" },
+]
+
+[[package]]
+name = "executing"
+version = "2.2.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/cc/28/c14e053b6762b1044f34a13aab6859bbf40456d37d23aa286ac24cfd9a5d/executing-2.2.1.tar.gz", hash = "sha256:3632cc370565f6648cc328b32435bd120a1e4ebb20c77e3fdde9a13cd1e533c4", size = 1129488, upload-time = "2025-09-01T09:48:10.866Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/c1/ea/53f2148663b321f21b5a606bd5f191517cf40b7072c0497d3c92c4a13b1e/executing-2.2.1-py2.py3-none-any.whl", hash = "sha256:760643d3452b4d777d295bb167ccc74c64a81df23fb5e08eff250c425a4b2017", size = 28317, upload-time = "2025-09-01T09:48:08.5Z" },
+]
+
+[[package]]
+name = "fastjsonschema"
+version = "2.21.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/20/b5/23b216d9d985a956623b6bd12d4086b60f0059b27799f23016af04a74ea1/fastjsonschema-2.21.2.tar.gz", hash = "sha256:b1eb43748041c880796cd077f1a07c3d94e93ae84bba5ed36800a33554ae05de", size = 374130, upload-time = "2025-08-14T18:49:36.666Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/cb/a8/20d0723294217e47de6d9e2e40fd4a9d2f7c4b6ef974babd482a59743694/fastjsonschema-2.21.2-py3-none-any.whl", hash = "sha256:1c797122d0a86c5cace2e54bf4e819c36223b552017172f32c5c024a6b77e463", size = 24024, upload-time = "2025-08-14T18:49:34.776Z" },
+]
+
+[[package]]
+name = "filelock"
+version = "3.20.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/1d/65/ce7f1b70157833bf3cb851b556a37d4547ceafc158aa9b34b36782f23696/filelock-3.20.3.tar.gz", hash = "sha256:18c57ee915c7ec61cff0ecf7f0f869936c7c30191bb0cf406f1341778d0834e1", size = 19485, upload-time = "2026-01-09T17:55:05.421Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/b5/36/7fb70f04bf00bc646cd5bb45aa9eddb15e19437a28b8fb2b4a5249fac770/filelock-3.20.3-py3-none-any.whl", hash = "sha256:4b0dda527ee31078689fc205ec4f1c1bf7d56cf88b6dc9426c4f230e46c2dce1", size = 16701, upload-time = "2026-01-09T17:55:04.334Z" },
+]
+
+[[package]]
+name = "fonttools"
+version = "4.61.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/ec/ca/cf17b88a8df95691275a3d77dc0a5ad9907f328ae53acbe6795da1b2f5ed/fonttools-4.61.1.tar.gz", hash = "sha256:6675329885c44657f826ef01d9e4fb33b9158e9d93c537d84ad8399539bc6f69", size = 3565756, upload-time = "2025-12-12T17:31:24.246Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/69/12/bf9f4eaa2fad039356cc627587e30ed008c03f1cebd3034376b5ee8d1d44/fonttools-4.61.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c6604b735bb12fef8e0efd5578c9fb5d3d8532d5001ea13a19cddf295673ee09", size = 2852213, upload-time = "2025-12-12T17:29:46.675Z" },
+    { url = "https://files.pythonhosted.org/packages/ac/49/4138d1acb6261499bedde1c07f8c2605d1d8f9d77a151e5507fd3ef084b6/fonttools-4.61.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5ce02f38a754f207f2f06557523cd39a06438ba3aafc0639c477ac409fc64e37", size = 2401689, upload-time = "2025-12-12T17:29:48.769Z" },
+    { url = "https://files.pythonhosted.org/packages/e5/fe/e6ce0fe20a40e03aef906af60aa87668696f9e4802fa283627d0b5ed777f/fonttools-4.61.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:77efb033d8d7ff233385f30c62c7c79271c8885d5c9657d967ede124671bbdfb", size = 5058809, upload-time = "2025-12-12T17:29:51.701Z" },
+    { url = "https://files.pythonhosted.org/packages/79/61/1ca198af22f7dd22c17ab86e9024ed3c06299cfdb08170640e9996d501a0/fonttools-4.61.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:75c1a6dfac6abd407634420c93864a1e274ebc1c7531346d9254c0d8f6ca00f9", size = 5036039, upload-time = "2025-12-12T17:29:53.659Z" },
+    { url = "https://files.pythonhosted.org/packages/99/cc/fa1801e408586b5fce4da9f5455af8d770f4fc57391cd5da7256bb364d38/fonttools-4.61.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0de30bfe7745c0d1ffa2b0b7048fb7123ad0d71107e10ee090fa0b16b9452e87", size = 5034714, upload-time = "2025-12-12T17:29:55.592Z" },
+    { url = "https://files.pythonhosted.org/packages/bf/aa/b7aeafe65adb1b0a925f8f25725e09f078c635bc22754f3fecb7456955b0/fonttools-4.61.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:58b0ee0ab5b1fc9921eccfe11d1435added19d6494dde14e323f25ad2bc30c56", size = 5158648, upload-time = "2025-12-12T17:29:57.861Z" },
+    { url = "https://files.pythonhosted.org/packages/99/f9/08ea7a38663328881384c6e7777bbefc46fd7d282adfd87a7d2b84ec9d50/fonttools-4.61.1-cp311-cp311-win32.whl", hash = "sha256:f79b168428351d11e10c5aeb61a74e1851ec221081299f4cf56036a95431c43a", size = 2280681, upload-time = "2025-12-12T17:29:59.943Z" },
+    { url = "https://files.pythonhosted.org/packages/07/ad/37dd1ae5fa6e01612a1fbb954f0927681f282925a86e86198ccd7b15d515/fonttools-4.61.1-cp311-cp311-win_amd64.whl", hash = "sha256:fe2efccb324948a11dd09d22136fe2ac8a97d6c1347cf0b58a911dcd529f66b7", size = 2331951, upload-time = "2025-12-12T17:30:02.254Z" },
+    { url = "https://files.pythonhosted.org/packages/6f/16/7decaa24a1bd3a70c607b2e29f0adc6159f36a7e40eaba59846414765fd4/fonttools-4.61.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:f3cb4a569029b9f291f88aafc927dd53683757e640081ca8c412781ea144565e", size = 2851593, upload-time = "2025-12-12T17:30:04.225Z" },
+    { url = "https://files.pythonhosted.org/packages/94/98/3c4cb97c64713a8cf499b3245c3bf9a2b8fd16a3e375feff2aed78f96259/fonttools-4.61.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:41a7170d042e8c0024703ed13b71893519a1a6d6e18e933e3ec7507a2c26a4b2", size = 2400231, upload-time = "2025-12-12T17:30:06.47Z" },
+    { url = "https://files.pythonhosted.org/packages/b7/37/82dbef0f6342eb01f54bca073ac1498433d6ce71e50c3c3282b655733b31/fonttools-4.61.1-cp312-cp312-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:10d88e55330e092940584774ee5e8a6971b01fc2f4d3466a1d6c158230880796", size = 4954103, upload-time = "2025-12-12T17:30:08.432Z" },
+    { url = "https://files.pythonhosted.org/packages/6c/44/f3aeac0fa98e7ad527f479e161aca6c3a1e47bb6996b053d45226fe37bf2/fonttools-4.61.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:15acc09befd16a0fb8a8f62bc147e1a82817542d72184acca9ce6e0aeda9fa6d", size = 5004295, upload-time = "2025-12-12T17:30:10.56Z" },
+    { url = "https://files.pythonhosted.org/packages/14/e8/7424ced75473983b964d09f6747fa09f054a6d656f60e9ac9324cf40c743/fonttools-4.61.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e6bcdf33aec38d16508ce61fd81838f24c83c90a1d1b8c68982857038673d6b8", size = 4944109, upload-time = "2025-12-12T17:30:12.874Z" },
+    { url = "https://files.pythonhosted.org/packages/c8/8b/6391b257fa3d0b553d73e778f953a2f0154292a7a7a085e2374b111e5410/fonttools-4.61.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5fade934607a523614726119164ff621e8c30e8fa1ffffbbd358662056ba69f0", size = 5093598, upload-time = "2025-12-12T17:30:15.79Z" },
+    { url = "https://files.pythonhosted.org/packages/d9/71/fd2ea96cdc512d92da5678a1c98c267ddd4d8c5130b76d0f7a80f9a9fde8/fonttools-4.61.1-cp312-cp312-win32.whl", hash = "sha256:75da8f28eff26defba42c52986de97b22106cb8f26515b7c22443ebc9c2d3261", size = 2269060, upload-time = "2025-12-12T17:30:18.058Z" },
+    { url = "https://files.pythonhosted.org/packages/80/3b/a3e81b71aed5a688e89dfe0e2694b26b78c7d7f39a5ffd8a7d75f54a12a8/fonttools-4.61.1-cp312-cp312-win_amd64.whl", hash = "sha256:497c31ce314219888c0e2fce5ad9178ca83fe5230b01a5006726cdf3ac9f24d9", size = 2319078, upload-time = "2025-12-12T17:30:22.862Z" },
+    { url = "https://files.pythonhosted.org/packages/4b/cf/00ba28b0990982530addb8dc3e9e6f2fa9cb5c20df2abdda7baa755e8fe1/fonttools-4.61.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8c56c488ab471628ff3bfa80964372fc13504ece601e0d97a78ee74126b2045c", size = 2846454, upload-time = "2025-12-12T17:30:24.938Z" },
+    { url = "https://files.pythonhosted.org/packages/5a/ca/468c9a8446a2103ae645d14fee3f610567b7042aba85031c1c65e3ef7471/fonttools-4.61.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:dc492779501fa723b04d0ab1f5be046797fee17d27700476edc7ee9ae535a61e", size = 2398191, upload-time = "2025-12-12T17:30:27.343Z" },
+    { url = "https://files.pythonhosted.org/packages/a3/4b/d67eedaed19def5967fade3297fed8161b25ba94699efc124b14fb68cdbc/fonttools-4.61.1-cp313-cp313-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:64102ca87e84261419c3747a0d20f396eb024bdbeb04c2bfb37e2891f5fadcb5", size = 4928410, upload-time = "2025-12-12T17:30:29.771Z" },
+    { url = "https://files.pythonhosted.org/packages/b0/8d/6fb3494dfe61a46258cd93d979cf4725ded4eb46c2a4ca35e4490d84daea/fonttools-4.61.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4c1b526c8d3f615a7b1867f38a9410849c8f4aef078535742198e942fba0e9bd", size = 4984460, upload-time = "2025-12-12T17:30:32.073Z" },
+    { url = "https://files.pythonhosted.org/packages/f7/f1/a47f1d30b3dc00d75e7af762652d4cbc3dff5c2697a0dbd5203c81afd9c3/fonttools-4.61.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:41ed4b5ec103bd306bb68f81dc166e77409e5209443e5773cb4ed837bcc9b0d3", size = 4925800, upload-time = "2025-12-12T17:30:34.339Z" },
+    { url = "https://files.pythonhosted.org/packages/a7/01/e6ae64a0981076e8a66906fab01539799546181e32a37a0257b77e4aa88b/fonttools-4.61.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b501c862d4901792adaec7c25b1ecc749e2662543f68bb194c42ba18d6eec98d", size = 5067859, upload-time = "2025-12-12T17:30:36.593Z" },
+    { url = "https://files.pythonhosted.org/packages/73/aa/28e40b8d6809a9b5075350a86779163f074d2b617c15d22343fce81918db/fonttools-4.61.1-cp313-cp313-win32.whl", hash = "sha256:4d7092bb38c53bbc78e9255a59158b150bcdc115a1e3b3ce0b5f267dc35dd63c", size = 2267821, upload-time = "2025-12-12T17:30:38.478Z" },
+    { url = "https://files.pythonhosted.org/packages/1a/59/453c06d1d83dc0951b69ef692d6b9f1846680342927df54e9a1ca91c6f90/fonttools-4.61.1-cp313-cp313-win_amd64.whl", hash = "sha256:21e7c8d76f62ab13c9472ccf74515ca5b9a761d1bde3265152a6dc58700d895b", size = 2318169, upload-time = "2025-12-12T17:30:40.951Z" },
+    { url = "https://files.pythonhosted.org/packages/c7/4e/ce75a57ff3aebf6fc1f4e9d508b8e5810618a33d900ad6c19eb30b290b97/fonttools-4.61.1-py3-none-any.whl", hash = "sha256:17d2bf5d541add43822bcf0c43d7d847b160c9bb01d15d5007d84e2217aaa371", size = 1148996, upload-time = "2025-12-12T17:31:21.03Z" },
+]
+
+[[package]]
+name = "fqdn"
+version = "1.5.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/30/3e/a80a8c077fd798951169626cde3e239adeba7dab75deb3555716415bd9b0/fqdn-1.5.1.tar.gz", hash = "sha256:105ed3677e767fb5ca086a0c1f4bb66ebc3c100be518f0e0d755d9eae164d89f", size = 6015, upload-time = "2021-03-11T07:16:29.08Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/cf/58/8acf1b3e91c58313ce5cb67df61001fc9dcd21be4fadb76c1a2d540e09ed/fqdn-1.5.1-py3-none-any.whl", hash = "sha256:3a179af3761e4df6eb2e026ff9e1a3033d3587bf980a0b1b2e1e5d08d7358014", size = 9121, upload-time = "2021-03-11T07:16:28.351Z" },
+]
+
+[[package]]
+name = "geopandas"
+version = "1.1.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "numpy" },
+    { name = "packaging" },
+    { name = "pandas" },
+    { name = "pyogrio" },
+    { name = "pyproj" },
+    { name = "shapely" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/8d/24/5eb5685d7bf89d64218919379f882d19a60f8219d66d833c83b1cf264c95/geopandas-1.1.2.tar.gz", hash = "sha256:33f7b33565c46a45b8459a2ab699ec943fdbb5716e58e251b3c413cf7783106c", size = 336037, upload-time = "2025-12-22T21:06:13.749Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/54/e4/fac19dc34cb686c96011388b813ff7b858a70681e5ce6ce7698e5021b0f4/geopandas-1.1.2-py3-none-any.whl", hash = "sha256:2bb0b1052cb47378addb4ba54c47f8d4642dcbda9b61375638274f49d9f0bb0d", size = 341734, upload-time = "2025-12-22T21:06:12.498Z" },
+]
+
+[[package]]
+name = "ghp-import"
+version = "2.1.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "python-dateutil" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/d9/29/d40217cbe2f6b1359e00c6c307bb3fc876ba74068cbab3dde77f03ca0dc4/ghp-import-2.1.0.tar.gz", hash = "sha256:9c535c4c61193c2df8871222567d7fd7e5014d835f97dc7b7439069e2413d343", size = 10943, upload-time = "2022-05-02T15:47:16.11Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/f7/ec/67fbef5d497f86283db54c22eec6f6140243aae73265799baaaa19cd17fb/ghp_import-2.1.0-py3-none-any.whl", hash = "sha256:8337dd7b50877f163d4c0289bc1f1c7f127550241988d568c1db512c4324a619", size = 11034, upload-time = "2022-05-02T15:47:14.552Z" },
+]
+
+[[package]]
+name = "griffe"
+version = "1.15.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "colorama" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/0d/0c/3a471b6e31951dce2360477420d0a8d1e00dea6cf33b70f3e8c3ab6e28e1/griffe-1.15.0.tar.gz", hash = "sha256:7726e3afd6f298fbc3696e67958803e7ac843c1cfe59734b6251a40cdbfb5eea", size = 424112, upload-time = "2025-11-10T15:03:15.52Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/9c/83/3b1d03d36f224edded98e9affd0467630fc09d766c0e56fb1498cbb04a9b/griffe-1.15.0-py3-none-any.whl", hash = "sha256:6f6762661949411031f5fcda9593f586e6ce8340f0ba88921a0f2ef7a81eb9a3", size = 150705, upload-time = "2025-11-10T15:03:13.549Z" },
+]
+
+[[package]]
+name = "h11"
+version = "0.16.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" },
+]
+
+[[package]]
+name = "h5py"
+version = "3.15.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "numpy" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/4d/6a/0d79de0b025aa85dc8864de8e97659c94cf3d23148394a954dc5ca52f8c8/h5py-3.15.1.tar.gz", hash = "sha256:c86e3ed45c4473564de55aa83b6fc9e5ead86578773dfbd93047380042e26b69", size = 426236, upload-time = "2025-10-16T10:35:27.404Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/41/fd/8349b48b15b47768042cff06ad6e1c229f0a4bd89225bf6b6894fea27e6d/h5py-3.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5aaa330bcbf2830150c50897ea5dcbed30b5b6d56897289846ac5b9e529ec243", size = 3434135, upload-time = "2025-10-16T10:33:47.954Z" },
+    { url = "https://files.pythonhosted.org/packages/c1/b0/1c628e26a0b95858f54aba17e1599e7f6cd241727596cc2580b72cb0a9bf/h5py-3.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c970fb80001fffabb0109eaf95116c8e7c0d3ca2de854e0901e8a04c1f098509", size = 2870958, upload-time = "2025-10-16T10:33:50.907Z" },
+    { url = "https://files.pythonhosted.org/packages/f9/e3/c255cafc9b85e6ea04e2ad1bba1416baa1d7f57fc98a214be1144087690c/h5py-3.15.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:80e5bb5b9508d5d9da09f81fd00abbb3f85da8143e56b1585d59bc8ceb1dba8b", size = 4504770, upload-time = "2025-10-16T10:33:54.357Z" },
+    { url = "https://files.pythonhosted.org/packages/8b/23/4ab1108e87851ccc69694b03b817d92e142966a6c4abd99e17db77f2c066/h5py-3.15.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5b849ba619a066196169763c33f9f0f02e381156d61c03e000bb0100f9950faf", size = 4700329, upload-time = "2025-10-16T10:33:57.616Z" },
+    { url = "https://files.pythonhosted.org/packages/a4/e4/932a3a8516e4e475b90969bf250b1924dbe3612a02b897e426613aed68f4/h5py-3.15.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e7f6c841efd4e6e5b7e82222eaf90819927b6d256ab0f3aca29675601f654f3c", size = 4152456, upload-time = "2025-10-16T10:34:00.843Z" },
+    { url = "https://files.pythonhosted.org/packages/2a/0a/f74d589883b13737021b2049ac796328f188dbb60c2ed35b101f5b95a3fc/h5py-3.15.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ca8a3a22458956ee7b40d8e39c9a9dc01f82933e4c030c964f8b875592f4d831", size = 4617295, upload-time = "2025-10-16T10:34:04.154Z" },
+    { url = "https://files.pythonhosted.org/packages/23/95/499b4e56452ef8b6c95a271af0dde08dac4ddb70515a75f346d4f400579b/h5py-3.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:550e51131376889656feec4aff2170efc054a7fe79eb1da3bb92e1625d1ac878", size = 2882129, upload-time = "2025-10-16T10:34:06.886Z" },
+    { url = "https://files.pythonhosted.org/packages/ce/bb/cfcc70b8a42222ba3ad4478bcef1791181ea908e2adbd7d53c66395edad5/h5py-3.15.1-cp311-cp311-win_arm64.whl", hash = "sha256:b39239947cb36a819147fc19e86b618dcb0953d1cd969f5ed71fc0de60392427", size = 2477121, upload-time = "2025-10-16T10:34:09.579Z" },
+    { url = "https://files.pythonhosted.org/packages/62/b8/c0d9aa013ecfa8b7057946c080c0c07f6fa41e231d2e9bd306a2f8110bdc/h5py-3.15.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:316dd0f119734f324ca7ed10b5627a2de4ea42cc4dfbcedbee026aaa361c238c", size = 3399089, upload-time = "2025-10-16T10:34:12.135Z" },
+    { url = "https://files.pythonhosted.org/packages/a4/5e/3c6f6e0430813c7aefe784d00c6711166f46225f5d229546eb53032c3707/h5py-3.15.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b51469890e58e85d5242e43aab29f5e9c7e526b951caab354f3ded4ac88e7b76", size = 2847803, upload-time = "2025-10-16T10:34:14.564Z" },
+    { url = "https://files.pythonhosted.org/packages/00/69/ba36273b888a4a48d78f9268d2aee05787e4438557450a8442946ab8f3ec/h5py-3.15.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8a33bfd5dfcea037196f7778534b1ff7e36a7f40a89e648c8f2967292eb6898e", size = 4914884, upload-time = "2025-10-16T10:34:18.452Z" },
+    { url = "https://files.pythonhosted.org/packages/3a/30/d1c94066343a98bb2cea40120873193a4fed68c4ad7f8935c11caf74c681/h5py-3.15.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:25c8843fec43b2cc368aa15afa1cdf83fc5e17b1c4e10cd3771ef6c39b72e5ce", size = 5109965, upload-time = "2025-10-16T10:34:21.853Z" },
+    { url = "https://files.pythonhosted.org/packages/81/3d/d28172116eafc3bc9f5991b3cb3fd2c8a95f5984f50880adfdf991de9087/h5py-3.15.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a308fd8681a864c04423c0324527237a0484e2611e3441f8089fd00ed56a8171", size = 4561870, upload-time = "2025-10-16T10:34:26.69Z" },
+    { url = "https://files.pythonhosted.org/packages/a5/83/393a7226024238b0f51965a7156004eaae1fcf84aa4bfecf7e582676271b/h5py-3.15.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f4a016df3f4a8a14d573b496e4d1964deb380e26031fc85fb40e417e9131888a", size = 5037161, upload-time = "2025-10-16T10:34:30.383Z" },
+    { url = "https://files.pythonhosted.org/packages/cf/51/329e7436bf87ca6b0fe06dd0a3795c34bebe4ed8d6c44450a20565d57832/h5py-3.15.1-cp312-cp312-win_amd64.whl", hash = "sha256:59b25cf02411bf12e14f803fef0b80886444c7fe21a5ad17c6a28d3f08098a1e", size = 2874165, upload-time = "2025-10-16T10:34:33.461Z" },
+    { url = "https://files.pythonhosted.org/packages/09/a8/2d02b10a66747c54446e932171dd89b8b4126c0111b440e6bc05a7c852ec/h5py-3.15.1-cp312-cp312-win_arm64.whl", hash = "sha256:61d5a58a9851e01ee61c932bbbb1c98fe20aba0a5674776600fb9a361c0aa652", size = 2458214, upload-time = "2025-10-16T10:34:35.733Z" },
+    { url = "https://files.pythonhosted.org/packages/88/b3/40207e0192415cbff7ea1d37b9f24b33f6d38a5a2f5d18a678de78f967ae/h5py-3.15.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c8440fd8bee9500c235ecb7aa1917a0389a2adb80c209fa1cc485bd70e0d94a5", size = 3376511, upload-time = "2025-10-16T10:34:38.596Z" },
+    { url = "https://files.pythonhosted.org/packages/31/96/ba99a003c763998035b0de4c299598125df5fc6c9ccf834f152ddd60e0fb/h5py-3.15.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ab2219dbc6fcdb6932f76b548e2b16f34a1f52b7666e998157a4dfc02e2c4123", size = 2826143, upload-time = "2025-10-16T10:34:41.342Z" },
+    { url = "https://files.pythonhosted.org/packages/6a/c2/fc6375d07ea3962df7afad7d863fe4bde18bb88530678c20d4c90c18de1d/h5py-3.15.1-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d8cb02c3a96255149ed3ac811eeea25b655d959c6dd5ce702c9a95ff11859eb5", size = 4908316, upload-time = "2025-10-16T10:34:44.619Z" },
+    { url = "https://files.pythonhosted.org/packages/d9/69/4402ea66272dacc10b298cca18ed73e1c0791ff2ae9ed218d3859f9698ac/h5py-3.15.1-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:121b2b7a4c1915d63737483b7bff14ef253020f617c2fb2811f67a4bed9ac5e8", size = 5103710, upload-time = "2025-10-16T10:34:48.639Z" },
+    { url = "https://files.pythonhosted.org/packages/e0/f6/11f1e2432d57d71322c02a97a5567829a75f223a8c821764a0e71a65cde8/h5py-3.15.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:59b0d63b318bf3cc06687def2b45afd75926bbc006f7b8cd2b1a231299fc8599", size = 4556042, upload-time = "2025-10-16T10:34:51.841Z" },
+    { url = "https://files.pythonhosted.org/packages/18/88/3eda3ef16bfe7a7dbc3d8d6836bbaa7986feb5ff091395e140dc13927bcc/h5py-3.15.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e02fe77a03f652500d8bff288cbf3675f742fc0411f5a628fa37116507dc7cc0", size = 5030639, upload-time = "2025-10-16T10:34:55.257Z" },
+    { url = "https://files.pythonhosted.org/packages/e5/ea/fbb258a98863f99befb10ed727152b4ae659f322e1d9c0576f8a62754e81/h5py-3.15.1-cp313-cp313-win_amd64.whl", hash = "sha256:dea78b092fd80a083563ed79a3171258d4a4d307492e7cf8b2313d464c82ba52", size = 2864363, upload-time = "2025-10-16T10:34:58.099Z" },
+    { url = "https://files.pythonhosted.org/packages/5d/c9/35021cc9cd2b2915a7da3026e3d77a05bed1144a414ff840953b33937fb9/h5py-3.15.1-cp313-cp313-win_arm64.whl", hash = "sha256:c256254a8a81e2bddc0d376e23e2a6d2dc8a1e8a2261835ed8c1281a0744cd97", size = 2449570, upload-time = "2025-10-16T10:35:00.473Z" },
+]
+
+[[package]]
+name = "httpcore"
+version = "1.0.9"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "certifi" },
+    { name = "h11" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" },
+]
+
+[[package]]
+name = "httpx"
+version = "0.28.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "anyio" },
+    { name = "certifi" },
+    { name = "httpcore" },
+    { name = "idna" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" },
+]
+
+[[package]]
+name = "identify"
+version = "2.6.16"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/5b/8d/e8b97e6bd3fb6fb271346f7981362f1e04d6a7463abd0de79e1fda17c067/identify-2.6.16.tar.gz", hash = "sha256:846857203b5511bbe94d5a352a48ef2359532bc8f6727b5544077a0dcfb24980", size = 99360, upload-time = "2026-01-12T18:58:58.201Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/b8/58/40fbbcefeda82364720eba5cf2270f98496bdfa19ea75b4cccae79c698e6/identify-2.6.16-py2.py3-none-any.whl", hash = "sha256:391ee4d77741d994189522896270b787aed8670389bfd60f326d677d64a6dfb0", size = 99202, upload-time = "2026-01-12T18:58:56.627Z" },
+]
+
+[[package]]
+name = "idna"
+version = "3.11"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" },
+]
+
+[[package]]
+name = "iniconfig"
+version = "2.3.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" },
+]
+
+[[package]]
+name = "ipykernel"
+version = "7.1.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "appnope", marker = "sys_platform == 'darwin'" },
+    { name = "comm" },
+    { name = "debugpy" },
+    { name = "ipython" },
+    { name = "jupyter-client" },
+    { name = "jupyter-core" },
+    { name = "matplotlib-inline" },
+    { name = "nest-asyncio" },
+    { name = "packaging" },
+    { name = "psutil" },
+    { name = "pyzmq" },
+    { name = "tornado" },
+    { name = "traitlets" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/b9/a4/4948be6eb88628505b83a1f2f40d90254cab66abf2043b3c40fa07dfce0f/ipykernel-7.1.0.tar.gz", hash = "sha256:58a3fc88533d5930c3546dc7eac66c6d288acde4f801e2001e65edc5dc9cf0db", size = 174579, upload-time = "2025-10-27T09:46:39.471Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/a3/17/20c2552266728ceba271967b87919664ecc0e33efca29c3efc6baf88c5f9/ipykernel-7.1.0-py3-none-any.whl", hash = "sha256:763b5ec6c5b7776f6a8d7ce09b267693b4e5ce75cb50ae696aaefb3c85e1ea4c", size = 117968, upload-time = "2025-10-27T09:46:37.805Z" },
+]
+
+[[package]]
+name = "ipython"
+version = "9.9.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "colorama", marker = "sys_platform == 'win32'" },
+    { name = "decorator" },
+    { name = "ipython-pygments-lexers" },
+    { name = "jedi" },
+    { name = "matplotlib-inline" },
+    { name = "pexpect", marker = "sys_platform != 'emscripten' and sys_platform != 'win32'" },
+    { name = "prompt-toolkit" },
+    { name = "pygments" },
+    { name = "stack-data" },
+    { name = "traitlets" },
+    { name = "typing-extensions", marker = "python_full_version < '3.12'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/46/dd/fb08d22ec0c27e73c8bc8f71810709870d51cadaf27b7ddd3f011236c100/ipython-9.9.0.tar.gz", hash = "sha256:48fbed1b2de5e2c7177eefa144aba7fcb82dac514f09b57e2ac9da34ddb54220", size = 4425043, upload-time = "2026-01-05T12:36:46.233Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/86/92/162cfaee4ccf370465c5af1ce36a9eacec1becb552f2033bb3584e6f640a/ipython-9.9.0-py3-none-any.whl", hash = "sha256:b457fe9165df2b84e8ec909a97abcf2ed88f565970efba16b1f7229c283d252b", size = 621431, upload-time = "2026-01-05T12:36:44.669Z" },
+]
+
+[[package]]
+name = "ipython-pygments-lexers"
+version = "1.1.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "pygments" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/ef/4c/5dd1d8af08107f88c7f741ead7a40854b8ac24ddf9ae850afbcf698aa552/ipython_pygments_lexers-1.1.1.tar.gz", hash = "sha256:09c0138009e56b6854f9535736f4171d855c8c08a563a0dcd8022f78355c7e81", size = 8393, upload-time = "2025-01-17T11:24:34.505Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/d9/33/1f075bf72b0b747cb3288d011319aaf64083cf2efef8354174e3ed4540e2/ipython_pygments_lexers-1.1.1-py3-none-any.whl", hash = "sha256:a9462224a505ade19a605f71f8fa63c2048833ce50abc86768a0d81d876dc81c", size = 8074, upload-time = "2025-01-17T11:24:33.271Z" },
+]
+
+[[package]]
+name = "ipywidgets"
+version = "8.1.8"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "comm" },
+    { name = "ipython" },
+    { name = "jupyterlab-widgets" },
+    { name = "traitlets" },
+    { name = "widgetsnbextension" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/4c/ae/c5ce1edc1afe042eadb445e95b0671b03cee61895264357956e61c0d2ac0/ipywidgets-8.1.8.tar.gz", hash = "sha256:61f969306b95f85fba6b6986b7fe45d73124d1d9e3023a8068710d47a22ea668", size = 116739, upload-time = "2025-11-01T21:18:12.393Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/56/6d/0d9848617b9f753b87f214f1c682592f7ca42de085f564352f10f0843026/ipywidgets-8.1.8-py3-none-any.whl", hash = "sha256:ecaca67aed704a338f88f67b1181b58f821ab5dc89c1f0f5ef99db43c1c2921e", size = 139808, upload-time = "2025-11-01T21:18:10.956Z" },
+]
+
+[[package]]
+name = "isoduration"
+version = "20.11.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "arrow" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/7c/1a/3c8edc664e06e6bd06cce40c6b22da5f1429aa4224d0c590f3be21c91ead/isoduration-20.11.0.tar.gz", hash = "sha256:ac2f9015137935279eac671f94f89eb00584f940f5dc49462a0c4ee692ba1bd9", size = 11649, upload-time = "2020-11-01T11:00:00.312Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/7b/55/e5326141505c5d5e34c5e0935d2908a74e4561eca44108fbfb9c13d2911a/isoduration-20.11.0-py3-none-any.whl", hash = "sha256:b2904c2a4228c3d44f409c8ae8e2370eb21a26f7ac2ec5446df141dde3452042", size = 11321, upload-time = "2020-11-01T10:59:58.02Z" },
+]
+
+[[package]]
+name = "jedi"
+version = "0.19.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "parso" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/72/3a/79a912fbd4d8dd6fbb02bf69afd3bb72cf0c729bb3063c6f4498603db17a/jedi-0.19.2.tar.gz", hash = "sha256:4770dc3de41bde3966b02eb84fbcf557fb33cce26ad23da12c742fb50ecb11f0", size = 1231287, upload-time = "2024-11-11T01:41:42.873Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/c0/5a/9cac0c82afec3d09ccd97c8b6502d48f165f9124db81b4bcb90b4af974ee/jedi-0.19.2-py2.py3-none-any.whl", hash = "sha256:a8ef22bde8490f57fe5c7681a3c83cb58874daf72b4784de3cce5b6ef6edb5b9", size = 1572278, upload-time = "2024-11-11T01:41:40.175Z" },
+]
+
+[[package]]
+name = "jinja2"
+version = "3.1.6"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "markupsafe" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115, upload-time = "2025-03-05T20:05:02.478Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" },
+]
+
+[[package]]
+name = "json5"
+version = "0.13.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/77/e8/a3f261a66e4663f22700bc8a17c08cb83e91fbf086726e7a228398968981/json5-0.13.0.tar.gz", hash = "sha256:b1edf8d487721c0bf64d83c28e91280781f6e21f4a797d3261c7c828d4c165bf", size = 52441, upload-time = "2026-01-01T19:42:14.99Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/d7/9e/038522f50ceb7e74f1f991bf1b699f24b0c2bbe7c390dd36ad69f4582258/json5-0.13.0-py3-none-any.whl", hash = "sha256:9a08e1dd65f6a4d4c6fa82d216cf2477349ec2346a38fd70cc11d2557499fbcc", size = 36163, upload-time = "2026-01-01T19:42:13.962Z" },
+]
+
+[[package]]
+name = "jsonpointer"
+version = "3.0.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/6a/0a/eebeb1fa92507ea94016a2a790b93c2ae41a7e18778f85471dc54475ed25/jsonpointer-3.0.0.tar.gz", hash = "sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef", size = 9114, upload-time = "2024-06-10T19:24:42.462Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/71/92/5e77f98553e9e75130c78900d000368476aed74276eb8ae8796f65f00918/jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942", size = 7595, upload-time = "2024-06-10T19:24:40.698Z" },
+]
+
+[[package]]
+name = "jsonschema"
+version = "4.26.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "attrs" },
+    { name = "jsonschema-specifications" },
+    { name = "referencing" },
+    { name = "rpds-py" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/b3/fc/e067678238fa451312d4c62bf6e6cf5ec56375422aee02f9cb5f909b3047/jsonschema-4.26.0.tar.gz", hash = "sha256:0c26707e2efad8aa1bfc5b7ce170f3fccc2e4918ff85989ba9ffa9facb2be326", size = 366583, upload-time = "2026-01-07T13:41:07.246Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/69/90/f63fb5873511e014207a475e2bb4e8b2e570d655b00ac19a9a0ca0a385ee/jsonschema-4.26.0-py3-none-any.whl", hash = "sha256:d489f15263b8d200f8387e64b4c3a75f06629559fb73deb8fdfb525f2dab50ce", size = 90630, upload-time = "2026-01-07T13:41:05.306Z" },
+]
+
+[package.optional-dependencies]
+format-nongpl = [
+    { name = "fqdn" },
+    { name = "idna" },
+    { name = "isoduration" },
+    { name = "jsonpointer" },
+    { name = "rfc3339-validator" },
+    { name = "rfc3986-validator" },
+    { name = "rfc3987-syntax" },
+    { name = "uri-template" },
+    { name = "webcolors" },
+]
+
+[[package]]
+name = "jsonschema-specifications"
+version = "2025.9.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "referencing" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/19/74/a633ee74eb36c44aa6d1095e7cc5569bebf04342ee146178e2d36600708b/jsonschema_specifications-2025.9.1.tar.gz", hash = "sha256:b540987f239e745613c7a9176f3edb72b832a4ac465cf02712288397832b5e8d", size = 32855, upload-time = "2025-09-08T01:34:59.186Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/41/45/1a4ed80516f02155c51f51e8cedb3c1902296743db0bbc66608a0db2814f/jsonschema_specifications-2025.9.1-py3-none-any.whl", hash = "sha256:98802fee3a11ee76ecaca44429fda8a41bff98b00a0f2838151b113f210cc6fe", size = 18437, upload-time = "2025-09-08T01:34:57.871Z" },
+]
+
+[[package]]
+name = "jupyter"
+version = "1.1.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "ipykernel" },
+    { name = "ipywidgets" },
+    { name = "jupyter-console" },
+    { name = "jupyterlab" },
+    { name = "nbconvert" },
+    { name = "notebook" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/58/f3/af28ea964ab8bc1e472dba2e82627d36d470c51f5cd38c37502eeffaa25e/jupyter-1.1.1.tar.gz", hash = "sha256:d55467bceabdea49d7e3624af7e33d59c37fff53ed3a350e1ac957bed731de7a", size = 5714959, upload-time = "2024-08-30T07:15:48.299Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/38/64/285f20a31679bf547b75602702f7800e74dbabae36ef324f716c02804753/jupyter-1.1.1-py2.py3-none-any.whl", hash = "sha256:7a59533c22af65439b24bbe60373a4e95af8f16ac65a6c00820ad378e3f7cc83", size = 2657, upload-time = "2024-08-30T07:15:47.045Z" },
+]
+
+[[package]]
+name = "jupyter-client"
+version = "8.8.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "jupyter-core" },
+    { name = "python-dateutil" },
+    { name = "pyzmq" },
+    { name = "tornado" },
+    { name = "traitlets" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/05/e4/ba649102a3bc3fbca54e7239fb924fd434c766f855693d86de0b1f2bec81/jupyter_client-8.8.0.tar.gz", hash = "sha256:d556811419a4f2d96c869af34e854e3f059b7cc2d6d01a9cd9c85c267691be3e", size = 348020, upload-time = "2026-01-08T13:55:47.938Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/2d/0b/ceb7694d864abc0a047649aec263878acb9f792e1fec3e676f22dc9015e3/jupyter_client-8.8.0-py3-none-any.whl", hash = "sha256:f93a5b99c5e23a507b773d3a1136bd6e16c67883ccdbd9a829b0bbdb98cd7d7a", size = 107371, upload-time = "2026-01-08T13:55:45.562Z" },
+]
+
+[[package]]
+name = "jupyter-console"
+version = "6.6.3"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "ipykernel" },
+    { name = "ipython" },
+    { name = "jupyter-client" },
+    { name = "jupyter-core" },
+    { name = "prompt-toolkit" },
+    { name = "pygments" },
+    { name = "pyzmq" },
+    { name = "traitlets" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/bd/2d/e2fd31e2fc41c14e2bcb6c976ab732597e907523f6b2420305f9fc7fdbdb/jupyter_console-6.6.3.tar.gz", hash = "sha256:566a4bf31c87adbfadf22cdf846e3069b59a71ed5da71d6ba4d8aaad14a53539", size = 34363, upload-time = "2023-03-06T14:13:31.02Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/ca/77/71d78d58f15c22db16328a476426f7ac4a60d3a5a7ba3b9627ee2f7903d4/jupyter_console-6.6.3-py3-none-any.whl", hash = "sha256:309d33409fcc92ffdad25f0bcdf9a4a9daa61b6f341177570fdac03de5352485", size = 24510, upload-time = "2023-03-06T14:13:28.229Z" },
+]
+
+[[package]]
+name = "jupyter-core"
+version = "5.9.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "platformdirs" },
+    { name = "traitlets" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/02/49/9d1284d0dc65e2c757b74c6687b6d319b02f822ad039e5c512df9194d9dd/jupyter_core-5.9.1.tar.gz", hash = "sha256:4d09aaff303b9566c3ce657f580bd089ff5c91f5f89cf7d8846c3cdf465b5508", size = 89814, upload-time = "2025-10-16T19:19:18.444Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/e7/e7/80988e32bf6f73919a113473a604f5a8f09094de312b9d52b79c2df7612b/jupyter_core-5.9.1-py3-none-any.whl", hash = "sha256:ebf87fdc6073d142e114c72c9e29a9d7ca03fad818c5d300ce2adc1fb0743407", size = 29032, upload-time = "2025-10-16T19:19:16.783Z" },
+]
+
+[[package]]
+name = "jupyter-events"
+version = "0.12.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "jsonschema", extra = ["format-nongpl"] },
+    { name = "packaging" },
+    { name = "python-json-logger" },
+    { name = "pyyaml" },
+    { name = "referencing" },
+    { name = "rfc3339-validator" },
+    { name = "rfc3986-validator" },
+    { name = "traitlets" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/9d/c3/306d090461e4cf3cd91eceaff84bede12a8e52cd821c2d20c9a4fd728385/jupyter_events-0.12.0.tar.gz", hash = "sha256:fc3fce98865f6784c9cd0a56a20644fc6098f21c8c33834a8d9fe383c17e554b", size = 62196, upload-time = "2025-02-03T17:23:41.485Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/e2/48/577993f1f99c552f18a0428731a755e06171f9902fa118c379eb7c04ea22/jupyter_events-0.12.0-py3-none-any.whl", hash = "sha256:6464b2fa5ad10451c3d35fabc75eab39556ae1e2853ad0c0cc31b656731a97fb", size = 19430, upload-time = "2025-02-03T17:23:38.643Z" },
+]
+
+[[package]]
+name = "jupyter-lsp"
+version = "2.3.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "jupyter-server" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/eb/5a/9066c9f8e94ee517133cd98dba393459a16cd48bba71a82f16a65415206c/jupyter_lsp-2.3.0.tar.gz", hash = "sha256:458aa59339dc868fb784d73364f17dbce8836e906cd75fd471a325cba02e0245", size = 54823, upload-time = "2025-08-27T17:47:34.671Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/1a/60/1f6cee0c46263de1173894f0fafcb3475ded276c472c14d25e0280c18d6d/jupyter_lsp-2.3.0-py3-none-any.whl", hash = "sha256:e914a3cb2addf48b1c7710914771aaf1819d46b2e5a79b0f917b5478ec93f34f", size = 76687, upload-time = "2025-08-27T17:47:33.15Z" },
+]
+
+[[package]]
+name = "jupyter-server"
+version = "2.17.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "anyio" },
+    { name = "argon2-cffi" },
+    { name = "jinja2" },
+    { name = "jupyter-client" },
+    { name = "jupyter-core" },
+    { name = "jupyter-events" },
+    { name = "jupyter-server-terminals" },
+    { name = "nbconvert" },
+    { name = "nbformat" },
+    { name = "overrides", marker = "python_full_version < '3.12'" },
+    { name = "packaging" },
+    { name = "prometheus-client" },
+    { name = "pywinpty", marker = "os_name == 'nt'" },
+    { name = "pyzmq" },
+    { name = "send2trash" },
+    { name = "terminado" },
+    { name = "tornado" },
+    { name = "traitlets" },
+    { name = "websocket-client" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/5b/ac/e040ec363d7b6b1f11304cc9f209dac4517ece5d5e01821366b924a64a50/jupyter_server-2.17.0.tar.gz", hash = "sha256:c38ea898566964c888b4772ae1ed58eca84592e88251d2cfc4d171f81f7e99d5", size = 731949, upload-time = "2025-08-21T14:42:54.042Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/92/80/a24767e6ca280f5a49525d987bf3e4d7552bf67c8be07e8ccf20271f8568/jupyter_server-2.17.0-py3-none-any.whl", hash = "sha256:e8cb9c7db4251f51ed307e329b81b72ccf2056ff82d50524debde1ee1870e13f", size = 388221, upload-time = "2025-08-21T14:42:52.034Z" },
+]
+
+[[package]]
+name = "jupyter-server-terminals"
+version = "0.5.4"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "pywinpty", marker = "os_name == 'nt'" },
+    { name = "terminado" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/f4/a7/bcd0a9b0cbba88986fe944aaaf91bfda603e5a50bda8ed15123f381a3b2f/jupyter_server_terminals-0.5.4.tar.gz", hash = "sha256:bbda128ed41d0be9020349f9f1f2a4ab9952a73ed5f5ac9f1419794761fb87f5", size = 31770, upload-time = "2026-01-14T16:53:20.213Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/d1/2d/6674563f71c6320841fc300911a55143925112a72a883e2ca71fba4c618d/jupyter_server_terminals-0.5.4-py3-none-any.whl", hash = "sha256:55be353fc74a80bc7f3b20e6be50a55a61cd525626f578dcb66a5708e2007d14", size = 13704, upload-time = "2026-01-14T16:53:18.738Z" },
+]
+
+[[package]]
+name = "jupyterlab"
+version = "4.5.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "async-lru" },
+    { name = "httpx" },
+    { name = "ipykernel" },
+    { name = "jinja2" },
+    { name = "jupyter-core" },
+    { name = "jupyter-lsp" },
+    { name = "jupyter-server" },
+    { name = "jupyterlab-server" },
+    { name = "notebook-shim" },
+    { name = "packaging" },
+    { name = "setuptools" },
+    { name = "tornado" },
+    { name = "traitlets" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/93/dc/2c8c4ff1aee27ac999ba04c373c5d0d7c6c181b391640d7b916b884d5985/jupyterlab-4.5.2.tar.gz", hash = "sha256:c80a6b9f6dace96a566d590c65ee2785f61e7cd4aac5b4d453dcc7d0d5e069b7", size = 23990371, upload-time = "2026-01-12T12:27:08.493Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/a4/78/7e455920f104ef2aa94a4c0d2b40e5b44334ee7057eae1aa1fb97b9631ad/jupyterlab-4.5.2-py3-none-any.whl", hash = "sha256:76466ebcfdb7a9bb7e2fbd6459c0e2c032ccf75be673634a84bee4b3e6b13ab6", size = 12385807, upload-time = "2026-01-12T12:27:03.923Z" },
+]
+
+[[package]]
+name = "jupyterlab-pygments"
+version = "0.3.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/90/51/9187be60d989df97f5f0aba133fa54e7300f17616e065d1ada7d7646b6d6/jupyterlab_pygments-0.3.0.tar.gz", hash = "sha256:721aca4d9029252b11cfa9d185e5b5af4d54772bb8072f9b7036f4170054d35d", size = 512900, upload-time = "2023-11-23T09:26:37.44Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/b1/dd/ead9d8ea85bf202d90cc513b533f9c363121c7792674f78e0d8a854b63b4/jupyterlab_pygments-0.3.0-py3-none-any.whl", hash = "sha256:841a89020971da1d8693f1a99997aefc5dc424bb1b251fd6322462a1b8842780", size = 15884, upload-time = "2023-11-23T09:26:34.325Z" },
+]
+
+[[package]]
+name = "jupyterlab-server"
+version = "2.28.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "babel" },
+    { name = "jinja2" },
+    { name = "json5" },
+    { name = "jsonschema" },
+    { name = "jupyter-server" },
+    { name = "packaging" },
+    { name = "requests" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/d6/2c/90153f189e421e93c4bb4f9e3f59802a1f01abd2ac5cf40b152d7f735232/jupyterlab_server-2.28.0.tar.gz", hash = "sha256:35baa81898b15f93573e2deca50d11ac0ae407ebb688299d3a5213265033712c", size = 76996, upload-time = "2025-10-22T13:59:18.37Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/e0/07/a000fe835f76b7e1143242ab1122e6362ef1c03f23f83a045c38859c2ae0/jupyterlab_server-2.28.0-py3-none-any.whl", hash = "sha256:e4355b148fdcf34d312bbbc80f22467d6d20460e8b8736bf235577dd18506968", size = 59830, upload-time = "2025-10-22T13:59:16.767Z" },
+]
+
+[[package]]
+name = "jupyterlab-widgets"
+version = "3.0.16"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/26/2d/ef58fed122b268c69c0aa099da20bc67657cdfb2e222688d5731bd5b971d/jupyterlab_widgets-3.0.16.tar.gz", hash = "sha256:423da05071d55cf27a9e602216d35a3a65a3e41cdf9c5d3b643b814ce38c19e0", size = 897423, upload-time = "2025-11-01T21:11:29.724Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/ab/b5/36c712098e6191d1b4e349304ef73a8d06aed77e56ceaac8c0a306c7bda1/jupyterlab_widgets-3.0.16-py3-none-any.whl", hash = "sha256:45fa36d9c6422cf2559198e4db481aa243c7a32d9926b500781c830c80f7ecf8", size = 914926, upload-time = "2025-11-01T21:11:28.008Z" },
+]
+
+[[package]]
+name = "kiwisolver"
+version = "1.4.9"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/5c/3c/85844f1b0feb11ee581ac23fe5fce65cd049a200c1446708cc1b7f922875/kiwisolver-1.4.9.tar.gz", hash = "sha256:c3b22c26c6fd6811b0ae8363b95ca8ce4ea3c202d3d0975b2914310ceb1bcc4d", size = 97564, upload-time = "2025-08-10T21:27:49.279Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/6f/ab/c80b0d5a9d8a1a65f4f815f2afff9798b12c3b9f31f1d304dd233dd920e2/kiwisolver-1.4.9-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:eb14a5da6dc7642b0f3a18f13654847cd8b7a2550e2645a5bda677862b03ba16", size = 124167, upload-time = "2025-08-10T21:25:53.403Z" },
+    { url = "https://files.pythonhosted.org/packages/a0/c0/27fe1a68a39cf62472a300e2879ffc13c0538546c359b86f149cc19f6ac3/kiwisolver-1.4.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:39a219e1c81ae3b103643d2aedb90f1ef22650deb266ff12a19e7773f3e5f089", size = 66579, upload-time = "2025-08-10T21:25:54.79Z" },
+    { url = "https://files.pythonhosted.org/packages/31/a2/a12a503ac1fd4943c50f9822678e8015a790a13b5490354c68afb8489814/kiwisolver-1.4.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2405a7d98604b87f3fc28b1716783534b1b4b8510d8142adca34ee0bc3c87543", size = 65309, upload-time = "2025-08-10T21:25:55.76Z" },
+    { url = "https://files.pythonhosted.org/packages/66/e1/e533435c0be77c3f64040d68d7a657771194a63c279f55573188161e81ca/kiwisolver-1.4.9-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:dc1ae486f9abcef254b5618dfb4113dd49f94c68e3e027d03cf0143f3f772b61", size = 1435596, upload-time = "2025-08-10T21:25:56.861Z" },
+    { url = "https://files.pythonhosted.org/packages/67/1e/51b73c7347f9aabdc7215aa79e8b15299097dc2f8e67dee2b095faca9cb0/kiwisolver-1.4.9-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8a1f570ce4d62d718dce3f179ee78dac3b545ac16c0c04bb363b7607a949c0d1", size = 1246548, upload-time = "2025-08-10T21:25:58.246Z" },
+    { url = "https://files.pythonhosted.org/packages/21/aa/72a1c5d1e430294f2d32adb9542719cfb441b5da368d09d268c7757af46c/kiwisolver-1.4.9-cp311-cp311-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:cb27e7b78d716c591e88e0a09a2139c6577865d7f2e152488c2cc6257f460872", size = 1263618, upload-time = "2025-08-10T21:25:59.857Z" },
+    { url = "https://files.pythonhosted.org/packages/a3/af/db1509a9e79dbf4c260ce0cfa3903ea8945f6240e9e59d1e4deb731b1a40/kiwisolver-1.4.9-cp311-cp311-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:15163165efc2f627eb9687ea5f3a28137217d217ac4024893d753f46bce9de26", size = 1317437, upload-time = "2025-08-10T21:26:01.105Z" },
+    { url = "https://files.pythonhosted.org/packages/e0/f2/3ea5ee5d52abacdd12013a94130436e19969fa183faa1e7c7fbc89e9a42f/kiwisolver-1.4.9-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:bdee92c56a71d2b24c33a7d4c2856bd6419d017e08caa7802d2963870e315028", size = 2195742, upload-time = "2025-08-10T21:26:02.675Z" },
+    { url = "https://files.pythonhosted.org/packages/6f/9b/1efdd3013c2d9a2566aa6a337e9923a00590c516add9a1e89a768a3eb2fc/kiwisolver-1.4.9-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:412f287c55a6f54b0650bd9b6dce5aceddb95864a1a90c87af16979d37c89771", size = 2290810, upload-time = "2025-08-10T21:26:04.009Z" },
+    { url = "https://files.pythonhosted.org/packages/fb/e5/cfdc36109ae4e67361f9bc5b41323648cb24a01b9ade18784657e022e65f/kiwisolver-1.4.9-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2c93f00dcba2eea70af2be5f11a830a742fe6b579a1d4e00f47760ef13be247a", size = 2461579, upload-time = "2025-08-10T21:26:05.317Z" },
+    { url = "https://files.pythonhosted.org/packages/62/86/b589e5e86c7610842213994cdea5add00960076bef4ae290c5fa68589cac/kiwisolver-1.4.9-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f117e1a089d9411663a3207ba874f31be9ac8eaa5b533787024dc07aeb74f464", size = 2268071, upload-time = "2025-08-10T21:26:06.686Z" },
+    { url = "https://files.pythonhosted.org/packages/3b/c6/f8df8509fd1eee6c622febe54384a96cfaf4d43bf2ccec7a0cc17e4715c9/kiwisolver-1.4.9-cp311-cp311-win_amd64.whl", hash = "sha256:be6a04e6c79819c9a8c2373317d19a96048e5a3f90bec587787e86a1153883c2", size = 73840, upload-time = "2025-08-10T21:26:07.94Z" },
+    { url = "https://files.pythonhosted.org/packages/e2/2d/16e0581daafd147bc11ac53f032a2b45eabac897f42a338d0a13c1e5c436/kiwisolver-1.4.9-cp311-cp311-win_arm64.whl", hash = "sha256:0ae37737256ba2de764ddc12aed4956460277f00c4996d51a197e72f62f5eec7", size = 65159, upload-time = "2025-08-10T21:26:09.048Z" },
+    { url = "https://files.pythonhosted.org/packages/86/c9/13573a747838aeb1c76e3267620daa054f4152444d1f3d1a2324b78255b5/kiwisolver-1.4.9-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ac5a486ac389dddcc5bef4f365b6ae3ffff2c433324fb38dd35e3fab7c957999", size = 123686, upload-time = "2025-08-10T21:26:10.034Z" },
+    { url = "https://files.pythonhosted.org/packages/51/ea/2ecf727927f103ffd1739271ca19c424d0e65ea473fbaeea1c014aea93f6/kiwisolver-1.4.9-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f2ba92255faa7309d06fe44c3a4a97efe1c8d640c2a79a5ef728b685762a6fd2", size = 66460, upload-time = "2025-08-10T21:26:11.083Z" },
+    { url = "https://files.pythonhosted.org/packages/5b/5a/51f5464373ce2aeb5194508298a508b6f21d3867f499556263c64c621914/kiwisolver-1.4.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a2899935e724dd1074cb568ce7ac0dce28b2cd6ab539c8e001a8578eb106d14", size = 64952, upload-time = "2025-08-10T21:26:12.058Z" },
+    { url = "https://files.pythonhosted.org/packages/70/90/6d240beb0f24b74371762873e9b7f499f1e02166a2d9c5801f4dbf8fa12e/kiwisolver-1.4.9-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f6008a4919fdbc0b0097089f67a1eb55d950ed7e90ce2cc3e640abadd2757a04", size = 1474756, upload-time = "2025-08-10T21:26:13.096Z" },
+    { url = "https://files.pythonhosted.org/packages/12/42/f36816eaf465220f683fb711efdd1bbf7a7005a2473d0e4ed421389bd26c/kiwisolver-1.4.9-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:67bb8b474b4181770f926f7b7d2f8c0248cbcb78b660fdd41a47054b28d2a752", size = 1276404, upload-time = "2025-08-10T21:26:14.457Z" },
+    { url = "https://files.pythonhosted.org/packages/2e/64/bc2de94800adc830c476dce44e9b40fd0809cddeef1fde9fcf0f73da301f/kiwisolver-1.4.9-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2327a4a30d3ee07d2fbe2e7933e8a37c591663b96ce42a00bc67461a87d7df77", size = 1294410, upload-time = "2025-08-10T21:26:15.73Z" },
+    { url = "https://files.pythonhosted.org/packages/5f/42/2dc82330a70aa8e55b6d395b11018045e58d0bb00834502bf11509f79091/kiwisolver-1.4.9-cp312-cp312-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:7a08b491ec91b1d5053ac177afe5290adacf1f0f6307d771ccac5de30592d198", size = 1343631, upload-time = "2025-08-10T21:26:17.045Z" },
+    { url = "https://files.pythonhosted.org/packages/22/fd/f4c67a6ed1aab149ec5a8a401c323cee7a1cbe364381bb6c9c0d564e0e20/kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d8fc5c867c22b828001b6a38d2eaeb88160bf5783c6cb4a5e440efc981ce286d", size = 2224963, upload-time = "2025-08-10T21:26:18.737Z" },
+    { url = "https://files.pythonhosted.org/packages/45/aa/76720bd4cb3713314677d9ec94dcc21ced3f1baf4830adde5bb9b2430a5f/kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:3b3115b2581ea35bb6d1f24a4c90af37e5d9b49dcff267eeed14c3893c5b86ab", size = 2321295, upload-time = "2025-08-10T21:26:20.11Z" },
+    { url = "https://files.pythonhosted.org/packages/80/19/d3ec0d9ab711242f56ae0dc2fc5d70e298bb4a1f9dfab44c027668c673a1/kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:858e4c22fb075920b96a291928cb7dea5644e94c0ee4fcd5af7e865655e4ccf2", size = 2487987, upload-time = "2025-08-10T21:26:21.49Z" },
+    { url = "https://files.pythonhosted.org/packages/39/e9/61e4813b2c97e86b6fdbd4dd824bf72d28bcd8d4849b8084a357bc0dd64d/kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ed0fecd28cc62c54b262e3736f8bb2512d8dcfdc2bcf08be5f47f96bf405b145", size = 2291817, upload-time = "2025-08-10T21:26:22.812Z" },
+    { url = "https://files.pythonhosted.org/packages/a0/41/85d82b0291db7504da3c2defe35c9a8a5c9803a730f297bd823d11d5fb77/kiwisolver-1.4.9-cp312-cp312-win_amd64.whl", hash = "sha256:f68208a520c3d86ea51acf688a3e3002615a7f0238002cccc17affecc86a8a54", size = 73895, upload-time = "2025-08-10T21:26:24.37Z" },
+    { url = "https://files.pythonhosted.org/packages/e2/92/5f3068cf15ee5cb624a0c7596e67e2a0bb2adee33f71c379054a491d07da/kiwisolver-1.4.9-cp312-cp312-win_arm64.whl", hash = "sha256:2c1a4f57df73965f3f14df20b80ee29e6a7930a57d2d9e8491a25f676e197c60", size = 64992, upload-time = "2025-08-10T21:26:25.732Z" },
+    { url = "https://files.pythonhosted.org/packages/31/c1/c2686cda909742ab66c7388e9a1a8521a59eb89f8bcfbee28fc980d07e24/kiwisolver-1.4.9-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a5d0432ccf1c7ab14f9949eec60c5d1f924f17c037e9f8b33352fa05799359b8", size = 123681, upload-time = "2025-08-10T21:26:26.725Z" },
+    { url = "https://files.pythonhosted.org/packages/ca/f0/f44f50c9f5b1a1860261092e3bc91ecdc9acda848a8b8c6abfda4a24dd5c/kiwisolver-1.4.9-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efb3a45b35622bb6c16dbfab491a8f5a391fe0e9d45ef32f4df85658232ca0e2", size = 66464, upload-time = "2025-08-10T21:26:27.733Z" },
+    { url = "https://files.pythonhosted.org/packages/2d/7a/9d90a151f558e29c3936b8a47ac770235f436f2120aca41a6d5f3d62ae8d/kiwisolver-1.4.9-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1a12cf6398e8a0a001a059747a1cbf24705e18fe413bc22de7b3d15c67cffe3f", size = 64961, upload-time = "2025-08-10T21:26:28.729Z" },
+    { url = "https://files.pythonhosted.org/packages/e9/e9/f218a2cb3a9ffbe324ca29a9e399fa2d2866d7f348ec3a88df87fc248fc5/kiwisolver-1.4.9-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b67e6efbf68e077dd71d1a6b37e43e1a99d0bff1a3d51867d45ee8908b931098", size = 1474607, upload-time = "2025-08-10T21:26:29.798Z" },
+    { url = "https://files.pythonhosted.org/packages/d9/28/aac26d4c882f14de59041636292bc838db8961373825df23b8eeb807e198/kiwisolver-1.4.9-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5656aa670507437af0207645273ccdfee4f14bacd7f7c67a4306d0dcaeaf6eed", size = 1276546, upload-time = "2025-08-10T21:26:31.401Z" },
+    { url = "https://files.pythonhosted.org/packages/8b/ad/8bfc1c93d4cc565e5069162f610ba2f48ff39b7de4b5b8d93f69f30c4bed/kiwisolver-1.4.9-cp313-cp313-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:bfc08add558155345129c7803b3671cf195e6a56e7a12f3dde7c57d9b417f525", size = 1294482, upload-time = "2025-08-10T21:26:32.721Z" },
+    { url = "https://files.pythonhosted.org/packages/da/f1/6aca55ff798901d8ce403206d00e033191f63d82dd708a186e0ed2067e9c/kiwisolver-1.4.9-cp313-cp313-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:40092754720b174e6ccf9e845d0d8c7d8e12c3d71e7fc35f55f3813e96376f78", size = 1343720, upload-time = "2025-08-10T21:26:34.032Z" },
+    { url = "https://files.pythonhosted.org/packages/d1/91/eed031876c595c81d90d0f6fc681ece250e14bf6998c3d7c419466b523b7/kiwisolver-1.4.9-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:497d05f29a1300d14e02e6441cf0f5ee81c1ff5a304b0d9fb77423974684e08b", size = 2224907, upload-time = "2025-08-10T21:26:35.824Z" },
+    { url = "https://files.pythonhosted.org/packages/e9/ec/4d1925f2e49617b9cca9c34bfa11adefad49d00db038e692a559454dfb2e/kiwisolver-1.4.9-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:bdd1a81a1860476eb41ac4bc1e07b3f07259e6d55bbf739b79c8aaedcf512799", size = 2321334, upload-time = "2025-08-10T21:26:37.534Z" },
+    { url = "https://files.pythonhosted.org/packages/43/cb/450cd4499356f68802750c6ddc18647b8ea01ffa28f50d20598e0befe6e9/kiwisolver-1.4.9-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:e6b93f13371d341afee3be9f7c5964e3fe61d5fa30f6a30eb49856935dfe4fc3", size = 2488313, upload-time = "2025-08-10T21:26:39.191Z" },
+    { url = "https://files.pythonhosted.org/packages/71/67/fc76242bd99f885651128a5d4fa6083e5524694b7c88b489b1b55fdc491d/kiwisolver-1.4.9-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d75aa530ccfaa593da12834b86a0724f58bff12706659baa9227c2ccaa06264c", size = 2291970, upload-time = "2025-08-10T21:26:40.828Z" },
+    { url = "https://files.pythonhosted.org/packages/75/bd/f1a5d894000941739f2ae1b65a32892349423ad49c2e6d0771d0bad3fae4/kiwisolver-1.4.9-cp313-cp313-win_amd64.whl", hash = "sha256:dd0a578400839256df88c16abddf9ba14813ec5f21362e1fe65022e00c883d4d", size = 73894, upload-time = "2025-08-10T21:26:42.33Z" },
+    { url = "https://files.pythonhosted.org/packages/95/38/dce480814d25b99a391abbddadc78f7c117c6da34be68ca8b02d5848b424/kiwisolver-1.4.9-cp313-cp313-win_arm64.whl", hash = "sha256:d4188e73af84ca82468f09cadc5ac4db578109e52acb4518d8154698d3a87ca2", size = 64995, upload-time = "2025-08-10T21:26:43.889Z" },
+    { url = "https://files.pythonhosted.org/packages/e2/37/7d218ce5d92dadc5ebdd9070d903e0c7cf7edfe03f179433ac4d13ce659c/kiwisolver-1.4.9-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:5a0f2724dfd4e3b3ac5a82436a8e6fd16baa7d507117e4279b660fe8ca38a3a1", size = 126510, upload-time = "2025-08-10T21:26:44.915Z" },
+    { url = "https://files.pythonhosted.org/packages/23/b0/e85a2b48233daef4b648fb657ebbb6f8367696a2d9548a00b4ee0eb67803/kiwisolver-1.4.9-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:1b11d6a633e4ed84fc0ddafd4ebfd8ea49b3f25082c04ad12b8315c11d504dc1", size = 67903, upload-time = "2025-08-10T21:26:45.934Z" },
+    { url = "https://files.pythonhosted.org/packages/44/98/f2425bc0113ad7de24da6bb4dae1343476e95e1d738be7c04d31a5d037fd/kiwisolver-1.4.9-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61874cdb0a36016354853593cffc38e56fc9ca5aa97d2c05d3dcf6922cd55a11", size = 66402, upload-time = "2025-08-10T21:26:47.101Z" },
+    { url = "https://files.pythonhosted.org/packages/98/d8/594657886df9f34c4177cc353cc28ca7e6e5eb562d37ccc233bff43bbe2a/kiwisolver-1.4.9-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:60c439763a969a6af93b4881db0eed8fadf93ee98e18cbc35bc8da868d0c4f0c", size = 1582135, upload-time = "2025-08-10T21:26:48.665Z" },
+    { url = "https://files.pythonhosted.org/packages/5c/c6/38a115b7170f8b306fc929e166340c24958347308ea3012c2b44e7e295db/kiwisolver-1.4.9-cp313-cp313t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:92a2f997387a1b79a75e7803aa7ded2cfbe2823852ccf1ba3bcf613b62ae3197", size = 1389409, upload-time = "2025-08-10T21:26:50.335Z" },
+    { url = "https://files.pythonhosted.org/packages/bf/3b/e04883dace81f24a568bcee6eb3001da4ba05114afa622ec9b6fafdc1f5e/kiwisolver-1.4.9-cp313-cp313t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a31d512c812daea6d8b3be3b2bfcbeb091dbb09177706569bcfc6240dcf8b41c", size = 1401763, upload-time = "2025-08-10T21:26:51.867Z" },
+    { url = "https://files.pythonhosted.org/packages/9f/80/20ace48e33408947af49d7d15c341eaee69e4e0304aab4b7660e234d6288/kiwisolver-1.4.9-cp313-cp313t-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:52a15b0f35dad39862d376df10c5230155243a2c1a436e39eb55623ccbd68185", size = 1453643, upload-time = "2025-08-10T21:26:53.592Z" },
+    { url = "https://files.pythonhosted.org/packages/64/31/6ce4380a4cd1f515bdda976a1e90e547ccd47b67a1546d63884463c92ca9/kiwisolver-1.4.9-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a30fd6fdef1430fd9e1ba7b3398b5ee4e2887783917a687d86ba69985fb08748", size = 2330818, upload-time = "2025-08-10T21:26:55.051Z" },
+    { url = "https://files.pythonhosted.org/packages/fa/e9/3f3fcba3bcc7432c795b82646306e822f3fd74df0ee81f0fa067a1f95668/kiwisolver-1.4.9-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:cc9617b46837c6468197b5945e196ee9ca43057bb7d9d1ae688101e4e1dddf64", size = 2419963, upload-time = "2025-08-10T21:26:56.421Z" },
+    { url = "https://files.pythonhosted.org/packages/99/43/7320c50e4133575c66e9f7dadead35ab22d7c012a3b09bb35647792b2a6d/kiwisolver-1.4.9-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:0ab74e19f6a2b027ea4f845a78827969af45ce790e6cb3e1ebab71bdf9f215ff", size = 2594639, upload-time = "2025-08-10T21:26:57.882Z" },
+    { url = "https://files.pythonhosted.org/packages/65/d6/17ae4a270d4a987ef8a385b906d2bdfc9fce502d6dc0d3aea865b47f548c/kiwisolver-1.4.9-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:dba5ee5d3981160c28d5490f0d1b7ed730c22470ff7f6cc26cfcfaacb9896a07", size = 2391741, upload-time = "2025-08-10T21:26:59.237Z" },
+    { url = "https://files.pythonhosted.org/packages/2a/8f/8f6f491d595a9e5912971f3f863d81baddccc8a4d0c3749d6a0dd9ffc9df/kiwisolver-1.4.9-cp313-cp313t-win_arm64.whl", hash = "sha256:0749fd8f4218ad2e851e11cc4dc05c7cbc0cbc4267bdfdb31782e65aace4ee9c", size = 68646, upload-time = "2025-08-10T21:27:00.52Z" },
+    { url = "https://files.pythonhosted.org/packages/a3/0f/36d89194b5a32c054ce93e586d4049b6c2c22887b0eb229c61c68afd3078/kiwisolver-1.4.9-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:720e05574713db64c356e86732c0f3c5252818d05f9df320f0ad8380641acea5", size = 60104, upload-time = "2025-08-10T21:27:43.287Z" },
+    { url = "https://files.pythonhosted.org/packages/52/ba/4ed75f59e4658fd21fe7dde1fee0ac397c678ec3befba3fe6482d987af87/kiwisolver-1.4.9-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:17680d737d5335b552994a2008fab4c851bcd7de33094a82067ef3a576ff02fa", size = 58592, upload-time = "2025-08-10T21:27:44.314Z" },
+    { url = "https://files.pythonhosted.org/packages/33/01/a8ea7c5ea32a9b45ceeaee051a04c8ed4320f5add3c51bfa20879b765b70/kiwisolver-1.4.9-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:85b5352f94e490c028926ea567fc569c52ec79ce131dadb968d3853e809518c2", size = 80281, upload-time = "2025-08-10T21:27:45.369Z" },
+    { url = "https://files.pythonhosted.org/packages/da/e3/dbd2ecdce306f1d07a1aaf324817ee993aab7aee9db47ceac757deabafbe/kiwisolver-1.4.9-pp311-pypy311_pp73-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:464415881e4801295659462c49461a24fb107c140de781d55518c4b80cb6790f", size = 78009, upload-time = "2025-08-10T21:27:46.376Z" },
+    { url = "https://files.pythonhosted.org/packages/da/e9/0d4add7873a73e462aeb45c036a2dead2562b825aa46ba326727b3f31016/kiwisolver-1.4.9-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:fb940820c63a9590d31d88b815e7a3aa5915cad3ce735ab45f0c730b39547de1", size = 73929, upload-time = "2025-08-10T21:27:48.236Z" },
+]
+
+[[package]]
+name = "lark"
+version = "1.3.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/da/34/28fff3ab31ccff1fd4f6c7c7b0ceb2b6968d8ea4950663eadcb5720591a0/lark-1.3.1.tar.gz", hash = "sha256:b426a7a6d6d53189d318f2b6236ab5d6429eaf09259f1ca33eb716eed10d2905", size = 382732, upload-time = "2025-10-27T18:25:56.653Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/82/3d/14ce75ef66813643812f3093ab17e46d3a206942ce7376d31ec2d36229e7/lark-1.3.1-py3-none-any.whl", hash = "sha256:c629b661023a014c37da873b4ff58a817398d12635d3bbb2c5a03be7fe5d1e12", size = 113151, upload-time = "2025-10-27T18:25:54.882Z" },
+]
+
+[[package]]
+name = "llvmlite"
+version = "0.46.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/74/cd/08ae687ba099c7e3d21fe2ea536500563ef1943c5105bf6ab4ee3829f68e/llvmlite-0.46.0.tar.gz", hash = "sha256:227c9fd6d09dce2783c18b754b7cd9d9b3b3515210c46acc2d3c5badd9870ceb", size = 193456, upload-time = "2025-12-08T18:15:36.295Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/7a/a1/2ad4b2367915faeebe8447f0a057861f646dbf5fbbb3561db42c65659cf3/llvmlite-0.46.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:82f3d39b16f19aa1a56d5fe625883a6ab600d5cc9ea8906cca70ce94cabba067", size = 37232766, upload-time = "2025-12-08T18:14:48.836Z" },
+    { url = "https://files.pythonhosted.org/packages/12/b5/99cf8772fdd846c07da4fd70f07812a3c8fd17ea2409522c946bb0f2b277/llvmlite-0.46.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a3df43900119803bbc52720e758c76f316a9a0f34612a886862dfe0a5591a17e", size = 56275175, upload-time = "2025-12-08T18:14:51.604Z" },
+    { url = "https://files.pythonhosted.org/packages/38/f2/ed806f9c003563732da156139c45d970ee435bd0bfa5ed8de87ba972b452/llvmlite-0.46.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:de183fefc8022d21b0aa37fc3e90410bc3524aed8617f0ff76732fc6c3af5361", size = 55128630, upload-time = "2025-12-08T18:14:55.107Z" },
+    { url = "https://files.pythonhosted.org/packages/19/0c/8f5a37a65fc9b7b17408508145edd5f86263ad69c19d3574e818f533a0eb/llvmlite-0.46.0-cp311-cp311-win_amd64.whl", hash = "sha256:e8b10bc585c58bdffec9e0c309bb7d51be1f2f15e169a4b4d42f2389e431eb93", size = 38138652, upload-time = "2025-12-08T18:14:58.171Z" },
+    { url = "https://files.pythonhosted.org/packages/2b/f8/4db016a5e547d4e054ff2f3b99203d63a497465f81ab78ec8eb2ff7b2304/llvmlite-0.46.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6b9588ad4c63b4f0175a3984b85494f0c927c6b001e3a246a3a7fb3920d9a137", size = 37232767, upload-time = "2025-12-08T18:15:00.737Z" },
+    { url = "https://files.pythonhosted.org/packages/aa/85/4890a7c14b4fa54400945cb52ac3cd88545bbdb973c440f98ca41591cdc5/llvmlite-0.46.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3535bd2bb6a2d7ae4012681ac228e5132cdb75fefb1bcb24e33f2f3e0c865ed4", size = 56275176, upload-time = "2025-12-08T18:15:03.936Z" },
+    { url = "https://files.pythonhosted.org/packages/6a/07/3d31d39c1a1a08cd5337e78299fca77e6aebc07c059fbd0033e3edfab45c/llvmlite-0.46.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4cbfd366e60ff87ea6cc62f50bc4cd800ebb13ed4c149466f50cf2163a473d1e", size = 55128630, upload-time = "2025-12-08T18:15:07.196Z" },
+    { url = "https://files.pythonhosted.org/packages/2a/6b/d139535d7590a1bba1ceb68751bef22fadaa5b815bbdf0e858e3875726b2/llvmlite-0.46.0-cp312-cp312-win_amd64.whl", hash = "sha256:398b39db462c39563a97b912d4f2866cd37cba60537975a09679b28fbbc0fb38", size = 38138940, upload-time = "2025-12-08T18:15:10.162Z" },
+    { url = "https://files.pythonhosted.org/packages/e6/ff/3eba7eb0aed4b6fca37125387cd417e8c458e750621fce56d2c541f67fa8/llvmlite-0.46.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:30b60892d034bc560e0ec6654737aaa74e5ca327bd8114d82136aa071d611172", size = 37232767, upload-time = "2025-12-08T18:15:13.22Z" },
+    { url = "https://files.pythonhosted.org/packages/0e/54/737755c0a91558364b9200702c3c9c15d70ed63f9b98a2c32f1c2aa1f3ba/llvmlite-0.46.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6cc19b051753368a9c9f31dc041299059ee91aceec81bd57b0e385e5d5bf1a54", size = 56275176, upload-time = "2025-12-08T18:15:16.339Z" },
+    { url = "https://files.pythonhosted.org/packages/e6/91/14f32e1d70905c1c0aa4e6609ab5d705c3183116ca02ac6df2091868413a/llvmlite-0.46.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bca185892908f9ede48c0acd547fe4dc1bafefb8a4967d47db6cf664f9332d12", size = 55128629, upload-time = "2025-12-08T18:15:19.493Z" },
+    { url = "https://files.pythonhosted.org/packages/4a/a7/d526ae86708cea531935ae777b6dbcabe7db52718e6401e0fb9c5edea80e/llvmlite-0.46.0-cp313-cp313-win_amd64.whl", hash = "sha256:67438fd30e12349ebb054d86a5a1a57fd5e87d264d2451bcfafbbbaa25b82a35", size = 38138941, upload-time = "2025-12-08T18:15:22.536Z" },
+]
+
+[[package]]
+name = "markdown"
+version = "3.10.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/b7/b1/af95bcae8549f1f3fd70faacb29075826a0d689a27f232e8cee315efa053/markdown-3.10.1.tar.gz", hash = "sha256:1c19c10bd5c14ac948c53d0d762a04e2fa35a6d58a6b7b1e6bfcbe6fefc0001a", size = 365402, upload-time = "2026-01-21T18:09:28.206Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/59/1b/6ef961f543593969d25b2afe57a3564200280528caa9bd1082eecdd7b3bc/markdown-3.10.1-py3-none-any.whl", hash = "sha256:867d788939fe33e4b736426f5b9f651ad0c0ae0ecf89df0ca5d1176c70812fe3", size = 107684, upload-time = "2026-01-21T18:09:27.203Z" },
+]
+
+[[package]]
+name = "markupsafe"
+version = "3.0.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698", size = 80313, upload-time = "2025-09-27T18:37:40.426Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/08/db/fefacb2136439fc8dd20e797950e749aa1f4997ed584c62cfb8ef7c2be0e/markupsafe-3.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cc7ea17a6824959616c525620e387f6dd30fec8cb44f649e31712db02123dad", size = 11631, upload-time = "2025-09-27T18:36:18.185Z" },
+    { url = "https://files.pythonhosted.org/packages/e1/2e/5898933336b61975ce9dc04decbc0a7f2fee78c30353c5efba7f2d6ff27a/markupsafe-3.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bd4cd07944443f5a265608cc6aab442e4f74dff8088b0dfc8238647b8f6ae9a", size = 12058, upload-time = "2025-09-27T18:36:19.444Z" },
+    { url = "https://files.pythonhosted.org/packages/1d/09/adf2df3699d87d1d8184038df46a9c80d78c0148492323f4693df54e17bb/markupsafe-3.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b5420a1d9450023228968e7e6a9ce57f65d148ab56d2313fcd589eee96a7a50", size = 24287, upload-time = "2025-09-27T18:36:20.768Z" },
+    { url = "https://files.pythonhosted.org/packages/30/ac/0273f6fcb5f42e314c6d8cd99effae6a5354604d461b8d392b5ec9530a54/markupsafe-3.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0bf2a864d67e76e5c9a34dc26ec616a66b9888e25e7b9460e1c76d3293bd9dbf", size = 22940, upload-time = "2025-09-27T18:36:22.249Z" },
+    { url = "https://files.pythonhosted.org/packages/19/ae/31c1be199ef767124c042c6c3e904da327a2f7f0cd63a0337e1eca2967a8/markupsafe-3.0.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc51efed119bc9cfdf792cdeaa4d67e8f6fcccab66ed4bfdd6bde3e59bfcbb2f", size = 21887, upload-time = "2025-09-27T18:36:23.535Z" },
+    { url = "https://files.pythonhosted.org/packages/b2/76/7edcab99d5349a4532a459e1fe64f0b0467a3365056ae550d3bcf3f79e1e/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:068f375c472b3e7acbe2d5318dea141359e6900156b5b2ba06a30b169086b91a", size = 23692, upload-time = "2025-09-27T18:36:24.823Z" },
+    { url = "https://files.pythonhosted.org/packages/a4/28/6e74cdd26d7514849143d69f0bf2399f929c37dc2b31e6829fd2045b2765/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7be7b61bb172e1ed687f1754f8e7484f1c8019780f6f6b0786e76bb01c2ae115", size = 21471, upload-time = "2025-09-27T18:36:25.95Z" },
+    { url = "https://files.pythonhosted.org/packages/62/7e/a145f36a5c2945673e590850a6f8014318d5577ed7e5920a4b3448e0865d/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f9e130248f4462aaa8e2552d547f36ddadbeaa573879158d721bbd33dfe4743a", size = 22923, upload-time = "2025-09-27T18:36:27.109Z" },
+    { url = "https://files.pythonhosted.org/packages/0f/62/d9c46a7f5c9adbeeeda52f5b8d802e1094e9717705a645efc71b0913a0a8/markupsafe-3.0.3-cp311-cp311-win32.whl", hash = "sha256:0db14f5dafddbb6d9208827849fad01f1a2609380add406671a26386cdf15a19", size = 14572, upload-time = "2025-09-27T18:36:28.045Z" },
+    { url = "https://files.pythonhosted.org/packages/83/8a/4414c03d3f891739326e1783338e48fb49781cc915b2e0ee052aa490d586/markupsafe-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:de8a88e63464af587c950061a5e6a67d3632e36df62b986892331d4620a35c01", size = 15077, upload-time = "2025-09-27T18:36:29.025Z" },
+    { url = "https://files.pythonhosted.org/packages/35/73/893072b42e6862f319b5207adc9ae06070f095b358655f077f69a35601f0/markupsafe-3.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:3b562dd9e9ea93f13d53989d23a7e775fdfd1066c33494ff43f5418bc8c58a5c", size = 13876, upload-time = "2025-09-27T18:36:29.954Z" },
+    { url = "https://files.pythonhosted.org/packages/5a/72/147da192e38635ada20e0a2e1a51cf8823d2119ce8883f7053879c2199b5/markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e", size = 11615, upload-time = "2025-09-27T18:36:30.854Z" },
+    { url = "https://files.pythonhosted.org/packages/9a/81/7e4e08678a1f98521201c3079f77db69fb552acd56067661f8c2f534a718/markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce", size = 12020, upload-time = "2025-09-27T18:36:31.971Z" },
+    { url = "https://files.pythonhosted.org/packages/1e/2c/799f4742efc39633a1b54a92eec4082e4f815314869865d876824c257c1e/markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d", size = 24332, upload-time = "2025-09-27T18:36:32.813Z" },
+    { url = "https://files.pythonhosted.org/packages/3c/2e/8d0c2ab90a8c1d9a24f0399058ab8519a3279d1bd4289511d74e909f060e/markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d", size = 22947, upload-time = "2025-09-27T18:36:33.86Z" },
+    { url = "https://files.pythonhosted.org/packages/2c/54/887f3092a85238093a0b2154bd629c89444f395618842e8b0c41783898ea/markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a", size = 21962, upload-time = "2025-09-27T18:36:35.099Z" },
+    { url = "https://files.pythonhosted.org/packages/c9/2f/336b8c7b6f4a4d95e91119dc8521402461b74a485558d8f238a68312f11c/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b", size = 23760, upload-time = "2025-09-27T18:36:36.001Z" },
+    { url = "https://files.pythonhosted.org/packages/32/43/67935f2b7e4982ffb50a4d169b724d74b62a3964bc1a9a527f5ac4f1ee2b/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f", size = 21529, upload-time = "2025-09-27T18:36:36.906Z" },
+    { url = "https://files.pythonhosted.org/packages/89/e0/4486f11e51bbba8b0c041098859e869e304d1c261e59244baa3d295d47b7/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b", size = 23015, upload-time = "2025-09-27T18:36:37.868Z" },
+    { url = "https://files.pythonhosted.org/packages/2f/e1/78ee7a023dac597a5825441ebd17170785a9dab23de95d2c7508ade94e0e/markupsafe-3.0.3-cp312-cp312-win32.whl", hash = "sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d", size = 14540, upload-time = "2025-09-27T18:36:38.761Z" },
+    { url = "https://files.pythonhosted.org/packages/aa/5b/bec5aa9bbbb2c946ca2733ef9c4ca91c91b6a24580193e891b5f7dbe8e1e/markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c", size = 15105, upload-time = "2025-09-27T18:36:39.701Z" },
+    { url = "https://files.pythonhosted.org/packages/e5/f1/216fc1bbfd74011693a4fd837e7026152e89c4bcf3e77b6692fba9923123/markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f", size = 13906, upload-time = "2025-09-27T18:36:40.689Z" },
+    { url = "https://files.pythonhosted.org/packages/38/2f/907b9c7bbba283e68f20259574b13d005c121a0fa4c175f9bed27c4597ff/markupsafe-3.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e1cf1972137e83c5d4c136c43ced9ac51d0e124706ee1c8aa8532c1287fa8795", size = 11622, upload-time = "2025-09-27T18:36:41.777Z" },
+    { url = "https://files.pythonhosted.org/packages/9c/d9/5f7756922cdd676869eca1c4e3c0cd0df60ed30199ffd775e319089cb3ed/markupsafe-3.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:116bb52f642a37c115f517494ea5feb03889e04df47eeff5b130b1808ce7c219", size = 12029, upload-time = "2025-09-27T18:36:43.257Z" },
+    { url = "https://files.pythonhosted.org/packages/00/07/575a68c754943058c78f30db02ee03a64b3c638586fba6a6dd56830b30a3/markupsafe-3.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:133a43e73a802c5562be9bbcd03d090aa5a1fe899db609c29e8c8d815c5f6de6", size = 24374, upload-time = "2025-09-27T18:36:44.508Z" },
+    { url = "https://files.pythonhosted.org/packages/a9/21/9b05698b46f218fc0e118e1f8168395c65c8a2c750ae2bab54fc4bd4e0e8/markupsafe-3.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfcd093f13f0f0b7fdd0f198b90053bf7b2f02a3927a30e63f3ccc9df56b676", size = 22980, upload-time = "2025-09-27T18:36:45.385Z" },
+    { url = "https://files.pythonhosted.org/packages/7f/71/544260864f893f18b6827315b988c146b559391e6e7e8f7252839b1b846a/markupsafe-3.0.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:509fa21c6deb7a7a273d629cf5ec029bc209d1a51178615ddf718f5918992ab9", size = 21990, upload-time = "2025-09-27T18:36:46.916Z" },
+    { url = "https://files.pythonhosted.org/packages/c2/28/b50fc2f74d1ad761af2f5dcce7492648b983d00a65b8c0e0cb457c82ebbe/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4afe79fb3de0b7097d81da19090f4df4f8d3a2b3adaa8764138aac2e44f3af1", size = 23784, upload-time = "2025-09-27T18:36:47.884Z" },
+    { url = "https://files.pythonhosted.org/packages/ed/76/104b2aa106a208da8b17a2fb72e033a5a9d7073c68f7e508b94916ed47a9/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:795e7751525cae078558e679d646ae45574b47ed6e7771863fcc079a6171a0fc", size = 21588, upload-time = "2025-09-27T18:36:48.82Z" },
+    { url = "https://files.pythonhosted.org/packages/b5/99/16a5eb2d140087ebd97180d95249b00a03aa87e29cc224056274f2e45fd6/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8485f406a96febb5140bfeca44a73e3ce5116b2501ac54fe953e488fb1d03b12", size = 23041, upload-time = "2025-09-27T18:36:49.797Z" },
+    { url = "https://files.pythonhosted.org/packages/19/bc/e7140ed90c5d61d77cea142eed9f9c303f4c4806f60a1044c13e3f1471d0/markupsafe-3.0.3-cp313-cp313-win32.whl", hash = "sha256:bdd37121970bfd8be76c5fb069c7751683bdf373db1ed6c010162b2a130248ed", size = 14543, upload-time = "2025-09-27T18:36:51.584Z" },
+    { url = "https://files.pythonhosted.org/packages/05/73/c4abe620b841b6b791f2edc248f556900667a5a1cf023a6646967ae98335/markupsafe-3.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:9a1abfdc021a164803f4d485104931fb8f8c1efd55bc6b748d2f5774e78b62c5", size = 15113, upload-time = "2025-09-27T18:36:52.537Z" },
+    { url = "https://files.pythonhosted.org/packages/f0/3a/fa34a0f7cfef23cf9500d68cb7c32dd64ffd58a12b09225fb03dd37d5b80/markupsafe-3.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:7e68f88e5b8799aa49c85cd116c932a1ac15caaa3f5db09087854d218359e485", size = 13911, upload-time = "2025-09-27T18:36:53.513Z" },
+    { url = "https://files.pythonhosted.org/packages/e4/d7/e05cd7efe43a88a17a37b3ae96e79a19e846f3f456fe79c57ca61356ef01/markupsafe-3.0.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:218551f6df4868a8d527e3062d0fb968682fe92054e89978594c28e642c43a73", size = 11658, upload-time = "2025-09-27T18:36:54.819Z" },
+    { url = "https://files.pythonhosted.org/packages/99/9e/e412117548182ce2148bdeacdda3bb494260c0b0184360fe0d56389b523b/markupsafe-3.0.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3524b778fe5cfb3452a09d31e7b5adefeea8c5be1d43c4f810ba09f2ceb29d37", size = 12066, upload-time = "2025-09-27T18:36:55.714Z" },
+    { url = "https://files.pythonhosted.org/packages/bc/e6/fa0ffcda717ef64a5108eaa7b4f5ed28d56122c9a6d70ab8b72f9f715c80/markupsafe-3.0.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e885a3d1efa2eadc93c894a21770e4bc67899e3543680313b09f139e149ab19", size = 25639, upload-time = "2025-09-27T18:36:56.908Z" },
+    { url = "https://files.pythonhosted.org/packages/96/ec/2102e881fe9d25fc16cb4b25d5f5cde50970967ffa5dddafdb771237062d/markupsafe-3.0.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8709b08f4a89aa7586de0aadc8da56180242ee0ada3999749b183aa23df95025", size = 23569, upload-time = "2025-09-27T18:36:57.913Z" },
+    { url = "https://files.pythonhosted.org/packages/4b/30/6f2fce1f1f205fc9323255b216ca8a235b15860c34b6798f810f05828e32/markupsafe-3.0.3-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b8512a91625c9b3da6f127803b166b629725e68af71f8184ae7e7d54686a56d6", size = 23284, upload-time = "2025-09-27T18:36:58.833Z" },
+    { url = "https://files.pythonhosted.org/packages/58/47/4a0ccea4ab9f5dcb6f79c0236d954acb382202721e704223a8aafa38b5c8/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9b79b7a16f7fedff2495d684f2b59b0457c3b493778c9eed31111be64d58279f", size = 24801, upload-time = "2025-09-27T18:36:59.739Z" },
+    { url = "https://files.pythonhosted.org/packages/6a/70/3780e9b72180b6fecb83a4814d84c3bf4b4ae4bf0b19c27196104149734c/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:12c63dfb4a98206f045aa9563db46507995f7ef6d83b2f68eda65c307c6829eb", size = 22769, upload-time = "2025-09-27T18:37:00.719Z" },
+    { url = "https://files.pythonhosted.org/packages/98/c5/c03c7f4125180fc215220c035beac6b9cb684bc7a067c84fc69414d315f5/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8f71bc33915be5186016f675cd83a1e08523649b0e33efdb898db577ef5bb009", size = 23642, upload-time = "2025-09-27T18:37:01.673Z" },
+    { url = "https://files.pythonhosted.org/packages/80/d6/2d1b89f6ca4bff1036499b1e29a1d02d282259f3681540e16563f27ebc23/markupsafe-3.0.3-cp313-cp313t-win32.whl", hash = "sha256:69c0b73548bc525c8cb9a251cddf1931d1db4d2258e9599c28c07ef3580ef354", size = 14612, upload-time = "2025-09-27T18:37:02.639Z" },
+    { url = "https://files.pythonhosted.org/packages/2b/98/e48a4bfba0a0ffcf9925fe2d69240bfaa19c6f7507b8cd09c70684a53c1e/markupsafe-3.0.3-cp313-cp313t-win_amd64.whl", hash = "sha256:1b4b79e8ebf6b55351f0d91fe80f893b4743f104bff22e90697db1590e47a218", size = 15200, upload-time = "2025-09-27T18:37:03.582Z" },
+    { url = "https://files.pythonhosted.org/packages/0e/72/e3cc540f351f316e9ed0f092757459afbc595824ca724cbc5a5d4263713f/markupsafe-3.0.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287", size = 13973, upload-time = "2025-09-27T18:37:04.929Z" },
+]
+
+[[package]]
+name = "matplotlib"
+version = "3.10.8"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "contourpy" },
+    { name = "cycler" },
+    { name = "fonttools" },
+    { name = "kiwisolver" },
+    { name = "numpy" },
+    { name = "packaging" },
+    { name = "pillow" },
+    { name = "pyparsing" },
+    { name = "python-dateutil" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/8a/76/d3c6e3a13fe484ebe7718d14e269c9569c4eb0020a968a327acb3b9a8fe6/matplotlib-3.10.8.tar.gz", hash = "sha256:2299372c19d56bcd35cf05a2738308758d32b9eaed2371898d8f5bd33f084aa3", size = 34806269, upload-time = "2025-12-10T22:56:51.155Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/f8/86/de7e3a1cdcfc941483af70609edc06b83e7c8a0e0dc9ac325200a3f4d220/matplotlib-3.10.8-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6be43b667360fef5c754dda5d25a32e6307a03c204f3c0fc5468b78fa87b4160", size = 8251215, upload-time = "2025-12-10T22:55:16.175Z" },
+    { url = "https://files.pythonhosted.org/packages/fd/14/baad3222f424b19ce6ad243c71de1ad9ec6b2e4eb1e458a48fdc6d120401/matplotlib-3.10.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a2b336e2d91a3d7006864e0990c83b216fcdca64b5a6484912902cef87313d78", size = 8139625, upload-time = "2025-12-10T22:55:17.712Z" },
+    { url = "https://files.pythonhosted.org/packages/8f/a0/7024215e95d456de5883e6732e708d8187d9753a21d32f8ddb3befc0c445/matplotlib-3.10.8-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:efb30e3baaea72ce5928e32bab719ab4770099079d66726a62b11b1ef7273be4", size = 8712614, upload-time = "2025-12-10T22:55:20.8Z" },
+    { url = "https://files.pythonhosted.org/packages/5a/f4/b8347351da9a5b3f41e26cf547252d861f685c6867d179a7c9d60ad50189/matplotlib-3.10.8-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d56a1efd5bfd61486c8bc968fa18734464556f0fb8e51690f4ac25d85cbbbbc2", size = 9540997, upload-time = "2025-12-10T22:55:23.258Z" },
+    { url = "https://files.pythonhosted.org/packages/9e/c0/c7b914e297efe0bc36917bf216b2acb91044b91e930e878ae12981e461e5/matplotlib-3.10.8-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:238b7ce5717600615c895050239ec955d91f321c209dd110db988500558e70d6", size = 9596825, upload-time = "2025-12-10T22:55:25.217Z" },
+    { url = "https://files.pythonhosted.org/packages/6f/d3/a4bbc01c237ab710a1f22b4da72f4ff6d77eb4c7735ea9811a94ae239067/matplotlib-3.10.8-cp311-cp311-win_amd64.whl", hash = "sha256:18821ace09c763ec93aef5eeff087ee493a24051936d7b9ebcad9662f66501f9", size = 8135090, upload-time = "2025-12-10T22:55:27.162Z" },
+    { url = "https://files.pythonhosted.org/packages/89/dd/a0b6588f102beab33ca6f5218b31725216577b2a24172f327eaf6417d5c9/matplotlib-3.10.8-cp311-cp311-win_arm64.whl", hash = "sha256:bab485bcf8b1c7d2060b4fcb6fc368a9e6f4cd754c9c2fea281f4be21df394a2", size = 8012377, upload-time = "2025-12-10T22:55:29.185Z" },
+    { url = "https://files.pythonhosted.org/packages/9e/67/f997cdcbb514012eb0d10cd2b4b332667997fb5ebe26b8d41d04962fa0e6/matplotlib-3.10.8-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:64fcc24778ca0404ce0cb7b6b77ae1f4c7231cdd60e6778f999ee05cbd581b9a", size = 8260453, upload-time = "2025-12-10T22:55:30.709Z" },
+    { url = "https://files.pythonhosted.org/packages/7e/65/07d5f5c7f7c994f12c768708bd2e17a4f01a2b0f44a1c9eccad872433e2e/matplotlib-3.10.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b9a5ca4ac220a0cdd1ba6bcba3608547117d30468fefce49bb26f55c1a3d5c58", size = 8148321, upload-time = "2025-12-10T22:55:33.265Z" },
+    { url = "https://files.pythonhosted.org/packages/3e/f3/c5195b1ae57ef85339fd7285dfb603b22c8b4e79114bae5f4f0fcf688677/matplotlib-3.10.8-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3ab4aabc72de4ff77b3ec33a6d78a68227bf1123465887f9905ba79184a1cc04", size = 8716944, upload-time = "2025-12-10T22:55:34.922Z" },
+    { url = "https://files.pythonhosted.org/packages/00/f9/7638f5cc82ec8a7aa005de48622eecc3ed7c9854b96ba15bd76b7fd27574/matplotlib-3.10.8-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:24d50994d8c5816ddc35411e50a86ab05f575e2530c02752e02538122613371f", size = 9550099, upload-time = "2025-12-10T22:55:36.789Z" },
+    { url = "https://files.pythonhosted.org/packages/57/61/78cd5920d35b29fd2a0fe894de8adf672ff52939d2e9b43cb83cd5ce1bc7/matplotlib-3.10.8-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:99eefd13c0dc3b3c1b4d561c1169e65fe47aab7b8158754d7c084088e2329466", size = 9613040, upload-time = "2025-12-10T22:55:38.715Z" },
+    { url = "https://files.pythonhosted.org/packages/30/4e/c10f171b6e2f44d9e3a2b96efa38b1677439d79c99357600a62cc1e9594e/matplotlib-3.10.8-cp312-cp312-win_amd64.whl", hash = "sha256:dd80ecb295460a5d9d260df63c43f4afbdd832d725a531f008dad1664f458adf", size = 8142717, upload-time = "2025-12-10T22:55:41.103Z" },
+    { url = "https://files.pythonhosted.org/packages/f1/76/934db220026b5fef85f45d51a738b91dea7d70207581063cd9bd8fafcf74/matplotlib-3.10.8-cp312-cp312-win_arm64.whl", hash = "sha256:3c624e43ed56313651bc18a47f838b60d7b8032ed348911c54906b130b20071b", size = 8012751, upload-time = "2025-12-10T22:55:42.684Z" },
+    { url = "https://files.pythonhosted.org/packages/3d/b9/15fd5541ef4f5b9a17eefd379356cf12175fe577424e7b1d80676516031a/matplotlib-3.10.8-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:3f2e409836d7f5ac2f1c013110a4d50b9f7edc26328c108915f9075d7d7a91b6", size = 8261076, upload-time = "2025-12-10T22:55:44.648Z" },
+    { url = "https://files.pythonhosted.org/packages/8d/a0/2ba3473c1b66b9c74dc7107c67e9008cb1782edbe896d4c899d39ae9cf78/matplotlib-3.10.8-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:56271f3dac49a88d7fca5060f004d9d22b865f743a12a23b1e937a0be4818ee1", size = 8148794, upload-time = "2025-12-10T22:55:46.252Z" },
+    { url = "https://files.pythonhosted.org/packages/75/97/a471f1c3eb1fd6f6c24a31a5858f443891d5127e63a7788678d14e249aea/matplotlib-3.10.8-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a0a7f52498f72f13d4a25ea70f35f4cb60642b466cbb0a9be951b5bc3f45a486", size = 8718474, upload-time = "2025-12-10T22:55:47.864Z" },
+    { url = "https://files.pythonhosted.org/packages/01/be/cd478f4b66f48256f42927d0acbcd63a26a893136456cd079c0cc24fbabf/matplotlib-3.10.8-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:646d95230efb9ca614a7a594d4fcacde0ac61d25e37dd51710b36477594963ce", size = 9549637, upload-time = "2025-12-10T22:55:50.048Z" },
+    { url = "https://files.pythonhosted.org/packages/5d/7c/8dc289776eae5109e268c4fb92baf870678dc048a25d4ac903683b86d5bf/matplotlib-3.10.8-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f89c151aab2e2e23cb3fe0acad1e8b82841fd265379c4cecd0f3fcb34c15e0f6", size = 9613678, upload-time = "2025-12-10T22:55:52.21Z" },
+    { url = "https://files.pythonhosted.org/packages/64/40/37612487cc8a437d4dd261b32ca21fe2d79510fe74af74e1f42becb1bdb8/matplotlib-3.10.8-cp313-cp313-win_amd64.whl", hash = "sha256:e8ea3e2d4066083e264e75c829078f9e149fa119d27e19acd503de65e0b13149", size = 8142686, upload-time = "2025-12-10T22:55:54.253Z" },
+    { url = "https://files.pythonhosted.org/packages/66/52/8d8a8730e968185514680c2a6625943f70269509c3dcfc0dcf7d75928cb8/matplotlib-3.10.8-cp313-cp313-win_arm64.whl", hash = "sha256:c108a1d6fa78a50646029cb6d49808ff0fc1330fda87fa6f6250c6b5369b6645", size = 8012917, upload-time = "2025-12-10T22:55:56.268Z" },
+    { url = "https://files.pythonhosted.org/packages/b5/27/51fe26e1062f298af5ef66343d8ef460e090a27fea73036c76c35821df04/matplotlib-3.10.8-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:ad3d9833a64cf48cc4300f2b406c3d0f4f4724a91c0bd5640678a6ba7c102077", size = 8305679, upload-time = "2025-12-10T22:55:57.856Z" },
+    { url = "https://files.pythonhosted.org/packages/2c/1e/4de865bc591ac8e3062e835f42dd7fe7a93168d519557837f0e37513f629/matplotlib-3.10.8-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:eb3823f11823deade26ce3b9f40dcb4a213da7a670013929f31d5f5ed1055b22", size = 8198336, upload-time = "2025-12-10T22:55:59.371Z" },
+    { url = "https://files.pythonhosted.org/packages/c6/cb/2f7b6e75fb4dce87ef91f60cac4f6e34f4c145ab036a22318ec837971300/matplotlib-3.10.8-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d9050fee89a89ed57b4fb2c1bfac9a3d0c57a0d55aed95949eedbc42070fea39", size = 8731653, upload-time = "2025-12-10T22:56:01.032Z" },
+    { url = "https://files.pythonhosted.org/packages/46/b3/bd9c57d6ba670a37ab31fb87ec3e8691b947134b201f881665b28cc039ff/matplotlib-3.10.8-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b44d07310e404ba95f8c25aa5536f154c0a8ec473303535949e52eb71d0a1565", size = 9561356, upload-time = "2025-12-10T22:56:02.95Z" },
+    { url = "https://files.pythonhosted.org/packages/c0/3d/8b94a481456dfc9dfe6e39e93b5ab376e50998cddfd23f4ae3b431708f16/matplotlib-3.10.8-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:0a33deb84c15ede243aead39f77e990469fff93ad1521163305095b77b72ce4a", size = 9614000, upload-time = "2025-12-10T22:56:05.411Z" },
+    { url = "https://files.pythonhosted.org/packages/bd/cd/bc06149fe5585ba800b189a6a654a75f1f127e8aab02fd2be10df7fa500c/matplotlib-3.10.8-cp313-cp313t-win_amd64.whl", hash = "sha256:3a48a78d2786784cc2413e57397981fb45c79e968d99656706018d6e62e57958", size = 8220043, upload-time = "2025-12-10T22:56:07.551Z" },
+    { url = "https://files.pythonhosted.org/packages/e3/de/b22cf255abec916562cc04eef457c13e58a1990048de0c0c3604d082355e/matplotlib-3.10.8-cp313-cp313t-win_arm64.whl", hash = "sha256:15d30132718972c2c074cd14638c7f4592bd98719e2308bccea40e0538bc0cb5", size = 8062075, upload-time = "2025-12-10T22:56:09.178Z" },
+    { url = "https://files.pythonhosted.org/packages/04/30/3afaa31c757f34b7725ab9d2ba8b48b5e89c2019c003e7d0ead143aabc5a/matplotlib-3.10.8-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:6da7c2ce169267d0d066adcf63758f0604aa6c3eebf67458930f9d9b79ad1db1", size = 8249198, upload-time = "2025-12-10T22:56:45.584Z" },
+    { url = "https://files.pythonhosted.org/packages/48/2f/6334aec331f57485a642a7c8be03cb286f29111ae71c46c38b363230063c/matplotlib-3.10.8-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:9153c3292705be9f9c64498a8872118540c3f4123d1a1c840172edf262c8be4a", size = 8136817, upload-time = "2025-12-10T22:56:47.339Z" },
+    { url = "https://files.pythonhosted.org/packages/73/e4/6d6f14b2a759c622f191b2d67e9075a3f56aaccb3be4bb9bb6890030d0a0/matplotlib-3.10.8-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1ae029229a57cd1e8fe542485f27e7ca7b23aa9e8944ddb4985d0bc444f1eca2", size = 8713867, upload-time = "2025-12-10T22:56:48.954Z" },
+]
+
+[[package]]
+name = "matplotlib-inline"
+version = "0.2.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "traitlets" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/c7/74/97e72a36efd4ae2bccb3463284300f8953f199b5ffbc04cbbb0ec78f74b1/matplotlib_inline-0.2.1.tar.gz", hash = "sha256:e1ee949c340d771fc39e241ea75683deb94762c8fa5f2927ec57c83c4dffa9fe", size = 8110, upload-time = "2025-10-23T09:00:22.126Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/af/33/ee4519fa02ed11a94aef9559552f3b17bb863f2ecfe1a35dc7f548cde231/matplotlib_inline-0.2.1-py3-none-any.whl", hash = "sha256:d56ce5156ba6085e00a9d54fead6ed29a9c47e215cd1bba2e976ef39f5710a76", size = 9516, upload-time = "2025-10-23T09:00:20.675Z" },
+]
+
+[[package]]
+name = "maturin"
+version = "1.11.5"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/a4/84/bfed8cc10e2d8b6656cf0f0ca6609218e6fcb45a62929f5094e1063570f7/maturin-1.11.5.tar.gz", hash = "sha256:7579cf47640fb9595a19fe83a742cbf63203f0343055c349c1cab39045a30c29", size = 226885, upload-time = "2026-01-09T11:06:13.801Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/d2/6c/3443d2f8c6d4eae5fc7479cd4053542aff4c1a8566d0019d0612d241b15a/maturin-1.11.5-py3-none-linux_armv6l.whl", hash = "sha256:edd1d4d35050ea2b9ef42aa01e87fe019a1e822940346b35ccb973e0aa8f6d82", size = 8845897, upload-time = "2026-01-09T11:06:17.327Z" },
+    { url = "https://files.pythonhosted.org/packages/c5/03/abf1826d8aebc0d47ef6d21bdd752d98d63ac4372ad2b115db9cd5176229/maturin-1.11.5-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:2a596eab137cb3e169b97e89a739515abfa7a8755e2e5f0fc91432ef446f74f4", size = 17233855, upload-time = "2026-01-09T11:06:04.272Z" },
+    { url = "https://files.pythonhosted.org/packages/90/a1/5ad62913271724035a7e4bcf796d7c95b4119317ae5f8cb034844aa99bc4/maturin-1.11.5-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:1c27a2eb47821edf26c75d100b3150b52dca2c1a5f074d7514af06f7a7acb9d5", size = 8881776, upload-time = "2026-01-09T11:06:10.24Z" },
+    { url = "https://files.pythonhosted.org/packages/c6/66/997974b44f8d3de641281ec04fbf5b6ca821bdc8291a2fa73305978db74d/maturin-1.11.5-py3-none-manylinux_2_12_i686.manylinux2010_i686.musllinux_1_1_i686.whl", hash = "sha256:f1320dacddcd3aa84a4bdfc77ee6fdb60e4c3835c853d7eb79c09473628b0498", size = 8870347, upload-time = "2026-01-09T11:06:12.178Z" },
+    { url = "https://files.pythonhosted.org/packages/58/e0/c8fa042daf0608cc2e9a59b6df3a9e287bfc7f229136f17727f4118bac2d/maturin-1.11.5-py3-none-manylinux_2_12_x86_64.manylinux2010_x86_64.musllinux_1_1_x86_64.whl", hash = "sha256:ffe7418834ff3b4a6c987187b7abb85ba033f4733e089d77d84e2de87057b4e7", size = 9291396, upload-time = "2026-01-09T11:06:02.05Z" },
+    { url = "https://files.pythonhosted.org/packages/99/af/9d3edc8375efc8d435d5f24794bc4de234d4e743447592da970d53b31361/maturin-1.11.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.musllinux_1_1_aarch64.whl", hash = "sha256:c739b243d012386902f112ea63a54a94848932b70ae3565fa5e121fd1c0200e0", size = 8827831, upload-time = "2026-01-09T11:06:19.523Z" },
+    { url = "https://files.pythonhosted.org/packages/8a/12/cc341f6abbf9005f90935a4ee5dc7b30e2df7d1bb90b96d48b756b2c0ee7/maturin-1.11.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.musllinux_1_1_armv7l.whl", hash = "sha256:8127d2cd25950bacbcdc8a2ec6daab1d4d27200f7d73964392680ad64d27f7f0", size = 8718895, upload-time = "2026-01-09T11:06:21.617Z" },
+    { url = "https://files.pythonhosted.org/packages/76/17/654a59c66287e287373f2a0086e4fc8a23f0545a81c2bd6e324db26a5801/maturin-1.11.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.musllinux_1_1_ppc64le.whl", hash = "sha256:2a4e872fb78e77748217084ffeb59de565d08a86ccefdace054520aaa7b66db4", size = 11384741, upload-time = "2026-01-09T11:06:15.261Z" },
+    { url = "https://files.pythonhosted.org/packages/2e/da/7118de648182971d723ea99d79c55007f96cdafc95f5322cc1ad15f6683e/maturin-1.11.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2079447967819b5cf615e5b5b99a406d662effdc8d6afd493dcd253c6afc3707", size = 9423814, upload-time = "2026-01-09T11:05:57.242Z" },
+    { url = "https://files.pythonhosted.org/packages/cf/8f/be14395c6e23b19ddaa0c171e68915bdcd1ef61ad1f411739c6721196903/maturin-1.11.5-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:50f6c668c1d5d4d4dc1c3ffec7b4270dab493e5b2368f8e4213f4bcde6a50eea", size = 9104378, upload-time = "2026-01-09T11:05:59.835Z" },
+    { url = "https://files.pythonhosted.org/packages/77/83/53ea82a2f42a03930ea5545673d11a4ef49bb886827353a701f41a5f11c4/maturin-1.11.5-py3-none-win32.whl", hash = "sha256:49f85ce6cbe478e9743ecddd6da2964afc0ded57013aa4d054256be702d23d40", size = 7738696, upload-time = "2026-01-09T11:06:06.651Z" },
+    { url = "https://files.pythonhosted.org/packages/3c/41/353a26d49aa80081c514a6354d429efbecedb90d0153ec598cece3baa607/maturin-1.11.5-py3-none-win_amd64.whl", hash = "sha256:70d3e5beffb9ef9dfae5f3c1a7eeb572091505eb8cb076e9434518df1c42a73b", size = 9029838, upload-time = "2026-01-09T11:05:54.543Z" },
+    { url = "https://files.pythonhosted.org/packages/15/67/c94f8f5440bc42d54113a2d99de0d6107f06b5a33f31823e52b2715d856f/maturin-1.11.5-py3-none-win_arm64.whl", hash = "sha256:9348f7f0a346108e0c96e6719be91da4470bd43c15802435e9f4157f5cca43d4", size = 7624029, upload-time = "2026-01-09T11:06:08.728Z" },
+]
+
+[[package]]
+name = "memory-profiler"
+version = "0.61.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "psutil" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/b2/88/e1907e1ca3488f2d9507ca8b0ae1add7b1cd5d3ca2bc8e5b329382ea2c7b/memory_profiler-0.61.0.tar.gz", hash = "sha256:4e5b73d7864a1d1292fb76a03e82a3e78ef934d06828a698d9dada76da2067b0", size = 35935, upload-time = "2022-11-15T17:57:28.994Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/49/26/aaca612a0634ceede20682e692a6c55e35a94c21ba36b807cc40fe910ae1/memory_profiler-0.61.0-py3-none-any.whl", hash = "sha256:400348e61031e3942ad4d4109d18753b2fb08c2f6fb8290671c5513a34182d84", size = 31803, upload-time = "2022-11-15T17:57:27.031Z" },
+]
+
+[[package]]
+name = "mergedeep"
+version = "1.3.4"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/3a/41/580bb4006e3ed0361b8151a01d324fb03f420815446c7def45d02f74c270/mergedeep-1.3.4.tar.gz", hash = "sha256:0096d52e9dad9939c3d975a774666af186eda617e6ca84df4c94dec30004f2a8", size = 4661, upload-time = "2021-02-05T18:55:30.623Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/2c/19/04f9b178c2d8a15b076c8b5140708fa6ffc5601fb6f1e975537072df5b2a/mergedeep-1.3.4-py3-none-any.whl", hash = "sha256:70775750742b25c0d8f36c55aed03d24c3384d17c951b3175d898bd778ef0307", size = 6354, upload-time = "2021-02-05T18:55:29.583Z" },
+]
+
+[[package]]
+name = "mistune"
+version = "3.2.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/9d/55/d01f0c4b45ade6536c51170b9043db8b2ec6ddf4a35c7ea3f5f559ac935b/mistune-3.2.0.tar.gz", hash = "sha256:708487c8a8cdd99c9d90eb3ed4c3ed961246ff78ac82f03418f5183ab70e398a", size = 95467, upload-time = "2025-12-23T11:36:34.994Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/9b/f7/4a5e785ec9fbd65146a27b6b70b6cdc161a66f2024e4b04ac06a67f5578b/mistune-3.2.0-py3-none-any.whl", hash = "sha256:febdc629a3c78616b94393c6580551e0e34cc289987ec6c35ed3f4be42d0eee1", size = 53598, upload-time = "2025-12-23T11:36:33.211Z" },
+]
+
+[[package]]
+name = "mkdocs"
+version = "1.6.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "click" },
+    { name = "colorama", marker = "sys_platform == 'win32'" },
+    { name = "ghp-import" },
+    { name = "jinja2" },
+    { name = "markdown" },
+    { name = "markupsafe" },
+    { name = "mergedeep" },
+    { name = "mkdocs-get-deps" },
+    { name = "packaging" },
+    { name = "pathspec" },
+    { name = "pyyaml" },
+    { name = "pyyaml-env-tag" },
+    { name = "watchdog" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/bc/c6/bbd4f061bd16b378247f12953ffcb04786a618ce5e904b8c5a01a0309061/mkdocs-1.6.1.tar.gz", hash = "sha256:7b432f01d928c084353ab39c57282f29f92136665bdd6abf7c1ec8d822ef86f2", size = 3889159, upload-time = "2024-08-30T12:24:06.899Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/22/5b/dbc6a8cddc9cfa9c4971d59fb12bb8d42e161b7e7f8cc89e49137c5b279c/mkdocs-1.6.1-py3-none-any.whl", hash = "sha256:db91759624d1647f3f34aa0c3f327dd2601beae39a366d6e064c03468d35c20e", size = 3864451, upload-time = "2024-08-30T12:24:05.054Z" },
+]
+
+[[package]]
+name = "mkdocs-autorefs"
+version = "1.4.3"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "markdown" },
+    { name = "markupsafe" },
+    { name = "mkdocs" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/51/fa/9124cd63d822e2bcbea1450ae68cdc3faf3655c69b455f3a7ed36ce6c628/mkdocs_autorefs-1.4.3.tar.gz", hash = "sha256:beee715b254455c4aa93b6ef3c67579c399ca092259cc41b7d9342573ff1fc75", size = 55425, upload-time = "2025-08-26T14:23:17.223Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/9f/4d/7123b6fa2278000688ebd338e2a06d16870aaf9eceae6ba047ea05f92df1/mkdocs_autorefs-1.4.3-py3-none-any.whl", hash = "sha256:469d85eb3114801d08e9cc55d102b3ba65917a869b893403b8987b601cf55dc9", size = 25034, upload-time = "2025-08-26T14:23:15.906Z" },
+]
+
+[[package]]
+name = "mkdocs-get-deps"
+version = "0.2.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "mergedeep" },
+    { name = "platformdirs" },
+    { name = "pyyaml" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/98/f5/ed29cd50067784976f25ed0ed6fcd3c2ce9eb90650aa3b2796ddf7b6870b/mkdocs_get_deps-0.2.0.tar.gz", hash = "sha256:162b3d129c7fad9b19abfdcb9c1458a651628e4b1dea628ac68790fb3061c60c", size = 10239, upload-time = "2023-11-20T17:51:09.981Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/9f/d4/029f984e8d3f3b6b726bd33cafc473b75e9e44c0f7e80a5b29abc466bdea/mkdocs_get_deps-0.2.0-py3-none-any.whl", hash = "sha256:2bf11d0b133e77a0dd036abeeb06dec8775e46efa526dc70667d8863eefc6134", size = 9521, upload-time = "2023-11-20T17:51:08.587Z" },
+]
+
+[[package]]
+name = "mkdocs-material"
+version = "9.7.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "babel" },
+    { name = "backrefs" },
+    { name = "colorama" },
+    { name = "jinja2" },
+    { name = "markdown" },
+    { name = "mkdocs" },
+    { name = "mkdocs-material-extensions" },
+    { name = "paginate" },
+    { name = "pygments" },
+    { name = "pymdown-extensions" },
+    { name = "requests" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/27/e2/2ffc356cd72f1473d07c7719d82a8f2cbd261666828614ecb95b12169f41/mkdocs_material-9.7.1.tar.gz", hash = "sha256:89601b8f2c3e6c6ee0a918cc3566cb201d40bf37c3cd3c2067e26fadb8cce2b8", size = 4094392, upload-time = "2025-12-18T09:49:00.308Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/3e/32/ed071cb721aca8c227718cffcf7bd539620e9799bbf2619e90c757bfd030/mkdocs_material-9.7.1-py3-none-any.whl", hash = "sha256:3f6100937d7d731f87f1e3e3b021c97f7239666b9ba1151ab476cabb96c60d5c", size = 9297166, upload-time = "2025-12-18T09:48:56.664Z" },
+]
+
+[[package]]
+name = "mkdocs-material-extensions"
+version = "1.3.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/79/9b/9b4c96d6593b2a541e1cb8b34899a6d021d208bb357042823d4d2cabdbe7/mkdocs_material_extensions-1.3.1.tar.gz", hash = "sha256:10c9511cea88f568257f960358a467d12b970e1f7b2c0e5fb2bb48cab1928443", size = 11847, upload-time = "2023-11-22T19:09:45.208Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/5b/54/662a4743aa81d9582ee9339d4ffa3c8fd40a4965e033d77b9da9774d3960/mkdocs_material_extensions-1.3.1-py3-none-any.whl", hash = "sha256:adff8b62700b25cb77b53358dad940f3ef973dd6db797907c49e3c2ef3ab4e31", size = 8728, upload-time = "2023-11-22T19:09:43.465Z" },
+]
+
+[[package]]
+name = "mkdocstrings"
+version = "1.0.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "jinja2" },
+    { name = "markdown" },
+    { name = "markupsafe" },
+    { name = "mkdocs" },
+    { name = "mkdocs-autorefs" },
+    { name = "pymdown-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/63/4d/1ca8a9432579184599714aaeb36591414cc3d3bfd9d494f6db540c995ae4/mkdocstrings-1.0.2.tar.gz", hash = "sha256:48edd0ccbcb9e30a3121684e165261a9d6af4d63385fc4f39a54a49ac3b32ea8", size = 101048, upload-time = "2026-01-24T15:57:25.735Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/57/32/407a9a5fdd7d8ecb4af8d830b9bcdf47ea68f916869b3f44bac31f081250/mkdocstrings-1.0.2-py3-none-any.whl", hash = "sha256:41897815a8026c3634fe5d51472c3a569f92ded0ad8c7a640550873eea3b6817", size = 35443, upload-time = "2026-01-24T15:57:23.933Z" },
+]
+
+[package.optional-dependencies]
+python = [
+    { name = "mkdocstrings-python" },
+]
+
+[[package]]
+name = "mkdocstrings-python"
+version = "2.0.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "griffe" },
+    { name = "mkdocs-autorefs" },
+    { name = "mkdocstrings" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/24/75/d30af27a2906f00eb90143470272376d728521997800f5dce5b340ba35bc/mkdocstrings_python-2.0.1.tar.gz", hash = "sha256:843a562221e6a471fefdd4b45cc6c22d2607ccbad632879234fa9692e9cf7732", size = 199345, upload-time = "2025-12-03T14:26:11.755Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/81/06/c5f8deba7d2cbdfa7967a716ae801aa9ca5f734b8f54fd473ef77a088dbe/mkdocstrings_python-2.0.1-py3-none-any.whl", hash = "sha256:66ecff45c5f8b71bf174e11d49afc845c2dfc7fc0ab17a86b6b337e0f24d8d90", size = 105055, upload-time = "2025-12-03T14:26:10.184Z" },
+]
+
+[[package]]
+name = "nbclient"
+version = "0.10.4"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "jupyter-client" },
+    { name = "jupyter-core" },
+    { name = "nbformat" },
+    { name = "traitlets" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/56/91/1c1d5a4b9a9ebba2b4e32b8c852c2975c872aec1fe42ab5e516b2cecd193/nbclient-0.10.4.tar.gz", hash = "sha256:1e54091b16e6da39e297b0ece3e10f6f29f4ac4e8ee515d29f8a7099bd6553c9", size = 62554, upload-time = "2025-12-23T07:45:46.369Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/83/a0/5b0c2f11142ed1dddec842457d3f65eaf71a0080894eb6f018755b319c3a/nbclient-0.10.4-py3-none-any.whl", hash = "sha256:9162df5a7373d70d606527300a95a975a47c137776cd942e52d9c7e29ff83440", size = 25465, upload-time = "2025-12-23T07:45:44.51Z" },
+]
+
+[[package]]
+name = "nbconvert"
+version = "7.16.6"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "beautifulsoup4" },
+    { name = "bleach", extra = ["css"] },
+    { name = "defusedxml" },
+    { name = "jinja2" },
+    { name = "jupyter-core" },
+    { name = "jupyterlab-pygments" },
+    { name = "markupsafe" },
+    { name = "mistune" },
+    { name = "nbclient" },
+    { name = "nbformat" },
+    { name = "packaging" },
+    { name = "pandocfilters" },
+    { name = "pygments" },
+    { name = "traitlets" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/a3/59/f28e15fc47ffb73af68a8d9b47367a8630d76e97ae85ad18271b9db96fdf/nbconvert-7.16.6.tar.gz", hash = "sha256:576a7e37c6480da7b8465eefa66c17844243816ce1ccc372633c6b71c3c0f582", size = 857715, upload-time = "2025-01-28T09:29:14.724Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/cc/9a/cd673b2f773a12c992f41309ef81b99da1690426bd2f96957a7ade0d3ed7/nbconvert-7.16.6-py3-none-any.whl", hash = "sha256:1375a7b67e0c2883678c48e506dc320febb57685e5ee67faa51b18a90f3a712b", size = 258525, upload-time = "2025-01-28T09:29:12.551Z" },
+]
+
+[[package]]
+name = "nbformat"
+version = "5.10.4"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "fastjsonschema" },
+    { name = "jsonschema" },
+    { name = "jupyter-core" },
+    { name = "traitlets" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/6d/fd/91545e604bc3dad7dca9ed03284086039b294c6b3d75c0d2fa45f9e9caf3/nbformat-5.10.4.tar.gz", hash = "sha256:322168b14f937a5d11362988ecac2a4952d3d8e3a2cbeb2319584631226d5b3a", size = 142749, upload-time = "2024-04-04T11:20:37.371Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/a9/82/0340caa499416c78e5d8f5f05947ae4bc3cba53c9f038ab6e9ed964e22f1/nbformat-5.10.4-py3-none-any.whl", hash = "sha256:3b48d6c8fbca4b299bf3982ea7db1af21580e4fec269ad087b9e81588891200b", size = 78454, upload-time = "2024-04-04T11:20:34.895Z" },
+]
+
+[[package]]
+name = "nest-asyncio"
+version = "1.6.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/83/f8/51569ac65d696c8ecbee95938f89d4abf00f47d58d48f6fbabfe8f0baefe/nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe", size = 7418, upload-time = "2024-01-21T14:25:19.227Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/a0/c4/c2971a3ba4c6103a3d10c4b0f24f461ddc027f0f09763220cf35ca1401b3/nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c", size = 5195, upload-time = "2024-01-21T14:25:17.223Z" },
+]
+
+[[package]]
+name = "nodeenv"
+version = "1.10.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/24/bf/d1bda4f6168e0b2e9e5958945e01910052158313224ada5ce1fb2e1113b8/nodeenv-1.10.0.tar.gz", hash = "sha256:996c191ad80897d076bdfba80a41994c2b47c68e224c542b48feba42ba00f8bb", size = 55611, upload-time = "2025-12-20T14:08:54.006Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/88/b2/d0896bdcdc8d28a7fc5717c305f1a861c26e18c05047949fb371034d98bd/nodeenv-1.10.0-py2.py3-none-any.whl", hash = "sha256:5bb13e3eed2923615535339b3c620e76779af4cb4c6a90deccc9e36b274d3827", size = 23438, upload-time = "2025-12-20T14:08:52.782Z" },
+]
+
+[[package]]
+name = "notebook"
+version = "7.5.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "jupyter-server" },
+    { name = "jupyterlab" },
+    { name = "jupyterlab-server" },
+    { name = "notebook-shim" },
+    { name = "tornado" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/3b/b6/6b2c653570b02e4ec2a94c0646a4a25132be0749617776d0b72a2bcedb9b/notebook-7.5.2.tar.gz", hash = "sha256:83e82f93c199ca730313bea1bb24bc279ea96f74816d038a92d26b6b9d5f3e4a", size = 14059605, upload-time = "2026-01-12T14:56:53.483Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/32/55/b754cd51c6011d90ef03e3f06136f1ebd44658b9529dbcf0c15fc0d6a0b7/notebook-7.5.2-py3-none-any.whl", hash = "sha256:17d078a98603d70d62b6b4b3fcb67e87d7a68c398a7ae9b447eb2d7d9aec9979", size = 14468915, upload-time = "2026-01-12T14:56:47.87Z" },
+]
+
+[[package]]
+name = "notebook-shim"
+version = "0.2.4"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "jupyter-server" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/54/d2/92fa3243712b9a3e8bafaf60aac366da1cada3639ca767ff4b5b3654ec28/notebook_shim-0.2.4.tar.gz", hash = "sha256:b4b2cfa1b65d98307ca24361f5b30fe785b53c3fd07b7a47e89acb5e6ac638cb", size = 13167, upload-time = "2024-02-14T23:35:18.353Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/f9/33/bd5b9137445ea4b680023eb0469b2bb969d61303dedb2aac6560ff3d14a1/notebook_shim-0.2.4-py3-none-any.whl", hash = "sha256:411a5be4e9dc882a074ccbcae671eda64cceb068767e9a3419096986560e1cef", size = 13307, upload-time = "2024-02-14T23:35:16.286Z" },
+]
+
+[[package]]
+name = "numba"
+version = "0.63.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "llvmlite" },
+    { name = "numpy" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/dc/60/0145d479b2209bd8fdae5f44201eceb8ce5a23e0ed54c71f57db24618665/numba-0.63.1.tar.gz", hash = "sha256:b320aa675d0e3b17b40364935ea52a7b1c670c9037c39cf92c49502a75902f4b", size = 2761666, upload-time = "2025-12-10T02:57:39.002Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/70/90/5f8614c165d2e256fbc6c57028519db6f32e4982475a372bbe550ea0454c/numba-0.63.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b33db00f18ccc790ee9911ce03fcdfe9d5124637d1ecc266f5ae0df06e02fec3", size = 2680501, upload-time = "2025-12-10T02:57:09.797Z" },
+    { url = "https://files.pythonhosted.org/packages/dc/9d/d0afc4cf915edd8eadd9b2ab5b696242886ee4f97720d9322650d66a88c6/numba-0.63.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7d31ea186a78a7c0f6b1b2a3fe68057fdb291b045c52d86232b5383b6cf4fc25", size = 3744945, upload-time = "2025-12-10T02:57:11.697Z" },
+    { url = "https://files.pythonhosted.org/packages/05/a9/d82f38f2ab73f3be6f838a826b545b80339762ee8969c16a8bf1d39395a8/numba-0.63.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ed3bb2fbdb651d6aac394388130a7001aab6f4541837123a4b4ab8b02716530c", size = 3450827, upload-time = "2025-12-10T02:57:13.709Z" },
+    { url = "https://files.pythonhosted.org/packages/18/3f/a9b106e93c5bd7434e65f044bae0d204e20aa7f7f85d72ceb872c7c04216/numba-0.63.1-cp311-cp311-win_amd64.whl", hash = "sha256:1ecbff7688f044b1601be70113e2fb1835367ee0b28ffa8f3adf3a05418c5c87", size = 2747262, upload-time = "2025-12-10T02:57:15.664Z" },
+    { url = "https://files.pythonhosted.org/packages/14/9c/c0974cd3d00ff70d30e8ff90522ba5fbb2bcee168a867d2321d8d0457676/numba-0.63.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2819cd52afa5d8d04e057bdfd54367575105f8829350d8fb5e4066fb7591cc71", size = 2680981, upload-time = "2025-12-10T02:57:17.579Z" },
+    { url = "https://files.pythonhosted.org/packages/cb/70/ea2bc45205f206b7a24ee68a159f5097c9ca7e6466806e7c213587e0c2b1/numba-0.63.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5cfd45dbd3d409e713b1ccfdc2ee72ca82006860254429f4ef01867fdba5845f", size = 3801656, upload-time = "2025-12-10T02:57:19.106Z" },
+    { url = "https://files.pythonhosted.org/packages/0d/82/4f4ba4fd0f99825cbf3cdefd682ca3678be1702b63362011de6e5f71f831/numba-0.63.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:69a599df6976c03b7ecf15d05302696f79f7e6d10d620367407517943355bcb0", size = 3501857, upload-time = "2025-12-10T02:57:20.721Z" },
+    { url = "https://files.pythonhosted.org/packages/af/fd/6540456efa90b5f6604a86ff50dabefb187e43557e9081adcad3be44f048/numba-0.63.1-cp312-cp312-win_amd64.whl", hash = "sha256:bbad8c63e4fc7eb3cdb2c2da52178e180419f7969f9a685f283b313a70b92af3", size = 2750282, upload-time = "2025-12-10T02:57:22.474Z" },
+    { url = "https://files.pythonhosted.org/packages/57/f7/e19e6eff445bec52dde5bed1ebb162925a8e6f988164f1ae4b3475a73680/numba-0.63.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:0bd4fd820ef7442dcc07da184c3f54bb41d2bdb7b35bacf3448e73d081f730dc", size = 2680954, upload-time = "2025-12-10T02:57:24.145Z" },
+    { url = "https://files.pythonhosted.org/packages/e9/6c/1e222edba1e20e6b113912caa9b1665b5809433cbcb042dfd133c6f1fd38/numba-0.63.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:53de693abe4be3bd4dee38e1c55f01c55ff644a6a3696a3670589e6e4c39cde2", size = 3809736, upload-time = "2025-12-10T02:57:25.836Z" },
+    { url = "https://files.pythonhosted.org/packages/76/0a/590bad11a8b3feeac30a24d01198d46bdb76ad15c70d3a530691ce3cae58/numba-0.63.1-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:81227821a72a763c3d4ac290abbb4371d855b59fdf85d5af22a47c0e86bf8c7e", size = 3508854, upload-time = "2025-12-10T02:57:27.438Z" },
+    { url = "https://files.pythonhosted.org/packages/4e/f5/3800384a24eed1e4d524669cdbc0b9b8a628800bb1e90d7bd676e5f22581/numba-0.63.1-cp313-cp313-win_amd64.whl", hash = "sha256:eb227b07c2ac37b09432a9bda5142047a2d1055646e089d4a240a2643e508102", size = 2750228, upload-time = "2025-12-10T02:57:30.36Z" },
+]
+
+[[package]]
+name = "numpy"
+version = "1.26.4"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/65/6e/09db70a523a96d25e115e71cc56a6f9031e7b8cd166c1ac8438307c14058/numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010", size = 15786129, upload-time = "2024-02-06T00:26:44.495Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/11/57/baae43d14fe163fa0e4c47f307b6b2511ab8d7d30177c491960504252053/numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71", size = 20630554, upload-time = "2024-02-05T23:51:50.149Z" },
+    { url = "https://files.pythonhosted.org/packages/1a/2e/151484f49fd03944c4a3ad9c418ed193cfd02724e138ac8a9505d056c582/numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef", size = 13997127, upload-time = "2024-02-05T23:52:15.314Z" },
+    { url = "https://files.pythonhosted.org/packages/79/ae/7e5b85136806f9dadf4878bf73cf223fe5c2636818ba3ab1c585d0403164/numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e", size = 14222994, upload-time = "2024-02-05T23:52:47.569Z" },
+    { url = "https://files.pythonhosted.org/packages/3a/d0/edc009c27b406c4f9cbc79274d6e46d634d139075492ad055e3d68445925/numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5", size = 18252005, upload-time = "2024-02-05T23:53:15.637Z" },
+    { url = "https://files.pythonhosted.org/packages/09/bf/2b1aaf8f525f2923ff6cfcf134ae5e750e279ac65ebf386c75a0cf6da06a/numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a", size = 13885297, upload-time = "2024-02-05T23:53:42.16Z" },
+    { url = "https://files.pythonhosted.org/packages/df/a0/4e0f14d847cfc2a633a1c8621d00724f3206cfeddeb66d35698c4e2cf3d2/numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a", size = 18093567, upload-time = "2024-02-05T23:54:11.696Z" },
+    { url = "https://files.pythonhosted.org/packages/d2/b7/a734c733286e10a7f1a8ad1ae8c90f2d33bf604a96548e0a4a3a6739b468/numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20", size = 5968812, upload-time = "2024-02-05T23:54:26.453Z" },
+    { url = "https://files.pythonhosted.org/packages/3f/6b/5610004206cf7f8e7ad91c5a85a8c71b2f2f8051a0c0c4d5916b76d6cbb2/numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2", size = 15811913, upload-time = "2024-02-05T23:54:53.933Z" },
+    { url = "https://files.pythonhosted.org/packages/95/12/8f2020a8e8b8383ac0177dc9570aad031a3beb12e38847f7129bacd96228/numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218", size = 20335901, upload-time = "2024-02-05T23:55:32.801Z" },
+    { url = "https://files.pythonhosted.org/packages/75/5b/ca6c8bd14007e5ca171c7c03102d17b4f4e0ceb53957e8c44343a9546dcc/numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b", size = 13685868, upload-time = "2024-02-05T23:55:56.28Z" },
+    { url = "https://files.pythonhosted.org/packages/79/f8/97f10e6755e2a7d027ca783f63044d5b1bc1ae7acb12afe6a9b4286eac17/numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b", size = 13925109, upload-time = "2024-02-05T23:56:20.368Z" },
+    { url = "https://files.pythonhosted.org/packages/0f/50/de23fde84e45f5c4fda2488c759b69990fd4512387a8632860f3ac9cd225/numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed", size = 17950613, upload-time = "2024-02-05T23:56:56.054Z" },
+    { url = "https://files.pythonhosted.org/packages/4c/0c/9c603826b6465e82591e05ca230dfc13376da512b25ccd0894709b054ed0/numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a", size = 13572172, upload-time = "2024-02-05T23:57:21.56Z" },
+    { url = "https://files.pythonhosted.org/packages/76/8c/2ba3902e1a0fc1c74962ea9bb33a534bb05984ad7ff9515bf8d07527cadd/numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0", size = 17786643, upload-time = "2024-02-05T23:57:56.585Z" },
+    { url = "https://files.pythonhosted.org/packages/28/4a/46d9e65106879492374999e76eb85f87b15328e06bd1550668f79f7b18c6/numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110", size = 5677803, upload-time = "2024-02-05T23:58:08.963Z" },
+    { url = "https://files.pythonhosted.org/packages/16/2e/86f24451c2d530c88daf997cb8d6ac622c1d40d19f5a031ed68a4b73a374/numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818", size = 15517754, upload-time = "2024-02-05T23:58:36.364Z" },
+]
+
+[[package]]
+name = "overrides"
+version = "7.7.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/36/86/b585f53236dec60aba864e050778b25045f857e17f6e5ea0ae95fe80edd2/overrides-7.7.0.tar.gz", hash = "sha256:55158fa3d93b98cc75299b1e67078ad9003ca27945c76162c1c0766d6f91820a", size = 22812, upload-time = "2024-01-27T21:01:33.423Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/2c/ab/fc8290c6a4c722e5514d80f62b2dc4c4df1a68a41d1364e625c35990fcf3/overrides-7.7.0-py3-none-any.whl", hash = "sha256:c7ed9d062f78b8e4c1a7b70bd8796b35ead4d9f510227ef9c5dc7626c60d7e49", size = 17832, upload-time = "2024-01-27T21:01:31.393Z" },
+]
+
+[[package]]
+name = "packaging"
+version = "26.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/65/ee/299d360cdc32edc7d2cf530f3accf79c4fca01e96ffc950d8a52213bd8e4/packaging-26.0.tar.gz", hash = "sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4", size = 143416, upload-time = "2026-01-21T20:50:39.064Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529", size = 74366, upload-time = "2026-01-21T20:50:37.788Z" },
+]
+
+[[package]]
+name = "paginate"
+version = "0.5.7"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/ec/46/68dde5b6bc00c1296ec6466ab27dddede6aec9af1b99090e1107091b3b84/paginate-0.5.7.tar.gz", hash = "sha256:22bd083ab41e1a8b4f3690544afb2c60c25e5c9a63a30fa2f483f6c60c8e5945", size = 19252, upload-time = "2024-08-25T14:17:24.139Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/90/96/04b8e52da071d28f5e21a805b19cb9390aa17a47462ac87f5e2696b9566d/paginate-0.5.7-py2.py3-none-any.whl", hash = "sha256:b885e2af73abcf01d9559fd5216b57ef722f8c42affbb63942377668e35c7591", size = 13746, upload-time = "2024-08-25T14:17:22.55Z" },
+]
+
+[[package]]
+name = "pandas"
+version = "3.0.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "numpy" },
+    { name = "python-dateutil" },
+    { name = "tzdata", marker = "sys_platform == 'emscripten' or sys_platform == 'win32'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/de/da/b1dc0481ab8d55d0f46e343cfe67d4551a0e14fcee52bd38ca1bd73258d8/pandas-3.0.0.tar.gz", hash = "sha256:0facf7e87d38f721f0af46fe70d97373a37701b1c09f7ed7aeeb292ade5c050f", size = 4633005, upload-time = "2026-01-21T15:52:04.726Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/46/1e/b184654a856e75e975a6ee95d6577b51c271cd92cb2b020c9378f53e0032/pandas-3.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d64ce01eb9cdca96a15266aa679ae50212ec52757c79204dbc7701a222401850", size = 10313247, upload-time = "2026-01-21T15:50:15.775Z" },
+    { url = "https://files.pythonhosted.org/packages/dd/5e/e04a547ad0f0183bf151fd7c7a477468e3b85ff2ad231c566389e6cc9587/pandas-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:613e13426069793aa1ec53bdcc3b86e8d32071daea138bbcf4fa959c9cdaa2e2", size = 9913131, upload-time = "2026-01-21T15:50:18.611Z" },
+    { url = "https://files.pythonhosted.org/packages/a2/93/bb77bfa9fc2aba9f7204db807d5d3fb69832ed2854c60ba91b4c65ba9219/pandas-3.0.0-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0192fee1f1a8e743b464a6607858ee4b071deb0b118eb143d71c2a1d170996d5", size = 10741925, upload-time = "2026-01-21T15:50:21.058Z" },
+    { url = "https://files.pythonhosted.org/packages/62/fb/89319812eb1d714bfc04b7f177895caeba8ab4a37ef6712db75ed786e2e0/pandas-3.0.0-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f0b853319dec8d5e0c8b875374c078ef17f2269986a78168d9bd57e49bf650ae", size = 11245979, upload-time = "2026-01-21T15:50:23.413Z" },
+    { url = "https://files.pythonhosted.org/packages/a9/63/684120486f541fc88da3862ed31165b3b3e12b6a1c7b93be4597bc84e26c/pandas-3.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:707a9a877a876c326ae2cb640fbdc4ef63b0a7b9e2ef55c6df9942dcee8e2af9", size = 11756337, upload-time = "2026-01-21T15:50:25.932Z" },
+    { url = "https://files.pythonhosted.org/packages/39/92/7eb0ad232312b59aec61550c3c81ad0743898d10af5df7f80bc5e5065416/pandas-3.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:afd0aa3d0b5cda6e0b8ffc10dbcca3b09ef3cbcd3fe2b27364f85fdc04e1989d", size = 12325517, upload-time = "2026-01-21T15:50:27.952Z" },
+    { url = "https://files.pythonhosted.org/packages/51/27/bf9436dd0a4fc3130acec0828951c7ef96a0631969613a9a35744baf27f6/pandas-3.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:113b4cca2614ff7e5b9fee9b6f066618fe73c5a83e99d721ffc41217b2bf57dd", size = 9881576, upload-time = "2026-01-21T15:50:30.149Z" },
+    { url = "https://files.pythonhosted.org/packages/e7/2b/c618b871fce0159fd107516336e82891b404e3f340821853c2fc28c7830f/pandas-3.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:c14837eba8e99a8da1527c0280bba29b0eb842f64aa94982c5e21227966e164b", size = 9140807, upload-time = "2026-01-21T15:50:32.308Z" },
+    { url = "https://files.pythonhosted.org/packages/0b/38/db33686f4b5fa64d7af40d96361f6a4615b8c6c8f1b3d334eee46ae6160e/pandas-3.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9803b31f5039b3c3b10cc858c5e40054adb4b29b4d81cb2fd789f4121c8efbcd", size = 10334013, upload-time = "2026-01-21T15:50:34.771Z" },
+    { url = "https://files.pythonhosted.org/packages/a5/7b/9254310594e9774906bacdd4e732415e1f86ab7dbb4b377ef9ede58cd8ec/pandas-3.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:14c2a4099cd38a1d18ff108168ea417909b2dea3bd1ebff2ccf28ddb6a74d740", size = 9874154, upload-time = "2026-01-21T15:50:36.67Z" },
+    { url = "https://files.pythonhosted.org/packages/63/d4/726c5a67a13bc66643e66d2e9ff115cead482a44fc56991d0c4014f15aaf/pandas-3.0.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d257699b9a9960e6125686098d5714ac59d05222bef7a5e6af7a7fd87c650801", size = 10384433, upload-time = "2026-01-21T15:50:39.132Z" },
+    { url = "https://files.pythonhosted.org/packages/bf/2e/9211f09bedb04f9832122942de8b051804b31a39cfbad199a819bb88d9f3/pandas-3.0.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:69780c98f286076dcafca38d8b8eee1676adf220199c0a39f0ecbf976b68151a", size = 10864519, upload-time = "2026-01-21T15:50:41.043Z" },
+    { url = "https://files.pythonhosted.org/packages/00/8d/50858522cdc46ac88b9afdc3015e298959a70a08cd21e008a44e9520180c/pandas-3.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4a66384f017240f3858a4c8a7cf21b0591c3ac885cddb7758a589f0f71e87ebb", size = 11394124, upload-time = "2026-01-21T15:50:43.377Z" },
+    { url = "https://files.pythonhosted.org/packages/86/3f/83b2577db02503cd93d8e95b0f794ad9d4be0ba7cb6c8bcdcac964a34a42/pandas-3.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:be8c515c9bc33989d97b89db66ea0cececb0f6e3c2a87fcc8b69443a6923e95f", size = 11920444, upload-time = "2026-01-21T15:50:45.932Z" },
+    { url = "https://files.pythonhosted.org/packages/64/2d/4f8a2f192ed12c90a0aab47f5557ece0e56b0370c49de9454a09de7381b2/pandas-3.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:a453aad8c4f4e9f166436994a33884442ea62aa8b27d007311e87521b97246e1", size = 9730970, upload-time = "2026-01-21T15:50:47.962Z" },
+    { url = "https://files.pythonhosted.org/packages/d4/64/ff571be435cf1e643ca98d0945d76732c0b4e9c37191a89c8550b105eed1/pandas-3.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:da768007b5a33057f6d9053563d6b74dd6d029c337d93c6d0d22a763a5c2ecc0", size = 9041950, upload-time = "2026-01-21T15:50:50.422Z" },
+    { url = "https://files.pythonhosted.org/packages/6f/fa/7f0ac4ca8877c57537aaff2a842f8760e630d8e824b730eb2e859ffe96ca/pandas-3.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b78d646249b9a2bc191040988c7bb524c92fa8534fb0898a0741d7e6f2ffafa6", size = 10307129, upload-time = "2026-01-21T15:50:52.877Z" },
+    { url = "https://files.pythonhosted.org/packages/6f/11/28a221815dcea4c0c9414dfc845e34a84a6a7dabc6da3194498ed5ba4361/pandas-3.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bc9cba7b355cb4162442a88ce495e01cb605f17ac1e27d6596ac963504e0305f", size = 9850201, upload-time = "2026-01-21T15:50:54.807Z" },
+    { url = "https://files.pythonhosted.org/packages/ba/da/53bbc8c5363b7e5bd10f9ae59ab250fc7a382ea6ba08e4d06d8694370354/pandas-3.0.0-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3c9a1a149aed3b6c9bf246033ff91e1b02d529546c5d6fb6b74a28fea0cf4c70", size = 10354031, upload-time = "2026-01-21T15:50:57.463Z" },
+    { url = "https://files.pythonhosted.org/packages/f7/a3/51e02ebc2a14974170d51e2410dfdab58870ea9bcd37cda15bd553d24dc4/pandas-3.0.0-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:95683af6175d884ee89471842acfca29172a85031fccdabc35e50c0984470a0e", size = 10861165, upload-time = "2026-01-21T15:50:59.32Z" },
+    { url = "https://files.pythonhosted.org/packages/a5/fe/05a51e3cac11d161472b8297bd41723ea98013384dd6d76d115ce3482f9b/pandas-3.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1fbbb5a7288719e36b76b4f18d46ede46e7f916b6c8d9915b756b0a6c3f792b3", size = 11359359, upload-time = "2026-01-21T15:51:02.014Z" },
+    { url = "https://files.pythonhosted.org/packages/ee/56/ba620583225f9b85a4d3e69c01df3e3870659cc525f67929b60e9f21dcd1/pandas-3.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8e8b9808590fa364416b49b2a35c1f4cf2785a6c156935879e57f826df22038e", size = 11912907, upload-time = "2026-01-21T15:51:05.175Z" },
+    { url = "https://files.pythonhosted.org/packages/c9/8c/c6638d9f67e45e07656b3826405c5cc5f57f6fd07c8b2572ade328c86e22/pandas-3.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:98212a38a709feb90ae658cb6227ea3657c22ba8157d4b8f913cd4c950de5e7e", size = 9732138, upload-time = "2026-01-21T15:51:07.569Z" },
+    { url = "https://files.pythonhosted.org/packages/7b/bf/bd1335c3bf1770b6d8fed2799993b11c4971af93bb1b729b9ebbc02ca2ec/pandas-3.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:177d9df10b3f43b70307a149d7ec49a1229a653f907aa60a48f1877d0e6be3be", size = 9033568, upload-time = "2026-01-21T15:51:09.484Z" },
+    { url = "https://files.pythonhosted.org/packages/8e/c6/f5e2171914d5e29b9171d495344097d54e3ffe41d2d85d8115baba4dc483/pandas-3.0.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:2713810ad3806767b89ad3b7b69ba153e1c6ff6d9c20f9c2140379b2a98b6c98", size = 10741936, upload-time = "2026-01-21T15:51:11.693Z" },
+    { url = "https://files.pythonhosted.org/packages/51/88/9a0164f99510a1acb9f548691f022c756c2314aad0d8330a24616c14c462/pandas-3.0.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:15d59f885ee5011daf8335dff47dcb8a912a27b4ad7826dc6cbe809fd145d327", size = 10393884, upload-time = "2026-01-21T15:51:14.197Z" },
+    { url = "https://files.pythonhosted.org/packages/e0/53/b34d78084d88d8ae2b848591229da8826d1e65aacf00b3abe34023467648/pandas-3.0.0-cp313-cp313t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:24e6547fb64d2c92665dd2adbfa4e85fa4fd70a9c070e7cfb03b629a0bbab5eb", size = 10310740, upload-time = "2026-01-21T15:51:16.093Z" },
+    { url = "https://files.pythonhosted.org/packages/5b/d3/bee792e7c3d6930b74468d990604325701412e55d7aaf47460a22311d1a5/pandas-3.0.0-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:48ee04b90e2505c693d3f8e8f524dab8cb8aaf7ddcab52c92afa535e717c4812", size = 10700014, upload-time = "2026-01-21T15:51:18.818Z" },
+    { url = "https://files.pythonhosted.org/packages/55/db/2570bc40fb13aaed1cbc3fbd725c3a60ee162477982123c3adc8971e7ac1/pandas-3.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:66f72fb172959af42a459e27a8d8d2c7e311ff4c1f7db6deb3b643dbc382ae08", size = 11323737, upload-time = "2026-01-21T15:51:20.784Z" },
+    { url = "https://files.pythonhosted.org/packages/bc/2e/297ac7f21c8181b62a4cccebad0a70caf679adf3ae5e83cb676194c8acc3/pandas-3.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4a4a400ca18230976724a5066f20878af785f36c6756e498e94c2a5e5d57779c", size = 11771558, upload-time = "2026-01-21T15:51:22.977Z" },
+    { url = "https://files.pythonhosted.org/packages/0a/46/e1c6876d71c14332be70239acce9ad435975a80541086e5ffba2f249bcf6/pandas-3.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:940eebffe55528074341a5a36515f3e4c5e25e958ebbc764c9502cfc35ba3faa", size = 10473771, upload-time = "2026-01-21T15:51:25.285Z" },
+]
+
+[[package]]
+name = "pandas-stubs"
+version = "2.3.3.260113"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "numpy" },
+    { name = "types-pytz" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/92/5d/be23854a73fda69f1dbdda7bc10fbd6f930bd1fa87aaec389f00c901c1e8/pandas_stubs-2.3.3.260113.tar.gz", hash = "sha256:076e3724bcaa73de78932b012ec64b3010463d377fa63116f4e6850643d93800", size = 116131, upload-time = "2026-01-13T22:30:16.704Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/d1/c6/df1fe324248424f77b89371116dab5243db7f052c32cc9fe7442ad9c5f75/pandas_stubs-2.3.3.260113-py3-none-any.whl", hash = "sha256:ec070b5c576e1badf12544ae50385872f0631fc35d99d00dc598c2954ec564d3", size = 168246, upload-time = "2026-01-13T22:30:15.244Z" },
+]
+
+[[package]]
+name = "pandocfilters"
+version = "1.5.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/70/6f/3dd4940bbe001c06a65f88e36bad298bc7a0de5036115639926b0c5c0458/pandocfilters-1.5.1.tar.gz", hash = "sha256:002b4a555ee4ebc03f8b66307e287fa492e4a77b4ea14d3f934328297bb4939e", size = 8454, upload-time = "2024-01-18T20:08:13.726Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/ef/af/4fbc8cab944db5d21b7e2a5b8e9211a03a79852b1157e2c102fcc61ac440/pandocfilters-1.5.1-py2.py3-none-any.whl", hash = "sha256:93be382804a9cdb0a7267585f157e5d1731bbe5545a85b268d6f5fe6232de2bc", size = 8663, upload-time = "2024-01-18T20:08:11.28Z" },
+]
+
+[[package]]
+name = "parso"
+version = "0.8.5"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/d4/de/53e0bcf53d13e005bd8c92e7855142494f41171b34c2536b86187474184d/parso-0.8.5.tar.gz", hash = "sha256:034d7354a9a018bdce352f48b2a8a450f05e9d6ee85db84764e9b6bd96dafe5a", size = 401205, upload-time = "2025-08-23T15:15:28.028Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/16/32/f8e3c85d1d5250232a5d3477a2a28cc291968ff175caeadaf3cc19ce0e4a/parso-0.8.5-py2.py3-none-any.whl", hash = "sha256:646204b5ee239c396d040b90f9e272e9a8017c630092bf59980beb62fd033887", size = 106668, upload-time = "2025-08-23T15:15:25.663Z" },
+]
+
+[[package]]
+name = "pastel"
+version = "0.2.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/76/f1/4594f5e0fcddb6953e5b8fe00da8c317b8b41b547e2b3ae2da7512943c62/pastel-0.2.1.tar.gz", hash = "sha256:e6581ac04e973cac858828c6202c1e1e81fee1dc7de7683f3e1ffe0bfd8a573d", size = 7555, upload-time = "2020-09-16T19:21:12.43Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/aa/18/a8444036c6dd65ba3624c63b734d3ba95ba63ace513078e1580590075d21/pastel-0.2.1-py2.py3-none-any.whl", hash = "sha256:4349225fcdf6c2bb34d483e523475de5bb04a5c10ef711263452cb37d7dd4364", size = 5955, upload-time = "2020-09-16T19:21:11.409Z" },
+]
+
+[[package]]
+name = "pathspec"
+version = "1.0.4"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/fa/36/e27608899f9b8d4dff0617b2d9ab17ca5608956ca44461ac14ac48b44015/pathspec-1.0.4.tar.gz", hash = "sha256:0210e2ae8a21a9137c0d470578cb0e595af87edaa6ebf12ff176f14a02e0e645", size = 131200, upload-time = "2026-01-27T03:59:46.938Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/ef/3c/2c197d226f9ea224a9ab8d197933f9da0ae0aac5b6e0f884e2b8d9c8e9f7/pathspec-1.0.4-py3-none-any.whl", hash = "sha256:fb6ae2fd4e7c921a165808a552060e722767cfa526f99ca5156ed2ce45a5c723", size = 55206, upload-time = "2026-01-27T03:59:45.137Z" },
+]
+
+[[package]]
+name = "pexpect"
+version = "4.9.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "ptyprocess", marker = "sys_platform != 'emscripten' and sys_platform != 'win32'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/42/92/cc564bf6381ff43ce1f4d06852fc19a2f11d180f23dc32d9588bee2f149d/pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f", size = 166450, upload-time = "2023-11-25T09:07:26.339Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/9e/c3/059298687310d527a58bb01f3b1965787ee3b40dce76752eda8b44e9a2c5/pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523", size = 63772, upload-time = "2023-11-25T06:56:14.81Z" },
+]
+
+[[package]]
+name = "pillow"
+version = "12.1.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/d0/02/d52c733a2452ef1ffcc123b68e6606d07276b0e358db70eabad7e40042b7/pillow-12.1.0.tar.gz", hash = "sha256:5c5ae0a06e9ea030ab786b0251b32c7e4ce10e58d983c0d5c56029455180b5b9", size = 46977283, upload-time = "2026-01-02T09:13:29.892Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/43/c4/bf8328039de6cc22182c3ef007a2abfbbdab153661c0a9aa78af8d706391/pillow-12.1.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:a83e0850cb8f5ac975291ebfc4170ba481f41a28065277f7f735c202cd8e0af3", size = 5304057, upload-time = "2026-01-02T09:10:46.627Z" },
+    { url = "https://files.pythonhosted.org/packages/43/06/7264c0597e676104cc22ca73ee48f752767cd4b1fe084662620b17e10120/pillow-12.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b6e53e82ec2db0717eabb276aa56cf4e500c9a7cec2c2e189b55c24f65a3e8c0", size = 4657811, upload-time = "2026-01-02T09:10:49.548Z" },
+    { url = "https://files.pythonhosted.org/packages/72/64/f9189e44474610daf83da31145fa56710b627b5c4c0b9c235e34058f6b31/pillow-12.1.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:40a8e3b9e8773876d6e30daed22f016509e3987bab61b3b7fe309d7019a87451", size = 6232243, upload-time = "2026-01-02T09:10:51.62Z" },
+    { url = "https://files.pythonhosted.org/packages/ef/30/0df458009be6a4caca4ca2c52975e6275c387d4e5c95544e34138b41dc86/pillow-12.1.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:800429ac32c9b72909c671aaf17ecd13110f823ddb7db4dfef412a5587c2c24e", size = 8037872, upload-time = "2026-01-02T09:10:53.446Z" },
+    { url = "https://files.pythonhosted.org/packages/e4/86/95845d4eda4f4f9557e25381d70876aa213560243ac1a6d619c46caaedd9/pillow-12.1.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0b022eaaf709541b391ee069f0022ee5b36c709df71986e3f7be312e46f42c84", size = 6345398, upload-time = "2026-01-02T09:10:55.426Z" },
+    { url = "https://files.pythonhosted.org/packages/5c/1f/8e66ab9be3aaf1435bc03edd1ebdf58ffcd17f7349c1d970cafe87af27d9/pillow-12.1.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1f345e7bc9d7f368887c712aa5054558bad44d2a301ddf9248599f4161abc7c0", size = 7034667, upload-time = "2026-01-02T09:10:57.11Z" },
+    { url = "https://files.pythonhosted.org/packages/f9/f6/683b83cb9b1db1fb52b87951b1c0b99bdcfceaa75febf11406c19f82cb5e/pillow-12.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d70347c8a5b7ccd803ec0c85c8709f036e6348f1e6a5bf048ecd9c64d3550b8b", size = 6458743, upload-time = "2026-01-02T09:10:59.331Z" },
+    { url = "https://files.pythonhosted.org/packages/9a/7d/de833d63622538c1d58ce5395e7c6cb7e7dce80decdd8bde4a484e095d9f/pillow-12.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1fcc52d86ce7a34fd17cb04e87cfdb164648a3662a6f20565910a99653d66c18", size = 7159342, upload-time = "2026-01-02T09:11:01.82Z" },
+    { url = "https://files.pythonhosted.org/packages/8c/40/50d86571c9e5868c42b81fe7da0c76ca26373f3b95a8dd675425f4a92ec1/pillow-12.1.0-cp311-cp311-win32.whl", hash = "sha256:3ffaa2f0659e2f740473bcf03c702c39a8d4b2b7ffc629052028764324842c64", size = 6328655, upload-time = "2026-01-02T09:11:04.556Z" },
+    { url = "https://files.pythonhosted.org/packages/6c/af/b1d7e301c4cd26cd45d4af884d9ee9b6fab893b0ad2450d4746d74a6968c/pillow-12.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:806f3987ffe10e867bab0ddad45df1148a2b98221798457fa097ad85d6e8bc75", size = 7031469, upload-time = "2026-01-02T09:11:06.538Z" },
+    { url = "https://files.pythonhosted.org/packages/48/36/d5716586d887fb2a810a4a61518a327a1e21c8b7134c89283af272efe84b/pillow-12.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:9f5fefaca968e700ad1a4a9de98bf0869a94e397fe3524c4c9450c1445252304", size = 2452515, upload-time = "2026-01-02T09:11:08.226Z" },
+    { url = "https://files.pythonhosted.org/packages/20/31/dc53fe21a2f2996e1b7d92bf671cdb157079385183ef7c1ae08b485db510/pillow-12.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a332ac4ccb84b6dde65dbace8431f3af08874bf9770719d32a635c4ef411b18b", size = 5262642, upload-time = "2026-01-02T09:11:10.138Z" },
+    { url = "https://files.pythonhosted.org/packages/ab/c1/10e45ac9cc79419cedf5121b42dcca5a50ad2b601fa080f58c22fb27626e/pillow-12.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:907bfa8a9cb790748a9aa4513e37c88c59660da3bcfffbd24a7d9e6abf224551", size = 4657464, upload-time = "2026-01-02T09:11:12.319Z" },
+    { url = "https://files.pythonhosted.org/packages/ad/26/7b82c0ab7ef40ebede7a97c72d473bda5950f609f8e0c77b04af574a0ddb/pillow-12.1.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:efdc140e7b63b8f739d09a99033aa430accce485ff78e6d311973a67b6bf3208", size = 6234878, upload-time = "2026-01-02T09:11:14.096Z" },
+    { url = "https://files.pythonhosted.org/packages/76/25/27abc9792615b5e886ca9411ba6637b675f1b77af3104710ac7353fe5605/pillow-12.1.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bef9768cab184e7ae6e559c032e95ba8d07b3023c289f79a2bd36e8bf85605a5", size = 8044868, upload-time = "2026-01-02T09:11:15.903Z" },
+    { url = "https://files.pythonhosted.org/packages/0a/ea/f200a4c36d836100e7bc738fc48cd963d3ba6372ebc8298a889e0cfc3359/pillow-12.1.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:742aea052cf5ab5034a53c3846165bc3ce88d7c38e954120db0ab867ca242661", size = 6349468, upload-time = "2026-01-02T09:11:17.631Z" },
+    { url = "https://files.pythonhosted.org/packages/11/8f/48d0b77ab2200374c66d344459b8958c86693be99526450e7aee714e03e4/pillow-12.1.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a6dfc2af5b082b635af6e08e0d1f9f1c4e04d17d4e2ca0ef96131e85eda6eb17", size = 7041518, upload-time = "2026-01-02T09:11:19.389Z" },
+    { url = "https://files.pythonhosted.org/packages/1d/23/c281182eb986b5d31f0a76d2a2c8cd41722d6fb8ed07521e802f9bba52de/pillow-12.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:609e89d9f90b581c8d16358c9087df76024cf058fa693dd3e1e1620823f39670", size = 6462829, upload-time = "2026-01-02T09:11:21.28Z" },
+    { url = "https://files.pythonhosted.org/packages/25/ef/7018273e0faac099d7b00982abdcc39142ae6f3bd9ceb06de09779c4a9d6/pillow-12.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:43b4899cfd091a9693a1278c4982f3e50f7fb7cff5153b05174b4afc9593b616", size = 7166756, upload-time = "2026-01-02T09:11:23.559Z" },
+    { url = "https://files.pythonhosted.org/packages/8f/c8/993d4b7ab2e341fe02ceef9576afcf5830cdec640be2ac5bee1820d693d4/pillow-12.1.0-cp312-cp312-win32.whl", hash = "sha256:aa0c9cc0b82b14766a99fbe6084409972266e82f459821cd26997a488a7261a7", size = 6328770, upload-time = "2026-01-02T09:11:25.661Z" },
+    { url = "https://files.pythonhosted.org/packages/a7/87/90b358775a3f02765d87655237229ba64a997b87efa8ccaca7dd3e36e7a7/pillow-12.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:d70534cea9e7966169ad29a903b99fc507e932069a881d0965a1a84bb57f6c6d", size = 7033406, upload-time = "2026-01-02T09:11:27.474Z" },
+    { url = "https://files.pythonhosted.org/packages/5d/cf/881b457eccacac9e5b2ddd97d5071fb6d668307c57cbf4e3b5278e06e536/pillow-12.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:65b80c1ee7e14a87d6a068dd3b0aea268ffcabfe0498d38661b00c5b4b22e74c", size = 2452612, upload-time = "2026-01-02T09:11:29.309Z" },
+    { url = "https://files.pythonhosted.org/packages/dd/c7/2530a4aa28248623e9d7f27316b42e27c32ec410f695929696f2e0e4a778/pillow-12.1.0-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:7b5dd7cbae20285cdb597b10eb5a2c13aa9de6cde9bb64a3c1317427b1db1ae1", size = 4062543, upload-time = "2026-01-02T09:11:31.566Z" },
+    { url = "https://files.pythonhosted.org/packages/8f/1f/40b8eae823dc1519b87d53c30ed9ef085506b05281d313031755c1705f73/pillow-12.1.0-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:29a4cef9cb672363926f0470afc516dbf7305a14d8c54f7abbb5c199cd8f8179", size = 4138373, upload-time = "2026-01-02T09:11:33.367Z" },
+    { url = "https://files.pythonhosted.org/packages/d4/77/6fa60634cf06e52139fd0e89e5bbf055e8166c691c42fb162818b7fda31d/pillow-12.1.0-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:681088909d7e8fa9e31b9799aaa59ba5234c58e5e4f1951b4c4d1082a2e980e0", size = 3601241, upload-time = "2026-01-02T09:11:35.011Z" },
+    { url = "https://files.pythonhosted.org/packages/4f/bf/28ab865de622e14b747f0cd7877510848252d950e43002e224fb1c9ababf/pillow-12.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:983976c2ab753166dc66d36af6e8ec15bb511e4a25856e2227e5f7e00a160587", size = 5262410, upload-time = "2026-01-02T09:11:36.682Z" },
+    { url = "https://files.pythonhosted.org/packages/1c/34/583420a1b55e715937a85bd48c5c0991598247a1fd2eb5423188e765ea02/pillow-12.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:db44d5c160a90df2d24a24760bbd37607d53da0b34fb546c4c232af7192298ac", size = 4657312, upload-time = "2026-01-02T09:11:38.535Z" },
+    { url = "https://files.pythonhosted.org/packages/1d/fd/f5a0896839762885b3376ff04878f86ab2b097c2f9a9cdccf4eda8ba8dc0/pillow-12.1.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:6b7a9d1db5dad90e2991645874f708e87d9a3c370c243c2d7684d28f7e133e6b", size = 6232605, upload-time = "2026-01-02T09:11:40.602Z" },
+    { url = "https://files.pythonhosted.org/packages/98/aa/938a09d127ac1e70e6ed467bd03834350b33ef646b31edb7452d5de43792/pillow-12.1.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6258f3260986990ba2fa8a874f8b6e808cf5abb51a94015ca3dc3c68aa4f30ea", size = 8041617, upload-time = "2026-01-02T09:11:42.721Z" },
+    { url = "https://files.pythonhosted.org/packages/17/e8/538b24cb426ac0186e03f80f78bc8dc7246c667f58b540bdd57c71c9f79d/pillow-12.1.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e115c15e3bc727b1ca3e641a909f77f8ca72a64fff150f666fcc85e57701c26c", size = 6346509, upload-time = "2026-01-02T09:11:44.955Z" },
+    { url = "https://files.pythonhosted.org/packages/01/9a/632e58ec89a32738cabfd9ec418f0e9898a2b4719afc581f07c04a05e3c9/pillow-12.1.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6741e6f3074a35e47c77b23a4e4f2d90db3ed905cb1c5e6e0d49bff2045632bc", size = 7038117, upload-time = "2026-01-02T09:11:46.736Z" },
+    { url = "https://files.pythonhosted.org/packages/c7/a2/d40308cf86eada842ca1f3ffa45d0ca0df7e4ab33c83f81e73f5eaed136d/pillow-12.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:935b9d1aed48fcfb3f838caac506f38e29621b44ccc4f8a64d575cb1b2a88644", size = 6460151, upload-time = "2026-01-02T09:11:48.625Z" },
+    { url = "https://files.pythonhosted.org/packages/f1/88/f5b058ad6453a085c5266660a1417bdad590199da1b32fb4efcff9d33b05/pillow-12.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5fee4c04aad8932da9f8f710af2c1a15a83582cfb884152a9caa79d4efcdbf9c", size = 7164534, upload-time = "2026-01-02T09:11:50.445Z" },
+    { url = "https://files.pythonhosted.org/packages/19/ce/c17334caea1db789163b5d855a5735e47995b0b5dc8745e9a3605d5f24c0/pillow-12.1.0-cp313-cp313-win32.whl", hash = "sha256:a786bf667724d84aa29b5db1c61b7bfdde380202aaca12c3461afd6b71743171", size = 6332551, upload-time = "2026-01-02T09:11:52.234Z" },
+    { url = "https://files.pythonhosted.org/packages/e5/07/74a9d941fa45c90a0d9465098fe1ec85de3e2afbdc15cc4766622d516056/pillow-12.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:461f9dfdafa394c59cd6d818bdfdbab4028b83b02caadaff0ffd433faf4c9a7a", size = 7040087, upload-time = "2026-01-02T09:11:54.822Z" },
+    { url = "https://files.pythonhosted.org/packages/88/09/c99950c075a0e9053d8e880595926302575bc742b1b47fe1bbcc8d388d50/pillow-12.1.0-cp313-cp313-win_arm64.whl", hash = "sha256:9212d6b86917a2300669511ed094a9406888362e085f2431a7da985a6b124f45", size = 2452470, upload-time = "2026-01-02T09:11:56.522Z" },
+    { url = "https://files.pythonhosted.org/packages/b5/ba/970b7d85ba01f348dee4d65412476321d40ee04dcb51cd3735b9dc94eb58/pillow-12.1.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:00162e9ca6d22b7c3ee8e61faa3c3253cd19b6a37f126cad04f2f88b306f557d", size = 5264816, upload-time = "2026-01-02T09:11:58.227Z" },
+    { url = "https://files.pythonhosted.org/packages/10/60/650f2fb55fdba7a510d836202aa52f0baac633e50ab1cf18415d332188fb/pillow-12.1.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:7d6daa89a00b58c37cb1747ec9fb7ac3bc5ffd5949f5888657dfddde6d1312e0", size = 4660472, upload-time = "2026-01-02T09:12:00.798Z" },
+    { url = "https://files.pythonhosted.org/packages/2b/c0/5273a99478956a099d533c4f46cbaa19fd69d606624f4334b85e50987a08/pillow-12.1.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e2479c7f02f9d505682dc47df8c0ea1fc5e264c4d1629a5d63fe3e2334b89554", size = 6268974, upload-time = "2026-01-02T09:12:02.572Z" },
+    { url = "https://files.pythonhosted.org/packages/b4/26/0bf714bc2e73d5267887d47931d53c4ceeceea6978148ed2ab2a4e6463c4/pillow-12.1.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f188d580bd870cda1e15183790d1cc2fa78f666e76077d103edf048eed9c356e", size = 8073070, upload-time = "2026-01-02T09:12:04.75Z" },
+    { url = "https://files.pythonhosted.org/packages/43/cf/1ea826200de111a9d65724c54f927f3111dc5ae297f294b370a670c17786/pillow-12.1.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0fde7ec5538ab5095cc02df38ee99b0443ff0e1c847a045554cf5f9af1f4aa82", size = 6380176, upload-time = "2026-01-02T09:12:06.626Z" },
+    { url = "https://files.pythonhosted.org/packages/03/e0/7938dd2b2013373fd85d96e0f38d62b7a5a262af21ac274250c7ca7847c9/pillow-12.1.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0ed07dca4a8464bada6139ab38f5382f83e5f111698caf3191cb8dbf27d908b4", size = 7067061, upload-time = "2026-01-02T09:12:08.624Z" },
+    { url = "https://files.pythonhosted.org/packages/86/ad/a2aa97d37272a929a98437a8c0ac37b3cf012f4f8721e1bd5154699b2518/pillow-12.1.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:f45bd71d1fa5e5749587613037b172e0b3b23159d1c00ef2fc920da6f470e6f0", size = 6491824, upload-time = "2026-01-02T09:12:10.488Z" },
+    { url = "https://files.pythonhosted.org/packages/a4/44/80e46611b288d51b115826f136fb3465653c28f491068a72d3da49b54cd4/pillow-12.1.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:277518bf4fe74aa91489e1b20577473b19ee70fb97c374aa50830b279f25841b", size = 7190911, upload-time = "2026-01-02T09:12:12.772Z" },
+    { url = "https://files.pythonhosted.org/packages/86/77/eacc62356b4cf81abe99ff9dbc7402750044aed02cfd6a503f7c6fc11f3e/pillow-12.1.0-cp313-cp313t-win32.whl", hash = "sha256:7315f9137087c4e0ee73a761b163fc9aa3b19f5f606a7fc08d83fd3e4379af65", size = 6336445, upload-time = "2026-01-02T09:12:14.775Z" },
+    { url = "https://files.pythonhosted.org/packages/e7/3c/57d81d0b74d218706dafccb87a87ea44262c43eef98eb3b164fd000e0491/pillow-12.1.0-cp313-cp313t-win_amd64.whl", hash = "sha256:0ddedfaa8b5f0b4ffbc2fa87b556dc59f6bb4ecb14a53b33f9189713ae8053c0", size = 7045354, upload-time = "2026-01-02T09:12:16.599Z" },
+    { url = "https://files.pythonhosted.org/packages/ac/82/8b9b97bba2e3576a340f93b044a3a3a09841170ab4c1eb0d5c93469fd32f/pillow-12.1.0-cp313-cp313t-win_arm64.whl", hash = "sha256:80941e6d573197a0c28f394753de529bb436b1ca990ed6e765cf42426abc39f8", size = 2454547, upload-time = "2026-01-02T09:12:18.704Z" },
+    { url = "https://files.pythonhosted.org/packages/8b/bc/224b1d98cffd7164b14707c91aac83c07b047fbd8f58eba4066a3e53746a/pillow-12.1.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:ca94b6aac0d7af2a10ba08c0f888b3d5114439b6b3ef39968378723622fed377", size = 5228605, upload-time = "2026-01-02T09:13:14.084Z" },
+    { url = "https://files.pythonhosted.org/packages/0c/ca/49ca7769c4550107de049ed85208240ba0f330b3f2e316f24534795702ce/pillow-12.1.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:351889afef0f485b84078ea40fe33727a0492b9af3904661b0abbafee0355b72", size = 4622245, upload-time = "2026-01-02T09:13:15.964Z" },
+    { url = "https://files.pythonhosted.org/packages/73/48/fac807ce82e5955bcc2718642b94b1bd22a82a6d452aea31cbb678cddf12/pillow-12.1.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:bb0984b30e973f7e2884362b7d23d0a348c7143ee559f38ef3eaab640144204c", size = 5247593, upload-time = "2026-01-02T09:13:17.913Z" },
+    { url = "https://files.pythonhosted.org/packages/d2/95/3e0742fe358c4664aed4fd05d5f5373dcdad0b27af52aa0972568541e3f4/pillow-12.1.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:84cabc7095dd535ca934d57e9ce2a72ffd216e435a84acb06b2277b1de2689bd", size = 6989008, upload-time = "2026-01-02T09:13:20.083Z" },
+    { url = "https://files.pythonhosted.org/packages/5a/74/fe2ac378e4e202e56d50540d92e1ef4ff34ed687f3c60f6a121bcf99437e/pillow-12.1.0-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:53d8b764726d3af1a138dd353116f774e3862ec7e3794e0c8781e30db0f35dfc", size = 5313824, upload-time = "2026-01-02T09:13:22.405Z" },
+    { url = "https://files.pythonhosted.org/packages/f3/77/2a60dee1adee4e2655ac328dd05c02a955c1cd683b9f1b82ec3feb44727c/pillow-12.1.0-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5da841d81b1a05ef940a8567da92decaa15bc4d7dedb540a8c219ad83d91808a", size = 5963278, upload-time = "2026-01-02T09:13:24.706Z" },
+    { url = "https://files.pythonhosted.org/packages/2d/71/64e9b1c7f04ae0027f788a248e6297d7fcc29571371fe7d45495a78172c0/pillow-12.1.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:75af0b4c229ac519b155028fa1be632d812a519abba9b46b20e50c6caa184f19", size = 7029809, upload-time = "2026-01-02T09:13:26.541Z" },
+]
+
+[[package]]
+name = "pip"
+version = "25.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/fe/6e/74a3f0179a4a73a53d66ce57fdb4de0080a8baa1de0063de206d6167acc2/pip-25.3.tar.gz", hash = "sha256:8d0538dbbd7babbd207f261ed969c65de439f6bc9e5dbd3b3b9a77f25d95f343", size = 1803014, upload-time = "2025-10-25T00:55:41.394Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/44/3c/d717024885424591d5376220b5e836c2d5293ce2011523c9de23ff7bf068/pip-25.3-py3-none-any.whl", hash = "sha256:9655943313a94722b7774661c21049070f6bbb0a1516bf02f7c8d5d9201514cd", size = 1778622, upload-time = "2025-10-25T00:55:39.247Z" },
+]
+
+[[package]]
+name = "platformdirs"
+version = "4.5.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/cf/86/0248f086a84f01b37aaec0fa567b397df1a119f73c16f6c7a9aac73ea309/platformdirs-4.5.1.tar.gz", hash = "sha256:61d5cdcc6065745cdd94f0f878977f8de9437be93de97c1c12f853c9c0cdcbda", size = 21715, upload-time = "2025-12-05T13:52:58.638Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/cb/28/3bfe2fa5a7b9c46fe7e13c97bda14c895fb10fa2ebf1d0abb90e0cea7ee1/platformdirs-4.5.1-py3-none-any.whl", hash = "sha256:d03afa3963c806a9bed9d5125c8f4cb2fdaf74a55ab60e5d59b3fde758104d31", size = 18731, upload-time = "2025-12-05T13:52:56.823Z" },
+]
+
+[[package]]
+name = "pluggy"
+version = "1.6.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" },
+]
+
+[[package]]
+name = "poethepoet"
+version = "0.40.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "pastel" },
+    { name = "pyyaml" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/38/9d/054c8435b03324ed9abd5d5ab8c45065b1f42c23952cd23f13a5921d8465/poethepoet-0.40.0.tar.gz", hash = "sha256:91835f00d03d6c4f0e146f80fa510e298ad865e7edd27fe4cb9c94fdc090791b", size = 81114, upload-time = "2026-01-05T19:09:13.116Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/fb/bc/73327d12b176abea7a3c6c7d760e1a953992f7b59d72c0354e39d7a353b5/poethepoet-0.40.0-py3-none-any.whl", hash = "sha256:afd276ae31d5c53573c0c14898118d4848ccee3709b6b0be6a1c6cbe522bbc8a", size = 106672, upload-time = "2026-01-05T19:09:11.536Z" },
+]
+
+[[package]]
+name = "pre-commit"
+version = "4.5.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "cfgv" },
+    { name = "identify" },
+    { name = "nodeenv" },
+    { name = "pyyaml" },
+    { name = "virtualenv" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/40/f1/6d86a29246dfd2e9b6237f0b5823717f60cad94d47ddc26afa916d21f525/pre_commit-4.5.1.tar.gz", hash = "sha256:eb545fcff725875197837263e977ea257a402056661f09dae08e4b149b030a61", size = 198232, upload-time = "2025-12-16T21:14:33.552Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/5d/19/fd3ef348460c80af7bb4669ea7926651d1f95c23ff2df18b9d24bab4f3fa/pre_commit-4.5.1-py2.py3-none-any.whl", hash = "sha256:3b3afd891e97337708c1674210f8eba659b52a38ea5f822ff142d10786221f77", size = 226437, upload-time = "2025-12-16T21:14:32.409Z" },
+]
+
+[[package]]
+name = "prometheus-client"
+version = "0.24.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/f0/58/a794d23feb6b00fc0c72787d7e87d872a6730dd9ed7c7b3e954637d8f280/prometheus_client-0.24.1.tar.gz", hash = "sha256:7e0ced7fbbd40f7b84962d5d2ab6f17ef88a72504dcf7c0b40737b43b2a461f9", size = 85616, upload-time = "2026-01-14T15:26:26.965Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/74/c3/24a2f845e3917201628ecaba4f18bab4d18a337834c1df2a159ee9d22a42/prometheus_client-0.24.1-py3-none-any.whl", hash = "sha256:150db128af71a5c2482b36e588fc8a6b95e498750da4b17065947c16070f4055", size = 64057, upload-time = "2026-01-14T15:26:24.42Z" },
+]
+
+[[package]]
+name = "prompt-toolkit"
+version = "3.0.52"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "wcwidth" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/a1/96/06e01a7b38dce6fe1db213e061a4602dd6032a8a97ef6c1a862537732421/prompt_toolkit-3.0.52.tar.gz", hash = "sha256:28cde192929c8e7321de85de1ddbe736f1375148b02f2e17edd840042b1be855", size = 434198, upload-time = "2025-08-27T15:24:02.057Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/84/03/0d3ce49e2505ae70cf43bc5bb3033955d2fc9f932163e84dc0779cc47f48/prompt_toolkit-3.0.52-py3-none-any.whl", hash = "sha256:9aac639a3bbd33284347de5ad8d68ecc044b91a762dc39b7c21095fcd6a19955", size = 391431, upload-time = "2025-08-27T15:23:59.498Z" },
+]
+
+[[package]]
+name = "psutil"
+version = "7.2.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/73/cb/09e5184fb5fc0358d110fc3ca7f6b1d033800734d34cac10f4136cfac10e/psutil-7.2.1.tar.gz", hash = "sha256:f7583aec590485b43ca601dd9cea0dcd65bd7bb21d30ef4ddbf4ea6b5ed1bdd3", size = 490253, upload-time = "2025-12-29T08:26:00.169Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/77/8e/f0c242053a368c2aa89584ecd1b054a18683f13d6e5a318fc9ec36582c94/psutil-7.2.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:ba9f33bb525b14c3ea563b2fd521a84d2fa214ec59e3e6a2858f78d0844dd60d", size = 129624, upload-time = "2025-12-29T08:26:04.255Z" },
+    { url = "https://files.pythonhosted.org/packages/26/97/a58a4968f8990617decee234258a2b4fc7cd9e35668387646c1963e69f26/psutil-7.2.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:81442dac7abfc2f4f4385ea9e12ddf5a796721c0f6133260687fec5c3780fa49", size = 130132, upload-time = "2025-12-29T08:26:06.228Z" },
+    { url = "https://files.pythonhosted.org/packages/db/6d/ed44901e830739af5f72a85fa7ec5ff1edea7f81bfbf4875e409007149bd/psutil-7.2.1-cp313-cp313t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ea46c0d060491051d39f0d2cff4f98d5c72b288289f57a21556cc7d504db37fc", size = 180612, upload-time = "2025-12-29T08:26:08.276Z" },
+    { url = "https://files.pythonhosted.org/packages/c7/65/b628f8459bca4efbfae50d4bf3feaab803de9a160b9d5f3bd9295a33f0c2/psutil-7.2.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:35630d5af80d5d0d49cfc4d64c1c13838baf6717a13effb35869a5919b854cdf", size = 183201, upload-time = "2025-12-29T08:26:10.622Z" },
+    { url = "https://files.pythonhosted.org/packages/fb/23/851cadc9764edcc18f0effe7d0bf69f727d4cf2442deb4a9f78d4e4f30f2/psutil-7.2.1-cp313-cp313t-win_amd64.whl", hash = "sha256:923f8653416604e356073e6e0bccbe7c09990acef442def2f5640dd0faa9689f", size = 139081, upload-time = "2025-12-29T08:26:12.483Z" },
+    { url = "https://files.pythonhosted.org/packages/59/82/d63e8494ec5758029f31c6cb06d7d161175d8281e91d011a4a441c8a43b5/psutil-7.2.1-cp313-cp313t-win_arm64.whl", hash = "sha256:cfbe6b40ca48019a51827f20d830887b3107a74a79b01ceb8cc8de4ccb17b672", size = 134767, upload-time = "2025-12-29T08:26:14.528Z" },
+    { url = "https://files.pythonhosted.org/packages/c5/cf/5180eb8c8bdf6a503c6919f1da28328bd1e6b3b1b5b9d5b01ae64f019616/psutil-7.2.1-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:b2e953fcfaedcfbc952b44744f22d16575d3aa78eb4f51ae74165b4e96e55f42", size = 128137, upload-time = "2025-12-29T08:26:27.759Z" },
+    { url = "https://files.pythonhosted.org/packages/c5/2c/78e4a789306a92ade5000da4f5de3255202c534acdadc3aac7b5458fadef/psutil-7.2.1-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:05cc68dbb8c174828624062e73078e7e35406f4ca2d0866c272c2410d8ef06d1", size = 128947, upload-time = "2025-12-29T08:26:29.548Z" },
+    { url = "https://files.pythonhosted.org/packages/29/f8/40e01c350ad9a2b3cb4e6adbcc8a83b17ee50dd5792102b6142385937db5/psutil-7.2.1-cp36-abi3-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5e38404ca2bb30ed7267a46c02f06ff842e92da3bb8c5bfdadbd35a5722314d8", size = 154694, upload-time = "2025-12-29T08:26:32.147Z" },
+    { url = "https://files.pythonhosted.org/packages/06/e4/b751cdf839c011a9714a783f120e6a86b7494eb70044d7d81a25a5cd295f/psutil-7.2.1-cp36-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ab2b98c9fc19f13f59628d94df5cc4cc4844bc572467d113a8b517d634e362c6", size = 156136, upload-time = "2025-12-29T08:26:34.079Z" },
+    { url = "https://files.pythonhosted.org/packages/44/ad/bbf6595a8134ee1e94a4487af3f132cef7fce43aef4a93b49912a48c3af7/psutil-7.2.1-cp36-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:f78baafb38436d5a128f837fab2d92c276dfb48af01a240b861ae02b2413ada8", size = 148108, upload-time = "2025-12-29T08:26:36.225Z" },
+    { url = "https://files.pythonhosted.org/packages/1c/15/dd6fd869753ce82ff64dcbc18356093471a5a5adf4f77ed1f805d473d859/psutil-7.2.1-cp36-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:99a4cd17a5fdd1f3d014396502daa70b5ec21bf4ffe38393e152f8e449757d67", size = 147402, upload-time = "2025-12-29T08:26:39.21Z" },
+    { url = "https://files.pythonhosted.org/packages/34/68/d9317542e3f2b180c4306e3f45d3c922d7e86d8ce39f941bb9e2e9d8599e/psutil-7.2.1-cp37-abi3-win_amd64.whl", hash = "sha256:b1b0671619343aa71c20ff9767eced0483e4fc9e1f489d50923738caf6a03c17", size = 136938, upload-time = "2025-12-29T08:26:41.036Z" },
+    { url = "https://files.pythonhosted.org/packages/3e/73/2ce007f4198c80fcf2cb24c169884f833fe93fbc03d55d302627b094ee91/psutil-7.2.1-cp37-abi3-win_arm64.whl", hash = "sha256:0d67c1822c355aa6f7314d92018fb4268a76668a536f133599b91edd48759442", size = 133836, upload-time = "2025-12-29T08:26:43.086Z" },
+]
+
+[[package]]
+name = "ptyprocess"
+version = "0.7.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/20/e5/16ff212c1e452235a90aeb09066144d0c5a6a8c0834397e03f5224495c4e/ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220", size = 70762, upload-time = "2020-12-28T15:15:30.155Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/22/a6/858897256d0deac81a172289110f31629fc4cee19b6f01283303e18c8db3/ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35", size = 13993, upload-time = "2020-12-28T15:15:28.35Z" },
+]
+
+[[package]]
+name = "pure-eval"
+version = "0.2.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/cd/05/0a34433a064256a578f1783a10da6df098ceaa4a57bbeaa96a6c0352786b/pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42", size = 19752, upload-time = "2024-07-21T12:58:21.801Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/8e/37/efad0257dc6e593a18957422533ff0f87ede7c9c6ea010a2177d738fb82f/pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0", size = 11842, upload-time = "2024-07-21T12:58:20.04Z" },
+]
+
+[[package]]
+name = "pvlib"
+version = "0.14.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "h5py" },
+    { name = "numpy" },
+    { name = "pandas" },
+    { name = "pytz" },
+    { name = "requests" },
+    { name = "scipy" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/8d/78/363c5914715e30e16a169d9d8102ddfe18c3bb80cc0d3446eacb9aae5ce7/pvlib-0.14.0.tar.gz", hash = "sha256:9e6a66865240ce4e31cbe9d360a28d6eb7953babb62ac403af2f90aed16a4509", size = 38684793, upload-time = "2026-01-16T22:22:48.883Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/54/a9/9cb15f22e949e865aacf146455a61ea13be875b36685e142b1d3d3d11df2/pvlib-0.14.0-py3-none-any.whl", hash = "sha256:d97409a53a08576541ba880bd8983d5e3ff57361c25b5e4dde349b263e3baf88", size = 19352615, upload-time = "2026-01-16T22:22:46.314Z" },
+]
+
+[[package]]
+name = "pycparser"
+version = "3.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/1b/7d/92392ff7815c21062bea51aa7b87d45576f649f16458d78b7cf94b9ab2e6/pycparser-3.0.tar.gz", hash = "sha256:600f49d217304a5902ac3c37e1281c9fe94e4d0489de643a9504c5cdfdfc6b29", size = 103492, upload-time = "2026-01-21T14:26:51.89Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/0c/c3/44f3fbbfa403ea2a7c779186dc20772604442dde72947e7d01069cbe98e3/pycparser-3.0-py3-none-any.whl", hash = "sha256:b727414169a36b7d524c1c3e31839a521725078d7b2ff038656844266160a992", size = 48172, upload-time = "2026-01-21T14:26:50.693Z" },
+]
+
+[[package]]
+name = "pygments"
+version = "2.19.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" },
+]
+
+[[package]]
+name = "pymdown-extensions"
+version = "10.20.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "markdown" },
+    { name = "pyyaml" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/1e/6c/9e370934bfa30e889d12e61d0dae009991294f40055c238980066a7fbd83/pymdown_extensions-10.20.1.tar.gz", hash = "sha256:e7e39c865727338d434b55f1dd8da51febcffcaebd6e1a0b9c836243f660740a", size = 852860, upload-time = "2026-01-24T05:56:56.758Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/40/6d/b6ee155462a0156b94312bdd82d2b92ea56e909740045a87ccb98bf52405/pymdown_extensions-10.20.1-py3-none-any.whl", hash = "sha256:24af7feacbca56504b313b7b418c4f5e1317bb5fea60f03d57be7fcc40912aa0", size = 268768, upload-time = "2026-01-24T05:56:54.537Z" },
+]
+
+[[package]]
+name = "pyogrio"
+version = "0.12.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "certifi" },
+    { name = "numpy" },
+    { name = "packaging" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/49/d4/12f86b1ed09721363da4c09622464b604c851a9223fc0c6b393fb2012208/pyogrio-0.12.1.tar.gz", hash = "sha256:e548ab705bb3e5383693717de1e6c76da97f3762ab92522cb310f93128a75ff1", size = 303289, upload-time = "2025-11-28T19:04:53.341Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/02/46/b2c2dcdfd88759b56f103365905fffb85e8b08c1db1ec7c8f8b4c4c26016/pyogrio-0.12.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:01b322dac2a258d24b024d1028dcaa03c9bb6d9c3988b86d298a64873d10dc65", size = 23670744, upload-time = "2025-11-28T19:03:11.299Z" },
+    { url = "https://files.pythonhosted.org/packages/d9/21/b69f1bc51d805c00dd7c484a18e1fd2e75b41da1d9f5b8591d7d9d4a7d2f/pyogrio-0.12.1-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:e10087abcbd6b7e8212560a7002984e5078ac7b3a969ddc2c9929044dbb0d403", size = 25246184, upload-time = "2025-11-28T19:03:13.997Z" },
+    { url = "https://files.pythonhosted.org/packages/19/8c/b6aae08e8fcc4f2a903da5f6bd8f888d2b6d7290e54dde5abe15b4cca8df/pyogrio-0.12.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1f6c621972b09fd81a32317e742c69ff4a7763a803da211361a78317f9577765", size = 31434449, upload-time = "2025-11-28T19:03:16.777Z" },
+    { url = "https://files.pythonhosted.org/packages/70/f9/9538fa893c29a3fdfeddf3b4c9f8db77f2d4134bc766587929fec8405ebf/pyogrio-0.12.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:c38253427b688464caad5316d4ebcec116b5e13f1f02cc4e3588502f136ca1b4", size = 30987586, upload-time = "2025-11-28T19:03:19.586Z" },
+    { url = "https://files.pythonhosted.org/packages/89/a4/0aef5837b4e11840f501e48e01c31242838476c4f4aff9c05e228a083982/pyogrio-0.12.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:5f47787251de7ce13cc06038da93a1214dc283cbccf816be6e03c080358226c8", size = 32534386, upload-time = "2025-11-28T19:03:22.292Z" },
+    { url = "https://files.pythonhosted.org/packages/34/97/e8f2ed8a339152b86f8403c258ae5d5f23ab32d690eeb0545bb3473d0c69/pyogrio-0.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:c1d756cf2da4cdf5609779f260d1e1e89be023184225855d6f3dcd33bbe17cb0", size = 22941718, upload-time = "2025-11-28T19:03:24.82Z" },
+    { url = "https://files.pythonhosted.org/packages/ad/e0/656b6536549d41b5aec57e0deca1f269b4f17532f0636836f587e581603a/pyogrio-0.12.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:7a0d5ca39184030aec4cde30f4258f75b227a854530d2659babc8189d76e657d", size = 23661857, upload-time = "2025-11-28T19:03:27.744Z" },
+    { url = "https://files.pythonhosted.org/packages/14/78/313259e40da728bdb60106ffdc7ea8224d164498cb838ecb79b634aab967/pyogrio-0.12.1-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:feaff42bbe8087ca0b30e33b09d1ce049ca55fe83ad83db1139ef37d1d04f30c", size = 25237106, upload-time = "2025-11-28T19:03:30.018Z" },
+    { url = "https://files.pythonhosted.org/packages/8f/ca/5368571a8b00b941ccfbe6ea29a5566aaffd45d4eb1553b956f7755af43e/pyogrio-0.12.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:81096a5139532de5a8003ef02b41d5d2444cb382a9aecd1165b447eb549180d3", size = 31417048, upload-time = "2025-11-28T19:03:32.572Z" },
+    { url = "https://files.pythonhosted.org/packages/ef/85/6eeb875f27bf498d657eb5dab9f58e4c48b36c9037122787abee9a1ba4ba/pyogrio-0.12.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:41b78863f782f7a113ed0d36a5dc74d59735bd3a82af53510899bb02a18b06bb", size = 30952115, upload-time = "2025-11-28T19:03:35.332Z" },
+    { url = "https://files.pythonhosted.org/packages/36/f7/cf8bec9024625947e1a71441906f60a5fa6f9e4c441c4428037e73b1fcc8/pyogrio-0.12.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:8b65be8c4258b27cc8f919b21929cecdadda4c353e3637fa30850339ef4d15c5", size = 32537246, upload-time = "2025-11-28T19:03:37.969Z" },
+    { url = "https://files.pythonhosted.org/packages/ab/10/7c9f5e428273574e69f217eba3a6c0c42936188ad4dcd9e2c41ebb711188/pyogrio-0.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:1291b866c2c81d991bda15021b08b3621709b40ee3a85689229929e9465788bf", size = 22933980, upload-time = "2025-11-28T19:03:41.047Z" },
+    { url = "https://files.pythonhosted.org/packages/be/56/f56e79f71b84aa9bea25fdde39fab3846841bd7926be96f623eb7253b7e1/pyogrio-0.12.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:ec0e47a5a704e575092b2fd5c83fa0472a1d421e590f94093eb837bb0a11125d", size = 23658483, upload-time = "2025-11-28T19:03:43.567Z" },
+    { url = "https://files.pythonhosted.org/packages/66/ac/5559f8a35d58a16cbb2dd7602dd11936ff8796d8c9bf789f14da88764ec3/pyogrio-0.12.1-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:b4c888fc08f388be4dd99dfca5e84a5cdc5994deeec0230cc45144d3460e2b21", size = 25232737, upload-time = "2025-11-28T19:03:45.92Z" },
+    { url = "https://files.pythonhosted.org/packages/59/58/925f1c129ddd7cbba8dea4e7609797cea7a76dbc863ac9afd318a679c4b9/pyogrio-0.12.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:73a88436f9962750d782853727897ac2722cac5900d920e39fab3e56d7a6a7f1", size = 31377986, upload-time = "2025-11-28T19:03:48.495Z" },
+    { url = "https://files.pythonhosted.org/packages/18/5f/c87034e92847b1844d0e8492a6a8e3301147d32c5e57909397ce64dbedf5/pyogrio-0.12.1-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:b5d248a0d59fe9bbf9a35690b70004c67830ee0ebe7d4f7bb8ffd8659f684b3a", size = 30915791, upload-time = "2025-11-28T19:03:51.267Z" },
+    { url = "https://files.pythonhosted.org/packages/46/35/b874f79d03e9f900012cf609f7fff97b77164f2e14ee5aac282f8a999c1b/pyogrio-0.12.1-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:0622bc1a186421547660271083079b38d42e6f868802936d8538c0b379f1ab6b", size = 32499754, upload-time = "2025-11-28T19:03:58.776Z" },
+    { url = "https://files.pythonhosted.org/packages/c3/c4/705678c9c4200130290b3a104b45c0cc10aaa48fcef3b2585b34e34ab3e1/pyogrio-0.12.1-cp313-cp313-win_amd64.whl", hash = "sha256:207bd60c7ffbcea84584596e3637653aa7095e9ee20fa408f90c7f9460392613", size = 22933945, upload-time = "2025-11-28T19:04:01.551Z" },
+    { url = "https://files.pythonhosted.org/packages/f9/e0/d92d4944001330bc87742d43f112d63d12fc89378b6187e62ff3fc1e8e85/pyogrio-0.12.1-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:1511b39a283fa27cda906cd187a791578942a87a40b6a06697d9b43bb8ac80b0", size = 23692697, upload-time = "2025-11-28T19:04:04.208Z" },
+    { url = "https://files.pythonhosted.org/packages/e5/d7/40acbe06d1b1140e3bb27b79e9163776469c1dc785f1be7d9a7fc7b95c87/pyogrio-0.12.1-cp313-cp313t-macosx_12_0_x86_64.whl", hash = "sha256:e486cd6aa9ea8a15394a5f84e019d61ec18f257eeeb642348bd68c3d1e57280b", size = 25258083, upload-time = "2025-11-28T19:04:07.121Z" },
+    { url = "https://files.pythonhosted.org/packages/87/a1/39fefd9cddd95986700524f43d3093b4350f6e4fc200623c3838424a5080/pyogrio-0.12.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d3f1a19f63bfd1d3042e45f37ad1d6598123a5a604b6c4ba3f38b419273486cd", size = 31368995, upload-time = "2025-11-28T19:04:09.88Z" },
+    { url = "https://files.pythonhosted.org/packages/18/d7/da88c566e67d741a03851eb8d01358949d52e0b0fc2cd953582dc6d89ff8/pyogrio-0.12.1-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:f3dcc59b3316b8a0f59346bcc638a4d69997864a4d21da839192f50c4c92369a", size = 31035589, upload-time = "2025-11-28T19:04:12.993Z" },
+    { url = "https://files.pythonhosted.org/packages/11/ac/8f0199f0d31b8ddbc4b4ea1918df8070fdf3e0a63100b898633ec9396224/pyogrio-0.12.1-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:a0643e041dee3e8e038fce69f52a915ecb486e6d7b674c0f9919f3c9e9629689", size = 32487973, upload-time = "2025-11-28T19:04:16.103Z" },
+    { url = "https://files.pythonhosted.org/packages/bd/64/8541a27e9635a335835d234dfaeb19d6c26097fd88224eda7791f83ca98d/pyogrio-0.12.1-cp313-cp313t-win_amd64.whl", hash = "sha256:5881017f29e110d3613819667657844d8e961b747f2d35cf92f273c27af6d068", size = 22987374, upload-time = "2025-11-28T19:04:18.91Z" },
+]
+
+[[package]]
+name = "pyparsing"
+version = "3.3.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/f3/91/9c6ee907786a473bf81c5f53cf703ba0957b23ab84c264080fb5a450416f/pyparsing-3.3.2.tar.gz", hash = "sha256:c777f4d763f140633dcb6d8a3eda953bf7a214dc4eff598413c070bcdc117cbc", size = 6851574, upload-time = "2026-01-21T03:57:59.36Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/10/bd/c038d7cc38edc1aa5bf91ab8068b63d4308c66c4c8bb3cbba7dfbc049f9c/pyparsing-3.3.2-py3-none-any.whl", hash = "sha256:850ba148bd908d7e2411587e247a1e4f0327839c40e2e5e6d05a007ecc69911d", size = 122781, upload-time = "2026-01-21T03:57:55.912Z" },
+]
+
+[[package]]
+name = "pyproj"
+version = "3.7.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "certifi" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/04/90/67bd7260b4ea9b8b20b4f58afef6c223ecb3abf368eb4ec5bc2cdef81b49/pyproj-3.7.2.tar.gz", hash = "sha256:39a0cf1ecc7e282d1d30f36594ebd55c9fae1fda8a2622cee5d100430628f88c", size = 226279, upload-time = "2025-08-14T12:05:42.18Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/a6/bd/f205552cd1713b08f93b09e39a3ec99edef0b3ebbbca67b486fdf1abe2de/pyproj-3.7.2-cp311-cp311-macosx_13_0_x86_64.whl", hash = "sha256:2514d61f24c4e0bb9913e2c51487ecdaeca5f8748d8313c933693416ca41d4d5", size = 6227022, upload-time = "2025-08-14T12:03:51.474Z" },
+    { url = "https://files.pythonhosted.org/packages/75/4c/9a937e659b8b418ab573c6d340d27e68716928953273e0837e7922fcac34/pyproj-3.7.2-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:8693ca3892d82e70de077701ee76dd13d7bca4ae1c9d1e739d72004df015923a", size = 4625810, upload-time = "2025-08-14T12:03:53.808Z" },
+    { url = "https://files.pythonhosted.org/packages/c0/7d/a9f41e814dc4d1dc54e95b2ccaf0b3ebe3eb18b1740df05fe334724c3d89/pyproj-3.7.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:5e26484d80fea56273ed1555abaea161e9661d81a6c07815d54b8e883d4ceb25", size = 9638694, upload-time = "2025-08-14T12:03:55.669Z" },
+    { url = "https://files.pythonhosted.org/packages/ad/ab/9bdb4a6216b712a1f9aab1c0fcbee5d3726f34a366f29c3e8c08a78d6b70/pyproj-3.7.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:281cb92847814e8018010c48b4069ff858a30236638631c1a91dd7bfa68f8a8a", size = 9493977, upload-time = "2025-08-14T12:03:57.937Z" },
+    { url = "https://files.pythonhosted.org/packages/c9/db/2db75b1b6190f1137b1c4e8ef6a22e1c338e46320f6329bfac819143e063/pyproj-3.7.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9c8577f0b7bb09118ec2e57e3babdc977127dd66326d6c5d755c76b063e6d9dc", size = 10841151, upload-time = "2025-08-14T12:04:00.271Z" },
+    { url = "https://files.pythonhosted.org/packages/89/f7/989643394ba23a286e9b7b3f09981496172f9e0d4512457ffea7dc47ffc7/pyproj-3.7.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a23f59904fac3a5e7364b3aa44d288234af267ca041adb2c2b14a903cd5d3ac5", size = 10751585, upload-time = "2025-08-14T12:04:02.228Z" },
+    { url = "https://files.pythonhosted.org/packages/53/6d/ad928fe975a6c14a093c92e6a319ca18f479f3336bb353a740bdba335681/pyproj-3.7.2-cp311-cp311-win32.whl", hash = "sha256:f2af4ed34b2cf3e031a2d85b067a3ecbd38df073c567e04b52fa7a0202afde8a", size = 5908533, upload-time = "2025-08-14T12:04:04.821Z" },
+    { url = "https://files.pythonhosted.org/packages/79/e0/b95584605cec9ed50b7ebaf7975d1c4ddeec5a86b7a20554ed8b60042bd7/pyproj-3.7.2-cp311-cp311-win_amd64.whl", hash = "sha256:0b7cb633565129677b2a183c4d807c727d1c736fcb0568a12299383056e67433", size = 6320742, upload-time = "2025-08-14T12:04:06.357Z" },
+    { url = "https://files.pythonhosted.org/packages/b7/4d/536e8f93bca808175c2d0a5ac9fdf69b960d8ab6b14f25030dccb07464d7/pyproj-3.7.2-cp311-cp311-win_arm64.whl", hash = "sha256:38b08d85e3a38e455625b80e9eb9f78027c8e2649a21dec4df1f9c3525460c71", size = 6245772, upload-time = "2025-08-14T12:04:08.365Z" },
+    { url = "https://files.pythonhosted.org/packages/8d/ab/9893ea9fb066be70ed9074ae543914a618c131ed8dff2da1e08b3a4df4db/pyproj-3.7.2-cp312-cp312-macosx_13_0_x86_64.whl", hash = "sha256:0a9bb26a6356fb5b033433a6d1b4542158fb71e3c51de49b4c318a1dff3aeaab", size = 6219832, upload-time = "2025-08-14T12:04:10.264Z" },
+    { url = "https://files.pythonhosted.org/packages/53/78/4c64199146eed7184eb0e85bedec60a4aa8853b6ffe1ab1f3a8b962e70a0/pyproj-3.7.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:567caa03021178861fad27fabde87500ec6d2ee173dd32f3e2d9871e40eebd68", size = 4620650, upload-time = "2025-08-14T12:04:11.978Z" },
+    { url = "https://files.pythonhosted.org/packages/b6/ac/14a78d17943898a93ef4f8c6a9d4169911c994e3161e54a7cedeba9d8dde/pyproj-3.7.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:c203101d1dc3c038a56cff0447acc515dd29d6e14811406ac539c21eed422b2a", size = 9667087, upload-time = "2025-08-14T12:04:13.964Z" },
+    { url = "https://files.pythonhosted.org/packages/b8/be/212882c450bba74fc8d7d35cbd57e4af84792f0a56194819d98106b075af/pyproj-3.7.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:1edc34266c0c23ced85f95a1ee8b47c9035eae6aca5b6b340327250e8e281630", size = 9552797, upload-time = "2025-08-14T12:04:16.624Z" },
+    { url = "https://files.pythonhosted.org/packages/ba/c0/c0f25c87b5d2a8686341c53c1792a222a480d6c9caf60311fec12c99ec26/pyproj-3.7.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:aa9f26c21bc0e2dc3d224cb1eb4020cf23e76af179a7c66fea49b828611e4260", size = 10837036, upload-time = "2025-08-14T12:04:18.733Z" },
+    { url = "https://files.pythonhosted.org/packages/5d/37/5cbd6772addde2090c91113332623a86e8c7d583eccb2ad02ea634c4a89f/pyproj-3.7.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f9428b318530625cb389b9ddc9c51251e172808a4af79b82809376daaeabe5e9", size = 10775952, upload-time = "2025-08-14T12:04:20.709Z" },
+    { url = "https://files.pythonhosted.org/packages/69/a1/dc250e3cf83eb4b3b9a2cf86fdb5e25288bd40037ae449695550f9e96b2f/pyproj-3.7.2-cp312-cp312-win32.whl", hash = "sha256:b3d99ed57d319da042f175f4554fc7038aa4bcecc4ac89e217e350346b742c9d", size = 5898872, upload-time = "2025-08-14T12:04:22.485Z" },
+    { url = "https://files.pythonhosted.org/packages/4a/a6/6fe724b72b70f2b00152d77282e14964d60ab092ec225e67c196c9b463e5/pyproj-3.7.2-cp312-cp312-win_amd64.whl", hash = "sha256:11614a054cd86a2ed968a657d00987a86eeb91fdcbd9ad3310478685dc14a128", size = 6312176, upload-time = "2025-08-14T12:04:24.736Z" },
+    { url = "https://files.pythonhosted.org/packages/5d/68/915cc32c02a91e76d02c8f55d5a138d6ef9e47a0d96d259df98f4842e558/pyproj-3.7.2-cp312-cp312-win_arm64.whl", hash = "sha256:509a146d1398bafe4f53273398c3bb0b4732535065fa995270e52a9d3676bca3", size = 6233452, upload-time = "2025-08-14T12:04:27.287Z" },
+    { url = "https://files.pythonhosted.org/packages/be/14/faf1b90d267cea68d7e70662e7f88cefdb1bc890bd596c74b959e0517a72/pyproj-3.7.2-cp313-cp313-macosx_13_0_x86_64.whl", hash = "sha256:19466e529b1b15eeefdf8ff26b06fa745856c044f2f77bf0edbae94078c1dfa1", size = 6214580, upload-time = "2025-08-14T12:04:28.804Z" },
+    { url = "https://files.pythonhosted.org/packages/35/48/da9a45b184d375f62667f62eba0ca68569b0bd980a0bb7ffcc1d50440520/pyproj-3.7.2-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:c79b9b84c4a626c5dc324c0d666be0bfcebd99f7538d66e8898c2444221b3da7", size = 4615388, upload-time = "2025-08-14T12:04:30.553Z" },
+    { url = "https://files.pythonhosted.org/packages/5e/e7/d2b459a4a64bca328b712c1b544e109df88e5c800f7c143cfbc404d39bfb/pyproj-3.7.2-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:ceecf374cacca317bc09e165db38ac548ee3cad07c3609442bd70311c59c21aa", size = 9628455, upload-time = "2025-08-14T12:04:32.435Z" },
+    { url = "https://files.pythonhosted.org/packages/f8/85/c2b1706e51942de19076eff082f8495e57d5151364e78b5bef4af4a1d94a/pyproj-3.7.2-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:5141a538ffdbe4bfd157421828bb2e07123a90a7a2d6f30fa1462abcfb5ce681", size = 9514269, upload-time = "2025-08-14T12:04:34.599Z" },
+    { url = "https://files.pythonhosted.org/packages/34/38/07a9b89ae7467872f9a476883a5bad9e4f4d1219d31060f0f2b282276cbe/pyproj-3.7.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f000841e98ea99acbb7b8ca168d67773b0191de95187228a16110245c5d954d5", size = 10808437, upload-time = "2025-08-14T12:04:36.485Z" },
+    { url = "https://files.pythonhosted.org/packages/12/56/fda1daeabbd39dec5b07f67233d09f31facb762587b498e6fc4572be9837/pyproj-3.7.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8115faf2597f281a42ab608ceac346b4eb1383d3b45ab474fd37341c4bf82a67", size = 10745540, upload-time = "2025-08-14T12:04:38.568Z" },
+    { url = "https://files.pythonhosted.org/packages/0d/90/c793182cbba65a39a11db2ac6b479fe76c59e6509ae75e5744c344a0da9d/pyproj-3.7.2-cp313-cp313-win32.whl", hash = "sha256:f18c0579dd6be00b970cb1a6719197fceecc407515bab37da0066f0184aafdf3", size = 5896506, upload-time = "2025-08-14T12:04:41.059Z" },
+    { url = "https://files.pythonhosted.org/packages/be/0f/747974129cf0d800906f81cd25efd098c96509026e454d4b66868779ab04/pyproj-3.7.2-cp313-cp313-win_amd64.whl", hash = "sha256:bb41c29d5f60854b1075853fe80c58950b398d4ebb404eb532536ac8d2834ed7", size = 6310195, upload-time = "2025-08-14T12:04:42.974Z" },
+    { url = "https://files.pythonhosted.org/packages/82/64/fc7598a53172c4931ec6edf5228280663063150625d3f6423b4c20f9daff/pyproj-3.7.2-cp313-cp313-win_arm64.whl", hash = "sha256:2b617d573be4118c11cd96b8891a0b7f65778fa7733ed8ecdb297a447d439100", size = 6230748, upload-time = "2025-08-14T12:04:44.491Z" },
+    { url = "https://files.pythonhosted.org/packages/aa/f0/611dd5cddb0d277f94b7af12981f56e1441bf8d22695065d4f0df5218498/pyproj-3.7.2-cp313-cp313t-macosx_13_0_x86_64.whl", hash = "sha256:d27b48f0e81beeaa2b4d60c516c3a1cfbb0c7ff6ef71256d8e9c07792f735279", size = 6241729, upload-time = "2025-08-14T12:04:46.274Z" },
+    { url = "https://files.pythonhosted.org/packages/15/93/40bd4a6c523ff9965e480870611aed7eda5aa2c6128c6537345a2b77b542/pyproj-3.7.2-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:55a3610d75023c7b1c6e583e48ef8f62918e85a2ae81300569d9f104d6684bb6", size = 4652497, upload-time = "2025-08-14T12:04:48.203Z" },
+    { url = "https://files.pythonhosted.org/packages/1b/ae/7150ead53c117880b35e0d37960d3138fe640a235feb9605cb9386f50bb0/pyproj-3.7.2-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:8d7349182fa622696787cc9e195508d2a41a64765da9b8a6bee846702b9e6220", size = 9942610, upload-time = "2025-08-14T12:04:49.652Z" },
+    { url = "https://files.pythonhosted.org/packages/d8/17/7a4a7eafecf2b46ab64e5c08176c20ceb5844b503eaa551bf12ccac77322/pyproj-3.7.2-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:d230b186eb876ed4f29a7c5ee310144c3a0e44e89e55f65fb3607e13f6db337c", size = 9692390, upload-time = "2025-08-14T12:04:51.731Z" },
+    { url = "https://files.pythonhosted.org/packages/c3/55/ae18f040f6410f0ea547a21ada7ef3e26e6c82befa125b303b02759c0e9d/pyproj-3.7.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:237499c7862c578d0369e2b8ac56eec550e391a025ff70e2af8417139dabb41c", size = 11047596, upload-time = "2025-08-14T12:04:53.748Z" },
+    { url = "https://files.pythonhosted.org/packages/e6/2e/d3fff4d2909473f26ae799f9dda04caa322c417a51ff3b25763f7d03b233/pyproj-3.7.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8c225f5978abd506fd9a78eaaf794435e823c9156091cabaab5374efb29d7f69", size = 10896975, upload-time = "2025-08-14T12:04:55.875Z" },
+    { url = "https://files.pythonhosted.org/packages/f2/bc/8fc7d3963d87057b7b51ebe68c1e7c51c23129eee5072ba6b86558544a46/pyproj-3.7.2-cp313-cp313t-win32.whl", hash = "sha256:2da731876d27639ff9d2d81c151f6ab90a1546455fabd93368e753047be344a2", size = 5953057, upload-time = "2025-08-14T12:04:58.466Z" },
+    { url = "https://files.pythonhosted.org/packages/cc/27/ea9809966cc47d2d51e6d5ae631ea895f7c7c7b9b3c29718f900a8f7d197/pyproj-3.7.2-cp313-cp313t-win_amd64.whl", hash = "sha256:f54d91ae18dd23b6c0ab48126d446820e725419da10617d86a1b69ada6d881d3", size = 6375414, upload-time = "2025-08-14T12:04:59.861Z" },
+    { url = "https://files.pythonhosted.org/packages/5b/f8/1ef0129fba9a555c658e22af68989f35e7ba7b9136f25758809efec0cd6e/pyproj-3.7.2-cp313-cp313t-win_arm64.whl", hash = "sha256:fc52ba896cfc3214dc9f9ca3c0677a623e8fdd096b257c14a31e719d21ff3fdd", size = 6262501, upload-time = "2025-08-14T12:05:01.39Z" },
+]
+
+[[package]]
+name = "pytest"
+version = "9.0.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "colorama", marker = "sys_platform == 'win32'" },
+    { name = "iniconfig" },
+    { name = "packaging" },
+    { name = "pluggy" },
+    { name = "pygments" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" },
+]
+
+[[package]]
+name = "python-dateutil"
+version = "2.9.0.post0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "six" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432, upload-time = "2024-03-01T18:36:20.211Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" },
+]
+
+[[package]]
+name = "python-json-logger"
+version = "4.0.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/29/bf/eca6a3d43db1dae7070f70e160ab20b807627ba953663ba07928cdd3dc58/python_json_logger-4.0.0.tar.gz", hash = "sha256:f58e68eb46e1faed27e0f574a55a0455eecd7b8a5b88b85a784519ba3cff047f", size = 17683, upload-time = "2025-10-06T04:15:18.984Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/51/e5/fecf13f06e5e5f67e8837d777d1bc43fac0ed2b77a676804df5c34744727/python_json_logger-4.0.0-py3-none-any.whl", hash = "sha256:af09c9daf6a813aa4cc7180395f50f2a9e5fa056034c9953aec92e381c5ba1e2", size = 15548, upload-time = "2025-10-06T04:15:17.553Z" },
+]
+
+[[package]]
+name = "pytz"
+version = "2025.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/f8/bf/abbd3cdfb8fbc7fb3d4d38d320f2441b1e7cbe29be4f23797b4a2b5d8aac/pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3", size = 320884, upload-time = "2025-03-25T02:25:00.538Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00", size = 509225, upload-time = "2025-03-25T02:24:58.468Z" },
+]
+
+[[package]]
+name = "pywinpty"
+version = "3.0.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/f3/bb/a7cc2967c5c4eceb6cc49cfe39447d4bfc56e6c865e7c2249b6eb978935f/pywinpty-3.0.2.tar.gz", hash = "sha256:1505cc4cb248af42cb6285a65c9c2086ee9e7e574078ee60933d5d7fa86fb004", size = 30669, upload-time = "2025-10-03T21:16:29.205Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/a6/a1/409c1651c9f874d598c10f51ff586c416625601df4bca315d08baec4c3e3/pywinpty-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:327790d70e4c841ebd9d0f295a780177149aeb405bca44c7115a3de5c2054b23", size = 2050304, upload-time = "2025-10-03T21:19:29.466Z" },
+    { url = "https://files.pythonhosted.org/packages/02/4e/1098484e042c9485f56f16eb2b69b43b874bd526044ee401512234cf9e04/pywinpty-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:99fdd9b455f0ad6419aba6731a7a0d2f88ced83c3c94a80ff9533d95fa8d8a9e", size = 2050391, upload-time = "2025-10-03T21:19:01.642Z" },
+    { url = "https://files.pythonhosted.org/packages/fc/19/b757fe28008236a4a713e813283721b8a40aa60cd7d3f83549f2e25a3155/pywinpty-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:18f78b81e4cfee6aabe7ea8688441d30247b73e52cd9657138015c5f4ee13a51", size = 2050057, upload-time = "2025-10-03T21:19:26.732Z" },
+    { url = "https://files.pythonhosted.org/packages/cb/44/cbae12ecf6f4fa4129c36871fd09c6bef4f98d5f625ecefb5e2449765508/pywinpty-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:663383ecfab7fc382cc97ea5c4f7f0bb32c2f889259855df6ea34e5df42d305b", size = 2049874, upload-time = "2025-10-03T21:18:53.923Z" },
+]
+
+[[package]]
+name = "pyyaml"
+version = "6.0.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/6d/16/a95b6757765b7b031c9374925bb718d55e0a9ba8a1b6a12d25962ea44347/pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e", size = 185826, upload-time = "2025-09-25T21:31:58.655Z" },
+    { url = "https://files.pythonhosted.org/packages/16/19/13de8e4377ed53079ee996e1ab0a9c33ec2faf808a4647b7b4c0d46dd239/pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824", size = 175577, upload-time = "2025-09-25T21:32:00.088Z" },
+    { url = "https://files.pythonhosted.org/packages/0c/62/d2eb46264d4b157dae1275b573017abec435397aa59cbcdab6fc978a8af4/pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c", size = 775556, upload-time = "2025-09-25T21:32:01.31Z" },
+    { url = "https://files.pythonhosted.org/packages/10/cb/16c3f2cf3266edd25aaa00d6c4350381c8b012ed6f5276675b9eba8d9ff4/pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00", size = 882114, upload-time = "2025-09-25T21:32:03.376Z" },
+    { url = "https://files.pythonhosted.org/packages/71/60/917329f640924b18ff085ab889a11c763e0b573da888e8404ff486657602/pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d", size = 806638, upload-time = "2025-09-25T21:32:04.553Z" },
+    { url = "https://files.pythonhosted.org/packages/dd/6f/529b0f316a9fd167281a6c3826b5583e6192dba792dd55e3203d3f8e655a/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a", size = 767463, upload-time = "2025-09-25T21:32:06.152Z" },
+    { url = "https://files.pythonhosted.org/packages/f2/6a/b627b4e0c1dd03718543519ffb2f1deea4a1e6d42fbab8021936a4d22589/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4", size = 794986, upload-time = "2025-09-25T21:32:07.367Z" },
+    { url = "https://files.pythonhosted.org/packages/45/91/47a6e1c42d9ee337c4839208f30d9f09caa9f720ec7582917b264defc875/pyyaml-6.0.3-cp311-cp311-win32.whl", hash = "sha256:8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b", size = 142543, upload-time = "2025-09-25T21:32:08.95Z" },
+    { url = "https://files.pythonhosted.org/packages/da/e3/ea007450a105ae919a72393cb06f122f288ef60bba2dc64b26e2646fa315/pyyaml-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf", size = 158763, upload-time = "2025-09-25T21:32:09.96Z" },
+    { url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063, upload-time = "2025-09-25T21:32:11.445Z" },
+    { url = "https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973, upload-time = "2025-09-25T21:32:12.492Z" },
+    { url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116, upload-time = "2025-09-25T21:32:13.652Z" },
+    { url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011, upload-time = "2025-09-25T21:32:15.21Z" },
+    { url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870, upload-time = "2025-09-25T21:32:16.431Z" },
+    { url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089, upload-time = "2025-09-25T21:32:17.56Z" },
+    { url = "https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181, upload-time = "2025-09-25T21:32:18.834Z" },
+    { url = "https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658, upload-time = "2025-09-25T21:32:20.209Z" },
+    { url = "https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003, upload-time = "2025-09-25T21:32:21.167Z" },
+    { url = "https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344, upload-time = "2025-09-25T21:32:22.617Z" },
+    { url = "https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8", size = 181669, upload-time = "2025-09-25T21:32:23.673Z" },
+    { url = "https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1", size = 173252, upload-time = "2025-09-25T21:32:25.149Z" },
+    { url = "https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c", size = 767081, upload-time = "2025-09-25T21:32:26.575Z" },
+    { url = "https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5", size = 841159, upload-time = "2025-09-25T21:32:27.727Z" },
+    { url = "https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6", size = 801626, upload-time = "2025-09-25T21:32:28.878Z" },
+    { url = "https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6", size = 753613, upload-time = "2025-09-25T21:32:30.178Z" },
+    { url = "https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be", size = 794115, upload-time = "2025-09-25T21:32:31.353Z" },
+    { url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427, upload-time = "2025-09-25T21:32:32.58Z" },
+    { url = "https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090, upload-time = "2025-09-25T21:32:33.659Z" },
+    { url = "https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246, upload-time = "2025-09-25T21:32:34.663Z" },
+]
+
+[[package]]
+name = "pyyaml-env-tag"
+version = "1.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "pyyaml" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/eb/2e/79c822141bfd05a853236b504869ebc6b70159afc570e1d5a20641782eaa/pyyaml_env_tag-1.1.tar.gz", hash = "sha256:2eb38b75a2d21ee0475d6d97ec19c63287a7e140231e4214969d0eac923cd7ff", size = 5737, upload-time = "2025-05-13T15:24:01.64Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/04/11/432f32f8097b03e3cd5fe57e88efb685d964e2e5178a48ed61e841f7fdce/pyyaml_env_tag-1.1-py3-none-any.whl", hash = "sha256:17109e1a528561e32f026364712fee1264bc2ea6715120891174ed1b980d2e04", size = 4722, upload-time = "2025-05-13T15:23:59.629Z" },
+]
+
+[[package]]
+name = "pyzmq"
+version = "27.1.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "cffi", marker = "implementation_name == 'pypy'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/04/0b/3c9baedbdf613ecaa7aa07027780b8867f57b6293b6ee50de316c9f3222b/pyzmq-27.1.0.tar.gz", hash = "sha256:ac0765e3d44455adb6ddbf4417dcce460fc40a05978c08efdf2948072f6db540", size = 281750, upload-time = "2025-09-08T23:10:18.157Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/06/5d/305323ba86b284e6fcb0d842d6adaa2999035f70f8c38a9b6d21ad28c3d4/pyzmq-27.1.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:226b091818d461a3bef763805e75685e478ac17e9008f49fce2d3e52b3d58b86", size = 1333328, upload-time = "2025-09-08T23:07:45.946Z" },
+    { url = "https://files.pythonhosted.org/packages/bd/a0/fc7e78a23748ad5443ac3275943457e8452da67fda347e05260261108cbc/pyzmq-27.1.0-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:0790a0161c281ca9723f804871b4027f2e8b5a528d357c8952d08cd1a9c15581", size = 908803, upload-time = "2025-09-08T23:07:47.551Z" },
+    { url = "https://files.pythonhosted.org/packages/7e/22/37d15eb05f3bdfa4abea6f6d96eb3bb58585fbd3e4e0ded4e743bc650c97/pyzmq-27.1.0-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c895a6f35476b0c3a54e3eb6ccf41bf3018de937016e6e18748317f25d4e925f", size = 668836, upload-time = "2025-09-08T23:07:49.436Z" },
+    { url = "https://files.pythonhosted.org/packages/b1/c4/2a6fe5111a01005fc7af3878259ce17684fabb8852815eda6225620f3c59/pyzmq-27.1.0-cp311-cp311-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5bbf8d3630bf96550b3be8e1fc0fea5cbdc8d5466c1192887bd94869da17a63e", size = 857038, upload-time = "2025-09-08T23:07:51.234Z" },
+    { url = "https://files.pythonhosted.org/packages/cb/eb/bfdcb41d0db9cd233d6fb22dc131583774135505ada800ebf14dfb0a7c40/pyzmq-27.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:15c8bd0fe0dabf808e2d7a681398c4e5ded70a551ab47482067a572c054c8e2e", size = 1657531, upload-time = "2025-09-08T23:07:52.795Z" },
+    { url = "https://files.pythonhosted.org/packages/ab/21/e3180ca269ed4a0de5c34417dfe71a8ae80421198be83ee619a8a485b0c7/pyzmq-27.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:bafcb3dd171b4ae9f19ee6380dfc71ce0390fefaf26b504c0e5f628d7c8c54f2", size = 2034786, upload-time = "2025-09-08T23:07:55.047Z" },
+    { url = "https://files.pythonhosted.org/packages/3b/b1/5e21d0b517434b7f33588ff76c177c5a167858cc38ef740608898cd329f2/pyzmq-27.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e829529fcaa09937189178115c49c504e69289abd39967cd8a4c215761373394", size = 1894220, upload-time = "2025-09-08T23:07:57.172Z" },
+    { url = "https://files.pythonhosted.org/packages/03/f2/44913a6ff6941905efc24a1acf3d3cb6146b636c546c7406c38c49c403d4/pyzmq-27.1.0-cp311-cp311-win32.whl", hash = "sha256:6df079c47d5902af6db298ec92151db82ecb557af663098b92f2508c398bb54f", size = 567155, upload-time = "2025-09-08T23:07:59.05Z" },
+    { url = "https://files.pythonhosted.org/packages/23/6d/d8d92a0eb270a925c9b4dd039c0b4dc10abc2fcbc48331788824ef113935/pyzmq-27.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:190cbf120fbc0fc4957b56866830def56628934a9d112aec0e2507aa6a032b97", size = 633428, upload-time = "2025-09-08T23:08:00.663Z" },
+    { url = "https://files.pythonhosted.org/packages/ae/14/01afebc96c5abbbd713ecfc7469cfb1bc801c819a74ed5c9fad9a48801cb/pyzmq-27.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:eca6b47df11a132d1745eb3b5b5e557a7dae2c303277aa0e69c6ba91b8736e07", size = 559497, upload-time = "2025-09-08T23:08:02.15Z" },
+    { url = "https://files.pythonhosted.org/packages/92/e7/038aab64a946d535901103da16b953c8c9cc9c961dadcbf3609ed6428d23/pyzmq-27.1.0-cp312-abi3-macosx_10_15_universal2.whl", hash = "sha256:452631b640340c928fa343801b0d07eb0c3789a5ffa843f6e1a9cee0ba4eb4fc", size = 1306279, upload-time = "2025-09-08T23:08:03.807Z" },
+    { url = "https://files.pythonhosted.org/packages/e8/5e/c3c49fdd0f535ef45eefcc16934648e9e59dace4a37ee88fc53f6cd8e641/pyzmq-27.1.0-cp312-abi3-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:1c179799b118e554b66da67d88ed66cd37a169f1f23b5d9f0a231b4e8d44a113", size = 895645, upload-time = "2025-09-08T23:08:05.301Z" },
+    { url = "https://files.pythonhosted.org/packages/f8/e5/b0b2504cb4e903a74dcf1ebae157f9e20ebb6ea76095f6cfffea28c42ecd/pyzmq-27.1.0-cp312-abi3-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3837439b7f99e60312f0c926a6ad437b067356dc2bc2ec96eb395fd0fe804233", size = 652574, upload-time = "2025-09-08T23:08:06.828Z" },
+    { url = "https://files.pythonhosted.org/packages/f8/9b/c108cdb55560eaf253f0cbdb61b29971e9fb34d9c3499b0e96e4e60ed8a5/pyzmq-27.1.0-cp312-abi3-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:43ad9a73e3da1fab5b0e7e13402f0b2fb934ae1c876c51d0afff0e7c052eca31", size = 840995, upload-time = "2025-09-08T23:08:08.396Z" },
+    { url = "https://files.pythonhosted.org/packages/c2/bb/b79798ca177b9eb0825b4c9998c6af8cd2a7f15a6a1a4272c1d1a21d382f/pyzmq-27.1.0-cp312-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0de3028d69d4cdc475bfe47a6128eb38d8bc0e8f4d69646adfbcd840facbac28", size = 1642070, upload-time = "2025-09-08T23:08:09.989Z" },
+    { url = "https://files.pythonhosted.org/packages/9c/80/2df2e7977c4ede24c79ae39dcef3899bfc5f34d1ca7a5b24f182c9b7a9ca/pyzmq-27.1.0-cp312-abi3-musllinux_1_2_i686.whl", hash = "sha256:cf44a7763aea9298c0aa7dbf859f87ed7012de8bda0f3977b6fb1d96745df856", size = 2021121, upload-time = "2025-09-08T23:08:11.907Z" },
+    { url = "https://files.pythonhosted.org/packages/46/bd/2d45ad24f5f5ae7e8d01525eb76786fa7557136555cac7d929880519e33a/pyzmq-27.1.0-cp312-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:f30f395a9e6fbca195400ce833c731e7b64c3919aa481af4d88c3759e0cb7496", size = 1878550, upload-time = "2025-09-08T23:08:13.513Z" },
+    { url = "https://files.pythonhosted.org/packages/e6/2f/104c0a3c778d7c2ab8190e9db4f62f0b6957b53c9d87db77c284b69f33ea/pyzmq-27.1.0-cp312-abi3-win32.whl", hash = "sha256:250e5436a4ba13885494412b3da5d518cd0d3a278a1ae640e113c073a5f88edd", size = 559184, upload-time = "2025-09-08T23:08:15.163Z" },
+    { url = "https://files.pythonhosted.org/packages/fc/7f/a21b20d577e4100c6a41795842028235998a643b1ad406a6d4163ea8f53e/pyzmq-27.1.0-cp312-abi3-win_amd64.whl", hash = "sha256:9ce490cf1d2ca2ad84733aa1d69ce6855372cb5ce9223802450c9b2a7cba0ccf", size = 619480, upload-time = "2025-09-08T23:08:17.192Z" },
+    { url = "https://files.pythonhosted.org/packages/78/c2/c012beae5f76b72f007a9e91ee9401cb88c51d0f83c6257a03e785c81cc2/pyzmq-27.1.0-cp312-abi3-win_arm64.whl", hash = "sha256:75a2f36223f0d535a0c919e23615fc85a1e23b71f40c7eb43d7b1dedb4d8f15f", size = 552993, upload-time = "2025-09-08T23:08:18.926Z" },
+    { url = "https://files.pythonhosted.org/packages/60/cb/84a13459c51da6cec1b7b1dc1a47e6db6da50b77ad7fd9c145842750a011/pyzmq-27.1.0-cp313-cp313-android_24_arm64_v8a.whl", hash = "sha256:93ad4b0855a664229559e45c8d23797ceac03183c7b6f5b4428152a6b06684a5", size = 1122436, upload-time = "2025-09-08T23:08:20.801Z" },
+    { url = "https://files.pythonhosted.org/packages/dc/b6/94414759a69a26c3dd674570a81813c46a078767d931a6c70ad29fc585cb/pyzmq-27.1.0-cp313-cp313-android_24_x86_64.whl", hash = "sha256:fbb4f2400bfda24f12f009cba62ad5734148569ff4949b1b6ec3b519444342e6", size = 1156301, upload-time = "2025-09-08T23:08:22.47Z" },
+    { url = "https://files.pythonhosted.org/packages/a5/ad/15906493fd40c316377fd8a8f6b1f93104f97a752667763c9b9c1b71d42d/pyzmq-27.1.0-cp313-cp313t-macosx_10_15_universal2.whl", hash = "sha256:e343d067f7b151cfe4eb3bb796a7752c9d369eed007b91231e817071d2c2fec7", size = 1341197, upload-time = "2025-09-08T23:08:24.286Z" },
+    { url = "https://files.pythonhosted.org/packages/14/1d/d343f3ce13db53a54cb8946594e567410b2125394dafcc0268d8dda027e0/pyzmq-27.1.0-cp313-cp313t-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:08363b2011dec81c354d694bdecaef4770e0ae96b9afea70b3f47b973655cc05", size = 897275, upload-time = "2025-09-08T23:08:26.063Z" },
+    { url = "https://files.pythonhosted.org/packages/69/2d/d83dd6d7ca929a2fc67d2c3005415cdf322af7751d773524809f9e585129/pyzmq-27.1.0-cp313-cp313t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d54530c8c8b5b8ddb3318f481297441af102517602b569146185fa10b63f4fa9", size = 660469, upload-time = "2025-09-08T23:08:27.623Z" },
+    { url = "https://files.pythonhosted.org/packages/3e/cd/9822a7af117f4bc0f1952dbe9ef8358eb50a24928efd5edf54210b850259/pyzmq-27.1.0-cp313-cp313t-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6f3afa12c392f0a44a2414056d730eebc33ec0926aae92b5ad5cf26ebb6cc128", size = 847961, upload-time = "2025-09-08T23:08:29.672Z" },
+    { url = "https://files.pythonhosted.org/packages/9a/12/f003e824a19ed73be15542f172fd0ec4ad0b60cf37436652c93b9df7c585/pyzmq-27.1.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:c65047adafe573ff023b3187bb93faa583151627bc9c51fc4fb2c561ed689d39", size = 1650282, upload-time = "2025-09-08T23:08:31.349Z" },
+    { url = "https://files.pythonhosted.org/packages/d5/4a/e82d788ed58e9a23995cee70dbc20c9aded3d13a92d30d57ec2291f1e8a3/pyzmq-27.1.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:90e6e9441c946a8b0a667356f7078d96411391a3b8f80980315455574177ec97", size = 2024468, upload-time = "2025-09-08T23:08:33.543Z" },
+    { url = "https://files.pythonhosted.org/packages/d9/94/2da0a60841f757481e402b34bf4c8bf57fa54a5466b965de791b1e6f747d/pyzmq-27.1.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:add071b2d25f84e8189aaf0882d39a285b42fa3853016ebab234a5e78c7a43db", size = 1885394, upload-time = "2025-09-08T23:08:35.51Z" },
+    { url = "https://files.pythonhosted.org/packages/4f/6f/55c10e2e49ad52d080dc24e37adb215e5b0d64990b57598abc2e3f01725b/pyzmq-27.1.0-cp313-cp313t-win32.whl", hash = "sha256:7ccc0700cfdf7bd487bea8d850ec38f204478681ea02a582a8da8171b7f90a1c", size = 574964, upload-time = "2025-09-08T23:08:37.178Z" },
+    { url = "https://files.pythonhosted.org/packages/87/4d/2534970ba63dd7c522d8ca80fb92777f362c0f321900667c615e2067cb29/pyzmq-27.1.0-cp313-cp313t-win_amd64.whl", hash = "sha256:8085a9fba668216b9b4323be338ee5437a235fe275b9d1610e422ccc279733e2", size = 641029, upload-time = "2025-09-08T23:08:40.595Z" },
+    { url = "https://files.pythonhosted.org/packages/f6/fa/f8aea7a28b0641f31d40dea42d7ef003fded31e184ef47db696bc74cd610/pyzmq-27.1.0-cp313-cp313t-win_arm64.whl", hash = "sha256:6bb54ca21bcfe361e445256c15eedf083f153811c37be87e0514934d6913061e", size = 561541, upload-time = "2025-09-08T23:08:42.668Z" },
+    { url = "https://files.pythonhosted.org/packages/4c/c6/c4dcdecdbaa70969ee1fdced6d7b8f60cfabe64d25361f27ac4665a70620/pyzmq-27.1.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:18770c8d3563715387139060d37859c02ce40718d1faf299abddcdcc6a649066", size = 836265, upload-time = "2025-09-08T23:09:49.376Z" },
+    { url = "https://files.pythonhosted.org/packages/3e/79/f38c92eeaeb03a2ccc2ba9866f0439593bb08c5e3b714ac1d553e5c96e25/pyzmq-27.1.0-pp311-pypy311_pp73-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:ac25465d42f92e990f8d8b0546b01c391ad431c3bf447683fdc40565941d0604", size = 800208, upload-time = "2025-09-08T23:09:51.073Z" },
+    { url = "https://files.pythonhosted.org/packages/49/0e/3f0d0d335c6b3abb9b7b723776d0b21fa7f3a6c819a0db6097059aada160/pyzmq-27.1.0-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:53b40f8ae006f2734ee7608d59ed661419f087521edbfc2149c3932e9c14808c", size = 567747, upload-time = "2025-09-08T23:09:52.698Z" },
+    { url = "https://files.pythonhosted.org/packages/a1/cf/f2b3784d536250ffd4be70e049f3b60981235d70c6e8ce7e3ef21e1adb25/pyzmq-27.1.0-pp311-pypy311_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f605d884e7c8be8fe1aa94e0a783bf3f591b84c24e4bc4f3e7564c82ac25e271", size = 747371, upload-time = "2025-09-08T23:09:54.563Z" },
+    { url = "https://files.pythonhosted.org/packages/01/1b/5dbe84eefc86f48473947e2f41711aded97eecef1231f4558f1f02713c12/pyzmq-27.1.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c9f7f6e13dff2e44a6afeaf2cf54cee5929ad64afaf4d40b50f93c58fc687355", size = 544862, upload-time = "2025-09-08T23:09:56.509Z" },
+]
+
+[[package]]
+name = "rasterio"
+version = "1.4.4"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "affine" },
+    { name = "attrs" },
+    { name = "certifi" },
+    { name = "click" },
+    { name = "click-plugins" },
+    { name = "cligj" },
+    { name = "numpy" },
+    { name = "pyparsing" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/ec/fa/fce8dc9f09e5bc6520b6fc1b4ecfa510af9ca06eb42ad7bdff9c9b8989d0/rasterio-1.4.4.tar.gz", hash = "sha256:c95424e2c7f009b8f7df1095d645c52895cd332c0c2e1b4c2e073ea28b930320", size = 445004, upload-time = "2025-12-12T18:01:08.971Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/c6/0d/d3859e49ab94464de2623fec82c6798d8d7c8bea2473cd2696fc5e09f717/rasterio-1.4.4-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:b8eea428b5f0c78a963f6003a19b60777df83a0aba8c28231d65431e32ac160e", size = 21144125, upload-time = "2025-12-12T17:58:59.511Z" },
+    { url = "https://files.pythonhosted.org/packages/aa/3c/97ba4b146309cdc0e36f289b02ac69465b026a21afc828e4e4e1dc39466a/rasterio-1.4.4-cp311-cp311-macosx_15_0_x86_64.whl", hash = "sha256:1cc0ea5aa0d22f5f349aa221674481de689b7b3a99607ce6bb58a29e5be54d17", size = 25746406, upload-time = "2025-12-12T17:59:02.902Z" },
+    { url = "https://files.pythonhosted.org/packages/ce/33/75f81bd837ac2336b24456fdb249597a4b9af2a212b7151f64d09022be36/rasterio-1.4.4-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:7eb25b23666b29dadfc49a59206cead62c99190584b61771bba0e95f7da06801", size = 34587242, upload-time = "2025-12-12T17:59:05.848Z" },
+    { url = "https://files.pythonhosted.org/packages/f9/77/3869a426f6e752dde13f3868cdf16253ca0214f92107db79c1583c9aa07b/rasterio-1.4.4-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:e24b7b8c2df801dde2a1dffb44c58902bd76b5cab740dc11de4ff9963992a71a", size = 35881871, upload-time = "2025-12-12T17:59:09.779Z" },
+    { url = "https://files.pythonhosted.org/packages/66/d0/3818859ddbd3750d0ef5a6580a3272e81764286d943c689dd41e49b8b786/rasterio-1.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:0718630f607be2f5742d8e4b34b434746fd788a192d77eefc9bb924399fea802", size = 25716477, upload-time = "2025-12-12T17:59:13.519Z" },
+    { url = "https://files.pythonhosted.org/packages/4b/02/039eb4970c93aaef4c9eb1ee159abad18e6e7f932c2eed575c95f78d94f6/rasterio-1.4.4-cp311-cp311-win_arm64.whl", hash = "sha256:0308ff4762ae9eb40a991f12d758626b59af4376b13675480391dd7295d17bbf", size = 24075993, upload-time = "2025-12-12T17:59:16.407Z" },
+    { url = "https://files.pythonhosted.org/packages/4c/fc/63d89ddfcb4643730553683ee322566b9b15fe56d026e4c21c4f4f5d9d26/rasterio-1.4.4-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:f3c4f0cbd188f893011f2a0a6dc2852b3892799b3a0d79eddf92f2b115ec7ed7", size = 21120715, upload-time = "2025-12-12T17:59:19.35Z" },
+    { url = "https://files.pythonhosted.org/packages/43/70/2c003f76a23dbb078fdee35c8e2ec490d2ad8982f4dc956ba08b56027b87/rasterio-1.4.4-cp312-cp312-macosx_15_0_x86_64.whl", hash = "sha256:6fce26090b9f509eab337228420145947c491a13628965410f25bc3e6e05cf75", size = 25732944, upload-time = "2025-12-12T17:59:22.533Z" },
+    { url = "https://files.pythonhosted.org/packages/f6/cc/4a8e92362c0ff496dd1007c3dcba66e9ededf1a45eca8ad1db302b071c49/rasterio-1.4.4-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:c1c722da390dc264aeccdc0dc200ca37923875d910ca4cd5bec0fec351bb818e", size = 34295209, upload-time = "2025-12-12T17:59:26.035Z" },
+    { url = "https://files.pythonhosted.org/packages/e6/6d/717d2dec47fbefad33ca0d27bd5f0d543b1d1bc9fcab5ef82a13adaaf38d/rasterio-1.4.4-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:98b6dfb8282b2a54b9d75c3dc8d2520a69bbc66916c7d43de8e0bbf6e0240ca1", size = 35661866, upload-time = "2025-12-12T17:59:29.928Z" },
+    { url = "https://files.pythonhosted.org/packages/ed/60/ae3351fba2726ec0976974ce2eb030c159edd3363b8771e832b8db571c24/rasterio-1.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:9513f4c7a6d93b45098f8dff2421fa9516604e3bfbf35aa144484a88d36a321f", size = 25682853, upload-time = "2025-12-12T17:59:35.869Z" },
+    { url = "https://files.pythonhosted.org/packages/38/ee/35387296bbacfc5cbbb4273228b1b959793d3ce38b0402a07f11a248420b/rasterio-1.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:60b49a482e0f12f12ce9d2cc3090add02f89f3d422e85f2cffaa9207adb83c04", size = 24043249, upload-time = "2025-12-12T17:59:39.915Z" },
+    { url = "https://files.pythonhosted.org/packages/c1/fe/e3e37041c49956f4f4cbe473c3fe290aaba96ed20e9c07da304e0cad2015/rasterio-1.4.4-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:df26c96aa81ffbd0b33189680859211eadf9950123c21579f84de73bb0f91d81", size = 21107336, upload-time = "2025-12-12T17:59:43.585Z" },
+    { url = "https://files.pythonhosted.org/packages/f3/02/c217fdcc8e80a4b7d1b1bc4529d78f98452816e9add53ff8742049a77ae7/rasterio-1.4.4-cp313-cp313-macosx_15_0_x86_64.whl", hash = "sha256:b3af0ecc922a80f3755516629f7948e37bade9077b5f5c12a3869a5e7f01619b", size = 25719929, upload-time = "2025-12-12T17:59:47.64Z" },
+    { url = "https://files.pythonhosted.org/packages/c0/d0/7f177f37bc9595d809dabb0073abd0c42358469f6b10875192b46331c652/rasterio-1.4.4-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:7ce3b0f9a22e95a27790087908753973644d7c3877d495ec9bd6e04a25233ca4", size = 34198845, upload-time = "2025-12-12T17:59:52.405Z" },
+    { url = "https://files.pythonhosted.org/packages/7b/84/66c0d9cca2a09074ec2ce6fffa87709ca51b0d197ae742d835e841bac660/rasterio-1.4.4-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:c072450caa96428b1218b030500bb908fd6f09bc013a88969ff81a124b6a112a", size = 35576074, upload-time = "2025-12-12T17:59:56.392Z" },
+    { url = "https://files.pythonhosted.org/packages/32/68/f7df5478458ace2fa50be43e9fab1a39957a0e71afaa3e6147ec289e0fc8/rasterio-1.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:16ee92ef10c0ba89f45f9c2b40fca9f971f357385f04ee9b716fb09cbd9ce20c", size = 25680573, upload-time = "2025-12-12T18:00:00.45Z" },
+    { url = "https://files.pythonhosted.org/packages/34/e5/1bdaccb658430dfd391ad4a63d206546f36639d7e4130bf31f125c6525b4/rasterio-1.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:65c10afe64b5e488185aaff0b659e08eda22c89285b54a3e433b80e6c6621770", size = 24040367, upload-time = "2025-12-12T18:00:04.443Z" },
+    { url = "https://files.pythonhosted.org/packages/32/76/54643a7d1d650fd7f1acea9093c298603e4c01bba6f90be2254310b48507/rasterio-1.4.4-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:18c2c1130e789dc2771d0aa5ec4b56d5b8a0097c648ccb94882d5ff3ab55c928", size = 21247203, upload-time = "2025-12-12T18:00:07.547Z" },
+    { url = "https://files.pythonhosted.org/packages/76/ef/434b4849ccd6a3e03a0b1ac37c963c1771564945745613d15c5d96ce768d/rasterio-1.4.4-cp313-cp313t-macosx_15_0_x86_64.whl", hash = "sha256:2d1654b7ffa6f3dde42c5fd27159ae45148c11e352de26f12fe7313a3236aeed", size = 25822050, upload-time = "2025-12-12T18:00:11.081Z" },
+    { url = "https://files.pythonhosted.org/packages/2d/fa/fe9a478aa0cde246da58baeb0df3248c7ca174e4d9c9b27e81b504e40a76/rasterio-1.4.4-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:c4022cbddb659856e120603b12233cec8913ae760fff220657ce888c3c6b9f9d", size = 34833783, upload-time = "2025-12-12T18:00:14.525Z" },
+    { url = "https://files.pythonhosted.org/packages/04/cd/ed4716590dbcd4b8ae633417d758564e510bee4d6aaac5050a0f6d5179c5/rasterio-1.4.4-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:96b88880551a07b7a3b50439483cefbd9af91a09e19ff2b736815994e5671314", size = 35738114, upload-time = "2025-12-12T18:00:17.96Z" },
+    { url = "https://files.pythonhosted.org/packages/7e/29/da7050d11ba1d041e0333ac14768e6e9ca1aa2b9fa8416f317d2650ed276/rasterio-1.4.4-cp313-cp313t-win_amd64.whl", hash = "sha256:def75d486d0ab8f306f918a913c425ed57159495518c54efe8e18d5164d37d90", size = 25896835, upload-time = "2025-12-12T18:00:21.411Z" },
+    { url = "https://files.pythonhosted.org/packages/88/80/304dbe5434c4aa8dfaf90480c16d770161796a6a61fa88e72e8a402153df/rasterio-1.4.4-cp313-cp313t-win_arm64.whl", hash = "sha256:770b7e86f6c565e6f9cf30f6fa4479a5a2bab4e10ff44fe7acfd518ca4a71d1b", size = 24128074, upload-time = "2025-12-12T18:00:24.653Z" },
+]
+
+[[package]]
+name = "referencing"
+version = "0.37.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "attrs" },
+    { name = "rpds-py" },
+    { name = "typing-extensions", marker = "python_full_version < '3.13'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/22/f5/df4e9027acead3ecc63e50fe1e36aca1523e1719559c499951bb4b53188f/referencing-0.37.0.tar.gz", hash = "sha256:44aefc3142c5b842538163acb373e24cce6632bd54bdb01b21ad5863489f50d8", size = 78036, upload-time = "2025-10-13T15:30:48.871Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/2c/58/ca301544e1fa93ed4f80d724bf5b194f6e4b945841c5bfd555878eea9fcb/referencing-0.37.0-py3-none-any.whl", hash = "sha256:381329a9f99628c9069361716891d34ad94af76e461dcb0335825aecc7692231", size = 26766, upload-time = "2025-10-13T15:30:47.625Z" },
+]
+
+[[package]]
+name = "requests"
+version = "2.32.5"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "certifi" },
+    { name = "charset-normalizer" },
+    { name = "idna" },
+    { name = "urllib3" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" },
+]
+
+[[package]]
+name = "rfc3339-validator"
+version = "0.1.4"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "six" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/28/ea/a9387748e2d111c3c2b275ba970b735e04e15cdb1eb30693b6b5708c4dbd/rfc3339_validator-0.1.4.tar.gz", hash = "sha256:138a2abdf93304ad60530167e51d2dfb9549521a836871b88d7f4695d0022f6b", size = 5513, upload-time = "2021-05-12T16:37:54.178Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/7b/44/4e421b96b67b2daff264473f7465db72fbdf36a07e05494f50300cc7b0c6/rfc3339_validator-0.1.4-py2.py3-none-any.whl", hash = "sha256:24f6ec1eda14ef823da9e36ec7113124b39c04d50a4d3d3a3c2859577e7791fa", size = 3490, upload-time = "2021-05-12T16:37:52.536Z" },
+]
+
+[[package]]
+name = "rfc3986-validator"
+version = "0.1.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/da/88/f270de456dd7d11dcc808abfa291ecdd3f45ff44e3b549ffa01b126464d0/rfc3986_validator-0.1.1.tar.gz", hash = "sha256:3d44bde7921b3b9ec3ae4e3adca370438eccebc676456449b145d533b240d055", size = 6760, upload-time = "2019-10-28T16:00:19.144Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/9e/51/17023c0f8f1869d8806b979a2bffa3f861f26a3f1a66b094288323fba52f/rfc3986_validator-0.1.1-py2.py3-none-any.whl", hash = "sha256:2f235c432ef459970b4306369336b9d5dbdda31b510ca1e327636e01f528bfa9", size = 4242, upload-time = "2019-10-28T16:00:13.976Z" },
+]
+
+[[package]]
+name = "rfc3987-syntax"
+version = "1.1.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "lark" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/2c/06/37c1a5557acf449e8e406a830a05bf885ac47d33270aec454ef78675008d/rfc3987_syntax-1.1.0.tar.gz", hash = "sha256:717a62cbf33cffdd16dfa3a497d81ce48a660ea691b1ddd7be710c22f00b4a0d", size = 14239, upload-time = "2025-07-18T01:05:05.015Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/7e/71/44ce230e1b7fadd372515a97e32a83011f906ddded8d03e3c6aafbdedbb7/rfc3987_syntax-1.1.0-py3-none-any.whl", hash = "sha256:6c3d97604e4c5ce9f714898e05401a0445a641cfa276432b0a648c80856f6a3f", size = 8046, upload-time = "2025-07-18T01:05:03.843Z" },
+]
+
+[[package]]
+name = "rioxarray"
+version = "0.19.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "numpy" },
+    { name = "packaging" },
+    { name = "pyproj" },
+    { name = "rasterio" },
+    { name = "xarray" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/3d/8e/fe4e87460f8c62d8d5c683e09f19fbde5d9cfcfd0342d02df1f452999b5d/rioxarray-0.19.0.tar.gz", hash = "sha256:7819a0036fd874c8c8e280447cbbe43d8dc72fc4a14ac7852a665b1bdb7d4b04", size = 54600, upload-time = "2025-04-21T17:46:54.183Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/2a/2f/63d2cacc0e525f8e3398bcf32bd3620385f22cd1600834ec49d7f3597a7b/rioxarray-0.19.0-py3-none-any.whl", hash = "sha256:494ee4fff1781072d55ee5276f5d07b63d93b05093cb33b926a12186ba5bb8ef", size = 62151, upload-time = "2025-04-21T17:46:52.801Z" },
+]
+
+[[package]]
+name = "rpds-py"
+version = "0.30.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/20/af/3f2f423103f1113b36230496629986e0ef7e199d2aa8392452b484b38ced/rpds_py-0.30.0.tar.gz", hash = "sha256:dd8ff7cf90014af0c0f787eea34794ebf6415242ee1d6fa91eaba725cc441e84", size = 69469, upload-time = "2025-11-30T20:24:38.837Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/4d/6e/f964e88b3d2abee2a82c1ac8366da848fce1c6d834dc2132c3fda3970290/rpds_py-0.30.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:a2bffea6a4ca9f01b3f8e548302470306689684e61602aa3d141e34da06cf425", size = 370157, upload-time = "2025-11-30T20:21:53.789Z" },
+    { url = "https://files.pythonhosted.org/packages/94/ba/24e5ebb7c1c82e74c4e4f33b2112a5573ddc703915b13a073737b59b86e0/rpds_py-0.30.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dc4f992dfe1e2bc3ebc7444f6c7051b4bc13cd8e33e43511e8ffd13bf407010d", size = 359676, upload-time = "2025-11-30T20:21:55.475Z" },
+    { url = "https://files.pythonhosted.org/packages/84/86/04dbba1b087227747d64d80c3b74df946b986c57af0a9f0c98726d4d7a3b/rpds_py-0.30.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:422c3cb9856d80b09d30d2eb255d0754b23e090034e1deb4083f8004bd0761e4", size = 389938, upload-time = "2025-11-30T20:21:57.079Z" },
+    { url = "https://files.pythonhosted.org/packages/42/bb/1463f0b1722b7f45431bdd468301991d1328b16cffe0b1c2918eba2c4eee/rpds_py-0.30.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:07ae8a593e1c3c6b82ca3292efbe73c30b61332fd612e05abee07c79359f292f", size = 402932, upload-time = "2025-11-30T20:21:58.47Z" },
+    { url = "https://files.pythonhosted.org/packages/99/ee/2520700a5c1f2d76631f948b0736cdf9b0acb25abd0ca8e889b5c62ac2e3/rpds_py-0.30.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12f90dd7557b6bd57f40abe7747e81e0c0b119bef015ea7726e69fe550e394a4", size = 525830, upload-time = "2025-11-30T20:21:59.699Z" },
+    { url = "https://files.pythonhosted.org/packages/e0/ad/bd0331f740f5705cc555a5e17fdf334671262160270962e69a2bdef3bf76/rpds_py-0.30.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:99b47d6ad9a6da00bec6aabe5a6279ecd3c06a329d4aa4771034a21e335c3a97", size = 412033, upload-time = "2025-11-30T20:22:00.991Z" },
+    { url = "https://files.pythonhosted.org/packages/f8/1e/372195d326549bb51f0ba0f2ecb9874579906b97e08880e7a65c3bef1a99/rpds_py-0.30.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33f559f3104504506a44bb666b93a33f5d33133765b0c216a5bf2f1e1503af89", size = 390828, upload-time = "2025-11-30T20:22:02.723Z" },
+    { url = "https://files.pythonhosted.org/packages/ab/2b/d88bb33294e3e0c76bc8f351a3721212713629ffca1700fa94979cb3eae8/rpds_py-0.30.0-cp311-cp311-manylinux_2_31_riscv64.whl", hash = "sha256:946fe926af6e44f3697abbc305ea168c2c31d3e3ef1058cf68f379bf0335a78d", size = 404683, upload-time = "2025-11-30T20:22:04.367Z" },
+    { url = "https://files.pythonhosted.org/packages/50/32/c759a8d42bcb5289c1fac697cd92f6fe01a018dd937e62ae77e0e7f15702/rpds_py-0.30.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:495aeca4b93d465efde585977365187149e75383ad2684f81519f504f5c13038", size = 421583, upload-time = "2025-11-30T20:22:05.814Z" },
+    { url = "https://files.pythonhosted.org/packages/2b/81/e729761dbd55ddf5d84ec4ff1f47857f4374b0f19bdabfcf929164da3e24/rpds_py-0.30.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d9a0ca5da0386dee0655b4ccdf46119df60e0f10da268d04fe7cc87886872ba7", size = 572496, upload-time = "2025-11-30T20:22:07.713Z" },
+    { url = "https://files.pythonhosted.org/packages/14/f6/69066a924c3557c9c30baa6ec3a0aa07526305684c6f86c696b08860726c/rpds_py-0.30.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8d6d1cc13664ec13c1b84241204ff3b12f9bb82464b8ad6e7a5d3486975c2eed", size = 598669, upload-time = "2025-11-30T20:22:09.312Z" },
+    { url = "https://files.pythonhosted.org/packages/5f/48/905896b1eb8a05630d20333d1d8ffd162394127b74ce0b0784ae04498d32/rpds_py-0.30.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3896fa1be39912cf0757753826bc8bdc8ca331a28a7c4ae46b7a21280b06bb85", size = 561011, upload-time = "2025-11-30T20:22:11.309Z" },
+    { url = "https://files.pythonhosted.org/packages/22/16/cd3027c7e279d22e5eb431dd3c0fbc677bed58797fe7581e148f3f68818b/rpds_py-0.30.0-cp311-cp311-win32.whl", hash = "sha256:55f66022632205940f1827effeff17c4fa7ae1953d2b74a8581baaefb7d16f8c", size = 221406, upload-time = "2025-11-30T20:22:13.101Z" },
+    { url = "https://files.pythonhosted.org/packages/fa/5b/e7b7aa136f28462b344e652ee010d4de26ee9fd16f1bfd5811f5153ccf89/rpds_py-0.30.0-cp311-cp311-win_amd64.whl", hash = "sha256:a51033ff701fca756439d641c0ad09a41d9242fa69121c7d8769604a0a629825", size = 236024, upload-time = "2025-11-30T20:22:14.853Z" },
+    { url = "https://files.pythonhosted.org/packages/14/a6/364bba985e4c13658edb156640608f2c9e1d3ea3c81b27aa9d889fff0e31/rpds_py-0.30.0-cp311-cp311-win_arm64.whl", hash = "sha256:47b0ef6231c58f506ef0b74d44e330405caa8428e770fec25329ed2cb971a229", size = 229069, upload-time = "2025-11-30T20:22:16.577Z" },
+    { url = "https://files.pythonhosted.org/packages/03/e7/98a2f4ac921d82f33e03f3835f5bf3a4a40aa1bfdc57975e74a97b2b4bdd/rpds_py-0.30.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a161f20d9a43006833cd7068375a94d035714d73a172b681d8881820600abfad", size = 375086, upload-time = "2025-11-30T20:22:17.93Z" },
+    { url = "https://files.pythonhosted.org/packages/4d/a1/bca7fd3d452b272e13335db8d6b0b3ecde0f90ad6f16f3328c6fb150c889/rpds_py-0.30.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6abc8880d9d036ecaafe709079969f56e876fcf107f7a8e9920ba6d5a3878d05", size = 359053, upload-time = "2025-11-30T20:22:19.297Z" },
+    { url = "https://files.pythonhosted.org/packages/65/1c/ae157e83a6357eceff62ba7e52113e3ec4834a84cfe07fa4b0757a7d105f/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca28829ae5f5d569bb62a79512c842a03a12576375d5ece7d2cadf8abe96ec28", size = 390763, upload-time = "2025-11-30T20:22:21.661Z" },
+    { url = "https://files.pythonhosted.org/packages/d4/36/eb2eb8515e2ad24c0bd43c3ee9cd74c33f7ca6430755ccdb240fd3144c44/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a1010ed9524c73b94d15919ca4d41d8780980e1765babf85f9a2f90d247153dd", size = 408951, upload-time = "2025-11-30T20:22:23.408Z" },
+    { url = "https://files.pythonhosted.org/packages/d6/65/ad8dc1784a331fabbd740ef6f71ce2198c7ed0890dab595adb9ea2d775a1/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f8d1736cfb49381ba528cd5baa46f82fdc65c06e843dab24dd70b63d09121b3f", size = 514622, upload-time = "2025-11-30T20:22:25.16Z" },
+    { url = "https://files.pythonhosted.org/packages/63/8e/0cfa7ae158e15e143fe03993b5bcd743a59f541f5952e1546b1ac1b5fd45/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d948b135c4693daff7bc2dcfc4ec57237a29bd37e60c2fabf5aff2bbacf3e2f1", size = 414492, upload-time = "2025-11-30T20:22:26.505Z" },
+    { url = "https://files.pythonhosted.org/packages/60/1b/6f8f29f3f995c7ffdde46a626ddccd7c63aefc0efae881dc13b6e5d5bb16/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47f236970bccb2233267d89173d3ad2703cd36a0e2a6e92d0560d333871a3d23", size = 394080, upload-time = "2025-11-30T20:22:27.934Z" },
+    { url = "https://files.pythonhosted.org/packages/6d/d5/a266341051a7a3ca2f4b750a3aa4abc986378431fc2da508c5034d081b70/rpds_py-0.30.0-cp312-cp312-manylinux_2_31_riscv64.whl", hash = "sha256:2e6ecb5a5bcacf59c3f912155044479af1d0b6681280048b338b28e364aca1f6", size = 408680, upload-time = "2025-11-30T20:22:29.341Z" },
+    { url = "https://files.pythonhosted.org/packages/10/3b/71b725851df9ab7a7a4e33cf36d241933da66040d195a84781f49c50490c/rpds_py-0.30.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a8fa71a2e078c527c3e9dc9fc5a98c9db40bcc8a92b4e8858e36d329f8684b51", size = 423589, upload-time = "2025-11-30T20:22:31.469Z" },
+    { url = "https://files.pythonhosted.org/packages/00/2b/e59e58c544dc9bd8bd8384ecdb8ea91f6727f0e37a7131baeff8d6f51661/rpds_py-0.30.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:73c67f2db7bc334e518d097c6d1e6fed021bbc9b7d678d6cc433478365d1d5f5", size = 573289, upload-time = "2025-11-30T20:22:32.997Z" },
+    { url = "https://files.pythonhosted.org/packages/da/3e/a18e6f5b460893172a7d6a680e86d3b6bc87a54c1f0b03446a3c8c7b588f/rpds_py-0.30.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5ba103fb455be00f3b1c2076c9d4264bfcb037c976167a6047ed82f23153f02e", size = 599737, upload-time = "2025-11-30T20:22:34.419Z" },
+    { url = "https://files.pythonhosted.org/packages/5c/e2/714694e4b87b85a18e2c243614974413c60aa107fd815b8cbc42b873d1d7/rpds_py-0.30.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7cee9c752c0364588353e627da8a7e808a66873672bcb5f52890c33fd965b394", size = 563120, upload-time = "2025-11-30T20:22:35.903Z" },
+    { url = "https://files.pythonhosted.org/packages/6f/ab/d5d5e3bcedb0a77f4f613706b750e50a5a3ba1c15ccd3665ecc636c968fd/rpds_py-0.30.0-cp312-cp312-win32.whl", hash = "sha256:1ab5b83dbcf55acc8b08fc62b796ef672c457b17dbd7820a11d6c52c06839bdf", size = 223782, upload-time = "2025-11-30T20:22:37.271Z" },
+    { url = "https://files.pythonhosted.org/packages/39/3b/f786af9957306fdc38a74cef405b7b93180f481fb48453a114bb6465744a/rpds_py-0.30.0-cp312-cp312-win_amd64.whl", hash = "sha256:a090322ca841abd453d43456ac34db46e8b05fd9b3b4ac0c78bcde8b089f959b", size = 240463, upload-time = "2025-11-30T20:22:39.021Z" },
+    { url = "https://files.pythonhosted.org/packages/f3/d2/b91dc748126c1559042cfe41990deb92c4ee3e2b415f6b5234969ffaf0cc/rpds_py-0.30.0-cp312-cp312-win_arm64.whl", hash = "sha256:669b1805bd639dd2989b281be2cfd951c6121b65e729d9b843e9639ef1fd555e", size = 230868, upload-time = "2025-11-30T20:22:40.493Z" },
+    { url = "https://files.pythonhosted.org/packages/ed/dc/d61221eb88ff410de3c49143407f6f3147acf2538c86f2ab7ce65ae7d5f9/rpds_py-0.30.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:f83424d738204d9770830d35290ff3273fbb02b41f919870479fab14b9d303b2", size = 374887, upload-time = "2025-11-30T20:22:41.812Z" },
+    { url = "https://files.pythonhosted.org/packages/fd/32/55fb50ae104061dbc564ef15cc43c013dc4a9f4527a1f4d99baddf56fe5f/rpds_py-0.30.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e7536cd91353c5273434b4e003cbda89034d67e7710eab8761fd918ec6c69cf8", size = 358904, upload-time = "2025-11-30T20:22:43.479Z" },
+    { url = "https://files.pythonhosted.org/packages/58/70/faed8186300e3b9bdd138d0273109784eea2396c68458ed580f885dfe7ad/rpds_py-0.30.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2771c6c15973347f50fece41fc447c054b7ac2ae0502388ce3b6738cd366e3d4", size = 389945, upload-time = "2025-11-30T20:22:44.819Z" },
+    { url = "https://files.pythonhosted.org/packages/bd/a8/073cac3ed2c6387df38f71296d002ab43496a96b92c823e76f46b8af0543/rpds_py-0.30.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0a59119fc6e3f460315fe9d08149f8102aa322299deaa5cab5b40092345c2136", size = 407783, upload-time = "2025-11-30T20:22:46.103Z" },
+    { url = "https://files.pythonhosted.org/packages/77/57/5999eb8c58671f1c11eba084115e77a8899d6e694d2a18f69f0ba471ec8b/rpds_py-0.30.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:76fec018282b4ead0364022e3c54b60bf368b9d926877957a8624b58419169b7", size = 515021, upload-time = "2025-11-30T20:22:47.458Z" },
+    { url = "https://files.pythonhosted.org/packages/e0/af/5ab4833eadc36c0a8ed2bc5c0de0493c04f6c06de223170bd0798ff98ced/rpds_py-0.30.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:692bef75a5525db97318e8cd061542b5a79812d711ea03dbc1f6f8dbb0c5f0d2", size = 414589, upload-time = "2025-11-30T20:22:48.872Z" },
+    { url = "https://files.pythonhosted.org/packages/b7/de/f7192e12b21b9e9a68a6d0f249b4af3fdcdff8418be0767a627564afa1f1/rpds_py-0.30.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9027da1ce107104c50c81383cae773ef5c24d296dd11c99e2629dbd7967a20c6", size = 394025, upload-time = "2025-11-30T20:22:50.196Z" },
+    { url = "https://files.pythonhosted.org/packages/91/c4/fc70cd0249496493500e7cc2de87504f5aa6509de1e88623431fec76d4b6/rpds_py-0.30.0-cp313-cp313-manylinux_2_31_riscv64.whl", hash = "sha256:9cf69cdda1f5968a30a359aba2f7f9aa648a9ce4b580d6826437f2b291cfc86e", size = 408895, upload-time = "2025-11-30T20:22:51.87Z" },
+    { url = "https://files.pythonhosted.org/packages/58/95/d9275b05ab96556fefff73a385813eb66032e4c99f411d0795372d9abcea/rpds_py-0.30.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a4796a717bf12b9da9d3ad002519a86063dcac8988b030e405704ef7d74d2d9d", size = 422799, upload-time = "2025-11-30T20:22:53.341Z" },
+    { url = "https://files.pythonhosted.org/packages/06/c1/3088fc04b6624eb12a57eb814f0d4997a44b0d208d6cace713033ff1a6ba/rpds_py-0.30.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5d4c2aa7c50ad4728a094ebd5eb46c452e9cb7edbfdb18f9e1221f597a73e1e7", size = 572731, upload-time = "2025-11-30T20:22:54.778Z" },
+    { url = "https://files.pythonhosted.org/packages/d8/42/c612a833183b39774e8ac8fecae81263a68b9583ee343db33ab571a7ce55/rpds_py-0.30.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ba81a9203d07805435eb06f536d95a266c21e5b2dfbf6517748ca40c98d19e31", size = 599027, upload-time = "2025-11-30T20:22:56.212Z" },
+    { url = "https://files.pythonhosted.org/packages/5f/60/525a50f45b01d70005403ae0e25f43c0384369ad24ffe46e8d9068b50086/rpds_py-0.30.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:945dccface01af02675628334f7cf49c2af4c1c904748efc5cf7bbdf0b579f95", size = 563020, upload-time = "2025-11-30T20:22:58.2Z" },
+    { url = "https://files.pythonhosted.org/packages/0b/5d/47c4655e9bcd5ca907148535c10e7d489044243cc9941c16ed7cd53be91d/rpds_py-0.30.0-cp313-cp313-win32.whl", hash = "sha256:b40fb160a2db369a194cb27943582b38f79fc4887291417685f3ad693c5a1d5d", size = 223139, upload-time = "2025-11-30T20:23:00.209Z" },
+    { url = "https://files.pythonhosted.org/packages/f2/e1/485132437d20aa4d3e1d8b3fb5a5e65aa8139f1e097080c2a8443201742c/rpds_py-0.30.0-cp313-cp313-win_amd64.whl", hash = "sha256:806f36b1b605e2d6a72716f321f20036b9489d29c51c91f4dd29a3e3afb73b15", size = 240224, upload-time = "2025-11-30T20:23:02.008Z" },
+    { url = "https://files.pythonhosted.org/packages/24/95/ffd128ed1146a153d928617b0ef673960130be0009c77d8fbf0abe306713/rpds_py-0.30.0-cp313-cp313-win_arm64.whl", hash = "sha256:d96c2086587c7c30d44f31f42eae4eac89b60dabbac18c7669be3700f13c3ce1", size = 230645, upload-time = "2025-11-30T20:23:03.43Z" },
+    { url = "https://files.pythonhosted.org/packages/ff/1b/b10de890a0def2a319a2626334a7f0ae388215eb60914dbac8a3bae54435/rpds_py-0.30.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:eb0b93f2e5c2189ee831ee43f156ed34e2a89a78a66b98cadad955972548be5a", size = 364443, upload-time = "2025-11-30T20:23:04.878Z" },
+    { url = "https://files.pythonhosted.org/packages/0d/bf/27e39f5971dc4f305a4fb9c672ca06f290f7c4e261c568f3dea16a410d47/rpds_py-0.30.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:922e10f31f303c7c920da8981051ff6d8c1a56207dbdf330d9047f6d30b70e5e", size = 353375, upload-time = "2025-11-30T20:23:06.342Z" },
+    { url = "https://files.pythonhosted.org/packages/40/58/442ada3bba6e8e6615fc00483135c14a7538d2ffac30e2d933ccf6852232/rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdc62c8286ba9bf7f47befdcea13ea0e26bf294bda99758fd90535cbaf408000", size = 383850, upload-time = "2025-11-30T20:23:07.825Z" },
+    { url = "https://files.pythonhosted.org/packages/14/14/f59b0127409a33c6ef6f5c1ebd5ad8e32d7861c9c7adfa9a624fc3889f6c/rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:47f9a91efc418b54fb8190a6b4aa7813a23fb79c51f4bb84e418f5476c38b8db", size = 392812, upload-time = "2025-11-30T20:23:09.228Z" },
+    { url = "https://files.pythonhosted.org/packages/b3/66/e0be3e162ac299b3a22527e8913767d869e6cc75c46bd844aa43fb81ab62/rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f3587eb9b17f3789ad50824084fa6f81921bbf9a795826570bda82cb3ed91f2", size = 517841, upload-time = "2025-11-30T20:23:11.186Z" },
+    { url = "https://files.pythonhosted.org/packages/3d/55/fa3b9cf31d0c963ecf1ba777f7cf4b2a2c976795ac430d24a1f43d25a6ba/rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:39c02563fc592411c2c61d26b6c5fe1e51eaa44a75aa2c8735ca88b0d9599daa", size = 408149, upload-time = "2025-11-30T20:23:12.864Z" },
+    { url = "https://files.pythonhosted.org/packages/60/ca/780cf3b1a32b18c0f05c441958d3758f02544f1d613abf9488cd78876378/rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51a1234d8febafdfd33a42d97da7a43f5dcb120c1060e352a3fbc0c6d36e2083", size = 383843, upload-time = "2025-11-30T20:23:14.638Z" },
+    { url = "https://files.pythonhosted.org/packages/82/86/d5f2e04f2aa6247c613da0c1dd87fcd08fa17107e858193566048a1e2f0a/rpds_py-0.30.0-cp313-cp313t-manylinux_2_31_riscv64.whl", hash = "sha256:eb2c4071ab598733724c08221091e8d80e89064cd472819285a9ab0f24bcedb9", size = 396507, upload-time = "2025-11-30T20:23:16.105Z" },
+    { url = "https://files.pythonhosted.org/packages/4b/9a/453255d2f769fe44e07ea9785c8347edaf867f7026872e76c1ad9f7bed92/rpds_py-0.30.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6bdfdb946967d816e6adf9a3d8201bfad269c67efe6cefd7093ef959683c8de0", size = 414949, upload-time = "2025-11-30T20:23:17.539Z" },
+    { url = "https://files.pythonhosted.org/packages/a3/31/622a86cdc0c45d6df0e9ccb6becdba5074735e7033c20e401a6d9d0e2ca0/rpds_py-0.30.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:c77afbd5f5250bf27bf516c7c4a016813eb2d3e116139aed0096940c5982da94", size = 565790, upload-time = "2025-11-30T20:23:19.029Z" },
+    { url = "https://files.pythonhosted.org/packages/1c/5d/15bbf0fb4a3f58a3b1c67855ec1efcc4ceaef4e86644665fff03e1b66d8d/rpds_py-0.30.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:61046904275472a76c8c90c9ccee9013d70a6d0f73eecefd38c1ae7c39045a08", size = 590217, upload-time = "2025-11-30T20:23:20.885Z" },
+    { url = "https://files.pythonhosted.org/packages/6d/61/21b8c41f68e60c8cc3b2e25644f0e3681926020f11d06ab0b78e3c6bbff1/rpds_py-0.30.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4c5f36a861bc4b7da6516dbdf302c55313afa09b81931e8280361a4f6c9a2d27", size = 555806, upload-time = "2025-11-30T20:23:22.488Z" },
+    { url = "https://files.pythonhosted.org/packages/f9/39/7e067bb06c31de48de3eb200f9fc7c58982a4d3db44b07e73963e10d3be9/rpds_py-0.30.0-cp313-cp313t-win32.whl", hash = "sha256:3d4a69de7a3e50ffc214ae16d79d8fbb0922972da0356dcf4d0fdca2878559c6", size = 211341, upload-time = "2025-11-30T20:23:24.449Z" },
+    { url = "https://files.pythonhosted.org/packages/0a/4d/222ef0b46443cf4cf46764d9c630f3fe4abaa7245be9417e56e9f52b8f65/rpds_py-0.30.0-cp313-cp313t-win_amd64.whl", hash = "sha256:f14fc5df50a716f7ece6a80b6c78bb35ea2ca47c499e422aa4463455dd96d56d", size = 225768, upload-time = "2025-11-30T20:23:25.908Z" },
+    { url = "https://files.pythonhosted.org/packages/69/71/3f34339ee70521864411f8b6992e7ab13ac30d8e4e3309e07c7361767d91/rpds_py-0.30.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c2262bdba0ad4fc6fb5545660673925c2d2a5d9e2e0fb603aad545427be0fc58", size = 372292, upload-time = "2025-11-30T20:24:16.537Z" },
+    { url = "https://files.pythonhosted.org/packages/57/09/f183df9b8f2d66720d2ef71075c59f7e1b336bec7ee4c48f0a2b06857653/rpds_py-0.30.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:ee6af14263f25eedc3bb918a3c04245106a42dfd4f5c2285ea6f997b1fc3f89a", size = 362128, upload-time = "2025-11-30T20:24:18.086Z" },
+    { url = "https://files.pythonhosted.org/packages/7a/68/5c2594e937253457342e078f0cc1ded3dd7b2ad59afdbf2d354869110a02/rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3adbb8179ce342d235c31ab8ec511e66c73faa27a47e076ccc92421add53e2bb", size = 391542, upload-time = "2025-11-30T20:24:20.092Z" },
+    { url = "https://files.pythonhosted.org/packages/49/5c/31ef1afd70b4b4fbdb2800249f34c57c64beb687495b10aec0365f53dfc4/rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:250fa00e9543ac9b97ac258bd37367ff5256666122c2d0f2bc97577c60a1818c", size = 404004, upload-time = "2025-11-30T20:24:22.231Z" },
+    { url = "https://files.pythonhosted.org/packages/e3/63/0cfbea38d05756f3440ce6534d51a491d26176ac045e2707adc99bb6e60a/rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9854cf4f488b3d57b9aaeb105f06d78e5529d3145b1e4a41750167e8c213c6d3", size = 527063, upload-time = "2025-11-30T20:24:24.302Z" },
+    { url = "https://files.pythonhosted.org/packages/42/e6/01e1f72a2456678b0f618fc9a1a13f882061690893c192fcad9f2926553a/rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:993914b8e560023bc0a8bf742c5f303551992dcb85e247b1e5c7f4a7d145bda5", size = 413099, upload-time = "2025-11-30T20:24:25.916Z" },
+    { url = "https://files.pythonhosted.org/packages/b8/25/8df56677f209003dcbb180765520c544525e3ef21ea72279c98b9aa7c7fb/rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58edca431fb9b29950807e301826586e5bbf24163677732429770a697ffe6738", size = 392177, upload-time = "2025-11-30T20:24:27.834Z" },
+    { url = "https://files.pythonhosted.org/packages/4a/b4/0a771378c5f16f8115f796d1f437950158679bcd2a7c68cf251cfb00ed5b/rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:dea5b552272a944763b34394d04577cf0f9bd013207bc32323b5a89a53cf9c2f", size = 406015, upload-time = "2025-11-30T20:24:29.457Z" },
+    { url = "https://files.pythonhosted.org/packages/36/d8/456dbba0af75049dc6f63ff295a2f92766b9d521fa00de67a2bd6427d57a/rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ba3af48635eb83d03f6c9735dfb21785303e73d22ad03d489e88adae6eab8877", size = 423736, upload-time = "2025-11-30T20:24:31.22Z" },
+    { url = "https://files.pythonhosted.org/packages/13/64/b4d76f227d5c45a7e0b796c674fd81b0a6c4fbd48dc29271857d8219571c/rpds_py-0.30.0-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:dff13836529b921e22f15cb099751209a60009731a68519630a24d61f0b1b30a", size = 573981, upload-time = "2025-11-30T20:24:32.934Z" },
+    { url = "https://files.pythonhosted.org/packages/20/91/092bacadeda3edf92bf743cc96a7be133e13a39cdbfd7b5082e7ab638406/rpds_py-0.30.0-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:1b151685b23929ab7beec71080a8889d4d6d9fa9a983d213f07121205d48e2c4", size = 599782, upload-time = "2025-11-30T20:24:35.169Z" },
+    { url = "https://files.pythonhosted.org/packages/d1/b7/b95708304cd49b7b6f82fdd039f1748b66ec2b21d6a45180910802f1abf1/rpds_py-0.30.0-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:ac37f9f516c51e5753f27dfdef11a88330f04de2d564be3991384b2f3535d02e", size = 562191, upload-time = "2025-11-30T20:24:36.853Z" },
+]
+
+[[package]]
+name = "ruff"
+version = "0.15.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/06/04/eab13a954e763b0606f460443fcbf6bb5a0faf06890ea3754ff16523dce5/ruff-0.15.2.tar.gz", hash = "sha256:14b965afee0969e68bb871eba625343b8673375f457af4abe98553e8bbb98342", size = 4558148, upload-time = "2026-02-19T22:32:20.271Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/2f/70/3a4dc6d09b13cb3e695f28307e5d889b2e1a66b7af9c5e257e796695b0e6/ruff-0.15.2-py3-none-linux_armv6l.whl", hash = "sha256:120691a6fdae2f16d65435648160f5b81a9625288f75544dc40637436b5d3c0d", size = 10430565, upload-time = "2026-02-19T22:32:41.824Z" },
+    { url = "https://files.pythonhosted.org/packages/71/0b/bb8457b56185ece1305c666dc895832946d24055be90692381c31d57466d/ruff-0.15.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:a89056d831256099658b6bba4037ac6dd06f49d194199215befe2bb10457ea5e", size = 10820354, upload-time = "2026-02-19T22:32:07.366Z" },
+    { url = "https://files.pythonhosted.org/packages/2d/c1/e0532d7f9c9e0b14c46f61b14afd563298b8b83f337b6789ddd987e46121/ruff-0.15.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:e36dee3a64be0ebd23c86ffa3aa3fd3ac9a712ff295e192243f814a830b6bd87", size = 10170767, upload-time = "2026-02-19T22:32:13.188Z" },
+    { url = "https://files.pythonhosted.org/packages/47/e8/da1aa341d3af017a21c7a62fb5ec31d4e7ad0a93ab80e3a508316efbcb23/ruff-0.15.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9fb47b6d9764677f8c0a193c0943ce9a05d6763523f132325af8a858eadc2b9", size = 10529591, upload-time = "2026-02-19T22:32:02.547Z" },
+    { url = "https://files.pythonhosted.org/packages/93/74/184fbf38e9f3510231fbc5e437e808f0b48c42d1df9434b208821efcd8d6/ruff-0.15.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f376990f9d0d6442ea9014b19621d8f2aaf2b8e39fdbfc79220b7f0c596c9b80", size = 10260771, upload-time = "2026-02-19T22:32:36.938Z" },
+    { url = "https://files.pythonhosted.org/packages/05/ac/605c20b8e059a0bc4b42360414baa4892ff278cec1c91fff4be0dceedefd/ruff-0.15.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2dcc987551952d73cbf5c88d9fdee815618d497e4df86cd4c4824cc59d5dd75f", size = 11045791, upload-time = "2026-02-19T22:32:31.642Z" },
+    { url = "https://files.pythonhosted.org/packages/fd/52/db6e419908f45a894924d410ac77d64bdd98ff86901d833364251bd08e22/ruff-0.15.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:42a47fd785cbe8c01b9ff45031af875d101b040ad8f4de7bbb716487c74c9a77", size = 11879271, upload-time = "2026-02-19T22:32:29.305Z" },
+    { url = "https://files.pythonhosted.org/packages/3e/d8/7992b18f2008bdc9231d0f10b16df7dda964dbf639e2b8b4c1b4e91b83af/ruff-0.15.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cbe9f49354866e575b4c6943856989f966421870e85cd2ac94dccb0a9dcb2fea", size = 11303707, upload-time = "2026-02-19T22:32:22.492Z" },
+    { url = "https://files.pythonhosted.org/packages/d7/02/849b46184bcfdd4b64cde61752cc9a146c54759ed036edd11857e9b8443b/ruff-0.15.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7a672c82b5f9887576087d97be5ce439f04bbaf548ee987b92d3a7dede41d3a", size = 11149151, upload-time = "2026-02-19T22:32:44.234Z" },
+    { url = "https://files.pythonhosted.org/packages/70/04/f5284e388bab60d1d3b99614a5a9aeb03e0f333847e2429bebd2aaa1feec/ruff-0.15.2-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:72ecc64f46f7019e2bcc3cdc05d4a7da958b629a5ab7033195e11a438403d956", size = 11091132, upload-time = "2026-02-19T22:32:24.691Z" },
+    { url = "https://files.pythonhosted.org/packages/fa/ae/88d844a21110e14d92cf73d57363fab59b727ebeabe78009b9ccb23500af/ruff-0.15.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:8dcf243b15b561c655c1ef2f2b0050e5d50db37fe90115507f6ff37d865dc8b4", size = 10504717, upload-time = "2026-02-19T22:32:26.75Z" },
+    { url = "https://files.pythonhosted.org/packages/64/27/867076a6ada7f2b9c8292884ab44d08fd2ba71bd2b5364d4136f3cd537e1/ruff-0.15.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:dab6941c862c05739774677c6273166d2510d254dac0695c0e3f5efa1b5585de", size = 10263122, upload-time = "2026-02-19T22:32:10.036Z" },
+    { url = "https://files.pythonhosted.org/packages/e7/ef/faf9321d550f8ebf0c6373696e70d1758e20ccdc3951ad7af00c0956be7c/ruff-0.15.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:1b9164f57fc36058e9a6806eb92af185b0697c9fe4c7c52caa431c6554521e5c", size = 10735295, upload-time = "2026-02-19T22:32:39.227Z" },
+    { url = "https://files.pythonhosted.org/packages/2f/55/e8089fec62e050ba84d71b70e7834b97709ca9b7aba10c1a0b196e493f97/ruff-0.15.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:80d24fcae24d42659db7e335b9e1531697a7102c19185b8dc4a028b952865fd8", size = 11241641, upload-time = "2026-02-19T22:32:34.617Z" },
+    { url = "https://files.pythonhosted.org/packages/23/01/1c30526460f4d23222d0fabd5888868262fd0e2b71a00570ca26483cd993/ruff-0.15.2-py3-none-win32.whl", hash = "sha256:fd5ff9e5f519a7e1bd99cbe8daa324010a74f5e2ebc97c6242c08f26f3714f6f", size = 10507885, upload-time = "2026-02-19T22:32:15.635Z" },
+    { url = "https://files.pythonhosted.org/packages/5c/10/3d18e3bbdf8fc50bbb4ac3cc45970aa5a9753c5cb51bf9ed9a3cd8b79fa3/ruff-0.15.2-py3-none-win_amd64.whl", hash = "sha256:d20014e3dfa400f3ff84830dfb5755ece2de45ab62ecea4af6b7262d0fb4f7c5", size = 11623725, upload-time = "2026-02-19T22:32:04.947Z" },
+    { url = "https://files.pythonhosted.org/packages/6d/78/097c0798b1dab9f8affe73da9642bb4500e098cb27fd8dc9724816ac747b/ruff-0.15.2-py3-none-win_arm64.whl", hash = "sha256:cabddc5822acdc8f7b5527b36ceac55cc51eec7b1946e60181de8fe83ca8876e", size = 10941649, upload-time = "2026-02-19T22:32:18.108Z" },
+]
+
+[[package]]
+name = "scipy"
+version = "1.17.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "numpy" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/56/3e/9cca699f3486ce6bc12ff46dc2031f1ec8eb9ccc9a320fdaf925f1417426/scipy-1.17.0.tar.gz", hash = "sha256:2591060c8e648d8b96439e111ac41fd8342fdeff1876be2e19dea3fe8930454e", size = 30396830, upload-time = "2026-01-10T21:34:23.009Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/1e/4b/c89c131aa87cad2b77a54eb0fb94d633a842420fa7e919dc2f922037c3d8/scipy-1.17.0-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:2abd71643797bd8a106dff97894ff7869eeeb0af0f7a5ce02e4227c6a2e9d6fd", size = 31381316, upload-time = "2026-01-10T21:24:33.42Z" },
+    { url = "https://files.pythonhosted.org/packages/5e/5f/a6b38f79a07d74989224d5f11b55267714707582908a5f1ae854cf9a9b84/scipy-1.17.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:ef28d815f4d2686503e5f4f00edc387ae58dfd7a2f42e348bb53359538f01558", size = 27966760, upload-time = "2026-01-10T21:24:38.911Z" },
+    { url = "https://files.pythonhosted.org/packages/c1/20/095ad24e031ee8ed3c5975954d816b8e7e2abd731e04f8be573de8740885/scipy-1.17.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:272a9f16d6bb4667e8b50d25d71eddcc2158a214df1b566319298de0939d2ab7", size = 20138701, upload-time = "2026-01-10T21:24:43.249Z" },
+    { url = "https://files.pythonhosted.org/packages/89/11/4aad2b3858d0337756f3323f8960755704e530b27eb2a94386c970c32cbe/scipy-1.17.0-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:7204fddcbec2fe6598f1c5fdf027e9f259106d05202a959a9f1aecf036adc9f6", size = 22480574, upload-time = "2026-01-10T21:24:47.266Z" },
+    { url = "https://files.pythonhosted.org/packages/85/bd/f5af70c28c6da2227e510875cadf64879855193a687fb19951f0f44cfd6b/scipy-1.17.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fc02c37a5639ee67d8fb646ffded6d793c06c5622d36b35cfa8fe5ececb8f042", size = 32862414, upload-time = "2026-01-10T21:24:52.566Z" },
+    { url = "https://files.pythonhosted.org/packages/ef/df/df1457c4df3826e908879fe3d76bc5b6e60aae45f4ee42539512438cfd5d/scipy-1.17.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dac97a27520d66c12a34fd90a4fe65f43766c18c0d6e1c0a80f114d2260080e4", size = 35112380, upload-time = "2026-01-10T21:24:58.433Z" },
+    { url = "https://files.pythonhosted.org/packages/5f/bb/88e2c16bd1dd4de19d80d7c5e238387182993c2fb13b4b8111e3927ad422/scipy-1.17.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ebb7446a39b3ae0fe8f416a9a3fdc6fba3f11c634f680f16a239c5187bc487c0", size = 34922676, upload-time = "2026-01-10T21:25:04.287Z" },
+    { url = "https://files.pythonhosted.org/packages/02/ba/5120242cc735f71fc002cff0303d536af4405eb265f7c60742851e7ccfe9/scipy-1.17.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:474da16199f6af66601a01546144922ce402cb17362e07d82f5a6cf8f963e449", size = 37507599, upload-time = "2026-01-10T21:25:09.851Z" },
+    { url = "https://files.pythonhosted.org/packages/52/c8/08629657ac6c0da198487ce8cd3de78e02cfde42b7f34117d56a3fe249dc/scipy-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:255c0da161bd7b32a6c898e7891509e8a9289f0b1c6c7d96142ee0d2b114c2ea", size = 36380284, upload-time = "2026-01-10T21:25:15.632Z" },
+    { url = "https://files.pythonhosted.org/packages/6c/4a/465f96d42c6f33ad324a40049dfd63269891db9324aa66c4a1c108c6f994/scipy-1.17.0-cp311-cp311-win_arm64.whl", hash = "sha256:85b0ac3ad17fa3be50abd7e69d583d98792d7edc08367e01445a1e2076005379", size = 24370427, upload-time = "2026-01-10T21:25:20.514Z" },
+    { url = "https://files.pythonhosted.org/packages/0b/11/7241a63e73ba5a516f1930ac8d5b44cbbfabd35ac73a2d08ca206df007c4/scipy-1.17.0-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:0d5018a57c24cb1dd828bcf51d7b10e65986d549f52ef5adb6b4d1ded3e32a57", size = 31364580, upload-time = "2026-01-10T21:25:25.717Z" },
+    { url = "https://files.pythonhosted.org/packages/ed/1d/5057f812d4f6adc91a20a2d6f2ebcdb517fdbc87ae3acc5633c9b97c8ba5/scipy-1.17.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:88c22af9e5d5a4f9e027e26772cc7b5922fab8bcc839edb3ae33de404feebd9e", size = 27969012, upload-time = "2026-01-10T21:25:30.921Z" },
+    { url = "https://files.pythonhosted.org/packages/e3/21/f6ec556c1e3b6ec4e088da667d9987bb77cc3ab3026511f427dc8451187d/scipy-1.17.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:f3cd947f20fe17013d401b64e857c6b2da83cae567adbb75b9dcba865abc66d8", size = 20140691, upload-time = "2026-01-10T21:25:34.802Z" },
+    { url = "https://files.pythonhosted.org/packages/7a/fe/5e5ad04784964ba964a96f16c8d4676aa1b51357199014dce58ab7ec5670/scipy-1.17.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:e8c0b331c2c1f531eb51f1b4fc9ba709521a712cce58f1aa627bc007421a5306", size = 22463015, upload-time = "2026-01-10T21:25:39.277Z" },
+    { url = "https://files.pythonhosted.org/packages/4a/69/7c347e857224fcaf32a34a05183b9d8a7aca25f8f2d10b8a698b8388561a/scipy-1.17.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5194c445d0a1c7a6c1a4a4681b6b7c71baad98ff66d96b949097e7513c9d6742", size = 32724197, upload-time = "2026-01-10T21:25:44.084Z" },
+    { url = "https://files.pythonhosted.org/packages/d1/fe/66d73b76d378ba8cc2fe605920c0c75092e3a65ae746e1e767d9d020a75a/scipy-1.17.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9eeb9b5f5997f75507814ed9d298ab23f62cf79f5a3ef90031b1ee2506abdb5b", size = 35009148, upload-time = "2026-01-10T21:25:50.591Z" },
+    { url = "https://files.pythonhosted.org/packages/af/07/07dec27d9dc41c18d8c43c69e9e413431d20c53a0339c388bcf72f353c4b/scipy-1.17.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:40052543f7bbe921df4408f46003d6f01c6af109b9e2c8a66dd1cf6cf57f7d5d", size = 34798766, upload-time = "2026-01-10T21:25:59.41Z" },
+    { url = "https://files.pythonhosted.org/packages/81/61/0470810c8a093cdacd4ba7504b8a218fd49ca070d79eca23a615f5d9a0b0/scipy-1.17.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0cf46c8013fec9d3694dc572f0b54100c28405d55d3e2cb15e2895b25057996e", size = 37405953, upload-time = "2026-01-10T21:26:07.75Z" },
+    { url = "https://files.pythonhosted.org/packages/92/ce/672ed546f96d5d41ae78c4b9b02006cedd0b3d6f2bf5bb76ea455c320c28/scipy-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:0937a0b0d8d593a198cededd4c439a0ea216a3f36653901ea1f3e4be949056f8", size = 36328121, upload-time = "2026-01-10T21:26:16.509Z" },
+    { url = "https://files.pythonhosted.org/packages/9d/21/38165845392cae67b61843a52c6455d47d0cc2a40dd495c89f4362944654/scipy-1.17.0-cp312-cp312-win_arm64.whl", hash = "sha256:f603d8a5518c7426414d1d8f82e253e454471de682ce5e39c29adb0df1efb86b", size = 24314368, upload-time = "2026-01-10T21:26:23.087Z" },
+    { url = "https://files.pythonhosted.org/packages/0c/51/3468fdfd49387ddefee1636f5cf6d03ce603b75205bf439bbf0e62069bfd/scipy-1.17.0-cp313-cp313-macosx_10_14_x86_64.whl", hash = "sha256:65ec32f3d32dfc48c72df4291345dae4f048749bc8d5203ee0a3f347f96c5ce6", size = 31344101, upload-time = "2026-01-10T21:26:30.25Z" },
+    { url = "https://files.pythonhosted.org/packages/b2/9a/9406aec58268d437636069419e6977af953d1e246df941d42d3720b7277b/scipy-1.17.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:1f9586a58039d7229ce77b52f8472c972448cded5736eaf102d5658bbac4c269", size = 27950385, upload-time = "2026-01-10T21:26:36.801Z" },
+    { url = "https://files.pythonhosted.org/packages/4f/98/e7342709e17afdfd1b26b56ae499ef4939b45a23a00e471dfb5375eea205/scipy-1.17.0-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:9fad7d3578c877d606b1150135c2639e9de9cecd3705caa37b66862977cc3e72", size = 20122115, upload-time = "2026-01-10T21:26:42.107Z" },
+    { url = "https://files.pythonhosted.org/packages/fd/0e/9eeeb5357a64fd157cbe0302c213517c541cc16b8486d82de251f3c68ede/scipy-1.17.0-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:423ca1f6584fc03936972b5f7c06961670dbba9f234e71676a7c7ccf938a0d61", size = 22442402, upload-time = "2026-01-10T21:26:48.029Z" },
+    { url = "https://files.pythonhosted.org/packages/c9/10/be13397a0e434f98e0c79552b2b584ae5bb1c8b2be95db421533bbca5369/scipy-1.17.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fe508b5690e9eaaa9467fc047f833af58f1152ae51a0d0aed67aa5801f4dd7d6", size = 32696338, upload-time = "2026-01-10T21:26:55.521Z" },
+    { url = "https://files.pythonhosted.org/packages/63/1e/12fbf2a3bb240161651c94bb5cdd0eae5d4e8cc6eaeceb74ab07b12a753d/scipy-1.17.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6680f2dfd4f6182e7d6db161344537da644d1cf85cf293f015c60a17ecf08752", size = 34977201, upload-time = "2026-01-10T21:27:03.501Z" },
+    { url = "https://files.pythonhosted.org/packages/19/5b/1a63923e23ccd20bd32156d7dd708af5bbde410daa993aa2500c847ab2d2/scipy-1.17.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:eec3842ec9ac9de5917899b277428886042a93db0b227ebbe3a333b64ec7643d", size = 34777384, upload-time = "2026-01-10T21:27:11.423Z" },
+    { url = "https://files.pythonhosted.org/packages/39/22/b5da95d74edcf81e540e467202a988c50fef41bd2011f46e05f72ba07df6/scipy-1.17.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d7425fcafbc09a03731e1bc05581f5fad988e48c6a861f441b7ab729a49a55ea", size = 37379586, upload-time = "2026-01-10T21:27:20.171Z" },
+    { url = "https://files.pythonhosted.org/packages/b9/b6/8ac583d6da79e7b9e520579f03007cb006f063642afd6b2eeb16b890bf93/scipy-1.17.0-cp313-cp313-win_amd64.whl", hash = "sha256:87b411e42b425b84777718cc41516b8a7e0795abfa8e8e1d573bf0ef014f0812", size = 36287211, upload-time = "2026-01-10T21:28:43.122Z" },
+    { url = "https://files.pythonhosted.org/packages/55/fb/7db19e0b3e52f882b420417644ec81dd57eeef1bd1705b6f689d8ff93541/scipy-1.17.0-cp313-cp313-win_arm64.whl", hash = "sha256:357ca001c6e37601066092e7c89cca2f1ce74e2a520ca78d063a6d2201101df2", size = 24312646, upload-time = "2026-01-10T21:28:49.893Z" },
+    { url = "https://files.pythonhosted.org/packages/20/b6/7feaa252c21cc7aff335c6c55e1b90ab3e3306da3f048109b8b639b94648/scipy-1.17.0-cp313-cp313t-macosx_10_14_x86_64.whl", hash = "sha256:ec0827aa4d36cb79ff1b81de898e948a51ac0b9b1c43e4a372c0508c38c0f9a3", size = 31693194, upload-time = "2026-01-10T21:27:27.454Z" },
+    { url = "https://files.pythonhosted.org/packages/76/bb/bbb392005abce039fb7e672cb78ac7d158700e826b0515cab6b5b60c26fb/scipy-1.17.0-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:819fc26862b4b3c73a60d486dbb919202f3d6d98c87cf20c223511429f2d1a97", size = 28365415, upload-time = "2026-01-10T21:27:34.26Z" },
+    { url = "https://files.pythonhosted.org/packages/37/da/9d33196ecc99fba16a409c691ed464a3a283ac454a34a13a3a57c0d66f3a/scipy-1.17.0-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:363ad4ae2853d88ebcde3ae6ec46ccca903ea9835ee8ba543f12f575e7b07e4e", size = 20537232, upload-time = "2026-01-10T21:27:40.306Z" },
+    { url = "https://files.pythonhosted.org/packages/56/9d/f4b184f6ddb28e9a5caea36a6f98e8ecd2a524f9127354087ce780885d83/scipy-1.17.0-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:979c3a0ff8e5ba254d45d59ebd38cde48fce4f10b5125c680c7a4bfe177aab07", size = 22791051, upload-time = "2026-01-10T21:27:46.539Z" },
+    { url = "https://files.pythonhosted.org/packages/9b/9d/025cccdd738a72140efc582b1641d0dd4caf2e86c3fb127568dc80444e6e/scipy-1.17.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:130d12926ae34399d157de777472bf82e9061c60cc081372b3118edacafe1d00", size = 32815098, upload-time = "2026-01-10T21:27:54.389Z" },
+    { url = "https://files.pythonhosted.org/packages/48/5f/09b879619f8bca15ce392bfc1894bd9c54377e01d1b3f2f3b595a1b4d945/scipy-1.17.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6e886000eb4919eae3a44f035e63f0fd8b651234117e8f6f29bad1cd26e7bc45", size = 35031342, upload-time = "2026-01-10T21:28:03.012Z" },
+    { url = "https://files.pythonhosted.org/packages/f2/9a/f0f0a9f0aa079d2f106555b984ff0fbb11a837df280f04f71f056ea9c6e4/scipy-1.17.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:13c4096ac6bc31d706018f06a49abe0485f96499deb82066b94d19b02f664209", size = 34893199, upload-time = "2026-01-10T21:28:10.832Z" },
+    { url = "https://files.pythonhosted.org/packages/90/b8/4f0f5cf0c5ea4d7548424e6533e6b17d164f34a6e2fb2e43ffebb6697b06/scipy-1.17.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:cacbaddd91fcffde703934897c5cd2c7cb0371fac195d383f4e1f1c5d3f3bd04", size = 37438061, upload-time = "2026-01-10T21:28:19.684Z" },
+    { url = "https://files.pythonhosted.org/packages/f9/cc/2bd59140ed3b2fa2882fb15da0a9cb1b5a6443d67cfd0d98d4cec83a57ec/scipy-1.17.0-cp313-cp313t-win_amd64.whl", hash = "sha256:edce1a1cf66298cccdc48a1bdf8fb10a3bf58e8b58d6c3883dd1530e103f87c0", size = 36328593, upload-time = "2026-01-10T21:28:28.007Z" },
+    { url = "https://files.pythonhosted.org/packages/13/1b/c87cc44a0d2c7aaf0f003aef2904c3d097b422a96c7e7c07f5efd9073c1b/scipy-1.17.0-cp313-cp313t-win_arm64.whl", hash = "sha256:30509da9dbec1c2ed8f168b8d8aa853bc6723fede1dbc23c7d43a56f5ab72a67", size = 24625083, upload-time = "2026-01-10T21:28:35.188Z" },
+]
+
+[[package]]
+name = "send2trash"
+version = "2.1.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/c5/f0/184b4b5f8d00f2a92cf96eec8967a3d550b52cf94362dad1100df9e48d57/send2trash-2.1.0.tar.gz", hash = "sha256:1c72b39f09457db3c05ce1d19158c2cbef4c32b8bedd02c155e49282b7ea7459", size = 17255, upload-time = "2026-01-14T06:27:36.056Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/1c/78/504fdd027da3b84ff1aecd9f6957e65f35134534ccc6da8628eb71e76d3f/send2trash-2.1.0-py3-none-any.whl", hash = "sha256:0da2f112e6d6bb22de6aa6daa7e144831a4febf2a87261451c4ad849fe9a873c", size = 17610, upload-time = "2026-01-14T06:27:35.218Z" },
+]
+
+[[package]]
+name = "setuptools"
+version = "80.10.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/86/ff/f75651350db3cf2ef767371307eb163f3cc1ac03e16fdf3ac347607f7edb/setuptools-80.10.1.tar.gz", hash = "sha256:bf2e513eb8144c3298a3bd28ab1a5edb739131ec5c22e045ff93cd7f5319703a", size = 1229650, upload-time = "2026-01-21T09:42:03.061Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/e0/76/f963c61683a39084aa575f98089253e1e852a4417cb8a3a8a422923a5246/setuptools-80.10.1-py3-none-any.whl", hash = "sha256:fc30c51cbcb8199a219c12cc9c281b5925a4978d212f84229c909636d9f6984e", size = 1099859, upload-time = "2026-01-21T09:42:00.688Z" },
+]
+
+[[package]]
+name = "shapely"
+version = "2.1.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "numpy" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/4d/bc/0989043118a27cccb4e906a46b7565ce36ca7b57f5a18b78f4f1b0f72d9d/shapely-2.1.2.tar.gz", hash = "sha256:2ed4ecb28320a433db18a5bf029986aa8afcfd740745e78847e330d5d94922a9", size = 315489, upload-time = "2025-09-24T13:51:41.432Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/8f/8d/1ff672dea9ec6a7b5d422eb6d095ed886e2e523733329f75fdcb14ee1149/shapely-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:91121757b0a36c9aac3427a651a7e6567110a4a67c97edf04f8d55d4765f6618", size = 1820038, upload-time = "2025-09-24T13:50:15.628Z" },
+    { url = "https://files.pythonhosted.org/packages/4f/ce/28fab8c772ce5db23a0d86bf0adaee0c4c79d5ad1db766055fa3dab442e2/shapely-2.1.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:16a9c722ba774cf50b5d4541242b4cce05aafd44a015290c82ba8a16931ff63d", size = 1626039, upload-time = "2025-09-24T13:50:16.881Z" },
+    { url = "https://files.pythonhosted.org/packages/70/8b/868b7e3f4982f5006e9395c1e12343c66a8155c0374fdc07c0e6a1ab547d/shapely-2.1.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cc4f7397459b12c0b196c9efe1f9d7e92463cbba142632b4cc6d8bbbbd3e2b09", size = 3001519, upload-time = "2025-09-24T13:50:18.606Z" },
+    { url = "https://files.pythonhosted.org/packages/13/02/58b0b8d9c17c93ab6340edd8b7308c0c5a5b81f94ce65705819b7416dba5/shapely-2.1.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:136ab87b17e733e22f0961504d05e77e7be8c9b5a8184f685b4a91a84efe3c26", size = 3110842, upload-time = "2025-09-24T13:50:21.77Z" },
+    { url = "https://files.pythonhosted.org/packages/af/61/8e389c97994d5f331dcffb25e2fa761aeedfb52b3ad9bcdd7b8671f4810a/shapely-2.1.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:16c5d0fc45d3aa0a69074979f4f1928ca2734fb2e0dde8af9611e134e46774e7", size = 4021316, upload-time = "2025-09-24T13:50:23.626Z" },
+    { url = "https://files.pythonhosted.org/packages/d3/d4/9b2a9fe6039f9e42ccf2cb3e84f219fd8364b0c3b8e7bbc857b5fbe9c14c/shapely-2.1.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6ddc759f72b5b2b0f54a7e7cde44acef680a55019eb52ac63a7af2cf17cb9cd2", size = 4178586, upload-time = "2025-09-24T13:50:25.443Z" },
+    { url = "https://files.pythonhosted.org/packages/16/f6/9840f6963ed4decf76b08fd6d7fed14f8779fb7a62cb45c5617fa8ac6eab/shapely-2.1.2-cp311-cp311-win32.whl", hash = "sha256:2fa78b49485391224755a856ed3b3bd91c8455f6121fee0db0e71cefb07d0ef6", size = 1543961, upload-time = "2025-09-24T13:50:26.968Z" },
+    { url = "https://files.pythonhosted.org/packages/38/1e/3f8ea46353c2a33c1669eb7327f9665103aa3a8dfe7f2e4ef714c210b2c2/shapely-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:c64d5c97b2f47e3cd9b712eaced3b061f2b71234b3fc263e0fcf7d889c6559dc", size = 1722856, upload-time = "2025-09-24T13:50:28.497Z" },
+    { url = "https://files.pythonhosted.org/packages/24/c0/f3b6453cf2dfa99adc0ba6675f9aaff9e526d2224cbd7ff9c1a879238693/shapely-2.1.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fe2533caae6a91a543dec62e8360fe86ffcdc42a7c55f9dfd0128a977a896b94", size = 1833550, upload-time = "2025-09-24T13:50:30.019Z" },
+    { url = "https://files.pythonhosted.org/packages/86/07/59dee0bc4b913b7ab59ab1086225baca5b8f19865e6101db9ebb7243e132/shapely-2.1.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ba4d1333cc0bc94381d6d4308d2e4e008e0bd128bdcff5573199742ee3634359", size = 1643556, upload-time = "2025-09-24T13:50:32.291Z" },
+    { url = "https://files.pythonhosted.org/packages/26/29/a5397e75b435b9895cd53e165083faed5d12fd9626eadec15a83a2411f0f/shapely-2.1.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0bd308103340030feef6c111d3eb98d50dc13feea33affc8a6f9fa549e9458a3", size = 2988308, upload-time = "2025-09-24T13:50:33.862Z" },
+    { url = "https://files.pythonhosted.org/packages/b9/37/e781683abac55dde9771e086b790e554811a71ed0b2b8a1e789b7430dd44/shapely-2.1.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1e7d4d7ad262a48bb44277ca12c7c78cb1b0f56b32c10734ec9a1d30c0b0c54b", size = 3099844, upload-time = "2025-09-24T13:50:35.459Z" },
+    { url = "https://files.pythonhosted.org/packages/d8/f3/9876b64d4a5a321b9dc482c92bb6f061f2fa42131cba643c699f39317cb9/shapely-2.1.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e9eddfe513096a71896441a7c37db72da0687b34752c4e193577a145c71736fc", size = 3988842, upload-time = "2025-09-24T13:50:37.478Z" },
+    { url = "https://files.pythonhosted.org/packages/d1/a0/704c7292f7014c7e74ec84eddb7b109e1fbae74a16deae9c1504b1d15565/shapely-2.1.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:980c777c612514c0cf99bc8a9de6d286f5e186dcaf9091252fcd444e5638193d", size = 4152714, upload-time = "2025-09-24T13:50:39.9Z" },
+    { url = "https://files.pythonhosted.org/packages/53/46/319c9dc788884ad0785242543cdffac0e6530e4d0deb6c4862bc4143dcf3/shapely-2.1.2-cp312-cp312-win32.whl", hash = "sha256:9111274b88e4d7b54a95218e243282709b330ef52b7b86bc6aaf4f805306f454", size = 1542745, upload-time = "2025-09-24T13:50:41.414Z" },
+    { url = "https://files.pythonhosted.org/packages/ec/bf/cb6c1c505cb31e818e900b9312d514f381fbfa5c4363edfce0fcc4f8c1a4/shapely-2.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:743044b4cfb34f9a67205cee9279feaf60ba7d02e69febc2afc609047cb49179", size = 1722861, upload-time = "2025-09-24T13:50:43.35Z" },
+    { url = "https://files.pythonhosted.org/packages/c3/90/98ef257c23c46425dc4d1d31005ad7c8d649fe423a38b917db02c30f1f5a/shapely-2.1.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b510dda1a3672d6879beb319bc7c5fd302c6c354584690973c838f46ec3e0fa8", size = 1832644, upload-time = "2025-09-24T13:50:44.886Z" },
+    { url = "https://files.pythonhosted.org/packages/6d/ab/0bee5a830d209adcd3a01f2d4b70e587cdd9fd7380d5198c064091005af8/shapely-2.1.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8cff473e81017594d20ec55d86b54bc635544897e13a7cfc12e36909c5309a2a", size = 1642887, upload-time = "2025-09-24T13:50:46.735Z" },
+    { url = "https://files.pythonhosted.org/packages/2d/5e/7d7f54ba960c13302584c73704d8c4d15404a51024631adb60b126a4ae88/shapely-2.1.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fe7b77dc63d707c09726b7908f575fc04ff1d1ad0f3fb92aec212396bc6cfe5e", size = 2970931, upload-time = "2025-09-24T13:50:48.374Z" },
+    { url = "https://files.pythonhosted.org/packages/f2/a2/83fc37e2a58090e3d2ff79175a95493c664bcd0b653dd75cb9134645a4e5/shapely-2.1.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7ed1a5bbfb386ee8332713bf7508bc24e32d24b74fc9a7b9f8529a55db9f4ee6", size = 3082855, upload-time = "2025-09-24T13:50:50.037Z" },
+    { url = "https://files.pythonhosted.org/packages/44/2b/578faf235a5b09f16b5f02833c53822294d7f21b242f8e2d0cf03fb64321/shapely-2.1.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a84e0582858d841d54355246ddfcbd1fce3179f185da7470f41ce39d001ee1af", size = 3979960, upload-time = "2025-09-24T13:50:51.74Z" },
+    { url = "https://files.pythonhosted.org/packages/4d/04/167f096386120f692cc4ca02f75a17b961858997a95e67a3cb6a7bbd6b53/shapely-2.1.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:dc3487447a43d42adcdf52d7ac73804f2312cbfa5d433a7d2c506dcab0033dfd", size = 4142851, upload-time = "2025-09-24T13:50:53.49Z" },
+    { url = "https://files.pythonhosted.org/packages/48/74/fb402c5a6235d1c65a97348b48cdedb75fb19eca2b1d66d04969fc1c6091/shapely-2.1.2-cp313-cp313-win32.whl", hash = "sha256:9c3a3c648aedc9f99c09263b39f2d8252f199cb3ac154fadc173283d7d111350", size = 1541890, upload-time = "2025-09-24T13:50:55.337Z" },
+    { url = "https://files.pythonhosted.org/packages/41/47/3647fe7ad990af60ad98b889657a976042c9988c2807cf322a9d6685f462/shapely-2.1.2-cp313-cp313-win_amd64.whl", hash = "sha256:ca2591bff6645c216695bdf1614fca9c82ea1144d4a7591a466fef64f28f0715", size = 1722151, upload-time = "2025-09-24T13:50:57.153Z" },
+    { url = "https://files.pythonhosted.org/packages/3c/49/63953754faa51ffe7d8189bfbe9ca34def29f8c0e34c67cbe2a2795f269d/shapely-2.1.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:2d93d23bdd2ed9dc157b46bc2f19b7da143ca8714464249bef6771c679d5ff40", size = 1834130, upload-time = "2025-09-24T13:50:58.49Z" },
+    { url = "https://files.pythonhosted.org/packages/7f/ee/dce001c1984052970ff60eb4727164892fb2d08052c575042a47f5a9e88f/shapely-2.1.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:01d0d304b25634d60bd7cf291828119ab55a3bab87dc4af1e44b07fb225f188b", size = 1642802, upload-time = "2025-09-24T13:50:59.871Z" },
+    { url = "https://files.pythonhosted.org/packages/da/e7/fc4e9a19929522877fa602f705706b96e78376afb7fad09cad5b9af1553c/shapely-2.1.2-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8d8382dd120d64b03698b7298b89611a6ea6f55ada9d39942838b79c9bc89801", size = 3018460, upload-time = "2025-09-24T13:51:02.08Z" },
+    { url = "https://files.pythonhosted.org/packages/a1/18/7519a25db21847b525696883ddc8e6a0ecaa36159ea88e0fef11466384d0/shapely-2.1.2-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:19efa3611eef966e776183e338b2d7ea43569ae99ab34f8d17c2c054d3205cc0", size = 3095223, upload-time = "2025-09-24T13:51:04.472Z" },
+    { url = "https://files.pythonhosted.org/packages/48/de/b59a620b1f3a129c3fecc2737104a0a7e04e79335bd3b0a1f1609744cf17/shapely-2.1.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:346ec0c1a0fcd32f57f00e4134d1200e14bf3f5ae12af87ba83ca275c502498c", size = 4030760, upload-time = "2025-09-24T13:51:06.455Z" },
+    { url = "https://files.pythonhosted.org/packages/96/b3/c6655ee7232b417562bae192ae0d3ceaadb1cc0ffc2088a2ddf415456cc2/shapely-2.1.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6305993a35989391bd3476ee538a5c9a845861462327efe00dd11a5c8c709a99", size = 4170078, upload-time = "2025-09-24T13:51:08.584Z" },
+    { url = "https://files.pythonhosted.org/packages/a0/8e/605c76808d73503c9333af8f6cbe7e1354d2d238bda5f88eea36bfe0f42a/shapely-2.1.2-cp313-cp313t-win32.whl", hash = "sha256:c8876673449f3401f278c86eb33224c5764582f72b653a415d0e6672fde887bf", size = 1559178, upload-time = "2025-09-24T13:51:10.73Z" },
+    { url = "https://files.pythonhosted.org/packages/36/f7/d317eb232352a1f1444d11002d477e54514a4a6045536d49d0c59783c0da/shapely-2.1.2-cp313-cp313t-win_amd64.whl", hash = "sha256:4a44bc62a10d84c11a7a3d7c1c4fe857f7477c3506e24c9062da0db0ae0c449c", size = 1739756, upload-time = "2025-09-24T13:51:12.105Z" },
+]
+
+[[package]]
+name = "six"
+version = "1.17.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" },
+]
+
+[[package]]
+name = "solweig"
+version = "0.1.0b47"
+source = { editable = "." }
+dependencies = [
+    { name = "numpy" },
+    { name = "pyproj" },
+    { name = "shapely" },
+]
+
+[package.optional-dependencies]
+full = [
+    { name = "geopandas" },
+    { name = "pillow" },
+    { name = "rasterio" },
+    { name = "tqdm" },
+]
+
+[package.dev-dependencies]
+dev = [
+    { name = "geopandas" },
+    { name = "ipykernel" },
+    { name = "jupyter" },
+    { name = "jupyterlab" },
+    { name = "maturin" },
+    { name = "memory-profiler" },
+    { name = "mkdocs" },
+    { name = "mkdocs-material" },
+    { name = "mkdocstrings", extra = ["python"] },
+    { name = "pandas-stubs" },
+    { name = "pillow" },
+    { name = "pip" },
+    { name = "poethepoet" },
+    { name = "pre-commit" },
+    { name = "pytest" },
+    { name = "rasterio" },
+    { name = "ruff" },
+    { name = "scipy" },
+    { name = "tqdm" },
+    { name = "ty" },
+    { name = "umep" },
+]
+qgis-compat = [
+    { name = "maturin" },
+    { name = "numpy" },
+    { name = "pandas" },
+    { name = "pyproj" },
+    { name = "pytest" },
+    { name = "shapely" },
+]
+
+[package.metadata]
+requires-dist = [
+    { name = "geopandas", marker = "extra == 'full'", specifier = ">=1.0.1" },
+    { name = "numpy", specifier = ">=1.26.0" },
+    { name = "pillow", marker = "extra == 'full'", specifier = ">=9.0.0" },
+    { name = "pyproj", specifier = ">=3.7.0" },
+    { name = "rasterio", marker = "extra == 'full'", specifier = ">=1.3.0" },
+    { name = "shapely", specifier = ">=2.0.4" },
+    { name = "tqdm", marker = "extra == 'full'", specifier = ">=4.67.1" },
+]
+provides-extras = ["full", "qgis"]
+
+[package.metadata.requires-dev]
+dev = [
+    { name = "geopandas", specifier = ">=1.0.1" },
+    { name = "ipykernel", specifier = ">=6.31.0" },
+    { name = "jupyter", specifier = ">=1.0.0" },
+    { name = "jupyterlab", specifier = ">=3.5.2" },
+    { name = "maturin", specifier = ">=1.8.3" },
+    { name = "memory-profiler", specifier = ">=0.61.0" },
+    { name = "mkdocs", specifier = ">=1.6.0" },
+    { name = "mkdocs-material", specifier = ">=9.5.0" },
+    { name = "mkdocstrings", extras = ["python"], specifier = ">=0.27.0" },
+    { name = "pandas-stubs", specifier = ">=1.5.2.221213" },
+    { name = "pillow", specifier = ">=9.0.0" },
+    { name = "pip", specifier = ">=23.2" },
+    { name = "poethepoet", specifier = ">=0.29.0" },
+    { name = "pre-commit", specifier = ">=4.3.0" },
+    { name = "pytest", specifier = ">=7.2.0" },
+    { name = "rasterio", specifier = ">=1.3.0" },
+    { name = "ruff", specifier = ">=0.5.1" },
+    { name = "scipy", specifier = ">=1.13.0" },
+    { name = "tqdm", specifier = ">=4.67.1" },
+    { name = "ty", specifier = ">=0.0.12" },
+    { name = "umep", specifier = ">=0.0.1a18" },
+]
+qgis-compat = [
+    { name = "maturin", specifier = ">=1.8.3" },
+    { name = "numpy", specifier = ">=1.26.0" },
+    { name = "pandas", specifier = ">=2.2.2" },
+    { name = "pyproj", specifier = ">=3.7.0" },
+    { name = "pytest", specifier = ">=7.2.0" },
+    { name = "shapely", specifier = ">=2.0.4" },
+]
+
+[[package]]
+name = "soupsieve"
+version = "2.8.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/7b/ae/2d9c981590ed9999a0d91755b47fc74f74de286b0f5cee14c9269041e6c4/soupsieve-2.8.3.tar.gz", hash = "sha256:3267f1eeea4251fb42728b6dfb746edc9acaffc4a45b27e19450b676586e8349", size = 118627, upload-time = "2026-01-20T04:27:02.457Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/46/2c/1462b1d0a634697ae9e55b3cecdcb64788e8b7d63f54d923fcd0bb140aed/soupsieve-2.8.3-py3-none-any.whl", hash = "sha256:ed64f2ba4eebeab06cc4962affce381647455978ffc1e36bb79a545b91f45a95", size = 37016, upload-time = "2026-01-20T04:27:01.012Z" },
+]
+
+[[package]]
+name = "stack-data"
+version = "0.6.3"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "asttokens" },
+    { name = "executing" },
+    { name = "pure-eval" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/28/e3/55dcc2cfbc3ca9c29519eb6884dd1415ecb53b0e934862d3559ddcb7e20b/stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9", size = 44707, upload-time = "2023-09-30T13:58:05.479Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/f1/7b/ce1eafaf1a76852e2ec9b22edecf1daa58175c090266e9f6c64afcd81d91/stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695", size = 24521, upload-time = "2023-09-30T13:58:03.53Z" },
+]
+
+[[package]]
+name = "terminado"
+version = "0.18.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "ptyprocess", marker = "os_name != 'nt'" },
+    { name = "pywinpty", marker = "os_name == 'nt'" },
+    { name = "tornado" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/8a/11/965c6fd8e5cc254f1fe142d547387da17a8ebfd75a3455f637c663fb38a0/terminado-0.18.1.tar.gz", hash = "sha256:de09f2c4b85de4765f7714688fff57d3e75bad1f909b589fde880460c753fd2e", size = 32701, upload-time = "2024-03-12T14:34:39.026Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/6a/9e/2064975477fdc887e47ad42157e214526dcad8f317a948dee17e1659a62f/terminado-0.18.1-py3-none-any.whl", hash = "sha256:a4468e1b37bb318f8a86514f65814e1afc977cf29b3992a4500d9dd305dcceb0", size = 14154, upload-time = "2024-03-12T14:34:36.569Z" },
+]
+
+[[package]]
+name = "tinycss2"
+version = "1.4.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "webencodings" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/7a/fd/7a5ee21fd08ff70d3d33a5781c255cbe779659bd03278feb98b19ee550f4/tinycss2-1.4.0.tar.gz", hash = "sha256:10c0972f6fc0fbee87c3edb76549357415e94548c1ae10ebccdea16fb404a9b7", size = 87085, upload-time = "2024-10-24T14:58:29.895Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/e6/34/ebdc18bae6aa14fbee1a08b63c015c72b64868ff7dae68808ab500c492e2/tinycss2-1.4.0-py3-none-any.whl", hash = "sha256:3a49cf47b7675da0b15d0c6e1df8df4ebd96e9394bb905a5775adb0d884c5289", size = 26610, upload-time = "2024-10-24T14:58:28.029Z" },
+]
+
+[[package]]
+name = "tornado"
+version = "6.5.4"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/37/1d/0a336abf618272d53f62ebe274f712e213f5a03c0b2339575430b8362ef2/tornado-6.5.4.tar.gz", hash = "sha256:a22fa9047405d03260b483980635f0b041989d8bcc9a313f8fe18b411d84b1d7", size = 513632, upload-time = "2025-12-15T19:21:03.836Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/ab/a9/e94a9d5224107d7ce3cc1fab8d5dc97f5ea351ccc6322ee4fb661da94e35/tornado-6.5.4-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:d6241c1a16b1c9e4cc28148b1cda97dd1c6cb4fb7068ac1bedc610768dff0ba9", size = 443909, upload-time = "2025-12-15T19:20:48.382Z" },
+    { url = "https://files.pythonhosted.org/packages/db/7e/f7b8d8c4453f305a51f80dbb49014257bb7d28ccb4bbb8dd328ea995ecad/tornado-6.5.4-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:2d50f63dda1d2cac3ae1fa23d254e16b5e38153758470e9956cbc3d813d40843", size = 442163, upload-time = "2025-12-15T19:20:49.791Z" },
+    { url = "https://files.pythonhosted.org/packages/ba/b5/206f82d51e1bfa940ba366a8d2f83904b15942c45a78dd978b599870ab44/tornado-6.5.4-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1cf66105dc6acb5af613c054955b8137e34a03698aa53272dbda4afe252be17", size = 445746, upload-time = "2025-12-15T19:20:51.491Z" },
+    { url = "https://files.pythonhosted.org/packages/8e/9d/1a3338e0bd30ada6ad4356c13a0a6c35fbc859063fa7eddb309183364ac1/tornado-6.5.4-cp39-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50ff0a58b0dc97939d29da29cd624da010e7f804746621c78d14b80238669335", size = 445083, upload-time = "2025-12-15T19:20:52.778Z" },
+    { url = "https://files.pythonhosted.org/packages/50/d4/e51d52047e7eb9a582da59f32125d17c0482d065afd5d3bc435ff2120dc5/tornado-6.5.4-cp39-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5fb5e04efa54cf0baabdd10061eb4148e0be137166146fff835745f59ab9f7f", size = 445315, upload-time = "2025-12-15T19:20:53.996Z" },
+    { url = "https://files.pythonhosted.org/packages/27/07/2273972f69ca63dbc139694a3fc4684edec3ea3f9efabf77ed32483b875c/tornado-6.5.4-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9c86b1643b33a4cd415f8d0fe53045f913bf07b4a3ef646b735a6a86047dda84", size = 446003, upload-time = "2025-12-15T19:20:56.101Z" },
+    { url = "https://files.pythonhosted.org/packages/d1/83/41c52e47502bf7260044413b6770d1a48dda2f0246f95ee1384a3cd9c44a/tornado-6.5.4-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:6eb82872335a53dd063a4f10917b3efd28270b56a33db69009606a0312660a6f", size = 445412, upload-time = "2025-12-15T19:20:57.398Z" },
+    { url = "https://files.pythonhosted.org/packages/10/c7/bc96917f06cbee182d44735d4ecde9c432e25b84f4c2086143013e7b9e52/tornado-6.5.4-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6076d5dda368c9328ff41ab5d9dd3608e695e8225d1cd0fd1e006f05da3635a8", size = 445392, upload-time = "2025-12-15T19:20:58.692Z" },
+    { url = "https://files.pythonhosted.org/packages/0c/1a/d7592328d037d36f2d2462f4bc1fbb383eec9278bc786c1b111cbbd44cfa/tornado-6.5.4-cp39-abi3-win32.whl", hash = "sha256:1768110f2411d5cd281bac0a090f707223ce77fd110424361092859e089b38d1", size = 446481, upload-time = "2025-12-15T19:21:00.008Z" },
+    { url = "https://files.pythonhosted.org/packages/d6/6d/c69be695a0a64fd37a97db12355a035a6d90f79067a3cf936ec2b1dc38cd/tornado-6.5.4-cp39-abi3-win_amd64.whl", hash = "sha256:fa07d31e0cd85c60713f2b995da613588aa03e1303d75705dca6af8babc18ddc", size = 446886, upload-time = "2025-12-15T19:21:01.287Z" },
+    { url = "https://files.pythonhosted.org/packages/50/49/8dc3fd90902f70084bd2cd059d576ddb4f8bb44c2c7c0e33a11422acb17e/tornado-6.5.4-cp39-abi3-win_arm64.whl", hash = "sha256:053e6e16701eb6cbe641f308f4c1a9541f91b6261991160391bfc342e8a551a1", size = 445910, upload-time = "2025-12-15T19:21:02.571Z" },
+]
+
+[[package]]
+name = "tqdm"
+version = "4.67.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "colorama", marker = "sys_platform == 'win32'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/a8/4b/29b4ef32e036bb34e4ab51796dd745cdba7ed47ad142a9f4a1eb8e0c744d/tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2", size = 169737, upload-time = "2024-11-24T20:12:22.481Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", size = 78540, upload-time = "2024-11-24T20:12:19.698Z" },
+]
+
+[[package]]
+name = "traitlets"
+version = "5.14.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/eb/79/72064e6a701c2183016abbbfedaba506d81e30e232a68c9f0d6f6fcd1574/traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7", size = 161621, upload-time = "2024-04-19T11:11:49.746Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/00/c0/8f5d070730d7836adc9c9b6408dec68c6ced86b304a9b26a14df072a6e8c/traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f", size = 85359, upload-time = "2024-04-19T11:11:46.763Z" },
+]
+
+[[package]]
+name = "ty"
+version = "0.0.17"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/66/c3/41ae6346443eedb65b96761abfab890a48ce2aa5a8a27af69c5c5d99064d/ty-0.0.17.tar.gz", hash = "sha256:847ed6c120913e280bf9b54d8eaa7a1049708acb8824ad234e71498e8ad09f97", size = 5167209, upload-time = "2026-02-13T13:26:36.835Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/c0/01/0ef15c22a1c54b0f728ceff3f62d478dbf8b0dcf8ff7b80b954f79584f3e/ty-0.0.17-py3-none-linux_armv6l.whl", hash = "sha256:64a9a16555cc8867d35c2647c2f1afbd3cae55f68fd95283a574d1bb04fe93e0", size = 10192793, upload-time = "2026-02-13T13:27:13.943Z" },
+    { url = "https://files.pythonhosted.org/packages/0f/2c/f4c322d9cded56edc016b1092c14b95cf58c8a33b4787316ea752bb9418e/ty-0.0.17-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:eb2dbd8acd5c5a55f4af0d479523e7c7265a88542efe73ed3d696eb1ba7b6454", size = 10051977, upload-time = "2026-02-13T13:26:57.741Z" },
+    { url = "https://files.pythonhosted.org/packages/4c/a5/43746c1ff81e784f5fc303afc61fe5bcd85d0fcf3ef65cb2cef78c7486c7/ty-0.0.17-py3-none-macosx_11_0_arm64.whl", hash = "sha256:f18f5fd927bc628deb9ea2df40f06b5f79c5ccf355db732025a3e8e7152801f6", size = 9564639, upload-time = "2026-02-13T13:26:42.781Z" },
+    { url = "https://files.pythonhosted.org/packages/d6/b8/280b04e14a9c0474af574f929fba2398b5e1c123c1e7735893b4cd73d13c/ty-0.0.17-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5383814d1d7a5cc53b3b07661856bab04bb2aac7a677c8d33c55169acdaa83df", size = 10061204, upload-time = "2026-02-13T13:27:00.152Z" },
+    { url = "https://files.pythonhosted.org/packages/2a/d7/493e1607d8dfe48288d8a768a2adc38ee27ef50e57f0af41ff273987cda0/ty-0.0.17-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9c20423b8744b484f93e7bf2ef8a9724bca2657873593f9f41d08bd9f83444c9", size = 10013116, upload-time = "2026-02-13T13:26:34.543Z" },
+    { url = "https://files.pythonhosted.org/packages/80/ef/22f3ed401520afac90dbdf1f9b8b7755d85b0d5c35c1cb35cf5bd11b59c2/ty-0.0.17-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e6f5b1aba97db9af86517b911674b02f5bc310750485dc47603a105bd0e83ddd", size = 10533623, upload-time = "2026-02-13T13:26:31.449Z" },
+    { url = "https://files.pythonhosted.org/packages/75/ce/744b15279a11ac7138832e3a55595706b4a8a209c9f878e3ab8e571d9032/ty-0.0.17-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:488bce1a9bea80b851a97cd34c4d2ffcd69593d6c3f54a72ae02e5c6e47f3d0c", size = 11069750, upload-time = "2026-02-13T13:26:48.638Z" },
+    { url = "https://files.pythonhosted.org/packages/f2/be/1133c91f15a0e00d466c24f80df486d630d95d1b2af63296941f7473812f/ty-0.0.17-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8df66b91ec84239420985ec215e7f7549bfda2ac036a3b3c065f119d1c06825a", size = 10870862, upload-time = "2026-02-13T13:26:54.715Z" },
+    { url = "https://files.pythonhosted.org/packages/3e/4a/a2ed209ef215b62b2d3246e07e833081e07d913adf7e0448fc204be443d6/ty-0.0.17-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:002139e807c53002790dfefe6e2f45ab0e04012e76db3d7c8286f96ec121af8f", size = 10628118, upload-time = "2026-02-13T13:26:45.439Z" },
+    { url = "https://files.pythonhosted.org/packages/b3/0c/87476004cb5228e9719b98afffad82c3ef1f84334bde8527bcacba7b18cb/ty-0.0.17-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:6c4e01f05ce82e5d489ab3900ca0899a56c4ccb52659453780c83e5b19e2b64c", size = 10038185, upload-time = "2026-02-13T13:27:02.693Z" },
+    { url = "https://files.pythonhosted.org/packages/46/4b/98f0b3ba9aef53c1f0305519536967a4aa793a69ed72677b0a625c5313ac/ty-0.0.17-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:2b226dd1e99c0d2152d218c7e440150d1a47ce3c431871f0efa073bbf899e881", size = 10047644, upload-time = "2026-02-13T13:27:05.474Z" },
+    { url = "https://files.pythonhosted.org/packages/93/e0/06737bb80aa1a9103b8651d2eb691a7e53f1ed54111152be25f4a02745db/ty-0.0.17-py3-none-musllinux_1_2_i686.whl", hash = "sha256:8b11f1da7859e0ad69e84b3c5ef9a7b055ceed376a432fad44231bdfc48061c2", size = 10231140, upload-time = "2026-02-13T13:27:10.844Z" },
+    { url = "https://files.pythonhosted.org/packages/7c/79/e2a606bd8852383ba9abfdd578f4a227bd18504145381a10a5f886b4e751/ty-0.0.17-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:c04e196809ff570559054d3e011425fd7c04161529eb551b3625654e5f2434cb", size = 10718344, upload-time = "2026-02-13T13:26:51.66Z" },
+    { url = "https://files.pythonhosted.org/packages/c5/2d/2663984ac11de6d78f74432b8b14ba64d170b45194312852b7543cf7fd56/ty-0.0.17-py3-none-win32.whl", hash = "sha256:305b6ed150b2740d00a817b193373d21f0767e10f94ac47abfc3b2e5a5aec809", size = 9672932, upload-time = "2026-02-13T13:27:08.522Z" },
+    { url = "https://files.pythonhosted.org/packages/de/b5/39be78f30b31ee9f5a585969930c7248354db90494ff5e3d0756560fb731/ty-0.0.17-py3-none-win_amd64.whl", hash = "sha256:531828267527aee7a63e972f54e5eee21d9281b72baf18e5c2850c6b862add83", size = 10542138, upload-time = "2026-02-13T13:27:17.084Z" },
+    { url = "https://files.pythonhosted.org/packages/40/b7/f875c729c5d0079640c75bad2c7e5d43edc90f16ba242f28a11966df8f65/ty-0.0.17-py3-none-win_arm64.whl", hash = "sha256:de9810234c0c8d75073457e10a84825b9cd72e6629826b7f01c7a0b266ae25b1", size = 10023068, upload-time = "2026-02-13T13:26:39.637Z" },
+]
+
+[[package]]
+name = "types-pytz"
+version = "2025.2.0.20251108"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/40/ff/c047ddc68c803b46470a357454ef76f4acd8c1088f5cc4891cdd909bfcf6/types_pytz-2025.2.0.20251108.tar.gz", hash = "sha256:fca87917836ae843f07129567b74c1929f1870610681b4c92cb86a3df5817bdb", size = 10961, upload-time = "2025-11-08T02:55:57.001Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/e7/c1/56ef16bf5dcd255155cc736d276efa6ae0a5c26fd685e28f0412a4013c01/types_pytz-2025.2.0.20251108-py3-none-any.whl", hash = "sha256:0f1c9792cab4eb0e46c52f8845c8f77cf1e313cb3d68bf826aa867fe4717d91c", size = 10116, upload-time = "2025-11-08T02:55:56.194Z" },
+]
+
+[[package]]
+name = "typing-extensions"
+version = "4.15.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" },
+]
+
+[[package]]
+name = "tzdata"
+version = "2025.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/5e/a7/c202b344c5ca7daf398f3b8a477eeb205cf3b6f32e7ec3a6bac0629ca975/tzdata-2025.3.tar.gz", hash = "sha256:de39c2ca5dc7b0344f2eba86f49d614019d29f060fc4ebc8a417896a620b56a7", size = 196772, upload-time = "2025-12-13T17:45:35.667Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/c7/b0/003792df09decd6849a5e39c28b513c06e84436a54440380862b5aeff25d/tzdata-2025.3-py2.py3-none-any.whl", hash = "sha256:06a47e5700f3081aab02b2e513160914ff0694bce9947d6b76ebd6bf57cfc5d1", size = 348521, upload-time = "2025-12-13T17:45:33.889Z" },
+]
+
+[[package]]
+name = "umep"
+version = "0.0.1b32"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "geopandas" },
+    { name = "matplotlib" },
+    { name = "numba" },
+    { name = "numpy" },
+    { name = "pandas" },
+    { name = "pvlib" },
+    { name = "pyproj" },
+    { name = "rasterio" },
+    { name = "rioxarray" },
+    { name = "scipy" },
+    { name = "shapely" },
+    { name = "tqdm" },
+    { name = "xarray" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/45/29/12052bcb63077ce1aa913f0cce03fbad40961d1c7e49d3053314931e5b04/umep-0.0.1b32.tar.gz", hash = "sha256:a2500399007de0c94329626b9807877e31c17f04ccd63173d6b8188bb313c5db", size = 2247546, upload-time = "2025-08-21T14:27:20.243Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/96/db/dd74789eadf4cf82ddfd80123001c5811cb9552dc97c32202b890a54a0b7/umep-0.0.1b32-py3-none-any.whl", hash = "sha256:264b917bc23feb3c03f043a98237eae784431444e87da00f2a77d2bd38d58d6d", size = 2248648, upload-time = "2025-08-21T14:27:18.752Z" },
+]
+
+[[package]]
+name = "uri-template"
+version = "1.3.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/31/c7/0336f2bd0bcbada6ccef7aaa25e443c118a704f828a0620c6fa0207c1b64/uri-template-1.3.0.tar.gz", hash = "sha256:0e00f8eb65e18c7de20d595a14336e9f337ead580c70934141624b6d1ffdacc7", size = 21678, upload-time = "2023-06-21T01:49:05.374Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/e7/00/3fca040d7cf8a32776d3d81a00c8ee7457e00f80c649f1e4a863c8321ae9/uri_template-1.3.0-py3-none-any.whl", hash = "sha256:a44a133ea12d44a0c0f06d7d42a52d71282e77e2f937d8abd5655b8d56fc1363", size = 11140, upload-time = "2023-06-21T01:49:03.467Z" },
+]
+
+[[package]]
+name = "urllib3"
+version = "2.6.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/c7/24/5f1b3bdffd70275f6661c76461e25f024d5a38a46f04aaca912426a2b1d3/urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed", size = 435556, upload-time = "2026-01-07T16:24:43.925Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4", size = 131584, upload-time = "2026-01-07T16:24:42.685Z" },
+]
+
+[[package]]
+name = "virtualenv"
+version = "20.36.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "distlib" },
+    { name = "filelock" },
+    { name = "platformdirs" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/aa/a3/4d310fa5f00863544e1d0f4de93bddec248499ccf97d4791bc3122c9d4f3/virtualenv-20.36.1.tar.gz", hash = "sha256:8befb5c81842c641f8ee658481e42641c68b5eab3521d8e092d18320902466ba", size = 6032239, upload-time = "2026-01-09T18:21:01.296Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/6a/2a/dc2228b2888f51192c7dc766106cd475f1b768c10caaf9727659726f7391/virtualenv-20.36.1-py3-none-any.whl", hash = "sha256:575a8d6b124ef88f6f51d56d656132389f961062a9177016a50e4f507bbcc19f", size = 6008258, upload-time = "2026-01-09T18:20:59.425Z" },
+]
+
+[[package]]
+name = "watchdog"
+version = "6.0.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/db/7d/7f3d619e951c88ed75c6037b246ddcf2d322812ee8ea189be89511721d54/watchdog-6.0.0.tar.gz", hash = "sha256:9ddf7c82fda3ae8e24decda1338ede66e1c99883db93711d8fb941eaa2d8c282", size = 131220, upload-time = "2024-11-01T14:07:13.037Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/e0/24/d9be5cd6642a6aa68352ded4b4b10fb0d7889cb7f45814fb92cecd35f101/watchdog-6.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6eb11feb5a0d452ee41f824e271ca311a09e250441c262ca2fd7ebcf2461a06c", size = 96393, upload-time = "2024-11-01T14:06:31.756Z" },
+    { url = "https://files.pythonhosted.org/packages/63/7a/6013b0d8dbc56adca7fdd4f0beed381c59f6752341b12fa0886fa7afc78b/watchdog-6.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ef810fbf7b781a5a593894e4f439773830bdecb885e6880d957d5b9382a960d2", size = 88392, upload-time = "2024-11-01T14:06:32.99Z" },
+    { url = "https://files.pythonhosted.org/packages/d1/40/b75381494851556de56281e053700e46bff5b37bf4c7267e858640af5a7f/watchdog-6.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:afd0fe1b2270917c5e23c2a65ce50c2a4abb63daafb0d419fde368e272a76b7c", size = 89019, upload-time = "2024-11-01T14:06:34.963Z" },
+    { url = "https://files.pythonhosted.org/packages/39/ea/3930d07dafc9e286ed356a679aa02d777c06e9bfd1164fa7c19c288a5483/watchdog-6.0.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bdd4e6f14b8b18c334febb9c4425a878a2ac20efd1e0b231978e7b150f92a948", size = 96471, upload-time = "2024-11-01T14:06:37.745Z" },
+    { url = "https://files.pythonhosted.org/packages/12/87/48361531f70b1f87928b045df868a9fd4e253d9ae087fa4cf3f7113be363/watchdog-6.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c7c15dda13c4eb00d6fb6fc508b3c0ed88b9d5d374056b239c4ad1611125c860", size = 88449, upload-time = "2024-11-01T14:06:39.748Z" },
+    { url = "https://files.pythonhosted.org/packages/5b/7e/8f322f5e600812e6f9a31b75d242631068ca8f4ef0582dd3ae6e72daecc8/watchdog-6.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6f10cb2d5902447c7d0da897e2c6768bca89174d0c6e1e30abec5421af97a5b0", size = 89054, upload-time = "2024-11-01T14:06:41.009Z" },
+    { url = "https://files.pythonhosted.org/packages/68/98/b0345cabdce2041a01293ba483333582891a3bd5769b08eceb0d406056ef/watchdog-6.0.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:490ab2ef84f11129844c23fb14ecf30ef3d8a6abafd3754a6f75ca1e6654136c", size = 96480, upload-time = "2024-11-01T14:06:42.952Z" },
+    { url = "https://files.pythonhosted.org/packages/85/83/cdf13902c626b28eedef7ec4f10745c52aad8a8fe7eb04ed7b1f111ca20e/watchdog-6.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:76aae96b00ae814b181bb25b1b98076d5fc84e8a53cd8885a318b42b6d3a5134", size = 88451, upload-time = "2024-11-01T14:06:45.084Z" },
+    { url = "https://files.pythonhosted.org/packages/fe/c4/225c87bae08c8b9ec99030cd48ae9c4eca050a59bf5c2255853e18c87b50/watchdog-6.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a175f755fc2279e0b7312c0035d52e27211a5bc39719dd529625b1930917345b", size = 89057, upload-time = "2024-11-01T14:06:47.324Z" },
+    { url = "https://files.pythonhosted.org/packages/a9/c7/ca4bf3e518cb57a686b2feb4f55a1892fd9a3dd13f470fca14e00f80ea36/watchdog-6.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:7607498efa04a3542ae3e05e64da8202e58159aa1fa4acddf7678d34a35d4f13", size = 79079, upload-time = "2024-11-01T14:06:59.472Z" },
+    { url = "https://files.pythonhosted.org/packages/5c/51/d46dc9332f9a647593c947b4b88e2381c8dfc0942d15b8edc0310fa4abb1/watchdog-6.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:9041567ee8953024c83343288ccc458fd0a2d811d6a0fd68c4c22609e3490379", size = 79078, upload-time = "2024-11-01T14:07:01.431Z" },
+    { url = "https://files.pythonhosted.org/packages/d4/57/04edbf5e169cd318d5f07b4766fee38e825d64b6913ca157ca32d1a42267/watchdog-6.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:82dc3e3143c7e38ec49d61af98d6558288c415eac98486a5c581726e0737c00e", size = 79076, upload-time = "2024-11-01T14:07:02.568Z" },
+    { url = "https://files.pythonhosted.org/packages/ab/cc/da8422b300e13cb187d2203f20b9253e91058aaf7db65b74142013478e66/watchdog-6.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:212ac9b8bf1161dc91bd09c048048a95ca3a4c4f5e5d4a7d1b1a7d5752a7f96f", size = 79077, upload-time = "2024-11-01T14:07:03.893Z" },
+    { url = "https://files.pythonhosted.org/packages/2c/3b/b8964e04ae1a025c44ba8e4291f86e97fac443bca31de8bd98d3263d2fcf/watchdog-6.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:e3df4cbb9a450c6d49318f6d14f4bbc80d763fa587ba46ec86f99f9e6876bb26", size = 79078, upload-time = "2024-11-01T14:07:05.189Z" },
+    { url = "https://files.pythonhosted.org/packages/62/ae/a696eb424bedff7407801c257d4b1afda455fe40821a2be430e173660e81/watchdog-6.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:2cce7cfc2008eb51feb6aab51251fd79b85d9894e98ba847408f662b3395ca3c", size = 79077, upload-time = "2024-11-01T14:07:06.376Z" },
+    { url = "https://files.pythonhosted.org/packages/b5/e8/dbf020b4d98251a9860752a094d09a65e1b436ad181faf929983f697048f/watchdog-6.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:20ffe5b202af80ab4266dcd3e91aae72bf2da48c0d33bdb15c66658e685e94e2", size = 79078, upload-time = "2024-11-01T14:07:07.547Z" },
+    { url = "https://files.pythonhosted.org/packages/07/f6/d0e5b343768e8bcb4cda79f0f2f55051bf26177ecd5651f84c07567461cf/watchdog-6.0.0-py3-none-win32.whl", hash = "sha256:07df1fdd701c5d4c8e55ef6cf55b8f0120fe1aef7ef39a1c6fc6bc2e606d517a", size = 79065, upload-time = "2024-11-01T14:07:09.525Z" },
+    { url = "https://files.pythonhosted.org/packages/db/d9/c495884c6e548fce18a8f40568ff120bc3a4b7b99813081c8ac0c936fa64/watchdog-6.0.0-py3-none-win_amd64.whl", hash = "sha256:cbafb470cf848d93b5d013e2ecb245d4aa1c8fd0504e863ccefa32445359d680", size = 79070, upload-time = "2024-11-01T14:07:10.686Z" },
+    { url = "https://files.pythonhosted.org/packages/33/e8/e40370e6d74ddba47f002a32919d91310d6074130fe4e17dabcafc15cbf1/watchdog-6.0.0-py3-none-win_ia64.whl", hash = "sha256:a1914259fa9e1454315171103c6a30961236f508b9b623eae470268bbcc6a22f", size = 79067, upload-time = "2024-11-01T14:07:11.845Z" },
+]
+
+[[package]]
+name = "wcwidth"
+version = "0.3.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/38/75/2144b65e4fba12a2d9868e9a3f99db7fa0760670d064603634bef9ff1709/wcwidth-0.3.0.tar.gz", hash = "sha256:af1a2fb0b83ef4a7fc0682a4c95ca2576e14d0280bca2a9e67b7dc9f2733e123", size = 172238, upload-time = "2026-01-21T17:44:09.508Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/18/0e/a5f0257ab47492b7afb5fb60347d14ba19445e2773fc8352d4be6bd2f6f8/wcwidth-0.3.0-py3-none-any.whl", hash = "sha256:073a1acb250e4add96cfd5ef84e0036605cd6e0d0782c8c15c80e42202348458", size = 85520, upload-time = "2026-01-21T17:44:08.002Z" },
+]
+
+[[package]]
+name = "webcolors"
+version = "25.10.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/1d/7a/eb316761ec35664ea5174709a68bbd3389de60d4a1ebab8808bfc264ed67/webcolors-25.10.0.tar.gz", hash = "sha256:62abae86504f66d0f6364c2a8520de4a0c47b80c03fc3a5f1815fedbef7c19bf", size = 53491, upload-time = "2025-10-31T07:51:03.977Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/e2/cc/e097523dd85c9cf5d354f78310927f1656c422bd7b2613b2db3e3f9a0f2c/webcolors-25.10.0-py3-none-any.whl", hash = "sha256:032c727334856fc0b968f63daa252a1ac93d33db2f5267756623c210e57a4f1d", size = 14905, upload-time = "2025-10-31T07:51:01.778Z" },
+]
+
+[[package]]
+name = "webencodings"
+version = "0.5.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/0b/02/ae6ceac1baeda530866a85075641cec12989bd8d31af6d5ab4a3e8c92f47/webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923", size = 9721, upload-time = "2017-04-05T20:21:34.189Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/f4/24/2a3e3df732393fed8b3ebf2ec078f05546de641fe1b667ee316ec1dcf3b7/webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78", size = 11774, upload-time = "2017-04-05T20:21:32.581Z" },
+]
+
+[[package]]
+name = "websocket-client"
+version = "1.9.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/2c/41/aa4bf9664e4cda14c3b39865b12251e8e7d239f4cd0e3cc1b6c2ccde25c1/websocket_client-1.9.0.tar.gz", hash = "sha256:9e813624b6eb619999a97dc7958469217c3176312b3a16a4bd1bc7e08a46ec98", size = 70576, upload-time = "2025-10-07T21:16:36.495Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/34/db/b10e48aa8fff7407e67470363eac595018441cf32d5e1001567a7aeba5d2/websocket_client-1.9.0-py3-none-any.whl", hash = "sha256:af248a825037ef591efbf6ed20cc5faa03d3b47b9e5a2230a529eeee1c1fc3ef", size = 82616, upload-time = "2025-10-07T21:16:34.951Z" },
+]
+
+[[package]]
+name = "widgetsnbextension"
+version = "4.0.15"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/bd/f4/c67440c7fb409a71b7404b7aefcd7569a9c0d6bd071299bf4198ae7a5d95/widgetsnbextension-4.0.15.tar.gz", hash = "sha256:de8610639996f1567952d763a5a41af8af37f2575a41f9852a38f947eb82a3b9", size = 1097402, upload-time = "2025-11-01T21:15:55.178Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/3f/0e/fa3b193432cfc60c93b42f3be03365f5f909d2b3ea410295cf36df739e31/widgetsnbextension-4.0.15-py3-none-any.whl", hash = "sha256:8156704e4346a571d9ce73b84bee86a29906c9abfd7223b7228a28899ccf3366", size = 2196503, upload-time = "2025-11-01T21:15:53.565Z" },
+]
+
+[[package]]
+name = "xarray"
+version = "2025.12.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "numpy" },
+    { name = "packaging" },
+    { name = "pandas" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/d3/af/7b945f331ba8911fdfff2fdfa092763156119f124be1ba4144615c540222/xarray-2025.12.0.tar.gz", hash = "sha256:73f6a6fadccc69c4d45bdd70821a47c72de078a8a0313ff8b1e97cd54ac59fed", size = 3082244, upload-time = "2025-12-05T21:51:22.432Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/d5/e4/62a677feefde05b12a70a4fc9bdc8558010182a801fbcab68cb56c2b0986/xarray-2025.12.0-py3-none-any.whl", hash = "sha256:9e77e820474dbbe4c6c2954d0da6342aa484e33adaa96ab916b15a786181e970", size = 1381742, upload-time = "2025-12-05T21:51:20.841Z" },
+]