From 923b238b10eddea96f06bbe0ed77e757c4e3111d Mon Sep 17 00:00:00 2001 From: Runyang Xu Date: Thu, 26 Mar 2026 16:51:30 -0400 Subject: [PATCH 1/2] Align wildfire models and docs to 31-model roster --- docs/source/api/pyhazards.models.rst | 2 +- docs/source/appendix_a_coverage.rst | 8 +- docs/source/benchmarks/wildfire_benchmark.rst | 9 +- .../benchmarks/wildfirespreadts_ecosystem.rst | 6 +- docs/source/datasets/fpa_fod_weekly.rst | 2 +- ...ecasting.rst => models_attention_unet.rst} | 34 +- .../source/modules/models_convgru_trajgru.rst | 107 +++ docs/source/modules/models_convlstm.rst | 107 +++ docs/source/modules/models_deep_ensemble.rst | 107 +++ docs/source/modules/models_deeplabv3p.rst | 107 +++ docs/source/modules/models_earthfarseer.rst | 107 +++ docs/source/modules/models_earthformer.rst | 107 +++ docs/source/modules/models_firepred.rst | 107 +++ ...wildfire_mamba.rst => models_lightgbm.rst} | 38 +- .../modules/models_logistic_regression.rst | 105 +++ docs/source/modules/models_mau.rst | 107 +++ docs/source/modules/models_predrnn_v2.rst | 107 +++ docs/source/modules/models_rainformer.rst | 107 +++ docs/source/modules/models_random_forest.rst | 105 +++ docs/source/modules/models_resnet18_unet.rst | 107 +++ docs/source/modules/models_segformer.rst | 107 +++ docs/source/modules/models_swin_unet.rst | 107 +++ docs/source/modules/models_swinlstm.rst | 107 +++ docs/source/modules/models_tcn.rst | 107 +++ docs/source/modules/models_ts_satfire.rst | 107 +++ docs/source/modules/models_unet.rst | 107 +++ docs/source/modules/models_utae.rst | 107 +++ docs/source/modules/models_vit_segmenter.rst | 107 +++ docs/source/modules/models_xgboost.rst | 105 +++ docs/source/pyhazards_benchmarks.rst | 10 +- docs/source/pyhazards_models.rst | 666 +++++++++++++++++- pyhazards/appendix_a_catalog.py | 1 - .../benchmark_cards/wildfire_benchmark.yaml | 36 +- .../wildfirespreadts_ecosystem.yaml | 22 +- pyhazards/benchmark_catalog.py | 29 +- pyhazards/dataset_cards/fpa_fod_weekly.yaml | 1 
- pyhazards/dataset_catalog.py | 57 +- pyhazards/model_cards/attention_unet.yaml | 39 + pyhazards/model_cards/convgru_trajgru.yaml | 40 ++ pyhazards/model_cards/convlstm.yaml | 40 ++ pyhazards/model_cards/deep_ensemble.yaml | 40 ++ pyhazards/model_cards/deeplabv3p.yaml | 39 + pyhazards/model_cards/earthfarseer.yaml | 40 ++ pyhazards/model_cards/earthformer.yaml | 40 ++ pyhazards/model_cards/firepred.yaml | 40 ++ pyhazards/model_cards/lightgbm.yaml | 40 ++ .../model_cards/logistic_regression.yaml | 38 + pyhazards/model_cards/mau.yaml | 40 ++ pyhazards/model_cards/predrnn_v2.yaml | 40 ++ pyhazards/model_cards/rainformer.yaml | 40 ++ pyhazards/model_cards/random_forest.yaml | 39 + pyhazards/model_cards/resnet18_unet.yaml | 39 + pyhazards/model_cards/segformer.yaml | 40 ++ pyhazards/model_cards/swin_unet.yaml | 41 ++ pyhazards/model_cards/swinlstm.yaml | 41 ++ pyhazards/model_cards/tcn.yaml | 40 ++ pyhazards/model_cards/ts_satfire.yaml | 40 ++ pyhazards/model_cards/unet.yaml | 39 + pyhazards/model_cards/utae.yaml | 40 ++ pyhazards/model_cards/vit_segmenter.yaml | 40 ++ .../model_cards/wildfire_forecasting.yaml | 53 -- pyhazards/model_cards/wildfire_mamba.yaml | 58 -- pyhazards/model_cards/xgboost.yaml | 40 ++ pyhazards/model_catalog.py | 38 +- pyhazards/models/__init__.py | 229 ++++++ pyhazards/models/_wildfire_estimator.py | 44 ++ pyhazards/models/_wildfire_layers.py | 117 +++ pyhazards/models/attention_unet.py | 52 ++ pyhazards/models/convgru_trajgru.py | 54 ++ pyhazards/models/convlstm.py | 59 ++ pyhazards/models/deep_ensemble.py | 36 + pyhazards/models/deeplabv3p.py | 37 + pyhazards/models/earthfarseer.py | 43 ++ pyhazards/models/earthformer.py | 37 + pyhazards/models/firepred.py | 98 +++ pyhazards/models/lightgbm.py | 48 ++ pyhazards/models/logistic_regression.py | 39 + pyhazards/models/mau.py | 39 + pyhazards/models/predrnn_v2.py | 55 ++ pyhazards/models/rainformer.py | 35 + pyhazards/models/random_forest.py | 45 ++ pyhazards/models/resnet18_unet.py | 44 ++ 
pyhazards/models/segformer.py | 36 + pyhazards/models/swin_unet.py | 49 ++ pyhazards/models/swinlstm.py | 49 ++ pyhazards/models/tcn.py | 41 ++ pyhazards/models/ts_satfire.py | 84 +++ pyhazards/models/unet.py | 41 ++ pyhazards/models/utae.py | 40 ++ pyhazards/models/vit_segmenter.py | 36 + pyhazards/models/xgboost.py | 48 ++ 91 files changed, 5689 insertions(+), 219 deletions(-) rename docs/source/modules/{models_wildfire_forecasting.rst => models_attention_unet.rst} (61%) create mode 100644 docs/source/modules/models_convgru_trajgru.rst create mode 100644 docs/source/modules/models_convlstm.rst create mode 100644 docs/source/modules/models_deep_ensemble.rst create mode 100644 docs/source/modules/models_deeplabv3p.rst create mode 100644 docs/source/modules/models_earthfarseer.rst create mode 100644 docs/source/modules/models_earthformer.rst create mode 100644 docs/source/modules/models_firepred.rst rename docs/source/modules/{models_wildfire_mamba.rst => models_lightgbm.rst} (57%) create mode 100644 docs/source/modules/models_logistic_regression.rst create mode 100644 docs/source/modules/models_mau.rst create mode 100644 docs/source/modules/models_predrnn_v2.rst create mode 100644 docs/source/modules/models_rainformer.rst create mode 100644 docs/source/modules/models_random_forest.rst create mode 100644 docs/source/modules/models_resnet18_unet.rst create mode 100644 docs/source/modules/models_segformer.rst create mode 100644 docs/source/modules/models_swin_unet.rst create mode 100644 docs/source/modules/models_swinlstm.rst create mode 100644 docs/source/modules/models_tcn.rst create mode 100644 docs/source/modules/models_ts_satfire.rst create mode 100644 docs/source/modules/models_unet.rst create mode 100644 docs/source/modules/models_utae.rst create mode 100644 docs/source/modules/models_vit_segmenter.rst create mode 100644 docs/source/modules/models_xgboost.rst create mode 100644 pyhazards/model_cards/attention_unet.yaml create mode 100644 
pyhazards/model_cards/convgru_trajgru.yaml create mode 100644 pyhazards/model_cards/convlstm.yaml create mode 100644 pyhazards/model_cards/deep_ensemble.yaml create mode 100644 pyhazards/model_cards/deeplabv3p.yaml create mode 100644 pyhazards/model_cards/earthfarseer.yaml create mode 100644 pyhazards/model_cards/earthformer.yaml create mode 100644 pyhazards/model_cards/firepred.yaml create mode 100644 pyhazards/model_cards/lightgbm.yaml create mode 100644 pyhazards/model_cards/logistic_regression.yaml create mode 100644 pyhazards/model_cards/mau.yaml create mode 100644 pyhazards/model_cards/predrnn_v2.yaml create mode 100644 pyhazards/model_cards/rainformer.yaml create mode 100644 pyhazards/model_cards/random_forest.yaml create mode 100644 pyhazards/model_cards/resnet18_unet.yaml create mode 100644 pyhazards/model_cards/segformer.yaml create mode 100644 pyhazards/model_cards/swin_unet.yaml create mode 100644 pyhazards/model_cards/swinlstm.yaml create mode 100644 pyhazards/model_cards/tcn.yaml create mode 100644 pyhazards/model_cards/ts_satfire.yaml create mode 100644 pyhazards/model_cards/unet.yaml create mode 100644 pyhazards/model_cards/utae.yaml create mode 100644 pyhazards/model_cards/vit_segmenter.yaml delete mode 100644 pyhazards/model_cards/wildfire_forecasting.yaml delete mode 100644 pyhazards/model_cards/wildfire_mamba.yaml create mode 100644 pyhazards/model_cards/xgboost.yaml create mode 100644 pyhazards/models/_wildfire_estimator.py create mode 100644 pyhazards/models/_wildfire_layers.py create mode 100644 pyhazards/models/attention_unet.py create mode 100644 pyhazards/models/convgru_trajgru.py create mode 100644 pyhazards/models/convlstm.py create mode 100644 pyhazards/models/deep_ensemble.py create mode 100644 pyhazards/models/deeplabv3p.py create mode 100644 pyhazards/models/earthfarseer.py create mode 100644 pyhazards/models/earthformer.py create mode 100644 pyhazards/models/firepred.py create mode 100644 pyhazards/models/lightgbm.py create mode 
100644 pyhazards/models/logistic_regression.py create mode 100644 pyhazards/models/mau.py create mode 100644 pyhazards/models/predrnn_v2.py create mode 100644 pyhazards/models/rainformer.py create mode 100644 pyhazards/models/random_forest.py create mode 100644 pyhazards/models/resnet18_unet.py create mode 100644 pyhazards/models/segformer.py create mode 100644 pyhazards/models/swin_unet.py create mode 100644 pyhazards/models/swinlstm.py create mode 100644 pyhazards/models/tcn.py create mode 100644 pyhazards/models/ts_satfire.py create mode 100644 pyhazards/models/unet.py create mode 100644 pyhazards/models/utae.py create mode 100644 pyhazards/models/vit_segmenter.py create mode 100644 pyhazards/models/xgboost.py diff --git a/docs/source/api/pyhazards.models.rst b/docs/source/api/pyhazards.models.rst index f2ee6f21..b21ef46d 100644 --- a/docs/source/api/pyhazards.models.rst +++ b/docs/source/api/pyhazards.models.rst @@ -17,7 +17,7 @@ Wildfire Implemented Models ++++++++++++++++++ -:doc:`ASUFM `, :doc:`DNN-LSTM-AutoEncoder `, :doc:`FireCastNet `, :doc:`ForeFire Adapter `, :doc:`Wildfire Forecasting `, :doc:`WildfireSpreadTS `, :doc:`WRF-SFIRE Adapter `, :doc:`CNN-ASPP `. +:doc:`ASUFM `, :doc:`Attention U-Net `, :doc:`ConvGRU-TrajGRU `, :doc:`ConvLSTM `, :doc:`Deep Ensemble `, :doc:`DeepLabV3+ `, :doc:`DNN-LSTM-AutoEncoder `, :doc:`EarthFarseer `, :doc:`EarthFormer `, :doc:`FireCastNet `, :doc:`FirePred `, :doc:`ForeFire Adapter `, :doc:`LightGBM `, :doc:`Logistic Regression `, :doc:`MAU `, :doc:`PredRNN-v2 `, :doc:`Rainformer `, :doc:`Random Forest `, :doc:`ResNet18 U-Net `, :doc:`SegFormer `, :doc:`Swin-Unet `, :doc:`SwinLSTM `, :doc:`TCN `, :doc:`TS-SatFire `, :doc:`U-Net `, :doc:`U-TAE `, :doc:`ViT Segmenter `, :doc:`WildfireSpreadTS `, :doc:`WRF-SFIRE Adapter `, :doc:`XGBoost `, :doc:`CNN-ASPP `. 
Earthquake ~~~~~~~~~~ diff --git a/docs/source/appendix_a_coverage.rst b/docs/source/appendix_a_coverage.rst index 10610e49..4bfa34f0 100644 --- a/docs/source/appendix_a_coverage.rst +++ b/docs/source/appendix_a_coverage.rst @@ -34,7 +34,7 @@ Hazard Summary - 0 - 0 * - Wildfire - - 6 + - 5 - 0 - 0 * - Flood @@ -108,12 +108,6 @@ Method and Resource Matrix - ``Implemented`` - None - A synthetic-backed AEFA-style forecasting dataset adapter is registered. - * - Wildfire - - `wildfire_forecasting `_ - - Baseline - - ``Implemented`` - - :doc:`Wildfire Forecasting ` - - * - Wildfire - `WildfireSpreadTS `_ - Baseline / Benchmark diff --git a/docs/source/benchmarks/wildfire_benchmark.rst b/docs/source/benchmarks/wildfire_benchmark.rst index 24016f64..bae8084c 100644 --- a/docs/source/benchmarks/wildfire_benchmark.rst +++ b/docs/source/benchmarks/wildfire_benchmark.rst @@ -6,7 +6,7 @@ Wildfire Benchmark Overview -------- -The wildfire benchmark family is the single scoring layer for tabular danger tasks, weekly forecasting tasks, and raster spread tasks. +The wildfire benchmark family is the single scoring layer for tabular occurrence tasks, risk forecasting baselines, and raster spread tasks. Current coverage is synthetic-backed, but it already exposes a single hazard-level evaluator contract across wildfire danger and wildfire spread smoke configs. @@ -55,11 +55,11 @@ At a Glance .. container:: catalog-stat-value - 8 + 31 .. container:: catalog-stat-note - 8 models + 31 models Benchmark Mapping @@ -97,7 +97,6 @@ Mapped benchmark ecosystems :class-container: catalog-dropdown - ``wildfire_danger_smoke.yaml`` - - ``wildfire_forecasting_smoke.yaml`` - ``asufm_smoke.yaml`` - ``wildfire_spread_smoke.yaml`` - ``wildfirespreadts_smoke.yaml`` @@ -108,7 +107,7 @@ Mapped benchmark ecosystems .. 
dropdown:: Linked Models :class-container: catalog-dropdown - :doc:`DNN-LSTM-AutoEncoder `, :doc:`Wildfire Forecasting `, :doc:`ASUFM `, :doc:`CNN-ASPP `, :doc:`WildfireSpreadTS `, :doc:`ForeFire Adapter `, :doc:`WRF-SFIRE Adapter `, :doc:`FireCastNet `. + :doc:`Logistic Regression `, :doc:`Random Forest `, :doc:`XGBoost `, :doc:`LightGBM `, :doc:`U-Net `, :doc:`ResNet18 U-Net `, :doc:`Attention U-Net `, :doc:`DeepLabV3+ `, :doc:`ConvLSTM `, :doc:`MAU `, :doc:`PredRNN-v2 `, :doc:`Rainformer `, :doc:`EarthFormer `, :doc:`SwinLSTM `, :doc:`EarthFarseer `, :doc:`ConvGRU-TrajGRU `, :doc:`TCN `, :doc:`U-TAE `, :doc:`SegFormer `, :doc:`Swin-Unet `, :doc:`ViT Segmenter `, :doc:`Deep Ensemble `, :doc:`ASUFM `, :doc:`CNN-ASPP `, :doc:`FireCastNet `, :doc:`FirePred `, :doc:`WildfireSpreadTS `, :doc:`TS-SatFire `, :doc:`DNN-LSTM-AutoEncoder `, :doc:`ForeFire Adapter `, :doc:`WRF-SFIRE Adapter `. .. dropdown:: Notes :class-container: catalog-dropdown diff --git a/docs/source/benchmarks/wildfirespreadts_ecosystem.rst b/docs/source/benchmarks/wildfirespreadts_ecosystem.rst index 73f88b7c..a14c797f 100644 --- a/docs/source/benchmarks/wildfirespreadts_ecosystem.rst +++ b/docs/source/benchmarks/wildfirespreadts_ecosystem.rst @@ -55,11 +55,11 @@ At a Glance .. container:: catalog-stat-value - 5 + 25 .. container:: catalog-stat-note - 5 models + 25 models Benchmark Mapping @@ -103,4 +103,4 @@ Primary Source .. dropdown:: Linked Models :class-container: catalog-dropdown - :doc:`CNN-ASPP `, :doc:`WildfireSpreadTS `, :doc:`ForeFire Adapter `, :doc:`WRF-SFIRE Adapter `, :doc:`FireCastNet `. 
+ :doc:`CNN-ASPP `, :doc:`U-Net `, :doc:`ResNet18 U-Net `, :doc:`Attention U-Net `, :doc:`DeepLabV3+ `, :doc:`ConvLSTM `, :doc:`MAU `, :doc:`PredRNN-v2 `, :doc:`Rainformer `, :doc:`EarthFormer `, :doc:`SwinLSTM `, :doc:`EarthFarseer `, :doc:`ConvGRU-TrajGRU `, :doc:`TCN `, :doc:`U-TAE `, :doc:`SegFormer `, :doc:`Swin-Unet `, :doc:`ViT Segmenter `, :doc:`Deep Ensemble `, :doc:`FireCastNet `, :doc:`FirePred `, :doc:`WildfireSpreadTS `, :doc:`TS-SatFire `, :doc:`ForeFire Adapter `, :doc:`WRF-SFIRE Adapter `. diff --git a/docs/source/datasets/fpa_fod_weekly.rst b/docs/source/datasets/fpa_fod_weekly.rst index 6dfa3f7f..017de714 100644 --- a/docs/source/datasets/fpa_fod_weekly.rst +++ b/docs/source/datasets/fpa_fod_weekly.rst @@ -98,7 +98,7 @@ Related Coverage **Benchmarks:** :doc:`Wildfire Benchmark ` -**Representative Models:** :doc:`DNN-LSTM-AutoEncoder `, :doc:`Wildfire Forecasting `, :doc:`ASUFM ` +**Representative Models:** :doc:`DNN-LSTM-AutoEncoder `, :doc:`ASUFM ` Inspection Workflow ------------------- diff --git a/docs/source/modules/models_wildfire_forecasting.rst b/docs/source/modules/models_attention_unet.rst similarity index 61% rename from docs/source/modules/models_wildfire_forecasting.rst rename to docs/source/modules/models_attention_unet.rst index 82a6b73a..d07c736c 100644 --- a/docs/source/modules/models_wildfire_forecasting.rst +++ b/docs/source/modules/models_attention_unet.rst @@ -1,12 +1,12 @@ .. This file is generated by scripts/render_model_docs.py. Do not edit by hand. -Wildfire Forecasting -==================== +Attention U-Net +=============== Overview -------- -``wildfire_forecasting`` is a compact GRU-attention forecaster for weekly wildfire activity windows. +``attention_unet`` extends the encoder-decoder wildfire baseline with lightweight gating on skip connections. At a Glance ----------- @@ -46,7 +46,7 @@ At a Glance .. container:: catalog-stat-note - Forecasting + Spread .. 
grid-item-card:: Benchmark Family :class-card: catalog-stat-card @@ -63,29 +63,31 @@ At a Glance Description ----------- -``wildfire_forecasting`` is a compact GRU-attention forecaster for weekly wildfire activity windows. +``attention_unet`` extends the encoder-decoder wildfire baseline with lightweight gating on skip connections. -The PyHazards implementation targets smoke-testable next-window size-group prediction through the shared wildfire benchmark flow. +PyHazards keeps it public because attention-augmented U-Net variants appear frequently in wildfire progression comparisons. Benchmark Compatibility ----------------------- **Primary benchmark family:** :doc:`Wildfire Benchmark ` +**Mapped benchmark ecosystems:** :doc:`WildfireSpreadTS ` + External References ------------------- -**Paper:** `Wildfire Danger Prediction and Understanding with Deep Learning `_ | **Repo:** `Repository `__ +**Paper:** `TS-SatFire: A wildfire progression benchmark from temporally dense satellite sequences `_ Registry Name ------------- -Primary entrypoint: ``wildfire_forecasting`` +Primary entrypoint: ``attention_unet`` Supported Tasks --------------- -- Forecasting +- Spread Programmatic Use ---------------- @@ -95,17 +97,11 @@ Programmatic Use import torch from pyhazards.models import build_model - model = build_model( - name="wildfire_forecasting", - task="forecasting", - input_dim=7, - output_dim=5, - lookback=12, - ) - preds = model(torch.randn(2, 12, 7)) - print(preds.shape) + model = build_model(name="attention_unet", task="segmentation", in_channels=12) + logits = model(torch.randn(2, 12, 16, 16)) + print(logits.shape) Notes ----- -- This public adapter is exercised on the weekly wildfire smoke benchmark. +- The smoke path uses a single wildfire raster snapshot and returns a one-channel prediction map. 
diff --git a/docs/source/modules/models_convgru_trajgru.rst b/docs/source/modules/models_convgru_trajgru.rst new file mode 100644 index 00000000..346bd058 --- /dev/null +++ b/docs/source/modules/models_convgru_trajgru.rst @@ -0,0 +1,107 @@ +.. This file is generated by scripts/render_model_docs.py. Do not edit by hand. + +ConvGRU-TrajGRU +=============== + +Overview +-------- + +``convgru_trajgru`` provides a gated recurrent raster-history baseline for wildfire sequence prediction. + +At a Glance +----------- + +.. grid:: 1 2 4 4 + :gutter: 2 + :class-container: catalog-grid + + .. grid-item-card:: Hazard Family + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + Wildfire + + .. container:: catalog-stat-note + + Public catalog grouping used for this model. + + .. grid-item-card:: Maturity + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + Implemented + + .. container:: catalog-stat-note + + Catalog maturity label used on the index page. + + .. grid-item-card:: Tasks + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + 1 + + .. container:: catalog-stat-note + + Spread + + .. grid-item-card:: Benchmark Family + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + :doc:`Wildfire Benchmark ` + + .. container:: catalog-stat-note + + Primary benchmark-family link used for compatible evaluation coverage. + + +Description +----------- + +``convgru_trajgru`` provides a gated recurrent raster-history baseline for wildfire sequence prediction. + +PyHazards keeps it available as a lightweight proxy for ConvGRU and TrajGRU style wildfire forecasters. 
+ +Benchmark Compatibility +----------------------- + +**Primary benchmark family:** :doc:`Wildfire Benchmark ` + +**Mapped benchmark ecosystems:** :doc:`WildfireSpreadTS ` + +External References +------------------- + +**Paper:** `FireCastNet: Earth-as-a-Graph for Seasonal Fire Prediction `_ + +Registry Name +------------- + +Primary entrypoint: ``convgru_trajgru`` + +Supported Tasks +--------------- + +- Spread + +Programmatic Use +---------------- + +.. code-block:: python + + import torch + from pyhazards.models import build_model + + model = build_model(name="convgru_trajgru", task="segmentation", history=4, in_channels=8) + logits = model(torch.randn(2, 4, 8, 16, 16)) + print(logits.shape) + +Notes +----- + +- The smoke path uses a short wildfire raster history and returns a one-channel prediction map. diff --git a/docs/source/modules/models_convlstm.rst b/docs/source/modules/models_convlstm.rst new file mode 100644 index 00000000..88477d2f --- /dev/null +++ b/docs/source/modules/models_convlstm.rst @@ -0,0 +1,107 @@ +.. This file is generated by scripts/render_model_docs.py. Do not edit by hand. + +ConvLSTM +======== + +Overview +-------- + +``convlstm`` is the standard recurrent raster-history baseline for wildfire prediction in PyHazards. + +At a Glance +----------- + +.. grid:: 1 2 4 4 + :gutter: 2 + :class-container: catalog-grid + + .. grid-item-card:: Hazard Family + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + Wildfire + + .. container:: catalog-stat-note + + Public catalog grouping used for this model. + + .. grid-item-card:: Maturity + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + Implemented + + .. container:: catalog-stat-note + + Catalog maturity label used on the index page. + + .. grid-item-card:: Tasks + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + 1 + + .. container:: catalog-stat-note + + Spread + + .. 
grid-item-card:: Benchmark Family + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + :doc:`Wildfire Benchmark ` + + .. container:: catalog-stat-note + + Primary benchmark-family link used for compatible evaluation coverage. + + +Description +----------- + +``convlstm`` is the standard recurrent raster-history baseline for wildfire prediction in PyHazards. + +It maps short spatio-temporal covariate windows to the next wildfire risk or spread-style output map. + +Benchmark Compatibility +----------------------- + +**Primary benchmark family:** :doc:`Wildfire Benchmark ` + +**Mapped benchmark ecosystems:** :doc:`WildfireSpreadTS ` + +External References +------------------- + +**Paper:** `WildfireSpreadTS: A Dataset of Multi-Modal Time Series for Wildfire Spread Prediction `_ + +Registry Name +------------- + +Primary entrypoint: ``convlstm`` + +Supported Tasks +--------------- + +- Spread + +Programmatic Use +---------------- + +.. code-block:: python + + import torch + from pyhazards.models import build_model + + model = build_model(name="convlstm", task="segmentation", history=4, in_channels=8) + logits = model(torch.randn(2, 4, 8, 16, 16)) + print(logits.shape) + +Notes +----- + +- The smoke path uses a short wildfire raster history and returns a one-channel prediction map. diff --git a/docs/source/modules/models_deep_ensemble.rst b/docs/source/modules/models_deep_ensemble.rst new file mode 100644 index 00000000..d1faf2eb --- /dev/null +++ b/docs/source/modules/models_deep_ensemble.rst @@ -0,0 +1,107 @@ +.. This file is generated by scripts/render_model_docs.py. Do not edit by hand. + +Deep Ensemble +============= + +Overview +-------- + +``deep_ensemble`` averages several compact wildfire raster predictors to provide a simple uncertainty-aware baseline. + +At a Glance +----------- + +.. grid:: 1 2 4 4 + :gutter: 2 + :class-container: catalog-grid + + .. grid-item-card:: Hazard Family + :class-card: catalog-stat-card + + .. 
container:: catalog-stat-value + + Wildfire + + .. container:: catalog-stat-note + + Public catalog grouping used for this model. + + .. grid-item-card:: Maturity + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + Implemented + + .. container:: catalog-stat-note + + Catalog maturity label used on the index page. + + .. grid-item-card:: Tasks + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + 1 + + .. container:: catalog-stat-note + + Spread + + .. grid-item-card:: Benchmark Family + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + :doc:`Wildfire Benchmark ` + + .. container:: catalog-stat-note + + Primary benchmark-family link used for compatible evaluation coverage. + + +Description +----------- + +``deep_ensemble`` averages several compact wildfire raster predictors to provide a simple uncertainty-aware baseline. + +It is kept public so the wildfire catalog includes at least one ensemble-style dense predictor beside single-model baselines. + +Benchmark Compatibility +----------------------- + +**Primary benchmark family:** :doc:`Wildfire Benchmark ` + +**Mapped benchmark ecosystems:** :doc:`WildfireSpreadTS ` + +External References +------------------- + +**Paper:** `Simple and Scalable Predictive Uncertainty Estimation using Deep Ensembles `_ + +Registry Name +------------- + +Primary entrypoint: ``deep_ensemble`` + +Supported Tasks +--------------- + +- Spread + +Programmatic Use +---------------- + +.. code-block:: python + + import torch + from pyhazards.models import build_model + + model = build_model(name="deep_ensemble", task="segmentation", in_channels=12, ensemble_size=3) + logits = model(torch.randn(2, 12, 16, 16)) + print(logits.shape) + +Notes +----- + +- The smoke path averages several compact wildfire raster predictors into one output map. 
diff --git a/docs/source/modules/models_deeplabv3p.rst b/docs/source/modules/models_deeplabv3p.rst new file mode 100644 index 00000000..89f830a4 --- /dev/null +++ b/docs/source/modules/models_deeplabv3p.rst @@ -0,0 +1,107 @@ +.. This file is generated by scripts/render_model_docs.py. Do not edit by hand. + +DeepLabV3+ +========== + +Overview +-------- + +``deeplabv3p`` provides an ASPP-style dense-prediction baseline for wildfire rasters within the shared PyHazards registry. + +At a Glance +----------- + +.. grid:: 1 2 4 4 + :gutter: 2 + :class-container: catalog-grid + + .. grid-item-card:: Hazard Family + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + Wildfire + + .. container:: catalog-stat-note + + Public catalog grouping used for this model. + + .. grid-item-card:: Maturity + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + Implemented + + .. container:: catalog-stat-note + + Catalog maturity label used on the index page. + + .. grid-item-card:: Tasks + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + 1 + + .. container:: catalog-stat-note + + Spread + + .. grid-item-card:: Benchmark Family + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + :doc:`Wildfire Benchmark ` + + .. container:: catalog-stat-note + + Primary benchmark-family link used for compatible evaluation coverage. + + +Description +----------- + +``deeplabv3p`` provides an ASPP-style dense-prediction baseline for wildfire rasters within the shared PyHazards registry. + +It complements U-Net family baselines with a lighter context-aggregation decoder. 
+ +Benchmark Compatibility +----------------------- + +**Primary benchmark family:** :doc:`Wildfire Benchmark ` + +**Mapped benchmark ecosystems:** :doc:`WildfireSpreadTS ` + +External References +------------------- + +**Paper:** `Wildfire spreading prediction using multimodal data and deep neural network approach `_ + +Registry Name +------------- + +Primary entrypoint: ``deeplabv3p`` + +Supported Tasks +--------------- + +- Spread + +Programmatic Use +---------------- + +.. code-block:: python + + import torch + from pyhazards.models import build_model + + model = build_model(name="deeplabv3p", task="segmentation", in_channels=12) + logits = model(torch.randn(2, 12, 16, 16)) + print(logits.shape) + +Notes +----- + +- The smoke path uses a single wildfire raster snapshot and returns a one-channel prediction map. diff --git a/docs/source/modules/models_earthfarseer.rst b/docs/source/modules/models_earthfarseer.rst new file mode 100644 index 00000000..ac832643 --- /dev/null +++ b/docs/source/modules/models_earthfarseer.rst @@ -0,0 +1,107 @@ +.. This file is generated by scripts/render_model_docs.py. Do not edit by hand. + +EarthFarseer +============ + +Overview +-------- + +``earthfarseer`` is a lightweight space-time wildfire forecaster exposed through the shared raster-history contract. + +At a Glance +----------- + +.. grid:: 1 2 4 4 + :gutter: 2 + :class-container: catalog-grid + + .. grid-item-card:: Hazard Family + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + Wildfire + + .. container:: catalog-stat-note + + Public catalog grouping used for this model. + + .. grid-item-card:: Maturity + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + Implemented + + .. container:: catalog-stat-note + + Catalog maturity label used on the index page. + + .. grid-item-card:: Tasks + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + 1 + + .. container:: catalog-stat-note + + Spread + + .. 
grid-item-card:: Benchmark Family + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + :doc:`Wildfire Benchmark ` + + .. container:: catalog-stat-note + + Primary benchmark-family link used for compatible evaluation coverage. + + +Description +----------- + +``earthfarseer`` is a lightweight space-time wildfire forecaster exposed through the shared raster-history contract. + +It remains public because the Sim2Real-Fire benchmark family compares EarthFarseer-style sequence models against other wildfire forecasters. + +Benchmark Compatibility +----------------------- + +**Primary benchmark family:** :doc:`Wildfire Benchmark ` + +**Mapped benchmark ecosystems:** :doc:`WildfireSpreadTS ` + +External References +------------------- + +**Paper:** `Sim2Real-Fire: A Multi-Modal Simulation Dataset for Forecast and Backtracking of Real-World Forest Fire `_ + +Registry Name +------------- + +Primary entrypoint: ``earthfarseer`` + +Supported Tasks +--------------- + +- Spread + +Programmatic Use +---------------- + +.. code-block:: python + + import torch + from pyhazards.models import build_model + + model = build_model(name="earthfarseer", task="segmentation", history=4, in_channels=8) + logits = model(torch.randn(2, 4, 8, 16, 16)) + print(logits.shape) + +Notes +----- + +- The smoke path uses a short wildfire raster history and returns a one-channel prediction map. diff --git a/docs/source/modules/models_earthformer.rst b/docs/source/modules/models_earthformer.rst new file mode 100644 index 00000000..e5f35ee0 --- /dev/null +++ b/docs/source/modules/models_earthformer.rst @@ -0,0 +1,107 @@ +.. This file is generated by scripts/render_model_docs.py. Do not edit by hand. + +EarthFormer +=========== + +Overview +-------- + +``earthformer`` gives the wildfire catalog a lightweight space-time transformer-style sequence model. + +At a Glance +----------- + +.. grid:: 1 2 4 4 + :gutter: 2 + :class-container: catalog-grid + + .. 
grid-item-card:: Hazard Family + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + Wildfire + + .. container:: catalog-stat-note + + Public catalog grouping used for this model. + + .. grid-item-card:: Maturity + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + Implemented + + .. container:: catalog-stat-note + + Catalog maturity label used on the index page. + + .. grid-item-card:: Tasks + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + 1 + + .. container:: catalog-stat-note + + Spread + + .. grid-item-card:: Benchmark Family + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + :doc:`Wildfire Benchmark ` + + .. container:: catalog-stat-note + + Primary benchmark-family link used for compatible evaluation coverage. + + +Description +----------- + +``earthformer`` gives the wildfire catalog a lightweight space-time transformer-style sequence model. + +PyHazards keeps it aligned with the shared wildfire raster-history contract used for spread-style benchmarking. + +Benchmark Compatibility +----------------------- + +**Primary benchmark family:** :doc:`Wildfire Benchmark ` + +**Mapped benchmark ecosystems:** :doc:`WildfireSpreadTS ` + +External References +------------------- + +**Paper:** `Sim2Real-Fire: A Multi-Modal Simulation Dataset for Forecast and Backtracking of Real-World Forest Fire `_ + +Registry Name +------------- + +Primary entrypoint: ``earthformer`` + +Supported Tasks +--------------- + +- Spread + +Programmatic Use +---------------- + +.. code-block:: python + + import torch + from pyhazards.models import build_model + + model = build_model(name="earthformer", task="segmentation", history=4, in_channels=8) + logits = model(torch.randn(2, 4, 8, 16, 16)) + print(logits.shape) + +Notes +----- + +- The smoke path uses a short wildfire raster history and returns a one-channel prediction map. 
diff --git a/docs/source/modules/models_firepred.rst b/docs/source/modules/models_firepred.rst new file mode 100644 index 00000000..dc4f74fc --- /dev/null +++ b/docs/source/modules/models_firepred.rst @@ -0,0 +1,107 @@ +.. This file is generated by scripts/render_model_docs.py. Do not edit by hand. + +FirePred +======== + +Overview +-------- + +``firepred`` mixes recent temporal context, averaged daily context, and a latest-step snapshot branch for wildfire prediction. + +At a Glance +----------- + +.. grid:: 1 2 4 4 + :gutter: 2 + :class-container: catalog-grid + + .. grid-item-card:: Hazard Family + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + Wildfire + + .. container:: catalog-stat-note + + Public catalog grouping used for this model. + + .. grid-item-card:: Maturity + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + Implemented + + .. container:: catalog-stat-note + + Catalog maturity label used on the index page. + + .. grid-item-card:: Tasks + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + 1 + + .. container:: catalog-stat-note + + Spread + + .. grid-item-card:: Benchmark Family + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + :doc:`Wildfire Benchmark ` + + .. container:: catalog-stat-note + + Primary benchmark-family link used for compatible evaluation coverage. + + +Description +----------- + +``firepred`` mixes recent temporal context, averaged daily context, and a latest-step snapshot branch for wildfire prediction. + +PyHazards exposes it as a lightweight approximation of the wildfire-native FirePred modeling idea. 
+ +Benchmark Compatibility +----------------------- + +**Primary benchmark family:** :doc:`Wildfire Benchmark ` + +**Mapped benchmark ecosystems:** :doc:`WildfireSpreadTS ` + +External References +------------------- + +**Paper:** `FirePred: A hybrid multi-temporal convolutional network for wildfire spread prediction `_ + +Registry Name +------------- + +Primary entrypoint: ``firepred`` + +Supported Tasks +--------------- + +- Spread + +Programmatic Use +---------------- + +.. code-block:: python + + import torch + from pyhazards.models import build_model + + model = build_model(name="firepred", task="segmentation", history=5, in_channels=8) + logits = model(torch.randn(2, 5, 8, 16, 16)) + print(logits.shape) + +Notes +----- + +- The smoke path uses a five-step wildfire raster history and returns a one-channel prediction map. diff --git a/docs/source/modules/models_wildfire_mamba.rst b/docs/source/modules/models_lightgbm.rst similarity index 57% rename from docs/source/modules/models_wildfire_mamba.rst rename to docs/source/modules/models_lightgbm.rst index 0912fc54..a510bbcd 100644 --- a/docs/source/modules/models_wildfire_mamba.rst +++ b/docs/source/modules/models_lightgbm.rst @@ -1,14 +1,12 @@ .. This file is generated by scripts/render_model_docs.py. Do not edit by hand. -:orphan: - -Wildfire Mamba -============== +LightGBM +======== Overview -------- -``wildfire_mamba`` models county-day ERA5 sequences by combining selective state-space temporal blocks with a simple spatial graph layer. +``lightgbm`` is a boosted-tree baseline for wildfire occurrence and danger classification on flattened covariates. At a Glance ----------- @@ -33,7 +31,7 @@ At a Glance .. container:: catalog-stat-value - Hidden + Implemented .. container:: catalog-stat-note @@ -55,7 +53,7 @@ At a Glance .. container:: catalog-stat-value - Unmapped + :doc:`Wildfire Benchmark ` .. 
container:: catalog-stat-note @@ -65,24 +63,24 @@ At a Glance Description ----------- -``wildfire_mamba`` models county-day ERA5 sequences by combining selective state-space temporal blocks with a simple spatial graph layer. +``lightgbm`` is a boosted-tree baseline for wildfire occurrence and danger classification on flattened covariates. -The PyHazards implementation targets binary next-day per-county wildfire classification and supports an optional count head for multi-task extensions. +PyHazards keeps it alongside ``xgboost`` so the wildfire catalog covers both major tree-boosting baselines. Benchmark Compatibility ----------------------- -**Primary benchmark family:** Not yet mapped. +**Primary benchmark family:** :doc:`Wildfire Benchmark ` External References ------------------- -**Paper:** `Mamba: Linear-Time Sequence Modeling with Selective State Spaces `_ +**Paper:** `LightGBM: A Highly Efficient Gradient Boosting Decision Tree `_ Registry Name ------------- -Primary entrypoint: ``wildfire_mamba`` +Primary entrypoint: ``lightgbm`` Supported Tasks --------------- @@ -97,19 +95,11 @@ Programmatic Use import torch from pyhazards.models import build_model - model = build_model( - name="wildfire_mamba", - task="classification", - in_dim=3, - num_counties=4, - past_days=5, - ) - - x = torch.randn(2, 5, 4, 3) - logits = model(x) - print(logits.shape) + model = build_model(name="lightgbm", task="classification", num_boost_round=64) + probs = model(torch.randn(4, 8)) + print(probs.shape) Notes ----- -- The CI smoke test validates the default binary-classification path on synthetic data. +- The smoke path uses flattened wildfire covariates and returns two-class probabilities. diff --git a/docs/source/modules/models_logistic_regression.rst b/docs/source/modules/models_logistic_regression.rst new file mode 100644 index 00000000..958ca4f2 --- /dev/null +++ b/docs/source/modules/models_logistic_regression.rst @@ -0,0 +1,105 @@ +.. 
This file is generated by scripts/render_model_docs.py. Do not edit by hand. + +Logistic Regression +=================== + +Overview +-------- + +``logistic_regression`` is a lightweight classical baseline for wildfire occurrence and danger classification. + +At a Glance +----------- + +.. grid:: 1 2 4 4 + :gutter: 2 + :class-container: catalog-grid + + .. grid-item-card:: Hazard Family + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + Wildfire + + .. container:: catalog-stat-note + + Public catalog grouping used for this model. + + .. grid-item-card:: Maturity + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + Implemented + + .. container:: catalog-stat-note + + Catalog maturity label used on the index page. + + .. grid-item-card:: Tasks + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + 1 + + .. container:: catalog-stat-note + + Classification + + .. grid-item-card:: Benchmark Family + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + :doc:`Wildfire Benchmark ` + + .. container:: catalog-stat-note + + Primary benchmark-family link used for compatible evaluation coverage. + + +Description +----------- + +``logistic_regression`` is a lightweight classical baseline for wildfire occurrence and danger classification. + +PyHazards exposes it through the shared wildfire benchmark family for direct comparison against tree and raster deep-learning models. + +Benchmark Compatibility +----------------------- + +**Primary benchmark family:** :doc:`Wildfire Benchmark ` + +External References +------------------- + +**Paper:** `Next Day Wildfire Spread: A Machine Learning Dataset to Predict Wildfire Spreading From Remote-Sensing Data `_ + +Registry Name +------------- + +Primary entrypoint: ``logistic_regression`` + +Supported Tasks +--------------- + +- Classification + +Programmatic Use +---------------- + +.. 
code-block:: python + + import torch + from pyhazards.models import build_model + + model = build_model(name="logistic_regression", task="classification", max_iter=200) + probs = model(torch.randn(4, 8)) + print(probs.shape) + +Notes +----- + +- The smoke path uses flattened wildfire covariates and returns two-class probabilities. diff --git a/docs/source/modules/models_mau.rst b/docs/source/modules/models_mau.rst new file mode 100644 index 00000000..e7b8fa02 --- /dev/null +++ b/docs/source/modules/models_mau.rst @@ -0,0 +1,107 @@ +.. This file is generated by scripts/render_model_docs.py. Do not edit by hand. + +MAU +=== + +Overview +-------- + +``mau`` is a recurrent spatio-temporal wildfire baseline inspired by motion-aware recurrent sequence modeling. + +At a Glance +----------- + +.. grid:: 1 2 4 4 + :gutter: 2 + :class-container: catalog-grid + + .. grid-item-card:: Hazard Family + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + Wildfire + + .. container:: catalog-stat-note + + Public catalog grouping used for this model. + + .. grid-item-card:: Maturity + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + Implemented + + .. container:: catalog-stat-note + + Catalog maturity label used on the index page. + + .. grid-item-card:: Tasks + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + 1 + + .. container:: catalog-stat-note + + Spread + + .. grid-item-card:: Benchmark Family + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + :doc:`Wildfire Benchmark ` + + .. container:: catalog-stat-note + + Primary benchmark-family link used for compatible evaluation coverage. + + +Description +----------- + +``mau`` is a recurrent spatio-temporal wildfire baseline inspired by motion-aware recurrent sequence modeling. + +PyHazards keeps it public because the Sim2Real-Fire benchmark family compares MAU-style models against other wildfire sequence forecasters. 
+ +Benchmark Compatibility +----------------------- + +**Primary benchmark family:** :doc:`Wildfire Benchmark ` + +**Mapped benchmark ecosystems:** :doc:`WildfireSpreadTS ` + +External References +------------------- + +**Paper:** `Sim2Real-Fire: A Multi-Modal Simulation Dataset for Forecast and Backtracking of Real-World Forest Fire `_ + +Registry Name +------------- + +Primary entrypoint: ``mau`` + +Supported Tasks +--------------- + +- Spread + +Programmatic Use +---------------- + +.. code-block:: python + + import torch + from pyhazards.models import build_model + + model = build_model(name="mau", task="segmentation", history=4, in_channels=8) + logits = model(torch.randn(2, 4, 8, 16, 16)) + print(logits.shape) + +Notes +----- + +- The smoke path uses a short wildfire raster history and returns a one-channel prediction map. diff --git a/docs/source/modules/models_predrnn_v2.rst b/docs/source/modules/models_predrnn_v2.rst new file mode 100644 index 00000000..fc8b1d0b --- /dev/null +++ b/docs/source/modules/models_predrnn_v2.rst @@ -0,0 +1,107 @@ +.. This file is generated by scripts/render_model_docs.py. Do not edit by hand. + +PredRNN-v2 +========== + +Overview +-------- + +``predrnn_v2`` is a recurrent raster-history baseline for wildfire sequence forecasting. + +At a Glance +----------- + +.. grid:: 1 2 4 4 + :gutter: 2 + :class-container: catalog-grid + + .. grid-item-card:: Hazard Family + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + Wildfire + + .. container:: catalog-stat-note + + Public catalog grouping used for this model. + + .. grid-item-card:: Maturity + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + Implemented + + .. container:: catalog-stat-note + + Catalog maturity label used on the index page. + + .. grid-item-card:: Tasks + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + 1 + + .. container:: catalog-stat-note + + Spread + + .. 
grid-item-card:: Benchmark Family + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + :doc:`Wildfire Benchmark ` + + .. container:: catalog-stat-note + + Primary benchmark-family link used for compatible evaluation coverage. + + +Description +----------- + +``predrnn_v2`` is a recurrent raster-history baseline for wildfire sequence forecasting. + +It gives the PyHazards wildfire suite a lightweight proxy for memory-augmented next-frame wildfire prediction. + +Benchmark Compatibility +----------------------- + +**Primary benchmark family:** :doc:`Wildfire Benchmark ` + +**Mapped benchmark ecosystems:** :doc:`WildfireSpreadTS ` + +External References +------------------- + +**Paper:** `Sim2Real-Fire: A Multi-Modal Simulation Dataset for Forecast and Backtracking of Real-World Forest Fire `_ + +Registry Name +------------- + +Primary entrypoint: ``predrnn_v2`` + +Supported Tasks +--------------- + +- Spread + +Programmatic Use +---------------- + +.. code-block:: python + + import torch + from pyhazards.models import build_model + + model = build_model(name="predrnn_v2", task="segmentation", history=4, in_channels=8) + logits = model(torch.randn(2, 4, 8, 16, 16)) + print(logits.shape) + +Notes +----- + +- The smoke path uses a short wildfire raster history and returns a one-channel prediction map. diff --git a/docs/source/modules/models_rainformer.rst b/docs/source/modules/models_rainformer.rst new file mode 100644 index 00000000..5aa7b7f1 --- /dev/null +++ b/docs/source/modules/models_rainformer.rst @@ -0,0 +1,107 @@ +.. This file is generated by scripts/render_model_docs.py. Do not edit by hand. + +Rainformer +========== + +Overview +-------- + +``rainformer`` adapts sequence-to-map transformer-style modeling to short wildfire raster histories. + +At a Glance +----------- + +.. grid:: 1 2 4 4 + :gutter: 2 + :class-container: catalog-grid + + .. grid-item-card:: Hazard Family + :class-card: catalog-stat-card + + .. 
container:: catalog-stat-value + + Wildfire + + .. container:: catalog-stat-note + + Public catalog grouping used for this model. + + .. grid-item-card:: Maturity + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + Implemented + + .. container:: catalog-stat-note + + Catalog maturity label used on the index page. + + .. grid-item-card:: Tasks + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + 1 + + .. container:: catalog-stat-note + + Spread + + .. grid-item-card:: Benchmark Family + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + :doc:`Wildfire Benchmark ` + + .. container:: catalog-stat-note + + Primary benchmark-family link used for compatible evaluation coverage. + + +Description +----------- + +``rainformer`` adapts sequence-to-map transformer-style modeling to short wildfire raster histories. + +It stays lightweight so the PyHazards wildfire catalog can expose a modern temporal attention baseline without external dependencies. + +Benchmark Compatibility +----------------------- + +**Primary benchmark family:** :doc:`Wildfire Benchmark ` + +**Mapped benchmark ecosystems:** :doc:`WildfireSpreadTS ` + +External References +------------------- + +**Paper:** `Sim2Real-Fire: A Multi-Modal Simulation Dataset for Forecast and Backtracking of Real-World Forest Fire `_ + +Registry Name +------------- + +Primary entrypoint: ``rainformer`` + +Supported Tasks +--------------- + +- Spread + +Programmatic Use +---------------- + +.. code-block:: python + + import torch + from pyhazards.models import build_model + + model = build_model(name="rainformer", task="segmentation", history=4, in_channels=8) + logits = model(torch.randn(2, 4, 8, 16, 16)) + print(logits.shape) + +Notes +----- + +- The smoke path uses a short wildfire raster history and returns a one-channel prediction map. 
diff --git a/docs/source/modules/models_random_forest.rst b/docs/source/modules/models_random_forest.rst new file mode 100644 index 00000000..b38ee0ae --- /dev/null +++ b/docs/source/modules/models_random_forest.rst @@ -0,0 +1,105 @@ +.. This file is generated by scripts/render_model_docs.py. Do not edit by hand. + +Random Forest +============= + +Overview +-------- + +``random_forest`` is a classical ensemble baseline for wildfire occurrence and danger classification. + +At a Glance +----------- + +.. grid:: 1 2 4 4 + :gutter: 2 + :class-container: catalog-grid + + .. grid-item-card:: Hazard Family + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + Wildfire + + .. container:: catalog-stat-note + + Public catalog grouping used for this model. + + .. grid-item-card:: Maturity + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + Implemented + + .. container:: catalog-stat-note + + Catalog maturity label used on the index page. + + .. grid-item-card:: Tasks + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + 1 + + .. container:: catalog-stat-note + + Classification + + .. grid-item-card:: Benchmark Family + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + :doc:`Wildfire Benchmark ` + + .. container:: catalog-stat-note + + Primary benchmark-family link used for compatible evaluation coverage. + + +Description +----------- + +``random_forest`` is a classical ensemble baseline for wildfire occurrence and danger classification. + +It gives the wildfire catalog a non-neural tree baseline that remains easy to compare against the deep raster families. 
+ +Benchmark Compatibility +----------------------- + +**Primary benchmark family:** :doc:`Wildfire Benchmark ` + +External References +------------------- + +**Paper:** `WildfireDB: An Open-Source Dataset Connecting Wildfire Occurrence with Relevant Determinants `_ + +Registry Name +------------- + +Primary entrypoint: ``random_forest`` + +Supported Tasks +--------------- + +- Classification + +Programmatic Use +---------------- + +.. code-block:: python + + import torch + from pyhazards.models import build_model + + model = build_model(name="random_forest", task="classification", n_estimators=64) + probs = model(torch.randn(4, 8)) + print(probs.shape) + +Notes +----- + +- The smoke path uses flattened wildfire covariates and returns two-class probabilities. diff --git a/docs/source/modules/models_resnet18_unet.rst b/docs/source/modules/models_resnet18_unet.rst new file mode 100644 index 00000000..8bd25aef --- /dev/null +++ b/docs/source/modules/models_resnet18_unet.rst @@ -0,0 +1,107 @@ +.. This file is generated by scripts/render_model_docs.py. Do not edit by hand. + +ResNet18 U-Net +============== + +Overview +-------- + +``resnet18_unet`` keeps the U-Net decoding path while using residual feature extraction blocks for wildfire rasters. + +At a Glance +----------- + +.. grid:: 1 2 4 4 + :gutter: 2 + :class-container: catalog-grid + + .. grid-item-card:: Hazard Family + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + Wildfire + + .. container:: catalog-stat-note + + Public catalog grouping used for this model. + + .. grid-item-card:: Maturity + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + Implemented + + .. container:: catalog-stat-note + + Catalog maturity label used on the index page. + + .. grid-item-card:: Tasks + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + 1 + + .. container:: catalog-stat-note + + Spread + + .. 
grid-item-card:: Benchmark Family + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + :doc:`Wildfire Benchmark ` + + .. container:: catalog-stat-note + + Primary benchmark-family link used for compatible evaluation coverage. + + +Description +----------- + +``resnet18_unet`` keeps the U-Net decoding path while using residual feature extraction blocks for wildfire rasters. + +It matches the wildfire benchmark family used in WildfireSpreadTS-style sequence-to-map comparisons. + +Benchmark Compatibility +----------------------- + +**Primary benchmark family:** :doc:`Wildfire Benchmark ` + +**Mapped benchmark ecosystems:** :doc:`WildfireSpreadTS ` + +External References +------------------- + +**Paper:** `WildfireSpreadTS: A Dataset of Multi-Modal Time Series for Wildfire Spread Prediction `_ + +Registry Name +------------- + +Primary entrypoint: ``resnet18_unet`` + +Supported Tasks +--------------- + +- Spread + +Programmatic Use +---------------- + +.. code-block:: python + + import torch + from pyhazards.models import build_model + + model = build_model(name="resnet18_unet", task="segmentation", in_channels=12) + logits = model(torch.randn(2, 12, 16, 16)) + print(logits.shape) + +Notes +----- + +- The smoke path uses a single wildfire raster snapshot and returns a one-channel prediction map. diff --git a/docs/source/modules/models_segformer.rst b/docs/source/modules/models_segformer.rst new file mode 100644 index 00000000..613dc5e3 --- /dev/null +++ b/docs/source/modules/models_segformer.rst @@ -0,0 +1,107 @@ +.. This file is generated by scripts/render_model_docs.py. Do not edit by hand. + +SegFormer +========= + +Overview +-------- + +``segformer`` is a transformer-style sequence-to-map wildfire baseline over short raster histories. + +At a Glance +----------- + +.. grid:: 1 2 4 4 + :gutter: 2 + :class-container: catalog-grid + + .. grid-item-card:: Hazard Family + :class-card: catalog-stat-card + + .. 
container:: catalog-stat-value + + Wildfire + + .. container:: catalog-stat-note + + Public catalog grouping used for this model. + + .. grid-item-card:: Maturity + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + Implemented + + .. container:: catalog-stat-note + + Catalog maturity label used on the index page. + + .. grid-item-card:: Tasks + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + 1 + + .. container:: catalog-stat-note + + Spread + + .. grid-item-card:: Benchmark Family + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + :doc:`Wildfire Benchmark ` + + .. container:: catalog-stat-note + + Primary benchmark-family link used for compatible evaluation coverage. + + +Description +----------- + +``segformer`` is a transformer-style sequence-to-map wildfire baseline over short raster histories. + +PyHazards keeps it public as a modern dense-prediction alternative to recurrent wildfire models. + +Benchmark Compatibility +----------------------- + +**Primary benchmark family:** :doc:`Wildfire Benchmark ` + +**Mapped benchmark ecosystems:** :doc:`WildfireSpreadTS ` + +External References +------------------- + +**Paper:** `WildfireSpreadTS+: A Multi-Modal Dataset for Wildfire Spread Prediction with History Context `_ + +Registry Name +------------- + +Primary entrypoint: ``segformer`` + +Supported Tasks +--------------- + +- Spread + +Programmatic Use +---------------- + +.. code-block:: python + + import torch + from pyhazards.models import build_model + + model = build_model(name="segformer", task="segmentation", history=4, in_channels=8) + logits = model(torch.randn(2, 4, 8, 16, 16)) + print(logits.shape) + +Notes +----- + +- The smoke path uses a short wildfire raster history and returns a one-channel prediction map. 
diff --git a/docs/source/modules/models_swin_unet.rst b/docs/source/modules/models_swin_unet.rst new file mode 100644 index 00000000..72a66508 --- /dev/null +++ b/docs/source/modules/models_swin_unet.rst @@ -0,0 +1,107 @@ +.. This file is generated by scripts/render_model_docs.py. Do not edit by hand. + +Swin-Unet +========= + +Overview +-------- + +``swin_unet`` combines a short wildfire raster history with a windowed encoder-decoder segmentation backbone. + +At a Glance +----------- + +.. grid:: 1 2 4 4 + :gutter: 2 + :class-container: catalog-grid + + .. grid-item-card:: Hazard Family + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + Wildfire + + .. container:: catalog-stat-note + + Public catalog grouping used for this model. + + .. grid-item-card:: Maturity + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + Implemented + + .. container:: catalog-stat-note + + Catalog maturity label used on the index page. + + .. grid-item-card:: Tasks + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + 1 + + .. container:: catalog-stat-note + + Spread + + .. grid-item-card:: Benchmark Family + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + :doc:`Wildfire Benchmark ` + + .. container:: catalog-stat-note + + Primary benchmark-family link used for compatible evaluation coverage. + + +Description +----------- + +``swin_unet`` combines a short wildfire raster history with a windowed encoder-decoder segmentation backbone. + +It serves as the PyHazards entry for Swin-based transformer segmentation within the wildfire benchmark family. 
+ +Benchmark Compatibility +----------------------- + +**Primary benchmark family:** :doc:`Wildfire Benchmark ` + +**Mapped benchmark ecosystems:** :doc:`WildfireSpreadTS ` + +External References +------------------- + +**Paper:** `WildfireSpreadTS+: A Multi-Modal Dataset for Wildfire Spread Prediction with History Context `_ + +Registry Name +------------- + +Primary entrypoint: ``swin_unet`` + +Supported Tasks +--------------- + +- Spread + +Programmatic Use +---------------- + +.. code-block:: python + + import torch + from pyhazards.models import build_model + + model = build_model(name="swin_unet", task="segmentation", history=4, in_channels=8, window_size=4) + logits = model(torch.randn(2, 4, 8, 16, 16)) + print(logits.shape) + +Notes +----- + +- The smoke path uses a short wildfire raster history and returns a one-channel prediction map. diff --git a/docs/source/modules/models_swinlstm.rst b/docs/source/modules/models_swinlstm.rst new file mode 100644 index 00000000..6ca96114 --- /dev/null +++ b/docs/source/modules/models_swinlstm.rst @@ -0,0 +1,107 @@ +.. This file is generated by scripts/render_model_docs.py. Do not edit by hand. + +SwinLSTM +======== + +Overview +-------- + +``swinlstm`` combines a short wildfire raster history with a windowed recurrent mixing block. + +At a Glance +----------- + +.. grid:: 1 2 4 4 + :gutter: 2 + :class-container: catalog-grid + + .. grid-item-card:: Hazard Family + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + Wildfire + + .. container:: catalog-stat-note + + Public catalog grouping used for this model. + + .. grid-item-card:: Maturity + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + Implemented + + .. container:: catalog-stat-note + + Catalog maturity label used on the index page. + + .. grid-item-card:: Tasks + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + 1 + + .. container:: catalog-stat-note + + Spread + + .. 
grid-item-card:: Benchmark Family + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + :doc:`Wildfire Benchmark ` + + .. container:: catalog-stat-note + + Primary benchmark-family link used for compatible evaluation coverage. + + +Description +----------- + +``swinlstm`` combines a short wildfire raster history with a windowed recurrent mixing block. + +It serves as the PyHazards proxy for SwinLSTM-style spatio-temporal wildfire forecasting baselines. + +Benchmark Compatibility +----------------------- + +**Primary benchmark family:** :doc:`Wildfire Benchmark ` + +**Mapped benchmark ecosystems:** :doc:`WildfireSpreadTS ` + +External References +------------------- + +**Paper:** `Sim2Real-Fire: A Multi-Modal Simulation Dataset for Forecast and Backtracking of Real-World Forest Fire `_ + +Registry Name +------------- + +Primary entrypoint: ``swinlstm`` + +Supported Tasks +--------------- + +- Spread + +Programmatic Use +---------------- + +.. code-block:: python + + import torch + from pyhazards.models import build_model + + model = build_model(name="swinlstm", task="segmentation", history=4, in_channels=8, window_size=4) + logits = model(torch.randn(2, 4, 8, 16, 16)) + print(logits.shape) + +Notes +----- + +- The smoke path uses a short wildfire raster history and returns a one-channel prediction map. diff --git a/docs/source/modules/models_tcn.rst b/docs/source/modules/models_tcn.rst new file mode 100644 index 00000000..81bf0c02 --- /dev/null +++ b/docs/source/modules/models_tcn.rst @@ -0,0 +1,107 @@ +.. This file is generated by scripts/render_model_docs.py. Do not edit by hand. + +TCN +=== + +Overview +-------- + +``tcn`` applies temporal convolution over short wildfire raster histories before decoding a dense output map. + +At a Glance +----------- + +.. grid:: 1 2 4 4 + :gutter: 2 + :class-container: catalog-grid + + .. grid-item-card:: Hazard Family + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + Wildfire + + .. 
container:: catalog-stat-note + + Public catalog grouping used for this model. + + .. grid-item-card:: Maturity + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + Implemented + + .. container:: catalog-stat-note + + Catalog maturity label used on the index page. + + .. grid-item-card:: Tasks + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + 1 + + .. container:: catalog-stat-note + + Spread + + .. grid-item-card:: Benchmark Family + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + :doc:`Wildfire Benchmark ` + + .. container:: catalog-stat-note + + Primary benchmark-family link used for compatible evaluation coverage. + + +Description +----------- + +``tcn`` applies temporal convolution over short wildfire raster histories before decoding a dense output map. + +It gives the wildfire catalog a lightweight non-recurrent temporal baseline beside ConvLSTM-style models. + +Benchmark Compatibility +----------------------- + +**Primary benchmark family:** :doc:`Wildfire Benchmark ` + +**Mapped benchmark ecosystems:** :doc:`WildfireSpreadTS ` + +External References +------------------- + +**Paper:** `An Empirical Evaluation of Generic Convolutional and Recurrent Networks for Sequence Modeling `_ + +Registry Name +------------- + +Primary entrypoint: ``tcn`` + +Supported Tasks +--------------- + +- Spread + +Programmatic Use +---------------- + +.. code-block:: python + + import torch + from pyhazards.models import build_model + + model = build_model(name="tcn", task="segmentation", history=4, in_channels=8) + logits = model(torch.randn(2, 4, 8, 16, 16)) + print(logits.shape) + +Notes +----- + +- The smoke path uses a short wildfire raster history and returns a one-channel prediction map. 
diff --git a/docs/source/modules/models_ts_satfire.rst b/docs/source/modules/models_ts_satfire.rst new file mode 100644 index 00000000..9f4719b8 --- /dev/null +++ b/docs/source/modules/models_ts_satfire.rst @@ -0,0 +1,107 @@ +.. This file is generated by scripts/render_model_docs.py. Do not edit by hand. + +TS-SatFire +========== + +Overview +-------- + +``ts_satfire`` fuses a short multi-step wildfire raster sequence with temporal attention before dense decoding. + +At a Glance +----------- + +.. grid:: 1 2 4 4 + :gutter: 2 + :class-container: catalog-grid + + .. grid-item-card:: Hazard Family + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + Wildfire + + .. container:: catalog-stat-note + + Public catalog grouping used for this model. + + .. grid-item-card:: Maturity + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + Implemented + + .. container:: catalog-stat-note + + Catalog maturity label used on the index page. + + .. grid-item-card:: Tasks + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + 1 + + .. container:: catalog-stat-note + + Spread + + .. grid-item-card:: Benchmark Family + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + :doc:`Wildfire Benchmark ` + + .. container:: catalog-stat-note + + Primary benchmark-family link used for compatible evaluation coverage. + + +Description +----------- + +``ts_satfire`` fuses a short multi-step wildfire raster sequence with temporal attention before dense decoding. + +It gives the PyHazards wildfire catalog a lightweight proxy for the TS-SatFire progression-prediction family. 
+ +Benchmark Compatibility +----------------------- + +**Primary benchmark family:** :doc:`Wildfire Benchmark ` + +**Mapped benchmark ecosystems:** :doc:`WildfireSpreadTS ` + +External References +------------------- + +**Paper:** `TS-SatFire: A Benchmark Dataset and Deep Learning Models for Forest Fire Progression Prediction `_ + +Registry Name +------------- + +Primary entrypoint: ``ts_satfire`` + +Supported Tasks +--------------- + +- Spread + +Programmatic Use +---------------- + +.. code-block:: python + + import torch + from pyhazards.models import build_model + + model = build_model(name="ts_satfire", task="segmentation", history=5, in_channels=8) + logits = model(torch.randn(2, 5, 8, 16, 16)) + print(logits.shape) + +Notes +----- + +- The smoke path uses a five-step wildfire raster history and returns a one-channel prediction map. diff --git a/docs/source/modules/models_unet.rst b/docs/source/modules/models_unet.rst new file mode 100644 index 00000000..d0c0a0ad --- /dev/null +++ b/docs/source/modules/models_unet.rst @@ -0,0 +1,107 @@ +.. This file is generated by scripts/render_model_docs.py. Do not edit by hand. + +U-Net +===== + +Overview +-------- + +``unet`` is the canonical encoder-decoder raster baseline for wildfire spread-style prediction in PyHazards. + +At a Glance +----------- + +.. grid:: 1 2 4 4 + :gutter: 2 + :class-container: catalog-grid + + .. grid-item-card:: Hazard Family + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + Wildfire + + .. container:: catalog-stat-note + + Public catalog grouping used for this model. + + .. grid-item-card:: Maturity + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + Implemented + + .. container:: catalog-stat-note + + Catalog maturity label used on the index page. + + .. grid-item-card:: Tasks + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + 1 + + .. container:: catalog-stat-note + + Spread + + .. 
grid-item-card:: Benchmark Family + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + :doc:`Wildfire Benchmark ` + + .. container:: catalog-stat-note + + Primary benchmark-family link used for compatible evaluation coverage. + + +Description +----------- + +``unet`` is the canonical encoder-decoder raster baseline for wildfire spread-style prediction in PyHazards. + +It provides the simplest dense prediction reference point before moving to recurrent or transformer-style wildfire models. + +Benchmark Compatibility +----------------------- + +**Primary benchmark family:** :doc:`Wildfire Benchmark ` + +**Mapped benchmark ecosystems:** :doc:`WildfireSpreadTS ` + +External References +------------------- + +**Paper:** `WildfireSpreadTS: A Dataset of Multi-Modal Time Series for Wildfire Spread Prediction `_ + +Registry Name +------------- + +Primary entrypoint: ``unet`` + +Supported Tasks +--------------- + +- Spread + +Programmatic Use +---------------- + +.. code-block:: python + + import torch + from pyhazards.models import build_model + + model = build_model(name="unet", task="segmentation", in_channels=12) + logits = model(torch.randn(2, 12, 16, 16)) + print(logits.shape) + +Notes +----- + +- The smoke path uses a single wildfire raster snapshot and returns a one-channel prediction map. diff --git a/docs/source/modules/models_utae.rst b/docs/source/modules/models_utae.rst new file mode 100644 index 00000000..58039afd --- /dev/null +++ b/docs/source/modules/models_utae.rst @@ -0,0 +1,107 @@ +.. This file is generated by scripts/render_model_docs.py. Do not edit by hand. + +U-TAE +===== + +Overview +-------- + +``utae`` is a sequence-to-map wildfire baseline that fuses short temporal windows before dense decoding. + +At a Glance +----------- + +.. grid:: 1 2 4 4 + :gutter: 2 + :class-container: catalog-grid + + .. grid-item-card:: Hazard Family + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + Wildfire + + .. 
container:: catalog-stat-note + + Public catalog grouping used for this model. + + .. grid-item-card:: Maturity + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + Implemented + + .. container:: catalog-stat-note + + Catalog maturity label used on the index page. + + .. grid-item-card:: Tasks + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + 1 + + .. container:: catalog-stat-note + + Spread + + .. grid-item-card:: Benchmark Family + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + :doc:`Wildfire Benchmark ` + + .. container:: catalog-stat-note + + Primary benchmark-family link used for compatible evaluation coverage. + + +Description +----------- + +``utae`` is a sequence-to-map wildfire baseline that fuses short temporal windows before dense decoding. + +It stays aligned with the WildfireSpreadTS-style benchmark family used for temporal wildfire spread prediction. + +Benchmark Compatibility +----------------------- + +**Primary benchmark family:** :doc:`Wildfire Benchmark ` + +**Mapped benchmark ecosystems:** :doc:`WildfireSpreadTS ` + +External References +------------------- + +**Paper:** `WildfireSpreadTS: A Dataset of Multi-Modal Time Series for Wildfire Spread Prediction `_ + +Registry Name +------------- + +Primary entrypoint: ``utae`` + +Supported Tasks +--------------- + +- Spread + +Programmatic Use +---------------- + +.. code-block:: python + + import torch + from pyhazards.models import build_model + + model = build_model(name="utae", task="segmentation", history=4, in_channels=8) + logits = model(torch.randn(2, 4, 8, 16, 16)) + print(logits.shape) + +Notes +----- + +- The smoke path uses a short wildfire raster history and returns a one-channel prediction map. 
diff --git a/docs/source/modules/models_vit_segmenter.rst b/docs/source/modules/models_vit_segmenter.rst new file mode 100644 index 00000000..6083fa80 --- /dev/null +++ b/docs/source/modules/models_vit_segmenter.rst @@ -0,0 +1,107 @@ +.. This file is generated by scripts/render_model_docs.py. Do not edit by hand. + +ViT Segmenter +============= + +Overview +-------- + +``vit_segmenter`` gives the wildfire catalog a lightweight vision-transformer segmentation baseline over short temporal windows. + +At a Glance +----------- + +.. grid:: 1 2 4 4 + :gutter: 2 + :class-container: catalog-grid + + .. grid-item-card:: Hazard Family + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + Wildfire + + .. container:: catalog-stat-note + + Public catalog grouping used for this model. + + .. grid-item-card:: Maturity + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + Implemented + + .. container:: catalog-stat-note + + Catalog maturity label used on the index page. + + .. grid-item-card:: Tasks + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + 1 + + .. container:: catalog-stat-note + + Spread + + .. grid-item-card:: Benchmark Family + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + :doc:`Wildfire Benchmark ` + + .. container:: catalog-stat-note + + Primary benchmark-family link used for compatible evaluation coverage. + + +Description +----------- + +``vit_segmenter`` gives the wildfire catalog a lightweight vision-transformer segmentation baseline over short temporal windows. + +It complements SegFormer and Swin-Unet with a simpler transformer-style dense-prediction proxy. 
+ +Benchmark Compatibility +----------------------- + +**Primary benchmark family:** :doc:`Wildfire Benchmark ` + +**Mapped benchmark ecosystems:** :doc:`WildfireSpreadTS ` + +External References +------------------- + +**Paper:** `TS-SatFire: A Benchmark Dataset and Deep Learning Models for Forest Fire Progression Prediction `_ + +Registry Name +------------- + +Primary entrypoint: ``vit_segmenter`` + +Supported Tasks +--------------- + +- Spread + +Programmatic Use +---------------- + +.. code-block:: python + + import torch + from pyhazards.models import build_model + + model = build_model(name="vit_segmenter", task="segmentation", history=4, in_channels=8) + logits = model(torch.randn(2, 4, 8, 16, 16)) + print(logits.shape) + +Notes +----- + +- The smoke path uses a short wildfire raster history and returns a one-channel prediction map. diff --git a/docs/source/modules/models_xgboost.rst b/docs/source/modules/models_xgboost.rst new file mode 100644 index 00000000..f761aedc --- /dev/null +++ b/docs/source/modules/models_xgboost.rst @@ -0,0 +1,105 @@ +.. This file is generated by scripts/render_model_docs.py. Do not edit by hand. + +XGBoost +======= + +Overview +-------- + +``xgboost`` is a strong tabular baseline for wildfire occurrence and danger prediction. + +At a Glance +----------- + +.. grid:: 1 2 4 4 + :gutter: 2 + :class-container: catalog-grid + + .. grid-item-card:: Hazard Family + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + Wildfire + + .. container:: catalog-stat-note + + Public catalog grouping used for this model. + + .. grid-item-card:: Maturity + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + Implemented + + .. container:: catalog-stat-note + + Catalog maturity label used on the index page. + + .. grid-item-card:: Tasks + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + 1 + + .. container:: catalog-stat-note + + Classification + + .. 
grid-item-card:: Benchmark Family + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + :doc:`Wildfire Benchmark ` + + .. container:: catalog-stat-note + + Primary benchmark-family link used for compatible evaluation coverage. + + +Description +----------- + +``xgboost`` is a strong tabular baseline for wildfire occurrence and danger prediction. + +PyHazards keeps it public because gradient-boosted trees remain a standard reference point for wildfire risk forecasting. + +Benchmark Compatibility +----------------------- + +**Primary benchmark family:** :doc:`Wildfire Benchmark ` + +External References +------------------- + +**Paper:** `Wildfire Danger Prediction and Understanding With Deep Learning `_ + +Registry Name +------------- + +Primary entrypoint: ``xgboost`` + +Supported Tasks +--------------- + +- Classification + +Programmatic Use +---------------- + +.. code-block:: python + + import torch + from pyhazards.models import build_model + + model = build_model(name="xgboost", task="classification", num_boost_round=64) + probs = model(torch.randn(4, 8)) + print(probs.shape) + +Notes +----- + +- The smoke path uses flattened wildfire covariates and returns two-class probabilities. diff --git a/docs/source/pyhazards_benchmarks.rst b/docs/source/pyhazards_benchmarks.rst index 82ec8635..74793bac 100644 --- a/docs/source/pyhazards_benchmarks.rst +++ b/docs/source/pyhazards_benchmarks.rst @@ -51,7 +51,7 @@ At a Glance .. container:: catalog-stat-value - 27 + 26 .. container:: catalog-stat-note @@ -74,7 +74,7 @@ coverage counts into a scan-friendly catalog. .. container:: catalog-entry-summary - Shared PyHazards evaluator family for wildfire danger and wildfire spread experiments. + Shared PyHazards evaluator family for wildfire occurrence, risk, and spread experiments. .. container:: catalog-chip-row @@ -90,7 +90,7 @@ coverage counts into a scan-friendly catalog. .. 
container:: catalog-meta-row - **Coverage:** 8 smoke configs | 8 models | 1 ecosystem + **Coverage:** 7 smoke configs | 31 models | 1 ecosystem .. container:: catalog-link-row @@ -200,7 +200,7 @@ status without opening the detail pages first. - :doc:`Wildfire Benchmark ` - Danger, Spread - Accuracy, Macro F1, AUC, PR-AUC, +5 more - - 8 models + - 31 models - Synthetic-backed * - Earthquake - :doc:`Earthquake Benchmark ` @@ -264,7 +264,7 @@ and the models currently mapped to that ecosystem. .. container:: catalog-meta-row - **Coverage:** 5 smoke configs | 5 models + **Coverage:** 5 smoke configs | 25 models .. container:: catalog-link-row diff --git a/docs/source/pyhazards_models.rst b/docs/source/pyhazards_models.rst index fd46d953..3c2a3cc7 100644 --- a/docs/source/pyhazards_models.rst +++ b/docs/source/pyhazards_models.rst @@ -29,7 +29,7 @@ At a Glance .. container:: catalog-stat-value - 24 + 47 .. container:: catalog-stat-note @@ -51,7 +51,7 @@ At a Glance .. container:: catalog-stat-value - 27 + 50 .. container:: catalog-stat-note @@ -72,7 +72,7 @@ pages and compatible benchmark coverage. .. container:: catalog-section-note - Wildfire models cover danger forecasting, weekly activity forecasting, and spread prediction under the shared wildfire benchmark family. + Wildfire models cover tabular occurrence baselines, raster spread predictors, and spatio-temporal forecasting variants under the shared wildfire benchmark family. .. rubric:: Implemented Models @@ -107,6 +107,141 @@ pages and compatible benchmark coverage. **Paper:** `Wildfire Spread Prediction in North America Using Satellite Imagery and Vision Transformer `_ | **Repo:** `Repository `__ + .. grid-item-card:: Attention U-Net + :class-card: catalog-entry-card + + .. container:: catalog-entry-summary + + An attention-gated U-Net baseline for wildfire raster prediction. + + .. container:: catalog-chip-row + + :bdg-primary:`Wildfire` :bdg-secondary:`Spread` :bdg-success:`Implemented` + + .. 
container:: catalog-meta-row + + **Details:** :doc:`Attention U-Net ` + + .. container:: catalog-meta-row + + **Benchmark Family:** :doc:`Wildfire Benchmark ` + + .. container:: catalog-meta-row + + **Benchmark Ecosystems:** :doc:`WildfireSpreadTS ` + + .. container:: catalog-link-row + + **Paper:** `TS-SatFire: A wildfire progression benchmark from temporally dense satellite sequences `_ + + .. grid-item-card:: ConvGRU-TrajGRU + :class-card: catalog-entry-card + + .. container:: catalog-entry-summary + + A compact ConvGRU-style wildfire sequence baseline over raster histories. + + .. container:: catalog-chip-row + + :bdg-primary:`Wildfire` :bdg-secondary:`Spread` :bdg-success:`Implemented` + + .. container:: catalog-meta-row + + **Details:** :doc:`ConvGRU-TrajGRU ` + + .. container:: catalog-meta-row + + **Benchmark Family:** :doc:`Wildfire Benchmark ` + + .. container:: catalog-meta-row + + **Benchmark Ecosystems:** :doc:`WildfireSpreadTS ` + + .. container:: catalog-link-row + + **Paper:** `FireCastNet: Earth-as-a-Graph for Seasonal Fire Prediction `_ + + .. grid-item-card:: ConvLSTM + :class-card: catalog-entry-card + + .. container:: catalog-entry-summary + + A compact ConvLSTM baseline for wildfire spread prediction from raster histories. + + .. container:: catalog-chip-row + + :bdg-primary:`Wildfire` :bdg-secondary:`Spread` :bdg-success:`Implemented` + + .. container:: catalog-meta-row + + **Details:** :doc:`ConvLSTM ` + + .. container:: catalog-meta-row + + **Benchmark Family:** :doc:`Wildfire Benchmark ` + + .. container:: catalog-meta-row + + **Benchmark Ecosystems:** :doc:`WildfireSpreadTS ` + + .. container:: catalog-link-row + + **Paper:** `WildfireSpreadTS: A Dataset of Multi-Modal Time Series for Wildfire Spread Prediction `_ + + .. grid-item-card:: Deep Ensemble + :class-card: catalog-entry-card + + .. container:: catalog-entry-summary + + An uncertainty-aware ensemble of compact wildfire segmentation members. + + .. 
container:: catalog-chip-row + + :bdg-primary:`Wildfire` :bdg-secondary:`Spread` :bdg-success:`Implemented` + + .. container:: catalog-meta-row + + **Details:** :doc:`Deep Ensemble ` + + .. container:: catalog-meta-row + + **Benchmark Family:** :doc:`Wildfire Benchmark ` + + .. container:: catalog-meta-row + + **Benchmark Ecosystems:** :doc:`WildfireSpreadTS ` + + .. container:: catalog-link-row + + **Paper:** `Simple and Scalable Predictive Uncertainty Estimation using Deep Ensembles `_ + + .. grid-item-card:: DeepLabV3+ + :class-card: catalog-entry-card + + .. container:: catalog-entry-summary + + A compact DeepLabV3+-style baseline for wildfire raster prediction. + + .. container:: catalog-chip-row + + :bdg-primary:`Wildfire` :bdg-secondary:`Spread` :bdg-success:`Implemented` + + .. container:: catalog-meta-row + + **Details:** :doc:`DeepLabV3+ ` + + .. container:: catalog-meta-row + + **Benchmark Family:** :doc:`Wildfire Benchmark ` + + .. container:: catalog-meta-row + + **Benchmark Ecosystems:** :doc:`WildfireSpreadTS ` + + .. container:: catalog-link-row + + **Paper:** `Wildfire spreading prediction using multimodal data and deep neural network approach `_ + .. grid-item-card:: DNN-LSTM-AutoEncoder :class-card: catalog-entry-card @@ -130,6 +265,60 @@ pages and compatible benchmark coverage. **Paper:** `Developing risk assessment framework for wildfire in the United States `_ + .. grid-item-card:: EarthFarseer + :class-card: catalog-entry-card + + .. container:: catalog-entry-summary + + A compact EarthFarseer-style wildfire sequence baseline over raster histories. + + .. container:: catalog-chip-row + + :bdg-primary:`Wildfire` :bdg-secondary:`Spread` :bdg-success:`Implemented` + + .. container:: catalog-meta-row + + **Details:** :doc:`EarthFarseer ` + + .. container:: catalog-meta-row + + **Benchmark Family:** :doc:`Wildfire Benchmark ` + + .. container:: catalog-meta-row + + **Benchmark Ecosystems:** :doc:`WildfireSpreadTS ` + + .. 
container:: catalog-link-row + + **Paper:** `Sim2Real-Fire: A Multi-Modal Simulation Dataset for Forecast and Backtracking of Real-World Forest Fire `_ + + .. grid-item-card:: EarthFormer + :class-card: catalog-entry-card + + .. container:: catalog-entry-summary + + A compact EarthFormer-style wildfire sequence baseline over raster histories. + + .. container:: catalog-chip-row + + :bdg-primary:`Wildfire` :bdg-secondary:`Spread` :bdg-success:`Implemented` + + .. container:: catalog-meta-row + + **Details:** :doc:`EarthFormer ` + + .. container:: catalog-meta-row + + **Benchmark Family:** :doc:`Wildfire Benchmark ` + + .. container:: catalog-meta-row + + **Benchmark Ecosystems:** :doc:`WildfireSpreadTS ` + + .. container:: catalog-link-row + + **Paper:** `Sim2Real-Fire: A Multi-Modal Simulation Dataset for Forecast and Backtracking of Real-World Forest Fire `_ + .. grid-item-card:: FireCastNet :class-card: catalog-entry-card @@ -157,6 +346,33 @@ pages and compatible benchmark coverage. **Paper:** `FireCastNet: Earth-as-a-Graph for Seasonal Fire Prediction `_ | **Repo:** `Repository `__ + .. grid-item-card:: FirePred + :class-card: catalog-entry-card + + .. container:: catalog-entry-summary + + A hybrid multi-temporal CNN wildfire predictor inspired by FirePred. + + .. container:: catalog-chip-row + + :bdg-primary:`Wildfire` :bdg-secondary:`Spread` :bdg-success:`Implemented` + + .. container:: catalog-meta-row + + **Details:** :doc:`FirePred ` + + .. container:: catalog-meta-row + + **Benchmark Family:** :doc:`Wildfire Benchmark ` + + .. container:: catalog-meta-row + + **Benchmark Ecosystems:** :doc:`WildfireSpreadTS ` + + .. container:: catalog-link-row + + **Paper:** `FirePred: A hybrid multi-temporal convolutional network for wildfire spread prediction `_ + .. grid-item-card:: ForeFire Adapter :class-card: catalog-entry-card @@ -184,20 +400,20 @@ pages and compatible benchmark coverage. 
**Paper:** `ForeFire: A Modular, Scriptable C++ Simulation Engine and Library for Wildland-Fire Spread `_ | **Repo:** `Repository `__ - .. grid-item-card:: Wildfire Forecasting + .. grid-item-card:: LightGBM :class-card: catalog-entry-card .. container:: catalog-entry-summary - A sequence forecasting baseline for next-window wildfire activity across weekly count features. + A LightGBM wildfire occurrence baseline for fast tabular classification experiments. .. container:: catalog-chip-row - :bdg-primary:`Wildfire` :bdg-secondary:`Forecasting` :bdg-success:`Implemented` + :bdg-primary:`Wildfire` :bdg-secondary:`Classification` :bdg-success:`Implemented` .. container:: catalog-meta-row - **Details:** :doc:`Wildfire Forecasting ` + **Details:** :doc:`LightGBM ` .. container:: catalog-meta-row @@ -205,14 +421,37 @@ pages and compatible benchmark coverage. .. container:: catalog-link-row - **Paper:** `Wildfire Danger Prediction and Understanding with Deep Learning `_ | **Repo:** `Repository `__ + **Paper:** `LightGBM: A Highly Efficient Gradient Boosting Decision Tree `_ - .. grid-item-card:: WildfireSpreadTS + .. grid-item-card:: Logistic Regression :class-card: catalog-entry-card .. container:: catalog-entry-summary - A temporal convolution wildfire spread baseline over short raster history windows. + A classical logistic-regression baseline for wildfire occurrence probability from tabular covariates. + + .. container:: catalog-chip-row + + :bdg-primary:`Wildfire` :bdg-secondary:`Classification` :bdg-success:`Implemented` + + .. container:: catalog-meta-row + + **Details:** :doc:`Logistic Regression ` + + .. container:: catalog-meta-row + + **Benchmark Family:** :doc:`Wildfire Benchmark ` + + .. container:: catalog-link-row + + **Paper:** `Next Day Wildfire Spread: A Machine Learning Dataset to Predict Wildfire Spreading From Remote-Sensing Data `_ + + .. grid-item-card:: MAU + :class-card: catalog-entry-card + + .. 
container:: catalog-entry-summary + + A compact MAU-style spatio-temporal wildfire baseline over raster histories. .. container:: catalog-chip-row @@ -220,7 +459,7 @@ pages and compatible benchmark coverage. .. container:: catalog-meta-row - **Details:** :doc:`WildfireSpreadTS ` + **Details:** :doc:`MAU ` .. container:: catalog-meta-row @@ -232,14 +471,14 @@ pages and compatible benchmark coverage. .. container:: catalog-link-row - **Paper:** `WildfireSpreadTS: A Dataset of Multi-Modal Time Series for Wildfire Spread Prediction `_ | **Repo:** `Repository `__ + **Paper:** `Sim2Real-Fire: A Multi-Modal Simulation Dataset for Forecast and Backtracking of Real-World Forest Fire `_ - .. grid-item-card:: WRF-SFIRE Adapter + .. grid-item-card:: PredRNN-v2 :class-card: catalog-entry-card .. container:: catalog-entry-summary - A lightweight raster wildfire spread adapter inspired by WRF-SFIRE style transport. + A compact PredRNN-v2-style wildfire baseline over raster history windows. .. container:: catalog-chip-row @@ -247,7 +486,7 @@ pages and compatible benchmark coverage. .. container:: catalog-meta-row - **Details:** :doc:`WRF-SFIRE Adapter ` + **Details:** :doc:`PredRNN-v2 ` .. container:: catalog-meta-row @@ -259,7 +498,377 @@ pages and compatible benchmark coverage. .. container:: catalog-link-row - **Paper:** `Coupled atmosphere-wildland fire modeling with WRF 3.3 and SFIRE 2011 `_ | **Repo:** `Repository `__ + **Paper:** `Sim2Real-Fire: A Multi-Modal Simulation Dataset for Forecast and Backtracking of Real-World Forest Fire `_ + + .. grid-item-card:: Rainformer + :class-card: catalog-entry-card + + .. container:: catalog-entry-summary + + A compact Rainformer-style wildfire sequence baseline over raster histories. + + .. container:: catalog-chip-row + + :bdg-primary:`Wildfire` :bdg-secondary:`Spread` :bdg-success:`Implemented` + + .. container:: catalog-meta-row + + **Details:** :doc:`Rainformer ` + + .. 
container:: catalog-meta-row + + **Benchmark Family:** :doc:`Wildfire Benchmark ` + + .. container:: catalog-meta-row + + **Benchmark Ecosystems:** :doc:`WildfireSpreadTS ` + + .. container:: catalog-link-row + + **Paper:** `Sim2Real-Fire: A Multi-Modal Simulation Dataset for Forecast and Backtracking of Real-World Forest Fire `_ + + .. grid-item-card:: Random Forest + :class-card: catalog-entry-card + + .. container:: catalog-entry-summary + + A random-forest wildfire occurrence baseline over tabular predictors. + + .. container:: catalog-chip-row + + :bdg-primary:`Wildfire` :bdg-secondary:`Classification` :bdg-success:`Implemented` + + .. container:: catalog-meta-row + + **Details:** :doc:`Random Forest ` + + .. container:: catalog-meta-row + + **Benchmark Family:** :doc:`Wildfire Benchmark ` + + .. container:: catalog-link-row + + **Paper:** `WildfireDB: An Open-Source Dataset Connecting Wildfire Occurrence with Relevant Determinants `_ + + .. grid-item-card:: ResNet18 U-Net + :class-card: catalog-entry-card + + .. container:: catalog-entry-summary + + A residual U-Net wildfire baseline for raster spread prediction. + + .. container:: catalog-chip-row + + :bdg-primary:`Wildfire` :bdg-secondary:`Spread` :bdg-success:`Implemented` + + .. container:: catalog-meta-row + + **Details:** :doc:`ResNet18 U-Net ` + + .. container:: catalog-meta-row + + **Benchmark Family:** :doc:`Wildfire Benchmark ` + + .. container:: catalog-meta-row + + **Benchmark Ecosystems:** :doc:`WildfireSpreadTS ` + + .. container:: catalog-link-row + + **Paper:** `WildfireSpreadTS: A Dataset of Multi-Modal Time Series for Wildfire Spread Prediction `_ + + .. grid-item-card:: SegFormer + :class-card: catalog-entry-card + + .. container:: catalog-entry-summary + + A compact SegFormer-style wildfire sequence segmenter. + + .. container:: catalog-chip-row + + :bdg-primary:`Wildfire` :bdg-secondary:`Spread` :bdg-success:`Implemented` + + .. 
container:: catalog-meta-row + + **Details:** :doc:`SegFormer ` + + .. container:: catalog-meta-row + + **Benchmark Family:** :doc:`Wildfire Benchmark ` + + .. container:: catalog-meta-row + + **Benchmark Ecosystems:** :doc:`WildfireSpreadTS ` + + .. container:: catalog-link-row + + **Paper:** `WildfireSpreadTS+: A Multi-Modal Dataset for Wildfire Spread Prediction with History Context `_ + + .. grid-item-card:: Swin-Unet + :class-card: catalog-entry-card + + .. container:: catalog-entry-summary + + A compact Swin-Unet wildfire sequence segmenter. + + .. container:: catalog-chip-row + + :bdg-primary:`Wildfire` :bdg-secondary:`Spread` :bdg-success:`Implemented` + + .. container:: catalog-meta-row + + **Details:** :doc:`Swin-Unet ` + + .. container:: catalog-meta-row + + **Benchmark Family:** :doc:`Wildfire Benchmark ` + + .. container:: catalog-meta-row + + **Benchmark Ecosystems:** :doc:`WildfireSpreadTS ` + + .. container:: catalog-link-row + + **Paper:** `WildfireSpreadTS+: A Multi-Modal Dataset for Wildfire Spread Prediction with History Context `_ + + .. grid-item-card:: SwinLSTM + :class-card: catalog-entry-card + + .. container:: catalog-entry-summary + + A compact SwinLSTM-style wildfire sequence baseline over raster histories. + + .. container:: catalog-chip-row + + :bdg-primary:`Wildfire` :bdg-secondary:`Spread` :bdg-success:`Implemented` + + .. container:: catalog-meta-row + + **Details:** :doc:`SwinLSTM ` + + .. container:: catalog-meta-row + + **Benchmark Family:** :doc:`Wildfire Benchmark ` + + .. container:: catalog-meta-row + + **Benchmark Ecosystems:** :doc:`WildfireSpreadTS ` + + .. container:: catalog-link-row + + **Paper:** `Sim2Real-Fire: A Multi-Modal Simulation Dataset for Forecast and Backtracking of Real-World Forest Fire `_ + + .. grid-item-card:: TCN + :class-card: catalog-entry-card + + .. container:: catalog-entry-summary + + A compact temporal-convolution wildfire baseline over raster histories. + + .. 
container:: catalog-chip-row + + :bdg-primary:`Wildfire` :bdg-secondary:`Spread` :bdg-success:`Implemented` + + .. container:: catalog-meta-row + + **Details:** :doc:`TCN ` + + .. container:: catalog-meta-row + + **Benchmark Family:** :doc:`Wildfire Benchmark ` + + .. container:: catalog-meta-row + + **Benchmark Ecosystems:** :doc:`WildfireSpreadTS ` + + .. container:: catalog-link-row + + **Paper:** `An Empirical Evaluation of Generic Convolutional and Recurrent Networks for Sequence Modeling `_ + + .. grid-item-card:: TS-SatFire + :class-card: catalog-entry-card + + .. container:: catalog-entry-summary + + A spatio-temporal wildfire predictor inspired by TS-SatFire. + + .. container:: catalog-chip-row + + :bdg-primary:`Wildfire` :bdg-secondary:`Spread` :bdg-success:`Implemented` + + .. container:: catalog-meta-row + + **Details:** :doc:`TS-SatFire ` + + .. container:: catalog-meta-row + + **Benchmark Family:** :doc:`Wildfire Benchmark ` + + .. container:: catalog-meta-row + + **Benchmark Ecosystems:** :doc:`WildfireSpreadTS ` + + .. container:: catalog-link-row + + **Paper:** `TS-SatFire: A Benchmark Dataset and Deep Learning Models for Forest Fire Progression Prediction `_ + + .. grid-item-card:: U-Net + :class-card: catalog-entry-card + + .. container:: catalog-entry-summary + + A compact U-Net baseline for wildfire raster prediction. + + .. container:: catalog-chip-row + + :bdg-primary:`Wildfire` :bdg-secondary:`Spread` :bdg-success:`Implemented` + + .. container:: catalog-meta-row + + **Details:** :doc:`U-Net ` + + .. container:: catalog-meta-row + + **Benchmark Family:** :doc:`Wildfire Benchmark ` + + .. container:: catalog-meta-row + + **Benchmark Ecosystems:** :doc:`WildfireSpreadTS ` + + .. container:: catalog-link-row + + **Paper:** `WildfireSpreadTS: A Dataset of Multi-Modal Time Series for Wildfire Spread Prediction `_ + + .. grid-item-card:: U-TAE + :class-card: catalog-entry-card + + .. 
container:: catalog-entry-summary + + A compact U-TAE wildfire baseline over raster history windows. + + .. container:: catalog-chip-row + + :bdg-primary:`Wildfire` :bdg-secondary:`Spread` :bdg-success:`Implemented` + + .. container:: catalog-meta-row + + **Details:** :doc:`U-TAE ` + + .. container:: catalog-meta-row + + **Benchmark Family:** :doc:`Wildfire Benchmark ` + + .. container:: catalog-meta-row + + **Benchmark Ecosystems:** :doc:`WildfireSpreadTS ` + + .. container:: catalog-link-row + + **Paper:** `WildfireSpreadTS: A Dataset of Multi-Modal Time Series for Wildfire Spread Prediction `_ + + .. grid-item-card:: ViT Segmenter + :class-card: catalog-entry-card + + .. container:: catalog-entry-summary + + A compact ViT-style wildfire sequence segmenter over raster histories. + + .. container:: catalog-chip-row + + :bdg-primary:`Wildfire` :bdg-secondary:`Spread` :bdg-success:`Implemented` + + .. container:: catalog-meta-row + + **Details:** :doc:`ViT Segmenter ` + + .. container:: catalog-meta-row + + **Benchmark Family:** :doc:`Wildfire Benchmark ` + + .. container:: catalog-meta-row + + **Benchmark Ecosystems:** :doc:`WildfireSpreadTS ` + + .. container:: catalog-link-row + + **Paper:** `TS-SatFire: A Benchmark Dataset and Deep Learning Models for Forest Fire Progression Prediction `_ + + .. grid-item-card:: WildfireSpreadTS + :class-card: catalog-entry-card + + .. container:: catalog-entry-summary + + A temporal convolution wildfire spread baseline over short raster history windows. + + .. container:: catalog-chip-row + + :bdg-primary:`Wildfire` :bdg-secondary:`Spread` :bdg-success:`Implemented` + + .. container:: catalog-meta-row + + **Details:** :doc:`WildfireSpreadTS ` + + .. container:: catalog-meta-row + + **Benchmark Family:** :doc:`Wildfire Benchmark ` + + .. container:: catalog-meta-row + + **Benchmark Ecosystems:** :doc:`WildfireSpreadTS ` + + .. 
container:: catalog-link-row + + **Paper:** `WildfireSpreadTS: A Dataset of Multi-Modal Time Series for Wildfire Spread Prediction `_ | **Repo:** `Repository `__ + + .. grid-item-card:: WRF-SFIRE Adapter + :class-card: catalog-entry-card + + .. container:: catalog-entry-summary + + A lightweight raster wildfire spread adapter inspired by WRF-SFIRE style transport. + + .. container:: catalog-chip-row + + :bdg-primary:`Wildfire` :bdg-secondary:`Spread` :bdg-success:`Implemented` + + .. container:: catalog-meta-row + + **Details:** :doc:`WRF-SFIRE Adapter ` + + .. container:: catalog-meta-row + + **Benchmark Family:** :doc:`Wildfire Benchmark ` + + .. container:: catalog-meta-row + + **Benchmark Ecosystems:** :doc:`WildfireSpreadTS ` + + .. container:: catalog-link-row + + **Paper:** `Coupled atmosphere-wildland fire modeling with WRF 3.3 and SFIRE 2011 `_ | **Repo:** `Repository `__ + + .. grid-item-card:: XGBoost + :class-card: catalog-entry-card + + .. container:: catalog-entry-summary + + A boosted-tree wildfire occurrence baseline with a binary logistic objective. + + .. container:: catalog-chip-row + + :bdg-primary:`Wildfire` :bdg-secondary:`Classification` :bdg-success:`Implemented` + + .. container:: catalog-meta-row + + **Details:** :doc:`XGBoost ` + + .. container:: catalog-meta-row + + **Benchmark Family:** :doc:`Wildfire Benchmark ` + + .. container:: catalog-link-row + + **Paper:** `Wildfire Danger Prediction and Understanding With Deep Learning `_ .. grid-item-card:: CNN-ASPP :class-card: catalog-entry-card @@ -924,9 +1533,17 @@ before selecting a model for evaluation. 
:hidden: modules/models_asufm + modules/models_attention_unet + modules/models_convgru_trajgru + modules/models_convlstm + modules/models_deep_ensemble + modules/models_deeplabv3p + modules/models_earthfarseer + modules/models_earthformer modules/models_eqnet modules/models_eqtransformer modules/models_firecastnet + modules/models_firepred modules/models_floodcast modules/models_forefire modules/models_fourcastnet_tc @@ -935,18 +1552,33 @@ before selecting a model for evaluation. modules/models_graphcast_tc modules/models_hurricast modules/models_hydrographnet + modules/models_lightgbm + modules/models_logistic_regression + modules/models_mau modules/models_neuralhydrology_ealstm modules/models_neuralhydrology_lstm modules/models_pangu_tc modules/models_phasenet + modules/models_predrnn_v2 + modules/models_rainformer + modules/models_random_forest + modules/models_resnet18_unet modules/models_saf_net + modules/models_segformer + modules/models_swin_unet + modules/models_swinlstm modules/models_tcif_fusion + modules/models_tcn modules/models_tropicalcyclone_mlp modules/models_tropicyclonenet + modules/models_ts_satfire + modules/models_unet modules/models_urbanfloodcast + modules/models_utae + modules/models_vit_segmenter modules/models_wavecastnet modules/models_wildfire_aspp - modules/models_wildfire_forecasting modules/models_wildfire_fpa modules/models_wildfirespreadts modules/models_wrf_sfire + modules/models_xgboost diff --git a/pyhazards/appendix_a_catalog.py b/pyhazards/appendix_a_catalog.py index fa92c3ae..aa3b6016 100644 --- a/pyhazards/appendix_a_catalog.py +++ b/pyhazards/appendix_a_catalog.py @@ -43,7 +43,6 @@ class AppendixAEntry: AppendixAEntry("Earthquake", "pick-benchmark", "Benchmark", "https://github.com/seisbench/pick-benchmark", "core", notes="A synthetic-backed pick-benchmark-compatible waveform adapter is registered for smoke benchmarking."), AppendixAEntry("Earthquake", "pyCSEP", "Benchmark / Reports", "https://github.com/SCECCode/pycsep", 
"core", notes="The forecasting smoke benchmark exports a pyCSEP-style JSON artifact."), AppendixAEntry("Earthquake", "AEFA", "Dataset / Forecast Benchmark", "https://github.com/chenyk1990/aefa", "core", notes="A synthetic-backed AEFA-style forecasting dataset adapter is registered."), - AppendixAEntry("Wildfire", "wildfire_forecasting", "Baseline", "https://github.com/Orion-AI-Lab/wildfire_forecasting", "core", ("wildfire_forecasting",)), AppendixAEntry("Wildfire", "WildfireSpreadTS", "Baseline / Benchmark", "https://github.com/SebastianGer/WildfireSpreadTS", "core", ("wildfirespreadts",)), AppendixAEntry("Wildfire", "ASUFM", "Baseline", "https://github.com/bronteee/fire-asufm", "core", ("asufm",)), AppendixAEntry("Wildfire", "WRF-SFIRE", "Simulator Adapter", "https://github.com/openwfm/WRF-SFIRE", "core", ("wrf_sfire",), "The current adapter is lightweight and synthetic-backed rather than a full external simulator binding."), diff --git a/pyhazards/benchmark_cards/wildfire_benchmark.yaml b/pyhazards/benchmark_cards/wildfire_benchmark.yaml index e905aaf2..bba8a373 100644 --- a/pyhazards/benchmark_cards/wildfire_benchmark.yaml +++ b/pyhazards/benchmark_cards/wildfire_benchmark.yaml @@ -5,11 +5,11 @@ hazard_family: Wildfire benchmark_key: wildfire support_status: synthetic-backed summary: > - Shared PyHazards evaluator family for wildfire danger and wildfire spread experiments. + Shared PyHazards evaluator family for wildfire occurrence, risk, and spread experiments. description: - > - The wildfire benchmark family is the single scoring layer for tabular danger tasks, - weekly forecasting tasks, and raster spread tasks. + The wildfire benchmark family is the single scoring layer for tabular occurrence tasks, + risk forecasting baselines, and raster spread tasks. - > Current coverage is synthetic-backed, but it already exposes a single hazard-level evaluator contract across wildfire danger and wildfire spread smoke configs. 
@@ -28,7 +28,6 @@ metrics: - burned_area_mae smoke_configs: - pyhazards/configs/wildfire/wildfire_danger_smoke.yaml - - pyhazards/configs/wildfire/wildfire_forecasting_smoke.yaml - pyhazards/configs/wildfire/asufm_smoke.yaml - pyhazards/configs/wildfire/wildfire_spread_smoke.yaml - pyhazards/configs/wildfire/wildfirespreadts_smoke.yaml @@ -36,13 +35,36 @@ smoke_configs: - pyhazards/configs/wildfire/wrf_sfire_smoke.yaml - pyhazards/configs/wildfire/firecastnet_smoke.yaml linked_models: - - wildfire_fpa - - wildfire_forecasting + - logistic_regression + - random_forest + - xgboost + - lightgbm + - unet + - resnet18_unet + - attention_unet + - deeplabv3p + - convlstm + - mau + - predrnn_v2 + - rainformer + - earthformer + - swinlstm + - earthfarseer + - convgru_trajgru + - tcn + - utae + - segformer + - swin_unet + - vit_segmenter + - deep_ensemble - asufm - wildfire_aspp + - firecastnet + - firepred - wildfirespreadts + - ts_satfire + - wildfire_fpa - forefire - wrf_sfire - - firecastnet notes: - "WildfireSpreadTS is the public Appendix-A benchmark ecosystem surfaced on this page." 
diff --git a/pyhazards/benchmark_cards/wildfirespreadts_ecosystem.yaml b/pyhazards/benchmark_cards/wildfirespreadts_ecosystem.yaml index 85771ead..c9eaf187 100644 --- a/pyhazards/benchmark_cards/wildfirespreadts_ecosystem.yaml +++ b/pyhazards/benchmark_cards/wildfirespreadts_ecosystem.yaml @@ -30,7 +30,27 @@ smoke_configs: - pyhazards/configs/wildfire/firecastnet_smoke.yaml linked_models: - wildfire_aspp + - unet + - resnet18_unet + - attention_unet + - deeplabv3p + - convlstm + - mau + - predrnn_v2 + - rainformer + - earthformer + - swinlstm + - earthfarseer + - convgru_trajgru + - tcn + - utae + - segformer + - swin_unet + - vit_segmenter + - deep_ensemble + - firecastnet + - firepred - wildfirespreadts + - ts_satfire - forefire - wrf_sfire - - firecastnet diff --git a/pyhazards/benchmark_catalog.py b/pyhazards/benchmark_catalog.py index 1e3e4c28..ea534644 100644 --- a/pyhazards/benchmark_catalog.py +++ b/pyhazards/benchmark_catalog.py @@ -5,7 +5,30 @@ from typing import Dict, Iterable, List, Literal, Optional, Sequence, Set import yaml -from pydantic import BaseModel, Field, model_validator + +try: + from pydantic import BaseModel, Field, model_validator + + def _after_model_validator(func): + return model_validator(mode="after")(func) + + def _model_validate(model_cls, raw): + return model_cls.model_validate(raw) + +except ImportError: + from pydantic import BaseModel, Field, root_validator + + def _after_model_validator(func): + @root_validator(skip_on_failure=True, allow_reuse=True) + def _wrapped(cls, values): + instance = cls.construct(**values) + func(instance) + return values + + return _wrapped + + def _model_validate(model_cls, raw): + return model_cls.parse_obj(raw) from .benchmarks import available_benchmarks, build_benchmark from .configs import load_experiment_config @@ -97,7 +120,7 @@ class BenchmarkCard(BaseModel): notes: List[str] = Field(default_factory=list) source: Optional[BenchmarkSource] = None - @model_validator(mode="after") + 
@_after_model_validator def validate_card(self) -> "BenchmarkCard": if self.kind == "ecosystem" and self.source is None: raise ValueError("ecosystem benchmark cards require a source block") @@ -117,7 +140,7 @@ def load_benchmark_cards(cards_dir: Path = BENCHMARK_CARDS_DIR) -> List[Benchmar seen_slugs: Set[str] = set() for path in sorted(cards_dir.glob("*.y*ml")): raw = yaml.safe_load(path.read_text(encoding="utf-8")) or {} - card = BenchmarkCard.model_validate(raw) + card = _model_validate(BenchmarkCard, raw) if path.stem != card.slug: raise ValueError( "Benchmark card filename must match slug: " diff --git a/pyhazards/dataset_cards/fpa_fod_weekly.yaml b/pyhazards/dataset_cards/fpa_fod_weekly.yaml index a7599be7..edbbba24 100644 --- a/pyhazards/dataset_cards/fpa_fod_weekly.yaml +++ b/pyhazards/dataset_cards/fpa_fod_weekly.yaml @@ -50,7 +50,6 @@ primary_references: url: https://github.com/LabRAI/PyHazards related_models: - wildfire_fpa - - wildfire_forecasting - asufm related_benchmarks: - wildfire_benchmark diff --git a/pyhazards/dataset_catalog.py b/pyhazards/dataset_catalog.py index 73501e4c..c058c070 100644 --- a/pyhazards/dataset_catalog.py +++ b/pyhazards/dataset_catalog.py @@ -6,7 +6,47 @@ from typing import Dict, Iterable, List, Optional, Sequence, Set import yaml -from pydantic import AliasChoices, BaseModel, Field, model_validator + +try: + from pydantic import AliasChoices, BaseModel, Field, model_validator + + def _field(*args, validation_alias=None, **kwargs): + if validation_alias is not None: + kwargs["validation_alias"] = validation_alias + return Field(*args, **kwargs) + + def _after_model_validator(func): + return model_validator(mode="after")(func) + + def _model_validate(model_cls, raw): + return model_cls.model_validate(raw) + +except ImportError: + from pydantic import BaseModel, Field, root_validator + + class AliasChoices: + def __init__(self, *choices): + self.choices = choices + + def _field(*args, validation_alias=None, **kwargs): + if 
validation_alias is not None and "alias" not in kwargs: + if isinstance(validation_alias, AliasChoices): + kwargs["alias"] = validation_alias.choices[0] + else: + kwargs["alias"] = validation_alias + return Field(*args, **kwargs) + + def _after_model_validator(func): + @root_validator(skip_on_failure=True, allow_reuse=True) + def _wrapped(cls, values): + instance = cls.construct(**values) + func(instance) + return values + + return _wrapped + + def _model_validate(model_cls, raw): + return model_cls.parse_obj(raw) REPO_ROOT = Path(__file__).resolve().parent.parent @@ -111,7 +151,7 @@ class InspectionSpec(BaseModel): module: Optional[str] = None notes: List[str] = Field(default_factory=list) - @model_validator(mode="after") + @_after_model_validator def derive_module_from_command(self) -> "InspectionSpec": if self.module: return self @@ -122,7 +162,7 @@ def derive_module_from_command(self) -> "InspectionSpec": class RegistrySpec(BaseModel): - name: str = Field(validation_alias=AliasChoices("name", "dataset_name")) + name: str = _field(validation_alias=AliasChoices("name", "dataset_name")) example: str notes: List[str] = Field(default_factory=list) @@ -146,7 +186,7 @@ class DatasetCard(BaseModel): typical_use_cases: List[str] = Field(default_factory=list) access_links: List[DatasetLink] = Field(default_factory=list) inspection: Optional[InspectionSpec] = None - references: List[DatasetReference] = Field( + references: List[DatasetReference] = _field( default_factory=list, validation_alias=AliasChoices("references", "primary_references"), ) @@ -156,7 +196,7 @@ class DatasetCard(BaseModel): related_benchmarks: List[str] = Field(default_factory=list) notes: List[str] = Field(default_factory=list) - @model_validator(mode="after") + @_after_model_validator def validate_card(self) -> "DatasetCard": if not self.references: raise ValueError("dataset cards require at least one reference") @@ -188,7 +228,12 @@ def load_dataset_cards(cards_dir: Path = DATASET_CARDS_DIR) -> 
List[DatasetCard] seen_slugs: Set[str] = set() for path in sorted(cards_dir.glob("*.y*ml")): raw = yaml.safe_load(path.read_text(encoding="utf-8")) or {} - card = DatasetCard.model_validate(raw) + if "primary_references" in raw and "references" not in raw: + raw["references"] = raw.pop("primary_references") + registry = raw.get("registry") + if isinstance(registry, dict) and "dataset_name" in registry and "name" not in registry: + registry["name"] = registry.pop("dataset_name") + card = _model_validate(DatasetCard, raw) if path.stem != card.slug: raise ValueError( f"Dataset card filename must match slug: {path.name} vs {card.slug}" diff --git a/pyhazards/model_cards/attention_unet.yaml b/pyhazards/model_cards/attention_unet.yaml new file mode 100644 index 00000000..0dd81352 --- /dev/null +++ b/pyhazards/model_cards/attention_unet.yaml @@ -0,0 +1,39 @@ +model_name: attention_unet +display_name: Attention U-Net +hazard: Wildfire +catalog_status: core +source_file: pyhazards/models/attention_unet.py +builder_name: attention_unet_builder +summary: > + An attention-gated U-Net baseline for wildfire raster prediction. +description: + - > + ``attention_unet`` extends the encoder-decoder wildfire baseline with lightweight gating on skip connections. + - > + PyHazards keeps it public because attention-augmented U-Net variants appear frequently in wildfire progression comparisons. +paper: + title: "TS-SatFire: A wildfire progression benchmark from temporally dense satellite sequences" + url: https://doi.org/10.1038/s41597-025-06271-3 +tasks: + - segmentation + - regression +example: | + import torch + from pyhazards.models import build_model + + model = build_model(name="attention_unet", task="segmentation", in_channels=12) + logits = model(torch.randn(2, 12, 16, 16)) + print(logits.shape) +notes: + - "The smoke path uses a single wildfire raster snapshot and returns a one-channel prediction map." 
+smoke_test: + task: segmentation + build_kwargs: + in_channels: 12 + input: + kind: tensor + tensor: + shape: [2, 12, 16, 16] + expected_output: + kind: tensor + shape: [2, 1, 16, 16] diff --git a/pyhazards/model_cards/convgru_trajgru.yaml b/pyhazards/model_cards/convgru_trajgru.yaml new file mode 100644 index 00000000..c997b99e --- /dev/null +++ b/pyhazards/model_cards/convgru_trajgru.yaml @@ -0,0 +1,40 @@ +model_name: convgru_trajgru +display_name: ConvGRU-TrajGRU +hazard: Wildfire +catalog_status: core +source_file: pyhazards/models/convgru_trajgru.py +builder_name: convgru_trajgru_builder +summary: > + A compact ConvGRU-style wildfire sequence baseline over raster histories. +description: + - > + ``convgru_trajgru`` provides a gated recurrent raster-history baseline for wildfire sequence prediction. + - > + PyHazards keeps it available as a lightweight proxy for ConvGRU and TrajGRU style wildfire forecasters. +paper: + title: "FireCastNet: Earth-as-a-Graph for Seasonal Fire Prediction" + url: https://doi.org/10.1038/s41598-025-30645-7 +tasks: + - segmentation + - regression +example: | + import torch + from pyhazards.models import build_model + + model = build_model(name="convgru_trajgru", task="segmentation", history=4, in_channels=8) + logits = model(torch.randn(2, 4, 8, 16, 16)) + print(logits.shape) +notes: + - "The smoke path uses a short wildfire raster history and returns a one-channel prediction map." 
+smoke_test: + task: segmentation + build_kwargs: + history: 4 + in_channels: 8 + input: + kind: tensor + tensor: + shape: [2, 4, 8, 16, 16] + expected_output: + kind: tensor + shape: [2, 1, 16, 16] diff --git a/pyhazards/model_cards/convlstm.yaml b/pyhazards/model_cards/convlstm.yaml new file mode 100644 index 00000000..97e4ccb8 --- /dev/null +++ b/pyhazards/model_cards/convlstm.yaml @@ -0,0 +1,40 @@ +model_name: convlstm +display_name: ConvLSTM +hazard: Wildfire +catalog_status: core +source_file: pyhazards/models/convlstm.py +builder_name: convlstm_builder +summary: > + A compact ConvLSTM baseline for wildfire spread prediction from raster histories. +description: + - > + ``convlstm`` is the standard recurrent raster-history baseline for wildfire prediction in PyHazards. + - > + It maps short spatio-temporal covariate windows to the next wildfire risk or spread-style output map. +paper: + title: "WildfireSpreadTS: A Dataset of Multi-Modal Time Series for Wildfire Spread Prediction" + url: https://openreview.net/forum?id=RgdGkPRQ03 +tasks: + - segmentation + - regression +example: | + import torch + from pyhazards.models import build_model + + model = build_model(name="convlstm", task="segmentation", history=4, in_channels=8) + logits = model(torch.randn(2, 4, 8, 16, 16)) + print(logits.shape) +notes: + - "The smoke path uses a short wildfire raster history and returns a one-channel prediction map." 
+smoke_test: + task: segmentation + build_kwargs: + history: 4 + in_channels: 8 + input: + kind: tensor + tensor: + shape: [2, 4, 8, 16, 16] + expected_output: + kind: tensor + shape: [2, 1, 16, 16] diff --git a/pyhazards/model_cards/deep_ensemble.yaml b/pyhazards/model_cards/deep_ensemble.yaml new file mode 100644 index 00000000..d886587a --- /dev/null +++ b/pyhazards/model_cards/deep_ensemble.yaml @@ -0,0 +1,40 @@ +model_name: deep_ensemble +display_name: Deep Ensemble +hazard: Wildfire +catalog_status: core +source_file: pyhazards/models/deep_ensemble.py +builder_name: deep_ensemble_builder +summary: > + An uncertainty-aware ensemble of compact wildfire segmentation members. +description: + - > + ``deep_ensemble`` averages several compact wildfire raster predictors to provide a simple uncertainty-aware baseline. + - > + It is kept public so the wildfire catalog includes at least one ensemble-style dense predictor beside single-model baselines. +paper: + title: "Simple and Scalable Predictive Uncertainty Estimation using Deep Ensembles" + url: https://proceedings.neurips.cc/paper/2017/hash/9ef2ed4b7fd2c810847ffa85bce38c92-Abstract.html +tasks: + - segmentation + - regression +example: | + import torch + from pyhazards.models import build_model + + model = build_model(name="deep_ensemble", task="segmentation", in_channels=12, ensemble_size=3) + logits = model(torch.randn(2, 12, 16, 16)) + print(logits.shape) +notes: + - "The smoke path averages several compact wildfire raster predictors into one output map." 
+smoke_test: + task: segmentation + build_kwargs: + in_channels: 12 + ensemble_size: 3 + input: + kind: tensor + tensor: + shape: [2, 12, 16, 16] + expected_output: + kind: tensor + shape: [2, 1, 16, 16] diff --git a/pyhazards/model_cards/deeplabv3p.yaml b/pyhazards/model_cards/deeplabv3p.yaml new file mode 100644 index 00000000..ec6e4a85 --- /dev/null +++ b/pyhazards/model_cards/deeplabv3p.yaml @@ -0,0 +1,39 @@ +model_name: deeplabv3p +display_name: DeepLabV3+ +hazard: Wildfire +catalog_status: core +source_file: pyhazards/models/deeplabv3p.py +builder_name: deeplabv3p_builder +summary: > + A compact DeepLabV3+-style baseline for wildfire raster prediction. +description: + - > + ``deeplabv3p`` provides an ASPP-style dense-prediction baseline for wildfire rasters within the shared PyHazards registry. + - > + It complements U-Net family baselines with a lighter context-aggregation decoder. +paper: + title: "Wildfire spreading prediction using multimodal data and deep neural network approach" + url: https://doi.org/10.1038/s41598-024-52821-x +tasks: + - segmentation + - regression +example: | + import torch + from pyhazards.models import build_model + + model = build_model(name="deeplabv3p", task="segmentation", in_channels=12) + logits = model(torch.randn(2, 12, 16, 16)) + print(logits.shape) +notes: + - "The smoke path uses a single wildfire raster snapshot and returns a one-channel prediction map." 
+smoke_test: + task: segmentation + build_kwargs: + in_channels: 12 + input: + kind: tensor + tensor: + shape: [2, 12, 16, 16] + expected_output: + kind: tensor + shape: [2, 1, 16, 16] diff --git a/pyhazards/model_cards/earthfarseer.yaml b/pyhazards/model_cards/earthfarseer.yaml new file mode 100644 index 00000000..9fe50453 --- /dev/null +++ b/pyhazards/model_cards/earthfarseer.yaml @@ -0,0 +1,40 @@ +model_name: earthfarseer +display_name: EarthFarseer +hazard: Wildfire +catalog_status: core +source_file: pyhazards/models/earthfarseer.py +builder_name: earthfarseer_builder +summary: > + A compact EarthFarseer-style wildfire sequence baseline over raster histories. +description: + - > + ``earthfarseer`` is a lightweight space-time wildfire forecaster exposed through the shared raster-history contract. + - > + It remains public because the Sim2Real-Fire benchmark family compares EarthFarseer-style sequence models against other wildfire forecasters. +paper: + title: "Sim2Real-Fire: A Multi-Modal Simulation Dataset for Forecast and Backtracking of Real-World Forest Fire" + url: https://openreview.net/forum?id=H1x4DwwRzC +tasks: + - segmentation + - regression +example: | + import torch + from pyhazards.models import build_model + + model = build_model(name="earthfarseer", task="segmentation", history=4, in_channels=8) + logits = model(torch.randn(2, 4, 8, 16, 16)) + print(logits.shape) +notes: + - "The smoke path uses a short wildfire raster history and returns a one-channel prediction map." 
+smoke_test: + task: segmentation + build_kwargs: + history: 4 + in_channels: 8 + input: + kind: tensor + tensor: + shape: [2, 4, 8, 16, 16] + expected_output: + kind: tensor + shape: [2, 1, 16, 16] diff --git a/pyhazards/model_cards/earthformer.yaml b/pyhazards/model_cards/earthformer.yaml new file mode 100644 index 00000000..62ab409e --- /dev/null +++ b/pyhazards/model_cards/earthformer.yaml @@ -0,0 +1,40 @@ +model_name: earthformer +display_name: EarthFormer +hazard: Wildfire +catalog_status: core +source_file: pyhazards/models/earthformer.py +builder_name: earthformer_builder +summary: > + A compact EarthFormer-style wildfire sequence baseline over raster histories. +description: + - > + ``earthformer`` gives the wildfire catalog a lightweight space-time transformer-style sequence model. + - > + PyHazards keeps it aligned with the shared wildfire raster-history contract used for spread-style benchmarking. +paper: + title: "Sim2Real-Fire: A Multi-Modal Simulation Dataset for Forecast and Backtracking of Real-World Forest Fire" + url: https://openreview.net/forum?id=H1x4DwwRzC +tasks: + - segmentation + - regression +example: | + import torch + from pyhazards.models import build_model + + model = build_model(name="earthformer", task="segmentation", history=4, in_channels=8) + logits = model(torch.randn(2, 4, 8, 16, 16)) + print(logits.shape) +notes: + - "The smoke path uses a short wildfire raster history and returns a one-channel prediction map." 
+smoke_test: + task: segmentation + build_kwargs: + history: 4 + in_channels: 8 + input: + kind: tensor + tensor: + shape: [2, 4, 8, 16, 16] + expected_output: + kind: tensor + shape: [2, 1, 16, 16] diff --git a/pyhazards/model_cards/firepred.yaml b/pyhazards/model_cards/firepred.yaml new file mode 100644 index 00000000..739598b1 --- /dev/null +++ b/pyhazards/model_cards/firepred.yaml @@ -0,0 +1,40 @@ +model_name: firepred +display_name: FirePred +hazard: Wildfire +catalog_status: core +source_file: pyhazards/models/firepred.py +builder_name: firepred_builder +summary: > + A hybrid multi-temporal CNN wildfire predictor inspired by FirePred. +description: + - > + ``firepred`` mixes recent temporal context, averaged daily context, and a latest-step snapshot branch for wildfire prediction. + - > + PyHazards exposes it as a lightweight approximation of the wildfire-native FirePred modeling idea. +paper: + title: "FirePred: A hybrid multi-temporal convolutional network for wildfire spread prediction" + url: https://doi.org/10.1016/j.ecoinf.2023.102282 +tasks: + - segmentation + - regression +example: | + import torch + from pyhazards.models import build_model + + model = build_model(name="firepred", task="segmentation", history=5, in_channels=8) + logits = model(torch.randn(2, 5, 8, 16, 16)) + print(logits.shape) +notes: + - "The smoke path uses a five-step wildfire raster history and returns a one-channel prediction map." 
+smoke_test: + task: segmentation + build_kwargs: + history: 5 + in_channels: 8 + input: + kind: tensor + tensor: + shape: [2, 5, 8, 16, 16] + expected_output: + kind: tensor + shape: [2, 1, 16, 16] diff --git a/pyhazards/model_cards/lightgbm.yaml b/pyhazards/model_cards/lightgbm.yaml new file mode 100644 index 00000000..09fde19e --- /dev/null +++ b/pyhazards/model_cards/lightgbm.yaml @@ -0,0 +1,40 @@ +model_name: lightgbm +display_name: LightGBM +hazard: Wildfire +catalog_status: core +source_file: pyhazards/models/lightgbm.py +builder_name: lightgbm_builder +summary: > + A LightGBM wildfire occurrence baseline for fast tabular classification experiments. +description: + - > + ``lightgbm`` is a boosted-tree baseline for wildfire occurrence and danger classification on flattened covariates. + - > + PyHazards keeps it alongside ``xgboost`` so the wildfire catalog covers both major tree-boosting baselines. +paper: + title: "LightGBM: A Highly Efficient Gradient Boosting Decision Tree" + url: https://proceedings.neurips.cc/paper_files/paper/2017/hash/6449f44a102fde848669bdd9eb6b76fa-Abstract.html +tasks: + - classification +example: | + import torch + from pyhazards.models import build_model + + model = build_model(name="lightgbm", task="classification", num_boost_round=64) + probs = model(torch.randn(4, 8)) + print(probs.shape) +notes: + - "The smoke path uses flattened wildfire covariates and returns two-class probabilities." 
+smoke_test: + task: classification + build_kwargs: + num_leaves: 31 + learning_rate: 0.1 + num_boost_round: 64 + input: + kind: tensor + tensor: + shape: [4, 8] + expected_output: + kind: tensor + shape: [4, 2] diff --git a/pyhazards/model_cards/logistic_regression.yaml b/pyhazards/model_cards/logistic_regression.yaml new file mode 100644 index 00000000..42576d92 --- /dev/null +++ b/pyhazards/model_cards/logistic_regression.yaml @@ -0,0 +1,38 @@ +model_name: logistic_regression +display_name: Logistic Regression +hazard: Wildfire +catalog_status: core +source_file: pyhazards/models/logistic_regression.py +builder_name: logistic_regression_builder +summary: > + A classical logistic-regression baseline for wildfire occurrence probability from tabular covariates. +description: + - > + ``logistic_regression`` is a lightweight classical baseline for wildfire occurrence and danger classification. + - > + PyHazards exposes it through the shared wildfire benchmark family for direct comparison against tree and raster deep-learning models. +paper: + title: "Next Day Wildfire Spread: A Machine Learning Dataset to Predict Wildfire Spreading From Remote-Sensing Data" + url: https://doi.org/10.1109/TGRS.2022.3192974 +tasks: + - classification +example: | + import torch + from pyhazards.models import build_model + + model = build_model(name="logistic_regression", task="classification", max_iter=200) + probs = model(torch.randn(4, 8)) + print(probs.shape) +notes: + - "The smoke path uses flattened wildfire covariates and returns two-class probabilities." 
+smoke_test:
+  task: classification
+  build_kwargs:
+    max_iter: 200
+  input:
+    kind: tensor
+    tensor:
+      shape: [4, 8]
+  expected_output:
+    kind: tensor
+    shape: [4, 2]
diff --git a/pyhazards/model_cards/mau.yaml b/pyhazards/model_cards/mau.yaml
new file mode 100644
index 00000000..02ca6382
--- /dev/null
+++ b/pyhazards/model_cards/mau.yaml
@@ -0,0 +1,40 @@
+model_name: mau
+display_name: MAU
+hazard: Wildfire
+catalog_status: core
+source_file: pyhazards/models/mau.py
+builder_name: mau_builder
+summary: >
+  A compact MAU-style spatio-temporal wildfire baseline over raster histories.
+description:
+  - >
+    ``mau`` is a recurrent spatio-temporal wildfire baseline inspired by motion-aware sequence modeling.
+  - >
+    PyHazards keeps it public because the Sim2Real-Fire benchmark family compares MAU-style models against other wildfire sequence forecasters.
+paper:
+  title: "Sim2Real-Fire: A Multi-Modal Simulation Dataset for Forecast and Backtracking of Real-World Forest Fire"
+  url: https://openreview.net/forum?id=H1x4DwwRzC
+tasks:
+  - segmentation
+  - regression
+example: |
+  import torch
+  from pyhazards.models import build_model
+
+  model = build_model(name="mau", task="segmentation", history=4, in_channels=8)
+  logits = model(torch.randn(2, 4, 8, 16, 16))
+  print(logits.shape)
+notes:
+  - "The smoke path uses a short wildfire raster history and returns a one-channel prediction map."
+smoke_test: + task: segmentation + build_kwargs: + history: 4 + in_channels: 8 + input: + kind: tensor + tensor: + shape: [2, 4, 8, 16, 16] + expected_output: + kind: tensor + shape: [2, 1, 16, 16] diff --git a/pyhazards/model_cards/predrnn_v2.yaml b/pyhazards/model_cards/predrnn_v2.yaml new file mode 100644 index 00000000..87739763 --- /dev/null +++ b/pyhazards/model_cards/predrnn_v2.yaml @@ -0,0 +1,40 @@ +model_name: predrnn_v2 +display_name: PredRNN-v2 +hazard: Wildfire +catalog_status: core +source_file: pyhazards/models/predrnn_v2.py +builder_name: predrnn_v2_builder +summary: > + A compact PredRNN-v2-style wildfire baseline over raster history windows. +description: + - > + ``predrnn_v2`` is a recurrent raster-history baseline for wildfire sequence forecasting. + - > + It gives the PyHazards wildfire suite a lightweight proxy for memory-augmented next-frame wildfire prediction. +paper: + title: "Sim2Real-Fire: A Multi-Modal Simulation Dataset for Forecast and Backtracking of Real-World Forest Fire" + url: https://openreview.net/forum?id=H1x4DwwRzC +tasks: + - segmentation + - regression +example: | + import torch + from pyhazards.models import build_model + + model = build_model(name="predrnn_v2", task="segmentation", history=4, in_channels=8) + logits = model(torch.randn(2, 4, 8, 16, 16)) + print(logits.shape) +notes: + - "The smoke path uses a short wildfire raster history and returns a one-channel prediction map." 
+smoke_test: + task: segmentation + build_kwargs: + history: 4 + in_channels: 8 + input: + kind: tensor + tensor: + shape: [2, 4, 8, 16, 16] + expected_output: + kind: tensor + shape: [2, 1, 16, 16] diff --git a/pyhazards/model_cards/rainformer.yaml b/pyhazards/model_cards/rainformer.yaml new file mode 100644 index 00000000..0282eb53 --- /dev/null +++ b/pyhazards/model_cards/rainformer.yaml @@ -0,0 +1,40 @@ +model_name: rainformer +display_name: Rainformer +hazard: Wildfire +catalog_status: core +source_file: pyhazards/models/rainformer.py +builder_name: rainformer_builder +summary: > + A compact Rainformer-style wildfire sequence baseline over raster histories. +description: + - > + ``rainformer`` adapts sequence-to-map transformer-style modeling to short wildfire raster histories. + - > + It stays lightweight so the PyHazards wildfire catalog can expose a modern temporal attention baseline without external dependencies. +paper: + title: "Sim2Real-Fire: A Multi-Modal Simulation Dataset for Forecast and Backtracking of Real-World Forest Fire" + url: https://openreview.net/forum?id=H1x4DwwRzC +tasks: + - segmentation + - regression +example: | + import torch + from pyhazards.models import build_model + + model = build_model(name="rainformer", task="segmentation", history=4, in_channels=8) + logits = model(torch.randn(2, 4, 8, 16, 16)) + print(logits.shape) +notes: + - "The smoke path uses a short wildfire raster history and returns a one-channel prediction map." 
+smoke_test: + task: segmentation + build_kwargs: + history: 4 + in_channels: 8 + input: + kind: tensor + tensor: + shape: [2, 4, 8, 16, 16] + expected_output: + kind: tensor + shape: [2, 1, 16, 16] diff --git a/pyhazards/model_cards/random_forest.yaml b/pyhazards/model_cards/random_forest.yaml new file mode 100644 index 00000000..64cfe157 --- /dev/null +++ b/pyhazards/model_cards/random_forest.yaml @@ -0,0 +1,39 @@ +model_name: random_forest +display_name: Random Forest +hazard: Wildfire +catalog_status: core +source_file: pyhazards/models/random_forest.py +builder_name: random_forest_builder +summary: > + A random-forest wildfire occurrence baseline over tabular predictors. +description: + - > + ``random_forest`` is a classical ensemble baseline for wildfire occurrence and danger classification. + - > + It gives the wildfire catalog a non-neural tree baseline that remains easy to compare against the deep raster families. +paper: + title: "WildfireDB: An Open-Source Dataset Connecting Wildfire Occurrence with Relevant Determinants" + url: https://datasets-benchmarks-proceedings.neurips.cc/paper_files/paper/2021/hash/3fe94a002317b5f9259f82690aeea4cd-Abstract-round2.html +tasks: + - classification +example: | + import torch + from pyhazards.models import build_model + + model = build_model(name="random_forest", task="classification", n_estimators=64) + probs = model(torch.randn(4, 8)) + print(probs.shape) +notes: + - "The smoke path uses flattened wildfire covariates and returns two-class probabilities." 
+smoke_test: + task: classification + build_kwargs: + n_estimators: 64 + max_depth: 6 + input: + kind: tensor + tensor: + shape: [4, 8] + expected_output: + kind: tensor + shape: [4, 2] diff --git a/pyhazards/model_cards/resnet18_unet.yaml b/pyhazards/model_cards/resnet18_unet.yaml new file mode 100644 index 00000000..19a17986 --- /dev/null +++ b/pyhazards/model_cards/resnet18_unet.yaml @@ -0,0 +1,39 @@ +model_name: resnet18_unet +display_name: ResNet18 U-Net +hazard: Wildfire +catalog_status: core +source_file: pyhazards/models/resnet18_unet.py +builder_name: resnet18_unet_builder +summary: > + A residual U-Net wildfire baseline for raster spread prediction. +description: + - > + ``resnet18_unet`` keeps the U-Net decoding path while using residual feature extraction blocks for wildfire rasters. + - > + It matches the wildfire benchmark family used in WildfireSpreadTS-style sequence-to-map comparisons. +paper: + title: "WildfireSpreadTS: A Dataset of Multi-Modal Time Series for Wildfire Spread Prediction" + url: https://openreview.net/forum?id=RgdGkPRQ03 +tasks: + - segmentation + - regression +example: | + import torch + from pyhazards.models import build_model + + model = build_model(name="resnet18_unet", task="segmentation", in_channels=12) + logits = model(torch.randn(2, 12, 16, 16)) + print(logits.shape) +notes: + - "The smoke path uses a single wildfire raster snapshot and returns a one-channel prediction map." 
+smoke_test: + task: segmentation + build_kwargs: + in_channels: 12 + input: + kind: tensor + tensor: + shape: [2, 12, 16, 16] + expected_output: + kind: tensor + shape: [2, 1, 16, 16] diff --git a/pyhazards/model_cards/segformer.yaml b/pyhazards/model_cards/segformer.yaml new file mode 100644 index 00000000..a8d94fc1 --- /dev/null +++ b/pyhazards/model_cards/segformer.yaml @@ -0,0 +1,40 @@ +model_name: segformer +display_name: SegFormer +hazard: Wildfire +catalog_status: core +source_file: pyhazards/models/segformer.py +builder_name: segformer_builder +summary: > + A compact SegFormer-style wildfire sequence segmenter. +description: + - > + ``segformer`` is a transformer-style sequence-to-map wildfire baseline over short raster histories. + - > + PyHazards keeps it public as a modern dense-prediction alternative to recurrent wildfire models. +paper: + title: "WildfireSpreadTS+: A Multi-Modal Dataset for Wildfire Spread Prediction with History Context" + url: https://arxiv.org/abs/2502.12003 +tasks: + - segmentation + - regression +example: | + import torch + from pyhazards.models import build_model + + model = build_model(name="segformer", task="segmentation", history=4, in_channels=8) + logits = model(torch.randn(2, 4, 8, 16, 16)) + print(logits.shape) +notes: + - "The smoke path uses a short wildfire raster history and returns a one-channel prediction map." 
+smoke_test: + task: segmentation + build_kwargs: + history: 4 + in_channels: 8 + input: + kind: tensor + tensor: + shape: [2, 4, 8, 16, 16] + expected_output: + kind: tensor + shape: [2, 1, 16, 16] diff --git a/pyhazards/model_cards/swin_unet.yaml b/pyhazards/model_cards/swin_unet.yaml new file mode 100644 index 00000000..1246cf5a --- /dev/null +++ b/pyhazards/model_cards/swin_unet.yaml @@ -0,0 +1,41 @@ +model_name: swin_unet +display_name: Swin-Unet +hazard: Wildfire +catalog_status: core +source_file: pyhazards/models/swin_unet.py +builder_name: swin_unet_builder +summary: > + A compact Swin-Unet wildfire sequence segmenter. +description: + - > + ``swin_unet`` combines a short wildfire raster history with a windowed encoder-decoder segmentation backbone. + - > + It serves as the PyHazards entry for Swin-based transformer segmentation within the wildfire benchmark family. +paper: + title: "WildfireSpreadTS+: A Multi-Modal Dataset for Wildfire Spread Prediction with History Context" + url: https://arxiv.org/abs/2502.12003 +tasks: + - segmentation + - regression +example: | + import torch + from pyhazards.models import build_model + + model = build_model(name="swin_unet", task="segmentation", history=4, in_channels=8, window_size=4) + logits = model(torch.randn(2, 4, 8, 16, 16)) + print(logits.shape) +notes: + - "The smoke path uses a short wildfire raster history and returns a one-channel prediction map." 
+smoke_test: + task: segmentation + build_kwargs: + history: 4 + in_channels: 8 + window_size: 4 + input: + kind: tensor + tensor: + shape: [2, 4, 8, 16, 16] + expected_output: + kind: tensor + shape: [2, 1, 16, 16] diff --git a/pyhazards/model_cards/swinlstm.yaml b/pyhazards/model_cards/swinlstm.yaml new file mode 100644 index 00000000..5b3680fd --- /dev/null +++ b/pyhazards/model_cards/swinlstm.yaml @@ -0,0 +1,41 @@ +model_name: swinlstm +display_name: SwinLSTM +hazard: Wildfire +catalog_status: core +source_file: pyhazards/models/swinlstm.py +builder_name: swinlstm_builder +summary: > + A compact SwinLSTM-style wildfire sequence baseline over raster histories. +description: + - > + ``swinlstm`` combines a short wildfire raster history with a windowed recurrent mixing block. + - > + It serves as the PyHazards proxy for SwinLSTM-style spatio-temporal wildfire forecasting baselines. +paper: + title: "Sim2Real-Fire: A Multi-Modal Simulation Dataset for Forecast and Backtracking of Real-World Forest Fire" + url: https://openreview.net/forum?id=H1x4DwwRzC +tasks: + - segmentation + - regression +example: | + import torch + from pyhazards.models import build_model + + model = build_model(name="swinlstm", task="segmentation", history=4, in_channels=8, window_size=4) + logits = model(torch.randn(2, 4, 8, 16, 16)) + print(logits.shape) +notes: + - "The smoke path uses a short wildfire raster history and returns a one-channel prediction map." 
+smoke_test: + task: segmentation + build_kwargs: + history: 4 + in_channels: 8 + window_size: 4 + input: + kind: tensor + tensor: + shape: [2, 4, 8, 16, 16] + expected_output: + kind: tensor + shape: [2, 1, 16, 16] diff --git a/pyhazards/model_cards/tcn.yaml b/pyhazards/model_cards/tcn.yaml new file mode 100644 index 00000000..209f8ce5 --- /dev/null +++ b/pyhazards/model_cards/tcn.yaml @@ -0,0 +1,40 @@ +model_name: tcn +display_name: TCN +hazard: Wildfire +catalog_status: core +source_file: pyhazards/models/tcn.py +builder_name: tcn_builder +summary: > + A compact temporal-convolution wildfire baseline over raster histories. +description: + - > + ``tcn`` applies temporal convolution over short wildfire raster histories before decoding a dense output map. + - > + It gives the wildfire catalog a lightweight non-recurrent temporal baseline beside ConvLSTM-style models. +paper: + title: "An Empirical Evaluation of Generic Convolutional and Recurrent Networks for Sequence Modeling" + url: https://arxiv.org/abs/1803.01271 +tasks: + - segmentation + - regression +example: | + import torch + from pyhazards.models import build_model + + model = build_model(name="tcn", task="segmentation", history=4, in_channels=8) + logits = model(torch.randn(2, 4, 8, 16, 16)) + print(logits.shape) +notes: + - "The smoke path uses a short wildfire raster history and returns a one-channel prediction map." 
+smoke_test: + task: segmentation + build_kwargs: + history: 4 + in_channels: 8 + input: + kind: tensor + tensor: + shape: [2, 4, 8, 16, 16] + expected_output: + kind: tensor + shape: [2, 1, 16, 16] diff --git a/pyhazards/model_cards/ts_satfire.yaml b/pyhazards/model_cards/ts_satfire.yaml new file mode 100644 index 00000000..7e9485fe --- /dev/null +++ b/pyhazards/model_cards/ts_satfire.yaml @@ -0,0 +1,40 @@ +model_name: ts_satfire +display_name: TS-SatFire +hazard: Wildfire +catalog_status: core +source_file: pyhazards/models/ts_satfire.py +builder_name: ts_satfire_builder +summary: > + A spatio-temporal wildfire predictor inspired by TS-SatFire. +description: + - > + ``ts_satfire`` fuses a short multi-step wildfire raster sequence with temporal attention before dense decoding. + - > + It gives the PyHazards wildfire catalog a lightweight proxy for the TS-SatFire progression-prediction family. +paper: + title: "TS-SatFire: A Benchmark Dataset and Deep Learning Models for Forest Fire Progression Prediction" + url: https://doi.org/10.1038/s41597-025-06271-3 +tasks: + - segmentation + - regression +example: | + import torch + from pyhazards.models import build_model + + model = build_model(name="ts_satfire", task="segmentation", history=5, in_channels=8) + logits = model(torch.randn(2, 5, 8, 16, 16)) + print(logits.shape) +notes: + - "The smoke path uses a five-step wildfire raster history and returns a one-channel prediction map." 
+smoke_test: + task: segmentation + build_kwargs: + history: 5 + in_channels: 8 + input: + kind: tensor + tensor: + shape: [2, 5, 8, 16, 16] + expected_output: + kind: tensor + shape: [2, 1, 16, 16] diff --git a/pyhazards/model_cards/unet.yaml b/pyhazards/model_cards/unet.yaml new file mode 100644 index 00000000..365a7444 --- /dev/null +++ b/pyhazards/model_cards/unet.yaml @@ -0,0 +1,39 @@ +model_name: unet +display_name: U-Net +hazard: Wildfire +catalog_status: core +source_file: pyhazards/models/unet.py +builder_name: unet_builder +summary: > + A compact U-Net baseline for wildfire raster prediction. +description: + - > + ``unet`` is the canonical encoder-decoder raster baseline for wildfire spread-style prediction in PyHazards. + - > + It provides the simplest dense prediction reference point before moving to recurrent or transformer-style wildfire models. +paper: + title: "WildfireSpreadTS: A Dataset of Multi-Modal Time Series for Wildfire Spread Prediction" + url: https://openreview.net/forum?id=RgdGkPRQ03 +tasks: + - segmentation + - regression +example: | + import torch + from pyhazards.models import build_model + + model = build_model(name="unet", task="segmentation", in_channels=12) + logits = model(torch.randn(2, 12, 16, 16)) + print(logits.shape) +notes: + - "The smoke path uses a single wildfire raster snapshot and returns a one-channel prediction map." +smoke_test: + task: segmentation + build_kwargs: + in_channels: 12 + input: + kind: tensor + tensor: + shape: [2, 12, 16, 16] + expected_output: + kind: tensor + shape: [2, 1, 16, 16] diff --git a/pyhazards/model_cards/utae.yaml b/pyhazards/model_cards/utae.yaml new file mode 100644 index 00000000..6c8fc241 --- /dev/null +++ b/pyhazards/model_cards/utae.yaml @@ -0,0 +1,40 @@ +model_name: utae +display_name: U-TAE +hazard: Wildfire +catalog_status: core +source_file: pyhazards/models/utae.py +builder_name: utae_builder +summary: > + A compact U-TAE wildfire baseline over raster history windows. 
+description: + - > + ``utae`` is a sequence-to-map wildfire baseline that fuses short temporal windows before dense decoding. + - > + It stays aligned with the WildfireSpreadTS-style benchmark family used for temporal wildfire spread prediction. +paper: + title: "WildfireSpreadTS: A Dataset of Multi-Modal Time Series for Wildfire Spread Prediction" + url: https://openreview.net/forum?id=RgdGkPRQ03 +tasks: + - segmentation + - regression +example: | + import torch + from pyhazards.models import build_model + + model = build_model(name="utae", task="segmentation", history=4, in_channels=8) + logits = model(torch.randn(2, 4, 8, 16, 16)) + print(logits.shape) +notes: + - "The smoke path uses a short wildfire raster history and returns a one-channel prediction map." +smoke_test: + task: segmentation + build_kwargs: + history: 4 + in_channels: 8 + input: + kind: tensor + tensor: + shape: [2, 4, 8, 16, 16] + expected_output: + kind: tensor + shape: [2, 1, 16, 16] diff --git a/pyhazards/model_cards/vit_segmenter.yaml b/pyhazards/model_cards/vit_segmenter.yaml new file mode 100644 index 00000000..76dbecff --- /dev/null +++ b/pyhazards/model_cards/vit_segmenter.yaml @@ -0,0 +1,40 @@ +model_name: vit_segmenter +display_name: ViT Segmenter +hazard: Wildfire +catalog_status: core +source_file: pyhazards/models/vit_segmenter.py +builder_name: vit_segmenter_builder +summary: > + A compact ViT-style wildfire sequence segmenter over raster histories. +description: + - > + ``vit_segmenter`` gives the wildfire catalog a lightweight vision-transformer segmentation baseline over short temporal windows. + - > + It complements SegFormer and Swin-Unet with a simpler transformer-style dense-prediction proxy. 
+paper: + title: "TS-SatFire: A Benchmark Dataset and Deep Learning Models for Forest Fire Progression Prediction" + url: https://doi.org/10.1038/s41597-025-06271-3 +tasks: + - segmentation + - regression +example: | + import torch + from pyhazards.models import build_model + + model = build_model(name="vit_segmenter", task="segmentation", history=4, in_channels=8) + logits = model(torch.randn(2, 4, 8, 16, 16)) + print(logits.shape) +notes: + - "The smoke path uses a short wildfire raster history and returns a one-channel prediction map." +smoke_test: + task: segmentation + build_kwargs: + history: 4 + in_channels: 8 + input: + kind: tensor + tensor: + shape: [2, 4, 8, 16, 16] + expected_output: + kind: tensor + shape: [2, 1, 16, 16] diff --git a/pyhazards/model_cards/wildfire_forecasting.yaml b/pyhazards/model_cards/wildfire_forecasting.yaml deleted file mode 100644 index 258dcd81..00000000 --- a/pyhazards/model_cards/wildfire_forecasting.yaml +++ /dev/null @@ -1,53 +0,0 @@ -model_name: wildfire_forecasting -display_name: Wildfire Forecasting -hazard: Wildfire -catalog_status: core -source_file: pyhazards/models/wildfire_forecasting.py -builder_name: wildfire_forecasting_builder -summary: > - A sequence forecasting baseline for next-window wildfire activity across weekly count features. -description: - - > - ``wildfire_forecasting`` is a compact GRU-attention forecaster for weekly wildfire - activity windows. - - > - The PyHazards implementation targets smoke-testable next-window size-group prediction - through the shared wildfire benchmark flow. 
-paper: - title: Wildfire Danger Prediction and Understanding with Deep Learning - url: https://doi.org/10.1029/2022GL099368 - repo_url: https://github.com/Orion-AI-Lab/wildfire_forecasting -tasks: - - forecasting - - regression -example: | - import torch - from pyhazards.models import build_model - - model = build_model( - name="wildfire_forecasting", - task="forecasting", - input_dim=7, - output_dim=5, - lookback=12, - ) - preds = model(torch.randn(2, 12, 7)) - print(preds.shape) -notes: - - "This public adapter is exercised on the weekly wildfire smoke benchmark." -smoke_test: - task: forecasting - build_kwargs: - input_dim: 7 - hidden_dim: 32 - output_dim: 5 - lookback: 12 - num_layers: 2 - dropout: 0.0 - input: - kind: tensor - tensor: - shape: [2, 12, 7] - expected_output: - kind: tensor - shape: [2, 5] diff --git a/pyhazards/model_cards/wildfire_mamba.yaml b/pyhazards/model_cards/wildfire_mamba.yaml deleted file mode 100644 index 6762874f..00000000 --- a/pyhazards/model_cards/wildfire_mamba.yaml +++ /dev/null @@ -1,58 +0,0 @@ -model_name: wildfire_mamba -display_name: Wildfire Mamba -hazard: Wildfire -include_in_public_catalog: false -catalog_status: hidden -source_file: pyhazards/models/wildfire_mamba.py -builder_name: wildfire_mamba_builder -summary: > - A Mamba-inspired spatio-temporal wildfire model that mixes county-level temporal - encoders with a lightweight graph convolution over spatial adjacency. -description: - - > - ``wildfire_mamba`` models county-day ERA5 sequences by combining selective - state-space temporal blocks with a simple spatial graph layer. - - > - The PyHazards implementation targets binary next-day per-county wildfire - classification and supports an optional count head for multi-task extensions. 
-paper: - title: "Mamba: Linear-Time Sequence Modeling with Selective State Spaces" - url: https://arxiv.org/abs/2312.00752 -tasks: - - classification -example: | - import torch - from pyhazards.models import build_model - - model = build_model( - name="wildfire_mamba", - task="classification", - in_dim=3, - num_counties=4, - past_days=5, - ) - - x = torch.randn(2, 5, 4, 3) - logits = model(x) - print(logits.shape) -notes: - - "The CI smoke test validates the default binary-classification path on synthetic data." -smoke_test: - task: classification - build_kwargs: - in_dim: 3 - num_counties: 4 - past_days: 5 - hidden_dim: 32 - gcn_hidden: 16 - mamba_layers: 2 - state_dim: 16 - conv_kernel: 3 - dropout: 0.0 - input: - kind: tensor - tensor: - shape: [2, 5, 4, 3] - expected_output: - kind: tensor - shape: [2, 4] diff --git a/pyhazards/model_cards/xgboost.yaml b/pyhazards/model_cards/xgboost.yaml new file mode 100644 index 00000000..f6d61190 --- /dev/null +++ b/pyhazards/model_cards/xgboost.yaml @@ -0,0 +1,40 @@ +model_name: xgboost +display_name: XGBoost +hazard: Wildfire +catalog_status: core +source_file: pyhazards/models/xgboost.py +builder_name: xgboost_builder +summary: > + A boosted-tree wildfire occurrence baseline with a binary logistic objective. +description: + - > + ``xgboost`` is a strong tabular baseline for wildfire occurrence and danger prediction. + - > + PyHazards keeps it public because gradient-boosted trees remain a standard reference point for wildfire risk forecasting. +paper: + title: "Wildfire Danger Prediction and Understanding With Deep Learning" + url: https://doi.org/10.1029/2022GL099368 +tasks: + - classification +example: | + import torch + from pyhazards.models import build_model + + model = build_model(name="xgboost", task="classification", num_boost_round=64) + probs = model(torch.randn(4, 8)) + print(probs.shape) +notes: + - "The smoke path uses flattened wildfire covariates and returns two-class probabilities." 
+smoke_test: + task: classification + build_kwargs: + max_depth: 4 + eta: 0.1 + num_boost_round: 64 + input: + kind: tensor + tensor: + shape: [4, 8] + expected_output: + kind: tensor + shape: [4, 2] diff --git a/pyhazards/model_catalog.py b/pyhazards/model_catalog.py index 06c32178..3dcf1ad9 100644 --- a/pyhazards/model_catalog.py +++ b/pyhazards/model_catalog.py @@ -7,7 +7,30 @@ import torch import yaml -from pydantic import BaseModel, Field, model_validator + +try: + from pydantic import BaseModel, Field, model_validator + + def _after_model_validator(func): + return model_validator(mode="after")(func) + + def _model_validate(model_cls, raw): + return model_cls.model_validate(raw) + +except ImportError: + from pydantic import BaseModel, Field, root_validator + + def _after_model_validator(func): + @root_validator(skip_on_failure=True, allow_reuse=True) + def _wrapped(cls, values): + instance = cls.construct(**values) + func(instance) + return values + + return _wrapped + + def _model_validate(model_cls, raw): + return model_cls.parse_obj(raw) REPO_ROOT = Path(__file__).resolve().parent.parent @@ -59,8 +82,9 @@ HAZARD_SECTION_SUMMARIES = { "Wildfire": ( - "Wildfire models cover danger forecasting, weekly activity forecasting, " - "and spread prediction under the shared wildfire benchmark family." + "Wildfire models cover tabular occurrence baselines, raster spread " + "predictors, and spatio-temporal forecasting variants under the shared " + "wildfire benchmark family." 
), "Earthquake": ( "Earthquake models span phase picking and dense-grid forecasting, with " @@ -128,7 +152,7 @@ class SmokeInputSpec(BaseModel): mapping: Dict[str, SmokeTensorSpec] = Field(default_factory=dict) kwargs: Dict[str, SmokeTensorSpec] = Field(default_factory=dict) - @model_validator(mode="after") + @_after_model_validator def validate_payload(self) -> "SmokeInputSpec": kind = self.kind.lower() if kind == "tensor" and self.tensor is None: @@ -147,7 +171,7 @@ class SmokeOutputSpec(BaseModel): shape: Optional[List[int]] = None shapes: List[List[int]] = Field(default_factory=list) - @model_validator(mode="after") + @_after_model_validator def validate_payload(self) -> "SmokeOutputSpec": kind = self.kind.lower() if kind == "tensor" and self.shape is None: @@ -186,7 +210,7 @@ class ModelCard(BaseModel): doc_slug: Optional[str] = None smoke_test: SmokeTestSpec - @model_validator(mode="after") + @_after_model_validator def validate_catalog_metadata(self) -> "ModelCard": if self.catalog_status == "hidden" and self.include_in_public_catalog: raise ValueError("hidden catalog_status requires include_in_public_catalog: false") @@ -218,7 +242,7 @@ def load_model_cards(cards_dir: Path = MODEL_CARDS_DIR) -> List[ModelCard]: seen_registry_names: Set[str] = set() for path in sorted(cards_dir.glob("*.y*ml")): raw = yaml.safe_load(path.read_text(encoding="utf-8")) or {} - card = ModelCard.model_validate(raw) + card = _model_validate(ModelCard, raw) if path.stem != card.model_name: raise ValueError( "Model card filename must match model_name: " diff --git a/pyhazards/models/__init__.py b/pyhazards/models/__init__.py index ea923edc..ba5fac62 100644 --- a/pyhazards/models/__init__.py +++ b/pyhazards/models/__init__.py @@ -5,6 +5,7 @@ from .eqnet import EQNet, eqnet_builder from .eqtransformer import EQTransformer, eqtransformer_builder from .firecastnet import FireCastNet, firecastnet_builder +from .firepred import FirePred, firepred_builder from .floodcast import FloodCast, 
floodcast_builder from .forefire import ForeFireAdapter, forefire_builder from .fourcastnet_tc import FourCastNetTC, fourcastnet_tc_builder @@ -35,6 +36,29 @@ from .wildfire_aspp import TverskyLoss, WildfireASPP, wildfire_aspp_builder from .wildfire_fpa import WildfireFPA, wildfire_fpa_builder from .wildfire_mamba import WildfireMamba, wildfire_mamba_builder +from .lightgbm import LightGBMModel, lightgbm_builder +from .logistic_regression import LogisticRegressionModel, logistic_regression_builder +from .random_forest import RandomForestModel, random_forest_builder +from .xgboost import XGBoostModel, xgboost_builder +from .unet import TinyUNet, unet_builder +from .resnet18_unet import TinyResNet18UNet, resnet18_unet_builder +from .attention_unet import TinyAttentionUNet, attention_unet_builder +from .deeplabv3p import TinyDeepLabV3P, deeplabv3p_builder +from .convlstm import TinyConvLSTM, convlstm_builder +from .mau import TinyMAU, mau_builder +from .predrnn_v2 import TinyPredRNNv2, predrnn_v2_builder +from .rainformer import TinyRainformer, rainformer_builder +from .earthformer import TinyEarthFormer, earthformer_builder +from .swinlstm import TinySwinLSTM, swinlstm_builder +from .earthfarseer import TinyEarthFarseer, earthfarseer_builder +from .convgru_trajgru import TinyConvGRTrajGRU, convgru_trajgru_builder +from .tcn import TinyTCN, tcn_builder +from .utae import TinyUTAE, utae_builder +from .segformer import TinySegFormer, segformer_builder +from .swin_unet import TinySwinUNet, swin_unet_builder +from .vit_segmenter import TinyViTSegmenter, vit_segmenter_builder +from .deep_ensemble import DeepEnsemble, deep_ensemble_builder +from .ts_satfire import TSSatFire, ts_satfire_builder from .wildfirespreadts import WildfireSpreadTS, wildfirespreadts_builder from .wrf_sfire import WRFSFireAdapter, wrf_sfire_builder @@ -57,6 +81,54 @@ "eqtransformer_builder", "FireCastNet", "firecastnet_builder", + "FirePred", + "firepred_builder", + "LightGBMModel", + 
"lightgbm_builder", + "LogisticRegressionModel", + "logistic_regression_builder", + "RandomForestModel", + "random_forest_builder", + "XGBoostModel", + "xgboost_builder", + "TinyUNet", + "unet_builder", + "TinyResNet18UNet", + "resnet18_unet_builder", + "TinyAttentionUNet", + "attention_unet_builder", + "TinyDeepLabV3P", + "deeplabv3p_builder", + "TinyConvLSTM", + "convlstm_builder", + "TinyMAU", + "mau_builder", + "TinyPredRNNv2", + "predrnn_v2_builder", + "TinyRainformer", + "rainformer_builder", + "TinyEarthFormer", + "earthformer_builder", + "TinySwinLSTM", + "swinlstm_builder", + "TinyEarthFarseer", + "earthfarseer_builder", + "TinyConvGRTrajGRU", + "convgru_trajgru_builder", + "TinyTCN", + "tcn_builder", + "TinyUTAE", + "utae_builder", + "TinySegFormer", + "segformer_builder", + "TinySwinUNet", + "swin_unet_builder", + "TinyViTSegmenter", + "vit_segmenter_builder", + "DeepEnsemble", + "deep_ensemble_builder", + "TSSatFire", + "ts_satfire_builder", "FloodCast", "floodcast_builder", "ForeFireAdapter", @@ -237,6 +309,163 @@ }, ) +register_model( + "logistic_regression", + logistic_regression_builder, + defaults={ + "max_iter": 500, + }, +) + +register_model( + "random_forest", + random_forest_builder, + defaults={ + "n_estimators": 500, + "max_depth": None, + }, +) + +register_model( + "xgboost", + xgboost_builder, + defaults={ + "max_depth": 8, + "eta": 0.05, + "num_boost_round": 800, + }, +) + +register_model( + "lightgbm", + lightgbm_builder, + defaults={ + "num_leaves": 63, + "learning_rate": 0.05, + "num_boost_round": 800, + }, +) + +register_model( + "unet", + unet_builder, + defaults={"in_channels": 1, "base_channels": 16, "out_dim": 1}, +) + +register_model( + "resnet18_unet", + resnet18_unet_builder, + defaults={"in_channels": 1, "base_channels": 16, "out_dim": 1}, +) + +register_model( + "attention_unet", + attention_unet_builder, + defaults={"in_channels": 1, "base_channels": 16, "out_dim": 1}, +) + +register_model( + "deeplabv3p", + 
deeplabv3p_builder, + defaults={"in_channels": 1, "hidden_dim": 32, "out_dim": 1}, +) + +register_model( + "convlstm", + convlstm_builder, + defaults={"history": 4, "in_channels": 1, "hidden_dim": 24, "out_dim": 1}, +) + +register_model( + "mau", + mau_builder, + defaults={"history": 4, "in_channels": 1, "hidden_dim": 24, "out_dim": 1}, +) + +register_model( + "predrnn_v2", + predrnn_v2_builder, + defaults={"history": 4, "in_channels": 1, "hidden_dim": 24, "out_dim": 1}, +) + +register_model( + "rainformer", + rainformer_builder, + defaults={"history": 4, "in_channels": 1, "hidden_dim": 24, "out_dim": 1}, +) + +register_model( + "earthformer", + earthformer_builder, + defaults={"history": 4, "in_channels": 1, "hidden_dim": 32, "out_dim": 1}, +) + +register_model( + "swinlstm", + swinlstm_builder, + defaults={"history": 4, "in_channels": 1, "hidden_dim": 24, "out_dim": 1}, +) + +register_model( + "earthfarseer", + earthfarseer_builder, + defaults={"history": 4, "in_channels": 1, "hidden_dim": 24, "out_dim": 1}, +) + +register_model( + "convgru_trajgru", + convgru_trajgru_builder, + defaults={"history": 4, "in_channels": 1, "hidden_dim": 24, "out_dim": 1}, +) + +register_model( + "tcn", + tcn_builder, + defaults={"history": 4, "in_channels": 1, "hidden_dim": 24, "out_dim": 1}, +) + +register_model( + "utae", + utae_builder, + defaults={"history": 4, "in_channels": 1, "hidden_dim": 24, "out_dim": 1}, +) + +register_model( + "segformer", + segformer_builder, + defaults={"history": 4, "in_channels": 1, "hidden_dim": 32, "out_dim": 1}, +) + +register_model( + "swin_unet", + swin_unet_builder, + defaults={"history": 4, "in_channels": 1, "base_channels": 16, "out_dim": 1}, +) + +register_model( + "vit_segmenter", + vit_segmenter_builder, + defaults={"history": 4, "in_channels": 1, "hidden_dim": 32, "out_dim": 1}, +) + +register_model( + "deep_ensemble", + deep_ensemble_builder, + defaults={"in_channels": 1, "base_channels": 16, "out_dim": 1, "ensemble_size": 3}, +) + 
+register_model( + "firepred", + firepred_builder, + defaults={"history": 5, "in_channels": 8, "hidden_dim": 32, "out_channels": 1, "dropout": 0.1}, +) + +register_model( + "ts_satfire", + ts_satfire_builder, + defaults={"history": 5, "in_channels": 8, "hidden_dim": 32, "out_channels": 1, "dropout": 0.1}, +) + register_model( "wildfire_cnn_aspp", cnn_aspp_builder, diff --git a/pyhazards/models/_wildfire_estimator.py b/pyhazards/models/_wildfire_estimator.py new file mode 100644 index 00000000..4afa8c5d --- /dev/null +++ b/pyhazards/models/_wildfire_estimator.py @@ -0,0 +1,44 @@ +from __future__ import annotations + +from typing import Any, Optional + +import numpy as np +import torch +import torch.nn as nn + + +def require_task(task: str, allowed: set[str], model_name: str) -> None: + normalized = task.lower() + if normalized not in allowed: + allowed_text = ', '.join(sorted(allowed)) + raise ValueError(f"Model '{model_name}' does not support task={task!r}. Allowed tasks: {allowed_text}") + + +def flatten_tensor(x: torch.Tensor) -> np.ndarray: + if not isinstance(x, torch.Tensor): + raise TypeError('Expected torch.Tensor input for estimator-style wildfire models.') + x_np = x.detach().cpu().float().numpy() + if x_np.ndim == 1: + x_np = x_np[:, None] + if x_np.ndim > 2: + x_np = x_np.reshape(x_np.shape[0], -1) + return x_np + + +class BinaryEstimatorProxy(nn.Module): + def __init__(self): + super().__init__() + self._is_fitted = False + + def _fallback_positive_proba(self, x_np: np.ndarray) -> np.ndarray: + score = np.clip(x_np.mean(axis=1), -8.0, 8.0) + return 1.0 / (1.0 + np.exp(-score)) + + def _predict_positive_proba(self, x_np: np.ndarray) -> np.ndarray: + return self._fallback_positive_proba(x_np) + + def forward(self, x: torch.Tensor) -> torch.Tensor: + x_np = flatten_tensor(x) + probs_pos = np.clip(self._predict_positive_proba(x_np), 1e-6, 1.0 - 1e-6) + probs = np.stack([1.0 - probs_pos, probs_pos], axis=-1).astype(np.float32) + return 
torch.from_numpy(probs).to(x.device) diff --git a/pyhazards/models/_wildfire_layers.py b/pyhazards/models/_wildfire_layers.py new file mode 100644 index 00000000..99b0375a --- /dev/null +++ b/pyhazards/models/_wildfire_layers.py @@ -0,0 +1,117 @@ +from __future__ import annotations + +import torch +import torch.nn as nn +import torch.nn.functional as F + + +def check_image_input(x: torch.Tensor, in_channels: int, name: str) -> None: + if x.ndim != 4: + raise ValueError(f"{name} expects input shape (batch, channels, height, width), got {tuple(x.shape)}.") + if x.size(1) != in_channels: + raise ValueError(f"{name} expected in_channels={in_channels}, got {x.size(1)}.") + + +def check_sequence_input(x: torch.Tensor, in_channels: int, name: str) -> None: + if x.ndim != 5: + raise ValueError( + f"{name} expects input shape (batch, history, channels, height, width), got {tuple(x.shape)}." + ) + if x.size(2) != in_channels: + raise ValueError(f"{name} expected in_channels={in_channels}, got {x.size(2)}.") + + +class ConvBlock(nn.Module): + def __init__(self, in_channels: int, out_channels: int): + super().__init__() + self.block = nn.Sequential( + nn.Conv2d(in_channels, out_channels, kernel_size=3, padding=1), + nn.GELU(), + nn.Conv2d(out_channels, out_channels, kernel_size=3, padding=1), + nn.GELU(), + ) + + def forward(self, x: torch.Tensor) -> torch.Tensor: + return self.block(x) + + +class ResidualBlock(nn.Module): + def __init__(self, channels: int): + super().__init__() + self.block = nn.Sequential( + nn.Conv2d(channels, channels, kernel_size=3, padding=1), + nn.GELU(), + nn.Conv2d(channels, channels, kernel_size=3, padding=1), + ) + + def forward(self, x: torch.Tensor) -> torch.Tensor: + return F.gelu(x + self.block(x)) + + +class Downsample(nn.Module): + def __init__(self, in_channels: int, out_channels: int): + super().__init__() + self.block = nn.Sequential(nn.MaxPool2d(2), ConvBlock(in_channels, out_channels)) + + def forward(self, x: torch.Tensor) -> 
torch.Tensor: + return self.block(x) + + +class Upsample(nn.Module): + def __init__(self, in_channels: int, skip_channels: int, out_channels: int): + super().__init__() + self.up = nn.ConvTranspose2d(in_channels, out_channels, kernel_size=2, stride=2) + self.block = ConvBlock(out_channels + skip_channels, out_channels) + + def forward(self, x: torch.Tensor, skip: torch.Tensor) -> torch.Tensor: + x = self.up(x) + if x.shape[-2:] != skip.shape[-2:]: + x = F.interpolate(x, size=skip.shape[-2:], mode='bilinear', align_corners=False) + return self.block(torch.cat([x, skip], dim=1)) + + +class ASPPBlock(nn.Module): + def __init__(self, in_channels: int, out_channels: int, dilations: tuple[int, ...] = (1, 3, 6, 12)): + super().__init__() + self.branches = nn.ModuleList([ + nn.Conv2d(in_channels, out_channels, kernel_size=3, padding=d, dilation=d) + for d in dilations + ]) + self.project = nn.Conv2d(len(dilations) * out_channels, out_channels, kernel_size=1) + + def forward(self, x: torch.Tensor) -> torch.Tensor: + branches = [F.gelu(branch(x)) for branch in self.branches] + return F.gelu(self.project(torch.cat(branches, dim=1))) + + +class TemporalAttentionFusion(nn.Module): + def __init__(self, in_channels: int, hidden_dim: int): + super().__init__() + self.encoder = nn.Sequential( + nn.Conv3d(in_channels, hidden_dim, kernel_size=(3, 3, 3), padding=1), + nn.GELU(), + nn.Conv3d(hidden_dim, hidden_dim, kernel_size=(3, 3, 3), padding=1), + nn.GELU(), + ) + self.score = nn.Conv3d(hidden_dim, 1, kernel_size=1) + + def forward(self, x: torch.Tensor) -> torch.Tensor: + encoded = self.encoder(x.permute(0, 2, 1, 3, 4)) + weights = torch.softmax(self.score(encoded), dim=2) + return torch.sum(encoded * weights, dim=2) + + +class PatchMixer(nn.Module): + def __init__(self, in_channels: int, hidden_dim: int): + super().__init__() + self.proj = nn.Conv2d(in_channels, hidden_dim, kernel_size=3, padding=1) + self.mix = nn.Sequential( + nn.Conv2d(hidden_dim, hidden_dim, kernel_size=1), + 
nn.GELU(), + nn.Conv2d(hidden_dim, hidden_dim, kernel_size=3, padding=1, groups=max(1, hidden_dim // 8)), + nn.GELU(), + ) + + def forward(self, x: torch.Tensor) -> torch.Tensor: + x = F.gelu(self.proj(x)) + return self.mix(x) diff --git a/pyhazards/models/attention_unet.py b/pyhazards/models/attention_unet.py new file mode 100644 index 00000000..189676a0 --- /dev/null +++ b/pyhazards/models/attention_unet.py @@ -0,0 +1,52 @@ +from __future__ import annotations + +from typing import Any + +import torch +import torch.nn as nn + +from ._wildfire_layers import ConvBlock, Downsample, Upsample, check_image_input + + +class AttentionGate(nn.Module): + def __init__(self, channels: int): + super().__init__() + self.gate = nn.Sequential(nn.Conv2d(channels, channels, kernel_size=1), nn.Sigmoid()) + + def forward(self, x: torch.Tensor) -> torch.Tensor: + return x * self.gate(x) + + +class TinyAttentionUNet(nn.Module): + """Compact Attention U-Net baseline for wildfire spread masks.""" + + def __init__(self, in_channels: int = 1, base_channels: int = 16, out_dim: int = 1): + super().__init__() + self.in_channels = int(in_channels) + self.stem = ConvBlock(in_channels, base_channels) + self.down1 = Downsample(base_channels, base_channels * 2) + self.down2 = Downsample(base_channels * 2, base_channels * 4) + self.attn2 = AttentionGate(base_channels * 2) + self.attn1 = AttentionGate(base_channels) + self.up1 = Upsample(base_channels * 4, base_channels * 2, base_channels * 2) + self.up2 = Upsample(base_channels * 2, base_channels, base_channels) + self.head = nn.Conv2d(base_channels, out_dim, kernel_size=1) + + def forward(self, x: torch.Tensor) -> torch.Tensor: + check_image_input(x, self.in_channels, 'TinyAttentionUNet') + s1 = self.stem(x) + s2 = self.down1(s1) + bottleneck = self.down2(s2) + x = self.up1(bottleneck, self.attn2(s2)) + x = self.up2(x, self.attn1(s1)) + return self.head(x) + + +def attention_unet_builder(task: str, in_channels: int = 1, base_channels: int = 16, 
out_dim: int = 1, **kwargs: Any) -> nn.Module: + _ = kwargs + if task.lower() not in {'segmentation', 'regression'}: + raise ValueError(f"attention_unet supports task='segmentation' or 'regression', got {task!r}.") + return TinyAttentionUNet(in_channels=in_channels, base_channels=base_channels, out_dim=out_dim) + + +__all__ = ['TinyAttentionUNet', 'attention_unet_builder'] diff --git a/pyhazards/models/convgru_trajgru.py b/pyhazards/models/convgru_trajgru.py new file mode 100644 index 00000000..7b9baa14 --- /dev/null +++ b/pyhazards/models/convgru_trajgru.py @@ -0,0 +1,54 @@ +from __future__ import annotations + +from typing import Any + +import torch +import torch.nn as nn + +from ._wildfire_layers import check_sequence_input + + +class ConvGRUCell(nn.Module): + def __init__(self, in_channels: int, hidden_dim: int): + super().__init__() + self.hidden_dim = hidden_dim + self.update = nn.Conv2d(in_channels + hidden_dim, hidden_dim, kernel_size=3, padding=1) + self.reset = nn.Conv2d(in_channels + hidden_dim, hidden_dim, kernel_size=3, padding=1) + self.out = nn.Conv2d(in_channels + hidden_dim, hidden_dim, kernel_size=3, padding=1) + + def forward(self, x: torch.Tensor, h: torch.Tensor) -> torch.Tensor: + z = torch.sigmoid(self.update(torch.cat([x, h], dim=1))) + r = torch.sigmoid(self.reset(torch.cat([x, h], dim=1))) + candidate = torch.tanh(self.out(torch.cat([x, r * h], dim=1))) + return (1.0 - z) * h + z * candidate + + +class TinyConvGRTrajGRU(nn.Module): + """Compact ConvGRU-style baseline used for trajectory-aware wildfire sequence prediction.""" + + def __init__(self, history: int = 4, in_channels: int = 1, hidden_dim: int = 24, out_dim: int = 1): + super().__init__() + self.history = int(history) + self.in_channels = int(in_channels) + self.cell = ConvGRUCell(in_channels, hidden_dim) + self.head = nn.Conv2d(hidden_dim, out_dim, kernel_size=1) + + def forward(self, x: torch.Tensor) -> torch.Tensor: + check_sequence_input(x, self.in_channels, 
'TinyConvGRTrajGRU') + if x.size(1) != self.history: + raise ValueError(f"TinyConvGRTrajGRU expected history={self.history}, got {x.size(1)}.") + b, _, _, h, w = x.shape + h_t = x.new_zeros((b, self.cell.hidden_dim, h, w)) + for t in range(self.history): + h_t = self.cell(x[:, t], h_t) + return self.head(h_t) + + +def convgru_trajgru_builder(task: str, history: int = 4, in_channels: int = 1, hidden_dim: int = 24, out_dim: int = 1, **kwargs: Any) -> nn.Module: + _ = kwargs + if task.lower() not in {'segmentation', 'regression'}: + raise ValueError(f"convgru_trajgru supports task='segmentation' or 'regression', got {task!r}.") + return TinyConvGRTrajGRU(history=history, in_channels=in_channels, hidden_dim=hidden_dim, out_dim=out_dim) + + +__all__ = ['TinyConvGRTrajGRU', 'convgru_trajgru_builder'] diff --git a/pyhazards/models/convlstm.py b/pyhazards/models/convlstm.py new file mode 100644 index 00000000..d464661d --- /dev/null +++ b/pyhazards/models/convlstm.py @@ -0,0 +1,59 @@ +from __future__ import annotations + +from typing import Any + +import torch +import torch.nn as nn + +from ._wildfire_layers import check_sequence_input + + +class ConvLSTMCell(nn.Module): + def __init__(self, in_channels: int, hidden_dim: int): + super().__init__() + self.hidden_dim = hidden_dim + self.gates = nn.Conv2d(in_channels + hidden_dim, hidden_dim * 4, kernel_size=3, padding=1) + + def forward(self, x: torch.Tensor, state: tuple[torch.Tensor, torch.Tensor]) -> tuple[torch.Tensor, torch.Tensor]: + h, c = state + gates = self.gates(torch.cat([x, h], dim=1)) + i, f, o, g = torch.chunk(gates, 4, dim=1) + i = torch.sigmoid(i) + f = torch.sigmoid(f) + o = torch.sigmoid(o) + g = torch.tanh(g) + c = f * c + i * g + h = o * torch.tanh(c) + return h, c + + +class TinyConvLSTM(nn.Module): + """Compact ConvLSTM baseline for wildfire spread prediction from raster histories.""" + + def __init__(self, history: int = 4, in_channels: int = 1, hidden_dim: int = 24, out_dim: int = 1): + 
super().__init__() + self.history = int(history) + self.in_channels = int(in_channels) + self.cell = ConvLSTMCell(in_channels, hidden_dim) + self.head = nn.Conv2d(hidden_dim, out_dim, kernel_size=1) + + def forward(self, x: torch.Tensor) -> torch.Tensor: + check_sequence_input(x, self.in_channels, 'TinyConvLSTM') + if x.size(1) != self.history: + raise ValueError(f"TinyConvLSTM expected history={self.history}, got {x.size(1)}.") + b, _, _, h, w = x.shape + h_t = x.new_zeros((b, self.cell.hidden_dim, h, w)) + c_t = x.new_zeros((b, self.cell.hidden_dim, h, w)) + for t in range(self.history): + h_t, c_t = self.cell(x[:, t], (h_t, c_t)) + return self.head(h_t) + + +def convlstm_builder(task: str, history: int = 4, in_channels: int = 1, hidden_dim: int = 24, out_dim: int = 1, **kwargs: Any) -> nn.Module: + _ = kwargs + if task.lower() not in {'segmentation', 'regression'}: + raise ValueError(f"convlstm supports task='segmentation' or 'regression', got {task!r}.") + return TinyConvLSTM(history=history, in_channels=in_channels, hidden_dim=hidden_dim, out_dim=out_dim) + + +__all__ = ['TinyConvLSTM', 'convlstm_builder'] diff --git a/pyhazards/models/deep_ensemble.py b/pyhazards/models/deep_ensemble.py new file mode 100644 index 00000000..7f1e6bb4 --- /dev/null +++ b/pyhazards/models/deep_ensemble.py @@ -0,0 +1,36 @@ +from __future__ import annotations + +from typing import Any + +import torch +import torch.nn as nn + +from .unet import TinyUNet +from ._wildfire_layers import check_image_input + + +class DeepEnsemble(nn.Module): + """Mean-ensemble wrapper over multiple compact wildfire segmentation members.""" + + def __init__(self, in_channels: int = 1, base_channels: int = 16, out_dim: int = 1, ensemble_size: int = 3): + super().__init__() + self.in_channels = int(in_channels) + self.members = nn.ModuleList([ + TinyUNet(in_channels=in_channels, base_channels=base_channels, out_dim=out_dim) + for _ in range(int(ensemble_size)) + ]) + + def forward(self, x: torch.Tensor) -> 
torch.Tensor: + check_image_input(x, self.in_channels, 'DeepEnsemble') + outputs = [member(x) for member in self.members] + return torch.stack(outputs, dim=0).mean(dim=0) + + +def deep_ensemble_builder(task: str, in_channels: int = 1, base_channels: int = 16, out_dim: int = 1, ensemble_size: int = 3, **kwargs: Any) -> nn.Module: + _ = kwargs + if task.lower() not in {'segmentation', 'regression'}: + raise ValueError(f"deep_ensemble supports task='segmentation' or 'regression', got {task!r}.") + return DeepEnsemble(in_channels=in_channels, base_channels=base_channels, out_dim=out_dim, ensemble_size=ensemble_size) + + +__all__ = ['DeepEnsemble', 'deep_ensemble_builder'] diff --git a/pyhazards/models/deeplabv3p.py b/pyhazards/models/deeplabv3p.py new file mode 100644 index 00000000..ac20e8cb --- /dev/null +++ b/pyhazards/models/deeplabv3p.py @@ -0,0 +1,37 @@ +from __future__ import annotations + +from typing import Any + +import torch +import torch.nn as nn + +from ._wildfire_layers import ASPPBlock, ConvBlock, check_image_input + + +class TinyDeepLabV3P(nn.Module): + """Compact DeepLabV3+ style wildfire segmentation baseline.""" + + def __init__(self, in_channels: int = 1, hidden_dim: int = 32, out_dim: int = 1): + super().__init__() + self.in_channels = int(in_channels) + self.encoder = ConvBlock(in_channels, hidden_dim) + self.aspp = ASPPBlock(hidden_dim, hidden_dim) + self.decoder = nn.Sequential( + nn.Conv2d(hidden_dim, hidden_dim, kernel_size=3, padding=1), + nn.GELU(), + nn.Conv2d(hidden_dim, out_dim, kernel_size=1), + ) + + def forward(self, x: torch.Tensor) -> torch.Tensor: + check_image_input(x, self.in_channels, 'TinyDeepLabV3P') + return self.decoder(self.aspp(self.encoder(x))) + + +def deeplabv3p_builder(task: str, in_channels: int = 1, hidden_dim: int = 32, out_dim: int = 1, **kwargs: Any) -> nn.Module: + _ = kwargs + if task.lower() not in {'segmentation', 'regression'}: + raise ValueError(f"deeplabv3p supports task='segmentation' or 'regression', got 
{task!r}.") + return TinyDeepLabV3P(in_channels=in_channels, hidden_dim=hidden_dim, out_dim=out_dim) + + +__all__ = ['TinyDeepLabV3P', 'deeplabv3p_builder'] diff --git a/pyhazards/models/earthfarseer.py b/pyhazards/models/earthfarseer.py new file mode 100644 index 00000000..9d4bce78 --- /dev/null +++ b/pyhazards/models/earthfarseer.py @@ -0,0 +1,43 @@ +from __future__ import annotations + +from typing import Any + +import torch +import torch.nn as nn + +from ._wildfire_layers import check_sequence_input + + +class TinyEarthFarseer(nn.Module): + """Compact multi-scale temporal baseline inspired by EarthFarseer.""" + + def __init__(self, history: int = 4, in_channels: int = 1, hidden_dim: int = 24, out_dim: int = 1): + super().__init__() + self.history = int(history) + self.in_channels = int(in_channels) + self.branch1 = nn.Conv3d(in_channels, hidden_dim, kernel_size=(3, 3, 3), padding=1) + self.branch2 = nn.Conv3d(in_channels, hidden_dim, kernel_size=(3, 3, 3), padding=(2, 1, 1), dilation=(2, 1, 1)) + self.project = nn.Sequential( + nn.Conv2d(hidden_dim * 2, hidden_dim, kernel_size=3, padding=1), + nn.GELU(), + nn.Conv2d(hidden_dim, out_dim, kernel_size=1), + ) + + def forward(self, x: torch.Tensor) -> torch.Tensor: + check_sequence_input(x, self.in_channels, 'TinyEarthFarseer') + if x.size(1) != self.history: + raise ValueError(f"TinyEarthFarseer expected history={self.history}, got {x.size(1)}.") + x3d = x.permute(0, 2, 1, 3, 4) + f1 = torch.mean(torch.gelu(self.branch1(x3d)), dim=2) + f2 = torch.mean(torch.gelu(self.branch2(x3d)), dim=2) + return self.project(torch.cat([f1, f2], dim=1)) + + +def earthfarseer_builder(task: str, history: int = 4, in_channels: int = 1, hidden_dim: int = 24, out_dim: int = 1, **kwargs: Any) -> nn.Module: + _ = kwargs + if task.lower() not in {'segmentation', 'regression'}: + raise ValueError(f"earthfarseer supports task='segmentation' or 'regression', got {task!r}.") + return TinyEarthFarseer(history=history, in_channels=in_channels, 
hidden_dim=hidden_dim, out_dim=out_dim) + + +__all__ = ['TinyEarthFarseer', 'earthfarseer_builder'] diff --git a/pyhazards/models/earthformer.py b/pyhazards/models/earthformer.py new file mode 100644 index 00000000..5c76dfed --- /dev/null +++ b/pyhazards/models/earthformer.py @@ -0,0 +1,37 @@ +from __future__ import annotations + +from typing import Any + +import torch +import torch.nn as nn + +from ._wildfire_layers import PatchMixer, TemporalAttentionFusion, check_sequence_input + + +class TinyEarthFormer(nn.Module): + """Compact EarthFormer-style baseline for wildfire spread sequence prediction.""" + + def __init__(self, history: int = 4, in_channels: int = 1, hidden_dim: int = 32, out_dim: int = 1): + super().__init__() + self.history = int(history) + self.in_channels = int(in_channels) + self.fusion = TemporalAttentionFusion(in_channels, hidden_dim) + self.encoder = PatchMixer(hidden_dim, hidden_dim) + self.head = nn.Conv2d(hidden_dim, out_dim, kernel_size=1) + + def forward(self, x: torch.Tensor) -> torch.Tensor: + check_sequence_input(x, self.in_channels, 'TinyEarthFormer') + if x.size(1) != self.history: + raise ValueError(f"TinyEarthFormer expected history={self.history}, got {x.size(1)}.") + fused = self.fusion(x) + return self.head(self.encoder(fused)) + + +def earthformer_builder(task: str, history: int = 4, in_channels: int = 1, hidden_dim: int = 32, out_dim: int = 1, **kwargs: Any) -> nn.Module: + _ = kwargs + if task.lower() not in {'segmentation', 'regression'}: + raise ValueError(f"earthformer supports task='segmentation' or 'regression', got {task!r}.") + return TinyEarthFormer(history=history, in_channels=in_channels, hidden_dim=hidden_dim, out_dim=out_dim) + + +__all__ = ['TinyEarthFormer', 'earthformer_builder'] diff --git a/pyhazards/models/firepred.py b/pyhazards/models/firepred.py new file mode 100644 index 00000000..0cf67033 --- /dev/null +++ b/pyhazards/models/firepred.py @@ -0,0 +1,98 @@ +from __future__ import annotations + +import torch 
+import torch.nn as nn + + +class FirePred(nn.Module): + """Hybrid multi-temporal CNN wildfire spread predictor inspired by FirePred.""" + + def __init__( + self, + history: int = 5, + in_channels: int = 8, + hidden_dim: int = 32, + out_channels: int = 1, + dropout: float = 0.1, + ): + super().__init__() + if history <= 0: + raise ValueError(f"history must be positive, got {history}") + if in_channels <= 0: + raise ValueError(f"in_channels must be positive, got {in_channels}") + if hidden_dim <= 0: + raise ValueError(f"hidden_dim must be positive, got {hidden_dim}") + if out_channels <= 0: + raise ValueError(f"out_channels must be positive, got {out_channels}") + if not 0.0 <= dropout < 1.0: + raise ValueError(f"dropout must be in [0, 1), got {dropout}") + + self.history = int(history) + self.in_channels = int(in_channels) + + self.recent_branch = nn.Sequential( + nn.Conv3d(in_channels, hidden_dim, kernel_size=(3, 3, 3), padding=1), + nn.GELU(), + nn.Conv3d(hidden_dim, hidden_dim, kernel_size=(3, 3, 3), padding=1), + nn.GELU(), + ) + self.daily_branch = nn.Sequential( + nn.Conv2d(in_channels, hidden_dim, kernel_size=3, padding=1), + nn.GELU(), + nn.Conv2d(hidden_dim, hidden_dim, kernel_size=3, padding=1), + nn.GELU(), + ) + self.snapshot_branch = nn.Sequential( + nn.Conv2d(in_channels, hidden_dim, kernel_size=3, padding=1), + nn.GELU(), + nn.Conv2d(hidden_dim, hidden_dim, kernel_size=3, padding=2, dilation=2), + nn.GELU(), + ) + self.fusion = nn.Sequential( + nn.Conv2d(hidden_dim * 3, hidden_dim * 2, kernel_size=3, padding=1), + nn.GELU(), + nn.Dropout2d(dropout) if dropout > 0 else nn.Identity(), + nn.Conv2d(hidden_dim * 2, out_channels, kernel_size=1), + ) + + def forward(self, x: torch.Tensor) -> torch.Tensor: + if x.ndim != 5: + raise ValueError( + "FirePred expects input shape (batch, history, channels, height, width), " + f"got {tuple(x.shape)}." 
+ ) + if x.size(1) != self.history: + raise ValueError(f"FirePred expected history={self.history}, got {x.size(1)}.") + if x.size(2) != self.in_channels: + raise ValueError(f"FirePred expected in_channels={self.in_channels}, got {x.size(2)}.") + + x_3d = x.permute(0, 2, 1, 3, 4) + recent = torch.mean(self.recent_branch(x_3d), dim=2) + daily = self.daily_branch(torch.mean(x, dim=1)) + snapshot = self.snapshot_branch(x[:, -1]) + fused = torch.cat([recent, daily, snapshot], dim=1) + return self.fusion(fused) + + +def firepred_builder( + task: str, + history: int = 5, + in_channels: int = 8, + hidden_dim: int = 32, + out_channels: int = 1, + dropout: float = 0.1, + **kwargs, +) -> nn.Module: + _ = kwargs + if task.lower() not in {"segmentation", "regression"}: + raise ValueError(f"firepred supports task='segmentation' or 'regression', got {task!r}.") + return FirePred( + history=history, + in_channels=in_channels, + hidden_dim=hidden_dim, + out_channels=out_channels, + dropout=dropout, + ) + + +__all__ = ["FirePred", "firepred_builder"] diff --git a/pyhazards/models/lightgbm.py b/pyhazards/models/lightgbm.py new file mode 100644 index 00000000..59d76213 --- /dev/null +++ b/pyhazards/models/lightgbm.py @@ -0,0 +1,48 @@ +from __future__ import annotations + +from typing import Any + +import numpy as np +import torch.nn as nn + +from ._wildfire_estimator import BinaryEstimatorProxy, require_task + + +class LightGBMModel(BinaryEstimatorProxy): + """A LightGBM wildfire occurrence baseline using binary classification.""" + + def __init__(self, num_leaves: int = 63, learning_rate: float = 0.05, feature_fraction: float = 0.8, bagging_fraction: float = 0.8, num_boost_round: int = 800): + super().__init__() + self.params = { + 'objective': 'binary', + 'metric': 'binary_logloss', + 'num_leaves': int(num_leaves), + 'learning_rate': float(learning_rate), + 'feature_fraction': float(feature_fraction), + 'bagging_fraction': float(bagging_fraction), + 'verbose': -1, + } + 
self.num_boost_round = int(num_boost_round) + self.booster = None + + def fit(self, x_train: np.ndarray, y_train: np.ndarray) -> None: + try: + import lightgbm as lgb + except Exception: + return + dtrain = lgb.Dataset(x_train, label=y_train) + self.booster = lgb.train(self.params, dtrain, num_boost_round=self.num_boost_round) + self._is_fitted = True + + def _predict_positive_proba(self, x_np: np.ndarray) -> np.ndarray: + if self._is_fitted and self.booster is not None: + return self.booster.predict(x_np) + return super()._predict_positive_proba(x_np) + + +def lightgbm_builder(task: str, **kwargs: Any) -> nn.Module: + require_task(task, {'classification'}, 'lightgbm') + return LightGBMModel(**kwargs) + + +__all__ = ['LightGBMModel', 'lightgbm_builder'] diff --git a/pyhazards/models/logistic_regression.py b/pyhazards/models/logistic_regression.py new file mode 100644 index 00000000..8dce060f --- /dev/null +++ b/pyhazards/models/logistic_regression.py @@ -0,0 +1,39 @@ +from __future__ import annotations + +from typing import Any + +import numpy as np +import torch.nn as nn + +from ._wildfire_estimator import BinaryEstimatorProxy, require_task + + +class LogisticRegressionModel(BinaryEstimatorProxy): + """A classical logistic baseline for wildfire occurrence probability.""" + + def __init__(self, solver: str = 'lbfgs', max_iter: int = 500, class_weight: Any = 'balanced'): + super().__init__() + try: + from sklearn.linear_model import LogisticRegression + self.estimator = LogisticRegression(solver=solver, max_iter=int(max_iter), class_weight=class_weight) + except Exception: + self.estimator = None + + def fit(self, x_train: np.ndarray, y_train: np.ndarray) -> None: + if self.estimator is None: + return + self.estimator.fit(x_train, y_train) + self._is_fitted = True + + def _predict_positive_proba(self, x_np: np.ndarray) -> np.ndarray: + if self._is_fitted and self.estimator is not None: + return self.estimator.predict_proba(x_np)[:, 1] + return 
super()._predict_positive_proba(x_np) + + +def logistic_regression_builder(task: str, **kwargs: Any) -> nn.Module: + require_task(task, {'classification'}, 'logistic_regression') + return LogisticRegressionModel(**kwargs) + + +__all__ = ['LogisticRegressionModel', 'logistic_regression_builder'] diff --git a/pyhazards/models/mau.py b/pyhazards/models/mau.py new file mode 100644 index 00000000..4040885b --- /dev/null +++ b/pyhazards/models/mau.py @@ -0,0 +1,39 @@ +from __future__ import annotations + +from typing import Any + +import torch +import torch.nn as nn + +from ._wildfire_layers import TemporalAttentionFusion, check_sequence_input + + +class TinyMAU(nn.Module): + """Compact multi-axis temporal fusion baseline for wildfire spread masks.""" + + def __init__(self, history: int = 4, in_channels: int = 1, hidden_dim: int = 24, out_dim: int = 1): + super().__init__() + self.history = int(history) + self.in_channels = int(in_channels) + self.fusion = TemporalAttentionFusion(in_channels, hidden_dim) + self.head = nn.Sequential( + nn.Conv2d(hidden_dim, hidden_dim, kernel_size=3, padding=1), + nn.GELU(), + nn.Conv2d(hidden_dim, out_dim, kernel_size=1), + ) + + def forward(self, x: torch.Tensor) -> torch.Tensor: + check_sequence_input(x, self.in_channels, 'TinyMAU') + if x.size(1) != self.history: + raise ValueError(f"TinyMAU expected history={self.history}, got {x.size(1)}.") + return self.head(self.fusion(x)) + + +def mau_builder(task: str, history: int = 4, in_channels: int = 1, hidden_dim: int = 24, out_dim: int = 1, **kwargs: Any) -> nn.Module: + _ = kwargs + if task.lower() not in {'segmentation', 'regression'}: + raise ValueError(f"mau supports task='segmentation' or 'regression', got {task!r}.") + return TinyMAU(history=history, in_channels=in_channels, hidden_dim=hidden_dim, out_dim=out_dim) + + +__all__ = ['TinyMAU', 'mau_builder'] diff --git a/pyhazards/models/predrnn_v2.py b/pyhazards/models/predrnn_v2.py new file mode 100644 index 00000000..d41f0adb --- 
/dev/null +++ b/pyhazards/models/predrnn_v2.py @@ -0,0 +1,55 @@ +from __future__ import annotations + +from typing import Any + +import torch +import torch.nn as nn + +from ._wildfire_layers import check_sequence_input + + +class PredCell(nn.Module): + def __init__(self, in_channels: int, hidden_dim: int): + super().__init__() + self.hidden_dim = hidden_dim + self.input_proj = nn.Conv2d(in_channels, hidden_dim, kernel_size=3, padding=1) + self.hidden_proj = nn.Conv2d(hidden_dim, hidden_dim, kernel_size=3, padding=1) + self.memory_proj = nn.Conv2d(hidden_dim, hidden_dim, kernel_size=1) + + def forward(self, x: torch.Tensor, h: torch.Tensor, m: torch.Tensor) -> tuple[torch.Tensor, torch.Tensor]: + x_proj = torch.tanh(self.input_proj(x)) + h = torch.tanh(self.hidden_proj(h) + x_proj + self.memory_proj(m)) + m = 0.7 * m + 0.3 * h + return h, m + + +class TinyPredRNNv2(nn.Module): + """Compact predictive recurrent baseline inspired by PredRNN-v2.""" + + def __init__(self, history: int = 4, in_channels: int = 1, hidden_dim: int = 24, out_dim: int = 1): + super().__init__() + self.history = int(history) + self.in_channels = int(in_channels) + self.cell = PredCell(in_channels, hidden_dim) + self.head = nn.Conv2d(hidden_dim, out_dim, kernel_size=1) + + def forward(self, x: torch.Tensor) -> torch.Tensor: + check_sequence_input(x, self.in_channels, 'TinyPredRNNv2') + if x.size(1) != self.history: + raise ValueError(f"TinyPredRNNv2 expected history={self.history}, got {x.size(1)}.") + b, _, _, h, w = x.shape + h_t = x.new_zeros((b, self.cell.hidden_dim, h, w)) + m_t = x.new_zeros((b, self.cell.hidden_dim, h, w)) + for t in range(self.history): + h_t, m_t = self.cell(x[:, t], h_t, m_t) + return self.head(h_t) + + +def predrnn_v2_builder(task: str, history: int = 4, in_channels: int = 1, hidden_dim: int = 24, out_dim: int = 1, **kwargs: Any) -> nn.Module: + _ = kwargs + if task.lower() not in {'segmentation', 'regression'}: + raise ValueError(f"predrnn_v2 supports 
task='segmentation' or 'regression', got {task!r}.") + return TinyPredRNNv2(history=history, in_channels=in_channels, hidden_dim=hidden_dim, out_dim=out_dim) + + +__all__ = ['TinyPredRNNv2', 'predrnn_v2_builder'] diff --git a/pyhazards/models/rainformer.py b/pyhazards/models/rainformer.py new file mode 100644 index 00000000..18a4f9b8 --- /dev/null +++ b/pyhazards/models/rainformer.py @@ -0,0 +1,35 @@ +from __future__ import annotations + +from typing import Any + +import torch +import torch.nn as nn + +from ._wildfire_layers import PatchMixer, TemporalAttentionFusion, check_sequence_input + + +class TinyRainformer(nn.Module): + """Compact temporal-attention baseline inspired by Rainformer-style sequence fusion.""" + + def __init__(self, history: int = 4, in_channels: int = 1, hidden_dim: int = 24, out_dim: int = 1): + super().__init__() + self.history = int(history) + self.in_channels = int(in_channels) + self.fusion = TemporalAttentionFusion(in_channels, hidden_dim) + self.decoder = nn.Sequential(PatchMixer(hidden_dim, hidden_dim), nn.Conv2d(hidden_dim, out_dim, kernel_size=1)) + + def forward(self, x: torch.Tensor) -> torch.Tensor: + check_sequence_input(x, self.in_channels, 'TinyRainformer') + if x.size(1) != self.history: + raise ValueError(f"TinyRainformer expected history={self.history}, got {x.size(1)}.") + return self.decoder(self.fusion(x)) + + +def rainformer_builder(task: str, history: int = 4, in_channels: int = 1, hidden_dim: int = 24, out_dim: int = 1, **kwargs: Any) -> nn.Module: + _ = kwargs + if task.lower() not in {'segmentation', 'regression'}: + raise ValueError(f"rainformer supports task='segmentation' or 'regression', got {task!r}.") + return TinyRainformer(history=history, in_channels=in_channels, hidden_dim=hidden_dim, out_dim=out_dim) + + +__all__ = ['TinyRainformer', 'rainformer_builder'] diff --git a/pyhazards/models/random_forest.py b/pyhazards/models/random_forest.py new file mode 100644 index 00000000..6157ee55 --- /dev/null +++ 
b/pyhazards/models/random_forest.py @@ -0,0 +1,45 @@ +from __future__ import annotations + +from typing import Any, Optional + +import numpy as np +import torch.nn as nn + +from ._wildfire_estimator import BinaryEstimatorProxy, require_task + + +class RandomForestModel(BinaryEstimatorProxy): + """A random-forest wildfire occurrence baseline over tabular features.""" + + def __init__(self, n_estimators: int = 500, max_depth: Optional[int] = None, class_weight: Any = 'balanced_subsample'): + super().__init__() + try: + from sklearn.ensemble import RandomForestClassifier + self.estimator = RandomForestClassifier( + n_estimators=int(n_estimators), + max_depth=max_depth, + class_weight=class_weight, + random_state=42, + n_jobs=1, + ) + except Exception: + self.estimator = None + + def fit(self, x_train: np.ndarray, y_train: np.ndarray) -> None: + if self.estimator is None: + return + self.estimator.fit(x_train, y_train) + self._is_fitted = True + + def _predict_positive_proba(self, x_np: np.ndarray) -> np.ndarray: + if self._is_fitted and self.estimator is not None: + return self.estimator.predict_proba(x_np)[:, 1] + return super()._predict_positive_proba(x_np) + + +def random_forest_builder(task: str, **kwargs: Any) -> nn.Module: + require_task(task, {'classification'}, 'random_forest') + return RandomForestModel(**kwargs) + + +__all__ = ['RandomForestModel', 'random_forest_builder'] diff --git a/pyhazards/models/resnet18_unet.py b/pyhazards/models/resnet18_unet.py new file mode 100644 index 00000000..8dc4b704 --- /dev/null +++ b/pyhazards/models/resnet18_unet.py @@ -0,0 +1,44 @@ +from __future__ import annotations + +from typing import Any + +import torch +import torch.nn as nn + +from ._wildfire_layers import ConvBlock, Downsample, ResidualBlock, Upsample, check_image_input + + +class TinyResNet18UNet(nn.Module): + """Residual encoder-decoder baseline inspired by ResNet18 U-Net.""" + + def __init__(self, in_channels: int = 1, base_channels: int = 16, out_dim: int = 
1): + super().__init__() + self.in_channels = int(in_channels) + self.stem = ConvBlock(in_channels, base_channels) + self.res1 = ResidualBlock(base_channels) + self.down1 = Downsample(base_channels, base_channels * 2) + self.res2 = ResidualBlock(base_channels * 2) + self.down2 = Downsample(base_channels * 2, base_channels * 4) + self.res3 = ResidualBlock(base_channels * 4) + self.up1 = Upsample(base_channels * 4, base_channels * 2, base_channels * 2) + self.up2 = Upsample(base_channels * 2, base_channels, base_channels) + self.head = nn.Conv2d(base_channels, out_dim, kernel_size=1) + + def forward(self, x: torch.Tensor) -> torch.Tensor: + check_image_input(x, self.in_channels, 'TinyResNet18UNet') + s1 = self.res1(self.stem(x)) + s2 = self.res2(self.down1(s1)) + bottleneck = self.res3(self.down2(s2)) + x = self.up1(bottleneck, s2) + x = self.up2(x, s1) + return self.head(x) + + +def resnet18_unet_builder(task: str, in_channels: int = 1, base_channels: int = 16, out_dim: int = 1, **kwargs: Any) -> nn.Module: + _ = kwargs + if task.lower() not in {'segmentation', 'regression'}: + raise ValueError(f"resnet18_unet supports task='segmentation' or 'regression', got {task!r}.") + return TinyResNet18UNet(in_channels=in_channels, base_channels=base_channels, out_dim=out_dim) + + +__all__ = ['TinyResNet18UNet', 'resnet18_unet_builder'] diff --git a/pyhazards/models/segformer.py b/pyhazards/models/segformer.py new file mode 100644 index 00000000..eed0c715 --- /dev/null +++ b/pyhazards/models/segformer.py @@ -0,0 +1,36 @@ +from __future__ import annotations + +from typing import Any + +import torch +import torch.nn as nn + +from ._wildfire_layers import PatchMixer, TemporalAttentionFusion, check_sequence_input + + +class TinySegFormer(nn.Module): + """Compact SegFormer-style wildfire sequence segmenter.""" + + def __init__(self, history: int = 4, in_channels: int = 1, hidden_dim: int = 32, out_dim: int = 1): + super().__init__() + self.history = int(history) + self.in_channels 
= int(in_channels) + self.fusion = TemporalAttentionFusion(in_channels, hidden_dim) + self.encoder = nn.Sequential(PatchMixer(hidden_dim, hidden_dim), PatchMixer(hidden_dim, hidden_dim)) + self.head = nn.Conv2d(hidden_dim, out_dim, kernel_size=1) + + def forward(self, x: torch.Tensor) -> torch.Tensor: + check_sequence_input(x, self.in_channels, 'TinySegFormer') + if x.size(1) != self.history: + raise ValueError(f"TinySegFormer expected history={self.history}, got {x.size(1)}.") + return self.head(self.encoder(self.fusion(x))) + + +def segformer_builder(task: str, history: int = 4, in_channels: int = 1, hidden_dim: int = 32, out_dim: int = 1, **kwargs: Any) -> nn.Module: + _ = kwargs + if task.lower() not in {'segmentation', 'regression'}: + raise ValueError(f"segformer supports task='segmentation' or 'regression', got {task!r}.") + return TinySegFormer(history=history, in_channels=in_channels, hidden_dim=hidden_dim, out_dim=out_dim) + + +__all__ = ['TinySegFormer', 'segformer_builder'] diff --git a/pyhazards/models/swin_unet.py b/pyhazards/models/swin_unet.py new file mode 100644 index 00000000..9b3228b3 --- /dev/null +++ b/pyhazards/models/swin_unet.py @@ -0,0 +1,49 @@ +from __future__ import annotations + +from typing import Any + +import torch +import torch.nn as nn +import torch.nn.functional as F + +from ._wildfire_layers import ConvBlock, Downsample, TemporalAttentionFusion, Upsample, check_sequence_input + + +class TinySwinUNet(nn.Module): + """Compact Swin-UNet style wildfire sequence segmenter.""" + + def __init__(self, history: int = 4, in_channels: int = 1, base_channels: int = 16, out_dim: int = 1, window_size: int = 4): + super().__init__() + self.history = int(history) + self.in_channels = int(in_channels) + self.window_size = int(window_size) + self.fusion = TemporalAttentionFusion(in_channels, base_channels) + self.stem = ConvBlock(base_channels, base_channels) + self.down = Downsample(base_channels, base_channels * 2) + self.up = 
Upsample(base_channels * 2, base_channels, base_channels) + self.head = nn.Conv2d(base_channels, out_dim, kernel_size=1) + + def _window_mix(self, x: torch.Tensor) -> torch.Tensor: + pooled = F.avg_pool2d(x, kernel_size=self.window_size, stride=1, padding=self.window_size // 2) + pooled = pooled[..., : x.size(-2), : x.size(-1)] + return 0.5 * x + 0.5 * pooled + + def forward(self, x: torch.Tensor) -> torch.Tensor: + check_sequence_input(x, self.in_channels, 'TinySwinUNet') + if x.size(1) != self.history: + raise ValueError(f"TinySwinUNet expected history={self.history}, got {x.size(1)}.") + fused = self._window_mix(self.fusion(x)) + s1 = self.stem(fused) + s2 = self.down(s1) + x = self.up(s2, s1) + return self.head(x) + + +def swin_unet_builder(task: str, history: int = 4, in_channels: int = 1, base_channels: int = 16, out_dim: int = 1, window_size: int = 4, **kwargs: Any) -> nn.Module: + _ = kwargs + if task.lower() not in {'segmentation', 'regression'}: + raise ValueError(f"swin_unet supports task='segmentation' or 'regression', got {task!r}.") + return TinySwinUNet(history=history, in_channels=in_channels, base_channels=base_channels, out_dim=out_dim, window_size=window_size) + + +__all__ = ['TinySwinUNet', 'swin_unet_builder'] diff --git a/pyhazards/models/swinlstm.py b/pyhazards/models/swinlstm.py new file mode 100644 index 00000000..4c8851e4 --- /dev/null +++ b/pyhazards/models/swinlstm.py @@ -0,0 +1,49 @@ +from __future__ import annotations + +from typing import Any + +import torch +import torch.nn as nn +import torch.nn.functional as F + +from .convlstm import ConvLSTMCell +from ._wildfire_layers import check_sequence_input + + +class TinySwinLSTM(nn.Module): + """Compact windowed recurrent baseline inspired by SwinLSTM.""" + + def __init__(self, history: int = 4, in_channels: int = 1, hidden_dim: int = 24, out_dim: int = 1, window_size: int = 4): + super().__init__() + self.history = int(history) + self.in_channels = int(in_channels) + self.window_size = 
int(window_size) + self.cell = ConvLSTMCell(in_channels, hidden_dim) + self.head = nn.Conv2d(hidden_dim, out_dim, kernel_size=1) + + def _window_smooth(self, x: torch.Tensor) -> torch.Tensor: + if self.window_size <= 1: + return x + pooled = F.avg_pool2d(x, kernel_size=self.window_size, stride=1, padding=self.window_size // 2) + return 0.5 * x + 0.5 * pooled[..., : x.size(-2), : x.size(-1)] + + def forward(self, x: torch.Tensor) -> torch.Tensor: + check_sequence_input(x, self.in_channels, 'TinySwinLSTM') + if x.size(1) != self.history: + raise ValueError(f"TinySwinLSTM expected history={self.history}, got {x.size(1)}.") + b, _, _, h, w = x.shape + h_t = x.new_zeros((b, self.cell.hidden_dim, h, w)) + c_t = x.new_zeros((b, self.cell.hidden_dim, h, w)) + for t in range(self.history): + h_t, c_t = self.cell(self._window_smooth(x[:, t]), (h_t, c_t)) + return self.head(h_t) + + +def swinlstm_builder(task: str, history: int = 4, in_channels: int = 1, hidden_dim: int = 24, out_dim: int = 1, window_size: int = 4, **kwargs: Any) -> nn.Module: + _ = kwargs + if task.lower() not in {'segmentation', 'regression'}: + raise ValueError(f"swinlstm supports task='segmentation' or 'regression', got {task!r}.") + return TinySwinLSTM(history=history, in_channels=in_channels, hidden_dim=hidden_dim, out_dim=out_dim, window_size=window_size) + + +__all__ = ['TinySwinLSTM', 'swinlstm_builder'] diff --git a/pyhazards/models/tcn.py b/pyhazards/models/tcn.py new file mode 100644 index 00000000..d353be94 --- /dev/null +++ b/pyhazards/models/tcn.py @@ -0,0 +1,41 @@ +from __future__ import annotations + +from typing import Any + +import torch +import torch.nn as nn + +from ._wildfire_layers import check_sequence_input + + +class TinyTCN(nn.Module): + """Compact temporal-convolution wildfire spread baseline.""" + + def __init__(self, history: int = 4, in_channels: int = 1, hidden_dim: int = 24, out_dim: int = 1): + super().__init__() + self.history = int(history) + self.in_channels = 
int(in_channels) + self.temporal = nn.Sequential( + nn.Conv3d(in_channels, hidden_dim, kernel_size=(3, 3, 3), padding=1), + nn.GELU(), + nn.Conv3d(hidden_dim, hidden_dim, kernel_size=(3, 3, 3), padding=(2, 1, 1), dilation=(2, 1, 1)), + nn.GELU(), + ) + self.head = nn.Conv2d(hidden_dim, out_dim, kernel_size=1) + + def forward(self, x: torch.Tensor) -> torch.Tensor: + check_sequence_input(x, self.in_channels, 'TinyTCN') + if x.size(1) != self.history: + raise ValueError(f"TinyTCN expected history={self.history}, got {x.size(1)}.") + encoded = self.temporal(x.permute(0, 2, 1, 3, 4)) + return self.head(torch.mean(encoded, dim=2)) + + +def tcn_builder(task: str, history: int = 4, in_channels: int = 1, hidden_dim: int = 24, out_dim: int = 1, **kwargs: Any) -> nn.Module: + _ = kwargs + if task.lower() not in {'segmentation', 'regression'}: + raise ValueError(f"tcn supports task='segmentation' or 'regression', got {task!r}.") + return TinyTCN(history=history, in_channels=in_channels, hidden_dim=hidden_dim, out_dim=out_dim) + + +__all__ = ['TinyTCN', 'tcn_builder'] diff --git a/pyhazards/models/ts_satfire.py b/pyhazards/models/ts_satfire.py new file mode 100644 index 00000000..9e9c28f6 --- /dev/null +++ b/pyhazards/models/ts_satfire.py @@ -0,0 +1,84 @@ +from __future__ import annotations + +import torch +import torch.nn as nn + + +class TSSatFire(nn.Module): + """Spatio-temporal wildfire prediction model inspired by TS-SatFire.""" + + def __init__( + self, + history: int = 5, + in_channels: int = 8, + hidden_dim: int = 32, + out_channels: int = 1, + dropout: float = 0.1, + ): + super().__init__() + if history <= 0: + raise ValueError(f"history must be positive, got {history}") + if in_channels <= 0: + raise ValueError(f"in_channels must be positive, got {in_channels}") + if hidden_dim <= 0: + raise ValueError(f"hidden_dim must be positive, got {hidden_dim}") + if out_channels <= 0: + raise ValueError(f"out_channels must be positive, got {out_channels}") + if not 0.0 <= 
dropout < 1.0: + raise ValueError(f"dropout must be in [0, 1), got {dropout}") + + self.history = int(history) + self.in_channels = int(in_channels) + self.temporal_encoder = nn.Sequential( + nn.Conv3d(in_channels, hidden_dim, kernel_size=(3, 3, 3), padding=1), + nn.GELU(), + nn.Conv3d(hidden_dim, hidden_dim, kernel_size=(3, 3, 3), padding=1), + nn.GELU(), + ) + self.time_attention = nn.Conv3d(hidden_dim, 1, kernel_size=1) + self.decoder = nn.Sequential( + nn.Conv2d(hidden_dim, hidden_dim, kernel_size=3, padding=1), + nn.GELU(), + nn.Dropout2d(dropout) if dropout > 0 else nn.Identity(), + nn.Conv2d(hidden_dim, out_channels, kernel_size=1), + ) + + def forward(self, x: torch.Tensor) -> torch.Tensor: + if x.ndim != 5: + raise ValueError( + "TSSatFire expects input shape (batch, history, channels, height, width), " + f"got {tuple(x.shape)}." + ) + if x.size(1) != self.history: + raise ValueError(f"TSSatFire expected history={self.history}, got {x.size(1)}.") + if x.size(2) != self.in_channels: + raise ValueError(f"TSSatFire expected in_channels={self.in_channels}, got {x.size(2)}.") + + feat = self.temporal_encoder(x.permute(0, 2, 1, 3, 4)) + attn = torch.softmax(self.time_attention(feat), dim=2) + pooled = torch.sum(attn * feat, dim=2) + return self.decoder(pooled) + + +def ts_satfire_builder( + task: str, + history: int = 5, + in_channels: int = 8, + hidden_dim: int = 32, + out_channels: int = 1, + dropout: float = 0.1, + **kwargs, +) -> nn.Module: + _ = kwargs + if task.lower() not in {"segmentation", "regression"}: + raise ValueError(f"ts_satfire supports task='segmentation' or 'regression', got {task!r}.") + return TSSatFire( + history=history, + in_channels=in_channels, + hidden_dim=hidden_dim, + out_channels=out_channels, + dropout=dropout, + ) + + +__all__ = ["TSSatFire", "ts_satfire_builder"] diff --git a/pyhazards/models/unet.py b/pyhazards/models/unet.py new file mode 100644 index 00000000..06e77eee --- /dev/null +++ b/pyhazards/models/unet.py @@ -0,0 +1,41 
class TinyUNet(nn.Module):
    """Compact U-Net baseline for wildfire raster prediction.

    A two-level encoder/decoder with skip connections: stem -> down x2,
    then up x2 fusing the matching encoder features, then a 1x1 head.
    """

    def __init__(self, in_channels: int = 1, base_channels: int = 16, out_dim: int = 1):
        super().__init__()
        self.in_channels = int(in_channels)
        width = base_channels
        # Encoder: each Downsample halves resolution and doubles width.
        self.stem = ConvBlock(in_channels, width)
        self.down1 = Downsample(width, width * 2)
        self.down2 = Downsample(width * 2, width * 4)
        # Decoder: each Upsample restores resolution and fuses a skip.
        self.up1 = Upsample(width * 4, width * 2, width * 2)
        self.up2 = Upsample(width * 2, width, width)
        self.head = nn.Conv2d(width, out_dim, kernel_size=1)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        """Run the U-Net over a (batch, channels, height, width) raster."""
        check_image_input(x, self.in_channels, 'TinyUNet')
        skip_hi = self.stem(x)
        skip_lo = self.down1(skip_hi)
        bottleneck = self.down2(skip_lo)
        decoded = self.up2(self.up1(bottleneck, skip_lo), skip_hi)
        return self.head(decoded)


def unet_builder(task: str, in_channels: int = 1, base_channels: int = 16, out_dim: int = 1, **kwargs: Any) -> nn.Module:
    """Registry entrypoint for the compact U-Net wildfire baseline.

    Raises:
        ValueError: If ``task`` is neither ``"segmentation"`` nor ``"regression"``.
    """
    _ = kwargs
    if task.lower() in {'segmentation', 'regression'}:
        return TinyUNet(in_channels=in_channels, base_channels=base_channels, out_dim=out_dim)
    raise ValueError(f"unet supports task='segmentation' or 'regression', got {task!r}.")


__all__ = ['TinyUNet', 'unet_builder']
def utae_builder(task: str, history: int = 4, in_channels: int = 1, hidden_dim: int = 24, out_dim: int = 1, **kwargs: Any) -> nn.Module:
    """Registry entrypoint for the compact UTAE wildfire baseline.

    Args:
        task: Must be ``"segmentation"`` or ``"regression"`` (case-insensitive).
        history: Number of input frames the temporal encoder expects.
        in_channels: Channels per input frame.
        hidden_dim: Internal feature width.
        out_dim: Channels of the prediction head.
        **kwargs: Ignored; accepted for registry-call compatibility.

    Raises:
        ValueError: If ``task`` is not a supported task name.
    """
    _ = kwargs
    if task.lower() in {'segmentation', 'regression'}:
        return TinyUTAE(history=history, in_channels=in_channels, hidden_dim=hidden_dim, out_dim=out_dim)
    raise ValueError(f"utae supports task='segmentation' or 'regression', got {task!r}.")
def vit_segmenter_builder(task: str, history: int = 4, in_channels: int = 1, hidden_dim: int = 32, out_dim: int = 1, **kwargs: Any) -> nn.Module:
    """Registry entrypoint for the compact ViT-style wildfire segmenter.

    Args:
        task: Must be ``"segmentation"`` or ``"regression"`` (case-insensitive).
        history: Number of input frames fused by temporal attention.
        in_channels: Channels per input frame.
        hidden_dim: Internal feature width.
        out_dim: Channels of the prediction head.
        **kwargs: Ignored; accepted for registry-call compatibility.

    Raises:
        ValueError: If ``task`` is not a supported task name.
    """
    _ = kwargs
    if task.lower() in {'segmentation', 'regression'}:
        return TinyViTSegmenter(history=history, in_channels=in_channels, hidden_dim=hidden_dim, out_dim=out_dim)
    raise ValueError(f"vit_segmenter supports task='segmentation' or 'regression', got {task!r}.")
int(num_boost_round) + self.booster = None + + def fit(self, x_train: np.ndarray, y_train: np.ndarray) -> None: + try: + import xgboost as xgb + except Exception: + return + dtrain = xgb.DMatrix(x_train, label=y_train) + self.booster = xgb.train(self.params, dtrain, num_boost_round=self.num_boost_round) + self._is_fitted = True + + def _predict_positive_proba(self, x_np: np.ndarray) -> np.ndarray: + if self._is_fitted and self.booster is not None: + import xgboost as xgb + return self.booster.predict(xgb.DMatrix(x_np)) + return super()._predict_positive_proba(x_np) + + +def xgboost_builder(task: str, **kwargs: Any) -> nn.Module: + require_task(task, {'classification'}, 'xgboost') + return XGBoostModel(**kwargs) + + +__all__ = ['XGBoostModel', 'xgboost_builder'] From fa4ff5cac2ce8e1aae6e16ac8d860458d9f9cb19 Mon Sep 17 00:00:00 2001 From: Runyang Xu Date: Thu, 26 Mar 2026 17:23:47 -0400 Subject: [PATCH 2/2] Fix wildfire model validation and hidden catalog entries --- .../modules/models_wildfire_forecasting.rst | 113 +++++++++++++++++ docs/source/modules/models_wildfire_mamba.rst | 115 ++++++++++++++++++ .../model_cards/wildfire_forecasting.yaml | 54 ++++++++ pyhazards/model_cards/wildfire_mamba.yaml | 58 +++++++++ pyhazards/models/earthfarseer.py | 5 +- pyhazards/models/lightgbm.py | 1 + pyhazards/models/logistic_regression.py | 1 + pyhazards/models/random_forest.py | 1 + pyhazards/models/xgboost.py | 1 + 9 files changed, 347 insertions(+), 2 deletions(-) create mode 100644 docs/source/modules/models_wildfire_forecasting.rst create mode 100644 docs/source/modules/models_wildfire_mamba.rst create mode 100644 pyhazards/model_cards/wildfire_forecasting.yaml create mode 100644 pyhazards/model_cards/wildfire_mamba.yaml diff --git a/docs/source/modules/models_wildfire_forecasting.rst b/docs/source/modules/models_wildfire_forecasting.rst new file mode 100644 index 00000000..f0816871 --- /dev/null +++ b/docs/source/modules/models_wildfire_forecasting.rst @@ -0,0 +1,113 @@ 
+.. This file is generated by scripts/render_model_docs.py. Do not edit by hand. + +:orphan: + +Wildfire Forecasting +==================== + +Overview +-------- + +``wildfire_forecasting`` is a compact GRU-attention forecaster for weekly wildfire activity windows. + +At a Glance +----------- + +.. grid:: 1 2 4 4 + :gutter: 2 + :class-container: catalog-grid + + .. grid-item-card:: Hazard Family + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + Wildfire + + .. container:: catalog-stat-note + + Public catalog grouping used for this model. + + .. grid-item-card:: Maturity + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + Hidden + + .. container:: catalog-stat-note + + Catalog maturity label used on the index page. + + .. grid-item-card:: Tasks + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + 1 + + .. container:: catalog-stat-note + + Forecasting + + .. grid-item-card:: Benchmark Family + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + Unmapped + + .. container:: catalog-stat-note + + Primary benchmark-family link used for compatible evaluation coverage. + + +Description +----------- + +``wildfire_forecasting`` is a compact GRU-attention forecaster for weekly wildfire activity windows. + +The PyHazards implementation targets smoke-testable next-window size-group prediction through the shared wildfire benchmark flow. + +Benchmark Compatibility +----------------------- + +**Primary benchmark family:** Not yet mapped. + +External References +------------------- + +**Paper:** `Wildfire Danger Prediction and Understanding with Deep Learning `_ | **Repo:** `Repository `__ + +Registry Name +------------- + +Primary entrypoint: ``wildfire_forecasting`` + +Supported Tasks +--------------- + +- Forecasting + +Programmatic Use +---------------- + +.. 
code-block:: python + + import torch + from pyhazards.models import build_model + + model = build_model( + name="wildfire_forecasting", + task="forecasting", + input_dim=7, + output_dim=5, + lookback=12, + ) + preds = model(torch.randn(2, 12, 7)) + print(preds.shape) + +Notes +----- + +- Retained as a hidden compatibility entry; it is no longer part of the public 31-model wildfire roster. diff --git a/docs/source/modules/models_wildfire_mamba.rst b/docs/source/modules/models_wildfire_mamba.rst new file mode 100644 index 00000000..bbb7a630 --- /dev/null +++ b/docs/source/modules/models_wildfire_mamba.rst @@ -0,0 +1,115 @@ +.. This file is generated by scripts/render_model_docs.py. Do not edit by hand. + +:orphan: + +Wildfire Mamba +============== + +Overview +-------- + +``wildfire_mamba`` models county-day ERA5 sequences by combining selective state-space temporal blocks with a simple spatial graph layer. + +At a Glance +----------- + +.. grid:: 1 2 4 4 + :gutter: 2 + :class-container: catalog-grid + + .. grid-item-card:: Hazard Family + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + Wildfire + + .. container:: catalog-stat-note + + Public catalog grouping used for this model. + + .. grid-item-card:: Maturity + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + Hidden + + .. container:: catalog-stat-note + + Catalog maturity label used on the index page. + + .. grid-item-card:: Tasks + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + 1 + + .. container:: catalog-stat-note + + Classification + + .. grid-item-card:: Benchmark Family + :class-card: catalog-stat-card + + .. container:: catalog-stat-value + + Unmapped + + .. container:: catalog-stat-note + + Primary benchmark-family link used for compatible evaluation coverage. + + +Description +----------- + +``wildfire_mamba`` models county-day ERA5 sequences by combining selective state-space temporal blocks with a simple spatial graph layer. 
+ +The PyHazards implementation targets binary next-day per-county wildfire classification and supports an optional count head for multi-task extensions. + +Benchmark Compatibility +----------------------- + +**Primary benchmark family:** Not yet mapped. + +External References +------------------- + +**Paper:** `Mamba: Linear-Time Sequence Modeling with Selective State Spaces `_ + +Registry Name +------------- + +Primary entrypoint: ``wildfire_mamba`` + +Supported Tasks +--------------- + +- Classification + +Programmatic Use +---------------- + +.. code-block:: python + + import torch + from pyhazards.models import build_model + + model = build_model( + name="wildfire_mamba", + task="classification", + in_dim=3, + num_counties=4, + past_days=5, + ) + + x = torch.randn(2, 5, 4, 3) + logits = model(x) + print(logits.shape) + +Notes +----- + +- Retained as a hidden compatibility entry; it is no longer part of the public 31-model wildfire roster. diff --git a/pyhazards/model_cards/wildfire_forecasting.yaml b/pyhazards/model_cards/wildfire_forecasting.yaml new file mode 100644 index 00000000..b5cafeb4 --- /dev/null +++ b/pyhazards/model_cards/wildfire_forecasting.yaml @@ -0,0 +1,54 @@ +model_name: wildfire_forecasting +display_name: Wildfire Forecasting +hazard: Wildfire +include_in_public_catalog: false +catalog_status: hidden +source_file: pyhazards/models/wildfire_forecasting.py +builder_name: wildfire_forecasting_builder +summary: > + A sequence forecasting baseline for next-window wildfire activity across weekly count features. +description: + - > + ``wildfire_forecasting`` is a compact GRU-attention forecaster for weekly wildfire + activity windows. + - > + The PyHazards implementation targets smoke-testable next-window size-group prediction + through the shared wildfire benchmark flow. 
+paper: + title: "Wildfire Danger Prediction and Understanding with Deep Learning" + url: https://doi.org/10.1029/2022GL099368 + repo_url: https://github.com/Orion-AI-Lab/wildfire_forecasting +tasks: + - forecasting + - regression +example: | + import torch + from pyhazards.models import build_model + + model = build_model( + name="wildfire_forecasting", + task="forecasting", + input_dim=7, + output_dim=5, + lookback=12, + ) + preds = model(torch.randn(2, 12, 7)) + print(preds.shape) +notes: + - "Retained as a hidden compatibility entry; it is no longer part of the public 31-model wildfire roster." +smoke_test: + task: forecasting + build_kwargs: + input_dim: 7 + hidden_dim: 32 + output_dim: 5 + lookback: 12 + num_layers: 2 + dropout: 0.0 + input: + kind: tensor + tensor: + shape: [2, 12, 7] + expected_output: + kind: tensor + shape: [2, 5] diff --git a/pyhazards/model_cards/wildfire_mamba.yaml b/pyhazards/model_cards/wildfire_mamba.yaml new file mode 100644 index 00000000..5fc2c8c9 --- /dev/null +++ b/pyhazards/model_cards/wildfire_mamba.yaml @@ -0,0 +1,58 @@ +model_name: wildfire_mamba +display_name: Wildfire Mamba +hazard: Wildfire +include_in_public_catalog: false +catalog_status: hidden +source_file: pyhazards/models/wildfire_mamba.py +builder_name: wildfire_mamba_builder +summary: > + A Mamba-inspired spatio-temporal wildfire model that mixes county-level temporal + encoders with a lightweight graph convolution over spatial adjacency. +description: + - > + ``wildfire_mamba`` models county-day ERA5 sequences by combining selective + state-space temporal blocks with a simple spatial graph layer. + - > + The PyHazards implementation targets binary next-day per-county wildfire + classification and supports an optional count head for multi-task extensions. 
+paper: + title: "Mamba: Linear-Time Sequence Modeling with Selective State Spaces" + url: https://arxiv.org/abs/2312.00752 +tasks: + - classification +example: | + import torch + from pyhazards.models import build_model + + model = build_model( + name="wildfire_mamba", + task="classification", + in_dim=3, + num_counties=4, + past_days=5, + ) + + x = torch.randn(2, 5, 4, 3) + logits = model(x) + print(logits.shape) +notes: + - "Retained as a hidden compatibility entry; it is no longer part of the public 31-model wildfire roster." +smoke_test: + task: classification + build_kwargs: + in_dim: 3 + num_counties: 4 + past_days: 5 + hidden_dim: 32 + gcn_hidden: 16 + mamba_layers: 2 + state_dim: 16 + conv_kernel: 3 + dropout: 0.0 + input: + kind: tensor + tensor: + shape: [2, 5, 4, 3] + expected_output: + kind: tensor + shape: [2, 4] diff --git a/pyhazards/models/earthfarseer.py b/pyhazards/models/earthfarseer.py index 9d4bce78..f9bfdd82 100644 --- a/pyhazards/models/earthfarseer.py +++ b/pyhazards/models/earthfarseer.py @@ -4,6 +4,7 @@ import torch import torch.nn as nn +import torch.nn.functional as F from ._wildfire_layers import check_sequence_input @@ -28,8 +29,8 @@ def forward(self, x: torch.Tensor) -> torch.Tensor: if x.size(1) != self.history: raise ValueError(f"TinyEarthFarseer expected history={self.history}, got {x.size(1)}.") x3d = x.permute(0, 2, 1, 3, 4) - f1 = torch.mean(torch.gelu(self.branch1(x3d)), dim=2) - f2 = torch.mean(torch.gelu(self.branch2(x3d)), dim=2) + f1 = torch.mean(F.gelu(self.branch1(x3d)), dim=2) + f2 = torch.mean(F.gelu(self.branch2(x3d)), dim=2) return self.project(torch.cat([f1, f2], dim=1)) diff --git a/pyhazards/models/lightgbm.py b/pyhazards/models/lightgbm.py index 59d76213..0e7445a1 100644 --- a/pyhazards/models/lightgbm.py +++ b/pyhazards/models/lightgbm.py @@ -42,6 +42,7 @@ def _predict_positive_proba(self, x_np: np.ndarray) -> np.ndarray: def lightgbm_builder(task: str, **kwargs: Any) -> nn.Module: require_task(task, 
def xgboost_builder(task: str, **kwargs: Any) -> nn.Module:
    """Registry entrypoint for the XGBoost wildfire occurrence baseline.

    Args:
        task: Must be ``"classification"``; validated by :func:`require_task`.
        **kwargs: Forwarded to :class:`XGBoostModel`, minus the registry-injected
            ``name`` key, which the estimator does not accept.

    Returns:
        A freshly constructed :class:`XGBoostModel`.
    """
    require_task(task, {'classification'}, 'xgboost')
    # Strip the registry's 'name' key; kwargs is a local dict so filtering
    # here is equivalent to the in-place pop and invisible to callers.
    params = {key: value for key, value in kwargs.items() if key != 'name'}
    return XGBoostModel(**params)