diff --git a/docs/architecture.md b/docs/architecture.md index 73045d56..2354fbbc 100644 --- a/docs/architecture.md +++ b/docs/architecture.md @@ -57,4 +57,7 @@ For convenience, we provide a loss handler that can be used to compute the full Refer to the section on [data](data.md) ## Exporting models -A future feature would be the possibility to save models in ONNX format, and export them that way. This would bring all the benefits of onnx. \ No newline at end of file +Models are saved using the PyTorch format, which basically serializes the model weights using pickle +and stores them in a binary file. + + diff --git a/docs/faq.md b/docs/faq.md deleted file mode 100644 index 32cce907..00000000 --- a/docs/faq.md +++ /dev/null @@ -1 +0,0 @@ -# FAQ \ No newline at end of file diff --git a/docs/index.md b/docs/index.md index 56581e3f..5461998f 100644 --- a/docs/index.md +++ b/docs/index.md @@ -1,19 +1,19 @@ -# Welcome to Terratorch +# Welcome to TerraTorch ## Overview The purpose of this library is twofold: -1. To integrate prithvi backbones into the TorchGeo framework -2. To provide generic LightningDataModules that can be built at runtime +1. To integrate prithvi backbones into the TorchGeo framework. +2. To provide generic LightningDataModules that can be built at runtime. 3. To build a flexible fine-tuning framework based on TorchGeo which can be interacted with at different abstraction levels. This library provides: -- All the functionality in TorchGeo -- Easy access to prithvi, timm and smp backbones -- Flexible trainers for Image Segmentation and Pixel Wise Regression (more in progress) -- Launching of fine-tuning tasks through powerful configuration files +- All the functionality in TorchGeo. +- Easy access to prithvi, timm and smp backbones. +- Flexible trainers for Image Segmentation, Pixel Wise Regression and Classification (more in progress). +- Launching of fine-tuning tasks through powerful configuration files. 
A good starting place is familiarization with [PyTorch Lightning](https://lightning.ai/docs/pytorch/stable/), which this project is built on, and to a certain extent [TorchGeo](https://torchgeo.readthedocs.io/en/stable/) diff --git a/docs/license.md b/docs/license.md new file mode 100644 index 00000000..261eeb9e --- /dev/null +++ b/docs/license.md @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/docs/quick_start.md b/docs/quick_start.md index 446ea1b4..6c4e8ee2 100644 --- a/docs/quick_start.md +++ b/docs/quick_start.md @@ -1,6 +1,6 @@ # Quick start -We suggest using Python>=3.10. -To get started, make sure to have [PyTorch](https://pytorch.org/get-started/locally/) >= 2.0.0 and [GDAL](https://gdal.org/index.html) installed. +We suggest using `3.10 <= Python <= 3.12`. +To get started, make sure to have [PyTorch](https://pytorch.org/get-started/locally/) `>= 2.0.0` and [GDAL](https://gdal.org/index.html) installed. Installing GDAL can be quite a complex process. If you don't have GDAL set up on your system, we reccomend using a conda environment and installing it with `conda install -c conda-forge gdal`. 
@@ -20,7 +20,8 @@ from terratorch import BACKBONE_REGISTRY # find available prithvi models print([model_name for model_name in BACKBONE_REGISTRY if "prithvi" in model_name]) ->>> ['timm_prithvi_swin_B', 'timm_prithvi_swin_L', 'timm_prithvi_vit_100', 'timm_prithvi_vit_300', 'timm_prithvi_vit_tiny'] +>>> ['timm_prithvi_eo_tiny', 'timm_prithvi_eo_v1_100', 'timm_prithvi_eo_v2_300', 'timm_prithvi_eo_v2_300_tl', 'timm_prithvi_eo_v2_600', + 'timm_prithvi_eo_v2_600_tl', 'timm_prithvi_swin_B', 'timm_prithvi_swin_L', 'timm_prithvi_vit_100', 'timm_prithvi_vit_tiny'] # show all models with list(BACKBONE_REGISTRY) @@ -35,7 +36,7 @@ print([model_name for model_name in BACKBONE_REGISTRY if "prithvi" in model_name # instantiate your desired model # the backbone registry prefix (in this case 'timm') is optional # in this case, the underlying registry is timm, so we can pass timm arguments to it -model = BACKBONE_REGISTRY.build("prithvi_vit_100", num_frames=1, pretrained=True) +model = BACKBONE_REGISTRY.build("prithvi_eo_v1_100", num_frames=1, pretrained=True) # instantiate your model with more options, for instance, passing weights of your own through timm model = BACKBONE_REGISTRY.build( @@ -78,7 +79,7 @@ model = model_factory.build_model(task="segmentation", HLSBands.SWIR_1, HLSBands.SWIR_2, ], - necks=[{"name": "SelectIndices", "indices": -1}, + necks=[{"name": "SelectIndices", "indices": [-1]}, {"name": "ReshapeTokensToImage"}], num_classes=4, backbone_pretrained=True, @@ -88,6 +89,10 @@ model = model_factory.build_model(task="segmentation", ) # Rest of your PyTorch / PyTorchLightning code +. +. +. + ``` ## Training with Lightning Tasks @@ -127,7 +132,7 @@ task = PixelwiseRegressionTask( # Pass this LightningModule to a Lightning Trainer, together with some LightningDataModule ``` - +Alternatively, all the process can be summarized in configuration files written in YAML format, as seen below. 
```yaml title="Configuration file for a Semantic Segmentation Task" # lightning.pytorch==2.1.1 seed_everything: 0 @@ -220,8 +225,17 @@ lr_scheduler: ``` -To run this training task, simply execute `terratorch fit --config ` +To run this training task using the YAML, simply execute: +```sh +terratorch fit --config +``` -To test your model on the test set, execute `terratorch test --config --ckpt_path ` +To test your model on the test set, execute: +```sh +terratorch test --config --ckpt_path +``` -For inference, execute `terratorch predict -c --ckpt_path --predict_output_dir --data.init_args.predict_data_root --data.init_args.predict_dataset_bands ` +For inference, execute: +```sh +terratorch predict -c --ckpt_path --predict_output_dir --data.init_args.predict_data_root --data.init_args.predict_dataset_bands +``` diff --git a/docs/registry.md b/docs/registry.md index 8575c1b1..06ceb2bc 100644 --- a/docs/registry.md +++ b/docs/registry.md @@ -1,6 +1,6 @@ # Registries -Terratorch keeps a set of registries which map strings to instances of those strings. They can be imported from `terratorch.registry`. +TerraTorch keeps a set of registries which map strings to instances of those strings. They can be imported from `terratorch.registry`. !!! info If you are using tasks with existing models, you may never have to interact with registries directly. The [model factory](models.md#model-factories) will handle interactions with registries. @@ -72,4 +72,4 @@ To add a new registry to these top level registries, you should use the `.regist ## Other Registries -Additionally, terratorch has the `NECK_REGISTRY`, where all necks must be registered, and the `MODEL_FACTORY_REGISTRY`, where all model factories must be registered. \ No newline at end of file +Additionally, terratorch has the `NECK_REGISTRY`, where all necks must be registered, and the `MODEL_FACTORY_REGISTRY`, where all model factories must be registered. 
diff --git a/mkdocs.yml b/mkdocs.yml index 1f85f6bb..3105bbf0 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -1,6 +1,6 @@ site_name: TerraTorch theme: - name: material + name: readthedocs #material palette: scheme: slate features: @@ -31,8 +31,9 @@ nav: - Registries: registry.md - EncoderDecoderFactory: encoder_decoder_factory.md - Examples: examples.md - - FAQ: faq.md + #- FAQ: faq.md - For Developers: architecture.md + - License: license.md markdown_extensions: - pymdownx.highlight: diff --git a/tests/resources/configs/manufactured-finetune_prithvi_eo_v2_300.yaml b/tests/resources/configs/manufactured-finetune_prithvi_eo_v2_300.yaml new file mode 100644 index 00000000..3e44a1c5 --- /dev/null +++ b/tests/resources/configs/manufactured-finetune_prithvi_eo_v2_300.yaml @@ -0,0 +1,150 @@ +# lightning.pytorch==2.1.1 +seed_everything: 42 +trainer: + accelerator: cpu + strategy: auto + devices: auto + num_nodes: 1 + # precision: 16-mixed + logger: + class_path: TensorBoardLogger + init_args: + save_dir: tests/ + name: all_ecos_random + callbacks: + - class_path: RichProgressBar + - class_path: LearningRateMonitor + init_args: + logging_interval: epoch + - class_path: EarlyStopping + init_args: + monitor: val/loss + patience: 100 + max_epochs: 2 + check_val_every_n_epoch: 1 + log_every_n_steps: 20 + enable_checkpointing: true + default_root_dir: tests/ +data: + class_path: GenericNonGeoPixelwiseRegressionDataModule + init_args: + batch_size: 2 + num_workers: 4 + train_transform: + #- class_path: albumentations.HorizontalFlip + # init_args: + # p: 0.5 + #- class_path: albumentations.Rotate + # init_args: + # limit: 30 + # border_mode: 0 # cv2.BORDER_CONSTANT + # value: 0 + # # mask_value: 1 + # p: 0.5 + - class_path: ToTensorV2 + dataset_bands: + - 0 + - BLUE + - GREEN + - RED + - NIR_NARROW + - SWIR_1 + - SWIR_2 + - 1 + - 2 + - 3 + - 4 + output_bands: + - BLUE + - GREEN + - RED + - NIR_NARROW + - SWIR_1 + - SWIR_2 + rgb_indices: + - 2 + - 1 + - 0 + train_data_root: 
tests/resources/inputs + train_label_data_root: tests/resources/inputs + val_data_root: tests/resources/inputs + val_label_data_root: tests/resources/inputs + test_data_root: tests/resources/inputs + test_label_data_root: tests/resources/inputs + img_grep: "regression*input*.tif" + label_grep: "regression*label*.tif" + means: + - 547.36707 + - 898.5121 + - 1020.9082 + - 2665.5352 + - 2340.584 + - 1610.1407 + stds: + - 411.4701 + - 558.54065 + - 815.94025 + - 812.4403 + - 1113.7145 + - 1067.641 + no_label_replace: -1 + no_data_replace: 0 + +model: + class_path: terratorch.tasks.PixelwiseRegressionTask + init_args: + model_args: + decoder: UperNetDecoder + pretrained: false + backbone: prithvi_eo_v2_300 + # backbone_pretrained_cfg_overlay: + # file: tests/prithvi_vit_300.pt + backbone_drop_path_rate: 0.3 + # backbone_window_size: 8 + decoder_channels: 64 + num_frames: 1 + in_channels: 6 + bands: + - BLUE + - GREEN + - RED + - NIR_NARROW + - SWIR_1 + - SWIR_2 + head_dropout: 0.5708022831486758 + head_final_act: torch.nn.ReLU + head_learned_upscale_layers: 2 + loss: rmse + #aux_heads: + # - name: aux_head + # decoder: IdentityDecoder + # decoder_args: + # decoder_out_index: 2 + # head_dropout: 0,5 + # head_channel_list: + # - 64 + # head_final_act: torch.nn.ReLU + #aux_loss: + # aux_head: 0.4 + ignore_index: -1 + freeze_backbone: true + freeze_decoder: false + model_factory: PrithviModelFactory + + # uncomment this block for tiled inference + # tiled_inference_parameters: + # h_crop: 224 + # h_stride: 192 + # w_crop: 224 + # w_stride: 192 + # average_patches: true +optimizer: + class_path: torch.optim.AdamW + init_args: + lr: 0.00013524680528283027 + weight_decay: 0.047782217873995426 +lr_scheduler: + class_path: ReduceLROnPlateau + init_args: + monitor: val/loss + diff --git a/tests/resources/configs/manufactured-finetune_prithvi_eo_v2_600.yaml b/tests/resources/configs/manufactured-finetune_prithvi_eo_v2_600.yaml new file mode 100644 index 00000000..292d229c --- 
/dev/null +++ b/tests/resources/configs/manufactured-finetune_prithvi_eo_v2_600.yaml @@ -0,0 +1,150 @@ +# lightning.pytorch==2.1.1 +seed_everything: 42 +trainer: + accelerator: cpu + strategy: auto + devices: auto + num_nodes: 1 + # precision: 16-mixed + logger: + class_path: TensorBoardLogger + init_args: + save_dir: tests/ + name: all_ecos_random + callbacks: + - class_path: RichProgressBar + - class_path: LearningRateMonitor + init_args: + logging_interval: epoch + - class_path: EarlyStopping + init_args: + monitor: val/loss + patience: 100 + max_epochs: 2 + check_val_every_n_epoch: 1 + log_every_n_steps: 20 + enable_checkpointing: true + default_root_dir: tests/ +data: + class_path: GenericNonGeoPixelwiseRegressionDataModule + init_args: + batch_size: 2 + num_workers: 4 + train_transform: + #- class_path: albumentations.HorizontalFlip + # init_args: + # p: 0.5 + #- class_path: albumentations.Rotate + # init_args: + # limit: 30 + # border_mode: 0 # cv2.BORDER_CONSTANT + # value: 0 + # # mask_value: 1 + # p: 0.5 + - class_path: ToTensorV2 + dataset_bands: + - 0 + - BLUE + - GREEN + - RED + - NIR_NARROW + - SWIR_1 + - SWIR_2 + - 1 + - 2 + - 3 + - 4 + output_bands: + - BLUE + - GREEN + - RED + - NIR_NARROW + - SWIR_1 + - SWIR_2 + rgb_indices: + - 2 + - 1 + - 0 + train_data_root: tests/resources/inputs + train_label_data_root: tests/resources/inputs + val_data_root: tests/resources/inputs + val_label_data_root: tests/resources/inputs + test_data_root: tests/resources/inputs + test_label_data_root: tests/resources/inputs + img_grep: "regression*input*.tif" + label_grep: "regression*label*.tif" + means: + - 547.36707 + - 898.5121 + - 1020.9082 + - 2665.5352 + - 2340.584 + - 1610.1407 + stds: + - 411.4701 + - 558.54065 + - 815.94025 + - 812.4403 + - 1113.7145 + - 1067.641 + no_label_replace: -1 + no_data_replace: 0 + +model: + class_path: terratorch.tasks.PixelwiseRegressionTask + init_args: + model_args: + decoder: UperNetDecoder + pretrained: false + backbone: 
prithvi_eo_v2_600 + # backbone_pretrained_cfg_overlay: + # file: tests/prithvi_vit_300.pt + backbone_drop_path_rate: 0.3 + # backbone_window_size: 8 + decoder_channels: 64 + num_frames: 1 + in_channels: 6 + bands: + - BLUE + - GREEN + - RED + - NIR_NARROW + - SWIR_1 + - SWIR_2 + head_dropout: 0.5708022831486758 + head_final_act: torch.nn.ReLU + head_learned_upscale_layers: 2 + loss: rmse + #aux_heads: + # - name: aux_head + # decoder: IdentityDecoder + # decoder_args: + # decoder_out_index: 2 + # head_dropout: 0,5 + # head_channel_list: + # - 64 + # head_final_act: torch.nn.ReLU + #aux_loss: + # aux_head: 0.4 + ignore_index: -1 + freeze_backbone: true + freeze_decoder: false + model_factory: PrithviModelFactory + + # uncomment this block for tiled inference + # tiled_inference_parameters: + # h_crop: 224 + # h_stride: 192 + # w_crop: 224 + # w_stride: 192 + # average_patches: true +optimizer: + class_path: torch.optim.AdamW + init_args: + lr: 0.00013524680528283027 + weight_decay: 0.047782217873995426 +lr_scheduler: + class_path: ReduceLROnPlateau + init_args: + monitor: val/loss + diff --git a/tests/resources/configs/manufactured-finetune_prithvi_swin_B_band_interval.yaml b/tests/resources/configs/manufactured-finetune_prithvi_swin_B_band_interval.yaml index a9d4145e..44bb31a4 100644 --- a/tests/resources/configs/manufactured-finetune_prithvi_swin_B_band_interval.yaml +++ b/tests/resources/configs/manufactured-finetune_prithvi_swin_B_band_interval.yaml @@ -31,16 +31,16 @@ data: batch_size: 2 num_workers: 4 train_transform: - - class_path: albumentations.HorizontalFlip - init_args: - p: 0.5 - - class_path: albumentations.Rotate - init_args: - limit: 30 - border_mode: 0 # cv2.BORDER_CONSTANT - value: 0 - # mask_value: 1 - p: 0.5 + #- class_path: albumentations.HorizontalFlip + # init_args: + # p: 0.5 + #- class_path: albumentations.Rotate + # init_args: + # limit: 30 + # border_mode: 0 # cv2.BORDER_CONSTANT + # value: 0 + # # mask_value: 1 + # p: 0.5 - class_path: 
ToTensorV2 dataset_bands: - [0, 11] diff --git a/tests/resources/configs/manufactured-finetune_prithvi_swin_B_metrics_from_file.yaml b/tests/resources/configs/manufactured-finetune_prithvi_swin_B_metrics_from_file.yaml index 9005547b..79a1263b 100644 --- a/tests/resources/configs/manufactured-finetune_prithvi_swin_B_metrics_from_file.yaml +++ b/tests/resources/configs/manufactured-finetune_prithvi_swin_B_metrics_from_file.yaml @@ -31,16 +31,16 @@ data: batch_size: 2 num_workers: 4 train_transform: - - class_path: albumentations.HorizontalFlip - init_args: - p: 0.5 - - class_path: albumentations.Rotate - init_args: - limit: 30 - border_mode: 0 # cv2.BORDER_CONSTANT - value: 0 - # mask_value: 1 - p: 0.5 + #- class_path: albumentations.HorizontalFlip + # init_args: + # p: 0.5 + #- class_path: albumentations.Rotate + # init_args: + # limit: 30 + # border_mode: 0 # cv2.BORDER_CONSTANT + # value: 0 + # # mask_value: 1 + # p: 0.5 - class_path: ToTensorV2 dataset_bands: - [0, 11] diff --git a/tests/resources/configs/manufactured-finetune_prithvi_swin_B_string.yaml b/tests/resources/configs/manufactured-finetune_prithvi_swin_B_string.yaml index 73813b6d..d2451a5a 100644 --- a/tests/resources/configs/manufactured-finetune_prithvi_swin_B_string.yaml +++ b/tests/resources/configs/manufactured-finetune_prithvi_swin_B_string.yaml @@ -31,16 +31,16 @@ data: batch_size: 2 num_workers: 4 train_transform: - - class_path: albumentations.HorizontalFlip - init_args: - p: 0.5 - - class_path: albumentations.Rotate - init_args: - limit: 30 - border_mode: 0 # cv2.BORDER_CONSTANT - value: 0 - # mask_value: 1 - p: 0.5 + #- class_path: albumentations.HorizontalFlip + # init_args: + # p: 0.5 + #- class_path: albumentations.Rotate + # init_args: + # limit: 30 + # border_mode: 0 # cv2.BORDER_CONSTANT + # value: 0 + # # mask_value: 1 + # p: 0.5 - class_path: ToTensorV2 dataset_bands: - "band_1" diff --git a/tests/resources/configs/manufactured-finetune_prithvi_vit_300.yaml 
b/tests/resources/configs/manufactured-finetune_prithvi_vit_300.yaml index 2f762232..3e44a1c5 100644 --- a/tests/resources/configs/manufactured-finetune_prithvi_vit_300.yaml +++ b/tests/resources/configs/manufactured-finetune_prithvi_vit_300.yaml @@ -96,7 +96,7 @@ model: model_args: decoder: UperNetDecoder pretrained: false - backbone: prithvi_vit_300 + backbone: prithvi_eo_v2_300 # backbone_pretrained_cfg_overlay: # file: tests/prithvi_vit_300.pt backbone_drop_path_rate: 0.3 diff --git a/tests/test_backbones.py b/tests/test_backbones.py index f76cb6ee..df60fad4 100644 --- a/tests/test_backbones.py +++ b/tests/test_backbones.py @@ -35,7 +35,7 @@ def input_386(): return torch.ones((1, NUM_CHANNELS, 386, 386)) -@pytest.mark.parametrize("model_name", ["prithvi_eo_v1_100", "prithvi_eo_v2_300", "prithvi_swin_B"]) +@pytest.mark.parametrize("model_name", ["prithvi_swin_B", "prithvi_swin_L", "prithvi_eo_v1_100", "prithvi_eo_v2_300", "prithvi_swin_B"]) @pytest.mark.parametrize("test_input", ["input_224", "input_512"]) def test_can_create_backbones_from_timm(model_name, test_input, request): backbone = timm.create_model(model_name, pretrained=False) @@ -43,14 +43,15 @@ def test_can_create_backbones_from_timm(model_name, test_input, request): backbone(input_tensor) gc.collect() -@pytest.mark.parametrize("model_name", ["prithvi_eo_v1_100", "prithvi_eo_v2_300", "prithvi_swin_B"]) +@pytest.mark.parametrize("model_name", ["prithvi_swin_B", "prithvi_swin_L", "prithvi_eo_v1_100", "prithvi_eo_v2_300", "prithvi_swin_B"]) @pytest.mark.parametrize("test_input", ["input_224", "input_512"]) def test_can_create_backbones_from_timm_features_only(model_name, test_input, request): backbone = timm.create_model(model_name, pretrained=False, features_only=True) input_tensor = request.getfixturevalue(test_input) backbone(input_tensor) gc.collect() -@pytest.mark.parametrize("model_name", ["prithvi_eo_v1_100", "prithvi_eo_v2_300", "prithvi_swin_B"]) + +@pytest.mark.parametrize("model_name", 
["prithvi_swin_L", "prithvi_swin_L", "prithvi_eo_v1_100", "prithvi_eo_v2_300", "prithvi_swin_B"]) @pytest.mark.parametrize("prefix", ["", "timm_"]) def test_can_create_timm_backbones_from_registry(model_name, input_224, prefix): backbone = BACKBONE_REGISTRY.build(prefix+model_name, pretrained=False) @@ -62,12 +63,14 @@ def test_vit_models_accept_multitemporal(model_name, input_224_multitemporal): backbone = timm.create_model(model_name, pretrained=False, num_frames=NUM_FRAMES) backbone(input_224_multitemporal) gc.collect() + @pytest.mark.parametrize("model_name", ["prithvi_eo_v1_100", "prithvi_eo_v2_300"]) def test_vit_models_non_divisible_input(model_name, input_non_divisible): #padding 'none','constant', 'reflect', 'replicate' or 'circular' default is 'none' backbone = timm.create_model(model_name, pretrained=False, features_only=True, num_frames=NUM_FRAMES, padding='constant') backbone(input_non_divisible) gc.collect() + @pytest.mark.parametrize("model_name", ["prithvi_eo_v1_100", "prithvi_eo_v2_300"]) @pytest.mark.parametrize("patch_size", [8, 16]) @pytest.mark.parametrize("patch_size_time", [1, 2, 4]) diff --git a/tests/test_finetune.py b/tests/test_finetune.py index e003b7f1..9c06e8da 100644 --- a/tests/test_finetune.py +++ b/tests/test_finetune.py @@ -22,7 +22,7 @@ def setup_and_cleanup(model_name): if os.path.isdir(os.path.join("tests", "all_ecos_random")): shutil.rmtree(os.path.join("tests", "all_ecos_random")) -@pytest.mark.parametrize("model_name", ["prithvi_swin_B", "prithvi_vit_100"]) +@pytest.mark.parametrize("model_name", ["prithvi_swin_B", "prithvi_swin_L", "prithvi_vit_100", "prithvi_eo_v2_300", "prithvi_eo_v2_600"]) @pytest.mark.parametrize("case", ["fit", "test", "validate"]) def test_finetune_multiple_backbones(model_name, case): command_list = [case, "-c", f"tests/resources/configs/manufactured-finetune_{model_name}.yaml"]