diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
new file mode 100644
index 00000000..3365cbe1
--- /dev/null
+++ b/.github/CODEOWNERS
@@ -0,0 +1,2 @@
+# These owners will be requested for review when someone opens a pull request.
+* @ASFHyP3/platform
diff --git a/.github/actions/deploy/action.yml b/.github/actions/deploy/action.yml
new file mode 100644
index 00000000..e9e31822
--- /dev/null
+++ b/.github/actions/deploy/action.yml
@@ -0,0 +1,58 @@
+name: Deploy ITS_LIVE Monitoring
+description: Deploy the ITS_LIVE Monitoring system
+
+inputs:
+ STACK_NAME:
+ required: true
+ AWS_ACCESS_KEY_ID:
+ required: true
+ AWS_SECRET_ACCESS_KEY:
+ required: true
+ CF_TEMPLATE_BUCKET:
+ required: true
+ LANDSAT_TOPIC_ARN:
+ required: true
+ HYP3_API:
+ required: true
+ LAMBDA_LOGGING_LEVEL:
+ required: true
+ EARTHDATA_USERNAME:
+ required: true
+ EARTHDATA_PASSWORD:
+ required: true
+
+runs:
+ using: composite
+ steps:
+ - uses: aws-actions/configure-aws-credentials@v1
+ with:
+ aws-access-key-id: ${{ inputs.AWS_ACCESS_KEY_ID }}
+ aws-secret-access-key: ${{ inputs.AWS_SECRET_ACCESS_KEY }}
+ aws-region: us-west-2
+
+ - uses: actions/setup-python@v3
+ with:
+ python-version: 3.12
+
+ - name: Install
+ shell: bash
+ run: make install-lambda-deps
+
+ - name: Package and deploy
+ shell: bash
+ run: |
+ aws cloudformation package \
+ --template-file landsat/cloudformation.yml \
+ --s3-bucket=${{ inputs.CF_TEMPLATE_BUCKET }} \
+ --output-template-file packaged.yml
+
+ aws cloudformation deploy \
+ --template-file packaged.yml \
+ --stack-name=${{ inputs.STACK_NAME }} \
+ --capabilities CAPABILITY_IAM \
+ --parameter-overrides \
+ LandsatTopicArn=${{ inputs.LANDSAT_TOPIC_ARN }} \
+ Hyp3Api=${{ inputs.HYP3_API }} \
+ LambdaLoggingLevel=${{ inputs.LAMBDA_LOGGING_LEVEL }} \
+ EarthdataUsername=${{ inputs.EARTHDATA_USERNAME }} \
+ EarthdataPassword=${{ inputs.EARTHDATA_PASSWORD }}
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 00000000..383b1536
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,19 @@
+# To get started with Dependabot version updates, you'll need to specify which
+# package ecosystems to update and where the package manifests are located.
+# Please see the documentation for all configuration options:
+# https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file
+
+version: 2
+updates:
+ - package-ecosystem: "pip"
+ directory: "/"
+ schedule:
+ interval: "daily"
+ labels:
+ - "bumpless"
+ - package-ecosystem: "github-actions"
+ directory: "/"
+ schedule:
+ interval: "daily"
+ labels:
+ - "bumpless"
diff --git a/.github/workflows/changelog.yml b/.github/workflows/changelog.yml
new file mode 100644
index 00000000..3ce6f429
--- /dev/null
+++ b/.github/workflows/changelog.yml
@@ -0,0 +1,18 @@
+name: Changelog updated?
+
+on:
+ pull_request:
+ types:
+ - opened
+ - labeled
+ - unlabeled
+ - synchronize
+ branches:
+ - main
+ - develop
+
+jobs:
+ call-changelog-check-workflow:
+ uses: ASFHyP3/actions/.github/workflows/reusable-changelog-check.yml@v0.11.0
+ secrets:
+ USER_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/deploy-prod.yml b/.github/workflows/deploy-prod.yml
new file mode 100644
index 00000000..6a33d529
--- /dev/null
+++ b/.github/workflows/deploy-prod.yml
@@ -0,0 +1,34 @@
+name: Deploy ITS_LIVE Monitoring Production
+
+on:
+ push:
+ branches:
+ - main
+
+concurrency: deploy-prod
+
+jobs:
+ deploy:
+ environment: prod
+ runs-on: ubuntu-latest
+
+ steps:
+ - uses: actions/checkout@v4
+ - uses: ./.github/actions/deploy
+ with:
+ STACK_NAME: its-live-monitoring-prod
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ EARTHDATA_USERNAME: ${{ secrets.EARTHDATA_USERNAME }}
+ EARTHDATA_PASSWORD: ${{ secrets.EARTHDATA_PASSWORD }}
+ CF_TEMPLATE_BUCKET: cf-templates-3o5lnspmwmzg-us-west-2
+ LANDSAT_TOPIC_ARN: arn:aws:sns:us-west-2:673253540267:public-c2-notify-v2
+ HYP3_API: https://hyp3-its-live.asf.alaska.edu
+ LAMBDA_LOGGING_LEVEL: INFO
+
+
+ call-bump-version-workflow:
+ needs: deploy
+ uses: ASFHyP3/actions/.github/workflows/reusable-bump-version.yml@v0.11.0
+ secrets:
+ USER_TOKEN: ${{ secrets.TOOLS_BOT_PAK }}
diff --git a/.github/workflows/deploy-test.yml b/.github/workflows/deploy-test.yml
new file mode 100644
index 00000000..8b955353
--- /dev/null
+++ b/.github/workflows/deploy-test.yml
@@ -0,0 +1,27 @@
+name: Deploy ITS_LIVE Monitoring Test
+
+on:
+ push:
+ branches:
+ - develop
+
+concurrency: deploy-test
+
+jobs:
+ deploy:
+ environment: test
+ runs-on: ubuntu-latest
+
+ steps:
+ - uses: actions/checkout@v4
+ - uses: ./.github/actions/deploy
+ with:
+ STACK_NAME: its-live-monitoring-test
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ EARTHDATA_USERNAME: ${{ secrets.EARTHDATA_USERNAME }}
+ EARTHDATA_PASSWORD: ${{ secrets.EARTHDATA_PASSWORD }}
+ CF_TEMPLATE_BUCKET: cf-templates-3o5lnspmwmzg-us-west-2
+ LANDSAT_TOPIC_ARN: arn:aws:sns:us-west-2:986442313181:its-live-notify-test
+ HYP3_API: https://hyp3-its-live.asf.alaska.edu
+ LAMBDA_LOGGING_LEVEL: DEBUG
diff --git a/.github/workflows/labeled-pr.yml b/.github/workflows/labeled-pr.yml
new file mode 100644
index 00000000..66ba502e
--- /dev/null
+++ b/.github/workflows/labeled-pr.yml
@@ -0,0 +1,15 @@
+name: Is PR labeled?
+
+on:
+ pull_request:
+ types:
+ - opened
+ - labeled
+ - unlabeled
+ - synchronize
+ branches:
+ - main
+
+jobs:
+ call-labeled-pr-check-workflow:
+ uses: ASFHyP3/actions/.github/workflows/reusable-labeled-pr-check.yml@v0.11.0
diff --git a/.github/workflows/release-checklist-comment.yml b/.github/workflows/release-checklist-comment.yml
new file mode 100644
index 00000000..77ac46bd
--- /dev/null
+++ b/.github/workflows/release-checklist-comment.yml
@@ -0,0 +1,16 @@
+name: Create Release Comment
+
+on:
+ pull_request:
+ types:
+ - opened
+ branches:
+ - main
+
+jobs:
+ call-release-workflow:
+ uses: ASFHyP3/actions/.github/workflows/reusable-release-checklist-comment.yml@v0.11.0
+ permissions:
+ pull-requests: write
+ secrets:
+ USER_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
new file mode 100644
index 00000000..1add01c2
--- /dev/null
+++ b/.github/workflows/release.yml
@@ -0,0 +1,14 @@
+name: Create Release
+
+on:
+ push:
+ tags:
+ - 'v*'
+
+jobs:
+ call-release-workflow:
+ uses: ASFHyP3/actions/.github/workflows/reusable-release.yml@v0.11.0
+ with:
+ release_prefix: its-live-monitoring
+ secrets:
+ USER_TOKEN: ${{ secrets.TOOLS_BOT_PAK }}
diff --git a/.github/workflows/static-analysis.yml b/.github/workflows/static-analysis.yml
new file mode 100644
index 00000000..dc4961dc
--- /dev/null
+++ b/.github/workflows/static-analysis.yml
@@ -0,0 +1,17 @@
+name: Static analysis
+
+on: push
+
+jobs:
+ call-ruff-workflow:
+ uses: ASFHyP3/actions/.github/workflows/reusable-ruff.yml@v0.11.0
+
+ cfn-lint:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - uses: actions/setup-python@v5
+ with:
+ python-version: 3.12
+ - run: make install
+ - run: make cfn-lint
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 00000000..1bc83c4e
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,310 @@
+# Created by https://www.toptal.com/developers/gitignore/api/python,vim,jetbrains
+# Edit at https://www.toptal.com/developers/gitignore?templates=python,vim,jetbrains
+
+### JetBrains ###
+# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
+# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
+
+# User-specific stuff
+.idea/**/workspace.xml
+.idea/**/tasks.xml
+.idea/**/usage.statistics.xml
+.idea/**/dictionaries
+.idea/**/shelf
+
+# AWS User-specific
+.idea/**/aws.xml
+
+# Generated files
+.idea/**/contentModel.xml
+
+# Sensitive or high-churn files
+.idea/**/dataSources/
+.idea/**/dataSources.ids
+.idea/**/dataSources.local.xml
+.idea/**/sqlDataSources.xml
+.idea/**/dynamic.xml
+.idea/**/uiDesigner.xml
+.idea/**/dbnavigator.xml
+
+# Gradle
+.idea/**/gradle.xml
+.idea/**/libraries
+
+# Gradle and Maven with auto-import
+# When using Gradle or Maven with auto-import, you should exclude module files,
+# since they will be recreated, and may cause churn. Uncomment if using
+# auto-import.
+# .idea/artifacts
+# .idea/compiler.xml
+# .idea/jarRepositories.xml
+# .idea/modules.xml
+# .idea/*.iml
+# .idea/modules
+# *.iml
+# *.ipr
+
+# CMake
+cmake-build-*/
+
+# Mongo Explorer plugin
+.idea/**/mongoSettings.xml
+
+# File-based project format
+*.iws
+
+# IntelliJ
+out/
+
+# mpeltonen/sbt-idea plugin
+.idea_modules/
+
+# JIRA plugin
+atlassian-ide-plugin.xml
+
+# Cursive Clojure plugin
+.idea/replstate.xml
+
+# SonarLint plugin
+.idea/sonarlint/
+
+# Crashlytics plugin (for Android Studio and IntelliJ)
+com_crashlytics_export_strings.xml
+crashlytics.properties
+crashlytics-build.properties
+fabric.properties
+
+# Editor-based Rest Client
+.idea/httpRequests
+
+# Android studio 3.1+ serialized cache file
+.idea/caches/build_file_checksums.ser
+
+### JetBrains Patch ###
+# Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721
+
+# *.iml
+# modules.xml
+# .idea/misc.xml
+# *.ipr
+
+# Sonarlint plugin
+# https://plugins.jetbrains.com/plugin/7973-sonarlint
+.idea/**/sonarlint/
+
+# SonarQube Plugin
+# https://plugins.jetbrains.com/plugin/7238-sonarqube-community-plugin
+.idea/**/sonarIssues.xml
+
+# Markdown Navigator plugin
+# https://plugins.jetbrains.com/plugin/7896-markdown-navigator-enhanced
+.idea/**/markdown-navigator.xml
+.idea/**/markdown-navigator-enh.xml
+.idea/**/markdown-navigator/
+
+# Cache file creation bug
+# See https://youtrack.jetbrains.com/issue/JBR-2257
+.idea/$CACHE_FILE$
+
+# CodeStream plugin
+# https://plugins.jetbrains.com/plugin/12206-codestream
+.idea/codestream.xml
+
+# Azure Toolkit for IntelliJ plugin
+# https://plugins.jetbrains.com/plugin/8053-azure-toolkit-for-intellij
+.idea/**/azureSettings.xml
+
+### Python ###
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+cover/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+.pybuilder/
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+# For a library or package, you might want to ignore these files since the code is
+# intended to run in multiple environments; otherwise, check them in:
+# .python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# poetry
+# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
+# This is especially recommended for binary packages to ensure reproducibility, and is more
+# commonly ignored for libraries.
+# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
+#poetry.lock
+
+# pdm
+# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
+#pdm.lock
+# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
+# in version control.
+# https://pdm.fming.dev/#use-with-ide
+.pdm.toml
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+# Cython debug symbols
+cython_debug/
+
+# PyCharm
+# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
+# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
+# and can be added to the global gitignore or merged into this file. For a more nuclear
+# option (not recommended) you can uncomment the following to ignore the entire idea folder.
+#.idea/
+
+### Python Patch ###
+# Poetry local configuration file - https://python-poetry.org/docs/configuration/#local-configuration
+poetry.toml
+
+# ruff
+.ruff_cache/
+
+# LSP config files
+pyrightconfig.json
+
+### Vim ###
+# Swap
+[._]*.s[a-v][a-z]
+!*.svg # comment out if you don't need vector files
+[._]*.sw[a-p]
+[._]s[a-rt-v][a-z]
+[._]ss[a-gi-z]
+[._]sw[a-p]
+
+# Session
+Session.vim
+Sessionx.vim
+
+# Temporary
+.netrwhist
+*~
+# Auto-generated tag files
+tags
+# Persistent undo
+[._]*.un~
+
+# End of https://www.toptal.com/developers/gitignore/api/python,vim,jetbrains
diff --git a/CHANGELOG.md b/CHANGELOG.md
new file mode 100644
index 00000000..41ca5063
--- /dev/null
+++ b/CHANGELOG.md
@@ -0,0 +1,11 @@
+# Changelog
+All notable changes to this project will be documented in this file.
+
+The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
+and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+
+## [0.0.1]
+
+### Added
+- Initial release of its-live-monitoring. The application monitors for newly published Landsat 8/9 scenes over 50
+  Landsat tiles and submits a stack of AUTORIFT jobs for each to hyp3-its-live.asf.alaska.edu for processing.
diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
new file mode 100644
index 00000000..4547fa8c
--- /dev/null
+++ b/CODE_OF_CONDUCT.md
@@ -0,0 +1,129 @@
+# Contributor Covenant Code of Conduct
+
+## Our Pledge
+
+We as members, contributors, and leaders pledge to make participation in our
+community a harassment-free experience for everyone, regardless of age, body
+size, visible or invisible disability, ethnicity, sex characteristics, gender
+identity and expression, level of experience, education, socio-economic status,
+nationality, personal appearance, race, religion, or sexual identity
+and orientation.
+
+We pledge to act and interact in ways that contribute to an open, welcoming,
+diverse, inclusive, and healthy community.
+
+## Our Standards
+
+Examples of behavior that contributes to a positive environment for our
+community include:
+
+* Demonstrating empathy and kindness toward other people
+* Being respectful of differing opinions, viewpoints, and experiences
+* Giving and gracefully accepting constructive feedback
+* Accepting responsibility and apologizing to those affected by our mistakes,
+ and learning from the experience
+* Focusing on what is best not just for us as individuals, but for the
+ overall community
+
+Examples of unacceptable behavior include:
+
+* The use of sexualized language or imagery, and sexual attention or
+ advances of any kind
+* Trolling, insulting or derogatory comments, and personal or political attacks
+* Public or private harassment
+* Publishing others' private information, such as a physical or email
+ address, without their explicit permission
+* Other conduct which could reasonably be considered inappropriate in a
+ professional setting
+
+## Enforcement Responsibilities
+
+Community leaders are responsible for clarifying and enforcing our standards of
+acceptable behavior and will take appropriate and fair corrective action in
+response to any behavior that they deem inappropriate, threatening, offensive,
+or harmful.
+
+Community leaders have the right and responsibility to remove, edit, or reject
+comments, commits, code, wiki edits, issues, and other contributions that are
+not aligned to this Code of Conduct, and will communicate reasons for moderation
+decisions when appropriate.
+
+## Scope
+
+This Code of Conduct applies within all community spaces, and also applies when
+an individual is officially representing the community in public spaces.
+Examples of representing our community include using an official e-mail address,
+posting via an official social media account, or acting as an appointed
+representative at an online or offline event.
+
+## Enforcement
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be
+reported to the community leaders responsible for enforcement by emailing the
+ASF APD/Tools team at [UAF-asf-apd@alaska.edu](mailto:UAF-asf-apd@alaska.edu).
+All complaints will be reviewed and investigated promptly and fairly.
+
+All community leaders are obligated to respect the privacy and security of the
+reporter of any incident.
+
+## Enforcement Guidelines
+
+Community leaders will follow these Community Impact Guidelines in determining
+the consequences for any action they deem in violation of this Code of Conduct:
+
+### 1. Correction
+
+**Community Impact**: Use of inappropriate language or other behavior deemed
+unprofessional or unwelcome in the community.
+
+**Consequence**: A private, written warning from community leaders, providing
+clarity around the nature of the violation and an explanation of why the
+behavior was inappropriate. A public apology may be requested.
+
+### 2. Warning
+
+**Community Impact**: A violation through a single incident or series
+of actions.
+
+**Consequence**: A warning with consequences for continued behavior. No
+interaction with the people involved, including unsolicited interaction with
+those enforcing the Code of Conduct, for a specified period of time. This
+includes avoiding interactions in community spaces as well as external channels
+like social media. Violating these terms may lead to a temporary or
+permanent ban.
+
+### 3. Temporary Ban
+
+**Community Impact**: A serious violation of community standards, including
+sustained inappropriate behavior.
+
+**Consequence**: A temporary ban from any sort of interaction or public
+communication with the community for a specified period of time. No public or
+private interaction with the people involved, including unsolicited interaction
+with those enforcing the Code of Conduct, is allowed during this period.
+Violating these terms may lead to a permanent ban.
+
+### 4. Permanent Ban
+
+**Community Impact**: Demonstrating a pattern of violation of community
+standards, including sustained inappropriate behavior, harassment of an
+individual, or aggression toward or disparagement of classes of individuals.
+
+**Consequence**: A permanent ban from any sort of public interaction within
+the community.
+
+## Attribution
+
+This Code of Conduct is adapted from the [Contributor Covenant][homepage],
+version 2.0, available at
+https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
+
+Community Impact Guidelines were inspired by [Mozilla's code of conduct
+enforcement ladder](https://github.com/mozilla/diversity).
+
+[homepage]: https://www.contributor-covenant.org
+
+For answers to common questions about this code of conduct, see the FAQ at
+https://www.contributor-covenant.org/faq. Translations are available at
+https://www.contributor-covenant.org/translations.
+
diff --git a/Makefile b/Makefile
new file mode 100644
index 00000000..e2ca8116
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,26 @@
+export PYTHONPATH = ${PWD}/landsat/src
+
+install:
+ python -m pip install --upgrade pip && \
+ python -m pip install -r requirements-all.txt
+
+install-lambda-deps:
+ python -m pip install --upgrade pip && \
+ python -m pip install -r requirements-landsat.txt -t landsat/src/
+
+test_file ?= 'tests/'
+test:
+ pytest $(test_file)
+
+static: ruff-check cfn-lint
+
+ruff-check:
+ ruff check
+
+ruff-format:
+ ruff format
+
+ruff: ruff-check ruff-format
+
+cfn-lint:
+ cfn-lint --template `find . -name cloudformation.yml` --info --ignore-checks W3002
diff --git a/README.md b/README.md
new file mode 100644
index 00000000..286502e9
--- /dev/null
+++ b/README.md
@@ -0,0 +1,71 @@
+# ITS_LIVE Monitoring
+
+The ITS_LIVE monitoring stack provides the AWS architecture to support low-latency production of netCDF glacier velocity products from optical (Landsat 8/9, Sentinel-2) and SAR (Sentinel-1) image pairs.
+
+## Architecture overview
+
+ITS_LIVE Monitoring uses a pub-sub model for the optical missions. These Open Data on AWS datasets include SNS Topics to which messages are published for each new scene added to the dataset:
+* Landsat: https://registry.opendata.aws/usgs-landsat/
+* Sentinel-2: https://registry.opendata.aws/sentinel-2/
+
+ITS_LIVE Monitoring subscribes to these messages and collects them in an SQS Queue. An AWS Lambda function consumes messages from the SQS Queue and:
+* determines if the scene in the message should be processed
+* searches the dataset's catalog for secondary scenes to form processing pairs
+* ensures these pairs haven't already been processed
+* submits the scene pairs to HyP3 for processing
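+
+For reference, each SQS record body contains an SNS notification whose `Message` field is itself JSON with a `landsat_product_id` key. Below is a minimal sketch of driving the handler in [`landsat/src/main.py`](landsat/src/main.py) with such an event; the `messageId` is a hypothetical placeholder, and running this for real requires Earthdata credentials since it triggers actual processing:
+```python
+import json
+
+from main import lambda_handler
+
+# The SNS notification payload; the handler only reads `landsat_product_id`.
+sns_message = {'landsat_product_id': 'LC08_L1TP_138041_20240128_20240207_02_T1'}
+
+# SQS wraps the SNS notification in each record's `body`; `messageId` is
+# echoed back in `batchItemFailures` if a record fails to process.
+event = {
+    'Records': [
+        {
+            'messageId': 'example-message-id',  # hypothetical placeholder
+            'body': json.dumps({'Message': json.dumps(sns_message)}),
+        }
+    ]
+}
+
+lambda_handler(event, None)
+```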
+
+## Development
+
+### Development environment setup
+
+To create a development environment, run:
+```shell
+conda env update -f environment.yml
+conda activate its-live-monitoring
+```
+
+A `Makefile` has been provided to run some common development steps:
+* `make static` runs the static analysis suite, including `ruff` for linting and formatting of Python code, and `cfn-lint` for linting CloudFormation.
+* `make test` runs the PyTest test suite.
+
+Review the `Makefile` for a complete list of commands.
+
+### Environment variables
+
+Many parts of this stack are controlled by environment variables. Refer to the `deploy-*.yml` GitHub Actions [workflows](.github/workflows) to see which are set upon deployment. Below is a non-exhaustive list of environment variables you may want to set.
+* `HYP3_API`: The HyP3 deployment to which jobs will be submitted, e.g. https://hyp3-its-live.asf.alaska.edu.
+* `EARTHDATA_USERNAME`: Earthdata Login username for the account which will submit jobs to HyP3. In the production stack, this should be the ITS_LIVE operational user; in the test stack, this should be the team testing user.
+* `EARTHDATA_PASSWORD`: Earthdata Login password for the account which will submit jobs to HyP3.
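+
+As a sketch of how these are consumed (mirroring the module-level setup in [`landsat/src/main.py`](landsat/src/main.py)), the Lambda reads them at import time and falls back to the production HyP3 deployment when `HYP3_API` is unset:
+```python
+import os
+
+import hyp3_sdk as sdk
+
+# Earthdata Login credentials for the account that will submit jobs to HyP3.
+username = os.environ.get('EARTHDATA_USERNAME')
+password = os.environ.get('EARTHDATA_PASSWORD')
+
+# Defaults to the production deployment when HYP3_API is not set.
+hyp3 = sdk.HyP3(
+    os.environ.get('HYP3_API', 'https://hyp3-its-live.asf.alaska.edu'),
+    username=username,
+    password=password,
+)
+```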
+
+### Running the Lambda functions locally
+
+The Lambda functions can be run locally from the command line, or by calling the appropriate function in the Python console.
+
+> [!NOTE]
+> To call the functions in the Python console, you'll need to add all the `src` directories to your `PYTHONPATH`. With PyCharm, you can accomplish this by marking all such directories as "Sources Root" and enabling the "Add source roots to PYTHONPATH" Python Console setting.
+
+#### Landsat
+
+To show the help text for the [`landsat`](landsat/src/main.py) Lambda function, which is used to submit new Landsat 8/9 scenes for processing:
+```shell
+python landsat/src/main.py -h
+```
+
+For example, processing a valid scene:
+```shell
+python landsat/src/main.py LC08_L1TP_138041_20240128_20240207_02_T1
+```
+
+### Integration tests
+
+The Landsat monitoring Lambda can be tested by manually publishing a message to the test SNS topic, which was deployed separately with [`test-sns-cf.yml`](scripts/test-sns-cf.yml).
+
+```shell
+aws sns publish \
+ --topic-arn ${TOPIC_ARN} \
+ --message file://${MESSAGE_FILE}
+```
+
+where `TOPIC_ARN` is the ARN of the test topic and `MESSAGE_FILE` is the path to a file containing the contents of the message you want published. Example message contents are provided in these files in the [`tests/integration`](tests/integration) directory:
+* [`landsat-l8-valid.json`](tests/integration/landsat-l8-valid.json) - A message containing a Landsat 8 scene that *should* be processed.
+* [`landsat-l8-not-over-ice.json`](tests/integration/landsat-l8-not-over-ice.json) - A message containing a Landsat 8 scene *not* over ice that *should not* be processed.
+* [`landsat-l8-too-many-clouds.json`](tests/integration/landsat-l8-too-many-clouds.json) - A message containing a Landsat 8 scene with too much cloud cover that *should not* be processed.
+* [`landsat-l9-wrong-product-level.json`](tests/integration/landsat-l9-wrong-product-level.json) - A message containing a Landsat 9 Level-2 scene that *should not* be processed.
diff --git a/cloudformation.yml b/cloudformation.yml
deleted file mode 100644
index 9e914804..00000000
--- a/cloudformation.yml
+++ /dev/null
@@ -1,95 +0,0 @@
-Parameters:
-
- TopicArn:
- Type: String
- Default: arn:aws:sns:us-west-2:673253540267:public-c2-notify-v2
-
-Resources:
-
- Queue:
- Type: AWS::SQS::Queue
- Properties:
- MessageRetentionPeriod: 1209600
- VisibilityTimeout: 300
-
- QueuePolicy:
- Type: AWS::SQS::QueuePolicy
- Properties:
- PolicyDocument:
- Version: 2012-10-17
- Statement:
- - Effect: Allow
- Principal:
- Service: sns.amazonaws.com
- Action: sqs:SendMessage
- Resource: !GetAtt Queue.Arn
- Condition:
- ArnEquals:
- aws:SourceArn: !Ref TopicArn
- Queues:
- - !Ref Queue
-
- Subscription:
- Type: AWS::SNS::Subscription
- Properties:
- TopicArn: !Ref TopicArn
- Protocol: sqs
- Endpoint: !GetAtt Queue.Arn
- FilterPolicyScope: MessageBody
- FilterPolicy: |
- {
- "landsat_product_id": [
- {"prefix": "LC08_L1"},
- {"prefix": "LC09_L1"},
- {"prefix": "LO08_L1"},
- {"prefix": "LO09_L1"}
- ]
- }
-
- Lambda:
- Type: AWS::Lambda::Function
- Properties:
- Code: src/
- Handler: main.lambda_handler
- MemorySize: 128
- Role: !GetAtt LambdaRole.Arn
- Runtime: python3.12
- Timeout: 300
-
- LambdaEventSourceMapping:
- Type: AWS::Lambda::EventSourceMapping
- Properties:
- FunctionName: !Ref Lambda
- BatchSize: 10
- EventSourceArn: !GetAtt Queue.Arn
- MaximumBatchingWindowInSeconds: 5
-
- LambdaLogGroup:
- Type: AWS::Logs::LogGroup
- Properties:
- LogGroupName: !Sub "/aws/lambda/${Lambda}"
- RetentionInDays: 90
-
- LambdaRole:
- Type: AWS::IAM::Role
- Properties:
- AssumeRolePolicyDocument:
- Version: 2012-10-17
- Statement:
- Action: sts:AssumeRole
- Principal:
- Service: lambda.amazonaws.com
- Effect: Allow
- Policies:
- - PolicyName: policy
- PolicyDocument:
- Version: 2012-10-17
- Statement:
- - Effect: Allow
- Action:
- - logs:CreateLogStream
- - logs:PutLogEvents
- Resource: !Sub "arn:aws:logs:${AWS::Region}:${AWS::AccountId}:log-group:/aws/lambda/*"
- - Effect: Allow
- Action: sqs:*
- Resource: !GetAtt Queue.Arn
diff --git a/environment.yml b/environment.yml
new file mode 100644
index 00000000..e65fdd9e
--- /dev/null
+++ b/environment.yml
@@ -0,0 +1,6 @@
+name: its-live-monitoring
+dependencies:
+ - python=3.12
+ - pip
+ - pip:
+ - -r requirements-all.txt
diff --git a/landsat/cloudformation.yml b/landsat/cloudformation.yml
new file mode 100644
index 00000000..4d2c4632
--- /dev/null
+++ b/landsat/cloudformation.yml
@@ -0,0 +1,128 @@
+Parameters:
+ LandsatTopicArn:
+ Type: String
+
+ Hyp3Api:
+ Type: String
+
+ LambdaLoggingLevel:
+ Type: String
+ Default: INFO
+ AllowedValues:
+ - INFO
+ - DEBUG
+
+ EarthdataUsername:
+ Type: String
+
+ EarthdataPassword:
+ Type: String
+ NoEcho: true
+
+Resources:
+
+ DeadLetterQueue:
+ Type: AWS::SQS::Queue
+ Properties:
+ MessageRetentionPeriod: 1209600
+
+ Queue:
+ Type: AWS::SQS::Queue
+ Properties:
+ MessageRetentionPeriod: 1209600
+ VisibilityTimeout: 300
+ RedrivePolicy:
+ deadLetterTargetArn: !GetAtt DeadLetterQueue.Arn
+ maxReceiveCount: 2
+
+ QueuePolicy:
+ Type: AWS::SQS::QueuePolicy
+ Properties:
+ PolicyDocument:
+ Version: 2012-10-17
+ Statement:
+ - Effect: Allow
+ Principal:
+ Service: sns.amazonaws.com
+ Action: sqs:SendMessage
+ Resource: !GetAtt Queue.Arn
+ Condition:
+ ArnEquals:
+ aws:SourceArn: !Ref LandsatTopicArn
+ Queues:
+ - !Ref Queue
+
+ Subscription:
+ Type: AWS::SNS::Subscription
+ Properties:
+ TopicArn: !Ref LandsatTopicArn
+ Protocol: sqs
+ Endpoint: !GetAtt Queue.Arn
+ FilterPolicyScope: MessageBody
+ FilterPolicy: |
+ {
+ "s3_location": [{"prefix": "s3://usgs-landsat/collection02/level-1/standard/oli-tirs/"}]
+ }
+
+ Lambda:
+ Type: AWS::Lambda::Function
+ Properties:
+ Code: src/
+ Handler: main.lambda_handler
+ MemorySize: 128
+ Role: !GetAtt LambdaRole.Arn
+ Runtime: python3.12
+ Timeout: 300
+ Environment:
+ Variables:
+ HYP3_API: !Ref Hyp3Api
+ LOGGING_LEVEL: !Ref LambdaLoggingLevel
+ EARTHDATA_USERNAME: !Ref EarthdataUsername
+ EARTHDATA_PASSWORD: !Ref EarthdataPassword
+
+ LambdaEventSourceMapping:
+ Type: AWS::Lambda::EventSourceMapping
+ Properties:
+ FunctionName: !Ref Lambda
+ BatchSize: 10
+ EventSourceArn: !GetAtt Queue.Arn
+ FunctionResponseTypes:
+ - ReportBatchItemFailures
+
+ LambdaLogGroup:
+ Type: AWS::Logs::LogGroup
+ Properties:
+ LogGroupName: !Sub "/aws/lambda/${Lambda}"
+ RetentionInDays: 90
+
+ LambdaRole:
+ Type: Custom::JplRole
+ Properties:
+ ServiceToken: !ImportValue Custom::JplRole::ServiceToken
+ Path: /account-managed/hyp3/
+ AssumeRolePolicyDocument:
+ Version: 2012-10-17
+ Statement:
+ Action: sts:AssumeRole
+ Principal:
+ Service: lambda.amazonaws.com
+ Effect: Allow
+ ManagedPolicyArns:
+ - !Ref LambdaPolicy
+
+ LambdaPolicy:
+ Type: Custom::JplPolicy
+ Properties:
+ ServiceToken: !ImportValue Custom::JplPolicy::ServiceToken
+ Path: /account-managed/hyp3/
+ PolicyDocument:
+ Version: 2012-10-17
+ Statement:
+ - Effect: Allow
+ Action:
+ - logs:CreateLogStream
+ - logs:PutLogEvents
+ Resource: !Sub "arn:aws:logs:${AWS::Region}:${AWS::AccountId}:log-group:/aws/lambda/*"
+ - Effect: Allow
+ Action: sqs:*
+ Resource: !GetAtt Queue.Arn
diff --git a/landsat/src/landsat_tiles_to_process.json b/landsat/src/landsat_tiles_to_process.json
new file mode 100644
index 00000000..3bc93b67
--- /dev/null
+++ b/landsat/src/landsat_tiles_to_process.json
@@ -0,0 +1,52 @@
+[
+ "001115",
+ "002017",
+ "005243",
+ "013125",
+ "013248",
+ "021130",
+ "027239",
+ "027247",
+ "030118",
+ "034003",
+ "039133",
+ "043006",
+ "048243",
+ "049024",
+ "049246",
+ "050243",
+ "056243",
+ "058244",
+ "060246",
+ "061123",
+ "062017",
+ "063134",
+ "066018",
+ "068116",
+ "070011",
+ "070239",
+ "071133",
+ "078128",
+ "101128",
+ "103120",
+ "115116",
+ "121107",
+ "126122",
+ "133135",
+ "135037",
+ "136035",
+ "142122",
+ "143107",
+ "143225",
+ "147112",
+ "148123",
+ "152125",
+ "163122",
+ "165002",
+ "178130",
+ "193027",
+ "193117",
+ "217118",
+ "220107",
+ "221129"
+]
diff --git a/landsat/src/main.py b/landsat/src/main.py
new file mode 100644
index 00000000..b4268f2d
--- /dev/null
+++ b/landsat/src/main.py
@@ -0,0 +1,236 @@
+"""Lambda function to trigger low-latency Landsat processing from newly acquired scenes."""
+
+import argparse
+import json
+import logging
+import os
+import sys
+from datetime import timedelta
+from pathlib import Path
+
+import geopandas as gpd
+import hyp3_sdk as sdk
+import pandas as pd
+import pystac
+import pystac_client
+
+
+LANDSAT_STAC_API = 'https://landsatlook.usgs.gov/stac-server'
+LANDSAT_CATALOG = pystac_client.Client.open(LANDSAT_STAC_API)
+LANDSAT_COLLECTION = 'landsat-c2l1'
+LANDSAT_TILES_TO_PROCESS = json.loads((Path(__file__).parent / 'landsat_tiles_to_process.json').read_text())
+
+MAX_PAIR_SEPARATION_IN_DAYS = 544
+MAX_CLOUD_COVER_PERCENT = 60
+
+EARTHDATA_USERNAME = os.environ.get('EARTHDATA_USERNAME')
+EARTHDATA_PASSWORD = os.environ.get('EARTHDATA_PASSWORD')
+HYP3 = sdk.HyP3(
+ os.environ.get('HYP3_API', 'https://hyp3-its-live.asf.alaska.edu'),
+ username=EARTHDATA_USERNAME,
+ password=EARTHDATA_PASSWORD,
+)
+
+log = logging.getLogger()
+log.setLevel(os.environ.get('LOGGING_LEVEL', 'INFO'))
+
+
+def _qualifies_for_processing(item: pystac.item.Item, max_cloud_cover: int = MAX_CLOUD_COVER_PERCENT) -> bool:
+ return (
+        item.collection_id == LANDSAT_COLLECTION
+ and 'OLI' in item.properties['instruments']
+ and item.properties['landsat:collection_category'] in ['T1', 'T2']
+ and item.properties['landsat:wrs_path'] + item.properties['landsat:wrs_row'] in LANDSAT_TILES_TO_PROCESS
+ and item.properties['eo:cloud_cover'] < max_cloud_cover
+ and item.properties['view:off_nadir'] == 0
+ )
+
+
+def _get_stac_item(scene: str) -> pystac.item.Item:
+ collection = LANDSAT_CATALOG.get_collection(LANDSAT_COLLECTION)
+ item = collection.get_item(scene)
+ if item is None:
+ raise ValueError(f'Scene {scene} not found in STAC catalog')
+ return item
+
+
+def get_landsat_pairs_for_reference_scene(
+ reference: pystac.item.Item,
+ max_pair_separation: timedelta = timedelta(days=MAX_PAIR_SEPARATION_IN_DAYS),
+ max_cloud_cover: int = MAX_CLOUD_COVER_PERCENT,
+) -> gpd.GeoDataFrame:
+ """Generate potential ITS_LIVE velocity pairs for a given Landsat scene.
+
+ Args:
+ reference: STAC item of the Landsat reference scene to find pairs for
+ max_pair_separation: How many days back from a reference scene's acquisition date to search for secondary scenes
+ max_cloud_cover: The maximum percent of the secondary scene that can be covered by clouds
+
+ Returns:
+        A DataFrame with all potential pairs for a Landsat reference scene. Metadata in the columns will be for the
+ *secondary* scene unless specified otherwise.
+ """
+ results = LANDSAT_CATALOG.search(
+ collections=[reference.collection_id],
+ query=[
+ f'landsat:wrs_path={reference.properties["landsat:wrs_path"]}',
+ f'landsat:wrs_row={reference.properties["landsat:wrs_row"]}',
+ ],
+ datetime=[reference.datetime - max_pair_separation, reference.datetime - timedelta(seconds=1)],
+ )
+ items = [item for page in results.pages() for item in page if _qualifies_for_processing(item, max_cloud_cover)]
+
+ features = []
+ for item in items:
+ feature = item.to_dict()
+ feature['properties']['reference'] = reference.id
+ feature['properties']['reference_acquisition'] = reference.datetime
+ feature['properties']['secondary'] = item.id
+ features.append(feature)
+
+ df = gpd.GeoDataFrame.from_features(features)
+ df['datetime'] = pd.to_datetime(df.datetime)
+
+ return df
+
+
+def deduplicate_hyp3_pairs(pairs: gpd.GeoDataFrame) -> gpd.GeoDataFrame:
+ """Ensure we don't submit duplicate jobs to HyP3.
+
+ Search HyP3 jobs since the reference scene's acquisition date and remove already processed pairs
+
+ Args:
+ pairs: A GeoDataFrame containing *at least* these columns: `reference`, `reference_acquisition`, and
+ `secondary`.
+
+ Returns:
+ The pairs GeoDataFrame with any already submitted pairs removed.
+ """
+ jobs = HYP3.find_jobs(
+ job_type='AUTORIFT',
+ start=pairs.iloc[0].reference_acquisition,
+ name=pairs.iloc[0].reference,
+ user_id=EARTHDATA_USERNAME,
+ )
+
+ df = pd.DataFrame([job.job_parameters['granules'] for job in jobs], columns=['reference', 'secondary'])
+
+ df = df.set_index(['reference', 'secondary'])
+ pairs = pairs.set_index(['reference', 'secondary'])
+
+ duplicates = df.loc[df.index.isin(pairs.index)]
+ if len(duplicates) > 0:
+ pairs = pairs.drop(duplicates.index)
+
+ return pairs.reset_index()
+
+
+def submit_pairs_for_processing(pairs: gpd.GeoDataFrame) -> sdk.Batch:
+    """Submit an AUTORIFT job to HyP3 for each reference-secondary pair."""
+ prepared_jobs = []
+ for reference, secondary in pairs[['reference', 'secondary']].itertuples(index=False):
+ prepared_jobs.append(HYP3.prepare_autorift_job(reference, secondary, name=reference))
+
+ log.debug(prepared_jobs)
+
+ jobs = sdk.Batch()
+ for batch in sdk.util.chunk(prepared_jobs):
+ jobs += HYP3.submit_prepared_jobs(batch)
+
+ return jobs
+
+
+def process_scene(
+ scene: str,
+ max_pair_separation: timedelta = timedelta(days=MAX_PAIR_SEPARATION_IN_DAYS),
+ max_cloud_cover: int = MAX_CLOUD_COVER_PERCENT,
+ submit: bool = True,
+) -> sdk.Batch:
+ """Trigger Landsat processing for a scene.
+
+ Args:
+ scene: Reference Landsat scene name to build pairs for.
+ max_pair_separation: How many days back from a reference scene's acquisition date to search for secondary
+ scenes.
+ max_cloud_cover: The maximum percent a Landsat scene can be covered by clouds.
+ submit: Submit pairs to HyP3 for processing.
+
+ Returns:
+ Jobs submitted to HyP3 for processing.
+ """
+ reference = _get_stac_item(scene)
+
+ if not _qualifies_for_processing(reference, max_cloud_cover):
+ log.info(f'Reference scene {scene} does not qualify for processing')
+ return sdk.Batch()
+
+ pairs = get_landsat_pairs_for_reference_scene(reference, max_pair_separation, max_cloud_cover)
+ log.info(f'Found {len(pairs)} pairs for {scene}')
+ with pd.option_context('display.max_rows', None, 'display.max_columns', None, 'display.width', None):
+ log.debug(pairs.loc[:, ['reference', 'secondary']])
+
+ pairs = deduplicate_hyp3_pairs(pairs)
+ log.info(f'Deduplicated pairs; {len(pairs)} remaining')
+ with pd.option_context('display.max_rows', None, 'display.max_columns', None, 'display.width', None):
+ log.debug(pairs.loc[:, ['reference', 'secondary']])
+
+ jobs = sdk.Batch()
+ if submit:
+ jobs += submit_pairs_for_processing(pairs)
+
+ log.info(jobs)
+ return jobs
+
+
+def lambda_handler(event: dict, context: object) -> dict:
+ """Landsat processing lambda function.
+
+ Accepts an event with SQS records for newly ingested Landsat scenes and processes each scene.
+
+ Args:
+ event: The event dictionary that contains the parameters sent when this function is invoked.
+        context: The context in which this function is called.
+
+ Returns:
+ AWS SQS batchItemFailures JSON response including messages that failed to be processed
+ """
+ batch_item_failures = []
+ for record in event['Records']:
+ try:
+ body = json.loads(record['body'])
+ message = json.loads(body['Message'])
+ _ = process_scene(message['landsat_product_id'])
+ except Exception:
+ log.exception(f'Could not process message {record["messageId"]}')
+ batch_item_failures.append({'itemIdentifier': record['messageId']})
+ return {'batchItemFailures': batch_item_failures}
+
+
+def main() -> None:
+ """Command Line wrapper around `process_scene`."""
+ parser = argparse.ArgumentParser(description=__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+ parser.add_argument('reference', help='Reference Landsat scene name to build pairs for')
+ parser.add_argument(
+ '--max-pair-separation',
+ type=int,
+ default=MAX_PAIR_SEPARATION_IN_DAYS,
+ help="How many days back from a reference scene's acquisition date to search for secondary scenes",
+ )
+ parser.add_argument(
+ '--max-cloud-cover',
+ type=int,
+ default=MAX_CLOUD_COVER_PERCENT,
+ help='The maximum percent a Landsat scene can be covered by clouds',
+ )
+ parser.add_argument('--submit', action='store_true', help='Submit pairs to HyP3 for processing')
+ parser.add_argument('-v', '--verbose', action='store_true', help='Turn on verbose logging')
+ args = parser.parse_args()
+
+ level = logging.DEBUG if args.verbose else logging.INFO
+ logging.basicConfig(stream=sys.stdout, format='%(asctime)s - %(levelname)s - %(message)s', level=level)
+ log.debug(' '.join(sys.argv))
+
+ _ = process_scene(args.reference, timedelta(days=args.max_pair_separation), args.max_cloud_cover, args.submit)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/requirements-all.txt b/requirements-all.txt
new file mode 100644
index 00000000..e0648624
--- /dev/null
+++ b/requirements-all.txt
@@ -0,0 +1,4 @@
+-r requirements-landsat.txt
+cfn-lint
+ruff
+pytest
diff --git a/requirements-landsat.txt b/requirements-landsat.txt
new file mode 100644
index 00000000..7630319a
--- /dev/null
+++ b/requirements-landsat.txt
@@ -0,0 +1,4 @@
+geopandas==0.14.3
+hyp3-sdk==6.1.0
+pandas==2.2.1
+pystac-client==0.7.6
diff --git a/ruff.toml b/ruff.toml
new file mode 100644
index 00000000..257c6dd5
--- /dev/null
+++ b/ruff.toml
@@ -0,0 +1,22 @@
+line-length = 120
+src = ["landsat/src", "tests"]
+
+[format]
+indent-style = "space"
+quote-style = "single"
+
+[lint]
+extend-select = [
+ "I", # isort: https://docs.astral.sh/ruff/rules/#isort-i
+ "UP", # pyupgrade: https://docs.astral.sh/ruff/rules/#pyupgrade-up
+ "D", # pydocstyle: https://docs.astral.sh/ruff/rules/#pydocstyle-d
+ "ANN", # annotations: https://docs.astral.sh/ruff/rules/#flake8-annotations-ann
+ "PTH", # use-pathlib-pth: https://docs.astral.sh/ruff/rules/#flake8-use-pathlib-pth
+]
+
+[lint.pydocstyle]
+convention = "google"
+
+[lint.isort]
+case-sensitive = true
+lines-after-imports = 2
diff --git a/src/main.py b/src/main.py
deleted file mode 100644
index 737021fb..00000000
--- a/src/main.py
+++ /dev/null
@@ -1,12 +0,0 @@
-import json
-
-
-def process_scene(scene):
- print(scene)
-
-
-def lambda_handler(event, context):
- for record in event['Records']:
- body = json.loads(record['body'])
- message = json.loads(body['Message'])
- process_scene(message['landsat_product_id'])
diff --git a/tests/integration/landsat-l8-not-over-ice.json b/tests/integration/landsat-l8-not-over-ice.json
new file mode 100644
index 00000000..2090a8db
--- /dev/null
+++ b/tests/integration/landsat-l8-not-over-ice.json
@@ -0,0 +1,4 @@
+{
+ "landsat_product_id": "LC08_L1TP_084230_20230624_20230630_02_T2",
+ "s3_location": "s3://usgs-landsat/collection02/level-1/standard/oli-tirs/2023/084/230/LC08_L1TP_084230_20230624_20230630_02_T2/"
+}
diff --git a/tests/integration/landsat-l8-too-many-clouds.json b/tests/integration/landsat-l8-too-many-clouds.json
new file mode 100644
index 00000000..c99124b9
--- /dev/null
+++ b/tests/integration/landsat-l8-too-many-clouds.json
@@ -0,0 +1,4 @@
+{
+ "landsat_product_id": "LC08_L1TP_001005_20231008_20231017_02_T1",
+ "s3_location": "s3://usgs-landsat/collection02/level-1/standard/oli-tirs/2023/001/005/LC08_L1TP_001005_20231008_20231017_02_T1/"
+}
diff --git a/tests/integration/landsat-l8-valid.json b/tests/integration/landsat-l8-valid.json
new file mode 100644
index 00000000..67070730
--- /dev/null
+++ b/tests/integration/landsat-l8-valid.json
@@ -0,0 +1,4 @@
+{
+ "landsat_product_id": "LC08_L1TP_001005_20230704_20230717_02_T1",
+ "s3_location": "s3://usgs-landsat/collection02/level-1/standard/oli-tirs/2023/001/005/LC08_L1TP_001005_20230704_20230717_02_T1/"
+}
diff --git a/tests/integration/landsat-l9-wrong-product-level.json b/tests/integration/landsat-l9-wrong-product-level.json
new file mode 100644
index 00000000..5243d2e6
--- /dev/null
+++ b/tests/integration/landsat-l9-wrong-product-level.json
@@ -0,0 +1,4 @@
+{
+ "landsat_product_id": "LC09_L2SP_077023_20240124_20240125_02_T1",
+ "s3_location": "s3://usgs-landsat/collection02/level-2/standard/oli-tirs/2024/77/23/LC09_L2SP_077023_20240124_20240125_02_T1/"
+}