Merge pull request #37 from ASFHyP3/develop
Release v0.1.0
jhkennedy authored Mar 26, 2024
2 parents e019568 + 8e1dda4 commit 6dee6c3
Showing 8 changed files with 145 additions and 13 deletions.
5 changes: 4 additions & 1 deletion .github/actions/deploy/action.yml
@@ -20,6 +20,8 @@ inputs:
    required: true
  EARTHDATA_PASSWORD:
    required: true
  PUBLISH_BUCKET:
    required: true

runs:
  using: composite
@@ -55,4 +57,5 @@ runs:
Hyp3Api=${{ inputs.HYP3_API }} \
LambdaLoggingLevel=${{ inputs.LAMBDA_LOGGING_LEVEL }} \
EarthdataUsername=${{ inputs.EARTHDATA_USERNAME }} \
EarthdataPassword=${{ inputs.EARTHDATA_PASSWORD }}
EarthdataPassword=${{ inputs.EARTHDATA_PASSWORD }} \
PublishBucket=${{ inputs.PUBLISH_BUCKET }}
1 change: 1 addition & 0 deletions .github/workflows/deploy-prod.yml
@@ -25,6 +25,7 @@ jobs:
LANDSAT_TOPIC_ARN: arn:aws:sns:us-west-2:673253540267:public-c2-notify-v2
HYP3_API: https://hyp3-its-live.asf.alaska.edu
LAMBDA_LOGGING_LEVEL: INFO
PUBLISH_BUCKET: its-live-data


call-bump-version-workflow:
1 change: 1 addition & 0 deletions .github/workflows/deploy-test.yml
@@ -25,3 +25,4 @@ jobs:
LANDSAT_TOPIC_ARN: arn:aws:sns:us-west-2:986442313181:its-live-notify-test
HYP3_API: https://hyp3-its-live.asf.alaska.edu
LAMBDA_LOGGING_LEVEL: DEBUG
PUBLISH_BUCKET: its-live-data-test
11 changes: 11 additions & 0 deletions CHANGELOG.md
@@ -5,6 +5,17 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).


## [0.1.0]

### Changed
- HyP3 jobs will now be submitted with the `publish_bucket` job parameter set
- The reason a scene disqualifies for processing will now be logged

### Fixed
- The `landsat:cloud_cover_land` property instead of `eo:cloud_cover` will be used to determine if a scene qualifies for processing
- Scenes with unknown cloud cover (unreported or a value < 0) will be disqualified for processing
- The max cloud cover percentage is now an inclusive bound, so only scenes with *more* (`>`) cloud cover will be disqualified
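
The cloud-cover fixes above amount to a two-step check: treat a missing or negative `landsat:cloud_cover_land` value as unknown and disqualify the scene, then compare known values against the maximum with an inclusive bound. A minimal sketch of that logic, assuming a value for `MAX_CLOUD_COVER_PERCENT` (the real check lives in `_qualifies_for_processing` in `landsat/src/main.py`, shown further down in this diff):

```python
MAX_CLOUD_COVER_PERCENT = 60  # assumed value for illustration; the real constant is defined in landsat/src/main.py


def cloud_cover_qualifies(properties: dict, max_cloud_cover: int = MAX_CLOUD_COVER_PERCENT) -> bool:
    """Sketch of the cloud-cover rules described above, not the repository's implementation."""
    cloud_cover = properties.get('landsat:cloud_cover_land', -1)
    if cloud_cover < 0:
        # Unreported (missing) or negative values mean cloud cover is unknown: disqualify.
        return False
    # Inclusive bound: a scene with exactly max_cloud_cover cloud cover still qualifies;
    # only scenes with strictly more cloud cover are disqualified.
    return cloud_cover <= max_cloud_cover
```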

## [0.0.3]

### Changed
7 changes: 7 additions & 0 deletions landsat/cloudformation.yml
@@ -19,6 +19,12 @@ Parameters:
    Type: String
    NoEcho: true

  PublishBucket:
    Type: String
    AllowedValues:
      - its-live-data
      - its-live-data-test

Resources:

  DeadLetterQueue:
@@ -80,6 +86,7 @@ Resources:
          LOGGING_LEVEL: !Ref LambdaLoggingLevel
          EARTHDATA_USERNAME: !Ref EarthdataUsername
          EARTHDATA_PASSWORD: !Ref EarthdataPassword
          PUBLISH_BUCKET: !Ref PublishBucket

  LambdaEventSourceMapping:
    Type: AWS::Lambda::EventSourceMapping
52 changes: 40 additions & 12 deletions landsat/src/main.py
@@ -35,15 +35,39 @@
log.setLevel(os.environ.get('LOGGING_LEVEL', 'INFO'))


def _qualifies_for_processing(item: pystac.item.Item, max_cloud_cover: int = MAX_CLOUD_COVER_PERCENT) -> bool:
    return (
        item.collection_id == 'landsat-c2l1'
        and 'OLI' in item.properties['instruments']
        and item.properties['landsat:collection_category'] in ['T1', 'T2']
        and item.properties['landsat:wrs_path'] + item.properties['landsat:wrs_row'] in LANDSAT_TILES_TO_PROCESS
        and item.properties['eo:cloud_cover'] < max_cloud_cover
        and item.properties['view:off_nadir'] == 0
    )
def _qualifies_for_processing(
    item: pystac.item.Item, max_cloud_cover: int = MAX_CLOUD_COVER_PERCENT, log_level: int = logging.DEBUG
) -> bool:
    if item.collection_id != 'landsat-c2l1':
        log.log(log_level, f'{item.id} disqualifies for processing because it is from the wrong collection')
        return False

    if 'OLI' not in item.properties['instruments']:
        log.log(log_level, f'{item.id} disqualifies for processing because it was not imaged with the right instrument')
        return False

    if item.properties['landsat:collection_category'] not in ['T1', 'T2']:
        log.log(log_level, f'{item.id} disqualifies for processing because it is from the wrong tier')
        return False

    if item.properties['landsat:wrs_path'] + item.properties['landsat:wrs_row'] not in LANDSAT_TILES_TO_PROCESS:
        log.log(log_level, f'{item.id} disqualifies for processing because it is not from a tile containing land-ice')
        return False

    if item.properties.get('landsat:cloud_cover_land', -1) < 0:
        log.log(log_level, f'{item.id} disqualifies for processing because cloud coverage is unknown')
        return False

    if item.properties['landsat:cloud_cover_land'] > max_cloud_cover:
        log.log(log_level, f'{item.id} disqualifies for processing because it has too much cloud cover')
        return False

    if item.properties['view:off_nadir'] != 0:
        log.log(log_level, f'{item.id} disqualifies for processing because it is off-nadir')
        return False

    log.log(log_level, f'{item.id} qualifies for processing')
    return True


def _get_stac_item(scene: str) -> pystac.item.Item:
@@ -128,7 +152,12 @@ def deduplicate_hyp3_pairs(pairs: gpd.GeoDataFrame) -> gpd.GeoDataFrame:
def submit_pairs_for_processing(pairs: gpd.GeoDataFrame) -> sdk.Batch:  # noqa: D103
    prepared_jobs = []
    for reference, secondary in pairs[['reference', 'secondary']].itertuples(index=False):
        prepared_jobs.append(HYP3.prepare_autorift_job(reference, secondary, name=reference))
        prepared_job = HYP3.prepare_autorift_job(reference, secondary, name=reference)

        if publish_bucket := os.environ.get('PUBLISH_BUCKET', ''):
            prepared_job['job_parameters']['publish_bucket'] = publish_bucket

        prepared_jobs.append(prepared_job)

    log.debug(prepared_jobs)

@@ -159,8 +188,7 @@ def process_scene(
"""
reference = _get_stac_item(scene)

if not _qualifies_for_processing(reference, max_cloud_cover):
log.info(f'Reference scene {scene} does not qualify for processing')
if not _qualifies_for_processing(reference, max_cloud_cover, logging.INFO):
return sdk.Batch()

pairs = get_landsat_pairs_for_reference_scene(reference, max_pair_separation, max_cloud_cover)
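
The `submit_pairs_for_processing` change above uses an assignment expression so that the `publish_bucket` job parameter is only added when the `PUBLISH_BUCKET` environment variable is set to a non-empty string; an unset or empty variable leaves the prepared job untouched. A stand-alone sketch of that pattern (the dictionary below is only a stand-in for a HyP3 prepared job, whose full structure is not shown in this diff):

```python
import os

# Stand-in for a prepared HyP3 job; only the 'job_parameters' key matters for this sketch.
prepared_job = {'job_parameters': {}}

# The walrus operator binds the environment value and uses it as the condition in one step:
# an unset variable (or one set to '') is falsy, so no parameter is added in that case.
if publish_bucket := os.environ.get('PUBLISH_BUCKET', ''):
    prepared_job['job_parameters']['publish_bucket'] = publish_bucket
```
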
3 changes: 3 additions & 0 deletions ruff.toml
@@ -20,3 +20,6 @@ convention = "google"
[lint.isort]
case-sensitive = true
lines-after-imports = 2

[lint.extend-per-file-ignores]
"tests/landsat/test_main.py" = ["D100", "D103", "ANN201"]
78 changes: 78 additions & 0 deletions tests/landsat/test_main.py
@@ -0,0 +1,78 @@
import unittest.mock

import main


def get_mock_pystac_item() -> unittest.mock.NonCallableMagicMock:
    item = unittest.mock.NonCallableMagicMock()
    item.collection_id = 'landsat-c2l1'
    item.properties = {
        'instruments': ['OLI'],
        'landsat:collection_category': 'T1',
        'landsat:wrs_path': '001',
        'landsat:wrs_row': '005',
        'landsat:cloud_cover_land': 50,
        'view:off_nadir': 0,
    }
    return item


def test_qualifies_for_processing():
    item = get_mock_pystac_item()
    assert main._qualifies_for_processing(item)

    item = get_mock_pystac_item()
    item.collection_id = 'foo'
    assert not main._qualifies_for_processing(item)

    item = get_mock_pystac_item()
    item.properties['instruments'] = ['TIRS']
    assert not main._qualifies_for_processing(item)

    item = get_mock_pystac_item()
    item.properties['landsat:collection_category'] = 'T2'
    assert main._qualifies_for_processing(item)

    item = get_mock_pystac_item()
    item.properties['landsat:collection_category'] = 'RT'
    assert not main._qualifies_for_processing(item)

    item = get_mock_pystac_item()
    item.properties['landsat:wrs_path'] = 'foo'
    assert not main._qualifies_for_processing(item)

    item = get_mock_pystac_item()
    item.properties['landsat:wrs_row'] = 'foo'
    assert not main._qualifies_for_processing(item)

    item = get_mock_pystac_item()
    del item.properties['landsat:cloud_cover_land']
    assert not main._qualifies_for_processing(item)

    item = get_mock_pystac_item()
    item.properties['landsat:cloud_cover_land'] = -1
    assert not main._qualifies_for_processing(item)

    item = get_mock_pystac_item()
    item.properties['landsat:cloud_cover_land'] = 0
    assert main._qualifies_for_processing(item)

    item = get_mock_pystac_item()
    item.properties['landsat:cloud_cover_land'] = 1
    assert main._qualifies_for_processing(item)

    item = get_mock_pystac_item()
    item.properties['landsat:cloud_cover_land'] = main.MAX_CLOUD_COVER_PERCENT - 1
    assert main._qualifies_for_processing(item)

    item = get_mock_pystac_item()
    item.properties['landsat:cloud_cover_land'] = main.MAX_CLOUD_COVER_PERCENT
    assert main._qualifies_for_processing(item)

    item = get_mock_pystac_item()
    item.properties['landsat:cloud_cover_land'] = main.MAX_CLOUD_COVER_PERCENT + 1
    assert not main._qualifies_for_processing(item)

    item = get_mock_pystac_item()
    item.properties['view:off_nadir'] = 1
    assert not main._qualifies_for_processing(item)
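
The new test module is written in pytest style: plain `test_*` functions with bare `assert` statements, which is also why `ruff.toml` above now ignores `D100`, `D103`, and `ANN201` for this file. Assuming pytest is the project's test runner and that `import main` is resolved to `landsat/src/main.py` by the test configuration (not shown in this diff), the module could be run on its own with something like:

```python
import pytest

# Run only the new Landsat tests; equivalent to `pytest tests/landsat/test_main.py` on the command line.
pytest.main(['tests/landsat/test_main.py'])
```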
