Skip to content

Commit

Permalink
Merge pull request #268 from ASFHyP3/more-refactor
Browse files Browse the repository at this point in the history
fully isolate S1
  • Loading branch information
mfangaritav authored Jun 4, 2024
2 parents 2ade4d8 + 187068c commit 006fd91
Show file tree
Hide file tree
Showing 8 changed files with 109 additions and 120 deletions.
4 changes: 4 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,10 @@ and this project adheres to [PEP 440](https://www.python.org/dev/peps/pep-0440/)
and uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html).


## [0.16.1]
### Changed
* In preparation for a major update, the Sentinel-1 processing workflow has been isolated to a new `hyp3_autorift.s1_isce2` module.

## [0.16.0]
### Fixed
* `hyp3_autorift` will no longer attempt to crop files with no valid data
Expand Down
117 changes: 32 additions & 85 deletions src/hyp3_autorift/process.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,10 +17,7 @@
import numpy as np
import requests
from hyp3lib.aws import upload_file_to_s3
from hyp3lib.fetch import download_file
from hyp3lib.get_orb import downloadSentinelOrbitFile
from hyp3lib.image import create_thumbnail
from hyp3lib.scene import get_download_url
from netCDF4 import Dataset
from osgeo import gdal

Expand Down Expand Up @@ -205,15 +202,6 @@ def get_platform(scene: str) -> str:
raise NotImplementedError(f'autoRIFT processing not available for this platform. {scene}')


def get_s1_primary_polarization(granule_name):
    """Return the co-polarization ('vv' or 'hh') encoded in a Sentinel-1 granule name.

    Raises:
        ValueError: if characters 14-16 of the name are not a recognized
            single/dual polarization code (SV, DV, SH, DH).
    """
    # Characters 14-16 of an S1 granule name carry the polarization code.
    code = granule_name[14:16]
    co_pol = {'SV': 'vv', 'DV': 'vv', 'SH': 'hh', 'DH': 'hh'}.get(code)
    if co_pol is None:
        raise ValueError(f'Cannot determine co-polarization of granule {granule_name}')
    return co_pol


def create_filtered_filepath(path: str) -> str:
parent = (Path.cwd() / 'filtered').resolve()
parent.mkdir(exist_ok=True)
Expand Down Expand Up @@ -349,8 +337,6 @@ def process(
secondary: str,
parameter_file: str = DEFAULT_PARAMETER_FILE,
naming_scheme: Literal['ITS_LIVE_OD', 'ITS_LIVE_PROD'] = 'ITS_LIVE_OD',
esa_username: Optional[str] = None,
esa_password: Optional[str] = None,
) -> Tuple[Path, Path, Path]:
"""Process a Sentinel-1, Sentinel-2, or Landsat-8 image pair
Expand All @@ -363,106 +349,69 @@ def process(
Returns:
the autoRIFT product file, browse image, and thumbnail image
"""
orbits = None
polarization = None
reference_path = None
secondary_path = None
reference_metadata = None
secondary_metadata = None
reference_zero_path = None
secondary_zero_path = None
reference_state_vec = None
secondary_state_vec = None
lat_limits, lon_limits = None, None

platform = get_platform(reference)
if platform == 'S1':
from hyp3_autorift.s1_isce2 import bounding_box, process_sentinel1_with_isce2

for scene in [reference, secondary]:
scene_url = get_download_url(scene)
download_file(scene_url, chunk_size=5242880)
orbits = Path('Orbits').resolve()
orbits.mkdir(parents=True, exist_ok=True)

if (esa_username is None) or (esa_password is None):
esa_username, esa_password = utils.get_esa_credentials()

reference_state_vec, reference_provider = downloadSentinelOrbitFile(
reference, directory=str(orbits), esa_credentials=(esa_username, esa_password)
)
log.info(f'Downloaded orbit file {reference_state_vec} from {reference_provider}')
secondary_state_vec, secondary_provider = downloadSentinelOrbitFile(
secondary, directory=str(orbits), esa_credentials=(esa_username, esa_password)
)
log.info(f'Downloaded orbit file {secondary_state_vec} from {secondary_provider}')

polarization = get_s1_primary_polarization(reference)
lat_limits, lon_limits = bounding_box(f'{reference}.zip', polarization=polarization, orbits=orbits)

scene_poly = geometry.polygon_from_bbox(x_limits=lat_limits, y_limits=lon_limits)
parameter_info = utils.find_jpl_parameter_info(scene_poly, parameter_file)

netcdf_file = process_sentinel1_with_isce2(parameter_info, reference, secondary, polarization, orbits)
if platform == 'S1':
from hyp3_autorift.s1_isce2 import process_sentinel1_with_isce2
netcdf_file = process_sentinel1_with_isce2(reference, secondary, parameter_file)

elif platform == 'S2':
else:
# Set config and env for new CXX threads in Geogrid/autoRIFT
gdal.SetConfigOption('GDAL_DISABLE_READDIR_ON_OPEN', 'EMPTY_DIR')
os.environ['GDAL_DISABLE_READDIR_ON_OPEN'] = 'EMPTY_DIR'

gdal.SetConfigOption('AWS_REGION', 'us-west-2')
os.environ['AWS_REGION'] = 'us-west-2'

reference_metadata = get_s2_metadata(reference)
secondary_metadata = get_s2_metadata(secondary)
reference_path = reference_metadata['path']
secondary_path = secondary_metadata['path']
bbox = reference_metadata['bbox']
lat_limits = (bbox[1], bbox[3])
lon_limits = (bbox[0], bbox[2])
if platform == 'S2':
reference_metadata = get_s2_metadata(reference)
reference_path = reference_metadata['path']

elif 'L' in platform:
# Set config and env for new CXX threads in Geogrid/autoRIFT
gdal.SetConfigOption('GDAL_DISABLE_READDIR_ON_OPEN', 'EMPTY_DIR')
os.environ['GDAL_DISABLE_READDIR_ON_OPEN'] = 'EMPTY_DIR'

gdal.SetConfigOption('AWS_REGION', 'us-west-2')
os.environ['AWS_REGION'] = 'us-west-2'
secondary_metadata = get_s2_metadata(secondary)
secondary_path = secondary_metadata['path']

gdal.SetConfigOption('AWS_REQUEST_PAYER', 'requester')
os.environ['AWS_REQUEST_PAYER'] = 'requester'
elif 'L' in platform:
# Set config and env for new CXX threads in Geogrid/autoRIFT
gdal.SetConfigOption('AWS_REQUEST_PAYER', 'requester')
os.environ['AWS_REQUEST_PAYER'] = 'requester'

reference_metadata = get_lc2_metadata(reference)
reference_path = get_lc2_path(reference_metadata)
reference_metadata = get_lc2_metadata(reference)
reference_path = get_lc2_path(reference_metadata)

secondary_metadata = get_lc2_metadata(secondary)
secondary_path = get_lc2_path(secondary_metadata)
secondary_metadata = get_lc2_metadata(secondary)
secondary_path = get_lc2_path(secondary_metadata)

filter_platform = min([platform, get_platform(secondary)])
if filter_platform in ('L4', 'L5', 'L7'):
# Log path here before we transform it
log.info(f'Reference scene path: {reference_path}')
log.info(f'Secondary scene path: {secondary_path}')
reference_path, reference_zero_path, secondary_path, secondary_zero_path = \
apply_landsat_filtering(reference_path, secondary_path)
filter_platform = min([platform, get_platform(secondary)])
if filter_platform in ('L4', 'L5', 'L7'):
# Log path here before we transform it
log.info(f'Reference scene path: {reference_path}')
log.info(f'Secondary scene path: {secondary_path}')
reference_path, reference_zero_path, secondary_path, secondary_zero_path = \
apply_landsat_filtering(reference_path, secondary_path)

if reference_metadata['properties']['proj:epsg'] != secondary_metadata['properties']['proj:epsg']:
log.info('Reference and secondary projections are different! Reprojecting.')
if reference_metadata['properties']['proj:epsg'] != secondary_metadata['properties']['proj:epsg']:
log.info('Reference and secondary projections are different! Reprojecting.')

# Reproject zero masks if necessary
if reference_zero_path and secondary_zero_path:
_, _ = utils.ensure_same_projection(reference_zero_path, secondary_zero_path)
# Reproject zero masks if necessary
if reference_zero_path and secondary_zero_path:
_, _ = utils.ensure_same_projection(reference_zero_path, secondary_zero_path)

reference_path, secondary_path = utils.ensure_same_projection(reference_path, secondary_path)
reference_path, secondary_path = utils.ensure_same_projection(reference_path, secondary_path)

bbox = reference_metadata['bbox']
lat_limits = (bbox[1], bbox[3])
lon_limits = (bbox[0], bbox[2])

log.info(f'Reference scene path: {reference_path}')
log.info(f'Secondary scene path: {secondary_path}')
log.info(f'Reference scene path: {reference_path}')
log.info(f'Secondary scene path: {secondary_path}')

if 'L' in platform or platform == 'S2':
scene_poly = geometry.polygon_from_bbox(x_limits=lat_limits, y_limits=lon_limits)
parameter_info = utils.find_jpl_parameter_info(scene_poly, parameter_file)

Expand Down Expand Up @@ -523,8 +472,6 @@ def main():
parser.add_argument('--publish-bucket', default='',
help='Additionally, publish products to this bucket. Necessary credentials must be provided '
'via the `PUBLISH_ACCESS_KEY_ID` and `PUBLISH_SECRET_ACCESS_KEY` environment variables.')
parser.add_argument('--esa-username', default=None, help="Username for ESA's Copernicus Data Space Ecosystem")
parser.add_argument('--esa-password', default=None, help="Password for ESA's Copernicus Data Space Ecosystem")
parser.add_argument('--parameter-file', default=DEFAULT_PARAMETER_FILE,
help='Shapefile for determining the correct search parameters by geographic location. '
'Path to shapefile must be understood by GDAL')
Expand Down
1 change: 1 addition & 0 deletions src/hyp3_autorift/s1_correction.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@

from hyp3_autorift.process import DEFAULT_PARAMETER_FILE
from hyp3_autorift.s1_isce2 import generate_correction_data

log = logging.getLogger(__name__)


Expand Down
60 changes: 47 additions & 13 deletions src/hyp3_autorift/s1_isce2.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,35 +7,60 @@
from pathlib import Path
from typing import Optional

import isce # noqa: F401
import isceobj
import numpy as np
from contrib.demUtils import createDemStitcher
from contrib.geo_autoRIFT.geogrid import Geogrid
from hyp3lib.fetch import download_file
from hyp3lib.get_orb import downloadSentinelOrbitFile
from hyp3lib.scene import get_download_url
from isceobj.Orbit.Orbit import Orbit
from isceobj.Sensor.TOPS.Sentinel1 import Sentinel1
from osgeo import gdal

from hyp3_autorift import geometry, utils
from hyp3_autorift.process import DEFAULT_PARAMETER_FILE, get_s1_primary_polarization
from hyp3_autorift.process import DEFAULT_PARAMETER_FILE
from hyp3_autorift.utils import get_esa_credentials
from hyp3_autorift.vend.testGeogrid_ISCE import loadParsedata, runGeogrid

log = logging.getLogger(__name__)


def process_sentinel1_with_isce2(parameter_info, reference, secondary, polarization, orbits):
def get_s1_primary_polarization(granule_name):
    """Return the co-polarization ('vv' or 'hh') of a Sentinel-1 granule.

    The two-character polarization code lives at positions 14-16 of the
    granule name; SV/DV map to 'vv' and SH/DH map to 'hh'.

    Raises:
        ValueError: for any other polarization code.
    """
    code = granule_name[14:16]
    if code in ('SV', 'DV'):
        return 'vv'
    elif code in ('SH', 'DH'):
        return 'hh'
    else:
        raise ValueError(f'Cannot determine co-polarization of granule {granule_name}')


def process_sentinel1_with_isce2(reference, secondary, parameter_file):
import isce # noqa
from topsApp import TopsInSAR
from hyp3_autorift.vend.testGeogrid_ISCE import loadMetadata, runGeogrid
from hyp3_autorift.vend.testautoRIFT_ISCE import generateAutoriftProduct

lat_limits, lon_limits = bounding_box(f'{reference}.zip', polarization=polarization, orbits=orbits)
isce_dem = prep_isce_dem(parameter_info['geogrid']['dem'], lat_limits, lon_limits)
for scene in [reference, secondary]:
scene_url = get_download_url(scene)
download_file(scene_url, chunk_size=5242880)

orbits = Path('Orbits').resolve()
orbits.mkdir(parents=True, exist_ok=True)

esa_username, esa_password = utils.get_esa_credentials()

reference_state_vec, reference_provider = downloadSentinelOrbitFile(
reference, directory=str(orbits), esa_credentials=(esa_username, esa_password)
)
log.info(f'Downloaded orbit file {reference_state_vec} from {reference_provider}')

secondary_state_vec, secondary_provider = downloadSentinelOrbitFile(
secondary, directory=str(orbits), esa_credentials=(esa_username, esa_password)
)
log.info(f'Downloaded orbit file {secondary_state_vec} from {secondary_provider}')

polarization = get_s1_primary_polarization(reference)
lat_limits, lon_limits = bounding_box(f'{reference}.zip', polarization=polarization, orbits=str(orbits))

scene_poly = geometry.polygon_from_bbox(x_limits=lat_limits, y_limits=lon_limits)
parameter_info = utils.find_jpl_parameter_info(scene_poly, parameter_file)

isce_dem = prep_isce_dem(parameter_info['geogrid']['dem'], lat_limits, lon_limits)
format_tops_xml(reference, secondary, polarization, isce_dem, orbits)

insar = TopsInSAR(name='topsApp', cmdline=['topsApp.xml', '--end=mergebursts'])
Expand All @@ -59,7 +84,7 @@ def process_sentinel1_with_isce2(parameter_info, reference, secondary, polarizat
netcdf_file = generateAutoriftProduct(
reference_path, secondary_path, nc_sensor='S1', optical_flag=False, ncname=None,
geogrid_run_info=geogrid_info, **parameter_info['autorift'],
parameter_file=DEFAULT_PARAMETER_FILE.replace('/vsicurl/', ''),
parameter_file=parameter_file.replace('/vsicurl/', ''),
)
return netcdf_file

Expand All @@ -71,6 +96,7 @@ def generate_correction_data(
esa_username: Optional[str] = None,
esa_password: Optional[str] = None,
):
from hyp3_autorift.vend.testGeogrid_ISCE import loadParsedata, runGeogrid
scene_path = Path(f'{scene}.zip')
if not scene_path.exists():
scene_url = get_download_url(scene)
Expand All @@ -88,7 +114,7 @@ def generate_correction_data(
log.info(f'Downloaded orbit file {state_vec} from {oribit_provider}')

polarization = get_s1_primary_polarization(scene)
lat_limits, lon_limits = bounding_box(f'{scene}.zip', polarization=polarization, orbits=orbits)
lat_limits, lon_limits = bounding_box(f'{scene}.zip', polarization=polarization, orbits=str(orbits))

scene_poly = geometry.polygon_from_bbox(x_limits=lat_limits, y_limits=lon_limits)
parameter_info = utils.find_jpl_parameter_info(scene_poly, parameter_file)
Expand Down Expand Up @@ -205,6 +231,10 @@ def bounding_box(safe, priority='reference', polarization='hh', orbits='Orbits',
lat_limits: list containing the [minimum, maximum] latitudes
lat_limits: list containing the [minimum, maximum] longitudes
"""
import isce # noqa: F401
from contrib.geo_autoRIFT.geogrid import Geogrid
from isceobj.Orbit.Orbit import Orbit
from isceobj.Sensor.TOPS.Sentinel1 import Sentinel1
frames = []
for swath in range(1, 4):
rdr = Sentinel1()
Expand Down Expand Up @@ -263,6 +293,10 @@ def bounding_box(safe, priority='reference', polarization='hh', orbits='Orbits',


def prep_isce_dem(input_dem, lat_limits, lon_limits, isce_dem=None):
import isce # noqa: F401
import isceobj
from contrib.demUtils import createDemStitcher

if isce_dem is None:
seamstress = createDemStitcher()
isce_dem = seamstress.defaultName([*lat_limits, *lon_limits])
Expand Down
2 changes: 1 addition & 1 deletion src/hyp3_autorift/vend/CHANGES.diff
Original file line number Diff line number Diff line change
Expand Up @@ -134,7 +134,7 @@
- slave_filename = conts['slave_filename'][0]
- master_dt = conts['master_dt'][0]
- slave_dt = conts['slave_dt'][0]
+ from hyp3_autorift.utils import get_topsinsar_config
+ from hyp3_autorift.s1_isce2 import get_topsinsar_config
+ conts = get_topsinsar_config()
+ master_filename = conts['reference_filename']
+ slave_filename = conts['secondary_filename']
Expand Down
2 changes: 1 addition & 1 deletion src/hyp3_autorift/vend/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ Changes, as listed in `CHANGES.diff`, were done to:
* use the full Sentinel-2 COG id in the output netCDF product filename to ensure unique names

**Note:** The `topsinsar_filename.py` included here is not used, but retained for reference.
We've replaced it with `hyp3_autorift.utils.get_topsinsar_config`.
We've replaced it with `hyp3_autorift.s1_isce2.get_topsinsar_config`.

## Additional Patches

Expand Down
20 changes: 0 additions & 20 deletions tests/test_process.py
Original file line number Diff line number Diff line change
Expand Up @@ -286,26 +286,6 @@ def test_get_datetime():
process.get_datetime('S3_adsflafjladsf')


def test_get_s1_primary_polarization():
    # Granules whose polarization code resolves to a known co-polarization.
    expected = {
        'S1B_WV_SLC__1SSV_20200923T184541_20200923T185150_023506_02CA71_AABB': 'vv',
        'S1B_IW_GRDH_1SDV_20200924T092954_20200924T093026_023515_02CABC_6C62': 'vv',
        'S1B_IW_GRDH_1SSH_20200924T112903_20200924T112932_023516_02CAC7_D003': 'hh',
        'S1B_IW_OCN__2SDH_20200924T090450_20200924T090519_023515_02CAB8_917B': 'hh',
    }
    for granule, polarization in expected.items():
        assert process.get_s1_primary_polarization(granule) == polarization

    # Unrecognized polarization codes must raise.
    invalid_granules = [
        'S1A_EW_GRDM_1SHH_20150513T080355_20150513T080455_005900_007994_35D2',
        'S1A_EW_GRDM_1SHV_20150509T230833_20150509T230912_005851_00787D_3BE5',
        'S1A_IW_SLC__1SVH_20150706T015744_20150706T015814_006684_008EF7_9B69',
        'S1A_IW_GRDH_1SVV_20150706T015720_20150706T015749_006684_008EF7_54BA',
    ]
    for granule in invalid_granules:
        with pytest.raises(ValueError):
            process.get_s1_primary_polarization(granule)


def test_apply_landsat_filtering(monkeypatch):
def mock_apply_filter_function(scene, _):
if process.get_platform(scene) < 'L7':
Expand Down
23 changes: 23 additions & 0 deletions tests/test_s1_isce2.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
import pytest

from hyp3_autorift import s1_isce2


def test_get_s1_primary_polarization():
    # Valid single/dual polarization codes map to their co-polarization.
    cases = [
        ('S1B_WV_SLC__1SSV_20200923T184541_20200923T185150_023506_02CA71_AABB', 'vv'),
        ('S1B_IW_GRDH_1SDV_20200924T092954_20200924T093026_023515_02CABC_6C62', 'vv'),
        ('S1B_IW_GRDH_1SSH_20200924T112903_20200924T112932_023516_02CAC7_D003', 'hh'),
        ('S1B_IW_OCN__2SDH_20200924T090450_20200924T090519_023515_02CAB8_917B', 'hh'),
    ]
    for granule, polarization in cases:
        assert s1_isce2.get_s1_primary_polarization(granule) == polarization

    # Anything else is rejected with a ValueError.
    for granule in (
        'S1A_EW_GRDM_1SHH_20150513T080355_20150513T080455_005900_007994_35D2',
        'S1A_EW_GRDM_1SHV_20150509T230833_20150509T230912_005851_00787D_3BE5',
        'S1A_IW_SLC__1SVH_20150706T015744_20150706T015814_006684_008EF7_9B69',
        'S1A_IW_GRDH_1SVV_20150706T015720_20150706T015749_006684_008EF7_54BA',
    ):
        with pytest.raises(ValueError):
            s1_isce2.get_s1_primary_polarization(granule)

0 comments on commit 006fd91

Please sign in to comment.