Merge pull request #24 from ASFHyP3/develop
Release autoRIFT v1.0.7 upgrade
jhkennedy authored Oct 13, 2020
2 parents cfbefca + 521ed43 commit 4ebca78
Showing 19 changed files with 1,469 additions and 1,059 deletions.
24 changes: 24 additions & 0 deletions .github/workflows/changelog.yml
@@ -0,0 +1,24 @@
name: Changelog updated?
on:
  pull_request:
    types:
      - opened
      - labeled
      - unlabeled
      - synchronize
    branches:
      - master
      - develop
jobs:
  changelog-updated:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v1

      - name: Changelog check
        uses: Zomzog/changelog-checker@v1.1.0
        with:
          fileName: CHANGELOG.md
          noChangelogLabel: bumpless
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
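
This workflow blocks pull requests into `master` or `develop` that don't touch `CHANGELOG.md`, unless the PR carries the `bumpless` label. For reference, a minimal sketch of the same gate against the GitHub REST API — assuming only `requests`, a `GITHUB_TOKEN` environment variable, and an illustrative PR number:

```python
import os
import sys

import requests

REPO = 'ASFHyP3/hyp3-autorift'
PR_NUMBER = 24  # illustrative; any open PR number works

api = f'https://api.github.com/repos/{REPO}/pulls/{PR_NUMBER}'
headers = {'Authorization': f'token {os.environ["GITHUB_TOKEN"]}'}

# PRs labeled "bumpless" are exempt from the changelog requirement
pr = requests.get(api, headers=headers).json()
if any(label['name'] == 'bumpless' for label in pr['labels']):
    sys.exit(0)

# otherwise, CHANGELOG.md must be among the PR's changed files
files = requests.get(f'{api}/files', headers=headers).json()
if not any(f['filename'] == 'CHANGELOG.md' for f in files):
    sys.exit('CHANGELOG.md was not updated')
```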
15 changes: 14 additions & 1 deletion CHANGELOG.md
@@ -6,10 +6,23 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [PEP 440](https://www.python.org/dev/peps/pep-0440/)
and uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [0.2.0](https://github.com/ASFHyP3/hyp3-autorift/compare/v0.1.1...v0.2.0)

### Changed
* `hyp3_autorift` now requires python >=3.8, and depends on ISCE >=2.4.1 which
includes [autoRIFT 1.0.7](https://github.com/leiyangleon/autoRIFT/releases/tag/v1.0.7)
* Upgraded to hyp3lib [v1.6.1](https://github.com/ASFHyP3/hyp3-lib/blob/develop/CHANGELOG.md#161) from v1.5.0
* Output product names have changed to follow HyP3's standard pair-processing naming scheme
* Browse images are now uploaded for hyp3v1 and will appear in email notifications
* NetCDF product files include a `source` and `reference` global attribute in line with
[CF-Conventions](https://cfconventions.org/Data/cf-conventions/cf-conventions-1.8/cf-conventions.html#description-of-file-contents)
([see PR #20](https://github.com/ASFHyP3/hyp3-autorift/pull/20))
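
For reference, these global attributes can be read back with `netCDF4` (a run dependency in `conda-env.yml` below); a minimal sketch, with an illustrative product filename:

```python
from netCDF4 import Dataset

# illustrative filename; any hyp3-autorift v0.2.0+ product netCDF works
with Dataset('S1A-S1B-pair-autorift.nc') as nc:
    print(nc.getncattr('source'))
    print(nc.getncattr('reference'))
```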

## [0.1.1](https://github.com/ASFHyP3/hyp3-autorift/compare/v0.1.0...v0.1.1)

### Added
* Browse and thumbnail images are now created and uploaded for hyp3v2
* A browse image of the ice velocity is produced for HyP3v1 and v2, and a thumbnail
of the browse image will be produced for HyP3v2

### Fixed
* Restrict ISCE version to 2.4.0 which includes autoRIFT 1.0.6
12 changes: 7 additions & 5 deletions conda-env.yml
@@ -4,7 +4,8 @@ channels:
- nodefaults
dependencies:
- boto3
- python=3.7
- botocore
- python=3.8
- pip
# For packaging, and testing
- pytest
@@ -14,13 +15,14 @@ dependencies:
- setuptools_scm
- wheel
# For running
- hyp3lib=1.5
- isce2=2.4.0
- gdal>=3
- hyp3lib=1.6.1
- isce2>=2.4.1
- boto3
- importlib_metadata
- netCDF4
- numpy
- pillow
- psycopg2 # missing hyp3proclib dep
- requests
- scikit-image # missing autoRIFT dep
- scipy
- pip:
3 changes: 1 addition & 2 deletions hyp3_autorift/__init__.py
@@ -1,7 +1,6 @@
"""A HyP3 plugin for feature tracking processing with AutoRIFT-ISCE"""

# FIXME: Python 3.8+ this should be `from importlib.metadata...`
from importlib_metadata import PackageNotFoundError, version
from importlib.metadata import PackageNotFoundError, version

from hyp3_autorift.process import process
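
The dropped `FIXME` marks exactly this change: on Python 3.8+ `importlib.metadata` is in the standard library, so the `importlib_metadata` backport is no longer needed. For code that must still support older interpreters, the usual pattern is a try/except shim (a sketch, not part of this diff):

```python
try:
    from importlib.metadata import PackageNotFoundError, version  # Python 3.8+
except ImportError:
    from importlib_metadata import PackageNotFoundError, version  # backport for Python < 3.8

try:
    __version__ = version('hyp3_autorift')
except PackageNotFoundError:
    __version__ = None  # package not installed, e.g. running from source
```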

122 changes: 23 additions & 99 deletions hyp3_autorift/__main__.py
@@ -1,24 +1,19 @@
"""
AutoRIFT processing for HyP3
"""
import glob
import logging
import os
import shutil
import sys
from argparse import ArgumentDefaultsHelpFormatter, ArgumentParser
from datetime import datetime
from mimetypes import guess_type

import boto3
from PIL import Image
from hyp3lib.aws import upload_file_to_s3
from hyp3lib.fetch import write_credentials_to_netrc_file
from hyp3lib.image import create_thumbnail
from hyp3proclib import (
    build_output_name_pair,
    earlier_granule_first,
    extra_arg_is,
    failure,
    process,
    record_metrics,
    success,
    upload_product,
    zip_dir,
@@ -31,9 +26,6 @@

import hyp3_autorift

EARTHDATA_LOGIN_DOMAIN = 'urs.earthdata.nasa.gov'
S3_CLIENT = boto3.client('s3')


def entry():
    parser = ArgumentParser(prefix_chars='+', formatter_class=ArgumentDefaultsHelpFormatter)
@@ -49,54 +41,6 @@ def entry():
    )


# v2 functions
def create_thumbnail(input_image, size=(100, 100)):
    filename, ext = os.path.splitext(input_image)
    thumbnail_name = f'{filename}_thumb{ext}'

    output_image = Image.open(input_image)
    output_image.thumbnail(size)
    output_image.save(thumbnail_name)
    return thumbnail_name


def write_netrc_file(username, password):
    netrc_file = os.path.join(os.environ['HOME'], '.netrc')
    if os.path.isfile(netrc_file):
        logging.warning(f'Using existing .netrc file: {netrc_file}')
    else:
        with open(netrc_file, 'w') as f:
            f.write(f'machine {EARTHDATA_LOGIN_DOMAIN} login {username} password {password}')


def string_is_true(s: str) -> bool:
    return s.lower() == 'true'


def get_content_type(filename):
    content_type = guess_type(filename)[0]
    if not content_type:
        content_type = 'application/octet-stream'
    return content_type


def upload_file_to_s3(path_to_file, file_type, bucket, prefix=''):
    key = os.path.join(prefix, os.path.basename(path_to_file))
    extra_args = {'ContentType': get_content_type(key)}

    logging.info(f'Uploading s3://{bucket}/{key}')
    S3_CLIENT.upload_file(path_to_file, bucket, key, extra_args)
    tag_set = {
        'TagSet': [
            {
                'Key': 'file_type',
                'Value': file_type
            }
        ]
    }
    S3_CLIENT.put_object_tagging(Bucket=bucket, Key=key, Tagging=tag_set)


def main_v2():
    parser = ArgumentParser()
    parser.add_argument('--username', required=True)
@@ -110,27 +54,19 @@ def main_v2():
    if len(args.granules) != 2:
        parser.error('Must provide exactly two granules')

    with open('get_asf.cfg', 'w') as f:
        f.write(f'[general]\nusername={args.username}\npassword={args.password}')
    write_credentials_to_netrc_file(args.username, args.password)

    g1, g2 = earlier_granule_first(args.granules[0], args.granules[1])

    hyp3_autorift.process(f'{g1}.zip', f'{g2}.zip', download=True)
    product_file = hyp3_autorift.process(f'{g1}.zip', f'{g2}.zip', download=True)

    outname = build_output_name_pair(g1, g2, os.getcwd(), '-autorift')
    product_name = f'{outname}.nc'
    netcdf_file = glob.glob('*nc')[0]
    os.rename(netcdf_file, product_name)
    browse_name = f'{outname}.png'
    browse_file = glob.glob('*.png')[0]
    os.rename(browse_file, browse_name)
    browse_file = product_file.with_suffix('.png')

    if args.bucket:
        upload_file_to_s3(product_name, 'product', args.bucket, args.bucket_prefix)
        upload_file_to_s3(browse_name, 'browse', args.bucket, args.bucket_prefix)
        thumbnail_name = create_thumbnail(browse_name)
        upload_file_to_s3(thumbnail_name, 'thumbnail', args.bucket, args.bucket_prefix)
# End v2 functions
        upload_file_to_s3(product_file, args.bucket, args.bucket_prefix)
        upload_file_to_s3(browse_file, args.bucket, args.bucket_prefix)
        thumbnail_file = create_thumbnail(browse_file)
        upload_file_to_s3(thumbnail_file, args.bucket, args.bucket_prefix)
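
The glob-and-rename bookkeeping above is gone because `hyp3_autorift.process` now returns the product path directly; browse and thumbnail names derive from it with `pathlib`. A small sketch of the pattern (the filename is illustrative):

```python
from pathlib import Path

product_file = Path('S1A-S1B-pair-autorift.nc')  # as returned by process()
browse_file = product_file.with_suffix('.png')   # S1A-S1B-pair-autorift.png
product_dir_name = product_file.stem             # S1A-S1B-pair-autorift
```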


def hyp3_process(cfg, n):
@@ -152,34 +88,24 @@ def hyp3_process(cfg, n):
        if not extra_arg_is(cfg, 'intermediate_files', 'no'):  # handle processes b4 option added
            autorift_args.append('--product')

        process(cfg, 'autorift_proc_pair', autorift_args)

        out_name = build_output_name_pair(g1, g2, cfg['workdir'], cfg['suffix'])
        log.info(f'Output name: {out_name}')

        if extra_arg_is(cfg, 'intermediate_files', 'no'):
            product_glob = os.path.join(cfg['workdir'], cfg['ftd'], '*.nc')
            netcdf_files = glob.glob(product_glob)

            if not netcdf_files:
                log.info(f'No product netCDF files found with: {product_glob}')
                raise Exception('Processing failed! Output netCDF file not found')
            if len(netcdf_files) > 1:
                log.info(f'Too many netCDF files found with: {product_glob}\n'
                         f'    {netcdf_files}')
                raise Exception('Processing failed! Too many netCDF files found')

            product_file = f'{out_name}.nc'
            os.rename(netcdf_files[0], product_file)

        else:
        product_file = hyp3_autorift.process(
            reference=f'{g1}.zip',
            secondary=f'{g2}.zip',
            download=True,
            process_dir=cfg["ftd"],
            product=extra_arg_is(cfg, 'intermediate_files', 'yes')
        )
        cfg['attachment'] = str(product_file.with_suffix('.png'))
        cfg['email_text'] = ' '  # fix line break in email

        if extra_arg_is(cfg, 'intermediate_files', 'yes'):
            tmp_product_dir = os.path.join(cfg['workdir'], 'PRODUCT')
            if not os.path.isdir(tmp_product_dir):
                log.info(f'PRODUCT directory not found: {tmp_product_dir}')
                log.error('Processing failed')
                raise Exception('Processing failed: PRODUCT directory not found')

            product_dir = os.path.join(cfg['workdir'], out_name)
            product_dir = os.path.join(cfg['workdir'], product_file.stem)
            product_file = f'{product_dir}.zip'
            if os.path.isdir(product_dir):
                shutil.rmtree(product_dir)
@@ -192,11 +118,9 @@
            zip_dir(product_dir, product_file)

        cfg['final_product_size'] = [os.stat(product_file).st_size, ]
        cfg['original_product_size'] = 0

        with get_db_connection('hyp3-db') as conn:
            record_metrics(cfg, conn)
            upload_product(product_file, cfg, conn)
            upload_product(str(product_file), cfg, conn)
            success(conn, cfg)

    except Exception as e:
65 changes: 0 additions & 65 deletions hyp3_autorift/etc/SConfigISCE

This file was deleted.
