Merge pull request #256 from ASFHyP3/develop
Release v0.8.2
jtherrmann authored Jan 13, 2025
2 parents 9f6b469 + d5a7a5f commit cddc7f2
Showing 16 changed files with 54 additions and 34 deletions.
6 changes: 4 additions & 2 deletions .github/workflows/static-analysis.yml
@@ -3,10 +3,12 @@ name: Static analysis
on: push

jobs:
# Docs: https://github.com/ASFHyP3/actions
call-secrets-analysis-workflow:
# Docs: https://github.com/ASFHyP3/actions
uses: ASFHyP3/actions/.github/workflows/[email protected]

call-ruff-workflow:
# Docs: https://github.com/ASFHyP3/actions
uses: ASFHyP3/actions/.github/workflows/[email protected]

call-mypy-workflow:
uses: ASFHyP3/actions/.github/workflows/[email protected]
5 changes: 5 additions & 0 deletions CHANGELOG.md
@@ -6,6 +6,11 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [PEP 440](https://www.python.org/dev/peps/pep-0440/)
and uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [0.8.2]

### Added
- Add `mypy` to [`static-analysis`](.github/workflows/static-analysis.yml)

## [0.8.1]

### Fixed
1 change: 1 addition & 0 deletions environment.yml
@@ -10,6 +10,7 @@ dependencies:
# - arcpy # windows only
- python-build
- ruff
- mypy
- setuptools>=61
- setuptools_scm>=6.2
- pytest
10 changes: 10 additions & 0 deletions pyproject.toml
@@ -54,6 +54,7 @@ flood_map = "asf_tools.hydrosar.flood_map:hyp3"
develop = [
"gdal-utils",
"ruff",
"mypy",
"pytest",
"pytest-cov",
"pytest-console-scripts",
@@ -103,3 +104,12 @@ convention = "google"
[tool.ruff.lint.isort]
case-sensitive = true
lines-after-imports = 2

[tool.mypy]
python_version = "3.10"
warn_redundant_casts = true
warn_unused_ignores = true
warn_unreachable = true
strict_equality = true
check_untyped_defs = true
explicit_package_bases = true
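
For context, running mypy with these settings (together with its default no-implicit-Optional behavior) is what drives most of the annotation changes in the files below. A minimal before/after sketch, using a hypothetical function name that is not from this repository:

```python
# Before: under mypy's no-implicit-Optional default, a bare `None` default on a
# parameter annotated as `float` is rejected.
# def resample(resolution: float = None): ...

# After: the optional default is spelled out, matching the annotations in this diff.
def resample(resolution: float | None = None) -> float:
    if resolution is None:
        resolution = 30.0  # hypothetical fallback value, for illustration only
    return resolution
```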
8 changes: 4 additions & 4 deletions src/asf_tools/composite.py
@@ -151,7 +151,7 @@ def reproject_to_target(raster_info: dict, target_epsg_code: int, target_resolut
return target_raster_info


def make_composite(out_name: str, rasters: list[str], resolution: float = None):
def make_composite(out_name: str, rasters: list[str], resolution: float | None = None):
"""Creates a local-resolution-weighted composite from Sentinel-1 RTC products
Args:
@@ -201,8 +201,8 @@ def make_composite(out_name: str, rasters: list[str], resolution: float = None):
for raster, info in raster_info.items():
log.info(f'Processing raster {raster}')
log.debug(
f"Raster upper left: {info['cornerCoordinates']['upperLeft']}; "
f"lower right: {info['cornerCoordinates']['lowerRight']}"
f'Raster upper left: {info["cornerCoordinates"]["upperLeft"]}; '
f'lower right: {info["cornerCoordinates"]["lowerRight"]}'
)

values = read_as_array(raster)
@@ -261,7 +261,7 @@ def main():
'-r',
'--resolution',
type=float,
help='Desired output resolution in meters ' '(default is the max resolution of all the input files)',
help='Desired output resolution in meters (default is the max resolution of all the input files)',
)
parser.add_argument('-v', '--verbose', action='store_true', help='Turn on verbose logging')
args = parser.parse_args()
16 changes: 8 additions & 8 deletions src/asf_tools/hydrosar/flood_map.py
@@ -31,14 +31,14 @@
log = logging.getLogger(__name__)


def get_pw_threshold(water_array: np.array) -> float:
def get_pw_threshold(water_array: np.ndarray) -> float:
hist, bin_edges = np.histogram(water_array, density=True, bins=100)
reverse_cdf = np.cumsum(np.flipud(hist)) * (bin_edges[1] - bin_edges[0])
ths_orig = np.flipud(bin_edges)[np.searchsorted(np.array(reverse_cdf), 0.95)]
return round(ths_orig) + 1


def get_waterbody(input_info: dict, threshold: float | None = None) -> np.array:
def get_waterbody(input_info: dict, threshold: float | None = None) -> np.ndarray:
epsg = get_epsg_code(input_info)

west, south, east, north = get_coordinates(input_info)
@@ -67,9 +67,9 @@ def get_waterbody(input_info: dict, threshold: float | None = None) -> np.array:


def iterative(
hand: np.array,
extent: np.array,
water_levels: np.array = np.arange(15),
hand: np.ndarray,
extent: np.ndarray,
water_levels: np.ndarray = np.arange(15),
minimization_metric: str = 'ts',
):
def get_confusion_matrix(w):
@@ -344,7 +344,7 @@ def optional_float(value: str) -> float | None:
def _get_cli(interface: Literal['hyp3', 'main']) -> argparse.ArgumentParser:
parser = argparse.ArgumentParser(description=__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter)

available_estimators = ['iterative', 'logstat', 'nmad', 'numpy']
available_estimators: list[str | None] = ['iterative', 'logstat', 'nmad', 'numpy']
estimator_help = 'Flood depth estimation approach.'
if interface == 'hyp3':
parser.add_argument('--bucket')
@@ -384,7 +384,7 @@ def _get_cli(interface: Literal['hyp3', 'main']) -> argparse.ArgumentParser:
'--known-water-threshold',
type=optional_float,
default=None,
help='Threshold for extracting known water area in percent.' ' If `None`, threshold will be calculated.',
help='Threshold for extracting known water area in percent. If `None`, threshold will be calculated.',
)
parser.add_argument(
'--minimization-metric',
@@ -421,7 +421,7 @@ def _get_cli(interface: Literal['hyp3', 'main']) -> argparse.ArgumentParser:
type=int,
nargs=2,
default=[0, 15],
help='Minimum and maximum bound on the flood depths calculated using the iterative ' 'estimator.',
help='Minimum and maximum bound on the flood depths calculated using the iterative estimator.',
)
else:
raise NotImplementedError(f'Unknown interface: {interface}')
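
The `np.array` → `np.ndarray` changes above swap out the array *constructor*, which is a function rather than a type, for the actual array class that mypy can check against. A small illustration with a hypothetical function that is not from this module:

```python
import numpy as np


# `np.ndarray` is the array type; `np.array` is just the factory function,
# so using it as an annotation fails under mypy.
def scale(values: np.ndarray, factor: float = 2.0) -> np.ndarray:
    return values * factor


print(scale(np.arange(5)))  # [0. 2. 4. 6. 8.]
```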
5 changes: 2 additions & 3 deletions src/asf_tools/hydrosar/hand/calculate.py
@@ -203,7 +203,7 @@ def make_copernicus_hand(


def none_or_int(value: str):
if value.lower == 'none':
if value.lower() == 'none':
return None
return int(value)

@@ -226,8 +226,7 @@ def main():
'--acc-threshold',
type=none_or_int,
default=100,
help='Accumulation threshold for determining the drainage mask. '
'If `None`, the mean accumulation value is used',
help='Accumulation threshold for determining the drainage mask. If `None`, the mean accumulation value is used',
)

parser.add_argument('-v', '--verbose', action='store_true', help='Turn on verbose logging')
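
The `none_or_int` change above is a behavioral bug fix rather than a style cleanup: without the parentheses, `value.lower` compares the bound method object to `'none'`, which is never equal, so passing `None` on the command line fell through to `int('None')` and raised `ValueError`. The corrected function, with a quick usage check:

```python
def none_or_int(value: str):
    # Accept the literal string 'None' (any casing) as no threshold.
    if value.lower() == 'none':
        return None
    return int(value)


assert none_or_int('None') is None
assert none_or_int('100') == 100
```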
2 changes: 1 addition & 1 deletion src/asf_tools/hydrosar/threshold.py
@@ -95,7 +95,7 @@ def expectation_maximization_threshold(tile: np.ndarray, number_of_classes: int
class_means = class_means + minimum - 1
s = image_copy.shape
posterior = np.zeros((s[0], s[1], number_of_classes))
posterior_lookup = dict()
posterior_lookup: dict = dict()
for i in range(0, s[0]):
for j in range(0, s[1]):
pixel_val = image_copy2[i, j]
12 changes: 6 additions & 6 deletions src/asf_tools/hydrosar/water_map.py
@@ -53,8 +53,8 @@ def select_hand_tiles(

tile_indexes = np.arange(tiles.shape[0])

tiles = np.ma.masked_greater_equal(tiles, hand_threshold)
percent_valid_pixels = np.sum(~tiles.mask, axis=(1, 2)) / (tiles.shape[1] * tiles.shape[2])
masked_tiles = np.ma.masked_greater_equal(tiles, hand_threshold)
percent_valid_pixels = np.sum(~masked_tiles.mask, axis=(1, 2)) / (masked_tiles.shape[1] * masked_tiles.shape[2])

return tile_indexes[percent_valid_pixels > hand_fraction]

@@ -100,10 +100,10 @@ def calculate_slope_magnitude(array: np.ndarray, pixel_size) -> np.ndarray:
def determine_membership_limits(
array: np.ndarray, mask_percentile: float = 90.0, std_range: float = 3.0
) -> tuple[float, float]:
array = np.ma.masked_values(array, 0.0)
array = np.ma.masked_greater(array, np.nanpercentile(array.filled(np.nan), mask_percentile))
lower_limit = np.ma.median(array)
upper_limit = lower_limit + std_range * array.std() + 5.0
masked_array = np.ma.masked_values(array, 0.0)
masked_array = np.ma.masked_greater(masked_array, np.nanpercentile(masked_array.filled(np.nan), mask_percentile))
lower_limit = np.ma.median(masked_array)
upper_limit = lower_limit + std_range * masked_array.std() + 5.0
return lower_limit, upper_limit


8 changes: 4 additions & 4 deletions src/asf_tools/raster.py
@@ -59,16 +59,16 @@ def read_as_masked_array(raster: str | Path, band: int = 1) -> np.ma.MaskedArray
"""
log.debug(f'Reading raster values from {raster}')
ds = gdal.Open(str(raster))
band = ds.GetRasterBand(band)
data = np.ma.masked_invalid(band.ReadAsArray())
nodata = band.GetNoDataValue()
raster_band = ds.GetRasterBand(band)
data = np.ma.masked_invalid(raster_band.ReadAsArray())
nodata = raster_band.GetNoDataValue()
if nodata is not None:
return np.ma.masked_values(data, nodata)
del ds # How to close w/ gdal
return data
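
The `band` → `raster_band` rename keeps the integer `band` parameter from being reassigned to a GDAL band object, an assignment mypy flags as incompatible with the inferred `int` type; the same "bind to a new name instead of reassigning" pattern appears in the `water_map.py` changes above. A tiny, GDAL-free sketch of the idea, with hypothetical names:

```python
def describe_band(band: int) -> str:
    # Reassigning `band` itself to a string here would change its type and be
    # rejected by mypy; binding the derived value to a new name keeps both
    # values well-typed.
    band_label = f'band {band}'
    return band_label


print(describe_band(1))  # "band 1"
```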


def read_as_array(raster: str, band: int = 1) -> np.array:
def read_as_array(raster: str, band: int = 1) -> np.ndarray:
"""Reads data from a raster image into memory
Args:
4 changes: 3 additions & 1 deletion src/asf_tools/tile.py
@@ -4,7 +4,7 @@
def tile_array(
array: np.ndarray | np.ma.MaskedArray,
tile_shape: tuple[int, int] = (200, 200),
pad_value: float = None,
pad_value: float | None = None,
) -> np.ndarray | np.ma.MaskedArray:
"""Tile a 2D numpy array
Expand Down Expand Up @@ -49,6 +49,7 @@ def tile_array(
raise ValueError(f'Cannot evenly tile a {array.shape} array into ({tile_rows},{tile_columns}) tiles')

if rpad or cpad:
assert pad_value is not None
padded_array = np.pad(array, ((0, rpad), (0, cpad)), constant_values=pad_value)
if isinstance(array, np.ma.MaskedArray):
mask = np.pad(array.mask, ((0, rpad), (0, cpad)), constant_values=True)
@@ -127,6 +128,7 @@ def untile_array(
] = tiled_array[ii * untiled_columns + jj, :, :]

if isinstance(tiled_array, np.ma.MaskedArray):
assert len(untiled.shape) == 2
untiled_mask = untile_array(tiled_array.mask, untiled.shape)
untiled = np.ma.MaskedArray(untiled, mask=untiled_mask)
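
The added `assert` statements act as type-narrowing guards: by the time padding happens, `pad_value` cannot be `None` (the `ValueError` a few lines up guards that case), and the assert makes that visible to mypy so `float | None` narrows to `float`. A minimal sketch of the pattern, using a hypothetical helper that is not from this module:

```python
import numpy as np


def pad_rows(array: np.ndarray, rpad: int, pad_value: float | None = None) -> np.ndarray:
    if rpad and pad_value is None:
        raise ValueError('pad_value is required when padding is needed')
    if rpad:
        # Narrows pad_value from `float | None` to `float` for mypy, mirroring
        # the assert added to tile_array above.
        assert pad_value is not None
        array = np.pad(array, ((0, rpad), (0, 0)), constant_values=pad_value)
    return array


print(pad_rows(np.ones((2, 3)), rpad=1, pad_value=0.0).shape)  # (3, 3)
```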

1 change: 1 addition & 0 deletions src/asf_tools/vector.py
@@ -17,6 +17,7 @@ def get_property_values_for_intersecting_features(geometry: ogr.Geometry, featur
for feature in features:
if feature.GetGeometryRef().Intersects(geometry):
return True
return False


def intersecting_feature_properties(geometry: ogr.Geometry, features: Iterator, feature_property: str) -> list[str]:
2 changes: 1 addition & 1 deletion src/asf_tools/watermasking/generate_osm_tiles.py
@@ -102,7 +102,7 @@ def extract_water(water_file, lat, lon, tile_width_deg, tile_height_deg, interio
tile_geojson = tile + '.geojson'

# Extract tile from the main pbf, then convert it to a tif.
bbox = f'--bbox {lon},{lat},{lon+tile_width_deg},{lat+tile_height_deg}'
bbox = f'--bbox {lon},{lat},{lon + tile_width_deg},{lat + tile_height_deg}'
extract_command = f'osmium extract -s smart -S tags=natural=water {bbox} {water_file} -o {tile_pbf}'.split(' ')
export_command = f'osmium export --geometry-types=polygon {tile_pbf} -o {tile_geojson}'.split(' ')
subprocess.run(extract_command)
2 changes: 1 addition & 1 deletion tests/hydrosar/conftest.py
@@ -8,7 +8,7 @@
def raster_tiles():
tiles_file = Path(__file__).parent / 'data' / 'em_tiles.npz'
tile_data = np.load(tiles_file)
tiles = np.ma.MaskedArray(tile_data['tiles'], mask=tile_data['mask'])
tiles: np.ma.MaskedArray = np.ma.MaskedArray(tile_data['tiles'], mask=tile_data['mask'])
return np.log10(tiles) + 30


2 changes: 1 addition & 1 deletion tests/test_raster.py
@@ -48,7 +48,7 @@ def test_convert_scale():


def test_convert_scale_masked_arrays():
masked_array = np.ma.MaskedArray([-1, 0, 1, 4, 9], mask=[False, False, False, False, False])
masked_array: np.ma.MaskedArray = np.ma.MaskedArray([-1, 0, 1, 4, 9], mask=[False, False, False, False, False])
c = raster.convert_scale(masked_array, 'power', 'db')
assert np.allclose(c.mask, [True, True, False, False, False])
assert np.allclose(
4 changes: 2 additions & 2 deletions tests/test_tile.py
@@ -46,7 +46,7 @@ def test_tile_masked_array():
]
)

ma = np.ma.MaskedArray(a, mask=m)
ma: np.ma.MaskedArray = np.ma.MaskedArray(a, mask=m)
tiled = tile.tile_array(ma, tile_shape=(2, 2))

assert tiled.shape == (4, 2, 2)
@@ -119,7 +119,7 @@ def test_untile_masked_array():
]
)

ma = np.ma.MaskedArray(a, mask=m)
ma: np.ma.MaskedArray = np.ma.MaskedArray(a, mask=m)
untiled = tile.untile_array(tile.tile_array(ma.copy(), tile_shape=(2, 2)), array_shape=a.shape)

assert np.all(ma == untiled)
