Commit

Merge pull request #280 from asfadmin/master
V7.0.6 Release
SpicyGarlicAlbacoreRoll authored Mar 6, 2024
2 parents 5c2f995 + 00f9246 commit 0de9728
Showing 12 changed files with 422 additions and 124 deletions.
6 changes: 6 additions & 0 deletions CHANGELOG.md
@@ -25,6 +25,12 @@ and uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
-
-->
+ ------
+ ## [v7.0.6](https://github.com/asfadmin/Discovery-asf_search/compare/v7.0.5...v7.0.6)
+ ### Changed
+ - Timestamps used while building queries and reading results from CMR now default to UTC if no timezone is provided
+ - Changed which collections the `NISAR` dataset and platform collection lists point to.
+
------
## [v7.0.5](https://github.com/asfadmin/Discovery-asf_search/compare/v7.0.4...v7.0.5)
### Added
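The UTC default noted in the v7.0.6 entry above is implemented by the `_to_utc` helper added to `asf_search/ASFSearchOptions/validators.py` further down in this diff. A minimal standalone sketch of the rule it applies (illustrative only, not the library's code):

```python
from datetime import datetime, timezone

def to_utc(date: datetime) -> datetime:
    """Assign UTC to naive datetimes; leave timezone-aware ones unchanged."""
    if date.tzinfo is None:
        return date.replace(tzinfo=timezone.utc)
    return date

# A timestamp with no offset is now interpreted as UTC rather than left naive.
print(to_utc(datetime(2024, 3, 6, 12, 0)).isoformat())
# 2024-03-06T12:00:00+00:00

# An already-aware timestamp passes through with its offset intact.
print(to_utc(datetime(2024, 3, 6, 12, 0, tzinfo=timezone.utc)).isoformat())
# 2024-03-06T12:00:00+00:00
```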
7 changes: 4 additions & 3 deletions asf_search/ASFProduct.py
@@ -10,6 +10,7 @@
from asf_search.ASFSearchOptions import ASFSearchOptions
from asf_search.download import download_url
from asf_search.download.file_download_type import FileDownloadType
+ from asf_search.CMR.translate import try_parse_date
from asf_search.CMR.translate import try_parse_float, try_parse_int, try_round_float


@@ -44,15 +45,15 @@ def get_classname(cls):
# min viable product
'centerLat': {'path': ['AdditionalAttributes', ('Name', 'CENTER_LAT'), 'Values', 0], 'cast': try_parse_float},
'centerLon': {'path': ['AdditionalAttributes', ('Name', 'CENTER_LON'), 'Values', 0], 'cast': try_parse_float},
- 'stopTime': {'path': ['TemporalExtent', 'RangeDateTime', 'EndingDateTime']}, # primary search results sort key
+ 'stopTime': {'path': ['TemporalExtent', 'RangeDateTime', 'EndingDateTime'], 'cast': try_parse_date}, # primary search results sort key
'fileID': {'path': ['GranuleUR']}, # secondary search results sort key
'flightDirection': {'path': [ 'AdditionalAttributes', ('Name', 'ASCENDING_DESCENDING'), 'Values', 0]},
'pathNumber': {'path': ['AdditionalAttributes', ('Name', 'PATH_NUMBER'), 'Values', 0], 'cast': try_parse_int},
'processingLevel': {'path': [ 'AdditionalAttributes', ('Name', 'PROCESSING_TYPE'), 'Values', 0]},

# commonly used
'url': {'path': [ 'RelatedUrls', ('Type', 'GET DATA'), 'URL']},
- 'startTime': {'path': [ 'TemporalExtent', 'RangeDateTime', 'BeginningDateTime']},
+ 'startTime': {'path': [ 'TemporalExtent', 'RangeDateTime', 'BeginningDateTime'], 'cast': try_parse_date},
'sceneName': {'path': [ 'DataGranule', 'Identifiers', ('IdentifierType', 'ProducerGranuleId'), 'Identifier']},
'browse': {'path': ['RelatedUrls', ('Type', [('GET RELATED VISUALIZATION', 'URL')])]},
'platform': {'path': [ 'AdditionalAttributes', ('Name', 'ASF_PLATFORM'), 'Values', 0]},
@@ -62,7 +63,7 @@ def get_classname(cls):
'granuleType': {'path': [ 'AdditionalAttributes', ('Name', 'GRANULE_TYPE'), 'Values', 0]},
'orbit': {'path': [ 'OrbitCalculatedSpatialDomains', 0, 'OrbitNumber'], 'cast': try_parse_int},
'polarization': {'path': [ 'AdditionalAttributes', ('Name', 'POLARIZATION'), 'Values', 0]},
- 'processingDate': {'path': [ 'DataGranule', 'ProductionDateTime'], },
+ 'processingDate': {'path': [ 'DataGranule', 'ProductionDateTime'], 'cast': try_parse_date},
'sensor': {'path': [ 'Platforms', 0, 'Instruments', 0, 'ShortName'], },
}
"""
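Each entry in the properties mapping above pairs a `path` into the CMR UMM record with an optional `cast` applied to the raw value; this release adds `try_parse_date` as the cast for `stopTime`, `startTime`, and `processingDate`. A rough sketch of how such a path/cast pair might be resolved — the `resolve` helper and the `try_parse_date` stand-in below are simplified illustrations, not the library's implementations:

```python
from datetime import datetime, timezone
from typing import Any, Dict, List, Optional, Union

def try_parse_date(value: Optional[str]) -> Optional[datetime]:
    # Simplified stand-in for asf_search.CMR.translate.try_parse_date (assumption):
    # parse an ISO-8601 timestamp, defaulting to UTC when no offset is present.
    if value is None:
        return None
    parsed = datetime.fromisoformat(value)
    return parsed if parsed.tzinfo else parsed.replace(tzinfo=timezone.utc)

def resolve(record: Dict[str, Any], path: List[Union[str, int, tuple]]) -> Any:
    # Illustrative walk of a UMM record: str/int steps index into dicts/lists,
    # and a ('Name', 'CENTER_LAT')-style tuple selects the first list entry
    # whose value under that key matches.
    value: Any = record
    for step in path:
        if value is None:
            return None
        if isinstance(step, tuple):
            key, wanted = step
            value = next((item for item in value if item.get(key) == wanted), None)
        else:
            try:
                value = value[step]
            except (KeyError, IndexError, TypeError):
                return None
    return value

umm = {'TemporalExtent': {'RangeDateTime': {'EndingDateTime': '2016-08-21T22:32:23'}}}
spec = {'path': ['TemporalExtent', 'RangeDateTime', 'EndingDateTime'], 'cast': try_parse_date}

raw = resolve(umm, spec['path'])                 # '2016-08-21T22:32:23'
stop_time = spec.get('cast', lambda v: v)(raw)   # naive UMM value treated as UTC
print(stop_time)                                 # 2016-08-21 22:32:23+00:00
```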
21 changes: 14 additions & 7 deletions asf_search/ASFSearchOptions/validators.py
@@ -1,12 +1,13 @@
import dateparser
- import datetime
+ from datetime import datetime, timezone

import requests
from typing import Union, Tuple, TypeVar, Callable, List, Type, Sequence

import math
from shapely import wkt, errors


number = TypeVar('number', int, float)

def parse_string(value: str) -> str:
@@ -40,21 +41,27 @@ def parse_float(value: float) -> float:
return value


- def parse_date(value: Union[str, datetime.datetime]) -> str:
+ def parse_date(value: Union[str, datetime]) -> Union[datetime, str]:
"""
Base date validator
:param value: String or datetime object to be validated
:return: String passed in, if it can successfully convert to Datetime.
(Need to keep strings like "today" w/out converting them, but throw on "asdf")
"""
- if isinstance(value, datetime.datetime):
- return value
+ if isinstance(value, datetime):
+ return _to_utc(value)

date = dateparser.parse(str(value))
if date is None:
raise ValueError(f"Invalid date: '{value}'.")
- return str(value)


+ return _to_utc(date).strftime('%Y-%m-%dT%H:%M:%SZ')

+ def _to_utc(date: datetime):
+ if date.tzinfo is None:
+ date = date.replace(tzinfo=timezone.utc)
+ return date

def parse_range(value: Tuple[number, number], h: Callable[[number], number]) -> Tuple[number, number]:
"""
Base range validator. For our purposes, a range is a tuple with exactly two numeric elements (a, b), requiring a <= b.
@@ -79,7 +86,7 @@ def parse_range(value: Tuple[number, number], h: Callable[[number], number]) ->


# Parse and validate a date range: "1991-10-01T00:00:00Z,1991-10-02T00:00:00Z"
- def parse_date_range(value: Tuple[Union[str, datetime.datetime], Union[str, datetime.datetime]]) -> Tuple[datetime.datetime, datetime.datetime]:
+ def parse_date_range(value: Tuple[Union[str, datetime], Union[str, datetime]]) -> Tuple[datetime, datetime]:
return parse_range(value, parse_date)


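With these changes, `parse_date` normalizes everything it accepts to UTC: a `datetime` comes back timezone-aware (naive values are assigned UTC), and a parseable string comes back as a UTC `YYYY-MM-DDTHH:MM:SSZ` string, while `parse_date_range` runs each endpoint of a two-element tuple through the same validation. A usage sketch; the string results assume `dateparser`'s default settings:

```python
from datetime import datetime
from asf_search.ASFSearchOptions.validators import parse_date, parse_date_range

# A naive datetime is now returned timezone-aware (UTC) instead of unchanged.
print(parse_date(datetime(1991, 10, 1)))
# 1991-10-01 00:00:00+00:00

# A parseable string is normalized to a UTC 'YYYY-MM-DDTHH:MM:SSZ' string.
print(parse_date('1991-10-01'))
# 1991-10-01T00:00:00Z

# Unparseable strings still raise.
try:
    parse_date('asdf')
except ValueError as err:
    print(err)  # Invalid date: 'asdf'.

# Date ranges validate each endpoint with parse_date via parse_range.
print(parse_date_range(('1991-10-01T00:00:00Z', '1991-10-02T00:00:00Z')))
# expected: a tuple with both endpoints normalized as above
```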
