diff --git a/environment.yml b/environment.yml
index 678b8ac..8756dd9 100644
--- a/environment.yml
+++ b/environment.yml
@@ -14,3 +14,4 @@ dependencies:
   - pytest-cov
   - cdsapi
   - cartopy
+  - setuptools
diff --git a/metocean_api/ts/internal/ec/ec_products.py b/metocean_api/ts/internal/ec/ec_products.py
index 38eac54..4968862 100644
--- a/metocean_api/ts/internal/ec/ec_products.py
+++ b/metocean_api/ts/internal/ec/ec_products.py
@@ -1,5 +1,5 @@
 from __future__ import annotations
-from typing import TYPE_CHECKING, override, Tuple, List
+from typing import TYPE_CHECKING, Tuple, List
 import os
 import subprocess
 import pandas as pd
@@ -24,7 +24,6 @@ def find_product(name: str) -> Product:
 
 class ERA5(Product):
 
-    @override
     def import_data(self, ts: TimeSeries, save_csv=True, save_nc=False, use_cache=False):
         """
         Extract times series of the nearest gird point (lon,lat) from
@@ -152,7 +151,6 @@ def download_temporary_files(self, ts: TimeSeries, use_cache: bool = False) -> T
 
 class GTSM(Product):
 
-    @override
     def import_data(self, ts: TimeSeries, save_csv=True, save_nc=False, use_cache=False):
         """
         Extract times series of the nearest grid point (lon, lat) from
diff --git a/metocean_api/ts/internal/metno/met_product.py b/metocean_api/ts/internal/metno/met_product.py
index c735d03..9792410 100644
--- a/metocean_api/ts/internal/metno/met_product.py
+++ b/metocean_api/ts/internal/metno/met_product.py
@@ -1,5 +1,5 @@
 from __future__ import annotations
-from typing import TYPE_CHECKING, override, Tuple, List
+from typing import TYPE_CHECKING, Tuple, List
 import os
 from abc import abstractmethod
 from tqdm import tqdm
@@ -35,12 +35,10 @@ def _get_near_coord(self, url: str, lon: float, lat: float):
     def get_dates(self, start_date, end_date):
         raise NotImplementedError(f"Not implemented for {self.name}")
 
-    @override
     def import_data(self, ts: TimeSeries, save_csv=True, save_nc=False, use_cache=False):
         tempfiles, lon_near, lat_near = self.download_temporary_files(ts, use_cache)
         return self._combine_temporary_files(ts, save_csv, save_nc, use_cache, tempfiles, lon_near, lat_near, height=ts.height, depth=ts.depth)
 
-    @override
     def download_temporary_files(self, ts: TimeSeries, use_cache: bool = False) -> Tuple[List[str], float, float]:
         if ts.variable == [] or ts.variable is None:
             ts.variable = self.get_default_variables()
diff --git a/metocean_api/ts/internal/metno/met_products.py b/metocean_api/ts/internal/metno/met_products.py
index 762d87e..d0d4623 100644
--- a/metocean_api/ts/internal/metno/met_products.py
+++ b/metocean_api/ts/internal/metno/met_products.py
@@ -1,5 +1,5 @@
 from __future__ import annotations
-from typing import TYPE_CHECKING, override, Tuple,List
+from typing import TYPE_CHECKING, Tuple,List
 import os
 from datetime import datetime
 import xarray as xr
@@ -52,7 +52,6 @@ def find_product(name: str) -> Product:
 
 class Nora3Wave(MetProduct):
 
-    @override
     def get_default_variables(self):
         return [
             "hs",
@@ -71,7 +70,6 @@ def get_default_variables(self):
             "thq_swell",
         ]
 
-    @override
     def get_dates(self, start_date, end_date):
         return pd.date_range(start=start_date, end=end_date, freq="D")
 
@@ -99,7 +97,6 @@ def _drop_variables(self):
 
 class Nora3WaveSub(Nora3Wave):
 
-    @override
     def get_dates(self, start_date, end_date):
         return pd.date_range(
             start=datetime.strptime(start_date, "%Y-%m-%d").strftime("%Y-%m"),
@@ -113,11 +110,9 @@ def _get_url_info(self, date: str):
 
 class NORA3WindSub(MetProduct):
 
-    @override
     def get_default_variables(self):
         return ["wind_speed", "wind_direction"]
 
-    @override
     def get_dates(self, start_date, end_date):
         return pd.date_range(
             start=datetime.strptime(start_date, "%Y-%m-%d").strftime("%Y-%m"),
@@ -125,11 +120,9 @@ def get_dates(self, start_date, end_date):
             freq="MS",
         )
 
-    @override
     def _get_url_info(self, date: str):
         return "https://thredds.met.no/thredds/dodsC/nora3_subset_atmos/wind_hourly/arome3kmwind_1hr_" + date.strftime("%Y%m") + ".nc"
 
-    @override
     def _get_near_coord(self, url: str, lon: float, lat: float):
         with xr.open_dataset(url) as ds:
             x, y = aux_funcs.find_nearest_cart_coord(ds.longitude, ds.latitude, lon, lat)
@@ -137,7 +130,6 @@ def _get_near_coord(self, url: str, lon: float, lat: float):
             lat_near = ds.latitude.sel(y=y, x=x).values[0][0]
             return {"x": x.values[0], "y": y.values[0]}, lon_near, lat_near
 
-    @override
     def _alter_temporary_dataset_if_needed(self, dataset: xr.Dataset):
         for var_name in dataset.variables:
             # The encoding of the fill value is not always correct in the netcdf files on the server
@@ -146,7 +138,6 @@ def _alter_temporary_dataset_if_needed(self, dataset: xr.Dataset):
                 var.encoding["_FillValue"] = var.attrs["fill_value"]
         return dataset
 
-    @override
     def _flatten_data_structure(self, ds: xr.Dataset, **flatten_dims):
         variables_to_flatten = ["wind_speed", "wind_direction"]
         height = self._get_values_for_dimension(ds, flatten_dims, "height")
@@ -161,23 +152,18 @@ def _flatten_data_structure(self, ds: xr.Dataset, **flatten_dims):
 
 class NORA3WindWaveCombined(MetProduct):
 
-    @override
     def get_default_variables(self):
         raise NotImplementedError("This method should not be called")
 
-    @override
     def _get_url_info(self, date: str):
         raise NotImplementedError("This method should not be called")
 
-    @override
     def _get_near_coord(self, url: str, lon: float, lat: float):
         raise NotImplementedError("This method should not be called")
 
-    @override
     def get_dates(self, start_date, end_date):
         raise NotImplementedError("This method should not be called")
-
-    @override
+
     def import_data(self, ts: TimeSeries, save_csv=True, save_nc=False, use_cache=False):
         product = ts.product
 
@@ -240,8 +226,7 @@ def import_data(self, ts: TimeSeries, save_csv=True, save_nc=False, use_cache=Fa
             self._clean_cache(tempfiles)
         return df
 
-
-    @override
+
     def download_temporary_files(self, ts: TimeSeries, use_cache: bool = False) -> Tuple[List[str], float, float]:
         raise NotImplementedError(f"Not implemented for product {self.name}")
 
@@ -266,7 +251,6 @@ def get_default_variables(self):
             "pdir1",
         ]
 
-    @override
     def get_dates(self, start_date, end_date):
         return pd.date_range(
             start=datetime.strptime(start_date, "%Y-%m-%d").strftime("%Y-%m"),
@@ -274,11 +258,9 @@ def get_dates(self, start_date, end_date):
             freq="MS",
         )
 
-    @override
     def _get_url_info(self, date: str):
         return "https://thredds.met.no/thredds/dodsC/norac_wave/field/ww3." + date.strftime("%Y%m") + ".nc"
 
-    @override
     def _get_near_coord(self, url: str, lon: float, lat: float):
         with xr.open_dataset(url) as ds:
             node_id = aux_funcs.distance_2points(ds.latitude.values, ds.longitude.values, lat, lon).argmin()
@@ -289,7 +271,6 @@ def _get_near_coord(self, url: str, lon: float, lat: float):
 
 class NORA3AtmSub(MetProduct):
 
-    @override
     def get_default_variables(self):
         return [
             "air_pressure_at_sea_level",
@@ -301,7 +282,6 @@ def get_default_variables(self):
             "fog",
         ]
 
-    @override
     def get_dates(self, start_date, end_date):
         return pd.date_range(
             start=datetime.strptime(start_date, "%Y-%m-%d").strftime("%Y-%m"),
@@ -325,7 +305,6 @@ def _drop_variables(self):
 
 class NORA3Atm3hrSub(MetProduct):
 
-    @override
     def get_default_variables(self):
         return [
             "wind_speed",
@@ -336,7 +315,6 @@ def get_default_variables(self):
             "tke",
         ]
 
-    @override
     def get_dates(self, start_date, end_date):
         return pd.date_range(
             start=datetime.strptime(start_date, "%Y-%m-%d").strftime("%Y-%m"),
@@ -368,11 +346,9 @@ def _flatten_data_structure(self, ds: xr.Dataset, **flatten_dims):
 
 class NORA3StormSurge(MetProduct):
 
-    @override
     def get_default_variables(self):
         return ["zeta"]
 
-    @override
     def get_dates(self, start_date, end_date):
         return pd.date_range(start=start_date, end=end_date, freq="YS")
 
@@ -390,7 +366,6 @@ def _alter_temporary_dataset_if_needed(self, dataset: xr.Dataset):
         dataset = dataset.rename_dims({"ocean_time": "time"})
         return dataset.rename_vars({"ocean_time": "time"})
 
-    @override
     def import_data(self, ts: TimeSeries, save_csv=False, save_nc=False, use_cache=False):
         """
         Extract times series of the nearest gird point (lon,lat) from
@@ -433,11 +408,9 @@ def _drop_variables(self):
 
 class Norkyst800(MetProduct):
 
-    @override
     def get_default_variables(self):
         return ["salinity", "temperature", "u", "v", "zeta"]
 
-    @override
     def get_dates(self, start_date, end_date):
         return pd.date_range(start=start_date, end=end_date, freq="D")
 
@@ -489,6 +462,9 @@ def import_data(self, ts: TimeSeries, save_csv=True, save_nc=False, use_cache=Fa
                 dataset = dataset[ts.variable]
                 dataset = dataset.sel(selection).squeeze(drop=True)
                 dataset.to_netcdf(tempfiles[i])
+
+        ts.lat_data = lat_near
+        ts.lon_data = lon_near
 
         return self._combine_temporary_files(ts, save_csv, save_nc, use_cache, tempfiles, lon_near, lat_near, height=ts.height)
 
@@ -570,7 +546,6 @@ def _get_near_coord(self, url: str, lon: float, lat: float):
             lat_near = ds.lat.sel(y=y, x=x).values[0][0]
             return {"x": x.values[0], "y": y.values[0]}, lon_near, lat_near
 
-    @override
     def _flatten_data_structure(self, ds: xr.Dataset, **flatten_dims):
         depth = self._get_values_for_dimension(ds, flatten_dims, "depth")
         variables_to_flatten = ["u", "v", "temp", "salt", "AKs"]
@@ -598,11 +573,9 @@ def _flatten_data_structure(self, ds: xr.Dataset, **flatten_dims):
 
 class NORA3WaveSpectrum(MetProduct):
 
-    @override
     def get_default_variables(self):
         return ["SPEC"]
 
-    @override
     def get_dates(self, start_date, end_date):
         return pd.date_range(start=start_date, end=end_date, freq="D")
 
@@ -626,7 +599,6 @@ def _get_near_coord(self, url: str, lon: float, lat: float):
             station += 1  # station labels are 1-indexed
             return {"x": station}, lon_near, lat_near
 
-    @override
     def import_data(self, ts: TimeSeries, save_csv=True, save_nc=False, use_cache=False):
         """
         Extract NORA3 wave spectra timeseries.
@@ -669,11 +641,9 @@ def import_data(self, ts: TimeSeries, save_csv=True, save_nc=False, use_cache=Fa
 
 class NORACWaveSpectrum(MetProduct):
 
-    @override
     def get_default_variables(self):
         return ["efth"]
 
-    @override
     def get_dates(self, start_date, end_date):
         return pd.date_range(
             start=datetime.strptime(start_date, "%Y-%m-%d").strftime("%Y-%m"),
@@ -733,11 +703,9 @@ def import_data(self, ts: TimeSeries, save_csv=True, save_nc=False, use_cache=Fa
 
 class E39Observations(MetProduct):
 
-    @override
     def get_default_variables(self):
         return ["Hm0"]
 
-    @override
     def get_dates(self, start_date, end_date):
         return pd.date_range(start=start_date, end=end_date, freq="MS")
 
diff --git a/metocean_api/ts/internal/tudelft/tudelft_products.py b/metocean_api/ts/internal/tudelft/tudelft_products.py
index 323d3d3..814ee5c 100644
--- a/metocean_api/ts/internal/tudelft/tudelft_products.py
+++ b/metocean_api/ts/internal/tudelft/tudelft_products.py
@@ -1,6 +1,6 @@
 from __future__ import annotations
 import os
-from typing import TYPE_CHECKING,override
+from typing import TYPE_CHECKING
 from datetime import datetime
 import pandas as pd
 import xarray as xr
@@ -20,7 +20,6 @@ def find_product(name: str) -> Product:
 
 class EchoWave(Product):
 
-    @override
     def import_data(self, ts: TimeSeries, save_csv=True, save_nc=False, use_cache=False):
         """
         Extract times series of the nearest gird point (lon,lat) from
@@ -52,7 +51,6 @@ def import_data(self, ts: TimeSeries, save_csv=True, save_nc=False, use_cache=Fa
             self.__clean_cache(tempfiles)
         return df
 
-    @override
     def download_temporary_files(self, ts: TimeSeries, use_cache: bool = False):
         ts.variable = [ 'ucur', 'vcur', 'uwnd', 'vwnd', 'wlv', 'ice', 'hs', 'lm', 't02', 't01', 'fp', 'dir', 'spr', 'dp', 'phs0', 'phs1', 'phs2', 'ptp0', 'ptp1', 'ptp2', 'pdir0', 'pdir1']
         dates = self.__get_dates(start_date=ts.start_time, end_date=ts.end_time)
diff --git a/setup.py b/setup.py
index fe883b5..ee4b989 100644
--- a/setup.py
+++ b/setup.py
@@ -22,7 +22,7 @@
         'dask',
         'pip',
         'netcdf4',
-        'cdsapi',
+        'cartopy',
     ],
     packages = setuptools.find_packages(),
     include_package_data = True,
diff --git a/tests/test_extract_data.py b/tests/test_extract_data.py
index bfbb82c..dbbf84a 100644
--- a/tests/test_extract_data.py
+++ b/tests/test_extract_data.py
@@ -11,13 +11,15 @@ def test_extract_nora3_wind():
     df_ts = ts.TimeSeries(lon=1.320, lat=53.324,start_time='2000-01-01', end_time='2000-01-31', product='NORA3_wind_sub')
     # Import data from thredds.met.no
     df_ts.import_data(save_csv=SAVE_CSV,save_nc=SAVE_NC, use_cache=USE_CACHE)
+    assert (df_ts.lat_data, df_ts.lon_data) == (53.32374838481946, 1.3199893172215793)
     assert df_ts.data.shape == (744,14)
 
 def test_download_of_temporary_files():
     # Pick a time region with a start and end time where the temporary files will cover more than the requested time
     df_ts = ts.TimeSeries(lon=1.320, lat=53.324,start_time='2020-10-21', end_time='2020-11-21', product='NORA3_wave_sub')
     product = products.find_product(df_ts.product)
-    files,_,_=product.download_temporary_files(df_ts)
+    files,lon_data,lat_data=product.download_temporary_files(df_ts)
+    assert (lat_data, lon_data) == (53.32494354248047, 1.3358169794082642)
     assert len(files) == 2
     with xr.open_mfdataset(files) as values:
         # Make sure we have all the data in the temporary files
@@ -31,12 +33,14 @@ def test_extract_nora3_wave():
     df_ts = ts.TimeSeries(lon=1.320, lat=53.324,start_time='2000-01-01', end_time='2000-01-31', product='NORA3_wave_sub')
     # Import data from thredds.met.no
     df_ts.import_data(save_csv=SAVE_CSV,save_nc=SAVE_NC, use_cache=USE_CACHE)
+    assert (df_ts.lat_data, df_ts.lon_data) == (53.32494354248047, 1.3358169794082642)
     assert df_ts.data.shape == (744,14)
 
 def test_nora3_wind_wave_combined():
     df_ts = ts.TimeSeries(lon=3.73, lat=64.60,start_time='2020-09-14', end_time='2020-09-15', product='NORA3_wind_wave', height=[10])
     # Import data from thredds.met.no
     df_ts.import_data(save_csv=SAVE_CSV,save_nc=SAVE_NC, use_cache=USE_CACHE)
+    assert (df_ts.lat_data, df_ts.lon_data) == (64.60475157243123, 3.752025547482376)
     assert df_ts.data.shape == (48, 16)
 
 #def test_extract_nora3_stormsurge():
@@ -50,12 +54,15 @@ def test_extract_nora3_atm():
     df_ts = ts.TimeSeries(lon=1.320, lat=53.324,start_time='2000-01-01', end_time='2000-01-31', product='NORA3_atm_sub')
     # Import data from thredds.met.no
     df_ts.import_data(save_csv=SAVE_CSV,save_nc=SAVE_NC, use_cache=USE_CACHE)
+    assert (df_ts.lat_data, df_ts.lon_data) == (53.32374838481946, 1.3199893172215793)
     assert df_ts.data.shape == (744,7)
 
 def test_extract_nora3_atm3hr():
     df_ts = ts.TimeSeries(lon=1.320, lat=53.324,start_time='2000-01-01', end_time='2000-01-31', product='NORA3_atm3hr_sub')
     # Import data from thredds.met.no
     df_ts.import_data(save_csv=SAVE_CSV,save_nc=SAVE_NC, use_cache=USE_CACHE)
+    print(f"product: {df_ts.product}: {df_ts.lat_data}, {df_ts.lon_data}")
+    assert (df_ts.lat_data, df_ts.lon_data) == (53.32374838481946, 1.3199893172215793)
     assert df_ts.data.shape == (248,30)
 
 def test_extract_obs():
@@ -68,6 +75,7 @@ def test_norkyst_800():
     df_ts = ts.TimeSeries(lon=3.73, lat=64.60,start_time='2020-09-14', end_time='2020-09-15', product='NORKYST800')
     # Import data from thredds.met.no
     df_ts.import_data(save_csv=SAVE_CSV,save_nc=SAVE_NC, use_cache=USE_CACHE)
+    assert (df_ts.lat_data, df_ts.lon_data) == (64.59832175874106, 3.728905373023728)
     assert df_ts.data.shape == (48, 65)
 
 def test_norkyst_da_zdepth():
@@ -76,12 +84,14 @@
     df_ts = ts.TimeSeries(lon=3.73, lat=64.60,start_time='2017-01-19', end_time='2017-01-20', product='NorkystDA_zdepth',depth=depth)
     # Import data from thredds.met.no
     df_ts.import_data(save_csv=SAVE_CSV,save_nc=SAVE_NC, use_cache=USE_CACHE)
+    assert (df_ts.lat_data, df_ts.lon_data) == (64.59537563943964, 3.74450378868417)
     assert df_ts.data.shape == (24, 16)
 
 def test_norkyst_da_surface():
     df_ts = ts.TimeSeries(lon=3.73, lat=64.60,start_time='2017-01-19', end_time='2017-01-20', product='NorkystDA_surface')
     # Import data from thredds.met.no
     df_ts.import_data(save_csv=SAVE_CSV,save_nc=SAVE_NC, use_cache=USE_CACHE)
+    assert (df_ts.lat_data, df_ts.lon_data) == (64.59537563943964, 3.74450378868417)
     assert df_ts.data.shape == (48, 5)
 
 def test_echowave():
@@ -94,10 +104,12 @@ def test_extract_nora3_wave_spectra():
     df_ts = ts.TimeSeries(lon=3.73, lat=64.60,start_time='2017-01-29',end_time='2017-02-02',product='NORA3_wave_spec')
     # Import data from thredds.met.no
     df_ts.import_data(save_csv=SAVE_CSV,save_nc=SAVE_NC, use_cache=USE_CACHE)
+    assert (df_ts.lat_data, df_ts.lon_data) == (64.60214233398438, 3.7667124271392822)
     assert df_ts.data.shape == (120,30,24)
 
 def test_extract_norac_wave_spectra():
     df_ts = ts.TimeSeries(lon=8, lat=64,start_time='2017-01-01',end_time='2017-01-04',product='NORAC_wave_spec')
     # Import data from thredds.met.no
     df_ts.import_data(save_csv=SAVE_CSV,save_nc=SAVE_NC, use_cache=USE_CACHE)
+    assert (df_ts.lat_data, df_ts.lon_data) == (64.03120422363281, 7.936006546020508)
     assert df_ts.data.shape == (744,45,36)