Skip to content

Commit

Permalink
Merge pull request #38 from efvik/main
Browse files Browse the repository at this point in the history
Adding API for NORAC and NORA3 wave spectra timeseries
  • Loading branch information
KonstantinChri authored Oct 22, 2024
2 parents 58ae022 + 9f66a22 commit 09bfa2d
Show file tree
Hide file tree
Showing 5 changed files with 164 additions and 2 deletions.
16 changes: 16 additions & 0 deletions docs/index.rst
Original file line number Diff line number Diff line change
Expand Up @@ -132,6 +132,14 @@ Several options for **product** are available. Please check the data catalog for
Dataset: https://thredds.met.no/thredds/catalog/stormrisk/catalog.html

* For wave spectra from NORA3 (Nordic Seas) developed by MET Norway:

.. code-block:: python
product='NORA3_wave_spec'
Dataset: https://thredds.met.no/thredds/catalog/windsurfer/mywavewam3km_spectra/catalog.html

* For coastal wave NORA3 data developed by MET Norway:

.. code-block:: python
Expand All @@ -140,6 +148,14 @@ Several options for **product** are available. Please check the data catalog for
Dataset: https://thredds.met.no/thredds/catalog/norac_wave/field/catalog.html

* For coastal wave spectra (Norwegian coast) developed by MET Norway:

.. code-block:: python
product='NORAC_wave_spec'
Dataset: https://thredds.met.no/thredds/catalog/norac_wave/spec/catalog.html

* For ocean data (sea level, temperature, currents, salinity over depth) from Norkyst800 (from 2016-09-14 to today) developed by MET Norway:

.. code-block:: python
Expand Down
21 changes: 20 additions & 1 deletion metocean_api/ts/aux_funcs.py
Original file line number Diff line number Diff line change
Expand Up @@ -102,6 +102,10 @@ def get_url_info(product, date) -> str:
return 'https://thredds.met.no/thredds/dodsC/nora3_subset_ocean/zdepth/{}/ocean_zdepth_2_4km-{}.nc'.format(date.strftime('%Y/%m'), date.strftime('%Y%m%d'))
elif product == 'ECHOWAVE':
return 'https://opendap.4tu.nl/thredds/dodsC/data2/djht/f359cd0f-d135-416c-9118-e79dccba57b9/1/{}/TU-MREL_EU_ATL-2M_{}.nc'.format(date.strftime('%Y'),date.strftime('%Y%m'))
elif product == "NORA3_wave_spec":
return 'https://thredds.met.no/thredds/dodsC/windsurfer/mywavewam3km_spectra/'+date.strftime('%Y')+'/'+date.strftime('%m')+'/'+'SPC'+date.strftime('%Y%m%d')+'00.nc'
elif product == "NORAC_wave_spec":
return 'https://thredds.met.no/thredds/dodsC/norac_wave/spec/ww3_spec.'+date.strftime('%Y%m')+'.nc'

raise ValueError(f'Product not handled {product}')

Expand All @@ -122,6 +126,11 @@ def get_dates(product, start_date, end_date):
return pd.date_range(start=start_date , end=end_date, freq='D')
elif product.startswith('E39'):
return pd.date_range(start=start_date , end=end_date, freq='MS')
elif product == 'NORA3_wave_spec':
return pd.date_range(start=start_date , end=end_date, freq='D')
elif product == "NORAC_wave_spec":
return pd.date_range(start=datetime.strptime(start_date, '%Y-%m-%d').strftime('%Y-%m'),
end=datetime.strptime(end_date, '%Y-%m-%d').strftime('%Y-%m'), freq='MS')
raise ValueError(f'Product not found {product}')

def __drop_variables(product: str):
Expand Down Expand Up @@ -189,11 +198,21 @@ def get_near_coord(infile, lon, lat, product):
lon_near = ds.lon.sel(Y=y, X=x).values[0][0]
lat_near = ds.lat.sel(Y=y, X=x).values[0][0]
return {'X': x, 'Y': y}, lon_near, lat_near
elif product=='NorkystDA_surface' or 'NorkystDA_zdepth':
elif product=='NorkystDA_surface' or product=='NorkystDA_zdepth':
x, y = __find_nearest_cart_coord(ds.lon, ds.lat, lon, lat)
lon_near = ds.lon.sel(y=y, x=x).values[0][0]
lat_near = ds.lat.sel(y=y, x=x).values[0][0]
return {'x': x.values[0], 'y': y.values[0]}, lon_near, lat_near
elif product == "NORA3_wave_spec":
station = __distance_2points(ds.latitude.values,ds.longitude.values,lat,lon).argmin()
lon_near = ds.longitude.values[0,station]
lat_near = ds.latitude.values[0,station]
return {'x':station}, lon_near, lat_near
elif product == "NORAC_wave_spec":
station = __distance_2points(ds.latitude.values,ds.longitude.values,lat,lon).argmin()
lon_near = ds.longitude.values[0,station]
lat_near = ds.latitude.values[0,station]
return {'station':station}, lon_near, lat_near
else:
raise ValueError(f'Product not found {product}')

Expand Down
108 changes: 108 additions & 0 deletions metocean_api/ts/read_metno.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
import os
import xarray as xr
import numpy as np
import pandas as pd
from tqdm import tqdm
from .aux_funcs import (
get_dates,
Expand Down Expand Up @@ -318,6 +319,113 @@ def __combine_temporary_files(ts: TimeSeries, save_csv, save_nc, use_cache,tempf

return df

def nora3_wave_spec(ts: TimeSeries, save_csv=False, save_nc=False, use_cache=False) -> xr.DataArray:
    """
    Extract a NORA3 wave-spectra timeseries for the grid point nearest (ts.lon, ts.lat).

    One remote file is opened per day in [ts.start_time, ts.end_time]; the
    nearest station is reduced out and cached as a temporary NetCDF file,
    and the daily files are finally concatenated along time.

    Parameters
    ----------
    ts : TimeSeries
        Request object; ts.variable is expected to already contain 'SPEC'.
    save_csv : bool
        If True, also write a long-format CSV (time, frequency, direction, value)
        to ts.datafile.
    save_nc : bool
        If True, also write the combined spectra to a NetCDF file.
    use_cache : bool
        If True, reuse existing temporary files instead of re-downloading.

    Returns
    -------
    xr.DataArray
        Spectra; the flattening below assumes dims ordered (time, freq, direction)
        with the nearest-point lon/lat stored in the attrs.
    """
    ts.variable.append("longitude")  # keep info of regular lon
    ts.variable.append("latitude")   # keep info of regular lat
    dates = get_dates(ts.product, ts.start_time, ts.end_time)
    tempfiles = get_tempfiles(ts.product, ts.lon, ts.lat, dates)

    # Nearest-station indexer; resolved lazily from the first file that must
    # actually be downloaded, so a fully cached request opens no remote dataset.
    station = None

    # extract point and create temp files
    for i, date in enumerate(dates):
        if use_cache and os.path.exists(tempfiles[i]):
            print(f"Found cached file {tempfiles[i]}. Using this instead")
            continue

        url = get_url_info(ts.product, date)
        if station is None:
            # The station grid is the same in every daily file, so one lookup suffices.
            station, _lon_near, _lat_near = get_near_coord(
                url, ts.lon, ts.lat, ts.product
            )

        with xr.open_dataset(url) as dataset:
            # Reduce to the wanted variables and coordinates
            dataset = dataset[ts.variable]
            dataset = dataset.sel(station).squeeze(drop=True)
            dataset.to_netcdf(tempfiles[i])

    __remove_if_datafile_exists(ts.datafile)
    # merge temp files and create combined result
    with xr.open_mfdataset(tempfiles) as ds:
        da = ds["SPEC"]
        da.attrs["longitude"] = float(ds["longitude"][0].values)
        da.attrs["latitude"] = float(ds["latitude"][0].values)

        if save_csv:
            # Long format: one row per (time, frequency, direction) combination,
            # in the same order as da.values.flatten().
            s = da.shape
            csv_data = {"time": da["time"].values.repeat(s[1]*s[2]),
                        "frequency": np.tile(da["freq"].values.repeat(s[2]), s[0]),
                        "direction": np.tile(da["direction"].values, s[0]*s[1]),
                        "value": da.values.flatten()}
            csv_data = pd.DataFrame(csv_data, columns=["time", "frequency", "direction", "value"])
            csv_data.to_csv(ts.datafile, index=False)

        if save_nc:
            # Save the unaltered structure
            da.to_netcdf(ts.datafile.replace(".csv", ".nc"))

    return da

def norac_wave_spec(ts: TimeSeries, save_csv=False, save_nc=False, use_cache=False) -> xr.DataArray:
    """
    Extract a NORAC wave-spectra timeseries for the grid point nearest (ts.lon, ts.lat).

    One remote (monthly) file is opened per date returned by get_dates; the
    nearest station is reduced out and cached as a temporary NetCDF file,
    and the files are finally concatenated along time.

    Parameters
    ----------
    ts : TimeSeries
        Request object; ts.variable is expected to already contain 'efth'.
    save_csv : bool
        If True, also write a long-format CSV (time, frequency, direction, value)
        to ts.datafile.
    save_nc : bool
        If True, also write the combined spectra to a NetCDF file.
    use_cache : bool
        If True, reuse existing temporary files instead of re-downloading.

    Returns
    -------
    xr.DataArray
        Spectra; the flattening below assumes dims ordered (time, frequency,
        direction) with the nearest-point lon/lat stored in the attrs.
    """
    ts.variable.append("longitude")  # keep info of regular lon
    ts.variable.append("latitude")   # keep info of regular lat
    dates = get_dates(ts.product, ts.start_time, ts.end_time)
    tempfiles = get_tempfiles(ts.product, ts.lon, ts.lat, dates)

    # Nearest-station indexer; resolved lazily from the first file that must
    # actually be downloaded, so a fully cached request opens no remote dataset.
    station = None

    # extract point and create temp files
    for i, date in enumerate(dates):
        if use_cache and os.path.exists(tempfiles[i]):
            print(f"Found cached file {tempfiles[i]}. Using this instead")
            continue

        url = get_url_info(ts.product, date)
        if station is None:
            # The station grid is the same in every file, so one lookup suffices.
            station, _lon_near, _lat_near = get_near_coord(
                url, ts.lon, ts.lat, ts.product
            )

        with xr.open_dataset(url) as dataset:
            # Reduce to the wanted variables and coordinates
            dataset = dataset[ts.variable]
            dataset = dataset.sel(station).squeeze(drop=True)
            dataset.to_netcdf(tempfiles[i])

    __remove_if_datafile_exists(ts.datafile)
    # merge temp files and create combined result
    with xr.open_mfdataset(tempfiles) as ds:
        da = ds["efth"]
        da.attrs["longitude"] = float(ds["longitude"][0].values)
        da.attrs["latitude"] = float(ds["latitude"][0].values)

        if save_csv:
            # Long format: one row per (time, frequency, direction) combination,
            # in the same order as da.values.flatten().
            s = da.shape
            csv_data = {"time": da["time"].values.repeat(s[1]*s[2]),
                        "frequency": np.tile(da["frequency"].values.repeat(s[2]), s[0]),
                        "direction": np.tile(da["direction"].values, s[0]*s[1]),
                        "value": da.values.flatten()}
            csv_data = pd.DataFrame(csv_data, columns=["time", "frequency", "direction", "value"])
            csv_data.to_csv(ts.datafile, index=False)

        if save_nc:
            # Save the unaltered structure
            da.to_netcdf(ts.datafile.replace(".csv", ".nc"))

    return da


def __clean_cache(tempfiles):
for tmpfile in tempfiles:
try:
Expand Down
9 changes: 8 additions & 1 deletion metocean_api/ts/ts_mod.py
Original file line number Diff line number Diff line change
Expand Up @@ -208,7 +208,14 @@ def import_data(self, save_csv=True, save_nc=False, use_cache=False):
elif self.product == 'ECHOWAVE':
self.variable = [ 'ucur', 'vcur', 'uwnd', 'vwnd', 'wlv', 'ice', 'hs', 'lm', 't02', 't01', 'fp', 'dir', 'spr', 'dp', 'phs0', 'phs1', 'phs2', 'ptp0', 'ptp1', 'ptp2', 'pdir0', 'pdir1']
self.data = tudelft.echowave_ts(self, save_csv, save_nc, use_cache)

elif self.product == 'NORA3_wave_spec':
self.variable = ['SPEC']
self.data = metno.nora3_wave_spec(self,save_csv,save_nc,use_cache)
elif self.product == 'NORAC_wave_spec':
self.variable = ['efth']
self.data = metno.norac_wave_spec(self,save_csv,save_nc,use_cache)
else:
raise ValueError("Product not found.")
def load_data(self, local_file):
import pandas as pd

Expand Down
12 changes: 12 additions & 0 deletions tests/test_extract_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,3 +60,15 @@ def test_echowave():
# Import data from https://data.4tu.nl/datasets/
df_ts.import_data(save_csv=False,save_nc=False)
assert df_ts.data.shape == (48, 22)

def test_extract_nora3_wave_spectra():
    """Fetch 5 days of NORA3 spectra and check the (time, freq, direction) shape."""
    timeseries = ts.TimeSeries(
        lon=3.73,
        lat=64.60,
        start_time='2017-01-29',
        end_time='2017-02-02',
        product='NORA3_wave_spec',
    )
    # Import data from thredds.met.no
    timeseries.import_data(save_csv=False, save_nc=False)
    # 5 days of hourly records x 30 frequencies x 24 directions
    assert timeseries.data.shape == (120, 30, 24)

def test_extract_norac_wave_spectra():
    """Fetch NORAC spectra and check the (time, freq, direction) shape."""
    timeseries = ts.TimeSeries(
        lon=8,
        lat=64,
        start_time='2017-01-01',
        end_time='2017-01-04',
        product='NORAC_wave_spec',
    )
    # Import data from thredds.met.no
    timeseries.import_data(save_csv=False, save_nc=False)
    # 744 hourly records (presumably the full monthly file) x 45 freqs x 36 dirs
    assert timeseries.data.shape == (744, 45, 36)

0 comments on commit 09bfa2d

Please sign in to comment.