diff --git a/.gitignore b/.gitignore index daa30ec6e..d4748d3a9 100644 --- a/.gitignore +++ b/.gitignore @@ -26,4 +26,6 @@ htmlcov docs/_build \tmp -.venv/ \ No newline at end of file +.venv/ + +.testmondata \ No newline at end of file diff --git a/Makefile b/Makefile index f17987469..26ad350e1 100644 --- a/Makefile +++ b/Makefile @@ -1,3 +1,4 @@ +LIB = mikeio LIB = mikeio @@ -19,6 +20,7 @@ coverage: pytest --cov-report html --cov=$(LIB) tests/ doctest: + # only test a specific set of files for now pytest mikeio/dfs/*.py mikeio/dfsu/*.py mikeio/eum/*.py mikeio/pfs/*.py mikeio/spatial/_grid_geometry.py --doctest-modules rm -f *.dfs* # remove temporary files, created from doctests diff --git a/mikeio/dataset/_dataarray.py b/mikeio/dataset/_dataarray.py index 35347f6b2..9848b8f3c 100644 --- a/mikeio/dataset/_dataarray.py +++ b/mikeio/dataset/_dataarray.py @@ -291,7 +291,6 @@ def _is_compatible(self, other, raise_error=False): problems.append("Number of timesteps must be the same") if self.start_time != other.start_time: problems.append("start_time must be the same") - #if type(self.geometry) != type(other.geometry): if not isinstance(self.geometry, other.geometry.__class__): problems.append("The type of geometry must be the same") if hasattr(self.geometry, "__eq__"): @@ -344,12 +343,12 @@ def _set_spectral_attributes(self, geometry): # ============= Basic properties/methods =========== @property - def name(self) -> Optional[str]: + def name(self) -> str: """Name of this DataArray (=da.item.name)""" return self.item.name @name.setter - def name(self, value): + def name(self, value: str) -> None: self.item.name = value @property diff --git a/mikeio/dataset/_dataset.py b/mikeio/dataset/_dataset.py index 780290aa1..9bad3d441 100644 --- a/mikeio/dataset/_dataset.py +++ b/mikeio/dataset/_dataset.py @@ -13,7 +13,9 @@ MutableMapping, Any, overload, - Hashable + Hashable, + Set + ) @@ -94,7 +96,8 @@ def __init__( data = self._create_dataarrays( data=data, time=time, 
items=items, geometry=geometry, zn=zn, dims=dims ) # type: ignore - self._init_from_DataArrays(data, validate=validate) + self._data_vars = self._init_from_DataArrays(data, validate=validate) + self.plot = _DatasetPlotter(self) @staticmethod def _is_DataArrays(data): @@ -135,31 +138,23 @@ def _create_dataarrays( ) return data_vars - def _init_from_DataArrays(self, data, validate=True): + def _init_from_DataArrays(self, data, validate=True) -> MutableMapping[str, DataArray]: """Initialize Dataset object with Iterable of DataArrays""" - self._data_vars = self._DataArrays_as_mapping(data) + data_vars = self._DataArrays_as_mapping(data) - if (len(self) > 1) and validate: - first = self[0] - for i in range(1, len(self)): - da = self[i] + if (len(data_vars) > 1) and validate: + first = list(data_vars.values())[0] + for da in data_vars.values(): first._is_compatible(da, raise_error=True) - self._check_all_different_ids(self._data_vars.values()) + self._check_all_different_ids(data_vars.values()) - self.__itemattr = [] - for key, value in self._data_vars.items(): + # TODO is it necessary to keep track of item names? 
+ self.__itemattr: Set[str] = set() + for key, value in data_vars.items(): self._set_name_attr(key, value) - self.plot = _DatasetPlotter(self) - - if len(self) > 0: - self._set_spectral_attributes(self.geometry) - - # since Dataset is MutableMapping it has values and keys by default - # but we delete those to avoid confusion - # self.values = None - self.keys = None + return data_vars @property def values(self): @@ -232,11 +227,7 @@ def _DataArrays_as_mapping(data): data = [data] item_names = Dataset._unique_item_names(data) - - data_map = {} - for n, da in zip(item_names, data): - data_map[n] = da - return data_map + return {key: da for key, da in zip(item_names, data)} @staticmethod def _validate_item_names_and_keys(data_map: Mapping[str, DataArray]): @@ -251,13 +242,12 @@ def _validate_item_names_and_keys(data_map: Mapping[str, DataArray]): return data_map @staticmethod - def _unique_item_names(das: Sequence[DataArray]): + def _unique_item_names(das: Sequence[DataArray]) -> List[str]: item_names = [da.name for da in das] if len(set(item_names)) != len(item_names): raise ValueError( f"Item names must be unique! ({item_names}). Please rename before constructing Dataset." 
) - # TODO: make a list of unique items names return item_names @staticmethod @@ -303,13 +293,6 @@ def _check_already_present(self, new_da): for da in self: self._id_of_DataArrays_equal(da, new_da) - def _set_spectral_attributes(self, geometry): - if hasattr(geometry, "frequencies") and hasattr(geometry, "directions"): - self.frequencies = geometry.frequencies - self.n_frequencies = geometry.n_frequencies - self.directions = geometry.directions - self.n_directions = geometry.n_directions - # ============ end of init ============= # ============= Basic properties/methods =========== @@ -643,7 +626,7 @@ def rename(self, mapper: Mapping[str, str], inplace=False): def _set_name_attr(self, name: str, value: DataArray): name = _to_safe_name(name) if name not in self.__itemattr: - self.__itemattr.append(name) # keep track of what we insert + self.__itemattr.add(name) # keep track of what we insert setattr(self, name, value) def _del_name_attr(self, name: str): diff --git a/mikeio/dfs/_dfs.py b/mikeio/dfs/_dfs.py index 380c14547..0de9ec4e1 100644 --- a/mikeio/dfs/_dfs.py +++ b/mikeio/dfs/_dfs.py @@ -1,6 +1,7 @@ from __future__ import annotations import warnings from abc import abstractmethod +from dataclasses import dataclass from datetime import datetime from typing import List, Optional, Tuple, Sequence import numpy as np @@ -25,6 +26,16 @@ from ..spatial import GeometryUndefined from .._time import DateTimeSelector +@dataclass +class DfsHeader: + + n_items: int + n_timesteps: int + start_time: datetime + dt: float + coordinates: Tuple[str, float, float, float] + items: List[ItemInfo] + def _read_item_time_step( *, @@ -382,23 +393,28 @@ def _write( self, *, filename, - data, + ds, dt, coordinate=None, title, keep_open=False, ): + + assert isinstance(ds, Dataset) neq_datetimes = None - if isinstance(data, Dataset) and not data.is_equidistant: - neq_datetimes = data.time + if isinstance(ds, Dataset) and not ds.is_equidistant: + neq_datetimes = ds.time - 
self._write_handle_common_arguments( - title=title, data=data, coordinate=coordinate, dt=dt + header, data = self._write_handle_common_arguments( + title=title, data=ds, dt=dt, coordinate=coordinate ) + shape = np.shape(data[0]) t_offset = 0 if len(shape) == self._ndim else 1 + + # TODO find out a clever way to handle the grid dimensions if self._ndim == 1: self._nx = shape[t_offset + 0] elif self._ndim == 2: @@ -410,17 +426,19 @@ def _write( self._nx = shape[t_offset + 2] self._factory = DfsFactory() + + # TODO pass grid self._set_spatial_axis() if self._ndim == 1: - if not all(np.shape(d)[t_offset + 0] == self._nx for d in self._data): + if not all(np.shape(d)[t_offset + 0] == self._nx for d in data): raise DataDimensionMismatch() if self._ndim == 2: - if not all(np.shape(d)[t_offset + 0] == self._ny for d in self._data): + if not all(np.shape(d)[t_offset + 0] == self._ny for d in data): raise DataDimensionMismatch() - if not all(np.shape(d)[t_offset + 1] == self._nx for d in self._data): + if not all(np.shape(d)[t_offset + 1] == self._nx for d in data): raise DataDimensionMismatch() if neq_datetimes is not None: @@ -428,15 +446,15 @@ def _write( start_time = neq_datetimes[0] self._start_time = start_time - dfs = self._setup_header(filename) + dfs = self._setup_header(filename, header) self._dfs = dfs deletevalue = dfs.FileInfo.DeleteValueFloat # -1.0000000031710769e-30 - for i in trange(self._n_timesteps, disable=not self.show_progress): - for item in range(self._n_items): + for i in trange(header.n_timesteps, disable=not self.show_progress): + for item in range(header.n_items): - d = self._data[item][i] if t_offset == 1 else self._data[item] + d = data[item][i] if t_offset == 1 else data[item] d = d.copy() # to avoid modifying the input d[np.isnan(d)] = deletevalue @@ -453,41 +471,29 @@ def _write( return self def append(self, data: Dataset) -> None: - """Append to a dfs file opened with `write(...,keep_open=True)` + + warnings.warn(FutureWarning("append() is 
deprecated.")) - Parameters - ----------- - data: Dataset - """ + if not data.dims == ("time", "y", "x"): + raise NotImplementedError( + "Append is only available for 2D files with dims ('time', 'y', 'x')" + ) deletevalue = self._dfs.FileInfo.DeleteValueFloat # -1.0000000031710769e-30 - for i in trange(self._n_timesteps, disable=not self.show_progress): - for item in range(self._n_items): + for i in trange(data.n_timesteps, disable=not self.show_progress): + for da in data: - d = data[item].to_numpy()[i] + values = da.to_numpy() + d = values[i] d = d.copy() # to avoid modifying the input d[np.isnan(d)] = deletevalue - if self._ndim == 1: - darray = d - - if self._ndim == 2: - d = d.reshape(self.shape[1:]) - darray = d.reshape(d.size, 1)[:, 0] - - if self._ndim == 3: - raise NotImplementedError( - "Append is not yet available for 3D files" - ) - - if self._is_equidistant: - self._dfs.WriteItemTimeStepNext(0, darray.astype(np.float32)) - else: - raise NotImplementedError( - "Append is not yet available for non-equidistant files" - ) - + d = d.reshape(data.shape[1:]) + darray = d.reshape(d.size, 1)[:, 0] + self._dfs.WriteItemTimeStepNext(0, darray.astype(np.float32)) + + def __enter__(self): return self @@ -498,24 +504,24 @@ def close(self): "Finalize write for a dfs file opened with `write(...,keep_open=True)`" self._dfs.Close() - def _write_handle_common_arguments(self, *, title, data, coordinate, dt): + def _write_handle_common_arguments(self, *, title: Optional[str], data: Dataset, coordinate, dt: Optional[float] = None): if title is None: self._title = "" - self._n_timesteps = np.shape(data[0])[0] - self._n_items = len(data) + n_timesteps = data.n_timesteps + n_items = data.n_items if coordinate is None: if self._projstr is not None: - self._coordinate = [ + coordinate = [ self._projstr, self._longitude, self._latitude, self._orientation, ] elif isinstance(data, Dataset) and (data.geometry is not None): - self._coordinate = [ + coordinate = [ 
data.geometry.projection_string, data.geometry.origin[0], data.geometry.origin[1], @@ -523,48 +529,44 @@ def _write_handle_common_arguments(self, *, title, data, coordinate, dt): ] else: warnings.warn("No coordinate system provided") - self._coordinate = ["LONG/LAT", 0, 0, 0] + coordinate = ["LONG/LAT", 0, 0, 0] else: self._override_coordinates = True - self._coordinate = coordinate - - if isinstance(data, Dataset): - self._items = data.items - self._start_time = data.time[0] - self._n_timesteps = len(data.time) - if dt is None and len(data.time) > 1: - self._dt = (data.time[1] - data.time[0]).total_seconds() - self._data = data.to_numpy() - else: - raise TypeError("data must be supplied in the form of a mikeio.Dataset") - if dt: - self._dt = dt + assert isinstance(data, Dataset), "data must be supplied in the form of a mikeio.Dataset" - if self._dt is None: - self._dt = 1 - if self._n_timesteps > 1: + items = data.items + start_time = data.time[0] + n_timesteps = len(data.time) + if dt is None and len(data.time) > 1: + dt = (data.time[1] - data.time[0]).total_seconds() + data = data.to_numpy() + + if dt is None: + dt = 1 + if n_timesteps > 1: warnings.warn("No timestep supplied. 
Using 1s.") - if self._items is None: - self._items = [ItemInfo(f"Item {i+1}") for i in range(self._n_items)] + if items is None: + items = [ItemInfo(f"Item {i+1}") for i in range(n_items)] - self._timeseries_unit = TimeStepUnit.SECOND + header = DfsHeader(n_items=n_items, n_timesteps=n_timesteps, dt=dt, start_time=start_time, coordinates=coordinate, items=items) + return header, data - def _setup_header(self, filename): + def _setup_header(self, filename: str, header: DfsHeader): - system_start_time = self._start_time + system_start_time = header.start_time self._builder.SetDataType(0) - proj = self._factory.CreateProjectionGeoOrigin(*self._coordinate) + proj = self._factory.CreateProjectionGeoOrigin(*header.coordinates) self._builder.SetGeographicalProjection(proj) if self._is_equidistant: self._builder.SetTemporalAxis( self._factory.CreateTemporalEqCalendarAxis( - self._timeseries_unit, system_start_time, 0, self._dt + self._timeseries_unit, system_start_time, 0, header.dt ) ) else: @@ -574,7 +576,7 @@ def _setup_header(self, filename): ) ) - for item in self._items: + for item in header.items: self._builder.AddCreateDynamicItem( item.name, eumQuantity.Create(item.type, item.unit), diff --git a/mikeio/dfs/_dfs0.py b/mikeio/dfs/_dfs0.py index ee21baddf..9ca11385f 100644 --- a/mikeio/dfs/_dfs0.py +++ b/mikeio/dfs/_dfs0.py @@ -1,6 +1,7 @@ from pathlib import Path import warnings from datetime import datetime, timedelta +from typing import Sequence import numpy as np import pandas as pd @@ -260,30 +261,28 @@ def _to_dfs_datatype(dtype): raise TypeError("Dfs files only support float or double") - def _setup_header(self): + @staticmethod + def _setup_header(title:str, filename:str, start_time, dt:float, is_equidistant: bool, dtype, items: Sequence[ItemInfo]): factory = DfsFactory() - builder = DfsBuilder.Create(self._title, "mikeio", __dfs_version__) + builder = DfsBuilder.Create(title, "mikeio", __dfs_version__) builder.SetDataType(1) 
builder.SetGeographicalProjection(factory.CreateProjectionUndefined()) - system_start_time = self._start_time - - if self._is_equidistant: + if is_equidistant: temporal_axis = factory.CreateTemporalEqCalendarAxis( - TimeStepUnit.SECOND, system_start_time, 0, self._dt + TimeStepUnit.SECOND, start_time, 0, dt ) else: temporal_axis = factory.CreateTemporalNonEqCalendarAxis( - TimeStepUnit.SECOND, system_start_time + TimeStepUnit.SECOND, start_time ) builder.SetTemporalAxis(temporal_axis) builder.SetItemStatisticsType(StatType.RegularStat) - dtype_dfs = self._to_dfs_datatype(self._dtype) + dtype_dfs = Dfs0._to_dfs_datatype(dtype) - for i in range(self._n_items): - item = self._items[i] + for item in items: newitem = builder.CreateDynamicItemBuilder() quantity = eumQuantity.Create(item.type, item.unit) newitem.Set( @@ -301,9 +300,9 @@ def _setup_header(self): builder.AddDynamicItem(newitem.GetDynamicItemInfo()) try: - builder.CreateFile(self._filename) + builder.CreateFile(filename) except IOError: - raise IOError(f"Cannot create dfs0 file: {self._filename}") + raise IOError(f"Cannot create dfs0 file: {filename}") return builder.GetFile() @@ -394,7 +393,16 @@ def write( self._dt = float(self._dt) t_seconds = self._dt * np.arange(float(self._n_timesteps)) - dfs = self._setup_header() + + dfs = self._setup_header( + title=self._title, + filename=self._filename, + dt = self._dt, + start_time = self._start_time, + is_equidistant = self._is_equidistant, + dtype=self._dtype, + items=self._items + ) delete_value = dfs.FileInfo.DeleteValueFloat diff --git a/mikeio/dfs/_dfs1.py b/mikeio/dfs/_dfs1.py index 3a8ac65d8..daef5599a 100644 --- a/mikeio/dfs/_dfs1.py +++ b/mikeio/dfs/_dfs1.py @@ -107,7 +107,7 @@ def write( self._builder = DfsBuilder.Create(title, "mikeio", __dfs_version__) self._dx = dx - self._write(filename=filename, data=data, dt=dt, title=title) + self._write(filename=filename, ds=data, dt=dt, title=title) def _set_spatial_axis(self): self._builder.SetSpatialAxis( 
diff --git a/mikeio/dfs/_dfs2.py b/mikeio/dfs/_dfs2.py index 92f5c3a04..40730ff76 100644 --- a/mikeio/dfs/_dfs2.py +++ b/mikeio/dfs/_dfs2.py @@ -1,6 +1,7 @@ -from typing import List, Tuple - from copy import deepcopy +from typing import List, Tuple, Optional +import warnings + import numpy as np import pandas as pd from tqdm import tqdm @@ -109,10 +110,11 @@ class Dfs2(_Dfs123): def __init__(self, filename=None, type: str = "horizontal"): super().__init__(filename) - self._dx = None - self._dy = None - self._nx = None - self._ny = None + # TODO find a better way to avoid initializing these non-sensical values + self._dx = 0.0 + self._dy = 0.0 + self._nx = 0 + self._ny = 0 self._x0 = 0.0 self._y0 = 0.0 self.geometry = None @@ -280,56 +282,25 @@ def _open(self): def write( self, - filename, - data, - dt=None, - dx=None, - dy=None, - title=None, - keep_open=False, + filename: str, + data: Dataset, + dt: Optional[float] = None, + title: Optional[str]=None, + keep_open: bool =False, ): - """ - Create a dfs2 file - - Parameters - ---------- - - filename: str - Location to write the dfs2 file - data: Dataset - list of matrices, one for each item. Matrix dimension: time, y, x - dt: float, optional - The time step in seconds. - dx: float, optional - length of each grid in the x direction (projection units) - dy: float, optional - length of each grid in the y direction (projection units) - title: str, optional - title of the dfs2 file. Default is blank. 
- keep_open: bool, optional - Keep file open for appending - """ - if isinstance(data, list): - raise TypeError( - "supplying data as a list of numpy arrays is deprecated, please supply data in the form of a Dataset", - ) + + # this method is deprecated + warnings.warn(FutureWarning("Dfs2.write() is deprecated, use Dataset.to_dfs() instead")) filename = str(filename) self._builder = DfsBuilder.Create(title, "mikeio", __dfs_version__) - if not self._dx: - self._dx = 1 - if dx: - self._dx = dx - - if not self._dy: - self._dy = 1 - if dy: - self._dy = dy + self._dx = data.geometry.dx + self._dy = data.geometry.dy self._write( filename=filename, - data=data, + ds=data, dt=dt, title=title, keep_open=keep_open, diff --git a/mikeio/dfs/_dfs3.py b/mikeio/dfs/_dfs3.py index 01693200a..9fecd3576 100644 --- a/mikeio/dfs/_dfs3.py +++ b/mikeio/dfs/_dfs3.py @@ -362,7 +362,7 @@ def write( self._write( filename=filename, - data=data, + ds=data, dt=dt, coordinate=coordinate, title=title, diff --git a/mikeio/dfsu/_dfsu.py b/mikeio/dfsu/_dfsu.py index 1efedad0e..82c4cf4c8 100644 --- a/mikeio/dfsu/_dfsu.py +++ b/mikeio/dfsu/_dfsu.py @@ -223,13 +223,13 @@ def _read_dfsu_header(self, dfs): if self.is_spectral: dir = dfs.Directions - self._directions = None if dir is None else dir * (180 / np.pi) - self._frequencies = dfs.Frequencies + directions = None if dir is None else dir * (180 / np.pi) + frequencies = dfs.Frequencies # geometry if self._type == DfsuFileType.DfsuSpectral0D: self._geometry = GeometryFMPointSpectrum( - frequencies=self.frequencies, directions=self.directions + frequencies=frequencies, directions=directions ) else: nc, codes, node_ids = self._get_nodes_from_source(dfs) @@ -265,8 +265,8 @@ def _read_dfsu_header(self, dfs): element_ids=el_ids, node_ids=node_ids, validate=False, - frequencies=self.frequencies, - directions=self.directions, + frequencies=frequencies, + directions=directions, ) elif self._type == DfsuFileType.DfsuSpectral2D: self._geometry = 
GeometryFMAreaSpectrum( @@ -278,8 +278,8 @@ def _read_dfsu_header(self, dfs): element_ids=el_ids, node_ids=node_ids, validate=False, - frequencies=self.frequencies, - directions=self.directions, + frequencies=frequencies, + directions=directions, ) else: self._geometry = GeometryFM2D( @@ -1156,6 +1156,7 @@ def _write( builder.ApplicationVersion = __dfs_version__ try: + # TODO self._dfs is used by append, can we handle this better? self._dfs = builder.CreateFile(filename) except IOError: print("cannot create dfsu file: ", filename) diff --git a/mikeio/dfsu/_spectral.py b/mikeio/dfsu/_spectral.py index 3a9a99db4..c9719b222 100644 --- a/mikeio/dfsu/_spectral.py +++ b/mikeio/dfsu/_spectral.py @@ -21,7 +21,7 @@ def n_frequencies(self): @property def frequencies(self): """Frequency axis""" - return self._frequencies + return self.geometry._frequencies @property def n_directions(self): @@ -31,7 +31,7 @@ def n_directions(self): @property def directions(self): """Directional axis""" - return self._directions + return self.geometry._directions def _get_spectral_data_shape(self, n_steps: int, elements): dims = [] if n_steps == 1 else ["time"] diff --git a/tests/test_dfs2.py b/tests/test_dfs2.py index 54cba8a01..b186a10c5 100644 --- a/tests/test_dfs2.py +++ b/tests/test_dfs2.py @@ -431,7 +431,7 @@ def test_write_selected_item_to_new_file(dfs2_random_2items, tmp_path): ds = dfs.read(items=["Untitled"]) - dfs.write(fp, ds) + ds.to_dfs(fp) dfs2 = mikeio.open(fp) @@ -442,9 +442,9 @@ def test_write_selected_item_to_new_file(dfs2_random_2items, tmp_path): assert dfs.start_time == dfs2.start_time assert dfs.end_time == dfs2.end_time assert dfs.projection_string == dfs2.projection_string - assert dfs.longitude == dfs2.longitude - assert dfs.latitude == dfs2.latitude - assert dfs.orientation == dfs2.orientation + assert dfs.longitude == pytest.approx(dfs2.longitude) + assert dfs.latitude == pytest.approx(dfs2.latitude) + assert dfs.orientation == pytest.approx(dfs2.orientation) def 
test_repr(dfs2_gebco): @@ -486,7 +486,7 @@ def test_write_modified_data_to_new_file(dfs2_gebco, tmp_path): ds[0] = ds[0] + 10.0 - dfs.write(fp, ds) + ds.to_dfs(fp) dfsmod = mikeio.open(fp) @@ -651,12 +651,18 @@ def test_incremental_write_from_dfs2(tmp_path): ds = dfs.read(time=[0], keepdims=True) # assert ds.timestep == dfs.timestep, # ds.timestep is undefined + + # TODO find a better way to do this, without having to create a new dfs2 object dfs_to_write = Dfs2() - dfs_to_write.write(fp, ds, dt=dfs.timestep, keep_open=True) + + with pytest.warns(FutureWarning): + dfs_to_write.write(fp, ds, dt=dfs.timestep, keep_open=True) for i in range(1, nt): ds = dfs.read(time=[i], keepdims=True) - dfs_to_write.append(ds) + + with pytest.warns(FutureWarning): + dfs_to_write.append(ds) dfs_to_write.close() @@ -677,11 +683,14 @@ def test_incremental_write_from_dfs2_context_manager(tmp_path): ds = dfs.read(time=[0], keepdims=True) dfs_to_write = Dfs2() - with dfs_to_write.write(fp, ds, dt=dfs.timestep, keep_open=True) as f: - - for i in range(1, nt): - ds = dfs.read(time=[i], keepdims=True) - f.append(ds) + + with pytest.warns(FutureWarning): + with dfs_to_write.write(fp, ds, dt=dfs.timestep, keep_open=True) as f: + + for i in range(1, nt): + ds = dfs.read(time=[i], keepdims=True) + with pytest.warns(FutureWarning): + f.append(ds) # dfs_to_write.close() # called automagically by context manager diff --git a/tests/test_dfsu_spectral.py b/tests/test_dfsu_spectral.py index 3345ccc06..ac74061bc 100644 --- a/tests/test_dfsu_spectral.py +++ b/tests/test_dfsu_spectral.py @@ -178,16 +178,14 @@ def test_read_area_spectrum_xy(dfsu_area): def test_read_area_spectrum_area(dfsu_area): dfs = dfsu_area ds1 = dfs.read() - assert ds1.n_frequencies == 25 - assert ds1.n_directions == 16 + assert ds1[0].n_frequencies == 25 + assert ds1[0].n_directions == 16 bbox = (2.5, 51.8, 3.0, 52.2) ds2 = dfs.read(area=bbox) assert ds2.dims == ds1.dims assert ds2.shape == (3, 4, 16, 25) assert ds1.geometry._type 
== ds2.geometry._type - assert ds2.n_frequencies == 25 - assert ds2.n_directions == 16 assert ds2[0].n_frequencies == 25 assert ds2[0].n_directions == 16