Skip to content

Commit

Permalink
Merge branch 'main' into simple
Browse files Browse the repository at this point in the history
  • Loading branch information
ecomodeller authored Nov 30, 2023
2 parents bc4ac87 + cf089da commit 627d89f
Show file tree
Hide file tree
Showing 5 changed files with 54 additions and 82 deletions.
8 changes: 1 addition & 7 deletions mikeio/dfs/_dfs0.py
Original file line number Diff line number Diff line change
Expand Up @@ -504,13 +504,7 @@ def time(self):
)

elif self._timeaxistype == TimeAxisType.CalendarNonEquidistant:
dfs = DfsFileFactory.DfsGenericOpen(self._filename)
t_seconds = np.zeros(self.n_timesteps)
for it in range(self.n_timesteps):
itemdata = dfs.ReadItemTimeStep(1, int(it))
t_seconds[it] = itemdata.Time

return pd.to_datetime(t_seconds, unit="s", origin=self.start_time)
return self.read().time

else:
return None
Expand Down
55 changes: 40 additions & 15 deletions tests/test_dfs0.py
Original file line number Diff line number Diff line change
Expand Up @@ -114,6 +114,27 @@ def test_read_all_time_steps_without_reading_items_neq():
assert isinstance(dfs.time, pd.DatetimeIndex)
assert len(dfs.time) == 744


def test_write_non_equidistant_calendar(tmp_path):
    """A dataset with irregular timestamps round-trips to dfs0 as non-equidistant."""
    out_path = tmp_path / "neq.dfs0"
    # 60-minute gap followed by a 10-minute gap -> calendar non-equidistant axis
    timestamps = pd.DatetimeIndex(
        ["2001-01-01", "2001-01-01 01:00", "2001-01-01 01:10"]
    )
    level = mikeio.DataArray(
        data=np.zeros(3),
        time=timestamps,
        item=ItemInfo("VarFun01", EUMType.Water_Level, unit=EUMUnit.meter),
    )
    rainfall = mikeio.DataArray(
        data=np.ones(3),
        time=timestamps,
        item=ItemInfo("NotFun", EUMType.Rainfall_Depth, data_value_type="Accumulated"),
    )
    mikeio.Dataset([level, rainfall]).to_dfs(out_path)
    assert out_path.exists()

    roundtripped = mikeio.read(out_path)
    assert not roundtripped.is_equidistant


def test_read_equidistant_dfs0_to_dataframe_fixed_freq():
dfs0file = "tests/testdata/random.dfs0"
Expand Down Expand Up @@ -377,8 +398,7 @@ def test_write_data_with_missing_values(tmp_path):
dfs0file = r"tests/testdata/random.dfs0"
tmpfile = tmp_path / "random.dfs0"

dfs = Dfs0(dfs0file)
ds = dfs.read()
ds = mikeio.read(dfs0file)

# Do something with the data
ds[0].values = np.zeros_like(ds[0].values)
Expand All @@ -388,22 +408,19 @@ def test_write_data_with_missing_values(tmp_path):
ds[1].values[0:10] = np.nan

# Overwrite the file
dfs.write(tmpfile, ds)
ds.to_dfs(tmpfile)

# Write operation does not modify the data
assert np.isnan(ds[1].values[1])

moddfs = Dfs0(tmpfile)
modified = moddfs.read()
modified = mikeio.read(tmpfile)
assert np.isnan(modified[1].values[5])


def test_read_relative_time_axis():
filename = r"tests/testdata/eq_relative.dfs0"

dfs0 = Dfs0(filename)
filename = "tests/testdata/eq_relative.dfs0"

ds = dfs0.read()
ds = mikeio.read(filename)
assert len(ds) == 5


Expand All @@ -422,13 +439,13 @@ def test_write_accumulated_datatype(tmp_path):
)
da.to_dfs(filename)

newdfs = Dfs0(filename)
assert newdfs.items[0].data_value_type == 3
da.to_dfs(filename)
newds = mikeio.read(filename)
assert newds[0].item.data_value_type == 3


def test_write_default_datatype(tmp_path):
filename = tmp_path / "simple.dfs0"

da = mikeio.DataArray(
data=np.random.random(100),
time=pd.date_range("2012-01-01", periods=100, freq="H"),
Expand All @@ -439,9 +456,8 @@ def test_write_default_datatype(tmp_path):
),
)
da.to_dfs(filename)

newdfs = Dfs0(filename)
assert newdfs.items[0].data_value_type == 0
newds = mikeio.read(filename)
assert newds[0].item.data_value_type == 0


def test_write_from_pandas_series_monkey_patched_data_value_not_default(tmp_path):
Expand Down Expand Up @@ -559,3 +575,12 @@ def test_read_dfs0_with_non_unique_item_names():

assert ds.Untitled_3.values[0] == pytest.approx(0.0)
assert np.isnan(ds.Untitled_3.values[1])


def test_non_equidistant_time_can_read_correctly_with_open(tmp_path):
    """The time axis reported by an opened file matches the one obtained via read()."""
    source = mikeio.open("tests/testdata/neq_daily_time_unit.dfs0")
    # touch .time before read() — the pre-read axis must agree with the post-read one
    source.time
    dataset = source.read()

    assert all(source.time == dataset.time)
66 changes: 10 additions & 56 deletions tests/test_dfsu.py
Original file line number Diff line number Diff line change
Expand Up @@ -529,14 +529,8 @@ def test_write(tmp_path):
geometry=msh.geometry,
)

dfs = Dfsu(meshfilename)

dfs.write(fp, ds)
dfs = Dfsu(fp)

assert dfs._source.ApplicationTitle == "mikeio"

dfs.write(fp, ds.isel(time=0)) # without time axis
ds.to_dfs(fp)
ds.isel(time=0).to_dfs(fp)


def test_write_from_dfsu(tmp_path):
Expand All @@ -545,11 +539,11 @@ def test_write_from_dfsu(tmp_path):
fp = tmp_path / "simple.dfsu"
dfs = mikeio.open(sourcefilename)

ds = dfs.read(items=[0, 1])
assert dfs.start_time.hour == 7

dfs.write(fp, ds)
ds = dfs.read(items=[0, 1])

assert dfs.start_time.hour == 7
ds.to_dfs(fp)

newdfs = mikeio.open(fp)
assert dfs.start_time == newdfs.start_time
Expand Down Expand Up @@ -682,7 +676,7 @@ def test_write_from_dfsu_2_time_steps(tmp_path):

assert ds.is_equidistant # Data with two time steps is per definition equidistant

dfs.write(fp, ds)
ds.to_dfs(fp)

newdfs = mikeio.open(fp)
assert dfs.start_time == newdfs.start_time
Expand All @@ -694,12 +688,10 @@ def test_write_non_equidistant_is_not_possible(tmp_path):

sourcefilename = "tests/testdata/HD2D.dfsu"
fp = tmp_path / "simple.dfsu"
dfs = mikeio.open(sourcefilename)

ds = dfs.read(time=[0, 1, 3])
ds = mikeio.read(sourcefilename, time=[0, 1, 3])

with pytest.raises(ValueError):
dfs.write(fp, ds)
ds.to_dfs(fp)


def test_temporal_resample_by_reading_selected_timesteps(tmp_path):
Expand All @@ -711,7 +703,7 @@ def test_temporal_resample_by_reading_selected_timesteps(tmp_path):
nt = dfs.n_timesteps

ds = dfs.read(time=list(range(0, nt, 2)))
dfs.write(fp, ds)
ds.to_dfs(fp)

newdfs = mikeio.open(fp)

Expand Down Expand Up @@ -774,7 +766,7 @@ def test_write_temporal_subset(tmp_path):

ds = dfs.read() # TODO read temporal subset with slice e.g. "1985-08-06 12:00":
selds = ds["1985-08-06 12:00":]
dfs.write(fp, selds)
selds.to_dfs(fp)

newdfs = mikeio.open(fp)

Expand All @@ -792,35 +784,6 @@ def test_geometry_2d():

assert geom.is_2d


# def test_geometry_2d_2dfile():

# dfs = mikeio.open("tests/testdata/HD2D.dfsu")

# assert dfs.is_2d
# geom = dfs.to_2d_geometry() # No op

# assert geom.is_2d


# def test_get_layers_2d_error():

# dfs = mikeio.open("tests/testdata/HD2D.dfsu")
# assert dfs.is_2d

# with pytest.raises(InvalidGeometry):
# dfs.get_layer_elements(-1)

# with pytest.raises(InvalidGeometry):
# dfs.layer_ids

# with pytest.raises(InvalidGeometry):
# dfs.elem2d_ids

# with pytest.raises(InvalidGeometry):
# dfs.find_nearest_profile_elements(x=0, y=0)


def test_to_mesh_2d(tmp_path):
filename = "tests/testdata/HD2D.dfsu"
dfs = mikeio.open(filename)
Expand Down Expand Up @@ -848,15 +811,6 @@ def test_elements_to_geometry():
other_tiny_geom = dfs.geometry.isel(set([1, 0]))
assert other_tiny_geom.n_elements == 2

# Removed, use sel on geometry instead
# prof_ids = dfs.find_nearest_profile_elements(350000, 6150000)
# geom = dfs.geometry.elements_to_geometry(prof_ids)

# text = repr(geom)

# assert geom.n_layers == 5
# assert "nodes" in text

elements = dfs.get_layer_elements(layers=-1)
geom = dfs.elements_to_geometry(elements, node_layers="top")
assert not hasattr(geom, "n_layers")
Expand Down
7 changes: 3 additions & 4 deletions tests/test_dfsu_layered.py
Original file line number Diff line number Diff line change
Expand Up @@ -503,7 +503,7 @@ def test_write_from_dfsu3D(tmp_path):

ds = dfs.read(items=[0, 1])

dfs.write(fp, ds)
ds.to_dfs(fp)

assert fp.exists()

Expand All @@ -512,12 +512,11 @@ def test_extract_top_layer_to_2d(tmp_path):
filename = "tests/testdata/oresund_sigma_z.dfsu"

dfs = mikeio.open(filename)
top_ids = dfs.top_elements

ds = dfs.read(elements=top_ids)
ds = dfs.read(layers="top")

fp = tmp_path / "toplayer.dfsu"
dfs.write(fp, ds, elements=top_ids)
ds.to_dfs(fp)

newdfs = mikeio.open(fp)

Expand Down
Binary file added tests/testdata/neq_daily_time_unit.dfs0
Binary file not shown.

0 comments on commit 627d89f

Please sign in to comment.