import pandas as pd
import mikeio
@@ -810,7 +810,7 @@ Returns
Examples
-
+
= mikeio.read("../data/HD2D.dfsu", time=[0,1])[0]
da1 = mikeio.read("../data/HD2D.dfsu", time=[2,3])[0]
da2 da1.time
@@ -818,7 +818,7 @@ Examples
DatetimeIndex(['1985-08-06 07:00:00', '1985-08-06 09:30:00'], dtype='datetime64[ns]', freq=None)
-
+
da3 = mikeio.DataArray.concat([da1,da2])
da3
@@ -1116,7 +1116,7 @@ interp_na
Wrapper of xarray.DataArray.interpolate_na
Examples
-
+
import numpy as np
import pandas as pd
= pd.date_range("2000", periods=3, freq="D")
@@ -1131,7 +1131,7 @@ time Examples
values: [0, nan, 2]
-
+
da.interp_na()
<mikeio.DataArray>
@@ -1314,7 +1314,7 @@ See Also
Examples
-
+
= mikeio.read("../data/europe_wind_long_lat.dfs2")[0]
da da
@@ -1325,7 +1325,7 @@ Examples
geometry: Grid2D (ny=101, nx=221)
-
+
da.isel(time=-1)
<mikeio.DataArray>
@@ -1335,7 +1335,7 @@ Examples
geometry: Grid2D (ny=101, nx=221)
-
+
da.isel(x=slice(10,20), y=slice(40,60))
<mikeio.DataArray>
@@ -1345,7 +1345,7 @@ Examples
geometry: Grid2D (ny=20, nx=10)
-
+
= mikeio.read("../data/oresund_sigma_z.dfsu").Temperature
da =range(200)) da.isel(element
@@ -2022,7 +2022,7 @@ See Also
Examples
-
+
= mikeio.read("../data/random.dfs1")[0]
da da
@@ -2033,7 +2033,7 @@ Examples
geometry: Grid1D (n=3, dx=100)
-
+
=slice(None, "2012-1-1 00:02")) da.sel(time
<mikeio.DataArray>
@@ -2043,7 +2043,7 @@ Examples
geometry: Grid1D (n=3, dx=100)
-
+
da.sel(x=100)
<mikeio.DataArray>
@@ -2054,7 +2054,7 @@ Examples
values: [0.3231, 0.6315, ..., 0.7506]
-
+
= mikeio.read("../data/oresund_sigma_z.dfsu").Temperature
da da
@@ -2070,7 +2070,7 @@ Examples
projection: UTM-33
-
+
="1997-09-15") da.sel(time
<mikeio.DataArray>
@@ -2086,7 +2086,7 @@ Examples
values: [16.31, 16.43, ..., 16.69]
-
+
da.sel(x=340000, y=6160000, z=-3)
<mikeio.DataArray>
@@ -2097,7 +2097,7 @@ Examples
values: [17.54, 17.31, 17.08]
-
+
="bottom") da.sel(layers
<mikeio.DataArray>
diff --git a/api/Dataset.html b/api/Dataset.html
index 68a0f6160..fc02e262e 100644
--- a/api/Dataset.html
+++ b/api/Dataset.html
@@ -428,7 +428,7 @@ Selecting Items
Examples
-
+
import mikeio
"../data/europe_wind_long_lat.dfs2") mikeio.read(
diff --git a/api/EUMType.html b/api/EUMType.html
index 15c3915ee..79f850a1a 100644
--- a/api/EUMType.html
+++ b/api/EUMType.html
@@ -370,14 +370,14 @@ EUMType
EUM type
Examples
-
+
import mikeio
mikeio.EUMType.Temperature
Temperature
-
+
mikeio.EUMType.Temperature.units
[degree Celsius, degree Fahrenheit, degree Kelvin]
diff --git a/api/EUMUnit.html b/api/EUMUnit.html
index 426bd39ab..3fbefba4f 100644
--- a/api/EUMUnit.html
+++ b/api/EUMUnit.html
@@ -370,7 +370,7 @@ EUMUnit
EUM unit
Examples
-
+
import mikeio
mikeio.EUMUnit.degree_Kelvin
diff --git a/api/Grid1D.html b/api/Grid1D.html
index 5fc5ebc13..fb03ee325 100644
--- a/api/Grid1D.html
+++ b/api/Grid1D.html
@@ -451,7 +451,7 @@ Parameters
Examples
-
+
import mikeio
mikeio.Grid1D(nx=3,dx=0.1)
@@ -459,7 +459,7 @@ Examples
x: [0, 0.1, 0.2] (nx=3, dx=0.1)
-
+
mikeio.Grid1D(x=[0.1, 0.5, 0.9])
<mikeio.Grid1D>
@@ -639,7 +639,7 @@ Returns
Examples
-
+
import mikeio
g = mikeio.Grid1D(nx=3,dx=0.1)
g
@@ -648,14 +648,14 @@ Examples
x: [0, 0.1, 0.2] (nx=3, dx=0.1)
-
+
g.isel([1,2])
<mikeio.Grid1D>
x: [0.1, 0.2] (nx=2, dx=0.1)
-
+
g.isel(1)
GeometryUndefined()
diff --git a/api/ItemInfo.html b/api/ItemInfo.html
index 7139169c1..b102e56fb 100644
--- a/api/ItemInfo.html
+++ b/api/ItemInfo.html
@@ -419,14 +419,14 @@ Parameters
Examples
-
+
import mikeio
"Viken", mikeio.EUMType.Water_Level) mikeio.ItemInfo(
Viken <Water Level> (meter)
-
+
mikeio.ItemInfo(mikeio.EUMType.Wind_speed)
Wind speed <Wind speed> (meter per sec)
diff --git a/api/Mesh.html b/api/Mesh.html
index 9f7df3dad..af492ec02 100644
--- a/api/Mesh.html
+++ b/api/Mesh.html
@@ -416,7 +416,7 @@ Attributes
Examples
-
+
import mikeio
"../data/odense_rough.mesh") mikeio.Mesh(
@@ -470,7 +470,7 @@ Returns
Examples
-
+
import mikeio
= mikeio.open("../data/odense_rough.mesh")
msh msh.to_shapely()
diff --git a/api/PfsDocument.html b/api/PfsDocument.html
index e315a462c..a3ee5534b 100644
--- a/api/PfsDocument.html
+++ b/api/PfsDocument.html
@@ -583,7 +583,7 @@ Parameters
Examples
-
+
import pandas as pd
import mikeio
= pd.DataFrame(dict(station=["Foo", "Bar"],include=[0,1]), index=[1,2])
@@ -619,7 +619,7 @@ df Examples
-
+
"STATION_") mikeio.PfsSection.from_dataframe(df,
[STATION_1]
@@ -787,7 +787,7 @@ Returns
Examples
-
+
= mikeio.read_pfs("../data/pfs/lake.sw")
pfs ="OUTPUT_") pfs.SW.OUTPUTS.to_dataframe(prefix
diff --git a/api/PfsSection.html b/api/PfsSection.html
index 40e4353dd..d466fc9d6 100644
--- a/api/PfsSection.html
+++ b/api/PfsSection.html
@@ -502,7 +502,7 @@ Parameters
Examples
-
+
import pandas as pd
import mikeio
= pd.DataFrame(dict(station=["Foo", "Bar"],include=[0,1]), index=[1,2])
@@ -538,7 +538,7 @@ df Examples
-
+
"STATION_") mikeio.PfsSection.from_dataframe(df,
[STATION_1]
@@ -701,7 +701,7 @@ Returns
Examples
-
+
= mikeio.read_pfs("../data/pfs/lake.sw")
pfs ="OUTPUT_") pfs.SW.OUTPUTS.to_dataframe(prefix
diff --git a/api/dataset._data_plot._DataArrayPlotterFM.html b/api/dataset._data_plot._DataArrayPlotterFM.html
index 710cdc3a5..7267ab181 100644
--- a/api/dataset._data_plot._DataArrayPlotterFM.html
+++ b/api/dataset._data_plot._DataArrayPlotterFM.html
@@ -381,7 +381,7 @@ dataset._data_plot._DataArrayPlotterFM
If DataArray is 3D the surface layer will be plotted.
Examples
-
+
import mikeio
= mikeio.read("../data/HD2D.dfsu")["Surface elevation"]
da da.plot()
@@ -440,7 +440,7 @@
Examples
-
+
= mikeio.read("../data/HD2D.dfsu")["Surface elevation"]
da da.plot.contour()
@@ -459,7 +459,7 @@
Examples
-
+
= mikeio.read("../data/HD2D.dfsu")["Surface elevation"]
da da.plot.contourf()
@@ -554,7 +554,7 @@
Examples
-
+
= mikeio.read("../data/HD2D.dfsu")["Surface elevation"]
da da.plot.mesh()
@@ -573,7 +573,7 @@
Examples
-
+
= mikeio.read("../data/HD2D.dfsu")["Surface elevation"]
da da.plot.outline()
@@ -592,7 +592,7 @@
Examples
-
+
= mikeio.read("../data/HD2D.dfsu")["Surface elevation"]
da da.plot.patch()
diff --git a/api/dataset._data_plot._DataArrayPlotterGrid2D.html b/api/dataset._data_plot._DataArrayPlotterGrid2D.html
index 74f8b977c..d978a8e19 100644
--- a/api/dataset._data_plot._DataArrayPlotterGrid2D.html
+++ b/api/dataset._data_plot._DataArrayPlotterGrid2D.html
@@ -378,7 +378,7 @@ dataset._data_plot._DataArrayPlotterGrid2D
If DataArray has multiple time steps, the first step will be plotted.
Examples
-
+
import mikeio
= mikeio.read("../data/gebco_sound.dfs2")["Elevation"]
da da.plot()
@@ -429,7 +429,7 @@
Examples
-
+
= mikeio.read("../data/gebco_sound.dfs2")["Elevation"]
da da.plot.contour()
@@ -448,7 +448,7 @@
Examples
-
+
= mikeio.read("../data/gebco_sound.dfs2")["Elevation"]
da da.plot.contourf()
@@ -543,7 +543,7 @@
Examples
-
+
= mikeio.read("../data/gebco_sound.dfs2")["Elevation"]
da da.plot.pcolormesh()
diff --git a/api/dfsu.Dfsu2DV.html b/api/dfsu.Dfsu2DV.html
index 5fe670d13..ac6698a01 100644
--- a/api/dfsu.Dfsu2DV.html
+++ b/api/dfsu.Dfsu2DV.html
@@ -567,9 +567,9 @@ read
Parameters
-
-
-
+
+
+
@@ -583,49 +583,49 @@ Parameters
items
-
+str | int | typing.Sequence[str | int] | None
Read only selected items, by number (0-based), or by name
None
time
-
+int | str | slice | None
Read only selected time steps, by default None (=all)
None
keepdims
-
+bool
When reading a single time step only, should the time-dimension be kept in the returned Dataset? by default: False
False
area
-
+typing.Tuple[float, float, float, float] | None
Read only data inside (horizontal) area given as a bounding box (tuple with left, lower, right, upper) or as list of coordinates for a polygon, by default None
None
x
-
-Read only data for elements containing the (x,y,z) points(s), by default None
+float | None
+Read only data for elements containing the (x,y,z) point(s)
None
y
-
-Read only data for elements containing the (x,y,z) points(s), by default None
+float | None
+Read only data for elements containing the (x,y,z) point(s)
None
z
-
-Read only data for elements containing the (x,y,z) points(s), by default None
+float | None
+Read only data for elements containing the (x,y,z) point(s)
None
layers
-
+int | str | typing.Sequence[int] | None
Read only data for specific layers, by default None
None
@@ -637,13 +637,13 @@ Parameters
error_bad_data
-
+bool
raise error if data is corrupt, by default True
True
fill_bad_data_value
-
+float
fill value used to impute corrupt data in conjunction with error_bad_data=False, by default np.nan
np.nan
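As a hedged illustration of how these read parameters combine (a sketch, not taken from the rendered page; the file and item name reuse the vertical-slice example that appears later in these docs, and the negative layer index addressing the top layer is an assumption):
import mikeio

dfs = mikeio.open("../data/oresund_vertical_slice.dfsu")
# one item, the last time step (kept as a time dimension), and only the top layer
ds = dfs.read(items="Salinity", time=-1, keepdims=True, layers=-1)
ds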
diff --git a/api/dfsu.Dfsu3D.html b/api/dfsu.Dfsu3D.html
index bf2cc3bb4..d55a54bfe 100644
--- a/api/dfsu.Dfsu3D.html
+++ b/api/dfsu.Dfsu3D.html
@@ -512,9 +512,9 @@ read
Parameters
-
-
-
+
+
+
@@ -528,49 +528,49 @@ Parameters
items
-
+str | int | typing.Sequence[str | int] | None
Read only selected items, by number (0-based), or by name
None
time
-
+int | str | slice | None
Read only selected time steps, by default None (=all)
None
keepdims
-
+bool
When reading a single time step only, should the time-dimension be kept in the returned Dataset? by default: False
False
area
-
+typing.Tuple[float, float, float, float] | None
Read only data inside (horizontal) area given as a bounding box (tuple with left, lower, right, upper) or as list of coordinates for a polygon, by default None
None
x
-
-Read only data for elements containing the (x,y,z) points(s), by default None
+float | None
+Read only data for elements containing the (x,y,z) point(s)
None
y
-
-Read only data for elements containing the (x,y,z) points(s), by default None
+float | None
+Read only data for elements containing the (x,y,z) point(s)
None
z
-
-Read only data for elements containing the (x,y,z) points(s), by default None
+float | None
+Read only data for elements containing the (x,y,z) point(s)
None
layers
-
+int | str | typing.Sequence[int] | None
Read only data for specific layers, by default None
None
@@ -582,13 +582,13 @@ Parameters
error_bad_data
-
+bool
raise error if data is corrupt, by default True
True
fill_bad_data_value
-
+float
fill value used to impute corrupt data in conjunction with error_bad_data=False, by default np.nan
np.nan
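A comparable sketch for the 3D reader, reusing the oresund_sigma_z.dfsu file and the point coordinates shown in the DataArray.sel examples above (illustrative, not from the rendered page):
import mikeio

dfs = mikeio.open("../data/oresund_sigma_z.dfsu")
ds_point = dfs.read(items="Temperature", x=340000, y=6160000, z=-3)   # single (x,y,z) point
ds_bottom = dfs.read(items="Temperature", layers="bottom")            # bottom layer only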
diff --git a/api/spatial.GeometryFM3D.html b/api/spatial.GeometryFM3D.html
index c52cc5f82..7112f393d 100644
--- a/api/spatial.GeometryFM3D.html
+++ b/api/spatial.GeometryFM3D.html
@@ -429,7 +429,7 @@ Methods
get_layer_elements
-spatial.GeometryFM3D.get_layer_elements(layers, layer=None)
+spatial.GeometryFM3D.get_layer_elements(layers)
3d element ids for one (or more) specific layer(s)
Parameters
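A minimal sketch of calling the method with the simplified signature; the file is reused from other examples, and treating -1 as the top layer follows the usual mikeio layer indexing but is an assumption here:
import mikeio

dfs = mikeio.open("../data/oresund_sigma_z.dfsu")
top_elements = dfs.geometry.get_layer_elements(-1)  # element ids of the top layer (assumed convention)
len(top_elements)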
diff --git a/api/spatial.GeometryFMVerticalColumn.html b/api/spatial.GeometryFMVerticalColumn.html
index de06bb48a..e3dde0a40 100644
--- a/api/spatial.GeometryFMVerticalColumn.html
+++ b/api/spatial.GeometryFMVerticalColumn.html
@@ -430,7 +430,7 @@ Methods
get_layer_elements
-spatial.GeometryFMVerticalColumn.get_layer_elements(layers, layer=None)
+spatial.GeometryFMVerticalColumn.get_layer_elements(layers)
3d element ids for one (or more) specific layer(s)
Parameters
diff --git a/api/spatial.GeometryFMVerticalProfile.html b/api/spatial.GeometryFMVerticalProfile.html
index d70605fa3..97f0a41da 100644
--- a/api/spatial.GeometryFMVerticalProfile.html
+++ b/api/spatial.GeometryFMVerticalProfile.html
@@ -326,7 +326,7 @@ On this page
spatial.GeometryFMVerticalProfile
-spatial.GeometryFMVerticalProfile(self, node_coordinates, element_table, codes=None, projection='LONG/LAT', dfsu_type=None, element_ids=None, node_ids=None, n_layers=1, n_sigma=None, validate=True, reindex=False)
+spatial.GeometryFMVerticalProfile(self, node_coordinates, element_table, codes=None, projection='LONG/LAT', dfsu_type=DfsuFileType.Dfsu3DSigma, element_ids=None, node_ids=None, n_layers=1, n_sigma=None, validate=True, reindex=False)
Attributes
@@ -434,7 +434,7 @@ Methods
get_layer_elements
-spatial.GeometryFMVerticalProfile.get_layer_elements(layers, layer=None)
+spatial.GeometryFMVerticalProfile.get_layer_elements(layers)
3d element ids for one (or more) specific layer(s)
Parameters
diff --git a/examples/Dfsu-2D-interpolation.html b/examples/Dfsu-2D-interpolation.html
index 8a224e9fe..b60f189af 100644
--- a/examples/Dfsu-2D-interpolation.html
+++ b/examples/Dfsu-2D-interpolation.html
@@ -383,10 +383,10 @@ Dfsu - 2D interpolation
-
+
import mikeio
-
+
= mikeio.read("../data/wind_north_sea.dfsu", items="Wind speed")
ds ds
@@ -398,7 +398,7 @@ Dfsu - 2D interpolation
0: Wind speed <Wind speed> (meter per sec)
-
+
da = ds.Wind_speed
da.plot();
@@ -416,7 +416,7 @@ Interpolate to grid
Then interpolate all data to the new grid and plot.
The interpolated data is then saved to a dfs2 file.
-
+
g = da.geometry.get_overset_grid(dx=0.1)
g
@@ -426,7 +426,7 @@ Interpolate to grid
-
+
da_grid = da.interp_like(g)
da_grid
@@ -437,7 +437,7 @@ Interpolate to grid
-
+
da_grid.plot();
@@ -450,14 +450,14 @@ Interpolate to grid
Save to dfs2 file
-
+
"wind_north_sea_interpolated.dfs2") da_grid.to_dfs(
Save to NetCDF
-
+
xr_da = da_grid.to_xarray()
xr_da.to_netcdf("wind_north_sea_interpolated.nc")
@@ -478,7 +478,7 @@ Save to GeoTiff
This section requires the rasterio package.
-
+
import numpy as np
import rasterio
from rasterio.transform import from_origin
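The hunk only shows the imports; the write step itself sits outside the diff context. A rough sketch of how the interpolated grid could be written with rasterio (not the notebook's original cell; the g.bbox, g.dx/g.dy and g.nx/g.ny attributes of the Grid2D are assumptions here):
# first time step, flipped so row 0 is the northernmost row (GeoTiff convention)
data = np.flipud(da_grid.to_numpy()[0]).astype("float32")

transform = from_origin(g.bbox.left, g.bbox.top, g.dx, g.dy)  # west, north, pixel sizes

with rasterio.open(
    "wind_north_sea_interpolated.tif",
    mode="w",
    driver="GTiff",
    height=g.ny,
    width=g.nx,
    count=1,
    dtype="float32",
    crs="EPSG:4326",
    transform=transform,
) as dst:
    dst.write(data, 1)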
@@ -502,7 +502,7 @@ Save to GeoTiff
Interpolate to other mesh
Interpolate the data from this coarse mesh onto a finer resolution mesh
-
+
msh = mikeio.Mesh('../data/north_sea_2.mesh')
msh
@@ -512,7 +512,7 @@ Interpolate to other mesh
projection: LONG/LAT
-
+
dsi = da.interp_like(msh)
dsi
@@ -523,7 +523,7 @@ Interpolate to other mesh
geometry: Dfsu2D (2259 elements, 1296 nodes)
-
+
da[0].plot(figsize=(9,7), show_mesh=True);
@@ -533,7 +533,7 @@ Interpolate to other mesh
-
+
dsi[0].plot(figsize=(9,7), show_mesh=True);
@@ -545,14 +545,14 @@ Interpolate to other mesh
Note: 3 of the new elements are outside the original mesh and data are therefore NaN by default
-
+
nan_elements = np.where(np.isnan(dsi[0].to_numpy()))[0]
nan_elements
array([ 249, 451, 1546])
-
+
da.geometry.contains(msh.element_coordinates[nan_elements,:2])
array([False, False, False])
@@ -561,10 +561,10 @@
We can force extrapolation to avoid the NaN values
-
+
dat_interp = da.interp_like(msh, extrapolate=True)
-
+
n_nan_elements = np.sum(np.isnan(dat_interp.values))
n_nan_elements
@@ -577,14 +577,14 @@ Interpola
We want to interpolate scatter data onto an existing mesh and create a new dfsu with the interpolated data.
This uses lower level private utility methods not part of the public API.
Interpolating from scatter data will soon be possible in a simpler way.
-
+
from mikeio.spatial._utils import dist_in_meters
from mikeio._interpolation import get_idw_interpolant
-
+
= mikeio.open("../data/wind_north_sea.dfsu") dfs
-
+
dfs.geometry.plot.mesh();
@@ -594,7 +594,7 @@ Interpola
-
+
# scatter data: x,y,value for 4 points
scatter = np.array([[1,50,1], [4, 52, 3], [8, 55, 2], [-1, 55, 1.5]])
scatter
@@ -611,35 +611,35 @@ Interpola
calc IDW interpolant weights
Interpolate
-
+
dist = dist_in_meters(scatter[:,:2], dfs.element_coordinates[0,:2])
dist
array([4.00139539, 3.18881018, 6.58769411, 2.69722991])
-
+
w = get_idw_interpolant(dist, p=2)
w
array([0.19438779, 0.30607974, 0.07171749, 0.42781498])
-
+
np.dot(scatter[:,2], w) # interpolated value in element 0
1.8977844597276883
Let’s do the same for all points in the mesh and plot in the end
-
+
dati = np.zeros((1,dfs.n_elements))
for j in range(dfs.n_elements):
    dist = dist_in_meters(scatter[:,:2], dfs.element_coordinates[j,:2])
    w = get_idw_interpolant(dist, p=2)
    dati[0,j] = np.dot(scatter[:,2], w)
-
+
da = mikeio.DataArray(data=dati, geometry=dfs.geometry, time=dfs.start_time)
da
@@ -650,7 +650,7 @@ Interpola
geometry: Dfsu2D (958 elements, 570 nodes)
-
+
="Interpolated scatter data"); da.plot(title
@@ -660,13 +660,13 @@ Interpola
-
+
"interpolated_scatter.dfsu") da.to_dfs(
Clean up
-
+
import os
"wind_north_sea_interpolated.dfs2")
diff --git a/examples/Generic.html b/examples/Generic.html
index 326239fc8..4e5eaa159 100644
--- a/examples/Generic.html
+++ b/examples/Generic.html
@@ -394,7 +394,7 @@ Generic dfs processing
quantile: Create temporal quantiles of dfs file
-
+
import matplotlib.pyplot as plt
import mikeio
import mikeio.generic
@@ -402,7 +402,7 @@ Generic dfs processing
Concatenation
Take a look at these two files with overlapping timesteps.
-
+
= mikeio.read("../data/tide1.dfs1")
t1 t1
@@ -414,7 +414,7 @@ Concatenation
0: Level <Water Level> (meter)
-
+
= mikeio.read("../data/tide2.dfs1")
t2 t2
@@ -427,7 +427,7 @@ Concatenation
Plot one of the points along the line.
-
+
0].isel(x=1).values, label="File 1")
plt.plot(t1.time,t1[0].isel(x=1).values,'k+', label="File 2")
plt.plot(t2.time,t2[ plt.legend()
@@ -439,15 +439,15 @@ Concatenation
-
+
=["../data/tide1.dfs1",
mikeio.generic.concat(infilenames"../data/tide2.dfs1"],
="concat.dfs1") outfilename
- 0%| | 0/2 [00:00<?, ?it/s]100%|██████████| 2/2 [00:00<00:00, 534.20it/s]
+ 0%| | 0/2 [00:00<?, ?it/s]100%|██████████| 2/2 [00:00<00:00, 497.54it/s]
-
+
= mikeio.read("concat.dfs1")
c 0].isel(x=1).plot()
c[ c
@@ -471,16 +471,16 @@ Concatenation
Difference between two files
Take the difference between two dfs files with the same structure - e.g. to see the difference in results between two calibration runs
-
+
= "../data/oresundHD_run1.dfsu"
fn1 = "../data/oresundHD_run2.dfsu"
fn2 = "oresundHD_difference.dfsu"
fn_diff mikeio.generic.diff(fn1, fn2, fn_diff)
- 0%| | 0/5 [00:00<?, ?it/s]100%|██████████| 5/5 [00:00<00:00, 2456.83it/s]
+ 0%| | 0/5 [00:00<?, ?it/s]100%|██████████| 5/5 [00:00<00:00, 2275.80it/s]
-
+
_, ax = plt.subplots(1,3, sharey=True, figsize=(12,5))
da = mikeio.read(fn1, time=-1)[0]
da.plot(vmin=0.06, vmax=0.27, ax=ax[0], title='run 1')
@@ -504,11 +504,11 @@ Extract time s
time slice by specifying start and/or end
specific items
-
+
= "../data/tide1.dfs1"
infile "extracted.dfs1", start='2019-01-02') mikeio.generic.extract(infile,
-
+
= mikeio.read("extracted.dfs1")
e e
@@ -520,11 +520,11 @@ Extract time s
0: Level <Water Level> (meter)
-
+
= "../data/oresund_vertical_slice.dfsu"
infile "extracted.dfsu", items='Salinity', end=-2) mikeio.generic.extract(infile,
-
+
= mikeio.read("extracted.dfsu")
e e
@@ -545,7 +545,7 @@ Extract time s
Scaling
Adding a constant, e.g. to adjust the datum
-
+
= mikeio.read("../data/gebco_sound.dfs2")
ds 0].plot(); ds.Elevation[
@@ -556,23 +556,23 @@ Scaling
-
+
ds['Elevation'][0,104,131].to_numpy()
-1.0
This is the processing step.
-
+
"../data/gebco_sound.dfs2",
mikeio.generic.scale("gebco_sound_local_datum.dfs2",
=-2.1
offset )
- 0%| | 0/1 [00:00<?, ?it/s]100%|██████████| 1/1 [00:00<00:00, 1197.35it/s]
+ 0%| | 0/1 [00:00<?, ?it/s]100%|██████████| 1/1 [00:00<00:00, 1245.71it/s]
-
+
= mikeio.read("gebco_sound_local_datum.dfs2")
ds2 'Elevation'][0].plot() ds2[
@@ -583,7 +583,7 @@ Scaling
-
+
ds2['Elevation'][0,104,131].to_numpy()
-3.1
@@ -591,7 +591,7 @@ Scaling
Spatially varying correction
-
+
import numpy as np
factor = np.ones_like(ds['Elevation'][0].to_numpy())
factor.shape
@@ -600,7 +600,7 @@ Spatially var
Add some spatially varying factors, exaggerated values for educational purposes.
-
+
factor[:,0:100] = 5.3
factor[0:40,] = 0.1
factor[150:,150:] = 10.7
@@ -615,7 +615,7 @@ Spatially var
The 2d array must first be flipped upside down and then converted to a 1d vector using numpy.ndarray.flatten to match how data is stored in dfs files.
-
+
factor_ud = np.flipud(factor)
factor_vec = factor_ud.flatten()
mikeio.generic.scale("../data/gebco_sound.dfs2",
@@ -623,10 +623,10 @@ Spatially var
                     factor=factor_vec
                     )
- 0%| | 0/1 [00:00<?, ?it/s]100%|██████████| 1/1 [00:00<00:00, 1021.75it/s]
+ 0%| | 0/1 [00:00<?, ?it/s]100%|██████████| 1/1 [00:00<00:00, 1867.46it/s]
-
+
= mikeio.read("gebco_sound_spatial.dfs2")
ds3 0].plot(); ds3.Elevation[
@@ -641,15 +641,15 @@ Spatially var
Time average
-
+
= "../data/NorthSea_HD_and_windspeed.dfsu"
fn = "Avg_NorthSea_HD_and_windspeed.dfsu"
fn_avg mikeio.generic.avg_time(fn, fn_avg)
- 0%| | 0/66 [00:00<?, ?it/s]100%|██████████| 66/66 [00:00<00:00, 17307.99it/s]
+ 0%| | 0/66 [00:00<?, ?it/s]100%|██████████| 66/66 [00:00<00:00, 17829.71it/s]
-
+
ds = mikeio.read(fn)
ds.mean(axis=0).describe() # alternative way of getting the time average
@@ -713,7 +713,7 @@ Time average
-
+
ds_avg = mikeio.read(fn_avg)
ds_avg.describe()
@@ -781,12 +781,12 @@ Time average
Quantile
Example that calculates the 25%, 50% and 75% percentiles for all items in a dfsu file.
-
+
= "../data/NorthSea_HD_and_windspeed.dfsu"
fn = "Q_NorthSea_HD_and_windspeed.dfsu"
fn_q =[0.25,0.5,0.75]) mikeio.generic.quantile(fn, fn_q, q
-
+
ds = mikeio.read(fn_q)
ds
@@ -803,7 +803,7 @@ Quantile
5: Quantile 0.75, Wind speed <Wind speed> (meter per sec)
-
+
= ds["Quantile 0.75, Wind speed"]
da_q75 ="75th percentile, wind speed", label="m/s") da_q75.plot(title
@@ -817,7 +817,7 @@ Quantile
Clean up
-
+
import os
"concat.dfs1")
os.remove("oresundHD_difference.dfsu")
diff --git a/examples/Time-interpolation.html b/examples/Time-interpolation.html
index 78dc15575..07237c1f4 100644
--- a/examples/Time-interpolation.html
+++ b/examples/Time-interpolation.html
@@ -376,11 +376,11 @@ Time interpolation
-
+
import numpy as np
import mikeio
-
+
= mikeio.read("../data/waves.dfs2")
ds ds
@@ -397,7 +397,7 @@ Time interpolation
Interpolate to specific timestep
A common use case is to interpolate to a shorter timestep, in this case 1h.
-
+
ds_h = ds.interp_time(3600)
ds_h
@@ -412,14 +412,14 @@ Interpola
And to store the interpolated data in a new file.
-
+
"waves_3h.dfs2") ds_h.to_dfs(
Interpolate to time axis of another dataset
Read some non-equidistant data typically found in observed data.
-
+
= mikeio.read("../data/waves.dfs0")
ts ts
@@ -434,10 +434,10 @@
+