From 621e5c30efd8372f16d3f5ff4d13965a397432a1 Mon Sep 17 00:00:00 2001
From: ecomodeller
Date: Sun, 17 Mar 2024 08:07:23 +0000
Subject: [PATCH] deploy: 38b5acd3fb9a08008e3d41fe8d99d44c0b34d272

---
 api/DataArray.html                            | 32 ++++-----
 api/Dataset.html                              |  2 +-
 api/EUMType.html                              |  4 +-
 api/EUMUnit.html                              |  2 +-
 api/Grid1D.html                               | 10 +--
 api/ItemInfo.html                             |  4 +-
 api/Mesh.html                                 |  4 +-
 api/PfsDocument.html                          |  6 +-
 api/PfsSection.html                           |  6 +-
 ...ataset._data_plot._DataArrayPlotterFM.html | 12 ++--
 ...et._data_plot._DataArrayPlotterGrid2D.html |  8 +--
 api/dfsu.Dfsu2DV.html                         | 32 ++++-----
 api/dfsu.Dfsu3D.html                          | 32 ++++-----
 api/spatial.GeometryFM3D.html                 |  2 +-
 api/spatial.GeometryFMVerticalColumn.html     |  2 +-
 api/spatial.GeometryFMVerticalProfile.html    |  4 +-
 examples/Dfsu-2D-interpolation.html           | 58 ++++++++--------
 examples/Generic.html                         | 66 +++++++++----------
 examples/Time-interpolation.html              | 28 ++++----
 examples/dfs2/bathy.html                      | 44 ++++++-------
 examples/dfs2/gfs.html                        | 42 ++++++------
 examples/index.html                           | 12 ++--
 index.html                                    |  2 +-
 search.json                                   | 28 ++++----
 user-guide/dataarray.html                     | 20 +++---
 user-guide/dfs0.html                          |  6 +-
 user-guide/dfs1.html                          |  4 +-
 user-guide/dfs2.html                          | 10 +--
 user-guide/dfsu.html                          |  4 +-
 user-guide/eum.html                           | 18 ++---
 user-guide/getting-started.html               | 10 +--
 user-guide/mesh.html                          | 32 ++++-----
 user-guide/pfs.html                           | 20 +++---
 33 files changed, 283 insertions(+), 283 deletions(-)

diff --git a/api/DataArray.html b/api/DataArray.html
index d255b6453..435407a41 100644
--- a/api/DataArray.html
+++ b/api/DataArray.html
@@ -411,7 +411,7 @@

DataArray

Examples

-
+
import pandas as pd
 import mikeio
 
@@ -810,7 +810,7 @@ 

Returns

Examples

-
+
da1 = mikeio.read("../data/HD2D.dfsu", time=[0,1])[0]
 da2 = mikeio.read("../data/HD2D.dfsu", time=[2,3])[0]
 da1.time
@@ -818,7 +818,7 @@

Examples

DatetimeIndex(['1985-08-06 07:00:00', '1985-08-06 09:30:00'], dtype='datetime64[ns]', freq=None)
-
+
da3 = mikeio.DataArray.concat([da1,da2])
 da3
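
A quick way to confirm the result is to look at the combined time axis; a small sketch following the two reads above:

da3.n_timesteps   # 4, two time steps from each read
da3.time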
@@ -1116,7 +1116,7 @@

interp_na

Wrapper of xarray.DataArray.interpolate_na

Examples

-
+
import numpy as np
 import pandas as pd
 time = pd.date_range("2000", periods=3, freq="D")
@@ -1131,7 +1131,7 @@ 

Examples

values: [0, nan, 2]
-
+
da.interp_na()
<mikeio.DataArray>
@@ -1314,7 +1314,7 @@ 

See Also

Examples

-
+
da = mikeio.read("../data/europe_wind_long_lat.dfs2")[0]
 da
@@ -1325,7 +1325,7 @@

Examples

geometry: Grid2D (ny=101, nx=221)
-
+
da.isel(time=-1)
<mikeio.DataArray>
@@ -1335,7 +1335,7 @@ 

Examples

geometry: Grid2D (ny=101, nx=221)
-
+
da.isel(x=slice(10,20), y=slice(40,60))
<mikeio.DataArray>
@@ -1345,7 +1345,7 @@ 

Examples

geometry: Grid2D (ny=20, nx=10)
-
+
da = mikeio.read("../data/oresund_sigma_z.dfsu").Temperature
 da.isel(element=range(200))
@@ -2022,7 +2022,7 @@

See Also

Examples

-
+
da = mikeio.read("../data/random.dfs1")[0]
 da
@@ -2033,7 +2033,7 @@

Examples

geometry: Grid1D (n=3, dx=100)
-
+
da.sel(time=slice(None, "2012-1-1 00:02"))
<mikeio.DataArray>
@@ -2043,7 +2043,7 @@ 

Examples

geometry: Grid1D (n=3, dx=100)
-
+
da.sel(x=100)
<mikeio.DataArray>
@@ -2054,7 +2054,7 @@ 

Examples

values: [0.3231, 0.6315, ..., 0.7506]
-
+
da = mikeio.read("../data/oresund_sigma_z.dfsu").Temperature
 da
@@ -2070,7 +2070,7 @@

Examples

projection: UTM-33
-
+
da.sel(time="1997-09-15")
<mikeio.DataArray>
@@ -2086,7 +2086,7 @@ 

Examples

values: [16.31, 16.43, ..., 16.69]
-
+
da.sel(x=340000, y=6160000, z=-3)
<mikeio.DataArray>
@@ -2097,7 +2097,7 @@ 

Examples

values: [17.54, 17.31, 17.08]
-
+
da.sel(layers="bottom")
<mikeio.DataArray>
diff --git a/api/Dataset.html b/api/Dataset.html
index 68a0f6160..fc02e262e 100644
--- a/api/Dataset.html
+++ b/api/Dataset.html
@@ -428,7 +428,7 @@ 

Selecting Items

Examples

-
+
import mikeio
 mikeio.read("../data/europe_wind_long_lat.dfs2")
diff --git a/api/EUMType.html b/api/EUMType.html index 15c3915ee..79f850a1a 100644 --- a/api/EUMType.html +++ b/api/EUMType.html @@ -370,14 +370,14 @@

EUMType

EUM type

Examples

-
+
import mikeio
 mikeio.EUMType.Temperature
Temperature
-
+
mikeio.EUMType.Temperature.units
[degree Celsius, degree Fahrenheit, degree Kelvin]
diff --git a/api/EUMUnit.html b/api/EUMUnit.html index 426bd39ab..3fbefba4f 100644 --- a/api/EUMUnit.html +++ b/api/EUMUnit.html @@ -370,7 +370,7 @@

EUMUnit

EUM unit

Examples

-
+
import mikeio
 mikeio.EUMUnit.degree_Kelvin
diff --git a/api/Grid1D.html b/api/Grid1D.html index 5fc5ebc13..fb03ee325 100644 --- a/api/Grid1D.html +++ b/api/Grid1D.html @@ -451,7 +451,7 @@

Parameters

Examples

-
+
import mikeio
 mikeio.Grid1D(nx=3,dx=0.1)
@@ -459,7 +459,7 @@

Examples

x: [0, 0.1, 0.2] (nx=3, dx=0.1)
-
+
mikeio.Grid1D(x=[0.1, 0.5, 0.9])
<mikeio.Grid1D>
@@ -639,7 +639,7 @@ 

Returns

Examples

-
+
import mikeio
 g = mikeio.Grid1D(nx=3,dx=0.1)
 g
@@ -648,14 +648,14 @@

Examples

x: [0, 0.1, 0.2] (nx=3, dx=0.1)
-
+
g.isel([1,2])
<mikeio.Grid1D>
 x: [0.1, 0.2] (nx=2, dx=0.1)
-
+
g.isel(1)
GeometryUndefined()
diff --git a/api/ItemInfo.html b/api/ItemInfo.html index 7139169c1..b102e56fb 100644 --- a/api/ItemInfo.html +++ b/api/ItemInfo.html @@ -419,14 +419,14 @@

Parameters

Examples

-
+
import mikeio
 mikeio.ItemInfo("Viken", mikeio.EUMType.Water_Level)
Viken <Water Level> (meter)
-
+
mikeio.ItemInfo(mikeio.EUMType.Wind_speed)
Wind speed <Wind speed> (meter per sec)
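
A unit can also be given explicitly as the third argument; a small sketch (the unit is chosen for illustration only, expected output shown below the call):

mikeio.ItemInfo("Viken", mikeio.EUMType.Water_Level, mikeio.EUMUnit.feet)
Viken <Water Level> (feet)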
diff --git a/api/Mesh.html b/api/Mesh.html index 9f7df3dad..af492ec02 100644 --- a/api/Mesh.html +++ b/api/Mesh.html @@ -416,7 +416,7 @@

Attributes

Examples

-
+
import mikeio
 mikeio.Mesh("../data/odense_rough.mesh")
@@ -470,7 +470,7 @@

Returns

Examples

-
+
import mikeio
 msh = mikeio.open("../data/odense_rough.mesh")
 msh.to_shapely()
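
The returned geometry is a plain shapely object and can be queried as such; a small sketch, assuming shapely is installed:

domain = msh.to_shapely()
domain.geom_type   # 'MultiPolygon' (one polygon per element)
domain.area        # total mesh area in projection units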
diff --git a/api/PfsDocument.html b/api/PfsDocument.html index e315a462c..a3ee5534b 100644 --- a/api/PfsDocument.html +++ b/api/PfsDocument.html @@ -583,7 +583,7 @@

Parameters

Examples

-
+
import pandas as pd
 import mikeio
 df = pd.DataFrame(dict(station=["Foo", "Bar"],include=[0,1]), index=[1,2])
@@ -619,7 +619,7 @@ 

Examples

-
+
mikeio.PfsSection.from_dataframe(df,"STATION_")
[STATION_1]
@@ -787,7 +787,7 @@ 

Returns

Examples

-
+
pfs = mikeio.read_pfs("../data/pfs/lake.sw")
 pfs.SW.OUTPUTS.to_dataframe(prefix="OUTPUT_")
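
A PfsDocument can also be modified and written back to disk; a hedged sketch (the parameter path below is hypothetical, pick one that exists in your file):

pfs = mikeio.read_pfs("../data/pfs/lake.sw")
pfs.SW.SPECTRAL.number_of_frequencies = 25   # hypothetical parameter path
pfs.write("lake_modified.sw")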
diff --git a/api/PfsSection.html b/api/PfsSection.html index 40e4353dd..d466fc9d6 100644 --- a/api/PfsSection.html +++ b/api/PfsSection.html @@ -502,7 +502,7 @@

Parameters

Examples

-
+
import pandas as pd
 import mikeio
 df = pd.DataFrame(dict(station=["Foo", "Bar"],include=[0,1]), index=[1,2])
@@ -538,7 +538,7 @@ 

Examples

-
+
mikeio.PfsSection.from_dataframe(df,"STATION_")
[STATION_1]
@@ -701,7 +701,7 @@ 

Returns

Examples

-
+
pfs = mikeio.read_pfs("../data/pfs/lake.sw")
 pfs.SW.OUTPUTS.to_dataframe(prefix="OUTPUT_")
diff --git a/api/dataset._data_plot._DataArrayPlotterFM.html b/api/dataset._data_plot._DataArrayPlotterFM.html index 710cdc3a5..7267ab181 100644 --- a/api/dataset._data_plot._DataArrayPlotterFM.html +++ b/api/dataset._data_plot._DataArrayPlotterFM.html @@ -381,7 +381,7 @@

dataset._data_plot._DataArrayPlotterFM

If DataArray is 3D the surface layer will be plotted.

Examples

-
+
import mikeio
 da = mikeio.read("../data/HD2D.dfsu")["Surface elevation"]
 da.plot()
@@ -440,7 +440,7 @@

Examples

-
+
da = mikeio.read("../data/HD2D.dfsu")["Surface elevation"]
 da.plot.contour()
@@ -459,7 +459,7 @@

Examples

-
+
da = mikeio.read("../data/HD2D.dfsu")["Surface elevation"]
 da.plot.contourf()
@@ -554,7 +554,7 @@

Examples

-
+
da = mikeio.read("../data/HD2D.dfsu")["Surface elevation"]
 da.plot.mesh()
@@ -573,7 +573,7 @@

Examples

-
+
da = mikeio.read("../data/HD2D.dfsu")["Surface elevation"]
 da.plot.outline()
@@ -592,7 +592,7 @@

Examples

-
+
da = mikeio.read("../data/HD2D.dfsu")["Surface elevation"]
 da.plot.patch()
diff --git a/api/dataset._data_plot._DataArrayPlotterGrid2D.html b/api/dataset._data_plot._DataArrayPlotterGrid2D.html index 74f8b977c..d978a8e19 100644 --- a/api/dataset._data_plot._DataArrayPlotterGrid2D.html +++ b/api/dataset._data_plot._DataArrayPlotterGrid2D.html @@ -378,7 +378,7 @@

dataset._data_plot._DataArrayPlotterGrid2D

If DataArray has multiple time steps, the first step will be plotted.

Examples

-
+
import mikeio
 da = mikeio.read("../data/gebco_sound.dfs2")["Elevation"]
 da.plot()
@@ -429,7 +429,7 @@

Examples

-
+
da = mikeio.read("../data/gebco_sound.dfs2")["Elevation"]
 da.plot.contour()
@@ -448,7 +448,7 @@

Examples

-
+
da = mikeio.read("../data/gebco_sound.dfs2")["Elevation"]
 da.plot.contourf()
@@ -543,7 +543,7 @@

Examples

-
+
da = mikeio.read("../data/gebco_sound.dfs2")["Elevation"]
 da.plot.pcolormesh()
diff --git a/api/dfsu.Dfsu2DV.html b/api/dfsu.Dfsu2DV.html index 5fe670d13..ac6698a01 100644 --- a/api/dfsu.Dfsu2DV.html +++ b/api/dfsu.Dfsu2DV.html @@ -567,9 +567,9 @@

read

Parameters

---+++ @@ -583,49 +583,49 @@

Parameters

- + - + - + - + - - + + - - + + - - + + - + @@ -637,13 +637,13 @@

Parameters

- + - + diff --git a/api/dfsu.Dfsu3D.html b/api/dfsu.Dfsu3D.html index bf2cc3bb4..d55a54bfe 100644 --- a/api/dfsu.Dfsu3D.html +++ b/api/dfsu.Dfsu3D.html @@ -512,9 +512,9 @@

read

Parameters

itemsstr | int | typing.Sequence[str | int] | None Read only selected items, by number (0-based), or by name None
timeint | str | slice | None Read only selected time steps, by default None (=all) None
keepdimsbool When reading a single time step only, should the time-dimension be kept in the returned Dataset? by default: False False
areatyping.Tuple[float, float, float, float] | None Read only data inside (horizontal) area given as a bounding box (tuple with left, lower, right, upper) or as list of coordinates for a polygon, by default None None
xRead only data for elements containing the (x,y,z) points(s), by default Nonefloat | NoneRead only data for elements containing the (x,y,z) points(s) None
yRead only data for elements containing the (x,y,z) points(s), by default Nonefloat | NoneRead only data for elements containing the (x,y,z) points(s) None
zRead only data for elements containing the (x,y,z) points(s), by default Nonefloat | NoneRead only data for elements containing the (x,y,z) points(s) None
layersint | str | typing.Sequence[int] | None Read only data for specific layers, by default None None
error_bad_databool raise error if data is corrupt, by default True, True
fill_bad_data_valuefloat fill value for to impute corrupt data, used in conjunction with error_bad_data=False default np.nan np.nan
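
A minimal usage sketch of these read parameters, using the layered file and item name that appear elsewhere on these pages (the time and layers values are illustrative):

dfs = mikeio.open("../data/oresund_sigma_z.dfsu")
ds = dfs.read(items="Temperature", time="1997-09-15", layers="top")
ds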
diff --git a/api/dfsu.Dfsu3D.html b/api/dfsu.Dfsu3D.html
index bf2cc3bb4..d55a54bfe 100644
--- a/api/dfsu.Dfsu3D.html
+++ b/api/dfsu.Dfsu3D.html
@@ -512,9 +512,9 @@

read

Parameters

@@ -528,49 +528,49 @@ Parameters
@@ -582,13 +582,13 @@ Parameters

dfsu.Dfsu3D.read receives the same updated Parameters rows (the same type annotations) as dfsu.Dfsu2DV.read above.

diff --git a/api/spatial.GeometryFM3D.html b/api/spatial.GeometryFM3D.html
index c52cc5f82..7112f393d 100644
--- a/api/spatial.GeometryFM3D.html
+++ b/api/spatial.GeometryFM3D.html
@@ -429,7 +429,7 @@

Methods

get_layer_elements

-

spatial.GeometryFM3D.get_layer_elements(layers, layer=None)

+

spatial.GeometryFM3D.get_layer_elements(layers)

3d element ids for one (or more) specific layer(s)

Parameters
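
A small usage sketch (the layered dfsu file is one used elsewhere on these pages; layer indices follow the description above):

g = mikeio.open("../data/oresund_sigma_z.dfsu").geometry
g.get_layer_elements(layers=-1)      # element ids of the top layer
g.get_layer_elements(layers=[0, 1])  # the two layers closest to the bottom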

diff --git a/api/spatial.GeometryFMVerticalColumn.html b/api/spatial.GeometryFMVerticalColumn.html index de06bb48a..e3dde0a40 100644 --- a/api/spatial.GeometryFMVerticalColumn.html +++ b/api/spatial.GeometryFMVerticalColumn.html @@ -430,7 +430,7 @@

Methods

get_layer_elements

-

spatial.GeometryFMVerticalColumn.get_layer_elements(layers, layer=None)

+

spatial.GeometryFMVerticalColumn.get_layer_elements(layers)

3d element ids for one (or more) specific layer(s)

Parameters

diff --git a/api/spatial.GeometryFMVerticalProfile.html b/api/spatial.GeometryFMVerticalProfile.html index d70605fa3..97f0a41da 100644 --- a/api/spatial.GeometryFMVerticalProfile.html +++ b/api/spatial.GeometryFMVerticalProfile.html @@ -326,7 +326,7 @@

On this page

spatial.GeometryFMVerticalProfile

-

spatial.GeometryFMVerticalProfile(self, node_coordinates, element_table, codes=None, projection='LONG/LAT', dfsu_type=None, element_ids=None, node_ids=None, n_layers=1, n_sigma=None, validate=True, reindex=False)

+

spatial.GeometryFMVerticalProfile(self, node_coordinates, element_table, codes=None, projection='LONG/LAT', dfsu_type=DfsuFileType.Dfsu3DSigma, element_ids=None, node_ids=None, n_layers=1, n_sigma=None, validate=True, reindex=False)

Attributes

@@ -434,7 +434,7 @@

Methods

get_layer_elements

-

spatial.GeometryFMVerticalProfile.get_layer_elements(layers, layer=None)

+

spatial.GeometryFMVerticalProfile.get_layer_elements(layers)

3d element ids for one (or more) specific layer(s)

Parameters

diff --git a/examples/Dfsu-2D-interpolation.html b/examples/Dfsu-2D-interpolation.html index 8a224e9fe..b60f189af 100644 --- a/examples/Dfsu-2D-interpolation.html +++ b/examples/Dfsu-2D-interpolation.html @@ -383,10 +383,10 @@

Dfsu - 2D interpolation

-
+
import mikeio
-
+
ds = mikeio.read("../data/wind_north_sea.dfsu", items="Wind speed")
 ds
@@ -398,7 +398,7 @@

Dfsu - 2D interpolation

0: Wind speed <Wind speed> (meter per sec)
-
+
da = ds.Wind_speed
 da.plot();
@@ -416,7 +416,7 @@

Interpolate to grid
  • Then interpolate all data to the new grid and plot.
  • The interpolated data is then saved to a dfs2 file.
-
    +
    g = da.geometry.get_overset_grid(dx=0.1)
     g
    @@ -426,7 +426,7 @@

    Interpolate to grid

    -
    +
    da_grid = da.interp_like(g)
     da_grid
    @@ -437,7 +437,7 @@

    Interpolate to grid

    -
    +
    da_grid.plot();
    @@ -450,14 +450,14 @@

    Interpolate to grid

    Save to dfs2 file

    -
    +
    da_grid.to_dfs("wind_north_sea_interpolated.dfs2")

    Save to NetCDF

    -
    +
    xr_da = da_grid.to_xarray()
     xr_da.to_netcdf("wind_north_sea_interpolated.nc")
    @@ -478,7 +478,7 @@

    Save to GeoTiff

    This section requires the rasterio package.

    -
    +
    import numpy as np
     import rasterio
     from rasterio.transform import from_origin
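# The hunk is truncated here; a minimal sketch of the GeoTiff export, assuming
# the da_grid DataArray from above and a hypothetical output file name.
grid = da_grid.geometry
transform = from_origin(grid.x[0] - grid.dx / 2, grid.y[-1] + grid.dy / 2, grid.dx, grid.dy)
with rasterio.open("wind_north_sea.tif", "w", driver="GTiff",
                   height=grid.ny, width=grid.nx, count=1, dtype="float32",
                   crs="+proj=latlong", transform=transform) as dst:
    dst.write(np.flipud(da_grid[0].to_numpy().astype("float32")), 1)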
    @@ -502,7 +502,7 @@ 

    Save to GeoTiff

    Interpolate to other mesh

    Interpolate the data from this coarse mesh onto a finer resolution mesh

    -
    +
    msh = mikeio.Mesh('../data/north_sea_2.mesh')
     msh
    @@ -512,7 +512,7 @@

    Interpolate to other mesh

    projection: LONG/LAT
    -
    +
    dsi = da.interp_like(msh)
     dsi
    @@ -523,7 +523,7 @@

    Interpolate to other mesh

    geometry: Dfsu2D (2259 elements, 1296 nodes)
    -
    +
    da[0].plot(figsize=(9,7), show_mesh=True);
    @@ -533,7 +533,7 @@

    Interpolate to other mesh

    -
    +
    dsi[0].plot(figsize=(9,7), show_mesh=True);
    @@ -545,14 +545,14 @@

    Interpolate to other mesh

    Note: 3 of the new elements are outside the original mesh and data are therefore NaN by default

    -
    +
    nan_elements = np.where(np.isnan(dsi[0].to_numpy()))[0]
     nan_elements
    array([ 249,  451, 1546])
    -
    +
    da.geometry.contains(msh.element_coordinates[nan_elements,:2])
    array([False, False, False])
    @@ -561,10 +561,10 @@

    We can force extrapolation to avoid the NaN values

    -
    +
    dat_interp = da.interp_like(msh, extrapolate=True)
    -
    +
    n_nan_elements = np.sum(np.isnan(dat_interp.values))
     n_nan_elements
    @@ -577,14 +577,14 @@

    Interpola

    We want to interpolate scatter data onto an existing mesh and create a new dfsu with the interpolated data.

    This uses lower level private utility methods not part of the public API.

    Interpolating from scatter data will soon be possible in a simpler way.

    -
    +
    from mikeio.spatial._utils import dist_in_meters
     from mikeio._interpolation import get_idw_interpolant
    -
    +
    dfs = mikeio.open("../data/wind_north_sea.dfsu")
    -
    +
    dfs.geometry.plot.mesh();
    @@ -594,7 +594,7 @@

    Interpola

    -
    +
    # scatter data: x,y,value for 4 points
     scatter= np.array([[1,50,1], [4, 52, 3], [8, 55, 2], [-1, 55, 1.5]])
     scatter
    @@ -611,35 +611,35 @@

    Interpola
  • calc IDW interpolant weights
  • Interpolate
  • -
    +
    dist = dist_in_meters(scatter[:,:2], dfs.element_coordinates[0,:2])
     dist
    array([4.00139539, 3.18881018, 6.58769411, 2.69722991])
    -
    +
    w = get_idw_interpolant(dist, p=2)
     w
    array([0.19438779, 0.30607974, 0.07171749, 0.42781498])
    -
    +
    np.dot(scatter[:,2], w) # interpolated value in element 0
    1.8977844597276883
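
The weights returned by get_idw_interpolant are inverse-distance powers normalized to sum to one, which is consistent with the numbers above; a quick check:

np.allclose(w, dist**-2 / np.sum(dist**-2))   # True for p=2
w.sum()                                       # 1.0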

    Let’s do the same for all points in the mesh and plot in the end

    -
    +
    dati = np.zeros((1,dfs.n_elements))
     for j in range(dfs.n_elements):
         dist = dist_in_meters(scatter[:,:2], dfs.element_coordinates[j,:2])
         w = get_idw_interpolant(dist, p=2)
         dati[0,j] = np.dot(scatter[:,2], w)
    -
    +
    da = mikeio.DataArray(data=dati, geometry=dfs.geometry, time=dfs.start_time)
     da
    @@ -650,7 +650,7 @@

    Interpola geometry: Dfsu2D (958 elements, 570 nodes)

    -
    +
    da.plot(title="Interpolated scatter data");
    @@ -660,13 +660,13 @@

    Interpola

    -
    +
    da.to_dfs("interpolated_scatter.dfsu")

    Clean up

    -
    +
    import os
     
     os.remove("wind_north_sea_interpolated.dfs2")
    diff --git a/examples/Generic.html b/examples/Generic.html
    index 326239fc8..4e5eaa159 100644
    --- a/examples/Generic.html
    +++ b/examples/Generic.html
    @@ -394,7 +394,7 @@ 

    Generic dfs processing

  • quantile: Create temporal quantiles of dfs file
  • -
    +
    import matplotlib.pyplot as plt
     import mikeio
     import mikeio.generic
    @@ -402,7 +402,7 @@

    Generic dfs processing

    Concatenation

    Take a look at these two files with overlapping timesteps.

    -
    +
    t1 = mikeio.read("../data/tide1.dfs1")
     t1
    @@ -414,7 +414,7 @@

    Concatenation

    0: Level <Water Level> (meter)
    -
    +
    t2 = mikeio.read("../data/tide2.dfs1")
     t2
    @@ -427,7 +427,7 @@

    Concatenation

    Plot one of the points along the line.

    -
    +
    plt.plot(t1.time,t1[0].isel(x=1).values, label="File 1")
     plt.plot(t2.time,t2[0].isel(x=1).values,'k+', label="File 2")
     plt.legend()
    @@ -439,15 +439,15 @@

    Concatenation

    -
    +
    mikeio.generic.concat(infilenames=["../data/tide1.dfs1",
                                        "../data/tide2.dfs1"],
                          outfilename="concat.dfs1")
    -
      0%|          | 0/2 [00:00<?, ?it/s]100%|██████████| 2/2 [00:00<00:00, 534.20it/s]
    +
      0%|          | 0/2 [00:00<?, ?it/s]100%|██████████| 2/2 [00:00<00:00, 497.54it/s]
    -
    +
    c = mikeio.read("concat.dfs1")
     c[0].isel(x=1).plot()
     c
    @@ -471,16 +471,16 @@

    Concatenation

    Difference between two files

Take the difference between two dfs files with the same structure, e.g. to see the difference in results between two calibration runs

    -
    +
    fn1 = "../data/oresundHD_run1.dfsu"
     fn2 = "../data/oresundHD_run2.dfsu"
     fn_diff = "oresundHD_difference.dfsu"
     mikeio.generic.diff(fn1, fn2, fn_diff)
    -
      0%|          | 0/5 [00:00<?, ?it/s]100%|██████████| 5/5 [00:00<00:00, 2456.83it/s]
    +
      0%|          | 0/5 [00:00<?, ?it/s]100%|██████████| 5/5 [00:00<00:00, 2275.80it/s]
    -
    +
    _, ax = plt.subplots(1,3, sharey=True, figsize=(12,5))
     da = mikeio.read(fn1, time=-1)[0]
     da.plot(vmin=0.06, vmax=0.27, ax=ax[0], title='run 1')
    @@ -504,11 +504,11 @@ 

    Extract time s
  • time slice by specifying start and/or end
  • specific items
  • -
    +
    infile = "../data/tide1.dfs1"
     mikeio.generic.extract(infile, "extracted.dfs1", start='2019-01-02')
    -
    +
    e = mikeio.read("extracted.dfs1")
     e
    @@ -520,11 +520,11 @@

    Extract time s 0: Level <Water Level> (meter)

    -
    +
    infile = "../data/oresund_vertical_slice.dfsu"
     mikeio.generic.extract(infile, "extracted.dfsu", items='Salinity', end=-2)
    -
    +
    e = mikeio.read("extracted.dfsu")
     e
    @@ -545,7 +545,7 @@

    Extract time s

    Scaling

Adding a constant, e.g. to adjust the datum

    -
    +
    ds = mikeio.read("../data/gebco_sound.dfs2")
     ds.Elevation[0].plot();
    @@ -556,23 +556,23 @@

    Scaling

    -
    +
    ds['Elevation'][0,104,131].to_numpy()
    -1.0

    This is the processing step.

    -
    +
    mikeio.generic.scale("../data/gebco_sound.dfs2", 
                          "gebco_sound_local_datum.dfs2",
                          offset=-2.1
                          )
    -
      0%|          | 0/1 [00:00<?, ?it/s]100%|██████████| 1/1 [00:00<00:00, 1197.35it/s]
    +
      0%|          | 0/1 [00:00<?, ?it/s]100%|██████████| 1/1 [00:00<00:00, 1245.71it/s]
    -
    +
    ds2 = mikeio.read("gebco_sound_local_datum.dfs2")
     ds2['Elevation'][0].plot()
    @@ -583,7 +583,7 @@

    Scaling

    -
    +
    ds2['Elevation'][0,104,131].to_numpy()
    -3.1
    @@ -591,7 +591,7 @@

    Scaling

    Spatially varying correction

    -
    +
    import numpy as np
     factor = np.ones_like(ds['Elevation'][0].to_numpy())
     factor.shape
    @@ -600,7 +600,7 @@

    Spatially var

Add some spatially varying factors, with exaggerated values for educational purposes.

    -
    +
    factor[:,0:100] = 5.3
     factor[0:40,] = 0.1
     factor[150:,150:] = 10.7
    @@ -615,7 +615,7 @@ 

    Spatially var

    The 2d array must first be flipped upside down and then converted to a 1d vector using numpy.ndarray.flatten to match how data is stored in dfs files.

    -
    +
    factor_ud = np.flipud(factor)
     factor_vec  = factor_ud.flatten()
     mikeio.generic.scale("../data/gebco_sound.dfs2", 
    @@ -623,10 +623,10 @@ 

    Spatially var factor=factor_vec )

    -
      0%|          | 0/1 [00:00<?, ?it/s]100%|██████████| 1/1 [00:00<00:00, 1021.75it/s]
    +
      0%|          | 0/1 [00:00<?, ?it/s]100%|██████████| 1/1 [00:00<00:00, 1867.46it/s]
    -
    +
    ds3 = mikeio.read("gebco_sound_spatial.dfs2")
     ds3.Elevation[0].plot();
    @@ -641,15 +641,15 @@

    Spatially var

    Time average

    -
    +
    fn = "../data/NorthSea_HD_and_windspeed.dfsu"
     fn_avg = "Avg_NorthSea_HD_and_windspeed.dfsu"
     mikeio.generic.avg_time(fn, fn_avg)
    -
      0%|          | 0/66 [00:00<?, ?it/s]100%|██████████| 66/66 [00:00<00:00, 17307.99it/s]
    +
      0%|          | 0/66 [00:00<?, ?it/s]100%|██████████| 66/66 [00:00<00:00, 17829.71it/s]
    -
    +
    ds = mikeio.read(fn)
     ds.mean(axis=0).describe()   # alternative way of getting the time average
    @@ -713,7 +713,7 @@

    Time average

    -
    +
    ds_avg = mikeio.read(fn_avg)
     ds_avg.describe()
    @@ -781,12 +781,12 @@

    Time average

    Quantile

Example that calculates the 25%, 50% and 75% percentiles for all items in a dfsu file.

    -
    +
    fn = "../data/NorthSea_HD_and_windspeed.dfsu"
     fn_q = "Q_NorthSea_HD_and_windspeed.dfsu"
     mikeio.generic.quantile(fn, fn_q, q=[0.25,0.5,0.75])
    -
    +
    ds = mikeio.read(fn_q)
     ds
    @@ -803,7 +803,7 @@

    Quantile

    5: Quantile 0.75, Wind speed <Wind speed> (meter per sec)
    -
    +
    da_q75 = ds["Quantile 0.75, Wind speed"]
     da_q75.plot(title="75th percentile, wind speed", label="m/s")
    @@ -817,7 +817,7 @@

    Quantile

    Clean up

    -
    +
    import os
     os.remove("concat.dfs1")
     os.remove("oresundHD_difference.dfsu")
    diff --git a/examples/Time-interpolation.html b/examples/Time-interpolation.html
    index 78dc15575..07237c1f4 100644
    --- a/examples/Time-interpolation.html
    +++ b/examples/Time-interpolation.html
    @@ -376,11 +376,11 @@ 

    Time interpolation

    -
    +
    import numpy as np
     import mikeio
    -
    +
    ds = mikeio.read("../data/waves.dfs2")
     ds
    @@ -397,7 +397,7 @@

    Time interpolation

    Interpolate to specific timestep

    A common use case is to interpolate to a shorter timestep, in this case 1h.

    -
    +
    ds_h = ds.interp_time(3600)
     ds_h
    @@ -412,14 +412,14 @@

    Interpola

    And to store the interpolated data in a new file.

    -
    +
    ds_h.to_dfs("waves_3h.dfs2")

    Interpolate to time axis of another dataset

    Read some non-equidistant data typically found in observed data.

    -
    +
    ts = mikeio.read("../data/waves.dfs0")
     ts
    @@ -434,10 +434,10 @@

    +
    dsi = ds.interp_time(ts)
    -
    +
    dsi.time
    DatetimeIndex(['2004-01-01 01:00:00', '2004-01-01 02:00:00',
    @@ -455,13 +455,13 @@ 

    +
    dsi["Sign. Wave Height"].shape
    (24, 31, 31)
    -
    +
    ax = dsi["Sign. Wave Height"].sel(x=250, y=1200).plot(marker='+')
     ts["Sign. Wave Height"].plot(ax=ax,marker='+')
    @@ -479,7 +479,7 @@

    Model validation

    In the example below we calculate this metric using the model data interpolated to the observed times.

For a more elaborate model validation library that takes care of these things for you, as well as calculating a number of relevant metrics, take a look at ModelSkill.

    Use np.nanmean to skip NaN.

    -
    +
    ts["Sign. Wave Height"]
    <mikeio.DataArray>
    @@ -490,7 +490,7 @@ 

    Model validation

    values: [0.06521, 0.06771, ..., 0.0576]
    -
    +
    dsi["Sign. Wave Height"].sel(x=250, y=1200)
    <mikeio.DataArray>
    @@ -501,7 +501,7 @@ 

    Model validation

    values: [0.0387, 0.03939, ..., nan]
    -
    +
    diff = (ts["Sign. Wave Height"]  - dsi["Sign. Wave Height"].sel(x=250, y=1200))
     diff.plot()
    @@ -512,7 +512,7 @@

    Model validation

    -
    +
    mae = np.abs(diff).nanmean().to_numpy()
     mae
    @@ -522,7 +522,7 @@

    Model validation

    Clean up

    -
    +
    import os
     os.remove("waves_3h.dfs2")
    diff --git a/examples/dfs2/bathy.html b/examples/dfs2/bathy.html index f360dc235..30cda9bee 100644 --- a/examples/dfs2/bathy.html +++ b/examples/dfs2/bathy.html @@ -377,11 +377,11 @@

    Dfs2 - Bathymetric data

    GEBCO Compilation Group (2020) GEBCO 2020 Grid (doi:10.5285/a29c5465-b138-234d-e053-6c86abc040b9)

    -
    +
    import xarray
     import mikeio
    -
    +
    ds = xarray.open_dataset("../../data/gebco_2020_n56.3_s55.2_w12.2_e13.1.nc")
     ds
    @@ -753,7 +753,7 @@

    Dfs2 - Bathymetric data

    Coordinates: (2) Data variables: elevation (lat, lon) int16 114kB ... -Attributes: (8)
  • Conventions :
    CF-1.6
    title :
    The GEBCO_2020 Grid - a continuous terrain model for oceans and land at 15 arc-second intervals
    institution :
    On behalf of the General Bathymetric Chart of the Oceans (GEBCO), the data are held at the British Oceanographic Data Centre (BODC).
    source :
    The GEBCO_2020 Grid is the latest global bathymetric product released by the General Bathymetric Chart of the Oceans (GEBCO) and has been developed through the Nippon Foundation-GEBCO Seabed 2030 Project. This is a collaborative project between the Nippon Foundation of Japan and GEBCO. The Seabed 2030 Project aims to bring together all available bathymetric data to produce the definitive map of the world ocean floor and make it available to all.
    history :
    Information on the development of the data set and the source data sets included in the grid can be found in the data set documentation available from https://www.gebco.net
    references :
    DOI: 10.5285/a29c5465-b138-234d-e053-6c86abc040b9
    comment :
    The data in the GEBCO_2020 Grid should not be used for navigation or any purpose relating to safety at sea.
    node_offset :
    1.0
  • -
    +
    ds.elevation.plot();
    @@ -784,7 +784,7 @@

    Dfs2 - Bathymetric data

    -
    +
    ds.elevation.sel(lon=12.74792, lat=55.865, method="nearest")
    @@ -1153,17 +1153,17 @@

    Dfs2 - Bathymetric data

    <xarray.DataArray 'elevation' ()> Size: 2B
     [1 values with dtype=int16]
     Coordinates: (2)
    -Attributes: (7)
    +Attributes: (7)

    Check ordering of dimensions, should be (y,x)

    -
    +
    ds.elevation.dims
    ('lat', 'lon')
    -
    +
    el = ds.elevation.values
     el.shape
    @@ -1171,37 +1171,37 @@

    Dfs2 - Bathymetric data

    Check that axes are increasing, S->N W->E

    -
    +
    ds.lat.values[0],ds.lat.values[-1] 
    (55.20208333333332, 56.29791666666665)
    -
    +
    ds.lat.values[0] < ds.lat.values[-1] 
    True
    -
    +
    ds.lon.values[0],ds.lon.values[-1] 
    (12.20208333333332, 13.097916666666663)
    -
    +
    el[0,0] # Bottom left
    -8
    -
    +
    el[-1,0] # Top Left
    -31
    -
    +
    geometry = mikeio.Grid2D(x=ds.lon.values, y=ds.lat.values, projection="LONG/LAT")
     geometry
    @@ -1211,7 +1211,7 @@

    Dfs2 - Bathymetric data

    projection: LONG/LAT
    -
    +
    da = mikeio.DataArray(data=el,
                    item=mikeio.ItemInfo("Elevation", mikeio.EUMType.Total_Water_Depth),
                    geometry=geometry,
    @@ -1226,7 +1226,7 @@ 

    Dfs2 - Bathymetric data

    geometry: Grid2D (ny=264, nx=216)
    -
    +
    da.plot();
    @@ -1236,7 +1236,7 @@

    Dfs2 - Bathymetric data

    -
    +
    da.plot(cmap='coolwarm', vmin=-100, vmax=100);
    @@ -1246,10 +1246,10 @@

    Dfs2 - Bathymetric data

    -
    +
    da.to_dfs("gebco.dfs2")
    -
    +
    ds = mikeio.read("gebco.dfs2")
     ds.Elevation.plot()
    @@ -1262,7 +1262,7 @@

    Dfs2 - Bathymetric data

    Clean up

    -
    +
    import os
     
     os.remove("gebco.dfs2")
    diff --git a/examples/dfs2/gfs.html b/examples/dfs2/gfs.html index a24c0512b..5659d1f51 100644 --- a/examples/dfs2/gfs.html +++ b/examples/dfs2/gfs.html @@ -380,13 +380,13 @@

    Dfs2 - Meteo data

    -
    +
    import xarray
     import pandas as pd
     import mikeio

    The file gfs_wind.nc contains a small sample of the GFS forecast data downloaded via their OpenDAP service

    -
    +
    ds = xarray.open_dataset('../../data/gfs_wind.nc')
     ds
    @@ -760,30 +760,30 @@

    Dfs2 - Meteo data

    msletmsl (time, lat, lon) float32 10kB ... ugrd10m (time, lat, lon) float32 10kB ... vgrd10m (time, lat, lon) float32 10kB ... -Attributes: (4)

Running a Mike 21 HD model needs at least three variables of meteorological forcing: Mean Sea Level Pressure, U 10m and V 10m

Let’s take a look at the U 10m

    -
    +
    ds.ugrd10m.isel(time=0).plot();
    @@ -797,7 +797,7 @@

    Dfs2 - Meteo data

    Convert to dfs2

    Time

    -
    +
    time = pd.DatetimeIndex(ds.time)
     time
    @@ -809,36 +809,36 @@

    Time

    Variable types

    -
    +
    mikeio.EUMType.Air_Pressure
    Air Pressure
    -
    +
    mikeio.EUMType.Air_Pressure.units
    [hectopascal, millibar]
    -
    +
    mikeio.EUMType.Wind_Velocity
    Wind Velocity
    -
    +
    mikeio.EUMType.Wind_Velocity.units
    [meter per sec, feet per sec, miles per hour, km per hour, knot]
    -
    +
    mslp = ds.msletmsl.values / 100 # conversion from Pa to hPa
     u = ds.ugrd10m.values
     v = ds.vgrd10m.values
    -
    +
    geometry = mikeio.Grid2D(x=ds.lon.values, y=ds.lat.values, projection="LONG/LAT")
     geometry
    @@ -848,14 +848,14 @@

    Variable types

    projection: LONG/LAT
    -
    +
    from mikeio import ItemInfo, EUMType, EUMUnit
     
     mslp_da = mikeio.DataArray(data=mslp,time=time, geometry=geometry, item=ItemInfo("Mean Sea Level Pressure", EUMType.Air_Pressure, EUMUnit.hectopascal))
     u_da = mikeio.DataArray(data=u,time=time, geometry=geometry, item=ItemInfo("Wind U", EUMType.Wind_Velocity, EUMUnit.meter_per_sec))
     v_da = mikeio.DataArray(data=v,time=time, geometry=geometry, item=ItemInfo("Wind V", EUMType.Wind_Velocity, EUMUnit.meter_per_sec))
    -
    +
    mds = mikeio.Dataset([mslp_da, u_da, v_da])
     mds
    @@ -869,11 +869,11 @@

    Variable types

    2: Wind V <Wind Velocity> (meter per sec)
    -
    +
    mds.to_dfs("gfs.dfs2")

    Clean up

    -
    +
    import os
     
     os.remove("gfs.dfs2")
    diff --git a/examples/index.html b/examples/index.html index 4897ed6b4..2b576609c 100644 --- a/examples/index.html +++ b/examples/index.html @@ -456,7 +456,7 @@

    Examples

    - + Dfs2 - Bathymetric data @@ -464,7 +464,7 @@

    Examples

    Convert GEBCO 2020 NetCDF to dfs2 - + Dfs2 - Meteo data @@ -472,7 +472,7 @@

    Examples

    Conversion of NetCDF from Global Forecasting System to Dfs2 - + Dfs2 examples @@ -483,7 +483,7 @@

    Examples

    - + Dfsu - 2D interpolation @@ -491,7 +491,7 @@

    Examples

    Interpolate dfsu data to a grid, save as dfs2 and geotiff. Interpolate dfsu data to another mesh. - + Generic dfs processing @@ -499,7 +499,7 @@

    Examples

    Tools and methods that applies to any type of dfs files. - + Time interpolation diff --git a/index.html b/index.html index fea280071..336c645a7 100644 --- a/index.html +++ b/index.html @@ -388,7 +388,7 @@

    Installation

    Getting started

    -
    +
    import mikeio
     
     ds = mikeio.read("data/FakeLake.dfsu")
    diff --git a/search.json b/search.json
    index ae7654eb0..ab1145afd 100644
    --- a/search.json
    +++ b/search.json
    @@ -102,7 +102,7 @@
         "href": "api/spatial.GeometryFM3D.html",
         "title": "spatial.GeometryFM3D",
         "section": "",
    -    "text": "spatial.GeometryFM3D(self, *, node_coordinates, element_table, codes=None, projection='LONG/LAT', dfsu_type=DfsuFileType.Dfsu3DSigma, element_ids=None, node_ids=None, n_layers=1, n_sigma=None, validate=True, reindex=False)\n\n\n\n\n\nName\nDescription\n\n\n\n\nbottom_elements\nList of 3d element ids of bottom layer\n\n\ncodes\nNode codes of all nodes (0=water, 1=land, 2…=open boundaries)\n\n\ne2_e3_table\nThe 2d-to-3d element connectivity table for a 3d object\n\n\nelem2d_ids\nThe associated 2d element id for each 3d element\n\n\nelement_coordinates\nCenter coordinates of each element\n\n\nis_geo\nAre coordinates geographical (LONG/LAT)?\n\n\nis_local_coordinates\nAre coordinates relative (NON-UTM)?\n\n\nlayer_ids\nThe layer number (0=bottom, 1, 2, …) for each 3d element\n\n\nmax_nodes_per_element\nThe maximum number of nodes for an element\n\n\nn_elements\nNumber of 3d elements\n\n\nn_layers\nMaximum number of layers\n\n\nn_layers_per_column\nList of number of layers for each column\n\n\nn_sigma_layers\nNumber of sigma layers\n\n\nn_z_layers\nMaximum number of z-layers\n\n\nprojection\nThe projection\n\n\nprojection_string\nThe projection string\n\n\ntop_elements\nList of 3d element ids of surface layer\n\n\n\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nget_layer_elements\n3d element ids for one (or more) specific layer(s)\n\n\nto_2d_geometry\nextract 2d geometry from 3d geometry\n\n\n\n\n\nspatial.GeometryFM3D.get_layer_elements(layers, layer=None)\n3d element ids for one (or more) specific layer(s)\n\n\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nlayers\nint or list(int)\nlayer between 0 (bottom) and n_layers-1 (top) (can also be negative counting from -1 at the top layer)\nrequired\n\n\n\n\n\n\n\n\n\nType\nDescription\n\n\n\n\nnumpy.numpy.array(int)\nelement ids\n\n\n\n\n\n\n\nspatial.GeometryFM3D.to_2d_geometry()\nextract 2d geometry from 3d geometry\n\n\n\n\n\nType\nDescription\n\n\n\n\nUnstructuredGeometry\n2d geometry (bottom nodes)"
    +    "text": "spatial.GeometryFM3D(self, *, node_coordinates, element_table, codes=None, projection='LONG/LAT', dfsu_type=DfsuFileType.Dfsu3DSigma, element_ids=None, node_ids=None, n_layers=1, n_sigma=None, validate=True, reindex=False)\n\n\n\n\n\nName\nDescription\n\n\n\n\nbottom_elements\nList of 3d element ids of bottom layer\n\n\ncodes\nNode codes of all nodes (0=water, 1=land, 2…=open boundaries)\n\n\ne2_e3_table\nThe 2d-to-3d element connectivity table for a 3d object\n\n\nelem2d_ids\nThe associated 2d element id for each 3d element\n\n\nelement_coordinates\nCenter coordinates of each element\n\n\nis_geo\nAre coordinates geographical (LONG/LAT)?\n\n\nis_local_coordinates\nAre coordinates relative (NON-UTM)?\n\n\nlayer_ids\nThe layer number (0=bottom, 1, 2, …) for each 3d element\n\n\nmax_nodes_per_element\nThe maximum number of nodes for an element\n\n\nn_elements\nNumber of 3d elements\n\n\nn_layers\nMaximum number of layers\n\n\nn_layers_per_column\nList of number of layers for each column\n\n\nn_sigma_layers\nNumber of sigma layers\n\n\nn_z_layers\nMaximum number of z-layers\n\n\nprojection\nThe projection\n\n\nprojection_string\nThe projection string\n\n\ntop_elements\nList of 3d element ids of surface layer\n\n\n\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nget_layer_elements\n3d element ids for one (or more) specific layer(s)\n\n\nto_2d_geometry\nextract 2d geometry from 3d geometry\n\n\n\n\n\nspatial.GeometryFM3D.get_layer_elements(layers)\n3d element ids for one (or more) specific layer(s)\n\n\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nlayers\nint or list(int)\nlayer between 0 (bottom) and n_layers-1 (top) (can also be negative counting from -1 at the top layer)\nrequired\n\n\n\n\n\n\n\n\n\nType\nDescription\n\n\n\n\nnumpy.numpy.array(int)\nelement ids\n\n\n\n\n\n\n\nspatial.GeometryFM3D.to_2d_geometry()\nextract 2d geometry from 3d geometry\n\n\n\n\n\nType\nDescription\n\n\n\n\nUnstructuredGeometry\n2d geometry (bottom nodes)"
       },
       {
         "objectID": "api/spatial.GeometryFM3D.html#attributes",
    @@ -116,7 +116,7 @@
         "href": "api/spatial.GeometryFM3D.html#methods",
         "title": "spatial.GeometryFM3D",
         "section": "",
    -    "text": "Name\nDescription\n\n\n\n\nget_layer_elements\n3d element ids for one (or more) specific layer(s)\n\n\nto_2d_geometry\nextract 2d geometry from 3d geometry\n\n\n\n\n\nspatial.GeometryFM3D.get_layer_elements(layers, layer=None)\n3d element ids for one (or more) specific layer(s)\n\n\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nlayers\nint or list(int)\nlayer between 0 (bottom) and n_layers-1 (top) (can also be negative counting from -1 at the top layer)\nrequired\n\n\n\n\n\n\n\n\n\nType\nDescription\n\n\n\n\nnumpy.numpy.array(int)\nelement ids\n\n\n\n\n\n\n\nspatial.GeometryFM3D.to_2d_geometry()\nextract 2d geometry from 3d geometry\n\n\n\n\n\nType\nDescription\n\n\n\n\nUnstructuredGeometry\n2d geometry (bottom nodes)"
    +    "text": "Name\nDescription\n\n\n\n\nget_layer_elements\n3d element ids for one (or more) specific layer(s)\n\n\nto_2d_geometry\nextract 2d geometry from 3d geometry\n\n\n\n\n\nspatial.GeometryFM3D.get_layer_elements(layers)\n3d element ids for one (or more) specific layer(s)\n\n\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nlayers\nint or list(int)\nlayer between 0 (bottom) and n_layers-1 (top) (can also be negative counting from -1 at the top layer)\nrequired\n\n\n\n\n\n\n\n\n\nType\nDescription\n\n\n\n\nnumpy.numpy.array(int)\nelement ids\n\n\n\n\n\n\n\nspatial.GeometryFM3D.to_2d_geometry()\nextract 2d geometry from 3d geometry\n\n\n\n\n\nType\nDescription\n\n\n\n\nUnstructuredGeometry\n2d geometry (bottom nodes)"
       },
       {
         "objectID": "api/spatial.GeometryFM2D.html",
    @@ -249,7 +249,7 @@
         "href": "api/dfsu.Dfsu2DV.html",
         "title": "dfsu.Dfsu2DV",
         "section": "",
    -    "text": "dfsu.Dfsu2DV(self, filename)\n\n\n\n\n\nName\nDescription\n\n\n\n\nboundary_codes\nUnique list of boundary codes\n\n\nboundary_polylines\nLists of closed polylines defining domain outline\n\n\ndeletevalue\nFile delete value\n\n\nelement_coordinates\nCenter coordinates of each element\n\n\nelement_table\nElement to node connectivity\n\n\nend_time\nFile end time\n\n\nis_geo\nAre coordinates geographical (LONG/LAT)?\n\n\nis_layered\nType is layered dfsu (3d, vertical profile or vertical column)\n\n\nis_local_coordinates\nAre coordinates relative (NON-UTM)?\n\n\nis_spectral\nType is spectral dfsu (point, line or area spectrum)\n\n\nis_tri_only\nDoes the mesh consist of triangles only?\n\n\nitems\nList of items\n\n\nmax_nodes_per_element\nThe maximum number of nodes for an element\n\n\nn_elements\nNumber of elements\n\n\nn_items\nNumber of items\n\n\nn_layers\nMaximum number of layers\n\n\nn_nodes\nNumber of nodes\n\n\nn_sigma_layers\nNumber of sigma layers\n\n\nn_timesteps\nNumber of time steps\n\n\nn_z_layers\nMaximum number of z-layers\n\n\nnode_coordinates\nCoordinates (x,y,z) of all nodes\n\n\nprojection_string\nThe projection string\n\n\nstart_time\nFile start time\n\n\ntimestep\nTime step size in seconds\n\n\ntype_name\nType name, e.g. Mesh, Dfsu2D\n\n\nvalid_codes\nUnique list of node codes\n\n\n\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nplot_vertical_profile\nPlot unstructured vertical profile\n\n\nread\nRead data from a dfsu file\n\n\nto_mesh\nwrite object to mesh file\n\n\n\n\n\ndfsu.Dfsu2DV.plot_vertical_profile(values, time_step=None, cmin=None, cmax=None, label='', **kwargs)\nPlot unstructured vertical profile\n\n\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nvalues\n\nvalue for each element to plot\nrequired\n\n\ncmin\n\nlower bound of values to be shown on plot, default:None\nNone\n\n\ncmax\n\nupper bound of values to be shown on plot, default:None\nNone\n\n\ntitle\n\naxes title\nrequired\n\n\nlabel\n\ncolorbar label\n''\n\n\ncmap\n\ncolormap, default viridis\nrequired\n\n\nfigsize\n\nspecify size of figure\nrequired\n\n\nax\n\nAdding to existing axis, instead of creating new fig\nrequired\n\n\n\n\n\n\n\n\n\nType\nDescription\n\n\n\n\n<matplotlib.axes>\n\n\n\n\n\n\n\n\ndfsu.Dfsu2DV.read(items=None, time=None, elements=None, area=None, x=None, y=None, z=None, layers=None, keepdims=False, dtype=np.float32, error_bad_data=True, fill_bad_data_value=np.nan)\nRead data from a dfsu file\n\n\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nitems\n\nRead only selected items, by number (0-based), or by name\nNone\n\n\ntime\n\nRead only selected time steps, by default None (=all)\nNone\n\n\nkeepdims\n\nWhen reading a single time step only, should the time-dimension be kept in the returned Dataset? 
by default: False\nFalse\n\n\narea\n\nRead only data inside (horizontal) area given as a bounding box (tuple with left, lower, right, upper) or as list of coordinates for a polygon, by default None\nNone\n\n\nx\n\nRead only data for elements containing the (x,y,z) points(s), by default None\nNone\n\n\ny\n\nRead only data for elements containing the (x,y,z) points(s), by default None\nNone\n\n\nz\n\nRead only data for elements containing the (x,y,z) points(s), by default None\nNone\n\n\nlayers\n\nRead only data for specific layers, by default None\nNone\n\n\nelements\ntyping.Collection[int] | None\nRead only selected element ids, by default None\nNone\n\n\nerror_bad_data\n\nraise error if data is corrupt, by default True,\nTrue\n\n\nfill_bad_data_value\n\nfill value for to impute corrupt data, used in conjunction with error_bad_data=False default np.nan\nnp.nan\n\n\n\n\n\n\n\n\n\nType\nDescription\n\n\n\n\nmikeio.dataset.Dataset\nA Dataset with data dimensions [t,elements]\n\n\n\n\n\n\n\ndfsu.Dfsu2DV.to_mesh(outfilename)\nwrite object to mesh file\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\noutfilename\nstr\npath to file to be written\nrequired"
    +    "text": "dfsu.Dfsu2DV(self, filename)\n\n\n\n\n\nName\nDescription\n\n\n\n\nboundary_codes\nUnique list of boundary codes\n\n\nboundary_polylines\nLists of closed polylines defining domain outline\n\n\ndeletevalue\nFile delete value\n\n\nelement_coordinates\nCenter coordinates of each element\n\n\nelement_table\nElement to node connectivity\n\n\nend_time\nFile end time\n\n\nis_geo\nAre coordinates geographical (LONG/LAT)?\n\n\nis_layered\nType is layered dfsu (3d, vertical profile or vertical column)\n\n\nis_local_coordinates\nAre coordinates relative (NON-UTM)?\n\n\nis_spectral\nType is spectral dfsu (point, line or area spectrum)\n\n\nis_tri_only\nDoes the mesh consist of triangles only?\n\n\nitems\nList of items\n\n\nmax_nodes_per_element\nThe maximum number of nodes for an element\n\n\nn_elements\nNumber of elements\n\n\nn_items\nNumber of items\n\n\nn_layers\nMaximum number of layers\n\n\nn_nodes\nNumber of nodes\n\n\nn_sigma_layers\nNumber of sigma layers\n\n\nn_timesteps\nNumber of time steps\n\n\nn_z_layers\nMaximum number of z-layers\n\n\nnode_coordinates\nCoordinates (x,y,z) of all nodes\n\n\nprojection_string\nThe projection string\n\n\nstart_time\nFile start time\n\n\ntimestep\nTime step size in seconds\n\n\ntype_name\nType name, e.g. Mesh, Dfsu2D\n\n\nvalid_codes\nUnique list of node codes\n\n\n\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nplot_vertical_profile\nPlot unstructured vertical profile\n\n\nread\nRead data from a dfsu file\n\n\nto_mesh\nwrite object to mesh file\n\n\n\n\n\ndfsu.Dfsu2DV.plot_vertical_profile(values, time_step=None, cmin=None, cmax=None, label='', **kwargs)\nPlot unstructured vertical profile\n\n\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nvalues\n\nvalue for each element to plot\nrequired\n\n\ncmin\n\nlower bound of values to be shown on plot, default:None\nNone\n\n\ncmax\n\nupper bound of values to be shown on plot, default:None\nNone\n\n\ntitle\n\naxes title\nrequired\n\n\nlabel\n\ncolorbar label\n''\n\n\ncmap\n\ncolormap, default viridis\nrequired\n\n\nfigsize\n\nspecify size of figure\nrequired\n\n\nax\n\nAdding to existing axis, instead of creating new fig\nrequired\n\n\n\n\n\n\n\n\n\nType\nDescription\n\n\n\n\n<matplotlib.axes>\n\n\n\n\n\n\n\n\ndfsu.Dfsu2DV.read(items=None, time=None, elements=None, area=None, x=None, y=None, z=None, layers=None, keepdims=False, dtype=np.float32, error_bad_data=True, fill_bad_data_value=np.nan)\nRead data from a dfsu file\n\n\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nitems\nstr | int | typing.Sequence[str | int] | None\nRead only selected items, by number (0-based), or by name\nNone\n\n\ntime\nint | str | slice | None\nRead only selected time steps, by default None (=all)\nNone\n\n\nkeepdims\nbool\nWhen reading a single time step only, should the time-dimension be kept in the returned Dataset? 
by default: False\nFalse\n\n\narea\ntyping.Tuple[float, float, float, float] | None\nRead only data inside (horizontal) area given as a bounding box (tuple with left, lower, right, upper) or as list of coordinates for a polygon, by default None\nNone\n\n\nx\nfloat | None\nRead only data for elements containing the (x,y,z) points(s)\nNone\n\n\ny\nfloat | None\nRead only data for elements containing the (x,y,z) points(s)\nNone\n\n\nz\nfloat | None\nRead only data for elements containing the (x,y,z) points(s)\nNone\n\n\nlayers\nint | str | typing.Sequence[int] | None\nRead only data for specific layers, by default None\nNone\n\n\nelements\ntyping.Collection[int] | None\nRead only selected element ids, by default None\nNone\n\n\nerror_bad_data\nbool\nraise error if data is corrupt, by default True,\nTrue\n\n\nfill_bad_data_value\nfloat\nfill value for to impute corrupt data, used in conjunction with error_bad_data=False default np.nan\nnp.nan\n\n\n\n\n\n\n\n\n\nType\nDescription\n\n\n\n\nmikeio.dataset.Dataset\nA Dataset with data dimensions [t,elements]\n\n\n\n\n\n\n\ndfsu.Dfsu2DV.to_mesh(outfilename)\nwrite object to mesh file\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\noutfilename\nstr\npath to file to be written\nrequired"
       },
       {
         "objectID": "api/dfsu.Dfsu2DV.html#attributes",
    @@ -263,7 +263,7 @@
         "href": "api/dfsu.Dfsu2DV.html#methods",
         "title": "dfsu.Dfsu2DV",
         "section": "",
    -    "text": "Name\nDescription\n\n\n\n\nplot_vertical_profile\nPlot unstructured vertical profile\n\n\nread\nRead data from a dfsu file\n\n\nto_mesh\nwrite object to mesh file\n\n\n\n\n\ndfsu.Dfsu2DV.plot_vertical_profile(values, time_step=None, cmin=None, cmax=None, label='', **kwargs)\nPlot unstructured vertical profile\n\n\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nvalues\n\nvalue for each element to plot\nrequired\n\n\ncmin\n\nlower bound of values to be shown on plot, default:None\nNone\n\n\ncmax\n\nupper bound of values to be shown on plot, default:None\nNone\n\n\ntitle\n\naxes title\nrequired\n\n\nlabel\n\ncolorbar label\n''\n\n\ncmap\n\ncolormap, default viridis\nrequired\n\n\nfigsize\n\nspecify size of figure\nrequired\n\n\nax\n\nAdding to existing axis, instead of creating new fig\nrequired\n\n\n\n\n\n\n\n\n\nType\nDescription\n\n\n\n\n<matplotlib.axes>\n\n\n\n\n\n\n\n\ndfsu.Dfsu2DV.read(items=None, time=None, elements=None, area=None, x=None, y=None, z=None, layers=None, keepdims=False, dtype=np.float32, error_bad_data=True, fill_bad_data_value=np.nan)\nRead data from a dfsu file\n\n\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nitems\n\nRead only selected items, by number (0-based), or by name\nNone\n\n\ntime\n\nRead only selected time steps, by default None (=all)\nNone\n\n\nkeepdims\n\nWhen reading a single time step only, should the time-dimension be kept in the returned Dataset? by default: False\nFalse\n\n\narea\n\nRead only data inside (horizontal) area given as a bounding box (tuple with left, lower, right, upper) or as list of coordinates for a polygon, by default None\nNone\n\n\nx\n\nRead only data for elements containing the (x,y,z) points(s), by default None\nNone\n\n\ny\n\nRead only data for elements containing the (x,y,z) points(s), by default None\nNone\n\n\nz\n\nRead only data for elements containing the (x,y,z) points(s), by default None\nNone\n\n\nlayers\n\nRead only data for specific layers, by default None\nNone\n\n\nelements\ntyping.Collection[int] | None\nRead only selected element ids, by default None\nNone\n\n\nerror_bad_data\n\nraise error if data is corrupt, by default True,\nTrue\n\n\nfill_bad_data_value\n\nfill value for to impute corrupt data, used in conjunction with error_bad_data=False default np.nan\nnp.nan\n\n\n\n\n\n\n\n\n\nType\nDescription\n\n\n\n\nmikeio.dataset.Dataset\nA Dataset with data dimensions [t,elements]\n\n\n\n\n\n\n\ndfsu.Dfsu2DV.to_mesh(outfilename)\nwrite object to mesh file\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\noutfilename\nstr\npath to file to be written\nrequired"
    +    "text": "Name\nDescription\n\n\n\n\nplot_vertical_profile\nPlot unstructured vertical profile\n\n\nread\nRead data from a dfsu file\n\n\nto_mesh\nwrite object to mesh file\n\n\n\n\n\ndfsu.Dfsu2DV.plot_vertical_profile(values, time_step=None, cmin=None, cmax=None, label='', **kwargs)\nPlot unstructured vertical profile\n\n\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nvalues\n\nvalue for each element to plot\nrequired\n\n\ncmin\n\nlower bound of values to be shown on plot, default:None\nNone\n\n\ncmax\n\nupper bound of values to be shown on plot, default:None\nNone\n\n\ntitle\n\naxes title\nrequired\n\n\nlabel\n\ncolorbar label\n''\n\n\ncmap\n\ncolormap, default viridis\nrequired\n\n\nfigsize\n\nspecify size of figure\nrequired\n\n\nax\n\nAdding to existing axis, instead of creating new fig\nrequired\n\n\n\n\n\n\n\n\n\nType\nDescription\n\n\n\n\n<matplotlib.axes>\n\n\n\n\n\n\n\n\ndfsu.Dfsu2DV.read(items=None, time=None, elements=None, area=None, x=None, y=None, z=None, layers=None, keepdims=False, dtype=np.float32, error_bad_data=True, fill_bad_data_value=np.nan)\nRead data from a dfsu file\n\n\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nitems\nstr | int | typing.Sequence[str | int] | None\nRead only selected items, by number (0-based), or by name\nNone\n\n\ntime\nint | str | slice | None\nRead only selected time steps, by default None (=all)\nNone\n\n\nkeepdims\nbool\nWhen reading a single time step only, should the time-dimension be kept in the returned Dataset? by default: False\nFalse\n\n\narea\ntyping.Tuple[float, float, float, float] | None\nRead only data inside (horizontal) area given as a bounding box (tuple with left, lower, right, upper) or as list of coordinates for a polygon, by default None\nNone\n\n\nx\nfloat | None\nRead only data for elements containing the (x,y,z) points(s)\nNone\n\n\ny\nfloat | None\nRead only data for elements containing the (x,y,z) points(s)\nNone\n\n\nz\nfloat | None\nRead only data for elements containing the (x,y,z) points(s)\nNone\n\n\nlayers\nint | str | typing.Sequence[int] | None\nRead only data for specific layers, by default None\nNone\n\n\nelements\ntyping.Collection[int] | None\nRead only selected element ids, by default None\nNone\n\n\nerror_bad_data\nbool\nraise error if data is corrupt, by default True,\nTrue\n\n\nfill_bad_data_value\nfloat\nfill value for to impute corrupt data, used in conjunction with error_bad_data=False default np.nan\nnp.nan\n\n\n\n\n\n\n\n\n\nType\nDescription\n\n\n\n\nmikeio.dataset.Dataset\nA Dataset with data dimensions [t,elements]\n\n\n\n\n\n\n\ndfsu.Dfsu2DV.to_mesh(outfilename)\nwrite object to mesh file\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\noutfilename\nstr\npath to file to be written\nrequired"
       },
       {
         "objectID": "api/DataArray.html",
    @@ -1448,7 +1448,7 @@
         "href": "examples/Generic.html#concatenation",
         "title": "Generic dfs processing",
         "section": "Concatenation",
    -    "text": "Concatenation\nTake a look at these two files with overlapping timesteps.\n\nt1 = mikeio.read(\"../data/tide1.dfs1\")\nt1\n\n<mikeio.Dataset>\ndims: (time:97, x:10)\ntime: 2019-01-01 00:00:00 - 2019-01-03 00:00:00 (97 records)\ngeometry: Grid1D (n=10, dx=0.06667)\nitems:\n  0:  Level <Water Level> (meter)\n\n\n\nt2 = mikeio.read(\"../data/tide2.dfs1\")\nt2\n\n<mikeio.Dataset>\ndims: (time:97, x:10)\ntime: 2019-01-02 00:00:00 - 2019-01-04 00:00:00 (97 records)\ngeometry: Grid1D (n=10, dx=0.06667)\nitems:\n  0:  Level <Water Level> (meter)\n\n\nPlot one of the points along the line.\n\nplt.plot(t1.time,t1[0].isel(x=1).values, label=\"File 1\")\nplt.plot(t2.time,t2[0].isel(x=1).values,'k+', label=\"File 2\")\nplt.legend()\n\n\n\n\n\n\n\n\n\nmikeio.generic.concat(infilenames=[\"../data/tide1.dfs1\",\n                                   \"../data/tide2.dfs1\"],\n                     outfilename=\"concat.dfs1\")\n\n  0%|          | 0/2 [00:00<?, ?it/s]100%|██████████| 2/2 [00:00<00:00, 534.20it/s]\n\n\n\nc = mikeio.read(\"concat.dfs1\")\nc[0].isel(x=1).plot()\nc\n\n<mikeio.Dataset>\ndims: (time:145, x:10)\ntime: 2019-01-01 00:00:00 - 2019-01-04 00:00:00 (145 records)\ngeometry: Grid1D (n=10, dx=0.06667)\nitems:\n  0:  Level <Water Level> (meter)",
    +    "text": "Concatenation\nTake a look at these two files with overlapping timesteps.\n\nt1 = mikeio.read(\"../data/tide1.dfs1\")\nt1\n\n<mikeio.Dataset>\ndims: (time:97, x:10)\ntime: 2019-01-01 00:00:00 - 2019-01-03 00:00:00 (97 records)\ngeometry: Grid1D (n=10, dx=0.06667)\nitems:\n  0:  Level <Water Level> (meter)\n\n\n\nt2 = mikeio.read(\"../data/tide2.dfs1\")\nt2\n\n<mikeio.Dataset>\ndims: (time:97, x:10)\ntime: 2019-01-02 00:00:00 - 2019-01-04 00:00:00 (97 records)\ngeometry: Grid1D (n=10, dx=0.06667)\nitems:\n  0:  Level <Water Level> (meter)\n\n\nPlot one of the points along the line.\n\nplt.plot(t1.time,t1[0].isel(x=1).values, label=\"File 1\")\nplt.plot(t2.time,t2[0].isel(x=1).values,'k+', label=\"File 2\")\nplt.legend()\n\n\n\n\n\n\n\n\n\nmikeio.generic.concat(infilenames=[\"../data/tide1.dfs1\",\n                                   \"../data/tide2.dfs1\"],\n                     outfilename=\"concat.dfs1\")\n\n  0%|          | 0/2 [00:00<?, ?it/s]100%|██████████| 2/2 [00:00<00:00, 497.54it/s]\n\n\n\nc = mikeio.read(\"concat.dfs1\")\nc[0].isel(x=1).plot()\nc\n\n<mikeio.Dataset>\ndims: (time:145, x:10)\ntime: 2019-01-01 00:00:00 - 2019-01-04 00:00:00 (145 records)\ngeometry: Grid1D (n=10, dx=0.06667)\nitems:\n  0:  Level <Water Level> (meter)",
         "crumbs": [
           "Home",
           "Examples",
    @@ -1460,7 +1460,7 @@
         "href": "examples/Generic.html#difference-between-two-files",
         "title": "Generic dfs processing",
         "section": "Difference between two files",
    -    "text": "Difference between two files\nTake difference between two dfs files with same structure - e.g. to see the difference in result between two calibration runs\n\nfn1 = \"../data/oresundHD_run1.dfsu\"\nfn2 = \"../data/oresundHD_run2.dfsu\"\nfn_diff = \"oresundHD_difference.dfsu\"\nmikeio.generic.diff(fn1, fn2, fn_diff)\n\n  0%|          | 0/5 [00:00<?, ?it/s]100%|██████████| 5/5 [00:00<00:00, 2456.83it/s]\n\n\n\n_, ax = plt.subplots(1,3, sharey=True, figsize=(12,5))\nda = mikeio.read(fn1, time=-1)[0]\nda.plot(vmin=0.06, vmax=0.27, ax=ax[0], title='run 1')\nda = mikeio.read(fn2, time=-1)[0]\nda.plot(vmin=0.06, vmax=0.27, ax=ax[1], title='run 2')\nda = mikeio.read(fn_diff, time=-1)[0]\nda.plot(vmin=-0.1, vmax=0.1, cmap='coolwarm', ax=ax[2], title='difference');",
    +    "text": "Difference between two files\nTake difference between two dfs files with same structure - e.g. to see the difference in result between two calibration runs\n\nfn1 = \"../data/oresundHD_run1.dfsu\"\nfn2 = \"../data/oresundHD_run2.dfsu\"\nfn_diff = \"oresundHD_difference.dfsu\"\nmikeio.generic.diff(fn1, fn2, fn_diff)\n\n  0%|          | 0/5 [00:00<?, ?it/s]100%|██████████| 5/5 [00:00<00:00, 2275.80it/s]\n\n\n\n_, ax = plt.subplots(1,3, sharey=True, figsize=(12,5))\nda = mikeio.read(fn1, time=-1)[0]\nda.plot(vmin=0.06, vmax=0.27, ax=ax[0], title='run 1')\nda = mikeio.read(fn2, time=-1)[0]\nda.plot(vmin=0.06, vmax=0.27, ax=ax[1], title='run 2')\nda = mikeio.read(fn_diff, time=-1)[0]\nda.plot(vmin=-0.1, vmax=0.1, cmap='coolwarm', ax=ax[2], title='difference');",
         "crumbs": [
           "Home",
           "Examples",
    @@ -1484,7 +1484,7 @@
         "href": "examples/Generic.html#scaling",
         "title": "Generic dfs processing",
         "section": "Scaling",
    -    "text": "Scaling\nAdding a constant e.g to adjust datum\n\nds = mikeio.read(\"../data/gebco_sound.dfs2\")\nds.Elevation[0].plot();\n\n\n\n\n\n\n\n\n\nds['Elevation'][0,104,131].to_numpy()\n\n-1.0\n\n\nThis is the processing step.\n\nmikeio.generic.scale(\"../data/gebco_sound.dfs2\", \n                     \"gebco_sound_local_datum.dfs2\",\n                     offset=-2.1\n                     )\n\n  0%|          | 0/1 [00:00<?, ?it/s]100%|██████████| 1/1 [00:00<00:00, 1197.35it/s]\n\n\n\nds2 = mikeio.read(\"gebco_sound_local_datum.dfs2\")\nds2['Elevation'][0].plot()\n\n\n\n\n\n\n\n\n\nds2['Elevation'][0,104,131].to_numpy()\n\n-3.1\n\n\n\nSpatially varying correction\n\nimport numpy as np\nfactor = np.ones_like(ds['Elevation'][0].to_numpy())\nfactor.shape\n\n(264, 216)\n\n\nAdd some spatially varying factors, exaggerated values for educational purpose.\n\nfactor[:,0:100] = 5.3\nfactor[0:40,] = 0.1\nfactor[150:,150:] = 10.7\nplt.imshow(factor)\nplt.colorbar();\n\n\n\n\n\n\n\n\nThe 2d array must first be flipped upside down and then converted to a 1d vector using numpy.ndarray.flatten to match how data is stored in dfs files.\n\nfactor_ud = np.flipud(factor)\nfactor_vec  = factor_ud.flatten()\nmikeio.generic.scale(\"../data/gebco_sound.dfs2\", \n                     \"gebco_sound_spatial.dfs2\",\n                     factor=factor_vec\n                     )\n\n  0%|          | 0/1 [00:00<?, ?it/s]100%|██████████| 1/1 [00:00<00:00, 1021.75it/s]\n\n\n\nds3 = mikeio.read(\"gebco_sound_spatial.dfs2\")\nds3.Elevation[0].plot();",
    +    "text": "Scaling\nAdding a constant e.g to adjust datum\n\nds = mikeio.read(\"../data/gebco_sound.dfs2\")\nds.Elevation[0].plot();\n\n\n\n\n\n\n\n\n\nds['Elevation'][0,104,131].to_numpy()\n\n-1.0\n\n\nThis is the processing step.\n\nmikeio.generic.scale(\"../data/gebco_sound.dfs2\", \n                     \"gebco_sound_local_datum.dfs2\",\n                     offset=-2.1\n                     )\n\n  0%|          | 0/1 [00:00<?, ?it/s]100%|██████████| 1/1 [00:00<00:00, 1245.71it/s]\n\n\n\nds2 = mikeio.read(\"gebco_sound_local_datum.dfs2\")\nds2['Elevation'][0].plot()\n\n\n\n\n\n\n\n\n\nds2['Elevation'][0,104,131].to_numpy()\n\n-3.1\n\n\n\nSpatially varying correction\n\nimport numpy as np\nfactor = np.ones_like(ds['Elevation'][0].to_numpy())\nfactor.shape\n\n(264, 216)\n\n\nAdd some spatially varying factors, exaggerated values for educational purpose.\n\nfactor[:,0:100] = 5.3\nfactor[0:40,] = 0.1\nfactor[150:,150:] = 10.7\nplt.imshow(factor)\nplt.colorbar();\n\n\n\n\n\n\n\n\nThe 2d array must first be flipped upside down and then converted to a 1d vector using numpy.ndarray.flatten to match how data is stored in dfs files.\n\nfactor_ud = np.flipud(factor)\nfactor_vec  = factor_ud.flatten()\nmikeio.generic.scale(\"../data/gebco_sound.dfs2\", \n                     \"gebco_sound_spatial.dfs2\",\n                     factor=factor_vec\n                     )\n\n  0%|          | 0/1 [00:00<?, ?it/s]100%|██████████| 1/1 [00:00<00:00, 1867.46it/s]\n\n\n\nds3 = mikeio.read(\"gebco_sound_spatial.dfs2\")\nds3.Elevation[0].plot();",
         "crumbs": [
           "Home",
           "Examples",
    @@ -1496,7 +1496,7 @@
         "href": "examples/Generic.html#time-average",
         "title": "Generic dfs processing",
         "section": "Time average",
    -    "text": "Time average\n\nfn = \"../data/NorthSea_HD_and_windspeed.dfsu\"\nfn_avg = \"Avg_NorthSea_HD_and_windspeed.dfsu\"\nmikeio.generic.avg_time(fn, fn_avg)\n\n  0%|          | 0/66 [00:00<?, ?it/s]100%|██████████| 66/66 [00:00<00:00, 17307.99it/s]\n\n\n\nds = mikeio.read(fn)\nds.mean(axis=0).describe()   # alternative way of getting the time average\n\n\n\n\n\n\n\n\n\nSurface elevation\nWind speed\n\n\n\n\ncount\n958.000000\n958.000000\n\n\nmean\n0.449857\n12.772706\n\n\nstd\n0.178127\n2.367667\n\n\nmin\n0.114355\n6.498364\n\n\n25%\n0.373691\n11.199439\n\n\n50%\n0.431747\n12.984060\n\n\n75%\n0.479224\n14.658077\n\n\nmax\n1.202888\n16.677952\n\n\n\n\n\n\n\n\n\nds_avg = mikeio.read(fn_avg)\nds_avg.describe()\n\n\n\n\n\n\n\n\n\nSurface elevation\nWind speed\n\n\n\n\ncount\n958.000000\n958.000000\n\n\nmean\n0.449857\n12.772706\n\n\nstd\n0.178127\n2.367667\n\n\nmin\n0.114355\n6.498364\n\n\n25%\n0.373691\n11.199439\n\n\n50%\n0.431747\n12.984060\n\n\n75%\n0.479224\n14.658077\n\n\nmax\n1.202888\n16.677952",
    +    "text": "Time average\n\nfn = \"../data/NorthSea_HD_and_windspeed.dfsu\"\nfn_avg = \"Avg_NorthSea_HD_and_windspeed.dfsu\"\nmikeio.generic.avg_time(fn, fn_avg)\n\n  0%|          | 0/66 [00:00<?, ?it/s]100%|██████████| 66/66 [00:00<00:00, 17829.71it/s]\n\n\n\nds = mikeio.read(fn)\nds.mean(axis=0).describe()   # alternative way of getting the time average\n\n\n\n\n\n\n\n\n\nSurface elevation\nWind speed\n\n\n\n\ncount\n958.000000\n958.000000\n\n\nmean\n0.449857\n12.772706\n\n\nstd\n0.178127\n2.367667\n\n\nmin\n0.114355\n6.498364\n\n\n25%\n0.373691\n11.199439\n\n\n50%\n0.431747\n12.984060\n\n\n75%\n0.479224\n14.658077\n\n\nmax\n1.202888\n16.677952\n\n\n\n\n\n\n\n\n\nds_avg = mikeio.read(fn_avg)\nds_avg.describe()\n\n\n\n\n\n\n\n\n\nSurface elevation\nWind speed\n\n\n\n\ncount\n958.000000\n958.000000\n\n\nmean\n0.449857\n12.772706\n\n\nstd\n0.178127\n2.367667\n\n\nmin\n0.114355\n6.498364\n\n\n25%\n0.373691\n11.199439\n\n\n50%\n0.431747\n12.984060\n\n\n75%\n0.479224\n14.658077\n\n\nmax\n1.202888\n16.677952",
         "crumbs": [
           "Home",
           "Examples",
    @@ -1649,7 +1649,7 @@
         "href": "api/spatial.GeometryFMVerticalColumn.html",
         "title": "spatial.GeometryFMVerticalColumn",
         "section": "",
    -    "text": "spatial.GeometryFMVerticalColumn(self, *, node_coordinates, element_table, codes=None, projection='LONG/LAT', dfsu_type=DfsuFileType.Dfsu3DSigma, element_ids=None, node_ids=None, n_layers=1, n_sigma=None, validate=True, reindex=False)\nA 3d geometry with consisting of a single vertical column\n\n\n\n\n\nName\nDescription\n\n\n\n\nbottom_elements\nList of 3d element ids of bottom layer\n\n\ncodes\nNode codes of all nodes (0=water, 1=land, 2…=open boundaries)\n\n\ne2_e3_table\nThe 2d-to-3d element connectivity table for a 3d object\n\n\nelem2d_ids\nThe associated 2d element id for each 3d element\n\n\nelement_coordinates\nCenter coordinates of each element\n\n\nis_geo\nAre coordinates geographical (LONG/LAT)?\n\n\nis_local_coordinates\nAre coordinates relative (NON-UTM)?\n\n\nlayer_ids\nThe layer number (0=bottom, 1, 2, …) for each 3d element\n\n\nmax_nodes_per_element\nThe maximum number of nodes for an element\n\n\nn_elements\nNumber of 3d elements\n\n\nn_layers\nMaximum number of layers\n\n\nn_layers_per_column\nList of number of layers for each column\n\n\nn_sigma_layers\nNumber of sigma layers\n\n\nn_z_layers\nMaximum number of z-layers\n\n\nprojection\nThe projection\n\n\nprojection_string\nThe projection string\n\n\ntop_elements\nList of 3d element ids of surface layer\n\n\n\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nget_layer_elements\n3d element ids for one (or more) specific layer(s)\n\n\nto_2d_geometry\nextract 2d geometry from 3d geometry\n\n\n\n\n\nspatial.GeometryFMVerticalColumn.get_layer_elements(layers, layer=None)\n3d element ids for one (or more) specific layer(s)\n\n\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nlayers\nint or list(int)\nlayer between 0 (bottom) and n_layers-1 (top) (can also be negative counting from -1 at the top layer)\nrequired\n\n\n\n\n\n\n\n\n\nType\nDescription\n\n\n\n\nnumpy.numpy.array(int)\nelement ids\n\n\n\n\n\n\n\nspatial.GeometryFMVerticalColumn.to_2d_geometry()\nextract 2d geometry from 3d geometry\n\n\n\n\n\nType\nDescription\n\n\n\n\nUnstructuredGeometry\n2d geometry (bottom nodes)"
    +    "text": "spatial.GeometryFMVerticalColumn(self, *, node_coordinates, element_table, codes=None, projection='LONG/LAT', dfsu_type=DfsuFileType.Dfsu3DSigma, element_ids=None, node_ids=None, n_layers=1, n_sigma=None, validate=True, reindex=False)\nA 3d geometry with consisting of a single vertical column\n\n\n\n\n\nName\nDescription\n\n\n\n\nbottom_elements\nList of 3d element ids of bottom layer\n\n\ncodes\nNode codes of all nodes (0=water, 1=land, 2…=open boundaries)\n\n\ne2_e3_table\nThe 2d-to-3d element connectivity table for a 3d object\n\n\nelem2d_ids\nThe associated 2d element id for each 3d element\n\n\nelement_coordinates\nCenter coordinates of each element\n\n\nis_geo\nAre coordinates geographical (LONG/LAT)?\n\n\nis_local_coordinates\nAre coordinates relative (NON-UTM)?\n\n\nlayer_ids\nThe layer number (0=bottom, 1, 2, …) for each 3d element\n\n\nmax_nodes_per_element\nThe maximum number of nodes for an element\n\n\nn_elements\nNumber of 3d elements\n\n\nn_layers\nMaximum number of layers\n\n\nn_layers_per_column\nList of number of layers for each column\n\n\nn_sigma_layers\nNumber of sigma layers\n\n\nn_z_layers\nMaximum number of z-layers\n\n\nprojection\nThe projection\n\n\nprojection_string\nThe projection string\n\n\ntop_elements\nList of 3d element ids of surface layer\n\n\n\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nget_layer_elements\n3d element ids for one (or more) specific layer(s)\n\n\nto_2d_geometry\nextract 2d geometry from 3d geometry\n\n\n\n\n\nspatial.GeometryFMVerticalColumn.get_layer_elements(layers)\n3d element ids for one (or more) specific layer(s)\n\n\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nlayers\nint or list(int)\nlayer between 0 (bottom) and n_layers-1 (top) (can also be negative counting from -1 at the top layer)\nrequired\n\n\n\n\n\n\n\n\n\nType\nDescription\n\n\n\n\nnumpy.numpy.array(int)\nelement ids\n\n\n\n\n\n\n\nspatial.GeometryFMVerticalColumn.to_2d_geometry()\nextract 2d geometry from 3d geometry\n\n\n\n\n\nType\nDescription\n\n\n\n\nUnstructuredGeometry\n2d geometry (bottom nodes)"
       },
       {
         "objectID": "api/spatial.GeometryFMVerticalColumn.html#attributes",
    @@ -1663,7 +1663,7 @@
         "href": "api/spatial.GeometryFMVerticalColumn.html#methods",
         "title": "spatial.GeometryFMVerticalColumn",
         "section": "",
    -    "text": "Name\nDescription\n\n\n\n\nget_layer_elements\n3d element ids for one (or more) specific layer(s)\n\n\nto_2d_geometry\nextract 2d geometry from 3d geometry\n\n\n\n\n\nspatial.GeometryFMVerticalColumn.get_layer_elements(layers, layer=None)\n3d element ids for one (or more) specific layer(s)\n\n\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nlayers\nint or list(int)\nlayer between 0 (bottom) and n_layers-1 (top) (can also be negative counting from -1 at the top layer)\nrequired\n\n\n\n\n\n\n\n\n\nType\nDescription\n\n\n\n\nnumpy.numpy.array(int)\nelement ids\n\n\n\n\n\n\n\nspatial.GeometryFMVerticalColumn.to_2d_geometry()\nextract 2d geometry from 3d geometry\n\n\n\n\n\nType\nDescription\n\n\n\n\nUnstructuredGeometry\n2d geometry (bottom nodes)"
    +    "text": "Name\nDescription\n\n\n\n\nget_layer_elements\n3d element ids for one (or more) specific layer(s)\n\n\nto_2d_geometry\nextract 2d geometry from 3d geometry\n\n\n\n\n\nspatial.GeometryFMVerticalColumn.get_layer_elements(layers)\n3d element ids for one (or more) specific layer(s)\n\n\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nlayers\nint or list(int)\nlayer between 0 (bottom) and n_layers-1 (top) (can also be negative counting from -1 at the top layer)\nrequired\n\n\n\n\n\n\n\n\n\nType\nDescription\n\n\n\n\nnumpy.numpy.array(int)\nelement ids\n\n\n\n\n\n\n\nspatial.GeometryFMVerticalColumn.to_2d_geometry()\nextract 2d geometry from 3d geometry\n\n\n\n\n\nType\nDescription\n\n\n\n\nUnstructuredGeometry\n2d geometry (bottom nodes)"
       },
       {
         "objectID": "api/EUMType.html",
    @@ -1972,7 +1972,7 @@
         "href": "api/dfsu.Dfsu3D.html",
         "title": "dfsu.Dfsu3D",
         "section": "",
    -    "text": "dfsu.Dfsu3D(self, filename)\n\n\n\n\n\nName\nDescription\n\n\n\n\nboundary_codes\nUnique list of boundary codes\n\n\nboundary_polylines\nLists of closed polylines defining domain outline\n\n\ndeletevalue\nFile delete value\n\n\nelement_coordinates\nCenter coordinates of each element\n\n\nelement_table\nElement to node connectivity\n\n\nend_time\nFile end time\n\n\ngeometry2d\nThe 2d geometry for a 3d object\n\n\nis_geo\nAre coordinates geographical (LONG/LAT)?\n\n\nis_layered\nType is layered dfsu (3d, vertical profile or vertical column)\n\n\nis_local_coordinates\nAre coordinates relative (NON-UTM)?\n\n\nis_spectral\nType is spectral dfsu (point, line or area spectrum)\n\n\nis_tri_only\nDoes the mesh consist of triangles only?\n\n\nitems\nList of items\n\n\nmax_nodes_per_element\nThe maximum number of nodes for an element\n\n\nn_elements\nNumber of elements\n\n\nn_items\nNumber of items\n\n\nn_layers\nMaximum number of layers\n\n\nn_nodes\nNumber of nodes\n\n\nn_sigma_layers\nNumber of sigma layers\n\n\nn_timesteps\nNumber of time steps\n\n\nn_z_layers\nMaximum number of z-layers\n\n\nnode_coordinates\nCoordinates (x,y,z) of all nodes\n\n\nprojection_string\nThe projection string\n\n\nstart_time\nFile start time\n\n\ntimestep\nTime step size in seconds\n\n\ntype_name\nType name, e.g. Mesh, Dfsu2D\n\n\nvalid_codes\nUnique list of node codes\n\n\n\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nextract_surface_elevation_from_3d\nExtract surface elevation from a 3d dfsu file (based on zn)\n\n\nread\nRead data from a dfsu file\n\n\nto_mesh\nwrite object to mesh file\n\n\n\n\n\ndfsu.Dfsu3D.extract_surface_elevation_from_3d(n_nearest=4)\nExtract surface elevation from a 3d dfsu file (based on zn) to a new 2d dfsu file with a surface elevation item.\n\n\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nn_nearest\nint\nnumber of points for spatial interpolation (inverse_distance), default=4\n4\n\n\n\n\n\n\n\ndfsu.Dfsu3D.read(items=None, time=None, elements=None, area=None, x=None, y=None, z=None, layers=None, keepdims=False, dtype=np.float32, error_bad_data=True, fill_bad_data_value=np.nan)\nRead data from a dfsu file\n\n\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nitems\n\nRead only selected items, by number (0-based), or by name\nNone\n\n\ntime\n\nRead only selected time steps, by default None (=all)\nNone\n\n\nkeepdims\n\nWhen reading a single time step only, should the time-dimension be kept in the returned Dataset? 
by default: False\nFalse\n\n\narea\n\nRead only data inside (horizontal) area given as a bounding box (tuple with left, lower, right, upper) or as list of coordinates for a polygon, by default None\nNone\n\n\nx\n\nRead only data for elements containing the (x,y,z) points(s), by default None\nNone\n\n\ny\n\nRead only data for elements containing the (x,y,z) points(s), by default None\nNone\n\n\nz\n\nRead only data for elements containing the (x,y,z) points(s), by default None\nNone\n\n\nlayers\n\nRead only data for specific layers, by default None\nNone\n\n\nelements\ntyping.Collection[int] | None\nRead only selected element ids, by default None\nNone\n\n\nerror_bad_data\n\nraise error if data is corrupt, by default True,\nTrue\n\n\nfill_bad_data_value\n\nfill value for to impute corrupt data, used in conjunction with error_bad_data=False default np.nan\nnp.nan\n\n\n\n\n\n\n\n\n\nType\nDescription\n\n\n\n\nmikeio.dataset.Dataset\nA Dataset with data dimensions [t,elements]\n\n\n\n\n\n\n\ndfsu.Dfsu3D.to_mesh(outfilename)\nwrite object to mesh file\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\noutfilename\nstr\npath to file to be written\nrequired"
    +    "text": "dfsu.Dfsu3D(self, filename)\n\n\n\n\n\nName\nDescription\n\n\n\n\nboundary_codes\nUnique list of boundary codes\n\n\nboundary_polylines\nLists of closed polylines defining domain outline\n\n\ndeletevalue\nFile delete value\n\n\nelement_coordinates\nCenter coordinates of each element\n\n\nelement_table\nElement to node connectivity\n\n\nend_time\nFile end time\n\n\ngeometry2d\nThe 2d geometry for a 3d object\n\n\nis_geo\nAre coordinates geographical (LONG/LAT)?\n\n\nis_layered\nType is layered dfsu (3d, vertical profile or vertical column)\n\n\nis_local_coordinates\nAre coordinates relative (NON-UTM)?\n\n\nis_spectral\nType is spectral dfsu (point, line or area spectrum)\n\n\nis_tri_only\nDoes the mesh consist of triangles only?\n\n\nitems\nList of items\n\n\nmax_nodes_per_element\nThe maximum number of nodes for an element\n\n\nn_elements\nNumber of elements\n\n\nn_items\nNumber of items\n\n\nn_layers\nMaximum number of layers\n\n\nn_nodes\nNumber of nodes\n\n\nn_sigma_layers\nNumber of sigma layers\n\n\nn_timesteps\nNumber of time steps\n\n\nn_z_layers\nMaximum number of z-layers\n\n\nnode_coordinates\nCoordinates (x,y,z) of all nodes\n\n\nprojection_string\nThe projection string\n\n\nstart_time\nFile start time\n\n\ntimestep\nTime step size in seconds\n\n\ntype_name\nType name, e.g. Mesh, Dfsu2D\n\n\nvalid_codes\nUnique list of node codes\n\n\n\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nextract_surface_elevation_from_3d\nExtract surface elevation from a 3d dfsu file (based on zn)\n\n\nread\nRead data from a dfsu file\n\n\nto_mesh\nwrite object to mesh file\n\n\n\n\n\ndfsu.Dfsu3D.extract_surface_elevation_from_3d(n_nearest=4)\nExtract surface elevation from a 3d dfsu file (based on zn) to a new 2d dfsu file with a surface elevation item.\n\n\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nn_nearest\nint\nnumber of points for spatial interpolation (inverse_distance), default=4\n4\n\n\n\n\n\n\n\ndfsu.Dfsu3D.read(items=None, time=None, elements=None, area=None, x=None, y=None, z=None, layers=None, keepdims=False, dtype=np.float32, error_bad_data=True, fill_bad_data_value=np.nan)\nRead data from a dfsu file\n\n\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nitems\nstr | int | typing.Sequence[str | int] | None\nRead only selected items, by number (0-based), or by name\nNone\n\n\ntime\nint | str | slice | None\nRead only selected time steps, by default None (=all)\nNone\n\n\nkeepdims\nbool\nWhen reading a single time step only, should the time-dimension be kept in the returned Dataset? 
by default: False\nFalse\n\n\narea\ntyping.Tuple[float, float, float, float] | None\nRead only data inside (horizontal) area given as a bounding box (tuple with left, lower, right, upper) or as list of coordinates for a polygon, by default None\nNone\n\n\nx\nfloat | None\nRead only data for elements containing the (x,y,z) points(s)\nNone\n\n\ny\nfloat | None\nRead only data for elements containing the (x,y,z) points(s)\nNone\n\n\nz\nfloat | None\nRead only data for elements containing the (x,y,z) points(s)\nNone\n\n\nlayers\nint | str | typing.Sequence[int] | None\nRead only data for specific layers, by default None\nNone\n\n\nelements\ntyping.Collection[int] | None\nRead only selected element ids, by default None\nNone\n\n\nerror_bad_data\nbool\nraise error if data is corrupt, by default True,\nTrue\n\n\nfill_bad_data_value\nfloat\nfill value for to impute corrupt data, used in conjunction with error_bad_data=False default np.nan\nnp.nan\n\n\n\n\n\n\n\n\n\nType\nDescription\n\n\n\n\nmikeio.dataset.Dataset\nA Dataset with data dimensions [t,elements]\n\n\n\n\n\n\n\ndfsu.Dfsu3D.to_mesh(outfilename)\nwrite object to mesh file\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\noutfilename\nstr\npath to file to be written\nrequired"
       },
       {
         "objectID": "api/dfsu.Dfsu3D.html#attributes",
    @@ -1986,14 +1986,14 @@
         "href": "api/dfsu.Dfsu3D.html#methods",
         "title": "dfsu.Dfsu3D",
         "section": "",
    -    "text": "Name\nDescription\n\n\n\n\nextract_surface_elevation_from_3d\nExtract surface elevation from a 3d dfsu file (based on zn)\n\n\nread\nRead data from a dfsu file\n\n\nto_mesh\nwrite object to mesh file\n\n\n\n\n\ndfsu.Dfsu3D.extract_surface_elevation_from_3d(n_nearest=4)\nExtract surface elevation from a 3d dfsu file (based on zn) to a new 2d dfsu file with a surface elevation item.\n\n\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nn_nearest\nint\nnumber of points for spatial interpolation (inverse_distance), default=4\n4\n\n\n\n\n\n\n\ndfsu.Dfsu3D.read(items=None, time=None, elements=None, area=None, x=None, y=None, z=None, layers=None, keepdims=False, dtype=np.float32, error_bad_data=True, fill_bad_data_value=np.nan)\nRead data from a dfsu file\n\n\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nitems\n\nRead only selected items, by number (0-based), or by name\nNone\n\n\ntime\n\nRead only selected time steps, by default None (=all)\nNone\n\n\nkeepdims\n\nWhen reading a single time step only, should the time-dimension be kept in the returned Dataset? by default: False\nFalse\n\n\narea\n\nRead only data inside (horizontal) area given as a bounding box (tuple with left, lower, right, upper) or as list of coordinates for a polygon, by default None\nNone\n\n\nx\n\nRead only data for elements containing the (x,y,z) points(s), by default None\nNone\n\n\ny\n\nRead only data for elements containing the (x,y,z) points(s), by default None\nNone\n\n\nz\n\nRead only data for elements containing the (x,y,z) points(s), by default None\nNone\n\n\nlayers\n\nRead only data for specific layers, by default None\nNone\n\n\nelements\ntyping.Collection[int] | None\nRead only selected element ids, by default None\nNone\n\n\nerror_bad_data\n\nraise error if data is corrupt, by default True,\nTrue\n\n\nfill_bad_data_value\n\nfill value for to impute corrupt data, used in conjunction with error_bad_data=False default np.nan\nnp.nan\n\n\n\n\n\n\n\n\n\nType\nDescription\n\n\n\n\nmikeio.dataset.Dataset\nA Dataset with data dimensions [t,elements]\n\n\n\n\n\n\n\ndfsu.Dfsu3D.to_mesh(outfilename)\nwrite object to mesh file\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\noutfilename\nstr\npath to file to be written\nrequired"
    +    "text": "Name\nDescription\n\n\n\n\nextract_surface_elevation_from_3d\nExtract surface elevation from a 3d dfsu file (based on zn)\n\n\nread\nRead data from a dfsu file\n\n\nto_mesh\nwrite object to mesh file\n\n\n\n\n\ndfsu.Dfsu3D.extract_surface_elevation_from_3d(n_nearest=4)\nExtract surface elevation from a 3d dfsu file (based on zn) to a new 2d dfsu file with a surface elevation item.\n\n\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nn_nearest\nint\nnumber of points for spatial interpolation (inverse_distance), default=4\n4\n\n\n\n\n\n\n\ndfsu.Dfsu3D.read(items=None, time=None, elements=None, area=None, x=None, y=None, z=None, layers=None, keepdims=False, dtype=np.float32, error_bad_data=True, fill_bad_data_value=np.nan)\nRead data from a dfsu file\n\n\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nitems\nstr | int | typing.Sequence[str | int] | None\nRead only selected items, by number (0-based), or by name\nNone\n\n\ntime\nint | str | slice | None\nRead only selected time steps, by default None (=all)\nNone\n\n\nkeepdims\nbool\nWhen reading a single time step only, should the time-dimension be kept in the returned Dataset? by default: False\nFalse\n\n\narea\ntyping.Tuple[float, float, float, float] | None\nRead only data inside (horizontal) area given as a bounding box (tuple with left, lower, right, upper) or as list of coordinates for a polygon, by default None\nNone\n\n\nx\nfloat | None\nRead only data for elements containing the (x,y,z) points(s)\nNone\n\n\ny\nfloat | None\nRead only data for elements containing the (x,y,z) points(s)\nNone\n\n\nz\nfloat | None\nRead only data for elements containing the (x,y,z) points(s)\nNone\n\n\nlayers\nint | str | typing.Sequence[int] | None\nRead only data for specific layers, by default None\nNone\n\n\nelements\ntyping.Collection[int] | None\nRead only selected element ids, by default None\nNone\n\n\nerror_bad_data\nbool\nraise error if data is corrupt, by default True,\nTrue\n\n\nfill_bad_data_value\nfloat\nfill value for to impute corrupt data, used in conjunction with error_bad_data=False default np.nan\nnp.nan\n\n\n\n\n\n\n\n\n\nType\nDescription\n\n\n\n\nmikeio.dataset.Dataset\nA Dataset with data dimensions [t,elements]\n\n\n\n\n\n\n\ndfsu.Dfsu3D.to_mesh(outfilename)\nwrite object to mesh file\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\noutfilename\nstr\npath to file to be written\nrequired"
       },
       {
         "objectID": "api/spatial.GeometryFMVerticalProfile.html",
         "href": "api/spatial.GeometryFMVerticalProfile.html",
         "title": "spatial.GeometryFMVerticalProfile",
         "section": "",
    -    "text": "spatial.GeometryFMVerticalProfile(self, node_coordinates, element_table, codes=None, projection='LONG/LAT', dfsu_type=None, element_ids=None, node_ids=None, n_layers=1, n_sigma=None, validate=True, reindex=False)\n\n\n\n\n\nName\nDescription\n\n\n\n\nbottom_elements\nList of 3d element ids of bottom layer\n\n\ncodes\nNode codes of all nodes (0=water, 1=land, 2…=open boundaries)\n\n\ne2_e3_table\nThe 2d-to-3d element connectivity table for a 3d object\n\n\nelem2d_ids\nThe associated 2d element id for each 3d element\n\n\nelement_coordinates\nCenter coordinates of each element\n\n\nis_geo\nAre coordinates geographical (LONG/LAT)?\n\n\nis_local_coordinates\nAre coordinates relative (NON-UTM)?\n\n\nlayer_ids\nThe layer number (0=bottom, 1, 2, …) for each 3d element\n\n\nmax_nodes_per_element\nThe maximum number of nodes for an element\n\n\nn_elements\nNumber of 3d elements\n\n\nn_layers\nMaximum number of layers\n\n\nn_layers_per_column\nList of number of layers for each column\n\n\nn_sigma_layers\nNumber of sigma layers\n\n\nn_z_layers\nMaximum number of z-layers\n\n\nprojection\nThe projection\n\n\nprojection_string\nThe projection string\n\n\ntop_elements\nList of 3d element ids of surface layer\n\n\n\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nget_layer_elements\n3d element ids for one (or more) specific layer(s)\n\n\nget_nearest_relative_distance\nFor a point near a transect, find the nearest relative distance\n\n\nto_2d_geometry\nextract 2d geometry from 3d geometry\n\n\n\n\n\nspatial.GeometryFMVerticalProfile.get_layer_elements(layers, layer=None)\n3d element ids for one (or more) specific layer(s)\n\n\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nlayers\nint or list(int)\nlayer between 0 (bottom) and n_layers-1 (top) (can also be negative counting from -1 at the top layer)\nrequired\n\n\n\n\n\n\n\n\n\nType\nDescription\n\n\n\n\nnumpy.numpy.array(int)\nelement ids\n\n\n\n\n\n\n\nspatial.GeometryFMVerticalProfile.get_nearest_relative_distance(coords)\nFor a point near a transect, find the nearest relative distance for showing position on transect plot.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\ncoords\n[float, float]\nx,y-coordinate of point\nrequired\n\n\n\n\n\n\n\n\n\nType\nDescription\n\n\n\n\nfloat\nrelative distance in meters from start of transect\n\n\n\n\n\n\n\nspatial.GeometryFMVerticalProfile.to_2d_geometry()\nextract 2d geometry from 3d geometry\n\n\n\n\n\nType\nDescription\n\n\n\n\nUnstructuredGeometry\n2d geometry (bottom nodes)"
    +    "text": "spatial.GeometryFMVerticalProfile(self, node_coordinates, element_table, codes=None, projection='LONG/LAT', dfsu_type=DfsuFileType.Dfsu3DSigma, element_ids=None, node_ids=None, n_layers=1, n_sigma=None, validate=True, reindex=False)\n\n\n\n\n\nName\nDescription\n\n\n\n\nbottom_elements\nList of 3d element ids of bottom layer\n\n\ncodes\nNode codes of all nodes (0=water, 1=land, 2…=open boundaries)\n\n\ne2_e3_table\nThe 2d-to-3d element connectivity table for a 3d object\n\n\nelem2d_ids\nThe associated 2d element id for each 3d element\n\n\nelement_coordinates\nCenter coordinates of each element\n\n\nis_geo\nAre coordinates geographical (LONG/LAT)?\n\n\nis_local_coordinates\nAre coordinates relative (NON-UTM)?\n\n\nlayer_ids\nThe layer number (0=bottom, 1, 2, …) for each 3d element\n\n\nmax_nodes_per_element\nThe maximum number of nodes for an element\n\n\nn_elements\nNumber of 3d elements\n\n\nn_layers\nMaximum number of layers\n\n\nn_layers_per_column\nList of number of layers for each column\n\n\nn_sigma_layers\nNumber of sigma layers\n\n\nn_z_layers\nMaximum number of z-layers\n\n\nprojection\nThe projection\n\n\nprojection_string\nThe projection string\n\n\ntop_elements\nList of 3d element ids of surface layer\n\n\n\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nget_layer_elements\n3d element ids for one (or more) specific layer(s)\n\n\nget_nearest_relative_distance\nFor a point near a transect, find the nearest relative distance\n\n\nto_2d_geometry\nextract 2d geometry from 3d geometry\n\n\n\n\n\nspatial.GeometryFMVerticalProfile.get_layer_elements(layers)\n3d element ids for one (or more) specific layer(s)\n\n\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nlayers\nint or list(int)\nlayer between 0 (bottom) and n_layers-1 (top) (can also be negative counting from -1 at the top layer)\nrequired\n\n\n\n\n\n\n\n\n\nType\nDescription\n\n\n\n\nnumpy.numpy.array(int)\nelement ids\n\n\n\n\n\n\n\nspatial.GeometryFMVerticalProfile.get_nearest_relative_distance(coords)\nFor a point near a transect, find the nearest relative distance for showing position on transect plot.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\ncoords\n[float, float]\nx,y-coordinate of point\nrequired\n\n\n\n\n\n\n\n\n\nType\nDescription\n\n\n\n\nfloat\nrelative distance in meters from start of transect\n\n\n\n\n\n\n\nspatial.GeometryFMVerticalProfile.to_2d_geometry()\nextract 2d geometry from 3d geometry\n\n\n\n\n\nType\nDescription\n\n\n\n\nUnstructuredGeometry\n2d geometry (bottom nodes)"
       },
       {
         "objectID": "api/spatial.GeometryFMVerticalProfile.html#attributes",
    @@ -2007,7 +2007,7 @@
         "href": "api/spatial.GeometryFMVerticalProfile.html#methods",
         "title": "spatial.GeometryFMVerticalProfile",
         "section": "",
    -    "text": "Name\nDescription\n\n\n\n\nget_layer_elements\n3d element ids for one (or more) specific layer(s)\n\n\nget_nearest_relative_distance\nFor a point near a transect, find the nearest relative distance\n\n\nto_2d_geometry\nextract 2d geometry from 3d geometry\n\n\n\n\n\nspatial.GeometryFMVerticalProfile.get_layer_elements(layers, layer=None)\n3d element ids for one (or more) specific layer(s)\n\n\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nlayers\nint or list(int)\nlayer between 0 (bottom) and n_layers-1 (top) (can also be negative counting from -1 at the top layer)\nrequired\n\n\n\n\n\n\n\n\n\nType\nDescription\n\n\n\n\nnumpy.numpy.array(int)\nelement ids\n\n\n\n\n\n\n\nspatial.GeometryFMVerticalProfile.get_nearest_relative_distance(coords)\nFor a point near a transect, find the nearest relative distance for showing position on transect plot.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\ncoords\n[float, float]\nx,y-coordinate of point\nrequired\n\n\n\n\n\n\n\n\n\nType\nDescription\n\n\n\n\nfloat\nrelative distance in meters from start of transect\n\n\n\n\n\n\n\nspatial.GeometryFMVerticalProfile.to_2d_geometry()\nextract 2d geometry from 3d geometry\n\n\n\n\n\nType\nDescription\n\n\n\n\nUnstructuredGeometry\n2d geometry (bottom nodes)"
    +    "text": "Name\nDescription\n\n\n\n\nget_layer_elements\n3d element ids for one (or more) specific layer(s)\n\n\nget_nearest_relative_distance\nFor a point near a transect, find the nearest relative distance\n\n\nto_2d_geometry\nextract 2d geometry from 3d geometry\n\n\n\n\n\nspatial.GeometryFMVerticalProfile.get_layer_elements(layers)\n3d element ids for one (or more) specific layer(s)\n\n\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nlayers\nint or list(int)\nlayer between 0 (bottom) and n_layers-1 (top) (can also be negative counting from -1 at the top layer)\nrequired\n\n\n\n\n\n\n\n\n\nType\nDescription\n\n\n\n\nnumpy.numpy.array(int)\nelement ids\n\n\n\n\n\n\n\nspatial.GeometryFMVerticalProfile.get_nearest_relative_distance(coords)\nFor a point near a transect, find the nearest relative distance for showing position on transect plot.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\ncoords\n[float, float]\nx,y-coordinate of point\nrequired\n\n\n\n\n\n\n\n\n\nType\nDescription\n\n\n\n\nfloat\nrelative distance in meters from start of transect\n\n\n\n\n\n\n\nspatial.GeometryFMVerticalProfile.to_2d_geometry()\nextract 2d geometry from 3d geometry\n\n\n\n\n\nType\nDescription\n\n\n\n\nUnstructuredGeometry\n2d geometry (bottom nodes)"
       },
       {
         "objectID": "api/PfsDocument.html",
    diff --git a/user-guide/dataarray.html b/user-guide/dataarray.html
    index 05959f010..80dbbae7e 100644
    --- a/user-guide/dataarray.html
    +++ b/user-guide/dataarray.html
    @@ -388,7 +388,7 @@ 

    DataArray

  • values - a numpy.ndarray
  • Use DataArray’s string representation to get an overview of the DataArray

    -
    +
    import mikeio
     
     ds = mikeio.read("../data/HD2D.dfsu")
    @@ -404,7 +404,7 @@ 

    DataArray

    Temporal selection

    -
    +
    da.sel(time="1985-08-06 12:00")
    <mikeio.DataArray>
    @@ -415,7 +415,7 @@ 

    +
    da["1985-8-7":]
    <mikeio.DataArray>
    @@ -429,7 +429,7 @@ 

    Spatial selection

    The sel method finds the nearest element.

    -
    +
    da.sel(x=607002, y=6906734)
    <mikeio.DataArray>
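Temporal and spatial selection can be chained; a minimal sketch with the same HD2D.dfsu DataArray as above.

import mikeio

da = mikeio.read("../data/HD2D.dfsu")[0]

# nearest element to the point, then a time subset of the resulting series
ts = da.sel(x=607002, y=6906734).sel(time=slice("1985-08-06 12:00", None))
ts.plot()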
    @@ -444,17 +444,17 @@ 

    Modifying values

    You can modify the values of a DataArray by changing its values:

    -
    +
    da.values[0, 3] = 5.0

If you wish to change the values of a subset of the DataArray, you should be aware of the difference between a view and a copy of the data. As in NumPy, MIKE IO selection methods return a view of the data when using a single index or a slice, but a copy of the data when using fancy indexing (a list of indices or boolean indexing). Note that prior to release 1.3, MIKE IO would always return a copy.

It is recommended to change the values using the values property directly on the original DataArray (as above), but it is also possible to change the values of the original DataArray by working on a subset DataArray, provided the subset was selected with a single index or a slice as explained above.

    -
    +
    da_sub = da.isel(time=0)
     da_sub.values[:] = 5.0    # will change da

    Fancy indexing will return a copy and therefore not change the original:

    -
    +
    da_sub = da.isel(time=[0,1,2])
     da_sub.values[:] = 5.0    # will NOT change da
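To check whether a selection is a view or a copy, NumPy's shares_memory can be used; a small sketch with da as read above.

import numpy as np

sub_slice = da.isel(time=0)          # single index -> view
sub_fancy = da.isel(time=[0, 1, 2])  # fancy indexing -> copy

print(np.shares_memory(sub_slice.values, da.values))  # expected True (view)
print(np.shares_memory(sub_fancy.values, da.values))  # expected False (copy)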
    @@ -462,7 +462,7 @@

    Modifying values

    Plotting

The plotting of a DataArray is context-aware, meaning that the plotting behaviour depends on the geometry of the DataArray being plotted.

    -
    +
    da.plot()
    @@ -472,7 +472,7 @@

    -
    +
    da.plot.contourf()
    @@ -482,7 +482,7 @@

    -
    +
    da.plot.mesh()
    diff --git a/user-guide/dfs0.html b/user-guide/dfs0.html index 5a95b718b..a843c08d1 100644 --- a/user-guide/dfs0.html +++ b/user-guide/dfs0.html @@ -381,7 +381,7 @@

    Dfs0

    Read Dfs0 to Dataset

    -
    +
    import mikeio
     
     ds = mikeio.read("../data/da_diagnostic.dfs0")
    @@ -401,7 +401,7 @@ 

    Read Dfs0 to Dataset<

    From Dfs0 to pandas DataFrame

    -
    +
    df = ds.to_dataframe()
     df.head()
    @@ -478,7 +478,7 @@

    From pandas

    MIKE IO adds a new method to the DataFrame called to_dfs0. This method is used to save the DataFrame to a dfs0 file. (This method becomes available after importing the mikeio module.)

    -
    +
    import pandas as pd
     
     df = pd.read_csv(
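A stand-alone sketch of the to_dfs0 accessor described above, using a small synthetic DataFrame with a datetime index instead of the CSV file; the output filename is hypothetical.

import numpy as np
import pandas as pd
import mikeio  # importing mikeio makes to_dfs0 available on DataFrame (per the text above)

time = pd.date_range("2021-01-01", periods=24, freq="H")
df = pd.DataFrame({"WaterLevel": np.random.randn(24)}, index=time)

df.to_dfs0("synthetic.dfs0")   # hypothetical output path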
    diff --git a/user-guide/dfs1.html b/user-guide/dfs1.html
    index 3f77f1d35..86c938148 100644
    --- a/user-guide/dfs1.html
    +++ b/user-guide/dfs1.html
    @@ -368,7 +368,7 @@ 

    Dfs1

A dfs1 file contains node-based line series data. Dfs1 files do not contain enough metadata to determine their geographical position, but they do have a relative distance from the origin.

    -
    +
    import mikeio
     
     ds = mikeio.read("../data/tide1.dfs1")
    @@ -385,7 +385,7 @@ 

    Dfs1

    Grid 1D

    The spatial information is available in the geometry attribute (accessible from Dfs1, Dataset, and DataArray), which in the case of a dfs1 file is a Grid1D geometry.

    -
    +
    ds.geometry
    <mikeio.Grid1D>
    diff --git a/user-guide/dfs2.html b/user-guide/dfs2.html
    index f38e6897b..ef6e43015 100644
    --- a/user-guide/dfs2.html
    +++ b/user-guide/dfs2.html
    @@ -370,7 +370,7 @@ 

    Dfs2

    A dfs2 file is also called a grid series file. Values in a dfs2 file are ‘element based’, i.e. values are defined in the centre of each grid cell.

    -
    +
    import mikeio
     ds = mikeio.read("../data/gebco_sound.dfs2")
     ds
    @@ -386,7 +386,7 @@

    Dfs2

    Subset in space

The most convenient way to subset in space is to use the sel method, which returns a new (smaller) dataset that can be further processed or written to disk using the to_dfs method.

    -
    +
    ds.geometry
    <mikeio.Grid2D>
    @@ -395,7 +395,7 @@ 

    Subset in space

    projection: LONG/LAT
    -
    +
    ds_aoi = ds.sel(x=slice(12.5, 13.0), y=slice(55.5, 56.0))
     ds_aoi.geometry
    @@ -406,7 +406,7 @@

    Subset in space

    In order to specify an open-ended subset (i.e. where the end of the subset is the end of the domain), use None as the end of the slice.

    -
    +
    ds.sel(x=slice(None, 13.0))
    <mikeio.Dataset>
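Combining an open-ended slice with to_dfs, as described above; the bounds refer to the gebco_sound.dfs2 LONG/LAT grid, and the output filename is hypothetical.

import mikeio

ds = mikeio.read("../data/gebco_sound.dfs2")

# everything west of 13.0 degrees, from 55.5 degrees northwards
ds_aoi = ds.sel(x=slice(None, 13.0), y=slice(55.5, None))
ds_aoi.to_dfs("gebco_sound_aoi.dfs2")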
    @@ -421,7 +421,7 @@ 

    Subset in space

    Grid2D

    The spatial information is available in the geometry attribute (accessible from Dfs2, Dataset, and DataArray), which in the case of a dfs2 file is a Grid2D geometry.

    -
    +
    ds.geometry
    <mikeio.Grid2D>
    diff --git a/user-guide/dfsu.html b/user-guide/dfsu.html
    index 2aa7548b2..653415940 100644
    --- a/user-guide/dfsu.html
    +++ b/user-guide/dfsu.html
    @@ -517,7 +517,7 @@ 

    MIKE IO Fle

    These properties and methods are accessible from the geometry, but also from the Mesh/Dfsu object.

If a .dfsu file is read with mikeio.read, the returned Dataset ds will contain a Flexible Mesh Geometry. If a .dfsu or a .mesh file is opened with mikeio.open, the returned object will also contain a Flexible Mesh Geometry.

    -
    +
    import mikeio
     
     ds = mikeio.read("../data/oresundHD_run1.dfsu")
    @@ -529,7 +529,7 @@ 

    MIKE IO Fle projection: UTM-33

    -
    +
    dfs = mikeio.open("../data/oresundHD_run1.dfsu")
     dfs.geometry
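The same Flexible Mesh Geometry is reachable both from the Dataset and from the opened Dfsu object; a sketch using properties listed in the API tables above.

import mikeio

ds = mikeio.read("../data/oresundHD_run1.dfsu")
dfs = mikeio.open("../data/oresundHD_run1.dfsu")

# both expose the same unstructured geometry
print(ds.geometry.n_elements, dfs.geometry.n_elements)
print(ds.geometry.element_coordinates[:3])   # centre coordinates of the first elements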
    diff --git a/user-guide/eum.html b/user-guide/eum.html index 9c9fe1462..4bba9186f 100644 --- a/user-guide/eum.html +++ b/user-guide/eum.html @@ -374,7 +374,7 @@

    EUM

  • unit - an EUMUnit
• The ItemInfo class has sensible defaults, so you can specify only a name or a type. If you don’t specify a unit, the default unit for that type will be used.

    -
    +
    from mikeio import ItemInfo, EUMType, EUMUnit
     
     item = ItemInfo("Viken", EUMType.Water_Level)
    @@ -383,47 +383,47 @@ 

    EUM

    Viken <Water Level> (meter)
    -
    +
    ItemInfo(EUMType.Wind_speed)
    Wind speed <Wind speed> (meter per sec)
    -
    +
    ItemInfo("Viken", EUMType.Water_Level, EUMUnit.feet)
    Viken <Water Level> (feet)

    Matching units for specific type:

    -
    +
    EUMType.Wind_speed.units
    [meter per sec, feet per sec, knot, km per hour, miles per hour]

    Default unit:

    -
    +
    EUMType.Precipitation_Rate.units[0]
    mm per day
    -
    +
    unit = EUMType.Precipitation_Rate.units[0]
     unit
    mm per day
    -
    +
    type(unit)
    <enum 'EUMUnit'>

An EUMUnit is encoded as an integer, which you can use in some MIKE applications.

    -
    +
    int(unit)
     2004
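Since the int(unit) output above suggests EUMUnit behaves like a standard IntEnum, the member can be reconstructed from its integer code; a sketch under that assumption.

from mikeio import EUMType, EUMUnit

unit = EUMType.Precipitation_Rate.units[0]   # mm per day
code = int(unit)                             # 2004, as shown above

# round-trip: the integer code maps back to the same enum member
print(EUMUnit(code) == unit)   # expected True if EUMUnit is a standard IntEnum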
    @@ -433,7 +433,7 @@

    EUM