Dfsu - 2D interpolation

import mikeio

ds = mikeio.read("../data/wind_north_sea.dfsu", items="Wind speed")
ds

0: Wind speed <Wind speed> (meter per sec)

da = ds.Wind_speed
da.plot();
Interpolate to grid

First get an overset grid covering the domain, then interpolate all data to the new grid and plot.

g = da.geometry.get_overset_grid(dx=0.1)
g

da_grid = da.interp_like(g)
da_grid

da_grid.plot();
Save to dfs2 file

da_grid.to_dfs("wind_north_sea_interpolated.dfs2")

Save to NetCDF

xr_da = da_grid.to_xarray()
xr_da.to_netcdf("wind_north_sea_interpolated.nc")
Save to GeoTiff

This section requires the rasterio package.

import numpy as np
import rasterio
from rasterio.transform import from_origin
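The body of this cell is missing from the extract; the following is a minimal sketch of how the gridded data could be written with rasterio. The output filename "wind.tif" and the choice of the first timestep are assumptions; g is the overset Grid2D created above.

# Hypothetical sketch (not the original cell): write timestep 0 to GeoTiff.
data = np.flipud(da_grid[0].to_numpy())  # GeoTiff rows run north -> south
transform = from_origin(g.bbox.left, g.bbox.top, g.dx, g.dy)
with rasterio.open("wind.tif", "w", driver="GTiff",
                   height=g.ny, width=g.nx, count=1,
                   dtype=data.dtype, crs="+proj=latlong",
                   transform=transform) as dst:
    dst.write(data, 1)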
Interpolate to other mesh

Interpolate the data from this coarse mesh onto a finer resolution mesh.

msh = mikeio.Mesh('../data/north_sea_2.mesh')
msh

projection: LONG/LAT

dsi = da.interp_like(msh)
dsi

geometry: Dfsu2D (2259 elements, 1296 nodes)

da[0].plot(figsize=(9,7), show_mesh=True);

dsi[0].plot(figsize=(9,7), show_mesh=True);
Note: 3 of the new elements are outside the original mesh and data are therefore NaN by default.

nan_elements = np.where(np.isnan(dsi[0].to_numpy()))[0]
nan_elements

array([ 249,  451, 1546])

da.geometry.contains(msh.element_coordinates[nan_elements,:2])

array([False, False, False])
We can force extrapolation to avoid the NaN values.

dat_interp = da.interp_like(msh, extrapolate=True)

n_nan_elements = np.sum(np.isnan(dat_interp.values))
n_nan_elements
Interpolate scatter data to mesh

We want to interpolate scatter data onto an existing mesh and create a new dfsu file with the interpolated data. This uses lower-level private utility methods that are not part of the public API. Interpolating from scatter data will soon be possible in a simpler way.

from mikeio.spatial._utils import dist_in_meters
from mikeio._interpolation import get_idw_interpolant

dfs = mikeio.open("../data/wind_north_sea.dfsu")

dfs.geometry.plot.mesh();

# scatter data: x,y,value for 4 points
scatter = np.array([[1,50,1], [4, 52, 3], [8, 55, 2], [-1, 55, 1.5]])
scatter
For a single element, the steps are:

- calculate the distances to all scatter points
- calculate the IDW interpolant weights
- interpolate

dist = dist_in_meters(scatter[:,:2], dfs.element_coordinates[0,:2])
dist

array([4.00139539, 3.18881018, 6.58769411, 2.69722991])

w = get_idw_interpolant(dist, p=2)
w

array([0.19438779, 0.30607974, 0.07171749, 0.42781498])
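As a sanity check (an assumption about get_idw_interpolant, not stated in the example), these weights match the standard inverse-distance formula w_i = d_i^-p / sum_j d_j^-p with p=2:

w_manual = (1/dist**2) / np.sum(1/dist**2)  # reproduces w above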
np.dot(scatter[:,2], w)  # interpolated value in element 0

1.8977844597276883
Let's do the same for all elements in the mesh and plot in the end.

dati = np.zeros((1, dfs.n_elements))
for j in range(dfs.n_elements):
    dist = dist_in_meters(scatter[:,:2], dfs.element_coordinates[j,:2])
    w = get_idw_interpolant(dist, p=2)
    dati[0,j] = np.dot(scatter[:,2], w)

da = mikeio.DataArray(data=dati, geometry=dfs.geometry, time=dfs.start_time)
da
geometry: Dfsu2D (958 elements, 570 nodes)

da.plot(title="Interpolated scatter data");

da.to_dfs("interpolated_scatter.dfsu")

Clean up

import os
os.remove("wind_north_sea_interpolated.dfs2")
Generic dfs processing

quantile: Create temporal quantiles of dfs file

import matplotlib.pyplot as plt
import mikeio
import mikeio.generic
Concatenation

Take a look at these two files with overlapping timesteps.

t1 = mikeio.read("../data/tide1.dfs1")
t1

0: Level <Water Level> (meter)

t2 = mikeio.read("../data/tide2.dfs1")
t2
Plot one of the points along the line.

plt.plot(t1.time, t1[0].isel(x=1).values, label="File 1")
plt.plot(t2.time, t2[0].isel(x=1).values, 'k+', label="File 2")
plt.legend()

mikeio.generic.concat(infilenames=["../data/tide1.dfs1",
                                   "../data/tide2.dfs1"],
                      outfilename="concat.dfs1")

c = mikeio.read("concat.dfs1")
c[0].isel(x=1).plot()
c
Difference between two files

Take the difference between two dfs files with the same structure - e.g. to see the difference in results between two calibration runs.

fn1 = "../data/oresundHD_run1.dfsu"
fn2 = "../data/oresundHD_run2.dfsu"
fn_diff = "oresundHD_difference.dfsu"
mikeio.generic.diff(fn1, fn2, fn_diff)

_, ax = plt.subplots(1, 3, sharey=True, figsize=(12,5))
da = mikeio.read(fn1, time=-1)[0]
da.plot(vmin=0.06, vmax=0.27, ax=ax[0], title='run 1')
Extract time steps or items

The extract method can pull a subset out of a dfs file:

- a time slice, by specifying start and/or end
- specific items

infile = "../data/tide1.dfs1"
mikeio.generic.extract(infile, "extracted.dfs1", start='2019-01-02')

e = mikeio.read("extracted.dfs1")
e

0: Level <Water Level> (meter)

infile = "../data/oresund_vertical_slice.dfsu"
mikeio.generic.extract(infile, "extracted.dfsu", items='Salinity', end=-2)

e = mikeio.read("extracted.dfsu")
e
Scaling

Adding a constant, e.g. to adjust datum.

ds = mikeio.read("../data/gebco_sound.dfs2")
ds.Elevation[0].plot();

ds['Elevation'][0,104,131].to_numpy()

-1.0

This is the processing step.

mikeio.generic.scale("../data/gebco_sound.dfs2",
                     "gebco_sound_local_datum.dfs2",
                     offset=-2.1)

ds2 = mikeio.read("gebco_sound_local_datum.dfs2")
ds2['Elevation'][0].plot()

ds2['Elevation'][0,104,131].to_numpy()

-3.1
Spatially varying correction

import numpy as np
factor = np.ones_like(ds['Elevation'][0].to_numpy())
factor.shape

Add some spatially varying factors, exaggerated values for educational purpose.

factor[:,0:100] = 5.3
factor[0:40,] = 0.1
factor[150:,150:] = 10.7
The 2d array must first be flipped upside down and then converted to a 1d vector using numpy.ndarray.flatten to match how data is stored in dfs files.
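As a tiny illustration of that ordering (hypothetical values, not part of the example), flipping and flattening puts the last grid row first in the 1d vector:

a = np.array([[1, 2], [3, 4]])  # row 0 = southernmost grid row
np.flipud(a).flatten()          # -> array([3, 4, 1, 2]): northernmost row first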
factor_ud = np.flipud(factor)
factor_vec = factor_ud.flatten()
mikeio.generic.scale("../data/gebco_sound.dfs2",
                     "gebco_sound_spatial.dfs2",
                     factor=factor_vec)

ds3 = mikeio.read("gebco_sound_spatial.dfs2")
ds3.Elevation[0].plot();
Time average

fn = "../data/NorthSea_HD_and_windspeed.dfsu"
fn_avg = "Avg_NorthSea_HD_and_windspeed.dfsu"
mikeio.generic.avg_time(fn, fn_avg)

ds = mikeio.read(fn)
ds.mean(axis=0).describe()  # alternative way of getting the time average

ds_avg = mikeio.read(fn_avg)
ds_avg.describe()
@@ -781,12 +781,12 @@ Time average
Quantile
Example that calculates the 25%, 50% and 75% percentile for all items in a dfsu file.
-
+
= "../data/NorthSea_HD_and_windspeed.dfsu"
fn = "Q_NorthSea_HD_and_windspeed.dfsu"
fn_q =[0.25,0.5,0.75]) mikeio.generic.quantile(fn, fn_q, q
-
+
= mikeio.read(fn_q)
ds ds
@@ -803,7 +803,7 @@ Quantile
5: Quantile 0.75, Wind speed <Wind speed> (meter per sec)
-
+
= ds["Quantile 0.75, Wind speed"]
da_q75 ="75th percentile, wind speed", label="m/s") da_q75.plot(title
Clean up

import os
os.remove("concat.dfs1")
os.remove("oresundHD_difference.dfsu")
Time interpolation

import numpy as np
import mikeio

ds = mikeio.read("../data/waves.dfs2")
ds
Interpolate to specific timestep

A common use case is to interpolate to a shorter timestep, in this case 1h.

ds_h = ds.interp_time(3600)
ds_h

And to store the interpolated data in a new file:

ds_h.to_dfs("waves_3h.dfs2")

Interpolate to time axis of another dataset

Read some non-equidistant data, typically found in observed data.

ts = mikeio.read("../data/waves.dfs0")
ts
dsi = ds.interp_time(ts)

dsi.time

DatetimeIndex(['2004-01-01 01:00:00', '2004-01-01 02:00:00', ...

dsi["Sign. Wave Height"].shape

(24, 31, 31)

ax = dsi["Sign. Wave Height"].sel(x=250, y=1200).plot(marker='+')
ts["Sign. Wave Height"].plot(ax=ax, marker='+')
Model validation

In the example below we calculate the mean absolute error (MAE) using the model data interpolated to the observed times. For a more elaborate model validation library, which takes care of these things for you as well as calculating a number of relevant metrics, take a look at ModelSkill. Use np.nanmean to skip NaN.

ts["Sign. Wave Height"]

<mikeio.DataArray>
values: [0.06521, 0.06771, ..., 0.0576]

dsi["Sign. Wave Height"].sel(x=250, y=1200)

<mikeio.DataArray>
values: [0.0387, 0.03939, ..., nan]

diff = (ts["Sign. Wave Height"] - dsi["Sign. Wave Height"].sel(x=250, y=1200))
diff.plot()

mae = np.abs(diff).nanmean().to_numpy()
mae
Clean up

import os
os.remove("waves_3h.dfs2")
Dfs2 - Bathymetric data

GEBCO Compilation Group (2020) GEBCO 2020 Grid (doi:10.5285/a29c5465-b138-234d-e053-6c86abc040b9)

import xarray
import mikeio

ds = xarray.open_dataset("../../data/gebco_2020_n56.3_s55.2_w12.2_e13.1.nc")
ds
Coordinates: (2)
Data variables:
    elevation  (lat, lon) int16 114kB ...
Attributes: (8)

ds.elevation.plot();

ds.elevation.sel(lon=12.74792, lat=55.865, method="nearest")
Check ordering of dimensions, should be (y,x).

ds.elevation.dims

('lat', 'lon')

el = ds.elevation.values
el.shape
Check that axes are increasing, S->N and W->E.

ds.lat.values[0], ds.lat.values[-1]

(55.20208333333332, 56.29791666666665)

ds.lat.values[0] < ds.lat.values[-1]

True

ds.lon.values[0], ds.lon.values[-1]

(12.20208333333332, 13.097916666666663)

el[0,0]   # bottom left

-8

el[-1,0]  # top left

-31
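Had the latitude axis instead been decreasing (N->S), a hypothetical fix (not needed for this file) would be to flip both the coordinate vector and the data rows before building the grid:

# lat = ds.lat.values[::-1]  # make latitude increase S->N
# el = el[::-1, :]           # flip data rows to match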
geometry = mikeio.Grid2D(x=ds.lon.values, y=ds.lat.values, projection="LONG/LAT")
geometry

projection: LONG/LAT

da = mikeio.DataArray(data=el,
                      item=mikeio.ItemInfo("Elevation", mikeio.EUMType.Total_Water_Depth),
                      geometry=geometry)
da

geometry: Grid2D (ny=264, nx=216)

da.plot();

da.plot(cmap='coolwarm', vmin=-100, vmax=100);
da.to_dfs("gebco.dfs2")

ds = mikeio.read("gebco.dfs2")
ds.Elevation.plot()

Clean up

import os
os.remove("gebco.dfs2")
Dfs2 - Meteo data

import xarray
import pandas as pd
import mikeio

The file gfs_wind.nc contains a small sample of the GFS forecast data downloaded via their OpenDAP service.

ds = xarray.open_dataset('../../data/gfs_wind.nc')
ds
msletmsl  (time, lat, lon) float32 10kB ...
ugrd10m   (time, lat, lon) float32 10kB ...
vgrd10m   (time, lat, lon) float32 10kB ...
Attributes: (4)

Running a Mike 21 HD model needs at least three variables of meteorological forcing:

- Mean Sea Level Pressure
- U 10m
- V 10m

Let's take a look at the U 10m.

ds.ugrd10m.isel(time=0).plot();
Convert to dfs2

Time

time = pd.DatetimeIndex(ds.time)
time
Variable types

mikeio.EUMType.Air_Pressure

Air Pressure

mikeio.EUMType.Air_Pressure.units

[hectopascal, millibar]

mikeio.EUMType.Wind_Velocity

Wind Velocity

mikeio.EUMType.Wind_Velocity.units

[meter per sec, feet per sec, miles per hour, km per hour, knot]

mslp = ds.msletmsl.values / 100  # conversion from Pa to hPa
u = ds.ugrd10m.values
v = ds.vgrd10m.values

geometry = mikeio.Grid2D(x=ds.lon.values, y=ds.lat.values, projection="LONG/LAT")
geometry
projection: LONG/LAT

from mikeio import ItemInfo, EUMType, EUMUnit
mslp_da = mikeio.DataArray(data=mslp, time=time, geometry=geometry, item=ItemInfo("Mean Sea Level Pressure", EUMType.Air_Pressure, EUMUnit.hectopascal))
u_da = mikeio.DataArray(data=u, time=time, geometry=geometry, item=ItemInfo("Wind U", EUMType.Wind_Velocity, EUMUnit.meter_per_sec))
v_da = mikeio.DataArray(data=v, time=time, geometry=geometry, item=ItemInfo("Wind V", EUMType.Wind_Velocity, EUMUnit.meter_per_sec))

mds = mikeio.Dataset([mslp_da, u_da, v_da])
mds

2: Wind V <Wind Velocity> (meter per sec)

mds.to_dfs("gfs.dfs2")

Clean up

import os
os.remove("gfs.dfs2")
Examples

- Dfs2 - Bathymetric data: Convert GEBCO 2020 NetCDF to dfs2
- Dfs2 - Meteo data: Conversion of NetCDF from the Global Forecasting System to dfs2
- Dfs2 examples
- Dfsu - 2D interpolation: Interpolate dfsu data to a grid, save as dfs2 and GeoTiff. Interpolate dfsu data to another mesh.
- Generic dfs processing: Tools and methods that apply to any type of dfs file.
- Time interpolation