black format all notebooks
freemansw1 committed Jul 18, 2024
1 parent 61371a6 commit a9c74c7
Showing 6 changed files with 406 additions and 221 deletions.
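For context, recent versions of black can format Jupyter notebook code cells directly when installed with the optional jupyter extra. The sketch below is a hypothetical way to reproduce a formatting pass like this one locally; the paths and the exact invocation are assumptions, not taken from the commit itself:

# Hypothetical sketch: run black over every notebook in the working tree.
# Assumes black is installed with notebook support, e.g. pip install "black[jupyter]".
import subprocess
from pathlib import Path

notebooks = sorted(str(p) for p in Path(".").rglob("*.ipynb"))  # all notebooks under the repo root
if notebooks:
    subprocess.run(["black", *notebooks], check=True)  # rewrites the notebooks' code cells in place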
First changed file (notebook):
@@ -34,6 +34,7 @@
"import shutil\n",
"from six.moves import urllib\n",
"from pathlib import Path\n",
"\n",
"%matplotlib inline"
]
},
@@ -60,7 +61,8 @@
"source": [
"# Import tobac itself\n",
"import tobac\n",
"print('using tobac version', str(tobac.__version__))"
"\n",
"print(\"using tobac version\", str(tobac.__version__))"
]
},
{
@@ -78,10 +80,11 @@
"source": [
"# Disable a few warnings:\n",
"import warnings\n",
"warnings.filterwarnings('ignore', category=UserWarning, append=True)\n",
"warnings.filterwarnings('ignore', category=RuntimeWarning, append=True)\n",
"warnings.filterwarnings('ignore', category=FutureWarning, append=True)\n",
"warnings.filterwarnings('ignore',category=pd.io.pytables.PerformanceWarning)"
"\n",
"warnings.filterwarnings(\"ignore\", category=UserWarning, append=True)\n",
"warnings.filterwarnings(\"ignore\", category=RuntimeWarning, append=True)\n",
"warnings.filterwarnings(\"ignore\", category=FutureWarning, append=True)\n",
"warnings.filterwarnings(\"ignore\", category=pd.io.pytables.PerformanceWarning)"
]
},
{
@@ -106,7 +109,7 @@
},
"outputs": [],
"source": [
"data_out=Path('../')"
"data_out = Path(\"../\")"
]
},
{
@@ -123,18 +126,18 @@
"outputs": [],
"source": [
"# Download the data: This only has to be done once for all tobac examples and can take a while\n",
"data_file = list(data_out.rglob('data/Example_input_Precip.nc'))\n",
"data_file = list(data_out.rglob(\"data/Example_input_Precip.nc\"))\n",
"if len(data_file) == 0:\n",
" file_path='https://zenodo.org/records/3195910/files/climate-processes/tobac_example_data-v1.0.1.zip'\n",
" #file_path='http://zenodo..'\n",
" tempfile=Path('temp.zip')\n",
" print('start downloading data')\n",
" request=urllib.request.urlretrieve(file_path, tempfile)\n",
" print('start extracting data')\n",
" file_path = \"https://zenodo.org/records/3195910/files/climate-processes/tobac_example_data-v1.0.1.zip\"\n",
" # file_path='http://zenodo..'\n",
" tempfile = Path(\"temp.zip\")\n",
" print(\"start downloading data\")\n",
" request = urllib.request.urlretrieve(file_path, tempfile)\n",
" print(\"start extracting data\")\n",
" shutil.unpack_archive(tempfile, data_out)\n",
" tempfile.unlink()\n",
" print('data extracted')\n",
" data_file = list(data_out.rglob('data/Example_input_Precip.nc'))"
" print(\"data extracted\")\n",
" data_file = list(data_out.rglob(\"data/Example_input_Precip.nc\"))"
]
},
{
@@ -157,7 +160,7 @@
},
"outputs": [],
"source": [
"Precip=iris.load_cube(str(data_file[0]),'surface_precipitation_average')"
"Precip = iris.load_cube(str(data_file[0]), \"surface_precipitation_average\")"
]
},
{
@@ -324,7 +327,7 @@
}
],
"source": [
"#display information about the iris cube containing the surface precipitation data:\n",
"# display information about the iris cube containing the surface precipitation data:\n",
"display(Precip)"
]
},
@@ -341,11 +344,11 @@
},
"outputs": [],
"source": [
"#Set up directory to save output:\n",
"savedir=Path(\"Save\")\n",
"# Set up directory to save output:\n",
"savedir = Path(\"Save\")\n",
"if not savedir.is_dir():\n",
" savedir.mkdir()\n",
"plot_dir=Path(\"Plot\")\n",
"plot_dir = Path(\"Plot\")\n",
"if not plot_dir.is_dir():\n",
" plot_dir.mkdir()"
]
@@ -380,17 +383,17 @@
},
"outputs": [],
"source": [
"parameters_features={}\n",
"parameters_features['position_threshold']='weighted_diff'\n",
"parameters_features['sigma_threshold']=0.5\n",
"parameters_features['min_distance']=0\n",
"parameters_features['sigma_threshold']=1\n",
"parameters_features['threshold']=[1,2,3,4,5,10,15] #mm/h\n",
"parameters_features['n_erosion_threshold']=0\n",
"parameters_features['n_min_threshold']=3\n",
"parameters_features = {}\n",
"parameters_features[\"position_threshold\"] = \"weighted_diff\"\n",
"parameters_features[\"sigma_threshold\"] = 0.5\n",
"parameters_features[\"min_distance\"] = 0\n",
"parameters_features[\"sigma_threshold\"] = 1\n",
"parameters_features[\"threshold\"] = [1, 2, 3, 4, 5, 10, 15] # mm/h\n",
"parameters_features[\"n_erosion_threshold\"] = 0\n",
"parameters_features[\"n_min_threshold\"] = 3\n",
"\n",
"# get temporal and spation resolution of the data\n",
"dxy,dt=tobac.get_spacings(Precip)"
"dxy, dt = tobac.get_spacings(Precip)"
]
},
{
@@ -421,9 +424,9 @@
"outputs": [],
"source": [
"statistics = {}\n",
"statistics['mean_precip'] = np.mean\n",
"statistics['total_precip'] = np.sum\n",
"statistics['max_precip'] = np.max"
"statistics[\"mean_precip\"] = np.mean\n",
"statistics[\"total_precip\"] = np.sum\n",
"statistics[\"max_precip\"] = np.max"
]
},
{
@@ -446,7 +449,7 @@
},
"outputs": [],
"source": [
"statistics['percentiles'] = (np.percentile, {'q': [95,99]})"
"statistics[\"percentiles\"] = (np.percentile, {\"q\": [95, 99]})"
]
},
{
@@ -485,11 +488,13 @@
],
"source": [
"# Feature detection based on based on surface precipitation field and a range of thresholds\n",
"print('starting feature detection based on multiple thresholds')\n",
"Features= tobac.feature_detection_multithreshold(Precip,dxy,**parameters_features, statistic=statistics) \n",
"print('feature detection done')\n",
"Features.to_hdf(savedir / 'Features.h5','table')\n",
"print('features saved')"
"print(\"starting feature detection based on multiple thresholds\")\n",
"Features = tobac.feature_detection_multithreshold(\n",
" Precip, dxy, **parameters_features, statistic=statistics\n",
")\n",
"print(\"feature detection done\")\n",
"Features.to_hdf(savedir / \"Features.h5\", \"table\")\n",
"print(\"features saved\")"
]
},
{
Next changed file (notebook):
@@ -32,6 +32,7 @@
"import shutil\n",
"from six.moves import urllib\n",
"from pathlib import Path\n",
"\n",
"%matplotlib inline"
]
},
@@ -50,10 +51,11 @@
"source": [
"# Disable a few warnings:\n",
"import warnings\n",
"warnings.filterwarnings('ignore', category=UserWarning, append=True)\n",
"warnings.filterwarnings('ignore', category=RuntimeWarning, append=True)\n",
"warnings.filterwarnings('ignore', category=FutureWarning, append=True)\n",
"warnings.filterwarnings('ignore',category=pd.io.pytables.PerformanceWarning)"
"\n",
"warnings.filterwarnings(\"ignore\", category=UserWarning, append=True)\n",
"warnings.filterwarnings(\"ignore\", category=RuntimeWarning, append=True)\n",
"warnings.filterwarnings(\"ignore\", category=FutureWarning, append=True)\n",
"warnings.filterwarnings(\"ignore\", category=pd.io.pytables.PerformanceWarning)"
]
},
{
@@ -79,7 +81,8 @@
"source": [
"# Import tobac itself\n",
"import tobac\n",
"print('using tobac version', str(tobac.__version__))"
"\n",
"print(\"using tobac version\", str(tobac.__version__))"
]
},
{
@@ -95,20 +98,20 @@
},
"outputs": [],
"source": [
"data_out=Path('../')\n",
"data_out = Path(\"../\")\n",
"# Download the data: This only has to be done once for all tobac examples and can take a while\n",
"data_file = list(data_out.rglob('data/Example_input_Precip.nc'))\n",
"data_file = list(data_out.rglob(\"data/Example_input_Precip.nc\"))\n",
"if len(data_file) == 0:\n",
" file_path='https://zenodo.org/records/3195910/files/climate-processes/tobac_example_data-v1.0.1.zip'\n",
" #file_path='http://zenodo..'\n",
" tempfile=Path('temp.zip')\n",
" print('start downloading data')\n",
" request=urllib.request.urlretrieve(file_path, tempfile)\n",
" print('start extracting data')\n",
" file_path = \"https://zenodo.org/records/3195910/files/climate-processes/tobac_example_data-v1.0.1.zip\"\n",
" # file_path='http://zenodo..'\n",
" tempfile = Path(\"temp.zip\")\n",
" print(\"start downloading data\")\n",
" request = urllib.request.urlretrieve(file_path, tempfile)\n",
" print(\"start extracting data\")\n",
" shutil.unpack_archive(tempfile, data_out)\n",
" tempfile.unlink()\n",
" print('data extracted')\n",
" data_file = list(data_out.rglob('data/Example_input_Precip.nc'))"
" print(\"data extracted\")\n",
" data_file = list(data_out.rglob(\"data/Example_input_Precip.nc\"))"
]
},
{
@@ -124,11 +127,11 @@
},
"outputs": [],
"source": [
"#Set up directory to save output:\n",
"savedir=Path(\"Save\")\n",
"# Set up directory to save output:\n",
"savedir = Path(\"Save\")\n",
"if not savedir.is_dir():\n",
" savedir.mkdir()\n",
"plot_dir=Path(\"Plot\")\n",
"plot_dir = Path(\"Plot\")\n",
"if not plot_dir.is_dir():\n",
" plot_dir.mkdir()"
]
@@ -146,7 +149,7 @@
},
"outputs": [],
"source": [
"Precip=iris.load_cube(str(data_file[0]),'surface_precipitation_average')"
"Precip = iris.load_cube(str(data_file[0]), \"surface_precipitation_average\")"
]
},
{
@@ -169,17 +172,17 @@
},
"outputs": [],
"source": [
"parameters_features={}\n",
"parameters_features['position_threshold']='weighted_diff'\n",
"parameters_features['sigma_threshold']=0.5\n",
"parameters_features['min_distance']=0\n",
"parameters_features['sigma_threshold']=1\n",
"parameters_features['threshold']=[1,2,3,4,5,10,15] #mm/h\n",
"parameters_features['n_erosion_threshold']=0\n",
"parameters_features['n_min_threshold']=3\n",
"parameters_features = {}\n",
"parameters_features[\"position_threshold\"] = \"weighted_diff\"\n",
"parameters_features[\"sigma_threshold\"] = 0.5\n",
"parameters_features[\"min_distance\"] = 0\n",
"parameters_features[\"sigma_threshold\"] = 1\n",
"parameters_features[\"threshold\"] = [1, 2, 3, 4, 5, 10, 15] # mm/h\n",
"parameters_features[\"n_erosion_threshold\"] = 0\n",
"parameters_features[\"n_min_threshold\"] = 3\n",
"\n",
"# get temporal and spation resolution of the data\n",
"dxy,dt=tobac.get_spacings(Precip)"
"dxy, dt = tobac.get_spacings(Precip)"
]
},
{
@@ -218,11 +221,11 @@
],
"source": [
"# Feature detection based on based on surface precipitation field and a range of thresholds\n",
"print('starting feature detection based on multiple thresholds')\n",
"Features= tobac.feature_detection_multithreshold(Precip,dxy,**parameters_features) \n",
"print('feature detection done')\n",
"Features.to_hdf(savedir / 'Features.h5','table')\n",
"print('features saved')"
"print(\"starting feature detection based on multiple thresholds\")\n",
"Features = tobac.feature_detection_multithreshold(Precip, dxy, **parameters_features)\n",
"print(\"feature detection done\")\n",
"Features.to_hdf(savedir / \"Features.h5\", \"table\")\n",
"print(\"features saved\")"
]
},
{
@@ -255,12 +258,12 @@
"outputs": [],
"source": [
"# Dictionary containing keyword arguments for segmentation step:\n",
"parameters_segmentation={}\n",
"parameters_segmentation['method']='watershed'\n",
"parameters_segmentation['threshold']=1 # mm/h mixing ratio\n",
"parameters_segmentation = {}\n",
"parameters_segmentation[\"method\"] = \"watershed\"\n",
"parameters_segmentation[\"threshold\"] = 1 # mm/h mixing ratio\n",
"\n",
"# get temporal and spation resolution of the data\n",
"dxy,dt=tobac.get_spacings(Precip)"
"dxy, dt = tobac.get_spacings(Precip)"
]
},
{
@@ -291,9 +294,9 @@
"outputs": [],
"source": [
"statistics = {}\n",
"statistics['mean_precip'] = np.mean\n",
"statistics['total_precip'] = np.sum\n",
"statistics['max_precip'] = np.max"
"statistics[\"mean_precip\"] = np.mean\n",
"statistics[\"total_precip\"] = np.sum\n",
"statistics[\"max_precip\"] = np.max"
]
},
{
@@ -316,7 +319,7 @@
},
"outputs": [],
"source": [
"statistics['percentiles'] = (np.percentile, {'q': [95,99]})"
"statistics[\"percentiles\"] = (np.percentile, {\"q\": [95, 99]})"
]
},
{
@@ -355,12 +358,16 @@
],
"source": [
"# Perform Segmentation and save resulting mask to NetCDF file:\n",
"print('Starting segmentation based on surface precipitation')\n",
"Mask,Features_Precip=tobac.segmentation_2D(Features,Precip,dxy,**parameters_segmentation, statistic=statistics)\n",
"print('segmentation based on surface precipitation performed, start saving results to files')\n",
"iris.save([Mask], savedir / 'Mask_Segmentation_precip.nc', zlib=True, complevel=4) \n",
"Features_Precip.to_hdf(savedir / 'Features_Precip.h5', 'table')\n",
"print('segmentation surface precipitation performed and saved')"
"print(\"Starting segmentation based on surface precipitation\")\n",
"Mask, Features_Precip = tobac.segmentation_2D(\n",
" Features, Precip, dxy, **parameters_segmentation, statistic=statistics\n",
")\n",
"print(\n",
" \"segmentation based on surface precipitation performed, start saving results to files\"\n",
")\n",
"iris.save([Mask], savedir / \"Mask_Segmentation_precip.nc\", zlib=True, complevel=4)\n",
"Features_Precip.to_hdf(savedir / \"Features_Precip.h5\", \"table\")\n",
"print(\"segmentation surface precipitation performed and saved\")"
]
},
{
(Diffs of the remaining changed files are not shown here.)