Commit b4b807b

Merge pull request #37 from zsarnoczay/master

azs - mostly updates related to water network damage simulation and working with custom DL models

zsarnoczay authored Mar 28, 2024
2 parents 7d72d09 + 1e0906a commit b4b807b
Showing 18 changed files with 1,885 additions and 756 deletions.
32 changes: 32 additions & 0 deletions .github/workflows/deploy_to_pypi.yaml
@@ -0,0 +1,32 @@
name: Deploy to PyPI

on:
  release:
    types: [created]

jobs:
  deploy:
    runs-on: ubuntu-latest
    steps:
      - name: Check out code
        uses: actions/checkout@v3

      - name: Set up Python
        uses: actions/setup-python@v3
        with:
          python-version: '3.10'

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          python -m pip install setuptools wheel twine
      - name: Build package
        run: |
          python setup.py sdist bdist_wheel
      - name: Publish package to PyPI
        uses: pypa/gh-action-pypi-publish@release/v1
        with:
          user: zsarnoczay
          password: ${{ secrets.PELICUN_GITHUB_TOKEN }}
2 changes: 1 addition & 1 deletion pelicun/__init__.py
@@ -41,7 +41,7 @@

name = "pelicun"

-__version__ = '3.2b9'
+__version__ = '3.2'

__copyright__ = ("Copyright (c) 2018 Leland Stanford "
"Junior University and The Regents "
2 changes: 1 addition & 1 deletion pelicun/assessment.py
@@ -90,7 +90,7 @@ def __init__(self, config_options=None):

        self.options = base.Options(config_options, self)

-        self.unit_conversion_factors = file_io.parse_units(
+        self.unit_conversion_factors = base.parse_units(
            self.options.units_file)

        self.log = self.options.log
Expand Down
181 changes: 181 additions & 0 deletions pelicun/base.py
@@ -57,12 +57,16 @@
int_or_None
process_loc
dedupe_index
dict_raise_on_duplicates
parse_units
convert_units
Options
Logger
"""

from __future__ import annotations
import os
import sys
from datetime import datetime
@@ -1076,3 +1080,180 @@ def dedupe_index(dataframe, dtype=str):
    # Placeholder for advanced calculations
    'One': 'ONE'
}


def dict_raise_on_duplicates(ordered_pairs):
    """
    Reject duplicate keys.
    https://stackoverflow.com/questions/14902299/
    json-loads-allows-duplicate-keys-
    in-a-dictionary-overwriting-the-first-value
    """
    d = {}
    for k, v in ordered_pairs:
        if k in d:
            raise ValueError(f"duplicate key: {k}")
        d[k] = v
    return d
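
# Usage sketch (illustration only; the JSON payload below is hypothetical):
# passed as `object_pairs_hook`, this helper makes `json.loads` reject a
# duplicated key instead of silently keeping the last value.
import json

json.loads('{"length": {"m": 1.0, "m": 39.3701}}',
           object_pairs_hook=dict_raise_on_duplicates)
# raises ValueError: duplicate key: m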


def parse_units(custom_file=None, preserve_categories=False):
    """
    Parse the unit conversion factor JSON file and return a dictionary.
    Parameters
    ----------
    custom_file: str, optional
        If a custom file is provided, only the units specified in the
        custom file are used.
    Raises
    ------
    KeyError
        If a key is defined twice.
    ValueError
        If a unit conversion factor is not a float.
    FileNotFoundError
        If a file does not exist.
    Exception
        If a file does not have the JSON format.
    """

    def get_contents(file_path, preserve_categories=False):
        try:
            with open(file_path, 'r', encoding='utf-8') as f:
                dictionary = json.load(f, object_pairs_hook=dict_raise_on_duplicates)
        except FileNotFoundError as exc:
            raise FileNotFoundError(f'{file_path} was not found.') from exc
        except json.decoder.JSONDecodeError as exc:
            raise ValueError(f'{file_path} is not a valid JSON file.') from exc
        for category_dict in list(dictionary.values()):
            # ensure all first-level keys point to a dictionary
            if not isinstance(category_dict, dict):
                raise ValueError(
                    f'{file_path} contains first-level keys '
                    'that don\'t point to a dictionary'
                )
            # convert values to float
            for key, val in category_dict.items():
                try:
                    category_dict[key] = float(val)
                except (ValueError, TypeError) as exc:
                    raise type(exc)(
                        f'Unit {key} has a value of {val} '
                        'which cannot be interpreted as a float'
                    ) from exc

        if preserve_categories:
            return dictionary

        flattened = {}
        for category in dictionary:
            for unit_name, factor in dictionary[category].items():
                if unit_name in flattened:
                    raise ValueError(f'{unit_name} defined twice in {file_path}.')
                flattened[unit_name] = factor

        return flattened

    if custom_file:
        return get_contents(custom_file, preserve_categories)

    return get_contents(
        pelicun_path / "settings/default_units.json", preserve_categories
    )
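
# Usage sketch (illustration only; 'my_units.json' is a hypothetical custom
# file, and the available unit names come from settings/default_units.json):
flat = parse_units()                            # {unit_name: factor, ...}
nested = parse_units(preserve_categories=True)  # {category: {unit_name: factor}}
# custom = parse_units(custom_file='my_units.json')  # only units from that file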


def convert_units(
    values: (float | list[float] | np.ndarray),
    unit: str,
    to_unit: str,
    category: (str | None) = None
) -> (float | list[float] | np.ndarray):
    """
    Converts numeric values between different units.
    Supports conversion within a specified category of units and
    automatically infers the category if not explicitly provided. It
    maintains the type of the input in the output.
    Parameters
    ----------
    values (float | list[float] | np.ndarray):
      The numeric value(s) to convert.
    unit (str):
      The current unit of the values.
    to_unit (str):
      The target unit to convert the values into.
    category (Optional[str]):
      The category of the units (e.g., 'length', 'pressure'). If not
      provided, the category will be inferred based on the provided
      units.
    Returns
    -------
    (float | list[float] | np.ndarray):
      The converted value(s) in the target unit, in the same data type
      as the input values.
    Raises
    ------
    TypeError:
      If the input `values` are not of type float, list, or
      np.ndarray.
    ValueError:
      If the `unit`, `to_unit`, or `category` is unknown or if `unit`
      and `to_unit` are not in the same category.
    """

    if isinstance(values, (float, list)):
        vals = np.atleast_1d(values)
    elif isinstance(values, np.ndarray):
        vals = values
    else:
        raise TypeError('Invalid input type for `values`')

    # load default units
    all_units = parse_units(preserve_categories=True)

    # if a category is given use it, otherwise try to determine it
    if category:
        if category not in all_units:
            raise ValueError(f'Unknown category: `{category}`')
        units = all_units[category]
        for unt in unit, to_unit:
            if unt not in units:
                raise ValueError(
                    f'Unknown unit: `{unt}`'
                )
    else:
        unit_category: (str | None) = None
        for key in all_units:
            units = all_units[key]
            if unit in units:
                unit_category = key
                break
        if not unit_category:
            raise ValueError(f'Unknown unit `{unit}`')
        units = all_units[unit_category]
        if to_unit not in units:
            raise ValueError(
                f'`{unit}` is a `{unit_category}` unit, but `{to_unit}` '
                f'is not specified in that category.'
            )

    # convert units
    from_factor = units[unit]
    to_factor = units[to_unit]
    new_values = vals * from_factor / to_factor

    # return the results in the same type as that of the provided
    # values
    if isinstance(values, float):
        return new_values[0]
    if isinstance(values, list):
        return new_values.tolist()
    return new_values
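
A minimal usage sketch for the new convert_units helper (illustration only; it
assumes 'm' and 'inch' are both defined under a length category in the default
unit file, which is not shown in this diff):

    import numpy as np
    from pelicun import base

    base.convert_units(2.5, unit='m', to_unit='inch')         # float in, float out
    base.convert_units([1.0, 2.0], unit='m', to_unit='inch')  # list in, list out
    base.convert_units(np.array([10.0, 20.0]), 'inch', 'm',
                       category='length')                     # ndarray in, ndarray out

Preserving the container type of the input lets callers pass scalars from
configuration files and arrays from simulation results through the same code
path.
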
85 changes: 1 addition & 84 deletions pelicun/file_io.py
@@ -47,20 +47,17 @@
.. autosummary::
dict_raise_on_duplicates
get_required_resources
save_to_csv
load_data
load_from_file
parse_units
"""

import json
from pathlib import Path
import numpy as np
import pandas as pd
-from . import base
+from pelicun import base


convert_dv_name = {
@@ -94,23 +91,6 @@
}


def dict_raise_on_duplicates(ordered_pairs):
    """
    Reject duplicate keys.
    https://stackoverflow.com/questions/14902299/
    json-loads-allows-duplicate-keys-
    in-a-dictionary-overwriting-the-first-value
    """
    d = {}
    for k, v in ordered_pairs:
        if k in d:
            raise ValueError(f"duplicate key: {k}")
        d[k] = v
    return d


def save_to_csv(data, filepath, units=None, unit_conversion_factors=None,
                orientation=0, use_simpleindex=True, log=None):
    """
@@ -453,66 +433,3 @@ def load_from_file(filepath, log=None):
            f'to load from csv: {filepath}')

    return data


def parse_units(custom_file=None):
    """
    Parse the unit conversion factor JSON file and return a dictionary.
    Parameters
    ----------
    custom_file: str, optional
        If a custom file is provided, only the units specified in the
        custom file are used.
    Raises
    ------
    KeyError
        If a key is defined twice.
    ValueError
        If a unit conversion factor is not a float.
    FileNotFoundError
        If a file does not exist.
    Exception
        If a file does not have the JSON format.
    """

    def get_contents(file_path):
        try:
            with open(file_path, 'r', encoding='utf-8') as f:
                dictionary = json.load(
                    f, object_pairs_hook=dict_raise_on_duplicates)
        except FileNotFoundError as exc:
            raise FileNotFoundError(
                f'{file_path} was not found.') from exc
        except json.decoder.JSONDecodeError as exc:
            raise ValueError(
                f'{file_path} is not a valid JSON file.') from exc
        for category_dict in list(dictionary.values()):
            # ensure all first-level keys point to a dictionary
            if not isinstance(category_dict, dict):
                raise ValueError(
                    f'{file_path} contains first-level keys '
                    'that don\'t point to a dictionary')
            # convert values to float
            for key, val in category_dict.items():
                try:
                    category_dict[key] = float(val)
                except (ValueError, TypeError) as exc:
                    raise type(exc)(
                        f'Unit {key} has a value of {val} '
                        'which cannot be interpreted as a float') from exc

        flattened = {}
        for category in dictionary:
            for unit_name, factor in dictionary[category].items():
                if unit_name in flattened:
                    raise ValueError(f'{unit_name} defined twice in {file_path}.')
                flattened[unit_name] = factor

        return flattened

    if custom_file:
        return get_contents(custom_file)

    return get_contents(base.pelicun_path / "settings/default_units.json")
5 changes: 5 additions & 0 deletions pelicun/resources/SimCenterDBDL/damage_DB_Hazus_EQ_water.csv
@@ -0,0 +1,5 @@
ID,Incomplete,Demand-Type,Demand-Unit,Demand-Offset,Demand-Directional,LS1-Family,LS1-Theta_0,LS1-DamageStateWeights
PWP.B.GS,0,Peak Ground Velocity,cmps,0,0,multilinear_CDF,"0.1,2,3,4,5,6,8,10,15,20,30,40,50,60,80,100,150,200,300,400,500,579|0.00,0.00000290,0.00000722,0.00001379,0.00002279,0.00003435,0.00006561,0.00010840,0.00026993,0.00051566,0.00128401,0.00245290,0.00405254,0.00610781,0.01166803,0.01927724,0.04800100,0.09169855,0.22833253,0.43619425,0.72065511,1.00",0.8 | 0.2
PWP.D.GS,0,Peak Ground Velocity,cmps,0,0,multilinear_CDF,"0.2,3,4,5,6,8,10,15,20,30,40,50,60,80,100,150,200,300,400,500,600,800,990|0.00,0.00000217,0.00000414,0.00000684,0.00001030,0.00001968,0.00003252,0.00008098,0.00015470,0.00038520,0.00073587,0.00121576,0.00183234,0.00350041,0.00578317,0.01440030,0.02750956,0.06849976,0.13085828,0.21619653,0.32584160,0.62247037,1.00",0.8 | 0.2
PWP.B.GF,0,Permanent Ground Deformation,inch,0,0,multilinear_CDF,"0.008,0.1,0.2,0.3,0.4,0.5,0.6,0.8,1,2,3,4,5,6,8,10,15,20,30,40,50,60,80,100,150,200,300,400,500,600,800,1000,2000,3000,4000,5000,6000,8000,9050|0.00,0.00167898,0.00247527,0.00310622,0.00364921,0.00413493,0.00457941,0.00537991,0.00609600,0.00898715,0.01127802,0.01324947,0.01501302,0.01662684,0.01953329,0.02213324,0.02777513,0.03263035,0.04094802,0.04810592,0.05450899,0.06036840,0.07092108,0.08036093,0.10084539,0.11847365,0.14867326,0.17466205,0.19791019,0.21918441,0.25749888,0.29177290,0.43015181,0.53979998,0.63415957,0.71856848,0.79581049,0.93492194,1.00",0.2 | 0.8
PWP.D.GF,0,Permanent Ground Deformation,inch,0,0,multilinear_CDF,"0.3,0.4,0.5,0.6,0.8,1,2,3,4,5,6,8,10,15,20,30,40,50,60,80,100,150,200,300,400,500,600,800,1000,2000,3000,4000,5000,6000,8000,10000,20000,30000,40000,50000,60000,80000|0.00,0.00109476,0.00124048,0.00137382,0.00161397,0.00182880,0.00269614,0.00338341,0.00397484,0.00450391,0.00498805,0.00585999,0.00663997,0.00833254,0.00978911,0.01228440,0.01443178,0.01635270,0.01811052,0.02127632,0.02410828,0.03025362,0.03554209,0.04460198,0.05239862,0.05937306,0.06575532,0.07724967,0.08753187,0.12904554,0.16193999,0.19024787,0.21557054,0.23874315,0.28047658,0.31780902,0.46853605,0.58796858,0.69074827,0.78268933,0.86682397,1.00",0.2 | 0.8
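
The LS1-Theta_0 entries above encode a multilinear CDF as a string of demand
values and the matching CDF ordinates separated by '|'. A rough sketch of how
such a string could be evaluated (illustration only, using just the first five
points of the PWP.B.GS curve; this is not necessarily how pelicun consumes the
table internally):

    import numpy as np

    theta_0 = "0.1,2,3,4,5|0.00,0.00000290,0.00000722,0.00001379,0.00002279"
    x_str, y_str = theta_0.split('|')
    pgv = np.array(x_str.split(','), dtype=float)  # Peak Ground Velocity, cm/s
    cdf = np.array(y_str.split(','), dtype=float)  # corresponding CDF ordinates

    # linearly interpolate the CDF at a PGV of 3.5 cm/s
    p_ls1 = np.interp(3.5, pgv, cdf)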