MNT: Setup.cfg update #749

Merged (2 commits, Nov 7, 2023)
act/io/armfiles.py (54 changes: 31 additions & 23 deletions)
@@ -5,27 +5,26 @@
"""

import copy
+import datetime as dt
import glob
import json
import re
-import urllib
-import warnings
-from pathlib import Path, PosixPath
-from netCDF4 import Dataset
-from os import PathLike
import tarfile
import tempfile
+import urllib
+import warnings
+from os import PathLike
+from pathlib import Path, PosixPath

-from cftime import num2date
import numpy as np
import xarray as xr
-import datetime as dt
+from cftime import num2date
+from netCDF4 import Dataset

import act
import act.utils as utils
from act.config import DEFAULT_DATASTREAM_NAME
-from act.utils.io_utils import unpack_tar, unpack_gzip, cleanup_files, is_gunzip_file
+from act.utils.io_utils import cleanup_files, is_gunzip_file, unpack_gzip, unpack_tar


def read_netcdf(
@@ -108,7 +107,6 @@ def read_netcdf(
message = 'act.io.armfiles.read_netcdf will be replaced in version 2.0.0 by act.io.arm.read_arm_netcdf()'
warnings.warn(message, DeprecationWarning, 2)


ds = None
filenames, cleanup_temp_directory = check_if_tar_gz_file(filenames)

@@ -137,7 +135,8 @@
if 'drop_variables' in kwargs.keys():
drop_variables = kwargs['drop_variables']
kwargs['drop_variables'] = keep_variables_to_drop_variables(
-filenames, keep_variables, drop_variables=drop_variables)
+filenames, keep_variables, drop_variables=drop_variables
+)

# Create an exception tuple to use with try statements. Doing it this way
# so we can add the FileNotFoundError if requested. Can add more error
@@ -178,7 +177,9 @@ def read_netcdf(
# If requested use base_time and time_offset to derive time. Assumes that the units
# of both are in seconds and that the value is number of seconds since epoch.
if use_base_time:
-time = num2date(ds['base_time'].values + ds['time_offset'].values, ds['base_time'].attrs['units'])
+time = num2date(
+ds['base_time'].values + ds['time_offset'].values, ds['base_time'].attrs['units']
+)
time = time.astype('datetime64[ns]')

# Need to use a new Dataset creation to correctly index time for use with
@@ -280,10 +281,7 @@ def read_netcdf(
return ds


-def keep_variables_to_drop_variables(
-filenames,
-keep_variables,
-drop_variables=None):
+def keep_variables_to_drop_variables(filenames, keep_variables, drop_variables=None):
"""
Returns a list of variable names to exclude from reading by passing into
`Xarray.open_dataset` drop_variables keyword. This can greatly help reduce
@@ -347,7 +345,6 @@ def keep_variables_to_drop_variables(
# Use netCDF4 library to extract the variable and dimension names.
rootgrp = Dataset(filename, 'r')
read_variables = list(rootgrp.variables)
-dimensions = list(rootgrp.dimensions)
# Loop over the variables to exclude needed coordinate dimention names.
dims_to_keep = []
for var_name in keep_variables:
@@ -400,7 +397,9 @@ def check_arm_standards(ds):
return the_flag


-def create_ds_from_arm_dod(proc, set_dims, version='', fill_value=-9999.0, scalar_fill_dim=None, local_file=False):
+def create_ds_from_arm_dod(
+proc, set_dims, version='', fill_value=-9999.0, scalar_fill_dim=None, local_file=False
+):
"""

Queries the ARM DOD api and builds a dataset based on the ARM DOD and
@@ -631,7 +630,9 @@ def write_netcdf(
try:
att_values = write_ds[var_name].attrs[attr_name]
if isinstance(att_values, (list, tuple)):
-att_values = [att_value.replace(' ', join_char) for att_value in att_values]
+att_values = [
+att_value.replace(' ', join_char) for att_value in att_values
+]
write_ds[var_name].attrs[attr_name] = ' '.join(att_values)

except KeyError:
@@ -759,9 +760,16 @@ def write_netcdf(
pass
current_time = dt.datetime.now().replace(microsecond=0)
if 'history' in list(write_ds.attrs.keys()):
-write_ds.attrs['history'] += ''.join(['\n', str(current_time), ' created by ACT ', str(act.__version__),
-' act.io.write.write_netcdf'])

+write_ds.attrs['history'] += ''.join(
+[
+'\n',
+str(current_time),
+' created by ACT ',
+str(act.__version__),
+' act.io.write.write_netcdf',
+]
+)

if hasattr(write_ds, 'time_bounds') and not write_ds.time.encoding:
write_ds.time.encoding.update(write_ds.time_bounds.encoding)

@@ -830,7 +838,7 @@ def read_mmcr(filenames):
# read it in with xarray
multi_ds = []
for f in filenames:
nc = Dataset(f, "a")
nc = Dataset(f, 'a')
# Change heights name to range to read appropriately to xarray
if 'heights' in nc.dimensions:
nc.renameDimension('heights', 'range')
@@ -878,7 +886,7 @@ def read_mmcr(filenames):
data=data,
coords={time_name: ds['time'].values[idx], range_name: range_data[idy]},
dims=[time_name, range_name],
-attrs=attrs
+attrs=attrs,
)
ds[new_var_name] = da

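The comment above the @@ -178 hunk explains that time is derived from base_time + time_offset under the assumption that both are in seconds since epoch. A self-contained numpy sketch of that arithmetic with synthetic values, standing in for the cftime.num2date call used in the actual code:

import numpy as np

# Synthetic stand-ins for the ARM base_time / time_offset variables
# (both assumed to be seconds since 1970-01-01, as the code comment states).
base_time = np.int64(1672531200)                   # 2023-01-01 00:00:00 UTC
time_offset = np.arange(0, 90, 30, dtype='int64')  # three samples, 30 s apart

# Seconds since epoch converted to datetime64[ns], the dtype used for the time index.
time = (base_time + time_offset).astype('datetime64[s]').astype('datetime64[ns]')
print(time)  # 2023-01-01T00:00:00, 00:00:30, 00:01:00
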
setup.cfg (5 changes: 2 additions & 3 deletions)
@@ -1,9 +1,9 @@
[flake8]
-exclude = act/io/armfiles.py docs *__init__.py* setup.cfg
+exclude = act/tests/data/ docs *__init__.py* setup.cfg
ignore = E203,E266,E501,W503,E722,E402,C901,E731,F401
max-line-length = 100
max-complexity = 18
-extend-exclude = act/io/armfiles.py docs *__init__.py*
+extend-exclude = docs *__init__.py*
extend-ignore = E203,E266,E501,W503,E722,E402,C901,E731,F401

[isort]
@@ -18,7 +18,6 @@ line_length=100
skip=
docs/source/conf.py
setup.py
-act/io/armfiles.py

[tool:pytest]
addopts = --cov=./ --cov-report=xml --verbose
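For context on the armfiles.py import reordering above: with armfiles.py no longer in the isort skip list, the settings kept in this file (line_length=100) arrange imports into standard-library, third-party, and first-party groups separated by blank lines, which is the pattern the first hunk of the diff produces. A minimal sketch of that grouping, using module names taken from the diff (the group labels are explanatory comments added here, not isort output):

# Standard library
import datetime as dt
import glob
from pathlib import Path, PosixPath

# Third-party
import numpy as np
import xarray as xr
from netCDF4 import Dataset

# First-party
import act
from act.config import DEFAULT_DATASTREAM_NAME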