Skip to content

Commit

Permalink
MNT: Update codebase with pre-commit while removing armfiles from set…
Browse files Browse the repository at this point in the history
…up.cfg.
  • Loading branch information
zssherman committed Nov 7, 2023
1 parent bcb8f62 commit bf4c70e
Show file tree
Hide file tree
Showing 81 changed files with 3,157 additions and 1,569 deletions.
4 changes: 2 additions & 2 deletions act/corrections/mpl.py
Original file line number Diff line number Diff line change
Expand Up @@ -141,8 +141,8 @@ def correct_mpl(
x_data = x_data - x_ap

# R-Squared Correction
co_data = co_data * height ** 2
x_data = x_data * height ** 2
co_data = co_data * height**2
x_data = x_data * height**2

# Overlap Correction
for j in range(ds[range_bins_var_name].size):
Expand Down
11 changes: 9 additions & 2 deletions act/discovery/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,14 @@

__getattr__, __dir__, __all__ = lazy.attach(
__name__,
submodules=['get_armfiles', 'get_cropscape', 'get_airnow', 'get_noaapsl', 'get_neon', 'get_surfrad'],
submodules=[
'get_armfiles',
'get_cropscape',
'get_airnow',
'get_noaapsl',
'get_neon',
'get_surfrad',
],
submod_attrs={
'get_arm': ['download_arm_data'],
'get_armfiles': ['download_data', 'download_arm_data', 'get_arm_doi'],
Expand All @@ -17,6 +24,6 @@
'get_cropscape': ['croptype'],
'get_noaapsl': ['download_noaa_psl_data'],
'get_neon': ['get_site_products', 'get_product_avail', 'download_neon_data'],
'get_surfrad': ['download_surfrad']
'get_surfrad': ['download_surfrad'],
},
)
178 changes: 133 additions & 45 deletions act/discovery/airnow.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import pandas as pd
import numpy as np
import pandas as pd
import xarray as xr


Expand Down Expand Up @@ -38,25 +38,42 @@ def get_airnow_forecast(token, date, zipcode=None, latlon=None, distance=25):
"""

# default beginning of the query url
query_url = ('https://airnowapi.org/aq/forecast/')
query_url = 'https://airnowapi.org/aq/forecast/'

    # checking if either a zipcode or latlon coordinate is defined
# if neither is defined then error is raised
if (zipcode is None) and (latlon is None):
raise NameError("Zipcode or latlon must be defined")
raise NameError('Zipcode or latlon must be defined')

if zipcode:
url = (query_url + ('zipcode/?' + 'format=text/csv' + '&zipCode='
+ str(zipcode) + '&date=' + str(date)
+ '&distance=' + str(distance)
+ '&API_KEY=' + str(token)))
url = query_url + (
'zipcode/?'
+ 'format=text/csv'
+ '&zipCode='
+ str(zipcode)
+ '&date='
+ str(date)
+ '&distance='
+ str(distance)
+ '&API_KEY='
+ str(token)
)

if latlon:
url = (query_url + ('latLong/?' + 'format=text/csv'
+ '&latitude=' + str(latlon[0]) + '&longitude='
+ str(latlon[1]) + '&date=' + str(date)
+ '&distance=' + str(distance)
+ '&API_KEY=' + str(token)))
url = query_url + (
'latLong/?'
+ 'format=text/csv'
+ '&latitude='
+ str(latlon[0])
+ '&longitude='
+ str(latlon[1])
+ '&date='
+ str(date)
+ '&distance='
+ str(distance)
+ '&API_KEY='
+ str(token)
)

df = pd.read_csv(url)

Expand Down Expand Up @@ -103,37 +120,78 @@ def get_airnow_obs(token, date=None, zipcode=None, latlon=None, distance=25):
"""

# default beginning of the query url
query_url = ('https://www.airnowapi.org/aq/observation/')
query_url = 'https://www.airnowapi.org/aq/observation/'

    # checking if either a zipcode or latlon coordinate is defined
# if neither is defined then error is raised
if (zipcode is None) and (latlon is None):
raise NameError("Zipcode or latlon must be defined")
raise NameError('Zipcode or latlon must be defined')

# setting the observation type to either current or historical based on the date
if date is None:
obs_type = 'current'
if zipcode:
url = (query_url + ('zipCode/' + str(obs_type) + '/?' + 'format=text/csv'
+ '&zipCode=' + str(zipcode) + '&distance=' + str(distance)
+ '&API_KEY=' + str(token)))
url = query_url + (
'zipCode/'
+ str(obs_type)
+ '/?'
+ 'format=text/csv'
+ '&zipCode='
+ str(zipcode)
+ '&distance='
+ str(distance)
+ '&API_KEY='
+ str(token)
)
if latlon:
url = (query_url + ('latLong/' + str(obs_type) + '/?' + 'format=text/csv'
+ '&latitude=' + str(latlon[0])
+ '&longitude=' + str(latlon[1]) + '&distance='
+ str(distance) + '&API_KEY=' + str(token)))
url = query_url + (
'latLong/'
+ str(obs_type)
+ '/?'
+ 'format=text/csv'
+ '&latitude='
+ str(latlon[0])
+ '&longitude='
+ str(latlon[1])
+ '&distance='
+ str(distance)
+ '&API_KEY='
+ str(token)
)
else:
obs_type = 'historical'
if zipcode:
url = (query_url + ('zipCode/' + str(obs_type) + '/?' + 'format=text/csv'
+ '&zipCode=' + str(zipcode) + '&date=' + str(date)
+ 'T00-0000&distance=' + str(distance) + '&API_KEY=' + str(token)))
url = query_url + (
'zipCode/'
+ str(obs_type)
+ '/?'
+ 'format=text/csv'
+ '&zipCode='
+ str(zipcode)
+ '&date='
+ str(date)
+ 'T00-0000&distance='
+ str(distance)
+ '&API_KEY='
+ str(token)
)
if latlon:
url = (query_url + ('latLong/' + str(obs_type) + '/?' + 'format=text/csv'
+ '&latitude=' + str(latlon[0])
+ '&longitude=' + str(latlon[1]) + '&date='
+ str(date) + 'T00-0000&distance=' + str(distance)
+ '&API_KEY=' + str(token)))
url = query_url + (
'latLong/'
+ str(obs_type)
+ '/?'
+ 'format=text/csv'
+ '&latitude='
+ str(latlon[0])
+ '&longitude='
+ str(latlon[1])
+ '&date='
+ str(date)
+ 'T00-0000&distance='
+ str(distance)
+ '&API_KEY='
+ str(token)
)

df = pd.read_csv(url)

Expand All @@ -143,8 +201,9 @@ def get_airnow_obs(token, date=None, zipcode=None, latlon=None, distance=25):
return ds


def get_airnow_bounded_obs(token, start_date, end_date, latlon_bnds, parameters='OZONE,PM25', data_type='B',
mon_type=0):
def get_airnow_bounded_obs(
token, start_date, end_date, latlon_bnds, parameters='OZONE,PM25', data_type='B', mon_type=0
):
"""
Get AQI values or data concentrations for a specific date and time range and set of
    parameters within a geographic area of interest
Expand Down Expand Up @@ -184,16 +243,44 @@ def get_airnow_bounded_obs(token, start_date, end_date, latlon_bnds, parameters=
verbose = 1
inc_raw_con = 1

url = ('https://www.airnowapi.org/aq/data/?startDate=' + str(start_date)
+ '&endDate=' + str(end_date) + '&parameters=' + str(parameters)
+ '&BBOX=' + str(latlon_bnds) + '&dataType=' + str(data_type)
+ '&format=text/csv' + '&verbose=' + str(verbose)
+ '&monitorType=' + str(mon_type) + '&includerawconcentrations='
+ str(inc_raw_con) + '&API_KEY=' + str(token))
url = (
'https://www.airnowapi.org/aq/data/?startDate='
+ str(start_date)
+ '&endDate='
+ str(end_date)
+ '&parameters='
+ str(parameters)
+ '&BBOX='
+ str(latlon_bnds)
+ '&dataType='
+ str(data_type)
+ '&format=text/csv'
+ '&verbose='
+ str(verbose)
+ '&monitorType='
+ str(mon_type)
+ '&includerawconcentrations='
+ str(inc_raw_con)
+ '&API_KEY='
+ str(token)
)

# Set Column names
names = ['latitude', 'longitude', 'time', 'parameter', 'concentration', 'unit',
'raw_concentration', 'AQI', 'category', 'site_name', 'site_agency', 'aqs_id', 'full_aqs_id']
names = [
'latitude',
'longitude',
'time',
'parameter',
'concentration',
'unit',
'raw_concentration',
'AQI',
'category',
'site_name',
'site_agency',
'aqs_id',
'full_aqs_id',
]

# Read data into CSV
df = pd.read_csv(url, names=names)
Expand All @@ -211,12 +298,9 @@ def get_airnow_bounded_obs(token, start_date, end_date, latlon_bnds, parameters=
data_vars={
'latitude': (['sites'], latitude),
'longitude': (['sites'], longitude),
'aqs_id': (['sites'], aqs_id)
'aqs_id': (['sites'], aqs_id),
},
coords={
'time': (['time'], times),
'sites': (['sites'], sites)
}
coords={'time': (['time'], times), 'sites': (['sites'], sites)},
)

    # Set up empty data with nans
Expand All @@ -233,7 +317,11 @@ def get_airnow_bounded_obs(token, start_date, end_date, latlon_bnds, parameters=
data[v, t, s] = list(result[variables[v]])[0]
atts = {'units': ''}
else:
result = df.loc[(df['time'] == times[t]) & (df['site_name'] == sites[s]) & (df['parameter'] == variables[v])]
result = df.loc[
(df['time'] == times[t])
& (df['site_name'] == sites[s])
& (df['parameter'] == variables[v])
]
if len(result['concentration']) > 0:
data[v, t, s] = list(result['concentration'])[0]
atts = {'units': list(result['unit'])[0]}
Expand Down
17 changes: 12 additions & 5 deletions act/discovery/arm.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,10 +7,11 @@
import json
import os
import sys
from datetime import timedelta
import requests
import textwrap
import warnings
from datetime import timedelta

import requests

try:
from urllib.request import urlopen
Expand Down Expand Up @@ -163,7 +164,9 @@ def download_arm_data(username, token, datastream, startdate, enddate, time=None
open_bytes_file.write(data)
file_names.append(output_file)
# Get ARM DOI and print it out
doi = get_arm_doi(datastream, start_datetime.strftime('%Y-%m-%d'), end_datetime.strftime('%Y-%m-%d'))
doi = get_arm_doi(
datastream, start_datetime.strftime('%Y-%m-%d'), end_datetime.strftime('%Y-%m-%d')
)
print('\nIf you use these data to prepare a publication, please cite:\n')
print(textwrap.fill(doi, width=80))
print('')
Expand Down Expand Up @@ -197,13 +200,17 @@ def get_arm_doi(datastream, startdate, enddate):
"""

# Get the DOI information
doi_url = 'https://adc.arm.gov/citationservice/citation/datastream?id=' + datastream + '&citationType=apa'
doi_url = (
'https://adc.arm.gov/citationservice/citation/datastream?id='
+ datastream
+ '&citationType=apa'
)
doi_url += '&startDate=' + startdate
doi_url += '&endDate=' + enddate
try:
doi = requests.get(url=doi_url)
except ValueError as err:
return "Webservice potentially down or arguments are not valid: " + err
return 'Webservice potentially down or arguments are not valid: ' + err

if len(doi.text) > 0:
doi = doi.json()['citation']
Expand Down
2 changes: 1 addition & 1 deletion act/discovery/asos.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,11 +7,11 @@
import time
import warnings
from datetime import datetime
from io import StringIO

import numpy as np
import pandas as pd
import xarray as xr
from six import StringIO

try:
from urllib.request import urlopen
Expand Down
1 change: 1 addition & 0 deletions act/discovery/cropscape.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
"""

import datetime

import requests

try:
Expand Down
Loading

0 comments on commit bf4c70e

Please sign in to comment.