From c11bca135fe1e14ec376dba2739ad72a701fc720 Mon Sep 17 00:00:00 2001
From: MarcoHuebner <57489799+MarcoHuebner@users.noreply.github.com>
Date: Sun, 7 Aug 2022 12:15:04 +0200
Subject: [PATCH 01/27] added clean_cache function
Adds a function that removes either all files from the cache directory or a single file specified by name. Likely to be moved to cache.py once that module is available on dev.
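A minimal usage sketch (the file name below is only a placeholder):

    from pathlib import Path
    from pygenesis.config import clean_cache

    clean_cache(Path("some_cached_file.csv"))  # remove a single cached file (placeholder name)
    clean_cache(None)                           # remove everything from the cache directory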
---
src/pygenesis/config.py | 51 +++++++++++++++++++++++++++++++++++++++++
1 file changed, 51 insertions(+)
diff --git a/src/pygenesis/config.py b/src/pygenesis/config.py
index 9018a68..a10f977 100644
--- a/src/pygenesis/config.py
+++ b/src/pygenesis/config.py
@@ -8,8 +8,11 @@
If there is no config.ini in the given config_dir, a default config will be created with empty credentials.
"""
import logging
+import os
+import shutil
from configparser import ConfigParser
from pathlib import Path
+from typing import Optional
PKG_NAME = __name__.split(".", maxsplit=1)[0]
@@ -144,4 +147,52 @@ def _create_default_config() -> ConfigParser:
return config
+# TODO: Decide where this function should go... Maybe a feature of the new cache.py?
+def clean_cache(file: Optional[Path]) -> None:
+ """Clean the data cache by overall or specific file removal.
+
+ Args:
+ file (Path, optional): Path to the file which should be removed from cache directory.
+ """
+ config_file = get_config_path_from_settings()
+ config = _load_config(config_file)
+
+ # check for cache_dir in DATA section of the config.ini
+ if config.has_section("DATA"):
+ logger.info("Cache config %s was loaded successfully.", config_file)
+
+ if not config.get("DATA", "cache_dir") or not os.path.isdir(
+ config.get("DATA", "cache_dir")
+ ):
+ logger.critical(
+ "Cache directory not set and/or corrupted! "
+ "Please make sure to run init_config() and set up the data cache appropriately. "
+ )
+ raise KeyError(
+ "Issue with 'cache_dir' in the config.ini. Please rerun init_config()."
+ )
+
+ # load the folder path
+ cache_dir = config["DATA"]["cache_dir"]
+
+ # remove (previously specified) file(s) from the data cache
+ files = (
+ [os.path.join(cache_dir, file)]
+ if file is not None
+ else os.listdir(cache_dir)
+ )
+
+ for filename in files:
+ file_path = os.path.join(cache_dir, filename)
+ try:
+ if os.path.isfile(file_path) or os.path.islink(file_path):
+ os.unlink(file_path)
+ elif os.path.isdir(file_path):
+ shutil.rmtree(file_path)
+ except Exception as e:
+ print("Failed to delete %s. Reason: %s" % (file_path, e))
+
+ return None
+
+
create_settings()
From 5edc34f343f15659f710250e99a4e69058db3da3 Mon Sep 17 00:00:00 2001
From: MarcoHuebner <57489799+MarcoHuebner@users.noreply.github.com>
Date: Sun, 14 Aug 2022 20:17:37 +0200
Subject: [PATCH 02/27] Added ToDos, Unified query param comparison
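The unified handling merges the credentials in place into the caller-supplied params dict instead of building a fresh dict first. A quick sketch of the operator now used (Python 3.9+ dict merge; the username/password values are placeholders). Note that the in-place merge also modifies the dict passed in by the caller:

    params = {"name": "47414BJ002", "area": "all"}
    params |= {"username": "USER", "password": "PASS"}
    # params == {'name': '47414BJ002', 'area': 'all', 'username': 'USER', 'password': 'PASS'}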
---
src/pygenesis/destatis.py | 14 ++++++--------
src/pygenesis/http_helper.py | 2 ++
2 files changed, 8 insertions(+), 8 deletions(-)
diff --git a/src/pygenesis/destatis.py b/src/pygenesis/destatis.py
index fa53c5e..df94376 100644
--- a/src/pygenesis/destatis.py
+++ b/src/pygenesis/destatis.py
@@ -37,45 +37,43 @@ def get_metadata(endpoint: str, name: str):
return response.json()
-def get_catalogue(endpoint: str, query_params: dict):
+def get_catalogue(endpoint: str, params: dict):
"""Method for downloading catalogue data from www-genesis.destatis.de.
Args:
endpoint (str): One of the supported endpoints, e.g. cubes.
- query_params (dict): The query parameter as defined by the API.
+ params (dict): The query parameter as defined by the API.
Returns:
list: A list of hits in the catalog matching the query parameter.
"""
url = f"{config['GENESIS API']['base_url']}catalogue/{endpoint}"
- params = {
+ params |= {
"username": config["GENESIS API"]["username"],
"password": config["GENESIS API"]["password"],
}
- params |= query_params
response = requests.request("GET", url, params=params, verify=False)
return response.json()
-def get_cubefile(query_params: dict):
+def get_cubefile(params: dict):
"""Method for downloading cube files from www-genesis.destatis.de.
Args:
- query_params (dict): The query parameter as defined by the API.
+ params (dict): The query parameter as defined by the API.
Returns:
str: The content of the cubefile.
"""
url = f"{config['GENESIS API']['base_url']}data/cubefile"
- params = {
+ params |= {
"username": config["GENESIS API"]["username"],
"password": config["GENESIS API"]["password"],
}
- params |= query_params
response = requests.request("GET", url, params=params, verify=False)
diff --git a/src/pygenesis/http_helper.py b/src/pygenesis/http_helper.py
index a6bbbd9..6c32d1b 100644
--- a/src/pygenesis/http_helper.py
+++ b/src/pygenesis/http_helper.py
@@ -82,6 +82,8 @@ def _check_destatis_status_code(destatis_status_code: int) -> None:
Exception: Generic exception if the status code from destatis is equal
to -1
"""
+ # TODO: add handling of status code 0 for success (to not overlook any random errors) - is status 0 always there - e.g. not contained in docu examples + jobs,...
+ # TODO: also, maybe take full dict as parameter & raise Exception with "Content" from status
if destatis_status_code == -1:
raise Exception(
"Error: There is a system error.\
From 0faa29a23576153185ae8db2ed0e30cf6a31e3a7 Mon Sep 17 00:00:00 2001
From: MarcoHuebner <57489799+MarcoHuebner@users.noreply.github.com>
Date: Sun, 14 Aug 2022 22:14:23 +0200
Subject: [PATCH 03/27] Updated destatis status check #45
Incorporated further response codes and response type checks.
Output for Code 0 tbd
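Roughly, the new check behaves as follows (the status dicts are illustrative, with contents taken from the documented codes; each call shown in isolation):

    from pygenesis.http_helper import _check_destatis_status

    _check_destatis_status({"Code": 0, "Type": "Information", "Content": "erfolgreich"})
    # passes silently
    _check_destatis_status({"Code": 22, "Type": "Warnung", "Content": "erfolgreich mit Parameteranpassung"})
    # emits a UserWarning carrying the Content text
    _check_destatis_status({"Code": 104, "Type": "Information", "Content": "Kein passendes Objekt zu Suche"})
    # raises an Exception carrying the Content text
    _check_destatis_status({})
    # empty status dict -> Code defaults to -1 -> generic system error is raised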
---
src/pygenesis/http_helper.py | 36 +++++++++++++++++++++++++++++-------
1 file changed, 29 insertions(+), 7 deletions(-)
diff --git a/src/pygenesis/http_helper.py b/src/pygenesis/http_helper.py
index 6c32d1b..f8e2242 100644
--- a/src/pygenesis/http_helper.py
+++ b/src/pygenesis/http_helper.py
@@ -1,4 +1,6 @@
"""Wrapper module for the data endpoint."""
+import warnings
+
import requests
from pygenesis.config import load_config
@@ -66,26 +68,46 @@ def _check_invalid_destatis_status_code(response: requests.Response) -> None:
response_dict = response.json()
except ValueError:
return None
- _check_destatis_status_code(response_dict.get("Status", {}).get("Code"))
+ _check_destatis_status(response_dict.get("Status", {}))
return None
-def _check_destatis_status_code(destatis_status_code: int) -> None:
+def _check_destatis_status(destatis_status: dict) -> None:
"""
- Helper method which checks the status code from destatis.
- If the status code is not valid an exception will be raised.
+ Helper method which checks the status message from destatis.
+ If the status message is erroneous an exception will be raised.
+
+ Possible Codes (2.1.2 Grundstruktur der Responses):
+ - 0: "erfolgreich" (Type: "Information")
+ - 22: "erfolgreich mit Parameteranpassung" (Type: "Warnung")
+ - 104: "Kein passendes Objekt zu Suche" (Type: "Information")
Args:
- destatis_status_code (int): Status code from destatis
+ destatis_status (dict): Status response dict from destatis
Raises:
Exception: Generic exception if the status code from destatis is equal
to -1
"""
- # TODO: add handling of status code 0 for success (to not overlook any random errors) - is status 0 always there - e.g. not contained in docu examples + jobs,...
- # TODO: also, maybe take full dict as parameter & raise Exception with "Content" from status
+ # -1 is a status code that according to the documentation should not occur
+ # and thus only is found if the status response dict is empty
+ destatis_status_code = destatis_status.get("Code", -1)
+ destatis_status_type = destatis_status.get("Type")
+ destatis_status_content = destatis_status.get("Content")
+
+ # check for generic/ system error
if destatis_status_code == -1:
raise Exception(
"Error: There is a system error.\
Please check your query parameters."
)
+
+ # check for destatis/ query errors
+ elif (destatis_status_code == 104) or (destatis_status_type == "Error"):
+ raise Exception(destatis_status_content)
+
+ # print warnings to user
+ elif (destatis_status_code == 22) or (destatis_status_type == "Warnung"):
+ warnings.warn(destatis_status_content, UserWarning, stacklevel=2)
+
+ return None
From 79da990e2653e0155032de9203feb72e922e8aaa Mon Sep 17 00:00:00 2001
From: MarcoHuebner <57489799+MarcoHuebner@users.noreply.github.com>
Date: Thu, 18 Aug 2022 21:59:07 +0200
Subject: [PATCH 04/27] Merged updates from personal branch
---
src/pygenesis/http_helper.py | 12 ++++++++----
1 file changed, 8 insertions(+), 4 deletions(-)
diff --git a/src/pygenesis/http_helper.py b/src/pygenesis/http_helper.py
index f8e2242..065ad64 100644
--- a/src/pygenesis/http_helper.py
+++ b/src/pygenesis/http_helper.py
@@ -86,8 +86,7 @@ def _check_destatis_status(destatis_status: dict) -> None:
destatis_status (dict): Status response dict from destatis
Raises:
- Exception: Generic exception if the status code from destatis is equal
- to -1
+ Exception: Generic exception if the status code displays an error
"""
# -1 is a status code that according to the documentation should not occur
# and thus only is found if the status response dict is empty
@@ -95,6 +94,9 @@ def _check_destatis_status(destatis_status: dict) -> None:
destatis_status_type = destatis_status.get("Type")
destatis_status_content = destatis_status.get("Content")
+ error_en_de = ["Error", "Fehler"]
+ warning_en_de = ["Warning", "Warnung"]
+
# check for generic/ system error
if destatis_status_code == -1:
raise Exception(
@@ -103,11 +105,13 @@ def _check_destatis_status(destatis_status: dict) -> None:
)
# check for destatis/ query errors
- elif (destatis_status_code == 104) or (destatis_status_type == "Error"):
+ elif (destatis_status_code == 104) or (destatis_status_type in error_en_de):
raise Exception(destatis_status_content)
# print warnings to user
- elif (destatis_status_code == 22) or (destatis_status_type == "Warnung"):
+ elif (destatis_status_code == 22) or (
+ destatis_status_type in warning_en_de
+ ):
warnings.warn(destatis_status_content, UserWarning, stacklevel=2)
return None
From 847e1b20dffa5da3ca8ab89827e8f526459c3a33 Mon Sep 17 00:00:00 2001
From: MarcoHuebner <57489799+MarcoHuebner@users.noreply.github.com>
Date: Fri, 19 Aug 2022 08:13:28 +0200
Subject: [PATCH 05/27] Added http 5xx error check, added destatis status tests
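The widened HTTP check now treats every 4xx and 5xx code as an error via integer division; a quick illustration:

    for status_code in (200, 301, 404, 503):
        print(status_code, (status_code // 100) in [4, 5])
    # 200 False, 301 False, 404 True, 503 True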
---
src/pygenesis/http_helper.py | 8 ++-
tests/unit_tests/test_http_helper.py | 93 +++++++++++++++++++++++++---
2 files changed, 91 insertions(+), 10 deletions(-)
diff --git a/src/pygenesis/http_helper.py b/src/pygenesis/http_helper.py
index 065ad64..3fd269a 100644
--- a/src/pygenesis/http_helper.py
+++ b/src/pygenesis/http_helper.py
@@ -36,6 +36,7 @@ def get_response_from_endpoint(
_check_invalid_status_code(response.status_code)
_check_invalid_destatis_status_code(response)
+
return response
@@ -47,13 +48,15 @@ def _check_invalid_status_code(status_code: int) -> None:
status_code (int): Status code from the response object
Raises:
- Exception: Generic exception if 401 is returned
+ Exception: Generic exception if status 4xx or 5xx is returned
"""
- if (status_code // 100) == 4:
+ if (status_code // 100) in [4, 5]:
raise Exception(
f"Error {status_code}: The server returned a {status_code} status code"
)
+ return None
+
def _check_invalid_destatis_status_code(response: requests.Response) -> None:
"""
@@ -69,6 +72,7 @@ def _check_invalid_destatis_status_code(response: requests.Response) -> None:
except ValueError:
return None
_check_destatis_status(response_dict.get("Status", {}))
+
return None
diff --git a/tests/unit_tests/test_http_helper.py b/tests/unit_tests/test_http_helper.py
index 6566cbe..c4a4916 100644
--- a/tests/unit_tests/test_http_helper.py
+++ b/tests/unit_tests/test_http_helper.py
@@ -1,6 +1,12 @@
import pytest
-from src.pygenesis.http_helper import _check_invalid_status_code
+from src.pygenesis.http_helper import (
+ _check_destatis_status,
+ _check_invalid_destatis_status_code,
+ _check_invalid_status_code,
+)
+
+# TODO: Add generic dummy request to the server, which is not getting us timed out?
def test__check_invalid_status_code_with_error():
@@ -8,13 +14,13 @@ def test__check_invalid_status_code_with_error():
Basic tests to check an error status code (4xx, 5xx)
for _handle_status_code method.
"""
- status_code = 400
- with pytest.raises(Exception) as e:
- _check_invalid_status_code(status_code)
- assert (
- str(e.value)
- == f"Error {status_code}: The server returned a {status_code} status code"
- )
+ for status_code in [400, 500]:
+ with pytest.raises(Exception) as e:
+ _check_invalid_status_code(status_code)
+ assert (
+ str(e.value)
+ == f"Error {status_code}: The server returned a {status_code} status code"
+ )
def test__check_invalid_status_code_without_error():
@@ -27,3 +33,74 @@ def test__check_invalid_status_code_without_error():
_check_invalid_status_code(status_code)
except Exception:
assert False
+
+
+def _status_dict_helper(code: int = 0, type_: str = "Information") -> dict:
+ """ """
+ status_dict = {
+ "Code": code,
+ "Content": "erfolgreich",
+ "Type": type_,
+ }
+
+ # TODO: add raw response
+ """status_dict = {
+ "Ident":{
+ "Service":"find",
+ "Method":"find"
+ },
+ "Status": {
+ "Code": code,
+ "Content": "erfolgreich",
+ "Type": type_,
+ },
+ }"""
+
+ return status_dict
+
+
+# TODO: Is implementation of raw requests response type for actual
+# _check_invalid_destatis_status_code test possible?
+def test__check_invalid_destatis_status_code_with_error():
+ """
+ Basic tests to check an error status code as defined in the
+ documentation via code (e.g. 104) or name ('Error', 'Fehler').
+ """
+ for status_dict in [
+ _status_dict_helper(code=104),
+ _status_dict_helper(type_="Error"),
+ _status_dict_helper(type_="Fehler"),
+ ]:
+ status_content = status_dict.get("Content")
+
+ with pytest.raises(Exception) as e:
+ _check_destatis_status(status_dict)
+ assert str(e.value) == status_content
+
+
+def test__check_invalid_destatis_status_code_with_warning():
+ """
+ Basic tests to check a warning status code as defined in the
+ documentation via code (e.g. 22) or name ('Warning', 'Warnung').
+ """
+
+ for status_dict in [
+ _status_dict_helper(code=22),
+ _status_dict_helper(type_="Warnung"),
+ _status_dict_helper(type_="Warning"),
+ ]:
+ # TODO: Is the best/ most specific way to capture the warning?
+ with pytest.warns(UserWarning):
+ _check_destatis_status(status_dict)
+
+
+def test__check_invalid_destatis_status_code_without_error():
+ """
+ Basic tests to check the successful status code 0 as defined in the documentation.
+ """
+ status_dict = _status_dict_helper()
+
+ try:
+ _check_destatis_status(status_dict)
+ except Exception:
+ assert False
From 1f8a9339a5cd5ff6b12d69d06cce1e2c00f1c6de Mon Sep 17 00:00:00 2001
From: MarcoHuebner <57489799+MarcoHuebner@users.noreply.github.com>
Date: Sat, 20 Aug 2022 11:40:20 +0200
Subject: [PATCH 06/27] Updated urls, unified Destatis type-style
---
src/pygenesis/http_helper.py | 12 ++++++------
1 file changed, 6 insertions(+), 6 deletions(-)
diff --git a/src/pygenesis/http_helper.py b/src/pygenesis/http_helper.py
index 3fd269a..2427385 100644
--- a/src/pygenesis/http_helper.py
+++ b/src/pygenesis/http_helper.py
@@ -12,7 +12,7 @@ def get_response_from_endpoint(
endpoint: str, method: str, params: dict
) -> requests.Response:
"""
- Wrapper method which constructs a url for querying data from destatis and
+ Wrapper method which constructs a url for querying data from Destatis and
sends a GET request.
Args:
@@ -21,9 +21,9 @@ def get_response_from_endpoint(
params (dict): dictionary of query parameters
Returns:
- requests.Response: the response from destatis
+ requests.Response: the response from Destatis
"""
- url = f"{config['GENESIS API']['base_url']}/{endpoint}/{method}"
+ url = f"{config['GENESIS API']['base_url']}{endpoint}/{method}"
params |= {
"username": config["GENESIS API"]["username"],
@@ -60,7 +60,7 @@ def _check_invalid_status_code(status_code: int) -> None:
def _check_invalid_destatis_status_code(response: requests.Response) -> None:
"""
- Helper method which handles the status code returned from destatis
+ Helper method which handles the status code returned from Destatis
(if exists)
Args:
@@ -78,7 +78,7 @@ def _check_invalid_destatis_status_code(response: requests.Response) -> None:
def _check_destatis_status(destatis_status: dict) -> None:
"""
- Helper method which checks the status message from destatis.
+ Helper method which checks the status message from Destatis.
If the status message is erroneous an exception will be raised.
Possible Codes (2.1.2 Grundstruktur der Responses):
@@ -87,7 +87,7 @@ def _check_destatis_status(destatis_status: dict) -> None:
- 104: "Kein passendes Objekt zu Suche" (Type: "Information")
Args:
- destatis_status (dict): Status response dict from destatis
+ destatis_status (dict): Status response dict from Destatis
Raises:
Exception: Generic exception if the status code displays an error
From 6c29d950d230b789bd561d31d38abe229bf2489b Mon Sep 17 00:00:00 2001
From: MarcoHuebner <57489799+MarcoHuebner@users.noreply.github.com>
Date: Sat, 20 Aug 2022 11:41:09 +0200
Subject: [PATCH 07/27] Added generic response creation for generic http_helper
test
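The new helper builds a bare requests.Response by hand, which is enough for .json()/.text in the tests. A minimal sketch of the idea:

    import json
    import requests

    response = requests.Response()
    response.status_code = 200
    response._content = json.dumps({"Status": {"Code": 0, "Type": "Information"}}).encode("utf-8")
    print(response.json())  # {'Status': {'Code': 0, 'Type': 'Information'}}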
---
tests/unit_tests/test_http_helper.py | 105 +++++++++++++++++----------
1 file changed, 65 insertions(+), 40 deletions(-)
diff --git a/tests/unit_tests/test_http_helper.py b/tests/unit_tests/test_http_helper.py
index c4a4916..f7a03a6 100644
--- a/tests/unit_tests/test_http_helper.py
+++ b/tests/unit_tests/test_http_helper.py
@@ -1,12 +1,15 @@
+import json
+
import pytest
+import requests
from src.pygenesis.http_helper import (
- _check_destatis_status,
_check_invalid_destatis_status_code,
_check_invalid_status_code,
)
-# TODO: Add generic dummy request to the server, which is not getting us timed out?
+# TODO: Add generic dummy request to the server, which is not getting us timed out,
+# to test get_response_from_endpoint completely?
def test__check_invalid_status_code_with_error():
@@ -35,72 +38,94 @@ def test__check_invalid_status_code_without_error():
assert False
-def _status_dict_helper(code: int = 0, type_: str = "Information") -> dict:
- """ """
- status_dict = {
- "Code": code,
- "Content": "erfolgreich",
- "Type": type_,
- }
+def _generic_request_status(
+ status_response: bool = True,
+ code: int = 0,
+ status_type: str = "Information",
+) -> requests.Response:
+ """
+ Helper method to create a generic requests.Response that covers all Destatis answers
+
+ Args:
+ status_response (bool): Whether Destatis answer contains a status response
+ code (int): Status response code
+ status_type (str): Status response type/ name
- # TODO: add raw response
- """status_dict = {
- "Ident":{
- "Service":"find",
- "Method":"find"
+ Returns:
+ requests.Response: the response from Destatis
+ """
+ # define possible status dict and texts
+ status_dict = {
+ "Ident": {
+ "Service": "A DESTATIS service",
+ "Method": "A DESTATIS method",
},
"Status": {
"Code": code,
- "Content": "erfolgreich",
- "Type": type_,
+ "Content": "Erfolg/ Success/ Some Issue",
+ "Type": status_type,
},
- }"""
+ }
+
+ response_text = "Some text for a successful response without status..."
- return status_dict
+ # set up generic requests.Response
+ request_status = requests.Response()
+ request_status.status_code = 200 # success
+
+ # TODO: Why is specific (UTF-8) encoding necessary?
+ if status_response:
+
+ request_status._content = json.dumps(status_dict).encode("utf-8")
+ else:
+ request_status._content = response_text.encode("utf-8")
+
+ return request_status
-# TODO: Is implementation of raw requests response type for actual
-# _check_invalid_destatis_status_code test possible?
def test__check_invalid_destatis_status_code_with_error():
"""
Basic tests to check an error status code as defined in the
- documentation via code (e.g. 104) or name ('Error', 'Fehler').
+ documentation via code (e.g. 104) or type ('Error', 'Fehler').
"""
- for status_dict in [
- _status_dict_helper(code=104),
- _status_dict_helper(type_="Error"),
- _status_dict_helper(type_="Fehler"),
+ for status in [
+ _generic_request_status(code=104),
+ _generic_request_status(status_type="Error"),
+ _generic_request_status(status_type="Fehler"),
]:
- status_content = status_dict.get("Content")
+ # extract status content which is raised
+ status_content = status.json().get("Status").get("Content")
with pytest.raises(Exception) as e:
- _check_destatis_status(status_dict)
+ _check_invalid_destatis_status_code(status)
assert str(e.value) == status_content
def test__check_invalid_destatis_status_code_with_warning():
"""
Basic tests to check a warning status code as defined in the
- documentation via code (e.g. 22) or name ('Warning', 'Warnung').
+ documentation via code (e.g. 22) or type ('Warning', 'Warnung').
"""
- for status_dict in [
- _status_dict_helper(code=22),
- _status_dict_helper(type_="Warnung"),
- _status_dict_helper(type_="Warning"),
+ for status in [
+ _generic_request_status(code=22),
+ _generic_request_status(status_type="Warnung"),
+ _generic_request_status(status_type="Warning"),
]:
# TODO: Is the best/ most specific way to capture the warning?
with pytest.warns(UserWarning):
- _check_destatis_status(status_dict)
+ _check_invalid_destatis_status_code(status)
def test__check_invalid_destatis_status_code_without_error():
"""
- Basic tests to check the successful status code 0 as defined in the documentation.
+ Basic tests to check the successful status code 0 or a text-only response, as defined in the documentation.
"""
- status_dict = _status_dict_helper()
-
- try:
- _check_destatis_status(status_dict)
- except Exception:
- assert False
+ for status in [
+ _generic_request_status(),
+ _generic_request_status(status_response=False),
+ ]:
+ try:
+ _check_invalid_destatis_status_code(status)
+ except Exception:
+ assert False
From 0d3a57ea9e355c46b52cb8aff1ae765d1f155470 Mon Sep 17 00:00:00 2001
From: MarcoHuebner <57489799+MarcoHuebner@users.noreply.github.com>
Date: Sat, 20 Aug 2022 11:42:27 +0200
Subject: [PATCH 08/27] Updated type hints, made contents rely on functionality
from http_helpers
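With the credentials now injected inside get_response_from_endpoint, the wrappers only forward query parameters. A usage sketch (parameters taken from the cubefile notebook; assumes a configured config.ini with valid credentials):

    from pygenesis.destatis import get_cubefile

    raw = get_cubefile(
        {"name": "47414BJ002", "area": "all", "values": "true",
         "metadata": "true", "additionals": "false"}
    )
    print(raw.splitlines()[:3])  # first lines of the raw cubefile text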
---
src/pygenesis/destatis.py | 41 +++++++++------------------------------
1 file changed, 9 insertions(+), 32 deletions(-)
diff --git a/src/pygenesis/destatis.py b/src/pygenesis/destatis.py
index df94376..e17b4f3 100644
--- a/src/pygenesis/destatis.py
+++ b/src/pygenesis/destatis.py
@@ -1,12 +1,11 @@
"""Module provides functions to work with the GENESIS REST-API."""
-import requests
-
from pygenesis.config import load_config
+from pygenesis.http_helper import get_response_from_endpoint
config = load_config()
-def get_metadata(endpoint: str, name: str):
+def get_metadata(endpoint: str, name: str) -> str:
"""Method for downloading metadata from www-genesis.destatis.de.
Method supports the following endpoints:
@@ -22,22 +21,16 @@ def get_metadata(endpoint: str, name: str):
name (str): Unique name of the object.
Returns:
- dict: Content of "Object" response.
+ str: Content of "Object" response.
"""
- url = f"{config['GENESIS API']['base_url']}metadata/{endpoint}"
-
params = {
- "username": config["GENESIS API"]["username"],
- "password": config["GENESIS API"]["password"],
"name": name,
}
- response = requests.request("GET", url, params=params, verify=False)
-
- return response.json()
+ return get_response_from_endpoint("metadata", endpoint, params).text
-def get_catalogue(endpoint: str, params: dict):
+def get_catalogue(endpoint: str, params: dict) -> dict:
"""Method for downloading catalogue data from www-genesis.destatis.de.
Args:
@@ -45,21 +38,13 @@ def get_catalogue(endpoint: str, params: dict):
params (dict): The query parameter as defined by the API.
Returns:
- list: A list of hits in the catalog matching the query parameter.
+ dict: JSON formatted response for the given query parameters.
"""
- url = f"{config['GENESIS API']['base_url']}catalogue/{endpoint}"
-
- params |= {
- "username": config["GENESIS API"]["username"],
- "password": config["GENESIS API"]["password"],
- }
- response = requests.request("GET", url, params=params, verify=False)
+ return get_response_from_endpoint("catalogue", endpoint, params).json()
- return response.json()
-
-def get_cubefile(params: dict):
+def get_cubefile(params: dict) -> str:
"""Method for downloading cube files from www-genesis.destatis.de.
Args:
@@ -68,13 +53,5 @@ def get_cubefile(params: dict):
Returns:
str: The content of the cubefile.
"""
- url = f"{config['GENESIS API']['base_url']}data/cubefile"
-
- params |= {
- "username": config["GENESIS API"]["username"],
- "password": config["GENESIS API"]["password"],
- }
-
- response = requests.request("GET", url, params=params, verify=False)
- return response.text
+ return get_response_from_endpoint("data", "cubefile", params).text
From a2a6e79c6835b0e0d4f5d1edf20492cbe0a0b666 Mon Sep 17 00:00:00 2001
From: MarcoHuebner <57489799+MarcoHuebner@users.noreply.github.com>
Date: Wed, 24 Aug 2022 21:07:56 +0200
Subject: [PATCH 09/27] Merged current dev into feature branch.
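The new data endpoint wrapper caches each download under <cache_dir>/<name>/<YYYYMMDD>/<name>.xz and serves repeated calls from disk. A usage sketch taken from the added notebooks (assumes init_config() has been run):

    from pygenesis.data import get_data

    params = {"values": "true", "metadata": "true", "additionals": "false"}
    # the first call downloads and writes the cache, the second one is read from disk
    data = get_data(name="47414BJ002", method="cubefile", **params)
    data = get_data(name="47414BJ002", method="cubefile", **params)
    print(data.head())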
---
.pylintrc | 2 +-
nb/download_cubefile.ipynb | 201 ++++++++++++++++++++++++++++++++++++
nb/download_tablefile.ipynb | 137 ++++++++++++++++++++----
nb/parse_cube.ipynb | 168 +++++++++++-------------------
src/pygenesis/cache.py | 52 ++++++++++
src/pygenesis/config.py | 4 +
src/pygenesis/cube.py | 123 ++++++++++------------
src/pygenesis/data.py | 49 +++++++++
src/pygenesis/table.py | 31 ++----
tests/test_cache.py | 92 +++++++++++++++++
tests/test_config.py | 8 +-
11 files changed, 643 insertions(+), 224 deletions(-)
create mode 100644 nb/download_cubefile.ipynb
create mode 100644 src/pygenesis/cache.py
create mode 100644 src/pygenesis/data.py
create mode 100644 tests/test_cache.py
diff --git a/.pylintrc b/.pylintrc
index 8d6a9b1..4deabf8 100644
--- a/.pylintrc
+++ b/.pylintrc
@@ -12,7 +12,7 @@ ignore=third_party
# Files or directories matching the regex patterns are skipped. The regex
# matches against base names, not paths.
-ignore-patterns=
+ignore-patterns=test_.*
# Pickle collected data for later comparisons.
persistent=no
diff --git a/nb/download_cubefile.ipynb b/nb/download_cubefile.ipynb
new file mode 100644
index 0000000..eb5a2a2
--- /dev/null
+++ b/nb/download_cubefile.ipynb
@@ -0,0 +1,201 @@
+{
+ "cells": [
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "id": "69e1d305",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "%load_ext autoreload\n",
+ "%autoreload 2"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "id": "ff9eca4f",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# only if you get an error from below\n",
+ "# from pygenesis import init_config\n",
+ "# init_config()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "id": "4a207a77",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import pandas as pd\n",
+ "\n",
+ "from pygenesis.data import get_data"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 10,
+ "id": "6e6df177",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "params = {\"values\": \"true\", \"metadata\": \"true\", \"additionals\": \"false\"}\n",
+ "data = get_data(name=\"47414BJ002\", method=\"cubefile\", **params)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 11,
+ "id": "a8bcd5b4",
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " \n",
+ " | \n",
+ " DINSG | \n",
+ " WZ08N7 | \n",
+ " WERTE4 | \n",
+ " JAHR | \n",
+ " UMS103 | \n",
+ " QUALITAET | \n",
+ " GESPERRT | \n",
+ " WERT-VERFAELSCHT | \n",
+ "
\n",
+ " \n",
+ " \n",
+ " \n",
+ " 0 | \n",
+ " DG | \n",
+ " WZ08-49-01 | \n",
+ " NOMINAL | \n",
+ " 2015 | \n",
+ " 100.0 | \n",
+ " e | \n",
+ " NaN | \n",
+ " 0.0 | \n",
+ "
\n",
+ " \n",
+ " 1 | \n",
+ " DG | \n",
+ " WZ08-49-01 | \n",
+ " NOMINAL | \n",
+ " 2016 | \n",
+ " 99.3 | \n",
+ " e | \n",
+ " NaN | \n",
+ " 0.0 | \n",
+ "
\n",
+ " \n",
+ " 2 | \n",
+ " DG | \n",
+ " WZ08-49-01 | \n",
+ " NOMINAL | \n",
+ " 2017 | \n",
+ " 105.7 | \n",
+ " e | \n",
+ " NaN | \n",
+ " 0.0 | \n",
+ "
\n",
+ " \n",
+ " 3 | \n",
+ " DG | \n",
+ " WZ08-49-01 | \n",
+ " NOMINAL | \n",
+ " 2018 | \n",
+ " 111.6 | \n",
+ " e | \n",
+ " NaN | \n",
+ " 0.0 | \n",
+ "
\n",
+ " \n",
+ " 4 | \n",
+ " DG | \n",
+ " WZ08-49-01 | \n",
+ " NOMINAL | \n",
+ " 2019 | \n",
+ " 115.6 | \n",
+ " e | \n",
+ " NaN | \n",
+ " 0.0 | \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
"
+ ],
+ "text/plain": [
+ " DINSG WZ08N7 WERTE4 JAHR UMS103 QUALITAET GESPERRT \\\n",
+ "0 DG WZ08-49-01 NOMINAL 2015 100.0 e NaN \n",
+ "1 DG WZ08-49-01 NOMINAL 2016 99.3 e NaN \n",
+ "2 DG WZ08-49-01 NOMINAL 2017 105.7 e NaN \n",
+ "3 DG WZ08-49-01 NOMINAL 2018 111.6 e NaN \n",
+ "4 DG WZ08-49-01 NOMINAL 2019 115.6 e NaN \n",
+ "\n",
+ " WERT-VERFAELSCHT \n",
+ "0 0.0 \n",
+ "1 0.0 \n",
+ "2 0.0 \n",
+ "3 0.0 \n",
+ "4 0.0 "
+ ]
+ },
+ "execution_count": 11,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "data.head()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "fed610c9",
+ "metadata": {},
+ "outputs": [],
+ "source": []
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3 (ipykernel)",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.9.7"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/nb/download_tablefile.ipynb b/nb/download_tablefile.ipynb
index 8cef7a0..8bb7743 100644
--- a/nb/download_tablefile.ipynb
+++ b/nb/download_tablefile.ipynb
@@ -12,20 +12,12 @@
},
{
"cell_type": "code",
- "execution_count": 2,
+ "execution_count": 3,
"metadata": {},
- "outputs": [
- {
- "name": "stderr",
- "output_type": "stream",
- "text": [
- "Error while loading the config file. Could not find C:\\Users\\micha\\AppData\\Local\\Temp\\pytest-of-micha\\pytest-78\\.pygenesis3\\config.ini. Please make sure to run init_config() first. \n"
- ]
- }
- ],
+ "outputs": [],
"source": [
"import pandas as pd\n",
- "from pygenesis.table import get_tablefile_data"
+ "from pygenesis.data import get_data"
]
},
{
@@ -44,27 +36,27 @@
},
{
"cell_type": "code",
- "execution_count": 3,
+ "execution_count": 10,
"metadata": {
"scrolled": true
},
"outputs": [],
"source": [
- "data = get_tablefile_data(\"61111-0002\", table_area=all)"
+ "data = get_data(name=\"61111-0002\", method=\"tablefile\", table_area=all)"
]
},
{
"cell_type": "code",
- "execution_count": 4,
+ "execution_count": 11,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
- "(pandas.core.frame.DataFrame, 20)"
+ "(pandas.core.frame.DataFrame, 24)"
]
},
- "execution_count": 4,
+ "execution_count": 11,
"metadata": {},
"output_type": "execute_result"
}
@@ -75,7 +67,7 @@
},
{
"cell_type": "code",
- "execution_count": 5,
+ "execution_count": 12,
"metadata": {},
"outputs": [
{
@@ -498,6 +490,82 @@
" ... | \n",
" ... | \n",
" \n",
+ " \n",
+ " 20 | \n",
+ " 61111 | \n",
+ " Verbraucherpreisindex für Deutschland | \n",
+ " JAHR | \n",
+ " Jahr | \n",
+ " 2022 | \n",
+ " DINSG | \n",
+ " Deutschland insgesamt | \n",
+ " DG | \n",
+ " Deutschland | \n",
+ " MONAT | \n",
+ " Monate | \n",
+ " MONAT09 | \n",
+ " September | \n",
+ " ... | \n",
+ " ... | \n",
+ " ... | \n",
+ "
\n",
+ " \n",
+ " 21 | \n",
+ " 61111 | \n",
+ " Verbraucherpreisindex für Deutschland | \n",
+ " JAHR | \n",
+ " Jahr | \n",
+ " 2022 | \n",
+ " DINSG | \n",
+ " Deutschland insgesamt | \n",
+ " DG | \n",
+ " Deutschland | \n",
+ " MONAT | \n",
+ " Monate | \n",
+ " MONAT10 | \n",
+ " Oktober | \n",
+ " ... | \n",
+ " ... | \n",
+ " ... | \n",
+ "
\n",
+ " \n",
+ " 22 | \n",
+ " 61111 | \n",
+ " Verbraucherpreisindex für Deutschland | \n",
+ " JAHR | \n",
+ " Jahr | \n",
+ " 2022 | \n",
+ " DINSG | \n",
+ " Deutschland insgesamt | \n",
+ " DG | \n",
+ " Deutschland | \n",
+ " MONAT | \n",
+ " Monate | \n",
+ " MONAT11 | \n",
+ " November | \n",
+ " ... | \n",
+ " ... | \n",
+ " ... | \n",
+ "
\n",
+ " \n",
+ " 23 | \n",
+ " 61111 | \n",
+ " Verbraucherpreisindex für Deutschland | \n",
+ " JAHR | \n",
+ " Jahr | \n",
+ " 2022 | \n",
+ " DINSG | \n",
+ " Deutschland insgesamt | \n",
+ " DG | \n",
+ " Deutschland | \n",
+ " MONAT | \n",
+ " Monate | \n",
+ " MONAT12 | \n",
+ " Dezember | \n",
+ " ... | \n",
+ " ... | \n",
+ " ... | \n",
+ "
\n",
" \n",
"\n",
""
@@ -524,6 +592,10 @@
"17 61111 Verbraucherpreisindex für Deutschland JAHR \n",
"18 61111 Verbraucherpreisindex für Deutschland JAHR \n",
"19 61111 Verbraucherpreisindex für Deutschland JAHR \n",
+ "20 61111 Verbraucherpreisindex für Deutschland JAHR \n",
+ "21 61111 Verbraucherpreisindex für Deutschland JAHR \n",
+ "22 61111 Verbraucherpreisindex für Deutschland JAHR \n",
+ "23 61111 Verbraucherpreisindex für Deutschland JAHR \n",
"\n",
" Zeit_Label Zeit 1_Merkmal_Code 1_Merkmal_Label 1_Auspraegung_Code \\\n",
"0 Jahr 2021 DINSG Deutschland insgesamt DG \n",
@@ -546,6 +618,10 @@
"17 Jahr 2022 DINSG Deutschland insgesamt DG \n",
"18 Jahr 2022 DINSG Deutschland insgesamt DG \n",
"19 Jahr 2022 DINSG Deutschland insgesamt DG \n",
+ "20 Jahr 2022 DINSG Deutschland insgesamt DG \n",
+ "21 Jahr 2022 DINSG Deutschland insgesamt DG \n",
+ "22 Jahr 2022 DINSG Deutschland insgesamt DG \n",
+ "23 Jahr 2022 DINSG Deutschland insgesamt DG \n",
"\n",
" 1_Auspraegung_Label 2_Merkmal_Code 2_Merkmal_Label 2_Auspraegung_Code \\\n",
"0 Deutschland MONAT Monate MONAT01 \n",
@@ -568,6 +644,10 @@
"17 Deutschland MONAT Monate MONAT06 \n",
"18 Deutschland MONAT Monate MONAT07 \n",
"19 Deutschland MONAT Monate MONAT08 \n",
+ "20 Deutschland MONAT Monate MONAT09 \n",
+ "21 Deutschland MONAT Monate MONAT10 \n",
+ "22 Deutschland MONAT Monate MONAT11 \n",
+ "23 Deutschland MONAT Monate MONAT12 \n",
"\n",
" 2_Auspraegung_Label PREIS1__Verbraucherpreisindex__2015=100 \\\n",
"0 Januar 106,3 \n",
@@ -590,6 +670,10 @@
"17 Juni 117,4 \n",
"18 Juli ... \n",
"19 August ... \n",
+ "20 September ... \n",
+ "21 Oktober ... \n",
+ "22 November ... \n",
+ "23 Dezember ... \n",
"\n",
" CH0004__Veraenderung_zum_Vorjahresmonat__in_(%) \\\n",
"0 +1,0 \n",
@@ -612,6 +696,10 @@
"17 +7,6 \n",
"18 ... \n",
"19 ... \n",
+ "20 ... \n",
+ "21 ... \n",
+ "22 ... \n",
+ "23 ... \n",
"\n",
" CH0005__Veraenderung_zum_Vormonat__in_(%) \n",
"0 +0,8 \n",
@@ -633,10 +721,14 @@
"16 +0,9 \n",
"17 +0,1 \n",
"18 ... \n",
- "19 ... "
+ "19 ... \n",
+ "20 ... \n",
+ "21 ... \n",
+ "22 ... \n",
+ "23 ... "
]
},
- "execution_count": 5,
+ "execution_count": 12,
"metadata": {},
"output_type": "execute_result"
}
@@ -644,6 +736,13 @@
"source": [
"data"
]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": []
}
],
"metadata": {
diff --git a/nb/parse_cube.ipynb b/nb/parse_cube.ipynb
index e2fa37b..6c84c2e 100644
--- a/nb/parse_cube.ipynb
+++ b/nb/parse_cube.ipynb
@@ -16,7 +16,7 @@
"metadata": {},
"outputs": [],
"source": [
- "# only if you get an error from above\n",
+ "# only if you get an error from below\n",
"# from pygenesis import init_config\n",
"# init_config()"
]
@@ -29,8 +29,8 @@
"source": [
"import pandas as pd\n",
"\n",
- "from pygenesis.destatis import get_cubefile\n",
- "from pygenesis.cube import parse_cube, rename_axes"
+ "from pygenesis.cube import parse_cube, rename_axes\n",
+ "from pygenesis.http_helper import get_response_from_endpoint"
]
},
{
@@ -53,23 +53,16 @@
"metadata": {
"scrolled": true
},
- "outputs": [
- {
- "name": "stderr",
- "output_type": "stream",
- "text": [
- "D:\\git\\correlaid\\genesis-python\\.venv\\lib\\site-packages\\urllib3\\connectionpool.py:1043: InsecureRequestWarning: Unverified HTTPS request is being made to host 'www-genesis.destatis.de'. Adding certificate verification is strongly advised. See: https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html#ssl-warnings\n",
- " warnings.warn(\n"
- ]
- }
- ],
+ "outputs": [],
"source": [
- "data = get_cubefile({\"name\": \"47414BJ002\", \"values\": \"true\", \"metadata\": \"true\", \"additionals\": \"false\"})"
+ "params = {\"name\": \"47414BJ002\", \"area\": \"all\", \"values\": \"true\", \"metadata\": \"true\", \"additionals\": \"false\"}\n",
+ "response = get_response_from_endpoint(\"data\", \"cubefile\", params)\n",
+ "data = response.text"
]
},
{
"cell_type": "code",
- "execution_count": 4,
+ "execution_count": 5,
"metadata": {},
"outputs": [
{
@@ -78,7 +71,7 @@
"(str, 79264)"
]
},
- "execution_count": 4,
+ "execution_count": 5,
"metadata": {},
"output_type": "execute_result"
}
@@ -89,13 +82,13 @@
},
{
"cell_type": "code",
- "execution_count": 5,
+ "execution_count": 7,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
- "['* Der Benutzer DEI6I4B3UW der Benutzergruppe DE0142 hat am 30.07.2022 um 20:43:34 diesen Export angestossen.',\n",
+ "['* Der Benutzer DEI6I4B3UW der Benutzergruppe DE0142 hat am 01.08.2022 um 08:16:00 diesen Export angestossen.',\n",
" 'K;DQ;FACH-SCHL;GHH-ART;GHM-WERTE-JN;GENESIS-VBD;REGIOSTAT;EU-VBD;\"mit Werten\"',\n",
" 'D;47414BJ002;;N;N;N;N',\n",
" 'K;DQ-ERH;FACH-SCHL',\n",
@@ -117,7 +110,7 @@
" 'D;DG;WZ08-49-01;NOMINAL;2020;96.0;e;;0.0']"
]
},
- "execution_count": 5,
+ "execution_count": 7,
"metadata": {},
"output_type": "execute_result"
}
@@ -137,7 +130,7 @@
},
{
"cell_type": "code",
- "execution_count": 6,
+ "execution_count": 8,
"metadata": {},
"outputs": [],
"source": [
@@ -146,7 +139,7 @@
},
{
"cell_type": "code",
- "execution_count": 7,
+ "execution_count": 9,
"metadata": {},
"outputs": [
{
@@ -164,36 +157,23 @@
" 0 JAHR 4 4,\n",
" 'DQI': NAME ME-NAME DST TYP NKM-STELLEN GHH-ART GHM-WERTE-JN\n",
" 0 UMS103 2015=100 FEST PROZENT 1 N,\n",
- " 'QEI': FACH-SCHL-1 FACH-SCHL-2 FACH-SCHL-3 ZI-WERT WERT QUALITAET GESPERRT \\\n",
- " 0 DG WZ08-49-01 NOMINAL 2015 100.0 e \n",
- " 1 DG WZ08-49-01 NOMINAL 2016 99.3 e \n",
- " 2 DG WZ08-49-01 NOMINAL 2017 105.7 e \n",
- " 3 DG WZ08-49-01 NOMINAL 2018 111.6 e \n",
- " 4 DG WZ08-49-01 NOMINAL 2019 115.6 e \n",
- " ... ... ... ... ... ... ... ... \n",
- " 2018 DG WZ08-N REAL 2017 108.4 e \n",
- " 2019 DG WZ08-N REAL 2018 110.6 e \n",
- " 2020 DG WZ08-N REAL 2019 110.8 e \n",
- " 2021 DG WZ08-N REAL 2020 94.1 e \n",
- " 2022 DG WZ08-N REAL 2021 101.2 p \n",
- " \n",
- " WERT-VERFAELSCHT \n",
- " 0 0.0 \n",
- " 1 0.0 \n",
- " 2 0.0 \n",
- " 3 0.0 \n",
- " 4 0.0 \n",
- " ... ... \n",
- " 2018 0.0 \n",
- " 2019 0.0 \n",
- " 2020 0.0 \n",
- " 2021 0.0 \n",
- " 2022 0.0 \n",
+ " 'QEI': FACH-SCHL-1 FACH-SCHL-2 FACH-SCHL-3 ZI-WERT WERT QUALITAET GESPERRT WERT-VERFAELSCHT\n",
+ " 0 DG WZ08-49-01 NOMINAL 2015 100.0 e 0.0\n",
+ " 1 DG WZ08-49-01 NOMINAL 2016 99.3 e 0.0\n",
+ " 2 DG WZ08-49-01 NOMINAL 2017 105.7 e 0.0\n",
+ " 3 DG WZ08-49-01 NOMINAL 2018 111.6 e 0.0\n",
+ " 4 DG WZ08-49-01 NOMINAL 2019 115.6 e 0.0\n",
+ " ... ... ... ... ... ... ... ... ...\n",
+ " 2018 DG WZ08-N REAL 2017 108.4 e 0.0\n",
+ " 2019 DG WZ08-N REAL 2018 110.6 e 0.0\n",
+ " 2020 DG WZ08-N REAL 2019 110.8 e 0.0\n",
+ " 2021 DG WZ08-N REAL 2020 94.1 e 0.0\n",
+ " 2022 DG WZ08-N REAL 2021 101.2 p 0.0\n",
" \n",
" [2023 rows x 8 columns]}"
]
},
- "execution_count": 7,
+ "execution_count": 9,
"metadata": {},
"output_type": "execute_result"
}
@@ -204,7 +184,7 @@
},
{
"cell_type": "code",
- "execution_count": 8,
+ "execution_count": 10,
"metadata": {},
"outputs": [
{
@@ -366,36 +346,23 @@
""
],
"text/plain": [
- " FACH-SCHL-1 FACH-SCHL-2 FACH-SCHL-3 ZI-WERT WERT QUALITAET GESPERRT \\\n",
- "0 DG WZ08-49-01 NOMINAL 2015 100.0 e \n",
- "1 DG WZ08-49-01 NOMINAL 2016 99.3 e \n",
- "2 DG WZ08-49-01 NOMINAL 2017 105.7 e \n",
- "3 DG WZ08-49-01 NOMINAL 2018 111.6 e \n",
- "4 DG WZ08-49-01 NOMINAL 2019 115.6 e \n",
- "... ... ... ... ... ... ... ... \n",
- "2018 DG WZ08-N REAL 2017 108.4 e \n",
- "2019 DG WZ08-N REAL 2018 110.6 e \n",
- "2020 DG WZ08-N REAL 2019 110.8 e \n",
- "2021 DG WZ08-N REAL 2020 94.1 e \n",
- "2022 DG WZ08-N REAL 2021 101.2 p \n",
- "\n",
- " WERT-VERFAELSCHT \n",
- "0 0.0 \n",
- "1 0.0 \n",
- "2 0.0 \n",
- "3 0.0 \n",
- "4 0.0 \n",
- "... ... \n",
- "2018 0.0 \n",
- "2019 0.0 \n",
- "2020 0.0 \n",
- "2021 0.0 \n",
- "2022 0.0 \n",
+ " FACH-SCHL-1 FACH-SCHL-2 FACH-SCHL-3 ZI-WERT WERT QUALITAET GESPERRT WERT-VERFAELSCHT\n",
+ "0 DG WZ08-49-01 NOMINAL 2015 100.0 e 0.0\n",
+ "1 DG WZ08-49-01 NOMINAL 2016 99.3 e 0.0\n",
+ "2 DG WZ08-49-01 NOMINAL 2017 105.7 e 0.0\n",
+ "3 DG WZ08-49-01 NOMINAL 2018 111.6 e 0.0\n",
+ "4 DG WZ08-49-01 NOMINAL 2019 115.6 e 0.0\n",
+ "... ... ... ... ... ... ... ... ...\n",
+ "2018 DG WZ08-N REAL 2017 108.4 e 0.0\n",
+ "2019 DG WZ08-N REAL 2018 110.6 e 0.0\n",
+ "2020 DG WZ08-N REAL 2019 110.8 e 0.0\n",
+ "2021 DG WZ08-N REAL 2020 94.1 e 0.0\n",
+ "2022 DG WZ08-N REAL 2021 101.2 p 0.0\n",
"\n",
"[2023 rows x 8 columns]"
]
},
- "execution_count": 8,
+ "execution_count": 10,
"metadata": {},
"output_type": "execute_result"
}
@@ -406,7 +373,7 @@
},
{
"cell_type": "code",
- "execution_count": 9,
+ "execution_count": 11,
"metadata": {},
"outputs": [
{
@@ -444,13 +411,13 @@
" \n",
" \n",
" 1 | \n",
- " ALT041 | \n",
+ " WZ08N7 | \n",
" 2 | \n",
" 2 | \n",
"
\n",
" \n",
" 2 | \n",
- " FAMST2 | \n",
+ " WERTE4 | \n",
" 3 | \n",
" 3 | \n",
"
\n",
@@ -461,11 +428,11 @@
"text/plain": [
" NAME RHF-BSR RHF-ACHSE\n",
"0 DINSG 1 1\n",
- "1 ALT041 2 2\n",
- "2 FAMST2 3 3"
+ "1 WZ08N7 2 2\n",
+ "2 WERTE4 3 3"
]
},
- "execution_count": 9,
+ "execution_count": 11,
"metadata": {},
"output_type": "execute_result"
}
@@ -485,7 +452,7 @@
},
{
"cell_type": "code",
- "execution_count": 13,
+ "execution_count": 12,
"metadata": {},
"outputs": [
{
@@ -647,36 +614,23 @@
""
],
"text/plain": [
- " DINSG WZ08N7 WERTE4 JAHR UMS103 QUALITAET GESPERRT \\\n",
- "0 DG WZ08-49-01 NOMINAL 2015 100.0 e \n",
- "1 DG WZ08-49-01 NOMINAL 2016 99.3 e \n",
- "2 DG WZ08-49-01 NOMINAL 2017 105.7 e \n",
- "3 DG WZ08-49-01 NOMINAL 2018 111.6 e \n",
- "4 DG WZ08-49-01 NOMINAL 2019 115.6 e \n",
- "... ... ... ... ... ... ... ... \n",
- "2018 DG WZ08-N REAL 2017 108.4 e \n",
- "2019 DG WZ08-N REAL 2018 110.6 e \n",
- "2020 DG WZ08-N REAL 2019 110.8 e \n",
- "2021 DG WZ08-N REAL 2020 94.1 e \n",
- "2022 DG WZ08-N REAL 2021 101.2 p \n",
- "\n",
- " WERT-VERFAELSCHT \n",
- "0 0.0 \n",
- "1 0.0 \n",
- "2 0.0 \n",
- "3 0.0 \n",
- "4 0.0 \n",
- "... ... \n",
- "2018 0.0 \n",
- "2019 0.0 \n",
- "2020 0.0 \n",
- "2021 0.0 \n",
- "2022 0.0 \n",
+ " DINSG WZ08N7 WERTE4 JAHR UMS103 QUALITAET GESPERRT WERT-VERFAELSCHT\n",
+ "0 DG WZ08-49-01 NOMINAL 2015 100.0 e 0.0\n",
+ "1 DG WZ08-49-01 NOMINAL 2016 99.3 e 0.0\n",
+ "2 DG WZ08-49-01 NOMINAL 2017 105.7 e 0.0\n",
+ "3 DG WZ08-49-01 NOMINAL 2018 111.6 e 0.0\n",
+ "4 DG WZ08-49-01 NOMINAL 2019 115.6 e 0.0\n",
+ "... ... ... ... ... ... ... ... ...\n",
+ "2018 DG WZ08-N REAL 2017 108.4 e 0.0\n",
+ "2019 DG WZ08-N REAL 2018 110.6 e 0.0\n",
+ "2020 DG WZ08-N REAL 2019 110.8 e 0.0\n",
+ "2021 DG WZ08-N REAL 2020 94.1 e 0.0\n",
+ "2022 DG WZ08-N REAL 2021 101.2 p 0.0\n",
"\n",
"[2023 rows x 8 columns]"
]
},
- "execution_count": 13,
+ "execution_count": 12,
"metadata": {},
"output_type": "execute_result"
}
diff --git a/src/pygenesis/cache.py b/src/pygenesis/cache.py
new file mode 100644
index 0000000..ca5372e
--- /dev/null
+++ b/src/pygenesis/cache.py
@@ -0,0 +1,52 @@
+"""Module provides functions/decorators to cache downloaded data."""
+import logging
+from datetime import date
+from functools import wraps
+from pathlib import Path
+from typing import Callable
+
+import pandas as pd
+
+from pygenesis.config import load_config
+
+logger = logging.getLogger(__name__)
+
+
+def cache_data(func: Callable) -> Callable:
+ """Store downloaded data on disk with download time as parent folder.
+
+ Args:
+ func (Callable): One of the data methods of the data endpoint.
+ """
+
+ @wraps(func)
+ def wrapper_func(**kwargs):
+ config = load_config()
+ cache_dir = Path(config["DATA"]["cache_dir"])
+
+ if not cache_dir.is_dir() or not cache_dir.exists():
+ logger.critical(
+ "Cache dir does not exist! Please make sure init_config() was run properly. Path: %s",
+ cache_dir,
+ )
+
+ name = kwargs["name"]
+ data_dir = cache_dir / name
+ if data_dir.exists():
+ # TODO: Implement solution for updated data.
+ # So don't return latest version but check first for newer version in GENESIS.
+ # if data_dir exists, there has to be at least one stored version of this data
+ versions = sorted((p.name for p in data_dir.glob("*")), key=int)
+ latest = versions[-1]
+ data = pd.read_csv(data_dir / latest / f"{name}.xz")
+ else:
+ data: pd.DataFrame = func(**kwargs)
+ file_path = (
+ data_dir / str(date.today()).replace("-", "") / f"{name}.xz"
+ )
+ file_path.parent.mkdir(parents=True, exist_ok=True)
+ data.to_csv(file_path, index=False)
+
+ return data
+
+ return wrapper_func
diff --git a/src/pygenesis/config.py b/src/pygenesis/config.py
index a10f977..84ab33f 100644
--- a/src/pygenesis/config.py
+++ b/src/pygenesis/config.py
@@ -80,6 +80,10 @@ def init_config(config_dir: Path = DEFAULT_CONFIG_DIR) -> None:
config = _create_default_config()
_write_config(config, config_file)
+ cache_dir = Path(config["DATA"]["cache_dir"])
+ if not cache_dir.exists():
+ cache_dir.mkdir()
+
logger.info("New config was created. Path: %s.", config_file)
diff --git a/src/pygenesis/cube.py b/src/pygenesis/cube.py
index 9df44ee..b71f71c 100644
--- a/src/pygenesis/cube.py
+++ b/src/pygenesis/cube.py
@@ -4,77 +4,18 @@
import pandas as pd
-def is_cube_metadata_header(line: str) -> bool:
- """Check if a line is a cube metadata header.
+def get_cubefile_data(data: str) -> pd.DataFrame:
+ """Return cubefile data as pandas data frame.
Args:
- line (str): A single line of a cubefile.
+ data (str): Raw cubefile content.
Returns:
- bool: True if the line starts with a "K", False otherwise.
+ pd.DataFrame: Parsed cube file.
"""
- return line[0] == "K"
-
-
-def get_cube_metadata_header_type(line: str) -> str:
- """Return the header type.
-
- Args:
- line (str): A single line of a cubefile.
-
- Returns:
- str: The header type, which is the second entry in the header.
- """
- return line.split(";")[1]
-
-
-def get_cube_metadata_header(
- line: str, rename_duplicates: bool = False
-) -> list[str]:
- """Return the metadata header of a cubefile.
-
- Args:
- line (str): A single line of a cubefile.
- rename_duplicates (bool, optional): If False, the raw header is returned.
- If True, identical column names are appended with a unique counter.
- Defaults to False.
-
- Returns:
- list[str]: A list of column names, except for "nur Werte" and "mit Werten".
- """
- raw_header = line.split(";")[2:]
- raw_header = [
- name
- for name in raw_header
- if name not in ['"nur Werte"', '"mit Werten"']
- ]
-
- if not rename_duplicates:
- return raw_header
-
- # header can have multiple entries with same label, which is problematic for pandas
- # so lets just add a counter
- header = [""] * len(raw_header)
- for name in set(raw_header):
- if raw_header.count(name) == 1:
- header[raw_header.index(name)] = name
- else:
- for counter in range(raw_header.count(name)):
- header[raw_header.index(name) + counter] = f"{name}-{counter+1}"
-
- return header
-
+ cube = rename_axes(parse_cube(data))
-def parse_cube_data_line(line: str) -> list[str]:
- """Return the content of a cube data line.
-
- Args:
- line (str): A single line of a cubefile.
-
- Returns:
- list[str]: The content of a cube data line, omitting the first element.
- """
- return line.split(";")[1:]
+ return cube["QEI"]
def parse_cube(data: str) -> dict:
@@ -92,19 +33,19 @@ def parse_cube(data: str) -> dict:
for line in data.splitlines():
# skip all rows until first header
- if header is None and not is_cube_metadata_header(line):
+ if header is None and not _is_cube_metadata_header(line):
continue
- if is_cube_metadata_header(line):
+ if _is_cube_metadata_header(line):
if data_block:
cube[header_type] = pd.DataFrame(data_block, columns=header)
- header = get_cube_metadata_header(line, rename_duplicates=True)
- header_type: str = get_cube_metadata_header_type(line)
+ header = _get_cube_metadata_header(line, rename_duplicates=True)
+ header_type: str = _get_cube_metadata_header_type(line)
data_block = []
continue
- line_content = parse_cube_data_line(line)
+ line_content = _parse_cube_data_line(line)
data_block.append(line_content)
# the last data block has no header after it so we have to do it here
@@ -157,3 +98,45 @@ def rename_axes(
cube["QEI"].rename(columns=dict(zip(old_cols, new_cols)), inplace=True)
return cube
+
+
+def _is_cube_metadata_header(line: str) -> bool:
+ """Check if a line is a cube metadata header."""
+ return line[0] == "K"
+
+
+def _get_cube_metadata_header_type(line: str) -> str:
+ """Return the header type."""
+ return line.split(";")[1]
+
+
+def _get_cube_metadata_header(
+ line: str, rename_duplicates: bool = False
+) -> list[str]:
+ """Return the metadata header of a cubefile."""
+ raw_header = line.split(";")[2:]
+ raw_header = [
+ name
+ for name in raw_header
+ if name not in ['"nur Werte"', '"mit Werten"']
+ ]
+
+ if not rename_duplicates:
+ return raw_header
+
+ # header can have multiple entries with same label, which is problematic for pandas
+ # so lets just add a counter
+ header = [""] * len(raw_header)
+ for name in set(raw_header):
+ if raw_header.count(name) == 1:
+ header[raw_header.index(name)] = name
+ else:
+ for counter in range(raw_header.count(name)):
+ header[raw_header.index(name) + counter] = f"{name}-{counter+1}"
+
+ return header
+
+
+def _parse_cube_data_line(line: str) -> list[str]:
+ """Return the content of a cube data line."""
+ return line.split(";")[1:]
diff --git a/src/pygenesis/data.py b/src/pygenesis/data.py
new file mode 100644
index 0000000..a5211df
--- /dev/null
+++ b/src/pygenesis/data.py
@@ -0,0 +1,49 @@
+"""Provides functionality to download data from GENESIS data endpoint."""
+from typing import Literal
+
+import pandas as pd
+
+from pygenesis.cache import cache_data
+from pygenesis.cube import get_cubefile_data
+from pygenesis.http_helper import get_response_from_endpoint
+from pygenesis.table import get_tablefile_data
+
+METHODS = Literal["tablefile", "cubefile"]
+
+
+@cache_data
+def get_data(
+ *, name: str, method: METHODS, area: str = "all", **kwargs
+) -> pd.DataFrame:
+ """Download data from GENESIS.
+
+ Based on the name, area and additional query parameters the
+ given method from the data-endpoint will be queried.
+
+ Args:
+ name (str): Name of the object.
+ method (str): Method of the data endpoint used to query data. One of ["tablefile", "cubefile"].
+ area (str, optional): Area in which the object is stored. Defaults to "all".
+
+ Returns:
+ pd.DataFrame: Parsed data file.
+ """
+ kwargs = kwargs or {}
+
+ params = {
+ "name": name,
+ "area": area,
+ }
+
+ if method == "tablefile":
+ params["format"] = "ffcsv"
+
+ params |= kwargs
+
+ response = get_response_from_endpoint("data", method, params)
+ data = response.text
+
+ if method == "tablefile":
+ return get_tablefile_data(data)
+ else:
+ return get_cubefile_data(data)
diff --git a/src/pygenesis/table.py b/src/pygenesis/table.py
index af9b3c4..3580470 100644
--- a/src/pygenesis/table.py
+++ b/src/pygenesis/table.py
@@ -2,34 +2,15 @@
import pandas as pd
from pygenesis.csv_helper import get_df_from_text
-from pygenesis.http_helper import get_response_from_endpoint
-def get_tablefile_data(
- table_name: str, table_area: str = "all", **kwargs
-) -> pd.DataFrame:
- """
- Based on the table name, table area and additional query parameters the
- tablefile method from the data-endpoint will be queried.
+def get_tablefile_data(data: str) -> pd.DataFrame:
+ """Return table file data as pandas data frame.
Args:
- table_name (str): Name of the table
- table_area (str, optional): Area of the table (Defaul: all)
- query_params (dict, optional): Additional query parameters
- (Default: None)
+ data (str): Raw tablefile content.
+
Returns:
- pd.DataFrame
+ pd.DataFrame: Parsed table file.
"""
-
- kwargs = kwargs or {}
-
- params = {
- "name": table_name,
- "area": table_area,
- "format": "ffcsv",
- }
-
- params |= kwargs
-
- response = get_response_from_endpoint("data", "tablefile", params)
- return get_df_from_text(response.text)
+ return get_df_from_text(data)
diff --git a/tests/test_cache.py b/tests/test_cache.py
new file mode 100644
index 0000000..919436e
--- /dev/null
+++ b/tests/test_cache.py
@@ -0,0 +1,92 @@
+import time
+from datetime import date
+from pathlib import Path
+
+import numpy as np
+import pandas as pd
+import pytest
+
+from pygenesis.cache import cache_data
+from pygenesis.config import (
+ DEFAULT_SETTINGS_FILE,
+ _write_config,
+ init_config,
+ load_settings,
+)
+
+SLEEP_TIME = 0.1
+
+
+@pytest.fixture()
+def cache_dir(tmp_path_factory):
+ return tmp_path_factory.mktemp(".pygenesis")
+
+
+@pytest.fixture(autouse=True)
+def restore_settings():
+ old_settings = load_settings()
+ yield
+ _write_config(old_settings, DEFAULT_SETTINGS_FILE)
+
+
+@cache_data
+def decorated_data(*, name):
+ time.sleep(SLEEP_TIME)
+ return pd.DataFrame(
+ np.random.random(size=(10, 5)), columns=["a", "b", "c", "d", "e"]
+ )
+
+
+def test_cache_data_wrapper(cache_dir):
+ init_config(cache_dir)
+
+ assert len(list((cache_dir / "data").glob("*"))) == 0
+
+ data = decorated_data(name="test_cache_decorator")
+
+ assert isinstance(data, pd.DataFrame)
+ assert not data.empty
+
+ cached_data_file: Path = (
+ cache_dir
+ / "data"
+ / "test_cache_decorator"
+ / str(date.today()).replace("-", "")
+ / "test_cache_decorator.xz"
+ )
+
+ assert cached_data_file.exists() and cached_data_file.is_file()
+
+ objs_in_data = [p for p in cache_dir.joinpath("data").glob("*") if p]
+
+ assert len(objs_in_data) == 1
+ assert objs_in_data[0] == cache_dir / "data" / "test_cache_decorator"
+
+ objs_in_name_dir = [
+ p
+ for p in cache_dir.joinpath("data/test_cache_decorator").glob("*")
+ if p
+ ]
+
+ assert len(objs_in_name_dir) == 1
+ assert objs_in_name_dir[0] == cached_data_file.parent
+
+ restored_data = pd.read_csv(cached_data_file)
+
+ pd.testing.assert_frame_equal(data, restored_data, check_index_type=False)
+
+
+def test_cache_data_twice(cache_dir):
+ init_config(cache_dir)
+
+ load_time = time.perf_counter()
+ data = decorated_data(name="test_cache_decorator")
+ load_time = time.perf_counter() - load_time
+
+ assert load_time >= SLEEP_TIME
+
+ load_time = time.perf_counter()
+ data = decorated_data(name="test_cache_decorator")
+ load_time = time.perf_counter() - load_time
+
+ assert load_time < SLEEP_TIME
diff --git a/tests/test_config.py b/tests/test_config.py
index c0523ae..32c0762 100644
--- a/tests/test_config.py
+++ b/tests/test_config.py
@@ -16,8 +16,7 @@
@pytest.fixture()
def config_dir(tmp_path_factory):
- config_dir = tmp_path_factory.mktemp(".pygenesis")
- return config_dir
+ return tmp_path_factory.mktemp(".pygenesis")
@pytest.fixture(autouse=True)
@@ -55,12 +54,17 @@ def test_init_config_with_config_dir(config_dir, caplog):
assert caplog.records[1].levelname == "INFO"
assert "Settings file updated" in caplog.text
assert "New config was created" in caplog.text
+ assert (config_dir / "data").exists()
config = load_config()
assert isinstance(config, ConfigParser)
assert len(config.sections()) > 0
+ assert config["DATA"]["cache_dir"] == str(config_dir / "data")
+ assert len(list((config_dir / "data").glob("*"))) == 0
+
config_file = get_config_path_from_settings()
+
assert config_file.exists() and config_file.is_file()
From 472f4eb2671f0b5347890f1a768fe9be1a11fefe Mon Sep 17 00:00:00 2001
From: MarcoHuebner <57489799+MarcoHuebner@users.noreply.github.com>
Date: Wed, 24 Aug 2022 21:51:52 +0200
Subject: [PATCH 10/27] Merged changes from draft branch, including pylint
fixes, Exception and Error clarification. Also moved clean_cache function.
Some ToDos still left, #45.
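After the move the function is imported from the cache module instead of config; a minimal sketch (per-ID removal of the whole file tree is still an open TODO):

    from pathlib import Path
    from pygenesis.cache import clean_cache

    clean_cache(Path("47414BJ002"))  # remove a single cached object (placeholder name)
    clean_cache(None)                # clear the whole cache directory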
---
nb/download_tablefile.ipynb | 8 ++---
nb/parse_cube.ipynb | 36 +++++++-------------
src/pygenesis/cache.py | 38 ++++++++++++++++++++-
src/pygenesis/config.py | 51 ----------------------------
src/pygenesis/http_helper.py | 27 +++++++--------
tests/unit_tests/test_http_helper.py | 4 +--
6 files changed, 69 insertions(+), 95 deletions(-)
diff --git a/nb/download_tablefile.ipynb b/nb/download_tablefile.ipynb
index 8bb7743..1702ee9 100644
--- a/nb/download_tablefile.ipynb
+++ b/nb/download_tablefile.ipynb
@@ -2,7 +2,7 @@
"cells": [
{
"cell_type": "code",
- "execution_count": 1,
+ "execution_count": null,
"metadata": {},
"outputs": [],
"source": [
@@ -747,7 +747,7 @@
],
"metadata": {
"kernelspec": {
- "display_name": "Python 3 (ipykernel)",
+ "display_name": "Python 3.9.12 ('pygenesis')",
"language": "python",
"name": "python3"
},
@@ -761,11 +761,11 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.9.7"
+ "version": "3.9.12"
},
"vscode": {
"interpreter": {
- "hash": "ee0113c470e2ab03fd08da308faad6cb3acef2959a5a1adc44423161e2606732"
+ "hash": "c50015765afe066708d859da3faaa0505e12b679b95f6727e524b172064c6917"
}
}
},
diff --git a/nb/parse_cube.ipynb b/nb/parse_cube.ipynb
index 6c84c2e..b03dddb 100644
--- a/nb/parse_cube.ipynb
+++ b/nb/parse_cube.ipynb
@@ -2,7 +2,7 @@
"cells": [
{
"cell_type": "code",
- "execution_count": 1,
+ "execution_count": null,
"metadata": {},
"outputs": [],
"source": [
@@ -12,7 +12,7 @@
},
{
"cell_type": "code",
- "execution_count": 9,
+ "execution_count": null,
"metadata": {},
"outputs": [],
"source": [
@@ -23,7 +23,7 @@
},
{
"cell_type": "code",
- "execution_count": 2,
+ "execution_count": null,
"metadata": {},
"outputs": [],
"source": [
@@ -49,7 +49,7 @@
},
{
"cell_type": "code",
- "execution_count": 3,
+ "execution_count": null,
"metadata": {
"scrolled": true
},
@@ -641,21 +641,9 @@
},
{
"cell_type": "code",
- "execution_count": 21,
+ "execution_count": null,
"metadata": {},
- "outputs": [
- {
- "data": {
- "text/plain": [
- "array(['2015', '2016', '2017', '2018', '2019', '2020', '2021'],\n",
- " dtype=object)"
- ]
- },
- "execution_count": 21,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
+ "outputs": [],
"source": [
"rename_axes(cube)[\"QEI\"][\"JAHR\"].unique()"
]
@@ -669,11 +657,8 @@
}
],
"metadata": {
- "interpreter": {
- "hash": "02e23b522f8c3795158421909d41ced4ef90521258d58d1c53bee449d96f71e3"
- },
"kernelspec": {
- "display_name": "Python 3 (ipykernel)",
+ "display_name": "Python 3.9.12 ('pygenesis')",
"language": "python",
"name": "python3"
},
@@ -687,7 +672,12 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.9.7"
+ "version": "3.9.12"
+ },
+ "vscode": {
+ "interpreter": {
+ "hash": "c50015765afe066708d859da3faaa0505e12b679b95f6727e524b172064c6917"
+ }
}
},
"nbformat": 4,
diff --git a/src/pygenesis/cache.py b/src/pygenesis/cache.py
index ca5372e..bf0fd70 100644
--- a/src/pygenesis/cache.py
+++ b/src/pygenesis/cache.py
@@ -1,9 +1,10 @@
"""Module provides functions/decorators to cache downloaded data."""
import logging
+import shutil
from datetime import date
from functools import wraps
from pathlib import Path
-from typing import Callable
+from typing import Callable, Optional
import pandas as pd
@@ -50,3 +51,38 @@ def wrapper_func(**kwargs):
return data
return wrapper_func
+
+
+# TODO: Write test, use ID instead of file
+def clean_cache(file: Optional[Path]) -> None:
+ """Clean the data cache by overall or specific file removal.
+
+ Args:
+ file (Path, optional): Path to the file which should be removed from cache directory.
+ """
+ config = load_config()
+
+ # check for cache_dir in DATA section of the config.ini
+ # TODO: What happens if this key is not defined? is that error understandable?
+ cache_dir = Path(config["DATA"]["cache_dir"])
+
+ if not cache_dir.is_dir() or not cache_dir.exists():
+ logger.critical(
+ "Cache dir does not exist! Please make sure init_config() was run properly. Path: %s",
+ cache_dir,
+ )
+
+ # remove (previously specified) file(s) from the data cache
+ files = [cache_dir / file] if file is not None else cache_dir.glob(file)
+
+ # TODO: remove complete tree according to ID file tree structure
+ for filename in files:
+ file_path = cache_dir / filename
+ try:
+ if file_path.is_file() or file_path.is_symlink():
+ file_path.unlink()
+ elif file_path.is_dir():
+ shutil.rmtree(file_path)
+ # TODO: narrow down this exception
+ except Exception as e:
+ print(f"Failed to delete {file_path}. Reason: {e}")
diff --git a/src/pygenesis/config.py b/src/pygenesis/config.py
index 84ab33f..8f48d2b 100644
--- a/src/pygenesis/config.py
+++ b/src/pygenesis/config.py
@@ -8,11 +8,8 @@
If there is no config.ini in the given config_dir, a default config will be created with empty credentials.
"""
import logging
-import os
-import shutil
from configparser import ConfigParser
from pathlib import Path
-from typing import Optional
PKG_NAME = __name__.split(".", maxsplit=1)[0]
@@ -151,52 +148,4 @@ def _create_default_config() -> ConfigParser:
return config
-# TODO: Decide where this function should go... Maybe a feature of the new cache.py?
-def clean_cache(file: Optional[Path]) -> None:
- """Clean the data cache by overall or specific file removal.
-
- Args:
- file (Path, optional): Path to the file which should be removed from cache directory.
- """
- config_file = get_config_path_from_settings()
- config = _load_config(config_file)
-
- # check for cache_dir in DATA section of the config.ini
- if config.has_section("DATA"):
- logger.info("Cache config %s was loaded successfully.", config_file)
-
- if not config.get("DATA", "cache_dir") or not os.path.isdir(
- config.get("DATA", "cache_dir")
- ):
- logger.critical(
- "Cache directory not set and/or corrupted! "
- "Please make sure to run init_config() and set up the data cache appropriately. "
- )
- raise KeyError(
- "Issue with 'cache_dir' in the config.ini. Please rerun init_config()."
- )
-
- # load the folder path
- cache_dir = config["DATA"]["cache_dir"]
-
- # remove (previously specified) file(s) from the data cache
- files = (
- [os.path.join(cache_dir, file)]
- if file is not None
- else os.listdir(cache_dir)
- )
-
- for filename in files:
- file_path = os.path.join(cache_dir, filename)
- try:
- if os.path.isfile(file_path) or os.path.islink(file_path):
- os.unlink(file_path)
- elif os.path.isdir(file_path):
- shutil.rmtree(file_path)
- except Exception as e:
- print("Failed to delete %s. Reason: %s" % (file_path, e))
-
- return None
-
-
create_settings()
diff --git a/src/pygenesis/http_helper.py b/src/pygenesis/http_helper.py
index 2427385..c66839c 100644
--- a/src/pygenesis/http_helper.py
+++ b/src/pygenesis/http_helper.py
@@ -48,12 +48,12 @@ def _check_invalid_status_code(status_code: int) -> None:
status_code (int): Status code from the response object
Raises:
- Exception: Generic exception if status 4xx or 5xx is returned
+ AssertionError: Assert that status is not 4xx or 5xx
"""
- if (status_code // 100) in [4, 5]:
- raise Exception(
- f"Error {status_code}: The server returned a {status_code} status code"
- )
+ assert status_code // 100 not in [
+ 4,
+ 5,
+ ], f"Error {status_code}: The server returned a {status_code} status code"
return None
@@ -73,13 +73,11 @@ def _check_invalid_destatis_status_code(response: requests.Response) -> None:
return None
_check_destatis_status(response_dict.get("Status", {}))
- return None
-
def _check_destatis_status(destatis_status: dict) -> None:
"""
Helper method which checks the status message from Destatis.
- If the status message is erroneous an exception will be raised.
+ If the status message is erroneous an error will be raised.
Possible Codes (2.1.2 Grundstruktur der Responses):
- 0: "erfolgreich" (Type: "Information")
@@ -90,10 +88,10 @@ def _check_destatis_status(destatis_status: dict) -> None:
destatis_status (dict): Status response dict from Destatis
Raises:
- Exception: Generic exception if the status code displays an error
+ # TODO: Is this a Value or KeyError?
+ ValueError: If the status code or type displays an error (caused by the user inputs)
"""
- # -1 is a status code that according to the documentation should not occur
- # and thus only is found if the status response dict is empty
+ # -1 status code for unexpected errors and if no status code is given (faulty response)
destatis_status_code = destatis_status.get("Code", -1)
destatis_status_type = destatis_status.get("Type")
destatis_status_content = destatis_status.get("Content")
@@ -103,14 +101,14 @@ def _check_destatis_status(destatis_status: dict) -> None:
# check for generic/ system error
if destatis_status_code == -1:
- raise Exception(
+ raise ValueError(
"Error: There is a system error.\
Please check your query parameters."
)
# check for destatis/ query errors
elif (destatis_status_code == 104) or (destatis_status_type in error_en_de):
- raise Exception(destatis_status_content)
+ raise ValueError(destatis_status_content)
# print warnings to user
elif (destatis_status_code == 22) or (
@@ -118,4 +116,5 @@ def _check_destatis_status(destatis_status: dict) -> None:
):
warnings.warn(destatis_status_content, UserWarning, stacklevel=2)
- return None
+ # TODO: pass response information to user, however logger.info might be overlooked
+ # as standard only shows beyond warning -> HowTo?
diff --git a/tests/unit_tests/test_http_helper.py b/tests/unit_tests/test_http_helper.py
index f7a03a6..9607574 100644
--- a/tests/unit_tests/test_http_helper.py
+++ b/tests/unit_tests/test_http_helper.py
@@ -18,7 +18,7 @@ def test__check_invalid_status_code_with_error():
for _handle_status_code method.
"""
for status_code in [400, 500]:
- with pytest.raises(Exception) as e:
+ with pytest.raises(AssertionError) as e:
_check_invalid_status_code(status_code)
assert (
str(e.value)
@@ -96,7 +96,7 @@ def test__check_invalid_destatis_status_code_with_error():
# extract status content which is raised
status_content = status.json().get("Status").get("Content")
- with pytest.raises(Exception) as e:
+ with pytest.raises(ValueError) as e:
_check_invalid_destatis_status_code(status)
assert str(e.value) == status_content
From b191c8fddd2a8186b3489fc84041824e07c0e74f Mon Sep 17 00:00:00 2001
From: MarcoHuebner <57489799+MarcoHuebner@users.noreply.github.com>
Date: Wed, 24 Aug 2022 22:20:56 +0200
Subject: [PATCH 11/27] fixed linting issues, including inconsistent returns.
---
src/pygenesis/http_helper.py | 14 +++++++++-----
1 file changed, 9 insertions(+), 5 deletions(-)
diff --git a/src/pygenesis/http_helper.py b/src/pygenesis/http_helper.py
index c66839c..fb17bba 100644
--- a/src/pygenesis/http_helper.py
+++ b/src/pygenesis/http_helper.py
@@ -55,8 +55,6 @@ def _check_invalid_status_code(status_code: int) -> None:
5,
], f"Error {status_code}: The server returned a {status_code} status code"
- return None
-
def _check_invalid_destatis_status_code(response: requests.Response) -> None:
"""
@@ -69,9 +67,15 @@ def _check_invalid_destatis_status_code(response: requests.Response) -> None:
"""
try:
response_dict = response.json()
- except ValueError:
- return None
- _check_destatis_status(response_dict.get("Status", {}))
+ # catch possible errors raised by .json() (and only .json())
+ except (
+ UnicodeDecodeError,
+ requests.exceptions.JSONDecodeError,
+ ):
+ response_dict = None
+
+ if response_dict is not None:
+ _check_destatis_status(response_dict.get("Status", {}))
def _check_destatis_status(destatis_status: dict) -> None:
From cfa9a7495fee0a6691c277d8935644e5aaa9083e Mon Sep 17 00:00:00 2001
From: MarcoHuebner <57489799+MarcoHuebner@users.noreply.github.com>
Date: Wed, 24 Aug 2022 22:29:55 +0200
Subject: [PATCH 12/27] hotfix of overlooked error #45.
---
src/pygenesis/http_helper.py | 2 ++
tests/unit_tests/test_http_helper.py | 1 -
2 files changed, 2 insertions(+), 1 deletion(-)
diff --git a/src/pygenesis/http_helper.py b/src/pygenesis/http_helper.py
index fb17bba..1aa84e6 100644
--- a/src/pygenesis/http_helper.py
+++ b/src/pygenesis/http_helper.py
@@ -1,4 +1,5 @@
"""Wrapper module for the data endpoint."""
+import json
import warnings
import requests
@@ -70,6 +71,7 @@ def _check_invalid_destatis_status_code(response: requests.Response) -> None:
# catch possible errors raised by .json() (and only .json())
except (
UnicodeDecodeError,
+ json.decoder.JSONDecodeError,
requests.exceptions.JSONDecodeError,
):
response_dict = None
diff --git a/tests/unit_tests/test_http_helper.py b/tests/unit_tests/test_http_helper.py
index 9607574..cc2e0fa 100644
--- a/tests/unit_tests/test_http_helper.py
+++ b/tests/unit_tests/test_http_helper.py
@@ -75,7 +75,6 @@ def _generic_request_status(
# TODO: Why is specific (UTF-8) encoding necessary?
if status_response:
-
request_status._content = json.dumps(status_dict).encode("utf-8")
else:
request_status._content = response_text.encode("utf-8")
From 183b967cd0bc94192dfbd7931ff96a65c532e76e Mon Sep 17 00:00:00 2001
From: MarcoHuebner <57489799+MarcoHuebner@users.noreply.github.com>
Date: Fri, 26 Aug 2022 16:00:58 +0200
Subject: [PATCH 13/27] narrowed the caught exception
---
src/pygenesis/cache.py | 3 +--
1 file changed, 1 insertion(+), 2 deletions(-)
diff --git a/src/pygenesis/cache.py b/src/pygenesis/cache.py
index bf0fd70..e20c5e1 100644
--- a/src/pygenesis/cache.py
+++ b/src/pygenesis/cache.py
@@ -83,6 +83,5 @@ def clean_cache(file: Optional[Path]) -> None:
file_path.unlink()
elif file_path.is_dir():
shutil.rmtree(file_path)
- # TODO: narrow down this exception
- except Exception as e:
+ except (OSError, ValueError, FileNotFoundError) as e:
print(f"Failed to delete {file_path}. Reason: {e}")
From 89687397511ddca669f6942ede62c62e5ebc18f3 Mon Sep 17 00:00:00 2001
From: MarcoHuebner <57489799+MarcoHuebner@users.noreply.github.com>
Date: Fri, 26 Aug 2022 23:29:52 +0200
Subject: [PATCH 14/27] Updated clean_cache, updated http_helper
---
src/pygenesis/cache.py | 31 ++++++++++++++--------------
src/pygenesis/http_helper.py | 9 +++++---
tests/unit_tests/test_http_helper.py | 8 +++----
3 files changed, 26 insertions(+), 22 deletions(-)
diff --git a/src/pygenesis/cache.py b/src/pygenesis/cache.py
index e20c5e1..04c642d 100644
--- a/src/pygenesis/cache.py
+++ b/src/pygenesis/cache.py
@@ -31,6 +31,7 @@ def wrapper_func(**kwargs):
cache_dir,
)
+ # TODO: Is "name" general naming for data download (or is there other uses besides from get_data)?
name = kwargs["name"]
data_dir = cache_dir / name
if data_dir.exists():
@@ -54,11 +55,12 @@ def wrapper_func(**kwargs):
# TODO: Write test, use ID instead of file
-def clean_cache(file: Optional[Path]) -> None:
+def clean_cache(file: Optional[str]) -> None:
"""Clean the data cache by overall or specific file removal.
Args:
- file (Path, optional): Path to the file which should be removed from cache directory.
+ file (str, optional): name of the file or directory which should be removed from
+ the cache directory. Also works with a specific file inside a cached directory.
"""
config = load_config()
@@ -72,16 +74,15 @@ def clean_cache(file: Optional[Path]) -> None:
cache_dir,
)
- # remove (previously specified) file(s) from the data cache
- files = [cache_dir / file] if file is not None else cache_dir.glob(file)
-
- # TODO: remove complete tree according to ID file tree structure
- for filename in files:
- file_path = cache_dir / filename
- try:
- if file_path.is_file() or file_path.is_symlink():
- file_path.unlink()
- elif file_path.is_dir():
- shutil.rmtree(file_path)
- except (OSError, ValueError, FileNotFoundError) as e:
- print(f"Failed to delete {file_path}. Reason: {e}")
+ # remove specified file (directory) from the data cache or clear complete cache
+ file_path = (
+ cache_dir / cache_dir.glob(file) if file is not None else cache_dir
+ )
+
+ try:
+ if file_path.is_file() or file_path.is_symlink():
+ file_path.unlink()
+ elif file_path.is_dir():
+ shutil.rmtree(file_path)
+ except (OSError, ValueError, FileNotFoundError) as e:
+ print(f"Failed to delete {file_path}. Reason: {e}")
diff --git a/src/pygenesis/http_helper.py b/src/pygenesis/http_helper.py
index 1aa84e6..ba468fb 100644
--- a/src/pygenesis/http_helper.py
+++ b/src/pygenesis/http_helper.py
@@ -102,6 +102,7 @@ def _check_destatis_status(destatis_status: dict) -> None:
destatis_status_type = destatis_status.get("Type")
destatis_status_content = destatis_status.get("Content")
+ # define status types
error_en_de = ["Error", "Fehler"]
warning_en_de = ["Warning", "Warnung"]
@@ -116,11 +117,13 @@ def _check_destatis_status(destatis_status: dict) -> None:
elif (destatis_status_code == 104) or (destatis_status_type in error_en_de):
raise ValueError(destatis_status_content)
- # print warnings to user
+ # output warnings to user
elif (destatis_status_code == 22) or (
destatis_status_type in warning_en_de
):
warnings.warn(destatis_status_content, UserWarning, stacklevel=2)
- # TODO: pass response information to user, however logger.info might be overlooked
- # as standard only shows beyond warning -> HowTo?
+ # output information to user
+ # TODO: Would logger.info (with forced visibility) be the better option?
+ elif destatis_status_type.lower() == "information":
+ print(f"Code {destatis_status_code}: {destatis_status_content}")
diff --git a/tests/unit_tests/test_http_helper.py b/tests/unit_tests/test_http_helper.py
index cc2e0fa..b5cf882 100644
--- a/tests/unit_tests/test_http_helper.py
+++ b/tests/unit_tests/test_http_helper.py
@@ -73,11 +73,11 @@ def _generic_request_status(
request_status = requests.Response()
request_status.status_code = 200 # success
- # TODO: Why is specific (UTF-8) encoding necessary?
+ # Define UTF-8 encoding as requests guesses otherwise
if status_response:
- request_status._content = json.dumps(status_dict).encode("utf-8")
+ request_status._content = json.dumps(status_dict).encode("UTF-8")
else:
- request_status._content = response_text.encode("utf-8")
+ request_status._content = response_text.encode("UTF-8")
return request_status
@@ -111,7 +111,7 @@ def test__check_invalid_destatis_status_code_with_warning():
_generic_request_status(status_type="Warnung"),
_generic_request_status(status_type="Warning"),
]:
- # TODO: Is the best/ most specific way to capture the warning?
+ # TODO: Is this the best/ most specific way to capture the warning?
with pytest.warns(UserWarning):
_check_invalid_destatis_status_code(status)
From 07ff06fc7e615ccdbd091ba31e7cb7c92b3307c4 Mon Sep 17 00:00:00 2001
From: MarcoHuebner <57489799+MarcoHuebner@users.noreply.github.com>
Date: Fri, 26 Aug 2022 23:35:39 +0200
Subject: [PATCH 15/27] Fix lint issue with assert
---
src/pygenesis/http_helper.py | 7 +++++--
1 file changed, 5 insertions(+), 2 deletions(-)
diff --git a/src/pygenesis/http_helper.py b/src/pygenesis/http_helper.py
index ba468fb..190b53c 100644
--- a/src/pygenesis/http_helper.py
+++ b/src/pygenesis/http_helper.py
@@ -51,10 +51,13 @@ def _check_invalid_status_code(status_code: int) -> None:
Raises:
AssertionError: Assert that status is not 4xx or 5xx
"""
- assert status_code // 100 not in [
+ if status_code // 100 in [
4,
5,
- ], f"Error {status_code}: The server returned a {status_code} status code"
+ ]:
+ raise AssertionError(
+ f"Error {status_code}: The server returned a {status_code} status code"
+ )
def _check_invalid_destatis_status_code(response: requests.Response) -> None:
From 8994f1b695bd2ccef9233b438dcaa8492643b529 Mon Sep 17 00:00:00 2001
From: MarcoHuebner <57489799+MarcoHuebner@users.noreply.github.com>
Date: Fri, 26 Aug 2022 23:51:10 +0200
Subject: [PATCH 16/27] mypy fixes for ci/cd pipeline #45
---
src/pygenesis/cache.py | 5 ++---
src/pygenesis/destatis.py | 16 +++++++++-------
src/pygenesis/http_helper.py | 6 +++---
3 files changed, 14 insertions(+), 13 deletions(-)
diff --git a/src/pygenesis/cache.py b/src/pygenesis/cache.py
index 04c642d..e08df06 100644
--- a/src/pygenesis/cache.py
+++ b/src/pygenesis/cache.py
@@ -75,9 +75,8 @@ def clean_cache(file: Optional[str]) -> None:
)
# remove specified file (directory) from the data cache or clear complete cache
- file_path = (
- cache_dir / cache_dir.glob(file) if file is not None else cache_dir
- )
+ # TODO: Find corresponding directories with cache_dir.glob(file)
+ file_path = cache_dir / file if file is not None else cache_dir
try:
if file_path.is_file() or file_path.is_symlink():
diff --git a/src/pygenesis/destatis.py b/src/pygenesis/destatis.py
index e17b4f3..f61d608 100644
--- a/src/pygenesis/destatis.py
+++ b/src/pygenesis/destatis.py
@@ -1,11 +1,13 @@
"""Module provides functions to work with the GENESIS REST-API."""
+from typing import Any
+
from pygenesis.config import load_config
from pygenesis.http_helper import get_response_from_endpoint
config = load_config()
-def get_metadata(endpoint: str, name: str) -> str:
+def get_metadata(endpoint: str, name: str) -> Any:
"""Method for downloading metadata from www-genesis.destatis.de.
Method supports the following endpoints:
@@ -21,16 +23,16 @@ def get_metadata(endpoint: str, name: str) -> str:
name (str): Unique name of the object.
Returns:
- str: Content of "Object" response.
+ Any: JSON formatted content of "Object" response.
"""
params = {
"name": name,
}
- return get_response_from_endpoint("metadata", endpoint, params).text
+ return get_response_from_endpoint("metadata", endpoint, params).json()
-def get_catalogue(endpoint: str, params: dict) -> dict:
+def get_catalogue(endpoint: str, params: dict) -> Any:
"""Method for downloading catalogue data from www-genesis.destatis.de.
Args:
@@ -38,20 +40,20 @@ def get_catalogue(endpoint: str, params: dict) -> dict:
params (dict): The query parameter as defined by the API.
Returns:
- dict: JSON formated response for the given query parameters.
+ Any: JSON formatted response for the given query parameters.
"""
return get_response_from_endpoint("catalogue", endpoint, params).json()
-def get_cubefile(params: dict) -> str:
+def get_cubefile(params: dict) -> Any:
"""Method for downloading cube files from www-genesis.destatis.de.
Args:
params (dict): The query parameter as defined by the API.
Returns:
- str: The content of the cubefile.
+ Any: The content of the cubefile.
"""
return get_response_from_endpoint("data", "cubefile", params).text
diff --git a/src/pygenesis/http_helper.py b/src/pygenesis/http_helper.py
index 190b53c..039425a 100644
--- a/src/pygenesis/http_helper.py
+++ b/src/pygenesis/http_helper.py
@@ -101,9 +101,9 @@ def _check_destatis_status(destatis_status: dict) -> None:
ValueError: If the status code or type displays an error (caused by the user inputs)
"""
# -1 status code for unexpected errors and if no status code is given (faulty response)
- destatis_status_code = destatis_status.get("Code", -1)
- destatis_status_type = destatis_status.get("Type")
- destatis_status_content = destatis_status.get("Content")
+ destatis_status_code = int(destatis_status.get("Code", -1))
+ destatis_status_type = str(destatis_status.get("Type", "Information"))
+ destatis_status_content = str(destatis_status.get("Content"))
# define status types
error_en_de = ["Error", "Fehler"]
From cd684c607856bc24ae18674de5715d958e070d20 Mon Sep 17 00:00:00 2001
From: MarcoHuebner <57489799+MarcoHuebner@users.noreply.github.com>
Date: Sat, 27 Aug 2022 14:45:41 +0200
Subject: [PATCH 17/27] Updated clean_cache function, added a first version of
a test
---
src/pygenesis/cache.py | 32 ++++++++++++++++++--------------
tests/test_cache.py | 38 +++++++++++++++++++++++++++++++++++++-
2 files changed, 55 insertions(+), 15 deletions(-)
diff --git a/src/pygenesis/cache.py b/src/pygenesis/cache.py
index e08df06..462db94 100644
--- a/src/pygenesis/cache.py
+++ b/src/pygenesis/cache.py
@@ -31,7 +31,7 @@ def wrapper_func(**kwargs):
cache_dir,
)
- # TODO: Is "name" general naming for data download (or is there other uses besides from get_data)?
+ # TODO: Is "name" generally in all subsequent methods (e.g. beyond get_data - or is only data meaningful to cache)?
name = kwargs["name"]
data_dir = cache_dir / name
if data_dir.exists():
@@ -54,8 +54,7 @@ def wrapper_func(**kwargs):
return wrapper_func
-# TODO: Write test, use ID instead of file
-def clean_cache(file: Optional[str]) -> None:
+def clean_cache(file: Optional[str] = None) -> None:
"""Clean the data cache by overall or specific file removal.
Args:
@@ -74,14 +73,19 @@ def clean_cache(file: Optional[str]) -> None:
cache_dir,
)
- # remove specified file (directory) from the data cache or clear complete cache
- # TODO: Find corresponding directories with cache_dir.glob(file)
- file_path = cache_dir / file if file is not None else cache_dir
-
- try:
- if file_path.is_file() or file_path.is_symlink():
- file_path.unlink()
- elif file_path.is_dir():
- shutil.rmtree(file_path)
- except (OSError, ValueError, FileNotFoundError) as e:
- print(f"Failed to delete {file_path}. Reason: {e}")
+ # remove specified file (directory) from the data cache or clear complete cache (remove children, preserve base)
+ file_paths = (
+ [cache_dir / file]
+ if file is not None
+ else [child for child in cache_dir.iterdir()]
+ )
+
+ for file_path in file_paths:
+ # delete if file or symlink, otherwise remove complete tree
+ try:
+ if file_path.is_file() or file_path.is_symlink():
+ file_path.unlink()
+ elif file_path.is_dir():
+ shutil.rmtree(file_path)
+ except (OSError, ValueError, FileNotFoundError) as e:
+ print(f"Failed to delete {file_path}. Reason: {e}")
diff --git a/tests/test_cache.py b/tests/test_cache.py
index 919436e..69b0d97 100644
--- a/tests/test_cache.py
+++ b/tests/test_cache.py
@@ -1,12 +1,13 @@
import time
from datetime import date
from pathlib import Path
+from typing import Optional
import numpy as np
import pandas as pd
import pytest
-from pygenesis.cache import cache_data
+from pygenesis.cache import cache_data, clean_cache
from pygenesis.config import (
DEFAULT_SETTINGS_FILE,
_write_config,
@@ -90,3 +91,38 @@ def test_cache_data_twice(cache_dir):
load_time = time.perf_counter() - load_time
assert load_time < SLEEP_TIME
+
+
+# TODO: double-check functionality of this test
+def clean_cache_setup(cache_dir, file: Optional[str] = None):
+ """
+ Convenience function to cache a file and remove it with different options.
+ """
+ init_config(cache_dir)
+
+ assert len(list((cache_dir / "data").glob("*"))) == 0
+
+ name = "test_clean_cache_decorator" if file is None else file
+ data = decorated_data(name=name)
+
+ assert isinstance(data, pd.DataFrame)
+ assert not data.empty
+
+ cached_data_file: Path = (
+ cache_dir
+ / "data"
+ / name
+ / str(date.today()).replace("-", "")
+ / f"{name}.xz"
+ )
+
+ assert cached_data_file.exists() and cached_data_file.is_file()
+
+ clean_cache(file=file)
+
+ assert not cached_data_file.exists() and not cached_data_file.is_file()
+
+
+def test_clean_cache(cache_dir):
+ clean_cache_setup(cache_dir)
+ clean_cache_setup(cache_dir, file="test_clean_cache_decorator_file")
From 4c80e904ddecb6708963fadf1099d47daae95712 Mon Sep 17 00:00:00 2001
From: MarcoHuebner <57489799+MarcoHuebner@users.noreply.github.com>
Date: Sat, 27 Aug 2022 14:49:47 +0200
Subject: [PATCH 18/27] Fixed linting issues, #45
---
nb/download_tablefile.ipynb | 32 +++++++++++++++++++++++++++++---
src/pygenesis/cache.py | 10 +++++-----
2 files changed, 34 insertions(+), 8 deletions(-)
diff --git a/nb/download_tablefile.ipynb b/nb/download_tablefile.ipynb
index 1702ee9..9ff30a8 100644
--- a/nb/download_tablefile.ipynb
+++ b/nb/download_tablefile.ipynb
@@ -12,7 +12,7 @@
},
{
"cell_type": "code",
- "execution_count": 3,
+ "execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
@@ -36,15 +36,41 @@
},
{
"cell_type": "code",
- "execution_count": 10,
+ "execution_count": 5,
"metadata": {
"scrolled": true
},
- "outputs": [],
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "Cache dir does not exist! Please make sure init_config() was run properly. Path: C:\\Users\\MDick\\.pygenesis\\data\n"
+ ]
+ }
+ ],
"source": [
"data = get_data(name=\"61111-0002\", method=\"tablefile\", table_area=all)"
]
},
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from pygenesis.cache import clean_cache"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "clean_cache(\"61111-0002\")"
+ ]
+ },
{
"cell_type": "code",
"execution_count": 11,
diff --git a/src/pygenesis/cache.py b/src/pygenesis/cache.py
index 462db94..0c38fa2 100644
--- a/src/pygenesis/cache.py
+++ b/src/pygenesis/cache.py
@@ -31,7 +31,8 @@ def wrapper_func(**kwargs):
cache_dir,
)
- # TODO: Is "name" generally in all subsequent methods (e.g. beyond get_data - or is only data meaningful to cache)?
+ # TODO: Is "name" generally in all subsequent methods
+ # (e.g. beyond get_data - or is only data meaningful to cache)?
name = kwargs["name"]
data_dir = cache_dir / name
if data_dir.exists():
@@ -73,11 +74,10 @@ def clean_cache(file: Optional[str] = None) -> None:
cache_dir,
)
- # remove specified file (directory) from the data cache or clear complete cache (remove children, preserve base)
+ # remove specified file (directory) from the data cache
+ # or clear complete cache (remove children, preserve base)
file_paths = (
- [cache_dir / file]
- if file is not None
- else [child for child in cache_dir.iterdir()]
+ [cache_dir / file] if file is not None else list(cache_dir.iterdir())
)
for file_path in file_paths:
From 61152e3df28bdc41fdd5d9b20e786fd285602fc6 Mon Sep 17 00:00:00 2001
From: MarcoHuebner <57489799+MarcoHuebner@users.noreply.github.com>
Date: Sat, 27 Aug 2022 23:33:26 +0200
Subject: [PATCH 19/27] updated with remarks from pull request #45, #46
---
src/pygenesis/cache.py | 20 ++++++++-----
src/pygenesis/custom_exceptions.py | 7 +++++
src/pygenesis/http_helper.py | 28 ++++++++----------
tests/unit_tests/test_http_helper.py | 44 ++++++++++++++++++----------
4 files changed, 60 insertions(+), 39 deletions(-)
create mode 100644 src/pygenesis/custom_exceptions.py
diff --git a/src/pygenesis/cache.py b/src/pygenesis/cache.py
index 0c38fa2..5531bf7 100644
--- a/src/pygenesis/cache.py
+++ b/src/pygenesis/cache.py
@@ -1,4 +1,4 @@
-"""Module provides functions/decorators to cache downloaded data."""
+"""Module provides functions/decorators to cache downloaded data as well as remove cached data."""
import logging
import shutil
from datetime import date
@@ -31,9 +31,13 @@ def wrapper_func(**kwargs):
cache_dir,
)
- # TODO: Is "name" generally in all subsequent methods
- # (e.g. beyond get_data - or is only data meaningful to cache)?
- name = kwargs["name"]
+ # get name specified, if None given do not cache data
+ name = kwargs.get("name", None)
+
+ if name is None:
+ data: pd.DataFrame = func(**kwargs)
+ return data
+
data_dir = cache_dir / name
if data_dir.exists():
# TODO: Implement solution for updated data.
@@ -76,9 +80,7 @@ def clean_cache(file: Optional[str] = None) -> None:
# remove specified file (directory) from the data cache
# or clear complete cache (remove children, preserve base)
- file_paths = (
- [cache_dir / file] if file is not None else list(cache_dir.iterdir())
- )
+ file_paths = [cache_dir / file] if file is not None else cache_dir.iterdir()
for file_path in file_paths:
# delete if file or symlink, otherwise remove complete tree
@@ -88,4 +90,6 @@ def clean_cache(file: Optional[str] = None) -> None:
elif file_path.is_dir():
shutil.rmtree(file_path)
except (OSError, ValueError, FileNotFoundError) as e:
- print(f"Failed to delete {file_path}. Reason: {e}")
+ logger.warning(f"Failed to delete {file_path}. Reason: {e}")
+
+ logger.info(f"Removed files: {file_paths}")
diff --git a/src/pygenesis/custom_exceptions.py b/src/pygenesis/custom_exceptions.py
new file mode 100644
index 0000000..2b09525
--- /dev/null
+++ b/src/pygenesis/custom_exceptions.py
@@ -0,0 +1,7 @@
+"""Define custom "speaking" Exception and Error classes."""
+
+
+class DestatisStatusError(ValueError):
+ """Raised when Destatis status code indicates an error ("Fehler")"""
+
+ pass
diff --git a/src/pygenesis/http_helper.py b/src/pygenesis/http_helper.py
index 039425a..2bde080 100644
--- a/src/pygenesis/http_helper.py
+++ b/src/pygenesis/http_helper.py
@@ -1,12 +1,14 @@
"""Wrapper module for the data endpoint."""
import json
-import warnings
+import logging
import requests
from pygenesis.config import load_config
+from pygenesis.custom_exceptions import DestatisStatusError
config = load_config()
+logger = logging.getLogger(__name__)
def get_response_from_endpoint(
@@ -51,11 +53,8 @@ def _check_invalid_status_code(status_code: int) -> None:
Raises:
AssertionError: Assert that status is not 4xx or 5xx
"""
- if status_code // 100 in [
- 4,
- 5,
- ]:
- raise AssertionError(
+ if status_code // 100 in [4, 5]:
+ raise requests.exceptions.HTTPError(
f"Error {status_code}: The server returned a {status_code} status code"
)
@@ -97,13 +96,12 @@ def _check_destatis_status(destatis_status: dict) -> None:
destatis_status (dict): Status response dict from Destatis
Raises:
- # TODO: Is this a Value or KeyError?
- ValueError: If the status code or type displays an error (caused by the user inputs)
+ DestatisStatusError: If the status code or type displays an error (caused by the user inputs)
"""
# -1 status code for unexpected errors and if no status code is given (faulty response)
- destatis_status_code = int(destatis_status.get("Code", -1))
- destatis_status_type = str(destatis_status.get("Type", "Information"))
- destatis_status_content = str(destatis_status.get("Content"))
+ destatis_status_code = destatis_status.get("Code", -1)
+ destatis_status_type = destatis_status.get("Type", "Information")
+ destatis_status_content = destatis_status.get("Content")
# define status types
error_en_de = ["Error", "Fehler"]
@@ -111,22 +109,22 @@ def _check_destatis_status(destatis_status: dict) -> None:
# check for generic/ system error
if destatis_status_code == -1:
- raise ValueError(
+ raise DestatisStatusError(
"Error: There is a system error.\
Please check your query parameters."
)
# check for destatis/ query errors
elif (destatis_status_code == 104) or (destatis_status_type in error_en_de):
- raise ValueError(destatis_status_content)
+ raise DestatisStatusError(destatis_status_content)
# output warnings to user
elif (destatis_status_code == 22) or (
destatis_status_type in warning_en_de
):
- warnings.warn(destatis_status_content, UserWarning, stacklevel=2)
+ logger.warning(destatis_status_content)
# output information to user
# TODO: Would logger.info (with forced visibility) be the better option?
elif destatis_status_type.lower() == "information":
- print(f"Code {destatis_status_code}: {destatis_status_content}")
+ logger.info(f"Code {destatis_status_code}: {destatis_status_content}")
diff --git a/tests/unit_tests/test_http_helper.py b/tests/unit_tests/test_http_helper.py
index b5cf882..600c849 100644
--- a/tests/unit_tests/test_http_helper.py
+++ b/tests/unit_tests/test_http_helper.py
@@ -1,9 +1,11 @@
import json
+import logging
import pytest
import requests
-from src.pygenesis.http_helper import (
+from pygenesis.custom_exceptions import DestatisStatusError
+from pygenesis.http_helper import (
_check_invalid_destatis_status_code,
_check_invalid_status_code,
)
@@ -18,7 +20,7 @@ def test__check_invalid_status_code_with_error():
for _handle_status_code method.
"""
for status_code in [400, 500]:
- with pytest.raises(AssertionError) as e:
+ with pytest.raises(requests.exceptions.HTTPError) as e:
_check_invalid_status_code(status_code)
assert (
str(e.value)
@@ -95,36 +97,46 @@ def test__check_invalid_destatis_status_code_with_error():
# extract status content which is raised
status_content = status.json().get("Status").get("Content")
- with pytest.raises(ValueError) as e:
+ with pytest.raises(DestatisStatusError) as e:
_check_invalid_destatis_status_code(status)
assert str(e.value) == status_content
-def test__check_invalid_destatis_status_code_with_warning():
+def test__check_invalid_destatis_status_code_with_warning(caplog):
"""
Basic tests to check a warning status code as defined in the
documentation via code (e.g. 22) or type ('Warning', 'Warnung').
"""
+ caplog.set_level(logging.WARNING)
for status in [
_generic_request_status(code=22),
_generic_request_status(status_type="Warnung"),
_generic_request_status(status_type="Warning"),
]:
- # TODO: Is this the best/ most specific way to capture the warning?
- with pytest.warns(UserWarning):
- _check_invalid_destatis_status_code(status)
+ # extract status content which is contained in warning
+ status_content = status.json().get("Status").get("Content")
+
+ _check_invalid_destatis_status_code(status)
+ assert status_content in caplog.text
-def test__check_invalid_destatis_status_code_without_error():
+
+def test__check_invalid_destatis_status_code_without_error(caplog):
"""
Basic tests to check the successful status code 0 or only text response as defined in the documentation.
"""
- for status in [
- _generic_request_status(),
- _generic_request_status(status_response=False),
- ]:
- try:
- _check_invalid_destatis_status_code(status)
- except Exception:
- assert False
+ # JSON response with status code
+ caplog.set_level(logging.INFO)
+ status = _generic_request_status()
+ status_content = status.json().get("Status").get("Content")
+ _check_invalid_destatis_status_code(status)
+
+ assert status_content in caplog.text
+
+ # text only response
+ status_text = _generic_request_status(status_response=False)
+ try:
+ _check_invalid_destatis_status_code(status_text)
+ except Exception:
+ assert False
From c1f29b3bca16a0dac963c78c9c63692afff3f89f Mon Sep 17 00:00:00 2001
From: MarcoHuebner <57489799+MarcoHuebner@users.noreply.github.com>
Date: Sat, 27 Aug 2022 23:40:20 +0200
Subject: [PATCH 20/27] updated logging due to linting errors W1203
---
src/pygenesis/cache.py | 4 ++--
src/pygenesis/http_helper.py | 4 +++-
2 files changed, 5 insertions(+), 3 deletions(-)
diff --git a/src/pygenesis/cache.py b/src/pygenesis/cache.py
index 5531bf7..db7b503 100644
--- a/src/pygenesis/cache.py
+++ b/src/pygenesis/cache.py
@@ -90,6 +90,6 @@ def clean_cache(file: Optional[str] = None) -> None:
elif file_path.is_dir():
shutil.rmtree(file_path)
except (OSError, ValueError, FileNotFoundError) as e:
- logger.warning(f"Failed to delete {file_path}. Reason: {e}")
+ logger.warning("Failed to delete %s. Reason: %s", file_path, e)
- logger.info(f"Removed files: {file_paths}")
+ logger.info("Removed files: %s", file_paths)
diff --git a/src/pygenesis/http_helper.py b/src/pygenesis/http_helper.py
index 2bde080..cbd7dbf 100644
--- a/src/pygenesis/http_helper.py
+++ b/src/pygenesis/http_helper.py
@@ -127,4 +127,6 @@ def _check_destatis_status(destatis_status: dict) -> None:
# output information to user
# TODO: Would logger.info (with forced visibility) be the better option?
elif destatis_status_type.lower() == "information":
- logger.info(f"Code {destatis_status_code}: {destatis_status_content}")
+ logger.info(
+ "Code %d : %s", destatis_status_code, destatis_status_content
+ )
From 1b72643e07aae1f3f25c9db7a7c0b488df568fb9 Mon Sep 17 00:00:00 2001
From: MarcoHuebner <57489799+MarcoHuebner@users.noreply.github.com>
Date: Sun, 4 Sep 2022 16:37:35 +0200
Subject: [PATCH 21/27] Improved test coverage, updated temporary test directory
for user names with spaces and non-latin characters, added mocked API call,
added mocked create_settings call, moved test_http_helper, added TODOs for
the future, #45
---
pyproject.toml | 1 +
src/pygenesis/cache.py | 11 +--
src/pygenesis/destatis.py | 59 ----------------
src/pygenesis/http_helper.py | 5 +-
src/pygenesis/statistic.py | 35 ----------
tests/test_cache.py | 28 ++++++--
tests/test_config.py | 17 ++++-
tests/{unit_tests => }/test_http_helper.py | 80 ++++++++++++++--------
tests/unit_tests/__init__.py | 0
9 files changed, 99 insertions(+), 137 deletions(-)
delete mode 100644 src/pygenesis/destatis.py
delete mode 100644 src/pygenesis/statistic.py
rename tests/{unit_tests => }/test_http_helper.py (84%)
delete mode 100644 tests/unit_tests/__init__.py
diff --git a/pyproject.toml b/pyproject.toml
index ca7c667..8de1656 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -9,6 +9,7 @@ python = "^3.8"
requests = "^2.27.1"
pandas = "^1.4.3"
+# TODO: add mock
[tool.poetry.dev-dependencies]
bandit = "^1.7.4"
black = "^22.3.0"
diff --git a/src/pygenesis/cache.py b/src/pygenesis/cache.py
index db7b503..de35099 100644
--- a/src/pygenesis/cache.py
+++ b/src/pygenesis/cache.py
@@ -69,13 +69,14 @@ def clean_cache(file: Optional[str] = None) -> None:
config = load_config()
# check for cache_dir in DATA section of the config.ini
- # TODO: What happens if this key is not defined? is that error understandable?
- cache_dir = Path(config["DATA"]["cache_dir"])
-
- if not cache_dir.is_dir() or not cache_dir.exists():
+ try:
+ cache_dir = Path(config["DATA"]["cache_dir"])
+ except KeyError as e:
logger.critical(
- "Cache dir does not exist! Please make sure init_config() was run properly. Path: %s",
+ "Cache dir does not exist! Please make sure init_config() was run properly. \
+ Path: %s, Error: %s",
cache_dir,
+ e,
)
# remove specified file (directory) from the data cache
diff --git a/src/pygenesis/destatis.py b/src/pygenesis/destatis.py
deleted file mode 100644
index f61d608..0000000
--- a/src/pygenesis/destatis.py
+++ /dev/null
@@ -1,59 +0,0 @@
-"""Module provides functions to work with the GENESIS REST-API."""
-from typing import Any
-
-from pygenesis.config import load_config
-from pygenesis.http_helper import get_response_from_endpoint
-
-config = load_config()
-
-
-def get_metadata(endpoint: str, name: str) -> Any:
- """Method for downloading metadata from www-genesis.destatis.de.
-
- Method supports the following endpoints:
- - cube
- - statistic
- - table
- - timeseries
- - value
- - variable
-
- Args:
- endpoint (str): One of the supported endpoints, e.g. statistic.
- name (str): Unique name of the object.
-
- Returns:
- Any: JSON formatted content of "Object" response.
- """
- params = {
- "name": name,
- }
-
- return get_response_from_endpoint("metadata", endpoint, params).json()
-
-
-def get_catalogue(endpoint: str, params: dict) -> Any:
- """Method for downloading catalogue data from www-genesis.destatis.de.
-
- Args:
- endpoint (str): One of the supported endpoints, e.g. cubes.
- params (dict): The query parameter as defined by the API.
-
- Returns:
- Any: JSON formatted response for the given query parameters.
- """
-
- return get_response_from_endpoint("catalogue", endpoint, params).json()
-
-
-def get_cubefile(params: dict) -> Any:
- """Method for downloading cube files from www-genesis.destatis.de.
-
- Args:
- params (dict): The query parameter as defined by the API.
-
- Returns:
- Any: The content of the cubefile.
- """
-
- return get_response_from_endpoint("data", "cubefile", params).text
diff --git a/src/pygenesis/http_helper.py b/src/pygenesis/http_helper.py
index cbd7dbf..661c5ae 100644
--- a/src/pygenesis/http_helper.py
+++ b/src/pygenesis/http_helper.py
@@ -88,6 +88,7 @@ def _check_destatis_status(destatis_status: dict) -> None:
If the status message is erroneous an error will be raised.
Possible Codes (2.1.2 Grundstruktur der Responses):
+ # TODO: Ask Destatis for full list of error codes
- 0: "erfolgreich" (Type: "Information")
- 22: "erfolgreich mit Parameteranpassung" (Type: "Warnung")
- 104: "Kein passendes Objekt zu Suche" (Type: "Information")
@@ -110,8 +111,7 @@ def _check_destatis_status(destatis_status: dict) -> None:
# check for generic/ system error
if destatis_status_code == -1:
raise DestatisStatusError(
- "Error: There is a system error.\
- Please check your query parameters."
+ "Error: There is a system error. Please check your query parameters."
)
# check for destatis/ query errors
@@ -125,7 +125,6 @@ def _check_destatis_status(destatis_status: dict) -> None:
logger.warning(destatis_status_content)
# output information to user
- # TODO: Would logger.info (with forced visibility) be the better option?
elif destatis_status_type.lower() == "information":
logger.info(
"Code %d : %s", destatis_status_code, destatis_status_content
diff --git a/src/pygenesis/statistic.py b/src/pygenesis/statistic.py
deleted file mode 100644
index c6683c3..0000000
--- a/src/pygenesis/statistic.py
+++ /dev/null
@@ -1,35 +0,0 @@
-"""Module provides a demo class for encapsulating the statistic object from GENESIS."""
-from pygenesis.destatis import get_catalogue, get_metadata
-
-
-class Statistic:
- """A class representing the statistic object from the GENESIS database.
-
- Attributes:
- name (str): The unique EVAS ID for this statistic.
- metadata (dict): The metadata for this statistic.
- cubes (list): All cubes that are associated with this statistic.
- variables (list): All variables that are associated with this statistic.
- tables (list): All tables that are associated with that statistic.
- """
-
- def __init__(self, name: str) -> None:
- self.name = name
- self.metadata: dict = get_metadata("statistic", name).get("Object", {})
- self.cubes: list = get_catalogue(
- "cubes2statistic", {"name": name, "selection": ""}
- ).get("List", [])
- self.variables: list = get_catalogue(
- "variables2statistic", {"name": name, "selection": ""}
- ).get("List", [])
- self.tables: list = get_catalogue(
- "tables2statistic", {"name": name, "selection": ""}
- ).get("List", [])
-
-
-if __name__ == "__main__":
- stat = Statistic("23211")
- print("metadata:", stat.metadata)
- print("cubes:", stat.cubes)
- print("variables:", stat.variables)
- print("tables:", stat.tables)
diff --git a/tests/test_cache.py b/tests/test_cache.py
index 69b0d97..5da2932 100644
--- a/tests/test_cache.py
+++ b/tests/test_cache.py
@@ -1,3 +1,4 @@
+import re
import time
from datetime import date
from pathlib import Path
@@ -20,7 +21,11 @@
@pytest.fixture()
def cache_dir(tmp_path_factory):
- return tmp_path_factory.mktemp(".pygenesis")
+ # remove white-space and non-latin characters (issue for some user names)
+ temp_dir = str(tmp_path_factory.mktemp(".pygenesis"))
+ temp_dir = re.sub(r"[^\x00-\x7f]", r"", temp_dir.replace(" ", ""))
+
+ return Path(temp_dir)
@pytest.fixture(autouse=True)
@@ -77,6 +82,15 @@ def test_cache_data_wrapper(cache_dir):
pd.testing.assert_frame_equal(data, restored_data, check_index_type=False)
+def test_cache_data_wrapper_without_name(cache_dir):
+ init_config(cache_dir)
+
+ data = decorated_data(name=None)
+
+ assert isinstance(data, pd.DataFrame)
+ assert not data.empty
+
+
def test_cache_data_twice(cache_dir):
init_config(cache_dir)
@@ -93,7 +107,6 @@ def test_cache_data_twice(cache_dir):
assert load_time < SLEEP_TIME
-# TODO: double-check functionality of this test
def clean_cache_setup(cache_dir, file: Optional[str] = None):
"""
Convenience function to cache a file and remove it with different options.
@@ -102,7 +115,7 @@ def clean_cache_setup(cache_dir, file: Optional[str] = None):
assert len(list((cache_dir / "data").glob("*"))) == 0
- name = "test_clean_cache_decorator" if file is None else file
+ name = "test_clean_cache_cache_file" if file is None else file
data = decorated_data(name=name)
assert isinstance(data, pd.DataFrame)
@@ -124,5 +137,12 @@ def clean_cache_setup(cache_dir, file: Optional[str] = None):
def test_clean_cache(cache_dir):
+ # clean complete cache
clean_cache_setup(cache_dir)
- clean_cache_setup(cache_dir, file="test_clean_cache_decorator_file")
+ # TODO: So far not working as expected: is_file returns False & the path is treated like a directory
+ # clean only one file
+ name = "test_clean_cache_cache_file"
+ file_path = (
+ Path("data") / name / str(date.today()).replace("-", "") / f"{name}.xz"
+ )
+ clean_cache_setup(cache_dir, file=str(file_path))
diff --git a/tests/test_config.py b/tests/test_config.py
index 32c0762..873f16e 100644
--- a/tests/test_config.py
+++ b/tests/test_config.py
@@ -1,12 +1,15 @@
import logging
+import re
from configparser import ConfigParser
from pathlib import Path
import pytest
+from mock import patch
from pygenesis.config import (
DEFAULT_SETTINGS_FILE,
_write_config,
+ create_settings,
get_config_path_from_settings,
init_config,
load_config,
@@ -16,7 +19,11 @@
@pytest.fixture()
def config_dir(tmp_path_factory):
- return tmp_path_factory.mktemp(".pygenesis")
+ # remove white-space and non-latin characters (issue for some user names)
+ temp_dir = str(tmp_path_factory.mktemp(".pygenesis"))
+ temp_dir = re.sub(r"[^\x00-\x7f]", r"", temp_dir.replace(" ", ""))
+
+ return Path(temp_dir)
@pytest.fixture(autouse=True)
@@ -30,6 +37,14 @@ def test_settings():
assert DEFAULT_SETTINGS_FILE.exists() and DEFAULT_SETTINGS_FILE.is_file()
+@patch("pygenesis.config.DEFAULT_CONFIG_DIR")
+@patch("pygenesis.config.DEFAULT_SETTINGS_FILE")
+def test_create_settings(mock_config, mock_settings, config_dir):
+ mock_config.return_value = config_dir
+ mock_settings.return_value = config_dir / "settings.ini"
+ create_settings()
+
+
def test_load_settings():
settings = load_settings()
diff --git a/tests/unit_tests/test_http_helper.py b/tests/test_http_helper.py
similarity index 84%
rename from tests/unit_tests/test_http_helper.py
rename to tests/test_http_helper.py
index 600c849..48b1b2b 100644
--- a/tests/unit_tests/test_http_helper.py
+++ b/tests/test_http_helper.py
@@ -3,42 +3,15 @@
import pytest
import requests
+from mock import patch
from pygenesis.custom_exceptions import DestatisStatusError
from pygenesis.http_helper import (
_check_invalid_destatis_status_code,
_check_invalid_status_code,
+ get_response_from_endpoint,
)
-# TODO: Add generic dummy request to the server, which is not getting us timed out,
-# to test get_response_from_endpoint completely?
-
-
-def test__check_invalid_status_code_with_error():
- """
- Basic tests to check an error status code (4xx, 5xx)
- for _handle_status_code method.
- """
- for status_code in [400, 500]:
- with pytest.raises(requests.exceptions.HTTPError) as e:
- _check_invalid_status_code(status_code)
- assert (
- str(e.value)
- == f"Error {status_code}: The server returned a {status_code} status code"
- )
-
-
-def test__check_invalid_status_code_without_error():
- """
- Basic test to check a valid status code (2xx)
- for the _handle_status_code method.
- """
- status_code = 200
- try:
- _check_invalid_status_code(status_code)
- except Exception:
- assert False
-
def _generic_request_status(
status_response: bool = True,
@@ -84,10 +57,47 @@ def _generic_request_status(
return request_status
+@patch("requests.get")
+def test_get_response_from_endpoint(mocker):
+ """
+ Test once with generic API response, more detailed tests
+ of subfunctions and specific cases below.
+ """
+ mocker.return_value = _generic_request_status()
+
+ get_response_from_endpoint("endpoint", "method", {})
+
+
+def test__check_invalid_status_code_with_error():
+ """
+ Basic tests to check an error status code (4xx, 5xx)
+ for _handle_status_code method.
+ """
+ for status_code in [400, 500]:
+ with pytest.raises(requests.exceptions.HTTPError) as e:
+ _check_invalid_status_code(status_code)
+ assert (
+ str(e.value)
+ == f"Error {status_code}: The server returned a {status_code} status code"
+ )
+
+
+def test__check_invalid_status_code_without_error():
+ """
+ Basic test to check a valid status code (2xx)
+ for the _handle_status_code method.
+ """
+ status_code = 200
+ try:
+ _check_invalid_status_code(status_code)
+ except Exception:
+ assert False
+
+
def test__check_invalid_destatis_status_code_with_error():
"""
Basic tests to check an error status code as defined in the
- documentation via code (e.g. 104) or type ('Error', 'Fehler').
+ documentation via code (e.g. -1, 104) or type ('Error', 'Fehler').
"""
for status in [
_generic_request_status(code=104),
@@ -101,6 +111,16 @@ def test__check_invalid_destatis_status_code_with_error():
_check_invalid_destatis_status_code(status)
assert str(e.value) == status_content
+ # also test generic -1 error code
+ generic_error_status = _generic_request_status(code=-1)
+
+ with pytest.raises(DestatisStatusError) as e:
+ _check_invalid_destatis_status_code(generic_error_status)
+ assert (
+ str(e.value)
+ == "Error: There is a system error. Please check your query parameters."
+ )
+
def test__check_invalid_destatis_status_code_with_warning(caplog):
"""
diff --git a/tests/unit_tests/__init__.py b/tests/unit_tests/__init__.py
deleted file mode 100644
index e69de29..0000000
From 27da9a3e0dd41447f0fe7793005d3dacfb23b3c0 Mon Sep 17 00:00:00 2001
From: MarcoHuebner <57489799+MarcoHuebner@users.noreply.github.com>
Date: Sun, 4 Sep 2022 16:41:03 +0200
Subject: [PATCH 22/27] Added mock as dependency (as otherwise CI/CD
understandably fails), #45
---
pyproject.toml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pyproject.toml b/pyproject.toml
index 8de1656..703281e 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -9,12 +9,12 @@ python = "^3.8"
requests = "^2.27.1"
pandas = "^1.4.3"
-# TODO: add mock
[tool.poetry.dev-dependencies]
bandit = "^1.7.4"
black = "^22.3.0"
flake8-docstrings = "^1.6.0"
isort = "^5.10.1"
+mock = "^4.0.3"
mypy = "^0.942"
pdoc3 = "^0.10.0"
pre-commit = "^2.18.1"
From 40097f49c86f49207be1050370a6621090d2c51b Mon Sep 17 00:00:00 2001
From: MarcoHuebner <57489799+MarcoHuebner@users.noreply.github.com>
Date: Sun, 4 Sep 2022 16:46:41 +0200
Subject: [PATCH 23/27] Also update poetry.lock, forgot it before, #45
---
poetry.lock | 147 ++++++++++++++++++++++++++++++++++------------------
1 file changed, 98 insertions(+), 49 deletions(-)
diff --git a/poetry.lock b/poetry.lock
index 84524d2..021bbb2 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -122,7 +122,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
[[package]]
name = "coverage"
-version = "6.4.2"
+version = "6.4.4"
description = "Code coverage measurement for Python"
category = "dev"
optional = false
@@ -305,9 +305,9 @@ python-versions = ">=3.7"
MarkupSafe = ">=0.9.2"
[package.extras]
-babel = ["babel"]
-lingua = ["lingua"]
testing = ["pytest"]
+lingua = ["lingua"]
+babel = ["babel"]
[[package]]
name = "markdown"
@@ -339,6 +339,19 @@ category = "dev"
optional = false
python-versions = "*"
+[[package]]
+name = "mock"
+version = "4.0.3"
+description = "Rolling backport of unittest.mock for all Pythons"
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
+[package.extras]
+build = ["twine", "wheel", "blurb"]
+docs = ["sphinx"]
+test = ["pytest (<5.4)", "pytest-cov"]
+
[[package]]
name = "mypy"
version = "0.942"
@@ -462,8 +475,8 @@ optional = false
python-versions = ">=3.6"
[package.extras]
-dev = ["pre-commit", "tox"]
-testing = ["pytest", "pytest-benchmark"]
+testing = ["pytest-benchmark", "pytest"]
+dev = ["tox", "pre-commit"]
[[package]]
name = "pre-commit"
@@ -585,7 +598,7 @@ coverage = {version = ">=5.2.1", extras = ["toml"]}
pytest = ">=4.6"
[package.extras]
-testing = ["fields", "hunter", "process-tests", "six", "pytest-xdist", "virtualenv"]
+testing = ["virtualenv", "pytest-xdist", "six", "process-tests", "hunter", "fields"]
[[package]]
name = "python-dateutil"
@@ -759,7 +772,7 @@ testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-
[metadata]
lock-version = "1.1"
python-versions = "^3.8"
-content-hash = "a92b306c4bc5d63e146cf2e071b8fba7d537867c3e835da9f1450b02617219cc"
+content-hash = "a137081485b16f94b4c751463c1158b0f74cff50f7cd0055c19f18d02cfd8c8a"
[metadata.files]
astroid = [
@@ -824,47 +837,56 @@ colorama = [
{file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"},
]
coverage = [
- {file = "coverage-6.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a9032f9b7d38bdf882ac9f66ebde3afb8145f0d4c24b2e600bc4c6304aafb87e"},
- {file = "coverage-6.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e0524adb49c716ca763dbc1d27bedce36b14f33e6b8af6dba56886476b42957c"},
- {file = "coverage-6.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4548be38a1c810d79e097a38107b6bf2ff42151900e47d49635be69943763d8"},
- {file = "coverage-6.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f23876b018dfa5d3e98e96f5644b109090f16a4acb22064e0f06933663005d39"},
- {file = "coverage-6.4.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fe75dcfcb889b6800f072f2af5a331342d63d0c1b3d2bf0f7b4f6c353e8c9c0"},
- {file = "coverage-6.4.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2f8553878a24b00d5ab04b7a92a2af50409247ca5c4b7a2bf4eabe94ed20d3ee"},
- {file = "coverage-6.4.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:d774d9e97007b018a651eadc1b3970ed20237395527e22cbeb743d8e73e0563d"},
- {file = "coverage-6.4.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d56f105592188ce7a797b2bd94b4a8cb2e36d5d9b0d8a1d2060ff2a71e6b9bbc"},
- {file = "coverage-6.4.2-cp310-cp310-win32.whl", hash = "sha256:d230d333b0be8042ac34808ad722eabba30036232e7a6fb3e317c49f61c93386"},
- {file = "coverage-6.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:5ef42e1db047ca42827a85e34abe973971c635f83aed49611b7f3ab49d0130f0"},
- {file = "coverage-6.4.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:25b7ec944f114f70803d6529394b64f8749e93cbfac0fe6c5ea1b7e6c14e8a46"},
- {file = "coverage-6.4.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bb00521ab4f99fdce2d5c05a91bddc0280f0afaee0e0a00425e28e209d4af07"},
- {file = "coverage-6.4.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2dff52b3e7f76ada36f82124703f4953186d9029d00d6287f17c68a75e2e6039"},
- {file = "coverage-6.4.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:147605e1702d996279bb3cc3b164f408698850011210d133a2cb96a73a2f7996"},
- {file = "coverage-6.4.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:422fa44070b42fef9fb8dabd5af03861708cdd6deb69463adc2130b7bf81332f"},
- {file = "coverage-6.4.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:8af6c26ba8df6338e57bedbf916d76bdae6308e57fc8f14397f03b5da8622b4e"},
- {file = "coverage-6.4.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:5336e0352c0b12c7e72727d50ff02557005f79a0b8dcad9219c7c4940a930083"},
- {file = "coverage-6.4.2-cp37-cp37m-win32.whl", hash = "sha256:0f211df2cba951ffcae210ee00e54921ab42e2b64e0bf2c0befc977377fb09b7"},
- {file = "coverage-6.4.2-cp37-cp37m-win_amd64.whl", hash = "sha256:a13772c19619118903d65a91f1d5fea84be494d12fd406d06c849b00d31bf120"},
- {file = "coverage-6.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f7bd0ffbcd03dc39490a1f40b2669cc414fae0c4e16b77bb26806a4d0b7d1452"},
- {file = "coverage-6.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0895ea6e6f7f9939166cc835df8fa4599e2d9b759b02d1521b574e13b859ac32"},
- {file = "coverage-6.4.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4e7ced84a11c10160c0697a6cc0b214a5d7ab21dfec1cd46e89fbf77cc66fae"},
- {file = "coverage-6.4.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80db4a47a199c4563d4a25919ff29c97c87569130375beca3483b41ad5f698e8"},
- {file = "coverage-6.4.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3def6791adf580d66f025223078dc84c64696a26f174131059ce8e91452584e1"},
- {file = "coverage-6.4.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4f89d8e03c8a3757aae65570d14033e8edf192ee9298303db15955cadcff0c63"},
- {file = "coverage-6.4.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6d0b48aff8e9720bdec315d67723f0babd936a7211dc5df453ddf76f89c59933"},
- {file = "coverage-6.4.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2b20286c2b726f94e766e86a3fddb7b7e37af5d0c635bdfa7e4399bc523563de"},
- {file = "coverage-6.4.2-cp38-cp38-win32.whl", hash = "sha256:d714af0bdba67739598849c9f18efdcc5a0412f4993914a0ec5ce0f1e864d783"},
- {file = "coverage-6.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:5f65e5d3ff2d895dab76b1faca4586b970a99b5d4b24e9aafffc0ce94a6022d6"},
- {file = "coverage-6.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a697977157adc052284a7160569b36a8bbec09db3c3220642e6323b47cec090f"},
- {file = "coverage-6.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c77943ef768276b61c96a3eb854eba55633c7a3fddf0a79f82805f232326d33f"},
- {file = "coverage-6.4.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54d8d0e073a7f238f0666d3c7c0d37469b2aa43311e4024c925ee14f5d5a1cbe"},
- {file = "coverage-6.4.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f22325010d8824594820d6ce84fa830838f581a7fd86a9235f0d2ed6deb61e29"},
- {file = "coverage-6.4.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24b04d305ea172ccb21bee5bacd559383cba2c6fcdef85b7701cf2de4188aa55"},
- {file = "coverage-6.4.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:866ebf42b4c5dbafd64455b0a1cd5aa7b4837a894809413b930026c91e18090b"},
- {file = "coverage-6.4.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e36750fbbc422c1c46c9d13b937ab437138b998fe74a635ec88989afb57a3978"},
- {file = "coverage-6.4.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:79419370d6a637cb18553ecb25228893966bd7935a9120fa454e7076f13b627c"},
- {file = "coverage-6.4.2-cp39-cp39-win32.whl", hash = "sha256:b5e28db9199dd3833cc8a07fa6cf429a01227b5d429facb56eccd765050c26cd"},
- {file = "coverage-6.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:edfdabe7aa4f97ed2b9dd5dde52d2bb29cb466993bb9d612ddd10d0085a683cf"},
- {file = "coverage-6.4.2-pp36.pp37.pp38-none-any.whl", hash = "sha256:e2618cb2cf5a7cc8d698306e42ebcacd02fb7ef8cfc18485c59394152c70be97"},
- {file = "coverage-6.4.2.tar.gz", hash = "sha256:6c3ccfe89c36f3e5b9837b9ee507472310164f352c9fe332120b764c9d60adbe"},
+ {file = "coverage-6.4.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e7b4da9bafad21ea45a714d3ea6f3e1679099e420c8741c74905b92ee9bfa7cc"},
+ {file = "coverage-6.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fde17bc42e0716c94bf19d92e4c9f5a00c5feb401f5bc01101fdf2a8b7cacf60"},
+ {file = "coverage-6.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdbb0d89923c80dbd435b9cf8bba0ff55585a3cdb28cbec65f376c041472c60d"},
+ {file = "coverage-6.4.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:67f9346aeebea54e845d29b487eb38ec95f2ecf3558a3cffb26ee3f0dcc3e760"},
+ {file = "coverage-6.4.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42c499c14efd858b98c4e03595bf914089b98400d30789511577aa44607a1b74"},
+ {file = "coverage-6.4.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c35cca192ba700979d20ac43024a82b9b32a60da2f983bec6c0f5b84aead635c"},
+ {file = "coverage-6.4.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9cc4f107009bca5a81caef2fca843dbec4215c05e917a59dec0c8db5cff1d2aa"},
+ {file = "coverage-6.4.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5f444627b3664b80d078c05fe6a850dd711beeb90d26731f11d492dcbadb6973"},
+ {file = "coverage-6.4.4-cp310-cp310-win32.whl", hash = "sha256:66e6df3ac4659a435677d8cd40e8eb1ac7219345d27c41145991ee9bf4b806a0"},
+ {file = "coverage-6.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:35ef1f8d8a7a275aa7410d2f2c60fa6443f4a64fae9be671ec0696a68525b875"},
+ {file = "coverage-6.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c1328d0c2f194ffda30a45f11058c02410e679456276bfa0bbe0b0ee87225fac"},
+ {file = "coverage-6.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:61b993f3998ee384935ee423c3d40894e93277f12482f6e777642a0141f55782"},
+ {file = "coverage-6.4.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d5dd4b8e9cd0deb60e6fcc7b0647cbc1da6c33b9e786f9c79721fd303994832f"},
+ {file = "coverage-6.4.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7026f5afe0d1a933685d8f2169d7c2d2e624f6255fb584ca99ccca8c0e966fd7"},
+ {file = "coverage-6.4.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9c7b9b498eb0c0d48b4c2abc0e10c2d78912203f972e0e63e3c9dc21f15abdaa"},
+ {file = "coverage-6.4.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ee2b2fb6eb4ace35805f434e0f6409444e1466a47f620d1d5763a22600f0f892"},
+ {file = "coverage-6.4.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ab066f5ab67059d1f1000b5e1aa8bbd75b6ed1fc0014559aea41a9eb66fc2ce0"},
+ {file = "coverage-6.4.4-cp311-cp311-win32.whl", hash = "sha256:9d6e1f3185cbfd3d91ac77ea065d85d5215d3dfa45b191d14ddfcd952fa53796"},
+ {file = "coverage-6.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:e3d3c4cc38b2882f9a15bafd30aec079582b819bec1b8afdbde8f7797008108a"},
+ {file = "coverage-6.4.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a095aa0a996ea08b10580908e88fbaf81ecf798e923bbe64fb98d1807db3d68a"},
+ {file = "coverage-6.4.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef6f44409ab02e202b31a05dd6666797f9de2aa2b4b3534e9d450e42dea5e817"},
+ {file = "coverage-6.4.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b7101938584d67e6f45f0015b60e24a95bf8dea19836b1709a80342e01b472f"},
+ {file = "coverage-6.4.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14a32ec68d721c3d714d9b105c7acf8e0f8a4f4734c811eda75ff3718570b5e3"},
+ {file = "coverage-6.4.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6a864733b22d3081749450466ac80698fe39c91cb6849b2ef8752fd7482011f3"},
+ {file = "coverage-6.4.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:08002f9251f51afdcc5e3adf5d5d66bb490ae893d9e21359b085f0e03390a820"},
+ {file = "coverage-6.4.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a3b2752de32c455f2521a51bd3ffb53c5b3ae92736afde67ce83477f5c1dd928"},
+ {file = "coverage-6.4.4-cp37-cp37m-win32.whl", hash = "sha256:f855b39e4f75abd0dfbcf74a82e84ae3fc260d523fcb3532786bcbbcb158322c"},
+ {file = "coverage-6.4.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ee6ae6bbcac0786807295e9687169fba80cb0617852b2fa118a99667e8e6815d"},
+ {file = "coverage-6.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:564cd0f5b5470094df06fab676c6d77547abfdcb09b6c29c8a97c41ad03b103c"},
+ {file = "coverage-6.4.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cbbb0e4cd8ddcd5ef47641cfac97d8473ab6b132dd9a46bacb18872828031685"},
+ {file = "coverage-6.4.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6113e4df2fa73b80f77663445be6d567913fb3b82a86ceb64e44ae0e4b695de1"},
+ {file = "coverage-6.4.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8d032bfc562a52318ae05047a6eb801ff31ccee172dc0d2504614e911d8fa83e"},
+ {file = "coverage-6.4.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e431e305a1f3126477abe9a184624a85308da8edf8486a863601d58419d26ffa"},
+ {file = "coverage-6.4.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cf2afe83a53f77aec067033199797832617890e15bed42f4a1a93ea24794ae3e"},
+ {file = "coverage-6.4.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:783bc7c4ee524039ca13b6d9b4186a67f8e63d91342c713e88c1865a38d0892a"},
+ {file = "coverage-6.4.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ff934ced84054b9018665ca3967fc48e1ac99e811f6cc99ea65978e1d384454b"},
+ {file = "coverage-6.4.4-cp38-cp38-win32.whl", hash = "sha256:e1fabd473566fce2cf18ea41171d92814e4ef1495e04471786cbc943b89a3781"},
+ {file = "coverage-6.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:4179502f210ebed3ccfe2f78bf8e2d59e50b297b598b100d6c6e3341053066a2"},
+ {file = "coverage-6.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:98c0b9e9b572893cdb0a00e66cf961a238f8d870d4e1dc8e679eb8bdc2eb1b86"},
+ {file = "coverage-6.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fc600f6ec19b273da1d85817eda339fb46ce9eef3e89f220055d8696e0a06908"},
+ {file = "coverage-6.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7a98d6bf6d4ca5c07a600c7b4e0c5350cd483c85c736c522b786be90ea5bac4f"},
+ {file = "coverage-6.4.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01778769097dbd705a24e221f42be885c544bb91251747a8a3efdec6eb4788f2"},
+ {file = "coverage-6.4.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfa0b97eb904255e2ab24166071b27408f1f69c8fbda58e9c0972804851e0558"},
+ {file = "coverage-6.4.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:fcbe3d9a53e013f8ab88734d7e517eb2cd06b7e689bedf22c0eb68db5e4a0a19"},
+ {file = "coverage-6.4.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:15e38d853ee224e92ccc9a851457fb1e1f12d7a5df5ae44544ce7863691c7a0d"},
+ {file = "coverage-6.4.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6913dddee2deff8ab2512639c5168c3e80b3ebb0f818fed22048ee46f735351a"},
+ {file = "coverage-6.4.4-cp39-cp39-win32.whl", hash = "sha256:354df19fefd03b9a13132fa6643527ef7905712109d9c1c1903f2133d3a4e145"},
+ {file = "coverage-6.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:1238b08f3576201ebf41f7c20bf59baa0d05da941b123c6656e42cdb668e9827"},
+ {file = "coverage-6.4.4-pp36.pp37.pp38-none-any.whl", hash = "sha256:f67cf9f406cf0d2f08a3515ce2db5b82625a7257f88aad87904674def6ddaec1"},
+ {file = "coverage-6.4.4.tar.gz", hash = "sha256:e16c45b726acb780e1e6f88b286d3c10b3914ab03438f32117c4aa52d7f30d58"},
]
dill = [
{file = "dill-0.3.4-py2.py3-none-any.whl", hash = "sha256:7e40e4a70304fd9ceab3535d36e58791d9c4a776b38ec7f7ec9afc8d3dca4d4f"},
@@ -1011,6 +1033,10 @@ mccabe = [
{file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"},
{file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"},
]
+mock = [
+ {file = "mock-4.0.3-py3-none-any.whl", hash = "sha256:122fcb64ee37cfad5b3f48d7a7d51875d7031aaf3d8be7c42e2bee25044eee62"},
+ {file = "mock-4.0.3.tar.gz", hash = "sha256:7d3fbbde18228f4ff2f1f119a45cdffa458b4c0dee32eb4d2bb2f82554bac7bc"},
+]
mypy = [
{file = "mypy-0.942-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5bf44840fb43ac4074636fd47ee476d73f0039f4f54e86d7265077dc199be24d"},
{file = "mypy-0.942-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dcd955f36e0180258a96f880348fbca54ce092b40fbb4b37372ae3b25a0b0a46"},
@@ -1044,7 +1070,30 @@ nodeenv = [
{file = "nodeenv-1.6.0-py2.py3-none-any.whl", hash = "sha256:621e6b7076565ddcacd2db0294c0381e01fd28945ab36bcf00f41c5daf63bef7"},
{file = "nodeenv-1.6.0.tar.gz", hash = "sha256:3ef13ff90291ba2a4a7a4ff9a979b63ffdd00a464dbe04acf0ea6471517a4c2b"},
]
-numpy = []
+numpy = [
+ {file = "numpy-1.23.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:58bfd40eb478f54ff7a5710dd61c8097e169bc36cc68333d00a9bcd8def53b38"},
+ {file = "numpy-1.23.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:196cd074c3f97c4121601790955f915187736f9cf458d3ee1f1b46aff2b1ade0"},
+ {file = "numpy-1.23.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1d88ef79e0a7fa631bb2c3dda1ea46b32b1fe614e10fedd611d3d5398447f2f"},
+ {file = "numpy-1.23.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d54b3b828d618a19779a84c3ad952e96e2c2311b16384e973e671aa5be1f6187"},
+ {file = "numpy-1.23.0-cp310-cp310-win32.whl", hash = "sha256:2b2da66582f3a69c8ce25ed7921dcd8010d05e59ac8d89d126a299be60421171"},
+ {file = "numpy-1.23.0-cp310-cp310-win_amd64.whl", hash = "sha256:97a76604d9b0e79f59baeca16593c711fddb44936e40310f78bfef79ee9a835f"},
+ {file = "numpy-1.23.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d8cc87bed09de55477dba9da370c1679bd534df9baa171dd01accbb09687dac3"},
+ {file = "numpy-1.23.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f0f18804df7370571fb65db9b98bf1378172bd4e962482b857e612d1fec0f53e"},
+ {file = "numpy-1.23.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac86f407873b952679f5f9e6c0612687e51547af0e14ddea1eedfcb22466babd"},
+ {file = "numpy-1.23.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae8adff4172692ce56233db04b7ce5792186f179c415c37d539c25de7298d25d"},
+ {file = "numpy-1.23.0-cp38-cp38-win32.whl", hash = "sha256:fe8b9683eb26d2c4d5db32cd29b38fdcf8381324ab48313b5b69088e0e355379"},
+ {file = "numpy-1.23.0-cp38-cp38-win_amd64.whl", hash = "sha256:5043bcd71fcc458dfb8a0fc5509bbc979da0131b9d08e3d5f50fb0bbb36f169a"},
+ {file = "numpy-1.23.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1c29b44905af288b3919803aceb6ec7fec77406d8b08aaa2e8b9e63d0fe2f160"},
+ {file = "numpy-1.23.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:98e8e0d8d69ff4d3fa63e6c61e8cfe2d03c29b16b58dbef1f9baa175bbed7860"},
+ {file = "numpy-1.23.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79a506cacf2be3a74ead5467aee97b81fca00c9c4c8b3ba16dbab488cd99ba10"},
+ {file = "numpy-1.23.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:092f5e6025813e64ad6d1b52b519165d08c730d099c114a9247c9bb635a2a450"},
+ {file = "numpy-1.23.0-cp39-cp39-win32.whl", hash = "sha256:d6ca8dabe696c2785d0c8c9b0d8a9b6e5fdbe4f922bde70d57fa1a2848134f95"},
+ {file = "numpy-1.23.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc431493df245f3c627c0c05c2bd134535e7929dbe2e602b80e42bf52ff760bc"},
+ {file = "numpy-1.23.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f9c3fc2adf67762c9fe1849c859942d23f8d3e0bee7b5ed3d4a9c3eeb50a2f07"},
+ {file = "numpy-1.23.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0d2094e8f4d760500394d77b383a1b06d3663e8892cdf5df3c592f55f3bff66"},
+ {file = "numpy-1.23.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:94b170b4fa0168cd6be4becf37cb5b127bd12a795123984385b8cd4aca9857e5"},
+ {file = "numpy-1.23.0.tar.gz", hash = "sha256:bd3fa4fe2e38533d5336e1272fc4e765cabbbde144309ccee8675509d5cd7b05"},
+]
packaging = [
{file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"},
{file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"},
From 2f7cdef8d8013dce5d51ee20e452deefac0636ff Mon Sep 17 00:00:00 2001
From: MarcoHuebner <57489799+MarcoHuebner@users.noreply.github.com>
Date: Sun, 4 Sep 2022 17:13:08 +0200
Subject: [PATCH 24/27] Fixed missing config dict error (in remote CI/CD) by
mocking it and moving load_config into the function, #45
---
src/pygenesis/http_helper.py | 2 +-
tests/test_http_helper.py | 12 ++++++++++--
2 files changed, 11 insertions(+), 3 deletions(-)
diff --git a/src/pygenesis/http_helper.py b/src/pygenesis/http_helper.py
index 661c5ae..4bcd9de 100644
--- a/src/pygenesis/http_helper.py
+++ b/src/pygenesis/http_helper.py
@@ -7,7 +7,6 @@
from pygenesis.config import load_config
from pygenesis.custom_exceptions import DestatisStatusError
-config = load_config()
logger = logging.getLogger(__name__)
@@ -26,6 +25,7 @@ def get_response_from_endpoint(
Returns:
requests.Response: the response from Destatis
"""
+ config = load_config()
url = f"{config['GENESIS API']['base_url']}{endpoint}/{method}"
params |= {
diff --git a/tests/test_http_helper.py b/tests/test_http_helper.py
index 48b1b2b..9e8530e 100644
--- a/tests/test_http_helper.py
+++ b/tests/test_http_helper.py
@@ -58,12 +58,20 @@ def _generic_request_status(
@patch("requests.get")
-def test_get_response_from_endpoint(mocker):
+@patch("pygenesis.http_helper.load_config")
+def test_get_response_from_endpoint(mock_config, mock_requests):
"""
Test once with generic API response, more detailed tests
of subfunctions and specific cases below.
"""
- mocker.return_value = _generic_request_status()
+ mock_config.return_value = {
+ "GENESIS API": {
+ "base_url": "mocked_url",
+ "username": "JaneDoe",
+ "password": "password",
+ }
+ }
+ mock_requests.return_value = _generic_request_status()
get_response_from_endpoint("endpoint", "method", {})
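
A note on the change above (an inference, not stated in the patch): a module-level config = load_config() is evaluated at import time, which fails on a CI runner that has no config.ini; reading the config inside the function instead lets the test intercept it via @patch("pygenesis.http_helper.load_config"), since the name is looked up only when the function runs. A minimal sketch of that pattern, assuming only the load_config import and the "GENESIS API"/"base_url" keys visible in the diff; get_url is an illustrative helper, not a function of the package:

    from pygenesis.config import load_config

    def get_url(endpoint: str, method: str) -> str:
        # lazy: the config is read on every call instead of at import time,
        # so a test can replace load_config before this function runs
        config = load_config()
        return f"{config['GENESIS API']['base_url']}{endpoint}/{method}"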
From 1fa5e0f9a33ef5e438124d9a0b1715935f67ba61 Mon Sep 17 00:00:00 2001
From: MarcoHuebner <57489799+MarcoHuebner@users.noreply.github.com>
Date: Sun, 4 Sep 2022 17:24:47 +0200
Subject: [PATCH 25/27] Fixed missing config dict error (in remote CI/CD) by
mocking it and moving load_config into the function, #45
---
src/pygenesis/cache.py | 4 +---
src/pygenesis/data.py | 2 +-
src/pygenesis/http_helper.py | 10 ++++++----
3 files changed, 8 insertions(+), 8 deletions(-)
diff --git a/src/pygenesis/cache.py b/src/pygenesis/cache.py
index de35099..6aaa2ed 100644
--- a/src/pygenesis/cache.py
+++ b/src/pygenesis/cache.py
@@ -73,9 +73,7 @@ def clean_cache(file: Optional[str] = None) -> None:
cache_dir = Path(config["DATA"]["cache_dir"])
except KeyError as e:
logger.critical(
- "Cache dir does not exist! Please make sure init_config() was run properly. \
- Path: %s, Error: %s",
- cache_dir,
+ "Cache dir does not exist! Please make sure init_config() was run properly. Error: %s",
e,
)
diff --git a/src/pygenesis/data.py b/src/pygenesis/data.py
index a5211df..cf99938 100644
--- a/src/pygenesis/data.py
+++ b/src/pygenesis/data.py
@@ -38,7 +38,7 @@ def get_data(
if method == "tablefile":
params["format"] = "ffcsv"
- params |= kwargs
+ params.update(kwargs)
response = get_response_from_endpoint("data", method, params)
data = response.text
diff --git a/src/pygenesis/http_helper.py b/src/pygenesis/http_helper.py
index 4bcd9de..91c76df 100644
--- a/src/pygenesis/http_helper.py
+++ b/src/pygenesis/http_helper.py
@@ -28,10 +28,12 @@ def get_response_from_endpoint(
config = load_config()
url = f"{config['GENESIS API']['base_url']}{endpoint}/{method}"
- params |= {
- "username": config["GENESIS API"]["username"],
- "password": config["GENESIS API"]["password"],
- }
+ params.update(
+ {
+ "username": config["GENESIS API"]["username"],
+ "password": config["GENESIS API"]["password"],
+ }
+ )
response = requests.get(url, params=params)
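
A note on the params changes in data.py and http_helper.py above (an inference, not stated in the patch): the in-place dict merge operator |= was only added in Python 3.9 (PEP 584), while poetry.lock declares python-versions = "^3.8", so dict.update() keeps the 3.8 floor working and behaves identically for these dict arguments. A one-line sketch with placeholder values:

    params = {"format": "ffcsv"}
    # params |= {"username": "user", "password": "pass"}     # needs Python >= 3.9
    params.update({"username": "user", "password": "pass"})  # also runs on 3.8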
From 7e93ecfb7a9b3eaf5a05b63362f2ae78b6b75dad Mon Sep 17 00:00:00 2001
From: MarcoHuebner <57489799+MarcoHuebner@users.noreply.github.com>
Date: Sun, 4 Sep 2022 17:40:46 +0200
Subject: [PATCH 26/27] Fixed safety issues (mako and dparse) by updating
poetry.lock, #45
---
poetry.lock | 571 ++++++++++++++++++++++++++--------------------------
1 file changed, 290 insertions(+), 281 deletions(-)
diff --git a/poetry.lock b/poetry.lock
index 021bbb2..6640541 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,37 +1,32 @@
[[package]]
name = "astroid"
-version = "2.11.2"
+version = "2.12.5"
description = "An abstract syntax tree for Python with inference support."
category = "dev"
optional = false
-python-versions = ">=3.6.2"
+python-versions = ">=3.7.2"
[package.dependencies]
lazy-object-proxy = ">=1.4.0"
typing-extensions = {version = ">=3.10", markers = "python_version < \"3.10\""}
-wrapt = ">=1.11,<2"
-
-[[package]]
-name = "atomicwrites"
-version = "1.4.0"
-description = "Atomic file writes."
-category = "dev"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+wrapt = [
+ {version = ">=1.11,<2", markers = "python_version < \"3.11\""},
+ {version = ">=1.14,<2", markers = "python_version >= \"3.11\""},
+]
[[package]]
name = "attrs"
-version = "21.4.0"
+version = "22.1.0"
description = "Classes Without Boilerplate"
category = "dev"
optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+python-versions = ">=3.5"
[package.extras]
-dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"]
+dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"]
docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"]
-tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"]
-tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"]
+tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "zope.interface", "cloudpickle"]
+tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "cloudpickle"]
[[package]]
name = "bandit"
@@ -54,7 +49,7 @@ yaml = ["pyyaml"]
[[package]]
name = "black"
-version = "22.3.0"
+version = "22.8.0"
description = "The uncompromising code formatter."
category = "dev"
optional = false
@@ -65,7 +60,7 @@ click = ">=8.0.0"
mypy-extensions = ">=0.4.3"
pathspec = ">=0.9.0"
platformdirs = ">=2"
-tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
+tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""}
typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""}
[package.extras]
@@ -76,11 +71,11 @@ uvloop = ["uvloop (>=0.15.2)"]
[[package]]
name = "certifi"
-version = "2021.10.8"
+version = "2022.6.15"
description = "Python package for providing Mozilla's CA Bundle."
category = "main"
optional = false
-python-versions = "*"
+python-versions = ">=3.6"
[[package]]
name = "cfgv"
@@ -92,18 +87,18 @@ python-versions = ">=3.6.1"
[[package]]
name = "charset-normalizer"
-version = "2.0.12"
+version = "2.1.1"
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
category = "main"
optional = false
-python-versions = ">=3.5.0"
+python-versions = ">=3.6.0"
[package.extras]
unicode_backport = ["unicodedata2"]
[[package]]
name = "click"
-version = "8.1.2"
+version = "8.1.3"
description = "Composable command line interface toolkit"
category = "dev"
optional = false
@@ -114,7 +109,7 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""}
[[package]]
name = "colorama"
-version = "0.4.4"
+version = "0.4.5"
description = "Cross-platform colored terminal text."
category = "dev"
optional = false
@@ -136,18 +131,18 @@ toml = ["tomli"]
[[package]]
name = "dill"
-version = "0.3.4"
+version = "0.3.5.1"
description = "serialize all of python"
category = "dev"
optional = false
-python-versions = ">=2.7, !=3.0.*"
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*"
[package.extras]
graph = ["objgraph (>=1.7.2)"]
[[package]]
name = "distlib"
-version = "0.3.4"
+version = "0.3.6"
description = "Distribution utilities"
category = "dev"
optional = false
@@ -155,7 +150,7 @@ python-versions = "*"
[[package]]
name = "dparse"
-version = "0.5.1"
+version = "0.5.2"
description = "A parser for Python dependency files"
category = "dev"
optional = false
@@ -163,23 +158,23 @@ python-versions = ">=3.5"
[package.dependencies]
packaging = "*"
-pyyaml = "*"
toml = "*"
[package.extras]
pipenv = ["pipenv"]
+conda = ["pyyaml"]
[[package]]
name = "filelock"
-version = "3.6.0"
+version = "3.8.0"
description = "A platform independent file lock."
category = "dev"
optional = false
python-versions = ">=3.7"
[package.extras]
-docs = ["furo (>=2021.8.17b43)", "sphinx (>=4.1)", "sphinx-autodoc-typehints (>=1.12)"]
-testing = ["covdefaults (>=1.2.0)", "coverage (>=4)", "pytest (>=4)", "pytest-cov", "pytest-timeout (>=1.4.2)"]
+docs = ["furo (>=2022.6.21)", "sphinx (>=5.1.1)", "sphinx-autodoc-typehints (>=1.19.1)"]
+testing = ["covdefaults (>=2.2)", "coverage (>=6.4.2)", "pytest (>=7.1.2)", "pytest-cov (>=3)", "pytest-timeout (>=2.1)"]
[[package]]
name = "flake8"
@@ -230,7 +225,7 @@ gitdb = ">=4.0.1,<5"
[[package]]
name = "identify"
-version = "2.4.12"
+version = "2.5.3"
description = "File identification library for Python"
category = "dev"
optional = false
@@ -249,7 +244,7 @@ python-versions = ">=3.5"
[[package]]
name = "importlib-metadata"
-version = "4.11.3"
+version = "4.12.0"
description = "Read metadata from Python packages"
category = "dev"
optional = false
@@ -261,7 +256,7 @@ zipp = ">=0.5"
[package.extras]
docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"]
perf = ["ipython"]
-testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "importlib-resources (>=1.3)"]
+testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.3)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "importlib-resources (>=1.3)"]
[[package]]
name = "iniconfig"
@@ -295,7 +290,7 @@ python-versions = ">=3.6"
[[package]]
name = "mako"
-version = "1.2.0"
+version = "1.2.2"
description = "A super-fast templating language that borrows the best ideas from the existing templating languages."
category = "dev"
optional = false
@@ -305,17 +300,17 @@ python-versions = ">=3.7"
MarkupSafe = ">=0.9.2"
[package.extras]
-testing = ["pytest"]
-lingua = ["lingua"]
babel = ["babel"]
+lingua = ["lingua"]
+testing = ["pytest"]
[[package]]
name = "markdown"
-version = "3.3.6"
+version = "3.4.1"
description = "Python implementation of Markdown."
category = "dev"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
[package.dependencies]
importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""}
@@ -380,15 +375,15 @@ python-versions = "*"
[[package]]
name = "nodeenv"
-version = "1.6.0"
+version = "1.7.0"
description = "Node.js virtual environment builder"
category = "dev"
optional = false
-python-versions = "*"
+python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*"
[[package]]
name = "numpy"
-version = "1.23.0"
+version = "1.23.2"
description = "NumPy is the fundamental package for array computing with Python."
category = "main"
optional = false
@@ -407,7 +402,7 @@ pyparsing = ">=2.0.2,<3.0.5 || >3.0.5"
[[package]]
name = "pandas"
-version = "1.4.3"
+version = "1.4.4"
description = "Powerful data structures for data analysis, time series, and statistics"
category = "main"
optional = false
@@ -428,15 +423,15 @@ test = ["hypothesis (>=5.5.3)", "pytest (>=6.0)", "pytest-xdist (>=1.31)"]
[[package]]
name = "pathspec"
-version = "0.9.0"
+version = "0.10.1"
description = "Utility library for gitignore style pattern matching of file paths."
category = "dev"
optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
+python-versions = ">=3.7"
[[package]]
name = "pbr"
-version = "5.8.1"
+version = "5.10.0"
description = "Python Build Reasonableness"
category = "dev"
optional = false
@@ -456,15 +451,15 @@ markdown = ">=3.0"
[[package]]
name = "platformdirs"
-version = "2.5.1"
+version = "2.5.2"
description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
category = "dev"
optional = false
python-versions = ">=3.7"
[package.extras]
-docs = ["Sphinx (>=4)", "furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)"]
-test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"]
+docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)", "sphinx (>=4)"]
+test = ["appdirs (==1.4.4)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)", "pytest (>=6)"]
[[package]]
name = "pluggy"
@@ -480,7 +475,7 @@ dev = ["tox", "pre-commit"]
[[package]]
name = "pre-commit"
-version = "2.18.1"
+version = "2.20.0"
description = "A framework for managing and maintaining multi-language pre-commit hooks."
category = "dev"
optional = false
@@ -534,46 +529,47 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
[[package]]
name = "pylint"
-version = "2.13.4"
+version = "2.15.0"
description = "python code static checker"
category = "dev"
optional = false
-python-versions = ">=3.6.2"
+python-versions = ">=3.7.2"
[package.dependencies]
-astroid = ">=2.11.2,<=2.12.0-dev0"
-colorama = {version = "*", markers = "sys_platform == \"win32\""}
+astroid = ">=2.12.4,<=2.14.0-dev0"
+colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""}
dill = ">=0.2"
isort = ">=4.2.5,<6"
mccabe = ">=0.6,<0.8"
platformdirs = ">=2.2.0"
tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
+tomlkit = ">=0.10.1"
typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""}
[package.extras]
-testutil = ["gitpython (>3)"]
+spelling = ["pyenchant (>=3.2,<4.0)"]
+testutils = ["gitpython (>3)"]
[[package]]
name = "pyparsing"
-version = "3.0.7"
-description = "Python parsing module"
+version = "3.0.9"
+description = "pyparsing module - Classes and methods to define and execute parsing grammars"
category = "dev"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.6.8"
[package.extras]
-diagrams = ["jinja2", "railroad-diagrams"]
+diagrams = ["railroad-diagrams", "jinja2"]
[[package]]
name = "pytest"
-version = "7.1.1"
+version = "7.1.3"
description = "pytest: simple powerful testing with Python"
category = "dev"
optional = false
python-versions = ">=3.7"
[package.dependencies]
-atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""}
attrs = ">=19.2.0"
colorama = {version = "*", markers = "sys_platform == \"win32\""}
iniconfig = "*"
@@ -613,7 +609,7 @@ six = ">=1.5"
[[package]]
name = "pytz"
-version = "2022.1"
+version = "2022.2.1"
description = "World timezone definitions, modern and historical"
category = "main"
optional = false
@@ -629,21 +625,21 @@ python-versions = ">=3.6"
[[package]]
name = "requests"
-version = "2.27.1"
+version = "2.28.1"
description = "Python HTTP for Humans."
category = "main"
optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
+python-versions = ">=3.7, <4"
[package.dependencies]
certifi = ">=2017.4.17"
-charset-normalizer = {version = ">=2.0.0,<2.1.0", markers = "python_version >= \"3\""}
-idna = {version = ">=2.5,<4", markers = "python_version >= \"3\""}
+charset-normalizer = ">=2,<3"
+idna = ">=2.5,<4"
urllib3 = ">=1.21.1,<1.27"
[package.extras]
-socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"]
-use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"]
+socks = ["PySocks (>=1.5.6,!=1.5.7)"]
+use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"]
[[package]]
name = "safety"
@@ -685,11 +681,11 @@ python-versions = "*"
[[package]]
name = "stevedore"
-version = "3.5.0"
+version = "4.0.0"
description = "Manage dynamic plugins for Python applications"
category = "dev"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.8"
[package.dependencies]
pbr = ">=2.0.0,<2.1.0 || >2.1.0"
@@ -710,48 +706,55 @@ category = "dev"
optional = false
python-versions = ">=3.7"
+[[package]]
+name = "tomlkit"
+version = "0.11.4"
+description = "Style preserving TOML library"
+category = "dev"
+optional = false
+python-versions = ">=3.6,<4.0"
+
[[package]]
name = "typing-extensions"
-version = "4.1.1"
-description = "Backported and Experimental Type Hints for Python 3.6+"
+version = "4.3.0"
+description = "Backported and Experimental Type Hints for Python 3.7+"
category = "dev"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
[[package]]
name = "urllib3"
-version = "1.26.9"
+version = "1.26.12"
description = "HTTP library with thread-safe connection pooling, file post, and more."
category = "main"
optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4"
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4"
[package.extras]
brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"]
-secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"]
+secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "urllib3-secure-extra", "ipaddress"]
socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
[[package]]
name = "virtualenv"
-version = "20.14.0"
+version = "20.16.4"
description = "Virtual Python Environment builder"
category = "dev"
optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
+python-versions = ">=3.6"
[package.dependencies]
-distlib = ">=0.3.1,<1"
-filelock = ">=3.2,<4"
-platformdirs = ">=2,<3"
-six = ">=1.9.0,<2"
+distlib = ">=0.3.5,<1"
+filelock = ">=3.4.1,<4"
+platformdirs = ">=2.4,<3"
[package.extras]
-docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=21.3)"]
-testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "packaging (>=20.0)"]
+docs = ["proselint (>=0.13)", "sphinx (>=5.1.1)", "sphinx-argparse (>=0.3.1)", "sphinx-rtd-theme (>=1)", "towncrier (>=21.9)"]
+testing = ["coverage (>=6.2)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=21.3)", "pytest (>=7.0.1)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.6.1)", "pytest-randomly (>=3.10.3)", "pytest-timeout (>=2.1)"]
[[package]]
name = "wrapt"
-version = "1.14.0"
+version = "1.14.1"
description = "Module for decorators, wrappers and monkey patching."
category = "dev"
optional = false
@@ -759,15 +762,15 @@ python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
[[package]]
name = "zipp"
-version = "3.8.0"
+version = "3.8.1"
description = "Backport of pathlib-compatible object wrapper for zip files"
category = "dev"
optional = false
python-versions = ">=3.7"
[package.extras]
-docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"]
-testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"]
+docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "jaraco.tidelift (>=1.4)"]
+testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.3)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"]
[metadata]
lock-version = "1.1"
@@ -776,65 +779,61 @@ content-hash = "a137081485b16f94b4c751463c1158b0f74cff50f7cd0055c19f18d02cfd8c8a
[metadata.files]
astroid = [
- {file = "astroid-2.11.2-py3-none-any.whl", hash = "sha256:cc8cc0d2d916c42d0a7c476c57550a4557a083081976bf42a73414322a6411d9"},
- {file = "astroid-2.11.2.tar.gz", hash = "sha256:8d0a30fe6481ce919f56690076eafbb2fb649142a89dc874f1ec0e7a011492d0"},
-]
-atomicwrites = [
- {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"},
- {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"},
+ {file = "astroid-2.12.5-py3-none-any.whl", hash = "sha256:d612609242996c4365aeb0345e61edba34363eaaba55f1c0addf6a98f073bef6"},
+ {file = "astroid-2.12.5.tar.gz", hash = "sha256:396c88d0a58d7f8daadf730b2ce90838bf338c6752558db719ec6f99c18ec20e"},
]
attrs = [
- {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"},
- {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"},
+ {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"},
+ {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"},
]
bandit = [
{file = "bandit-1.7.4-py3-none-any.whl", hash = "sha256:412d3f259dab4077d0e7f0c11f50f650cc7d10db905d98f6520a95a18049658a"},
{file = "bandit-1.7.4.tar.gz", hash = "sha256:2d63a8c573417bae338962d4b9b06fbc6080f74ecd955a092849e1e65c717bd2"},
]
black = [
- {file = "black-22.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2497f9c2386572e28921fa8bec7be3e51de6801f7459dffd6e62492531c47e09"},
- {file = "black-22.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5795a0375eb87bfe902e80e0c8cfaedf8af4d49694d69161e5bd3206c18618bb"},
- {file = "black-22.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e3556168e2e5c49629f7b0f377070240bd5511e45e25a4497bb0073d9dda776a"},
- {file = "black-22.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67c8301ec94e3bcc8906740fe071391bce40a862b7be0b86fb5382beefecd968"},
- {file = "black-22.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:fd57160949179ec517d32ac2ac898b5f20d68ed1a9c977346efbac9c2f1e779d"},
- {file = "black-22.3.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cc1e1de68c8e5444e8f94c3670bb48a2beef0e91dddfd4fcc29595ebd90bb9ce"},
- {file = "black-22.3.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2fc92002d44746d3e7db7cf9313cf4452f43e9ea77a2c939defce3b10b5c82"},
- {file = "black-22.3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:a6342964b43a99dbc72f72812bf88cad8f0217ae9acb47c0d4f141a6416d2d7b"},
- {file = "black-22.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:328efc0cc70ccb23429d6be184a15ce613f676bdfc85e5fe8ea2a9354b4e9015"},
- {file = "black-22.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06f9d8846f2340dfac80ceb20200ea5d1b3f181dd0556b47af4e8e0b24fa0a6b"},
- {file = "black-22.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4efa5fad66b903b4a5f96d91461d90b9507a812b3c5de657d544215bb7877a"},
- {file = "black-22.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8477ec6bbfe0312c128e74644ac8a02ca06bcdb8982d4ee06f209be28cdf163"},
- {file = "black-22.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:637a4014c63fbf42a692d22b55d8ad6968a946b4a6ebc385c5505d9625b6a464"},
- {file = "black-22.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:863714200ada56cbc366dc9ae5291ceb936573155f8bf8e9de92aef51f3ad0f0"},
- {file = "black-22.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10dbe6e6d2988049b4655b2b739f98785a884d4d6b85bc35133a8fb9a2233176"},
- {file = "black-22.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:cee3e11161dde1b2a33a904b850b0899e0424cc331b7295f2a9698e79f9a69a0"},
- {file = "black-22.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5891ef8abc06576985de8fa88e95ab70641de6c1fca97e2a15820a9b69e51b20"},
- {file = "black-22.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:30d78ba6bf080eeaf0b7b875d924b15cd46fec5fd044ddfbad38c8ea9171043a"},
- {file = "black-22.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ee8f1f7228cce7dffc2b464f07ce769f478968bfb3dd1254a4c2eeed84928aad"},
- {file = "black-22.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ee227b696ca60dd1c507be80a6bc849a5a6ab57ac7352aad1ffec9e8b805f21"},
- {file = "black-22.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:9b542ced1ec0ceeff5b37d69838106a6348e60db7b8fdd245294dc1d26136265"},
- {file = "black-22.3.0-py3-none-any.whl", hash = "sha256:bc58025940a896d7e5356952228b68f793cf5fcb342be703c3a2669a1488cb72"},
- {file = "black-22.3.0.tar.gz", hash = "sha256:35020b8886c022ced9282b51b5a875b6d1ab0c387b31a065b84db7c33085ca79"},
+ {file = "black-22.8.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ce957f1d6b78a8a231b18e0dd2d94a33d2ba738cd88a7fe64f53f659eea49fdd"},
+ {file = "black-22.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5107ea36b2b61917956d018bd25129baf9ad1125e39324a9b18248d362156a27"},
+ {file = "black-22.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e8166b7bfe5dcb56d325385bd1d1e0f635f24aae14b3ae437102dedc0c186747"},
+ {file = "black-22.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd82842bb272297503cbec1a2600b6bfb338dae017186f8f215c8958f8acf869"},
+ {file = "black-22.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:d839150f61d09e7217f52917259831fe2b689f5c8e5e32611736351b89bb2a90"},
+ {file = "black-22.8.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a05da0430bd5ced89176db098567973be52ce175a55677436a271102d7eaa3fe"},
+ {file = "black-22.8.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a098a69a02596e1f2a58a2a1c8d5a05d5a74461af552b371e82f9fa4ada8342"},
+ {file = "black-22.8.0-cp36-cp36m-win_amd64.whl", hash = "sha256:5594efbdc35426e35a7defa1ea1a1cb97c7dbd34c0e49af7fb593a36bd45edab"},
+ {file = "black-22.8.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a983526af1bea1e4cf6768e649990f28ee4f4137266921c2c3cee8116ae42ec3"},
+ {file = "black-22.8.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b2c25f8dea5e8444bdc6788a2f543e1fb01494e144480bc17f806178378005e"},
+ {file = "black-22.8.0-cp37-cp37m-win_amd64.whl", hash = "sha256:78dd85caaab7c3153054756b9fe8c611efa63d9e7aecfa33e533060cb14b6d16"},
+ {file = "black-22.8.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:cea1b2542d4e2c02c332e83150e41e3ca80dc0fb8de20df3c5e98e242156222c"},
+ {file = "black-22.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5b879eb439094751185d1cfdca43023bc6786bd3c60372462b6f051efa6281a5"},
+ {file = "black-22.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0a12e4e1353819af41df998b02c6742643cfef58282915f781d0e4dd7a200411"},
+ {file = "black-22.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3a73f66b6d5ba7288cd5d6dad9b4c9b43f4e8a4b789a94bf5abfb878c663eb3"},
+ {file = "black-22.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:e981e20ec152dfb3e77418fb616077937378b322d7b26aa1ff87717fb18b4875"},
+ {file = "black-22.8.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8ce13ffed7e66dda0da3e0b2eb1bdfc83f5812f66e09aca2b0978593ed636b6c"},
+ {file = "black-22.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:32a4b17f644fc288c6ee2bafdf5e3b045f4eff84693ac069d87b1a347d861497"},
+ {file = "black-22.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0ad827325a3a634bae88ae7747db1a395d5ee02cf05d9aa7a9bd77dfb10e940c"},
+ {file = "black-22.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53198e28a1fb865e9fe97f88220da2e44df6da82b18833b588b1883b16bb5d41"},
+ {file = "black-22.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:bc4d4123830a2d190e9cc42a2e43570f82ace35c3aeb26a512a2102bce5af7ec"},
+ {file = "black-22.8.0-py3-none-any.whl", hash = "sha256:d2c21d439b2baf7aa80d6dd4e3659259be64c6f49dfd0f32091063db0e006db4"},
+ {file = "black-22.8.0.tar.gz", hash = "sha256:792f7eb540ba9a17e8656538701d3eb1afcb134e3b45b71f20b25c77a8db7e6e"},
]
certifi = [
- {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"},
- {file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"},
+ {file = "certifi-2022.6.15-py3-none-any.whl", hash = "sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412"},
+ {file = "certifi-2022.6.15.tar.gz", hash = "sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d"},
]
cfgv = [
{file = "cfgv-3.3.1-py2.py3-none-any.whl", hash = "sha256:c6a0883f3917a037485059700b9e75da2464e6c27051014ad85ba6aaa5884426"},
{file = "cfgv-3.3.1.tar.gz", hash = "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"},
]
charset-normalizer = [
- {file = "charset-normalizer-2.0.12.tar.gz", hash = "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"},
- {file = "charset_normalizer-2.0.12-py3-none-any.whl", hash = "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"},
+ {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"},
+ {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"},
]
click = [
- {file = "click-8.1.2-py3-none-any.whl", hash = "sha256:24e1a4a9ec5bf6299411369b208c1df2188d9eb8d916302fe6bf03faed227f1e"},
- {file = "click-8.1.2.tar.gz", hash = "sha256:479707fe14d9ec9a0757618b7a100a0ae4c4e236fac5b7f80ca68028141a1a72"},
+ {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"},
+ {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"},
]
colorama = [
- {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"},
- {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"},
+ {file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"},
+ {file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"},
]
coverage = [
{file = "coverage-6.4.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e7b4da9bafad21ea45a714d3ea6f3e1679099e420c8741c74905b92ee9bfa7cc"},
@@ -889,20 +888,20 @@ coverage = [
{file = "coverage-6.4.4.tar.gz", hash = "sha256:e16c45b726acb780e1e6f88b286d3c10b3914ab03438f32117c4aa52d7f30d58"},
]
dill = [
- {file = "dill-0.3.4-py2.py3-none-any.whl", hash = "sha256:7e40e4a70304fd9ceab3535d36e58791d9c4a776b38ec7f7ec9afc8d3dca4d4f"},
- {file = "dill-0.3.4.zip", hash = "sha256:9f9734205146b2b353ab3fec9af0070237b6ddae78452af83d2fca84d739e675"},
+ {file = "dill-0.3.5.1-py2.py3-none-any.whl", hash = "sha256:33501d03270bbe410c72639b350e941882a8b0fd55357580fbc873fba0c59302"},
+ {file = "dill-0.3.5.1.tar.gz", hash = "sha256:d75e41f3eff1eee599d738e76ba8f4ad98ea229db8b085318aa2b3333a208c86"},
]
distlib = [
- {file = "distlib-0.3.4-py2.py3-none-any.whl", hash = "sha256:6564fe0a8f51e734df6333d08b8b94d4ea8ee6b99b5ed50613f731fd4089f34b"},
- {file = "distlib-0.3.4.zip", hash = "sha256:e4b58818180336dc9c529bfb9a0b58728ffc09ad92027a3f30b7cd91e3458579"},
+ {file = "distlib-0.3.6-py2.py3-none-any.whl", hash = "sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e"},
+ {file = "distlib-0.3.6.tar.gz", hash = "sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46"},
]
dparse = [
- {file = "dparse-0.5.1-py3-none-any.whl", hash = "sha256:e953a25e44ebb60a5c6efc2add4420c177f1d8404509da88da9729202f306994"},
- {file = "dparse-0.5.1.tar.gz", hash = "sha256:a1b5f169102e1c894f9a7d5ccf6f9402a836a5d24be80a986c7ce9eaed78f367"},
+ {file = "dparse-0.5.2-py3-none-any.whl", hash = "sha256:b1514fb08895d85b18d4eba3b1b7025ff9e6ea07286282021e19def872129975"},
+ {file = "dparse-0.5.2.tar.gz", hash = "sha256:c348994a1f41c85f664d8f5a47442647bc4e22c5af5b1b26ef29aff0fa5dddcd"},
]
filelock = [
- {file = "filelock-3.6.0-py3-none-any.whl", hash = "sha256:f8314284bfffbdcfa0ff3d7992b023d4c628ced6feb957351d4c48d059f56bc0"},
- {file = "filelock-3.6.0.tar.gz", hash = "sha256:9cd540a9352e432c7246a48fe4e8712b10acb1df2ad1f30e8c070b82ae1fed85"},
+ {file = "filelock-3.8.0-py3-none-any.whl", hash = "sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4"},
+ {file = "filelock-3.8.0.tar.gz", hash = "sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc"},
]
flake8 = [
{file = "flake8-4.0.1-py2.py3-none-any.whl", hash = "sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d"},
@@ -921,16 +920,16 @@ gitpython = [
{file = "GitPython-3.1.27.tar.gz", hash = "sha256:1c885ce809e8ba2d88a29befeb385fcea06338d3640712b59ca623c220bb5704"},
]
identify = [
- {file = "identify-2.4.12-py2.py3-none-any.whl", hash = "sha256:5f06b14366bd1facb88b00540a1de05b69b310cbc2654db3c7e07fa3a4339323"},
- {file = "identify-2.4.12.tar.gz", hash = "sha256:3f3244a559290e7d3deb9e9adc7b33594c1bc85a9dd82e0f1be519bf12a1ec17"},
+ {file = "identify-2.5.3-py2.py3-none-any.whl", hash = "sha256:25851c8c1370effb22aaa3c987b30449e9ff0cece408f810ae6ce408fdd20893"},
+ {file = "identify-2.5.3.tar.gz", hash = "sha256:887e7b91a1be152b0d46bbf072130235a8117392b9f1828446079a816a05ef44"},
]
idna = [
{file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"},
{file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"},
]
importlib-metadata = [
- {file = "importlib_metadata-4.11.3-py3-none-any.whl", hash = "sha256:1208431ca90a8cca1a6b8af391bb53c1a2db74e5d1cef6ddced95d4b2062edc6"},
- {file = "importlib_metadata-4.11.3.tar.gz", hash = "sha256:ea4c597ebf37142f827b8f39299579e31685c31d3a438b59f469406afd0f2539"},
+ {file = "importlib_metadata-4.12.0-py3-none-any.whl", hash = "sha256:7401a975809ea1fdc658c3aa4f78cc2195a0e019c5cbc4c06122884e9ae80c23"},
+ {file = "importlib_metadata-4.12.0.tar.gz", hash = "sha256:637245b8bab2b6502fcbc752cc4b7a6f6243bb02b31c5c26156ad103d3d45670"},
]
iniconfig = [
{file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"},
@@ -980,12 +979,12 @@ lazy-object-proxy = [
{file = "lazy_object_proxy-1.7.1-pp37.pp38-none-any.whl", hash = "sha256:d66906d5785da8e0be7360912e99c9188b70f52c422f9fc18223347235691a84"},
]
mako = [
- {file = "Mako-1.2.0-py3-none-any.whl", hash = "sha256:23aab11fdbbb0f1051b93793a58323ff937e98e34aece1c4219675122e57e4ba"},
- {file = "Mako-1.2.0.tar.gz", hash = "sha256:9a7c7e922b87db3686210cf49d5d767033a41d4010b284e747682c92bddd8b39"},
+ {file = "Mako-1.2.2-py3-none-any.whl", hash = "sha256:8efcb8004681b5f71d09c983ad5a9e6f5c40601a6ec469148753292abc0da534"},
+ {file = "Mako-1.2.2.tar.gz", hash = "sha256:3724869b363ba630a272a5f89f68c070352137b8fd1757650017b7e06fda163f"},
]
markdown = [
- {file = "Markdown-3.3.6-py3-none-any.whl", hash = "sha256:9923332318f843411e9932237530df53162e29dc7a4e2b91e35764583c46c9a3"},
- {file = "Markdown-3.3.6.tar.gz", hash = "sha256:76df8ae32294ec39dcf89340382882dfa12975f87f45c3ed1ecdb1e8cefc7006"},
+ {file = "Markdown-3.4.1-py3-none-any.whl", hash = "sha256:08fb8465cffd03d10b9dd34a5c3fea908e20391a2a90b88d66362cb05beed186"},
+ {file = "Markdown-3.4.1.tar.gz", hash = "sha256:3b809086bb6efad416156e00a0da66fe47618a5d6918dd688f53f40c8e4cfeff"},
]
markupsafe = [
{file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"},
@@ -1067,83 +1066,89 @@ mypy-extensions = [
{file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"},
]
nodeenv = [
- {file = "nodeenv-1.6.0-py2.py3-none-any.whl", hash = "sha256:621e6b7076565ddcacd2db0294c0381e01fd28945ab36bcf00f41c5daf63bef7"},
- {file = "nodeenv-1.6.0.tar.gz", hash = "sha256:3ef13ff90291ba2a4a7a4ff9a979b63ffdd00a464dbe04acf0ea6471517a4c2b"},
+ {file = "nodeenv-1.7.0-py2.py3-none-any.whl", hash = "sha256:27083a7b96a25f2f5e1d8cb4b6317ee8aeda3bdd121394e5ac54e498028a042e"},
+ {file = "nodeenv-1.7.0.tar.gz", hash = "sha256:e0e7f7dfb85fc5394c6fe1e8fa98131a2473e04311a45afb6508f7cf1836fa2b"},
]
numpy = [
- {file = "numpy-1.23.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:58bfd40eb478f54ff7a5710dd61c8097e169bc36cc68333d00a9bcd8def53b38"},
- {file = "numpy-1.23.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:196cd074c3f97c4121601790955f915187736f9cf458d3ee1f1b46aff2b1ade0"},
- {file = "numpy-1.23.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1d88ef79e0a7fa631bb2c3dda1ea46b32b1fe614e10fedd611d3d5398447f2f"},
- {file = "numpy-1.23.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d54b3b828d618a19779a84c3ad952e96e2c2311b16384e973e671aa5be1f6187"},
- {file = "numpy-1.23.0-cp310-cp310-win32.whl", hash = "sha256:2b2da66582f3a69c8ce25ed7921dcd8010d05e59ac8d89d126a299be60421171"},
- {file = "numpy-1.23.0-cp310-cp310-win_amd64.whl", hash = "sha256:97a76604d9b0e79f59baeca16593c711fddb44936e40310f78bfef79ee9a835f"},
- {file = "numpy-1.23.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d8cc87bed09de55477dba9da370c1679bd534df9baa171dd01accbb09687dac3"},
- {file = "numpy-1.23.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f0f18804df7370571fb65db9b98bf1378172bd4e962482b857e612d1fec0f53e"},
- {file = "numpy-1.23.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac86f407873b952679f5f9e6c0612687e51547af0e14ddea1eedfcb22466babd"},
- {file = "numpy-1.23.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae8adff4172692ce56233db04b7ce5792186f179c415c37d539c25de7298d25d"},
- {file = "numpy-1.23.0-cp38-cp38-win32.whl", hash = "sha256:fe8b9683eb26d2c4d5db32cd29b38fdcf8381324ab48313b5b69088e0e355379"},
- {file = "numpy-1.23.0-cp38-cp38-win_amd64.whl", hash = "sha256:5043bcd71fcc458dfb8a0fc5509bbc979da0131b9d08e3d5f50fb0bbb36f169a"},
- {file = "numpy-1.23.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1c29b44905af288b3919803aceb6ec7fec77406d8b08aaa2e8b9e63d0fe2f160"},
- {file = "numpy-1.23.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:98e8e0d8d69ff4d3fa63e6c61e8cfe2d03c29b16b58dbef1f9baa175bbed7860"},
- {file = "numpy-1.23.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79a506cacf2be3a74ead5467aee97b81fca00c9c4c8b3ba16dbab488cd99ba10"},
- {file = "numpy-1.23.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:092f5e6025813e64ad6d1b52b519165d08c730d099c114a9247c9bb635a2a450"},
- {file = "numpy-1.23.0-cp39-cp39-win32.whl", hash = "sha256:d6ca8dabe696c2785d0c8c9b0d8a9b6e5fdbe4f922bde70d57fa1a2848134f95"},
- {file = "numpy-1.23.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc431493df245f3c627c0c05c2bd134535e7929dbe2e602b80e42bf52ff760bc"},
- {file = "numpy-1.23.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f9c3fc2adf67762c9fe1849c859942d23f8d3e0bee7b5ed3d4a9c3eeb50a2f07"},
- {file = "numpy-1.23.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0d2094e8f4d760500394d77b383a1b06d3663e8892cdf5df3c592f55f3bff66"},
- {file = "numpy-1.23.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:94b170b4fa0168cd6be4becf37cb5b127bd12a795123984385b8cd4aca9857e5"},
- {file = "numpy-1.23.0.tar.gz", hash = "sha256:bd3fa4fe2e38533d5336e1272fc4e765cabbbde144309ccee8675509d5cd7b05"},
+ {file = "numpy-1.23.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e603ca1fb47b913942f3e660a15e55a9ebca906857edfea476ae5f0fe9b457d5"},
+ {file = "numpy-1.23.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:633679a472934b1c20a12ed0c9a6c9eb167fbb4cb89031939bfd03dd9dbc62b8"},
+ {file = "numpy-1.23.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17e5226674f6ea79e14e3b91bfbc153fdf3ac13f5cc54ee7bc8fdbe820a32da0"},
+ {file = "numpy-1.23.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdc02c0235b261925102b1bd586579b7158e9d0d07ecb61148a1799214a4afd5"},
+ {file = "numpy-1.23.2-cp310-cp310-win32.whl", hash = "sha256:df28dda02c9328e122661f399f7655cdcbcf22ea42daa3650a26bce08a187450"},
+ {file = "numpy-1.23.2-cp310-cp310-win_amd64.whl", hash = "sha256:8ebf7e194b89bc66b78475bd3624d92980fca4e5bb86dda08d677d786fefc414"},
+ {file = "numpy-1.23.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:dc76bca1ca98f4b122114435f83f1fcf3c0fe48e4e6f660e07996abf2f53903c"},
+ {file = "numpy-1.23.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ecfdd68d334a6b97472ed032b5b37a30d8217c097acfff15e8452c710e775524"},
+ {file = "numpy-1.23.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5593f67e66dea4e237f5af998d31a43e447786b2154ba1ad833676c788f37cde"},
+ {file = "numpy-1.23.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac987b35df8c2a2eab495ee206658117e9ce867acf3ccb376a19e83070e69418"},
+ {file = "numpy-1.23.2-cp311-cp311-win32.whl", hash = "sha256:d98addfd3c8728ee8b2c49126f3c44c703e2b005d4a95998e2167af176a9e722"},
+ {file = "numpy-1.23.2-cp311-cp311-win_amd64.whl", hash = "sha256:8ecb818231afe5f0f568c81f12ce50f2b828ff2b27487520d85eb44c71313b9e"},
+ {file = "numpy-1.23.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:909c56c4d4341ec8315291a105169d8aae732cfb4c250fbc375a1efb7a844f8f"},
+ {file = "numpy-1.23.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8247f01c4721479e482cc2f9f7d973f3f47810cbc8c65e38fd1bbd3141cc9842"},
+ {file = "numpy-1.23.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8b97a8a87cadcd3f94659b4ef6ec056261fa1e1c3317f4193ac231d4df70215"},
+ {file = "numpy-1.23.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd5b7ccae24e3d8501ee5563e82febc1771e73bd268eef82a1e8d2b4d556ae66"},
+ {file = "numpy-1.23.2-cp38-cp38-win32.whl", hash = "sha256:9b83d48e464f393d46e8dd8171687394d39bc5abfe2978896b77dc2604e8635d"},
+ {file = "numpy-1.23.2-cp38-cp38-win_amd64.whl", hash = "sha256:dec198619b7dbd6db58603cd256e092bcadef22a796f778bf87f8592b468441d"},
+ {file = "numpy-1.23.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4f41f5bf20d9a521f8cab3a34557cd77b6f205ab2116651f12959714494268b0"},
+ {file = "numpy-1.23.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:806cc25d5c43e240db709875e947076b2826f47c2c340a5a2f36da5bb10c58d6"},
+ {file = "numpy-1.23.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f9d84a24889ebb4c641a9b99e54adb8cab50972f0166a3abc14c3b93163f074"},
+ {file = "numpy-1.23.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c403c81bb8ffb1c993d0165a11493fd4bf1353d258f6997b3ee288b0a48fce77"},
+ {file = "numpy-1.23.2-cp39-cp39-win32.whl", hash = "sha256:cf8c6aed12a935abf2e290860af8e77b26a042eb7f2582ff83dc7ed5f963340c"},
+ {file = "numpy-1.23.2-cp39-cp39-win_amd64.whl", hash = "sha256:5e28cd64624dc2354a349152599e55308eb6ca95a13ce6a7d5679ebff2962913"},
+ {file = "numpy-1.23.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:806970e69106556d1dd200e26647e9bee5e2b3f1814f9da104a943e8d548ca38"},
+ {file = "numpy-1.23.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bd879d3ca4b6f39b7770829f73278b7c5e248c91d538aab1e506c628353e47f"},
+ {file = "numpy-1.23.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:be6b350dfbc7f708d9d853663772a9310783ea58f6035eec649fb9c4371b5389"},
+ {file = "numpy-1.23.2.tar.gz", hash = "sha256:b78d00e48261fbbd04aa0d7427cf78d18401ee0abd89c7559bbf422e5b1c7d01"},
]
packaging = [
{file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"},
{file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"},
]
pandas = [
- {file = "pandas-1.4.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d51674ed8e2551ef7773820ef5dab9322be0828629f2cbf8d1fc31a0c4fed640"},
- {file = "pandas-1.4.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:16ad23db55efcc93fa878f7837267973b61ea85d244fc5ff0ccbcfa5638706c5"},
- {file = "pandas-1.4.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:958a0588149190c22cdebbc0797e01972950c927a11a900fe6c2296f207b1d6f"},
- {file = "pandas-1.4.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e48fbb64165cda451c06a0f9e4c7a16b534fcabd32546d531b3c240ce2844112"},
- {file = "pandas-1.4.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f803320c9da732cc79210d7e8cc5c8019aad512589c910c66529eb1b1818230"},
- {file = "pandas-1.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:2893e923472a5e090c2d5e8db83e8f907364ec048572084c7d10ef93546be6d1"},
- {file = "pandas-1.4.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:24ea75f47bbd5574675dae21d51779a4948715416413b30614c1e8b480909f81"},
- {file = "pandas-1.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d5ebc990bd34f4ac3c73a2724c2dcc9ee7bf1ce6cf08e87bb25c6ad33507e318"},
- {file = "pandas-1.4.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d6c0106415ff1a10c326c49bc5dd9ea8b9897a6ca0c8688eb9c30ddec49535ef"},
- {file = "pandas-1.4.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78b00429161ccb0da252229bcda8010b445c4bf924e721265bec5a6e96a92e92"},
- {file = "pandas-1.4.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dfbf16b1ea4f4d0ee11084d9c026340514d1d30270eaa82a9f1297b6c8ecbf0"},
- {file = "pandas-1.4.3-cp38-cp38-win32.whl", hash = "sha256:48350592665ea3cbcd07efc8c12ff12d89be09cd47231c7925e3b8afada9d50d"},
- {file = "pandas-1.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:605d572126eb4ab2eadf5c59d5d69f0608df2bf7bcad5c5880a47a20a0699e3e"},
- {file = "pandas-1.4.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a3924692160e3d847e18702bb048dc38e0e13411d2b503fecb1adf0fcf950ba4"},
- {file = "pandas-1.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:07238a58d7cbc8a004855ade7b75bbd22c0db4b0ffccc721556bab8a095515f6"},
- {file = "pandas-1.4.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:755679c49460bd0d2f837ab99f0a26948e68fa0718b7e42afbabd074d945bf84"},
- {file = "pandas-1.4.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41fc406e374590a3d492325b889a2686b31e7a7780bec83db2512988550dadbf"},
- {file = "pandas-1.4.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d9382f72a4f0e93909feece6fef5500e838ce1c355a581b3d8f259839f2ea76"},
- {file = "pandas-1.4.3-cp39-cp39-win32.whl", hash = "sha256:0daf876dba6c622154b2e6741f29e87161f844e64f84801554f879d27ba63c0d"},
- {file = "pandas-1.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:721a3dd2f06ef942f83a819c0f3f6a648b2830b191a72bbe9451bcd49c3bd42e"},
- {file = "pandas-1.4.3.tar.gz", hash = "sha256:2ff7788468e75917574f080cd4681b27e1a7bf36461fe968b49a87b5a54d007c"},
+ {file = "pandas-1.4.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:799e6a25932df7e6b1f8dabf63de064e2205dc309abb75956126a0453fd88e97"},
+ {file = "pandas-1.4.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7cd1d69a387f7d5e1a5a06a87574d9ef2433847c0e78113ab51c84d3a8bcaeaa"},
+ {file = "pandas-1.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:94f2ed1fd51e545ebf71da1e942fe1822ee01e10d3dd2a7276d01351333b7c6b"},
+ {file = "pandas-1.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4591cadd06fbbbd16fafc2de6e840c1aaefeae3d5864b688004777ef1bbdede3"},
+ {file = "pandas-1.4.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0022fe6a313df1c4869b5edc012d734c6519a6fffa3cf70930f32e6a1078e49"},
+ {file = "pandas-1.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:785e878a6e6d8ddcdb8c181e600855402750052497d7fc6d6b508894f6b8830b"},
+ {file = "pandas-1.4.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c4bb8b0ab9f94207d07e401d24baebfc63057246b1a5e0cd9ee50df85a656871"},
+ {file = "pandas-1.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:51c424ca134fdaeac9a4acd719d1ab48046afc60943a489028f0413fdbe9ef1c"},
+ {file = "pandas-1.4.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ce35f947202b0b99c660221d82beb91d2e6d553d55a40b30128204e3e2c63848"},
+ {file = "pandas-1.4.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee6f1848148ed3204235967613b0a32be2d77f214e9623f554511047705c1e04"},
+ {file = "pandas-1.4.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7cc960959be28d064faefc0cb2aef854d46b827c004ebea7e79b5497ed83e7d"},
+ {file = "pandas-1.4.4-cp38-cp38-win32.whl", hash = "sha256:9d805bce209714b1c1fa29bfb1e42ad87e4c0a825e4b390c56a3e71593b7e8d8"},
+ {file = "pandas-1.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:afbddad78a98ec4d2ce08b384b81730de1ccc975b99eb663e6dac43703f36d98"},
+ {file = "pandas-1.4.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a08ceb59db499864c58a9bf85ab6219d527d91f14c0240cc25fa2c261032b2a7"},
+ {file = "pandas-1.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0959c41004e3d2d16f39c828d6da66ebee329836a7ecee49fb777ac9ad8a7501"},
+ {file = "pandas-1.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:87b4194f344dcd14c0f885cecb22005329b38bda10f1aaf7b9596a00ec8a4768"},
+ {file = "pandas-1.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d2a7a3c1fea668d56bd91edbd5f2732e0af8feb9d2bf8d9bfacb2dea5fa9536"},
+ {file = "pandas-1.4.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a981cfabf51c318a562deb4ae7deec594c07aee7cf18b4594a92c23718ec8275"},
+ {file = "pandas-1.4.4-cp39-cp39-win32.whl", hash = "sha256:050aada67a5ec6699a7879e769825b510018a95fb9ac462bb1867483d0974a97"},
+ {file = "pandas-1.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:8d4d2fe2863ecddb0ba1979bdda26c8bc2ea138f5a979abe3ba80c0fa4015c91"},
+ {file = "pandas-1.4.4.tar.gz", hash = "sha256:ab6c0d738617b675183e5f28db32b5148b694ad9bba0a40c3ea26d96b431db67"},
]
pathspec = [
- {file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"},
- {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"},
+ {file = "pathspec-0.10.1-py3-none-any.whl", hash = "sha256:46846318467efc4556ccfd27816e004270a9eeeeb4d062ce5e6fc7a87c573f93"},
+ {file = "pathspec-0.10.1.tar.gz", hash = "sha256:7ace6161b621d31e7902eb6b5ae148d12cfd23f4a249b9ffb6b9fee12084323d"},
]
pbr = [
- {file = "pbr-5.8.1-py2.py3-none-any.whl", hash = "sha256:27108648368782d07bbf1cb468ad2e2eeef29086affd14087a6d04b7de8af4ec"},
- {file = "pbr-5.8.1.tar.gz", hash = "sha256:66bc5a34912f408bb3925bf21231cb6f59206267b7f63f3503ef865c1a292e25"},
+ {file = "pbr-5.10.0-py2.py3-none-any.whl", hash = "sha256:da3e18aac0a3c003e9eea1a81bd23e5a3a75d745670dcf736317b7d966887fdf"},
+ {file = "pbr-5.10.0.tar.gz", hash = "sha256:cfcc4ff8e698256fc17ea3ff796478b050852585aa5bae79ecd05b2ab7b39b9a"},
]
pdoc3 = [
{file = "pdoc3-0.10.0-py3-none-any.whl", hash = "sha256:ba45d1ada1bd987427d2bf5cdec30b2631a3ff5fb01f6d0e77648a572ce6028b"},
{file = "pdoc3-0.10.0.tar.gz", hash = "sha256:5f22e7bcb969006738e1aa4219c75a32f34c2d62d46dc9d2fb2d3e0b0287e4b7"},
]
platformdirs = [
- {file = "platformdirs-2.5.1-py3-none-any.whl", hash = "sha256:bcae7cab893c2d310a711b70b24efb93334febe65f8de776ee320b517471e227"},
- {file = "platformdirs-2.5.1.tar.gz", hash = "sha256:7535e70dfa32e84d4b34996ea99c5e432fa29a708d0f4e394bbcb2a8faa4f16d"},
+ {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"},
+ {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"},
]
pluggy = [
{file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"},
{file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"},
]
pre-commit = [
- {file = "pre_commit-2.18.1-py2.py3-none-any.whl", hash = "sha256:02226e69564ebca1a070bd1f046af866aa1c318dbc430027c50ab832ed2b73f2"},
- {file = "pre_commit-2.18.1.tar.gz", hash = "sha256:5d445ee1fa8738d506881c5d84f83c62bb5be6b2838e32207433647e8e5ebe10"},
+ {file = "pre_commit-2.20.0-py2.py3-none-any.whl", hash = "sha256:51a5ba7c480ae8072ecdb6933df22d2f812dc897d5fe848778116129a681aac7"},
+ {file = "pre_commit-2.20.0.tar.gz", hash = "sha256:a978dac7bc9ec0bcee55c18a277d553b0f419d259dadb4b9418ff2d00eb43959"},
]
py = [
{file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"},
@@ -1162,16 +1167,16 @@ pyflakes = [
{file = "pyflakes-2.4.0.tar.gz", hash = "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c"},
]
pylint = [
- {file = "pylint-2.13.4-py3-none-any.whl", hash = "sha256:8672cf7441b81410f5de7defdf56e2d559c956fd0579652f2e0a0a35bea2d546"},
- {file = "pylint-2.13.4.tar.gz", hash = "sha256:7cc6d0c4f61dff440f9ed8b657f4ecd615dcfe35345953eb7b1dc74afe901d7a"},
+ {file = "pylint-2.15.0-py3-none-any.whl", hash = "sha256:4b124affc198b7f7c9b5f9ab690d85db48282a025ef9333f51d2d7281b92a6c3"},
+ {file = "pylint-2.15.0.tar.gz", hash = "sha256:4f3f7e869646b0bd63b3dfb79f3c0f28fc3d2d923ea220d52620fd625aed92b0"},
]
pyparsing = [
- {file = "pyparsing-3.0.7-py3-none-any.whl", hash = "sha256:a6c06a88f252e6c322f65faf8f418b16213b51bdfaece0524c1c1bc30c63c484"},
- {file = "pyparsing-3.0.7.tar.gz", hash = "sha256:18ee9022775d270c55187733956460083db60b37d0d0fb357445f3094eed3eea"},
+ {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"},
+ {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"},
]
pytest = [
- {file = "pytest-7.1.1-py3-none-any.whl", hash = "sha256:92f723789a8fdd7180b6b06483874feca4c48a5c76968e03bb3e7f806a1869ea"},
- {file = "pytest-7.1.1.tar.gz", hash = "sha256:841132caef6b1ad17a9afde46dc4f6cfa59a05f9555aae5151f73bdf2820ca63"},
+ {file = "pytest-7.1.3-py3-none-any.whl", hash = "sha256:1377bda3466d70b55e3f5cecfa55bb7cfcf219c7964629b967c37cf0bda818b7"},
+ {file = "pytest-7.1.3.tar.gz", hash = "sha256:4f365fec2dff9c1162f834d9f18af1ba13062db0c708bf7b946f8a5c76180c39"},
]
pytest-cov = [
{file = "pytest-cov-3.0.0.tar.gz", hash = "sha256:e7f0f5b1617d2210a2cabc266dfe2f4c75a8d32fb89eafb7ad9d06f6d076d470"},
@@ -1182,8 +1187,8 @@ python-dateutil = [
{file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"},
]
pytz = [
- {file = "pytz-2022.1-py2.py3-none-any.whl", hash = "sha256:e68985985296d9a66a881eb3193b0906246245294a881e7c8afe623866ac6a5c"},
- {file = "pytz-2022.1.tar.gz", hash = "sha256:1e760e2fe6a8163bc0b3d9a19c4f84342afa0a2affebfaa84b01b978a02ecaa7"},
+ {file = "pytz-2022.2.1-py2.py3-none-any.whl", hash = "sha256:220f481bdafa09c3955dfbdddb7b57780e9a94f5127e35456a48589b9e0c0197"},
+ {file = "pytz-2022.2.1.tar.gz", hash = "sha256:cea221417204f2d1a2aa03ddae3e867921971d0d76f14d87abb4414415bbdcf5"},
]
pyyaml = [
{file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"},
@@ -1221,8 +1226,8 @@ pyyaml = [
{file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"},
]
requests = [
- {file = "requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"},
- {file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"},
+ {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"},
+ {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"},
]
safety = [
{file = "safety-1.10.3-py2.py3-none-any.whl", hash = "sha256:5f802ad5df5614f9622d8d71fedec2757099705c2356f862847c58c6dfe13e84"},
@@ -1241,8 +1246,8 @@ snowballstemmer = [
{file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"},
]
stevedore = [
- {file = "stevedore-3.5.0-py3-none-any.whl", hash = "sha256:a547de73308fd7e90075bb4d301405bebf705292fa90a90fc3bcf9133f58616c"},
- {file = "stevedore-3.5.0.tar.gz", hash = "sha256:f40253887d8712eaa2bb0ea3830374416736dc8ec0e22f5a65092c1174c44335"},
+ {file = "stevedore-4.0.0-py3-none-any.whl", hash = "sha256:87e4d27fe96d0d7e4fc24f0cbe3463baae4ec51e81d95fbe60d2474636e0c7d8"},
+ {file = "stevedore-4.0.0.tar.gz", hash = "sha256:f82cc99a1ff552310d19c379827c2c64dd9f85a38bcd5559db2470161867b786"},
]
toml = [
{file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"},
@@ -1252,85 +1257,89 @@ tomli = [
{file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"},
{file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
]
+tomlkit = [
+ {file = "tomlkit-0.11.4-py3-none-any.whl", hash = "sha256:25d4e2e446c453be6360c67ddfb88838cfc42026322770ba13d1fbd403a93a5c"},
+ {file = "tomlkit-0.11.4.tar.gz", hash = "sha256:3235a9010fae54323e727c3ac06fb720752fe6635b3426e379daec60fbd44a83"},
+]
typing-extensions = [
- {file = "typing_extensions-4.1.1-py3-none-any.whl", hash = "sha256:21c85e0fe4b9a155d0799430b0ad741cdce7e359660ccbd8b530613e8df88ce2"},
- {file = "typing_extensions-4.1.1.tar.gz", hash = "sha256:1a9462dcc3347a79b1f1c0271fbe79e844580bb598bafa1ed208b94da3cdcd42"},
+ {file = "typing_extensions-4.3.0-py3-none-any.whl", hash = "sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02"},
+ {file = "typing_extensions-4.3.0.tar.gz", hash = "sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6"},
]
urllib3 = [
- {file = "urllib3-1.26.9-py2.py3-none-any.whl", hash = "sha256:44ece4d53fb1706f667c9bd1c648f5469a2ec925fcf3a776667042d645472c14"},
- {file = "urllib3-1.26.9.tar.gz", hash = "sha256:aabaf16477806a5e1dd19aa41f8c2b7950dd3c746362d7e3223dbe6de6ac448e"},
+ {file = "urllib3-1.26.12-py2.py3-none-any.whl", hash = "sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997"},
+ {file = "urllib3-1.26.12.tar.gz", hash = "sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e"},
]
virtualenv = [
- {file = "virtualenv-20.14.0-py2.py3-none-any.whl", hash = "sha256:1e8588f35e8b42c6ec6841a13c5e88239de1e6e4e4cedfd3916b306dc826ec66"},
- {file = "virtualenv-20.14.0.tar.gz", hash = "sha256:8e5b402037287126e81ccde9432b95a8be5b19d36584f64957060a3488c11ca8"},
+ {file = "virtualenv-20.16.4-py3-none-any.whl", hash = "sha256:035ed57acce4ac35c82c9d8802202b0e71adac011a511ff650cbcf9635006a22"},
+ {file = "virtualenv-20.16.4.tar.gz", hash = "sha256:014f766e4134d0008dcaa1f95bafa0fb0f575795d07cae50b1bee514185d6782"},
]
wrapt = [
- {file = "wrapt-1.14.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:5a9a1889cc01ed2ed5f34574c90745fab1dd06ec2eee663e8ebeefe363e8efd7"},
- {file = "wrapt-1.14.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:9a3ff5fb015f6feb78340143584d9f8a0b91b6293d6b5cf4295b3e95d179b88c"},
- {file = "wrapt-1.14.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:4b847029e2d5e11fd536c9ac3136ddc3f54bc9488a75ef7d040a3900406a91eb"},
- {file = "wrapt-1.14.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:9a5a544861b21e0e7575b6023adebe7a8c6321127bb1d238eb40d99803a0e8bd"},
- {file = "wrapt-1.14.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:88236b90dda77f0394f878324cfbae05ae6fde8a84d548cfe73a75278d760291"},
- {file = "wrapt-1.14.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:f0408e2dbad9e82b4c960274214af533f856a199c9274bd4aff55d4634dedc33"},
- {file = "wrapt-1.14.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:9d8c68c4145041b4eeae96239802cfdfd9ef927754a5be3f50505f09f309d8c6"},
- {file = "wrapt-1.14.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:22626dca56fd7f55a0733e604f1027277eb0f4f3d95ff28f15d27ac25a45f71b"},
- {file = "wrapt-1.14.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:65bf3eb34721bf18b5a021a1ad7aa05947a1767d1aa272b725728014475ea7d5"},
- {file = "wrapt-1.14.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:09d16ae7a13cff43660155383a2372b4aa09109c7127aa3f24c3cf99b891c330"},
- {file = "wrapt-1.14.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:debaf04f813ada978d7d16c7dfa16f3c9c2ec9adf4656efdc4defdf841fc2f0c"},
- {file = "wrapt-1.14.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:748df39ed634851350efa87690c2237a678ed794fe9ede3f0d79f071ee042561"},
- {file = "wrapt-1.14.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1807054aa7b61ad8d8103b3b30c9764de2e9d0c0978e9d3fc337e4e74bf25faa"},
- {file = "wrapt-1.14.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:763a73ab377390e2af26042f685a26787c402390f682443727b847e9496e4a2a"},
- {file = "wrapt-1.14.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8529b07b49b2d89d6917cfa157d3ea1dfb4d319d51e23030664a827fe5fd2131"},
- {file = "wrapt-1.14.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:68aeefac31c1f73949662ba8affaf9950b9938b712fb9d428fa2a07e40ee57f8"},
- {file = "wrapt-1.14.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59d7d92cee84a547d91267f0fea381c363121d70fe90b12cd88241bd9b0e1763"},
- {file = "wrapt-1.14.0-cp310-cp310-win32.whl", hash = "sha256:3a88254881e8a8c4784ecc9cb2249ff757fd94b911d5df9a5984961b96113fff"},
- {file = "wrapt-1.14.0-cp310-cp310-win_amd64.whl", hash = "sha256:9a242871b3d8eecc56d350e5e03ea1854de47b17f040446da0e47dc3e0b9ad4d"},
- {file = "wrapt-1.14.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:a65bffd24409454b889af33b6c49d0d9bcd1a219b972fba975ac935f17bdf627"},
- {file = "wrapt-1.14.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9d9fcd06c952efa4b6b95f3d788a819b7f33d11bea377be6b8980c95e7d10775"},
- {file = "wrapt-1.14.0-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:db6a0ddc1282ceb9032e41853e659c9b638789be38e5b8ad7498caac00231c23"},
- {file = "wrapt-1.14.0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:14e7e2c5f5fca67e9a6d5f753d21f138398cad2b1159913ec9e9a67745f09ba3"},
- {file = "wrapt-1.14.0-cp35-cp35m-win32.whl", hash = "sha256:6d9810d4f697d58fd66039ab959e6d37e63ab377008ef1d63904df25956c7db0"},
- {file = "wrapt-1.14.0-cp35-cp35m-win_amd64.whl", hash = "sha256:d808a5a5411982a09fef6b49aac62986274ab050e9d3e9817ad65b2791ed1425"},
- {file = "wrapt-1.14.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b77159d9862374da213f741af0c361720200ab7ad21b9f12556e0eb95912cd48"},
- {file = "wrapt-1.14.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36a76a7527df8583112b24adc01748cd51a2d14e905b337a6fefa8b96fc708fb"},
- {file = "wrapt-1.14.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0057b5435a65b933cbf5d859cd4956624df37b8bf0917c71756e4b3d9958b9e"},
- {file = "wrapt-1.14.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a0a4ca02752ced5f37498827e49c414d694ad7cf451ee850e3ff160f2bee9d3"},
- {file = "wrapt-1.14.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:8c6be72eac3c14baa473620e04f74186c5d8f45d80f8f2b4eda6e1d18af808e8"},
- {file = "wrapt-1.14.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:21b1106bff6ece8cb203ef45b4f5778d7226c941c83aaaa1e1f0f4f32cc148cd"},
- {file = "wrapt-1.14.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:493da1f8b1bb8a623c16552fb4a1e164c0200447eb83d3f68b44315ead3f9036"},
- {file = "wrapt-1.14.0-cp36-cp36m-win32.whl", hash = "sha256:89ba3d548ee1e6291a20f3c7380c92f71e358ce8b9e48161401e087e0bc740f8"},
- {file = "wrapt-1.14.0-cp36-cp36m-win_amd64.whl", hash = "sha256:729d5e96566f44fccac6c4447ec2332636b4fe273f03da128fff8d5559782b06"},
- {file = "wrapt-1.14.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:891c353e95bb11abb548ca95c8b98050f3620a7378332eb90d6acdef35b401d4"},
- {file = "wrapt-1.14.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23f96134a3aa24cc50614920cc087e22f87439053d886e474638c68c8d15dc80"},
- {file = "wrapt-1.14.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6807bcee549a8cb2f38f73f469703a1d8d5d990815c3004f21ddb68a567385ce"},
- {file = "wrapt-1.14.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6915682f9a9bc4cf2908e83caf5895a685da1fbd20b6d485dafb8e218a338279"},
- {file = "wrapt-1.14.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f2f3bc7cd9c9fcd39143f11342eb5963317bd54ecc98e3650ca22704b69d9653"},
- {file = "wrapt-1.14.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:3a71dbd792cc7a3d772ef8cd08d3048593f13d6f40a11f3427c000cf0a5b36a0"},
- {file = "wrapt-1.14.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:5a0898a640559dec00f3614ffb11d97a2666ee9a2a6bad1259c9facd01a1d4d9"},
- {file = "wrapt-1.14.0-cp37-cp37m-win32.whl", hash = "sha256:167e4793dc987f77fd476862d32fa404d42b71f6a85d3b38cbce711dba5e6b68"},
- {file = "wrapt-1.14.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d066ffc5ed0be00cd0352c95800a519cf9e4b5dd34a028d301bdc7177c72daf3"},
- {file = "wrapt-1.14.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d9bdfa74d369256e4218000a629978590fd7cb6cf6893251dad13d051090436d"},
- {file = "wrapt-1.14.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2498762814dd7dd2a1d0248eda2afbc3dd9c11537bc8200a4b21789b6df6cd38"},
- {file = "wrapt-1.14.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f24ca7953f2643d59a9c87d6e272d8adddd4a53bb62b9208f36db408d7aafc7"},
- {file = "wrapt-1.14.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b835b86bd5a1bdbe257d610eecab07bf685b1af2a7563093e0e69180c1d4af1"},
- {file = "wrapt-1.14.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b21650fa6907e523869e0396c5bd591cc326e5c1dd594dcdccac089561cacfb8"},
- {file = "wrapt-1.14.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:354d9fc6b1e44750e2a67b4b108841f5f5ea08853453ecbf44c81fdc2e0d50bd"},
- {file = "wrapt-1.14.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1f83e9c21cd5275991076b2ba1cd35418af3504667affb4745b48937e214bafe"},
- {file = "wrapt-1.14.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:61e1a064906ccba038aa3c4a5a82f6199749efbbb3cef0804ae5c37f550eded0"},
- {file = "wrapt-1.14.0-cp38-cp38-win32.whl", hash = "sha256:28c659878f684365d53cf59dc9a1929ea2eecd7ac65da762be8b1ba193f7e84f"},
- {file = "wrapt-1.14.0-cp38-cp38-win_amd64.whl", hash = "sha256:b0ed6ad6c9640671689c2dbe6244680fe8b897c08fd1fab2228429b66c518e5e"},
- {file = "wrapt-1.14.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b3f7e671fb19734c872566e57ce7fc235fa953d7c181bb4ef138e17d607dc8a1"},
- {file = "wrapt-1.14.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:87fa943e8bbe40c8c1ba4086971a6fefbf75e9991217c55ed1bcb2f1985bd3d4"},
- {file = "wrapt-1.14.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4775a574e9d84e0212f5b18886cace049a42e13e12009bb0491562a48bb2b758"},
- {file = "wrapt-1.14.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9d57677238a0c5411c76097b8b93bdebb02eb845814c90f0b01727527a179e4d"},
- {file = "wrapt-1.14.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00108411e0f34c52ce16f81f1d308a571df7784932cc7491d1e94be2ee93374b"},
- {file = "wrapt-1.14.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d332eecf307fca852d02b63f35a7872de32d5ba8b4ec32da82f45df986b39ff6"},
- {file = "wrapt-1.14.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:01f799def9b96a8ec1ef6b9c1bbaf2bbc859b87545efbecc4a78faea13d0e3a0"},
- {file = "wrapt-1.14.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47045ed35481e857918ae78b54891fac0c1d197f22c95778e66302668309336c"},
- {file = "wrapt-1.14.0-cp39-cp39-win32.whl", hash = "sha256:2eca15d6b947cfff51ed76b2d60fd172c6ecd418ddab1c5126032d27f74bc350"},
- {file = "wrapt-1.14.0-cp39-cp39-win_amd64.whl", hash = "sha256:bb36fbb48b22985d13a6b496ea5fb9bb2a076fea943831643836c9f6febbcfdc"},
- {file = "wrapt-1.14.0.tar.gz", hash = "sha256:8323a43bd9c91f62bb7d4be74cc9ff10090e7ef820e27bfe8815c57e68261311"},
+ {file = "wrapt-1.14.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3"},
+ {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:903500616422a40a98a5a3c4ff4ed9d0066f3b4c951fa286018ecdf0750194ef"},
+ {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5a9a0d155deafd9448baff28c08e150d9b24ff010e899311ddd63c45c2445e28"},
+ {file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ddaea91abf8b0d13443f6dac52e89051a5063c7d014710dcb4d4abb2ff811a59"},
+ {file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:36f582d0c6bc99d5f39cd3ac2a9062e57f3cf606ade29a0a0d6b323462f4dd87"},
+ {file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:7ef58fb89674095bfc57c4069e95d7a31cfdc0939e2a579882ac7d55aadfd2a1"},
+ {file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:e2f83e18fe2f4c9e7db597e988f72712c0c3676d337d8b101f6758107c42425b"},
+ {file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:ee2b1b1769f6707a8a445162ea16dddf74285c3964f605877a20e38545c3c462"},
+ {file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:833b58d5d0b7e5b9832869f039203389ac7cbf01765639c7309fd50ef619e0b1"},
+ {file = "wrapt-1.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:80bb5c256f1415f747011dc3604b59bc1f91c6e7150bd7db03b19170ee06b320"},
+ {file = "wrapt-1.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:07f7a7d0f388028b2df1d916e94bbb40624c59b48ecc6cbc232546706fac74c2"},
+ {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02b41b633c6261feff8ddd8d11c711df6842aba629fdd3da10249a53211a72c4"},
+ {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fe803deacd09a233e4762a1adcea5db5d31e6be577a43352936179d14d90069"},
+ {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:257fd78c513e0fb5cdbe058c27a0624c9884e735bbd131935fd49e9fe719d310"},
+ {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4fcc4649dc762cddacd193e6b55bc02edca674067f5f98166d7713b193932b7f"},
+ {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:11871514607b15cfeb87c547a49bca19fde402f32e2b1c24a632506c0a756656"},
+ {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8ad85f7f4e20964db4daadcab70b47ab05c7c1cf2a7c1e51087bfaa83831854c"},
+ {file = "wrapt-1.14.1-cp310-cp310-win32.whl", hash = "sha256:a9a52172be0b5aae932bef82a79ec0a0ce87288c7d132946d645eba03f0ad8a8"},
+ {file = "wrapt-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:6d323e1554b3d22cfc03cd3243b5bb815a51f5249fdcbb86fda4bf62bab9e164"},
+ {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:43ca3bbbe97af00f49efb06e352eae40434ca9d915906f77def219b88e85d907"},
+ {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:6b1a564e6cb69922c7fe3a678b9f9a3c54e72b469875aa8018f18b4d1dd1adf3"},
+ {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:00b6d4ea20a906c0ca56d84f93065b398ab74b927a7a3dbd470f6fc503f95dc3"},
+ {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:a85d2b46be66a71bedde836d9e41859879cc54a2a04fad1191eb50c2066f6e9d"},
+ {file = "wrapt-1.14.1-cp35-cp35m-win32.whl", hash = "sha256:dbcda74c67263139358f4d188ae5faae95c30929281bc6866d00573783c422b7"},
+ {file = "wrapt-1.14.1-cp35-cp35m-win_amd64.whl", hash = "sha256:b21bb4c09ffabfa0e85e3a6b623e19b80e7acd709b9f91452b8297ace2a8ab00"},
+ {file = "wrapt-1.14.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9e0fd32e0148dd5dea6af5fee42beb949098564cc23211a88d799e434255a1f4"},
+ {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9736af4641846491aedb3c3f56b9bc5568d92b0692303b5a305301a95dfd38b1"},
+ {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b02d65b9ccf0ef6c34cba6cf5bf2aab1bb2f49c6090bafeecc9cd81ad4ea1c1"},
+ {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21ac0156c4b089b330b7666db40feee30a5d52634cc4560e1905d6529a3897ff"},
+ {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:9f3e6f9e05148ff90002b884fbc2a86bd303ae847e472f44ecc06c2cd2fcdb2d"},
+ {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:6e743de5e9c3d1b7185870f480587b75b1cb604832e380d64f9504a0535912d1"},
+ {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d79d7d5dc8a32b7093e81e97dad755127ff77bcc899e845f41bf71747af0c569"},
+ {file = "wrapt-1.14.1-cp36-cp36m-win32.whl", hash = "sha256:81b19725065dcb43df02b37e03278c011a09e49757287dca60c5aecdd5a0b8ed"},
+ {file = "wrapt-1.14.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b014c23646a467558be7da3d6b9fa409b2c567d2110599b7cf9a0c5992b3b471"},
+ {file = "wrapt-1.14.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:88bd7b6bd70a5b6803c1abf6bca012f7ed963e58c68d76ee20b9d751c74a3248"},
+ {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5901a312f4d14c59918c221323068fad0540e34324925c8475263841dbdfe68"},
+ {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d77c85fedff92cf788face9bfa3ebaa364448ebb1d765302e9af11bf449ca36d"},
+ {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d649d616e5c6a678b26d15ece345354f7c2286acd6db868e65fcc5ff7c24a77"},
+ {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7d2872609603cb35ca513d7404a94d6d608fc13211563571117046c9d2bcc3d7"},
+ {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:ee6acae74a2b91865910eef5e7de37dc6895ad96fa23603d1d27ea69df545015"},
+ {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2b39d38039a1fdad98c87279b48bc5dce2c0ca0d73483b12cb72aa9609278e8a"},
+ {file = "wrapt-1.14.1-cp37-cp37m-win32.whl", hash = "sha256:60db23fa423575eeb65ea430cee741acb7c26a1365d103f7b0f6ec412b893853"},
+ {file = "wrapt-1.14.1-cp37-cp37m-win_amd64.whl", hash = "sha256:709fe01086a55cf79d20f741f39325018f4df051ef39fe921b1ebe780a66184c"},
+ {file = "wrapt-1.14.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8c0ce1e99116d5ab21355d8ebe53d9460366704ea38ae4d9f6933188f327b456"},
+ {file = "wrapt-1.14.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e3fb1677c720409d5f671e39bac6c9e0e422584e5f518bfd50aa4cbbea02433f"},
+ {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:642c2e7a804fcf18c222e1060df25fc210b9c58db7c91416fb055897fc27e8cc"},
+ {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b7c050ae976e286906dd3f26009e117eb000fb2cf3533398c5ad9ccc86867b1"},
+ {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef3f72c9666bba2bab70d2a8b79f2c6d2c1a42a7f7e2b0ec83bb2f9e383950af"},
+ {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:01c205616a89d09827986bc4e859bcabd64f5a0662a7fe95e0d359424e0e071b"},
+ {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5a0f54ce2c092aaf439813735584b9537cad479575a09892b8352fea5e988dc0"},
+ {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2cf71233a0ed05ccdabe209c606fe0bac7379fdcf687f39b944420d2a09fdb57"},
+ {file = "wrapt-1.14.1-cp38-cp38-win32.whl", hash = "sha256:aa31fdcc33fef9eb2552cbcbfee7773d5a6792c137b359e82879c101e98584c5"},
+ {file = "wrapt-1.14.1-cp38-cp38-win_amd64.whl", hash = "sha256:d1967f46ea8f2db647c786e78d8cc7e4313dbd1b0aca360592d8027b8508e24d"},
+ {file = "wrapt-1.14.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3232822c7d98d23895ccc443bbdf57c7412c5a65996c30442ebe6ed3df335383"},
+ {file = "wrapt-1.14.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:988635d122aaf2bdcef9e795435662bcd65b02f4f4c1ae37fbee7401c440b3a7"},
+ {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cca3c2cdadb362116235fdbd411735de4328c61425b0aa9f872fd76d02c4e86"},
+ {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d52a25136894c63de15a35bc0bdc5adb4b0e173b9c0d07a2be9d3ca64a332735"},
+ {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40e7bc81c9e2b2734ea4bc1aceb8a8f0ceaac7c5299bc5d69e37c44d9081d43b"},
+ {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b9b7a708dd92306328117d8c4b62e2194d00c365f18eff11a9b53c6f923b01e3"},
+ {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6a9a25751acb379b466ff6be78a315e2b439d4c94c1e99cb7266d40a537995d3"},
+ {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:34aa51c45f28ba7f12accd624225e2b1e5a3a45206aa191f6f9aac931d9d56fe"},
+ {file = "wrapt-1.14.1-cp39-cp39-win32.whl", hash = "sha256:dee0ce50c6a2dd9056c20db781e9c1cfd33e77d2d569f5d1d9321c641bb903d5"},
+ {file = "wrapt-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:dee60e1de1898bde3b238f18340eec6148986da0455d8ba7848d50470a7a32fb"},
+ {file = "wrapt-1.14.1.tar.gz", hash = "sha256:380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d"},
]
zipp = [
- {file = "zipp-3.8.0-py3-none-any.whl", hash = "sha256:c4f6e5bbf48e74f7a38e7cc5b0480ff42b0ae5178957d564d18932525d5cf099"},
- {file = "zipp-3.8.0.tar.gz", hash = "sha256:56bf8aadb83c24db6c4b577e13de374ccfb67da2078beba1d037c17980bf43ad"},
+ {file = "zipp-3.8.1-py3-none-any.whl", hash = "sha256:47c40d7fe183a6f21403a199b3e4192cca5774656965b0a4988ad2f8feb5f009"},
+ {file = "zipp-3.8.1.tar.gz", hash = "sha256:05b45f1ee8f807d0cc928485ca40a07cb491cf092ff587c0df9cb1fd154848d2"},
]
From 47af7b86b4d4ee72c8560928926b82130a0d01d1 Mon Sep 17 00:00:00 2001
From: MarcoHuebner <57489799+MarcoHuebner@users.noreply.github.com>
Date: Sun, 4 Sep 2022 18:13:18 +0200
Subject: [PATCH 27/27] Fixed linting issue: added timeout to requests.get call, #45
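As an illustration of the change (a minimal sketch only, not part of this patch; the function name and error handling shown here are hypothetical), a bounded timeout lets a stalled endpoint fail fast instead of blocking the caller indefinitely:

    import requests

    def fetch(url: str, params: dict) -> requests.Response:
        # Without a timeout, requests.get() may block indefinitely on an unresponsive host.
        try:
            response = requests.get(url, params=params, timeout=30)
            response.raise_for_status()
        except requests.exceptions.Timeout:
            # Fail fast with a clear message instead of waiting forever.
            raise RuntimeError(f"No response from {url} within 30 seconds.") from None
        return response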
---
src/pygenesis/http_helper.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/src/pygenesis/http_helper.py b/src/pygenesis/http_helper.py
index 91c76df..32c63e1 100644
--- a/src/pygenesis/http_helper.py
+++ b/src/pygenesis/http_helper.py
@@ -35,7 +35,7 @@ def get_response_from_endpoint(
}
)
- response = requests.get(url, params=params)
+ response = requests.get(url, params=params, timeout=30)
response.encoding = "UTF-8"