From afa4f37bad1f4568e4ea90e1c3b9337deb968a47 Mon Sep 17 00:00:00 2001
From: Mialy DeFelice
Date: Fri, 20 Oct 2023 15:03:31 -0700
Subject: [PATCH 001/199] WIP: add functionality to use display name as label,
 and tests

---
 schematic/help.py                     |  3 ++
 schematic/schemas/commands.py         |  8 ++++-
 schematic/schemas/data_model_nodes.py |  7 ++--
 schematic/utils/schema_utils.py       | 31 +++++++++++++----
 tests/test_utils.py                   | 48 ++++++++++++++++++++++++++-
 5 files changed, 86 insertions(+), 11 deletions(-)

diff --git a/schematic/help.py b/schematic/help.py
index c243a10ab..ae2befcbf 100644
--- a/schematic/help.py
+++ b/schematic/help.py
@@ -169,6 +169,9 @@
         "output_jsonld": (
             "Path to where the generated JSON-LD file needs to be outputted."
         ),
+        "display_name_as_label": (
+            "Flag to indicate that the display name should be used as the label. This requires that the display name be properly formatted without spaces or blacklisted characters."
+        ),
     }
 }
diff --git a/schematic/schemas/commands.py b/schematic/schemas/commands.py
index 80700e2bf..65d5b382b 100644
--- a/schematic/schemas/commands.py
+++ b/schematic/schemas/commands.py
@@ -41,13 +41,19 @@ def schema():  # use as `schematic model ...`
 @click.argument(
     "schema", type=click.Path(exists=True), metavar="", nargs=1
 )
+@click.option(
+    "--display_name_as_label",
+    "-dnl",
+    is_flag=True,
+    help=query_dict(schema_commands, ("schema", "convert", "display_name_as_label")),
+)
 @click.option(
     "--output_jsonld",
     "-o",
     metavar="",
     help=query_dict(schema_commands, ("schema", "convert", "output_jsonld")),
 )
-def convert(schema, output_jsonld):
+def convert(schema, display_name_as_label, output_jsonld):
     """
     Running CLI to convert data model specification in CSV format to
     data model in JSON-LD format.
diff --git a/schematic/schemas/data_model_nodes.py b/schematic/schemas/data_model_nodes.py
index a1681c469..fb5025688 100644
--- a/schematic/schemas/data_model_nodes.py
+++ b/schematic/schemas/data_model_nodes.py
@@ -129,6 +129,7 @@ def run_rel_functions(
         attr_relationships={},
         csv_header="",
         entry_type="",
+        display_name_as_label=False,
     ):
         """This function exists to centralize handling of functions for filling out node information, makes sure all the proper parameters are passed to each function.
         Args:
@@ -156,7 +157,7 @@
 
         elif rel_func == get_label_from_display_name:
             return get_label_from_display_name(
-                display_name=node_display_name, entry_type=entry_type
+                display_name=node_display_name, entry_type=entry_type, use_display_name_as_label=display_name_as_label,
             )
 
         elif rel_func == convert_bool_to_str:
@@ -175,7 +176,7 @@
             f"The function provided ({rel_func}) to define the relationship {key} is not captured in the function run_rel_functions, please update."
         )
 
-    def generate_node_dict(self, node_display_name: str, attr_rel_dict: dict) -> dict:
+    def generate_node_dict(self, node_display_name: str, attr_rel_dict: dict, display_name_as_label:bool) -> dict:
         """Gather information to be attached to each node.
        Args:
            node_display_name, str: display name for current node
@@ -183,6 +184,7 @@
                {Attribute Display Name: {
                       Relationships: {
                                   CSV Header: Value}}}
+            display_name_as_label, bool: if true, use the display name provided as the label
        Returns:
            node_dict, dict: dictionary of relationship information about the current node
@@ -227,6 +229,7 @@
                        attr_relationships=attr_relationships,
                        csv_header=csv_header,
                        entry_type=entry_type,
+                        display_name_as_label=display_name_as_label,
                    )
                }
            )
diff --git a/schematic/utils/schema_utils.py b/schematic/utils/schema_utils.py
index b8cab8e66..9d890bff8 100644
--- a/schematic/utils/schema_utils.py
+++ b/schematic/utils/schema_utils.py
@@ -61,7 +61,13 @@ def get_attribute_display_name_from_label(node_name: str, attr_relationships: di
         display_name = node_name
     return display_name
 
-def get_label_from_display_name(display_name:str, entry_type:str, strict_camel_case:bool = False) -> str:
+def check_if_display_name_is_valid_label(display_name:str, blacklisted_chars:list[str])-> bool:
+    valid_label=True
+    if any(map(display_name.__contains__, blacklisted_chars)):
+        valid_label=False
+    return valid_label
+
+def get_label_from_display_name(display_name:str, entry_type:str, strict_camel_case:bool = False, use_display_name_as_label:bool = False) -> str:
     """Get node label from provided display name, based on whether the node is a class or property
     Args:
         display_name, str: node display name
@@ -73,15 +79,26 @@
     Raises:
         ValueError if entry_type.lower(), is not either 'class' or 'property'
     """
-    if entry_type.lower()=='class':
-        label = get_class_label_from_display_name(display_name=display_name, strict_camel_case=strict_camel_case)
-
-    elif entry_type.lower()=='property':
-        label=get_property_label_from_display_name(display_name=display_name, strict_camel_case=strict_camel_case)
+    if use_display_name_as_label:
+        blacklisted_chars = ["(", ")", ".", "-", " "]
+        # Check that display name can be used as a label.
+        valid_display_name = check_if_display_name_is_valid_label(display_name=display_name, blacklisted_chars=blacklisted_chars)
+        if valid_display_name:
+            label=display_name
+        else:
+            raise ValueError(f"Cannot use display name {display_name} as the schema label, because it is not formatted properly. Please remove all spaces and blacklisted characters: {str(blacklisted_chars)}")
     else:
-        raise ValueError(f"The entry type submitted: {entry_type}, is not one of the permitted types: 'class' or 'property'")
+        if entry_type.lower()=='class':
+            label = get_class_label_from_display_name(display_name=display_name, strict_camel_case=strict_camel_case)
+
+        elif entry_type.lower()=='property':
+            label=get_property_label_from_display_name(display_name=display_name, strict_camel_case=strict_camel_case)
+        else:
+            raise ValueError(f"The entry type submitted: {entry_type}, is not one of the permitted types: 'class' or 'property'")
     return label
+
+
 def convert_bool_to_str(provided_bool: bool) -> str:
     """Convert bool to string.
     Args:
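
For reference, a hypothetical usage sketch of the keyword added in the hunk above (the calls mirror the new signature exactly; the example display names are only illustrative):

    from schematic.utils.schema_utils import get_label_from_display_name

    # A well-formed display name passes the blacklist check and is returned unchanged.
    get_label_from_display_name(
        display_name="FamilyHistory", entry_type="class", use_display_name_as_label=True
    )  # -> "FamilyHistory"

    # Spaces are blacklisted, so this display name cannot double as a label.
    get_label_from_display_name(
        display_name="Family History", entry_type="class", use_display_name_as_label=True
    )  # raises ValueError listing the blacklisted characters
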
diff --git a/tests/test_utils.py b/tests/test_utils.py
index cd5c27508..7c6dd5e71 100644
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -40,7 +40,8 @@
 from schematic.utils.schema_utils import (export_schema,
                                           get_property_label_from_display_name,
                                           get_class_label_from_display_name,
-                                          strip_context)
+                                          strip_context,
+                                          get_label_from_display_name)
 
 logging.basicConfig(level=logging.DEBUG)
@@ -411,6 +412,51 @@ def test_strip_context(self, helpers, context_value):
         elif 'sms:required' == context_value:
             assert stripped_contex == ('sms', 'required')
 
+    TEST_DN_DICT = {
+        "Bio Things": {"class": "BioThings", "property": "bioThings"},
+        "bio things": {"class": "Biothings", "property": "biothings"},
+        "BioThings": {"class": "BioThings", "property": "bioThings"},
+        "Bio-things": {"class": "Biothings", "property": "biothings"},
+        "bio_things": {"class": "Biothings", "property": "biothings"},
+    }
+
+    @pytest.mark.parametrize(
+        "test_dn", list(TEST_DN_DICT.keys()), ids=list(TEST_DN_DICT.keys())
+    )
+    @pytest.mark.parametrize(
+        "use_label", [True, False], ids=["True", "False"]
+    )
+    def test_get_label_from_display_name(self, test_dn, use_label):
+        for entry_type, expected_result in self.TEST_DN_DICT[test_dn].items():
+            # Display names containing spaces or blacklisted characters cannot
+            # be used directly as labels, so expect those conversions to fail.
+            if use_label and test_dn in [
+                "Bio Things",
+                "bio things",
+                "Bio-things",
+                "bio_things",
+            ]:
+                with pytest.raises(ValueError):
+                    get_label_from_display_name(
+                        entry_type=entry_type,
+                        display_name=test_dn,
+                        use_display_name_as_label=use_label,
+                    )
+                continue
+
+            label = get_label_from_display_name(
+                entry_type=entry_type,
+                display_name=test_dn,
+                use_display_name_as_label=use_label,
+            )
+            if use_label:
+                assert label == test_dn
+            else:
+                assert label == expected_result
+
 class TestValidateUtils:
     def test_validate_schema(self, helpers):
         '''

From b699ed50c642fb3e8dd3a52580b950abcd3981a8 Mon Sep 17 00:00:00 2001
From: Mialy DeFelice
Date: Mon, 23 Oct 2023 13:09:51 -0700
Subject: [PATCH 002/199] WIP: changes to enable new display_name_as_label
 parameters

---
 schematic/help.py                         |  3 +++
 schematic/manifest/commands.py            |  9 ++++++++-
 schematic/schemas/commands.py             |  2 +-
 schematic/schemas/data_model_graph.py     |  5 +++--
 schematic/schemas/data_model_nodes.py     |  1 +
 schematic/schemas/data_model_validator.py | 19 +++++++++++--------
 schematic/utils/schema_utils.py           | 11 ++++++++++-
 7 files changed, 37 insertions(+), 13 deletions(-)

diff --git a/schematic/help.py b/schematic/help.py
index ae2befcbf..6f9a53996 100644
--- a/schematic/help.py
+++ b/schematic/help.py
@@ -54,6 +54,9 @@
            "Specify to alphabetize valid attribute values either ascending (a) or descending (d)."
            "Optional"
        ),
+        "display_name_as_label": (
+            "Flag to indicate that the display name should be used as the label. This requires that the display name be properly formatted without spaces or blacklisted characters."
+ ), }, "migrate": { "short_help": ( diff --git a/schematic/manifest/commands.py b/schematic/manifest/commands.py index a1382ec88..705fa4b78 100644 --- a/schematic/manifest/commands.py +++ b/schematic/manifest/commands.py @@ -102,6 +102,12 @@ def manifest(ctx, config): # use as `schematic manifest ...` default = 'ascending', help=query_dict(manifest_commands, ("manifest", "get", "alphabetize_valid_values")), ) +@click.option( + "--display_name_as_label", + "-dnl", + is_flag=True, + help=query_dict(manifest_commands, ("manifest", "get", "display_name_as_label")), +) @click.pass_obj def get_manifest( ctx, @@ -115,6 +121,7 @@ def get_manifest( json_schema, output_xlsx, alphabetize_valid_values, + display_name_as_label, ): """ Running CLI with manifest generation options. @@ -138,7 +145,7 @@ def get_manifest( parsed_data_model = data_model_parser.parse_model() # Instantiate DataModelGraph - data_model_grapher = DataModelGraph(parsed_data_model) + data_model_grapher = DataModelGraph(parsed_data_model, display_name_as_label) # Generate graph logger.info("Generating data model graph.") diff --git a/schematic/schemas/commands.py b/schematic/schemas/commands.py index 65d5b382b..3a2b40119 100644 --- a/schematic/schemas/commands.py +++ b/schematic/schemas/commands.py @@ -73,7 +73,7 @@ def convert(schema, display_name_as_label, output_jsonld): # Convert parsed model to graph # Instantiate DataModelGraph - data_model_grapher = DataModelGraph(parsed_data_model) + data_model_grapher = DataModelGraph(parsed_data_model, display_name_as_label) # Generate graph logger.info("Generating data model graph.") diff --git a/schematic/schemas/data_model_graph.py b/schematic/schemas/data_model_graph.py index e63cd4137..e9500bade 100644 --- a/schematic/schemas/data_model_graph.py +++ b/schematic/schemas/data_model_graph.py @@ -42,7 +42,7 @@ class DataModelGraph: __metaclass__ = DataModelGraphMeta - def __init__(self, attribute_relationships_dict: dict) -> None: + def __init__(self, attribute_relationships_dict: dict, display_name_as_label:bool) -> None: """Load parsed data model. 
        Args:
            attributes_relationship_dict, dict: generated in data_model_parser
            {Attribute Display Name: {
                   Relationships: {
                               CSV Header: Value}}}
        Raises:
            ValueError, attribute_relationship_dict not loaded.
        """
        self.attribute_relationships_dict = attribute_relationships_dict
        self.dmn = DataModelNodes(self.attribute_relationships_dict)
        self.dme = DataModelEdges()
        self.dmr = DataModelRelationships()
+        self.display_name_as_label=display_name_as_label
 
        if not self.attribute_relationships_dict:
            raise ValueError(
@@ -85,7 +86,7 @@
        for node in all_nodes:
            # Gather information for each node
            node_dict = self.dmn.generate_node_dict(
-                node, self.attribute_relationships_dict
+                node_display_name=node, attr_rel_dict=self.attribute_relationships_dict, display_name_as_label=self.display_name_as_label,
            )
 
            # Add each node to the all_node_dict to be used for generating edges
diff --git a/schematic/schemas/data_model_nodes.py b/schematic/schemas/data_model_nodes.py
index fb5025688..e1ef8de7a 100644
--- a/schematic/schemas/data_model_nodes.py
+++ b/schematic/schemas/data_model_nodes.py
@@ -251,6 +251,7 @@
                        attr_relationships=attr_relationships,
                        csv_header=csv_header,
                        entry_type=entry_type,
+                        display_name_as_label=display_name_as_label,
                    )
                }
            )
diff --git a/schematic/schemas/data_model_validator.py b/schematic/schemas/data_model_validator.py
index 40911e6a9..9eaa067bb 100644
--- a/schematic/schemas/data_model_validator.py
+++ b/schematic/schemas/data_model_validator.py
@@ -77,14 +77,17 @@ def check_is_dag(self) -> List[str]:
        error = []
        if not nx.is_directed_acyclic_graph(self.graph):
            # Attempt to find any cycles:
-            cycles = nx.simple_cycles(self.graph)
-            if cycles:
-                for cycle in cycles:
-                    error.append(
-                        f"Schematic requires models be a directed acyclic graph (DAG). Your graph is not a DAG, we found a loop between: {cycle[0]} and {cycle[1]}, please remove this loop from your model and submit again."
-                    )
-            else:
-                error.append(
+            #cycles = nx.simple_cycles(self.graph)
+            #if cycles:
+            #    for cycle in cycles:
+            #        error.append(
+            #            f"Schematic requires models be a directed acyclic graph (DAG). Your graph is not a DAG, we found a loop between: {cycle[0]} and {cycle[1]}, please remove this loop from your model and submit again."
+            #        )
+            #else:
+            #    error.append(
+            #        f"Schematic requires models be a directed acyclic graph (DAG). Your graph is not a DAG, we could not locate the source of the error, please inspect your model."
+            #    )
+            error.append(
                f"Schematic requires models be a directed acyclic graph (DAG). Your graph is not a DAG, we could not locate the source of the error, please inspect your model."
            )
        return error
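
A likely reason the cycle-reporting branch above was retired: nx.simple_cycles returns a generator, and a generator object is always truthy, so the "if cycles:" / "else:" split could never reach the fallback message. A sketch of one way to surface cycles again by materializing them first (an assumption, not necessarily the fix the project will land):

    import networkx as nx

    graph = nx.DiGraph([("Patient", "Biospecimen"), ("Biospecimen", "Patient")])

    if not nx.is_directed_acyclic_graph(graph):
        cycles = list(nx.simple_cycles(graph))  # materialize before the truthiness check
        if cycles:
            for cycle in cycles:
                print(f"Found a loop through: {' -> '.join(cycle)}")
        else:
            print("Graph is not a DAG, but the source of the error was not located.")
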
diff --git a/schematic/utils/schema_utils.py b/schematic/utils/schema_utils.py
index 9d890bff8..0ed709625 100644
--- a/schematic/utils/schema_utils.py
+++ b/schematic/utils/schema_utils.py
@@ -1,9 +1,13 @@
 import inflection
 import json
+import logging
 import networkx as nx
 import string
 from typing import List, Dict
 
+logger = logging.getLogger(__name__)
+
+
 def attr_dict_template(key_name:str)->Dict[str,dict[str,dict]]:
     return {key_name: {'Relationships': {}}}
 
@@ -86,7 +90,12 @@ def get_label_from_display_name(display_name:str, entry_type:str, strict_camel_c
         if valid_display_name:
             label=display_name
         else:
-            raise ValueError(f"Cannot use display name {display_name} as the schema label, because it is not formatted properly. Please remove all spaces and blacklisted characters: {str(blacklisted_chars)}")
+            if entry_type.lower()=='class':
+                label = get_class_label_from_display_name(display_name=display_name, strict_camel_case=strict_camel_case)
+
+            elif entry_type.lower()=='property':
+                label=get_property_label_from_display_name(display_name=display_name, strict_camel_case=strict_camel_case)
+            logger.warning(f"Cannot use display name {display_name} as the schema label, because it is not formatted properly. Please remove all spaces and blacklisted characters: {str(blacklisted_chars)}. The following label was assigned instead: {label}")
     else:
         if entry_type.lower()=='class':
             label = get_class_label_from_display_name(display_name=display_name, strict_camel_case=strict_camel_case)
 
         elif entry_type.lower()=='property':
             label=get_property_label_from_display_name(display_name=display_name, strict_camel_case=strict_camel_case)

From f0bc6eb65e02677be3483d22f0db0964bc76922a Mon Sep 17 00:00:00 2001
From: Mialy DeFelice
Date: Thu, 26 Oct 2023 15:01:41 -0700
Subject: [PATCH 003/199] WIP: updating tests

---
 tests/test_schemas.py | 27 ++++++++++++++++++++++-----
 1 file changed, 22 insertions(+), 5 deletions(-)

diff --git a/tests/test_schemas.py b/tests/test_schemas.py
index 7436712d1..1763a4452 100644
--- a/tests/test_schemas.py
+++ b/tests/test_schemas.py
@@ -61,7 +61,7 @@ def test_fake_func():
     "Bio Things": {"class": "BioThings", "property": "bioThings"},
     "bio things": {"class": "Biothings", "property": "biothings"},
 }
-NODE_DISPLAY_NAME_DICT = {"Patient": False, "Sex": True}
+NODE_DISPLAY_NAME_DICT = {"Patient": False, "Sex": True, "SourceManifest": False}
 
 def get_data_model_parser(helpers, data_model_name: str = None):
@@ -73,7 +73,7 @@
     return data_model_parser
 
-def generate_graph_data_model(helpers, data_model_name: str) -> nx.MultiDiGraph:
+def generate_graph_data_model(helpers, data_model_name: str, display_name_as_label:bool=False) -> nx.MultiDiGraph:
     """
     Simple helper function to generate a networkx graph data model from a CSV or JSONLD data model
     """
@@ -88,7 +88,7 @@
 
     # Convert parsed model to graph
     # Instantiate DataModelGraph
-    data_model_grapher = DataModelGraph(parsed_data_model)
+    data_model_grapher = DataModelGraph(parsed_data_model, display_name_as_label)
 
     # Generate graph
     graph_data_model = data_model_grapher.generate_data_model_graph()
@@ -800,9 +800,11 @@ def test_run_rel_functions(self, helpers, data_model, rel_func, test_dn, test_bool):
     @pytest.mark.parametrize(
         "node_display_name",
         list(NODE_DISPLAY_NAME_DICT.keys()),
-        ids=[str(v) for v in NODE_DISPLAY_NAME_DICT.values()],
+        ids=["Node_required-" + str(v) for v in NODE_DISPLAY_NAME_DICT.values()],
     )
-    def test_generate_node_dict(self, helpers, data_model, node_display_name):
+    @pytest.mark.parametrize(
+        "display_name_as_label", [True, False], ids=["Display_name_as_label-True", "Display_name_as_label-False"])
+    def test_generate_node_dict(self, helpers, data_model, node_display_name, display_name_as_label):
         # Instantiate Parser
         data_model_parser = get_data_model_parser(
             helpers=helpers, data_model_name=data_model
@@ -811,6 +813,12 @@
         # Parse Model
         attr_rel_dictionary = data_model_parser.parse_model()
 
+        for attr, rels in attr_rel_dictionary.items()
+            if attr=='SourceManifest':
+                attr_rel_dictionary[attr]
+
+        # Change SourceManifest to sockComponent so we can check the display_name_as_label is working as expected
+
         # Instantiate DataModelNodes
        data_model_nodes = generate_data_model_nodes(
            helpers, data_model_name=data_model
        )
 
        node_dict = data_model_nodes.generate_node_dict(
            node_display_name=node_display_name,
            attr_rel_dict=attr_rel_dictionary,
+            display_name_as_label=display_name_as_label,
        )
 
        # Check that the output is as expected for the required key.
@@ -829,6 +838,14 @@
        if not node_dict["required"] == False:
            assert DATA_MODEL_DICT[data_model] == "JSONLD"
 
+        # Check that the display name matches the label
+        if display_name_as_label:
+            breakpoint()
+            assert node_display_name == node_dict['label']
+        else:
+            breakpoint()
+            assert node_display_name == node_dict['displayName']
+
    def test_generate_node(self, helpers, data_model):
        # Test adding a dummy node
        node_dict = {"label": "test_label"}

From 0577838250dc009cc3e896a6bcf6bf2c0f00d70e Mon Sep 17 00:00:00 2001
From: Mialy DeFelice
Date: Mon, 6 Nov 2023 10:45:14 -0800
Subject: [PATCH 004/199] add display_name_as_label to help.py

---
 schematic/help.py | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/schematic/help.py b/schematic/help.py
index 6f9a53996..51580378a 100644
--- a/schematic/help.py
+++ b/schematic/help.py
@@ -132,6 +132,9 @@
            "Upsert functionality requires primary keys to be specified in the data model and manifest as _id."
            "Currently it is required to use -dl/--use_display_label with table upserts."
        ),
+        "display_name_as_label": (
+            "Flag to indicate that the display name should be used as the label. This requires that the display name be properly formatted without spaces or blacklisted characters."
+        ),
    },
    "validate": {
        "short_help": ("Validation of manifest files."),
@@ -157,6 +160,9 @@
        "project_scope": (
            "Specify a comma-separated list of projects to search through for cross manifest validation."
        ),
+        "display_name_as_label": (
+            "Flag to indicate that the display name should be used as the label. This requires that the display name be properly formatted without spaces or blacklisted characters."
+        ),
    },
 }

From da05a9506b76314c75eb899b66347e6a6ccbc58f Mon Sep 17 00:00:00 2001
From: Mialy DeFelice
Date: Mon, 6 Nov 2023 10:47:37 -0800
Subject: [PATCH 005/199] update model commands to take in
 display_name_as_label

---
 schematic/models/commands.py | 20 ++++++++++++++++----
 1 file changed, 16 insertions(+), 4 deletions(-)

diff --git a/schematic/models/commands.py b/schematic/models/commands.py
index eeeb7c809..f0ef73016 100644
--- a/schematic/models/commands.py
+++ b/schematic/models/commands.py
@@ -103,9 +103,15 @@ def model(ctx, config):  # use as `schematic model ...`
    default='replace',
    type=click.Choice(['replace', 'upsert'], case_sensitive=True),
    help=query_dict(model_commands, ("model", "submit", "table_manipulation")))
+@click.option(
+    "--display_name_as_label",
+    "-dnl",
+    is_flag=True,
+    help=query_dict(model_commands, ("model", "submit", "display_name_as_label")),
+)
 @click.pass_obj
 def submit_manifest(
-    ctx, manifest_path, dataset_id, validate_component, manifest_record_type, use_schema_label, hide_blanks, restrict_rules, project_scope, table_manipulation,
+    ctx, manifest_path, dataset_id, validate_component, manifest_record_type, use_schema_label, hide_blanks, restrict_rules, project_scope, table_manipulation, display_name_as_label
 ):
    """
    Running CLI with manifest validation (optional) and submission options.
@@ -115,7 +121,7 @@ def submit_manifest( log_value_from_config("jsonld", jsonld) metadata_model = MetadataModel( - inputMModelLocation=jsonld, inputMModelLocationType="local" + inputMModelLocation=jsonld, inputMModelLocationType="local", display_name_as_label=display_name_as_label ) @@ -176,8 +182,14 @@ def submit_manifest( callback=parse_synIDs, help=query_dict(model_commands, ("model", "validate", "project_scope")), ) +@click.option( + "--display_name_as_label", + "-dnl", + is_flag=True, + help=query_dict(model_commands, ("model", "validate", "display_name_as_label")), +) @click.pass_obj -def validate_manifest(ctx, manifest_path, data_type, json_schema, restrict_rules,project_scope): +def validate_manifest(ctx, manifest_path, data_type, json_schema, restrict_rules,project_scope, display_name_as_label): """ Running CLI for manifest validation. """ @@ -200,7 +212,7 @@ def validate_manifest(ctx, manifest_path, data_type, json_schema, restrict_rules log_value_from_config("jsonld", jsonld) metadata_model = MetadataModel( - inputMModelLocation=jsonld, inputMModelLocationType="local" + inputMModelLocation=jsonld, inputMModelLocationType="local", display_name_as_label=display_name_as_label, ) errors, warnings = metadata_model.validateModelManifest( From 27c21c9c28ee2bf057d260d9103c6be4fa44422e Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 6 Nov 2023 10:48:17 -0800 Subject: [PATCH 006/199] add update schema/commands to take display_name_as_label in parser --- schematic/schemas/commands.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/schematic/schemas/commands.py b/schematic/schemas/commands.py index 3a2b40119..1ed0a96de 100644 --- a/schematic/schemas/commands.py +++ b/schematic/schemas/commands.py @@ -65,7 +65,7 @@ def convert(schema, display_name_as_label, output_jsonld): st = time.time() # Instantiate Parser - data_model_parser = DataModelParser(schema) + data_model_parser = DataModelParser(schema, display_name_as_label) # Parse Model logger.info("Parsing data model.") From f7317aea43ed33378b9074d0ebc136947720819e Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 6 Nov 2023 10:54:09 -0800 Subject: [PATCH 007/199] update metadata.py to take in display_name_as_label parameter --- schematic/models/metadata.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/schematic/models/metadata.py b/schematic/models/metadata.py index 04a690bf6..fcf493385 100644 --- a/schematic/models/metadata.py +++ b/schematic/models/metadata.py @@ -34,7 +34,7 @@ class MetadataModel(object): - generate validation schema view of the metadata model """ - def __init__(self, inputMModelLocation: str, inputMModelLocationType: str,) -> None: + def __init__(self, inputMModelLocation: str, inputMModelLocationType: str, display_name_as_label: bool) -> None: """Instantiates a MetadataModel object. 
@@ -56,7 +56,7 @@ def __init__(self, inputMModelLocation: str, inputMModelLocationType: str,) -> N parsed_data_model = data_model_parser.parse_model() # Instantiate DataModelGraph - data_model_grapher = DataModelGraph(parsed_data_model) + data_model_grapher = DataModelGraph(parsed_data_model, display_name_as_label) # Generate graph self.graph_data_model = data_model_grapher.generate_data_model_graph() From e5bf9650f56a08a0100d80bccce081d08ecc72a1 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 6 Nov 2023 10:54:37 -0800 Subject: [PATCH 008/199] remove unused import --- schematic/models/validate_manifest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/schematic/models/validate_manifest.py b/schematic/models/validate_manifest.py index c54d7945d..4def20c5a 100644 --- a/schematic/models/validate_manifest.py +++ b/schematic/models/validate_manifest.py @@ -19,7 +19,7 @@ from urllib import error from schematic.models.validate_attribute import ValidateAttribute, GenerateError -#from schematic.schemas.generator import SchemaGenerator + from schematic.schemas.data_model_graph import DataModelGraphExplorer from schematic.store.synapse import SynapseStorage from schematic.models.GE_Helpers import GreatExpectationsHelpers From b4cb92326b38b7e0f31d5c492cec6ab71cde3ad5 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 6 Nov 2023 11:01:11 -0800 Subject: [PATCH 009/199] add docstrings and default value for display_name_as_label --- schematic/schemas/data_model_graph.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/schematic/schemas/data_model_graph.py b/schematic/schemas/data_model_graph.py index 2dce332d5..3b676d24c 100644 --- a/schematic/schemas/data_model_graph.py +++ b/schematic/schemas/data_model_graph.py @@ -43,13 +43,14 @@ class DataModelGraph: __metaclass__ = DataModelGraphMeta - def __init__(self, attribute_relationships_dict: dict, display_name_as_label:bool) -> None: + def __init__(self, attribute_relationships_dict: dict, display_name_as_label:bool=False) -> None: """Load parsed data model. Args: attributes_relationship_dict, dict: generated in data_model_parser {Attribute Display Name: { Relationships: { CSV Header: Value}}} + display_name_as_label, bool: Default, false. If true, set the display name as the label. If display name is not formatted properly, standard schema label will be used instead. Raises: ValueError, attribute_relationship_dict not loaded. 
""" @@ -78,6 +79,7 @@ def generate_data_model_graph(self) -> nx.MultiDiGraph: attr_rel_dict=self.attribute_relationships_dict ) + # Instantiate NetworkX MultiDigraph G = nx.MultiDiGraph() From 86adb29daab5524a1e6b5f094d665f35047cee33 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 6 Nov 2023 11:02:12 -0800 Subject: [PATCH 010/199] add space for aesthetics --- schematic/schemas/data_model_jsonld.py | 1 + 1 file changed, 1 insertion(+) diff --git a/schematic/schemas/data_model_jsonld.py b/schematic/schemas/data_model_jsonld.py index fa9af86ef..d3d57e0b2 100644 --- a/schematic/schemas/data_model_jsonld.py +++ b/schematic/schemas/data_model_jsonld.py @@ -303,6 +303,7 @@ def fill_entry_template(self, template: dict, node: str) -> dict: template = self.reorder_template_entries( template=template, ) + # Add contexts to certain values template = self.add_contexts_to_entries( template=template, From 9768a758dc3ddfe8ddfa18782e758d8119464004 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 6 Nov 2023 11:02:50 -0800 Subject: [PATCH 011/199] add default value for display_name_as_label --- schematic/schemas/data_model_nodes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/schematic/schemas/data_model_nodes.py b/schematic/schemas/data_model_nodes.py index e1ef8de7a..7a672857a 100644 --- a/schematic/schemas/data_model_nodes.py +++ b/schematic/schemas/data_model_nodes.py @@ -176,7 +176,7 @@ def run_rel_functions( f"The function provided ({rel_func}) to define the relationship {key} is not captured in the function run_rel_functions, please update." ) - def generate_node_dict(self, node_display_name: str, attr_rel_dict: dict, display_name_as_label:bool) -> dict: + def generate_node_dict(self, node_display_name: str, attr_rel_dict: dict, display_name_as_label:bool=False) -> dict: """Gather information to be attached to each node. Args: node_display_name, str: display name for current node From 9bdac3b6c15b6c0f531df2687a4fe54947003844 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 6 Nov 2023 11:03:31 -0800 Subject: [PATCH 012/199] add display_name_as_label option to api.yml --- schematic_api/api/openapi/api.yaml | 98 +++++++++++++++++++++++++++++- 1 file changed, 97 insertions(+), 1 deletion(-) diff --git a/schematic_api/api/openapi/api.yaml b/schematic_api/api/openapi/api.yaml index 0aca6a05d..2b153daf4 100644 --- a/schematic_api/api/openapi/api.yaml +++ b/schematic_api/api/openapi/api.yaml @@ -98,6 +98,14 @@ paths: default: True description: If using Google Sheets, can set the strictness of Google Sheets regex match validation. True (default) will block users from entering incorrect values, False will throw a warning to users. required: false + - in: query + name: display_name_as_label + schema: + type: string + nullable: true + default: false + description: If 'true' then the display name used in the model will be used as the SchemaLabel (if it is fomatted properly), if 'false', default, then a SchemaLabel will be generated. + required: false operationId: schematic_api.api.routes.get_manifest_route responses: "200": @@ -235,6 +243,14 @@ paths: description: Data Model Component example: Patient required: true + - in: query + name: display_name_as_label + schema: + type: string + nullable: true + default: false + description: If 'true' then the display name used in the model will be used as the SchemaLabel (if it is fomatted properly), if 'false', default, then a SchemaLabel will be generated. 
+        required: false
      - in: query
        name: restrict_rules
        schema:
@@ -303,6 +319,14 @@
        example: >-
          https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.model.jsonld
        required: true
+      - in: query
+        name: display_name_as_label
+        schema:
+          type: string
+          nullable: true
+          default: false
+        description: If 'true' then the display name used in the model will be used as the SchemaLabel (if it is formatted properly); if 'false' (default), then a SchemaLabel will be generated.
+        required: false
      - in: query
        name: data_type
        schema:
@@ -415,6 +439,14 @@
          default: false
        description: if False return component requirements as a list; if True return component requirements as a dependency graph (i.e. a DAG)
        required: true
+      - in: query
+        name: display_name_as_label
+        schema:
+          type: string
+          nullable: true
+          default: false
+        description: If 'true' then the display name used in the model will be used as the SchemaLabel (if it is formatted properly); if 'false' (default), then a SchemaLabel will be generated.
+        required: false
      operationId: schematic_api.api.routes.get_component_requirements
      responses:
        "200":
@@ -735,6 +767,14 @@
        description: ID of file or folder. For example, for Synapse this would be the Synapse ID.
        example: syn30988314
        required: true
+      - in: query
+        name: display_name_as_label
+        schema:
+          type: string
+          nullable: true
+          default: false
+        description: If 'true' then the display name used in the model will be used as the SchemaLabel (if it is formatted properly); if 'false' (default), then a SchemaLabel will be generated.
+        required: false
      responses:
        "200":
          description: return "true" or "false"
@@ -790,6 +830,14 @@
        description: schema class
        example: MolecularEntity
        required: true
+      - in: query
+        name: display_name_as_label
+        schema:
+          type: string
+          nullable: true
+          default: false
+        description: If 'true' then the display name used in the model will be used as the SchemaLabel (if it is formatted properly); if 'false' (default), then a SchemaLabel will be generated.
+        required: false
      responses:
        "200":
          description: A list of properties of a given class.
@@ -818,7 +866,15 @@
          nullable: false
        description: Relationship (i.e. parentOf, requiresDependency, rangeValue, domainValue)
        example: requiresDependency
-      required: true
+      required: true
+      - in: query
+        name: display_name_as_label
+        schema:
+          type: string
+          nullable: true
+          default: false
+        description: If 'true' then the display name used in the model will be used as the SchemaLabel (if it is formatted properly); if 'false' (default), then a SchemaLabel will be generated.
+        required: false
      responses:
        "200":
          description: A list of tuples.
@@ -859,6 +915,14 @@
        description: Display label of a node
        example: FamilyHistory
        required: true
+      - in: query
+        name: display_name_as_label
+        schema:
+          type: string
+          nullable: true
+          default: false
+        description: If 'true' then the display name used in the model will be used as the SchemaLabel (if it is formatted properly); if 'false' (default), then a SchemaLabel will be generated.
+        required: false
      responses:
        "200":
          description: return a boolean
@@ -891,6 +955,14 @@
        description: List of node labels.
        example: ['FamilyHistory', 'Biospecimen']
        required: true
+      - in: query
+        name: display_name_as_label
+        schema:
+          type: string
+          nullable: true
+          default: false
+        description: If 'true' then the display name used in the model will be used as the SchemaLabel (if it is formatted properly); if 'false' (default), then a SchemaLabel will be generated.
+        required: false
      responses:
        "200":
          description: return List[str]
@@ -922,6 +994,14 @@
        description: Display label of node
        example: CheckRegexList
        required: true
+      - in: query
+        name: display_name_as_label
+        schema:
+          type: string
+          nullable: true
+          default: false
+        description: If 'true' then the display name used in the model will be used as the SchemaLabel (if it is formatted properly); if 'false' (default), then a SchemaLabel will be generated.
+        required: false
      responses:
        "200":
          description: return a list
@@ -952,6 +1032,14 @@
        description: The node whose dependencies are needed
        example: Patient
        required: true
+      - in: query
+        name: display_name_as_label
+        schema:
+          type: string
+          nullable: true
+          default: false
+        description: If 'true' then the display name used in the model will be used as the SchemaLabel (if it is formatted properly); if 'false' (default), then a SchemaLabel will be generated.
+        required: false
      - in: query
        name: return_display_names
        schema:
@@ -1027,6 +1115,14 @@
        description: Node / term for which you need to retrieve the range.
        example: FamilyHistory
        required: true
+      - in: query
+        name: display_name_as_label
+        schema:
+          type: string
+          nullable: true
+          default: false
+        description: If 'true' then the display name used in the model will be used as the SchemaLabel (if it is formatted properly); if 'false' (default), then a SchemaLabel will be generated.
+        required: false
      - in: query
        name: return_display_names
        schema:
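
Since the spec above declares display_name_as_label as a string that defaults to 'false', the route handlers presumably have to coerce the query value to a boolean; a minimal sketch of such a coercion helper follows (hypothetical, not taken from the codebase — schematic's routes use their own parse_bool, seen in the next patch):

    def parse_bool_param(value, default: bool = False) -> bool:
        # Accept real booleans, None (absent parameter), and string forms.
        if value is None:
            return default
        if isinstance(value, bool):
            return value
        return str(value).strip().lower() in ("true", "1", "yes")

    assert parse_bool_param(None) is False
    assert parse_bool_param("true") is True
    assert parse_bool_param("False") is False
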
From 487aed6e07f8f170be6d3dfd0740ed040abcf6b7 Mon Sep 17 00:00:00 2001
From: Mialy DeFelice
Date: Mon, 6 Nov 2023 11:04:08 -0800
Subject: [PATCH 013/199] update api routes to take display_name_as_label

---
 schematic_api/api/routes.py | 52 +++++++++++++++++++------------------
 1 file changed, 27 insertions(+), 25 deletions(-)

diff --git a/schematic_api/api/routes.py b/schematic_api/api/routes.py
index 30b1984e9..128abffea 100644
--- a/schematic_api/api/routes.py
+++ b/schematic_api/api/routes.py
@@ -197,13 +197,13 @@ def save_file(file_key="csv_file"):
    return temp_path
 
-def initalize_metadata_model(schema_url):
+def initalize_metadata_model(schema_url, display_name_as_label):
    # get path to temp data model file (csv or jsonld) as appropriate
    data_model = get_temp_model_path(schema_url)
 
    metadata_model = MetadataModel(
-        inputMModelLocation=data_model, inputMModelLocationType="local"
+        inputMModelLocation=data_model, inputMModelLocationType="local", display_name_as_label=display_name_as_label,
    )
    return metadata_model
 
@@ -375,7 +375,7 @@ def create_single_manifest(data_type, title, dataset_id=None, output_format=None):
 
 #####profile validate manifest route function
 #@profile(sort_by='cumulative', strip_dirs=True)
-def validate_manifest_route(schema_url, data_type, restrict_rules=None, json_str=None):
+def validate_manifest_route(schema_url, data_type, display_name_as_label, restrict_rules=None, json_str=None):
    # if restrict rules is set to None, default it to False
    if not restrict_rules:
        restrict_rules=False
@@ -399,7 +399,7 @@
    data_model = get_temp_model_path(schema_url)
 
    metadata_model = MetadataModel(
-        inputMModelLocation=data_model, inputMModelLocationType="local"
+        inputMModelLocation=data_model, inputMModelLocationType="local", display_name_as_label=display_name_as_label
    )
 
    errors, warnings = metadata_model.validateModelManifest(
@@ -412,7 +412,7 @@
 
 #####profile validate manifest route function
 #@profile(sort_by='cumulative', strip_dirs=True)
-def submit_manifest_route(schema_url, asset_view=None, manifest_record_type=None, json_str=None, table_manipulation=None, data_type=None, hide_blanks=False):
+def submit_manifest_route(schema_url, display_name_as_label:bool, asset_view=None, manifest_record_type=None, json_str=None, table_manipulation=None, data_type=None, hide_blanks=False):
    # call config_handler()
    config_handler(asset_view = asset_view)
 
@@ -427,7 +427,7 @@
 
    restrict_rules = parse_bool(connexion.request.args["restrict_rules"])
 
-    metadata_model = initalize_metadata_model(schema_url)
+    metadata_model = initalize_metadata_model(schema_url, display_name_as_label)
 
    # Access token now stored in request header
    access_token = get_access_token()
@@ -467,7 +467,7 @@
    return manifest_id
 
-def populate_manifest_route(schema_url, title=None, data_type=None, return_excel=None):
+def populate_manifest_route(schema_url, title=None, data_type=None, return_excel=None, display_name_as_label=False):
    # call config_handler()
    config_handler()
 
@@ -478,7 +478,7 @@
    data_model = get_temp_model_path(schema_url)
 
    #Initalize MetadataModel
-    metadata_model = MetadataModel(inputMModelLocation=data_model, inputMModelLocationType='local')
+    metadata_model = MetadataModel(inputMModelLocation=data_model, inputMModelLocationType='local', display_name_as_label=display_name_as_label)
 
    #Call populateModelManifest class
    populated_manifest_link = metadata_model.populateModelManifest(title=title, manifestPath=temp_path, rootNode=data_type, return_excel=return_excel)
@@ -560,8 +560,8 @@ def check_entity_type(entity_id):
    return entity_type
 
-def get_component_requirements(schema_url, source_component, as_graph):
-    metadata_model = initalize_metadata_model(schema_url)
+def get_component_requirements(schema_url, source_component, as_graph, display_name_as_label):
+    metadata_model = initalize_metadata_model(schema_url, display_name_as_label)
 
    req_components = metadata_model.get_component_requirements(source_component=source_component, as_graph = as_graph)
@@ -735,13 +735,13 @@
    return manifest_dtypes_dict
 
-def get_schema_pickle(schema_url):
+def get_schema_pickle(schema_url, display_name_as_label):
    data_model_parser = DataModelParser(path_to_data_model = schema_url)
    #Parse Model
    parsed_data_model = data_model_parser.parse_model()
 
    # Instantiate DataModelGraph
-    data_model_grapher = DataModelGraph(parsed_data_model)
+    data_model_grapher = DataModelGraph(parsed_data_model, display_name_as_label)
 
    # Generate graph
    graph_data_model = data_model_grapher.generate_data_model_graph()
@@ -755,14 +755,14 @@
    return export_path
 
-def get_subgraph_by_edge_type(schema_url, relationship):
+def 
get_subgraph_by_edge_type(schema_url, relationship): return Arr -def find_class_specific_properties(schema_url, schema_class): +def find_class_specific_properties(schema_url, schema_class, display_name_as_label): data_model_parser = DataModelParser(path_to_data_model = schema_url) #Parse Model parsed_data_model = data_model_parser.parse_model() # Instantiate DataModelGraph - data_model_grapher = DataModelGraph(parsed_data_model) + data_model_grapher = DataModelGraph(parsed_data_model, display_name_as_label) # Generate graph graph_data_model = data_model_grapher.generate_data_model_graph() @@ -802,8 +802,9 @@ def find_class_specific_properties(schema_url, schema_class): def get_node_dependencies( schema_url: str, source_node: str, + display_name_as_label: bool, return_display_names: bool = True, - return_schema_ordered: bool = True + return_schema_ordered: bool = True, ) -> list[str]: """Get the immediate dependencies that are related to a given source node. @@ -827,7 +828,7 @@ def get_node_dependencies( parsed_data_model = data_model_parser.parse_model() # Instantiate DataModelGraph - data_model_grapher = DataModelGraph(parsed_data_model) + data_model_grapher = DataModelGraph(parsed_data_model, display_name_as_label) # Generate graph graph_data_model = data_model_grapher.generate_data_model_graph() @@ -862,6 +863,7 @@ def get_property_label_from_display_name_route( def get_node_range( schema_url: str, node_label: str, + display_name_as_label:bool, return_display_names: bool = True ) -> list[str]: """Get the range, i.e., all the valid values that are associated with a node label. @@ -880,7 +882,7 @@ def get_node_range( parsed_data_model = data_model_parser.parse_model() # Instantiate DataModelGraph - data_model_grapher = DataModelGraph(parsed_data_model) + data_model_grapher = DataModelGraph(parsed_data_model, display_name_as_label) # Generate graph graph_data_model = data_model_grapher.generate_data_model_graph() @@ -890,7 +892,7 @@ def get_node_range( node_range = dmge.get_node_range(node_label, return_display_names) return node_range -def get_if_node_required(schema_url: str, node_display_name: str) -> bool: +def get_if_node_required(schema_url: str, node_display_name: str, display_name_as_label:bool) -> bool: """Check if the node is required Args: @@ -906,7 +908,7 @@ def get_if_node_required(schema_url: str, node_display_name: str) -> bool: parsed_data_model = data_model_parser.parse_model() # Instantiate DataModelGraph - data_model_grapher = DataModelGraph(parsed_data_model) + data_model_grapher = DataModelGraph(parsed_data_model, display_name_as_label) # Generate graph graph_data_model = data_model_grapher.generate_data_model_graph() @@ -917,7 +919,7 @@ def get_if_node_required(schema_url: str, node_display_name: str) -> bool: return is_required -def get_node_validation_rules(schema_url: str, node_display_name: str) -> list: +def get_node_validation_rules(schema_url: str, node_display_name: str, display_name_as_label:bool) -> list: """ Args: schema_url (str): Data Model URL @@ -932,7 +934,7 @@ def get_node_validation_rules(schema_url: str, node_display_name: str) -> list: parsed_data_model = data_model_parser.parse_model() # Instantiate DataModelGraph - data_model_grapher = DataModelGraph(parsed_data_model) + data_model_grapher = DataModelGraph(parsed_data_model, display_name_as_label) # Generate graph graph_data_model = data_model_grapher.generate_data_model_graph() @@ -944,7 +946,7 @@ def get_node_validation_rules(schema_url: str, node_display_name: str) -> list: return 
node_validation_rules -def get_nodes_display_names(schema_url: str, node_list: list[str]) -> list: +def get_nodes_display_names(schema_url: str, node_list: list[str], display_name_as_label:bool) -> list: """From a list of node labels retrieve their display names, return as list. Args: @@ -962,7 +964,7 @@ def get_nodes_display_names(schema_url: str, node_list: list[str]) -> list: parsed_data_model = data_model_parser.parse_model() # Instantiate DataModelGraph - data_model_grapher = DataModelGraph(parsed_data_model) + data_model_grapher = DataModelGraph(parsed_data_model, display_name_as_label) # Generate graph graph_data_model = data_model_grapher.generate_data_model_graph() From 6b573106f0838726e3dcca6f8229b677c2be236c Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 6 Nov 2023 11:04:52 -0800 Subject: [PATCH 014/199] add display_name_as_label to conftest --- tests/conftest.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index e965bdc5c..eb55ee5d6 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -55,7 +55,7 @@ def get_data_frame(path, *paths, **kwargs): return load_df(fullpath, **kwargs) @staticmethod - def get_data_model_graph_explorer(path=None, *paths): + def get_data_model_graph_explorer(path=None, display_name_as_label:bool=False, *paths): #commenting this now bc we dont want to have multiple instances if path is None: return @@ -63,13 +63,13 @@ def get_data_model_graph_explorer(path=None, *paths): fullpath = Helpers.get_data_path(path, *paths) # Instantiate DataModelParser - data_model_parser = DataModelParser(path_to_data_model = fullpath) + data_model_parser = DataModelParser(path_to_data_model = fullpath, display_name_as_label=display_name_as_label) #Parse Model parsed_data_model = data_model_parser.parse_model() # Instantiate DataModelGraph - data_model_grapher = DataModelGraph(parsed_data_model) + data_model_grapher = DataModelGraph(parsed_data_model, display_name_as_label=display_name_as_label) # Generate graph graph_data_model = data_model_grapher.generate_data_model_graph() From 662d18acf8b8d966f7f1b073d60b58406a2565f1 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 6 Nov 2023 11:06:44 -0800 Subject: [PATCH 015/199] update test_metadata to take in display_name as label, and use as a paramter to some tests --- tests/test_metadata.py | 23 +++++++++++++++-------- 1 file changed, 15 insertions(+), 8 deletions(-) diff --git a/tests/test_metadata.py b/tests/test_metadata.py index 2a88e6688..58854c9e5 100644 --- a/tests/test_metadata.py +++ b/tests/test_metadata.py @@ -8,24 +8,26 @@ logging.basicConfig(level=logging.DEBUG) logger = logging.getLogger(__name__) -@pytest.fixture -def metadata_model(helpers): +def metadata_model(helpers, display_name_as_label): metadata_model = MetadataModel( - inputMModelLocation=helpers.get_data_path("example.model.jsonld"), + inputMModelLocation=helpers.get_data_path("example.model.jsonld"), display_name_as_label=display_name_as_label, inputMModelLocationType="local", ) - yield metadata_model + return metadata_model class TestMetadataModel: @pytest.mark.parametrize("as_graph", [True, False], ids=["as_graph", "as_list"]) - def test_get_component_requirements(self, metadata_model, as_graph): + @pytest.mark.parametrize("display_name_as_label", [True, False], ids=["display_name_as_label-True", "display_name_as_label-False"]) + def test_get_component_requirements(self, helpers, as_graph, display_name_as_label): + # Instantiate MetadataModel + meta_data_model = 
metadata_model(helpers, display_name_as_label) source_component = "BulkRNA-seqAssay" - output = metadata_model.get_component_requirements( + output = meta_data_model.get_component_requirements( source_component, as_graph=as_graph ) @@ -40,13 +42,18 @@ def test_get_component_requirements(self, metadata_model, as_graph): assert "BulkRNA-seqAssay" in output @pytest.mark.parametrize("return_excel", [None, True, False]) + @pytest.mark.parametrize("display_name_as_label", [True, False], ids=["display_name_as_label-True", "display_name_as_label-False"]) @pytest.mark.google_credentials_needed - def test_populate_manifest(self, metadata_model, helpers, return_excel): + def test_populate_manifest(self, helpers, return_excel, display_name_as_label): + + # Instantiate MetadataModel + meta_data_model = metadata_model(helpers, display_name_as_label) + #Get path of manifest manifestPath = helpers.get_data_path("mock_manifests/Valid_Test_Manifest.csv") #Call populateModelManifest class - populated_manifest_route= metadata_model.populateModelManifest(title="mock_title", manifestPath=manifestPath, rootNode="MockComponent", return_excel=return_excel) + populated_manifest_route= meta_data_model.populateModelManifest(title="mock_title", manifestPath=manifestPath, rootNode="MockComponent", return_excel=return_excel) if not return_excel: # return a url From 062734827e895cd9babc1f278ef8dfaf2fe86022 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 6 Nov 2023 11:08:37 -0800 Subject: [PATCH 016/199] add display_name_as_label to tests, and as a parameter to some tests --- tests/test_schemas.py | 62 ++++++++++++++++++++++++++----------------- 1 file changed, 37 insertions(+), 25 deletions(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index 1763a4452..dadf354c4 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -61,15 +61,15 @@ def test_fake_func(): "Bio Things": {"class": "BioThings", "property": "bioThings"}, "bio things": {"class": "Biothings", "property": "biothings"}, } -NODE_DISPLAY_NAME_DICT = {"Patient": False, "Sex": True, "SourceManifest": False} +NODE_DISPLAY_NAME_DICT = {"Patient": False, "Sex": True, "MockRDB_id": True} -def get_data_model_parser(helpers, data_model_name: str = None): +def get_data_model_parser(helpers, data_model_name: str = None, display_name_as_label: bool=False): # Get path to data model fullpath = helpers.get_data_path(path=data_model_name) # Instantiate DataModelParser - data_model_parser = DataModelParser(path_to_data_model=fullpath) + data_model_parser = DataModelParser(path_to_data_model=fullpath, display_name_as_label=display_name_as_label) return data_model_parser @@ -77,10 +77,9 @@ def generate_graph_data_model(helpers, data_model_name: str, display_name_as_lab """ Simple helper function to generate a networkx graph data model from a CSV or JSONLD data model """ - # Instantiate Parser data_model_parser = get_data_model_parser( - helpers=helpers, data_model_name=data_model_name + helpers=helpers, data_model_name=data_model_name, display_name_as_label=display_name_as_label, ) # Parse Model @@ -270,7 +269,7 @@ def test_parse_csv_model( @pytest.mark.parametrize("data_model", ["example.model.jsonld"], ids=["jsonld"]) class TestDataModelJsonLdParser: def test_gather_jsonld_attributes_relationships( - self, helpers, data_model: str, jsonld_parser: DataModelJSONLDParser + self, helpers, data_model: str, jsonld_parser: DataModelJSONLDParser, ): """The output of the function is a attributes relationship dictionary, check that it is formatted 
properly.""" path_to_data_model = helpers.get_data_path(path=data_model) @@ -278,7 +277,8 @@ def test_gather_jsonld_attributes_relationships( # Get output of the function: attr_rel_dict = jsonld_parser.gather_jsonld_attributes_relationships( - model_jsonld=model_jsonld["@graph"] + model_jsonld=model_jsonld["@graph"], + display_name_as_label=False, ) # Test the attr_rel_dict is formatted as expected: @@ -291,8 +291,10 @@ def test_gather_jsonld_attributes_relationships( assert "Relationships" in attr_rel_dict[attribute_key] assert "Attribute" in attr_rel_dict[attribute_key]["Relationships"] + + def test_parse_jsonld_model( - self, helpers, data_model: str, jsonld_parser: DataModelJSONLDParser + self, helpers, data_model: str, jsonld_parser: DataModelJSONLDParser, ): """The output of the function is a attributes relationship dictionary, check that it is formatted properly.""" path_to_data_model = helpers.get_data_path(path=data_model) @@ -300,7 +302,8 @@ def test_parse_jsonld_model( # Get output of the function: attr_rel_dictionary = jsonld_parser.parse_jsonld_model( - path_to_data_model=path_to_data_model + path_to_data_model=path_to_data_model, + display_name_as_label=False, ) # Test the attr_rel_dictionary is formatted as expected: @@ -385,25 +388,37 @@ class TestDataModelGraph: ["example.model.csv", "example.model.jsonld"], ids=["csv", "jsonld"], ) - def test_generate_data_model_graph(self, helpers, data_model): + @pytest.mark.parametrize("display_name_as_label", [True, False], ids=["display_name_as_label-True", "display_name_as_label-False"]) + def test_generate_data_model_graph(self, helpers, data_model, display_name_as_label): """Check that data model graph is constructed properly, requires calling various classes. TODO: In another test, check conditional dependencies. 
""" - graph = generate_graph_data_model(helpers=helpers, data_model_name=data_model) + graph = generate_graph_data_model(helpers=helpers, data_model_name=data_model, display_name_as_label=display_name_as_label) # Check that some edges are present as expected: assert ("FamilyHistory", "Breast") in graph.edges("FamilyHistory") assert ("BulkRNA-seqAssay", "Biospecimen") in graph.edges("BulkRNA-seqAssay") - assert ["Ab", "Cd", "Ef", "Gh"] == [ + + if display_name_as_label: + expected_valid_values = ['ab', 'cd', 'ef', 'gh'] + mock_id_label = 'MockRDB_id' + + else: + expected_valid_values = ["Ab", "Cd", "Ef", "Gh"] + mock_id_label = 'MockRDBId' + + assert expected_valid_values == [ k for k, v in graph["CheckList"].items() for vk, vv in v.items() if vk == "rangeValue" ] + assert mock_id_label in graph.nodes + # Check that all relationships recorded between 'CheckList' and 'Ab' are present - assert "rangeValue" and "parentOf" in graph["CheckList"]["Ab"] - assert "requiresDependency" not in graph["CheckList"]["Ab"] + assert "rangeValue" and "parentOf" in graph["CheckList"][expected_valid_values[0]] + assert "requiresDependency" not in graph["CheckList"][expected_valid_values[0]] # Check nodes: assert "Patient" in graph.nodes @@ -813,10 +828,6 @@ def test_generate_node_dict(self, helpers, data_model, node_display_name, displa # Parse Model attr_rel_dictionary = data_model_parser.parse_model() - for attr, rels in attr_rel_dictionary.items() - if attr=='SourceManifest': - attr_rel_dictionary[attr] - # Change SourceManifest to sockComponent so we can check the display_name_as_label is working as expected # Instantiate DataModelNodes @@ -840,11 +851,7 @@ def test_generate_node_dict(self, helpers, data_model, node_display_name, displa # Check that the display name matches the label if display_name_as_label: - breakpoint() assert node_display_name == node_dict['label'] - else: - breakpoint() - assert node_display_name == node_dict['displayName'] def test_generate_node(self, helpers, data_model): # Test adding a dummy node @@ -1200,6 +1207,7 @@ class TestDataModelJsonLd: @pytest.mark.parametrize( "data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values()) ) + def test_init(self, helpers, data_model): # Test that __init__ is being set up properly # Get Graph @@ -1274,10 +1282,11 @@ def test_class_template(self, helpers): "template_type", ["property", "class"], ids=["property", "class"] ) @pytest.mark.parametrize("node", ["", "Patient"], ids=["no node", "Patient"]) - def test_fill_entry_template(self, helpers, data_model, template_type, node): + @pytest.mark.parametrize("display_name_as_label", [True, False], ids=["display_name_as_label-True", "display_name_as_label-False"]) + def test_fill_entry_template(self, helpers, data_model, template_type, node, display_name_as_label): # Get Graph graph_data_model = generate_graph_data_model( - helpers, data_model_name=data_model + helpers, data_model_name=data_model, display_name_as_label=display_name_as_label, ) # Instantiate DataModelJsonLD @@ -1331,9 +1340,12 @@ def test_fill_entry_template(self, helpers, data_model, template_type, node): "sms:requiresDependency", "sms:validationRules", ] + assert (set(actual_keys) - set(expected_keys)) == ( set(expected_keys) - set(actual_keys) ) + if display_name_as_label: + assert object_template["rdfs:label"] == object_template["sms:displayName"] @pytest.mark.parametrize( "data_model", list(DATA_MODEL_DICT.keys()), ids=list(DATA_MODEL_DICT.values()) @@ -1465,7 +1477,7 @@ def test_generate_jsonld_object(self, 
helpers, data_model):
         # Get Graph
         graph_data_model = generate_graph_data_model(
-            helpers, data_model_name=data_model
+            helpers, data_model_name=data_model,
         )
 
         # Instantiate DataModelJsonLD

From b637476c7f19251e48883873916b144148afca38 Mon Sep 17 00:00:00 2001
From: Mialy DeFelice
Date: Mon, 6 Nov 2023 11:10:15 -0800
Subject: [PATCH 017/199] update synapse.py so it can take in id, Id or ID
 column names without error

---
 schematic/store/synapse.py | 11 ++++++++---
 1 file changed, 8 insertions(+), 3 deletions(-)

diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py
index b24663128..00be1ba05 100644
--- a/schematic/store/synapse.py
+++ b/schematic/store/synapse.py
@@ -1358,11 +1358,16 @@ def _add_id_columns_to_manifest(self, manifest: pd.DataFrame, dmge: DataModelGra
         else:
             manifest["Id"] = ''
 
+        # Retrieve the ID column name ('id', 'Id', and 'ID' are treated the same).
+        id_col_name = [col for col in manifest.columns if col.lower() == 'id'][0]
+
+        # Check if values have been added to the Id column; if not, add a UUID so the value in the row is not blank.
        for idx,row in manifest.iterrows():
-            if not row["Id"]:
+            if not row[id_col_name]:
                 gen_uuid = str(uuid.uuid4())
-                row["Id"] = gen_uuid
-                manifest.loc[idx, 'Id'] = gen_uuid
+                row[id_col_name] = gen_uuid
+                manifest.loc[idx, id_col_name] = gen_uuid
+
 
         # add entityId as a column if not already there or
         # fill any blanks with an empty string.

From ea7bf7fb35b7db0841123872d232ed9b0a1f909c Mon Sep 17 00:00:00 2001
From: Mialy DeFelice
Date: Mon, 6 Nov 2023 11:18:51 -0800
Subject: [PATCH 018/199] WIP: add display_name_as_label to parser, needs to
 be cleaned and documented

---
 schematic/schemas/data_model_parser.py | 126 +++++++++++++++++++++----
 1 file changed, 106 insertions(+), 20 deletions(-)

diff --git a/schematic/schemas/data_model_parser.py b/schematic/schemas/data_model_parser.py
index f1684da1c..16574f98c 100644
--- a/schematic/schemas/data_model_parser.py
+++ b/schematic/schemas/data_model_parser.py
@@ -1,7 +1,7 @@
 import logging
 import pandas as pd
 import pathlib
-from typing import Any, Dict, Optional, Text, List
+from typing import Any, Dict, Optional, Text, Union
 
 from schematic.utils.df_utils import load_df
 from schematic.utils.io_utils import load_json
@@ -24,6 +24,7 @@ class DataModelParser:
     def __init__(
         self,
         path_to_data_model: str,
+        display_name_as_label:bool=False,
     ) -> None:
         """
         Args:
@@ -33,6 +34,7 @@ def __init__(
         self.path_to_data_model = path_to_data_model
         self.model_type = self.get_model_type()
         self.base_schema_path = None
+        self.display_name_as_label = display_name_as_label
 
     def _get_base_schema_path(self, base_schema: str = None) -> str:
         """Evaluate path to base schema.
@@ -97,7 +99,7 @@ def parse_model(self) -> Dict[str, dict[str, Any]]:
             model_dict = csv_parser.parse_csv_model(self.path_to_data_model)
         elif self.model_type == "JSONLD":
             jsonld_parser = DataModelJSONLDParser()
-            model_dict = jsonld_parser.parse_jsonld_model(self.path_to_data_model)
+            model_dict = jsonld_parser.parse_jsonld_model(self.path_to_data_model, self.display_name_as_label)
         else:
             raise ValueError(
                 f"Schematic only accepts models of type CSV or JSONLD, you provided a model type {self.model_type}, please resubmit in the proper format."
             )
@@ -243,7 +245,37 @@ def __init__(
         # Load relationships dictionary. 
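         # A minimal sketch of what rel_dict holds, for orientation only. The keys and
         # csv_header values shown are illustrative, not the full set defined by
         # DataModelRelationships (the jsonld keys 'sms:displayName' and 'rdfs:label'
         # do appear in the tests above):
         #   {'displayName': {'jsonld_key': 'sms:displayName', 'csv_header': 'Attribute'},
         #    'label':       {'jsonld_key': 'rdfs:label',      'csv_header': None}, ...}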
         self.rel_dict = self.dmr.define_data_model_relationships()
 
-    def parse_entry(self, rel_entry: any, id_jsonld_key: str) -> Any:
+    def parse_list_of_dict_entry(self, rel_entry, id_jsonld_key, display_name_as_label, model_jsonld):
+        parsed_rel_entry = [r[id_jsonld_key].split(":")[1] for r in rel_entry]
+        #Convert labels to display names if specified
+        if display_name_as_label:
+            parsed_rel_entry=self.convert_entry_to_dn_label(parsed_rel_entry, model_jsonld)
+        return parsed_rel_entry
+
+    def parse_string_entry(self, rel_entry, display_name_as_label, model_jsonld):
+        # Remove contexts and treat strings as appropriate.
+        if ":" in rel_entry and "http:" not in rel_entry:
+            parsed_rel_entry = rel_entry.split(":")[1]
+            # Convert true/false strings to boolean
+            if parsed_rel_entry.lower() == "true":
+                parsed_rel_entry = True
+            elif parsed_rel_entry.lower() == "false":
+                parsed_rel_entry = False
+        else:
+            parsed_rel_entry = rel_entry
+        #Convert labels to display names if specified
+        if display_name_as_label:
+            parsed_rel_entry=self.convert_entry_to_dn_label(parsed_rel_entry, model_jsonld)
+        return parsed_rel_entry
+
+    def parse_basic_entry(self, rel_entry, display_name_as_label, model_jsonld):
+        parsed_rel_entry = rel_entry
+        #Convert labels to display names if specified
+        if display_name_as_label:
+            parsed_rel_entry=self.convert_entry_to_dn_label(parsed_rel_entry, model_jsonld)
+        return parsed_rel_entry
+
+    def parse_entry(self, rel_entry: any, id_jsonld_key: str, display_name_as_label:bool, model_jsonld:list[dict]) -> Any:
         """Parse an input entry based on certain attributes
         Args:
             rel_entry: Given a single entry and relationship in a JSONLD data model, the recorded value
             id_jsonld_key, str: the jsonld key for id
         Returns:
             parsed_rel_entry: an entry that has been parsed base on its input type and characteristics.
         """
         # Retrieve ID from single value dictionary
         if type(rel_entry) == dict and len(rel_entry.keys()) == 1:
             parsed_rel_entry = rel_entry["@id"]
         # Parse list of dictionaries to make a list of entries with context stripped (will update this section when contexts added.)
         elif type(rel_entry) == list and type(rel_entry[0]) == dict:
-            parsed_rel_entry = [r[id_jsonld_key].split(":")[1] for r in rel_entry]
+            #parsed_rel_entry = [r[id_jsonld_key].split(":")[1] for r in rel_entry]
+            #Convert labels to display names if specified
+            #if display_name_as_label:
+                #parsed_rel_entry=self.convert_entry_to_dn_label(parsed_rel_entry)
+            parsed_rel_entry = self.parse_list_of_dict_entry(rel_entry, id_jsonld_key, display_name_as_label, model_jsonld)
         # Strip context from string and convert true/false to bool
         elif type(rel_entry) == str:
+            parsed_rel_entry = self.parse_string_entry(rel_entry, display_name_as_label, model_jsonld)
+
             # Remove contexts and treat strings as appropriate. 
-        if ":" in rel_entry and "http:" not in rel_entry:
-            parsed_rel_entry = rel_entry.split(":")[1]
-            # Convert true/false strings to boolean
-            if parsed_rel_entry.lower() == "true":
-                parsed_rel_entry = True
-            elif parsed_rel_entry.lower == "false":
-                parsed_rel_entry = False
-        else:
-            parsed_rel_entry = rel_entry
+            #if ":" in rel_entry and "http:" not in rel_entry:
+            #    parsed_rel_entry = rel_entry.split(":")[1]
+            #    # Convert true/false strings to boolean
+            #    if parsed_rel_entry.lower() == "true":
+            #        parsed_rel_entry = True
+            #    elif parsed_rel_entry.lower == "false":
+            #        parsed_rel_entry = False
+            #else:
+            #    parsed_rel_entry = rel_entry
+            # #Convert labels to display names if specified
+            # if display_name_as_label:
+            #     parsed_rel_entry=self.convert_entry_to_dn_label(parsed_rel_entry)
         # For anything else, use the entry as is
         else:
-            parsed_rel_entry = rel_entry
+            parsed_rel_entry = self.parse_basic_entry(rel_entry, display_name_as_label, model_jsonld)
+            #parsed_rel_entry = rel_entry
+            #Convert labels to display names if specified
+            #if display_name_as_label:
+            #    parsed_rel_entry=self.convert_entry_to_dn_label(parsed_rel_entry)
         return parsed_rel_entry
 
     def get_display_name_from_label(self, label, model_jsonld):
         jsonld_keys_to_extract = ["label", "displayName"]
         label_jsonld_key, dn_jsonld_key = [
             self.rel_dict[key]["jsonld_key"] for key in jsonld_keys_to_extract
         ]
         for entry in model_jsonld:
             # Get the attr key for the dictionary
             if dn_jsonld_key in entry:
                 # The attr_key is the entry display name if one was recorded
                 attr_key = entry[dn_jsonld_key]
             else:
                 # If not, we will use the label.
                 attr_key = entry[label_jsonld_key]
         return attr_key
 
-    def gather_jsonld_attributes_relationships(self, model_jsonld: List[dict]) -> Dict:
+    def display_name_to_label_dict(self, model_jsonld):
+        jsonld_keys_to_extract = ["label", "displayName"]
+        label_jsonld_key, dn_jsonld_key = [
+            self.rel_dict[key]["jsonld_key"] for key in jsonld_keys_to_extract
+        ]
+        dn_label_dict = {}
+        for entry in model_jsonld:
+            dn_label_dict[entry[label_jsonld_key]]=entry[dn_jsonld_key]
+        return dn_label_dict
+
+    def convert_entry_to_dn_label(self, parsed_rel_entry:Union[str,list], model_jsonld:list[dict]):
+        # Get a dictionary of labels mapped to display names
+        dn_label_dict = self.display_name_to_label_dict(model_jsonld=model_jsonld)
+
+        # Handle if using the display name as the label
+        if type(parsed_rel_entry) == list:
+            parsed_rel_entry = [dn_label_dict.get(entry) if dn_label_dict.get(entry) else entry for entry in parsed_rel_entry ]
+        elif type(parsed_rel_entry) == str:
+            converted_label = dn_label_dict.get(parsed_rel_entry)
+            if converted_label:
+                parsed_rel_entry = dn_label_dict.get(parsed_rel_entry)
+        return parsed_rel_entry
+
+    def gather_jsonld_attributes_relationships(self, model_jsonld: list[dict], display_name_as_label:bool) -> Dict:
         """
         Args:
             model_jsonld: list of dictionaries, each dictionary is an entry in the jsonld data model
         Returns:
             attr_rel_dictionary: dict,
                 {Node Display Name: 
                     {Relationships: {
                                      CSV Header: Value}}}
+            display_name_as_label, bool: Default False. If True, set the display name as the label. If the display name is not formatted properly, the standard schema label will be used instead.
         Notes:
             - Unlike a CSV the JSONLD might already have a base schema attached to it.
               So the attributes:relationship dictionary for importing a CSV vs JSONLD may not match. 
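             - As a sketch, a single entry of the returned dictionary for the example
               model used in the tests might look like (values illustrative only):
               {'Patient': {'Relationships': {'Attribute': 'Patient'}}}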
@@ -315,6 +384,8 @@ def gather_jsonld_attributes_relationships(self, model_jsonld: List[dict]) -> Di
         label_jsonld_key, subclassof_jsonld_key, id_jsonld_key, dn_jsonld_key = [
             self.rel_dict[key]["jsonld_key"] for key in jsonld_keys_to_extract
         ]
+
+
         # Build the attr_rel_dictionary
         attr_rel_dictionary = {}
         # Move through each entry in the jsonld model
@@ -342,13 +413,16 @@ def gather_jsonld_attributes_relationships(self, model_jsonld: List[dict]) -> Di
                 ):
                     # Retrieve entry value associated with the given relationship
                     rel_entry = entry[rel_vals["jsonld_key"]]
+
                     # If there is an entry parse it by type and add to the attr:relationships dictionary.
                     if rel_entry:
                         parsed_rel_entry = self.parse_entry(
-                            rel_entry=rel_entry, id_jsonld_key=id_jsonld_key
+                            rel_entry=rel_entry, id_jsonld_key=id_jsonld_key, display_name_as_label=display_name_as_label, model_jsonld=model_jsonld,
                         )
                         rel_csv_header = self.rel_dict[rel_key]["csv_header"]
+                        if rel_key == 'domainIncludes' or rel_key == 'parentOf':
+
                             # In the JSONLD the domain includes field contains the ids of attributes that the current attribute is the property/parent of.
                             # Because of this we need to handle these values differently.
                             # We will get the values in the field (parsed_val), then add the current attribute to the property key in the attr_rel_dictionary[p_attr_key].
@@ -374,20 +448,31 @@ def gather_jsonld_attributes_relationships(self, model_jsonld: List[dict]) -> Di
                                 attr_rel_dictionary.update(attr_dict_template(p_attr_key))
                                 attr_rel_dictionary[p_attr_key]["Relationships"].update({rel_csv_header:[entry[label_jsonld_key]]})
+                        #if using display name as label, make sure all values that are recorded as value are using the display_label instead of the label
+                        #elif rel_key == "rangeIncludes" and display_name_as_label and type(parsed_rel_entry) == list:
+                            #parsed_rel_entry = [dn_label_dict[entry] for entry in parsed_rel_entry]
                         else:
                             attr_rel_dictionary[attr_key]["Relationships"].update(
                                 {rel_csv_header: parsed_rel_entry}
                             )
+
                 # Add values to the dictionary that do not directly have a corollary in the CSV
                 elif (
                     rel_vals["jsonld_key"] in entry.keys()
                     and not rel_vals["csv_header"]
                 ):
                     # Retrieve entry value associated with the given relationship
-                    rel_entry = entry[rel_vals["jsonld_key"]]
-                    # If there is an entry parset it by type and add to the attr:relationships dictionary.
+                    #rel_entry = entry[rel_vals["jsonld_key"]]
+                    ## START
+                    # If using the display name as the label, ensure that the display name is set for the label
+                    #if display_name_as_label and rel_vals["jsonld_key"] == label_jsonld_key:
+                    #    rel_entry = entry[dn_jsonld_key]
+                    #else:
+                    #    rel_entry = entry[rel_vals["jsonld_key"]]
+                    ## END
+                    # If there is an entry parse it by type and add to the attr:relationships dictionary.
                     if rel_entry:
                         parsed_rel_entry = self.parse_entry(
-                            rel_entry=rel_entry, id_jsonld_key=id_jsonld_key
+                            rel_entry=rel_entry, id_jsonld_key=id_jsonld_key, display_name_as_label=display_name_as_label, model_jsonld=model_jsonld,
                         )
                         # Add relationships for each attribute and relationship to the dictionary
                         attr_rel_dictionary[attr_key]["Relationships"].update(
@@ -398,6 +483,7 @@ def gather_jsonld_attributes_relationships(self, model_jsonld: List[dict]) -> Di
     def parse_jsonld_model(
         self,
         path_to_data_model: str,
+        display_name_as_label:bool,
     ):
         """Convert raw JSONLD data model to attributes relationship dictionary. 
         Args:
             path_to_data_model: str, path to JSONLD data model
         Returns:
             model_dict: dict,
                 {Node Display Name: 
                     {Relationships: {
                                      CSV Header: Value}}}
         """
         # Load the json_ld model
         json_load = load_json(path_to_data_model)
         # Convert the loaded JSONLD graph to an attributes relationship dictionary.
-        model_dict = self.gather_jsonld_attributes_relationships(json_load["@graph"])
+        model_dict = self.gather_jsonld_attributes_relationships(json_load["@graph"], display_name_as_label)
         return model_dict

From 13b7e2154e75b3519a0f0d6f1df3f38bde588f01 Mon Sep 17 00:00:00 2001
From: Mialy DeFelice
Date: Mon, 6 Nov 2023 13:45:55 -0800
Subject: [PATCH 019/199] clean up and document parser

---
 schematic/schemas/data_model_parser.py | 122 +++++++++++++------------
 1 file changed, 63 insertions(+), 59 deletions(-)

diff --git a/schematic/schemas/data_model_parser.py b/schematic/schemas/data_model_parser.py
index 16574f98c..b2ffa92e9 100644
--- a/schematic/schemas/data_model_parser.py
+++ b/schematic/schemas/data_model_parser.py
@@ -245,22 +245,45 @@ def __init__(
         # Load relationships dictionary.
         self.rel_dict = self.dmr.define_data_model_relationships()
 
-    def parse_list_of_dict_entry(self, rel_entry, id_jsonld_key, display_name_as_label, model_jsonld):
+    def parse_list_of_dict_entry(self, rel_entry: list, id_jsonld_key: str, display_name_as_label:bool, model_jsonld: list[dict]) -> list[str]:
+        """Parse an entry that is a list of dictionaries, so it can be added to the attr_rel_dictionary
+        Args:
+            rel_entry: Given a single entry and relationship in a JSONLD data model, the recorded value
+            id_jsonld_key, str: the jsonld key for id
+            display_name_as_label: bool, flag indicating to use the display name as the label
+            model_jsonld: list of dictionaries, each dictionary is an entry in the jsonld data model
+        Returns:
+            parsed_rel_entry: an entry that has been parsed based on its input type and characteristics.
+        """
         parsed_rel_entry = [r[id_jsonld_key].split(":")[1] for r in rel_entry]
         #Convert labels to display names if specified
         if display_name_as_label:
             parsed_rel_entry=self.convert_entry_to_dn_label(parsed_rel_entry, model_jsonld)
         return parsed_rel_entry
 
-    def parse_string_entry(self, rel_entry, display_name_as_label, model_jsonld):
+    def parse_string_entry(self, rel_entry:str, display_name_as_label:bool, model_jsonld: list[dict]) -> Union[bool,str]:
+        """
+        Parse a string entry, so it can be added to the attr_rel_dictionary
+        Args:
+            rel_entry: Given a single entry and relationship in a JSONLD data model, the recorded value
+            display_name_as_label: bool, flag indicating to use the display name as the label
+            model_jsonld: list of dictionaries, each dictionary is an entry in the jsonld data model
+        Returns:
+            parsed_rel_entry: an entry that has been parsed based on its input type and characteristics.
+        """
         # Remove contexts and treat strings as appropriate. 
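         # e.g. a prefixed id such as 'bts:Patient' is stripped to 'Patient', and a
         # 'true'/'false' string becomes a Python bool, while plain strings and
         # 'http:' URLs pass through unchanged (example values illustrative).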
if ":" in rel_entry and "http:" not in rel_entry: parsed_rel_entry = rel_entry.split(":")[1] + # Convert true/false strings to boolean if parsed_rel_entry.lower() == "true": parsed_rel_entry = True elif parsed_rel_entry.lower == "false": parsed_rel_entry = False + else: + #Convert labels to display names if specified + if display_name_as_label: + parsed_rel_entry=self.convert_entry_to_dn_label(parsed_rel_entry, model_jsonld) else: parsed_rel_entry = rel_entry #Convert labels to display names if specified @@ -268,7 +291,15 @@ def parse_string_entry(self, rel_entry, display_name_as_label, model_jsonld): parsed_rel_entry=self.convert_entry_to_dn_label(parsed_rel_entry, model_jsonld) return parsed_rel_entry - def parse_basic_entry(self, rel_entry, display_name_as_label, model_jsonld): + def parse_basic_entry(self, rel_entry:str, display_name_as_label:bool, model_jsonld: list[dict]) -> str: + """For basic entry, just return or convert to display name if indicated. + Args: + rel_entry: Given a single entry and relationship in a JSONLD data model, the recorded value + display_name_as_label: bool, flag indicating to use the display name as the label + model_jsonld: list of dictionaries, each dictionary is an entry in the jsonld data model + Returns: + parsed_rel_entry: an entry that has been parsed based on its input type and characteristics. + """ parsed_rel_entry = rel_entry #Convert labels to display names if specified if display_name_as_label: @@ -276,65 +307,36 @@ def parse_basic_entry(self, rel_entry, display_name_as_label, model_jsonld): return parsed_rel_entry def parse_entry(self, rel_entry: any, id_jsonld_key: str, display_name_as_label:bool, model_jsonld:list[dict]) -> Any: - """Parse an input entry based on certain attributes + """Parse an input entry based on certain attributes so it can be added used in further downstream processing Args: rel_entry: Given a single entry and relationship in a JSONLD data model, the recorded value id_jsonld_key, str: the jsonld key for id + display_name_as_label: bool, flag indicating to use the display name as the label + model_jsonld: list of dictionaries, each dictionary is an entry in the jsonld data model Returns: - parsed_rel_entry: an entry that has been parsed base on its input type and characteristics. + parsed_rel_entry: an entry that has been parsed based on its input type and characteristics. """ # Retrieve ID from single value dictionary if type(rel_entry) == dict and len(rel_entry.keys()) == 1: parsed_rel_entry = rel_entry["@id"] # Parse list of dictionaries to make a list of entries with context stripped (will update this section when contexts added.) elif type(rel_entry) == list and type(rel_entry[0]) == dict: - #parsed_rel_entry = [r[id_jsonld_key].split(":")[1] for r in rel_entry] - #Convert labels to display names if specified - #if display_name_as_label: - #parsed_rel_entry=self.convert_entry_to_dn_label(parsed_rel_entry) parsed_rel_entry = self.parse_list_of_dict_entry(rel_entry, id_jsonld_key, display_name_as_label, model_jsonld) # Strip context from string and convert true/false to bool elif type(rel_entry) == str: parsed_rel_entry = self.parse_string_entry(rel_entry, display_name_as_label, model_jsonld) - - # Remove contexts and treat strings as appropriate. 
-            #if ":" in rel_entry and "http:" not in rel_entry:
-            #    parsed_rel_entry = rel_entry.split(":")[1]
-            #    # Convert true/false strings to boolean
-            #    if parsed_rel_entry.lower() == "true":
-            #        parsed_rel_entry = True
-            #    elif parsed_rel_entry.lower == "false":
-            #        parsed_rel_entry = False
-            #else:
-            #    parsed_rel_entry = rel_entry
-            # #Convert labels to display names if specified
-            # if display_name_as_label:
-            #     parsed_rel_entry=self.convert_entry_to_dn_label(parsed_rel_entry)
         # For anything else, use the entry as is
         else:
             parsed_rel_entry = self.parse_basic_entry(rel_entry, display_name_as_label, model_jsonld)
-            #parsed_rel_entry = rel_entry
-            #Convert labels to display names if specified
-            #if display_name_as_label:
-            #    parsed_rel_entry=self.convert_entry_to_dn_label(parsed_rel_entry)
         return parsed_rel_entry
 
-    def get_display_name_from_label(self, label, model_jsonld):
-        jsonld_keys_to_extract = ["label", "displayName"]
-        label_jsonld_key, dn_jsonld_key = [
-            self.rel_dict[key]["jsonld_key"] for key in jsonld_keys_to_extract
-        ]
-        for entry in model_jsonld:
-            # Get the attr key for the dictionary
-            if dn_jsonld_key in entry:
-                # The attr_key is the entry display name if one was recorded
-                attr_key = entry[dn_jsonld_key]
-            else:
-                # If not, we will use the label.
-                attr_key = entry[label_jsonld_key]
-        return attr_key
-
-    def display_name_to_label_dict(self, model_jsonld):
+    def label_to_dn_dict(self, model_jsonld: list[dict]):
+        """Generate a dictionary of labels to display names, so display names can easily be looked up using the label.
+        Args:
+            model_jsonld: list of dictionaries, each dictionary is an entry in the jsonld data model
+        Returns:
+            dn_label_dict: dict of model labels to display names
+        """
         jsonld_keys_to_extract = ["label", "displayName"]
         label_jsonld_key, dn_jsonld_key = [
             self.rel_dict[key]["jsonld_key"] for key in jsonld_keys_to_extract
         ]
         dn_label_dict = {}
         for entry in model_jsonld:
             dn_label_dict[entry[label_jsonld_key]]=entry[dn_jsonld_key]
         return dn_label_dict
 
-    def convert_entry_to_dn_label(self, parsed_rel_entry:Union[str,list], model_jsonld:list[dict]):
+    def convert_entry_to_dn_label(self, parsed_rel_entry:Union[str,list], model_jsonld:list[dict]) -> Union[str,list]:
+        """Convert a parsed entry to display name, taking into account the entry type
+        Args:
+            parsed_rel_entry: an entry that has been parsed based on its input type and characteristics.
+            model_jsonld: list of dictionaries, each dictionary is an entry in the jsonld data model
+        Returns:
+            parsed_rel_entry: an entry that has been parsed based on its input type and characteristics, and converted to display names. 
+ """ # Get a dictionary of display_names mapped to labels - dn_label_dict = self.display_name_to_label_dict(model_jsonld=model_jsonld) + dn_label_dict = self.label_to_dn_dict(model_jsonld=model_jsonld) # Handle if using the display name as the label if type(parsed_rel_entry) == list: @@ -361,6 +370,7 @@ def gather_jsonld_attributes_relationships(self, model_jsonld: list[dict], displ """ Args: model_jsonld: list of dictionaries, each dictionary is an entry in the jsonld data model + display_name_as_label: bool, flag indicating to use the display name as the label Returns: attr_rel_dictionary: dict, {Node Display Name: @@ -385,7 +395,6 @@ def gather_jsonld_attributes_relationships(self, model_jsonld: list[dict], displ self.rel_dict[key]["jsonld_key"] for key in jsonld_keys_to_extract ] - # Build the attr_rel_dictionary attr_rel_dictionary = {} # Move through each entry in the jsonld model @@ -444,13 +453,10 @@ def gather_jsonld_attributes_relationships(self, model_jsonld: list[dict], displ # If the parsed_val is not already recorded in the dictionary, add it elif attr_in_dict == False: # Get the display name for the parsed value - p_attr_key = self.get_display_name_from_label(parsed_val, model_jsonld) - + p_attr_key = self.convert_entry_to_dn_label(parsed_val, model_jsonld) + attr_rel_dictionary.update(attr_dict_template(p_attr_key)) attr_rel_dictionary[p_attr_key]["Relationships"].update({rel_csv_header:[entry[label_jsonld_key]]}) - #if using display name as label, make sure all values that are recorded as value are using the display_label instead of the label - #elif rel_key == "rangeIncludes" and display_name_as_label and type(parsed_rel_entry) == list: - #parsed_rel_entry = [dn_label_dict[entry] for entry in parsed_rel_entry] else: attr_rel_dictionary[attr_key]["Relationships"].update( {rel_csv_header: parsed_rel_entry} @@ -460,15 +466,12 @@ def gather_jsonld_attributes_relationships(self, model_jsonld: list[dict], displ rel_vals["jsonld_key"] in entry.keys() and not rel_vals["csv_header"] ): - # Retrieve entry value associated with the given relationship - #rel_entry = entry[rel_vals["jsonld_key"]] - ## START # If using the display name as the label, ensure that the display name is set for the label - #if display_name_as_label and rel_vals["jsonld_key"] == label_jsonld_key: - # rel_entry = entry[dn_jsonld_key] - #else: - # rel_entry = entry[rel_vals["jsonld_key"]] - ## END + if display_name_as_label and rel_vals["jsonld_key"] == label_jsonld_key: + rel_entry = entry[dn_jsonld_key] + else: + rel_entry = entry[rel_vals["jsonld_key"]] + # If there is an entry parse it by type and add to the attr:relationships dictionary. if rel_entry: parsed_rel_entry = self.parse_entry( @@ -488,6 +491,7 @@ def parse_jsonld_model( """Convert raw JSONLD data model to attributes relationship dictionary. 
         Args:
             path_to_data_model: str, path to JSONLD data model
+            display_name_as_label: bool, flag indicating to use the display name as the label
         Returns:
             model_dict: dict,
                 {Node Display Name: 

From dcd5670981525565eb44b98ae035526ecc7eb57d Mon Sep 17 00:00:00 2001
From: Mialy DeFelice
Date: Mon, 6 Nov 2023 14:22:32 -0800
Subject: [PATCH 020/199] change display_name_as_label API entry to bool

---
 schematic_api/api/openapi/api.yaml | 58 +++++++++++++++++-------------
 schematic_api/api/routes.py        |  2 +-
 2 files changed, 34 insertions(+), 26 deletions(-)

diff --git a/schematic_api/api/openapi/api.yaml b/schematic_api/api/openapi/api.yaml
index 2b153daf4..04308bbbd 100644
--- a/schematic_api/api/openapi/api.yaml
+++ b/schematic_api/api/openapi/api.yaml
@@ -101,10 +101,10 @@ paths:
       - in: query
         name: display_name_as_label
         schema:
-          type: string
+          type: boolean
           nullable: true
           default: false
-        description: If 'true' then the display name used in the model will be used as the SchemaLabel (if it is fomatted properly), if 'false', default, then a SchemaLabel will be generated.
+        description: If true then the display name used in the model will be used as the SchemaLabel (if it is formatted properly); if false (default), a SchemaLabel will be generated.
         required: false
       operationId: schematic_api.api.routes.get_manifest_route
       responses:
@@ -246,7 +246,7 @@ paths:
       - in: query
         name: display_name_as_label
         schema:
-          type: string
+          type: boolean
           nullable: true
           default: false
           description: If 'true' then the display name used in the model will be used as the SchemaLabel (if it is formatted properly), if 'false', default, then a SchemaLabel will be generated.
@@ -322,10 +322,10 @@ paths:
       - in: query
         name: display_name_as_label
         schema:
-          type: string
+          type: boolean
           nullable: true
           default: false
-          description: If 'true' then the display name used in the model will be used as the SchemaLabel (if it is fomatted properly), if 'false', default, then a SchemaLabel will be generated.
+          description: If true then the display name used in the model will be used as the SchemaLabel (if it is formatted properly); if false (default), a SchemaLabel will be generated.
         required: false
       - in: query
         name: data_type
@@ -377,7 +377,7 @@ paths:
           type: string
           nullable: false
           description: A JSON object
-          example: '[{
+          example: [{
              "Patient ID": 123,
              "Sex": "Female",
              "Year of Birth": "",
              "Diagnosis": "Healthy",
              "Component": "Patient",
              "Cancer Type": "Breast",
              "Family History": "Breast, Lung",
-          }]'
+          }]
       - in: query
         name: table_manipulation
         description: Specify the way the manifest tables should be stored on Synapse when one with the same name already exists. Options are 'replace' and 'upsert'. 'replace' will remove the rows and columns from the existing table and store the new rows and columns, preserving the name and synID. 'upsert' will add the new rows to the table and preserve the existing rows and columns in the existing table. If nothing is selected, the default is "replace"
@@ -442,10 +442,10 @@ paths:
       - in: query
         name: display_name_as_label
         schema:
-          type: string
+          type: boolean
           nullable: true
           default: false
-          description: If 'true' then the display name used in the model will be used as the SchemaLabel (if it is fomatted properly), if 'false', default, then a SchemaLabel will be generated.
+          description: If true then the display name used in the model will be used as the SchemaLabel (if it is formatted properly); if false (default), a SchemaLabel will be generated. 
required: false
       operationId: schematic_api.api.routes.get_component_requirements
       responses:
@@ -480,6 +480,14 @@ paths:
         example: >-
           https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.model.jsonld
         required: true
+      - in: query
+        name: display_name_as_label
+        schema:
+          type: boolean
+          nullable: true
+          default: false
+        description: If true then the display name used in the model will be used as the SchemaLabel (if it is formatted properly); if false (default), a SchemaLabel will be generated.
+        required: false
       - in: query
         name: data_type
         schema:
@@ -770,10 +778,10 @@ paths:
       - in: query
         name: display_name_as_label
         schema:
-          type: string
+          type: boolean
           nullable: true
           default: false
-        description: If 'true' then the display name used in the model will be used as the SchemaLabel (if it is fomatted properly), if 'false', default, then a SchemaLabel will be generated.
+        description: If true then the display name used in the model will be used as the SchemaLabel (if it is formatted properly); if false (default), a SchemaLabel will be generated.
         required: false
       responses:
         "200":
@@ -833,10 +841,10 @@ paths:
       - in: query
         name: display_name_as_label
         schema:
-          type: string
+          type: boolean
           nullable: true
           default: false
-          description: If 'true' then the display name used in the model will be used as the SchemaLabel (if it is fomatted properly), if 'false', default, then a SchemaLabel will be generated.
+          description: If true then the display name used in the model will be used as the SchemaLabel (if it is formatted properly); if false (default), a SchemaLabel will be generated.
         required: false
       responses:
         "200":
@@ -870,10 +878,10 @@ paths:
       - in: query
         name: display_name_as_label
         schema:
-          type: string
+          type: boolean
           nullable: true
           default: false
-          description: If 'true' then the display name used in the model will be used as the SchemaLabel (if it is fomatted properly), if 'false', default, then a SchemaLabel will be generated.
+          description: If true then the display name used in the model will be used as the SchemaLabel (if it is formatted properly); if false (default), a SchemaLabel will be generated.
         required: false
       responses:
         "200":
@@ -918,10 +926,10 @@ paths:
       - in: query
         name: display_name_as_label
         schema:
-          type: string
+          type: boolean
           nullable: true
           default: false
-          description: If 'true' then the display name used in the model will be used as the SchemaLabel (if it is fomatted properly), if 'false', default, then a SchemaLabel will be generated.
+          description: If true then the display name used in the model will be used as the SchemaLabel (if it is formatted properly); if false (default), a SchemaLabel will be generated.
         required: false
       responses:
         "200":
@@ -958,10 +966,10 @@ paths:
       - in: query
         name: display_name_as_label
         schema:
-          type: string
+          type: boolean
           nullable: true
           default: false
-          description: If 'true' then the display name used in the model will be used as the SchemaLabel (if it is fomatted properly), if 'false', default, then a SchemaLabel will be generated.
+          description: If true then the display name used in the model will be used as the SchemaLabel (if it is formatted properly); if false (default), a SchemaLabel will be generated. 
required: false
       responses:
         "200":
@@ -997,10 +1005,10 @@ paths:
       - in: query
         name: display_name_as_label
         schema:
-          type: string
+          type: boolean
           nullable: true
           default: false
-          description: If 'true' then the display name used in the model will be used as the SchemaLabel (if it is fomatted properly), if 'false', default, then a SchemaLabel will be generated.
+          description: If true then the display name used in the model will be used as the SchemaLabel (if it is formatted properly); if false (default), a SchemaLabel will be generated.
         required: false
       responses:
         "200":
@@ -1035,10 +1043,10 @@ paths:
       - in: query
         name: display_name_as_label
         schema:
-          type: string
+          type: boolean
           nullable: true
           default: false
-          description: If 'true' then the display name used in the model will be used as the SchemaLabel (if it is fomatted properly), if 'false', default, then a SchemaLabel will be generated.
+          description: If true then the display name used in the model will be used as the SchemaLabel (if it is formatted properly); if false (default), a SchemaLabel will be generated.
         required: false
       - in: query
         name: return_display_names
@@ -1118,10 +1126,10 @@ paths:
       - in: query
         name: display_name_as_label
         schema:
-          type: string
+          type: boolean
           nullable: true
           default: false
-          description: If 'true' then the display name used in the model will be used as the SchemaLabel (if it is fomatted properly), if 'false', default, then a SchemaLabel will be generated.
+          description: If true then the display name used in the model will be used as the SchemaLabel (if it is formatted properly); if false (default), a SchemaLabel will be generated.
         required: false
       - in: query
         name: return_display_names
diff --git a/schematic_api/api/routes.py b/schematic_api/api/routes.py
index 128abffea..50e2e2935 100644
--- a/schematic_api/api/routes.py
+++ b/schematic_api/api/routes.py
@@ -467,7 +467,7 @@ def submit_manifest_route(schema_url, display_name_as_label:bool, asset_view=Non
         return manifest_id
 
 
-def populate_manifest_route(schema_url, title=None, data_type=None, return_excel=None, display_name_as_label=display_name_as_label):
+def populate_manifest_route(schema_url, display_name_as_label:bool, title=None, data_type=None, return_excel=None):
     # call config_handler()
     config_handler()

From d82400756732a363a998adc9e477a8c8e58dbe03 Mon Sep 17 00:00:00 2001
From: Mialy DeFelice
Date: Mon, 6 Nov 2023 14:37:39 -0800
Subject: [PATCH 021/199] add display_name_as_label flag to visualization cli
 and api endpoints

---
 .../visualization/attributes_explorer.py      |  5 +--
 schematic/visualization/tangled_tree.py       |  7 ++--
 schematic_api/api/openapi/api.yaml            | 32 +++++++++++++++++++
 schematic_api/api/routes.py                   | 16 +++++-----
 4 files changed, 47 insertions(+), 13 deletions(-)

diff --git a/schematic/visualization/attributes_explorer.py b/schematic/visualization/attributes_explorer.py
index 0917172dd..70c3393a8 100644
--- a/schematic/visualization/attributes_explorer.py
+++ b/schematic/visualization/attributes_explorer.py
@@ -17,6 +17,7 @@ class AttributesExplorer():
     def __init__(self,
                  path_to_jsonld: str,
+                 display_name_as_label:bool,
                  )-> None:
 
         self.path_to_jsonld = path_to_jsonld
@@ -24,13 +25,13 @@ def __init__(self,
         self.jsonld = load_json(self.path_to_jsonld)
 
         # Instantiate Data Model Parser
-        data_model_parser = DataModelParser(path_to_data_model = self.path_to_jsonld)
+        data_model_parser = DataModelParser(path_to_data_model = self.path_to_jsonld, display_name_as_label=display_name_as_label)
         
         #Parse Model
         parsed_data_model = 
data_model_parser.parse_model()
 
         # Instantiate DataModelGraph
-        data_model_grapher = DataModelGraph(parsed_data_model)
+        data_model_grapher = DataModelGraph(parsed_data_model, display_name_as_label)
 
         # Generate graph
         self.graph_data_model = data_model_grapher.generate_data_model_graph()
diff --git a/schematic/visualization/tangled_tree.py b/schematic/visualization/tangled_tree.py
index 03bffd825..1e5831292 100644
--- a/schematic/visualization/tangled_tree.py
+++ b/schematic/visualization/tangled_tree.py
@@ -35,6 +35,7 @@ class TangledTree(object):
     def __init__(self,
                  path_to_json_ld: str,
                  figure_type: str,
+                 display_name_as_label: bool,
                  ) -> None:
         # Load jsonld
         self.path_to_json_ld = path_to_json_ld
@@ -44,13 +45,13 @@ def __init__(self,
         self.schema_name = path.basename(self.path_to_json_ld).split(".model.jsonld")[0]
 
         # Instantiate Data Model Parser
-        data_model_parser = DataModelParser(path_to_data_model = self.path_to_json_ld)
+        data_model_parser = DataModelParser(path_to_data_model = self.path_to_json_ld, display_name_as_label=display_name_as_label)
 
         #Parse Model
         parsed_data_model = data_model_parser.parse_model()
 
         # Instantiate DataModelGraph
-        data_model_grapher = DataModelGraph(parsed_data_model)
+        data_model_grapher = DataModelGraph(parsed_data_model, display_name_as_label)
 
         # Generate graph
         self.graph_data_model = data_model_grapher.generate_data_model_graph()
@@ -67,7 +68,7 @@ def __init__(self,
         self.schema_abbr = self.schema_name.split('_')[0]
 
         # Initialize AttributesExplorer
-        self.ae = AttributesExplorer(self.path_to_json_ld)
+        self.ae = AttributesExplorer(self.path_to_json_ld, display_name_as_label)
 
         # Create output paths.
         self.text_csv_output_path = self.ae.create_output_path('text_csv')
diff --git a/schematic_api/api/openapi/api.yaml b/schematic_api/api/openapi/api.yaml
index 04308bbbd..ada111735 100644
--- a/schematic_api/api/openapi/api.yaml
+++ b/schematic_api/api/openapi/api.yaml
@@ -1169,6 +1169,14 @@ paths:
         description: Figure type to generate.
         example: 'component'
         required: true
+      - in: query
+        name: display_name_as_label
+        schema:
+          type: boolean
+          nullable: true
+          default: false
+        description: If true then the display name used in the model will be used as the SchemaLabel (if it is formatted properly); if false (default), a SchemaLabel will be generated.
+        required: false
       responses:
         "200":
           description: Returns a dataframe as a JSON String.
@@ -1209,6 +1217,14 @@ paths:
         description: Text formatting type.
         example: 'plain'
         required: true
+      - in: query
+        name: display_name_as_label
+        schema:
+          type: boolean
+          nullable: true
+          default: false
+        description: If true then the display name used in the model will be used as the SchemaLabel (if it is formatted properly); if false (default), a SchemaLabel will be generated.
+        required: false
       responses:
         "200":
           description: Returns a dataframe as a JSON String.
@@ -1234,6 +1250,14 @@ paths:
         example: >-
           https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.model.jsonld
         required: true
+      - in: query
+        name: display_name_as_label
+        schema:
+          type: boolean
+          nullable: true
+          default: false
+        description: If true then the display name used in the model will be used as the SchemaLabel (if it is formatted properly); if false (default), a SchemaLabel will be generated.
+        required: false
       responses:
         "200":
           description: Returns a CSV as a JSON String.
@@ -1274,10 +1298,11 @@ paths:
         default: false
         description: Whether to include the indexes of the dataframe in the returned JSON string (true) or not (false). 
required: true
+      - in: query
+        name: display_name_as_label
+        schema:
+          type: boolean
+          nullable: true
+          default: false
+        description: If true then the display name used in the model will be used as the SchemaLabel (if it is formatted properly); if false (default), a SchemaLabel will be generated.
+        required: false
       responses:
         "200":
           description: Returns a CSV as a JSON String.
diff --git a/schematic_api/api/routes.py b/schematic_api/api/routes.py
index 50e2e2935..63652833a 100644
--- a/schematic_api/api/routes.py
+++ b/schematic_api/api/routes.py
@@ -568,36 +568,36 @@ def get_viz_attributes_explorer(schema_url):
 @cross_origin(["http://localhost", "https://sage-bionetworks.github.io"])
-def get_viz_attributes_explorer(schema_url):
+def get_viz_attributes_explorer(schema_url, display_name_as_label):
     # call config_handler()
     config_handler()
 
     # get path to temp data model file (csv or jsonld) as appropriate
     data_model = get_temp_model_path(schema_url)
 
-    attributes_csv = AttributesExplorer(data_model).parse_attributes(save_file=False)
+    attributes_csv = AttributesExplorer(data_model, display_name_as_label).parse_attributes(save_file=False)
 
     return attributes_csv
 
-def get_viz_component_attributes_explorer(schema_url, component, include_index):
+def get_viz_component_attributes_explorer(schema_url, component, include_index, display_name_as_label):
     # call config_handler()
     config_handler()
 
     # get path to temp data model file (csv or jsonld) as appropriate
     data_model = get_temp_model_path(schema_url)
 
-    attributes_csv = AttributesExplorer(data_model).parse_component_attributes(component, save_file=False, include_index=include_index)
+    attributes_csv = AttributesExplorer(data_model, display_name_as_label).parse_component_attributes(component, save_file=False, include_index=include_index)
 
     return attributes_csv
 
 @cross_origin(["http://localhost", "https://sage-bionetworks.github.io"])
-def get_viz_tangled_tree_text(schema_url, figure_type, text_format):
+def get_viz_tangled_tree_text(schema_url, figure_type, text_format, display_name_as_label):
 
     # get path to temp data model file (csv or jsonld) as appropriate
     data_model = get_temp_model_path(schema_url)
 
     # Initialize TangledTree
-    tangled_tree = TangledTree(data_model, figure_type)
+    tangled_tree = TangledTree(data_model, figure_type, display_name_as_label)
 
     # Get text for tangled tree.
     text_df = tangled_tree.get_text_for_tangled_tree(text_format, save_file=False)
 
     return text_df
 
 @cross_origin(["http://localhost", "https://sage-bionetworks.github.io"])
-def get_viz_tangled_tree_layers(schema_url, figure_type):
+def get_viz_tangled_tree_layers(schema_url, figure_type, display_name_as_label):
 
     # call config_handler()
     config_handler()
 
     # get path to temp data model file (csv or jsonld) as appropriate
     data_model = get_temp_model_path(schema_url)
 
     # Initialize Tangled Tree
-    tangled_tree = TangledTree(data_model, figure_type)
+    tangled_tree = TangledTree(data_model, figure_type, display_name_as_label)
 
     # Get tangled trees layers JSON. 
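     # As with the other visualization routes above, passing save_file=False appears
     # to return the figure data in memory rather than writing it to the output path,
     # so it can be handed straight back to the caller.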
layers = tangled_tree.get_tangled_tree_layers(save_file=False) From 51a7300f9fe86ee766605bde934e22cc94e7d4b4 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 6 Nov 2023 15:03:35 -0800 Subject: [PATCH 022/199] add updated example.single_rule.model.jsonld so can use for testing --- tests/data/example.single_rule.model.jsonld | 3210 ++++--------------- 1 file changed, 674 insertions(+), 2536 deletions(-) diff --git a/tests/data/example.single_rule.model.jsonld b/tests/data/example.single_rule.model.jsonld index 738bba05e..24da12094 100644 --- a/tests/data/example.single_rule.model.jsonld +++ b/tests/data/example.single_rule.model.jsonld @@ -7,1971 +7,6 @@ "xsd": "http://www.w3.org/2001/XMLSchema#" }, "@graph": [ - { - "@id": "schema:Text", - "@type": [ - "schema:DataType", - "rdfs:Class" - ], - "rdfs:comment": "Data type: Text.", - "rdfs:label": "Text" - }, - { - "@id": "schema:Number", - "@type": [ - "schema:DataType", - "rdfs:Class" - ], - "rdfs:comment": "Data type: Number.", - "rdfs:label": "Number" - }, - { - "@id": "schema:Integer", - "@type": "rdfs:Class", - "rdfs:comment": "Data type: Integer.", - "rdfs:label": "Integer", - "rdfs:subClassOf": { - "@id": "schema:Number" - } - }, - { - "@id": "schema:Thing", - "@type": "rdfs:Class", - "rdfs:comment": "Thing", - "rdfs:label": "Thing", - "schema:isPartOf": { - "@id": "http://schema.org" - } - }, - { - "@id": "bts:BiologicalEntity", - "@type": "rdfs:Class", - "rdfs:comment": null, - "rdfs:label": "BiologicalEntity", - "rdfs:subClassOf": { - "@id": "schema:Thing" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:OntologyClass", - "@type": "rdfs:Class", - "rdfs:comment": "a concept or class in an ontology, vocabulary or thesaurus", - "rdfs:label": "OntologyClass", - "rdfs:subClassOf": { - "@id": "schema:Thing" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:RelationshipType", - "@type": "rdfs:Class", - "rdfs:comment": "An OWL property used as an edge label", - "rdfs:label": "RelationshipType", - "rdfs:subClassOf": { - "@id": "bts:OntologyClass" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:GeneOntologyClass", - "@type": "rdfs:Class", - "rdfs:comment": "an ontology class that describes a functional aspect of a gene, gene prodoct or complex", - "rdfs:label": "GeneOntologyClass", - "rdfs:subClassOf": { - "@id": "bts:OntologyClass" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:OrganismTaxon", - "@type": "rdfs:Class", - "rdfs:comment": null, - "rdfs:label": "OrganismTaxon", - "rdfs:subClassOf": { - "@id": "bts:OntologyClass" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:OrganismalEntity", - "@type": "rdfs:Class", - "rdfs:comment": "A named entity that is either a part of an organism, a whole organism, population or clade of organisms, excluding molecular entities", - "rdfs:label": "OrganismalEntity", - "rdfs:subClassOf": { - "@id": "bts:BiologicalEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:IndividualOrganism", - "@type": "rdfs:Class", - "rdfs:comment": null, - "rdfs:label": "IndividualOrganism", - "rdfs:subClassOf": { - "@id": "bts:OrganismalEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:Case", - "@type": "rdfs:Class", - "rdfs:comment": "An individual organism that has a patient role in some clinical 
context.", - "rdfs:label": "Case", - "rdfs:subClassOf": { - "@id": "bts:IndividualOrganism" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:PopulationOfIndividualOrganisms", - "@type": "rdfs:Class", - "rdfs:comment": null, - "rdfs:label": "PopulationOfIndividualOrganisms", - "rdfs:subClassOf": { - "@id": "bts:OrganismalEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:Biosample", - "@type": "rdfs:Class", - "rdfs:comment": null, - "rdfs:label": "Biosample", - "rdfs:subClassOf": { - "@id": "bts:OrganismalEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:DiseaseOrPhenotypicFeature", - "@type": "rdfs:Class", - "rdfs:comment": "Either one of a disease or an individual phenotypic feature. Some knowledge resources such as Monarch treat these as distinct, others such as MESH conflate.", - "rdfs:label": "DiseaseOrPhenotypicFeature", - "rdfs:subClassOf": { - "@id": "bts:BiologicalEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:Disease", - "@type": "rdfs:Class", - "rdfs:comment": null, - "rdfs:label": "Disease", - "rdfs:subClassOf": { - "@id": "bts:DiseaseOrPhenotypicFeature" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:PhenotypicFeature", - "@type": "rdfs:Class", - "rdfs:comment": null, - "rdfs:label": "PhenotypicFeature", - "rdfs:subClassOf": { - "@id": "bts:DiseaseOrPhenotypicFeature" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:Environment", - "@type": "rdfs:Class", - "rdfs:comment": "A feature of the environment of an organism that influences one or more phenotypic features of that organism, potentially mediated by genes", - "rdfs:label": "Environment", - "rdfs:subClassOf": { - "@id": "bts:BiologicalEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:InformationContentEntity", - "@type": "rdfs:Class", - "rdfs:comment": "a piece of information that typically describes some piece of biology or is used as support.", - "rdfs:label": "InformationContentEntity", - "rdfs:subClassOf": { - "@id": "schema:Thing" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:ConfidenceLevel", - "@type": "rdfs:Class", - "rdfs:comment": "Level of confidence in a statement", - "rdfs:label": "ConfidenceLevel", - "rdfs:subClassOf": { - "@id": "bts:InformationContentEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:EvidenceType", - "@type": "rdfs:Class", - "rdfs:comment": "Class of evidence that supports an association", - "rdfs:label": "EvidenceType", - "rdfs:subClassOf": { - "@id": "bts:InformationContentEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:Publication", - "@type": "rdfs:Class", - "rdfs:comment": "Any published piece of information. Can refer to a whole publication, or to a part of it (e.g. a figure, figure legend, or section highlighted by NLP). 
The scope is intended to be general and include information published on the web as well as journals.", - "rdfs:label": "Publication", - "rdfs:subClassOf": { - "@id": "bts:InformationContentEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:MolecularEntity", - "@type": "rdfs:Class", - "rdfs:comment": "A gene, gene product, small molecule or macromolecule (including protein complex)", - "rdfs:label": "MolecularEntity", - "rdfs:subClassOf": { - "@id": "bts:BiologicalEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:ChemicalSubstance", - "@type": "rdfs:Class", - "rdfs:comment": "May be a chemical entity or a formulation with a chemical entity as active ingredient, or a complex material with multiple chemical entities as part", - "rdfs:label": "ChemicalSubstance", - "rdfs:subClassOf": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:Drug", - "@type": "rdfs:Class", - "rdfs:comment": "A substance intended for use in the diagnosis, cure, mitigation, treatment, or prevention of disease", - "rdfs:label": "Drug", - "rdfs:subClassOf": { - "@id": "bts:ChemicalSubstance" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:Metabolite", - "@type": "rdfs:Class", - "rdfs:comment": "Any intermediate or product resulting from metabolism. Includes primary and secondary metabolites.", - "rdfs:label": "Metabolite", - "rdfs:subClassOf": { - "@id": "bts:ChemicalSubstance" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:AnatomicalEntity", - "@type": "rdfs:Class", - "rdfs:comment": "A subcellular location, cell type or gross anatomical part", - "rdfs:label": "AnatomicalEntity", - "rdfs:subClassOf": { - "@id": "bts:OrganismalEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:LifeStage", - "@type": "rdfs:Class", - "rdfs:comment": "A stage of development or growth of an organism, including post-natal adult stages", - "rdfs:label": "LifeStage", - "rdfs:subClassOf": { - "@id": "bts:OrganismalEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:PlanetaryEntity", - "@type": "rdfs:Class", - "rdfs:comment": "Any entity or process that exists at the level of the whole planet", - "rdfs:label": "PlanetaryEntity", - "rdfs:subClassOf": { - "@id": "schema:Thing" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:EnvironmentalProcess", - "@type": "rdfs:Class", - "rdfs:comment": null, - "rdfs:label": "EnvironmentalProcess", - "rdfs:subClassOf": { - "@id": "bts:PlanetaryEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:EnvironmentalFeature", - "@type": "rdfs:Class", - "rdfs:comment": null, - "rdfs:label": "EnvironmentalFeature", - "rdfs:subClassOf": { - "@id": "bts:PlanetaryEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:ClinicalEntity", - "@type": "rdfs:Class", - "rdfs:comment": "Any entity or process that exists in the clinical domain and outside the biological realm. 
Diseases are placed under biological entities", - "rdfs:label": "ClinicalEntity", - "rdfs:subClassOf": { - "@id": "schema:Thing" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:ClinicalTrial", - "@type": "rdfs:Class", - "rdfs:comment": null, - "rdfs:label": "ClinicalTrial", - "rdfs:subClassOf": { - "@id": "bts:ClinicalEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:ClinicalIntervention", - "@type": "rdfs:Class", - "rdfs:comment": null, - "rdfs:label": "ClinicalIntervention", - "rdfs:subClassOf": { - "@id": "bts:ClinicalEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:Device", - "@type": "rdfs:Class", - "rdfs:comment": "A thing made or adapted for a particular purpose, especially a piece of mechanical or electronic equipment", - "rdfs:label": "Device", - "rdfs:subClassOf": { - "@id": "schema:Thing" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:GenomicEntity", - "@type": "rdfs:Class", - "rdfs:comment": "an entity that can either be directly located on a genome (gene, transcript, exon, regulatory region) or is encoded in a genome (protein)", - "rdfs:label": "GenomicEntity", - "rdfs:subClassOf": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:Genome", - "@type": "rdfs:Class", - "rdfs:comment": "A genome is the sum of genetic material within a cell or virion.", - "rdfs:label": "Genome", - "rdfs:subClassOf": { - "@id": "bts:GenomicEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:Transcript", - "@type": "rdfs:Class", - "rdfs:comment": "An RNA synthesized on a DNA or RNA template by an RNA polymerase", - "rdfs:label": "Transcript", - "rdfs:subClassOf": { - "@id": "bts:GenomicEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:Exon", - "@type": "rdfs:Class", - "rdfs:comment": "A region of the transcript sequence within a gene which is not removed from the primary RNA transcript by RNA splicing", - "rdfs:label": "Exon", - "rdfs:subClassOf": { - "@id": "bts:GenomicEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:CodingSequence", - "@type": "rdfs:Class", - "rdfs:comment": null, - "rdfs:label": "CodingSequence", - "rdfs:subClassOf": { - "@id": "bts:GenomicEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:MacromolecularMachine", - "@type": "rdfs:Class", - "rdfs:comment": "A union of gene, gene product, and macromolecular complex. These are the basic units of function in a cell. They either carry out individual biological activities, or they encode molecules which do this.", - "rdfs:label": "MacromolecularMachine", - "rdfs:subClassOf": { - "@id": "bts:GenomicEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:GeneOrGeneProduct", - "@type": "rdfs:Class", - "rdfs:comment": "a union of genes or gene products. 
Frequently an identifier for one will be used as proxy for another", - "rdfs:label": "GeneOrGeneProduct", - "rdfs:subClassOf": { - "@id": "bts:MacromolecularMachine" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:Gene", - "@type": "rdfs:Class", - "rdfs:comment": null, - "rdfs:label": "Gene", - "rdfs:subClassOf": { - "@id": "bts:GeneOrGeneProduct" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:GeneProduct", - "@type": "rdfs:Class", - "rdfs:comment": "The functional molecular product of a single gene. Gene products are either proteins or functional RNA molecules", - "rdfs:label": "GeneProduct", - "rdfs:subClassOf": { - "@id": "bts:GeneOrGeneProduct" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:Protein", - "@type": "rdfs:Class", - "rdfs:comment": "A gene product that is composed of a chain of amino acid sequences and is produced by ribosome-mediated translation of mRNA", - "rdfs:label": "Protein", - "rdfs:subClassOf": { - "@id": "bts:GeneProduct" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:GeneProductIsoform", - "@type": "rdfs:Class", - "rdfs:comment": "This is an abstract class that can be mixed in with different kinds of gene products to indicate that the gene product is intended to represent a specific isoform rather than a canonical or reference or generic product. The designation of canonical or reference may be arbitrary, or it may represent the superclass of all isoforms.", - "rdfs:label": "GeneProductIsoform", - "rdfs:subClassOf": { - "@id": "bts:GeneProduct" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:ProteinIsoform", - "@type": "rdfs:Class", - "rdfs:comment": "Represents a protein that is a specific isoform of the canonical or reference protein. 
See https://www.ncbi.nlm.nih.gov/pmc/articles/PMC4114032/", - "rdfs:label": "ProteinIsoform", - "rdfs:subClassOf": { - "@id": "bts:Protein" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:RnaProduct", - "@type": "rdfs:Class", - "rdfs:comment": null, - "rdfs:label": "RnaProduct", - "rdfs:subClassOf": { - "@id": "bts:GeneProduct" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:RnaProductIsoform", - "@type": "rdfs:Class", - "rdfs:comment": "Represents a protein that is a specific isoform of the canonical or reference RNA", - "rdfs:label": "RnaProductIsoform", - "rdfs:subClassOf": { - "@id": "bts:RnaProduct" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:NoncodingRnaProduct", - "@type": "rdfs:Class", - "rdfs:comment": null, - "rdfs:label": "NoncodingRnaProduct", - "rdfs:subClassOf": { - "@id": "bts:RnaProduct" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:Microrna", - "@type": "rdfs:Class", - "rdfs:comment": null, - "rdfs:label": "Microrna", - "rdfs:subClassOf": { - "@id": "bts:NoncodingRnaProduct" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:MacromolecularComplex", - "@type": "rdfs:Class", - "rdfs:comment": null, - "rdfs:label": "MacromolecularComplex", - "rdfs:subClassOf": { - "@id": "bts:MacromolecularMachine" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:GeneFamily", - "@type": "rdfs:Class", - "rdfs:comment": "any grouping of multiple genes or gene products related by common descent", - "rdfs:label": "GeneFamily", - "rdfs:subClassOf": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:Genotype", - "@type": "rdfs:Class", - "rdfs:comment": "An information content entity that describes a genome by specifying the total variation in genomic sequence and/or gene expression, relative to some extablished background", - "rdfs:label": "Genotype", - "rdfs:subClassOf": { - "@id": "bts:GenomicEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:Haplotype", - "@type": "rdfs:Class", - "rdfs:comment": "A set of zero or more Alleles on a single instance of a Sequence[VMC]", - "rdfs:label": "Haplotype", - "rdfs:subClassOf": { - "@id": "bts:GenomicEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:SequenceVariant", - "@type": "rdfs:Class", - "rdfs:comment": "An allele that varies in its sequence from what is considered the reference allele at that locus.", - "rdfs:label": "SequenceVariant", - "rdfs:subClassOf": { - "@id": "bts:GenomicEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:DrugExposure", - "@type": "rdfs:Class", - "rdfs:comment": "A drug exposure is an intake of a particular chemical substance", - "rdfs:label": "DrugExposure", - "rdfs:subClassOf": { - "@id": "bts:Environment" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:Treatment", - "@type": "rdfs:Class", - "rdfs:comment": "A treatment is targeted at a disease or phenotype and may involve multiple drug 'exposures'", - "rdfs:label": "Treatment", - "rdfs:subClassOf": { - "@id": "bts:Environment" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:GeographicLocation", - 
"@type": "rdfs:Class", - "rdfs:comment": "a location that can be described in lat/long coordinates", - "rdfs:label": "GeographicLocation", - "rdfs:subClassOf": { - "@id": "bts:PlanetaryEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:GeographicLocationAtTime", - "@type": "rdfs:Class", - "rdfs:comment": "a location that can be described in lat/long coordinates, for a particular time", - "rdfs:label": "GeographicLocationAtTime", - "rdfs:subClassOf": { - "@id": "bts:GeographicLocation" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:Occurrent", - "@type": "rdfs:Class", - "rdfs:comment": "A processual entity", - "rdfs:label": "Occurrent", - "rdfs:subClassOf": { - "@id": "schema:Thing" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:BiologicalProcessOrActivity", - "@type": "rdfs:Class", - "rdfs:comment": "Either an individual molecular activity, or a collection of causally connected molecular activities", - "rdfs:label": "BiologicalProcessOrActivity", - "rdfs:subClassOf": { - "@id": "bts:BiologicalEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:MolecularActivity", - "@type": "rdfs:Class", - "rdfs:comment": "An execution of a molecular function carried out by a gene product or macromolecular complex.", - "rdfs:label": "MolecularActivity", - "rdfs:subClassOf": { - "@id": "bts:BiologicalProcessOrActivity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:ActivityAndBehavior", - "@type": "rdfs:Class", - "rdfs:comment": "Activity or behavior of any independent integral living, organization or mechanical actor in the world", - "rdfs:label": "ActivityAndBehavior", - "rdfs:subClassOf": { - "@id": "bts:Occurrent" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:Procedure", - "@type": "rdfs:Class", - "rdfs:comment": "A series of actions conducted in a certain order or manner", - "rdfs:label": "Procedure", - "rdfs:subClassOf": { - "@id": "bts:Occurrent" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:Phenomenon", - "@type": "rdfs:Class", - "rdfs:comment": "a fact or situation that is observed to exist or happen, especially one whose cause or explanation is in question", - "rdfs:label": "Phenomenon", - "rdfs:subClassOf": { - "@id": "bts:Occurrent" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:BiologicalProcess", - "@type": "rdfs:Class", - "rdfs:comment": "One or more causally connected executions of molecular functions", - "rdfs:label": "BiologicalProcess", - "rdfs:subClassOf": { - "@id": "bts:BiologicalProcessOrActivity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:Pathway", - "@type": "rdfs:Class", - "rdfs:comment": null, - "rdfs:label": "Pathway", - "rdfs:subClassOf": { - "@id": "bts:BiologicalProcess" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:PhysiologicalProcess", - "@type": "rdfs:Class", - "rdfs:comment": null, - "rdfs:label": "PhysiologicalProcess", - "rdfs:subClassOf": { - "@id": "bts:BiologicalProcess" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:CellularComponent", - "@type": "rdfs:Class", - "rdfs:comment": "A location in or around a cell", - "rdfs:label": "CellularComponent", - 
"rdfs:subClassOf": { - "@id": "bts:AnatomicalEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:Cell", - "@type": "rdfs:Class", - "rdfs:comment": null, - "rdfs:label": "Cell", - "rdfs:subClassOf": { - "@id": "bts:AnatomicalEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:CellLine", - "@type": "rdfs:Class", - "rdfs:comment": null, - "rdfs:label": "CellLine", - "rdfs:subClassOf": { - "@id": "bts:Biosample" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:GrossAnatomicalStructure", - "@type": "rdfs:Class", - "rdfs:comment": null, - "rdfs:label": "GrossAnatomicalStructure", - "rdfs:subClassOf": { - "@id": "bts:AnatomicalEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - } - }, - { - "@id": "bts:ensembl", - "@type": "rdf:Property", - "rdfs:comment": "Ensembl ID for gene, protein or transcript", - "rdfs:label": "ensembl", - "schema:domainIncludes": [ - { - "@id": "bts:Transcript" - } - ], - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "schema:Text" - } - }, - { - "@id": "bts:hgnc", - "@type": "rdf:Property", - "rdfs:comment": "HGNC ID for gene", - "rdfs:label": "hgnc", - "schema:domainIncludes": { - "@id": "bts:Gene" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "schema:Integer" - } - }, - { - "@id": "bts:entrez", - "@type": "rdf:Property", - "rdfs:comment": "Entrez ID for gene", - "rdfs:label": "entrez", - "schema:domainIncludes": { - "@id": "bts:Gene" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "schema:Integer" - } - }, - { - "@id": "bts:refseq", - "@type": "rdf:Property", - "rdfs:comment": "Refseq ID for gene, protein or transcript", - "rdfs:label": "refseq", - "schema:domainIncludes": [ - { - "@id": "bts:Transcript" - } - ], - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "schema:Text" - } - }, - { - "@id": "bts:omim", - "@type": "rdf:Property", - "rdfs:comment": "Refseq ID for gene, protein or transcript", - "rdfs:label": "omim", - "schema:domainIncludes": [ - { - "@id": "bts:Disease" - } - ], - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "schema:Integer" - } - }, - { - "@id": "bts:umls", - "@type": "rdf:Property", - "rdfs:comment": "Refseq ID for gene, protein or transcript", - "rdfs:label": "umls", - "schema:domainIncludes": { - "@id": "bts:Disease" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "schema:Text" - } - }, - { - "@id": "bts:homologousTo", - "@type": "rdf:Property", - "rdfs:comment": "Shared ancestry between protein or gene", - "rdfs:label": "homologousTo", - "schema:domainIncludes": { - "@id": "bts:GeneOrGeneProduct" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:GeneOrGeneProduct" - } - }, - { - "@id": "bts:molecularlyInteractsWith", - "@type": "rdf:Property", - "rdfs:comment": null, - "rdfs:label": "molecularlyInteractsWith", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:geneticallyInteractsWith", - "@type": "rdf:Property", - 
"rdfs:comment": "holds between two genes whose phenotypic effects are dependent on each other in some way - such that their combined phenotypic effects are the result of some interaction between the activity of their gene products. Examples include epistasis and synthetic lethality.", - "rdfs:label": "geneticallyInteractsWith", - "schema:domainIncludes": { - "@id": "bts:Gene" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:Gene" - } - }, - { - "@id": "bts:affectsAbundanceOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one changes the amount of the other within a system of interest", - "rdfs:label": "affectsAbundanceOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:increasesAbundanceOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one increases the amount of the other within a system of interest", - "rdfs:label": "increasesAbundanceOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:decreasesAbundanceOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one decreases the amount of the other within a system of interest", - "rdfs:label": "decreasesAbundanceOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:affectsActivityOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one changes the activity of the other within a system of interest", - "rdfs:label": "affectsActivityOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:increasesActivityOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one increases the activity of the other within a system of interest", - "rdfs:label": "increasesActivityOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:decreasesActivityOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one decreases the activity of the other within a system of interest", - "rdfs:label": "decreasesActivityOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:affectsExpressionOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one changes the level of expression of the other within a system of interest", - "rdfs:label": "affectsExpressionOf", - 
"schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:GenomicEntity" - } - }, - { - "@id": "bts:increasesExpressionOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one increases the level of expression of the other within a system of interest", - "rdfs:label": "increasesExpressionOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:GenomicEntity" - } - }, - { - "@id": "bts:decreasesExpressionOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one decreases the level of expression of the other within a system of interest", - "rdfs:label": "decreasesExpressionOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:GenomicEntity" - } - }, - { - "@id": "bts:affectsFoldingOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one changes the rate or quality of folding of the other ", - "rdfs:label": "affectsFoldingOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:increasesFoldingOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one increases the rate or quality of folding of the other ", - "rdfs:label": "increasesFoldingOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:decreasesFoldingOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one decreases the rate or quality of folding of the other ", - "rdfs:label": "decreasesFoldingOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:affectsLocalizationOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one changes the localization of the other within a system of interest", - "rdfs:label": "affectsLocalizationOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:increasesLocalizationOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one increases the proper localization of the other within a system of interest", - "rdfs:label": "increasesLocalizationOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:decreasesLocalizationOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular 
entities where the action or effect of one decreases the proper localization of the other within a system of interest", - "rdfs:label": "decreasesLocalizationOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:affectsMetabolicProcessingOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one impacts the metabolic processing of the other within a system of interest", - "rdfs:label": "affectsMetabolicProcessingOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:increasesMetabolicProcessingOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one increases the rate of metabolic processing of the other within a system of interest", - "rdfs:label": "increasesMetabolicProcessingOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:decreasesMetabolicProcessingOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one decreases the rate of metabolic processing of the other within a system of interest", - "rdfs:label": "decreasesMetabolicProcessingOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:affectsMolecularModificationOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one leads to changes in the molecular modification(s) of the other (e.g. via post-translational modifications of proteins such as the addition of phosphoryl group, or via redox reaction that adds or subtracts electrons)", - "rdfs:label": "affectsMolecularModificationOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:increasesMolecularModificationOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one leads to increased molecular modification(s) of the other (e.g. via post-translational modifications of proteins such as the addition of phosphoryl group, or via redox reaction that adds or subtracts electrons)", - "rdfs:label": "increasesMolecularModificationOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:decreasesMolecularModificationOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one leads to decreased molecular modification(s) of the other (e.g. 
via post-translational modifications of proteins such as the addition of phosphoryl group, or via redox reaction that adds or subtracts electrons)", - "rdfs:label": "decreasesMolecularModificationOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:affectsSynthesisOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one impacts the rate of chemical synthesis of the other", - "rdfs:label": "affectsSynthesisOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:increasesSynthesisOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one increases the rate of chemical synthesis of the other", - "rdfs:label": "increasesSynthesisOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:decreasesSynthesisOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one decreases the rate of chemical synthesis of the other", - "rdfs:label": "decreasesSynthesisOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:affectsDegradationOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one impacts the rate of degradation of the other within a system of interest", - "rdfs:label": "affectsDegradationOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:increasesDegradationOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one increases the rate of degradation of the other within a system of interest", - "rdfs:label": "increasesDegradationOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:decreasesDegradationOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one decreases the rate of degradation of the other within a system of interest", - "rdfs:label": "decreasesDegradationOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:affectsMutationRateOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between a molecular entity and a genomic entity where the action or effect of the molecular entity impacts the rate of mutation of the genomic entity within a system of interest", - "rdfs:label": "affectsMutationRateOf", - "schema:domainIncludes": { - "@id": 
"bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:GenomicEntity" - } - }, - { - "@id": "bts:increasesMutationRateOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between a molecular entity and a genomic entity where the action or effect of the molecular entity increases the rate of mutation of the genomic entity within a system of interest", - "rdfs:label": "increasesMutationRateOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:GenomicEntity" - } - }, - { - "@id": "bts:decreasesMutationRateOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between a molecular entity and a genomic entity where the action or effect of the molecular entity decreases the rate of mutation of the genomic entity within a system of interest", - "rdfs:label": "decreasesMutationRateOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:GenomicEntity" - } - }, - { - "@id": "bts:affectsResponseTo", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one impacts the susceptibility of a biological entity or system (e.g. an organism, cell, cellular component, macromolecular machine, biological or pathological process) to the other", - "rdfs:label": "affectsResponseTo", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:increasesResponseTo", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one increases the susceptibility of a biological entity or system (e.g. an organism, cell, cellular component, macromolecular machine, biological or pathological process) to the other", - "rdfs:label": "increasesResponseTo", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:decreasesResponseTo", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one decreases the susceptibility of a biological entity or system (e.g. 
an organism, cell, cellular component, macromolecular machine, biological or pathological process) to the other", - "rdfs:label": "decreasesResponseTo", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:affectsSplicingOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between a molecular entity and an mRNA where the action or effect of the molecular entity impacts the splicing of the mRNA", - "rdfs:label": "affectsSplicingOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:Transcript" - } - }, - { - "@id": "bts:increasesSplicingOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between a molecular entity and an mRNA where the action or effect of the molecular entity increases the proper splicing of the mRNA", - "rdfs:label": "increasesSplicingOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:Transcript" - } - }, - { - "@id": "bts:decreasesSplicingOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between a molecular entity and an mRNA where the action or effect of the molecular entity decreases the proper splicing of the mRNA", - "rdfs:label": "decreasesSplicingOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:Transcript" - } - }, - { - "@id": "bts:affectsStabilityOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one impacts the stability of the other within a system of interest", - "rdfs:label": "affectsStabilityOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:increasesStabilityOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one increases the stability of the other within a system of interest", - "rdfs:label": "increasesStabilityOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:decreasesStabilityOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one decreases the stability of the other within a system of interest", - "rdfs:label": "decreasesStabilityOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:affectsTransportOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one impacts the rate of transport of the other across some boundary in a system of interest", - "rdfs:label": "affectsTransportOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": 
"bts:MolecularEntity" - } - }, - { - "@id": "bts:increasesTransportOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one increases the rate of transport of the other across some boundary in a system of interest", - "rdfs:label": "increasesTransportOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:decreasesTransportOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one decreases the rate of transport of the other across some boundary in a system of interest", - "rdfs:label": "decreasesTransportOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:affectsSecretionOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one impacts the rate of secretion of the other out of a cell, gland, or organ", - "rdfs:label": "affectsSecretionOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:increasesSecretionOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one increases the rate of secretion of the other out of a cell, gland, or organ", - "rdfs:label": "increasesSecretionOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:decreasesSecretionOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one decreases the rate of secretion of the other out of a cell, gland, or organ", - "rdfs:label": "decreasesSecretionOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:affectsUptakeOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one impacts the rate of uptake of the other into of a cell, gland, or organ", - "rdfs:label": "affectsUptakeOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:increasesUptakeOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one increases the rate of uptake of the other into of a cell, gland, or organ", - "rdfs:label": "increasesUptakeOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:decreasesUptakeOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two molecular entities where the action or effect of one decreases the rate of uptake of the other 
into a cell, gland, or organ", - "rdfs:label": "decreasesUptakeOf", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:regulates,ProcessToProcess", - "@type": "rdf:Property", - "rdfs:comment": null, - "rdfs:label": "regulates,ProcessToProcess", - "schema:domainIncludes": { - "@id": "bts:Occurrent" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:Occurrent" - } - }, - { - "@id": "bts:regulates,EntityToEntity", - "@type": "rdf:Property", - "rdfs:comment": null, - "rdfs:label": "regulates,EntityToEntity", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:hasGeneProduct", - "@type": "rdf:Property", - "rdfs:comment": "holds between a gene and a transcribed and/or translated product generated from it", - "rdfs:label": "hasGeneProduct", - "schema:domainIncludes": { - "@id": "bts:Gene" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:GeneProduct" - } - }, - { - "@id": "bts:inPathwayWith", - "@type": "rdf:Property", - "rdfs:comment": "holds between two genes or gene products that are part of the same biological pathway", - "rdfs:label": "inPathwayWith", - "schema:domainIncludes": { - "@id": "bts:GeneOrGeneProduct" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:GeneOrGeneProduct" - } - }, - { - "@id": "bts:inComplexWith", - "@type": "rdf:Property", - "rdfs:comment": "holds between two genes or gene products that are part of (or code for products that are part of) the same macromolecular complex", - "rdfs:label": "inComplexWith", - "schema:domainIncludes": { - "@id": "bts:GeneOrGeneProduct" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:GeneOrGeneProduct" - } - }, - { - "@id": "bts:inCellPopulationWith", - "@type": "rdf:Property", - "rdfs:comment": "holds between two genes or gene products that are expressed in the same cell type or population", - "rdfs:label": "inCellPopulationWith", - "schema:domainIncludes": { - "@id": "bts:GeneOrGeneProduct" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:GeneOrGeneProduct" - } - }, - { - "@id": "bts:geneAssociatedWithCondition", - "@type": "rdf:Property", - "rdfs:comment": "holds between a gene and a disease or phenotypic feature that the gene or its alleles/products may influence, contribute to, or correlate with", - "rdfs:label": "geneAssociatedWithCondition", - "schema:domainIncludes": { - "@id": "bts:Gene" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:DiseaseOrPhenotypicFeature" - } - }, - { - "@id": "bts:treats", - "@type": "rdf:Property", - "rdfs:comment": "holds between a therapeutic procedure or chemical substance and a disease or phenotypic feature that it is used to treat", - "rdfs:label": "treats", - "schema:domainIncludes": { - "@id": "bts:Treatment" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:DiseaseOrPhenotypicFeature" - } - }, - { - "@id": 
"bts:correlatedWith", - "@type": "rdf:Property", - "rdfs:comment": "holds between a disease or phenotypic feature and a measurable molecular entity that is used as an indicator of the presence or state of the disease or feature.", - "rdfs:label": "correlatedWith", - "schema:domainIncludes": { - "@id": "bts:DiseaseOrPhenotypicFeature" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:hasBiomarker", - "@type": "rdf:Property", - "rdfs:comment": "holds between a disease or phenotypic feature and a measurable molecular entity that is used as an indicator of the presence or state of the disease or feature.", - "rdfs:label": "hasBiomarker", - "schema:domainIncludes": { - "@id": "bts:DiseaseOrPhenotypicFeature" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:MolecularEntity" - } - }, - { - "@id": "bts:biomarkerFor", - "@type": "rdf:Property", - "rdfs:comment": "holds between a measurable molecular entity and a disease or phenotypic feature, where the entity is used as an indicator of the presence or state of the disease or feature.", - "rdfs:label": "biomarkerFor", - "schema:domainIncludes": { - "@id": "bts:MolecularEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:DiseaseOrPhenotypicFeature" - } - }, - { - "@id": "bts:expressedIn", - "@type": "rdf:Property", - "rdfs:comment": "holds between a gene or gene product and an anatomical entity in which it is expressed", - "rdfs:label": "expressedIn", - "schema:domainIncludes": { - "@id": "bts:GeneOrGeneProduct" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:AnatomicalEntity" - } - }, - { - "@id": "bts:expresses", - "@type": "rdf:Property", - "rdfs:comment": "holds between an anatomical entity and gene or gene product that is expressed there", - "rdfs:label": "expresses", - "schema:domainIncludes": { - "@id": "bts:AnatomicalEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:GeneOrGeneProduct" - } - }, - { - "@id": "bts:hasPhenotype", - "@type": "rdf:Property", - "rdfs:comment": "holds between a biological entity and a phenotype, where a phenotype is construed broadly as any kind of quality of an organism part, a collection of these qualities, or a change in quality or qualities (e.g. abnormally increased temperature). 
", - "rdfs:label": "hasPhenotype", - "schema:domainIncludes": { - "@id": "bts:BiologicalEntity" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:DiseaseOrPhenotypicFeature" - } - }, - { - "@id": "bts:precedes", - "@type": "rdf:Property", - "rdfs:comment": "holds between two processes, where one completes before the other begins", - "rdfs:label": "precedes", - "schema:domainIncludes": { - "@id": "bts:Occurrent" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:Occurrent" - } - }, - { - "@id": "bts:subclassOf", - "@type": "rdf:Property", - "rdfs:comment": "holds between two classes where the domain class is a specialization of the range class", - "rdfs:label": "subclassOf", - "schema:domainIncludes": { - "@id": "bts:OntologyClass" - }, - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": { - "@id": "bts:OntologyClass" - } - }, { "@id": "bts:Patient", "@type": "rdfs:Class", @@ -2016,219 +51,46 @@ "@id": "bts:DataProperty" } ], - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "sms:displayName": "Patient ID", - "sms:required": "sms:true", - "sms:validationRules": [] - }, - { - "@id": "bts:Sex", - "@type": "rdfs:Class", - "rdfs:comment": "TBD", - "rdfs:label": "Sex", - "rdfs:subClassOf": [ - { - "@id": "bts:DataProperty" - } - ], - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": [ - { - "@id": "bts:Female" - }, - { - "@id": "bts:Male" - }, - { - "@id": "bts:Other" - } - ], - "sms:displayName": "Sex", - "sms:required": "sms:true", - "sms:validationRules": [] - }, - { - "@id": "bts:YearofBirth", - "@type": "rdfs:Class", - "rdfs:comment": "TBD", - "rdfs:label": "YearofBirth", - "rdfs:subClassOf": [ - { - "@id": "bts:DataProperty" - } - ], - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "sms:displayName": "Year of Birth", - "sms:required": "sms:false", - "sms:validationRules": [] - }, - { - "@id": "bts:Diagnosis", - "@type": "rdfs:Class", - "rdfs:comment": "TBD", - "rdfs:label": "Diagnosis", - "rdfs:subClassOf": [ - { - "@id": "bts:DataProperty" - } - ], - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": [ - { - "@id": "bts:Healthy" - }, - { - "@id": "bts:Cancer" - } - ], - "sms:displayName": "Diagnosis", - "sms:required": "sms:true", - "sms:validationRules": [] - }, - { - "@id": "bts:Cancer", - "@type": "rdfs:Class", - "rdfs:comment": "TBD", - "rdfs:label": "Cancer", - "rdfs:subClassOf": [ - { - "@id": "bts:ValidValue" - } - ], - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "sms:displayName": "Cancer", - "sms:required": "sms:false", - "sms:requiresDependency": [ - { - "@id": "bts:CancerType" - }, - { - "@id": "bts:FamilyHistory" - } - ], - "sms:validationRules": [] - }, - { - "@id": "bts:CancerType", - "@type": "rdfs:Class", - "rdfs:comment": "TBD", - "rdfs:label": "CancerType", - "rdfs:subClassOf": [ - { - "@id": "bts:DataProperty" - } - ], - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": [ - { - "@id": "bts:Breast" - }, - { - "@id": "bts:Colorectal" - }, - { - "@id": "bts:Lung" - }, - { - "@id": "bts:Prostate" - }, - { - "@id": "bts:Skin" - } - ], - "sms:displayName": "Cancer Type", - "sms:required": "sms:true", - "sms:validationRules": [] - }, - { - "@id": "bts:FamilyHistory", - "@type": "rdfs:Class", - "rdfs:comment": "TBD", - "rdfs:label": 
"FamilyHistory", - "rdfs:subClassOf": [ - { - "@id": "bts:DataProperty" - } - ], - "schema:isPartOf": { - "@id": "http://schema.biothings.io" - }, - "schema:rangeIncludes": [ - { - "@id": "bts:Breast" - }, - { - "@id": "bts:Colorectal" - }, - { - "@id": "bts:Lung" - }, - { - "@id": "bts:Prostate" - }, - { - "@id": "bts:Skin" - } - ], - "sms:displayName": "Family History", + "schema:isPartOf": { + "@id": "http://schema.biothings.io" + }, + "sms:displayName": "Patient ID", "sms:required": "sms:true", - "sms:validationRules": [ - "list strict" - ] + "sms:validationRules": [] }, { - "@id": "bts:Biospecimen", + "@id": "bts:Sex", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "Biospecimen", + "rdfs:label": "Sex", "rdfs:subClassOf": [ { - "@id": "bts:DataType" + "@id": "bts:DataProperty" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Biospecimen", - "sms:required": "sms:false", - "sms:requiresComponent": [ - { - "@id": "bts:Patient" - } - ], - "sms:requiresDependency": [ - { - "@id": "bts:SampleID" - }, + "schema:rangeIncludes": [ { - "@id": "bts:PatientID" + "@id": "bts:Female" }, { - "@id": "bts:TissueStatus" + "@id": "bts:Male" }, { - "@id": "bts:Component" + "@id": "bts:Other" } ], + "sms:displayName": "Sex", + "sms:required": "sms:true", "sms:validationRules": [] }, { - "@id": "bts:SampleID", + "@id": "bts:YearofBirth", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "SampleID", + "rdfs:label": "YearofBirth", "rdfs:subClassOf": [ { "@id": "bts:DataProperty" @@ -2237,15 +99,15 @@ "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Sample ID", - "sms:required": "sms:true", + "sms:displayName": "Year of Birth", + "sms:required": "sms:false", "sms:validationRules": [] }, { - "@id": "bts:TissueStatus", + "@id": "bts:Diagnosis", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "TissueStatus", + "rdfs:label": "Diagnosis", "rdfs:subClassOf": [ { "@id": "bts:DataProperty" @@ -2259,171 +121,202 @@ "@id": "bts:Healthy" }, { - "@id": "bts:Malignant" + "@id": "bts:Cancer" } ], - "sms:displayName": "Tissue Status", + "sms:displayName": "Diagnosis", "sms:required": "sms:true", "sms:validationRules": [] }, { - "@id": "bts:BulkRNA-seqAssay", + "@id": "bts:Component", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "BulkRNA-seqAssay", + "rdfs:label": "Component", "rdfs:subClassOf": [ { - "@id": "bts:DataType" + "@id": "bts:Thing" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Bulk RNA-seq Assay", + "sms:displayName": "Component", "sms:required": "sms:false", - "sms:requiresComponent": [ - { - "@id": "bts:Biospecimen" - } - ], - "sms:requiresDependency": [ - { - "@id": "bts:Filename" - }, - { - "@id": "bts:SampleID" - }, - { - "@id": "bts:FileFormat" - }, + "sms:validationRules": [] + }, + { + "@id": "bts:DataType", + "@type": "rdfs:Class", + "rdfs:comment": "TBD", + "rdfs:label": "DataType", + "rdfs:subClassOf": [ { - "@id": "bts:Component" + "@id": "bts:Thing" } ], + "schema:isPartOf": { + "@id": "http://schema.biothings.io" + }, + "sms:displayName": "DataType", + "sms:required": "sms:false", "sms:validationRules": [] }, { - "@id": "bts:Filename", + "@id": "bts:DataProperty", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "Filename", + "rdfs:label": "DataProperty", "rdfs:subClassOf": [ { - "@id": "bts:DataProperty" + "@id": "bts:Thing" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": 
"Filename", - "sms:required": "sms:true", + "sms:displayName": "DataProperty", + "sms:required": "sms:false", "sms:validationRules": [] }, { - "@id": "bts:FileFormat", + "@id": "bts:Female", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "FileFormat", + "rdfs:label": "Female", "rdfs:subClassOf": [ { - "@id": "bts:DataProperty" + "@id": "bts:Sex" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "schema:rangeIncludes": [ - { - "@id": "bts:FASTQ" - }, - { - "@id": "bts:BAM" - }, - { - "@id": "bts:CRAM" - }, + "sms:displayName": "Female", + "sms:required": "sms:false", + "sms:validationRules": [] + }, + { + "@id": "bts:Male", + "@type": "rdfs:Class", + "rdfs:comment": "TBD", + "rdfs:label": "Male", + "rdfs:subClassOf": [ { - "@id": "bts:CSV/TSV" + "@id": "bts:Sex" } ], - "sms:displayName": "File Format", - "sms:required": "sms:true", + "schema:isPartOf": { + "@id": "http://schema.biothings.io" + }, + "sms:displayName": "Male", + "sms:required": "sms:false", "sms:validationRules": [] }, { - "@id": "bts:BAM", + "@id": "bts:Other", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "BAM", + "rdfs:label": "Other", "rdfs:subClassOf": [ { - "@id": "bts:ValidValue" + "@id": "bts:Sex" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "BAM", + "sms:displayName": "Other", "sms:required": "sms:false", - "sms:requiresDependency": [ + "sms:validationRules": [] + }, + { + "@id": "bts:Healthy", + "@type": "rdfs:Class", + "rdfs:comment": "TBD", + "rdfs:label": "Healthy", + "rdfs:subClassOf": [ { - "@id": "bts:GenomeBuild" + "@id": "bts:Diagnosis" + }, + { + "@id": "bts:TissueStatus" } ], + "schema:isPartOf": { + "@id": "http://schema.biothings.io" + }, + "sms:displayName": "Healthy", + "sms:required": "sms:false", "sms:validationRules": [] }, { - "@id": "bts:CRAM", + "@id": "bts:Cancer", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "CRAM", + "rdfs:label": "Cancer", "rdfs:subClassOf": [ { "@id": "bts:ValidValue" + }, + { + "@id": "bts:Diagnosis" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "CRAM", + "sms:displayName": "Cancer", "sms:required": "sms:false", "sms:requiresDependency": [ { - "@id": "bts:GenomeBuild" + "@id": "bts:CancerType" }, { - "@id": "bts:GenomeFASTA" + "@id": "bts:FamilyHistory" } ], "sms:validationRules": [] }, { - "@id": "bts:CSV/TSV", + "@id": "bts:CancerType", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "CSV/TSV", + "rdfs:label": "CancerType", "rdfs:subClassOf": [ { - "@id": "bts:ValidValue" + "@id": "bts:DataProperty" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "CSV/TSV", - "sms:required": "sms:false", - "sms:requiresDependency": [ + "schema:rangeIncludes": [ { - "@id": "bts:GenomeBuild" + "@id": "bts:Breast" + }, + { + "@id": "bts:Colorectal" + }, + { + "@id": "bts:Lung" + }, + { + "@id": "bts:Prostate" + }, + { + "@id": "bts:Skin" } ], + "sms:displayName": "Cancer Type", + "sms:required": "sms:true", "sms:validationRules": [] }, { - "@id": "bts:GenomeBuild", + "@id": "bts:FamilyHistory", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "GenomeBuild", + "rdfs:label": "FamilyHistory", "rdfs:subClassOf": [ { "@id": "bts:DataProperty" @@ -2434,156 +327,185 @@ }, "schema:rangeIncludes": [ { - "@id": "bts:GRCh37" + "@id": "bts:Breast" }, { - "@id": "bts:GRCh38" + "@id": "bts:Colorectal" }, { - "@id": "bts:GRCm38" + "@id": "bts:Lung" }, { - "@id": "bts:GRCm39" + "@id": "bts:Prostate" 
+ }, + { + "@id": "bts:Skin" } ], - "sms:displayName": "Genome Build", + "sms:displayName": "Family History", "sms:required": "sms:true", + "sms:validationRules": [ + "list strict" + ] + }, + { + "@id": "bts:ValidValue", + "@type": "rdfs:Class", + "rdfs:comment": "TBD", + "rdfs:label": "ValidValue", + "rdfs:subClassOf": [ + { + "@id": "bts:Thing" + } + ], + "schema:isPartOf": { + "@id": "http://schema.biothings.io" + }, + "sms:displayName": "ValidValue", + "sms:required": "sms:false", + "sms:validationRules": [] + }, + { + "@id": "bts:Breast", + "@type": "rdfs:Class", + "rdfs:comment": "TBD", + "rdfs:label": "Breast", + "rdfs:subClassOf": [ + { + "@id": "bts:CancerType" + }, + { + "@id": "bts:FamilyHistory" + } + ], + "schema:isPartOf": { + "@id": "http://schema.biothings.io" + }, + "sms:displayName": "Breast", + "sms:required": "sms:false", + "sms:validationRules": [] + }, + { + "@id": "bts:Colorectal", + "@type": "rdfs:Class", + "rdfs:comment": "TBD", + "rdfs:label": "Colorectal", + "rdfs:subClassOf": [ + { + "@id": "bts:CancerType" + }, + { + "@id": "bts:FamilyHistory" + } + ], + "schema:isPartOf": { + "@id": "http://schema.biothings.io" + }, + "sms:displayName": "Colorectal", + "sms:required": "sms:false", "sms:validationRules": [] }, { - "@id": "bts:GenomeFASTA", + "@id": "bts:Lung", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "GenomeFASTA", + "rdfs:label": "Lung", "rdfs:subClassOf": [ { - "@id": "bts:DataProperty" + "@id": "bts:CancerType" + }, + { + "@id": "bts:FamilyHistory" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Genome FASTA", - "sms:required": "sms:true", + "sms:displayName": "Lung", + "sms:required": "sms:false", "sms:validationRules": [] }, { - "@id": "bts:MockComponent", + "@id": "bts:Prostate", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "MockComponent", + "rdfs:label": "Prostate", "rdfs:subClassOf": [ { - "@id": "bts:DataType" + "@id": "bts:CancerType" + }, + { + "@id": "bts:FamilyHistory" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "MockComponent", + "sms:displayName": "Prostate", "sms:required": "sms:false", - "sms:requiresDependency": [ - { - "@id": "bts:Component" - }, - { - "@id": "bts:CheckList" - }, - { - "@id": "bts:CheckRegexList" - }, - { - "@id": "bts:CheckRegexSingle" - }, - { - "@id": "bts:CheckRegexFormat" - }, - { - "@id": "bts:CheckNum" - }, - { - "@id": "bts:CheckFloat" - }, - { - "@id": "bts:CheckInt" - }, - { - "@id": "bts:CheckString" - }, - { - "@id": "bts:CheckURL" - }, - { - "@id": "bts:CheckMatchatLeast" - }, - { - "@id": "bts:CheckMatchatLeastvalues" - }, - { - "@id": "bts:CheckMatchExactly" - }, - { - "@id": "bts:CheckMatchExactlyvalues" - }, - { - "@id": "bts:CheckRecommended" - }, - { - "@id": "bts:CheckAges" - }, - { - "@id": "bts:CheckUnique" - }, - { - "@id": "bts:CheckRange" - }, + "sms:validationRules": [] + }, + { + "@id": "bts:Skin", + "@type": "rdfs:Class", + "rdfs:comment": "TBD", + "rdfs:label": "Skin", + "rdfs:subClassOf": [ { - "@id": "bts:CheckDate" + "@id": "bts:CancerType" }, { - "@id": "bts:CheckNA" + "@id": "bts:FamilyHistory" } ], + "schema:isPartOf": { + "@id": "http://schema.biothings.io" + }, + "sms:displayName": "Skin", + "sms:required": "sms:false", "sms:validationRules": [] }, { - "@id": "bts:CheckList", + "@id": "bts:Biospecimen", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "CheckList", + "rdfs:label": "Biospecimen", "rdfs:subClassOf": [ { - "@id": "bts:DataProperty" + "@id": 
"bts:DataType" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "schema:rangeIncludes": [ + "sms:displayName": "Biospecimen", + "sms:required": "sms:false", + "sms:requiresComponent": [ { - "@id": "bts:Ab" + "@id": "bts:Patient" + } + ], + "sms:requiresDependency": [ + { + "@id": "bts:SampleID" }, { - "@id": "bts:Cd" + "@id": "bts:PatientID" }, { - "@id": "bts:Ef" + "@id": "bts:TissueStatus" }, { - "@id": "bts:Gh" + "@id": "bts:Component" } ], - "sms:displayName": "Check List", - "sms:required": "sms:false", - "sms:validationRules": [ - "list strict error" - ] + "sms:validationRules": [] }, { - "@id": "bts:CheckRegexList", + "@id": "bts:SampleID", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "CheckRegexList", + "rdfs:label": "SampleID", "rdfs:subClassOf": [ { "@id": "bts:DataProperty" @@ -2592,18 +514,15 @@ "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Check Regex List", - "sms:required": "sms:false", - "sms:validationRules": [ - "list strict error", - "regex match [a-f] error" - ] + "sms:displayName": "Sample ID", + "sms:required": "sms:true", + "sms:validationRules": [] }, { - "@id": "bts:CheckRegexSingle", + "@id": "bts:TissueStatus", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "CheckRegexSingle", + "rdfs:label": "TissueStatus", "rdfs:subClassOf": [ { "@id": "bts:DataProperty" @@ -2612,55 +531,76 @@ "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Check Regex Single", - "sms:required": "sms:false", - "sms:validationRules": [ - "regex search [a-f] error" - ] + "schema:rangeIncludes": [ + { + "@id": "bts:Healthy" + }, + { + "@id": "bts:Malignant" + } + ], + "sms:displayName": "Tissue Status", + "sms:required": "sms:true", + "sms:validationRules": [] }, { - "@id": "bts:CheckRegexFormat", + "@id": "bts:Malignant", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "CheckRegexFormat", + "rdfs:label": "Malignant", "rdfs:subClassOf": [ { - "@id": "bts:DataProperty" + "@id": "bts:TissueStatus" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Check Regex Format", + "sms:displayName": "Malignant", "sms:required": "sms:false", - "sms:validationRules": [ - "regex match [a-f] error" - ] + "sms:validationRules": [] }, { - "@id": "bts:CheckNum", + "@id": "bts:BulkRNA-seqAssay", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "CheckNum", + "rdfs:label": "BulkRNA-seqAssay", "rdfs:subClassOf": [ { - "@id": "bts:DataProperty" + "@id": "bts:DataType" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Check Num", + "sms:displayName": "Bulk RNA-seq Assay", "sms:required": "sms:false", - "sms:validationRules": [ - "num error" - ] + "sms:requiresComponent": [ + { + "@id": "bts:Biospecimen" + } + ], + "sms:requiresDependency": [ + { + "@id": "bts:Filename" + }, + { + "@id": "bts:SampleID" + }, + { + "@id": "bts:FileFormat" + }, + { + "@id": "bts:Component" + } + ], + "sms:validationRules": [] }, { - "@id": "bts:CheckFloat", + "@id": "bts:Filename", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "CheckFloat", + "rdfs:label": "Filename", "rdfs:subClassOf": [ { "@id": "bts:DataProperty" @@ -2669,17 +609,15 @@ "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Check Float", - "sms:required": "sms:false", - "sms:validationRules": [ - "float error" - ] + "sms:displayName": "Filename", + "sms:required": "sms:true", + "sms:validationRules": [] }, { - "@id": 
"bts:CheckInt", + "@id": "bts:FileFormat", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "CheckInt", + "rdfs:label": "FileFormat", "rdfs:subClassOf": [ { "@id": "bts:DataProperty" @@ -2688,93 +626,124 @@ "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Check Int", - "sms:required": "sms:false", - "sms:validationRules": [ - "int error" - ] + "schema:rangeIncludes": [ + { + "@id": "bts:FASTQ" + }, + { + "@id": "bts:BAM" + }, + { + "@id": "bts:CRAM" + }, + { + "@id": "bts:CSV/TSV" + } + ], + "sms:displayName": "File Format", + "sms:required": "sms:true", + "sms:validationRules": [] }, { - "@id": "bts:CheckString", + "@id": "bts:FASTQ", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "CheckString", + "rdfs:label": "FASTQ", "rdfs:subClassOf": [ { - "@id": "bts:DataProperty" + "@id": "bts:FileFormat" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Check String", + "sms:displayName": "FASTQ", "sms:required": "sms:false", - "sms:validationRules": [ - "str error" - ] + "sms:validationRules": [] }, { - "@id": "bts:CheckURL", + "@id": "bts:BAM", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "CheckURL", + "rdfs:label": "BAM", "rdfs:subClassOf": [ { - "@id": "bts:DataProperty" + "@id": "bts:ValidValue" + }, + { + "@id": "bts:FileFormat" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Check URL", + "sms:displayName": "BAM", "sms:required": "sms:false", - "sms:validationRules": [ - "url error" - ] + "sms:requiresDependency": [ + { + "@id": "bts:GenomeBuild" + } + ], + "sms:validationRules": [] }, { - "@id": "bts:CheckMatchatLeast", + "@id": "bts:CRAM", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "CheckMatchatLeast", + "rdfs:label": "CRAM", "rdfs:subClassOf": [ { - "@id": "bts:DataProperty" + "@id": "bts:ValidValue" + }, + { + "@id": "bts:FileFormat" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Check Match at Least", + "sms:displayName": "CRAM", "sms:required": "sms:false", - "sms:validationRules": [ - "matchAtLeastOne Patient.PatientID set warning" - ] + "sms:requiresDependency": [ + { + "@id": "bts:GenomeBuild" + }, + { + "@id": "bts:GenomeFASTA" + } + ], + "sms:validationRules": [] }, { - "@id": "bts:CheckMatchExactly", + "@id": "bts:CSV/TSV", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "CheckMatchExactly", + "rdfs:label": "CSV/TSV", "rdfs:subClassOf": [ { - "@id": "bts:DataProperty" + "@id": "bts:ValidValue" + }, + { + "@id": "bts:FileFormat" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Check Match Exactly", + "sms:displayName": "CSV/TSV", "sms:required": "sms:false", - "sms:validationRules": [ - "matchExactlyOne MockComponent.checkMatchExactly set warning" - ] + "sms:requiresDependency": [ + { + "@id": "bts:GenomeBuild" + } + ], + "sms:validationRules": [] }, { - "@id": "bts:CheckMatchatLeastvalues", + "@id": "bts:GenomeBuild", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "CheckMatchatLeastvalues", + "rdfs:label": "GenomeBuild", "rdfs:subClassOf": [ { "@id": "bts:DataProperty" @@ -2783,17 +752,29 @@ "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Check Match at Least values", - "sms:required": "sms:false", - "sms:validationRules": [ - "matchAtLeastOne MockComponent.checkMatchatLeastvalues value warning" - ] + "schema:rangeIncludes": [ + { + "@id": "bts:GRCh37" + }, + { + "@id": 
"bts:GRCh38" + }, + { + "@id": "bts:GRCm38" + }, + { + "@id": "bts:GRCm39" + } + ], + "sms:displayName": "Genome Build", + "sms:required": "sms:true", + "sms:validationRules": [] }, { - "@id": "bts:CheckMatchExactlyvalues", + "@id": "bts:GenomeFASTA", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "CheckMatchExactlyvalues", + "rdfs:label": "GenomeFASTA", "rdfs:subClassOf": [ { "@id": "bts:DataProperty" @@ -2802,112 +783,165 @@ "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Check Match Exactly values", - "sms:required": "sms:false", - "sms:validationRules": [ - "matchExactlyOne MockComponent.checkMatchExactlyvalues value warning" - ] + "sms:displayName": "Genome FASTA", + "sms:required": "sms:true", + "sms:validationRules": [] }, { - "@id": "bts:CheckRecommended", + "@id": "bts:GRCh37", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "CheckRecommended", + "rdfs:label": "GRCh37", "rdfs:subClassOf": [ { - "@id": "bts:DataProperty" + "@id": "bts:GenomeBuild" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Check Recommended", + "sms:displayName": "GRCh37", "sms:required": "sms:false", - "sms:validationRules": [ - "recommended warning" - ] + "sms:validationRules": [] }, { - "@id": "bts:CheckAges", + "@id": "bts:GRCh38", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "CheckAges", + "rdfs:label": "GRCh38", "rdfs:subClassOf": [ { - "@id": "bts:DataProperty" + "@id": "bts:GenomeBuild" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Check Ages", + "sms:displayName": "GRCh38", "sms:required": "sms:false", - "sms:validationRules": [ - "protectAges warning" - ] + "sms:validationRules": [] }, { - "@id": "bts:CheckUnique", + "@id": "bts:GRCm38", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "CheckUnique", + "rdfs:label": "GRCm38", "rdfs:subClassOf": [ { - "@id": "bts:DataProperty" + "@id": "bts:GenomeBuild" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Check Unique", + "sms:displayName": "GRCm38", "sms:required": "sms:false", - "sms:validationRules": [ - "unique error" - ] + "sms:validationRules": [] }, { - "@id": "bts:CheckRange", + "@id": "bts:GRCm39", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "CheckRange", + "rdfs:label": "GRCm39", "rdfs:subClassOf": [ { - "@id": "bts:DataProperty" + "@id": "bts:GenomeBuild" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Check Range", + "sms:displayName": "GRCm39", "sms:required": "sms:false", - "sms:validationRules": [ - "inRange 50 100 error" - ] + "sms:validationRules": [] }, { - "@id": "bts:CheckDate", + "@id": "bts:MockComponent", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "CheckDate", + "rdfs:label": "MockComponent", "rdfs:subClassOf": [ { - "@id": "bts:DataProperty" + "@id": "bts:DataType" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Check Date", + "sms:displayName": "MockComponent", "sms:required": "sms:false", - "sms:validationRules": [ - "date error" - ] + "sms:requiresDependency": [ + { + "@id": "bts:Component" + }, + { + "@id": "bts:CheckList" + }, + { + "@id": "bts:CheckRegexList" + }, + { + "@id": "bts:CheckRegexSingle" + }, + { + "@id": "bts:CheckRegexFormat" + }, + { + "@id": "bts:CheckRegexInteger" + }, + { + "@id": "bts:CheckNum" + }, + { + "@id": "bts:CheckFloat" + }, + { + "@id": "bts:CheckInt" + }, + { + "@id": 
"bts:CheckString" + }, + { + "@id": "bts:CheckURL" + }, + { + "@id": "bts:CheckMatchatLeast" + }, + { + "@id": "bts:CheckMatchatLeastvalues" + }, + { + "@id": "bts:CheckMatchExactly" + }, + { + "@id": "bts:CheckMatchExactlyvalues" + }, + { + "@id": "bts:CheckRecommended" + }, + { + "@id": "bts:CheckAges" + }, + { + "@id": "bts:CheckUnique" + }, + { + "@id": "bts:CheckRange" + }, + { + "@id": "bts:CheckDate" + }, + { + "@id": "bts:CheckNA" + } + ], + "sms:validationRules": [] }, { - "@id": "bts:CheckNA", + "@id": "bts:CheckList", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "CheckNA", + "rdfs:label": "CheckList", "rdfs:subClassOf": [ { "@id": "bts:DataProperty" @@ -2916,46 +950,51 @@ "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Check NA", + "schema:rangeIncludes": [ + { + "@id": "bts:Ab" + }, + { + "@id": "bts:Cd" + }, + { + "@id": "bts:Ef" + }, + { + "@id": "bts:Gh" + } + ], + "sms:displayName": "Check List", "sms:required": "sms:false", "sms:validationRules": [ - "int error", - "IsNA warning" + "list strict" ] }, { - "@id": "bts:MockRDB", + "@id": "bts:CheckRegexList", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "MockRDB", + "rdfs:label": "CheckRegexList", "rdfs:subClassOf": [ { - "@id": "bts:DataType" + "@id": "bts:DataProperty" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "MockRDB", + "sms:displayName": "Check Regex List", "sms:required": "sms:false", - "sms:requiresDependency": [ - { - "@id": "bts:Component" - }, - { - "@id": "bts:MockRDBId" - }, - { - "@id": "bts:SourceManifest" - } - ], - "sms:validationRules": [] + "sms:validationRules": [ + "list strict", + "regex match [a-f]" + ] }, { - "@id": "bts:MockRDBId", + "@id": "bts:CheckRegexSingle", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "MockRDBId", + "rdfs:label": "CheckRegexSingle", "rdfs:subClassOf": [ { "@id": "bts:DataProperty" @@ -2964,17 +1003,17 @@ "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "MockRDB_id", - "sms:required": "sms:true", + "sms:displayName": "Check Regex Single", + "sms:required": "sms:false", "sms:validationRules": [ - "int" + "regex search [a-f]" ] }, { - "@id": "bts:SourceManifest", + "@id": "bts:CheckRegexFormat", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "SourceManifest", + "rdfs:label": "CheckRegexFormat", "rdfs:subClassOf": [ { "@id": "bts:DataProperty" @@ -2983,281 +1022,316 @@ "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "SourceManifest", - "sms:required": "sms:true", - "sms:validationRules": [] + "sms:displayName": "Check Regex Format", + "sms:required": "sms:false", + "sms:validationRules": [ + "regex match [a-f]" + ] }, { - "@id": "bts:Component", + "@id": "bts:CheckRegexInteger", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "Component", + "rdfs:label": "CheckRegexInteger", "rdfs:subClassOf": [ { - "@id": "bts:Patient" + "@id": "bts:DataProperty" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Component", + "sms:displayName": "Check Regex Integer", "sms:required": "sms:false", - "sms:validationRules": [] + "sms:validationRules": [ + "regex search ^\\d+$" + ] }, { - "@id": "bts:Female", + "@id": "bts:CheckNum", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "Female", + "rdfs:label": "CheckNum", "rdfs:subClassOf": [ { - "@id": "bts:Sex" + "@id": "bts:DataProperty" } ], "schema:isPartOf": { "@id": 
"http://schema.biothings.io" }, - "sms:displayName": "Female", + "sms:displayName": "Check Num", "sms:required": "sms:false", - "sms:validationRules": [] + "sms:validationRules": [ + "num" + ] }, { - "@id": "bts:Male", + "@id": "bts:CheckFloat", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "Male", + "rdfs:label": "CheckFloat", "rdfs:subClassOf": [ { - "@id": "bts:Sex" + "@id": "bts:DataProperty" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Male", + "sms:displayName": "Check Float", "sms:required": "sms:false", - "sms:validationRules": [] + "sms:validationRules": [ + "float" + ] }, { - "@id": "bts:Other", + "@id": "bts:CheckInt", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "Other", + "rdfs:label": "CheckInt", "rdfs:subClassOf": [ { - "@id": "bts:Sex" + "@id": "bts:DataProperty" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Other", + "sms:displayName": "Check Int", "sms:required": "sms:false", - "sms:validationRules": [] + "sms:validationRules": [ + "int" + ] }, { - "@id": "bts:Healthy", + "@id": "bts:CheckString", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "Healthy", + "rdfs:label": "CheckString", "rdfs:subClassOf": [ { - "@id": "bts:Diagnosis" + "@id": "bts:DataProperty" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Healthy", + "sms:displayName": "Check String", "sms:required": "sms:false", - "sms:validationRules": [] + "sms:validationRules": [ + "str" + ] }, { - "@id": "bts:Breast", + "@id": "bts:CheckURL", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "Breast", + "rdfs:label": "CheckURL", "rdfs:subClassOf": [ { - "@id": "bts:CancerType" + "@id": "bts:DataProperty" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Breast", + "sms:displayName": "Check URL", "sms:required": "sms:false", - "sms:validationRules": [] + "sms:validationRules": [ + "url" + ] }, { - "@id": "bts:Colorectal", + "@id": "bts:CheckMatchatLeast", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "Colorectal", + "rdfs:label": "CheckMatchatLeast", "rdfs:subClassOf": [ { - "@id": "bts:CancerType" + "@id": "bts:DataProperty" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Colorectal", + "sms:displayName": "Check Match at Least", "sms:required": "sms:false", - "sms:validationRules": [] + "sms:validationRules": [ + "matchAtLeastOne Patient.PatientID set" + ] }, { - "@id": "bts:Lung", + "@id": "bts:CheckMatchatLeastvalues", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "Lung", + "rdfs:label": "CheckMatchatLeastvalues", "rdfs:subClassOf": [ { - "@id": "bts:CancerType" + "@id": "bts:DataProperty" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Lung", + "sms:displayName": "Check Match at Least values", "sms:required": "sms:false", - "sms:validationRules": [] + "sms:validationRules": [ + "matchAtLeastOne MockComponent.checkMatchatLeastvalues value" + ] }, { - "@id": "bts:Prostate", + "@id": "bts:CheckMatchExactly", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "Prostate", + "rdfs:label": "CheckMatchExactly", "rdfs:subClassOf": [ { - "@id": "bts:CancerType" + "@id": "bts:DataProperty" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Prostate", + "sms:displayName": "Check Match Exactly", "sms:required": "sms:false", - "sms:validationRules": [] + 
"sms:validationRules": [ + "matchExactlyOne MockComponent.checkMatchExactly set" + ] }, { - "@id": "bts:Skin", + "@id": "bts:CheckMatchExactlyvalues", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "Skin", + "rdfs:label": "CheckMatchExactlyvalues", "rdfs:subClassOf": [ { - "@id": "bts:CancerType" + "@id": "bts:DataProperty" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Skin", + "sms:displayName": "Check Match Exactly values", "sms:required": "sms:false", - "sms:validationRules": [] + "sms:validationRules": [ + "matchExactlyOne MockComponent.checkMatchExactlyvalues value" + ] }, { - "@id": "bts:Malignant", + "@id": "bts:CheckRecommended", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "Malignant", + "rdfs:label": "CheckRecommended", "rdfs:subClassOf": [ { - "@id": "bts:TissueStatus" + "@id": "bts:DataProperty" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "Malignant", + "sms:displayName": "Check Recommended", "sms:required": "sms:false", - "sms:validationRules": [] + "sms:validationRules": [ + "recommended" + ] }, { - "@id": "bts:FASTQ", + "@id": "bts:CheckAges", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "FASTQ", + "rdfs:label": "CheckAges", "rdfs:subClassOf": [ { - "@id": "bts:FileFormat" + "@id": "bts:DataProperty" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "FASTQ", + "sms:displayName": "Check Ages", "sms:required": "sms:false", - "sms:validationRules": [] + "sms:validationRules": [ + "protectAges" + ] }, { - "@id": "bts:GRCh37", + "@id": "bts:CheckUnique", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "GRCh37", + "rdfs:label": "CheckUnique", "rdfs:subClassOf": [ { - "@id": "bts:GenomeBuild" + "@id": "bts:DataProperty" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "GRCh37", + "sms:displayName": "Check Unique", "sms:required": "sms:false", - "sms:validationRules": [] + "sms:validationRules": [ + "unique error" + ] }, { - "@id": "bts:GRCh38", + "@id": "bts:CheckRange", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "GRCh38", + "rdfs:label": "CheckRange", "rdfs:subClassOf": [ { - "@id": "bts:GenomeBuild" + "@id": "bts:DataProperty" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "GRCh38", + "sms:displayName": "Check Range", "sms:required": "sms:false", - "sms:validationRules": [] + "sms:validationRules": [ + "inRange 50 100 error" + ] }, { - "@id": "bts:GRCm38", + "@id": "bts:CheckDate", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "GRCm38", + "rdfs:label": "CheckDate", "rdfs:subClassOf": [ { - "@id": "bts:GenomeBuild" + "@id": "bts:DataProperty" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "GRCm38", + "sms:displayName": "Check Date", "sms:required": "sms:false", - "sms:validationRules": [] + "sms:validationRules": [ + "date" + ] }, { - "@id": "bts:GRCm39", + "@id": "bts:CheckNA", "@type": "rdfs:Class", "rdfs:comment": "TBD", - "rdfs:label": "GRCm39", + "rdfs:label": "CheckNA", "rdfs:subClassOf": [ { - "@id": "bts:GenomeBuild" + "@id": "bts:DataProperty" } ], "schema:isPartOf": { "@id": "http://schema.biothings.io" }, - "sms:displayName": "GRCm39", + "sms:displayName": "Check NA", "sms:required": "sms:false", - "sms:validationRules": [] + "sms:validationRules": [ + "int", + "IsNA" + ] }, { "@id": "bts:Ab", @@ -3326,6 +1400,70 @@ "sms:displayName": "gh", 
"sms:required": "sms:false", "sms:validationRules": [] + }, + { + "@id": "bts:MockRDB", + "@type": "rdfs:Class", + "rdfs:comment": "TBD", + "rdfs:label": "MockRDB", + "rdfs:subClassOf": [ + { + "@id": "bts:DataType" + } + ], + "schema:isPartOf": { + "@id": "http://schema.biothings.io" + }, + "sms:displayName": "MockRDB", + "sms:required": "sms:false", + "sms:requiresDependency": [ + { + "@id": "bts:Component" + }, + { + "@id": "bts:MockRDBId" + }, + { + "@id": "bts:SourceManifest" + } + ], + "sms:validationRules": [] + }, + { + "@id": "bts:MockRDBId", + "@type": "rdfs:Class", + "rdfs:comment": "TBD", + "rdfs:label": "MockRDBId", + "rdfs:subClassOf": [ + { + "@id": "bts:DataProperty" + } + ], + "schema:isPartOf": { + "@id": "http://schema.biothings.io" + }, + "sms:displayName": "MockRDB_id", + "sms:required": "sms:true", + "sms:validationRules": [ + "int" + ] + }, + { + "@id": "bts:SourceManifest", + "@type": "rdfs:Class", + "rdfs:comment": "TBD", + "rdfs:label": "SourceManifest", + "rdfs:subClassOf": [ + { + "@id": "bts:DataProperty" + } + ], + "schema:isPartOf": { + "@id": "http://schema.biothings.io" + }, + "sms:displayName": "SourceManifest", + "sms:required": "sms:true", + "sms:validationRules": [] } ], "@id": "http://schema.biothings.io/#0.1" From 057a583537316504b8478496521cf65112bb47cd Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 6 Nov 2023 15:05:16 -0800 Subject: [PATCH 023/199] point api to temp locations for testing assets, till the refactor branch is pushed to develop --- tests/test_api.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/tests/test_api.py b/tests/test_api.py index 05c88aaec..ae5d0dd57 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -63,12 +63,15 @@ def test_manifest_json(helpers): @pytest.fixture(scope="class") def data_model_jsonld(): - data_model_jsonld ="https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.model.jsonld" + #data_model_jsonld ="https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.model.jsonld" + data_model_jsonld = "https://raw.githubusercontent.com/mialy-defelice/data_models/main/example.model.jsonld" yield data_model_jsonld @pytest.fixture(scope="class") def benchmark_data_model_jsonld(): - benchmark_data_model_jsonld = "https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.single_rule.model.jsonld" + #benchmark_data_model_jsonld = "https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.single_rule.model.jsonld" + # Placeholder till the model is updated on develop + benchmark_data_model_jsonld = "https://raw.githubusercontent.com/mialy-defelice/data_models/main/example.single_rule.model.jsonld" yield benchmark_data_model_jsonld def get_MockComponent_attribute(): @@ -76,7 +79,8 @@ def get_MockComponent_attribute(): Yield all of the mock conponent attributes one at a time TODO: pull in jsonld from fixture """ - schema_url = "https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.single_rule.model.jsonld" + #schema_url = "https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.single_rule.model.jsonld" + schema_url = "https://raw.githubusercontent.com/mialy-defelice/data_models/main/example.single_rule.model.jsonld" data_model_parser = DataModelParser(path_to_data_model = schema_url) #Parse Model parsed_data_model = data_model_parser.parse_model() From 2a3837e01a7a8b3e964040c4bce6e9a5a3bab293 Mon 
Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 6 Nov 2023 16:09:03 -0800 Subject: [PATCH 024/199] fix issues with schema tests --- schematic/utils/schema_utils.py | 2 +- tests/test_utils.py | 50 ++++++++++++++++----------------- 2 files changed, 26 insertions(+), 26 deletions(-) diff --git a/schematic/utils/schema_utils.py b/schematic/utils/schema_utils.py index 0ed709625..a12965f6b 100644 --- a/schematic/utils/schema_utils.py +++ b/schematic/utils/schema_utils.py @@ -84,7 +84,7 @@ def get_label_from_display_name(display_name:str, entry_type:str, strict_camel_c """ if use_display_name_as_label: - blacklisted_chars = ["(", ")", ".", "-", " "] + blacklisted_chars = ["(", ")", ".", " "] # Check that display name can be used as a label. valid_display_name = check_if_display_name_is_valid_label(display_name=display_name, blacklisted_chars=blacklisted_chars) if valid_display_name: diff --git a/tests/test_utils.py b/tests/test_utils.py index 7c6dd5e71..a53a95067 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -49,6 +49,14 @@ IN_GITHUB_ACTIONS = os.getenv("GITHUB_ACTIONS") +TEST_DN_DICT = { + "Bio Things": {"class": "BioThings", "property": "bioThings"}, + "bio things": {"class": "Biothings", "property": "biothings"}, + "BioThings": {"class": "BioThings", "property": "bioThings"}, + "Bio-things": {"class": "Bio-things", "property": "bio-things"}, + "bio_things": {"class": "BioThings", "property": "bioThings"}, + } + @pytest.fixture def synapse_store(): access_token = os.getenv("SYNAPSE_ACCESS_TOKEN") @@ -412,47 +420,39 @@ def test_strip_context(self, helpers, context_value): elif 'sms:required' == context_value: assert stripped_contex == ('sms', 'required') - TEST_DN_DICT = { - "Bio Things": {"class": "BioThings", "property": "bioThings"}, - "bio things": {"class": "Biothings", "property": "biothings"}, - "BioThings": {"class": "BioThings", "property": "bioThings"}, - "Bio-things": {"class": "Biothings", "property": "biothings"}, - "bio_things": {"class": "Biothings", "property": "biothings"}, - } + @pytest.mark.parametrize( - ("test_dn", "entry_types"), - (list(TEST_DN_DICT.keys()), list(TEST_DN_DICT.values())), - ids=(list(TEST_DN_DICT.keys()), list(TEST_DN_DICT.keys())) + "test_dn", + list(TEST_DN_DICT.keys()), + ids=list(TEST_DN_DICT.keys()), ) + #@pytest.mark.parametrize( + # "expected_result", + # list(TEST_DN_DICT.values()), + # ids=list(TEST_DN_DICT.keys()), + #) @pytest.mark.parametrize( "use_label", [True, False], ids=["True", "False"] ) def test_get_label_from_display_name(self, test_dn, use_label): - display_name = test_dn.keys()[0] - for entry_type, expected_result in test_dn.values(): + display_name = test_dn + for entry_type, expected_result in TEST_DN_DICT[test_dn].items(): label = "" try: - label = get_label_from_display_name(entry_type=entry_type, display_name=test_dn, use_display_name_as_label=use_label) + label = get_label_from_display_name(entry_type=entry_type, display_name=display_name, use_display_name_as_label=use_label) except: # Under these conditions should only fail if the display name cannot be used as a label. 
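For orientation, a minimal sketch of the label derivations that TEST_DN_DICT encodes; to_class_label and to_property_label below are simplified stand-ins for get_class_label_from_display_name and get_property_label_from_display_name, not the real utilities, and they do not reproduce the non-strict camel-case result ("Biothings" for "bio things") that some entries expect.

import re

def to_class_label(display_name: str) -> str:
    # Upper camel case across space/underscore boundaries, e.g. "Bio Things" -> "BioThings".
    parts = re.split(r"[\s_]+", display_name)
    return "".join(p[:1].upper() + p[1:] for p in parts)

def to_property_label(display_name: str) -> str:
    # Same conversion, but with a lower-cased first character, e.g. "Bio Things" -> "bioThings".
    label = to_class_label(display_name)
    return label[:1].lower() + label[1:]

assert to_class_label("Bio Things") == "BioThings"
assert to_property_label("Bio Things") == "bioThings"
assert to_class_label("bio_things") == "BioThings"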
- try: - assert test_dn in ["Bio Things", "bio things", "Bio-things", "bio_things"] - continue - except: - breakpoint() + assert test_dn in ["Bio Things", "bio things", "Bio-things", "bio_things"] if label: if use_label: - try: + if test_dn in ["Bio Things", "bio things"]: + assert label == expected_result + else: assert label == test_dn - except: - breakpoint() else: - try: - assert label == expected_result - except: - breakpoint() + assert label == expected_result else: return return From f71a3e619af336be2bc37ebdb19aa66774b8ea76 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 6 Nov 2023 16:12:35 -0800 Subject: [PATCH 025/199] update handling of labels to match synapse.py --- schematic/utils/schema_utils.py | 19 +++++++++++++++---- 1 file changed, 15 insertions(+), 4 deletions(-) diff --git a/schematic/utils/schema_utils.py b/schematic/utils/schema_utils.py index a12965f6b..ca43eddfd 100644 --- a/schematic/utils/schema_utils.py +++ b/schematic/utils/schema_utils.py @@ -84,17 +84,28 @@ def get_label_from_display_name(display_name:str, entry_type:str, strict_camel_c """ if use_display_name_as_label: - blacklisted_chars = ["(", ")", ".", " "] + blacklisted_chars = ["(", ")", ".", " ", "-"] # Check that display name can be used as a label. valid_display_name = check_if_display_name_is_valid_label(display_name=display_name, blacklisted_chars=blacklisted_chars) if valid_display_name: label=display_name else: - raise ValueError(f"Cannot use display name {display_name} as the schema label, becaues it is not formatted properly. Please remove all spaces and blacklisted characters: {str(blacklisted_chars)}") + if entry_type.lower()=='class': + label = [ + get_class_label_from_display_name( + str(col) + ).translate({ord(x): '' for x in blacklist_chars}) + for col in manifest_columns + ][0] + + elif entry_type.lower()=='property': + label=[ + get_property_label_from_display_name( + str(col) + ).translate({ord(x): '' for x in blacklist_chars}) + for col in manifest_columns + ][0] + logger.warning(f"Cannot use display name {display_name} as the schema label, because it is not formatted properly. Please remove all spaces and blacklisted characters: {str(blacklisted_chars)}. The following label was assigned instead: {label}") else: if entry_type.lower()=='class': From 1ddc7fc943754ecc04a53f4cf2dc81fca1dec154 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 6 Nov 2023 16:32:24 -0800 Subject: [PATCH 026/199] add display_name_as_label param to fixtures --- tests/test_viz.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_viz.py b/tests/test_viz.py index df344ad6e..5a50d3dc9 100644 --- a/tests/test_viz.py +++ b/tests/test_viz.py @@ -19,7 +19,7 @@ def attributes_explorer(helpers): path_to_jsonld = helpers.get_data_path("example.model.jsonld") # Initialize TangledTree - attributes_explorer = AttributesExplorer(path_to_jsonld) + attributes_explorer = AttributesExplorer(path_to_jsonld, display_name_as_label=False) yield attributes_explorer @pytest.fixture @@ -30,7 +30,7 @@ def tangled_tree(helpers): path_to_jsonld = helpers.get_data_path("example.model.jsonld") # Initialize TangledTree - tangled_tree = TangledTree(path_to_jsonld, figure_type) + tangled_tree = TangledTree(path_to_jsonld, figure_type, display_name_as_label=False) yield tangled_tree class TestVisualization: From d1db61f2425df8f6efcada18f849cfc3058a4c56 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 6 Nov 2023 18:16:46 -0800 Subject: [PATCH 027/199] fix get_label_from_display_name --- schematic/utils/schema_utils.py | 18 ++++++++---------- 1 file changed, 8 insertions(+), 10 deletions(-) diff --git a/schematic/utils/schema_utils.py b/schematic/utils/schema_utils.py index ca43eddfd..b09eea66c 100644 --- a/schematic/utils/schema_utils.py +++ b/schematic/utils/schema_utils.py @@ -92,19 +92,17 @@ def get_label_from_display_name(display_name:str, entry_type:str, strict_camel_c else: if entry_type.lower()=='class': label = [ - get_class_label_from_display_name( - str(col) - ).translate({ord(x): '' for x in blacklist_chars}) - for col in manifest_columns - ][0] + get_class_label_from_display_name( + str(display_name) + ).translate({ord(x): '' for x in blacklisted_chars}) + ][0] elif entry_type.lower()=='property': label=[ - get_property_label_from_display_name( - str(col) - ).translate({ord(x): '' for x in blacklist_chars}) - for col in manifest_columns - ][0] + get_property_label_from_display_name( + str(display_name) + ).translate({ord(x): '' for x in blacklisted_chars}) + ][0] logger.warning(f"Cannot use display name {display_name} as the schema label, because it is not formatted properly. Please remove all spaces and blacklisted characters: {str(blacklisted_chars)}. 
The following label was assigned instead: {label}") else: From 923706e72f0604125280939297889b29838e7566 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 6 Nov 2023 18:19:29 -0800 Subject: [PATCH 028/199] update additional tests to work properly with display_name_as_label --- tests/test_metadata.py | 15 ++++++++++++--- tests/test_schemas.py | 4 ++-- tests/test_utils.py | 7 +------ tests/test_validation.py | 3 ++- 4 files changed, 17 insertions(+), 12 deletions(-) diff --git a/tests/test_metadata.py b/tests/test_metadata.py index 58854c9e5..cd86664b5 100644 --- a/tests/test_metadata.py +++ b/tests/test_metadata.py @@ -25,7 +25,10 @@ def test_get_component_requirements(self, helpers, as_graph, display_name_as_lab # Instantiate MetadataModel meta_data_model = metadata_model(helpers, display_name_as_label) - source_component = "BulkRNA-seqAssay" + if display_name_as_label: + source_component="BulkRNAseqAssay" + else: + source_component = "BulkRNA-seqAssay" output = meta_data_model.get_component_requirements( source_component, as_graph=as_graph @@ -35,11 +38,17 @@ def test_get_component_requirements(self, helpers, as_graph, display_name_as_lab if as_graph: assert ("Biospecimen", "Patient") in output - assert ("BulkRNA-seqAssay", "Biospecimen") in output + if display_name_as_label: + assert ("BulkRNAseqAssay", "Biospecimen") in output + else: + assert ("BulkRNA-seqAssay", "Biospecimen") in output else: assert "Biospecimen" in output assert "Patient" in output - assert "BulkRNA-seqAssay" in output + if display_name_as_label: + assert "BulkRNAseqAssay" in output + else: + assert "BulkRNA-seqAssay" in output @pytest.mark.parametrize("return_excel", [None, True, False]) @pytest.mark.parametrize("display_name_as_label", [True, False], ids=["display_name_as_label-True", "display_name_as_label-False"]) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index dadf354c4..03613377a 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -397,15 +397,15 @@ def test_generate_data_model_graph(self, helpers, data_model, display_name_as_la # Check that some edges are present as expected: assert ("FamilyHistory", "Breast") in graph.edges("FamilyHistory") - assert ("BulkRNA-seqAssay", "Biospecimen") in graph.edges("BulkRNA-seqAssay") if display_name_as_label: expected_valid_values = ['ab', 'cd', 'ef', 'gh'] mock_id_label = 'MockRDB_id' - + assert ("BulkRNAseqAssay", "Biospecimen") in graph.edges("BulkRNAseqAssay") else: expected_valid_values = ["Ab", "Cd", "Ef", "Gh"] mock_id_label = 'MockRDBId' + assert ("BulkRNA-seqAssay", "Biospecimen") in graph.edges("BulkRNA-seqAssay") assert expected_valid_values == [ k diff --git a/tests/test_utils.py b/tests/test_utils.py index a53a95067..900bdcf43 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -427,11 +427,6 @@ def test_strip_context(self, helpers, context_value): ids=list(TEST_DN_DICT.keys()), ) - #@pytest.mark.parametrize( - # "expected_result", - # list(TEST_DN_DICT.values()), - # ids=list(TEST_DN_DICT.keys()), - #) @pytest.mark.parametrize( "use_label", [True, False], ids=["True", "False"] ) @@ -447,7 +442,7 @@ def test_get_label_from_display_name(self, test_dn, use_label): assert test_dn in ["Bio Things", "bio things", "Bio-things", "bio_things"] if label: if use_label: - if test_dn in ["Bio Things", "bio things"]: + if test_dn in ["Bio Things", "bio things", "Bio-things"]: assert label == expected_result else: assert label == test_dn diff --git a/tests/test_validation.py b/tests/test_validation.py index 1b447190d..ec5b0d4ad 
100644 --- a/tests/test_validation.py +++ b/tests/test_validation.py @@ -29,7 +29,8 @@ def DMGE(helpers): def metadataModel(helpers): metadataModel = MetadataModel( inputMModelLocation = helpers.get_data_path("example.model.jsonld"), - inputMModelLocationType = "local" + inputMModelLocationType = "local", + display_name_as_label=False, ) yield metadataModel From c82736350e959a4ccc60c0d9db614295c1136e64 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 6 Nov 2023 20:33:02 -0800 Subject: [PATCH 029/199] handle unique '-' situation with tests --- tests/test_utils.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/tests/test_utils.py b/tests/test_utils.py index 900bdcf43..d6c51aa6b 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -53,7 +53,7 @@ "Bio Things": {"class": "BioThings", "property": "bioThings"}, "bio things": {"class": "Biothings", "property": "biothings"}, "BioThings": {"class": "BioThings", "property": "bioThings"}, - "Bio-things": {"class": "Bio-things", "property": "bio-things"}, + "Bio-things": {"class": "Biothings", "property": "biothings"}, "bio_things": {"class": "BioThings", "property": "bioThings"}, } @@ -444,10 +444,19 @@ def test_get_label_from_display_name(self, test_dn, use_label): if use_label: if test_dn in ["Bio Things", "bio things", "Bio-things"]: assert label == expected_result + else: assert label == test_dn else: - assert label == expected_result + # The dash has odd handling + if display_name == 'Bio-things': + if entry_type == 'property': + assert label == 'bio-things' + else: + assert label == 'Bio-things' + else: + assert label == expected_result + else: return return From 4da7549679eef62e9f4aee3ca9cd8c7b502c5dee Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 20 Nov 2023 15:27:17 -0800 Subject: [PATCH 030/199] add function to retain formatting of Display name while removing blacklisted characters --- schematic/help.py | 4 ++ schematic/models/commands.py | 9 ++++- schematic/models/metadata.py | 7 +++- schematic/store/synapse.py | 76 ++++++++++++++++++++++------------ 4 files changed, 68 insertions(+), 28 deletions(-) diff --git a/schematic/help.py b/schematic/help.py index c738df1bc..6f84daa1b 100644 --- a/schematic/help.py +++ b/schematic/help.py @@ -129,6 +129,10 @@ "Upsert functionality requires primary keys to be specified in the data model and manifest as _id." "Currently it is required to use -dl/--use_display_label with table upserts." ), + "retain_dl_formatting":( + "Boolean flag, default false. When true, annotations and table columns will be uploaded with the display name formatting, with blacklisted characters removed. " + "To use for tables, use in conjunction with the use_schema_label flag." 
+ ), }, "validate": { "short_help": ("Validation of manifest files."), diff --git a/schematic/models/commands.py b/schematic/models/commands.py index eeeb7c809..0924fc7c7 100644 --- a/schematic/models/commands.py +++ b/schematic/models/commands.py @@ -103,9 +103,15 @@ def model(ctx, config): # use as `schematic model ...` default='replace', type=click.Choice(['replace', 'upsert'], case_sensitive=True), help=query_dict(model_commands, ("model", "submit", "table_manipulation"))) +@click.option( + "--retain_dl_formatting", + "-rdlf", + is_flag=True, + help=query_dict(model_commands, ("model", "submit", "retain_dl_formatting")), +) @click.pass_obj def submit_manifest( - ctx, manifest_path, dataset_id, validate_component, manifest_record_type, use_schema_label, hide_blanks, restrict_rules, project_scope, table_manipulation, + ctx, manifest_path, dataset_id, validate_component, manifest_record_type, use_schema_label, hide_blanks, restrict_rules, project_scope, table_manipulation, retain_dl_formatting ): """ Running CLI with manifest validation (optional) and submission options. @@ -130,6 +136,7 @@ def submit_manifest( hide_blanks=hide_blanks, project_scope=project_scope, table_manipulation=table_manipulation, + retain_dl_formatting=retain_dl_formatting ) if manifest_id: diff --git a/schematic/models/metadata.py b/schematic/models/metadata.py index 278f4c396..67868d154 100644 --- a/schematic/models/metadata.py +++ b/schematic/models/metadata.py @@ -297,7 +297,8 @@ def submit_metadata_manifest( hide_blanks: bool = False, access_token: str = None, project_scope: List = None, - table_manipulation: str = 'replace' + table_manipulation: str = 'replace', + retain_dl_formatting: bool = False, ) -> string: """Wrap methods that are responsible for validation of manifests for a given component, and association of the same manifest file with a specified dataset. @@ -352,6 +353,7 @@ def submit_metadata_manifest( useSchemaLabel = use_schema_label, hideBlanks = hide_blanks, table_manipulation=table_manipulation, + retain_dl_formatting=retain_dl_formatting, ) restrict_maniest = True @@ -364,6 +366,7 @@ def submit_metadata_manifest( hideBlanks = hide_blanks, restrict_manifest=restrict_maniest, table_manipulation=table_manipulation, + retain_dl_formatting=retain_dl_formatting, ) logger.info(f"No validation errors occured during validation.") @@ -385,6 +388,7 @@ def submit_metadata_manifest( useSchemaLabel=use_schema_label, hideBlanks=hide_blanks, table_manipulation=table_manipulation, + retain_dl_formatting=retain_dl_formatting, ) restrict_maniest = True @@ -397,6 +401,7 @@ def submit_metadata_manifest( hideBlanks=hide_blanks, restrict_manifest=restrict_maniest, table_manipulation=table_manipulation, + retain_dl_formatting=retain_dl_formatting, ) logger.debug( diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py index f6e7e7096..e91e2be7c 100644 --- a/schematic/store/synapse.py +++ b/schematic/store/synapse.py @@ -411,7 +411,6 @@ def getFilesInStorageDataset( Raises: ValueError: Dataset ID not found. """ - # select all files within a given storage dataset folder (top level folder in a Synapse storage project or folder marked with contentType = 'dataset') walked_path = synapseutils.walk(self.syn, datasetId, includeTypes=["folder", "file"]) @@ -959,6 +958,7 @@ def uploadDB(self, restrict: bool = False, useSchemaLabel: bool = True, table_manipulation: str = 'replace', + retain_dl_formatting:bool=False, ): """ Method to upload a database to an asset store. 
In synapse, this will upload a metadata table @@ -972,7 +972,7 @@ def uploadDB(self, useSchemaLabel: bool whether to use schemaLabel (True) or display label (False) existingTableId: str of the synId of the existing table, if one already exists table_manipulation: str, 'replace' or 'upsert', in the case where a manifest already exists, should the new metadata replace the existing (replace) or be added to it (upsert) - + retain_dl_formatting: bool, used in conjunction with useSchemaLabel. Ensures column name is formatted properly to be used as a synapse annotation without converting to class_label. Uses displayLabel formatting while removing blacklisted characters. Returns: manifest_table_id: synID of the uploaded table manifest: the original manifset @@ -981,13 +981,13 @@ def uploadDB(self, """ - col_schema, table_manifest = self.formatDB(sg=sg, manifest=manifest, useSchemaLabel=useSchemaLabel) + col_schema, table_manifest = self.formatDB(sg=sg, manifest=manifest, useSchemaLabel=useSchemaLabel, retain_dl_formatting=retain_dl_formatting) manifest_table_id = self.buildDB(datasetId, table_name, col_schema, table_manifest, table_manipulation, sg, restrict,) return manifest_table_id, manifest, table_manifest - def formatDB(self, sg, manifest, useSchemaLabel): + def formatDB(self, sg, manifest, useSchemaLabel, retain_dl_formatting): """ Method to format a manifest appropriatly for upload as table @@ -995,7 +995,7 @@ def formatDB(self, sg, manifest, useSchemaLabel): sg: schemaGenerator object manifest: pd.Df manifest to upload useSchemaLabel: bool whether to use schemaLabel (True) or display label (False) - + retain_dl_formatting: bool, used in conjunction with useSchemaLabel. Ensures column name is formatted properly to be used as a synapse annotation without converting to class_label. Uses displayLabel formatting while removing blacklisted characters. Returns: col_schema: schema for table columns: type, size, etc table_manifest: formatted manifest @@ -1009,12 +1009,21 @@ def formatDB(self, sg, manifest, useSchemaLabel): table_manifest=deepcopy(manifest) if useSchemaLabel: - cols = [ - sg.se.get_class_label_from_display_name( - str(col) - ).translate({ord(x): '' for x in blacklist_chars}) - for col in manifest_columns - ] + if retain_dl_formatting: + + cols = [ + str(col).translate({ord(x): '' for x in blacklist_chars}) + for col in manifest_columns + ] + + else: + + cols = [ + sg.se.get_class_label_from_display_name( + str(col) + ).translate({ord(x): '' for x in blacklist_chars}) + for col in manifest_columns + ] cols = list(map(lambda x: x.replace('EntityId', 'entityId'), cols)) @@ -1132,7 +1141,7 @@ def upload_manifest_file(self, manifest, metadataManifestPath, datasetId, restri return manifest_synapse_file_id @missing_entity_handler - def format_row_annotations(self, se, sg, row, entityId, hideBlanks): + def format_row_annotations(self, se, sg, row, entityId, hideBlanks, retain_dl_formatting): # prepare metadata for Synapse storage (resolve display name into a name that Synapse annotations support (e.g no spaces, parenthesis) # note: the removal of special characters, will apply only to annotation keys; we are not altering the manifest # this could create a divergence between manifest column and annotations. this should be ok for most use cases. 
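As a rough illustration of the retain_dl_formatting behavior being wired in here: the display name keeps its formatting and only blacklisted characters are dropped, instead of being converted to a class label first. In this sketch, to_class_label is an approximate stand-in for sg.se.get_class_label_from_display_name, and the blacklist is the one used in the diff.

blacklist_chars = ['(', ')', '.', ' ', '-']

def to_class_label(display_name: str) -> str:
    # Approximate stand-in for the real class-label conversion.
    return "".join(w.capitalize() for w in display_name.split())

def synapse_key(display_name: str, retain_dl_formatting: bool) -> str:
    key = display_name if retain_dl_formatting else to_class_label(display_name)
    # Strip the characters Synapse column/annotation names cannot carry.
    return key.translate({ord(x): '' for x in blacklist_chars})

assert synapse_key("BulkRNA-seqAssay", retain_dl_formatting=True) == "BulkRNAseqAssay"
assert synapse_key("Genome Build", retain_dl_formatting=False) == "GenomeBuild"

The same cleanup is applied both to table column names in formatDB and to annotation keys in format_row_annotations, which keeps the two stores consistent.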
@@ -1141,8 +1150,10 @@ def format_row_annotations(self, se, sg, row, entityId, hideBlanks): blacklist_chars = ['(', ')', '.', ' ', '-'] for k, v in row.to_dict().items(): - - keySyn = se.get_class_label_from_display_name(str(k)).translate({ord(x): '' for x in blacklist_chars}) + if retain_dl_formatting: + keySyn = str(k).translate({ord(x): '' for x in blacklist_chars}) + else: + keySyn = se.get_class_label_from_display_name(str(k)).translate({ord(x): '' for x in blacklist_chars}) # Skip `Filename` and `ETag` columns when setting annotations if keySyn in ["Filename", "ETag", "eTag"]: @@ -1374,7 +1385,7 @@ def _generate_table_name(self, manifest): table_name = 'synapse_storage_manifest_table' return table_name, component_name - def _add_annotations(self, se, schemaGenerator, row, entityId, hideBlanks): + def _add_annotations(self, se, schemaGenerator, row, entityId, hideBlanks, retain_dl_formatting): """Helper function to format and add annotations to entities in Synapse. Args: se: schemaExplorer object, @@ -1382,11 +1393,12 @@ def _add_annotations(self, se, schemaGenerator, row, entityId, hideBlanks): row: current row of manifest being processed entityId (str): synapseId of entity to add annotations to hideBlanks: Boolean flag that does not upload annotation keys with blank values when true. Uploads Annotation keys with empty string values when false. + retain_dl_formatting: bool, used in conjunction with useSchemaLabel. Ensures column name is formatted properly to be used as a synapse annotation without converting to class_label. Uses displayLabel formatting while removing blacklisted characters. Returns: Annotations are added to entities in Synapse, no return. """ # Format annotations for Synapse - annos = self.format_row_annotations(se, schemaGenerator, row, entityId, hideBlanks) + annos = self.format_row_annotations(se, schemaGenerator, row, entityId, hideBlanks, retain_dl_formatting) if annos: # Store annotations for an entity folder @@ -1420,7 +1432,8 @@ def add_annotations_to_entities_files( manifest_record_type, datasetId, hideBlanks, - manifest_synapse_table_id='' + manifest_synapse_table_id='', + retain_dl_formatting=False, ): '''Depending on upload type add Ids to entityId row. Add anotations to connected files. Args: @@ -1431,6 +1444,7 @@ def add_annotations_to_entities_files( datasetId (str): synapse ID of folder containing the dataset hideBlanks (bool): Default is false -Boolean flag that does not upload annotation keys with blank values when true. Uploads Annotation keys with empty string values when false. manifest_synapse_table_id (str): Default is an empty string ''. + retain_dl_formatting: bool, used in conjunction with useSchemaLabel. Ensures column name is formatted properly to be used as a synapse annotation without converting to class_label. Uses displayLabel formatting while removing blacklisted characters. Returns: manifest (pd.DataFrame): modified to add entitiyId as appropriate. @@ -1462,7 +1476,7 @@ def add_annotations_to_entities_files( # Adding annotations to connected files. if entityId: - self._add_annotations(se, schemaGenerator, row, entityId, hideBlanks) + self._add_annotations(se, schemaGenerator, row, entityId, hideBlanks, retain_dl_formatting) logger.info(f"Added annotations to entity: {entityId}") return manifest @@ -1480,6 +1494,7 @@ def upload_manifest_as_table( useSchemaLabel, hideBlanks, table_manipulation, + retain_dl_formatting, ): """Upload manifest to Synapse as a table and csv. 
Args: @@ -1494,6 +1509,7 @@ def upload_manifest_as_table( manifest_record_type (str): valid values are 'entity', 'table' or 'both'. Specifies whether to create entity ids and folders for each row in a manifest, a Synapse table to house the entire manifest or do both. hideBlanks (bool): Default is False -Boolean flag that does not upload annotation keys with blank values when true. Uploads Annotation keys with empty string values when false. table_malnipulation (str): Specify the way the manifest tables should be store as on Synapse when one with the same name already exists. Options are 'replace' and 'upsert'. + retain_dl_formatting: bool, used in conjunction with useSchemaLabel. Ensures column name is formatted properly to be used as a synapse annotation without converting to class_label. Uses displayLabel formatting while removing blacklisted characters. Return: manifest_synapse_file_id: SynID of manifest csv uploaded to synapse. """ @@ -1505,9 +1521,10 @@ def upload_manifest_as_table( table_name=table_name, restrict=restrict, useSchemaLabel=useSchemaLabel, - table_manipulation=table_manipulation) + table_manipulation=table_manipulation, + retain_dl_formatting=retain_dl_formatting) - manifest = self.add_annotations_to_entities_files(se, schemaGenerator, manifest, manifest_record_type, datasetId, hideBlanks, manifest_synapse_table_id) + manifest = self.add_annotations_to_entities_files(se, schemaGenerator, manifest, manifest_record_type, datasetId, hideBlanks, manifest_synapse_table_id, retain_dl_formatting) # Load manifest to synapse as a CSV File manifest_synapse_file_id = self.upload_manifest_file(manifest, metadataManifestPath, datasetId, restrict, component_name = component_name) @@ -1524,7 +1541,8 @@ def upload_manifest_as_table( table_name=table_name, restrict=restrict, useSchemaLabel=useSchemaLabel, - table_manipulation='update') + table_manipulation='update', + retain_dl_formatting=retain_dl_formatting) # Set annotations for the table manifest manifest_annotations = self.format_manifest_annotations(manifest, manifest_synapse_table_id) @@ -1558,7 +1576,7 @@ def upload_manifest_as_csv( manifest_synapse_file_id (str): SynID of manifest csv uploaded to synapse. """ # remove with_entities parameter and rename add_annotations, as add_annototaions_to_files_entities. - manifest = self.add_annotations_to_entities_files(se, schemaGenerator, manifest, manifest_record_type, datasetId, hideBlanks) + manifest = self.add_annotations_to_entities_files(se, schemaGenerator, manifest, manifest_record_type, datasetId, hideBlanks, retain_dl_formatting=retain_dl_formatting) # Load manifest to synapse as a CSV File manifest_synapse_file_id = self.upload_manifest_file(manifest, @@ -1586,6 +1604,7 @@ def upload_manifest_combo( useSchemaLabel, hideBlanks, table_manipulation, + retain_dl_formatting, ): """Upload manifest to Synapse as a table and CSV with entities. Args: @@ -1601,6 +1620,7 @@ def upload_manifest_combo( useSchemaLabel (bool): Default is True - use the schema label. If False, uses the display label from the schema. Attribute display names in the schema must not only include characters that are not accepted by Synapse. Annotation names may only contain: letters, numbers, '_' and '.'. hideBlanks (bool): Default is False -Boolean flag that does not upload annotation keys with blank values when true. Uploads Annotation keys with empty string values when false. table_malnipulation (str): Specify the way the manifest tables should be store as on Synapse when one with the same name already exists. 
Options are 'replace' and 'upsert'. + retain_dl_formatting: bool, used in conjunction with useSchemaLabel. Ensures column name is formatted properly to be used as a synapse annotation without converting to class_label. Uses displayLabel formatting while removing blacklisted characters. Return: manifest_synapse_file_id (str): SynID of manifest csv uploaded to synapse. """ @@ -1611,9 +1631,10 @@ def upload_manifest_combo( table_name=table_name, restrict=restrict, useSchemaLabel=useSchemaLabel, - table_manipulation=table_manipulation) + table_manipulation=table_manipulation, + retain_dl_formatting=retain_dl_formatting) - manifest = self.add_annotations_to_entities_files(se, schemaGenerator, manifest, manifest_record_type, datasetId, hideBlanks, manifest_synapse_table_id) + manifest = self.add_annotations_to_entities_files(se, schemaGenerator, manifest, manifest_record_type, datasetId, hideBlanks, manifest_synapse_table_id, retain_dl_formatting=retain_dl_formatting) # Load manifest to synapse as a CSV File manifest_synapse_file_id = self.upload_manifest_file(manifest, metadataManifestPath, datasetId, restrict, component_name) @@ -1631,7 +1652,8 @@ def upload_manifest_combo( table_name=table_name, restrict=restrict, useSchemaLabel=useSchemaLabel, - table_manipulation='update') + table_manipulation='update', + retain_dl_formatting=retain_dl_formatting) # Set annotations for the table manifest manifest_annotations = self.format_manifest_annotations(manifest, manifest_synapse_table_id) @@ -1640,7 +1662,7 @@ def upload_manifest_combo( def associateMetadataWithFiles( self, schemaGenerator: SchemaGenerator, metadataManifestPath: str, datasetId: str, manifest_record_type: str = 'table_file_and_entities', - useSchemaLabel: bool = True, hideBlanks: bool = False, restrict_manifest = False, table_manipulation: str = 'replace', + useSchemaLabel: bool = True, hideBlanks: bool = False, restrict_manifest = False, table_manipulation: str = 'replace', retain_dl_formatting: bool = False, ) -> str: """Associate metadata with files in a storage dataset already on Synapse. Upload metadataManifest in the storage dataset folder on Synapse as well. Return synapseId of the uploaded manifest file. 
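Condensing the routing that the surrounding hunks thread retain_dl_formatting through: associateMetadataWithFiles picks one upload path per manifest_record_type and raises otherwise. In this sketch the 'file_and_entities' and 'table_file_and_entities' keys appear in the diff; 'table_and_file' is an assumption for the branch whose condition is not visible here.

def route_upload(manifest_record_type: str) -> str:
    handlers = {
        "table_and_file": "upload_manifest_as_table",  # assumed branch name
        "file_and_entities": "upload_manifest_as_csv",
        "table_file_and_entities": "upload_manifest_combo",
    }
    if manifest_record_type not in handlers:
        # Mirrors the ValueError raised at the end of associateMetadataWithFiles.
        raise ValueError("Please enter a valid manifest_record_type.")
    return handlers[manifest_record_type]

assert route_upload("table_file_and_entities") == "upload_manifest_combo"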
@@ -1705,6 +1727,7 @@ def associateMetadataWithFiles( hideBlanks=hideBlanks, manifest_record_type=manifest_record_type, table_manipulation=table_manipulation, + retain_dl_formatting=retain_dl_formatting, ) elif manifest_record_type == "file_and_entities": manifest_synapse_file_id = self.upload_manifest_as_csv( @@ -1732,6 +1755,7 @@ def associateMetadataWithFiles( hideBlanks=hideBlanks, manifest_record_type=manifest_record_type, table_manipulation=table_manipulation, + retain_dl_formatting=retain_dl_formatting, ) else: raise ValueError("Please enter a valid manifest_record_type.") From 26c44e5e27f8545bf5247ed621b3017d09e7af1d Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Wed, 22 Nov 2023 09:08:11 -0800 Subject: [PATCH 031/199] add retain_dl_formatting as a parameter in the API --- schematic_api/api/openapi/api.yaml | 7 +++++++ schematic_api/api/routes.py | 12 ++++++++++++ 2 files changed, 19 insertions(+) diff --git a/schematic_api/api/openapi/api.yaml b/schematic_api/api/openapi/api.yaml index ba04b659d..b5432c6ad 100644 --- a/schematic_api/api/openapi/api.yaml +++ b/schematic_api/api/openapi/api.yaml @@ -397,6 +397,13 @@ paths: type: boolean default: true required: false + - in: query + name: retain_dl_formatting + description: "Retain display label formatting (rather than Class Label camelcase formatting) when generating schema labels, for uploading annotations and making table column names, while stripping any blacklisted characters. Used in conjunction with use_schema_label for table column formatting." + schema: + type: boolean + default: false + required: false - in: query name: project_scope schema: diff --git a/schematic_api/api/routes.py b/schematic_api/api/routes.py index 1e3c6dc94..ecec80981 100644 --- a/schematic_api/api/routes.py +++ b/schematic_api/api/routes.py @@ -387,6 +387,7 @@ def submit_manifest_route(schema_url, data_type=None, hide_blanks=False, project_scope=None, + retain_dl_formatting:bool=False ): # call config_handler() config_handler(asset_view = asset_view) @@ -398,6 +399,8 @@ def submit_manifest_route(schema_url, else: temp_path = jsc.convert_json_file_to_csv("file_name") + # Get/parse parameters from the API + dataset_id = connexion.request.args["dataset_id"] restrict_rules = parse_bool(connexion.request.args["restrict_rules"]) @@ -414,6 +417,14 @@ def submit_manifest_route(schema_url, else: use_schema_label = parse_bool(use_schema_label) + + retain_dl_formatting = connexion.request.args["retain_dl_formatting"] + if retain_dl_formatting == 'None': + retain_dl_formatting = False + else: + retain_dl_formatting = parse_bool(retain_dl_formatting) + + if not table_manipulation: table_manipulation = "replace" @@ -437,6 +448,7 @@ def submit_manifest_route(schema_url, table_manipulation = table_manipulation, use_schema_label=use_schema_label, project_scope=project_scope, + retain_dl_formatting=retain_dl_formatting ) return manifest_id From a2016cfa8d018e6748a60e596c827d91d78a5d2b Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Wed, 29 Nov 2023 09:26:41 -0800 Subject: [PATCH 032/199] WIP: adding new display name/label handling --- schematic/help.py | 16 ++++++++--------- schematic/models/commands.py | 29 ++++++++++++++++------------- 2 files changed, 24 insertions(+), 21 deletions(-) diff --git a/schematic/help.py b/schematic/help.py index 6f84daa1b..d39214bb0 100644 --- a/schematic/help.py +++ b/schematic/help.py @@ -101,11 +101,6 @@ "The component or data type from the data model which you can use to validate the " "data filled in your manifest template. 
), - "use_schema_label": ( - "Store attributes using the schema label (--use_schema_label, default) or store attributes using the display label " - "(--use_display_label). Attribute display names in the schema must not only include characters that are " - "not accepted by Synapse. Annotation names may only contain: letters, numbers, '_' and '.'" - ), "hide_blanks": ( "This is a boolean flag. If flag is provided when command line utility is executed, annotations with blank values will be hidden from a dataset's annotation list in Synapse." "If not, annotations with blank values will be displayed." @@ -127,10 +122,15 @@ "'upsert' should be used for initial table uploads if users intend to upsert into them at a later time." "Using 'upsert' at creation will generate the metadata necessary for upsert functionality." "Upsert functionality requires primary keys to be specified in the data model and manifest as _id." - "Currently it is required to use -dl/--use_display_label with table upserts." + "Currently it is required to use --table_column_names = display_name with table upserts." + ), + "annotation_keys": ( + "Store attributes using the class label (default) or store attributes using the display label. " + "Attribute display names in the schema must only include characters that are " + "accepted by Synapse. Annotation names may only contain: letters, numbers, '_' and '.'" ), - "retain_dl_formatting":( - "Boolean flag, default false. When true, annotations and table columns will be uploaded with the display name formatting, with blacklisted characters removed. " + "table_column_names":( + "class_label (default), display_label, or display_name. Sets the labeling style for table column names: class_label converts display names to upper camel case and strips blacklisted characters, display_label strips blacklisted characters (including spaces) while keeping the display name formatting, and display_name uses the raw display name as the column name. " "To use for tables, use in conjunction with the use_schema_label flag." 
), }, diff --git a/schematic/models/commands.py b/schematic/models/commands.py index 0924fc7c7..ef6fd1458 100644 --- a/schematic/models/commands.py +++ b/schematic/models/commands.py @@ -66,12 +66,7 @@ def model(ctx, config): # use as `schematic model ...` "--validate_component", help=query_dict(model_commands, ("model", "submit", "validate_component")), ) -@click.option( - "--use_schema_label/--use_display_label", - "-sl/-dl", - default=True, - help=query_dict(model_commands, ("model", "submit", "use_schema_label")), -) + @click.option( "--hide_blanks", "-hb", @@ -104,14 +99,22 @@ def model(ctx, config): # use as `schematic model ...` type=click.Choice(['replace', 'upsert'], case_sensitive=True), help=query_dict(model_commands, ("model", "submit", "table_manipulation"))) @click.option( - "--retain_dl_formatting", - "-rdlf", - is_flag=True, - help=query_dict(model_commands, ("model", "submit", "retain_dl_formatting")), + "--table_column_names", + "-tcn", + default='class_label', + type=click.Choice(['class_label', 'display_label', 'display_name'], case_sensitive=True), + help=query_dict(model_commands, ("model", "submit", "table_column_names")), +) +@click.option( + "--annotation_keys", + "-ak", + default='class_label', + type=click.Choice(['class_label', 'display_label'], case_sensitive=True), + help=query_dict(model_commands, ("model", "submit", "annotation_keys")), ) @click.pass_obj def submit_manifest( - ctx, manifest_path, dataset_id, validate_component, manifest_record_type, use_schema_label, hide_blanks, restrict_rules, project_scope, table_manipulation, retain_dl_formatting + ctx, manifest_path, dataset_id, validate_component, manifest_record_type, hide_blanks, restrict_rules, project_scope, table_manipulation, table_column_names, annotation_keys, ): """ Running CLI with manifest validation (optional) and submission options. @@ -132,11 +135,11 @@ def submit_manifest( validate_component=validate_component, manifest_record_type=manifest_record_type, restrict_rules=restrict_rules, - use_schema_label=use_schema_label, + table_column_names=table_column_names, hide_blanks=hide_blanks, project_scope=project_scope, table_manipulation=table_manipulation, - retain_dl_formatting=retain_dl_formatting + annotation_keys=annotation_keys, ) if manifest_id: From 1ce8ae0882b0dab75cfe21ab142267d387afd5a8 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Thu, 30 Nov 2023 14:48:32 -0800 Subject: [PATCH 033/199] WIP add ref to annotationkeys and table_columnnames --- schematic/store/synapse.py | 143 +++++++++++++++++++++---------------- 1 file changed, 82 insertions(+), 61 deletions(-) diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py index e91e2be7c..ef713e916 100644 --- a/schematic/store/synapse.py +++ b/schematic/store/synapse.py @@ -956,9 +956,8 @@ def uploadDB(self, datasetId: str, table_name: str, restrict: bool = False, - useSchemaLabel: bool = True, table_manipulation: str = 'replace', - retain_dl_formatting:bool=False, + table_column_names:str='class_label', ): """ Method to upload a database to an asset store. 
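The commands.py hunk above replaces the boolean --use_schema_label/--use_display_label switch with two constrained choices. Here is a self-contained sketch of how those Click options behave; the option names, shorthands, and choices come from the diff, while the command body is a stub for illustration.

import click

@click.command()
@click.option("--table_column_names", "-tcn", default="class_label",
    type=click.Choice(["class_label", "display_label", "display_name"], case_sensitive=True))
@click.option("--annotation_keys", "-ak", default="class_label",
    type=click.Choice(["class_label", "display_label"], case_sensitive=True))
def submit(table_column_names, annotation_keys):
    # Stub body: echo the resolved labeling styles.
    click.echo(f"table columns: {table_column_names}; annotation keys: {annotation_keys}")

if __name__ == "__main__":
    submit()

With click.Choice, an invalid value such as --table_column_names camel is rejected at the CLI boundary before any submission logic runs.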
In synapse, this will upload a metadata table @@ -969,10 +968,11 @@ def uploadDB(self, datasetId: synID of the dataset for the manifest table_name: name of the table to be uploaded restrict: bool, whether or not the manifest contains sensitive data that will need additional access restrictions - useSchemaLabel: bool whether to use schemaLabel (True) or display label (False) existingTableId: str of the synId of the existing table, if one already exists table_manipulation: str, 'replace' or 'upsert', in the case where a manifest already exists, should the new metadata replace the existing (replace) or be added to it (upsert) - retain_dl_formatting: bool, used in conjunction with useSchemaLabel. Ensures column name is formatted properly to be used as a synapse annotation without converting to class_label. Uses displayLabel formatting while removing blacklisted characters. + table_column_names: (str): display_name/display_label/class_label (default). Sets labeling style for table column names. display_name will use the raw display name as the column name. class_label will format the display + name as upper camelcase, and strip blacklisted characters, display_label will strip blacklisted characters including spaces, to retain + display label formatting. Returns: manifest_table_id: synID of the uploaded table manifest: the original manifset @@ -981,21 +981,22 @@ def uploadDB(self, """ - col_schema, table_manifest = self.formatDB(sg=sg, manifest=manifest, useSchemaLabel=useSchemaLabel, retain_dl_formatting=retain_dl_formatting) + col_schema, table_manifest = self.formatDB(sg=sg, manifest=manifest, table_column_names=table_column_names) manifest_table_id = self.buildDB(datasetId, table_name, col_schema, table_manifest, table_manipulation, sg, restrict,) return manifest_table_id, manifest, table_manifest - def formatDB(self, sg, manifest, useSchemaLabel, retain_dl_formatting): + def formatDB(self, sg, manifest, table_column_names): """ Method to format a manifest appropriatly for upload as table Args: sg: schemaGenerator object manifest: pd.Df manifest to upload - useSchemaLabel: bool whether to use schemaLabel (True) or display label (False) - retain_dl_formatting: bool, used in conjunction with useSchemaLabel. Ensures column name is formatted properly to be used as a synapse annotation without converting to class_label. Uses displayLabel formatting while removing blacklisted characters. + table_column_names: (str): display_name/display_label/class_label (default). Sets labeling style for table column names. display_name will use the raw display name as the column name. class_label will format the display + name as upper camelcase, and strip blacklisted characters, display_label will strip blacklisted characters including spaces, to retain + display label formatting. 
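To make the three table_column_names styles documented above concrete, here is a hedged sketch; to_class_label approximates the upper-camel-case class-label utility and is not the shipped implementation.

blacklist_chars = ['(', ')', '.', ' ', '-']

def to_class_label(display_name: str) -> str:
    # Approximation of the upper-camel-case class-label conversion.
    return "".join(w.capitalize() for w in display_name.split())

def column_name(display_name: str, table_column_names: str = "class_label") -> str:
    if table_column_names == "display_name":
        return display_name  # raw display name, unchanged
    if table_column_names == "display_label":
        # Keep the display-name formatting, drop blacklisted characters.
        return display_name.translate({ord(x): '' for x in blacklist_chars})
    # Default "class_label": camel case first, then strip blacklisted characters.
    return to_class_label(display_name).translate({ord(x): '' for x in blacklist_chars})

assert column_name("Genome Build", "display_name") == "Genome Build"
assert column_name("Check Match at Least", "display_label") == "CheckMatchatLeast"
assert column_name("genome build", "class_label") == "GenomeBuild"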
Returns: col_schema: schema for table columns: type, size, etc table_manifest: formatted manifest @@ -1008,28 +1009,31 @@ def formatDB(self, sg, manifest, useSchemaLabel, retain_dl_formatting): table_manifest=deepcopy(manifest) - if useSchemaLabel: - if retain_dl_formatting: + if table_column_names=='display_name': - cols = [ - str(col).translate({ord(x): '' for x in blacklist_chars}) - for col in manifest_columns - ] + cols = table_manifest.columns - else: + elif table_column_names=='display_label': + + cols = [ + str(col).translate({ord(x): '' for x in blacklist_chars}) + for col in manifest_columns + ] + + elif table_column_names=='class_label': - cols = [ - sg.se.get_class_label_from_display_name( - str(col) - ).translate({ord(x): '' for x in blacklist_chars}) - for col in manifest_columns - ] + cols = [ + sg.se.get_class_label_from_display_name( + str(col) + ).translate({ord(x): '' for x in blacklist_chars}) + for col in manifest_columns + ] - cols = list(map(lambda x: x.replace('EntityId', 'entityId'), cols)) + cols = list(map(lambda x: x.replace('EntityId', 'entityId'), cols)) - # Reset column names in table manifest - table_manifest.columns = cols + # Reset column names in table manifest + table_manifest.columns = cols #move entity id to end of df entity_col = table_manifest.pop('entityId') @@ -1141,7 +1145,7 @@ def upload_manifest_file(self, manifest, metadataManifestPath, datasetId, restri return manifest_synapse_file_id @missing_entity_handler - def format_row_annotations(self, se, sg, row, entityId, hideBlanks, retain_dl_formatting): + def format_row_annotations(self, se, sg, row, entityId, hideBlanks, annotation_keys): # prepare metadata for Synapse storage (resolve display name into a name that Synapse annotations support (e.g no spaces, parenthesis) # note: the removal of special characters, will apply only to annotation keys; we are not altering the manifest # this could create a divergence between manifest column and annotations. this should be ok for most use cases. @@ -1150,9 +1154,9 @@ def format_row_annotations(self, se, sg, row, entityId, hideBlanks, retain_dl_fo blacklist_chars = ['(', ')', '.', ' ', '-'] for k, v in row.to_dict().items(): - if retain_dl_formatting: + if annotation_keys=='display_label': keySyn = str(k).translate({ord(x): '' for x in blacklist_chars}) - else: + elif annotation_keys=='class_label': keySyn = se.get_class_label_from_display_name(str(k)).translate({ord(x): '' for x in blacklist_chars}) # Skip `Filename` and `ETag` columns when setting annotations @@ -1385,7 +1389,7 @@ def _generate_table_name(self, manifest): table_name = 'synapse_storage_manifest_table' return table_name, component_name - def _add_annotations(self, se, schemaGenerator, row, entityId, hideBlanks, retain_dl_formatting): + def _add_annotations(self, se, schemaGenerator, row, entityId:str, hideBlanks:bool, annotation_keys:str): """Helper function to format and add annotations to entities in Synapse. Args: se: schemaExplorer object, @@ -1393,12 +1397,14 @@ def _add_annotations(self, se, schemaGenerator, row, entityId, hideBlanks, retai row: current row of manifest being processed entityId (str): synapseId of entity to add annotations to hideBlanks: Boolean flag that does not upload annotation keys with blank values when true. Uploads Annotation keys with empty string values when false. - retain_dl_formatting: bool, used in conjunction with useSchemaLabel. Ensures column name is formatted properly to be used as a synapse annotation without converting to class_label. 
Uses displayLabel formatting while removing blacklisted characters.
+            annotation_keys: (str) display_label/class_label(default), Determines labeling style for annotation keys. class_label will format the display
+            name as upper camelcase, and strip blacklisted characters, display_label will strip blacklisted characters including spaces, to retain
+            display label formatting while ensuring the label is formatted properly for Synapse annotations.
        Returns:
            Annotations are added to entities in Synapse, no return.
        """
        # Format annotations for Synapse
-        annos = self.format_row_annotations(se, schemaGenerator, row, entityId, hideBlanks, retain_dl_formatting)
+        annos = self.format_row_annotations(se, schemaGenerator, row, entityId, hideBlanks, annotation_keys)

        if annos:
            # Store annotations for an entity folder
@@ -1433,7 +1439,7 @@ def add_annotations_to_entities_files(
        datasetId,
        hideBlanks,
        manifest_synapse_table_id='',
-        retain_dl_formatting=False,
+        annotation_keys:str='class_label',
    ):
        '''Depending on upload type add Ids to entityId row. Add annotations to connected files.
        Args:
            se: schemaExplorer object,
            schemaGenerator: schemaGenerator Object.
            manifest (pd.DataFrame): loaded df containing user supplied data.
            manifest_record_type: valid values are 'entity', 'table' or 'both'. Specifies whether to create entity ids and folders for each row in a manifest, a Synapse table to house the entire manifest or do both.
            datasetId (str): synapse ID of folder containing the dataset
            hideBlanks (bool): Default is false -Boolean flag that does not upload annotation keys with blank values when true. Uploads Annotation keys with empty string values when false.
            manifest_synapse_table_id (str): Default is an empty string ''.
-            retain_dl_formatting: bool, used in conjunction with useSchemaLabel. Ensures column name is formatted properly to be used as a synapse annotation without converting to class_label. Uses displayLabel formatting while removing blacklisted characters.
+            annotation_keys: (str) display_label/class_label(default), Determines labeling style for annotation keys. class_label will format the display
+            name as upper camelcase, and strip blacklisted characters, display_label will strip blacklisted characters including spaces, to retain
+            display label formatting while ensuring the label is formatted properly for Synapse annotations.

        Returns:
            manifest (pd.DataFrame): modified to add entityId as appropriate.
@@ -1476,7 +1484,7 @@ def add_annotations_to_entities_files(
            # Adding annotations to connected files.
            if entityId:
-                self._add_annotations(se, schemaGenerator, row, entityId, hideBlanks, retain_dl_formatting)
+                self._add_annotations(se, schemaGenerator, row, entityId, hideBlanks, annotation_keys)
                logger.info(f"Added annotations to entity: {entityId}")
        return manifest
@@ -1491,10 +1499,10 @@ def upload_manifest_as_table(
        self,
        se,
        schemaGenerator,
        manifest,
        metadataManifestPath,
        datasetId,
        table_name,
        component_name,
        restrict,
        manifest_record_type,
-        useSchemaLabel,
        hideBlanks,
        table_manipulation,
-        retain_dl_formatting,
+        table_column_names:str,
+        annotation_keys:str,
    ):
        """Upload manifest to Synapse as a table and csv.
        Args:
            se: SchemaExplorer object
            schemaGenerator: SchemaGenerator object
            manifest (pd.DataFrame): loaded df containing user supplied data.
            metadataManifestPath: path to csv containing a validated metadata manifest.
            datasetId (str): synapse ID of folder containing the dataset
            table_name (str): Generated to name the table being uploaded.
            component_name (str): Name of the component manifest that is currently being uploaded.
            restrict (bool): Flag for censored data.
            manifest_record_type (str): valid values are 'entity', 'table' or 'both'. Specifies whether to create entity ids and folders for each row in a manifest, a Synapse table to house the entire manifest or do both.
            hideBlanks (bool): Default is False -Boolean flag that does not upload annotation keys with blank values when true. Uploads Annotation keys with empty string values when false.
            table_manipulation (str): Specify the way the manifest tables should be stored on Synapse when one with the same name already exists. Options are 'replace' and 'upsert'.
-            retain_dl_formatting: bool, used in conjunction with useSchemaLabel. Ensures column name is formatted properly to be used as a synapse annotation without converting to class_label. Uses displayLabel formatting while removing blacklisted characters.
+            table_column_names: (str): display_name/display_label/class_label (default). Sets labeling style for table column names. display_name will use the raw display name as the column name. class_label will format the display
+            name as upper camelcase, and strip blacklisted characters, display_label will strip blacklisted characters including spaces, to retain
+            display label formatting.
+            annotation_keys: (str) display_label/class_label (default), Sets labeling style for annotation keys. class_label will format the display
+            name as upper camelcase, and strip blacklisted characters, display_label will strip blacklisted characters including spaces, to retain
+            display label formatting while ensuring the label is formatted properly for Synapse annotations.
        Return:
            manifest_synapse_file_id: SynID of manifest csv uploaded to synapse.
        """
        manifest_synapse_table_id, manifest, table_manifest = self.uploadDB(
            sg=sg,
            manifest=manifest,
            datasetId=datasetId,
            table_name=table_name,
            restrict=restrict,
-            useSchemaLabel=useSchemaLabel,
            table_manipulation=table_manipulation,
-            retain_dl_formatting=retain_dl_formatting)
+            table_column_names=table_column_names)

-        manifest = self.add_annotations_to_entities_files(se, schemaGenerator, manifest, manifest_record_type, datasetId, hideBlanks, manifest_synapse_table_id, retain_dl_formatting)
+        manifest = self.add_annotations_to_entities_files(se, schemaGenerator, manifest, manifest_record_type, datasetId, hideBlanks, manifest_synapse_table_id, annotation_keys)

        # Load manifest to synapse as a CSV File
        manifest_synapse_file_id = self.upload_manifest_file(manifest, metadataManifestPath, datasetId, restrict, component_name = component_name)
@@ -1540,9 +1552,8 @@ def upload_manifest_as_table(
            datasetId=datasetId,
            table_name=table_name,
            restrict=restrict,
-            useSchemaLabel=useSchemaLabel,
            table_manipulation='update',
-            retain_dl_formatting=retain_dl_formatting)
+            table_column_names=table_column_names)

        # Set annotations for the table manifest
        manifest_annotations = self.format_manifest_annotations(manifest, manifest_synapse_table_id)
@@ -1559,7 +1570,8 @@ def upload_manifest_as_csv(
        restrict,
        manifest_record_type,
        hideBlanks,
-        component_name):
+        component_name,
+        annotation_keys:str):
        """Upload manifest to Synapse as a csv only.
        Args:
            se: SchemaExplorer object
            schemaGenerator: SchemaGenerator object
            manifest (pd.DataFrame): loaded df containing user supplied data.
            metadataManifestPath: path to csv containing a validated metadata manifest.
            datasetId (str): synapse ID of folder containing the dataset
            restrict (bool): Flag for censored data.
            manifest_record_type: valid values are 'entity', 'table' or 'both'. Specifies whether to create entity ids and folders for each row in a manifest, a Synapse table to house the entire manifest or do both.
            hideBlanks (bool): Default is False -Boolean flag that does not upload annotation keys with blank values when true. Uploads Annotation keys with empty string values when false.
-            table_malnipulation (str): Specify the way the manifest tables should be store as on Synapse when one with the same name already exists. Options are 'replace' and 'upsert'.
-            with_entities (bool): Default is False - Flag to indicate whether to create entityIds and add annotations.
+            annotation_keys: (str) display_label/class_label (default), Sets labeling style for annotation keys.
class_label will format the display + name as upper camelcase, and strip blacklisted characters, display_label will strip blacklisted characters including spaces, to retain + display label formatting while ensuring the label is formatted properly for Synapse annotations. Return: manifest_synapse_file_id (str): SynID of manifest csv uploaded to synapse. """ - # remove with_entities parameter and rename add_annotations, as add_annototaions_to_files_entities. - manifest = self.add_annotations_to_entities_files(se, schemaGenerator, manifest, manifest_record_type, datasetId, hideBlanks, retain_dl_formatting=retain_dl_formatting) + manifest = self.add_annotations_to_entities_files(se, schemaGenerator, manifest, manifest_record_type, datasetId, hideBlanks, annotation_keys=annotation_keys) # Load manifest to synapse as a CSV File manifest_synapse_file_id = self.upload_manifest_file(manifest, @@ -1601,10 +1613,10 @@ def upload_manifest_combo( component_name, restrict, manifest_record_type, - useSchemaLabel, hideBlanks, table_manipulation, - retain_dl_formatting, + table_column_names:str, + annotation_keys:str, ): """Upload manifest to Synapse as a table and CSV with entities. Args: @@ -1617,10 +1629,14 @@ def upload_manifest_combo( component_name (str): Name of the component manifest that is currently being uploaded. restrict (bool): Flag for censored data. manifest_record_type: valid values are 'entity', 'table' or 'both'. Specifies whether to create entity ids and folders for each row in a manifest, a Synapse table to house the entire manifest or do both. - useSchemaLabel (bool): Default is True - use the schema label. If False, uses the display label from the schema. Attribute display names in the schema must not only include characters that are not accepted by Synapse. Annotation names may only contain: letters, numbers, '_' and '.'. hideBlanks (bool): Default is False -Boolean flag that does not upload annotation keys with blank values when true. Uploads Annotation keys with empty string values when false. table_malnipulation (str): Specify the way the manifest tables should be store as on Synapse when one with the same name already exists. Options are 'replace' and 'upsert'. - retain_dl_formatting: bool, used in conjunction with useSchemaLabel. Ensures column name is formatted properly to be used as a synapse annotation without converting to class_label. Uses displayLabel formatting while removing blacklisted characters. + table_column_names: (str): display_name/display_label/class_label (default). Sets labeling style for table column names. display_name will use the raw display name as the column name. class_label will format the display + name as upper camelcase, and strip blacklisted characters, display_label will strip blacklisted characters including spaces, to retain + display label formatting. + annotation_keys: (str) display_label/class_label (default), Sets labeling syle for annotation keys. class_label will format the display + name as upper camelcase, and strip blacklisted characters, display_label will strip blacklisted characters including spaces, to retain + display label formatting while ensuring the label is formatted properly for Synapse annotations. Return: manifest_synapse_file_id (str): SynID of manifest csv uploaded to synapse. 
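        Illustrative example (not from the original patch; exact casing comes
        from get_class_label_from_display_name): a manifest column displayed
        as "family history" would be keyed as "FamilyHistory" with
        annotation_keys='class_label', but as "familyhistory" with
        annotation_keys='display_label', since display_label only strips
        blacklisted characters and otherwise leaves the display name
        untouched.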
""" @@ -1630,11 +1646,10 @@ def upload_manifest_combo( datasetId=datasetId, table_name=table_name, restrict=restrict, - useSchemaLabel=useSchemaLabel, table_manipulation=table_manipulation, - retain_dl_formatting=retain_dl_formatting) + table_column_names=table_column_names) - manifest = self.add_annotations_to_entities_files(se, schemaGenerator, manifest, manifest_record_type, datasetId, hideBlanks, manifest_synapse_table_id, retain_dl_formatting=retain_dl_formatting) + manifest = self.add_annotations_to_entities_files(se, schemaGenerator, manifest, manifest_record_type, datasetId, hideBlanks, manifest_synapse_table_id, annotation_keys=annotation_keys) # Load manifest to synapse as a CSV File manifest_synapse_file_id = self.upload_manifest_file(manifest, metadataManifestPath, datasetId, restrict, component_name) @@ -1651,9 +1666,8 @@ def upload_manifest_combo( datasetId=datasetId, table_name=table_name, restrict=restrict, - useSchemaLabel=useSchemaLabel, table_manipulation='update', - retain_dl_formatting=retain_dl_formatting) + table_column_names=table_column_names) # Set annotations for the table manifest manifest_annotations = self.format_manifest_annotations(manifest, manifest_synapse_table_id) @@ -1662,7 +1676,8 @@ def upload_manifest_combo( def associateMetadataWithFiles( self, schemaGenerator: SchemaGenerator, metadataManifestPath: str, datasetId: str, manifest_record_type: str = 'table_file_and_entities', - useSchemaLabel: bool = True, hideBlanks: bool = False, restrict_manifest = False, table_manipulation: str = 'replace', retain_dl_formatting: bool = False, + hideBlanks: bool = False, restrict_manifest = False, table_manipulation: str = 'replace', table_column_names: str = 'class_label', + annotation_keys:str = 'class_label', ) -> str: """Associate metadata with files in a storage dataset already on Synapse. Upload metadataManifest in the storage dataset folder on Synapse as well. Return synapseId of the uploaded manifest file. @@ -1683,10 +1698,15 @@ def associateMetadataWithFiles( In this case, the system creates a file on Synapse for each row in the table (e.g. patient, biospecimen) and associates the columnset data as metadata/annotations to his file. datasetId: synapse ID of folder containing the dataset manifest_record_type: Default value is 'table_file_and_entities'. valid values are 'file_only', 'file_and_entities', 'table_and_file' or 'table_file_and_entities'. 'file_and_entities' will store the manifest as a csv and create Synapse files for each row in the manifest.'table_and_file' will store the manifest as a table and a csv on Synapse. 'file_only' will store the manifest as a csv only on Synapse. 'table_file_and_entities' will perform the options file_with_entites and table in combination. - useSchemaLabel: Default is True - use the schema label. If False, uses the display label from the schema. Attribute display names in the schema must not only include characters that are not accepted by Synapse. Annotation names may only contain: letters, numbers, '_' and '.'. hideBlanks: Default is false. Boolean flag that does not upload annotation keys with blank values when true. Uploads Annotation keys with empty string values when false. restrict_manifest (bool): Default is false. Flag for censored data. table_malnipulation (str): Default is 'replace'. Specify the way the manifest tables should be store as on Synapse when one with the same name already exists. Options are 'replace' and 'upsert'. + table_column_names: (str): display_name/display_label/class_label (default). 
Sets labeling style for table column names. display_name will use the raw display name as the column name. class_label will format the display + name as upper camelcase, and strip blacklisted characters, display_label will strip blacklisted characters including spaces, to retain + display label formatting. + annotation_keys: (str) display_label/class_label (default), Sets labeling syle for annotation keys. class_label will format the display + name as upper camelcase, and strip blacklisted characters, display_label will strip blacklisted characters including spaces, to retain + display label formatting while ensuring the label is formatted properly for Synapse annotations. Returns: manifest_synapse_file_id: SynID of manifest csv uploaded to synapse. """ @@ -1712,6 +1732,7 @@ def associateMetadataWithFiles( hideBlanks=hideBlanks, manifest_record_type=manifest_record_type, component_name = component_name, + annotation_keys=annotation_keys, ) elif manifest_record_type == "table_and_file": manifest_synapse_file_id = self.upload_manifest_as_table( @@ -1723,11 +1744,10 @@ def associateMetadataWithFiles( table_name=table_name, component_name=component_name, restrict=restrict_manifest, - useSchemaLabel=useSchemaLabel, hideBlanks=hideBlanks, manifest_record_type=manifest_record_type, table_manipulation=table_manipulation, - retain_dl_formatting=retain_dl_formatting, + table_column_names=table_column_names, ) elif manifest_record_type == "file_and_entities": manifest_synapse_file_id = self.upload_manifest_as_csv( @@ -1740,6 +1760,7 @@ def associateMetadataWithFiles( hideBlanks=hideBlanks, manifest_record_type=manifest_record_type, component_name = component_name, + annotation_keys=annotation_keys, ) elif manifest_record_type == "table_file_and_entities": manifest_synapse_file_id = self.upload_manifest_combo( @@ -1751,11 +1772,11 @@ def associateMetadataWithFiles( table_name=table_name, component_name=component_name, restrict=restrict_manifest, - useSchemaLabel=useSchemaLabel, hideBlanks=hideBlanks, manifest_record_type=manifest_record_type, table_manipulation=table_manipulation, - retain_dl_formatting=retain_dl_formatting, + table_column_names=table_column_names, + annotation_keys=annotation_keys, ) else: raise ValueError("Please enter a valid manifest_record_type.") From 298b0f5017a718cc47b227f1a3c55f06bcfc8201 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 4 Dec 2023 16:20:20 -0800 Subject: [PATCH 034/199] add new options to api and route, run black --- schematic_api/api/openapi/api.yaml | 18 +- schematic_api/api/routes.py | 551 +++++++++++++++++------------ 2 files changed, 340 insertions(+), 229 deletions(-) diff --git a/schematic_api/api/openapi/api.yaml b/schematic_api/api/openapi/api.yaml index b5432c6ad..4151b31ed 100644 --- a/schematic_api/api/openapi/api.yaml +++ b/schematic_api/api/openapi/api.yaml @@ -391,18 +391,20 @@ paths: enum: ["replace", "upsert"] required: false - in: query - name: use_schema_label - description: "Store attributes using the schema label (true, default) or store attributes using the display label (false). Attribute display names in the schema must not only include characters that are not accepted by Synapse. Annotation names may only contain: letters, numbers, '_' and '.'" + name: table_column_names + description: Specify how table column names should be formatted. display_name would use the raw attribute display name as the column name. 
display_label, would strip all blacklisted characters (including spaces) from the display name, while retaining the rest of the display name formatting (best used when display name is already in camelcase), class_label (default) converts the display name to upper camelcase and strips blacklisted characters. schema: - type: boolean - default: true + type: string + enum: ["display_name", "display_label", "class_label"] + default: "class_label" required: false - in: query - name: retain_dl_formatting - description: "Retain display label formatting (rather than Class Label camelcase formatting) when generating schema labels, for uploading annotations and making table column names, while stripping any blacklisted characters. Used in conjunction with use_schema_label for table column formatting." + name: annotation_keys + description: Specify how table column names should be formatted. display_label, would strip all blacklisted characters (including spaces) from the display name, while retaining the rest of the display name formatting (best used when display name is already in camelcase), class_label (default) converts the display name to upper camelcase and strips blacklisted characters. schema: - type: boolean - default: false + type: string + enum: ["display_label", "class_label"] + default: "class_label" required: false - in: query name: project_scope diff --git a/schematic_api/api/routes.py b/schematic_api/api/routes.py index ecec80981..856073c7f 100644 --- a/schematic_api/api/routes.py +++ b/schematic_api/api/routes.py @@ -27,13 +27,20 @@ from schematic.schemas.generator import SchemaGenerator from schematic.schemas.explorer import SchemaExplorer from schematic.store.synapse import SynapseStorage, ManifestDownload -from synapseclient.core.exceptions import SynapseHTTPError, SynapseAuthenticationError, SynapseUnmetAccessRestrictions, SynapseNoCredentialsError, SynapseTimeoutError +from synapseclient.core.exceptions import ( + SynapseHTTPError, + SynapseAuthenticationError, + SynapseUnmetAccessRestrictions, + SynapseNoCredentialsError, + SynapseTimeoutError, +) from schematic.utils.general import entity_type_mapping logger = logging.getLogger(__name__) logging.basicConfig(level=logging.DEBUG) -def config_handler(asset_view: str=None): + +def config_handler(asset_view: str = None): # check if path to config is provided path_to_config = app.config["SCHEMATIC_CONFIG"] if path_to_config is not None and os.path.isfile(path_to_config): @@ -41,81 +48,83 @@ def config_handler(asset_view: str=None): if asset_view is not None: CONFIG.synapse_master_fileview_id = asset_view + class JsonConverter: - ''' + """ Mainly handle converting json str or json file to csv - ''' + """ + def readJson(self, json_str=None, manifest_file=None): - ''' + """ The purpose of this function is to read either json str or json file - input: + input: json_str: json object - manifest_file: manifest file object - output: + manifest_file: manifest file object + output: return a dataframe - ''' + """ if json_str: df = pd.read_json(json_str) - elif manifest_file: + elif manifest_file: df = pd.read_json(manifest_file.read()) return df - + def get_file(self, file_key): - ''' + """ The purpose of this function is to get the file uploaded by user - input: - file_key: Defined in api.yaml. This key refers to the files uploaded. - manifest_file: manifest file object - output: + input: + file_key: Defined in api.yaml. This key refers to the files uploaded. 
+ manifest_file: manifest file object + output: return file object - ''' + """ manifest_file = connexion.request.files[file_key] return manifest_file def IsJsonFile(self, manifest_file): - ''' + """ The purpose of this function is check if the manifest file that gets uploaded is a json or not - input: - manifest_file: manifest file object - output: + input: + manifest_file: manifest file object + output: return True if it is json - ''' + """ file_type = manifest_file.content_type - if file_type == 'application/json': + if file_type == "application/json": return True - else: + else: return False def convert_df_to_csv(self, df, file_name): - ''' + """ The purpose of this function is to convert dataframe to a temporary CSV file - input: + input: df: dataframe file_name: file name of the output csv - output: + output: return temporary file path of the output csv - ''' + """ # convert dataframe to a temporary csv file temp_dir = tempfile.gettempdir() temp_path = os.path.join(temp_dir, file_name) - df.to_csv(temp_path, encoding = 'utf-8', index=False) + df.to_csv(temp_path, encoding="utf-8", index=False) return temp_path def convert_json_str_to_csv(self, json_str, file_name): - ''' + """ The purpose of this function is to convert json str to a temporary csv file - input: + input: json_str: json object file_name: file name of the output csv - output: + output: return temporary file path of the output csv - ''' + """ # convert json to df - df = self.readJson(json_str = json_str) + df = self.readJson(json_str=json_str) # convert dataframe to a temporary csv file temp_path = self.convert_df_to_csv(df, file_name) @@ -123,31 +132,32 @@ def convert_json_str_to_csv(self, json_str, file_name): return temp_path def convert_json_file_to_csv(self, file_key): - ''' + """ The purpose of this function is to convert json str to a temporary csv file - input: - file_key: Defined in api.yaml. This key refers to the files uploaded. - output: + input: + file_key: Defined in api.yaml. This key refers to the files uploaded. + output: return temporary file path of the output csv - ''' + """ # get manifest file manifest_file = self.get_file(file_key) if self.IsJsonFile(manifest_file): # read json as dataframe - df = self.readJson(manifest_file = manifest_file) + df = self.readJson(manifest_file=manifest_file) # get base file name base = os.path.splitext(manifest_file.filename)[0] - # name the new csv file - new_file_name = base + '.csv' + # name the new csv file + new_file_name = base + ".csv" # convert to csv temp_path = self.convert_df_to_csv(df, new_file_name) return temp_path - else: - temp_path = save_file(file_key='file_name') + else: + temp_path = save_file(file_key="file_name") return temp_path + def get_access_token() -> str: """Get access token from header""" bearer_token = None @@ -159,28 +169,31 @@ def get_access_token() -> str: if auth_header.startswith("Bearer "): bearer_token = auth_header.split(" ")[1] return bearer_token - + + def parse_bool(str_bool): - if str_bool.lower().startswith('t'): + if str_bool.lower().startswith("t"): return True - elif str_bool.lower().startswith('f'): + elif str_bool.lower().startswith("f"): return False else: raise ValueError( "String boolean does not appear to be true or false. Please verify input." ) + def return_as_json(manifest_local_file_path): manifest_csv = pd.read_csv(manifest_local_file_path) manifest_json = manifest_csv.to_dict(orient="records") return manifest_json + def save_file(file_key="csv_file"): - ''' - input: + """ + input: file_key: Defined in api.yaml. 
This key refers to the files uploaded. By default, set to "csv_file" Return a temporary file path for the uploaded a given file - ''' + """ manifest_file = connexion.request.files[file_key] # save contents of incoming manifest CSV file to temp file @@ -192,6 +205,7 @@ def save_file(file_key="csv_file"): return temp_path + def initalize_metadata_model(schema_url): jsonld = get_temp_jsonld(schema_url) metadata_model = MetadataModel( @@ -199,35 +213,47 @@ def initalize_metadata_model(schema_url): ) return metadata_model + def get_temp_jsonld(schema_url): # retrieve a JSON-LD via URL and store it in a temporary location with urllib.request.urlopen(schema_url) as response: - with tempfile.NamedTemporaryFile(delete=False, suffix=".model.jsonld") as tmp_file: + with tempfile.NamedTemporaryFile( + delete=False, suffix=".model.jsonld" + ) as tmp_file: shutil.copyfileobj(response, tmp_file) # get path to temporary JSON-LD file return tmp_file.name + # @before_request -def get_manifest_route(schema_url: str, use_annotations: bool, dataset_ids=None, asset_view = None, output_format=None, title=None, strict_validation:bool=True): +def get_manifest_route( + schema_url: str, + use_annotations: bool, + dataset_ids=None, + asset_view=None, + output_format=None, + title=None, + strict_validation: bool = True, +): """Get the immediate dependencies that are related to a given source node. - Args: - schema_url: link to data model in json ld format - title: title of a given manifest. - dataset_id: Synapse ID of the "dataset" entity on Synapse (for a given center/project). - output_format: contains three option: "excel", "google_sheet", and "dataframe". if set to "excel", return an excel spreadsheet - use_annotations: Whether to use existing annotations during manifest generation - asset_view: ID of view listing all project data assets. For example, for Synapse this would be the Synapse ID of the fileview listing all data assets for a given project. - strict: bool, strictness with which to apply validation rules to google sheets. - Returns: - Googlesheet URL (if sheet_url is True), or pandas dataframe (if sheet_url is False). + Args: + schema_url: link to data model in json ld format + title: title of a given manifest. + dataset_id: Synapse ID of the "dataset" entity on Synapse (for a given center/project). + output_format: contains three option: "excel", "google_sheet", and "dataframe". if set to "excel", return an excel spreadsheet + use_annotations: Whether to use existing annotations during manifest generation + asset_view: ID of view listing all project data assets. For example, for Synapse this would be the Synapse ID of the fileview listing all data assets for a given project. + strict: bool, strictness with which to apply validation rules to google sheets. + Returns: + Googlesheet URL (if sheet_url is True), or pandas dataframe (if sheet_url is False). """ # Get access token from request header access_token = get_access_token() # call config_handler() - config_handler(asset_view = asset_view) + config_handler(asset_view=asset_view) # get path to temporary JSON-LD file jsonld = get_temp_jsonld(schema_url) @@ -235,47 +261,54 @@ def get_manifest_route(schema_url: str, use_annotations: bool, dataset_ids=None, # Gather all data_types to make manifests for. 
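    # Illustrative note (not from the original code): connexion exposes the
    # query string as a werkzeug MultiDict, so repeated parameters survive.
    # A hypothetical request such as
    #   ?data_type=Patient&data_type=Biospecimen&dataset_id=syn1&dataset_id=syn2
    # makes dict(all_args.lists()) look roughly like
    #   {"data_type": ["Patient", "Biospecimen"], "dataset_id": ["syn1", "syn2"]}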
all_args = connexion.request.args args_dict = dict(all_args.lists()) - data_type = args_dict['data_type'] + data_type = args_dict["data_type"] # Gather all dataset_ids try: - dataset_ids = args_dict['dataset_id'] + dataset_ids = args_dict["dataset_id"] except: pass - + if dataset_ids: # Check that the number of submitted data_types matches # the number of dataset_ids (if applicable) len_data_types = len(data_type) len_dataset_ids = len(dataset_ids) - + try: len_data_types == len_dataset_ids except: raise ValueError( - f"There is a mismatch in the number of data_types and dataset_id's that " - f"submitted. Please check your submission and try again." - ) - + f"There is a mismatch in the number of data_types and dataset_id's that " + f"submitted. Please check your submission and try again." + ) + # Raise an error if used in conjunction with datatype = 'all_manifests' try: - data_type[0] != 'all manifests' + data_type[0] != "all manifests" except: raise ValueError( - f"When submitting 'all manifests' as the data_type cannot also submit dataset_id. " - f"Please check your submission and try again." - ) + f"When submitting 'all manifests' as the data_type cannot also submit dataset_id. " + f"Please check your submission and try again." + ) - # Since this function is called in `get_manifest_route`, + # Since this function is called in `get_manifest_route`, # it can use the access_token passed in from there and retain `access_token` as a parameter - def create_single_manifest(data_type, title, dataset_id=None, output_format=None, access_token=None, strict=strict_validation): + def create_single_manifest( + data_type, + title, + dataset_id=None, + output_format=None, + access_token=None, + strict=strict_validation, + ): # create object of type ManifestGenerator manifest_generator = ManifestGenerator( path_to_json_ld=jsonld, title=title, root=data_type, use_annotations=use_annotations, - alphabetize_valid_values = 'ascending', + alphabetize_valid_values="ascending", ) # if returning a dataframe @@ -284,82 +317,124 @@ def create_single_manifest(data_type, title, dataset_id=None, output_format=None output_format = "dataframe" result = manifest_generator.get_manifest( - dataset_id=dataset_id, sheet_url=True, output_format=output_format, access_token=access_token, strict=strict, + dataset_id=dataset_id, + sheet_url=True, + output_format=output_format, + access_token=access_token, + strict=strict, ) # return an excel file if output_format is set to "excel" if output_format == "excel": dir_name = os.path.dirname(result) file_name = os.path.basename(result) - mimetype='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet' - return send_from_directory(directory=dir_name, path=file_name, as_attachment=True, mimetype=mimetype, max_age=0) - + mimetype = ( + "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet" + ) + return send_from_directory( + directory=dir_name, + path=file_name, + as_attachment=True, + mimetype=mimetype, + max_age=0, + ) + return result # Gather all returned result urls all_results = [] - if data_type[0] == 'all manifests': + if data_type[0] == "all manifests": sg = SchemaGenerator(path_to_json_ld=jsonld) - component_digraph = sg.se.get_digraph_by_edge_type('requiresComponent') + component_digraph = sg.se.get_digraph_by_edge_type("requiresComponent") components = component_digraph.nodes() for component in components: if title: - t = f'{title}.{component}.manifest' - else: - t = f'Example.{component}.manifest' + t = f"{title}.{component}.manifest" + else: + t = 
f"Example.{component}.manifest" if output_format != "excel": - result = create_single_manifest(data_type=component, output_format=output_format, title=t, access_token=access_token) + result = create_single_manifest( + data_type=component, + output_format=output_format, + title=t, + access_token=access_token, + ) all_results.append(result) - else: - app.logger.error('Currently we do not support returning multiple files as Excel format at once. Please choose a different output format. ') + else: + app.logger.error( + "Currently we do not support returning multiple files as Excel format at once. Please choose a different output format. " + ) else: for i, dt in enumerate(data_type): - if not title: - t = f'Example.{dt}.manifest' - else: + if not title: + t = f"Example.{dt}.manifest" + else: if len(data_type) > 1: - t = f'{title}.{dt}.manifest' - else: + t = f"{title}.{dt}.manifest" + else: t = title if dataset_ids: # if a dataset_id is provided add this to the function call. - result = create_single_manifest(data_type=dt, dataset_id=dataset_ids[i], output_format=output_format, title=t, access_token=access_token) + result = create_single_manifest( + data_type=dt, + dataset_id=dataset_ids[i], + output_format=output_format, + title=t, + access_token=access_token, + ) else: - result = create_single_manifest(data_type=dt, output_format=output_format, title=t, access_token=access_token) + result = create_single_manifest( + data_type=dt, + output_format=output_format, + title=t, + access_token=access_token, + ) # if output is pandas dataframe or google sheet url if isinstance(result, str) or isinstance(result, pd.DataFrame): all_results.append(result) - else: + else: if len(data_type) > 1: - app.logger.warning(f'Currently we do not support returning multiple files as Excel format at once. Only {t} would get returned. ') + app.logger.warning( + f"Currently we do not support returning multiple files as Excel format at once. Only {t} would get returned. 
" + ) return result return all_results -#####profile validate manifest route function -#@profile(sort_by='cumulative', strip_dirs=True) -def validate_manifest_route(schema_url, data_type, restrict_rules=None, json_str=None, asset_view=None, project_scope = None): + +#####profile validate manifest route function +# @profile(sort_by='cumulative', strip_dirs=True) +def validate_manifest_route( + schema_url, + data_type, + restrict_rules=None, + json_str=None, + asset_view=None, + project_scope=None, +): # Access token now stored in request header access_token = get_access_token() - + # if restrict rules is set to None, default it to False if not restrict_rules: - restrict_rules=False - + restrict_rules = False + # call config_handler() - config_handler(asset_view = asset_view) + config_handler(asset_view=asset_view) - #If restrict_rules parameter is set to None, then default it to False + # If restrict_rules parameter is set to None, then default it to False if not restrict_rules: restrict_rules = False - #Get path to temp file where manifest file contents will be saved + # Get path to temp file where manifest file contents will be saved jsc = JsonConverter() if json_str: - temp_path = jsc.convert_json_str_to_csv(json_str = json_str, file_name = "example_json") - else: + temp_path = jsc.convert_json_str_to_csv( + json_str=json_str, file_name="example_json" + ) + else: temp_path = jsc.convert_json_file_to_csv("file_name") # get path to temporary JSON-LD file @@ -370,33 +445,42 @@ def validate_manifest_route(schema_url, data_type, restrict_rules=None, json_str ) errors, warnings = metadata_model.validateModelManifest( - manifestPath=temp_path, rootNode=data_type, restrict_rules=restrict_rules, project_scope=project_scope, access_token=access_token + manifestPath=temp_path, + rootNode=data_type, + restrict_rules=restrict_rules, + project_scope=project_scope, + access_token=access_token, ) - + res_dict = {"errors": errors, "warnings": warnings} return res_dict -#####profile validate manifest route function -#@profile(sort_by='cumulative', strip_dirs=True) -def submit_manifest_route(schema_url, - asset_view=None, - manifest_record_type=None, - json_str=None, - table_manipulation=None, - data_type=None, - hide_blanks=False, - project_scope=None, - retain_dl_formatting:bool=False - ): + +#####profile validate manifest route function +# @profile(sort_by='cumulative', strip_dirs=True) +def submit_manifest_route( + schema_url, + asset_view=None, + manifest_record_type=None, + json_str=None, + table_manipulation=None, + data_type=None, + hide_blanks=False, + project_scope=None, + table_column_names=None, + annotation_keys=None, +): # call config_handler() - config_handler(asset_view = asset_view) + config_handler(asset_view=asset_view) # convert Json file to CSV if applicable jsc = JsonConverter() if json_str: - temp_path = jsc.convert_json_str_to_csv(json_str = json_str, file_name = "example_json.csv") - else: + temp_path = jsc.convert_json_str_to_csv( + json_str=json_str, file_name="example_json.csv" + ) + else: temp_path = jsc.convert_json_file_to_csv("file_name") # Get/parse parameters from the API @@ -405,54 +489,48 @@ def submit_manifest_route(schema_url, restrict_rules = parse_bool(connexion.request.args["restrict_rules"]) - metadata_model = initalize_metadata_model(schema_url) - - # Access token now stored in request header - access_token = get_access_token() - - - use_schema_label = connexion.request.args["use_schema_label"] - if use_schema_label == 'None': - use_schema_label = True - else: - 
use_schema_label = parse_bool(use_schema_label) - - - retain_dl_formatting = connexion.request.args["retain_dl_formatting"] - if retain_dl_formatting == 'None': - retain_dl_formatting = False - else: - retain_dl_formatting = parse_bool(retain_dl_formatting) - - - if not table_manipulation: + if not table_manipulation: table_manipulation = "replace" if not manifest_record_type: manifest_record_type = "table_file_and_entities" - if data_type == 'None': + if data_type == "None": validate_component = None else: validate_component = data_type + # table_column_names = connexion.request.args["table_column_names"] + if not table_column_names: + table_column_names = "class_label" + + # annotation_keys = connexion.request.args["retain_dl_formatting"] + if not annotation_keys: + annotation_keys = "class_label" + + metadata_model = initalize_metadata_model(schema_url) + + # Access token now stored in request header + access_token = get_access_token() + manifest_id = metadata_model.submit_metadata_manifest( - path_to_json_ld = schema_url, - manifest_path=temp_path, - dataset_id=dataset_id, - validate_component=validate_component, - access_token=access_token, - manifest_record_type = manifest_record_type, - restrict_rules = restrict_rules, + path_to_json_ld=schema_url, + manifest_path=temp_path, + dataset_id=dataset_id, + validate_component=validate_component, + access_token=access_token, + manifest_record_type=manifest_record_type, + restrict_rules=restrict_rules, hide_blanks=hide_blanks, - table_manipulation = table_manipulation, - use_schema_label=use_schema_label, + table_manipulation=table_manipulation, project_scope=project_scope, - retain_dl_formatting=retain_dl_formatting - ) + table_column_names=table_column_names, + annotation_keys=annotation_keys, + ) return manifest_id + def populate_manifest_route(schema_url, title=None, data_type=None, return_excel=None): # call config_handler() config_handler() @@ -462,30 +540,39 @@ def populate_manifest_route(schema_url, title=None, data_type=None, return_excel # Get path to temp file where manifest file contents will be saved temp_path = save_file() - - #Initalize MetadataModel - metadata_model = MetadataModel(inputMModelLocation=jsonld, inputMModelLocationType='local') - #Call populateModelManifest class - populated_manifest_link = metadata_model.populateModelManifest(title=title, manifestPath=temp_path, rootNode=data_type, return_excel=return_excel) + # Initalize MetadataModel + metadata_model = MetadataModel( + inputMModelLocation=jsonld, inputMModelLocationType="local" + ) + + # Call populateModelManifest class + populated_manifest_link = metadata_model.populateModelManifest( + title=title, + manifestPath=temp_path, + rootNode=data_type, + return_excel=return_excel, + ) return populated_manifest_link + def get_storage_projects(asset_view): # Access token now stored in request header access_token = get_access_token() - # call config handler + # call config handler config_handler(asset_view=asset_view) - # use Synapse storage + # use Synapse storage store = SynapseStorage(access_token=access_token) # call getStorageProjects function lst_storage_projects = store.getStorageProjects() - + return lst_storage_projects + def get_storage_projects_datasets(asset_view, project_id): # Access token now stored in request header access_token = get_access_token() @@ -497,10 +584,11 @@ def get_storage_projects_datasets(asset_view, project_id): store = SynapseStorage(access_token=access_token) # call getStorageDatasetsInProject function - sorted_dataset_lst = 
store.getStorageDatasetsInProject(projectId = project_id) - + sorted_dataset_lst = store.getStorageDatasetsInProject(projectId=project_id) + return sorted_dataset_lst + def get_files_storage_dataset(asset_view, dataset_id, full_path, file_names=None): # Access token now stored in request header access_token = get_access_token() @@ -512,18 +600,21 @@ def get_files_storage_dataset(asset_view, dataset_id, full_path, file_names=None store = SynapseStorage(access_token=access_token) # no file names were specified (file_names = ['']) - if file_names and not all(file_names): - file_names=None - + if file_names and not all(file_names): + file_names = None + # call getFilesInStorageDataset function - file_lst = store.getFilesInStorageDataset(datasetId=dataset_id, fileNames=file_names, fullpath=full_path) + file_lst = store.getFilesInStorageDataset( + datasetId=dataset_id, fileNames=file_names, fullpath=full_path + ) return file_lst + def check_if_files_in_assetview(asset_view, entity_id): # Access token now stored in request header access_token = get_access_token() - - # call config handler + + # call config handler config_handler(asset_view=asset_view) # use Synapse Storage @@ -534,25 +625,30 @@ def check_if_files_in_assetview(asset_view, entity_id): return if_exists + def check_entity_type(entity_id): # Access token now stored in request header access_token = get_access_token() - - # call config handler + + # call config handler config_handler() - syn = SynapseStorage.login(access_token = access_token) + syn = SynapseStorage.login(access_token=access_token) entity_type = entity_type_mapping(syn, entity_id) - return entity_type + return entity_type + def get_component_requirements(schema_url, source_component, as_graph): metadata_model = initalize_metadata_model(schema_url) - req_components = metadata_model.get_component_requirements(source_component=source_component, as_graph = as_graph) + req_components = metadata_model.get_component_requirements( + source_component=source_component, as_graph=as_graph + ) return req_components + @cross_origin(["http://localhost", "https://sage-bionetworks.github.io"]) def get_viz_attributes_explorer(schema_url): # call config_handler() @@ -560,23 +656,28 @@ def get_viz_attributes_explorer(schema_url): temp_path_to_jsonld = get_temp_jsonld(schema_url) - attributes_csv = AttributesExplorer(temp_path_to_jsonld).parse_attributes(save_file=False) + attributes_csv = AttributesExplorer(temp_path_to_jsonld).parse_attributes( + save_file=False + ) return attributes_csv + def get_viz_component_attributes_explorer(schema_url, component, include_index): # call config_handler() config_handler() temp_path_to_jsonld = get_temp_jsonld(schema_url) - attributes_csv = AttributesExplorer(temp_path_to_jsonld).parse_component_attributes(component, save_file=False, include_index=include_index) + attributes_csv = AttributesExplorer(temp_path_to_jsonld).parse_component_attributes( + component, save_file=False, include_index=include_index + ) return attributes_csv + @cross_origin(["http://localhost", "https://sage-bionetworks.github.io"]) def get_viz_tangled_tree_text(schema_url, figure_type, text_format): - temp_path_to_jsonld = get_temp_jsonld(schema_url) # Initialize TangledTree @@ -584,12 +685,12 @@ def get_viz_tangled_tree_text(schema_url, figure_type, text_format): # Get text for tangled tree. 
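    # Illustrative note (not from the original code): `text_format` is passed
    # straight through from the query string; the api.yaml enum for this
    # endpoint typically offers values such as "plain" or "highlighted", and
    # with save_file=False the text is returned to the caller rather than
    # written to disk.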
text_df = tangled_tree.get_text_for_tangled_tree(text_format, save_file=False) - + return text_df + @cross_origin(["http://localhost", "https://sage-bionetworks.github.io"]) def get_viz_tangled_tree_layers(schema_url, figure_type): - # call config_handler() config_handler() @@ -597,20 +698,21 @@ def get_viz_tangled_tree_layers(schema_url, figure_type): # Initialize Tangled Tree tangled_tree = TangledTree(temp_path_to_jsonld, figure_type) - + # Get tangled trees layers JSON. layers = tangled_tree.get_tangled_tree_layers(save_file=False) return layers[0] -def download_manifest(manifest_id, new_manifest_name='', as_json=True): + +def download_manifest(manifest_id, new_manifest_name="", as_json=True): """ - Download a manifest based on a given manifest id. + Download a manifest based on a given manifest id. Args: manifest_syn_id: syn id of a manifest newManifestName: new name of a manifest that gets downloaded. as_json: boolean; If true, return a manifest as a json. Default to True - Return: + Return: file path of the downloaded manifest """ # Access token now stored in request header @@ -621,24 +723,25 @@ def download_manifest(manifest_id, new_manifest_name='', as_json=True): # use login method in synapse storage syn = SynapseStorage.login(access_token=access_token) - try: + try: md = ManifestDownload(syn, manifest_id) manifest_data = ManifestDownload.download_manifest(md, new_manifest_name) - #return local file path - manifest_local_file_path = manifest_data['path'] + # return local file path + manifest_local_file_path = manifest_data["path"] except TypeError as e: - raise TypeError(f'Failed to download manifest {manifest_id}.') + raise TypeError(f"Failed to download manifest {manifest_id}.") if as_json: manifest_json = return_as_json(manifest_local_file_path) return manifest_json else: return manifest_local_file_path -#@profile(sort_by='cumulative', strip_dirs=True) -def download_dataset_manifest(dataset_id, asset_view, as_json, new_manifest_name=''): + +# @profile(sort_by='cumulative', strip_dirs=True) +def download_dataset_manifest(dataset_id, asset_view, as_json, new_manifest_name=""): # Access token now stored in request header access_token = get_access_token() - + # call config handler config_handler(asset_view=asset_view) @@ -646,22 +749,25 @@ def download_dataset_manifest(dataset_id, asset_view, as_json, new_manifest_name store = SynapseStorage(access_token=access_token) # download existing file - manifest_data = store.getDatasetManifest(datasetId=dataset_id, downloadFile=True, newManifestName=new_manifest_name) + manifest_data = store.getDatasetManifest( + datasetId=dataset_id, downloadFile=True, newManifestName=new_manifest_name + ) - #return local file path + # return local file path try: - manifest_local_file_path = manifest_data['path'] + manifest_local_file_path = manifest_data["path"] except KeyError as e: - raise KeyError(f'Failed to download manifest from dataset: {dataset_id}') from e + raise KeyError(f"Failed to download manifest from dataset: {dataset_id}") from e - #return a json (if as_json = True) + # return a json (if as_json = True) if as_json: manifest_json = return_as_json(manifest_local_file_path) return manifest_json return manifest_local_file_path + def get_asset_view_table(asset_view, return_type): # Access token now stored in request header access_token = get_access_token() @@ -681,7 +787,7 @@ def get_asset_view_table(asset_view, return_type): return json_res else: path = os.getcwd() - export_path = os.path.join(path, 'tests/data/file_view_table.csv') + 
export_path = os.path.join(path, "tests/data/file_view_table.csv") file_view_table_df.to_csv(export_path, index=False) return export_path @@ -689,7 +795,7 @@ def get_asset_view_table(asset_view, return_type): def get_project_manifests(project_id, asset_view): # Access token now stored in request header access_token = get_access_token() - + # use the default asset view from config config_handler(asset_view=asset_view) @@ -701,10 +807,11 @@ def get_project_manifests(project_id, asset_view): return lst_manifest + def get_manifest_datatype(manifest_id, asset_view): # Access token now stored in request header access_token = get_access_token() - + # use the default asset view from config config_handler(asset_view=asset_view) @@ -712,11 +819,11 @@ def get_manifest_datatype(manifest_id, asset_view): store = SynapseStorage(access_token=access_token) # get data types of an existing manifest - manifest_dtypes_dict= store.getDataTypeFromManifest(manifest_id) - + manifest_dtypes_dict = store.getDataTypeFromManifest(manifest_id) return manifest_dtypes_dict + def get_schema_pickle(schema_url): # load schema se = SchemaExplorer() @@ -728,9 +835,9 @@ def get_schema_pickle(schema_url): # write to local pickle file path = os.getcwd() - export_path = os.path.join(path, 'tests/data/schema.gpickle') + export_path = os.path.join(path, "tests/data/schema.gpickle") - with open(export_path, 'wb') as file: + with open(export_path, "wb") as file: pickle.dump(schema_graph, file) return export_path @@ -741,13 +848,13 @@ def get_subgraph_by_edge_type(schema_url, relationship): se = SchemaExplorer() se.load_schema(schema_url) - # get the schema graph + # get the schema graph schema_graph = se.get_nx_schema() # relationship subgraph relationship_subgraph = sg.get_subgraph_by_edge_type(schema_graph, relationship) - # return relationship + # return relationship Arr = [] for t in relationship_subgraph.edges: lst = list(t) @@ -773,7 +880,7 @@ def get_node_dependencies( schema_url: str, source_node: str, return_display_names: bool = True, - return_schema_ordered: bool = True + return_schema_ordered: bool = True, ) -> list[str]: """Get the immediate dependencies that are related to a given source node. @@ -800,9 +907,7 @@ def get_node_dependencies( def get_property_label_from_display_name( - schema_url: str, - display_name: str, - strict_camel_case: bool = False + schema_url: str, display_name: str, strict_camel_case: bool = False ) -> str: """Converts a given display name string into a proper property label string @@ -817,14 +922,14 @@ def get_property_label_from_display_name( """ explorer = SchemaExplorer() explorer.load_schema(schema_url) - label = explorer.get_property_label_from_display_name(display_name, strict_camel_case) + label = explorer.get_property_label_from_display_name( + display_name, strict_camel_case + ) return label def get_node_range( - schema_url: str, - node_label: str, - return_display_names: bool = True + schema_url: str, node_label: str, return_display_names: bool = True ) -> list[str]: """Get the range, i.e., all the valid values that are associated with a node label. 
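# Illustrative sketch (not part of this patch; the schema URL is hypothetical)
# of how the two label helpers used by these routes differ:
#
#     explorer = SchemaExplorer()
#     explorer.load_schema("https://example.com/model.jsonld")
#     explorer.get_property_label_from_display_name("Family History")
#     # property labels are lower camelcase -> "familyHistory"
#     explorer.get_class_label_from_display_name("Family History")
#     # class labels are upper camelcase -> "FamilyHistory"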
@@ -841,6 +946,7 @@ def get_node_range( node_range = gen.get_node_range(node_label, return_display_names) return node_range + def get_if_node_required(schema_url: str, node_display_name: str) -> bool: """Check if the node is required @@ -857,6 +963,7 @@ def get_if_node_required(schema_url: str, node_display_name: str) -> bool: return is_required + def get_node_validation_rules(schema_url: str, node_display_name: str) -> list: """ Args: @@ -870,13 +977,14 @@ def get_node_validation_rules(schema_url: str, node_display_name: str) -> list: return node_validation_rules + def get_nodes_display_names(schema_url: str, node_list: list[str]) -> list: """From a list of node labels retrieve their display names, return as list. - + Args: schema_url (str): Data Model URL node_list (List[str]): List of node labels. - + Returns: node_display_names (List[str]): List of node display names. @@ -886,6 +994,7 @@ def get_nodes_display_names(schema_url: str, node_list: list[str]) -> list: node_display_names = gen.get_nodes_display_names(node_list, mm_graph) return node_display_names + def get_schematic_version() -> str: """ Return the current version of schematic From 100ccddda3459561bf0fa461da52d67485667722 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 4 Dec 2023 16:28:32 -0800 Subject: [PATCH 035/199] run black formatter --- schematic/models/commands.py | 89 +- schematic/models/metadata.py | 141 ++-- schematic/store/synapse.py | 1495 ++++++++++++++++++++-------------- 3 files changed, 1043 insertions(+), 682 deletions(-) diff --git a/schematic/models/commands.py b/schematic/models/commands.py index ef6fd1458..0c1e6e8a3 100644 --- a/schematic/models/commands.py +++ b/schematic/models/commands.py @@ -11,16 +11,22 @@ from jsonschema import ValidationError from schematic.models.metadata import MetadataModel -from schematic.utils.cli_utils import log_value_from_config, query_dict, parse_synIDs, parse_comma_str_to_list +from schematic.utils.cli_utils import ( + log_value_from_config, + query_dict, + parse_synIDs, + parse_comma_str_to_list, +) from schematic.help import model_commands from schematic.exceptions import MissingConfigValueError from schematic.configuration.configuration import CONFIG -logger = logging.getLogger('schematic') +logger = logging.getLogger("schematic") click_log.basic_config(logger) CONTEXT_SETTINGS = dict(help_option_names=["--help", "-h"]) # help options + # invoke_without_command=True -> forces the application not to show aids before losing them with a --h @click.group(context_settings=CONTEXT_SETTINGS, invoke_without_command=True) @click_log.simple_verbosity_option(logger) @@ -39,7 +45,7 @@ def model(ctx, config): # use as `schematic model ...` try: logger.debug(f"Loading config file contents in '{config}'") CONFIG.load_config(config) - ctx.obj = CONFIG + ctx.obj = CONFIG except ValueError as e: logger.error("'--config' not provided or environment variable not set.") logger.exception(e) @@ -66,24 +72,27 @@ def model(ctx, config): # use as `schematic model ...` "--validate_component", help=query_dict(model_commands, ("model", "submit", "validate_component")), ) - @click.option( "--hide_blanks", "-hb", is_flag=True, - help=query_dict(model_commands,("model","submit","hide_blanks")), + help=query_dict(model_commands, ("model", "submit", "hide_blanks")), ) @click.option( "--manifest_record_type", "-mrt", - default='table_file_and_entities', - type=click.Choice(['table_and_file', 'file_only', 'file_and_entities', 'table_file_and_entities'], case_sensitive=True), - 
help=query_dict(model_commands, ("model", "submit", "manifest_record_type"))) + default="table_file_and_entities", + type=click.Choice( + ["table_and_file", "file_only", "file_and_entities", "table_file_and_entities"], + case_sensitive=True, + ), + help=query_dict(model_commands, ("model", "submit", "manifest_record_type")), +) @click.option( "-rr", "--restrict_rules", is_flag=True, - help=query_dict(model_commands,("model","validate","restrict_rules")), + help=query_dict(model_commands, ("model", "validate", "restrict_rules")), ) @click.option( "-ps", @@ -95,53 +104,65 @@ def model(ctx, config): # use as `schematic model ...` @click.option( "--table_manipulation", "-tm", - default='replace', - type=click.Choice(['replace', 'upsert'], case_sensitive=True), - help=query_dict(model_commands, ("model", "submit", "table_manipulation"))) + default="replace", + type=click.Choice(["replace", "upsert"], case_sensitive=True), + help=query_dict(model_commands, ("model", "submit", "table_manipulation")), +) @click.option( "--table_column_names", "-tcn", - default='class_label', - type=click.Choice(['class_label', 'display_label', 'display_name'], case_sensitive=True), + default="class_label", + type=click.Choice( + ["class_label", "display_label", "display_name"], case_sensitive=True + ), help=query_dict(model_commands, ("model", "submit", "table_column_names")), ) @click.option( "--annotation_keys", "-ak", - default='class_label', - type=click.Choice(['class_label', 'display_label'], case_sensitive=True), + default="class_label", + type=click.Choice(["class_label", "display_label"], case_sensitive=True), help=query_dict(model_commands, ("model", "submit", "annotation_keys")), ) @click.pass_obj def submit_manifest( - ctx, manifest_path, dataset_id, validate_component, manifest_record_type, hide_blanks, restrict_rules, project_scope, table_manipulation, table_column_names, annotation_keys, + ctx, + manifest_path, + dataset_id, + validate_component, + manifest_record_type, + hide_blanks, + restrict_rules, + project_scope, + table_manipulation, + table_column_names, + annotation_keys, ): """ Running CLI with manifest validation (optional) and submission options. 
""" - - jsonld = CONFIG.model_location + + jsonld = CONFIG.model_location log_value_from_config("jsonld", jsonld) metadata_model = MetadataModel( inputMModelLocation=jsonld, inputMModelLocationType="local" ) - manifest_id = metadata_model.submit_metadata_manifest( - path_to_json_ld = jsonld, + path_to_json_ld=jsonld, manifest_path=manifest_path, dataset_id=dataset_id, validate_component=validate_component, manifest_record_type=manifest_record_type, restrict_rules=restrict_rules, - table_column_names=table_column_names, hide_blanks=hide_blanks, project_scope=project_scope, table_manipulation=table_manipulation, + table_column_names=table_column_names, annotation_keys=annotation_keys, ) - + if manifest_id: logger.info( f"File at '{manifest_path}' was successfully associated " @@ -177,7 +198,7 @@ def submit_manifest( "-rr", "--restrict_rules", is_flag=True, - help=query_dict(model_commands,("model","validate","restrict_rules")), + help=query_dict(model_commands, ("model", "validate", "restrict_rules")), ) @click.option( "-ps", @@ -187,14 +208,16 @@ def submit_manifest( help=query_dict(model_commands, ("model", "validate", "project_scope")), ) @click.pass_obj -def validate_manifest(ctx, manifest_path, data_type, json_schema, restrict_rules,project_scope): +def validate_manifest( + ctx, manifest_path, data_type, json_schema, restrict_rules, project_scope +): """ Running CLI for manifest validation. """ if data_type is None: - data_type = CONFIG.manifest_data_type + data_type = CONFIG.manifest_data_type log_value_from_config("data_type", data_type) - + try: len(data_type) == 1 except: @@ -206,7 +229,7 @@ def validate_manifest(ctx, manifest_path, data_type, json_schema, restrict_rules t_validate = perf_counter() - jsonld = CONFIG.model_location + jsonld = CONFIG.model_location log_value_from_config("jsonld", jsonld) metadata_model = MetadataModel( @@ -214,7 +237,11 @@ def validate_manifest(ctx, manifest_path, data_type, json_schema, restrict_rules ) errors, warnings = metadata_model.validateModelManifest( - manifestPath=manifest_path, rootNode=data_type, jsonSchema=json_schema, restrict_rules=restrict_rules, project_scope=project_scope, + manifestPath=manifest_path, + rootNode=data_type, + jsonSchema=json_schema, + restrict_rules=restrict_rules, + project_scope=project_scope, ) if not errors: @@ -226,6 +253,4 @@ def validate_manifest(ctx, manifest_path, data_type, json_schema, restrict_rules else: click.echo(errors) - logger.debug( - f"Total elapsed time {perf_counter()-t_validate} seconds" - ) + logger.debug(f"Total elapsed time {perf_counter()-t_validate} seconds") diff --git a/schematic/models/metadata.py b/schematic/models/metadata.py index 67868d154..37b05de3a 100644 --- a/schematic/models/metadata.py +++ b/schematic/models/metadata.py @@ -6,7 +6,13 @@ import pandas as pd import re import networkx as nx -from jsonschema import Draft7Validator, exceptions, validate, ValidationError, FormatError +from jsonschema import ( + Draft7Validator, + exceptions, + validate, + ValidationError, + FormatError, +) from os.path import exists # allows specifying explicit variable types @@ -19,7 +25,7 @@ from schematic.manifest.generator import ManifestGenerator from schematic.schemas.generator import SchemaGenerator -#TODO: This module should only be aware of the store interface +# TODO: This module should only be aware of the store interface # we shouldn't need to expose Synapse functionality explicitly from schematic.store.synapse import SynapseStorage @@ -43,8 +49,11 @@ class MetadataModel(object): - 
generate validation schema view of the metadata model """ - def __init__(self, inputMModelLocation: str, inputMModelLocationType: str,) -> None: - + def __init__( + self, + inputMModelLocation: str, + inputMModelLocationType: str, + ) -> None: """Instantiates a MetadataModel object. Args: @@ -186,7 +195,13 @@ def get_component_requirements( # TODO: abstract validation in its own module def validateModelManifest( - self, manifestPath: str, rootNode: str, restrict_rules: bool = False, jsonSchema: str = None, project_scope: List = None, access_token: str = None, + self, + manifestPath: str, + rootNode: str, + restrict_rules: bool = False, + jsonSchema: str = None, + project_scope: List = None, + access_token: str = None, ) -> List[str]: """Check if provided annotations manifest dataframe satisfies all model requirements. @@ -203,7 +218,7 @@ def validateModelManifest( ValueError: rootNode not found in metadata model. """ # get validation schema for a given node in the data model, if the user has not provided input validation schema - + if not jsonSchema: jsonSchema = self.sg.get_json_schema_requirements( rootNode, rootNode + "_validation" @@ -212,12 +227,14 @@ def validateModelManifest( errors = [] warnings = [] - load_args={ - "dtype":"string", - } + load_args = { + "dtype": "string", + } # get annotations from manifest (array of json annotations corresponding to manifest rows) manifest = load_df( - manifestPath, preserve_raw_input=False, **load_args, + manifestPath, + preserve_raw_input=False, + **load_args, ) # read manifest csv file as is from manifest path # handler for mismatched components/data types @@ -251,19 +268,23 @@ def validateModelManifest( return errors, warnings - errors, warnings, manifest = validate_all(self, - errors=errors, - warnings=warnings, - manifest=manifest, - manifestPath=manifestPath, - sg=self.sg, - jsonSchema=jsonSchema, - restrict_rules=restrict_rules, - project_scope=project_scope, - access_token=access_token) + errors, warnings, manifest = validate_all( + self, + errors=errors, + warnings=warnings, + manifest=manifest, + manifestPath=manifestPath, + sg=self.sg, + jsonSchema=jsonSchema, + restrict_rules=restrict_rules, + project_scope=project_scope, + access_token=access_token, + ) return errors, warnings - def populateModelManifest(self, title, manifestPath: str, rootNode: str, return_excel = False) -> str: + def populateModelManifest( + self, title, manifestPath: str, rootNode: str, return_excel=False + ) -> str: """Populate an existing annotations manifest based on a dataframe. TODO: Remove this method; always use getModelManifest instead @@ -283,7 +304,9 @@ def populateModelManifest(self, title, manifestPath: str, rootNode: str, return_ emptyManifestURL = mg.get_manifest() - return mg.populate_manifest_spreadsheet(manifestPath, emptyManifestURL, return_excel = return_excel, title=title) + return mg.populate_manifest_spreadsheet( + manifestPath, emptyManifestURL, return_excel=return_excel, title=title + ) def submit_metadata_manifest( self, @@ -297,8 +320,9 @@ def submit_metadata_manifest( hide_blanks: bool = False, access_token: str = None, project_scope: List = None, - table_manipulation: str = 'replace', - retain_dl_formatting: bool = False, + table_manipulation: str = "replace", + table_column_names: str = "class_label", + annotation_keys: str = "class_label", ) -> string: """Wrap methods that are responsible for validation of manifests for a given component, and association of the same manifest file with a specified dataset. 
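The hunks that follow swap the old useSchemaLabel/retain_dl_formatting arguments for table_column_names and annotation_keys. A minimal sketch of the updated call, assuming a local model; the model path, manifest path, dataset id, and component name are placeholders, not values from this patch:

    # Hypothetical invocation; parameter names come from the new signature above.
    metadata_model = MetadataModel(
        inputMModelLocation="model.jsonld", inputMModelLocationType="local"
    )
    manifest_id = metadata_model.submit_metadata_manifest(
        path_to_json_ld="model.jsonld",
        manifest_path="manifest.csv",
        dataset_id="syn00000000",          # placeholder Synapse dataset id
        validate_component="Patient",      # hypothetical component name
        manifest_record_type="table_file_and_entities",
        restrict_rules=False,
        hide_blanks=False,
        table_manipulation="replace",
        table_column_names="class_label",
        annotation_keys="class_label",
    )
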
@@ -313,17 +337,18 @@ def submit_metadata_manifest(
 ValidationError: If validation against data model was not successful.
 """
- #TODO: avoid explicitly exposing Synapse store functionality
+ # TODO: avoid explicitly exposing Synapse store functionality
 # just instantiate a Store class and let it decide at runtime/config
 # the store type
- syn_store = SynapseStorage(access_token = access_token, project_scope = project_scope)
- manifest_id=None
- censored_manifest_id=None
- restrict_manifest=False
- censored_manifest_path=manifest_path.replace('.csv','_censored.csv')
+ syn_store = SynapseStorage(
+ access_token=access_token, project_scope=project_scope
+ )
+ manifest_id = None
+ censored_manifest_id = None
+ restrict_manifest = False
+ censored_manifest_path = manifest_path.replace(".csv", "_censored.csv")
 # check if user wants to perform validation or not
 if validate_component is not None:
-
 try:
 # check if the component ("class" in schema) passed as argument is valid (present in schema) or not
 self.sg.se.is_class_in_schema(validate_component)
@@ -338,40 +363,44 @@ def submit_metadata_manifest(
 # automatic JSON schema generation and validation with that JSON schema
 val_errors, val_warnings = self.validateModelManifest(
- manifestPath=manifest_path, rootNode=validate_component, restrict_rules=restrict_rules, project_scope=project_scope, access_token=access_token
+ manifestPath=manifest_path,
+ rootNode=validate_component,
+ restrict_rules=restrict_rules,
+ project_scope=project_scope,
+ access_token=access_token,
 )
 # if there are no errors in validation process
- if val_errors == []:
+ if val_errors == []:
 # upload manifest file from `manifest_path` path to entity with Syn ID `dataset_id`
 if exists(censored_manifest_path):
 censored_manifest_id = syn_store.associateMetadataWithFiles(
- schemaGenerator = self.sg,
- metadataManifestPath = censored_manifest_path,
- datasetId = dataset_id,
- manifest_record_type = manifest_record_type,
- useSchemaLabel = use_schema_label,
- hideBlanks = hide_blanks,
+ schemaGenerator=self.sg,
+ metadataManifestPath=censored_manifest_path,
+ datasetId=dataset_id,
+ manifest_record_type=manifest_record_type,
+ hideBlanks=hide_blanks,
 table_manipulation=table_manipulation,
- retain_dl_formatting=retain_dl_formatting,
+ table_column_names=table_column_names,
+ annotation_keys=annotation_keys,
 )
 restrict_manifest = True
+
 manifest_id = syn_store.associateMetadataWithFiles(
- schemaGenerator = self.sg,
- metadataManifestPath = manifest_path,
- datasetId = dataset_id,
- manifest_record_type = manifest_record_type,
- useSchemaLabel = use_schema_label,
- hideBlanks = hide_blanks,
+ schemaGenerator=self.sg,
+ metadataManifestPath=manifest_path,
+ datasetId=dataset_id,
+ manifest_record_type=manifest_record_type,
+ hideBlanks=hide_blanks,
 restrict_manifest=restrict_manifest,
 table_manipulation=table_manipulation,
- retain_dl_formatting=retain_dl_formatting,
+ table_column_names=table_column_names,
+ annotation_keys=annotation_keys,
 )
 logger.info(f"No validation errors occurred during validation.")
 return manifest_id
-
+
 else:
 raise ValidationError(
 "Manifest could not be validated under provided data model. "
@@ -381,27 +410,27 @@ def submit_metadata_manifest(
 # no need to perform validation, just submit/associate the metadata manifest file
 if exists(censored_manifest_path):
 censored_manifest_id = syn_store.associateMetadataWithFiles(
- schemaGenerator = self.sg,
+ schemaGenerator=self.sg,
 metadataManifestPath=censored_manifest_path,
 datasetId=dataset_id,
 manifest_record_type=manifest_record_type,
- useSchemaLabel=use_schema_label,
 hideBlanks=hide_blanks,
 table_manipulation=table_manipulation,
- retain_dl_formatting=retain_dl_formatting,
+ table_column_names=table_column_names,
+ annotation_keys=annotation_keys,
 )
 restrict_manifest = True
-
+
 manifest_id = syn_store.associateMetadataWithFiles(
- schemaGenerator = self.sg,
+ schemaGenerator=self.sg,
 metadataManifestPath=manifest_path,
 datasetId=dataset_id,
 manifest_record_type=manifest_record_type,
- useSchemaLabel=use_schema_label,
 hideBlanks=hide_blanks,
 restrict_manifest=restrict_manifest,
 table_manipulation=table_manipulation,
- retain_dl_formatting=retain_dl_formatting,
+ table_column_names=table_column_names,
+ annotation_keys=annotation_keys,
 )
 logger.debug(
diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py
index ef713e916..bf985e67a 100644
--- a/schematic/store/synapse.py
+++ b/schematic/store/synapse.py
@@ -12,7 +12,13 @@
 # allows specifying explicit variable types
 from typing import Dict, List, Tuple, Sequence, Union, Optional
 from collections import OrderedDict
-from tenacity import retry, stop_after_attempt, wait_chain, wait_fixed, retry_if_exception_type
+from tenacity import (
+ retry,
+ stop_after_attempt,
+ wait_chain,
+ wait_fixed,
+ retry_if_exception_type,
+)
 import numpy as np
 import pandas as pd
@@ -34,7 +40,11 @@
 from synapseclient.entity import File
 from synapseclient.table import CsvFileTable, build_table, Schema
 from synapseclient.annotations import from_synapse_annotations
-from synapseclient.core.exceptions import SynapseHTTPError, SynapseAuthenticationError, SynapseUnmetAccessRestrictions
+from synapseclient.core.exceptions import (
+ SynapseHTTPError,
+ SynapseAuthenticationError,
+ SynapseUnmetAccessRestrictions,
+)
 import synapseutils
 from synapseutils.copy_functions import changeFileMetaData
@@ -45,7 +55,14 @@
 from schematic.utils.df_utils import update_df, load_df, col_in_dataframe
 from schematic.utils.validate_utils import comma_separated_list_regex, rule_in_rule_list
-from schematic.utils.general import entity_type_mapping, get_dir_size, convert_gb_to_bytes, create_temp_folder, check_synapse_cache_size, clear_synapse_cache
+from schematic.utils.general import (
+ entity_type_mapping,
+ get_dir_size,
+ convert_gb_to_bytes,
+ create_temp_folder,
+ check_synapse_cache_size,
+ clear_synapse_cache,
+)
 from schematic.schemas.explorer import SchemaExplorer
 from schematic.schemas.generator import SchemaGenerator
 from schematic.store.base import BaseStorage
@@ -57,12 +74,14 @@
 logger = logging.getLogger("Synapse storage")
+
@dataclass
class ManifestDownload(object):
 """
 syn: an object of type synapseclient.
- manifest_id: id of a manifest + manifest_id: id of a manifest """ + syn: synapseclient.Synapse manifest_id: str @@ -84,33 +103,37 @@ def _download_manifest_to_folder(self) -> File: # create temporary folders for storing manifests download_location = create_temp_folder(temporary_manifest_storage) else: - download_location=CONFIG.manifest_folder + download_location = CONFIG.manifest_folder manifest_data = self.syn.get( - self.manifest_id, - downloadLocation=download_location, - ifcollision="overwrite.local", - ) + self.manifest_id, + downloadLocation=download_location, + ifcollision="overwrite.local", + ) return manifest_data def _entity_type_checking(self) -> str: """ check the entity type of the id that needs to be downloaded - Return: + Return: if the entity type is wrong, raise an error """ # check the type of entity entity_type = entity_type_mapping(self.syn, self.manifest_id) - if entity_type != "file": - logger.error(f'You are using entity type: {entity_type}. Please provide a file ID') + if entity_type != "file": + logger.error( + f"You are using entity type: {entity_type}. Please provide a file ID" + ) @staticmethod - def download_manifest(self, newManifestName: str="", manifest_df: pd.DataFrame=pd.DataFrame()) -> Union[str,File]: + def download_manifest( + self, newManifestName: str = "", manifest_df: pd.DataFrame = pd.DataFrame() + ) -> Union[str, File]: """ - Download a manifest based on a given manifest id. + Download a manifest based on a given manifest id. Args: newManifestName(optional): new name of a manifest that gets downloaded. manifest_df(optional): a dataframe containing name and id of manifests in a given asset view - Return: + Return: manifest_data: synapse entity file object """ @@ -124,36 +147,44 @@ def download_manifest(self, newManifestName: str="", manifest_df: pd.DataFrame=p # download a manifest try: manifest_data = self._download_manifest_to_folder() - except(SynapseUnmetAccessRestrictions, SynapseAuthenticationError): + except (SynapseUnmetAccessRestrictions, SynapseAuthenticationError): # if there's an error getting an uncensored manifest, try getting the censored manifest if not manifest_df.empty: - censored_regex=re.compile('.*censored.*') - censored = manifest_df['name'].str.contains(censored_regex) - new_manifest_id=manifest_df[censored]["id"][0] + censored_regex = re.compile(".*censored.*") + censored = manifest_df["name"].str.contains(censored_regex) + new_manifest_id = manifest_df[censored]["id"][0] self.manifest_id = new_manifest_id - try: + try: manifest_data = self._download_manifest_to_folder() - except (SynapseUnmetAccessRestrictions, SynapseAuthenticationError) as e: - raise PermissionError("You don't have access to censored and uncensored manifests in this dataset.") from e + except ( + SynapseUnmetAccessRestrictions, + SynapseAuthenticationError, + ) as e: + raise PermissionError( + "You don't have access to censored and uncensored manifests in this dataset." + ) from e else: - logger.error(f"You don't have access to the requested resource: {self.manifest_id}") + logger.error( + f"You don't have access to the requested resource: {self.manifest_id}" + ) - if newManifestName and os.path.exists(manifest_data.get('path')): + if newManifestName and os.path.exists(manifest_data.get("path")): # Rename the file we just made to the new name - new_manifest_filename = newManifestName + '.csv' + new_manifest_filename = newManifestName + ".csv" # get location of existing manifest. 
The manifest that will be renamed should live in the same folder as existing manifest. - parent_folder = os.path.dirname(manifest_data.get('path')) + parent_folder = os.path.dirname(manifest_data.get("path")) new_manifest_path_name = os.path.join(parent_folder, new_manifest_filename) - os.rename(manifest_data['path'], new_manifest_path_name) + os.rename(manifest_data["path"], new_manifest_path_name) # Update file names/paths in manifest_data - manifest_data['name'] = new_manifest_filename - manifest_data['filename'] = new_manifest_filename - manifest_data['path'] = new_manifest_path_name + manifest_data["name"] = new_manifest_filename + manifest_data["filename"] = new_manifest_filename + manifest_data["path"] = new_manifest_path_name return manifest_data + class SynapseStorage(BaseStorage): """Implementation of Storage interface for datasets/files stored on Synapse. Provides utilities to list files in a specific project; update files annotations, create fileviews, etc. @@ -191,24 +222,28 @@ def __init__( def _purge_synapse_cache(self, maximum_storage_allowed_cache_gb=1): """ - Purge synapse cache if it exceeds a certain size. Default to 1GB. + Purge synapse cache if it exceeds a certain size. Default to 1GB. Args: - maximum_storage_allowed_cache_gb: the maximum storage allowed before purging cache. Default is 1 GB. + maximum_storage_allowed_cache_gb: the maximum storage allowed before purging cache. Default is 1 GB. """ # try clearing the cache # scan a directory and check size of files if os.path.exists(self.root_synapse_cache): - maximum_storage_allowed_cache_bytes = convert_gb_to_bytes(maximum_storage_allowed_cache_gb) + maximum_storage_allowed_cache_bytes = convert_gb_to_bytes( + maximum_storage_allowed_cache_gb + ) nbytes = get_dir_size(self.root_synapse_cache) dir_size_bytes = check_synapse_cache_size(directory=self.root_synapse_cache) # if 1 GB has already been taken, purge cache before 15 min if dir_size_bytes >= maximum_storage_allowed_cache_bytes: num_of_deleted_files = clear_synapse_cache(self.syn.cache, minutes=15) - logger.info(f'{num_of_deleted_files} files have been deleted from {self.root_synapse_cache}') + logger.info( + f"{num_of_deleted_files} files have been deleted from {self.root_synapse_cache}" + ) else: # on AWS, OS takes around 14-17% of our ephemeral storage (20GiB) # instead of guessing how much space that we left, print out .synapseCache here - logger.info(f'the total size of .synapseCache is: {nbytes} bytes') + logger.info(f"the total size of .synapseCache is: {nbytes} bytes") def _query_fileview(self): self._purge_synapse_cache() @@ -218,14 +253,14 @@ def _query_fileview(self): if self.project_scope: self.storageFileviewTable = self.syn.tableQuery( f"SELECT * FROM {self.storageFileview} WHERE projectId IN {tuple(self.project_scope + [''])}" - ).asDataFrame() + ).asDataFrame() else: # get data in administrative fileview for this pipeline self.storageFileviewTable = self.syn.tableQuery( "SELECT * FROM " + self.storageFileview ).asDataFrame() except SynapseHTTPError: - raise AccessCredentialsError(self.storageFileview) + raise AccessCredentialsError(self.storageFileview) @staticmethod def login(token=None, access_token=None): @@ -246,7 +281,9 @@ def login(token=None, access_token=None): syn = synapseclient.Synapse() syn.default_headers["Authorization"] = f"Bearer {access_token}" except synapseclient.core.exceptions.SynapseHTTPError: - raise ValueError("No access to resources. 
Please make sure that your token is correct") + raise ValueError( + "No access to resources. Please make sure that your token is correct" + ) else: # login using synapse credentials provided by user in .synapseConfig (default) file syn = synapseclient.Synapse(configPath=CONFIG.synapse_configuration_path) @@ -257,18 +294,18 @@ def missing_entity_handler(method): def wrapper(*args, **kwargs): try: return method(*args, **kwargs) - except(SynapseHTTPError) as ex: - str_message = str(ex).replace("\n","") - if 'trash' in str_message or 'does not exist' in str_message: + except SynapseHTTPError as ex: + str_message = str(ex).replace("\n", "") + if "trash" in str_message or "does not exist" in str_message: logging.warning(str_message) return None else: raise ex + return wrapper def getStorageFileviewTable(self): - """ Returns the storageFileviewTable obtained during initialization. - """ + """Returns the storageFileviewTable obtained during initialization.""" return self.storageFileviewTable def getPaginatedRestResults(self, currentUserId: str) -> Dict[str, str]: @@ -330,7 +367,7 @@ def getStorageProjects(self, project_scope: List = None) -> List[str]: # find set of user projects that are also in this pipeline's storage projects set storageProjects = list(set(storageProjects) & set(currentUserProjects)) - + # Limit projects to scope if specified if project_scope: storageProjects = list(set(storageProjects) & set(project_scope)) @@ -339,7 +376,7 @@ def getStorageProjects(self, project_scope: List = None) -> List[str]: raise Warning( f"There are no projects that the user has access to that match the criteria of the specified project scope: {project_scope}" ) - + # prepare a return list of project IDs and names projects = [] for projectId in storageProjects: @@ -412,20 +449,19 @@ def getFilesInStorageDataset( ValueError: Dataset ID not found. """ # select all files within a given storage dataset folder (top level folder in a Synapse storage project or folder marked with contentType = 'dataset') - walked_path = synapseutils.walk(self.syn, datasetId, includeTypes=["folder", "file"]) + walked_path = synapseutils.walk( + self.syn, datasetId, includeTypes=["folder", "file"] + ) file_list = [] # iterate over all results for dirpath, dirname, filenames in walked_path: - # iterate over all files in a folder for filename in filenames: - if (not "manifest" in filename[0] and not fileNames) or ( fileNames and filename[0] in fileNames ): - # don't add manifest to list of files unless it is specified in the list of specified fileNames; return all found files # except the manifest if no fileNames have been specified # TODO: refactor for clarity/maintainability @@ -440,39 +476,42 @@ def getFilesInStorageDataset( return file_list def _get_manifest_id(self, manifest: pd.DataFrame) -> str: - """If both censored and uncensored manifests are present, return uncensored manifest; if only one manifest is present, return manifest id of that manifest; if more than two manifests are present, return the manifest id of the first one. + """If both censored and uncensored manifests are present, return uncensored manifest; if only one manifest is present, return manifest id of that manifest; if more than two manifests are present, return the manifest id of the first one. 
Args: manifest: a dataframe containing names and ids of manifests in a given asset view
- Return:
+ Return:
 manifest_syn_id: id of a given censored or uncensored manifest
- """
- censored_regex=re.compile('.*censored.*')
- censored = manifest['name'].str.contains(censored_regex)
+ """
+ censored_regex = re.compile(".*censored.*")
+ censored = manifest["name"].str.contains(censored_regex)
 if any(censored):
 # Try to use uncensored manifest first
- not_censored=~censored
+ not_censored = ~censored
 if any(not_censored):
- manifest_syn_id=manifest[not_censored]["id"][0]
+ manifest_syn_id = manifest[not_censored]["id"][0]
 # if only censored manifests are available, just use the first censored manifest
- else:
+ else:
 manifest_syn_id = manifest["id"][0]
- #otherwise, use the first (implied only) version that exists
+ # otherwise, use the first (implied only) version that exists
 else:
 manifest_syn_id = manifest["id"][0]
-
+
 return manifest_syn_id

 def getDatasetManifest(
- self, datasetId: str, downloadFile: bool = False, newManifestName: str='',
+ self,
+ datasetId: str,
+ downloadFile: bool = False,
+ newManifestName: str = "",
 ) -> Union[str, File]:
 """Gets the manifest associated with a given dataset.

 Args:
 datasetId: synapse ID of a storage dataset.
 downloadFile: boolean argument indicating if manifest file in dataset should be downloaded or not.
- newManifestName: new name of a manifest that gets downloaded
+ newManifestName: new name of a manifest that gets downloaded

 Returns:
 manifest_syn_id (String): Synapse ID of existing manifest file.
@@ -484,104 +523,121 @@ def getDatasetManifest(
 # get a list of files containing the manifest for this dataset (if any)
 all_files = self.storageFileviewTable

- # construct regex based on manifest basename in the config
- manifest_re=re.compile(os.path.basename(self.manifest)+".*.[tc]sv")
+ # construct regex based on manifest basename in the config
+ manifest_re = re.compile(os.path.basename(self.manifest) + ".*.[tc]sv")

 # search manifest based on given manifest basename regex above
 # and return a dataframe containing name and id of manifests in a given asset view
 manifest = all_files[
- (all_files['name'].str.contains(manifest_re,regex=True))
+ (all_files["name"].str.contains(manifest_re, regex=True))
 & (all_files["parentId"] == datasetId)
 ]

 manifest = manifest[["id", "name"]]
-
+
 # if there is no pre-existing manifest in the specified dataset
 if manifest.empty:
- logger.warning(f"Could not find a manifest that fits basename {self.manifest} in asset view and dataset {datasetId}")
+ logger.warning(
+ f"Could not find a manifest that fits basename {self.manifest} in asset view and dataset {datasetId}"
+ )
 return ""

 # if there is an existing manifest
 else:
 manifest_syn_id = self._get_manifest_id(manifest)
- if downloadFile:
+ if downloadFile:
 md = ManifestDownload(self.syn, manifest_id=manifest_syn_id)
- manifest_data = ManifestDownload.download_manifest(md, newManifestName=newManifestName, manifest_df=manifest)
- ## TO DO: revisit how downstream code handles manifest_data. If the downstream code would break when manifest_data is an empty string,
- ## then we should catch the error here without returning an empty string.
+ manifest_data = ManifestDownload.download_manifest(
+ md, newManifestName=newManifestName, manifest_df=manifest
+ )
+ ## TO DO: revisit how downstream code handles manifest_data. If the downstream code would break when manifest_data is an empty string,
+ ## then we should catch the error here without returning an empty string.
if not manifest_data:
- logger.debug(f"No manifest data returned. Please check if you have successfully downloaded manifest: {manifest_syn_id}")
+ logger.debug(
+ f"No manifest data returned. Please check if you have successfully downloaded manifest: {manifest_syn_id}"
+ )
 return manifest_data
 return manifest_syn_id

- def getDataTypeFromManifest(self, manifestId:str):
+ def getDataTypeFromManifest(self, manifestId: str):
 """Fetch a manifest and return data types of all columns
- Args:
+ Args:
 manifestId: synapse ID of a manifest
 """
- # get manifest file path
+ # get manifest file path
 manifest_filepath = self.syn.get(manifestId).path

- # load manifest dataframe
- manifest = load_df(manifest_filepath, preserve_raw_input=False, data_model=False)
+ # load manifest dataframe
+ manifest = load_df(
+ manifest_filepath, preserve_raw_input=False, data_model=False
+ )

 # convert the dataFrame to use best possible dtypes.
 manifest_new = manifest.convert_dtypes()

 # get data types of columns
- result = manifest_new.dtypes.to_frame('dtypes').reset_index()
-
- # return the result as a dictionary
- result_dict = result.set_index('index')['dtypes'].astype(str).to_dict()
+ result = manifest_new.dtypes.to_frame("dtypes").reset_index()

+ # return the result as a dictionary
+ result_dict = result.set_index("index")["dtypes"].astype(str).to_dict()

 return result_dict

- def _get_files_metadata_from_dataset(self, datasetId: str, only_new_files: bool, manifest:pd.DataFrame=None) -> Optional[dict]:
+ def _get_files_metadata_from_dataset(
+ self, datasetId: str, only_new_files: bool, manifest: pd.DataFrame = None
+ ) -> Optional[dict]:
 """retrieve file ids under a particular datasetId

 Args:
- datasetId (str): a dataset id
- only_new_files (bool): if only adding new files that are not already exist
- manifest (pd.DataFrame): metadata manifest dataframe. Default to None.
+ datasetId (str): a dataset id
+ only_new_files (bool): whether to only add new files that do not already exist
+ manifest (pd.DataFrame): metadata manifest dataframe. Defaults to None.

 Returns: a dictionary that contains filenames and entity ids under a given datasetId, or None if nothing is available under the given dataset id
 """

 dataset_files = self.getFilesInStorageDataset(datasetId)
 if dataset_files:
- dataset_file_names_id_dict = self._get_file_entityIds(dataset_files, only_new_files=only_new_files, manifest=manifest)
+ dataset_file_names_id_dict = self._get_file_entityIds(
+ dataset_files, only_new_files=only_new_files, manifest=manifest
+ )
 return dataset_file_names_id_dict
 else:
 return None

- def add_entity_id_and_filename(self, datasetId: str, manifest: pd.DataFrame) -> pd.DataFrame:
+ def add_entity_id_and_filename(
+ self, datasetId: str, manifest: pd.DataFrame
+ ) -> pd.DataFrame:
 """add entityid and filename column to an existing manifest assuming entityId column is not already present

 Args:
 datasetId (str): dataset syn id
- manifest (pd.DataFrame): existing manifest dataframe, assuming this dataframe does not have an entityId column and Filename column is present but completely empty
+ manifest (pd.DataFrame): existing manifest dataframe, assuming this dataframe does not have an entityId column and Filename column is present but completely empty

 Returns:
- pd.DataFrame: returns a pandas dataframe
+ pd.DataFrame: returns a pandas dataframe
 """
- # get file names and entity ids of a given dataset
- dataset_files_dict = self._get_files_metadata_from_dataset(datasetId, only_new_files=False)
+ # get file names and entity ids of a given dataset
+ dataset_files_dict = self._get_files_metadata_from_dataset(
+ datasetId, only_new_files=False
+ )

- if dataset_files_dict:
- # turn manifest dataframe back to a dictionary for operation
- manifest_dict = manifest.to_dict('list')
+ if dataset_files_dict:
+ # turn manifest dataframe back to a dictionary for operation
+ manifest_dict = manifest.to_dict("list")

 # update Filename column
 # add entityId column to the end
 manifest_dict.update(dataset_files_dict)
-
- # if the component column exists in existing manifest, fill up that column
+
+ # if the component column exists in existing manifest, fill up that column
 if "Component" in manifest_dict.keys():
- manifest_dict["Component"] = manifest_dict["Component"] * max(1, len(manifest_dict["Filename"]))
-
+ manifest_dict["Component"] = manifest_dict["Component"] * max(
+ 1, len(manifest_dict["Filename"])
+ )
+
 # turn dictionary back to a dataframe
- manifest_df_index = pd.DataFrame.from_dict(manifest_dict, orient='index')
+ manifest_df_index = pd.DataFrame.from_dict(manifest_dict, orient="index")
 manifest_df_updated = manifest_df_index.transpose()

 # fill na with empty string
@@ -594,8 +650,10 @@ def add_entity_id_and_filename(self, datasetId: str, manifest: pd.DataFrame) ->
 else:
 return manifest

- def fill_in_entity_id_filename(self, datasetId: str, manifest: pd.DataFrame) -> Tuple[List, pd.DataFrame]:
- """fill in Filename column and EntityId column. EntityId column and Filename column will be created if not already present.
+ def fill_in_entity_id_filename(
+ self, datasetId: str, manifest: pd.DataFrame
+ ) -> Tuple[List, pd.DataFrame]:
+ """fill in Filename column and EntityId column. EntityId column and Filename column will be created if not already present.
Args: datasetId (str): dataset syn id @@ -612,20 +670,24 @@ def fill_in_entity_id_filename(self, datasetId: str, manifest: pd.DataFrame) -> # the columns Filename and entityId are assumed to be present in manifest schema # TODO: use idiomatic panda syntax if dataset_files: - new_files = self._get_file_entityIds(dataset_files=dataset_files, only_new_files=True, manifest=manifest) + new_files = self._get_file_entityIds( + dataset_files=dataset_files, only_new_files=True, manifest=manifest + ) # update manifest so that it contains new dataset files new_files = pd.DataFrame(new_files) manifest = ( - pd.concat([manifest, new_files], sort=False) - .reset_index() - .drop("index", axis=1) + pd.concat([manifest, new_files], sort=False) + .reset_index() + .drop("index", axis=1) ) - manifest = manifest.fillna("") + manifest = manifest.fillna("") return dataset_files, manifest - - def updateDatasetManifestFiles(self, sg: SchemaGenerator, datasetId: str, store:bool = True) -> Union[Tuple[str, pd.DataFrame], None]: + + def updateDatasetManifestFiles( + self, sg: SchemaGenerator, datasetId: str, store: bool = True + ) -> Union[Tuple[str, pd.DataFrame], None]: """Fetch the names and entity IDs of all current files in dataset in store, if any; update dataset's manifest with new files, if any. Args: @@ -635,7 +697,7 @@ def updateDatasetManifestFiles(self, sg: SchemaGenerator, datasetId: str, store: Returns: - Synapse ID of updated manifest and Pandas dataframe containing the updated manifest. + Synapse ID of updated manifest and Pandas dataframe containing the updated manifest. If there is no existing manifest return None """ @@ -661,15 +723,21 @@ def updateDatasetManifestFiles(self, sg: SchemaGenerator, datasetId: str, store: manifest.to_csv(manifest_filepath, index=False) # store manifest and update associated metadata with manifest on Synapse - manifest_id = self.associateMetadataWithFiles(sg, manifest_filepath, datasetId) + manifest_id = self.associateMetadataWithFiles( + sg, manifest_filepath, datasetId + ) - return manifest_id, manifest - - def _get_file_entityIds(self, dataset_files: List, only_new_files: bool = False, manifest: pd.DataFrame = None): + + def _get_file_entityIds( + self, + dataset_files: List, + only_new_files: bool = False, + manifest: pd.DataFrame = None, + ): """ Get a dictionary of files in a dataset. Either files that are not in the current manifest or all files - + Args: manifest: metadata manifest dataset_file: List of all files in a dataset @@ -684,7 +752,7 @@ def _get_file_entityIds(self, dataset_files: List, only_new_files: bool = False raise UnboundLocalError( "No manifest was passed in, a manifest is required when `only_new_files` is True." 
)
-
 # find new files (that are not in the current manifest) if any
 for file_id, file_name in dataset_files:
 if not file_id in manifest["entityId"].values:
 files_dict["Filename"].append(file_name)
 files_dict["entityId"].append(file_id)
 else:
 # get all files
 for file_id, file_name in dataset_files:
 files_dict["Filename"].append(file_name)
 files_dict["entityId"].append(file_id)
 return files_dict

 def getProjectManifests(self, projectId: str) -> List[str]:
 TODO: Return manifest URI instead of Synapse ID for interoperability with other implementations of a store interface
 """
- component=None
- entity=None
+ component = None
+ entity = None
 manifests = []

 datasets = self.getStorageDatasetsInProject(projectId)

- for (datasetId, datasetName) in datasets:
+ for datasetId, datasetName in datasets:
 # encode information about the manifest in a simple list (so that R clients can unpack it)
 # eventually can serialize differently
-
+
 # Get synID of manifest for a dataset
 manifestId = self.getDatasetManifest(datasetId)

 annotations = self.getFileAnnotations(manifestId)

 # If manifest has annotations specifying component, use that
- if annotations and 'Component' in annotations:
- component = annotations['Component']
+ if annotations and "Component" in annotations:
+ component = annotations["Component"]
 entity = self.syn.get(manifestId, downloadFile=False)
 manifest_name = entity["properties"]["name"]

 # otherwise download the manifest and parse for information
- elif not annotations or 'Component' not in annotations:
+ elif not annotations or "Component" not in annotations:
 logging.debug(
 f"No component annotations have been found for manifest {manifestId}. "
 "The manifest will be downloaded and parsed instead. "
 "For increased speed, add component annotations to manifest."
- )
+ )

- manifest_info = self.getDatasetManifest(datasetId,downloadFile=True)
+ manifest_info = self.getDatasetManifest(
+ datasetId, downloadFile=True
+ )
 manifest_name = manifest_info["properties"].get("name", "")

 if not manifest_name:
- logger.error(f'Failed to download manifests from {datasetId}')
+ logger.error(f"Failed to download manifests from {datasetId}")

 manifest_path = manifest_info["path"]

 manifest_df = load_df(manifest_path)

 # Get component from component column if it exists
- if "Component" in manifest_df and not manifest_df["Component"].empty:
- list(set(manifest_df['Component']))
+ if (
+ "Component" in manifest_df
+ and not manifest_df["Component"].empty
+ ):
+ list(set(manifest_df["Component"]))
 component = list(set(manifest_df["Component"]))

- #Added to address issues raised during DCA testing
- if '' in component:
- component.remove('')
+ # Added to address issues raised during DCA testing
+ if "" in component:
+ component.remove("")

 if len(component) == 1:
 component = component[0]
 elif len(component) > 1:
 logging.warning(
- f"Manifest {manifestId} is composed of multiple components. Schematic does not support multi-component manifests at this time. "
- "Behavior of manifests with multiple components is undefined"
+ f"Manifest {manifestId} is composed of multiple components. Schematic does not support multi-component manifests at this time. "
+ "Behavior of manifests with multiple components is undefined."
 )
 else:
 manifest_name = ""
- component = None
+ component = None
 if component:
 manifest = (
 (datasetId, datasetName),
 (component, component),
 )
 elif manifestId:
- logging.debug(f"Manifest {manifestId} does not have an associated Component")
+ logging.debug(
+ f"Manifest {manifestId} does not have an associated Component"
+ )
 manifest = (
 (datasetId, datasetName),
 (manifestId, manifest_name),
 if manifest:
 manifests.append(manifest)
-
+
 return manifests

- def upload_project_manifests_to_synapse(self, sg: SchemaGenerator, projectId: str) -> List[str]:
+ def upload_project_manifests_to_synapse(
+ self, sg: SchemaGenerator, projectId: str
+ ) -> List[str]:
 """Upload all metadata manifest files across all datasets in a specified project as tables in Synapse.

 Returns: String of all the manifest_table_ids of all the manifests that have been loaded.
 """

 manifests = []
 manifest_loaded = []
 datasets = self.getStorageDatasetsInProject(projectId)

- for (datasetId, datasetName) in datasets:
+ for datasetId, datasetName in datasets:
 # encode information about the manifest in a simple list (so that R clients can unpack it)
 # eventually can serialize differently

 manifest = ((datasetId, datasetName), ("", ""), ("", ""))

 if manifest_info:
 manifest_id = manifest_info["properties"]["id"]
 manifest_name = manifest_info["properties"]["name"]
 manifest_path = manifest_info["path"]
 manifest_df = load_df(manifest_path)
- manifest_table_id = uploadDB(sg=sg, manifest=manifest, datasetId=datasetId, table_name=datasetName)
+ manifest_table_id = uploadDB(
+ sg=sg,
+ manifest=manifest,
+ datasetId=datasetId,
+ table_name=datasetName,
+ )
 manifest_loaded.append(datasetName)
 return manifest_loaded

- def upload_annotated_project_manifests_to_synapse(self, projectId:str, path_to_json_ld: str, dry_run: bool = False) -> List[str]:
- '''
+ def upload_annotated_project_manifests_to_synapse(
+ self, projectId: str, path_to_json_ld: str, dry_run: bool = False
+ ) -> List[str]:
+ """
 Purpose:
 For all manifests in a project, upload them as tables and add annotations from the manifest csv.
 Assumes each manifest is already present as a CSV in a dataset in the project.
- '''
+ """
 sg = SchemaGenerator(path_to_json_ld)
 manifests = []
 manifest_loaded = []
 datasets = self.getStorageDatasetsInProject(projectId)
- for (datasetId, datasetName) in datasets:
+ for datasetId, datasetName in datasets:
 # encode information about the manifest in a simple list (so that R clients can unpack it)
 # eventually can serialize differently

 manifest = ((datasetId, datasetName), ("", ""), ("", ""))

 if manifest_info:
 manifest_id = manifest_info["properties"]["id"]
 manifest_name = manifest_info["properties"]["name"]
 manifest_path = manifest_info["path"]
- manifest = ((datasetId, datasetName), (manifest_id, manifest_name), ("", ""))
+ manifest = (
+ (datasetId, datasetName),
+ (manifest_id, manifest_name),
+ ("", ""),
+ )
 if not dry_run:
- manifest_syn_id = self.associateMetadataWithFiles(sg, manifest_path, datasetId, manifest_record_type='table')
+ manifest_syn_id = self.associateMetadataWithFiles(
+ sg, manifest_path, datasetId, manifest_record_type="table"
+ )
 manifest_loaded.append(manifest)
-
- return manifests, manifest_loaded
+ return manifests, manifest_loaded

- def move_entities_to_new_project(self, projectId: str, newProjectId: str, returnEntities: bool = False, dry_run: bool = False):
+ def move_entities_to_new_project(
+ self,
+ projectId: str,
+ newProjectId: str,
+ returnEntities: bool = False,
+ dry_run: bool = False,
+ ):
 """
 For each manifest csv in a project, look for all the entity ids that are associated.
 Look up the entity in the files, move the entity to the new project.
 """

 manifest_loaded = []
 datasets = self.getStorageDatasetsInProject(projectId)
 if datasets:
- for (datasetId, datasetName) in datasets:
+ for datasetId, datasetName in datasets:
 # encode information about the manifest in a simple list (so that R clients can unpack it)
 # eventually can serialize differently

 if manifest_info:
 manifest_id = manifest_info["properties"]["id"]
 manifest_name = manifest_info["properties"]["name"]
 manifest_path = manifest_info["path"]
 manifest_df = load_df(manifest_path)

- manifest = ((datasetId, datasetName), (manifest_id, manifest_name), ("", ""))
+ manifest = (
+ (datasetId, datasetName),
+ (manifest_id, manifest_name),
+ ("", ""),
+ )
 manifest_loaded.append(manifest)

 annotation_entities = self.storageFileviewTable[
- (self.storageFileviewTable['id'].isin(manifest_df['entityId']))
- & (self.storageFileviewTable['type'] == 'folder')
- ]['id']
+ (self.storageFileviewTable["id"].isin(manifest_df["entityId"]))
+ & (self.storageFileviewTable["type"] == "folder")
+ ]["id"]

 if returnEntities:
- for entityId in annotation_entities:
+ for entityId in annotation_entities:
 if not dry_run:
 self.syn.move(entityId, datasetId)
 else:
- logging.info(f"{entityId} will be moved to folder {datasetId}.")
+ logging.info(
+ f"{entityId} will be moved to folder {datasetId}."
+ ) + else: # generate project folder - archive_project_folder = Folder(projectId+'_archive', parent = newProjectId) + archive_project_folder = Folder( + projectId + "_archive", parent=newProjectId + ) archive_project_folder = self.syn.store(archive_project_folder) - + # generate dataset folder - dataset_archive_folder = Folder("_".join([datasetId,datasetName,'archive']), parent = archive_project_folder.id) - dataset_archive_folder = self.syn.store(dataset_archive_folder) + dataset_archive_folder = Folder( + "_".join([datasetId, datasetName, "archive"]), + parent=archive_project_folder.id, + ) + dataset_archive_folder = self.syn.store(dataset_archive_folder) for entityId in annotation_entities: # move entities to folder if not dry_run: self.syn.move(entityId, dataset_archive_folder.id) else: - logging.info(f"{entityId} will be moved to folder {dataset_archive_folder.id}.") + logging.info( + f"{entityId} will be moved to folder {dataset_archive_folder.id}." + ) else: raise LookupError( f"No datasets were found in the specified project: {projectId}. Re-check specified master_fileview in CONFIG and retry." @@ -934,7 +1042,7 @@ def _get_tables(self, datasetId: str = None, projectId: str = None) -> List[Tabl project = projectId elif datasetId: project = self.syn.get(self.getDatasetProject(datasetId)) - + return list(self.syn.getChildren(project, includeTypes=["table"])) def get_table_info(self, datasetId: str = None, projectId: str = None) -> List[str]: @@ -943,221 +1051,262 @@ def get_table_info(self, datasetId: str = None, projectId: str = None) -> List[s Returns: list[str]: A list of table names """ - tables = self._get_tables(datasetId = datasetId, projectId = projectId) + tables = self._get_tables(datasetId=datasetId, projectId=projectId) if tables: return {table["name"]: table["id"] for table in tables} - else: - return {None:None} + else: + return {None: None} @missing_entity_handler - def uploadDB(self, + def uploadDB( + self, sg: SchemaGenerator, - manifest: pd.DataFrame, - datasetId: str, - table_name: str, - restrict: bool = False, - table_manipulation: str = 'replace', - table_column_names:str='class_label', - ): + manifest: pd.DataFrame, + datasetId: str, + table_name: str, + restrict: bool = False, + table_manipulation: str = "replace", + table_column_names: str = "class_label", + ): """ Method to upload a database to an asset store. In synapse, this will upload a metadata table - + Args: sg: schemaGenerator object manifest: pd.Df manifest to upload datasetId: synID of the dataset for the manifest table_name: name of the table to be uploaded - restrict: bool, whether or not the manifest contains sensitive data that will need additional access restrictions + restrict: bool, whether or not the manifest contains sensitive data that will need additional access restrictions existingTableId: str of the synId of the existing table, if one already exists table_manipulation: str, 'replace' or 'upsert', in the case where a manifest already exists, should the new metadata replace the existing (replace) or be added to it (upsert) - table_column_names: (str): display_name/display_label/class_label (default). Sets labeling style for table column names. display_name will use the raw display name as the column name. class_label will format the display - name as upper camelcase, and strip blacklisted characters, display_label will strip blacklisted characters including spaces, to retain + table_column_names: (str): display_name/display_label/class_label (default). 
Sets labeling style for table column names. display_name will use the raw display name as the column name. class_label will format the display
+ name as upper camelcase, and strip blacklisted characters, display_label will strip blacklisted characters including spaces, to retain
 display label formatting.
 Returns:
 manifest_table_id: synID of the uploaded table
 manifest: the original manifest
 table_manifest: manifest formatted appropriately for the table
 """

- col_schema, table_manifest = self.formatDB(sg=sg, manifest=manifest, table_column_names=table_column_names)
+ col_schema, table_manifest = self.formatDB(
+ sg=sg, manifest=manifest, table_column_names=table_column_names
+ )

- manifest_table_id = self.buildDB(datasetId, table_name, col_schema, table_manifest, table_manipulation, sg, restrict,)
+ manifest_table_id = self.buildDB(
+ datasetId,
+ table_name,
+ col_schema,
+ table_manifest,
+ table_manipulation,
+ sg,
+ restrict,
+ )

 return manifest_table_id, manifest, table_manifest

 def formatDB(self, sg, manifest, table_column_names):
 """
 Method to format a manifest appropriately for upload as a table

 Args:
 sg: schemaGenerator object
 manifest: pd.Df manifest to upload
- table_column_names: (str): display_name/display_label/class_label (default). Sets labeling style for table column names. display_name will use the raw display name as the column name. class_label will format the display
- name as upper camelcase, and strip blacklisted characters, display_label will strip blacklisted characters including spaces, to retain
- display label formatting.
+ table_column_names: (str): display_name/display_label/class_label (default). Sets labeling style for table column names. display_name will use the raw display name as the column name. class_label will format the display
+ name as upper camelcase, and strip blacklisted characters, display_label will strip blacklisted characters including spaces, to retain
+ display label formatting.
Returns: col_schema: schema for table columns: type, size, etc table_manifest: formatted manifest - + """ # Rename the manifest columns to display names to match fileview - blacklist_chars = ['(', ')', '.', ' ', '-'] + blacklist_chars = ["(", ")", ".", " ", "-"] manifest_columns = manifest.columns.tolist() - table_manifest=deepcopy(manifest) - - if table_column_names=='display_name': + table_manifest = deepcopy(manifest) + if table_column_names == "display_name": cols = table_manifest.columns - elif table_column_names=='display_label': - + elif table_column_names == "display_label": cols = [ - str(col).translate({ord(x): '' for x in blacklist_chars}) + str(col).translate({ord(x): "" for x in blacklist_chars}) for col in manifest_columns ] - elif table_column_names=='class_label': - + elif table_column_names == "class_label": cols = [ - sg.se.get_class_label_from_display_name( - str(col) - ).translate({ord(x): '' for x in blacklist_chars}) + sg.se.get_class_label_from_display_name(str(col)).translate( + {ord(x): "" for x in blacklist_chars} + ) for col in manifest_columns ] - cols = list(map(lambda x: x.replace('EntityId', 'entityId'), cols)) - + cols = list(map(lambda x: x.replace("EntityId", "entityId"), cols)) # Reset column names in table manifest table_manifest.columns = cols - #move entity id to end of df - entity_col = table_manifest.pop('entityId') - table_manifest.insert(len(table_manifest.columns), 'entityId', entity_col) + # move entity id to end of df + entity_col = table_manifest.pop("entityId") + table_manifest.insert(len(table_manifest.columns), "entityId", entity_col) # Get the column schema col_schema = as_table_columns(table_manifest) # Set Id column length to 64 (for some reason not being auto set.) for i, col in enumerate(col_schema): - if col['name'].lower() == 'id': - col_schema[i]['maximumSize'] = 64 + if col["name"].lower() == "id": + col_schema[i]["maximumSize"] = 64 return col_schema, table_manifest - def buildDB(self, - datasetId: str, - table_name: str, + def buildDB( + self, + datasetId: str, + table_name: str, col_schema: List, table_manifest: pd.DataFrame, table_manipulation: str, - sg: SchemaGenerator, + sg: SchemaGenerator, restrict: bool = False, - - ): + ): """ Method to construct the table appropriately: create new table, replace existing, or upsert new into existing - Calls TableOperations class to execute - + Calls TableOperations class to execute + Args: datasetId: synID of the dataset for the manifest table_name: name of the table to be uploaded col_schema: schema for table columns: type, size, etc from `formatDB` table_manifest: formatted manifest that can be uploaded as a table table_manipulation: str, 'replace' or 'upsert', in the case where a manifest already exists, should the new metadata replace the existing (replace) or be added to it (upsert) - restrict: bool, whether or not the manifest contains sensitive data that will need additional access restrictions + restrict: bool, whether or not the manifest contains sensitive data that will need additional access restrictions Returns: manifest_table_id: synID of the uploaded table - + """ - table_info = self.get_table_info(datasetId = datasetId) + table_info = self.get_table_info(datasetId=datasetId) # Put table manifest onto synapse - schema = Schema(name=table_name, columns=col_schema, parent=self.getDatasetProject(datasetId)) + schema = Schema( + name=table_name, + columns=col_schema, + parent=self.getDatasetProject(datasetId), + ) if table_name in table_info: existingTableId = 
table_info[table_name] else: existingTableId = None - tableOps = TableOperations( - synStore = self, - tableToLoad = table_manifest, - tableName = table_name, - datasetId = datasetId, - existingTableId = existingTableId, - restrict = restrict, - ) + synStore=self, + tableToLoad=table_manifest, + tableName=table_name, + datasetId=datasetId, + existingTableId=existingTableId, + restrict=restrict, + ) if not table_manipulation or table_name not in table_info.keys(): - manifest_table_id = tableOps.createTable(columnTypeDict=col_schema, specifySchema=True,) + manifest_table_id = tableOps.createTable( + columnTypeDict=col_schema, + specifySchema=True, + ) elif table_name in table_info.keys() and table_info[table_name]: - - if table_manipulation.lower() == 'replace': - manifest_table_id = tableOps.replaceTable(specifySchema = True, columnTypeDict=col_schema,) - elif table_manipulation.lower() == 'upsert': - manifest_table_id = tableOps.upsertTable(sg=sg,) - elif table_manipulation.lower() == 'update': + if table_manipulation.lower() == "replace": + manifest_table_id = tableOps.replaceTable( + specifySchema=True, + columnTypeDict=col_schema, + ) + elif table_manipulation.lower() == "upsert": + manifest_table_id = tableOps.upsertTable( + sg=sg, + ) + elif table_manipulation.lower() == "update": manifest_table_id = tableOps.updateTable() - - - if table_manipulation and table_manipulation.lower() == 'upsert': - existing_tables=self.get_table_info(datasetId=datasetId) - tableId=existing_tables[table_name] + if table_manipulation and table_manipulation.lower() == "upsert": + existing_tables = self.get_table_info(datasetId=datasetId) + tableId = existing_tables[table_name] annos = self.syn.get_annotations(tableId) - annos['primary_key'] = table_manifest['Component'][0] + "_id" + annos["primary_key"] = table_manifest["Component"][0] + "_id" annos = self.syn.set_annotations(annos) return manifest_table_id - - def upload_manifest_file(self, manifest, metadataManifestPath, datasetId, restrict_manifest, component_name = ''): + def upload_manifest_file( + self, + manifest, + metadataManifestPath, + datasetId, + restrict_manifest, + component_name="", + ): # Update manifest to have the new entityId column manifest.to_csv(metadataManifestPath, index=False) # store manifest to Synapse as a CSV # update file name - file_name_full = metadataManifestPath.split('/')[-1] - file_extension = file_name_full.split('.')[-1] + file_name_full = metadataManifestPath.split("/")[-1] + file_extension = file_name_full.split(".")[-1] # Differentiate "censored" and "uncensored" manifest - if "censored" in file_name_full: - file_name_new = os.path.basename(CONFIG.synapse_manifest_basename) + "_" + component_name + "_censored" + '.' + file_extension - else: - file_name_new = os.path.basename(CONFIG.synapse_manifest_basename) + "_" + component_name + '.' + file_extension + if "censored" in file_name_full: + file_name_new = ( + os.path.basename(CONFIG.synapse_manifest_basename) + + "_" + + component_name + + "_censored" + + "." + + file_extension + ) + else: + file_name_new = ( + os.path.basename(CONFIG.synapse_manifest_basename) + + "_" + + component_name + + "." 
+ + file_extension + ) manifestSynapseFile = File( metadataManifestPath, description="Manifest for dataset " + datasetId, parent=datasetId, - name=file_name_new + name=file_name_new, + ) + + manifest_synapse_file_id = self.syn.store( + manifestSynapseFile, isRestricted=restrict_manifest + ).id + changeFileMetaData( + syn=self.syn, entity=manifest_synapse_file_id, downloadAs=file_name_new ) - manifest_synapse_file_id = self.syn.store(manifestSynapseFile, isRestricted = restrict_manifest).id - changeFileMetaData(syn = self.syn, entity = manifest_synapse_file_id, downloadAs = file_name_new) - return manifest_synapse_file_id @missing_entity_handler - def format_row_annotations(self, se, sg, row, entityId, hideBlanks, annotation_keys): + def format_row_annotations( + self, se, sg, row, entityId, hideBlanks, annotation_keys + ): # prepare metadata for Synapse storage (resolve display name into a name that Synapse annotations support (e.g no spaces, parenthesis) # note: the removal of special characters, will apply only to annotation keys; we are not altering the manifest # this could create a divergence between manifest column and annotations. this should be ok for most use cases. # columns with special characters are outside of the schema metadataSyn = {} - blacklist_chars = ['(', ')', '.', ' ', '-'] - + blacklist_chars = ["(", ")", ".", " ", "-"] + for k, v in row.to_dict().items(): - if annotation_keys=='display_label': - keySyn = str(k).translate({ord(x): '' for x in blacklist_chars}) - elif annotation_keys=='class_label': - keySyn = se.get_class_label_from_display_name(str(k)).translate({ord(x): '' for x in blacklist_chars}) + if annotation_keys == "display_label": + keySyn = str(k).translate({ord(x): "" for x in blacklist_chars}) + elif annotation_keys == "class_label": + keySyn = se.get_class_label_from_display_name(str(k)).translate( + {ord(x): "" for x in blacklist_chars} + ) # Skip `Filename` and `ETag` columns when setting annotations if keySyn in ["Filename", "ETag", "eTag"]: @@ -1174,45 +1323,49 @@ def format_row_annotations(self, se, sg, row, entityId, hideBlanks, annotation_k metadataSyn[keySyn] = v # set annotation(s) for the various objects/items in a dataset on Synapse annos = self.syn.get_annotations(entityId) - csv_list_regex=comma_separated_list_regex() + csv_list_regex = comma_separated_list_regex() for anno_k, anno_v in metadataSyn.items(): - # Remove keys with nan or empty string values from dict of annotations to be uploaded # if present on current data annotation - if hideBlanks and (anno_v == '' or (isinstance(anno_v,float) and np.isnan(anno_v))): + if hideBlanks and ( + anno_v == "" or (isinstance(anno_v, float) and np.isnan(anno_v)) + ): annos.pop(anno_k) if anno_k in annos.keys() else annos # Otherwise save annotation as approrpriate else: - if isinstance(anno_v,float) and np.isnan(anno_v): - annos[anno_k] = "" - elif isinstance(anno_v,str) and re.fullmatch(csv_list_regex, anno_v) and rule_in_rule_list('list', sg.get_node_validation_rules(anno_k)): + if isinstance(anno_v, float) and np.isnan(anno_v): + annos[anno_k] = "" + elif ( + isinstance(anno_v, str) + and re.fullmatch(csv_list_regex, anno_v) + and rule_in_rule_list("list", sg.get_node_validation_rules(anno_k)) + ): annos[anno_k] = anno_v.split(",") else: annos[anno_k] = anno_v - + return annos @missing_entity_handler def format_manifest_annotations(self, manifest, manifest_synapse_id): - ''' + """ Set annotations for the manifest (as a whole) so they can be applied to the manifest table or csv. 
For now just getting the Component. - ''' - + """ + entity = self.syn.get(manifest_synapse_id, downloadFile=False) is_file = entity.concreteType.endswith(".FileEntity") is_table = entity.concreteType.endswith(".TableEntity") if is_file: - # Get file metadata metadata = self.getFileAnnotations(manifest_synapse_id) # If there is a defined component add it to the metadata. - if 'Component' in manifest.columns: + if "Component" in manifest.columns: # Gather component information - component = manifest['Component'].unique() - + component = manifest["Component"].unique() + # Double check that only a single component is listed, else raise an error. try: len(component) == 1 @@ -1222,12 +1375,12 @@ def format_manifest_annotations(self, manifest, manifest_synapse_id): ) from err # Add component to metadata - metadata['Component'] = component[0] - + metadata["Component"] = component[0] + elif is_table: # Get table metadata metadata = self.getTableAnnotations(manifest_synapse_id) - + # Get annotations annos = self.syn.get_annotations(manifest_synapse_id) @@ -1236,6 +1389,7 @@ def format_manifest_annotations(self, manifest, manifest_synapse_id): annos[annos_k] = annos_v return annos + ''' def annotate_upload_manifest_table(self, manifest, datasetId, metadataManifestPath, useSchemaLabel: bool = True, hideBlanks: bool = False, restrict_manifest = False): @@ -1313,7 +1467,7 @@ def annotate_upload_manifest_table(self, manifest, datasetId, metadataManifestPa return manifest_synapse_table_id ''' - def _read_manifest(self, metadataManifestPath:str) -> pd.DataFrame: + def _read_manifest(self, metadataManifestPath: str) -> pd.DataFrame: """Helper function to read in provided manifest as a pandas DataFrame for subsequent downstream processing. Args: metadataManifestPath (str): path where manifest is stored @@ -1324,10 +1478,12 @@ def _read_manifest(self, metadataManifestPath:str) -> pd.DataFrame: """ # read new manifest csv try: - load_args={ - "dtype":"string", + load_args = { + "dtype": "string", } - manifest = load_df(metadataManifestPath, preserve_raw_input = False, **load_args) + manifest = load_df( + metadataManifestPath, preserve_raw_input=False, **load_args + ) except FileNotFoundError as err: raise FileNotFoundError( f"No manifest file was found at this path: {metadataManifestPath}" @@ -1346,22 +1502,24 @@ def _add_id_columns_to_manifest(self, manifest: pd.DataFrame, sg: SchemaGenerato if not col_in_dataframe("Id", manifest): # See if schema has `Uuid` column specified try: - uuid_col_in_schema = sg.se.is_class_in_schema('Uuid') or sg.se.is_class_in_schema('uuid') - except (KeyError): + uuid_col_in_schema = sg.se.is_class_in_schema( + "Uuid" + ) or sg.se.is_class_in_schema("uuid") + except KeyError: uuid_col_in_schema = False # Rename `Uuid` column if it wasn't specified in the schema if col_in_dataframe("Uuid", manifest) and not uuid_col_in_schema: - manifest.rename(columns={'Uuid': 'Id'}, inplace=True) + manifest.rename(columns={"Uuid": "Id"}, inplace=True) # If no `Uuid` column exists or it is specified in the schema, create a new `Id` column else: - manifest["Id"] = '' + manifest["Id"] = "" - for idx,row in manifest.iterrows(): + for idx, row in manifest.iterrows(): if not row["Id"]: gen_uuid = str(uuid.uuid4()) row["Id"] = gen_uuid - manifest.loc[idx, 'Id'] = gen_uuid + manifest.loc[idx, "Id"] = gen_uuid # add entityId as a column if not already there or # fill any blanks with an empty string. 
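For reference, the Id backfill above reduces to this minimal sketch (a hypothetical standalone example, not part of the patch; it assumes a pandas DataFrame that already has an empty `Id` column, and the name `manifest` is illustrative):

    import uuid

    import pandas as pd

    # Each row with a blank `Id` cell gets a freshly generated uuid4 string,
    # mirroring the loop in _add_id_columns_to_manifest.
    manifest = pd.DataFrame({"Filename": ["a.txt", "b.txt"], "Id": ["", ""]})
    for idx, row in manifest.iterrows():
        if not row["Id"]:
            manifest.loc[idx, "Id"] = str(uuid.uuid4())

Generating one UUID per row, rather than one per manifest, keeps the column usable as a per-row key for later upserts.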
@@ -1381,15 +1539,23 @@ def _generate_table_name(self, manifest):
             component_name (str): Name of the manifest component (if applicable)
         """
         # Create table name here.
-        if 'Component' in manifest.columns:
-            component_name = manifest['Component'][0].lower()
-            table_name = component_name + '_synapse_storage_manifest_table'
+        if "Component" in manifest.columns:
+            component_name = manifest["Component"][0].lower()
+            table_name = component_name + "_synapse_storage_manifest_table"
         else:
-            component_name = ''
-            table_name = 'synapse_storage_manifest_table'
+            component_name = ""
+            table_name = "synapse_storage_manifest_table"

         return table_name, component_name

-    def _add_annotations(self, se, schemaGenerator, row, entityId:str, hideBlanks:bool, annotation_keys:str):
+    def _add_annotations(
+        self,
+        se,
+        schemaGenerator,
+        row,
+        entityId: str,
+        hideBlanks: bool,
+        annotation_keys: str,
+    ):
         """Helper function to format and add annotations to entities in Synapse.
         Args:
             se: schemaExplorer object,
@@ -1397,17 +1563,19 @@ def _add_annotations(self, se, schemaGenerator, row, entityId:str, hideBlanks:bo
             row: current row of manifest being processed
             entityId (str): synapseId of entity to add annotations to
             hideBlanks: Boolean flag that does not upload annotation keys with blank values when true. Uploads Annotation keys with empty string values when false.
-            annotation_keys: (str) display_label/class_label(default), Determines labeling syle for annotation keys. class_label will format the display
-            name as upper camelcase, and strip blacklisted characters, display_label will strip blacklisted characters including spaces, to retain
+            annotation_keys: (str) display_label/class_label(default), Determines labeling style for annotation keys. class_label will format the display
+            name as upper camelcase, and strip blacklisted characters, display_label will strip blacklisted characters including spaces, to retain
             display label formatting while ensuring the label is formatted properly for Synapse annotations.
         Returns:
             Annotations are added to entities in Synapse, no return.
         """
         # Format annotations for Synapse
-        annos = self.format_row_annotations(se, schemaGenerator, row, entityId, hideBlanks, annotation_keys)
+        annos = self.format_row_annotations(
+            se, schemaGenerator, row, entityId, hideBlanks, annotation_keys
+        )

         if annos:
-            # Store annotations for an entity folder
+            # Store annotations for an entity folder
             self.syn.set_annotations(annos)
         return

@@ -1421,7 +1589,7 @@ def _create_entity_id(self, idx, row, manifest, datasetId):
         Returns:
             manifest (pd.DataFrame): manifest with entityId added to the appropriate row
             entityId (str): Generated Entity Id.
-
+
         """
         rowEntity = Folder(str(uuid.uuid4()), parent=datasetId)
         rowEntity = self.syn.store(rowEntity)
@@ -1431,17 +1599,17 @@ def _create_entity_id(self, idx, row, manifest, datasetId):
         return manifest, entityId

     def add_annotations_to_entities_files(
-        self,
-        se,
-        schemaGenerator,
-        manifest,
-        manifest_record_type,
-        datasetId,
-        hideBlanks,
-        manifest_synapse_table_id='',
-        annotation_keys:str='class_label',
-    ):
-        '''Depending on upload type add Ids to entityId row. Add anotations to connected files.
+        self,
+        se,
+        schemaGenerator,
+        manifest,
+        manifest_record_type,
+        datasetId,
+        hideBlanks,
+        manifest_synapse_table_id="",
+        annotation_keys: str = "class_label",
+    ):
+        """Depending on upload type add Ids to entityId row. Add annotations to connected files.
        Args:
            se: Schema Explorer Object
            schemaGenerator: SchemaGenerator object
@@ -1450,60 +1618,70 @@ def add_annotations_to_entities_files(
            manifest (pd.DataFrame): loaded df containing user supplied data.
            manifest_record_type: valid values are 'entity', 'table' or 'both'. Specifies whether to create entity ids and folders for each row in a manifest, a Synapse table to house the entire manifest or do both.
            datasetId (str): synapse ID of folder containing the dataset
            hideBlanks (bool): Default is false -Boolean flag that does not upload annotation keys with blank values when true. Uploads Annotation keys with empty string values when false.
            manifest_synapse_table_id (str): Default is an empty string ''.
-            annotation_keys: (str) display_label/class_label(default), Determines labeling syle for annotation keys. class_label will format the display
-            name as upper camelcase, and strip blacklisted characters, display_label will strip blacklisted characters including spaces, to retain
+            annotation_keys: (str) display_label/class_label(default), Determines labeling style for annotation keys. class_label will format the display
+            name as upper camelcase, and strip blacklisted characters, display_label will strip blacklisted characters including spaces, to retain
             display label formatting while ensuring the label is formatted properly for Synapse annotations.
        Returns:
            manifest (pd.DataFrame): modified to add entitiyId as appropriate.
-        '''
+        """

        # Expected behavior is to annotate files if `Filename` is present regardless of `-mrt` setting
-        if 'filename' in [col.lower() for col in manifest.columns]:
+        if "filename" in [col.lower() for col in manifest.columns]:
            # get current list of files and store as dataframe
            dataset_files = self.getFilesInStorageDataset(datasetId)
-            files_and_entityIds = self._get_file_entityIds(dataset_files=dataset_files, only_new_files=False)
+            files_and_entityIds = self._get_file_entityIds(
+                dataset_files=dataset_files, only_new_files=False
+            )
            file_df = pd.DataFrame(files_and_entityIds)
-
+
            # Merge dataframes to add entityIds
-            manifest = manifest.merge(file_df, how = 'left', on='Filename', suffixes=['_x',None]).drop('entityId_x',axis=1)
+            manifest = manifest.merge(
+                file_df, how="left", on="Filename", suffixes=["_x", None]
+            ).drop("entityId_x", axis=1)

        # Fill `entityId` for each row if missing and annotate entity as appropriate
        for idx, row in manifest.iterrows():
-            if not row["entityId"] and (manifest_record_type == 'file_and_entities' or
-                manifest_record_type == 'table_file_and_entities'):
-                manifest, entityId = self._create_entity_id(idx, row, manifest, datasetId)
-            elif not row["entityId"] and manifest_record_type == 'table_and_file':
-                # If not using entityIds, fill with manifest_table_id so
+            if not row["entityId"] and (
+                manifest_record_type == "file_and_entities"
+                or manifest_record_type == "table_file_and_entities"
+            ):
+                manifest, entityId = self._create_entity_id(
+                    idx, row, manifest, datasetId
+                )
+            elif not row["entityId"] and manifest_record_type == "table_and_file":
+                # If not using entityIds, fill with manifest_table_id so
                row["entityId"] = manifest_synapse_table_id
                manifest.loc[idx, "entityId"] = manifest_synapse_table_id
-                entityId = ''
+                entityId = ""
            else:
                # get the file id of the file to annotate, collected in above step.
                entityId = row["entityId"]

            # Adding annotations to connected files.
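+            # Note: the table_and_file branch above leaves entityId as an empty
+            # string, so the guard that follows annotates only rows that are
+            # backed by a real Synapse entity.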
            if entityId:
-                self._add_annotations(se, schemaGenerator, row, entityId, hideBlanks, annotation_keys)
+                self._add_annotations(
+                    se, schemaGenerator, row, entityId, hideBlanks, annotation_keys
+                )
                logger.info(f"Added annotations to entity: {entityId}")

        return manifest

    def upload_manifest_as_table(
-        self,
-        se,
-        schemaGenerator,
-        manifest,
-        metadataManifestPath,
-        datasetId,
-        table_name,
-        component_name,
-        restrict,
-        manifest_record_type,
-        hideBlanks,
-        table_manipulation,
-        table_column_names:str,
-        annotation_keys:str,
-    ):
+        self,
+        se,
+        schemaGenerator,
+        manifest,
+        metadataManifestPath,
+        datasetId,
+        table_name,
+        component_name,
+        restrict,
+        manifest_record_type,
+        hideBlanks,
+        table_manipulation,
+        table_column_names: str,
+        annotation_keys: str,
+    ):
        """Upload manifest to Synapse as a table and csv.
        Args:
            se: SchemaExplorer object
@@ -1517,61 +1695,83 @@ def upload_manifest_as_table(
            manifest_record_type (str): valid values are 'entity', 'table' or 'both'. Specifies whether to create entity ids and folders for each row in a manifest, a Synapse table to house the entire manifest or do both.
            hideBlanks (bool): Default is False -Boolean flag that does not upload annotation keys with blank values when true. Uploads Annotation keys with empty string values when false.
            table_malnipulation (str): Specify the way the manifest tables should be store as on Synapse when one with the same name already exists. Options are 'replace' and 'upsert'.
-        table_column_names: (str): display_name/display_label/class_label (default). Sets labeling style for table column names. display_name will use the raw display name as the column name. class_label will format the display
-            name as upper camelcase, and strip blacklisted characters, display_label will strip blacklisted characters including spaces, to retain
+        table_column_names: (str): display_name/display_label/class_label (default). Sets labeling style for table column names. display_name will use the raw display name as the column name. class_label will format the display
+            name as upper camelcase, and strip blacklisted characters, display_label will strip blacklisted characters including spaces, to retain
            display label formatting.
-        annotation_keys: (str) display_label/class_label (default), Sets labeling syle for annotation keys. class_label will format the display
-            name as upper camelcase, and strip blacklisted characters, display_label will strip blacklisted characters including spaces, to retain
+        annotation_keys: (str) display_label/class_label (default), Sets labeling style for annotation keys. class_label will format the display
+            name as upper camelcase, and strip blacklisted characters, display_label will strip blacklisted characters including spaces, to retain
            display label formatting while ensuring the label is formatted properly for Synapse annotations.
        Return:
            manifest_synapse_file_id: SynID of manifest csv uploaded to synapse.
-        """
+        """
        # Upload manifest as a table, get the ID and updated manifest.
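+        # The table is stored first so that its synID can be written into the
+        # manifest's entityId column during annotation, before the CSV copy of
+        # the manifest is uploaded below.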
manifest_synapse_table_id, manifest, table_manifest = self.uploadDB( - sg=schemaGenerator, - manifest=manifest, - datasetId=datasetId, - table_name=table_name, - restrict=restrict, - table_manipulation=table_manipulation, - table_column_names=table_column_names) - - manifest = self.add_annotations_to_entities_files(se, schemaGenerator, manifest, manifest_record_type, datasetId, hideBlanks, manifest_synapse_table_id, annotation_keys) + sg=schemaGenerator, + manifest=manifest, + datasetId=datasetId, + table_name=table_name, + restrict=restrict, + table_manipulation=table_manipulation, + table_column_names=table_column_names, + ) + + manifest = self.add_annotations_to_entities_files( + se, + schemaGenerator, + manifest, + manifest_record_type, + datasetId, + hideBlanks, + manifest_synapse_table_id, + annotation_keys, + ) # Load manifest to synapse as a CSV File - manifest_synapse_file_id = self.upload_manifest_file(manifest, metadataManifestPath, datasetId, restrict, component_name = component_name) - + manifest_synapse_file_id = self.upload_manifest_file( + manifest, + metadataManifestPath, + datasetId, + restrict, + component_name=component_name, + ) + # Set annotations for the file manifest. - manifest_annotations = self.format_manifest_annotations(manifest, manifest_synapse_file_id) + manifest_annotations = self.format_manifest_annotations( + manifest, manifest_synapse_file_id + ) self.syn.set_annotations(manifest_annotations) logger.info("Associated manifest file with dataset on Synapse.") - + # Update manifest Synapse table with new entity id column. manifest_synapse_table_id, manifest, table_manifest = self.uploadDB( - sg=schemaGenerator, - manifest=manifest, - datasetId=datasetId, - table_name=table_name, - restrict=restrict, - table_manipulation='update', - table_column_names=table_column_names) + sg=schemaGenerator, + manifest=manifest, + datasetId=datasetId, + table_name=table_name, + restrict=restrict, + table_manipulation="update", + table_column_names=table_column_names, + ) # Set annotations for the table manifest - manifest_annotations = self.format_manifest_annotations(manifest, manifest_synapse_table_id) + manifest_annotations = self.format_manifest_annotations( + manifest, manifest_synapse_table_id + ) self.syn.set_annotations(manifest_annotations) return manifest_synapse_file_id def upload_manifest_as_csv( - self, - se, - schemaGenerator, - manifest, - metadataManifestPath, - datasetId, - restrict, - manifest_record_type, - hideBlanks, - component_name, - annotation_keys:str): + self, + se, + schemaGenerator, + manifest, + metadataManifestPath, + datasetId, + restrict, + manifest_record_type, + hideBlanks, + component_name, + annotation_keys: str, + ): """Upload manifest to Synapse as a csv only. Args: se: SchemaExplorer object @@ -1582,42 +1782,57 @@ def upload_manifest_as_csv( restrict (bool): Flag for censored data. manifest_record_type: valid values are 'entity', 'table' or 'both'. Specifies whether to create entity ids and folders for each row in a manifest, a Synapse table to house the entire manifest or do both. hideBlanks (bool): Default is False -Boolean flag that does not upload annotation keys with blank values when true. Uploads Annotation keys with empty string values when false. - annotation_keys: (str) display_label/class_label (default), Sets labeling syle for annotation keys. 
class_label will format the display - name as upper camelcase, and strip blacklisted characters, display_label will strip blacklisted characters including spaces, to retain + annotation_keys: (str) display_label/class_label (default), Sets labeling syle for annotation keys. class_label will format the display + name as upper camelcase, and strip blacklisted characters, display_label will strip blacklisted characters including spaces, to retain display label formatting while ensuring the label is formatted properly for Synapse annotations. Return: manifest_synapse_file_id (str): SynID of manifest csv uploaded to synapse. """ - manifest = self.add_annotations_to_entities_files(se, schemaGenerator, manifest, manifest_record_type, datasetId, hideBlanks, annotation_keys=annotation_keys) + manifest = self.add_annotations_to_entities_files( + se, + schemaGenerator, + manifest, + manifest_record_type, + datasetId, + hideBlanks, + annotation_keys=annotation_keys, + ) # Load manifest to synapse as a CSV File - manifest_synapse_file_id = self.upload_manifest_file(manifest, - metadataManifestPath, datasetId, restrict, component_name = component_name) - + manifest_synapse_file_id = self.upload_manifest_file( + manifest, + metadataManifestPath, + datasetId, + restrict, + component_name=component_name, + ) + # Set annotations for the file manifest. - manifest_annotations = self.format_manifest_annotations(manifest, manifest_synapse_file_id) + manifest_annotations = self.format_manifest_annotations( + manifest, manifest_synapse_file_id + ) self.syn.set_annotations(manifest_annotations) logger.info("Associated manifest file with dataset on Synapse.") - + return manifest_synapse_file_id def upload_manifest_combo( - self, - se, - schemaGenerator, - manifest, - metadataManifestPath, - datasetId, - table_name, - component_name, - restrict, - manifest_record_type, - hideBlanks, - table_manipulation, - table_column_names:str, - annotation_keys:str, - ): + self, + se, + schemaGenerator, + manifest, + metadataManifestPath, + datasetId, + table_name, + component_name, + restrict, + manifest_record_type, + hideBlanks, + table_manipulation, + table_column_names: str, + annotation_keys: str, + ): """Upload manifest to Synapse as a table and CSV with entities. Args: se: SchemaExplorer object @@ -1631,57 +1846,81 @@ def upload_manifest_combo( manifest_record_type: valid values are 'entity', 'table' or 'both'. Specifies whether to create entity ids and folders for each row in a manifest, a Synapse table to house the entire manifest or do both. hideBlanks (bool): Default is False -Boolean flag that does not upload annotation keys with blank values when true. Uploads Annotation keys with empty string values when false. table_malnipulation (str): Specify the way the manifest tables should be store as on Synapse when one with the same name already exists. Options are 'replace' and 'upsert'. - table_column_names: (str): display_name/display_label/class_label (default). Sets labeling style for table column names. display_name will use the raw display name as the column name. class_label will format the display - name as upper camelcase, and strip blacklisted characters, display_label will strip blacklisted characters including spaces, to retain + table_column_names: (str): display_name/display_label/class_label (default). Sets labeling style for table column names. display_name will use the raw display name as the column name. 
class_label will format the display
+            name as upper camelcase, and strip blacklisted characters, display_label will strip blacklisted characters including spaces, to retain
            display label formatting.
-        annotation_keys: (str) display_label/class_label (default), Sets labeling syle for annotation keys. class_label will format the display
-            name as upper camelcase, and strip blacklisted characters, display_label will strip blacklisted characters including spaces, to retain
+        annotation_keys: (str) display_label/class_label (default), Sets labeling style for annotation keys. class_label will format the display
+            name as upper camelcase, and strip blacklisted characters, display_label will strip blacklisted characters including spaces, to retain
            display label formatting while ensuring the label is formatted properly for Synapse annotations.
        Return:
            manifest_synapse_file_id (str): SynID of manifest csv uploaded to synapse.
        """
        manifest_synapse_table_id, manifest, table_manifest = self.uploadDB(
-            sg=schemaGenerator,
-            manifest=manifest,
-            datasetId=datasetId,
-            table_name=table_name,
-            restrict=restrict,
-            table_manipulation=table_manipulation,
-            table_column_names=table_column_names)
-
-        manifest = self.add_annotations_to_entities_files(se, schemaGenerator, manifest, manifest_record_type, datasetId, hideBlanks, manifest_synapse_table_id, annotation_keys=annotation_keys)
-
+            sg=schemaGenerator,
+            manifest=manifest,
+            datasetId=datasetId,
+            table_name=table_name,
+            restrict=restrict,
+            table_manipulation=table_manipulation,
+            table_column_names=table_column_names,
+        )
+
+        manifest = self.add_annotations_to_entities_files(
+            se,
+            schemaGenerator,
+            manifest,
+            manifest_record_type,
+            datasetId,
+            hideBlanks,
+            manifest_synapse_table_id,
+            annotation_keys=annotation_keys,
+        )
+
        # Load manifest to synapse as a CSV File
-        manifest_synapse_file_id = self.upload_manifest_file(manifest, metadataManifestPath, datasetId, restrict, component_name)
-
+        manifest_synapse_file_id = self.upload_manifest_file(
+            manifest, metadataManifestPath, datasetId, restrict, component_name
+        )
+
        # Set annotations for the file manifest.
-        manifest_annotations = self.format_manifest_annotations(manifest, manifest_synapse_file_id)
+        manifest_annotations = self.format_manifest_annotations(
+            manifest, manifest_synapse_file_id
+        )
        self.syn.set_annotations(manifest_annotations)
        logger.info("Associated manifest file with dataset on Synapse.")
-
+
        # Update manifest Synapse table with new entity id column.
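+        # Re-running uploadDB with table_manipulation="update" routes to
+        # TableOperations.updateTable, which merges the newly added entityId
+        # values back into the existing table on the Id column.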
manifest_synapse_table_id, manifest, table_manifest = self.uploadDB( - sg=schemaGenerator, - manifest=manifest, - datasetId=datasetId, - table_name=table_name, - restrict=restrict, - table_manipulation='update', - table_column_names=table_column_names) + sg=schemaGenerator, + manifest=manifest, + datasetId=datasetId, + table_name=table_name, + restrict=restrict, + table_manipulation="update", + table_column_names=table_column_names, + ) # Set annotations for the table manifest - manifest_annotations = self.format_manifest_annotations(manifest, manifest_synapse_table_id) + manifest_annotations = self.format_manifest_annotations( + manifest, manifest_synapse_table_id + ) self.syn.set_annotations(manifest_annotations) return manifest_synapse_file_id def associateMetadataWithFiles( - self, schemaGenerator: SchemaGenerator, metadataManifestPath: str, datasetId: str, manifest_record_type: str = 'table_file_and_entities', - hideBlanks: bool = False, restrict_manifest = False, table_manipulation: str = 'replace', table_column_names: str = 'class_label', - annotation_keys:str = 'class_label', + self, + schemaGenerator: SchemaGenerator, + metadataManifestPath: str, + datasetId: str, + manifest_record_type: str = "table_file_and_entities", + hideBlanks: bool = False, + restrict_manifest=False, + table_manipulation: str = "replace", + table_column_names: str = "class_label", + annotation_keys: str = "class_label", ) -> str: """Associate metadata with files in a storage dataset already on Synapse. Upload metadataManifest in the storage dataset folder on Synapse as well. Return synapseId of the uploaded manifest file. - + If this is a new manifest there could be no Synapse entities associated with the rows of this manifest this may be due to data type (e.g. clinical data) being tabular and not requiring files; to utilize uniform interfaces downstream @@ -1701,11 +1940,11 @@ def associateMetadataWithFiles( hideBlanks: Default is false. Boolean flag that does not upload annotation keys with blank values when true. Uploads Annotation keys with empty string values when false. restrict_manifest (bool): Default is false. Flag for censored data. table_malnipulation (str): Default is 'replace'. Specify the way the manifest tables should be store as on Synapse when one with the same name already exists. Options are 'replace' and 'upsert'. - table_column_names: (str): display_name/display_label/class_label (default). Sets labeling style for table column names. display_name will use the raw display name as the column name. class_label will format the display - name as upper camelcase, and strip blacklisted characters, display_label will strip blacklisted characters including spaces, to retain + table_column_names: (str): display_name/display_label/class_label (default). Sets labeling style for table column names. display_name will use the raw display name as the column name. class_label will format the display + name as upper camelcase, and strip blacklisted characters, display_label will strip blacklisted characters including spaces, to retain display label formatting. - annotation_keys: (str) display_label/class_label (default), Sets labeling syle for annotation keys. class_label will format the display - name as upper camelcase, and strip blacklisted characters, display_label will strip blacklisted characters including spaces, to retain + annotation_keys: (str) display_label/class_label (default), Sets labeling syle for annotation keys. 
class_label will format the display + name as upper camelcase, and strip blacklisted characters, display_label will strip blacklisted characters including spaces, to retain display label formatting while ensuring the label is formatted properly for Synapse annotations. Returns: manifest_synapse_file_id: SynID of manifest csv uploaded to synapse. @@ -1720,69 +1959,70 @@ def associateMetadataWithFiles( table_name, component_name = self._generate_table_name(manifest) # Upload manifest to synapse based on user input (manifest_record_type) - + if manifest_record_type == "file_only": manifest_synapse_file_id = self.upload_manifest_as_csv( - se, - schemaGenerator, - manifest, - metadataManifestPath, - datasetId=datasetId, - restrict=restrict_manifest, - hideBlanks=hideBlanks, - manifest_record_type=manifest_record_type, - component_name = component_name, - annotation_keys=annotation_keys, - ) + se, + schemaGenerator, + manifest, + metadataManifestPath, + datasetId=datasetId, + restrict=restrict_manifest, + hideBlanks=hideBlanks, + manifest_record_type=manifest_record_type, + component_name=component_name, + annotation_keys=annotation_keys, + ) elif manifest_record_type == "table_and_file": manifest_synapse_file_id = self.upload_manifest_as_table( - se, - schemaGenerator, - manifest, - metadataManifestPath, - datasetId=datasetId, - table_name=table_name, - component_name=component_name, - restrict=restrict_manifest, - hideBlanks=hideBlanks, - manifest_record_type=manifest_record_type, - table_manipulation=table_manipulation, - table_column_names=table_column_names, - ) + se, + schemaGenerator, + manifest, + metadataManifestPath, + datasetId=datasetId, + table_name=table_name, + component_name=component_name, + restrict=restrict_manifest, + hideBlanks=hideBlanks, + manifest_record_type=manifest_record_type, + table_manipulation=table_manipulation, + table_column_names=table_column_names, + annotation_keys=annotation_keys, + ) elif manifest_record_type == "file_and_entities": - manifest_synapse_file_id = self.upload_manifest_as_csv( - se, - schemaGenerator, - manifest, - metadataManifestPath, - datasetId=datasetId, - restrict=restrict_manifest, - hideBlanks=hideBlanks, - manifest_record_type=manifest_record_type, - component_name = component_name, - annotation_keys=annotation_keys, - ) + manifest_synapse_file_id = self.upload_manifest_as_csv( + se, + schemaGenerator, + manifest, + metadataManifestPath, + datasetId=datasetId, + restrict=restrict_manifest, + hideBlanks=hideBlanks, + manifest_record_type=manifest_record_type, + component_name=component_name, + annotation_keys=annotation_keys, + ) elif manifest_record_type == "table_file_and_entities": manifest_synapse_file_id = self.upload_manifest_combo( - se, - schemaGenerator, - manifest, - metadataManifestPath, - datasetId=datasetId, - table_name=table_name, - component_name=component_name, - restrict=restrict_manifest, - hideBlanks=hideBlanks, - manifest_record_type=manifest_record_type, - table_manipulation=table_manipulation, - table_column_names=table_column_names, - annotation_keys=annotation_keys, - ) + se, + schemaGenerator, + manifest, + metadataManifestPath, + datasetId=datasetId, + table_name=table_name, + component_name=component_name, + restrict=restrict_manifest, + hideBlanks=hideBlanks, + manifest_record_type=manifest_record_type, + table_manipulation=table_manipulation, + table_column_names=table_column_names, + annotation_keys=annotation_keys, + ) else: raise ValueError("Please enter a valid manifest_record_type.") return 
manifest_synapse_file_id - def getTableAnnotations(self, table_id:str): + def getTableAnnotations(self, table_id: str): """Generate dictionary of annotations for the given Synapse file. Synapse returns all custom annotations as lists since they can contain multiple values. In all cases, the values will @@ -1920,7 +2160,7 @@ def getDatasetAnnotations( # Add filenames for the files that "survived" annotation retrieval filenames = [dataset_files_map[i] for i in table["entityId"]] - if 'Filename' not in table.columns: + if "Filename" not in table.columns: table.insert(0, "Filename", filenames) # Ensure that entityId and eTag are at the end @@ -1939,20 +2179,23 @@ def getDatasetAnnotations( def raise_final_error(retry_state): return retry_state.outcome.result() - @retry(stop = stop_after_attempt(5), - wait = wait_chain(*[wait_fixed(10) for i in range (2)] + - [wait_fixed(15) for i in range(2)] + - [wait_fixed(20)]), - retry=retry_if_exception_type(LookupError), - retry_error_callback = raise_final_error) - + @retry( + stop=stop_after_attempt(5), + wait=wait_chain( + *[wait_fixed(10) for i in range(2)] + + [wait_fixed(15) for i in range(2)] + + [wait_fixed(20)] + ), + retry=retry_if_exception_type(LookupError), + retry_error_callback=raise_final_error, + ) def checkIfinAssetView(self, syn_id) -> str: # get data in administrative fileview for this pipeline assetViewTable = self.getStorageFileviewTable() all_files = list(assetViewTable["id"]) - if syn_id in all_files: + if syn_id in all_files: return True - else: + else: return False def getDatasetProject(self, datasetId: str) -> str: @@ -1981,7 +2224,6 @@ def getDatasetProject(self, datasetId: str) -> str: dataset_index = self.storageFileviewTable["id"] == datasetId dataset_row = self.storageFileviewTable[dataset_index] - # Return `projectId` for given row if only one found if len(dataset_row) == 1: dataset_project = dataset_row["projectId"].values[0] @@ -1999,11 +2241,11 @@ def getDatasetProject(self, datasetId: str) -> str: ) # If not, then assume dataset not in file view - raise LookupError ( + raise LookupError( f"The given dataset ({datasetId}) doesn't appear in the " f"configured file view ({self.storageFileview}). This might " "mean that the file view's scope needs to be updated." - ) + ) def getDatasetAnnotationsBatch( self, datasetId: str, dataset_file_ids: Sequence[str] = None @@ -2034,21 +2276,20 @@ def getDatasetAnnotationsBatch( return table def _get_table_schema_by_cname(self, table_schema): - # assume no duplicate column names in the table table_schema_by_cname = {} for col_record in table_schema: - - #TODO clean up dictionary for compactness (e.g. remove redundant 'name' key) + # TODO clean up dictionary for compactness (e.g. remove redundant 'name' key) table_schema_by_cname[col_record["name"]] = col_record return table_schema_by_cname + class TableOperations: """ Object to hold functions for various table operations specific to the Synapse Asset Store. 
- + Currently implement operations are: createTable: upload a manifest as a new table when none exist replaceTable: replace a metadata in a table from one manifest with metadata from another manifest @@ -2057,15 +2298,16 @@ class TableOperations: Operations currently in development are: upsertTable: add metadata from a manifest to an existing table that contains metadata from another manifest """ - def __init__(self, - synStore: SynapseStorage, - tableToLoad: pd.DataFrame = None, - tableName: str = None, - datasetId: str = None, - existingTableId: str = None, - restrict: bool = False - ): - + + def __init__( + self, + synStore: SynapseStorage, + tableToLoad: pd.DataFrame = None, + tableName: str = None, + datasetId: str = None, + existingTableId: str = None, + restrict: bool = False, + ): """ Class governing table operations (creation, replacement, upserts, updates) in schematic @@ -2073,7 +2315,7 @@ def __init__(self, tableName: name of the table to be uploaded datasetId: synID of the dataset for the manifest existingTableId: synId of the table currently exising on synapse (if there is one) - restrict: bool, whether or not the manifest contains sensitive data that will need additional access restrictions + restrict: bool, whether or not the manifest contains sensitive data that will need additional access restrictions """ self.synStore = synStore @@ -2083,78 +2325,100 @@ def __init__(self, self.existingTableId = existingTableId self.restrict = restrict - - def createTable(self, columnTypeDict: dict = None, specifySchema: bool = True,): + def createTable( + self, + columnTypeDict: dict = None, + specifySchema: bool = True, + ): """ Method to create a table from a metadata manifest and upload it to synapse - + Args: columnTypeDict: dictionary schema for table columns: type, size, etc - specifySchema: to specify a specific schema for the table format + specifySchema: to specify a specific schema for the table format Returns: table.schema.id: synID of the newly created table """ - datasetEntity = self.synStore.syn.get(self.datasetId, downloadFile = False) + datasetEntity = self.synStore.syn.get(self.datasetId, downloadFile=False) datasetName = datasetEntity.name - table_schema_by_cname = self.synStore._get_table_schema_by_cname(columnTypeDict) + table_schema_by_cname = self.synStore._get_table_schema_by_cname(columnTypeDict) if not self.tableName: - self.tableName = datasetName + 'table' + self.tableName = datasetName + "table" datasetParentProject = self.synStore.getDatasetProject(self.datasetId) if specifySchema: if columnTypeDict == {}: logger.error("Did not provide a columnTypeDict.") - #create list of columns: + # create list of columns: cols = [] for col in self.tableToLoad.columns: if col in table_schema_by_cname: - col_type = table_schema_by_cname[col]['columnType'] - max_size = table_schema_by_cname[col]['maximumSize'] if 'maximumSize' in table_schema_by_cname[col].keys() else 100 + col_type = table_schema_by_cname[col]["columnType"] + max_size = ( + table_schema_by_cname[col]["maximumSize"] + if "maximumSize" in table_schema_by_cname[col].keys() + else 100 + ) max_list_len = 250 if max_size and max_list_len: - cols.append(Column(name=col, columnType=col_type, - maximumSize=max_size, maximumListLength=max_list_len)) + cols.append( + Column( + name=col, + columnType=col_type, + maximumSize=max_size, + maximumListLength=max_list_len, + ) + ) elif max_size: - cols.append(Column(name=col, columnType=col_type, - maximumSize=max_size)) + cols.append( + Column(name=col, columnType=col_type, 
maximumSize=max_size) + ) else: cols.append(Column(name=col, columnType=col_type)) else: - #TODO add warning that the given col was not found and it's max size is set to 100 - cols.append(Column(name=col, columnType='STRING', maximumSize=100)) - schema = Schema(name=self.tableName, columns=cols, parent=datasetParentProject) + # TODO add warning that the given col was not found and it's max size is set to 100 + cols.append(Column(name=col, columnType="STRING", maximumSize=100)) + schema = Schema( + name=self.tableName, columns=cols, parent=datasetParentProject + ) table = Table(schema, self.tableToLoad) - table = self.synStore.syn.store(table, isRestricted = self.restrict) + table = self.synStore.syn.store(table, isRestricted=self.restrict) return table.schema.id else: # For just uploading the tables to synapse using default # column types. table = build_table(self.tableName, datasetParentProject, self.tableToLoad) - table = self.synStore.syn.store(table, isRestricted = self.restrict) + table = self.synStore.syn.store(table, isRestricted=self.restrict) return table.schema.id - def replaceTable(self, specifySchema: bool = True, columnTypeDict: dict = None,): + def replaceTable( + self, + specifySchema: bool = True, + columnTypeDict: dict = None, + ): """ Method to replace an existing table on synapse with metadata from a new manifest - + Args: - specifySchema: to infer a schema for the table format - columnTypeDict: dictionary schema for table columns: type, size, etc + specifySchema: to infer a schema for the table format + columnTypeDict: dictionary schema for table columns: type, size, etc Returns: existingTableId: synID of the already existing table that had its metadata replaced """ - datasetEntity = self.synStore.syn.get(self.datasetId, downloadFile = False) + datasetEntity = self.synStore.syn.get(self.datasetId, downloadFile=False) datasetName = datasetEntity.name - table_schema_by_cname = self.synStore._get_table_schema_by_cname(columnTypeDict) - existing_table, existing_results = self.synStore.get_synapse_table(self.existingTableId) + table_schema_by_cname = self.synStore._get_table_schema_by_cname(columnTypeDict) + existing_table, existing_results = self.synStore.get_synapse_table( + self.existingTableId + ) # remove rows self.synStore.syn.delete(existing_results) # wait for row deletion to finish on synapse before getting empty table sleep(10) - + # removes all current columns current_table = self.synStore.syn.get(self.existingTableId) current_columns = self.synStore.syn.getTableColumns(current_table) @@ -2162,58 +2426,70 @@ def replaceTable(self, specifySchema: bool = True, columnTypeDict: dict = None,) current_table.removeColumn(col) if not self.tableName: - self.tableName = datasetName + 'table' - + self.tableName = datasetName + "table" + # Process columns according to manifest entries - table_schema_by_cname = self.synStore._get_table_schema_by_cname(columnTypeDict) + table_schema_by_cname = self.synStore._get_table_schema_by_cname(columnTypeDict) datasetParentProject = self.synStore.getDatasetProject(self.datasetId) if specifySchema: if columnTypeDict == {}: logger.error("Did not provide a columnTypeDict.") - #create list of columns: + # create list of columns: cols = [] - + for col in self.tableToLoad.columns: - if col in table_schema_by_cname: - col_type = table_schema_by_cname[col]['columnType'] - max_size = table_schema_by_cname[col]['maximumSize'] if 'maximumSize' in table_schema_by_cname[col].keys() else 100 + col_type = table_schema_by_cname[col]["columnType"] + 
max_size = ( + table_schema_by_cname[col]["maximumSize"] + if "maximumSize" in table_schema_by_cname[col].keys() + else 100 + ) max_list_len = 250 if max_size and max_list_len: - cols.append(Column(name=col, columnType=col_type, - maximumSize=max_size, maximumListLength=max_list_len)) + cols.append( + Column( + name=col, + columnType=col_type, + maximumSize=max_size, + maximumListLength=max_list_len, + ) + ) elif max_size: - cols.append(Column(name=col, columnType=col_type, - maximumSize=max_size)) + cols.append( + Column(name=col, columnType=col_type, maximumSize=max_size) + ) else: cols.append(Column(name=col, columnType=col_type)) else: - - #TODO add warning that the given col was not found and it's max size is set to 100 - cols.append(Column(name=col, columnType='STRING', maximumSize=100)) - + # TODO add warning that the given col was not found and it's max size is set to 100 + cols.append(Column(name=col, columnType="STRING", maximumSize=100)) + # adds new columns to schema for col in cols: current_table.addColumn(col) - self.synStore.syn.store(current_table, isRestricted = self.restrict) + self.synStore.syn.store(current_table, isRestricted=self.restrict) # wait for synapse store to finish sleep(1) # build schema and table from columns and store with necessary restrictions - schema = Schema(name=self.tableName, columns=cols, parent=datasetParentProject) + schema = Schema( + name=self.tableName, columns=cols, parent=datasetParentProject + ) schema.id = self.existingTableId - table = Table(schema, self.tableToLoad, etag = existing_results.etag) - table = self.synStore.syn.store(table, isRestricted = self.restrict) + table = Table(schema, self.tableToLoad, etag=existing_results.etag) + table = self.synStore.syn.store(table, isRestricted=self.restrict) else: logging.error("Must specify a schema for table replacements") # remove system metadata from manifest - existing_table.drop(columns = ['ROW_ID', 'ROW_VERSION'], inplace = True) + existing_table.drop(columns=["ROW_ID", "ROW_VERSION"], inplace=True) return self.existingTableId - - def _get_auth_token(self,): + def _get_auth_token( + self, + ): authtoken = None # Get access token from environment variable if available @@ -2225,14 +2501,16 @@ def _get_auth_token(self,): # Get token from authorization header # Primarily useful for API endpoint functionality - if 'Authorization' in self.synStore.syn.default_headers: - authtoken = self.synStore.syn.default_headers['Authorization'].split('Bearer ')[-1] + if "Authorization" in self.synStore.syn.default_headers: + authtoken = self.synStore.syn.default_headers["Authorization"].split( + "Bearer " + )[-1] return authtoken # retrive credentials from synapse object # Primarily useful for local users, could only be stored here when a .synapseConfig file is used, but including to be safe synapse_object_creds = self.synStore.syn.credentials - if hasattr(synapse_object_creds, '_token'): + if hasattr(synapse_object_creds, "_token"): authtoken = synapse_object_creds.secret # Try getting creds from .synapseConfig file if it exists @@ -2241,51 +2519,64 @@ def _get_auth_token(self,): config = self.synStore.syn.getConfigFile(CONFIG.synapse_configuration_path) # check which credentials are provided in file - if config.has_option('authentication', 'authtoken'): - authtoken = config.get('authentication', 'authtoken') - + if config.has_option("authentication", "authtoken"): + authtoken = config.get("authentication", "authtoken") + # raise error if required credentials are not found if not authtoken: raise 
NameError( "authtoken credentials could not be found in the environment, synapse object, or the .synapseConfig file" ) - + return authtoken - def upsertTable(self, sg: SchemaGenerator,): + def upsertTable( + self, + sg: SchemaGenerator, + ): """ Method to upsert rows from a new manifest into an existing table on synapse - For upsert functionality to work, primary keys must follow the naming convention of _id + For upsert functionality to work, primary keys must follow the naming convention of _id `-tm upsert` should be used for initial table uploads if users intend to upsert into them at a later time; using 'upsert' at creation will generate the metadata necessary for upsert functionality. Currently it is required to use -dl/--use_display_label with table upserts. - + Args: sg: SchemaGenerator instance - + Returns: existingTableId: synID of the already existing table that had its metadata replaced - """ + """ authtoken = self._get_auth_token() - synapseDB = SynapseDatabase(auth_token=authtoken, project_id=self.synStore.getDatasetProject(self.datasetId)) + synapseDB = SynapseDatabase( + auth_token=authtoken, + project_id=self.synStore.getDatasetProject(self.datasetId), + ) try: # Try performing upsert - synapseDB.upsert_table_rows(table_name=self.tableName, data=self.tableToLoad) - except(SynapseHTTPError) as ex: + synapseDB.upsert_table_rows( + table_name=self.tableName, data=self.tableToLoad + ) + except SynapseHTTPError as ex: # If error is raised because Table has old `Uuid` column and not new `Id` column, then handle and re-attempt upload - if 'Id is not a valid column name or id' in str(ex): + if "Id is not a valid column name or id" in str(ex): self._update_table_uuid_column(sg) - synapseDB.upsert_table_rows(table_name=self.tableName, data=self.tableToLoad) + synapseDB.upsert_table_rows( + table_name=self.tableName, data=self.tableToLoad + ) # Raise if other error else: raise ex return self.existingTableId - - def _update_table_uuid_column(self, sg: SchemaGenerator,) -> None: + + def _update_table_uuid_column( + self, + sg: SchemaGenerator, + ) -> None: """Removes the `Uuid` column when present, and relpaces with an `Id` column Used to enable backwards compatability for manifests using the old `Uuid` convention @@ -2299,61 +2590,77 @@ def _update_table_uuid_column(self, sg: SchemaGenerator,) -> None: # Get the columns of the schema schema = self.synStore.syn.get(self.existingTableId) cols = self.synStore.syn.getTableColumns(schema) - + # Iterate through columns until `Uuid` column is found for col in cols: - if col.name.lower() == 'uuid': + if col.name.lower() == "uuid": # See if schema has `Uuid` column specified try: - uuid_col_in_schema = sg.se.is_class_in_schema(col.name) - except (KeyError): + uuid_col_in_schema = sg.se.is_class_in_schema(col.name) + except KeyError: uuid_col_in_schema = False # If there is, then create a new `Id` column from scratch if uuid_col_in_schema: - new_col = Column(columnType = "STRING", maximumSize = 64, name = "Id") + new_col = Column(columnType="STRING", maximumSize=64, name="Id") schema.addColumn(new_col) schema = self.synStore.syn.store(schema) # If there is not, then use the old `Uuid` column as a basis for the new `Id` column else: - # Build ColumnModel that will be used for new column - id_column = Column(name='Id', columnType='STRING', maximumSize=64, defaultValue=None, maximumListLength=1) + id_column = Column( + name="Id", + columnType="STRING", + maximumSize=64, + defaultValue=None, + maximumListLength=1, + ) new_col_response = 
self.synStore.syn.store(id_column)
-
                    # Define columnChange body
                    columnChangeDict = {
                        "concreteType": "org.sagebionetworks.repo.model.table.TableSchemaChangeRequest",
                        "entityId": self.existingTableId,
                        "changes": [
-                            {
-                                "oldColumnId": col['id'],
-                                "newColumnId": new_col_response['id'],
+                            {
+                                "oldColumnId": col["id"],
+                                "newColumnId": new_col_response["id"],
                            }
-                        ]
+                        ],
                    }

-                    self.synStore.syn._async_table_update(table=self.existingTableId, changes=[columnChangeDict], wait=False)
+                    self.synStore.syn._async_table_update(
+                        table=self.existingTableId,
+                        changes=[columnChangeDict],
+                        wait=False,
+                    )
                break
        return

-    def updateTable(self, update_col: str = 'Id',):
+    def updateTable(
+        self,
+        update_col: str = "Id",
+    ):
        """
        Method to update an existing table with a new column

        Args:
-            updateCol: column to index the old and new tables on
+            updateCol: column to index the old and new tables on
        Returns:
            existingTableId: synID of the already existing table that had its metadata replaced
        """
-        existing_table, existing_results = self.synStore.get_synapse_table(self.existingTableId)
-
+        existing_table, existing_results = self.synStore.get_synapse_table(
+            self.existingTableId
+        )
+
        self.tableToLoad = update_df(existing_table, self.tableToLoad, update_col)
        # store table with existing etag data and impose restrictions as appropriate
-        self.synStore.syn.store(Table(self.existingTableId, self.tableToLoad, etag = existing_results.etag), isRestricted = self.restrict)
+        self.synStore.syn.store(
+            Table(self.existingTableId, self.tableToLoad, etag=existing_results.etag),
+            isRestricted=self.restrict,
+        )

        return self.existingTableId

From b59997cf4dfe6bafbc6d451ecab51fbe8881370c Mon Sep 17 00:00:00 2001
From: Mialy DeFelice
Date: Thu, 7 Dec 2023 13:12:08 -0800
Subject: [PATCH 036/199] add value error if invalid table_column_names value passed

---
 schematic/store/synapse.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py
index bf985e67a..6155dcbff 100644
--- a/schematic/store/synapse.py
+++ b/schematic/store/synapse.py
@@ -1127,6 +1127,7 @@ def formatDB(self, sg, manifest, table_column_names):

         table_manifest = deepcopy(manifest)

+
         if table_column_names == "display_name":
             cols = table_manifest.columns

@@ -1143,6 +1144,8 @@ def formatDB(self, sg, manifest, table_column_names):
             )
             for col in manifest_columns
         ]
+        else:
+            raise ValueError(f"The provided table_column_name: {table_column_names} is not valid, please resubmit with an allowed value only.")

         cols = list(map(lambda x: x.replace("EntityId", "entityId"), cols))

From c0b76418402945043f6ceeebb72faae557b82540 Mon Sep 17 00:00:00 2001
From: Mialy DeFelice
Date: Thu, 7 Dec 2023 13:13:05 -0800
Subject: [PATCH 037/199] update test_api.py to take table_column_names and annotation_keys parameters

---
 tests/test_api.py | 11 ++++++-----
 1 file changed, 6 insertions(+), 5 deletions(-)

diff --git a/tests/test_api.py b/tests/test_api.py
index 9d69c63b2..49563c245 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -765,7 +765,7 @@ def test_submit_manifest_table_and_file_replace(self, client, request_headers, d
            "asset_view": "syn51514344",
            "dataset_id": "syn51514345",
            "table_manipulation": 'replace',
-            "use_schema_label": True
+            "table_column_names": 'class_label',
        }

        response_csv = client.post('http://localhost:3001/v1/model/submit', query_string=params, data={"file_name": (open(test_manifest_submit, 'rb'), "test.csv")}, headers=request_headers)
@@ -783,7 +783,7 @@ def test_submit_manifest_file_only_replace(self, 
helpers, client, request_header "restrict_rules": False, "manifest_record_type": "file_only", "table_manipulation": 'replace', - "use_schema_label": True + "table_column_names": 'class_label', } if data_type == "Biospecimen": @@ -826,7 +826,7 @@ def test_submit_manifest_json_str_replace(self, client, request_headers, data_mo "asset_view": "syn51514344", "dataset_id": "syn51514345", "table_manipulation": 'replace', - "use_schema_label": True + "table_column_names": 'class_label', } params["json_str"] = json_str response = client.post('http://localhost:3001/v1/model/submit', query_string = params, data={"file_name":''}, headers = request_headers) @@ -843,7 +843,8 @@ def test_submit_manifest_w_file_and_entities(self, client, request_headers, data "asset_view": "syn51514501", "dataset_id": "syn51514523", "table_manipulation": 'replace', - "use_schema_label": True + "table_column_names": 'class_label', + "annotation_keys": 'class_label', } # test uploading a csv file @@ -861,7 +862,7 @@ def test_submit_manifest_table_and_file_upsert(self, client, request_headers, da "asset_view": "syn51514557", "dataset_id": "syn51514551", "table_manipulation": 'upsert', - "use_schema_label": False # have to set use_schema_label to false to ensure upsert feature works + "table_column_names": 'display_name' # have to set table_column_names to display_name to ensure upsert feature works } # test uploading a csv file From 62548e79ef76db3bd965da727ef80a9d3fca3ea3 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Thu, 7 Dec 2023 13:13:47 -0800 Subject: [PATCH 038/199] add table_column_name and annotation_keys parameters to test_store.py --- tests/test_store.py | 31 ++++++++++++++++++++----------- 1 file changed, 20 insertions(+), 11 deletions(-) diff --git a/tests/test_store.py b/tests/test_store.py index 8828d3fb6..08234e3aa 100644 --- a/tests/test_store.py +++ b/tests/test_store.py @@ -165,9 +165,10 @@ def test_annotation_submission(self, synapse_store, helpers, manifest_path, test metadataManifestPath = helpers.get_data_path(manifest_path), datasetId = datasetId, manifest_record_type = manifest_record_type, - useSchemaLabel = True, hideBlanks = True, restrict_manifest = False, + table_column_names='class_label', + annotation_keys='class_label' ) except RetryError: pass @@ -378,8 +379,9 @@ def test_tidy_table(self, dataset_fileview_table_tidy): @pytest.mark.table_operations class TestTableOperations: - - def test_createTable(self, helpers, synapse_store, config: Configuration, projectId, datasetId): + @pytest.mark.parametrize("table_column_names", ['display_name', 'display_label', 'class_label'], ids=['tcn_display_name', 'tcn_display_label', 'tcn_class_label']) + @pytest.mark.parametrize("annotation_keys", ['display_label', 'class_label'], ids=['aks_display_label', 'aks_class_label']) + def test_createTable(self, helpers, synapse_store, config: Configuration, projectId, datasetId, table_column_names,annotation_keys): table_manipulation = None # Check if FollowUp table exists if so delete @@ -404,10 +406,11 @@ def test_createTable(self, helpers, synapse_store, config: Configuration, projec metadataManifestPath = helpers.get_data_path(manifest_path), datasetId = datasetId, manifest_record_type = 'table_and_file', - useSchemaLabel = True, hideBlanks = True, restrict_manifest = False, table_manipulation=table_manipulation, + table_column_names=table_column_names, + annotation_keys=annotation_keys, ) existing_tables = synapse_store.get_table_info(projectId = projectId) @@ -416,7 +419,9 @@ def test_createTable(self, 
helpers, synapse_store, config: Configuration, projec # assert table exists assert table_name in existing_tables.keys() - def test_replaceTable(self, helpers, synapse_store, config: Configuration, projectId, datasetId): + @pytest.mark.parametrize("table_column_names", ['display_label', 'class_label'], ids=['tcn_display_label', 'tcn_class_label']) + @pytest.mark.parametrize("annotation_keys", ['display_label', 'class_label'], ids=['aks_display_label', 'aks_class_label']) + def test_replaceTable(self, helpers, synapse_store, config: Configuration, projectId, datasetId, table_column_names, annotation_keys): table_manipulation = 'replace' table_name='followup_synapse_storage_manifest_table' @@ -443,10 +448,11 @@ def test_replaceTable(self, helpers, synapse_store, config: Configuration, proje metadataManifestPath = helpers.get_data_path(manifest_path), datasetId = datasetId, manifest_record_type = 'table_and_file', - useSchemaLabel = True, hideBlanks = True, restrict_manifest = False, table_manipulation=table_manipulation, + table_column_names=table_column_names, + annotation_keys=annotation_keys, ) existing_tables = synapse_store.get_table_info(projectId = projectId) @@ -465,10 +471,10 @@ def test_replaceTable(self, helpers, synapse_store, config: Configuration, proje metadataManifestPath = helpers.get_data_path(replacement_manifest_path), datasetId = datasetId, manifest_record_type = 'table_and_file', - useSchemaLabel = True, hideBlanks = True, restrict_manifest = False, - table_manipulation=table_manipulation, + table_column_names=table_column_names, + annotation_keys=annotation_keys, ) existing_tables = synapse_store.get_table_info(projectId = projectId) @@ -483,7 +489,8 @@ def test_replaceTable(self, helpers, synapse_store, config: Configuration, proje # delete table synapse_store.syn.delete(tableId) - def test_upsertTable(self, helpers, synapse_store, config:Configuration, projectId, datasetId): + @pytest.mark.parametrize("annotation_keys", ['display_label', 'class_label'], ids=['aks_display_label', 'aks_class_label']) + def test_upsertTable(self, helpers, synapse_store, config:Configuration, projectId, datasetId, annotation_keys): table_manipulation = "upsert" table_name="MockRDB_synapse_storage_manifest_table".lower() @@ -510,10 +517,11 @@ def test_upsertTable(self, helpers, synapse_store, config:Configuration, project metadataManifestPath = helpers.get_data_path(manifest_path), datasetId = datasetId, manifest_record_type = 'table_and_file', - useSchemaLabel = False, hideBlanks = True, restrict_manifest = False, table_manipulation=table_manipulation, + table_column_names='display_name', + annotation_keys=annotation_keys, ) existing_tables = synapse_store.get_table_info(projectId = projectId) @@ -536,10 +544,11 @@ def test_upsertTable(self, helpers, synapse_store, config:Configuration, project metadataManifestPath = helpers.get_data_path(replacement_manifest_path), datasetId = datasetId, manifest_record_type = 'table_and_file', - useSchemaLabel = False, hideBlanks = True, restrict_manifest = False, table_manipulation=table_manipulation, + table_column_names='display_name', + annotation_keys=annotation_keys, ) existing_tables = synapse_store.get_table_info(projectId = projectId) From 4c1de4983d45c27bf5abf55e02dcb57d675ae146 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Thu, 7 Dec 2023 14:17:00 -0800 Subject: [PATCH 039/199] run black on help --- schematic/help.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/schematic/help.py b/schematic/help.py index 
d39214bb0..b1537e1b6 100644
--- a/schematic/help.py
+++ b/schematic/help.py
@@ -129,7 +129,7 @@
             "Attribute display names in the schema must only include characters that are "
             "accepted by Synapse. Annotation names may only contain: letters, numbers, '_' and '.'"
         ),
-        "table_column_names":(
+        "table_column_names": (
             "Options: class_label, display_label, display_name; the default is class_label. "
             "When display_label or display_name is used, table columns are uploaded with display name formatting and blacklisted characters removed."
         ),

From 4e51ca5bb8008d1da510d4647a93c60338a6e912 Mon Sep 17 00:00:00 2001
From: Mialy DeFelice
Date: Wed, 13 Dec 2023 09:48:19 -0800
Subject: [PATCH 040/199] remove use_schema_label

---
 schematic/models/metadata.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/schematic/models/metadata.py b/schematic/models/metadata.py
index 20a60ff23..5fd3f3977 100644
--- a/schematic/models/metadata.py
+++ b/schematic/models/metadata.py
@@ -317,7 +317,6 @@ def submit_metadata_manifest(
         restrict_rules: bool,
         access_token: str,
         validate_component: Optional[str] = None,
-        use_schema_label: bool = True,
         hide_blanks: bool = False,
         project_scope: List = None,
         table_manipulation: str = "replace",

From a40240fa1b66773d9b3904ab7f8d2d8ccaeddab0 Mon Sep 17 00:00:00 2001
From: Mialy DeFelice
Date: Thu, 14 Dec 2023 15:44:24 -0800
Subject: [PATCH 041/199] update example model jsonld

---
 tests/data/example.model.jsonld | 34 +++++++++++++++++-----------------
 1 file changed, 17 insertions(+), 17 deletions(-)

diff --git a/tests/data/example.model.jsonld b/tests/data/example.model.jsonld
index f49346f0e..a58d36323 100644
--- a/tests/data/example.model.jsonld
+++ b/tests/data/example.model.jsonld
@@ -7,6 +7,23 @@
     "xsd": "http://www.w3.org/2001/XMLSchema#"
   },
   "@graph": [
+    {
+      "@id": "bts:Component",
+      "@type": "rdfs:Class",
+      "rdfs:comment": "TBD",
+      "rdfs:label": "Component",
+      "rdfs:subClassOf": [
+        {
+          "@id": "bts:Thing"
+        }
+      ],
+      "schema:isPartOf": {
+        "@id": "http://schema.biothings.io"
+      },
+      "sms:displayName": "Component",
+      "sms:required": "sms:true",
+      "sms:validationRules": []
+    },
     {
       "@id": "bts:Patient",
       "@type": "rdfs:Class",
@@ -128,23 +145,6 @@
       "sms:required": "sms:true",
       "sms:validationRules": []
     },
-    {
-      "@id": "bts:Component",
-      "@type": "rdfs:Class",
-      "rdfs:comment": "TBD",
-      "rdfs:label": "Component",
-      "rdfs:subClassOf": [
-        {
-          "@id": "bts:Thing"
-        }
-      ],
-      "schema:isPartOf": {
-        "@id": "http://schema.biothings.io"
-      },
-      "sms:displayName": "Component",
-      "sms:required": "sms:false",
-      "sms:validationRules": []
-    },
     {
       "@id": "bts:DataType",
       "@type": "rdfs:Class",

From b163e0a8df8b5f449e4795e285b6c1f36e86fead Mon Sep 17 00:00:00 2001
From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com>
Date: Thu, 11 Jan 2024 14:31:44 -0700
Subject: [PATCH 042/199] update synapse client dependency

---
 poetry.lock    | 1462 ++++++++++++++++++++++++------------------------
 pyproject.toml |    2 +-
 2 files changed, 737 insertions(+), 727 deletions(-)

diff --git a/poetry.lock b/poetry.lock
index 438b83061..9f7f14539 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -2,13 +2,13 @@

 [[package]]
 name = "alabaster"
-version = "0.7.13"
-description = "A configurable sidebar-enabled Sphinx theme"
+version = "0.7.16"
+description = "A light, configurable Sphinx theme"
 optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.9"
 files = [
-    {file = "alabaster-0.7.13-py3-none-any.whl", hash = 
"sha256:1ee19aca801bbabb5ba3f5f258e4422dfa86f82f3e9cefb0859b283cdd7f62a3"}, - {file = "alabaster-0.7.13.tar.gz", hash = "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2"}, + {file = "alabaster-0.7.16-py3-none-any.whl", hash = "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92"}, + {file = "alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65"}, ] [[package]] @@ -35,19 +35,20 @@ dev = ["black", "docutils", "flake8", "ipython", "m2r", "mistune (<2.0.0)", "pyt [[package]] name = "anyio" -version = "4.1.0" +version = "4.2.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.8" files = [ - {file = "anyio-4.1.0-py3-none-any.whl", hash = "sha256:56a415fbc462291813a94528a779597226619c8e78af7de0507333f700011e5f"}, - {file = "anyio-4.1.0.tar.gz", hash = "sha256:5a0bec7085176715be77df87fc66d6c9d70626bd752fcc85f57cdbee5b3760da"}, + {file = "anyio-4.2.0-py3-none-any.whl", hash = "sha256:745843b39e829e108e518c489b31dc757de7d2131d53fac32bd8df268227bfee"}, + {file = "anyio-4.2.0.tar.gz", hash = "sha256:e1875bb4b4e2de1669f4bc7869b6d3f54231cdced71605e6e64c9be77e3be50f"}, ] [package.dependencies] exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} idna = ">=2.8" sniffio = ">=1.1" +typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} [package.extras] doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] @@ -191,31 +192,32 @@ typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} [[package]] name = "attrs" -version = "23.1.0" +version = "23.2.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.7" files = [ - {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, - {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, ] [package.extras] cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] -dev = ["attrs[docs,tests]", "pre-commit"] +dev = ["attrs[tests]", "pre-commit"] docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] tests = ["attrs[tests-no-zope]", "zope-interface"] -tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] [[package]] name = "babel" -version = "2.13.1" +version = "2.14.0" description = "Internationalization utilities" optional = false python-versions = ">=3.7" files = [ - {file = "Babel-2.13.1-py3-none-any.whl", hash = "sha256:7077a4984b02b6727ac10f1f7294484f737443d7e2e66c5e4380e41a3ae0b4ed"}, - {file = "Babel-2.13.1.tar.gz", hash = "sha256:33e0952d7dd6374af8dbf6768cc4ddf3ccfefc244f9986d4074704f2fbd18900"}, + {file = "Babel-2.14.0-py3-none-any.whl", hash = "sha256:efb1a25b7118e67ce3a259bed20545c29cb68be8ad2c784c83689981b7a57287"}, + {file = "Babel-2.14.0.tar.gz", hash = 
"sha256:6919867db036398ba21eb5c7a0f6b28ab8cbc3ae7a73a44ebe34ae74a4e7d363"}, ] [package.extras] @@ -252,29 +254,33 @@ lxml = ["lxml"] [[package]] name = "black" -version = "23.11.0" +version = "23.12.1" description = "The uncompromising code formatter." optional = false python-versions = ">=3.8" files = [ - {file = "black-23.11.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dbea0bb8575c6b6303cc65017b46351dc5953eea5c0a59d7b7e3a2d2f433a911"}, - {file = "black-23.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:412f56bab20ac85927f3a959230331de5614aecda1ede14b373083f62ec24e6f"}, - {file = "black-23.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d136ef5b418c81660ad847efe0e55c58c8208b77a57a28a503a5f345ccf01394"}, - {file = "black-23.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:6c1cac07e64433f646a9a838cdc00c9768b3c362805afc3fce341af0e6a9ae9f"}, - {file = "black-23.11.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cf57719e581cfd48c4efe28543fea3d139c6b6f1238b3f0102a9c73992cbb479"}, - {file = "black-23.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:698c1e0d5c43354ec5d6f4d914d0d553a9ada56c85415700b81dc90125aac244"}, - {file = "black-23.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:760415ccc20f9e8747084169110ef75d545f3b0932ee21368f63ac0fee86b221"}, - {file = "black-23.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:58e5f4d08a205b11800332920e285bd25e1a75c54953e05502052738fe16b3b5"}, - {file = "black-23.11.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:45aa1d4675964946e53ab81aeec7a37613c1cb71647b5394779e6efb79d6d187"}, - {file = "black-23.11.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c44b7211a3a0570cc097e81135faa5f261264f4dfaa22bd5ee2875a4e773bd6"}, - {file = "black-23.11.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a9acad1451632021ee0d146c8765782a0c3846e0e0ea46659d7c4f89d9b212b"}, - {file = "black-23.11.0-cp38-cp38-win_amd64.whl", hash = "sha256:fc7f6a44d52747e65a02558e1d807c82df1d66ffa80a601862040a43ec2e3142"}, - {file = "black-23.11.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7f622b6822f02bfaf2a5cd31fdb7cd86fcf33dab6ced5185c35f5db98260b055"}, - {file = "black-23.11.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:250d7e60f323fcfc8ea6c800d5eba12f7967400eb6c2d21ae85ad31c204fb1f4"}, - {file = "black-23.11.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5133f5507007ba08d8b7b263c7aa0f931af5ba88a29beacc4b2dc23fcefe9c06"}, - {file = "black-23.11.0-cp39-cp39-win_amd64.whl", hash = "sha256:421f3e44aa67138ab1b9bfbc22ee3780b22fa5b291e4db8ab7eee95200726b07"}, - {file = "black-23.11.0-py3-none-any.whl", hash = "sha256:54caaa703227c6e0c87b76326d0862184729a69b73d3b7305b6288e1d830067e"}, - {file = "black-23.11.0.tar.gz", hash = "sha256:4c68855825ff432d197229846f971bc4d6666ce90492e5b02013bcaca4d9ab05"}, + {file = "black-23.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0aaf6041986767a5e0ce663c7a2f0e9eaf21e6ff87a5f95cbf3675bfd4c41d2"}, + {file = "black-23.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c88b3711d12905b74206227109272673edce0cb29f27e1385f33b0163c414bba"}, + {file = "black-23.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a920b569dc6b3472513ba6ddea21f440d4b4c699494d2e972a1753cdc25df7b0"}, + {file = "black-23.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:3fa4be75ef2a6b96ea8d92b1587dd8cb3a35c7e3d51f0738ced0781c3aa3a5a3"}, + {file = 
"black-23.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8d4df77958a622f9b5a4c96edb4b8c0034f8434032ab11077ec6c56ae9f384ba"}, + {file = "black-23.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:602cfb1196dc692424c70b6507593a2b29aac0547c1be9a1d1365f0d964c353b"}, + {file = "black-23.12.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c4352800f14be5b4864016882cdba10755bd50805c95f728011bcb47a4afd59"}, + {file = "black-23.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:0808494f2b2df923ffc5723ed3c7b096bd76341f6213989759287611e9837d50"}, + {file = "black-23.12.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:25e57fd232a6d6ff3f4478a6fd0580838e47c93c83eaf1ccc92d4faf27112c4e"}, + {file = "black-23.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2d9e13db441c509a3763a7a3d9a49ccc1b4e974a47be4e08ade2a228876500ec"}, + {file = "black-23.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1bd9c210f8b109b1762ec9fd36592fdd528485aadb3f5849b2740ef17e674e"}, + {file = "black-23.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:ae76c22bde5cbb6bfd211ec343ded2163bba7883c7bc77f6b756a1049436fbb9"}, + {file = "black-23.12.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1fa88a0f74e50e4487477bc0bb900c6781dbddfdfa32691e780bf854c3b4a47f"}, + {file = "black-23.12.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a4d6a9668e45ad99d2f8ec70d5c8c04ef4f32f648ef39048d010b0689832ec6d"}, + {file = "black-23.12.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b18fb2ae6c4bb63eebe5be6bd869ba2f14fd0259bda7d18a46b764d8fb86298a"}, + {file = "black-23.12.1-cp38-cp38-win_amd64.whl", hash = "sha256:c04b6d9d20e9c13f43eee8ea87d44156b8505ca8a3c878773f68b4e4812a421e"}, + {file = "black-23.12.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e1b38b3135fd4c025c28c55ddfc236b05af657828a8a6abe5deec419a0b7055"}, + {file = "black-23.12.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4f0031eaa7b921db76decd73636ef3a12c942ed367d8c3841a0739412b260a54"}, + {file = "black-23.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97e56155c6b737854e60a9ab1c598ff2533d57e7506d97af5481141671abf3ea"}, + {file = "black-23.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:dd15245c8b68fe2b6bd0f32c1556509d11bb33aec9b5d0866dd8e2ed3dba09c2"}, + {file = "black-23.12.1-py3-none-any.whl", hash = "sha256:78baad24af0f033958cad29731e27363183e140962595def56423e626f4bee3e"}, + {file = "black-23.12.1.tar.gz", hash = "sha256:4ce3ef14ebe8d9509188014d96af1c456a910d5b5cbf434a09fef7e024b3d0d5"}, ] [package.dependencies] @@ -288,7 +294,7 @@ typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} [package.extras] colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)"] +d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] @@ -551,13 +557,13 @@ files = [ [[package]] name = "comm" -version = "0.2.0" +version = "0.2.1" description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." 
optional = false python-versions = ">=3.8" files = [ - {file = "comm-0.2.0-py3-none-any.whl", hash = "sha256:2da8d9ebb8dd7bfc247adaff99f24dce705638a8042b85cb995066793e391001"}, - {file = "comm-0.2.0.tar.gz", hash = "sha256:a517ea2ca28931c7007a7a99c562a0fa5883cfb48963140cf642c41c948498be"}, + {file = "comm-0.2.1-py3-none-any.whl", hash = "sha256:87928485c0dfc0e7976fd89fc1e187023cf587e7c353e4a9b417555b44adf021"}, + {file = "comm-0.2.1.tar.gz", hash = "sha256:0bc91edae1344d39d3661dcbc36937181fdaddb304790458f8b044dbc064b89a"}, ] [package.dependencies] @@ -598,63 +604,63 @@ tests = ["MarkupSafe (>=0.23)", "aiohttp (>=2.3.10,<4)", "aiohttp-jinja2 (>=0.14 [[package]] name = "coverage" -version = "7.3.2" +version = "7.4.0" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d872145f3a3231a5f20fd48500274d7df222e291d90baa2026cc5152b7ce86bf"}, - {file = "coverage-7.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:310b3bb9c91ea66d59c53fa4989f57d2436e08f18fb2f421a1b0b6b8cc7fffda"}, - {file = "coverage-7.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f47d39359e2c3779c5331fc740cf4bce6d9d680a7b4b4ead97056a0ae07cb49a"}, - {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa72dbaf2c2068404b9870d93436e6d23addd8bbe9295f49cbca83f6e278179c"}, - {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:beaa5c1b4777f03fc63dfd2a6bd820f73f036bfb10e925fce067b00a340d0f3f"}, - {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:dbc1b46b92186cc8074fee9d9fbb97a9dd06c6cbbef391c2f59d80eabdf0faa6"}, - {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:315a989e861031334d7bee1f9113c8770472db2ac484e5b8c3173428360a9148"}, - {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d1bc430677773397f64a5c88cb522ea43175ff16f8bfcc89d467d974cb2274f9"}, - {file = "coverage-7.3.2-cp310-cp310-win32.whl", hash = "sha256:a889ae02f43aa45032afe364c8ae84ad3c54828c2faa44f3bfcafecb5c96b02f"}, - {file = "coverage-7.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:c0ba320de3fb8c6ec16e0be17ee1d3d69adcda99406c43c0409cb5c41788a611"}, - {file = "coverage-7.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ac8c802fa29843a72d32ec56d0ca792ad15a302b28ca6203389afe21f8fa062c"}, - {file = "coverage-7.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:89a937174104339e3a3ffcf9f446c00e3a806c28b1841c63edb2b369310fd074"}, - {file = "coverage-7.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e267e9e2b574a176ddb983399dec325a80dbe161f1a32715c780b5d14b5f583a"}, - {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2443cbda35df0d35dcfb9bf8f3c02c57c1d6111169e3c85fc1fcc05e0c9f39a3"}, - {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4175e10cc8dda0265653e8714b3174430b07c1dca8957f4966cbd6c2b1b8065a"}, - {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf38419fb1a347aaf63481c00f0bdc86889d9fbf3f25109cf96c26b403fda1"}, - {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = 
"sha256:5c913b556a116b8d5f6ef834038ba983834d887d82187c8f73dec21049abd65c"}, - {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1981f785239e4e39e6444c63a98da3a1db8e971cb9ceb50a945ba6296b43f312"}, - {file = "coverage-7.3.2-cp311-cp311-win32.whl", hash = "sha256:43668cabd5ca8258f5954f27a3aaf78757e6acf13c17604d89648ecc0cc66640"}, - {file = "coverage-7.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10c39c0452bf6e694511c901426d6b5ac005acc0f78ff265dbe36bf81f808a2"}, - {file = "coverage-7.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4cbae1051ab791debecc4a5dcc4a1ff45fc27b91b9aee165c8a27514dd160836"}, - {file = "coverage-7.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12d15ab5833a997716d76f2ac1e4b4d536814fc213c85ca72756c19e5a6b3d63"}, - {file = "coverage-7.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c7bba973ebee5e56fe9251300c00f1579652587a9f4a5ed8404b15a0471f216"}, - {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe494faa90ce6381770746077243231e0b83ff3f17069d748f645617cefe19d4"}, - {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6e9589bd04d0461a417562649522575d8752904d35c12907d8c9dfeba588faf"}, - {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d51ac2a26f71da1b57f2dc81d0e108b6ab177e7d30e774db90675467c847bbdf"}, - {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:99b89d9f76070237975b315b3d5f4d6956ae354a4c92ac2388a5695516e47c84"}, - {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fa28e909776dc69efb6ed975a63691bc8172b64ff357e663a1bb06ff3c9b589a"}, - {file = "coverage-7.3.2-cp312-cp312-win32.whl", hash = "sha256:289fe43bf45a575e3ab10b26d7b6f2ddb9ee2dba447499f5401cfb5ecb8196bb"}, - {file = "coverage-7.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:7dbc3ed60e8659bc59b6b304b43ff9c3ed858da2839c78b804973f613d3e92ed"}, - {file = "coverage-7.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f94b734214ea6a36fe16e96a70d941af80ff3bfd716c141300d95ebc85339738"}, - {file = "coverage-7.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:af3d828d2c1cbae52d34bdbb22fcd94d1ce715d95f1a012354a75e5913f1bda2"}, - {file = "coverage-7.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:630b13e3036e13c7adc480ca42fa7afc2a5d938081d28e20903cf7fd687872e2"}, - {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9eacf273e885b02a0273bb3a2170f30e2d53a6d53b72dbe02d6701b5296101c"}, - {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8f17966e861ff97305e0801134e69db33b143bbfb36436efb9cfff6ec7b2fd9"}, - {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b4275802d16882cf9c8b3d057a0839acb07ee9379fa2749eca54efbce1535b82"}, - {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:72c0cfa5250f483181e677ebc97133ea1ab3eb68645e494775deb6a7f6f83901"}, - {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cb536f0dcd14149425996821a168f6e269d7dcd2c273a8bff8201e79f5104e76"}, - {file = "coverage-7.3.2-cp38-cp38-win32.whl", hash = "sha256:307adb8bd3abe389a471e649038a71b4eb13bfd6b7dd9a129fa856f5c695cf92"}, - {file = 
"coverage-7.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:88ed2c30a49ea81ea3b7f172e0269c182a44c236eb394718f976239892c0a27a"}, - {file = "coverage-7.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b631c92dfe601adf8f5ebc7fc13ced6bb6e9609b19d9a8cd59fa47c4186ad1ce"}, - {file = "coverage-7.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d3d9df4051c4a7d13036524b66ecf7a7537d14c18a384043f30a303b146164e9"}, - {file = "coverage-7.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f7363d3b6a1119ef05015959ca24a9afc0ea8a02c687fe7e2d557705375c01f"}, - {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f11cc3c967a09d3695d2a6f03fb3e6236622b93be7a4b5dc09166a861be6d25"}, - {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:149de1d2401ae4655c436a3dced6dd153f4c3309f599c3d4bd97ab172eaf02d9"}, - {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3a4006916aa6fee7cd38db3bfc95aa9c54ebb4ffbfc47c677c8bba949ceba0a6"}, - {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9028a3871280110d6e1aa2df1afd5ef003bab5fb1ef421d6dc748ae1c8ef2ebc"}, - {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9f805d62aec8eb92bab5b61c0f07329275b6f41c97d80e847b03eb894f38d083"}, - {file = "coverage-7.3.2-cp39-cp39-win32.whl", hash = "sha256:d1c88ec1a7ff4ebca0219f5b1ef863451d828cccf889c173e1253aa84b1e07ce"}, - {file = "coverage-7.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b4767da59464bb593c07afceaddea61b154136300881844768037fd5e859353f"}, - {file = "coverage-7.3.2-pp38.pp39.pp310-none-any.whl", hash = "sha256:ae97af89f0fbf373400970c0a21eef5aa941ffeed90aee43650b81f7d7f47637"}, - {file = "coverage-7.3.2.tar.gz", hash = "sha256:be32ad29341b0170e795ca590e1c07e81fc061cb5b10c74ce7203491484404ef"}, + {file = "coverage-7.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36b0ea8ab20d6a7564e89cb6135920bc9188fb5f1f7152e94e8300b7b189441a"}, + {file = "coverage-7.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0676cd0ba581e514b7f726495ea75aba3eb20899d824636c6f59b0ed2f88c471"}, + {file = "coverage-7.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ca5c71a5a1765a0f8f88022c52b6b8be740e512980362f7fdbb03725a0d6b9"}, + {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7c97726520f784239f6c62506bc70e48d01ae71e9da128259d61ca5e9788516"}, + {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:815ac2d0f3398a14286dc2cea223a6f338109f9ecf39a71160cd1628786bc6f5"}, + {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:80b5ee39b7f0131ebec7968baa9b2309eddb35b8403d1869e08f024efd883566"}, + {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5b2ccb7548a0b65974860a78c9ffe1173cfb5877460e5a229238d985565574ae"}, + {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:995ea5c48c4ebfd898eacb098164b3cc826ba273b3049e4a889658548e321b43"}, + {file = "coverage-7.4.0-cp310-cp310-win32.whl", hash = "sha256:79287fd95585ed36e83182794a57a46aeae0b64ca53929d1176db56aacc83451"}, + {file = "coverage-7.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:5b14b4f8760006bfdb6e08667af7bc2d8d9bfdb648351915315ea17645347137"}, + {file = 
"coverage-7.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:04387a4a6ecb330c1878907ce0dc04078ea72a869263e53c72a1ba5bbdf380ca"}, + {file = "coverage-7.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea81d8f9691bb53f4fb4db603203029643caffc82bf998ab5b59ca05560f4c06"}, + {file = "coverage-7.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74775198b702868ec2d058cb92720a3c5a9177296f75bd97317c787daf711505"}, + {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76f03940f9973bfaee8cfba70ac991825611b9aac047e5c80d499a44079ec0bc"}, + {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:485e9f897cf4856a65a57c7f6ea3dc0d4e6c076c87311d4bc003f82cfe199d25"}, + {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6ae8c9d301207e6856865867d762a4b6fd379c714fcc0607a84b92ee63feff70"}, + {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bf477c355274a72435ceb140dc42de0dc1e1e0bf6e97195be30487d8eaaf1a09"}, + {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:83c2dda2666fe32332f8e87481eed056c8b4d163fe18ecc690b02802d36a4d26"}, + {file = "coverage-7.4.0-cp311-cp311-win32.whl", hash = "sha256:697d1317e5290a313ef0d369650cfee1a114abb6021fa239ca12b4849ebbd614"}, + {file = "coverage-7.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:26776ff6c711d9d835557ee453082025d871e30b3fd6c27fcef14733f67f0590"}, + {file = "coverage-7.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:13eaf476ec3e883fe3e5fe3707caeb88268a06284484a3daf8250259ef1ba143"}, + {file = "coverage-7.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846f52f46e212affb5bcf131c952fb4075b55aae6b61adc9856222df89cbe3e2"}, + {file = "coverage-7.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26f66da8695719ccf90e794ed567a1549bb2644a706b41e9f6eae6816b398c4a"}, + {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:164fdcc3246c69a6526a59b744b62e303039a81e42cfbbdc171c91a8cc2f9446"}, + {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:316543f71025a6565677d84bc4df2114e9b6a615aa39fb165d697dba06a54af9"}, + {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bb1de682da0b824411e00a0d4da5a784ec6496b6850fdf8c865c1d68c0e318dd"}, + {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:0e8d06778e8fbffccfe96331a3946237f87b1e1d359d7fbe8b06b96c95a5407a"}, + {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a56de34db7b7ff77056a37aedded01b2b98b508227d2d0979d373a9b5d353daa"}, + {file = "coverage-7.4.0-cp312-cp312-win32.whl", hash = "sha256:51456e6fa099a8d9d91497202d9563a320513fcf59f33991b0661a4a6f2ad450"}, + {file = "coverage-7.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:cd3c1e4cb2ff0083758f09be0f77402e1bdf704adb7f89108007300a6da587d0"}, + {file = "coverage-7.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e9d1bf53c4c8de58d22e0e956a79a5b37f754ed1ffdbf1a260d9dcfa2d8a325e"}, + {file = "coverage-7.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:109f5985182b6b81fe33323ab4707011875198c41964f014579cf82cebf2bb85"}, + {file = "coverage-7.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:3cc9d4bc55de8003663ec94c2f215d12d42ceea128da8f0f4036235a119c88ac"}, + {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc6d65b21c219ec2072c1293c505cf36e4e913a3f936d80028993dd73c7906b1"}, + {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a10a4920def78bbfff4eff8a05c51be03e42f1c3735be42d851f199144897ba"}, + {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b8e99f06160602bc64da35158bb76c73522a4010f0649be44a4e167ff8555952"}, + {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7d360587e64d006402b7116623cebf9d48893329ef035278969fa3bbf75b697e"}, + {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:29f3abe810930311c0b5d1a7140f6395369c3db1be68345638c33eec07535105"}, + {file = "coverage-7.4.0-cp38-cp38-win32.whl", hash = "sha256:5040148f4ec43644702e7b16ca864c5314ccb8ee0751ef617d49aa0e2d6bf4f2"}, + {file = "coverage-7.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:9864463c1c2f9cb3b5db2cf1ff475eed2f0b4285c2aaf4d357b69959941aa555"}, + {file = "coverage-7.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:936d38794044b26c99d3dd004d8af0035ac535b92090f7f2bb5aa9c8e2f5cd42"}, + {file = "coverage-7.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:799c8f873794a08cdf216aa5d0531c6a3747793b70c53f70e98259720a6fe2d7"}, + {file = "coverage-7.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7defbb9737274023e2d7af02cac77043c86ce88a907c58f42b580a97d5bcca9"}, + {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a1526d265743fb49363974b7aa8d5899ff64ee07df47dd8d3e37dcc0818f09ed"}, + {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf635a52fc1ea401baf88843ae8708591aa4adff875e5c23220de43b1ccf575c"}, + {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:756ded44f47f330666843b5781be126ab57bb57c22adbb07d83f6b519783b870"}, + {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0eb3c2f32dabe3a4aaf6441dde94f35687224dfd7eb2a7f47f3fd9428e421058"}, + {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bfd5db349d15c08311702611f3dccbef4b4e2ec148fcc636cf8739519b4a5c0f"}, + {file = "coverage-7.4.0-cp39-cp39-win32.whl", hash = "sha256:53d7d9158ee03956e0eadac38dfa1ec8068431ef8058fe6447043db1fb40d932"}, + {file = "coverage-7.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:cfd2a8b6b0d8e66e944d47cdec2f47c48fef2ba2f2dff5a9a75757f64172857e"}, + {file = "coverage-7.4.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:c530833afc4707fe48524a44844493f36d8727f04dcce91fb978c414a8556cc6"}, + {file = "coverage-7.4.0.tar.gz", hash = "sha256:707c0f58cb1712b8809ece32b68996ee1e609f71bd14615bd8f87a1293cb610e"}, ] [package.dependencies] @@ -901,13 +907,13 @@ tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipyth [[package]] name = "fastjsonschema" -version = "2.19.0" +version = "2.19.1" description = "Fastest Python implementation of JSON schema" optional = false python-versions = "*" files = [ - {file = "fastjsonschema-2.19.0-py3-none-any.whl", hash = "sha256:b9fd1a2dd6971dbc7fee280a95bd199ae0dd9ce22beb91cc75e9c1c528a5170e"}, - {file = "fastjsonschema-2.19.0.tar.gz", hash = 
"sha256:e25df6647e1bc4a26070b700897b07b542ec898dd4f1f6ea013e7f6a88417225"}, + {file = "fastjsonschema-2.19.1-py3-none-any.whl", hash = "sha256:3672b47bc94178c9f23dbb654bf47440155d4db9df5f7bc47643315f9c405cd0"}, + {file = "fastjsonschema-2.19.1.tar.gz", hash = "sha256:e3126a94bdc4623d3de4485f8d468a12f02a67921315ddc87836d6e456dc789d"}, ] [package.extras] @@ -979,13 +985,13 @@ files = [ [[package]] name = "google-api-core" -version = "2.14.0" +version = "2.15.0" description = "Google API client core library" optional = false python-versions = ">=3.7" files = [ - {file = "google-api-core-2.14.0.tar.gz", hash = "sha256:5368a4502b793d9bbf812a5912e13e4e69f9bd87f6efb508460c43f5bbd1ce41"}, - {file = "google_api_core-2.14.0-py3-none-any.whl", hash = "sha256:de2fb50ed34d47ddbb2bd2dcf680ee8fead46279f4ed6b16de362aca23a18952"}, + {file = "google-api-core-2.15.0.tar.gz", hash = "sha256:abc978a72658f14a2df1e5e12532effe40f94f868f6e23d95133bd6abcca35ca"}, + {file = "google_api_core-2.15.0-py3-none-any.whl", hash = "sha256:2aa56d2be495551e66bbff7f729b790546f87d5c90e74781aa77233bcb395a8a"}, ] [package.dependencies] @@ -1001,13 +1007,13 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-api-python-client" -version = "2.108.0" +version = "2.113.0" description = "Google API Client Library for Python" optional = false python-versions = ">=3.7" files = [ - {file = "google-api-python-client-2.108.0.tar.gz", hash = "sha256:6396efca83185fb205c0abdbc1c2ee57b40475578c6af37f6d0e30a639aade99"}, - {file = "google_api_python_client-2.108.0-py2.py3-none-any.whl", hash = "sha256:9d1327213e388943ebcd7db5ce6e7f47987a7e6874e3e1f6116010eea4a0e75d"}, + {file = "google-api-python-client-2.113.0.tar.gz", hash = "sha256:bcffbc8ffbad631f699cf85aa91993f3dc03060b234ca9e6e2f9135028bd9b52"}, + {file = "google_api_python_client-2.113.0-py2.py3-none-any.whl", hash = "sha256:25659d488df6c8a69615b2a510af0e63b4c47ab2cb87d71c1e13b28715906e27"}, ] [package.dependencies] @@ -1019,13 +1025,13 @@ uritemplate = ">=3.0.1,<5" [[package]] name = "google-auth" -version = "2.23.4" +version = "2.26.2" description = "Google Authentication Library" optional = false python-versions = ">=3.7" files = [ - {file = "google-auth-2.23.4.tar.gz", hash = "sha256:79905d6b1652187def79d491d6e23d0cbb3a21d3c7ba0dbaa9c8a01906b13ff3"}, - {file = "google_auth-2.23.4-py2.py3-none-any.whl", hash = "sha256:d4bbc92fe4b8bfd2f3e8d88e5ba7085935da208ee38a134fc280e7ce682a05f2"}, + {file = "google-auth-2.26.2.tar.gz", hash = "sha256:97327dbbf58cccb58fc5a1712bba403ae76668e64814eb30f7316f7e27126b81"}, + {file = "google_auth-2.26.2-py2.py3-none-any.whl", hash = "sha256:3f445c8ce9b61ed6459aad86d8ccdba4a9afed841b2d1451a11ef4db08957424"}, ] [package.dependencies] @@ -1075,13 +1081,13 @@ tool = ["click (>=6.0.0)"] [[package]] name = "googleapis-common-protos" -version = "1.61.0" +version = "1.62.0" description = "Common protobufs used in Google APIs" optional = false python-versions = ">=3.7" files = [ - {file = "googleapis-common-protos-1.61.0.tar.gz", hash = "sha256:8a64866a97f6304a7179873a465d6eee97b7a24ec6cfd78e0f575e96b821240b"}, - {file = "googleapis_common_protos-1.61.0-py2.py3-none-any.whl", hash = "sha256:22f1915393bb3245343f6efe87f6fe868532efc12aa26b391b15132e1279f1c0"}, + {file = "googleapis-common-protos-1.62.0.tar.gz", hash = "sha256:83f0ece9f94e5672cced82f592d2a5edf527a96ed1794f0bab36d5735c996277"}, + {file = "googleapis_common_protos-1.62.0-py2.py3-none-any.whl", hash = "sha256:4750113612205514f9f6aa4cb00d523a94f3e8c06c5ad2fee466387dc4875f07"}, ] 
[package.dependencies] @@ -1182,72 +1188,73 @@ vertica = ["sqlalchemy (>=1.3.18,<2.0.0)", "sqlalchemy-vertica-python (>=0.5.10) [[package]] name = "greenlet" -version = "3.0.1" +version = "3.0.3" description = "Lightweight in-process concurrent programming" optional = false python-versions = ">=3.7" files = [ - {file = "greenlet-3.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f89e21afe925fcfa655965ca8ea10f24773a1791400989ff32f467badfe4a064"}, - {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28e89e232c7593d33cac35425b58950789962011cc274aa43ef8865f2e11f46d"}, - {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8ba29306c5de7717b5761b9ea74f9c72b9e2b834e24aa984da99cbfc70157fd"}, - {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19bbdf1cce0346ef7341705d71e2ecf6f41a35c311137f29b8a2dc2341374565"}, - {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:599daf06ea59bfedbec564b1692b0166a0045f32b6f0933b0dd4df59a854caf2"}, - {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b641161c302efbb860ae6b081f406839a8b7d5573f20a455539823802c655f63"}, - {file = "greenlet-3.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d57e20ba591727da0c230ab2c3f200ac9d6d333860d85348816e1dca4cc4792e"}, - {file = "greenlet-3.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5805e71e5b570d490938d55552f5a9e10f477c19400c38bf1d5190d760691846"}, - {file = "greenlet-3.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:52e93b28db27ae7d208748f45d2db8a7b6a380e0d703f099c949d0f0d80b70e9"}, - {file = "greenlet-3.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f7bfb769f7efa0eefcd039dd19d843a4fbfbac52f1878b1da2ed5793ec9b1a65"}, - {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91e6c7db42638dc45cf2e13c73be16bf83179f7859b07cfc139518941320be96"}, - {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1757936efea16e3f03db20efd0cd50a1c86b06734f9f7338a90c4ba85ec2ad5a"}, - {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19075157a10055759066854a973b3d1325d964d498a805bb68a1f9af4aaef8ec"}, - {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9d21aaa84557d64209af04ff48e0ad5e28c5cca67ce43444e939579d085da72"}, - {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2847e5d7beedb8d614186962c3d774d40d3374d580d2cbdab7f184580a39d234"}, - {file = "greenlet-3.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:97e7ac860d64e2dcba5c5944cfc8fa9ea185cd84061c623536154d5a89237884"}, - {file = "greenlet-3.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b2c02d2ad98116e914d4f3155ffc905fd0c025d901ead3f6ed07385e19122c94"}, - {file = "greenlet-3.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:22f79120a24aeeae2b4471c711dcf4f8c736a2bb2fabad2a67ac9a55ea72523c"}, - {file = "greenlet-3.0.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:100f78a29707ca1525ea47388cec8a049405147719f47ebf3895e7509c6446aa"}, - {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60d5772e8195f4e9ebf74046a9121bbb90090f6550f81d8956a05387ba139353"}, - {file = 
"greenlet-3.0.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:daa7197b43c707462f06d2c693ffdbb5991cbb8b80b5b984007de431493a319c"}, - {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea6b8aa9e08eea388c5f7a276fabb1d4b6b9d6e4ceb12cc477c3d352001768a9"}, - {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d11ebbd679e927593978aa44c10fc2092bc454b7d13fdc958d3e9d508aba7d0"}, - {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dbd4c177afb8a8d9ba348d925b0b67246147af806f0b104af4d24f144d461cd5"}, - {file = "greenlet-3.0.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:20107edf7c2c3644c67c12205dc60b1bb11d26b2610b276f97d666110d1b511d"}, - {file = "greenlet-3.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8bef097455dea90ffe855286926ae02d8faa335ed8e4067326257cb571fc1445"}, - {file = "greenlet-3.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:b2d3337dcfaa99698aa2377c81c9ca72fcd89c07e7eb62ece3f23a3fe89b2ce4"}, - {file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:80ac992f25d10aaebe1ee15df45ca0d7571d0f70b645c08ec68733fb7a020206"}, - {file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:337322096d92808f76ad26061a8f5fccb22b0809bea39212cd6c406f6a7060d2"}, - {file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b9934adbd0f6e476f0ecff3c94626529f344f57b38c9a541f87098710b18af0a"}, - {file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc4d815b794fd8868c4d67602692c21bf5293a75e4b607bb92a11e821e2b859a"}, - {file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41bdeeb552d814bcd7fb52172b304898a35818107cc8778b5101423c9017b3de"}, - {file = "greenlet-3.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6e6061bf1e9565c29002e3c601cf68569c450be7fc3f7336671af7ddb4657166"}, - {file = "greenlet-3.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:fa24255ae3c0ab67e613556375a4341af04a084bd58764731972bcbc8baeba36"}, - {file = "greenlet-3.0.1-cp37-cp37m-win32.whl", hash = "sha256:b489c36d1327868d207002391f662a1d163bdc8daf10ab2e5f6e41b9b96de3b1"}, - {file = "greenlet-3.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:f33f3258aae89da191c6ebaa3bc517c6c4cbc9b9f689e5d8452f7aedbb913fa8"}, - {file = "greenlet-3.0.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:d2905ce1df400360463c772b55d8e2518d0e488a87cdea13dd2c71dcb2a1fa16"}, - {file = "greenlet-3.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a02d259510b3630f330c86557331a3b0e0c79dac3d166e449a39363beaae174"}, - {file = "greenlet-3.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55d62807f1c5a1682075c62436702aaba941daa316e9161e4b6ccebbbf38bda3"}, - {file = "greenlet-3.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3fcc780ae8edbb1d050d920ab44790201f027d59fdbd21362340a85c79066a74"}, - {file = "greenlet-3.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4eddd98afc726f8aee1948858aed9e6feeb1758889dfd869072d4465973f6bfd"}, - {file = "greenlet-3.0.1-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:eabe7090db68c981fca689299c2d116400b553f4b713266b130cfc9e2aa9c5a9"}, - {file = 
"greenlet-3.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f2f6d303f3dee132b322a14cd8765287b8f86cdc10d2cb6a6fae234ea488888e"}, - {file = "greenlet-3.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d923ff276f1c1f9680d32832f8d6c040fe9306cbfb5d161b0911e9634be9ef0a"}, - {file = "greenlet-3.0.1-cp38-cp38-win32.whl", hash = "sha256:0b6f9f8ca7093fd4433472fd99b5650f8a26dcd8ba410e14094c1e44cd3ceddd"}, - {file = "greenlet-3.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:990066bff27c4fcf3b69382b86f4c99b3652bab2a7e685d968cd4d0cfc6f67c6"}, - {file = "greenlet-3.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ce85c43ae54845272f6f9cd8320d034d7a946e9773c693b27d620edec825e376"}, - {file = "greenlet-3.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89ee2e967bd7ff85d84a2de09df10e021c9b38c7d91dead95b406ed6350c6997"}, - {file = "greenlet-3.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87c8ceb0cf8a5a51b8008b643844b7f4a8264a2c13fcbcd8a8316161725383fe"}, - {file = "greenlet-3.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d6a8c9d4f8692917a3dc7eb25a6fb337bff86909febe2f793ec1928cd97bedfc"}, - {file = "greenlet-3.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fbc5b8f3dfe24784cee8ce0be3da2d8a79e46a276593db6868382d9c50d97b1"}, - {file = "greenlet-3.0.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85d2b77e7c9382f004b41d9c72c85537fac834fb141b0296942d52bf03fe4a3d"}, - {file = "greenlet-3.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:696d8e7d82398e810f2b3622b24e87906763b6ebfd90e361e88eb85b0e554dc8"}, - {file = "greenlet-3.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:329c5a2e5a0ee942f2992c5e3ff40be03e75f745f48847f118a3cfece7a28546"}, - {file = "greenlet-3.0.1-cp39-cp39-win32.whl", hash = "sha256:cf868e08690cb89360eebc73ba4be7fb461cfbc6168dd88e2fbbe6f31812cd57"}, - {file = "greenlet-3.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:ac4a39d1abae48184d420aa8e5e63efd1b75c8444dd95daa3e03f6c6310e9619"}, - {file = "greenlet-3.0.1.tar.gz", hash = "sha256:816bd9488a94cba78d93e1abb58000e8266fa9cc2aa9ccdd6eb0696acb24005b"}, + {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, + {file = 
"greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, + {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, + {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, + {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, + {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, + {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, + {file = 
"greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, + {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, + {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, + {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, + {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, + {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, + {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, + {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, + {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, + {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, ] [package.extras] -docs = ["Sphinx"] +docs = ["Sphinx", "furo"] test = ["objgraph", "psutil"] [[package]] @@ -1288,20 +1295,20 @@ files = [ [[package]] name = "importlib-metadata" -version = "6.8.0" +version = "6.11.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-6.8.0-py3-none-any.whl", hash = "sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb"}, - {file = "importlib_metadata-6.8.0.tar.gz", hash = "sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743"}, + {file = "importlib_metadata-6.11.0-py3-none-any.whl", hash = "sha256:f0afba6205ad8f8947c7d338b5342d5db2afbfd82f9cbef7879a9539cc12eb9b"}, + {file = "importlib_metadata-6.11.0.tar.gz", hash = "sha256:1231cf92d825c9e03cfc4da076a16de6422c863558229ea0b22b675657463443"}, ] [package.dependencies] zipp = ">=0.5" [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] @@ -1354,13 +1361,13 @@ tests = ["pytest", "pytest-cov", "pytest-mock"] [[package]] name = "ipykernel" -version = "6.27.1" +version = "6.28.0" description = "IPython Kernel for Jupyter" optional = false python-versions = ">=3.8" files = [ - {file = "ipykernel-6.27.1-py3-none-any.whl", hash = "sha256:dab88b47f112f9f7df62236511023c9bdeef67abc73af7c652e4ce4441601686"}, - {file = "ipykernel-6.27.1.tar.gz", hash = "sha256:7d5d594b6690654b4d299edba5e872dc17bb7396a8d0609c97cb7b8a1c605de6"}, + {file = "ipykernel-6.28.0-py3-none-any.whl", hash = "sha256:c6e9a9c63a7f4095c0a22a79f765f079f9ec7be4f2430a898ddea889e8665661"}, + {file = "ipykernel-6.28.0.tar.gz", hash = "sha256:69c11403d26de69df02225916f916b37ea4b9af417da0a8c827f84328d88e5f3"}, ] [package.dependencies] @@ -1374,7 +1381,7 @@ matplotlib-inline = ">=0.1" nest-asyncio = "*" packaging = "*" psutil = "*" -pyzmq = ">=20" +pyzmq = ">=24" tornado = ">=6.1" traitlets = ">=5.4.0" @@ -1473,20 +1480,17 @@ arrow = ">=0.15.0" [[package]] name = "isort" -version = "5.12.0" +version = "5.13.2" description = "A Python utility / library to sort Python imports." 
optional = false python-versions = ">=3.8.0" files = [ - {file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"}, - {file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"}, + {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, + {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, ] [package.extras] -colors = ["colorama (>=0.4.3)"] -pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"] -plugins = ["setuptools"] -requirements-deprecated-finder = ["pip-api", "pipreqs"] +colors = ["colorama (>=0.4.6)"] [[package]] name = "itsdangerous" @@ -1535,13 +1539,13 @@ trio = ["async_generator", "trio"] [[package]] name = "jinja2" -version = "3.1.2" +version = "3.1.3" description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" files = [ - {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, - {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, ] [package.dependencies] @@ -1620,13 +1624,13 @@ format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339- [[package]] name = "jsonschema-specifications" -version = "2023.11.1" +version = "2023.12.1" description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" optional = false python-versions = ">=3.8" files = [ - {file = "jsonschema_specifications-2023.11.1-py3-none-any.whl", hash = "sha256:f596778ab612b3fd29f72ea0d990393d0540a5aab18bf0407a46632eab540779"}, - {file = "jsonschema_specifications-2023.11.1.tar.gz", hash = "sha256:c9b234904ffe02f079bf91b14d79987faa685fd4b39c377a0996954c0090b9ca"}, + {file = "jsonschema_specifications-2023.12.1-py3-none-any.whl", hash = "sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c"}, + {file = "jsonschema_specifications-2023.12.1.tar.gz", hash = "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc"}, ] [package.dependencies] @@ -1657,13 +1661,13 @@ test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pyt [[package]] name = "jupyter-core" -version = "5.5.0" +version = "5.7.1" description = "Jupyter core package. A base package on which Jupyter projects rely." optional = false python-versions = ">=3.8" files = [ - {file = "jupyter_core-5.5.0-py3-none-any.whl", hash = "sha256:e11e02cd8ae0a9de5c6c44abf5727df9f2581055afe00b22183f621ba3585805"}, - {file = "jupyter_core-5.5.0.tar.gz", hash = "sha256:880b86053bf298a8724994f95e99b99130659022a4f7f45f563084b6223861d3"}, + {file = "jupyter_core-5.7.1-py3-none-any.whl", hash = "sha256:c65c82126453a723a2804aa52409930434598fd9d35091d63dfb919d2b765bb7"}, + {file = "jupyter_core-5.7.1.tar.gz", hash = "sha256:de61a9d7fc71240f688b2fb5ab659fbb56979458dc66a71decd098e03c79e218"}, ] [package.dependencies] @@ -1717,13 +1721,13 @@ jupyter-server = ">=1.1.2" [[package]] name = "jupyter-server" -version = "2.11.1" +version = "2.12.4" description = "The backend—i.e. 
core services, APIs, and REST endpoints—to Jupyter web applications." optional = false python-versions = ">=3.8" files = [ - {file = "jupyter_server-2.11.1-py3-none-any.whl", hash = "sha256:4b3a16e3ed16fd202588890f10b8ca589bd3e29405d128beb95935f059441373"}, - {file = "jupyter_server-2.11.1.tar.gz", hash = "sha256:fe80bab96493acf5f7d6cd9a1575af8fbd253dc2591aa4d015131a1e03b5799a"}, + {file = "jupyter_server-2.12.4-py3-none-any.whl", hash = "sha256:a125ae18a60de568f78f55c84dd58759901a18ef279abf0418ac220653ca1320"}, + {file = "jupyter_server-2.12.4.tar.gz", hash = "sha256:41f4a1e6b912cc24a7c6c694851b37d3d8412b180f43d72315fe422cb2b85cc2"}, ] [package.dependencies] @@ -1753,13 +1757,13 @@ test = ["flaky", "ipykernel", "pre-commit", "pytest (>=7.0)", "pytest-console-sc [[package]] name = "jupyter-server-terminals" -version = "0.4.4" +version = "0.5.1" description = "A Jupyter Server Extension Providing Terminals." optional = false python-versions = ">=3.8" files = [ - {file = "jupyter_server_terminals-0.4.4-py3-none-any.whl", hash = "sha256:75779164661cec02a8758a5311e18bb8eb70c4e86c6b699403100f1585a12a36"}, - {file = "jupyter_server_terminals-0.4.4.tar.gz", hash = "sha256:57ab779797c25a7ba68e97bcfb5d7740f2b5e8a83b5e8102b10438041a7eac5d"}, + {file = "jupyter_server_terminals-0.5.1-py3-none-any.whl", hash = "sha256:5e63e947ddd97bb2832db5ef837a258d9ccd4192cd608c1270850ad947ae5dd7"}, + {file = "jupyter_server_terminals-0.5.1.tar.gz", hash = "sha256:16d3be9cf48be6a1f943f3a6c93c033be259cf4779184c66421709cf63dccfea"}, ] [package.dependencies] @@ -1767,18 +1771,18 @@ pywinpty = {version = ">=2.0.3", markers = "os_name == \"nt\""} terminado = ">=0.8.3" [package.extras] -docs = ["jinja2", "jupyter-server", "mistune (<3.0)", "myst-parser", "nbformat", "packaging", "pydata-sphinx-theme", "sphinxcontrib-github-alt", "sphinxcontrib-openapi", "sphinxcontrib-spelling", "sphinxemoji", "tornado"] -test = ["coverage", "jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-cov", "pytest-jupyter[server] (>=0.5.3)", "pytest-timeout"] +docs = ["jinja2", "jupyter-server", "mistune (<4.0)", "myst-parser", "nbformat", "packaging", "pydata-sphinx-theme", "sphinxcontrib-github-alt", "sphinxcontrib-openapi", "sphinxcontrib-spelling", "sphinxemoji", "tornado"] +test = ["jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-jupyter[server] (>=0.5.3)", "pytest-timeout"] [[package]] name = "jupyterlab" -version = "4.0.9" +version = "4.0.10" description = "JupyterLab computational environment" optional = false python-versions = ">=3.8" files = [ - {file = "jupyterlab-4.0.9-py3-none-any.whl", hash = "sha256:9f6f8e36d543fdbcc3df961a1d6a3f524b4a4001be0327a398f68fa4e534107c"}, - {file = "jupyterlab-4.0.9.tar.gz", hash = "sha256:9ebada41d52651f623c0c9f069ddb8a21d6848e4c887d8e5ddc0613166ed5c0b"}, + {file = "jupyterlab-4.0.10-py3-none-any.whl", hash = "sha256:fe010ad9e37017488b468632ef2ead255fc7c671c5b64d9ca13e1f7b7e665c37"}, + {file = "jupyterlab-4.0.10.tar.gz", hash = "sha256:46177eb8ede70dc73be922ac99f8ef943bdc2dfbc6a31b353c4bde848a35dee1"}, ] [package.dependencies] @@ -1797,7 +1801,7 @@ tornado = ">=6.2.0" traitlets = "*" [package.extras] -dev = ["black[jupyter] (==23.10.1)", "build", "bump2version", "coverage", "hatch", "pre-commit", "pytest-cov", "ruff (==0.1.4)"] +dev = ["build", "bump2version", "coverage", "hatch", "pre-commit", "pytest-cov", "ruff (==0.1.6)"] docs = ["jsx-lexer", "myst-parser", "pydata-sphinx-theme (>=0.13.0)", "pytest", "pytest-check-links", "pytest-tornasync", "sphinx (>=1.8,<7.2.0)", "sphinx-copybutton"] 
docs-screenshots = ["altair (==5.0.1)", "ipython (==8.14.0)", "ipywidgets (==8.0.6)", "jupyterlab-geojson (==3.4.0)", "jupyterlab-language-pack-zh-cn (==4.0.post0)", "matplotlib (==3.7.1)", "nbconvert (>=7.0.0)", "pandas (==2.0.2)", "scipy (==1.10.1)", "vega-datasets (==0.9.0)"] test = ["coverage", "pytest (>=7.0)", "pytest-check-links (>=0.7)", "pytest-console-scripts", "pytest-cov", "pytest-jupyter (>=0.5.3)", "pytest-timeout", "pytest-tornasync", "requests", "requests-cache", "virtualenv"] @@ -1891,47 +1895,48 @@ testing = ["backports.unittest-mock", "collective.checkdocs", "fs (>=0.5,<2)", " [[package]] name = "lazy-object-proxy" -version = "1.9.0" +version = "1.10.0" description = "A fast and thorough lazy object proxy." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "lazy-object-proxy-1.9.0.tar.gz", hash = "sha256:659fb5809fa4629b8a1ac5106f669cfc7bef26fbb389dda53b3e010d1ac4ebae"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b40387277b0ed2d0602b8293b94d7257e17d1479e257b4de114ea11a8cb7f2d7"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8c6cfb338b133fbdbc5cfaa10fe3c6aeea827db80c978dbd13bc9dd8526b7d4"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:721532711daa7db0d8b779b0bb0318fa87af1c10d7fe5e52ef30f8eff254d0cd"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:66a3de4a3ec06cd8af3f61b8e1ec67614fbb7c995d02fa224813cb7afefee701"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1aa3de4088c89a1b69f8ec0dcc169aa725b0ff017899ac568fe44ddc1396df46"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-win32.whl", hash = "sha256:f0705c376533ed2a9e5e97aacdbfe04cecd71e0aa84c7c0595d02ef93b6e4455"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:ea806fd4c37bf7e7ad82537b0757999264d5f70c45468447bb2b91afdbe73a6e"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:946d27deaff6cf8452ed0dba83ba38839a87f4f7a9732e8f9fd4107b21e6ff07"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79a31b086e7e68b24b99b23d57723ef7e2c6d81ed21007b6281ebcd1688acb0a"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f699ac1c768270c9e384e4cbd268d6e67aebcfae6cd623b4d7c3bfde5a35db59"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bfb38f9ffb53b942f2b5954e0f610f1e721ccebe9cce9025a38c8ccf4a5183a4"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:189bbd5d41ae7a498397287c408617fe5c48633e7755287b21d741f7db2706a9"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-win32.whl", hash = "sha256:81fc4d08b062b535d95c9ea70dbe8a335c45c04029878e62d744bdced5141586"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:f2457189d8257dd41ae9b434ba33298aec198e30adf2dcdaaa3a28b9994f6adb"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d9e25ef10a39e8afe59a5c348a4dbf29b4868ab76269f81ce1674494e2565a6e"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:cbf9b082426036e19c6924a9ce90c740a9861e2bdc27a4834fd0a910742ac1e8"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f5fa4a61ce2438267163891961cfd5e32ec97a2c444e5b842d574251ade27d2"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8fa02eaab317b1e9e03f69aab1f91e120e7899b392c4fc19807a8278a07a97e8"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e7c21c95cae3c05c14aafffe2865bbd5e377cfc1348c4f7751d9dc9a48ca4bda"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-win32.whl", hash = "sha256:f12ad7126ae0c98d601a7ee504c1122bcef553d1d5e0c3bfa77b16b3968d2734"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-win_amd64.whl", hash = "sha256:edd20c5a55acb67c7ed471fa2b5fb66cb17f61430b7a6b9c3b4a1e40293b1671"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2d0daa332786cf3bb49e10dc6a17a52f6a8f9601b4cf5c295a4f85854d61de63"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cd077f3d04a58e83d04b20e334f678c2b0ff9879b9375ed107d5d07ff160171"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:660c94ea760b3ce47d1855a30984c78327500493d396eac4dfd8bd82041b22be"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:212774e4dfa851e74d393a2370871e174d7ff0ebc980907723bb67d25c8a7c30"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f0117049dd1d5635bbff65444496c90e0baa48ea405125c088e93d9cf4525b11"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-win32.whl", hash = "sha256:0a891e4e41b54fd5b8313b96399f8b0e173bbbfc03c7631f01efbe29bb0bcf82"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:9990d8e71b9f6488e91ad25f322898c136b008d87bf852ff65391b004da5e17b"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9e7551208b2aded9c1447453ee366f1c4070602b3d932ace044715d89666899b"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f83ac4d83ef0ab017683d715ed356e30dd48a93746309c8f3517e1287523ef4"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7322c3d6f1766d4ef1e51a465f47955f1e8123caee67dd641e67d539a534d006"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:18b78ec83edbbeb69efdc0e9c1cb41a3b1b1ed11ddd8ded602464c3fc6020494"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:09763491ce220c0299688940f8dc2c5d05fd1f45af1e42e636b2e8b2303e4382"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-win32.whl", hash = "sha256:9090d8e53235aa280fc9239a86ae3ea8ac58eff66a705fa6aa2ec4968b95c821"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:db1c1722726f47e10e0b5fdbf15ac3b8adb58c091d12b3ab713965795036985f"}, + {file = "lazy-object-proxy-1.10.0.tar.gz", hash = "sha256:78247b6d45f43a52ef35c25b5581459e85117225408a4128a3daf8bf9648ac69"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:855e068b0358ab916454464a884779c7ffa312b8925c6f7401e952dcf3b89977"}, + {file = 
"lazy_object_proxy-1.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab7004cf2e59f7c2e4345604a3e6ea0d92ac44e1c2375527d56492014e690c3"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc0d2fc424e54c70c4bc06787e4072c4f3b1aa2f897dfdc34ce1013cf3ceef05"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e2adb09778797da09d2b5ebdbceebf7dd32e2c96f79da9052b2e87b6ea495895"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b1f711e2c6dcd4edd372cf5dec5c5a30d23bba06ee012093267b3376c079ec83"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-win32.whl", hash = "sha256:76a095cfe6045c7d0ca77db9934e8f7b71b14645f0094ffcd842349ada5c5fb9"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:b4f87d4ed9064b2628da63830986c3d2dca7501e6018347798313fcf028e2fd4"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fec03caabbc6b59ea4a638bee5fce7117be8e99a4103d9d5ad77f15d6f81020c"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02c83f957782cbbe8136bee26416686a6ae998c7b6191711a04da776dc9e47d4"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:009e6bb1f1935a62889ddc8541514b6a9e1fcf302667dcb049a0be5c8f613e56"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:75fc59fc450050b1b3c203c35020bc41bd2695ed692a392924c6ce180c6f1dc9"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:782e2c9b2aab1708ffb07d4bf377d12901d7a1d99e5e410d648d892f8967ab1f"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-win32.whl", hash = "sha256:edb45bb8278574710e68a6b021599a10ce730d156e5b254941754a9cc0b17d03"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:e271058822765ad5e3bca7f05f2ace0de58a3f4e62045a8c90a0dfd2f8ad8cc6"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e98c8af98d5707dcdecc9ab0863c0ea6e88545d42ca7c3feffb6b4d1e370c7ba"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:952c81d415b9b80ea261d2372d2a4a2332a3890c2b83e0535f263ddfe43f0d43"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80b39d3a151309efc8cc48675918891b865bdf742a8616a337cb0090791a0de9"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e221060b701e2aa2ea991542900dd13907a5c90fa80e199dbf5a03359019e7a3"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:92f09ff65ecff3108e56526f9e2481b8116c0b9e1425325e13245abfd79bdb1b"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-win32.whl", hash = "sha256:3ad54b9ddbe20ae9f7c1b29e52f123120772b06dbb18ec6be9101369d63a4074"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:127a789c75151db6af398b8972178afe6bda7d6f68730c057fbbc2e96b08d282"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9e4ed0518a14dd26092614412936920ad081a424bdcb54cc13349a8e2c6d106a"}, + {file = 
"lazy_object_proxy-1.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ad9e6ed739285919aa9661a5bbed0aaf410aa60231373c5579c6b4801bd883c"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fc0a92c02fa1ca1e84fc60fa258458e5bf89d90a1ddaeb8ed9cc3147f417255"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0aefc7591920bbd360d57ea03c995cebc204b424524a5bd78406f6e1b8b2a5d8"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5faf03a7d8942bb4476e3b62fd0f4cf94eaf4618e304a19865abf89a35c0bbee"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-win32.whl", hash = "sha256:e333e2324307a7b5d86adfa835bb500ee70bfcd1447384a822e96495796b0ca4"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:cb73507defd385b7705c599a94474b1d5222a508e502553ef94114a143ec6696"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:366c32fe5355ef5fc8a232c5436f4cc66e9d3e8967c01fb2e6302fd6627e3d94"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2297f08f08a2bb0d32a4265e98a006643cd7233fb7983032bd61ac7a02956b3b"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18dd842b49456aaa9a7cf535b04ca4571a302ff72ed8740d06b5adcd41fe0757"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:217138197c170a2a74ca0e05bddcd5f1796c735c37d0eee33e43259b192aa424"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9a3a87cf1e133e5b1994144c12ca4aa3d9698517fe1e2ca82977781b16955658"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-win32.whl", hash = "sha256:30b339b2a743c5288405aa79a69e706a06e02958eab31859f7f3c04980853b70"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:a899b10e17743683b293a729d3a11f2f399e8a90c73b089e29f5d0fe3509f0dd"}, + {file = "lazy_object_proxy-1.10.0-pp310.pp311.pp312.pp38.pp39-none-any.whl", hash = "sha256:80fa48bd89c8f2f456fc0765c11c23bf5af827febacd2f523ca5bc1893fcc09d"}, ] [[package]] @@ -1996,22 +2001,22 @@ files = [ [[package]] name = "marshmallow" -version = "3.20.1" +version = "3.20.2" description = "A lightweight library for converting complex datatypes to and from native Python datatypes." 
optional = false python-versions = ">=3.8" files = [ - {file = "marshmallow-3.20.1-py3-none-any.whl", hash = "sha256:684939db93e80ad3561392f47be0230743131560a41c5110684c16e21ade0a5c"}, - {file = "marshmallow-3.20.1.tar.gz", hash = "sha256:5d2371bbe42000f2b3fb5eaa065224df7d8f8597bc19a1bbfa5bfe7fba8da889"}, + {file = "marshmallow-3.20.2-py3-none-any.whl", hash = "sha256:c21d4b98fee747c130e6bc8f45c4b3199ea66bc00c12ee1f639f0aeca034d5e9"}, + {file = "marshmallow-3.20.2.tar.gz", hash = "sha256:4c1daff273513dc5eb24b219a8035559dc573c8f322558ef85f5438ddd1236dd"}, ] [package.dependencies] packaging = ">=17.0" [package.extras] -dev = ["flake8 (==6.0.0)", "flake8-bugbear (==23.7.10)", "mypy (==1.4.1)", "pre-commit (>=2.4,<4.0)", "pytest", "pytz", "simplejson", "tox"] -docs = ["alabaster (==0.7.13)", "autodocsumm (==0.2.11)", "sphinx (==7.0.1)", "sphinx-issues (==3.0.1)", "sphinx-version-warning (==1.1.2)"] -lint = ["flake8 (==6.0.0)", "flake8-bugbear (==23.7.10)", "mypy (==1.4.1)", "pre-commit (>=2.4,<4.0)"] +dev = ["pre-commit (>=2.4,<4.0)", "pytest", "pytz", "simplejson", "tox"] +docs = ["alabaster (==0.7.15)", "autodocsumm (==0.2.12)", "sphinx (==7.2.6)", "sphinx-issues (==3.0.1)", "sphinx-version-warning (==1.1.2)"] +lint = ["pre-commit (>=2.4,<4.0)"] tests = ["pytest", "pytz", "simplejson"] [[package]] @@ -2052,38 +2057,38 @@ files = [ [[package]] name = "mypy" -version = "1.7.1" +version = "1.8.0" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.7.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:12cce78e329838d70a204293e7b29af9faa3ab14899aec397798a4b41be7f340"}, - {file = "mypy-1.7.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1484b8fa2c10adf4474f016e09d7a159602f3239075c7bf9f1627f5acf40ad49"}, - {file = "mypy-1.7.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31902408f4bf54108bbfb2e35369877c01c95adc6192958684473658c322c8a5"}, - {file = "mypy-1.7.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f2c2521a8e4d6d769e3234350ba7b65ff5d527137cdcde13ff4d99114b0c8e7d"}, - {file = "mypy-1.7.1-cp310-cp310-win_amd64.whl", hash = "sha256:fcd2572dd4519e8a6642b733cd3a8cfc1ef94bafd0c1ceed9c94fe736cb65b6a"}, - {file = "mypy-1.7.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4b901927f16224d0d143b925ce9a4e6b3a758010673eeded9b748f250cf4e8f7"}, - {file = "mypy-1.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2f7f6985d05a4e3ce8255396df363046c28bea790e40617654e91ed580ca7c51"}, - {file = "mypy-1.7.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:944bdc21ebd620eafefc090cdf83158393ec2b1391578359776c00de00e8907a"}, - {file = "mypy-1.7.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9c7ac372232c928fff0645d85f273a726970c014749b924ce5710d7d89763a28"}, - {file = "mypy-1.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:f6efc9bd72258f89a3816e3a98c09d36f079c223aa345c659622f056b760ab42"}, - {file = "mypy-1.7.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6dbdec441c60699288adf051f51a5d512b0d818526d1dcfff5a41f8cd8b4aaf1"}, - {file = "mypy-1.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4fc3d14ee80cd22367caaaf6e014494415bf440980a3045bf5045b525680ac33"}, - {file = "mypy-1.7.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c6e4464ed5f01dc44dc9821caf67b60a4e5c3b04278286a85c067010653a0eb"}, - {file = "mypy-1.7.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:d9b338c19fa2412f76e17525c1b4f2c687a55b156320acb588df79f2e6fa9fea"}, - {file = "mypy-1.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:204e0d6de5fd2317394a4eff62065614c4892d5a4d1a7ee55b765d7a3d9e3f82"}, - {file = "mypy-1.7.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:84860e06ba363d9c0eeabd45ac0fde4b903ad7aa4f93cd8b648385a888e23200"}, - {file = "mypy-1.7.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8c5091ebd294f7628eb25ea554852a52058ac81472c921150e3a61cdd68f75a7"}, - {file = "mypy-1.7.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40716d1f821b89838589e5b3106ebbc23636ffdef5abc31f7cd0266db936067e"}, - {file = "mypy-1.7.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5cf3f0c5ac72139797953bd50bc6c95ac13075e62dbfcc923571180bebb662e9"}, - {file = "mypy-1.7.1-cp38-cp38-win_amd64.whl", hash = "sha256:78e25b2fd6cbb55ddfb8058417df193f0129cad5f4ee75d1502248e588d9e0d7"}, - {file = "mypy-1.7.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:75c4d2a6effd015786c87774e04331b6da863fc3fc4e8adfc3b40aa55ab516fe"}, - {file = "mypy-1.7.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2643d145af5292ee956aa0a83c2ce1038a3bdb26e033dadeb2f7066fb0c9abce"}, - {file = "mypy-1.7.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75aa828610b67462ffe3057d4d8a4112105ed211596b750b53cbfe182f44777a"}, - {file = "mypy-1.7.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ee5d62d28b854eb61889cde4e1dbc10fbaa5560cb39780c3995f6737f7e82120"}, - {file = "mypy-1.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:72cf32ce7dd3562373f78bd751f73c96cfb441de147cc2448a92c1a308bd0ca6"}, - {file = "mypy-1.7.1-py3-none-any.whl", hash = "sha256:f7c5d642db47376a0cc130f0de6d055056e010debdaf0707cd2b0fc7e7ef30ea"}, - {file = "mypy-1.7.1.tar.gz", hash = "sha256:fcb6d9afb1b6208b4c712af0dafdc650f518836065df0d4fb1d800f5d6773db2"}, + {file = "mypy-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:485a8942f671120f76afffff70f259e1cd0f0cfe08f81c05d8816d958d4577d3"}, + {file = "mypy-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:df9824ac11deaf007443e7ed2a4a26bebff98d2bc43c6da21b2b64185da011c4"}, + {file = "mypy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2afecd6354bbfb6e0160f4e4ad9ba6e4e003b767dd80d85516e71f2e955ab50d"}, + {file = "mypy-1.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8963b83d53ee733a6e4196954502b33567ad07dfd74851f32be18eb932fb1cb9"}, + {file = "mypy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:e46f44b54ebddbeedbd3d5b289a893219065ef805d95094d16a0af6630f5d410"}, + {file = "mypy-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:855fe27b80375e5c5878492f0729540db47b186509c98dae341254c8f45f42ae"}, + {file = "mypy-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c886c6cce2d070bd7df4ec4a05a13ee20c0aa60cb587e8d1265b6c03cf91da3"}, + {file = "mypy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d19c413b3c07cbecf1f991e2221746b0d2a9410b59cb3f4fb9557f0365a1a817"}, + {file = "mypy-1.8.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9261ed810972061388918c83c3f5cd46079d875026ba97380f3e3978a72f503d"}, + {file = "mypy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:51720c776d148bad2372ca21ca29256ed483aa9a4cdefefcef49006dff2a6835"}, + {file = "mypy-1.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:52825b01f5c4c1c4eb0db253ec09c7aa17e1a7304d247c48b6f3599ef40db8bd"}, + {file = "mypy-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:f5ac9a4eeb1ec0f1ccdc6f326bcdb464de5f80eb07fb38b5ddd7b0de6bc61e55"}, + {file = "mypy-1.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afe3fe972c645b4632c563d3f3eff1cdca2fa058f730df2b93a35e3b0c538218"}, + {file = "mypy-1.8.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:42c6680d256ab35637ef88891c6bd02514ccb7e1122133ac96055ff458f93fc3"}, + {file = "mypy-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:720a5ca70e136b675af3af63db533c1c8c9181314d207568bbe79051f122669e"}, + {file = "mypy-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:028cf9f2cae89e202d7b6593cd98db6759379f17a319b5faf4f9978d7084cdc6"}, + {file = "mypy-1.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4e6d97288757e1ddba10dd9549ac27982e3e74a49d8d0179fc14d4365c7add66"}, + {file = "mypy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f1478736fcebb90f97e40aff11a5f253af890c845ee0c850fe80aa060a267c6"}, + {file = "mypy-1.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42419861b43e6962a649068a61f4a4839205a3ef525b858377a960b9e2de6e0d"}, + {file = "mypy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:2b5b6c721bd4aabaadead3a5e6fa85c11c6c795e0c81a7215776ef8afc66de02"}, + {file = "mypy-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5c1538c38584029352878a0466f03a8ee7547d7bd9f641f57a0f3017a7c905b8"}, + {file = "mypy-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ef4be7baf08a203170f29e89d79064463b7fc7a0908b9d0d5114e8009c3a259"}, + {file = "mypy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178def594014aa6c35a8ff411cf37d682f428b3b5617ca79029d8ae72f5402b"}, + {file = "mypy-1.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ab3c84fa13c04aeeeabb2a7f67a25ef5d77ac9d6486ff33ded762ef353aa5592"}, + {file = "mypy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:99b00bc72855812a60d253420d8a2eae839b0afa4938f09f4d2aa9bb4654263a"}, + {file = "mypy-1.8.0-py3-none-any.whl", hash = "sha256:538fd81bb5e430cc1381a443971c0475582ff9f434c16cd46d2c66763ce85d9d"}, + {file = "mypy-1.8.0.tar.gz", hash = "sha256:6ff8b244d7085a0b425b56d327b480c3b29cafbd2eff27316a004f9a7391ae07"}, ] [package.dependencies] @@ -2132,13 +2137,13 @@ test = ["flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "nbconvert (>= [[package]] name = "nbconvert" -version = "7.11.0" +version = "7.14.1" description = "Converting Jupyter Notebooks" optional = false python-versions = ">=3.8" files = [ - {file = "nbconvert-7.11.0-py3-none-any.whl", hash = "sha256:d1d417b7f34a4e38887f8da5bdfd12372adf3b80f995d57556cb0972c68909fe"}, - {file = "nbconvert-7.11.0.tar.gz", hash = "sha256:abedc01cf543177ffde0bfc2a69726d5a478f6af10a332fc1bf29fcb4f0cf000"}, + {file = "nbconvert-7.14.1-py3-none-any.whl", hash = "sha256:aa83e3dd27ea38d0c1d908e3ce9518d15fa908dd30521b6d5040bd23f33fffb0"}, + {file = "nbconvert-7.14.1.tar.gz", hash = "sha256:20cba10e0448dc76b3bebfe1adf923663e3b98338daf77b97b42511ef5a88618"}, ] [package.dependencies] @@ -2165,7 +2170,7 @@ docs = ["ipykernel", "ipython", "myst-parser", "nbsphinx (>=0.2.12)", "pydata-sp qtpdf = ["nbconvert[qtpng]"] qtpng = ["pyqtwebengine (>=5.15)"] serve = ["tornado (>=6.1)"] -test = ["flaky", "ipykernel", "ipywidgets (>=7)", "pytest"] +test = ["flaky", "ipykernel", "ipywidgets (>=7.5)", "pytest"] webpdf = ["playwright"] [[package]] @@ -2260,47 +2265,47 @@ test = ["pytest", "pytest-console-scripts", "pytest-jupyter", "pytest-tornasync" [[package]] name = "numpy" -version = "1.26.2" +version = "1.26.3" description = 
"Fundamental package for array computing in Python" optional = false python-versions = ">=3.9" files = [ - {file = "numpy-1.26.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3703fc9258a4a122d17043e57b35e5ef1c5a5837c3db8be396c82e04c1cf9b0f"}, - {file = "numpy-1.26.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cc392fdcbd21d4be6ae1bb4475a03ce3b025cd49a9be5345d76d7585aea69440"}, - {file = "numpy-1.26.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36340109af8da8805d8851ef1d74761b3b88e81a9bd80b290bbfed61bd2b4f75"}, - {file = "numpy-1.26.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bcc008217145b3d77abd3e4d5ef586e3bdfba8fe17940769f8aa09b99e856c00"}, - {file = "numpy-1.26.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3ced40d4e9e18242f70dd02d739e44698df3dcb010d31f495ff00a31ef6014fe"}, - {file = "numpy-1.26.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b272d4cecc32c9e19911891446b72e986157e6a1809b7b56518b4f3755267523"}, - {file = "numpy-1.26.2-cp310-cp310-win32.whl", hash = "sha256:22f8fc02fdbc829e7a8c578dd8d2e15a9074b630d4da29cda483337e300e3ee9"}, - {file = "numpy-1.26.2-cp310-cp310-win_amd64.whl", hash = "sha256:26c9d33f8e8b846d5a65dd068c14e04018d05533b348d9eaeef6c1bd787f9919"}, - {file = "numpy-1.26.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b96e7b9c624ef3ae2ae0e04fa9b460f6b9f17ad8b4bec6d7756510f1f6c0c841"}, - {file = "numpy-1.26.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:aa18428111fb9a591d7a9cc1b48150097ba6a7e8299fb56bdf574df650e7d1f1"}, - {file = "numpy-1.26.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06fa1ed84aa60ea6ef9f91ba57b5ed963c3729534e6e54055fc151fad0423f0a"}, - {file = "numpy-1.26.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96ca5482c3dbdd051bcd1fce8034603d6ebfc125a7bd59f55b40d8f5d246832b"}, - {file = "numpy-1.26.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:854ab91a2906ef29dc3925a064fcd365c7b4da743f84b123002f6139bcb3f8a7"}, - {file = "numpy-1.26.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f43740ab089277d403aa07567be138fc2a89d4d9892d113b76153e0e412409f8"}, - {file = "numpy-1.26.2-cp311-cp311-win32.whl", hash = "sha256:a2bbc29fcb1771cd7b7425f98b05307776a6baf43035d3b80c4b0f29e9545186"}, - {file = "numpy-1.26.2-cp311-cp311-win_amd64.whl", hash = "sha256:2b3fca8a5b00184828d12b073af4d0fc5fdd94b1632c2477526f6bd7842d700d"}, - {file = "numpy-1.26.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a4cd6ed4a339c21f1d1b0fdf13426cb3b284555c27ac2f156dfdaaa7e16bfab0"}, - {file = "numpy-1.26.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5d5244aabd6ed7f312268b9247be47343a654ebea52a60f002dc70c769048e75"}, - {file = "numpy-1.26.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a3cdb4d9c70e6b8c0814239ead47da00934666f668426fc6e94cce869e13fd7"}, - {file = "numpy-1.26.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa317b2325f7aa0a9471663e6093c210cb2ae9c0ad824732b307d2c51983d5b6"}, - {file = "numpy-1.26.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:174a8880739c16c925799c018f3f55b8130c1f7c8e75ab0a6fa9d41cab092fd6"}, - {file = "numpy-1.26.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f79b231bf5c16b1f39c7f4875e1ded36abee1591e98742b05d8a0fb55d8a3eec"}, - {file = "numpy-1.26.2-cp312-cp312-win32.whl", hash = "sha256:4a06263321dfd3598cacb252f51e521a8cb4b6df471bb12a7ee5cbab20ea9167"}, - {file = 
"numpy-1.26.2-cp312-cp312-win_amd64.whl", hash = "sha256:b04f5dc6b3efdaab541f7857351aac359e6ae3c126e2edb376929bd3b7f92d7e"}, - {file = "numpy-1.26.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4eb8df4bf8d3d90d091e0146f6c28492b0be84da3e409ebef54349f71ed271ef"}, - {file = "numpy-1.26.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1a13860fdcd95de7cf58bd6f8bc5a5ef81c0b0625eb2c9a783948847abbef2c2"}, - {file = "numpy-1.26.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64308ebc366a8ed63fd0bf426b6a9468060962f1a4339ab1074c228fa6ade8e3"}, - {file = "numpy-1.26.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baf8aab04a2c0e859da118f0b38617e5ee65d75b83795055fb66c0d5e9e9b818"}, - {file = "numpy-1.26.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d73a3abcac238250091b11caef9ad12413dab01669511779bc9b29261dd50210"}, - {file = "numpy-1.26.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b361d369fc7e5e1714cf827b731ca32bff8d411212fccd29ad98ad622449cc36"}, - {file = "numpy-1.26.2-cp39-cp39-win32.whl", hash = "sha256:bd3f0091e845164a20bd5a326860c840fe2af79fa12e0469a12768a3ec578d80"}, - {file = "numpy-1.26.2-cp39-cp39-win_amd64.whl", hash = "sha256:2beef57fb031dcc0dc8fa4fe297a742027b954949cabb52a2a376c144e5e6060"}, - {file = "numpy-1.26.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1cc3d5029a30fb5f06704ad6b23b35e11309491c999838c31f124fee32107c79"}, - {file = "numpy-1.26.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94cc3c222bb9fb5a12e334d0479b97bb2df446fbe622b470928f5284ffca3f8d"}, - {file = "numpy-1.26.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:fe6b44fb8fcdf7eda4ef4461b97b3f63c466b27ab151bec2366db8b197387841"}, - {file = "numpy-1.26.2.tar.gz", hash = "sha256:f65738447676ab5777f11e6bbbdb8ce11b785e105f690bc45966574816b6d3ea"}, + {file = "numpy-1.26.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:806dd64230dbbfaca8a27faa64e2f414bf1c6622ab78cc4264f7f5f028fee3bf"}, + {file = "numpy-1.26.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02f98011ba4ab17f46f80f7f8f1c291ee7d855fcef0a5a98db80767a468c85cd"}, + {file = "numpy-1.26.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d45b3ec2faed4baca41c76617fcdcfa4f684ff7a151ce6fc78ad3b6e85af0a6"}, + {file = "numpy-1.26.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdd2b45bf079d9ad90377048e2747a0c82351989a2165821f0c96831b4a2a54b"}, + {file = "numpy-1.26.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:211ddd1e94817ed2d175b60b6374120244a4dd2287f4ece45d49228b4d529178"}, + {file = "numpy-1.26.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b1240f767f69d7c4c8a29adde2310b871153df9b26b5cb2b54a561ac85146485"}, + {file = "numpy-1.26.3-cp310-cp310-win32.whl", hash = "sha256:21a9484e75ad018974a2fdaa216524d64ed4212e418e0a551a2d83403b0531d3"}, + {file = "numpy-1.26.3-cp310-cp310-win_amd64.whl", hash = "sha256:9e1591f6ae98bcfac2a4bbf9221c0b92ab49762228f38287f6eeb5f3f55905ce"}, + {file = "numpy-1.26.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b831295e5472954104ecb46cd98c08b98b49c69fdb7040483aff799a755a7374"}, + {file = "numpy-1.26.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9e87562b91f68dd8b1c39149d0323b42e0082db7ddb8e934ab4c292094d575d6"}, + {file = "numpy-1.26.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c66d6fec467e8c0f975818c1796d25c53521124b7cfb760114be0abad53a0a2"}, + {file = 
"numpy-1.26.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f25e2811a9c932e43943a2615e65fc487a0b6b49218899e62e426e7f0a57eeda"}, + {file = "numpy-1.26.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:af36e0aa45e25c9f57bf684b1175e59ea05d9a7d3e8e87b7ae1a1da246f2767e"}, + {file = "numpy-1.26.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:51c7f1b344f302067b02e0f5b5d2daa9ed4a721cf49f070280ac202738ea7f00"}, + {file = "numpy-1.26.3-cp311-cp311-win32.whl", hash = "sha256:7ca4f24341df071877849eb2034948459ce3a07915c2734f1abb4018d9c49d7b"}, + {file = "numpy-1.26.3-cp311-cp311-win_amd64.whl", hash = "sha256:39763aee6dfdd4878032361b30b2b12593fb445ddb66bbac802e2113eb8a6ac4"}, + {file = "numpy-1.26.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a7081fd19a6d573e1a05e600c82a1c421011db7935ed0d5c483e9dd96b99cf13"}, + {file = "numpy-1.26.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12c70ac274b32bc00c7f61b515126c9205323703abb99cd41836e8125ea0043e"}, + {file = "numpy-1.26.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f784e13e598e9594750b2ef6729bcd5a47f6cfe4a12cca13def35e06d8163e3"}, + {file = "numpy-1.26.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f24750ef94d56ce6e33e4019a8a4d68cfdb1ef661a52cdaee628a56d2437419"}, + {file = "numpy-1.26.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:77810ef29e0fb1d289d225cabb9ee6cf4d11978a00bb99f7f8ec2132a84e0166"}, + {file = "numpy-1.26.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8ed07a90f5450d99dad60d3799f9c03c6566709bd53b497eb9ccad9a55867f36"}, + {file = "numpy-1.26.3-cp312-cp312-win32.whl", hash = "sha256:f73497e8c38295aaa4741bdfa4fda1a5aedda5473074369eca10626835445511"}, + {file = "numpy-1.26.3-cp312-cp312-win_amd64.whl", hash = "sha256:da4b0c6c699a0ad73c810736303f7fbae483bcb012e38d7eb06a5e3b432c981b"}, + {file = "numpy-1.26.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1666f634cb3c80ccbd77ec97bc17337718f56d6658acf5d3b906ca03e90ce87f"}, + {file = "numpy-1.26.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:18c3319a7d39b2c6a9e3bb75aab2304ab79a811ac0168a671a62e6346c29b03f"}, + {file = "numpy-1.26.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b7e807d6888da0db6e7e75838444d62495e2b588b99e90dd80c3459594e857b"}, + {file = "numpy-1.26.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4d362e17bcb0011738c2d83e0a65ea8ce627057b2fdda37678f4374a382a137"}, + {file = "numpy-1.26.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b8c275f0ae90069496068c714387b4a0eba5d531aace269559ff2b43655edd58"}, + {file = "numpy-1.26.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cc0743f0302b94f397a4a65a660d4cd24267439eb16493fb3caad2e4389bccbb"}, + {file = "numpy-1.26.3-cp39-cp39-win32.whl", hash = "sha256:9bc6d1a7f8cedd519c4b7b1156d98e051b726bf160715b769106661d567b3f03"}, + {file = "numpy-1.26.3-cp39-cp39-win_amd64.whl", hash = "sha256:867e3644e208c8922a3be26fc6bbf112a035f50f0a86497f98f228c50c607bb2"}, + {file = "numpy-1.26.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3c67423b3703f8fbd90f5adaa37f85b5794d3366948efe9a5190a5f3a83fc34e"}, + {file = "numpy-1.26.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46f47ee566d98849323f01b349d58f2557f02167ee301e5e28809a8c0e27a2d0"}, + {file = "numpy-1.26.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a8474703bffc65ca15853d5fd4d06b18138ae90c17c8d12169968e998e448bb5"}, + {file = 
"numpy-1.26.3.tar.gz", hash = "sha256:697df43e2b6310ecc9d95f05d5ef20eacc09c7c4ecc9da3f235d39e71b7da1e4"}, ] [[package]] @@ -2562,13 +2567,13 @@ testing = ["docopt", "pytest (<6.0.0)"] [[package]] name = "pathspec" -version = "0.11.2" +version = "0.12.1" description = "Utility library for gitignore style pattern matching of file paths." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"}, - {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, ] [[package]] @@ -2606,13 +2611,13 @@ ptyprocess = ">=0.5" [[package]] name = "platformdirs" -version = "4.0.0" +version = "4.1.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "platformdirs-4.0.0-py3-none-any.whl", hash = "sha256:118c954d7e949b35437270383a3f2531e99dd93cf7ce4dc8340d3356d30f173b"}, - {file = "platformdirs-4.0.0.tar.gz", hash = "sha256:cb633b2bcf10c51af60beb0ab06d2f1d69064b43abf4c185ca6b28865f3f9731"}, + {file = "platformdirs-4.1.0-py3-none-any.whl", hash = "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380"}, + {file = "platformdirs-4.1.0.tar.gz", hash = "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420"}, ] [package.extras] @@ -2650,13 +2655,13 @@ twisted = ["twisted"] [[package]] name = "prompt-toolkit" -version = "3.0.41" +version = "3.0.43" description = "Library for building powerful interactive command lines in Python" optional = false python-versions = ">=3.7.0" files = [ - {file = "prompt_toolkit-3.0.41-py3-none-any.whl", hash = "sha256:f36fe301fafb7470e86aaf90f036eef600a3210be4decf461a5b1ca8403d3cb2"}, - {file = "prompt_toolkit-3.0.41.tar.gz", hash = "sha256:941367d97fc815548822aa26c2a269fdc4eb21e9ec05fc5d447cf09bad5d75f0"}, + {file = "prompt_toolkit-3.0.43-py3-none-any.whl", hash = "sha256:a11a29cb3bf0a28a387fe5122cdb649816a957cd9261dcedf8c9f1fef33eacf6"}, + {file = "prompt_toolkit-3.0.43.tar.gz", hash = "sha256:3527b7af26106cbc65a040bcc84839a3566ec1b051bb0bfe953631e704b0ff7d"}, ] [package.dependencies] @@ -2664,47 +2669,47 @@ wcwidth = "*" [[package]] name = "protobuf" -version = "4.25.1" +version = "4.25.2" description = "" optional = false python-versions = ">=3.8" files = [ - {file = "protobuf-4.25.1-cp310-abi3-win32.whl", hash = "sha256:193f50a6ab78a970c9b4f148e7c750cfde64f59815e86f686c22e26b4fe01ce7"}, - {file = "protobuf-4.25.1-cp310-abi3-win_amd64.whl", hash = "sha256:3497c1af9f2526962f09329fd61a36566305e6c72da2590ae0d7d1322818843b"}, - {file = "protobuf-4.25.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:0bf384e75b92c42830c0a679b0cd4d6e2b36ae0cf3dbb1e1dfdda48a244f4bcd"}, - {file = "protobuf-4.25.1-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:0f881b589ff449bf0b931a711926e9ddaad3b35089cc039ce1af50b21a4ae8cb"}, - {file = "protobuf-4.25.1-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:ca37bf6a6d0046272c152eea90d2e4ef34593aaa32e8873fc14c16440f22d4b7"}, - {file = "protobuf-4.25.1-cp38-cp38-win32.whl", hash = 
"sha256:abc0525ae2689a8000837729eef7883b9391cd6aa7950249dcf5a4ede230d5dd"}, - {file = "protobuf-4.25.1-cp38-cp38-win_amd64.whl", hash = "sha256:1484f9e692091450e7edf418c939e15bfc8fc68856e36ce399aed6889dae8bb0"}, - {file = "protobuf-4.25.1-cp39-cp39-win32.whl", hash = "sha256:8bdbeaddaac52d15c6dce38c71b03038ef7772b977847eb6d374fc86636fa510"}, - {file = "protobuf-4.25.1-cp39-cp39-win_amd64.whl", hash = "sha256:becc576b7e6b553d22cbdf418686ee4daa443d7217999125c045ad56322dda10"}, - {file = "protobuf-4.25.1-py3-none-any.whl", hash = "sha256:a19731d5e83ae4737bb2a089605e636077ac001d18781b3cf489b9546c7c80d6"}, - {file = "protobuf-4.25.1.tar.gz", hash = "sha256:57d65074b4f5baa4ab5da1605c02be90ac20c8b40fb137d6a8df9f416b0d0ce2"}, + {file = "protobuf-4.25.2-cp310-abi3-win32.whl", hash = "sha256:b50c949608682b12efb0b2717f53256f03636af5f60ac0c1d900df6213910fd6"}, + {file = "protobuf-4.25.2-cp310-abi3-win_amd64.whl", hash = "sha256:8f62574857ee1de9f770baf04dde4165e30b15ad97ba03ceac65f760ff018ac9"}, + {file = "protobuf-4.25.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:2db9f8fa64fbdcdc93767d3cf81e0f2aef176284071507e3ede160811502fd3d"}, + {file = "protobuf-4.25.2-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:10894a2885b7175d3984f2be8d9850712c57d5e7587a2410720af8be56cdaf62"}, + {file = "protobuf-4.25.2-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:fc381d1dd0516343f1440019cedf08a7405f791cd49eef4ae1ea06520bc1c020"}, + {file = "protobuf-4.25.2-cp38-cp38-win32.whl", hash = "sha256:33a1aeef4b1927431d1be780e87b641e322b88d654203a9e9d93f218ee359e61"}, + {file = "protobuf-4.25.2-cp38-cp38-win_amd64.whl", hash = "sha256:47f3de503fe7c1245f6f03bea7e8d3ec11c6c4a2ea9ef910e3221c8a15516d62"}, + {file = "protobuf-4.25.2-cp39-cp39-win32.whl", hash = "sha256:5e5c933b4c30a988b52e0b7c02641760a5ba046edc5e43d3b94a74c9fc57c1b3"}, + {file = "protobuf-4.25.2-cp39-cp39-win_amd64.whl", hash = "sha256:d66a769b8d687df9024f2985d5137a337f957a0916cf5464d1513eee96a63ff0"}, + {file = "protobuf-4.25.2-py3-none-any.whl", hash = "sha256:a8b7a98d4ce823303145bf3c1a8bdb0f2f4642a414b196f04ad9853ed0c8f830"}, + {file = "protobuf-4.25.2.tar.gz", hash = "sha256:fe599e175cb347efc8ee524bcd4b902d11f7262c0e569ececcb89995c15f0a5e"}, ] [[package]] name = "psutil" -version = "5.9.6" +version = "5.9.7" description = "Cross-platform lib for process and system monitoring in Python." 
optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ - {file = "psutil-5.9.6-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:fb8a697f11b0f5994550555fcfe3e69799e5b060c8ecf9e2f75c69302cc35c0d"}, - {file = "psutil-5.9.6-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:91ecd2d9c00db9817a4b4192107cf6954addb5d9d67a969a4f436dbc9200f88c"}, - {file = "psutil-5.9.6-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:10e8c17b4f898d64b121149afb136c53ea8b68c7531155147867b7b1ac9e7e28"}, - {file = "psutil-5.9.6-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:18cd22c5db486f33998f37e2bb054cc62fd06646995285e02a51b1e08da97017"}, - {file = "psutil-5.9.6-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:ca2780f5e038379e520281e4c032dddd086906ddff9ef0d1b9dcf00710e5071c"}, - {file = "psutil-5.9.6-cp27-none-win32.whl", hash = "sha256:70cb3beb98bc3fd5ac9ac617a327af7e7f826373ee64c80efd4eb2856e5051e9"}, - {file = "psutil-5.9.6-cp27-none-win_amd64.whl", hash = "sha256:51dc3d54607c73148f63732c727856f5febec1c7c336f8f41fcbd6315cce76ac"}, - {file = "psutil-5.9.6-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:c69596f9fc2f8acd574a12d5f8b7b1ba3765a641ea5d60fb4736bf3c08a8214a"}, - {file = "psutil-5.9.6-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:92e0cc43c524834af53e9d3369245e6cc3b130e78e26100d1f63cdb0abeb3d3c"}, - {file = "psutil-5.9.6-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:748c9dd2583ed86347ed65d0035f45fa8c851e8d90354c122ab72319b5f366f4"}, - {file = "psutil-5.9.6-cp36-cp36m-win32.whl", hash = "sha256:3ebf2158c16cc69db777e3c7decb3c0f43a7af94a60d72e87b2823aebac3d602"}, - {file = "psutil-5.9.6-cp36-cp36m-win_amd64.whl", hash = "sha256:ff18b8d1a784b810df0b0fff3bcb50ab941c3b8e2c8de5726f9c71c601c611aa"}, - {file = "psutil-5.9.6-cp37-abi3-win32.whl", hash = "sha256:a6f01f03bf1843280f4ad16f4bde26b817847b4c1a0db59bf6419807bc5ce05c"}, - {file = "psutil-5.9.6-cp37-abi3-win_amd64.whl", hash = "sha256:6e5fb8dc711a514da83098bc5234264e551ad980cec5f85dabf4d38ed6f15e9a"}, - {file = "psutil-5.9.6-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:daecbcbd29b289aac14ece28eca6a3e60aa361754cf6da3dfb20d4d32b6c7f57"}, - {file = "psutil-5.9.6.tar.gz", hash = "sha256:e4b92ddcd7dd4cdd3f900180ea1e104932c7bce234fb88976e2a3b296441225a"}, + {file = "psutil-5.9.7-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:0bd41bf2d1463dfa535942b2a8f0e958acf6607ac0be52265ab31f7923bcd5e6"}, + {file = "psutil-5.9.7-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:5794944462509e49d4d458f4dbfb92c47539e7d8d15c796f141f474010084056"}, + {file = "psutil-5.9.7-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:fe361f743cb3389b8efda21980d93eb55c1f1e3898269bc9a2a1d0bb7b1f6508"}, + {file = "psutil-5.9.7-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:e469990e28f1ad738f65a42dcfc17adaed9d0f325d55047593cb9033a0ab63df"}, + {file = "psutil-5.9.7-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:3c4747a3e2ead1589e647e64aad601981f01b68f9398ddf94d01e3dc0d1e57c7"}, + {file = "psutil-5.9.7-cp27-none-win32.whl", hash = "sha256:1d4bc4a0148fdd7fd8f38e0498639ae128e64538faa507df25a20f8f7fb2341c"}, + {file = "psutil-5.9.7-cp27-none-win_amd64.whl", hash = "sha256:4c03362e280d06bbbfcd52f29acd79c733e0af33d707c54255d21029b8b32ba6"}, + {file = "psutil-5.9.7-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ea36cc62e69a13ec52b2f625c27527f6e4479bca2b340b7a452af55b34fcbe2e"}, + 
{file = "psutil-5.9.7-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1132704b876e58d277168cd729d64750633d5ff0183acf5b3c986b8466cd0284"}, + {file = "psutil-5.9.7-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe8b7f07948f1304497ce4f4684881250cd859b16d06a1dc4d7941eeb6233bfe"}, + {file = "psutil-5.9.7-cp36-cp36m-win32.whl", hash = "sha256:b27f8fdb190c8c03914f908a4555159327d7481dac2f01008d483137ef3311a9"}, + {file = "psutil-5.9.7-cp36-cp36m-win_amd64.whl", hash = "sha256:44969859757f4d8f2a9bd5b76eba8c3099a2c8cf3992ff62144061e39ba8568e"}, + {file = "psutil-5.9.7-cp37-abi3-win32.whl", hash = "sha256:c727ca5a9b2dd5193b8644b9f0c883d54f1248310023b5ad3e92036c5e2ada68"}, + {file = "psutil-5.9.7-cp37-abi3-win_amd64.whl", hash = "sha256:f37f87e4d73b79e6c5e749440c3113b81d1ee7d26f21c19c47371ddea834f414"}, + {file = "psutil-5.9.7-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:032f4f2c909818c86cea4fe2cc407f1c0f0cde8e6c6d702b28b8ce0c0d143340"}, + {file = "psutil-5.9.7.tar.gz", hash = "sha256:3f02134e82cfb5d089fddf20bb2e03fd5cd52395321d1c8458a9e58500ff417c"}, ] [package.extras] @@ -2949,13 +2954,13 @@ diagrams = ["jinja2", "railroad-diagrams"] [[package]] name = "pytest" -version = "7.4.3" +version = "7.4.4" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.4.3-py3-none-any.whl", hash = "sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac"}, - {file = "pytest-7.4.3.tar.gz", hash = "sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5"}, + {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, + {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, ] [package.dependencies] @@ -3199,104 +3204,104 @@ files = [ [[package]] name = "pyzmq" -version = "25.1.1" +version = "25.1.2" description = "Python bindings for 0MQ" optional = false python-versions = ">=3.6" files = [ - {file = "pyzmq-25.1.1-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:381469297409c5adf9a0e884c5eb5186ed33137badcbbb0560b86e910a2f1e76"}, - {file = "pyzmq-25.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:955215ed0604dac5b01907424dfa28b40f2b2292d6493445dd34d0dfa72586a8"}, - {file = "pyzmq-25.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:985bbb1316192b98f32e25e7b9958088431d853ac63aca1d2c236f40afb17c83"}, - {file = "pyzmq-25.1.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:afea96f64efa98df4da6958bae37f1cbea7932c35878b185e5982821bc883369"}, - {file = "pyzmq-25.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76705c9325d72a81155bb6ab48d4312e0032bf045fb0754889133200f7a0d849"}, - {file = "pyzmq-25.1.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:77a41c26205d2353a4c94d02be51d6cbdf63c06fbc1295ea57dad7e2d3381b71"}, - {file = "pyzmq-25.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:12720a53e61c3b99d87262294e2b375c915fea93c31fc2336898c26d7aed34cd"}, - {file = "pyzmq-25.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:57459b68e5cd85b0be8184382cefd91959cafe79ae019e6b1ae6e2ba8a12cda7"}, - {file = "pyzmq-25.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:292fe3fc5ad4a75bc8df0dfaee7d0babe8b1f4ceb596437213821f761b4589f9"}, - {file = 
"pyzmq-25.1.1-cp310-cp310-win32.whl", hash = "sha256:35b5ab8c28978fbbb86ea54958cd89f5176ce747c1fb3d87356cf698048a7790"}, - {file = "pyzmq-25.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:11baebdd5fc5b475d484195e49bae2dc64b94a5208f7c89954e9e354fc609d8f"}, - {file = "pyzmq-25.1.1-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:d20a0ddb3e989e8807d83225a27e5c2eb2260eaa851532086e9e0fa0d5287d83"}, - {file = "pyzmq-25.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e1c1be77bc5fb77d923850f82e55a928f8638f64a61f00ff18a67c7404faf008"}, - {file = "pyzmq-25.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d89528b4943d27029a2818f847c10c2cecc79fa9590f3cb1860459a5be7933eb"}, - {file = "pyzmq-25.1.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:90f26dc6d5f241ba358bef79be9ce06de58d477ca8485e3291675436d3827cf8"}, - {file = "pyzmq-25.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2b92812bd214018e50b6380ea3ac0c8bb01ac07fcc14c5f86a5bb25e74026e9"}, - {file = "pyzmq-25.1.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:2f957ce63d13c28730f7fd6b72333814221c84ca2421298f66e5143f81c9f91f"}, - {file = "pyzmq-25.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:047a640f5c9c6ade7b1cc6680a0e28c9dd5a0825135acbd3569cc96ea00b2505"}, - {file = "pyzmq-25.1.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7f7e58effd14b641c5e4dec8c7dab02fb67a13df90329e61c869b9cc607ef752"}, - {file = "pyzmq-25.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c2910967e6ab16bf6fbeb1f771c89a7050947221ae12a5b0b60f3bca2ee19bca"}, - {file = "pyzmq-25.1.1-cp311-cp311-win32.whl", hash = "sha256:76c1c8efb3ca3a1818b837aea423ff8a07bbf7aafe9f2f6582b61a0458b1a329"}, - {file = "pyzmq-25.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:44e58a0554b21fc662f2712814a746635ed668d0fbc98b7cb9d74cb798d202e6"}, - {file = "pyzmq-25.1.1-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:e1ffa1c924e8c72778b9ccd386a7067cddf626884fd8277f503c48bb5f51c762"}, - {file = "pyzmq-25.1.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:1af379b33ef33757224da93e9da62e6471cf4a66d10078cf32bae8127d3d0d4a"}, - {file = "pyzmq-25.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cff084c6933680d1f8b2f3b4ff5bbb88538a4aac00d199ac13f49d0698727ecb"}, - {file = "pyzmq-25.1.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2400a94f7dd9cb20cd012951a0cbf8249e3d554c63a9c0cdfd5cbb6c01d2dec"}, - {file = "pyzmq-25.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d81f1ddae3858b8299d1da72dd7d19dd36aab654c19671aa8a7e7fb02f6638a"}, - {file = "pyzmq-25.1.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:255ca2b219f9e5a3a9ef3081512e1358bd4760ce77828e1028b818ff5610b87b"}, - {file = "pyzmq-25.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a882ac0a351288dd18ecae3326b8a49d10c61a68b01419f3a0b9a306190baf69"}, - {file = "pyzmq-25.1.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:724c292bb26365659fc434e9567b3f1adbdb5e8d640c936ed901f49e03e5d32e"}, - {file = "pyzmq-25.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ca1ed0bb2d850aa8471387882247c68f1e62a4af0ce9c8a1dbe0d2bf69e41fb"}, - {file = "pyzmq-25.1.1-cp312-cp312-win32.whl", hash = "sha256:b3451108ab861040754fa5208bca4a5496c65875710f76789a9ad27c801a0075"}, - {file = "pyzmq-25.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:eadbefd5e92ef8a345f0525b5cfd01cf4e4cc651a2cffb8f23c0dd184975d787"}, - {file 
= "pyzmq-25.1.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:db0b2af416ba735c6304c47f75d348f498b92952f5e3e8bff449336d2728795d"}, - {file = "pyzmq-25.1.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7c133e93b405eb0d36fa430c94185bdd13c36204a8635470cccc200723c13bb"}, - {file = "pyzmq-25.1.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:273bc3959bcbff3f48606b28229b4721716598d76b5aaea2b4a9d0ab454ec062"}, - {file = "pyzmq-25.1.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:cbc8df5c6a88ba5ae385d8930da02201165408dde8d8322072e3e5ddd4f68e22"}, - {file = "pyzmq-25.1.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:18d43df3f2302d836f2a56f17e5663e398416e9dd74b205b179065e61f1a6edf"}, - {file = "pyzmq-25.1.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:73461eed88a88c866656e08f89299720a38cb4e9d34ae6bf5df6f71102570f2e"}, - {file = "pyzmq-25.1.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:34c850ce7976d19ebe7b9d4b9bb8c9dfc7aac336c0958e2651b88cbd46682123"}, - {file = "pyzmq-25.1.1-cp36-cp36m-win32.whl", hash = "sha256:d2045d6d9439a0078f2a34b57c7b18c4a6aef0bee37f22e4ec9f32456c852c71"}, - {file = "pyzmq-25.1.1-cp36-cp36m-win_amd64.whl", hash = "sha256:458dea649f2f02a0b244ae6aef8dc29325a2810aa26b07af8374dc2a9faf57e3"}, - {file = "pyzmq-25.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7cff25c5b315e63b07a36f0c2bab32c58eafbe57d0dce61b614ef4c76058c115"}, - {file = "pyzmq-25.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1579413ae492b05de5a6174574f8c44c2b9b122a42015c5292afa4be2507f28"}, - {file = "pyzmq-25.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3d0a409d3b28607cc427aa5c30a6f1e4452cc44e311f843e05edb28ab5e36da0"}, - {file = "pyzmq-25.1.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:21eb4e609a154a57c520e3d5bfa0d97e49b6872ea057b7c85257b11e78068222"}, - {file = "pyzmq-25.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:034239843541ef7a1aee0c7b2cb7f6aafffb005ede965ae9cbd49d5ff4ff73cf"}, - {file = "pyzmq-25.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f8115e303280ba09f3898194791a153862cbf9eef722ad8f7f741987ee2a97c7"}, - {file = "pyzmq-25.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:1a5d26fe8f32f137e784f768143728438877d69a586ddeaad898558dc971a5ae"}, - {file = "pyzmq-25.1.1-cp37-cp37m-win32.whl", hash = "sha256:f32260e556a983bc5c7ed588d04c942c9a8f9c2e99213fec11a031e316874c7e"}, - {file = "pyzmq-25.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:abf34e43c531bbb510ae7e8f5b2b1f2a8ab93219510e2b287a944432fad135f3"}, - {file = "pyzmq-25.1.1-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:87e34f31ca8f168c56d6fbf99692cc8d3b445abb5bfd08c229ae992d7547a92a"}, - {file = "pyzmq-25.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c9c6c9b2c2f80747a98f34ef491c4d7b1a8d4853937bb1492774992a120f475d"}, - {file = "pyzmq-25.1.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5619f3f5a4db5dbb572b095ea3cb5cc035335159d9da950830c9c4db2fbb6995"}, - {file = "pyzmq-25.1.1-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5a34d2395073ef862b4032343cf0c32a712f3ab49d7ec4f42c9661e0294d106f"}, - {file = "pyzmq-25.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25f0e6b78220aba09815cd1f3a32b9c7cb3e02cb846d1cfc526b6595f6046618"}, - {file = "pyzmq-25.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:3669cf8ee3520c2f13b2e0351c41fea919852b220988d2049249db10046a7afb"}, - {file = "pyzmq-25.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2d163a18819277e49911f7461567bda923461c50b19d169a062536fffe7cd9d2"}, - {file = "pyzmq-25.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:df27ffddff4190667d40de7beba4a950b5ce78fe28a7dcc41d6f8a700a80a3c0"}, - {file = "pyzmq-25.1.1-cp38-cp38-win32.whl", hash = "sha256:a382372898a07479bd34bda781008e4a954ed8750f17891e794521c3e21c2e1c"}, - {file = "pyzmq-25.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:52533489f28d62eb1258a965f2aba28a82aa747202c8fa5a1c7a43b5db0e85c1"}, - {file = "pyzmq-25.1.1-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:03b3f49b57264909aacd0741892f2aecf2f51fb053e7d8ac6767f6c700832f45"}, - {file = "pyzmq-25.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:330f9e188d0d89080cde66dc7470f57d1926ff2fb5576227f14d5be7ab30b9fa"}, - {file = "pyzmq-25.1.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2ca57a5be0389f2a65e6d3bb2962a971688cbdd30b4c0bd188c99e39c234f414"}, - {file = "pyzmq-25.1.1-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d457aed310f2670f59cc5b57dcfced452aeeed77f9da2b9763616bd57e4dbaae"}, - {file = "pyzmq-25.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c56d748ea50215abef7030c72b60dd723ed5b5c7e65e7bc2504e77843631c1a6"}, - {file = "pyzmq-25.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8f03d3f0d01cb5a018debeb412441996a517b11c5c17ab2001aa0597c6d6882c"}, - {file = "pyzmq-25.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:820c4a08195a681252f46926de10e29b6bbf3e17b30037bd4250d72dd3ddaab8"}, - {file = "pyzmq-25.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:17ef5f01d25b67ca8f98120d5fa1d21efe9611604e8eb03a5147360f517dd1e2"}, - {file = "pyzmq-25.1.1-cp39-cp39-win32.whl", hash = "sha256:04ccbed567171579ec2cebb9c8a3e30801723c575601f9a990ab25bcac6b51e2"}, - {file = "pyzmq-25.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:e61f091c3ba0c3578411ef505992d356a812fb200643eab27f4f70eed34a29ef"}, - {file = "pyzmq-25.1.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ade6d25bb29c4555d718ac6d1443a7386595528c33d6b133b258f65f963bb0f6"}, - {file = "pyzmq-25.1.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0c95ddd4f6e9fca4e9e3afaa4f9df8552f0ba5d1004e89ef0a68e1f1f9807c7"}, - {file = "pyzmq-25.1.1-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48e466162a24daf86f6b5ca72444d2bf39a5e58da5f96370078be67c67adc978"}, - {file = "pyzmq-25.1.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abc719161780932c4e11aaebb203be3d6acc6b38d2f26c0f523b5b59d2fc1996"}, - {file = "pyzmq-25.1.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:1ccf825981640b8c34ae54231b7ed00271822ea1c6d8ba1090ebd4943759abf5"}, - {file = "pyzmq-25.1.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c2f20ce161ebdb0091a10c9ca0372e023ce24980d0e1f810f519da6f79c60800"}, - {file = "pyzmq-25.1.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:deee9ca4727f53464daf089536e68b13e6104e84a37820a88b0a057b97bba2d2"}, - {file = "pyzmq-25.1.1-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:aa8d6cdc8b8aa19ceb319aaa2b660cdaccc533ec477eeb1309e2a291eaacc43a"}, - {file = "pyzmq-25.1.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:019e59ef5c5256a2c7378f2fb8560fc2a9ff1d315755204295b2eab96b254d0a"}, - {file = "pyzmq-25.1.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:b9af3757495c1ee3b5c4e945c1df7be95562277c6e5bccc20a39aec50f826cd0"}, - {file = "pyzmq-25.1.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:548d6482dc8aadbe7e79d1b5806585c8120bafa1ef841167bc9090522b610fa6"}, - {file = "pyzmq-25.1.1-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:057e824b2aae50accc0f9a0570998adc021b372478a921506fddd6c02e60308e"}, - {file = "pyzmq-25.1.1-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2243700cc5548cff20963f0ca92d3e5e436394375ab8a354bbea2b12911b20b0"}, - {file = "pyzmq-25.1.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79986f3b4af059777111409ee517da24a529bdbd46da578b33f25580adcff728"}, - {file = "pyzmq-25.1.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:11d58723d44d6ed4dd677c5615b2ffb19d5c426636345567d6af82be4dff8a55"}, - {file = "pyzmq-25.1.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:49d238cf4b69652257db66d0c623cd3e09b5d2e9576b56bc067a396133a00d4a"}, - {file = "pyzmq-25.1.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fedbdc753827cf014c01dbbee9c3be17e5a208dcd1bf8641ce2cd29580d1f0d4"}, - {file = "pyzmq-25.1.1-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bc16ac425cc927d0a57d242589f87ee093884ea4804c05a13834d07c20db203c"}, - {file = "pyzmq-25.1.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11c1d2aed9079c6b0c9550a7257a836b4a637feb334904610f06d70eb44c56d2"}, - {file = "pyzmq-25.1.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e8a701123029cc240cea61dd2d16ad57cab4691804143ce80ecd9286b464d180"}, - {file = "pyzmq-25.1.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:61706a6b6c24bdece85ff177fec393545a3191eeda35b07aaa1458a027ad1304"}, - {file = "pyzmq-25.1.1.tar.gz", hash = "sha256:259c22485b71abacdfa8bf79720cd7bcf4b9d128b30ea554f01ae71fdbfdaa23"}, + {file = "pyzmq-25.1.2-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:e624c789359f1a16f83f35e2c705d07663ff2b4d4479bad35621178d8f0f6ea4"}, + {file = "pyzmq-25.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:49151b0efece79f6a79d41a461d78535356136ee70084a1c22532fc6383f4ad0"}, + {file = "pyzmq-25.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9a5f194cf730f2b24d6af1f833c14c10f41023da46a7f736f48b6d35061e76e"}, + {file = "pyzmq-25.1.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:faf79a302f834d9e8304fafdc11d0d042266667ac45209afa57e5efc998e3872"}, + {file = "pyzmq-25.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f51a7b4ead28d3fca8dda53216314a553b0f7a91ee8fc46a72b402a78c3e43d"}, + {file = "pyzmq-25.1.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:0ddd6d71d4ef17ba5a87becf7ddf01b371eaba553c603477679ae817a8d84d75"}, + {file = "pyzmq-25.1.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:246747b88917e4867e2367b005fc8eefbb4a54b7db363d6c92f89d69abfff4b6"}, + {file = "pyzmq-25.1.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:00c48ae2fd81e2a50c3485de1b9d5c7c57cd85dc8ec55683eac16846e57ac979"}, + {file = "pyzmq-25.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5a68d491fc20762b630e5db2191dd07ff89834086740f70e978bb2ef2668be08"}, + {file = "pyzmq-25.1.2-cp310-cp310-win32.whl", hash = 
"sha256:09dfe949e83087da88c4a76767df04b22304a682d6154de2c572625c62ad6886"}, + {file = "pyzmq-25.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:fa99973d2ed20417744fca0073390ad65ce225b546febb0580358e36aa90dba6"}, + {file = "pyzmq-25.1.2-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:82544e0e2d0c1811482d37eef297020a040c32e0687c1f6fc23a75b75db8062c"}, + {file = "pyzmq-25.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:01171fc48542348cd1a360a4b6c3e7d8f46cdcf53a8d40f84db6707a6768acc1"}, + {file = "pyzmq-25.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc69c96735ab501419c432110016329bf0dea8898ce16fab97c6d9106dc0b348"}, + {file = "pyzmq-25.1.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3e124e6b1dd3dfbeb695435dff0e383256655bb18082e094a8dd1f6293114642"}, + {file = "pyzmq-25.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7598d2ba821caa37a0f9d54c25164a4fa351ce019d64d0b44b45540950458840"}, + {file = "pyzmq-25.1.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d1299d7e964c13607efd148ca1f07dcbf27c3ab9e125d1d0ae1d580a1682399d"}, + {file = "pyzmq-25.1.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4e6f689880d5ad87918430957297c975203a082d9a036cc426648fcbedae769b"}, + {file = "pyzmq-25.1.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cc69949484171cc961e6ecd4a8911b9ce7a0d1f738fcae717177c231bf77437b"}, + {file = "pyzmq-25.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9880078f683466b7f567b8624bfc16cad65077be046b6e8abb53bed4eeb82dd3"}, + {file = "pyzmq-25.1.2-cp311-cp311-win32.whl", hash = "sha256:4e5837af3e5aaa99a091302df5ee001149baff06ad22b722d34e30df5f0d9097"}, + {file = "pyzmq-25.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:25c2dbb97d38b5ac9fd15586e048ec5eb1e38f3d47fe7d92167b0c77bb3584e9"}, + {file = "pyzmq-25.1.2-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:11e70516688190e9c2db14fcf93c04192b02d457b582a1f6190b154691b4c93a"}, + {file = "pyzmq-25.1.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:313c3794d650d1fccaaab2df942af9f2c01d6217c846177cfcbc693c7410839e"}, + {file = "pyzmq-25.1.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b3cbba2f47062b85fe0ef9de5b987612140a9ba3a9c6d2543c6dec9f7c2ab27"}, + {file = "pyzmq-25.1.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fc31baa0c32a2ca660784d5af3b9487e13b61b3032cb01a115fce6588e1bed30"}, + {file = "pyzmq-25.1.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02c9087b109070c5ab0b383079fa1b5f797f8d43e9a66c07a4b8b8bdecfd88ee"}, + {file = "pyzmq-25.1.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:f8429b17cbb746c3e043cb986328da023657e79d5ed258b711c06a70c2ea7537"}, + {file = "pyzmq-25.1.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5074adeacede5f810b7ef39607ee59d94e948b4fd954495bdb072f8c54558181"}, + {file = "pyzmq-25.1.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:7ae8f354b895cbd85212da245f1a5ad8159e7840e37d78b476bb4f4c3f32a9fe"}, + {file = "pyzmq-25.1.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b264bf2cc96b5bc43ce0e852be995e400376bd87ceb363822e2cb1964fcdc737"}, + {file = "pyzmq-25.1.2-cp312-cp312-win32.whl", hash = "sha256:02bbc1a87b76e04fd780b45e7f695471ae6de747769e540da909173d50ff8e2d"}, + {file = "pyzmq-25.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:ced111c2e81506abd1dc142e6cd7b68dd53747b3b7ae5edbea4578c5eeff96b7"}, + {file = 
"pyzmq-25.1.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:7b6d09a8962a91151f0976008eb7b29b433a560fde056ec7a3db9ec8f1075438"}, + {file = "pyzmq-25.1.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:967668420f36878a3c9ecb5ab33c9d0ff8d054f9c0233d995a6d25b0e95e1b6b"}, + {file = "pyzmq-25.1.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5edac3f57c7ddaacdb4d40f6ef2f9e299471fc38d112f4bc6d60ab9365445fb0"}, + {file = "pyzmq-25.1.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:0dabfb10ef897f3b7e101cacba1437bd3a5032ee667b7ead32bbcdd1a8422fe7"}, + {file = "pyzmq-25.1.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:2c6441e0398c2baacfe5ba30c937d274cfc2dc5b55e82e3749e333aabffde561"}, + {file = "pyzmq-25.1.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:16b726c1f6c2e7625706549f9dbe9b06004dfbec30dbed4bf50cbdfc73e5b32a"}, + {file = "pyzmq-25.1.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:a86c2dd76ef71a773e70551a07318b8e52379f58dafa7ae1e0a4be78efd1ff16"}, + {file = "pyzmq-25.1.2-cp36-cp36m-win32.whl", hash = "sha256:359f7f74b5d3c65dae137f33eb2bcfa7ad9ebefd1cab85c935f063f1dbb245cc"}, + {file = "pyzmq-25.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:55875492f820d0eb3417b51d96fea549cde77893ae3790fd25491c5754ea2f68"}, + {file = "pyzmq-25.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b8c8a419dfb02e91b453615c69568442e897aaf77561ee0064d789705ff37a92"}, + {file = "pyzmq-25.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8807c87fa893527ae8a524c15fc505d9950d5e856f03dae5921b5e9aa3b8783b"}, + {file = "pyzmq-25.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5e319ed7d6b8f5fad9b76daa0a68497bc6f129858ad956331a5835785761e003"}, + {file = "pyzmq-25.1.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:3c53687dde4d9d473c587ae80cc328e5b102b517447456184b485587ebd18b62"}, + {file = "pyzmq-25.1.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:9add2e5b33d2cd765ad96d5eb734a5e795a0755f7fc49aa04f76d7ddda73fd70"}, + {file = "pyzmq-25.1.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:e690145a8c0c273c28d3b89d6fb32c45e0d9605b2293c10e650265bf5c11cfec"}, + {file = "pyzmq-25.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:00a06faa7165634f0cac1abb27e54d7a0b3b44eb9994530b8ec73cf52e15353b"}, + {file = "pyzmq-25.1.2-cp37-cp37m-win32.whl", hash = "sha256:0f97bc2f1f13cb16905a5f3e1fbdf100e712d841482b2237484360f8bc4cb3d7"}, + {file = "pyzmq-25.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6cc0020b74b2e410287e5942e1e10886ff81ac77789eb20bec13f7ae681f0fdd"}, + {file = "pyzmq-25.1.2-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:bef02cfcbded83473bdd86dd8d3729cd82b2e569b75844fb4ea08fee3c26ae41"}, + {file = "pyzmq-25.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e10a4b5a4b1192d74853cc71a5e9fd022594573926c2a3a4802020360aa719d8"}, + {file = "pyzmq-25.1.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8c5f80e578427d4695adac6fdf4370c14a2feafdc8cb35549c219b90652536ae"}, + {file = "pyzmq-25.1.2-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5dde6751e857910c1339890f3524de74007958557593b9e7e8c5f01cd919f8a7"}, + {file = "pyzmq-25.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea1608dd169da230a0ad602d5b1ebd39807ac96cae1845c3ceed39af08a5c6df"}, + {file = "pyzmq-25.1.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:0f513130c4c361201da9bc69df25a086487250e16b5571ead521b31ff6b02220"}, + {file = "pyzmq-25.1.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:019744b99da30330798bb37df33549d59d380c78e516e3bab9c9b84f87a9592f"}, + {file = "pyzmq-25.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2e2713ef44be5d52dd8b8e2023d706bf66cb22072e97fc71b168e01d25192755"}, + {file = "pyzmq-25.1.2-cp38-cp38-win32.whl", hash = "sha256:07cd61a20a535524906595e09344505a9bd46f1da7a07e504b315d41cd42eb07"}, + {file = "pyzmq-25.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb7e49a17fb8c77d3119d41a4523e432eb0c6932187c37deb6fbb00cc3028088"}, + {file = "pyzmq-25.1.2-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:94504ff66f278ab4b7e03e4cba7e7e400cb73bfa9d3d71f58d8972a8dc67e7a6"}, + {file = "pyzmq-25.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6dd0d50bbf9dca1d0bdea219ae6b40f713a3fb477c06ca3714f208fd69e16fd8"}, + {file = "pyzmq-25.1.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:004ff469d21e86f0ef0369717351073e0e577428e514c47c8480770d5e24a565"}, + {file = "pyzmq-25.1.2-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c0b5ca88a8928147b7b1e2dfa09f3b6c256bc1135a1338536cbc9ea13d3b7add"}, + {file = "pyzmq-25.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c9a79f1d2495b167119d02be7448bfba57fad2a4207c4f68abc0bab4b92925b"}, + {file = "pyzmq-25.1.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:518efd91c3d8ac9f9b4f7dd0e2b7b8bf1a4fe82a308009016b07eaa48681af82"}, + {file = "pyzmq-25.1.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:1ec23bd7b3a893ae676d0e54ad47d18064e6c5ae1fadc2f195143fb27373f7f6"}, + {file = "pyzmq-25.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db36c27baed588a5a8346b971477b718fdc66cf5b80cbfbd914b4d6d355e44e2"}, + {file = "pyzmq-25.1.2-cp39-cp39-win32.whl", hash = "sha256:39b1067f13aba39d794a24761e385e2eddc26295826530a8c7b6c6c341584289"}, + {file = "pyzmq-25.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:8e9f3fabc445d0ce320ea2c59a75fe3ea591fdbdeebec5db6de530dd4b09412e"}, + {file = "pyzmq-25.1.2-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a8c1d566344aee826b74e472e16edae0a02e2a044f14f7c24e123002dcff1c05"}, + {file = "pyzmq-25.1.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:759cfd391a0996345ba94b6a5110fca9c557ad4166d86a6e81ea526c376a01e8"}, + {file = "pyzmq-25.1.2-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c61e346ac34b74028ede1c6b4bcecf649d69b707b3ff9dc0fab453821b04d1e"}, + {file = "pyzmq-25.1.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cb8fc1f8d69b411b8ec0b5f1ffbcaf14c1db95b6bccea21d83610987435f1a4"}, + {file = "pyzmq-25.1.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:3c00c9b7d1ca8165c610437ca0c92e7b5607b2f9076f4eb4b095c85d6e680a1d"}, + {file = "pyzmq-25.1.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:df0c7a16ebb94452d2909b9a7b3337940e9a87a824c4fc1c7c36bb4404cb0cde"}, + {file = "pyzmq-25.1.2-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:45999e7f7ed5c390f2e87ece7f6c56bf979fb213550229e711e45ecc7d42ccb8"}, + {file = "pyzmq-25.1.2-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ac170e9e048b40c605358667aca3d94e98f604a18c44bdb4c102e67070f3ac9b"}, + {file = "pyzmq-25.1.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:d1b604734bec94f05f81b360a272fc824334267426ae9905ff32dc2be433ab96"}, + {file = "pyzmq-25.1.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:a793ac733e3d895d96f865f1806f160696422554e46d30105807fdc9841b9f7d"}, + {file = "pyzmq-25.1.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0806175f2ae5ad4b835ecd87f5f85583316b69f17e97786f7443baaf54b9bb98"}, + {file = "pyzmq-25.1.2-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ef12e259e7bc317c7597d4f6ef59b97b913e162d83b421dd0db3d6410f17a244"}, + {file = "pyzmq-25.1.2-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ea253b368eb41116011add00f8d5726762320b1bda892f744c91997b65754d73"}, + {file = "pyzmq-25.1.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b9b1f2ad6498445a941d9a4fee096d387fee436e45cc660e72e768d3d8ee611"}, + {file = "pyzmq-25.1.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:8b14c75979ce932c53b79976a395cb2a8cd3aaf14aef75e8c2cb55a330b9b49d"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:889370d5174a741a62566c003ee8ddba4b04c3f09a97b8000092b7ca83ec9c49"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9a18fff090441a40ffda8a7f4f18f03dc56ae73f148f1832e109f9bffa85df15"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99a6b36f95c98839ad98f8c553d8507644c880cf1e0a57fe5e3a3f3969040882"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4345c9a27f4310afbb9c01750e9461ff33d6fb74cd2456b107525bbeebcb5be3"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3516e0b6224cf6e43e341d56da15fd33bdc37fa0c06af4f029f7d7dfceceabbc"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:146b9b1f29ead41255387fb07be56dc29639262c0f7344f570eecdcd8d683314"}, + {file = "pyzmq-25.1.2.tar.gz", hash = "sha256:93f1aa311e8bb912e34f004cf186407a4e90eec4f0ecc0efd26056bf7eda0226"}, ] [package.dependencies] @@ -3325,13 +3330,13 @@ networkx = ["networkx (>=2.0.0,<3.0.0)"] [[package]] name = "referencing" -version = "0.31.0" +version = "0.32.1" description = "JSON Referencing + Python" optional = false python-versions = ">=3.8" files = [ - {file = "referencing-0.31.0-py3-none-any.whl", hash = "sha256:381b11e53dd93babb55696c71cf42aef2d36b8a150c49bf0bc301e36d536c882"}, - {file = "referencing-0.31.0.tar.gz", hash = "sha256:cc28f2c88fbe7b961a7817a0abc034c09a1e36358f82fedb4ffdf29a25398863"}, + {file = "referencing-0.32.1-py3-none-any.whl", hash = "sha256:7e4dc12271d8e15612bfe35792f5ea1c40970dadf8624602e33db2758f7ee554"}, + {file = "referencing-0.32.1.tar.gz", hash = "sha256:3c57da0513e9563eb7e203ebe9bb3a1b509b042016433bd1e45a2853466c3dd3"}, ] [package.dependencies] @@ -3340,99 +3345,104 @@ rpds-py = ">=0.7.0" [[package]] name = "regex" -version = "2023.10.3" +version = "2023.12.25" description = "Alternative regular expression module, to replace re." 
optional = false python-versions = ">=3.7" files = [ - {file = "regex-2023.10.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4c34d4f73ea738223a094d8e0ffd6d2c1a1b4c175da34d6b0de3d8d69bee6bcc"}, - {file = "regex-2023.10.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a8f4e49fc3ce020f65411432183e6775f24e02dff617281094ba6ab079ef0915"}, - {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4cd1bccf99d3ef1ab6ba835308ad85be040e6a11b0977ef7ea8c8005f01a3c29"}, - {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:81dce2ddc9f6e8f543d94b05d56e70d03a0774d32f6cca53e978dc01e4fc75b8"}, - {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c6b4d23c04831e3ab61717a707a5d763b300213db49ca680edf8bf13ab5d91b"}, - {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c15ad0aee158a15e17e0495e1e18741573d04eb6da06d8b84af726cfc1ed02ee"}, - {file = "regex-2023.10.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6239d4e2e0b52c8bd38c51b760cd870069f0bdf99700a62cd509d7a031749a55"}, - {file = "regex-2023.10.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4a8bf76e3182797c6b1afa5b822d1d5802ff30284abe4599e1247be4fd6b03be"}, - {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d9c727bbcf0065cbb20f39d2b4f932f8fa1631c3e01fcedc979bd4f51fe051c5"}, - {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3ccf2716add72f80714b9a63899b67fa711b654be3fcdd34fa391d2d274ce767"}, - {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:107ac60d1bfdc3edb53be75e2a52aff7481b92817cfdddd9b4519ccf0e54a6ff"}, - {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:00ba3c9818e33f1fa974693fb55d24cdc8ebafcb2e4207680669d8f8d7cca79a"}, - {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f0a47efb1dbef13af9c9a54a94a0b814902e547b7f21acb29434504d18f36e3a"}, - {file = "regex-2023.10.3-cp310-cp310-win32.whl", hash = "sha256:36362386b813fa6c9146da6149a001b7bd063dabc4d49522a1f7aa65b725c7ec"}, - {file = "regex-2023.10.3-cp310-cp310-win_amd64.whl", hash = "sha256:c65a3b5330b54103e7d21cac3f6bf3900d46f6d50138d73343d9e5b2900b2353"}, - {file = "regex-2023.10.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:90a79bce019c442604662d17bf69df99090e24cdc6ad95b18b6725c2988a490e"}, - {file = "regex-2023.10.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c7964c2183c3e6cce3f497e3a9f49d182e969f2dc3aeeadfa18945ff7bdd7051"}, - {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ef80829117a8061f974b2fda8ec799717242353bff55f8a29411794d635d964"}, - {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5addc9d0209a9afca5fc070f93b726bf7003bd63a427f65ef797a931782e7edc"}, - {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c148bec483cc4b421562b4bcedb8e28a3b84fcc8f0aa4418e10898f3c2c0eb9b"}, - {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d1f21af4c1539051049796a0f50aa342f9a27cde57318f2fc41ed50b0dbc4ac"}, - {file = 
"regex-2023.10.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0b9ac09853b2a3e0d0082104036579809679e7715671cfbf89d83c1cb2a30f58"}, - {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ebedc192abbc7fd13c5ee800e83a6df252bec691eb2c4bedc9f8b2e2903f5e2a"}, - {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d8a993c0a0ffd5f2d3bda23d0cd75e7086736f8f8268de8a82fbc4bd0ac6791e"}, - {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:be6b7b8d42d3090b6c80793524fa66c57ad7ee3fe9722b258aec6d0672543fd0"}, - {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4023e2efc35a30e66e938de5aef42b520c20e7eda7bb5fb12c35e5d09a4c43f6"}, - {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0d47840dc05e0ba04fe2e26f15126de7c755496d5a8aae4a08bda4dd8d646c54"}, - {file = "regex-2023.10.3-cp311-cp311-win32.whl", hash = "sha256:9145f092b5d1977ec8c0ab46e7b3381b2fd069957b9862a43bd383e5c01d18c2"}, - {file = "regex-2023.10.3-cp311-cp311-win_amd64.whl", hash = "sha256:b6104f9a46bd8743e4f738afef69b153c4b8b592d35ae46db07fc28ae3d5fb7c"}, - {file = "regex-2023.10.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:bff507ae210371d4b1fe316d03433ac099f184d570a1a611e541923f78f05037"}, - {file = "regex-2023.10.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:be5e22bbb67924dea15039c3282fa4cc6cdfbe0cbbd1c0515f9223186fc2ec5f"}, - {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a992f702c9be9c72fa46f01ca6e18d131906a7180950958f766c2aa294d4b41"}, - {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7434a61b158be563c1362d9071358f8ab91b8d928728cd2882af060481244c9e"}, - {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c2169b2dcabf4e608416f7f9468737583ce5f0a6e8677c4efbf795ce81109d7c"}, - {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9e908ef5889cda4de038892b9accc36d33d72fb3e12c747e2799a0e806ec841"}, - {file = "regex-2023.10.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12bd4bc2c632742c7ce20db48e0d99afdc05e03f0b4c1af90542e05b809a03d9"}, - {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bc72c231f5449d86d6c7d9cc7cd819b6eb30134bb770b8cfdc0765e48ef9c420"}, - {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bce8814b076f0ce5766dc87d5a056b0e9437b8e0cd351b9a6c4e1134a7dfbda9"}, - {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:ba7cd6dc4d585ea544c1412019921570ebd8a597fabf475acc4528210d7c4a6f"}, - {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b0c7d2f698e83f15228ba41c135501cfe7d5740181d5903e250e47f617eb4292"}, - {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5a8f91c64f390ecee09ff793319f30a0f32492e99f5dc1c72bc361f23ccd0a9a"}, - {file = "regex-2023.10.3-cp312-cp312-win32.whl", hash = "sha256:ad08a69728ff3c79866d729b095872afe1e0557251da4abb2c5faff15a91d19a"}, - {file = "regex-2023.10.3-cp312-cp312-win_amd64.whl", hash = "sha256:39cdf8d141d6d44e8d5a12a8569d5a227f645c87df4f92179bd06e2e2705e76b"}, - {file = "regex-2023.10.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4a3ee019a9befe84fa3e917a2dd378807e423d013377a884c1970a3c2792d293"}, - 
{file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76066d7ff61ba6bf3cb5efe2428fc82aac91802844c022d849a1f0f53820502d"}, - {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfe50b61bab1b1ec260fa7cd91106fa9fece57e6beba05630afe27c71259c59b"}, - {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fd88f373cb71e6b59b7fa597e47e518282455c2734fd4306a05ca219a1991b0"}, - {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3ab05a182c7937fb374f7e946f04fb23a0c0699c0450e9fb02ef567412d2fa3"}, - {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dac37cf08fcf2094159922edc7a2784cfcc5c70f8354469f79ed085f0328ebdf"}, - {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e54ddd0bb8fb626aa1f9ba7b36629564544954fff9669b15da3610c22b9a0991"}, - {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3367007ad1951fde612bf65b0dffc8fd681a4ab98ac86957d16491400d661302"}, - {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:16f8740eb6dbacc7113e3097b0a36065a02e37b47c936b551805d40340fb9971"}, - {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:f4f2ca6df64cbdd27f27b34f35adb640b5d2d77264228554e68deda54456eb11"}, - {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:39807cbcbe406efca2a233884e169d056c35aa7e9f343d4e78665246a332f597"}, - {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7eece6fbd3eae4a92d7c748ae825cbc1ee41a89bb1c3db05b5578ed3cfcfd7cb"}, - {file = "regex-2023.10.3-cp37-cp37m-win32.whl", hash = "sha256:ce615c92d90df8373d9e13acddd154152645c0dc060871abf6bd43809673d20a"}, - {file = "regex-2023.10.3-cp37-cp37m-win_amd64.whl", hash = "sha256:0f649fa32fe734c4abdfd4edbb8381c74abf5f34bc0b3271ce687b23729299ed"}, - {file = "regex-2023.10.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9b98b7681a9437262947f41c7fac567c7e1f6eddd94b0483596d320092004533"}, - {file = "regex-2023.10.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:91dc1d531f80c862441d7b66c4505cd6ea9d312f01fb2f4654f40c6fdf5cc37a"}, - {file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82fcc1f1cc3ff1ab8a57ba619b149b907072e750815c5ba63e7aa2e1163384a4"}, - {file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7979b834ec7a33aafae34a90aad9f914c41fd6eaa8474e66953f3f6f7cbd4368"}, - {file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef71561f82a89af6cfcbee47f0fabfdb6e63788a9258e913955d89fdd96902ab"}, - {file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd829712de97753367153ed84f2de752b86cd1f7a88b55a3a775eb52eafe8a94"}, - {file = "regex-2023.10.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00e871d83a45eee2f8688d7e6849609c2ca2a04a6d48fba3dff4deef35d14f07"}, - {file = "regex-2023.10.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:706e7b739fdd17cb89e1fbf712d9dc21311fc2333f6d435eac2d4ee81985098c"}, - {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:cc3f1c053b73f20c7ad88b0d1d23be7e7b3901229ce89f5000a8399746a6e039"}, - {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6f85739e80d13644b981a88f529d79c5bdf646b460ba190bffcaf6d57b2a9863"}, - {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:741ba2f511cc9626b7561a440f87d658aabb3d6b744a86a3c025f866b4d19e7f"}, - {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e77c90ab5997e85901da85131fd36acd0ed2221368199b65f0d11bca44549711"}, - {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:979c24cbefaf2420c4e377ecd1f165ea08cc3d1fbb44bdc51bccbbf7c66a2cb4"}, - {file = "regex-2023.10.3-cp38-cp38-win32.whl", hash = "sha256:58837f9d221744d4c92d2cf7201c6acd19623b50c643b56992cbd2b745485d3d"}, - {file = "regex-2023.10.3-cp38-cp38-win_amd64.whl", hash = "sha256:c55853684fe08d4897c37dfc5faeff70607a5f1806c8be148f1695be4a63414b"}, - {file = "regex-2023.10.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2c54e23836650bdf2c18222c87f6f840d4943944146ca479858404fedeb9f9af"}, - {file = "regex-2023.10.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:69c0771ca5653c7d4b65203cbfc5e66db9375f1078689459fe196fe08b7b4930"}, - {file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ac965a998e1388e6ff2e9781f499ad1eaa41e962a40d11c7823c9952c77123e"}, - {file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1c0e8fae5b27caa34177bdfa5a960c46ff2f78ee2d45c6db15ae3f64ecadde14"}, - {file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6c56c3d47da04f921b73ff9415fbaa939f684d47293f071aa9cbb13c94afc17d"}, - {file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ef1e014eed78ab650bef9a6a9cbe50b052c0aebe553fb2881e0453717573f52"}, - {file = "regex-2023.10.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d29338556a59423d9ff7b6eb0cb89ead2b0875e08fe522f3e068b955c3e7b59b"}, - {file = "regex-2023.10.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9c6d0ced3c06d0f183b73d3c5920727268d2201aa0fe6d55c60d68c792ff3588"}, - {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:994645a46c6a740ee8ce8df7911d4aee458d9b1bc5639bc968226763d07f00fa"}, - {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:66e2fe786ef28da2b28e222c89502b2af984858091675044d93cb50e6f46d7af"}, - {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:11175910f62b2b8c055f2b089e0fedd694fe2be3941b3e2633653bc51064c528"}, - {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:06e9abc0e4c9ab4779c74ad99c3fc10d3967d03114449acc2c2762ad4472b8ca"}, - {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:fb02e4257376ae25c6dd95a5aec377f9b18c09be6ebdefa7ad209b9137b73d48"}, - {file = "regex-2023.10.3-cp39-cp39-win32.whl", hash = "sha256:3b2c3502603fab52d7619b882c25a6850b766ebd1b18de3df23b2f939360e1bd"}, - {file = "regex-2023.10.3-cp39-cp39-win_amd64.whl", hash = "sha256:adbccd17dcaff65704c856bd29951c58a1bd4b2b0f8ad6b826dbd543fe740988"}, - {file = "regex-2023.10.3.tar.gz", hash = "sha256:3fef4f844d2290ee0ba57addcec17eec9e3df73f10a2748485dfd6a3a188cc0f"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_universal2.whl", hash = 
"sha256:0694219a1d54336fd0445ea382d49d36882415c0134ee1e8332afd1529f0baa5"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b014333bd0217ad3d54c143de9d4b9a3ca1c5a29a6d0d554952ea071cff0f1f8"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d865984b3f71f6d0af64d0d88f5733521698f6c16f445bb09ce746c92c97c586"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e0eabac536b4cc7f57a5f3d095bfa557860ab912f25965e08fe1545e2ed8b4c"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c25a8ad70e716f96e13a637802813f65d8a6760ef48672aa3502f4c24ea8b400"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9b6d73353f777630626f403b0652055ebfe8ff142a44ec2cf18ae470395766e"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9cc99d6946d750eb75827cb53c4371b8b0fe89c733a94b1573c9dd16ea6c9e4"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88d1f7bef20c721359d8675f7d9f8e414ec5003d8f642fdfd8087777ff7f94b5"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cb3fe77aec8f1995611f966d0c656fdce398317f850d0e6e7aebdfe61f40e1cd"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7aa47c2e9ea33a4a2a05f40fcd3ea36d73853a2aae7b4feab6fc85f8bf2c9704"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:df26481f0c7a3f8739fecb3e81bc9da3fcfae34d6c094563b9d4670b047312e1"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c40281f7d70baf6e0db0c2f7472b31609f5bc2748fe7275ea65a0b4601d9b392"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:d94a1db462d5690ebf6ae86d11c5e420042b9898af5dcf278bd97d6bda065423"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ba1b30765a55acf15dce3f364e4928b80858fa8f979ad41f862358939bdd1f2f"}, + {file = "regex-2023.12.25-cp310-cp310-win32.whl", hash = "sha256:150c39f5b964e4d7dba46a7962a088fbc91f06e606f023ce57bb347a3b2d4630"}, + {file = "regex-2023.12.25-cp310-cp310-win_amd64.whl", hash = "sha256:09da66917262d9481c719599116c7dc0c321ffcec4b1f510c4f8a066f8768105"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1b9d811f72210fa9306aeb88385b8f8bcef0dfbf3873410413c00aa94c56c2b6"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d902a43085a308cef32c0d3aea962524b725403fd9373dea18110904003bac97"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d166eafc19f4718df38887b2bbe1467a4f74a9830e8605089ea7a30dd4da8887"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7ad32824b7f02bb3c9f80306d405a1d9b7bb89362d68b3c5a9be53836caebdb"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:636ba0a77de609d6510235b7f0e77ec494d2657108f777e8765efc060094c98c"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fda75704357805eb953a3ee15a2b240694a9a514548cd49b3c5124b4e2ad01b"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:f72cbae7f6b01591f90814250e636065850c5926751af02bb48da94dfced7baa"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db2a0b1857f18b11e3b0e54ddfefc96af46b0896fb678c85f63fb8c37518b3e7"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7502534e55c7c36c0978c91ba6f61703faf7ce733715ca48f499d3dbbd7657e0"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e8c7e08bb566de4faaf11984af13f6bcf6a08f327b13631d41d62592681d24fe"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:283fc8eed679758de38fe493b7d7d84a198b558942b03f017b1f94dda8efae80"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:f44dd4d68697559d007462b0a3a1d9acd61d97072b71f6d1968daef26bc744bd"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:67d3ccfc590e5e7197750fcb3a2915b416a53e2de847a728cfa60141054123d4"}, + {file = "regex-2023.12.25-cp311-cp311-win32.whl", hash = "sha256:68191f80a9bad283432385961d9efe09d783bcd36ed35a60fb1ff3f1ec2efe87"}, + {file = "regex-2023.12.25-cp311-cp311-win_amd64.whl", hash = "sha256:7d2af3f6b8419661a0c421584cfe8aaec1c0e435ce7e47ee2a97e344b98f794f"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8a0ccf52bb37d1a700375a6b395bff5dd15c50acb745f7db30415bae3c2b0715"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c3c4a78615b7762740531c27cf46e2f388d8d727d0c0c739e72048beb26c8a9d"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ad83e7545b4ab69216cef4cc47e344d19622e28aabec61574b20257c65466d6a"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7a635871143661feccce3979e1727c4e094f2bdfd3ec4b90dfd4f16f571a87a"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d498eea3f581fbe1b34b59c697512a8baef88212f92e4c7830fcc1499f5b45a5"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43f7cd5754d02a56ae4ebb91b33461dc67be8e3e0153f593c509e21d219c5060"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51f4b32f793812714fd5307222a7f77e739b9bc566dc94a18126aba3b92b98a3"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba99d8077424501b9616b43a2d208095746fb1284fc5ba490139651f971d39d9"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4bfc2b16e3ba8850e0e262467275dd4d62f0d045e0e9eda2bc65078c0110a11f"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8c2c19dae8a3eb0ea45a8448356ed561be843b13cbc34b840922ddf565498c1c"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:60080bb3d8617d96f0fb7e19796384cc2467447ef1c491694850ebd3670bc457"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b77e27b79448e34c2c51c09836033056a0547aa360c45eeeb67803da7b0eedaf"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:518440c991f514331f4850a63560321f833979d145d7d81186dbe2f19e27ae3d"}, + {file = "regex-2023.12.25-cp312-cp312-win32.whl", hash = "sha256:e2610e9406d3b0073636a3a2e80db05a02f0c3169b5632022b4e81c0364bcda5"}, + {file = 
"regex-2023.12.25-cp312-cp312-win_amd64.whl", hash = "sha256:cc37b9aeebab425f11f27e5e9e6cf580be7206c6582a64467a14dda211abc232"}, + {file = "regex-2023.12.25-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:da695d75ac97cb1cd725adac136d25ca687da4536154cdc2815f576e4da11c69"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d126361607b33c4eb7b36debc173bf25d7805847346dd4d99b5499e1fef52bc7"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4719bb05094d7d8563a450cf8738d2e1061420f79cfcc1fa7f0a44744c4d8f73"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5dd58946bce44b53b06d94aa95560d0b243eb2fe64227cba50017a8d8b3cd3e2"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22a86d9fff2009302c440b9d799ef2fe322416d2d58fc124b926aa89365ec482"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2aae8101919e8aa05ecfe6322b278f41ce2994c4a430303c4cd163fef746e04f"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e692296c4cc2873967771345a876bcfc1c547e8dd695c6b89342488b0ea55cd8"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:263ef5cc10979837f243950637fffb06e8daed7f1ac1e39d5910fd29929e489a"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d6f7e255e5fa94642a0724e35406e6cb7001c09d476ab5fce002f652b36d0c39"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:88ad44e220e22b63b0f8f81f007e8abbb92874d8ced66f32571ef8beb0643b2b"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:3a17d3ede18f9cedcbe23d2daa8a2cd6f59fe2bf082c567e43083bba3fb00347"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d15b274f9e15b1a0b7a45d2ac86d1f634d983ca40d6b886721626c47a400bf39"}, + {file = "regex-2023.12.25-cp37-cp37m-win32.whl", hash = "sha256:ed19b3a05ae0c97dd8f75a5d8f21f7723a8c33bbc555da6bbe1f96c470139d3c"}, + {file = "regex-2023.12.25-cp37-cp37m-win_amd64.whl", hash = "sha256:a6d1047952c0b8104a1d371f88f4ab62e6275567d4458c1e26e9627ad489b445"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b43523d7bc2abd757119dbfb38af91b5735eea45537ec6ec3a5ec3f9562a1c53"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:efb2d82f33b2212898f1659fb1c2e9ac30493ac41e4d53123da374c3b5541e64"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b7fca9205b59c1a3d5031f7e64ed627a1074730a51c2a80e97653e3e9fa0d415"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086dd15e9435b393ae06f96ab69ab2d333f5d65cbe65ca5a3ef0ec9564dfe770"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e81469f7d01efed9b53740aedd26085f20d49da65f9c1f41e822a33992cb1590"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:34e4af5b27232f68042aa40a91c3b9bb4da0eeb31b7632e0091afc4310afe6cb"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9852b76ab558e45b20bf1893b59af64a28bd3820b0c2efc80e0a70a4a3ea51c1"}, + {file = 
"regex-2023.12.25-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff100b203092af77d1a5a7abe085b3506b7eaaf9abf65b73b7d6905b6cb76988"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cc038b2d8b1470364b1888a98fd22d616fba2b6309c5b5f181ad4483e0017861"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:094ba386bb5c01e54e14434d4caabf6583334090865b23ef58e0424a6286d3dc"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5cd05d0f57846d8ba4b71d9c00f6f37d6b97d5e5ef8b3c3840426a475c8f70f4"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:9aa1a67bbf0f957bbe096375887b2505f5d8ae16bf04488e8b0f334c36e31360"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:98a2636994f943b871786c9e82bfe7883ecdaba2ef5df54e1450fa9869d1f756"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:37f8e93a81fc5e5bd8db7e10e62dc64261bcd88f8d7e6640aaebe9bc180d9ce2"}, + {file = "regex-2023.12.25-cp38-cp38-win32.whl", hash = "sha256:d78bd484930c1da2b9679290a41cdb25cc127d783768a0369d6b449e72f88beb"}, + {file = "regex-2023.12.25-cp38-cp38-win_amd64.whl", hash = "sha256:b521dcecebc5b978b447f0f69b5b7f3840eac454862270406a39837ffae4e697"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f7bc09bc9c29ebead055bcba136a67378f03d66bf359e87d0f7c759d6d4ffa31"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e14b73607d6231f3cc4622809c196b540a6a44e903bcfad940779c80dffa7be7"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9eda5f7a50141291beda3edd00abc2d4a5b16c29c92daf8d5bd76934150f3edc"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc6bb9aa69aacf0f6032c307da718f61a40cf970849e471254e0e91c56ffca95"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:298dc6354d414bc921581be85695d18912bea163a8b23cac9a2562bbcd5088b1"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f4e475a80ecbd15896a976aa0b386c5525d0ed34d5c600b6d3ebac0a67c7ddf"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531ac6cf22b53e0696f8e1d56ce2396311254eb806111ddd3922c9d937151dae"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22f3470f7524b6da61e2020672df2f3063676aff444db1daa283c2ea4ed259d6"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:89723d2112697feaa320c9d351e5f5e7b841e83f8b143dba8e2d2b5f04e10923"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0ecf44ddf9171cd7566ef1768047f6e66975788258b1c6c6ca78098b95cf9a3d"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:905466ad1702ed4acfd67a902af50b8db1feeb9781436372261808df7a2a7bca"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:4558410b7a5607a645e9804a3e9dd509af12fb72b9825b13791a37cd417d73a5"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:7e316026cc1095f2a3e8cc012822c99f413b702eaa2ca5408a513609488cb62f"}, + {file = 
"regex-2023.12.25-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3b1de218d5375cd6ac4b5493e0b9f3df2be331e86520f23382f216c137913d20"}, + {file = "regex-2023.12.25-cp39-cp39-win32.whl", hash = "sha256:11a963f8e25ab5c61348d090bf1b07f1953929c13bd2309a0662e9ff680763c9"}, + {file = "regex-2023.12.25-cp39-cp39-win_amd64.whl", hash = "sha256:e693e233ac92ba83a87024e1d32b5f9ab15ca55ddd916d878146f4e3406b5c91"}, + {file = "regex-2023.12.25.tar.gz", hash = "sha256:29171aa128da69afdf4bde412d5bedc335f2ca8fcfe4489038577d05f16181e5"}, ] [[package]] @@ -3501,110 +3511,110 @@ files = [ [[package]] name = "rpds-py" -version = "0.13.1" +version = "0.16.2" description = "Python bindings to Rust's persistent data structures (rpds)" optional = false python-versions = ">=3.8" files = [ - {file = "rpds_py-0.13.1-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:83feb0f682d75a09ddc11aa37ba5c07dd9b824b22915207f6176ea458474ff75"}, - {file = "rpds_py-0.13.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fa84bbe22ffa108f91631935c28a623001e335d66e393438258501e618fb0dde"}, - {file = "rpds_py-0.13.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e04f8c76b8d5c70695b4e8f1d0b391d8ef91df00ef488c6c1ffb910176459bc6"}, - {file = "rpds_py-0.13.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:032c242a595629aacace44128f9795110513ad27217b091e834edec2fb09e800"}, - {file = "rpds_py-0.13.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91276caef95556faeb4b8f09fe4439670d3d6206fee78d47ddb6e6de837f0b4d"}, - {file = "rpds_py-0.13.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d22f2cb82e0b40e427a74a93c9a4231335bbc548aed79955dde0b64ea7f88146"}, - {file = "rpds_py-0.13.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63c9e2794329ef070844ff9bfc012004aeddc0468dc26970953709723f76c8a5"}, - {file = "rpds_py-0.13.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c797ea56f36c6f248656f0223b11307fdf4a1886f3555eba371f34152b07677f"}, - {file = "rpds_py-0.13.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:82dbcd6463e580bcfb7561cece35046aaabeac5a9ddb775020160b14e6c58a5d"}, - {file = "rpds_py-0.13.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:736817dbbbd030a69a1faf5413a319976c9c8ba8cdcfa98c022d3b6b2e01eca6"}, - {file = "rpds_py-0.13.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:1f36a1e80ef4ed1996445698fd91e0d3e54738bf597c9995118b92da537d7a28"}, - {file = "rpds_py-0.13.1-cp310-none-win32.whl", hash = "sha256:4f13d3f6585bd07657a603780e99beda96a36c86acaba841f131e81393958336"}, - {file = "rpds_py-0.13.1-cp310-none-win_amd64.whl", hash = "sha256:545e94c84575057d3d5c62634611858dac859702b1519b6ffc58eca7fb1adfcf"}, - {file = "rpds_py-0.13.1-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:6bfe72b249264cc1ff2f3629be240d7d2fdc778d9d298087cdec8524c91cd11f"}, - {file = "rpds_py-0.13.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edc91c50e17f5cd945d821f0f1af830522dba0c10267c3aab186dc3dbaab8def"}, - {file = "rpds_py-0.13.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2eca04a365be380ca1f8fa48b334462e19e3382c0bb7386444d8ca43aa01c481"}, - {file = "rpds_py-0.13.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3e3ac5b602fea378243f993d8b707189f9061e55ebb4e56cb9fdef8166060f28"}, - {file = "rpds_py-0.13.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:dfb5d2ab183c0efe5e7b8917e4eaa2e837aacafad8a69b89aa6bc81550eed857"}, - {file = "rpds_py-0.13.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d9793d46d3e6522ae58e9321032827c9c0df1e56cbe5d3de965facb311aed6aa"}, - {file = "rpds_py-0.13.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9cd935c0220d012a27c20135c140f9cdcbc6249d5954345c81bfb714071b985c"}, - {file = "rpds_py-0.13.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:37b08df45f02ff1866043b95096cbe91ac99de05936dd09d6611987a82a3306a"}, - {file = "rpds_py-0.13.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ad666a904212aa9a6c77da7dce9d5170008cda76b7776e6731928b3f8a0d40fa"}, - {file = "rpds_py-0.13.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8a6ad8429340e0a4de89353447c6441329def3632e7b2293a7d6e873217d3c2b"}, - {file = "rpds_py-0.13.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7c40851b659d958c5245c1236e34f0d065cc53dca8d978b49a032c8e0adfda6e"}, - {file = "rpds_py-0.13.1-cp311-none-win32.whl", hash = "sha256:4145172ab59b6c27695db6d78d040795f635cba732cead19c78cede74800949a"}, - {file = "rpds_py-0.13.1-cp311-none-win_amd64.whl", hash = "sha256:46a07a258bda12270de02b34c4884f200f864bba3dcd6e3a37fef36a168b859d"}, - {file = "rpds_py-0.13.1-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:ba4432301ad7eeb1b00848cf46fae0e5fecfd18a8cb5fdcf856c67985f79ecc7"}, - {file = "rpds_py-0.13.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d22e0660de24bd8e9ac82f4230a22a5fe4e397265709289d61d5fb333839ba50"}, - {file = "rpds_py-0.13.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76a8374b294e4ccb39ccaf11d39a0537ed107534139c00b4393ca3b542cc66e5"}, - {file = "rpds_py-0.13.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7d152ec7bb431040af2500e01436c9aa0d993f243346f0594a15755016bf0be1"}, - {file = "rpds_py-0.13.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:74a2044b870df7c9360bb3ce7e12f9ddf8e72e49cd3a353a1528cbf166ad2383"}, - {file = "rpds_py-0.13.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:960e7e460fda2d0af18c75585bbe0c99f90b8f09963844618a621b804f8c3abe"}, - {file = "rpds_py-0.13.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37f79f4f1f06cc96151f4a187528c3fd4a7e1065538a4af9eb68c642365957f7"}, - {file = "rpds_py-0.13.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cd4ea56c9542ad0091dfdef3e8572ae7a746e1e91eb56c9e08b8d0808b40f1d1"}, - {file = "rpds_py-0.13.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0290712eb5603a725769b5d857f7cf15cf6ca93dda3128065bbafe6fdb709beb"}, - {file = "rpds_py-0.13.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0b70c1f800059c92479dc94dda41288fd6607f741f9b1b8f89a21a86428f6383"}, - {file = "rpds_py-0.13.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3dd5fb7737224e1497c886fb3ca681c15d9c00c76171f53b3c3cc8d16ccfa7fb"}, - {file = "rpds_py-0.13.1-cp312-none-win32.whl", hash = "sha256:74be3b215a5695690a0f1a9f68b1d1c93f8caad52e23242fcb8ba56aaf060281"}, - {file = "rpds_py-0.13.1-cp312-none-win_amd64.whl", hash = "sha256:f47eef55297799956464efc00c74ae55c48a7b68236856d56183fe1ddf866205"}, - {file = "rpds_py-0.13.1-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:e4a45ba34f904062c63049a760790c6a2fa7a4cc4bd160d8af243b12371aaa05"}, - {file = "rpds_py-0.13.1-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:20147996376be452cd82cd6c17701daba69a849dc143270fa10fe067bb34562a"}, - {file = "rpds_py-0.13.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42b9535aa22ab023704cfc6533e968f7e420affe802d85e956d8a7b4c0b0b5ea"}, - {file = "rpds_py-0.13.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d4fa1eeb9bea6d9b64ac91ec51ee94cc4fc744955df5be393e1c923c920db2b0"}, - {file = "rpds_py-0.13.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2b2415d5a7b7ee96aa3a54d4775c1fec140476a17ee12353806297e900eaeddc"}, - {file = "rpds_py-0.13.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:577d40a72550eac1386b77b43836151cb61ff6700adacda2ad4d883ca5a0b6f2"}, - {file = "rpds_py-0.13.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af2d1648eb625a460eee07d3e1ea3a4a6e84a1fb3a107f6a8e95ac19f7dcce67"}, - {file = "rpds_py-0.13.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5b769396eb358d6b55dbf78f3f7ca631ca1b2fe02136faad5af74f0111b4b6b7"}, - {file = "rpds_py-0.13.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:249c8e0055ca597707d71c5ad85fd2a1c8fdb99386a8c6c257e1b47b67a9bec1"}, - {file = "rpds_py-0.13.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:fe30ef31172bdcf946502a945faad110e8fff88c32c4bec9a593df0280e64d8a"}, - {file = "rpds_py-0.13.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:2647192facf63be9ed2d7a49ceb07efe01dc6cfb083bd2cc53c418437400cb99"}, - {file = "rpds_py-0.13.1-cp38-none-win32.whl", hash = "sha256:4011d5c854aa804c833331d38a2b6f6f2fe58a90c9f615afdb7aa7cf9d31f721"}, - {file = "rpds_py-0.13.1-cp38-none-win_amd64.whl", hash = "sha256:7cfae77da92a20f56cf89739a557b76e5c6edc094f6ad5c090b9e15fbbfcd1a4"}, - {file = "rpds_py-0.13.1-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:e9be1f7c5f9673616f875299339984da9447a40e3aea927750c843d6e5e2e029"}, - {file = "rpds_py-0.13.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:839676475ac2ccd1532d36af3d10d290a2ca149b702ed464131e450a767550df"}, - {file = "rpds_py-0.13.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a90031658805c63fe488f8e9e7a88b260ea121ba3ee9cdabcece9c9ddb50da39"}, - {file = "rpds_py-0.13.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8ba9fbc5d6e36bfeb5292530321cc56c4ef3f98048647fabd8f57543c34174ec"}, - {file = "rpds_py-0.13.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:08832078767545c5ee12561ce980714e1e4c6619b5b1e9a10248de60cddfa1fd"}, - {file = "rpds_py-0.13.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19f5aa7f5078d35ed8e344bcba40f35bc95f9176dddb33fc4f2084e04289fa63"}, - {file = "rpds_py-0.13.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80080972e1d000ad0341c7cc58b6855c80bd887675f92871221451d13a975072"}, - {file = "rpds_py-0.13.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:181ee352691c4434eb1c01802e9daa5edcc1007ff15023a320e2693fed6a661b"}, - {file = "rpds_py-0.13.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d20da6b4c7aa9ee75ad0730beaba15d65157f5beeaca54a038bb968f92bf3ce3"}, - {file = "rpds_py-0.13.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:faa12a9f34671a30ea6bb027f04ec4e1fb8fa3fb3ed030893e729d4d0f3a9791"}, - {file = "rpds_py-0.13.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7cf241dbb50ea71c2e628ab2a32b5bfcd36e199152fc44e5c1edb0b773f1583e"}, - {file = "rpds_py-0.13.1-cp39-none-win32.whl", 
hash = "sha256:dab979662da1c9fbb464e310c0b06cb5f1d174d09a462553af78f0bfb3e01920"}, - {file = "rpds_py-0.13.1-cp39-none-win_amd64.whl", hash = "sha256:a2b3c79586636f1fa69a7bd59c87c15fca80c0d34b5c003d57f2f326e5276575"}, - {file = "rpds_py-0.13.1-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:5967fa631d0ed9f8511dede08bc943a9727c949d05d1efac4ac82b2938024fb7"}, - {file = "rpds_py-0.13.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8308a8d49d1354278d5c068c888a58d7158a419b2e4d87c7839ed3641498790c"}, - {file = "rpds_py-0.13.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0580faeb9def6d0beb7aa666294d5604e569c4e24111ada423cf9936768d95c"}, - {file = "rpds_py-0.13.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2da81c1492291c1a90987d76a47c7b2d310661bf7c93a9de0511e27b796a8b46"}, - {file = "rpds_py-0.13.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1c9a1dc5e898ce30e2f9c0aa57181cddd4532b22b7780549441d6429d22d3b58"}, - {file = "rpds_py-0.13.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4ae6f423cb7d1c6256b7482025ace2825728f53b7ac58bcd574de6ee9d242c2"}, - {file = "rpds_py-0.13.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc3179e0815827cf963e634095ae5715ee73a5af61defbc8d6ca79f1bdae1d1d"}, - {file = "rpds_py-0.13.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0d9f8930092558fd15c9e07198625efb698f7cc00b3dc311c83eeec2540226a8"}, - {file = "rpds_py-0.13.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:d1d388d2f5f5a6065cf83c54dd12112b7389095669ff395e632003ae8999c6b8"}, - {file = "rpds_py-0.13.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:08b335fb0c45f0a9e2478a9ece6a1bfb00b6f4c4780f9be3cf36479c5d8dd374"}, - {file = "rpds_py-0.13.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:d11afdc5992bbd7af60ed5eb519873690d921425299f51d80aa3099ed49f2bcc"}, - {file = "rpds_py-0.13.1-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:8c1f6c8df23be165eb0cb78f305483d00c6827a191e3a38394c658d5b9c80bbd"}, - {file = "rpds_py-0.13.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:528e2afaa56d815d2601b857644aeb395afe7e59212ab0659906dc29ae68d9a6"}, - {file = "rpds_py-0.13.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df2af1180b8eeececf4f819d22cc0668bfadadfd038b19a90bd2fb2ee419ec6f"}, - {file = "rpds_py-0.13.1-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:88956c993a20201744282362e3fd30962a9d86dc4f1dcf2bdb31fab27821b61f"}, - {file = "rpds_py-0.13.1-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee70ee5f4144a45a9e6169000b5b525d82673d5dab9f7587eccc92794814e7ac"}, - {file = "rpds_py-0.13.1-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c5fd099acaee2325f01281a130a39da08d885e4dedf01b84bf156ec2737d78fe"}, - {file = "rpds_py-0.13.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9656a09653b18b80764647d585750df2dff8928e03a706763ab40ec8c4872acc"}, - {file = "rpds_py-0.13.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7ba239bb37663b2b4cd08e703e79e13321512dccd8e5f0e9451d9e53a6b8509a"}, - {file = "rpds_py-0.13.1-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:3f55ae773abd96b1de25fc5c3fb356f491bd19116f8f854ba705beffc1ddc3c5"}, - {file = 
"rpds_py-0.13.1-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:f4b15a163448ec79241fb2f1bc5a8ae1a4a304f7a48d948d208a2935b26bf8a5"}, - {file = "rpds_py-0.13.1-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:1a3b2583c86bbfbf417304eeb13400ce7f8725376dc7d3efbf35dc5d7052ad48"}, - {file = "rpds_py-0.13.1-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:f1059ca9a51c936c9a8d46fbc2c9a6b4c15ab3f13a97f1ad32f024b39666ba85"}, - {file = "rpds_py-0.13.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:f55601fb58f92e4f4f1d05d80c24cb77505dc42103ddfd63ddfdc51d3da46fa2"}, - {file = "rpds_py-0.13.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fcfd5f91b882eedf8d9601bd21261d6ce0e61a8c66a7152d1f5df08d3f643ab1"}, - {file = "rpds_py-0.13.1-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6574f619e8734140d96c59bfa8a6a6e7a3336820ccd1bfd95ffa610673b650a2"}, - {file = "rpds_py-0.13.1-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a4b9d3f5c48bbe8d9e3758e498b3c34863f2c9b1ac57a4e6310183740e59c980"}, - {file = "rpds_py-0.13.1-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cdd6f8738e1f1d9df5b1603bb03cb30e442710e5672262b95d0f9fcb4edb0dab"}, - {file = "rpds_py-0.13.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8c2bf286e5d755a075e5e97ba56b3de08cccdad6b323ab0b21cc98875176b03"}, - {file = "rpds_py-0.13.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b3d4b390ee70ca9263b331ccfaf9819ee20e90dfd0201a295e23eb64a005dbef"}, - {file = "rpds_py-0.13.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:db8d0f0ad92f74feb61c4e4a71f1d573ef37c22ef4dc19cab93e501bfdad8cbd"}, - {file = "rpds_py-0.13.1-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:2abd669a39be69cdfe145927c7eb53a875b157740bf1e2d49e9619fc6f43362e"}, - {file = "rpds_py-0.13.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:2c173f529666bab8e3f948b74c6d91afa22ea147e6ebae49a48229d9020a47c4"}, - {file = "rpds_py-0.13.1.tar.gz", hash = "sha256:264f3a5906c62b9df3a00ad35f6da1987d321a053895bd85f9d5c708de5c0fbf"}, + {file = "rpds_py-0.16.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:509b617ac787cd1149600e731db9274ebbef094503ca25158e6f23edaba1ca8f"}, + {file = "rpds_py-0.16.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:413b9c17388bbd0d87a329d8e30c1a4c6e44e2bb25457f43725a8e6fe4161e9e"}, + {file = "rpds_py-0.16.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2946b120718eba9af2b4dd103affc1164a87b9e9ebff8c3e4c05d7b7a7e274e2"}, + {file = "rpds_py-0.16.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:35ae5ece284cf36464eb160880018cf6088a9ac5ddc72292a6092b6ef3f4da53"}, + {file = "rpds_py-0.16.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc6a7620ba7639a3db6213da61312cb4aa9ac0ca6e00dc1cbbdc21c2aa6eb57"}, + {file = "rpds_py-0.16.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8cb6fe8ecdfffa0e711a75c931fb39f4ba382b4b3ccedeca43f18693864fe850"}, + {file = "rpds_py-0.16.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dace7b26a13353e24613417ce2239491b40a6ad44e5776a18eaff7733488b44"}, + {file = "rpds_py-0.16.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1bdbc5fcb04a7309074de6b67fa9bc4b418ab3fc435fec1f2779a0eced688d04"}, + {file = 
"rpds_py-0.16.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f42e25c016927e2a6b1ce748112c3ab134261fc2ddc867e92d02006103e1b1b7"}, + {file = "rpds_py-0.16.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:eab36eae3f3e8e24b05748ec9acc66286662f5d25c52ad70cadab544e034536b"}, + {file = "rpds_py-0.16.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0474df4ade9a3b4af96c3d36eb81856cb9462e4c6657d4caecfd840d2a13f3c9"}, + {file = "rpds_py-0.16.2-cp310-none-win32.whl", hash = "sha256:84c5a4d1f9dd7e2d2c44097fb09fffe728629bad31eb56caf97719e55575aa82"}, + {file = "rpds_py-0.16.2-cp310-none-win_amd64.whl", hash = "sha256:2bd82db36cd70b3628c0c57d81d2438e8dd4b7b32a6a9f25f24ab0e657cb6c4e"}, + {file = "rpds_py-0.16.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:adc0c3d6fc6ae35fee3e4917628983f6ce630d513cbaad575b4517d47e81b4bb"}, + {file = "rpds_py-0.16.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ec23fcad480e77ede06cf4127a25fc440f7489922e17fc058f426b5256ee0edb"}, + {file = "rpds_py-0.16.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:07aab64e2808c3ebac2a44f67e9dc0543812b715126dfd6fe4264df527556cb6"}, + {file = "rpds_py-0.16.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a4ebb8b20bd09c5ce7884c8f0388801100f5e75e7f733b1b6613c713371feefc"}, + {file = "rpds_py-0.16.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a3d7e2ea25d3517c6d7e5a1cc3702cffa6bd18d9ef8d08d9af6717fc1c700eed"}, + {file = "rpds_py-0.16.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f28ac0e8e7242d140f99402a903a2c596ab71550272ae9247ad78f9a932b5698"}, + {file = "rpds_py-0.16.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19f00f57fdd38db4bb5ad09f9ead1b535332dbf624200e9029a45f1f35527ebb"}, + {file = "rpds_py-0.16.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3da5a4c56953bdbf6d04447c3410309616c54433146ccdb4a277b9cb499bc10e"}, + {file = "rpds_py-0.16.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ec2e1cf025b2c0f48ec17ff3e642661da7ee332d326f2e6619366ce8e221f018"}, + {file = "rpds_py-0.16.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e0441fb4fdd39a230477b2ca9be90868af64425bfe7b122b57e61e45737a653b"}, + {file = "rpds_py-0.16.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9f0350ef2fba5f34eb0c9000ea328e51b9572b403d2f7f3b19f24085f6f598e8"}, + {file = "rpds_py-0.16.2-cp311-none-win32.whl", hash = "sha256:5a80e2f83391ad0808b4646732af2a7b67550b98f0cae056cb3b40622a83dbb3"}, + {file = "rpds_py-0.16.2-cp311-none-win_amd64.whl", hash = "sha256:e04e56b4ca7a770593633556e8e9e46579d66ec2ada846b401252a2bdcf70a6d"}, + {file = "rpds_py-0.16.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:5e6caa3809e50690bd92fa490f5c38caa86082c8c3315aa438bce43786d5e90d"}, + {file = "rpds_py-0.16.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2e53b9b25cac9065328901713a7e9e3b12e4f57ef4280b370fbbf6fef2052eef"}, + {file = "rpds_py-0.16.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:af27423662f32d7501a00c5e7342f7dbd1e4a718aea7a239781357d15d437133"}, + {file = "rpds_py-0.16.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:43d4dd5fb16eb3825742bad8339d454054261ab59fed2fbac84e1d84d5aae7ba"}, + {file = "rpds_py-0.16.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e061de3b745fe611e23cd7318aec2c8b0e4153939c25c9202a5811ca911fd733"}, + {file = 
"rpds_py-0.16.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b811d182ad17ea294f2ec63c0621e7be92a1141e1012383461872cead87468f"}, + {file = "rpds_py-0.16.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5552f328eaef1a75ff129d4d0c437bf44e43f9436d3996e8eab623ea0f5fcf73"}, + {file = "rpds_py-0.16.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dcbe1f8dd179e4d69b70b1f1d9bb6fd1e7e1bdc9c9aad345cdeb332e29d40748"}, + {file = "rpds_py-0.16.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8aad80645a011abae487d356e0ceb359f4938dfb6f7bcc410027ed7ae4f7bb8b"}, + {file = "rpds_py-0.16.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b6f5549d6ed1da9bfe3631ca9483ae906f21410be2445b73443fa9f017601c6f"}, + {file = "rpds_py-0.16.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d452817e0d9c749c431a1121d56a777bd7099b720b3d1c820f1725cb40928f58"}, + {file = "rpds_py-0.16.2-cp312-none-win32.whl", hash = "sha256:888a97002e986eca10d8546e3c8b97da1d47ad8b69726dcfeb3e56348ebb28a3"}, + {file = "rpds_py-0.16.2-cp312-none-win_amd64.whl", hash = "sha256:d8dda2a806dfa4a9b795950c4f5cc56d6d6159f7d68080aedaff3bdc9b5032f5"}, + {file = "rpds_py-0.16.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:071980663c273bf3d388fe5c794c547e6f35ba3335477072c713a3176bf14a60"}, + {file = "rpds_py-0.16.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:726ac36e8a3bb8daef2fd482534cabc5e17334052447008405daca7ca04a3108"}, + {file = "rpds_py-0.16.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9e557db6a177470316c82f023e5d571811c9a4422b5ea084c85da9aa3c035fc"}, + {file = "rpds_py-0.16.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:90123853fc8b1747f80b0d354be3d122b4365a93e50fc3aacc9fb4c2488845d6"}, + {file = "rpds_py-0.16.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a61f659665a39a4d17d699ab3593d7116d66e1e2e3f03ef3fb8f484e91908808"}, + {file = "rpds_py-0.16.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc97f0640e91d7776530f06e6836c546c1c752a52de158720c4224c9e8053cad"}, + {file = "rpds_py-0.16.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44a54e99a2b9693a37ebf245937fd6e9228b4cbd64b9cc961e1f3391ec6c7391"}, + {file = "rpds_py-0.16.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bd4b677d929cf1f6bac07ad76e0f2d5de367e6373351c01a9c0a39f6b21b4a8b"}, + {file = "rpds_py-0.16.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:5ef00873303d678aaf8b0627e111fd434925ca01c657dbb2641410f1cdaef261"}, + {file = "rpds_py-0.16.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:349cb40897fd529ca15317c22c0eab67f5ac5178b5bd2c6adc86172045210acc"}, + {file = "rpds_py-0.16.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:2ddef620e70eaffebed5932ce754d539c0930f676aae6212f8e16cd9743dd365"}, + {file = "rpds_py-0.16.2-cp38-none-win32.whl", hash = "sha256:882ce6e25e585949c3d9f9abd29202367175e0aab3aba0c58c9abbb37d4982ff"}, + {file = "rpds_py-0.16.2-cp38-none-win_amd64.whl", hash = "sha256:f4bd4578e44f26997e9e56c96dedc5f1af43cc9d16c4daa29c771a00b2a26851"}, + {file = "rpds_py-0.16.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:69ac7ea9897ec201ce68b48582f3eb34a3f9924488a5432a93f177bf76a82a7e"}, + {file = "rpds_py-0.16.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a9880b4656efe36ccad41edc66789e191e5ee19a1ea8811e0aed6f69851a82f4"}, + {file = 
"rpds_py-0.16.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee94cb58c0ba2c62ee108c2b7c9131b2c66a29e82746e8fa3aa1a1effbd3dcf1"}, + {file = "rpds_py-0.16.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:24f7a2eb3866a9e91f4599851e0c8d39878a470044875c49bd528d2b9b88361c"}, + {file = "rpds_py-0.16.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ca57468da2d9a660bcf8961637c85f2fbb2aa64d9bc3f9484e30c3f9f67b1dd7"}, + {file = "rpds_py-0.16.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ccd4e400309e1f34a5095bf9249d371f0fd60f8a3a5c4a791cad7b99ce1fd38d"}, + {file = "rpds_py-0.16.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80443fe2f7b3ea3934c5d75fb0e04a5dbb4a8e943e5ff2de0dec059202b70a8b"}, + {file = "rpds_py-0.16.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4d6a9f052e72d493efd92a77f861e45bab2f6be63e37fa8ecf0c6fd1a58fedb0"}, + {file = "rpds_py-0.16.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:35953f4f2b3216421af86fd236b7c0c65935936a94ea83ddbd4904ba60757773"}, + {file = "rpds_py-0.16.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:981d135c7cdaf6cd8eadae1c950de43b976de8f09d8e800feed307140d3d6d00"}, + {file = "rpds_py-0.16.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:d0dd7ed2f16df2e129496e7fbe59a34bc2d7fc8db443a606644d069eb69cbd45"}, + {file = "rpds_py-0.16.2-cp39-none-win32.whl", hash = "sha256:703d95c75a72e902544fda08e965885525e297578317989fd15a6ce58414b41d"}, + {file = "rpds_py-0.16.2-cp39-none-win_amd64.whl", hash = "sha256:e93ec1b300acf89730cf27975ef574396bc04edecc358e9bd116fb387a123239"}, + {file = "rpds_py-0.16.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:44627b6ca7308680a70766454db5249105fa6344853af6762eaad4158a2feebe"}, + {file = "rpds_py-0.16.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:3f91df8e6dbb7360e176d1affd5fb0246d2b88d16aa5ebc7db94fd66b68b61da"}, + {file = "rpds_py-0.16.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d904c5693e08bad240f16d79305edba78276be87061c872a4a15e2c301fa2c0"}, + {file = "rpds_py-0.16.2-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:290a81cfbe4673285cdf140ec5cd1658ffbf63ab359f2b352ebe172e7cfa5bf0"}, + {file = "rpds_py-0.16.2-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b634c5ec0103c5cbebc24ebac4872b045cccb9456fc59efdcf6fe39775365bd2"}, + {file = "rpds_py-0.16.2-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a297a4d08cc67c7466c873c78039d87840fb50d05473db0ec1b7b03d179bf322"}, + {file = "rpds_py-0.16.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2e75e17bd0bb66ee34a707da677e47c14ee51ccef78ed6a263a4cc965a072a1"}, + {file = "rpds_py-0.16.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f1b9d9260e06ea017feb7172976ab261e011c1dc2f8883c7c274f6b2aabfe01a"}, + {file = "rpds_py-0.16.2-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:162d7cd9cd311c1b0ff1c55a024b8f38bd8aad1876b648821da08adc40e95734"}, + {file = "rpds_py-0.16.2-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:9b32f742ce5b57201305f19c2ef7a184b52f6f9ba6871cc042c2a61f0d6b49b8"}, + {file = "rpds_py-0.16.2-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:ac08472f41ea77cd6a5dae36ae7d4ed3951d6602833af87532b556c1b4601d63"}, + {file = 
"rpds_py-0.16.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:495a14b72bbe217f2695dcd9b5ab14d4f8066a00f5d209ed94f0aca307f85f6e"}, + {file = "rpds_py-0.16.2-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:8d6b6937ae9eac6d6c0ca3c42774d89fa311f55adff3970fb364b34abde6ed3d"}, + {file = "rpds_py-0.16.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a61226465bda9283686db8f17d02569a98e4b13c637be5a26d44aa1f1e361c2"}, + {file = "rpds_py-0.16.2-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5cf6af100ffb5c195beec11ffaa8cf8523057f123afa2944e6571d54da84cdc9"}, + {file = "rpds_py-0.16.2-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6df15846ee3fb2e6397fe25d7ca6624af9f89587f3f259d177b556fed6bebe2c"}, + {file = "rpds_py-0.16.2-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1be2f033df1b8be8c3167ba3c29d5dca425592ee31e35eac52050623afba5772"}, + {file = "rpds_py-0.16.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96f957d6ab25a78b9e7fc9749d754b98eac825a112b4e666525ce89afcbd9ed5"}, + {file = "rpds_py-0.16.2-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:088396c7c70e59872f67462fcac3ecbded5233385797021976a09ebd55961dfe"}, + {file = "rpds_py-0.16.2-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:4c46ad6356e1561f2a54f08367d1d2e70a0a1bb2db2282d2c1972c1d38eafc3b"}, + {file = "rpds_py-0.16.2-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:47713dc4fce213f5c74ca8a1f6a59b622fc1b90868deb8e8e4d993e421b4b39d"}, + {file = "rpds_py-0.16.2-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:f811771019f063bbd0aa7bb72c8a934bc13ebacb4672d712fc1639cfd314cccc"}, + {file = "rpds_py-0.16.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f19afcfc0dd0dca35694df441e9b0f95bc231b512f51bded3c3d8ca32153ec19"}, + {file = "rpds_py-0.16.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a4b682c5775d6a3d21e314c10124599976809455ee67020e8e72df1769b87bc3"}, + {file = "rpds_py-0.16.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c647ca87fc0ebe808a41de912e9a1bfef9acb85257e5d63691364ac16b81c1f0"}, + {file = "rpds_py-0.16.2-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:302bd4983bbd47063e452c38be66153760112f6d3635c7eeefc094299fa400a9"}, + {file = "rpds_py-0.16.2-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bf721ede3eb7b829e4a9b8142bd55db0bdc82902720548a703f7e601ee13bdc3"}, + {file = "rpds_py-0.16.2-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:358dafc89ce3894c7f486c615ba914609f38277ef67f566abc4c854d23b997fa"}, + {file = "rpds_py-0.16.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cad0f59ee3dc35526039f4bc23642d52d5f6616b5f687d846bfc6d0d6d486db0"}, + {file = "rpds_py-0.16.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cffa76b385dfe1e38527662a302b19ffb0e7f5cf7dd5e89186d2c94a22dd9d0c"}, + {file = "rpds_py-0.16.2-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:83640a5d7cd3bff694747d50436b8b541b5b9b9782b0c8c1688931d6ee1a1f2d"}, + {file = "rpds_py-0.16.2-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:ed99b4f7179d2111702020fd7d156e88acd533f5a7d3971353e568b6051d5c97"}, + {file = "rpds_py-0.16.2-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = 
"sha256:4022b9dc620e14f30201a8a73898a873c8e910cb642bcd2f3411123bc527f6ac"}, + {file = "rpds_py-0.16.2.tar.gz", hash = "sha256:781ef8bfc091b19960fc0142a23aedadafa826bc32b433fdfe6fd7f964d7ef44"}, ] [[package]] @@ -4018,60 +4028,60 @@ test = ["pytest"] [[package]] name = "sqlalchemy" -version = "2.0.23" +version = "2.0.24" description = "Database Abstraction Library" optional = false python-versions = ">=3.7" files = [ - {file = "SQLAlchemy-2.0.23-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:638c2c0b6b4661a4fd264f6fb804eccd392745c5887f9317feb64bb7cb03b3ea"}, - {file = "SQLAlchemy-2.0.23-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e3b5036aa326dc2df50cba3c958e29b291a80f604b1afa4c8ce73e78e1c9f01d"}, - {file = "SQLAlchemy-2.0.23-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:787af80107fb691934a01889ca8f82a44adedbf5ef3d6ad7d0f0b9ac557e0c34"}, - {file = "SQLAlchemy-2.0.23-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c14eba45983d2f48f7546bb32b47937ee2cafae353646295f0e99f35b14286ab"}, - {file = "SQLAlchemy-2.0.23-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0666031df46b9badba9bed00092a1ffa3aa063a5e68fa244acd9f08070e936d3"}, - {file = "SQLAlchemy-2.0.23-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:89a01238fcb9a8af118eaad3ffcc5dedaacbd429dc6fdc43fe430d3a941ff965"}, - {file = "SQLAlchemy-2.0.23-cp310-cp310-win32.whl", hash = "sha256:cabafc7837b6cec61c0e1e5c6d14ef250b675fa9c3060ed8a7e38653bd732ff8"}, - {file = "SQLAlchemy-2.0.23-cp310-cp310-win_amd64.whl", hash = "sha256:87a3d6b53c39cd173990de2f5f4b83431d534a74f0e2f88bd16eabb5667e65c6"}, - {file = "SQLAlchemy-2.0.23-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d5578e6863eeb998980c212a39106ea139bdc0b3f73291b96e27c929c90cd8e1"}, - {file = "SQLAlchemy-2.0.23-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:62d9e964870ea5ade4bc870ac4004c456efe75fb50404c03c5fd61f8bc669a72"}, - {file = "SQLAlchemy-2.0.23-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c80c38bd2ea35b97cbf7c21aeb129dcbebbf344ee01a7141016ab7b851464f8e"}, - {file = "SQLAlchemy-2.0.23-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75eefe09e98043cff2fb8af9796e20747ae870c903dc61d41b0c2e55128f958d"}, - {file = "SQLAlchemy-2.0.23-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd45a5b6c68357578263d74daab6ff9439517f87da63442d244f9f23df56138d"}, - {file = "SQLAlchemy-2.0.23-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a86cb7063e2c9fb8e774f77fbf8475516d270a3e989da55fa05d08089d77f8c4"}, - {file = "SQLAlchemy-2.0.23-cp311-cp311-win32.whl", hash = "sha256:b41f5d65b54cdf4934ecede2f41b9c60c9f785620416e8e6c48349ab18643855"}, - {file = "SQLAlchemy-2.0.23-cp311-cp311-win_amd64.whl", hash = "sha256:9ca922f305d67605668e93991aaf2c12239c78207bca3b891cd51a4515c72e22"}, - {file = "SQLAlchemy-2.0.23-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d0f7fb0c7527c41fa6fcae2be537ac137f636a41b4c5a4c58914541e2f436b45"}, - {file = "SQLAlchemy-2.0.23-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7c424983ab447dab126c39d3ce3be5bee95700783204a72549c3dceffe0fc8f4"}, - {file = "SQLAlchemy-2.0.23-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f508ba8f89e0a5ecdfd3761f82dda2a3d7b678a626967608f4273e0dba8f07ac"}, - {file = "SQLAlchemy-2.0.23-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6463aa765cf02b9247e38b35853923edbf2f6fd1963df88706bc1d02410a5577"}, - {file = 
"SQLAlchemy-2.0.23-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e599a51acf3cc4d31d1a0cf248d8f8d863b6386d2b6782c5074427ebb7803bda"}, - {file = "SQLAlchemy-2.0.23-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fd54601ef9cc455a0c61e5245f690c8a3ad67ddb03d3b91c361d076def0b4c60"}, - {file = "SQLAlchemy-2.0.23-cp312-cp312-win32.whl", hash = "sha256:42d0b0290a8fb0165ea2c2781ae66e95cca6e27a2fbe1016ff8db3112ac1e846"}, - {file = "SQLAlchemy-2.0.23-cp312-cp312-win_amd64.whl", hash = "sha256:227135ef1e48165f37590b8bfc44ed7ff4c074bf04dc8d6f8e7f1c14a94aa6ca"}, - {file = "SQLAlchemy-2.0.23-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:14aebfe28b99f24f8a4c1346c48bc3d63705b1f919a24c27471136d2f219f02d"}, - {file = "SQLAlchemy-2.0.23-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e983fa42164577d073778d06d2cc5d020322425a509a08119bdcee70ad856bf"}, - {file = "SQLAlchemy-2.0.23-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e0dc9031baa46ad0dd5a269cb7a92a73284d1309228be1d5935dac8fb3cae24"}, - {file = "SQLAlchemy-2.0.23-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5f94aeb99f43729960638e7468d4688f6efccb837a858b34574e01143cf11f89"}, - {file = "SQLAlchemy-2.0.23-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:63bfc3acc970776036f6d1d0e65faa7473be9f3135d37a463c5eba5efcdb24c8"}, - {file = "SQLAlchemy-2.0.23-cp37-cp37m-win32.whl", hash = "sha256:f48ed89dd11c3c586f45e9eec1e437b355b3b6f6884ea4a4c3111a3358fd0c18"}, - {file = "SQLAlchemy-2.0.23-cp37-cp37m-win_amd64.whl", hash = "sha256:1e018aba8363adb0599e745af245306cb8c46b9ad0a6fc0a86745b6ff7d940fc"}, - {file = "SQLAlchemy-2.0.23-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:64ac935a90bc479fee77f9463f298943b0e60005fe5de2aa654d9cdef46c54df"}, - {file = "SQLAlchemy-2.0.23-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c4722f3bc3c1c2fcc3702dbe0016ba31148dd6efcd2a2fd33c1b4897c6a19693"}, - {file = "SQLAlchemy-2.0.23-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4af79c06825e2836de21439cb2a6ce22b2ca129bad74f359bddd173f39582bf5"}, - {file = "SQLAlchemy-2.0.23-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:683ef58ca8eea4747737a1c35c11372ffeb84578d3aab8f3e10b1d13d66f2bc4"}, - {file = "SQLAlchemy-2.0.23-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d4041ad05b35f1f4da481f6b811b4af2f29e83af253bf37c3c4582b2c68934ab"}, - {file = "SQLAlchemy-2.0.23-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aeb397de65a0a62f14c257f36a726945a7f7bb60253462e8602d9b97b5cbe204"}, - {file = "SQLAlchemy-2.0.23-cp38-cp38-win32.whl", hash = "sha256:42ede90148b73fe4ab4a089f3126b2cfae8cfefc955c8174d697bb46210c8306"}, - {file = "SQLAlchemy-2.0.23-cp38-cp38-win_amd64.whl", hash = "sha256:964971b52daab357d2c0875825e36584d58f536e920f2968df8d581054eada4b"}, - {file = "SQLAlchemy-2.0.23-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:616fe7bcff0a05098f64b4478b78ec2dfa03225c23734d83d6c169eb41a93e55"}, - {file = "SQLAlchemy-2.0.23-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0e680527245895aba86afbd5bef6c316831c02aa988d1aad83c47ffe92655e74"}, - {file = "SQLAlchemy-2.0.23-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9585b646ffb048c0250acc7dad92536591ffe35dba624bb8fd9b471e25212a35"}, - {file = "SQLAlchemy-2.0.23-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4895a63e2c271ffc7a81ea424b94060f7b3b03b4ea0cd58ab5bb676ed02f4221"}, - {file = "SQLAlchemy-2.0.23-cp39-cp39-musllinux_1_1_aarch64.whl", hash 
= "sha256:cc1d21576f958c42d9aec68eba5c1a7d715e5fc07825a629015fe8e3b0657fb0"}, - {file = "SQLAlchemy-2.0.23-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:967c0b71156f793e6662dd839da54f884631755275ed71f1539c95bbada9aaab"}, - {file = "SQLAlchemy-2.0.23-cp39-cp39-win32.whl", hash = "sha256:0a8c6aa506893e25a04233bc721c6b6cf844bafd7250535abb56cb6cc1368884"}, - {file = "SQLAlchemy-2.0.23-cp39-cp39-win_amd64.whl", hash = "sha256:f3420d00d2cb42432c1d0e44540ae83185ccbbc67a6054dcc8ab5387add6620b"}, - {file = "SQLAlchemy-2.0.23-py3-none-any.whl", hash = "sha256:31952bbc527d633b9479f5f81e8b9dfada00b91d6baba021a869095f1a97006d"}, - {file = "SQLAlchemy-2.0.23.tar.gz", hash = "sha256:c1bda93cbbe4aa2aa0aa8655c5aeda505cd219ff3e8da91d1d329e143e4aff69"}, + {file = "SQLAlchemy-2.0.24-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5f801d85ba4753d4ed97181d003e5d3fa330ac7c4587d131f61d7f968f416862"}, + {file = "SQLAlchemy-2.0.24-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b35c35e3923ade1e7ac44e150dec29f5863513246c8bf85e2d7d313e3832bcfb"}, + {file = "SQLAlchemy-2.0.24-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d9b3fd5eca3c0b137a5e0e468e24ca544ed8ca4783e0e55341b7ed2807518ee"}, + {file = "SQLAlchemy-2.0.24-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a6209e689d0ff206c40032b6418e3cfcfc5af044b3f66e381d7f1ae301544b4"}, + {file = "SQLAlchemy-2.0.24-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:37e89d965b52e8b20571b5d44f26e2124b26ab63758bf1b7598a0e38fb2c4005"}, + {file = "SQLAlchemy-2.0.24-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c6910eb4ea90c0889f363965cd3c8c45a620ad27b526a7899f0054f6c1b9219e"}, + {file = "SQLAlchemy-2.0.24-cp310-cp310-win32.whl", hash = "sha256:d8e7e8a150e7b548e7ecd6ebb9211c37265991bf2504297d9454e01b58530fc6"}, + {file = "SQLAlchemy-2.0.24-cp310-cp310-win_amd64.whl", hash = "sha256:396f05c552f7fa30a129497c41bef5b4d1423f9af8fe4df0c3dcd38f3e3b9a14"}, + {file = "SQLAlchemy-2.0.24-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:adbd67dac4ebf54587198b63cd30c29fd7eafa8c0cab58893d9419414f8efe4b"}, + {file = "SQLAlchemy-2.0.24-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a0f611b431b84f55779cbb7157257d87b4a2876b067c77c4f36b15e44ced65e2"}, + {file = "SQLAlchemy-2.0.24-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56a0e90a959e18ac5f18c80d0cad9e90cb09322764f536e8a637426afb1cae2f"}, + {file = "SQLAlchemy-2.0.24-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6db686a1d9f183c639f7e06a2656af25d4ed438eda581de135d15569f16ace33"}, + {file = "SQLAlchemy-2.0.24-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f0cc0b486a56dff72dddae6b6bfa7ff201b0eeac29d4bc6f0e9725dc3c360d71"}, + {file = "SQLAlchemy-2.0.24-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4a1d4856861ba9e73bac05030cec5852eabfa9ef4af8e56c19d92de80d46fc34"}, + {file = "SQLAlchemy-2.0.24-cp311-cp311-win32.whl", hash = "sha256:a3c2753bf4f48b7a6024e5e8a394af49b1b12c817d75d06942cae03d14ff87b3"}, + {file = "SQLAlchemy-2.0.24-cp311-cp311-win_amd64.whl", hash = "sha256:38732884eabc64982a09a846bacf085596ff2371e4e41d20c0734f7e50525d01"}, + {file = "SQLAlchemy-2.0.24-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9f992e0f916201731993eab8502912878f02287d9f765ef843677ff118d0e0b1"}, + {file = "SQLAlchemy-2.0.24-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2587e108463cc2e5b45a896b2e7cc8659a517038026922a758bde009271aed11"}, + {file = 
"SQLAlchemy-2.0.24-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bb7cedcddffca98c40bb0becd3423e293d1fef442b869da40843d751785beb3"}, + {file = "SQLAlchemy-2.0.24-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83fa6df0e035689df89ff77a46bf8738696785d3156c2c61494acdcddc75c69d"}, + {file = "SQLAlchemy-2.0.24-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:cc889fda484d54d0b31feec409406267616536d048a450fc46943e152700bb79"}, + {file = "SQLAlchemy-2.0.24-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:57ef6f2cb8b09a042d0dbeaa46a30f2df5dd1e1eb889ba258b0d5d7d6011b81c"}, + {file = "SQLAlchemy-2.0.24-cp312-cp312-win32.whl", hash = "sha256:ea490564435b5b204d8154f0e18387b499ea3cedc1e6af3b3a2ab18291d85aa7"}, + {file = "SQLAlchemy-2.0.24-cp312-cp312-win_amd64.whl", hash = "sha256:ccfd336f96d4c9bbab0309f2a565bf15c468c2d8b2d277a32f89c5940f71fcf9"}, + {file = "SQLAlchemy-2.0.24-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9aaaaa846b10dfbe1bda71079d0e31a7e2cebedda9409fa7dba3dfed1ae803e8"}, + {file = "SQLAlchemy-2.0.24-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95bae3d38f8808d79072da25d5e5a6095f36fe1f9d6c614dd72c59ca8397c7c0"}, + {file = "SQLAlchemy-2.0.24-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a04191a7c8d77e63f6fc1e8336d6c6e93176c0c010833e74410e647f0284f5a1"}, + {file = "SQLAlchemy-2.0.24-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:acc58b7c2e40235712d857fdfc8f2bda9608f4a850d8d9ac0dd1fc80939ca6ac"}, + {file = "SQLAlchemy-2.0.24-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:00d76fe5d7cdb5d84d625ce002ce29fefba0bfd98e212ae66793fed30af73931"}, + {file = "SQLAlchemy-2.0.24-cp37-cp37m-win32.whl", hash = "sha256:29e51f848f843bbd75d74ae64ab1ab06302cb1dccd4549d1f5afe6b4a946edb2"}, + {file = "SQLAlchemy-2.0.24-cp37-cp37m-win_amd64.whl", hash = "sha256:e9d036e343a604db3f5a6c33354018a84a1d3f6dcae3673358b404286204798c"}, + {file = "SQLAlchemy-2.0.24-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9bafaa05b19dc07fa191c1966c5e852af516840b0d7b46b7c3303faf1a349bc9"}, + {file = "SQLAlchemy-2.0.24-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e69290b921b7833c04206f233d6814c60bee1d135b09f5ae5d39229de9b46cd4"}, + {file = "SQLAlchemy-2.0.24-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8398593ccc4440ce6dffcc4f47d9b2d72b9fe7112ac12ea4a44e7d4de364db1"}, + {file = "SQLAlchemy-2.0.24-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f073321a79c81e1a009218a21089f61d87ee5fa3c9563f6be94f8b41ff181812"}, + {file = "SQLAlchemy-2.0.24-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9036ebfd934813990c5b9f71f297e77ed4963720db7d7ceec5a3fdb7cd2ef6ce"}, + {file = "SQLAlchemy-2.0.24-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fcf84fe93397a0f67733aa2a38ed4eab9fc6348189fc950e656e1ea198f45668"}, + {file = "SQLAlchemy-2.0.24-cp38-cp38-win32.whl", hash = "sha256:6f5e75de91c754365c098ac08c13fdb267577ce954fa239dd49228b573ca88d7"}, + {file = "SQLAlchemy-2.0.24-cp38-cp38-win_amd64.whl", hash = "sha256:9f29c7f0f4b42337ec5a779e166946a9f86d7d56d827e771b69ecbdf426124ac"}, + {file = "SQLAlchemy-2.0.24-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:07cc423892f2ceda9ae1daa28c0355757f362ecc7505b1ab1a3d5d8dc1c44ac6"}, + {file = "SQLAlchemy-2.0.24-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2a479aa1ab199178ff1956b09ca8a0693e70f9c762875d69292d37049ffd0d8f"}, + {file = 
"SQLAlchemy-2.0.24-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b8d0e8578e7f853f45f4512b5c920f6a546cd4bed44137460b2a56534644205"}, + {file = "SQLAlchemy-2.0.24-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17e7e27af178d31b436dda6a596703b02a89ba74a15e2980c35ecd9909eea3a"}, + {file = "SQLAlchemy-2.0.24-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1ca7903d5e7db791a355b579c690684fac6304478b68efdc7f2ebdcfe770d8d7"}, + {file = "SQLAlchemy-2.0.24-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db09e424d7bb89b6215a184ca93b4f29d7f00ea261b787918a1af74143b98c06"}, + {file = "SQLAlchemy-2.0.24-cp39-cp39-win32.whl", hash = "sha256:a5cd7d30e47f87b21362beeb3e86f1b5886e7d9b0294b230dde3d3f4a1591375"}, + {file = "SQLAlchemy-2.0.24-cp39-cp39-win_amd64.whl", hash = "sha256:7ae5d44517fe81079ce75cf10f96978284a6db2642c5932a69c82dbae09f009a"}, + {file = "SQLAlchemy-2.0.24-py3-none-any.whl", hash = "sha256:8f358f5cfce04417b6ff738748ca4806fe3d3ae8040fb4e6a0c9a6973ccf9b6e"}, + {file = "SQLAlchemy-2.0.24.tar.gz", hash = "sha256:6db97656fd3fe3f7e5b077f12fa6adb5feb6e0b567a3e99f47ecf5f7ea0a09e3"}, ] [package.dependencies] @@ -4081,7 +4091,7 @@ typing-extensions = ">=4.2.0" [package.extras] aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] -aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing-extensions (!=3.10.0.1)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] asyncio = ["greenlet (!=0.4.17)"] asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] @@ -4091,7 +4101,7 @@ mssql-pyodbc = ["pyodbc"] mypy = ["mypy (>=0.910)"] mysql = ["mysqlclient (>=1.4.0)"] mysql-connector = ["mysql-connector-python"] -oracle = ["cx-oracle (>=8)"] +oracle = ["cx_oracle (>=8)"] oracle-oracledb = ["oracledb (>=1.0.1)"] postgresql = ["psycopg2 (>=2.7)"] postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] @@ -4101,7 +4111,7 @@ postgresql-psycopg2binary = ["psycopg2-binary"] postgresql-psycopg2cffi = ["psycopg2cffi"] postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] pymysql = ["pymysql"] -sqlcipher = ["sqlcipher3-binary"] +sqlcipher = ["sqlcipher3_binary"] [[package]] name = "sqlalchemy-utils" @@ -4306,22 +4316,22 @@ files = [ [[package]] name = "tornado" -version = "6.3.3" +version = "6.4" description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." 
optional = false python-versions = ">= 3.8" files = [ - {file = "tornado-6.3.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:502fba735c84450974fec147340016ad928d29f1e91f49be168c0a4c18181e1d"}, - {file = "tornado-6.3.3-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:805d507b1f588320c26f7f097108eb4023bbaa984d63176d1652e184ba24270a"}, - {file = "tornado-6.3.3-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bd19ca6c16882e4d37368e0152f99c099bad93e0950ce55e71daed74045908f"}, - {file = "tornado-6.3.3-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ac51f42808cca9b3613f51ffe2a965c8525cb1b00b7b2d56828b8045354f76a"}, - {file = "tornado-6.3.3-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:71a8db65160a3c55d61839b7302a9a400074c9c753040455494e2af74e2501f2"}, - {file = "tornado-6.3.3-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:ceb917a50cd35882b57600709dd5421a418c29ddc852da8bcdab1f0db33406b0"}, - {file = "tornado-6.3.3-cp38-abi3-musllinux_1_1_i686.whl", hash = "sha256:7d01abc57ea0dbb51ddfed477dfe22719d376119844e33c661d873bf9c0e4a16"}, - {file = "tornado-6.3.3-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:9dc4444c0defcd3929d5c1eb5706cbe1b116e762ff3e0deca8b715d14bf6ec17"}, - {file = "tornado-6.3.3-cp38-abi3-win32.whl", hash = "sha256:65ceca9500383fbdf33a98c0087cb975b2ef3bfb874cb35b8de8740cf7f41bd3"}, - {file = "tornado-6.3.3-cp38-abi3-win_amd64.whl", hash = "sha256:22d3c2fa10b5793da13c807e6fc38ff49a4f6e1e3868b0a6f4164768bb8e20f5"}, - {file = "tornado-6.3.3.tar.gz", hash = "sha256:e7d8db41c0181c80d76c982aacc442c0783a2c54d6400fe028954201a2e032fe"}, + {file = "tornado-6.4-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:02ccefc7d8211e5a7f9e8bc3f9e5b0ad6262ba2fbb683a6443ecc804e5224ce0"}, + {file = "tornado-6.4-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:27787de946a9cffd63ce5814c33f734c627a87072ec7eed71f7fc4417bb16263"}, + {file = "tornado-6.4-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7894c581ecdcf91666a0912f18ce5e757213999e183ebfc2c3fdbf4d5bd764e"}, + {file = "tornado-6.4-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e43bc2e5370a6a8e413e1e1cd0c91bedc5bd62a74a532371042a18ef19e10579"}, + {file = "tornado-6.4-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0251554cdd50b4b44362f73ad5ba7126fc5b2c2895cc62b14a1c2d7ea32f212"}, + {file = "tornado-6.4-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fd03192e287fbd0899dd8f81c6fb9cbbc69194d2074b38f384cb6fa72b80e9c2"}, + {file = "tornado-6.4-cp38-abi3-musllinux_1_1_i686.whl", hash = "sha256:88b84956273fbd73420e6d4b8d5ccbe913c65d31351b4c004ae362eba06e1f78"}, + {file = "tornado-6.4-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:71ddfc23a0e03ef2df1c1397d859868d158c8276a0603b96cf86892bff58149f"}, + {file = "tornado-6.4-cp38-abi3-win32.whl", hash = "sha256:6f8a6c77900f5ae93d8b4ae1196472d0ccc2775cc1dfdc9e7727889145c45052"}, + {file = "tornado-6.4-cp38-abi3-win_amd64.whl", hash = "sha256:10aeaa8006333433da48dec9fe417877f8bcc21f48dda8d661ae79da357b2a63"}, + {file = "tornado-6.4.tar.gz", hash = "sha256:72291fa6e6bc84e626589f1c29d90a5a6d593ef5ae68052ee2ef000dfd273dee"}, ] [[package]] @@ -4346,13 +4356,13 @@ telegram = ["requests"] [[package]] name = "traitlets" -version = "5.14.0" +version = "5.14.1" description = "Traitlets Python 
configuration system" optional = false python-versions = ">=3.8" files = [ - {file = "traitlets-5.14.0-py3-none-any.whl", hash = "sha256:f14949d23829023013c47df20b4a76ccd1a85effb786dc060f34de7948361b33"}, - {file = "traitlets-5.14.0.tar.gz", hash = "sha256:fcdaa8ac49c04dfa0ed3ee3384ef6dfdb5d6f3741502be247279407679296772"}, + {file = "traitlets-5.14.1-py3-none-any.whl", hash = "sha256:2e5a030e6eff91737c643231bfcf04a65b0132078dad75e4936700b213652e74"}, + {file = "traitlets-5.14.1.tar.gz", hash = "sha256:8585105b371a04b8316a43d5ce29c098575c2e477850b62b848b964f1444527e"}, ] [package.extras] @@ -4361,13 +4371,13 @@ test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0, [[package]] name = "types-python-dateutil" -version = "2.8.19.14" +version = "2.8.19.20240106" description = "Typing stubs for python-dateutil" optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "types-python-dateutil-2.8.19.14.tar.gz", hash = "sha256:1f4f10ac98bb8b16ade9dbee3518d9ace017821d94b057a425b069f834737f4b"}, - {file = "types_python_dateutil-2.8.19.14-py3-none-any.whl", hash = "sha256:f977b8de27787639986b4e28963263fd0e5158942b3ecef91b9335c130cb1ce9"}, + {file = "types-python-dateutil-2.8.19.20240106.tar.gz", hash = "sha256:1f8db221c3b98e6ca02ea83a58371b22c374f42ae5bbdf186db9c9a76581459f"}, + {file = "types_python_dateutil-2.8.19.20240106-py3-none-any.whl", hash = "sha256:efbbdc54590d0f16152fa103c9879c7d4a00e82078f6e2cf01769042165acaa2"}, ] [[package]] @@ -4383,13 +4393,13 @@ files = [ [[package]] name = "tzdata" -version = "2023.3" +version = "2023.4" description = "Provider of IANA time zone data" optional = false python-versions = ">=2" files = [ - {file = "tzdata-2023.3-py2.py3-none-any.whl", hash = "sha256:7e65763eef3120314099b6939b5546db7adce1e7d6f2e179e3df563c70511eda"}, - {file = "tzdata-2023.3.tar.gz", hash = "sha256:11ef1e08e54acb0d4f95bdb1be05da659673de4acbd21bf9c69e94cc5e907a3a"}, + {file = "tzdata-2023.4-py2.py3-none-any.whl", hash = "sha256:aa3ace4329eeacda5b7beb7ea08ece826c28d761cda36e747cfbf97996d39bf3"}, + {file = "tzdata-2023.4.tar.gz", hash = "sha256:dd54c94f294765522c77399649b4fefd95522479a664a0cec87f41bebc6148c9"}, ] [[package]] @@ -4478,13 +4488,13 @@ test = ["flake8 (>=2.4.0)", "isort (>=4.2.2)", "pytest (>=2.2.3)"] [[package]] name = "wcwidth" -version = "0.2.12" +version = "0.2.13" description = "Measures the displayed width of unicode strings in a terminal" optional = false python-versions = "*" files = [ - {file = "wcwidth-0.2.12-py2.py3-none-any.whl", hash = "sha256:f26ec43d96c8cbfed76a5075dac87680124fa84e0855195a6184da9c187f133c"}, - {file = "wcwidth-0.2.12.tar.gz", hash = "sha256:f01c104efdf57971bcb756f054dd58ddec5204dd15fa31d6503ea57947d97c02"}, + {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, + {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, ] [[package]] @@ -4515,13 +4525,13 @@ files = [ [[package]] name = "websocket-client" -version = "1.6.4" +version = "1.7.0" description = "WebSocket client for Python with low level API options" optional = false python-versions = ">=3.8" files = [ - {file = "websocket-client-1.6.4.tar.gz", hash = "sha256:b3324019b3c28572086c4a319f91d1dcd44e6e11cd340232978c684a7650d0df"}, - {file = "websocket_client-1.6.4-py3-none-any.whl", hash = "sha256:084072e0a7f5f347ef2ac3d8698a5e0b4ffbfcab607628cadabc650fc9a83a24"}, + {file = 
"websocket-client-1.7.0.tar.gz", hash = "sha256:10e511ea3a8c744631d3bd77e61eb17ed09304c413ad42cf6ddfa4c7787e8fe6"}, + {file = "websocket_client-1.7.0-py3-none-any.whl", hash = "sha256:f4c3d22fec12a2461427a29957ff07d35098ee2d976d3ba244e688b8b4057588"}, ] [package.extras] @@ -4651,4 +4661,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = ">=3.9.0,<3.11" -content-hash = "7123714075e813a493e59782fa8922650a7b44835cf5656d029a4130d139b636" +content-hash = "46683ac38303bb6cd683114bb24c5fa8954b8dd1815836d6fdd337f18bcaea00" diff --git a/pyproject.toml b/pyproject.toml index c7c415349..9d4cbc8ac 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -54,7 +54,7 @@ pygsheets = "^2.0.4" PyYAML = "^6.0.0" rdflib = "^6.0.0" setuptools = "^66.0.0" -synapseclient = "^3.1.1" +synapseclient = "^3.2.0" tenacity = "^8.0.1" toml = "^0.10.2" Flask = "^2.0.0" From d6ace3f86a45998ead1b817bf94a03dd606d0d27 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Fri, 12 Jan 2024 10:26:19 -0700 Subject: [PATCH 043/199] add empty row to test manifest --- tests/data/mock_manifests/Invalid_Test_Manifest.csv | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/data/mock_manifests/Invalid_Test_Manifest.csv b/tests/data/mock_manifests/Invalid_Test_Manifest.csv index ea0e7685c..1cc8995ef 100644 --- a/tests/data/mock_manifests/Invalid_Test_Manifest.csv +++ b/tests/data/mock_manifests/Invalid_Test_Manifest.csv @@ -1,4 +1,5 @@ Component,Check List,Check Regex List,Check Regex Single,Check Regex Format,Check Regex Integer,Check Num,Check Float,Check Int,Check String,Check URL,Check Match at Least,Check Match at Least values,Check Match Exactly,Check Match Exactly values,Check Recommended,Check Ages,Check Unique,Check Range,Check Date,Check NA MockComponent,"ab,cd","ab,cd,ef",a,a,5.4,6,99.65,7,valid,https://www.google.com/,1738,1738,8085,98085,,6549,str1,70,32-984,7 MockComponent,invalid list values,ab cd ef,q,m,0,c,99,5.63,94,http://googlef.com/,7163,51100,9965,71738,,32851,str1,30,notADate,9.5 -MockComponent,"ab,cd","ab,cd,ef",b,b,683902,6.5,62.3,2,valid,https://github.com/Sage-Bionetworks/schematic,8085,8085,1738,210065,,6550,str1,90,84-43-094,Not Applicable \ No newline at end of file +MockComponent,"ab,cd","ab,cd,ef",b,b,683902,6.5,62.3,2,valid,https://github.com/Sage-Bionetworks/schematic,8085,8085,1738,210065,,6550,str1,90,84-43-094,Not Applicable +,,,,,,,,,,,,,,,,,,, \ No newline at end of file From c43e4fe4e0e6d268b0f17ca13bf5a7a7d3855f8d Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Fri, 12 Jan 2024 10:26:43 -0700 Subject: [PATCH 044/199] add test for `load_df` --- tests/test_utils.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/tests/test_utils.py b/tests/test_utils.py index d8f0aff46..f266a5600 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -238,6 +238,25 @@ def test_load_schema_org(self): class TestDfUtils: + @pytest.mark.parametrize("preserve_raw_input", [True, False], ids=["Do not infer datatypes", "Infer datatypes"]) + def test_load_df(self, helpers, preserve_raw_input): + file_path = helpers.get_data_path("mock_manifests", "Invalid_Test_Manifest.csv") + + df = df_utils.load_df(file_path, preserve_raw_input=preserve_raw_input, data_model=False) + + assert df["Component"].dtype == "object" + assert df.shape[0] == 3 + + if preserve_raw_input: + assert type(df["Check NA"].iloc[0]) == 
str + assert type(df["Check NA"].iloc[1]) == str + assert type(df["Check NA"].iloc[2]) == str + else: + assert type(df["Check NA"].iloc[0]) == np.int64 + assert type(df["Check NA"].iloc[1]) == float + assert type(df["Check NA"].iloc[2]) == str + + def test_update_df_col_present(self, helpers): synapse_manifest = helpers.get_data_frame( From f7416fc0f5eeffd52551bc3423f03e27e0f933eb Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Fri, 12 Jan 2024 14:15:09 -0700 Subject: [PATCH 045/199] resolve empty line bug --- schematic/utils/df_utils.py | 23 ++++++++++------------- 1 file changed, 10 insertions(+), 13 deletions(-) diff --git a/schematic/utils/df_utils.py b/schematic/utils/df_utils.py index 789fb4881..021550c55 100644 --- a/schematic/utils/df_utils.py +++ b/schematic/utils/df_utils.py @@ -30,15 +30,15 @@ def load_df(file_path, preserve_raw_input=True, data_model=False, **load_args): #Read CSV to df as type specified in kwargs org_df = pd.read_csv(file_path, keep_default_na = True, encoding='utf8', **load_args) - + + #only trim if not data model csv + if not data_model: + org_df=trim_commas_df(org_df) + # If type inference not allowed: trim and return - if preserve_raw_input: - #only trim if not data model csv - if not data_model: - org_df=trim_commas_df(org_df) - - # log manifest load and processing time - logger.debug(f"Load Elapsed time {perf_counter()-t_load_df}") + if preserve_raw_input: + # log manifest load and processing time + logger.debug(f"Load Elapsed time {perf_counter()-t_load_df}") return org_df # If type inferences is allowed: infer types, trim, and return @@ -67,13 +67,10 @@ def load_df(file_path, preserve_raw_input=True, data_model=False, **load_args): for col in org_df.columns: float_df[col]=pd.to_numeric(float_df[col], errors='coerce') # replace values that couldn't be converted to float with the original str values - float_df[col].fillna(org_df[col][float_df[col].isna()],inplace=True) - - # Trim nans and empty rows and columns - processed_df = trim_commas_df(float_df) + float_df[col].fillna(org_df[col][float_df[col].isna()], inplace=True) # Store values that were converted to type int in the final dataframe - processed_df=processed_df.mask(ints_tf_df, other = ints) + processed_df=float_df.mask(ints_tf_df, other = ints) # log manifest load and processing time logger.debug(f"Load Elapsed time {perf_counter()-t_load_df}") From 8954155bc068ae1036611346b8fca0077a6c00ed Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Fri, 12 Jan 2024 14:18:46 -0700 Subject: [PATCH 046/199] update test assertions --- tests/test_utils.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/tests/test_utils.py b/tests/test_utils.py index f266a5600..2894ea479 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -248,14 +248,13 @@ def test_load_df(self, helpers, preserve_raw_input): assert df.shape[0] == 3 if preserve_raw_input: - assert type(df["Check NA"].iloc[0]) == str - assert type(df["Check NA"].iloc[1]) == str - assert type(df["Check NA"].iloc[2]) == str + assert isinstance(df["Check NA"].iloc[0], str) + assert isinstance(df["Check NA"].iloc[1], str) + assert isinstance(df["Check NA"].iloc[2], str) else: - assert type(df["Check NA"].iloc[0]) == np.int64 - assert type(df["Check NA"].iloc[1]) == float - assert type(df["Check NA"].iloc[2]) == str - + assert isinstance(df["Check NA"].iloc[0], np.int64) + assert isinstance(df["Check NA"].iloc[1], float) + assert 
isinstance(df["Check NA"].iloc[2], str) def test_update_df_col_present(self, helpers): From f15115d0cdcf09aff655758f7d29cc4f2a4bb8a9 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Tue, 16 Jan 2024 09:38:24 -0700 Subject: [PATCH 047/199] add type annotations --- schematic/utils/df_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/schematic/utils/df_utils.py b/schematic/utils/df_utils.py index 021550c55..d69a298ae 100644 --- a/schematic/utils/df_utils.py +++ b/schematic/utils/df_utils.py @@ -10,7 +10,7 @@ logger = logging.getLogger(__name__) -def load_df(file_path, preserve_raw_input=True, data_model=False, **load_args): +def load_df(file_path: str, preserve_raw_input: bool = True, data_model: bool = False, **load_args): """ Universal function to load CSVs and return DataFrames Parses string entries to convert as appropriate to type int, float, and pandas timestamp From 1fabaec14eb4963d7112df04c8908c051974fa77 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Tue, 16 Jan 2024 09:40:31 -0700 Subject: [PATCH 048/199] add return type hint and clean --- schematic/utils/df_utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/schematic/utils/df_utils.py b/schematic/utils/df_utils.py index d69a298ae..8ea061b8c 100644 --- a/schematic/utils/df_utils.py +++ b/schematic/utils/df_utils.py @@ -10,7 +10,7 @@ logger = logging.getLogger(__name__) -def load_df(file_path: str, preserve_raw_input: bool = True, data_model: bool = False, **load_args): +def load_df(file_path: str, preserve_raw_input: bool = True, data_model: bool = False, **load_args) -> pd.DataFrame: """ Universal function to load CSVs and return DataFrames Parses string entries to convert as appropriate to type int, float, and pandas timestamp @@ -36,7 +36,7 @@ def load_df(file_path: str, preserve_raw_input: bool = True, data_model: bool = org_df=trim_commas_df(org_df) # If type inference not allowed: trim and return - if preserve_raw_input: + if preserve_raw_input: # log manifest load and processing time logger.debug(f"Load Elapsed time {perf_counter()-t_load_df}") return org_df From bee19e983bee09829fac3c4fc8ef954df77563cf Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Thu, 11 Jan 2024 13:54:25 -0700 Subject: [PATCH 049/199] update `pandas` and `schematic_db` deps --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 9d4cbc8ac..4a6a7be21 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -49,7 +49,7 @@ jsonschema = "^4.0.0" networkx = ">=2.2.8" numpy = "^1.21.1" oauth2client = "^4.1.0" # Specified because of bug in version ^4.0.0 -pandas = "^1.3.1" +pandas = "^2.0.0" pygsheets = "^2.0.4" PyYAML = "^6.0.0" rdflib = "^6.0.0" @@ -70,7 +70,7 @@ Flask-Cors = "^3.0.10" pdoc = "^12.2.0" dateparser = "^1.1.4" pandarallel = "^1.6.4" -schematic-db = {version = "0.0.dev33", extras = ["synapse"]} +schematic-db = {version = "0.0.34", extras = ["synapse"]} pyopenssl = "^23.0.0" typing-extensions = "<4.6.0" From 8ad608552787626b341407abc7989224c527845d Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Thu, 11 Jan 2024 15:06:04 -0700 Subject: [PATCH 050/199] regen `.lock` file --- poetry.lock | 97 +++++++++++++++++++++++++++++++---------------------- 1 file changed, 57 insertions(+), 40 deletions(-) diff --git a/poetry.lock b/poetry.lock index 
9f7f14539..cb25afc9f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2494,50 +2494,67 @@ doc = ["mkdocs-material"] [[package]] name = "pandas" -version = "1.5.3" +version = "2.1.4" description = "Powerful data structures for data analysis, time series, and statistics" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "pandas-1.5.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3749077d86e3a2f0ed51367f30bf5b82e131cc0f14260c4d3e499186fccc4406"}, - {file = "pandas-1.5.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:972d8a45395f2a2d26733eb8d0f629b2f90bebe8e8eddbb8829b180c09639572"}, - {file = "pandas-1.5.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:50869a35cbb0f2e0cd5ec04b191e7b12ed688874bd05dd777c19b28cbea90996"}, - {file = "pandas-1.5.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3ac844a0fe00bfaeb2c9b51ab1424e5c8744f89860b138434a363b1f620f354"}, - {file = "pandas-1.5.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a0a56cef15fd1586726dace5616db75ebcfec9179a3a55e78f72c5639fa2a23"}, - {file = "pandas-1.5.3-cp310-cp310-win_amd64.whl", hash = "sha256:478ff646ca42b20376e4ed3fa2e8d7341e8a63105586efe54fa2508ee087f328"}, - {file = "pandas-1.5.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6973549c01ca91ec96199e940495219c887ea815b2083722821f1d7abfa2b4dc"}, - {file = "pandas-1.5.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c39a8da13cede5adcd3be1182883aea1c925476f4e84b2807a46e2775306305d"}, - {file = "pandas-1.5.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f76d097d12c82a535fda9dfe5e8dd4127952b45fea9b0276cb30cca5ea313fbc"}, - {file = "pandas-1.5.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e474390e60ed609cec869b0da796ad94f420bb057d86784191eefc62b65819ae"}, - {file = "pandas-1.5.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f2b952406a1588ad4cad5b3f55f520e82e902388a6d5a4a91baa8d38d23c7f6"}, - {file = "pandas-1.5.3-cp311-cp311-win_amd64.whl", hash = "sha256:bc4c368f42b551bf72fac35c5128963a171b40dce866fb066540eeaf46faa003"}, - {file = "pandas-1.5.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:14e45300521902689a81f3f41386dc86f19b8ba8dd5ac5a3c7010ef8d2932813"}, - {file = "pandas-1.5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9842b6f4b8479e41968eced654487258ed81df7d1c9b7b870ceea24ed9459b31"}, - {file = "pandas-1.5.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:26d9c71772c7afb9d5046e6e9cf42d83dd147b5cf5bcb9d97252077118543792"}, - {file = "pandas-1.5.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fbcb19d6fceb9e946b3e23258757c7b225ba450990d9ed63ccceeb8cae609f7"}, - {file = "pandas-1.5.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:565fa34a5434d38e9d250af3c12ff931abaf88050551d9fbcdfafca50d62babf"}, - {file = "pandas-1.5.3-cp38-cp38-win32.whl", hash = "sha256:87bd9c03da1ac870a6d2c8902a0e1fd4267ca00f13bc494c9e5a9020920e1d51"}, - {file = "pandas-1.5.3-cp38-cp38-win_amd64.whl", hash = "sha256:41179ce559943d83a9b4bbacb736b04c928b095b5f25dd2b7389eda08f46f373"}, - {file = "pandas-1.5.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c74a62747864ed568f5a82a49a23a8d7fe171d0c69038b38cedf0976831296fa"}, - {file = "pandas-1.5.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c4c00e0b0597c8e4f59e8d461f797e5d70b4d025880516a8261b2817c47759ee"}, - {file = "pandas-1.5.3-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:a50d9a4336a9621cab7b8eb3fb11adb82de58f9b91d84c2cd526576b881a0c5a"}, - {file = "pandas-1.5.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd05f7783b3274aa206a1af06f0ceed3f9b412cf665b7247eacd83be41cf7bf0"}, - {file = "pandas-1.5.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f69c4029613de47816b1bb30ff5ac778686688751a5e9c99ad8c7031f6508e5"}, - {file = "pandas-1.5.3-cp39-cp39-win32.whl", hash = "sha256:7cec0bee9f294e5de5bbfc14d0573f65526071029d036b753ee6507d2a21480a"}, - {file = "pandas-1.5.3-cp39-cp39-win_amd64.whl", hash = "sha256:dfd681c5dc216037e0b0a2c821f5ed99ba9f03ebcf119c7dac0e9a7b960b9ec9"}, - {file = "pandas-1.5.3.tar.gz", hash = "sha256:74a3fd7e5a7ec052f183273dc7b0acd3a863edf7520f5d3a1765c04ffdb3b0b1"}, + {file = "pandas-2.1.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bdec823dc6ec53f7a6339a0e34c68b144a7a1fd28d80c260534c39c62c5bf8c9"}, + {file = "pandas-2.1.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:294d96cfaf28d688f30c918a765ea2ae2e0e71d3536754f4b6de0ea4a496d034"}, + {file = "pandas-2.1.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b728fb8deba8905b319f96447a27033969f3ea1fea09d07d296c9030ab2ed1d"}, + {file = "pandas-2.1.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00028e6737c594feac3c2df15636d73ace46b8314d236100b57ed7e4b9ebe8d9"}, + {file = "pandas-2.1.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:426dc0f1b187523c4db06f96fb5c8d1a845e259c99bda74f7de97bd8a3bb3139"}, + {file = "pandas-2.1.4-cp310-cp310-win_amd64.whl", hash = "sha256:f237e6ca6421265643608813ce9793610ad09b40154a3344a088159590469e46"}, + {file = "pandas-2.1.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b7d852d16c270e4331f6f59b3e9aa23f935f5c4b0ed2d0bc77637a8890a5d092"}, + {file = "pandas-2.1.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bd7d5f2f54f78164b3d7a40f33bf79a74cdee72c31affec86bfcabe7e0789821"}, + {file = "pandas-2.1.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0aa6e92e639da0d6e2017d9ccff563222f4eb31e4b2c3cf32a2a392fc3103c0d"}, + {file = "pandas-2.1.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d797591b6846b9db79e65dc2d0d48e61f7db8d10b2a9480b4e3faaddc421a171"}, + {file = "pandas-2.1.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d2d3e7b00f703aea3945995ee63375c61b2e6aa5aa7871c5d622870e5e137623"}, + {file = "pandas-2.1.4-cp311-cp311-win_amd64.whl", hash = "sha256:dc9bf7ade01143cddc0074aa6995edd05323974e6e40d9dbde081021ded8510e"}, + {file = "pandas-2.1.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:482d5076e1791777e1571f2e2d789e940dedd927325cc3cb6d0800c6304082f6"}, + {file = "pandas-2.1.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8a706cfe7955c4ca59af8c7a0517370eafbd98593155b48f10f9811da440248b"}, + {file = "pandas-2.1.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0513a132a15977b4a5b89aabd304647919bc2169eac4c8536afb29c07c23540"}, + {file = "pandas-2.1.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9f17f2b6fc076b2a0078862547595d66244db0f41bf79fc5f64a5c4d635bead"}, + {file = "pandas-2.1.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:45d63d2a9b1b37fa6c84a68ba2422dc9ed018bdaa668c7f47566a01188ceeec1"}, + {file = "pandas-2.1.4-cp312-cp312-win_amd64.whl", hash = "sha256:f69b0c9bb174a2342818d3e2778584e18c740d56857fc5cdb944ec8bbe4082cf"}, + {file = 
"pandas-2.1.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3f06bda01a143020bad20f7a85dd5f4a1600112145f126bc9e3e42077c24ef34"}, + {file = "pandas-2.1.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ab5796839eb1fd62a39eec2916d3e979ec3130509930fea17fe6f81e18108f6a"}, + {file = "pandas-2.1.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edbaf9e8d3a63a9276d707b4d25930a262341bca9874fcb22eff5e3da5394732"}, + {file = "pandas-2.1.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ebfd771110b50055712b3b711b51bee5d50135429364d0498e1213a7adc2be8"}, + {file = "pandas-2.1.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8ea107e0be2aba1da619cc6ba3f999b2bfc9669a83554b1904ce3dd9507f0860"}, + {file = "pandas-2.1.4-cp39-cp39-win_amd64.whl", hash = "sha256:d65148b14788b3758daf57bf42725caa536575da2b64df9964c563b015230984"}, + {file = "pandas-2.1.4.tar.gz", hash = "sha256:fcb68203c833cc735321512e13861358079a96c174a61f5116a1de89c58c0ef7"}, ] [package.dependencies] -numpy = [ - {version = ">=1.20.3", markers = "python_version < \"3.10\""}, - {version = ">=1.21.0", markers = "python_version >= \"3.10\""}, -] -python-dateutil = ">=2.8.1" +numpy = {version = ">=1.22.4,<2", markers = "python_version < \"3.11\""} +python-dateutil = ">=2.8.2" pytz = ">=2020.1" +tzdata = ">=2022.1" [package.extras] -test = ["hypothesis (>=5.5.3)", "pytest (>=6.0)", "pytest-xdist (>=1.31)"] +all = ["PyQt5 (>=5.15.6)", "SQLAlchemy (>=1.4.36)", "beautifulsoup4 (>=4.11.1)", "bottleneck (>=1.3.4)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=0.8.1)", "fsspec (>=2022.05.0)", "gcsfs (>=2022.05.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.8.0)", "matplotlib (>=3.6.1)", "numba (>=0.55.2)", "numexpr (>=2.8.0)", "odfpy (>=1.4.1)", "openpyxl (>=3.0.10)", "pandas-gbq (>=0.17.5)", "psycopg2 (>=2.9.3)", "pyarrow (>=7.0.0)", "pymysql (>=1.0.2)", "pyreadstat (>=1.1.5)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "pyxlsb (>=1.0.9)", "qtpy (>=2.2.0)", "s3fs (>=2022.05.0)", "scipy (>=1.8.1)", "tables (>=3.7.0)", "tabulate (>=0.8.10)", "xarray (>=2022.03.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.3)", "zstandard (>=0.17.0)"] +aws = ["s3fs (>=2022.05.0)"] +clipboard = ["PyQt5 (>=5.15.6)", "qtpy (>=2.2.0)"] +compression = ["zstandard (>=0.17.0)"] +computation = ["scipy (>=1.8.1)", "xarray (>=2022.03.0)"] +consortium-standard = ["dataframe-api-compat (>=0.1.7)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.0.10)", "pyxlsb (>=1.0.9)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.3)"] +feather = ["pyarrow (>=7.0.0)"] +fss = ["fsspec (>=2022.05.0)"] +gcp = ["gcsfs (>=2022.05.0)", "pandas-gbq (>=0.17.5)"] +hdf5 = ["tables (>=3.7.0)"] +html = ["beautifulsoup4 (>=4.11.1)", "html5lib (>=1.1)", "lxml (>=4.8.0)"] +mysql = ["SQLAlchemy (>=1.4.36)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.8.10)"] +parquet = ["pyarrow (>=7.0.0)"] +performance = ["bottleneck (>=1.3.4)", "numba (>=0.55.2)", "numexpr (>=2.8.0)"] +plot = ["matplotlib (>=3.6.1)"] +postgresql = ["SQLAlchemy (>=1.4.36)", "psycopg2 (>=2.9.3)"] +spss = ["pyreadstat (>=1.1.5)"] +sql-other = ["SQLAlchemy (>=1.4.36)"] +test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.8.0)"] [[package]] name = "pandocfilters" @@ -3710,20 +3727,20 @@ files = [ [[package]] name = "schematic-db" -version = "0.0.dev33" +version = "0.0.34" description = "" optional = false python-versions = ">=3.9,<4.0" files = [ - {file = 
"schematic_db-0.0.dev33-py3-none-any.whl", hash = "sha256:9a274b038e5d3f382fd22300350fb4c02e0f147e5846808b324714fb30bd9e75"}, - {file = "schematic_db-0.0.dev33.tar.gz", hash = "sha256:01cadedbfa10915727c0bdf88c9184353db1294d8c941e69a824d16f12bb4701"}, + {file = "schematic_db-0.0.34-py3-none-any.whl", hash = "sha256:fb9194ee9085c12e4044b6f4bdb934807767fbf80e3d792c5e8a65c0a6e43e95"}, + {file = "schematic_db-0.0.34.tar.gz", hash = "sha256:29e1fb752e330c3319670d4c562f7f54d554c23c47d9ed19910a107438c70169"}, ] [package.dependencies] deprecation = ">=2.1.0,<3.0.0" interrogate = ">=1.5.0,<2.0.0" networkx = ">=2.8.6,<3.0.0" -pandas = "1.5.3" +pandas = ">=2.0.0,<3.0.0" pydantic = ">=1.10.7,<2.0.0" PyYAML = ">=6.0,<7.0" requests = ">=2.28.1,<3.0.0" @@ -4661,4 +4678,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = ">=3.9.0,<3.11" -content-hash = "46683ac38303bb6cd683114bb24c5fa8954b8dd1815836d6fdd337f18bcaea00" +content-hash = "65fd6ec0494aecb3e9b89b59479440ff24be22c8867df6718ddd16eac3e7bdec" From babcf71057da2f4c1fa1a37eb23defa2afd067a6 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Tue, 16 Jan 2024 10:04:29 -0700 Subject: [PATCH 051/199] regen `.lock` file --- poetry.lock | 288 +++++++++++++++++++++++++--------------------------- 1 file changed, 139 insertions(+), 149 deletions(-) diff --git a/poetry.lock b/poetry.lock index cb25afc9f..9eb41743d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1007,13 +1007,13 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-api-python-client" -version = "2.113.0" +version = "2.114.0" description = "Google API Client Library for Python" optional = false python-versions = ">=3.7" files = [ - {file = "google-api-python-client-2.113.0.tar.gz", hash = "sha256:bcffbc8ffbad631f699cf85aa91993f3dc03060b234ca9e6e2f9135028bd9b52"}, - {file = "google_api_python_client-2.113.0-py2.py3-none-any.whl", hash = "sha256:25659d488df6c8a69615b2a510af0e63b4c47ab2cb87d71c1e13b28715906e27"}, + {file = "google-api-python-client-2.114.0.tar.gz", hash = "sha256:e041bbbf60e682261281e9d64b4660035f04db1cccba19d1d68eebc24d1465ed"}, + {file = "google_api_python_client-2.114.0-py2.py3-none-any.whl", hash = "sha256:690e0bb67d70ff6dea4e8a5d3738639c105a478ac35da153d3b2a384064e9e1a"}, ] [package.dependencies] @@ -1361,13 +1361,13 @@ tests = ["pytest", "pytest-cov", "pytest-mock"] [[package]] name = "ipykernel" -version = "6.28.0" +version = "6.29.0" description = "IPython Kernel for Jupyter" optional = false python-versions = ">=3.8" files = [ - {file = "ipykernel-6.28.0-py3-none-any.whl", hash = "sha256:c6e9a9c63a7f4095c0a22a79f765f079f9ec7be4f2430a898ddea889e8665661"}, - {file = "ipykernel-6.28.0.tar.gz", hash = "sha256:69c11403d26de69df02225916f916b37ea4b9af417da0a8c827f84328d88e5f3"}, + {file = "ipykernel-6.29.0-py3-none-any.whl", hash = "sha256:076663ca68492576f051e4af7720d33f34383e655f2be0d544c8b1c9de915b2f"}, + {file = "ipykernel-6.29.0.tar.gz", hash = "sha256:b5dd3013cab7b330df712891c96cd1ab868c27a7159e606f762015e9bf8ceb3f"}, ] [package.dependencies] @@ -1390,7 +1390,7 @@ cov = ["coverage[toml]", "curio", "matplotlib", "pytest-cov", "trio"] docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "trio"] pyqt5 = ["pyqt5"] pyside6 = ["pyside6"] -test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio", "pytest-cov", 
"pytest-timeout"] +test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (==0.23.2)", "pytest-cov", "pytest-timeout"] [[package]] name = "ipython" @@ -1595,13 +1595,13 @@ files = [ [[package]] name = "jsonschema" -version = "4.20.0" +version = "4.21.0" description = "An implementation of JSON Schema validation for Python" optional = false python-versions = ">=3.8" files = [ - {file = "jsonschema-4.20.0-py3-none-any.whl", hash = "sha256:ed6231f0429ecf966f5bc8dfef245998220549cbbcf140f913b7464c52c3b6b3"}, - {file = "jsonschema-4.20.0.tar.gz", hash = "sha256:4f614fd46d8d61258610998997743ec5492a648b33cf478c1ddc23ed4598a5fa"}, + {file = "jsonschema-4.21.0-py3-none-any.whl", hash = "sha256:70a09719d375c0a2874571b363c8a24be7df8071b80c9aa76bc4551e7297c63c"}, + {file = "jsonschema-4.21.0.tar.gz", hash = "sha256:3ba18e27f7491ea4a1b22edce00fb820eec968d397feb3f9cb61d5894bb38167"}, ] [package.dependencies] @@ -1721,13 +1721,13 @@ jupyter-server = ">=1.1.2" [[package]] name = "jupyter-server" -version = "2.12.4" +version = "2.12.5" description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." optional = false python-versions = ">=3.8" files = [ - {file = "jupyter_server-2.12.4-py3-none-any.whl", hash = "sha256:a125ae18a60de568f78f55c84dd58759901a18ef279abf0418ac220653ca1320"}, - {file = "jupyter_server-2.12.4.tar.gz", hash = "sha256:41f4a1e6b912cc24a7c6c694851b37d3d8412b180f43d72315fe422cb2b85cc2"}, + {file = "jupyter_server-2.12.5-py3-none-any.whl", hash = "sha256:184a0f82809a8522777cfb6b760ab6f4b1bb398664c5860a27cec696cb884923"}, + {file = "jupyter_server-2.12.5.tar.gz", hash = "sha256:0edb626c94baa22809be1323f9770cf1c00a952b17097592e40d03e6a3951689"}, ] [package.dependencies] @@ -2137,13 +2137,13 @@ test = ["flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "nbconvert (>= [[package]] name = "nbconvert" -version = "7.14.1" +version = "7.14.2" description = "Converting Jupyter Notebooks" optional = false python-versions = ">=3.8" files = [ - {file = "nbconvert-7.14.1-py3-none-any.whl", hash = "sha256:aa83e3dd27ea38d0c1d908e3ce9518d15fa908dd30521b6d5040bd23f33fffb0"}, - {file = "nbconvert-7.14.1.tar.gz", hash = "sha256:20cba10e0448dc76b3bebfe1adf923663e3b98338daf77b97b42511ef5a88618"}, + {file = "nbconvert-7.14.2-py3-none-any.whl", hash = "sha256:db28590cef90f7faf2ebbc71acd402cbecf13d29176df728c0a9025a49345ea1"}, + {file = "nbconvert-7.14.2.tar.gz", hash = "sha256:a7f8808fd4e082431673ac538400218dd45efd076fbeb07cc6e5aa5a3a4e949e"}, ] [package.dependencies] @@ -2196,13 +2196,13 @@ test = ["pep440", "pre-commit", "pytest", "testpath"] [[package]] name = "nest-asyncio" -version = "1.5.8" +version = "1.5.9" description = "Patch asyncio to allow nested event loops" optional = false python-versions = ">=3.5" files = [ - {file = "nest_asyncio-1.5.8-py3-none-any.whl", hash = "sha256:accda7a339a70599cb08f9dd09a67e0c2ef8d8d6f4c07f96ab203f2ae254e48d"}, - {file = "nest_asyncio-1.5.8.tar.gz", hash = "sha256:25aa2ca0d2a5b5531956b9e273b45cf664cae2b145101d73b86b199978d48fdb"}, + {file = "nest_asyncio-1.5.9-py3-none-any.whl", hash = "sha256:61ec07ef052e72e3de22045b81b2cc7d71fceb04c568ba0b2e4b2f9f5231bec2"}, + {file = "nest_asyncio-1.5.9.tar.gz", hash = "sha256:d1e1144e9c6e3e6392e0fcf5211cb1c8374b5648a98f1ebe48e5336006b41907"}, ] [[package]] @@ -3528,110 +3528,110 @@ files = [ [[package]] name = "rpds-py" -version = "0.16.2" +version = "0.17.1" description = "Python bindings to Rust's persistent data structures (rpds)" optional = false python-versions 
= ">=3.8" files = [ - {file = "rpds_py-0.16.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:509b617ac787cd1149600e731db9274ebbef094503ca25158e6f23edaba1ca8f"}, - {file = "rpds_py-0.16.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:413b9c17388bbd0d87a329d8e30c1a4c6e44e2bb25457f43725a8e6fe4161e9e"}, - {file = "rpds_py-0.16.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2946b120718eba9af2b4dd103affc1164a87b9e9ebff8c3e4c05d7b7a7e274e2"}, - {file = "rpds_py-0.16.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:35ae5ece284cf36464eb160880018cf6088a9ac5ddc72292a6092b6ef3f4da53"}, - {file = "rpds_py-0.16.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc6a7620ba7639a3db6213da61312cb4aa9ac0ca6e00dc1cbbdc21c2aa6eb57"}, - {file = "rpds_py-0.16.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8cb6fe8ecdfffa0e711a75c931fb39f4ba382b4b3ccedeca43f18693864fe850"}, - {file = "rpds_py-0.16.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dace7b26a13353e24613417ce2239491b40a6ad44e5776a18eaff7733488b44"}, - {file = "rpds_py-0.16.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1bdbc5fcb04a7309074de6b67fa9bc4b418ab3fc435fec1f2779a0eced688d04"}, - {file = "rpds_py-0.16.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f42e25c016927e2a6b1ce748112c3ab134261fc2ddc867e92d02006103e1b1b7"}, - {file = "rpds_py-0.16.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:eab36eae3f3e8e24b05748ec9acc66286662f5d25c52ad70cadab544e034536b"}, - {file = "rpds_py-0.16.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0474df4ade9a3b4af96c3d36eb81856cb9462e4c6657d4caecfd840d2a13f3c9"}, - {file = "rpds_py-0.16.2-cp310-none-win32.whl", hash = "sha256:84c5a4d1f9dd7e2d2c44097fb09fffe728629bad31eb56caf97719e55575aa82"}, - {file = "rpds_py-0.16.2-cp310-none-win_amd64.whl", hash = "sha256:2bd82db36cd70b3628c0c57d81d2438e8dd4b7b32a6a9f25f24ab0e657cb6c4e"}, - {file = "rpds_py-0.16.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:adc0c3d6fc6ae35fee3e4917628983f6ce630d513cbaad575b4517d47e81b4bb"}, - {file = "rpds_py-0.16.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ec23fcad480e77ede06cf4127a25fc440f7489922e17fc058f426b5256ee0edb"}, - {file = "rpds_py-0.16.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:07aab64e2808c3ebac2a44f67e9dc0543812b715126dfd6fe4264df527556cb6"}, - {file = "rpds_py-0.16.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a4ebb8b20bd09c5ce7884c8f0388801100f5e75e7f733b1b6613c713371feefc"}, - {file = "rpds_py-0.16.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a3d7e2ea25d3517c6d7e5a1cc3702cffa6bd18d9ef8d08d9af6717fc1c700eed"}, - {file = "rpds_py-0.16.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f28ac0e8e7242d140f99402a903a2c596ab71550272ae9247ad78f9a932b5698"}, - {file = "rpds_py-0.16.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19f00f57fdd38db4bb5ad09f9ead1b535332dbf624200e9029a45f1f35527ebb"}, - {file = "rpds_py-0.16.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3da5a4c56953bdbf6d04447c3410309616c54433146ccdb4a277b9cb499bc10e"}, - {file = "rpds_py-0.16.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ec2e1cf025b2c0f48ec17ff3e642661da7ee332d326f2e6619366ce8e221f018"}, - {file = "rpds_py-0.16.2-cp311-cp311-musllinux_1_2_i686.whl", 
hash = "sha256:e0441fb4fdd39a230477b2ca9be90868af64425bfe7b122b57e61e45737a653b"}, - {file = "rpds_py-0.16.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9f0350ef2fba5f34eb0c9000ea328e51b9572b403d2f7f3b19f24085f6f598e8"}, - {file = "rpds_py-0.16.2-cp311-none-win32.whl", hash = "sha256:5a80e2f83391ad0808b4646732af2a7b67550b98f0cae056cb3b40622a83dbb3"}, - {file = "rpds_py-0.16.2-cp311-none-win_amd64.whl", hash = "sha256:e04e56b4ca7a770593633556e8e9e46579d66ec2ada846b401252a2bdcf70a6d"}, - {file = "rpds_py-0.16.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:5e6caa3809e50690bd92fa490f5c38caa86082c8c3315aa438bce43786d5e90d"}, - {file = "rpds_py-0.16.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2e53b9b25cac9065328901713a7e9e3b12e4f57ef4280b370fbbf6fef2052eef"}, - {file = "rpds_py-0.16.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:af27423662f32d7501a00c5e7342f7dbd1e4a718aea7a239781357d15d437133"}, - {file = "rpds_py-0.16.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:43d4dd5fb16eb3825742bad8339d454054261ab59fed2fbac84e1d84d5aae7ba"}, - {file = "rpds_py-0.16.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e061de3b745fe611e23cd7318aec2c8b0e4153939c25c9202a5811ca911fd733"}, - {file = "rpds_py-0.16.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b811d182ad17ea294f2ec63c0621e7be92a1141e1012383461872cead87468f"}, - {file = "rpds_py-0.16.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5552f328eaef1a75ff129d4d0c437bf44e43f9436d3996e8eab623ea0f5fcf73"}, - {file = "rpds_py-0.16.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dcbe1f8dd179e4d69b70b1f1d9bb6fd1e7e1bdc9c9aad345cdeb332e29d40748"}, - {file = "rpds_py-0.16.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8aad80645a011abae487d356e0ceb359f4938dfb6f7bcc410027ed7ae4f7bb8b"}, - {file = "rpds_py-0.16.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b6f5549d6ed1da9bfe3631ca9483ae906f21410be2445b73443fa9f017601c6f"}, - {file = "rpds_py-0.16.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d452817e0d9c749c431a1121d56a777bd7099b720b3d1c820f1725cb40928f58"}, - {file = "rpds_py-0.16.2-cp312-none-win32.whl", hash = "sha256:888a97002e986eca10d8546e3c8b97da1d47ad8b69726dcfeb3e56348ebb28a3"}, - {file = "rpds_py-0.16.2-cp312-none-win_amd64.whl", hash = "sha256:d8dda2a806dfa4a9b795950c4f5cc56d6d6159f7d68080aedaff3bdc9b5032f5"}, - {file = "rpds_py-0.16.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:071980663c273bf3d388fe5c794c547e6f35ba3335477072c713a3176bf14a60"}, - {file = "rpds_py-0.16.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:726ac36e8a3bb8daef2fd482534cabc5e17334052447008405daca7ca04a3108"}, - {file = "rpds_py-0.16.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9e557db6a177470316c82f023e5d571811c9a4422b5ea084c85da9aa3c035fc"}, - {file = "rpds_py-0.16.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:90123853fc8b1747f80b0d354be3d122b4365a93e50fc3aacc9fb4c2488845d6"}, - {file = "rpds_py-0.16.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a61f659665a39a4d17d699ab3593d7116d66e1e2e3f03ef3fb8f484e91908808"}, - {file = "rpds_py-0.16.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc97f0640e91d7776530f06e6836c546c1c752a52de158720c4224c9e8053cad"}, - {file = 
"rpds_py-0.16.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44a54e99a2b9693a37ebf245937fd6e9228b4cbd64b9cc961e1f3391ec6c7391"}, - {file = "rpds_py-0.16.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bd4b677d929cf1f6bac07ad76e0f2d5de367e6373351c01a9c0a39f6b21b4a8b"}, - {file = "rpds_py-0.16.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:5ef00873303d678aaf8b0627e111fd434925ca01c657dbb2641410f1cdaef261"}, - {file = "rpds_py-0.16.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:349cb40897fd529ca15317c22c0eab67f5ac5178b5bd2c6adc86172045210acc"}, - {file = "rpds_py-0.16.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:2ddef620e70eaffebed5932ce754d539c0930f676aae6212f8e16cd9743dd365"}, - {file = "rpds_py-0.16.2-cp38-none-win32.whl", hash = "sha256:882ce6e25e585949c3d9f9abd29202367175e0aab3aba0c58c9abbb37d4982ff"}, - {file = "rpds_py-0.16.2-cp38-none-win_amd64.whl", hash = "sha256:f4bd4578e44f26997e9e56c96dedc5f1af43cc9d16c4daa29c771a00b2a26851"}, - {file = "rpds_py-0.16.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:69ac7ea9897ec201ce68b48582f3eb34a3f9924488a5432a93f177bf76a82a7e"}, - {file = "rpds_py-0.16.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a9880b4656efe36ccad41edc66789e191e5ee19a1ea8811e0aed6f69851a82f4"}, - {file = "rpds_py-0.16.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee94cb58c0ba2c62ee108c2b7c9131b2c66a29e82746e8fa3aa1a1effbd3dcf1"}, - {file = "rpds_py-0.16.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:24f7a2eb3866a9e91f4599851e0c8d39878a470044875c49bd528d2b9b88361c"}, - {file = "rpds_py-0.16.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ca57468da2d9a660bcf8961637c85f2fbb2aa64d9bc3f9484e30c3f9f67b1dd7"}, - {file = "rpds_py-0.16.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ccd4e400309e1f34a5095bf9249d371f0fd60f8a3a5c4a791cad7b99ce1fd38d"}, - {file = "rpds_py-0.16.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80443fe2f7b3ea3934c5d75fb0e04a5dbb4a8e943e5ff2de0dec059202b70a8b"}, - {file = "rpds_py-0.16.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4d6a9f052e72d493efd92a77f861e45bab2f6be63e37fa8ecf0c6fd1a58fedb0"}, - {file = "rpds_py-0.16.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:35953f4f2b3216421af86fd236b7c0c65935936a94ea83ddbd4904ba60757773"}, - {file = "rpds_py-0.16.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:981d135c7cdaf6cd8eadae1c950de43b976de8f09d8e800feed307140d3d6d00"}, - {file = "rpds_py-0.16.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:d0dd7ed2f16df2e129496e7fbe59a34bc2d7fc8db443a606644d069eb69cbd45"}, - {file = "rpds_py-0.16.2-cp39-none-win32.whl", hash = "sha256:703d95c75a72e902544fda08e965885525e297578317989fd15a6ce58414b41d"}, - {file = "rpds_py-0.16.2-cp39-none-win_amd64.whl", hash = "sha256:e93ec1b300acf89730cf27975ef574396bc04edecc358e9bd116fb387a123239"}, - {file = "rpds_py-0.16.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:44627b6ca7308680a70766454db5249105fa6344853af6762eaad4158a2feebe"}, - {file = "rpds_py-0.16.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:3f91df8e6dbb7360e176d1affd5fb0246d2b88d16aa5ebc7db94fd66b68b61da"}, - {file = "rpds_py-0.16.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d904c5693e08bad240f16d79305edba78276be87061c872a4a15e2c301fa2c0"}, - {file = 
"rpds_py-0.16.2-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:290a81cfbe4673285cdf140ec5cd1658ffbf63ab359f2b352ebe172e7cfa5bf0"}, - {file = "rpds_py-0.16.2-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b634c5ec0103c5cbebc24ebac4872b045cccb9456fc59efdcf6fe39775365bd2"}, - {file = "rpds_py-0.16.2-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a297a4d08cc67c7466c873c78039d87840fb50d05473db0ec1b7b03d179bf322"}, - {file = "rpds_py-0.16.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2e75e17bd0bb66ee34a707da677e47c14ee51ccef78ed6a263a4cc965a072a1"}, - {file = "rpds_py-0.16.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f1b9d9260e06ea017feb7172976ab261e011c1dc2f8883c7c274f6b2aabfe01a"}, - {file = "rpds_py-0.16.2-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:162d7cd9cd311c1b0ff1c55a024b8f38bd8aad1876b648821da08adc40e95734"}, - {file = "rpds_py-0.16.2-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:9b32f742ce5b57201305f19c2ef7a184b52f6f9ba6871cc042c2a61f0d6b49b8"}, - {file = "rpds_py-0.16.2-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:ac08472f41ea77cd6a5dae36ae7d4ed3951d6602833af87532b556c1b4601d63"}, - {file = "rpds_py-0.16.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:495a14b72bbe217f2695dcd9b5ab14d4f8066a00f5d209ed94f0aca307f85f6e"}, - {file = "rpds_py-0.16.2-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:8d6b6937ae9eac6d6c0ca3c42774d89fa311f55adff3970fb364b34abde6ed3d"}, - {file = "rpds_py-0.16.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a61226465bda9283686db8f17d02569a98e4b13c637be5a26d44aa1f1e361c2"}, - {file = "rpds_py-0.16.2-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5cf6af100ffb5c195beec11ffaa8cf8523057f123afa2944e6571d54da84cdc9"}, - {file = "rpds_py-0.16.2-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6df15846ee3fb2e6397fe25d7ca6624af9f89587f3f259d177b556fed6bebe2c"}, - {file = "rpds_py-0.16.2-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1be2f033df1b8be8c3167ba3c29d5dca425592ee31e35eac52050623afba5772"}, - {file = "rpds_py-0.16.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96f957d6ab25a78b9e7fc9749d754b98eac825a112b4e666525ce89afcbd9ed5"}, - {file = "rpds_py-0.16.2-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:088396c7c70e59872f67462fcac3ecbded5233385797021976a09ebd55961dfe"}, - {file = "rpds_py-0.16.2-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:4c46ad6356e1561f2a54f08367d1d2e70a0a1bb2db2282d2c1972c1d38eafc3b"}, - {file = "rpds_py-0.16.2-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:47713dc4fce213f5c74ca8a1f6a59b622fc1b90868deb8e8e4d993e421b4b39d"}, - {file = "rpds_py-0.16.2-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:f811771019f063bbd0aa7bb72c8a934bc13ebacb4672d712fc1639cfd314cccc"}, - {file = "rpds_py-0.16.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f19afcfc0dd0dca35694df441e9b0f95bc231b512f51bded3c3d8ca32153ec19"}, - {file = "rpds_py-0.16.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a4b682c5775d6a3d21e314c10124599976809455ee67020e8e72df1769b87bc3"}, - {file = "rpds_py-0.16.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:c647ca87fc0ebe808a41de912e9a1bfef9acb85257e5d63691364ac16b81c1f0"}, - {file = "rpds_py-0.16.2-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:302bd4983bbd47063e452c38be66153760112f6d3635c7eeefc094299fa400a9"}, - {file = "rpds_py-0.16.2-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bf721ede3eb7b829e4a9b8142bd55db0bdc82902720548a703f7e601ee13bdc3"}, - {file = "rpds_py-0.16.2-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:358dafc89ce3894c7f486c615ba914609f38277ef67f566abc4c854d23b997fa"}, - {file = "rpds_py-0.16.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cad0f59ee3dc35526039f4bc23642d52d5f6616b5f687d846bfc6d0d6d486db0"}, - {file = "rpds_py-0.16.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cffa76b385dfe1e38527662a302b19ffb0e7f5cf7dd5e89186d2c94a22dd9d0c"}, - {file = "rpds_py-0.16.2-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:83640a5d7cd3bff694747d50436b8b541b5b9b9782b0c8c1688931d6ee1a1f2d"}, - {file = "rpds_py-0.16.2-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:ed99b4f7179d2111702020fd7d156e88acd533f5a7d3971353e568b6051d5c97"}, - {file = "rpds_py-0.16.2-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:4022b9dc620e14f30201a8a73898a873c8e910cb642bcd2f3411123bc527f6ac"}, - {file = "rpds_py-0.16.2.tar.gz", hash = "sha256:781ef8bfc091b19960fc0142a23aedadafa826bc32b433fdfe6fd7f964d7ef44"}, + {file = "rpds_py-0.17.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:4128980a14ed805e1b91a7ed551250282a8ddf8201a4e9f8f5b7e6225f54170d"}, + {file = "rpds_py-0.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ff1dcb8e8bc2261a088821b2595ef031c91d499a0c1b031c152d43fe0a6ecec8"}, + {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d65e6b4f1443048eb7e833c2accb4fa7ee67cc7d54f31b4f0555b474758bee55"}, + {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a71169d505af63bb4d20d23a8fbd4c6ce272e7bce6cc31f617152aa784436f29"}, + {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:436474f17733c7dca0fbf096d36ae65277e8645039df12a0fa52445ca494729d"}, + {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:10162fe3f5f47c37ebf6d8ff5a2368508fe22007e3077bf25b9c7d803454d921"}, + {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:720215373a280f78a1814becb1312d4e4d1077b1202a56d2b0815e95ccb99ce9"}, + {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:70fcc6c2906cfa5c6a552ba7ae2ce64b6c32f437d8f3f8eea49925b278a61453"}, + {file = "rpds_py-0.17.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:91e5a8200e65aaac342a791272c564dffcf1281abd635d304d6c4e6b495f29dc"}, + {file = "rpds_py-0.17.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:99f567dae93e10be2daaa896e07513dd4bf9c2ecf0576e0533ac36ba3b1d5394"}, + {file = "rpds_py-0.17.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:24e4900a6643f87058a27320f81336d527ccfe503984528edde4bb660c8c8d59"}, + {file = "rpds_py-0.17.1-cp310-none-win32.whl", hash = "sha256:0bfb09bf41fe7c51413f563373e5f537eaa653d7adc4830399d4e9bdc199959d"}, + {file = "rpds_py-0.17.1-cp310-none-win_amd64.whl", hash = "sha256:20de7b7179e2031a04042e85dc463a93a82bc177eeba5ddd13ff746325558aa6"}, + {file = 
"rpds_py-0.17.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:65dcf105c1943cba45d19207ef51b8bc46d232a381e94dd38719d52d3980015b"}, + {file = "rpds_py-0.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:01f58a7306b64e0a4fe042047dd2b7d411ee82e54240284bab63e325762c1147"}, + {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:071bc28c589b86bc6351a339114fb7a029f5cddbaca34103aa573eba7b482382"}, + {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ae35e8e6801c5ab071b992cb2da958eee76340e6926ec693b5ff7d6381441745"}, + {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149c5cd24f729e3567b56e1795f74577aa3126c14c11e457bec1b1c90d212e38"}, + {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e796051f2070f47230c745d0a77a91088fbee2cc0502e9b796b9c6471983718c"}, + {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60e820ee1004327609b28db8307acc27f5f2e9a0b185b2064c5f23e815f248f8"}, + {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1957a2ab607f9added64478a6982742eb29f109d89d065fa44e01691a20fc20a"}, + {file = "rpds_py-0.17.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8587fd64c2a91c33cdc39d0cebdaf30e79491cc029a37fcd458ba863f8815383"}, + {file = "rpds_py-0.17.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4dc889a9d8a34758d0fcc9ac86adb97bab3fb7f0c4d29794357eb147536483fd"}, + {file = "rpds_py-0.17.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:2953937f83820376b5979318840f3ee47477d94c17b940fe31d9458d79ae7eea"}, + {file = "rpds_py-0.17.1-cp311-none-win32.whl", hash = "sha256:1bfcad3109c1e5ba3cbe2f421614e70439f72897515a96c462ea657261b96518"}, + {file = "rpds_py-0.17.1-cp311-none-win_amd64.whl", hash = "sha256:99da0a4686ada4ed0f778120a0ea8d066de1a0a92ab0d13ae68492a437db78bf"}, + {file = "rpds_py-0.17.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:1dc29db3900cb1bb40353772417800f29c3d078dbc8024fd64655a04ee3c4bdf"}, + {file = "rpds_py-0.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:82ada4a8ed9e82e443fcef87e22a3eed3654dd3adf6e3b3a0deb70f03e86142a"}, + {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d36b2b59e8cc6e576f8f7b671e32f2ff43153f0ad6d0201250a7c07f25d570e"}, + {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3677fcca7fb728c86a78660c7fb1b07b69b281964673f486ae72860e13f512ad"}, + {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:516fb8c77805159e97a689e2f1c80655c7658f5af601c34ffdb916605598cda2"}, + {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df3b6f45ba4515632c5064e35ca7f31d51d13d1479673185ba8f9fefbbed58b9"}, + {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a967dd6afda7715d911c25a6ba1517975acd8d1092b2f326718725461a3d33f9"}, + {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dbbb95e6fc91ea3102505d111b327004d1c4ce98d56a4a02e82cd451f9f57140"}, + {file = "rpds_py-0.17.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:02866e060219514940342a1f84303a1ef7a1dad0ac311792fbbe19b521b489d2"}, + {file = "rpds_py-0.17.1-cp312-cp312-musllinux_1_2_i686.whl", hash = 
"sha256:2528ff96d09f12e638695f3a2e0c609c7b84c6df7c5ae9bfeb9252b6fa686253"}, + {file = "rpds_py-0.17.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bd345a13ce06e94c753dab52f8e71e5252aec1e4f8022d24d56decd31e1b9b23"}, + {file = "rpds_py-0.17.1-cp312-none-win32.whl", hash = "sha256:2a792b2e1d3038daa83fa474d559acfd6dc1e3650ee93b2662ddc17dbff20ad1"}, + {file = "rpds_py-0.17.1-cp312-none-win_amd64.whl", hash = "sha256:292f7344a3301802e7c25c53792fae7d1593cb0e50964e7bcdcc5cf533d634e3"}, + {file = "rpds_py-0.17.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:8ffe53e1d8ef2520ebcf0c9fec15bb721da59e8ef283b6ff3079613b1e30513d"}, + {file = "rpds_py-0.17.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4341bd7579611cf50e7b20bb8c2e23512a3dc79de987a1f411cb458ab670eb90"}, + {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f4eb548daf4836e3b2c662033bfbfc551db58d30fd8fe660314f86bf8510b93"}, + {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b686f25377f9c006acbac63f61614416a6317133ab7fafe5de5f7dc8a06d42eb"}, + {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4e21b76075c01d65d0f0f34302b5a7457d95721d5e0667aea65e5bb3ab415c25"}, + {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b86b21b348f7e5485fae740d845c65a880f5d1eda1e063bc59bef92d1f7d0c55"}, + {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f175e95a197f6a4059b50757a3dca33b32b61691bdbd22c29e8a8d21d3914cae"}, + {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1701fc54460ae2e5efc1dd6350eafd7a760f516df8dbe51d4a1c79d69472fbd4"}, + {file = "rpds_py-0.17.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:9051e3d2af8f55b42061603e29e744724cb5f65b128a491446cc029b3e2ea896"}, + {file = "rpds_py-0.17.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:7450dbd659fed6dd41d1a7d47ed767e893ba402af8ae664c157c255ec6067fde"}, + {file = "rpds_py-0.17.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:5a024fa96d541fd7edaa0e9d904601c6445e95a729a2900c5aec6555fe921ed6"}, + {file = "rpds_py-0.17.1-cp38-none-win32.whl", hash = "sha256:da1ead63368c04a9bded7904757dfcae01eba0e0f9bc41d3d7f57ebf1c04015a"}, + {file = "rpds_py-0.17.1-cp38-none-win_amd64.whl", hash = "sha256:841320e1841bb53fada91c9725e766bb25009cfd4144e92298db296fb6c894fb"}, + {file = "rpds_py-0.17.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:f6c43b6f97209e370124baf2bf40bb1e8edc25311a158867eb1c3a5d449ebc7a"}, + {file = "rpds_py-0.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5e7d63ec01fe7c76c2dbb7e972fece45acbb8836e72682bde138e7e039906e2c"}, + {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81038ff87a4e04c22e1d81f947c6ac46f122e0c80460b9006e6517c4d842a6ec"}, + {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:810685321f4a304b2b55577c915bece4c4a06dfe38f6e62d9cc1d6ca8ee86b99"}, + {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:25f071737dae674ca8937a73d0f43f5a52e92c2d178330b4c0bb6ab05586ffa6"}, + {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa5bfb13f1e89151ade0eb812f7b0d7a4d643406caaad65ce1cbabe0a66d695f"}, + {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:dfe07308b311a8293a0d5ef4e61411c5c20f682db6b5e73de6c7c8824272c256"}, + {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a000133a90eea274a6f28adc3084643263b1e7c1a5a66eb0a0a7a36aa757ed74"}, + {file = "rpds_py-0.17.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d0e8a6434a3fbf77d11448c9c25b2f25244226cfbec1a5159947cac5b8c5fa4"}, + {file = "rpds_py-0.17.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:efa767c220d94aa4ac3a6dd3aeb986e9f229eaf5bce92d8b1b3018d06bed3772"}, + {file = "rpds_py-0.17.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:dbc56680ecf585a384fbd93cd42bc82668b77cb525343170a2d86dafaed2a84b"}, + {file = "rpds_py-0.17.1-cp39-none-win32.whl", hash = "sha256:270987bc22e7e5a962b1094953ae901395e8c1e1e83ad016c5cfcfff75a15a3f"}, + {file = "rpds_py-0.17.1-cp39-none-win_amd64.whl", hash = "sha256:2a7b2f2f56a16a6d62e55354dd329d929560442bd92e87397b7a9586a32e3e76"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a3264e3e858de4fc601741498215835ff324ff2482fd4e4af61b46512dd7fc83"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:f2f3b28b40fddcb6c1f1f6c88c6f3769cd933fa493ceb79da45968a21dccc920"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9584f8f52010295a4a417221861df9bea4c72d9632562b6e59b3c7b87a1522b7"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c64602e8be701c6cfe42064b71c84ce62ce66ddc6422c15463fd8127db3d8066"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:060f412230d5f19fc8c8b75f315931b408d8ebf56aec33ef4168d1b9e54200b1"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b9412abdf0ba70faa6e2ee6c0cc62a8defb772e78860cef419865917d86c7342"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9737bdaa0ad33d34c0efc718741abaafce62fadae72c8b251df9b0c823c63b22"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9f0e4dc0f17dcea4ab9d13ac5c666b6b5337042b4d8f27e01b70fae41dd65c57"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:1db228102ab9d1ff4c64148c96320d0be7044fa28bd865a9ce628ce98da5973d"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:d8bbd8e56f3ba25a7d0cf980fc42b34028848a53a0e36c9918550e0280b9d0b6"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:be22ae34d68544df293152b7e50895ba70d2a833ad9566932d750d3625918b82"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:bf046179d011e6114daf12a534d874958b039342b347348a78b7cdf0dd9d6041"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:1a746a6d49665058a5896000e8d9d2f1a6acba8a03b389c1e4c06e11e0b7f40d"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0b8bf5b8db49d8fd40f54772a1dcf262e8be0ad2ab0206b5a2ec109c176c0a4"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f7f4cb1f173385e8a39c29510dd11a78bf44e360fb75610594973f5ea141028b"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:7fbd70cb8b54fe745301921b0816c08b6d917593429dfc437fd024b5ba713c58"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9bdf1303df671179eaf2cb41e8515a07fc78d9d00f111eadbe3e14262f59c3d0"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fad059a4bd14c45776600d223ec194e77db6c20255578bb5bcdd7c18fd169361"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3664d126d3388a887db44c2e293f87d500c4184ec43d5d14d2d2babdb4c64cad"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:698ea95a60c8b16b58be9d854c9f993c639f5c214cf9ba782eca53a8789d6b19"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:c3d2010656999b63e628a3c694f23020322b4178c450dc478558a2b6ef3cb9bb"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:938eab7323a736533f015e6069a7d53ef2dcc841e4e533b782c2bfb9fb12d84b"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1e626b365293a2142a62b9a614e1f8e331b28f3ca57b9f05ebbf4cf2a0f0bdc5"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:380e0df2e9d5d5d339803cfc6d183a5442ad7ab3c63c2a0982e8c824566c5ccc"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b760a56e080a826c2e5af09002c1a037382ed21d03134eb6294812dda268c811"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5576ee2f3a309d2bb403ec292d5958ce03953b0e57a11d224c1f134feaf8c40f"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f3c3461ebb4c4f1bbc70b15d20b565759f97a5aaf13af811fcefc892e9197ba"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:637b802f3f069a64436d432117a7e58fab414b4e27a7e81049817ae94de45d8d"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffee088ea9b593cc6160518ba9bd319b5475e5f3e578e4552d63818773c6f56a"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3ac732390d529d8469b831949c78085b034bff67f584559340008d0f6041a049"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:93432e747fb07fa567ad9cc7aaadd6e29710e515aabf939dfbed8046041346c6"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:7b7d9ca34542099b4e185b3c2a2b2eda2e318a7dbde0b0d83357a6d4421b5296"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:0387ce69ba06e43df54e43968090f3626e231e4bc9150e4c3246947567695f68"}, + {file = "rpds_py-0.17.1.tar.gz", hash = "sha256:0210b2668f24c078307260bf88bdac9d6f1093635df5123789bfee4d8d7fc8e7"}, ] [[package]] @@ -3941,56 +3941,50 @@ sphinx = ">=2.0" [[package]] name = "sphinxcontrib-applehelp" -version = "1.0.7" +version = "1.0.8" description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" optional = false python-versions = ">=3.9" files = [ - {file = "sphinxcontrib_applehelp-1.0.7-py3-none-any.whl", hash = "sha256:094c4d56209d1734e7d252f6e0b3ccc090bd52ee56807a5d9315b19c122ab15d"}, - {file = "sphinxcontrib_applehelp-1.0.7.tar.gz", hash = "sha256:39fdc8d762d33b01a7d8f026a3b7d71563ea3b72787d5f00ad8465bd9d6dfbfa"}, + {file = 
"sphinxcontrib_applehelp-1.0.8-py3-none-any.whl", hash = "sha256:cb61eb0ec1b61f349e5cc36b2028e9e7ca765be05e49641c97241274753067b4"}, + {file = "sphinxcontrib_applehelp-1.0.8.tar.gz", hash = "sha256:c40a4f96f3776c4393d933412053962fac2b84f4c99a7982ba42e09576a70619"}, ] -[package.dependencies] -Sphinx = ">=5" - [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] +standalone = ["Sphinx (>=5)"] test = ["pytest"] [[package]] name = "sphinxcontrib-devhelp" -version = "1.0.5" +version = "1.0.6" description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents" optional = false python-versions = ">=3.9" files = [ - {file = "sphinxcontrib_devhelp-1.0.5-py3-none-any.whl", hash = "sha256:fe8009aed765188f08fcaadbb3ea0d90ce8ae2d76710b7e29ea7d047177dae2f"}, - {file = "sphinxcontrib_devhelp-1.0.5.tar.gz", hash = "sha256:63b41e0d38207ca40ebbeabcf4d8e51f76c03e78cd61abe118cf4435c73d4212"}, + {file = "sphinxcontrib_devhelp-1.0.6-py3-none-any.whl", hash = "sha256:6485d09629944511c893fa11355bda18b742b83a2b181f9a009f7e500595c90f"}, + {file = "sphinxcontrib_devhelp-1.0.6.tar.gz", hash = "sha256:9893fd3f90506bc4b97bdb977ceb8fbd823989f4316b28c3841ec128544372d3"}, ] -[package.dependencies] -Sphinx = ">=5" - [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] +standalone = ["Sphinx (>=5)"] test = ["pytest"] [[package]] name = "sphinxcontrib-htmlhelp" -version = "2.0.4" +version = "2.0.5" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" optional = false python-versions = ">=3.9" files = [ - {file = "sphinxcontrib_htmlhelp-2.0.4-py3-none-any.whl", hash = "sha256:8001661c077a73c29beaf4a79968d0726103c5605e27db92b9ebed8bab1359e9"}, - {file = "sphinxcontrib_htmlhelp-2.0.4.tar.gz", hash = "sha256:6c26a118a05b76000738429b724a0568dbde5b72391a688577da08f11891092a"}, + {file = "sphinxcontrib_htmlhelp-2.0.5-py3-none-any.whl", hash = "sha256:393f04f112b4d2f53d93448d4bce35842f62b307ccdc549ec1585e950bc35e04"}, + {file = "sphinxcontrib_htmlhelp-2.0.5.tar.gz", hash = "sha256:0dc87637d5de53dd5eec3a6a01753b1ccf99494bd756aafecd74b4fa9e729015"}, ] -[package.dependencies] -Sphinx = ">=5" - [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] +standalone = ["Sphinx (>=5)"] test = ["html5lib", "pytest"] [[package]] @@ -4009,38 +4003,34 @@ test = ["flake8", "mypy", "pytest"] [[package]] name = "sphinxcontrib-qthelp" -version = "1.0.6" +version = "1.0.7" description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents" optional = false python-versions = ">=3.9" files = [ - {file = "sphinxcontrib_qthelp-1.0.6-py3-none-any.whl", hash = "sha256:bf76886ee7470b934e363da7a954ea2825650013d367728588732c7350f49ea4"}, - {file = "sphinxcontrib_qthelp-1.0.6.tar.gz", hash = "sha256:62b9d1a186ab7f5ee3356d906f648cacb7a6bdb94d201ee7adf26db55092982d"}, + {file = "sphinxcontrib_qthelp-1.0.7-py3-none-any.whl", hash = "sha256:e2ae3b5c492d58fcbd73281fbd27e34b8393ec34a073c792642cd8e529288182"}, + {file = "sphinxcontrib_qthelp-1.0.7.tar.gz", hash = "sha256:053dedc38823a80a7209a80860b16b722e9e0209e32fea98c90e4e6624588ed6"}, ] -[package.dependencies] -Sphinx = ">=5" - [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] +standalone = ["Sphinx (>=5)"] test = ["pytest"] [[package]] name = "sphinxcontrib-serializinghtml" -version = "1.1.9" +version = "1.1.10" description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)" optional = false python-versions = ">=3.9" files = [ - {file = 
"sphinxcontrib_serializinghtml-1.1.9-py3-none-any.whl", hash = "sha256:9b36e503703ff04f20e9675771df105e58aa029cfcbc23b8ed716019b7416ae1"}, - {file = "sphinxcontrib_serializinghtml-1.1.9.tar.gz", hash = "sha256:0c64ff898339e1fac29abd2bf5f11078f3ec413cfe9c046d3120d7ca65530b54"}, + {file = "sphinxcontrib_serializinghtml-1.1.10-py3-none-any.whl", hash = "sha256:326369b8df80a7d2d8d7f99aa5ac577f51ea51556ed974e7716cfd4fca3f6cb7"}, + {file = "sphinxcontrib_serializinghtml-1.1.10.tar.gz", hash = "sha256:93f3f5dc458b91b192fe10c397e324f262cf163d79f3282c158e8436a2c4511f"}, ] -[package.dependencies] -Sphinx = ">=5" - [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] +standalone = ["Sphinx (>=5)"] test = ["pytest"] [[package]] From eaa2386aa0895709e7e6b5254b52854741449689 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Tue, 16 Jan 2024 10:18:03 -0700 Subject: [PATCH 052/199] Update df_utils.py --- schematic/utils/df_utils.py | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/schematic/utils/df_utils.py b/schematic/utils/df_utils.py index 8ea061b8c..33b077f98 100644 --- a/schematic/utils/df_utils.py +++ b/schematic/utils/df_utils.py @@ -46,7 +46,7 @@ def load_df(file_path: str, preserve_raw_input: bool = True, data_model: bool = # create a separate copy of the manifest # before beginning conversions to store float values float_df=deepcopy(org_df) - + # Cast the columns in the dataframe to string and # replace Null values with empty strings null_cells = org_df.isnull() @@ -54,23 +54,27 @@ def load_df(file_path: str, preserve_raw_input: bool = True, data_model: bool = # Find integers stored as strings and replace with entries of type np.int64 if org_df.size < large_manifest_cutoff_size: # If small manifest, iterate as normal for improved performance - ints = org_df.applymap(lambda x: np.int64(x) if str.isdigit(x) else False, na_action='ignore').fillna(False) + ints = org_df.map(lambda x: np.int64(x) if str.isdigit(x) else False, na_action='ignore').fillna(False) else: # parallelize iterations for large manfiests pandarallel.initialize(verbose = 1) - ints = org_df.parallel_applymap(lambda x: np.int64(x) if str.isdigit(x) else False, na_action='ignore').fillna(False) + ints = org_df.parallel_map(lambda x: np.int64(x) if str.isdigit(x) else False, na_action='ignore').fillna(False) # Identify cells converted to intergers - ints_tf_df = ints.applymap(pd.api.types.is_integer) + ints_tf_df = ints.map(pd.api.types.is_integer) # convert strings to numerical dtype (float) if possible, preserve non-numerical strings for col in org_df.columns: - float_df[col]=pd.to_numeric(float_df[col], errors='coerce') + float_df[col]=pd.to_numeric(float_df[col], errors='coerce').astype('object') + # replace values that couldn't be converted to float with the original str values float_df[col].fillna(org_df[col][float_df[col].isna()], inplace=True) # Store values that were converted to type int in the final dataframe processed_df=float_df.mask(ints_tf_df, other = ints) + + # Infer dtypes for columns when possible to restore type masking + processed_df = processed_df.infer_objects() # log manifest load and processing time logger.debug(f"Load Elapsed time {perf_counter()-t_load_df}") From 40a0b04fc9fc694217d1e80b6a028769c04fdd5a Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Tue, 16 Jan 2024 10:28:54 -0700 Subject: [PATCH 053/199] Update GE_Helpers.py --- schematic/models/GE_Helpers.py | 2 +- 1 
file changed, 1 insertion(+), 1 deletion(-) diff --git a/schematic/models/GE_Helpers.py b/schematic/models/GE_Helpers.py index e4de3310e..d9b6d07a3 100644 --- a/schematic/models/GE_Helpers.py +++ b/schematic/models/GE_Helpers.py @@ -156,7 +156,7 @@ def build_expectation_suite(self,): meta={} # remove trailing/leading whitespaces from manifest - self.manifest.applymap(lambda x: x.strip() if isinstance(x, str) else x) + self.manifest.map(lambda x: x.strip() if isinstance(x, str) else x) validation_rules = self.sg.get_node_validation_rules(col) #check if attribute has any rules associated with it From 8233818956e71be69dc77eac99888a1d0acc05f9 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Tue, 16 Jan 2024 10:29:07 -0700 Subject: [PATCH 054/199] Update validate_manifest.py --- schematic/models/validate_manifest.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/schematic/models/validate_manifest.py b/schematic/models/validate_manifest.py index a6e365b6e..162e30ef2 100644 --- a/schematic/models/validate_manifest.py +++ b/schematic/models/validate_manifest.py @@ -175,7 +175,7 @@ def validate_manifest_rules( for col in manifest.columns: # remove trailing/leading whitespaces from manifest - manifest.applymap(lambda x: x.strip() if isinstance(x, str) else x) + manifest.map(lambda x: x.strip() if isinstance(x, str) else x) validation_rules = sg.get_node_validation_rules(col) # Check that attribute rules conform to limits: @@ -235,7 +235,7 @@ def validate_manifest_values(self, manifest, jsonSchema, sg # numerical values need to be type string for the jsonValidator for col in manifest.select_dtypes(include=[int, np.int64, float, np.float64]).columns: manifest[col]=manifest[col].astype('string') - manifest = manifest.applymap(lambda x: str(x) if isinstance(x, (int, np.int64, float, np.float64)) else x, na_action='ignore') + manifest = manifest.map(lambda x: str(x) if isinstance(x, (int, np.int64, float, np.float64)) else x, na_action='ignore') annotations = json.loads(manifest.to_json(orient="records")) for i, annotation in enumerate(annotations): From ba143e228074ea4b99511c68a5f3a55ab5e6dd5f Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Tue, 16 Jan 2024 10:29:11 -0700 Subject: [PATCH 055/199] Update synapse.py --- schematic/store/synapse.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py index fa08e09ba..19027c7f8 100644 --- a/schematic/store/synapse.py +++ b/schematic/store/synapse.py @@ -454,14 +454,14 @@ def _get_manifest_id(self, manifest: pd.DataFrame) -> str: # Try to use uncensored manifest first not_censored=~censored if any(not_censored): - manifest_syn_id=manifest[not_censored]["id"][0] + manifest_syn_id=manifest[not_censored]["id"].iloc[0] # if only censored manifests are available, just use the first censored manifest else: - manifest_syn_id = manifest["id"][0] + manifest_syn_id = manifest["id"].iloc[0] #otherwise, use the first (implied only) version that exists else: - manifest_syn_id = manifest["id"][0] + manifest_syn_id = manifest["id"].iloc[0] return manifest_syn_id From 457264d312feb623c1eb988a61b43422724767d0 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Tue, 16 Jan 2024 11:11:17 -0700 Subject: [PATCH 056/199] fix schemas test --- tests/test_schemas.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git 
a/tests/test_schemas.py b/tests/test_schemas.py index 2444d5f44..01f64ed82 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -19,7 +19,7 @@ def extended_schema_path(helpers, tmp_path): example_model_df = load_df(data_model_csv_path) # additional "Assay" attribute to be added to example schema - assay_attr_row = { + assay_attr_dict = { "Attribute": "Assay", "Description": ( "A planned process with the objective to produce information " @@ -36,7 +36,9 @@ def extended_schema_path(helpers, tmp_path): "Validation Rules": "", } - example_model_df = example_model_df.append(assay_attr_row, ignore_index=True) + assay_attr_df = pd.DataFrame.from_dict(assay_attr_dict, orient="index").T + + example_model_df = pd.concat([example_model_df, assay_attr_df], ignore_index=True) # create empty temporary file to write extended schema to schemas_folder = tmp_path / "schemas" From f09c29bef5e23940b2052f9882f52f9eebed4ef6 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Tue, 16 Jan 2024 11:11:45 -0700 Subject: [PATCH 057/199] fix utils test --- tests/test_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_utils.py b/tests/test_utils.py index 2894ea479..ab2866acd 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -287,7 +287,7 @@ def test_trim_commas_df(self, helpers): [[np.nan] * len(local_manifest.columns)], columns=local_manifest.columns ) - df_with_nans = local_manifest.append(nan_row, ignore_index=True) + df_with_nans = pd.concat([local_manifest, nan_row], ignore_index=True) df_with_nans["Unnamed: 1"] = np.nan trimmed_df = df_utils.trim_commas_df(df_with_nans) From d4ad9b8d8f85180a2882cbbf61c34d7d644c474e Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Tue, 16 Jan 2024 11:19:38 -0700 Subject: [PATCH 058/199] fix schemas test --- tests/test_schemas.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index 2444d5f44..01f64ed82 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -19,7 +19,7 @@ def extended_schema_path(helpers, tmp_path): example_model_df = load_df(data_model_csv_path) # additional "Assay" attribute to be added to example schema - assay_attr_row = { + assay_attr_dict = { "Attribute": "Assay", "Description": ( "A planned process with the objective to produce information " @@ -36,7 +36,9 @@ def extended_schema_path(helpers, tmp_path): "Validation Rules": "", } - example_model_df = example_model_df.append(assay_attr_row, ignore_index=True) + assay_attr_df = pd.DataFrame.from_dict(assay_attr_dict, orient="index").T + + example_model_df = pd.concat([example_model_df, assay_attr_df], ignore_index=True) # create empty temporary file to write extended schema to schemas_folder = tmp_path / "schemas" From 9774efb5f533c88d353e043273a4f27e111a5a57 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Tue, 16 Jan 2024 11:20:03 -0700 Subject: [PATCH 059/199] fix utils test --- tests/test_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_utils.py b/tests/test_utils.py index 2894ea479..ab2866acd 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -287,7 +287,7 @@ def test_trim_commas_df(self, helpers): [[np.nan] * len(local_manifest.columns)], columns=local_manifest.columns ) - df_with_nans = local_manifest.append(nan_row, ignore_index=True) + df_with_nans = pd.concat([local_manifest, 
nan_row], ignore_index=True) df_with_nans["Unnamed: 1"] = np.nan trimmed_df = df_utils.trim_commas_df(df_with_nans) From 6a7a2f7a764e7f9da017132e381e7ff704b2665d Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Tue, 16 Jan 2024 11:20:16 -0700 Subject: [PATCH 060/199] Revert "fix utils test" This reverts commit f09c29bef5e23940b2052f9882f52f9eebed4ef6. --- tests/test_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_utils.py b/tests/test_utils.py index ab2866acd..2894ea479 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -287,7 +287,7 @@ def test_trim_commas_df(self, helpers): [[np.nan] * len(local_manifest.columns)], columns=local_manifest.columns ) - df_with_nans = pd.concat([local_manifest, nan_row], ignore_index=True) + df_with_nans = local_manifest.append(nan_row, ignore_index=True) df_with_nans["Unnamed: 1"] = np.nan trimmed_df = df_utils.trim_commas_df(df_with_nans) From f572bce80c8ae75f710d78026ba8b41e953e0909 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Tue, 16 Jan 2024 11:20:23 -0700 Subject: [PATCH 061/199] Revert "fix schemas test" This reverts commit 457264d312feb623c1eb988a61b43422724767d0. --- tests/test_schemas.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index 01f64ed82..2444d5f44 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -19,7 +19,7 @@ def extended_schema_path(helpers, tmp_path): example_model_df = load_df(data_model_csv_path) # additional "Assay" attribute to be added to example schema - assay_attr_dict = { + assay_attr_row = { "Attribute": "Assay", "Description": ( "A planned process with the objective to produce information " @@ -36,9 +36,7 @@ def extended_schema_path(helpers, tmp_path): "Validation Rules": "", } - assay_attr_df = pd.DataFrame.from_dict(assay_attr_dict, orient="index").T - - example_model_df = pd.concat([example_model_df, assay_attr_df], ignore_index=True) + example_model_df = example_model_df.append(assay_attr_row, ignore_index=True) # create empty temporary file to write extended schema to schemas_folder = tmp_path / "schemas" From c8ab0e3beb73543f5d534db9d77e980ecf3d9259 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Tue, 16 Jan 2024 15:05:25 -0700 Subject: [PATCH 062/199] change object inference --- schematic/utils/df_utils.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/schematic/utils/df_utils.py b/schematic/utils/df_utils.py index 33b077f98..b77b5ea84 100644 --- a/schematic/utils/df_utils.py +++ b/schematic/utils/df_utils.py @@ -74,8 +74,9 @@ def load_df(file_path: str, preserve_raw_input: bool = True, data_model: bool = processed_df=float_df.mask(ints_tf_df, other = ints) # Infer dtypes for columns when possible to restore type masking - processed_df = processed_df.infer_objects() - + # This mostly just labels string columns as such, changing the column type of columns with mixed numberical values converts ints to floats + processed_df = processed_df.convert_dtypes(infer_objects=False, convert_string=True, convert_integer=True, convert_boolean=False, convert_floating=True) + # log manifest load and processing time logger.debug(f"Load Elapsed time {perf_counter()-t_load_df}") return processed_df From dcd5f098a421400ce9f737bbefdb0d78a5836371 Mon Sep 17 00:00:00 2001 From: Gianna Jordan 
<61707471+GiaJordan@users.noreply.github.com>
Date: Wed, 17 Jan 2024 09:56:30 -0700
Subject: [PATCH 063/199] leave column types as object

---
 schematic/utils/df_utils.py | 4 ----
 1 file changed, 4 deletions(-)

diff --git a/schematic/utils/df_utils.py b/schematic/utils/df_utils.py
index b77b5ea84..7dbb139f6 100644
--- a/schematic/utils/df_utils.py
+++ b/schematic/utils/df_utils.py
@@ -73,10 +73,6 @@ def load_df(file_path: str, preserve_raw_input: bool = True, data_model: bool =
         # Store values that were converted to type int in the final dataframe
         processed_df=float_df.mask(ints_tf_df, other = ints)
 
-        # Infer dtypes for columns when possible to restore type masking
-        # This mostly just labels string columns as such, changing the column type of columns with mixed numberical values converts ints to floats
-        processed_df = processed_df.convert_dtypes(infer_objects=False, convert_string=True, convert_integer=True, convert_boolean=False, convert_floating=True)
-
     # log manifest load and processing time
     logger.debug(f"Load Elapsed time {perf_counter()-t_load_df}")
     return processed_df

From 32b8b2a934c5f940efabb0cbe20896f35b4a999a Mon Sep 17 00:00:00 2001
From: linglp
Date: Wed, 17 Jan 2024 14:20:54 -0500
Subject: [PATCH 064/199] move logic to schematic library side

---
 schematic/manifest/generator.py | 23 +++++++++++++++++++++
 1 file changed, 23 insertions(+)

diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py
index fa842eeb5..20a48c6ae 100644
--- a/schematic/manifest/generator.py
+++ b/schematic/manifest/generator.py
@@ -1552,6 +1552,29 @@ def create_manifests(jsonld:str, data_types:list, access_token:Optional[str]=Non
         Returns:
             Union[List[str], List[pd.DataFrame], BinaryIO]: a list of Googlesheet URLs, a list of pandas dataframes or an Excel file.
         """
+        if dataset_ids:
+            # Check that the number of submitted data_types matches
+            # the number of dataset_ids (if applicable)
+            len_data_types = len(data_types)
+            len_dataset_ids = len(dataset_ids)
+
+            try:
+                len_data_types == len_dataset_ids
+            except:
+                raise ValueError(
+                    f"There is a mismatch in the number of data_types and dataset_ids that were "
+                    f"submitted. Please check your submission and try again."
+                )
+
+            # Raise an error if used in conjunction with datatype = 'all_manifests'
+            try:
+                data_types[0] != 'all manifests'
+            except:
+                raise ValueError(
+                    f"When submitting 'all manifests' as the data_type, you cannot also submit dataset_id. "
+                    f"Please check your submission and try again."
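
A caveat about the guard block being added here (its closing parenthesis follows just below): wrapping a bare comparison in try/except is a no-op, because an expression like `len_data_types == len_dataset_ids` simply evaluates to a bool and never raises, so the except branch holding the ValueError can never fire. Patch 068 further down replaces both guards with explicit if checks. A minimal standalone sketch of that corrected control flow; the function name `validate_manifest_request` is hypothetical, used only for illustration:

    def validate_manifest_request(data_types: list, dataset_ids: list) -> None:
        """Raise ValueError for the two invalid input combinations."""
        if not dataset_ids:
            return
        # A comparison evaluates to True/False; it does not throw, so the
        # original try/except form silently skipped this check.
        if len(data_types) != len(dataset_ids):
            raise ValueError(
                "There is a mismatch in the number of data_types and "
                "dataset_ids that were submitted."
            )
        if data_types[0] == "all manifests":
            raise ValueError(
                "When submitting 'all manifests' as the data_type, you "
                "cannot also submit dataset_id."
            )
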
+ ) + all_results = [] if data_types[0] == 'all manifests': sg = SchemaGenerator(path_to_json_ld=jsonld) From 134c26c7c84647a5f55b18298a53c14023d45b21 Mon Sep 17 00:00:00 2001 From: linglp Date: Wed, 17 Jan 2024 14:34:57 -0500 Subject: [PATCH 065/199] add the part related to "send from directory back to api side --- schematic/manifest/generator.py | 7 ----- schematic_api/api/routes.py | 48 +++++++++------------------------ 2 files changed, 12 insertions(+), 43 deletions(-) diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py index 20a48c6ae..bb66f9976 100644 --- a/schematic/manifest/generator.py +++ b/schematic/manifest/generator.py @@ -1526,13 +1526,6 @@ def create_single_manifest(jsonld: str, data_type: str, access_token:Optional[st dataset_id=dataset_id, sheet_url=True, output_format=output_format, access_token=access_token, strict=strict, ) - # return an excel file if output_format is set to "excel" - if output_format == "excel": - dir_name = os.path.dirname(result) - file_name = os.path.basename(result) - mimetype='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet' - return send_from_directory(directory=dir_name, path=file_name, as_attachment=True, mimetype=mimetype, max_age=0) - return result @staticmethod diff --git a/schematic_api/api/routes.py b/schematic_api/api/routes.py index 4afd8897f..280abd3bf 100644 --- a/schematic_api/api/routes.py +++ b/schematic_api/api/routes.py @@ -209,12 +209,13 @@ def get_temp_jsonld(schema_url): return tmp_file.name # @before_request -def get_manifest_route(schema_url: str, use_annotations: bool, dataset_ids=None, asset_view = None, output_format=None, title=None, strict_validation:bool=True): +def get_manifest_route(schema_url: str, use_annotations: bool, dataset_id=None, asset_view = None, output_format=None, title=None, strict_validation:bool=True, data_type=None): """Get the immediate dependencies that are related to a given source node. Args: schema_url: link to data model in json ld format title: title of a given manifest. dataset_id: Synapse ID of the "dataset" entity on Synapse (for a given center/project). + data_type: data model components. output_format: contains three option: "excel", "google_sheet", and "dataframe". if set to "excel", return an excel spreadsheet use_annotations: Whether to use existing annotations during manifest generation asset_view: ID of view listing all project data assets. For example, for Synapse this would be the Synapse ID of the fileview listing all data assets for a given project. @@ -232,41 +233,16 @@ def get_manifest_route(schema_url: str, use_annotations: bool, dataset_ids=None, # get path to temporary JSON-LD file jsonld = get_temp_jsonld(schema_url) - # Gather all data_types to make manifests for. - all_args = connexion.request.args - args_dict = dict(all_args.lists()) - data_type = args_dict['data_type'] - - # Gather all dataset_ids - try: - dataset_ids = args_dict['dataset_id'] - except: - pass - - if dataset_ids: - # Check that the number of submitted data_types matches - # the number of dataset_ids (if applicable) - len_data_types = len(data_type) - len_dataset_ids = len(dataset_ids) - - try: - len_data_types == len_dataset_ids - except: - raise ValueError( - f"There is a mismatch in the number of data_types and dataset_id's that " - f"submitted. Please check your submission and try again." 
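
The block removed from `create_single_manifest` above is the Flask-specific piece that patch 065 relocates into `get_manifest_route` in the routes.py hunk in progress here, so the library layer no longer imports `send_from_directory`. For context, a minimal sketch of that response pattern in isolation; the route path and output folder are illustrative assumptions, not schematic's actual API:

    import os
    from flask import Flask, send_from_directory

    app = Flask(__name__)
    XLSX_MIME = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
    MANIFEST_DIR = "/tmp/manifests"  # assumed output folder, illustration only

    @app.route("/manifest/excel/<name>")
    def manifest_excel(name: str):
        # send_from_directory also guards against path traversal out of MANIFEST_DIR
        return send_from_directory(
            directory=MANIFEST_DIR,
            path=f"{name}.xlsx",
            as_attachment=True,
            mimetype=XLSX_MIME,
            max_age=0,  # disable caching so a fresh manifest is always served
        )

Keeping the framework response object in the API layer and returning a plain path (or DataFrame) from the library is what lets the later tests mock `create_single_manifest` without standing up Flask.
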
- ) - - # Raise an error if used in conjunction with datatype = 'all_manifests' - try: - data_type[0] != 'all manifests' - except: - raise ValueError( - f"When submitting 'all manifests' as the data_type cannot also submit dataset_id. " - f"Please check your submission and try again." - ) - - all_results = ManifestGenerator.create_manifests(jsonld=jsonld, output_format=output_format, data_types=data_type, title=title, access_token=access_token, dataset_ids=dataset_ids, strict=strict_validation, use_annotations=use_annotations) + all_results = ManifestGenerator.create_manifests(jsonld=jsonld, output_format=output_format, data_types=data_type, title=title, access_token=access_token, dataset_ids=dataset_id, strict=strict_validation, use_annotations=use_annotations) + + # return an excel file if output_format is set to "excel" + if output_format == "excel": + # should only contain one excel spreadsheet path + result = all_results[0] + dir_name = os.path.dirname(result) + file_name = os.path.basename(result) + mimetype='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet' + return send_from_directory(directory=dir_name, path=file_name, as_attachment=True, mimetype=mimetype, max_age=0) return all_results #####profile validate manifest route function From adc485dd6431443419b5cb99d9714869c4b31cc1 Mon Sep 17 00:00:00 2001 From: linglp Date: Wed, 17 Jan 2024 14:39:15 -0500 Subject: [PATCH 066/199] edit type hinting --- schematic/manifest/generator.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py index bb66f9976..b93b110e4 100644 --- a/schematic/manifest/generator.py +++ b/schematic/manifest/generator.py @@ -1492,7 +1492,7 @@ def _handle_output_format_logic(self, output_format: str = None, output_path: st return dataframe @staticmethod - def create_single_manifest(jsonld: str, data_type: str, access_token:Optional[str]=None, dataset_id:Optional[str]=None, strict:Optional[bool]=True, title:Optional[str]=None, output_format:Literal["google_sheet", "excel", "dataframe"]="google_sheet", use_annotations:Optional[bool]=False) -> Union[str, pd.DataFrame, BinaryIO]: + def create_single_manifest(jsonld: str, data_type: str, access_token:Optional[str]=None, dataset_id:Optional[str]=None, strict:Optional[bool]=True, title:Optional[str]=None, output_format:Literal["google_sheet", "excel", "dataframe"]="google_sheet", use_annotations:Optional[bool]=False) -> Union[str, pd.DataFrame]: """Create a single manifest Args: From aaafeed72988fbaf62faf22e884a3a97faeb8ac9 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Wed, 17 Jan 2024 13:33:48 -0700 Subject: [PATCH 067/199] clean up `load_df` --- schematic/utils/df_utils.py | 89 ++++++++++++++++++++++++------------- 1 file changed, 59 insertions(+), 30 deletions(-) diff --git a/schematic/utils/df_utils.py b/schematic/utils/df_utils.py index 7dbb139f6..457f9ef26 100644 --- a/schematic/utils/df_utils.py +++ b/schematic/utils/df_utils.py @@ -6,6 +6,7 @@ import pandas as pd import numpy as np from pandarallel import pandarallel +from typing import Union logger = logging.getLogger(__name__) @@ -24,7 +25,6 @@ def load_df(file_path: str, preserve_raw_input: bool = True, data_model: bool = Returns: a processed dataframe for manifests or unprocessed df for data models and where indicated """ - large_manifest_cutoff_size = 1000 # start performance timer t_load_df = perf_counter() @@ -35,48 +35,77 @@ def load_df(file_path: str, 
preserve_raw_input: bool = True, data_model: bool =
     if not data_model:
         org_df=trim_commas_df(org_df)
 
-    # If type inference not allowed: trim and return
     if preserve_raw_input:
-        # log manifest load and processing time
         logger.debug(f"Load Elapsed time {perf_counter()-t_load_df}")
         return org_df
+
+    is_null = org_df.isnull()
+    org_df = org_df.astype(str).mask(is_null, '')
+
+    ints, is_int = find_and_convert_ints(org_df)
+
+    float_df = convert_floats(org_df)
 
-    # If type inferences is allowed: infer types, trim, and return
-    else:
-        # create a separate copy of the manifest
-        # before beginning conversions to store float values
-        float_df=deepcopy(org_df)
+    # Store values that were converted to type int in the final dataframe
+    processed_df=float_df.mask(is_int, other = ints)
 
-        # Cast the columns in the dataframe to string and
-        # replace Null values with empty strings
-        null_cells = org_df.isnull()
-        org_df = org_df.astype(str).mask(null_cells, '')
+    logger.debug(f"Load Elapsed time {perf_counter()-t_load_df}")
+    return processed_df
 
-        # Find integers stored as strings and replace with entries of type np.int64
-        if org_df.size < large_manifest_cutoff_size: # If small manifest, iterate as normal for improved performance
-            ints = org_df.map(lambda x: np.int64(x) if str.isdigit(x) else False, na_action='ignore').fillna(False)
+def find_and_convert_ints(df: pd.DataFrame) -> tuple[pd.DataFrame, pd.DataFrame]:
+    """
+    Find strings that represent integers and convert to type int
+    Args:
+        df: dataframe with nulls masked as empty strings
+    Returns:
+        ints: dataframe with values that were converted to type int
+        is_int: dataframe with boolean values indicating which cells were converted to type int
 
-        else: # parallelize iterations for large manfiests
-            pandarallel.initialize(verbose = 1)
-            ints = org_df.parallel_map(lambda x: np.int64(x) if str.isdigit(x) else False, na_action='ignore').fillna(False)
+    """
+    large_manifest_cutoff_size = 1000
+    # Find integers stored as strings and replace with entries of type np.int64
+    if df.size < large_manifest_cutoff_size: # If small manifest, iterate as normal for improved performance
+        ints = df.map(lambda x: convert_ints(x), na_action='ignore').fillna(False)
 
-        # Identify cells converted to intergers
-        ints_tf_df = ints.map(pd.api.types.is_integer)
+    else: # parallelize iterations for large manifests
+        pandarallel.initialize(verbose = 1)
+        ints = df.parallel_map(lambda x: convert_ints(x), na_action='ignore').fillna(False)
 
-        # convert strings to numerical dtype (float) if possible, preserve non-numerical strings
-        for col in org_df.columns:
-            float_df[col]=pd.to_numeric(float_df[col], errors='coerce')
+    # Identify cells converted to integers
+    is_int = ints.map(pd.api.types.is_integer)
 
-            # replace values that couldn't be converted to float with the original str values
-            float_df[col].fillna(org_df[col][float_df[col].isna()], inplace=True)
+    return ints, is_int
 
-        # Store values that were converted to type int in the final dataframe
-        processed_df=float_df.mask(ints_tf_df, other = ints)
+def convert_ints(x: str) -> Union[np.int64, bool]:
+    """
+    Lambda function to convert a string to an integer if possible, otherwise returns False
+    Args:
+        x: string to attempt conversion to int
+    Returns:
+        x converted to type int if possible, otherwise False
+    """
+    return np.int64(x) if str.isdigit(x) else False
+
+def convert_floats(df: pd.DataFrame) -> pd.DataFrame:
+    """
+    Convert strings that represent floats to type float
+    Args:
+        df: dataframe with nulls masked as empty strings
+    Returns:
+        float_df: dataframe with values that were converted to type float. Columns are type object
+    """
+    # create a separate copy of the manifest
+    # before beginning conversions to store float values
+    float_df=deepcopy(df)
+
+    # convert strings to numerical dtype (float) if possible, preserve non-numerical strings
+    for col in df.columns:
+        float_df[col]=pd.to_numeric(float_df[col], errors='coerce').astype('object')
+
+        # replace values that couldn't be converted to float with the original str values
+        float_df[col].fillna(df[col][float_df[col].isna()], inplace=True)
+    return float_df
 
 def _parse_dates(date_string):
     try:

From 280a14a74629dc435f496bca19ff268df34442de Mon Sep 17 00:00:00 2001
From: linglp
Date: Wed, 17 Jan 2024 15:49:03 -0500
Subject: [PATCH 068/199] update test; clean up logic in generator

---
 schematic/manifest/generator.py | 12 ++++-------
 tests/test_manifest.py          | 36 ++++++++++++++++++++++++++++++++-
 2 files changed, 39 insertions(+), 9 deletions(-)

diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py
index b93b110e4..f351de155 100644
--- a/schematic/manifest/generator.py
+++ b/schematic/manifest/generator.py
@@ -1550,19 +1550,15 @@ def create_manifests(jsonld:str, data_types:list, access_token:Optional[str]=Non
             # the number of dataset_ids (if applicable)
             len_data_types = len(data_types)
             len_dataset_ids = len(dataset_ids)
-
-            try:
-                len_data_types == len_dataset_ids
-            except:
+
+            if len_data_types != len_dataset_ids:
                 raise ValueError(
                     f"There is a mismatch in the number of data_types and dataset_ids that were "
                     f"submitted. Please check your submission and try again."
                 )
 
             # Raise an error if used in conjunction with datatype = 'all_manifests'
-            try:
-                data_types[0] != 'all manifests'
-            except:
+            if data_types[0] == 'all manifests':
                 raise ValueError(
                     f"When submitting 'all manifests' as the data_type, you cannot also submit dataset_id. "
                     f"Please check your submission and try again."
@@ -1581,7 +1577,7 @@ def create_manifests(jsonld:str, data_types:list, access_token:Optional[str]=Non
         if output_format != "excel":
             result = ManifestGenerator.create_single_manifest(jsonld=jsonld, data_type=component, output_format=output_format, title=t, access_token=access_token, strict=strict, use_annotations=use_annotations)
             all_results.append(result)
-        else: 
+        else:
             logger.error('Currently we do not support returning multiple files as Excel format at once. Please choose a different output format. 
') else: for i, dt in enumerate(data_types): diff --git a/tests/test_manifest.py b/tests/test_manifest.py index 2ea337ca7..57f500867 100644 --- a/tests/test_manifest.py +++ b/tests/test_manifest.py @@ -10,7 +10,7 @@ from schematic.schemas.generator import SchemaGenerator from schematic.configuration.configuration import Configuration from schematic.utils.google_api_utils import execute_google_api_requests - +from schematic_api.api import create_app logging.basicConfig(level=logging.DEBUG) @@ -79,6 +79,10 @@ def manifest(dataset_id, manifest_generator, request): yield manifest, use_annotations, data_type, sheet_url +@pytest.fixture(scope="class") +def app(): + app = create_app() + yield app class TestManifestGenerator: @@ -432,4 +436,34 @@ def test_populate_existing_excel_spreadsheet(self, simple_manifest_generator, si # remove file os.remove(dummy_output_path) + + @pytest.mark.parametrize("return_output", ["Mock excel file path", "Mock google sheet link"]) + def test_create_single_manifest(self, simple_manifest_generator, helpers, return_output): + with patch("schematic.manifest.generator.ManifestGenerator.get_manifest", return_value=return_output): + json_ld_path = helpers.get_data_path("example.model.jsonld") + data_type = "Patient" + + result = simple_manifest_generator.create_single_manifest(jsonld=json_ld_path, data_type=data_type, output_format="google_sheet", use_annotations=False) + assert result == return_output + + @pytest.mark.parametrize("test_data_types", [["Patient", "Biospecimen"], ["all manifests"]]) + def test_create_manifests_raise_errors(self, simple_manifest_generator, helpers, test_data_types): + with pytest.raises(ValueError) as exception_info: + json_ld_path = helpers.get_data_path("example.model.jsonld") + data_types = test_data_types + dataset_ids=["syn123456"] + + simple_manifest_generator.create_manifests(jsonld=json_ld_path, data_types=data_types, dataset_ids=dataset_ids, output_format="google_sheet", use_annotations=False) + + @pytest.mark.parametrize("test_data_types, dataset_ids, expected_result", [ + (["Patient", "Biospecimen"], ["mock dataset id1", "mock dataset id2"], ["mock google sheet link", "mock google sheet link"]), + (["Patient"], ["mock dataset id1"], ["mock google sheet link"]), + ]) + def test_create_manifests(self, simple_manifest_generator, helpers, test_data_types, dataset_ids, expected_result): + with patch("schematic.manifest.generator.ManifestGenerator.create_single_manifest", return_value="mock google sheet link"): + json_ld_path = helpers.get_data_path("example.model.jsonld") + all_results = simple_manifest_generator.create_manifests(jsonld=json_ld_path, data_types=test_data_types, dataset_ids=dataset_ids, output_format="google_sheet", use_annotations=False) + assert all_results == expected_result + + From 7b734bcf17523e5b5899a4aacdb845352343ebef Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Wed, 17 Jan 2024 13:52:04 -0700 Subject: [PATCH 069/199] update lambda fxn --- schematic/models/validate_manifest.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/schematic/models/validate_manifest.py b/schematic/models/validate_manifest.py index 162e30ef2..66ca50513 100644 --- a/schematic/models/validate_manifest.py +++ b/schematic/models/validate_manifest.py @@ -10,6 +10,7 @@ import re import sys from time import perf_counter +from numbers import Number # allows specifying explicit variable types from typing import Any, Dict, Optional, Text, List @@ -235,7 +236,8 @@ def 
validate_manifest_values(self, manifest, jsonSchema, sg # numerical values need to be type string for the jsonValidator for col in manifest.select_dtypes(include=[int, np.int64, float, np.float64]).columns: manifest[col]=manifest[col].astype('string') - manifest = manifest.map(lambda x: str(x) if isinstance(x, (int, np.int64, float, np.float64)) else x, na_action='ignore') + + manifest = manifest.map(lambda x: str(x) if isinstance(x, Number) else x, na_action='ignore') annotations = json.loads(manifest.to_json(orient="records")) for i, annotation in enumerate(annotations): From 048a55db1a226418fb3db8640a8b3c0bb49124e6 Mon Sep 17 00:00:00 2001 From: linglp Date: Wed, 17 Jan 2024 15:53:12 -0500 Subject: [PATCH 070/199] remove unused import --- schematic/manifest/generator.py | 1 - 1 file changed, 1 deletion(-) diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py index f351de155..94af07a7c 100644 --- a/schematic/manifest/generator.py +++ b/schematic/manifest/generator.py @@ -10,7 +10,6 @@ import pygsheets as ps from tempfile import NamedTemporaryFile from typing import Dict, List, Optional, Tuple, Union, BinaryIO, Literal -from flask import send_from_directory from schematic.schemas.generator import SchemaGenerator from schematic.utils.google_api_utils import ( From 0d5319275dc022e1518530605325042092e69726 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Wed, 17 Jan 2024 13:58:16 -0700 Subject: [PATCH 071/199] use variable for col name --- tests/test_utils.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/tests/test_utils.py b/tests/test_utils.py index 2894ea479..e686897f1 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -240,6 +240,7 @@ def test_load_schema_org(self): class TestDfUtils: @pytest.mark.parametrize("preserve_raw_input", [True, False], ids=["Do not infer datatypes", "Infer datatypes"]) def test_load_df(self, helpers, preserve_raw_input): + test_col = "Check NA" file_path = helpers.get_data_path("mock_manifests", "Invalid_Test_Manifest.csv") df = df_utils.load_df(file_path, preserve_raw_input=preserve_raw_input, data_model=False) @@ -248,13 +249,13 @@ def test_load_df(self, helpers, preserve_raw_input): assert df.shape[0] == 3 if preserve_raw_input: - assert isinstance(df["Check NA"].iloc[0], str) - assert isinstance(df["Check NA"].iloc[1], str) - assert isinstance(df["Check NA"].iloc[2], str) + assert isinstance(df[test_col].iloc[0], str) + assert isinstance(df[test_col].iloc[1], str) + assert isinstance(df[test_col].iloc[2], str) else: - assert isinstance(df["Check NA"].iloc[0], np.int64) - assert isinstance(df["Check NA"].iloc[1], float) - assert isinstance(df["Check NA"].iloc[2], str) + assert isinstance(df[test_col].iloc[0], np.int64) + assert isinstance(df[test_col].iloc[1], float) + assert isinstance(df[test_col].iloc[2], str) def test_update_df_col_present(self, helpers): From 4ae25952cd4e4535b8dee782b2d3ac8455166fe7 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Wed, 17 Jan 2024 14:04:08 -0700 Subject: [PATCH 072/199] spacing --- schematic/models/validate_manifest.py | 1 - 1 file changed, 1 deletion(-) diff --git a/schematic/models/validate_manifest.py b/schematic/models/validate_manifest.py index 66ca50513..c1eee8dc5 100644 --- a/schematic/models/validate_manifest.py +++ b/schematic/models/validate_manifest.py @@ -236,7 +236,6 @@ def validate_manifest_values(self, manifest, jsonSchema, sg # numerical 
values need to be type string for the jsonValidator
         for col in manifest.select_dtypes(include=[int, np.int64, float, np.float64]).columns:
             manifest[col]=manifest[col].astype('string')
-
         manifest = manifest.map(lambda x: str(x) if isinstance(x, Number) else x, na_action='ignore')
         annotations = json.loads(manifest.to_json(orient="records"))
         for i, annotation in enumerate(annotations):

From 0f8f03091aa77925188f2cb400b50bdab386921a Mon Sep 17 00:00:00 2001
From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com>
Date: Thu, 11 Jan 2024 14:31:44 -0700
Subject: [PATCH 073/199] update synapse client dependency

---
 pyproject.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index 205030f32..8413cda00 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -54,7 +54,7 @@ pygsheets = "^2.0.4"
 PyYAML = "^6.0.0"
 rdflib = "^6.0.0"
 setuptools = "^66.0.0"
-synapseclient = "^3.1.1"
+synapseclient = "^3.2.0"
 tenacity = "^8.0.1"
 toml = "^0.10.2"
 Flask = "^2.0.0"

From ccc3946383532f1198e5afbb352e4825cda5a735 Mon Sep 17 00:00:00 2001
From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com>
Date: Thu, 18 Jan 2024 15:03:47 -0700
Subject: [PATCH 074/199] regen `.lock`

---
 poetry.lock | 1780 +++++++++++++++++++++++----------------------------
 1 file changed, 789 insertions(+), 991 deletions(-)

diff --git a/poetry.lock b/poetry.lock
index 746ce0027..e48b01605 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -2,21 +2,19 @@

 [[package]]
 name = "alabaster"
-version = "0.7.13"
-description = "A configurable sidebar-enabled Sphinx theme"
-category = "main"
+version = "0.7.16"
+description = "A light, configurable Sphinx theme"
 optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.9"
 files = [
-    {file = "alabaster-0.7.13-py3-none-any.whl", hash = "sha256:1ee19aca801bbabb5ba3f5f258e4422dfa86f82f3e9cefb0859b283cdd7f62a3"},
-    {file = "alabaster-0.7.13.tar.gz", hash = "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2"},
+    {file = "alabaster-0.7.16-py3-none-any.whl", hash = "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92"},
+    {file = "alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65"},
 ]

 [[package]]
 name = "altair"
 version = "4.2.0"
 description = "Altair: A declarative statistical visualization library for Python."
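
Before the rest of the regenerated lock file, an aside on the constraint bumped in patch 073: Poetry's caret operator keeps the leftmost non-zero version component fixed, so `synapseclient = "^3.2.0"` allows any 3.x release at or above 3.2.0 but excludes 4.0.0, which is why the bump forces the full `poetry.lock` regeneration that follows. A small sketch of the equivalent specifier set using the `packaging` library (assumed available; it is not part of this repo's code):

    from packaging.specifiers import SpecifierSet
    from packaging.version import Version

    # Poetry's "^3.2.0" is shorthand for ">=3.2.0,<4.0.0".
    caret_3_2_0 = SpecifierSet(">=3.2.0,<4.0.0")

    assert Version("3.2.0") in caret_3_2_0      # lower bound is inclusive
    assert Version("3.9.1") in caret_3_2_0      # minor/patch bumps are allowed
    assert Version("4.0.0") not in caret_3_2_0  # the next major is excluded
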
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -37,20 +35,20 @@ dev = ["black", "docutils", "flake8", "ipython", "m2r", "mistune (<2.0.0)", "pyt [[package]] name = "anyio" -version = "4.1.0" +version = "4.2.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" -category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "anyio-4.1.0-py3-none-any.whl", hash = "sha256:56a415fbc462291813a94528a779597226619c8e78af7de0507333f700011e5f"}, - {file = "anyio-4.1.0.tar.gz", hash = "sha256:5a0bec7085176715be77df87fc66d6c9d70626bd752fcc85f57cdbee5b3760da"}, + {file = "anyio-4.2.0-py3-none-any.whl", hash = "sha256:745843b39e829e108e518c489b31dc757de7d2131d53fac32bd8df268227bfee"}, + {file = "anyio-4.2.0.tar.gz", hash = "sha256:e1875bb4b4e2de1669f4bc7869b6d3f54231cdced71605e6e64c9be77e3be50f"}, ] [package.dependencies] exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} idna = ">=2.8" sniffio = ">=1.1" +typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} [package.extras] doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] @@ -61,7 +59,6 @@ trio = ["trio (>=0.23)"] name = "appnope" version = "0.1.3" description = "Disable App Nap on macOS >= 10.9" -category = "main" optional = false python-versions = "*" files = [ @@ -73,7 +70,6 @@ files = [ name = "argon2-cffi" version = "23.1.0" description = "Argon2 for Python" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -94,7 +90,6 @@ typing = ["mypy"] name = "argon2-cffi-bindings" version = "21.2.0" description = "Low-level CFFI bindings for Argon2" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -132,7 +127,6 @@ tests = ["pytest"] name = "arrow" version = "1.3.0" description = "Better dates & times for Python" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -146,13 +140,12 @@ types-python-dateutil = ">=2.8.10" [package.extras] doc = ["doc8", "sphinx (>=7.0.0)", "sphinx-autobuild", "sphinx-autodoc-typehints", "sphinx_rtd_theme (>=1.3.0)"] -test = ["dateparser (>=1.0.0,<2.0.0)", "pre-commit", "pytest", "pytest-cov", "pytest-mock", "pytz (==2021.1)", "simplejson (>=3.0.0,<4.0.0)"] +test = ["dateparser (==1.*)", "pre-commit", "pytest", "pytest-cov", "pytest-mock", "pytz (==2021.1)", "simplejson (==3.*)"] [[package]] name = "astroid" version = "2.15.8" description = "An abstract syntax tree for Python with inference support." 
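
Pausing the lock entries briefly: the `applymap` to `map` renames in patches 052, 053, 054, and 069 above track pandas 2.1, which deprecated `DataFrame.applymap` in favour of the elementwise `DataFrame.map`. A hedged sketch of the migration, mirroring the lambdas those patches use (pandas >= 2.1 assumed):

    import numpy as np
    import pandas as pd

    df = pd.DataFrame({"a": ["1", " x ", None], "b": ["22", "3", ""]})

    # Elementwise apply; na_action="ignore" skips missing values entirely.
    stripped = df.map(lambda x: x.strip() if isinstance(x, str) else x)
    ints = df.map(
        lambda x: np.int64(x) if str.isdigit(x) else False, na_action="ignore"
    ).fillna(False)

Note that, exactly as with the old `applymap`, `map` returns a new DataFrame rather than mutating in place, so calls like `self.manifest.map(...)` in GE_Helpers.py only take effect if the result is assigned back.
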
-category = "dev" optional = false python-versions = ">=3.7.2" files = [ @@ -169,7 +162,6 @@ wrapt = {version = ">=1.11,<2", markers = "python_version < \"3.11\""} name = "asttokens" version = "2.4.1" description = "Annotate AST trees with source code positions" -category = "main" optional = false python-versions = "*" files = [ @@ -188,7 +180,6 @@ test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"] name = "async-lru" version = "2.0.4" description = "Simple LRU cache for asyncio" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -201,33 +192,32 @@ typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} [[package]] name = "attrs" -version = "23.1.0" +version = "23.2.0" description = "Classes Without Boilerplate" -category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, - {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, ] [package.extras] cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] -dev = ["attrs[docs,tests]", "pre-commit"] +dev = ["attrs[tests]", "pre-commit"] docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] tests = ["attrs[tests-no-zope]", "zope-interface"] -tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] [[package]] name = "babel" -version = "2.13.1" +version = "2.14.0" description = "Internationalization utilities" -category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "Babel-2.13.1-py3-none-any.whl", hash = "sha256:7077a4984b02b6727ac10f1f7294484f737443d7e2e66c5e4380e41a3ae0b4ed"}, - {file = "Babel-2.13.1.tar.gz", hash = "sha256:33e0952d7dd6374af8dbf6768cc4ddf3ccfefc244f9986d4074704f2fbd18900"}, + {file = "Babel-2.14.0-py3-none-any.whl", hash = "sha256:efb1a25b7118e67ce3a259bed20545c29cb68be8ad2c784c83689981b7a57287"}, + {file = "Babel-2.14.0.tar.gz", hash = "sha256:6919867db036398ba21eb5c7a0f6b28ab8cbc3ae7a73a44ebe34ae74a4e7d363"}, ] [package.extras] @@ -246,49 +236,54 @@ files = [ [[package]] name = "beautifulsoup4" -version = "4.12.2" +version = "4.12.3" description = "Screen-scraping library" -category = "main" optional = false python-versions = ">=3.6.0" files = [ - {file = "beautifulsoup4-4.12.2-py3-none-any.whl", hash = "sha256:bd2520ca0d9d7d12694a53d44ac482d181b4ec1888909b035a3dbf40d0f57d4a"}, - {file = "beautifulsoup4-4.12.2.tar.gz", hash = "sha256:492bbc69dca35d12daac71c4db1bfff0c876c00ef4a2ffacce226d4638eb72da"}, + {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"}, + {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"}, ] [package.dependencies] soupsieve = ">1.2" [package.extras] +cchardet = ["cchardet"] +chardet = ["chardet"] +charset-normalizer = 
["charset-normalizer"] html5lib = ["html5lib"] lxml = ["lxml"] [[package]] name = "black" -version = "23.11.0" +version = "23.12.1" description = "The uncompromising code formatter." -category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "black-23.11.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dbea0bb8575c6b6303cc65017b46351dc5953eea5c0a59d7b7e3a2d2f433a911"}, - {file = "black-23.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:412f56bab20ac85927f3a959230331de5614aecda1ede14b373083f62ec24e6f"}, - {file = "black-23.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d136ef5b418c81660ad847efe0e55c58c8208b77a57a28a503a5f345ccf01394"}, - {file = "black-23.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:6c1cac07e64433f646a9a838cdc00c9768b3c362805afc3fce341af0e6a9ae9f"}, - {file = "black-23.11.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cf57719e581cfd48c4efe28543fea3d139c6b6f1238b3f0102a9c73992cbb479"}, - {file = "black-23.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:698c1e0d5c43354ec5d6f4d914d0d553a9ada56c85415700b81dc90125aac244"}, - {file = "black-23.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:760415ccc20f9e8747084169110ef75d545f3b0932ee21368f63ac0fee86b221"}, - {file = "black-23.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:58e5f4d08a205b11800332920e285bd25e1a75c54953e05502052738fe16b3b5"}, - {file = "black-23.11.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:45aa1d4675964946e53ab81aeec7a37613c1cb71647b5394779e6efb79d6d187"}, - {file = "black-23.11.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c44b7211a3a0570cc097e81135faa5f261264f4dfaa22bd5ee2875a4e773bd6"}, - {file = "black-23.11.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a9acad1451632021ee0d146c8765782a0c3846e0e0ea46659d7c4f89d9b212b"}, - {file = "black-23.11.0-cp38-cp38-win_amd64.whl", hash = "sha256:fc7f6a44d52747e65a02558e1d807c82df1d66ffa80a601862040a43ec2e3142"}, - {file = "black-23.11.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7f622b6822f02bfaf2a5cd31fdb7cd86fcf33dab6ced5185c35f5db98260b055"}, - {file = "black-23.11.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:250d7e60f323fcfc8ea6c800d5eba12f7967400eb6c2d21ae85ad31c204fb1f4"}, - {file = "black-23.11.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5133f5507007ba08d8b7b263c7aa0f931af5ba88a29beacc4b2dc23fcefe9c06"}, - {file = "black-23.11.0-cp39-cp39-win_amd64.whl", hash = "sha256:421f3e44aa67138ab1b9bfbc22ee3780b22fa5b291e4db8ab7eee95200726b07"}, - {file = "black-23.11.0-py3-none-any.whl", hash = "sha256:54caaa703227c6e0c87b76326d0862184729a69b73d3b7305b6288e1d830067e"}, - {file = "black-23.11.0.tar.gz", hash = "sha256:4c68855825ff432d197229846f971bc4d6666ce90492e5b02013bcaca4d9ab05"}, + {file = "black-23.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0aaf6041986767a5e0ce663c7a2f0e9eaf21e6ff87a5f95cbf3675bfd4c41d2"}, + {file = "black-23.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c88b3711d12905b74206227109272673edce0cb29f27e1385f33b0163c414bba"}, + {file = "black-23.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a920b569dc6b3472513ba6ddea21f440d4b4c699494d2e972a1753cdc25df7b0"}, + {file = "black-23.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:3fa4be75ef2a6b96ea8d92b1587dd8cb3a35c7e3d51f0738ced0781c3aa3a5a3"}, + {file = "black-23.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:8d4df77958a622f9b5a4c96edb4b8c0034f8434032ab11077ec6c56ae9f384ba"}, + {file = "black-23.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:602cfb1196dc692424c70b6507593a2b29aac0547c1be9a1d1365f0d964c353b"}, + {file = "black-23.12.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c4352800f14be5b4864016882cdba10755bd50805c95f728011bcb47a4afd59"}, + {file = "black-23.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:0808494f2b2df923ffc5723ed3c7b096bd76341f6213989759287611e9837d50"}, + {file = "black-23.12.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:25e57fd232a6d6ff3f4478a6fd0580838e47c93c83eaf1ccc92d4faf27112c4e"}, + {file = "black-23.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2d9e13db441c509a3763a7a3d9a49ccc1b4e974a47be4e08ade2a228876500ec"}, + {file = "black-23.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1bd9c210f8b109b1762ec9fd36592fdd528485aadb3f5849b2740ef17e674e"}, + {file = "black-23.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:ae76c22bde5cbb6bfd211ec343ded2163bba7883c7bc77f6b756a1049436fbb9"}, + {file = "black-23.12.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1fa88a0f74e50e4487477bc0bb900c6781dbddfdfa32691e780bf854c3b4a47f"}, + {file = "black-23.12.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a4d6a9668e45ad99d2f8ec70d5c8c04ef4f32f648ef39048d010b0689832ec6d"}, + {file = "black-23.12.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b18fb2ae6c4bb63eebe5be6bd869ba2f14fd0259bda7d18a46b764d8fb86298a"}, + {file = "black-23.12.1-cp38-cp38-win_amd64.whl", hash = "sha256:c04b6d9d20e9c13f43eee8ea87d44156b8505ca8a3c878773f68b4e4812a421e"}, + {file = "black-23.12.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e1b38b3135fd4c025c28c55ddfc236b05af657828a8a6abe5deec419a0b7055"}, + {file = "black-23.12.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4f0031eaa7b921db76decd73636ef3a12c942ed367d8c3841a0739412b260a54"}, + {file = "black-23.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97e56155c6b737854e60a9ab1c598ff2533d57e7506d97af5481141671abf3ea"}, + {file = "black-23.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:dd15245c8b68fe2b6bd0f32c1556509d11bb33aec9b5d0866dd8e2ed3dba09c2"}, + {file = "black-23.12.1-py3-none-any.whl", hash = "sha256:78baad24af0f033958cad29731e27363183e140962595def56423e626f4bee3e"}, + {file = "black-23.12.1.tar.gz", hash = "sha256:4ce3ef14ebe8d9509188014d96af1c456a910d5b5cbf434a09fef7e024b3d0d5"}, ] [package.dependencies] @@ -302,7 +297,7 @@ typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} [package.extras] colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)"] +d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] @@ -310,7 +305,6 @@ uvloop = ["uvloop (>=0.15.2)"] name = "bleach" version = "6.1.0" description = "An easy safelist-based HTML-sanitizing tool." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -329,7 +323,6 @@ css = ["tinycss2 (>=1.1.0,<1.3)"] name = "cachetools" version = "5.3.2" description = "Extensible memoizing collections and decorators" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -341,7 +334,6 @@ files = [ name = "certifi" version = "2023.11.17" description = "Python package for providing Mozilla's CA Bundle." 
-category = "main" optional = false python-versions = ">=3.6" files = [ @@ -353,7 +345,6 @@ files = [ name = "cffi" version = "1.16.0" description = "Foreign Function Interface for Python calling C code." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -418,7 +409,6 @@ pycparser = "*" name = "charset-normalizer" version = "3.3.2" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -518,7 +508,6 @@ files = [ name = "click" version = "8.1.7" description = "Composable command line interface toolkit" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -533,7 +522,6 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "click-log" version = "0.4.0" description = "Logging integration for Click" -category = "main" optional = false python-versions = "*" files = [ @@ -548,7 +536,6 @@ click = "*" name = "clickclick" version = "20.10.2" description = "Click utility functions" -category = "main" optional = false python-versions = "*" files = [ @@ -564,7 +551,6 @@ PyYAML = ">=3.11" name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -574,14 +560,13 @@ files = [ [[package]] name = "comm" -version = "0.2.0" +version = "0.2.1" description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." -category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "comm-0.2.0-py3-none-any.whl", hash = "sha256:2da8d9ebb8dd7bfc247adaff99f24dce705638a8042b85cb995066793e391001"}, - {file = "comm-0.2.0.tar.gz", hash = "sha256:a517ea2ca28931c7007a7a99c562a0fa5883cfb48963140cf642c41c948498be"}, + {file = "comm-0.2.1-py3-none-any.whl", hash = "sha256:87928485c0dfc0e7976fd89fc1e187023cf587e7c353e4a9b417555b44adf021"}, + {file = "comm-0.2.1.tar.gz", hash = "sha256:0bc91edae1344d39d3661dcbc36937181fdaddb304790458f8b044dbc064b89a"}, ] [package.dependencies] @@ -594,7 +579,6 @@ test = ["pytest"] name = "connexion" version = "2.14.2" description = "Connexion - API first applications with OpenAPI/Swagger and Flask" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -623,64 +607,63 @@ tests = ["MarkupSafe (>=0.23)", "aiohttp (>=2.3.10,<4)", "aiohttp-jinja2 (>=0.14 [[package]] name = "coverage" -version = "7.3.2" +version = "7.4.0" description = "Code coverage measurement for Python" -category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d872145f3a3231a5f20fd48500274d7df222e291d90baa2026cc5152b7ce86bf"}, - {file = "coverage-7.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:310b3bb9c91ea66d59c53fa4989f57d2436e08f18fb2f421a1b0b6b8cc7fffda"}, - {file = "coverage-7.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f47d39359e2c3779c5331fc740cf4bce6d9d680a7b4b4ead97056a0ae07cb49a"}, - {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa72dbaf2c2068404b9870d93436e6d23addd8bbe9295f49cbca83f6e278179c"}, - {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:beaa5c1b4777f03fc63dfd2a6bd820f73f036bfb10e925fce067b00a340d0f3f"}, - {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:dbc1b46b92186cc8074fee9d9fbb97a9dd06c6cbbef391c2f59d80eabdf0faa6"}, - {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:315a989e861031334d7bee1f9113c8770472db2ac484e5b8c3173428360a9148"}, - {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d1bc430677773397f64a5c88cb522ea43175ff16f8bfcc89d467d974cb2274f9"}, - {file = "coverage-7.3.2-cp310-cp310-win32.whl", hash = "sha256:a889ae02f43aa45032afe364c8ae84ad3c54828c2faa44f3bfcafecb5c96b02f"}, - {file = "coverage-7.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:c0ba320de3fb8c6ec16e0be17ee1d3d69adcda99406c43c0409cb5c41788a611"}, - {file = "coverage-7.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ac8c802fa29843a72d32ec56d0ca792ad15a302b28ca6203389afe21f8fa062c"}, - {file = "coverage-7.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:89a937174104339e3a3ffcf9f446c00e3a806c28b1841c63edb2b369310fd074"}, - {file = "coverage-7.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e267e9e2b574a176ddb983399dec325a80dbe161f1a32715c780b5d14b5f583a"}, - {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2443cbda35df0d35dcfb9bf8f3c02c57c1d6111169e3c85fc1fcc05e0c9f39a3"}, - {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4175e10cc8dda0265653e8714b3174430b07c1dca8957f4966cbd6c2b1b8065a"}, - {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf38419fb1a347aaf63481c00f0bdc86889d9fbf3f25109cf96c26b403fda1"}, - {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5c913b556a116b8d5f6ef834038ba983834d887d82187c8f73dec21049abd65c"}, - {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1981f785239e4e39e6444c63a98da3a1db8e971cb9ceb50a945ba6296b43f312"}, - {file = "coverage-7.3.2-cp311-cp311-win32.whl", hash = "sha256:43668cabd5ca8258f5954f27a3aaf78757e6acf13c17604d89648ecc0cc66640"}, - {file = "coverage-7.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10c39c0452bf6e694511c901426d6b5ac005acc0f78ff265dbe36bf81f808a2"}, - {file = "coverage-7.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4cbae1051ab791debecc4a5dcc4a1ff45fc27b91b9aee165c8a27514dd160836"}, - {file = "coverage-7.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12d15ab5833a997716d76f2ac1e4b4d536814fc213c85ca72756c19e5a6b3d63"}, - {file = "coverage-7.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c7bba973ebee5e56fe9251300c00f1579652587a9f4a5ed8404b15a0471f216"}, - {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe494faa90ce6381770746077243231e0b83ff3f17069d748f645617cefe19d4"}, - {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6e9589bd04d0461a417562649522575d8752904d35c12907d8c9dfeba588faf"}, - {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d51ac2a26f71da1b57f2dc81d0e108b6ab177e7d30e774db90675467c847bbdf"}, - {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:99b89d9f76070237975b315b3d5f4d6956ae354a4c92ac2388a5695516e47c84"}, - {file = 
"coverage-7.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fa28e909776dc69efb6ed975a63691bc8172b64ff357e663a1bb06ff3c9b589a"}, - {file = "coverage-7.3.2-cp312-cp312-win32.whl", hash = "sha256:289fe43bf45a575e3ab10b26d7b6f2ddb9ee2dba447499f5401cfb5ecb8196bb"}, - {file = "coverage-7.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:7dbc3ed60e8659bc59b6b304b43ff9c3ed858da2839c78b804973f613d3e92ed"}, - {file = "coverage-7.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f94b734214ea6a36fe16e96a70d941af80ff3bfd716c141300d95ebc85339738"}, - {file = "coverage-7.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:af3d828d2c1cbae52d34bdbb22fcd94d1ce715d95f1a012354a75e5913f1bda2"}, - {file = "coverage-7.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:630b13e3036e13c7adc480ca42fa7afc2a5d938081d28e20903cf7fd687872e2"}, - {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9eacf273e885b02a0273bb3a2170f30e2d53a6d53b72dbe02d6701b5296101c"}, - {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8f17966e861ff97305e0801134e69db33b143bbfb36436efb9cfff6ec7b2fd9"}, - {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b4275802d16882cf9c8b3d057a0839acb07ee9379fa2749eca54efbce1535b82"}, - {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:72c0cfa5250f483181e677ebc97133ea1ab3eb68645e494775deb6a7f6f83901"}, - {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cb536f0dcd14149425996821a168f6e269d7dcd2c273a8bff8201e79f5104e76"}, - {file = "coverage-7.3.2-cp38-cp38-win32.whl", hash = "sha256:307adb8bd3abe389a471e649038a71b4eb13bfd6b7dd9a129fa856f5c695cf92"}, - {file = "coverage-7.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:88ed2c30a49ea81ea3b7f172e0269c182a44c236eb394718f976239892c0a27a"}, - {file = "coverage-7.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b631c92dfe601adf8f5ebc7fc13ced6bb6e9609b19d9a8cd59fa47c4186ad1ce"}, - {file = "coverage-7.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d3d9df4051c4a7d13036524b66ecf7a7537d14c18a384043f30a303b146164e9"}, - {file = "coverage-7.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f7363d3b6a1119ef05015959ca24a9afc0ea8a02c687fe7e2d557705375c01f"}, - {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f11cc3c967a09d3695d2a6f03fb3e6236622b93be7a4b5dc09166a861be6d25"}, - {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:149de1d2401ae4655c436a3dced6dd153f4c3309f599c3d4bd97ab172eaf02d9"}, - {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3a4006916aa6fee7cd38db3bfc95aa9c54ebb4ffbfc47c677c8bba949ceba0a6"}, - {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9028a3871280110d6e1aa2df1afd5ef003bab5fb1ef421d6dc748ae1c8ef2ebc"}, - {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9f805d62aec8eb92bab5b61c0f07329275b6f41c97d80e847b03eb894f38d083"}, - {file = "coverage-7.3.2-cp39-cp39-win32.whl", hash = "sha256:d1c88ec1a7ff4ebca0219f5b1ef863451d828cccf889c173e1253aa84b1e07ce"}, - {file = "coverage-7.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b4767da59464bb593c07afceaddea61b154136300881844768037fd5e859353f"}, - {file = 
"coverage-7.3.2-pp38.pp39.pp310-none-any.whl", hash = "sha256:ae97af89f0fbf373400970c0a21eef5aa941ffeed90aee43650b81f7d7f47637"}, - {file = "coverage-7.3.2.tar.gz", hash = "sha256:be32ad29341b0170e795ca590e1c07e81fc061cb5b10c74ce7203491484404ef"}, + {file = "coverage-7.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36b0ea8ab20d6a7564e89cb6135920bc9188fb5f1f7152e94e8300b7b189441a"}, + {file = "coverage-7.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0676cd0ba581e514b7f726495ea75aba3eb20899d824636c6f59b0ed2f88c471"}, + {file = "coverage-7.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ca5c71a5a1765a0f8f88022c52b6b8be740e512980362f7fdbb03725a0d6b9"}, + {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7c97726520f784239f6c62506bc70e48d01ae71e9da128259d61ca5e9788516"}, + {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:815ac2d0f3398a14286dc2cea223a6f338109f9ecf39a71160cd1628786bc6f5"}, + {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:80b5ee39b7f0131ebec7968baa9b2309eddb35b8403d1869e08f024efd883566"}, + {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5b2ccb7548a0b65974860a78c9ffe1173cfb5877460e5a229238d985565574ae"}, + {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:995ea5c48c4ebfd898eacb098164b3cc826ba273b3049e4a889658548e321b43"}, + {file = "coverage-7.4.0-cp310-cp310-win32.whl", hash = "sha256:79287fd95585ed36e83182794a57a46aeae0b64ca53929d1176db56aacc83451"}, + {file = "coverage-7.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:5b14b4f8760006bfdb6e08667af7bc2d8d9bfdb648351915315ea17645347137"}, + {file = "coverage-7.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:04387a4a6ecb330c1878907ce0dc04078ea72a869263e53c72a1ba5bbdf380ca"}, + {file = "coverage-7.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea81d8f9691bb53f4fb4db603203029643caffc82bf998ab5b59ca05560f4c06"}, + {file = "coverage-7.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74775198b702868ec2d058cb92720a3c5a9177296f75bd97317c787daf711505"}, + {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76f03940f9973bfaee8cfba70ac991825611b9aac047e5c80d499a44079ec0bc"}, + {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:485e9f897cf4856a65a57c7f6ea3dc0d4e6c076c87311d4bc003f82cfe199d25"}, + {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6ae8c9d301207e6856865867d762a4b6fd379c714fcc0607a84b92ee63feff70"}, + {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bf477c355274a72435ceb140dc42de0dc1e1e0bf6e97195be30487d8eaaf1a09"}, + {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:83c2dda2666fe32332f8e87481eed056c8b4d163fe18ecc690b02802d36a4d26"}, + {file = "coverage-7.4.0-cp311-cp311-win32.whl", hash = "sha256:697d1317e5290a313ef0d369650cfee1a114abb6021fa239ca12b4849ebbd614"}, + {file = "coverage-7.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:26776ff6c711d9d835557ee453082025d871e30b3fd6c27fcef14733f67f0590"}, + {file = "coverage-7.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:13eaf476ec3e883fe3e5fe3707caeb88268a06284484a3daf8250259ef1ba143"}, + {file = "coverage-7.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846f52f46e212affb5bcf131c952fb4075b55aae6b61adc9856222df89cbe3e2"}, + {file = "coverage-7.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26f66da8695719ccf90e794ed567a1549bb2644a706b41e9f6eae6816b398c4a"}, + {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:164fdcc3246c69a6526a59b744b62e303039a81e42cfbbdc171c91a8cc2f9446"}, + {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:316543f71025a6565677d84bc4df2114e9b6a615aa39fb165d697dba06a54af9"}, + {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bb1de682da0b824411e00a0d4da5a784ec6496b6850fdf8c865c1d68c0e318dd"}, + {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:0e8d06778e8fbffccfe96331a3946237f87b1e1d359d7fbe8b06b96c95a5407a"}, + {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a56de34db7b7ff77056a37aedded01b2b98b508227d2d0979d373a9b5d353daa"}, + {file = "coverage-7.4.0-cp312-cp312-win32.whl", hash = "sha256:51456e6fa099a8d9d91497202d9563a320513fcf59f33991b0661a4a6f2ad450"}, + {file = "coverage-7.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:cd3c1e4cb2ff0083758f09be0f77402e1bdf704adb7f89108007300a6da587d0"}, + {file = "coverage-7.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e9d1bf53c4c8de58d22e0e956a79a5b37f754ed1ffdbf1a260d9dcfa2d8a325e"}, + {file = "coverage-7.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:109f5985182b6b81fe33323ab4707011875198c41964f014579cf82cebf2bb85"}, + {file = "coverage-7.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cc9d4bc55de8003663ec94c2f215d12d42ceea128da8f0f4036235a119c88ac"}, + {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc6d65b21c219ec2072c1293c505cf36e4e913a3f936d80028993dd73c7906b1"}, + {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a10a4920def78bbfff4eff8a05c51be03e42f1c3735be42d851f199144897ba"}, + {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b8e99f06160602bc64da35158bb76c73522a4010f0649be44a4e167ff8555952"}, + {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7d360587e64d006402b7116623cebf9d48893329ef035278969fa3bbf75b697e"}, + {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:29f3abe810930311c0b5d1a7140f6395369c3db1be68345638c33eec07535105"}, + {file = "coverage-7.4.0-cp38-cp38-win32.whl", hash = "sha256:5040148f4ec43644702e7b16ca864c5314ccb8ee0751ef617d49aa0e2d6bf4f2"}, + {file = "coverage-7.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:9864463c1c2f9cb3b5db2cf1ff475eed2f0b4285c2aaf4d357b69959941aa555"}, + {file = "coverage-7.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:936d38794044b26c99d3dd004d8af0035ac535b92090f7f2bb5aa9c8e2f5cd42"}, + {file = "coverage-7.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:799c8f873794a08cdf216aa5d0531c6a3747793b70c53f70e98259720a6fe2d7"}, + {file = "coverage-7.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7defbb9737274023e2d7af02cac77043c86ce88a907c58f42b580a97d5bcca9"}, + {file = 
"coverage-7.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a1526d265743fb49363974b7aa8d5899ff64ee07df47dd8d3e37dcc0818f09ed"}, + {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf635a52fc1ea401baf88843ae8708591aa4adff875e5c23220de43b1ccf575c"}, + {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:756ded44f47f330666843b5781be126ab57bb57c22adbb07d83f6b519783b870"}, + {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0eb3c2f32dabe3a4aaf6441dde94f35687224dfd7eb2a7f47f3fd9428e421058"}, + {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bfd5db349d15c08311702611f3dccbef4b4e2ec148fcc636cf8739519b4a5c0f"}, + {file = "coverage-7.4.0-cp39-cp39-win32.whl", hash = "sha256:53d7d9158ee03956e0eadac38dfa1ec8068431ef8058fe6447043db1fb40d932"}, + {file = "coverage-7.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:cfd2a8b6b0d8e66e944d47cdec2f47c48fef2ba2f2dff5a9a75757f64172857e"}, + {file = "coverage-7.4.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:c530833afc4707fe48524a44844493f36d8727f04dcce91fb978c414a8556cc6"}, + {file = "coverage-7.4.0.tar.gz", hash = "sha256:707c0f58cb1712b8809ece32b68996ee1e609f71bd14615bd8f87a1293cb610e"}, ] [package.dependencies] @@ -693,7 +676,6 @@ toml = ["tomli"] name = "cryptography" version = "41.0.7" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -737,14 +719,13 @@ test-randomorder = ["pytest-randomly"] [[package]] name = "dataclasses-json" -version = "0.6.1" +version = "0.6.3" description = "Easily serialize dataclasses to and from JSON." -category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ - {file = "dataclasses_json-0.6.1-py3-none-any.whl", hash = "sha256:1bd8418a61fe3d588bb0079214d7fb71d44937da40742b787256fd53b26b6c80"}, - {file = "dataclasses_json-0.6.1.tar.gz", hash = "sha256:a53c220c35134ce08211a1057fd0e5bf76dc5331627c6b241cacbc570a89faae"}, + {file = "dataclasses_json-0.6.3-py3-none-any.whl", hash = "sha256:4aeb343357997396f6bca1acae64e486c3a723d8f5c76301888abeccf0c45176"}, + {file = "dataclasses_json-0.6.3.tar.gz", hash = "sha256:35cb40aae824736fdf959801356641836365219cfe14caeb115c39136f775d2a"}, ] [package.dependencies] @@ -755,7 +736,6 @@ typing-inspect = ">=0.4.0,<1" name = "dateparser" version = "1.2.0" description = "Date parsing library designed to parse dates from HTML pages" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -778,7 +758,6 @@ langdetect = ["langdetect"] name = "debugpy" version = "1.8.0" description = "An implementation of the Debug Adapter Protocol for Python" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -806,7 +785,6 @@ files = [ name = "decorator" version = "5.1.1" description = "Decorators for Humans" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -818,7 +796,6 @@ files = [ name = "defusedxml" version = "0.7.1" description = "XML bomb protection for Python stdlib modules" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -830,7 +807,6 @@ files = [ name = "deprecated" version = "1.2.14" description = "Python @deprecated decorator to deprecate old python classes, functions or methods." 
-category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -848,7 +824,6 @@ dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] name = "deprecation" version = "2.1.0" description = "A library to handle automated deprecations" -category = "main" optional = false python-versions = "*" files = [ @@ -863,7 +838,6 @@ packaging = "*" name = "dill" version = "0.3.7" description = "serialize all of Python" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -878,7 +852,6 @@ graph = ["objgraph (>=1.7.2)"] name = "docutils" version = "0.20.1" description = "Docutils -- Python Documentation Utilities" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -890,7 +863,6 @@ files = [ name = "entrypoints" version = "0.4" description = "Discover and load entry points from installed packages." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -902,7 +874,6 @@ files = [ name = "et-xmlfile" version = "1.1.0" description = "An implementation of lxml.xmlfile for the standard library" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -914,7 +885,6 @@ files = [ name = "exceptiongroup" version = "1.2.0" description = "Backport of PEP 654 (exception groups)" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -943,7 +913,6 @@ testing = ["hatch", "pre-commit", "pytest", "tox"] name = "executing" version = "2.0.1" description = "Get the currently executing AST node of a frame, and other information" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -956,14 +925,13 @@ tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipyth [[package]] name = "fastjsonschema" -version = "2.19.0" +version = "2.19.1" description = "Fastest Python implementation of JSON schema" -category = "main" optional = false python-versions = "*" files = [ - {file = "fastjsonschema-2.19.0-py3-none-any.whl", hash = "sha256:b9fd1a2dd6971dbc7fee280a95bd199ae0dd9ce22beb91cc75e9c1c528a5170e"}, - {file = "fastjsonschema-2.19.0.tar.gz", hash = "sha256:e25df6647e1bc4a26070b700897b07b542ec898dd4f1f6ea013e7f6a88417225"}, + {file = "fastjsonschema-2.19.1-py3-none-any.whl", hash = "sha256:3672b47bc94178c9f23dbb654bf47440155d4db9df5f7bc47643315f9c405cd0"}, + {file = "fastjsonschema-2.19.1.tar.gz", hash = "sha256:e3126a94bdc4623d3de4485f8d468a12f02a67921315ddc87836d6e456dc789d"}, ] [package.extras] @@ -973,7 +941,6 @@ devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benc name = "flake8" version = "6.1.0" description = "the modular source code checker: pep8 pyflakes and co" -category = "dev" optional = false python-versions = ">=3.8.1" files = [ @@ -990,7 +957,6 @@ pyflakes = ">=3.1.0,<3.2.0" name = "flask" version = "2.1.3" description = "A simple framework for building complex web applications." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1013,7 +979,6 @@ dotenv = ["python-dotenv"] name = "flask-cors" version = "3.0.10" description = "A Flask extension adding a decorator for CORS support" -category = "main" optional = false python-versions = "*" files = [ @@ -1029,7 +994,6 @@ Six = "*" name = "fqdn" version = "1.5.1" description = "Validates fully-qualified domain names against RFC 1123, so that they are acceptable to modern bowsers" -category = "main" optional = false python-versions = ">=2.7, !=3.0, !=3.1, !=3.2, !=3.3, !=3.4, <4" files = [ @@ -1039,14 +1003,13 @@ files = [ [[package]] name = "google-api-core" -version = "2.14.0" +version = "2.15.0" description = "Google API client core library" -category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "google-api-core-2.14.0.tar.gz", hash = "sha256:5368a4502b793d9bbf812a5912e13e4e69f9bd87f6efb508460c43f5bbd1ce41"}, - {file = "google_api_core-2.14.0-py3-none-any.whl", hash = "sha256:de2fb50ed34d47ddbb2bd2dcf680ee8fead46279f4ed6b16de362aca23a18952"}, + {file = "google-api-core-2.15.0.tar.gz", hash = "sha256:abc978a72658f14a2df1e5e12532effe40f94f868f6e23d95133bd6abcca35ca"}, + {file = "google_api_core-2.15.0-py3-none-any.whl", hash = "sha256:2aa56d2be495551e66bbff7f729b790546f87d5c90e74781aa77233bcb395a8a"}, ] [package.dependencies] @@ -1062,18 +1025,17 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-api-python-client" -version = "2.108.0" +version = "2.114.0" description = "Google API Client Library for Python" -category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "google-api-python-client-2.108.0.tar.gz", hash = "sha256:6396efca83185fb205c0abdbc1c2ee57b40475578c6af37f6d0e30a639aade99"}, - {file = "google_api_python_client-2.108.0-py2.py3-none-any.whl", hash = "sha256:9d1327213e388943ebcd7db5ce6e7f47987a7e6874e3e1f6116010eea4a0e75d"}, + {file = "google-api-python-client-2.114.0.tar.gz", hash = "sha256:e041bbbf60e682261281e9d64b4660035f04db1cccba19d1d68eebc24d1465ed"}, + {file = "google_api_python_client-2.114.0-py2.py3-none-any.whl", hash = "sha256:690e0bb67d70ff6dea4e8a5d3738639c105a478ac35da153d3b2a384064e9e1a"}, ] [package.dependencies] -google-api-core = ">=1.31.5,<2.0.0 || >2.3.0,<3.0.0.dev0" +google-api-core = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0.dev0" google-auth = ">=1.19.0,<3.0.0.dev0" google-auth-httplib2 = ">=0.1.0" httplib2 = ">=0.15.0,<1.dev0" @@ -1081,14 +1043,13 @@ uritemplate = ">=3.0.1,<5" [[package]] name = "google-auth" -version = "2.23.4" +version = "2.26.2" description = "Google Authentication Library" -category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "google-auth-2.23.4.tar.gz", hash = "sha256:79905d6b1652187def79d491d6e23d0cbb3a21d3c7ba0dbaa9c8a01906b13ff3"}, - {file = "google_auth-2.23.4-py2.py3-none-any.whl", hash = "sha256:d4bbc92fe4b8bfd2f3e8d88e5ba7085935da208ee38a134fc280e7ce682a05f2"}, + {file = "google-auth-2.26.2.tar.gz", hash = "sha256:97327dbbf58cccb58fc5a1712bba403ae76668e64814eb30f7316f7e27126b81"}, + {file = "google_auth-2.26.2-py2.py3-none-any.whl", hash = "sha256:3f445c8ce9b61ed6459aad86d8ccdba4a9afed841b2d1451a11ef4db08957424"}, ] [package.dependencies] @@ -1107,7 +1068,6 @@ requests = ["requests (>=2.20.0,<3.0.0.dev0)"] name = "google-auth-httplib2" version = "0.1.1" description = "Google Authentication Library: httplib2 transport" -category = "main" optional = false python-versions = "*" files = [ @@ -1123,7 +1083,6 @@ httplib2 = ">=0.19.0" name 
= "google-auth-oauthlib" version = "0.8.0" description = "Google Authentication Library" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1140,14 +1099,13 @@ tool = ["click (>=6.0.0)"] [[package]] name = "googleapis-common-protos" -version = "1.61.0" +version = "1.62.0" description = "Common protobufs used in Google APIs" -category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "googleapis-common-protos-1.61.0.tar.gz", hash = "sha256:8a64866a97f6304a7179873a465d6eee97b7a24ec6cfd78e0f575e96b821240b"}, - {file = "googleapis_common_protos-1.61.0-py2.py3-none-any.whl", hash = "sha256:22f1915393bb3245343f6efe87f6fe868532efc12aa26b391b15132e1279f1c0"}, + {file = "googleapis-common-protos-1.62.0.tar.gz", hash = "sha256:83f0ece9f94e5672cced82f592d2a5edf527a96ed1794f0bab36d5735c996277"}, + {file = "googleapis_common_protos-1.62.0-py2.py3-none-any.whl", hash = "sha256:4750113612205514f9f6aa4cb00d523a94f3e8c06c5ad2fee466387dc4875f07"}, ] [package.dependencies] @@ -1160,7 +1118,6 @@ grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] name = "graphviz" version = "0.20.1" description = "Simple Python interface for Graphviz" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1177,7 +1134,6 @@ test = ["coverage", "mock (>=4)", "pytest (>=7)", "pytest-cov", "pytest-mock (>= name = "great-expectations" version = "0.15.50" description = "Always know what to expect from your data." -category = "main" optional = false python-versions = "*" files = [ @@ -1250,80 +1206,79 @@ vertica = ["sqlalchemy (>=1.3.18,<2.0.0)", "sqlalchemy-vertica-python (>=0.5.10) [[package]] name = "greenlet" -version = "3.0.1" +version = "3.0.3" description = "Lightweight in-process concurrent programming" -category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "greenlet-3.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f89e21afe925fcfa655965ca8ea10f24773a1791400989ff32f467badfe4a064"}, - {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28e89e232c7593d33cac35425b58950789962011cc274aa43ef8865f2e11f46d"}, - {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8ba29306c5de7717b5761b9ea74f9c72b9e2b834e24aa984da99cbfc70157fd"}, - {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19bbdf1cce0346ef7341705d71e2ecf6f41a35c311137f29b8a2dc2341374565"}, - {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:599daf06ea59bfedbec564b1692b0166a0045f32b6f0933b0dd4df59a854caf2"}, - {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b641161c302efbb860ae6b081f406839a8b7d5573f20a455539823802c655f63"}, - {file = "greenlet-3.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d57e20ba591727da0c230ab2c3f200ac9d6d333860d85348816e1dca4cc4792e"}, - {file = "greenlet-3.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5805e71e5b570d490938d55552f5a9e10f477c19400c38bf1d5190d760691846"}, - {file = "greenlet-3.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:52e93b28db27ae7d208748f45d2db8a7b6a380e0d703f099c949d0f0d80b70e9"}, - {file = "greenlet-3.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f7bfb769f7efa0eefcd039dd19d843a4fbfbac52f1878b1da2ed5793ec9b1a65"}, - {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:91e6c7db42638dc45cf2e13c73be16bf83179f7859b07cfc139518941320be96"}, - {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1757936efea16e3f03db20efd0cd50a1c86b06734f9f7338a90c4ba85ec2ad5a"}, - {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19075157a10055759066854a973b3d1325d964d498a805bb68a1f9af4aaef8ec"}, - {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9d21aaa84557d64209af04ff48e0ad5e28c5cca67ce43444e939579d085da72"}, - {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2847e5d7beedb8d614186962c3d774d40d3374d580d2cbdab7f184580a39d234"}, - {file = "greenlet-3.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:97e7ac860d64e2dcba5c5944cfc8fa9ea185cd84061c623536154d5a89237884"}, - {file = "greenlet-3.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b2c02d2ad98116e914d4f3155ffc905fd0c025d901ead3f6ed07385e19122c94"}, - {file = "greenlet-3.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:22f79120a24aeeae2b4471c711dcf4f8c736a2bb2fabad2a67ac9a55ea72523c"}, - {file = "greenlet-3.0.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:100f78a29707ca1525ea47388cec8a049405147719f47ebf3895e7509c6446aa"}, - {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60d5772e8195f4e9ebf74046a9121bbb90090f6550f81d8956a05387ba139353"}, - {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:daa7197b43c707462f06d2c693ffdbb5991cbb8b80b5b984007de431493a319c"}, - {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea6b8aa9e08eea388c5f7a276fabb1d4b6b9d6e4ceb12cc477c3d352001768a9"}, - {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d11ebbd679e927593978aa44c10fc2092bc454b7d13fdc958d3e9d508aba7d0"}, - {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dbd4c177afb8a8d9ba348d925b0b67246147af806f0b104af4d24f144d461cd5"}, - {file = "greenlet-3.0.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:20107edf7c2c3644c67c12205dc60b1bb11d26b2610b276f97d666110d1b511d"}, - {file = "greenlet-3.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8bef097455dea90ffe855286926ae02d8faa335ed8e4067326257cb571fc1445"}, - {file = "greenlet-3.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:b2d3337dcfaa99698aa2377c81c9ca72fcd89c07e7eb62ece3f23a3fe89b2ce4"}, - {file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:80ac992f25d10aaebe1ee15df45ca0d7571d0f70b645c08ec68733fb7a020206"}, - {file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:337322096d92808f76ad26061a8f5fccb22b0809bea39212cd6c406f6a7060d2"}, - {file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b9934adbd0f6e476f0ecff3c94626529f344f57b38c9a541f87098710b18af0a"}, - {file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc4d815b794fd8868c4d67602692c21bf5293a75e4b607bb92a11e821e2b859a"}, - {file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41bdeeb552d814bcd7fb52172b304898a35818107cc8778b5101423c9017b3de"}, - {file = "greenlet-3.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = 
"sha256:6e6061bf1e9565c29002e3c601cf68569c450be7fc3f7336671af7ddb4657166"}, - {file = "greenlet-3.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:fa24255ae3c0ab67e613556375a4341af04a084bd58764731972bcbc8baeba36"}, - {file = "greenlet-3.0.1-cp37-cp37m-win32.whl", hash = "sha256:b489c36d1327868d207002391f662a1d163bdc8daf10ab2e5f6e41b9b96de3b1"}, - {file = "greenlet-3.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:f33f3258aae89da191c6ebaa3bc517c6c4cbc9b9f689e5d8452f7aedbb913fa8"}, - {file = "greenlet-3.0.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:d2905ce1df400360463c772b55d8e2518d0e488a87cdea13dd2c71dcb2a1fa16"}, - {file = "greenlet-3.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a02d259510b3630f330c86557331a3b0e0c79dac3d166e449a39363beaae174"}, - {file = "greenlet-3.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55d62807f1c5a1682075c62436702aaba941daa316e9161e4b6ccebbbf38bda3"}, - {file = "greenlet-3.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3fcc780ae8edbb1d050d920ab44790201f027d59fdbd21362340a85c79066a74"}, - {file = "greenlet-3.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4eddd98afc726f8aee1948858aed9e6feeb1758889dfd869072d4465973f6bfd"}, - {file = "greenlet-3.0.1-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:eabe7090db68c981fca689299c2d116400b553f4b713266b130cfc9e2aa9c5a9"}, - {file = "greenlet-3.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f2f6d303f3dee132b322a14cd8765287b8f86cdc10d2cb6a6fae234ea488888e"}, - {file = "greenlet-3.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d923ff276f1c1f9680d32832f8d6c040fe9306cbfb5d161b0911e9634be9ef0a"}, - {file = "greenlet-3.0.1-cp38-cp38-win32.whl", hash = "sha256:0b6f9f8ca7093fd4433472fd99b5650f8a26dcd8ba410e14094c1e44cd3ceddd"}, - {file = "greenlet-3.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:990066bff27c4fcf3b69382b86f4c99b3652bab2a7e685d968cd4d0cfc6f67c6"}, - {file = "greenlet-3.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ce85c43ae54845272f6f9cd8320d034d7a946e9773c693b27d620edec825e376"}, - {file = "greenlet-3.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89ee2e967bd7ff85d84a2de09df10e021c9b38c7d91dead95b406ed6350c6997"}, - {file = "greenlet-3.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87c8ceb0cf8a5a51b8008b643844b7f4a8264a2c13fcbcd8a8316161725383fe"}, - {file = "greenlet-3.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d6a8c9d4f8692917a3dc7eb25a6fb337bff86909febe2f793ec1928cd97bedfc"}, - {file = "greenlet-3.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fbc5b8f3dfe24784cee8ce0be3da2d8a79e46a276593db6868382d9c50d97b1"}, - {file = "greenlet-3.0.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85d2b77e7c9382f004b41d9c72c85537fac834fb141b0296942d52bf03fe4a3d"}, - {file = "greenlet-3.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:696d8e7d82398e810f2b3622b24e87906763b6ebfd90e361e88eb85b0e554dc8"}, - {file = "greenlet-3.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:329c5a2e5a0ee942f2992c5e3ff40be03e75f745f48847f118a3cfece7a28546"}, - {file = "greenlet-3.0.1-cp39-cp39-win32.whl", hash = "sha256:cf868e08690cb89360eebc73ba4be7fb461cfbc6168dd88e2fbbe6f31812cd57"}, - {file = "greenlet-3.0.1-cp39-cp39-win_amd64.whl", hash = 
"sha256:ac4a39d1abae48184d420aa8e5e63efd1b75c8444dd95daa3e03f6c6310e9619"}, - {file = "greenlet-3.0.1.tar.gz", hash = "sha256:816bd9488a94cba78d93e1abb58000e8266fa9cc2aa9ccdd6eb0696acb24005b"}, + {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, + {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, + {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, + {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, + {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, + {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, + {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, + {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, + {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, + {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, + {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, + {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, + {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, + {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, + {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, + {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, ] [package.extras] -docs = ["Sphinx"] +docs = ["Sphinx", "furo"] test = ["objgraph", "psutil"] [[package]] name = "httplib2" version = "0.22.0" description = "A comprehensive HTTP client library." 
-category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -1338,7 +1293,6 @@ pyparsing = {version = ">=2.4.2,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.0.2 || >3.0 name = "idna" version = "3.6" description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -1350,7 +1304,6 @@ files = [ name = "imagesize" version = "1.4.1" description = "Getting image size from png/jpeg/jpeg2000/gif file" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -1360,21 +1313,20 @@ files = [ [[package]] name = "importlib-metadata" -version = "6.8.0" +version = "6.11.0" description = "Read metadata from Python packages" -category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-6.8.0-py3-none-any.whl", hash = "sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb"}, - {file = "importlib_metadata-6.8.0.tar.gz", hash = "sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743"}, + {file = "importlib_metadata-6.11.0-py3-none-any.whl", hash = "sha256:f0afba6205ad8f8947c7d338b5342d5db2afbfd82f9cbef7879a9539cc12eb9b"}, + {file = "importlib_metadata-6.11.0.tar.gz", hash = "sha256:1231cf92d825c9e03cfc4da076a16de6422c863558229ea0b22b675657463443"}, ] [package.dependencies] zipp = ">=0.5" [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] @@ -1382,7 +1334,6 @@ testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs name = "inflection" version = "0.5.1" description = "A port of Ruby on Rails inflector to Python" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -1394,7 +1345,6 @@ files = [ name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1406,7 +1356,6 @@ files = [ name = "interrogate" version = "1.5.0" description = "Interrogate a codebase for docstring coverage." 
-category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1430,14 +1379,13 @@ tests = ["pytest", "pytest-cov", "pytest-mock"] [[package]] name = "ipykernel" -version = "6.27.1" +version = "6.29.0" description = "IPython Kernel for Jupyter" -category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "ipykernel-6.27.1-py3-none-any.whl", hash = "sha256:dab88b47f112f9f7df62236511023c9bdeef67abc73af7c652e4ce4441601686"}, - {file = "ipykernel-6.27.1.tar.gz", hash = "sha256:7d5d594b6690654b4d299edba5e872dc17bb7396a8d0609c97cb7b8a1c605de6"}, + {file = "ipykernel-6.29.0-py3-none-any.whl", hash = "sha256:076663ca68492576f051e4af7720d33f34383e655f2be0d544c8b1c9de915b2f"}, + {file = "ipykernel-6.29.0.tar.gz", hash = "sha256:b5dd3013cab7b330df712891c96cd1ab868c27a7159e606f762015e9bf8ceb3f"}, ] [package.dependencies] @@ -1446,12 +1394,12 @@ comm = ">=0.1.1" debugpy = ">=1.6.5" ipython = ">=7.23.1" jupyter-client = ">=6.1.12" -jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" matplotlib-inline = ">=0.1" nest-asyncio = "*" packaging = "*" psutil = "*" -pyzmq = ">=20" +pyzmq = ">=24" tornado = ">=6.1" traitlets = ">=5.4.0" @@ -1460,13 +1408,12 @@ cov = ["coverage[toml]", "curio", "matplotlib", "pytest-cov", "trio"] docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "trio"] pyqt5 = ["pyqt5"] pyside6 = ["pyside6"] -test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio", "pytest-cov", "pytest-timeout"] +test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (==0.23.2)", "pytest-cov", "pytest-timeout"] [[package]] name = "ipython" version = "8.18.1" description = "IPython: Productive Interactive Computing" -category = "main" optional = false python-versions = ">=3.9" files = [ @@ -1504,7 +1451,6 @@ test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.22)", "pa name = "ipywidgets" version = "8.1.1" description = "Jupyter interactive widgets" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1526,7 +1472,6 @@ test = ["ipykernel", "jsonschema", "pytest (>=3.6.0)", "pytest-cov", "pytz"] name = "isodate" version = "0.6.1" description = "An ISO 8601 date/time/duration parser and formatter" -category = "main" optional = false python-versions = "*" files = [ @@ -1541,7 +1486,6 @@ six = "*" name = "isoduration" version = "20.11.0" description = "Operations with ISO 8601 durations" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1554,27 +1498,22 @@ arrow = ">=0.15.0" [[package]] name = "isort" -version = "5.12.0" +version = "5.13.2" description = "A Python utility / library to sort Python imports." 
-category = "dev" optional = false python-versions = ">=3.8.0" files = [ - {file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"}, - {file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"}, + {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, + {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, ] [package.extras] -colors = ["colorama (>=0.4.3)"] -pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"] -plugins = ["setuptools"] -requirements-deprecated-finder = ["pip-api", "pipreqs"] +colors = ["colorama (>=0.4.6)"] [[package]] name = "itsdangerous" version = "2.1.2" description = "Safely pass data to untrusted environments and back." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1586,7 +1525,6 @@ files = [ name = "jedi" version = "0.19.1" description = "An autocompletion tool for Python that can be used for text editors." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1606,7 +1544,6 @@ testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] name = "jeepney" version = "0.8.0" description = "Low-level, pure Python DBus protocol wrapper." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1620,14 +1557,13 @@ trio = ["async_generator", "trio"] [[package]] name = "jinja2" -version = "3.1.2" +version = "3.1.3" description = "A very fast and expressive template engine." -category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, - {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, ] [package.dependencies] @@ -1640,7 +1576,6 @@ i18n = ["Babel (>=2.7)"] name = "json5" version = "0.9.14" description = "A Python implementation of the JSON5 data format." 
-category = "main" optional = false python-versions = "*" files = [ @@ -1655,7 +1590,6 @@ dev = ["hypothesis"] name = "jsonpatch" version = "1.33" description = "Apply JSON-Patches (RFC 6902)" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" files = [ @@ -1670,7 +1604,6 @@ jsonpointer = ">=1.9" name = "jsonpointer" version = "2.4" description = "Identify specific nodes in a JSON document (RFC 6901)" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" files = [ @@ -1680,14 +1613,13 @@ files = [ [[package]] name = "jsonschema" -version = "4.20.0" +version = "4.21.0" description = "An implementation of JSON Schema validation for Python" -category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "jsonschema-4.20.0-py3-none-any.whl", hash = "sha256:ed6231f0429ecf966f5bc8dfef245998220549cbbcf140f913b7464c52c3b6b3"}, - {file = "jsonschema-4.20.0.tar.gz", hash = "sha256:4f614fd46d8d61258610998997743ec5492a648b33cf478c1ddc23ed4598a5fa"}, + {file = "jsonschema-4.21.0-py3-none-any.whl", hash = "sha256:70a09719d375c0a2874571b363c8a24be7df8071b80c9aa76bc4551e7297c63c"}, + {file = "jsonschema-4.21.0.tar.gz", hash = "sha256:3ba18e27f7491ea4a1b22edce00fb820eec968d397feb3f9cb61d5894bb38167"}, ] [package.dependencies] @@ -1710,14 +1642,13 @@ format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339- [[package]] name = "jsonschema-specifications" -version = "2023.11.1" +version = "2023.12.1" description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" -category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "jsonschema_specifications-2023.11.1-py3-none-any.whl", hash = "sha256:f596778ab612b3fd29f72ea0d990393d0540a5aab18bf0407a46632eab540779"}, - {file = "jsonschema_specifications-2023.11.1.tar.gz", hash = "sha256:c9b234904ffe02f079bf91b14d79987faa685fd4b39c377a0996954c0090b9ca"}, + {file = "jsonschema_specifications-2023.12.1-py3-none-any.whl", hash = "sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c"}, + {file = "jsonschema_specifications-2023.12.1.tar.gz", hash = "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc"}, ] [package.dependencies] @@ -1727,7 +1658,6 @@ referencing = ">=0.31.0" name = "jupyter-client" version = "8.6.0" description = "Jupyter protocol implementation and client libraries" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1737,7 +1667,7 @@ files = [ [package.dependencies] importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} -jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" python-dateutil = ">=2.8.2" pyzmq = ">=23.0" tornado = ">=6.2" @@ -1749,14 +1679,13 @@ test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pyt [[package]] name = "jupyter-core" -version = "5.5.0" +version = "5.7.1" description = "Jupyter core package. A base package on which Jupyter projects rely." 
-category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "jupyter_core-5.5.0-py3-none-any.whl", hash = "sha256:e11e02cd8ae0a9de5c6c44abf5727df9f2581055afe00b22183f621ba3585805"}, - {file = "jupyter_core-5.5.0.tar.gz", hash = "sha256:880b86053bf298a8724994f95e99b99130659022a4f7f45f563084b6223861d3"}, + {file = "jupyter_core-5.7.1-py3-none-any.whl", hash = "sha256:c65c82126453a723a2804aa52409930434598fd9d35091d63dfb919d2b765bb7"}, + {file = "jupyter_core-5.7.1.tar.gz", hash = "sha256:de61a9d7fc71240f688b2fb5ab659fbb56979458dc66a71decd098e03c79e218"}, ] [package.dependencies] @@ -1772,7 +1701,6 @@ test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"] name = "jupyter-events" version = "0.9.0" description = "Jupyter Event System library" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1796,14 +1724,13 @@ test = ["click", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.19.0)", "p [[package]] name = "jupyter-lsp" -version = "2.2.1" +version = "2.2.2" description = "Multi-Language Server WebSocket proxy for Jupyter Notebook/Lab server" -category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "jupyter-lsp-2.2.1.tar.gz", hash = "sha256:b17fab6d70fe83c8896b0cff59237640038247c196056b43684a0902b6a9e0fb"}, - {file = "jupyter_lsp-2.2.1-py3-none-any.whl", hash = "sha256:17a689910c5e4ae5e7d334b02f31d08ffbe98108f6f658fb05e4304b4345368b"}, + {file = "jupyter-lsp-2.2.2.tar.gz", hash = "sha256:256d24620542ae4bba04a50fc1f6ffe208093a07d8e697fea0a8d1b8ca1b7e5b"}, + {file = "jupyter_lsp-2.2.2-py3-none-any.whl", hash = "sha256:3b95229e4168355a8c91928057c1621ac3510ba98b2a925e82ebd77f078b1aa5"}, ] [package.dependencies] @@ -1812,14 +1739,13 @@ jupyter-server = ">=1.1.2" [[package]] name = "jupyter-server" -version = "2.11.1" +version = "2.12.5" description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." -category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "jupyter_server-2.11.1-py3-none-any.whl", hash = "sha256:4b3a16e3ed16fd202588890f10b8ca589bd3e29405d128beb95935f059441373"}, - {file = "jupyter_server-2.11.1.tar.gz", hash = "sha256:fe80bab96493acf5f7d6cd9a1575af8fbd253dc2591aa4d015131a1e03b5799a"}, + {file = "jupyter_server-2.12.5-py3-none-any.whl", hash = "sha256:184a0f82809a8522777cfb6b760ab6f4b1bb398664c5860a27cec696cb884923"}, + {file = "jupyter_server-2.12.5.tar.gz", hash = "sha256:0edb626c94baa22809be1323f9770cf1c00a952b17097592e40d03e6a3951689"}, ] [package.dependencies] @@ -1849,14 +1775,13 @@ test = ["flaky", "ipykernel", "pre-commit", "pytest (>=7.0)", "pytest-console-sc [[package]] name = "jupyter-server-terminals" -version = "0.4.4" +version = "0.5.1" description = "A Jupyter Server Extension Providing Terminals." 
-category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "jupyter_server_terminals-0.4.4-py3-none-any.whl", hash = "sha256:75779164661cec02a8758a5311e18bb8eb70c4e86c6b699403100f1585a12a36"}, - {file = "jupyter_server_terminals-0.4.4.tar.gz", hash = "sha256:57ab779797c25a7ba68e97bcfb5d7740f2b5e8a83b5e8102b10438041a7eac5d"}, + {file = "jupyter_server_terminals-0.5.1-py3-none-any.whl", hash = "sha256:5e63e947ddd97bb2832db5ef837a258d9ccd4192cd608c1270850ad947ae5dd7"}, + {file = "jupyter_server_terminals-0.5.1.tar.gz", hash = "sha256:16d3be9cf48be6a1f943f3a6c93c033be259cf4779184c66421709cf63dccfea"}, ] [package.dependencies] @@ -1864,19 +1789,18 @@ pywinpty = {version = ">=2.0.3", markers = "os_name == \"nt\""} terminado = ">=0.8.3" [package.extras] -docs = ["jinja2", "jupyter-server", "mistune (<3.0)", "myst-parser", "nbformat", "packaging", "pydata-sphinx-theme", "sphinxcontrib-github-alt", "sphinxcontrib-openapi", "sphinxcontrib-spelling", "sphinxemoji", "tornado"] -test = ["coverage", "jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-cov", "pytest-jupyter[server] (>=0.5.3)", "pytest-timeout"] +docs = ["jinja2", "jupyter-server", "mistune (<4.0)", "myst-parser", "nbformat", "packaging", "pydata-sphinx-theme", "sphinxcontrib-github-alt", "sphinxcontrib-openapi", "sphinxcontrib-spelling", "sphinxemoji", "tornado"] +test = ["jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-jupyter[server] (>=0.5.3)", "pytest-timeout"] [[package]] name = "jupyterlab" -version = "4.0.9" +version = "4.0.10" description = "JupyterLab computational environment" -category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "jupyterlab-4.0.9-py3-none-any.whl", hash = "sha256:9f6f8e36d543fdbcc3df961a1d6a3f524b4a4001be0327a398f68fa4e534107c"}, - {file = "jupyterlab-4.0.9.tar.gz", hash = "sha256:9ebada41d52651f623c0c9f069ddb8a21d6848e4c887d8e5ddc0613166ed5c0b"}, + {file = "jupyterlab-4.0.10-py3-none-any.whl", hash = "sha256:fe010ad9e37017488b468632ef2ead255fc7c671c5b64d9ca13e1f7b7e665c37"}, + {file = "jupyterlab-4.0.10.tar.gz", hash = "sha256:46177eb8ede70dc73be922ac99f8ef943bdc2dfbc6a31b353c4bde848a35dee1"}, ] [package.dependencies] @@ -1895,7 +1819,7 @@ tornado = ">=6.2.0" traitlets = "*" [package.extras] -dev = ["black[jupyter] (==23.10.1)", "build", "bump2version", "coverage", "hatch", "pre-commit", "pytest-cov", "ruff (==0.1.4)"] +dev = ["build", "bump2version", "coverage", "hatch", "pre-commit", "pytest-cov", "ruff (==0.1.6)"] docs = ["jsx-lexer", "myst-parser", "pydata-sphinx-theme (>=0.13.0)", "pytest", "pytest-check-links", "pytest-tornasync", "sphinx (>=1.8,<7.2.0)", "sphinx-copybutton"] docs-screenshots = ["altair (==5.0.1)", "ipython (==8.14.0)", "ipywidgets (==8.0.6)", "jupyterlab-geojson (==3.4.0)", "jupyterlab-language-pack-zh-cn (==4.0.post0)", "matplotlib (==3.7.1)", "nbconvert (>=7.0.0)", "pandas (==2.0.2)", "scipy (==1.10.1)", "vega-datasets (==0.9.0)"] test = ["coverage", "pytest (>=7.0)", "pytest-check-links (>=0.7)", "pytest-console-scripts", "pytest-cov", "pytest-jupyter (>=0.5.3)", "pytest-timeout", "pytest-tornasync", "requests", "requests-cache", "virtualenv"] @@ -1904,7 +1828,6 @@ test = ["coverage", "pytest (>=7.0)", "pytest-check-links (>=0.7)", "pytest-cons name = "jupyterlab-pygments" version = "0.3.0" description = "Pygments theme using JupyterLab CSS variables" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1916,7 +1839,6 @@ files = [ name = "jupyterlab-server" version = "2.25.2" description = "A set of 
server components for JupyterLab and JupyterLab like applications." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1943,7 +1865,6 @@ test = ["hatch", "ipykernel", "openapi-core (>=0.18.0,<0.19.0)", "openapi-spec-v name = "jupyterlab-widgets" version = "3.0.9" description = "Jupyter interactive widgets for JupyterLab" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1955,7 +1876,6 @@ files = [ name = "keyring" version = "23.4.1" description = "Store and access your passwords safely." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1977,7 +1897,6 @@ testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", name = "keyrings-alt" version = "3.1" description = "Alternate keyring implementations" -category = "main" optional = false python-versions = ">=2.7" files = [ @@ -1994,55 +1913,54 @@ testing = ["backports.unittest-mock", "collective.checkdocs", "fs (>=0.5,<2)", " [[package]] name = "lazy-object-proxy" -version = "1.9.0" +version = "1.10.0" description = "A fast and thorough lazy object proxy." -category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "lazy-object-proxy-1.9.0.tar.gz", hash = "sha256:659fb5809fa4629b8a1ac5106f669cfc7bef26fbb389dda53b3e010d1ac4ebae"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b40387277b0ed2d0602b8293b94d7257e17d1479e257b4de114ea11a8cb7f2d7"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8c6cfb338b133fbdbc5cfaa10fe3c6aeea827db80c978dbd13bc9dd8526b7d4"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:721532711daa7db0d8b779b0bb0318fa87af1c10d7fe5e52ef30f8eff254d0cd"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:66a3de4a3ec06cd8af3f61b8e1ec67614fbb7c995d02fa224813cb7afefee701"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1aa3de4088c89a1b69f8ec0dcc169aa725b0ff017899ac568fe44ddc1396df46"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-win32.whl", hash = "sha256:f0705c376533ed2a9e5e97aacdbfe04cecd71e0aa84c7c0595d02ef93b6e4455"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:ea806fd4c37bf7e7ad82537b0757999264d5f70c45468447bb2b91afdbe73a6e"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:946d27deaff6cf8452ed0dba83ba38839a87f4f7a9732e8f9fd4107b21e6ff07"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79a31b086e7e68b24b99b23d57723ef7e2c6d81ed21007b6281ebcd1688acb0a"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f699ac1c768270c9e384e4cbd268d6e67aebcfae6cd623b4d7c3bfde5a35db59"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bfb38f9ffb53b942f2b5954e0f610f1e721ccebe9cce9025a38c8ccf4a5183a4"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:189bbd5d41ae7a498397287c408617fe5c48633e7755287b21d741f7db2706a9"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-win32.whl", hash = "sha256:81fc4d08b062b535d95c9ea70dbe8a335c45c04029878e62d744bdced5141586"}, - {file = 
"lazy_object_proxy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:f2457189d8257dd41ae9b434ba33298aec198e30adf2dcdaaa3a28b9994f6adb"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d9e25ef10a39e8afe59a5c348a4dbf29b4868ab76269f81ce1674494e2565a6e"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cbf9b082426036e19c6924a9ce90c740a9861e2bdc27a4834fd0a910742ac1e8"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f5fa4a61ce2438267163891961cfd5e32ec97a2c444e5b842d574251ade27d2"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8fa02eaab317b1e9e03f69aab1f91e120e7899b392c4fc19807a8278a07a97e8"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e7c21c95cae3c05c14aafffe2865bbd5e377cfc1348c4f7751d9dc9a48ca4bda"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-win32.whl", hash = "sha256:f12ad7126ae0c98d601a7ee504c1122bcef553d1d5e0c3bfa77b16b3968d2734"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-win_amd64.whl", hash = "sha256:edd20c5a55acb67c7ed471fa2b5fb66cb17f61430b7a6b9c3b4a1e40293b1671"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2d0daa332786cf3bb49e10dc6a17a52f6a8f9601b4cf5c295a4f85854d61de63"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cd077f3d04a58e83d04b20e334f678c2b0ff9879b9375ed107d5d07ff160171"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:660c94ea760b3ce47d1855a30984c78327500493d396eac4dfd8bd82041b22be"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:212774e4dfa851e74d393a2370871e174d7ff0ebc980907723bb67d25c8a7c30"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f0117049dd1d5635bbff65444496c90e0baa48ea405125c088e93d9cf4525b11"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-win32.whl", hash = "sha256:0a891e4e41b54fd5b8313b96399f8b0e173bbbfc03c7631f01efbe29bb0bcf82"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:9990d8e71b9f6488e91ad25f322898c136b008d87bf852ff65391b004da5e17b"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9e7551208b2aded9c1447453ee366f1c4070602b3d932ace044715d89666899b"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f83ac4d83ef0ab017683d715ed356e30dd48a93746309c8f3517e1287523ef4"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7322c3d6f1766d4ef1e51a465f47955f1e8123caee67dd641e67d539a534d006"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:18b78ec83edbbeb69efdc0e9c1cb41a3b1b1ed11ddd8ded602464c3fc6020494"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:09763491ce220c0299688940f8dc2c5d05fd1f45af1e42e636b2e8b2303e4382"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-win32.whl", hash = "sha256:9090d8e53235aa280fc9239a86ae3ea8ac58eff66a705fa6aa2ec4968b95c821"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:db1c1722726f47e10e0b5fdbf15ac3b8adb58c091d12b3ab713965795036985f"}, + {file = "lazy-object-proxy-1.10.0.tar.gz", hash = "sha256:78247b6d45f43a52ef35c25b5581459e85117225408a4128a3daf8bf9648ac69"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:855e068b0358ab916454464a884779c7ffa312b8925c6f7401e952dcf3b89977"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab7004cf2e59f7c2e4345604a3e6ea0d92ac44e1c2375527d56492014e690c3"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc0d2fc424e54c70c4bc06787e4072c4f3b1aa2f897dfdc34ce1013cf3ceef05"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e2adb09778797da09d2b5ebdbceebf7dd32e2c96f79da9052b2e87b6ea495895"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b1f711e2c6dcd4edd372cf5dec5c5a30d23bba06ee012093267b3376c079ec83"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-win32.whl", hash = "sha256:76a095cfe6045c7d0ca77db9934e8f7b71b14645f0094ffcd842349ada5c5fb9"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:b4f87d4ed9064b2628da63830986c3d2dca7501e6018347798313fcf028e2fd4"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fec03caabbc6b59ea4a638bee5fce7117be8e99a4103d9d5ad77f15d6f81020c"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02c83f957782cbbe8136bee26416686a6ae998c7b6191711a04da776dc9e47d4"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:009e6bb1f1935a62889ddc8541514b6a9e1fcf302667dcb049a0be5c8f613e56"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:75fc59fc450050b1b3c203c35020bc41bd2695ed692a392924c6ce180c6f1dc9"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:782e2c9b2aab1708ffb07d4bf377d12901d7a1d99e5e410d648d892f8967ab1f"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-win32.whl", hash = "sha256:edb45bb8278574710e68a6b021599a10ce730d156e5b254941754a9cc0b17d03"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:e271058822765ad5e3bca7f05f2ace0de58a3f4e62045a8c90a0dfd2f8ad8cc6"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e98c8af98d5707dcdecc9ab0863c0ea6e88545d42ca7c3feffb6b4d1e370c7ba"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:952c81d415b9b80ea261d2372d2a4a2332a3890c2b83e0535f263ddfe43f0d43"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80b39d3a151309efc8cc48675918891b865bdf742a8616a337cb0090791a0de9"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e221060b701e2aa2ea991542900dd13907a5c90fa80e199dbf5a03359019e7a3"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:92f09ff65ecff3108e56526f9e2481b8116c0b9e1425325e13245abfd79bdb1b"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-win32.whl", hash = "sha256:3ad54b9ddbe20ae9f7c1b29e52f123120772b06dbb18ec6be9101369d63a4074"}, + 
{file = "lazy_object_proxy-1.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:127a789c75151db6af398b8972178afe6bda7d6f68730c057fbbc2e96b08d282"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9e4ed0518a14dd26092614412936920ad081a424bdcb54cc13349a8e2c6d106a"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ad9e6ed739285919aa9661a5bbed0aaf410aa60231373c5579c6b4801bd883c"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fc0a92c02fa1ca1e84fc60fa258458e5bf89d90a1ddaeb8ed9cc3147f417255"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0aefc7591920bbd360d57ea03c995cebc204b424524a5bd78406f6e1b8b2a5d8"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5faf03a7d8942bb4476e3b62fd0f4cf94eaf4618e304a19865abf89a35c0bbee"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-win32.whl", hash = "sha256:e333e2324307a7b5d86adfa835bb500ee70bfcd1447384a822e96495796b0ca4"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:cb73507defd385b7705c599a94474b1d5222a508e502553ef94114a143ec6696"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:366c32fe5355ef5fc8a232c5436f4cc66e9d3e8967c01fb2e6302fd6627e3d94"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2297f08f08a2bb0d32a4265e98a006643cd7233fb7983032bd61ac7a02956b3b"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18dd842b49456aaa9a7cf535b04ca4571a302ff72ed8740d06b5adcd41fe0757"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:217138197c170a2a74ca0e05bddcd5f1796c735c37d0eee33e43259b192aa424"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9a3a87cf1e133e5b1994144c12ca4aa3d9698517fe1e2ca82977781b16955658"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-win32.whl", hash = "sha256:30b339b2a743c5288405aa79a69e706a06e02958eab31859f7f3c04980853b70"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:a899b10e17743683b293a729d3a11f2f399e8a90c73b089e29f5d0fe3509f0dd"}, + {file = "lazy_object_proxy-1.10.0-pp310.pp311.pp312.pp38.pp39-none-any.whl", hash = "sha256:80fa48bd89c8f2f456fc0765c11c23bf5af827febacd2f523ca5bc1893fcc09d"}, ] [[package]] name = "makefun" version = "1.15.2" description = "Small library to dynamically create python functions." -category = "main" optional = false python-versions = "*" files = [ @@ -2054,7 +1972,6 @@ files = [ name = "markupsafe" version = "2.1.0" description = "Safely add untrusted strings to HTML/XML markup." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2102,30 +2019,28 @@ files = [ [[package]] name = "marshmallow" -version = "3.20.1" +version = "3.20.2" description = "A lightweight library for converting complex datatypes to and from native Python datatypes." 
-category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "marshmallow-3.20.1-py3-none-any.whl", hash = "sha256:684939db93e80ad3561392f47be0230743131560a41c5110684c16e21ade0a5c"}, - {file = "marshmallow-3.20.1.tar.gz", hash = "sha256:5d2371bbe42000f2b3fb5eaa065224df7d8f8597bc19a1bbfa5bfe7fba8da889"}, + {file = "marshmallow-3.20.2-py3-none-any.whl", hash = "sha256:c21d4b98fee747c130e6bc8f45c4b3199ea66bc00c12ee1f639f0aeca034d5e9"}, + {file = "marshmallow-3.20.2.tar.gz", hash = "sha256:4c1daff273513dc5eb24b219a8035559dc573c8f322558ef85f5438ddd1236dd"}, ] [package.dependencies] packaging = ">=17.0" [package.extras] -dev = ["flake8 (==6.0.0)", "flake8-bugbear (==23.7.10)", "mypy (==1.4.1)", "pre-commit (>=2.4,<4.0)", "pytest", "pytz", "simplejson", "tox"] -docs = ["alabaster (==0.7.13)", "autodocsumm (==0.2.11)", "sphinx (==7.0.1)", "sphinx-issues (==3.0.1)", "sphinx-version-warning (==1.1.2)"] -lint = ["flake8 (==6.0.0)", "flake8-bugbear (==23.7.10)", "mypy (==1.4.1)", "pre-commit (>=2.4,<4.0)"] +dev = ["pre-commit (>=2.4,<4.0)", "pytest", "pytz", "simplejson", "tox"] +docs = ["alabaster (==0.7.15)", "autodocsumm (==0.2.12)", "sphinx (==7.2.6)", "sphinx-issues (==3.0.1)", "sphinx-version-warning (==1.1.2)"] +lint = ["pre-commit (>=2.4,<4.0)"] tests = ["pytest", "pytz", "simplejson"] [[package]] name = "matplotlib-inline" version = "0.1.6" description = "Inline Matplotlib backend for Jupyter" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -2140,7 +2055,6 @@ traitlets = "*" name = "mccabe" version = "0.7.0" description = "McCabe checker, plugin for flake8" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2152,7 +2066,6 @@ files = [ name = "mistune" version = "3.0.2" description = "A sane and fast Markdown parser with useful plugins and renderers" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2162,39 +2075,38 @@ files = [ [[package]] name = "mypy" -version = "1.7.1" +version = "1.8.0" description = "Optional static typing for Python" -category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.7.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:12cce78e329838d70a204293e7b29af9faa3ab14899aec397798a4b41be7f340"}, - {file = "mypy-1.7.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1484b8fa2c10adf4474f016e09d7a159602f3239075c7bf9f1627f5acf40ad49"}, - {file = "mypy-1.7.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31902408f4bf54108bbfb2e35369877c01c95adc6192958684473658c322c8a5"}, - {file = "mypy-1.7.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f2c2521a8e4d6d769e3234350ba7b65ff5d527137cdcde13ff4d99114b0c8e7d"}, - {file = "mypy-1.7.1-cp310-cp310-win_amd64.whl", hash = "sha256:fcd2572dd4519e8a6642b733cd3a8cfc1ef94bafd0c1ceed9c94fe736cb65b6a"}, - {file = "mypy-1.7.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4b901927f16224d0d143b925ce9a4e6b3a758010673eeded9b748f250cf4e8f7"}, - {file = "mypy-1.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2f7f6985d05a4e3ce8255396df363046c28bea790e40617654e91ed580ca7c51"}, - {file = "mypy-1.7.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:944bdc21ebd620eafefc090cdf83158393ec2b1391578359776c00de00e8907a"}, - {file = "mypy-1.7.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9c7ac372232c928fff0645d85f273a726970c014749b924ce5710d7d89763a28"}, - {file = "mypy-1.7.1-cp311-cp311-win_amd64.whl", hash = 
"sha256:f6efc9bd72258f89a3816e3a98c09d36f079c223aa345c659622f056b760ab42"}, - {file = "mypy-1.7.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6dbdec441c60699288adf051f51a5d512b0d818526d1dcfff5a41f8cd8b4aaf1"}, - {file = "mypy-1.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4fc3d14ee80cd22367caaaf6e014494415bf440980a3045bf5045b525680ac33"}, - {file = "mypy-1.7.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c6e4464ed5f01dc44dc9821caf67b60a4e5c3b04278286a85c067010653a0eb"}, - {file = "mypy-1.7.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:d9b338c19fa2412f76e17525c1b4f2c687a55b156320acb588df79f2e6fa9fea"}, - {file = "mypy-1.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:204e0d6de5fd2317394a4eff62065614c4892d5a4d1a7ee55b765d7a3d9e3f82"}, - {file = "mypy-1.7.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:84860e06ba363d9c0eeabd45ac0fde4b903ad7aa4f93cd8b648385a888e23200"}, - {file = "mypy-1.7.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8c5091ebd294f7628eb25ea554852a52058ac81472c921150e3a61cdd68f75a7"}, - {file = "mypy-1.7.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40716d1f821b89838589e5b3106ebbc23636ffdef5abc31f7cd0266db936067e"}, - {file = "mypy-1.7.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5cf3f0c5ac72139797953bd50bc6c95ac13075e62dbfcc923571180bebb662e9"}, - {file = "mypy-1.7.1-cp38-cp38-win_amd64.whl", hash = "sha256:78e25b2fd6cbb55ddfb8058417df193f0129cad5f4ee75d1502248e588d9e0d7"}, - {file = "mypy-1.7.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:75c4d2a6effd015786c87774e04331b6da863fc3fc4e8adfc3b40aa55ab516fe"}, - {file = "mypy-1.7.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2643d145af5292ee956aa0a83c2ce1038a3bdb26e033dadeb2f7066fb0c9abce"}, - {file = "mypy-1.7.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75aa828610b67462ffe3057d4d8a4112105ed211596b750b53cbfe182f44777a"}, - {file = "mypy-1.7.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ee5d62d28b854eb61889cde4e1dbc10fbaa5560cb39780c3995f6737f7e82120"}, - {file = "mypy-1.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:72cf32ce7dd3562373f78bd751f73c96cfb441de147cc2448a92c1a308bd0ca6"}, - {file = "mypy-1.7.1-py3-none-any.whl", hash = "sha256:f7c5d642db47376a0cc130f0de6d055056e010debdaf0707cd2b0fc7e7ef30ea"}, - {file = "mypy-1.7.1.tar.gz", hash = "sha256:fcb6d9afb1b6208b4c712af0dafdc650f518836065df0d4fb1d800f5d6773db2"}, + {file = "mypy-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:485a8942f671120f76afffff70f259e1cd0f0cfe08f81c05d8816d958d4577d3"}, + {file = "mypy-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:df9824ac11deaf007443e7ed2a4a26bebff98d2bc43c6da21b2b64185da011c4"}, + {file = "mypy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2afecd6354bbfb6e0160f4e4ad9ba6e4e003b767dd80d85516e71f2e955ab50d"}, + {file = "mypy-1.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8963b83d53ee733a6e4196954502b33567ad07dfd74851f32be18eb932fb1cb9"}, + {file = "mypy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:e46f44b54ebddbeedbd3d5b289a893219065ef805d95094d16a0af6630f5d410"}, + {file = "mypy-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:855fe27b80375e5c5878492f0729540db47b186509c98dae341254c8f45f42ae"}, + {file = "mypy-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c886c6cce2d070bd7df4ec4a05a13ee20c0aa60cb587e8d1265b6c03cf91da3"}, + {file = 
"mypy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d19c413b3c07cbecf1f991e2221746b0d2a9410b59cb3f4fb9557f0365a1a817"}, + {file = "mypy-1.8.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9261ed810972061388918c83c3f5cd46079d875026ba97380f3e3978a72f503d"}, + {file = "mypy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:51720c776d148bad2372ca21ca29256ed483aa9a4cdefefcef49006dff2a6835"}, + {file = "mypy-1.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:52825b01f5c4c1c4eb0db253ec09c7aa17e1a7304d247c48b6f3599ef40db8bd"}, + {file = "mypy-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f5ac9a4eeb1ec0f1ccdc6f326bcdb464de5f80eb07fb38b5ddd7b0de6bc61e55"}, + {file = "mypy-1.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afe3fe972c645b4632c563d3f3eff1cdca2fa058f730df2b93a35e3b0c538218"}, + {file = "mypy-1.8.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:42c6680d256ab35637ef88891c6bd02514ccb7e1122133ac96055ff458f93fc3"}, + {file = "mypy-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:720a5ca70e136b675af3af63db533c1c8c9181314d207568bbe79051f122669e"}, + {file = "mypy-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:028cf9f2cae89e202d7b6593cd98db6759379f17a319b5faf4f9978d7084cdc6"}, + {file = "mypy-1.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4e6d97288757e1ddba10dd9549ac27982e3e74a49d8d0179fc14d4365c7add66"}, + {file = "mypy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f1478736fcebb90f97e40aff11a5f253af890c845ee0c850fe80aa060a267c6"}, + {file = "mypy-1.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42419861b43e6962a649068a61f4a4839205a3ef525b858377a960b9e2de6e0d"}, + {file = "mypy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:2b5b6c721bd4aabaadead3a5e6fa85c11c6c795e0c81a7215776ef8afc66de02"}, + {file = "mypy-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5c1538c38584029352878a0466f03a8ee7547d7bd9f641f57a0f3017a7c905b8"}, + {file = "mypy-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ef4be7baf08a203170f29e89d79064463b7fc7a0908b9d0d5114e8009c3a259"}, + {file = "mypy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178def594014aa6c35a8ff411cf37d682f428b3b5617ca79029d8ae72f5402b"}, + {file = "mypy-1.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ab3c84fa13c04aeeeabb2a7f67a25ef5d77ac9d6486ff33ded762ef353aa5592"}, + {file = "mypy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:99b00bc72855812a60d253420d8a2eae839b0afa4938f09f4d2aa9bb4654263a"}, + {file = "mypy-1.8.0-py3-none-any.whl", hash = "sha256:538fd81bb5e430cc1381a443971c0475582ff9f434c16cd46d2c66763ce85d9d"}, + {file = "mypy-1.8.0.tar.gz", hash = "sha256:6ff8b244d7085a0b425b56d327b480c3b29cafbd2eff27316a004f9a7391ae07"}, ] [package.dependencies] @@ -2212,7 +2124,6 @@ reports = ["lxml"] name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -2224,7 +2135,6 @@ files = [ name = "nbclient" version = "0.9.0" description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." 
-category = "main" optional = false python-versions = ">=3.8.0" files = [ @@ -2234,7 +2144,7 @@ files = [ [package.dependencies] jupyter-client = ">=6.1.12" -jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" nbformat = ">=5.1" traitlets = ">=5.4" @@ -2245,14 +2155,13 @@ test = ["flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "nbconvert (>= [[package]] name = "nbconvert" -version = "7.11.0" +version = "7.14.2" description = "Converting Jupyter Notebooks" -category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "nbconvert-7.11.0-py3-none-any.whl", hash = "sha256:d1d417b7f34a4e38887f8da5bdfd12372adf3b80f995d57556cb0972c68909fe"}, - {file = "nbconvert-7.11.0.tar.gz", hash = "sha256:abedc01cf543177ffde0bfc2a69726d5a478f6af10a332fc1bf29fcb4f0cf000"}, + {file = "nbconvert-7.14.2-py3-none-any.whl", hash = "sha256:db28590cef90f7faf2ebbc71acd402cbecf13d29176df728c0a9025a49345ea1"}, + {file = "nbconvert-7.14.2.tar.gz", hash = "sha256:a7f8808fd4e082431673ac538400218dd45efd076fbeb07cc6e5aa5a3a4e949e"}, ] [package.dependencies] @@ -2279,14 +2188,13 @@ docs = ["ipykernel", "ipython", "myst-parser", "nbsphinx (>=0.2.12)", "pydata-sp qtpdf = ["nbconvert[qtpng]"] qtpng = ["pyqtwebengine (>=5.15)"] serve = ["tornado (>=6.1)"] -test = ["flaky", "ipykernel", "ipywidgets (>=7)", "pytest"] +test = ["flaky", "ipykernel", "ipywidgets (>=7.5)", "pytest"] webpdf = ["playwright"] [[package]] name = "nbformat" version = "5.9.2" description = "The Jupyter Notebook format" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2306,21 +2214,19 @@ test = ["pep440", "pre-commit", "pytest", "testpath"] [[package]] name = "nest-asyncio" -version = "1.5.8" +version = "1.5.9" description = "Patch asyncio to allow nested event loops" -category = "main" optional = false python-versions = ">=3.5" files = [ - {file = "nest_asyncio-1.5.8-py3-none-any.whl", hash = "sha256:accda7a339a70599cb08f9dd09a67e0c2ef8d8d6f4c07f96ab203f2ae254e48d"}, - {file = "nest_asyncio-1.5.8.tar.gz", hash = "sha256:25aa2ca0d2a5b5531956b9e273b45cf664cae2b145101d73b86b199978d48fdb"}, + {file = "nest_asyncio-1.5.9-py3-none-any.whl", hash = "sha256:61ec07ef052e72e3de22045b81b2cc7d71fceb04c568ba0b2e4b2f9f5231bec2"}, + {file = "nest_asyncio-1.5.9.tar.gz", hash = "sha256:d1e1144e9c6e3e6392e0fcf5211cb1c8374b5648a98f1ebe48e5336006b41907"}, ] [[package]] name = "networkx" version = "2.8.8" description = "Python package for creating and manipulating graphs and networks" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2339,7 +2245,6 @@ test = ["codecov (>=2.1)", "pytest (>=7.2)", "pytest-cov (>=4.0)"] name = "notebook" version = "7.0.6" description = "Jupyter Notebook - A web-based notebook environment for interactive computing" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2363,7 +2268,6 @@ test = ["importlib-resources (>=5.0)", "ipykernel", "jupyter-server[test] (>=2.4 name = "notebook-shim" version = "0.2.3" description = "A shim layer for notebook traits and config" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2379,55 +2283,53 @@ test = ["pytest", "pytest-console-scripts", "pytest-jupyter", "pytest-tornasync" [[package]] name = "numpy" -version = "1.26.2" +version = "1.26.3" description = "Fundamental package for array computing in Python" -category = "main" optional = false python-versions = ">=3.9" files = [ - {file = "numpy-1.26.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:3703fc9258a4a122d17043e57b35e5ef1c5a5837c3db8be396c82e04c1cf9b0f"}, - {file = "numpy-1.26.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cc392fdcbd21d4be6ae1bb4475a03ce3b025cd49a9be5345d76d7585aea69440"}, - {file = "numpy-1.26.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36340109af8da8805d8851ef1d74761b3b88e81a9bd80b290bbfed61bd2b4f75"}, - {file = "numpy-1.26.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bcc008217145b3d77abd3e4d5ef586e3bdfba8fe17940769f8aa09b99e856c00"}, - {file = "numpy-1.26.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3ced40d4e9e18242f70dd02d739e44698df3dcb010d31f495ff00a31ef6014fe"}, - {file = "numpy-1.26.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b272d4cecc32c9e19911891446b72e986157e6a1809b7b56518b4f3755267523"}, - {file = "numpy-1.26.2-cp310-cp310-win32.whl", hash = "sha256:22f8fc02fdbc829e7a8c578dd8d2e15a9074b630d4da29cda483337e300e3ee9"}, - {file = "numpy-1.26.2-cp310-cp310-win_amd64.whl", hash = "sha256:26c9d33f8e8b846d5a65dd068c14e04018d05533b348d9eaeef6c1bd787f9919"}, - {file = "numpy-1.26.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b96e7b9c624ef3ae2ae0e04fa9b460f6b9f17ad8b4bec6d7756510f1f6c0c841"}, - {file = "numpy-1.26.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:aa18428111fb9a591d7a9cc1b48150097ba6a7e8299fb56bdf574df650e7d1f1"}, - {file = "numpy-1.26.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06fa1ed84aa60ea6ef9f91ba57b5ed963c3729534e6e54055fc151fad0423f0a"}, - {file = "numpy-1.26.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96ca5482c3dbdd051bcd1fce8034603d6ebfc125a7bd59f55b40d8f5d246832b"}, - {file = "numpy-1.26.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:854ab91a2906ef29dc3925a064fcd365c7b4da743f84b123002f6139bcb3f8a7"}, - {file = "numpy-1.26.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f43740ab089277d403aa07567be138fc2a89d4d9892d113b76153e0e412409f8"}, - {file = "numpy-1.26.2-cp311-cp311-win32.whl", hash = "sha256:a2bbc29fcb1771cd7b7425f98b05307776a6baf43035d3b80c4b0f29e9545186"}, - {file = "numpy-1.26.2-cp311-cp311-win_amd64.whl", hash = "sha256:2b3fca8a5b00184828d12b073af4d0fc5fdd94b1632c2477526f6bd7842d700d"}, - {file = "numpy-1.26.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a4cd6ed4a339c21f1d1b0fdf13426cb3b284555c27ac2f156dfdaaa7e16bfab0"}, - {file = "numpy-1.26.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5d5244aabd6ed7f312268b9247be47343a654ebea52a60f002dc70c769048e75"}, - {file = "numpy-1.26.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a3cdb4d9c70e6b8c0814239ead47da00934666f668426fc6e94cce869e13fd7"}, - {file = "numpy-1.26.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa317b2325f7aa0a9471663e6093c210cb2ae9c0ad824732b307d2c51983d5b6"}, - {file = "numpy-1.26.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:174a8880739c16c925799c018f3f55b8130c1f7c8e75ab0a6fa9d41cab092fd6"}, - {file = "numpy-1.26.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f79b231bf5c16b1f39c7f4875e1ded36abee1591e98742b05d8a0fb55d8a3eec"}, - {file = "numpy-1.26.2-cp312-cp312-win32.whl", hash = "sha256:4a06263321dfd3598cacb252f51e521a8cb4b6df471bb12a7ee5cbab20ea9167"}, - {file = "numpy-1.26.2-cp312-cp312-win_amd64.whl", hash = "sha256:b04f5dc6b3efdaab541f7857351aac359e6ae3c126e2edb376929bd3b7f92d7e"}, - {file = "numpy-1.26.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:4eb8df4bf8d3d90d091e0146f6c28492b0be84da3e409ebef54349f71ed271ef"}, - {file = "numpy-1.26.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1a13860fdcd95de7cf58bd6f8bc5a5ef81c0b0625eb2c9a783948847abbef2c2"}, - {file = "numpy-1.26.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64308ebc366a8ed63fd0bf426b6a9468060962f1a4339ab1074c228fa6ade8e3"}, - {file = "numpy-1.26.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baf8aab04a2c0e859da118f0b38617e5ee65d75b83795055fb66c0d5e9e9b818"}, - {file = "numpy-1.26.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d73a3abcac238250091b11caef9ad12413dab01669511779bc9b29261dd50210"}, - {file = "numpy-1.26.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b361d369fc7e5e1714cf827b731ca32bff8d411212fccd29ad98ad622449cc36"}, - {file = "numpy-1.26.2-cp39-cp39-win32.whl", hash = "sha256:bd3f0091e845164a20bd5a326860c840fe2af79fa12e0469a12768a3ec578d80"}, - {file = "numpy-1.26.2-cp39-cp39-win_amd64.whl", hash = "sha256:2beef57fb031dcc0dc8fa4fe297a742027b954949cabb52a2a376c144e5e6060"}, - {file = "numpy-1.26.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1cc3d5029a30fb5f06704ad6b23b35e11309491c999838c31f124fee32107c79"}, - {file = "numpy-1.26.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94cc3c222bb9fb5a12e334d0479b97bb2df446fbe622b470928f5284ffca3f8d"}, - {file = "numpy-1.26.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:fe6b44fb8fcdf7eda4ef4461b97b3f63c466b27ab151bec2366db8b197387841"}, - {file = "numpy-1.26.2.tar.gz", hash = "sha256:f65738447676ab5777f11e6bbbdb8ce11b785e105f690bc45966574816b6d3ea"}, + {file = "numpy-1.26.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:806dd64230dbbfaca8a27faa64e2f414bf1c6622ab78cc4264f7f5f028fee3bf"}, + {file = "numpy-1.26.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02f98011ba4ab17f46f80f7f8f1c291ee7d855fcef0a5a98db80767a468c85cd"}, + {file = "numpy-1.26.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d45b3ec2faed4baca41c76617fcdcfa4f684ff7a151ce6fc78ad3b6e85af0a6"}, + {file = "numpy-1.26.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdd2b45bf079d9ad90377048e2747a0c82351989a2165821f0c96831b4a2a54b"}, + {file = "numpy-1.26.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:211ddd1e94817ed2d175b60b6374120244a4dd2287f4ece45d49228b4d529178"}, + {file = "numpy-1.26.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b1240f767f69d7c4c8a29adde2310b871153df9b26b5cb2b54a561ac85146485"}, + {file = "numpy-1.26.3-cp310-cp310-win32.whl", hash = "sha256:21a9484e75ad018974a2fdaa216524d64ed4212e418e0a551a2d83403b0531d3"}, + {file = "numpy-1.26.3-cp310-cp310-win_amd64.whl", hash = "sha256:9e1591f6ae98bcfac2a4bbf9221c0b92ab49762228f38287f6eeb5f3f55905ce"}, + {file = "numpy-1.26.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b831295e5472954104ecb46cd98c08b98b49c69fdb7040483aff799a755a7374"}, + {file = "numpy-1.26.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9e87562b91f68dd8b1c39149d0323b42e0082db7ddb8e934ab4c292094d575d6"}, + {file = "numpy-1.26.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c66d6fec467e8c0f975818c1796d25c53521124b7cfb760114be0abad53a0a2"}, + {file = "numpy-1.26.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f25e2811a9c932e43943a2615e65fc487a0b6b49218899e62e426e7f0a57eeda"}, + {file = "numpy-1.26.3-cp311-cp311-musllinux_1_1_aarch64.whl", 
hash = "sha256:af36e0aa45e25c9f57bf684b1175e59ea05d9a7d3e8e87b7ae1a1da246f2767e"}, + {file = "numpy-1.26.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:51c7f1b344f302067b02e0f5b5d2daa9ed4a721cf49f070280ac202738ea7f00"}, + {file = "numpy-1.26.3-cp311-cp311-win32.whl", hash = "sha256:7ca4f24341df071877849eb2034948459ce3a07915c2734f1abb4018d9c49d7b"}, + {file = "numpy-1.26.3-cp311-cp311-win_amd64.whl", hash = "sha256:39763aee6dfdd4878032361b30b2b12593fb445ddb66bbac802e2113eb8a6ac4"}, + {file = "numpy-1.26.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a7081fd19a6d573e1a05e600c82a1c421011db7935ed0d5c483e9dd96b99cf13"}, + {file = "numpy-1.26.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12c70ac274b32bc00c7f61b515126c9205323703abb99cd41836e8125ea0043e"}, + {file = "numpy-1.26.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f784e13e598e9594750b2ef6729bcd5a47f6cfe4a12cca13def35e06d8163e3"}, + {file = "numpy-1.26.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f24750ef94d56ce6e33e4019a8a4d68cfdb1ef661a52cdaee628a56d2437419"}, + {file = "numpy-1.26.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:77810ef29e0fb1d289d225cabb9ee6cf4d11978a00bb99f7f8ec2132a84e0166"}, + {file = "numpy-1.26.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8ed07a90f5450d99dad60d3799f9c03c6566709bd53b497eb9ccad9a55867f36"}, + {file = "numpy-1.26.3-cp312-cp312-win32.whl", hash = "sha256:f73497e8c38295aaa4741bdfa4fda1a5aedda5473074369eca10626835445511"}, + {file = "numpy-1.26.3-cp312-cp312-win_amd64.whl", hash = "sha256:da4b0c6c699a0ad73c810736303f7fbae483bcb012e38d7eb06a5e3b432c981b"}, + {file = "numpy-1.26.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1666f634cb3c80ccbd77ec97bc17337718f56d6658acf5d3b906ca03e90ce87f"}, + {file = "numpy-1.26.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:18c3319a7d39b2c6a9e3bb75aab2304ab79a811ac0168a671a62e6346c29b03f"}, + {file = "numpy-1.26.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b7e807d6888da0db6e7e75838444d62495e2b588b99e90dd80c3459594e857b"}, + {file = "numpy-1.26.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4d362e17bcb0011738c2d83e0a65ea8ce627057b2fdda37678f4374a382a137"}, + {file = "numpy-1.26.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b8c275f0ae90069496068c714387b4a0eba5d531aace269559ff2b43655edd58"}, + {file = "numpy-1.26.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cc0743f0302b94f397a4a65a660d4cd24267439eb16493fb3caad2e4389bccbb"}, + {file = "numpy-1.26.3-cp39-cp39-win32.whl", hash = "sha256:9bc6d1a7f8cedd519c4b7b1156d98e051b726bf160715b769106661d567b3f03"}, + {file = "numpy-1.26.3-cp39-cp39-win_amd64.whl", hash = "sha256:867e3644e208c8922a3be26fc6bbf112a035f50f0a86497f98f228c50c607bb2"}, + {file = "numpy-1.26.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3c67423b3703f8fbd90f5adaa37f85b5794d3366948efe9a5190a5f3a83fc34e"}, + {file = "numpy-1.26.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46f47ee566d98849323f01b349d58f2557f02167ee301e5e28809a8c0e27a2d0"}, + {file = "numpy-1.26.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a8474703bffc65ca15853d5fd4d06b18138ae90c17c8d12169968e998e448bb5"}, + {file = "numpy-1.26.3.tar.gz", hash = "sha256:697df43e2b6310ecc9d95f05d5ef20eacc09c7c4ecc9da3f235d39e71b7da1e4"}, ] [[package]] name = "oauth2client" version = "4.1.3" description = "OAuth 2.0 client library" -category = "main" optional = 
false python-versions = "*" files = [ @@ -2446,7 +2348,6 @@ six = ">=1.6.1" name = "oauthlib" version = "3.2.2" description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2463,7 +2364,6 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] name = "openpyxl" version = "3.1.2" description = "A Python library to read/write Excel 2010 xlsx/xlsm files" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2573,7 +2473,6 @@ files = [ name = "overrides" version = "7.4.0" description = "A decorator to automatically detect mismatch when overriding a method." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2585,7 +2484,6 @@ files = [ name = "packaging" version = "23.2" description = "Core utilities for Python packages" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2597,7 +2495,6 @@ files = [ name = "pandarallel" version = "1.6.5" description = "An easy to use library to speed up computation (by parallelizing on multi CPUs) with pandas." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2617,7 +2514,6 @@ doc = ["mkdocs-material"] name = "pandas" version = "1.5.3" description = "Powerful data structures for data analysis, time series, and statistics" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2663,21 +2559,19 @@ test = ["hypothesis (>=5.5.3)", "pytest (>=6.0)", "pytest-xdist (>=1.31)"] [[package]] name = "pandocfilters" -version = "1.5.0" +version = "1.5.1" description = "Utilities for writing pandoc filters in python" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ - {file = "pandocfilters-1.5.0-py2.py3-none-any.whl", hash = "sha256:33aae3f25fd1a026079f5d27bdd52496f0e0803b3469282162bafdcbdf6ef14f"}, - {file = "pandocfilters-1.5.0.tar.gz", hash = "sha256:0b679503337d233b4339a817bfc8c50064e2eff681314376a47cb582305a7a38"}, + {file = "pandocfilters-1.5.1-py2.py3-none-any.whl", hash = "sha256:93be382804a9cdb0a7267585f157e5d1731bbe5545a85b268d6f5fe6232de2bc"}, + {file = "pandocfilters-1.5.1.tar.gz", hash = "sha256:002b4a555ee4ebc03f8b66307e287fa492e4a77b4ea14d3f934328297bb4939e"}, ] [[package]] name = "parso" version = "0.8.3" description = "A Python Parser" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2691,21 +2585,19 @@ testing = ["docopt", "pytest (<6.0.0)"] [[package]] name = "pathspec" -version = "0.11.2" +version = "0.12.1" description = "Utility library for gitignore style pattern matching of file paths." 
-category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"}, - {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, ] [[package]] name = "pdoc" version = "12.3.1" description = "API Documentation for Python Projects" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2725,7 +2617,6 @@ dev = ["black", "hypothesis", "mypy", "pytest", "pytest-cov", "pytest-timeout", name = "pexpect" version = "4.9.0" description = "Pexpect allows easy control of interactive console applications." -category = "main" optional = false python-versions = "*" files = [ @@ -2738,14 +2629,13 @@ ptyprocess = ">=0.5" [[package]] name = "platformdirs" -version = "4.0.0" +version = "4.1.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." -category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "platformdirs-4.0.0-py3-none-any.whl", hash = "sha256:118c954d7e949b35437270383a3f2531e99dd93cf7ce4dc8340d3356d30f173b"}, - {file = "platformdirs-4.0.0.tar.gz", hash = "sha256:cb633b2bcf10c51af60beb0ab06d2f1d69064b43abf4c185ca6b28865f3f9731"}, + {file = "platformdirs-4.1.0-py3-none-any.whl", hash = "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380"}, + {file = "platformdirs-4.1.0.tar.gz", hash = "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420"}, ] [package.extras] @@ -2756,7 +2646,6 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co name = "pluggy" version = "1.3.0" description = "plugin and hook calling mechanisms for python" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2772,7 +2661,6 @@ testing = ["pytest", "pytest-benchmark"] name = "prometheus-client" version = "0.19.0" description = "Python client for the Prometheus monitoring system." 
-category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2785,14 +2673,13 @@ twisted = ["twisted"] [[package]] name = "prompt-toolkit" -version = "3.0.41" +version = "3.0.43" description = "Library for building powerful interactive command lines in Python" -category = "main" optional = false python-versions = ">=3.7.0" files = [ - {file = "prompt_toolkit-3.0.41-py3-none-any.whl", hash = "sha256:f36fe301fafb7470e86aaf90f036eef600a3210be4decf461a5b1ca8403d3cb2"}, - {file = "prompt_toolkit-3.0.41.tar.gz", hash = "sha256:941367d97fc815548822aa26c2a269fdc4eb21e9ec05fc5d447cf09bad5d75f0"}, + {file = "prompt_toolkit-3.0.43-py3-none-any.whl", hash = "sha256:a11a29cb3bf0a28a387fe5122cdb649816a957cd9261dcedf8c9f1fef33eacf6"}, + {file = "prompt_toolkit-3.0.43.tar.gz", hash = "sha256:3527b7af26106cbc65a040bcc84839a3566ec1b051bb0bfe953631e704b0ff7d"}, ] [package.dependencies] @@ -2800,49 +2687,47 @@ wcwidth = "*" [[package]] name = "protobuf" -version = "4.25.1" +version = "4.25.2" description = "" -category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "protobuf-4.25.1-cp310-abi3-win32.whl", hash = "sha256:193f50a6ab78a970c9b4f148e7c750cfde64f59815e86f686c22e26b4fe01ce7"}, - {file = "protobuf-4.25.1-cp310-abi3-win_amd64.whl", hash = "sha256:3497c1af9f2526962f09329fd61a36566305e6c72da2590ae0d7d1322818843b"}, - {file = "protobuf-4.25.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:0bf384e75b92c42830c0a679b0cd4d6e2b36ae0cf3dbb1e1dfdda48a244f4bcd"}, - {file = "protobuf-4.25.1-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:0f881b589ff449bf0b931a711926e9ddaad3b35089cc039ce1af50b21a4ae8cb"}, - {file = "protobuf-4.25.1-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:ca37bf6a6d0046272c152eea90d2e4ef34593aaa32e8873fc14c16440f22d4b7"}, - {file = "protobuf-4.25.1-cp38-cp38-win32.whl", hash = "sha256:abc0525ae2689a8000837729eef7883b9391cd6aa7950249dcf5a4ede230d5dd"}, - {file = "protobuf-4.25.1-cp38-cp38-win_amd64.whl", hash = "sha256:1484f9e692091450e7edf418c939e15bfc8fc68856e36ce399aed6889dae8bb0"}, - {file = "protobuf-4.25.1-cp39-cp39-win32.whl", hash = "sha256:8bdbeaddaac52d15c6dce38c71b03038ef7772b977847eb6d374fc86636fa510"}, - {file = "protobuf-4.25.1-cp39-cp39-win_amd64.whl", hash = "sha256:becc576b7e6b553d22cbdf418686ee4daa443d7217999125c045ad56322dda10"}, - {file = "protobuf-4.25.1-py3-none-any.whl", hash = "sha256:a19731d5e83ae4737bb2a089605e636077ac001d18781b3cf489b9546c7c80d6"}, - {file = "protobuf-4.25.1.tar.gz", hash = "sha256:57d65074b4f5baa4ab5da1605c02be90ac20c8b40fb137d6a8df9f416b0d0ce2"}, + {file = "protobuf-4.25.2-cp310-abi3-win32.whl", hash = "sha256:b50c949608682b12efb0b2717f53256f03636af5f60ac0c1d900df6213910fd6"}, + {file = "protobuf-4.25.2-cp310-abi3-win_amd64.whl", hash = "sha256:8f62574857ee1de9f770baf04dde4165e30b15ad97ba03ceac65f760ff018ac9"}, + {file = "protobuf-4.25.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:2db9f8fa64fbdcdc93767d3cf81e0f2aef176284071507e3ede160811502fd3d"}, + {file = "protobuf-4.25.2-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:10894a2885b7175d3984f2be8d9850712c57d5e7587a2410720af8be56cdaf62"}, + {file = "protobuf-4.25.2-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:fc381d1dd0516343f1440019cedf08a7405f791cd49eef4ae1ea06520bc1c020"}, + {file = "protobuf-4.25.2-cp38-cp38-win32.whl", hash = "sha256:33a1aeef4b1927431d1be780e87b641e322b88d654203a9e9d93f218ee359e61"}, + {file = "protobuf-4.25.2-cp38-cp38-win_amd64.whl", hash = 
"sha256:47f3de503fe7c1245f6f03bea7e8d3ec11c6c4a2ea9ef910e3221c8a15516d62"}, + {file = "protobuf-4.25.2-cp39-cp39-win32.whl", hash = "sha256:5e5c933b4c30a988b52e0b7c02641760a5ba046edc5e43d3b94a74c9fc57c1b3"}, + {file = "protobuf-4.25.2-cp39-cp39-win_amd64.whl", hash = "sha256:d66a769b8d687df9024f2985d5137a337f957a0916cf5464d1513eee96a63ff0"}, + {file = "protobuf-4.25.2-py3-none-any.whl", hash = "sha256:a8b7a98d4ce823303145bf3c1a8bdb0f2f4642a414b196f04ad9853ed0c8f830"}, + {file = "protobuf-4.25.2.tar.gz", hash = "sha256:fe599e175cb347efc8ee524bcd4b902d11f7262c0e569ececcb89995c15f0a5e"}, ] [[package]] name = "psutil" -version = "5.9.6" +version = "5.9.7" description = "Cross-platform lib for process and system monitoring in Python." -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ - {file = "psutil-5.9.6-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:fb8a697f11b0f5994550555fcfe3e69799e5b060c8ecf9e2f75c69302cc35c0d"}, - {file = "psutil-5.9.6-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:91ecd2d9c00db9817a4b4192107cf6954addb5d9d67a969a4f436dbc9200f88c"}, - {file = "psutil-5.9.6-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:10e8c17b4f898d64b121149afb136c53ea8b68c7531155147867b7b1ac9e7e28"}, - {file = "psutil-5.9.6-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:18cd22c5db486f33998f37e2bb054cc62fd06646995285e02a51b1e08da97017"}, - {file = "psutil-5.9.6-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:ca2780f5e038379e520281e4c032dddd086906ddff9ef0d1b9dcf00710e5071c"}, - {file = "psutil-5.9.6-cp27-none-win32.whl", hash = "sha256:70cb3beb98bc3fd5ac9ac617a327af7e7f826373ee64c80efd4eb2856e5051e9"}, - {file = "psutil-5.9.6-cp27-none-win_amd64.whl", hash = "sha256:51dc3d54607c73148f63732c727856f5febec1c7c336f8f41fcbd6315cce76ac"}, - {file = "psutil-5.9.6-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:c69596f9fc2f8acd574a12d5f8b7b1ba3765a641ea5d60fb4736bf3c08a8214a"}, - {file = "psutil-5.9.6-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:92e0cc43c524834af53e9d3369245e6cc3b130e78e26100d1f63cdb0abeb3d3c"}, - {file = "psutil-5.9.6-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:748c9dd2583ed86347ed65d0035f45fa8c851e8d90354c122ab72319b5f366f4"}, - {file = "psutil-5.9.6-cp36-cp36m-win32.whl", hash = "sha256:3ebf2158c16cc69db777e3c7decb3c0f43a7af94a60d72e87b2823aebac3d602"}, - {file = "psutil-5.9.6-cp36-cp36m-win_amd64.whl", hash = "sha256:ff18b8d1a784b810df0b0fff3bcb50ab941c3b8e2c8de5726f9c71c601c611aa"}, - {file = "psutil-5.9.6-cp37-abi3-win32.whl", hash = "sha256:a6f01f03bf1843280f4ad16f4bde26b817847b4c1a0db59bf6419807bc5ce05c"}, - {file = "psutil-5.9.6-cp37-abi3-win_amd64.whl", hash = "sha256:6e5fb8dc711a514da83098bc5234264e551ad980cec5f85dabf4d38ed6f15e9a"}, - {file = "psutil-5.9.6-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:daecbcbd29b289aac14ece28eca6a3e60aa361754cf6da3dfb20d4d32b6c7f57"}, - {file = "psutil-5.9.6.tar.gz", hash = "sha256:e4b92ddcd7dd4cdd3f900180ea1e104932c7bce234fb88976e2a3b296441225a"}, + {file = "psutil-5.9.7-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:0bd41bf2d1463dfa535942b2a8f0e958acf6607ac0be52265ab31f7923bcd5e6"}, + {file = "psutil-5.9.7-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:5794944462509e49d4d458f4dbfb92c47539e7d8d15c796f141f474010084056"}, + {file = "psutil-5.9.7-cp27-cp27m-manylinux2010_x86_64.whl", hash = 
"sha256:fe361f743cb3389b8efda21980d93eb55c1f1e3898269bc9a2a1d0bb7b1f6508"}, + {file = "psutil-5.9.7-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:e469990e28f1ad738f65a42dcfc17adaed9d0f325d55047593cb9033a0ab63df"}, + {file = "psutil-5.9.7-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:3c4747a3e2ead1589e647e64aad601981f01b68f9398ddf94d01e3dc0d1e57c7"}, + {file = "psutil-5.9.7-cp27-none-win32.whl", hash = "sha256:1d4bc4a0148fdd7fd8f38e0498639ae128e64538faa507df25a20f8f7fb2341c"}, + {file = "psutil-5.9.7-cp27-none-win_amd64.whl", hash = "sha256:4c03362e280d06bbbfcd52f29acd79c733e0af33d707c54255d21029b8b32ba6"}, + {file = "psutil-5.9.7-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ea36cc62e69a13ec52b2f625c27527f6e4479bca2b340b7a452af55b34fcbe2e"}, + {file = "psutil-5.9.7-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1132704b876e58d277168cd729d64750633d5ff0183acf5b3c986b8466cd0284"}, + {file = "psutil-5.9.7-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe8b7f07948f1304497ce4f4684881250cd859b16d06a1dc4d7941eeb6233bfe"}, + {file = "psutil-5.9.7-cp36-cp36m-win32.whl", hash = "sha256:b27f8fdb190c8c03914f908a4555159327d7481dac2f01008d483137ef3311a9"}, + {file = "psutil-5.9.7-cp36-cp36m-win_amd64.whl", hash = "sha256:44969859757f4d8f2a9bd5b76eba8c3099a2c8cf3992ff62144061e39ba8568e"}, + {file = "psutil-5.9.7-cp37-abi3-win32.whl", hash = "sha256:c727ca5a9b2dd5193b8644b9f0c883d54f1248310023b5ad3e92036c5e2ada68"}, + {file = "psutil-5.9.7-cp37-abi3-win_amd64.whl", hash = "sha256:f37f87e4d73b79e6c5e749440c3113b81d1ee7d26f21c19c47371ddea834f414"}, + {file = "psutil-5.9.7-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:032f4f2c909818c86cea4fe2cc407f1c0f0cde8e6c6d702b28b8ce0c0d143340"}, + {file = "psutil-5.9.7.tar.gz", hash = "sha256:3f02134e82cfb5d089fddf20bb2e03fd5cd52395321d1c8458a9e58500ff417c"}, ] [package.extras] @@ -2852,7 +2737,6 @@ test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] name = "ptyprocess" version = "0.7.0" description = "Run a subprocess in a pseudo terminal" -category = "main" optional = false python-versions = "*" files = [ @@ -2864,7 +2748,6 @@ files = [ name = "pure-eval" version = "0.2.2" description = "Safely evaluate AST nodes without side effects" -category = "main" optional = false python-versions = "*" files = [ @@ -2879,7 +2762,6 @@ tests = ["pytest"] name = "py" version = "1.11.0" description = "library with cross-python path, ini-parsing, io, code, log facilities" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -2891,7 +2773,6 @@ files = [ name = "pyasn1" version = "0.5.1" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ @@ -2903,7 +2784,6 @@ files = [ name = "pyasn1-modules" version = "0.3.0" description = "A collection of ASN.1-based protocols modules" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ @@ -2918,7 +2798,6 @@ pyasn1 = ">=0.4.6,<0.6.0" name = "pycodestyle" version = "2.11.1" description = "Python style guide checker" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2930,7 +2809,6 @@ files = [ name = "pycparser" version = "2.21" description = "C parser in Python" -category = "main" 
optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -2942,7 +2820,6 @@ files = [ name = "pydantic" version = "1.10.13" description = "Data validation and settings management using python type hints" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2995,7 +2872,6 @@ email = ["email-validator (>=1.0.3)"] name = "pyflakes" version = "3.1.0" description = "passive checker of Python programs" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -3007,7 +2883,6 @@ files = [ name = "pygments" version = "2.17.2" description = "Pygments is a syntax highlighting package written in Python." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3023,7 +2898,6 @@ windows-terminal = ["colorama (>=0.4.6)"] name = "pygsheets" version = "2.0.6" description = "Google Spreadsheets Python API v4" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -3042,7 +2916,6 @@ pandas = ["pandas (>=0.14.0)"] name = "pylint" version = "2.17.7" description = "python code static checker" -category = "dev" optional = false python-versions = ">=3.7.2" files = [ @@ -3069,7 +2942,6 @@ testutils = ["gitpython (>3)"] name = "pyopenssl" version = "23.3.0" description = "Python wrapper module around the OpenSSL library" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3088,7 +2960,6 @@ test = ["flaky", "pretend", "pytest (>=3.0.1)"] name = "pyparsing" version = "3.1.1" description = "pyparsing module - Classes and methods to define and execute parsing grammars" -category = "main" optional = false python-versions = ">=3.6.8" files = [ @@ -3101,14 +2972,13 @@ diagrams = ["jinja2", "railroad-diagrams"] [[package]] name = "pytest" -version = "7.4.3" +version = "7.4.4" description = "pytest: simple powerful testing with Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.4.3-py3-none-any.whl", hash = "sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac"}, - {file = "pytest-7.4.3.tar.gz", hash = "sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5"}, + {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, + {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, ] [package.dependencies] @@ -3126,7 +2996,6 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no name = "pytest-cov" version = "4.1.0" description = "Pytest plugin for measuring coverage." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3145,7 +3014,6 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtuale name = "pytest-mock" version = "3.12.0" description = "Thin-wrapper around the mock package for easier use with pytest" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -3198,7 +3066,6 @@ testing = ["filelock"] name = "python-dateutil" version = "2.8.2" description = "Extensions to the standard Python datetime module" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ @@ -3213,7 +3080,6 @@ six = ">=1.5" name = "python-dotenv" version = "0.21.1" description = "Read key-value pairs from a .env file and set them as environment variables" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3228,7 +3094,6 @@ cli = ["click (>=5.0)"] name = "python-json-logger" version = "2.0.7" description = "A python library adding a json log formatter" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3240,7 +3105,6 @@ files = [ name = "pytz" version = "2023.3.post1" description = "World timezone definitions, modern and historical" -category = "main" optional = false python-versions = "*" files = [ @@ -3252,7 +3116,6 @@ files = [ name = "pywin32" version = "306" description = "Python for Window Extensions" -category = "main" optional = false python-versions = "*" files = [ @@ -3276,7 +3139,6 @@ files = [ name = "pywin32-ctypes" version = "0.2.2" description = "A (partial) reimplementation of pywin32 using ctypes/cffi" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3288,7 +3150,6 @@ files = [ name = "pywinpty" version = "2.0.12" description = "Pseudo terminal support for Windows from Python." 
-category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3304,7 +3165,6 @@ files = [ name = "pyyaml" version = "6.0.1" description = "YAML parser and emitter for Python" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3362,105 +3222,104 @@ files = [ [[package]] name = "pyzmq" -version = "25.1.1" +version = "25.1.2" description = "Python bindings for 0MQ" -category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "pyzmq-25.1.1-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:381469297409c5adf9a0e884c5eb5186ed33137badcbbb0560b86e910a2f1e76"}, - {file = "pyzmq-25.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:955215ed0604dac5b01907424dfa28b40f2b2292d6493445dd34d0dfa72586a8"}, - {file = "pyzmq-25.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:985bbb1316192b98f32e25e7b9958088431d853ac63aca1d2c236f40afb17c83"}, - {file = "pyzmq-25.1.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:afea96f64efa98df4da6958bae37f1cbea7932c35878b185e5982821bc883369"}, - {file = "pyzmq-25.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76705c9325d72a81155bb6ab48d4312e0032bf045fb0754889133200f7a0d849"}, - {file = "pyzmq-25.1.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:77a41c26205d2353a4c94d02be51d6cbdf63c06fbc1295ea57dad7e2d3381b71"}, - {file = "pyzmq-25.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:12720a53e61c3b99d87262294e2b375c915fea93c31fc2336898c26d7aed34cd"}, - {file = "pyzmq-25.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:57459b68e5cd85b0be8184382cefd91959cafe79ae019e6b1ae6e2ba8a12cda7"}, - {file = "pyzmq-25.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:292fe3fc5ad4a75bc8df0dfaee7d0babe8b1f4ceb596437213821f761b4589f9"}, - {file = "pyzmq-25.1.1-cp310-cp310-win32.whl", hash = "sha256:35b5ab8c28978fbbb86ea54958cd89f5176ce747c1fb3d87356cf698048a7790"}, - {file = "pyzmq-25.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:11baebdd5fc5b475d484195e49bae2dc64b94a5208f7c89954e9e354fc609d8f"}, - {file = "pyzmq-25.1.1-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:d20a0ddb3e989e8807d83225a27e5c2eb2260eaa851532086e9e0fa0d5287d83"}, - {file = "pyzmq-25.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e1c1be77bc5fb77d923850f82e55a928f8638f64a61f00ff18a67c7404faf008"}, - {file = "pyzmq-25.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d89528b4943d27029a2818f847c10c2cecc79fa9590f3cb1860459a5be7933eb"}, - {file = "pyzmq-25.1.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:90f26dc6d5f241ba358bef79be9ce06de58d477ca8485e3291675436d3827cf8"}, - {file = "pyzmq-25.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2b92812bd214018e50b6380ea3ac0c8bb01ac07fcc14c5f86a5bb25e74026e9"}, - {file = "pyzmq-25.1.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:2f957ce63d13c28730f7fd6b72333814221c84ca2421298f66e5143f81c9f91f"}, - {file = "pyzmq-25.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:047a640f5c9c6ade7b1cc6680a0e28c9dd5a0825135acbd3569cc96ea00b2505"}, - {file = "pyzmq-25.1.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7f7e58effd14b641c5e4dec8c7dab02fb67a13df90329e61c869b9cc607ef752"}, - {file = "pyzmq-25.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c2910967e6ab16bf6fbeb1f771c89a7050947221ae12a5b0b60f3bca2ee19bca"}, - {file = "pyzmq-25.1.1-cp311-cp311-win32.whl", hash = 
"sha256:76c1c8efb3ca3a1818b837aea423ff8a07bbf7aafe9f2f6582b61a0458b1a329"}, - {file = "pyzmq-25.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:44e58a0554b21fc662f2712814a746635ed668d0fbc98b7cb9d74cb798d202e6"}, - {file = "pyzmq-25.1.1-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:e1ffa1c924e8c72778b9ccd386a7067cddf626884fd8277f503c48bb5f51c762"}, - {file = "pyzmq-25.1.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:1af379b33ef33757224da93e9da62e6471cf4a66d10078cf32bae8127d3d0d4a"}, - {file = "pyzmq-25.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cff084c6933680d1f8b2f3b4ff5bbb88538a4aac00d199ac13f49d0698727ecb"}, - {file = "pyzmq-25.1.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2400a94f7dd9cb20cd012951a0cbf8249e3d554c63a9c0cdfd5cbb6c01d2dec"}, - {file = "pyzmq-25.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d81f1ddae3858b8299d1da72dd7d19dd36aab654c19671aa8a7e7fb02f6638a"}, - {file = "pyzmq-25.1.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:255ca2b219f9e5a3a9ef3081512e1358bd4760ce77828e1028b818ff5610b87b"}, - {file = "pyzmq-25.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a882ac0a351288dd18ecae3326b8a49d10c61a68b01419f3a0b9a306190baf69"}, - {file = "pyzmq-25.1.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:724c292bb26365659fc434e9567b3f1adbdb5e8d640c936ed901f49e03e5d32e"}, - {file = "pyzmq-25.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ca1ed0bb2d850aa8471387882247c68f1e62a4af0ce9c8a1dbe0d2bf69e41fb"}, - {file = "pyzmq-25.1.1-cp312-cp312-win32.whl", hash = "sha256:b3451108ab861040754fa5208bca4a5496c65875710f76789a9ad27c801a0075"}, - {file = "pyzmq-25.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:eadbefd5e92ef8a345f0525b5cfd01cf4e4cc651a2cffb8f23c0dd184975d787"}, - {file = "pyzmq-25.1.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:db0b2af416ba735c6304c47f75d348f498b92952f5e3e8bff449336d2728795d"}, - {file = "pyzmq-25.1.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7c133e93b405eb0d36fa430c94185bdd13c36204a8635470cccc200723c13bb"}, - {file = "pyzmq-25.1.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:273bc3959bcbff3f48606b28229b4721716598d76b5aaea2b4a9d0ab454ec062"}, - {file = "pyzmq-25.1.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:cbc8df5c6a88ba5ae385d8930da02201165408dde8d8322072e3e5ddd4f68e22"}, - {file = "pyzmq-25.1.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:18d43df3f2302d836f2a56f17e5663e398416e9dd74b205b179065e61f1a6edf"}, - {file = "pyzmq-25.1.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:73461eed88a88c866656e08f89299720a38cb4e9d34ae6bf5df6f71102570f2e"}, - {file = "pyzmq-25.1.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:34c850ce7976d19ebe7b9d4b9bb8c9dfc7aac336c0958e2651b88cbd46682123"}, - {file = "pyzmq-25.1.1-cp36-cp36m-win32.whl", hash = "sha256:d2045d6d9439a0078f2a34b57c7b18c4a6aef0bee37f22e4ec9f32456c852c71"}, - {file = "pyzmq-25.1.1-cp36-cp36m-win_amd64.whl", hash = "sha256:458dea649f2f02a0b244ae6aef8dc29325a2810aa26b07af8374dc2a9faf57e3"}, - {file = "pyzmq-25.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7cff25c5b315e63b07a36f0c2bab32c58eafbe57d0dce61b614ef4c76058c115"}, - {file = "pyzmq-25.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1579413ae492b05de5a6174574f8c44c2b9b122a42015c5292afa4be2507f28"}, - {file = 
"pyzmq-25.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3d0a409d3b28607cc427aa5c30a6f1e4452cc44e311f843e05edb28ab5e36da0"}, - {file = "pyzmq-25.1.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:21eb4e609a154a57c520e3d5bfa0d97e49b6872ea057b7c85257b11e78068222"}, - {file = "pyzmq-25.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:034239843541ef7a1aee0c7b2cb7f6aafffb005ede965ae9cbd49d5ff4ff73cf"}, - {file = "pyzmq-25.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f8115e303280ba09f3898194791a153862cbf9eef722ad8f7f741987ee2a97c7"}, - {file = "pyzmq-25.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:1a5d26fe8f32f137e784f768143728438877d69a586ddeaad898558dc971a5ae"}, - {file = "pyzmq-25.1.1-cp37-cp37m-win32.whl", hash = "sha256:f32260e556a983bc5c7ed588d04c942c9a8f9c2e99213fec11a031e316874c7e"}, - {file = "pyzmq-25.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:abf34e43c531bbb510ae7e8f5b2b1f2a8ab93219510e2b287a944432fad135f3"}, - {file = "pyzmq-25.1.1-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:87e34f31ca8f168c56d6fbf99692cc8d3b445abb5bfd08c229ae992d7547a92a"}, - {file = "pyzmq-25.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c9c6c9b2c2f80747a98f34ef491c4d7b1a8d4853937bb1492774992a120f475d"}, - {file = "pyzmq-25.1.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5619f3f5a4db5dbb572b095ea3cb5cc035335159d9da950830c9c4db2fbb6995"}, - {file = "pyzmq-25.1.1-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5a34d2395073ef862b4032343cf0c32a712f3ab49d7ec4f42c9661e0294d106f"}, - {file = "pyzmq-25.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25f0e6b78220aba09815cd1f3a32b9c7cb3e02cb846d1cfc526b6595f6046618"}, - {file = "pyzmq-25.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3669cf8ee3520c2f13b2e0351c41fea919852b220988d2049249db10046a7afb"}, - {file = "pyzmq-25.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2d163a18819277e49911f7461567bda923461c50b19d169a062536fffe7cd9d2"}, - {file = "pyzmq-25.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:df27ffddff4190667d40de7beba4a950b5ce78fe28a7dcc41d6f8a700a80a3c0"}, - {file = "pyzmq-25.1.1-cp38-cp38-win32.whl", hash = "sha256:a382372898a07479bd34bda781008e4a954ed8750f17891e794521c3e21c2e1c"}, - {file = "pyzmq-25.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:52533489f28d62eb1258a965f2aba28a82aa747202c8fa5a1c7a43b5db0e85c1"}, - {file = "pyzmq-25.1.1-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:03b3f49b57264909aacd0741892f2aecf2f51fb053e7d8ac6767f6c700832f45"}, - {file = "pyzmq-25.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:330f9e188d0d89080cde66dc7470f57d1926ff2fb5576227f14d5be7ab30b9fa"}, - {file = "pyzmq-25.1.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2ca57a5be0389f2a65e6d3bb2962a971688cbdd30b4c0bd188c99e39c234f414"}, - {file = "pyzmq-25.1.1-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d457aed310f2670f59cc5b57dcfced452aeeed77f9da2b9763616bd57e4dbaae"}, - {file = "pyzmq-25.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c56d748ea50215abef7030c72b60dd723ed5b5c7e65e7bc2504e77843631c1a6"}, - {file = "pyzmq-25.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8f03d3f0d01cb5a018debeb412441996a517b11c5c17ab2001aa0597c6d6882c"}, - {file = "pyzmq-25.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:820c4a08195a681252f46926de10e29b6bbf3e17b30037bd4250d72dd3ddaab8"}, - {file = "pyzmq-25.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:17ef5f01d25b67ca8f98120d5fa1d21efe9611604e8eb03a5147360f517dd1e2"}, - {file = "pyzmq-25.1.1-cp39-cp39-win32.whl", hash = "sha256:04ccbed567171579ec2cebb9c8a3e30801723c575601f9a990ab25bcac6b51e2"}, - {file = "pyzmq-25.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:e61f091c3ba0c3578411ef505992d356a812fb200643eab27f4f70eed34a29ef"}, - {file = "pyzmq-25.1.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ade6d25bb29c4555d718ac6d1443a7386595528c33d6b133b258f65f963bb0f6"}, - {file = "pyzmq-25.1.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0c95ddd4f6e9fca4e9e3afaa4f9df8552f0ba5d1004e89ef0a68e1f1f9807c7"}, - {file = "pyzmq-25.1.1-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48e466162a24daf86f6b5ca72444d2bf39a5e58da5f96370078be67c67adc978"}, - {file = "pyzmq-25.1.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abc719161780932c4e11aaebb203be3d6acc6b38d2f26c0f523b5b59d2fc1996"}, - {file = "pyzmq-25.1.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:1ccf825981640b8c34ae54231b7ed00271822ea1c6d8ba1090ebd4943759abf5"}, - {file = "pyzmq-25.1.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c2f20ce161ebdb0091a10c9ca0372e023ce24980d0e1f810f519da6f79c60800"}, - {file = "pyzmq-25.1.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:deee9ca4727f53464daf089536e68b13e6104e84a37820a88b0a057b97bba2d2"}, - {file = "pyzmq-25.1.1-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:aa8d6cdc8b8aa19ceb319aaa2b660cdaccc533ec477eeb1309e2a291eaacc43a"}, - {file = "pyzmq-25.1.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:019e59ef5c5256a2c7378f2fb8560fc2a9ff1d315755204295b2eab96b254d0a"}, - {file = "pyzmq-25.1.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:b9af3757495c1ee3b5c4e945c1df7be95562277c6e5bccc20a39aec50f826cd0"}, - {file = "pyzmq-25.1.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:548d6482dc8aadbe7e79d1b5806585c8120bafa1ef841167bc9090522b610fa6"}, - {file = "pyzmq-25.1.1-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:057e824b2aae50accc0f9a0570998adc021b372478a921506fddd6c02e60308e"}, - {file = "pyzmq-25.1.1-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2243700cc5548cff20963f0ca92d3e5e436394375ab8a354bbea2b12911b20b0"}, - {file = "pyzmq-25.1.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79986f3b4af059777111409ee517da24a529bdbd46da578b33f25580adcff728"}, - {file = "pyzmq-25.1.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:11d58723d44d6ed4dd677c5615b2ffb19d5c426636345567d6af82be4dff8a55"}, - {file = "pyzmq-25.1.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:49d238cf4b69652257db66d0c623cd3e09b5d2e9576b56bc067a396133a00d4a"}, - {file = "pyzmq-25.1.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fedbdc753827cf014c01dbbee9c3be17e5a208dcd1bf8641ce2cd29580d1f0d4"}, - {file = "pyzmq-25.1.1-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bc16ac425cc927d0a57d242589f87ee093884ea4804c05a13834d07c20db203c"}, - {file = "pyzmq-25.1.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:11c1d2aed9079c6b0c9550a7257a836b4a637feb334904610f06d70eb44c56d2"}, - {file = "pyzmq-25.1.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e8a701123029cc240cea61dd2d16ad57cab4691804143ce80ecd9286b464d180"}, - {file = "pyzmq-25.1.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:61706a6b6c24bdece85ff177fec393545a3191eeda35b07aaa1458a027ad1304"}, - {file = "pyzmq-25.1.1.tar.gz", hash = "sha256:259c22485b71abacdfa8bf79720cd7bcf4b9d128b30ea554f01ae71fdbfdaa23"}, + {file = "pyzmq-25.1.2-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:e624c789359f1a16f83f35e2c705d07663ff2b4d4479bad35621178d8f0f6ea4"}, + {file = "pyzmq-25.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:49151b0efece79f6a79d41a461d78535356136ee70084a1c22532fc6383f4ad0"}, + {file = "pyzmq-25.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9a5f194cf730f2b24d6af1f833c14c10f41023da46a7f736f48b6d35061e76e"}, + {file = "pyzmq-25.1.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:faf79a302f834d9e8304fafdc11d0d042266667ac45209afa57e5efc998e3872"}, + {file = "pyzmq-25.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f51a7b4ead28d3fca8dda53216314a553b0f7a91ee8fc46a72b402a78c3e43d"}, + {file = "pyzmq-25.1.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:0ddd6d71d4ef17ba5a87becf7ddf01b371eaba553c603477679ae817a8d84d75"}, + {file = "pyzmq-25.1.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:246747b88917e4867e2367b005fc8eefbb4a54b7db363d6c92f89d69abfff4b6"}, + {file = "pyzmq-25.1.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:00c48ae2fd81e2a50c3485de1b9d5c7c57cd85dc8ec55683eac16846e57ac979"}, + {file = "pyzmq-25.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5a68d491fc20762b630e5db2191dd07ff89834086740f70e978bb2ef2668be08"}, + {file = "pyzmq-25.1.2-cp310-cp310-win32.whl", hash = "sha256:09dfe949e83087da88c4a76767df04b22304a682d6154de2c572625c62ad6886"}, + {file = "pyzmq-25.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:fa99973d2ed20417744fca0073390ad65ce225b546febb0580358e36aa90dba6"}, + {file = "pyzmq-25.1.2-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:82544e0e2d0c1811482d37eef297020a040c32e0687c1f6fc23a75b75db8062c"}, + {file = "pyzmq-25.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:01171fc48542348cd1a360a4b6c3e7d8f46cdcf53a8d40f84db6707a6768acc1"}, + {file = "pyzmq-25.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc69c96735ab501419c432110016329bf0dea8898ce16fab97c6d9106dc0b348"}, + {file = "pyzmq-25.1.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3e124e6b1dd3dfbeb695435dff0e383256655bb18082e094a8dd1f6293114642"}, + {file = "pyzmq-25.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7598d2ba821caa37a0f9d54c25164a4fa351ce019d64d0b44b45540950458840"}, + {file = "pyzmq-25.1.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d1299d7e964c13607efd148ca1f07dcbf27c3ab9e125d1d0ae1d580a1682399d"}, + {file = "pyzmq-25.1.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4e6f689880d5ad87918430957297c975203a082d9a036cc426648fcbedae769b"}, + {file = "pyzmq-25.1.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cc69949484171cc961e6ecd4a8911b9ce7a0d1f738fcae717177c231bf77437b"}, + {file = "pyzmq-25.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9880078f683466b7f567b8624bfc16cad65077be046b6e8abb53bed4eeb82dd3"}, + {file = 
"pyzmq-25.1.2-cp311-cp311-win32.whl", hash = "sha256:4e5837af3e5aaa99a091302df5ee001149baff06ad22b722d34e30df5f0d9097"}, + {file = "pyzmq-25.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:25c2dbb97d38b5ac9fd15586e048ec5eb1e38f3d47fe7d92167b0c77bb3584e9"}, + {file = "pyzmq-25.1.2-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:11e70516688190e9c2db14fcf93c04192b02d457b582a1f6190b154691b4c93a"}, + {file = "pyzmq-25.1.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:313c3794d650d1fccaaab2df942af9f2c01d6217c846177cfcbc693c7410839e"}, + {file = "pyzmq-25.1.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b3cbba2f47062b85fe0ef9de5b987612140a9ba3a9c6d2543c6dec9f7c2ab27"}, + {file = "pyzmq-25.1.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fc31baa0c32a2ca660784d5af3b9487e13b61b3032cb01a115fce6588e1bed30"}, + {file = "pyzmq-25.1.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02c9087b109070c5ab0b383079fa1b5f797f8d43e9a66c07a4b8b8bdecfd88ee"}, + {file = "pyzmq-25.1.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:f8429b17cbb746c3e043cb986328da023657e79d5ed258b711c06a70c2ea7537"}, + {file = "pyzmq-25.1.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5074adeacede5f810b7ef39607ee59d94e948b4fd954495bdb072f8c54558181"}, + {file = "pyzmq-25.1.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:7ae8f354b895cbd85212da245f1a5ad8159e7840e37d78b476bb4f4c3f32a9fe"}, + {file = "pyzmq-25.1.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b264bf2cc96b5bc43ce0e852be995e400376bd87ceb363822e2cb1964fcdc737"}, + {file = "pyzmq-25.1.2-cp312-cp312-win32.whl", hash = "sha256:02bbc1a87b76e04fd780b45e7f695471ae6de747769e540da909173d50ff8e2d"}, + {file = "pyzmq-25.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:ced111c2e81506abd1dc142e6cd7b68dd53747b3b7ae5edbea4578c5eeff96b7"}, + {file = "pyzmq-25.1.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:7b6d09a8962a91151f0976008eb7b29b433a560fde056ec7a3db9ec8f1075438"}, + {file = "pyzmq-25.1.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:967668420f36878a3c9ecb5ab33c9d0ff8d054f9c0233d995a6d25b0e95e1b6b"}, + {file = "pyzmq-25.1.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5edac3f57c7ddaacdb4d40f6ef2f9e299471fc38d112f4bc6d60ab9365445fb0"}, + {file = "pyzmq-25.1.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:0dabfb10ef897f3b7e101cacba1437bd3a5032ee667b7ead32bbcdd1a8422fe7"}, + {file = "pyzmq-25.1.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:2c6441e0398c2baacfe5ba30c937d274cfc2dc5b55e82e3749e333aabffde561"}, + {file = "pyzmq-25.1.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:16b726c1f6c2e7625706549f9dbe9b06004dfbec30dbed4bf50cbdfc73e5b32a"}, + {file = "pyzmq-25.1.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:a86c2dd76ef71a773e70551a07318b8e52379f58dafa7ae1e0a4be78efd1ff16"}, + {file = "pyzmq-25.1.2-cp36-cp36m-win32.whl", hash = "sha256:359f7f74b5d3c65dae137f33eb2bcfa7ad9ebefd1cab85c935f063f1dbb245cc"}, + {file = "pyzmq-25.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:55875492f820d0eb3417b51d96fea549cde77893ae3790fd25491c5754ea2f68"}, + {file = "pyzmq-25.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b8c8a419dfb02e91b453615c69568442e897aaf77561ee0064d789705ff37a92"}, + {file = "pyzmq-25.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8807c87fa893527ae8a524c15fc505d9950d5e856f03dae5921b5e9aa3b8783b"}, + {file = 
"pyzmq-25.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5e319ed7d6b8f5fad9b76daa0a68497bc6f129858ad956331a5835785761e003"}, + {file = "pyzmq-25.1.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:3c53687dde4d9d473c587ae80cc328e5b102b517447456184b485587ebd18b62"}, + {file = "pyzmq-25.1.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:9add2e5b33d2cd765ad96d5eb734a5e795a0755f7fc49aa04f76d7ddda73fd70"}, + {file = "pyzmq-25.1.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:e690145a8c0c273c28d3b89d6fb32c45e0d9605b2293c10e650265bf5c11cfec"}, + {file = "pyzmq-25.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:00a06faa7165634f0cac1abb27e54d7a0b3b44eb9994530b8ec73cf52e15353b"}, + {file = "pyzmq-25.1.2-cp37-cp37m-win32.whl", hash = "sha256:0f97bc2f1f13cb16905a5f3e1fbdf100e712d841482b2237484360f8bc4cb3d7"}, + {file = "pyzmq-25.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6cc0020b74b2e410287e5942e1e10886ff81ac77789eb20bec13f7ae681f0fdd"}, + {file = "pyzmq-25.1.2-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:bef02cfcbded83473bdd86dd8d3729cd82b2e569b75844fb4ea08fee3c26ae41"}, + {file = "pyzmq-25.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e10a4b5a4b1192d74853cc71a5e9fd022594573926c2a3a4802020360aa719d8"}, + {file = "pyzmq-25.1.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8c5f80e578427d4695adac6fdf4370c14a2feafdc8cb35549c219b90652536ae"}, + {file = "pyzmq-25.1.2-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5dde6751e857910c1339890f3524de74007958557593b9e7e8c5f01cd919f8a7"}, + {file = "pyzmq-25.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea1608dd169da230a0ad602d5b1ebd39807ac96cae1845c3ceed39af08a5c6df"}, + {file = "pyzmq-25.1.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0f513130c4c361201da9bc69df25a086487250e16b5571ead521b31ff6b02220"}, + {file = "pyzmq-25.1.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:019744b99da30330798bb37df33549d59d380c78e516e3bab9c9b84f87a9592f"}, + {file = "pyzmq-25.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2e2713ef44be5d52dd8b8e2023d706bf66cb22072e97fc71b168e01d25192755"}, + {file = "pyzmq-25.1.2-cp38-cp38-win32.whl", hash = "sha256:07cd61a20a535524906595e09344505a9bd46f1da7a07e504b315d41cd42eb07"}, + {file = "pyzmq-25.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb7e49a17fb8c77d3119d41a4523e432eb0c6932187c37deb6fbb00cc3028088"}, + {file = "pyzmq-25.1.2-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:94504ff66f278ab4b7e03e4cba7e7e400cb73bfa9d3d71f58d8972a8dc67e7a6"}, + {file = "pyzmq-25.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6dd0d50bbf9dca1d0bdea219ae6b40f713a3fb477c06ca3714f208fd69e16fd8"}, + {file = "pyzmq-25.1.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:004ff469d21e86f0ef0369717351073e0e577428e514c47c8480770d5e24a565"}, + {file = "pyzmq-25.1.2-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c0b5ca88a8928147b7b1e2dfa09f3b6c256bc1135a1338536cbc9ea13d3b7add"}, + {file = "pyzmq-25.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c9a79f1d2495b167119d02be7448bfba57fad2a4207c4f68abc0bab4b92925b"}, + {file = "pyzmq-25.1.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:518efd91c3d8ac9f9b4f7dd0e2b7b8bf1a4fe82a308009016b07eaa48681af82"}, + {file = "pyzmq-25.1.2-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:1ec23bd7b3a893ae676d0e54ad47d18064e6c5ae1fadc2f195143fb27373f7f6"}, + {file = "pyzmq-25.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db36c27baed588a5a8346b971477b718fdc66cf5b80cbfbd914b4d6d355e44e2"}, + {file = "pyzmq-25.1.2-cp39-cp39-win32.whl", hash = "sha256:39b1067f13aba39d794a24761e385e2eddc26295826530a8c7b6c6c341584289"}, + {file = "pyzmq-25.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:8e9f3fabc445d0ce320ea2c59a75fe3ea591fdbdeebec5db6de530dd4b09412e"}, + {file = "pyzmq-25.1.2-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a8c1d566344aee826b74e472e16edae0a02e2a044f14f7c24e123002dcff1c05"}, + {file = "pyzmq-25.1.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:759cfd391a0996345ba94b6a5110fca9c557ad4166d86a6e81ea526c376a01e8"}, + {file = "pyzmq-25.1.2-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c61e346ac34b74028ede1c6b4bcecf649d69b707b3ff9dc0fab453821b04d1e"}, + {file = "pyzmq-25.1.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cb8fc1f8d69b411b8ec0b5f1ffbcaf14c1db95b6bccea21d83610987435f1a4"}, + {file = "pyzmq-25.1.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:3c00c9b7d1ca8165c610437ca0c92e7b5607b2f9076f4eb4b095c85d6e680a1d"}, + {file = "pyzmq-25.1.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:df0c7a16ebb94452d2909b9a7b3337940e9a87a824c4fc1c7c36bb4404cb0cde"}, + {file = "pyzmq-25.1.2-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:45999e7f7ed5c390f2e87ece7f6c56bf979fb213550229e711e45ecc7d42ccb8"}, + {file = "pyzmq-25.1.2-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ac170e9e048b40c605358667aca3d94e98f604a18c44bdb4c102e67070f3ac9b"}, + {file = "pyzmq-25.1.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1b604734bec94f05f81b360a272fc824334267426ae9905ff32dc2be433ab96"}, + {file = "pyzmq-25.1.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:a793ac733e3d895d96f865f1806f160696422554e46d30105807fdc9841b9f7d"}, + {file = "pyzmq-25.1.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0806175f2ae5ad4b835ecd87f5f85583316b69f17e97786f7443baaf54b9bb98"}, + {file = "pyzmq-25.1.2-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ef12e259e7bc317c7597d4f6ef59b97b913e162d83b421dd0db3d6410f17a244"}, + {file = "pyzmq-25.1.2-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ea253b368eb41116011add00f8d5726762320b1bda892f744c91997b65754d73"}, + {file = "pyzmq-25.1.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b9b1f2ad6498445a941d9a4fee096d387fee436e45cc660e72e768d3d8ee611"}, + {file = "pyzmq-25.1.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:8b14c75979ce932c53b79976a395cb2a8cd3aaf14aef75e8c2cb55a330b9b49d"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:889370d5174a741a62566c003ee8ddba4b04c3f09a97b8000092b7ca83ec9c49"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9a18fff090441a40ffda8a7f4f18f03dc56ae73f148f1832e109f9bffa85df15"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99a6b36f95c98839ad98f8c553d8507644c880cf1e0a57fe5e3a3f3969040882"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:4345c9a27f4310afbb9c01750e9461ff33d6fb74cd2456b107525bbeebcb5be3"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3516e0b6224cf6e43e341d56da15fd33bdc37fa0c06af4f029f7d7dfceceabbc"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:146b9b1f29ead41255387fb07be56dc29639262c0f7344f570eecdcd8d683314"}, + {file = "pyzmq-25.1.2.tar.gz", hash = "sha256:93f1aa311e8bb912e34f004cf186407a4e90eec4f0ecc0efd26056bf7eda0226"}, ] [package.dependencies] @@ -3470,7 +3329,6 @@ cffi = {version = "*", markers = "implementation_name == \"pypy\""} name = "rdflib" version = "6.3.2" description = "RDFLib is a Python library for working with RDF, a simple yet powerful language for representing information." -category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -3490,14 +3348,13 @@ networkx = ["networkx (>=2.0.0,<3.0.0)"] [[package]] name = "referencing" -version = "0.31.0" +version = "0.32.1" description = "JSON Referencing + Python" -category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "referencing-0.31.0-py3-none-any.whl", hash = "sha256:381b11e53dd93babb55696c71cf42aef2d36b8a150c49bf0bc301e36d536c882"}, - {file = "referencing-0.31.0.tar.gz", hash = "sha256:cc28f2c88fbe7b961a7817a0abc034c09a1e36358f82fedb4ffdf29a25398863"}, + {file = "referencing-0.32.1-py3-none-any.whl", hash = "sha256:7e4dc12271d8e15612bfe35792f5ea1c40970dadf8624602e33db2758f7ee554"}, + {file = "referencing-0.32.1.tar.gz", hash = "sha256:3c57da0513e9563eb7e203ebe9bb3a1b509b042016433bd1e45a2853466c3dd3"}, ] [package.dependencies] @@ -3506,107 +3363,110 @@ rpds-py = ">=0.7.0" [[package]] name = "regex" -version = "2023.10.3" +version = "2023.12.25" description = "Alternative regular expression module, to replace re." 
-category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "regex-2023.10.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4c34d4f73ea738223a094d8e0ffd6d2c1a1b4c175da34d6b0de3d8d69bee6bcc"}, - {file = "regex-2023.10.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a8f4e49fc3ce020f65411432183e6775f24e02dff617281094ba6ab079ef0915"}, - {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4cd1bccf99d3ef1ab6ba835308ad85be040e6a11b0977ef7ea8c8005f01a3c29"}, - {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:81dce2ddc9f6e8f543d94b05d56e70d03a0774d32f6cca53e978dc01e4fc75b8"}, - {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c6b4d23c04831e3ab61717a707a5d763b300213db49ca680edf8bf13ab5d91b"}, - {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c15ad0aee158a15e17e0495e1e18741573d04eb6da06d8b84af726cfc1ed02ee"}, - {file = "regex-2023.10.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6239d4e2e0b52c8bd38c51b760cd870069f0bdf99700a62cd509d7a031749a55"}, - {file = "regex-2023.10.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4a8bf76e3182797c6b1afa5b822d1d5802ff30284abe4599e1247be4fd6b03be"}, - {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d9c727bbcf0065cbb20f39d2b4f932f8fa1631c3e01fcedc979bd4f51fe051c5"}, - {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3ccf2716add72f80714b9a63899b67fa711b654be3fcdd34fa391d2d274ce767"}, - {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:107ac60d1bfdc3edb53be75e2a52aff7481b92817cfdddd9b4519ccf0e54a6ff"}, - {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:00ba3c9818e33f1fa974693fb55d24cdc8ebafcb2e4207680669d8f8d7cca79a"}, - {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f0a47efb1dbef13af9c9a54a94a0b814902e547b7f21acb29434504d18f36e3a"}, - {file = "regex-2023.10.3-cp310-cp310-win32.whl", hash = "sha256:36362386b813fa6c9146da6149a001b7bd063dabc4d49522a1f7aa65b725c7ec"}, - {file = "regex-2023.10.3-cp310-cp310-win_amd64.whl", hash = "sha256:c65a3b5330b54103e7d21cac3f6bf3900d46f6d50138d73343d9e5b2900b2353"}, - {file = "regex-2023.10.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:90a79bce019c442604662d17bf69df99090e24cdc6ad95b18b6725c2988a490e"}, - {file = "regex-2023.10.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c7964c2183c3e6cce3f497e3a9f49d182e969f2dc3aeeadfa18945ff7bdd7051"}, - {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ef80829117a8061f974b2fda8ec799717242353bff55f8a29411794d635d964"}, - {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5addc9d0209a9afca5fc070f93b726bf7003bd63a427f65ef797a931782e7edc"}, - {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c148bec483cc4b421562b4bcedb8e28a3b84fcc8f0aa4418e10898f3c2c0eb9b"}, - {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d1f21af4c1539051049796a0f50aa342f9a27cde57318f2fc41ed50b0dbc4ac"}, - {file = 
"regex-2023.10.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0b9ac09853b2a3e0d0082104036579809679e7715671cfbf89d83c1cb2a30f58"}, - {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ebedc192abbc7fd13c5ee800e83a6df252bec691eb2c4bedc9f8b2e2903f5e2a"}, - {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d8a993c0a0ffd5f2d3bda23d0cd75e7086736f8f8268de8a82fbc4bd0ac6791e"}, - {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:be6b7b8d42d3090b6c80793524fa66c57ad7ee3fe9722b258aec6d0672543fd0"}, - {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4023e2efc35a30e66e938de5aef42b520c20e7eda7bb5fb12c35e5d09a4c43f6"}, - {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0d47840dc05e0ba04fe2e26f15126de7c755496d5a8aae4a08bda4dd8d646c54"}, - {file = "regex-2023.10.3-cp311-cp311-win32.whl", hash = "sha256:9145f092b5d1977ec8c0ab46e7b3381b2fd069957b9862a43bd383e5c01d18c2"}, - {file = "regex-2023.10.3-cp311-cp311-win_amd64.whl", hash = "sha256:b6104f9a46bd8743e4f738afef69b153c4b8b592d35ae46db07fc28ae3d5fb7c"}, - {file = "regex-2023.10.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:bff507ae210371d4b1fe316d03433ac099f184d570a1a611e541923f78f05037"}, - {file = "regex-2023.10.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:be5e22bbb67924dea15039c3282fa4cc6cdfbe0cbbd1c0515f9223186fc2ec5f"}, - {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a992f702c9be9c72fa46f01ca6e18d131906a7180950958f766c2aa294d4b41"}, - {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7434a61b158be563c1362d9071358f8ab91b8d928728cd2882af060481244c9e"}, - {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c2169b2dcabf4e608416f7f9468737583ce5f0a6e8677c4efbf795ce81109d7c"}, - {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9e908ef5889cda4de038892b9accc36d33d72fb3e12c747e2799a0e806ec841"}, - {file = "regex-2023.10.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12bd4bc2c632742c7ce20db48e0d99afdc05e03f0b4c1af90542e05b809a03d9"}, - {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bc72c231f5449d86d6c7d9cc7cd819b6eb30134bb770b8cfdc0765e48ef9c420"}, - {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bce8814b076f0ce5766dc87d5a056b0e9437b8e0cd351b9a6c4e1134a7dfbda9"}, - {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:ba7cd6dc4d585ea544c1412019921570ebd8a597fabf475acc4528210d7c4a6f"}, - {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b0c7d2f698e83f15228ba41c135501cfe7d5740181d5903e250e47f617eb4292"}, - {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5a8f91c64f390ecee09ff793319f30a0f32492e99f5dc1c72bc361f23ccd0a9a"}, - {file = "regex-2023.10.3-cp312-cp312-win32.whl", hash = "sha256:ad08a69728ff3c79866d729b095872afe1e0557251da4abb2c5faff15a91d19a"}, - {file = "regex-2023.10.3-cp312-cp312-win_amd64.whl", hash = "sha256:39cdf8d141d6d44e8d5a12a8569d5a227f645c87df4f92179bd06e2e2705e76b"}, - {file = "regex-2023.10.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4a3ee019a9befe84fa3e917a2dd378807e423d013377a884c1970a3c2792d293"}, - 
{file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76066d7ff61ba6bf3cb5efe2428fc82aac91802844c022d849a1f0f53820502d"}, - {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfe50b61bab1b1ec260fa7cd91106fa9fece57e6beba05630afe27c71259c59b"}, - {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fd88f373cb71e6b59b7fa597e47e518282455c2734fd4306a05ca219a1991b0"}, - {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3ab05a182c7937fb374f7e946f04fb23a0c0699c0450e9fb02ef567412d2fa3"}, - {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dac37cf08fcf2094159922edc7a2784cfcc5c70f8354469f79ed085f0328ebdf"}, - {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e54ddd0bb8fb626aa1f9ba7b36629564544954fff9669b15da3610c22b9a0991"}, - {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3367007ad1951fde612bf65b0dffc8fd681a4ab98ac86957d16491400d661302"}, - {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:16f8740eb6dbacc7113e3097b0a36065a02e37b47c936b551805d40340fb9971"}, - {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:f4f2ca6df64cbdd27f27b34f35adb640b5d2d77264228554e68deda54456eb11"}, - {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:39807cbcbe406efca2a233884e169d056c35aa7e9f343d4e78665246a332f597"}, - {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7eece6fbd3eae4a92d7c748ae825cbc1ee41a89bb1c3db05b5578ed3cfcfd7cb"}, - {file = "regex-2023.10.3-cp37-cp37m-win32.whl", hash = "sha256:ce615c92d90df8373d9e13acddd154152645c0dc060871abf6bd43809673d20a"}, - {file = "regex-2023.10.3-cp37-cp37m-win_amd64.whl", hash = "sha256:0f649fa32fe734c4abdfd4edbb8381c74abf5f34bc0b3271ce687b23729299ed"}, - {file = "regex-2023.10.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9b98b7681a9437262947f41c7fac567c7e1f6eddd94b0483596d320092004533"}, - {file = "regex-2023.10.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:91dc1d531f80c862441d7b66c4505cd6ea9d312f01fb2f4654f40c6fdf5cc37a"}, - {file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82fcc1f1cc3ff1ab8a57ba619b149b907072e750815c5ba63e7aa2e1163384a4"}, - {file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7979b834ec7a33aafae34a90aad9f914c41fd6eaa8474e66953f3f6f7cbd4368"}, - {file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef71561f82a89af6cfcbee47f0fabfdb6e63788a9258e913955d89fdd96902ab"}, - {file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd829712de97753367153ed84f2de752b86cd1f7a88b55a3a775eb52eafe8a94"}, - {file = "regex-2023.10.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00e871d83a45eee2f8688d7e6849609c2ca2a04a6d48fba3dff4deef35d14f07"}, - {file = "regex-2023.10.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:706e7b739fdd17cb89e1fbf712d9dc21311fc2333f6d435eac2d4ee81985098c"}, - {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:cc3f1c053b73f20c7ad88b0d1d23be7e7b3901229ce89f5000a8399746a6e039"}, - {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6f85739e80d13644b981a88f529d79c5bdf646b460ba190bffcaf6d57b2a9863"}, - {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:741ba2f511cc9626b7561a440f87d658aabb3d6b744a86a3c025f866b4d19e7f"}, - {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e77c90ab5997e85901da85131fd36acd0ed2221368199b65f0d11bca44549711"}, - {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:979c24cbefaf2420c4e377ecd1f165ea08cc3d1fbb44bdc51bccbbf7c66a2cb4"}, - {file = "regex-2023.10.3-cp38-cp38-win32.whl", hash = "sha256:58837f9d221744d4c92d2cf7201c6acd19623b50c643b56992cbd2b745485d3d"}, - {file = "regex-2023.10.3-cp38-cp38-win_amd64.whl", hash = "sha256:c55853684fe08d4897c37dfc5faeff70607a5f1806c8be148f1695be4a63414b"}, - {file = "regex-2023.10.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2c54e23836650bdf2c18222c87f6f840d4943944146ca479858404fedeb9f9af"}, - {file = "regex-2023.10.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:69c0771ca5653c7d4b65203cbfc5e66db9375f1078689459fe196fe08b7b4930"}, - {file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ac965a998e1388e6ff2e9781f499ad1eaa41e962a40d11c7823c9952c77123e"}, - {file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1c0e8fae5b27caa34177bdfa5a960c46ff2f78ee2d45c6db15ae3f64ecadde14"}, - {file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6c56c3d47da04f921b73ff9415fbaa939f684d47293f071aa9cbb13c94afc17d"}, - {file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ef1e014eed78ab650bef9a6a9cbe50b052c0aebe553fb2881e0453717573f52"}, - {file = "regex-2023.10.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d29338556a59423d9ff7b6eb0cb89ead2b0875e08fe522f3e068b955c3e7b59b"}, - {file = "regex-2023.10.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9c6d0ced3c06d0f183b73d3c5920727268d2201aa0fe6d55c60d68c792ff3588"}, - {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:994645a46c6a740ee8ce8df7911d4aee458d9b1bc5639bc968226763d07f00fa"}, - {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:66e2fe786ef28da2b28e222c89502b2af984858091675044d93cb50e6f46d7af"}, - {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:11175910f62b2b8c055f2b089e0fedd694fe2be3941b3e2633653bc51064c528"}, - {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:06e9abc0e4c9ab4779c74ad99c3fc10d3967d03114449acc2c2762ad4472b8ca"}, - {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:fb02e4257376ae25c6dd95a5aec377f9b18c09be6ebdefa7ad209b9137b73d48"}, - {file = "regex-2023.10.3-cp39-cp39-win32.whl", hash = "sha256:3b2c3502603fab52d7619b882c25a6850b766ebd1b18de3df23b2f939360e1bd"}, - {file = "regex-2023.10.3-cp39-cp39-win_amd64.whl", hash = "sha256:adbccd17dcaff65704c856bd29951c58a1bd4b2b0f8ad6b826dbd543fe740988"}, - {file = "regex-2023.10.3.tar.gz", hash = "sha256:3fef4f844d2290ee0ba57addcec17eec9e3df73f10a2748485dfd6a3a188cc0f"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_universal2.whl", hash = 
"sha256:0694219a1d54336fd0445ea382d49d36882415c0134ee1e8332afd1529f0baa5"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b014333bd0217ad3d54c143de9d4b9a3ca1c5a29a6d0d554952ea071cff0f1f8"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d865984b3f71f6d0af64d0d88f5733521698f6c16f445bb09ce746c92c97c586"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e0eabac536b4cc7f57a5f3d095bfa557860ab912f25965e08fe1545e2ed8b4c"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c25a8ad70e716f96e13a637802813f65d8a6760ef48672aa3502f4c24ea8b400"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9b6d73353f777630626f403b0652055ebfe8ff142a44ec2cf18ae470395766e"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9cc99d6946d750eb75827cb53c4371b8b0fe89c733a94b1573c9dd16ea6c9e4"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88d1f7bef20c721359d8675f7d9f8e414ec5003d8f642fdfd8087777ff7f94b5"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cb3fe77aec8f1995611f966d0c656fdce398317f850d0e6e7aebdfe61f40e1cd"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7aa47c2e9ea33a4a2a05f40fcd3ea36d73853a2aae7b4feab6fc85f8bf2c9704"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:df26481f0c7a3f8739fecb3e81bc9da3fcfae34d6c094563b9d4670b047312e1"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c40281f7d70baf6e0db0c2f7472b31609f5bc2748fe7275ea65a0b4601d9b392"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:d94a1db462d5690ebf6ae86d11c5e420042b9898af5dcf278bd97d6bda065423"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ba1b30765a55acf15dce3f364e4928b80858fa8f979ad41f862358939bdd1f2f"}, + {file = "regex-2023.12.25-cp310-cp310-win32.whl", hash = "sha256:150c39f5b964e4d7dba46a7962a088fbc91f06e606f023ce57bb347a3b2d4630"}, + {file = "regex-2023.12.25-cp310-cp310-win_amd64.whl", hash = "sha256:09da66917262d9481c719599116c7dc0c321ffcec4b1f510c4f8a066f8768105"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1b9d811f72210fa9306aeb88385b8f8bcef0dfbf3873410413c00aa94c56c2b6"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d902a43085a308cef32c0d3aea962524b725403fd9373dea18110904003bac97"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d166eafc19f4718df38887b2bbe1467a4f74a9830e8605089ea7a30dd4da8887"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7ad32824b7f02bb3c9f80306d405a1d9b7bb89362d68b3c5a9be53836caebdb"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:636ba0a77de609d6510235b7f0e77ec494d2657108f777e8765efc060094c98c"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fda75704357805eb953a3ee15a2b240694a9a514548cd49b3c5124b4e2ad01b"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:f72cbae7f6b01591f90814250e636065850c5926751af02bb48da94dfced7baa"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db2a0b1857f18b11e3b0e54ddfefc96af46b0896fb678c85f63fb8c37518b3e7"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7502534e55c7c36c0978c91ba6f61703faf7ce733715ca48f499d3dbbd7657e0"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e8c7e08bb566de4faaf11984af13f6bcf6a08f327b13631d41d62592681d24fe"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:283fc8eed679758de38fe493b7d7d84a198b558942b03f017b1f94dda8efae80"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:f44dd4d68697559d007462b0a3a1d9acd61d97072b71f6d1968daef26bc744bd"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:67d3ccfc590e5e7197750fcb3a2915b416a53e2de847a728cfa60141054123d4"}, + {file = "regex-2023.12.25-cp311-cp311-win32.whl", hash = "sha256:68191f80a9bad283432385961d9efe09d783bcd36ed35a60fb1ff3f1ec2efe87"}, + {file = "regex-2023.12.25-cp311-cp311-win_amd64.whl", hash = "sha256:7d2af3f6b8419661a0c421584cfe8aaec1c0e435ce7e47ee2a97e344b98f794f"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8a0ccf52bb37d1a700375a6b395bff5dd15c50acb745f7db30415bae3c2b0715"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c3c4a78615b7762740531c27cf46e2f388d8d727d0c0c739e72048beb26c8a9d"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ad83e7545b4ab69216cef4cc47e344d19622e28aabec61574b20257c65466d6a"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7a635871143661feccce3979e1727c4e094f2bdfd3ec4b90dfd4f16f571a87a"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d498eea3f581fbe1b34b59c697512a8baef88212f92e4c7830fcc1499f5b45a5"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43f7cd5754d02a56ae4ebb91b33461dc67be8e3e0153f593c509e21d219c5060"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51f4b32f793812714fd5307222a7f77e739b9bc566dc94a18126aba3b92b98a3"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba99d8077424501b9616b43a2d208095746fb1284fc5ba490139651f971d39d9"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4bfc2b16e3ba8850e0e262467275dd4d62f0d045e0e9eda2bc65078c0110a11f"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8c2c19dae8a3eb0ea45a8448356ed561be843b13cbc34b840922ddf565498c1c"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:60080bb3d8617d96f0fb7e19796384cc2467447ef1c491694850ebd3670bc457"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b77e27b79448e34c2c51c09836033056a0547aa360c45eeeb67803da7b0eedaf"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:518440c991f514331f4850a63560321f833979d145d7d81186dbe2f19e27ae3d"}, + {file = "regex-2023.12.25-cp312-cp312-win32.whl", hash = "sha256:e2610e9406d3b0073636a3a2e80db05a02f0c3169b5632022b4e81c0364bcda5"}, + {file = 
"regex-2023.12.25-cp312-cp312-win_amd64.whl", hash = "sha256:cc37b9aeebab425f11f27e5e9e6cf580be7206c6582a64467a14dda211abc232"}, + {file = "regex-2023.12.25-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:da695d75ac97cb1cd725adac136d25ca687da4536154cdc2815f576e4da11c69"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d126361607b33c4eb7b36debc173bf25d7805847346dd4d99b5499e1fef52bc7"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4719bb05094d7d8563a450cf8738d2e1061420f79cfcc1fa7f0a44744c4d8f73"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5dd58946bce44b53b06d94aa95560d0b243eb2fe64227cba50017a8d8b3cd3e2"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22a86d9fff2009302c440b9d799ef2fe322416d2d58fc124b926aa89365ec482"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2aae8101919e8aa05ecfe6322b278f41ce2994c4a430303c4cd163fef746e04f"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e692296c4cc2873967771345a876bcfc1c547e8dd695c6b89342488b0ea55cd8"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:263ef5cc10979837f243950637fffb06e8daed7f1ac1e39d5910fd29929e489a"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d6f7e255e5fa94642a0724e35406e6cb7001c09d476ab5fce002f652b36d0c39"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:88ad44e220e22b63b0f8f81f007e8abbb92874d8ced66f32571ef8beb0643b2b"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:3a17d3ede18f9cedcbe23d2daa8a2cd6f59fe2bf082c567e43083bba3fb00347"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d15b274f9e15b1a0b7a45d2ac86d1f634d983ca40d6b886721626c47a400bf39"}, + {file = "regex-2023.12.25-cp37-cp37m-win32.whl", hash = "sha256:ed19b3a05ae0c97dd8f75a5d8f21f7723a8c33bbc555da6bbe1f96c470139d3c"}, + {file = "regex-2023.12.25-cp37-cp37m-win_amd64.whl", hash = "sha256:a6d1047952c0b8104a1d371f88f4ab62e6275567d4458c1e26e9627ad489b445"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b43523d7bc2abd757119dbfb38af91b5735eea45537ec6ec3a5ec3f9562a1c53"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:efb2d82f33b2212898f1659fb1c2e9ac30493ac41e4d53123da374c3b5541e64"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b7fca9205b59c1a3d5031f7e64ed627a1074730a51c2a80e97653e3e9fa0d415"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086dd15e9435b393ae06f96ab69ab2d333f5d65cbe65ca5a3ef0ec9564dfe770"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e81469f7d01efed9b53740aedd26085f20d49da65f9c1f41e822a33992cb1590"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:34e4af5b27232f68042aa40a91c3b9bb4da0eeb31b7632e0091afc4310afe6cb"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9852b76ab558e45b20bf1893b59af64a28bd3820b0c2efc80e0a70a4a3ea51c1"}, + {file = 
"regex-2023.12.25-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff100b203092af77d1a5a7abe085b3506b7eaaf9abf65b73b7d6905b6cb76988"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cc038b2d8b1470364b1888a98fd22d616fba2b6309c5b5f181ad4483e0017861"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:094ba386bb5c01e54e14434d4caabf6583334090865b23ef58e0424a6286d3dc"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5cd05d0f57846d8ba4b71d9c00f6f37d6b97d5e5ef8b3c3840426a475c8f70f4"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:9aa1a67bbf0f957bbe096375887b2505f5d8ae16bf04488e8b0f334c36e31360"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:98a2636994f943b871786c9e82bfe7883ecdaba2ef5df54e1450fa9869d1f756"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:37f8e93a81fc5e5bd8db7e10e62dc64261bcd88f8d7e6640aaebe9bc180d9ce2"}, + {file = "regex-2023.12.25-cp38-cp38-win32.whl", hash = "sha256:d78bd484930c1da2b9679290a41cdb25cc127d783768a0369d6b449e72f88beb"}, + {file = "regex-2023.12.25-cp38-cp38-win_amd64.whl", hash = "sha256:b521dcecebc5b978b447f0f69b5b7f3840eac454862270406a39837ffae4e697"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f7bc09bc9c29ebead055bcba136a67378f03d66bf359e87d0f7c759d6d4ffa31"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e14b73607d6231f3cc4622809c196b540a6a44e903bcfad940779c80dffa7be7"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9eda5f7a50141291beda3edd00abc2d4a5b16c29c92daf8d5bd76934150f3edc"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc6bb9aa69aacf0f6032c307da718f61a40cf970849e471254e0e91c56ffca95"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:298dc6354d414bc921581be85695d18912bea163a8b23cac9a2562bbcd5088b1"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f4e475a80ecbd15896a976aa0b386c5525d0ed34d5c600b6d3ebac0a67c7ddf"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531ac6cf22b53e0696f8e1d56ce2396311254eb806111ddd3922c9d937151dae"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22f3470f7524b6da61e2020672df2f3063676aff444db1daa283c2ea4ed259d6"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:89723d2112697feaa320c9d351e5f5e7b841e83f8b143dba8e2d2b5f04e10923"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0ecf44ddf9171cd7566ef1768047f6e66975788258b1c6c6ca78098b95cf9a3d"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:905466ad1702ed4acfd67a902af50b8db1feeb9781436372261808df7a2a7bca"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:4558410b7a5607a645e9804a3e9dd509af12fb72b9825b13791a37cd417d73a5"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:7e316026cc1095f2a3e8cc012822c99f413b702eaa2ca5408a513609488cb62f"}, + {file = 
"regex-2023.12.25-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3b1de218d5375cd6ac4b5493e0b9f3df2be331e86520f23382f216c137913d20"}, + {file = "regex-2023.12.25-cp39-cp39-win32.whl", hash = "sha256:11a963f8e25ab5c61348d090bf1b07f1953929c13bd2309a0662e9ff680763c9"}, + {file = "regex-2023.12.25-cp39-cp39-win_amd64.whl", hash = "sha256:e693e233ac92ba83a87024e1d32b5f9ab15ca55ddd916d878146f4e3406b5c91"}, + {file = "regex-2023.12.25.tar.gz", hash = "sha256:29171aa128da69afdf4bde412d5bedc335f2ca8fcfe4489038577d05f16181e5"}, ] [[package]] name = "requests" version = "2.31.0" description = "Python HTTP for Humans." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3628,7 +3488,6 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "requests-oauthlib" version = "1.3.1" description = "OAuthlib authentication support for Requests." -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -3647,7 +3506,6 @@ rsa = ["oauthlib[signedtoken] (>=3.0.0)"] name = "rfc3339-validator" version = "0.1.4" description = "A pure python RFC3339 validator" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -3662,7 +3520,6 @@ six = "*" name = "rfc3986-validator" version = "0.1.1" description = "Pure python rfc3986 validator" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -3672,118 +3529,116 @@ files = [ [[package]] name = "rpds-py" -version = "0.13.1" +version = "0.17.1" description = "Python bindings to Rust's persistent data structures (rpds)" -category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "rpds_py-0.13.1-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:83feb0f682d75a09ddc11aa37ba5c07dd9b824b22915207f6176ea458474ff75"}, - {file = "rpds_py-0.13.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fa84bbe22ffa108f91631935c28a623001e335d66e393438258501e618fb0dde"}, - {file = "rpds_py-0.13.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e04f8c76b8d5c70695b4e8f1d0b391d8ef91df00ef488c6c1ffb910176459bc6"}, - {file = "rpds_py-0.13.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:032c242a595629aacace44128f9795110513ad27217b091e834edec2fb09e800"}, - {file = "rpds_py-0.13.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91276caef95556faeb4b8f09fe4439670d3d6206fee78d47ddb6e6de837f0b4d"}, - {file = "rpds_py-0.13.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d22f2cb82e0b40e427a74a93c9a4231335bbc548aed79955dde0b64ea7f88146"}, - {file = "rpds_py-0.13.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63c9e2794329ef070844ff9bfc012004aeddc0468dc26970953709723f76c8a5"}, - {file = "rpds_py-0.13.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c797ea56f36c6f248656f0223b11307fdf4a1886f3555eba371f34152b07677f"}, - {file = "rpds_py-0.13.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:82dbcd6463e580bcfb7561cece35046aaabeac5a9ddb775020160b14e6c58a5d"}, - {file = "rpds_py-0.13.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:736817dbbbd030a69a1faf5413a319976c9c8ba8cdcfa98c022d3b6b2e01eca6"}, - {file = "rpds_py-0.13.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:1f36a1e80ef4ed1996445698fd91e0d3e54738bf597c9995118b92da537d7a28"}, - {file = "rpds_py-0.13.1-cp310-none-win32.whl", hash = 
"sha256:4f13d3f6585bd07657a603780e99beda96a36c86acaba841f131e81393958336"}, - {file = "rpds_py-0.13.1-cp310-none-win_amd64.whl", hash = "sha256:545e94c84575057d3d5c62634611858dac859702b1519b6ffc58eca7fb1adfcf"}, - {file = "rpds_py-0.13.1-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:6bfe72b249264cc1ff2f3629be240d7d2fdc778d9d298087cdec8524c91cd11f"}, - {file = "rpds_py-0.13.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edc91c50e17f5cd945d821f0f1af830522dba0c10267c3aab186dc3dbaab8def"}, - {file = "rpds_py-0.13.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2eca04a365be380ca1f8fa48b334462e19e3382c0bb7386444d8ca43aa01c481"}, - {file = "rpds_py-0.13.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3e3ac5b602fea378243f993d8b707189f9061e55ebb4e56cb9fdef8166060f28"}, - {file = "rpds_py-0.13.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dfb5d2ab183c0efe5e7b8917e4eaa2e837aacafad8a69b89aa6bc81550eed857"}, - {file = "rpds_py-0.13.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d9793d46d3e6522ae58e9321032827c9c0df1e56cbe5d3de965facb311aed6aa"}, - {file = "rpds_py-0.13.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9cd935c0220d012a27c20135c140f9cdcbc6249d5954345c81bfb714071b985c"}, - {file = "rpds_py-0.13.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:37b08df45f02ff1866043b95096cbe91ac99de05936dd09d6611987a82a3306a"}, - {file = "rpds_py-0.13.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ad666a904212aa9a6c77da7dce9d5170008cda76b7776e6731928b3f8a0d40fa"}, - {file = "rpds_py-0.13.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8a6ad8429340e0a4de89353447c6441329def3632e7b2293a7d6e873217d3c2b"}, - {file = "rpds_py-0.13.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7c40851b659d958c5245c1236e34f0d065cc53dca8d978b49a032c8e0adfda6e"}, - {file = "rpds_py-0.13.1-cp311-none-win32.whl", hash = "sha256:4145172ab59b6c27695db6d78d040795f635cba732cead19c78cede74800949a"}, - {file = "rpds_py-0.13.1-cp311-none-win_amd64.whl", hash = "sha256:46a07a258bda12270de02b34c4884f200f864bba3dcd6e3a37fef36a168b859d"}, - {file = "rpds_py-0.13.1-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:ba4432301ad7eeb1b00848cf46fae0e5fecfd18a8cb5fdcf856c67985f79ecc7"}, - {file = "rpds_py-0.13.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d22e0660de24bd8e9ac82f4230a22a5fe4e397265709289d61d5fb333839ba50"}, - {file = "rpds_py-0.13.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76a8374b294e4ccb39ccaf11d39a0537ed107534139c00b4393ca3b542cc66e5"}, - {file = "rpds_py-0.13.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7d152ec7bb431040af2500e01436c9aa0d993f243346f0594a15755016bf0be1"}, - {file = "rpds_py-0.13.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:74a2044b870df7c9360bb3ce7e12f9ddf8e72e49cd3a353a1528cbf166ad2383"}, - {file = "rpds_py-0.13.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:960e7e460fda2d0af18c75585bbe0c99f90b8f09963844618a621b804f8c3abe"}, - {file = "rpds_py-0.13.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37f79f4f1f06cc96151f4a187528c3fd4a7e1065538a4af9eb68c642365957f7"}, - {file = "rpds_py-0.13.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cd4ea56c9542ad0091dfdef3e8572ae7a746e1e91eb56c9e08b8d0808b40f1d1"}, - {file = 
"rpds_py-0.13.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0290712eb5603a725769b5d857f7cf15cf6ca93dda3128065bbafe6fdb709beb"}, - {file = "rpds_py-0.13.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0b70c1f800059c92479dc94dda41288fd6607f741f9b1b8f89a21a86428f6383"}, - {file = "rpds_py-0.13.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3dd5fb7737224e1497c886fb3ca681c15d9c00c76171f53b3c3cc8d16ccfa7fb"}, - {file = "rpds_py-0.13.1-cp312-none-win32.whl", hash = "sha256:74be3b215a5695690a0f1a9f68b1d1c93f8caad52e23242fcb8ba56aaf060281"}, - {file = "rpds_py-0.13.1-cp312-none-win_amd64.whl", hash = "sha256:f47eef55297799956464efc00c74ae55c48a7b68236856d56183fe1ddf866205"}, - {file = "rpds_py-0.13.1-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:e4a45ba34f904062c63049a760790c6a2fa7a4cc4bd160d8af243b12371aaa05"}, - {file = "rpds_py-0.13.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:20147996376be452cd82cd6c17701daba69a849dc143270fa10fe067bb34562a"}, - {file = "rpds_py-0.13.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42b9535aa22ab023704cfc6533e968f7e420affe802d85e956d8a7b4c0b0b5ea"}, - {file = "rpds_py-0.13.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d4fa1eeb9bea6d9b64ac91ec51ee94cc4fc744955df5be393e1c923c920db2b0"}, - {file = "rpds_py-0.13.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2b2415d5a7b7ee96aa3a54d4775c1fec140476a17ee12353806297e900eaeddc"}, - {file = "rpds_py-0.13.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:577d40a72550eac1386b77b43836151cb61ff6700adacda2ad4d883ca5a0b6f2"}, - {file = "rpds_py-0.13.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af2d1648eb625a460eee07d3e1ea3a4a6e84a1fb3a107f6a8e95ac19f7dcce67"}, - {file = "rpds_py-0.13.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5b769396eb358d6b55dbf78f3f7ca631ca1b2fe02136faad5af74f0111b4b6b7"}, - {file = "rpds_py-0.13.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:249c8e0055ca597707d71c5ad85fd2a1c8fdb99386a8c6c257e1b47b67a9bec1"}, - {file = "rpds_py-0.13.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:fe30ef31172bdcf946502a945faad110e8fff88c32c4bec9a593df0280e64d8a"}, - {file = "rpds_py-0.13.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:2647192facf63be9ed2d7a49ceb07efe01dc6cfb083bd2cc53c418437400cb99"}, - {file = "rpds_py-0.13.1-cp38-none-win32.whl", hash = "sha256:4011d5c854aa804c833331d38a2b6f6f2fe58a90c9f615afdb7aa7cf9d31f721"}, - {file = "rpds_py-0.13.1-cp38-none-win_amd64.whl", hash = "sha256:7cfae77da92a20f56cf89739a557b76e5c6edc094f6ad5c090b9e15fbbfcd1a4"}, - {file = "rpds_py-0.13.1-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:e9be1f7c5f9673616f875299339984da9447a40e3aea927750c843d6e5e2e029"}, - {file = "rpds_py-0.13.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:839676475ac2ccd1532d36af3d10d290a2ca149b702ed464131e450a767550df"}, - {file = "rpds_py-0.13.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a90031658805c63fe488f8e9e7a88b260ea121ba3ee9cdabcece9c9ddb50da39"}, - {file = "rpds_py-0.13.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8ba9fbc5d6e36bfeb5292530321cc56c4ef3f98048647fabd8f57543c34174ec"}, - {file = "rpds_py-0.13.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:08832078767545c5ee12561ce980714e1e4c6619b5b1e9a10248de60cddfa1fd"}, - {file = 
"rpds_py-0.13.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19f5aa7f5078d35ed8e344bcba40f35bc95f9176dddb33fc4f2084e04289fa63"}, - {file = "rpds_py-0.13.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80080972e1d000ad0341c7cc58b6855c80bd887675f92871221451d13a975072"}, - {file = "rpds_py-0.13.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:181ee352691c4434eb1c01802e9daa5edcc1007ff15023a320e2693fed6a661b"}, - {file = "rpds_py-0.13.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d20da6b4c7aa9ee75ad0730beaba15d65157f5beeaca54a038bb968f92bf3ce3"}, - {file = "rpds_py-0.13.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:faa12a9f34671a30ea6bb027f04ec4e1fb8fa3fb3ed030893e729d4d0f3a9791"}, - {file = "rpds_py-0.13.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7cf241dbb50ea71c2e628ab2a32b5bfcd36e199152fc44e5c1edb0b773f1583e"}, - {file = "rpds_py-0.13.1-cp39-none-win32.whl", hash = "sha256:dab979662da1c9fbb464e310c0b06cb5f1d174d09a462553af78f0bfb3e01920"}, - {file = "rpds_py-0.13.1-cp39-none-win_amd64.whl", hash = "sha256:a2b3c79586636f1fa69a7bd59c87c15fca80c0d34b5c003d57f2f326e5276575"}, - {file = "rpds_py-0.13.1-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:5967fa631d0ed9f8511dede08bc943a9727c949d05d1efac4ac82b2938024fb7"}, - {file = "rpds_py-0.13.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8308a8d49d1354278d5c068c888a58d7158a419b2e4d87c7839ed3641498790c"}, - {file = "rpds_py-0.13.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0580faeb9def6d0beb7aa666294d5604e569c4e24111ada423cf9936768d95c"}, - {file = "rpds_py-0.13.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2da81c1492291c1a90987d76a47c7b2d310661bf7c93a9de0511e27b796a8b46"}, - {file = "rpds_py-0.13.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1c9a1dc5e898ce30e2f9c0aa57181cddd4532b22b7780549441d6429d22d3b58"}, - {file = "rpds_py-0.13.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4ae6f423cb7d1c6256b7482025ace2825728f53b7ac58bcd574de6ee9d242c2"}, - {file = "rpds_py-0.13.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc3179e0815827cf963e634095ae5715ee73a5af61defbc8d6ca79f1bdae1d1d"}, - {file = "rpds_py-0.13.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0d9f8930092558fd15c9e07198625efb698f7cc00b3dc311c83eeec2540226a8"}, - {file = "rpds_py-0.13.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:d1d388d2f5f5a6065cf83c54dd12112b7389095669ff395e632003ae8999c6b8"}, - {file = "rpds_py-0.13.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:08b335fb0c45f0a9e2478a9ece6a1bfb00b6f4c4780f9be3cf36479c5d8dd374"}, - {file = "rpds_py-0.13.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:d11afdc5992bbd7af60ed5eb519873690d921425299f51d80aa3099ed49f2bcc"}, - {file = "rpds_py-0.13.1-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:8c1f6c8df23be165eb0cb78f305483d00c6827a191e3a38394c658d5b9c80bbd"}, - {file = "rpds_py-0.13.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:528e2afaa56d815d2601b857644aeb395afe7e59212ab0659906dc29ae68d9a6"}, - {file = "rpds_py-0.13.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df2af1180b8eeececf4f819d22cc0668bfadadfd038b19a90bd2fb2ee419ec6f"}, - {file = 
"rpds_py-0.13.1-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:88956c993a20201744282362e3fd30962a9d86dc4f1dcf2bdb31fab27821b61f"}, - {file = "rpds_py-0.13.1-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee70ee5f4144a45a9e6169000b5b525d82673d5dab9f7587eccc92794814e7ac"}, - {file = "rpds_py-0.13.1-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c5fd099acaee2325f01281a130a39da08d885e4dedf01b84bf156ec2737d78fe"}, - {file = "rpds_py-0.13.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9656a09653b18b80764647d585750df2dff8928e03a706763ab40ec8c4872acc"}, - {file = "rpds_py-0.13.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7ba239bb37663b2b4cd08e703e79e13321512dccd8e5f0e9451d9e53a6b8509a"}, - {file = "rpds_py-0.13.1-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:3f55ae773abd96b1de25fc5c3fb356f491bd19116f8f854ba705beffc1ddc3c5"}, - {file = "rpds_py-0.13.1-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:f4b15a163448ec79241fb2f1bc5a8ae1a4a304f7a48d948d208a2935b26bf8a5"}, - {file = "rpds_py-0.13.1-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:1a3b2583c86bbfbf417304eeb13400ce7f8725376dc7d3efbf35dc5d7052ad48"}, - {file = "rpds_py-0.13.1-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:f1059ca9a51c936c9a8d46fbc2c9a6b4c15ab3f13a97f1ad32f024b39666ba85"}, - {file = "rpds_py-0.13.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:f55601fb58f92e4f4f1d05d80c24cb77505dc42103ddfd63ddfdc51d3da46fa2"}, - {file = "rpds_py-0.13.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fcfd5f91b882eedf8d9601bd21261d6ce0e61a8c66a7152d1f5df08d3f643ab1"}, - {file = "rpds_py-0.13.1-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6574f619e8734140d96c59bfa8a6a6e7a3336820ccd1bfd95ffa610673b650a2"}, - {file = "rpds_py-0.13.1-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a4b9d3f5c48bbe8d9e3758e498b3c34863f2c9b1ac57a4e6310183740e59c980"}, - {file = "rpds_py-0.13.1-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cdd6f8738e1f1d9df5b1603bb03cb30e442710e5672262b95d0f9fcb4edb0dab"}, - {file = "rpds_py-0.13.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8c2bf286e5d755a075e5e97ba56b3de08cccdad6b323ab0b21cc98875176b03"}, - {file = "rpds_py-0.13.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b3d4b390ee70ca9263b331ccfaf9819ee20e90dfd0201a295e23eb64a005dbef"}, - {file = "rpds_py-0.13.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:db8d0f0ad92f74feb61c4e4a71f1d573ef37c22ef4dc19cab93e501bfdad8cbd"}, - {file = "rpds_py-0.13.1-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:2abd669a39be69cdfe145927c7eb53a875b157740bf1e2d49e9619fc6f43362e"}, - {file = "rpds_py-0.13.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:2c173f529666bab8e3f948b74c6d91afa22ea147e6ebae49a48229d9020a47c4"}, - {file = "rpds_py-0.13.1.tar.gz", hash = "sha256:264f3a5906c62b9df3a00ad35f6da1987d321a053895bd85f9d5c708de5c0fbf"}, + {file = "rpds_py-0.17.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:4128980a14ed805e1b91a7ed551250282a8ddf8201a4e9f8f5b7e6225f54170d"}, + {file = "rpds_py-0.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ff1dcb8e8bc2261a088821b2595ef031c91d499a0c1b031c152d43fe0a6ecec8"}, + {file = 
"rpds_py-0.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d65e6b4f1443048eb7e833c2accb4fa7ee67cc7d54f31b4f0555b474758bee55"}, + {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a71169d505af63bb4d20d23a8fbd4c6ce272e7bce6cc31f617152aa784436f29"}, + {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:436474f17733c7dca0fbf096d36ae65277e8645039df12a0fa52445ca494729d"}, + {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:10162fe3f5f47c37ebf6d8ff5a2368508fe22007e3077bf25b9c7d803454d921"}, + {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:720215373a280f78a1814becb1312d4e4d1077b1202a56d2b0815e95ccb99ce9"}, + {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:70fcc6c2906cfa5c6a552ba7ae2ce64b6c32f437d8f3f8eea49925b278a61453"}, + {file = "rpds_py-0.17.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:91e5a8200e65aaac342a791272c564dffcf1281abd635d304d6c4e6b495f29dc"}, + {file = "rpds_py-0.17.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:99f567dae93e10be2daaa896e07513dd4bf9c2ecf0576e0533ac36ba3b1d5394"}, + {file = "rpds_py-0.17.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:24e4900a6643f87058a27320f81336d527ccfe503984528edde4bb660c8c8d59"}, + {file = "rpds_py-0.17.1-cp310-none-win32.whl", hash = "sha256:0bfb09bf41fe7c51413f563373e5f537eaa653d7adc4830399d4e9bdc199959d"}, + {file = "rpds_py-0.17.1-cp310-none-win_amd64.whl", hash = "sha256:20de7b7179e2031a04042e85dc463a93a82bc177eeba5ddd13ff746325558aa6"}, + {file = "rpds_py-0.17.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:65dcf105c1943cba45d19207ef51b8bc46d232a381e94dd38719d52d3980015b"}, + {file = "rpds_py-0.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:01f58a7306b64e0a4fe042047dd2b7d411ee82e54240284bab63e325762c1147"}, + {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:071bc28c589b86bc6351a339114fb7a029f5cddbaca34103aa573eba7b482382"}, + {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ae35e8e6801c5ab071b992cb2da958eee76340e6926ec693b5ff7d6381441745"}, + {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149c5cd24f729e3567b56e1795f74577aa3126c14c11e457bec1b1c90d212e38"}, + {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e796051f2070f47230c745d0a77a91088fbee2cc0502e9b796b9c6471983718c"}, + {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60e820ee1004327609b28db8307acc27f5f2e9a0b185b2064c5f23e815f248f8"}, + {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1957a2ab607f9added64478a6982742eb29f109d89d065fa44e01691a20fc20a"}, + {file = "rpds_py-0.17.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8587fd64c2a91c33cdc39d0cebdaf30e79491cc029a37fcd458ba863f8815383"}, + {file = "rpds_py-0.17.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4dc889a9d8a34758d0fcc9ac86adb97bab3fb7f0c4d29794357eb147536483fd"}, + {file = "rpds_py-0.17.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:2953937f83820376b5979318840f3ee47477d94c17b940fe31d9458d79ae7eea"}, + {file = "rpds_py-0.17.1-cp311-none-win32.whl", hash = 
"sha256:1bfcad3109c1e5ba3cbe2f421614e70439f72897515a96c462ea657261b96518"}, + {file = "rpds_py-0.17.1-cp311-none-win_amd64.whl", hash = "sha256:99da0a4686ada4ed0f778120a0ea8d066de1a0a92ab0d13ae68492a437db78bf"}, + {file = "rpds_py-0.17.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:1dc29db3900cb1bb40353772417800f29c3d078dbc8024fd64655a04ee3c4bdf"}, + {file = "rpds_py-0.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:82ada4a8ed9e82e443fcef87e22a3eed3654dd3adf6e3b3a0deb70f03e86142a"}, + {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d36b2b59e8cc6e576f8f7b671e32f2ff43153f0ad6d0201250a7c07f25d570e"}, + {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3677fcca7fb728c86a78660c7fb1b07b69b281964673f486ae72860e13f512ad"}, + {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:516fb8c77805159e97a689e2f1c80655c7658f5af601c34ffdb916605598cda2"}, + {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df3b6f45ba4515632c5064e35ca7f31d51d13d1479673185ba8f9fefbbed58b9"}, + {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a967dd6afda7715d911c25a6ba1517975acd8d1092b2f326718725461a3d33f9"}, + {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dbbb95e6fc91ea3102505d111b327004d1c4ce98d56a4a02e82cd451f9f57140"}, + {file = "rpds_py-0.17.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:02866e060219514940342a1f84303a1ef7a1dad0ac311792fbbe19b521b489d2"}, + {file = "rpds_py-0.17.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:2528ff96d09f12e638695f3a2e0c609c7b84c6df7c5ae9bfeb9252b6fa686253"}, + {file = "rpds_py-0.17.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bd345a13ce06e94c753dab52f8e71e5252aec1e4f8022d24d56decd31e1b9b23"}, + {file = "rpds_py-0.17.1-cp312-none-win32.whl", hash = "sha256:2a792b2e1d3038daa83fa474d559acfd6dc1e3650ee93b2662ddc17dbff20ad1"}, + {file = "rpds_py-0.17.1-cp312-none-win_amd64.whl", hash = "sha256:292f7344a3301802e7c25c53792fae7d1593cb0e50964e7bcdcc5cf533d634e3"}, + {file = "rpds_py-0.17.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:8ffe53e1d8ef2520ebcf0c9fec15bb721da59e8ef283b6ff3079613b1e30513d"}, + {file = "rpds_py-0.17.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4341bd7579611cf50e7b20bb8c2e23512a3dc79de987a1f411cb458ab670eb90"}, + {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f4eb548daf4836e3b2c662033bfbfc551db58d30fd8fe660314f86bf8510b93"}, + {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b686f25377f9c006acbac63f61614416a6317133ab7fafe5de5f7dc8a06d42eb"}, + {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4e21b76075c01d65d0f0f34302b5a7457d95721d5e0667aea65e5bb3ab415c25"}, + {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b86b21b348f7e5485fae740d845c65a880f5d1eda1e063bc59bef92d1f7d0c55"}, + {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f175e95a197f6a4059b50757a3dca33b32b61691bdbd22c29e8a8d21d3914cae"}, + {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1701fc54460ae2e5efc1dd6350eafd7a760f516df8dbe51d4a1c79d69472fbd4"}, + {file = 
"rpds_py-0.17.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:9051e3d2af8f55b42061603e29e744724cb5f65b128a491446cc029b3e2ea896"}, + {file = "rpds_py-0.17.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:7450dbd659fed6dd41d1a7d47ed767e893ba402af8ae664c157c255ec6067fde"}, + {file = "rpds_py-0.17.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:5a024fa96d541fd7edaa0e9d904601c6445e95a729a2900c5aec6555fe921ed6"}, + {file = "rpds_py-0.17.1-cp38-none-win32.whl", hash = "sha256:da1ead63368c04a9bded7904757dfcae01eba0e0f9bc41d3d7f57ebf1c04015a"}, + {file = "rpds_py-0.17.1-cp38-none-win_amd64.whl", hash = "sha256:841320e1841bb53fada91c9725e766bb25009cfd4144e92298db296fb6c894fb"}, + {file = "rpds_py-0.17.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:f6c43b6f97209e370124baf2bf40bb1e8edc25311a158867eb1c3a5d449ebc7a"}, + {file = "rpds_py-0.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5e7d63ec01fe7c76c2dbb7e972fece45acbb8836e72682bde138e7e039906e2c"}, + {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81038ff87a4e04c22e1d81f947c6ac46f122e0c80460b9006e6517c4d842a6ec"}, + {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:810685321f4a304b2b55577c915bece4c4a06dfe38f6e62d9cc1d6ca8ee86b99"}, + {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:25f071737dae674ca8937a73d0f43f5a52e92c2d178330b4c0bb6ab05586ffa6"}, + {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa5bfb13f1e89151ade0eb812f7b0d7a4d643406caaad65ce1cbabe0a66d695f"}, + {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfe07308b311a8293a0d5ef4e61411c5c20f682db6b5e73de6c7c8824272c256"}, + {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a000133a90eea274a6f28adc3084643263b1e7c1a5a66eb0a0a7a36aa757ed74"}, + {file = "rpds_py-0.17.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d0e8a6434a3fbf77d11448c9c25b2f25244226cfbec1a5159947cac5b8c5fa4"}, + {file = "rpds_py-0.17.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:efa767c220d94aa4ac3a6dd3aeb986e9f229eaf5bce92d8b1b3018d06bed3772"}, + {file = "rpds_py-0.17.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:dbc56680ecf585a384fbd93cd42bc82668b77cb525343170a2d86dafaed2a84b"}, + {file = "rpds_py-0.17.1-cp39-none-win32.whl", hash = "sha256:270987bc22e7e5a962b1094953ae901395e8c1e1e83ad016c5cfcfff75a15a3f"}, + {file = "rpds_py-0.17.1-cp39-none-win_amd64.whl", hash = "sha256:2a7b2f2f56a16a6d62e55354dd329d929560442bd92e87397b7a9586a32e3e76"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a3264e3e858de4fc601741498215835ff324ff2482fd4e4af61b46512dd7fc83"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:f2f3b28b40fddcb6c1f1f6c88c6f3769cd933fa493ceb79da45968a21dccc920"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9584f8f52010295a4a417221861df9bea4c72d9632562b6e59b3c7b87a1522b7"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c64602e8be701c6cfe42064b71c84ce62ce66ddc6422c15463fd8127db3d8066"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:060f412230d5f19fc8c8b75f315931b408d8ebf56aec33ef4168d1b9e54200b1"}, + {file = 
"rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b9412abdf0ba70faa6e2ee6c0cc62a8defb772e78860cef419865917d86c7342"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9737bdaa0ad33d34c0efc718741abaafce62fadae72c8b251df9b0c823c63b22"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9f0e4dc0f17dcea4ab9d13ac5c666b6b5337042b4d8f27e01b70fae41dd65c57"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:1db228102ab9d1ff4c64148c96320d0be7044fa28bd865a9ce628ce98da5973d"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:d8bbd8e56f3ba25a7d0cf980fc42b34028848a53a0e36c9918550e0280b9d0b6"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:be22ae34d68544df293152b7e50895ba70d2a833ad9566932d750d3625918b82"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:bf046179d011e6114daf12a534d874958b039342b347348a78b7cdf0dd9d6041"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:1a746a6d49665058a5896000e8d9d2f1a6acba8a03b389c1e4c06e11e0b7f40d"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0b8bf5b8db49d8fd40f54772a1dcf262e8be0ad2ab0206b5a2ec109c176c0a4"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f7f4cb1f173385e8a39c29510dd11a78bf44e360fb75610594973f5ea141028b"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7fbd70cb8b54fe745301921b0816c08b6d917593429dfc437fd024b5ba713c58"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9bdf1303df671179eaf2cb41e8515a07fc78d9d00f111eadbe3e14262f59c3d0"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fad059a4bd14c45776600d223ec194e77db6c20255578bb5bcdd7c18fd169361"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3664d126d3388a887db44c2e293f87d500c4184ec43d5d14d2d2babdb4c64cad"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:698ea95a60c8b16b58be9d854c9f993c639f5c214cf9ba782eca53a8789d6b19"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:c3d2010656999b63e628a3c694f23020322b4178c450dc478558a2b6ef3cb9bb"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:938eab7323a736533f015e6069a7d53ef2dcc841e4e533b782c2bfb9fb12d84b"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1e626b365293a2142a62b9a614e1f8e331b28f3ca57b9f05ebbf4cf2a0f0bdc5"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:380e0df2e9d5d5d339803cfc6d183a5442ad7ab3c63c2a0982e8c824566c5ccc"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b760a56e080a826c2e5af09002c1a037382ed21d03134eb6294812dda268c811"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5576ee2f3a309d2bb403ec292d5958ce03953b0e57a11d224c1f134feaf8c40f"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:1f3c3461ebb4c4f1bbc70b15d20b565759f97a5aaf13af811fcefc892e9197ba"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:637b802f3f069a64436d432117a7e58fab414b4e27a7e81049817ae94de45d8d"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffee088ea9b593cc6160518ba9bd319b5475e5f3e578e4552d63818773c6f56a"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3ac732390d529d8469b831949c78085b034bff67f584559340008d0f6041a049"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:93432e747fb07fa567ad9cc7aaadd6e29710e515aabf939dfbed8046041346c6"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:7b7d9ca34542099b4e185b3c2a2b2eda2e318a7dbde0b0d83357a6d4421b5296"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:0387ce69ba06e43df54e43968090f3626e231e4bc9150e4c3246947567695f68"}, + {file = "rpds_py-0.17.1.tar.gz", hash = "sha256:0210b2668f24c078307260bf88bdac9d6f1093635df5123789bfee4d8d7fc8e7"}, ] [[package]] name = "rsa" version = "4.9" description = "Pure-Python RSA implementation" -category = "main" optional = false python-versions = ">=3.6,<4" files = [ @@ -3798,7 +3653,6 @@ pyasn1 = ">=0.1.3" name = "ruamel-yaml" version = "0.17.17" description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" -category = "main" optional = false python-versions = ">=3" files = [ @@ -3817,7 +3671,6 @@ jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] name = "ruamel-yaml-clib" version = "0.2.8" description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3850,18 +3703,24 @@ files = [ {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_12_0_arm64.whl", hash = "sha256:f481f16baec5290e45aebdc2a5168ebc6d35189ae6fea7a58787613a25f6e875"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_24_aarch64.whl", hash = "sha256:77159f5d5b5c14f7c34073862a6b7d34944075d9f93e681638f6d753606c6ce6"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7f67a1ee819dc4562d444bbafb135832b0b909f81cc90f7aa00260968c9ca1b3"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4ecbf9c3e19f9562c7fdd462e8d18dd902a47ca046a2e64dba80699f0b6c09b7"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:87ea5ff66d8064301a154b3933ae406b0863402a799b16e4a1d24d9fbbcbe0d3"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win32.whl", hash = "sha256:75e1ed13e1f9de23c5607fe6bd1aeaae21e523b32d83bb33918245361e9cc51b"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win_amd64.whl", hash = "sha256:3f215c5daf6a9d7bbed4a0a4f760f3113b10e82ff4c5c44bec20a68c8014f675"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1b617618914cb00bf5c34d4357c37aa15183fa229b24767259657746c9077615"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:a6a9ffd280b71ad062eae53ac1659ad86a17f59a0fdc7699fd9be40525153337"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_24_aarch64.whl", hash = "sha256:305889baa4043a09e5b76f8e2a51d4ffba44259f6b4c72dec8ca56207d9c6fe1"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = 
"sha256:700e4ebb569e59e16a976857c8798aee258dceac7c7d6b50cab63e080058df91"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e2b4c44b60eadec492926a7270abb100ef9f72798e18743939bdbf037aab8c28"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e79e5db08739731b0ce4850bed599235d601701d5694c36570a99a0c5ca41a9d"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win32.whl", hash = "sha256:955eae71ac26c1ab35924203fda6220f84dce57d6d7884f189743e2abe3a9fbe"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win_amd64.whl", hash = "sha256:56f4252222c067b4ce51ae12cbac231bce32aee1d33fbfc9d17e5b8d6966c312"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:03d1162b6d1df1caa3a4bd27aa51ce17c9afc2046c31b0ad60a0a96ec22f8001"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:bba64af9fa9cebe325a62fa398760f5c7206b215201b0ec825005f1b18b9bccf"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_24_aarch64.whl", hash = "sha256:a1a45e0bb052edf6a1d3a93baef85319733a888363938e1fc9924cb00c8df24c"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:da09ad1c359a728e112d60116f626cc9f29730ff3e0e7db72b9a2dbc2e4beed5"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:184565012b60405d93838167f425713180b949e9d8dd0bbc7b49f074407c5a8b"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a75879bacf2c987c003368cf14bed0ffe99e8e85acfa6c0bfffc21a090f16880"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win32.whl", hash = "sha256:84b554931e932c46f94ab306913ad7e11bba988104c5cff26d90d03f68258cd5"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win_amd64.whl", hash = "sha256:25ac8c08322002b06fa1d49d1646181f0b2c72f5cbc15a85e80b4c30a544bb15"}, {file = "ruamel.yaml.clib-0.2.8.tar.gz", hash = "sha256:beb2e0404003de9a4cab9753a8805a8fe9320ee6673136ed7f04255fe60bb512"}, @@ -3871,7 +3730,6 @@ files = [ name = "schematic-db" version = "0.0.dev33" description = "" -category = "main" optional = false python-versions = ">=3.9,<4.0" files = [ @@ -3902,7 +3760,6 @@ synapse = ["synapseclient (>=3.0.0,<4.0.0)"] name = "scipy" version = "1.11.4" description = "Fundamental algorithms for scientific computing in Python" -category = "main" optional = false python-versions = ">=3.9" files = [ @@ -3945,7 +3802,6 @@ test = ["asv", "gmpy2", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeo name = "secretstorage" version = "3.3.3" description = "Python bindings to FreeDesktop.org Secret Service API" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3961,7 +3817,6 @@ jeepney = ">=0.6" name = "send2trash" version = "1.8.2" description = "Send file to trash natively under Mac OS X, Windows and Linux" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" files = [ @@ -3978,7 +3833,6 @@ win32 = ["pywin32"] name = "setuptools" version = "66.1.1" description = "Easily download, build, install, upgrade, and uninstall Python packages" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3995,7 +3849,6 @@ testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs ( name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -4007,7 +3860,6 @@ files = [ name = "sniffio" version = 
"1.3.0" description = "Sniff out which async library your code is running under" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4019,7 +3871,6 @@ files = [ name = "snowballstemmer" version = "2.2.0" description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." -category = "main" optional = false python-versions = "*" files = [ @@ -4031,7 +3882,6 @@ files = [ name = "soupsieve" version = "2.5" description = "A modern CSS selector implementation for Beautiful Soup." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -4043,7 +3893,6 @@ files = [ name = "sphinx" version = "7.2.6" description = "Python documentation generator" -category = "main" optional = false python-versions = ">=3.9" files = [ @@ -4079,7 +3928,6 @@ test = ["cython (>=3.0)", "filelock", "html5lib", "pytest (>=4.6)", "setuptools name = "sphinx-click" version = "4.4.0" description = "Sphinx extension that automatically documents click applications" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4094,66 +3942,56 @@ sphinx = ">=2.0" [[package]] name = "sphinxcontrib-applehelp" -version = "1.0.7" +version = "1.0.8" description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" -category = "main" optional = false python-versions = ">=3.9" files = [ - {file = "sphinxcontrib_applehelp-1.0.7-py3-none-any.whl", hash = "sha256:094c4d56209d1734e7d252f6e0b3ccc090bd52ee56807a5d9315b19c122ab15d"}, - {file = "sphinxcontrib_applehelp-1.0.7.tar.gz", hash = "sha256:39fdc8d762d33b01a7d8f026a3b7d71563ea3b72787d5f00ad8465bd9d6dfbfa"}, + {file = "sphinxcontrib_applehelp-1.0.8-py3-none-any.whl", hash = "sha256:cb61eb0ec1b61f349e5cc36b2028e9e7ca765be05e49641c97241274753067b4"}, + {file = "sphinxcontrib_applehelp-1.0.8.tar.gz", hash = "sha256:c40a4f96f3776c4393d933412053962fac2b84f4c99a7982ba42e09576a70619"}, ] -[package.dependencies] -Sphinx = ">=5" - [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] +standalone = ["Sphinx (>=5)"] test = ["pytest"] [[package]] name = "sphinxcontrib-devhelp" -version = "1.0.5" +version = "1.0.6" description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents" -category = "main" optional = false python-versions = ">=3.9" files = [ - {file = "sphinxcontrib_devhelp-1.0.5-py3-none-any.whl", hash = "sha256:fe8009aed765188f08fcaadbb3ea0d90ce8ae2d76710b7e29ea7d047177dae2f"}, - {file = "sphinxcontrib_devhelp-1.0.5.tar.gz", hash = "sha256:63b41e0d38207ca40ebbeabcf4d8e51f76c03e78cd61abe118cf4435c73d4212"}, + {file = "sphinxcontrib_devhelp-1.0.6-py3-none-any.whl", hash = "sha256:6485d09629944511c893fa11355bda18b742b83a2b181f9a009f7e500595c90f"}, + {file = "sphinxcontrib_devhelp-1.0.6.tar.gz", hash = "sha256:9893fd3f90506bc4b97bdb977ceb8fbd823989f4316b28c3841ec128544372d3"}, ] -[package.dependencies] -Sphinx = ">=5" - [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] +standalone = ["Sphinx (>=5)"] test = ["pytest"] [[package]] name = "sphinxcontrib-htmlhelp" -version = "2.0.4" +version = "2.0.5" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" -category = "main" optional = false python-versions = ">=3.9" files = [ - {file = "sphinxcontrib_htmlhelp-2.0.4-py3-none-any.whl", hash = "sha256:8001661c077a73c29beaf4a79968d0726103c5605e27db92b9ebed8bab1359e9"}, - {file = "sphinxcontrib_htmlhelp-2.0.4.tar.gz", hash = "sha256:6c26a118a05b76000738429b724a0568dbde5b72391a688577da08f11891092a"}, + 
{file = "sphinxcontrib_htmlhelp-2.0.5-py3-none-any.whl", hash = "sha256:393f04f112b4d2f53d93448d4bce35842f62b307ccdc549ec1585e950bc35e04"}, + {file = "sphinxcontrib_htmlhelp-2.0.5.tar.gz", hash = "sha256:0dc87637d5de53dd5eec3a6a01753b1ccf99494bd756aafecd74b4fa9e729015"}, ] -[package.dependencies] -Sphinx = ">=5" - [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] +standalone = ["Sphinx (>=5)"] test = ["html5lib", "pytest"] [[package]] name = "sphinxcontrib-jsmath" version = "1.0.1" description = "A sphinx extension which renders display math in HTML via JavaScript" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -4166,99 +4004,92 @@ test = ["flake8", "mypy", "pytest"] [[package]] name = "sphinxcontrib-qthelp" -version = "1.0.6" +version = "1.0.7" description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents" -category = "main" optional = false python-versions = ">=3.9" files = [ - {file = "sphinxcontrib_qthelp-1.0.6-py3-none-any.whl", hash = "sha256:bf76886ee7470b934e363da7a954ea2825650013d367728588732c7350f49ea4"}, - {file = "sphinxcontrib_qthelp-1.0.6.tar.gz", hash = "sha256:62b9d1a186ab7f5ee3356d906f648cacb7a6bdb94d201ee7adf26db55092982d"}, + {file = "sphinxcontrib_qthelp-1.0.7-py3-none-any.whl", hash = "sha256:e2ae3b5c492d58fcbd73281fbd27e34b8393ec34a073c792642cd8e529288182"}, + {file = "sphinxcontrib_qthelp-1.0.7.tar.gz", hash = "sha256:053dedc38823a80a7209a80860b16b722e9e0209e32fea98c90e4e6624588ed6"}, ] -[package.dependencies] -Sphinx = ">=5" - [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] +standalone = ["Sphinx (>=5)"] test = ["pytest"] [[package]] name = "sphinxcontrib-serializinghtml" -version = "1.1.9" +version = "1.1.10" description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)" -category = "main" optional = false python-versions = ">=3.9" files = [ - {file = "sphinxcontrib_serializinghtml-1.1.9-py3-none-any.whl", hash = "sha256:9b36e503703ff04f20e9675771df105e58aa029cfcbc23b8ed716019b7416ae1"}, - {file = "sphinxcontrib_serializinghtml-1.1.9.tar.gz", hash = "sha256:0c64ff898339e1fac29abd2bf5f11078f3ec413cfe9c046d3120d7ca65530b54"}, + {file = "sphinxcontrib_serializinghtml-1.1.10-py3-none-any.whl", hash = "sha256:326369b8df80a7d2d8d7f99aa5ac577f51ea51556ed974e7716cfd4fca3f6cb7"}, + {file = "sphinxcontrib_serializinghtml-1.1.10.tar.gz", hash = "sha256:93f3f5dc458b91b192fe10c397e324f262cf163d79f3282c158e8436a2c4511f"}, ] -[package.dependencies] -Sphinx = ">=5" - [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] +standalone = ["Sphinx (>=5)"] test = ["pytest"] [[package]] name = "sqlalchemy" -version = "2.0.23" +version = "2.0.24" description = "Database Abstraction Library" -category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "SQLAlchemy-2.0.23-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:638c2c0b6b4661a4fd264f6fb804eccd392745c5887f9317feb64bb7cb03b3ea"}, - {file = "SQLAlchemy-2.0.23-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e3b5036aa326dc2df50cba3c958e29b291a80f604b1afa4c8ce73e78e1c9f01d"}, - {file = "SQLAlchemy-2.0.23-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:787af80107fb691934a01889ca8f82a44adedbf5ef3d6ad7d0f0b9ac557e0c34"}, - {file = "SQLAlchemy-2.0.23-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c14eba45983d2f48f7546bb32b47937ee2cafae353646295f0e99f35b14286ab"}, - {file = 
"SQLAlchemy-2.0.23-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0666031df46b9badba9bed00092a1ffa3aa063a5e68fa244acd9f08070e936d3"}, - {file = "SQLAlchemy-2.0.23-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:89a01238fcb9a8af118eaad3ffcc5dedaacbd429dc6fdc43fe430d3a941ff965"}, - {file = "SQLAlchemy-2.0.23-cp310-cp310-win32.whl", hash = "sha256:cabafc7837b6cec61c0e1e5c6d14ef250b675fa9c3060ed8a7e38653bd732ff8"}, - {file = "SQLAlchemy-2.0.23-cp310-cp310-win_amd64.whl", hash = "sha256:87a3d6b53c39cd173990de2f5f4b83431d534a74f0e2f88bd16eabb5667e65c6"}, - {file = "SQLAlchemy-2.0.23-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d5578e6863eeb998980c212a39106ea139bdc0b3f73291b96e27c929c90cd8e1"}, - {file = "SQLAlchemy-2.0.23-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:62d9e964870ea5ade4bc870ac4004c456efe75fb50404c03c5fd61f8bc669a72"}, - {file = "SQLAlchemy-2.0.23-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c80c38bd2ea35b97cbf7c21aeb129dcbebbf344ee01a7141016ab7b851464f8e"}, - {file = "SQLAlchemy-2.0.23-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75eefe09e98043cff2fb8af9796e20747ae870c903dc61d41b0c2e55128f958d"}, - {file = "SQLAlchemy-2.0.23-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd45a5b6c68357578263d74daab6ff9439517f87da63442d244f9f23df56138d"}, - {file = "SQLAlchemy-2.0.23-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a86cb7063e2c9fb8e774f77fbf8475516d270a3e989da55fa05d08089d77f8c4"}, - {file = "SQLAlchemy-2.0.23-cp311-cp311-win32.whl", hash = "sha256:b41f5d65b54cdf4934ecede2f41b9c60c9f785620416e8e6c48349ab18643855"}, - {file = "SQLAlchemy-2.0.23-cp311-cp311-win_amd64.whl", hash = "sha256:9ca922f305d67605668e93991aaf2c12239c78207bca3b891cd51a4515c72e22"}, - {file = "SQLAlchemy-2.0.23-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d0f7fb0c7527c41fa6fcae2be537ac137f636a41b4c5a4c58914541e2f436b45"}, - {file = "SQLAlchemy-2.0.23-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7c424983ab447dab126c39d3ce3be5bee95700783204a72549c3dceffe0fc8f4"}, - {file = "SQLAlchemy-2.0.23-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f508ba8f89e0a5ecdfd3761f82dda2a3d7b678a626967608f4273e0dba8f07ac"}, - {file = "SQLAlchemy-2.0.23-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6463aa765cf02b9247e38b35853923edbf2f6fd1963df88706bc1d02410a5577"}, - {file = "SQLAlchemy-2.0.23-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e599a51acf3cc4d31d1a0cf248d8f8d863b6386d2b6782c5074427ebb7803bda"}, - {file = "SQLAlchemy-2.0.23-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fd54601ef9cc455a0c61e5245f690c8a3ad67ddb03d3b91c361d076def0b4c60"}, - {file = "SQLAlchemy-2.0.23-cp312-cp312-win32.whl", hash = "sha256:42d0b0290a8fb0165ea2c2781ae66e95cca6e27a2fbe1016ff8db3112ac1e846"}, - {file = "SQLAlchemy-2.0.23-cp312-cp312-win_amd64.whl", hash = "sha256:227135ef1e48165f37590b8bfc44ed7ff4c074bf04dc8d6f8e7f1c14a94aa6ca"}, - {file = "SQLAlchemy-2.0.23-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:14aebfe28b99f24f8a4c1346c48bc3d63705b1f919a24c27471136d2f219f02d"}, - {file = "SQLAlchemy-2.0.23-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e983fa42164577d073778d06d2cc5d020322425a509a08119bdcee70ad856bf"}, - {file = "SQLAlchemy-2.0.23-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e0dc9031baa46ad0dd5a269cb7a92a73284d1309228be1d5935dac8fb3cae24"}, - {file = 
"SQLAlchemy-2.0.23-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5f94aeb99f43729960638e7468d4688f6efccb837a858b34574e01143cf11f89"}, - {file = "SQLAlchemy-2.0.23-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:63bfc3acc970776036f6d1d0e65faa7473be9f3135d37a463c5eba5efcdb24c8"}, - {file = "SQLAlchemy-2.0.23-cp37-cp37m-win32.whl", hash = "sha256:f48ed89dd11c3c586f45e9eec1e437b355b3b6f6884ea4a4c3111a3358fd0c18"}, - {file = "SQLAlchemy-2.0.23-cp37-cp37m-win_amd64.whl", hash = "sha256:1e018aba8363adb0599e745af245306cb8c46b9ad0a6fc0a86745b6ff7d940fc"}, - {file = "SQLAlchemy-2.0.23-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:64ac935a90bc479fee77f9463f298943b0e60005fe5de2aa654d9cdef46c54df"}, - {file = "SQLAlchemy-2.0.23-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c4722f3bc3c1c2fcc3702dbe0016ba31148dd6efcd2a2fd33c1b4897c6a19693"}, - {file = "SQLAlchemy-2.0.23-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4af79c06825e2836de21439cb2a6ce22b2ca129bad74f359bddd173f39582bf5"}, - {file = "SQLAlchemy-2.0.23-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:683ef58ca8eea4747737a1c35c11372ffeb84578d3aab8f3e10b1d13d66f2bc4"}, - {file = "SQLAlchemy-2.0.23-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d4041ad05b35f1f4da481f6b811b4af2f29e83af253bf37c3c4582b2c68934ab"}, - {file = "SQLAlchemy-2.0.23-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aeb397de65a0a62f14c257f36a726945a7f7bb60253462e8602d9b97b5cbe204"}, - {file = "SQLAlchemy-2.0.23-cp38-cp38-win32.whl", hash = "sha256:42ede90148b73fe4ab4a089f3126b2cfae8cfefc955c8174d697bb46210c8306"}, - {file = "SQLAlchemy-2.0.23-cp38-cp38-win_amd64.whl", hash = "sha256:964971b52daab357d2c0875825e36584d58f536e920f2968df8d581054eada4b"}, - {file = "SQLAlchemy-2.0.23-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:616fe7bcff0a05098f64b4478b78ec2dfa03225c23734d83d6c169eb41a93e55"}, - {file = "SQLAlchemy-2.0.23-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0e680527245895aba86afbd5bef6c316831c02aa988d1aad83c47ffe92655e74"}, - {file = "SQLAlchemy-2.0.23-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9585b646ffb048c0250acc7dad92536591ffe35dba624bb8fd9b471e25212a35"}, - {file = "SQLAlchemy-2.0.23-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4895a63e2c271ffc7a81ea424b94060f7b3b03b4ea0cd58ab5bb676ed02f4221"}, - {file = "SQLAlchemy-2.0.23-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:cc1d21576f958c42d9aec68eba5c1a7d715e5fc07825a629015fe8e3b0657fb0"}, - {file = "SQLAlchemy-2.0.23-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:967c0b71156f793e6662dd839da54f884631755275ed71f1539c95bbada9aaab"}, - {file = "SQLAlchemy-2.0.23-cp39-cp39-win32.whl", hash = "sha256:0a8c6aa506893e25a04233bc721c6b6cf844bafd7250535abb56cb6cc1368884"}, - {file = "SQLAlchemy-2.0.23-cp39-cp39-win_amd64.whl", hash = "sha256:f3420d00d2cb42432c1d0e44540ae83185ccbbc67a6054dcc8ab5387add6620b"}, - {file = "SQLAlchemy-2.0.23-py3-none-any.whl", hash = "sha256:31952bbc527d633b9479f5f81e8b9dfada00b91d6baba021a869095f1a97006d"}, - {file = "SQLAlchemy-2.0.23.tar.gz", hash = "sha256:c1bda93cbbe4aa2aa0aa8655c5aeda505cd219ff3e8da91d1d329e143e4aff69"}, + {file = "SQLAlchemy-2.0.24-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5f801d85ba4753d4ed97181d003e5d3fa330ac7c4587d131f61d7f968f416862"}, + {file = "SQLAlchemy-2.0.24-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b35c35e3923ade1e7ac44e150dec29f5863513246c8bf85e2d7d313e3832bcfb"}, + {file = 
"SQLAlchemy-2.0.24-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d9b3fd5eca3c0b137a5e0e468e24ca544ed8ca4783e0e55341b7ed2807518ee"}, + {file = "SQLAlchemy-2.0.24-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a6209e689d0ff206c40032b6418e3cfcfc5af044b3f66e381d7f1ae301544b4"}, + {file = "SQLAlchemy-2.0.24-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:37e89d965b52e8b20571b5d44f26e2124b26ab63758bf1b7598a0e38fb2c4005"}, + {file = "SQLAlchemy-2.0.24-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c6910eb4ea90c0889f363965cd3c8c45a620ad27b526a7899f0054f6c1b9219e"}, + {file = "SQLAlchemy-2.0.24-cp310-cp310-win32.whl", hash = "sha256:d8e7e8a150e7b548e7ecd6ebb9211c37265991bf2504297d9454e01b58530fc6"}, + {file = "SQLAlchemy-2.0.24-cp310-cp310-win_amd64.whl", hash = "sha256:396f05c552f7fa30a129497c41bef5b4d1423f9af8fe4df0c3dcd38f3e3b9a14"}, + {file = "SQLAlchemy-2.0.24-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:adbd67dac4ebf54587198b63cd30c29fd7eafa8c0cab58893d9419414f8efe4b"}, + {file = "SQLAlchemy-2.0.24-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a0f611b431b84f55779cbb7157257d87b4a2876b067c77c4f36b15e44ced65e2"}, + {file = "SQLAlchemy-2.0.24-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56a0e90a959e18ac5f18c80d0cad9e90cb09322764f536e8a637426afb1cae2f"}, + {file = "SQLAlchemy-2.0.24-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6db686a1d9f183c639f7e06a2656af25d4ed438eda581de135d15569f16ace33"}, + {file = "SQLAlchemy-2.0.24-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f0cc0b486a56dff72dddae6b6bfa7ff201b0eeac29d4bc6f0e9725dc3c360d71"}, + {file = "SQLAlchemy-2.0.24-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4a1d4856861ba9e73bac05030cec5852eabfa9ef4af8e56c19d92de80d46fc34"}, + {file = "SQLAlchemy-2.0.24-cp311-cp311-win32.whl", hash = "sha256:a3c2753bf4f48b7a6024e5e8a394af49b1b12c817d75d06942cae03d14ff87b3"}, + {file = "SQLAlchemy-2.0.24-cp311-cp311-win_amd64.whl", hash = "sha256:38732884eabc64982a09a846bacf085596ff2371e4e41d20c0734f7e50525d01"}, + {file = "SQLAlchemy-2.0.24-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9f992e0f916201731993eab8502912878f02287d9f765ef843677ff118d0e0b1"}, + {file = "SQLAlchemy-2.0.24-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2587e108463cc2e5b45a896b2e7cc8659a517038026922a758bde009271aed11"}, + {file = "SQLAlchemy-2.0.24-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bb7cedcddffca98c40bb0becd3423e293d1fef442b869da40843d751785beb3"}, + {file = "SQLAlchemy-2.0.24-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83fa6df0e035689df89ff77a46bf8738696785d3156c2c61494acdcddc75c69d"}, + {file = "SQLAlchemy-2.0.24-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:cc889fda484d54d0b31feec409406267616536d048a450fc46943e152700bb79"}, + {file = "SQLAlchemy-2.0.24-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:57ef6f2cb8b09a042d0dbeaa46a30f2df5dd1e1eb889ba258b0d5d7d6011b81c"}, + {file = "SQLAlchemy-2.0.24-cp312-cp312-win32.whl", hash = "sha256:ea490564435b5b204d8154f0e18387b499ea3cedc1e6af3b3a2ab18291d85aa7"}, + {file = "SQLAlchemy-2.0.24-cp312-cp312-win_amd64.whl", hash = "sha256:ccfd336f96d4c9bbab0309f2a565bf15c468c2d8b2d277a32f89c5940f71fcf9"}, + {file = "SQLAlchemy-2.0.24-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9aaaaa846b10dfbe1bda71079d0e31a7e2cebedda9409fa7dba3dfed1ae803e8"}, + {file = 
"SQLAlchemy-2.0.24-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95bae3d38f8808d79072da25d5e5a6095f36fe1f9d6c614dd72c59ca8397c7c0"}, + {file = "SQLAlchemy-2.0.24-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a04191a7c8d77e63f6fc1e8336d6c6e93176c0c010833e74410e647f0284f5a1"}, + {file = "SQLAlchemy-2.0.24-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:acc58b7c2e40235712d857fdfc8f2bda9608f4a850d8d9ac0dd1fc80939ca6ac"}, + {file = "SQLAlchemy-2.0.24-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:00d76fe5d7cdb5d84d625ce002ce29fefba0bfd98e212ae66793fed30af73931"}, + {file = "SQLAlchemy-2.0.24-cp37-cp37m-win32.whl", hash = "sha256:29e51f848f843bbd75d74ae64ab1ab06302cb1dccd4549d1f5afe6b4a946edb2"}, + {file = "SQLAlchemy-2.0.24-cp37-cp37m-win_amd64.whl", hash = "sha256:e9d036e343a604db3f5a6c33354018a84a1d3f6dcae3673358b404286204798c"}, + {file = "SQLAlchemy-2.0.24-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9bafaa05b19dc07fa191c1966c5e852af516840b0d7b46b7c3303faf1a349bc9"}, + {file = "SQLAlchemy-2.0.24-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e69290b921b7833c04206f233d6814c60bee1d135b09f5ae5d39229de9b46cd4"}, + {file = "SQLAlchemy-2.0.24-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8398593ccc4440ce6dffcc4f47d9b2d72b9fe7112ac12ea4a44e7d4de364db1"}, + {file = "SQLAlchemy-2.0.24-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f073321a79c81e1a009218a21089f61d87ee5fa3c9563f6be94f8b41ff181812"}, + {file = "SQLAlchemy-2.0.24-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9036ebfd934813990c5b9f71f297e77ed4963720db7d7ceec5a3fdb7cd2ef6ce"}, + {file = "SQLAlchemy-2.0.24-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fcf84fe93397a0f67733aa2a38ed4eab9fc6348189fc950e656e1ea198f45668"}, + {file = "SQLAlchemy-2.0.24-cp38-cp38-win32.whl", hash = "sha256:6f5e75de91c754365c098ac08c13fdb267577ce954fa239dd49228b573ca88d7"}, + {file = "SQLAlchemy-2.0.24-cp38-cp38-win_amd64.whl", hash = "sha256:9f29c7f0f4b42337ec5a779e166946a9f86d7d56d827e771b69ecbdf426124ac"}, + {file = "SQLAlchemy-2.0.24-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:07cc423892f2ceda9ae1daa28c0355757f362ecc7505b1ab1a3d5d8dc1c44ac6"}, + {file = "SQLAlchemy-2.0.24-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2a479aa1ab199178ff1956b09ca8a0693e70f9c762875d69292d37049ffd0d8f"}, + {file = "SQLAlchemy-2.0.24-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b8d0e8578e7f853f45f4512b5c920f6a546cd4bed44137460b2a56534644205"}, + {file = "SQLAlchemy-2.0.24-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17e7e27af178d31b436dda6a596703b02a89ba74a15e2980c35ecd9909eea3a"}, + {file = "SQLAlchemy-2.0.24-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1ca7903d5e7db791a355b579c690684fac6304478b68efdc7f2ebdcfe770d8d7"}, + {file = "SQLAlchemy-2.0.24-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db09e424d7bb89b6215a184ca93b4f29d7f00ea261b787918a1af74143b98c06"}, + {file = "SQLAlchemy-2.0.24-cp39-cp39-win32.whl", hash = "sha256:a5cd7d30e47f87b21362beeb3e86f1b5886e7d9b0294b230dde3d3f4a1591375"}, + {file = "SQLAlchemy-2.0.24-cp39-cp39-win_amd64.whl", hash = "sha256:7ae5d44517fe81079ce75cf10f96978284a6db2642c5932a69c82dbae09f009a"}, + {file = "SQLAlchemy-2.0.24-py3-none-any.whl", hash = "sha256:8f358f5cfce04417b6ff738748ca4806fe3d3ae8040fb4e6a0c9a6973ccf9b6e"}, + {file = "SQLAlchemy-2.0.24.tar.gz", hash = 
"sha256:6db97656fd3fe3f7e5b077f12fa6adb5feb6e0b567a3e99f47ecf5f7ea0a09e3"}, ] [package.dependencies] @@ -4268,7 +4099,7 @@ typing-extensions = ">=4.2.0" [package.extras] aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] -aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing-extensions (!=3.10.0.1)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] asyncio = ["greenlet (!=0.4.17)"] asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] @@ -4278,7 +4109,7 @@ mssql-pyodbc = ["pyodbc"] mypy = ["mypy (>=0.910)"] mysql = ["mysqlclient (>=1.4.0)"] mysql-connector = ["mysql-connector-python"] -oracle = ["cx-oracle (>=8)"] +oracle = ["cx_oracle (>=8)"] oracle-oracledb = ["oracledb (>=1.0.1)"] postgresql = ["psycopg2 (>=2.7)"] postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] @@ -4288,13 +4119,12 @@ postgresql-psycopg2binary = ["psycopg2-binary"] postgresql-psycopg2cffi = ["psycopg2cffi"] postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] pymysql = ["pymysql"] -sqlcipher = ["sqlcipher3-binary"] +sqlcipher = ["sqlcipher3_binary"] [[package]] name = "sqlalchemy-utils" version = "0.41.1" description = "Various utility functions for SQLAlchemy." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -4323,7 +4153,6 @@ url = ["furl (>=0.4.1)"] name = "stack-data" version = "0.6.3" description = "Extract data from python stack frames and tracebacks for informative displays" -category = "main" optional = false python-versions = "*" files = [ @@ -4343,7 +4172,6 @@ tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] name = "swagger-ui-bundle" version = "0.0.9" description = "swagger_ui_bundle - swagger-ui files in a pip package" -category = "main" optional = false python-versions = "*" files = [ @@ -4358,7 +4186,6 @@ Jinja2 = ">=2.0" name = "synapseclient" version = "3.2.0" description = "A client for Synapse, a collaborative, open-source research platform that allows teams to share data, track analyses, and collaborate." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -4388,7 +4215,6 @@ tests = ["flake8 (>=3.7.0,<4.0)", "func-timeout (>=4.3,<5.0)", "pytest (>=6.0.0, name = "tabulate" version = "0.9.0" description = "Pretty-print tabular data" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4403,7 +4229,6 @@ widechars = ["wcwidth"] name = "tenacity" version = "8.2.3" description = "Retry code until it succeeds" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4418,7 +4243,6 @@ doc = ["reno", "sphinx", "tornado (>=4.5)"] name = "terminado" version = "0.18.0" description = "Tornado websocket backend for the Xterm.js Javascript terminal emulator library." 
-category = "main" optional = false python-versions = ">=3.8" files = [ @@ -4440,7 +4264,6 @@ typing = ["mypy (>=1.6,<2.0)", "traitlets (>=5.11.1)"] name = "tinycss2" version = "1.2.1" description = "A tiny CSS parser" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4459,7 +4282,6 @@ test = ["flake8", "isort", "pytest"] name = "toml" version = "0.10.2" description = "Python Library for Tom's Obvious, Minimal Language" -category = "main" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -4471,7 +4293,6 @@ files = [ name = "tomli" version = "2.0.1" description = "A lil' TOML parser" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4483,7 +4304,6 @@ files = [ name = "tomlkit" version = "0.12.3" description = "Style preserving TOML library" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -4495,7 +4315,6 @@ files = [ name = "toolz" version = "0.12.0" description = "List processing tools and functional utilities" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -4505,30 +4324,28 @@ files = [ [[package]] name = "tornado" -version = "6.3.3" +version = "6.4" description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." -category = "main" optional = false python-versions = ">= 3.8" files = [ - {file = "tornado-6.3.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:502fba735c84450974fec147340016ad928d29f1e91f49be168c0a4c18181e1d"}, - {file = "tornado-6.3.3-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:805d507b1f588320c26f7f097108eb4023bbaa984d63176d1652e184ba24270a"}, - {file = "tornado-6.3.3-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bd19ca6c16882e4d37368e0152f99c099bad93e0950ce55e71daed74045908f"}, - {file = "tornado-6.3.3-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ac51f42808cca9b3613f51ffe2a965c8525cb1b00b7b2d56828b8045354f76a"}, - {file = "tornado-6.3.3-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:71a8db65160a3c55d61839b7302a9a400074c9c753040455494e2af74e2501f2"}, - {file = "tornado-6.3.3-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:ceb917a50cd35882b57600709dd5421a418c29ddc852da8bcdab1f0db33406b0"}, - {file = "tornado-6.3.3-cp38-abi3-musllinux_1_1_i686.whl", hash = "sha256:7d01abc57ea0dbb51ddfed477dfe22719d376119844e33c661d873bf9c0e4a16"}, - {file = "tornado-6.3.3-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:9dc4444c0defcd3929d5c1eb5706cbe1b116e762ff3e0deca8b715d14bf6ec17"}, - {file = "tornado-6.3.3-cp38-abi3-win32.whl", hash = "sha256:65ceca9500383fbdf33a98c0087cb975b2ef3bfb874cb35b8de8740cf7f41bd3"}, - {file = "tornado-6.3.3-cp38-abi3-win_amd64.whl", hash = "sha256:22d3c2fa10b5793da13c807e6fc38ff49a4f6e1e3868b0a6f4164768bb8e20f5"}, - {file = "tornado-6.3.3.tar.gz", hash = "sha256:e7d8db41c0181c80d76c982aacc442c0783a2c54d6400fe028954201a2e032fe"}, + {file = "tornado-6.4-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:02ccefc7d8211e5a7f9e8bc3f9e5b0ad6262ba2fbb683a6443ecc804e5224ce0"}, + {file = "tornado-6.4-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:27787de946a9cffd63ce5814c33f734c627a87072ec7eed71f7fc4417bb16263"}, + {file = "tornado-6.4-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7894c581ecdcf91666a0912f18ce5e757213999e183ebfc2c3fdbf4d5bd764e"}, + {file = 
"tornado-6.4-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e43bc2e5370a6a8e413e1e1cd0c91bedc5bd62a74a532371042a18ef19e10579"}, + {file = "tornado-6.4-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0251554cdd50b4b44362f73ad5ba7126fc5b2c2895cc62b14a1c2d7ea32f212"}, + {file = "tornado-6.4-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fd03192e287fbd0899dd8f81c6fb9cbbc69194d2074b38f384cb6fa72b80e9c2"}, + {file = "tornado-6.4-cp38-abi3-musllinux_1_1_i686.whl", hash = "sha256:88b84956273fbd73420e6d4b8d5ccbe913c65d31351b4c004ae362eba06e1f78"}, + {file = "tornado-6.4-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:71ddfc23a0e03ef2df1c1397d859868d158c8276a0603b96cf86892bff58149f"}, + {file = "tornado-6.4-cp38-abi3-win32.whl", hash = "sha256:6f8a6c77900f5ae93d8b4ae1196472d0ccc2775cc1dfdc9e7727889145c45052"}, + {file = "tornado-6.4-cp38-abi3-win_amd64.whl", hash = "sha256:10aeaa8006333433da48dec9fe417877f8bcc21f48dda8d661ae79da357b2a63"}, + {file = "tornado-6.4.tar.gz", hash = "sha256:72291fa6e6bc84e626589f1c29d90a5a6d593ef5ae68052ee2ef000dfd273dee"}, ] [[package]] name = "tqdm" version = "4.66.1" description = "Fast, Extensible Progress Meter" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4547,14 +4364,13 @@ telegram = ["requests"] [[package]] name = "traitlets" -version = "5.14.0" +version = "5.14.1" description = "Traitlets Python configuration system" -category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "traitlets-5.14.0-py3-none-any.whl", hash = "sha256:f14949d23829023013c47df20b4a76ccd1a85effb786dc060f34de7948361b33"}, - {file = "traitlets-5.14.0.tar.gz", hash = "sha256:fcdaa8ac49c04dfa0ed3ee3384ef6dfdb5d6f3741502be247279407679296772"}, + {file = "traitlets-5.14.1-py3-none-any.whl", hash = "sha256:2e5a030e6eff91737c643231bfcf04a65b0132078dad75e4936700b213652e74"}, + {file = "traitlets-5.14.1.tar.gz", hash = "sha256:8585105b371a04b8316a43d5ce29c098575c2e477850b62b848b964f1444527e"}, ] [package.extras] @@ -4563,21 +4379,19 @@ test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0, [[package]] name = "types-python-dateutil" -version = "2.8.19.14" +version = "2.8.19.20240106" description = "Typing stubs for python-dateutil" -category = "main" optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "types-python-dateutil-2.8.19.14.tar.gz", hash = "sha256:1f4f10ac98bb8b16ade9dbee3518d9ace017821d94b057a425b069f834737f4b"}, - {file = "types_python_dateutil-2.8.19.14-py3-none-any.whl", hash = "sha256:f977b8de27787639986b4e28963263fd0e5158942b3ecef91b9335c130cb1ce9"}, + {file = "types-python-dateutil-2.8.19.20240106.tar.gz", hash = "sha256:1f8db221c3b98e6ca02ea83a58371b22c374f42ae5bbdf186db9c9a76581459f"}, + {file = "types_python_dateutil-2.8.19.20240106-py3-none-any.whl", hash = "sha256:efbbdc54590d0f16152fa103c9879c7d4a00e82078f6e2cf01769042165acaa2"}, ] [[package]] name = "typing-extensions" version = "4.5.0" description = "Backported and Experimental Type Hints for Python 3.7+" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4589,7 +4403,6 @@ files = [ name = "typing-inspect" version = "0.9.0" description = "Runtime inspection utilities for typing module." 
-category = "main" optional = false python-versions = "*" files = [ @@ -4603,21 +4416,19 @@ typing-extensions = ">=3.7.4" [[package]] name = "tzdata" -version = "2023.3" +version = "2023.4" description = "Provider of IANA time zone data" -category = "main" optional = false python-versions = ">=2" files = [ - {file = "tzdata-2023.3-py2.py3-none-any.whl", hash = "sha256:7e65763eef3120314099b6939b5546db7adce1e7d6f2e179e3df563c70511eda"}, - {file = "tzdata-2023.3.tar.gz", hash = "sha256:11ef1e08e54acb0d4f95bdb1be05da659673de4acbd21bf9c69e94cc5e907a3a"}, + {file = "tzdata-2023.4-py2.py3-none-any.whl", hash = "sha256:aa3ace4329eeacda5b7beb7ea08ece826c28d761cda36e747cfbf97996d39bf3"}, + {file = "tzdata-2023.4.tar.gz", hash = "sha256:dd54c94f294765522c77399649b4fefd95522479a664a0cec87f41bebc6148c9"}, ] [[package]] name = "tzlocal" version = "5.2" description = "tzinfo object for the local timezone" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -4635,7 +4446,6 @@ devenv = ["check-manifest", "pytest (>=4.3)", "pytest-cov", "pytest-mock (>=3.3) name = "uri-template" version = "1.3.0" description = "RFC 6570 URI Template Processor" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4650,7 +4460,6 @@ dev = ["flake8", "flake8-annotations", "flake8-bandit", "flake8-bugbear", "flake name = "uritemplate" version = "4.1.1" description = "Implementation of RFC 6570 URI Templates" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -4662,7 +4471,6 @@ files = [ name = "urllib3" version = "1.26.18" description = "HTTP library with thread-safe connection pooling, file post, and more." -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ @@ -4679,7 +4487,6 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] name = "uwsgi" version = "2.0.23" description = "The uWSGI server" -category = "dev" optional = false python-versions = "*" files = [ @@ -4690,7 +4497,6 @@ files = [ name = "validators" version = "0.20.0" description = "Python Data Validation for Humans™." -category = "main" optional = false python-versions = ">=3.4" files = [ @@ -4705,21 +4511,19 @@ test = ["flake8 (>=2.4.0)", "isort (>=4.2.2)", "pytest (>=2.2.3)"] [[package]] name = "wcwidth" -version = "0.2.12" +version = "0.2.13" description = "Measures the displayed width of unicode strings in a terminal" -category = "main" optional = false python-versions = "*" files = [ - {file = "wcwidth-0.2.12-py2.py3-none-any.whl", hash = "sha256:f26ec43d96c8cbfed76a5075dac87680124fa84e0855195a6184da9c187f133c"}, - {file = "wcwidth-0.2.12.tar.gz", hash = "sha256:f01c104efdf57971bcb756f054dd58ddec5204dd15fa31d6503ea57947d97c02"}, + {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, + {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, ] [[package]] name = "webcolors" version = "1.13" description = "A library for working with the color formats defined by HTML and CSS." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4735,7 +4539,6 @@ tests = ["pytest", "pytest-cov"] name = "webencodings" version = "0.5.1" description = "Character encoding aliases for legacy web content" -category = "main" optional = false python-versions = "*" files = [ @@ -4745,14 +4548,13 @@ files = [ [[package]] name = "websocket-client" -version = "1.6.4" +version = "1.7.0" description = "WebSocket client for Python with low level API options" -category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "websocket-client-1.6.4.tar.gz", hash = "sha256:b3324019b3c28572086c4a319f91d1dcd44e6e11cd340232978c684a7650d0df"}, - {file = "websocket_client-1.6.4-py3-none-any.whl", hash = "sha256:084072e0a7f5f347ef2ac3d8698a5e0b4ffbfcab607628cadabc650fc9a83a24"}, + {file = "websocket-client-1.7.0.tar.gz", hash = "sha256:10e511ea3a8c744631d3bd77e61eb17ed09304c413ad42cf6ddfa4c7787e8fe6"}, + {file = "websocket_client-1.7.0-py3-none-any.whl", hash = "sha256:f4c3d22fec12a2461427a29957ff07d35098ee2d976d3ba244e688b8b4057588"}, ] [package.extras] @@ -4764,7 +4566,6 @@ test = ["websockets"] name = "werkzeug" version = "2.1.2" description = "The comprehensive WSGI web application library." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4779,7 +4580,6 @@ watchdog = ["watchdog"] name = "widgetsnbextension" version = "4.0.9" description = "Jupyter interactive widgets for Jupyter Notebook" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4791,7 +4591,6 @@ files = [ name = "wrapt" version = "1.16.0" description = "Module for decorators, wrappers and monkey patching." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -4871,7 +4670,6 @@ files = [ name = "zipp" version = "3.17.0" description = "Backport of pathlib-compatible object wrapper for zip files" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -4886,4 +4684,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = ">=3.9.0,<3.11" -content-hash = "7123714075e813a493e59782fa8922650a7b44835cf5656d029a4130d139b636" +content-hash = "e7a53bb762e4472eb7fefd0ea60c026f3ec037a8c5e268e613e959500cde0ebf" From a6867571edeb5526fa0fb4628277582a1c741545 Mon Sep 17 00:00:00 2001 From: andrewelamb Date: Mon, 22 Jan 2024 11:37:25 -0800 Subject: [PATCH 075/199] ran black on all modules --- .github/workflows/test.yml | 2 +- schematic/__main__.py | 2 +- schematic/manifest/commands.py | 81 +- schematic/manifest/generator.py | 647 +++++--- schematic/models/GE_Helpers.py | 583 +++---- schematic/models/commands.py | 75 +- schematic/models/metadata.py | 132 +- schematic/models/validate_attribute.py | 985 ++++++------ schematic/models/validate_manifest.py | 181 ++- schematic/schemas/commands.py | 2 +- schematic/schemas/data_model_edges.py | 38 +- schematic/schemas/data_model_graph.py | 4 +- schematic/schemas/data_model_jsonld.py | 19 +- schematic/schemas/data_model_parser.py | 91 +- schematic/schemas/data_model_validator.py | 8 +- schematic/store/synapse.py | 1387 ++++++++++------- schematic/utils/cli_utils.py | 23 +- schematic/utils/df_utils.py | 73 +- schematic/utils/general.py | 68 +- schematic/utils/google_api_utils.py | 90 +- schematic/utils/schema_utils.py | 131 +- schematic/utils/validate_rules_utils.py | 351 +++-- schematic/utils/validate_utils.py | 35 +- schematic/visualization/__init__.py | 2 +- .../visualization/attributes_explorer.py | 225 
++- schematic/visualization/commands.py | 39 +- schematic/visualization/tangled_tree.py | 479 +++--- 27 files changed, 3358 insertions(+), 2395 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 809c350d4..ca8b8e9ac 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -96,7 +96,7 @@ jobs: run: | # ran only on certain files for now # add here when checked - poetry run black schematic/configuration/*.py schematic/exceptions.py schematic/help.py schematic/loader.py schematic/version.py --check + poetry run black --check #---------------------------------------------- # type checking/enforcement diff --git a/schematic/__main__.py b/schematic/__main__.py index c03219524..fa0ee9a23 100644 --- a/schematic/__main__.py +++ b/schematic/__main__.py @@ -22,6 +22,7 @@ # dict() -> new empty dictionary CONTEXT_SETTINGS = dict(help_option_names=["--help", "-h"]) # help options + # invoke_without_command=True -> forces the application not to show aids before losing them with a --h @click.group(context_settings=CONTEXT_SETTINGS, invoke_without_command=True) @click_log.simple_verbosity_option(logger) @@ -40,6 +41,5 @@ def main(): main.add_command(viz_cli) # add viz commands - if __name__ == "__main__": main() diff --git a/schematic/manifest/commands.py b/schematic/manifest/commands.py index 1f916b05c..002ada68c 100644 --- a/schematic/manifest/commands.py +++ b/schematic/manifest/commands.py @@ -17,11 +17,12 @@ from schematic.store.synapse import SynapseStorage from schematic.configuration.configuration import CONFIG -logger = logging.getLogger('schematic') +logger = logging.getLogger("schematic") click_log.basic_config(logger) CONTEXT_SETTINGS = dict(help_option_names=["--help", "-h"]) # help options + # invoke_without_command=True -> forces the application not to show aids before losing them with a --h @click.group(context_settings=CONTEXT_SETTINGS, invoke_without_command=True) @click_log.simple_verbosity_option(logger) @@ -62,7 +63,9 @@ def manifest(ctx, config): # use as `schematic manifest ...` help=query_dict(manifest_commands, ("manifest", "get", "data_type")), ) @click.option( - "-p", "--path_to_data_model", help=query_dict(manifest_commands, ("manifest", "get", "path_to_data_model")) + "-p", + "--path_to_data_model", + help=query_dict(manifest_commands, ("manifest", "get", "path_to_data_model")), ) @click.option( "-d", @@ -99,7 +102,7 @@ def manifest(ctx, config): # use as `schematic manifest ...` @click.option( "-av", "--alphabetize_valid_values", - default = 'ascending', + default="ascending", help=query_dict(manifest_commands, ("manifest", "get", "alphabetize_valid_values")), ) @click.pass_obj @@ -122,18 +125,18 @@ def get_manifest( # Optional parameters that need to be passed to ManifestGenerator() # If CLI parameters are None they are gotten from the CONFIG object and logged if data_type is None: - data_type = CONFIG.manifest_data_type + data_type = CONFIG.manifest_data_type log_value_from_config("data_type", data_type) if path_to_data_model is None: - path_to_data_model = CONFIG.model_location + path_to_data_model = CONFIG.model_location log_value_from_config("path_to_data_model", path_to_data_model) if title is None: - title = CONFIG.manifest_title + title = CONFIG.manifest_title log_value_from_config("title", title) - data_model_parser = DataModelParser(path_to_data_model = path_to_data_model) + data_model_parser = DataModelParser(path_to_data_model=path_to_data_model) - #Parse Model + # Parse Model logger.info("Parsing data model.") 
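# ---------------------------------------------------------------------------
# A minimal sketch (not taken from this diff) of the parse-then-graph flow the
# command above drives. Import paths follow this repo's layout; the model file
# path is hypothetical, and this assumes the DataModelParser / DataModelGraph
# API exactly as it is called elsewhere in this patch series.
from schematic.schemas.data_model_parser import DataModelParser
from schematic.schemas.data_model_graph import DataModelGraph

parser = DataModelParser(path_to_data_model="example.model.jsonld")  # hypothetical path
parsed_data_model = parser.parse_model()
graph_data_model = DataModelGraph(parsed_data_model).generate_data_model_graph()
# ---------------------------------------------------------------------------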
     parsed_data_model = data_model_parser.parse_model()
@@ -148,7 +151,7 @@ def create_single_manifest(data_type, output_csv=None, output_xlsx=None):
         # create object of type ManifestGenerator
         manifest_generator = ManifestGenerator(
             path_to_data_model=path_to_data_model,
-            graph = graph_data_model,
+            graph=graph_data_model,
             title=t,
             root=data_type,
             use_annotations=use_annotations,
@@ -157,7 +160,7 @@ def create_single_manifest(data_type, output_csv=None, output_xlsx=None):

         # call get_manifest() on manifest_generator
         # if output_xlsx gets specified, output_format = "excel"
-        if output_xlsx:
+        if output_xlsx:
             output_format = "excel"
             # if file name is in the path, and that file does not exist
             if not os.path.exists(output_xlsx):
@@ -170,27 +173,31 @@ def create_single_manifest(data_type, output_csv=None, output_xlsx=None):
                     )
                 else:
                     raise ValueError(
-                    f"{output_xlsx} does not exist. Please try a valid file path"
-                )
+                        f"{output_xlsx} does not exist. Please try a valid file path"
+                    )
             else:
                 # Check if base path itself exists.
                 if not os.path.exists(os.path.dirname(output_xlsx)):
                     raise ValueError(
-                        f"{output_xlsx} does not exist. Please try a valid file path"
+                    f"{output_xlsx} does not exist. Please try a valid file path"
                     )
             output_path = output_xlsx
-        else:
+        else:
             output_format = None
             output_path = None

         result = manifest_generator.get_manifest(
-            dataset_id=dataset_id, sheet_url=sheet_url, json_schema=json_schema, output_format = output_format, output_path = output_path
+            dataset_id=dataset_id,
+            sheet_url=sheet_url,
+            json_schema=json_schema,
+            output_format=output_format,
+            output_path=output_path,
         )

         if sheet_url:
             logger.info("Find the manifest template using this Google Sheet URL:")
             click.echo(result)
-        if output_csv is None and output_xlsx is None:
+        if output_csv is None and output_xlsx is None:
             prefix, _ = os.path.splitext(path_to_data_model)
             prefix_root, prefix_ext = os.path.splitext(prefix)
             if prefix_ext == ".model":
@@ -204,37 +211,41 @@ def create_single_manifest(data_type, output_csv=None, output_xlsx=None):
             return result
         export_manifest_csv(file_path=output_csv, manifest=result)
         logger.info(
-            f"Find the manifest template using this CSV file path: {output_csv}"
-        )
+            f"Find the manifest template using this CSV file path: {output_csv}"
+        )
         return result

     if type(data_type) is str:
         data_type = [data_type]

-    if data_type[0] == 'all manifests':
+    if data_type[0] == "all manifests":
         # Feed graph into the data model graph explorer
         dmge = DataModelGraphExplorer(graph_data_model)
-        component_digraph = dmge.get_digraph_by_edge_type('requiresComponent')
+        component_digraph = dmge.get_digraph_by_edge_type("requiresComponent")
         components = component_digraph.nodes()
         for component in components:
-            t = f'{title}.{component}.manifest'
-            result = create_single_manifest(data_type = component)
+            t = f"{title}.{component}.manifest"
+            result = create_single_manifest(data_type=component)
     else:
         for dt in data_type:
             if len(data_type) > 1 and not output_xlsx:
-                t = f'{title}.{dt}.manifest'
-            elif output_xlsx:
+                t = f"{title}.{dt}.manifest"
+            elif output_xlsx:
                 if ".xlsx" or ".xls" in output_xlsx:
                     title_with_extension = os.path.basename(output_xlsx)
-                    t = title_with_extension.split('.')[0]
+                    t = title_with_extension.split(".")[0]
             else:
                 t = title
-            result = create_single_manifest(data_type = dt, output_csv=output_csv, output_xlsx=output_xlsx)
+            result = create_single_manifest(
+                data_type=dt, output_csv=output_csv, output_xlsx=output_xlsx
+            )

     return result

+
 @manifest.command(
-    "migrate", short_help=query_dict(manifest_commands, ("manifest", "migrate", "short_help"))
+    "migrate",
+    short_help=query_dict(manifest_commands, ("manifest", "migrate", "short_help")),
 )
 @click_log.simple_verbosity_option(logger)
 # define the optional arguments
@@ -281,18 +292,22 @@ def migrate_manifests(
     Running CLI with manifest migration options.
     """
     if jsonld is None:
-        jsonld = CONFIG.model_location
+        jsonld = CONFIG.model_location
         log_value_from_config("jsonld", jsonld)

     full_scope = project_scope + [archive_project]
-    synStore = SynapseStorage(project_scope = full_scope)
+    synStore = SynapseStorage(project_scope=full_scope)

     for project in project_scope:
         if not return_entities:
             logging.info("Re-uploading manifests as tables")
-            synStore.upload_annotated_project_manifests_to_synapse(project, jsonld, dry_run)
+            synStore.upload_annotated_project_manifests_to_synapse(
+                project, jsonld, dry_run
+            )
         if archive_project:
             logging.info("Migrating entities")
-            synStore.move_entities_to_new_project(project, archive_project, return_entities, dry_run)
+            synStore.move_entities_to_new_project(
+                project, archive_project, return_entities, dry_run
+            )
-
-    return
+
+    return
diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py
index 049941ff2..b8b8180bd 100644
--- a/schematic/manifest/generator.py
+++ b/schematic/manifest/generator.py
@@ -24,7 +24,7 @@
 from schematic.utils.df_utils import update_df, load_df
 from schematic.utils.validate_utils import rule_in_rule_list

-#TODO: This module should only be aware of the store interface
+# TODO: This module should only be aware of the store interface
 # we shouldn't need to expose Synapse functionality explicitly
 from schematic.store.synapse import SynapseStorage

@@ -35,13 +35,12 @@

 logger = logging.getLogger(__name__)

-
 class ManifestGenerator(object):
     def __init__(
         self,
         path_to_data_model: str,  # JSON-LD file to be used for generating the manifest
-        graph: nx.MultiDiGraph, # At this point, the graph is fully formed.
-        alphabetize_valid_values: str = 'ascending',
+        graph: nx.MultiDiGraph,  # At this point, the graph is fully formed.
+        alphabetize_valid_values: str = "ascending",
         title: str = None,  # manifest sheet title
         root: str = None,
         additional_metadata: Dict = None,
@@ -95,15 +94,17 @@ def __init__(
         # additional metadata to add to manifest
         self.additional_metadata = additional_metadata
-
+
         # Check if the class is in the schema
         root_in_schema = self.dmge.is_class_in_schema(self.root)
-
+
         # If the class could not be found, give a notification
         if not root_in_schema:
-            exception_message = f"The DataType entered ({self.root}) could not be found in the data model schema. " + \
-            "Please confirm that the datatype is in the data model and that the spelling matches the class label in the .jsonld file."
-            raise LookupError(exception_message)
+            exception_message = (
+                f"The DataType entered ({self.root}) could not be found in the data model schema. "
+                + "Please confirm that the datatype is in the data model and that the spelling matches the class label in the .jsonld file."
+            )
+            raise LookupError(exception_message)

         # Determine whether current data type is file-based
         self.is_file_based = "Filename" in self.dmge.get_node_dependencies(self.root)
@@ -192,7 +193,7 @@ def _gdrive_copy_file(self, origin_file_id, copy_title):
             .execute()["id"]
         )

-    def _create_empty_manifest_spreadsheet(self, title:str) -> str:
+    def _create_empty_manifest_spreadsheet(self, title: str) -> str:
         """
         Creates an empty google spreadsheet returning the id.
If the configuration has a template id it will be used @@ -209,21 +210,18 @@ def _create_empty_manifest_spreadsheet(self, title:str) -> str: spreadsheet_id = self._gdrive_copy_file(template_id, title) else: - spreadsheet_body = { - 'properties': { - 'title': title - } - } + spreadsheet_body = {"properties": {"title": title}} - spreadsheet_id = self.sheet_service.spreadsheets().create( - body=spreadsheet_body, - fields="spreadsheetId").execute().get("spreadsheetId" + spreadsheet_id = ( + self.sheet_service.spreadsheets() + .create(body=spreadsheet_body, fields="spreadsheetId") + .execute() + .get("spreadsheetId") ) return spreadsheet_id def _get_cell_borders(self, cell_range): - # set border style request color = { "red": 226.0 / 255.0, @@ -259,26 +257,30 @@ def callback(request_id, response, exception): batch.add( self.drive_service.permissions().create( - fileId=fileId, body=worldPermission, fields="id", + fileId=fileId, + body=worldPermission, + fields="id", ) ) batch.execute() - def _store_valid_values_as_data_dictionary(self, column_id:int, valid_values:list, spreadsheet_id:str) -> list: - '''store valid values in google sheet (sheet 2). This step is required for "ONE OF RANGE" validation + def _store_valid_values_as_data_dictionary( + self, column_id: int, valid_values: list, spreadsheet_id: str + ) -> list: + """store valid values in google sheet (sheet 2). This step is required for "ONE OF RANGE" validation Args: column_id: id of column valid_values: a list of valid values for a given attribute (i.e. for diagnosis, this looks like: [{'userEnteredValue': 'Cancer'}, {'userEnteredValue': 'Healthy'}]) spreadsheet_id: google spreadsheet id - + return: range of valid values (i.e. for diagnosis, [{'userEnteredValue': '=Sheet2!D2:D3'}]) - ''' + """ # get valid values w/o google sheet header values = [valid_value["userEnteredValue"] for valid_value in valid_values] - - if self.alphabetize and self.alphabetize.lower().startswith('a'): + + if self.alphabetize and self.alphabetize.lower().startswith("a"): values.sort(reverse=False, key=str.lower) - elif self.alphabetize and self.alphabetize.lower().startswith('d'): + elif self.alphabetize and self.alphabetize.lower().startswith("d"): values.sort(reverse=True, key=str.lower) # store valid values explicitly in workbook at the provided range to use as validation values @@ -310,18 +312,19 @@ def _get_column_data_validation_values( spreadsheet_id, valid_values, column_id, - strict:Optional[bool], + strict: Optional[bool], validation_type="ONE_OF_LIST", custom_ui=True, input_message="Choose one from dropdown", ): - # set validation strictness to config file default if None indicated. if strict == None: strict = CONFIG.google_sheets_strict_validation - #store valid values explicitly in workbook at the provided range to use as validation values + # store valid values explicitly in workbook at the provided range to use as validation values if validation_type == "ONE_OF_RANGE": - valid_values=self._store_valid_values_as_data_dictionary(column_id, valid_values, spreadsheet_id) + valid_values = self._store_valid_values_as_data_dictionary( + column_id, valid_values, spreadsheet_id + ) # setup validation data request body validation_body = { @@ -367,7 +370,6 @@ def _get_valid_values_from_jsonschema_property(self, prop: dict) -> List[str]: else: return [] - def _get_json_schema(self, json_schema_filepath: str) -> Dict: """Open json schema as a dictionary. 
        Args:
            json_schema_filepath: path to json schema file
        Returns:
            json_schema: dict, loaded json schema
        """
        if not json_schema_filepath:
            # TODO Catch error if no JSONLD or JSON path provided.
-            data_model_js = DataModelJSONSchema(jsonld_path=self.model_path, graph=self.graph)
-            json_schema = data_model_js.get_json_validation_schema(source_node=self.root, schema_name=self.title)
+            data_model_js = DataModelJSONSchema(
+                jsonld_path=self.model_path, graph=self.graph
+            )
+            json_schema = data_model_js.get_json_validation_schema(
+                source_node=self.root, schema_name=self.title
+            )
         else:
             with open(json_schema_filepath) as jsonfile:
                 json_schema = json.load(jsonfile)
@@ -477,7 +483,7 @@ def _add_root_to_component(self, required_metadata_fields: Dict[str, List]):
         # constructor (it's optional) if not, instantiate it
         if not self.additional_metadata:
             self.additional_metadata = {}
-        if self.is_file_based and 'Filename' in self.additional_metadata:
+        if self.is_file_based and "Filename" in self.additional_metadata:
             self.additional_metadata["Component"] = [self.root] * max(
                 1, len(self.additional_metadata["Filename"])
             )
@@ -691,7 +697,7 @@ def _request_update_base_color(self, i: int, color={"red": 1.0}):
         }
         return vr_format_body

-    def _request_regex_vr(self, gs_formula, i:int, text_color={"red": 1}):
+    def _request_regex_vr(self, gs_formula, i: int, text_color={"red": 1}):
         """
         Generate request to change font color to black upon correctly formatted
         user entry.
@@ -712,9 +718,7 @@ def _request_regex_vr(self, gs_formula, i:int, text_color={"red": 1}):
                             "values": gs_formula,
                         },
                         "format": {
-                            "textFormat": {
-                                "foregroundColor": text_color
-                            }
+                            "textFormat": {"foregroundColor": text_color}
                         },
                     },
                 },
@@ -725,9 +729,14 @@
         }
         return requests_vr

-    def _request_regex_match_vr_formatting(self, validation_rules: List[str], i: int,
-        spreadsheet_id: str, requests_body: dict, strict: Optional[bool],
-        ):
+    def _request_regex_match_vr_formatting(
+        self,
+        validation_rules: List[str],
+        i: int,
+        spreadsheet_id: str,
+        requests_body: dict,
+        strict: Optional[bool],
+    ):
         """
         Purpose:
            - Apply regular expression validation rules to google sheets.
@@ -785,7 +794,7 @@
                 "red": 232.0 / 255.0,
                 "green": 80.0 / 255.0,
                 "blue": 70.0 / 255.0,
-            }
+            },
         )

         ## Create request for conditionally formatting user input.
@@ -802,16 +811,11 @@
             validation_type="CUSTOM_FORMULA",
         )

-        requests_body["requests"].append(
-            requests_vr_format_body["requests"]
-        )
+        requests_body["requests"].append(requests_vr_format_body["requests"])
         requests_body["requests"].append(requests_vr["requests"])
-        requests_body["requests"].append(
-            requests_data_validation_vr["requests"]
-        )
+        requests_body["requests"].append(requests_data_validation_vr["requests"])
         return requests_body

-
     def _request_row_format(self, i, req):
         """Adding description to headers, this is not executed if only JSON schema is defined.
         Also formatting required columns.
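# ---------------------------------------------------------------------------
# A hedged sketch of how a requests body assembled by the helpers above is
# ultimately applied: the Sheets v4 API accepts the queued requests in a
# single batchUpdate call. The credentials object and spreadsheet id are
# assumptions for illustration, not values taken from this diff.
from googleapiclient.discovery import build


def apply_sheet_requests(credentials, spreadsheet_id: str, requests_body: dict) -> dict:
    # Build an authorized Sheets client and submit every queued request at once.
    sheet_service = build("sheets", "v4", credentials=credentials)
    return (
        sheet_service.spreadsheets()
        .batchUpdate(spreadsheetId=spreadsheet_id, body=requests_body)
        .execute()
    )
# ---------------------------------------------------------------------------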
@@ -824,7 +828,7 @@ def _request_row_format(self, i, req):
         """
         if self.dmge:
             # get node definition
-            note = self.dmge.get_node_comment(node_display_name = req)
+            note = self.dmge.get_node_comment(node_display_name=req)

             notes_body = {
                 "requests": [
@@ -866,7 +870,7 @@ def _request_note_valid_values(self, i, req, validation_rules, valid_values):
            notes_body["requests"] (dict): with information on note
                to add to the column header, about using multiselect.
                This notes body will be added to a request.
         """
         if rule_in_rule_list("list", validation_rules) and valid_values:
             note = "Please enter applicable comma-separated items selected from the set of allowable terms for this attribute. See our data standards for allowable terms"
             notes_body = {
@@ -886,7 +890,9 @@
             }
             return notes_body["requests"]
         elif rule_in_rule_list("list", validation_rules) and not valid_values:
-            note = "Please enter values as a comma separated list. For example: XX, YY, ZZ"
+            note = (
+                "Please enter values as a comma separated list. For example: XX, YY, ZZ"
+            )
             notes_body = {
                 "requests": [
                     {
@@ -989,8 +995,12 @@
         return validation_body["requests"]

     def _dependency_formatting(
-        self, i, req_val, ordered_metadata_fields, val_dependencies,
-        dependency_formatting_body
+        self,
+        i,
+        req_val,
+        ordered_metadata_fields,
+        val_dependencies,
+        dependency_formatting_body,
     ):
         """If there are additional attribute dependencies, find the corresponding
         fields that need to be filled in and construct conditional formatting rules
@@ -1043,9 +1053,7 @@
                         "index": 0,
                     }
                 }
-                dependency_formatting_body["requests"].append(
-                    conditional_format_rule
-                )
+                dependency_formatting_body["requests"].append(conditional_format_rule)
         return dependency_formatting_body["requests"]

     def _request_dependency_formatting(
@@ -1082,14 +1090,15 @@
             # set conditional formatting for dependencies.
             if val_dependencies:
                 dependency_formatting_body["requests"] = self._dependency_formatting(
-                    i, req_val, ordered_metadata_fields, val_dependencies,
-                    dependency_formatting_body
+                    i,
+                    req_val,
+                    ordered_metadata_fields,
+                    val_dependencies,
+                    dependency_formatting_body,
                 )

             if dependency_formatting_body["requests"]:
-                requests_body["requests"].append(
-                    dependency_formatting_body["requests"]
-                )
+                requests_body["requests"].append(dependency_formatting_body["requests"])
         return requests_body

     def _create_requests_body(
@@ -1125,13 +1134,15 @@
         requests_body["requests"] = []
         for i, req in enumerate(ordered_metadata_fields[0]):
             # Gather validation rules and valid values for attribute.
-            validation_rules = self.dmge.get_node_validation_rules(node_display_name=req)
-
+            validation_rules = self.dmge.get_node_validation_rules(
+                node_display_name=req
+            )
+
             # Add regex match validation rule to Google Sheets.
if validation_rules and sheet_url: - requests_body =self._request_regex_match_vr_formatting( - validation_rules, i, spreadsheet_id, requests_body, strict - ) + requests_body = self._request_regex_match_vr_formatting( + validation_rules, i, spreadsheet_id, requests_body, strict + ) if req in json_schema["properties"].keys(): valid_values = self._get_valid_values_from_jsonschema_property( @@ -1146,7 +1157,9 @@ def _create_requests_body( requests_body["requests"].append(get_row_formatting) # set color of required columns to blue - required_columns_color = self._set_required_columns_color(i, req, json_schema) + required_columns_color = self._set_required_columns_color( + i, req, json_schema + ) if required_columns_color: requests_body["requests"].append(required_columns_color) # Add note on how to use multi-select, when appropriate @@ -1165,28 +1178,37 @@ def _create_requests_body( # for attributes that don't require "list", create dropdown options and set up data validation rules if not rule_in_rule_list("list", validation_rules): create_dropdown = self._request_dropdown( - i, req_vals, spreadsheet_id, validation_rules, valid_values - ) + i, req_vals, spreadsheet_id, validation_rules, valid_values + ) if create_dropdown: requests_body["requests"].append(create_dropdown) # for attributes that require "list", simply store valid values (if any) in second sheet - elif len(req_vals)>0 and rule_in_rule_list("list", validation_rules): + elif len(req_vals) > 0 and rule_in_rule_list("list", validation_rules): self._store_valid_values_as_data_dictionary(i, req_vals, spreadsheet_id) # generate a conditional format rule for each required value (i.e. valid value) # for this field (i.e. if this field is set to a valid value that may require additional # fields to be filled in, these additional fields will be formatted in a custom style (e.g. red background) - requests_body = self._request_dependency_formatting(i, req_vals, ordered_metadata_fields, requests_body) - + requests_body = self._request_dependency_formatting( + i, req_vals, ordered_metadata_fields, requests_body + ) + # Set borders formatting borders_formatting = self._request_cell_borders() if borders_formatting: requests_body["requests"].append(borders_formatting) return requests_body - def _create_empty_gs(self, required_metadata_fields, json_schema, spreadsheet_id, sheet_url, strict: Optional[bool]): + def _create_empty_gs( + self, + required_metadata_fields, + json_schema, + spreadsheet_id, + sheet_url, + strict: Optional[bool], + ): """Generate requests to add columns and format the google sheet. Args: required_metadata_fields(dict): @@ -1262,7 +1284,12 @@ def _gather_all_fields(self, fields, json_schema): ) return required_metadata_fields - def get_empty_manifest(self, strict: Optional[bool], json_schema_filepath: str=None, sheet_url: Optional[bool]=None): + def get_empty_manifest( + self, + strict: Optional[bool], + json_schema_filepath: str = None, + sheet_url: Optional[bool] = None, + ): """Create an empty manifest using specifications from the json schema. 
Args: @@ -1283,23 +1310,30 @@ def get_empty_manifest(self, strict: Optional[bool], json_schema_filepath: str=N ) manifest_url = self._create_empty_gs( - required_metadata_fields, json_schema, spreadsheet_id, sheet_url=sheet_url, strict=strict, + required_metadata_fields, + json_schema, + spreadsheet_id, + sheet_url=sheet_url, + strict=strict, ) return manifest_url - def _get_missing_columns(self, headers_1:list , headers_2:list) -> list: + def _get_missing_columns(self, headers_1: list, headers_2: list) -> list: """Compare two colunm sets and get cols that are in headers_1, but not headers_2 Args: headers_1 (list): list of column headers headers_2 (list): list of column headers - Returns: + Returns: list: column headers in headers_1 but not headers_2 """ return set(headers_1) - set(headers_2) def set_dataframe_by_url( - self, manifest_url: str, manifest_df: pd.DataFrame, out_of_schema_columns: set =None, + self, + manifest_url: str, + manifest_df: pd.DataFrame, + out_of_schema_columns: set = None, ) -> ps.Spreadsheet: """Update Google Sheets using given pandas DataFrame. Args: @@ -1324,10 +1358,13 @@ def set_dataframe_by_url( # TODO: similarly clear formatting for out of schema columns, if any if out_of_schema_columns: num_out_of_schema_columns = len(out_of_schema_columns) - start_col = self._column_to_letter(len(manifest_df.columns) - num_out_of_schema_columns) # find start of out of schema columns - end_col = self._column_to_letter(len(manifest_df.columns) + 1) # find end of out of schema columns - wb.set_data_validation(start = start_col, end = end_col, condition_type = None) - + start_col = self._column_to_letter( + len(manifest_df.columns) - num_out_of_schema_columns + ) # find start of out of schema columns + end_col = self._column_to_letter( + len(manifest_df.columns) + 1 + ) # find end of out of schema columns + wb.set_data_validation(start=start_col, end=end_col, condition_type=None) # set permissions so that anyone with the link can edit sh.share("", role="writer", type="anyone") @@ -1384,7 +1421,7 @@ def map_annotation_names_to_display_names( return annotations.rename(columns=label_map) def get_manifest_with_annotations( - self, annotations: pd.DataFrame, strict: Optional[bool]=None + self, annotations: pd.DataFrame, strict: Optional[bool] = None ) -> Tuple[ps.Spreadsheet, pd.DataFrame]: """Generate manifest, optionally with annotations (if requested). Args: @@ -1423,88 +1460,126 @@ def get_manifest_with_annotations( return manifest_url, manifest_df - def export_sheet_to_excel(self, title: str = None, manifest_url : str = None, output_location: str = None) -> str: + def export_sheet_to_excel( + self, title: str = None, manifest_url: str = None, output_location: str = None + ) -> str: """ export manifest as an Excel spreadsheet and return local file path Args: title: title of the exported excel spreadsheet - manifest_url: manifest google sheet url + manifest_url: manifest google sheet url output_location: the location where the exported excel file would live - return: - Export manifest to a desired location. + return: + Export manifest to a desired location. 
""" # construct file name file_name = title + ".xlsx" - #if file path exists and it contains a file name: - if output_location: + # if file path exists and it contains a file name: + if output_location: if os.path.exists(output_location): - if Path(output_location).suffix == '.xlsx' or Path(output_location).suffix == '.xls': + if ( + Path(output_location).suffix == ".xlsx" + or Path(output_location).suffix == ".xls" + ): output_excel_file_path = output_location # if users define the location but it doesn't contain a file name, we should add the file name: else: output_excel_file_path = os.path.join(output_location, file_name) - + # trigger a warning if file path is provided but does not exist elif output_location and not os.path.exists(output_location): - output_excel_file_path = os.path.abspath(os.path.join(os.getcwd(), file_name)) - logger.warning(f'{output_location} does not exist. Using current working directory {output_excel_file_path}') + output_excel_file_path = os.path.abspath( + os.path.join(os.getcwd(), file_name) + ) + logger.warning( + f"{output_location} does not exist. Using current working directory {output_excel_file_path}" + ) # otherwise, use the default location else: - output_excel_file_path = os.path.abspath(os.path.join(os.getcwd(), file_name)) - + output_excel_file_path = os.path.abspath( + os.path.join(os.getcwd(), file_name) + ) + # export the manifest to excel - export_manifest_drive_service(manifest_url, file_path=output_excel_file_path, mimeType = 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet') - + export_manifest_drive_service( + manifest_url, + file_path=output_excel_file_path, + mimeType="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet", + ) + return output_excel_file_path - def _handle_output_format_logic(self, output_format: str = None, output_path: str = None, sheet_url: bool = None, empty_manifest_url: str = None, dataframe: pd.DataFrame = None, out_of_schema_columns: set =None): + def _handle_output_format_logic( + self, + output_format: str = None, + output_path: str = None, + sheet_url: bool = None, + empty_manifest_url: str = None, + dataframe: pd.DataFrame = None, + out_of_schema_columns: set = None, + ): """ Handle the logic between sheet_url parameter and output_format parameter to determine the type of output to return - Args: + Args: output_format: Determines if Google sheet URL, pandas dataframe, or Excel spreadsheet gets returned. sheet_url (Will be deprecated): a boolean ; determine if a pandas dataframe or a google sheet url gets return - empty_manifest_url: Google sheet URL that leads to an empty manifest + empty_manifest_url: Google sheet URL that leads to an empty manifest dataframe: the pandas dataframe that contains the metadata that needs to be populated to an empty manifest output_path: Determines the output path of the exported manifest (only relevant if returning an excel spreadsheet) out_of_schema_columns (set): Columns that are in downloaded manifest, but not in current schema. - Return: - a pandas dataframe, file path of an excel spreadsheet, or a google sheet URL + Return: + a pandas dataframe, file path of an excel spreadsheet, or a google sheet URL TODO: Depreciate sheet URL and add google_sheet as an output_format choice. 
""" - # if the output type gets set to "dataframe", return a data frame + # if the output type gets set to "dataframe", return a data frame if output_format == "dataframe": return dataframe - + # if the output type gets set to "excel", return an excel spreadsheet - elif output_format == "excel": + elif output_format == "excel": # export manifest url that only contains column headers to Excel - output_file_path = self.export_sheet_to_excel(title = self.title, - manifest_url = empty_manifest_url, - output_location = output_path, - ) + output_file_path = self.export_sheet_to_excel( + title=self.title, + manifest_url=empty_manifest_url, + output_location=output_path, + ) # populate an excel spreadsheet with the existing dataframe self.populate_existing_excel_spreadsheet(output_file_path, dataframe) return output_file_path - + # Return google sheet if sheet_url flag is raised. elif sheet_url: - manifest_sh = self.set_dataframe_by_url(manifest_url=empty_manifest_url, manifest_df=dataframe, out_of_schema_columns=out_of_schema_columns) + manifest_sh = self.set_dataframe_by_url( + manifest_url=empty_manifest_url, + manifest_df=dataframe, + out_of_schema_columns=out_of_schema_columns, + ) return manifest_sh.url - + # Default return a DataFrame else: return dataframe - + @staticmethod - def create_single_manifest(path_to_data_model: str, graph_data_model: nx.MultiDiGraph, data_type: str, access_token:Optional[str]=None, dataset_id:Optional[str]=None, strict:Optional[bool]=True, title:Optional[str]=None, output_format:Literal["google_sheet", "excel", "dataframe"]="google_sheet", use_annotations:Optional[bool]=False) -> Union[str, pd.DataFrame, BinaryIO]: + def create_single_manifest( + path_to_data_model: str, + graph_data_model: nx.MultiDiGraph, + data_type: str, + access_token: Optional[str] = None, + dataset_id: Optional[str] = None, + strict: Optional[bool] = True, + title: Optional[str] = None, + output_format: Literal["google_sheet", "excel", "dataframe"] = "google_sheet", + use_annotations: Optional[bool] = False, + ) -> Union[str, pd.DataFrame, BinaryIO]: """Create a single manifest Args: - jsonld (str): jsonld schema + jsonld (str): jsonld schema data_type (str): data type of a manifest access_token (str, optional): synapse access token. Required when getting an existing manifest. Defaults to None. dataset_id (str, optional): dataset id when generating an existing manifest. Defaults to None. 
@@ -1523,7 +1598,7 @@ def create_single_manifest(path_to_data_model: str, graph_data_model: nx.MultiDi title=title, root=data_type, use_annotations=use_annotations, - alphabetize_valid_values = 'ascending', + alphabetize_valid_values="ascending", ) # if returning a dataframe @@ -1532,25 +1607,46 @@ def create_single_manifest(path_to_data_model: str, graph_data_model: nx.MultiDi output_format = "dataframe" result = manifest_generator.get_manifest( - dataset_id=dataset_id, sheet_url=True, output_format=output_format, access_token=access_token, strict=strict, + dataset_id=dataset_id, + sheet_url=True, + output_format=output_format, + access_token=access_token, + strict=strict, ) # return an excel file if output_format is set to "excel" if output_format == "excel": dir_name = os.path.dirname(result) file_name = os.path.basename(result) - mimetype='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet' - return send_from_directory(directory=dir_name, path=file_name, as_attachment=True, mimetype=mimetype, max_age=0) - + mimetype = ( + "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet" + ) + return send_from_directory( + directory=dir_name, + path=file_name, + as_attachment=True, + mimetype=mimetype, + max_age=0, + ) + return result - + @staticmethod - def create_manifests(path_to_data_model:str, data_types:list, access_token:Optional[str]=None, dataset_ids:Optional[list]=None, output_format:Literal["google_sheet", "excel", "dataframe"]="google_sheet", title:Optional[str]=None, strict:Optional[bool]=True, use_annotations:Optional[bool]=False) -> Union[List[str], List[pd.DataFrame], BinaryIO]: + def create_manifests( + path_to_data_model: str, + data_types: list, + access_token: Optional[str] = None, + dataset_ids: Optional[list] = None, + output_format: Literal["google_sheet", "excel", "dataframe"] = "google_sheet", + title: Optional[str] = None, + strict: Optional[bool] = True, + use_annotations: Optional[bool] = False, + ) -> Union[List[str], List[pd.DataFrame], BinaryIO]: """Create multiple manifests Args: path_to_data_model (str): str path to data model - data_type (list): a list of data types + data_type (list): a list of data types access_token (str, optional): synapse access token. Required when getting an existing manifest. Defaults to None. dataset_id (list, optional): a list of dataset ids when generating an existing manifest. Defaults to None. output_format (str, optional):format of manifest. It has three options: google sheet, excel or dataframe. Defaults to None. @@ -1561,9 +1657,9 @@ def create_manifests(path_to_data_model:str, data_types:list, access_token:Optio Returns: Union[List[str], List[pd.DataFrame], BinaryIO]: a list of Googlesheet URLs, a list of pandas dataframes or an Excel file. 
""" - data_model_parser = DataModelParser(path_to_data_model = path_to_data_model) + data_model_parser = DataModelParser(path_to_data_model=path_to_data_model) - #Parse Model + # Parse Model parsed_data_model = data_model_parser.parse_model() # Instantiate DataModelGraph @@ -1574,48 +1670,82 @@ def create_manifests(path_to_data_model:str, data_types:list, access_token:Optio # Gather all returned result urls all_results = [] - if data_types[0] == 'all manifests': + if data_types[0] == "all manifests": dmge = DataModelGraphExplorer(graph_data_model) - component_digraph = dmge.get_digraph_by_edge_type('requiresComponent') + component_digraph = dmge.get_digraph_by_edge_type("requiresComponent") components = component_digraph.nodes() for component in components: if title: - t = f'{title}.{component}.manifest' - else: - t = f'Example.{component}.manifest' + t = f"{title}.{component}.manifest" + else: + t = f"Example.{component}.manifest" if output_format != "excel": - result = ManifestGenerator.create_single_manifest(path_to_data_model=path_to_data_model, data_type=component, graph_data_model=graph_data_model, output_format=output_format, title=t, access_token=access_token) + result = ManifestGenerator.create_single_manifest( + path_to_data_model=path_to_data_model, + data_type=component, + graph_data_model=graph_data_model, + output_format=output_format, + title=t, + access_token=access_token, + ) all_results.append(result) - else: - logger.error('Currently we do not support returning multiple files as Excel format at once. Please choose a different output format. ') + else: + logger.error( + "Currently we do not support returning multiple files as Excel format at once. Please choose a different output format. " + ) else: for i, dt in enumerate(data_types): - if not title: - t = f'Example.{dt}.manifest' - else: + if not title: + t = f"Example.{dt}.manifest" + else: if len(data_types) > 1: - t = f'{title}.{dt}.manifest' - else: + t = f"{title}.{dt}.manifest" + else: t = title if dataset_ids: # if a dataset_id is provided add this to the function call. - result = ManifestGenerator.create_single_manifest(path_to_data_model=path_to_data_model, data_type=dt, graph_data_model=graph_data_model, dataset_id=dataset_ids[i], output_format=output_format, title=t, access_token=access_token, use_annotations=use_annotations) + result = ManifestGenerator.create_single_manifest( + path_to_data_model=path_to_data_model, + data_type=dt, + graph_data_model=graph_data_model, + dataset_id=dataset_ids[i], + output_format=output_format, + title=t, + access_token=access_token, + use_annotations=use_annotations, + ) else: - result = ManifestGenerator.create_single_manifest(path_to_data_model=path_to_data_model, data_type=dt, graph_data_model=graph_data_model, output_format=output_format, title=t, access_token=access_token, use_annotations=use_annotations) + result = ManifestGenerator.create_single_manifest( + path_to_data_model=path_to_data_model, + data_type=dt, + graph_data_model=graph_data_model, + output_format=output_format, + title=t, + access_token=access_token, + use_annotations=use_annotations, + ) # if output is pandas dataframe or google sheet url if isinstance(result, str) or isinstance(result, pd.DataFrame): all_results.append(result) - else: + else: if len(data_types) > 1: - logger.warning(f'Currently we do not support returning multiple files as Excel format at once. Only {t} would get returned. ') + logger.warning( + f"Currently we do not support returning multiple files as Excel format at once. 
Only {t} would get returned. " + ) return result return all_results - def get_manifest( - self, dataset_id: str = None, sheet_url: bool = None, json_schema: str = None, output_format: str = None, output_path: str = None, access_token: str = None, strict: Optional[bool]=None, + self, + dataset_id: str = None, + sheet_url: bool = None, + json_schema: str = None, + output_format: str = None, + output_path: str = None, + access_token: str = None, + strict: Optional[bool] = None, ) -> Union[str, pd.DataFrame]: """Gets manifest for a given dataset on Synapse. TODO: move this function to class MetadatModel (after MetadataModel is refactored) @@ -1625,36 +1755,44 @@ def get_manifest( sheet_url (Will be deprecated): a boolean ; determine if a pandas dataframe or a google sheet url gets return output_format: Determines if Google sheet URL, pandas dataframe, or Excel spreadsheet gets returned. output_path: Determines the output path of the exported manifest - access_token: Token in .synapseConfig. Since we could not pre-load access_token as an environment variable on AWS, we have to add this variable. + access_token: Token in .synapseConfig. Since we could not pre-load access_token as an environment variable on AWS, we have to add this variable. Returns: - Googlesheet URL, pandas dataframe, or an Excel spreadsheet + Googlesheet URL, pandas dataframe, or an Excel spreadsheet """ # Handle case when no dataset ID is provided if not dataset_id: - manifest_url = self.get_empty_manifest(json_schema_filepath=json_schema, strict=strict, sheet_url=sheet_url) + manifest_url = self.get_empty_manifest( + json_schema_filepath=json_schema, strict=strict, sheet_url=sheet_url + ) # if output_form parameter is set to "excel", return an excel spreadsheet - if output_format == "excel": - output_file_path = self.export_sheet_to_excel(title = self.title, manifest_url = manifest_url, output_location = output_path) + if output_format == "excel": + output_file_path = self.export_sheet_to_excel( + title=self.title, + manifest_url=manifest_url, + output_location=output_path, + ) return output_file_path # since we are not going to return an empty dataframe for an empty manifest, here we will just return a google sheet url for all other cases - else: + else: return manifest_url # Otherwise, create manifest using the given dataset - #TODO: avoid explicitly exposing Synapse store functionality + # TODO: avoid explicitly exposing Synapse store functionality # just instantiate a Store class and let it decide at runtime/config # the store type - if access_token: + if access_token: # for getting an existing manifest on AWS store = SynapseStorage(access_token=access_token) - else: + else: store = SynapseStorage() # Get manifest file associated with given dataset (if applicable) # populate manifest with set of new files (if applicable) - manifest_record = store.updateDatasetManifestFiles(self.dmge, datasetId = dataset_id, store = False) + manifest_record = store.updateDatasetManifestFiles( + self.dmge, datasetId=dataset_id, store=False + ) # get URL of an empty manifest file created based on schema component empty_manifest_url = self.get_empty_manifest(strict=strict, sheet_url=True) @@ -1664,20 +1802,23 @@ def get_manifest( # TODO: Update or remove the warning in self.__init__() if # you change the behavior here based on self.use_annotations # Update df with existing manifest. 
Agnostic to output format - updated_df, out_of_schema_columns = self._update_dataframe_with_existing_df(empty_manifest_url=empty_manifest_url, existing_df=manifest_record[1]) + updated_df, out_of_schema_columns = self._update_dataframe_with_existing_df( + empty_manifest_url=empty_manifest_url, existing_df=manifest_record[1] + ) # determine the format of manifest - result = self._handle_output_format_logic(output_format = output_format, - output_path = output_path, - sheet_url = sheet_url, - empty_manifest_url=empty_manifest_url, - dataframe = updated_df, - out_of_schema_columns=out_of_schema_columns, - ) + result = self._handle_output_format_logic( + output_format=output_format, + output_path=output_path, + sheet_url=sheet_url, + empty_manifest_url=empty_manifest_url, + dataframe=updated_df, + out_of_schema_columns=out_of_schema_columns, + ) return result # Generate empty template and optionally fill in with annotations - # if there is no existing manifest and use annotations is set to True, + # if there is no existing manifest and use annotations is set to True, # pull annotations (in reality, annotations should be empty when there is no existing manifest) else: # Using getDatasetAnnotations() to retrieve file names and subset @@ -1687,39 +1828,50 @@ def get_manifest( if self.is_file_based: annotations = store.getDatasetAnnotations(dataset_id) # Update `additional_metadata` and generate manifest - manifest_url, manifest_df = self.get_manifest_with_annotations(annotations,strict=strict) - - # If the annotations are empty, - # ie if there are no annotations to pull or annotations were unable to be pulled because the metadata is not file based, + manifest_url, manifest_df = self.get_manifest_with_annotations( + annotations, strict=strict + ) + + # If the annotations are empty, + # ie if there are no annotations to pull or annotations were unable to be pulled because the metadata is not file based, # then create manifest from an empty manifest if annotations.empty: empty_manifest_df = self.get_dataframe_by_url(empty_manifest_url) manifest_df = empty_manifest_df - logger.warning(f"Annotations were not able to be gathered for the given parameters. This manifest will be generated from an empty manifest.") - + logger.warning( + f"Annotations were not able to be gathered for the given parameters. This manifest will be generated from an empty manifest." + ) + else: empty_manifest_df = self.get_dataframe_by_url(empty_manifest_url) if self.is_file_based: # for file-based manifest, make sure that entityId column and Filename column still gets filled even though use_annotations gets set to False - manifest_df = store.add_entity_id_and_filename(dataset_id,empty_manifest_df) + manifest_df = store.add_entity_id_and_filename( + dataset_id, empty_manifest_df + ) else: manifest_df = empty_manifest_df # Update df with existing manifest. 
Agnostic to output format - updated_df, out_of_schema_columns = self._update_dataframe_with_existing_df(empty_manifest_url=empty_manifest_url, existing_df=manifest_df) - - # determine the format of manifest that gets return - result = self._handle_output_format_logic(output_format = output_format, - output_path = output_path, - sheet_url = sheet_url, - empty_manifest_url=empty_manifest_url, - dataframe = updated_df, - out_of_schema_columns = out_of_schema_columns, - ) + updated_df, out_of_schema_columns = self._update_dataframe_with_existing_df( + empty_manifest_url=empty_manifest_url, existing_df=manifest_df + ) + + # determine the format of manifest that gets return + result = self._handle_output_format_logic( + output_format=output_format, + output_path=output_path, + sheet_url=sheet_url, + empty_manifest_url=empty_manifest_url, + dataframe=updated_df, + out_of_schema_columns=out_of_schema_columns, + ) return result - def _get_end_columns(self, current_schema_headers, existing_manifest_headers, out_of_schema_columns): + def _get_end_columns( + self, current_schema_headers, existing_manifest_headers, out_of_schema_columns + ): """ Gather columns to be added to the end of the manifest, and ensure entityId is at the end. Args: @@ -1731,19 +1883,24 @@ def _get_end_columns(self, current_schema_headers, existing_manifest_headers, ou """ # Identify columns to add to the end of the manifest end_columns = list(out_of_schema_columns) - + # Make sure want Ids are placed at end of manifest, in given order. - for id_name in ['Uuid', 'Id', 'entityId']: + for id_name in ["Uuid", "Id", "entityId"]: if id_name in end_columns: end_columns.remove(id_name) end_columns.append(id_name) - + # Add entity_id to the end columns if it should be there but isn't - if 'entityId' in (current_schema_headers or existing_manifest_headers) and 'entityId' not in end_columns: - end_columns.append('entityId') + if ( + "entityId" in (current_schema_headers or existing_manifest_headers) + and "entityId" not in end_columns + ): + end_columns.append("entityId") return end_columns - def _update_dataframe_with_existing_df(self, empty_manifest_url: str, existing_df: pd.DataFrame) -> pd.DataFrame: + def _update_dataframe_with_existing_df( + self, empty_manifest_url: str, existing_df: pd.DataFrame + ) -> pd.DataFrame: """ Handle scenario when existing manifest does not match new manifest template due to changes in the data model: the sheet column header reflect the latest schema the existing manifest column-set may be outdated @@ -1759,40 +1916,50 @@ def _update_dataframe_with_existing_df(self, empty_manifest_url: str, existing_d """ # Get headers for the current schema and existing manifest df. - current_schema_headers = list(self.get_dataframe_by_url(manifest_url=empty_manifest_url).columns) + current_schema_headers = list( + self.get_dataframe_by_url(manifest_url=empty_manifest_url).columns + ) existing_manifest_headers = list(existing_df.columns) # Find columns that exist in the current schema, but are not in the manifest being downloaded. - new_columns = self._get_missing_columns(current_schema_headers, existing_manifest_headers) + new_columns = self._get_missing_columns( + current_schema_headers, existing_manifest_headers + ) # Find columns that exist in the manifest being downloaded, but not in the current schema. 
-        out_of_schema_columns = self._get_missing_columns(existing_manifest_headers, current_schema_headers)
+        out_of_schema_columns = self._get_missing_columns(
+            existing_manifest_headers, current_schema_headers
+        )

        # clean empty columns if any are present (there should be none)
        # TODO: Remove this line once we start preventing empty column names
-        if '' in new_columns:
-            new_columns = new_columns.remove('')
+        if "" in new_columns:
+            new_columns.remove("")  # list.remove() mutates in place and returns None

        # Copy the df for updating.
        updated_df = existing_df.copy(deep=True)
-        
+
        # update existing manifest w/ missing columns, if any
        if new_columns:
            updated_df = updated_df.assign(
                **dict(zip(new_columns, len(new_columns) * [""]))
            )

-        end_columns = self._get_end_columns(current_schema_headers=current_schema_headers,
-            existing_manifest_headers=existing_manifest_headers,
-            out_of_schema_columns=out_of_schema_columns)
-        
+        end_columns = self._get_end_columns(
+            current_schema_headers=current_schema_headers,
+            existing_manifest_headers=existing_manifest_headers,
+            out_of_schema_columns=out_of_schema_columns,
+        )
+
        # sort columns in the updated manifest:
        # match latest schema order
        # move obsolete columns to the end
        updated_df = updated_df[self.sort_manifest_fields(updated_df.columns)]

        # move obsolete columns to the end, with entityId at the very end
-        updated_df = updated_df[[c for c in updated_df if c not in end_columns] + list(end_columns)]
+        updated_df = updated_df[
+            [c for c in updated_df if c not in end_columns] + list(end_columns)
+        ]
        return updated_df, out_of_schema_columns

    def _format_new_excel_column(self, worksheet, new_column_index: int, col: str):
@@ -1804,78 +1971,106 @@ def _format_new_excel_column(self, worksheet, new_column_index: int, col: str):
            modified worksheet
        """
        # Add column header
-        worksheet.cell(row=1, column=new_column_index+1).value = col
-        # Format new column header
-        worksheet.cell(row=1, column=new_column_index+1).font = Font(size=8, bold=True, color="FF000000")
-        worksheet.cell(row=1, column=new_column_index+1).alignment = Alignment(horizontal="center", vertical="bottom")
-        worksheet.cell(row=1, column=new_column_index+1).fill = PatternFill(start_color='FFE0E0E0', end_color='FFE0E0E0', fill_type='solid')
+        worksheet.cell(row=1, column=new_column_index + 1).value = col
+        # Format new column header
+        worksheet.cell(row=1, column=new_column_index + 1).font = Font(
+            size=8, bold=True, color="FF000000"
+        )
+        worksheet.cell(row=1, column=new_column_index + 1).alignment = Alignment(
+            horizontal="center", vertical="bottom"
+        )
+        worksheet.cell(row=1, column=new_column_index + 1).fill = PatternFill(
+            start_color="FFE0E0E0", end_color="FFE0E0E0", fill_type="solid"
+        )
        return worksheet

-    def populate_existing_excel_spreadsheet(self, existing_excel_path: str = None, additional_df: pd.DataFrame = None):
-        '''Populate an existing excel spreadsheet by using an additional dataframe (to avoid sending metadata directly to Google APIs)
+    def populate_existing_excel_spreadsheet(
+        self, existing_excel_path: str = None, additional_df: pd.DataFrame = None
+    ):
+        """Populate an existing excel spreadsheet by using an additional dataframe (to avoid sending metadata directly to Google APIs)
        New columns will be placed at the end of the spreadsheet.

        Args:
            existing_excel_path: path of an existing excel spreadsheet
            additional_df: additional dataframe
-        Return: 
+        Return:
            the existing workbook at existing_excel_path, updated with the new dataframe.
        Note:
-            - Done by rows and column as a way to preserve formatting. 
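# Illustrative sketch: _get_missing_columns is not shown in this hunk; judging
# from its two call sites above, it presumably computes an order-preserving
# difference of two header lists, roughly as below (assumed behavior only):
def get_missing_columns(headers_a: list, headers_b: list) -> list:
    """Columns in headers_a that are absent from headers_b."""
    present = set(headers_b)
    return [col for col in headers_a if col not in present]

schema_headers = ["Filename", "Sample ID", "Age", "entityId"]
manifest_headers = ["Filename", "Sample ID", "Legacy Column", "entityId"]
assert get_missing_columns(schema_headers, manifest_headers) == ["Age"]
assert get_missing_columns(manifest_headers, schema_headers) == ["Legacy Column"]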
+            - Done by rows and columns as a way to preserve formatting.
              Doing a complete replacement will remove all conditional formatting and dropdowns.
-        '''
+        """
        # load workbook
        workbook = load_workbook(existing_excel_path)
        worksheet = workbook.active

        # Add new data to existing excel
        if not additional_df.empty:
-            existing_excel_headers = [cell.value for cell in worksheet[1] if cell.value != None]
+            existing_excel_headers = [
+                cell.value for cell in worksheet[1] if cell.value is not None
+            ]

            new_column_index = len(existing_excel_headers)
            df_columns = additional_df.columns

            # Iteratively fill workbook with contents of additional_df
-            for row_num, row_contents in enumerate(dataframe_to_rows(additional_df, index=False, header=False), 2):
+            for row_num, row_contents in enumerate(
+                dataframe_to_rows(additional_df, index=False, header=False), 2
+            ):
                for index, col in enumerate(df_columns):
                    if col in existing_excel_headers:
                        # Get index of column header in existing excel to ensure no values are placed in incorrect spot.
                        existing_column_index = existing_excel_headers.index(col)
-                        worksheet.cell(row=row_num, column=existing_column_index+1).value = row_contents[index]
+                        worksheet.cell(
+                            row=row_num, column=existing_column_index + 1
+                        ).value = row_contents[index]
                    else:
                        # Add new col to excel worksheet and format.
-                        worksheet = self._format_new_excel_column(worksheet=worksheet, new_column_index=new_column_index, col=col)
+                        worksheet = self._format_new_excel_column(
+                            worksheet=worksheet,
+                            new_column_index=new_column_index,
+                            col=col,
+                        )
                        # Add data to column
-                        worksheet.cell(row=row_num, column=new_column_index+1).value = row_contents[index]
+                        worksheet.cell(
+                            row=row_num, column=new_column_index + 1
+                        ).value = row_contents[index]
                        # Add new column to headers so it can be accounted for.
                        existing_excel_headers.append(col)
                        # Update index for adding new columns.
-                        new_column_index+=1
+                        new_column_index += 1
            workbook.save(existing_excel_path)

-    def populate_manifest_spreadsheet(self, existing_manifest_path: str = None, empty_manifest_url: str = None, return_excel: bool = False, title: str = None):
+    def populate_manifest_spreadsheet(
+        self,
+        existing_manifest_path: str = None,
+        empty_manifest_url: str = None,
+        return_excel: bool = False,
+        title: str = None,
+    ):
        """Creates a google sheet manifest based on existing manifest.
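# Illustrative sketch: populate_existing_excel_spreadsheet above writes cell by
# cell so the template's dropdowns and conditional formatting survive. The core
# openpyxl pattern, on a throwaway workbook (the file name is hypothetical):
import pandas as pd
from openpyxl import Workbook

wb = Workbook()
ws = wb.active
ws.cell(row=1, column=1).value = "Sample ID"  # header row, left untouched below

df = pd.DataFrame({"Sample ID": ["S1", "S2"]})
for row_num, row in enumerate(df.itertuples(index=False), start=2):
    ws.cell(row=row_num, column=1).value = row[0]  # data rows start below the header
wb.save("populated_sketch.xlsx")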
Args: existing_manifest_path: the location of the manifest containing metadata presently stored empty_manifest_url: the path to a manifest template to be prepopulated with existing's manifest metadata return_excel: if true, return an Excel spreadsheet instead of Google sheet - title: title of output manifest + title: title of output manifest """ # read existing manifest manifest = load_df(existing_manifest_path) - - if return_excel: - '''if we are returning an Excel spreadsheet, do not populate dataframe to google''' - # get an empty manifest + + if return_excel: + """if we are returning an Excel spreadsheet, do not populate dataframe to google""" + # get an empty manifest manifest_url = empty_manifest_url # export the manifest to excel - output_excel_file_path = self.export_sheet_to_excel(manifest_url = manifest_url, title=title) - - # populate exported sheet + output_excel_file_path = self.export_sheet_to_excel( + manifest_url=manifest_url, title=title + ) + + # populate exported sheet self.populate_existing_excel_spreadsheet(output_excel_file_path, manifest) return output_excel_file_path - else: + else: manifest_sh = self.set_dataframe_by_url(empty_manifest_url, manifest) return manifest_sh.url diff --git a/schematic/models/GE_Helpers.py b/schematic/models/GE_Helpers.py index 521d75157..194b268c6 100644 --- a/schematic/models/GE_Helpers.py +++ b/schematic/models/GE_Helpers.py @@ -1,4 +1,3 @@ - from statistics import mode from tabnanny import check import logging @@ -19,60 +18,67 @@ import great_expectations as ge from great_expectations.core.expectation_configuration import ExpectationConfiguration from great_expectations.data_context import BaseDataContext -from great_expectations.data_context.types.base import DataContextConfig, DatasourceConfig, FilesystemStoreBackendDefaults -from great_expectations.data_context.types.resource_identifiers import ExpectationSuiteIdentifier +from great_expectations.data_context.types.base import ( + DataContextConfig, + DatasourceConfig, + FilesystemStoreBackendDefaults, +) +from great_expectations.data_context.types.resource_identifiers import ( + ExpectationSuiteIdentifier, +) from great_expectations.exceptions.exceptions import GreatExpectationsError from schematic.models.validate_attribute import GenerateError from schematic.schemas.data_model_graph import DataModelGraphExplorer -from schematic.utils.validate_utils import rule_in_rule_list, np_array_to_str_list, iterable_to_str_list +from schematic.utils.validate_utils import ( + rule_in_rule_list, + np_array_to_str_list, + iterable_to_str_list, +) logger = logging.getLogger(__name__) + class GreatExpectationsHelpers(object): """ - Great Expectations helper class + Great Expectations helper class - Provides basic utilities to: - 1) Create GE workflow specific to manifest according to validation rules - 2) Parse results dict to generate appropriate errors + Provides basic utilities to: + 1) Create GE workflow specific to manifest according to validation rules + 2) Parse results dict to generate appropriate errors """ - def __init__(self, - dmge, - unimplemented_expectations, - manifest, - manifestPath - ): + + def __init__(self, dmge, unimplemented_expectations, manifest, manifestPath): """ - Purpose: - Instantiate a great expectations helpers object - Args: - dmge: - DataModelGraphExplorer Object - unimplemented_expectations: - dictionary of validation rules that currently do not have expectations developed - manifest: - manifest being validated - manifestPath: - path to manifest being validated + 
Purpose: + Instantiate a great expectations helpers object + Args: + dmge: + DataModelGraphExplorer Object + unimplemented_expectations: + dictionary of validation rules that currently do not have expectations developed + manifest: + manifest being validated + manifestPath: + path to manifest being validated """ self.unimplemented_expectations = unimplemented_expectations self.dmge = dmge self.manifest = manifest self.manifestPath = manifestPath - def build_context(self): + def build_context(self): """ - Purpose: - Create a dataContext and datasource and add to object - Returns: - saves dataContext and datasource to self + Purpose: + Create a dataContext and datasource and add to object + Returns: + saves dataContext and datasource to self """ - self.context=ge.get_context() + self.context = ge.get_context() - #create datasource configuration + # create datasource configuration datasource_config = { "name": "example_datasource", "class_name": "Datasource", @@ -89,14 +95,12 @@ def build_context(self): }, } - #create data context configuration + # create data context configuration data_context_config = DataContextConfig( datasources={ "pandas": DatasourceConfig( class_name="Datasource", - execution_engine={ - "class_name": "PandasExecutionEngine" - }, + execution_engine={"class_name": "PandasExecutionEngine"}, data_connectors={ "default_runtime_data_connector_name": { "class_name": "RuntimeDataConnector", @@ -105,25 +109,28 @@ def build_context(self): }, ) }, - store_backend_defaults=FilesystemStoreBackendDefaults(root_directory=os.path.join(os.getcwd(),'great_expectations')), + store_backend_defaults=FilesystemStoreBackendDefaults( + root_directory=os.path.join(os.getcwd(), "great_expectations") + ), ) - #build context and add data source - self.context=BaseDataContext(project_config=data_context_config) - #self.context.test_yaml_config(yaml.dump(datasource_config)) + # build context and add data source + self.context = BaseDataContext(project_config=data_context_config) + # self.context.test_yaml_config(yaml.dump(datasource_config)) self.context.add_datasource(**datasource_config) - - def build_expectation_suite(self,): + def build_expectation_suite( + self, + ): """ - Purpose: - Construct an expectation suite to validate columns with rules that have expectations - Add suite to object - Input: - - Returns: - saves expectation suite and identifier to self - + Purpose: + Construct an expectation suite to validate columns with rules that have expectations + Add suite to object + Input: + + Returns: + saves expectation suite and identifier to self + """ validation_expectation = { "int": "expect_column_values_to_be_in_type_list", @@ -136,97 +143,96 @@ def build_expectation_suite(self,): "unique": "expect_column_values_to_be_unique", "inRange": "expect_column_values_to_be_between", "IsNA": "expect_column_values_to_match_regex_list", - # To be implemented rules with possible expectations - #"list": "expect_column_values_to_not_match_regex_list", - #"regex": "expect_column_values_to_match_regex", - #"url": "expect_column_values_to_be_valid_urls", - #"matchAtLeastOne": "expect_foreign_keys_in_column_a_to_exist_in_column_b", - #"matchExactlyOne": "expect_foreign_keys_in_column_a_to_exist_in_column_b", + # "list": "expect_column_values_to_not_match_regex_list", + # "regex": "expect_column_values_to_match_regex", + # "url": "expect_column_values_to_be_valid_urls", + # "matchAtLeastOne": "expect_foreign_keys_in_column_a_to_exist_in_column_b", + # "matchExactlyOne": 
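# Illustrative sketch: validation_expectation above maps each schematic
# validation rule onto a Great Expectations expectation type, and
# add_expectation later wraps one entry in an ExpectationConfiguration. One such
# configuration, built standalone with the same GE class imported at the top of
# this module (the column name "Age" is assumed for illustration):
from great_expectations.core.expectation_configuration import ExpectationConfiguration

config = ExpectationConfiguration(
    expectation_type="expect_column_values_to_be_in_type_list",  # the "int" rule
    kwargs={"column": "Age", "type_list": ["int", "int64"], "mostly": 1.0},
    meta={"validation_rule": "int"},
)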
"expect_foreign_keys_in_column_a_to_exist_in_column_b", } - - #create blank expectation suite - self.expectation_suite_name = "Manifest_test_suite" + + # create blank expectation suite + self.expectation_suite_name = "Manifest_test_suite" self.suite = self.context.add_expectation_suite( expectation_suite_name=self.expectation_suite_name, - ) + ) - #build expectation configurations for each expectation + # build expectation configurations for each expectation for col in self.manifest.columns: - args={} - meta={} - + args = {} + meta = {} + # remove trailing/leading whitespaces from manifest self.manifest.applymap(lambda x: x.strip() if isinstance(x, str) else x) - validation_rules = self.dmge.get_node_validation_rules(node_display_name=col) + validation_rules = self.dmge.get_node_validation_rules( + node_display_name=col + ) - #check if attribute has any rules associated with it + # check if attribute has any rules associated with it if validation_rules: - #iterate through all validation rules for an attribute + # iterate through all validation rules for an attribute for rule in validation_rules: base_rule = rule.split(" ")[0] - - #check if rule has an implemented expectation - if rule_in_rule_list(rule,self.unimplemented_expectations): + + # check if rule has an implemented expectation + if rule_in_rule_list(rule, self.unimplemented_expectations): continue - args["column"] = col args["result_format"] = "COMPLETE" - - #Validate num - if base_rule=='num': - args["mostly"]=1.0 - args["type_list"]=['int','int64', 'float', 'float64'] - meta={ + # Validate num + if base_rule == "num": + args["mostly"] = 1.0 + args["type_list"] = ["int", "int64", "float", "float64"] + meta = { "notes": { "format": "markdown", - "content": "Expect column values to be of int or float type. **Markdown** `Supported`" + "content": "Expect column values to be of int or float type. **Markdown** `Supported`", }, - "validation_rule": rule + "validation_rule": rule, } - - #Validate float - elif base_rule=='float': - args["mostly"]=1.0 - args["type_list"]=['float', 'float64'] - meta={ + + # Validate float + elif base_rule == "float": + args["mostly"] = 1.0 + args["type_list"] = ["float", "float64"] + meta = { "notes": { "format": "markdown", "content": "Expect column values to be of float type. **Markdown** `Supported`", }, - "validation_rule": rule + "validation_rule": rule, } - - #Validate int - elif base_rule=='int': - args["mostly"]=1.0 - args["type_list"]=['int','int64'] - meta={ + + # Validate int + elif base_rule == "int": + args["mostly"] = 1.0 + args["type_list"] = ["int", "int64"] + meta = { "notes": { "format": "markdown", "content": "Expect column values to be of int type. **Markdown** `Supported`", }, - "validation_rule": rule + "validation_rule": rule, } - - #Validate string - elif base_rule=='str': - args["mostly"]=1.0 - args["type_"]='str' - meta={ + + # Validate string + elif base_rule == "str": + args["mostly"] = 1.0 + args["type_"] = "str" + meta = { "notes": { "format": "markdown", "content": "Expect column values to be of string type. 
**Markdown** `Supported`", }, - "validation_rule": rule + "validation_rule": rule, } - #Validate date - elif base_rule=='date': - args["mostly"]=1.0 - meta={ + # Validate date + elif base_rule == "date": + args["mostly"] = 1.0 + meta = { "notes": { "format": "markdown", "content": ( @@ -234,85 +240,98 @@ def build_expectation_suite(self,): "**Markdown** `Supported`" ), }, - "validation_rule": rule + "validation_rule": rule, } - elif base_rule==("recommended"): - args["mostly"]=0.0000000001 - args["regex_list"]=['^$'] - meta={ + elif base_rule == ("recommended"): + args["mostly"] = 0.0000000001 + args["regex_list"] = ["^$"] + meta = { "notes": { "format": "markdown", "content": "Expect column to not be empty. **Markdown** `Supported`", }, - "validation_rule": rule + "validation_rule": rule, } - elif base_rule==("protectAges"): - #Function to convert to different age limit formats + elif base_rule == ("protectAges"): + # Function to convert to different age limit formats min_age, max_age = self.get_age_limits() - args["mostly"]=1.0 - args["min_value"]=min_age - args["max_value"]=max_age - #args['allow_cross_type_comparisons']=True # TODO Can allow after issue #980 is completed - meta={ + args["mostly"] = 1.0 + args["min_value"] = min_age + args["max_value"] = max_age + # args['allow_cross_type_comparisons']=True # TODO Can allow after issue #980 is completed + meta = { "notes": { "format": "markdown", "content": "Expect ages to be between 18 years (6,570 days) and 90 years (32,850 days) of age. **Markdown** `Supported`", }, - "validation_rule": rule + "validation_rule": rule, } - elif base_rule==("unique"): - args["mostly"]=1.0 - meta={ + elif base_rule == ("unique"): + args["mostly"] = 1.0 + meta = { "notes": { "format": "markdown", "content": "Expect column values to be Unique. **Markdown** `Supported`", }, - "validation_rule": rule + "validation_rule": rule, } - - elif base_rule==("inRange"): - args["mostly"]=1.0 - args["min_value"]=float(rule.split(" ")[1]) if rule.split(" ")[1].lower() != 'none' else None - args["max_value"]=float(rule.split(" ")[2]) if rule.split(" ")[2].lower() != 'none' else None - args['allow_cross_type_comparisons']=True # TODO Should follow up with issue #980 - meta={ + + elif base_rule == ("inRange"): + args["mostly"] = 1.0 + args["min_value"] = ( + float(rule.split(" ")[1]) + if rule.split(" ")[1].lower() != "none" + else None + ) + args["max_value"] = ( + float(rule.split(" ")[2]) + if rule.split(" ")[2].lower() != "none" + else None + ) + args[ + "allow_cross_type_comparisons" + ] = True # TODO Should follow up with issue #980 + meta = { "notes": { "format": "markdown", "content": "Expect column values to be within a specified range. **Markdown** `Supported`", }, - "validation_rule": rule + "validation_rule": rule, } - - elif base_rule==("IsNA"): - args["mostly"]=1.0 - args["regex_list"]=['Not Applicable'] - meta={ + + elif base_rule == ("IsNA"): + args["mostly"] = 1.0 + args["regex_list"] = ["Not Applicable"] + meta = { "notes": { "format": "markdown", "content": "Expect column values to be marked Not Applicable. 
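# Illustrative sketch: the inRange branch above pulls its numeric bounds out of
# the whitespace-delimited rule string, with "none" disabling a bound. The same
# parsing, standalone:
def parse_in_range(rule: str):
    parts = rule.split(" ")
    min_value = float(parts[1]) if parts[1].lower() != "none" else None
    max_value = float(parts[2]) if parts[2].lower() != "none" else None
    return min_value, max_value

assert parse_in_range("inRange 50 100") == (50.0, 100.0)
assert parse_in_range("inRange none 100") == (None, 100.0)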
**Markdown** `Supported`", }, - "validation_rule": rule + "validation_rule": rule, } - - #add expectation for attribute to suite + + # add expectation for attribute to suite self.add_expectation( rule=rule, args=args, meta=meta, validation_expectation=validation_expectation, ) - - - self.context.update_expectation_suite(expectation_suite=self.suite,) - suite_identifier = ExpectationSuiteIdentifier(expectation_suite_name=self.expectation_suite_name) + self.context.update_expectation_suite( + expectation_suite=self.suite, + ) + + suite_identifier = ExpectationSuiteIdentifier( + expectation_suite_name=self.expectation_suite_name + ) self.context.build_data_docs(resource_identifiers=[suite_identifier]) ##Webpage DataDocs opened here: - #self.context.open_data_docs(resource_identifier=suite_identifier) + # self.context.open_data_docs(resource_identifier=suite_identifier) def add_expectation( self, @@ -320,46 +339,45 @@ def add_expectation( args: Dict, meta: Dict, validation_expectation: Dict, - ): + ): """ - Purpose: - Add individual expectation for a rule to the suite - Input: - rule: - validation rule - args: - dict of arguments specifying expectation behavior - meta: - dict of additional information for each expectation - validation_expectation: - dictionary to map between rules and expectations - Returns: - adds expectation to self.suite - + Purpose: + Add individual expectation for a rule to the suite + Input: + rule: + validation rule + args: + dict of arguments specifying expectation behavior + meta: + dict of additional information for each expectation + validation_expectation: + dictionary to map between rules and expectations + Returns: + adds expectation to self.suite + """ # Create an Expectation expectation_configuration = ExpectationConfiguration( # Name of expectation type being added expectation_type=validation_expectation[rule.split(" ")[0]], - - #add arguments and meta message + # add arguments and meta message kwargs={**args}, - meta={**meta} + meta={**meta}, ) # Add the Expectation to the suite self.suite.add_expectation(expectation_configuration=expectation_configuration) def build_checkpoint(self): """ - Purpose: - Build checkpoint to validate manifest - Input: - Returns: - adds checkpoint to self + Purpose: + Build checkpoint to validate manifest + Input: + Returns: + adds checkpoint to self """ - #create manifest checkpoint - self.checkpoint_name = "manifest_checkpoint" - checkpoint_config={ + # create manifest checkpoint + self.checkpoint_name = "manifest_checkpoint" + checkpoint_config = { "name": self.checkpoint_name, "config_version": 1, "class_name": "SimpleCheckpoint", @@ -375,9 +393,9 @@ def build_checkpoint(self): ], } - #self.context.test_yaml_config(yaml.dump(checkpoint_config),return_mode="report_object") + # self.context.test_yaml_config(yaml.dump(checkpoint_config),return_mode="report_object") self.context.add_checkpoint(**checkpoint_config) - + def generate_errors( self, validation_results: Dict, @@ -385,130 +403,137 @@ def generate_errors( errors: List, warnings: List, dmge: DataModelGraphExplorer, - ): + ): """ - Purpose: - Parse results dictionary and generate errors for expectations - Input: - validation_results: - dictionary of results for each expectation - validation_types: - dict of types of errors to generate for each validation rule - errors: - list of errors - warnings: - list of warnings - Returns: - errors: - list of errors - warnings: - list of warnings - self.manifest: - manifest, possibly updated (censored ages) + Purpose: + Parse results 
dictionary and generate errors for expectations
+        Input:
+            validation_results:
+                dictionary of results for each expectation
+            validation_types:
+                dict of types of errors to generate for each validation rule
+            errors:
+                list of errors
+            warnings:
+                list of warnings
+        Returns:
+            errors:
+                list of errors
+            warnings:
+                list of warnings
+            self.manifest:
+                manifest, possibly updated (censored ages)
        """
-        type_dict={
+        type_dict = {
            "float64": float,
            "int64": int,
            "str": str,
        }

-        for result_dict in validation_results[0]['results']:
-
-
+        for result_dict in validation_results[0]["results"]:
            indices = []
            values = []
-            
-            #if the expectaion failed, get infromation to generate error message
-            if not result_dict['success']:
-                errColumn = result_dict['expectation_config']['kwargs']['column']
-                rule = result_dict['expectation_config']['meta']['validation_rule']

+            # if the expectation failed, get information to generate an error message
+            if not result_dict["success"]:
+                errColumn = result_dict["expectation_config"]["kwargs"]["column"]
+                rule = result_dict["expectation_config"]["meta"]["validation_rule"]
+
+                if (
+                    "exception_info" in result_dict.keys()
+                    and result_dict["exception_info"]["exception_message"]
+                ):
+                    raise GreatExpectationsError(
+                        result_dict["exception_info"]["exception_traceback"]
+                    )

-                if 'exception_info' in result_dict.keys() and result_dict['exception_info']['exception_message']:
-                    raise GreatExpectationsError(result_dict['exception_info']['exception_traceback'])
-                
-                #only some expectations explicitly list unexpected values and indices, read or find if not present
-                elif 'unexpected_index_list' in result_dict['result']:
-                    indices = result_dict['result']['unexpected_index_list']
-                    values = result_dict['result']['unexpected_list']
+                # only some expectations explicitly list unexpected values and indices, read or find if not present
+                elif "unexpected_index_list" in result_dict["result"]:
+                    indices = result_dict["result"]["unexpected_index_list"]
+                    values = result_dict["result"]["unexpected_list"]

                # Technically, this shouldn't ever happen, but will keep as a failsafe in case many things go wrong
-                # because type validation is column aggregate expectation and not column map expectation when columns are not of object type, 
+                # because type validation is a column aggregate expectation, not a column map expectation, when columns are not of object type,
                # indices and values cannot be returned
                else:
                    for i, item in enumerate(self.manifest[errColumn]):
-                        observed_type=result_dict['result']['observed_value']
-                        indices.append(i) if isinstance(item,type_dict[observed_type]) else indices
-                        values.append(item) if isinstance(item,type_dict[observed_type]) else values
-
-                #call functions to generate error messages and add to error list
-                if validation_types[rule.split(" ")[0]]['type']=='type_validation':
-                    for row, value in zip(indices,values):
+                        observed_type = result_dict["result"]["observed_value"]
+                        if isinstance(item, type_dict[observed_type]):
+                            indices.append(i)
+                            values.append(item)
+
+                # call functions to generate error messages and add to error list
+                if validation_types[rule.split(" ")[0]]["type"] == "type_validation":
+                    for row, value in zip(indices, values):
                        vr_errors, vr_warnings = GenerateError.generate_type_error(
-                            val_rule = rule,
-                            row_num = str(row+2),
-                            attribute_name = errColumn,
-                            invalid_entry = str(value),
-                            dmge = dmge,
-                            )
+                            val_rule=rule,
+                            row_num=str(row + 2),
+                            attribute_name=errColumn,
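# Illustrative sketch: generate_errors walks the checkpoint output; a failed
# expectation carries the offending row indices and values. A minimal result
# dict (its shape is inferred from the code above) and the row-number
# arithmetic used when reporting:
result_dict = {
    "success": False,
    "expectation_config": {"kwargs": {"column": "Age"}, "meta": {"validation_rule": "int"}},
    "result": {"unexpected_index_list": [0, 3], "unexpected_list": ["abc", "xyz"]},
}
if not result_dict["success"] and "unexpected_index_list" in result_dict["result"]:
    indices = result_dict["result"]["unexpected_index_list"]
    values = result_dict["result"]["unexpected_list"]
    rows = [str(i + 2) for i in indices]  # +2 skips the header row and 0-indexing
    assert rows == ["2", "5"]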
invalid_entry=str(value), + dmge=dmge, + ) if vr_errors: - errors.append(vr_errors) + errors.append(vr_errors) if vr_warnings: - warnings.append(vr_warnings) - elif validation_types[rule.split(" ")[0]]['type']=='regex_validation': - expression=result_dict['expectation_config']['kwargs']['regex'] - for row, value in zip(indices,values): + warnings.append(vr_warnings) + elif validation_types[rule.split(" ")[0]]["type"] == "regex_validation": + expression = result_dict["expectation_config"]["kwargs"]["regex"] + for row, value in zip(indices, values): vr_errors, vr_warnings = GenerateError.generate_regex_error( - val_rule= rule, - reg_expression = expression, - row_num = str(row+2), - module_to_call = 'match', - attribute_name = errColumn, - invalid_entry = value, - dmge = dmge, - ) + val_rule=rule, + reg_expression=expression, + row_num=str(row + 2), + module_to_call="match", + attribute_name=errColumn, + invalid_entry=value, + dmge=dmge, + ) if vr_errors: - errors.append(vr_errors) + errors.append(vr_errors) if vr_warnings: - warnings.append(vr_warnings) - elif validation_types[rule.split(" ")[0]]['type']=='content_validation': + warnings.append(vr_warnings) + elif ( + validation_types[rule.split(" ")[0]]["type"] == "content_validation" + ): vr_errors, vr_warnings = GenerateError.generate_content_error( - val_rule = rule, - attribute_name = errColumn, - row_num = np_array_to_str_list(np.array(indices)+2), - error_val = iterable_to_str_list(values), - dmge = self.dmge - ) + val_rule=rule, + attribute_name=errColumn, + row_num=np_array_to_str_list(np.array(indices) + 2), + error_val=iterable_to_str_list(values), + dmge=self.dmge, + ) if vr_errors: - errors.append(vr_errors) - if rule.startswith('protectAges'): - self.censor_ages(vr_errors,errColumn) - + errors.append(vr_errors) + if rule.startswith("protectAges"): + self.censor_ages(vr_errors, errColumn) + if vr_warnings: - warnings.append(vr_warnings) - if rule.startswith('protectAges'): - self.censor_ages(vr_warnings,errColumn) - + warnings.append(vr_warnings) + if rule.startswith("protectAges"): + self.censor_ages(vr_warnings, errColumn) return errors, warnings def get_age_limits( self, - ): + ): + """ + Purpose: + Get boundaries of ages that need to be censored for different age formats + Input: + Returns: + min_age: + minimum age that will not be censored + max age: + maximum age that will not be censored + """ - Purpose: - Get boundaries of ages that need to be censored for different age formats - Input: - Returns: - min_age: - minimum age that will not be censored - max age: - maximum age that will not be censored - - """ - - min_age = 6550 #days - max_age = 32849 #days + + min_age = 6550 # days + max_age = 32849 # days return min_age, max_age @@ -516,28 +541,30 @@ def censor_ages( self, message: List, col: str, - ): + ): """ - Purpose: - Censor ages in manifest as appropriate - Input: - message: - error or warning message for age validation rule - col: - name of column containing ages - Returns: - updates self.manifest with censored ages - TODO: Speed up conversion from str list to int list + Purpose: + Censor ages in manifest as appropriate + Input: + message: + error or warning message for age validation rule + col: + name of column containing ages + Returns: + updates self.manifest with censored ages + TODO: Speed up conversion from str list to int list """ censor_rows = [] - + for row in message[0]: censor_rows.append(int(row) - 2) - self.manifest.loc[censor_rows,(col)] = 'age censored' + self.manifest.loc[censor_rows, (col)] = 
"age censored" # update the manifest file, so that ages are censored - self.manifest.to_csv(self.manifestPath.replace('.csv','_censored.csv'), index=False) + self.manifest.to_csv( + self.manifestPath.replace(".csv", "_censored.csv"), index=False + ) logging.info("Sensitive ages have been censored.") return diff --git a/schematic/models/commands.py b/schematic/models/commands.py index eeeb7c809..dedab10e2 100644 --- a/schematic/models/commands.py +++ b/schematic/models/commands.py @@ -11,16 +11,22 @@ from jsonschema import ValidationError from schematic.models.metadata import MetadataModel -from schematic.utils.cli_utils import log_value_from_config, query_dict, parse_synIDs, parse_comma_str_to_list +from schematic.utils.cli_utils import ( + log_value_from_config, + query_dict, + parse_synIDs, + parse_comma_str_to_list, +) from schematic.help import model_commands from schematic.exceptions import MissingConfigValueError from schematic.configuration.configuration import CONFIG -logger = logging.getLogger('schematic') +logger = logging.getLogger("schematic") click_log.basic_config(logger) CONTEXT_SETTINGS = dict(help_option_names=["--help", "-h"]) # help options + # invoke_without_command=True -> forces the application not to show aids before losing them with a --h @click.group(context_settings=CONTEXT_SETTINGS, invoke_without_command=True) @click_log.simple_verbosity_option(logger) @@ -39,7 +45,7 @@ def model(ctx, config): # use as `schematic model ...` try: logger.debug(f"Loading config file contents in '{config}'") CONFIG.load_config(config) - ctx.obj = CONFIG + ctx.obj = CONFIG except ValueError as e: logger.error("'--config' not provided or environment variable not set.") logger.exception(e) @@ -76,19 +82,23 @@ def model(ctx, config): # use as `schematic model ...` "--hide_blanks", "-hb", is_flag=True, - help=query_dict(model_commands,("model","submit","hide_blanks")), + help=query_dict(model_commands, ("model", "submit", "hide_blanks")), ) @click.option( "--manifest_record_type", "-mrt", - default='table_file_and_entities', - type=click.Choice(['table_and_file', 'file_only', 'file_and_entities', 'table_file_and_entities'], case_sensitive=True), - help=query_dict(model_commands, ("model", "submit", "manifest_record_type"))) + default="table_file_and_entities", + type=click.Choice( + ["table_and_file", "file_only", "file_and_entities", "table_file_and_entities"], + case_sensitive=True, + ), + help=query_dict(model_commands, ("model", "submit", "manifest_record_type")), +) @click.option( "-rr", "--restrict_rules", is_flag=True, - help=query_dict(model_commands,("model","validate","restrict_rules")), + help=query_dict(model_commands, ("model", "validate", "restrict_rules")), ) @click.option( "-ps", @@ -100,27 +110,36 @@ def model(ctx, config): # use as `schematic model ...` @click.option( "--table_manipulation", "-tm", - default='replace', - type=click.Choice(['replace', 'upsert'], case_sensitive=True), - help=query_dict(model_commands, ("model", "submit", "table_manipulation"))) + default="replace", + type=click.Choice(["replace", "upsert"], case_sensitive=True), + help=query_dict(model_commands, ("model", "submit", "table_manipulation")), +) @click.pass_obj def submit_manifest( - ctx, manifest_path, dataset_id, validate_component, manifest_record_type, use_schema_label, hide_blanks, restrict_rules, project_scope, table_manipulation, + ctx, + manifest_path, + dataset_id, + validate_component, + manifest_record_type, + use_schema_label, + hide_blanks, + restrict_rules, + project_scope, + 
table_manipulation, ): """ Running CLI with manifest validation (optional) and submission options. """ - - jsonld = CONFIG.model_location + + jsonld = CONFIG.model_location log_value_from_config("jsonld", jsonld) metadata_model = MetadataModel( inputMModelLocation=jsonld, inputMModelLocationType="local" ) - manifest_id = metadata_model.submit_metadata_manifest( - path_to_json_ld = jsonld, + path_to_json_ld=jsonld, manifest_path=manifest_path, dataset_id=dataset_id, validate_component=validate_component, @@ -131,7 +150,7 @@ def submit_manifest( project_scope=project_scope, table_manipulation=table_manipulation, ) - + if manifest_id: logger.info( f"File at '{manifest_path}' was successfully associated " @@ -167,7 +186,7 @@ def submit_manifest( "-rr", "--restrict_rules", is_flag=True, - help=query_dict(model_commands,("model","validate","restrict_rules")), + help=query_dict(model_commands, ("model", "validate", "restrict_rules")), ) @click.option( "-ps", @@ -177,14 +196,16 @@ def submit_manifest( help=query_dict(model_commands, ("model", "validate", "project_scope")), ) @click.pass_obj -def validate_manifest(ctx, manifest_path, data_type, json_schema, restrict_rules,project_scope): +def validate_manifest( + ctx, manifest_path, data_type, json_schema, restrict_rules, project_scope +): """ Running CLI for manifest validation. """ if data_type is None: - data_type = CONFIG.manifest_data_type + data_type = CONFIG.manifest_data_type log_value_from_config("data_type", data_type) - + try: len(data_type) == 1 except: @@ -196,7 +217,7 @@ def validate_manifest(ctx, manifest_path, data_type, json_schema, restrict_rules t_validate = perf_counter() - jsonld = CONFIG.model_location + jsonld = CONFIG.model_location log_value_from_config("jsonld", jsonld) metadata_model = MetadataModel( @@ -204,7 +225,11 @@ def validate_manifest(ctx, manifest_path, data_type, json_schema, restrict_rules ) errors, warnings = metadata_model.validateModelManifest( - manifestPath=manifest_path, rootNode=data_type, jsonSchema=json_schema, restrict_rules=restrict_rules, project_scope=project_scope, + manifestPath=manifest_path, + rootNode=data_type, + jsonSchema=json_schema, + restrict_rules=restrict_rules, + project_scope=project_scope, ) if not errors: @@ -216,6 +241,4 @@ def validate_manifest(ctx, manifest_path, data_type, json_schema, restrict_rules else: click.echo(errors) - logger.debug( - f"Total elapsed time {perf_counter()-t_validate} seconds" - ) + logger.debug(f"Total elapsed time {perf_counter()-t_validate} seconds") diff --git a/schematic/models/metadata.py b/schematic/models/metadata.py index 50e718014..833c9af8e 100644 --- a/schematic/models/metadata.py +++ b/schematic/models/metadata.py @@ -12,7 +12,7 @@ from schematic.schemas.data_model_json_schema import DataModelJSONSchema -#TODO: This module should only be aware of the store interface +# TODO: This module should only be aware of the store interface # we shouldn't need to expose Synapse functionality explicitly from schematic.store.synapse import SynapseStorage @@ -34,8 +34,11 @@ class MetadataModel(object): - generate validation schema view of the metadata model """ - def __init__(self, inputMModelLocation: str, inputMModelLocationType: str,) -> None: - + def __init__( + self, + inputMModelLocation: str, + inputMModelLocationType: str, + ) -> None: """Instantiates a MetadataModel object. 
Args: @@ -51,8 +54,8 @@ def __init__(self, inputMModelLocation: str, inputMModelLocationType: str,) -> N self.inputMModelLocation = inputMModelLocation - data_model_parser = DataModelParser(path_to_data_model = self.inputMModelLocation) - #Parse Model + data_model_parser = DataModelParser(path_to_data_model=self.inputMModelLocation) + # Parse Model parsed_data_model = data_model_parser.parse_model() # Instantiate DataModelGraph @@ -138,7 +141,7 @@ def getModelManifest( mg = ManifestGenerator( path_to_json_ld=self.inputMModelLocation, - graph = self.graph_data_model, + graph=self.graph_data_model, title=title, root=rootNode, additional_metadata=additionalMetadata, @@ -208,12 +211,13 @@ def validateModelManifest( ValueError: rootNode not found in metadata model. """ # get validation schema for a given node in the data model, if the user has not provided input validation schema - + if not jsonSchema: - # Instantiate Data Model Json Schema - self.data_model_js = DataModelJSONSchema(jsonld_path=self.inputMModelLocation, graph=self.graph_data_model) - + self.data_model_js = DataModelJSONSchema( + jsonld_path=self.inputMModelLocation, graph=self.graph_data_model + ) + jsonSchema = self.data_model_js.get_json_validation_schema( rootNode, rootNode + "_validation" ) @@ -221,12 +225,14 @@ def validateModelManifest( errors = [] warnings = [] - load_args={ - "dtype":"string", - } + load_args = { + "dtype": "string", + } # get annotations from manifest (array of json annotations corresponding to manifest rows) manifest = load_df( - manifestPath, preserve_raw_input=False, **load_args, + manifestPath, + preserve_raw_input=False, + **load_args, ) # read manifest csv file as is from manifest path # handler for mismatched components/data types @@ -259,24 +265,28 @@ def validateModelManifest( ) return errors, warnings - + # check if suite has been created. If so, delete it if os.path.exists("great_expectations/expectations/Manifest_test_suite.json"): os.remove("great_expectations/expectations/Manifest_test_suite.json") - - errors, warnings, manifest = validate_all(self, - errors=errors, - warnings=warnings, - manifest=manifest, - manifestPath=manifestPath, - dmge=self.dmge, - jsonSchema=jsonSchema, - restrict_rules=restrict_rules, - project_scope=project_scope, - access_token=access_token) + + errors, warnings, manifest = validate_all( + self, + errors=errors, + warnings=warnings, + manifest=manifest, + manifestPath=manifestPath, + dmge=self.dmge, + jsonSchema=jsonSchema, + restrict_rules=restrict_rules, + project_scope=project_scope, + access_token=access_token, + ) return errors, warnings - def populateModelManifest(self, title, manifestPath: str, rootNode: str, return_excel = False) -> str: + def populateModelManifest( + self, title, manifestPath: str, rootNode: str, return_excel=False + ) -> str: """Populate an existing annotations manifest based on a dataframe. TODO: Remove this method; always use getModelManifest instead @@ -291,12 +301,17 @@ def populateModelManifest(self, title, manifestPath: str, rootNode: str, return_ ValueError: rootNode not found in metadata model. 
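# Illustrative sketch: validateModelManifest above loads the manifest with
# dtype="string" so every cell arrives as text before rule-specific checks run.
# The plain-pandas equivalent of that load step (load_df is schematic's own
# wrapper; the CSV content below is made up):
import pandas as pd
from io import StringIO

csv = StringIO("Sample ID,Age\nS1,42\nS2,Not Applicable\n")
manifest = pd.read_csv(csv, dtype="string")
assert str(manifest["Age"].dtype) == "string"  # numbers kept as text for validation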
""" mg = ManifestGenerator( - path_to_data_model=self.inputMModelLocation, graph = self.graph_data_model, title=title, root=rootNode + path_to_data_model=self.inputMModelLocation, + graph=self.graph_data_model, + title=title, + root=rootNode, ) emptyManifestURL = mg.get_manifest() - return mg.populate_manifest_spreadsheet(manifestPath, emptyManifestURL, return_excel = return_excel, title=title) + return mg.populate_manifest_spreadsheet( + manifestPath, emptyManifestURL, return_excel=return_excel, title=title + ) def submit_metadata_manifest( self, @@ -310,7 +325,7 @@ def submit_metadata_manifest( use_schema_label: bool = True, hide_blanks: bool = False, project_scope: List = None, - table_manipulation: str = 'replace' + table_manipulation: str = "replace", ) -> str: """Wrap methods that are responsible for validation of manifests for a given component, and association of the same manifest file with a specified dataset. @@ -325,17 +340,18 @@ def submit_metadata_manifest( ValidationError: If validation against data model was not successful. """ - #TODO: avoid explicitly exposing Synapse store functionality + # TODO: avoid explicitly exposing Synapse store functionality # just instantiate a Store class and let it decide at runtime/config # the store type - syn_store = SynapseStorage(access_token = access_token, project_scope = project_scope) - manifest_id=None - censored_manifest_id=None - restrict_maniest=False - censored_manifest_path=manifest_path.replace('.csv','_censored.csv') + syn_store = SynapseStorage( + access_token=access_token, project_scope=project_scope + ) + manifest_id = None + censored_manifest_id = None + restrict_maniest = False + censored_manifest_path = manifest_path.replace(".csv", "_censored.csv") # check if user wants to perform validation or not if validate_component is not None: - try: # check if the component ("class" in schema) passed as argument is valid (present in schema) or not self.dmge.is_class_in_schema(validate_component) @@ -350,38 +366,42 @@ def submit_metadata_manifest( # automatic JSON schema generation and validation with that JSON schema val_errors, val_warnings = self.validateModelManifest( - manifestPath=manifest_path, rootNode=validate_component, restrict_rules=restrict_rules, project_scope=project_scope, access_token=access_token + manifestPath=manifest_path, + rootNode=validate_component, + restrict_rules=restrict_rules, + project_scope=project_scope, + access_token=access_token, ) # if there are no errors in validation process - if val_errors == []: + if val_errors == []: # upload manifest file from `manifest_path` path to entity with Syn ID `dataset_id` if os.path.exists(censored_manifest_path): censored_manifest_id = syn_store.associateMetadataWithFiles( - dmge = self.dmge, - metadataManifestPath = censored_manifest_path, - datasetId = dataset_id, - manifest_record_type = manifest_record_type, - useSchemaLabel = use_schema_label, - hideBlanks = hide_blanks, + dmge=self.dmge, + metadataManifestPath=censored_manifest_path, + datasetId=dataset_id, + manifest_record_type=manifest_record_type, + useSchemaLabel=use_schema_label, + hideBlanks=hide_blanks, table_manipulation=table_manipulation, ) restrict_maniest = True - + manifest_id = syn_store.associateMetadataWithFiles( - dmge = self.dmge, - metadataManifestPath = manifest_path, - datasetId = dataset_id, - manifest_record_type = manifest_record_type, - useSchemaLabel = use_schema_label, - hideBlanks = hide_blanks, + dmge=self.dmge, + metadataManifestPath=manifest_path, + datasetId=dataset_id, + 
manifest_record_type=manifest_record_type,
+                        useSchemaLabel=use_schema_label,
+                        hideBlanks=hide_blanks,
                        restrict_manifest=restrict_maniest,
                        table_manipulation=table_manipulation,
                    )

                logger.info("No validation errors occurred during validation.")
                return manifest_id
-                
+
            else:
                raise ValidationError(
                    "Manifest could not be validated under provided data model. "
@@ -391,7 +411,7 @@ def submit_metadata_manifest(
        # no need to perform validation, just submit/associate the metadata manifest file
        if os.path.exists(censored_manifest_path):
            censored_manifest_id = syn_store.associateMetadataWithFiles(
-                dmge = self.dmge,
+                dmge=self.dmge,
                metadataManifestPath=censored_manifest_path,
                datasetId=dataset_id,
                manifest_record_type=manifest_record_type,
@@ -400,9 +420,9 @@ def submit_metadata_manifest(
                table_manipulation=table_manipulation,
            )
            restrict_maniest = True
-            
+
        manifest_id = syn_store.associateMetadataWithFiles(
-            dmge = self.dmge,
+            dmge=self.dmge,
            metadataManifestPath=manifest_path,
            datasetId=dataset_id,
            manifest_record_type=manifest_record_type,
diff --git a/schematic/models/validate_attribute.py b/schematic/models/validate_attribute.py
index f923891ba..fdba69454 100644
--- a/schematic/models/validate_attribute.py
+++ b/schematic/models/validate_attribute.py
@@ -5,12 +5,12 @@
 import time
 from time import perf_counter
 from os import getenv
+
 # allows specifying explicit variable types
 from typing import Any, Dict, List, Optional, Text
 from urllib import error
 from urllib.parse import urlparse
-from urllib.request import (HTTPDefaultErrorHandler, OpenerDirector, Request,
-                            urlopen)
+from urllib.request import HTTPDefaultErrorHandler, OpenerDirector, Request, urlopen
 
 import numpy as np
 import pandas as pd
@@ -21,93 +21,105 @@
 from schematic.store.base import BaseStorage
 from schematic.store.synapse import SynapseStorage
 from schematic.utils.validate_rules_utils import validation_rule_info
-from schematic.utils.validate_utils import (comma_separated_list_regex,
-                                            parse_str_series_to_list,
-                                            np_array_to_str_list,
-                                            iterable_to_str_list,
-                                            rule_in_rule_list,
-                                            )
+from schematic.utils.validate_utils import (
+    comma_separated_list_regex,
+    parse_str_series_to_list,
+    np_array_to_str_list,
+    iterable_to_str_list,
+    rule_in_rule_list,
+)
 
 from synapseclient.core.exceptions import SynapseNoCredentialsError
 
 logger = logging.getLogger(__name__)
 
+
 class GenerateError:
-    def generate_schema_error(row_num: str, attribute_name: str, error_msg: str, invalid_entry: str, dmge: DataModelGraphExplorer,)-> List[str]:
-        '''
+    def generate_schema_error(
+        row_num: str,
+        attribute_name: str,
+        error_msg: str,
+        invalid_entry: str,
+        dmge: DataModelGraphExplorer,
+    ) -> List[str]:
+        """
        Purpose: Process error messages generated from schema
        Input:
            - row_num: the row the error occurred on.
            - attribute_name: the attribute the error occurred on.
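# Illustrative sketch: submit_metadata_manifest derives the censored manifest's
# location purely by filename convention, alongside the original manifest (the
# path below is hypothetical):
manifest_path = "data/Biospecimen_manifest.csv"
censored_manifest_path = manifest_path.replace(".csv", "_censored.csv")
assert censored_manifest_path == "data/Biospecimen_manifest_censored.csv"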
- error_msg: Error message
-        '''
+        """
        error_list = []
        warning_list = []
-        
-        #Determine which, if any, message to raise
-        if attribute_name.lower() == 'wrong schema':
-            raises = 'error'
-        else: 
+
+        # Determine which, if any, message to raise
+        if attribute_name.lower() == "wrong schema":
+            raises = "error"
+        else:
            raises = GenerateError.get_message_level(
-                val_rule = 'schema',
-                attribute_name = attribute_name,
-                dmge = dmge,
-                )
+                val_rule="schema",
+                attribute_name=attribute_name,
+                dmge=dmge,
+            )

-        #if a message needs to be raised, get the approrpiate function to do so
+        # if a message needs to be raised, get the appropriate function to do so
        if raises:
-            logLevel = getattr(logger,raises)
+            logLevel = getattr(logger, raises)
        else:
            return error_list, warning_list

-
        error_col = attribute_name  # Attribute name
        error_row = row_num  # index row of the manifest where the error presented.
        error_message = error_msg

        arg_error_string = (
-            f"For the attribute '{error_col}', on row {error_row}, {error_message}." 
+            f"For the attribute '{error_col}', on row {error_row}, {error_message}."
        )
        logLevel(arg_error_string)

-        if raises == 'error':
+        if raises == "error":
            error_list = [error_row, error_col, error_message, invalid_entry]
-        elif raises == 'warning':
+        elif raises == "warning":
            warning_list = [error_row, error_col, error_message, invalid_entry]

-        return error_list, warning_list 
+        return error_list, warning_list

    def generate_list_error(
-        list_string: str, row_num: str, attribute_name: str, list_error: str,
-        invalid_entry:str, dmge: DataModelGraphExplorer, val_rule: str,
+        list_string: str,
+        row_num: str,
+        attribute_name: str,
+        list_error: str,
+        invalid_entry: str,
+        dmge: DataModelGraphExplorer,
+        val_rule: str,
    ) -> List[str]:
        """
-            Purpose:
-                If an error is found in the string formatting, detect and record
-                an error message.
-            Input:
-                - list_string: the user input list, that is represented as a string.
-                - row_num: the row the error occurred on.
-                - attribute_name: the attribute the error occurred on.
-            Returns:
-                logger.error or logger.warning.
-                Errors: List[str] Error details for further storage.
-                warnings: List[str] Warning details for further storage.
-            """
+        Purpose:
+            If an error is found in the string formatting, detect and record
+            an error message.
+        Input:
+            - list_string: the user input list, that is represented as a string.
+            - row_num: the row the error occurred on.
+            - attribute_name: the attribute the error occurred on.
+        Returns:
+            logger.error or logger.warning.
+            Errors: List[str] Error details for further storage.
+            warnings: List[str] Warning details for further storage.
+ """ error_list = [] warning_list = [] - - #Determine which, if any, message to raise + + # Determine which, if any, message to raise raises = GenerateError.get_message_level( - val_rule = val_rule, - attribute_name = attribute_name, - dmge = dmge, - ) + val_rule=val_rule, + attribute_name=attribute_name, + dmge=dmge, + ) - #if a message needs to be raised, get the approrpiate function to do so + # if a message needs to be raised, get the approrpiate function to do so if raises: - logLevel = getattr(logger,raises) + logLevel = getattr(logger, raises) else: return error_list, warning_list @@ -122,15 +134,15 @@ def generate_list_error( error_col = attribute_name # Attribute name error_message = error_str error_val = invalid_entry - #return error and empty list for warnings - - if raises == 'error': + # return error and empty list for warnings + + if raises == "error": error_list = [error_row, error_col, error_message, error_val] - #return warning and empty list for errors - elif raises == 'warning': + # return warning and empty list for errors + elif raises == "warning": warning_list = [error_row, error_col, error_message, error_val] - - return error_list, warning_list + + return error_list, warning_list def generate_regex_error( val_rule: str, @@ -142,33 +154,33 @@ def generate_regex_error( dmge: DataModelGraphExplorer, ) -> List[str]: """ - Purpose: - Generate an logging error as well as a stored error message, when - a regex error is encountered. - Input: - val_rule: str, defined in the schema. - reg_expression: str, defined in the schema - row_num: str, row where the error was detected - module_to_call: re module specified in the schema - attribute_name: str, attribute being validated - Returns: - logger.error or logger.warning. - Errors: List[str] Error details for further storage. - warnings: List[str] Warning details for further storage. - """ + Purpose: + Generate an logging error as well as a stored error message, when + a regex error is encountered. + Input: + val_rule: str, defined in the schema. + reg_expression: str, defined in the schema + row_num: str, row where the error was detected + module_to_call: re module specified in the schema + attribute_name: str, attribute being validated + Returns: + logger.error or logger.warning. + Errors: List[str] Error details for further storage. + warnings: List[str] Warning details for further storage. 
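# Illustrative sketch: every generate_*_error above resolves its logging
# function dynamically from the message level that get_message_level returns
# ("error" or "warning"):
import logging

logger = logging.getLogger("sketch")
raises = "warning"  # stand-in for GenerateError.get_message_level(...)
logLevel = getattr(logger, raises)  # the bound method logger.warning
logLevel("On row 2, value 'abc' does not match rule 'int'.")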
+ """ error_list = [] warning_list = [] - - #Determine which, if any, message to raise + + # Determine which, if any, message to raise raises = GenerateError.get_message_level( - val_rule = val_rule, - attribute_name = attribute_name, - dmge = dmge, - ) + val_rule=val_rule, + attribute_name=attribute_name, + dmge=dmge, + ) - #if a message needs to be raised, get the approrpiate function to do so + # if a message needs to be raised, get the approrpiate function to do so if raises: - logLevel = getattr(logger,raises) + logLevel = getattr(logger, raises) else: return error_list, warning_list @@ -182,45 +194,49 @@ def generate_regex_error( error_message = regex_error_string error_val = invalid_entry - #return error and empty list for warnings - if raises == 'error': + # return error and empty list for warnings + if raises == "error": error_list = [error_row, error_col, error_message, error_val] - #return warning and empty list for errors - elif raises == 'warning': + # return warning and empty list for errors + elif raises == "warning": warning_list = [error_row, error_col, error_message, error_val] - - return error_list, warning_list + + return error_list, warning_list def generate_type_error( - val_rule: str, row_num: str, attribute_name: str, invalid_entry:str, dmge: DataModelGraphExplorer, + val_rule: str, + row_num: str, + attribute_name: str, + invalid_entry: str, + dmge: DataModelGraphExplorer, ) -> List[str]: """ - Purpose: - Generate an logging error as well as a stored error message, when - a type error is encountered. - Input: - val_rule: str, defined in the schema. - row_num: str, row where the error was detected - attribute_name: str, attribute being validated - Returns: - logger.error or logger.warning. - Errors: List[str] Error details for further storage. - warnings: List[str] Warning details for further storage. - """ + Purpose: + Generate an logging error as well as a stored error message, when + a type error is encountered. + Input: + val_rule: str, defined in the schema. + row_num: str, row where the error was detected + attribute_name: str, attribute being validated + Returns: + logger.error or logger.warning. + Errors: List[str] Error details for further storage. + warnings: List[str] Warning details for further storage. 
+ """ error_list = [] warning_list = [] - #Determine which, if any, message to raise + # Determine which, if any, message to raise raises = GenerateError.get_message_level( - dmge = dmge, - attribute_name = attribute_name, - val_rule = val_rule, - ) + dmge=dmge, + attribute_name=attribute_name, + val_rule=val_rule, + ) - #if a message needs to be raised, get the approrpiate function to do so + # if a message needs to be raised, get the approrpiate function to do so if raises: - logLevel = getattr(logger,raises) + logLevel = getattr(logger, raises) else: return error_list, warning_list @@ -233,73 +249,82 @@ def generate_type_error( error_message = type_error_str error_val = invalid_entry - #TODO: not sure if this i needed (to split) - validation_rules=dmge.get_node_validation_rules(node_display_name=attribute_name) + # TODO: not sure if this i needed (to split) + validation_rules = dmge.get_node_validation_rules( + node_display_name=attribute_name + ) - #TODO: Can remove when handling updated so split within graph - if validation_rules and '::' in validation_rules[0]: - validation_rules = validation_rules[0].split("::") + # TODO: Can remove when handling updated so split within graph + if validation_rules and "::" in validation_rules[0]: + validation_rules = validation_rules[0].split("::") # If IsNA rule is being used to allow `Not Applicable` entries, do not log a message - if error_val.lower() == 'not applicable' and rule_in_rule_list('IsNA', validation_rules): - pass + if error_val.lower() == "not applicable" and rule_in_rule_list( + "IsNA", validation_rules + ): + pass else: logLevel(type_error_str) - #return error and empty list for warnings - if raises == 'error': + # return error and empty list for warnings + if raises == "error": error_list = [error_row, error_col, error_message, error_val] - #return warning and empty list for errors - elif raises == 'warning': + # return warning and empty list for errors + elif raises == "warning": warning_list = [error_row, error_col, error_message, error_val] - - return error_list, warning_list + + return error_list, warning_list def generate_url_error( - url: str, url_error: str, row_num: str, attribute_name: str, argument: str, - invalid_entry:str, dmge: DataModelGraphExplorer, val_rule: str, + url: str, + url_error: str, + row_num: str, + attribute_name: str, + argument: str, + invalid_entry: str, + dmge: DataModelGraphExplorer, + val_rule: str, ) -> List[str]: """ - Purpose: - Generate an logging error as well as a stored error message, when - a URL error is encountered. - - Types of errors included: - - Invalid URL: Refers to a URL that brings up an error when - attempted to be accessed such as a HTTPError 404 Webpage Not Found. - - Argument Error: this refers to a valid URL that does not - contain within it the arguments specified by the schema, - such as 'protocols.io' or 'dox.doi.org' - - Random Entry: this refers to an entry try that is not - validated to be a URL. - e.g. 'lkejrlei', '0', 'not applicable' - Input: - url: str, that was input by the user. - url_error: str, error detected in url_validation() - attribute_name: str, attribute being validated - argument: str, argument being validated. - Returns: - logger.error or logger.warning. - Errors: List[str] Error details for further storage. - warnings: List[str] Warning details for further storage. - """ + Purpose: + Generate an logging error as well as a stored error message, when + a URL error is encountered. 
+ + Types of errors included: + - Invalid URL: Refers to a URL that brings up an error when + attempted to be accessed such as a HTTPError 404 Webpage Not Found. + - Argument Error: this refers to a valid URL that does not + contain within it the arguments specified by the schema, + such as 'protocols.io' or 'dox.doi.org' + - Random Entry: this refers to an entry try that is not + validated to be a URL. + e.g. 'lkejrlei', '0', 'not applicable' + Input: + url: str, that was input by the user. + url_error: str, error detected in url_validation() + attribute_name: str, attribute being validated + argument: str, argument being validated. + Returns: + logger.error or logger.warning. + Errors: List[str] Error details for further storage. + warnings: List[str] Warning details for further storage. + """ error_list = [] warning_list = [] - - #Determine which, if any, message to raise + + # Determine which, if any, message to raise raises = GenerateError.get_message_level( - val_rule = val_rule, - attribute_name = attribute_name, - dmge = dmge, - ) + val_rule=val_rule, + attribute_name=attribute_name, + dmge=dmge, + ) - #if a message needs to be raised, get the approrpiate function to do so + # if a message needs to be raised, get the approrpiate function to do so if raises: - logLevel = getattr(logger,raises) + logLevel = getattr(logger, raises) else: return error_list, warning_list - error_row = row_num # index row of the manifest where the error presented. error_col = attribute_name # Attribute name if url_error == "invalid_url": @@ -328,99 +353,99 @@ def generate_url_error( error_message = random_entry_error_str error_val = f"URL Error: Random Entry" - #return error and empty list for warnings - if raises == 'error': + # return error and empty list for warnings + if raises == "error": error_list = [error_row, error_col, error_message, error_val] - #return warning and empty list for errors - elif raises == 'warning': + # return warning and empty list for errors + elif raises == "warning": warning_list = [error_row, error_col, error_message, error_val] - - return error_list, warning_list + + return error_list, warning_list def generate_cross_warning( val_rule: str, attribute_name: str, dmge: DataModelGraphExplorer, - matching_manifests = [], - missing_manifest_ID = None, - invalid_entry = None, - row_num = None, - + matching_manifests=[], + missing_manifest_ID=None, + invalid_entry=None, + row_num=None, ) -> List[str]: """ - Purpose: - Generate an logging error as well as a stored error message, when - a cross validation error is encountered. - Input: - val_rule: str, defined in the schema. - matching_manifests: list of manifests with all values in the target attribute present - manifest_ID: str, synID of the target manifest missing the source value - attribute_name: str, attribute being validated - invalid_entry: str, value present in source manifest that is missing in the target - row_num: row in source manifest with value missing in target manifests - Returns: - logger.error or logger.warning. - Errors: List[str] Error details for further storage. - warnings: List[str] Warning details for further storage. - """ + Purpose: + Generate an logging error as well as a stored error message, when + a cross validation error is encountered. + Input: + val_rule: str, defined in the schema. 
+ matching_manifests: list of manifests with all values in the target attribute present + manifest_ID: str, synID of the target manifest missing the source value + attribute_name: str, attribute being validated + invalid_entry: str, value present in source manifest that is missing in the target + row_num: row in source manifest with value missing in target manifests + Returns: + logger.error or logger.warning. + Errors: List[str] Error details for further storage. + warnings: List[str] Warning details for further storage. + """ error_list = [] warning_list = [] - - #Determine which, if any, message to raise + + # Determine which, if any, message to raise raises = GenerateError.get_message_level( - val_rule = val_rule, - attribute_name = attribute_name, - dmge = dmge, - ) + val_rule=val_rule, + attribute_name=attribute_name, + dmge=dmge, + ) - #if a message needs to be raised, get the approrpiate function to do so + # if a message needs to be raised, get the approrpiate function to do so if raises: - logLevel = getattr(logger,raises) + logLevel = getattr(logger, raises) else: return error_list, warning_list - if val_rule.__contains__('matchAtLeast'): - cross_error_str = ( - f"Value(s) {invalid_entry} from row(s) {row_num} of the attribute {attribute_name} in the source manifest are missing." ) - cross_error_str += f" Manifest(s) {missing_manifest_ID} are missing the value(s)." if missing_manifest_ID else "" - - elif val_rule.__contains__('matchExactly'): + if val_rule.__contains__("matchAtLeast"): + cross_error_str = f"Value(s) {invalid_entry} from row(s) {row_num} of the attribute {attribute_name} in the source manifest are missing." + cross_error_str += ( + f" Manifest(s) {missing_manifest_ID} are missing the value(s)." + if missing_manifest_ID + else "" + ) + + elif val_rule.__contains__("matchExactly"): if matching_manifests != []: - cross_error_str = ( - f"All values from attribute {attribute_name} in the source manifest are present in {len(matching_manifests)} manifests instead of only 1.") - cross_error_str += f" Manifests {matching_manifests} match the values in the source attribute." if matching_manifests else "" - - elif val_rule.__contains__('set'): - cross_error_str = ( - f"No matches for the values from attribute {attribute_name} in the source manifest are present in any other manifests instead of being present in exactly 1. " + cross_error_str = f"All values from attribute {attribute_name} in the source manifest are present in {len(matching_manifests)} manifests instead of only 1." + cross_error_str += ( + f" Manifests {matching_manifests} match the values in the source attribute." + if matching_manifests + else "" ) - elif val_rule.__contains__('value'): - cross_error_str = ( - f"Value(s) {invalid_entry} from row(s) {row_num} of the attribute {attribute_name} in the source manifest are not present in only one other manifest. " - ) + + elif val_rule.__contains__("set"): + cross_error_str = f"No matches for the values from attribute {attribute_name} in the source manifest are present in any other manifests instead of being present in exactly 1. " + elif val_rule.__contains__("value"): + cross_error_str = f"Value(s) {invalid_entry} from row(s) {row_num} of the attribute {attribute_name} in the source manifest are not present in only one other manifest. " logLevel(cross_error_str) error_row = row_num # index row of the manifest where the error presented. 
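# --- Illustrative aside (not part of the patch): the check that produces these
# `matchAtLeast` messages boils down to a pandas membership test, shown here as
# a minimal sketch. The Series names and values are made up for illustration;
# the +2 offset mirrors the `row_num=str(i + 2)` convention used throughout
# (one header row plus 1-based row numbering).
import pandas as pd

source = pd.Series(["a", "b", "c"], name="PatientID")  # source manifest column
target = pd.Series(["a", "c"], name="PatientID")       # same column in a target manifest

missing_values = source[~source.isin(target)]          # values absent from the target
row_nums = [str(i + 2) for i in missing_values.index]  # manifest rows to report
print(row_nums, missing_values.tolist())               # -> ['3'] ['b']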
error_col = attribute_name # Attribute name error_message = cross_error_str - error_val = invalid_entry #Value from source manifest missing from targets - - #return error and empty list for warnings - if raises == 'error': + error_val = invalid_entry # Value from source manifest missing from targets + + # return error and empty list for warnings + if raises == "error": error_list = [error_row, error_col, error_message, error_val] - #return warning and empty list for errors - elif raises == 'warning': + # return warning and empty list for errors + elif raises == "warning": warning_list = [error_row, error_col, error_message, error_val] - - return error_list, warning_list + return error_list, warning_list def generate_content_error( val_rule: str, attribute_name: str, dmge: DataModelGraphExplorer, - row_num = None, - error_val = None, + row_num=None, + error_val=None, ) -> (List[str], List[str]): """ Purpose: @@ -448,84 +473,74 @@ def generate_content_error( if error_val: error_val = iterable_to_str_list(set(error_val)) - #Determine which, if any, message to raise + # Determine which, if any, message to raise raises = GenerateError.get_message_level( val_rule=val_rule, - attribute_name = attribute_name, - dmge = dmge, - ) + attribute_name=attribute_name, + dmge=dmge, + ) - #if a message needs to be raised, get the approrpiate function to do so + # if a message needs to be raised, get the approrpiate function to do so if raises: - logLevel = getattr(logger,raises) + logLevel = getattr(logger, raises) else: return error_list, warning_list - - #log warning or error message - if val_rule.startswith('recommended'): - content_error_str = ( - f"Column {attribute_name} is recommended but empty." - ) + + # log warning or error message + if val_rule.startswith("recommended"): + content_error_str = f"Column {attribute_name} is recommended but empty." logLevel(content_error_str) error_message = content_error_str - if raises == 'error': + if raises == "error": error_list = [error_col, error_message] - #return warning and empty list for errors - elif raises == 'warning': + # return warning and empty list for errors + elif raises == "warning": warning_list = [error_col, error_message] return error_list, warning_list - elif val_rule.startswith('unique'): - content_error_str = ( - f"Column {attribute_name} has the duplicate value(s) {error_val} in rows: {row_num}." - ) + elif val_rule.startswith("unique"): + content_error_str = f"Column {attribute_name} has the duplicate value(s) {error_val} in rows: {row_num}." - elif val_rule.startswith('protectAges'): - content_error_str = ( - f"Column {attribute_name} contains ages that should be censored in rows: {row_num}." - ) + elif val_rule.startswith("protectAges"): + content_error_str = f"Column {attribute_name} contains ages that should be censored in rows: {row_num}." - elif val_rule.startswith('inRange'): - content_error_str = ( - f"{attribute_name} values in rows {row_num} are out of the specified range." - ) - elif val_rule.startswith('date'): + elif val_rule.startswith("inRange"): + content_error_str = f"{attribute_name} values in rows {row_num} are out of the specified range." + elif val_rule.startswith("date"): content_error_str = ( f"{attribute_name} values in rows {row_num} are not parsable as dates." - ) - elif val_rule.startswith('IsNA'): - content_error_str = ( - f"{attribute_name} values in rows {row_num} are not marked as 'Not Applicable'." 
- ) + ) + elif val_rule.startswith("IsNA"): + content_error_str = f"{attribute_name} values in rows {row_num} are not marked as 'Not Applicable'." if val_rule != "IsNA": logLevel(content_error_str) - error_row = row_num + error_row = row_num error_message = content_error_str - #return error and empty list for warnings - if raises == 'error': + # return error and empty list for warnings + if raises == "error": error_list = [error_row, error_col, error_message, error_val] - #return warning and empty list for errors - elif raises == 'warning': + # return warning and empty list for errors + elif raises == "warning": warning_list = [error_row, error_col, error_message, error_val] - + return error_list, warning_list def get_message_level( dmge: DataModelGraphExplorer, attribute_name: str, val_rule: str, - ) -> str: + ) -> str: """ Purpose: Determine whether an error or warning message should be logged and displayed - - if node is not required, + + if node is not required, return warning - if node is recommended and requried, - return None + if node is recommended and requried, + return None for other rules, parse possible, if not use default specified in validation_rule_info Input: @@ -541,22 +556,26 @@ def get_message_level( rule_parts = val_rule.split(" ") rule_info = validation_rule_info() - #set message level to default and change after - if rule_parts[0] != 'schema': - level = rule_info[rule_parts[0]]['default_message_level'] + # set message level to default and change after + if rule_parts[0] != "schema": + level = rule_info[rule_parts[0]]["default_message_level"] # Parse rule for level, set to default if not specified - if rule_parts[-1].lower() == 'error' or rule_parts[0] == 'schema': - level = 'error' - elif rule_parts[-1].lower() == 'warning': - level = 'warning' + if rule_parts[-1].lower() == "error" or rule_parts[0] == "schema": + level = "error" + elif rule_parts[-1].lower() == "warning": + level = "warning" elif not dmge.get_node_required(node_display_name=attribute_name): # If not required raise warnings to notify - level = 'warning' - elif dmge.get_node_required(node_display_name=attribute_name) and 'recommended' in val_rule: + level = "warning" + elif ( + dmge.get_node_required(node_display_name=attribute_name) + and "recommended" in val_rule + ): level = None - + return level + class ValidateAttribute(object): """ A collection of functions to validate manifest attributes. @@ -572,37 +591,48 @@ class ValidateAttribute(object): - Add string length validator """ - def get_target_manifests(target_component, project_scope: List, access_token: str = None): + def get_target_manifests( + target_component, project_scope: List, access_token: str = None + ): t_manifest_search = perf_counter() - target_manifest_IDs=[] - target_dataset_IDs=[] - - #login + target_manifest_IDs = [] + target_dataset_IDs = [] + + # login try: - synStore = SynapseStorage(access_token=access_token, project_scope=project_scope) + synStore = SynapseStorage( + access_token=access_token, project_scope=project_scope + ) except SynapseNoCredentialsError as e: raise ValueError( "No Synapse credentials were provided. Credentials must be provided to utilize cross-manfiest validation functionality." 
- ) from e + ) from e - #Get list of all projects user has access to + # Get list of all projects user has access to projects = synStore.getStorageProjects(project_scope=project_scope) for project in projects: - - #get all manifests associated with datasets in the projects - target_datasets=synStore.getProjectManifests(projectId=project[0]) + # get all manifests associated with datasets in the projects + target_datasets = synStore.getProjectManifests(projectId=project[0]) - #If the manifest includes the target component, include synID in list + # If the manifest includes the target component, include synID in list for target_dataset in target_datasets: - if target_component == target_dataset[-1][0].replace(" ","").lower() and target_dataset[1][0] != "": + if ( + target_component == target_dataset[-1][0].replace(" ", "").lower() + and target_dataset[1][0] != "" + ): target_manifest_IDs.append(target_dataset[1][0]) target_dataset_IDs.append(target_dataset[0][0]) - logger.debug(f"Cross manifest gathering elapsed time {perf_counter()-t_manifest_search}") - return synStore, target_manifest_IDs, target_dataset_IDs + logger.debug( + f"Cross manifest gathering elapsed time {perf_counter()-t_manifest_search}" + ) + return synStore, target_manifest_IDs, target_dataset_IDs def list_validation( - self, val_rule: str, manifest_col: pd.core.series.Series, dmge: DataModelGraphExplorer, + self, + val_rule: str, + manifest_col: pd.core.series.Series, + dmge: DataModelGraphExplorer, ) -> (List[List[str]], List[List[str]], pd.core.series.Series): """ Purpose: @@ -625,32 +655,30 @@ def list_validation( manifest_col = manifest_col.astype(str) csv_re = comma_separated_list_regex() - rule_parts=val_rule.lower().split(" ") + rule_parts = val_rule.lower().split(" ") if len(rule_parts) > 1: - list_robustness=rule_parts[1] + list_robustness = rule_parts[1] else: - list_robustness = 'strict' - + list_robustness = "strict" - if list_robustness == 'strict': - # This will capture any if an entry is not formatted properly. Only for strict lists + if list_robustness == "strict": + # This will capture any if an entry is not formatted properly. Only for strict lists for i, list_string in enumerate(manifest_col): - if not re.fullmatch(csv_re,list_string): + if not re.fullmatch(csv_re, list_string): list_error = "not_comma_delimited" vr_errors, vr_warnings = GenerateError.generate_list_error( - list_string, - row_num=str(i + 2), - attribute_name=manifest_col.name, - list_error=list_error, - invalid_entry=manifest_col[i], - dmge = dmge, - val_rule = val_rule, - ) + list_string, + row_num=str(i + 2), + attribute_name=manifest_col.name, + list_error=list_error, + invalid_entry=manifest_col[i], + dmge=dmge, + val_rule=val_rule, + ) if vr_errors: errors.append(vr_errors) if vr_warnings: warnings.append(vr_warnings) - # Convert string to list. 
manifest_col = parse_str_series_to_list(manifest_col) @@ -658,7 +686,10 @@ def list_validation( return errors, warnings, manifest_col def regex_validation( - self, val_rule: str, manifest_col: pd.core.series.Series, dmge: DataModelGraphExplorer, + self, + val_rule: str, + manifest_col: pd.core.series.Series, + dmge: DataModelGraphExplorer, ) -> (List[List[str]], List[List[str]]): """ Purpose: @@ -671,9 +702,9 @@ def regex_validation( - dmge: DataModelGraphExplorer Object Using this module requres validation rules written in the following manner: 'regex module regular expression' - - regex: is an exact string specifying that the input is to be validated as a + - regex: is an exact string specifying that the input is to be validated as a regular expression. - - module: is the name of the module within re to run ie. search. + - module: is the name of the module within re to run ie. search. - regular_expression: is the regular expression with which to validate the user input. Returns: @@ -682,7 +713,7 @@ def regex_validation( logger.error or logger.warning. Errors: List[str] Error details for further storage. warnings: List[str] Warning details for further storage. - TODO: + TODO: move validation to convert step. """ @@ -700,11 +731,13 @@ def regex_validation( errors = [] warnings = [] - validation_rules = dmge.get_node_validation_rules(node_display_name=manifest_col.name) - if validation_rules and '::' in validation_rules[0]: - validation_rules = validation_rules[0].split("::") + validation_rules = dmge.get_node_validation_rules( + node_display_name=manifest_col.name + ) + if validation_rules and "::" in validation_rules[0]: + validation_rules = validation_rules[0].split("::") # Handle case where validating re's within a list. - if re.search('list',"|".join(validation_rules)): + if re.search("list", "|".join(validation_rules)): if type(manifest_col[0]) == str: # Convert string to list. 
manifest_col = parse_str_series_to_list(manifest_col) @@ -716,14 +749,14 @@ def regex_validation( re_to_check ): vr_errors, vr_warnings = GenerateError.generate_regex_error( - val_rule = val_rule, - reg_expression = reg_expression, - row_num=str(i + 2), - module_to_call=reg_exp_rules[1], - attribute_name=manifest_col.name, - invalid_entry=manifest_col[i], - dmge = dmge, - ) + val_rule=val_rule, + reg_expression=reg_expression, + row_num=str(i + 2), + module_to_call=reg_exp_rules[1], + attribute_name=manifest_col.name, + invalid_entry=manifest_col[i], + dmge=dmge, + ) if vr_errors: errors.append(vr_errors) if vr_warnings: @@ -737,14 +770,14 @@ def regex_validation( re_to_check ): vr_errors, vr_warnings = GenerateError.generate_regex_error( - val_rule = val_rule, - reg_expression = reg_expression, - row_num=str(i + 2), - module_to_call=reg_exp_rules[1], - attribute_name=manifest_col.name, - invalid_entry=manifest_col[i], - dmge = dmge, - ) + val_rule=val_rule, + reg_expression=reg_expression, + row_num=str(i + 2), + module_to_call=reg_exp_rules[1], + attribute_name=manifest_col.name, + invalid_entry=manifest_col[i], + dmge=dmge, + ) if vr_errors: errors.append(vr_errors) if vr_warnings: @@ -753,7 +786,10 @@ def regex_validation( return errors, warnings def type_validation( - self, val_rule: str, manifest_col: pd.core.series.Series, dmge: DataModelGraphExplorer, + self, + val_rule: str, + manifest_col: pd.core.series.Series, + dmge: DataModelGraphExplorer, ) -> (List[List[str]], List[List[str]]): """ Purpose: @@ -775,10 +811,10 @@ def type_validation( Convert all inputs to .lower() just to prevent any entry errors. """ specified_type = { - 'num': (int, np.int64, float), - 'int': (int, np.int64), - 'float': (float), - 'str': (str), + "num": (int, np.int64, float), + "int": (int, np.int64), + "float": (float), + "str": (str), } errors = [] @@ -788,12 +824,12 @@ def type_validation( for i, value in enumerate(manifest_col): if bool(value) and not isinstance(value, specified_type[val_rule]): vr_errors, vr_warnings = GenerateError.generate_type_error( - val_rule = val_rule , - row_num=str(i + 2), - attribute_name=manifest_col.name, - invalid_entry=str(manifest_col[i]), - dmge = dmge, - ) + val_rule=val_rule, + row_num=str(i + 2), + attribute_name=manifest_col.name, + invalid_entry=str(manifest_col[i]), + dmge=dmge, + ) if vr_errors: errors.append(vr_errors) if vr_warnings: @@ -802,19 +838,21 @@ def type_validation( for i, value in enumerate(manifest_col): if bool(value) and not isinstance(value, specified_type[val_rule]): vr_errors, vr_warnings = GenerateError.generate_type_error( - val_rule = val_rule, - row_num=str(i + 2), - attribute_name=manifest_col.name, - invalid_entry=str(manifest_col[i]), - dmge = dmge, - ) + val_rule=val_rule, + row_num=str(i + 2), + attribute_name=manifest_col.name, + invalid_entry=str(manifest_col[i]), + dmge=dmge, + ) if vr_errors: errors.append(vr_errors) if vr_warnings: warnings.append(vr_warnings) return errors, warnings - def url_validation(self, val_rule: str, manifest_col: str, dmge: DataModelGraphExplorer) -> (List[List[str]], List[List[str]]): + def url_validation( + self, val_rule: str, manifest_col: str, dmge: DataModelGraphExplorer + ) -> (List[List[str]], List[List[str]]): """ Purpose: Validate URL's submitted for a particular attribute in a manifest. 
@@ -837,7 +875,7 @@ def url_validation(self, val_rule: str, manifest_col: str, dmge: DataModelGraphE for i, url in enumerate(manifest_col): # Check if a random phrase, string or number was added and # log the appropriate error. - if not isinstance(url,str) or not ( + if not isinstance(url, str) or not ( urlparse(url).scheme + urlparse(url).netloc + urlparse(url).params @@ -848,15 +886,15 @@ def url_validation(self, val_rule: str, manifest_col: str, dmge: DataModelGraphE url_error = "random_entry" valid_url = False vr_errors, vr_warnings = GenerateError.generate_url_error( - url, - url_error=url_error, - row_num=str(i + 2), - attribute_name=manifest_col.name, - argument=url_args, - invalid_entry=manifest_col[i], - dmge = dmge, - val_rule = val_rule, - ) + url, + url_error=url_error, + row_num=str(i + 2), + attribute_name=manifest_col.name, + argument=url_args, + invalid_entry=manifest_col[i], + dmge=dmge, + val_rule=val_rule, + ) if vr_errors: errors.append(vr_errors) if vr_warnings: @@ -876,15 +914,15 @@ def url_validation(self, val_rule: str, manifest_col: str, dmge: DataModelGraphE valid_url = False url_error = "invalid_url" vr_errors, vr_warnings = GenerateError.generate_url_error( - url, - url_error=url_error, - row_num=str(i + 2), - attribute_name=manifest_col.name, - argument=url_args, - invalid_entry=manifest_col[i], - dmge = dmge, - val_rule = val_rule, - ) + url, + url_error=url_error, + row_num=str(i + 2), + attribute_name=manifest_col.name, + argument=url_args, + invalid_entry=manifest_col[i], + dmge=dmge, + val_rule=val_rule, + ) if vr_errors: errors.append(vr_errors) if vr_warnings: @@ -896,15 +934,15 @@ def url_validation(self, val_rule: str, manifest_col: str, dmge: DataModelGraphE if arg not in url: url_error = "arg_error" vr_errors, vr_warnings = GenerateError.generate_url_error( - url, - url_error=url_error, - row_num=str(i + 2), - attribute_name=manifest_col.name, - argument=arg, - invalid_entry=manifest_col[i], - dmge = dmge, - val_rule = val_rule, - ) + url, + url_error=url_error, + row_num=str(i + 2), + attribute_name=manifest_col.name, + argument=arg, + invalid_entry=manifest_col[i], + dmge=dmge, + val_rule=val_rule, + ) if vr_errors: errors.append(vr_errors) if vr_warnings: @@ -912,7 +950,12 @@ def url_validation(self, val_rule: str, manifest_col: str, dmge: DataModelGraphE return errors, warnings def cross_validation( - self, val_rule: str, manifest_col: pd.core.series.Series, project_scope: List, dmge: DataModelGraphExplorer, access_token: str, + self, + val_rule: str, + manifest_col: pd.core.series.Series, + project_scope: List, + dmge: DataModelGraphExplorer, + access_token: str, ) -> List[List[str]]: """ Purpose: @@ -924,45 +967,51 @@ def cross_validation( attribute in the manifest - dmge: DataModelGraphExplorer Object Output: - This function will return errors when values in the current manifest's attribute + This function will return errors when values in the current manifest's attribute are not fully present in the correct amount of other manifests. 
""" errors = [] warnings = [] missing_values = {} - missing_manifest_log={} - present_manifest_log=[] + missing_manifest_log = {} + present_manifest_log = [] target_column = pd.Series(dtype=object) - #parse sources and targets - source_attribute=manifest_col.name + # parse sources and targets + source_attribute = manifest_col.name [target_component, target_attribute] = val_rule.lower().split(" ")[1].split(".") - scope=val_rule.lower().split(" ")[2] - target_column.name=target_attribute - - - #Get IDs of manifests with target component - synStore, target_manifest_IDs, target_dataset_IDs = ValidateAttribute.get_target_manifests(target_component, project_scope, access_token) + scope = val_rule.lower().split(" ")[2] + target_column.name = target_attribute + + # Get IDs of manifests with target component + ( + synStore, + target_manifest_IDs, + target_dataset_IDs, + ) = ValidateAttribute.get_target_manifests( + target_component, project_scope, access_token + ) t_cross_manifest = perf_counter() - #Read each manifest - for target_manifest_ID, target_dataset_ID in zip(target_manifest_IDs,target_dataset_IDs): + # Read each manifest + for target_manifest_ID, target_dataset_ID in zip( + target_manifest_IDs, target_dataset_IDs + ): entity = synStore.getDatasetManifest( - datasetId = target_dataset_ID, - downloadFile = True - ) - target_manifest=pd.read_csv(entity.path) + datasetId=target_dataset_ID, downloadFile=True + ) + target_manifest = pd.read_csv(entity.path) - #convert manifest column names into validation rule input format - - column_names={} + # convert manifest column names into validation rule input format - + column_names = {} for name in target_manifest.columns: - column_names[name.replace(" ","").lower()]=name + column_names[name.replace(" ", "").lower()] = name - if scope.__contains__('set'): - #If the manifest has the target attribute for the component do the cross validation - if target_attribute in column_names: + if scope.__contains__("set"): + # If the manifest has the target attribute for the component do the cross validation + if target_attribute in column_names: target_column = target_manifest[column_names[target_attribute]] - #Do the validation on both columns + # Do the validation on both columns missing_values = manifest_col[~manifest_col.isin(target_column)] if missing_values.empty: @@ -970,98 +1019,116 @@ def cross_validation( else: missing_manifest_log[target_manifest_ID] = missing_values - elif scope.__contains__('value'): + elif scope.__contains__("value"): if target_attribute in column_names: - target_manifest.rename(columns={column_names[target_attribute]: target_attribute}, inplace=True) - + target_manifest.rename( + columns={column_names[target_attribute]: target_attribute}, + inplace=True, + ) + target_column = pd.concat( - objs = [target_column, target_manifest[target_attribute]], - join = 'outer', - ignore_index= True, - ) - target_column = target_column.astype('object') - #print(target_column) - - - - missing_rows=[] - missing_values=[] - - - if scope.__contains__('value'): + objs=[target_column, target_manifest[target_attribute]], + join="outer", + ignore_index=True, + ) + target_column = target_column.astype("object") + # print(target_column) + + missing_rows = [] + missing_values = [] + + if scope.__contains__("value"): missing_values = manifest_col[~manifest_col.isin(target_column)] - duplicated_values = manifest_col[manifest_col.isin(target_column[target_column.duplicated()])] - - if val_rule.__contains__('matchAtLeastOne') and not missing_values.empty: + 
duplicated_values = manifest_col[ + manifest_col.isin(target_column[target_column.duplicated()]) + ] + + if val_rule.__contains__("matchAtLeastOne") and not missing_values.empty: missing_rows = missing_values.index.to_numpy() + 2 missing_rows = np_array_to_str_list(missing_rows) vr_errors, vr_warnings = GenerateError.generate_cross_warning( - val_rule = val_rule, - row_num = missing_rows, - attribute_name = source_attribute, - invalid_entry = iterable_to_str_list(missing_values), - dmge = dmge, - ) + val_rule=val_rule, + row_num=missing_rows, + attribute_name=source_attribute, + invalid_entry=iterable_to_str_list(missing_values), + dmge=dmge, + ) if vr_errors: errors.append(vr_errors) if vr_warnings: warnings.append(vr_warnings) - elif val_rule.__contains__('matchExactlyOne') and (duplicated_values.any() or missing_values.any()): - invalid_values = pd.merge(duplicated_values,missing_values,how='outer') - invalid_rows = pd.merge(duplicated_values,missing_values,how='outer',left_index=True,right_index=True).index.to_numpy() + 2 - invalid_rows = np_array_to_str_list(invalid_rows) + elif val_rule.__contains__("matchExactlyOne") and ( + duplicated_values.any() or missing_values.any() + ): + invalid_values = pd.merge( + duplicated_values, missing_values, how="outer" + ) + invalid_rows = ( + pd.merge( + duplicated_values, + missing_values, + how="outer", + left_index=True, + right_index=True, + ).index.to_numpy() + + 2 + ) + invalid_rows = np_array_to_str_list(invalid_rows) vr_errors, vr_warnings = GenerateError.generate_cross_warning( - val_rule = val_rule, - row_num = invalid_rows, - attribute_name = source_attribute, - invalid_entry = iterable_to_str_list(invalid_values.squeeze()), - dmge = dmge, - ) + val_rule=val_rule, + row_num=invalid_rows, + attribute_name=source_attribute, + invalid_entry=iterable_to_str_list(invalid_values.squeeze()), + dmge=dmge, + ) if vr_errors: errors.append(vr_errors) if vr_warnings: warnings.append(vr_warnings) - - - - #generate warnings if necessary - elif scope.__contains__('set'): - if val_rule.__contains__('matchAtLeastOne') and len(present_manifest_log) < 1: - missing_entries = list(missing_manifest_log.values()) - missing_manifest_IDs = list(missing_manifest_log.keys()) + + # generate warnings if necessary + elif scope.__contains__("set"): + if ( + val_rule.__contains__("matchAtLeastOne") + and len(present_manifest_log) < 1 + ): + missing_entries = list(missing_manifest_log.values()) + missing_manifest_IDs = list(missing_manifest_log.keys()) for missing_entry in missing_entries: - missing_rows.append(missing_entry.index[0]+2) + missing_rows.append(missing_entry.index[0] + 2) missing_values.append(missing_entry.values[0]) - - missing_rows=iterable_to_str_list(set(missing_rows)) - missing_values=iterable_to_str_list(set(missing_values)) - + + missing_rows = iterable_to_str_list(set(missing_rows)) + missing_values = iterable_to_str_list(set(missing_values)) + vr_errors, vr_warnings = GenerateError.generate_cross_warning( - val_rule = val_rule, - row_num = missing_rows, - attribute_name = source_attribute, - invalid_entry = missing_values, - missing_manifest_ID = missing_manifest_IDs, - dmge = dmge, - ) + val_rule=val_rule, + row_num=missing_rows, + attribute_name=source_attribute, + invalid_entry=missing_values, + missing_manifest_ID=missing_manifest_IDs, + dmge=dmge, + ) if vr_errors: errors.append(vr_errors) if vr_warnings: warnings.append(vr_warnings) - elif val_rule.__contains__('matchExactlyOne') and len(present_manifest_log) != 1: + elif ( + 
val_rule.__contains__("matchExactlyOne") + and len(present_manifest_log) != 1 + ): vr_errors, vr_warnings = GenerateError.generate_cross_warning( - val_rule = val_rule, - attribute_name = source_attribute, - matching_manifests = present_manifest_log, - dmge = dmge, - ) + val_rule=val_rule, + attribute_name=source_attribute, + matching_manifests=present_manifest_log, + dmge=dmge, + ) if vr_errors: errors.append(vr_errors) if vr_warnings: warnings.append(vr_warnings) - - logger.debug(f"cross manifest validation time {perf_counter()-t_cross_manifest}") + logger.debug( + f"cross manifest validation time {perf_counter()-t_cross_manifest}" + ) return errors, warnings - - diff --git a/schematic/models/validate_manifest.py b/schematic/models/validate_manifest.py index ff180998a..d3af5c575 100644 --- a/schematic/models/validate_manifest.py +++ b/schematic/models/validate_manifest.py @@ -27,21 +27,22 @@ logger = logging.getLogger(__name__) + class ValidateManifest(object): def __init__(self, errors, manifest, manifestPath, dmge, jsonSchema): self.errors = errors self.manifest = manifest self.manifestPath = manifestPath self.dmge = dmge - self.jsonSchema = jsonSchema + self.jsonSchema = jsonSchema def get_multiple_types_error( self, validation_rules: list, attribute_name: str, error_type: str ) -> List[str]: """ - Generate error message for errors when trying to specify - multiple validation rules. - """ + Generate error message for errors when trying to specify + multiple validation rules. + """ error_col = attribute_name # Attribute name if error_type == "too_many_rules": error_str = ( @@ -62,7 +63,12 @@ def get_multiple_types_error( return ["NA", error_col, error_message, error_val] def validate_manifest_rules( - self, manifest: pd.core.frame.DataFrame, dmge: DataModelGraphExplorer, restrict_rules: bool, project_scope: List, access_token: Optional[str] = None, + self, + manifest: pd.core.frame.DataFrame, + dmge: DataModelGraphExplorer, + restrict_rules: bool, + project_scope: List, + access_token: Optional[str] = None, ) -> (pd.core.frame.DataFrame, List[List[str]]): """ Purpose: @@ -76,17 +82,17 @@ def validate_manifest_rules( initialized within models/metadata.py Returns: manifest: pd.core.frame.DataFrame - If a 'list' validatior is run, the manifest needs to be + If a 'list' validatior is run, the manifest needs to be updated to change the attribute column values to a list. In this case the manifest will be updated then exported. errors: List[List[str]] If any errors are generated they will be added to an errors list log recording the following information: [error_row, error_col, error_message, error_val] - TODO: + TODO: -Investigate why a :: delimiter is breaking up the validation rules without me having to do anything... - - Move the rules formatting validation to the JSONLD + - Move the rules formatting validation to the JSONLD generation script. """ @@ -95,19 +101,19 @@ def validate_manifest_rules( validation_types = validation_rule_info() - type_dict={ + type_dict = { "float64": float, "int64": int, "str": str, } - unimplemented_expectations=[ + unimplemented_expectations = [ "url", "list", "regex.*", "matchAtLeastOne.*", "matchExactlyOne.*", - ] + ] in_house_rules = [ "int", @@ -122,67 +128,66 @@ def validate_manifest_rules( ] # initialize error and warning handling lists. 
- errors = [] - warnings = [] + errors = [] + warnings = [] if not restrict_rules: t_GE = perf_counter() - #operations necessary to set up and run ge suite validation - ge_helpers=GreatExpectationsHelpers( + # operations necessary to set up and run ge suite validation + ge_helpers = GreatExpectationsHelpers( dmge=dmge, unimplemented_expectations=unimplemented_expectations, - manifest = manifest, - manifestPath = self.manifestPath, - ) + manifest=manifest, + manifestPath=self.manifestPath, + ) ge_helpers.build_context() ge_helpers.build_expectation_suite() ge_helpers.build_checkpoint() try: - #run GE validation + # run GE validation results = ge_helpers.context.run_checkpoint( checkpoint_name=ge_helpers.checkpoint_name, batch_request={ "runtime_parameters": {"batch_data": manifest}, - "batch_identifiers": { - "default_identifier_name": "manifestID" - }, + "batch_identifiers": {"default_identifier_name": "manifestID"}, }, - result_format={'result_format': 'COMPLETE'}, - ) + result_format={"result_format": "COMPLETE"}, + ) finally: - ge_helpers.context.delete_checkpoint(ge_helpers.checkpoint_name) - ge_helpers.context.delete_expectation_suite(ge_helpers.expectation_suite_name) - - validation_results = results.list_validation_results() + ge_helpers.context.delete_checkpoint(ge_helpers.checkpoint_name) + ge_helpers.context.delete_expectation_suite( + ge_helpers.expectation_suite_name + ) + + validation_results = results.list_validation_results() - #parse validation results dict and generate errors + # parse validation results dict and generate errors errors, warnings = ge_helpers.generate_errors( - errors = errors, - warnings = warnings, - validation_results = validation_results, - validation_types = validation_types, - dmge = dmge, - ) - logger.debug(f"GE elapsed time {perf_counter()-t_GE}") - else: - logger.info("Great Expetations suite will not be utilized.") - - t_err=perf_counter() - regex_re=re.compile('regex.*') + errors=errors, + warnings=warnings, + validation_results=validation_results, + validation_types=validation_types, + dmge=dmge, + ) + logger.debug(f"GE elapsed time {perf_counter()-t_GE}") + else: + logger.info("Great Expetations suite will not be utilized.") + + t_err = perf_counter() + regex_re = re.compile("regex.*") for col in manifest.columns: - # remove trailing/leading whitespaces from manifest manifest.applymap(lambda x: x.strip() if isinstance(x, str) else x) validation_rules = dmge.get_node_validation_rules(node_display_name=col) - #TODO: Can remove when handling updated so split within graph - if validation_rules and '::' in validation_rules[0]: + # TODO: Can remove when handling updated so split within graph + if validation_rules and "::" in validation_rules[0]: validation_rules = validation_rules[0].split("::") # Check that attribute rules conform to limits: - # no more than two rules for an attribute. + # no more than two rules for an attribute. # As more combinations get added, may want to bring out into its own function / or use validate_rules_utils? if len(validation_rules) > 2: errors.append( @@ -194,20 +199,27 @@ def validate_manifest_rules( # Given a validation rule, run validation. 
Skip validations already performed by GE for rule in validation_rules: validation_type = rule.split(" ")[0] - if rule_in_rule_list(rule,unimplemented_expectations) or (rule_in_rule_list(rule,in_house_rules) and restrict_rules): - if not rule_in_rule_list(rule,in_house_rules): - logger.warning(f"Validation rule {rule.split(' ')[0]} has not been implemented in house and cannnot be validated without Great Expectations.") - continue + if rule_in_rule_list(rule, unimplemented_expectations) or ( + rule_in_rule_list(rule, in_house_rules) and restrict_rules + ): + if not rule_in_rule_list(rule, in_house_rules): + logger.warning( + f"Validation rule {rule.split(' ')[0]} has not been implemented in house and cannnot be validated without Great Expectations." + ) + continue - t_indiv_rule=perf_counter() - #Validate for each individual validation rule. + t_indiv_rule = perf_counter() + # Validate for each individual validation rule. validation_method = getattr( - ValidateAttribute, validation_types[validation_type]['type'] - ) + ValidateAttribute, validation_types[validation_type]["type"] + ) if validation_type == "list": vr_errors, vr_warnings, manifest_col = validation_method( - self, rule, manifest[col], dmge, + self, + rule, + manifest[col], + dmge, ) manifest[col] = manifest_col elif validation_type.lower().startswith("match"): @@ -216,29 +228,45 @@ def validate_manifest_rules( ) else: vr_errors, vr_warnings = validation_method( - self, rule, manifest[col], dmge, + self, + rule, + manifest[col], + dmge, ) # Check for validation rule errors and add them to other errors. if vr_errors: errors.extend(vr_errors) if vr_warnings: warnings.extend(vr_warnings) - logger.debug(f"Rule {rule} elapsed time: {perf_counter()-t_indiv_rule}") + logger.debug( + f"Rule {rule} elapsed time: {perf_counter()-t_indiv_rule}" + ) logger.debug(f"In House validation elapsed time {perf_counter()-t_err}") return manifest, errors, warnings - def validate_manifest_values(self, manifest, jsonSchema, dmge, + def validate_manifest_values( + self, + manifest, + jsonSchema, + dmge, ) -> (List[List[str]], List[List[str]]): t_json_schema = perf_counter() errors = [] warnings = [] - col_attr = {} # save the mapping between column index and attribute name - + col_attr = {} # save the mapping between column index and attribute name + # numerical values need to be type string for the jsonValidator - for col in manifest.select_dtypes(include=[int, np.int64, float, np.float64]).columns: - manifest[col]=manifest[col].astype('string') - manifest = manifest.applymap(lambda x: str(x) if isinstance(x, (int, np.int64, float, np.float64)) else x, na_action='ignore') + for col in manifest.select_dtypes( + include=[int, np.int64, float, np.float64] + ).columns: + manifest[col] = manifest[col].astype("string") + manifest = manifest.applymap( + lambda x: str(x) + if isinstance(x, (int, np.int64, float, np.float64)) + else x, + na_action="ignore", + ) annotations = json.loads(manifest.to_json(orient="records")) for i, annotation in enumerate(annotations): @@ -250,19 +278,40 @@ def validate_manifest_values(self, manifest, jsonSchema, dmge, errorMsg = error.message[0:500] errorVal = error.instance if len(error.path) > 0 else "Wrong schema" - val_errors, val_warnings = GenerateError.generate_schema_error(row_num = errorRow, attribute_name = errorColName, error_msg = errorMsg, invalid_entry = errorVal, dmge = dmge) + val_errors, val_warnings = GenerateError.generate_schema_error( + row_num=errorRow, + attribute_name=errorColName, + error_msg=errorMsg, + 
invalid_entry=errorVal, + dmge=dmge, + ) if val_errors: errors.append(val_errors) if val_warnings: warnings.append(val_warnings) - logger.debug(f"JSON Schema validation elapsed time {perf_counter()-t_json_schema}") + logger.debug( + f"JSON Schema validation elapsed time {perf_counter()-t_json_schema}" + ) return errors, warnings -def validate_all(self, errors, warnings, manifest, manifestPath, dmge, jsonSchema, restrict_rules, project_scope: List, access_token: str): +def validate_all( + self, + errors, + warnings, + manifest, + manifestPath, + dmge, + jsonSchema, + restrict_rules, + project_scope: List, + access_token: str, +): vm = ValidateManifest(errors, manifest, manifestPath, dmge, jsonSchema) - manifest, vmr_errors, vmr_warnings = vm.validate_manifest_rules(manifest, dmge, restrict_rules, project_scope, access_token) + manifest, vmr_errors, vmr_warnings = vm.validate_manifest_rules( + manifest, dmge, restrict_rules, project_scope, access_token + ) if vmr_errors: errors.extend(vmr_errors) if vmr_warnings: diff --git a/schematic/schemas/commands.py b/schematic/schemas/commands.py index 80700e2bf..da2f19fa3 100644 --- a/schematic/schemas/commands.py +++ b/schematic/schemas/commands.py @@ -101,7 +101,7 @@ def convert(schema, output_jsonld): # output JSON-LD file alongside CSV file by default, get path. if output_jsonld is None: - if not '.jsonld' in schema: + if not ".jsonld" in schema: csv_no_ext = re.sub("[.]csv$", "", schema) output_jsonld = csv_no_ext + ".jsonld" else: diff --git a/schematic/schemas/data_model_edges.py b/schematic/schemas/data_model_edges.py index 7abbc26a8..225039866 100644 --- a/schematic/schemas/data_model_edges.py +++ b/schematic/schemas/data_model_edges.py @@ -14,7 +14,7 @@ def generate_edge( all_node_dict: dict, attr_rel_dict: dict, edge_relationships: dict, - edge_list:list, + edge_list: list, ) -> list[tuple[str, str, dict[str:str, str:int]]]: """Generate an edge between a target node and relevant other nodes the data model. In short, does this current node belong to a recorded relationship in the attribute, relationshps dictionary. Go through each attribute and relationship to find where the node may be. Args: @@ -67,25 +67,31 @@ def generate_edge( # Add edges, in a manner that preserves directionality # TODO: rewrite to use edge_dir if rel_key in ["subClassOf", "domainIncludes"]: - edge_list.append(( - all_node_dict[node]["label"], - all_node_dict[attribute_display_name]["label"], - {'key':edge_key, - 'weight':weight,}) + edge_list.append( + ( + all_node_dict[node]["label"], + all_node_dict[attribute_display_name]["label"], + { + "key": edge_key, + "weight": weight, + }, ) + ) else: - edge_list.append(( - all_node_dict[attribute_display_name]["label"], - all_node_dict[node]["label"], - {'key':edge_key, - 'weight':weight},) + edge_list.append( + ( + all_node_dict[attribute_display_name]["label"], + all_node_dict[node]["label"], + {"key": edge_key, "weight": weight}, ) + ) # Add add rangeIncludes/valid value relationships in reverse as well, making the attribute the parent of the valid value. 
if rel_key == "rangeIncludes": - edge_list.append(( - all_node_dict[attribute_display_name]["label"], - all_node_dict[node]["label"], - {'key':"parentOf", - 'weight':weight},) + edge_list.append( + ( + all_node_dict[attribute_display_name]["label"], + all_node_dict[node]["label"], + {"key": "parentOf", "weight": weight}, ) + ) return edge_list diff --git a/schematic/schemas/data_model_graph.py b/schematic/schemas/data_model_graph.py index 917d1eb71..353e2dc52 100644 --- a/schematic/schemas/data_model_graph.py +++ b/schematic/schemas/data_model_graph.py @@ -110,7 +110,7 @@ def generate_data_model_graph(self) -> nx.MultiDiGraph: # Add edges to the Graph for node_1, node_2, edge_dict in edge_list: - G.add_edge(node_1, node_2, key=edge_dict['key'], weight=edge_dict['weight']) + G.add_edge(node_1, node_2, key=edge_dict["key"], weight=edge_dict["weight"]) return G @@ -364,7 +364,7 @@ def get_ordered_entry(self, key: str, source_node_label: str) -> list[str]: ) edge_key = self.rel_dict[key]["edge_key"] - + # Handle out edges if self.rel_dict[key]["jsonld_direction"] == "out": # use outedges diff --git a/schematic/schemas/data_model_jsonld.py b/schematic/schemas/data_model_jsonld.py index fa9af86ef..eff549017 100644 --- a/schematic/schemas/data_model_jsonld.py +++ b/schematic/schemas/data_model_jsonld.py @@ -135,7 +135,7 @@ def get_edges_associated_with_node( return node_edges def get_edges_associated_with_property_nodes( - self, node:str + self, node: str ) -> List[tuple[str, str, dict[str, int]]]: """Get edges associated with property nodes to make sure we add that relationship. Args: @@ -144,7 +144,7 @@ def get_edges_associated_with_property_nodes( node_edges, list: List of Tuples of edges associated with the given node, tuple contains the two nodes, plus the weight dict associated with the edge connection. 
""" # Get edge keys for domainIncludes and subclassOf - domainIncludes_edge_key = self.rel_dict['domainIncludes']['edge_key'] + domainIncludes_edge_key = self.rel_dict["domainIncludes"]["edge_key"] node_edges = [] # Get dict of edges for the current property node node_edges_dict = self.graph[node] @@ -167,10 +167,11 @@ def add_edge_rels_to_template(self, template: dict, rel_vals: dict, node: str): # Get all edges associated with the current node node_edges = self.get_edges_associated_with_node(node=node) - # For properties look for reverse relationships too if node in self.dmge.find_properties(): - property_node_edges = self.get_edges_associated_with_property_nodes(node=node) + property_node_edges = self.get_edges_associated_with_property_nodes( + node=node + ) node_edges.extend(property_node_edges) # Get node pairs and weights for each edge @@ -188,8 +189,10 @@ def add_edge_rels_to_template(self, template: dict, rel_vals: dict, node: str): # If the relationship defined and edge_key if relationship == edge_key: # TODO: rewrite to use edge_dir - domainIncludes_edge_key = self.rel_dict['domainIncludes']['edge_key'] - subclassOf_edge_key = self.rel_dict['subClassOf']['edge_key'] + domainIncludes_edge_key = self.rel_dict["domainIncludes"][ + "edge_key" + ] + subclassOf_edge_key = self.rel_dict["subClassOf"]["edge_key"] if edge_key in [subclassOf_edge_key]: if node_2 == node: # Make sure the key is in the template (differs between properties and classes) @@ -414,7 +417,9 @@ def reorder_template_entries(self, template: dict) -> dict: key=key, source_node_label=template_label ) if not len(entry) == len(sorted_edges): - logger.error("There is an error with sorting values in the JSONLD, please issue a bug report.") + logger.error( + "There is an error with sorting values in the JSONLD, please issue a bug report." + ) edge_weights_dict = {edge: i for i, edge in enumerate(sorted_edges)} ordered_edges = [0] * len(edge_weights_dict.keys()) diff --git a/schematic/schemas/data_model_parser.py b/schematic/schemas/data_model_parser.py index a541eb3ed..0fdfc5e70 100644 --- a/schematic/schemas/data_model_parser.py +++ b/schematic/schemas/data_model_parser.py @@ -243,7 +243,13 @@ def __init__( # Load relationships dictionary. self.rel_dict = self.dmr.define_data_model_relationships() - def parse_entry(self, rel_entry: any, id_jsonld_key: str, dn_label_dict:dict[str:str], model_jsonld:dict) -> Any: + def parse_entry( + self, + rel_entry: any, + id_jsonld_key: str, + dn_label_dict: dict[str:str], + model_jsonld: dict, + ) -> Any: """Parse an input entry based on certain attributes Args: rel_entry: Given a single entry and relationship in a JSONLD data model, the recorded value @@ -256,7 +262,9 @@ def parse_entry(self, rel_entry: any, id_jsonld_key: str, dn_label_dict:dict[str parsed_rel_entry = rel_entry["@id"] # Parse list of dictionaries to make a list of entries with context stripped (will update this section when contexts added.) elif type(rel_entry) == list and type(rel_entry[0]) == dict: - parsed_rel_entry = self.convert_entry_to_dn_label([r[id_jsonld_key].split(":")[1] for r in rel_entry], model_jsonld) + parsed_rel_entry = self.convert_entry_to_dn_label( + [r[id_jsonld_key].split(":")[1] for r in rel_entry], model_jsonld + ) # Strip context from string and convert true/false to bool elif type(rel_entry) == str: # Remove contexts and treat strings as appropriate. 
@@ -268,16 +276,18 @@ def parse_entry(self, rel_entry: any, id_jsonld_key: str, dn_label_dict:dict[str elif parsed_rel_entry.lower == "false": parsed_rel_entry = False else: - parsed_rel_entry=self.convert_entry_to_dn_label(rel_entry, model_jsonld) + parsed_rel_entry = self.convert_entry_to_dn_label( + rel_entry, model_jsonld + ) # For anything else get that else: - parsed_rel_entry=self.convert_entry_to_dn_label(rel_entry, model_jsonld) + parsed_rel_entry = self.convert_entry_to_dn_label(rel_entry, model_jsonld) return parsed_rel_entry def label_to_dn_dict(self, model_jsonld: list[dict]): - """ Generate a dictionary of labels to display name, so can easily look up display names using the label. + """Generate a dictionary of labels to display name, so can easily look up display names using the label. Args: model_jsonld: list of dictionaries, each dictionary is an entry in the jsonld data model Returns: @@ -289,10 +299,12 @@ def label_to_dn_dict(self, model_jsonld: list[dict]): ] dn_label_dict = {} for entry in model_jsonld: - dn_label_dict[entry[label_jsonld_key]]=entry[dn_jsonld_key] + dn_label_dict[entry[label_jsonld_key]] = entry[dn_jsonld_key] return dn_label_dict - def convert_entry_to_dn_label(self, parsed_rel_entry:Union[str,list], model_jsonld:list[dict]) -> Union[str,list]: + def convert_entry_to_dn_label( + self, parsed_rel_entry: Union[str, list], model_jsonld: list[dict] + ) -> Union[str, list]: """Convert a parsed entry to display name, taking into account the entry type Args: parsed_rel_entry: an entry that has been parsed base on its input type and characteristics. @@ -304,7 +316,10 @@ def convert_entry_to_dn_label(self, parsed_rel_entry:Union[str,list], model_json dn_label_dict = self.label_to_dn_dict(model_jsonld=model_jsonld) # Handle if using the display name as the label if type(parsed_rel_entry) == list: - parsed_rel_entry = [dn_label_dict.get(entry) if dn_label_dict.get(entry) else entry for entry in parsed_rel_entry ] + parsed_rel_entry = [ + dn_label_dict.get(entry) if dn_label_dict.get(entry) else entry + for entry in parsed_rel_entry + ] elif type(parsed_rel_entry) == str: converted_label = dn_label_dict.get(parsed_rel_entry) if converted_label: @@ -362,26 +377,26 @@ def gather_jsonld_attributes_relationships(self, model_jsonld: List[dict]) -> Di for rel_key, rel_vals in self.rel_dict.items(): # Determine if current entry in the for loop, can be described by the current relationship that is being cycled through. # used to also check "csv_header" in rel_vals.keys() which allows all JSONLD values through even if it does not have a CSV counterpart, will allow other values thorough in the else statement now - if ( - rel_vals["jsonld_key"] in entry.keys() - and rel_vals["csv_header"] - ): + if rel_vals["jsonld_key"] in entry.keys() and rel_vals["csv_header"]: # Retrieve entry value associated with the given relationship rel_entry = entry[rel_vals["jsonld_key"]] # If there is an entry parse it by type and add to the attr:relationships dictionary. if rel_entry: parsed_rel_entry = self.parse_entry( - rel_entry=rel_entry, id_jsonld_key=id_jsonld_key, dn_label_dict=dn_label_dict, model_jsonld=model_jsonld, + rel_entry=rel_entry, + id_jsonld_key=id_jsonld_key, + dn_label_dict=dn_label_dict, + model_jsonld=model_jsonld, ) rel_csv_header = self.rel_dict[rel_key]["csv_header"] - if rel_key == 'domainIncludes': + if rel_key == "domainIncludes": # In the JSONLD the domain includes field contains the ids of attributes that the current attribute is the property/parent of. 
# Because of this we need to handle these values differently. # We will get the values in the field (parsed_val), then add the current attribute as to the property key in the attr_rel_dictionary[p_attr_key]. for parsed_val in parsed_rel_entry: attr_in_dict = False - #Get propert/parent key (displayName) - p_attr_key='' + # Get propert/parent key (displayName) + p_attr_key = "" # Check if the parsed value is already a part of the attr_rel_dictionary for attr_dn, rels in attr_rel_dictionary.items(): if parsed_val == attr_dn: @@ -389,23 +404,44 @@ def gather_jsonld_attributes_relationships(self, model_jsonld: List[dict]) -> Di attr_in_dict = True # If it is part of the dictionary update add current attribute as a property of the parsed value if attr_in_dict == True: - if not rel_csv_header in attr_rel_dictionary[p_attr_key]["Relationships"]: - attr_rel_dictionary[p_attr_key]["Relationships"].update({rel_csv_header:[entry[dn_jsonld_key]]}) + if ( + not rel_csv_header + in attr_rel_dictionary[p_attr_key][ + "Relationships" + ] + ): + attr_rel_dictionary[p_attr_key][ + "Relationships" + ].update( + {rel_csv_header: [entry[dn_jsonld_key]]} + ) else: - attr_rel_dictionary[p_attr_key]["Relationships"].update({rel_csv_header:[entry[dn_jsonld_key]]}) + attr_rel_dictionary[p_attr_key][ + "Relationships" + ].update( + {rel_csv_header: [entry[dn_jsonld_key]]} + ) # If the parsed_val is not already recorded in the dictionary, add it elif attr_in_dict == False: # Get the display name for the parsed value - p_attr_key = self.convert_entry_to_dn_label(parsed_val, model_jsonld) - - attr_rel_dictionary.update(attr_dict_template(p_attr_key)) - attr_rel_dictionary[p_attr_key]["Relationships"].update({rel_csv_header:[entry[label_jsonld_key]]}) - + p_attr_key = self.convert_entry_to_dn_label( + parsed_val, model_jsonld + ) + + attr_rel_dictionary.update( + attr_dict_template(p_attr_key) + ) + attr_rel_dictionary[p_attr_key][ + "Relationships" + ].update( + {rel_csv_header: [entry[label_jsonld_key]]} + ) + else: attr_rel_dictionary[attr_key]["Relationships"].update( {rel_csv_header: parsed_rel_entry} ) - + elif ( rel_vals["jsonld_key"] in entry.keys() and not rel_vals["csv_header"] @@ -415,7 +451,10 @@ def gather_jsonld_attributes_relationships(self, model_jsonld: List[dict]) -> Di # If there is an entry parset it by type and add to the attr:relationships dictionary. if rel_entry: parsed_rel_entry = self.parse_entry( - rel_entry=rel_entry, id_jsonld_key=id_jsonld_key, dn_label_dict=dn_label_dict, model_jsonld=model_jsonld, + rel_entry=rel_entry, + id_jsonld_key=id_jsonld_key, + dn_label_dict=dn_label_dict, + model_jsonld=model_jsonld, ) # Add relationships for each attribute and relationship to the dictionary attr_rel_dictionary[attr_key]["Relationships"].update( diff --git a/schematic/schemas/data_model_validator.py b/schematic/schemas/data_model_validator.py index e3d626882..f0939999b 100644 --- a/schematic/schemas/data_model_validator.py +++ b/schematic/schemas/data_model_validator.py @@ -5,8 +5,10 @@ from typing import Any, Dict, Optional, Text, List, Tuple from schematic.schemas.data_model_relationships import DataModelRelationships + logger = logging.getLogger(__name__) + class DataModelValidator: """ Check for consistency within data model. 
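# --- Illustrative aside (not part of the patch): the DAG check in the hunk
# below time-boxes the expensive cycle enumeration in a child process, since
# nx.is_directed_acyclic_graph itself is cheap. A minimal sketch; `run_cycles`
# stands in for the validator helper of the same name, and the 5-second budget
# mirrors the comment in the hunk that follows.
import multiprocessing
import networkx as nx

def run_cycles(graph):
    print(list(nx.simple_cycles(graph)))  # enumerate cycles for the error message

if __name__ == "__main__":
    graph = nx.DiGraph([("a", "b"), ("b", "c"), ("c", "a")])
    if not nx.is_directed_acyclic_graph(graph):
        cycles = multiprocessing.Process(target=run_cycles, args=(graph,))
        cycles.start()
        cycles.join(timeout=5)      # give the cycle search up to 5 seconds
        if cycles.is_alive():
            cycles.terminate()      # give up and fall back to the generic DAG error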
@@ -87,7 +89,9 @@ def check_is_dag(self) -> List[str]: """ error = [] if not nx.is_directed_acyclic_graph(self.graph): - cycles = multiprocessing.Process(target=self.run_cycles, name="Get Cycles", args=(self.graph,)) + cycles = multiprocessing.Process( + target=self.run_cycles, name="Get Cycles", args=(self.graph,) + ) cycles.start() # Give up to 5 seconds to find cycles, if not exit and issue standard error @@ -102,7 +106,7 @@ def check_is_dag(self) -> List[str]: error.append( f"Schematic requires models be a directed acyclic graph (DAG). Please inspect your model." - ) + ) return error diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py index c291b742e..1144dc88a 100644 --- a/schematic/store/synapse.py +++ b/schematic/store/synapse.py @@ -14,8 +14,15 @@ import synapseclient import uuid # used to generate unique names for entities -from tenacity import retry, stop_after_attempt, wait_chain, wait_fixed, retry_if_exception_type +from tenacity import ( + retry, + stop_after_attempt, + wait_chain, + wait_fixed, + retry_if_exception_type, +) from time import sleep + # allows specifying explicit variable types from typing import Dict, List, Tuple, Sequence, Union, Optional @@ -34,7 +41,11 @@ from synapseclient.entity import File from synapseclient.table import CsvFileTable, build_table, Schema from synapseclient.annotations import from_synapse_annotations -from synapseclient.core.exceptions import SynapseHTTPError, SynapseAuthenticationError, SynapseUnmetAccessRestrictions +from synapseclient.core.exceptions import ( + SynapseHTTPError, + SynapseAuthenticationError, + SynapseUnmetAccessRestrictions, +) import synapseutils from synapseutils.copy_functions import changeFileMetaData @@ -46,16 +57,19 @@ from schematic.utils.df_utils import update_df, load_df, col_in_dataframe from schematic.utils.validate_utils import comma_separated_list_regex, rule_in_rule_list + # entity_type_mapping, get_dir_size, create_temp_folder, check_synapse_cache_size, and clear_synapse_cache functions are used for AWS deployment # Please do not remove these import statements -from schematic.utils.general import (entity_type_mapping, - get_dir_size, - convert_gb_to_bytes, - create_temp_folder, - check_synapse_cache_size, - clear_synapse_cache, - profile, - calculate_datetime) +from schematic.utils.general import ( + entity_type_mapping, + get_dir_size, + convert_gb_to_bytes, + create_temp_folder, + check_synapse_cache_size, + clear_synapse_cache, + profile, + calculate_datetime, +) from schematic.utils.schema_utils import get_class_label_from_display_name from schematic.store.base import BaseStorage @@ -64,12 +78,14 @@ logger = logging.getLogger("Synapse storage") + @dataclass class ManifestDownload(object): """ syn: an object of type synapseclient. 
- manifest_id: id of a manifest + manifest_id: id of a manifest """ + syn: synapseclient.Synapse manifest_id: str @@ -91,33 +107,37 @@ def _download_manifest_to_folder(self) -> File: # create temporary folders for storing manifests download_location = create_temp_folder(temporary_manifest_storage) else: - download_location=CONFIG.manifest_folder + download_location = CONFIG.manifest_folder manifest_data = self.syn.get( - self.manifest_id, - downloadLocation=download_location, - ifcollision="overwrite.local", - ) + self.manifest_id, + downloadLocation=download_location, + ifcollision="overwrite.local", + ) return manifest_data def _entity_type_checking(self) -> str: """ check the entity type of the id that needs to be downloaded - Return: + Return: if the entity type is wrong, raise an error """ # check the type of entity entity_type = entity_type_mapping(self.syn, self.manifest_id) - if entity_type != "file": - logger.error(f'You are using entity type: {entity_type}. Please provide a file ID') + if entity_type != "file": + logger.error( + f"You are using entity type: {entity_type}. Please provide a file ID" + ) @staticmethod - def download_manifest(self, newManifestName: str="", manifest_df: pd.DataFrame=pd.DataFrame()) -> Union[str,File]: + def download_manifest( + self, newManifestName: str = "", manifest_df: pd.DataFrame = pd.DataFrame() + ) -> Union[str, File]: """ - Download a manifest based on a given manifest id. + Download a manifest based on a given manifest id. Args: newManifestName(optional): new name of a manifest that gets downloaded. manifest_df(optional): a dataframe containing name and id of manifests in a given asset view - Return: + Return: manifest_data: synapse entity file object """ @@ -131,36 +151,44 @@ def download_manifest(self, newManifestName: str="", manifest_df: pd.DataFrame=p # download a manifest try: manifest_data = self._download_manifest_to_folder() - except(SynapseUnmetAccessRestrictions, SynapseAuthenticationError): + except (SynapseUnmetAccessRestrictions, SynapseAuthenticationError): # if there's an error getting an uncensored manifest, try getting the censored manifest if not manifest_df.empty: - censored_regex=re.compile('.*censored.*') - censored = manifest_df['name'].str.contains(censored_regex) - new_manifest_id=manifest_df[censored]["id"][0] + censored_regex = re.compile(".*censored.*") + censored = manifest_df["name"].str.contains(censored_regex) + new_manifest_id = manifest_df[censored]["id"][0] self.manifest_id = new_manifest_id - try: + try: manifest_data = self._download_manifest_to_folder() - except (SynapseUnmetAccessRestrictions, SynapseAuthenticationError) as e: - raise PermissionError("You don't have access to censored and uncensored manifests in this dataset.") from e + except ( + SynapseUnmetAccessRestrictions, + SynapseAuthenticationError, + ) as e: + raise PermissionError( + "You don't have access to censored and uncensored manifests in this dataset." + ) from e else: - logger.error(f"You don't have access to the requested resource: {self.manifest_id}") + logger.error( + f"You don't have access to the requested resource: {self.manifest_id}" + ) - if newManifestName and os.path.exists(manifest_data.get('path')): + if newManifestName and os.path.exists(manifest_data.get("path")): # Rename the file we just made to the new name - new_manifest_filename = newManifestName + '.csv' + new_manifest_filename = newManifestName + ".csv" # get location of existing manifest. 
The manifest that will be renamed should live in the same folder as existing manifest. - parent_folder = os.path.dirname(manifest_data.get('path')) + parent_folder = os.path.dirname(manifest_data.get("path")) new_manifest_path_name = os.path.join(parent_folder, new_manifest_filename) - os.rename(manifest_data['path'], new_manifest_path_name) + os.rename(manifest_data["path"], new_manifest_path_name) # Update file names/paths in manifest_data - manifest_data['name'] = new_manifest_filename - manifest_data['filename'] = new_manifest_filename - manifest_data['path'] = new_manifest_path_name + manifest_data["name"] = new_manifest_filename + manifest_data["filename"] = new_manifest_filename + manifest_data["path"] = new_manifest_path_name return manifest_data + class SynapseStorage(BaseStorage): """Implementation of Storage interface for datasets/files stored on Synapse. Provides utilities to list files in a specific project; update files annotations, create fileviews, etc. @@ -198,24 +226,28 @@ def __init__( def _purge_synapse_cache(self, maximum_storage_allowed_cache_gb=1): """ - Purge synapse cache if it exceeds a certain size. Default to 1GB. + Purge synapse cache if it exceeds a certain size. Default to 1GB. Args: - maximum_storage_allowed_cache_gb: the maximum storage allowed before purging cache. Default is 1 GB. + maximum_storage_allowed_cache_gb: the maximum storage allowed before purging cache. Default is 1 GB. """ # try clearing the cache # scan a directory and check size of files if os.path.exists(self.root_synapse_cache): - maximum_storage_allowed_cache_bytes = convert_gb_to_bytes(maximum_storage_allowed_cache_gb) + maximum_storage_allowed_cache_bytes = convert_gb_to_bytes( + maximum_storage_allowed_cache_gb + ) nbytes = get_dir_size(self.root_synapse_cache) dir_size_bytes = check_synapse_cache_size(directory=self.root_synapse_cache) # if 1 GB has already been taken, purge cache before 15 min if dir_size_bytes >= maximum_storage_allowed_cache_bytes: num_of_deleted_files = clear_synapse_cache(self.syn.cache, minutes=15) - logger.info(f'{num_of_deleted_files} files have been deleted from {self.root_synapse_cache}') + logger.info( + f"{num_of_deleted_files} files have been deleted from {self.root_synapse_cache}" + ) else: # on AWS, OS takes around 14-17% of our ephemeral storage (20GiB) # instead of guessing how much space that we left, print out .synapseCache here - logger.info(f'the total size of .synapseCache is: {nbytes} bytes') + logger.info(f"the total size of .synapseCache is: {nbytes} bytes") def _query_fileview(self): self._purge_synapse_cache() @@ -225,14 +257,14 @@ def _query_fileview(self): if self.project_scope: self.storageFileviewTable = self.syn.tableQuery( f"SELECT * FROM {self.storageFileview} WHERE projectId IN {tuple(self.project_scope + [''])}" - ).asDataFrame() + ).asDataFrame() else: # get data in administrative fileview for this pipeline self.storageFileviewTable = self.syn.tableQuery( "SELECT * FROM " + self.storageFileview ).asDataFrame() except SynapseHTTPError: - raise AccessCredentialsError(self.storageFileview) + raise AccessCredentialsError(self.storageFileview) @staticmethod def login(token=None, access_token=None): @@ -253,7 +285,9 @@ def login(token=None, access_token=None): syn = synapseclient.Synapse() syn.default_headers["Authorization"] = f"Bearer {access_token}" except synapseclient.core.exceptions.SynapseHTTPError: - raise ValueError("No access to resources. 
Please make sure that your token is correct") + raise ValueError( + "No access to resources. Please make sure that your token is correct" + ) else: # login using synapse credentials provided by user in .synapseConfig (default) file syn = synapseclient.Synapse(configPath=CONFIG.synapse_configuration_path) @@ -264,18 +298,18 @@ def missing_entity_handler(method): def wrapper(*args, **kwargs): try: return method(*args, **kwargs) - except(SynapseHTTPError) as ex: - str_message = str(ex).replace("\n","") - if 'trash' in str_message or 'does not exist' in str_message: + except SynapseHTTPError as ex: + str_message = str(ex).replace("\n", "") + if "trash" in str_message or "does not exist" in str_message: logging.warning(str_message) return None else: raise ex + return wrapper def getStorageFileviewTable(self): - """ Returns the storageFileviewTable obtained during initialization. - """ + """Returns the storageFileviewTable obtained during initialization.""" return self.storageFileviewTable def getPaginatedRestResults(self, currentUserId: str) -> Dict[str, str]: @@ -337,7 +371,7 @@ def getStorageProjects(self, project_scope: List = None) -> list[tuple[str, str] # find set of user projects that are also in this pipeline's storage projects set storageProjects = list(set(storageProjects) & set(currentUserProjects)) - + # Limit projects to scope if specified if project_scope: storageProjects = list(set(storageProjects) & set(project_scope)) @@ -346,7 +380,7 @@ def getStorageProjects(self, project_scope: List = None) -> list[tuple[str, str] raise Warning( f"There are no projects that the user has access to that match the criteria of the specified project scope: {project_scope}" ) - + # prepare a return list of project IDs and names projects = [] for projectId in storageProjects: @@ -420,20 +454,19 @@ def getFilesInStorageDataset( """ # select all files within a given storage dataset folder (top level folder in a Synapse storage project or folder marked with contentType = 'dataset') - walked_path = synapseutils.walk(self.syn, datasetId, includeTypes=["folder", "file"]) + walked_path = synapseutils.walk( + self.syn, datasetId, includeTypes=["folder", "file"] + ) file_list = [] # iterate over all results for dirpath, dirname, filenames in walked_path: - # iterate over all files in a folder for filename in filenames: - if (not "manifest" in filename[0] and not fileNames) or ( fileNames and filename[0] in fileNames ): - # don't add manifest to list of files unless it is specified in the list of specified fileNames; return all found files # except the manifest if no fileNames have been specified # TODO: refactor for clarity/maintainability @@ -448,39 +481,42 @@ def getFilesInStorageDataset( return file_list def _get_manifest_id(self, manifest: pd.DataFrame) -> str: - """If both censored and uncensored manifests are present, return uncensored manifest; if only one manifest is present, return manifest id of that manifest; if more than two manifests are present, return the manifest id of the first one. + """If both censored and uncensored manifests are present, return uncensored manifest; if only one manifest is present, return manifest id of that manifest; if more than two manifests are present, return the manifest id of the first one. 
Args:
             manifest: a dataframe containing name and id of manifests in a given asset view
-        Return:
+        Return:
             manifest_syn_id: id of a given censored or uncensored manifest
-        """
-        censored_regex=re.compile('.*censored.*')
-        censored = manifest['name'].str.contains(censored_regex)
+        """
+        censored_regex = re.compile(".*censored.*")
+        censored = manifest["name"].str.contains(censored_regex)
         if any(censored):
             # Try to use uncensored manifest first
-            not_censored=~censored
+            not_censored = ~censored
             if any(not_censored):
-                manifest_syn_id=manifest[not_censored]["id"][0]
+                manifest_syn_id = manifest[not_censored]["id"][0]
             # if only censored manifests are available, just use the first censored manifest
-            else:
+            else:
                 manifest_syn_id = manifest["id"][0]
-        #otherwise, use the first (implied only) version that exists
+        # otherwise, use the first (implied only) version that exists
         else:
             manifest_syn_id = manifest["id"][0]
-
+
         return manifest_syn_id
 
     def getDatasetManifest(
-        self, datasetId: str, downloadFile: bool = False, newManifestName: str='',
+        self,
+        datasetId: str,
+        downloadFile: bool = False,
+        newManifestName: str = "",
     ) -> Union[str, File]:
         """Gets the manifest associated with a given dataset.
 
         Args:
             datasetId: synapse ID of a storage dataset.
             downloadFile: boolean argument indicating if manifest file in dataset should be downloaded or not.
-            newManifestName: new name of a manifest that gets downloaded
+            newManifestName: new name of a manifest that gets downloaded
 
         Returns:
             manifest_syn_id (String): Synapse ID of existing manifest file.
@@ -492,104 +528,121 @@ def getDatasetManifest(
 
         # get a list of files containing the manifest for this dataset (if any)
         all_files = self.storageFileviewTable
-        # construct regex based on manifest basename in the config
-        manifest_re=re.compile(os.path.basename(self.manifest)+".*.[tc]sv")
+        # construct regex based on manifest basename in the config
+        manifest_re = re.compile(os.path.basename(self.manifest) + ".*.[tc]sv")
 
         # search manifest based on given manifest basename regex above
         # and return a dataframe containing name and id of manifests in a given asset view
         manifest = all_files[
-            (all_files['name'].str.contains(manifest_re,regex=True))
+            (all_files["name"].str.contains(manifest_re, regex=True))
             & (all_files["parentId"] == datasetId)
         ]
 
         manifest = manifest[["id", "name"]]
-
+
         # if there is no pre-existing manifest in the specified dataset
         if manifest.empty:
-            logger.warning(f"Could not find a manifest that fits basename {self.manifest} in asset view and dataset {datasetId}")
+            logger.warning(
+                f"Could not find a manifest that fits basename {self.manifest} in asset view and dataset {datasetId}"
+            )
             return ""
 
         # if there is an existing manifest
         else:
             manifest_syn_id = self._get_manifest_id(manifest)
-            if downloadFile:
+            if downloadFile:
                 md = ManifestDownload(self.syn, manifest_id=manifest_syn_id)
-                manifest_data = ManifestDownload.download_manifest(md, newManifestName=newManifestName, manifest_df=manifest)
-                ## TO DO: revisit how downstream code handle manifest_data. If the downstream code would break when manifest_data is an empty string,
-                ## then we should catch the error here without returning an empty string.
+                manifest_data = ManifestDownload.download_manifest(
+                    md, newManifestName=newManifestName, manifest_df=manifest
+                )
+                ## TO DO: revisit how downstream code handles manifest_data. If the downstream code would break when manifest_data is an empty string,
+                ## then we should catch the error here without returning an empty string.
if not manifest_data:
-                    logger.debug(f"No manifest data returned. Please check if you have successfully downloaded manifest: {manifest_syn_id}")
+                    logger.debug(
+                        f"No manifest data returned. Please check if you have successfully downloaded manifest: {manifest_syn_id}"
+                    )
                 return manifest_data
             return manifest_syn_id
 
-    def getDataTypeFromManifest(self, manifestId:str):
+    def getDataTypeFromManifest(self, manifestId: str):
         """Fetch a manifest and return data types of all columns
-        Args:
+        Args:
             manifestId: synapse ID of a manifest
         """
-        # get manifest file path
+        # get manifest file path
         manifest_filepath = self.syn.get(manifestId).path
 
-        # load manifest dataframe
-        manifest = load_df(manifest_filepath, preserve_raw_input=False, data_model=False)
+        # load manifest dataframe
+        manifest = load_df(
+            manifest_filepath, preserve_raw_input=False, data_model=False
+        )
 
         # convert the dataFrame to use best possible dtypes.
         manifest_new = manifest.convert_dtypes()
 
         # get data types of columns
-        result = manifest_new.dtypes.to_frame('dtypes').reset_index()
-
-        # return the result as a dictionary
-        result_dict = result.set_index('index')['dtypes'].astype(str).to_dict()
+        result = manifest_new.dtypes.to_frame("dtypes").reset_index()
 
+        # return the result as a dictionary
+        result_dict = result.set_index("index")["dtypes"].astype(str).to_dict()
         return result_dict
 
-    def _get_files_metadata_from_dataset(self, datasetId: str, only_new_files: bool, manifest:pd.DataFrame=None) -> Optional[dict]:
+    def _get_files_metadata_from_dataset(
+        self, datasetId: str, only_new_files: bool, manifest: pd.DataFrame = None
+    ) -> Optional[dict]:
         """retrieve file ids under a particular datasetId
 
         Args:
-            datasetId (str): a dataset id
-            only_new_files (bool): if only adding new files that are not already exist
-            manifest (pd.DataFrame): metadata manifest dataframe. Default to None.
+            datasetId (str): a dataset id
+            only_new_files (bool): if True, only add files that do not already exist in the manifest
+            manifest (pd.DataFrame): metadata manifest dataframe. Default to None.
Returns:
            a dictionary that contains filename and entityId under a given datasetId, or None if there are no files under the given dataset id
        """
        dataset_files = self.getFilesInStorageDataset(datasetId)
        if dataset_files:
-            dataset_file_names_id_dict = self._get_file_entityIds(dataset_files, only_new_files=only_new_files, manifest=manifest)
+            dataset_file_names_id_dict = self._get_file_entityIds(
+                dataset_files, only_new_files=only_new_files, manifest=manifest
+            )
             return dataset_file_names_id_dict
        else:
            return None
 
-    def add_entity_id_and_filename(self, datasetId: str, manifest: pd.DataFrame) -> pd.DataFrame:
+    def add_entity_id_and_filename(
+        self, datasetId: str, manifest: pd.DataFrame
+    ) -> pd.DataFrame:
        """add entityid and filename column to an existing manifest assuming entityId column is not already present
 
        Args:
            datasetId (str): dataset syn id
-            manifest (pd.DataFrame): existing manifest dataframe, assuming this dataframe does not have an entityId column and Filename column is present but completely empty
+            manifest (pd.DataFrame): existing manifest dataframe, assuming this dataframe does not have an entityId column and Filename column is present but completely empty
 
        Returns:
-            pd.DataFrame: returns a pandas dataframe
+            pd.DataFrame: returns a pandas dataframe
        """
-        # get file names and entity ids of a given dataset
-        dataset_files_dict = self._get_files_metadata_from_dataset(datasetId, only_new_files=False)
+        # get file names and entity ids of a given dataset
+        dataset_files_dict = self._get_files_metadata_from_dataset(
+            datasetId, only_new_files=False
+        )
 
-        if dataset_files_dict:
-            # turn manifest dataframe back to a dictionary for operation
-            manifest_dict = manifest.to_dict('list')
+        if dataset_files_dict:
+            # turn manifest dataframe back to a dictionary for operation
+            manifest_dict = manifest.to_dict("list")
 
            # update Filename column
            # add entityId column to the end
            manifest_dict.update(dataset_files_dict)
-
-            # if the component column exists in existing manifest, fill up that column
+
+            # if the component column exists in existing manifest, fill up that column
            if "Component" in manifest_dict.keys():
-                manifest_dict["Component"] = manifest_dict["Component"] * max(1, len(manifest_dict["Filename"]))
-
+                manifest_dict["Component"] = manifest_dict["Component"] * max(
+                    1, len(manifest_dict["Filename"])
+                )
+
            # turn dictionary back to a dataframe
-            manifest_df_index = pd.DataFrame.from_dict(manifest_dict, orient='index')
+            manifest_df_index = pd.DataFrame.from_dict(manifest_dict, orient="index")
            manifest_df_updated = manifest_df_index.transpose()
 
            # fill na with empty string
Args: datasetId (str): dataset syn id @@ -620,20 +675,24 @@ def fill_in_entity_id_filename(self, datasetId: str, manifest: pd.DataFrame) -> # the columns Filename and entityId are assumed to be present in manifest schema # TODO: use idiomatic panda syntax if dataset_files: - new_files = self._get_file_entityIds(dataset_files=dataset_files, only_new_files=True, manifest=manifest) + new_files = self._get_file_entityIds( + dataset_files=dataset_files, only_new_files=True, manifest=manifest + ) # update manifest so that it contains new dataset files new_files = pd.DataFrame(new_files) manifest = ( - pd.concat([manifest, new_files], sort=False) - .reset_index() - .drop("index", axis=1) + pd.concat([manifest, new_files], sort=False) + .reset_index() + .drop("index", axis=1) ) - manifest = manifest.fillna("") + manifest = manifest.fillna("") return dataset_files, manifest - - def updateDatasetManifestFiles(self, dmge: DataModelGraphExplorer, datasetId: str, store:bool = True) -> Union[Tuple[str, pd.DataFrame], None]: + + def updateDatasetManifestFiles( + self, dmge: DataModelGraphExplorer, datasetId: str, store: bool = True + ) -> Union[Tuple[str, pd.DataFrame], None]: """Fetch the names and entity IDs of all current files in dataset in store, if any; update dataset's manifest with new files, if any. Args: @@ -644,7 +703,7 @@ def updateDatasetManifestFiles(self, dmge: DataModelGraphExplorer, datasetId: st Returns: - Synapse ID of updated manifest and Pandas dataframe containing the updated manifest. + Synapse ID of updated manifest and Pandas dataframe containing the updated manifest. If there is no existing manifest return None """ @@ -670,15 +729,21 @@ def updateDatasetManifestFiles(self, dmge: DataModelGraphExplorer, datasetId: st manifest.to_csv(manifest_filepath, index=False) # store manifest and update associated metadata with manifest on Synapse - manifest_id = self.associateMetadataWithFiles(dmge, manifest_filepath, datasetId) + manifest_id = self.associateMetadataWithFiles( + dmge, manifest_filepath, datasetId + ) - return manifest_id, manifest - - def _get_file_entityIds(self, dataset_files: List, only_new_files: bool = False, manifest: pd.DataFrame = None): + + def _get_file_entityIds( + self, + dataset_files: List, + only_new_files: bool = False, + manifest: pd.DataFrame = None, + ): """ Get a dictionary of files in a dataset. Either files that are not in the current manifest or all files - + Args: manifest: metadata manifest dataset_file: List of all files in a dataset @@ -693,7 +758,7 @@ def _get_file_entityIds(self, dataset_files: List, only_new_files: bool = False raise UnboundLocalError( "No manifest was passed in, a manifest is required when `only_new_files` is True." 
)
-
+
         # find new files (that are not in the current manifest) if any
         for file_id, file_name in dataset_files:
             if not file_id in manifest["entityId"].values:
@@ -725,16 +790,16 @@ def getProjectManifests(
             TODO: Return manifest URI instead of Synapse ID for interoperability with other implementations of a store interface
         """
-        component=None
-        entity=None
+        component = None
+        entity = None
         manifests = []
 
         datasets = self.getStorageDatasetsInProject(projectId)
 
-        for (datasetId, datasetName) in datasets:
+        for datasetId, datasetName in datasets:
             # encode information about the manifest in a simple list (so that R clients can unpack it)
             # eventually can serialize differently
-
+
             # Get synID of manifest for a dataset
             manifestId = self.getDatasetManifest(datasetId)
 
@@ -743,48 +808,53 @@ def getProjectManifests(
                 annotations = self.getFileAnnotations(manifestId)
 
                 # If manifest has annotations specifying component, use that
-                if annotations and 'Component' in annotations:
-                    component = annotations['Component']
+                if annotations and "Component" in annotations:
+                    component = annotations["Component"]
                     entity = self.syn.get(manifestId, downloadFile=False)
                     manifest_name = entity["properties"]["name"]
 
                 # otherwise download the manifest and parse for information
-                elif not annotations or 'Component' not in annotations:
+                elif not annotations or "Component" not in annotations:
                     logging.debug(
                         f"No component annotations have been found for manifest {manifestId}. "
                         "The manifest will be downloaded and parsed instead. "
                         "For increased speed, add component annotations to manifest."
-                    )
+                    )
 
-                    manifest_info = self.getDatasetManifest(datasetId,downloadFile=True)
+                    manifest_info = self.getDatasetManifest(
+                        datasetId, downloadFile=True
+                    )
                     manifest_name = manifest_info["properties"].get("name", "")
 
                     if not manifest_name:
-                        logger.error(f'Failed to download manifests from {datasetId}')
+                        logger.error(f"Failed to download manifests from {datasetId}")
 
                     manifest_path = manifest_info["path"]
 
                     manifest_df = load_df(manifest_path)
 
                     # Get component from component column if it exists
-                    if "Component" in manifest_df and not manifest_df["Component"].empty:
-                        list(set(manifest_df['Component']))
+                    if (
+                        "Component" in manifest_df
+                        and not manifest_df["Component"].empty
+                    ):
+                        list(set(manifest_df["Component"]))
                         component = list(set(manifest_df["Component"]))
 
-                        #Added to address issues raised during DCA testing
-                        if '' in component:
-                            component.remove('')
+                        # Added to address issues raised during DCA testing
+                        if "" in component:
+                            component.remove("")
 
                         if len(component) == 1:
                             component = component[0]
                         elif len(component) > 1:
                             logging.warning(
-                                f"Manifest {manifestId} is composed of multiple components. Schematic does not support mulit-component manifests at this time."
-                                "Behavior of manifests with multiple components is undefined"
+                                f"Manifest {manifestId} is composed of multiple components. Schematic does not support multi-component manifests at this time."
+                                "Behavior of manifests with multiple components is undefined"
                             )
                 else:
                     manifest_name = ""
-                    component = None
+                    component = None
                 if component:
                     manifest = (
                         (datasetId, datasetName),
@@ -792,7 +862,9 @@ def getProjectManifests(
                         (component, component),
                     )
                 elif manifestId:
-                    logging.debug(f"Manifest {manifestId} does not have an associated Component")
+                    logging.debug(
+                        f"Manifest {manifestId} does not have an associated Component"
+                    )
                     manifest = (
                         (datasetId, datasetName),
                         (manifestId, manifest_name),
@@ -807,10 +879,12 @@ def getProjectManifests(
 
             if manifest:
                 manifests.append(manifest)
-
+
         return manifests
 
-    def upload_project_manifests_to_synapse(self, dmge: DataModelGraphExplorer, projectId: str) -> List[str]:
+    def upload_project_manifests_to_synapse(
+        self, dmge: DataModelGraphExplorer, projectId: str
+    ) -> List[str]:
         """Upload all metadata manifest files across all datasets in a specified project as tables in Synapse.
 
         Returns: String of all the manifest_table_ids of all the manifests that have been loaded.
@@ -820,7 +894,7 @@ def upload_project_manifests_to_synapse(self, dmge: DataModelGraphExplorer, proj
         manifest_loaded = []
         datasets = self.getStorageDatasetsInProject(projectId)
 
-        for (datasetId, datasetName) in datasets:
+        for datasetId, datasetName in datasets:
             # encode information about the manifest in a simple list (so that R clients can unpack it)
             # eventually can serialize differently
 
@@ -832,20 +906,27 @@ def upload_project_manifests_to_synapse(self, dmge: DataModelGraphExplorer, proj
                 manifest_name = manifest_info["properties"]["name"]
                 manifest_path = manifest_info["path"]
                 manifest_df = load_df(manifest_path)
-                manifest_table_id = uploadDB(dmge=dmge, manifest=manifest, datasetId=datasetId, table_name=datasetName)
+                manifest_table_id = self.uploadDB(
+                    dmge=dmge,
+                    manifest=manifest_df,
+                    datasetId=datasetId,
+                    table_name=datasetName,
+                )
                 manifest_loaded.append(datasetName)
         return manifest_loaded
 
-    def upload_annotated_project_manifests_to_synapse(self, projectId:str, path_to_json_ld: str, dry_run: bool = False) -> List[str]:
-        '''
+    def upload_annotated_project_manifests_to_synapse(
+        self, projectId: str, path_to_json_ld: str, dry_run: bool = False
+    ) -> List[str]:
+        """
         Purpose:
             For all manifests in a project, upload them as a table and add annotations from the manifest csv.
             Assumes the manifest is already present as a CSV in a dataset in the project.
- ''' + """ # Instantiate DataModelParser - data_model_parser = DataModelParser(path_to_data_model = path_to_json_ld) - #Parse Model + data_model_parser = DataModelParser(path_to_data_model=path_to_json_ld) + # Parse Model parsed_data_model = data_model_parser.parse_model() # Instantiate DataModelGraph @@ -854,13 +935,13 @@ def upload_annotated_project_manifests_to_synapse(self, projectId:str, path_to_j # Generate graph graph_data_model = data_model_grapher.generate_data_model_graph() - #Instantiate DataModelGraphExplorer + # Instantiate DataModelGraphExplorer dmge = DataModelGraphExplorer(graph_data_model) manifests = [] manifest_loaded = [] datasets = self.getStorageDatasetsInProject(projectId) - for (datasetId, datasetName) in datasets: + for datasetId, datasetName in datasets: # encode information about the manifest in a simple list (so that R clients can unpack it) # eventually can serialize differently @@ -873,15 +954,26 @@ def upload_annotated_project_manifests_to_synapse(self, projectId:str, path_to_j manifest_id = manifest_info["properties"]["id"] manifest_name = manifest_info["properties"]["name"] manifest_path = manifest_info["path"] - manifest = ((datasetId, datasetName), (manifest_id, manifest_name), ("", "")) + manifest = ( + (datasetId, datasetName), + (manifest_id, manifest_name), + ("", ""), + ) if not dry_run: - manifest_syn_id = self.associateMetadataWithFiles(dmge, manifest_path, datasetId, manifest_record_type='table') + manifest_syn_id = self.associateMetadataWithFiles( + dmge, manifest_path, datasetId, manifest_record_type="table" + ) manifest_loaded.append(manifest) - - return manifests, manifest_loaded + return manifests, manifest_loaded - def move_entities_to_new_project(self, projectId: str, newProjectId: str, returnEntities: bool = False, dry_run: bool = False): + def move_entities_to_new_project( + self, + projectId: str, + newProjectId: str, + returnEntities: bool = False, + dry_run: bool = False, + ): """ For each manifest csv in a project, look for all the entitiy ids that are associated. Look up the entitiy in the files, move the entity to new project. @@ -891,7 +983,7 @@ def move_entities_to_new_project(self, projectId: str, newProjectId: str, return manifest_loaded = [] datasets = self.getStorageDatasetsInProject(projectId) if datasets: - for (datasetId, datasetName) in datasets: + for datasetId, datasetName in datasets: # encode information about the manifest in a simple list (so that R clients can unpack it) # eventually can serialize differently @@ -905,35 +997,48 @@ def move_entities_to_new_project(self, projectId: str, newProjectId: str, return manifest_path = manifest_info["path"] manifest_df = load_df(manifest_path) - manifest = ((datasetId, datasetName), (manifest_id, manifest_name), ("", "")) + manifest = ( + (datasetId, datasetName), + (manifest_id, manifest_name), + ("", ""), + ) manifest_loaded.append(manifest) annotation_entities = self.storageFileviewTable[ - (self.storageFileviewTable['id'].isin(manifest_df['entityId'])) - & (self.storageFileviewTable['type'] == 'folder') - ]['id'] + (self.storageFileviewTable["id"].isin(manifest_df["entityId"])) + & (self.storageFileviewTable["type"] == "folder") + ]["id"] if returnEntities: - for entityId in annotation_entities: + for entityId in annotation_entities: if not dry_run: self.syn.move(entityId, datasetId) else: - logging.info(f"{entityId} will be moved to folder {datasetId}.") - else: + logging.info( + f"{entityId} will be moved to folder {datasetId}." 
+ ) + else: # generate project folder - archive_project_folder = Folder(projectId+'_archive', parent = newProjectId) + archive_project_folder = Folder( + projectId + "_archive", parent=newProjectId + ) archive_project_folder = self.syn.store(archive_project_folder) - + # generate dataset folder - dataset_archive_folder = Folder("_".join([datasetId,datasetName,'archive']), parent = archive_project_folder.id) - dataset_archive_folder = self.syn.store(dataset_archive_folder) + dataset_archive_folder = Folder( + "_".join([datasetId, datasetName, "archive"]), + parent=archive_project_folder.id, + ) + dataset_archive_folder = self.syn.store(dataset_archive_folder) for entityId in annotation_entities: # move entities to folder if not dry_run: self.syn.move(entityId, dataset_archive_folder.id) else: - logging.info(f"{entityId} will be moved to folder {dataset_archive_folder.id}.") + logging.info( + f"{entityId} will be moved to folder {dataset_archive_folder.id}." + ) else: raise LookupError( f"No datasets were found in the specified project: {projectId}. Re-check specified master_fileview in CONFIG and retry." @@ -957,7 +1062,7 @@ def _get_tables(self, datasetId: str = None, projectId: str = None) -> List[Tabl project = projectId elif datasetId: project = self.syn.get(self.getDatasetProject(datasetId)) - + return list(self.syn.getChildren(project, includeTypes=["table"])) def get_table_info(self, datasetId: str = None, projectId: str = None) -> List[str]: @@ -966,31 +1071,32 @@ def get_table_info(self, datasetId: str = None, projectId: str = None) -> List[s Returns: list[str]: A list of table names """ - tables = self._get_tables(datasetId = datasetId, projectId = projectId) + tables = self._get_tables(datasetId=datasetId, projectId=projectId) if tables: return {table["name"]: table["id"] for table in tables} - else: - return {None:None} + else: + return {None: None} @missing_entity_handler - def uploadDB(self, - dmge: DataModelGraphExplorer, - manifest: pd.DataFrame, - datasetId: str, - table_name: str, - restrict: bool = False, - useSchemaLabel: bool = True, - table_manipulation: str = 'replace', - ): + def uploadDB( + self, + dmge: DataModelGraphExplorer, + manifest: pd.DataFrame, + datasetId: str, + table_name: str, + restrict: bool = False, + useSchemaLabel: bool = True, + table_manipulation: str = "replace", + ): """ Method to upload a database to an asset store. 
In synapse, this will upload a metadata table
-
+
         Args:
             dmge: DataModelGraphExplorer object
             manifest: pd.Df manifest to upload
             datasetId: synID of the dataset for the manifest
             table_name: name of the table to be uploaded
-            restrict: bool, whether or not the manifest contains sensitive data that will need additional access restrictions
+            restrict: bool, whether or not the manifest contains sensitive data that will need additional access restrictions
             useSchemaLabel: bool whether to use schemaLabel (True) or display label (False)
             existingTableId: str of the synId of the existing table, if one already exists
             table_manipulation: str, 'replace' or 'upsert', in the case where a manifest already exists, should the new metadata replace the existing (replace) or be added to it (upsert)
 
         Returns:
             manifest_table_id: synID of the uploaded table
             manifest: the original manifest
             table_manifest: manifest formatted appropriately for the table
-
+
         """
-
-        col_schema, table_manifest = self.formatDB(dmge=dmge, manifest=manifest, useSchemaLabel=useSchemaLabel)
+        col_schema, table_manifest = self.formatDB(
+            dmge=dmge, manifest=manifest, useSchemaLabel=useSchemaLabel
+        )
 
-        manifest_table_id = self.buildDB(datasetId, table_name, col_schema, table_manifest, table_manipulation, dmge, restrict,)
+        manifest_table_id = self.buildDB(
+            datasetId,
+            table_name,
+            col_schema,
+            table_manifest,
+            table_manipulation,
+            dmge,
+            restrict,
+        )
         return manifest_table_id, manifest, table_manifest
 
     def formatDB(self, dmge, manifest, useSchemaLabel):
         """
         Method to format a manifest appropriately for upload as table
-
+
         Args:
             dmge: DataModelGraphExplorer object
             manifest: pd.Df manifest to upload
             useSchemaLabel: bool whether to use schemaLabel (True) or display label (False)
 
         Returns:
             col_schema: schema for table columns: type, size, etc
             table_manifest: formatted manifest
-
+
         """
         # Rename the manifest columns to display names to match fileview
 
-        blacklist_chars = ['(', ')', '.', ' ', '-']
+        blacklist_chars = ["(", ")", ".", " ", "-"]
         manifest_columns = manifest.columns.tolist()
 
-        table_manifest=deepcopy(manifest)
+        table_manifest = deepcopy(manifest)
 
         if useSchemaLabel:
             cols = [
-                get_class_label_from_display_name(
-                    str(col)
-                ).translate({ord(x): '' for x in blacklist_chars})
+                get_class_label_from_display_name(str(col)).translate(
+                    {ord(x): "" for x in blacklist_chars}
+                )
                 for col in manifest_columns
             ]
 
-            cols = list(map(lambda x: x.replace('EntityId', 'entityId'), cols))
+            cols = list(map(lambda x: x.replace("EntityId", "entityId"), cols))
 
             # Reset column names in table manifest
             table_manifest.columns = cols
 
-        #move entity id to end of df
-        entity_col = table_manifest.pop('entityId')
-        table_manifest.insert(len(table_manifest.columns), 'entityId', entity_col)
+        # move entity id to end of df
+        entity_col = table_manifest.pop("entityId")
+        table_manifest.insert(len(table_manifest.columns), "entityId", entity_col)
 
         # Get the column schema
         col_schema = as_table_columns(table_manifest)
 
         # Set Id column length to 64 (for some reason not being auto set.)
for i, col in enumerate(col_schema): - if col['name'].lower() == 'id': - col_schema[i]['maximumSize'] = 64 + if col["name"].lower() == "id": + col_schema[i]["maximumSize"] = 64 return col_schema, table_manifest - def buildDB(self, - datasetId: str, - table_name: str, + def buildDB( + self, + datasetId: str, + table_name: str, col_schema: List, table_manifest: pd.DataFrame, table_manipulation: str, - dmge: DataModelGraphExplorer, + dmge: DataModelGraphExplorer, restrict: bool = False, - - ): + ): """ Method to construct the table appropriately: create new table, replace existing, or upsert new into existing - Calls TableOperations class to execute - + Calls TableOperations class to execute + Args: datasetId: synID of the dataset for the manifest table_name: name of the table to be uploaded col_schema: schema for table columns: type, size, etc from `formatDB` table_manifest: formatted manifest that can be uploaded as a table table_manipulation: str, 'replace' or 'upsert', in the case where a manifest already exists, should the new metadata replace the existing (replace) or be added to it (upsert) - restrict: bool, whether or not the manifest contains sensitive data that will need additional access restrictions + restrict: bool, whether or not the manifest contains sensitive data that will need additional access restrictions Returns: manifest_table_id: synID of the uploaded table - + """ - table_info = self.get_table_info(datasetId = datasetId) + table_info = self.get_table_info(datasetId=datasetId) # Put table manifest onto synapse - schema = Schema(name=table_name, columns=col_schema, parent=self.getDatasetProject(datasetId)) + schema = Schema( + name=table_name, + columns=col_schema, + parent=self.getDatasetProject(datasetId), + ) if table_name in table_info: existingTableId = table_info[table_name] else: existingTableId = None - tableOps = TableOperations( - synStore = self, - tableToLoad = table_manifest, - tableName = table_name, - datasetId = datasetId, - existingTableId = existingTableId, - restrict = restrict, - ) + synStore=self, + tableToLoad=table_manifest, + tableName=table_name, + datasetId=datasetId, + existingTableId=existingTableId, + restrict=restrict, + ) if not table_manipulation or table_name not in table_info.keys(): - manifest_table_id = tableOps.createTable(columnTypeDict=col_schema, specifySchema=True,) + manifest_table_id = tableOps.createTable( + columnTypeDict=col_schema, + specifySchema=True, + ) elif table_name in table_info.keys() and table_info[table_name]: - - if table_manipulation.lower() == 'replace': - manifest_table_id = tableOps.replaceTable(specifySchema = True, columnTypeDict=col_schema,) - elif table_manipulation.lower() == 'upsert': - manifest_table_id = tableOps.upsertTable(dmge=dmge,) - elif table_manipulation.lower() == 'update': + if table_manipulation.lower() == "replace": + manifest_table_id = tableOps.replaceTable( + specifySchema=True, + columnTypeDict=col_schema, + ) + elif table_manipulation.lower() == "upsert": + manifest_table_id = tableOps.upsertTable( + dmge=dmge, + ) + elif table_manipulation.lower() == "update": manifest_table_id = tableOps.updateTable() - - - if table_manipulation and table_manipulation.lower() == 'upsert': - existing_tables=self.get_table_info(datasetId=datasetId) - tableId=existing_tables[table_name] + if table_manipulation and table_manipulation.lower() == "upsert": + existing_tables = self.get_table_info(datasetId=datasetId) + tableId = existing_tables[table_name] annos = self.syn.get_annotations(tableId) - 
annos['primary_key'] = table_manifest['Component'][0] + "_id" + annos["primary_key"] = table_manifest["Component"][0] + "_id" annos = self.syn.set_annotations(annos) return manifest_table_id - - def upload_manifest_file(self, manifest, metadataManifestPath, datasetId, restrict_manifest, component_name = ''): + def upload_manifest_file( + self, + manifest, + metadataManifestPath, + datasetId, + restrict_manifest, + component_name="", + ): # Update manifest to have the new entityId column manifest.to_csv(metadataManifestPath, index=False) # store manifest to Synapse as a CSV # update file name - file_name_full = metadataManifestPath.split('/')[-1] - file_extension = file_name_full.split('.')[-1] + file_name_full = metadataManifestPath.split("/")[-1] + file_extension = file_name_full.split(".")[-1] # Differentiate "censored" and "uncensored" manifest - if "censored" in file_name_full: - file_name_new = os.path.basename(CONFIG.synapse_manifest_basename) + "_" + component_name + "_censored" + '.' + file_extension - else: - file_name_new = os.path.basename(CONFIG.synapse_manifest_basename) + "_" + component_name + '.' + file_extension + if "censored" in file_name_full: + file_name_new = ( + os.path.basename(CONFIG.synapse_manifest_basename) + + "_" + + component_name + + "_censored" + + "." + + file_extension + ) + else: + file_name_new = ( + os.path.basename(CONFIG.synapse_manifest_basename) + + "_" + + component_name + + "." + + file_extension + ) manifestSynapseFile = File( metadataManifestPath, description="Manifest for dataset " + datasetId, parent=datasetId, - name=file_name_new + name=file_name_new, + ) + + manifest_synapse_file_id = self.syn.store( + manifestSynapseFile, isRestricted=restrict_manifest + ).id + changeFileMetaData( + syn=self.syn, entity=manifest_synapse_file_id, downloadAs=file_name_new ) - manifest_synapse_file_id = self.syn.store(manifestSynapseFile, isRestricted = restrict_manifest).id - changeFileMetaData(syn = self.syn, entity = manifest_synapse_file_id, downloadAs = file_name_new) - return manifest_synapse_file_id @missing_entity_handler @@ -1160,11 +1305,12 @@ def format_row_annotations(self, dmge, row, entityId, hideBlanks): # this could create a divergence between manifest column and annotations. this should be ok for most use cases. 
# columns with special characters are outside of the schema
         metadataSyn = {}
-        blacklist_chars = ['(', ')', '.', ' ', '-']
-
-        for k, v in row.to_dict().items():
+        blacklist_chars = ["(", ")", ".", " ", "-"]
 
-            keySyn = get_class_label_from_display_name(str(k)).translate({ord(x): '' for x in blacklist_chars})
+        for k, v in row.to_dict().items():
+            keySyn = get_class_label_from_display_name(str(k)).translate(
+                {ord(x): "" for x in blacklist_chars}
+            )
 
             # Skip `Filename` and `ETag` columns when setting annotations
             if keySyn in ["Filename", "ETag", "eTag"]:
@@ -1181,45 +1327,51 @@ def format_row_annotations(self, dmge, row, entityId, hideBlanks):
             metadataSyn[keySyn] = v
 
         # set annotation(s) for the various objects/items in a dataset on Synapse
         annos = self.syn.get_annotations(entityId)
-        csv_list_regex=comma_separated_list_regex()
+        csv_list_regex = comma_separated_list_regex()
+
         for anno_k, anno_v in metadataSyn.items():
-
             # Remove keys with nan or empty string values from dict of annotations to be uploaded
             # if present on current data annotation
-            if hideBlanks and (anno_v == '' or (isinstance(anno_v,float) and np.isnan(anno_v))):
+            if hideBlanks and (
+                anno_v == "" or (isinstance(anno_v, float) and np.isnan(anno_v))
+            ):
                 annos.pop(anno_k) if anno_k in annos.keys() else annos
             # Otherwise save annotation as appropriate
             else:
-                if isinstance(anno_v,float) and np.isnan(anno_v):
-                    annos[anno_k] = ""
-                elif isinstance(anno_v,str) and re.fullmatch(csv_list_regex, anno_v) and rule_in_rule_list('list', dmge.get_node_validation_rules(anno_k)):
+                if isinstance(anno_v, float) and np.isnan(anno_v):
+                    annos[anno_k] = ""
+                elif (
+                    isinstance(anno_v, str)
+                    and re.fullmatch(csv_list_regex, anno_v)
+                    and rule_in_rule_list(
+                        "list", dmge.get_node_validation_rules(anno_k)
+                    )
+                ):
                     annos[anno_k] = anno_v.split(",")
                 else:
                     annos[anno_k] = anno_v
-
+
         return annos
 
     @missing_entity_handler
     def format_manifest_annotations(self, manifest, manifest_synapse_id):
-        '''
+        """
         Set annotations for the manifest (as a whole) so they can be applied to the manifest table or csv.
         For now just getting the Component.
-        '''
-
+        """
+
         entity = self.syn.get(manifest_synapse_id, downloadFile=False)
         is_file = entity.concreteType.endswith(".FileEntity")
         is_table = entity.concreteType.endswith(".TableEntity")
 
         if is_file:
-
             # Get file metadata
             metadata = self.getFileAnnotations(manifest_synapse_id)
 
             # If there is a defined component add it to the metadata.
-            if 'Component' in manifest.columns:
+            if "Component" in manifest.columns:
                 # Gather component information
-                component = manifest['Component'].unique()
-
+                component = manifest["Component"].unique()
+
                 # Double check that only a single component is listed, else raise an error.
try: len(component) == 1 @@ -1229,12 +1381,12 @@ def format_manifest_annotations(self, manifest, manifest_synapse_id): ) from err # Add component to metadata - metadata['Component'] = component[0] - + metadata["Component"] = component[0] + elif is_table: # Get table metadata metadata = self.getTableAnnotations(manifest_synapse_id) - + # Get annotations annos = self.syn.get_annotations(manifest_synapse_id) @@ -1243,6 +1395,7 @@ def format_manifest_annotations(self, manifest, manifest_synapse_id): annos[annos_k] = annos_v return annos + ''' def annotate_upload_manifest_table(self, manifest, datasetId, metadataManifestPath, useSchemaLabel: bool = True, hideBlanks: bool = False, restrict_manifest = False): @@ -1320,7 +1473,7 @@ def annotate_upload_manifest_table(self, manifest, datasetId, metadataManifestPa return manifest_synapse_table_id ''' - def _read_manifest(self, metadataManifestPath:str) -> pd.DataFrame: + def _read_manifest(self, metadataManifestPath: str) -> pd.DataFrame: """Helper function to read in provided manifest as a pandas DataFrame for subsequent downstream processing. Args: metadataManifestPath (str): path where manifest is stored @@ -1331,17 +1484,21 @@ def _read_manifest(self, metadataManifestPath:str) -> pd.DataFrame: """ # read new manifest csv try: - load_args={ - "dtype":"string", + load_args = { + "dtype": "string", } - manifest = load_df(metadataManifestPath, preserve_raw_input = False, **load_args) + manifest = load_df( + metadataManifestPath, preserve_raw_input=False, **load_args + ) except FileNotFoundError as err: raise FileNotFoundError( f"No manifest file was found at this path: {metadataManifestPath}" ) from err return manifest - def _add_id_columns_to_manifest(self, manifest: pd.DataFrame, dmge: DataModelGraphExplorer): + def _add_id_columns_to_manifest( + self, manifest: pd.DataFrame, dmge: DataModelGraphExplorer + ): """Helper function to add id and entityId columns to the manifest if they do not already exist, Fill id values per row. Args: Manifest loaded as a pd.Dataframe @@ -1353,22 +1510,24 @@ def _add_id_columns_to_manifest(self, manifest: pd.DataFrame, dmge: DataModelGra if not col_in_dataframe("Id", manifest): # See if schema has `Uuid` column specified try: - uuid_col_in_schema = dmge.is_class_in_schema('Uuid') or dmge.is_class_in_schema('uuid') - except (KeyError): + uuid_col_in_schema = dmge.is_class_in_schema( + "Uuid" + ) or dmge.is_class_in_schema("uuid") + except KeyError: uuid_col_in_schema = False # Rename `Uuid` column if it wasn't specified in the schema if col_in_dataframe("Uuid", manifest) and not uuid_col_in_schema: - manifest.rename(columns={'Uuid': 'Id'}, inplace=True) + manifest.rename(columns={"Uuid": "Id"}, inplace=True) # If no `Uuid` column exists or it is specified in the schema, create a new `Id` column else: - manifest["Id"] = '' + manifest["Id"] = "" - for idx,row in manifest.iterrows(): + for idx, row in manifest.iterrows(): if not row["Id"]: gen_uuid = str(uuid.uuid4()) row["Id"] = gen_uuid - manifest.loc[idx, 'Id'] = gen_uuid + manifest.loc[idx, "Id"] = gen_uuid # add entityId as a column if not already there or # fill any blanks with an empty string. @@ -1388,12 +1547,12 @@ def _generate_table_name(self, manifest): component_name (str): Name of the manifest component (if applicable) """ # Create table name here. 
- if 'Component' in manifest.columns:
-            component_name = manifest['Component'][0].lower()
-            table_name = component_name + '_synapse_storage_manifest_table'
+        if "Component" in manifest.columns:
+            component_name = manifest["Component"][0].lower()
+            table_name = component_name + "_synapse_storage_manifest_table"
         else:
-            component_name = ''
-            table_name = 'synapse_storage_manifest_table'
+            component_name = ""
+            table_name = "synapse_storage_manifest_table"
         return table_name, component_name
 
     def _add_annotations(self, dmge, row, entityId, hideBlanks):
@@ -1410,7 +1569,7 @@ def _add_annotations(self, dmge, row, entityId, hideBlanks):
         annos = self.format_row_annotations(dmge, row, entityId, hideBlanks)
 
         if annos:
-            # Store annotations for an entity folder
+            # Store annotations for an entity folder
             self.syn.set_annotations(annos)
         return
 
@@ -1424,7 +1583,7 @@ def _create_entity_id(self, idx, row, manifest, datasetId):
         Returns:
             manifest (pd.DataFrame): manifest with entityId added to the appropriate row
             entityId (str): Generated Entity Id.
-
+
         """
         rowEntity = Folder(str(uuid.uuid4()), parent=datasetId)
         rowEntity = self.syn.store(rowEntity)
@@ -1434,15 +1593,15 @@ def _create_entity_id(self, idx, row, manifest, datasetId):
         return manifest, entityId
 
     def add_annotations_to_entities_files(
-        self,
-        dmge,
-        manifest,
-        manifest_record_type,
-        datasetId,
-        hideBlanks,
-        manifest_synapse_table_id=''
-    ):
-        '''Depending on upload type add Ids to entityId row. Add anotations to connected files.
+        self,
+        dmge,
+        manifest,
+        manifest_record_type,
+        datasetId,
+        hideBlanks,
+        manifest_synapse_table_id="",
+    ):
+        """Depending on upload type add Ids to entityId row. Add annotations to connected files.
         Args:
             dmge: DataModelGraphExplorer Object
             manifest (pd.DataFrame): loaded df containing user supplied data.
             manifest_record_type: valid values are 'entity', 'table' or 'both'. Specifies whether to create entity ids and folders for each row in a manifest, a Synapse table to house the entire manifest or do both.
             datasetId (str): synapse ID of folder containing the dataset
            hideBlanks: Boolean flag that does not upload annotation keys with blank values when true. Uploads Annotation keys with empty string values when false.
            manifest_synapse_table_id (str): Default is an empty string ''.
        Returns:
            manifest (pd.DataFrame): modified to add entityId as appropriate.
- '''
+        """
         # Expected behavior is to annotate files if `Filename` is present regardless of `-mrt` setting
-        if 'filename' in [col.lower() for col in manifest.columns]:
+        if "filename" in [col.lower() for col in manifest.columns]:
             # get current list of files and store as dataframe
             dataset_files = self.getFilesInStorageDataset(datasetId)
-            files_and_entityIds = self._get_file_entityIds(dataset_files=dataset_files, only_new_files=False)
+            files_and_entityIds = self._get_file_entityIds(
+                dataset_files=dataset_files, only_new_files=False
+            )
             file_df = pd.DataFrame(files_and_entityIds)
-
+
             # Merge dataframes to add entityIds
-            manifest = manifest.merge(file_df, how = 'left', on='Filename', suffixes=['_x',None]).drop('entityId_x',axis=1)
+            manifest = manifest.merge(
+                file_df, how="left", on="Filename", suffixes=["_x", None]
+            ).drop("entityId_x", axis=1)
 
         # Fill `entityId` for each row if missing and annotate entity as appropriate
         for idx, row in manifest.iterrows():
-            if not row["entityId"] and (manifest_record_type == 'file_and_entities' or
-                manifest_record_type == 'table_file_and_entities'):
-                manifest, entityId = self._create_entity_id(idx, row, manifest, datasetId)
-            elif not row["entityId"] and manifest_record_type == 'table_and_file':
-                # If not using entityIds, fill with manifest_table_id so
+            if not row["entityId"] and (
+                manifest_record_type == "file_and_entities"
+                or manifest_record_type == "table_file_and_entities"
+            ):
+                manifest, entityId = self._create_entity_id(
+                    idx, row, manifest, datasetId
+                )
+            elif not row["entityId"] and manifest_record_type == "table_and_file":
+                # If not using entityIds, fill with manifest_table_id so
                 row["entityId"] = manifest_synapse_table_id
                 manifest.loc[idx, "entityId"] = manifest_synapse_table_id
-                entityId = ''
+                entityId = ""
             else:
                 # get the file id of the file to annotate, collected in above step.
                 entityId = row["entityId"]
@@ -1486,19 +1653,19 @@ def add_annotations_to_entities_files(
         return manifest
 
     def upload_manifest_as_table(
-        self,
-        dmge,
-        manifest,
-        metadataManifestPath,
-        datasetId,
-        table_name,
-        component_name,
-        restrict,
-        manifest_record_type,
-        useSchemaLabel,
-        hideBlanks,
-        table_manipulation,
-    ):
+        self,
+        dmge,
+        manifest,
+        metadataManifestPath,
+        datasetId,
+        table_name,
+        component_name,
+        restrict,
+        manifest_record_type,
+        useSchemaLabel,
+        hideBlanks,
+        table_manipulation,
+    ):
         """Upload manifest to Synapse as a table and csv.
         Args:
             dmge: DataModelGraphExplorer object
             manifest (pd.DataFrame): loaded df containing user supplied data.
            metadataManifestPath: path to csv containing a validated metadata manifest.
            datasetId (str): synapse ID of folder containing the dataset
            table_name (str): Generated to name the table being uploaded.
            component_name (str): Name of the component manifest that is currently being uploaded.
            restrict (bool): Flag for censored data.
            manifest_record_type (str): valid values are 'entity', 'table' or 'both'. Specifies whether to create entity ids and folders for each row in a manifest, a Synapse table to house the entire manifest or do both.
            hideBlanks (bool): Default is false -Boolean flag that does not upload annotation keys with blank values when true. Uploads Annotation keys with empty string values when false.
            table_manipulation (str): Specify the way the manifest tables should be stored on Synapse when one with the same name already exists. Options are 'replace' and 'upsert'.
        Return:
            manifest_synapse_file_id: SynID of manifest csv uploaded to synapse.
        """
        # Upload manifest as a table, get the ID and updated manifest.
manifest_synapse_table_id, manifest, table_manifest = self.uploadDB( - dmge=dmge, - manifest=manifest, - datasetId=datasetId, - table_name=table_name, - restrict=restrict, - useSchemaLabel=useSchemaLabel, - table_manipulation=table_manipulation) - - manifest = self.add_annotations_to_entities_files(dmge, manifest, manifest_record_type, datasetId, hideBlanks, manifest_synapse_table_id) + dmge=dmge, + manifest=manifest, + datasetId=datasetId, + table_name=table_name, + restrict=restrict, + useSchemaLabel=useSchemaLabel, + table_manipulation=table_manipulation, + ) + + manifest = self.add_annotations_to_entities_files( + dmge, + manifest, + manifest_record_type, + datasetId, + hideBlanks, + manifest_synapse_table_id, + ) # Load manifest to synapse as a CSV File - manifest_synapse_file_id = self.upload_manifest_file(manifest, metadataManifestPath, datasetId, restrict, component_name = component_name) - + manifest_synapse_file_id = self.upload_manifest_file( + manifest, + metadataManifestPath, + datasetId, + restrict, + component_name=component_name, + ) + # Set annotations for the file manifest. - manifest_annotations = self.format_manifest_annotations(manifest, manifest_synapse_file_id) + manifest_annotations = self.format_manifest_annotations( + manifest, manifest_synapse_file_id + ) self.syn.set_annotations(manifest_annotations) logger.info("Associated manifest file with dataset on Synapse.") - + # Update manifest Synapse table with new entity id column. manifest_synapse_table_id, manifest, table_manifest = self.uploadDB( - dmge=dmge, - manifest=manifest, - datasetId=datasetId, - table_name=table_name, - restrict=restrict, - useSchemaLabel=useSchemaLabel, - table_manipulation='update') + dmge=dmge, + manifest=manifest, + datasetId=datasetId, + table_name=table_name, + restrict=restrict, + useSchemaLabel=useSchemaLabel, + table_manipulation="update", + ) # Set annotations for the table manifest - manifest_annotations = self.format_manifest_annotations(manifest, manifest_synapse_table_id) + manifest_annotations = self.format_manifest_annotations( + manifest, manifest_synapse_table_id + ) self.syn.set_annotations(manifest_annotations) return manifest_synapse_file_id def upload_manifest_as_csv( - self, - dmge, - manifest, - metadataManifestPath, - datasetId, - restrict, - manifest_record_type, - hideBlanks, - component_name): + self, + dmge, + manifest, + metadataManifestPath, + datasetId, + restrict, + manifest_record_type, + hideBlanks, + component_name, + ): """Upload manifest to Synapse as a csv only. Args: dmge: DataModelGraphExplorer object @@ -1573,34 +1760,43 @@ def upload_manifest_as_csv( manifest_synapse_file_id (str): SynID of manifest csv uploaded to synapse. """ - manifest = self.add_annotations_to_entities_files(dmge, manifest, manifest_record_type, datasetId, hideBlanks) + manifest = self.add_annotations_to_entities_files( + dmge, manifest, manifest_record_type, datasetId, hideBlanks + ) # Load manifest to synapse as a CSV File - manifest_synapse_file_id = self.upload_manifest_file(manifest, - metadataManifestPath, datasetId, restrict, component_name = component_name) - + manifest_synapse_file_id = self.upload_manifest_file( + manifest, + metadataManifestPath, + datasetId, + restrict, + component_name=component_name, + ) + # Set annotations for the file manifest. 
- manifest_annotations = self.format_manifest_annotations(manifest, manifest_synapse_file_id) + manifest_annotations = self.format_manifest_annotations( + manifest, manifest_synapse_file_id + ) self.syn.set_annotations(manifest_annotations) logger.info("Associated manifest file with dataset on Synapse.") - + return manifest_synapse_file_id def upload_manifest_combo( - self, - dmge, - manifest, - metadataManifestPath, - datasetId, - table_name, - component_name, - restrict, - manifest_record_type, - useSchemaLabel, - hideBlanks, - table_manipulation, - ): + self, + dmge, + manifest, + metadataManifestPath, + datasetId, + table_name, + component_name, + restrict, + manifest_record_type, + useSchemaLabel, + hideBlanks, + table_manipulation, + ): """Upload manifest to Synapse as a table and CSV with entities. Args: dmge: DataModelGraphExplorer object @@ -1618,46 +1814,68 @@ def upload_manifest_combo( manifest_synapse_file_id (str): SynID of manifest csv uploaded to synapse. """ manifest_synapse_table_id, manifest, table_manifest = self.uploadDB( - dmge=dmge, - manifest=manifest, - datasetId=datasetId, - table_name=table_name, - restrict=restrict, - useSchemaLabel=useSchemaLabel, - table_manipulation=table_manipulation) - - manifest = self.add_annotations_to_entities_files(dmge, manifest, manifest_record_type, datasetId, hideBlanks, manifest_synapse_table_id) - + dmge=dmge, + manifest=manifest, + datasetId=datasetId, + table_name=table_name, + restrict=restrict, + useSchemaLabel=useSchemaLabel, + table_manipulation=table_manipulation, + ) + + manifest = self.add_annotations_to_entities_files( + dmge, + manifest, + manifest_record_type, + datasetId, + hideBlanks, + manifest_synapse_table_id, + ) + # Load manifest to synapse as a CSV File - manifest_synapse_file_id = self.upload_manifest_file(manifest, metadataManifestPath, datasetId, restrict, component_name) - + manifest_synapse_file_id = self.upload_manifest_file( + manifest, metadataManifestPath, datasetId, restrict, component_name + ) + # Set annotations for the file manifest. - manifest_annotations = self.format_manifest_annotations(manifest, manifest_synapse_file_id) + manifest_annotations = self.format_manifest_annotations( + manifest, manifest_synapse_file_id + ) self.syn.set_annotations(manifest_annotations) logger.info("Associated manifest file with dataset on Synapse.") - + # Update manifest Synapse table with new entity id column. 
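The second uploadDB call below reruns the table write in 'update' mode because entityIds only exist once the file entities have been created; a toy illustration of that ordering constraint (all values invented):

    import pandas as pd

    # First pass: the table is written before any file entities exist.
    manifest = pd.DataFrame({"Filename": ["a.txt", "b.txt"], "entityId": ["", ""]})

    # Entity creation then fills the column.
    manifest["entityId"] = ["syn111", "syn222"]

    # Only now can an "update" pass push the populated column back to the table.
    assert manifest["entityId"].ne("").all()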
manifest_synapse_table_id, manifest, table_manifest = self.uploadDB( - dmge=dmge, - manifest=manifest, - datasetId=datasetId, - table_name=table_name, - restrict=restrict, - useSchemaLabel=useSchemaLabel, - table_manipulation='update') + dmge=dmge, + manifest=manifest, + datasetId=datasetId, + table_name=table_name, + restrict=restrict, + useSchemaLabel=useSchemaLabel, + table_manipulation="update", + ) # Set annotations for the table manifest - manifest_annotations = self.format_manifest_annotations(manifest, manifest_synapse_table_id) + manifest_annotations = self.format_manifest_annotations( + manifest, manifest_synapse_table_id + ) self.syn.set_annotations(manifest_annotations) return manifest_synapse_file_id def associateMetadataWithFiles( - self, dmge: DataModelGraphExplorer, metadataManifestPath: str, datasetId: str, manifest_record_type: str = 'table_file_and_entities', - useSchemaLabel: bool = True, hideBlanks: bool = False, restrict_manifest = False, table_manipulation: str = 'replace', + self, + dmge: DataModelGraphExplorer, + metadataManifestPath: str, + datasetId: str, + manifest_record_type: str = "table_file_and_entities", + useSchemaLabel: bool = True, + hideBlanks: bool = False, + restrict_manifest=False, + table_manipulation: str = "replace", ) -> str: """Associate metadata with files in a storage dataset already on Synapse. Upload metadataManifest in the storage dataset folder on Synapse as well. Return synapseId of the uploaded manifest file. - + If this is a new manifest there could be no Synapse entities associated with the rows of this manifest this may be due to data type (e.g. clinical data) being tabular and not requiring files; to utilize uniform interfaces downstream @@ -1688,62 +1906,62 @@ def associateMetadataWithFiles( table_name, component_name = self._generate_table_name(manifest) # Upload manifest to synapse based on user input (manifest_record_type) - + if manifest_record_type == "file_only": manifest_synapse_file_id = self.upload_manifest_as_csv( - dmge, - manifest, - metadataManifestPath, - datasetId=datasetId, - restrict=restrict_manifest, - hideBlanks=hideBlanks, - manifest_record_type=manifest_record_type, - component_name = component_name, - ) + dmge, + manifest, + metadataManifestPath, + datasetId=datasetId, + restrict=restrict_manifest, + hideBlanks=hideBlanks, + manifest_record_type=manifest_record_type, + component_name=component_name, + ) elif manifest_record_type == "table_and_file": manifest_synapse_file_id = self.upload_manifest_as_table( - dmge, - manifest, - metadataManifestPath, - datasetId=datasetId, - table_name=table_name, - component_name=component_name, - restrict=restrict_manifest, - useSchemaLabel=useSchemaLabel, - hideBlanks=hideBlanks, - manifest_record_type=manifest_record_type, - table_manipulation=table_manipulation, - ) + dmge, + manifest, + metadataManifestPath, + datasetId=datasetId, + table_name=table_name, + component_name=component_name, + restrict=restrict_manifest, + useSchemaLabel=useSchemaLabel, + hideBlanks=hideBlanks, + manifest_record_type=manifest_record_type, + table_manipulation=table_manipulation, + ) elif manifest_record_type == "file_and_entities": - manifest_synapse_file_id = self.upload_manifest_as_csv( - dmge, - manifest, - metadataManifestPath, - datasetId=datasetId, - restrict=restrict_manifest, - hideBlanks=hideBlanks, - manifest_record_type=manifest_record_type, - component_name = component_name, - ) + manifest_synapse_file_id = self.upload_manifest_as_csv( + dmge, + manifest, + metadataManifestPath, 
+ datasetId=datasetId, + restrict=restrict_manifest, + hideBlanks=hideBlanks, + manifest_record_type=manifest_record_type, + component_name=component_name, + ) elif manifest_record_type == "table_file_and_entities": manifest_synapse_file_id = self.upload_manifest_combo( - dmge, - manifest, - metadataManifestPath, - datasetId=datasetId, - table_name=table_name, - component_name=component_name, - restrict=restrict_manifest, - useSchemaLabel=useSchemaLabel, - hideBlanks=hideBlanks, - manifest_record_type=manifest_record_type, - table_manipulation=table_manipulation, - ) + dmge, + manifest, + metadataManifestPath, + datasetId=datasetId, + table_name=table_name, + component_name=component_name, + restrict=restrict_manifest, + useSchemaLabel=useSchemaLabel, + hideBlanks=hideBlanks, + manifest_record_type=manifest_record_type, + table_manipulation=table_manipulation, + ) else: raise ValueError("Please enter a valid manifest_record_type.") return manifest_synapse_file_id - def getTableAnnotations(self, table_id:str): + def getTableAnnotations(self, table_id: str): """Generate dictionary of annotations for the given Synapse file. Synapse returns all custom annotations as lists since they can contain multiple values. In all cases, the values will @@ -1881,7 +2099,7 @@ def getDatasetAnnotations( # Add filenames for the files that "survived" annotation retrieval filenames = [dataset_files_map[i] for i in table["entityId"]] - if 'Filename' not in table.columns: + if "Filename" not in table.columns: table.insert(0, "Filename", filenames) # Ensure that entityId and eTag are at the end @@ -1904,17 +2122,21 @@ def checkIfinAssetView(self, syn_id) -> str: # get data in administrative fileview for this pipeline assetViewTable = self.getStorageFileviewTable() all_files = list(assetViewTable["id"]) - if syn_id in all_files: + if syn_id in all_files: return True - else: + else: return False - @retry(stop = stop_after_attempt(5), - wait = wait_chain(*[wait_fixed(10) for i in range (2)] + - [wait_fixed(15) for i in range(2)] + - [wait_fixed(20)]), - retry=retry_if_exception_type(LookupError), - retry_error_callback = raise_final_error) + @retry( + stop=stop_after_attempt(5), + wait=wait_chain( + *[wait_fixed(10) for i in range(2)] + + [wait_fixed(15) for i in range(2)] + + [wait_fixed(20)] + ), + retry=retry_if_exception_type(LookupError), + retry_error_callback=raise_final_error, + ) def getDatasetProject(self, datasetId: str) -> str: """Get parent project for a given dataset ID. @@ -1941,7 +2163,6 @@ def getDatasetProject(self, datasetId: str) -> str: dataset_index = self.storageFileviewTable["id"] == datasetId dataset_row = self.storageFileviewTable[dataset_index] - # Return `projectId` for given row if only one found if len(dataset_row) == 1: dataset_project = dataset_row["projectId"].values[0] @@ -1959,11 +2180,11 @@ def getDatasetProject(self, datasetId: str) -> str: ) # If not, then assume dataset not in file view - raise LookupError ( + raise LookupError( f"The given dataset ({datasetId}) doesn't appear in the " f"configured file view ({self.storageFileview}). This might " "mean that the file view's scope needs to be updated." - ) + ) def getDatasetAnnotationsBatch( self, datasetId: str, dataset_file_ids: Sequence[str] = None @@ -1994,21 +2215,20 @@ def getDatasetAnnotationsBatch( return table def _get_table_schema_by_cname(self, table_schema): - # assume no duplicate column names in the table table_schema_by_cname = {} for col_record in table_schema: - - #TODO clean up dictionary for compactness (e.g. 
remove redundant 'name' key)
+        # TODO clean up dictionary for compactness (e.g. remove redundant 'name' key)
         table_schema_by_cname[col_record["name"]] = col_record

         return table_schema_by_cname

+
 class TableOperations:
     """
     Object to hold functions for various table operations specific to the Synapse Asset Store.
-
+
     Currently implemented operations are:
     createTable: upload a manifest as a new table when none exist
     replaceTable: replace the metadata in a table from one manifest with metadata from another manifest
@@ -2017,15 +2237,16 @@ class TableOperations:
     Operations currently in development are:
     upsertTable: add metadata from a manifest to an existing table that contains metadata from another manifest
     """
-    def __init__(self,
-            synStore: SynapseStorage,
-            tableToLoad: pd.DataFrame = None,
-            tableName: str = None,
-            datasetId: str = None,
-            existingTableId: str = None,
-            restrict: bool = False
-            ):
-
+
+    def __init__(
+        self,
+        synStore: SynapseStorage,
+        tableToLoad: pd.DataFrame = None,
+        tableName: str = None,
+        datasetId: str = None,
+        existingTableId: str = None,
+        restrict: bool = False,
+    ):
         """
         Class governing table operations (creation, replacement, upserts, updates) in schematic

         tableName: name of the table to be uploaded
         datasetId: synID of the dataset for the manifest
         existingTableId: synId of the table currently existing on synapse (if there is one)
-        restrict: bool, whether or not the manifest contains sensitive data that will need additional access restrictions
+        restrict: bool, whether or not the manifest contains sensitive data that will need additional access restrictions
         """

         self.synStore = synStore
@@ -2043,78 +2264,100 @@ def __init__(
         self.existingTableId = existingTableId
         self.restrict = restrict

-
-    def createTable(self, columnTypeDict: dict = None, specifySchema: bool = True,):
+    def createTable(
+        self,
+        columnTypeDict: dict = None,
+        specifySchema: bool = True,
+    ):
         """
         Method to create a table from a metadata manifest and upload it to synapse

         Args:
             columnTypeDict: dictionary schema for table columns: type, size, etc
-            specifySchema: to specify a specific schema for the table format
+            specifySchema: to specify a specific schema for the table format

         Returns:
             table.schema.id: synID of the newly created table
         """
-        datasetEntity = self.synStore.syn.get(self.datasetId, downloadFile = False)
+        datasetEntity = self.synStore.syn.get(self.datasetId, downloadFile=False)
         datasetName = datasetEntity.name
-        table_schema_by_cname = self.synStore._get_table_schema_by_cname(columnTypeDict)
+        table_schema_by_cname = self.synStore._get_table_schema_by_cname(columnTypeDict)

         if not self.tableName:
-            self.tableName = datasetName + 'table'
+            self.tableName = datasetName + "table"
         datasetParentProject = self.synStore.getDatasetProject(self.datasetId)
         if specifySchema:
             if columnTypeDict == {}:
                 logger.error("Did not provide a columnTypeDict.")
-            #create list of columns:
+            # create list of columns:
             cols = []
             for col in self.tableToLoad.columns:
                 if col in table_schema_by_cname:
-                    col_type = table_schema_by_cname[col]['columnType']
-                    max_size = table_schema_by_cname[col]['maximumSize'] if 'maximumSize' in table_schema_by_cname[col].keys() else 100
+                    col_type = table_schema_by_cname[col]["columnType"]
+                    max_size = (
+                        table_schema_by_cname[col]["maximumSize"]
+                        if "maximumSize" in table_schema_by_cname[col].keys()
+                        else 100
+                    )
                     max_list_len = 250
                     if max_size and max_list_len:
-                        cols.append(Column(name=col, columnType=col_type,
-
maximumSize=max_size, maximumListLength=max_list_len)) + cols.append( + Column( + name=col, + columnType=col_type, + maximumSize=max_size, + maximumListLength=max_list_len, + ) + ) elif max_size: - cols.append(Column(name=col, columnType=col_type, - maximumSize=max_size)) + cols.append( + Column(name=col, columnType=col_type, maximumSize=max_size) + ) else: cols.append(Column(name=col, columnType=col_type)) else: - #TODO add warning that the given col was not found and it's max size is set to 100 - cols.append(Column(name=col, columnType='STRING', maximumSize=100)) - schema = Schema(name=self.tableName, columns=cols, parent=datasetParentProject) + # TODO add warning that the given col was not found and it's max size is set to 100 + cols.append(Column(name=col, columnType="STRING", maximumSize=100)) + schema = Schema( + name=self.tableName, columns=cols, parent=datasetParentProject + ) table = Table(schema, self.tableToLoad) - table = self.synStore.syn.store(table, isRestricted = self.restrict) + table = self.synStore.syn.store(table, isRestricted=self.restrict) return table.schema.id else: # For just uploading the tables to synapse using default # column types. table = build_table(self.tableName, datasetParentProject, self.tableToLoad) - table = self.synStore.syn.store(table, isRestricted = self.restrict) + table = self.synStore.syn.store(table, isRestricted=self.restrict) return table.schema.id - def replaceTable(self, specifySchema: bool = True, columnTypeDict: dict = None,): + def replaceTable( + self, + specifySchema: bool = True, + columnTypeDict: dict = None, + ): """ Method to replace an existing table on synapse with metadata from a new manifest - + Args: - specifySchema: to infer a schema for the table format - columnTypeDict: dictionary schema for table columns: type, size, etc + specifySchema: to infer a schema for the table format + columnTypeDict: dictionary schema for table columns: type, size, etc Returns: existingTableId: synID of the already existing table that had its metadata replaced """ - datasetEntity = self.synStore.syn.get(self.datasetId, downloadFile = False) + datasetEntity = self.synStore.syn.get(self.datasetId, downloadFile=False) datasetName = datasetEntity.name - table_schema_by_cname = self.synStore._get_table_schema_by_cname(columnTypeDict) - existing_table, existing_results = self.synStore.get_synapse_table(self.existingTableId) + table_schema_by_cname = self.synStore._get_table_schema_by_cname(columnTypeDict) + existing_table, existing_results = self.synStore.get_synapse_table( + self.existingTableId + ) # remove rows self.synStore.syn.delete(existing_results) # wait for row deletion to finish on synapse before getting empty table sleep(10) - + # removes all current columns current_table = self.synStore.syn.get(self.existingTableId) current_columns = self.synStore.syn.getTableColumns(current_table) @@ -2122,58 +2365,70 @@ def replaceTable(self, specifySchema: bool = True, columnTypeDict: dict = None,) current_table.removeColumn(col) if not self.tableName: - self.tableName = datasetName + 'table' - + self.tableName = datasetName + "table" + # Process columns according to manifest entries - table_schema_by_cname = self.synStore._get_table_schema_by_cname(columnTypeDict) + table_schema_by_cname = self.synStore._get_table_schema_by_cname(columnTypeDict) datasetParentProject = self.synStore.getDatasetProject(self.datasetId) if specifySchema: if columnTypeDict == {}: logger.error("Did not provide a columnTypeDict.") - #create list of columns: + # create list of 
columns: cols = [] - + for col in self.tableToLoad.columns: - if col in table_schema_by_cname: - col_type = table_schema_by_cname[col]['columnType'] - max_size = table_schema_by_cname[col]['maximumSize'] if 'maximumSize' in table_schema_by_cname[col].keys() else 100 + col_type = table_schema_by_cname[col]["columnType"] + max_size = ( + table_schema_by_cname[col]["maximumSize"] + if "maximumSize" in table_schema_by_cname[col].keys() + else 100 + ) max_list_len = 250 if max_size and max_list_len: - cols.append(Column(name=col, columnType=col_type, - maximumSize=max_size, maximumListLength=max_list_len)) + cols.append( + Column( + name=col, + columnType=col_type, + maximumSize=max_size, + maximumListLength=max_list_len, + ) + ) elif max_size: - cols.append(Column(name=col, columnType=col_type, - maximumSize=max_size)) + cols.append( + Column(name=col, columnType=col_type, maximumSize=max_size) + ) else: cols.append(Column(name=col, columnType=col_type)) else: - - #TODO add warning that the given col was not found and it's max size is set to 100 - cols.append(Column(name=col, columnType='STRING', maximumSize=100)) - + # TODO add warning that the given col was not found and it's max size is set to 100 + cols.append(Column(name=col, columnType="STRING", maximumSize=100)) + # adds new columns to schema for col in cols: current_table.addColumn(col) - self.synStore.syn.store(current_table, isRestricted = self.restrict) + self.synStore.syn.store(current_table, isRestricted=self.restrict) # wait for synapse store to finish sleep(1) # build schema and table from columns and store with necessary restrictions - schema = Schema(name=self.tableName, columns=cols, parent=datasetParentProject) + schema = Schema( + name=self.tableName, columns=cols, parent=datasetParentProject + ) schema.id = self.existingTableId - table = Table(schema, self.tableToLoad, etag = existing_results.etag) - table = self.synStore.syn.store(table, isRestricted = self.restrict) + table = Table(schema, self.tableToLoad, etag=existing_results.etag) + table = self.synStore.syn.store(table, isRestricted=self.restrict) else: logging.error("Must specify a schema for table replacements") # remove system metadata from manifest - existing_table.drop(columns = ['ROW_ID', 'ROW_VERSION'], inplace = True) + existing_table.drop(columns=["ROW_ID", "ROW_VERSION"], inplace=True) return self.existingTableId - - def _get_auth_token(self,): + def _get_auth_token( + self, + ): authtoken = None # Get access token from environment variable if available @@ -2185,14 +2440,16 @@ def _get_auth_token(self,): # Get token from authorization header # Primarily useful for API endpoint functionality - if 'Authorization' in self.synStore.syn.default_headers: - authtoken = self.synStore.syn.default_headers['Authorization'].split('Bearer ')[-1] + if "Authorization" in self.synStore.syn.default_headers: + authtoken = self.synStore.syn.default_headers["Authorization"].split( + "Bearer " + )[-1] return authtoken # retrive credentials from synapse object # Primarily useful for local users, could only be stored here when a .synapseConfig file is used, but including to be safe synapse_object_creds = self.synStore.syn.credentials - if hasattr(synapse_object_creds, '_token'): + if hasattr(synapse_object_creds, "_token"): authtoken = synapse_object_creds.secret # Try getting creds from .synapseConfig file if it exists @@ -2201,51 +2458,61 @@ def _get_auth_token(self,): config = self.synStore.syn.getConfigFile(CONFIG.synapse_configuration_path) # check which credentials are 
provided in file
-        if config.has_option('authentication', 'authtoken'):
-            authtoken = config.get('authentication', 'authtoken')
-
+        if config.has_option("authentication", "authtoken"):
+            authtoken = config.get("authentication", "authtoken")
+
         # raise error if required credentials are not found
         if not authtoken:
             raise NameError(
                 "authtoken credentials could not be found in the environment, synapse object, or the .synapseConfig file"
             )
-
+
         return authtoken

     def upsertTable(self, dmge: DataModelGraphExplorer):
         """
         Method to upsert rows from a new manifest into an existing table on synapse
-        For upsert functionality to work, primary keys must follow the naming convention of <component>_id
+        For upsert functionality to work, primary keys must follow the naming convention of <component>_id
        `-tm upsert` should be used for initial table uploads if users intend to upsert into them at a later time; using 'upsert' at creation will generate the metadata necessary for upsert functionality.
        Currently it is required to use -dl/--use_display_label with table upserts.
-

        Args:
            dmge: DataModelGraphExplorer instance

        Returns:
            existingTableId: synID of the already existing table that had its metadata replaced
-        """
+        """
        authtoken = self._get_auth_token()

-        synapseDB = SynapseDatabase(auth_token=authtoken, project_id=self.synStore.getDatasetProject(self.datasetId))
+        synapseDB = SynapseDatabase(
+            auth_token=authtoken,
+            project_id=self.synStore.getDatasetProject(self.datasetId),
+        )

        try:
            # Try performing upsert
-            synapseDB.upsert_table_rows(table_name=self.tableName, data=self.tableToLoad)
-        except(SynapseHTTPError) as ex:
+            synapseDB.upsert_table_rows(
+                table_name=self.tableName, data=self.tableToLoad
+            )
+        except SynapseHTTPError as ex:
            # If error is raised because Table has old `Uuid` column and not new `Id` column, then handle and re-attempt upload
-            if 'Id is not a valid column name or id' in str(ex):
+            if "Id is not a valid column name or id" in str(ex):
                self._update_table_uuid_column(dmge)
-                synapseDB.upsert_table_rows(table_name=self.tableName, data=self.tableToLoad)
+                synapseDB.upsert_table_rows(
+                    table_name=self.tableName, data=self.tableToLoad
+                )
            # Raise if other error
            else:
                raise ex

        return self.existingTableId

-
-    def _update_table_uuid_column(self, dmge: DataModelGraphExplorer,) -> None:
+    def _update_table_uuid_column(
+        self,
+        dmge: DataModelGraphExplorer,
+    ) -> None:
        """Removes the `Uuid` column when present, and replaces it with an `Id` column
        Used to enable backwards compatibility for manifests using the old `Uuid` convention

        Args:
            dmge: DataModelGraphExplorer instance

        Returns:
            None
        """
        # Get the columns of the schema
        schema = self.synStore.syn.get(self.existingTableId)
        cols = self.synStore.syn.getTableColumns(schema)

        # Iterate through columns until `Uuid` column is found
        for col in cols:
-            if col.name.lower() == 'uuid':
+            if col.name.lower() == "uuid":
                # See if schema has `Uuid` column specified
                try:
-                    uuid_col_in_schema = dmge.is_class_in_schema(col.name)
-                except (KeyError):
+                    uuid_col_in_schema = dmge.is_class_in_schema(col.name)
+                except KeyError:
                    uuid_col_in_schema = False

                # If there is, then create a new `Id` column from scratch
                if uuid_col_in_schema:
-                    new_col = Column(columnType = "STRING", maximumSize = 64, name = "Id")
+                    new_col = Column(columnType="STRING", maximumSize=64, name="Id")
                    schema.addColumn(new_col)
                    schema = self.synStore.syn.store(schema)
                # If there is not, then use the old `Uuid` column as a basis for the new `Id` column
                else:
-                    # Build
ColumnModel that will be used for new column - id_column = Column(name='Id', columnType='STRING', maximumSize=64, defaultValue=None, maximumListLength=1) + id_column = Column( + name="Id", + columnType="STRING", + maximumSize=64, + defaultValue=None, + maximumListLength=1, + ) new_col_response = self.synStore.syn.store(id_column) - # Define columnChange body columnChangeDict = { "concreteType": "org.sagebionetworks.repo.model.table.TableSchemaChangeRequest", "entityId": self.existingTableId, "changes": [ - { - "oldColumnId": col['id'], - "newColumnId": new_col_response['id'], + { + "oldColumnId": col["id"], + "newColumnId": new_col_response["id"], } - ] + ], } - self.synStore.syn._async_table_update(table=self.existingTableId, changes=[columnChangeDict], wait=False) + self.synStore.syn._async_table_update( + table=self.existingTableId, + changes=[columnChangeDict], + wait=False, + ) break return - def updateTable(self, update_col: str = 'Id',): + def updateTable( + self, + update_col: str = "Id", + ): """ Method to update an existing table with a new column - + Args: - updateCol: column to index the old and new tables on + updateCol: column to index the old and new tables on Returns: existingTableId: synID of the already existing table that had its metadata replaced """ - existing_table, existing_results = self.synStore.get_synapse_table(self.existingTableId) - + existing_table, existing_results = self.synStore.get_synapse_table( + self.existingTableId + ) + self.tableToLoad = update_df(existing_table, self.tableToLoad, update_col) # store table with existing etag data and impose restrictions as appropriate - self.synStore.syn.store(Table(self.existingTableId, self.tableToLoad, etag = existing_results.etag), isRestricted = self.restrict) + self.synStore.syn.store( + Table(self.existingTableId, self.tableToLoad, etag=existing_results.etag), + isRestricted=self.restrict, + ) return self.existingTableId diff --git a/schematic/utils/cli_utils.py b/schematic/utils/cli_utils.py index c68fe46f6..684bafba1 100644 --- a/schematic/utils/cli_utils.py +++ b/schematic/utils/cli_utils.py @@ -47,8 +47,11 @@ def log_value_from_config(arg_name: str, config_value: Any): f"The {arg_name} argument is being taken from configuration file, i.e., {config_value}." ) + def parse_synIDs( - ctx, param, synIDs, + ctx, + param, + synIDs, ) -> List[str]: """Parse and validate a comma separated string of synIDs @@ -64,12 +67,12 @@ def parse_synIDs( List of synID strings Raises: - ValueError: If the entire string does not match a regex for + ValueError: If the entire string does not match a regex for a valid comma separated string of SynIDs """ if synIDs: project_regex = re.compile("(syn\d+\,?)+") - valid=project_regex.fullmatch(synIDs) + valid = project_regex.fullmatch(synIDs) if valid: synIDs = synIDs.split(",") @@ -78,17 +81,19 @@ def parse_synIDs( else: raise ValueError( - f"The provided list of project synID(s): {synIDs}, is not formatted correctly. " - "\nPlease check your list of projects for errors." - ) + f"The provided list of project synID(s): {synIDs}, is not formatted correctly. " + "\nPlease check your list of projects for errors." 
+            )
     else:
         return


+
 def parse_comma_str_to_list(
-    ctx, param, comma_string,
+    ctx,
+    param,
+    comma_string,
 ) -> List[str]:
-
     if comma_string:
         return comma_string.split(",")
     else:
-        return None
\ No newline at end of file
+        return None
diff --git a/schematic/utils/df_utils.py b/schematic/utils/df_utils.py
index 789fb4881..14c8d6b2b 100644
--- a/schematic/utils/df_utils.py
+++ b/schematic/utils/df_utils.py
@@ -27,54 +27,60 @@ def load_df(file_path, preserve_raw_input=True, data_model=False, **load_args):
     large_manifest_cutoff_size = 1000
     # start performance timer
     t_load_df = perf_counter()
-
-    #Read CSV to df as type specified in kwargs
-    org_df = pd.read_csv(file_path, keep_default_na = True, encoding='utf8', **load_args)
-
+
+    # Read CSV to df as type specified in kwargs
+    org_df = pd.read_csv(file_path, keep_default_na=True, encoding="utf8", **load_args)
+
     # If type inference not allowed: trim and return
     if preserve_raw_input:
-        #only trim if not data model csv
+        # only trim if not data model csv
         if not data_model:
-            org_df=trim_commas_df(org_df)
-
+            org_df = trim_commas_df(org_df)
+
         # log manifest load and processing time
         logger.debug(f"Load Elapsed time {perf_counter()-t_load_df}")
         return org_df

     # If type inference is allowed: infer types, trim, and return
     else:
-        # create a separate copy of the manifest
+        # create a separate copy of the manifest
         # before beginning conversions to store float values
-        float_df=deepcopy(org_df)
-
+        float_df = deepcopy(org_df)
+
         # Cast the columns in the dataframe to string and
         # replace Null values with empty strings
-        null_cells = org_df.isnull()
-        org_df = org_df.astype(str).mask(null_cells, '')
+        null_cells = org_df.isnull()
+        org_df = org_df.astype(str).mask(null_cells, "")

         # Find integers stored as strings and replace with entries of type np.int64
-        if org_df.size < large_manifest_cutoff_size: # If small manifest, iterate as normal for improved performance
-            ints = org_df.applymap(lambda x: np.int64(x) if str.isdigit(x) else False, na_action='ignore').fillna(False)
-
-        else: # parallelize iterations for large manfiests
-            pandarallel.initialize(verbose = 1)
-            ints = org_df.parallel_applymap(lambda x: np.int64(x) if str.isdigit(x) else False, na_action='ignore').fillna(False)
+        if (
+            org_df.size < large_manifest_cutoff_size
+        ):  # If small manifest, iterate as normal for improved performance
+            ints = org_df.applymap(
+                lambda x: np.int64(x) if str.isdigit(x) else False, na_action="ignore"
+            ).fillna(False)
+
+        else:  # parallelize iterations for large manifests
+            pandarallel.initialize(verbose=1)
+            ints = org_df.parallel_applymap(
+                lambda x: np.int64(x) if str.isdigit(x) else False, na_action="ignore"
+            ).fillna(False)

         # Identify cells converted to integers
         ints_tf_df = ints.applymap(pd.api.types.is_integer)

         # convert strings to numerical dtype (float) if possible, preserve non-numerical strings
         for col in org_df.columns:
-            float_df[col]=pd.to_numeric(float_df[col], errors='coerce')
+            float_df[col] = pd.to_numeric(float_df[col], errors="coerce")
             # replace values that couldn't be converted to float with the original str values
-            float_df[col].fillna(org_df[col][float_df[col].isna()],inplace=True)
-
+            float_df[col].fillna(org_df[col][float_df[col].isna()], inplace=True)
+
         # Trim nans and empty rows and columns
         processed_df = trim_commas_df(float_df)

         # Store values that were converted to type int in the final dataframe
-        processed_df=processed_df.mask(ints_tf_df, other = ints)
-
+        processed_df = processed_df.mask(ints_tf_df, other=ints)
+
+        # log
manifest load and processing time logger.debug(f"Load Elapsed time {perf_counter()-t_load_df}") return processed_df @@ -82,15 +88,13 @@ def load_df(file_path, preserve_raw_input=True, data_model=False, **load_args): def _parse_dates(date_string): try: - date = dp.parse(date_string = date_string, settings = {'STRICT_PARSING': True}) + date = dp.parse(date_string=date_string, settings={"STRICT_PARSING": True}) return date if date else False except TypeError: return False - def normalize_table(df: pd.DataFrame, primary_key: str) -> pd.DataFrame: - """ Function to normalize a table (e.g. dedup) Args: @@ -160,7 +164,7 @@ def update_df( input_df_idx.reset_index(inplace=True) input_df_idx = input_df_idx[input_df.columns] - # Sometimes pandas update can change the column datatype, recast + # Sometimes pandas update can change the column datatype, recast for col in input_df_idx.columns: input_df_idx[col] = input_df_idx[col].astype(input_df.dtypes[col]) @@ -182,7 +186,7 @@ def trim_commas_df(df: pd.DataFrame): # remove all completely empty rows df = df.dropna(how="all", axis=0) - #Fill in nan cells with empty strings + # Fill in nan cells with empty strings df.fillna("", inplace=True) return df @@ -197,9 +201,14 @@ def col_in_dataframe(col: str, df: pd.DataFrame) -> bool: Returns: bool: whether or not the column name is a column in the dataframe, case agnostic """ - return col.lower() in [manifest_col.lower() for manifest_col in df.columns.to_list()] + return col.lower() in [ + manifest_col.lower() for manifest_col in df.columns.to_list() + ] -def populate_df_col_with_another_col(df: pd.DataFrame, source_col: str, target_col: str) -> pd.DataFrame: + +def populate_df_col_with_another_col( + df: pd.DataFrame, source_col: str, target_col: str +) -> pd.DataFrame: """Copy the values from one column in a dataframe to another column in the same dataframe Args: df: pandas dataframe with data from manifest file. @@ -210,5 +219,5 @@ def populate_df_col_with_another_col(df: pd.DataFrame, source_col: str, target_c dataframe with contents updated """ # Copy the contents over - df[target_col]=df[source_col] - return df \ No newline at end of file + df[target_col] = df[source_col] + return df diff --git a/schematic/utils/general.py b/schematic/utils/general.py index 8b7b62e35..66ddb2252 100644 --- a/schematic/utils/general.py +++ b/schematic/utils/general.py @@ -18,6 +18,7 @@ logger = logging.getLogger(__name__) + def find_duplicates(_list): """Find duplicate items in a list""" return set([x for x in _list if _list.count(x) > 1]) @@ -45,10 +46,10 @@ def unlist(_list): def get_dir_size(path: str): - """Recursively descend the directory tree rooted at the top and call .st_size function to calculate size of files in bytes. + """Recursively descend the directory tree rooted at the top and call .st_size function to calculate size of files in bytes. Args: path: path to a folder - return: total size of all the files in a given directory in bytes. + return: total size of all the files in a given directory in bytes. 
""" total = 0 # Recursively scan directory to find entries @@ -60,27 +61,30 @@ def get_dir_size(path: str): total += get_dir_size(entry.path) return total -def calculate_datetime(minutes: int, input_date: datetime, before_or_after: str = "before") -> datetime: - """calculate date time + +def calculate_datetime( + minutes: int, input_date: datetime, before_or_after: str = "before" +) -> datetime: + """calculate date time Args: input_date (datetime): date time object provided by users minutes (int): number of minutes - before_or_after (str): default to "before". if "before", calculate x minutes before current date time. if "after", calculate x minutes after current date time. + before_or_after (str): default to "before". if "before", calculate x minutes before current date time. if "after", calculate x minutes after current date time. Returns: datetime: return result of date time calculation """ - if before_or_after=="before": + if before_or_after == "before": date_time_result = input_date - timedelta(minutes=minutes) - elif before_or_after=="after": + elif before_or_after == "after": date_time_result = input_date + timedelta(minutes=minutes) else: raise ValueError("Invalid value. Use either 'before' or 'after'.") return date_time_result -def check_synapse_cache_size(directory='/root/.synapseCache')-> Union[float, int]: +def check_synapse_cache_size(directory="/root/.synapseCache") -> Union[float, int]: """use du --sh command to calculate size of .synapseCache. Args: @@ -89,27 +93,28 @@ def check_synapse_cache_size(directory='/root/.synapseCache')-> Union[float, int Returns: float or integer: returns size of .synapsecache directory in bytes """ - # Note: this command might fail on windows user. But since this command is primarily for running on AWS, it is fine. - command = ['du', '-sh', directory] - output = subprocess.run(command, capture_output=True).stdout.decode('utf-8') - + # Note: this command might fail on windows user. But since this command is primarily for running on AWS, it is fine. 
+    command = ["du", "-sh", directory]
+    output = subprocess.run(command, capture_output=True).stdout.decode("utf-8")
+
     # Parsing the output to extract the directory size
-    size = output.split('\t')[0]
+    size = output.split("\t")[0]
     if "K" in size:
-        size_in_kb = float(size.rstrip('K'))
+        size_in_kb = float(size.rstrip("K"))
         byte_size = size_in_kb * 1000
     elif "M" in size:
-        size_in_mb = float(size.rstrip('M'))
+        size_in_mb = float(size.rstrip("M"))
         byte_size = size_in_mb * 1000000
-    elif "G" in size:
-        size_in_gb = float(size.rstrip('G'))
+    elif "G" in size:
+        size_in_gb = float(size.rstrip("G"))
         byte_size = convert_gb_to_bytes(size_in_gb)
     elif "B" in size:
-        byte_size = float(size.rstrip('B'))
+        byte_size = float(size.rstrip("B"))
     else:
-        logger.error('Cannot recongize the file size unit')
+        logger.error("Cannot recognize the file size unit")
     return byte_size

+
 def clear_synapse_cache(cache: cache.Cache, minutes: int) -> int:
     """clear synapse cache before a certain time

@@ -120,10 +125,13 @@ def clear_synapse_cache(cache: cache.Cache, minutes: int) -> int:
         int: number of files that get deleted
     """
     current_date = datetime.utcnow()
-    minutes_earlier = calculate_datetime(input_date=current_date, minutes=minutes, before_or_after="before")
-    num_of_deleted_files = cache.purge(before_date = minutes_earlier)
+    minutes_earlier = calculate_datetime(
+        input_date=current_date, minutes=minutes, before_or_after="before"
+    )
+    num_of_deleted_files = cache.purge(before_date=minutes_earlier)
     return num_of_deleted_files

+
 def convert_gb_to_bytes(gb: int):
     """convert gb to bytes
     Args:
@@ -164,8 +172,9 @@ def entity_type_mapping(syn, entity_id):
     # if there's no matching type, return concreteType
     return entity.concreteType

+
 def create_temp_folder(path: str) -> str:
-    """This function creates a temporary directory in the specified directory
+    """This function creates a temporary directory in the specified directory
     Args:
         path(str): a directory path where all the temporary files will live
     Returns: returns the absolute pathname of the new directory.
@@ -175,7 +184,9 @@ def create_temp_folder(path: str) -> str:
     return path


-def profile(output_file=None, sort_by='cumulative', lines_to_print=None, strip_dirs=False):
+def profile(
+    output_file=None, sort_by="cumulative", lines_to_print=None, strip_dirs=False
+):
     """
     The function was initially taken from: https://towardsdatascience.com/how-to-profile-your-code-in-python-e70c834fad89
     A time profiler decorator.
@@ -205,20 +216,20 @@ def profile(output_file=None, sort_by='cumulative', lines_to_print=None, strip_d
     def inner(func):
         @wraps(func)
         def wrapper(*args, **kwargs):
-            _output_file = output_file or func.__name__ + '.prof'
+            _output_file = output_file or func.__name__ + ".prof"
             pr = Profile()
             pr.enable()
             retval = func(*args, **kwargs)
             pr.disable()
             pr.dump_stats(_output_file)

-            #if we are running the functions on AWS:
+            # if we are running the functions on AWS:
             if "SECRETS_MANAGER_SECRETS" in os.environ:
                 ps = pstats.Stats(pr)
                 # limit this to 30 lines for now otherwise it will be too long for AWS log
-                ps.sort_stats('cumulative').print_stats(30)
-            else:
-                with open(_output_file, 'w') as f:
+                ps.sort_stats("cumulative").print_stats(30)
+            else:
+                with open(_output_file, "w") as f:
                     ps = pstats.Stats(pr, stream=f)
                     if strip_dirs:
                         ps.strip_dirs()
@@ -233,6 +244,7 @@ def wrapper(*args, **kwargs):

         return inner

+
 def normalize_path(path: str, parent_folder: str) -> str:
     """
     Normalizes a path.
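Before moving on to the Google API utilities, a brief usage sketch of the profile decorator above, assuming it is importable from schematic.utils.general (the profiled function and output file name are invented):

    from schematic.utils.general import profile

    @profile(output_file="slow_step.prof", sort_by="cumulative", lines_to_print=20)
    def slow_step(n: int) -> int:
        # arbitrary CPU-bound work to give the profiler something to record
        return sum(i * i for i in range(n))

    slow_step(1_000_000)  # a readable stats report ends up in slow_step.prof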
diff --git a/schematic/utils/google_api_utils.py b/schematic/utils/google_api_utils.py index ac3def72f..f7862a3a2 100644 --- a/schematic/utils/google_api_utils.py +++ b/schematic/utils/google_api_utils.py @@ -24,6 +24,7 @@ "https://www.googleapis.com/auth/drive", ] + # TODO: replace by pygsheets calls? def build_credentials() -> Dict[str, Any]: creds = generate_token() @@ -42,14 +43,18 @@ def build_credentials() -> Dict[str, Any]: def build_service_account_creds() -> Dict[str, Any]: if "SERVICE_ACCOUNT_CREDS" in os.environ: - dict_creds=json.loads(os.environ["SERVICE_ACCOUNT_CREDS"]) - credentials = service_account.Credentials.from_service_account_info(dict_creds, scopes=SCOPES) + dict_creds = json.loads(os.environ["SERVICE_ACCOUNT_CREDS"]) + credentials = service_account.Credentials.from_service_account_info( + dict_creds, scopes=SCOPES + ) # for AWS deployment elif "SECRETS_MANAGER_SECRETS" in os.environ: - all_secrets_dict =json.loads(os.environ["SECRETS_MANAGER_SECRETS"]) - dict_creds=json.loads(all_secrets_dict["SERVICE_ACCOUNT_CREDS"]) - credentials = service_account.Credentials.from_service_account_info(dict_creds, scopes=SCOPES) + all_secrets_dict = json.loads(os.environ["SECRETS_MANAGER_SECRETS"]) + dict_creds = json.loads(all_secrets_dict["SERVICE_ACCOUNT_CREDS"]) + credentials = service_account.Credentials.from_service_account_info( + dict_creds, scopes=SCOPES + ) else: credentials = service_account.Credentials.from_service_account_file( CONFIG.service_account_credentials_path, scopes=SCOPES @@ -73,8 +78,10 @@ def download_creds_file() -> None: # if file path of service_account does not exist # and if an environment variable related to service account is not found # regenerate service_account credentials - if not os.path.exists(CONFIG.service_account_credentials_path) and "SERVICE_ACCOUNT_CREDS" not in os.environ: - + if ( + not os.path.exists(CONFIG.service_account_credentials_path) + and "SERVICE_ACCOUNT_CREDS" not in os.environ + ): # synapse ID of the 'schematic_service_account_creds.json' file API_CREDS = CONFIG.service_account_credentials_synapse_id @@ -121,9 +128,10 @@ def execute_google_api_requests(service, requests_body, **kwargs): return response + def export_manifest_drive_service(manifest_url, file_path, mimeType): - ''' - Export manifest by using google drive api. If export as an Excel spreadsheet, the exported spreasheet would also include a hidden sheet + """ + Export manifest by using google drive api. 
If export as an Excel spreadsheet, the exported spreadsheet would also include a hidden sheet
     Args:
         manifest_url: google sheet manifest url
         file_path: file path of the exported manifest
@@ -131,30 +139,32 @@ def export_manifest_drive_service(manifest_url, file_path, mimeType):

     result:
         Google sheet gets exported in desired format
-    '''
+    """

-    # intialize drive service
+    # initialize drive service
     services_creds = build_service_account_creds()
     drive_service = services_creds["drive_service"]

     # get spreadsheet id
-    spreadsheet_id = manifest_url.split('/')[-1]
+    spreadsheet_id = manifest_url.split("/")[-1]

-    # use google drive
-    data = drive_service.files().export(fileId=spreadsheet_id, mimeType=mimeType).execute()
+    # use google drive
+    data = (
+        drive_service.files().export(fileId=spreadsheet_id, mimeType=mimeType).execute()
+    )

     # open file and write data
-    with open(os.path.abspath(file_path), 'wb') as f:
-        try:
+    with open(os.path.abspath(file_path), "wb") as f:
+        try:
             f.write(data)
-        except FileNotFoundError as not_found:
+        except FileNotFoundError as not_found:
             logger.error(f"{not_found.filename} could not be found")
-    f.close
-
+

 def export_manifest_csv(file_path, manifest):
-    '''
+    """
     Export manifest as a CSV by using google drive api
     Args:
         manifest: could be a dataframe or a manifest url
@@ -162,46 +172,54 @@ def export_manifest_csv(file_path, manifest):
         mimeType: exporting mimetype
     result:
         Google sheet gets exported as a CSV
-    '''
+    """

     if isinstance(manifest, pd.DataFrame):
         manifest.to_csv(file_path, index=False)
-    else:
-        export_manifest_drive_service(manifest, file_path, mimeType = 'text/csv')
-
+    else:
+        export_manifest_drive_service(manifest, file_path, mimeType="text/csv")


 def export_manifest_excel(manifest, output_excel=None):
-    '''
+    """
     Export manifest as an Excel spreadsheet by using google sheet API. This approach can also export hidden sheets
     Args:
-        manifest: could be a dataframe or a manifest url
+        manifest: could be a dataframe or a manifest url
         output_excel: name of the exported manifest sheet
     result:
-        Google sheet gets exported as an excel spreadsheet. If there's a hidden sheet, the hidden sheet also gets exported.
+        Google sheet gets exported as an Excel spreadsheet. If there's a hidden sheet, the hidden sheet also gets exported.
+    """
+    # initialize drive service
     services_creds = build_service_account_creds()
     sheet_service = services_creds["sheet_service"]

     if isinstance(manifest, pd.DataFrame):
         manifest.to_excel(output_excel, index=False)
     else:
-        # get spreadsheet id from url
-        spreadsheet_id = manifest.split('/')[-1]
+        # get spreadsheet id from url
+        spreadsheet_id = manifest.split("/")[-1]

         # use google sheet api
-        sheet_metadata = sheet_service.spreadsheets().get(spreadsheetId=spreadsheet_id).execute()
-        sheets = sheet_metadata.get('sheets')
+        sheet_metadata = (
+            sheet_service.spreadsheets().get(spreadsheetId=spreadsheet_id).execute()
+        )
+        sheets = sheet_metadata.get("sheets")

         # export to Excel
         writer = pd.ExcelWriter(output_excel)

         # export each sheet in manifest
         for sheet in sheets:
-            dataset = sheet_service.spreadsheets().values().get(spreadsheetId=spreadsheet_id, range=sheet['properties']['title']).execute()
-            dataset_df = pd.DataFrame(dataset['values'])
+            dataset = (
+                sheet_service.spreadsheets()
+                .values()
+                .get(spreadsheetId=spreadsheet_id, range=sheet["properties"]["title"])
+                .execute()
+            )
+            dataset_df = pd.DataFrame(dataset["values"])
             dataset_df.columns = dataset_df.iloc[0]
             dataset_df.drop(dataset_df.index[0], inplace=True)
-            dataset_df.to_excel(writer, sheet_name=sheet['properties']['title'], index=False)
+            dataset_df.to_excel(
+                writer, sheet_name=sheet["properties"]["title"], index=False
+            )
         writer.save()
-        writer.close()
\ No newline at end of file
+        writer.close()
diff --git a/schematic/utils/schema_utils.py b/schematic/utils/schema_utils.py
index b8cab8e66..2e43f8f3e 100644
--- a/schematic/utils/schema_utils.py
+++ b/schematic/utils/schema_utils.py
@@ -4,30 +4,39 @@
 import string
 from typing import List, Dict

-def attr_dict_template(key_name:str)->Dict[str,dict[str,dict]]:
-    return {key_name: {'Relationships': {}}}
-
-def get_property_label_from_display_name(display_name:str, strict_camel_case:bool = False) -> str:
-    """Convert a given display name string into a proper property label string
-    Args:
-        display_name, str: node display name
-        strict_camel_case, bool: Default, False; defines whether or not to use strict camel case or not for conversion.
-    Returns:
-        label, str: property label of display name
-    """
-    # This is the newer more strict method
-    if strict_camel_case:
-        display_name = display_name.strip().translate({ord(c): "_" for c in string.whitespace})
-        label = inflection.camelize(display_name, uppercase_first_letter=False)
-
-    # This method remains for backwards compatibility
-    else:
-        display_name = display_name.translate({ord(c): None for c in string.whitespace})
-        label = inflection.camelize(display_name.strip(), uppercase_first_letter=False)
-
-    return label
-
-def get_class_label_from_display_name(display_name:str, strict_camel_case:bool = False) -> str:
+
+def attr_dict_template(key_name: str) -> Dict[str, dict[str, dict]]:
+    return {key_name: {"Relationships": {}}}
+
+
+def get_property_label_from_display_name(
+    display_name: str, strict_camel_case: bool = False
+) -> str:
+    """Convert a given display name string into a proper property label string
+    Args:
+        display_name, str: node display name
+        strict_camel_case, bool: Default, False; defines whether or not to use strict camel case for conversion.
+    Returns:
+        label, str: property label of display name
+    """
+    # This is the newer more strict method
+    if strict_camel_case:
+        display_name = display_name.strip().translate(
+            {ord(c): "_" for c in string.whitespace}
+        )
+        label = inflection.camelize(display_name, uppercase_first_letter=False)
+
+    # This method remains for backwards compatibility
+    else:
+        display_name = display_name.translate({ord(c): None for c in string.whitespace})
+        label = inflection.camelize(display_name.strip(), uppercase_first_letter=False)
+
+    return label
+
+
+def get_class_label_from_display_name(
+    display_name: str, strict_camel_case: bool = False
+) -> str:
     """Convert a given display name string into a proper class label string
     Args:
         display_name, str: node display name
@@ -37,7 +46,9 @@ def get_class_label_from_display_name(display_name:str, strict_camel_case:bool =
     """
     # This is the newer more strict method
     if strict_camel_case:
-        display_name = display_name.strip().translate({ord(c): "_" for c in string.whitespace})
+        display_name = display_name.strip().translate(
+            {ord(c): "_" for c in string.whitespace}
+        )
         label = inflection.camelize(display_name, uppercase_first_letter=True)

     # This method remains for backwards compatibility
@@ -47,21 +58,27 @@ def get_class_label_from_display_name(display_name:str, strict_camel_case:bool =

     return label

-def get_attribute_display_name_from_label(node_name: str, attr_relationships: dict) -> str:
-    '''Get attribute display name for a node, using the node label, requires the attr_relationships dicitonary from the data model parser
+
+def get_attribute_display_name_from_label(
+    node_name: str, attr_relationships: dict
+) -> str:
+    """Get attribute display name for a node, using the node label; requires the attr_relationships dictionary from the data model parser
     Args:
         node_name, str: node label
         attr_relationships, dict: dictionary defining attributes and relationships, generated in data model parser.

     Returns:
         display_name, str: node display name, recorded in attr_relationships.
- ''' - if 'Attribute' in attr_relationships.keys(): - display_name = attr_relationships['Attribute'] + """ + if "Attribute" in attr_relationships.keys(): + display_name = attr_relationships["Attribute"] else: display_name = node_name return display_name -def get_label_from_display_name(display_name:str, entry_type:str, strict_camel_case:bool = False) -> str: + +def get_label_from_display_name( + display_name: str, entry_type: str, strict_camel_case: bool = False +) -> str: """Get node label from provided display name, based on whether the node is a class or property Args: display_name, str: node display name @@ -73,15 +90,22 @@ def get_label_from_display_name(display_name:str, entry_type:str, strict_camel_c ValueError if entry_type.lower(), is not either 'class' or 'property' """ - if entry_type.lower()=='class': - label = get_class_label_from_display_name(display_name=display_name, strict_camel_case=strict_camel_case) - - elif entry_type.lower()=='property': - label=get_property_label_from_display_name(display_name=display_name, strict_camel_case=strict_camel_case) + if entry_type.lower() == "class": + label = get_class_label_from_display_name( + display_name=display_name, strict_camel_case=strict_camel_case + ) + + elif entry_type.lower() == "property": + label = get_property_label_from_display_name( + display_name=display_name, strict_camel_case=strict_camel_case + ) else: - raise ValueError(f"The entry type submitted: {entry_type}, is not one of the permitted types: 'class' or 'property'") + raise ValueError( + f"The entry type submitted: {entry_type}, is not one of the permitted types: 'class' or 'property'" + ) return label + def convert_bool_to_str(provided_bool: bool) -> str: """Convert bool to string. Args: @@ -91,17 +115,19 @@ def convert_bool_to_str(provided_bool: bool) -> str: """ return str(provided_bool) -def parse_validation_rules(validation_rules:List[str]) -> List[str]: + +def parse_validation_rules(validation_rules: List[str]) -> List[str]: """Split multiple validation rules based on :: delimiter Args: validation_rules, list: list containing a string validation rule Returns: validation_rules, list: if submitted List """ - if validation_rules and '::' in validation_rules[0]: - validation_rules = validation_rules[0].split('::') + if validation_rules and "::" in validation_rules[0]: + validation_rules = validation_rules[0].split("::") return validation_rules + def export_schema(schema: dict, file_path: str) -> None: """Export schema to given filepath. Args: @@ -111,16 +137,17 @@ def export_schema(schema: dict, file_path: str) -> None: with open(file_path, "w") as f: json.dump(schema, f, sort_keys=True, indent=4, ensure_ascii=False) + def strip_context(context_value: str) -> tuple[str]: - """Strip contexts from str entry. - Args: - context_value, str: string from which to strip context from - Returns: - context, str: the original context - v, str: value separated from context - """ - if ':' in context_value: - context, v = context_value.split(':') - elif '@' in context_value: - context, v = context_value.split('@') - return context, v + """Strip contexts from str entry. 
+ Args: + context_value, str: string from which to strip context from + Returns: + context, str: the original context + v, str: value separated from context + """ + if ":" in context_value: + context, v = context_value.split(":") + elif "@" in context_value: + context, v = context_value.split("@") + return context, v diff --git a/schematic/utils/validate_rules_utils.py b/schematic/utils/validate_rules_utils.py index f0e91a470..f1588ed2e 100644 --- a/schematic/utils/validate_rules_utils.py +++ b/schematic/utils/validate_rules_utils.py @@ -7,163 +7,184 @@ logger = logging.getLogger(__name__) + def validation_rule_info(): - ''' + """ Function to return dict that holds information about each rule Will be pulled into validate_single_rule, validate_manifest_rules, validate_schema_rules - Structure: + Structure: Rule:{ 'arguments':(, ), 'type': , 'complementary_rules': []} } - ''' + """ rule_dict = { - "int": { - 'arguments':(1, 0), - 'type': "type_validation", - 'complementary_rules': ['inRange', 'IsNA'], - 'default_message_level': 'error'}, - - "float": { - 'arguments':(1, 0), - 'type': "type_validation", - 'complementary_rules': ['inRange', 'IsNA'], - 'default_message_level': 'error'}, - - "num": { - 'arguments':(1, 0), - 'type': "type_validation", - 'complementary_rules': ['inRange', 'IsNA'], - 'default_message_level': 'error'}, - - "str": { - 'arguments':(1, 0), - 'type': "type_validation", - 'complementary_rules': None, - 'default_message_level': 'error'}, - - "date": { - 'arguments':(1, 0), - 'type': "content_validation", - 'complementary_rules': None, - 'default_message_level': 'error' - }, - - "regex": { - 'arguments':(3, 2), - 'fixed_arg': ['strict'], - 'type': "regex_validation", - 'complementary_rules': ['list'], - 'default_message_level': 'error'}, - - "url" : { - 'arguments':(101, 0), - 'type': "url_validation", - 'complementary_rules': None, - 'default_message_level': 'error'}, - - "list": { - 'arguments':(2, 0), - 'type': "list_validation", - 'complementary_rules': ['regex'], - 'default_message_level': 'error'}, - - "matchAtLeastOne": { - 'arguments':(3, 2), - 'type': "cross_validation", - 'complementary_rules': None, - 'default_message_level': 'warning'}, - - "matchExactlyOne": { - 'arguments':(3, 2), - 'type': "cross_validation", - 'complementary_rules': None, - 'default_message_level': 'warning'}, - - "recommended": { - 'arguments':(1, 0), - 'type': "content_validation", - 'complementary_rules': None, - 'default_message_level': 'warning'}, - - "protectAges": { - 'arguments':(1, 0), - 'type': "content_validation", - 'complementary_rules': ['inRange',], - 'default_message_level': 'warning'}, - - "unique": { - 'arguments':(1, 0), - 'type': "content_validation", - 'complementary_rules': None, - 'default_message_level': 'error'}, - - "inRange": { - 'arguments':(3, 2), - 'type': "content_validation", - 'complementary_rules': ['int','float','num','protectAges'], - 'default_message_level': 'error'}, - - "IsNA": { - 'arguments':(1, 0), - 'type': "content_validation", - 'complementary_rules': ['int', 'float', 'num', ], - 'default_message_level': 'warning'}, - } + "int": { + "arguments": (1, 0), + "type": "type_validation", + "complementary_rules": ["inRange", "IsNA"], + "default_message_level": "error", + }, + "float": { + "arguments": (1, 0), + "type": "type_validation", + "complementary_rules": ["inRange", "IsNA"], + "default_message_level": "error", + }, + "num": { + "arguments": (1, 0), + "type": "type_validation", + "complementary_rules": ["inRange", "IsNA"], + 
"default_message_level": "error", + }, + "str": { + "arguments": (1, 0), + "type": "type_validation", + "complementary_rules": None, + "default_message_level": "error", + }, + "date": { + "arguments": (1, 0), + "type": "content_validation", + "complementary_rules": None, + "default_message_level": "error", + }, + "regex": { + "arguments": (3, 2), + "fixed_arg": ["strict"], + "type": "regex_validation", + "complementary_rules": ["list"], + "default_message_level": "error", + }, + "url": { + "arguments": (101, 0), + "type": "url_validation", + "complementary_rules": None, + "default_message_level": "error", + }, + "list": { + "arguments": (2, 0), + "type": "list_validation", + "complementary_rules": ["regex"], + "default_message_level": "error", + }, + "matchAtLeastOne": { + "arguments": (3, 2), + "type": "cross_validation", + "complementary_rules": None, + "default_message_level": "warning", + }, + "matchExactlyOne": { + "arguments": (3, 2), + "type": "cross_validation", + "complementary_rules": None, + "default_message_level": "warning", + }, + "recommended": { + "arguments": (1, 0), + "type": "content_validation", + "complementary_rules": None, + "default_message_level": "warning", + }, + "protectAges": { + "arguments": (1, 0), + "type": "content_validation", + "complementary_rules": [ + "inRange", + ], + "default_message_level": "warning", + }, + "unique": { + "arguments": (1, 0), + "type": "content_validation", + "complementary_rules": None, + "default_message_level": "error", + }, + "inRange": { + "arguments": (3, 2), + "type": "content_validation", + "complementary_rules": ["int", "float", "num", "protectAges"], + "default_message_level": "error", + }, + "IsNA": { + "arguments": (1, 0), + "type": "content_validation", + "complementary_rules": [ + "int", + "float", + "num", + ], + "default_message_level": "warning", + }, + } return rule_dict -def get_error(validation_rules: list, - attribute_name: str, error_type: str, input_filetype:str,) -> List[str]: - ''' - Generate error message for errors when trying to specify + +def get_error( + validation_rules: list, + attribute_name: str, + error_type: str, + input_filetype: str, +) -> List[str]: + """ + Generate error message for errors when trying to specify multiple validation rules. - ''' - error_col = attribute_name # Attribute name - - if error_type == 'delimiter': - error_str = (f"The {input_filetype}, has an error in the validation rule " + """ + error_col = attribute_name # Attribute name + + if error_type == "delimiter": + error_str = ( + f"The {input_filetype}, has an error in the validation rule " f"for the attribute: {attribute_name}, the provided validation rules ({validation_rules}) are improperly " - f"specified. Please check your delimiter is '::'") + f"specified. Please check your delimiter is '::'" + ) logging.error(error_str) error_message = error_str error_val = f"Multiple Rules: Delimiter" - - if error_type == 'not_rule': - error_str = (f"The {input_filetype}, has an error in the validation rule " + + if error_type == "not_rule": + error_str = ( + f"The {input_filetype}, has an error in the validation rule " f"for the attribute: {attribute_name}, the provided validation rules ({validation_rules}) is not " - f"a valid rule. Please check spelling.") + f"a valid rule. Please check spelling." 
+ ) logging.error(error_str) error_message = error_str error_val = f"Not a Rule" - - if error_type == 'args_not_allowed': - error_str = (f"The {input_filetype}, has an error in the validation rule " + + if error_type == "args_not_allowed": + error_str = ( + f"The {input_filetype}, has an error in the validation rule " f"for the attribute: {attribute_name}, the provided validation rules ({validation_rules}) is not" - f"formatted properly. No additional arguments are allowed for this rule.") + f"formatted properly. No additional arguments are allowed for this rule." + ) logging.error(error_str) error_message = error_str error_val = f"Args not allowed." - if error_type == 'incorrect_num_args': - rule_type=validation_rules.split(" ")[0] - + if error_type == "incorrect_num_args": + rule_type = validation_rules.split(" ")[0] + if rule_type in validation_rule_info(): - no_allowed, no_required = validation_rule_info()[rule_type]['arguments'] + no_allowed, no_required = validation_rule_info()[rule_type]["arguments"] else: - no_allowed, no_required = ('', '') + no_allowed, no_required = ("", "") - error_str = (f"The {input_filetype}, has an error in the validation rule " + error_str = ( + f"The {input_filetype}, has an error in the validation rule " f"for the attribute: {attribute_name}, the provided validation rules ({validation_rules}) is not " - f"formatted properly. The number of provided arguments does not match the number allowed({no_allowed}) or required({no_required}).") + f"formatted properly. The number of provided arguments does not match the number allowed({no_allowed}) or required({no_required})." + ) logging.error(error_str) error_message = error_str error_val = f"Incorrect num arguments." - - return ['NA', error_col, error_message, error_val] + + return ["NA", error_col, error_message, error_val] + def validate_single_rule(validation_rule, attribute, input_filetype): - ''' + """ Perform validation for a single rule to ensure it is specified correctly with an appropriate number of arguments Inputs: validation_rule: single rule being validated @@ -172,49 +193,78 @@ def validate_single_rule(validation_rule, attribute, input_filetype): Returns: errors: List of errors - ''' + """ errors = [] validation_types = validation_rule_info() validation_rule_with_args = [ - val_rule.strip() for val_rule in validation_rule.strip().split(" ")] + val_rule.strip() for val_rule in validation_rule.strip().split(" ") + ] rule_type = validation_rule_with_args[0] # ensure rules are not delimited incorrectly - if ':' in validation_rule: - errors.append(get_error(validation_rule, attribute, - error_type = 'delimiter', input_filetype=input_filetype)) + if ":" in validation_rule: + errors.append( + get_error( + validation_rule, + attribute, + error_type="delimiter", + input_filetype=input_filetype, + ) + ) # Check that the rule is actually a valid rule type. elif rule_type not in validation_types.keys(): - errors.append(get_error(validation_rule, attribute, - error_type = 'not_rule', input_filetype=input_filetype)) + errors.append( + get_error( + validation_rule, + attribute, + error_type="not_rule", + input_filetype=input_filetype, + ) + ) # if the rule is indeed a rule and formatted correctly, check that arguments are appropriate else: - arguments_allowed, arguments_required = validation_types[rule_type]['arguments'] + arguments_allowed, arguments_required = validation_types[rule_type]["arguments"] # Remove any fixed args from our calc. 
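
The (allowed, required) tuples in the rule table above drive the argument check that follows. A minimal standalone sketch of that count, under the same conventions; the helper name check_arg_count is hypothetical and not part of schematic:

def check_arg_count(rule: str, allowed: int, required: int, fixed_args=()) -> bool:
    # The first token is the rule name itself; fixed args such as "strict"
    # are excluded before counting, mirroring the handling below.
    tokens = [t for t in rule.strip().split(" ") if t not in fixed_args]
    num_args = len(tokens) - 1
    return required <= num_args <= allowed

assert check_arg_count("regex search [a-f]", 3, 2, fixed_args=("strict",))
assert not check_arg_count("regex match", 3, 2, fixed_args=("strict",))
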
- if 'fixed_arg' in validation_types[rule_type].keys(): - fixed_args = validation_types[rule_type]['fixed_arg'] - num_args = len([vr for vr in validation_rule_with_args if vr not in fixed_args])-1 + if "fixed_arg" in validation_types[rule_type].keys(): + fixed_args = validation_types[rule_type]["fixed_arg"] + num_args = ( + len([vr for vr in validation_rule_with_args if vr not in fixed_args]) + - 1 + ) else: num_args = len(validation_rule_with_args) - 1 - + # If arguments are provided but not allowed, raise an error. if num_args and not arguments_allowed: - errors.append(get_error(validation_rule, attribute, - error_type = 'args_not_allowed', input_filetype=input_filetype)) - + errors.append( + get_error( + validation_rule, + attribute, + error_type="args_not_allowed", + input_filetype=input_filetype, + ) + ) + # If arguments are allowed, check that the correct amount have been passed. # There must be at least the number of args required, # and not more than allowed elif arguments_allowed: if (num_args < arguments_required) or (num_args > arguments_allowed): - errors.append(get_error(validation_rule, attribute, - error_type = 'incorrect_num_args', input_filetype=input_filetype)) + errors.append( + get_error( + validation_rule, + attribute, + error_type="incorrect_num_args", + input_filetype=input_filetype, + ) + ) return errors + def validate_schema_rules(validation_rules, attribute, input_filetype): - ''' + """ validation_rules: list input_filetype: str, used in error generation to aid user in locating the source of the error. @@ -222,19 +272,18 @@ def validate_schema_rules(validation_rules, attribute, input_filetype): Validation Rules Formatting rules: Single Rules: Specified with the correct required arguments with no more than what is allowed - ''' + """ errors = [] - + # validate each individual rule for rule in validation_rules: - errors.extend(validate_single_rule(rule, - attribute, input_filetype)) + errors.extend(validate_single_rule(rule, attribute, input_filetype)) if errors: raise ValidationError( - f"The {input_filetype} has an error in the validation_rules set " - f"for attribute {attribute}. " - f"Validation failed with the following errors: {errors}" - ) - - return \ No newline at end of file + f"The {input_filetype} has an error in the validation_rules set " + f"for attribute {attribute}. " + f"Validation failed with the following errors: {errors}" + ) + + return diff --git a/schematic/utils/validate_utils.py b/schematic/utils/validate_utils.py index 78c8277eb..ee64728a4 100644 --- a/schematic/utils/validate_utils.py +++ b/schematic/utils/validate_utils.py @@ -8,6 +8,7 @@ import numpy as np from numbers import Number + def validate_schema(schema): """Validate schema against schema.org standard""" data_path = "validation_schemas/model.schema.json" @@ -31,49 +32,52 @@ def validate_class_schema(schema): json_schema = load_json(json_schema_path) return validate(schema, json_schema) + def comma_separated_list_regex(): - # Regex to match with comma separated list - # Requires at least one element and a comma to be valid + # Regex to match with comma separated list + # Requires at least one element and a comma to be valid # Does not require a trailing comma - csv_list_regex=compile('([^\,]+\,)(([^\,]+\,?)*)') + csv_list_regex = compile("([^\,]+\,)(([^\,]+\,?)*)") return csv_list_regex + def rule_in_rule_list(rule: str, rule_list: List[str]): - # Function to standardize - # checking to see if a rule is contained in a list of rules. 
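
For reference, the comma_separated_list_regex pattern defined above behaves as follows under re.fullmatch (a sketch; how callers apply the compiled pattern is not shown in this hunk, and the pattern is written as a raw string here to avoid escape warnings):

import re

csv_list_regex = re.compile(r"([^\,]+\,)(([^\,]+\,?)*)")  # same pattern as above

assert csv_list_regex.fullmatch("ab,cd,ef")        # multi-element list matches
assert csv_list_regex.fullmatch("ab,")             # one element plus comma matches
assert csv_list_regex.fullmatch("ab") is None      # no comma, so not a valid list
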
- # Uses regex to avoid issues arising from validation rules with arguments + # Function to standardize + # checking to see if a rule is contained in a list of rules. + # Uses regex to avoid issues arising from validation rules with arguments # or rules that have arguments updated. # seperate rule type if arguments are specified rule_type = rule.split(" ")[0] # Process string and list of strings for regex comparison - rule_type = rule_type + '[^\|]*' - rule_list = '|'.join(rule_list) + rule_type = rule_type + "[^\|]*" + rule_list = "|".join(rule_list) return search(rule_type, rule_list, flags=IGNORECASE) + def parse_str_series_to_list(col: pd.Series): """ Parse a pandas series of comma delimited strings - into a series with values that are lists of strings - ex. + into a series with values that are lists of strings + ex. Input: 'a,b,c' - Output: ['a','b','c'] + Output: ['a','b','c'] """ - col = col.apply( - lambda x: [s.strip() for s in str(x).split(",")] - ) + col = col.apply(lambda x: [s.strip() for s in str(x).split(",")]) return col + def np_array_to_str_list(np_array): """ Parse a numpy array of ints to a list of strings """ - return np.char.mod('%d', np_array).tolist() + return np.char.mod("%d", np_array).tolist() + def iterable_to_str_list(iterable): """ @@ -94,4 +98,3 @@ def iterable_to_str_list(iterable): strlist.append(str(element)) return strlist - \ No newline at end of file diff --git a/schematic/visualization/__init__.py b/schematic/visualization/__init__.py index b5dbf3f05..a96118fc3 100644 --- a/schematic/visualization/__init__.py +++ b/schematic/visualization/__init__.py @@ -1,2 +1,2 @@ from schematic.visualization.attributes_explorer import AttributesExplorer -from schematic.visualization.tangled_tree import TangledTree \ No newline at end of file +from schematic.visualization.tangled_tree import TangledTree diff --git a/schematic/visualization/attributes_explorer.py b/schematic/visualization/attributes_explorer.py index 0917172dd..90877e7e8 100644 --- a/schematic/visualization/attributes_explorer.py +++ b/schematic/visualization/attributes_explorer.py @@ -14,19 +14,20 @@ logger = logging.getLogger(__name__) -class AttributesExplorer(): - def __init__(self, - path_to_jsonld: str, - )-> None: - + +class AttributesExplorer: + def __init__( + self, + path_to_jsonld: str, + ) -> None: self.path_to_jsonld = path_to_jsonld self.jsonld = load_json(self.path_to_jsonld) # Instantiate Data Model Parser - data_model_parser = DataModelParser(path_to_data_model = self.path_to_jsonld) - - #Parse Model + data_model_parser = DataModelParser(path_to_data_model=self.path_to_jsonld) + + # Parse Model parsed_data_model = data_model_parser.parse_model() # Instantiate DataModelGraph @@ -39,56 +40,66 @@ def __init__(self, self.dmge = DataModelGraphExplorer(self.graph_data_model) # Instantiate Data Model Json Schema - self.data_model_js = DataModelJSONSchema(jsonld_path=self.path_to_jsonld, graph=self.graph_data_model) - - self.output_path = self.create_output_path('merged_csv') - + self.data_model_js = DataModelJSONSchema( + jsonld_path=self.path_to_jsonld, graph=self.graph_data_model + ) + + self.output_path = self.create_output_path("merged_csv") + def create_output_path(self, terminal_folder): - ''' Create output path to store Observable visualization data if it does not already exist. - + """Create output path to store Observable visualization data if it does not already exist. 
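
rule_in_rule_list above compares only the rule name, so arguments attached to either side do not break the lookup. A worked example of that matching:

from re import IGNORECASE, search

rule = "list strict"                     # rule with an argument attached
complementary_rules = ["regex", "list"]  # e.g. regex's complementary rules

rule_type = rule.split(" ")[0] + r"[^\|]*"   # -> 'list[^\|]*'
joined = "|".join(complementary_rules)       # -> 'regex|list'

assert search(rule_type, joined, flags=IGNORECASE)               # found
assert search(r"unique[^\|]*", joined, flags=IGNORECASE) is None  # not found
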
+ Args: self.path_to_jsonld - - Returns: output_path (str): path to store outputs - ''' + + Returns: output_path (str): path to store outputs + """ base_dir = os.path.dirname(self.path_to_jsonld) - self.schema_name = self.path_to_jsonld.split('/')[-1].split('.model.jsonld')[0] - output_path = os.path.join(base_dir, 'visualization', self.schema_name, terminal_folder) + self.schema_name = self.path_to_jsonld.split("/")[-1].split(".model.jsonld")[0] + output_path = os.path.join( + base_dir, "visualization", self.schema_name, terminal_folder + ) if not os.path.exists(output_path): os.makedirs(output_path) return output_path def convert_string_cols_to_json(self, df: pd.DataFrame, cols_to_modify: list): - """Converts values in a column from strings to JSON list + """Converts values in a column from strings to JSON list for upload to Synapse. """ for col in df.columns: if col in cols_to_modify: - df[col] = df[col].apply(lambda x: json.dumps([y.strip() for y in x]) if x != "NaN" and x and x == np.nan else x) + df[col] = df[col].apply( + lambda x: json.dumps([y.strip() for y in x]) + if x != "NaN" and x and x == np.nan + else x + ) return df def parse_attributes(self, save_file=True): - ''' + """ Args: save_file (bool): True: merged_df is saved locally to output_path. False: merged_df is returned. Returns: merged_df (pd.DataFrame): dataframe containing data relating to attributes - for the provided data model for all components in the data model. + for the provided data model for all components in the data model. Dataframe is saved locally as a csv if save_file == True, or returned if - save_file == False. - - ''' + save_file == False. + + """ # get all components - component_dg = self.dmge.get_digraph_by_edge_type('requiresComponent') + component_dg = self.dmge.get_digraph_by_edge_type("requiresComponent") components = component_dg.nodes() - + # For each data type to be loaded gather all attribtes the user would # have to provide. return self._parse_attributes(components, save_file) - - def parse_component_attributes(self, component=None, save_file=True, include_index=True): - ''' + + def parse_component_attributes( + self, component=None, save_file=True, include_index=True + ): + """ Args: save_file (bool): True: merged_df is saved locally to output_path. False: merged_df is returned. @@ -97,10 +108,10 @@ def parse_component_attributes(self, component=None, save_file=True, include_ind Returns: merged_df (pd.DataFrame): dataframe containing data relating to attributes - for the provided data model for the specified component in the data model. + for the provided data model for the specified component in the data model. Dataframe is saved locally as a csv if save_file == True, or returned if - save_file == False. - ''' + save_file == False. + """ if not component: raise ValueError("You must provide a component to visualize.") @@ -108,7 +119,7 @@ def parse_component_attributes(self, component=None, save_file=True, include_ind return self._parse_attributes([component], save_file, include_index) def _parse_attributes(self, components, save_file=True, include_index=True): - ''' + """ Args: save_file (bool): True: merged_df is saved locally to output_path. False: merged_df is returned. @@ -119,15 +130,15 @@ def _parse_attributes(self, components, save_file=True, include_index=True): Returns: merged_df (pd.DataFrame): dataframe containing data relating to attributes - for the provided data model for specified components in the data model. 
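
One caveat on convert_string_cols_to_json above: the guard `x == np.nan` is always False in Python, because NaN never compares equal to anything, itself included, so the lambda returns every value unchanged. A sketch of a guard that likely matches the intent; this is an assumption, since the intended behavior is not spelled out in this hunk, and maybe_json is a hypothetical stand-in for the lambda:

import json
import pandas as pd

def maybe_json(x):  # hypothetical replacement for the lambda above
    # pd.isna detects NaN correctly, unlike an equality test against np.nan
    if isinstance(x, float) and pd.isna(x):
        return x
    if not x or x == "NaN":
        return x
    return json.dumps([y.strip() for y in x])

pd.Series([["a ", "b"], float("nan")]).apply(maybe_json)
# -> '["a", "b"]' in row 0; the NaN in row 1 is left untouched
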
+ for the provided data model for specified components in the data model. Dataframe is saved locally as a csv if save_file == True, or returned if - save_file == False. + save_file == False. Raises: ValueError: - If unable hits an error while attempting to get conditional requirements. + If unable hits an error while attempting to get conditional requirements. This error is likely to be found if there is a mismatch in naming. - ''' - + """ + # For each data type to be loaded gather all attribtes the user would # have to provide. df_store = [] @@ -135,56 +146,67 @@ def _parse_attributes(self, components, save_file=True, include_index=True): data_dict = {} # get the json schema - json_schema = self.data_model_js.get_json_validation_schema(source_node=component, schema_name=self.path_to_jsonld) + json_schema = self.data_model_js.get_json_validation_schema( + source_node=component, schema_name=self.path_to_jsonld + ) # Gather all attribues, their valid values and requirements - for key, value in json_schema['properties'].items(): + for key, value in json_schema["properties"].items(): data_dict[key] = {} for k, v in value.items(): - if k == 'enum': - data_dict[key]['Valid Values'] = value['enum'] - if key in json_schema['required']: - data_dict[key]['Required'] = True + if k == "enum": + data_dict[key]["Valid Values"] = value["enum"] + if key in json_schema["required"]: + data_dict[key]["Required"] = True else: - data_dict[key]['Required'] = False - data_dict[key]['Component'] = component + data_dict[key]["Required"] = False + data_dict[key]["Component"] = component # Add additional details per key (from the JSON-ld) - for dic in self.jsonld['@graph']: - if 'sms:displayName' in dic.keys(): - key = dic['sms:displayName'] + for dic in self.jsonld["@graph"]: + if "sms:displayName" in dic.keys(): + key = dic["sms:displayName"] if key in data_dict.keys(): - data_dict[key]['Attribute'] = dic['sms:displayName'] - data_dict[key]['Label'] = dic['rdfs:label'] - data_dict[key]['Description'] = dic['rdfs:comment'] - if 'validationRules' in dic.keys(): - data_dict[key]['Validation Rules'] = dic['validationRules'] + data_dict[key]["Attribute"] = dic["sms:displayName"] + data_dict[key]["Label"] = dic["rdfs:label"] + data_dict[key]["Description"] = dic["rdfs:comment"] + if "validationRules" in dic.keys(): + data_dict[key]["Validation Rules"] = dic["validationRules"] # Find conditional dependencies - if 'allOf' in json_schema.keys(): - for conditional_dependencies in json_schema['allOf']: - key = list(conditional_dependencies['then']['properties'])[0] + if "allOf" in json_schema.keys(): + for conditional_dependencies in json_schema["allOf"]: + key = list(conditional_dependencies["then"]["properties"])[0] try: if key in data_dict.keys(): - if 'Cond_Req' not in data_dict[key].keys(): - data_dict[key]['Cond_Req'] = [] - data_dict[key]['Conditional Requirements'] = [] - attribute = list(conditional_dependencies['if']['properties'])[0] - value = conditional_dependencies['if']['properties'][attribute]['enum'] + if "Cond_Req" not in data_dict[key].keys(): + data_dict[key]["Cond_Req"] = [] + data_dict[key]["Conditional Requirements"] = [] + attribute = list( + conditional_dependencies["if"]["properties"] + )[0] + value = conditional_dependencies["if"]["properties"][ + attribute + ]["enum"] # Capitalize attribute if it begins with a lowercase letter, for aesthetics. if attribute[0].islower(): attribute = attribute.capitalize() # Remove "Type" (i.e. 
turn "Biospecimen Type" to "Biospcimen") - if "Type" in attribute: + if "Type" in attribute: attribute = attribute.split(" ")[0] - + # Remove "Type" (i.e. turn "Tissue Type" to "Tissue") if "Type" in value[0]: value[0] = value[0].split(" ")[0] conditional_statement = f'{attribute} is "{value[0]}"' - if conditional_statement not in data_dict[key]['Conditional Requirements']: - data_dict[key]['Cond_Req'] = True - data_dict[key]['Conditional Requirements'].extend([conditional_statement]) + if ( + conditional_statement + not in data_dict[key]["Conditional Requirements"] + ): + data_dict[key]["Cond_Req"] = True + data_dict[key]["Conditional Requirements"].extend( + [conditional_statement] + ) except: raise ValueError( f"There is an error getting conditional requirements related " @@ -192,36 +214,69 @@ def _parse_attributes(self, components, save_file=True, include_index=True): ) for key, value in data_dict.items(): - if 'Conditional Requirements' in value.keys(): + if "Conditional Requirements" in value.keys(): + ## reformat conditional requirement - ## reformat conditional requirement + # get all attributes + attr_lst = [ + i.split(" is ")[-1] + for i in data_dict[key]["Conditional Requirements"] + ] - # get all attributes - attr_lst = [i.split(" is ")[-1] for i in data_dict[key]['Conditional Requirements']] - - # join a list of attributes by using OR + # join a list of attributes by using OR attr_str = " OR ".join(attr_lst) - # reformat the conditional requirement - component_name = data_dict[key]['Conditional Requirements'][0].split(' is ')[0] - conditional_statement_str = f' If {component_name} is {attr_str} then "{key}" is required' + # reformat the conditional requirement + component_name = data_dict[key]["Conditional Requirements"][ + 0 + ].split(" is ")[0] + conditional_statement_str = ( + f' If {component_name} is {attr_str} then "{key}" is required' + ) - data_dict[key]['Conditional Requirements'] = conditional_statement_str + data_dict[key][ + "Conditional Requirements" + ] = conditional_statement_str df = pd.DataFrame(data_dict) df = df.T - cols = ['Attribute', 'Label', 'Description', 'Required', 'Cond_Req', 'Valid Values', 'Conditional Requirements', 'Validation Rules', 'Component'] + cols = [ + "Attribute", + "Label", + "Description", + "Required", + "Cond_Req", + "Valid Values", + "Conditional Requirements", + "Validation Rules", + "Component", + ] cols = [col for col in cols if col in df.columns] df = df[cols] - df = self.convert_string_cols_to_json(df, ['Valid Values']) - #df.to_csv(os.path.join(csv_output_path, data_type + '.vis_data.csv')) + df = self.convert_string_cols_to_json(df, ["Valid Values"]) + # df.to_csv(os.path.join(csv_output_path, data_type + '.vis_data.csv')) df_store.append(df) - merged_attributes_df = pd.concat(df_store, join='outer') - cols = ['Attribute', 'Label', 'Description', 'Required', 'Cond_Req', 'Valid Values', 'Conditional Requirements', 'Validation Rules', 'Component'] + merged_attributes_df = pd.concat(df_store, join="outer") + cols = [ + "Attribute", + "Label", + "Description", + "Required", + "Cond_Req", + "Valid Values", + "Conditional Requirements", + "Validation Rules", + "Component", + ] cols = [col for col in cols if col in merged_attributes_df.columns] merged_attributes_df = merged_attributes_df[cols] if save_file == True: - return merged_attributes_df.to_csv(os.path.join(self.output_path, self.schema_name + 'attributes_data.vis_data.csv'), index=include_index) + return merged_attributes_df.to_csv( + os.path.join( + self.output_path, 
self.schema_name + "attributes_data.vis_data.csv" + ), + index=include_index, + ) elif save_file == False: return merged_attributes_df.to_csv(index=include_index) diff --git a/schematic/visualization/commands.py b/schematic/visualization/commands.py index ad9670e2b..5ecc4f8f7 100644 --- a/schematic/visualization/commands.py +++ b/schematic/visualization/commands.py @@ -18,6 +18,7 @@ CONTEXT_SETTINGS = dict(help_option_names=["--help", "-h"]) # help options + # invoke_without_command=True -> forces the application not to show aids before losing them with a --h @click.group(context_settings=CONTEXT_SETTINGS, invoke_without_command=True) @click_log.simple_verbosity_option(logger) @@ -36,22 +37,20 @@ def viz(ctx, config): # use as `schematic model ...` try: logger.debug(f"Loading config file contents in '{config}'") CONFIG.load_config(config) - ctx.obj = CONFIG + ctx.obj = CONFIG except ValueError as e: logger.error("'--config' not provided or environment variable not set.") logger.exception(e) sys.exit(1) + @viz.command( "attributes", ) @click_log.simple_verbosity_option(logger) - @click.pass_obj def get_attributes(ctx): - """ - - """ + """ """ # Get JSONLD file path path_to_jsonld = CONFIG.model_location log_value_from_config("jsonld", path_to_jsonld) @@ -59,31 +58,28 @@ def get_attributes(ctx): AttributesExplorer(path_to_jsonld).parse_attributes(save_file=True) return -@viz.command( - "tangled_tree_text" -) + +@viz.command("tangled_tree_text") @click_log.simple_verbosity_option(logger) @click.option( "-ft", "--figure_type", - type=click.Choice(['component', 'dependency'], case_sensitive=False), + type=click.Choice(["component", "dependency"], case_sensitive=False), help=query_dict(viz_commands, ("visualization", "tangled_tree", "figure_type")), ) @click.option( "-tf", "--text_format", - type=click.Choice(['plain', 'highlighted'], case_sensitive=False), + type=click.Choice(["plain", "highlighted"], case_sensitive=False), help=query_dict(viz_commands, ("visualization", "tangled_tree", "text_format")), ) - @click.pass_obj def get_tangled_tree_text(ctx, figure_type, text_format): - """ Get text to be placed on the tangled tree visualization. - """ + """Get text to be placed on the tangled tree visualization.""" # Get JSONLD file path path_to_jsonld = CONFIG.model_location log_value_from_config("jsonld", path_to_jsonld) - + # Initialize TangledTree tangled_tree = TangledTree(path_to_jsonld, figure_type) @@ -91,28 +87,25 @@ def get_tangled_tree_text(ctx, figure_type, text_format): text_df = tangled_tree.get_text_for_tangled_tree(text_format, save_file=True) return -@viz.command( - "tangled_tree_layers" -) + +@viz.command("tangled_tree_layers") @click_log.simple_verbosity_option(logger) @click.option( "-ft", "--figure_type", - type=click.Choice(['component', 'dependency'], case_sensitive=False), + type=click.Choice(["component", "dependency"], case_sensitive=False), help=query_dict(viz_commands, ("visualization", "tangled_tree", "figure_type")), ) - @click.pass_obj def get_tangled_tree_component_layers(ctx, figure_type): - ''' Get the components that belong in each layer of the tangled tree visualization. - ''' + """Get the components that belong in each layer of the tangled tree visualization.""" # Get JSONLD file path path_to_jsonld = CONFIG.model_location log_value_from_config("jsonld", path_to_jsonld) - + # Initialize Tangled Tree tangled_tree = TangledTree(path_to_jsonld, figure_type) - + # Get tangled trees layers JSON. 
layers = tangled_tree.get_tangled_tree_layers(save_file=True) diff --git a/schematic/visualization/tangled_tree.py b/schematic/visualization/tangled_tree.py index 83635a39c..d0de01bf7 100644 --- a/schematic/visualization/tangled_tree.py +++ b/schematic/visualization/tangled_tree.py @@ -25,17 +25,18 @@ logger = logging.getLogger(__name__) -#OUTPUT_DATA_DIR = str(Path('tests/data/visualization/AMPAD').resolve()) -#DATA_DIR = str(Path('tests/data').resolve()) +# OUTPUT_DATA_DIR = str(Path('tests/data/visualization/AMPAD').resolve()) +# DATA_DIR = str(Path('tests/data').resolve()) + class TangledTree(object): - """ - """ + """ """ - def __init__(self, - path_to_json_ld: str, - figure_type: str, - ) -> None: + def __init__( + self, + path_to_json_ld: str, + figure_type: str, + ) -> None: # Load jsonld self.path_to_json_ld = path_to_json_ld self.json_data_model = load_json(self.path_to_json_ld) @@ -44,9 +45,9 @@ def __init__(self, self.schema_name = path.basename(self.path_to_json_ld).split(".model.jsonld")[0] # Instantiate Data Model Parser - data_model_parser = DataModelParser(path_to_data_model = self.path_to_json_ld) - - #Parse Model + data_model_parser = DataModelParser(path_to_data_model=self.path_to_json_ld) + + # Parse Model parsed_data_model = data_model_parser.parse_model() # Instantiate DataModelGraph @@ -60,18 +61,18 @@ def __init__(self, # Set Parameters self.figure_type = figure_type.lower() - self.dependency_type = ''.join(('requires', self.figure_type.capitalize())) + self.dependency_type = "".join(("requires", self.figure_type.capitalize())) # Get names self.schema = load_json(self.path_to_json_ld) - self.schema_abbr = self.schema_name.split('_')[0] + self.schema_abbr = self.schema_name.split("_")[0] # Initialize AttributesExplorer self.ae = AttributesExplorer(self.path_to_json_ld) # Create output paths. - self.text_csv_output_path = self.ae.create_output_path('text_csv') - self.json_output_path = self.ae.create_output_path('tangled_tree_json') + self.text_csv_output_path = self.ae.create_output_path("text_csv") + self.json_output_path = self.ae.create_output_path("tangled_tree_json") def strip_double_quotes(self, string): # Remove double quotes from beginning and end of string. @@ -82,7 +83,7 @@ def strip_double_quotes(self, string): return string def get_text_for_tangled_tree(self, text_type, save_file=False): - '''Gather the text that needs to be either higlighted or plain for the tangled tree visualization. + """Gather the text that needs to be either higlighted or plain for the tangled tree visualization. Args: text_type (str): Choices = ['highlighted', 'plain'], determines the type of text rendering to return. @@ -90,16 +91,16 @@ def get_text_for_tangled_tree(self, text_type, save_file=False): Returns: If save_file==True: Saves plain or highlighted text as a CSV (to disk). save_file==False: Returns plain or highlighted text as a csv string. 
- ''' + """ # Get nodes in the digraph, many more nodes returned if figure type is dependency cdg = self.dmge.get_digraph_by_edge_type(self.dependency_type) nodes = cdg.nodes() - if self.dependency_type == 'requiresComponent': + if self.dependency_type == "requiresComponent": component_nodes = nodes else: # get component nodes if making dependency figure - component_dg = self.dmge.get_digraph_by_edge_type('requiresComponent') + component_dg = self.dmge.get_digraph_by_edge_type("requiresComponent") component_nodes = component_dg.nodes() # Initialize lists @@ -109,12 +110,13 @@ def get_text_for_tangled_tree(self, text_type, save_file=False): # For each component node in the tangled tree gather the plain and higlighted text. for node in component_nodes: # Get the highlighted components based on figure_type - if self.figure_type == 'component': - highlight_descendants = self.dmge.get_descendants_by_edge_type(node, 'requiresComponent') - elif self.figure_type == 'dependency': + if self.figure_type == "component": + highlight_descendants = self.dmge.get_descendants_by_edge_type( + node, "requiresComponent" + ) + elif self.figure_type == "dependency": highlight_descendants = [node] - # Format text to be higlighted and gather text to be formated plain. if not highlight_descendants: # If there are no highlighted descendants just highlight the selected node (format for observable.) @@ -126,98 +128,107 @@ def get_text_for_tangled_tree(self, text_type, save_file=False): for hd in highlight_descendants: highlighted.append([node, "id", hd]) # Gather the non-higlighted text as plain text descendants. - plain_descendants = [node for node in nodes if node not in highlight_descendants] - + plain_descendants = [ + node for node in nodes if node not in highlight_descendants + ] + # Format all the plain text for observable. for nd in plain_descendants: plain.append([node, "id", nd]) # Prepare df depending on what type of text we need. - df = pd.DataFrame(locals()[text_type.lower()], columns = ['Component', 'type', 'name']) + df = pd.DataFrame( + locals()[text_type.lower()], columns=["Component", "type", "name"] + ) # Depending on input either export csv locally to disk or as a string. - if save_file==True: + if save_file == True: file_name = f"{self.schema_abbr}_{self.figure_type}_{text_type}.csv" df.to_csv(os.path.join(self.text_csv_output_path, file_name)) return - elif save_file==False: + elif save_file == False: return df.to_csv() def get_topological_generations(self): - ''' Gather topological_gen, nodes and edges based on figure type. + """Gather topological_gen, nodes and edges based on figure type. Outputs: topological_gen (List(list)):list of lists. Indicates layers of nodes. nodes: (Networkx NodeView) Nodes of the component or dependency graph. When iterated over it functions like a list. edges: (Networkx EdgeDataView) Edges of component or dependency graph. When iterated over it works like a list of tuples. - ''' + """ # Get nodes in the digraph digraph = self.dmge.get_digraph_by_edge_type(self.dependency_type) nodes = digraph.nodes() # Get subgraph - #mm_graph = self.sg.se.get_nx_schema() - #subg = self.sg.get_subgraph_by_edge_type(mm_graph, self.dependency_type) + # mm_graph = self.sg.se.get_nx_schema() + # subg = self.sg.get_subgraph_by_edge_type(mm_graph, self.dependency_type) subg = self.dmge.get_subgraph_by_edge_type(self.dependency_type) # Get edges and topological_gen based on figure type. 
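
The branch below leans on networkx topological generations (available in recent networkx releases). A small sketch of that primitive, with illustrative node names:

import networkx as nx

g = nx.DiGraph([("Patient", "Biospecimen"), ("Biospecimen", "BulkRNA-seqAssay")])

# Layers of nodes grouped by dependency depth:
print(list(nx.topological_generations(g)))
# -> [['Patient'], ['Biospecimen'], ['BulkRNA-seqAssay']]

# The component figure reverses the generations so leaves come first:
print(list(reversed(list(nx.topological_generations(g)))))
# -> [['BulkRNA-seqAssay'], ['Biospecimen'], ['Patient']]
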
- if self.figure_type == 'component': + if self.figure_type == "component": edges = digraph.edges() topological_gen = list(reversed(list(nx.topological_generations(subg)))) - elif self.figure_type == 'dependency': + elif self.figure_type == "dependency": rev_digraph = nx.DiGraph.reverse(digraph) edges = rev_digraph.edges() topological_gen = list(nx.topological_generations(subg)) - + return topological_gen, nodes, edges, subg - def remove_unwanted_characters_from_conditional_statement(self, cond_req: str) -> str: - '''Remove unwanted characters from conditional statement + def remove_unwanted_characters_from_conditional_statement( + self, cond_req: str + ) -> str: + """Remove unwanted characters from conditional statement Example of conditional requirement: If File Format IS "BAM" OR "CRAM" OR "CSV/TSV" then Genome Build is required Example output: File Format IS "BAM" OR "CRAM" OR "CSV/TSV" - ''' + """ if "then" in cond_req: # remove everything after "then" - cond_req_new = cond_req.split('then')[0] + cond_req_new = cond_req.split("then")[0] # remove "If" and empty space cond_req = cond_req_new.replace("If", "").lstrip().rstrip() return cond_req def get_ca_alias(self, conditional_requirements: list) -> dict: - '''Get the alias for each conditional attribute. + """Get the alias for each conditional attribute. NOTE: Obtaining attributes(attr) and aliases(ali) in this function is specific to how formatting is set in AttributesExplorer. If that formatting changes, this section - will likely break or in the worst case have a silent error. + will likely break or in the worst case have a silent error. Input: conditional_requirements_list (list): list of strings of conditional requirements from outputs of AttributesExplorer. Output: ca_alias (dict): key: alias (attribute response) value: attribute - ''' + """ ca_alias = {} # clean up conditional requirements - conditional_requirements = [self.remove_unwanted_characters_from_conditional_statement(req) for req in conditional_requirements] + conditional_requirements = [ + self.remove_unwanted_characters_from_conditional_statement(req) + for req in conditional_requirements + ] for i, req in enumerate(conditional_requirements): if "OR" not in req: - attr, ali = req.split(' is ') + attr, ali = req.split(" is ") attr = "".join(attr.split()) ali = self.strip_double_quotes(ali) ca_alias[ali] = attr else: - attr, alias_str = req.split(' is ') - alias_lst = alias_str.split(' OR ') + attr, alias_str = req.split(" is ") + alias_lst = alias_str.split(" OR ") for elem in alias_lst: elem = self.strip_double_quotes(elem) ca_alias[elem] = attr return ca_alias def gather_component_dependency_info(self, cn, attributes_df): - '''Gather all component dependency information. + """Gather all component dependency information. Inputs: cn: (str) component name attributes_df: (Pandas DataFrame) Details for all attributes across all components. From AttributesExplorer. @@ -227,47 +238,55 @@ def gather_component_dependency_info(self, cn, attributes_df): key: alias (attribute response) value: attribute all_attributes (list): all attributes associated with a particular component. 
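
A worked example of get_ca_alias above, reusing the conditional requirement from its docstring (with a lowercase "is", which is what the string splitting expects):

cond_req = 'If File Format is "BAM" OR "CRAM" then Genome Build is required'

# remove_unwanted_characters_from_conditional_statement keeps only the condition
condition = cond_req.split("then")[0].replace("If", "").strip()
# -> 'File Format is "BAM" OR "CRAM"'

attr, alias_str = condition.split(" is ")
attr = "".join(attr.split())                           # -> 'FileFormat'
ca_alias = {a.strip('"'): attr for a in alias_str.split(" OR ")}
# -> {'BAM': 'FileFormat', 'CRAM': 'FileFormat'}
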
- ''' + """ # Gather all component dependency information component_attributes = self.dmge.get_descendants_by_edge_type( - cn, - self.dependency_type, - connected=True - ) - + cn, self.dependency_type, connected=True + ) + # Dont want to display `Component` in the figure so remove - if 'Component' in component_attributes: - component_attributes.remove('Component') - + if "Component" in component_attributes: + component_attributes.remove("Component") + # Gather conditional attributes so they can be added to the figure. - if 'Cond_Req' in attributes_df.columns: - conditional_attributes = list(attributes_df[(attributes_df['Cond_Req']==True) - &(attributes_df['Component']==cn)]['Label']) - ca_df = attributes_df[(attributes_df['Cond_Req']==True)&(attributes_df['Component']==cn)] - conditional_requirements = list(attributes_df[(attributes_df['Cond_Req']==True) - &(attributes_df['Component']==cn)]['Conditional Requirements']) + if "Cond_Req" in attributes_df.columns: + conditional_attributes = list( + attributes_df[ + (attributes_df["Cond_Req"] == True) + & (attributes_df["Component"] == cn) + ]["Label"] + ) + ca_df = attributes_df[ + (attributes_df["Cond_Req"] == True) & (attributes_df["Component"] == cn) + ] + conditional_requirements = list( + attributes_df[ + (attributes_df["Cond_Req"] == True) + & (attributes_df["Component"] == cn) + ]["Conditional Requirements"] + ) ca_alias = self.get_ca_alias(conditional_requirements) else: # If there are no conditional attributes/requirements, initialize blank lists. conditional_attributes = [] ca_alias = {} - + # Gather a list of all attributes for the current component. - all_attributes = list(np.append(component_attributes,conditional_attributes)) - + all_attributes = list(np.append(component_attributes, conditional_attributes)) + return conditional_attributes, ca_alias, all_attributes def find_source_nodes(self, nodes, edges, all_attributes=[]): - '''Find all nodes in the graph that do not have a parent node. + """Find all nodes in the graph that do not have a parent node. Inputs: nodes: (Networkx NodeView) Nodes of the component or dependency graph. When iterated over it functions like a list. edges: (Networkx EdgeDataView) Edges of component or dependency graph. When iterated over it works like a list of tuples. attributes_df: (Pandas DataFrame) Details for all attributes across all components. From AttributesExplorer. Outputs: - source_nodes (list(str)): List of parentless nodes in - ''' + source_nodes (list(str)): List of parentless nodes in + """ # Find edges that are not source nodes. not_source = [] for node in nodes: @@ -278,7 +297,7 @@ def find_source_nodes(self, nodes, edges, all_attributes=[]): # Find source nodes as nodes that are not in not_source. source_nodes = [] for node in nodes: - if self.figure_type == 'dependency': + if self.figure_type == "dependency": if node not in not_source and node in all_attributes: source_nodes.append(node) else: @@ -287,7 +306,7 @@ def find_source_nodes(self, nodes, edges, all_attributes=[]): return source_nodes def get_parent_child_dictionary(self, nodes, edges, all_attributes=[]): - '''Based on the dependency type, create dictionaries between parent and child and child and parent attributes. + """Based on the dependency type, create dictionaries between parent and child and child and parent attributes. Input: nodes: (Networkx NodeView) Nodes of the component or dependency graph. 
edges: (Networkx EdgeDataView (component figure) or List(list) (dependency figure)) @@ -300,76 +319,68 @@ def get_parent_child_dictionary(self, nodes, edges, all_attributes=[]): parent_children (dict): key: parent value: list of the parents children - ''' + """ child_parents = {} parent_children = {} - if self.dependency_type == 'requiresComponent': - + if self.dependency_type == "requiresComponent": # Construct child_parents dictionary for edge in edges: - # Add child as a key if edge[0] not in child_parents.keys(): child_parents[edge[0]] = [] - + # Add parents to list child_parents[edge[0]].append(edge[1]) - + # Construct parent_children dictionary for edge in edges: - # Add parent as a key if edge[1] not in parent_children.keys(): parent_children[edge[1]] = [] - + # Add children to list parent_children[edge[1]].append(edge[0]) - - elif self.dependency_type == 'requiresDependency': - + + elif self.dependency_type == "requiresDependency": # Construct child_parents dictionary for edge in edges: - # Check if child is an attribute for the current component if edge[0] in all_attributes: - # Add child as a key if edge[0] not in child_parents.keys(): child_parents[edge[0]] = [] - + # Add parent to list if it is an attriute for the current component if edge[1] in all_attributes: child_parents[edge[0]].append(edge[1]) - + # Construct parent_children dictionary for edge in edges: - # Check if parent is an attribute for the current component if edge[1] in all_attributes: - # Add parent as a key if edge[1] not in parent_children.keys(): parent_children[edge[1]] = [] - + # Add child to list if it is an attriute for the current component if edge[0] in all_attributes: parent_children[edge[1]].append(edge[0]) return child_parents, parent_children - def alias_edges(self, ca_alias:dict, edges) -> List[list]: - '''Create new edges based on aliasing between an attribute and its response. + def alias_edges(self, ca_alias: dict, edges) -> List[list]: + """Create new edges based on aliasing between an attribute and its response. Purpose: Create aliased edges. - For example: + For example: If BiospecimenType (attribute) is AnalyteBiospecimenType (response) Then ShippingConditionType (conditional requirement) is now required. In the model the edges that connect these options are: (AnalyteBiospecimenType, BiospecimenType) (ShippingConditionType, AnalyteBiospecimenType) - Use alias defined in self.get_ca_alias along to define new edges that would - directly link attributes to their conditional requirements, in this + Use alias defined in self.get_ca_alias along to define new edges that would + directly link attributes to their conditional requirements, in this example the new edge would be: [ShippingConditionType, BiospecimenType] Inputs: @@ -379,17 +390,16 @@ def alias_edges(self, ca_alias:dict, edges) -> List[list]: edges (Networkx EdgeDataView): Edges of component or dependency graph. When iterated over it works like a list of tuples. Output: aliased_edges (List[lists]) of aliased edges. 
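
Condensed sketch of the two dictionaries get_parent_child_dictionary builds for the requiresComponent case, where each edge is ordered (child, parent); node names are illustrative:

from collections import defaultdict

edges = [("Biospecimen", "Patient"), ("BulkRNA-seqAssay", "Biospecimen")]

child_parents, parent_children = defaultdict(list), defaultdict(list)
for child, parent in edges:
    child_parents[child].append(parent)
    parent_children[parent].append(child)

# dict(child_parents)   -> {'Biospecimen': ['Patient'], 'BulkRNA-seqAssay': ['Biospecimen']}
# dict(parent_children) -> {'Patient': ['Biospecimen'], 'Biospecimen': ['BulkRNA-seqAssay']}
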
- ''' + """ aliased_edges = [] for i, edge in enumerate(edges): - # construct one set of edges at a time edge_set = [] - + # If the first edge has an alias add alias to the first position in the current edge set if edge[0] in ca_alias.keys(): edge_set.append(ca_alias[edge[0]]) - + # Else add the non-aliased edge else: edge_set.append(edge[0]) @@ -397,7 +407,7 @@ def alias_edges(self, ca_alias:dict, edges) -> List[list]: # If the secod edge has an alias add alias to the first position in the current edge set if edge[1] in ca_alias.keys(): edge_set.append(ca_alias[edge[1]]) - + # Else add the non-aliased edge else: edge_set.append(edge[1]) @@ -407,11 +417,13 @@ def alias_edges(self, ca_alias:dict, edges) -> List[list]: return aliased_edges - def prune_expand_topological_gen(self, topological_gen, all_attributes, conditional_attributes): - ''' + def prune_expand_topological_gen( + self, topological_gen, all_attributes, conditional_attributes + ): + """ Purpose: Remake topological_gen with only relevant nodes. - This is necessary since for the figure this function is being used in we + This is necessary since for the figure this function is being used in we only want to display a portion of the graph data. In addition to only displaying relevant nodes, we want to add conditional attributes to topological_gen so we can visualize them in the tangled tree @@ -423,23 +435,21 @@ def prune_expand_topological_gen(self, topological_gen, all_attributes, conditio Output: new_top_gen (List[list]): mimics structure of topological_gen but only includes the nodes we want - ''' + """ pruned_topological_gen = [] # For each layer(gen) in the topological generation list for i, layer in enumerate(topological_gen): - current_layer = [] next_layer = [] - + # For each node in the layer for node in layer: - # If the node is relevant to this component and is not a conditional attribute add it to the current layer. if node in all_attributes and node not in conditional_attributes: current_layer.append(node) - + # If its a conditional attribute add it to a followup layer. if node in conditional_attributes: next_layer.append(node) @@ -453,19 +463,19 @@ def prune_expand_topological_gen(self, topological_gen, all_attributes, conditio return pruned_topological_gen def get_base_layers(self, topological_gen, child_parents, source_nodes, cn): - ''' + """ Purpose: - Reconfigure topological gen to move things back appropriate layers if + Reconfigure topological gen to move things back appropriate layers if they would have a back reference. - The Tangle Tree figure requrires an acyclic directed graph that has additional + The Tangle Tree figure requrires an acyclic directed graph that has additional layering rules between connected nodes. - If there is a backward connection then the line connecting them will break (this would suggest a cyclic connection.) - - Additionally if two or more nodes are connecting to a downstream node it is - best to put both parent nodes at the same level, if possible, to + - Additionally if two or more nodes are connecting to a downstream node it is + best to put both parent nodes at the same level, if possible, to prevent line breaks. - - Also want to move any children nodes one layer below + - Also want to move any children nodes one layer below the parent node(s). If there are multiple parents, put one layer below the parent that is furthest from the origin. 
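
Worked example of alias_edges above, using the biospecimen case from its docstring; the .get() calls condense the if/else pairs but preserve the behavior, including the self-edge produced by the first pair:

ca_alias = {"AnalyteBiospecimenType": "BiospecimenType"}
edges = [
    ("AnalyteBiospecimenType", "BiospecimenType"),
    ("ShippingConditionType", "AnalyteBiospecimenType"),
]

aliased_edges = [[ca_alias.get(a, a), ca_alias.get(b, b)] for a, b in edges]
# -> [['BiospecimenType', 'BiospecimenType'],
#     ['ShippingConditionType', 'BiospecimenType']]
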
@@ -484,83 +494,84 @@ def get_base_layers(self, topological_gen, child_parents, source_nodes, cn): base_layers_copy_copy: dict, key: component name, value: layer represents the final layering after moving the components/attributes to their desired layer.c - ''' + """ # Convert topological_gen to a dictionary - base_layers = {com:i for i, lev in enumerate(topological_gen) - for com in lev} - + base_layers = {com: i for i, lev in enumerate(topological_gen) for com in lev} + # Make another version to iterate on -- Cant set to equal or will overwrite the original. - base_layers_copy = {com:i for i, lev in enumerate(topological_gen) - for com in lev} + base_layers_copy = { + com: i for i, lev in enumerate(topological_gen) for com in lev + } # Move child nodes one node downstream of their parents. for level in topological_gen: for node in level: - # Check if node has a parent. if node in child_parents.keys(): - - #node_level = base_layers[node] + # node_level = base_layers[node] # Look at the parents for the node. parent_levels = [] for par in child_parents[node]: - # Get the layer the parent is located at. parent_levels.append(base_layers[par]) - + # Get the max layer a parent of the node can be found. max_parent_level = max(parent_levels) # Move the node one layer beyond the max parent node position, so it will be downstream of its parents. base_layers_copy[node] = max_parent_level + 1 - + # Make another version of updated positions iterate on further. base_layers_copy_copy = base_layers_copy # Move parental source nodes if necessary. for level in topological_gen: for node in level: - # Check if node has any parents. if node in child_parents.keys(): parent_levels = [] modify_par = [] - + # For each parent get their position. for par in child_parents[node]: parent_levels.append(base_layers_copy[par]) - - # If one of the parents is a source node move + + # If one of the parents is a source node move # it to the same level as the other nodes the child connects to so # that the connections will not be backwards (and result in a broken line) for par in child_parents[node]: - - # For a given parent determine if its a source node and that the parents + # For a given parent determine if its a source node and that the parents # are not already at level 0, and the parent is not the current component node. - if (par in source_nodes and - (parent_levels.count(parent_levels[0]) != len(parent_levels)) - and par != cn): - + if ( + par in source_nodes + and ( + parent_levels.count(parent_levels[0]) + != len(parent_levels) + ) + and par != cn + ): # If so, remove its position from parent_levels parent_levels.remove(base_layers_copy[par]) - + # Add this parent to a list of parental positions to modify later. modify_par.append(par) - + # Get the new max parent level for this node. max_parent_level = max(parent_levels) - + # Move the node one position downstream of its max parent level. base_layers_copy_copy[node] = max_parent_level + 1 - + # For each parental position to modify, move the parents level up to the max_parent_level. 
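
The first re-layering pass above reduces to one rule: place each child one layer past its deepest parent. A minimal sketch with illustrative nodes:

topological_gen = [["Patient"], ["Biospecimen", "BulkRNA-seqAssay"]]
child_parents = {"BulkRNA-seqAssay": ["Biospecimen"]}

base_layers = {node: i for i, layer in enumerate(topological_gen) for node in layer}
updated = dict(base_layers)
for node, parents in child_parents.items():
    # one layer beyond the deepest parent, so the edge never points backwards
    updated[node] = max(base_layers[p] for p in parents) + 1

# updated -> {'Patient': 0, 'Biospecimen': 1, 'BulkRNA-seqAssay': 2}
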
for par in modify_par: base_layers_copy_copy[par] = max_parent_level - + return base_layers, base_layers_copy_copy - def adjust_node_placement(self, base_layers_copy_copy, base_layers, topological_gen): - '''Reorder nodes within topological_generations to match how they were ordered in base_layers_copy_copy + def adjust_node_placement( + self, base_layers_copy_copy, base_layers, topological_gen + ): + """Reorder nodes within topological_generations to match how they were ordered in base_layers_copy_copy Input: topological_gen: list of lists. Indicates layers of nodes. base_layers: dict, key: component name, value: layer @@ -571,51 +582,46 @@ def adjust_node_placement(self, base_layers_copy_copy, base_layers, topological_ Output: topological_gen: same format but as the incoming topologial_gen but ordered to match base_layers_copy_copy. - ''' - if self.figure_type == 'component': + """ + if self.figure_type == "component": # For each node get its new layer in the tangled tree for node, i in base_layers_copy_copy.items(): - # Check if node is not already in the proper layer if node not in topological_gen[i]: - # If not put it in the appropriate layer topological_gen[i].append(node) - + # Remove from inappropriate layer. topological_gen[base_layers[node]].remove(node) - - elif self.figure_type == 'dependency': + + elif self.figure_type == "dependency": for node, i in base_layers_copy_copy.items(): - - # Check if the location of the node is more than the number of + # Check if the location of the node is more than the number of # layers topological gen current handles if i > len(topological_gen) - 1: - # If so, add node to new node at the end of topological_gen topological_gen.append([node]) - + # Remove the node from its previous position. topological_gen[base_layers[node]].remove(node) - + # Else, check if node is not already in the proper layer elif node not in topological_gen[i]: - # If not put it in the appropriate layer topological_gen[i].append(node) - + # Remove from inappropriate layer. topological_gen[base_layers[node]].remove(node) return topological_gen def move_source_nodes_to_bottom_of_layer(self, node_layers, source_nodes): - '''For aesthetic purposes move source nodes to the bottom of their respective layers. + """For aesthetic purposes move source nodes to the bottom of their respective layers. Input: node_layers (List(list)): Lists of lists of each layer and the nodes contained in that layer as strings. source_nodes (list): list of nodes that do not have a parent. Output: node_layers (List(list)): modified to move source nodes to the bottom of each layer. - ''' + """ for i, layer in enumerate(node_layers): nodes_to_move = [] for node in layer: @@ -626,8 +632,10 @@ def move_source_nodes_to_bottom_of_layer(self, node_layers, source_nodes): node_layers[i].append(node) return node_layers - def get_layers_dict_list(self, node_layers, child_parents, parent_children, all_parent_children): - '''Convert node_layers to a list of lists of dictionaries that specifies each node and its parents (if applicable). + def get_layers_dict_list( + self, node_layers, child_parents, parent_children, all_parent_children + ): + """Convert node_layers to a list of lists of dictionaries that specifies each node and its parents (if applicable). Inputs: node_layers: list of lists of each layer and the nodes contained in that layer as strings. 
child_parents (dict): @@ -638,31 +646,46 @@ def get_layers_dict_list(self, node_layers, child_parents, parent_children, all_ value: list of the parents children Outputs: layers_list (List(list): list of lists of dictionaries that specifies each node and its parents (if applicable) - ''' + """ num_layers = len(node_layers) layers_list = [[] for i in range(0, num_layers)] for i, layer in enumerate(node_layers): for node in layer: if node in child_parents.keys(): parents = child_parents[node] - else: + else: parents = [] if node in parent_children.keys(): direct_children = parent_children[node] - else: + else: direct_children = [] if node in all_parent_children.keys(): all_children = all_parent_children[node] - else: + else: all_children = [] - layers_list[i].append({'id': node, 'parents': parents, 'direct_children': direct_children, 'children': all_children}) + layers_list[i].append( + { + "id": node, + "parents": parents, + "direct_children": direct_children, + "children": all_children, + } + ) return layers_list - def get_node_layers_json(self, topological_gen, source_nodes, child_parents, parent_children, cn='', all_parent_children=None): - '''Return all the layers of a single tangled tree as a JSON String. + def get_node_layers_json( + self, + topological_gen, + source_nodes, + child_parents, + parent_children, + cn="", + all_parent_children=None, + ): + """Return all the layers of a single tangled tree as a JSON String. Inputs: topological_gen:list of lists. Indicates layers of nodes. source_nodes: list of nodes that do not have a parent. @@ -677,32 +700,38 @@ def get_node_layers_json(self, topological_gen, source_nodes, child_parents, par value: list of the parents children (including all downstream nodes). Default to an empty dictionary Outputs: layers_json (JSON String): Layers of nodes in the tangled tree as a json string. - ''' + """ - base_layers, base_layers_copy_copy = self.get_base_layers(topological_gen, - child_parents, source_nodes, cn) + base_layers, base_layers_copy_copy = self.get_base_layers( + topological_gen, child_parents, source_nodes, cn + ) # Rearrange node_layers to follow the pattern laid out in component layers. - node_layers = self.adjust_node_placement(base_layers_copy_copy, - base_layers, topological_gen) + node_layers = self.adjust_node_placement( + base_layers_copy_copy, base_layers, topological_gen + ) # Move source nodes to the bottom of each layer. - node_layers = self.move_source_nodes_to_bottom_of_layer(node_layers, source_nodes) + node_layers = self.move_source_nodes_to_bottom_of_layer( + node_layers, source_nodes + ) # Convert layers to a list of dictionaries if not all_parent_children: - # default to an empty dictionary + # default to an empty dictionary all_parent_children = dict() - - layers_dicts = self.get_layers_dict_list(node_layers, child_parents, parent_children, all_parent_children) + + layers_dicts = self.get_layers_dict_list( + node_layers, child_parents, parent_children, all_parent_children + ) # Convert dictionary to a JSON string layers_json = json.dumps(layers_dicts) return layers_json - def save_outputs(self, save_file, layers_json, cn='', all_layers=None): - ''' + def save_outputs(self, save_file, layers_json, cn="", all_layers=None): + """ Inputs: save_file (bool): Indicates whether to save a file locally or not.: layers_json (JSON String): Layers of nodes in the tangled tree as a json string. 
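
The per-layer records built by get_layers_dict_list and serialized by get_node_layers_json above have this shape (values illustrative):

import json

layers_list = [
    [{"id": "Patient", "parents": [], "direct_children": ["Biospecimen"],
      "children": ["Biospecimen", "BulkRNA-seqAssay"]}],
    [{"id": "Biospecimen", "parents": ["Patient"],
      "direct_children": ["BulkRNA-seqAssay"], "children": ["BulkRNA-seqAssay"]}],
]
layers_json = json.dumps(layers_list)
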
@@ -711,20 +740,28 @@ def save_outputs(self, save_file, layers_json, cn='', all_layers=None): If a dependency figure the list is added to each time this function is called, so starts incomplete. default=[]. Outputs: - all_layers (list of json strings): + all_layers (list of json strings): If save_file == False: Each string represents contains the layers for a single tangled tree. If save_file ==True: is an empty list. - ''' + """ if all_layers is None: all_layers = [] if save_file == True: if cn: - output_file_name = f"{self.schema_abbr}_{self.figure_type}_{cn}_tangled_tree.json" + output_file_name = ( + f"{self.schema_abbr}_{self.figure_type}_{cn}_tangled_tree.json" + ) else: - output_file_name = f"{self.schema_abbr}_{self.figure_type}_tangled_tree.json" - with open(os.path.join(self.json_output_path, output_file_name), 'w') as outfile: + output_file_name = ( + f"{self.schema_abbr}_{self.figure_type}_tangled_tree.json" + ) + with open( + os.path.join(self.json_output_path, output_file_name), "w" + ) as outfile: outfile.write(layers_json) - logger.info(f"Tangled Tree JSON String saved to {os.path.join(self.json_output_path, output_file_name)}.") + logger.info( + f"Tangled Tree JSON String saved to {os.path.join(self.json_output_path, output_file_name)}." + ) all_layers = layers_json elif save_file == False: all_layers.append(layers_json) @@ -732,25 +769,27 @@ def save_outputs(self, save_file, layers_json, cn='', all_layers=None): def get_ancestors_nodes(self, subgraph, components): """ - Inputs: + Inputs: subgraph: networkX graph object - components: a list of nodes - outputs: + components: a list of nodes + outputs: all_parent_children: a dictionary that indicates a list of children (including all the intermediate children) of a given node """ all_parent_children = {} - for component in components: - all_ancestors = self.dmge.get_nodes_ancestors(subgraph=subgraph, node_label=component) + for component in components: + all_ancestors = self.dmge.get_nodes_ancestors( + subgraph=subgraph, node_label=component + ) all_parent_children[component] = all_ancestors return all_parent_children def get_tangled_tree_layers(self, save_file=True): - '''Based on user indicated figure type, construct the layers of nodes of a tangled tree. + """Based on user indicated figure type, construct the layers of nodes of a tangled tree. Inputs: save_file (bool): Indicates whether to save a file locally or not. Outputs: - all_layers (list of json strings): + all_layers (list of json strings): If save_file == False: Each string represents contains the layers for a single tangled tree. If save_file ==True: is an empty list. 
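
File-naming sketch matching save_outputs above; schema abbreviation, figure type, and component name are illustrative:

schema_abbr, figure_type = "example", "dependency"

for cn in ("Patient", ""):
    if cn:
        name = f"{schema_abbr}_{figure_type}_{cn}_tangled_tree.json"
    else:
        name = f"{schema_abbr}_{figure_type}_tangled_tree.json"
    print(name)
# -> example_dependency_Patient_tangled_tree.json
# -> example_dependency_tangled_tree.json
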
@@ -758,61 +797,81 @@ def get_tangled_tree_layers(self, save_file=True): If there are many conditional requirements associated with a depependency, and those conditional requirements have overlapping attributes associated with them the tangled tree will only report one - - ''' + + """ # Gather the data model's, topological generations, nodes and edges topological_gen, nodes, edges, subg = self.get_topological_generations() - if self.figure_type == 'component': + if self.figure_type == "component": # Gather all source nodes source_nodes = self.find_source_nodes(nodes, edges) - + # Map all children to their parents and vice versa - child_parents, parent_children = self.get_parent_child_dictionary(nodes, edges) + child_parents, parent_children = self.get_parent_child_dictionary( + nodes, edges + ) # find all the downstream nodes all_parent_children = self.get_ancestors_nodes(subg, parent_children.keys()) - + # Get the layers that each node belongs to. - layers_json = self.get_node_layers_json(topological_gen, source_nodes, child_parents, parent_children, all_parent_children=all_parent_children) + layers_json = self.get_node_layers_json( + topological_gen, + source_nodes, + child_parents, + parent_children, + all_parent_children=all_parent_children, + ) # If indicated save outputs locally else gather all layers. - all_layers = self.save_outputs(save_file, layers_json) + all_layers = self.save_outputs(save_file, layers_json) - if self.figure_type == 'dependency': + if self.figure_type == "dependency": # Get component digraph and nodes. - component_dg = self.dmge.get_digraph_by_edge_type('requiresComponent') + component_dg = self.dmge.get_digraph_by_edge_type("requiresComponent") component_nodes = component_dg.nodes() # Get table of attributes. attributes_csv_str = self.ae.parse_attributes(save_file=False) attributes_df = pd.read_table(StringIO(attributes_csv_str), sep=",") - - all_layers =[] + all_layers = [] for cn in component_nodes: # Gather attribute and dependency information per node - conditional_attributes, ca_alias, all_attributes = self.gather_component_dependency_info(cn, attributes_df) + ( + conditional_attributes, + ca_alias, + all_attributes, + ) = self.gather_component_dependency_info(cn, attributes_df) # Gather all source nodes - source_nodes = self.find_source_nodes(component_nodes, edges, all_attributes) + source_nodes = self.find_source_nodes( + component_nodes, edges, all_attributes + ) # Alias the conditional requirement edge back to its actual parent label, # then apply aliasing back to the edges aliased_edges = self.alias_edges(ca_alias, edges) # Gather relationships between children and their parents. - child_parents, parent_children = self.get_parent_child_dictionary(nodes, - aliased_edges, all_attributes) + child_parents, parent_children = self.get_parent_child_dictionary( + nodes, aliased_edges, all_attributes + ) # Remake topological_gen so it has only relevant nodes. - pruned_topological_gen = self.prune_expand_topological_gen(topological_gen, all_attributes, conditional_attributes) + pruned_topological_gen = self.prune_expand_topological_gen( + topological_gen, all_attributes, conditional_attributes + ) # Get the layers that each node belongs to. - layers_json = self.get_node_layers_json(pruned_topological_gen, source_nodes, child_parents, parent_children, cn) + layers_json = self.get_node_layers_json( + pruned_topological_gen, + source_nodes, + child_parents, + parent_children, + cn, + ) # If indicated save outputs locally else, gather all layers. 
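
The find_source_nodes lookup used in both figure branches above reduces to set membership when edges are ordered (child, parent); a condensed sketch with illustrative nodes:

nodes = ["Patient", "Biospecimen", "BulkRNA-seqAssay"]
edges = [("Biospecimen", "Patient"), ("BulkRNA-seqAssay", "Biospecimen")]

children = {child for child, _ in edges}
source_nodes = [n for n in nodes if n not in children]
# -> ['Patient']  (the only node that never appears as a child)
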
all_layers = self.save_outputs(save_file, layers_json, cn, all_layers) return all_layers - - \ No newline at end of file From f111661ce8ef318bf5acdd845eff7a9a36611ce4 Mon Sep 17 00:00:00 2001 From: andrewelamb Date: Mon, 22 Jan 2024 11:40:17 -0800 Subject: [PATCH 076/199] fixed black github action --- .github/workflows/test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index ca8b8e9ac..b2adf95f8 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -96,7 +96,7 @@ jobs: run: | # ran only on certain files for now # add here when checked - poetry run black --check + poetry run black schematic --check #---------------------------------------------- # type checking/enforcement From 2573f7441e742dbbb1bac08500c83d0b0c52249c Mon Sep 17 00:00:00 2001 From: linglp Date: Tue, 23 Jan 2024 17:34:16 -0500 Subject: [PATCH 077/199] fix tests --- tests/test_manifest.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/tests/test_manifest.py b/tests/test_manifest.py index 612f00770..c8d958f2c 100644 --- a/tests/test_manifest.py +++ b/tests/test_manifest.py @@ -519,7 +519,9 @@ def test_create_single_manifest(self, simple_manifest_generator, helpers, return json_ld_path = helpers.get_data_path("example.model.jsonld") data_type = "Patient" - result = simple_manifest_generator.create_single_manifest(jsonld=json_ld_path, data_type=data_type, output_format="google_sheet", use_annotations=False) + graph_data_model = generate_graph_data_model(helpers, path_to_data_model=json_ld_path) + + result = simple_manifest_generator.create_single_manifest(path_to_data_model=json_ld_path, graph_data_model=graph_data_model, data_type=data_type, output_format="google_sheet", use_annotations=False) assert result == return_output @pytest.mark.parametrize("test_data_types", [["Patient", "Biospecimen"], ["all manifests"]]) @@ -529,7 +531,7 @@ def test_create_manifests_raise_errors(self, simple_manifest_generator, helpers, data_types = test_data_types dataset_ids=["syn123456"] - simple_manifest_generator.create_manifests(jsonld=json_ld_path, data_types=data_types, dataset_ids=dataset_ids, output_format="google_sheet", use_annotations=False) + simple_manifest_generator.create_manifests(path_to_data_model=json_ld_path, data_types=data_types, dataset_ids=dataset_ids, output_format="google_sheet", use_annotations=False) @pytest.mark.parametrize("test_data_types, dataset_ids, expected_result", [ (["Patient", "Biospecimen"], ["mock dataset id1", "mock dataset id2"], ["mock google sheet link", "mock google sheet link"]), @@ -538,7 +540,7 @@ def test_create_manifests_raise_errors(self, simple_manifest_generator, helpers, def test_create_manifests(self, simple_manifest_generator, helpers, test_data_types, dataset_ids, expected_result): with patch("schematic.manifest.generator.ManifestGenerator.create_single_manifest", return_value="mock google sheet link"): json_ld_path = helpers.get_data_path("example.model.jsonld") - all_results = simple_manifest_generator.create_manifests(jsonld=json_ld_path, data_types=test_data_types, dataset_ids=dataset_ids, output_format="google_sheet", use_annotations=False) + all_results = simple_manifest_generator.create_manifests(path_to_data_model=json_ld_path, data_types=test_data_types, dataset_ids=dataset_ids, output_format="google_sheet", use_annotations=False) assert all_results == expected_result From 1c4872bc5de8c465a4e728ff71886296827d1631 Mon Sep 17 00:00:00 2001 From: linglp Date: 
Tue, 23 Jan 2024 17:37:56 -0500
Subject: [PATCH 078/199] run black

---
 schematic/manifest/generator.py | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)

diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py
index 7886d9f21..e3e244271 100644
--- a/schematic/manifest/generator.py
+++ b/schematic/manifest/generator.py
@@ -1649,16 +1649,16 @@ def create_manifests(
 
         if len_data_types != len_dataset_ids:
             raise ValueError(
-                f"There is a mismatch in the number of data_types and dataset_id's that "
-                f"submitted. Please check your submission and try again."
-            )
-
+                f"There is a mismatch in the number of data_types and dataset_ids that "
+                f"were submitted. Please check your submission and try again."
+            )
+
         # Raise an error if used in conjunction with datatype = 'all_manifests'
-        if data_types[0] == 'all manifests':
+        if data_types[0] == "all manifests":
             raise ValueError(
-                f"When submitting 'all manifests' as the data_type cannot also submit dataset_id. "
-                f"Please check your submission and try again."
-            )
+                f"When submitting 'all manifests' as the data_type, you cannot also submit dataset_id. "
+                f"Please check your submission and try again."
+            )
 
         data_model_parser = DataModelParser(path_to_data_model=path_to_data_model)
 
From d595be9f1d1b8a9aba7da507debc28f325848b73 Mon Sep 17 00:00:00 2001
From: Mialy DeFelice <85905780+mialy-defelice@users.noreply.github.com>
Date: Wed, 24 Jan 2024 18:55:26 -0800
Subject: [PATCH 079/199] run black

---
 schematic/models/commands.py |  1 -
 schematic/models/metadata.py |  1 -
 schematic/store/synapse.py   | 51 +++++++++++++++++++++++-------------
 3 files changed, 33 insertions(+), 20 deletions(-)

diff --git a/schematic/models/commands.py b/schematic/models/commands.py
index e10ade85a..0c1e6e8a3 100644
--- a/schematic/models/commands.py
+++ b/schematic/models/commands.py
@@ -124,7 +124,6 @@ def model(ctx, config):  # use as `schematic model ...`
     type=click.Choice(["class_label", "display_label"], case_sensitive=True),
     help=query_dict(model_commands, ("model", "submit", "annotation_keys")),
 )
-
 @click.pass_obj
 def submit_manifest(
     ctx,
diff --git a/schematic/models/metadata.py b/schematic/models/metadata.py
index ca3805fc5..6d353185f 100644
--- a/schematic/models/metadata.py
+++ b/schematic/models/metadata.py
@@ -328,7 +328,6 @@ def submit_metadata_manifest(
         table_column_names: str = "class_label",
         annotation_keys: str = "class_label",
     ) -> str:
-
         """Wrap methods that are responsible for validation of manifests for a given component, and association of the same manifest file with a specified dataset.
         Args:
diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py
index 458ec687c..929acbbd3 100644
--- a/schematic/store/synapse.py
+++ b/schematic/store/synapse.py
@@ -1164,7 +1164,9 @@ def formatDB(self, dmge, manifest, table_column_names):
                 for col in manifest_columns
             ]
         else:
-            ValueError(f"The provided table_column_name: {table_column_names} is not valid, please resubmit with an allowed value only.")
+            raise ValueError(
+                f"The provided table_column_name: {table_column_names} is not valid, please resubmit with an allowed value only."
+ ) cols = list(map(lambda x: x.replace("EntityId", "entityId"), cols)) @@ -1312,7 +1314,9 @@ def upload_manifest_file( return manifest_synapse_file_id @missing_entity_handler - def format_row_annotations(self, dmge, row, entityId:str, hideBlanks:bool, annotation_keys:str): + def format_row_annotations( + self, dmge, row, entityId: str, hideBlanks: bool, annotation_keys: str + ): # prepare metadata for Synapse storage (resolve display name into a name that Synapse annotations support (e.g no spaces, parenthesis) # note: the removal of special characters, will apply only to annotation keys; we are not altering the manifest # this could create a divergence between manifest column and annotations. this should be ok for most use cases. @@ -1571,8 +1575,14 @@ def _generate_table_name(self, manifest): table_name = "synapse_storage_manifest_table" return table_name, component_name - - def _add_annotations(self, dmge, row, entityId: str, hideBlanks: bool, annotation_keys: str,): + def _add_annotations( + self, + dmge, + row, + entityId: str, + hideBlanks: bool, + annotation_keys: str, + ): """Helper function to format and add annotations to entities in Synapse. Args: dmge: DataModelGraphExplorer object, @@ -1586,7 +1596,9 @@ def _add_annotations(self, dmge, row, entityId: str, hideBlanks: bool, annotatio Annotations are added to entities in Synapse, no return. """ # Format annotations for Synapse - annos = self.format_row_annotations(dmge, row, entityId, hideBlanks, annotation_keys) + annos = self.format_row_annotations( + dmge, row, entityId, hideBlanks, annotation_keys + ) if annos: # Store annotations for an entity folder @@ -1678,16 +1690,16 @@ def add_annotations_to_entities_files( def upload_manifest_as_table( self, - dmge:DataModelGraphExplorer, - manifest:pd.DataFrame, - metadataManifestPath:str, - datasetId:str, - table_name:str, - component_name:str, - restrict:bool, - manifest_record_type:str, - hideBlanks:bool, - table_manipulation:str, + dmge: DataModelGraphExplorer, + manifest: pd.DataFrame, + metadataManifestPath: str, + datasetId: str, + table_name: str, + component_name: str, + restrict: bool, + manifest_record_type: str, + hideBlanks: bool, + table_manipulation: str, table_column_names: str, annotation_keys: str, ): @@ -1794,7 +1806,12 @@ def upload_manifest_as_csv( manifest_synapse_file_id (str): SynID of manifest csv uploaded to synapse. 
""" manifest = self.add_annotations_to_entities_files( - dmge, manifest, manifest_record_type, datasetId, hideBlanks, annotation_keys=annotation_keys + dmge, + manifest, + manifest_record_type, + datasetId, + hideBlanks, + annotation_keys=annotation_keys, ) # Load manifest to synapse as a CSV File @@ -1862,7 +1879,6 @@ def upload_manifest_combo( table_column_names=table_column_names, ) - manifest = self.add_annotations_to_entities_files( dmge, manifest, @@ -2169,7 +2185,6 @@ def getDatasetAnnotations( def raise_final_error(retry_state): return retry_state.outcome.result() - def checkIfinAssetView(self, syn_id) -> str: # get data in administrative fileview for this pipeline assetViewTable = self.getStorageFileviewTable() From 929e21c14503733270877e66f32c0c4a300bff44 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice <85905780+mialy-defelice@users.noreply.github.com> Date: Wed, 24 Jan 2024 19:23:06 -0800 Subject: [PATCH 080/199] update test_store.py --- tests/test_store.py | 630 ++++++++++++++++++++++++++++++-------------- 1 file changed, 425 insertions(+), 205 deletions(-) diff --git a/tests/test_store.py b/tests/test_store.py index c448f2208..f68cd6bf5 100644 --- a/tests/test_store.py +++ b/tests/test_store.py @@ -19,8 +19,10 @@ from schematic.models.metadata import MetadataModel from schematic.store.base import BaseStorage -from schematic.store.synapse import (DatasetFileView, - ManifestDownload,) +from schematic.store.synapse import ( + DatasetFileView, + ManifestDownload, +) logging.basicConfig(level=logging.DEBUG) logger = logging.getLogger(__name__) @@ -30,11 +32,13 @@ def test_download_manifest_id(): yield "syn51203973" + @pytest.fixture def mock_manifest_download(synapse_store, test_download_manifest_id): md = ManifestDownload(synapse_store.syn, test_download_manifest_id) yield md + @pytest.fixture def dataset_fileview(dataset_id, synapse_store): dataset_fileview = DatasetFileView(dataset_id, synapse_store.syn) @@ -53,33 +57,36 @@ def dataset_fileview_table_tidy(dataset_fileview, dataset_fileview_table): table = dataset_fileview.tidy_table() yield table + @pytest.fixture def version(synapse_store, helpers): - yield helpers.get_python_version() + @pytest.fixture def projectId(synapse_store, helpers): projectId = helpers.get_python_project(helpers) yield projectId + @pytest.fixture def datasetId(synapse_store, projectId, helpers): dataset = Folder( - name = 'Table Test Dataset ' + helpers.get_python_version(), - parent = projectId, - ) + name="Table Test Dataset " + helpers.get_python_version(), + parent=projectId, + ) datasetId = synapse_store.syn.store(dataset).id sleep(5) yield datasetId + def raise_final_error(retry_state): return retry_state.outcome.result() + class TestBaseStorage: def test_init(self): - with pytest.raises(NotImplementedError): BaseStorage() @@ -110,47 +117,75 @@ def test_getFileAnnotations(self, synapse_store): assert expected_dict == actual_dict - @pytest.mark.parametrize('only_new_files',[True, False]) + @pytest.mark.parametrize("only_new_files", [True, False]) def test_get_file_entityIds(self, helpers, synapse_store, only_new_files): - #TODO: Automatically reset manifest at path specified below after each test + # TODO: Automatically reset manifest at path specified below after each test # so that subsequent runs do not affect each other manifest_path = "mock_manifests/test_BulkRNAseq.csv" - dataset_files = synapse_store.getFilesInStorageDataset('syn39241199') + dataset_files = synapse_store.getFilesInStorageDataset("syn39241199") if only_new_files: # Prepare 
manifest is getting Ids for new files only manifest = helpers.get_data_frame(manifest_path) - entityIds = pd.DataFrame({'entityId': ['syn39242580', 'syn51900502']}) + entityIds = pd.DataFrame({"entityId": ["syn39242580", "syn51900502"]}) # If this line errors out then the changes on the manifest file need to be discarded manifest = manifest.join(entityIds) - + # get entityIds for new files - files_and_Ids = synapse_store._get_file_entityIds(dataset_files=dataset_files, only_new_files=only_new_files, manifest=manifest) + files_and_Ids = synapse_store._get_file_entityIds( + dataset_files=dataset_files, + only_new_files=only_new_files, + manifest=manifest, + ) # Assert that there are no new files for value in files_and_Ids.values(): assert value == [] - + else: # get entityIds for all files - files_and_Ids = synapse_store._get_file_entityIds(dataset_files=dataset_files, only_new_files=only_new_files) + files_and_Ids = synapse_store._get_file_entityIds( + dataset_files=dataset_files, only_new_files=only_new_files + ) # assert that the correct number of files were found - assert len(files_and_Ids['entityId']) == 2 - - @pytest.mark.parametrize('manifest_path, test_annotations, datasetId, manifest_record_type', - [ ("mock_manifests/annotations_test_manifest.csv", {'CheckInt': '7', 'CheckList': 'valid, list, values'}, 'syn34295552', 'file_and_entities'), - ("mock_manifests/test_BulkRNAseq.csv", {'FileFormat': 'BAM', 'GenomeBuild': 'GRCh38'}, 'syn39241199', 'table_and_file')], - ids = ['non file-based', - 'file-based']) - def test_annotation_submission(self, synapse_store, helpers, manifest_path, test_annotations, datasetId, manifest_record_type, config: Configuration): + assert len(files_and_Ids["entityId"]) == 2 + + @pytest.mark.parametrize( + "manifest_path, test_annotations, datasetId, manifest_record_type", + [ + ( + "mock_manifests/annotations_test_manifest.csv", + {"CheckInt": "7", "CheckList": "valid, list, values"}, + "syn34295552", + "file_and_entities", + ), + ( + "mock_manifests/test_BulkRNAseq.csv", + {"FileFormat": "BAM", "GenomeBuild": "GRCh38"}, + "syn39241199", + "table_and_file", + ), + ], + ids=["non file-based", "file-based"], + ) + def test_annotation_submission( + self, + synapse_store, + helpers, + manifest_path, + test_annotations, + datasetId, + manifest_record_type, + config: Configuration, + ): # Upload dataset annotations # Instantiate DataModelParser - data_model_parser = DataModelParser(path_to_data_model = config.model_location) - - #Parse Model + data_model_parser = DataModelParser(path_to_data_model=config.model_location) + + # Parse Model parsed_data_model = data_model_parser.parse_model() # Instantiate DataModelGraph @@ -163,13 +198,12 @@ def test_annotation_submission(self, synapse_store, helpers, manifest_path, test dmge = DataModelGraphExplorer(graph_data_model) manifest_id = synapse_store.associateMetadataWithFiles( - dmge = dmge, - metadataManifestPath = helpers.get_data_path(manifest_path), - datasetId = datasetId, - manifest_record_type = manifest_record_type, - useSchemaLabel = True, - hideBlanks = True, - restrict_manifest = False, + dmge=dmge, + metadataManifestPath=helpers.get_data_path(manifest_path), + datasetId=datasetId, + manifest_record_type=manifest_record_type, + hideBlanks=True, + restrict_manifest=False, ) # Retrive annotations @@ -181,9 +215,9 @@ def test_annotation_submission(self, synapse_store, helpers, manifest_path, test assert key in annotations.keys() assert annotations[key] == test_annotations[key] - if 
manifest_path.endswith('annotations_test_manifest.csv'): - assert 'CheckRecommended' not in annotations.keys() - elif manifest_path.endswith('test_BulkRNAseq.csv'): + if manifest_path.endswith("annotations_test_manifest.csv"): + assert "CheckRecommended" not in annotations.keys() + elif manifest_path.endswith("test_BulkRNAseq.csv"): entity = synapse_store.syn.get(entity_id) assert type(entity) == File @@ -228,7 +262,6 @@ def test_getDatasetAnnotations(self, dataset_id, synapse_store, force_batch): pd.testing.assert_frame_equal(expected_df, actual_df, check_like=True) def test_getDatasetProject(self, dataset_id, synapse_store): - assert synapse_store.getDatasetProject(dataset_id) == "syn23643250" assert synapse_store.getDatasetProject("syn23643250") == "syn23643250" @@ -237,23 +270,37 @@ def test_getDatasetProject(self, dataset_id, synapse_store): with pytest.raises(PermissionError): synapse_store.getDatasetProject("syn12345678") - - @pytest.mark.parametrize("full_path,expected", [(True, [('syn126', 'parent_folder/test_file'), ('syn125', 'parent_folder/test_folder/test_file_2')]),(False, [('syn126', 'test_file'), ('syn125', 'test_file_2')])]) + + @pytest.mark.parametrize( + "full_path,expected", + [ + ( + True, + [ + ("syn126", "parent_folder/test_file"), + ("syn125", "parent_folder/test_folder/test_file_2"), + ], + ), + (False, [("syn126", "test_file"), ("syn125", "test_file_2")]), + ], + ) def test_getFilesInStorageDataset(self, synapse_store, full_path, expected): mock_return = [ - ( - ("parent_folder", "syn123"), - [("test_folder", "syn124")], - [("test_file", "syn126")], - ), - ( - (os.path.join("parent_folder", "test_folder"), "syn124"), - [], - [("test_file_2", "syn125")], - ), + ( + ("parent_folder", "syn123"), + [("test_folder", "syn124")], + [("test_file", "syn126")], + ), + ( + (os.path.join("parent_folder", "test_folder"), "syn124"), + [], + [("test_file_2", "syn125")], + ), ] - with patch('synapseutils.walk_functions._helpWalk', return_value=mock_return): - file_list = synapse_store.getFilesInStorageDataset(datasetId="syn_mock", fileNames=None, fullpath=full_path) + with patch("synapseutils.walk_functions._helpWalk", return_value=mock_return): + file_list = synapse_store.getFilesInStorageDataset( + datasetId="syn_mock", fileNames=None, fullpath=full_path + ) assert file_list == expected @pytest.mark.parametrize("downloadFile", [True, False]) @@ -261,62 +308,144 @@ def test_getDatasetManifest(self, synapse_store, downloadFile): # get a test manifest manifest_data = synapse_store.getDatasetManifest("syn51204502", downloadFile) - #make sure the file gets downloaded + # make sure the file gets downloaded if downloadFile: - assert manifest_data['name'] == "synapse_storage_manifest_censored.csv" - assert os.path.exists(manifest_data['path']) + assert manifest_data["name"] == "synapse_storage_manifest_censored.csv" + assert os.path.exists(manifest_data["path"]) # clean up - os.remove(manifest_data['path']) - else: + os.remove(manifest_data["path"]) + else: # return manifest id assert manifest_data == "syn51204513" - @pytest.mark.parametrize("existing_manifest_df", [pd.DataFrame(), pd.DataFrame({"Filename": ["existing_mock_file_path"], "entityId": ["existing_mock_entity_id"]})]) + @pytest.mark.parametrize( + "existing_manifest_df", + [ + pd.DataFrame(), + pd.DataFrame( + { + "Filename": ["existing_mock_file_path"], + "entityId": ["existing_mock_entity_id"], + } + ), + ], + ) def test_fill_in_entity_id_filename(self, synapse_store, existing_manifest_df): - with 
patch("schematic.store.synapse.SynapseStorage.getFilesInStorageDataset", return_value=["syn123", "syn124", "syn125"]) as mock_get_file_storage, \ - patch("schematic.store.synapse.SynapseStorage._get_file_entityIds", return_value={"Filename": ["mock_file_path"], "entityId": ["mock_entity_id"]}) as mock_get_file_entity_id: - dataset_files, new_manifest = synapse_store.fill_in_entity_id_filename(datasetId="test_syn_id", manifest=existing_manifest_df) + with patch( + "schematic.store.synapse.SynapseStorage.getFilesInStorageDataset", + return_value=["syn123", "syn124", "syn125"], + ) as mock_get_file_storage, patch( + "schematic.store.synapse.SynapseStorage._get_file_entityIds", + return_value={ + "Filename": ["mock_file_path"], + "entityId": ["mock_entity_id"], + }, + ) as mock_get_file_entity_id: + dataset_files, new_manifest = synapse_store.fill_in_entity_id_filename( + datasetId="test_syn_id", manifest=existing_manifest_df + ) if not existing_manifest_df.empty: - expected_df=pd.DataFrame({"Filename": ["existing_mock_file_path", "mock_file_path"], "entityId": ["existing_mock_entity_id", "mock_entity_id"]}) + expected_df = pd.DataFrame( + { + "Filename": ["existing_mock_file_path", "mock_file_path"], + "entityId": ["existing_mock_entity_id", "mock_entity_id"], + } + ) else: - expected_df=pd.DataFrame({"Filename": ["mock_file_path"], "entityId": ["mock_entity_id"]}) + expected_df = pd.DataFrame( + {"Filename": ["mock_file_path"], "entityId": ["mock_entity_id"]} + ) assert_frame_equal(new_manifest, expected_df) assert dataset_files == ["syn123", "syn124", "syn125"] # Test case: make sure that Filename and entityId column get filled and component column has the same length as filename column def test_add_entity_id_and_filename_with_component_col(self, synapse_store): - with patch("schematic.store.synapse.SynapseStorage._get_files_metadata_from_dataset", return_value={"Filename": ["test_file1", "test_file2"], "entityId": ["syn123", "syn124"]}): - mock_manifest = pd.DataFrame.from_dict({"Filename": [""], "Component": ["MockComponent"], "Sample ID": [""]}).reset_index(drop=True) - manifest_to_return = synapse_store.add_entity_id_and_filename(datasetId="mock_syn_id", manifest=mock_manifest) - expected_df = pd.DataFrame.from_dict({"Filename": ["test_file1", "test_file2"], "Component": ["MockComponent", "MockComponent"], "Sample ID": ["", ""], "entityId": ["syn123", "syn124"]}) + with patch( + "schematic.store.synapse.SynapseStorage._get_files_metadata_from_dataset", + return_value={ + "Filename": ["test_file1", "test_file2"], + "entityId": ["syn123", "syn124"], + }, + ): + mock_manifest = pd.DataFrame.from_dict( + {"Filename": [""], "Component": ["MockComponent"], "Sample ID": [""]} + ).reset_index(drop=True) + manifest_to_return = synapse_store.add_entity_id_and_filename( + datasetId="mock_syn_id", manifest=mock_manifest + ) + expected_df = pd.DataFrame.from_dict( + { + "Filename": ["test_file1", "test_file2"], + "Component": ["MockComponent", "MockComponent"], + "Sample ID": ["", ""], + "entityId": ["syn123", "syn124"], + } + ) assert_frame_equal(manifest_to_return, expected_df) - # Test case: make sure that Filename and entityId column get filled when component column does not exist + # Test case: make sure that Filename and entityId column get filled when component column does not exist def test_add_entity_id_and_filename_without_component_col(self, synapse_store): - with patch("schematic.store.synapse.SynapseStorage._get_files_metadata_from_dataset", return_value={"Filename": ["test_file1", 
"test_file2"], "entityId": ["syn123", "syn124"]}): - mock_manifest = pd.DataFrame.from_dict({"Filename": [""], "Sample ID": [""]}).reset_index(drop=True) - manifest_to_return = synapse_store.add_entity_id_and_filename(datasetId="mock_syn_id", manifest=mock_manifest) - expected_df = pd.DataFrame.from_dict({"Filename": ["test_file1", "test_file2"], "Sample ID": ["", ""], "entityId": ["syn123", "syn124"]}) + with patch( + "schematic.store.synapse.SynapseStorage._get_files_metadata_from_dataset", + return_value={ + "Filename": ["test_file1", "test_file2"], + "entityId": ["syn123", "syn124"], + }, + ): + mock_manifest = pd.DataFrame.from_dict( + {"Filename": [""], "Sample ID": [""]} + ).reset_index(drop=True) + manifest_to_return = synapse_store.add_entity_id_and_filename( + datasetId="mock_syn_id", manifest=mock_manifest + ) + expected_df = pd.DataFrame.from_dict( + { + "Filename": ["test_file1", "test_file2"], + "Sample ID": ["", ""], + "entityId": ["syn123", "syn124"], + } + ) assert_frame_equal(manifest_to_return, expected_df) def test_get_files_metadata_from_dataset(self, synapse_store): - patch_get_children = [('syn123', 'parent_folder/test_A.txt'), ('syn456', 'parent_folder/test_B.txt')] - mock_file_entityId = {"Filename": ["parent_folder/test_A.txt", "parent_folder/test_B.txt"], "entityId": ["syn123", "syn456"]} - with patch("schematic.store.synapse.SynapseStorage.getFilesInStorageDataset", return_value=patch_get_children): - with patch("schematic.store.synapse.SynapseStorage._get_file_entityIds", return_value=mock_file_entityId): - dataset_file_names_id_dict = synapse_store._get_files_metadata_from_dataset("mock dataset id", only_new_files=True) - assert dataset_file_names_id_dict == {"Filename": ["parent_folder/test_A.txt", "parent_folder/test_B.txt"], "entityId": ["syn123", "syn456"]} + patch_get_children = [ + ("syn123", "parent_folder/test_A.txt"), + ("syn456", "parent_folder/test_B.txt"), + ] + mock_file_entityId = { + "Filename": ["parent_folder/test_A.txt", "parent_folder/test_B.txt"], + "entityId": ["syn123", "syn456"], + } + with patch( + "schematic.store.synapse.SynapseStorage.getFilesInStorageDataset", + return_value=patch_get_children, + ): + with patch( + "schematic.store.synapse.SynapseStorage._get_file_entityIds", + return_value=mock_file_entityId, + ): + dataset_file_names_id_dict = ( + synapse_store._get_files_metadata_from_dataset( + "mock dataset id", only_new_files=True + ) + ) + assert dataset_file_names_id_dict == { + "Filename": [ + "parent_folder/test_A.txt", + "parent_folder/test_B.txt", + ], + "entityId": ["syn123", "syn456"], + } + class TestDatasetFileView: def test_init(self, dataset_id, dataset_fileview, synapse_store): - assert dataset_fileview.datasetId == dataset_id assert dataset_fileview.synapse is synapse_store.syn assert dataset_fileview.parentId == dataset_id assert isinstance(dataset_fileview.view_schema, EntityViewSchema) def test_enter_exit(self, dataset_id, synapse_store): - # Within the 'with' statement, the file view should be available with DatasetFileView(dataset_id, synapse_store.syn) as fileview: assert isinstance(fileview.view_schema, EntityViewSchema) @@ -326,7 +455,6 @@ def test_enter_exit(self, dataset_id, synapse_store): assert fileview.view_schema is None def test_query(self, dataset_fileview_table): - table = dataset_fileview_table # The content is tested in test_getDatasetAnnotations() @@ -351,7 +479,6 @@ def test_query(self, dataset_fileview_table): assert math.isclose(year_value, 1980.0) def test_tidy_table(self, 
dataset_fileview_table_tidy): - table = dataset_fileview_table_tidy # The content is tested in test_getDatasetAnnotations() @@ -376,32 +503,55 @@ def test_tidy_table(self, dataset_fileview_table_tidy): assert isinstance(year_value, str) assert year_value == "1980" + @pytest.mark.table_operations class TestTableOperations: - @pytest.mark.parametrize("table_column_names", ['display_name', 'display_label', 'class_label'], ids=['tcn_display_name', 'tcn_display_label', 'tcn_class_label']) - @pytest.mark.parametrize("annotation_keys", ['display_label', 'class_label'], ids=['aks_display_label', 'aks_class_label']) - def test_createTable(self, helpers, synapse_store, config: Configuration, projectId, datasetId, table_column_names,annotation_keys): + @pytest.mark.parametrize( + "table_column_names", + ["display_name", "display_label", "class_label"], + ids=["tcn_display_name", "tcn_display_label", "tcn_class_label"], + ) + @pytest.mark.parametrize( + "annotation_keys", + ["display_label", "class_label"], + ids=["aks_display_label", "aks_class_label"], + ) + def test_createTable( + self, + helpers, + synapse_store, + config: Configuration, + projectId, + datasetId, + table_column_names, + annotation_keys, + ): table_manipulation = None # Check if FollowUp table exists if so delete - existing_tables = synapse_store.get_table_info(projectId = projectId) + existing_tables = synapse_store.get_table_info(projectId=projectId) + + table_name = "followup_synapse_storage_manifest_table" - table_name='followup_synapse_storage_manifest_table' - if table_name in existing_tables.keys(): synapse_store.syn.delete(existing_tables[table_name]) sleep(10) # assert no table - assert table_name not in synapse_store.get_table_info(projectId = projectId).keys() + assert ( + table_name + not in synapse_store.get_table_info(projectId=projectId).keys() + ) # associate metadata with files manifest_path = "mock_manifests/table_manifest.csv" - inputModelLocaiton = helpers.get_data_path(os.path.basename(config.model_location)) - + inputModelLocaiton = helpers.get_data_path( + os.path.basename(config.model_location) + ) + # Instantiate DataModelParser - data_model_parser = DataModelParser(path_to_data_model = inputModelLocaiton) - - #Parse Model + data_model_parser = DataModelParser(path_to_data_model=inputModelLocaiton) + + # Parse Model parsed_data_model = data_model_parser.parse_model() # Instantiate DataModelGraph @@ -415,48 +565,70 @@ def test_createTable(self, helpers, synapse_store, config: Configuration, projec # updating file view on synapse takes a long time manifestId = synapse_store.associateMetadataWithFiles( - dmge = dmge, - metadataManifestPath = helpers.get_data_path(manifest_path), - datasetId = datasetId, - manifest_record_type = 'table_and_file', - hideBlanks = True, - restrict_manifest = False, + dmge=dmge, + metadataManifestPath=helpers.get_data_path(manifest_path), + datasetId=datasetId, + manifest_record_type="table_and_file", + hideBlanks=True, + restrict_manifest=False, table_manipulation=table_manipulation, table_column_names=table_column_names, annotation_keys=annotation_keys, ) - existing_tables = synapse_store.get_table_info(projectId = projectId) - + existing_tables = synapse_store.get_table_info(projectId=projectId) + # clean Up synapse_store.syn.delete(manifestId) # assert table exists assert table_name in existing_tables.keys() - @pytest.mark.parametrize("table_column_names", ['display_label', 'class_label'], ids=['tcn_display_label', 'tcn_class_label']) - 
@pytest.mark.parametrize("annotation_keys", ['display_label', 'class_label'], ids=['aks_display_label', 'aks_class_label']) - def test_replaceTable(self, helpers, synapse_store, config: Configuration, projectId, datasetId, table_column_names, annotation_keys): - table_manipulation = 'replace' - - table_name='followup_synapse_storage_manifest_table' + @pytest.mark.parametrize( + "table_column_names", + ["display_label", "class_label"], + ids=["tcn_display_label", "tcn_class_label"], + ) + @pytest.mark.parametrize( + "annotation_keys", + ["display_label", "class_label"], + ids=["aks_display_label", "aks_class_label"], + ) + def test_replaceTable( + self, + helpers, + synapse_store, + config: Configuration, + projectId, + datasetId, + table_column_names, + annotation_keys, + ): + table_manipulation = "replace" + + table_name = "followup_synapse_storage_manifest_table" manifest_path = "mock_manifests/table_manifest.csv" replacement_manifest_path = "mock_manifests/table_manifest_replacement.csv" - column_of_interest="DaystoFollowUp" - + column_of_interest = "DaystoFollowUp" + # Check if FollowUp table exists if so delete - existing_tables = synapse_store.get_table_info(projectId = projectId) - + existing_tables = synapse_store.get_table_info(projectId=projectId) + if table_name in existing_tables.keys(): synapse_store.syn.delete(existing_tables[table_name]) sleep(10) # assert no table - assert table_name not in synapse_store.get_table_info(projectId = projectId).keys() + assert ( + table_name + not in synapse_store.get_table_info(projectId=projectId).keys() + ) # associate org FollowUp metadata with files - inputModelLocaiton = helpers.get_data_path(os.path.basename(config.model_location)) - #sg = SchemaGenerator(inputModelLocaiton) + inputModelLocaiton = helpers.get_data_path( + os.path.basename(config.model_location) + ) + # sg = SchemaGenerator(inputModelLocaiton) - data_model_parser = DataModelParser(path_to_data_model = inputModelLocaiton) - #Parse Model + data_model_parser = DataModelParser(path_to_data_model=inputModelLocaiton) + # Parse Model parsed_data_model = data_model_parser.parse_model() # Instantiate DataModelGraph @@ -470,74 +642,95 @@ def test_replaceTable(self, helpers, synapse_store, config: Configuration, proje # updating file view on synapse takes a long time manifestId = synapse_store.associateMetadataWithFiles( - dmge = dmge, - metadataManifestPath = helpers.get_data_path(manifest_path), - datasetId = datasetId, - manifest_record_type = 'table_and_file', - hideBlanks = True, - restrict_manifest = False, + dmge=dmge, + metadataManifestPath=helpers.get_data_path(manifest_path), + datasetId=datasetId, + manifest_record_type="table_and_file", + hideBlanks=True, + restrict_manifest=False, table_manipulation=table_manipulation, table_column_names=table_column_names, annotation_keys=annotation_keys, ) - existing_tables = synapse_store.get_table_info(projectId = projectId) + existing_tables = synapse_store.get_table_info(projectId=projectId) - # Query table for DaystoFollowUp column + # Query table for DaystoFollowUp column tableId = existing_tables[table_name] - daysToFollowUp = synapse_store.syn.tableQuery( - f"SELECT {column_of_interest} FROM {tableId}" - ).asDataFrame().squeeze() + daysToFollowUp = ( + synapse_store.syn.tableQuery(f"SELECT {column_of_interest} FROM {tableId}") + .asDataFrame() + .squeeze() + ) # assert Days to FollowUp == 73 assert (daysToFollowUp == 73).all() - + # Associate replacement manifest with files manifestId = 
synapse_store.associateMetadataWithFiles( - dmge = dmge, - metadataManifestPath = helpers.get_data_path(replacement_manifest_path), - datasetId = datasetId, - manifest_record_type = 'table_and_file', - hideBlanks = True, - restrict_manifest = False, + dmge=dmge, + metadataManifestPath=helpers.get_data_path(replacement_manifest_path), + datasetId=datasetId, + manifest_record_type="table_and_file", + hideBlanks=True, + restrict_manifest=False, table_column_names=table_column_names, annotation_keys=annotation_keys, ) - existing_tables = synapse_store.get_table_info(projectId = projectId) - - # Query table for DaystoFollowUp column + existing_tables = synapse_store.get_table_info(projectId=projectId) + + # Query table for DaystoFollowUp column tableId = existing_tables[table_name] - daysToFollowUp = synapse_store.syn.tableQuery( - f"SELECT {column_of_interest} FROM {tableId}" - ).asDataFrame().squeeze() + daysToFollowUp = ( + synapse_store.syn.tableQuery(f"SELECT {column_of_interest} FROM {tableId}") + .asDataFrame() + .squeeze() + ) # assert Days to FollowUp == 89 now and not 73 assert (daysToFollowUp == 89).all() - # delete table + # delete table synapse_store.syn.delete(tableId) - @pytest.mark.parametrize("annotation_keys", ['display_label', 'class_label'], ids=['aks_display_label', 'aks_class_label']) - def test_upsertTable(self, helpers, synapse_store, config:Configuration, projectId, datasetId, annotation_keys): + @pytest.mark.parametrize( + "annotation_keys", + ["display_label", "class_label"], + ids=["aks_display_label", "aks_class_label"], + ) + def test_upsertTable( + self, + helpers, + synapse_store, + config: Configuration, + projectId, + datasetId, + annotation_keys, + ): table_manipulation = "upsert" - table_name="MockRDB_synapse_storage_manifest_table".lower() + table_name = "MockRDB_synapse_storage_manifest_table".lower() manifest_path = "mock_manifests/rdb_table_manifest.csv" replacement_manifest_path = "mock_manifests/rdb_table_manifest_upsert.csv" - column_of_interest="MockRDB_id,SourceManifest" - + column_of_interest = "MockRDB_id,SourceManifest" + # Check if FollowUp table exists if so delete - existing_tables = synapse_store.get_table_info(projectId = projectId) - + existing_tables = synapse_store.get_table_info(projectId=projectId) + if table_name in existing_tables.keys(): synapse_store.syn.delete(existing_tables[table_name]) sleep(10) # assert no table - assert table_name not in synapse_store.get_table_info(projectId = projectId).keys() + assert ( + table_name + not in synapse_store.get_table_info(projectId=projectId).keys() + ) # associate org FollowUp metadata with files - inputModelLocaiton = helpers.get_data_path(os.path.basename(config.model_location)) + inputModelLocaiton = helpers.get_data_path( + os.path.basename(config.model_location) + ) - data_model_parser = DataModelParser(path_to_data_model = inputModelLocaiton) - #Parse Model + data_model_parser = DataModelParser(path_to_data_model=inputModelLocaiton) + # Parse Model parsed_data_model = data_model_parser.parse_model() # Instantiate DataModelGraph @@ -549,73 +742,95 @@ def test_upsertTable(self, helpers, synapse_store, config:Configuration, project # Instantiate DataModelGraphExplorer dmge = DataModelGraphExplorer(graph_data_model) - # updating file view on synapse takes a long time + # updating file view on synapse takes a long time manifestId = synapse_store.associateMetadataWithFiles( - dmge = dmge, - metadataManifestPath = helpers.get_data_path(manifest_path), - datasetId = datasetId, - 
manifest_record_type = 'table_and_file', - hideBlanks = True, - restrict_manifest = False, + dmge=dmge, + metadataManifestPath=helpers.get_data_path(manifest_path), + datasetId=datasetId, + manifest_record_type="table_and_file", + hideBlanks=True, + restrict_manifest=False, table_manipulation=table_manipulation, - table_column_names='display_name', + table_column_names="display_name", annotation_keys=annotation_keys, ) - existing_tables = synapse_store.get_table_info(projectId = projectId) + existing_tables = synapse_store.get_table_info(projectId=projectId) - #set primary key annotation for uploaded table + # set primary key annotation for uploaded table tableId = existing_tables[table_name] - # Query table for DaystoFollowUp column - table_query = synapse_store.syn.tableQuery( - f"SELECT {column_of_interest} FROM {tableId}" - ).asDataFrame().squeeze() + # Query table for DaystoFollowUp column + table_query = ( + synapse_store.syn.tableQuery(f"SELECT {column_of_interest} FROM {tableId}") + .asDataFrame() + .squeeze() + ) # assert max ID is '4' and that there are 4 entries assert table_query.MockRDB_id.max() == 4 assert table_query.MockRDB_id.size == 4 - assert table_query['SourceManifest'][3] == 'Manifest1' - + assert table_query["SourceManifest"][3] == "Manifest1" + # Associate new manifest with files manifestId = synapse_store.associateMetadataWithFiles( - dmge = dmge, - metadataManifestPath = helpers.get_data_path(replacement_manifest_path), - datasetId = datasetId, - manifest_record_type = 'table_and_file', - hideBlanks = True, - restrict_manifest = False, + dmge=dmge, + metadataManifestPath=helpers.get_data_path(replacement_manifest_path), + datasetId=datasetId, + manifest_record_type="table_and_file", + hideBlanks=True, + restrict_manifest=False, table_manipulation=table_manipulation, - table_column_names='display_name', + table_column_names="display_name", annotation_keys=annotation_keys, ) - existing_tables = synapse_store.get_table_info(projectId = projectId) - - # Query table for DaystoFollowUp column + existing_tables = synapse_store.get_table_info(projectId=projectId) + + # Query table for DaystoFollowUp column tableId = existing_tables[table_name] - table_query = synapse_store.syn.tableQuery( - f"SELECT {column_of_interest} FROM {tableId}" - ).asDataFrame().squeeze() + table_query = ( + synapse_store.syn.tableQuery(f"SELECT {column_of_interest} FROM {tableId}") + .asDataFrame() + .squeeze() + ) # assert max ID is '4' and that there are 4 entries assert table_query.MockRDB_id.max() == 8 assert table_query.MockRDB_id.size == 8 - assert table_query['SourceManifest'][3] == 'Manifest2' - # delete table + assert table_query["SourceManifest"][3] == "Manifest2" + # delete table synapse_store.syn.delete(tableId) + class TestDownloadManifest: - @pytest.mark.parametrize("datasetFileView", [{"id": ["syn51203973", "syn51203943"], "name": ["synapse_storage_manifest.csv", "synapse_storage_manifest_censored.csv"]}, {"id": ["syn51203973"], "name": ["synapse_storage_manifest.csv"]}, {"id": ["syn51203943"], "name": ["synapse_storage_manifest_censored.csv"]}]) + @pytest.mark.parametrize( + "datasetFileView", + [ + { + "id": ["syn51203973", "syn51203943"], + "name": [ + "synapse_storage_manifest.csv", + "synapse_storage_manifest_censored.csv", + ], + }, + {"id": ["syn51203973"], "name": ["synapse_storage_manifest.csv"]}, + {"id": ["syn51203943"], "name": ["synapse_storage_manifest_censored.csv"]}, + ], + ) def test_get_manifest_id(self, synapse_store, datasetFileView): # rows that contain the 
censored manifest datasetFileViewDataFrame = pd.DataFrame(datasetFileView) - row_censored = datasetFileViewDataFrame.loc[datasetFileViewDataFrame['name'] == "synapse_storage_manifest_censored.csv"] + row_censored = datasetFileViewDataFrame.loc[ + datasetFileViewDataFrame["name"] == "synapse_storage_manifest_censored.csv" + ] if not row_censored.empty > 0: - censored_manifest_id = row_censored['id'].values[0] + censored_manifest_id = row_censored["id"].values[0] # rows that contain the uncensored manifest - row_uncensored = datasetFileViewDataFrame.loc[datasetFileViewDataFrame['name'] == "synapse_storage_manifest.csv"] + row_uncensored = datasetFileViewDataFrame.loc[ + datasetFileViewDataFrame["name"] == "synapse_storage_manifest.csv" + ] if not row_uncensored.empty > 0: - uncensored_manifest_id = row_uncensored['id'].values[0] - + uncensored_manifest_id = row_uncensored["id"].values[0] + # get id of the uncensored manifest manifest_syn_id = synapse_store._get_manifest_id(datasetFileViewDataFrame) @@ -623,42 +838,44 @@ def test_get_manifest_id(self, synapse_store, datasetFileView): if not row_uncensored.empty > 0: assert manifest_syn_id == uncensored_manifest_id # if only censored manifests are present, return only id of censored manifest - elif row_uncensored.empty and not row_censored.empty: + elif row_uncensored.empty and not row_censored.empty: assert manifest_syn_id == censored_manifest_id - @pytest.mark.parametrize("newManifestName",["", "Example"]) + @pytest.mark.parametrize("newManifestName", ["", "Example"]) def test_download_manifest(self, mock_manifest_download, newManifestName): # test the download function by downloading a manifest - manifest_data = mock_manifest_download.download_manifest(mock_manifest_download, newManifestName) - assert os.path.exists(manifest_data['path']) + manifest_data = mock_manifest_download.download_manifest( + mock_manifest_download, newManifestName + ) + assert os.path.exists(manifest_data["path"]) if not newManifestName: assert manifest_data["name"] == "synapse_storage_manifest.csv" else: assert manifest_data["name"] == "Example.csv" - + # clean up - os.remove(manifest_data['path']) + os.remove(manifest_data["path"]) def test_download_access_restricted_manifest(self, synapse_store): - # attempt to download an uncensored manifest that has access restriction. + # attempt to download an uncensored manifest that has access restriction. 
# if the code works correctly, the censored manifest that does not have access restriction would get downloaded (see: syn29862066) md = ManifestDownload(synapse_store.syn, "syn29862066") manifest_data = md.download_manifest(md) - assert os.path.exists(manifest_data['path']) - - # clean up - os.remove(manifest_data['path']) + assert os.path.exists(manifest_data["path"]) + + # clean up + os.remove(manifest_data["path"]) def test_download_manifest_on_aws(self, mock_manifest_download, monkeypatch): # mock AWS environment by providing SECRETS_MANAGER_SECRETS environment variable and attempt to download a manifest - monkeypatch.setenv('SECRETS_MANAGER_SECRETS', 'mock_value') + monkeypatch.setenv("SECRETS_MANAGER_SECRETS", "mock_value") manifest_data = mock_manifest_download.download_manifest(mock_manifest_download) - assert os.path.exists(manifest_data['path']) - # clean up - os.remove(manifest_data['path']) + assert os.path.exists(manifest_data["path"]) + # clean up + os.remove(manifest_data["path"]) @pytest.mark.parametrize("entity_id", ["syn27600053", "syn29862078"]) def test_entity_type_checking(self, synapse_store, entity_id, caplog): @@ -666,4 +883,7 @@ def test_entity_type_checking(self, synapse_store, entity_id, caplog): md._entity_type_checking() if entity_id == "syn27600053": for record in caplog.records: - assert "You are using entity type: folder. Please provide a file ID" in record.message + assert ( + "You are using entity type: folder. Please provide a file ID" + in record.message + ) From e836887e4fa6072473f3f97db3ba8edaf6adbff3 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice <85905780+mialy-defelice@users.noreply.github.com> Date: Thu, 25 Jan 2024 10:36:07 -0800 Subject: [PATCH 081/199] formatting changes --- schematic_api/api/routes.py | 131 ++--- tests/test_api.py | 973 ++++++++++++++++++++++++------------ 2 files changed, 718 insertions(+), 386 deletions(-) diff --git a/schematic_api/api/routes.py b/schematic_api/api/routes.py index 77d2901a6..44fdd3e42 100644 --- a/schematic_api/api/routes.py +++ b/schematic_api/api/routes.py @@ -214,7 +214,6 @@ def initalize_metadata_model(schema_url): # get path to temp data model file (csv or jsonld) as appropriate data_model = get_temp_model_path(schema_url) - metadata_model = MetadataModel( inputMModelLocation=data_model, inputMModelLocationType="local" ) @@ -232,6 +231,7 @@ def get_temp_jsonld(schema_url): # get path to temporary JSON-LD file return tmp_file.name + def get_temp_csv(schema_url): # retrieve a CSV via URL and store it in a temporary location with urllib.request.urlopen(schema_url) as response: @@ -241,15 +241,18 @@ def get_temp_csv(schema_url): # get path to temporary csv file return tmp_file.name + def get_temp_model_path(schema_url): # Get model type: - model_extension = pathlib.Path(schema_url).suffix.replace('.', '').upper() - if model_extension == 'CSV': + model_extension = pathlib.Path(schema_url).suffix.replace(".", "").upper() + if model_extension == "CSV": temp_path = get_temp_csv(schema_url) - elif model_extension == 'JSONLD': + elif model_extension == "JSONLD": temp_path = get_temp_jsonld(schema_url) else: - raise ValueError("Did not provide a valid model type CSV or JSONLD, please check submission and try again.") + raise ValueError( + "Did not provide a valid model type CSV or JSONLD, please check submission and try again." + ) return temp_path @@ -266,7 +269,7 @@ def get_manifest_route( """Get the immediate dependencies that are related to a given source node. 
Args: schema_url: link to data model in json ld or csv format - title: title of a given manifest. + title: title of a given manifest. dataset_id: Synapse ID of the "dataset" entity on Synapse (for a given center/project). output_format: contains three option: "excel", "google_sheet", and "dataframe". if set to "excel", return an excel spreadsheet use_annotations: Whether to use existing annotations during manifest generation @@ -280,8 +283,8 @@ def get_manifest_route( access_token = get_access_token() # call config_handler() - config_handler(asset_view = asset_view) - + config_handler(asset_view=asset_view) + temp_path = get_temp_model_path(schema_url=schema_url) # Gather all data_types to make manifests for. @@ -314,12 +317,21 @@ def get_manifest_route( data_type[0] != "all manifests" except: raise ValueError( - f"When submitting 'all manifests' as the data_type cannot also submit dataset_id. " - f"Please check your submission and try again." - ) - - all_results = ManifestGenerator.create_manifests(path_to_data_model=schema_url, output_format=output_format, data_types=data_type, title=title, access_token=access_token, dataset_ids=dataset_ids, strict=strict_validation, use_annotations=use_annotations) - + f"When submitting 'all manifests' as the data_type cannot also submit dataset_id. " + f"Please check your submission and try again." + ) + + all_results = ManifestGenerator.create_manifests( + path_to_data_model=schema_url, + output_format=output_format, + data_types=data_type, + title=title, + access_token=access_token, + dataset_ids=dataset_ids, + strict=strict_validation, + use_annotations=use_annotations, + ) + return all_results @@ -419,7 +431,7 @@ def submit_manifest_route( validate_component = None else: validate_component = data_type - + # get path to temp data model file (csv or jsonld) as appropriate data_model = get_temp_model_path(schema_url) @@ -437,13 +449,13 @@ def submit_manifest_route( access_token = get_access_token() manifest_id = metadata_model.submit_metadata_manifest( - path_to_json_ld = data_model, - manifest_path=temp_path, - dataset_id=dataset_id, - validate_component=validate_component, - access_token=access_token, - manifest_record_type = manifest_record_type, - restrict_rules = restrict_rules, + path_to_json_ld=data_model, + manifest_path=temp_path, + dataset_id=dataset_id, + validate_component=validate_component, + access_token=access_token, + manifest_record_type=manifest_record_type, + restrict_rules=restrict_rules, hide_blanks=hide_blanks, table_manipulation=table_manipulation, project_scope=project_scope, @@ -461,12 +473,13 @@ def populate_manifest_route(schema_url, title=None, data_type=None, return_excel # Get path to temp file where manifest file contents will be saved temp_path = save_file() - # get path to temp data model file (csv or jsonld) as appropriate data_model = get_temp_model_path(schema_url) - - #Initalize MetadataModel - metadata_model = MetadataModel(inputMModelLocation=data_model, inputMModelLocationType='local') + + # Initalize MetadataModel + metadata_model = MetadataModel( + inputMModelLocation=data_model, inputMModelLocationType="local" + ) # Call populateModelManifest class populated_manifest_link = metadata_model.populateModelManifest( @@ -588,17 +601,18 @@ def get_viz_component_attributes_explorer(schema_url, component, include_index): # call config_handler() config_handler() - # get path to temp data model file (csv or jsonld) as appropriate + # get path to temp data model file (csv or jsonld) as appropriate data_model = 
get_temp_model_path(schema_url) - attributes_csv = AttributesExplorer(data_model).parse_component_attributes(component, save_file=False, include_index=include_index) + attributes_csv = AttributesExplorer(data_model).parse_component_attributes( + component, save_file=False, include_index=include_index + ) return attributes_csv @cross_origin(["http://localhost", "https://sage-bionetworks.github.io"]) def get_viz_tangled_tree_text(schema_url, figure_type, text_format): - # get path to temp data model file (csv or jsonld) as appropriate data_model = get_temp_model_path(schema_url) @@ -621,7 +635,7 @@ def get_viz_tangled_tree_layers(schema_url, figure_type): # Initialize Tangled Tree tangled_tree = TangledTree(data_model, figure_type) - + # Get tangled trees layers JSON. layers = tangled_tree.get_tangled_tree_layers(save_file=False) @@ -748,8 +762,8 @@ def get_manifest_datatype(manifest_id, asset_view): def get_schema_pickle(schema_url): - data_model_parser = DataModelParser(path_to_data_model = schema_url) - #Parse Model + data_model_parser = DataModelParser(path_to_data_model=schema_url) + # Parse Model parsed_data_model = data_model_parser.parse_model() # Instantiate DataModelGraph @@ -762,15 +776,15 @@ def get_schema_pickle(schema_url): path = os.getcwd() export_path = os.path.join(path, "tests/data/schema.gpickle") - with open(export_path, 'wb') as file: + with open(export_path, "wb") as file: pickle.dump(graph_data_model, file) return export_path def get_subgraph_by_edge_type(schema_url, relationship): - data_model_parser = DataModelParser(path_to_data_model = schema_url) - - #Parse Model + data_model_parser = DataModelParser(path_to_data_model=schema_url) + + # Parse Model parsed_data_model = data_model_parser.parse_model() # Instantiate DataModelGraph @@ -780,10 +794,10 @@ def get_subgraph_by_edge_type(schema_url, relationship): graph_data_model = data_model_grapher.generate_data_model_graph() dmge = DataModelGraphExplorer(graph_data_model) - + # relationship subgraph relationship_subgraph = dmge.get_subgraph_by_edge_type(relationship) - # return relationship + # return relationship Arr = [] for t in relationship_subgraph.edges: lst = list(t) @@ -793,8 +807,8 @@ def get_subgraph_by_edge_type(schema_url, relationship): def find_class_specific_properties(schema_url, schema_class): - data_model_parser = DataModelParser(path_to_data_model = schema_url) - #Parse Model + data_model_parser = DataModelParser(path_to_data_model=schema_url) + # Parse Model parsed_data_model = data_model_parser.parse_model() # Instantiate DataModelGraph @@ -834,8 +848,8 @@ def get_node_dependencies( Returns: list[str]: List of nodes that are dependent on the source node. 
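 
     Example (illustrative; names assume a model shaped like the example data model):
         get_node_dependencies(schema_url, "Patient") could return display names
         such as ["Patient ID", "Sex", "Year of Birth", "Diagnosis", "Component"]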
""" - data_model_parser = DataModelParser(path_to_data_model = schema_url) - #Parse Model + data_model_parser = DataModelParser(path_to_data_model=schema_url) + # Parse Model parsed_data_model = data_model_parser.parse_model() # Instantiate DataModelGraph @@ -845,7 +859,7 @@ def get_node_dependencies( graph_data_model = data_model_grapher.generate_data_model_graph() dmge = DataModelGraphExplorer(graph_data_model) - + dependencies = dmge.get_node_dependencies( source_node, return_display_names, return_schema_ordered ) @@ -853,8 +867,7 @@ def get_node_dependencies( def get_property_label_from_display_name_route( - display_name: str, - strict_camel_case: bool = False + display_name: str, strict_camel_case: bool = False ) -> str: """Converts a given display name string into a proper property label string @@ -867,7 +880,9 @@ def get_property_label_from_display_name_route( Returns: str: The property label of the display name """ - label = get_property_label_from_display_name(display_name=display_name, strict_camel_case=strict_camel_case) + label = get_property_label_from_display_name( + display_name=display_name, strict_camel_case=strict_camel_case + ) return label @@ -885,8 +900,8 @@ def get_node_range( Returns: list[str]: A list of nodes """ - data_model_parser = DataModelParser(path_to_data_model = schema_url) - #Parse Model + data_model_parser = DataModelParser(path_to_data_model=schema_url) + # Parse Model parsed_data_model = data_model_parser.parse_model() # Instantiate DataModelGraph @@ -912,8 +927,8 @@ def get_if_node_required(schema_url: str, node_display_name: str) -> bool: True: If the given node is a "required" node. False: If the given node is not a "required" (i.e., an "optional") node. """ - data_model_parser = DataModelParser(path_to_data_model = schema_url) - #Parse Model + data_model_parser = DataModelParser(path_to_data_model=schema_url) + # Parse Model parsed_data_model = data_model_parser.parse_model() # Instantiate DataModelGraph @@ -938,9 +953,9 @@ def get_node_validation_rules(schema_url: str, node_display_name: str) -> list: List of valiation rules for a given node. 
""" # Instantiate DataModelParser - data_model_parser = DataModelParser(path_to_data_model = schema_url) - - #Parse Model + data_model_parser = DataModelParser(path_to_data_model=schema_url) + + # Parse Model parsed_data_model = data_model_parser.parse_model() # Instantiate DataModelGraph @@ -949,7 +964,7 @@ def get_node_validation_rules(schema_url: str, node_display_name: str) -> list: # Generate graph graph_data_model = data_model_grapher.generate_data_model_graph() - #Instantiate DataModelGraphExplorer + # Instantiate DataModelGraphExplorer dmge = DataModelGraphExplorer(graph_data_model) node_validation_rules = dmge.get_node_validation_rules(node_display_name) @@ -969,9 +984,9 @@ def get_nodes_display_names(schema_url: str, node_list: list[str]) -> list: """ # Instantiate DataModelParser - data_model_parser = DataModelParser(path_to_data_model = schema_url) - - #Parse Model + data_model_parser = DataModelParser(path_to_data_model=schema_url) + + # Parse Model parsed_data_model = data_model_parser.parse_model() # Instantiate DataModelGraph @@ -980,7 +995,7 @@ def get_nodes_display_names(schema_url: str, node_list: list[str]) -> list: # Generate graph graph_data_model = data_model_grapher.generate_data_model_graph() - #Instantiate DataModelGraphExplorer + # Instantiate DataModelGraphExplorer dmge = DataModelGraphExplorer(graph_data_model) node_display_names = dmge.get_nodes_display_names(node_list) diff --git a/tests/test_api.py b/tests/test_api.py index d872134dd..c946d4c0d 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1,4 +1,3 @@ - import configparser import json import logging @@ -23,65 +22,83 @@ logging.basicConfig(level=logging.INFO) logger = logging.getLogger(__name__) + ## TO DO: Clean up url and use a global variable SERVER_URL @pytest.fixture(scope="class") def app(): app = create_app() yield app + @pytest.fixture(scope="class") def client(app): - app.config['SCHEMATIC_CONFIG'] = None + app.config["SCHEMATIC_CONFIG"] = None with app.test_client() as client: yield client + @pytest.fixture(scope="class") def test_manifest_csv(helpers): test_manifest_path = helpers.get_data_path("mock_manifests/Valid_Test_Manifest.csv") yield test_manifest_path + @pytest.fixture(scope="class") def test_manifest_submit(helpers): - test_manifest_path = helpers.get_data_path("mock_manifests/example_biospecimen_test.csv") - yield test_manifest_path + test_manifest_path = helpers.get_data_path( + "mock_manifests/example_biospecimen_test.csv" + ) + yield test_manifest_path + @pytest.fixture(scope="class") def test_invalid_manifest(helpers): - test_invalid_manifest = helpers.get_data_frame("mock_manifests/Invalid_Test_Manifest.csv", preserve_raw_input=False) + test_invalid_manifest = helpers.get_data_frame( + "mock_manifests/Invalid_Test_Manifest.csv", preserve_raw_input=False + ) yield test_invalid_manifest + @pytest.fixture(scope="class") def test_upsert_manifest_csv(helpers): - test_upsert_manifest_path = helpers.get_data_path("mock_manifests/rdb_table_manifest.csv") + test_upsert_manifest_path = helpers.get_data_path( + "mock_manifests/rdb_table_manifest.csv" + ) yield test_upsert_manifest_path + @pytest.fixture(scope="class") def test_manifest_json(helpers): - test_manifest_path = helpers.get_data_path("mock_manifests/Example.Patient.manifest.json") + test_manifest_path = helpers.get_data_path( + "mock_manifests/Example.Patient.manifest.json" + ) yield test_manifest_path + @pytest.fixture(scope="class") def data_model_jsonld(): - #data_model_jsonld 
="https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.model.jsonld" + # data_model_jsonld ="https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.model.jsonld" data_model_jsonld = "https://raw.githubusercontent.com/mialy-defelice/data_models/main/example.model.jsonld" yield data_model_jsonld + @pytest.fixture(scope="class") def benchmark_data_model_jsonld(): - #benchmark_data_model_jsonld = "https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.single_rule.model.jsonld" + # benchmark_data_model_jsonld = "https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.single_rule.model.jsonld" benchmark_data_model_jsonld = "https://raw.githubusercontent.com/mialy-defelice/data_models/main/example.single_rule.model.jsonld" yield benchmark_data_model_jsonld + def get_MockComponent_attribute(): """ Yield all of the mock conponent attributes one at a time TODO: pull in jsonld from fixture """ - #schema_url = "https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.single_rule.model.jsonld" + # schema_url = "https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.single_rule.model.jsonld" schema_url = "https://raw.githubusercontent.com/mialy-defelice/data_models/main/example.single_rule.model.jsonld" - data_model_parser = DataModelParser(path_to_data_model = schema_url) - #Parse Model + data_model_parser = DataModelParser(path_to_data_model=schema_url) + # Parse Model parsed_data_model = data_model_parser.parse_model() # Instantiate DataModelGraph @@ -91,43 +108,45 @@ def get_MockComponent_attribute(): graph_data_model = data_model_grapher.generate_data_model_graph() dmge = DataModelGraphExplorer(graph_data_model) - #sg = SchemaGenerator("https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.single_rule.model.jsonld") - attributes=dmge.get_node_dependencies('MockComponent') - attributes.remove('Component') + # sg = SchemaGenerator("https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.single_rule.model.jsonld") + attributes = dmge.get_node_dependencies("MockComponent") + attributes.remove("Component") for MockComponent_attribute in attributes: - yield MockComponent_attribute + yield MockComponent_attribute + @pytest.fixture(scope="class") -def syn_token(config:Configuration): +def syn_token(config: Configuration): synapse_config_path = config.synapse_configuration_path config_parser = configparser.ConfigParser() config_parser.read(synapse_config_path) # try using synapse access token if "SYNAPSE_ACCESS_TOKEN" in os.environ: - token=os.environ["SYNAPSE_ACCESS_TOKEN"] + token = os.environ["SYNAPSE_ACCESS_TOKEN"] else: token = config_parser["authentication"]["authtoken"] yield token + @pytest.fixture def request_headers(syn_token): - headers = { - "Authorization": "Bearer " + syn_token - } + headers = {"Authorization": "Bearer " + syn_token} yield headers + @pytest.mark.schematic_api class TestSynapseStorage: @pytest.mark.synapse_credentials_needed @pytest.mark.parametrize("return_type", ["json", "csv"]) def test_get_storage_assets_tables(self, client, return_type, request_headers): - params = { - "asset_view": "syn23643253", - "return_type": return_type - } + params = {"asset_view": "syn23643253", "return_type": return_type} - response = client.get('http://localhost:3001/v1/storage/assets/tables', query_string=params, headers=request_headers) 
+ response = client.get( + "http://localhost:3001/v1/storage/assets/tables", + query_string=params, + headers=request_headers, + ) assert response.status_code == 200 @@ -139,16 +158,16 @@ def test_get_storage_assets_tables(self, client, return_type, request_headers): # if return type == csv, returning a csv file else: assert response_dt.endswith("file_view_table.csv") - # clean up + # clean up if os.path.exists(response_dt): os.remove(response_dt) - else: + else: pass - + @pytest.mark.synapse_credentials_needed @pytest.mark.parametrize("full_path", [True, False]) @pytest.mark.parametrize("file_names", [None, "Sample_A.txt"]) - def test_get_dataset_files(self,full_path, file_names, request_headers, client): + def test_get_dataset_files(self, full_path, file_names, request_headers, client): params = { "asset_view": "syn23643253", "dataset_id": "syn23643250", @@ -157,68 +176,100 @@ def test_get_dataset_files(self,full_path, file_names, request_headers, client): if file_names: params["file_names"] = file_names - - response = client.get('http://localhost:3001/v1/storage/dataset/files', query_string=params, headers=request_headers) + + response = client.get( + "http://localhost:3001/v1/storage/dataset/files", + query_string=params, + headers=request_headers, + ) assert response.status_code == 200 response_dt = json.loads(response.data) # would show full file path .txt in result if full_path: - if file_names: - assert ["syn23643255","schematic - main/DataTypeX/Sample_A.txt"] and ["syn24226530","schematic - main/TestDatasets/TestDataset-Annotations/Sample_A.txt"] and ["syn25057024","schematic - main/TestDatasets/TestDataset-Annotations-v2/Sample_A.txt"] in response_dt - else: - assert ["syn23643255","schematic - main/DataTypeX/Sample_A.txt"] in response_dt - else: - if file_names: - assert ["syn23643255","Sample_A.txt"] and ["syn24226530","Sample_A.txt"] and ["syn25057024","Sample_A.txt"] in response_dt - assert ['syn23643256', 'Sample_C.txt'] and ['syn24226531', 'Sample_B.txt'] not in response_dt - else: - assert ['syn23643256', 'Sample_C.txt'] and ['syn24226530', 'Sample_A.txt'] and ['syn24226531', 'Sample_B.txt'] in response_dt - + if file_names: + assert ( + ["syn23643255", "schematic - main/DataTypeX/Sample_A.txt"] + and [ + "syn24226530", + "schematic - main/TestDatasets/TestDataset-Annotations/Sample_A.txt", + ] + and [ + "syn25057024", + "schematic - main/TestDatasets/TestDataset-Annotations-v2/Sample_A.txt", + ] + in response_dt + ) + else: + assert [ + "syn23643255", + "schematic - main/DataTypeX/Sample_A.txt", + ] in response_dt + else: + if file_names: + assert ( + ["syn23643255", "Sample_A.txt"] + and ["syn24226530", "Sample_A.txt"] + and ["syn25057024", "Sample_A.txt"] in response_dt + ) + assert ["syn23643256", "Sample_C.txt"] and [ + "syn24226531", + "Sample_B.txt", + ] not in response_dt + else: + assert ( + ["syn23643256", "Sample_C.txt"] + and ["syn24226530", "Sample_A.txt"] + and ["syn24226531", "Sample_B.txt"] in response_dt + ) + @pytest.mark.synapse_credentials_needed def test_get_storage_project_dataset(self, request_headers, client): - params = { - "asset_view": "syn23643253", - "project_id": "syn26251192" - } + params = {"asset_view": "syn23643253", "project_id": "syn26251192"} - response = client.get("http://localhost:3001/v1/storage/project/datasets", query_string = params, headers = request_headers) + response = client.get( + "http://localhost:3001/v1/storage/project/datasets", + query_string=params, + headers=request_headers, + ) assert response.status_code == 200 
response_dt = json.loads(response.data) - assert ["syn26251193","Issue522"] in response_dt + assert ["syn26251193", "Issue522"] in response_dt @pytest.mark.synapse_credentials_needed def test_get_storage_project_manifests(self, request_headers, client): + params = {"asset_view": "syn23643253", "project_id": "syn30988314"} - params = { - "asset_view": "syn23643253", - "project_id": "syn30988314" - } - - response = client.get("http://localhost:3001/v1/storage/project/manifests", query_string=params, headers=request_headers) + response = client.get( + "http://localhost:3001/v1/storage/project/manifests", + query_string=params, + headers=request_headers, + ) assert response.status_code == 200 @pytest.mark.synapse_credentials_needed def test_get_storage_projects(self, request_headers, client): + params = {"asset_view": "syn23643253"} - params = { - "asset_view": "syn23643253" - } - - response = client.get("http://localhost:3001/v1/storage/projects", query_string = params, headers = request_headers) + response = client.get( + "http://localhost:3001/v1/storage/projects", + query_string=params, + headers=request_headers, + ) assert response.status_code == 200 @pytest.mark.synapse_credentials_needed @pytest.mark.parametrize("entity_id", ["syn34640850", "syn23643253", "syn24992754"]) def test_get_entity_type(self, request_headers, client, entity_id): - params = { - "asset_view": "syn23643253", - "entity_id": entity_id - } - response = client.get("http://localhost:3001/v1/storage/entity/type", query_string = params, headers = request_headers) + params = {"asset_view": "syn23643253", "entity_id": entity_id} + response = client.get( + "http://localhost:3001/v1/storage/entity/type", + query_string=params, + headers=request_headers, + ) assert response.status_code == 200 response_dt = json.loads(response.data) @@ -232,11 +283,12 @@ def test_get_entity_type(self, request_headers, client, entity_id): @pytest.mark.synapse_credentials_needed @pytest.mark.parametrize("entity_id", ["syn30988314", "syn27221721"]) def test_if_in_assetview(self, request_headers, client, entity_id): - params = { - "asset_view": "syn23643253", - "entity_id": entity_id - } - response = client.get("http://localhost:3001/v1/storage/if_in_asset_view", query_string = params, headers = request_headers) + params = {"asset_view": "syn23643253", "entity_id": entity_id} + response = client.get( + "http://localhost:3001/v1/storage/if_in_asset_view", + query_string=params, + headers=request_headers, + ) assert response.status_code == 200 response_dt = json.loads(response.data) @@ -245,38 +297,47 @@ def test_if_in_assetview(self, request_headers, client, entity_id): elif entity_id == "syn27221721": assert response_dt == False + @pytest.mark.schematic_api class TestMetadataModelOperation: - @pytest.mark.parametrize("as_graph", [True, False]) + @pytest.mark.parametrize("as_graph", [True, False]) def test_component_requirement(self, client, data_model_jsonld, as_graph): params = { "schema_url": data_model_jsonld, - "source_component": "BulkRNA-seqAssay", - "as_graph": as_graph + "source_component": "BulkRNA-seqAssay", + "as_graph": as_graph, } - response = client.get("http://localhost:3001/v1/model/component-requirements", query_string = params) + response = client.get( + "http://localhost:3001/v1/model/component-requirements", query_string=params + ) assert response.status_code == 200 response_dt = json.loads(response.data) if as_graph: - assert response_dt == [['Biospecimen','Patient'],['BulkRNA-seqAssay','Biospecimen']] - else: - assert 
response_dt == ['Patient','Biospecimen','BulkRNA-seqAssay'] + assert response_dt == [ + ["Biospecimen", "Patient"], + ["BulkRNA-seqAssay", "Biospecimen"], + ] + else: + assert response_dt == ["Patient", "Biospecimen", "BulkRNA-seqAssay"] @pytest.mark.schematic_api class TestUtilsOperation: - @pytest.mark.parametrize("strict_camel_case", [True, False]) + @pytest.mark.parametrize("strict_camel_case", [True, False]) def test_get_property_label_from_display_name(self, client, strict_camel_case): params = { "display_name": "mocular entity", - "strict_camel_case": strict_camel_case + "strict_camel_case": strict_camel_case, } - response = client.get("http://localhost:3001/v1/utils/get_property_label_from_display_name", query_string = params) + response = client.get( + "http://localhost:3001/v1/utils/get_property_label_from_display_name", + query_string=params, + ) assert response.status_code == 200 response_dt = json.loads(response.data) @@ -290,10 +351,10 @@ def test_get_property_label_from_display_name(self, client, strict_camel_case): @pytest.mark.schematic_api class TestDataModelGraphExplorerOperation: def test_get_schema(self, client, data_model_jsonld): - params = { - "schema_url": data_model_jsonld - } - response = client.get("http://localhost:3001/v1/schemas/get/schema", query_string = params) + params = {"schema_url": data_model_jsonld} + response = client.get( + "http://localhost:3001/v1/schemas/get/schema", query_string=params + ) response_dt = response.data assert response.status_code == 200 @@ -304,61 +365,72 @@ def test_get_schema(self, client, data_model_jsonld): os.remove(response_dt) def test_if_node_required(test, client, data_model_jsonld): - params = { - "schema_url": data_model_jsonld, - "node_display_name": "FamilyHistory" - } + params = {"schema_url": data_model_jsonld, "node_display_name": "FamilyHistory"} - response = client.get("http://localhost:3001/v1/schemas/is_node_required", query_string = params) + response = client.get( + "http://localhost:3001/v1/schemas/is_node_required", query_string=params + ) response_dta = json.loads(response.data) assert response.status_code == 200 assert response_dta == True + def test_get_node_validation_rules(test, client, data_model_jsonld): params = { "schema_url": data_model_jsonld, - "node_display_name": "CheckRegexList" + "node_display_name": "CheckRegexList", } - response = client.get("http://localhost:3001/v1/schemas/get_node_validation_rules", query_string = params) + response = client.get( + "http://localhost:3001/v1/schemas/get_node_validation_rules", + query_string=params, + ) response_dta = json.loads(response.data) assert response.status_code == 200 assert "list strict" in response_dta - assert "regex match [a-f]" in response_dta + assert "regex match [a-f]" in response_dta def test_get_nodes_display_names(test, client, data_model_jsonld): params = { "schema_url": data_model_jsonld, - "node_list": ["FamilyHistory", "Biospecimen"] + "node_list": ["FamilyHistory", "Biospecimen"], } - response = client.get("http://localhost:3001/v1/schemas/get_nodes_display_names", query_string = params) + response = client.get( + "http://localhost:3001/v1/schemas/get_nodes_display_names", + query_string=params, + ) response_dta = json.loads(response.data) assert response.status_code == 200 assert "Family History" and "Biospecimen" in response_dta - @pytest.mark.parametrize("relationship", ["parentOf", "requiresDependency", "rangeValue", "domainValue"]) + @pytest.mark.parametrize( + "relationship", ["parentOf", "requiresDependency", 
"rangeValue", "domainValue"] + ) def test_get_subgraph_by_edge(self, client, data_model_jsonld, relationship): - params = { - "schema_url": data_model_jsonld, - "relationship": relationship - } + params = {"schema_url": data_model_jsonld, "relationship": relationship} - response = client.get("http://localhost:3001/v1/schemas/get/graph_by_edge_type", query_string=params) + response = client.get( + "http://localhost:3001/v1/schemas/get/graph_by_edge_type", + query_string=params, + ) assert response.status_code == 200 - @pytest.mark.parametrize("return_display_names", [True, False]) @pytest.mark.parametrize("node_label", ["FamilyHistory", "TissueStatus"]) - def test_get_node_range(self, client, data_model_jsonld, return_display_names, node_label): + def test_get_node_range( + self, client, data_model_jsonld, return_display_names, node_label + ): params = { "schema_url": data_model_jsonld, "return_display_names": return_display_names, - "node_label": node_label + "node_label": node_label, } - response = client.get('http://localhost:3001/v1/schemas/get_node_range', query_string=params) + response = client.get( + "http://localhost:3001/v1/schemas/get_node_range", query_string=params + ) response_dt = json.loads(response.data) assert response.status_code == 200 - if "node_label" == "FamilyHistory": + if "node_label" == "FamilyHistory": assert "Breast" in response_dt assert "Lung" in response_dt @@ -369,8 +441,14 @@ def test_get_node_range(self, client, data_model_jsonld, return_display_names, n @pytest.mark.parametrize("return_display_names", [None, True, False]) @pytest.mark.parametrize("return_schema_ordered", [None, True, False]) @pytest.mark.parametrize("source_node", ["Patient", "Biospecimen"]) - def test_node_dependencies(self, client, data_model_jsonld, source_node, return_display_names, return_schema_ordered): - + def test_node_dependencies( + self, + client, + data_model_jsonld, + source_node, + return_display_names, + return_schema_ordered, + ): return_display_names = True return_schema_ordered = False @@ -378,10 +456,13 @@ def test_node_dependencies(self, client, data_model_jsonld, source_node, return_ "schema_url": data_model_jsonld, "source_node": source_node, "return_display_names": return_display_names, - "return_schema_ordered": return_schema_ordered + "return_schema_ordered": return_schema_ordered, } - response = client.get('http://localhost:3001/v1/schemas/get_node_dependencies', query_string=params) + response = client.get( + "http://localhost:3001/v1/schemas/get_node_dependencies", + query_string=params, + ) response_dt = json.loads(response.data) assert response.status_code == 200 @@ -392,71 +473,92 @@ def test_node_dependencies(self, client, data_model_jsonld, source_node, return_ # by default, return_schema_ordered is set to True if return_schema_ordered == True or return_schema_ordered == None: - assert response_dt == ["Patient ID","Sex","Year of Birth","Diagnosis","Component"] - else: + assert response_dt == [ + "Patient ID", + "Sex", + "Year of Birth", + "Diagnosis", + "Component", + ] + else: assert "Year of Birth" in response_dt assert "Diagnosis" in response_dt - assert "Patient ID" in response_dt + assert "Patient ID" in response_dt else: assert "YearofBirth" in response_dt elif source_node == "Biospecimen": if return_display_names == True or return_display_names == None: assert "Tissue Status" in response_dt - else: + else: assert "TissueStatus" in response_dt + @pytest.mark.schematic_api class TestManifestOperation: - def ifExcelExists(self, response, file_name): # 
return one excel file - d = response.headers['content-disposition'] + d = response.headers["content-disposition"] fname = re.findall("filename=(.+)", d)[0] assert fname == file_name - + def ifGoogleSheetExists(self, response_dt): - for i in response_dt: + for i in response_dt: assert i.startswith("https://docs.google.com/") + def ifPandasDataframe(self, response_dt): for i in response_dt: df = pd.read_json(i) assert isinstance(df, pd.DataFrame) - @pytest.mark.empty_token - #@pytest.mark.parametrize("output_format", [None, "excel", "google_sheet", "dataframe (only if getting existing manifests)"]) + # @pytest.mark.parametrize("output_format", [None, "excel", "google_sheet", "dataframe (only if getting existing manifests)"]) @pytest.mark.parametrize("output_format", ["excel"]) - @pytest.mark.parametrize("data_type", ["Biospecimen", "Patient", "all manifests", ["Biospecimen", "Patient"]]) - def test_generate_existing_manifest(self, client, data_model_jsonld, data_type, output_format, caplog, request_headers): + @pytest.mark.parametrize( + "data_type", + ["Biospecimen", "Patient", "all manifests", ["Biospecimen", "Patient"]], + ) + def test_generate_existing_manifest( + self, + client, + data_model_jsonld, + data_type, + output_format, + caplog, + request_headers, + ): # set dataset if data_type == "Patient": - dataset_id = ["syn51730545"] #Mock Patient Manifest folder on synapse + dataset_id = ["syn51730545"] # Mock Patient Manifest folder on synapse elif data_type == "Biospecimen": - dataset_id = ["syn51730547"] #Mock biospecimen manifest folder + dataset_id = ["syn51730547"] # Mock biospecimen manifest folder elif data_type == ["Biospecimen", "Patient"]: dataset_id = ["syn51730547", "syn51730545"] - else: - dataset_id = None #if "all manifests", dataset id is None + else: + dataset_id = None # if "all manifests", dataset id is None params = { "schema_url": data_model_jsonld, "asset_view": "syn23643253", "title": "Example", "data_type": data_type, - "use_annotations": False, - } - + "use_annotations": False, + } + # Previous form of the test had `access_token` set to `None` request_headers["Authorization"] = None - if dataset_id: - params['dataset_id'] = dataset_id - - if output_format: - params['output_format'] = output_format + if dataset_id: + params["dataset_id"] = dataset_id + + if output_format: + params["output_format"] = output_format - response = client.get('http://localhost:3001/v1/manifest/generate', query_string=params, headers=request_headers) + response = client.get( + "http://localhost:3001/v1/manifest/generate", + query_string=params, + headers=request_headers, + ) assert response.status_code == 200 @@ -466,28 +568,48 @@ def test_generate_existing_manifest(self, client, data_model_jsonld, data_type, if isinstance(data_type, list) and len(data_type) > 1: # return warning message for record in caplog.records: - if record.message == "Currently we do not support returning multiple files as Excel format at once.": + if ( + record.message + == "Currently we do not support returning multiple files as Excel format at once." 
+ ): assert record.levelname == "WARNING" self.ifExcelExists(response, "Example.Biospecimen.manifest.xlsx") # for single data type - else: + else: self.ifExcelExists(response, "Example.xlsx") else: response_dt = json.loads(response.data) if "dataframe" in output_format: self.ifPandasDataframe(response_dt) assert len(response_dt) == len(dataset_id) - else: + else: self.ifGoogleSheetExists(response_dt) else: response_dt = json.loads(response.data) self.ifGoogleSheetExists(response_dt) - @pytest.mark.empty_token - @pytest.mark.parametrize("output_format", ["excel", "google_sheet", "dataframe (only if getting existing manifests)", None]) - @pytest.mark.parametrize("data_type", ["all manifests", ["Biospecimen", "Patient"], "Patient"]) - def test_generate_new_manifest(self, caplog, client, data_model_jsonld, data_type, output_format, request_headers): + @pytest.mark.parametrize( + "output_format", + [ + "excel", + "google_sheet", + "dataframe (only if getting existing manifests)", + None, + ], + ) + @pytest.mark.parametrize( + "data_type", ["all manifests", ["Biospecimen", "Patient"], "Patient"] + ) + def test_generate_new_manifest( + self, + caplog, + client, + data_model_jsonld, + data_type, + output_format, + request_headers, + ): params = { "schema_url": data_model_jsonld, "asset_view": "syn23643253", @@ -500,29 +622,37 @@ def test_generate_new_manifest(self, caplog, client, data_model_jsonld, data_typ # Previous form of the test had `access_token` set to `None` request_headers["Authorization"] = None - if output_format: + if output_format: params["output_format"] = output_format - - response = client.get('http://localhost:3001/v1/manifest/generate', query_string=params, headers=request_headers) + response = client.get( + "http://localhost:3001/v1/manifest/generate", + query_string=params, + headers=request_headers, + ) assert response.status_code == 200 - if output_format and output_format == "excel": if data_type == "all manifests": # return error message for record in caplog.records: - if record.message == "Currently we do not support returning multiple files as Excel format at once.": + if ( + record.message + == "Currently we do not support returning multiple files as Excel format at once." + ): assert record.levelname == "WARNING" elif isinstance(data_type, list) and len(data_type) > 1: # return warning message for record in caplog.records: - if record.message == "Currently we do not support returning multiple files as Excel format at once.": + if ( + record.message + == "Currently we do not support returning multiple files as Excel format at once." 
+ ): assert record.levelname == "WARNING" self.ifExcelExists(response, "Example.Biospecimen.manifest.xlsx") else: self.ifExcelExists(response, "Example.xlsx") - + # return one or multiple google sheet links in all other cases # note: output_format == dataframe only matters when dataset_id is not None else: @@ -531,34 +661,75 @@ def test_generate_new_manifest(self, caplog, client, data_model_jsonld, data_typ if data_type == "all manifests": assert len(response_dt) == 3 - elif isinstance(data_type, list) and len(data_type) >1: + elif isinstance(data_type, list) and len(data_type) > 1: assert len(response_dt) == 2 - else: + else: assert len(response_dt) == 1 - + # test case: generate a manifest when use_annotations is set to True/False for a file-based component # based on the parameter, the columns in the manifests would be different - # the dataset folder does not contain an existing manifest - @pytest.mark.parametrize("use_annotations,expected",[(True, ['Filename', 'Sample ID', 'File Format', 'Component', 'Genome Build', 'Genome FASTA', 'impact', 'Year of Birth', 'date', 'confidence', 'IsImportantBool', 'IsImportantText', 'author', 'eTag', 'entityId']), - (False, ['Filename', 'Sample ID', 'File Format', 'Component', 'Genome Build', 'Genome FASTA', 'entityId'])]) - def test_generate_manifest_file_based_annotations(self, client, use_annotations, expected, data_model_jsonld): + # the dataset folder does not contain an existing manifest + @pytest.mark.parametrize( + "use_annotations,expected", + [ + ( + True, + [ + "Filename", + "Sample ID", + "File Format", + "Component", + "Genome Build", + "Genome FASTA", + "impact", + "Year of Birth", + "date", + "confidence", + "IsImportantBool", + "IsImportantText", + "author", + "eTag", + "entityId", + ], + ), + ( + False, + [ + "Filename", + "Sample ID", + "File Format", + "Component", + "Genome Build", + "Genome FASTA", + "entityId", + ], + ), + ], + ) + def test_generate_manifest_file_based_annotations( + self, client, use_annotations, expected, data_model_jsonld + ): params = { "schema_url": data_model_jsonld, "data_type": "BulkRNA-seqAssay", "dataset_id": "syn25614635", "asset_view": "syn51707141", - "output_format": "google_sheet", - "use_annotations": use_annotations + "output_format": "google_sheet", + "use_annotations": use_annotations, } - response = client.get('http://localhost:3001/v1/manifest/generate', query_string=params) + response = client.get( + "http://localhost:3001/v1/manifest/generate", query_string=params + ) assert response.status_code == 200 response_google_sheet = json.loads(response.data) - - # open the google sheet - google_sheet_df = pd.read_csv(response_google_sheet[0] + '/export?gid=0&format=csv') - + + # open the google sheet + google_sheet_df = pd.read_csv( + response_google_sheet[0] + "/export?gid=0&format=csv" + ) + # make sure that columns used in annotations get added # and also make sure that entityId column appears in the end @@ -567,81 +738,126 @@ def test_generate_manifest_file_based_annotations(self, client, use_annotations, assert sorted(google_sheet_df.columns.to_list()) == sorted(expected) # make sure Filename, entityId, and component get filled with correct value - assert google_sheet_df["Filename"].to_list() == ["TestDataset-Annotations-v3/Sample_A.txt", "TestDataset-Annotations-v3/Sample_B.txt", "TestDataset-Annotations-v3/Sample_C.txt"] - assert google_sheet_df["entityId"].to_list() == ["syn25614636", "syn25614637", "syn25614638"] - assert google_sheet_df["Component"].to_list() == ["BulkRNA-seqAssay", 
"BulkRNA-seqAssay", "BulkRNA-seqAssay"] + assert google_sheet_df["Filename"].to_list() == [ + "TestDataset-Annotations-v3/Sample_A.txt", + "TestDataset-Annotations-v3/Sample_B.txt", + "TestDataset-Annotations-v3/Sample_C.txt", + ] + assert google_sheet_df["entityId"].to_list() == [ + "syn25614636", + "syn25614637", + "syn25614638", + ] + assert google_sheet_df["Component"].to_list() == [ + "BulkRNA-seqAssay", + "BulkRNA-seqAssay", + "BulkRNA-seqAssay", + ] # test case: generate a manifest with annotations when use_annotations is set to True for a component that is not file-based - # the dataset folder does not contain an existing manifest - def test_generate_manifest_not_file_based_with_annotations(self, client, data_model_jsonld): + # the dataset folder does not contain an existing manifest + def test_generate_manifest_not_file_based_with_annotations( + self, client, data_model_jsonld + ): params = { "schema_url": data_model_jsonld, "data_type": "Patient", "dataset_id": "syn25614635", "asset_view": "syn51707141", - "output_format": "google_sheet", - "use_annotations": False + "output_format": "google_sheet", + "use_annotations": False, } - response = client.get('http://localhost:3001/v1/manifest/generate', query_string=params) + response = client.get( + "http://localhost:3001/v1/manifest/generate", query_string=params + ) assert response.status_code == 200 response_google_sheet = json.loads(response.data) - # open the google sheet - google_sheet_df = pd.read_csv(response_google_sheet[0] + '/export?gid=0&format=csv') + # open the google sheet + google_sheet_df = pd.read_csv( + response_google_sheet[0] + "/export?gid=0&format=csv" + ) # make sure that the result is basically the same as generating a new manifest - assert sorted(google_sheet_df.columns) == sorted(['Patient ID', 'Sex', 'Year of Birth', 'Diagnosis', 'Component', 'Cancer Type', 'Family History']) - + assert sorted(google_sheet_df.columns) == sorted( + [ + "Patient ID", + "Sex", + "Year of Birth", + "Diagnosis", + "Component", + "Cancer Type", + "Family History", + ] + ) + def test_populate_manifest(self, client, data_model_jsonld, test_manifest_csv): # test manifest test_manifest_data = open(test_manifest_csv, "rb") - + params = { "data_type": "MockComponent", "schema_url": data_model_jsonld, "title": "Example", - "csv_file": test_manifest_data + "csv_file": test_manifest_data, } - response = client.get('http://localhost:3001/v1/manifest/generate', query_string=params) + response = client.get( + "http://localhost:3001/v1/manifest/generate", query_string=params + ) assert response.status_code == 200 response_dt = json.loads(response.data) - - # should return a list with one google sheet link + + # should return a list with one google sheet link assert isinstance(response_dt[0], str) assert response_dt[0].startswith("https://docs.google.com/") @pytest.mark.parametrize("restrict_rules", [False, True, None]) - @pytest.mark.parametrize("json_str", [None, '[{"Patient ID": 123, "Sex": "Female", "Year of Birth": "", "Diagnosis": "Healthy", "Component": "Patient", "Cancer Type": "Breast", "Family History": "Breast, Lung"}]']) - def test_validate_manifest(self, data_model_jsonld, client, json_str, restrict_rules, test_manifest_csv, request_headers): - - params = { - "schema_url": data_model_jsonld, - "restrict_rules": restrict_rules - } + @pytest.mark.parametrize( + "json_str", + [ + None, + '[{"Patient ID": 123, "Sex": "Female", "Year of Birth": "", "Diagnosis": "Healthy", "Component": "Patient", "Cancer Type": "Breast", "Family 
History": "Breast, Lung"}]', + ], + ) + def test_validate_manifest( + self, + data_model_jsonld, + client, + json_str, + restrict_rules, + test_manifest_csv, + request_headers, + ): + params = {"schema_url": data_model_jsonld, "restrict_rules": restrict_rules} if json_str: params["json_str"] = json_str params["data_type"] = "Patient" - response = client.post('http://localhost:3001/v1/model/validate', query_string=params) + response = client.post( + "http://localhost:3001/v1/model/validate", query_string=params + ) response_dt = json.loads(response.data) assert response.status_code == 200 - else: + else: params["data_type"] = "MockComponent" - request_headers.update({ - 'Content-Type': "multipart/form-data", - 'Accept': "application/json" - }) + request_headers.update( + {"Content-Type": "multipart/form-data", "Accept": "application/json"} + ) # test uploading a csv file - response_csv = client.post('http://localhost:3001/v1/model/validate', query_string=params, data={"file_name": (open(test_manifest_csv, 'rb'), "test.csv")}, headers=request_headers) + response_csv = client.post( + "http://localhost:3001/v1/model/validate", + query_string=params, + data={"file_name": (open(test_manifest_csv, "rb"), "test.csv")}, + headers=request_headers, + ) response_dt = json.loads(response_csv.data) assert response_csv.status_code == 200 - # test uploading a json file # change data type to patient since the testing json manifest is using Patient component @@ -656,46 +872,67 @@ def test_validate_manifest(self, data_model_jsonld, client, json_str, restrict_r @pytest.mark.synapse_credentials_needed def test_get_datatype_manifest(self, client, request_headers): - params = { - "asset_view": "syn23643253", - "manifest_id": "syn27600110" - } + params = {"asset_view": "syn23643253", "manifest_id": "syn27600110"} - response = client.get('http://localhost:3001/v1/get/datatype/manifest', query_string=params, headers=request_headers) + response = client.get( + "http://localhost:3001/v1/get/datatype/manifest", + query_string=params, + headers=request_headers, + ) assert response.status_code == 200 response_dt = json.loads(response.data) - assert response_dt =={ - "Cancer Type": "string", - "Component": "string", - "Diagnosis": "string", - "Family History": "string", - "Patient ID": "Int64", - "Sex": "string", - "Year of Birth": "Int64", - "entityId": "string"} + assert response_dt == { + "Cancer Type": "string", + "Component": "string", + "Diagnosis": "string", + "Family History": "string", + "Patient ID": "Int64", + "Sex": "string", + "Year of Birth": "Int64", + "entityId": "string", + } @pytest.mark.synapse_credentials_needed # small manifest: syn51078535; big manifest: syn51156998 - @pytest.mark.parametrize("manifest_id, expected_component, expected_file_name", [("syn51078535", "BulkRNA-seqAssay", "synapse_storage_manifest.csv"), ("syn51156998", "Biospecimen", "synapse_storage_manifest_biospecimen.csv")]) - @pytest.mark.parametrize("new_manifest_name",[None,"Example.csv"]) - @pytest.mark.parametrize("as_json",[None,True,False]) - def test_manifest_download(self, config: Configuration, client, request_headers, manifest_id, new_manifest_name, as_json, expected_component, expected_file_name): + @pytest.mark.parametrize( + "manifest_id, expected_component, expected_file_name", + [ + ("syn51078535", "BulkRNA-seqAssay", "synapse_storage_manifest.csv"), + ("syn51156998", "Biospecimen", "synapse_storage_manifest_biospecimen.csv"), + ], + ) + @pytest.mark.parametrize("new_manifest_name", [None, "Example.csv"]) + 
@pytest.mark.parametrize("as_json", [None, True, False]) + def test_manifest_download( + self, + config: Configuration, + client, + request_headers, + manifest_id, + new_manifest_name, + as_json, + expected_component, + expected_file_name, + ): params = { "manifest_id": manifest_id, - "new_manifest_name": new_manifest_name, - "as_json": as_json - + "new_manifest_name": new_manifest_name, + "as_json": as_json, } - response = client.get('http://localhost:3001/v1/manifest/download', query_string = params, headers = request_headers) + response = client.get( + "http://localhost:3001/v1/manifest/download", + query_string=params, + headers=request_headers, + ) assert response.status_code == 200 # if as_json is set to True or as_json is not defined, then a json gets returned if as_json or as_json is None: response_dta = json.loads(response.data) - # check if the correct manifest gets downloaded + # check if the correct manifest gets downloaded assert response_dta[0]["Component"] == expected_component current_work_dir = os.getcwd() @@ -704,10 +941,12 @@ def test_manifest_download(self, config: Configuration, client, request_headers, # if a manfiest gets renamed, get new manifest file path if new_manifest_name: - manifest_file_path = os.path.join(folder_dir, new_manifest_name + '.' + 'csv') + manifest_file_path = os.path.join( + folder_dir, new_manifest_name + "." + "csv" + ) # if a manifest does not get renamed, get existing manifest file path - else: - manifest_file_path = os.path.join(folder_dir,expected_file_name) + else: + manifest_file_path = os.path.join(folder_dir, expected_file_name) else: # manifest file path gets returned @@ -716,47 +955,55 @@ def test_manifest_download(self, config: Configuration, client, request_headers, file_base_name = os.path.basename(manifest_file_path) file_name = os.path.splitext(file_base_name)[0] - if new_manifest_name: + if new_manifest_name: assert file_name == new_manifest_name # make sure file gets correctly downloaded assert os.path.exists(manifest_file_path) - #delete files - try: + # delete files + try: os.remove(manifest_file_path) - except: + except: pass @pytest.mark.synapse_credentials_needed # test downloading a manifest with access restriction and see if the correct error message got raised def test_download_access_restricted_manifest(self, client, request_headers): - params = { - "manifest_id": "syn29862078" - } + params = {"manifest_id": "syn29862078"} - response = client.get('http://localhost:3001/v1/manifest/download', query_string = params, headers = request_headers) + response = client.get( + "http://localhost:3001/v1/manifest/download", + query_string=params, + headers=request_headers, + ) assert response.status_code == 500 with pytest.raises(TypeError) as exc_info: - raise TypeError('the type error got raised') + raise TypeError("the type error got raised") assert exc_info.value.args[0] == "the type error got raised" - + @pytest.mark.synapse_credentials_needed @pytest.mark.parametrize("as_json", [None, True, False]) @pytest.mark.parametrize("new_manifest_name", [None, "Test"]) - def test_dataset_manifest_download(self, client, as_json, request_headers, new_manifest_name): + def test_dataset_manifest_download( + self, client, as_json, request_headers, new_manifest_name + ): params = { "asset_view": "syn28559058", "dataset_id": "syn28268700", "as_json": as_json, - "new_manifest_name": new_manifest_name + "new_manifest_name": new_manifest_name, } - response = client.get('http://localhost:3001/v1/dataset/manifest/download', query_string = 
params, headers = request_headers) + response = client.get( + "http://localhost:3001/v1/dataset/manifest/download", + query_string=params, + headers=request_headers, + ) assert response.status_code == 200 response_dt = response.data - if as_json: + if as_json: response_json = json.loads(response_dt) assert response_json[0]["Component"] == "BulkRNA-seqAssay" assert response_json[0]["File Format"] == "CSV/TSV" @@ -764,50 +1011,70 @@ def test_dataset_manifest_download(self, client, as_json, request_headers, new_m assert response_json[0]["entityId"] == "syn28278954" else: # return a file path - response_path = response_dt.decode('utf-8') + response_path = response_dt.decode("utf-8") assert isinstance(response_path, str) assert response_path.endswith(".csv") @pytest.mark.synapse_credentials_needed @pytest.mark.submission - def test_submit_manifest_table_and_file_replace(self, client, request_headers, data_model_jsonld, test_manifest_submit): - """Testing submit manifest in a csv format as a table and a file. Only replace the table - """ + def test_submit_manifest_table_and_file_replace( + self, client, request_headers, data_model_jsonld, test_manifest_submit + ): + """Testing submit manifest in a csv format as a table and a file. Only replace the table""" params = { "schema_url": data_model_jsonld, "data_type": "Biospecimen", - "restrict_rules": False, - "hide_blanks": False, + "restrict_rules": False, + "hide_blanks": False, "manifest_record_type": "table_and_file", "asset_view": "syn51514344", "dataset_id": "syn51514345", - "table_manipulation": 'replace', - "table_column_names": 'class_label', + "table_manipulation": "replace", + "table_column_names": "class_label", } - response_csv = client.post('http://localhost:3001/v1/model/submit', query_string=params, data={"file_name": (open(test_manifest_submit, 'rb'), "test.csv")}, headers=request_headers) + response_csv = client.post( + "http://localhost:3001/v1/model/submit", + query_string=params, + data={"file_name": (open(test_manifest_submit, "rb"), "test.csv")}, + headers=request_headers, + ) assert response_csv.status_code == 200 @pytest.mark.synapse_credentials_needed @pytest.mark.submission - @pytest.mark.parametrize("data_type, manifest_path_fixture",[("Biospecimen","test_manifest_submit"), ("MockComponent", "test_manifest_csv")]) - def test_submit_manifest_file_only_replace(self, helpers, client, request_headers, data_model_jsonld, data_type, manifest_path_fixture, request): - """Testing submit manifest in a csv format as a file - """ + @pytest.mark.parametrize( + "data_type, manifest_path_fixture", + [ + ("Biospecimen", "test_manifest_submit"), + ("MockComponent", "test_manifest_csv"), + ], + ) + def test_submit_manifest_file_only_replace( + self, + helpers, + client, + request_headers, + data_model_jsonld, + data_type, + manifest_path_fixture, + request, + ): + """Testing submit manifest in a csv format as a file""" params = { "schema_url": data_model_jsonld, "data_type": data_type, - "restrict_rules": False, + "restrict_rules": False, "manifest_record_type": "file_only", - "table_manipulation": 'replace', - "table_column_names": 'class_label', + "table_manipulation": "replace", + "table_column_names": "class_label", } if data_type == "Biospecimen": specific_params = { - "asset_view": "syn51514344", - "dataset_id": "syn51514345", + "asset_view": "syn51514344", + "dataset_id": "syn51514345", } elif data_type == "MockComponent": @@ -815,120 +1082,170 @@ def test_submit_manifest_file_only_replace(self, helpers, client, request_header if 
python_version == "3.10": dataset_id = "syn52656106" - elif python_version =="3.9": + elif python_version == "3.9": dataset_id = "syn52656104" - specific_params = { - "asset_view": "syn23643253", - "dataset_id": dataset_id - } + specific_params = {"asset_view": "syn23643253", "dataset_id": dataset_id} params.update(specific_params) manifest_path = request.getfixturevalue(manifest_path_fixture) - response_csv = client.post('http://localhost:3001/v1/model/submit', query_string=params, data={"file_name": (open(manifest_path, 'rb'), "test.csv")}, headers=request_headers) - assert response_csv.status_code == 200 + response_csv = client.post( + "http://localhost:3001/v1/model/submit", + query_string=params, + data={"file_name": (open(manifest_path, "rb"), "test.csv")}, + headers=request_headers, + ) + assert response_csv.status_code == 200 - @pytest.mark.synapse_credentials_needed + @pytest.mark.synapse_credentials_needed @pytest.mark.submission - def test_submit_manifest_json_str_replace(self, client, request_headers, data_model_jsonld): - """Submit json str as a file - """ + def test_submit_manifest_json_str_replace( + self, client, request_headers, data_model_jsonld + ): + """Submit json str as a file""" json_str = '[{"Sample ID": 123, "Patient ID": 1,"Tissue Status": "Healthy","Component": "Biospecimen"}]' params = { "schema_url": data_model_jsonld, "data_type": "Biospecimen", "json_str": json_str, - "restrict_rules": False, + "restrict_rules": False, "manifest_record_type": "file_only", "asset_view": "syn51514344", "dataset_id": "syn51514345", - "table_manipulation": 'replace', - "table_column_names": 'class_label', + "table_manipulation": "replace", + "table_column_names": "class_label", } params["json_str"] = json_str - response = client.post('http://localhost:3001/v1/model/submit', query_string = params, data={"file_name":''}, headers = request_headers) + response = client.post( + "http://localhost:3001/v1/model/submit", + query_string=params, + data={"file_name": ""}, + headers=request_headers, + ) assert response.status_code == 200 @pytest.mark.synapse_credentials_needed @pytest.mark.submission - def test_submit_manifest_w_file_and_entities(self, client, request_headers, data_model_jsonld, test_manifest_submit): + def test_submit_manifest_w_file_and_entities( + self, client, request_headers, data_model_jsonld, test_manifest_submit + ): params = { "schema_url": data_model_jsonld, "data_type": "Biospecimen", - "restrict_rules": False, + "restrict_rules": False, "manifest_record_type": "file_and_entities", "asset_view": "syn51514501", "dataset_id": "syn51514523", - "table_manipulation": 'replace', - "table_column_names": 'class_label', - "annotation_keys": 'class_label', + "table_manipulation": "replace", + "table_column_names": "class_label", + "annotation_keys": "class_label", } # test uploading a csv file - response_csv = client.post('http://localhost:3001/v1/model/submit', query_string=params, data={"file_name": (open(test_manifest_submit, 'rb'), "test.csv")}, headers=request_headers) + response_csv = client.post( + "http://localhost:3001/v1/model/submit", + query_string=params, + data={"file_name": (open(test_manifest_submit, "rb"), "test.csv")}, + headers=request_headers, + ) assert response_csv.status_code == 200 @pytest.mark.synapse_credentials_needed @pytest.mark.submission - def test_submit_manifest_table_and_file_upsert(self, client, request_headers, data_model_jsonld, test_upsert_manifest_csv, ): + def test_submit_manifest_table_and_file_upsert( + self, + client, + 
request_headers, + data_model_jsonld, + test_upsert_manifest_csv, + ): params = { "schema_url": data_model_jsonld, "data_type": "MockRDB", - "restrict_rules": False, + "restrict_rules": False, "manifest_record_type": "table_and_file", "asset_view": "syn51514557", "dataset_id": "syn51514551", - "table_manipulation": 'upsert', - "table_column_names": 'display_name' # have to set table_column_names to display_name to ensure upsert feature works + "table_manipulation": "upsert", + "table_column_names": "display_name", # have to set table_column_names to display_name to ensure upsert feature works } # test uploading a csv file - response_csv = client.post('http://localhost:3001/v1/model/submit', query_string=params, data={"file_name": (open(test_upsert_manifest_csv, 'rb'), "test.csv")}, headers=request_headers) - assert response_csv.status_code == 200 + response_csv = client.post( + "http://localhost:3001/v1/model/submit", + query_string=params, + data={"file_name": (open(test_upsert_manifest_csv, "rb"), "test.csv")}, + headers=request_headers, + ) + assert response_csv.status_code == 200 + @pytest.mark.schematic_api class TestSchemaVisualization: def test_visualize_attributes(self, client, data_model_jsonld): - params = { - "schema_url": data_model_jsonld - } + params = {"schema_url": data_model_jsonld} - response = client.get("http://localhost:3001/v1/visualize/attributes", query_string = params) + response = client.get( + "http://localhost:3001/v1/visualize/attributes", query_string=params + ) assert response.status_code == 200 @pytest.mark.parametrize("figure_type", ["component", "dependency"]) - def test_visualize_tangled_tree_layers(self, client, figure_type, data_model_jsonld): + def test_visualize_tangled_tree_layers( + self, client, figure_type, data_model_jsonld + ): # TODO: Determine a 2nd data model to use for this test, test both models sequentially, add checks for content of response - params = { - "schema_url": data_model_jsonld, - "figure_type": figure_type - } + params = {"schema_url": data_model_jsonld, "figure_type": figure_type} - response = client.get("http://localhost:3001/v1/visualize/tangled_tree/layers", query_string = params) + response = client.get( + "http://localhost:3001/v1/visualize/tangled_tree/layers", + query_string=params, + ) assert response.status_code == 200 - @pytest.mark.parametrize("component, response_text", [("Patient", "Component,Component,TBD,True,,,,Patient"), ("BulkRNA-seqAssay", "Component,Component,TBD,True,,,,BulkRNA-seqAssay")]) - def test_visualize_component(self, client, data_model_jsonld,component, response_text): + @pytest.mark.parametrize( + "component, response_text", + [ + ("Patient", "Component,Component,TBD,True,,,,Patient"), + ("BulkRNA-seqAssay", "Component,Component,TBD,True,,,,BulkRNA-seqAssay"), + ], + ) + def test_visualize_component( + self, client, data_model_jsonld, component, response_text + ): params = { "schema_url": data_model_jsonld, "component": component, - "include_index": False + "include_index": False, } - response = client.get("http://localhost:3001/v1/visualize/component", query_string = params) + response = client.get( + "http://localhost:3001/v1/visualize/component", query_string=params + ) assert response.status_code == 200 - assert "Attribute,Label,Description,Required,Cond_Req,Valid Values,Conditional Requirements,Component" in response.text + assert ( + "Attribute,Label,Description,Required,Cond_Req,Valid Values,Conditional Requirements,Component" + in response.text + ) assert response_text in response.text + 
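
# Editorial sketch, not part of the patch: the component visualization endpoint
# returns CSV text, so a test could parse it with pandas for column-level
# assertions instead of substring checks. `check_component_csv` is a
# hypothetical helper; it assumes `io` is imported alongside pandas and reuses
# the same params dict built in test_visualize_component above.
def check_component_csv(client, params):
    # Fetch the component CSV from the visualization endpoint
    response = client.get(
        "http://localhost:3001/v1/visualize/component", query_string=params
    )
    # Parse the CSV body; the header row asserted above becomes the columns
    component_df = pd.read_csv(io.StringIO(response.text))
    assert "Attribute" in component_df.columns
    return component_df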
 @pytest.mark.schematic_api
 @pytest.mark.rule_benchmark
-class TestValidationBenchmark():
-    @pytest.mark.parametrize('MockComponent_attribute', get_MockComponent_attribute())
-    def test_validation_performance(self, helpers, benchmark_data_model_jsonld, client, test_invalid_manifest, MockComponent_attribute ):
+class TestValidationBenchmark:
+    @pytest.mark.parametrize("MockComponent_attribute", get_MockComponent_attribute())
+    def test_validation_performance(
+        self,
+        helpers,
+        benchmark_data_model_jsonld,
+        client,
+        test_invalid_manifest,
+        MockComponent_attribute,
+    ):
         """
         Test to benchmark performance of validation rules on large manifests
         Test loads the invalid_test_manifest.csv and isolates one attribute at a time
@@ -941,58 +1258,58 @@ def test_validation_performance(self, helpers, benchmark_data_model_jsonld, clie
         # Number of rows to target for large manifest
         target_rows = 1000
         # URL of validation endpoint
-        endpoint_url = 'http://localhost:3001/v1/model/validate'
+        endpoint_url = "http://localhost:3001/v1/model/validate"
         # Set parameters for endpoint
-        params = {
+        params = {
             "schema_url": benchmark_data_model_jsonld,
             "data_type": "MockComponent",
-
-        }
-        headers = {
-            'Content-Type': "multipart/form-data",
-            'Accept': "application/json"
         }
+        headers = {"Content-Type": "multipart/form-data", "Accept": "application/json"}

         # Enforce error rate when possible
-        if MockComponent_attribute == 'Check Ages':
-            test_invalid_manifest.loc[0,MockComponent_attribute] = '6550'
-        elif MockComponent_attribute == 'Check Date':
-            test_invalid_manifest.loc[0,MockComponent_attribute] = 'October 21 2022'
-            test_invalid_manifest.loc[2,MockComponent_attribute] = 'October 21 2022'
-        elif MockComponent_attribute == 'Check Unique':
-            test_invalid_manifest.loc[0,MockComponent_attribute] = 'str2'
-
+        if MockComponent_attribute == "Check Ages":
+            test_invalid_manifest.loc[0, MockComponent_attribute] = "6550"
+        elif MockComponent_attribute == "Check Date":
+            test_invalid_manifest.loc[0, MockComponent_attribute] = "October 21 2022"
+            test_invalid_manifest.loc[2, MockComponent_attribute] = "October 21 2022"
+        elif MockComponent_attribute == "Check Unique":
+            test_invalid_manifest.loc[0, MockComponent_attribute] = "str2"

         # Isolate single attribute of interest, keep `Component` column
-        single_attribute_manifest = test_invalid_manifest[['Component', MockComponent_attribute]]
+        single_attribute_manifest = test_invalid_manifest[
+            ["Component", MockComponent_attribute]
+        ]

         # Extend to ~1000 rows for the performance test
-        multi_factor = ceil(target_rows/single_attribute_manifest.shape[0])
-        large_manifest = pd.concat([single_attribute_manifest]*multi_factor, ignore_index = True)
+        multi_factor = ceil(target_rows / single_attribute_manifest.shape[0])
+        large_manifest = pd.concat(
+            [single_attribute_manifest] * multi_factor, ignore_index=True
+        )

         try:
             # Convert manifest to csv for api endpoint
-            large_manifest_path = helpers.get_data_path('mock_manifests/large_manifest_test.csv')
+            large_manifest_path = helpers.get_data_path(
+                "mock_manifests/large_manifest_test.csv"
+            )
             large_manifest.to_csv(large_manifest_path, index=False)

             # Run and time endpoint
             t_start = perf_counter()
-            response = client.post(endpoint_url, query_string=params, data={"file_name": (open(large_manifest_path, 'rb'), "large_test.csv")}, headers=headers)
+            response = client.post(
+                endpoint_url,
+                query_string=params,
+                data={"file_name": (open(large_manifest_path, "rb"), "large_test.csv")},
+                headers=headers,
+            )
             response_time = perf_counter() - t_start
         finally:
             # Remove temp manifest
             os.remove(large_manifest_path)
-
+
         # Log and check time and ensure successful response
-        logger.warning(f"validation endpoint response time {round(response_time,2)} seconds.")
+        logger.warning(
+            f"validation endpoint response time {round(response_time,2)} seconds."
+        )
         assert response.status_code == 200
-        assert response_time < 5.00
-
-
-
-
-
-
-
-
+        assert response_time < 5.00

From b9147b608cee6a02aef2bb1561394ffa5e95a7a3 Mon Sep 17 00:00:00 2001
From: Mialy DeFelice <85905780+mialy-defelice@users.noreply.github.com>
Date: Thu, 25 Jan 2024 11:05:56 -0800
Subject: [PATCH 082/199] update additional references from display_name_as_label to data_model_labels and run black on additional files

---
 schematic/help.py               | 21 +++++++++++++------
 schematic/manifest/commands.py  | 10 +++++-----
 schematic/models/commands.py    | 11 ++++++-----
 schematic/schemas/commands.py   | 12 ++++++------
 schematic/store/synapse.py      |  4 ++--
 schematic/utils/schema_utils.py |  4 ++--
 6 files changed, 36 insertions(+), 26 deletions(-)

diff --git a/schematic/help.py b/schematic/help.py
index cc96dc6a5..e34210391 100644
--- a/schematic/help.py
+++ b/schematic/help.py
@@ -135,8 +135,11 @@
             "Upsert functionality requires primary keys to be specified in the data model and manifest as _id."
             "Currently it is required to use -dl/--use_display_label with table upserts."
         ),
-        "display_name_as_label": (
-            "Flag to indicate that the display name should be used as the label. This requires that the display name be properly formatted without spaces or blacklisted characters."
+        "data_model_labels": (
+            "Choose how to set the label in the data model. "
+            "display_label, use the display name as a label, if it is valid (contains no blacklisted characters) otherwise will default to class_label. "
+            "class_label, default, use standard class or property label. "
+            "Do not change from default unless there is a real need, using 'display_label' can have consequences if not used properly."
+        ),
     },
     "validate": {
@@ -163,8 +166,11 @@
         "project_scope": (
             "Specify a comma-separated list of projects to search through for cross manifest validation."
         ),
-        "display_name_as_label": (
-            "Flag to indicate that the display name should be used as the label. This requires that the display name be properly formatted without spaces or blacklisted characters."
+        "data_model_labels": (
+            "Choose how to set the label in the data model. "
+            "display_label, use the display name as a label, if it is valid (contains no blacklisted characters) otherwise will default to class_label. "
+            "class_label, default, use standard class or property label. "
+            "Do not change from default unless there is a real need, using 'display_label' can have consequences if not used properly."
+        ),
     },
 }
@@ -181,8 +187,11 @@
         "output_jsonld": (
             "Path to where the generated JSON-LD file needs to be outputted."
         ),
-        "display_name_as_label": (
-            "Flag to indicate that the display name should be used as the label. This requires that the display name be properly formatted without spaces or blacklisted characters."
+        "data_model_labels": (
+            "Choose how to set the label in the data model. "
+            "display_label, use the display name as a label, if it is valid (contains no blacklisted characters) otherwise will default to class_label. "
+            "class_label, default, use standard class or property label. "
+            "Do not change from default unless there is a real need, using 'display_label' can have consequences if not used properly."
), } } diff --git a/schematic/manifest/commands.py b/schematic/manifest/commands.py index 91efe598a..f52dc8e14 100644 --- a/schematic/manifest/commands.py +++ b/schematic/manifest/commands.py @@ -106,10 +106,10 @@ def manifest(ctx, config): # use as `schematic manifest ...` help=query_dict(manifest_commands, ("manifest", "get", "alphabetize_valid_values")), ) @click.option( - "--display_name_as_label", - "-dnl", + "--data_model_labels", + "-dml", is_flag=True, - help=query_dict(manifest_commands, ("manifest", "get", "display_name_as_label")), + help=query_dict(manifest_commands, ("manifest", "get", "data_model_labels")), ) @click.pass_obj def get_manifest( @@ -124,7 +124,7 @@ def get_manifest( json_schema, output_xlsx, alphabetize_valid_values, - display_name_as_label, + data_model_labels, ): """ Running CLI with manifest generation options. @@ -148,7 +148,7 @@ def get_manifest( parsed_data_model = data_model_parser.parse_model() # Instantiate DataModelGraph - data_model_grapher = DataModelGraph(parsed_data_model, display_name_as_label) + data_model_grapher = DataModelGraph(parsed_data_model, data_model_labels) # Generate graph logger.info("Generating data model graph.") diff --git a/schematic/models/commands.py b/schematic/models/commands.py index 601d86682..23e99b2d9 100644 --- a/schematic/models/commands.py +++ b/schematic/models/commands.py @@ -117,7 +117,8 @@ def model(ctx, config): # use as `schematic model ...` @click.option( "--data_model_labels", "-dml", - is_flag=True, + default="class_label", + type=click.Choice(["display_label", "class_label"], case_sensitive=True), help=query_dict(model_commands, ("model", "submit", "data_model_labels")), ) @click.pass_obj @@ -132,7 +133,7 @@ def submit_manifest( restrict_rules, project_scope, table_manipulation, - display_name_as_label, + data_model_labels, ): """ Running CLI with manifest validation (optional) and submission options. @@ -144,7 +145,7 @@ def submit_manifest( metadata_model = MetadataModel( inputMModelLocation=jsonld, inputMModelLocationType="local", - display_name_as_label=display_name_as_label, + data_model_labels=data_model_labels, ) manifest_id = metadata_model.submit_metadata_manifest( @@ -205,8 +206,8 @@ def submit_manifest( help=query_dict(model_commands, ("model", "validate", "project_scope")), ) @click.option( - "--display_name_as_label", - "-dnl", + "--data_model_labels", + "-dml", is_flag=True, help=query_dict(model_commands, ("model", "validate", "data_model_labels")), ) diff --git a/schematic/schemas/commands.py b/schematic/schemas/commands.py index 6ef4989e7..368082b4c 100644 --- a/schematic/schemas/commands.py +++ b/schematic/schemas/commands.py @@ -42,10 +42,10 @@ def schema(): # use as `schematic model ...` "schema", type=click.Path(exists=True), metavar="", nargs=1 ) @click.option( - "--display_name_as_label", - "-dnl", + "--data_model_labels", + "-dml", is_flag=True, - help=query_dict(schema_commands, ("schema", "convert", "display_name_as_label")), + help=query_dict(schema_commands, ("schema", "convert", "data_model_labels")), ) @click.option( "--output_jsonld", @@ -53,7 +53,7 @@ def schema(): # use as `schematic model ...` metavar="", help=query_dict(schema_commands, ("schema", "convert", "output_jsonld")), ) -def convert(schema, display_name_as_label, output_jsonld): +def convert(schema, data_model_labels, output_jsonld): """ Running CLI to convert data model specification in CSV format to data model in JSON-LD format. 
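
(Editorial sketch, not part of the patch: the convert command can be exercised
through click's test runner. The file paths below are hypothetical, and click
rejects the schema path unless it exists; note that --data_model_labels is
reworked into a value-taking choice option later in this series.)

    from click.testing import CliRunner

    runner = CliRunner()
    result = runner.invoke(
        convert, ["example.model.csv", "--output_jsonld", "example.model.jsonld"]
    )
    # result.exit_code is 0 on success; click validates the schema path first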
@@ -65,7 +65,7 @@ def convert(schema, display_name_as_label, output_jsonld): st = time.time() # Instantiate Parser - data_model_parser = DataModelParser(schema, display_name_as_label) + data_model_parser = DataModelParser(schema, data_model_labels) # Parse Model logger.info("Parsing data model.") @@ -73,7 +73,7 @@ def convert(schema, display_name_as_label, output_jsonld): # Convert parsed model to graph # Instantiate DataModelGraph - data_model_grapher = DataModelGraph(parsed_data_model, display_name_as_label) + data_model_grapher = DataModelGraph(parsed_data_model, data_model_labels) # Generate graph logger.info("Generating data model graph.") diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py index b3dc29727..8335b4886 100644 --- a/schematic/store/synapse.py +++ b/schematic/store/synapse.py @@ -1524,10 +1524,10 @@ def _add_id_columns_to_manifest( manifest["Id"] = "" # Retrieve the ID column name (id, Id and ID) are treated the same. - id_col_name = [col for col in manifest.columns if col.lower() == 'id'][0] + id_col_name = [col for col in manifest.columns if col.lower() == "id"][0] # Check if values have been added to the Id coulumn, if not add a UUID so value in the row is not blank. - for idx,row in manifest.iterrows(): + for idx, row in manifest.iterrows(): if not row[id_col_name]: gen_uuid = str(uuid.uuid4()) row[id_col_name] = gen_uuid diff --git a/schematic/utils/schema_utils.py b/schematic/utils/schema_utils.py index ef9edac1c..eb5160ef9 100644 --- a/schematic/utils/schema_utils.py +++ b/schematic/utils/schema_utils.py @@ -92,7 +92,7 @@ def get_label_from_display_name( display_name: str, entry_type: str, strict_camel_case: bool = False, - data_model_labels: str = 'class_label', + data_model_labels: str = "class_label", ) -> str: """Get node label from provided display name, based on whether the node is a class or property Args: @@ -105,7 +105,7 @@ def get_label_from_display_name( ValueError if entry_type.lower(), is not either 'class' or 'property' """ - if data_model_labels=='class_label': + if data_model_labels == "class_label": blacklisted_chars = ["(", ")", ".", " ", "-"] # Check that display name can be used as a label. 
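A caveat on the `_add_id_columns_to_manifest` hunk earlier in this diff: assigning into the `row` Series that `iterrows()` yields is not guaranteed to write back to the DataFrame. A vectorized sketch of the same blank-Id fill that writes through `.loc` instead, using a toy frame:

```python
import uuid
import pandas as pd

manifest = pd.DataFrame({"Id": ["", "keep-this-id", None], "Sample": ["a", "b", "c"]})

# id/Id/ID are treated the same, as in the hunk above
id_col_name = [col for col in manifest.columns if col.lower() == "id"][0]

# Fill every blank or missing Id with a fresh UUID, writing back via .loc
blank = manifest[id_col_name].isna() | (manifest[id_col_name] == "")
manifest.loc[blank, id_col_name] = [str(uuid.uuid4()) for _ in range(blank.sum())]

assert manifest[id_col_name].notna().all() and manifest[id_col_name].ne("").all()
```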
valid_display_name = check_if_display_name_is_valid_label( From 225140decd48a3136044c41569d6c3620786ecf0 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice <85905780+mialy-defelice@users.noreply.github.com> Date: Thu, 25 Jan 2024 18:26:10 -0800 Subject: [PATCH 083/199] update all references throughout, update tests, black --- schematic/schemas/commands.py | 5 +- schematic/schemas/data_model_parser.py | 165 ++------ schematic/utils/schema_utils.py | 4 +- schematic_api/api/openapi/api.yaml | 120 ++++-- schematic_api/api/routes.py | 544 +++++++++++++++---------- tests/conftest.py | 6 +- tests/test_api.py | 7 + tests/test_metadata.py | 30 +- tests/test_schemas.py | 76 ++-- tests/test_utils.py | 4 +- tests/test_validation.py | 2 +- tests/test_viz.py | 7 +- 12 files changed, 522 insertions(+), 448 deletions(-) diff --git a/schematic/schemas/commands.py b/schematic/schemas/commands.py index 368082b4c..f9660764c 100644 --- a/schematic/schemas/commands.py +++ b/schematic/schemas/commands.py @@ -44,7 +44,8 @@ def schema(): # use as `schematic model ...` @click.option( "--data_model_labels", "-dml", - is_flag=True, + default="class_label", + type=click.Choice(["display_label", "class_label"], case_sensitive=True), help=query_dict(schema_commands, ("schema", "convert", "data_model_labels")), ) @click.option( @@ -65,7 +66,7 @@ def convert(schema, data_model_labels, output_jsonld): st = time.time() # Instantiate Parser - data_model_parser = DataModelParser(schema, data_model_labels) + data_model_parser = DataModelParser(schema) # Parse Model logger.info("Parsing data model.") diff --git a/schematic/schemas/data_model_parser.py b/schematic/schemas/data_model_parser.py index 100d6dbcf..680a7eb61 100644 --- a/schematic/schemas/data_model_parser.py +++ b/schematic/schemas/data_model_parser.py @@ -25,7 +25,6 @@ class DataModelParser: def __init__( self, path_to_data_model: str, - data_model_labels: bool = False, ) -> None: """ Args: @@ -35,7 +34,6 @@ def __init__( self.path_to_data_model = path_to_data_model self.model_type = self.get_model_type() self.base_schema_path = None - self.data_model_labels = data_model_labels def _get_base_schema_path(self, base_schema: str = None) -> str: """Evaluate path to base schema. @@ -100,9 +98,7 @@ def parse_model(self) -> Dict[str, dict[str, Any]]: model_dict = csv_parser.parse_csv_model(self.path_to_data_model) elif self.model_type == "JSONLD": jsonld_parser = DataModelJSONLDParser() - model_dict = jsonld_parser.parse_jsonld_model( - self.path_to_data_model, self.data_model_labels - ) + model_dict = jsonld_parser.parse_jsonld_model(self.path_to_data_model) else: raise ValueError( f"Schematic only accepts models of type CSV or JSONLD, you provided a model type {self.model_type}, please resubmit in the proper format." @@ -248,123 +244,47 @@ def __init__( # Load relationships dictionary. self.rel_dict = self.dmr.define_data_model_relationships() - def parse_list_of_dict_entry( - self, - rel_entry: list, - id_jsonld_key: str, - data_model_labels: str, - model_jsonld: list[dict], - ) -> list[str]: - """Parse a list of dictionaries entry, so it can be added to the attr_rel_dictionary - Args: - rel_entry: Given a single entry and relationship in a JSONLD data model, the recorded value - id_jsonld_key, str: the jsonld key for id - data_model_labels: str, display_label or class_label. - display_label, use the display name as a label, if it is valid (contains no blacklisted characters) otherwise will default to schema_label. 
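With `data_model_labels` removed from the parser above, the label policy now enters the pipeline one step later, when the graph is built. A sketch of the resulting flow, mirroring the calls visible in `commands.py` and `tests/conftest.py`:

```python
from schematic.schemas.data_model_parser import DataModelParser
from schematic.schemas.data_model_graph import DataModelGraph, DataModelGraphExplorer

# Parsing is now label-agnostic...
parsed_data_model = DataModelParser(
    path_to_data_model="tests/data/example.model.jsonld"
).parse_model()

# ...and the label policy is applied when the graph is built.
graph_data_model = DataModelGraph(
    parsed_data_model, data_model_labels="display_label"
).generate_data_model_graph()
dmge = DataModelGraphExplorer(graph_data_model)
```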
- class_label, default, use standard class or property label. model_jsonld: list of dictionaries, each dictionary is an entry in the jsonld data model - Returns: - parsed_rel_entry: an entry that has been parsed based on its input type and characteristics. - """ - parsed_rel_entry = [r[id_jsonld_key].split(":")[1] for r in rel_entry] - # Convert labels to display names if specified - if data_model_labels == "display_label": - parsed_rel_entry = self.convert_entry_to_dn_label( - parsed_rel_entry, model_jsonld - ) - return parsed_rel_entry - - def parse_string_entry( - self, rel_entry: str, data_model_labels: str, model_jsonld: list[dict] - ) -> Union[bool, str]: - """ - Parse a string entry, so it can be added to the attr_rel_dictionary - Args: - rel_entry: Given a single entry and relationship in a JSONLD data model, the recorded value - data_model_labels: str, display_label or class_label. - display_label, use the display name as a label, if it is valid (contains no blacklisted characters) otherwise will default to schema_label. - class_label, default, use standard class or property label. model_jsonld: list of dictionaries, each dictionary is an entry in the jsonld data model - Returns: - parsed_rel_entry: an entry that has been parsed based on its input type and characteristics. - """ - # Remove contexts and treat strings as appropriate. - if ":" in rel_entry and "http:" not in rel_entry: - parsed_rel_entry = rel_entry.split(":")[1] - - # Convert true/false strings to boolean - if parsed_rel_entry.lower() == "true": - parsed_rel_entry = True - elif parsed_rel_entry.lower == "false": - parsed_rel_entry = False - else: - # Convert labels to display names if specified - if data_model_labels == "display_label": - parsed_rel_entry = self.convert_entry_to_dn_label( - parsed_rel_entry, model_jsonld - ) - else: - parsed_rel_entry = rel_entry - # Convert labels to display names if specified - if data_model_labels == "display_label": - parsed_rel_entry = self.convert_entry_to_dn_label( - parsed_rel_entry, model_jsonld - ) - return parsed_rel_entry - - def parse_basic_entry( - self, rel_entry: str, data_model_labels: str, model_jsonld: list[dict] - ) -> str: - """For basic entry, just return or convert to display name if indicated. - Args: - rel_entry: Given a single entry and relationship in a JSONLD data model, the recorded value - data_model_labels: str, display_label or class_label. - display_label, use the display name as a label, if it is valid (contains no blacklisted characters) otherwise will default to schema_label. - class_label, default, use standard class or property label. model_jsonld: list of dictionaries, each dictionary is an entry in the jsonld data model - Returns: - parsed_rel_entry: an entry that has been parsed based on its input type and characteristics. 
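Each of the three helpers removed here ended with the same "convert labels to display names" step; the replacement centralizes that around a label-to-display-name lookup (`label_to_dn_dict`, kept below). A toy version of that mapping, with JSON-LD keys assumed from schematic's conventions (`@id`, `sms:displayName`):

```python
model_jsonld = [
    {"@id": "bts:PatientID", "sms:displayName": "Patient ID"},
    {"@id": "bts:Sex", "sms:displayName": "Sex"},
]

def label_to_dn_dict(model_jsonld: list[dict]) -> dict:
    """Map each entry's label (its @id without the context prefix) to its display name."""
    return {
        entry["@id"].split(":")[1]: entry["sms:displayName"] for entry in model_jsonld
    }

dn_label_dict = label_to_dn_dict(model_jsonld)
assert dn_label_dict["PatientID"] == "Patient ID"
```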
- """ - parsed_rel_entry = rel_entry - # Convert labels to display names if specified - if data_model_labels == "display_label": - parsed_rel_entry = self.convert_entry_to_dn_label( - parsed_rel_entry, model_jsonld - ) - return parsed_rel_entry - def parse_entry( self, rel_entry: any, id_jsonld_key: str, - data_model_labels: str, - model_jsonld: list[dict], + dn_label_dict: dict[str:str], + model_jsonld: dict, ) -> Any: - """Parse an input entry based on certain attributes so it can be added used in further downstream processing + """Parse an input entry based on certain attributes Args: rel_entry: Given a single entry and relationship in a JSONLD data model, the recorded value id_jsonld_key, str: the jsonld key for id - data_model_labels: str, display_label or class_label. - display_label, use the display name as a label, if it is valid (contains no blacklisted characters) otherwise will default to schema_label. - class_label, default, use standard class or property label. model_jsonld: list of dictionaries, each dictionary is an entry in the jsonld data model Returns: - parsed_rel_entry: an entry that has been parsed based on its input type and characteristics. + parsed_rel_entry: an entry that has been parsed base on its input type and characteristics. """ # Retrieve ID from single value dictionary if type(rel_entry) == dict and len(rel_entry.keys()) == 1: parsed_rel_entry = rel_entry["@id"] # Parse list of dictionaries to make a list of entries with context stripped (will update this section when contexts added.) elif type(rel_entry) == list and type(rel_entry[0]) == dict: - parsed_rel_entry = self.parse_list_of_dict_entry( - rel_entry, id_jsonld_key, data_model_labels, model_jsonld + parsed_rel_entry = self.convert_entry_to_dn_label( + [r[id_jsonld_key].split(":")[1] for r in rel_entry], model_jsonld ) # Strip context from string and convert true/false to bool elif type(rel_entry) == str: - parsed_rel_entry = self.parse_string_entry( - rel_entry, data_model_labels, model_jsonld - ) + # Remove contexts and treat strings as appropriate. + if ":" in rel_entry and "http:" not in rel_entry: + parsed_rel_entry = rel_entry.split(":")[1] + # Convert true/false strings to boolean + if parsed_rel_entry.lower() == "true": + parsed_rel_entry = True + elif parsed_rel_entry.lower == "false": + parsed_rel_entry = False + else: + parsed_rel_entry = self.convert_entry_to_dn_label( + rel_entry, model_jsonld + ) + # For anything else get that else: - parsed_rel_entry = self.parse_basic_entry( - rel_entry, data_model_labels, model_jsonld - ) + parsed_rel_entry = self.convert_entry_to_dn_label(rel_entry, model_jsonld) + return parsed_rel_entry def label_to_dn_dict(self, model_jsonld: list[dict]): @@ -395,7 +315,6 @@ def convert_entry_to_dn_label( """ # Get a dictionary of display_names mapped to labels dn_label_dict = self.label_to_dn_dict(model_jsonld=model_jsonld) - # Handle if using the display name as the label if type(parsed_rel_entry) == list: parsed_rel_entry = [ @@ -408,15 +327,11 @@ def convert_entry_to_dn_label( parsed_rel_entry = dn_label_dict.get(parsed_rel_entry) return parsed_rel_entry - def gather_jsonld_attributes_relationships( - self, model_jsonld: list[dict], data_model_labels: str - ) -> Dict: + def gather_jsonld_attributes_relationships(self, model_jsonld: List[dict]) -> Dict: """ Args: model_jsonld: list of dictionaries, each dictionary is an entry in the jsonld data model - data_model_labels: str, display_label or class_label. 
- display_label, use the display name as a label, if it is valid (contains no blacklisted characters) otherwise will default to schema_label. - class_label, default, use standard class or property label. Returns: + Returns: attr_rel_dictionary: dict, {Node Display Name: {Relationships: { @@ -439,6 +354,9 @@ def gather_jsonld_attributes_relationships( self.rel_dict[key]["jsonld_key"] for key in jsonld_keys_to_extract ] + # Get a dictionary of display names to labels to identify values explicitly recorded + dn_label_dict = self.label_to_dn_dict(model_jsonld=model_jsonld) + # Build the attr_rel_dictionary attr_rel_dictionary = {} # Move through each entry in the jsonld model @@ -463,13 +381,12 @@ def gather_jsonld_attributes_relationships( if rel_vals["jsonld_key"] in entry.keys() and rel_vals["csv_header"]: # Retrieve entry value associated with the given relationship rel_entry = entry[rel_vals["jsonld_key"]] - # If there is an entry parse it by type and add to the attr:relationships dictionary. if rel_entry: parsed_rel_entry = self.parse_entry( rel_entry=rel_entry, id_jsonld_key=id_jsonld_key, - data_model_labels=data_model_labels, + dn_label_dict=dn_label_dict, model_jsonld=model_jsonld, ) rel_csv_header = self.rel_dict[rel_key]["csv_header"] @@ -520,30 +437,24 @@ def gather_jsonld_attributes_relationships( ].update( {rel_csv_header: [entry[label_jsonld_key]]} ) + else: attr_rel_dictionary[attr_key]["Relationships"].update( {rel_csv_header: parsed_rel_entry} ) - # Add values to the dictionary that do not directly have a corillary to the CSV + elif ( rel_vals["jsonld_key"] in entry.keys() and not rel_vals["csv_header"] ): - # If using the display name as the label, ensure that the display name is set for the label - if ( - data_model_labels == "display_label" - and rel_vals["jsonld_key"] == label_jsonld_key - ): - rel_entry = entry[dn_jsonld_key] - else: - rel_entry = entry[rel_vals["jsonld_key"]] - - # If there is an entry parse it by type and add to the attr:relationships dictionary. + # Retrieve entry value associated with the given relationship + rel_entry = entry[rel_vals["jsonld_key"]] + # If there is an entry parset it by type and add to the attr:relationships dictionary. if rel_entry: parsed_rel_entry = self.parse_entry( rel_entry=rel_entry, id_jsonld_key=id_jsonld_key, - data_model_labels=data_model_labels, + dn_label_dict=dn_label_dict, model_jsonld=model_jsonld, ) # Add relationships for each attribute and relationship to the dictionary @@ -555,14 +466,10 @@ def gather_jsonld_attributes_relationships( def parse_jsonld_model( self, path_to_data_model: str, - data_model_labels: str, ): """Convert raw JSONLD data model to attributes relationship dictionary. Args: path_to_data_model: str, path to JSONLD data model - data_model_labels: str, display_label or class_label. - display_label, use the display name as a label, if it is valid (contains no blacklisted characters) otherwise will default to schema_label. - class_label, default, use standard class or property label. Returns: model_dict: dict, {Node Display Name: @@ -576,7 +483,5 @@ def parse_jsonld_model( # Load the json_ld model to df json_load = load_json(path_to_data_model) # Convert dataframe to attributes relationship dictionary. 
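The structure `gather_jsonld_attributes_relationships` accumulates, per its docstring, keys nodes by display name and relationships by CSV header. A hypothetical two-node example of the output (attribute names and values are illustrative):

```python
attr_rel_dictionary = {
    "Patient": {
        "Relationships": {
            "Attribute": "Patient",
            "DependsOn": ["Patient ID", "Sex"],
            "Required": False,
        },
    },
    "Patient ID": {
        "Relationships": {
            "Attribute": "Patient ID",
            "Required": True,
        },
    },
}
```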
- model_dict = self.gather_jsonld_attributes_relationships( - json_load["@graph"], data_model_labels - ) + model_dict = self.gather_jsonld_attributes_relationships(json_load["@graph"]) return model_dict diff --git a/schematic/utils/schema_utils.py b/schematic/utils/schema_utils.py index eb5160ef9..d43d86887 100644 --- a/schematic/utils/schema_utils.py +++ b/schematic/utils/schema_utils.py @@ -105,7 +105,7 @@ def get_label_from_display_name( ValueError if entry_type.lower(), is not either 'class' or 'property' """ - if data_model_labels == "class_label": + if data_model_labels == "display_label": blacklisted_chars = ["(", ")", ".", " ", "-"] # Check that display name can be used as a label. valid_display_name = check_if_display_name_is_valid_label( @@ -129,7 +129,7 @@ def get_label_from_display_name( ][0] logger.warning( - f"Cannot use display name {display_name} as the schema label, becaues it is not formatted properly. Please remove all spaces and blacklisted characters: {str(blacklisted_chars)}. The following label was assigned instead: {label}" + f"Cannot use display name {display_name} as the data model label, becaues it is not formatted properly. Please remove all spaces and blacklisted characters: {str(blacklisted_chars)}. The following label was assigned instead: {label}" ) else: if entry_type.lower() == "class": diff --git a/schematic_api/api/openapi/api.yaml b/schematic_api/api/openapi/api.yaml index 62f70060a..ead5d9688 100644 --- a/schematic_api/api/openapi/api.yaml +++ b/schematic_api/api/openapi/api.yaml @@ -99,12 +99,16 @@ paths: description: If using Google Sheets, can set the strictness of Google Sheets regex match validation. True (default) will block users from entering incorrect values, False will throw a warning to users. required: false - in: query - name: display_name_as_label + name: data_model_labels schema: - type: boolean + type: string nullable: true - default: false - description: If true then the display name used in the model will be used as the SchemaLabel (if it is fomatted properly), if false, default, then a SchemaLabel will be generated. + enum: ["display_label", "class_label"] + default: 'class_label' + description: Choose how to set the label in the data model. + display_label, use the display name as a label, if it is valid (contains no blacklisted characters) otherwise will default to class_label. + class_label, default, use standard class or property label. + Do not change from default unless there is a real need, using 'display_label' can have consequences if not used properly. required: false operationId: schematic_api.api.routes.get_manifest_route responses: @@ -247,12 +251,16 @@ paths: example: Patient required: true - in: query - name: display_name_as_label + name: data_model_labels schema: - type: boolean + type: string nullable: true - default: false - description: If 'true' then the display name used in the model will be used as the SchemaLabel (if it is fomatted properly), if 'false', default, then a SchemaLabel will be generated. + enum: ["display_label", "class_label"] + default: 'class_label' + description: Choose how to set the label in the data model. + display_label, use the display name as a label, if it is valid (contains no blacklisted characters) otherwise will default to class_label. + class_label, default, use standard class or property label. + Do not change from default unless there is a real need, using 'display_label' can have consequences if not used properly. 
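With the enum in place, clients pass the policy as a plain query parameter. An illustrative call against a local dev server; the path for `get_manifest_route` is assumed here to be `/v1/manifest/generate`:

```python
import requests

resp = requests.get(
    "http://localhost:3001/v1/manifest/generate",  # assumed dev URL and path
    params={
        "schema_url": (
            "https://raw.githubusercontent.com/Sage-Bionetworks/schematic"
            "/develop/tests/data/example.model.jsonld"
        ),
        "data_type": "Patient",
        "use_annotations": "false",
        "data_model_labels": "class_label",  # or "display_label"
    },
)
print(resp.status_code)
```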
required: false - in: query name: restrict_rules @@ -341,12 +349,16 @@ paths: https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.model.jsonld required: true - in: query - name: display_name_as_label + name: data_model_labels schema: - type: boolean + type: string nullable: true - default: false - description: If true then the display name used in the model will be used as the SchemaLabel (if it is fomatted properly), if false, default, then a SchemaLabel will be generated. + enum: ["display_label", "class_label"] + default: 'class_label' + description: Choose how to set the label in the data model. + display_label, use the display name as a label, if it is valid (contains no blacklisted characters) otherwise will default to class_label. + class_label, default, use standard class or property label. + Do not change from default unless there is a real need, using 'display_label' can have consequences if not used properly. required: false - in: query name: data_type @@ -471,12 +483,16 @@ paths: description: if False return component requirements as a list; if True return component requirements as a dependency graph (i.e. a DAG) required: true - in: query - name: display_name_as_label + name: data_model_labels schema: - type: boolean + type: string nullable: true - default: false - description: If true then the display name used in the model will be used as the SchemaLabel (if it is fomatted properly), if false, default, then a SchemaLabel will be generated. + enum: ["display_label", "class_label"] + default: 'class_label' + description: Choose how to set the label in the data model. + display_label, use the display name as a label, if it is valid (contains no blacklisted characters) otherwise will default to class_label. + class_label, default, use standard class or property label. + Do not change from default unless there is a real need, using 'display_label' can have consequences if not used properly. required: false operationId: schematic_api.api.routes.get_component_requirements responses: @@ -512,12 +528,16 @@ paths: https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.model.jsonld required: true - in: query - name: display_name_as_label + name: data_model_labels schema: - type: boolean + type: string nullable: true - default: false - description: If true then the display name used in the model will be used as the SchemaLabel (if it is fomatted properly), if false, default, then a SchemaLabel will be generated. + enum: ["display_label", "class_label"] + default: 'class_label' + description: Choose how to set the label in the data model. + display_label, use the display name as a label, if it is valid (contains no blacklisted characters) otherwise will default to class_label. + class_label, default, use standard class or property label. + Do not change from default unless there is a real need, using 'display_label' can have consequences if not used properly. required: false - in: query name: data_type @@ -1179,12 +1199,16 @@ paths: example: FamilyHistory required: true - in: query - name: display_name_as_label + name: data_model_labels schema: - type: boolean + type: string nullable: true - default: false - description: If true then the display name used in the model will be used as the SchemaLabel (if it is fomatted properly), if false, default, then a SchemaLabel will be generated. + enum: ["display_label", "class_label"] + default: 'class_label' + description: Choose how to set the label in the data model. 
+ display_label, use the display name as a label, if it is valid (contains no blacklisted characters) otherwise will default to class_label. + class_label, default, use standard class or property label. + Do not change from default unless there is a real need, using 'display_label' can have consequences if not used properly. required: false - in: query name: return_display_names @@ -1225,12 +1249,16 @@ paths: example: 'component' required: true - in: query - name: display_name_as_label + name: data_model_labels schema: - type: boolean + type: string nullable: true - default: false - description: If true then the display name used in the model will be used as the SchemaLabel (if it is fomatted properly), if false, default, then a SchemaLabel will be generated. + enum: ["display_label", "class_label"] + default: 'class_label' + description: Choose how to set the label in the data model. + display_label, use the display name as a label, if it is valid (contains no blacklisted characters) otherwise will default to class_label. + class_label, default, use standard class or property label. + Do not change from default unless there is a real need, using 'display_label' can have consequences if not used properly. required: false responses: "200": @@ -1273,12 +1301,16 @@ paths: example: 'plain' required: true - in: query - name: display_name_as_label + name: data_model_labels schema: - type: boolean + type: string nullable: true - default: false - description: If true then the display name used in the model will be used as the SchemaLabel (if it is fomatted properly), if false, default, then a SchemaLabel will be generated. + enum: ["display_label", "class_label"] + default: 'class_label' + description: Choose how to set the label in the data model. + display_label, use the display name as a label, if it is valid (contains no blacklisted characters) otherwise will default to class_label. + class_label, default, use standard class or property label. + Do not change from default unless there is a real need, using 'display_label' can have consequences if not used properly. required: false responses: "200": @@ -1306,12 +1338,16 @@ paths: https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.model.jsonld required: true - in: query - name: display_name_as_label + name: data_model_labels schema: - type: boolean + type: string nullable: true - default: false - description: If true then the display name used in the model will be used as the SchemaLabel (if it is fomatted properly), if false, default, then a SchemaLabel will be generated. + enum: ["display_label", "class_label"] + default: 'class_label' + description: Choose how to set the label in the data model. + display_label, use the display name as a label, if it is valid (contains no blacklisted characters) otherwise will default to class_label. + class_label, default, use standard class or property label. + Do not change from default unless there is a real need, using 'display_label' can have consequences if not used properly. required: false responses: "200": @@ -1354,12 +1390,16 @@ paths: description: Whether to include the indexes of the datafram in the returned JSON string (true) or not (false). 
required: true - in: query - name: display_name_as_label + name: data_model_labels schema: - type: boolean + type: string nullable: true - default: false - description: If true then the display name used in the model will be used as the SchemaLabel (if it is fomatted properly), if false, default, then a SchemaLabel will be generated. + enum: ["display_label", "class_label"] + default: 'class_label' + description: Choose how to set the label in the data model. + display_label, use the display name as a label, if it is valid (contains no blacklisted characters) otherwise will default to class_label. + class_label, default, use standard class or property label. + Do not change from default unless there is a real need, using 'display_label' can have consequences if not used properly. required: false responses: "200": diff --git a/schematic_api/api/routes.py b/schematic_api/api/routes.py index b12b3e18d..713a3a59b 100644 --- a/schematic_api/api/routes.py +++ b/schematic_api/api/routes.py @@ -28,17 +28,25 @@ from schematic.schemas.data_model_parser import DataModelParser from schematic.schemas.data_model_graph import DataModelGraph, DataModelGraphExplorer -#from schematic.schemas.data_model_relationships import DataModelRelationships + +# from schematic.schemas.data_model_relationships import DataModelRelationships from schematic.store.synapse import SynapseStorage, ManifestDownload -from synapseclient.core.exceptions import SynapseHTTPError, SynapseAuthenticationError, SynapseUnmetAccessRestrictions, SynapseNoCredentialsError, SynapseTimeoutError +from synapseclient.core.exceptions import ( + SynapseHTTPError, + SynapseAuthenticationError, + SynapseUnmetAccessRestrictions, + SynapseNoCredentialsError, + SynapseTimeoutError, +) from schematic.utils.general import entity_type_mapping from schematic.utils.schema_utils import get_property_label_from_display_name logger = logging.getLogger(__name__) logging.basicConfig(level=logging.DEBUG) -def config_handler(asset_view: str=None): + +def config_handler(asset_view: str = None): # check if path to config is provided path_to_config = app.config["SCHEMATIC_CONFIG"] if path_to_config is not None and os.path.isfile(path_to_config): @@ -46,81 +54,83 @@ def config_handler(asset_view: str=None): if asset_view is not None: CONFIG.synapse_master_fileview_id = asset_view + class JsonConverter: - ''' + """ Mainly handle converting json str or json file to csv - ''' + """ + def readJson(self, json_str=None, manifest_file=None): - ''' + """ The purpose of this function is to read either json str or json file - input: + input: json_str: json object - manifest_file: manifest file object - output: + manifest_file: manifest file object + output: return a dataframe - ''' + """ if json_str: df = pd.read_json(json_str) - elif manifest_file: + elif manifest_file: df = pd.read_json(manifest_file.read()) return df - + def get_file(self, file_key): - ''' + """ The purpose of this function is to get the file uploaded by user - input: - file_key: Defined in api.yaml. This key refers to the files uploaded. - manifest_file: manifest file object - output: + input: + file_key: Defined in api.yaml. This key refers to the files uploaded. 
+ manifest_file: manifest file object + output: return file object - ''' + """ manifest_file = connexion.request.files[file_key] return manifest_file def IsJsonFile(self, manifest_file): - ''' + """ The purpose of this function is check if the manifest file that gets uploaded is a json or not - input: - manifest_file: manifest file object - output: + input: + manifest_file: manifest file object + output: return True if it is json - ''' + """ file_type = manifest_file.content_type - if file_type == 'application/json': + if file_type == "application/json": return True - else: + else: return False def convert_df_to_csv(self, df, file_name): - ''' + """ The purpose of this function is to convert dataframe to a temporary CSV file - input: + input: df: dataframe file_name: file name of the output csv - output: + output: return temporary file path of the output csv - ''' + """ # convert dataframe to a temporary csv file temp_dir = tempfile.gettempdir() temp_path = os.path.join(temp_dir, file_name) - df.to_csv(temp_path, encoding = 'utf-8', index=False) + df.to_csv(temp_path, encoding="utf-8", index=False) return temp_path def convert_json_str_to_csv(self, json_str, file_name): - ''' + """ The purpose of this function is to convert json str to a temporary csv file - input: + input: json_str: json object file_name: file name of the output csv - output: + output: return temporary file path of the output csv - ''' + """ # convert json to df - df = self.readJson(json_str = json_str) + df = self.readJson(json_str=json_str) # convert dataframe to a temporary csv file temp_path = self.convert_df_to_csv(df, file_name) @@ -128,31 +138,32 @@ def convert_json_str_to_csv(self, json_str, file_name): return temp_path def convert_json_file_to_csv(self, file_key): - ''' + """ The purpose of this function is to convert json str to a temporary csv file - input: - file_key: Defined in api.yaml. This key refers to the files uploaded. - output: + input: + file_key: Defined in api.yaml. This key refers to the files uploaded. + output: return temporary file path of the output csv - ''' + """ # get manifest file manifest_file = self.get_file(file_key) if self.IsJsonFile(manifest_file): # read json as dataframe - df = self.readJson(manifest_file = manifest_file) + df = self.readJson(manifest_file=manifest_file) # get base file name base = os.path.splitext(manifest_file.filename)[0] - # name the new csv file - new_file_name = base + '.csv' + # name the new csv file + new_file_name = base + ".csv" # convert to csv temp_path = self.convert_df_to_csv(df, new_file_name) return temp_path - else: - temp_path = save_file(file_key='file_name') + else: + temp_path = save_file(file_key="file_name") return temp_path + def get_access_token() -> str: """Get access token from header""" bearer_token = None @@ -164,28 +175,31 @@ def get_access_token() -> str: if auth_header.startswith("Bearer "): bearer_token = auth_header.split(" ")[1] return bearer_token - + + def parse_bool(str_bool): - if str_bool.lower().startswith('t'): + if str_bool.lower().startswith("t"): return True - elif str_bool.lower().startswith('f'): + elif str_bool.lower().startswith("f"): return False else: raise ValueError( "String boolean does not appear to be true or false. Please verify input." ) + def return_as_json(manifest_local_file_path): manifest_csv = pd.read_csv(manifest_local_file_path) manifest_json = manifest_csv.to_dict(orient="records") return manifest_json + def save_file(file_key="csv_file"): - ''' - input: + """ + input: file_key: Defined in api.yaml. 
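At their core, the `JsonConverter` methods above are a pandas round trip. A dependency-light sketch of `convert_json_str_to_csv` with a usage line (wrapping the string in `StringIO` avoids a deprecation warning on newer pandas):

```python
import io
import os
import tempfile

import pandas as pd

def convert_json_str_to_csv(json_str: str, file_name: str) -> str:
    """Write a JSON-records string to a temporary CSV and return the CSV's path."""
    df = pd.read_json(io.StringIO(json_str))
    temp_path = os.path.join(tempfile.gettempdir(), file_name)
    df.to_csv(temp_path, encoding="utf-8", index=False)
    return temp_path

path = convert_json_str_to_csv('[{"Patient ID": 1, "Sex": "Female"}]', "example.csv")
print(open(path, encoding="utf-8").read())
```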
This key refers to the files uploaded. By default, set to "csv_file" Return a temporary file path for the uploaded a given file - ''' + """ manifest_file = connexion.request.files[file_key] # save contents of incoming manifest CSV file to temp file @@ -197,25 +211,31 @@ def save_file(file_key="csv_file"): return temp_path + def initalize_metadata_model(schema_url, data_model_labels): # get path to temp data model file (csv or jsonld) as appropriate data_model = get_temp_model_path(schema_url) - metadata_model = MetadataModel( - inputMModelLocation=data_model, inputMModelLocationType="local", data_model_labels=data_model_labels, + inputMModelLocation=data_model, + inputMModelLocationType="local", + data_model_labels=data_model_labels, ) return metadata_model + def get_temp_jsonld(schema_url): # retrieve a JSON-LD via URL and store it in a temporary location with urllib.request.urlopen(schema_url) as response: - with tempfile.NamedTemporaryFile(delete=False, suffix=".model.jsonld") as tmp_file: + with tempfile.NamedTemporaryFile( + delete=False, suffix=".model.jsonld" + ) as tmp_file: shutil.copyfileobj(response, tmp_file) # get path to temporary JSON-LD file return tmp_file.name + def get_temp_csv(schema_url): # retrieve a CSV via URL and store it in a temporary location with urllib.request.urlopen(schema_url) as response: @@ -225,138 +245,180 @@ def get_temp_csv(schema_url): # get path to temporary csv file return tmp_file.name + def get_temp_model_path(schema_url): # Get model type: - model_extension = pathlib.Path(schema_url).suffix.replace('.', '').upper() - if model_extension == 'CSV': + model_extension = pathlib.Path(schema_url).suffix.replace(".", "").upper() + if model_extension == "CSV": temp_path = get_temp_csv(schema_url) - elif model_extension == 'JSONLD': + elif model_extension == "JSONLD": temp_path = get_temp_jsonld(schema_url) else: - raise ValueError("Did not provide a valid model type CSV or JSONLD, please check submission and try again.") + raise ValueError( + "Did not provide a valid model type CSV or JSONLD, please check submission and try again." + ) return temp_path # @before_request -def get_manifest_route(schema_url: str, use_annotations: bool, dataset_ids=None, asset_view = None, output_format=None, title=None, strict_validation:bool=True): +def get_manifest_route( + schema_url: str, + use_annotations: bool, + dataset_ids=None, + asset_view=None, + output_format=None, + title=None, + strict_validation: bool = True, +): """Get the immediate dependencies that are related to a given source node. - Args: - schema_url: link to data model in json ld or csv format - title: title of a given manifest. - dataset_id: Synapse ID of the "dataset" entity on Synapse (for a given center/project). - output_format: contains three option: "excel", "google_sheet", and "dataframe". if set to "excel", return an excel spreadsheet - use_annotations: Whether to use existing annotations during manifest generation - asset_view: ID of view listing all project data assets. For example, for Synapse this would be the Synapse ID of the fileview listing all data assets for a given project. - strict: bool, strictness with which to apply validation rules to google sheets. - Returns: - Googlesheet URL (if sheet_url is True), or pandas dataframe (if sheet_url is False). + Args: + schema_url: link to data model in json ld or csv format + title: title of a given manifest. + dataset_id: Synapse ID of the "dataset" entity on Synapse (for a given center/project). 
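`get_temp_model_path` above keys off the URL suffix to decide between the CSV and JSON-LD loaders. A standalone check of that dispatch, using `pathlib` exactly as the route does:

```python
import pathlib

def get_model_extension(schema_url: str) -> str:
    """Uppercased suffix, e.g. 'CSV' or 'JSONLD', as get_temp_model_path derives it."""
    return pathlib.Path(schema_url).suffix.replace(".", "").upper()

assert get_model_extension("https://example.org/example.model.jsonld") == "JSONLD"
assert get_model_extension("https://example.org/example.model.csv") == "CSV"
```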
+ output_format: contains three option: "excel", "google_sheet", and "dataframe". if set to "excel", return an excel spreadsheet + use_annotations: Whether to use existing annotations during manifest generation + asset_view: ID of view listing all project data assets. For example, for Synapse this would be the Synapse ID of the fileview listing all data assets for a given project. + strict: bool, strictness with which to apply validation rules to google sheets. + Returns: + Googlesheet URL (if sheet_url is True), or pandas dataframe (if sheet_url is False). """ # Get access token from request header access_token = get_access_token() # call config_handler() - config_handler(asset_view = asset_view) - + config_handler(asset_view=asset_view) + temp_path = get_temp_model_path(schema_url=schema_url) # Gather all data_types to make manifests for. all_args = connexion.request.args args_dict = dict(all_args.lists()) - data_type = args_dict['data_type'] + data_type = args_dict["data_type"] # Gather all dataset_ids try: - dataset_ids = args_dict['dataset_id'] + dataset_ids = args_dict["dataset_id"] except: pass - + if dataset_ids: # Check that the number of submitted data_types matches # the number of dataset_ids (if applicable) len_data_types = len(data_type) len_dataset_ids = len(dataset_ids) - + try: len_data_types == len_dataset_ids except: raise ValueError( - f"There is a mismatch in the number of data_types and dataset_id's that " - f"submitted. Please check your submission and try again." - ) - + f"There is a mismatch in the number of data_types and dataset_id's that " + f"submitted. Please check your submission and try again." + ) + # Raise an error if used in conjunction with datatype = 'all_manifests' try: - data_type[0] != 'all manifests' + data_type[0] != "all manifests" except: raise ValueError( - f"When submitting 'all manifests' as the data_type cannot also submit dataset_id. " - f"Please check your submission and try again." - ) - - all_results = ManifestGenerator.create_manifests(path_to_data_model=schema_url, output_format=output_format, data_types=data_type, title=title, access_token=access_token, dataset_ids=dataset_ids, strict=strict_validation, use_annotations=use_annotations) - + f"When submitting 'all manifests' as the data_type cannot also submit dataset_id. " + f"Please check your submission and try again." 
+ ) + + all_results = ManifestGenerator.create_manifests( + path_to_data_model=schema_url, + output_format=output_format, + data_types=data_type, + title=title, + access_token=access_token, + dataset_ids=dataset_ids, + strict=strict_validation, + use_annotations=use_annotations, + ) + return all_results -#####profile validate manifest route function -#@profile(sort_by='cumulative', strip_dirs=True) -def validate_manifest_route(schema_url, data_type, data_model_labels, restrict_rules=None, json_str=None, asset_view=None, project_scope = None): + +#####profile validate manifest route function +# @profile(sort_by='cumulative', strip_dirs=True) +def validate_manifest_route( + schema_url, + data_type, + data_model_labels, + restrict_rules=None, + json_str=None, + asset_view=None, + project_scope=None, +): # Access token now stored in request header access_token = get_access_token() - + # if restrict rules is set to None, default it to False if not restrict_rules: - restrict_rules=False - + restrict_rules = False + # call config_handler() - config_handler(asset_view = asset_view) + config_handler(asset_view=asset_view) - #If restrict_rules parameter is set to None, then default it to False + # If restrict_rules parameter is set to None, then default it to False if not restrict_rules: restrict_rules = False - #Get path to temp file where manifest file contents will be saved + # Get path to temp file where manifest file contents will be saved jsc = JsonConverter() if json_str: - temp_path = jsc.convert_json_str_to_csv(json_str = json_str, file_name = "example_json") - else: + temp_path = jsc.convert_json_str_to_csv( + json_str=json_str, file_name="example_json" + ) + else: temp_path = jsc.convert_json_file_to_csv("file_name") # get path to temp data model file (csv or jsonld) as appropriate data_model = get_temp_model_path(schema_url) metadata_model = MetadataModel( - inputMModelLocation=data_model, inputMModelLocationType="local", data_model_labels=data_model_labels + inputMModelLocation=data_model, + inputMModelLocationType="local", + data_model_labels=data_model_labels, ) errors, warnings = metadata_model.validateModelManifest( - manifestPath=temp_path, rootNode=data_type, restrict_rules=restrict_rules, project_scope=project_scope, access_token=access_token + manifestPath=temp_path, + rootNode=data_type, + restrict_rules=restrict_rules, + project_scope=project_scope, + access_token=access_token, ) - + res_dict = {"errors": errors, "warnings": warnings} return res_dict -#####profile validate manifest route function -#@profile(sort_by='cumulative', strip_dirs=True) -def submit_manifest_route(schema_url, - data_model_labels:str, - asset_view=None, - manifest_record_type=None, - json_str=None, - table_manipulation=None, - data_type=None, - hide_blanks=False): +#####profile validate manifest route function +# @profile(sort_by='cumulative', strip_dirs=True) +def submit_manifest_route( + schema_url, + data_model_labels: str, + asset_view=None, + manifest_record_type=None, + json_str=None, + table_manipulation=None, + data_type=None, + hide_blanks=False, + project_scope=None, +): # call config_handler() - config_handler(asset_view = asset_view) + config_handler(asset_view=asset_view) # convert Json file to CSV if applicable jsc = JsonConverter() if json_str: - temp_path = jsc.convert_json_str_to_csv(json_str = json_str, file_name = "example_json.csv") - else: + temp_path = jsc.convert_json_str_to_csv( + json_str=json_str, file_name="example_json.csv" + ) + else: temp_path = 
jsc.convert_json_file_to_csv("file_name") dataset_id = connexion.request.args["dataset_id"] @@ -368,44 +430,46 @@ def submit_manifest_route(schema_url, # Access token now stored in request header access_token = get_access_token() - use_schema_label = connexion.request.args["use_schema_label"] - if use_schema_label == 'None': + if use_schema_label == "None": use_schema_label = True else: use_schema_label = parse_bool(use_schema_label) - if not table_manipulation: + if not table_manipulation: table_manipulation = "replace" if not manifest_record_type: manifest_record_type = "table_file_and_entities" - if data_type == 'None': + if data_type == "None": validate_component = None else: validate_component = data_type - + # get path to temp data model file (csv or jsonld) as appropriate data_model = get_temp_model_path(schema_url) manifest_id = metadata_model.submit_metadata_manifest( - path_to_json_ld = data_model, - manifest_path=temp_path, - dataset_id=dataset_id, - validate_component=validate_component, - access_token=access_token, - manifest_record_type = manifest_record_type, - restrict_rules = restrict_rules, + path_to_json_ld=data_model, + manifest_path=temp_path, + dataset_id=dataset_id, + validate_component=validate_component, + access_token=access_token, + manifest_record_type=manifest_record_type, + restrict_rules=restrict_rules, hide_blanks=hide_blanks, - table_manipulation = table_manipulation, + table_manipulation=table_manipulation, use_schema_label=use_schema_label, project_scope=project_scope, - ) + ) return manifest_id -def populate_manifest_route(schema_url, data_model_labels:str, title=None, data_type=None, return_excel=None): + +def populate_manifest_route( + schema_url, data_model_labels: str, title=None, data_type=None, return_excel=None +): # call config_handler() config_handler() @@ -414,30 +478,41 @@ def populate_manifest_route(schema_url, data_model_labels:str, title=None, data_ # get path to temp data model file (csv or jsonld) as appropriate data_model = get_temp_model_path(schema_url) - - #Initalize MetadataModel - metadata_model = MetadataModel(inputMModelLocation=data_model, inputMModelLocationType='local', data_model_labels=data_model_labels) - #Call populateModelManifest class - populated_manifest_link = metadata_model.populateModelManifest(title=title, manifestPath=temp_path, rootNode=data_type, return_excel=return_excel) + # Initalize MetadataModel + metadata_model = MetadataModel( + inputMModelLocation=data_model, + inputMModelLocationType="local", + data_model_labels=data_model_labels, + ) + + # Call populateModelManifest class + populated_manifest_link = metadata_model.populateModelManifest( + title=title, + manifestPath=temp_path, + rootNode=data_type, + return_excel=return_excel, + ) return populated_manifest_link + def get_storage_projects(asset_view): # Access token now stored in request header access_token = get_access_token() - # call config handler + # call config handler config_handler(asset_view=asset_view) - # use Synapse storage + # use Synapse storage store = SynapseStorage(access_token=access_token) # call getStorageProjects function lst_storage_projects = store.getStorageProjects() - + return lst_storage_projects + def get_storage_projects_datasets(asset_view, project_id): # Access token now stored in request header access_token = get_access_token() @@ -449,10 +524,11 @@ def get_storage_projects_datasets(asset_view, project_id): store = SynapseStorage(access_token=access_token) # call getStorageDatasetsInProject function - sorted_dataset_lst = 
store.getStorageDatasetsInProject(projectId = project_id) - + sorted_dataset_lst = store.getStorageDatasetsInProject(projectId=project_id) + return sorted_dataset_lst + def get_files_storage_dataset(asset_view, dataset_id, full_path, file_names=None): # Access token now stored in request header access_token = get_access_token() @@ -464,18 +540,21 @@ def get_files_storage_dataset(asset_view, dataset_id, full_path, file_names=None store = SynapseStorage(access_token=access_token) # no file names were specified (file_names = ['']) - if file_names and not all(file_names): - file_names=None - + if file_names and not all(file_names): + file_names = None + # call getFilesInStorageDataset function - file_lst = store.getFilesInStorageDataset(datasetId=dataset_id, fileNames=file_names, fullpath=full_path) + file_lst = store.getFilesInStorageDataset( + datasetId=dataset_id, fileNames=file_names, fullpath=full_path + ) return file_lst + def check_if_files_in_assetview(asset_view, entity_id): # Access token now stored in request header access_token = get_access_token() - - # call config handler + + # call config handler config_handler(asset_view=asset_view) # use Synapse Storage @@ -486,25 +565,32 @@ def check_if_files_in_assetview(asset_view, entity_id): return if_exists + def check_entity_type(entity_id): # Access token now stored in request header access_token = get_access_token() - - # call config handler + + # call config handler config_handler() - syn = SynapseStorage.login(access_token = access_token) + syn = SynapseStorage.login(access_token=access_token) entity_type = entity_type_mapping(syn, entity_id) - return entity_type + return entity_type + -def get_component_requirements(schema_url, source_component, as_graph, data_model_labels): +def get_component_requirements( + schema_url, source_component, as_graph, data_model_labels +): metadata_model = initalize_metadata_model(schema_url, data_model_labels) - req_components = metadata_model.get_component_requirements(source_component=source_component, as_graph = as_graph) + req_components = metadata_model.get_component_requirements( + source_component=source_component, as_graph=as_graph + ) return req_components + @cross_origin(["http://localhost", "https://sage-bionetworks.github.io"]) def get_viz_attributes_explorer(schema_url, data_model_labels): # call config_handler() @@ -513,25 +599,34 @@ def get_viz_attributes_explorer(schema_url, data_model_labels): # get path to temp data model file (csv or jsonld) as appropriate data_model = get_temp_model_path(schema_url) - attributes_csv = AttributesExplorer(data_model, data_model_labels).parse_attributes(save_file=False) + attributes_csv = AttributesExplorer(data_model, data_model_labels).parse_attributes( + save_file=False + ) return attributes_csv -def get_viz_component_attributes_explorer(schema_url, component, include_index, data_model_labels): + +def get_viz_component_attributes_explorer( + schema_url, component, include_index, data_model_labels +): # call config_handler() config_handler() - # get path to temp data model file (csv or jsonld) as appropriate + # get path to temp data model file (csv or jsonld) as appropriate data_model = get_temp_model_path(schema_url) - attributes_csv = AttributesExplorer(data_model, data_model_labels).parse_component_attributes(component, save_file=False, include_index=include_index) + attributes_csv = AttributesExplorer( + data_model, data_model_labels + ).parse_component_attributes( + component, save_file=False, include_index=include_index + ) return 
attributes_csv + @cross_origin(["http://localhost", "https://sage-bionetworks.github.io"]) def get_viz_tangled_tree_text(schema_url, figure_type, text_format, data_model_labels): - - # get path to temp data model file (csv or jsonld) as appropriate + # get path to temp data model file (csv or jsonld) as appropriate data_model = get_temp_model_path(schema_url) # Initialize TangledTree @@ -539,12 +634,12 @@ def get_viz_tangled_tree_text(schema_url, figure_type, text_format, data_model_l # Get text for tangled tree. text_df = tangled_tree.get_text_for_tangled_tree(text_format, save_file=False) - + return text_df + @cross_origin(["http://localhost", "https://sage-bionetworks.github.io"]) def get_viz_tangled_tree_layers(schema_url, figure_type, data_model_labels): - # call config_handler() config_handler() @@ -553,20 +648,21 @@ def get_viz_tangled_tree_layers(schema_url, figure_type, data_model_labels): # Initialize Tangled Tree tangled_tree = TangledTree(data_model, figure_type, data_model_labels) - + # Get tangled trees layers JSON. layers = tangled_tree.get_tangled_tree_layers(save_file=False) return layers[0] -def download_manifest(manifest_id, new_manifest_name='', as_json=True): + +def download_manifest(manifest_id, new_manifest_name="", as_json=True): """ - Download a manifest based on a given manifest id. + Download a manifest based on a given manifest id. Args: manifest_syn_id: syn id of a manifest newManifestName: new name of a manifest that gets downloaded. as_json: boolean; If true, return a manifest as a json. Default to True - Return: + Return: file path of the downloaded manifest """ # Access token now stored in request header @@ -577,24 +673,25 @@ def download_manifest(manifest_id, new_manifest_name='', as_json=True): # use login method in synapse storage syn = SynapseStorage.login(access_token=access_token) - try: + try: md = ManifestDownload(syn, manifest_id) manifest_data = ManifestDownload.download_manifest(md, new_manifest_name) - #return local file path - manifest_local_file_path = manifest_data['path'] + # return local file path + manifest_local_file_path = manifest_data["path"] except TypeError as e: - raise TypeError(f'Failed to download manifest {manifest_id}.') + raise TypeError(f"Failed to download manifest {manifest_id}.") if as_json: manifest_json = return_as_json(manifest_local_file_path) return manifest_json else: return manifest_local_file_path -#@profile(sort_by='cumulative', strip_dirs=True) -def download_dataset_manifest(dataset_id, asset_view, as_json, new_manifest_name=''): + +# @profile(sort_by='cumulative', strip_dirs=True) +def download_dataset_manifest(dataset_id, asset_view, as_json, new_manifest_name=""): # Access token now stored in request header access_token = get_access_token() - + # call config handler config_handler(asset_view=asset_view) @@ -602,22 +699,25 @@ def download_dataset_manifest(dataset_id, asset_view, as_json, new_manifest_name store = SynapseStorage(access_token=access_token) # download existing file - manifest_data = store.getDatasetManifest(datasetId=dataset_id, downloadFile=True, newManifestName=new_manifest_name) + manifest_data = store.getDatasetManifest( + datasetId=dataset_id, downloadFile=True, newManifestName=new_manifest_name + ) - #return local file path + # return local file path try: - manifest_local_file_path = manifest_data['path'] + manifest_local_file_path = manifest_data["path"] except KeyError as e: - raise KeyError(f'Failed to download manifest from dataset: {dataset_id}') from e + raise KeyError(f"Failed to 
download manifest from dataset: {dataset_id}") from e - #return a json (if as_json = True) + # return a json (if as_json = True) if as_json: manifest_json = return_as_json(manifest_local_file_path) return manifest_json return manifest_local_file_path + def get_asset_view_table(asset_view, return_type): # Access token now stored in request header access_token = get_access_token() @@ -637,7 +737,7 @@ def get_asset_view_table(asset_view, return_type): return json_res else: path = os.getcwd() - export_path = os.path.join(path, 'tests/data/file_view_table.csv') + export_path = os.path.join(path, "tests/data/file_view_table.csv") file_view_table_df.to_csv(export_path, index=False) return export_path @@ -645,7 +745,7 @@ def get_asset_view_table(asset_view, return_type): def get_project_manifests(project_id, asset_view): # Access token now stored in request header access_token = get_access_token() - + # use the default asset view from config config_handler(asset_view=asset_view) @@ -657,10 +757,11 @@ def get_project_manifests(project_id, asset_view): return lst_manifest + def get_manifest_datatype(manifest_id, asset_view): # Access token now stored in request header access_token = get_access_token() - + # use the default asset view from config config_handler(asset_view=asset_view) @@ -668,14 +769,14 @@ def get_manifest_datatype(manifest_id, asset_view): store = SynapseStorage(access_token=access_token) # get data types of an existing manifest - manifest_dtypes_dict= store.getDataTypeFromManifest(manifest_id) - + manifest_dtypes_dict = store.getDataTypeFromManifest(manifest_id) return manifest_dtypes_dict + def get_schema_pickle(schema_url, data_model_labels): - data_model_parser = DataModelParser(path_to_data_model = schema_url) - #Parse Model + data_model_parser = DataModelParser(path_to_data_model=schema_url) + # Parse Model parsed_data_model = data_model_parser.parse_model() # Instantiate DataModelGraph @@ -686,17 +787,17 @@ def get_schema_pickle(schema_url, data_model_labels): # write to local pickle file path = os.getcwd() - export_path = os.path.join(path, 'tests/data/schema.gpickle') + export_path = os.path.join(path, "tests/data/schema.gpickle") - with open(export_path, 'wb') as file: + with open(export_path, "wb") as file: pickle.dump(graph_data_model, file) return export_path def get_subgraph_by_edge_type(schema_url, relationship, data_model_labels): - data_model_parser = DataModelParser(path_to_data_model = schema_url) - - #Parse Model + data_model_parser = DataModelParser(path_to_data_model=schema_url) + + # Parse Model parsed_data_model = data_model_parser.parse_model() # Instantiate DataModelGraph @@ -706,10 +807,10 @@ def get_subgraph_by_edge_type(schema_url, relationship, data_model_labels): graph_data_model = data_model_grapher.generate_data_model_graph() dmge = DataModelGraphExplorer(graph_data_model) - + # relationship subgraph relationship_subgraph = dmge.get_subgraph_by_edge_type(relationship) - # return relationship + # return relationship Arr = [] for t in relationship_subgraph.edges: lst = list(t) @@ -719,8 +820,8 @@ def get_subgraph_by_edge_type(schema_url, relationship, data_model_labels): def find_class_specific_properties(schema_url, schema_class, data_model_labels): - data_model_parser = DataModelParser(path_to_data_model = schema_url) - #Parse Model + data_model_parser = DataModelParser(path_to_data_model=schema_url) + # Parse Model parsed_data_model = data_model_parser.parse_model() # Instantiate DataModelGraph @@ -742,7 +843,7 @@ def get_node_dependencies( 
source_node: str, data_model_labels: str, return_display_names: bool = True, - return_schema_ordered: bool = True, + return_schema_ordered: bool = True, ) -> list[str]: """Get the immediate dependencies that are related to a given source node. @@ -761,8 +862,8 @@ def get_node_dependencies( Returns: list[str]: List of nodes that are dependent on the source node. """ - data_model_parser = DataModelParser(path_to_data_model = schema_url) - #Parse Model + data_model_parser = DataModelParser(path_to_data_model=schema_url) + # Parse Model parsed_data_model = data_model_parser.parse_model() # Instantiate DataModelGraph @@ -772,7 +873,7 @@ def get_node_dependencies( graph_data_model = data_model_grapher.generate_data_model_graph() dmge = DataModelGraphExplorer(graph_data_model) - + dependencies = dmge.get_node_dependencies( source_node, return_display_names, return_schema_ordered ) @@ -780,8 +881,7 @@ def get_node_dependencies( def get_property_label_from_display_name_route( - display_name: str, - strict_camel_case: bool = False + display_name: str, strict_camel_case: bool = False ) -> str: """Converts a given display name string into a proper property label string @@ -794,15 +894,17 @@ def get_property_label_from_display_name_route( Returns: str: The property label of the display name """ - label = get_property_label_from_display_name(display_name=display_name, strict_camel_case=strict_camel_case) + label = get_property_label_from_display_name( + display_name=display_name, strict_camel_case=strict_camel_case + ) return label def get_node_range( schema_url: str, node_label: str, - data_model_labels:str, - return_display_names: bool = True + data_model_labels: str, + return_display_names: bool = True, ) -> list[str]: """Get the range, i.e., all the valid values that are associated with a node label. @@ -815,8 +917,8 @@ def get_node_range( Returns: list[str]: A list of nodes """ - data_model_parser = DataModelParser(path_to_data_model = schema_url) - #Parse Model + data_model_parser = DataModelParser(path_to_data_model=schema_url) + # Parse Model parsed_data_model = data_model_parser.parse_model() # Instantiate DataModelGraph @@ -830,7 +932,10 @@ def get_node_range( node_range = dmge.get_node_range(node_label, return_display_names) return node_range -def get_if_node_required(schema_url: str, node_display_name: str, data_model_labels:str) -> bool: + +def get_if_node_required( + schema_url: str, node_display_name: str, data_model_labels: str +) -> bool: """Check if the node is required Args: @@ -841,8 +946,8 @@ def get_if_node_required(schema_url: str, node_display_name: str, data_model_lab True: If the given node is a "required" node. False: If the given node is not a "required" (i.e., an "optional") node. """ - data_model_parser = DataModelParser(path_to_data_model = schema_url) - #Parse Model + data_model_parser = DataModelParser(path_to_data_model=schema_url) + # Parse Model parsed_data_model = data_model_parser.parse_model() # Instantiate DataModelGraph @@ -857,7 +962,10 @@ def get_if_node_required(schema_url: str, node_display_name: str, data_model_lab return is_required -def get_node_validation_rules(schema_url: str, node_display_name: str, data_model_labels:str) -> list: + +def get_node_validation_rules( + schema_url: str, node_display_name: str, data_model_labels: str +) -> list: """ Args: schema_url (str): Data Model URL @@ -866,9 +974,9 @@ def get_node_validation_rules(schema_url: str, node_display_name: str, data_mode List of valiation rules for a given node. 
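Each of these routes rebuilds the same parser-to-graph-to-explorer pipeline before asking a single question of the graph. A condensed sketch that builds one explorer and reuses it for several queries, with method names as they appear in this file and illustrative node names from the example model:

```python
from schematic.schemas.data_model_parser import DataModelParser
from schematic.schemas.data_model_graph import DataModelGraph, DataModelGraphExplorer

parsed = DataModelParser(
    path_to_data_model="tests/data/example.model.jsonld"
).parse_model()
graph = DataModelGraph(
    parsed, data_model_labels="class_label"
).generate_data_model_graph()
dmge = DataModelGraphExplorer(graph)

# Several questions against one graph, instead of one full rebuild per request
dependencies = dmge.get_node_dependencies("Patient", True, True)
node_range = dmge.get_node_range("Sex", True)
is_required = dmge.get_if_node_required("Patient ID")
rules = dmge.get_node_validation_rules("Patient ID")
```

Caching the explorer per `schema_url` along these lines would avoid re-parsing the model on every request; as written, the routes rebuild it each time.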
""" # Instantiate DataModelParser - data_model_parser = DataModelParser(path_to_data_model = schema_url) - - #Parse Model + data_model_parser = DataModelParser(path_to_data_model=schema_url) + + # Parse Model parsed_data_model = data_model_parser.parse_model() # Instantiate DataModelGraph @@ -877,28 +985,31 @@ def get_node_validation_rules(schema_url: str, node_display_name: str, data_mode # Generate graph graph_data_model = data_model_grapher.generate_data_model_graph() - #Instantiate DataModelGraphExplorer + # Instantiate DataModelGraphExplorer dmge = DataModelGraphExplorer(graph_data_model) node_validation_rules = dmge.get_node_validation_rules(node_display_name) return node_validation_rules -def get_nodes_display_names(schema_url: str, node_list: list[str], data_model_labels:str) -> list: + +def get_nodes_display_names( + schema_url: str, node_list: list[str], data_model_labels: str +) -> list: """From a list of node labels retrieve their display names, return as list. - + Args: schema_url (str): Data Model URL node_list (List[str]): List of node labels. - + Returns: node_display_names (List[str]): List of node display names. """ # Instantiate DataModelParser - data_model_parser = DataModelParser(path_to_data_model = schema_url) - - #Parse Model + data_model_parser = DataModelParser(path_to_data_model=schema_url) + + # Parse Model parsed_data_model = data_model_parser.parse_model() # Instantiate DataModelGraph @@ -907,12 +1018,13 @@ def get_nodes_display_names(schema_url: str, node_list: list[str], data_model_la # Generate graph graph_data_model = data_model_grapher.generate_data_model_graph() - #Instantiate DataModelGraphExplorer + # Instantiate DataModelGraphExplorer dmge = DataModelGraphExplorer(graph_data_model) node_display_names = dmge.get_nodes_display_names(node_list) return node_display_names + def get_schematic_version() -> str: """ Return the current version of schematic diff --git a/tests/conftest.py b/tests/conftest.py index c1489782c..6f72cf596 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -59,7 +59,7 @@ def get_data_frame(path, *paths, **kwargs): @staticmethod def get_data_model_graph_explorer( - path=None, display_name_as_label: bool = False, *paths + path=None, data_model_labels: str = "class_label", *paths ): # commenting this now bc we dont want to have multiple instances if path is None: @@ -69,7 +69,7 @@ def get_data_model_graph_explorer( # Instantiate DataModelParser data_model_parser = DataModelParser( - path_to_data_model=fullpath, display_name_as_label=display_name_as_label + path_to_data_model=fullpath, ) # Parse Model @@ -77,7 +77,7 @@ def get_data_model_graph_explorer( # Instantiate DataModelGraph data_model_grapher = DataModelGraph( - parsed_data_model, display_name_as_label=display_name_as_label + parsed_data_model, data_model_labels=data_model_labels ) # Generate graph diff --git a/tests/test_api.py b/tests/test_api.py index 4b14ad448..09755405c 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -544,6 +544,7 @@ def test_generate_existing_manifest( "title": "Example", "data_type": data_type, "use_annotations": False, + "data_model_labels": "class_label", } # Previous form of the test had `access_token` set to `None` @@ -1033,6 +1034,7 @@ def test_submit_manifest_table_and_file_replace( "dataset_id": "syn51514345", "table_manipulation": "replace", "use_schema_label": True, + "data_model_labels": "class_label", } response_csv = client.post( @@ -1070,6 +1072,7 @@ def test_submit_manifest_file_only_replace( "manifest_record_type": 
"file_only", "table_manipulation": "replace", "use_schema_label": True, + "data_model_labels": "class_label", } if data_type == "Biospecimen": @@ -1116,6 +1119,7 @@ def test_submit_manifest_json_str_replace( "dataset_id": "syn51514345", "table_manipulation": "replace", "use_schema_label": True, + "data_model_labels": "class_label", } params["json_str"] = json_str response = client.post( @@ -1140,6 +1144,7 @@ def test_submit_manifest_w_file_and_entities( "dataset_id": "syn51514523", "table_manipulation": "replace", "use_schema_label": True, + "data_model_labels": "class_label", } # test uploading a csv file @@ -1169,6 +1174,7 @@ def test_submit_manifest_table_and_file_upsert( "dataset_id": "syn51514551", "table_manipulation": "upsert", "use_schema_label": False, # have to set use_schema_label to false to ensure upsert feature works + "data_model_labels": "class_label", } # test uploading a csv file @@ -1220,6 +1226,7 @@ def test_visualize_component( "schema_url": data_model_jsonld, "component": component, "include_index": False, + "data_model_labels": "class_label", } response = client.get( diff --git a/tests/test_metadata.py b/tests/test_metadata.py index a92a3bb6b..9bc5e5ae5 100644 --- a/tests/test_metadata.py +++ b/tests/test_metadata.py @@ -9,10 +9,10 @@ logger = logging.getLogger(__name__) -def metadata_model(helpers, display_name_as_label): +def metadata_model(helpers, data_model_labels): metadata_model = MetadataModel( inputMModelLocation=helpers.get_data_path("example.model.jsonld"), - display_name_as_label=display_name_as_label, + data_model_labels=data_model_labels, inputMModelLocationType="local", ) @@ -22,15 +22,15 @@ def metadata_model(helpers, display_name_as_label): class TestMetadataModel: @pytest.mark.parametrize("as_graph", [True, False], ids=["as_graph", "as_list"]) @pytest.mark.parametrize( - "display_name_as_label", - [True, False], - ids=["display_name_as_label-True", "display_name_as_label-False"], + "data_model_labels", + ["display_label", "class_label"], + ids=["data_model_labels-display_label", "data_model_labels-class_label"], ) - def test_get_component_requirements(self, helpers, as_graph, display_name_as_label): + def test_get_component_requirements(self, helpers, as_graph, data_model_labels): # Instantiate MetadataModel - meta_data_model = metadata_model(helpers, display_name_as_label) + meta_data_model = metadata_model(helpers, data_model_labels) - if display_name_as_label: + if data_model_labels == "display_label": source_component = "BulkRNAseqAssay" else: source_component = "BulkRNA-seqAssay" @@ -43,28 +43,28 @@ def test_get_component_requirements(self, helpers, as_graph, display_name_as_lab if as_graph: assert ("Biospecimen", "Patient") in output - if display_name_as_label: + if data_model_labels == "display_label": assert ("BulkRNAseqAssay", "Biospecimen") in output else: assert ("BulkRNA-seqAssay", "Biospecimen") in output else: assert "Biospecimen" in output assert "Patient" in output - if display_name_as_label: + if data_model_labels == "display_label": assert "BulkRNAseqAssay" in output else: assert "BulkRNA-seqAssay" in output @pytest.mark.parametrize("return_excel", [None, True, False]) @pytest.mark.parametrize( - "display_name_as_label", - [True, False], - ids=["display_name_as_label-True", "display_name_as_label-False"], + "data_model_labels", + ["display_label", "class_label"], + ids=["data_model_labels-display_label", "data_model_labels-class_label"], ) @pytest.mark.google_credentials_needed - def test_populate_manifest(self, helpers, 
return_excel, display_name_as_label): + def test_populate_manifest(self, helpers, return_excel, data_model_labels): # Instantiate MetadataModel - meta_data_model = metadata_model(helpers, display_name_as_label) + meta_data_model = metadata_model(helpers, data_model_labels) # Get path of manifest manifestPath = helpers.get_data_path("mock_manifests/Valid_Test_Manifest.csv") diff --git a/tests/test_schemas.py b/tests/test_schemas.py index 2f7ab3ff9..b262fcf6a 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -65,20 +65,21 @@ def test_fake_func(): def get_data_model_parser( - helpers, data_model_name: str = None, display_name_as_label: bool = False + helpers, + data_model_name: str = None, ): # Get path to data model fullpath = helpers.get_data_path(path=data_model_name) # Instantiate DataModelParser data_model_parser = DataModelParser( - path_to_data_model=fullpath, display_name_as_label=display_name_as_label + path_to_data_model=fullpath, ) return data_model_parser def generate_graph_data_model( - helpers, data_model_name: str, display_name_as_label: bool = False + helpers, data_model_name: str, data_model_labels: str = "class_label" ) -> nx.MultiDiGraph: """ Simple helper function to generate a networkx graph data model from a CSV or JSONLD data model @@ -87,7 +88,6 @@ def generate_graph_data_model( data_model_parser = get_data_model_parser( helpers=helpers, data_model_name=data_model_name, - display_name_as_label=display_name_as_label, ) # Parse Model @@ -95,7 +95,7 @@ def generate_graph_data_model( # Convert parsed model to graph # Instantiate DataModelGraph - data_model_grapher = DataModelGraph(parsed_data_model, display_name_as_label) + data_model_grapher = DataModelGraph(parsed_data_model, data_model_labels) # Generate graph graph_data_model = data_model_grapher.generate_data_model_graph() @@ -103,10 +103,13 @@ def generate_graph_data_model( return graph_data_model -def generate_data_model_nodes(helpers, data_model_name: str) -> DataModelNodes: +def generate_data_model_nodes( + helpers, data_model_name: str, data_model_labels: str = "class_label" +) -> DataModelNodes: # Instantiate Parser data_model_parser = get_data_model_parser( - helpers=helpers, data_model_name=data_model_name + helpers=helpers, + data_model_name=data_model_name, ) # Parse Model parsed_data_model = data_model_parser.parse_model() @@ -289,7 +292,6 @@ def test_gather_jsonld_attributes_relationships( # Get output of the function: attr_rel_dict = jsonld_parser.gather_jsonld_attributes_relationships( model_jsonld=model_jsonld["@graph"], - display_name_as_label=False, ) # Test the attr_rel_dict is formatted as expected: @@ -315,7 +317,6 @@ def test_parse_jsonld_model( # Get output of the function: attr_rel_dictionary = jsonld_parser.parse_jsonld_model( path_to_data_model=path_to_data_model, - display_name_as_label=False, ) # Test the attr_rel_dictionary is formatted as expected: @@ -401,36 +402,34 @@ class TestDataModelGraph: ids=["csv", "jsonld"], ) @pytest.mark.parametrize( - "display_name_as_label", - [True, False], - ids=["display_name_as_label-True", "display_name_as_label-False"], + "data_model_labels", + ["display_label", "class_label"], + ids=["data_model_labels-display_label", "data_model_labels-class_label"], ) - def test_generate_data_model_graph( - self, helpers, data_model, display_name_as_label - ): + def test_generate_data_model_graph(self, helpers, data_model, data_model_labels): """Check that data model graph is constructed properly, requires calling various classes. 
TODO: In another test, check conditional dependencies. """ graph = generate_graph_data_model( helpers=helpers, data_model_name=data_model, - display_name_as_label=display_name_as_label, + data_model_labels=data_model_labels, ) # Check that some edges are present as expected: assert ("FamilyHistory", "Breast") in graph.edges("FamilyHistory") - if display_name_as_label: + if data_model_labels == "display_label": expected_valid_values = ["ab", "cd", "ef", "gh"] mock_id_label = "MockRDB_id" assert ("BulkRNAseqAssay", "Biospecimen") in graph.edges("BulkRNAseqAssay") + else: expected_valid_values = ["Ab", "Cd", "Ef", "Gh"] mock_id_label = "MockRDBId" assert ("BulkRNA-seqAssay", "Biospecimen") in graph.edges( "BulkRNA-seqAssay" ) - assert expected_valid_values == [ k for k, v in graph["CheckList"].items() @@ -573,7 +572,8 @@ def test_gather_nodes(self, helpers, data_model): # Instantiate DataModelNodes data_model_nodes = generate_data_model_nodes( - helpers, data_model_name=data_model + helpers, + data_model_name=data_model, ) attr_info = ("Patient", attr_rel_dictionary["Patient"]) @@ -833,32 +833,35 @@ def test_run_rel_functions(self, helpers, data_model, rel_func, test_dn, test_bo ids=["Node_required-" + str(v) for v in NODE_DISPLAY_NAME_DICT.values()], ) @pytest.mark.parametrize( - "display_name_as_label", - [True, False], - ids=["Display_name_as_label-True", "Display_name_as_label-False"], + "data_model_labels", + ["display_label", "class_label"], + ids=["data_model_labels-display_label", "data_model_labels-class_label"], ) def test_generate_node_dict( - self, helpers, data_model, node_display_name, display_name_as_label + self, helpers, data_model, node_display_name, data_model_labels ): # Instantiate Parser data_model_parser = get_data_model_parser( - helpers=helpers, data_model_name=data_model + helpers=helpers, + data_model_name=data_model, ) # Parse Model attr_rel_dictionary = data_model_parser.parse_model() - # Change SourceManifest to sockComponent so we can check the display_name_as_label is working as expected + # Change SourceManifest to sockComponent so we can check the data_model_labels is working as expected # Instantiate DataModelNodes data_model_nodes = generate_data_model_nodes( - helpers, data_model_name=data_model + helpers, + data_model_name=data_model, + data_model_labels=data_model_labels, ) node_dict = data_model_nodes.generate_node_dict( node_display_name=node_display_name, attr_rel_dict=attr_rel_dictionary, - display_name_as_label=display_name_as_label, + data_model_labels=data_model_labels, ) # Check that the output is as expected for the required key. 
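A minimal sketch of the parse-then-graph flow these tests drive, assuming schematic's public module layout; the model path is hypothetical, and data_model_labels is only consumed once the graph is built:

    from schematic.schemas.data_model_parser import DataModelParser
    from schematic.schemas.data_model_graph import DataModelGraph, DataModelGraphExplorer

    # The parser no longer decides labeling; it only reads the model.
    parser = DataModelParser(path_to_data_model="example.model.jsonld")  # assumed local model
    parsed = parser.parse_model()

    # "class_label" derives camel-cased labels; "display_label" keeps display names as labels.
    graph = DataModelGraph(parsed, data_model_labels="display_label").generate_data_model_graph()
    dmge = DataModelGraphExplorer(graph)
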
@@ -870,8 +873,8 @@ def test_generate_node_dict(
 
         assert DATA_MODEL_DICT[data_model] == "JSONLD"
 
         # Check that the display name matches the label
-        if display_name_as_label:
-            assert node_display_name == node_dict["label"]
+        if data_model_labels == "display_label":
+            assert node_display_name == node_dict["label"]
 
     def test_generate_node(self, helpers, data_model):
         # Test adding a dummy node
@@ -1314,18 +1320,18 @@ def test_class_template(self, helpers):
     )
     @pytest.mark.parametrize("node", ["", "Patient"], ids=["no node", "Patient"])
     @pytest.mark.parametrize(
-        "display_name_as_label",
-        [True, False],
-        ids=["display_name_as_label-True", "display_name_as_label-False"],
+        "data_model_labels",
+        ["display_label", "class_label"],
+        ids=["data_model_labels-display_label", "data_model_labels-class_label"],
     )
     def test_fill_entry_template(
-        self, helpers, data_model, template_type, node, display_name_as_label
+        self, helpers, data_model, template_type, node, data_model_labels
     ):
         # Get Graph
         graph_data_model = generate_graph_data_model(
             helpers,
             data_model_name=data_model,
-            display_name_as_label=display_name_as_label,
+            data_model_labels=data_model_labels,
         )
 
         # Instantiate DataModelJsonLD
@@ -1383,7 +1389,7 @@ def test_fill_entry_template(
         assert (set(actual_keys) - set(expected_keys)) == (
             set(expected_keys) - set(actual_keys)
         )
-        if display_name_as_label:
+        if data_model_labels == "display_label":
             assert (
                 object_template["rdfs:label"] == object_template["sms:displayName"]
             )
diff --git a/tests/test_utils.py b/tests/test_utils.py
index 63be6e5f2..8a54207eb 100644
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -605,7 +605,7 @@ def test_strip_context(self, helpers, context_value):
         ["display_label", "class_label"],
         ids=["display_label", "class_label"],
     )
-    def test_get_label_from_display_name(self, test_dn, use_label):
+    def test_get_label_from_display_name(self, test_dn, data_model_labels):
         display_name = test_dn
         for entry_type, expected_result in TEST_DN_DICT[test_dn].items():
             label = ""
@@ -625,7 +625,7 @@ def test_get_label_from_display_name(self, test_dn, use_label):
                 "bio_things",
             ]
             if label:
-                if use_label:
+                if data_model_labels == "display_label":
                     if test_dn in ["Bio Things", "bio things", "Bio-things"]:
                         assert label == expected_result
diff --git a/tests/test_validation.py b/tests/test_validation.py
index 51cc2f18b..fd852f7dc 100644
--- a/tests/test_validation.py
+++ b/tests/test_validation.py
@@ -33,7 +33,7 @@ def metadataModel(helpers):
     metadataModel = MetadataModel(
         inputMModelLocation=helpers.get_data_path("example.model.jsonld"),
         inputMModelLocationType="local",
-        display_name_as_label=False,
+        data_model_labels="class_label",
     )
     yield metadataModel
diff --git a/tests/test_viz.py b/tests/test_viz.py
index ad4187bbd..3837620af 100644
--- a/tests/test_viz.py
+++ b/tests/test_viz.py
@@ -20,7 +20,8 @@ def attributes_explorer(helpers):
 
     # Initialize TangledTree
     attributes_explorer = AttributesExplorer(
-        path_to_jsonld, display_name_as_label=False
+        path_to_jsonld,
+        data_model_labels="class_label",
     )
 
     yield attributes_explorer
@@ -33,7 +34,9 @@ def tangled_tree(helpers):
     path_to_jsonld = helpers.get_data_path("example.model.jsonld")
 
     # Initialize TangledTree
-    tangled_tree = TangledTree(path_to_jsonld, figure_type, display_name_as_label=False)
+    tangled_tree = TangledTree(
+        path_to_jsonld, figure_type, data_model_labels="class_label"
+    )
 
     yield tangled_tree
 
From 16ec34f49f2e9421b6677f0d9b4bba59b25d5b61 Mon Sep 17 00:00:00 2001
From: Mialy DeFelice
<85905780+mialy-defelice@users.noreply.github.com> Date: Thu, 25 Jan 2024 19:45:32 -0800 Subject: [PATCH 084/199] remove unnecessary ref to data_model_labels for DataModelParser --- schematic/visualization/attributes_explorer.py | 1 - schematic/visualization/tangled_tree.py | 1 - 2 files changed, 2 deletions(-) diff --git a/schematic/visualization/attributes_explorer.py b/schematic/visualization/attributes_explorer.py index 0d1f3b6e1..95f1e6e8e 100644 --- a/schematic/visualization/attributes_explorer.py +++ b/schematic/visualization/attributes_explorer.py @@ -28,7 +28,6 @@ def __init__( # Instantiate Data Model Parser data_model_parser = DataModelParser( path_to_data_model=self.path_to_jsonld, - data_model_labels=data_model_labels, ) # Parse Model diff --git a/schematic/visualization/tangled_tree.py b/schematic/visualization/tangled_tree.py index ef9b00b65..41160b438 100644 --- a/schematic/visualization/tangled_tree.py +++ b/schematic/visualization/tangled_tree.py @@ -48,7 +48,6 @@ def __init__( # Instantiate Data Model Parser data_model_parser = DataModelParser( path_to_data_model=self.path_to_json_ld, - data_model_labels=data_model_labels, ) # Parse Model From b60c781dceaa6e431e90530a3e58db548301ab19 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Fri, 26 Jan 2024 13:58:42 -0700 Subject: [PATCH 085/199] regen `.lock` --- poetry.lock | 433 ++++++++++++++++++++++++++++++++-------------------- 1 file changed, 270 insertions(+), 163 deletions(-) diff --git a/poetry.lock b/poetry.lock index 3997367ad..e1457f930 100644 --- a/poetry.lock +++ b/poetry.lock @@ -607,63 +607,63 @@ tests = ["MarkupSafe (>=0.23)", "aiohttp (>=2.3.10,<4)", "aiohttp-jinja2 (>=0.14 [[package]] name = "coverage" -version = "7.4.0" +version = "7.4.1" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36b0ea8ab20d6a7564e89cb6135920bc9188fb5f1f7152e94e8300b7b189441a"}, - {file = "coverage-7.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0676cd0ba581e514b7f726495ea75aba3eb20899d824636c6f59b0ed2f88c471"}, - {file = "coverage-7.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ca5c71a5a1765a0f8f88022c52b6b8be740e512980362f7fdbb03725a0d6b9"}, - {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7c97726520f784239f6c62506bc70e48d01ae71e9da128259d61ca5e9788516"}, - {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:815ac2d0f3398a14286dc2cea223a6f338109f9ecf39a71160cd1628786bc6f5"}, - {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:80b5ee39b7f0131ebec7968baa9b2309eddb35b8403d1869e08f024efd883566"}, - {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5b2ccb7548a0b65974860a78c9ffe1173cfb5877460e5a229238d985565574ae"}, - {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:995ea5c48c4ebfd898eacb098164b3cc826ba273b3049e4a889658548e321b43"}, - {file = "coverage-7.4.0-cp310-cp310-win32.whl", hash = "sha256:79287fd95585ed36e83182794a57a46aeae0b64ca53929d1176db56aacc83451"}, - {file = "coverage-7.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:5b14b4f8760006bfdb6e08667af7bc2d8d9bfdb648351915315ea17645347137"}, - {file = "coverage-7.4.0-cp311-cp311-macosx_10_9_x86_64.whl", 
hash = "sha256:04387a4a6ecb330c1878907ce0dc04078ea72a869263e53c72a1ba5bbdf380ca"}, - {file = "coverage-7.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea81d8f9691bb53f4fb4db603203029643caffc82bf998ab5b59ca05560f4c06"}, - {file = "coverage-7.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74775198b702868ec2d058cb92720a3c5a9177296f75bd97317c787daf711505"}, - {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76f03940f9973bfaee8cfba70ac991825611b9aac047e5c80d499a44079ec0bc"}, - {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:485e9f897cf4856a65a57c7f6ea3dc0d4e6c076c87311d4bc003f82cfe199d25"}, - {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6ae8c9d301207e6856865867d762a4b6fd379c714fcc0607a84b92ee63feff70"}, - {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bf477c355274a72435ceb140dc42de0dc1e1e0bf6e97195be30487d8eaaf1a09"}, - {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:83c2dda2666fe32332f8e87481eed056c8b4d163fe18ecc690b02802d36a4d26"}, - {file = "coverage-7.4.0-cp311-cp311-win32.whl", hash = "sha256:697d1317e5290a313ef0d369650cfee1a114abb6021fa239ca12b4849ebbd614"}, - {file = "coverage-7.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:26776ff6c711d9d835557ee453082025d871e30b3fd6c27fcef14733f67f0590"}, - {file = "coverage-7.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:13eaf476ec3e883fe3e5fe3707caeb88268a06284484a3daf8250259ef1ba143"}, - {file = "coverage-7.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846f52f46e212affb5bcf131c952fb4075b55aae6b61adc9856222df89cbe3e2"}, - {file = "coverage-7.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26f66da8695719ccf90e794ed567a1549bb2644a706b41e9f6eae6816b398c4a"}, - {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:164fdcc3246c69a6526a59b744b62e303039a81e42cfbbdc171c91a8cc2f9446"}, - {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:316543f71025a6565677d84bc4df2114e9b6a615aa39fb165d697dba06a54af9"}, - {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bb1de682da0b824411e00a0d4da5a784ec6496b6850fdf8c865c1d68c0e318dd"}, - {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:0e8d06778e8fbffccfe96331a3946237f87b1e1d359d7fbe8b06b96c95a5407a"}, - {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a56de34db7b7ff77056a37aedded01b2b98b508227d2d0979d373a9b5d353daa"}, - {file = "coverage-7.4.0-cp312-cp312-win32.whl", hash = "sha256:51456e6fa099a8d9d91497202d9563a320513fcf59f33991b0661a4a6f2ad450"}, - {file = "coverage-7.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:cd3c1e4cb2ff0083758f09be0f77402e1bdf704adb7f89108007300a6da587d0"}, - {file = "coverage-7.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e9d1bf53c4c8de58d22e0e956a79a5b37f754ed1ffdbf1a260d9dcfa2d8a325e"}, - {file = "coverage-7.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:109f5985182b6b81fe33323ab4707011875198c41964f014579cf82cebf2bb85"}, - {file = "coverage-7.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:3cc9d4bc55de8003663ec94c2f215d12d42ceea128da8f0f4036235a119c88ac"}, - {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc6d65b21c219ec2072c1293c505cf36e4e913a3f936d80028993dd73c7906b1"}, - {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a10a4920def78bbfff4eff8a05c51be03e42f1c3735be42d851f199144897ba"}, - {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b8e99f06160602bc64da35158bb76c73522a4010f0649be44a4e167ff8555952"}, - {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7d360587e64d006402b7116623cebf9d48893329ef035278969fa3bbf75b697e"}, - {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:29f3abe810930311c0b5d1a7140f6395369c3db1be68345638c33eec07535105"}, - {file = "coverage-7.4.0-cp38-cp38-win32.whl", hash = "sha256:5040148f4ec43644702e7b16ca864c5314ccb8ee0751ef617d49aa0e2d6bf4f2"}, - {file = "coverage-7.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:9864463c1c2f9cb3b5db2cf1ff475eed2f0b4285c2aaf4d357b69959941aa555"}, - {file = "coverage-7.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:936d38794044b26c99d3dd004d8af0035ac535b92090f7f2bb5aa9c8e2f5cd42"}, - {file = "coverage-7.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:799c8f873794a08cdf216aa5d0531c6a3747793b70c53f70e98259720a6fe2d7"}, - {file = "coverage-7.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7defbb9737274023e2d7af02cac77043c86ce88a907c58f42b580a97d5bcca9"}, - {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a1526d265743fb49363974b7aa8d5899ff64ee07df47dd8d3e37dcc0818f09ed"}, - {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf635a52fc1ea401baf88843ae8708591aa4adff875e5c23220de43b1ccf575c"}, - {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:756ded44f47f330666843b5781be126ab57bb57c22adbb07d83f6b519783b870"}, - {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0eb3c2f32dabe3a4aaf6441dde94f35687224dfd7eb2a7f47f3fd9428e421058"}, - {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bfd5db349d15c08311702611f3dccbef4b4e2ec148fcc636cf8739519b4a5c0f"}, - {file = "coverage-7.4.0-cp39-cp39-win32.whl", hash = "sha256:53d7d9158ee03956e0eadac38dfa1ec8068431ef8058fe6447043db1fb40d932"}, - {file = "coverage-7.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:cfd2a8b6b0d8e66e944d47cdec2f47c48fef2ba2f2dff5a9a75757f64172857e"}, - {file = "coverage-7.4.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:c530833afc4707fe48524a44844493f36d8727f04dcce91fb978c414a8556cc6"}, - {file = "coverage-7.4.0.tar.gz", hash = "sha256:707c0f58cb1712b8809ece32b68996ee1e609f71bd14615bd8f87a1293cb610e"}, + {file = "coverage-7.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:077d366e724f24fc02dbfe9d946534357fda71af9764ff99d73c3c596001bbd7"}, + {file = "coverage-7.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0193657651f5399d433c92f8ae264aff31fc1d066deee4b831549526433f3f61"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d17bbc946f52ca67adf72a5ee783cd7cd3477f8f8796f59b4974a9b59cacc9ee"}, + {file = 
"coverage-7.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3277f5fa7483c927fe3a7b017b39351610265308f5267ac6d4c2b64cc1d8d25"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dceb61d40cbfcf45f51e59933c784a50846dc03211054bd76b421a713dcdf19"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6008adeca04a445ea6ef31b2cbaf1d01d02986047606f7da266629afee982630"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c61f66d93d712f6e03369b6a7769233bfda880b12f417eefdd4f16d1deb2fc4c"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9bb62fac84d5f2ff523304e59e5c439955fb3b7f44e3d7b2085184db74d733b"}, + {file = "coverage-7.4.1-cp310-cp310-win32.whl", hash = "sha256:f86f368e1c7ce897bf2457b9eb61169a44e2ef797099fb5728482b8d69f3f016"}, + {file = "coverage-7.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:869b5046d41abfea3e381dd143407b0d29b8282a904a19cb908fa24d090cc018"}, + {file = "coverage-7.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b8ffb498a83d7e0305968289441914154fb0ef5d8b3157df02a90c6695978295"}, + {file = "coverage-7.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3cacfaefe6089d477264001f90f55b7881ba615953414999c46cc9713ff93c8c"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d6850e6e36e332d5511a48a251790ddc545e16e8beaf046c03985c69ccb2676"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18e961aa13b6d47f758cc5879383d27b5b3f3dcd9ce8cdbfdc2571fe86feb4dd"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfd1e1b9f0898817babf840b77ce9fe655ecbe8b1b327983df485b30df8cc011"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6b00e21f86598b6330f0019b40fb397e705135040dbedc2ca9a93c7441178e74"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:536d609c6963c50055bab766d9951b6c394759190d03311f3e9fcf194ca909e1"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7ac8f8eb153724f84885a1374999b7e45734bf93a87d8df1e7ce2146860edef6"}, + {file = "coverage-7.4.1-cp311-cp311-win32.whl", hash = "sha256:f3771b23bb3675a06f5d885c3630b1d01ea6cac9e84a01aaf5508706dba546c5"}, + {file = "coverage-7.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:9d2f9d4cc2a53b38cabc2d6d80f7f9b7e3da26b2f53d48f05876fef7956b6968"}, + {file = "coverage-7.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f68ef3660677e6624c8cace943e4765545f8191313a07288a53d3da188bd8581"}, + {file = "coverage-7.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:23b27b8a698e749b61809fb637eb98ebf0e505710ec46a8aa6f1be7dc0dc43a6"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e3424c554391dc9ef4a92ad28665756566a28fecf47308f91841f6c49288e66"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0860a348bf7004c812c8368d1fc7f77fe8e4c095d661a579196a9533778e156"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:fe558371c1bdf3b8fa03e097c523fb9645b8730399c14fe7721ee9c9e2a545d3"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3468cc8720402af37b6c6e7e2a9cdb9f6c16c728638a2ebc768ba1ef6f26c3a1"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:02f2edb575d62172aa28fe00efe821ae31f25dc3d589055b3fb64d51e52e4ab1"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ca6e61dc52f601d1d224526360cdeab0d0712ec104a2ce6cc5ccef6ed9a233bc"}, + {file = "coverage-7.4.1-cp312-cp312-win32.whl", hash = "sha256:ca7b26a5e456a843b9b6683eada193fc1f65c761b3a473941efe5a291f604c74"}, + {file = "coverage-7.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:85ccc5fa54c2ed64bd91ed3b4a627b9cce04646a659512a051fa82a92c04a448"}, + {file = "coverage-7.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8bdb0285a0202888d19ec6b6d23d5990410decb932b709f2b0dfe216d031d218"}, + {file = "coverage-7.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:918440dea04521f499721c039863ef95433314b1db00ff826a02580c1f503e45"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:379d4c7abad5afbe9d88cc31ea8ca262296480a86af945b08214eb1a556a3e4d"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b094116f0b6155e36a304ff912f89bbb5067157aff5f94060ff20bbabdc8da06"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2f5968608b1fe2a1d00d01ad1017ee27efd99b3437e08b83ded9b7af3f6f766"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:10e88e7f41e6197ea0429ae18f21ff521d4f4490aa33048f6c6f94c6045a6a75"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a4a3907011d39dbc3e37bdc5df0a8c93853c369039b59efa33a7b6669de04c60"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6d224f0c4c9c98290a6990259073f496fcec1b5cc613eecbd22786d398ded3ad"}, + {file = "coverage-7.4.1-cp38-cp38-win32.whl", hash = "sha256:23f5881362dcb0e1a92b84b3c2809bdc90db892332daab81ad8f642d8ed55042"}, + {file = "coverage-7.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:a07f61fc452c43cd5328b392e52555f7d1952400a1ad09086c4a8addccbd138d"}, + {file = "coverage-7.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8e738a492b6221f8dcf281b67129510835461132b03024830ac0e554311a5c54"}, + {file = "coverage-7.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:46342fed0fff72efcda77040b14728049200cbba1279e0bf1188f1f2078c1d70"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9641e21670c68c7e57d2053ddf6c443e4f0a6e18e547e86af3fad0795414a628"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aeb2c2688ed93b027eb0d26aa188ada34acb22dceea256d76390eea135083950"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d12c923757de24e4e2110cf8832d83a886a4cf215c6e61ed506006872b43a6d1"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0491275c3b9971cdbd28a4595c2cb5838f08036bca31765bad5e17edf900b2c7"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8dfc5e195bbef80aabd81596ef52a1277ee7143fe419efc3c4d8ba2754671756"}, + {file = 
"coverage-7.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1a78b656a4d12b0490ca72651fe4d9f5e07e3c6461063a9b6265ee45eb2bdd35"}, + {file = "coverage-7.4.1-cp39-cp39-win32.whl", hash = "sha256:f90515974b39f4dea2f27c0959688621b46d96d5a626cf9c53dbc653a895c05c"}, + {file = "coverage-7.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:64e723ca82a84053dd7bfcc986bdb34af8d9da83c521c19d6b472bc6880e191a"}, + {file = "coverage-7.4.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:32a8d985462e37cfdab611a6f95b09d7c091d07668fdc26e47a725ee575fe166"}, + {file = "coverage-7.4.1.tar.gz", hash = "sha256:1ed4b95480952b1a26d863e546fa5094564aa0065e1e5f0d4d0041f293251d04"}, ] [package.dependencies] @@ -1025,10 +1025,13 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-api-python-client" +version = "2.115.0" description = "Google API Client Library for Python" optional = false python-versions = ">=3.7" files = [ + {file = "google-api-python-client-2.115.0.tar.gz", hash = "sha256:96af11376535236ba600ebbe23588cfe003ec9b74e66dd6ddb53aa3ec87e1b52"}, + {file = "google_api_python_client-2.115.0-py2.py3-none-any.whl", hash = "sha256:26178e33684763099142e2cad201057bd27d4efefd859a495aac21ab3e6129c2"}, ] [package.dependencies] @@ -1040,13 +1043,13 @@ uritemplate = ">=3.0.1,<5" [[package]] name = "google-auth" -version = "2.26.2" +version = "2.27.0" description = "Google Authentication Library" optional = false python-versions = ">=3.7" files = [ - {file = "google-auth-2.26.2.tar.gz", hash = "sha256:97327dbbf58cccb58fc5a1712bba403ae76668e64814eb30f7316f7e27126b81"}, - {file = "google_auth-2.26.2-py2.py3-none-any.whl", hash = "sha256:3f445c8ce9b61ed6459aad86d8ccdba4a9afed841b2d1451a11ef4db08957424"}, + {file = "google-auth-2.27.0.tar.gz", hash = "sha256:e863a56ccc2d8efa83df7a80272601e43487fa9a728a376205c86c26aaefa821"}, + {file = "google_auth-2.27.0-py2.py3-none-any.whl", hash = "sha256:8e4bad367015430ff253fe49d500fdc3396c1a434db5740828c728e45bcce245"}, ] [package.dependencies] @@ -1610,13 +1613,13 @@ files = [ [[package]] name = "jsonschema" -version = "4.21.0" +version = "4.21.1" description = "An implementation of JSON Schema validation for Python" optional = false python-versions = ">=3.8" files = [ - {file = "jsonschema-4.21.0-py3-none-any.whl", hash = "sha256:70a09719d375c0a2874571b363c8a24be7df8071b80c9aa76bc4551e7297c63c"}, - {file = "jsonschema-4.21.0.tar.gz", hash = "sha256:3ba18e27f7491ea4a1b22edce00fb820eec968d397feb3f9cb61d5894bb38167"}, + {file = "jsonschema-4.21.1-py3-none-any.whl", hash = "sha256:7996507afae316306f9e2290407761157c6f78002dcf7419acb99822143d1c6f"}, + {file = "jsonschema-4.21.1.tar.gz", hash = "sha256:85727c00279f5fa6bedbe6238d2aa6403bedd8b4864ab11207d07df3cc1b2ee5"}, ] [package.dependencies] @@ -1772,13 +1775,13 @@ test = ["flaky", "ipykernel", "pre-commit", "pytest (>=7.0)", "pytest-console-sc [[package]] name = "jupyter-server-terminals" -version = "0.5.1" +version = "0.5.2" description = "A Jupyter Server Extension Providing Terminals." 
optional = false python-versions = ">=3.8" files = [ - {file = "jupyter_server_terminals-0.5.1-py3-none-any.whl", hash = "sha256:5e63e947ddd97bb2832db5ef837a258d9ccd4192cd608c1270850ad947ae5dd7"}, - {file = "jupyter_server_terminals-0.5.1.tar.gz", hash = "sha256:16d3be9cf48be6a1f943f3a6c93c033be259cf4779184c66421709cf63dccfea"}, + {file = "jupyter_server_terminals-0.5.2-py3-none-any.whl", hash = "sha256:1b80c12765da979513c42c90215481bbc39bd8ae7c0350b4f85bc3eb58d0fa80"}, + {file = "jupyter_server_terminals-0.5.2.tar.gz", hash = "sha256:396b5ccc0881e550bf0ee7012c6ef1b53edbde69e67cab1d56e89711b46052e8"}, ] [package.dependencies] @@ -1791,13 +1794,13 @@ test = ["jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-jupyter[server] (> [[package]] name = "jupyterlab" -version = "4.0.10" +version = "4.0.11" description = "JupyterLab computational environment" optional = false python-versions = ">=3.8" files = [ - {file = "jupyterlab-4.0.10-py3-none-any.whl", hash = "sha256:fe010ad9e37017488b468632ef2ead255fc7c671c5b64d9ca13e1f7b7e665c37"}, - {file = "jupyterlab-4.0.10.tar.gz", hash = "sha256:46177eb8ede70dc73be922ac99f8ef943bdc2dfbc6a31b353c4bde848a35dee1"}, + {file = "jupyterlab-4.0.11-py3-none-any.whl", hash = "sha256:536bf0e78723153a5016ca7efb88ed0ecd7070d3f1555d5b0e2770658f900a3c"}, + {file = "jupyterlab-4.0.11.tar.gz", hash = "sha256:d1aec24712566bc25a36229788242778e498ca4088028e2f9aa156b8b7fdc8fc"}, ] [package.dependencies] @@ -2152,10 +2155,13 @@ test = ["flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "nbconvert (>= [[package]] name = "nbconvert" +version = "7.14.2" description = "Converting Jupyter Notebooks" optional = false python-versions = ">=3.8" files = [ + {file = "nbconvert-7.14.2-py3-none-any.whl", hash = "sha256:db28590cef90f7faf2ebbc71acd402cbecf13d29176df728c0a9025a49345ea1"}, + {file = "nbconvert-7.14.2.tar.gz", hash = "sha256:a7f8808fd4e082431673ac538400218dd45efd076fbeb07cc6e5aa5a3a4e949e"}, ] [package.dependencies] @@ -2208,13 +2214,13 @@ test = ["pep440", "pre-commit", "pytest", "testpath"] [[package]] name = "nest-asyncio" -version = "1.5.9" +version = "1.6.0" description = "Patch asyncio to allow nested event loops" optional = false python-versions = ">=3.5" files = [ - {file = "nest_asyncio-1.5.9-py3-none-any.whl", hash = "sha256:61ec07ef052e72e3de22045b81b2cc7d71fceb04c568ba0b2e4b2f9f5231bec2"}, - {file = "nest_asyncio-1.5.9.tar.gz", hash = "sha256:d1e1144e9c6e3e6392e0fcf5211cb1c8374b5648a98f1ebe48e5336006b41907"}, + {file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"}, + {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"}, ] [[package]] @@ -2237,13 +2243,13 @@ test = ["codecov (>=2.1)", "pytest (>=7.2)", "pytest-cov (>=4.0)"] [[package]] name = "notebook" -version = "7.0.6" +version = "7.0.7" description = "Jupyter Notebook - A web-based notebook environment for interactive computing" optional = false python-versions = ">=3.8" files = [ - {file = "notebook-7.0.6-py3-none-any.whl", hash = "sha256:0fe8f67102fea3744fedf652e4c15339390902ca70c5a31c4f547fa23da697cc"}, - {file = "notebook-7.0.6.tar.gz", hash = "sha256:ec6113b06529019f7f287819af06c97a2baf7a95ac21a8f6e32192898e9f9a58"}, + {file = "notebook-7.0.7-py3-none-any.whl", hash = "sha256:289b606d7e173f75a18beb1406ef411b43f97f7a9c55ba03efa3622905a62346"}, + {file = "notebook-7.0.7.tar.gz", hash = 
"sha256:3bcff00c17b3ac142ef5f436d50637d936b274cfa0b41f6ac0175363de9b4e09"}, ] [package.dependencies] @@ -2465,13 +2471,13 @@ files = [ [[package]] name = "overrides" -version = "7.4.0" +version = "7.6.0" description = "A decorator to automatically detect mismatch when overriding a method." optional = false python-versions = ">=3.6" files = [ - {file = "overrides-7.4.0-py3-none-any.whl", hash = "sha256:3ad24583f86d6d7a49049695efe9933e67ba62f0c7625d53c59fa832ce4b8b7d"}, - {file = "overrides-7.4.0.tar.gz", hash = "sha256:9502a3cca51f4fac40b5feca985b6703a5c1f6ad815588a7ca9e285b9dca6757"}, + {file = "overrides-7.6.0-py3-none-any.whl", hash = "sha256:c36e6635519ea9c5b043b65c36d4b886aee8bd45b7d4681d2a6df0898df4b654"}, + {file = "overrides-7.6.0.tar.gz", hash = "sha256:01e15bbbf15b766f0675c275baa1878bd1c7dc9bc7b9ee13e677cdba93dc1bd9"}, ] [[package]] @@ -2638,13 +2644,13 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co [[package]] name = "pluggy" -version = "1.3.0" +version = "1.4.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" files = [ - {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, - {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, ] [package.extras] @@ -2701,27 +2707,27 @@ files = [ [[package]] name = "psutil" -version = "5.9.7" +version = "5.9.8" description = "Cross-platform lib for process and system monitoring in Python." 
optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ - {file = "psutil-5.9.7-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:0bd41bf2d1463dfa535942b2a8f0e958acf6607ac0be52265ab31f7923bcd5e6"}, - {file = "psutil-5.9.7-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:5794944462509e49d4d458f4dbfb92c47539e7d8d15c796f141f474010084056"}, - {file = "psutil-5.9.7-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:fe361f743cb3389b8efda21980d93eb55c1f1e3898269bc9a2a1d0bb7b1f6508"}, - {file = "psutil-5.9.7-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:e469990e28f1ad738f65a42dcfc17adaed9d0f325d55047593cb9033a0ab63df"}, - {file = "psutil-5.9.7-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:3c4747a3e2ead1589e647e64aad601981f01b68f9398ddf94d01e3dc0d1e57c7"}, - {file = "psutil-5.9.7-cp27-none-win32.whl", hash = "sha256:1d4bc4a0148fdd7fd8f38e0498639ae128e64538faa507df25a20f8f7fb2341c"}, - {file = "psutil-5.9.7-cp27-none-win_amd64.whl", hash = "sha256:4c03362e280d06bbbfcd52f29acd79c733e0af33d707c54255d21029b8b32ba6"}, - {file = "psutil-5.9.7-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ea36cc62e69a13ec52b2f625c27527f6e4479bca2b340b7a452af55b34fcbe2e"}, - {file = "psutil-5.9.7-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1132704b876e58d277168cd729d64750633d5ff0183acf5b3c986b8466cd0284"}, - {file = "psutil-5.9.7-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe8b7f07948f1304497ce4f4684881250cd859b16d06a1dc4d7941eeb6233bfe"}, - {file = "psutil-5.9.7-cp36-cp36m-win32.whl", hash = "sha256:b27f8fdb190c8c03914f908a4555159327d7481dac2f01008d483137ef3311a9"}, - {file = "psutil-5.9.7-cp36-cp36m-win_amd64.whl", hash = "sha256:44969859757f4d8f2a9bd5b76eba8c3099a2c8cf3992ff62144061e39ba8568e"}, - {file = "psutil-5.9.7-cp37-abi3-win32.whl", hash = "sha256:c727ca5a9b2dd5193b8644b9f0c883d54f1248310023b5ad3e92036c5e2ada68"}, - {file = "psutil-5.9.7-cp37-abi3-win_amd64.whl", hash = "sha256:f37f87e4d73b79e6c5e749440c3113b81d1ee7d26f21c19c47371ddea834f414"}, - {file = "psutil-5.9.7-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:032f4f2c909818c86cea4fe2cc407f1c0f0cde8e6c6d702b28b8ce0c0d143340"}, - {file = "psutil-5.9.7.tar.gz", hash = "sha256:3f02134e82cfb5d089fddf20bb2e03fd5cd52395321d1c8458a9e58500ff417c"}, + {file = "psutil-5.9.8-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:26bd09967ae00920df88e0352a91cff1a78f8d69b3ecabbfe733610c0af486c8"}, + {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:05806de88103b25903dff19bb6692bd2e714ccf9e668d050d144012055cbca73"}, + {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:611052c4bc70432ec770d5d54f64206aa7203a101ec273a0cd82418c86503bb7"}, + {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:50187900d73c1381ba1454cf40308c2bf6f34268518b3f36a9b663ca87e65e36"}, + {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:02615ed8c5ea222323408ceba16c60e99c3f91639b07da6373fb7e6539abc56d"}, + {file = "psutil-5.9.8-cp27-none-win32.whl", hash = "sha256:36f435891adb138ed3c9e58c6af3e2e6ca9ac2f365efe1f9cfef2794e6c93b4e"}, + {file = "psutil-5.9.8-cp27-none-win_amd64.whl", hash = "sha256:bd1184ceb3f87651a67b2708d4c3338e9b10c5df903f2e3776b62303b26cb631"}, + {file = "psutil-5.9.8-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:aee678c8720623dc456fa20659af736241f575d79429a0e5e9cf88ae0605cc81"}, + 
{file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cb6403ce6d8e047495a701dc7c5bd788add903f8986d523e3e20b98b733e421"}, + {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d06016f7f8625a1825ba3732081d77c94589dca78b7a3fc072194851e88461a4"}, + {file = "psutil-5.9.8-cp36-cp36m-win32.whl", hash = "sha256:7d79560ad97af658a0f6adfef8b834b53f64746d45b403f225b85c5c2c140eee"}, + {file = "psutil-5.9.8-cp36-cp36m-win_amd64.whl", hash = "sha256:27cc40c3493bb10de1be4b3f07cae4c010ce715290a5be22b98493509c6299e2"}, + {file = "psutil-5.9.8-cp37-abi3-win32.whl", hash = "sha256:bc56c2a1b0d15aa3eaa5a60c9f3f8e3e565303b465dbf57a1b730e7a2b9844e0"}, + {file = "psutil-5.9.8-cp37-abi3-win_amd64.whl", hash = "sha256:8db4c1b57507eef143a15a6884ca10f7c73876cdf5d51e713151c1236a0e68cf"}, + {file = "psutil-5.9.8-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:d16bbddf0693323b8c6123dd804100241da461e41d6e332fb0ba6058f630f8c8"}, + {file = "psutil-5.9.8.tar.gz", hash = "sha256:6be126e3225486dff286a8fb9a06246a5253f4c7c53b475ea5f5ac934e64194c"}, ] [package.extras] @@ -2812,47 +2818,47 @@ files = [ [[package]] name = "pydantic" -version = "1.10.13" +version = "1.10.14" description = "Data validation and settings management using python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.13-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:efff03cc7a4f29d9009d1c96ceb1e7a70a65cfe86e89d34e4a5f2ab1e5693737"}, - {file = "pydantic-1.10.13-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3ecea2b9d80e5333303eeb77e180b90e95eea8f765d08c3d278cd56b00345d01"}, - {file = "pydantic-1.10.13-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1740068fd8e2ef6eb27a20e5651df000978edce6da6803c2bef0bc74540f9548"}, - {file = "pydantic-1.10.13-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84bafe2e60b5e78bc64a2941b4c071a4b7404c5c907f5f5a99b0139781e69ed8"}, - {file = "pydantic-1.10.13-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bc0898c12f8e9c97f6cd44c0ed70d55749eaf783716896960b4ecce2edfd2d69"}, - {file = "pydantic-1.10.13-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:654db58ae399fe6434e55325a2c3e959836bd17a6f6a0b6ca8107ea0571d2e17"}, - {file = "pydantic-1.10.13-cp310-cp310-win_amd64.whl", hash = "sha256:75ac15385a3534d887a99c713aa3da88a30fbd6204a5cd0dc4dab3d770b9bd2f"}, - {file = "pydantic-1.10.13-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c553f6a156deb868ba38a23cf0df886c63492e9257f60a79c0fd8e7173537653"}, - {file = "pydantic-1.10.13-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5e08865bc6464df8c7d61439ef4439829e3ab62ab1669cddea8dd00cd74b9ffe"}, - {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e31647d85a2013d926ce60b84f9dd5300d44535a9941fe825dc349ae1f760df9"}, - {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:210ce042e8f6f7c01168b2d84d4c9eb2b009fe7bf572c2266e235edf14bacd80"}, - {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8ae5dd6b721459bfa30805f4c25880e0dd78fc5b5879f9f7a692196ddcb5a580"}, - {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f8e81fc5fb17dae698f52bdd1c4f18b6ca674d7068242b2aff075f588301bbb0"}, - {file = 
"pydantic-1.10.13-cp311-cp311-win_amd64.whl", hash = "sha256:61d9dce220447fb74f45e73d7ff3b530e25db30192ad8d425166d43c5deb6df0"}, - {file = "pydantic-1.10.13-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4b03e42ec20286f052490423682016fd80fda830d8e4119f8ab13ec7464c0132"}, - {file = "pydantic-1.10.13-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f59ef915cac80275245824e9d771ee939133be38215555e9dc90c6cb148aaeb5"}, - {file = "pydantic-1.10.13-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a1f9f747851338933942db7af7b6ee8268568ef2ed86c4185c6ef4402e80ba8"}, - {file = "pydantic-1.10.13-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:97cce3ae7341f7620a0ba5ef6cf043975cd9d2b81f3aa5f4ea37928269bc1b87"}, - {file = "pydantic-1.10.13-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:854223752ba81e3abf663d685f105c64150873cc6f5d0c01d3e3220bcff7d36f"}, - {file = "pydantic-1.10.13-cp37-cp37m-win_amd64.whl", hash = "sha256:b97c1fac8c49be29486df85968682b0afa77e1b809aff74b83081cc115e52f33"}, - {file = "pydantic-1.10.13-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c958d053453a1c4b1c2062b05cd42d9d5c8eb67537b8d5a7e3c3032943ecd261"}, - {file = "pydantic-1.10.13-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c5370a7edaac06daee3af1c8b1192e305bc102abcbf2a92374b5bc793818599"}, - {file = "pydantic-1.10.13-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d6f6e7305244bddb4414ba7094ce910560c907bdfa3501e9db1a7fd7eaea127"}, - {file = "pydantic-1.10.13-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d3a3c792a58e1622667a2837512099eac62490cdfd63bd407993aaf200a4cf1f"}, - {file = "pydantic-1.10.13-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c636925f38b8db208e09d344c7aa4f29a86bb9947495dd6b6d376ad10334fb78"}, - {file = "pydantic-1.10.13-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:678bcf5591b63cc917100dc50ab6caebe597ac67e8c9ccb75e698f66038ea953"}, - {file = "pydantic-1.10.13-cp38-cp38-win_amd64.whl", hash = "sha256:6cf25c1a65c27923a17b3da28a0bdb99f62ee04230c931d83e888012851f4e7f"}, - {file = "pydantic-1.10.13-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8ef467901d7a41fa0ca6db9ae3ec0021e3f657ce2c208e98cd511f3161c762c6"}, - {file = "pydantic-1.10.13-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:968ac42970f57b8344ee08837b62f6ee6f53c33f603547a55571c954a4225691"}, - {file = "pydantic-1.10.13-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9849f031cf8a2f0a928fe885e5a04b08006d6d41876b8bbd2fc68a18f9f2e3fd"}, - {file = "pydantic-1.10.13-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:56e3ff861c3b9c6857579de282ce8baabf443f42ffba355bf070770ed63e11e1"}, - {file = "pydantic-1.10.13-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f00790179497767aae6bcdc36355792c79e7bbb20b145ff449700eb076c5f96"}, - {file = "pydantic-1.10.13-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:75b297827b59bc229cac1a23a2f7a4ac0031068e5be0ce385be1462e7e17a35d"}, - {file = "pydantic-1.10.13-cp39-cp39-win_amd64.whl", hash = "sha256:e70ca129d2053fb8b728ee7d1af8e553a928d7e301a311094b8a0501adc8763d"}, - {file = "pydantic-1.10.13-py3-none-any.whl", hash = "sha256:b87326822e71bd5f313e7d3bfdc77ac3247035ac10b0c0618bd99dcf95b1e687"}, - {file = "pydantic-1.10.13.tar.gz", hash = "sha256:32c8b48dcd3b2ac4e78b0ba4af3a2c2eb6048cb75202f0ea7b34feb740efc340"}, + {file = 
"pydantic-1.10.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f4fcec873f90537c382840f330b90f4715eebc2bc9925f04cb92de593eae054"}, + {file = "pydantic-1.10.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e3a76f571970fcd3c43ad982daf936ae39b3e90b8a2e96c04113a369869dc87"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d886bd3c3fbeaa963692ef6b643159ccb4b4cefaf7ff1617720cbead04fd1d"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:798a3d05ee3b71967844a1164fd5bdb8c22c6d674f26274e78b9f29d81770c4e"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:23d47a4b57a38e8652bcab15a658fdb13c785b9ce217cc3a729504ab4e1d6bc9"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9f674b5c3bebc2eba401de64f29948ae1e646ba2735f884d1594c5f675d6f2a"}, + {file = "pydantic-1.10.14-cp310-cp310-win_amd64.whl", hash = "sha256:24a7679fab2e0eeedb5a8924fc4a694b3bcaac7d305aeeac72dd7d4e05ecbebf"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d578ac4bf7fdf10ce14caba6f734c178379bd35c486c6deb6f49006e1ba78a7"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa7790e94c60f809c95602a26d906eba01a0abee9cc24150e4ce2189352deb1b"}, + {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad4e10efa5474ed1a611b6d7f0d130f4aafadceb73c11d9e72823e8f508e663"}, + {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245f4f61f467cb3dfeced2b119afef3db386aec3d24a22a1de08c65038b255f"}, + {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:21efacc678a11114c765eb52ec0db62edffa89e9a562a94cbf8fa10b5db5c046"}, + {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:412ab4a3f6dbd2bf18aefa9f79c7cca23744846b31f1d6555c2ee2b05a2e14ca"}, + {file = "pydantic-1.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:e897c9f35281f7889873a3e6d6b69aa1447ceb024e8495a5f0d02ecd17742a7f"}, + {file = "pydantic-1.10.14-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d604be0f0b44d473e54fdcb12302495fe0467c56509a2f80483476f3ba92b33c"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42c7d17706911199798d4c464b352e640cab4351efe69c2267823d619a937e5"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:596f12a1085e38dbda5cbb874d0973303e34227b400b6414782bf205cc14940c"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bfb113860e9288d0886e3b9e49d9cf4a9d48b441f52ded7d96db7819028514cc"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bc3ed06ab13660b565eed80887fcfbc0070f0aa0691fbb351657041d3e874efe"}, + {file = "pydantic-1.10.14-cp37-cp37m-win_amd64.whl", hash = "sha256:ad8c2bc677ae5f6dbd3cf92f2c7dc613507eafe8f71719727cbc0a7dec9a8c01"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c37c28449752bb1f47975d22ef2882d70513c546f8f37201e0fec3a97b816eee"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49a46a0994dd551ec051986806122767cf144b9702e31d47f6d493c336462597"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:53e3819bd20a42470d6dd0fe7fc1c121c92247bca104ce608e609b59bc7a77ee"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbb503bbbbab0c588ed3cd21975a1d0d4163b87e360fec17a792f7d8c4ff29f"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:336709883c15c050b9c55a63d6c7ff09be883dbc17805d2b063395dd9d9d0022"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4ae57b4d8e3312d486e2498d42aed3ece7b51848336964e43abbf9671584e67f"}, + {file = "pydantic-1.10.14-cp38-cp38-win_amd64.whl", hash = "sha256:dba49d52500c35cfec0b28aa8b3ea5c37c9df183ffc7210b10ff2a415c125c4a"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c66609e138c31cba607d8e2a7b6a5dc38979a06c900815495b2d90ce6ded35b4"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d986e115e0b39604b9eee3507987368ff8148222da213cd38c359f6f57b3b347"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:646b2b12df4295b4c3148850c85bff29ef6d0d9621a8d091e98094871a62e5c7"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282613a5969c47c83a8710cc8bfd1e70c9223feb76566f74683af889faadc0ea"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:466669501d08ad8eb3c4fecd991c5e793c4e0bbd62299d05111d4f827cded64f"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:13e86a19dca96373dcf3190fcb8797d40a6f12f154a244a8d1e8e03b8f280593"}, + {file = "pydantic-1.10.14-cp39-cp39-win_amd64.whl", hash = "sha256:08b6ec0917c30861e3fe71a93be1648a2aa4f62f866142ba21670b24444d7fd8"}, + {file = "pydantic-1.10.14-py3-none-any.whl", hash = "sha256:8ee853cd12ac2ddbf0ecbac1c289f95882b2d4482258048079d13be700aa114c"}, + {file = "pydantic-1.10.14.tar.gz", hash = "sha256:46f17b832fe27de7850896f3afee50ea682220dd218f7e9c88d436788419dca6"}, ] [package.dependencies] @@ -3523,10 +3529,110 @@ files = [ [[package]] name = "rpds-py" +version = "0.17.1" description = "Python bindings to Rust's persistent data structures (rpds)" optional = false python-versions = ">=3.8" files = [ + {file = "rpds_py-0.17.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:4128980a14ed805e1b91a7ed551250282a8ddf8201a4e9f8f5b7e6225f54170d"}, + {file = "rpds_py-0.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ff1dcb8e8bc2261a088821b2595ef031c91d499a0c1b031c152d43fe0a6ecec8"}, + {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d65e6b4f1443048eb7e833c2accb4fa7ee67cc7d54f31b4f0555b474758bee55"}, + {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a71169d505af63bb4d20d23a8fbd4c6ce272e7bce6cc31f617152aa784436f29"}, + {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:436474f17733c7dca0fbf096d36ae65277e8645039df12a0fa52445ca494729d"}, + {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:10162fe3f5f47c37ebf6d8ff5a2368508fe22007e3077bf25b9c7d803454d921"}, + {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:720215373a280f78a1814becb1312d4e4d1077b1202a56d2b0815e95ccb99ce9"}, + {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:70fcc6c2906cfa5c6a552ba7ae2ce64b6c32f437d8f3f8eea49925b278a61453"}, + {file = "rpds_py-0.17.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:91e5a8200e65aaac342a791272c564dffcf1281abd635d304d6c4e6b495f29dc"}, + {file = "rpds_py-0.17.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:99f567dae93e10be2daaa896e07513dd4bf9c2ecf0576e0533ac36ba3b1d5394"}, + {file = "rpds_py-0.17.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:24e4900a6643f87058a27320f81336d527ccfe503984528edde4bb660c8c8d59"}, + {file = "rpds_py-0.17.1-cp310-none-win32.whl", hash = "sha256:0bfb09bf41fe7c51413f563373e5f537eaa653d7adc4830399d4e9bdc199959d"}, + {file = "rpds_py-0.17.1-cp310-none-win_amd64.whl", hash = "sha256:20de7b7179e2031a04042e85dc463a93a82bc177eeba5ddd13ff746325558aa6"}, + {file = "rpds_py-0.17.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:65dcf105c1943cba45d19207ef51b8bc46d232a381e94dd38719d52d3980015b"}, + {file = "rpds_py-0.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:01f58a7306b64e0a4fe042047dd2b7d411ee82e54240284bab63e325762c1147"}, + {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:071bc28c589b86bc6351a339114fb7a029f5cddbaca34103aa573eba7b482382"}, + {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ae35e8e6801c5ab071b992cb2da958eee76340e6926ec693b5ff7d6381441745"}, + {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149c5cd24f729e3567b56e1795f74577aa3126c14c11e457bec1b1c90d212e38"}, + {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e796051f2070f47230c745d0a77a91088fbee2cc0502e9b796b9c6471983718c"}, + {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60e820ee1004327609b28db8307acc27f5f2e9a0b185b2064c5f23e815f248f8"}, + {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1957a2ab607f9added64478a6982742eb29f109d89d065fa44e01691a20fc20a"}, + {file = "rpds_py-0.17.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8587fd64c2a91c33cdc39d0cebdaf30e79491cc029a37fcd458ba863f8815383"}, + {file = "rpds_py-0.17.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4dc889a9d8a34758d0fcc9ac86adb97bab3fb7f0c4d29794357eb147536483fd"}, + {file = "rpds_py-0.17.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:2953937f83820376b5979318840f3ee47477d94c17b940fe31d9458d79ae7eea"}, + {file = "rpds_py-0.17.1-cp311-none-win32.whl", hash = "sha256:1bfcad3109c1e5ba3cbe2f421614e70439f72897515a96c462ea657261b96518"}, + {file = "rpds_py-0.17.1-cp311-none-win_amd64.whl", hash = "sha256:99da0a4686ada4ed0f778120a0ea8d066de1a0a92ab0d13ae68492a437db78bf"}, + {file = "rpds_py-0.17.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:1dc29db3900cb1bb40353772417800f29c3d078dbc8024fd64655a04ee3c4bdf"}, + {file = "rpds_py-0.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:82ada4a8ed9e82e443fcef87e22a3eed3654dd3adf6e3b3a0deb70f03e86142a"}, + {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d36b2b59e8cc6e576f8f7b671e32f2ff43153f0ad6d0201250a7c07f25d570e"}, + {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3677fcca7fb728c86a78660c7fb1b07b69b281964673f486ae72860e13f512ad"}, + {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:516fb8c77805159e97a689e2f1c80655c7658f5af601c34ffdb916605598cda2"}, + {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df3b6f45ba4515632c5064e35ca7f31d51d13d1479673185ba8f9fefbbed58b9"}, + {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a967dd6afda7715d911c25a6ba1517975acd8d1092b2f326718725461a3d33f9"}, + {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dbbb95e6fc91ea3102505d111b327004d1c4ce98d56a4a02e82cd451f9f57140"}, + {file = "rpds_py-0.17.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:02866e060219514940342a1f84303a1ef7a1dad0ac311792fbbe19b521b489d2"}, + {file = "rpds_py-0.17.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:2528ff96d09f12e638695f3a2e0c609c7b84c6df7c5ae9bfeb9252b6fa686253"}, + {file = "rpds_py-0.17.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bd345a13ce06e94c753dab52f8e71e5252aec1e4f8022d24d56decd31e1b9b23"}, + {file = "rpds_py-0.17.1-cp312-none-win32.whl", hash = "sha256:2a792b2e1d3038daa83fa474d559acfd6dc1e3650ee93b2662ddc17dbff20ad1"}, + {file = "rpds_py-0.17.1-cp312-none-win_amd64.whl", hash = "sha256:292f7344a3301802e7c25c53792fae7d1593cb0e50964e7bcdcc5cf533d634e3"}, + {file = "rpds_py-0.17.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:8ffe53e1d8ef2520ebcf0c9fec15bb721da59e8ef283b6ff3079613b1e30513d"}, + {file = "rpds_py-0.17.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4341bd7579611cf50e7b20bb8c2e23512a3dc79de987a1f411cb458ab670eb90"}, + {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f4eb548daf4836e3b2c662033bfbfc551db58d30fd8fe660314f86bf8510b93"}, + {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b686f25377f9c006acbac63f61614416a6317133ab7fafe5de5f7dc8a06d42eb"}, + {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4e21b76075c01d65d0f0f34302b5a7457d95721d5e0667aea65e5bb3ab415c25"}, + {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b86b21b348f7e5485fae740d845c65a880f5d1eda1e063bc59bef92d1f7d0c55"}, + {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f175e95a197f6a4059b50757a3dca33b32b61691bdbd22c29e8a8d21d3914cae"}, + {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1701fc54460ae2e5efc1dd6350eafd7a760f516df8dbe51d4a1c79d69472fbd4"}, + {file = "rpds_py-0.17.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:9051e3d2af8f55b42061603e29e744724cb5f65b128a491446cc029b3e2ea896"}, + {file = "rpds_py-0.17.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:7450dbd659fed6dd41d1a7d47ed767e893ba402af8ae664c157c255ec6067fde"}, + {file = "rpds_py-0.17.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:5a024fa96d541fd7edaa0e9d904601c6445e95a729a2900c5aec6555fe921ed6"}, + {file = "rpds_py-0.17.1-cp38-none-win32.whl", hash = "sha256:da1ead63368c04a9bded7904757dfcae01eba0e0f9bc41d3d7f57ebf1c04015a"}, + {file = "rpds_py-0.17.1-cp38-none-win_amd64.whl", hash = "sha256:841320e1841bb53fada91c9725e766bb25009cfd4144e92298db296fb6c894fb"}, + {file = "rpds_py-0.17.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:f6c43b6f97209e370124baf2bf40bb1e8edc25311a158867eb1c3a5d449ebc7a"}, + {file = "rpds_py-0.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:5e7d63ec01fe7c76c2dbb7e972fece45acbb8836e72682bde138e7e039906e2c"}, + {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81038ff87a4e04c22e1d81f947c6ac46f122e0c80460b9006e6517c4d842a6ec"}, + {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:810685321f4a304b2b55577c915bece4c4a06dfe38f6e62d9cc1d6ca8ee86b99"}, + {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:25f071737dae674ca8937a73d0f43f5a52e92c2d178330b4c0bb6ab05586ffa6"}, + {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa5bfb13f1e89151ade0eb812f7b0d7a4d643406caaad65ce1cbabe0a66d695f"}, + {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfe07308b311a8293a0d5ef4e61411c5c20f682db6b5e73de6c7c8824272c256"}, + {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a000133a90eea274a6f28adc3084643263b1e7c1a5a66eb0a0a7a36aa757ed74"}, + {file = "rpds_py-0.17.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d0e8a6434a3fbf77d11448c9c25b2f25244226cfbec1a5159947cac5b8c5fa4"}, + {file = "rpds_py-0.17.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:efa767c220d94aa4ac3a6dd3aeb986e9f229eaf5bce92d8b1b3018d06bed3772"}, + {file = "rpds_py-0.17.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:dbc56680ecf585a384fbd93cd42bc82668b77cb525343170a2d86dafaed2a84b"}, + {file = "rpds_py-0.17.1-cp39-none-win32.whl", hash = "sha256:270987bc22e7e5a962b1094953ae901395e8c1e1e83ad016c5cfcfff75a15a3f"}, + {file = "rpds_py-0.17.1-cp39-none-win_amd64.whl", hash = "sha256:2a7b2f2f56a16a6d62e55354dd329d929560442bd92e87397b7a9586a32e3e76"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a3264e3e858de4fc601741498215835ff324ff2482fd4e4af61b46512dd7fc83"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:f2f3b28b40fddcb6c1f1f6c88c6f3769cd933fa493ceb79da45968a21dccc920"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9584f8f52010295a4a417221861df9bea4c72d9632562b6e59b3c7b87a1522b7"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c64602e8be701c6cfe42064b71c84ce62ce66ddc6422c15463fd8127db3d8066"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:060f412230d5f19fc8c8b75f315931b408d8ebf56aec33ef4168d1b9e54200b1"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b9412abdf0ba70faa6e2ee6c0cc62a8defb772e78860cef419865917d86c7342"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9737bdaa0ad33d34c0efc718741abaafce62fadae72c8b251df9b0c823c63b22"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9f0e4dc0f17dcea4ab9d13ac5c666b6b5337042b4d8f27e01b70fae41dd65c57"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:1db228102ab9d1ff4c64148c96320d0be7044fa28bd865a9ce628ce98da5973d"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:d8bbd8e56f3ba25a7d0cf980fc42b34028848a53a0e36c9918550e0280b9d0b6"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = 
"sha256:be22ae34d68544df293152b7e50895ba70d2a833ad9566932d750d3625918b82"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:bf046179d011e6114daf12a534d874958b039342b347348a78b7cdf0dd9d6041"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:1a746a6d49665058a5896000e8d9d2f1a6acba8a03b389c1e4c06e11e0b7f40d"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0b8bf5b8db49d8fd40f54772a1dcf262e8be0ad2ab0206b5a2ec109c176c0a4"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f7f4cb1f173385e8a39c29510dd11a78bf44e360fb75610594973f5ea141028b"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7fbd70cb8b54fe745301921b0816c08b6d917593429dfc437fd024b5ba713c58"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9bdf1303df671179eaf2cb41e8515a07fc78d9d00f111eadbe3e14262f59c3d0"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fad059a4bd14c45776600d223ec194e77db6c20255578bb5bcdd7c18fd169361"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3664d126d3388a887db44c2e293f87d500c4184ec43d5d14d2d2babdb4c64cad"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:698ea95a60c8b16b58be9d854c9f993c639f5c214cf9ba782eca53a8789d6b19"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:c3d2010656999b63e628a3c694f23020322b4178c450dc478558a2b6ef3cb9bb"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:938eab7323a736533f015e6069a7d53ef2dcc841e4e533b782c2bfb9fb12d84b"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1e626b365293a2142a62b9a614e1f8e331b28f3ca57b9f05ebbf4cf2a0f0bdc5"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:380e0df2e9d5d5d339803cfc6d183a5442ad7ab3c63c2a0982e8c824566c5ccc"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b760a56e080a826c2e5af09002c1a037382ed21d03134eb6294812dda268c811"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5576ee2f3a309d2bb403ec292d5958ce03953b0e57a11d224c1f134feaf8c40f"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f3c3461ebb4c4f1bbc70b15d20b565759f97a5aaf13af811fcefc892e9197ba"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:637b802f3f069a64436d432117a7e58fab414b4e27a7e81049817ae94de45d8d"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffee088ea9b593cc6160518ba9bd319b5475e5f3e578e4552d63818773c6f56a"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3ac732390d529d8469b831949c78085b034bff67f584559340008d0f6041a049"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:93432e747fb07fa567ad9cc7aaadd6e29710e515aabf939dfbed8046041346c6"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:7b7d9ca34542099b4e185b3c2a2b2eda2e318a7dbde0b0d83357a6d4421b5296"}, + {file = 
"rpds_py-0.17.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:0387ce69ba06e43df54e43968090f3626e231e4bc9150e4c3246947567695f68"}, + {file = "rpds_py-0.17.1.tar.gz", hash = "sha256:0210b2668f24c078307260bf88bdac9d6f1093635df5123789bfee4d8d7fc8e7"}, ] [[package]] @@ -3652,45 +3758,45 @@ synapse = ["synapseclient (>=3.0.0,<4.0.0)"] [[package]] name = "scipy" -version = "1.11.4" +version = "1.12.0" description = "Fundamental algorithms for scientific computing in Python" optional = false python-versions = ">=3.9" files = [ - {file = "scipy-1.11.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bc9a714581f561af0848e6b69947fda0614915f072dfd14142ed1bfe1b806710"}, - {file = "scipy-1.11.4-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:cf00bd2b1b0211888d4dc75656c0412213a8b25e80d73898083f402b50f47e41"}, - {file = "scipy-1.11.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9999c008ccf00e8fbcce1236f85ade5c569d13144f77a1946bef8863e8f6eb4"}, - {file = "scipy-1.11.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:933baf588daa8dc9a92c20a0be32f56d43faf3d1a60ab11b3f08c356430f6e56"}, - {file = "scipy-1.11.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8fce70f39076a5aa62e92e69a7f62349f9574d8405c0a5de6ed3ef72de07f446"}, - {file = "scipy-1.11.4-cp310-cp310-win_amd64.whl", hash = "sha256:6550466fbeec7453d7465e74d4f4b19f905642c89a7525571ee91dd7adabb5a3"}, - {file = "scipy-1.11.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f313b39a7e94f296025e3cffc2c567618174c0b1dde173960cf23808f9fae4be"}, - {file = "scipy-1.11.4-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:1b7c3dca977f30a739e0409fb001056484661cb2541a01aba0bb0029f7b68db8"}, - {file = "scipy-1.11.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00150c5eae7b610c32589dda259eacc7c4f1665aedf25d921907f4d08a951b1c"}, - {file = "scipy-1.11.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:530f9ad26440e85766509dbf78edcfe13ffd0ab7fec2560ee5c36ff74d6269ff"}, - {file = "scipy-1.11.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5e347b14fe01003d3b78e196e84bd3f48ffe4c8a7b8a1afbcb8f5505cb710993"}, - {file = "scipy-1.11.4-cp311-cp311-win_amd64.whl", hash = "sha256:acf8ed278cc03f5aff035e69cb511741e0418681d25fbbb86ca65429c4f4d9cd"}, - {file = "scipy-1.11.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:028eccd22e654b3ea01ee63705681ee79933652b2d8f873e7949898dda6d11b6"}, - {file = "scipy-1.11.4-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:2c6ff6ef9cc27f9b3db93a6f8b38f97387e6e0591600369a297a50a8e96e835d"}, - {file = "scipy-1.11.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b030c6674b9230d37c5c60ab456e2cf12f6784596d15ce8da9365e70896effc4"}, - {file = "scipy-1.11.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad669df80528aeca5f557712102538f4f37e503f0c5b9541655016dd0932ca79"}, - {file = "scipy-1.11.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ce7fff2e23ab2cc81ff452a9444c215c28e6305f396b2ba88343a567feec9660"}, - {file = "scipy-1.11.4-cp312-cp312-win_amd64.whl", hash = "sha256:36750b7733d960d7994888f0d148d31ea3017ac15eef664194b4ef68d36a4a97"}, - {file = "scipy-1.11.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6e619aba2df228a9b34718efb023966da781e89dd3d21637b27f2e54db0410d7"}, - {file = "scipy-1.11.4-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:f3cd9e7b3c2c1ec26364856f9fbe78695fe631150f94cd1c22228456404cf1ec"}, - {file = 
"scipy-1.11.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d10e45a6c50211fe256da61a11c34927c68f277e03138777bdebedd933712fea"}, - {file = "scipy-1.11.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91af76a68eeae0064887a48e25c4e616fa519fa0d38602eda7e0f97d65d57937"}, - {file = "scipy-1.11.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6df1468153a31cf55ed5ed39647279beb9cfb5d3f84369453b49e4b8502394fd"}, - {file = "scipy-1.11.4-cp39-cp39-win_amd64.whl", hash = "sha256:ee410e6de8f88fd5cf6eadd73c135020bfbbbdfcd0f6162c36a7638a1ea8cc65"}, - {file = "scipy-1.11.4.tar.gz", hash = "sha256:90a2b78e7f5733b9de748f589f09225013685f9b218275257f8a8168ededaeaa"}, + {file = "scipy-1.12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:78e4402e140879387187f7f25d91cc592b3501a2e51dfb320f48dfb73565f10b"}, + {file = "scipy-1.12.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:f5f00ebaf8de24d14b8449981a2842d404152774c1a1d880c901bf454cb8e2a1"}, + {file = "scipy-1.12.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e53958531a7c695ff66c2e7bb7b79560ffdc562e2051644c5576c39ff8efb563"}, + {file = "scipy-1.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e32847e08da8d895ce09d108a494d9eb78974cf6de23063f93306a3e419960c"}, + {file = "scipy-1.12.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4c1020cad92772bf44b8e4cdabc1df5d87376cb219742549ef69fc9fd86282dd"}, + {file = "scipy-1.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:75ea2a144096b5e39402e2ff53a36fecfd3b960d786b7efd3c180e29c39e53f2"}, + {file = "scipy-1.12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:408c68423f9de16cb9e602528be4ce0d6312b05001f3de61fe9ec8b1263cad08"}, + {file = "scipy-1.12.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:5adfad5dbf0163397beb4aca679187d24aec085343755fcdbdeb32b3679f254c"}, + {file = "scipy-1.12.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3003652496f6e7c387b1cf63f4bb720951cfa18907e998ea551e6de51a04467"}, + {file = "scipy-1.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b8066bce124ee5531d12a74b617d9ac0ea59245246410e19bca549656d9a40a"}, + {file = "scipy-1.12.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8bee4993817e204d761dba10dbab0774ba5a8612e57e81319ea04d84945375ba"}, + {file = "scipy-1.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:a24024d45ce9a675c1fb8494e8e5244efea1c7a09c60beb1eeb80373d0fecc70"}, + {file = "scipy-1.12.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e7e76cc48638228212c747ada851ef355c2bb5e7f939e10952bc504c11f4e372"}, + {file = "scipy-1.12.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:f7ce148dffcd64ade37b2df9315541f9adad6efcaa86866ee7dd5db0c8f041c3"}, + {file = "scipy-1.12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c39f92041f490422924dfdb782527a4abddf4707616e07b021de33467f917bc"}, + {file = "scipy-1.12.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a7ebda398f86e56178c2fa94cad15bf457a218a54a35c2a7b4490b9f9cb2676c"}, + {file = "scipy-1.12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:95e5c750d55cf518c398a8240571b0e0782c2d5a703250872f36eaf737751338"}, + {file = "scipy-1.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:e646d8571804a304e1da01040d21577685ce8e2db08ac58e543eaca063453e1c"}, + {file = "scipy-1.12.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:913d6e7956c3a671de3b05ccb66b11bc293f56bfdef040583a7221d9e22a2e35"}, + {file = "scipy-1.12.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:bba1b0c7256ad75401c73e4b3cf09d1f176e9bd4248f0d3112170fb2ec4db067"}, + {file = "scipy-1.12.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:730badef9b827b368f351eacae2e82da414e13cf8bd5051b4bdfd720271a5371"}, + {file = "scipy-1.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6546dc2c11a9df6926afcbdd8a3edec28566e4e785b915e849348c6dd9f3f490"}, + {file = "scipy-1.12.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:196ebad3a4882081f62a5bf4aeb7326aa34b110e533aab23e4374fcccb0890dc"}, + {file = "scipy-1.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:b360f1b6b2f742781299514e99ff560d1fe9bd1bff2712894b52abe528d1fd1e"}, + {file = "scipy-1.12.0.tar.gz", hash = "sha256:4bf5abab8a36d20193c698b0f1fc282c1d083c94723902c447e5d2f1780936a3"}, ] [package.dependencies] -numpy = ">=1.21.6,<1.28.0" +numpy = ">=1.22.4,<1.29.0" [package.extras] dev = ["click", "cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy", "pycodestyle", "pydevtool", "rich-click", "ruff", "types-psutil", "typing_extensions"] doc = ["jupytext", "matplotlib (>2)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (==0.9.0)", "sphinx (!=4.1.0)", "sphinx-design (>=0.2.0)"] -test = ["asv", "gmpy2", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] +test = ["asv", "gmpy2", "hypothesis", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] [[package]] name = "secretstorage" @@ -4207,13 +4313,13 @@ files = [ [[package]] name = "toolz" -version = "0.12.0" +version = "0.12.1" description = "List processing tools and functional utilities" optional = false -python-versions = ">=3.5" +python-versions = ">=3.7" files = [ - {file = "toolz-0.12.0-py3-none-any.whl", hash = "sha256:2059bd4148deb1884bb0eb770a3cde70e7f954cfbbdc2285f1f2de01fd21eb6f"}, - {file = "toolz-0.12.0.tar.gz", hash = "sha256:88c570861c440ee3f2f6037c4654613228ff40c93a6c25e0eba70d17282c6194"}, + {file = "toolz-0.12.1-py3-none-any.whl", hash = "sha256:d22731364c07d72eea0a0ad45bafb2c2937ab6fd38a3507bf55eae8744aa7d85"}, + {file = "toolz-0.12.1.tar.gz", hash = "sha256:ecca342664893f177a13dac0e6b41cbd8ac25a358e5f215316d43e2100224f4d"}, ] [[package]] @@ -4578,3 +4684,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = ">=3.9.0,<3.11" +content-hash = "e7a53bb762e4472eb7fefd0ea60c026f3ec037a8c5e268e613e959500cde0ebf" From 8d1d90072fcf14ed8c44eb9e462f1aa119c31850 Mon Sep 17 00:00:00 2001 From: GiaJordan Date: Fri, 26 Jan 2024 14:03:32 -0700 Subject: [PATCH 086/199] run black --- schematic/utils/df_utils.py | 21 +++++++++++++-------- 1 file changed, 13 insertions(+), 8 deletions(-) diff --git a/schematic/utils/df_utils.py b/schematic/utils/df_utils.py index 483602848..bb3c63073 100644 --- a/schematic/utils/df_utils.py +++ b/schematic/utils/df_utils.py @@ -10,7 +10,12 @@ logger = logging.getLogger(__name__) -def load_df(file_path: str, preserve_raw_input: bool = True, data_model: bool = False, **load_args) -> pd.DataFrame: +def load_df( + file_path: str, + preserve_raw_input: bool = True, + data_model: bool = False, + **load_args, +) -> pd.DataFrame: """ Universal function to load CSVs and return DataFrames Parses string entries to convert as appropriate to type int, float, and pandas 
timestamp @@ -27,13 +32,13 @@ def load_df(file_path: str, preserve_raw_input: bool = True, data_model: bool = large_manifest_cutoff_size = 1000 # start performance timer t_load_df = perf_counter() - - #Read CSV to df as type specified in kwargs - org_df = pd.read_csv(file_path, keep_default_na = True, encoding='utf8', **load_args) - #only trim if not data model csv + # Read CSV to df as type specified in kwargs + org_df = pd.read_csv(file_path, keep_default_na=True, encoding="utf8", **load_args) + + # only trim if not data model csv if not data_model: - org_df=trim_commas_df(org_df) + org_df = trim_commas_df(org_df) # If type inference not allowed: trim and return if preserve_raw_input: @@ -76,8 +81,8 @@ def load_df(file_path: str, preserve_raw_input: bool = True, data_model: bool = float_df[col].fillna(org_df[col][float_df[col].isna()], inplace=True) # Store values that were converted to type int in the final dataframe - processed_df=float_df.mask(ints_tf_df, other = ints) - + processed_df = float_df.mask(ints_tf_df, other=ints) + # log manifest load and processing time logger.debug(f"Load Elapsed time {perf_counter()-t_load_df}") return processed_df From 122575903c2618d195553ecdaf8a55c2cb696041 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Thu, 11 Jan 2024 13:54:25 -0700 Subject: [PATCH 087/199] update `pandas` and `schematic_db` deps --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 8413cda00..bd9af508d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -49,7 +49,7 @@ jsonschema = "^4.0.0" networkx = ">=2.2.8" numpy = "^1.21.1" oauth2client = "^4.1.0" # Specified because of bug in version ^4.0.0 -pandas = "^1.3.1" +pandas = "^2.0.0" pygsheets = "^2.0.4" PyYAML = "^6.0.0" rdflib = "^6.0.0" @@ -70,7 +70,7 @@ Flask-Cors = "^3.0.10" pdoc = "^12.2.0" dateparser = "^1.1.4" pandarallel = "^1.6.4" -schematic-db = {version = "0.0.dev33", extras = ["synapse"]} +schematic-db = {version = "0.0.34", extras = ["synapse"]} pyopenssl = "^23.0.0" typing-extensions = "<4.6.0" dataclasses-json = "^0.6.1" From b09fac8bdc9a4c834ffacb1851788ef3b5c312e4 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Thu, 11 Jan 2024 15:06:04 -0700 Subject: [PATCH 088/199] regen `.lock` file --- poetry.lock | 750 ++++++++++++++++++++++++++-------------------------- 1 file changed, 372 insertions(+), 378 deletions(-) diff --git a/poetry.lock b/poetry.lock index e1457f930..cb25afc9f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -236,22 +236,19 @@ files = [ [[package]] name = "beautifulsoup4" -version = "4.12.3" +version = "4.12.2" description = "Screen-scraping library" optional = false python-versions = ">=3.6.0" files = [ - {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"}, - {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"}, + {file = "beautifulsoup4-4.12.2-py3-none-any.whl", hash = "sha256:bd2520ca0d9d7d12694a53d44ac482d181b4ec1888909b035a3dbf40d0f57d4a"}, + {file = "beautifulsoup4-4.12.2.tar.gz", hash = "sha256:492bbc69dca35d12daac71c4db1bfff0c876c00ef4a2ffacce226d4638eb72da"}, ] [package.dependencies] soupsieve = ">1.2" [package.extras] -cchardet = ["cchardet"] -chardet = ["chardet"] -charset-normalizer = ["charset-normalizer"] html5lib = ["html5lib"] lxml = ["lxml"] @@ -607,63 +604,63 @@ tests = 
["MarkupSafe (>=0.23)", "aiohttp (>=2.3.10,<4)", "aiohttp-jinja2 (>=0.14 [[package]] name = "coverage" -version = "7.4.1" +version = "7.4.0" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:077d366e724f24fc02dbfe9d946534357fda71af9764ff99d73c3c596001bbd7"}, - {file = "coverage-7.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0193657651f5399d433c92f8ae264aff31fc1d066deee4b831549526433f3f61"}, - {file = "coverage-7.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d17bbc946f52ca67adf72a5ee783cd7cd3477f8f8796f59b4974a9b59cacc9ee"}, - {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3277f5fa7483c927fe3a7b017b39351610265308f5267ac6d4c2b64cc1d8d25"}, - {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dceb61d40cbfcf45f51e59933c784a50846dc03211054bd76b421a713dcdf19"}, - {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6008adeca04a445ea6ef31b2cbaf1d01d02986047606f7da266629afee982630"}, - {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c61f66d93d712f6e03369b6a7769233bfda880b12f417eefdd4f16d1deb2fc4c"}, - {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9bb62fac84d5f2ff523304e59e5c439955fb3b7f44e3d7b2085184db74d733b"}, - {file = "coverage-7.4.1-cp310-cp310-win32.whl", hash = "sha256:f86f368e1c7ce897bf2457b9eb61169a44e2ef797099fb5728482b8d69f3f016"}, - {file = "coverage-7.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:869b5046d41abfea3e381dd143407b0d29b8282a904a19cb908fa24d090cc018"}, - {file = "coverage-7.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b8ffb498a83d7e0305968289441914154fb0ef5d8b3157df02a90c6695978295"}, - {file = "coverage-7.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3cacfaefe6089d477264001f90f55b7881ba615953414999c46cc9713ff93c8c"}, - {file = "coverage-7.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d6850e6e36e332d5511a48a251790ddc545e16e8beaf046c03985c69ccb2676"}, - {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18e961aa13b6d47f758cc5879383d27b5b3f3dcd9ce8cdbfdc2571fe86feb4dd"}, - {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfd1e1b9f0898817babf840b77ce9fe655ecbe8b1b327983df485b30df8cc011"}, - {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6b00e21f86598b6330f0019b40fb397e705135040dbedc2ca9a93c7441178e74"}, - {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:536d609c6963c50055bab766d9951b6c394759190d03311f3e9fcf194ca909e1"}, - {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7ac8f8eb153724f84885a1374999b7e45734bf93a87d8df1e7ce2146860edef6"}, - {file = "coverage-7.4.1-cp311-cp311-win32.whl", hash = "sha256:f3771b23bb3675a06f5d885c3630b1d01ea6cac9e84a01aaf5508706dba546c5"}, - {file = "coverage-7.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:9d2f9d4cc2a53b38cabc2d6d80f7f9b7e3da26b2f53d48f05876fef7956b6968"}, - {file = "coverage-7.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:f68ef3660677e6624c8cace943e4765545f8191313a07288a53d3da188bd8581"}, - {file = "coverage-7.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:23b27b8a698e749b61809fb637eb98ebf0e505710ec46a8aa6f1be7dc0dc43a6"}, - {file = "coverage-7.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e3424c554391dc9ef4a92ad28665756566a28fecf47308f91841f6c49288e66"}, - {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0860a348bf7004c812c8368d1fc7f77fe8e4c095d661a579196a9533778e156"}, - {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe558371c1bdf3b8fa03e097c523fb9645b8730399c14fe7721ee9c9e2a545d3"}, - {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3468cc8720402af37b6c6e7e2a9cdb9f6c16c728638a2ebc768ba1ef6f26c3a1"}, - {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:02f2edb575d62172aa28fe00efe821ae31f25dc3d589055b3fb64d51e52e4ab1"}, - {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ca6e61dc52f601d1d224526360cdeab0d0712ec104a2ce6cc5ccef6ed9a233bc"}, - {file = "coverage-7.4.1-cp312-cp312-win32.whl", hash = "sha256:ca7b26a5e456a843b9b6683eada193fc1f65c761b3a473941efe5a291f604c74"}, - {file = "coverage-7.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:85ccc5fa54c2ed64bd91ed3b4a627b9cce04646a659512a051fa82a92c04a448"}, - {file = "coverage-7.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8bdb0285a0202888d19ec6b6d23d5990410decb932b709f2b0dfe216d031d218"}, - {file = "coverage-7.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:918440dea04521f499721c039863ef95433314b1db00ff826a02580c1f503e45"}, - {file = "coverage-7.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:379d4c7abad5afbe9d88cc31ea8ca262296480a86af945b08214eb1a556a3e4d"}, - {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b094116f0b6155e36a304ff912f89bbb5067157aff5f94060ff20bbabdc8da06"}, - {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2f5968608b1fe2a1d00d01ad1017ee27efd99b3437e08b83ded9b7af3f6f766"}, - {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:10e88e7f41e6197ea0429ae18f21ff521d4f4490aa33048f6c6f94c6045a6a75"}, - {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a4a3907011d39dbc3e37bdc5df0a8c93853c369039b59efa33a7b6669de04c60"}, - {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6d224f0c4c9c98290a6990259073f496fcec1b5cc613eecbd22786d398ded3ad"}, - {file = "coverage-7.4.1-cp38-cp38-win32.whl", hash = "sha256:23f5881362dcb0e1a92b84b3c2809bdc90db892332daab81ad8f642d8ed55042"}, - {file = "coverage-7.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:a07f61fc452c43cd5328b392e52555f7d1952400a1ad09086c4a8addccbd138d"}, - {file = "coverage-7.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8e738a492b6221f8dcf281b67129510835461132b03024830ac0e554311a5c54"}, - {file = "coverage-7.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:46342fed0fff72efcda77040b14728049200cbba1279e0bf1188f1f2078c1d70"}, - {file = "coverage-7.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9641e21670c68c7e57d2053ddf6c443e4f0a6e18e547e86af3fad0795414a628"}, - {file = 
"coverage-7.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aeb2c2688ed93b027eb0d26aa188ada34acb22dceea256d76390eea135083950"}, - {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d12c923757de24e4e2110cf8832d83a886a4cf215c6e61ed506006872b43a6d1"}, - {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0491275c3b9971cdbd28a4595c2cb5838f08036bca31765bad5e17edf900b2c7"}, - {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8dfc5e195bbef80aabd81596ef52a1277ee7143fe419efc3c4d8ba2754671756"}, - {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1a78b656a4d12b0490ca72651fe4d9f5e07e3c6461063a9b6265ee45eb2bdd35"}, - {file = "coverage-7.4.1-cp39-cp39-win32.whl", hash = "sha256:f90515974b39f4dea2f27c0959688621b46d96d5a626cf9c53dbc653a895c05c"}, - {file = "coverage-7.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:64e723ca82a84053dd7bfcc986bdb34af8d9da83c521c19d6b472bc6880e191a"}, - {file = "coverage-7.4.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:32a8d985462e37cfdab611a6f95b09d7c091d07668fdc26e47a725ee575fe166"}, - {file = "coverage-7.4.1.tar.gz", hash = "sha256:1ed4b95480952b1a26d863e546fa5094564aa0065e1e5f0d4d0041f293251d04"}, + {file = "coverage-7.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36b0ea8ab20d6a7564e89cb6135920bc9188fb5f1f7152e94e8300b7b189441a"}, + {file = "coverage-7.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0676cd0ba581e514b7f726495ea75aba3eb20899d824636c6f59b0ed2f88c471"}, + {file = "coverage-7.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ca5c71a5a1765a0f8f88022c52b6b8be740e512980362f7fdbb03725a0d6b9"}, + {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7c97726520f784239f6c62506bc70e48d01ae71e9da128259d61ca5e9788516"}, + {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:815ac2d0f3398a14286dc2cea223a6f338109f9ecf39a71160cd1628786bc6f5"}, + {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:80b5ee39b7f0131ebec7968baa9b2309eddb35b8403d1869e08f024efd883566"}, + {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5b2ccb7548a0b65974860a78c9ffe1173cfb5877460e5a229238d985565574ae"}, + {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:995ea5c48c4ebfd898eacb098164b3cc826ba273b3049e4a889658548e321b43"}, + {file = "coverage-7.4.0-cp310-cp310-win32.whl", hash = "sha256:79287fd95585ed36e83182794a57a46aeae0b64ca53929d1176db56aacc83451"}, + {file = "coverage-7.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:5b14b4f8760006bfdb6e08667af7bc2d8d9bfdb648351915315ea17645347137"}, + {file = "coverage-7.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:04387a4a6ecb330c1878907ce0dc04078ea72a869263e53c72a1ba5bbdf380ca"}, + {file = "coverage-7.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea81d8f9691bb53f4fb4db603203029643caffc82bf998ab5b59ca05560f4c06"}, + {file = "coverage-7.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74775198b702868ec2d058cb92720a3c5a9177296f75bd97317c787daf711505"}, + {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:76f03940f9973bfaee8cfba70ac991825611b9aac047e5c80d499a44079ec0bc"}, + {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:485e9f897cf4856a65a57c7f6ea3dc0d4e6c076c87311d4bc003f82cfe199d25"}, + {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6ae8c9d301207e6856865867d762a4b6fd379c714fcc0607a84b92ee63feff70"}, + {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bf477c355274a72435ceb140dc42de0dc1e1e0bf6e97195be30487d8eaaf1a09"}, + {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:83c2dda2666fe32332f8e87481eed056c8b4d163fe18ecc690b02802d36a4d26"}, + {file = "coverage-7.4.0-cp311-cp311-win32.whl", hash = "sha256:697d1317e5290a313ef0d369650cfee1a114abb6021fa239ca12b4849ebbd614"}, + {file = "coverage-7.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:26776ff6c711d9d835557ee453082025d871e30b3fd6c27fcef14733f67f0590"}, + {file = "coverage-7.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:13eaf476ec3e883fe3e5fe3707caeb88268a06284484a3daf8250259ef1ba143"}, + {file = "coverage-7.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846f52f46e212affb5bcf131c952fb4075b55aae6b61adc9856222df89cbe3e2"}, + {file = "coverage-7.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26f66da8695719ccf90e794ed567a1549bb2644a706b41e9f6eae6816b398c4a"}, + {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:164fdcc3246c69a6526a59b744b62e303039a81e42cfbbdc171c91a8cc2f9446"}, + {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:316543f71025a6565677d84bc4df2114e9b6a615aa39fb165d697dba06a54af9"}, + {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bb1de682da0b824411e00a0d4da5a784ec6496b6850fdf8c865c1d68c0e318dd"}, + {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:0e8d06778e8fbffccfe96331a3946237f87b1e1d359d7fbe8b06b96c95a5407a"}, + {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a56de34db7b7ff77056a37aedded01b2b98b508227d2d0979d373a9b5d353daa"}, + {file = "coverage-7.4.0-cp312-cp312-win32.whl", hash = "sha256:51456e6fa099a8d9d91497202d9563a320513fcf59f33991b0661a4a6f2ad450"}, + {file = "coverage-7.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:cd3c1e4cb2ff0083758f09be0f77402e1bdf704adb7f89108007300a6da587d0"}, + {file = "coverage-7.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e9d1bf53c4c8de58d22e0e956a79a5b37f754ed1ffdbf1a260d9dcfa2d8a325e"}, + {file = "coverage-7.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:109f5985182b6b81fe33323ab4707011875198c41964f014579cf82cebf2bb85"}, + {file = "coverage-7.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cc9d4bc55de8003663ec94c2f215d12d42ceea128da8f0f4036235a119c88ac"}, + {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc6d65b21c219ec2072c1293c505cf36e4e913a3f936d80028993dd73c7906b1"}, + {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a10a4920def78bbfff4eff8a05c51be03e42f1c3735be42d851f199144897ba"}, + {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:b8e99f06160602bc64da35158bb76c73522a4010f0649be44a4e167ff8555952"}, + {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7d360587e64d006402b7116623cebf9d48893329ef035278969fa3bbf75b697e"}, + {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:29f3abe810930311c0b5d1a7140f6395369c3db1be68345638c33eec07535105"}, + {file = "coverage-7.4.0-cp38-cp38-win32.whl", hash = "sha256:5040148f4ec43644702e7b16ca864c5314ccb8ee0751ef617d49aa0e2d6bf4f2"}, + {file = "coverage-7.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:9864463c1c2f9cb3b5db2cf1ff475eed2f0b4285c2aaf4d357b69959941aa555"}, + {file = "coverage-7.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:936d38794044b26c99d3dd004d8af0035ac535b92090f7f2bb5aa9c8e2f5cd42"}, + {file = "coverage-7.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:799c8f873794a08cdf216aa5d0531c6a3747793b70c53f70e98259720a6fe2d7"}, + {file = "coverage-7.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7defbb9737274023e2d7af02cac77043c86ce88a907c58f42b580a97d5bcca9"}, + {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a1526d265743fb49363974b7aa8d5899ff64ee07df47dd8d3e37dcc0818f09ed"}, + {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf635a52fc1ea401baf88843ae8708591aa4adff875e5c23220de43b1ccf575c"}, + {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:756ded44f47f330666843b5781be126ab57bb57c22adbb07d83f6b519783b870"}, + {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0eb3c2f32dabe3a4aaf6441dde94f35687224dfd7eb2a7f47f3fd9428e421058"}, + {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bfd5db349d15c08311702611f3dccbef4b4e2ec148fcc636cf8739519b4a5c0f"}, + {file = "coverage-7.4.0-cp39-cp39-win32.whl", hash = "sha256:53d7d9158ee03956e0eadac38dfa1ec8068431ef8058fe6447043db1fb40d932"}, + {file = "coverage-7.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:cfd2a8b6b0d8e66e944d47cdec2f47c48fef2ba2f2dff5a9a75757f64172857e"}, + {file = "coverage-7.4.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:c530833afc4707fe48524a44844493f36d8727f04dcce91fb978c414a8556cc6"}, + {file = "coverage-7.4.0.tar.gz", hash = "sha256:707c0f58cb1712b8809ece32b68996ee1e609f71bd14615bd8f87a1293cb610e"}, ] [package.dependencies] @@ -717,21 +714,6 @@ ssh = ["bcrypt (>=3.1.5)"] test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] -[[package]] -name = "dataclasses-json" -version = "0.6.3" -description = "Easily serialize dataclasses to and from JSON." 
-optional = false -python-versions = ">=3.7,<4.0" -files = [ - {file = "dataclasses_json-0.6.3-py3-none-any.whl", hash = "sha256:4aeb343357997396f6bca1acae64e486c3a723d8f5c76301888abeccf0c45176"}, - {file = "dataclasses_json-0.6.3.tar.gz", hash = "sha256:35cb40aae824736fdf959801356641836365219cfe14caeb115c39136f775d2a"}, -] - -[package.dependencies] -marshmallow = ">=3.18.0,<4.0.0" -typing-inspect = ">=0.4.0,<1" - [[package]] name = "dateparser" version = "1.2.0" @@ -1025,13 +1007,13 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-api-python-client" -version = "2.115.0" +version = "2.113.0" description = "Google API Client Library for Python" optional = false python-versions = ">=3.7" files = [ - {file = "google-api-python-client-2.115.0.tar.gz", hash = "sha256:96af11376535236ba600ebbe23588cfe003ec9b74e66dd6ddb53aa3ec87e1b52"}, - {file = "google_api_python_client-2.115.0-py2.py3-none-any.whl", hash = "sha256:26178e33684763099142e2cad201057bd27d4efefd859a495aac21ab3e6129c2"}, + {file = "google-api-python-client-2.113.0.tar.gz", hash = "sha256:bcffbc8ffbad631f699cf85aa91993f3dc03060b234ca9e6e2f9135028bd9b52"}, + {file = "google_api_python_client-2.113.0-py2.py3-none-any.whl", hash = "sha256:25659d488df6c8a69615b2a510af0e63b4c47ab2cb87d71c1e13b28715906e27"}, ] [package.dependencies] @@ -1043,13 +1025,13 @@ uritemplate = ">=3.0.1,<5" [[package]] name = "google-auth" -version = "2.27.0" +version = "2.26.2" description = "Google Authentication Library" optional = false python-versions = ">=3.7" files = [ - {file = "google-auth-2.27.0.tar.gz", hash = "sha256:e863a56ccc2d8efa83df7a80272601e43487fa9a728a376205c86c26aaefa821"}, - {file = "google_auth-2.27.0-py2.py3-none-any.whl", hash = "sha256:8e4bad367015430ff253fe49d500fdc3396c1a434db5740828c728e45bcce245"}, + {file = "google-auth-2.26.2.tar.gz", hash = "sha256:97327dbbf58cccb58fc5a1712bba403ae76668e64814eb30f7316f7e27126b81"}, + {file = "google_auth-2.26.2-py2.py3-none-any.whl", hash = "sha256:3f445c8ce9b61ed6459aad86d8ccdba4a9afed841b2d1451a11ef4db08957424"}, ] [package.dependencies] @@ -1379,13 +1361,13 @@ tests = ["pytest", "pytest-cov", "pytest-mock"] [[package]] name = "ipykernel" -version = "6.29.0" +version = "6.28.0" description = "IPython Kernel for Jupyter" optional = false python-versions = ">=3.8" files = [ - {file = "ipykernel-6.29.0-py3-none-any.whl", hash = "sha256:076663ca68492576f051e4af7720d33f34383e655f2be0d544c8b1c9de915b2f"}, - {file = "ipykernel-6.29.0.tar.gz", hash = "sha256:b5dd3013cab7b330df712891c96cd1ab868c27a7159e606f762015e9bf8ceb3f"}, + {file = "ipykernel-6.28.0-py3-none-any.whl", hash = "sha256:c6e9a9c63a7f4095c0a22a79f765f079f9ec7be4f2430a898ddea889e8665661"}, + {file = "ipykernel-6.28.0.tar.gz", hash = "sha256:69c11403d26de69df02225916f916b37ea4b9af417da0a8c827f84328d88e5f3"}, ] [package.dependencies] @@ -1408,7 +1390,7 @@ cov = ["coverage[toml]", "curio", "matplotlib", "pytest-cov", "trio"] docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "trio"] pyqt5 = ["pyqt5"] pyside6 = ["pyside6"] -test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (==0.23.2)", "pytest-cov", "pytest-timeout"] +test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio", "pytest-cov", "pytest-timeout"] [[package]] name = "ipython" @@ -1613,13 +1595,13 @@ files = [ [[package]] name = "jsonschema" -version = "4.21.1" +version = "4.20.0" description = "An implementation of JSON 
Schema validation for Python" optional = false python-versions = ">=3.8" files = [ - {file = "jsonschema-4.21.1-py3-none-any.whl", hash = "sha256:7996507afae316306f9e2290407761157c6f78002dcf7419acb99822143d1c6f"}, - {file = "jsonschema-4.21.1.tar.gz", hash = "sha256:85727c00279f5fa6bedbe6238d2aa6403bedd8b4864ab11207d07df3cc1b2ee5"}, + {file = "jsonschema-4.20.0-py3-none-any.whl", hash = "sha256:ed6231f0429ecf966f5bc8dfef245998220549cbbcf140f913b7464c52c3b6b3"}, + {file = "jsonschema-4.20.0.tar.gz", hash = "sha256:4f614fd46d8d61258610998997743ec5492a648b33cf478c1ddc23ed4598a5fa"}, ] [package.dependencies] @@ -1724,13 +1706,13 @@ test = ["click", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.19.0)", "p [[package]] name = "jupyter-lsp" -version = "2.2.2" +version = "2.2.1" description = "Multi-Language Server WebSocket proxy for Jupyter Notebook/Lab server" optional = false python-versions = ">=3.8" files = [ - {file = "jupyter-lsp-2.2.2.tar.gz", hash = "sha256:256d24620542ae4bba04a50fc1f6ffe208093a07d8e697fea0a8d1b8ca1b7e5b"}, - {file = "jupyter_lsp-2.2.2-py3-none-any.whl", hash = "sha256:3b95229e4168355a8c91928057c1621ac3510ba98b2a925e82ebd77f078b1aa5"}, + {file = "jupyter-lsp-2.2.1.tar.gz", hash = "sha256:b17fab6d70fe83c8896b0cff59237640038247c196056b43684a0902b6a9e0fb"}, + {file = "jupyter_lsp-2.2.1-py3-none-any.whl", hash = "sha256:17a689910c5e4ae5e7d334b02f31d08ffbe98108f6f658fb05e4304b4345368b"}, ] [package.dependencies] @@ -1739,13 +1721,13 @@ jupyter-server = ">=1.1.2" [[package]] name = "jupyter-server" -version = "2.12.5" +version = "2.12.4" description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." optional = false python-versions = ">=3.8" files = [ - {file = "jupyter_server-2.12.5-py3-none-any.whl", hash = "sha256:184a0f82809a8522777cfb6b760ab6f4b1bb398664c5860a27cec696cb884923"}, - {file = "jupyter_server-2.12.5.tar.gz", hash = "sha256:0edb626c94baa22809be1323f9770cf1c00a952b17097592e40d03e6a3951689"}, + {file = "jupyter_server-2.12.4-py3-none-any.whl", hash = "sha256:a125ae18a60de568f78f55c84dd58759901a18ef279abf0418ac220653ca1320"}, + {file = "jupyter_server-2.12.4.tar.gz", hash = "sha256:41f4a1e6b912cc24a7c6c694851b37d3d8412b180f43d72315fe422cb2b85cc2"}, ] [package.dependencies] @@ -1775,13 +1757,13 @@ test = ["flaky", "ipykernel", "pre-commit", "pytest (>=7.0)", "pytest-console-sc [[package]] name = "jupyter-server-terminals" -version = "0.5.2" +version = "0.5.1" description = "A Jupyter Server Extension Providing Terminals." 
optional = false python-versions = ">=3.8" files = [ - {file = "jupyter_server_terminals-0.5.2-py3-none-any.whl", hash = "sha256:1b80c12765da979513c42c90215481bbc39bd8ae7c0350b4f85bc3eb58d0fa80"}, - {file = "jupyter_server_terminals-0.5.2.tar.gz", hash = "sha256:396b5ccc0881e550bf0ee7012c6ef1b53edbde69e67cab1d56e89711b46052e8"}, + {file = "jupyter_server_terminals-0.5.1-py3-none-any.whl", hash = "sha256:5e63e947ddd97bb2832db5ef837a258d9ccd4192cd608c1270850ad947ae5dd7"}, + {file = "jupyter_server_terminals-0.5.1.tar.gz", hash = "sha256:16d3be9cf48be6a1f943f3a6c93c033be259cf4779184c66421709cf63dccfea"}, ] [package.dependencies] @@ -1794,13 +1776,13 @@ test = ["jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-jupyter[server] (> [[package]] name = "jupyterlab" -version = "4.0.11" +version = "4.0.10" description = "JupyterLab computational environment" optional = false python-versions = ">=3.8" files = [ - {file = "jupyterlab-4.0.11-py3-none-any.whl", hash = "sha256:536bf0e78723153a5016ca7efb88ed0ecd7070d3f1555d5b0e2770658f900a3c"}, - {file = "jupyterlab-4.0.11.tar.gz", hash = "sha256:d1aec24712566bc25a36229788242778e498ca4088028e2f9aa156b8b7fdc8fc"}, + {file = "jupyterlab-4.0.10-py3-none-any.whl", hash = "sha256:fe010ad9e37017488b468632ef2ead255fc7c671c5b64d9ca13e1f7b7e665c37"}, + {file = "jupyterlab-4.0.10.tar.gz", hash = "sha256:46177eb8ede70dc73be922ac99f8ef943bdc2dfbc6a31b353c4bde848a35dee1"}, ] [package.dependencies] @@ -2155,13 +2137,13 @@ test = ["flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "nbconvert (>= [[package]] name = "nbconvert" -version = "7.14.2" +version = "7.14.1" description = "Converting Jupyter Notebooks" optional = false python-versions = ">=3.8" files = [ - {file = "nbconvert-7.14.2-py3-none-any.whl", hash = "sha256:db28590cef90f7faf2ebbc71acd402cbecf13d29176df728c0a9025a49345ea1"}, - {file = "nbconvert-7.14.2.tar.gz", hash = "sha256:a7f8808fd4e082431673ac538400218dd45efd076fbeb07cc6e5aa5a3a4e949e"}, + {file = "nbconvert-7.14.1-py3-none-any.whl", hash = "sha256:aa83e3dd27ea38d0c1d908e3ce9518d15fa908dd30521b6d5040bd23f33fffb0"}, + {file = "nbconvert-7.14.1.tar.gz", hash = "sha256:20cba10e0448dc76b3bebfe1adf923663e3b98338daf77b97b42511ef5a88618"}, ] [package.dependencies] @@ -2214,13 +2196,13 @@ test = ["pep440", "pre-commit", "pytest", "testpath"] [[package]] name = "nest-asyncio" -version = "1.6.0" +version = "1.5.8" description = "Patch asyncio to allow nested event loops" optional = false python-versions = ">=3.5" files = [ - {file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"}, - {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"}, + {file = "nest_asyncio-1.5.8-py3-none-any.whl", hash = "sha256:accda7a339a70599cb08f9dd09a67e0c2ef8d8d6f4c07f96ab203f2ae254e48d"}, + {file = "nest_asyncio-1.5.8.tar.gz", hash = "sha256:25aa2ca0d2a5b5531956b9e273b45cf664cae2b145101d73b86b199978d48fdb"}, ] [[package]] @@ -2243,13 +2225,13 @@ test = ["codecov (>=2.1)", "pytest (>=7.2)", "pytest-cov (>=4.0)"] [[package]] name = "notebook" -version = "7.0.7" +version = "7.0.6" description = "Jupyter Notebook - A web-based notebook environment for interactive computing" optional = false python-versions = ">=3.8" files = [ - {file = "notebook-7.0.7-py3-none-any.whl", hash = "sha256:289b606d7e173f75a18beb1406ef411b43f97f7a9c55ba03efa3622905a62346"}, - {file = "notebook-7.0.7.tar.gz", hash = 
"sha256:3bcff00c17b3ac142ef5f436d50637d936b274cfa0b41f6ac0175363de9b4e09"}, + {file = "notebook-7.0.6-py3-none-any.whl", hash = "sha256:0fe8f67102fea3744fedf652e4c15339390902ca70c5a31c4f547fa23da697cc"}, + {file = "notebook-7.0.6.tar.gz", hash = "sha256:ec6113b06529019f7f287819af06c97a2baf7a95ac21a8f6e32192898e9f9a58"}, ] [package.dependencies] @@ -2471,13 +2453,13 @@ files = [ [[package]] name = "overrides" -version = "7.6.0" +version = "7.4.0" description = "A decorator to automatically detect mismatch when overriding a method." optional = false python-versions = ">=3.6" files = [ - {file = "overrides-7.6.0-py3-none-any.whl", hash = "sha256:c36e6635519ea9c5b043b65c36d4b886aee8bd45b7d4681d2a6df0898df4b654"}, - {file = "overrides-7.6.0.tar.gz", hash = "sha256:01e15bbbf15b766f0675c275baa1878bd1c7dc9bc7b9ee13e677cdba93dc1bd9"}, + {file = "overrides-7.4.0-py3-none-any.whl", hash = "sha256:3ad24583f86d6d7a49049695efe9933e67ba62f0c7625d53c59fa832ce4b8b7d"}, + {file = "overrides-7.4.0.tar.gz", hash = "sha256:9502a3cca51f4fac40b5feca985b6703a5c1f6ad815588a7ca9e285b9dca6757"}, ] [[package]] @@ -2512,60 +2494,77 @@ doc = ["mkdocs-material"] [[package]] name = "pandas" -version = "1.5.3" +version = "2.1.4" description = "Powerful data structures for data analysis, time series, and statistics" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "pandas-1.5.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3749077d86e3a2f0ed51367f30bf5b82e131cc0f14260c4d3e499186fccc4406"}, - {file = "pandas-1.5.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:972d8a45395f2a2d26733eb8d0f629b2f90bebe8e8eddbb8829b180c09639572"}, - {file = "pandas-1.5.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:50869a35cbb0f2e0cd5ec04b191e7b12ed688874bd05dd777c19b28cbea90996"}, - {file = "pandas-1.5.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3ac844a0fe00bfaeb2c9b51ab1424e5c8744f89860b138434a363b1f620f354"}, - {file = "pandas-1.5.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a0a56cef15fd1586726dace5616db75ebcfec9179a3a55e78f72c5639fa2a23"}, - {file = "pandas-1.5.3-cp310-cp310-win_amd64.whl", hash = "sha256:478ff646ca42b20376e4ed3fa2e8d7341e8a63105586efe54fa2508ee087f328"}, - {file = "pandas-1.5.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6973549c01ca91ec96199e940495219c887ea815b2083722821f1d7abfa2b4dc"}, - {file = "pandas-1.5.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c39a8da13cede5adcd3be1182883aea1c925476f4e84b2807a46e2775306305d"}, - {file = "pandas-1.5.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f76d097d12c82a535fda9dfe5e8dd4127952b45fea9b0276cb30cca5ea313fbc"}, - {file = "pandas-1.5.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e474390e60ed609cec869b0da796ad94f420bb057d86784191eefc62b65819ae"}, - {file = "pandas-1.5.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f2b952406a1588ad4cad5b3f55f520e82e902388a6d5a4a91baa8d38d23c7f6"}, - {file = "pandas-1.5.3-cp311-cp311-win_amd64.whl", hash = "sha256:bc4c368f42b551bf72fac35c5128963a171b40dce866fb066540eeaf46faa003"}, - {file = "pandas-1.5.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:14e45300521902689a81f3f41386dc86f19b8ba8dd5ac5a3c7010ef8d2932813"}, - {file = "pandas-1.5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9842b6f4b8479e41968eced654487258ed81df7d1c9b7b870ceea24ed9459b31"}, - {file = "pandas-1.5.3-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:26d9c71772c7afb9d5046e6e9cf42d83dd147b5cf5bcb9d97252077118543792"}, - {file = "pandas-1.5.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fbcb19d6fceb9e946b3e23258757c7b225ba450990d9ed63ccceeb8cae609f7"}, - {file = "pandas-1.5.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:565fa34a5434d38e9d250af3c12ff931abaf88050551d9fbcdfafca50d62babf"}, - {file = "pandas-1.5.3-cp38-cp38-win32.whl", hash = "sha256:87bd9c03da1ac870a6d2c8902a0e1fd4267ca00f13bc494c9e5a9020920e1d51"}, - {file = "pandas-1.5.3-cp38-cp38-win_amd64.whl", hash = "sha256:41179ce559943d83a9b4bbacb736b04c928b095b5f25dd2b7389eda08f46f373"}, - {file = "pandas-1.5.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c74a62747864ed568f5a82a49a23a8d7fe171d0c69038b38cedf0976831296fa"}, - {file = "pandas-1.5.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c4c00e0b0597c8e4f59e8d461f797e5d70b4d025880516a8261b2817c47759ee"}, - {file = "pandas-1.5.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a50d9a4336a9621cab7b8eb3fb11adb82de58f9b91d84c2cd526576b881a0c5a"}, - {file = "pandas-1.5.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd05f7783b3274aa206a1af06f0ceed3f9b412cf665b7247eacd83be41cf7bf0"}, - {file = "pandas-1.5.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f69c4029613de47816b1bb30ff5ac778686688751a5e9c99ad8c7031f6508e5"}, - {file = "pandas-1.5.3-cp39-cp39-win32.whl", hash = "sha256:7cec0bee9f294e5de5bbfc14d0573f65526071029d036b753ee6507d2a21480a"}, - {file = "pandas-1.5.3-cp39-cp39-win_amd64.whl", hash = "sha256:dfd681c5dc216037e0b0a2c821f5ed99ba9f03ebcf119c7dac0e9a7b960b9ec9"}, - {file = "pandas-1.5.3.tar.gz", hash = "sha256:74a3fd7e5a7ec052f183273dc7b0acd3a863edf7520f5d3a1765c04ffdb3b0b1"}, + {file = "pandas-2.1.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bdec823dc6ec53f7a6339a0e34c68b144a7a1fd28d80c260534c39c62c5bf8c9"}, + {file = "pandas-2.1.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:294d96cfaf28d688f30c918a765ea2ae2e0e71d3536754f4b6de0ea4a496d034"}, + {file = "pandas-2.1.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b728fb8deba8905b319f96447a27033969f3ea1fea09d07d296c9030ab2ed1d"}, + {file = "pandas-2.1.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00028e6737c594feac3c2df15636d73ace46b8314d236100b57ed7e4b9ebe8d9"}, + {file = "pandas-2.1.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:426dc0f1b187523c4db06f96fb5c8d1a845e259c99bda74f7de97bd8a3bb3139"}, + {file = "pandas-2.1.4-cp310-cp310-win_amd64.whl", hash = "sha256:f237e6ca6421265643608813ce9793610ad09b40154a3344a088159590469e46"}, + {file = "pandas-2.1.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b7d852d16c270e4331f6f59b3e9aa23f935f5c4b0ed2d0bc77637a8890a5d092"}, + {file = "pandas-2.1.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bd7d5f2f54f78164b3d7a40f33bf79a74cdee72c31affec86bfcabe7e0789821"}, + {file = "pandas-2.1.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0aa6e92e639da0d6e2017d9ccff563222f4eb31e4b2c3cf32a2a392fc3103c0d"}, + {file = "pandas-2.1.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d797591b6846b9db79e65dc2d0d48e61f7db8d10b2a9480b4e3faaddc421a171"}, + {file = "pandas-2.1.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d2d3e7b00f703aea3945995ee63375c61b2e6aa5aa7871c5d622870e5e137623"}, + {file = "pandas-2.1.4-cp311-cp311-win_amd64.whl", hash = 
"sha256:dc9bf7ade01143cddc0074aa6995edd05323974e6e40d9dbde081021ded8510e"}, + {file = "pandas-2.1.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:482d5076e1791777e1571f2e2d789e940dedd927325cc3cb6d0800c6304082f6"}, + {file = "pandas-2.1.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8a706cfe7955c4ca59af8c7a0517370eafbd98593155b48f10f9811da440248b"}, + {file = "pandas-2.1.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0513a132a15977b4a5b89aabd304647919bc2169eac4c8536afb29c07c23540"}, + {file = "pandas-2.1.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9f17f2b6fc076b2a0078862547595d66244db0f41bf79fc5f64a5c4d635bead"}, + {file = "pandas-2.1.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:45d63d2a9b1b37fa6c84a68ba2422dc9ed018bdaa668c7f47566a01188ceeec1"}, + {file = "pandas-2.1.4-cp312-cp312-win_amd64.whl", hash = "sha256:f69b0c9bb174a2342818d3e2778584e18c740d56857fc5cdb944ec8bbe4082cf"}, + {file = "pandas-2.1.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3f06bda01a143020bad20f7a85dd5f4a1600112145f126bc9e3e42077c24ef34"}, + {file = "pandas-2.1.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ab5796839eb1fd62a39eec2916d3e979ec3130509930fea17fe6f81e18108f6a"}, + {file = "pandas-2.1.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edbaf9e8d3a63a9276d707b4d25930a262341bca9874fcb22eff5e3da5394732"}, + {file = "pandas-2.1.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ebfd771110b50055712b3b711b51bee5d50135429364d0498e1213a7adc2be8"}, + {file = "pandas-2.1.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8ea107e0be2aba1da619cc6ba3f999b2bfc9669a83554b1904ce3dd9507f0860"}, + {file = "pandas-2.1.4-cp39-cp39-win_amd64.whl", hash = "sha256:d65148b14788b3758daf57bf42725caa536575da2b64df9964c563b015230984"}, + {file = "pandas-2.1.4.tar.gz", hash = "sha256:fcb68203c833cc735321512e13861358079a96c174a61f5116a1de89c58c0ef7"}, ] [package.dependencies] -numpy = [ - {version = ">=1.20.3", markers = "python_version < \"3.10\""}, - {version = ">=1.21.0", markers = "python_version >= \"3.10\""}, -] -python-dateutil = ">=2.8.1" +numpy = {version = ">=1.22.4,<2", markers = "python_version < \"3.11\""} +python-dateutil = ">=2.8.2" pytz = ">=2020.1" +tzdata = ">=2022.1" [package.extras] -test = ["hypothesis (>=5.5.3)", "pytest (>=6.0)", "pytest-xdist (>=1.31)"] +all = ["PyQt5 (>=5.15.6)", "SQLAlchemy (>=1.4.36)", "beautifulsoup4 (>=4.11.1)", "bottleneck (>=1.3.4)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=0.8.1)", "fsspec (>=2022.05.0)", "gcsfs (>=2022.05.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.8.0)", "matplotlib (>=3.6.1)", "numba (>=0.55.2)", "numexpr (>=2.8.0)", "odfpy (>=1.4.1)", "openpyxl (>=3.0.10)", "pandas-gbq (>=0.17.5)", "psycopg2 (>=2.9.3)", "pyarrow (>=7.0.0)", "pymysql (>=1.0.2)", "pyreadstat (>=1.1.5)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "pyxlsb (>=1.0.9)", "qtpy (>=2.2.0)", "s3fs (>=2022.05.0)", "scipy (>=1.8.1)", "tables (>=3.7.0)", "tabulate (>=0.8.10)", "xarray (>=2022.03.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.3)", "zstandard (>=0.17.0)"] +aws = ["s3fs (>=2022.05.0)"] +clipboard = ["PyQt5 (>=5.15.6)", "qtpy (>=2.2.0)"] +compression = ["zstandard (>=0.17.0)"] +computation = ["scipy (>=1.8.1)", "xarray (>=2022.03.0)"] +consortium-standard = ["dataframe-api-compat (>=0.1.7)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.0.10)", "pyxlsb (>=1.0.9)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.3)"] 
+feather = ["pyarrow (>=7.0.0)"] +fss = ["fsspec (>=2022.05.0)"] +gcp = ["gcsfs (>=2022.05.0)", "pandas-gbq (>=0.17.5)"] +hdf5 = ["tables (>=3.7.0)"] +html = ["beautifulsoup4 (>=4.11.1)", "html5lib (>=1.1)", "lxml (>=4.8.0)"] +mysql = ["SQLAlchemy (>=1.4.36)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.8.10)"] +parquet = ["pyarrow (>=7.0.0)"] +performance = ["bottleneck (>=1.3.4)", "numba (>=0.55.2)", "numexpr (>=2.8.0)"] +plot = ["matplotlib (>=3.6.1)"] +postgresql = ["SQLAlchemy (>=1.4.36)", "psycopg2 (>=2.9.3)"] +spss = ["pyreadstat (>=1.1.5)"] +sql-other = ["SQLAlchemy (>=1.4.36)"] +test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.8.0)"] [[package]] name = "pandocfilters" -version = "1.5.1" +version = "1.5.0" description = "Utilities for writing pandoc filters in python" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ - {file = "pandocfilters-1.5.1-py2.py3-none-any.whl", hash = "sha256:93be382804a9cdb0a7267585f157e5d1731bbe5545a85b268d6f5fe6232de2bc"}, - {file = "pandocfilters-1.5.1.tar.gz", hash = "sha256:002b4a555ee4ebc03f8b66307e287fa492e4a77b4ea14d3f934328297bb4939e"}, + {file = "pandocfilters-1.5.0-py2.py3-none-any.whl", hash = "sha256:33aae3f25fd1a026079f5d27bdd52496f0e0803b3469282162bafdcbdf6ef14f"}, + {file = "pandocfilters-1.5.0.tar.gz", hash = "sha256:0b679503337d233b4339a817bfc8c50064e2eff681314376a47cb582305a7a38"}, ] [[package]] @@ -2644,13 +2643,13 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co [[package]] name = "pluggy" -version = "1.4.0" +version = "1.3.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" files = [ - {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, - {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, + {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, + {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, ] [package.extras] @@ -2707,27 +2706,27 @@ files = [ [[package]] name = "psutil" -version = "5.9.8" +version = "5.9.7" description = "Cross-platform lib for process and system monitoring in Python." 
optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ - {file = "psutil-5.9.8-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:26bd09967ae00920df88e0352a91cff1a78f8d69b3ecabbfe733610c0af486c8"}, - {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:05806de88103b25903dff19bb6692bd2e714ccf9e668d050d144012055cbca73"}, - {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:611052c4bc70432ec770d5d54f64206aa7203a101ec273a0cd82418c86503bb7"}, - {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:50187900d73c1381ba1454cf40308c2bf6f34268518b3f36a9b663ca87e65e36"}, - {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:02615ed8c5ea222323408ceba16c60e99c3f91639b07da6373fb7e6539abc56d"}, - {file = "psutil-5.9.8-cp27-none-win32.whl", hash = "sha256:36f435891adb138ed3c9e58c6af3e2e6ca9ac2f365efe1f9cfef2794e6c93b4e"}, - {file = "psutil-5.9.8-cp27-none-win_amd64.whl", hash = "sha256:bd1184ceb3f87651a67b2708d4c3338e9b10c5df903f2e3776b62303b26cb631"}, - {file = "psutil-5.9.8-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:aee678c8720623dc456fa20659af736241f575d79429a0e5e9cf88ae0605cc81"}, - {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cb6403ce6d8e047495a701dc7c5bd788add903f8986d523e3e20b98b733e421"}, - {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d06016f7f8625a1825ba3732081d77c94589dca78b7a3fc072194851e88461a4"}, - {file = "psutil-5.9.8-cp36-cp36m-win32.whl", hash = "sha256:7d79560ad97af658a0f6adfef8b834b53f64746d45b403f225b85c5c2c140eee"}, - {file = "psutil-5.9.8-cp36-cp36m-win_amd64.whl", hash = "sha256:27cc40c3493bb10de1be4b3f07cae4c010ce715290a5be22b98493509c6299e2"}, - {file = "psutil-5.9.8-cp37-abi3-win32.whl", hash = "sha256:bc56c2a1b0d15aa3eaa5a60c9f3f8e3e565303b465dbf57a1b730e7a2b9844e0"}, - {file = "psutil-5.9.8-cp37-abi3-win_amd64.whl", hash = "sha256:8db4c1b57507eef143a15a6884ca10f7c73876cdf5d51e713151c1236a0e68cf"}, - {file = "psutil-5.9.8-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:d16bbddf0693323b8c6123dd804100241da461e41d6e332fb0ba6058f630f8c8"}, - {file = "psutil-5.9.8.tar.gz", hash = "sha256:6be126e3225486dff286a8fb9a06246a5253f4c7c53b475ea5f5ac934e64194c"}, + {file = "psutil-5.9.7-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:0bd41bf2d1463dfa535942b2a8f0e958acf6607ac0be52265ab31f7923bcd5e6"}, + {file = "psutil-5.9.7-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:5794944462509e49d4d458f4dbfb92c47539e7d8d15c796f141f474010084056"}, + {file = "psutil-5.9.7-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:fe361f743cb3389b8efda21980d93eb55c1f1e3898269bc9a2a1d0bb7b1f6508"}, + {file = "psutil-5.9.7-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:e469990e28f1ad738f65a42dcfc17adaed9d0f325d55047593cb9033a0ab63df"}, + {file = "psutil-5.9.7-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:3c4747a3e2ead1589e647e64aad601981f01b68f9398ddf94d01e3dc0d1e57c7"}, + {file = "psutil-5.9.7-cp27-none-win32.whl", hash = "sha256:1d4bc4a0148fdd7fd8f38e0498639ae128e64538faa507df25a20f8f7fb2341c"}, + {file = "psutil-5.9.7-cp27-none-win_amd64.whl", hash = "sha256:4c03362e280d06bbbfcd52f29acd79c733e0af33d707c54255d21029b8b32ba6"}, + {file = "psutil-5.9.7-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ea36cc62e69a13ec52b2f625c27527f6e4479bca2b340b7a452af55b34fcbe2e"}, + 
{file = "psutil-5.9.7-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1132704b876e58d277168cd729d64750633d5ff0183acf5b3c986b8466cd0284"}, + {file = "psutil-5.9.7-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe8b7f07948f1304497ce4f4684881250cd859b16d06a1dc4d7941eeb6233bfe"}, + {file = "psutil-5.9.7-cp36-cp36m-win32.whl", hash = "sha256:b27f8fdb190c8c03914f908a4555159327d7481dac2f01008d483137ef3311a9"}, + {file = "psutil-5.9.7-cp36-cp36m-win_amd64.whl", hash = "sha256:44969859757f4d8f2a9bd5b76eba8c3099a2c8cf3992ff62144061e39ba8568e"}, + {file = "psutil-5.9.7-cp37-abi3-win32.whl", hash = "sha256:c727ca5a9b2dd5193b8644b9f0c883d54f1248310023b5ad3e92036c5e2ada68"}, + {file = "psutil-5.9.7-cp37-abi3-win_amd64.whl", hash = "sha256:f37f87e4d73b79e6c5e749440c3113b81d1ee7d26f21c19c47371ddea834f414"}, + {file = "psutil-5.9.7-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:032f4f2c909818c86cea4fe2cc407f1c0f0cde8e6c6d702b28b8ce0c0d143340"}, + {file = "psutil-5.9.7.tar.gz", hash = "sha256:3f02134e82cfb5d089fddf20bb2e03fd5cd52395321d1c8458a9e58500ff417c"}, ] [package.extras] @@ -2818,47 +2817,47 @@ files = [ [[package]] name = "pydantic" -version = "1.10.14" +version = "1.10.13" description = "Data validation and settings management using python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f4fcec873f90537c382840f330b90f4715eebc2bc9925f04cb92de593eae054"}, - {file = "pydantic-1.10.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e3a76f571970fcd3c43ad982daf936ae39b3e90b8a2e96c04113a369869dc87"}, - {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d886bd3c3fbeaa963692ef6b643159ccb4b4cefaf7ff1617720cbead04fd1d"}, - {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:798a3d05ee3b71967844a1164fd5bdb8c22c6d674f26274e78b9f29d81770c4e"}, - {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:23d47a4b57a38e8652bcab15a658fdb13c785b9ce217cc3a729504ab4e1d6bc9"}, - {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9f674b5c3bebc2eba401de64f29948ae1e646ba2735f884d1594c5f675d6f2a"}, - {file = "pydantic-1.10.14-cp310-cp310-win_amd64.whl", hash = "sha256:24a7679fab2e0eeedb5a8924fc4a694b3bcaac7d305aeeac72dd7d4e05ecbebf"}, - {file = "pydantic-1.10.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d578ac4bf7fdf10ce14caba6f734c178379bd35c486c6deb6f49006e1ba78a7"}, - {file = "pydantic-1.10.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa7790e94c60f809c95602a26d906eba01a0abee9cc24150e4ce2189352deb1b"}, - {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad4e10efa5474ed1a611b6d7f0d130f4aafadceb73c11d9e72823e8f508e663"}, - {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245f4f61f467cb3dfeced2b119afef3db386aec3d24a22a1de08c65038b255f"}, - {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:21efacc678a11114c765eb52ec0db62edffa89e9a562a94cbf8fa10b5db5c046"}, - {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:412ab4a3f6dbd2bf18aefa9f79c7cca23744846b31f1d6555c2ee2b05a2e14ca"}, - {file = 
"pydantic-1.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:e897c9f35281f7889873a3e6d6b69aa1447ceb024e8495a5f0d02ecd17742a7f"}, - {file = "pydantic-1.10.14-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d604be0f0b44d473e54fdcb12302495fe0467c56509a2f80483476f3ba92b33c"}, - {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42c7d17706911199798d4c464b352e640cab4351efe69c2267823d619a937e5"}, - {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:596f12a1085e38dbda5cbb874d0973303e34227b400b6414782bf205cc14940c"}, - {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bfb113860e9288d0886e3b9e49d9cf4a9d48b441f52ded7d96db7819028514cc"}, - {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bc3ed06ab13660b565eed80887fcfbc0070f0aa0691fbb351657041d3e874efe"}, - {file = "pydantic-1.10.14-cp37-cp37m-win_amd64.whl", hash = "sha256:ad8c2bc677ae5f6dbd3cf92f2c7dc613507eafe8f71719727cbc0a7dec9a8c01"}, - {file = "pydantic-1.10.14-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c37c28449752bb1f47975d22ef2882d70513c546f8f37201e0fec3a97b816eee"}, - {file = "pydantic-1.10.14-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49a46a0994dd551ec051986806122767cf144b9702e31d47f6d493c336462597"}, - {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53e3819bd20a42470d6dd0fe7fc1c121c92247bca104ce608e609b59bc7a77ee"}, - {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbb503bbbbab0c588ed3cd21975a1d0d4163b87e360fec17a792f7d8c4ff29f"}, - {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:336709883c15c050b9c55a63d6c7ff09be883dbc17805d2b063395dd9d9d0022"}, - {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4ae57b4d8e3312d486e2498d42aed3ece7b51848336964e43abbf9671584e67f"}, - {file = "pydantic-1.10.14-cp38-cp38-win_amd64.whl", hash = "sha256:dba49d52500c35cfec0b28aa8b3ea5c37c9df183ffc7210b10ff2a415c125c4a"}, - {file = "pydantic-1.10.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c66609e138c31cba607d8e2a7b6a5dc38979a06c900815495b2d90ce6ded35b4"}, - {file = "pydantic-1.10.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d986e115e0b39604b9eee3507987368ff8148222da213cd38c359f6f57b3b347"}, - {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:646b2b12df4295b4c3148850c85bff29ef6d0d9621a8d091e98094871a62e5c7"}, - {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282613a5969c47c83a8710cc8bfd1e70c9223feb76566f74683af889faadc0ea"}, - {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:466669501d08ad8eb3c4fecd991c5e793c4e0bbd62299d05111d4f827cded64f"}, - {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:13e86a19dca96373dcf3190fcb8797d40a6f12f154a244a8d1e8e03b8f280593"}, - {file = "pydantic-1.10.14-cp39-cp39-win_amd64.whl", hash = "sha256:08b6ec0917c30861e3fe71a93be1648a2aa4f62f866142ba21670b24444d7fd8"}, - {file = "pydantic-1.10.14-py3-none-any.whl", hash = "sha256:8ee853cd12ac2ddbf0ecbac1c289f95882b2d4482258048079d13be700aa114c"}, - {file = "pydantic-1.10.14.tar.gz", hash = "sha256:46f17b832fe27de7850896f3afee50ea682220dd218f7e9c88d436788419dca6"}, + {file = 
"pydantic-1.10.13-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:efff03cc7a4f29d9009d1c96ceb1e7a70a65cfe86e89d34e4a5f2ab1e5693737"}, + {file = "pydantic-1.10.13-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3ecea2b9d80e5333303eeb77e180b90e95eea8f765d08c3d278cd56b00345d01"}, + {file = "pydantic-1.10.13-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1740068fd8e2ef6eb27a20e5651df000978edce6da6803c2bef0bc74540f9548"}, + {file = "pydantic-1.10.13-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84bafe2e60b5e78bc64a2941b4c071a4b7404c5c907f5f5a99b0139781e69ed8"}, + {file = "pydantic-1.10.13-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bc0898c12f8e9c97f6cd44c0ed70d55749eaf783716896960b4ecce2edfd2d69"}, + {file = "pydantic-1.10.13-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:654db58ae399fe6434e55325a2c3e959836bd17a6f6a0b6ca8107ea0571d2e17"}, + {file = "pydantic-1.10.13-cp310-cp310-win_amd64.whl", hash = "sha256:75ac15385a3534d887a99c713aa3da88a30fbd6204a5cd0dc4dab3d770b9bd2f"}, + {file = "pydantic-1.10.13-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c553f6a156deb868ba38a23cf0df886c63492e9257f60a79c0fd8e7173537653"}, + {file = "pydantic-1.10.13-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5e08865bc6464df8c7d61439ef4439829e3ab62ab1669cddea8dd00cd74b9ffe"}, + {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e31647d85a2013d926ce60b84f9dd5300d44535a9941fe825dc349ae1f760df9"}, + {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:210ce042e8f6f7c01168b2d84d4c9eb2b009fe7bf572c2266e235edf14bacd80"}, + {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8ae5dd6b721459bfa30805f4c25880e0dd78fc5b5879f9f7a692196ddcb5a580"}, + {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f8e81fc5fb17dae698f52bdd1c4f18b6ca674d7068242b2aff075f588301bbb0"}, + {file = "pydantic-1.10.13-cp311-cp311-win_amd64.whl", hash = "sha256:61d9dce220447fb74f45e73d7ff3b530e25db30192ad8d425166d43c5deb6df0"}, + {file = "pydantic-1.10.13-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4b03e42ec20286f052490423682016fd80fda830d8e4119f8ab13ec7464c0132"}, + {file = "pydantic-1.10.13-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f59ef915cac80275245824e9d771ee939133be38215555e9dc90c6cb148aaeb5"}, + {file = "pydantic-1.10.13-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a1f9f747851338933942db7af7b6ee8268568ef2ed86c4185c6ef4402e80ba8"}, + {file = "pydantic-1.10.13-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:97cce3ae7341f7620a0ba5ef6cf043975cd9d2b81f3aa5f4ea37928269bc1b87"}, + {file = "pydantic-1.10.13-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:854223752ba81e3abf663d685f105c64150873cc6f5d0c01d3e3220bcff7d36f"}, + {file = "pydantic-1.10.13-cp37-cp37m-win_amd64.whl", hash = "sha256:b97c1fac8c49be29486df85968682b0afa77e1b809aff74b83081cc115e52f33"}, + {file = "pydantic-1.10.13-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c958d053453a1c4b1c2062b05cd42d9d5c8eb67537b8d5a7e3c3032943ecd261"}, + {file = "pydantic-1.10.13-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c5370a7edaac06daee3af1c8b1192e305bc102abcbf2a92374b5bc793818599"}, + {file = "pydantic-1.10.13-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:7d6f6e7305244bddb4414ba7094ce910560c907bdfa3501e9db1a7fd7eaea127"}, + {file = "pydantic-1.10.13-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d3a3c792a58e1622667a2837512099eac62490cdfd63bd407993aaf200a4cf1f"}, + {file = "pydantic-1.10.13-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c636925f38b8db208e09d344c7aa4f29a86bb9947495dd6b6d376ad10334fb78"}, + {file = "pydantic-1.10.13-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:678bcf5591b63cc917100dc50ab6caebe597ac67e8c9ccb75e698f66038ea953"}, + {file = "pydantic-1.10.13-cp38-cp38-win_amd64.whl", hash = "sha256:6cf25c1a65c27923a17b3da28a0bdb99f62ee04230c931d83e888012851f4e7f"}, + {file = "pydantic-1.10.13-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8ef467901d7a41fa0ca6db9ae3ec0021e3f657ce2c208e98cd511f3161c762c6"}, + {file = "pydantic-1.10.13-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:968ac42970f57b8344ee08837b62f6ee6f53c33f603547a55571c954a4225691"}, + {file = "pydantic-1.10.13-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9849f031cf8a2f0a928fe885e5a04b08006d6d41876b8bbd2fc68a18f9f2e3fd"}, + {file = "pydantic-1.10.13-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:56e3ff861c3b9c6857579de282ce8baabf443f42ffba355bf070770ed63e11e1"}, + {file = "pydantic-1.10.13-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f00790179497767aae6bcdc36355792c79e7bbb20b145ff449700eb076c5f96"}, + {file = "pydantic-1.10.13-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:75b297827b59bc229cac1a23a2f7a4ac0031068e5be0ce385be1462e7e17a35d"}, + {file = "pydantic-1.10.13-cp39-cp39-win_amd64.whl", hash = "sha256:e70ca129d2053fb8b728ee7d1af8e553a928d7e301a311094b8a0501adc8763d"}, + {file = "pydantic-1.10.13-py3-none-any.whl", hash = "sha256:b87326822e71bd5f313e7d3bfdc77ac3247035ac10b0c0618bd99dcf95b1e687"}, + {file = "pydantic-1.10.13.tar.gz", hash = "sha256:32c8b48dcd3b2ac4e78b0ba4af3a2c2eb6048cb75202f0ea7b34feb740efc340"}, ] [package.dependencies] @@ -3529,110 +3528,110 @@ files = [ [[package]] name = "rpds-py" -version = "0.17.1" +version = "0.16.2" description = "Python bindings to Rust's persistent data structures (rpds)" optional = false python-versions = ">=3.8" files = [ - {file = "rpds_py-0.17.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:4128980a14ed805e1b91a7ed551250282a8ddf8201a4e9f8f5b7e6225f54170d"}, - {file = "rpds_py-0.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ff1dcb8e8bc2261a088821b2595ef031c91d499a0c1b031c152d43fe0a6ecec8"}, - {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d65e6b4f1443048eb7e833c2accb4fa7ee67cc7d54f31b4f0555b474758bee55"}, - {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a71169d505af63bb4d20d23a8fbd4c6ce272e7bce6cc31f617152aa784436f29"}, - {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:436474f17733c7dca0fbf096d36ae65277e8645039df12a0fa52445ca494729d"}, - {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:10162fe3f5f47c37ebf6d8ff5a2368508fe22007e3077bf25b9c7d803454d921"}, - {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:720215373a280f78a1814becb1312d4e4d1077b1202a56d2b0815e95ccb99ce9"}, - {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:70fcc6c2906cfa5c6a552ba7ae2ce64b6c32f437d8f3f8eea49925b278a61453"}, - {file = "rpds_py-0.17.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:91e5a8200e65aaac342a791272c564dffcf1281abd635d304d6c4e6b495f29dc"}, - {file = "rpds_py-0.17.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:99f567dae93e10be2daaa896e07513dd4bf9c2ecf0576e0533ac36ba3b1d5394"}, - {file = "rpds_py-0.17.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:24e4900a6643f87058a27320f81336d527ccfe503984528edde4bb660c8c8d59"}, - {file = "rpds_py-0.17.1-cp310-none-win32.whl", hash = "sha256:0bfb09bf41fe7c51413f563373e5f537eaa653d7adc4830399d4e9bdc199959d"}, - {file = "rpds_py-0.17.1-cp310-none-win_amd64.whl", hash = "sha256:20de7b7179e2031a04042e85dc463a93a82bc177eeba5ddd13ff746325558aa6"}, - {file = "rpds_py-0.17.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:65dcf105c1943cba45d19207ef51b8bc46d232a381e94dd38719d52d3980015b"}, - {file = "rpds_py-0.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:01f58a7306b64e0a4fe042047dd2b7d411ee82e54240284bab63e325762c1147"}, - {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:071bc28c589b86bc6351a339114fb7a029f5cddbaca34103aa573eba7b482382"}, - {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ae35e8e6801c5ab071b992cb2da958eee76340e6926ec693b5ff7d6381441745"}, - {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149c5cd24f729e3567b56e1795f74577aa3126c14c11e457bec1b1c90d212e38"}, - {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e796051f2070f47230c745d0a77a91088fbee2cc0502e9b796b9c6471983718c"}, - {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60e820ee1004327609b28db8307acc27f5f2e9a0b185b2064c5f23e815f248f8"}, - {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1957a2ab607f9added64478a6982742eb29f109d89d065fa44e01691a20fc20a"}, - {file = "rpds_py-0.17.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8587fd64c2a91c33cdc39d0cebdaf30e79491cc029a37fcd458ba863f8815383"}, - {file = "rpds_py-0.17.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4dc889a9d8a34758d0fcc9ac86adb97bab3fb7f0c4d29794357eb147536483fd"}, - {file = "rpds_py-0.17.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:2953937f83820376b5979318840f3ee47477d94c17b940fe31d9458d79ae7eea"}, - {file = "rpds_py-0.17.1-cp311-none-win32.whl", hash = "sha256:1bfcad3109c1e5ba3cbe2f421614e70439f72897515a96c462ea657261b96518"}, - {file = "rpds_py-0.17.1-cp311-none-win_amd64.whl", hash = "sha256:99da0a4686ada4ed0f778120a0ea8d066de1a0a92ab0d13ae68492a437db78bf"}, - {file = "rpds_py-0.17.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:1dc29db3900cb1bb40353772417800f29c3d078dbc8024fd64655a04ee3c4bdf"}, - {file = "rpds_py-0.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:82ada4a8ed9e82e443fcef87e22a3eed3654dd3adf6e3b3a0deb70f03e86142a"}, - {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d36b2b59e8cc6e576f8f7b671e32f2ff43153f0ad6d0201250a7c07f25d570e"}, - {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3677fcca7fb728c86a78660c7fb1b07b69b281964673f486ae72860e13f512ad"}, - {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:516fb8c77805159e97a689e2f1c80655c7658f5af601c34ffdb916605598cda2"}, - {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df3b6f45ba4515632c5064e35ca7f31d51d13d1479673185ba8f9fefbbed58b9"}, - {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a967dd6afda7715d911c25a6ba1517975acd8d1092b2f326718725461a3d33f9"}, - {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dbbb95e6fc91ea3102505d111b327004d1c4ce98d56a4a02e82cd451f9f57140"}, - {file = "rpds_py-0.17.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:02866e060219514940342a1f84303a1ef7a1dad0ac311792fbbe19b521b489d2"}, - {file = "rpds_py-0.17.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:2528ff96d09f12e638695f3a2e0c609c7b84c6df7c5ae9bfeb9252b6fa686253"}, - {file = "rpds_py-0.17.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bd345a13ce06e94c753dab52f8e71e5252aec1e4f8022d24d56decd31e1b9b23"}, - {file = "rpds_py-0.17.1-cp312-none-win32.whl", hash = "sha256:2a792b2e1d3038daa83fa474d559acfd6dc1e3650ee93b2662ddc17dbff20ad1"}, - {file = "rpds_py-0.17.1-cp312-none-win_amd64.whl", hash = "sha256:292f7344a3301802e7c25c53792fae7d1593cb0e50964e7bcdcc5cf533d634e3"}, - {file = "rpds_py-0.17.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:8ffe53e1d8ef2520ebcf0c9fec15bb721da59e8ef283b6ff3079613b1e30513d"}, - {file = "rpds_py-0.17.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4341bd7579611cf50e7b20bb8c2e23512a3dc79de987a1f411cb458ab670eb90"}, - {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f4eb548daf4836e3b2c662033bfbfc551db58d30fd8fe660314f86bf8510b93"}, - {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b686f25377f9c006acbac63f61614416a6317133ab7fafe5de5f7dc8a06d42eb"}, - {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4e21b76075c01d65d0f0f34302b5a7457d95721d5e0667aea65e5bb3ab415c25"}, - {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b86b21b348f7e5485fae740d845c65a880f5d1eda1e063bc59bef92d1f7d0c55"}, - {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f175e95a197f6a4059b50757a3dca33b32b61691bdbd22c29e8a8d21d3914cae"}, - {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1701fc54460ae2e5efc1dd6350eafd7a760f516df8dbe51d4a1c79d69472fbd4"}, - {file = "rpds_py-0.17.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:9051e3d2af8f55b42061603e29e744724cb5f65b128a491446cc029b3e2ea896"}, - {file = "rpds_py-0.17.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:7450dbd659fed6dd41d1a7d47ed767e893ba402af8ae664c157c255ec6067fde"}, - {file = "rpds_py-0.17.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:5a024fa96d541fd7edaa0e9d904601c6445e95a729a2900c5aec6555fe921ed6"}, - {file = "rpds_py-0.17.1-cp38-none-win32.whl", hash = "sha256:da1ead63368c04a9bded7904757dfcae01eba0e0f9bc41d3d7f57ebf1c04015a"}, - {file = "rpds_py-0.17.1-cp38-none-win_amd64.whl", hash = "sha256:841320e1841bb53fada91c9725e766bb25009cfd4144e92298db296fb6c894fb"}, - {file = "rpds_py-0.17.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:f6c43b6f97209e370124baf2bf40bb1e8edc25311a158867eb1c3a5d449ebc7a"}, - {file = "rpds_py-0.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:5e7d63ec01fe7c76c2dbb7e972fece45acbb8836e72682bde138e7e039906e2c"}, - {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81038ff87a4e04c22e1d81f947c6ac46f122e0c80460b9006e6517c4d842a6ec"}, - {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:810685321f4a304b2b55577c915bece4c4a06dfe38f6e62d9cc1d6ca8ee86b99"}, - {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:25f071737dae674ca8937a73d0f43f5a52e92c2d178330b4c0bb6ab05586ffa6"}, - {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa5bfb13f1e89151ade0eb812f7b0d7a4d643406caaad65ce1cbabe0a66d695f"}, - {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfe07308b311a8293a0d5ef4e61411c5c20f682db6b5e73de6c7c8824272c256"}, - {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a000133a90eea274a6f28adc3084643263b1e7c1a5a66eb0a0a7a36aa757ed74"}, - {file = "rpds_py-0.17.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d0e8a6434a3fbf77d11448c9c25b2f25244226cfbec1a5159947cac5b8c5fa4"}, - {file = "rpds_py-0.17.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:efa767c220d94aa4ac3a6dd3aeb986e9f229eaf5bce92d8b1b3018d06bed3772"}, - {file = "rpds_py-0.17.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:dbc56680ecf585a384fbd93cd42bc82668b77cb525343170a2d86dafaed2a84b"}, - {file = "rpds_py-0.17.1-cp39-none-win32.whl", hash = "sha256:270987bc22e7e5a962b1094953ae901395e8c1e1e83ad016c5cfcfff75a15a3f"}, - {file = "rpds_py-0.17.1-cp39-none-win_amd64.whl", hash = "sha256:2a7b2f2f56a16a6d62e55354dd329d929560442bd92e87397b7a9586a32e3e76"}, - {file = "rpds_py-0.17.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a3264e3e858de4fc601741498215835ff324ff2482fd4e4af61b46512dd7fc83"}, - {file = "rpds_py-0.17.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:f2f3b28b40fddcb6c1f1f6c88c6f3769cd933fa493ceb79da45968a21dccc920"}, - {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9584f8f52010295a4a417221861df9bea4c72d9632562b6e59b3c7b87a1522b7"}, - {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c64602e8be701c6cfe42064b71c84ce62ce66ddc6422c15463fd8127db3d8066"}, - {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:060f412230d5f19fc8c8b75f315931b408d8ebf56aec33ef4168d1b9e54200b1"}, - {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b9412abdf0ba70faa6e2ee6c0cc62a8defb772e78860cef419865917d86c7342"}, - {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9737bdaa0ad33d34c0efc718741abaafce62fadae72c8b251df9b0c823c63b22"}, - {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9f0e4dc0f17dcea4ab9d13ac5c666b6b5337042b4d8f27e01b70fae41dd65c57"}, - {file = "rpds_py-0.17.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:1db228102ab9d1ff4c64148c96320d0be7044fa28bd865a9ce628ce98da5973d"}, - {file = "rpds_py-0.17.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:d8bbd8e56f3ba25a7d0cf980fc42b34028848a53a0e36c9918550e0280b9d0b6"}, - {file = "rpds_py-0.17.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = 
"sha256:be22ae34d68544df293152b7e50895ba70d2a833ad9566932d750d3625918b82"}, - {file = "rpds_py-0.17.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:bf046179d011e6114daf12a534d874958b039342b347348a78b7cdf0dd9d6041"}, - {file = "rpds_py-0.17.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:1a746a6d49665058a5896000e8d9d2f1a6acba8a03b389c1e4c06e11e0b7f40d"}, - {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0b8bf5b8db49d8fd40f54772a1dcf262e8be0ad2ab0206b5a2ec109c176c0a4"}, - {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f7f4cb1f173385e8a39c29510dd11a78bf44e360fb75610594973f5ea141028b"}, - {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7fbd70cb8b54fe745301921b0816c08b6d917593429dfc437fd024b5ba713c58"}, - {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9bdf1303df671179eaf2cb41e8515a07fc78d9d00f111eadbe3e14262f59c3d0"}, - {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fad059a4bd14c45776600d223ec194e77db6c20255578bb5bcdd7c18fd169361"}, - {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3664d126d3388a887db44c2e293f87d500c4184ec43d5d14d2d2babdb4c64cad"}, - {file = "rpds_py-0.17.1-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:698ea95a60c8b16b58be9d854c9f993c639f5c214cf9ba782eca53a8789d6b19"}, - {file = "rpds_py-0.17.1-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:c3d2010656999b63e628a3c694f23020322b4178c450dc478558a2b6ef3cb9bb"}, - {file = "rpds_py-0.17.1-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:938eab7323a736533f015e6069a7d53ef2dcc841e4e533b782c2bfb9fb12d84b"}, - {file = "rpds_py-0.17.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1e626b365293a2142a62b9a614e1f8e331b28f3ca57b9f05ebbf4cf2a0f0bdc5"}, - {file = "rpds_py-0.17.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:380e0df2e9d5d5d339803cfc6d183a5442ad7ab3c63c2a0982e8c824566c5ccc"}, - {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b760a56e080a826c2e5af09002c1a037382ed21d03134eb6294812dda268c811"}, - {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5576ee2f3a309d2bb403ec292d5958ce03953b0e57a11d224c1f134feaf8c40f"}, - {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f3c3461ebb4c4f1bbc70b15d20b565759f97a5aaf13af811fcefc892e9197ba"}, - {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:637b802f3f069a64436d432117a7e58fab414b4e27a7e81049817ae94de45d8d"}, - {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffee088ea9b593cc6160518ba9bd319b5475e5f3e578e4552d63818773c6f56a"}, - {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3ac732390d529d8469b831949c78085b034bff67f584559340008d0f6041a049"}, - {file = "rpds_py-0.17.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:93432e747fb07fa567ad9cc7aaadd6e29710e515aabf939dfbed8046041346c6"}, - {file = "rpds_py-0.17.1-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:7b7d9ca34542099b4e185b3c2a2b2eda2e318a7dbde0b0d83357a6d4421b5296"}, - {file = 
"rpds_py-0.17.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:0387ce69ba06e43df54e43968090f3626e231e4bc9150e4c3246947567695f68"}, - {file = "rpds_py-0.17.1.tar.gz", hash = "sha256:0210b2668f24c078307260bf88bdac9d6f1093635df5123789bfee4d8d7fc8e7"}, + {file = "rpds_py-0.16.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:509b617ac787cd1149600e731db9274ebbef094503ca25158e6f23edaba1ca8f"}, + {file = "rpds_py-0.16.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:413b9c17388bbd0d87a329d8e30c1a4c6e44e2bb25457f43725a8e6fe4161e9e"}, + {file = "rpds_py-0.16.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2946b120718eba9af2b4dd103affc1164a87b9e9ebff8c3e4c05d7b7a7e274e2"}, + {file = "rpds_py-0.16.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:35ae5ece284cf36464eb160880018cf6088a9ac5ddc72292a6092b6ef3f4da53"}, + {file = "rpds_py-0.16.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc6a7620ba7639a3db6213da61312cb4aa9ac0ca6e00dc1cbbdc21c2aa6eb57"}, + {file = "rpds_py-0.16.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8cb6fe8ecdfffa0e711a75c931fb39f4ba382b4b3ccedeca43f18693864fe850"}, + {file = "rpds_py-0.16.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dace7b26a13353e24613417ce2239491b40a6ad44e5776a18eaff7733488b44"}, + {file = "rpds_py-0.16.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1bdbc5fcb04a7309074de6b67fa9bc4b418ab3fc435fec1f2779a0eced688d04"}, + {file = "rpds_py-0.16.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f42e25c016927e2a6b1ce748112c3ab134261fc2ddc867e92d02006103e1b1b7"}, + {file = "rpds_py-0.16.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:eab36eae3f3e8e24b05748ec9acc66286662f5d25c52ad70cadab544e034536b"}, + {file = "rpds_py-0.16.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0474df4ade9a3b4af96c3d36eb81856cb9462e4c6657d4caecfd840d2a13f3c9"}, + {file = "rpds_py-0.16.2-cp310-none-win32.whl", hash = "sha256:84c5a4d1f9dd7e2d2c44097fb09fffe728629bad31eb56caf97719e55575aa82"}, + {file = "rpds_py-0.16.2-cp310-none-win_amd64.whl", hash = "sha256:2bd82db36cd70b3628c0c57d81d2438e8dd4b7b32a6a9f25f24ab0e657cb6c4e"}, + {file = "rpds_py-0.16.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:adc0c3d6fc6ae35fee3e4917628983f6ce630d513cbaad575b4517d47e81b4bb"}, + {file = "rpds_py-0.16.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ec23fcad480e77ede06cf4127a25fc440f7489922e17fc058f426b5256ee0edb"}, + {file = "rpds_py-0.16.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:07aab64e2808c3ebac2a44f67e9dc0543812b715126dfd6fe4264df527556cb6"}, + {file = "rpds_py-0.16.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a4ebb8b20bd09c5ce7884c8f0388801100f5e75e7f733b1b6613c713371feefc"}, + {file = "rpds_py-0.16.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a3d7e2ea25d3517c6d7e5a1cc3702cffa6bd18d9ef8d08d9af6717fc1c700eed"}, + {file = "rpds_py-0.16.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f28ac0e8e7242d140f99402a903a2c596ab71550272ae9247ad78f9a932b5698"}, + {file = "rpds_py-0.16.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19f00f57fdd38db4bb5ad09f9ead1b535332dbf624200e9029a45f1f35527ebb"}, + {file = "rpds_py-0.16.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:3da5a4c56953bdbf6d04447c3410309616c54433146ccdb4a277b9cb499bc10e"}, + {file = "rpds_py-0.16.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ec2e1cf025b2c0f48ec17ff3e642661da7ee332d326f2e6619366ce8e221f018"}, + {file = "rpds_py-0.16.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e0441fb4fdd39a230477b2ca9be90868af64425bfe7b122b57e61e45737a653b"}, + {file = "rpds_py-0.16.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9f0350ef2fba5f34eb0c9000ea328e51b9572b403d2f7f3b19f24085f6f598e8"}, + {file = "rpds_py-0.16.2-cp311-none-win32.whl", hash = "sha256:5a80e2f83391ad0808b4646732af2a7b67550b98f0cae056cb3b40622a83dbb3"}, + {file = "rpds_py-0.16.2-cp311-none-win_amd64.whl", hash = "sha256:e04e56b4ca7a770593633556e8e9e46579d66ec2ada846b401252a2bdcf70a6d"}, + {file = "rpds_py-0.16.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:5e6caa3809e50690bd92fa490f5c38caa86082c8c3315aa438bce43786d5e90d"}, + {file = "rpds_py-0.16.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2e53b9b25cac9065328901713a7e9e3b12e4f57ef4280b370fbbf6fef2052eef"}, + {file = "rpds_py-0.16.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:af27423662f32d7501a00c5e7342f7dbd1e4a718aea7a239781357d15d437133"}, + {file = "rpds_py-0.16.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:43d4dd5fb16eb3825742bad8339d454054261ab59fed2fbac84e1d84d5aae7ba"}, + {file = "rpds_py-0.16.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e061de3b745fe611e23cd7318aec2c8b0e4153939c25c9202a5811ca911fd733"}, + {file = "rpds_py-0.16.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b811d182ad17ea294f2ec63c0621e7be92a1141e1012383461872cead87468f"}, + {file = "rpds_py-0.16.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5552f328eaef1a75ff129d4d0c437bf44e43f9436d3996e8eab623ea0f5fcf73"}, + {file = "rpds_py-0.16.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dcbe1f8dd179e4d69b70b1f1d9bb6fd1e7e1bdc9c9aad345cdeb332e29d40748"}, + {file = "rpds_py-0.16.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8aad80645a011abae487d356e0ceb359f4938dfb6f7bcc410027ed7ae4f7bb8b"}, + {file = "rpds_py-0.16.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b6f5549d6ed1da9bfe3631ca9483ae906f21410be2445b73443fa9f017601c6f"}, + {file = "rpds_py-0.16.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d452817e0d9c749c431a1121d56a777bd7099b720b3d1c820f1725cb40928f58"}, + {file = "rpds_py-0.16.2-cp312-none-win32.whl", hash = "sha256:888a97002e986eca10d8546e3c8b97da1d47ad8b69726dcfeb3e56348ebb28a3"}, + {file = "rpds_py-0.16.2-cp312-none-win_amd64.whl", hash = "sha256:d8dda2a806dfa4a9b795950c4f5cc56d6d6159f7d68080aedaff3bdc9b5032f5"}, + {file = "rpds_py-0.16.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:071980663c273bf3d388fe5c794c547e6f35ba3335477072c713a3176bf14a60"}, + {file = "rpds_py-0.16.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:726ac36e8a3bb8daef2fd482534cabc5e17334052447008405daca7ca04a3108"}, + {file = "rpds_py-0.16.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9e557db6a177470316c82f023e5d571811c9a4422b5ea084c85da9aa3c035fc"}, + {file = "rpds_py-0.16.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:90123853fc8b1747f80b0d354be3d122b4365a93e50fc3aacc9fb4c2488845d6"}, + {file = "rpds_py-0.16.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:a61f659665a39a4d17d699ab3593d7116d66e1e2e3f03ef3fb8f484e91908808"}, + {file = "rpds_py-0.16.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc97f0640e91d7776530f06e6836c546c1c752a52de158720c4224c9e8053cad"}, + {file = "rpds_py-0.16.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44a54e99a2b9693a37ebf245937fd6e9228b4cbd64b9cc961e1f3391ec6c7391"}, + {file = "rpds_py-0.16.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bd4b677d929cf1f6bac07ad76e0f2d5de367e6373351c01a9c0a39f6b21b4a8b"}, + {file = "rpds_py-0.16.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:5ef00873303d678aaf8b0627e111fd434925ca01c657dbb2641410f1cdaef261"}, + {file = "rpds_py-0.16.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:349cb40897fd529ca15317c22c0eab67f5ac5178b5bd2c6adc86172045210acc"}, + {file = "rpds_py-0.16.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:2ddef620e70eaffebed5932ce754d539c0930f676aae6212f8e16cd9743dd365"}, + {file = "rpds_py-0.16.2-cp38-none-win32.whl", hash = "sha256:882ce6e25e585949c3d9f9abd29202367175e0aab3aba0c58c9abbb37d4982ff"}, + {file = "rpds_py-0.16.2-cp38-none-win_amd64.whl", hash = "sha256:f4bd4578e44f26997e9e56c96dedc5f1af43cc9d16c4daa29c771a00b2a26851"}, + {file = "rpds_py-0.16.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:69ac7ea9897ec201ce68b48582f3eb34a3f9924488a5432a93f177bf76a82a7e"}, + {file = "rpds_py-0.16.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a9880b4656efe36ccad41edc66789e191e5ee19a1ea8811e0aed6f69851a82f4"}, + {file = "rpds_py-0.16.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee94cb58c0ba2c62ee108c2b7c9131b2c66a29e82746e8fa3aa1a1effbd3dcf1"}, + {file = "rpds_py-0.16.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:24f7a2eb3866a9e91f4599851e0c8d39878a470044875c49bd528d2b9b88361c"}, + {file = "rpds_py-0.16.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ca57468da2d9a660bcf8961637c85f2fbb2aa64d9bc3f9484e30c3f9f67b1dd7"}, + {file = "rpds_py-0.16.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ccd4e400309e1f34a5095bf9249d371f0fd60f8a3a5c4a791cad7b99ce1fd38d"}, + {file = "rpds_py-0.16.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80443fe2f7b3ea3934c5d75fb0e04a5dbb4a8e943e5ff2de0dec059202b70a8b"}, + {file = "rpds_py-0.16.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4d6a9f052e72d493efd92a77f861e45bab2f6be63e37fa8ecf0c6fd1a58fedb0"}, + {file = "rpds_py-0.16.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:35953f4f2b3216421af86fd236b7c0c65935936a94ea83ddbd4904ba60757773"}, + {file = "rpds_py-0.16.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:981d135c7cdaf6cd8eadae1c950de43b976de8f09d8e800feed307140d3d6d00"}, + {file = "rpds_py-0.16.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:d0dd7ed2f16df2e129496e7fbe59a34bc2d7fc8db443a606644d069eb69cbd45"}, + {file = "rpds_py-0.16.2-cp39-none-win32.whl", hash = "sha256:703d95c75a72e902544fda08e965885525e297578317989fd15a6ce58414b41d"}, + {file = "rpds_py-0.16.2-cp39-none-win_amd64.whl", hash = "sha256:e93ec1b300acf89730cf27975ef574396bc04edecc358e9bd116fb387a123239"}, + {file = "rpds_py-0.16.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:44627b6ca7308680a70766454db5249105fa6344853af6762eaad4158a2feebe"}, + {file = "rpds_py-0.16.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:3f91df8e6dbb7360e176d1affd5fb0246d2b88d16aa5ebc7db94fd66b68b61da"}, + {file = "rpds_py-0.16.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d904c5693e08bad240f16d79305edba78276be87061c872a4a15e2c301fa2c0"}, + {file = "rpds_py-0.16.2-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:290a81cfbe4673285cdf140ec5cd1658ffbf63ab359f2b352ebe172e7cfa5bf0"}, + {file = "rpds_py-0.16.2-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b634c5ec0103c5cbebc24ebac4872b045cccb9456fc59efdcf6fe39775365bd2"}, + {file = "rpds_py-0.16.2-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a297a4d08cc67c7466c873c78039d87840fb50d05473db0ec1b7b03d179bf322"}, + {file = "rpds_py-0.16.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2e75e17bd0bb66ee34a707da677e47c14ee51ccef78ed6a263a4cc965a072a1"}, + {file = "rpds_py-0.16.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f1b9d9260e06ea017feb7172976ab261e011c1dc2f8883c7c274f6b2aabfe01a"}, + {file = "rpds_py-0.16.2-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:162d7cd9cd311c1b0ff1c55a024b8f38bd8aad1876b648821da08adc40e95734"}, + {file = "rpds_py-0.16.2-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:9b32f742ce5b57201305f19c2ef7a184b52f6f9ba6871cc042c2a61f0d6b49b8"}, + {file = "rpds_py-0.16.2-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:ac08472f41ea77cd6a5dae36ae7d4ed3951d6602833af87532b556c1b4601d63"}, + {file = "rpds_py-0.16.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:495a14b72bbe217f2695dcd9b5ab14d4f8066a00f5d209ed94f0aca307f85f6e"}, + {file = "rpds_py-0.16.2-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:8d6b6937ae9eac6d6c0ca3c42774d89fa311f55adff3970fb364b34abde6ed3d"}, + {file = "rpds_py-0.16.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a61226465bda9283686db8f17d02569a98e4b13c637be5a26d44aa1f1e361c2"}, + {file = "rpds_py-0.16.2-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5cf6af100ffb5c195beec11ffaa8cf8523057f123afa2944e6571d54da84cdc9"}, + {file = "rpds_py-0.16.2-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6df15846ee3fb2e6397fe25d7ca6624af9f89587f3f259d177b556fed6bebe2c"}, + {file = "rpds_py-0.16.2-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1be2f033df1b8be8c3167ba3c29d5dca425592ee31e35eac52050623afba5772"}, + {file = "rpds_py-0.16.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96f957d6ab25a78b9e7fc9749d754b98eac825a112b4e666525ce89afcbd9ed5"}, + {file = "rpds_py-0.16.2-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:088396c7c70e59872f67462fcac3ecbded5233385797021976a09ebd55961dfe"}, + {file = "rpds_py-0.16.2-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:4c46ad6356e1561f2a54f08367d1d2e70a0a1bb2db2282d2c1972c1d38eafc3b"}, + {file = "rpds_py-0.16.2-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:47713dc4fce213f5c74ca8a1f6a59b622fc1b90868deb8e8e4d993e421b4b39d"}, + {file = "rpds_py-0.16.2-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:f811771019f063bbd0aa7bb72c8a934bc13ebacb4672d712fc1639cfd314cccc"}, + {file = "rpds_py-0.16.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f19afcfc0dd0dca35694df441e9b0f95bc231b512f51bded3c3d8ca32153ec19"}, + 
{file = "rpds_py-0.16.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a4b682c5775d6a3d21e314c10124599976809455ee67020e8e72df1769b87bc3"}, + {file = "rpds_py-0.16.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c647ca87fc0ebe808a41de912e9a1bfef9acb85257e5d63691364ac16b81c1f0"}, + {file = "rpds_py-0.16.2-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:302bd4983bbd47063e452c38be66153760112f6d3635c7eeefc094299fa400a9"}, + {file = "rpds_py-0.16.2-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bf721ede3eb7b829e4a9b8142bd55db0bdc82902720548a703f7e601ee13bdc3"}, + {file = "rpds_py-0.16.2-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:358dafc89ce3894c7f486c615ba914609f38277ef67f566abc4c854d23b997fa"}, + {file = "rpds_py-0.16.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cad0f59ee3dc35526039f4bc23642d52d5f6616b5f687d846bfc6d0d6d486db0"}, + {file = "rpds_py-0.16.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cffa76b385dfe1e38527662a302b19ffb0e7f5cf7dd5e89186d2c94a22dd9d0c"}, + {file = "rpds_py-0.16.2-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:83640a5d7cd3bff694747d50436b8b541b5b9b9782b0c8c1688931d6ee1a1f2d"}, + {file = "rpds_py-0.16.2-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:ed99b4f7179d2111702020fd7d156e88acd533f5a7d3971353e568b6051d5c97"}, + {file = "rpds_py-0.16.2-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:4022b9dc620e14f30201a8a73898a873c8e910cb642bcd2f3411123bc527f6ac"}, + {file = "rpds_py-0.16.2.tar.gz", hash = "sha256:781ef8bfc091b19960fc0142a23aedadafa826bc32b433fdfe6fd7f964d7ef44"}, ] [[package]] @@ -3728,20 +3727,20 @@ files = [ [[package]] name = "schematic-db" -version = "0.0.dev33" +version = "0.0.34" description = "" optional = false python-versions = ">=3.9,<4.0" files = [ - {file = "schematic_db-0.0.dev33-py3-none-any.whl", hash = "sha256:9a274b038e5d3f382fd22300350fb4c02e0f147e5846808b324714fb30bd9e75"}, - {file = "schematic_db-0.0.dev33.tar.gz", hash = "sha256:01cadedbfa10915727c0bdf88c9184353db1294d8c941e69a824d16f12bb4701"}, + {file = "schematic_db-0.0.34-py3-none-any.whl", hash = "sha256:fb9194ee9085c12e4044b6f4bdb934807767fbf80e3d792c5e8a65c0a6e43e95"}, + {file = "schematic_db-0.0.34.tar.gz", hash = "sha256:29e1fb752e330c3319670d4c562f7f54d554c23c47d9ed19910a107438c70169"}, ] [package.dependencies] deprecation = ">=2.1.0,<3.0.0" interrogate = ">=1.5.0,<2.0.0" networkx = ">=2.8.6,<3.0.0" -pandas = "1.5.3" +pandas = ">=2.0.0,<3.0.0" pydantic = ">=1.10.7,<2.0.0" PyYAML = ">=6.0,<7.0" requests = ">=2.28.1,<3.0.0" @@ -3758,45 +3757,45 @@ synapse = ["synapseclient (>=3.0.0,<4.0.0)"] [[package]] name = "scipy" -version = "1.12.0" +version = "1.11.4" description = "Fundamental algorithms for scientific computing in Python" optional = false python-versions = ">=3.9" files = [ - {file = "scipy-1.12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:78e4402e140879387187f7f25d91cc592b3501a2e51dfb320f48dfb73565f10b"}, - {file = "scipy-1.12.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:f5f00ebaf8de24d14b8449981a2842d404152774c1a1d880c901bf454cb8e2a1"}, - {file = "scipy-1.12.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e53958531a7c695ff66c2e7bb7b79560ffdc562e2051644c5576c39ff8efb563"}, - {file = "scipy-1.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:5e32847e08da8d895ce09d108a494d9eb78974cf6de23063f93306a3e419960c"}, - {file = "scipy-1.12.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4c1020cad92772bf44b8e4cdabc1df5d87376cb219742549ef69fc9fd86282dd"}, - {file = "scipy-1.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:75ea2a144096b5e39402e2ff53a36fecfd3b960d786b7efd3c180e29c39e53f2"}, - {file = "scipy-1.12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:408c68423f9de16cb9e602528be4ce0d6312b05001f3de61fe9ec8b1263cad08"}, - {file = "scipy-1.12.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:5adfad5dbf0163397beb4aca679187d24aec085343755fcdbdeb32b3679f254c"}, - {file = "scipy-1.12.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3003652496f6e7c387b1cf63f4bb720951cfa18907e998ea551e6de51a04467"}, - {file = "scipy-1.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b8066bce124ee5531d12a74b617d9ac0ea59245246410e19bca549656d9a40a"}, - {file = "scipy-1.12.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8bee4993817e204d761dba10dbab0774ba5a8612e57e81319ea04d84945375ba"}, - {file = "scipy-1.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:a24024d45ce9a675c1fb8494e8e5244efea1c7a09c60beb1eeb80373d0fecc70"}, - {file = "scipy-1.12.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e7e76cc48638228212c747ada851ef355c2bb5e7f939e10952bc504c11f4e372"}, - {file = "scipy-1.12.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:f7ce148dffcd64ade37b2df9315541f9adad6efcaa86866ee7dd5db0c8f041c3"}, - {file = "scipy-1.12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c39f92041f490422924dfdb782527a4abddf4707616e07b021de33467f917bc"}, - {file = "scipy-1.12.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a7ebda398f86e56178c2fa94cad15bf457a218a54a35c2a7b4490b9f9cb2676c"}, - {file = "scipy-1.12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:95e5c750d55cf518c398a8240571b0e0782c2d5a703250872f36eaf737751338"}, - {file = "scipy-1.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:e646d8571804a304e1da01040d21577685ce8e2db08ac58e543eaca063453e1c"}, - {file = "scipy-1.12.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:913d6e7956c3a671de3b05ccb66b11bc293f56bfdef040583a7221d9e22a2e35"}, - {file = "scipy-1.12.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:bba1b0c7256ad75401c73e4b3cf09d1f176e9bd4248f0d3112170fb2ec4db067"}, - {file = "scipy-1.12.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:730badef9b827b368f351eacae2e82da414e13cf8bd5051b4bdfd720271a5371"}, - {file = "scipy-1.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6546dc2c11a9df6926afcbdd8a3edec28566e4e785b915e849348c6dd9f3f490"}, - {file = "scipy-1.12.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:196ebad3a4882081f62a5bf4aeb7326aa34b110e533aab23e4374fcccb0890dc"}, - {file = "scipy-1.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:b360f1b6b2f742781299514e99ff560d1fe9bd1bff2712894b52abe528d1fd1e"}, - {file = "scipy-1.12.0.tar.gz", hash = "sha256:4bf5abab8a36d20193c698b0f1fc282c1d083c94723902c447e5d2f1780936a3"}, + {file = "scipy-1.11.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bc9a714581f561af0848e6b69947fda0614915f072dfd14142ed1bfe1b806710"}, + {file = "scipy-1.11.4-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:cf00bd2b1b0211888d4dc75656c0412213a8b25e80d73898083f402b50f47e41"}, + {file = "scipy-1.11.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:b9999c008ccf00e8fbcce1236f85ade5c569d13144f77a1946bef8863e8f6eb4"}, + {file = "scipy-1.11.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:933baf588daa8dc9a92c20a0be32f56d43faf3d1a60ab11b3f08c356430f6e56"}, + {file = "scipy-1.11.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8fce70f39076a5aa62e92e69a7f62349f9574d8405c0a5de6ed3ef72de07f446"}, + {file = "scipy-1.11.4-cp310-cp310-win_amd64.whl", hash = "sha256:6550466fbeec7453d7465e74d4f4b19f905642c89a7525571ee91dd7adabb5a3"}, + {file = "scipy-1.11.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f313b39a7e94f296025e3cffc2c567618174c0b1dde173960cf23808f9fae4be"}, + {file = "scipy-1.11.4-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:1b7c3dca977f30a739e0409fb001056484661cb2541a01aba0bb0029f7b68db8"}, + {file = "scipy-1.11.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00150c5eae7b610c32589dda259eacc7c4f1665aedf25d921907f4d08a951b1c"}, + {file = "scipy-1.11.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:530f9ad26440e85766509dbf78edcfe13ffd0ab7fec2560ee5c36ff74d6269ff"}, + {file = "scipy-1.11.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5e347b14fe01003d3b78e196e84bd3f48ffe4c8a7b8a1afbcb8f5505cb710993"}, + {file = "scipy-1.11.4-cp311-cp311-win_amd64.whl", hash = "sha256:acf8ed278cc03f5aff035e69cb511741e0418681d25fbbb86ca65429c4f4d9cd"}, + {file = "scipy-1.11.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:028eccd22e654b3ea01ee63705681ee79933652b2d8f873e7949898dda6d11b6"}, + {file = "scipy-1.11.4-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:2c6ff6ef9cc27f9b3db93a6f8b38f97387e6e0591600369a297a50a8e96e835d"}, + {file = "scipy-1.11.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b030c6674b9230d37c5c60ab456e2cf12f6784596d15ce8da9365e70896effc4"}, + {file = "scipy-1.11.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad669df80528aeca5f557712102538f4f37e503f0c5b9541655016dd0932ca79"}, + {file = "scipy-1.11.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ce7fff2e23ab2cc81ff452a9444c215c28e6305f396b2ba88343a567feec9660"}, + {file = "scipy-1.11.4-cp312-cp312-win_amd64.whl", hash = "sha256:36750b7733d960d7994888f0d148d31ea3017ac15eef664194b4ef68d36a4a97"}, + {file = "scipy-1.11.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6e619aba2df228a9b34718efb023966da781e89dd3d21637b27f2e54db0410d7"}, + {file = "scipy-1.11.4-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:f3cd9e7b3c2c1ec26364856f9fbe78695fe631150f94cd1c22228456404cf1ec"}, + {file = "scipy-1.11.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d10e45a6c50211fe256da61a11c34927c68f277e03138777bdebedd933712fea"}, + {file = "scipy-1.11.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91af76a68eeae0064887a48e25c4e616fa519fa0d38602eda7e0f97d65d57937"}, + {file = "scipy-1.11.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6df1468153a31cf55ed5ed39647279beb9cfb5d3f84369453b49e4b8502394fd"}, + {file = "scipy-1.11.4-cp39-cp39-win_amd64.whl", hash = "sha256:ee410e6de8f88fd5cf6eadd73c135020bfbbbdfcd0f6162c36a7638a1ea8cc65"}, + {file = "scipy-1.11.4.tar.gz", hash = "sha256:90a2b78e7f5733b9de748f589f09225013685f9b218275257f8a8168ededaeaa"}, ] [package.dependencies] -numpy = ">=1.22.4,<1.29.0" +numpy = ">=1.21.6,<1.28.0" [package.extras] dev = ["click", "cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy", "pycodestyle", "pydevtool", 
"rich-click", "ruff", "types-psutil", "typing_extensions"] doc = ["jupytext", "matplotlib (>2)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (==0.9.0)", "sphinx (!=4.1.0)", "sphinx-design (>=0.2.0)"] -test = ["asv", "gmpy2", "hypothesis", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] +test = ["asv", "gmpy2", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] [[package]] name = "secretstorage" @@ -3942,50 +3941,56 @@ sphinx = ">=2.0" [[package]] name = "sphinxcontrib-applehelp" -version = "1.0.8" +version = "1.0.7" description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" optional = false python-versions = ">=3.9" files = [ - {file = "sphinxcontrib_applehelp-1.0.8-py3-none-any.whl", hash = "sha256:cb61eb0ec1b61f349e5cc36b2028e9e7ca765be05e49641c97241274753067b4"}, - {file = "sphinxcontrib_applehelp-1.0.8.tar.gz", hash = "sha256:c40a4f96f3776c4393d933412053962fac2b84f4c99a7982ba42e09576a70619"}, + {file = "sphinxcontrib_applehelp-1.0.7-py3-none-any.whl", hash = "sha256:094c4d56209d1734e7d252f6e0b3ccc090bd52ee56807a5d9315b19c122ab15d"}, + {file = "sphinxcontrib_applehelp-1.0.7.tar.gz", hash = "sha256:39fdc8d762d33b01a7d8f026a3b7d71563ea3b72787d5f00ad8465bd9d6dfbfa"}, ] +[package.dependencies] +Sphinx = ">=5" + [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] -standalone = ["Sphinx (>=5)"] test = ["pytest"] [[package]] name = "sphinxcontrib-devhelp" -version = "1.0.6" +version = "1.0.5" description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents" optional = false python-versions = ">=3.9" files = [ - {file = "sphinxcontrib_devhelp-1.0.6-py3-none-any.whl", hash = "sha256:6485d09629944511c893fa11355bda18b742b83a2b181f9a009f7e500595c90f"}, - {file = "sphinxcontrib_devhelp-1.0.6.tar.gz", hash = "sha256:9893fd3f90506bc4b97bdb977ceb8fbd823989f4316b28c3841ec128544372d3"}, + {file = "sphinxcontrib_devhelp-1.0.5-py3-none-any.whl", hash = "sha256:fe8009aed765188f08fcaadbb3ea0d90ce8ae2d76710b7e29ea7d047177dae2f"}, + {file = "sphinxcontrib_devhelp-1.0.5.tar.gz", hash = "sha256:63b41e0d38207ca40ebbeabcf4d8e51f76c03e78cd61abe118cf4435c73d4212"}, ] +[package.dependencies] +Sphinx = ">=5" + [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] -standalone = ["Sphinx (>=5)"] test = ["pytest"] [[package]] name = "sphinxcontrib-htmlhelp" -version = "2.0.5" +version = "2.0.4" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" optional = false python-versions = ">=3.9" files = [ - {file = "sphinxcontrib_htmlhelp-2.0.5-py3-none-any.whl", hash = "sha256:393f04f112b4d2f53d93448d4bce35842f62b307ccdc549ec1585e950bc35e04"}, - {file = "sphinxcontrib_htmlhelp-2.0.5.tar.gz", hash = "sha256:0dc87637d5de53dd5eec3a6a01753b1ccf99494bd756aafecd74b4fa9e729015"}, + {file = "sphinxcontrib_htmlhelp-2.0.4-py3-none-any.whl", hash = "sha256:8001661c077a73c29beaf4a79968d0726103c5605e27db92b9ebed8bab1359e9"}, + {file = "sphinxcontrib_htmlhelp-2.0.4.tar.gz", hash = "sha256:6c26a118a05b76000738429b724a0568dbde5b72391a688577da08f11891092a"}, ] +[package.dependencies] +Sphinx = ">=5" + [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] -standalone = ["Sphinx (>=5)"] test = ["html5lib", "pytest"] [[package]] @@ -4004,34 +4009,38 @@ test = ["flake8", "mypy", "pytest"] [[package]] name = "sphinxcontrib-qthelp" -version = "1.0.7" +version = "1.0.6" description = 
"sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents" optional = false python-versions = ">=3.9" files = [ - {file = "sphinxcontrib_qthelp-1.0.7-py3-none-any.whl", hash = "sha256:e2ae3b5c492d58fcbd73281fbd27e34b8393ec34a073c792642cd8e529288182"}, - {file = "sphinxcontrib_qthelp-1.0.7.tar.gz", hash = "sha256:053dedc38823a80a7209a80860b16b722e9e0209e32fea98c90e4e6624588ed6"}, + {file = "sphinxcontrib_qthelp-1.0.6-py3-none-any.whl", hash = "sha256:bf76886ee7470b934e363da7a954ea2825650013d367728588732c7350f49ea4"}, + {file = "sphinxcontrib_qthelp-1.0.6.tar.gz", hash = "sha256:62b9d1a186ab7f5ee3356d906f648cacb7a6bdb94d201ee7adf26db55092982d"}, ] +[package.dependencies] +Sphinx = ">=5" + [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] -standalone = ["Sphinx (>=5)"] test = ["pytest"] [[package]] name = "sphinxcontrib-serializinghtml" -version = "1.1.10" +version = "1.1.9" description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)" optional = false python-versions = ">=3.9" files = [ - {file = "sphinxcontrib_serializinghtml-1.1.10-py3-none-any.whl", hash = "sha256:326369b8df80a7d2d8d7f99aa5ac577f51ea51556ed974e7716cfd4fca3f6cb7"}, - {file = "sphinxcontrib_serializinghtml-1.1.10.tar.gz", hash = "sha256:93f3f5dc458b91b192fe10c397e324f262cf163d79f3282c158e8436a2c4511f"}, + {file = "sphinxcontrib_serializinghtml-1.1.9-py3-none-any.whl", hash = "sha256:9b36e503703ff04f20e9675771df105e58aa029cfcbc23b8ed716019b7416ae1"}, + {file = "sphinxcontrib_serializinghtml-1.1.9.tar.gz", hash = "sha256:0c64ff898339e1fac29abd2bf5f11078f3ec413cfe9c046d3120d7ca65530b54"}, ] +[package.dependencies] +Sphinx = ">=5" + [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] -standalone = ["Sphinx (>=5)"] test = ["pytest"] [[package]] @@ -4313,13 +4322,13 @@ files = [ [[package]] name = "toolz" -version = "0.12.1" +version = "0.12.0" description = "List processing tools and functional utilities" optional = false -python-versions = ">=3.7" +python-versions = ">=3.5" files = [ - {file = "toolz-0.12.1-py3-none-any.whl", hash = "sha256:d22731364c07d72eea0a0ad45bafb2c2937ab6fd38a3507bf55eae8744aa7d85"}, - {file = "toolz-0.12.1.tar.gz", hash = "sha256:ecca342664893f177a13dac0e6b41cbd8ac25a358e5f215316d43e2100224f4d"}, + {file = "toolz-0.12.0-py3-none-any.whl", hash = "sha256:2059bd4148deb1884bb0eb770a3cde70e7f954cfbbdc2285f1f2de01fd21eb6f"}, + {file = "toolz-0.12.0.tar.gz", hash = "sha256:88c570861c440ee3f2f6037c4654613228ff40c93a6c25e0eba70d17282c6194"}, ] [[package]] @@ -4399,21 +4408,6 @@ files = [ {file = "typing_extensions-4.5.0.tar.gz", hash = "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb"}, ] -[[package]] -name = "typing-inspect" -version = "0.9.0" -description = "Runtime inspection utilities for typing module." 
-optional = false -python-versions = "*" -files = [ - {file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"}, - {file = "typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78"}, -] - -[package.dependencies] -mypy-extensions = ">=0.3.0" -typing-extensions = ">=3.7.4" - [[package]] name = "tzdata" version = "2023.4" @@ -4684,4 +4678,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = ">=3.9.0,<3.11" -content-hash = "e7a53bb762e4472eb7fefd0ea60c026f3ec037a8c5e268e613e959500cde0ebf" +content-hash = "65fd6ec0494aecb3e9b89b59479440ff24be22c8867df6718ddd16eac3e7bdec" From 16145bd9c877f51c51bb71c482c344f79af11ab1 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Tue, 16 Jan 2024 10:04:29 -0700 Subject: [PATCH 089/199] regen `.lock` file --- poetry.lock | 288 +++++++++++++++++++++++++--------------------------- 1 file changed, 139 insertions(+), 149 deletions(-) diff --git a/poetry.lock b/poetry.lock index cb25afc9f..9eb41743d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1007,13 +1007,13 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-api-python-client" -version = "2.113.0" +version = "2.114.0" description = "Google API Client Library for Python" optional = false python-versions = ">=3.7" files = [ - {file = "google-api-python-client-2.113.0.tar.gz", hash = "sha256:bcffbc8ffbad631f699cf85aa91993f3dc03060b234ca9e6e2f9135028bd9b52"}, - {file = "google_api_python_client-2.113.0-py2.py3-none-any.whl", hash = "sha256:25659d488df6c8a69615b2a510af0e63b4c47ab2cb87d71c1e13b28715906e27"}, + {file = "google-api-python-client-2.114.0.tar.gz", hash = "sha256:e041bbbf60e682261281e9d64b4660035f04db1cccba19d1d68eebc24d1465ed"}, + {file = "google_api_python_client-2.114.0-py2.py3-none-any.whl", hash = "sha256:690e0bb67d70ff6dea4e8a5d3738639c105a478ac35da153d3b2a384064e9e1a"}, ] [package.dependencies] @@ -1361,13 +1361,13 @@ tests = ["pytest", "pytest-cov", "pytest-mock"] [[package]] name = "ipykernel" -version = "6.28.0" +version = "6.29.0" description = "IPython Kernel for Jupyter" optional = false python-versions = ">=3.8" files = [ - {file = "ipykernel-6.28.0-py3-none-any.whl", hash = "sha256:c6e9a9c63a7f4095c0a22a79f765f079f9ec7be4f2430a898ddea889e8665661"}, - {file = "ipykernel-6.28.0.tar.gz", hash = "sha256:69c11403d26de69df02225916f916b37ea4b9af417da0a8c827f84328d88e5f3"}, + {file = "ipykernel-6.29.0-py3-none-any.whl", hash = "sha256:076663ca68492576f051e4af7720d33f34383e655f2be0d544c8b1c9de915b2f"}, + {file = "ipykernel-6.29.0.tar.gz", hash = "sha256:b5dd3013cab7b330df712891c96cd1ab868c27a7159e606f762015e9bf8ceb3f"}, ] [package.dependencies] @@ -1390,7 +1390,7 @@ cov = ["coverage[toml]", "curio", "matplotlib", "pytest-cov", "trio"] docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "trio"] pyqt5 = ["pyqt5"] pyside6 = ["pyside6"] -test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio", "pytest-cov", "pytest-timeout"] +test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (==0.23.2)", "pytest-cov", "pytest-timeout"] [[package]] name = "ipython" @@ -1595,13 +1595,13 @@ files = [ [[package]] name = "jsonschema" -version = "4.20.0" +version = "4.21.0" description = "An implementation of 
JSON Schema validation for Python" optional = false python-versions = ">=3.8" files = [ - {file = "jsonschema-4.20.0-py3-none-any.whl", hash = "sha256:ed6231f0429ecf966f5bc8dfef245998220549cbbcf140f913b7464c52c3b6b3"}, - {file = "jsonschema-4.20.0.tar.gz", hash = "sha256:4f614fd46d8d61258610998997743ec5492a648b33cf478c1ddc23ed4598a5fa"}, + {file = "jsonschema-4.21.0-py3-none-any.whl", hash = "sha256:70a09719d375c0a2874571b363c8a24be7df8071b80c9aa76bc4551e7297c63c"}, + {file = "jsonschema-4.21.0.tar.gz", hash = "sha256:3ba18e27f7491ea4a1b22edce00fb820eec968d397feb3f9cb61d5894bb38167"}, ] [package.dependencies] @@ -1721,13 +1721,13 @@ jupyter-server = ">=1.1.2" [[package]] name = "jupyter-server" -version = "2.12.4" +version = "2.12.5" description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." optional = false python-versions = ">=3.8" files = [ - {file = "jupyter_server-2.12.4-py3-none-any.whl", hash = "sha256:a125ae18a60de568f78f55c84dd58759901a18ef279abf0418ac220653ca1320"}, - {file = "jupyter_server-2.12.4.tar.gz", hash = "sha256:41f4a1e6b912cc24a7c6c694851b37d3d8412b180f43d72315fe422cb2b85cc2"}, + {file = "jupyter_server-2.12.5-py3-none-any.whl", hash = "sha256:184a0f82809a8522777cfb6b760ab6f4b1bb398664c5860a27cec696cb884923"}, + {file = "jupyter_server-2.12.5.tar.gz", hash = "sha256:0edb626c94baa22809be1323f9770cf1c00a952b17097592e40d03e6a3951689"}, ] [package.dependencies] @@ -2137,13 +2137,13 @@ test = ["flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "nbconvert (>= [[package]] name = "nbconvert" -version = "7.14.1" +version = "7.14.2" description = "Converting Jupyter Notebooks" optional = false python-versions = ">=3.8" files = [ - {file = "nbconvert-7.14.1-py3-none-any.whl", hash = "sha256:aa83e3dd27ea38d0c1d908e3ce9518d15fa908dd30521b6d5040bd23f33fffb0"}, - {file = "nbconvert-7.14.1.tar.gz", hash = "sha256:20cba10e0448dc76b3bebfe1adf923663e3b98338daf77b97b42511ef5a88618"}, + {file = "nbconvert-7.14.2-py3-none-any.whl", hash = "sha256:db28590cef90f7faf2ebbc71acd402cbecf13d29176df728c0a9025a49345ea1"}, + {file = "nbconvert-7.14.2.tar.gz", hash = "sha256:a7f8808fd4e082431673ac538400218dd45efd076fbeb07cc6e5aa5a3a4e949e"}, ] [package.dependencies] @@ -2196,13 +2196,13 @@ test = ["pep440", "pre-commit", "pytest", "testpath"] [[package]] name = "nest-asyncio" -version = "1.5.8" +version = "1.5.9" description = "Patch asyncio to allow nested event loops" optional = false python-versions = ">=3.5" files = [ - {file = "nest_asyncio-1.5.8-py3-none-any.whl", hash = "sha256:accda7a339a70599cb08f9dd09a67e0c2ef8d8d6f4c07f96ab203f2ae254e48d"}, - {file = "nest_asyncio-1.5.8.tar.gz", hash = "sha256:25aa2ca0d2a5b5531956b9e273b45cf664cae2b145101d73b86b199978d48fdb"}, + {file = "nest_asyncio-1.5.9-py3-none-any.whl", hash = "sha256:61ec07ef052e72e3de22045b81b2cc7d71fceb04c568ba0b2e4b2f9f5231bec2"}, + {file = "nest_asyncio-1.5.9.tar.gz", hash = "sha256:d1e1144e9c6e3e6392e0fcf5211cb1c8374b5648a98f1ebe48e5336006b41907"}, ] [[package]] @@ -3528,110 +3528,110 @@ files = [ [[package]] name = "rpds-py" -version = "0.16.2" +version = "0.17.1" description = "Python bindings to Rust's persistent data structures (rpds)" optional = false python-versions = ">=3.8" files = [ - {file = "rpds_py-0.16.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:509b617ac787cd1149600e731db9274ebbef094503ca25158e6f23edaba1ca8f"}, - {file = "rpds_py-0.16.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:413b9c17388bbd0d87a329d8e30c1a4c6e44e2bb25457f43725a8e6fe4161e9e"}, - 
{file = "rpds_py-0.16.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2946b120718eba9af2b4dd103affc1164a87b9e9ebff8c3e4c05d7b7a7e274e2"}, - {file = "rpds_py-0.16.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:35ae5ece284cf36464eb160880018cf6088a9ac5ddc72292a6092b6ef3f4da53"}, - {file = "rpds_py-0.16.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc6a7620ba7639a3db6213da61312cb4aa9ac0ca6e00dc1cbbdc21c2aa6eb57"}, - {file = "rpds_py-0.16.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8cb6fe8ecdfffa0e711a75c931fb39f4ba382b4b3ccedeca43f18693864fe850"}, - {file = "rpds_py-0.16.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dace7b26a13353e24613417ce2239491b40a6ad44e5776a18eaff7733488b44"}, - {file = "rpds_py-0.16.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1bdbc5fcb04a7309074de6b67fa9bc4b418ab3fc435fec1f2779a0eced688d04"}, - {file = "rpds_py-0.16.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f42e25c016927e2a6b1ce748112c3ab134261fc2ddc867e92d02006103e1b1b7"}, - {file = "rpds_py-0.16.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:eab36eae3f3e8e24b05748ec9acc66286662f5d25c52ad70cadab544e034536b"}, - {file = "rpds_py-0.16.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0474df4ade9a3b4af96c3d36eb81856cb9462e4c6657d4caecfd840d2a13f3c9"}, - {file = "rpds_py-0.16.2-cp310-none-win32.whl", hash = "sha256:84c5a4d1f9dd7e2d2c44097fb09fffe728629bad31eb56caf97719e55575aa82"}, - {file = "rpds_py-0.16.2-cp310-none-win_amd64.whl", hash = "sha256:2bd82db36cd70b3628c0c57d81d2438e8dd4b7b32a6a9f25f24ab0e657cb6c4e"}, - {file = "rpds_py-0.16.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:adc0c3d6fc6ae35fee3e4917628983f6ce630d513cbaad575b4517d47e81b4bb"}, - {file = "rpds_py-0.16.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ec23fcad480e77ede06cf4127a25fc440f7489922e17fc058f426b5256ee0edb"}, - {file = "rpds_py-0.16.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:07aab64e2808c3ebac2a44f67e9dc0543812b715126dfd6fe4264df527556cb6"}, - {file = "rpds_py-0.16.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a4ebb8b20bd09c5ce7884c8f0388801100f5e75e7f733b1b6613c713371feefc"}, - {file = "rpds_py-0.16.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a3d7e2ea25d3517c6d7e5a1cc3702cffa6bd18d9ef8d08d9af6717fc1c700eed"}, - {file = "rpds_py-0.16.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f28ac0e8e7242d140f99402a903a2c596ab71550272ae9247ad78f9a932b5698"}, - {file = "rpds_py-0.16.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19f00f57fdd38db4bb5ad09f9ead1b535332dbf624200e9029a45f1f35527ebb"}, - {file = "rpds_py-0.16.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3da5a4c56953bdbf6d04447c3410309616c54433146ccdb4a277b9cb499bc10e"}, - {file = "rpds_py-0.16.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ec2e1cf025b2c0f48ec17ff3e642661da7ee332d326f2e6619366ce8e221f018"}, - {file = "rpds_py-0.16.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e0441fb4fdd39a230477b2ca9be90868af64425bfe7b122b57e61e45737a653b"}, - {file = "rpds_py-0.16.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9f0350ef2fba5f34eb0c9000ea328e51b9572b403d2f7f3b19f24085f6f598e8"}, - {file = "rpds_py-0.16.2-cp311-none-win32.whl", hash = 
"sha256:5a80e2f83391ad0808b4646732af2a7b67550b98f0cae056cb3b40622a83dbb3"}, - {file = "rpds_py-0.16.2-cp311-none-win_amd64.whl", hash = "sha256:e04e56b4ca7a770593633556e8e9e46579d66ec2ada846b401252a2bdcf70a6d"}, - {file = "rpds_py-0.16.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:5e6caa3809e50690bd92fa490f5c38caa86082c8c3315aa438bce43786d5e90d"}, - {file = "rpds_py-0.16.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2e53b9b25cac9065328901713a7e9e3b12e4f57ef4280b370fbbf6fef2052eef"}, - {file = "rpds_py-0.16.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:af27423662f32d7501a00c5e7342f7dbd1e4a718aea7a239781357d15d437133"}, - {file = "rpds_py-0.16.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:43d4dd5fb16eb3825742bad8339d454054261ab59fed2fbac84e1d84d5aae7ba"}, - {file = "rpds_py-0.16.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e061de3b745fe611e23cd7318aec2c8b0e4153939c25c9202a5811ca911fd733"}, - {file = "rpds_py-0.16.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b811d182ad17ea294f2ec63c0621e7be92a1141e1012383461872cead87468f"}, - {file = "rpds_py-0.16.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5552f328eaef1a75ff129d4d0c437bf44e43f9436d3996e8eab623ea0f5fcf73"}, - {file = "rpds_py-0.16.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dcbe1f8dd179e4d69b70b1f1d9bb6fd1e7e1bdc9c9aad345cdeb332e29d40748"}, - {file = "rpds_py-0.16.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8aad80645a011abae487d356e0ceb359f4938dfb6f7bcc410027ed7ae4f7bb8b"}, - {file = "rpds_py-0.16.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b6f5549d6ed1da9bfe3631ca9483ae906f21410be2445b73443fa9f017601c6f"}, - {file = "rpds_py-0.16.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d452817e0d9c749c431a1121d56a777bd7099b720b3d1c820f1725cb40928f58"}, - {file = "rpds_py-0.16.2-cp312-none-win32.whl", hash = "sha256:888a97002e986eca10d8546e3c8b97da1d47ad8b69726dcfeb3e56348ebb28a3"}, - {file = "rpds_py-0.16.2-cp312-none-win_amd64.whl", hash = "sha256:d8dda2a806dfa4a9b795950c4f5cc56d6d6159f7d68080aedaff3bdc9b5032f5"}, - {file = "rpds_py-0.16.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:071980663c273bf3d388fe5c794c547e6f35ba3335477072c713a3176bf14a60"}, - {file = "rpds_py-0.16.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:726ac36e8a3bb8daef2fd482534cabc5e17334052447008405daca7ca04a3108"}, - {file = "rpds_py-0.16.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9e557db6a177470316c82f023e5d571811c9a4422b5ea084c85da9aa3c035fc"}, - {file = "rpds_py-0.16.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:90123853fc8b1747f80b0d354be3d122b4365a93e50fc3aacc9fb4c2488845d6"}, - {file = "rpds_py-0.16.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a61f659665a39a4d17d699ab3593d7116d66e1e2e3f03ef3fb8f484e91908808"}, - {file = "rpds_py-0.16.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc97f0640e91d7776530f06e6836c546c1c752a52de158720c4224c9e8053cad"}, - {file = "rpds_py-0.16.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44a54e99a2b9693a37ebf245937fd6e9228b4cbd64b9cc961e1f3391ec6c7391"}, - {file = "rpds_py-0.16.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bd4b677d929cf1f6bac07ad76e0f2d5de367e6373351c01a9c0a39f6b21b4a8b"}, - {file = 
"rpds_py-0.16.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:5ef00873303d678aaf8b0627e111fd434925ca01c657dbb2641410f1cdaef261"}, - {file = "rpds_py-0.16.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:349cb40897fd529ca15317c22c0eab67f5ac5178b5bd2c6adc86172045210acc"}, - {file = "rpds_py-0.16.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:2ddef620e70eaffebed5932ce754d539c0930f676aae6212f8e16cd9743dd365"}, - {file = "rpds_py-0.16.2-cp38-none-win32.whl", hash = "sha256:882ce6e25e585949c3d9f9abd29202367175e0aab3aba0c58c9abbb37d4982ff"}, - {file = "rpds_py-0.16.2-cp38-none-win_amd64.whl", hash = "sha256:f4bd4578e44f26997e9e56c96dedc5f1af43cc9d16c4daa29c771a00b2a26851"}, - {file = "rpds_py-0.16.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:69ac7ea9897ec201ce68b48582f3eb34a3f9924488a5432a93f177bf76a82a7e"}, - {file = "rpds_py-0.16.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a9880b4656efe36ccad41edc66789e191e5ee19a1ea8811e0aed6f69851a82f4"}, - {file = "rpds_py-0.16.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee94cb58c0ba2c62ee108c2b7c9131b2c66a29e82746e8fa3aa1a1effbd3dcf1"}, - {file = "rpds_py-0.16.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:24f7a2eb3866a9e91f4599851e0c8d39878a470044875c49bd528d2b9b88361c"}, - {file = "rpds_py-0.16.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ca57468da2d9a660bcf8961637c85f2fbb2aa64d9bc3f9484e30c3f9f67b1dd7"}, - {file = "rpds_py-0.16.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ccd4e400309e1f34a5095bf9249d371f0fd60f8a3a5c4a791cad7b99ce1fd38d"}, - {file = "rpds_py-0.16.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80443fe2f7b3ea3934c5d75fb0e04a5dbb4a8e943e5ff2de0dec059202b70a8b"}, - {file = "rpds_py-0.16.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4d6a9f052e72d493efd92a77f861e45bab2f6be63e37fa8ecf0c6fd1a58fedb0"}, - {file = "rpds_py-0.16.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:35953f4f2b3216421af86fd236b7c0c65935936a94ea83ddbd4904ba60757773"}, - {file = "rpds_py-0.16.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:981d135c7cdaf6cd8eadae1c950de43b976de8f09d8e800feed307140d3d6d00"}, - {file = "rpds_py-0.16.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:d0dd7ed2f16df2e129496e7fbe59a34bc2d7fc8db443a606644d069eb69cbd45"}, - {file = "rpds_py-0.16.2-cp39-none-win32.whl", hash = "sha256:703d95c75a72e902544fda08e965885525e297578317989fd15a6ce58414b41d"}, - {file = "rpds_py-0.16.2-cp39-none-win_amd64.whl", hash = "sha256:e93ec1b300acf89730cf27975ef574396bc04edecc358e9bd116fb387a123239"}, - {file = "rpds_py-0.16.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:44627b6ca7308680a70766454db5249105fa6344853af6762eaad4158a2feebe"}, - {file = "rpds_py-0.16.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:3f91df8e6dbb7360e176d1affd5fb0246d2b88d16aa5ebc7db94fd66b68b61da"}, - {file = "rpds_py-0.16.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d904c5693e08bad240f16d79305edba78276be87061c872a4a15e2c301fa2c0"}, - {file = "rpds_py-0.16.2-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:290a81cfbe4673285cdf140ec5cd1658ffbf63ab359f2b352ebe172e7cfa5bf0"}, - {file = "rpds_py-0.16.2-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b634c5ec0103c5cbebc24ebac4872b045cccb9456fc59efdcf6fe39775365bd2"}, - {file = 
"rpds_py-0.16.2-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a297a4d08cc67c7466c873c78039d87840fb50d05473db0ec1b7b03d179bf322"}, - {file = "rpds_py-0.16.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2e75e17bd0bb66ee34a707da677e47c14ee51ccef78ed6a263a4cc965a072a1"}, - {file = "rpds_py-0.16.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f1b9d9260e06ea017feb7172976ab261e011c1dc2f8883c7c274f6b2aabfe01a"}, - {file = "rpds_py-0.16.2-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:162d7cd9cd311c1b0ff1c55a024b8f38bd8aad1876b648821da08adc40e95734"}, - {file = "rpds_py-0.16.2-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:9b32f742ce5b57201305f19c2ef7a184b52f6f9ba6871cc042c2a61f0d6b49b8"}, - {file = "rpds_py-0.16.2-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:ac08472f41ea77cd6a5dae36ae7d4ed3951d6602833af87532b556c1b4601d63"}, - {file = "rpds_py-0.16.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:495a14b72bbe217f2695dcd9b5ab14d4f8066a00f5d209ed94f0aca307f85f6e"}, - {file = "rpds_py-0.16.2-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:8d6b6937ae9eac6d6c0ca3c42774d89fa311f55adff3970fb364b34abde6ed3d"}, - {file = "rpds_py-0.16.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a61226465bda9283686db8f17d02569a98e4b13c637be5a26d44aa1f1e361c2"}, - {file = "rpds_py-0.16.2-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5cf6af100ffb5c195beec11ffaa8cf8523057f123afa2944e6571d54da84cdc9"}, - {file = "rpds_py-0.16.2-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6df15846ee3fb2e6397fe25d7ca6624af9f89587f3f259d177b556fed6bebe2c"}, - {file = "rpds_py-0.16.2-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1be2f033df1b8be8c3167ba3c29d5dca425592ee31e35eac52050623afba5772"}, - {file = "rpds_py-0.16.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96f957d6ab25a78b9e7fc9749d754b98eac825a112b4e666525ce89afcbd9ed5"}, - {file = "rpds_py-0.16.2-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:088396c7c70e59872f67462fcac3ecbded5233385797021976a09ebd55961dfe"}, - {file = "rpds_py-0.16.2-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:4c46ad6356e1561f2a54f08367d1d2e70a0a1bb2db2282d2c1972c1d38eafc3b"}, - {file = "rpds_py-0.16.2-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:47713dc4fce213f5c74ca8a1f6a59b622fc1b90868deb8e8e4d993e421b4b39d"}, - {file = "rpds_py-0.16.2-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:f811771019f063bbd0aa7bb72c8a934bc13ebacb4672d712fc1639cfd314cccc"}, - {file = "rpds_py-0.16.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f19afcfc0dd0dca35694df441e9b0f95bc231b512f51bded3c3d8ca32153ec19"}, - {file = "rpds_py-0.16.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a4b682c5775d6a3d21e314c10124599976809455ee67020e8e72df1769b87bc3"}, - {file = "rpds_py-0.16.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c647ca87fc0ebe808a41de912e9a1bfef9acb85257e5d63691364ac16b81c1f0"}, - {file = "rpds_py-0.16.2-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:302bd4983bbd47063e452c38be66153760112f6d3635c7eeefc094299fa400a9"}, - {file = "rpds_py-0.16.2-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:bf721ede3eb7b829e4a9b8142bd55db0bdc82902720548a703f7e601ee13bdc3"}, - {file = "rpds_py-0.16.2-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:358dafc89ce3894c7f486c615ba914609f38277ef67f566abc4c854d23b997fa"}, - {file = "rpds_py-0.16.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cad0f59ee3dc35526039f4bc23642d52d5f6616b5f687d846bfc6d0d6d486db0"}, - {file = "rpds_py-0.16.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cffa76b385dfe1e38527662a302b19ffb0e7f5cf7dd5e89186d2c94a22dd9d0c"}, - {file = "rpds_py-0.16.2-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:83640a5d7cd3bff694747d50436b8b541b5b9b9782b0c8c1688931d6ee1a1f2d"}, - {file = "rpds_py-0.16.2-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:ed99b4f7179d2111702020fd7d156e88acd533f5a7d3971353e568b6051d5c97"}, - {file = "rpds_py-0.16.2-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:4022b9dc620e14f30201a8a73898a873c8e910cb642bcd2f3411123bc527f6ac"}, - {file = "rpds_py-0.16.2.tar.gz", hash = "sha256:781ef8bfc091b19960fc0142a23aedadafa826bc32b433fdfe6fd7f964d7ef44"}, + {file = "rpds_py-0.17.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:4128980a14ed805e1b91a7ed551250282a8ddf8201a4e9f8f5b7e6225f54170d"}, + {file = "rpds_py-0.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ff1dcb8e8bc2261a088821b2595ef031c91d499a0c1b031c152d43fe0a6ecec8"}, + {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d65e6b4f1443048eb7e833c2accb4fa7ee67cc7d54f31b4f0555b474758bee55"}, + {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a71169d505af63bb4d20d23a8fbd4c6ce272e7bce6cc31f617152aa784436f29"}, + {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:436474f17733c7dca0fbf096d36ae65277e8645039df12a0fa52445ca494729d"}, + {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:10162fe3f5f47c37ebf6d8ff5a2368508fe22007e3077bf25b9c7d803454d921"}, + {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:720215373a280f78a1814becb1312d4e4d1077b1202a56d2b0815e95ccb99ce9"}, + {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:70fcc6c2906cfa5c6a552ba7ae2ce64b6c32f437d8f3f8eea49925b278a61453"}, + {file = "rpds_py-0.17.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:91e5a8200e65aaac342a791272c564dffcf1281abd635d304d6c4e6b495f29dc"}, + {file = "rpds_py-0.17.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:99f567dae93e10be2daaa896e07513dd4bf9c2ecf0576e0533ac36ba3b1d5394"}, + {file = "rpds_py-0.17.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:24e4900a6643f87058a27320f81336d527ccfe503984528edde4bb660c8c8d59"}, + {file = "rpds_py-0.17.1-cp310-none-win32.whl", hash = "sha256:0bfb09bf41fe7c51413f563373e5f537eaa653d7adc4830399d4e9bdc199959d"}, + {file = "rpds_py-0.17.1-cp310-none-win_amd64.whl", hash = "sha256:20de7b7179e2031a04042e85dc463a93a82bc177eeba5ddd13ff746325558aa6"}, + {file = "rpds_py-0.17.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:65dcf105c1943cba45d19207ef51b8bc46d232a381e94dd38719d52d3980015b"}, + {file = "rpds_py-0.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:01f58a7306b64e0a4fe042047dd2b7d411ee82e54240284bab63e325762c1147"}, + {file = 
"rpds_py-0.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:071bc28c589b86bc6351a339114fb7a029f5cddbaca34103aa573eba7b482382"}, + {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ae35e8e6801c5ab071b992cb2da958eee76340e6926ec693b5ff7d6381441745"}, + {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149c5cd24f729e3567b56e1795f74577aa3126c14c11e457bec1b1c90d212e38"}, + {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e796051f2070f47230c745d0a77a91088fbee2cc0502e9b796b9c6471983718c"}, + {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60e820ee1004327609b28db8307acc27f5f2e9a0b185b2064c5f23e815f248f8"}, + {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1957a2ab607f9added64478a6982742eb29f109d89d065fa44e01691a20fc20a"}, + {file = "rpds_py-0.17.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8587fd64c2a91c33cdc39d0cebdaf30e79491cc029a37fcd458ba863f8815383"}, + {file = "rpds_py-0.17.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4dc889a9d8a34758d0fcc9ac86adb97bab3fb7f0c4d29794357eb147536483fd"}, + {file = "rpds_py-0.17.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:2953937f83820376b5979318840f3ee47477d94c17b940fe31d9458d79ae7eea"}, + {file = "rpds_py-0.17.1-cp311-none-win32.whl", hash = "sha256:1bfcad3109c1e5ba3cbe2f421614e70439f72897515a96c462ea657261b96518"}, + {file = "rpds_py-0.17.1-cp311-none-win_amd64.whl", hash = "sha256:99da0a4686ada4ed0f778120a0ea8d066de1a0a92ab0d13ae68492a437db78bf"}, + {file = "rpds_py-0.17.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:1dc29db3900cb1bb40353772417800f29c3d078dbc8024fd64655a04ee3c4bdf"}, + {file = "rpds_py-0.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:82ada4a8ed9e82e443fcef87e22a3eed3654dd3adf6e3b3a0deb70f03e86142a"}, + {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d36b2b59e8cc6e576f8f7b671e32f2ff43153f0ad6d0201250a7c07f25d570e"}, + {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3677fcca7fb728c86a78660c7fb1b07b69b281964673f486ae72860e13f512ad"}, + {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:516fb8c77805159e97a689e2f1c80655c7658f5af601c34ffdb916605598cda2"}, + {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df3b6f45ba4515632c5064e35ca7f31d51d13d1479673185ba8f9fefbbed58b9"}, + {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a967dd6afda7715d911c25a6ba1517975acd8d1092b2f326718725461a3d33f9"}, + {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dbbb95e6fc91ea3102505d111b327004d1c4ce98d56a4a02e82cd451f9f57140"}, + {file = "rpds_py-0.17.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:02866e060219514940342a1f84303a1ef7a1dad0ac311792fbbe19b521b489d2"}, + {file = "rpds_py-0.17.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:2528ff96d09f12e638695f3a2e0c609c7b84c6df7c5ae9bfeb9252b6fa686253"}, + {file = "rpds_py-0.17.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bd345a13ce06e94c753dab52f8e71e5252aec1e4f8022d24d56decd31e1b9b23"}, + {file = "rpds_py-0.17.1-cp312-none-win32.whl", hash = 
"sha256:2a792b2e1d3038daa83fa474d559acfd6dc1e3650ee93b2662ddc17dbff20ad1"}, + {file = "rpds_py-0.17.1-cp312-none-win_amd64.whl", hash = "sha256:292f7344a3301802e7c25c53792fae7d1593cb0e50964e7bcdcc5cf533d634e3"}, + {file = "rpds_py-0.17.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:8ffe53e1d8ef2520ebcf0c9fec15bb721da59e8ef283b6ff3079613b1e30513d"}, + {file = "rpds_py-0.17.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4341bd7579611cf50e7b20bb8c2e23512a3dc79de987a1f411cb458ab670eb90"}, + {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f4eb548daf4836e3b2c662033bfbfc551db58d30fd8fe660314f86bf8510b93"}, + {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b686f25377f9c006acbac63f61614416a6317133ab7fafe5de5f7dc8a06d42eb"}, + {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4e21b76075c01d65d0f0f34302b5a7457d95721d5e0667aea65e5bb3ab415c25"}, + {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b86b21b348f7e5485fae740d845c65a880f5d1eda1e063bc59bef92d1f7d0c55"}, + {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f175e95a197f6a4059b50757a3dca33b32b61691bdbd22c29e8a8d21d3914cae"}, + {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1701fc54460ae2e5efc1dd6350eafd7a760f516df8dbe51d4a1c79d69472fbd4"}, + {file = "rpds_py-0.17.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:9051e3d2af8f55b42061603e29e744724cb5f65b128a491446cc029b3e2ea896"}, + {file = "rpds_py-0.17.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:7450dbd659fed6dd41d1a7d47ed767e893ba402af8ae664c157c255ec6067fde"}, + {file = "rpds_py-0.17.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:5a024fa96d541fd7edaa0e9d904601c6445e95a729a2900c5aec6555fe921ed6"}, + {file = "rpds_py-0.17.1-cp38-none-win32.whl", hash = "sha256:da1ead63368c04a9bded7904757dfcae01eba0e0f9bc41d3d7f57ebf1c04015a"}, + {file = "rpds_py-0.17.1-cp38-none-win_amd64.whl", hash = "sha256:841320e1841bb53fada91c9725e766bb25009cfd4144e92298db296fb6c894fb"}, + {file = "rpds_py-0.17.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:f6c43b6f97209e370124baf2bf40bb1e8edc25311a158867eb1c3a5d449ebc7a"}, + {file = "rpds_py-0.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5e7d63ec01fe7c76c2dbb7e972fece45acbb8836e72682bde138e7e039906e2c"}, + {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81038ff87a4e04c22e1d81f947c6ac46f122e0c80460b9006e6517c4d842a6ec"}, + {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:810685321f4a304b2b55577c915bece4c4a06dfe38f6e62d9cc1d6ca8ee86b99"}, + {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:25f071737dae674ca8937a73d0f43f5a52e92c2d178330b4c0bb6ab05586ffa6"}, + {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa5bfb13f1e89151ade0eb812f7b0d7a4d643406caaad65ce1cbabe0a66d695f"}, + {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfe07308b311a8293a0d5ef4e61411c5c20f682db6b5e73de6c7c8824272c256"}, + {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a000133a90eea274a6f28adc3084643263b1e7c1a5a66eb0a0a7a36aa757ed74"}, + {file = 
"rpds_py-0.17.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d0e8a6434a3fbf77d11448c9c25b2f25244226cfbec1a5159947cac5b8c5fa4"}, + {file = "rpds_py-0.17.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:efa767c220d94aa4ac3a6dd3aeb986e9f229eaf5bce92d8b1b3018d06bed3772"}, + {file = "rpds_py-0.17.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:dbc56680ecf585a384fbd93cd42bc82668b77cb525343170a2d86dafaed2a84b"}, + {file = "rpds_py-0.17.1-cp39-none-win32.whl", hash = "sha256:270987bc22e7e5a962b1094953ae901395e8c1e1e83ad016c5cfcfff75a15a3f"}, + {file = "rpds_py-0.17.1-cp39-none-win_amd64.whl", hash = "sha256:2a7b2f2f56a16a6d62e55354dd329d929560442bd92e87397b7a9586a32e3e76"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a3264e3e858de4fc601741498215835ff324ff2482fd4e4af61b46512dd7fc83"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:f2f3b28b40fddcb6c1f1f6c88c6f3769cd933fa493ceb79da45968a21dccc920"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9584f8f52010295a4a417221861df9bea4c72d9632562b6e59b3c7b87a1522b7"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c64602e8be701c6cfe42064b71c84ce62ce66ddc6422c15463fd8127db3d8066"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:060f412230d5f19fc8c8b75f315931b408d8ebf56aec33ef4168d1b9e54200b1"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b9412abdf0ba70faa6e2ee6c0cc62a8defb772e78860cef419865917d86c7342"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9737bdaa0ad33d34c0efc718741abaafce62fadae72c8b251df9b0c823c63b22"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9f0e4dc0f17dcea4ab9d13ac5c666b6b5337042b4d8f27e01b70fae41dd65c57"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:1db228102ab9d1ff4c64148c96320d0be7044fa28bd865a9ce628ce98da5973d"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:d8bbd8e56f3ba25a7d0cf980fc42b34028848a53a0e36c9918550e0280b9d0b6"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:be22ae34d68544df293152b7e50895ba70d2a833ad9566932d750d3625918b82"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:bf046179d011e6114daf12a534d874958b039342b347348a78b7cdf0dd9d6041"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:1a746a6d49665058a5896000e8d9d2f1a6acba8a03b389c1e4c06e11e0b7f40d"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0b8bf5b8db49d8fd40f54772a1dcf262e8be0ad2ab0206b5a2ec109c176c0a4"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f7f4cb1f173385e8a39c29510dd11a78bf44e360fb75610594973f5ea141028b"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7fbd70cb8b54fe745301921b0816c08b6d917593429dfc437fd024b5ba713c58"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9bdf1303df671179eaf2cb41e8515a07fc78d9d00f111eadbe3e14262f59c3d0"}, + {file = 
"rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fad059a4bd14c45776600d223ec194e77db6c20255578bb5bcdd7c18fd169361"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3664d126d3388a887db44c2e293f87d500c4184ec43d5d14d2d2babdb4c64cad"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:698ea95a60c8b16b58be9d854c9f993c639f5c214cf9ba782eca53a8789d6b19"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:c3d2010656999b63e628a3c694f23020322b4178c450dc478558a2b6ef3cb9bb"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:938eab7323a736533f015e6069a7d53ef2dcc841e4e533b782c2bfb9fb12d84b"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1e626b365293a2142a62b9a614e1f8e331b28f3ca57b9f05ebbf4cf2a0f0bdc5"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:380e0df2e9d5d5d339803cfc6d183a5442ad7ab3c63c2a0982e8c824566c5ccc"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b760a56e080a826c2e5af09002c1a037382ed21d03134eb6294812dda268c811"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5576ee2f3a309d2bb403ec292d5958ce03953b0e57a11d224c1f134feaf8c40f"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f3c3461ebb4c4f1bbc70b15d20b565759f97a5aaf13af811fcefc892e9197ba"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:637b802f3f069a64436d432117a7e58fab414b4e27a7e81049817ae94de45d8d"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffee088ea9b593cc6160518ba9bd319b5475e5f3e578e4552d63818773c6f56a"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3ac732390d529d8469b831949c78085b034bff67f584559340008d0f6041a049"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:93432e747fb07fa567ad9cc7aaadd6e29710e515aabf939dfbed8046041346c6"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:7b7d9ca34542099b4e185b3c2a2b2eda2e318a7dbde0b0d83357a6d4421b5296"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:0387ce69ba06e43df54e43968090f3626e231e4bc9150e4c3246947567695f68"}, + {file = "rpds_py-0.17.1.tar.gz", hash = "sha256:0210b2668f24c078307260bf88bdac9d6f1093635df5123789bfee4d8d7fc8e7"}, ] [[package]] @@ -3941,56 +3941,50 @@ sphinx = ">=2.0" [[package]] name = "sphinxcontrib-applehelp" -version = "1.0.7" +version = "1.0.8" description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" optional = false python-versions = ">=3.9" files = [ - {file = "sphinxcontrib_applehelp-1.0.7-py3-none-any.whl", hash = "sha256:094c4d56209d1734e7d252f6e0b3ccc090bd52ee56807a5d9315b19c122ab15d"}, - {file = "sphinxcontrib_applehelp-1.0.7.tar.gz", hash = "sha256:39fdc8d762d33b01a7d8f026a3b7d71563ea3b72787d5f00ad8465bd9d6dfbfa"}, + {file = "sphinxcontrib_applehelp-1.0.8-py3-none-any.whl", hash = "sha256:cb61eb0ec1b61f349e5cc36b2028e9e7ca765be05e49641c97241274753067b4"}, + {file = "sphinxcontrib_applehelp-1.0.8.tar.gz", hash = "sha256:c40a4f96f3776c4393d933412053962fac2b84f4c99a7982ba42e09576a70619"}, ] 
-[package.dependencies] -Sphinx = ">=5" - [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] +standalone = ["Sphinx (>=5)"] test = ["pytest"] [[package]] name = "sphinxcontrib-devhelp" -version = "1.0.5" +version = "1.0.6" description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents" optional = false python-versions = ">=3.9" files = [ - {file = "sphinxcontrib_devhelp-1.0.5-py3-none-any.whl", hash = "sha256:fe8009aed765188f08fcaadbb3ea0d90ce8ae2d76710b7e29ea7d047177dae2f"}, - {file = "sphinxcontrib_devhelp-1.0.5.tar.gz", hash = "sha256:63b41e0d38207ca40ebbeabcf4d8e51f76c03e78cd61abe118cf4435c73d4212"}, + {file = "sphinxcontrib_devhelp-1.0.6-py3-none-any.whl", hash = "sha256:6485d09629944511c893fa11355bda18b742b83a2b181f9a009f7e500595c90f"}, + {file = "sphinxcontrib_devhelp-1.0.6.tar.gz", hash = "sha256:9893fd3f90506bc4b97bdb977ceb8fbd823989f4316b28c3841ec128544372d3"}, ] -[package.dependencies] -Sphinx = ">=5" - [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] +standalone = ["Sphinx (>=5)"] test = ["pytest"] [[package]] name = "sphinxcontrib-htmlhelp" -version = "2.0.4" +version = "2.0.5" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" optional = false python-versions = ">=3.9" files = [ - {file = "sphinxcontrib_htmlhelp-2.0.4-py3-none-any.whl", hash = "sha256:8001661c077a73c29beaf4a79968d0726103c5605e27db92b9ebed8bab1359e9"}, - {file = "sphinxcontrib_htmlhelp-2.0.4.tar.gz", hash = "sha256:6c26a118a05b76000738429b724a0568dbde5b72391a688577da08f11891092a"}, + {file = "sphinxcontrib_htmlhelp-2.0.5-py3-none-any.whl", hash = "sha256:393f04f112b4d2f53d93448d4bce35842f62b307ccdc549ec1585e950bc35e04"}, + {file = "sphinxcontrib_htmlhelp-2.0.5.tar.gz", hash = "sha256:0dc87637d5de53dd5eec3a6a01753b1ccf99494bd756aafecd74b4fa9e729015"}, ] -[package.dependencies] -Sphinx = ">=5" - [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] +standalone = ["Sphinx (>=5)"] test = ["html5lib", "pytest"] [[package]] @@ -4009,38 +4003,34 @@ test = ["flake8", "mypy", "pytest"] [[package]] name = "sphinxcontrib-qthelp" -version = "1.0.6" +version = "1.0.7" description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents" optional = false python-versions = ">=3.9" files = [ - {file = "sphinxcontrib_qthelp-1.0.6-py3-none-any.whl", hash = "sha256:bf76886ee7470b934e363da7a954ea2825650013d367728588732c7350f49ea4"}, - {file = "sphinxcontrib_qthelp-1.0.6.tar.gz", hash = "sha256:62b9d1a186ab7f5ee3356d906f648cacb7a6bdb94d201ee7adf26db55092982d"}, + {file = "sphinxcontrib_qthelp-1.0.7-py3-none-any.whl", hash = "sha256:e2ae3b5c492d58fcbd73281fbd27e34b8393ec34a073c792642cd8e529288182"}, + {file = "sphinxcontrib_qthelp-1.0.7.tar.gz", hash = "sha256:053dedc38823a80a7209a80860b16b722e9e0209e32fea98c90e4e6624588ed6"}, ] -[package.dependencies] -Sphinx = ">=5" - [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] +standalone = ["Sphinx (>=5)"] test = ["pytest"] [[package]] name = "sphinxcontrib-serializinghtml" -version = "1.1.9" +version = "1.1.10" description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)" optional = false python-versions = ">=3.9" files = [ - {file = "sphinxcontrib_serializinghtml-1.1.9-py3-none-any.whl", hash = "sha256:9b36e503703ff04f20e9675771df105e58aa029cfcbc23b8ed716019b7416ae1"}, - {file = "sphinxcontrib_serializinghtml-1.1.9.tar.gz", hash = 
"sha256:0c64ff898339e1fac29abd2bf5f11078f3ec413cfe9c046d3120d7ca65530b54"}, + {file = "sphinxcontrib_serializinghtml-1.1.10-py3-none-any.whl", hash = "sha256:326369b8df80a7d2d8d7f99aa5ac577f51ea51556ed974e7716cfd4fca3f6cb7"}, + {file = "sphinxcontrib_serializinghtml-1.1.10.tar.gz", hash = "sha256:93f3f5dc458b91b192fe10c397e324f262cf163d79f3282c158e8436a2c4511f"}, ] -[package.dependencies] -Sphinx = ">=5" - [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] +standalone = ["Sphinx (>=5)"] test = ["pytest"] [[package]] From a28570a365f01972443bfcaede85530b2f470f49 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Tue, 16 Jan 2024 10:18:03 -0700 Subject: [PATCH 090/199] Update df_utils.py --- schematic/utils/df_utils.py | 30 ++++++++++++++---------------- 1 file changed, 14 insertions(+), 16 deletions(-) diff --git a/schematic/utils/df_utils.py b/schematic/utils/df_utils.py index bb3c63073..65841eab7 100644 --- a/schematic/utils/df_utils.py +++ b/schematic/utils/df_utils.py @@ -50,7 +50,7 @@ def load_df( else: # create a separate copy of the manifest # before beginning conversions to store float values - float_df = deepcopy(org_df) + float_df=deepcopy(org_df) # Cast the columns in the dataframe to string and # replace Null values with empty strings @@ -58,31 +58,29 @@ def load_df( org_df = org_df.astype(str).mask(null_cells, "") # Find integers stored as strings and replace with entries of type np.int64 - if ( - org_df.size < large_manifest_cutoff_size - ): # If small manifest, iterate as normal for improved performance - ints = org_df.applymap( - lambda x: np.int64(x) if str.isdigit(x) else False, na_action="ignore" - ).fillna(False) - - else: # parallelize iterations for large manfiests - pandarallel.initialize(verbose=1) - ints = org_df.parallel_applymap( - lambda x: np.int64(x) if str.isdigit(x) else False, na_action="ignore" - ).fillna(False) + if org_df.size < large_manifest_cutoff_size: # If small manifest, iterate as normal for improved performance + ints = org_df.map(lambda x: np.int64(x) if str.isdigit(x) else False, na_action='ignore').fillna(False) + + else: # parallelize iterations for large manfiests + pandarallel.initialize(verbose = 1) + ints = org_df.parallel_map(lambda x: np.int64(x) if str.isdigit(x) else False, na_action='ignore').fillna(False) # Identify cells converted to intergers - ints_tf_df = ints.applymap(pd.api.types.is_integer) + ints_tf_df = ints.map(pd.api.types.is_integer) # convert strings to numerical dtype (float) if possible, preserve non-numerical strings for col in org_df.columns: - float_df[col] = pd.to_numeric(float_df[col], errors="coerce") + float_df[col]=pd.to_numeric(float_df[col], errors='coerce').astype('object') + # replace values that couldn't be converted to float with the original str values float_df[col].fillna(org_df[col][float_df[col].isna()], inplace=True) # Store values that were converted to type int in the final dataframe - processed_df = float_df.mask(ints_tf_df, other=ints) + processed_df=float_df.mask(ints_tf_df, other = ints) + # Infer dtypes for columns when possible to restore type masking + processed_df = processed_df.infer_objects() + # log manifest load and processing time logger.debug(f"Load Elapsed time {perf_counter()-t_load_df}") return processed_df From ff6551fc87cc1fc4ac5007f743f26700f26a8127 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Tue, 16 Jan 2024 10:28:54 -0700 Subject: [PATCH 091/199] Update GE_Helpers.py 
---
 schematic/models/GE_Helpers.py | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

diff --git a/schematic/models/GE_Helpers.py b/schematic/models/GE_Helpers.py
index 194b268c6..c8a2bd9c2 100644
--- a/schematic/models/GE_Helpers.py
+++ b/schematic/models/GE_Helpers.py
@@ -163,10 +163,8 @@ def build_expectation_suite(
         meta = {}
 
         # remove trailing/leading whitespaces from manifest
-        self.manifest.applymap(lambda x: x.strip() if isinstance(x, str) else x)
-        validation_rules = self.dmge.get_node_validation_rules(
-            node_display_name=col
-        )
+        self.manifest.map(lambda x: x.strip() if isinstance(x, str) else x)
+        validation_rules = self.sg.get_node_validation_rules(col)
 
         # check if attribute has any rules associated with it
         if validation_rules:

From 0c4a04ee2fb3840fc4a52fa84bca736d45c5375e Mon Sep 17 00:00:00 2001
From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com>
Date: Tue, 16 Jan 2024 10:29:07 -0700
Subject: [PATCH 092/199] Update validate_manifest.py

---
 schematic/models/validate_manifest.py | 15 ++++-----------
 1 file changed, 4 insertions(+), 11 deletions(-)

diff --git a/schematic/models/validate_manifest.py b/schematic/models/validate_manifest.py
index d3af5c575..9d4b3b275 100644
--- a/schematic/models/validate_manifest.py
+++ b/schematic/models/validate_manifest.py
@@ -179,7 +179,7 @@ def validate_manifest_rules(
     regex_re = re.compile("regex.*")
     for col in manifest.columns:
         # remove trailing/leading whitespaces from manifest
-        manifest.applymap(lambda x: x.strip() if isinstance(x, str) else x)
+        manifest.map(lambda x: x.strip() if isinstance(x, str) else x)
         validation_rules = dmge.get_node_validation_rules(node_display_name=col)
 
         # TODO: Can remove when handling updated so split within graph
@@ -257,16 +257,9 @@ def validate_manifest_values(
     col_attr = {}  # save the mapping between column index and attribute name
 
     # numerical values need to be type string for the jsonValidator
-    for col in manifest.select_dtypes(
-        include=[int, np.int64, float, np.float64]
-    ).columns:
-        manifest[col] = manifest[col].astype("string")
-    manifest = manifest.applymap(
-        lambda x: str(x)
-        if isinstance(x, (int, np.int64, float, np.float64))
-        else x,
-        na_action="ignore",
-    )
+    for col in manifest.select_dtypes(include=[int, np.int64, float, np.float64]).columns:
+        manifest[col]=manifest[col].astype('string')
+    manifest = manifest.map(lambda x: str(x) if isinstance(x, (int, np.int64, float, np.float64)) else x, na_action='ignore')
 
     annotations = json.loads(manifest.to_json(orient="records"))
     for i, annotation in enumerate(annotations):

From 8654ec5854cf43ca25b0990a08ed6e7daa943c4d Mon Sep 17 00:00:00 2001
From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com>
Date: Tue, 16 Jan 2024 10:29:11 -0700
Subject: [PATCH 093/199] Update synapse.py
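manifest["id"][0] indexes the Series by label, so it raises a KeyError whenever
the censoring filter happens to drop row 0; .iloc[0] selects the first surviving
row by position, whatever its label. A minimal sketch of the difference, on a
hypothetical two-row manifest table:

    import pandas as pd

    manifest = pd.DataFrame({"id": ["syn111", "syn222"]})
    censored = pd.Series([True, False])

    uncensored = manifest[~censored]  # only row 1 survives; label 0 is gone
    print(uncensored["id"].iloc[0])   # "syn222", the first remaining row by position
    # uncensored["id"][0] would raise KeyError, because label 0 no longer exists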
---
 schematic/store/synapse.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py
index 1144dc88a..98a697563 100644
--- a/schematic/store/synapse.py
+++ b/schematic/store/synapse.py
@@ -494,15 +494,15 @@ def _get_manifest_id(self, manifest: pd.DataFrame) -> str:
         # Try to use uncensored manifest first
         not_censored = ~censored
         if any(not_censored):
-            manifest_syn_id = manifest[not_censored]["id"][0]
+            manifest_syn_id=manifest[not_censored]["id"].iloc[0]
         # if only censored manifests are available, just use the first censored manifest
-        else:
-            manifest_syn_id = manifest["id"][0]
+        else:
+            manifest_syn_id = manifest["id"].iloc[0]
 
         # otherwise, use the first (implied only) version that exists
         else:
-            manifest_syn_id = manifest["id"][0]
-
+            manifest_syn_id = manifest["id"].iloc[0]
+
         return manifest_syn_id
 
     def getDatasetManifest(

From 56a6dc987c22dcd8302e2229b987a5a47b3b5e08 Mon Sep 17 00:00:00 2001
From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com>
Date: Tue, 16 Jan 2024 15:05:25 -0700
Subject: [PATCH 094/199] change object inference

---
 schematic/utils/df_utils.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/schematic/utils/df_utils.py b/schematic/utils/df_utils.py
index 65841eab7..40b122e2a 100644
--- a/schematic/utils/df_utils.py
+++ b/schematic/utils/df_utils.py
@@ -79,8 +79,9 @@
     processed_df=float_df.mask(ints_tf_df, other = ints)
 
     # Infer dtypes for columns when possible to restore type masking
-    processed_df = processed_df.infer_objects()
-
+    # This mostly just labels string columns as such; changing the column type of columns with mixed numerical values converts ints to floats
+    processed_df = processed_df.convert_dtypes(infer_objects=False, convert_string=True, convert_integer=True, convert_boolean=False, convert_floating=True)
+
     # log manifest load and processing time
     logger.debug(f"Load Elapsed time {perf_counter()-t_load_df}")
     return processed_df

From c3b541b25463a0ea87988906a7b1f554665d56f1 Mon Sep 17 00:00:00 2001
From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com>
Date: Wed, 17 Jan 2024 09:56:30 -0700
Subject: [PATCH 095/199] leave column types as object

---
 schematic/utils/df_utils.py | 4 ----
 1 file changed, 4 deletions(-)

diff --git a/schematic/utils/df_utils.py b/schematic/utils/df_utils.py
index 40b122e2a..9dbaadbf9 100644
--- a/schematic/utils/df_utils.py
+++ b/schematic/utils/df_utils.py
@@ -78,10 +78,6 @@
     # Store values that were converted to type int in the final dataframe
     processed_df=float_df.mask(ints_tf_df, other = ints)
 
-    # Infer dtypes for columns when possible to restore type masking
-    # This mostly just labels string columns as such; changing the column type of columns with mixed numerical values converts ints to floats
-    processed_df = processed_df.convert_dtypes(infer_objects=False, convert_string=True, convert_integer=True, convert_boolean=False, convert_floating=True)
-
     # log manifest load and processing time

From fcc8debfb6124666c445a7b2a18b2653d8863b05 Mon Sep 17 00:00:00 2001
From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com>
Date: Wed, 17 Jan 2024 13:33:48 -0700
Subject: [PATCH 096/199] clean up `load_df`
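The type-inference block of load_df is split into three helpers:
find_and_convert_ints locates string cells that are really integers
(parallelizing with pandarallel above a 1000-cell cutoff), convert_ints performs
the single-cell conversion, and convert_floats handles the float pass. A rough
sketch of the masking logic these helpers implement together, on a hypothetical
one-column frame of strings:

    import numpy as np
    import pandas as pd

    org_df = pd.DataFrame({"col": ["7", "2.5", "abc"]})

    # int pass: integer-looking strings become np.int64, everything else False
    ints = org_df.map(lambda x: np.int64(x) if str.isdigit(x) else False)
    is_int = ints.map(pd.api.types.is_integer)  # bools do not count as integers here

    # float pass: coerce what we can, then restore the original strings elsewhere
    float_df = org_df.copy()
    float_df["col"] = pd.to_numeric(float_df["col"], errors="coerce").astype("object")
    float_df["col"] = float_df["col"].fillna(org_df["col"])

    # prefer the int conversion wherever one succeeded
    processed_df = float_df.mask(is_int, other=ints)
    print(processed_df)  # 7 as int, 2.5 as float, "abc" unchanged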
---
 schematic/utils/df_utils.py | 89 ++++++++++++++++++++++++------------
 1 file changed, 59 insertions(+), 30 deletions(-)

diff --git a/schematic/utils/df_utils.py b/schematic/utils/df_utils.py
index 9dbaadbf9..6ca74896b 100644
--- a/schematic/utils/df_utils.py
+++ b/schematic/utils/df_utils.py
@@ -6,6 +6,7 @@
 import pandas as pd
 import numpy as np
 from pandarallel import pandarallel
+from typing import Union
 
 logger = logging.getLogger(__name__)
 
@@ -29,7 +30,6 @@ def load_df(
     Returns: a processed dataframe for manifests or unprocessed df for data models and where indicated
     """
-    large_manifest_cutoff_size = 1000
     # start performance timer
     t_load_df = perf_counter()
 
@@ -40,48 +40,77 @@
     if not data_model:
         org_df = trim_commas_df(org_df)
 
-    # If type inference not allowed: trim and return
     if preserve_raw_input:
-        # log manifest load and processing time
         logger.debug(f"Load Elapsed time {perf_counter()-t_load_df}")
         return org_df
+
+    is_null = org_df.isnull()
+    org_df = org_df.astype(str).mask(is_null, '')
+
+    ints, is_int = find_and_convert_ints(org_df)
 
-    # If type inferences is allowed: infer types, trim, and return
-    else:
-        # create a separate copy of the manifest
-        # before beginning conversions to store float values
-        float_df=deepcopy(org_df)
+    float_df = convert_floats(org_df)
 
-        # Cast the columns in the dataframe to string and
-        # replace Null values with empty strings
-        null_cells = org_df.isnull()
-        org_df = org_df.astype(str).mask(null_cells, "")
+    # Store values that were converted to type int in the final dataframe
+    processed_df=float_df.mask(is_int, other = ints)
 
-        # Find integers stored as strings and replace with entries of type np.int64
-        if org_df.size < large_manifest_cutoff_size:  # If small manifest, iterate as normal for improved performance
-            ints = org_df.map(lambda x: np.int64(x) if str.isdigit(x) else False, na_action='ignore').fillna(False)
+    logger.debug(f"Load Elapsed time {perf_counter()-t_load_df}")
+    return processed_df
 
-        else:  # parallelize iterations for large manfiests
-            pandarallel.initialize(verbose = 1)
-            ints = org_df.parallel_map(lambda x: np.int64(x) if str.isdigit(x) else False, na_action='ignore').fillna(False)
+def find_and_convert_ints(df: pd.DataFrame) -> tuple[pd.DataFrame, pd.DataFrame]:
+    """
+    Find strings that represent integers and convert to type int
+    Args:
+        df: dataframe with nulls masked as empty strings
+    Returns:
+        ints: dataframe with values that were converted to type int
+        is_int: dataframe with boolean values indicating which cells were converted to type int
 
+    """
+    large_manifest_cutoff_size = 1000
-    # Identify cells converted to intergers
-    ints_tf_df = ints.map(pd.api.types.is_integer)
+    # Find integers stored as strings and replace with entries of type np.int64
+    if df.size < large_manifest_cutoff_size:  # If small manifest, iterate as normal for improved performance
+        ints = df.map(lambda x: convert_ints(x), na_action='ignore').fillna(False)
 
-    # convert strings to numerical dtype (float) if possible, preserve non-numerical strings
-    for col in org_df.columns:
-        float_df[col]=pd.to_numeric(float_df[col], errors='coerce').astype('object')
+    else:  # parallelize iterations for large manifests
+        pandarallel.initialize(verbose = 1)
+        ints = df.parallel_map(lambda x: convert_ints(x), na_action='ignore').fillna(False)
 
-        # replace values that couldn't be converted to float with the original str values
-        float_df[col].fillna(org_df[col][float_df[col].isna()], inplace=True)
+    # Identify cells converted to integers
+    is_int = ints.map(pd.api.types.is_integer)
 
-    # Store values that were converted to type int in the final dataframe
-    processed_df=float_df.mask(ints_tf_df, other = ints)
+    return ints, is_int
 
-    # log manifest load and processing time
-    logger.debug(f"Load Elapsed time {perf_counter()-t_load_df}")
-    return processed_df
+def convert_ints(x: str) -> Union[np.int64, bool]:
+    """
+    Lambda function to convert a string to an integer if possible, otherwise returns False
+    Args:
+        x: string to attempt conversion to int
+    Returns:
+        x converted to type int if possible, otherwise False
+    """
+    return np.int64(x) if str.isdigit(x) else False
 
+def convert_floats(df: pd.DataFrame) -> pd.DataFrame:
+    """
+    Convert strings that represent floats to type float
+    Args:
+        df: dataframe with nulls masked as empty strings
+    Returns:
+        float_df: dataframe with values that were converted to type float. 
Columns are type object + """ + # create a separate copy of the manifest + # before beginning conversions to store float values + float_df=deepcopy(df) + + # convert strings to numerical dtype (float) if possible, preserve non-numerical strings + for col in df.columns: + float_df[col]=pd.to_numeric(float_df[col], errors='coerce').astype('object') + + # replace values that couldn't be converted to float with the original str values + float_df[col].fillna(df[col][float_df[col].isna()], inplace=True) + + return float_df def _parse_dates(date_string): try: From 1ffb190c690f618eab51480dc1a38abb6ee247d1 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Wed, 17 Jan 2024 13:52:04 -0700 Subject: [PATCH 097/199] update lambda fxn --- schematic/models/validate_manifest.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/schematic/models/validate_manifest.py b/schematic/models/validate_manifest.py index 9d4b3b275..32e4cc9de 100644 --- a/schematic/models/validate_manifest.py +++ b/schematic/models/validate_manifest.py @@ -10,6 +10,7 @@ import re import sys from time import perf_counter +from numbers import Number # allows specifying explicit variable types from typing import Any, Dict, Optional, Text, List @@ -259,7 +260,8 @@ def validate_manifest_values( # numerical values need to be type string for the jsonValidator for col in manifest.select_dtypes(include=[int, np.int64, float, np.float64]).columns: manifest[col]=manifest[col].astype('string') - manifest = manifest.map(lambda x: str(x) if isinstance(x, (int, np.int64, float, np.float64)) else x, na_action='ignore') + + manifest = manifest.map(lambda x: str(x) if isinstance(x, Number) else x, na_action='ignore') annotations = json.loads(manifest.to_json(orient="records")) for i, annotation in enumerate(annotations): From f356bdc3a5c60b344624233d9695078a05285fe5 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Wed, 17 Jan 2024 14:04:08 -0700 Subject: [PATCH 098/199] spacing --- schematic/models/validate_manifest.py | 1 - 1 file changed, 1 deletion(-) diff --git a/schematic/models/validate_manifest.py b/schematic/models/validate_manifest.py index 32e4cc9de..9cbb28593 100644 --- a/schematic/models/validate_manifest.py +++ b/schematic/models/validate_manifest.py @@ -260,7 +260,6 @@ def validate_manifest_values( # numerical values need to be type string for the jsonValidator for col in manifest.select_dtypes(include=[int, np.int64, float, np.float64]).columns: manifest[col]=manifest[col].astype('string') - manifest = manifest.map(lambda x: str(x) if isinstance(x, Number) else x, na_action='ignore') annotations = json.loads(manifest.to_json(orient="records")) From 583918db66cee5869f7f243aed911450f3055221 Mon Sep 17 00:00:00 2001 From: GiaJordan Date: Fri, 26 Jan 2024 14:27:40 -0700 Subject: [PATCH 099/199] regen .lock --- poetry.lock | 483 ++++++++++++++++++++++++++++------------------------ 1 file changed, 260 insertions(+), 223 deletions(-) diff --git a/poetry.lock b/poetry.lock index 9eb41743d..c2587c2ac 100644 --- a/poetry.lock +++ b/poetry.lock @@ -236,19 +236,22 @@ files = [ [[package]] name = "beautifulsoup4" -version = "4.12.2" +version = "4.12.3" description = "Screen-scraping library" optional = false python-versions = ">=3.6.0" files = [ - {file = "beautifulsoup4-4.12.2-py3-none-any.whl", hash = "sha256:bd2520ca0d9d7d12694a53d44ac482d181b4ec1888909b035a3dbf40d0f57d4a"}, - {file = "beautifulsoup4-4.12.2.tar.gz", hash = 
"sha256:492bbc69dca35d12daac71c4db1bfff0c876c00ef4a2ffacce226d4638eb72da"}, + {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"}, + {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"}, ] [package.dependencies] soupsieve = ">1.2" [package.extras] +cchardet = ["cchardet"] +chardet = ["chardet"] +charset-normalizer = ["charset-normalizer"] html5lib = ["html5lib"] lxml = ["lxml"] @@ -604,63 +607,63 @@ tests = ["MarkupSafe (>=0.23)", "aiohttp (>=2.3.10,<4)", "aiohttp-jinja2 (>=0.14 [[package]] name = "coverage" -version = "7.4.0" +version = "7.4.1" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36b0ea8ab20d6a7564e89cb6135920bc9188fb5f1f7152e94e8300b7b189441a"}, - {file = "coverage-7.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0676cd0ba581e514b7f726495ea75aba3eb20899d824636c6f59b0ed2f88c471"}, - {file = "coverage-7.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ca5c71a5a1765a0f8f88022c52b6b8be740e512980362f7fdbb03725a0d6b9"}, - {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7c97726520f784239f6c62506bc70e48d01ae71e9da128259d61ca5e9788516"}, - {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:815ac2d0f3398a14286dc2cea223a6f338109f9ecf39a71160cd1628786bc6f5"}, - {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:80b5ee39b7f0131ebec7968baa9b2309eddb35b8403d1869e08f024efd883566"}, - {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5b2ccb7548a0b65974860a78c9ffe1173cfb5877460e5a229238d985565574ae"}, - {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:995ea5c48c4ebfd898eacb098164b3cc826ba273b3049e4a889658548e321b43"}, - {file = "coverage-7.4.0-cp310-cp310-win32.whl", hash = "sha256:79287fd95585ed36e83182794a57a46aeae0b64ca53929d1176db56aacc83451"}, - {file = "coverage-7.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:5b14b4f8760006bfdb6e08667af7bc2d8d9bfdb648351915315ea17645347137"}, - {file = "coverage-7.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:04387a4a6ecb330c1878907ce0dc04078ea72a869263e53c72a1ba5bbdf380ca"}, - {file = "coverage-7.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea81d8f9691bb53f4fb4db603203029643caffc82bf998ab5b59ca05560f4c06"}, - {file = "coverage-7.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74775198b702868ec2d058cb92720a3c5a9177296f75bd97317c787daf711505"}, - {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76f03940f9973bfaee8cfba70ac991825611b9aac047e5c80d499a44079ec0bc"}, - {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:485e9f897cf4856a65a57c7f6ea3dc0d4e6c076c87311d4bc003f82cfe199d25"}, - {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6ae8c9d301207e6856865867d762a4b6fd379c714fcc0607a84b92ee63feff70"}, - {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = 
"sha256:bf477c355274a72435ceb140dc42de0dc1e1e0bf6e97195be30487d8eaaf1a09"}, - {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:83c2dda2666fe32332f8e87481eed056c8b4d163fe18ecc690b02802d36a4d26"}, - {file = "coverage-7.4.0-cp311-cp311-win32.whl", hash = "sha256:697d1317e5290a313ef0d369650cfee1a114abb6021fa239ca12b4849ebbd614"}, - {file = "coverage-7.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:26776ff6c711d9d835557ee453082025d871e30b3fd6c27fcef14733f67f0590"}, - {file = "coverage-7.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:13eaf476ec3e883fe3e5fe3707caeb88268a06284484a3daf8250259ef1ba143"}, - {file = "coverage-7.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846f52f46e212affb5bcf131c952fb4075b55aae6b61adc9856222df89cbe3e2"}, - {file = "coverage-7.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26f66da8695719ccf90e794ed567a1549bb2644a706b41e9f6eae6816b398c4a"}, - {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:164fdcc3246c69a6526a59b744b62e303039a81e42cfbbdc171c91a8cc2f9446"}, - {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:316543f71025a6565677d84bc4df2114e9b6a615aa39fb165d697dba06a54af9"}, - {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bb1de682da0b824411e00a0d4da5a784ec6496b6850fdf8c865c1d68c0e318dd"}, - {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:0e8d06778e8fbffccfe96331a3946237f87b1e1d359d7fbe8b06b96c95a5407a"}, - {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a56de34db7b7ff77056a37aedded01b2b98b508227d2d0979d373a9b5d353daa"}, - {file = "coverage-7.4.0-cp312-cp312-win32.whl", hash = "sha256:51456e6fa099a8d9d91497202d9563a320513fcf59f33991b0661a4a6f2ad450"}, - {file = "coverage-7.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:cd3c1e4cb2ff0083758f09be0f77402e1bdf704adb7f89108007300a6da587d0"}, - {file = "coverage-7.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e9d1bf53c4c8de58d22e0e956a79a5b37f754ed1ffdbf1a260d9dcfa2d8a325e"}, - {file = "coverage-7.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:109f5985182b6b81fe33323ab4707011875198c41964f014579cf82cebf2bb85"}, - {file = "coverage-7.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cc9d4bc55de8003663ec94c2f215d12d42ceea128da8f0f4036235a119c88ac"}, - {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc6d65b21c219ec2072c1293c505cf36e4e913a3f936d80028993dd73c7906b1"}, - {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a10a4920def78bbfff4eff8a05c51be03e42f1c3735be42d851f199144897ba"}, - {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b8e99f06160602bc64da35158bb76c73522a4010f0649be44a4e167ff8555952"}, - {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7d360587e64d006402b7116623cebf9d48893329ef035278969fa3bbf75b697e"}, - {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:29f3abe810930311c0b5d1a7140f6395369c3db1be68345638c33eec07535105"}, - {file = "coverage-7.4.0-cp38-cp38-win32.whl", hash = "sha256:5040148f4ec43644702e7b16ca864c5314ccb8ee0751ef617d49aa0e2d6bf4f2"}, - {file = 
"coverage-7.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:9864463c1c2f9cb3b5db2cf1ff475eed2f0b4285c2aaf4d357b69959941aa555"}, - {file = "coverage-7.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:936d38794044b26c99d3dd004d8af0035ac535b92090f7f2bb5aa9c8e2f5cd42"}, - {file = "coverage-7.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:799c8f873794a08cdf216aa5d0531c6a3747793b70c53f70e98259720a6fe2d7"}, - {file = "coverage-7.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7defbb9737274023e2d7af02cac77043c86ce88a907c58f42b580a97d5bcca9"}, - {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a1526d265743fb49363974b7aa8d5899ff64ee07df47dd8d3e37dcc0818f09ed"}, - {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf635a52fc1ea401baf88843ae8708591aa4adff875e5c23220de43b1ccf575c"}, - {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:756ded44f47f330666843b5781be126ab57bb57c22adbb07d83f6b519783b870"}, - {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0eb3c2f32dabe3a4aaf6441dde94f35687224dfd7eb2a7f47f3fd9428e421058"}, - {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bfd5db349d15c08311702611f3dccbef4b4e2ec148fcc636cf8739519b4a5c0f"}, - {file = "coverage-7.4.0-cp39-cp39-win32.whl", hash = "sha256:53d7d9158ee03956e0eadac38dfa1ec8068431ef8058fe6447043db1fb40d932"}, - {file = "coverage-7.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:cfd2a8b6b0d8e66e944d47cdec2f47c48fef2ba2f2dff5a9a75757f64172857e"}, - {file = "coverage-7.4.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:c530833afc4707fe48524a44844493f36d8727f04dcce91fb978c414a8556cc6"}, - {file = "coverage-7.4.0.tar.gz", hash = "sha256:707c0f58cb1712b8809ece32b68996ee1e609f71bd14615bd8f87a1293cb610e"}, + {file = "coverage-7.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:077d366e724f24fc02dbfe9d946534357fda71af9764ff99d73c3c596001bbd7"}, + {file = "coverage-7.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0193657651f5399d433c92f8ae264aff31fc1d066deee4b831549526433f3f61"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d17bbc946f52ca67adf72a5ee783cd7cd3477f8f8796f59b4974a9b59cacc9ee"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3277f5fa7483c927fe3a7b017b39351610265308f5267ac6d4c2b64cc1d8d25"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dceb61d40cbfcf45f51e59933c784a50846dc03211054bd76b421a713dcdf19"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6008adeca04a445ea6ef31b2cbaf1d01d02986047606f7da266629afee982630"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c61f66d93d712f6e03369b6a7769233bfda880b12f417eefdd4f16d1deb2fc4c"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9bb62fac84d5f2ff523304e59e5c439955fb3b7f44e3d7b2085184db74d733b"}, + {file = "coverage-7.4.1-cp310-cp310-win32.whl", hash = "sha256:f86f368e1c7ce897bf2457b9eb61169a44e2ef797099fb5728482b8d69f3f016"}, + {file = "coverage-7.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:869b5046d41abfea3e381dd143407b0d29b8282a904a19cb908fa24d090cc018"}, + {file = 
"coverage-7.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b8ffb498a83d7e0305968289441914154fb0ef5d8b3157df02a90c6695978295"}, + {file = "coverage-7.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3cacfaefe6089d477264001f90f55b7881ba615953414999c46cc9713ff93c8c"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d6850e6e36e332d5511a48a251790ddc545e16e8beaf046c03985c69ccb2676"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18e961aa13b6d47f758cc5879383d27b5b3f3dcd9ce8cdbfdc2571fe86feb4dd"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfd1e1b9f0898817babf840b77ce9fe655ecbe8b1b327983df485b30df8cc011"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6b00e21f86598b6330f0019b40fb397e705135040dbedc2ca9a93c7441178e74"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:536d609c6963c50055bab766d9951b6c394759190d03311f3e9fcf194ca909e1"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7ac8f8eb153724f84885a1374999b7e45734bf93a87d8df1e7ce2146860edef6"}, + {file = "coverage-7.4.1-cp311-cp311-win32.whl", hash = "sha256:f3771b23bb3675a06f5d885c3630b1d01ea6cac9e84a01aaf5508706dba546c5"}, + {file = "coverage-7.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:9d2f9d4cc2a53b38cabc2d6d80f7f9b7e3da26b2f53d48f05876fef7956b6968"}, + {file = "coverage-7.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f68ef3660677e6624c8cace943e4765545f8191313a07288a53d3da188bd8581"}, + {file = "coverage-7.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:23b27b8a698e749b61809fb637eb98ebf0e505710ec46a8aa6f1be7dc0dc43a6"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e3424c554391dc9ef4a92ad28665756566a28fecf47308f91841f6c49288e66"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0860a348bf7004c812c8368d1fc7f77fe8e4c095d661a579196a9533778e156"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe558371c1bdf3b8fa03e097c523fb9645b8730399c14fe7721ee9c9e2a545d3"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3468cc8720402af37b6c6e7e2a9cdb9f6c16c728638a2ebc768ba1ef6f26c3a1"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:02f2edb575d62172aa28fe00efe821ae31f25dc3d589055b3fb64d51e52e4ab1"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ca6e61dc52f601d1d224526360cdeab0d0712ec104a2ce6cc5ccef6ed9a233bc"}, + {file = "coverage-7.4.1-cp312-cp312-win32.whl", hash = "sha256:ca7b26a5e456a843b9b6683eada193fc1f65c761b3a473941efe5a291f604c74"}, + {file = "coverage-7.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:85ccc5fa54c2ed64bd91ed3b4a627b9cce04646a659512a051fa82a92c04a448"}, + {file = "coverage-7.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8bdb0285a0202888d19ec6b6d23d5990410decb932b709f2b0dfe216d031d218"}, + {file = "coverage-7.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:918440dea04521f499721c039863ef95433314b1db00ff826a02580c1f503e45"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:379d4c7abad5afbe9d88cc31ea8ca262296480a86af945b08214eb1a556a3e4d"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b094116f0b6155e36a304ff912f89bbb5067157aff5f94060ff20bbabdc8da06"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2f5968608b1fe2a1d00d01ad1017ee27efd99b3437e08b83ded9b7af3f6f766"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:10e88e7f41e6197ea0429ae18f21ff521d4f4490aa33048f6c6f94c6045a6a75"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a4a3907011d39dbc3e37bdc5df0a8c93853c369039b59efa33a7b6669de04c60"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6d224f0c4c9c98290a6990259073f496fcec1b5cc613eecbd22786d398ded3ad"}, + {file = "coverage-7.4.1-cp38-cp38-win32.whl", hash = "sha256:23f5881362dcb0e1a92b84b3c2809bdc90db892332daab81ad8f642d8ed55042"}, + {file = "coverage-7.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:a07f61fc452c43cd5328b392e52555f7d1952400a1ad09086c4a8addccbd138d"}, + {file = "coverage-7.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8e738a492b6221f8dcf281b67129510835461132b03024830ac0e554311a5c54"}, + {file = "coverage-7.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:46342fed0fff72efcda77040b14728049200cbba1279e0bf1188f1f2078c1d70"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9641e21670c68c7e57d2053ddf6c443e4f0a6e18e547e86af3fad0795414a628"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aeb2c2688ed93b027eb0d26aa188ada34acb22dceea256d76390eea135083950"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d12c923757de24e4e2110cf8832d83a886a4cf215c6e61ed506006872b43a6d1"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0491275c3b9971cdbd28a4595c2cb5838f08036bca31765bad5e17edf900b2c7"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8dfc5e195bbef80aabd81596ef52a1277ee7143fe419efc3c4d8ba2754671756"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1a78b656a4d12b0490ca72651fe4d9f5e07e3c6461063a9b6265ee45eb2bdd35"}, + {file = "coverage-7.4.1-cp39-cp39-win32.whl", hash = "sha256:f90515974b39f4dea2f27c0959688621b46d96d5a626cf9c53dbc653a895c05c"}, + {file = "coverage-7.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:64e723ca82a84053dd7bfcc986bdb34af8d9da83c521c19d6b472bc6880e191a"}, + {file = "coverage-7.4.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:32a8d985462e37cfdab611a6f95b09d7c091d07668fdc26e47a725ee575fe166"}, + {file = "coverage-7.4.1.tar.gz", hash = "sha256:1ed4b95480952b1a26d863e546fa5094564aa0065e1e5f0d4d0041f293251d04"}, ] [package.dependencies] @@ -714,6 +717,21 @@ ssh = ["bcrypt (>=3.1.5)"] test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] +[[package]] +name = "dataclasses-json" +version = "0.6.3" +description = "Easily serialize dataclasses to and from JSON." 
+optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "dataclasses_json-0.6.3-py3-none-any.whl", hash = "sha256:4aeb343357997396f6bca1acae64e486c3a723d8f5c76301888abeccf0c45176"}, + {file = "dataclasses_json-0.6.3.tar.gz", hash = "sha256:35cb40aae824736fdf959801356641836365219cfe14caeb115c39136f775d2a"}, +] + +[package.dependencies] +marshmallow = ">=3.18.0,<4.0.0" +typing-inspect = ">=0.4.0,<1" + [[package]] name = "dateparser" version = "1.2.0" @@ -1007,13 +1025,13 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-api-python-client" -version = "2.114.0" +version = "2.115.0" description = "Google API Client Library for Python" optional = false python-versions = ">=3.7" files = [ - {file = "google-api-python-client-2.114.0.tar.gz", hash = "sha256:e041bbbf60e682261281e9d64b4660035f04db1cccba19d1d68eebc24d1465ed"}, - {file = "google_api_python_client-2.114.0-py2.py3-none-any.whl", hash = "sha256:690e0bb67d70ff6dea4e8a5d3738639c105a478ac35da153d3b2a384064e9e1a"}, + {file = "google-api-python-client-2.115.0.tar.gz", hash = "sha256:96af11376535236ba600ebbe23588cfe003ec9b74e66dd6ddb53aa3ec87e1b52"}, + {file = "google_api_python_client-2.115.0-py2.py3-none-any.whl", hash = "sha256:26178e33684763099142e2cad201057bd27d4efefd859a495aac21ab3e6129c2"}, ] [package.dependencies] @@ -1025,13 +1043,13 @@ uritemplate = ">=3.0.1,<5" [[package]] name = "google-auth" -version = "2.26.2" +version = "2.27.0" description = "Google Authentication Library" optional = false python-versions = ">=3.7" files = [ - {file = "google-auth-2.26.2.tar.gz", hash = "sha256:97327dbbf58cccb58fc5a1712bba403ae76668e64814eb30f7316f7e27126b81"}, - {file = "google_auth-2.26.2-py2.py3-none-any.whl", hash = "sha256:3f445c8ce9b61ed6459aad86d8ccdba4a9afed841b2d1451a11ef4db08957424"}, + {file = "google-auth-2.27.0.tar.gz", hash = "sha256:e863a56ccc2d8efa83df7a80272601e43487fa9a728a376205c86c26aaefa821"}, + {file = "google_auth-2.27.0-py2.py3-none-any.whl", hash = "sha256:8e4bad367015430ff253fe49d500fdc3396c1a434db5740828c728e45bcce245"}, ] [package.dependencies] @@ -1595,13 +1613,13 @@ files = [ [[package]] name = "jsonschema" -version = "4.21.0" +version = "4.21.1" description = "An implementation of JSON Schema validation for Python" optional = false python-versions = ">=3.8" files = [ - {file = "jsonschema-4.21.0-py3-none-any.whl", hash = "sha256:70a09719d375c0a2874571b363c8a24be7df8071b80c9aa76bc4551e7297c63c"}, - {file = "jsonschema-4.21.0.tar.gz", hash = "sha256:3ba18e27f7491ea4a1b22edce00fb820eec968d397feb3f9cb61d5894bb38167"}, + {file = "jsonschema-4.21.1-py3-none-any.whl", hash = "sha256:7996507afae316306f9e2290407761157c6f78002dcf7419acb99822143d1c6f"}, + {file = "jsonschema-4.21.1.tar.gz", hash = "sha256:85727c00279f5fa6bedbe6238d2aa6403bedd8b4864ab11207d07df3cc1b2ee5"}, ] [package.dependencies] @@ -1706,13 +1724,13 @@ test = ["click", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.19.0)", "p [[package]] name = "jupyter-lsp" -version = "2.2.1" +version = "2.2.2" description = "Multi-Language Server WebSocket proxy for Jupyter Notebook/Lab server" optional = false python-versions = ">=3.8" files = [ - {file = "jupyter-lsp-2.2.1.tar.gz", hash = "sha256:b17fab6d70fe83c8896b0cff59237640038247c196056b43684a0902b6a9e0fb"}, - {file = "jupyter_lsp-2.2.1-py3-none-any.whl", hash = "sha256:17a689910c5e4ae5e7d334b02f31d08ffbe98108f6f658fb05e4304b4345368b"}, + {file = "jupyter-lsp-2.2.2.tar.gz", hash = "sha256:256d24620542ae4bba04a50fc1f6ffe208093a07d8e697fea0a8d1b8ca1b7e5b"}, + {file = 
"jupyter_lsp-2.2.2-py3-none-any.whl", hash = "sha256:3b95229e4168355a8c91928057c1621ac3510ba98b2a925e82ebd77f078b1aa5"}, ] [package.dependencies] @@ -1757,13 +1775,13 @@ test = ["flaky", "ipykernel", "pre-commit", "pytest (>=7.0)", "pytest-console-sc [[package]] name = "jupyter-server-terminals" -version = "0.5.1" +version = "0.5.2" description = "A Jupyter Server Extension Providing Terminals." optional = false python-versions = ">=3.8" files = [ - {file = "jupyter_server_terminals-0.5.1-py3-none-any.whl", hash = "sha256:5e63e947ddd97bb2832db5ef837a258d9ccd4192cd608c1270850ad947ae5dd7"}, - {file = "jupyter_server_terminals-0.5.1.tar.gz", hash = "sha256:16d3be9cf48be6a1f943f3a6c93c033be259cf4779184c66421709cf63dccfea"}, + {file = "jupyter_server_terminals-0.5.2-py3-none-any.whl", hash = "sha256:1b80c12765da979513c42c90215481bbc39bd8ae7c0350b4f85bc3eb58d0fa80"}, + {file = "jupyter_server_terminals-0.5.2.tar.gz", hash = "sha256:396b5ccc0881e550bf0ee7012c6ef1b53edbde69e67cab1d56e89711b46052e8"}, ] [package.dependencies] @@ -1776,13 +1794,13 @@ test = ["jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-jupyter[server] (> [[package]] name = "jupyterlab" -version = "4.0.10" +version = "4.0.11" description = "JupyterLab computational environment" optional = false python-versions = ">=3.8" files = [ - {file = "jupyterlab-4.0.10-py3-none-any.whl", hash = "sha256:fe010ad9e37017488b468632ef2ead255fc7c671c5b64d9ca13e1f7b7e665c37"}, - {file = "jupyterlab-4.0.10.tar.gz", hash = "sha256:46177eb8ede70dc73be922ac99f8ef943bdc2dfbc6a31b353c4bde848a35dee1"}, + {file = "jupyterlab-4.0.11-py3-none-any.whl", hash = "sha256:536bf0e78723153a5016ca7efb88ed0ecd7070d3f1555d5b0e2770658f900a3c"}, + {file = "jupyterlab-4.0.11.tar.gz", hash = "sha256:d1aec24712566bc25a36229788242778e498ca4088028e2f9aa156b8b7fdc8fc"}, ] [package.dependencies] @@ -2196,13 +2214,13 @@ test = ["pep440", "pre-commit", "pytest", "testpath"] [[package]] name = "nest-asyncio" -version = "1.5.9" +version = "1.6.0" description = "Patch asyncio to allow nested event loops" optional = false python-versions = ">=3.5" files = [ - {file = "nest_asyncio-1.5.9-py3-none-any.whl", hash = "sha256:61ec07ef052e72e3de22045b81b2cc7d71fceb04c568ba0b2e4b2f9f5231bec2"}, - {file = "nest_asyncio-1.5.9.tar.gz", hash = "sha256:d1e1144e9c6e3e6392e0fcf5211cb1c8374b5648a98f1ebe48e5336006b41907"}, + {file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"}, + {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"}, ] [[package]] @@ -2225,13 +2243,13 @@ test = ["codecov (>=2.1)", "pytest (>=7.2)", "pytest-cov (>=4.0)"] [[package]] name = "notebook" -version = "7.0.6" +version = "7.0.7" description = "Jupyter Notebook - A web-based notebook environment for interactive computing" optional = false python-versions = ">=3.8" files = [ - {file = "notebook-7.0.6-py3-none-any.whl", hash = "sha256:0fe8f67102fea3744fedf652e4c15339390902ca70c5a31c4f547fa23da697cc"}, - {file = "notebook-7.0.6.tar.gz", hash = "sha256:ec6113b06529019f7f287819af06c97a2baf7a95ac21a8f6e32192898e9f9a58"}, + {file = "notebook-7.0.7-py3-none-any.whl", hash = "sha256:289b606d7e173f75a18beb1406ef411b43f97f7a9c55ba03efa3622905a62346"}, + {file = "notebook-7.0.7.tar.gz", hash = "sha256:3bcff00c17b3ac142ef5f436d50637d936b274cfa0b41f6ac0175363de9b4e09"}, ] [package.dependencies] @@ -2453,13 +2471,13 @@ files = [ [[package]] name = "overrides" -version = "7.4.0" +version = "7.6.0" 
description = "A decorator to automatically detect mismatch when overriding a method." optional = false python-versions = ">=3.6" files = [ - {file = "overrides-7.4.0-py3-none-any.whl", hash = "sha256:3ad24583f86d6d7a49049695efe9933e67ba62f0c7625d53c59fa832ce4b8b7d"}, - {file = "overrides-7.4.0.tar.gz", hash = "sha256:9502a3cca51f4fac40b5feca985b6703a5c1f6ad815588a7ca9e285b9dca6757"}, + {file = "overrides-7.6.0-py3-none-any.whl", hash = "sha256:c36e6635519ea9c5b043b65c36d4b886aee8bd45b7d4681d2a6df0898df4b654"}, + {file = "overrides-7.6.0.tar.gz", hash = "sha256:01e15bbbf15b766f0675c275baa1878bd1c7dc9bc7b9ee13e677cdba93dc1bd9"}, ] [[package]] @@ -2494,77 +2512,81 @@ doc = ["mkdocs-material"] [[package]] name = "pandas" -version = "2.1.4" +version = "2.2.0" description = "Powerful data structures for data analysis, time series, and statistics" optional = false python-versions = ">=3.9" files = [ - {file = "pandas-2.1.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bdec823dc6ec53f7a6339a0e34c68b144a7a1fd28d80c260534c39c62c5bf8c9"}, - {file = "pandas-2.1.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:294d96cfaf28d688f30c918a765ea2ae2e0e71d3536754f4b6de0ea4a496d034"}, - {file = "pandas-2.1.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b728fb8deba8905b319f96447a27033969f3ea1fea09d07d296c9030ab2ed1d"}, - {file = "pandas-2.1.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00028e6737c594feac3c2df15636d73ace46b8314d236100b57ed7e4b9ebe8d9"}, - {file = "pandas-2.1.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:426dc0f1b187523c4db06f96fb5c8d1a845e259c99bda74f7de97bd8a3bb3139"}, - {file = "pandas-2.1.4-cp310-cp310-win_amd64.whl", hash = "sha256:f237e6ca6421265643608813ce9793610ad09b40154a3344a088159590469e46"}, - {file = "pandas-2.1.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b7d852d16c270e4331f6f59b3e9aa23f935f5c4b0ed2d0bc77637a8890a5d092"}, - {file = "pandas-2.1.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bd7d5f2f54f78164b3d7a40f33bf79a74cdee72c31affec86bfcabe7e0789821"}, - {file = "pandas-2.1.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0aa6e92e639da0d6e2017d9ccff563222f4eb31e4b2c3cf32a2a392fc3103c0d"}, - {file = "pandas-2.1.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d797591b6846b9db79e65dc2d0d48e61f7db8d10b2a9480b4e3faaddc421a171"}, - {file = "pandas-2.1.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d2d3e7b00f703aea3945995ee63375c61b2e6aa5aa7871c5d622870e5e137623"}, - {file = "pandas-2.1.4-cp311-cp311-win_amd64.whl", hash = "sha256:dc9bf7ade01143cddc0074aa6995edd05323974e6e40d9dbde081021ded8510e"}, - {file = "pandas-2.1.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:482d5076e1791777e1571f2e2d789e940dedd927325cc3cb6d0800c6304082f6"}, - {file = "pandas-2.1.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8a706cfe7955c4ca59af8c7a0517370eafbd98593155b48f10f9811da440248b"}, - {file = "pandas-2.1.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0513a132a15977b4a5b89aabd304647919bc2169eac4c8536afb29c07c23540"}, - {file = "pandas-2.1.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9f17f2b6fc076b2a0078862547595d66244db0f41bf79fc5f64a5c4d635bead"}, - {file = "pandas-2.1.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:45d63d2a9b1b37fa6c84a68ba2422dc9ed018bdaa668c7f47566a01188ceeec1"}, - {file = "pandas-2.1.4-cp312-cp312-win_amd64.whl", hash = 
"sha256:f69b0c9bb174a2342818d3e2778584e18c740d56857fc5cdb944ec8bbe4082cf"}, - {file = "pandas-2.1.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3f06bda01a143020bad20f7a85dd5f4a1600112145f126bc9e3e42077c24ef34"}, - {file = "pandas-2.1.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ab5796839eb1fd62a39eec2916d3e979ec3130509930fea17fe6f81e18108f6a"}, - {file = "pandas-2.1.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edbaf9e8d3a63a9276d707b4d25930a262341bca9874fcb22eff5e3da5394732"}, - {file = "pandas-2.1.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ebfd771110b50055712b3b711b51bee5d50135429364d0498e1213a7adc2be8"}, - {file = "pandas-2.1.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8ea107e0be2aba1da619cc6ba3f999b2bfc9669a83554b1904ce3dd9507f0860"}, - {file = "pandas-2.1.4-cp39-cp39-win_amd64.whl", hash = "sha256:d65148b14788b3758daf57bf42725caa536575da2b64df9964c563b015230984"}, - {file = "pandas-2.1.4.tar.gz", hash = "sha256:fcb68203c833cc735321512e13861358079a96c174a61f5116a1de89c58c0ef7"}, + {file = "pandas-2.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8108ee1712bb4fa2c16981fba7e68b3f6ea330277f5ca34fa8d557e986a11670"}, + {file = "pandas-2.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:736da9ad4033aeab51d067fc3bd69a0ba36f5a60f66a527b3d72e2030e63280a"}, + {file = "pandas-2.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38e0b4fc3ddceb56ec8a287313bc22abe17ab0eb184069f08fc6a9352a769b18"}, + {file = "pandas-2.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20404d2adefe92aed3b38da41d0847a143a09be982a31b85bc7dd565bdba0f4e"}, + {file = "pandas-2.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7ea3ee3f125032bfcade3a4cf85131ed064b4f8dd23e5ce6fa16473e48ebcaf5"}, + {file = "pandas-2.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9670b3ac00a387620489dfc1bca66db47a787f4e55911f1293063a78b108df1"}, + {file = "pandas-2.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:5a946f210383c7e6d16312d30b238fd508d80d927014f3b33fb5b15c2f895430"}, + {file = "pandas-2.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a1b438fa26b208005c997e78672f1aa8138f67002e833312e6230f3e57fa87d5"}, + {file = "pandas-2.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8ce2fbc8d9bf303ce54a476116165220a1fedf15985b09656b4b4275300e920b"}, + {file = "pandas-2.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2707514a7bec41a4ab81f2ccce8b382961a29fbe9492eab1305bb075b2b1ff4f"}, + {file = "pandas-2.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85793cbdc2d5bc32620dc8ffa715423f0c680dacacf55056ba13454a5be5de88"}, + {file = "pandas-2.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:cfd6c2491dc821b10c716ad6776e7ab311f7df5d16038d0b7458bc0b67dc10f3"}, + {file = "pandas-2.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a146b9dcacc3123aa2b399df1a284de5f46287a4ab4fbfc237eac98a92ebcb71"}, + {file = "pandas-2.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:fbc1b53c0e1fdf16388c33c3cca160f798d38aea2978004dd3f4d3dec56454c9"}, + {file = "pandas-2.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a41d06f308a024981dcaa6c41f2f2be46a6b186b902c94c2674e8cb5c42985bc"}, + {file = "pandas-2.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:159205c99d7a5ce89ecfc37cb08ed179de7783737cea403b295b5eda8e9c56d1"}, + {file = 
"pandas-2.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb1e1f3861ea9132b32f2133788f3b14911b68102d562715d71bd0013bc45440"}, + {file = "pandas-2.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:761cb99b42a69005dec2b08854fb1d4888fdf7b05db23a8c5a099e4b886a2106"}, + {file = "pandas-2.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a20628faaf444da122b2a64b1e5360cde100ee6283ae8effa0d8745153809a2e"}, + {file = "pandas-2.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f5be5d03ea2073627e7111f61b9f1f0d9625dc3c4d8dda72cc827b0c58a1d042"}, + {file = "pandas-2.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:a626795722d893ed6aacb64d2401d017ddc8a2341b49e0384ab9bf7112bdec30"}, + {file = "pandas-2.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9f66419d4a41132eb7e9a73dcec9486cf5019f52d90dd35547af11bc58f8637d"}, + {file = "pandas-2.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:57abcaeda83fb80d447f28ab0cc7b32b13978f6f733875ebd1ed14f8fbc0f4ab"}, + {file = "pandas-2.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e60f1f7dba3c2d5ca159e18c46a34e7ca7247a73b5dd1a22b6d59707ed6b899a"}, + {file = "pandas-2.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb61dc8567b798b969bcc1fc964788f5a68214d333cade8319c7ab33e2b5d88a"}, + {file = "pandas-2.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:52826b5f4ed658fa2b729264d63f6732b8b29949c7fd234510d57c61dbeadfcd"}, + {file = "pandas-2.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bde2bc699dbd80d7bc7f9cab1e23a95c4375de615860ca089f34e7c64f4a8de7"}, + {file = "pandas-2.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:3de918a754bbf2da2381e8a3dcc45eede8cd7775b047b923f9006d5f876802ae"}, + {file = "pandas-2.2.0.tar.gz", hash = "sha256:30b83f7c3eb217fb4d1b494a57a2fda5444f17834f5df2de6b2ffff68dc3c8e2"}, ] [package.dependencies] numpy = {version = ">=1.22.4,<2", markers = "python_version < \"3.11\""} python-dateutil = ">=2.8.2" pytz = ">=2020.1" -tzdata = ">=2022.1" +tzdata = ">=2022.7" [package.extras] -all = ["PyQt5 (>=5.15.6)", "SQLAlchemy (>=1.4.36)", "beautifulsoup4 (>=4.11.1)", "bottleneck (>=1.3.4)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=0.8.1)", "fsspec (>=2022.05.0)", "gcsfs (>=2022.05.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.8.0)", "matplotlib (>=3.6.1)", "numba (>=0.55.2)", "numexpr (>=2.8.0)", "odfpy (>=1.4.1)", "openpyxl (>=3.0.10)", "pandas-gbq (>=0.17.5)", "psycopg2 (>=2.9.3)", "pyarrow (>=7.0.0)", "pymysql (>=1.0.2)", "pyreadstat (>=1.1.5)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "pyxlsb (>=1.0.9)", "qtpy (>=2.2.0)", "s3fs (>=2022.05.0)", "scipy (>=1.8.1)", "tables (>=3.7.0)", "tabulate (>=0.8.10)", "xarray (>=2022.03.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.3)", "zstandard (>=0.17.0)"] -aws = ["s3fs (>=2022.05.0)"] -clipboard = ["PyQt5 (>=5.15.6)", "qtpy (>=2.2.0)"] -compression = ["zstandard (>=0.17.0)"] -computation = ["scipy (>=1.8.1)", "xarray (>=2022.03.0)"] +all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq 
(>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] +aws = ["s3fs (>=2022.11.0)"] +clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] +compression = ["zstandard (>=0.19.0)"] +computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] consortium-standard = ["dataframe-api-compat (>=0.1.7)"] -excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.0.10)", "pyxlsb (>=1.0.9)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.3)"] -feather = ["pyarrow (>=7.0.0)"] -fss = ["fsspec (>=2022.05.0)"] -gcp = ["gcsfs (>=2022.05.0)", "pandas-gbq (>=0.17.5)"] -hdf5 = ["tables (>=3.7.0)"] -html = ["beautifulsoup4 (>=4.11.1)", "html5lib (>=1.1)", "lxml (>=4.8.0)"] -mysql = ["SQLAlchemy (>=1.4.36)", "pymysql (>=1.0.2)"] -output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.8.10)"] -parquet = ["pyarrow (>=7.0.0)"] -performance = ["bottleneck (>=1.3.4)", "numba (>=0.55.2)", "numexpr (>=2.8.0)"] -plot = ["matplotlib (>=3.6.1)"] -postgresql = ["SQLAlchemy (>=1.4.36)", "psycopg2 (>=2.9.3)"] -spss = ["pyreadstat (>=1.1.5)"] -sql-other = ["SQLAlchemy (>=1.4.36)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] +feather = ["pyarrow (>=10.0.1)"] +fss = ["fsspec (>=2022.11.0)"] +gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] +hdf5 = ["tables (>=3.8.0)"] +html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] +mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] +parquet = ["pyarrow (>=10.0.1)"] +performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] +plot = ["matplotlib (>=3.6.3)"] +postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] +spss = ["pyreadstat (>=1.2.0)"] +sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] -xml = ["lxml (>=4.8.0)"] +xml = ["lxml (>=4.9.2)"] [[package]] name = "pandocfilters" -version = "1.5.0" +version = "1.5.1" description = "Utilities for writing pandoc filters in python" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ - {file = "pandocfilters-1.5.0-py2.py3-none-any.whl", hash = "sha256:33aae3f25fd1a026079f5d27bdd52496f0e0803b3469282162bafdcbdf6ef14f"}, - {file = "pandocfilters-1.5.0.tar.gz", hash = "sha256:0b679503337d233b4339a817bfc8c50064e2eff681314376a47cb582305a7a38"}, + {file = "pandocfilters-1.5.1-py2.py3-none-any.whl", hash = "sha256:93be382804a9cdb0a7267585f157e5d1731bbe5545a85b268d6f5fe6232de2bc"}, + {file = "pandocfilters-1.5.1.tar.gz", hash = "sha256:002b4a555ee4ebc03f8b66307e287fa492e4a77b4ea14d3f934328297bb4939e"}, ] [[package]] @@ -2643,13 +2665,13 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co [[package]] name = "pluggy" -version = "1.3.0" +version = "1.4.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" files = [ - {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, - {file 
= "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, ] [package.extras] @@ -2706,27 +2728,27 @@ files = [ [[package]] name = "psutil" -version = "5.9.7" +version = "5.9.8" description = "Cross-platform lib for process and system monitoring in Python." optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ - {file = "psutil-5.9.7-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:0bd41bf2d1463dfa535942b2a8f0e958acf6607ac0be52265ab31f7923bcd5e6"}, - {file = "psutil-5.9.7-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:5794944462509e49d4d458f4dbfb92c47539e7d8d15c796f141f474010084056"}, - {file = "psutil-5.9.7-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:fe361f743cb3389b8efda21980d93eb55c1f1e3898269bc9a2a1d0bb7b1f6508"}, - {file = "psutil-5.9.7-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:e469990e28f1ad738f65a42dcfc17adaed9d0f325d55047593cb9033a0ab63df"}, - {file = "psutil-5.9.7-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:3c4747a3e2ead1589e647e64aad601981f01b68f9398ddf94d01e3dc0d1e57c7"}, - {file = "psutil-5.9.7-cp27-none-win32.whl", hash = "sha256:1d4bc4a0148fdd7fd8f38e0498639ae128e64538faa507df25a20f8f7fb2341c"}, - {file = "psutil-5.9.7-cp27-none-win_amd64.whl", hash = "sha256:4c03362e280d06bbbfcd52f29acd79c733e0af33d707c54255d21029b8b32ba6"}, - {file = "psutil-5.9.7-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ea36cc62e69a13ec52b2f625c27527f6e4479bca2b340b7a452af55b34fcbe2e"}, - {file = "psutil-5.9.7-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1132704b876e58d277168cd729d64750633d5ff0183acf5b3c986b8466cd0284"}, - {file = "psutil-5.9.7-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe8b7f07948f1304497ce4f4684881250cd859b16d06a1dc4d7941eeb6233bfe"}, - {file = "psutil-5.9.7-cp36-cp36m-win32.whl", hash = "sha256:b27f8fdb190c8c03914f908a4555159327d7481dac2f01008d483137ef3311a9"}, - {file = "psutil-5.9.7-cp36-cp36m-win_amd64.whl", hash = "sha256:44969859757f4d8f2a9bd5b76eba8c3099a2c8cf3992ff62144061e39ba8568e"}, - {file = "psutil-5.9.7-cp37-abi3-win32.whl", hash = "sha256:c727ca5a9b2dd5193b8644b9f0c883d54f1248310023b5ad3e92036c5e2ada68"}, - {file = "psutil-5.9.7-cp37-abi3-win_amd64.whl", hash = "sha256:f37f87e4d73b79e6c5e749440c3113b81d1ee7d26f21c19c47371ddea834f414"}, - {file = "psutil-5.9.7-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:032f4f2c909818c86cea4fe2cc407f1c0f0cde8e6c6d702b28b8ce0c0d143340"}, - {file = "psutil-5.9.7.tar.gz", hash = "sha256:3f02134e82cfb5d089fddf20bb2e03fd5cd52395321d1c8458a9e58500ff417c"}, + {file = "psutil-5.9.8-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:26bd09967ae00920df88e0352a91cff1a78f8d69b3ecabbfe733610c0af486c8"}, + {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:05806de88103b25903dff19bb6692bd2e714ccf9e668d050d144012055cbca73"}, + {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:611052c4bc70432ec770d5d54f64206aa7203a101ec273a0cd82418c86503bb7"}, + {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:50187900d73c1381ba1454cf40308c2bf6f34268518b3f36a9b663ca87e65e36"}, + 
{file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:02615ed8c5ea222323408ceba16c60e99c3f91639b07da6373fb7e6539abc56d"}, + {file = "psutil-5.9.8-cp27-none-win32.whl", hash = "sha256:36f435891adb138ed3c9e58c6af3e2e6ca9ac2f365efe1f9cfef2794e6c93b4e"}, + {file = "psutil-5.9.8-cp27-none-win_amd64.whl", hash = "sha256:bd1184ceb3f87651a67b2708d4c3338e9b10c5df903f2e3776b62303b26cb631"}, + {file = "psutil-5.9.8-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:aee678c8720623dc456fa20659af736241f575d79429a0e5e9cf88ae0605cc81"}, + {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cb6403ce6d8e047495a701dc7c5bd788add903f8986d523e3e20b98b733e421"}, + {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d06016f7f8625a1825ba3732081d77c94589dca78b7a3fc072194851e88461a4"}, + {file = "psutil-5.9.8-cp36-cp36m-win32.whl", hash = "sha256:7d79560ad97af658a0f6adfef8b834b53f64746d45b403f225b85c5c2c140eee"}, + {file = "psutil-5.9.8-cp36-cp36m-win_amd64.whl", hash = "sha256:27cc40c3493bb10de1be4b3f07cae4c010ce715290a5be22b98493509c6299e2"}, + {file = "psutil-5.9.8-cp37-abi3-win32.whl", hash = "sha256:bc56c2a1b0d15aa3eaa5a60c9f3f8e3e565303b465dbf57a1b730e7a2b9844e0"}, + {file = "psutil-5.9.8-cp37-abi3-win_amd64.whl", hash = "sha256:8db4c1b57507eef143a15a6884ca10f7c73876cdf5d51e713151c1236a0e68cf"}, + {file = "psutil-5.9.8-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:d16bbddf0693323b8c6123dd804100241da461e41d6e332fb0ba6058f630f8c8"}, + {file = "psutil-5.9.8.tar.gz", hash = "sha256:6be126e3225486dff286a8fb9a06246a5253f4c7c53b475ea5f5ac934e64194c"}, ] [package.extras] @@ -2817,47 +2839,47 @@ files = [ [[package]] name = "pydantic" -version = "1.10.13" +version = "1.10.14" description = "Data validation and settings management using python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.13-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:efff03cc7a4f29d9009d1c96ceb1e7a70a65cfe86e89d34e4a5f2ab1e5693737"}, - {file = "pydantic-1.10.13-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3ecea2b9d80e5333303eeb77e180b90e95eea8f765d08c3d278cd56b00345d01"}, - {file = "pydantic-1.10.13-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1740068fd8e2ef6eb27a20e5651df000978edce6da6803c2bef0bc74540f9548"}, - {file = "pydantic-1.10.13-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84bafe2e60b5e78bc64a2941b4c071a4b7404c5c907f5f5a99b0139781e69ed8"}, - {file = "pydantic-1.10.13-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bc0898c12f8e9c97f6cd44c0ed70d55749eaf783716896960b4ecce2edfd2d69"}, - {file = "pydantic-1.10.13-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:654db58ae399fe6434e55325a2c3e959836bd17a6f6a0b6ca8107ea0571d2e17"}, - {file = "pydantic-1.10.13-cp310-cp310-win_amd64.whl", hash = "sha256:75ac15385a3534d887a99c713aa3da88a30fbd6204a5cd0dc4dab3d770b9bd2f"}, - {file = "pydantic-1.10.13-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c553f6a156deb868ba38a23cf0df886c63492e9257f60a79c0fd8e7173537653"}, - {file = "pydantic-1.10.13-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5e08865bc6464df8c7d61439ef4439829e3ab62ab1669cddea8dd00cd74b9ffe"}, - {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:e31647d85a2013d926ce60b84f9dd5300d44535a9941fe825dc349ae1f760df9"}, - {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:210ce042e8f6f7c01168b2d84d4c9eb2b009fe7bf572c2266e235edf14bacd80"}, - {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8ae5dd6b721459bfa30805f4c25880e0dd78fc5b5879f9f7a692196ddcb5a580"}, - {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f8e81fc5fb17dae698f52bdd1c4f18b6ca674d7068242b2aff075f588301bbb0"}, - {file = "pydantic-1.10.13-cp311-cp311-win_amd64.whl", hash = "sha256:61d9dce220447fb74f45e73d7ff3b530e25db30192ad8d425166d43c5deb6df0"}, - {file = "pydantic-1.10.13-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4b03e42ec20286f052490423682016fd80fda830d8e4119f8ab13ec7464c0132"}, - {file = "pydantic-1.10.13-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f59ef915cac80275245824e9d771ee939133be38215555e9dc90c6cb148aaeb5"}, - {file = "pydantic-1.10.13-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a1f9f747851338933942db7af7b6ee8268568ef2ed86c4185c6ef4402e80ba8"}, - {file = "pydantic-1.10.13-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:97cce3ae7341f7620a0ba5ef6cf043975cd9d2b81f3aa5f4ea37928269bc1b87"}, - {file = "pydantic-1.10.13-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:854223752ba81e3abf663d685f105c64150873cc6f5d0c01d3e3220bcff7d36f"}, - {file = "pydantic-1.10.13-cp37-cp37m-win_amd64.whl", hash = "sha256:b97c1fac8c49be29486df85968682b0afa77e1b809aff74b83081cc115e52f33"}, - {file = "pydantic-1.10.13-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c958d053453a1c4b1c2062b05cd42d9d5c8eb67537b8d5a7e3c3032943ecd261"}, - {file = "pydantic-1.10.13-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c5370a7edaac06daee3af1c8b1192e305bc102abcbf2a92374b5bc793818599"}, - {file = "pydantic-1.10.13-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d6f6e7305244bddb4414ba7094ce910560c907bdfa3501e9db1a7fd7eaea127"}, - {file = "pydantic-1.10.13-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d3a3c792a58e1622667a2837512099eac62490cdfd63bd407993aaf200a4cf1f"}, - {file = "pydantic-1.10.13-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c636925f38b8db208e09d344c7aa4f29a86bb9947495dd6b6d376ad10334fb78"}, - {file = "pydantic-1.10.13-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:678bcf5591b63cc917100dc50ab6caebe597ac67e8c9ccb75e698f66038ea953"}, - {file = "pydantic-1.10.13-cp38-cp38-win_amd64.whl", hash = "sha256:6cf25c1a65c27923a17b3da28a0bdb99f62ee04230c931d83e888012851f4e7f"}, - {file = "pydantic-1.10.13-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8ef467901d7a41fa0ca6db9ae3ec0021e3f657ce2c208e98cd511f3161c762c6"}, - {file = "pydantic-1.10.13-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:968ac42970f57b8344ee08837b62f6ee6f53c33f603547a55571c954a4225691"}, - {file = "pydantic-1.10.13-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9849f031cf8a2f0a928fe885e5a04b08006d6d41876b8bbd2fc68a18f9f2e3fd"}, - {file = "pydantic-1.10.13-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:56e3ff861c3b9c6857579de282ce8baabf443f42ffba355bf070770ed63e11e1"}, - {file = "pydantic-1.10.13-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:9f00790179497767aae6bcdc36355792c79e7bbb20b145ff449700eb076c5f96"}, - {file = "pydantic-1.10.13-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:75b297827b59bc229cac1a23a2f7a4ac0031068e5be0ce385be1462e7e17a35d"}, - {file = "pydantic-1.10.13-cp39-cp39-win_amd64.whl", hash = "sha256:e70ca129d2053fb8b728ee7d1af8e553a928d7e301a311094b8a0501adc8763d"}, - {file = "pydantic-1.10.13-py3-none-any.whl", hash = "sha256:b87326822e71bd5f313e7d3bfdc77ac3247035ac10b0c0618bd99dcf95b1e687"}, - {file = "pydantic-1.10.13.tar.gz", hash = "sha256:32c8b48dcd3b2ac4e78b0ba4af3a2c2eb6048cb75202f0ea7b34feb740efc340"}, + {file = "pydantic-1.10.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f4fcec873f90537c382840f330b90f4715eebc2bc9925f04cb92de593eae054"}, + {file = "pydantic-1.10.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e3a76f571970fcd3c43ad982daf936ae39b3e90b8a2e96c04113a369869dc87"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d886bd3c3fbeaa963692ef6b643159ccb4b4cefaf7ff1617720cbead04fd1d"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:798a3d05ee3b71967844a1164fd5bdb8c22c6d674f26274e78b9f29d81770c4e"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:23d47a4b57a38e8652bcab15a658fdb13c785b9ce217cc3a729504ab4e1d6bc9"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9f674b5c3bebc2eba401de64f29948ae1e646ba2735f884d1594c5f675d6f2a"}, + {file = "pydantic-1.10.14-cp310-cp310-win_amd64.whl", hash = "sha256:24a7679fab2e0eeedb5a8924fc4a694b3bcaac7d305aeeac72dd7d4e05ecbebf"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d578ac4bf7fdf10ce14caba6f734c178379bd35c486c6deb6f49006e1ba78a7"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa7790e94c60f809c95602a26d906eba01a0abee9cc24150e4ce2189352deb1b"}, + {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad4e10efa5474ed1a611b6d7f0d130f4aafadceb73c11d9e72823e8f508e663"}, + {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245f4f61f467cb3dfeced2b119afef3db386aec3d24a22a1de08c65038b255f"}, + {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:21efacc678a11114c765eb52ec0db62edffa89e9a562a94cbf8fa10b5db5c046"}, + {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:412ab4a3f6dbd2bf18aefa9f79c7cca23744846b31f1d6555c2ee2b05a2e14ca"}, + {file = "pydantic-1.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:e897c9f35281f7889873a3e6d6b69aa1447ceb024e8495a5f0d02ecd17742a7f"}, + {file = "pydantic-1.10.14-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d604be0f0b44d473e54fdcb12302495fe0467c56509a2f80483476f3ba92b33c"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42c7d17706911199798d4c464b352e640cab4351efe69c2267823d619a937e5"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:596f12a1085e38dbda5cbb874d0973303e34227b400b6414782bf205cc14940c"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bfb113860e9288d0886e3b9e49d9cf4a9d48b441f52ded7d96db7819028514cc"}, + {file = 
"pydantic-1.10.14-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bc3ed06ab13660b565eed80887fcfbc0070f0aa0691fbb351657041d3e874efe"}, + {file = "pydantic-1.10.14-cp37-cp37m-win_amd64.whl", hash = "sha256:ad8c2bc677ae5f6dbd3cf92f2c7dc613507eafe8f71719727cbc0a7dec9a8c01"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c37c28449752bb1f47975d22ef2882d70513c546f8f37201e0fec3a97b816eee"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49a46a0994dd551ec051986806122767cf144b9702e31d47f6d493c336462597"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53e3819bd20a42470d6dd0fe7fc1c121c92247bca104ce608e609b59bc7a77ee"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbb503bbbbab0c588ed3cd21975a1d0d4163b87e360fec17a792f7d8c4ff29f"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:336709883c15c050b9c55a63d6c7ff09be883dbc17805d2b063395dd9d9d0022"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4ae57b4d8e3312d486e2498d42aed3ece7b51848336964e43abbf9671584e67f"}, + {file = "pydantic-1.10.14-cp38-cp38-win_amd64.whl", hash = "sha256:dba49d52500c35cfec0b28aa8b3ea5c37c9df183ffc7210b10ff2a415c125c4a"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c66609e138c31cba607d8e2a7b6a5dc38979a06c900815495b2d90ce6ded35b4"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d986e115e0b39604b9eee3507987368ff8148222da213cd38c359f6f57b3b347"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:646b2b12df4295b4c3148850c85bff29ef6d0d9621a8d091e98094871a62e5c7"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282613a5969c47c83a8710cc8bfd1e70c9223feb76566f74683af889faadc0ea"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:466669501d08ad8eb3c4fecd991c5e793c4e0bbd62299d05111d4f827cded64f"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:13e86a19dca96373dcf3190fcb8797d40a6f12f154a244a8d1e8e03b8f280593"}, + {file = "pydantic-1.10.14-cp39-cp39-win_amd64.whl", hash = "sha256:08b6ec0917c30861e3fe71a93be1648a2aa4f62f866142ba21670b24444d7fd8"}, + {file = "pydantic-1.10.14-py3-none-any.whl", hash = "sha256:8ee853cd12ac2ddbf0ecbac1c289f95882b2d4482258048079d13be700aa114c"}, + {file = "pydantic-1.10.14.tar.gz", hash = "sha256:46f17b832fe27de7850896f3afee50ea682220dd218f7e9c88d436788419dca6"}, ] [package.dependencies] @@ -3757,45 +3779,45 @@ synapse = ["synapseclient (>=3.0.0,<4.0.0)"] [[package]] name = "scipy" -version = "1.11.4" +version = "1.12.0" description = "Fundamental algorithms for scientific computing in Python" optional = false python-versions = ">=3.9" files = [ - {file = "scipy-1.11.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bc9a714581f561af0848e6b69947fda0614915f072dfd14142ed1bfe1b806710"}, - {file = "scipy-1.11.4-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:cf00bd2b1b0211888d4dc75656c0412213a8b25e80d73898083f402b50f47e41"}, - {file = "scipy-1.11.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9999c008ccf00e8fbcce1236f85ade5c569d13144f77a1946bef8863e8f6eb4"}, - {file = "scipy-1.11.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:933baf588daa8dc9a92c20a0be32f56d43faf3d1a60ab11b3f08c356430f6e56"}, - {file = "scipy-1.11.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8fce70f39076a5aa62e92e69a7f62349f9574d8405c0a5de6ed3ef72de07f446"}, - {file = "scipy-1.11.4-cp310-cp310-win_amd64.whl", hash = "sha256:6550466fbeec7453d7465e74d4f4b19f905642c89a7525571ee91dd7adabb5a3"}, - {file = "scipy-1.11.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f313b39a7e94f296025e3cffc2c567618174c0b1dde173960cf23808f9fae4be"}, - {file = "scipy-1.11.4-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:1b7c3dca977f30a739e0409fb001056484661cb2541a01aba0bb0029f7b68db8"}, - {file = "scipy-1.11.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00150c5eae7b610c32589dda259eacc7c4f1665aedf25d921907f4d08a951b1c"}, - {file = "scipy-1.11.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:530f9ad26440e85766509dbf78edcfe13ffd0ab7fec2560ee5c36ff74d6269ff"}, - {file = "scipy-1.11.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5e347b14fe01003d3b78e196e84bd3f48ffe4c8a7b8a1afbcb8f5505cb710993"}, - {file = "scipy-1.11.4-cp311-cp311-win_amd64.whl", hash = "sha256:acf8ed278cc03f5aff035e69cb511741e0418681d25fbbb86ca65429c4f4d9cd"}, - {file = "scipy-1.11.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:028eccd22e654b3ea01ee63705681ee79933652b2d8f873e7949898dda6d11b6"}, - {file = "scipy-1.11.4-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:2c6ff6ef9cc27f9b3db93a6f8b38f97387e6e0591600369a297a50a8e96e835d"}, - {file = "scipy-1.11.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b030c6674b9230d37c5c60ab456e2cf12f6784596d15ce8da9365e70896effc4"}, - {file = "scipy-1.11.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad669df80528aeca5f557712102538f4f37e503f0c5b9541655016dd0932ca79"}, - {file = "scipy-1.11.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ce7fff2e23ab2cc81ff452a9444c215c28e6305f396b2ba88343a567feec9660"}, - {file = "scipy-1.11.4-cp312-cp312-win_amd64.whl", hash = "sha256:36750b7733d960d7994888f0d148d31ea3017ac15eef664194b4ef68d36a4a97"}, - {file = "scipy-1.11.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6e619aba2df228a9b34718efb023966da781e89dd3d21637b27f2e54db0410d7"}, - {file = "scipy-1.11.4-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:f3cd9e7b3c2c1ec26364856f9fbe78695fe631150f94cd1c22228456404cf1ec"}, - {file = "scipy-1.11.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d10e45a6c50211fe256da61a11c34927c68f277e03138777bdebedd933712fea"}, - {file = "scipy-1.11.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91af76a68eeae0064887a48e25c4e616fa519fa0d38602eda7e0f97d65d57937"}, - {file = "scipy-1.11.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6df1468153a31cf55ed5ed39647279beb9cfb5d3f84369453b49e4b8502394fd"}, - {file = "scipy-1.11.4-cp39-cp39-win_amd64.whl", hash = "sha256:ee410e6de8f88fd5cf6eadd73c135020bfbbbdfcd0f6162c36a7638a1ea8cc65"}, - {file = "scipy-1.11.4.tar.gz", hash = "sha256:90a2b78e7f5733b9de748f589f09225013685f9b218275257f8a8168ededaeaa"}, + {file = "scipy-1.12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:78e4402e140879387187f7f25d91cc592b3501a2e51dfb320f48dfb73565f10b"}, + {file = "scipy-1.12.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:f5f00ebaf8de24d14b8449981a2842d404152774c1a1d880c901bf454cb8e2a1"}, + {file = "scipy-1.12.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:e53958531a7c695ff66c2e7bb7b79560ffdc562e2051644c5576c39ff8efb563"}, + {file = "scipy-1.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e32847e08da8d895ce09d108a494d9eb78974cf6de23063f93306a3e419960c"}, + {file = "scipy-1.12.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4c1020cad92772bf44b8e4cdabc1df5d87376cb219742549ef69fc9fd86282dd"}, + {file = "scipy-1.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:75ea2a144096b5e39402e2ff53a36fecfd3b960d786b7efd3c180e29c39e53f2"}, + {file = "scipy-1.12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:408c68423f9de16cb9e602528be4ce0d6312b05001f3de61fe9ec8b1263cad08"}, + {file = "scipy-1.12.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:5adfad5dbf0163397beb4aca679187d24aec085343755fcdbdeb32b3679f254c"}, + {file = "scipy-1.12.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3003652496f6e7c387b1cf63f4bb720951cfa18907e998ea551e6de51a04467"}, + {file = "scipy-1.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b8066bce124ee5531d12a74b617d9ac0ea59245246410e19bca549656d9a40a"}, + {file = "scipy-1.12.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8bee4993817e204d761dba10dbab0774ba5a8612e57e81319ea04d84945375ba"}, + {file = "scipy-1.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:a24024d45ce9a675c1fb8494e8e5244efea1c7a09c60beb1eeb80373d0fecc70"}, + {file = "scipy-1.12.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e7e76cc48638228212c747ada851ef355c2bb5e7f939e10952bc504c11f4e372"}, + {file = "scipy-1.12.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:f7ce148dffcd64ade37b2df9315541f9adad6efcaa86866ee7dd5db0c8f041c3"}, + {file = "scipy-1.12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c39f92041f490422924dfdb782527a4abddf4707616e07b021de33467f917bc"}, + {file = "scipy-1.12.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a7ebda398f86e56178c2fa94cad15bf457a218a54a35c2a7b4490b9f9cb2676c"}, + {file = "scipy-1.12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:95e5c750d55cf518c398a8240571b0e0782c2d5a703250872f36eaf737751338"}, + {file = "scipy-1.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:e646d8571804a304e1da01040d21577685ce8e2db08ac58e543eaca063453e1c"}, + {file = "scipy-1.12.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:913d6e7956c3a671de3b05ccb66b11bc293f56bfdef040583a7221d9e22a2e35"}, + {file = "scipy-1.12.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:bba1b0c7256ad75401c73e4b3cf09d1f176e9bd4248f0d3112170fb2ec4db067"}, + {file = "scipy-1.12.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:730badef9b827b368f351eacae2e82da414e13cf8bd5051b4bdfd720271a5371"}, + {file = "scipy-1.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6546dc2c11a9df6926afcbdd8a3edec28566e4e785b915e849348c6dd9f3f490"}, + {file = "scipy-1.12.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:196ebad3a4882081f62a5bf4aeb7326aa34b110e533aab23e4374fcccb0890dc"}, + {file = "scipy-1.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:b360f1b6b2f742781299514e99ff560d1fe9bd1bff2712894b52abe528d1fd1e"}, + {file = "scipy-1.12.0.tar.gz", hash = "sha256:4bf5abab8a36d20193c698b0f1fc282c1d083c94723902c447e5d2f1780936a3"}, ] [package.dependencies] -numpy = ">=1.21.6,<1.28.0" +numpy = ">=1.22.4,<1.29.0" [package.extras] dev = ["click", "cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy", "pycodestyle", "pydevtool", 
"rich-click", "ruff", "types-psutil", "typing_extensions"] doc = ["jupytext", "matplotlib (>2)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (==0.9.0)", "sphinx (!=4.1.0)", "sphinx-design (>=0.2.0)"] -test = ["asv", "gmpy2", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] +test = ["asv", "gmpy2", "hypothesis", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] [[package]] name = "secretstorage" @@ -4312,13 +4334,13 @@ files = [ [[package]] name = "toolz" -version = "0.12.0" +version = "0.12.1" description = "List processing tools and functional utilities" optional = false -python-versions = ">=3.5" +python-versions = ">=3.7" files = [ - {file = "toolz-0.12.0-py3-none-any.whl", hash = "sha256:2059bd4148deb1884bb0eb770a3cde70e7f954cfbbdc2285f1f2de01fd21eb6f"}, - {file = "toolz-0.12.0.tar.gz", hash = "sha256:88c570861c440ee3f2f6037c4654613228ff40c93a6c25e0eba70d17282c6194"}, + {file = "toolz-0.12.1-py3-none-any.whl", hash = "sha256:d22731364c07d72eea0a0ad45bafb2c2937ab6fd38a3507bf55eae8744aa7d85"}, + {file = "toolz-0.12.1.tar.gz", hash = "sha256:ecca342664893f177a13dac0e6b41cbd8ac25a358e5f215316d43e2100224f4d"}, ] [[package]] @@ -4398,6 +4420,21 @@ files = [ {file = "typing_extensions-4.5.0.tar.gz", hash = "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb"}, ] +[[package]] +name = "typing-inspect" +version = "0.9.0" +description = "Runtime inspection utilities for typing module." +optional = false +python-versions = "*" +files = [ + {file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"}, + {file = "typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78"}, +] + +[package.dependencies] +mypy-extensions = ">=0.3.0" +typing-extensions = ">=3.7.4" + [[package]] name = "tzdata" version = "2023.4" @@ -4668,4 +4705,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = ">=3.9.0,<3.11" -content-hash = "65fd6ec0494aecb3e9b89b59479440ff24be22c8867df6718ddd16eac3e7bdec" +content-hash = "8b8825da1b417ca244afafcf5ece4f94adcf7cce3d4097f1b95c408a43710e49" From 80cff9d9c0b42c9e255d9da04dc4cf2d655b9d60 Mon Sep 17 00:00:00 2001 From: GiaJordan Date: Fri, 26 Jan 2024 14:28:19 -0700 Subject: [PATCH 100/199] run black --- schematic/models/validate_manifest.py | 10 +++++--- schematic/store/synapse.py | 6 ++--- schematic/utils/df_utils.py | 36 ++++++++++++++++----------- 3 files changed, 32 insertions(+), 20 deletions(-) diff --git a/schematic/models/validate_manifest.py b/schematic/models/validate_manifest.py index 9cbb28593..5ed1afa96 100644 --- a/schematic/models/validate_manifest.py +++ b/schematic/models/validate_manifest.py @@ -258,9 +258,13 @@ def validate_manifest_values( col_attr = {} # save the mapping between column index and attribute name # numerical values need to be type string for the jsonValidator - for col in manifest.select_dtypes(include=[int, np.int64, float, np.float64]).columns: - manifest[col]=manifest[col].astype('string') - manifest = manifest.map(lambda x: str(x) if isinstance(x, Number) else x, na_action='ignore') + for col in manifest.select_dtypes( + include=[int, np.int64, float, np.float64] + ).columns: + manifest[col] = manifest[col].astype("string") + manifest = manifest.map( + lambda x: str(x) if isinstance(x, Number) 
else x, na_action="ignore"
+    )
 
     annotations = json.loads(manifest.to_json(orient="records"))
 
     for i, annotation in enumerate(annotations):
diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py
index 98a697563..b4b67cadf 100644
--- a/schematic/store/synapse.py
+++ b/schematic/store/synapse.py
@@ -494,15 +494,15 @@ def _get_manifest_id(self, manifest: pd.DataFrame) -> str:
             # Try to use uncensored manifest first
             not_censored = ~censored
             if any(not_censored):
-                manifest_syn_id=manifest[not_censored]["id"].iloc[0]
+                manifest_syn_id = manifest[not_censored]["id"].iloc[0]
             # if only censored manifests are available, just use the first censored manifest
-            else:
+            else:
                 manifest_syn_id = manifest["id"].iloc[0]
 
         # otherwise, use the first (implied only) version that exists
         else:
             manifest_syn_id = manifest["id"].iloc[0]
-
+
         return manifest_syn_id
 
     def getDatasetManifest(
diff --git a/schematic/utils/df_utils.py b/schematic/utils/df_utils.py
index 6ca74896b..ed2943324 100644
--- a/schematic/utils/df_utils.py
+++ b/schematic/utils/df_utils.py
@@ -43,20 +43,21 @@ def load_df(
     if preserve_raw_input:
         logger.debug(f"Load Elapsed time {perf_counter()-t_load_df}")
         return org_df
-
-    is_null = org_df.isnull()
-    org_df = org_df.astype(str).mask(is_null, '')
+
+    is_null = org_df.isnull()
+    org_df = org_df.astype(str).mask(is_null, "")
 
     ints, is_int = find_and_convert_ints(org_df)
 
     float_df = convert_floats(org_df)
 
     # Store values that were converted to type int in the final dataframe
-    processed_df=float_df.mask(is_int, other = ints)
+    processed_df = float_df.mask(is_int, other=ints)
 
     logger.debug(f"Load Elapsed time {perf_counter()-t_load_df}")
     return processed_df
 
+
 def find_and_convert_ints(df: pd.DataFrame) -> tuple[pd.DataFrame, pd.DataFrame]:
     """
     Find strings that represent integers and convert to type int
@@ -69,18 +70,23 @@ def find_and_convert_ints(df: pd.DataFrame) -> tuple[pd.DataFrame, pd.DataFrame]
     """
     large_manifest_cutoff_size = 1000
     # Find integers stored as strings and replace with entries of type np.int64
-    if df.size < large_manifest_cutoff_size: # If small manifest, iterate as normal for improved performance
-        ints = df.map(lambda x: convert_ints(x), na_action='ignore').fillna(False)
-
-    else: # parallelize iterations for large manifests
-        pandarallel.initialize(verbose = 1)
-        ints = df.parallel_map(lambda x: convert_ints(x), na_action='ignore').fillna(False)
+    if (
+        df.size < large_manifest_cutoff_size
+    ):  # If small manifest, iterate as normal for improved performance
+        ints = df.map(lambda x: convert_ints(x), na_action="ignore").fillna(False)
+
+    else:  # parallelize iterations for large manifests
+        pandarallel.initialize(verbose=1)
+        ints = df.parallel_map(lambda x: convert_ints(x), na_action="ignore").fillna(
+            False
+        )
 
     # Identify cells converted to integers
     is_int = ints.map(pd.api.types.is_integer)
 
     return ints, is_int
 
+
 def convert_ints(x: str) -> Union[np.int64, bool]:
     """
     Lambda function to convert a string to an integer if possible, otherwise returns False
@@ -91,6 +97,7 @@ def convert_ints(x: str) -> Union[np.int64, bool]:
     """
     return np.int64(x) if str.isdigit(x) else False
 
+
 def convert_floats(df: pd.DataFrame) -> pd.DataFrame:
     """
     Convert strings that represent floats to type float
@@ -99,19 +106,20 @@ def convert_floats(df: pd.DataFrame) -> pd.DataFrame:
     Returns:
         float_df: dataframe with values that were converted to type float.
Columns are type object """ - # create a separate copy of the manifest + # create a separate copy of the manifest # before beginning conversions to store float values - float_df=deepcopy(df) - + float_df = deepcopy(df) + # convert strings to numerical dtype (float) if possible, preserve non-numerical strings for col in df.columns: - float_df[col]=pd.to_numeric(float_df[col], errors='coerce').astype('object') + float_df[col] = pd.to_numeric(float_df[col], errors="coerce").astype("object") # replace values that couldn't be converted to float with the original str values float_df[col].fillna(df[col][float_df[col].isna()], inplace=True) return float_df + def _parse_dates(date_string): try: date = dp.parse(date_string=date_string, settings={"STRICT_PARSING": True}) From a7828072b10cce21bcd30113e2ae1ee907042e88 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Fri, 26 Jan 2024 15:16:47 -0700 Subject: [PATCH 101/199] update GX val rule call --- schematic/models/GE_Helpers.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/schematic/models/GE_Helpers.py b/schematic/models/GE_Helpers.py index c8a2bd9c2..d1fcfb577 100644 --- a/schematic/models/GE_Helpers.py +++ b/schematic/models/GE_Helpers.py @@ -164,7 +164,9 @@ def build_expectation_suite( # remove trailing/leading whitespaces from manifest self.manifest.map(lambda x: x.strip() if isinstance(x, str) else x) - validation_rules = self.sg.get_node_validation_rules(col) + validation_rules = self.dmge.get_node_validation_rules( + node_display_name=col + ) # check if attribute has any rules associated with it if validation_rules: From 157359d8bdabd6e0727b60f6004356a443e62990 Mon Sep 17 00:00:00 2001 From: linglp Date: Mon, 29 Jan 2024 10:27:58 -0500 Subject: [PATCH 102/199] add strict param --- schematic/manifest/generator.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py index e3e244271..dbd340c67 100644 --- a/schematic/manifest/generator.py +++ b/schematic/manifest/generator.py @@ -1689,6 +1689,7 @@ def create_manifests( graph_data_model=graph_data_model, output_format=output_format, title=t, + strict=strict, access_token=access_token, ) all_results.append(result) @@ -1714,6 +1715,7 @@ def create_manifests( dataset_id=dataset_ids[i], output_format=output_format, title=t, + strict=strict, access_token=access_token, use_annotations=use_annotations, ) @@ -1724,6 +1726,7 @@ def create_manifests( graph_data_model=graph_data_model, output_format=output_format, title=t, + strict=strict, access_token=access_token, use_annotations=use_annotations, ) From fe3925b8d2c74b252ccad92bfc6b3e1bba793ff3 Mon Sep 17 00:00:00 2001 From: linglp Date: Mon, 29 Jan 2024 10:51:01 -0500 Subject: [PATCH 103/199] fix typing --- schematic/manifest/generator.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py index dbd340c67..c19062eac 100644 --- a/schematic/manifest/generator.py +++ b/schematic/manifest/generator.py @@ -1574,7 +1574,7 @@ def create_single_manifest( title: Optional[str] = None, output_format: Literal["google_sheet", "excel", "dataframe"] = "google_sheet", use_annotations: Optional[bool] = False, - ) -> Union[str, pd.DataFrame, BinaryIO]: + ) -> Union[str, pd.DataFrame]: """Create a single manifest Args: @@ -1588,7 +1588,7 @@ def create_single_manifest( use_annotations (bool, optional): whether to use annotations. Defaults to False. 
Returns: - Union[str, pd.DataFrame, BinaryIO]: Googlesheet URL or pandas dataframe or Excel. + Union[str, pd.DataFrame]: Googlesheet URL or pandas dataframe or an excel file path """ # create object of type ManifestGenerator manifest_generator = ManifestGenerator( @@ -1625,7 +1625,7 @@ def create_manifests( title: Optional[str] = None, strict: Optional[bool] = True, use_annotations: Optional[bool] = False, - ) -> Union[List[str], List[pd.DataFrame], BinaryIO]: + ) -> Union[List[str], List[pd.DataFrame]]: """Create multiple manifests Args: @@ -1639,7 +1639,7 @@ def create_manifests( use_annotations (bool, optional): whether to use annotations. Defaults to False. Returns: - Union[List[str], List[pd.DataFrame], BinaryIO]: a list of Googlesheet URLs, a list of pandas dataframes or an Excel file. + Union[List[str], List[pd.DataFrame]]: a list of Googlesheet URLs, a list of pandas dataframes or excel file paths """ if dataset_ids: # Check that the number of submitted data_types matches From d87126a4200ab3de7c21ee1b8dc3df4dda0ff88c Mon Sep 17 00:00:00 2001 From: andrewelamb Date: Mon, 29 Jan 2024 09:39:58 -0800 Subject: [PATCH 104/199] added credentials setter --- schematic/configuration/configuration.py | 9 +++++++++ tests/test_configuration.py | 10 ++++++++++ 2 files changed, 19 insertions(+) diff --git a/schematic/configuration/configuration.py b/schematic/configuration/configuration.py index c31007672..7c6507468 100644 --- a/schematic/configuration/configuration.py +++ b/schematic/configuration/configuration.py @@ -181,6 +181,15 @@ def service_account_credentials_path(self) -> str: self._google_sheets_config.service_acct_creds, self._parent_directory ) + @service_account_credentials_path.setter + def service_account_credentials_path(self, path: str) -> None: + """Sets the path of the Google service account credentials. + + Args: + path (str): The path of the Google service account credentials. 
+ """ + self._google_sheets_config.service_acct_creds = path + @property def google_sheets_master_template_id(self) -> str: """ diff --git a/tests/test_configuration.py b/tests/test_configuration.py index b2fd59a7a..60785d656 100644 --- a/tests/test_configuration.py +++ b/tests/test_configuration.py @@ -213,3 +213,13 @@ def test_set_synapse_master_fileview_id(self) -> None: assert config.synapse_master_fileview_id == "syn1" with pytest.raises(ValidationError): config.synapse_master_fileview_id = "syn" + + def test_set_service_account_credentials_path(self) -> None: + """Testing for Configuration service_account_credentials_path setter""" + config = Configuration() + assert ( + os.path.basename(config.service_account_credentials_path) + == "schematic_service_account_creds.json" + ) + config.service_account_credentials_path = "test.json" + assert os.path.basename(config.service_account_credentials_path) == "test.json" From 5ebe7828d6cbc2521fa6b70edfbe7df5188b5a76 Mon Sep 17 00:00:00 2001 From: andrewelamb Date: Mon, 29 Jan 2024 09:55:00 -0800 Subject: [PATCH 105/199] fix configuration when config file has no asset store section --- schematic/configuration/configuration.py | 9 +++++---- tests/data/test_configs/valid_config2.yml | 15 +++++++++++++++ tests/test_configuration.py | 8 ++++++++ 3 files changed, 28 insertions(+), 4 deletions(-) create mode 100644 tests/data/test_configs/valid_config2.yml diff --git a/schematic/configuration/configuration.py b/schematic/configuration/configuration.py index 7c6507468..1bd3f1c40 100644 --- a/schematic/configuration/configuration.py +++ b/schematic/configuration/configuration.py @@ -83,13 +83,14 @@ def load_config(self, config_path: str) -> None: self._google_sheets_config = GoogleSheetsConfig( **config.get("google_sheets", {}) ) - self._set_asset_store(config.get("asset_store", {})) + asset_store_config = config.get("asset_store", None) + if asset_store_config: + self._set_asset_store(asset_store_config) def _set_asset_store(self, config: dict[str, Any]) -> None: allowed_config_fields = {"synapse"} - if not config: - pass - if not set(config.keys()).issubset(allowed_config_fields): + all_fields_are_valid = set(config.keys()).issubset(allowed_config_fields) + if not all_fields_are_valid: raise ConfigNonAllowedFieldError( "Non allowed fields in asset_store of configuration file.", list(config.keys()), diff --git a/tests/data/test_configs/valid_config2.yml b/tests/data/test_configs/valid_config2.yml new file mode 100644 index 000000000..78306ee18 --- /dev/null +++ b/tests/data/test_configs/valid_config2.yml @@ -0,0 +1,15 @@ +# This is a valid config, but missing the asset store section + +manifest: + manifest_folder: "folder_name" + title: "title" + data_type: + - "data_type" + +model: + location: "model.jsonld" + +google_sheets: + service_acct_creds_synapse_id: "syn1" + service_acct_creds: "creds.json" + strict_validation: false diff --git a/tests/test_configuration.py b/tests/test_configuration.py index 60785d656..e236d0605 100644 --- a/tests/test_configuration.py +++ b/tests/test_configuration.py @@ -198,6 +198,14 @@ def test_load_config3(self) -> None: is not valid """ config = Configuration() + config.load_config("tests/data/test_configs/valid_config2.yml") + + def test_load_config4(self) -> None: + """ + Testing for Configuration.load_config where config file + has no asset store section + """ + config = Configuration() with pytest.raises(ConfigNonAllowedFieldError): config.load_config("tests/data/test_configs/invalid_config1.yml") with 
pytest.raises(ConfigNonAllowedFieldError): From 08ef48fa0d7a10cbda30e8fdaa0dd46b19bfb4c5 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice <85905780+mialy-defelice@users.noreply.github.com> Date: Mon, 29 Jan 2024 18:17:13 -0800 Subject: [PATCH 106/199] fix missed merged conflict --- tests/test_api.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/tests/test_api.py b/tests/test_api.py index 945fdfff5..e9461364d 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -86,10 +86,7 @@ def data_model_jsonld(): @pytest.fixture(scope="class") def benchmark_data_model_jsonld(): # benchmark_data_model_jsonld = "https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.single_rule.model.jsonld" -<<<<<<< HEAD # Placeholder till the model is updated on develop -======= ->>>>>>> 01208bd31df6c6573780f51ca63ffcb17e75dd0c benchmark_data_model_jsonld = "https://raw.githubusercontent.com/mialy-defelice/data_models/main/example.single_rule.model.jsonld" yield benchmark_data_model_jsonld From cb7bfab283dc5af71d0a0920df300b8861495dea Mon Sep 17 00:00:00 2001 From: linglp Date: Wed, 31 Jan 2024 10:58:32 -0500 Subject: [PATCH 107/199] add test endpoint; try solution --- certificate.conf | 2 ++ schematic_api/api/openapi/api.yaml | 35 +++++++++++++++++++++++++++++- schematic_api/api/routes.py | 17 +++++++++++++-- uwsgi-nginx-entrypoint.sh | 2 ++ 4 files changed, 53 insertions(+), 3 deletions(-) diff --git a/certificate.conf b/certificate.conf index eb5bf94d9..a61e8af82 100644 --- a/certificate.conf +++ b/certificate.conf @@ -7,6 +7,8 @@ server { proxy_read_timeout 300; proxy_connect_timeout 300; proxy_send_timeout 300; + send_timeout 300; + uwsgi_read_timeout 300; location / { try_files $uri @app; } diff --git a/schematic_api/api/openapi/api.yaml b/schematic_api/api/openapi/api.yaml index e4a4b66b0..337dfc6b7 100644 --- a/schematic_api/api/openapi/api.yaml +++ b/schematic_api/api/openapi/api.yaml @@ -1252,4 +1252,37 @@ paths: description: Schematic version was not able to be identified. 
tags: - Version - \ No newline at end of file + + /test_time_out: + get: + summary: Test time out 1 + description: Test time out 1 + operationId: schematic_api.api.routes.test_time_out + responses: + "200": + description: Test + content: + text/plain: + schema: + type: string + "500": + description: Test + tags: + - Test + + /test_time_out_two: + get: + summary: Test time out 2 + description: Test time out 2 + operationId: schematic_api.api.routes.test_time_out_two + responses: + "200": + description: Test + content: + text/plain: + schema: + type: string + "500": + description: Test + tags: + - Test \ No newline at end of file diff --git a/schematic_api/api/routes.py b/schematic_api/api/routes.py index 44fdd3e42..23b3b41e8 100644 --- a/schematic_api/api/routes.py +++ b/schematic_api/api/routes.py @@ -7,6 +7,7 @@ import logging import pathlib import pickle +import time import connexion from connexion.decorators.uri_parsing import Swagger2URIParser @@ -39,7 +40,7 @@ ) from schematic.utils.general import entity_type_mapping from schematic.utils.schema_utils import get_property_label_from_display_name - +from schematic.utils.general import profile logger = logging.getLogger(__name__) logging.basicConfig(level=logging.DEBUG) @@ -728,7 +729,7 @@ def get_asset_view_table(asset_view, return_type): file_view_table_df.to_csv(export_path, index=False) return export_path - +@profile(sort_by='cumulative', strip_dirs=True) def get_project_manifests(project_id, asset_view): # Access token now stored in request header access_token = get_access_token() @@ -1013,3 +1014,15 @@ def get_schematic_version() -> str: "Using this endpoint to check the version of schematic is only supported when the API is running in a docker container." ) return version + +def test_time_out(): + """return test time out + """ + time.sleep(60) + return "okay" + +def test_time_out_two(): + """return test time out + """ + time.sleep(59.9) + return "okay" \ No newline at end of file diff --git a/uwsgi-nginx-entrypoint.sh b/uwsgi-nginx-entrypoint.sh index 0fa2e6188..722d49941 100644 --- a/uwsgi-nginx-entrypoint.sh +++ b/uwsgi-nginx-entrypoint.sh @@ -22,6 +22,8 @@ else content_server=$content_server' proxy_read_timeout 300;\n' content_server=$content_server' proxy_connect_timeout 300;\n' content_server=$content_server' proxy_send_timeout 300;\n' + content_server=$content_server' send_timeout 300;\n' + content_server=$content_server' uwsgi_read_timeout 300;\n' content_server=$content_server' location / {\n' content_server=$content_server' try_files $uri @app;\n' content_server=$content_server' }\n' From fbab0a1677a6b5594b1239ecdfb6d4841d04da94 Mon Sep 17 00:00:00 2001 From: linglp Date: Wed, 31 Jan 2024 14:25:13 -0500 Subject: [PATCH 108/199] remove unnecessary fstring --- schematic/manifest/generator.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py index c19062eac..d67229fe4 100644 --- a/schematic/manifest/generator.py +++ b/schematic/manifest/generator.py @@ -1656,8 +1656,8 @@ def create_manifests( # Raise an error if used in conjunction with datatype = 'all_manifests' if data_types[0] == "all manifests": raise ValueError( - f"When submitting 'all manifests' as the data_type cannot also submit dataset_id. " - f"Please check your submission and try again." + "When submitting 'all manifests' as the data_type cannot also submit dataset_id. " + "Please check your submission and try again." 
) data_model_parser = DataModelParser(path_to_data_model=path_to_data_model) From f3d8e27895e3712c01193f9877632bf77a17bc58 Mon Sep 17 00:00:00 2001 From: linglp Date: Wed, 31 Jan 2024 14:34:58 -0500 Subject: [PATCH 109/199] regenerate docstring --- schematic/manifest/generator.py | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py index d67229fe4..8822bc322 100644 --- a/schematic/manifest/generator.py +++ b/schematic/manifest/generator.py @@ -1578,14 +1578,15 @@ def create_single_manifest( """Create a single manifest Args: - jsonld (str): jsonld schema + path_to_data_model (str): data model schema + graph_data_model (nx.MultiDiGraph): graph data model data_type (str): data type of a manifest - access_token (str, optional): synapse access token. Required when getting an existing manifest. Defaults to None. - dataset_id (str, optional): dataset id when generating an existing manifest. Defaults to None. - strict (bool, optional): strictness with which to apply validation rules to google sheets. Defaults to True. - title (str, optional): title of a given manifest. Defaults to None. - output_format (str, optional): format of manifest. It has three options: google sheet, excel or dataframe. Defaults to None. - use_annotations (bool, optional): whether to use annotations. Defaults to False. + access_token (Optional[str], optional): synapse access token. Required when getting an existing manifest. Defaults to None. + dataset_id (Optional[str], optional):dataset id when generating an existing manifest. Defaults to None. Defaults to None. + strict (Optional[bool], optional): strictness with which to apply validation rules to google sheets. Defaults to True. + title (Optional[str], optional):title of a given manifest. Defaults to None. + output_format (Literal['google_sheet', 'excel', 'dataframe'], optional): format of manifest. Defaults to "google_sheet". + use_annotations (Optional[bool], optional):whether to use annotations. Defaults to False. Returns: Union[str, pd.DataFrame]: Googlesheet URL or pandas dataframe or an excel file path @@ -1632,7 +1633,7 @@ def create_manifests( path_to_data_model (str): str path to data model data_type (list): a list of data types access_token (str, optional): synapse access token. Required when getting an existing manifest. Defaults to None. - dataset_id (list, optional): a list of dataset ids when generating an existing manifest. Defaults to None. + dataset_ids (list, optional): a list of dataset ids when generating an existing manifest. Defaults to None. output_format (str, optional):format of manifest. It has three options: google sheet, excel or dataframe. Defaults to None. title (str, optional): title of a given manifest. Defaults to None. strict (bool, optional): strictness with which to apply validation rules to google sheets. Defaults to None. 
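The regenerated docstrings above describe the two module-level generator entry points. A minimal usage sketch of create_manifests under those signatures; the model path, component names, and title below are illustrative placeholders, not values taken from this patch series:

    from schematic.manifest.generator import create_manifests

    # Sketch only: placeholder model path and data types.
    results = create_manifests(
        path_to_data_model="tests/data/example.model.jsonld",
        data_types=["Patient", "Biospecimen"],
        output_format="dataframe",  # sidesteps Google Sheets credentials
        title="Example",
    )
    # With output_format="dataframe", each entry is a pandas DataFrame.
    for manifest_df in results:
        print(manifest_df.head())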
From 3dfa6cd43d4a0414ab175f5049c7297d62f5bcfb Mon Sep 17 00:00:00 2001
From: linglp
Date: Wed, 31 Jan 2024 14:37:35 -0500
Subject: [PATCH 110/199] correct doc string typo

---
 schematic/manifest/generator.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py
index 8822bc322..46175cd2e 100644
--- a/schematic/manifest/generator.py
+++ b/schematic/manifest/generator.py
@@ -1631,7 +1631,7 @@ def create_manifests(

     Args:
         path_to_data_model (str): str path to data model
-        data_type (list): a list of data types
+        data_types (list): a list of data types
         access_token (str, optional): synapse access token. Required when getting an existing manifest. Defaults to None.
         dataset_ids (list, optional): a list of dataset ids when generating an existing manifest. Defaults to None.
         output_format (str, optional):format of manifest. It has three options: google sheet, excel or dataframe. Defaults to None.

From bbabe96385bc7eea444f7b2f135e0747a5dbad4c Mon Sep 17 00:00:00 2001
From: linglp
Date: Thu, 1 Feb 2024 10:58:59 -0500
Subject: [PATCH 111/199] run black

---
 schematic/manifest/generator.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py
index 46175cd2e..090024f04 100644
--- a/schematic/manifest/generator.py
+++ b/schematic/manifest/generator.py
@@ -1579,7 +1579,7 @@ def create_single_manifest(

         Args:
             path_to_data_model (str): data model schema
-            graph_data_model (nx.MultiDiGraph): graph data model 
+            graph_data_model (nx.MultiDiGraph): graph data model
             data_type (str): data type of a manifest
             access_token (Optional[str], optional): synapse access token. Required when getting an existing manifest. Defaults to None.
             dataset_id (Optional[str], optional):dataset id when generating an existing manifest. Defaults to None. Defaults to None.
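For the single-manifest entry point documented in the hunk above, the graph argument has to be built first. A sketch assuming the same placeholder model; the parse-and-graph chain mirrors the code shown elsewhere in this series:

    from schematic.schemas.data_model_parser import DataModelParser
    from schematic.schemas.data_model_graph import DataModelGraph
    from schematic.manifest.generator import create_single_manifest

    # Build the nx.MultiDiGraph that create_single_manifest expects.
    parsed_model = DataModelParser(
        path_to_data_model="tests/data/example.model.jsonld"  # placeholder
    ).parse_model()
    graph_data_model = DataModelGraph(parsed_model).generate_data_model_graph()

    manifest_df = create_single_manifest(
        path_to_data_model="tests/data/example.model.jsonld",
        graph_data_model=graph_data_model,
        data_type="Patient",  # placeholder component
        output_format="dataframe",
    )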
From f2370ee8d66246ee22745814f1c7b40c574fb3c2 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice <85905780+mialy-defelice@users.noreply.github.com> Date: Thu, 1 Feb 2024 09:30:31 -0800 Subject: [PATCH 112/199] fix command to treat data_model_labels as a choice, rather than bool --- schematic/manifest/commands.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/schematic/manifest/commands.py b/schematic/manifest/commands.py index f52dc8e14..703249b08 100644 --- a/schematic/manifest/commands.py +++ b/schematic/manifest/commands.py @@ -108,7 +108,8 @@ def manifest(ctx, config): # use as `schematic manifest ...` @click.option( "--data_model_labels", "-dml", - is_flag=True, + default="class_label", + type=click.Choice(["display_label", "class_label"], case_sensitive=True), help=query_dict(manifest_commands, ("manifest", "get", "data_model_labels")), ) @click.pass_obj From 2e6b5362ce8ee5cc4586b53a3e563da96b6f1117 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice <85905780+mialy-defelice@users.noreply.github.com> Date: Thu, 1 Feb 2024 09:31:06 -0800 Subject: [PATCH 113/199] add data_model_labels option to create_manifests --- schematic/manifest/generator.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py index b8b8180bd..a552d4bac 100644 --- a/schematic/manifest/generator.py +++ b/schematic/manifest/generator.py @@ -1635,6 +1635,7 @@ def create_single_manifest( def create_manifests( path_to_data_model: str, data_types: list, + data_model_labels: str = 'class_label', access_token: Optional[str] = None, dataset_ids: Optional[list] = None, output_format: Literal["google_sheet", "excel", "dataframe"] = "google_sheet", @@ -1663,7 +1664,7 @@ def create_manifests( parsed_data_model = data_model_parser.parse_model() # Instantiate DataModelGraph - data_model_grapher = DataModelGraph(parsed_data_model) + data_model_grapher = DataModelGraph(parsed_data_model, data_model_labels) # Generate graph graph_data_model = data_model_grapher.generate_data_model_graph() From 4e11de2fb8426317038489643f0fc6113bc988d7 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice <85905780+mialy-defelice@users.noreply.github.com> Date: Thu, 1 Feb 2024 10:41:03 -0800 Subject: [PATCH 114/199] resolve merge error in schematic/models/commands, separate the table_column_names and data_model_labels --- schematic/models/commands.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/schematic/models/commands.py b/schematic/models/commands.py index f17f97880..6260125f7 100644 --- a/schematic/models/commands.py +++ b/schematic/models/commands.py @@ -114,6 +114,8 @@ def model(ctx, config): # use as `schematic model ...` default="class_label", type=click.Choice(["display_label", "class_label"], case_sensitive=True), help=query_dict(model_commands, ("model", "submit", "data_model_labels")), +) +@click.option( "--table_column_names", "-tcn", default="class_label", From ed9cc8b5874446445d65ef74daffb91f1aef36cf Mon Sep 17 00:00:00 2001 From: Mialy DeFelice <85905780+mialy-defelice@users.noreply.github.com> Date: Thu, 1 Feb 2024 10:41:56 -0800 Subject: [PATCH 115/199] add data_model_labels to get_manifest_route and remove unused lines --- schematic_api/api/routes.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/schematic_api/api/routes.py b/schematic_api/api/routes.py index 65dccb1ea..99cfd908c 100644 --- a/schematic_api/api/routes.py +++ b/schematic_api/api/routes.py @@ -267,6 +267,7 @@ def get_manifest_route( output_format=None, 
title=None, strict_validation: bool = True, + data_model_labels: str = 'class_label' ): """Get the immediate dependencies that are related to a given source node. Args: @@ -327,6 +328,7 @@ def get_manifest_route( path_to_data_model=schema_url, output_format=output_format, data_types=data_type, + data_model_labels=data_model_labels, title=title, access_token=access_token, dataset_ids=dataset_ids, @@ -441,15 +443,13 @@ def submit_manifest_route( # get path to temp data model file (csv or jsonld) as appropriate data_model = get_temp_model_path(schema_url) - # table_column_names = connexion.request.args["table_column_names"] if not table_column_names: table_column_names = "class_label" - # annotation_keys = connexion.request.args["retain_dl_formatting"] if not annotation_keys: annotation_keys = "class_label" - metadata_model = initalize_metadata_model(schema_url) + metadata_model = initalize_metadata_model(schema_url, data_model_labels) # Access token now stored in request header access_token = get_access_token() From c110a524377749c4290af1bb0c3964d46ef08976 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice <85905780+mialy-defelice@users.noreply.github.com> Date: Thu, 1 Feb 2024 10:43:00 -0800 Subject: [PATCH 116/199] add data_model_labels to test_get_schema and test_if_node_required --- tests/test_api.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/tests/test_api.py b/tests/test_api.py index e9461364d..e2dc2ad5f 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -352,7 +352,8 @@ def test_get_property_label_from_display_name(self, client, strict_camel_case): @pytest.mark.schematic_api class TestDataModelGraphExplorerOperation: def test_get_schema(self, client, data_model_jsonld): - params = {"schema_url": data_model_jsonld} + params = {"schema_url": data_model_jsonld, + "data_model_labels": 'class_label'} response = client.get( "http://localhost:3001/v1/schemas/get/schema", query_string=params ) @@ -366,7 +367,7 @@ def test_get_schema(self, client, data_model_jsonld): os.remove(response_dt) def test_if_node_required(test, client, data_model_jsonld): - params = {"schema_url": data_model_jsonld, "node_display_name": "FamilyHistory"} + params = {"schema_url": data_model_jsonld, "node_display_name": "FamilyHistory", "data_model_labels": "class_label"} response = client.get( "http://localhost:3001/v1/schemas/is_node_required", query_string=params From 70507b24a1a998d24a337aed9e9eb2d02b7e0c50 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice <85905780+mialy-defelice@users.noreply.github.com> Date: Thu, 1 Feb 2024 10:43:45 -0800 Subject: [PATCH 117/199] add data_model_labels to TestSchemaCli --- tests/test_cli.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/test_cli.py b/tests/test_cli.py index 0d3242d7a..4631e9a9c 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -49,8 +49,10 @@ def test_schema_convert_cli(self, runner, helpers): output_path = helpers.get_data_path("example.model.jsonld") + label_type = 'class_label' + result = runner.invoke( - schema, ["convert", data_model_csv_path, "--output_jsonld", output_path] + schema, ["convert", data_model_csv_path, "--output_jsonld", output_path, "--data_model_labels", label_type] ) assert result.exit_code == 0 From 3476d0d20676af0df88a9c0e404dbe2c108d7e21 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice <85905780+mialy-defelice@users.noreply.github.com> Date: Thu, 1 Feb 2024 10:44:33 -0800 Subject: [PATCH 118/199] add data_model_labels parameter throughout --- tests/test_manifest.py | 20 
++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/tests/test_manifest.py b/tests/test_manifest.py index 67f5785ef..0a6102f8a 100644 --- a/tests/test_manifest.py +++ b/tests/test_manifest.py @@ -18,7 +18,7 @@ logger = logging.getLogger(__name__) -def generate_graph_data_model(helpers, path_to_data_model): +def generate_graph_data_model(helpers, path_to_data_model, data_model_labels): """ Simple helper function to generate a networkx graph data model from a CSV or JSONLD data model """ @@ -31,7 +31,7 @@ def generate_graph_data_model(helpers, path_to_data_model): # Convert parsed model to graph # Instantiate DataModelGraph - data_model_grapher = DataModelGraph(parsed_data_model) + data_model_grapher = DataModelGraph(parsed_data_model, data_model_labels) # Generate graph graph_data_model = data_model_grapher.generate_data_model_graph() @@ -61,7 +61,7 @@ def manifest_generator(helpers, request): # Get graph data model graph_data_model = generate_graph_data_model( - helpers, path_to_data_model=path_to_data_model + helpers, path_to_data_model=path_to_data_model, data_model_labels='class_label', ) manifest_generator = ManifestGenerator( @@ -118,7 +118,7 @@ def test_init(self, helpers): # Get graph data model graph_data_model = generate_graph_data_model( - helpers, path_to_data_model=path_to_data_model + helpers, path_to_data_model=path_to_data_model, data_model_labels='class_label', ) generator = ManifestGenerator( @@ -153,7 +153,7 @@ def test_missing_root_error(self, helpers, data_type, exc, exc_message): # Get graph data model graph_data_model = generate_graph_data_model( - helpers, path_to_data_model=path_to_data_model + helpers, path_to_data_model=path_to_data_model, data_model_labels='class_label', ) # A LookupError should be raised and include message when the component cannot be found @@ -238,7 +238,7 @@ def test_get_manifest_excel(self, helpers, sheet_url, output_format, dataset_id) # Get graph data model graph_data_model = generate_graph_data_model( - helpers, path_to_data_model=path_to_data_model + helpers, path_to_data_model=path_to_data_model, data_model_labels='class_label', ) generator = ManifestGenerator( @@ -296,7 +296,7 @@ def test_get_manifest_no_annos(self, helpers, dataset_id): # Get graph data model graph_data_model = generate_graph_data_model( - helpers, path_to_data_model=path_to_data_model + helpers, path_to_data_model=path_to_data_model, data_model_labels='class_label', ) # Instantiate object with use_annotations set to True @@ -412,7 +412,7 @@ def test_add_root_to_component_without_additional_metadata( # Get graph data model graph_data_model = generate_graph_data_model( - helpers, path_to_data_model=path_to_data_model + helpers, path_to_data_model=path_to_data_model, data_model_labels='class_label', ) manifest_generator = ManifestGenerator( @@ -449,7 +449,7 @@ def test_add_root_to_component_with_additional_metadata( # Get graph data model graph_data_model = generate_graph_data_model( - helpers, path_to_data_model=path_to_data_model + helpers, path_to_data_model=path_to_data_model, data_model_labels='class_label', ) manifest_generator = ManifestGenerator( @@ -533,7 +533,7 @@ def test_update_dataframe_with_existing_df(self, helpers, existing_manifest): # Get graph data model graph_data_model = generate_graph_data_model( - helpers, path_to_data_model=path_to_data_model + helpers, path_to_data_model=path_to_data_model, data_model_labels='class_label', ) # Instantiate the Manifest Generator. 
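The tests above pin data_model_labels to 'class_label'. A hypothetical variant exercising the display_label mode from earlier in this series would call the same helper; display_label is only valid when display names carry no spaces or blacklisted characters:

    # Hypothetical variant: derive graph labels from display names instead.
    graph_data_model = generate_graph_data_model(
        helpers,
        path_to_data_model=path_to_data_model,
        data_model_labels="display_label",
    )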
From c5ab49593bafc840104bb0478423a59fd2fe3e9d Mon Sep 17 00:00:00 2001 From: Mialy DeFelice <85905780+mialy-defelice@users.noreply.github.com> Date: Thu, 1 Feb 2024 10:46:38 -0800 Subject: [PATCH 119/199] add data_model_labels to get_schema_pickle --- schematic_api/api/openapi/api.yaml | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/schematic_api/api/openapi/api.yaml b/schematic_api/api/openapi/api.yaml index b37e51bfb..4eb2710cd 100644 --- a/schematic_api/api/openapi/api.yaml +++ b/schematic_api/api/openapi/api.yaml @@ -835,18 +835,6 @@ paths: description: ID of file or folder. For example, for Synapse this would be the Synapse ID. example: syn30988314 required: true - - in: query - name: data_model_labels - schema: - type: string - nullable: true - enum: ["display_label", "class_label"] - default: 'class_label' - description: Choose how to set the label in the data model. - display_label, use the display name as a label, if it is valid (contains no blacklisted characters) otherwise will default to class_label. - class_label, default, use standard class or property label. - Do not change from default unless there is a real need, using 'display_label' can have consequences if not used properly. - required: false responses: "200": description: return "true" or "false" @@ -868,6 +856,18 @@ paths: example: >- https://raw.githubusercontent.com/Sage-Bionetworks/schematic/develop/tests/data/example.model.jsonld required: true + - in: query + name: data_model_labels + schema: + type: string + nullable: true + enum: ["display_label", "class_label"] + default: 'class_label' + description: Choose how to set the label in the data model. + display_label, use the display name as a label, if it is valid (contains no blacklisted characters) otherwise will default to class_label. + class_label, default, use standard class or property label. + Do not change from default unless there is a real need, using 'display_label' can have consequences if not used properly. + required: false responses: "200": description: A pickle file gets downloaded and local file path of the pickle file gets returned. 
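A sketch of exercising the endpoint affected above with the relocated data_model_labels query parameter; the localhost and schema URLs mirror the test-client values used elsewhere in this series, not production endpoints:

    import requests

    params = {
        "schema_url": (
            "https://raw.githubusercontent.com/Sage-Bionetworks/schematic"
            "/develop/tests/data/example.model.jsonld"
        ),
        "data_model_labels": "class_label",  # or "display_label"
    }
    response = requests.get(
        "http://localhost:3001/v1/schemas/get/schema", params=params, timeout=300
    )
    print(response.status_code)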
From 625e30afa5ebf6cfc7691376c48560ecebfe5471 Mon Sep 17 00:00:00 2001 From: linglp Date: Thu, 1 Feb 2024 13:47:03 -0500 Subject: [PATCH 120/199] add if statement for making sure len of result greater than 0 --- schematic_api/api/routes.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/schematic_api/api/routes.py b/schematic_api/api/routes.py index c38d41dd1..a365d9f09 100644 --- a/schematic_api/api/routes.py +++ b/schematic_api/api/routes.py @@ -283,11 +283,12 @@ def get_manifest_route(schema_url: str, use_annotations: bool, dataset_id=None, # return an excel file if output_format is set to "excel" if output_format == "excel": # should only contain one excel spreadsheet path - result = all_results[0] - dir_name = os.path.dirname(result) - file_name = os.path.basename(result) - mimetype='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet' - return send_from_directory(directory=dir_name, path=file_name, as_attachment=True, mimetype=mimetype, max_age=0) + if len(all_results) > 0: + result = all_results[0] + dir_name = os.path.dirname(result) + file_name = os.path.basename(result) + mimetype='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet' + return send_from_directory(directory=dir_name, path=file_name, as_attachment=True, mimetype=mimetype, max_age=0) return all_results From ced1c1d6ed72892012dde7b522d47eeeb9da6853 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice <85905780+mialy-defelice@users.noreply.github.com> Date: Thu, 1 Feb 2024 10:51:02 -0800 Subject: [PATCH 121/199] ran black on manifest/generator.py --- schematic/manifest/generator.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py index a552d4bac..acfcbebb5 100644 --- a/schematic/manifest/generator.py +++ b/schematic/manifest/generator.py @@ -1635,7 +1635,7 @@ def create_single_manifest( def create_manifests( path_to_data_model: str, data_types: list, - data_model_labels: str = 'class_label', + data_model_labels: str = "class_label", access_token: Optional[str] = None, dataset_ids: Optional[list] = None, output_format: Literal["google_sheet", "excel", "dataframe"] = "google_sheet", From 0ff295cdc2e0e25277aac17f6551b5387be20e29 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice <85905780+mialy-defelice@users.noreply.github.com> Date: Thu, 1 Feb 2024 21:50:25 -0800 Subject: [PATCH 122/199] fix bug in recording a list, now will record properly --- schematic/schemas/data_model_parser.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/schematic/schemas/data_model_parser.py b/schematic/schemas/data_model_parser.py index 680a7eb61..0020f2f8f 100644 --- a/schematic/schemas/data_model_parser.py +++ b/schematic/schemas/data_model_parser.py @@ -419,8 +419,7 @@ def gather_jsonld_attributes_relationships(self, model_jsonld: List[dict]) -> Di else: attr_rel_dictionary[p_attr_key][ "Relationships" - ].update( - {rel_csv_header: [entry[dn_jsonld_key]]} + ][rel_csv_header].extend([entry[dn_jsonld_key]] ) # If the parsed_val is not already recorded in the dictionary, add it elif attr_in_dict == False: From 5a7ea8eac165f26e8371134553a59783cc2dc623 Mon Sep 17 00:00:00 2001 From: linglp Date: Fri, 2 Feb 2024 10:47:52 -0500 Subject: [PATCH 123/199] add test endpoints --- schematic_api/api/openapi/api.yaml | 44 ++++++++++++++++++++++++++---- schematic_api/api/routes.py | 16 +++++++++-- 2 files changed, 53 insertions(+), 7 deletions(-) diff --git a/schematic_api/api/openapi/api.yaml 
b/schematic_api/api/openapi/api.yaml index 337dfc6b7..ece6616be 100644 --- a/schematic_api/api/openapi/api.yaml +++ b/schematic_api/api/openapi/api.yaml @@ -1255,8 +1255,8 @@ paths: /test_time_out: get: - summary: Test time out 1 - description: Test time out 1 + summary: sleep 59.9s + description: sleep 59.9s operationId: schematic_api.api.routes.test_time_out responses: "200": @@ -1272,8 +1272,8 @@ paths: /test_time_out_two: get: - summary: Test time out 2 - description: Test time out 2 + summary: sleep 60s + description: sleep 60s operationId: schematic_api.api.routes.test_time_out_two responses: "200": @@ -1285,4 +1285,38 @@ paths: "500": description: Test tags: - - Test \ No newline at end of file + - Test + + /test_time_out_three: + get: + summary: sleep 120s + description: sleep 120s + operationId: schematic_api.api.routes.test_time_out_three + responses: + "200": + description: Test + content: + text/plain: + schema: + type: string + "500": + description: Test + tags: + - Test + + /test_time_out_four: + get: + summary: sleep 180s + description: sleep 180s + operationId: schematic_api.api.routes.test_time_out_four + responses: + "200": + description: Test + content: + text/plain: + schema: + type: string + "500": + description: Test + tags: + - Test \ No newline at end of file diff --git a/schematic_api/api/routes.py b/schematic_api/api/routes.py index 23b3b41e8..400e920a1 100644 --- a/schematic_api/api/routes.py +++ b/schematic_api/api/routes.py @@ -1018,11 +1018,23 @@ def get_schematic_version() -> str: def test_time_out(): """return test time out """ - time.sleep(60) + time.sleep(59.9) return "okay" def test_time_out_two(): """return test time out """ - time.sleep(59.9) + time.sleep(60) + return "okay" + +def test_time_out_three(): + """return test time out + """ + time.sleep(120) + return "okay" + +def test_time_out_four(): + """return test time out + """ + time.sleep(180) return "okay" \ No newline at end of file From d4c40e311bac1e0cfbd040a7470ced1fe5b1361b Mon Sep 17 00:00:00 2001 From: andrewelamb Date: Fri, 2 Feb 2024 14:25:43 -0800 Subject: [PATCH 124/199] linted visualization module --- .github/workflows/test.yml | 2 +- schematic/visualization/__init__.py | 1 + .../visualization/attributes_explorer.py | 117 ++++--- schematic/visualization/commands.py | 28 +- schematic/visualization/tangled_tree.py | 311 +++++++++++------- 5 files changed, 268 insertions(+), 191 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index b2adf95f8..23b81b1d5 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -116,7 +116,7 @@ jobs: run: | # ran only on certain files for now # add here when checked - poetry run pylint schematic/configuration/*.py schematic/exceptions.py schematic/help.py schematic/loader.py schematic/version.py + poetry run pylint schematic/visualization/* schematic/configuration/*.py schematic/exceptions.py schematic/help.py schematic/loader.py schematic/version.py #---------------------------------------------- # run test suite diff --git a/schematic/visualization/__init__.py b/schematic/visualization/__init__.py index a96118fc3..d5526b1ad 100644 --- a/schematic/visualization/__init__.py +++ b/schematic/visualization/__init__.py @@ -1,2 +1,3 @@ +"""visualization imports""" from schematic.visualization.attributes_explorer import AttributesExplorer from schematic.visualization.tangled_tree import TangledTree diff --git a/schematic/visualization/attributes_explorer.py b/schematic/visualization/attributes_explorer.py index 
90877e7e8..71747f999 100644 --- a/schematic/visualization/attributes_explorer.py +++ b/schematic/visualization/attributes_explorer.py @@ -1,21 +1,22 @@ -import gc +"""Attributes Explorer Class""" import json import logging -import numpy as np import os + +import numpy as np import pandas as pd -from typing import Any, Dict, Optional, Text, List from schematic.schemas.data_model_parser import DataModelParser from schematic.schemas.data_model_graph import DataModelGraph, DataModelGraphExplorer from schematic.schemas.data_model_json_schema import DataModelJSONSchema - from schematic.utils.io_utils import load_json logger = logging.getLogger(__name__) class AttributesExplorer: + """AttributesExplorer class""" + def __init__( self, path_to_jsonld: str, @@ -46,7 +47,7 @@ def __init__( self.output_path = self.create_output_path("merged_csv") - def create_output_path(self, terminal_folder): + def create_output_path(self, terminal_folder: str) -> str: """Create output path to store Observable visualization data if it does not already exist. Args: self.path_to_jsonld @@ -62,20 +63,22 @@ def create_output_path(self, terminal_folder): os.makedirs(output_path) return output_path - def convert_string_cols_to_json(self, df: pd.DataFrame, cols_to_modify: list): + def convert_string_cols_to_json( + self, dataframe: pd.DataFrame, cols_to_modify: list[str] + ) -> pd.DataFrame: """Converts values in a column from strings to JSON list for upload to Synapse. """ - for col in df.columns: + for col in dataframe.columns: if col in cols_to_modify: - df[col] = df[col].apply( + dataframe[col] = dataframe[col].apply( lambda x: json.dumps([y.strip() for y in x]) if x != "NaN" and x and x == np.nan else x ) - return df + return dataframe - def parse_attributes(self, save_file=True): + def parse_attributes(self, save_file: bool = True) -> pd.DataFrame: """ Args: save_file (bool): True: merged_df is saved locally to output_path. @@ -92,13 +95,13 @@ def parse_attributes(self, save_file=True): component_dg = self.dmge.get_digraph_by_edge_type("requiresComponent") components = component_dg.nodes() - # For each data type to be loaded gather all attribtes the user would + # For each data type to be loaded gather all attributes the user would # have to provide. return self._parse_attributes(components, save_file) def parse_component_attributes( - self, component=None, save_file=True, include_index=True - ): + self, component=None, save_file: bool = True, include_index: bool = True + ) -> pd.DataFrame: """ Args: save_file (bool): True: merged_df is saved locally to output_path. @@ -115,10 +118,11 @@ def parse_component_attributes( if not component: raise ValueError("You must provide a component to visualize.") - else: - return self._parse_attributes([component], save_file, include_index) + return self._parse_attributes([component], save_file, include_index) - def _parse_attributes(self, components, save_file=True, include_index=True): + def _parse_attributes( + self, components: list, save_file=True, include_index=True + ) -> pd.DataFrame: """ Args: save_file (bool): True: merged_df is saved locally to output_path. @@ -138,22 +142,27 @@ def _parse_attributes(self, components, save_file=True, include_index=True): If unable hits an error while attempting to get conditional requirements. This error is likely to be found if there is a mismatch in naming. 
""" + # This function needs to be refactored, temporarily disabling some pylint errors + # pylint: disable=too-many-locals + # pylint: disable=too-many-nested-blocks + # pylint: disable=too-many-branches + # pylint: disable=too-many-statements - # For each data type to be loaded gather all attribtes the user would + # For each data type to be loaded gather all attributes the user would # have to provide. df_store = [] for component in components: - data_dict = {} + data_dict: dict = {} # get the json schema json_schema = self.data_model_js.get_json_validation_schema( source_node=component, schema_name=self.path_to_jsonld ) - # Gather all attribues, their valid values and requirements + # Gather all attributes, their valid values and requirements for key, value in json_schema["properties"].items(): data_dict[key] = {} - for k, v in value.items(): + for k, _ in value.items(): if k == "enum": data_dict[key]["Valid Values"] = value["enum"] if key in json_schema["required"]: @@ -163,20 +172,20 @@ def _parse_attributes(self, components, save_file=True, include_index=True): data_dict[key]["Component"] = component # Add additional details per key (from the JSON-ld) for dic in self.jsonld["@graph"]: - if "sms:displayName" in dic.keys(): + if "sms:displayName" in dic: key = dic["sms:displayName"] - if key in data_dict.keys(): + if key in data_dict: data_dict[key]["Attribute"] = dic["sms:displayName"] data_dict[key]["Label"] = dic["rdfs:label"] data_dict[key]["Description"] = dic["rdfs:comment"] if "validationRules" in dic.keys(): data_dict[key]["Validation Rules"] = dic["validationRules"] # Find conditional dependencies - if "allOf" in json_schema.keys(): + if "allOf" in json_schema: for conditional_dependencies in json_schema["allOf"]: key = list(conditional_dependencies["then"]["properties"])[0] try: - if key in data_dict.keys(): + if key in data_dict: if "Cond_Req" not in data_dict[key].keys(): data_dict[key]["Cond_Req"] = [] data_dict[key]["Conditional Requirements"] = [] @@ -186,11 +195,12 @@ def _parse_attributes(self, components, save_file=True, include_index=True): value = conditional_dependencies["if"]["properties"][ attribute ]["enum"] - # Capitalize attribute if it begins with a lowercase letter, for aesthetics. + # Capitalize attribute if it begins with a lowercase + # letter, for aesthetics. if attribute[0].islower(): attribute = attribute.capitalize() - # Remove "Type" (i.e. turn "Biospecimen Type" to "Biospcimen") + # Remove "Type" (i.e. turn "Biospecimen Type" to "Biospecimen") if "Type" in attribute: attribute = attribute.split(" ")[0] @@ -207,38 +217,37 @@ def _parse_attributes(self, components, save_file=True, include_index=True): data_dict[key]["Conditional Requirements"].extend( [conditional_statement] ) - except: + except Exception as exc: raise ValueError( - f"There is an error getting conditional requirements related " - "to the attribute: {key}. The error is likely caused by naming inconsistencies (e.g. uppercase, camelcase, ...)" - ) - - for key, value in data_dict.items(): - if "Conditional Requirements" in value.keys(): + ( + "There is an error getting conditional requirements related " + f"to the attribute: {key}. The error is likely caused by naming " + "inconsistencies (e.g. 
uppercase, camelcase, ...)" + ) + ) from exc + + for outer_dict_key, inner_dict in data_dict.items(): + if "Conditional Requirements" in inner_dict.keys(): ## reformat conditional requirement + conditional_requirements = inner_dict["Conditional Requirements"] # get all attributes - attr_lst = [ - i.split(" is ")[-1] - for i in data_dict[key]["Conditional Requirements"] - ] + attr_lst = [i.split(" is ")[-1] for i in conditional_requirements] # join a list of attributes by using OR attr_str = " OR ".join(attr_lst) # reformat the conditional requirement - component_name = data_dict[key]["Conditional Requirements"][ - 0 - ].split(" is ")[0] + component_name = conditional_requirements[0].split(" is ")[0] + conditional_statement_str = ( - f' If {component_name} is {attr_str} then "{key}" is required' + f" If {component_name} is {attr_str} then " + f'"{outer_dict_key}" is required' ) + conditional_requirements = conditional_statement_str - data_dict[key][ - "Conditional Requirements" - ] = conditional_statement_str - df = pd.DataFrame(data_dict) - df = df.T + data_dict_df = pd.DataFrame(data_dict) + data_dict_df = data_dict_df.T cols = [ "Attribute", "Label", @@ -250,11 +259,12 @@ def _parse_attributes(self, components, save_file=True, include_index=True): "Validation Rules", "Component", ] - cols = [col for col in cols if col in df.columns] - df = df[cols] - df = self.convert_string_cols_to_json(df, ["Valid Values"]) - # df.to_csv(os.path.join(csv_output_path, data_type + '.vis_data.csv')) - df_store.append(df) + cols = [col for col in cols if col in data_dict_df.columns] + data_dict_df = data_dict_df[cols] + data_dict_df = self.convert_string_cols_to_json( + data_dict_df, ["Valid Values"] + ) + df_store.append(data_dict_df) merged_attributes_df = pd.concat(df_store, join="outer") cols = [ @@ -271,12 +281,11 @@ def _parse_attributes(self, components, save_file=True, include_index=True): cols = [col for col in cols if col in merged_attributes_df.columns] merged_attributes_df = merged_attributes_df[cols] - if save_file == True: + if save_file: return merged_attributes_df.to_csv( os.path.join( self.output_path, self.schema_name + "attributes_data.vis_data.csv" ), index=include_index, ) - elif save_file == False: - return merged_attributes_df.to_csv(index=include_index) + return merged_attributes_df.to_csv(index=include_index) diff --git a/schematic/visualization/commands.py b/schematic/visualization/commands.py index 5ecc4f8f7..d69354a7b 100644 --- a/schematic/visualization/commands.py +++ b/schematic/visualization/commands.py @@ -1,10 +1,14 @@ -#!/usr/bin/env python3 +"""visualization commands""" +# pylint: disable=unused-argument +# pylint: disable=useless-return +# pylint: disable=unused-variable import logging import sys +from typing import Any import click -import click_log +import click_log # type: ignore from schematic.visualization.attributes_explorer import AttributesExplorer from schematic.visualization.tangled_tree import TangledTree @@ -16,10 +20,11 @@ logger = logging.getLogger(__name__) click_log.basic_config(logger) -CONTEXT_SETTINGS = dict(help_option_names=["--help", "-h"]) # help options +CONTEXT_SETTINGS = {"help_option_names": ["--help", "-h"]} # help options -# invoke_without_command=True -> forces the application not to show aids before losing them with a --h +# invoke_without_command=True -> forces the application not to show aids before +# losing them with a --h @click.group(context_settings=CONTEXT_SETTINGS, invoke_without_command=True) 
@click_log.simple_verbosity_option(logger) @click.option( @@ -30,17 +35,18 @@ help=query_dict(model_commands, ("model", "config")), ) @click.pass_context -def viz(ctx, config): # use as `schematic model ...` +def viz(ctx: Any, config: str) -> None: # use as `schematic model ...` """ Sub-commands for Visualization methods. """ try: + # pylint: disable=logging-fstring-interpolation logger.debug(f"Loading config file contents in '{config}'") CONFIG.load_config(config) ctx.obj = CONFIG - except ValueError as e: + except ValueError as exc: logger.error("'--config' not provided or environment variable not set.") - logger.exception(e) + logger.exception(exc) sys.exit(1) @@ -49,8 +55,8 @@ def viz(ctx, config): # use as `schematic model ...` ) @click_log.simple_verbosity_option(logger) @click.pass_obj -def get_attributes(ctx): - """ """ +def get_attributes(ctx: Any) -> None: + """Gets attributes""" # Get JSONLD file path path_to_jsonld = CONFIG.model_location log_value_from_config("jsonld", path_to_jsonld) @@ -74,7 +80,7 @@ def get_attributes(ctx): help=query_dict(viz_commands, ("visualization", "tangled_tree", "text_format")), ) @click.pass_obj -def get_tangled_tree_text(ctx, figure_type, text_format): +def get_tangled_tree_text(ctx: Any, figure_type: str, text_format: str) -> None: """Get text to be placed on the tangled tree visualization.""" # Get JSONLD file path path_to_jsonld = CONFIG.model_location @@ -97,7 +103,7 @@ def get_tangled_tree_text(ctx, figure_type, text_format): help=query_dict(viz_commands, ("visualization", "tangled_tree", "figure_type")), ) @click.pass_obj -def get_tangled_tree_component_layers(ctx, figure_type): +def get_tangled_tree_component_layers(ctx: Any, figure_type: str) -> None: """Get the components that belong in each layer of the tangled tree visualization.""" # Get JSONLD file path path_to_jsonld = CONFIG.model_location diff --git a/schematic/visualization/tangled_tree.py b/schematic/visualization/tangled_tree.py index d0de01bf7..eaba44fb1 100644 --- a/schematic/visualization/tangled_tree.py +++ b/schematic/visualization/tangled_tree.py @@ -1,36 +1,31 @@ +"""Tangled tree class""" + + from io import StringIO import json import logging -import networkx as nx -import numpy as np import os from os import path -import pandas as pd +from typing import Optional, Any -# allows specifying explicit variable types -from typing import Any, Dict, Optional, Text, List +import networkx as nx # type: ignore +import numpy as np +import pandas as pd -from schematic.utils.viz_utils import visualize from schematic.visualization.attributes_explorer import AttributesExplorer - from schematic.schemas.data_model_parser import DataModelParser from schematic.schemas.data_model_graph import DataModelGraph, DataModelGraphExplorer -from schematic.schemas.data_model_relationships import DataModelRelationships - -from schematic import LOADER from schematic.utils.io_utils import load_json -from copy import deepcopy - -# Make sure to have newest version of decorator logger = logging.getLogger(__name__) -# OUTPUT_DATA_DIR = str(Path('tests/data/visualization/AMPAD').resolve()) -# DATA_DIR = str(Path('tests/data').resolve()) -class TangledTree(object): - """ """ +class TangledTree: + """Tangled tree class""" + + # pylint: disable=too-many-instance-attributes + # pylint: disable=invalid-name def __init__( self, @@ -74,7 +69,15 @@ def __init__( self.text_csv_output_path = self.ae.create_output_path("text_csv") self.json_output_path = self.ae.create_output_path("tangled_tree_json") - def 
strip_double_quotes(self, string): + def strip_double_quotes(self, string: str) -> str: + """Removes double quotes from string + + Args: + string (str): The string to remove quotes from + + Returns: + str: The processed string + """ # Remove double quotes from beginning and end of string. if string.startswith('"') and string.endswith('"'): string = string[1:-1] @@ -83,7 +86,9 @@ def strip_double_quotes(self, string): return string def get_text_for_tangled_tree(self, text_type, save_file=False): - """Gather the text that needs to be either higlighted or plain for the tangled tree visualization. + """ + Gather the text that needs to be either highlighted or plain for the + tangled tree visualization. Args: text_type (str): Choices = ['highlighted', 'plain'], determines the type of text rendering to return. @@ -92,6 +97,7 @@ def get_text_for_tangled_tree(self, text_type, save_file=False): If save_file==True: Saves plain or highlighted text as a CSV (to disk). save_file==False: Returns plain or highlighted text as a csv string. """ + # pylint: disable=too-many-locals # Get nodes in the digraph, many more nodes returned if figure type is dependency cdg = self.dmge.get_digraph_by_edge_type(self.dependency_type) nodes = cdg.nodes() @@ -107,7 +113,7 @@ def get_text_for_tangled_tree(self, text_type, save_file=False): highlighted = [] plain = [] - # For each component node in the tangled tree gather the plain and higlighted text. + # For each component node in the tangled tree gather the plain and highlighted text. for node in component_nodes: # Get the highlighted components based on figure_type if self.figure_type == "component": @@ -117,17 +123,18 @@ def get_text_for_tangled_tree(self, text_type, save_file=False): elif self.figure_type == "dependency": highlight_descendants = [node] - # Format text to be higlighted and gather text to be formated plain. + # Format text to be highlighted and gather text to be formatted plain. if not highlight_descendants: - # If there are no highlighted descendants just highlight the selected node (format for observable.) + # If there are no highlighted descendants just highlight the selected + # node (format for observable.) highlighted.append([node, "id", node]) # Gather all the text as plain text. plain_descendants = [n for n in nodes if n != node] else: - # Format higlighted text for Observable. + # Format highlighted text for Observable. for hd in highlight_descendants: highlighted.append([node, "id", hd]) - # Gather the non-higlighted text as plain text descendants. + # Gather the non-highlighted text as plain text descendants. plain_descendants = [ node for node in nodes if node not in highlight_descendants ] @@ -142,46 +149,47 @@ def get_text_for_tangled_tree(self, text_type, save_file=False): ) # Depending on input either export csv locally to disk or as a string. - if save_file == True: + if save_file: file_name = f"{self.schema_abbr}_{self.figure_type}_{text_type}.csv" df.to_csv(os.path.join(self.text_csv_output_path, file_name)) - return - elif save_file == False: - return df.to_csv() + return None + + return df.to_csv() def get_topological_generations(self): """Gather topological_gen, nodes and edges based on figure type. Outputs: topological_gen (List(list)):list of lists. Indicates layers of nodes. - nodes: (Networkx NodeView) Nodes of the component or dependency graph. When iterated over it functions like a list. - edges: (Networkx EdgeDataView) Edges of component or dependency graph. When iterated over it works like a list of tuples. 
+ nodes: (Networkx NodeView) Nodes of the component or dependency graph. + When iterated over it functions like a list. + edges: (Networkx EdgeDataView) Edges of component or dependency graph. + When iterated over it works like a list of tuples. """ # Get nodes in the digraph digraph = self.dmge.get_digraph_by_edge_type(self.dependency_type) nodes = digraph.nodes() # Get subgraph - # mm_graph = self.sg.se.get_nx_schema() - # subg = self.sg.get_subgraph_by_edge_type(mm_graph, self.dependency_type) - subg = self.dmge.get_subgraph_by_edge_type(self.dependency_type) + subgraph = self.dmge.get_subgraph_by_edge_type(self.dependency_type) # Get edges and topological_gen based on figure type. if self.figure_type == "component": edges = digraph.edges() - topological_gen = list(reversed(list(nx.topological_generations(subg)))) + topological_gen = list(reversed(list(nx.topological_generations(subgraph)))) elif self.figure_type == "dependency": rev_digraph = nx.DiGraph.reverse(digraph) edges = rev_digraph.edges() - topological_gen = list(nx.topological_generations(subg)) + topological_gen = list(nx.topological_generations(subgraph)) - return topological_gen, nodes, edges, subg + return topological_gen, nodes, edges, subgraph def remove_unwanted_characters_from_conditional_statement( self, cond_req: str ) -> str: """Remove unwanted characters from conditional statement - Example of conditional requirement: If File Format IS "BAM" OR "CRAM" OR "CSV/TSV" then Genome Build is required + Example of conditional requirement: If File Format IS "BAM" OR "CRAM" OR + "CSV/TSV" then Genome Build is required Example output: File Format IS "BAM" OR "CRAM" OR "CSV/TSV" """ if "then" in cond_req: @@ -195,11 +203,12 @@ def remove_unwanted_characters_from_conditional_statement( def get_ca_alias(self, conditional_requirements: list) -> dict: """Get the alias for each conditional attribute. - NOTE: Obtaining attributes(attr) and aliases(ali) in this function is specific to how formatting - is set in AttributesExplorer. If that formatting changes, this section - will likely break or in the worst case have a silent error. + NOTE: Obtaining attributes(attr) and aliases(ali) in this function is specific + to how formatting is set in AttributesExplorer. If that formatting changes, + this section will likely break or in the worst case have a silent error. Input: - conditional_requirements_list (list): list of strings of conditional requirements from outputs of AttributesExplorer. + conditional_requirements_list (list): list of strings of conditional + requirements from outputs of AttributesExplorer. Output: ca_alias (dict): key: alias (attribute response) @@ -213,7 +222,7 @@ def get_ca_alias(self, conditional_requirements: list) -> dict: for req in conditional_requirements ] - for i, req in enumerate(conditional_requirements): + for _, req in enumerate(conditional_requirements): if "OR" not in req: attr, ali = req.split(" is ") attr = "".join(attr.split()) @@ -231,7 +240,8 @@ def gather_component_dependency_info(self, cn, attributes_df): """Gather all component dependency information. Inputs: cn: (str) component name - attributes_df: (Pandas DataFrame) Details for all attributes across all components. From AttributesExplorer. + attributes_df: (Pandas DataFrame) Details for all attributes across all components. + From AttributesExplorer. 
Outputs: conditional_attributes (list): List of conditional attributes for a particular component ca_alias (dict): @@ -253,17 +263,12 @@ def gather_component_dependency_info(self, cn, attributes_df): if "Cond_Req" in attributes_df.columns: conditional_attributes = list( attributes_df[ - (attributes_df["Cond_Req"] == True) - & (attributes_df["Component"] == cn) + (attributes_df["Cond_Req"]) & (attributes_df["Component"] == cn) ]["Label"] ) - ca_df = attributes_df[ - (attributes_df["Cond_Req"] == True) & (attributes_df["Component"] == cn) - ] conditional_requirements = list( attributes_df[ - (attributes_df["Cond_Req"] == True) - & (attributes_df["Component"] == cn) + (attributes_df["Cond_Req"]) & (attributes_df["Component"] == cn) ]["Conditional Requirements"] ) ca_alias = self.get_ca_alias(conditional_requirements) @@ -277,16 +282,21 @@ def gather_component_dependency_info(self, cn, attributes_df): return conditional_attributes, ca_alias, all_attributes - def find_source_nodes(self, nodes, edges, all_attributes=[]): + def find_source_nodes(self, nodes, edges, all_attributes=None): """Find all nodes in the graph that do not have a parent node. Inputs: - nodes: (Networkx NodeView) Nodes of the component or dependency graph. When iterated over it functions like a list. - edges: (Networkx EdgeDataView) Edges of component or dependency graph. When iterated over it works like a list of tuples. - attributes_df: (Pandas DataFrame) Details for all attributes across all components. From AttributesExplorer. + nodes: (Networkx NodeView) Nodes of the component or dependency graph. + When iterated over it functions like a list. + edges: (Networkx EdgeDataView) Edges of component or dependency graph. + When iterated over it works like a list of tuples. + attributes_df: (Pandas DataFrame) Details for all attributes across all + components. From AttributesExplorer. Outputs: source_nodes (list(str)): List of parentless nodes in """ + if all_attributes is None: + all_attributes = [] # Find edges that are not source nodes. not_source = [] for node in nodes: @@ -305,21 +315,25 @@ def find_source_nodes(self, nodes, edges, all_attributes=[]): source_nodes.append(node) return source_nodes - def get_parent_child_dictionary(self, nodes, edges, all_attributes=[]): - """Based on the dependency type, create dictionaries between parent and child and child and parent attributes. + def get_parent_child_dictionary(self, edges, all_attributes=None): + """ + Based on the dependency type, create dictionaries between parent and + child and child and parent attributes. Input: - nodes: (Networkx NodeView) Nodes of the component or dependency graph. edges: (Networkx EdgeDataView (component figure) or List(list) (dependency figure)) Edges of component or dependency graph. 
all_attributes: Output: child_parents (dict): key: child - value: list of the childs parents + value: list of the child's parents parent_children (dict): key: parent value: list of the parents children """ + # pylint: disable=too-many-branches + if all_attributes is None: + all_attributes = [] child_parents = {} parent_children = {} @@ -327,7 +341,7 @@ def get_parent_child_dictionary(self, nodes, edges, all_attributes=[]): # Construct child_parents dictionary for edge in edges: # Add child as a key - if edge[0] not in child_parents.keys(): + if edge[0] not in child_parents: child_parents[edge[0]] = [] # Add parents to list @@ -336,7 +350,7 @@ def get_parent_child_dictionary(self, nodes, edges, all_attributes=[]): # Construct parent_children dictionary for edge in edges: # Add parent as a key - if edge[1] not in parent_children.keys(): + if edge[1] not in parent_children: parent_children[edge[1]] = [] # Add children to list @@ -348,10 +362,10 @@ def get_parent_child_dictionary(self, nodes, edges, all_attributes=[]): # Check if child is an attribute for the current component if edge[0] in all_attributes: # Add child as a key - if edge[0] not in child_parents.keys(): + if edge[0] not in child_parents: child_parents[edge[0]] = [] - # Add parent to list if it is an attriute for the current component + # Add parent to list if it is an attribute for the current component if edge[1] in all_attributes: child_parents[edge[0]].append(edge[1]) @@ -360,16 +374,16 @@ def get_parent_child_dictionary(self, nodes, edges, all_attributes=[]): # Check if parent is an attribute for the current component if edge[1] in all_attributes: # Add parent as a key - if edge[1] not in parent_children.keys(): + if edge[1] not in parent_children: parent_children[edge[1]] = [] - # Add child to list if it is an attriute for the current component + # Add child to list if it is an attribute for the current component if edge[0] in all_attributes: parent_children[edge[1]].append(edge[0]) return child_parents, parent_children - def alias_edges(self, ca_alias: dict, edges) -> List[list]: + def alias_edges(self, ca_alias: dict, edges) -> list[list]: """Create new edges based on aliasing between an attribute and its response. Purpose: Create aliased edges. @@ -387,16 +401,18 @@ def alias_edges(self, ca_alias: dict, edges) -> List[list]: ca_alias (dict): key: alias (attribute response) value: attribute - edges (Networkx EdgeDataView): Edges of component or dependency graph. When iterated over it works like a list of tuples. + edges (Networkx EdgeDataView): Edges of component or dependency graph. + When iterated over it works like a list of tuples. Output: - aliased_edges (List[lists]) of aliased edges. + aliased_edges (list[list]) of aliased edges. 
""" aliased_edges = [] - for i, edge in enumerate(edges): + for _, edge in enumerate(edges): # construct one set of edges at a time edge_set = [] - # If the first edge has an alias add alias to the first position in the current edge set + # If the first edge has an alias add alias to the first + # position in the current edge set if edge[0] in ca_alias.keys(): edge_set.append(ca_alias[edge[0]]) @@ -404,7 +420,8 @@ def alias_edges(self, ca_alias: dict, edges) -> List[list]: else: edge_set.append(edge[0]) - # If the secod edge has an alias add alias to the first position in the current edge set + # If the second edge has an alias add alias to the first + # position in the current edge set if edge[1] in ca_alias.keys(): edge_set.append(ca_alias[edge[1]]) @@ -440,13 +457,14 @@ def prune_expand_topological_gen( pruned_topological_gen = [] # For each layer(gen) in the topological generation list - for i, layer in enumerate(topological_gen): + for _, layer in enumerate(topological_gen): current_layer = [] next_layer = [] # For each node in the layer for node in layer: - # If the node is relevant to this component and is not a conditional attribute add it to the current layer. + # If the node is relevant to this component and is not a conditional + # attribute add it to the current layer. if node in all_attributes and node not in conditional_attributes: current_layer.append(node) @@ -462,13 +480,19 @@ def prune_expand_topological_gen( return pruned_topological_gen - def get_base_layers(self, topological_gen, child_parents, source_nodes, cn): + def get_base_layers( + self, + topological_gen: list[list], + child_parents: dict, + source_nodes: list, + cn: str, + ) -> tuple[dict[str, Any], dict[str, Any]]: """ Purpose: Reconfigure topological gen to move things back appropriate layers if they would have a back reference. - The Tangle Tree figure requrires an acyclic directed graph that has additional + The Tangle Tree figure requires an acyclic directed graph that has additional layering rules between connected nodes. - If there is a backward connection then the line connecting them will break (this would suggest a cyclic connection.) @@ -485,12 +509,12 @@ def get_base_layers(self, topological_gen, child_parents, source_nodes, cn): topological_gen: list of lists. Indicates layers of nodes. child_parents (dict): key: child - value: list of the childs parents + value: list of the child's parents source_nodes: list, list of nodes that do not have a parent. cn: str, component name, default='' Output: base_layers: dict, key: component name, value: layer - represents initial layering of toplogical_gen + represents initial layering of topological_gen base_layers_copy_copy: dict, key: component name, value: layer represents the final layering after moving the components/attributes to their desired layer.c @@ -518,7 +542,8 @@ def get_base_layers(self, topological_gen, child_parents, source_nodes, cn): # Get the max layer a parent of the node can be found. max_parent_level = max(parent_levels) - # Move the node one layer beyond the max parent node position, so it will be downstream of its parents. + # Move the node one layer beyond the max parent node position, + # so it will be downstream of its parents. base_layers_copy[node] = max_parent_level + 1 # Make another version of updated positions iterate on further. 
@@ -541,7 +566,8 @@ def get_base_layers(self, topological_gen, child_parents, source_nodes, cn): # that the connections will not be backwards (and result in a broken line) for par in child_parents[node]: # For a given parent determine if its a source node and that the parents - # are not already at level 0, and the parent is not the current component node. + # are not already at level 0, and the parent is not the current component + # node. if ( par in source_nodes and ( @@ -562,25 +588,30 @@ def get_base_layers(self, topological_gen, child_parents, source_nodes, cn): # Move the node one position downstream of its max parent level. base_layers_copy_copy[node] = max_parent_level + 1 - # For each parental position to modify, move the parents level up to the max_parent_level. + # For each parental position to modify, move the parents level up to + # the max_parent_level. for par in modify_par: base_layers_copy_copy[par] = max_parent_level return base_layers, base_layers_copy_copy def adjust_node_placement( - self, base_layers_copy_copy, base_layers, topological_gen - ): - """Reorder nodes within topological_generations to match how they were ordered in base_layers_copy_copy + self, + base_layers_copy_copy: dict[str, Any], + base_layers: dict[str, Any], + topological_gen: list[list], + ) -> list[list]: + """Reorder nodes within topological_generations to match how they were ordered in + base_layers_copy_copy Input: topological_gen: list of lists. Indicates layers of nodes. base_layers: dict, key: component name, value: layer - represents initial layering of toplogical_gen + represents initial layering of topological_gen base_layers_copy_copy: dict, key: component name, value: layer represents the final layering after moving the components/attributes to their desired layer. Output: - topological_gen: same format but as the incoming topologial_gen but + topological_gen: same format but as the incoming topological_gen but ordered to match base_layers_copy_copy. """ if self.figure_type == "component": @@ -614,38 +645,48 @@ def adjust_node_placement( topological_gen[base_layers[node]].remove(node) return topological_gen - def move_source_nodes_to_bottom_of_layer(self, node_layers, source_nodes): + def move_source_nodes_to_bottom_of_layer( + self, node_layers: list[list], source_nodes: list + ) -> list[list]: """For aesthetic purposes move source nodes to the bottom of their respective layers. Input: - node_layers (List(list)): Lists of lists of each layer and the nodes contained in that layer as strings. + node_layers (List(list)): Lists of lists of each layer and the nodes contained + in that layer as strings. source_nodes (list): list of nodes that do not have a parent. Output: node_layers (List(list)): modified to move source nodes to the bottom of each layer. """ - for i, layer in enumerate(node_layers): + for _, layer in enumerate(node_layers): nodes_to_move = [] for node in layer: if node in source_nodes: nodes_to_move.append(node) for node in nodes_to_move: - node_layers[i].remove(node) - node_layers[i].append(node) + layer.remove(node) + layer.append(node) return node_layers def get_layers_dict_list( - self, node_layers, child_parents, parent_children, all_parent_children + self, + node_layers: list[list], + child_parents: dict, + parent_children: dict, + all_parent_children: dict, ): - """Convert node_layers to a list of lists of dictionaries that specifies each node and its parents (if applicable). 
+ """Convert node_layers to a list of lists of dictionaries that specifies each node and + its parents (if applicable). Inputs: - node_layers: list of lists of each layer and the nodes contained in that layer as strings. + node_layers: list of lists of each layer and the nodes contained in that layer + as strings. child_parents (dict): key: child - value: list of the childs parents + value: list of the child's parents parent_children (dict): key: parent value: list of the parents children Outputs: - layers_list (List(list): list of lists of dictionaries that specifies each node and its parents (if applicable) + layers_list (List(list): list of lists of dictionaries that specifies each node and its + parents (if applicable) """ num_layers = len(node_layers) layers_list = [[] for i in range(0, num_layers)] @@ -676,28 +717,29 @@ def get_layers_dict_list( return layers_list - def get_node_layers_json( + def get_node_layers_json( # pylint: disable=too-many-arguments self, - topological_gen, - source_nodes, - child_parents, - parent_children, - cn="", - all_parent_children=None, - ): + topological_gen: list[list], + source_nodes: list[str], + child_parents: dict, + parent_children: dict, + cn: str = "", + all_parent_children: Optional[dict] = None, + ) -> str: """Return all the layers of a single tangled tree as a JSON String. Inputs: topological_gen:list of lists. Indicates layers of nodes. source_nodes: list of nodes that do not have a parent. child_parents (dict): key: child - value: list of the childs parents + value: list of the child's parents parent_children (dict): key: parent value: list of the parents children all_parent_children (dict): key: parent - value: list of the parents children (including all downstream nodes). Default to an empty dictionary + value: list of the parents children (including all downstream nodes). + Default to an empty dictionary Outputs: layers_json (JSON String): Layers of nodes in the tangled tree as a json string. """ @@ -719,7 +761,7 @@ def get_node_layers_json( # Convert layers to a list of dictionaries if not all_parent_children: # default to an empty dictionary - all_parent_children = dict() + all_parent_children = {} layers_dicts = self.get_layers_dict_list( node_layers, child_parents, parent_children, all_parent_children @@ -730,23 +772,30 @@ def get_node_layers_json( return layers_json - def save_outputs(self, save_file, layers_json, cn="", all_layers=None): + def save_outputs( + self, + save_file: bool, + layers_json, + cn: str = "", + all_layers: Optional[list[str]] = None, + ): """ Inputs: save_file (bool): Indicates whether to save a file locally or not.: layers_json (JSON String): Layers of nodes in the tangled tree as a json string. cn (str): component name, default='' - all_layers (list of json strings): Each string represents contains the layers for a single tangled tree. - If a dependency figure the list is added to each time this function is called, so starts incomplete. - default=[]. + all_layers (list of json strings): Each string represents contains the layers for + a single tangled tree. If a dependency figure the list is added to each time + this function is called, so starts incomplete. default=[]. Outputs: all_layers (list of json strings): - If save_file == False: Each string represents contains the layers for a single tangled tree. + If save_file == False: Each string represents contains the layers for a single + tangled tree. If save_file ==True: is an empty list. 
""" if all_layers is None: all_layers = [] - if save_file == True: + if save_file: if cn: output_file_name = ( f"{self.schema_abbr}_{self.figure_type}_{cn}_tangled_tree.json" @@ -756,24 +805,34 @@ def save_outputs(self, save_file, layers_json, cn="", all_layers=None): f"{self.schema_abbr}_{self.figure_type}_tangled_tree.json" ) with open( - os.path.join(self.json_output_path, output_file_name), "w" + os.path.join(self.json_output_path, output_file_name), + mode="w", + encoding="utf-8", ) as outfile: outfile.write(layers_json) + + # pylint: disable=logging-fstring-interpolation logger.info( - f"Tangled Tree JSON String saved to {os.path.join(self.json_output_path, output_file_name)}." + ( + "Tangled Tree JSON String saved to " + f"{os.path.join(self.json_output_path, output_file_name)}" + ) ) all_layers = layers_json - elif save_file == False: + else: all_layers.append(layers_json) return all_layers - def get_ancestors_nodes(self, subgraph, components): + def get_ancestors_nodes( + self, subgraph: nx.DiGraph, components: list[str] + ) -> dict[str, list[str]]: """ Inputs: subgraph: networkX graph object components: a list of nodes outputs: - all_parent_children: a dictionary that indicates a list of children (including all the intermediate children) of a given node + all_parent_children: a dictionary that indicates a list of children + (including all the intermediate children) of a given node """ all_parent_children = {} for component in components: @@ -784,35 +843,37 @@ def get_ancestors_nodes(self, subgraph, components): return all_parent_children - def get_tangled_tree_layers(self, save_file=True): + def get_tangled_tree_layers(self, save_file: bool = True): """Based on user indicated figure type, construct the layers of nodes of a tangled tree. Inputs: save_file (bool): Indicates whether to save a file locally or not. Outputs: all_layers (list of json strings): - If save_file == False: Each string represents contains the layers for a single tangled tree. + If save_file == False: Each string represents contains the layers + for a single tangled tree. If save_file ==True: is an empty list. Note on Dependency Tangled Tree: - If there are many conditional requirements associated with a depependency, and those + If there are many conditional requirements associated with a dependency, and those conditional requirements have overlapping attributes associated with them the tangled tree will only report one """ + # pylint: disable=too-many-locals # Gather the data model's, topological generations, nodes and edges - topological_gen, nodes, edges, subg = self.get_topological_generations() + topological_gen, nodes, edges, subgraph = self.get_topological_generations() if self.figure_type == "component": # Gather all source nodes source_nodes = self.find_source_nodes(nodes, edges) # Map all children to their parents and vice versa - child_parents, parent_children = self.get_parent_child_dictionary( - nodes, edges - ) + child_parents, parent_children = self.get_parent_child_dictionary(edges) # find all the downstream nodes - all_parent_children = self.get_ancestors_nodes(subg, parent_children.keys()) + all_parent_children = self.get_ancestors_nodes( + subgraph, parent_children.keys() + ) # Get the layers that each node belongs to. layers_json = self.get_node_layers_json( @@ -855,7 +916,7 @@ def get_tangled_tree_layers(self, save_file=True): # Gather relationships between children and their parents. 
child_parents, parent_children = self.get_parent_child_dictionary(
-                nodes, aliased_edges, all_attributes
+                aliased_edges, all_attributes
             )
 
             # Remake topological_gen so it has only relevant nodes.

From ba043c6df04cd067f4f6e8208d288571ef3389b7 Mon Sep 17 00:00:00 2001
From: andrewelamb
Date: Sun, 4 Feb 2024 11:59:36 -0800
Subject: [PATCH 125/199] lint all utils files except schema utils

---
 schematic/manifest/generator.py         |   2 +-
 schematic/utils/cli_utils.py            |  92 ++++++-----
 schematic/utils/curie_utils.py          |  47 +++---
 schematic/utils/df_utils.py             | 152 +++++++++---------
 schematic/utils/general.py              | 195 +++++++++++++++---------
 schematic/utils/google_api_utils.py     | 121 +++++++++------
 schematic/utils/io_utils.py             |  29 ++--
 schematic/utils/validate_rules_utils.py |  50 +++---
 schematic/utils/validate_utils.py       |  73 ++++-----
 schematic/utils/viz_utils.py            |  22 ++-
 10 files changed, 452 insertions(+), 331 deletions(-)

diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py
index 090024f04..79c8ed4af 100644
--- a/schematic/manifest/generator.py
+++ b/schematic/manifest/generator.py
@@ -1503,7 +1503,7 @@ def export_sheet_to_excel(
             export_manifest_drive_service(
                 manifest_url,
                 file_path=output_excel_file_path,
-                mimeType="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
+                mime_Type="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
             )
 
     return output_excel_file_path

diff --git a/schematic/utils/cli_utils.py b/schematic/utils/cli_utils.py
index 684bafba1..ce701834e 100644
--- a/schematic/utils/cli_utils.py
+++ b/schematic/utils/cli_utils.py
@@ -1,9 +1,8 @@
-#!/usr/bin/env python3
+"""CLI utils"""
 
-import inspect
 import logging
 
-from typing import Any, Mapping, Sequence, Union, List
+from typing import Any, Mapping, Sequence, Union, Optional
 from functools import reduce
 import re
 
@@ -11,6 +10,7 @@
 
 # We are using fstrings in logger methods
 # pylint: disable=logging-fstring-interpolation
+# pylint: disable=anomalous-backslash-in-string
 
 
 def query_dict(dictionary: Mapping[Any, Any], keys: Sequence[Any]) -> Union[Any, None]:
@@ -48,52 +48,66 @@ def log_value_from_config(arg_name: str, config_value: Any):
     )
 
 
-def parse_synIDs(
-    ctx,
-    param,
-    synIDs,
-) -> List[str]:
-    """Parse and validate a comma separated string of synIDs
+def parse_synIDs(  # pylint: disable=invalid-name
+    ctx: Any,  # pylint: disable=unused-argument
+    param: str,  # pylint: disable=unused-argument
+    synIDs: str,  # pylint: disable=invalid-name
+) -> Optional[list[str]]:
+    """For backwards compatibility"""
+    return parse_syn_ids(ctx, param, synIDs)
 
-    Args:
-        ctx:
-            click option context
-        param:
-            click option argument name
-        synIDs:
-            comma separated string of synIDs
 
-    Returns:
-        List of synID strings
+def parse_syn_ids(
+    ctx: Any,  # pylint: disable=unused-argument
+    param: str,  # pylint: disable=unused-argument
+    syn_ids: str,
+) -> Optional[list[str]]:
+    """Parse and validate a comma separated string of synapse ids
+
+    Args:
+        ctx (Any): click option context
+        param (str): click option argument name
+        syn_ids (str): comma separated string of synapse ids
 
     Raises:
-        ValueError: If the entire string does not match a regex for
+        ValueError: If the entire string does not match a regex for
             a valid comma separated string of SynIDs
+
+    Returns:
+        Optional[list[str]]: List of synapse ids
     """
-    if synIDs:
-        project_regex = re.compile("(syn\d+\,?)+")
-        valid = project_regex.fullmatch(synIDs)
+    if not syn_ids:
+        return None
 
-        if valid:
-            synIDs = synIDs.split(",")
+    project_regex = 
re.compile("(syn\d+\,?)+") + valid = project_regex.fullmatch(syn_ids) - return synIDs + if not valid: + raise ValueError( + f"The provided list of project synID(s): {syn_ids}, is not formatted correctly. " + "\nPlease check your list of projects for errors." + ) - else: - raise ValueError( - f"The provided list of project synID(s): {synIDs}, is not formatted correctly. " - "\nPlease check your list of projects for errors." - ) - else: - return + syn_ids = syn_ids.split(",") + return syn_ids def parse_comma_str_to_list( - ctx, - param, - comma_string, -) -> List[str]: - if comma_string: - return comma_string.split(",") - else: + ctx: Any, # pylint: disable=unused-argument + param: str, # pylint: disable=unused-argument + comma_string: str, +) -> Optional[list[str]]: + """Separates a comma separated sting into a list of strings + + Args: + ctx (Any): click option context + param (str): click option argument name + comma_string (str): comma separated string + + Returns: + Optional[list[str]]: _description_ + """ + if not comma_string: return None + + return comma_string.split(",") diff --git a/schematic/utils/curie_utils.py b/schematic/utils/curie_utils.py index 42361b482..fd24fd297 100644 --- a/schematic/utils/curie_utils.py +++ b/schematic/utils/curie_utils.py @@ -1,3 +1,5 @@ +"""Curie utils""" + import logging @@ -8,10 +10,9 @@ def extract_name_from_uri_or_curie(item): """Extract name from uri or curie""" if "http" not in item and len(item.split(":")) == 2: return item.split(":")[-1] - elif len(item.split("//")[-1].split("/")) > 1: + if len(item.split("//")[-1].split("/")) > 1: return item.split("//")[-1].split("/")[-1] - else: - raise ValueError("Error extracting name from URI or Curie.") + raise ValueError("Error extracting name from URI or Curie.") def expand_curie_to_uri(curie, context_info): @@ -24,17 +25,15 @@ def expand_curie_to_uri(curie, context_info): "http://schema.biothings.io/"}) """ # as suggested in SchemaOrg standard file, these prefixes don't expand - PREFIXES_NOT_EXPAND = ["rdf", "rdfs", "xsd"] + prefixes_not_expand = ["rdf", "rdfs", "xsd"] # determine if a value is curie if len(curie.split(":")) == 2: prefix, value = curie.split(":") - if prefix in context_info and prefix not in PREFIXES_NOT_EXPAND: + if prefix in context_info and prefix not in prefixes_not_expand: return context_info[prefix] + value # if the input is not curie, return the input unmodified - else: - return curie - else: return curie + return curie def expand_curies_in_schema(schema): @@ -44,28 +43,28 @@ def expand_curies_in_schema(schema): new_schema = {"@context": context, "@graph": [], "@id": schema["@id"]} for record in graph: new_record = {} - for k, v in record.items(): - if type(v) == str: - new_record[expand_curie_to_uri(k, context)] = expand_curie_to_uri( - v, context + for key, value in record.items(): + if isinstance(value, str): + new_record[expand_curie_to_uri(key, context)] = expand_curie_to_uri( + value, context ) - elif type(v) == list: - if type(v[0]) == dict: - new_record[expand_curie_to_uri(k, context)] = [] - for _item in v: - new_record[expand_curie_to_uri(k, context)].append( + elif isinstance(value, list): + if isinstance(value[0], dict): + new_record[expand_curie_to_uri(key, context)] = [] + for _item in value: + new_record[expand_curie_to_uri(key, context)].append( {"@id": expand_curie_to_uri(_item["@id"], context)} ) else: - new_record[expand_curie_to_uri(k, context)] = [ - expand_curie_to_uri(_item, context) for _item in v + new_record[expand_curie_to_uri(key, context)] = [ + 
expand_curie_to_uri(_item, context) for _item in value ] - elif type(v) == dict and "@id" in v: - new_record[expand_curie_to_uri(k, context)] = { - "@id": expand_curie_to_uri(v["@id"], context) + elif isinstance(value, dict) and "@id" in value: + new_record[expand_curie_to_uri(key, context)] = { + "@id": expand_curie_to_uri(value["@id"], context) } - elif v == None: - new_record[expand_curie_to_uri(k, context)] = None + elif value is None: + new_record[expand_curie_to_uri(key, context)] = None new_schema["@graph"].append(new_record) return new_schema diff --git a/schematic/utils/df_utils.py b/schematic/utils/df_utils.py index 14c8d6b2b..2fbd8cc4d 100644 --- a/schematic/utils/df_utils.py +++ b/schematic/utils/df_utils.py @@ -1,28 +1,38 @@ +"""df utils""" + import logging from copy import deepcopy from time import perf_counter -import datetime as dt import dateparser as dp import pandas as pd import numpy as np from pandarallel import pandarallel +# pylint: disable=logging-fstring-interpolation + logger = logging.getLogger(__name__) -def load_df(file_path, preserve_raw_input=True, data_model=False, **load_args): +def load_df( + file_path: str, + preserve_raw_input: bool = True, + data_model: bool = False, + **load_args: dict, +) -> pd.DataFrame: """ Universal function to load CSVs and return DataFrames Parses string entries to convert as appropriate to type int, float, and pandas timestamp - Pandarallel is used for type inference for large manfiests to improve performance + Pandarallel is used for type inference for large manifests to improve performance + Args: - file_path: path of csv to open - preserve_raw_input: Bool. If false, convert cell datatypes to an inferred type - data_model: bool, indicates if importing a data model - load_args: dict of key value pairs to be passed to the pd.read_csv function - **kwargs: keyword arguments for pd.read_csv() + file_path (str): path of csv to open + preserve_raw_input (bool, optional): If false, convert cell datatypes to an inferred type + data_model (bool, optional): bool, indicates if importing a data model + **load_args(dict): dict of key value pairs to be passed to the pd.read_csv function - Returns: a processed dataframe for manifests or unprocessed df for data models and where indicated + Returns: + pd.DataFrame: a processed dataframe for manifests or unprocessed df for data models and + where indicated """ large_manifest_cutoff_size = 1000 # start performance timer @@ -42,48 +52,48 @@ def load_df(file_path, preserve_raw_input=True, data_model=False, **load_args): return org_df # If type inferences is allowed: infer types, trim, and return - else: - # create a separate copy of the manifest - # before beginning conversions to store float values - float_df = deepcopy(org_df) - - # Cast the columns in the dataframe to string and - # replace Null values with empty strings - null_cells = org_df.isnull() - org_df = org_df.astype(str).mask(null_cells, "") - - # Find integers stored as strings and replace with entries of type np.int64 - if ( - org_df.size < large_manifest_cutoff_size - ): # If small manifest, iterate as normal for improved performance - ints = org_df.applymap( - lambda x: np.int64(x) if str.isdigit(x) else False, na_action="ignore" - ).fillna(False) - - else: # parallelize iterations for large manfiests - pandarallel.initialize(verbose=1) - ints = org_df.parallel_applymap( - lambda x: np.int64(x) if str.isdigit(x) else False, na_action="ignore" - ).fillna(False) - - # Identify cells converted to intergers - ints_tf_df = 
ints.applymap(pd.api.types.is_integer) - - # convert strings to numerical dtype (float) if possible, preserve non-numerical strings - for col in org_df.columns: - float_df[col] = pd.to_numeric(float_df[col], errors="coerce") - # replace values that couldn't be converted to float with the original str values - float_df[col].fillna(org_df[col][float_df[col].isna()], inplace=True) - - # Trim nans and empty rows and columns - processed_df = trim_commas_df(float_df) - - # Store values that were converted to type int in the final dataframe - processed_df = processed_df.mask(ints_tf_df, other=ints) - - # log manifest load and processing time - logger.debug(f"Load Elapsed time {perf_counter()-t_load_df}") - return processed_df + + # create a separate copy of the manifest + # before beginning conversions to store float values + float_df = deepcopy(org_df) + + # Cast the columns in the dataframe to string and + # replace Null values with empty strings + null_cells = org_df.isnull() + org_df = org_df.astype(str).mask(null_cells, "") + + # Find integers stored as strings and replace with entries of type np.int64 + if ( + org_df.size < large_manifest_cutoff_size + ): # If small manifest, iterate as normal for improved performance + ints = org_df.applymap( + lambda x: np.int64(x) if str.isdigit(x) else False, na_action="ignore" + ).fillna(False) + + else: # parallelize iterations for large manfiests + pandarallel.initialize(verbose=1) + ints = org_df.parallel_applymap( + lambda x: np.int64(x) if str.isdigit(x) else False, na_action="ignore" + ).fillna(False) + + # Identify cells converted to integers + ints_tf_df = ints.applymap(pd.api.types.is_integer) + + # convert strings to numerical dtype (float) if possible, preserve non-numerical strings + for col in org_df.columns: + float_df[col] = pd.to_numeric(float_df[col], errors="coerce") + # replace values that couldn't be converted to float with the original str values + float_df[col].fillna(org_df[col][float_df[col].isna()], inplace=True) + + # Trim nan's and empty rows and columns + processed_df = trim_commas_df(float_df) + + # Store values that were converted to type int in the final dataframe + processed_df = processed_df.mask(ints_tf_df, other=ints) + + # log manifest load and processing time + logger.debug(f"Load Elapsed time {perf_counter()-t_load_df}") + return processed_df def _parse_dates(date_string): @@ -94,11 +104,11 @@ def _parse_dates(date_string): return False -def normalize_table(df: pd.DataFrame, primary_key: str) -> pd.DataFrame: +def normalize_table(dataframe: pd.DataFrame, primary_key: str) -> pd.DataFrame: """ Function to normalize a table (e.g. dedup) Args: - df: data frame to normalize + dataframe: data frame to normalize primary_key: primary key on which to perform dedup Returns: a dedupped dataframe @@ -106,9 +116,9 @@ def normalize_table(df: pd.DataFrame, primary_key: str) -> pd.DataFrame: try: # if valid primary key has been provided normalize df - df = df.reset_index() - df_norm = df.drop_duplicates(subset=[primary_key]) - df_norm = df.drop(columns=["index"]) + dataframe = dataframe.reset_index() + df_norm = dataframe.drop_duplicates(subset=[primary_key]) + df_norm = dataframe.drop(columns=["index"]) return df_norm except KeyError: # if the primary key is not in the df; then return the same df w/o changes @@ -116,7 +126,7 @@ def normalize_table(df: pd.DataFrame, primary_key: str) -> pd.DataFrame: "Specified primary key is not in table schema. Proceeding without table changes." 
) - return df + return dataframe def update_df( @@ -171,47 +181,47 @@ def update_df( return input_df_idx -def trim_commas_df(df: pd.DataFrame): +def trim_commas_df(dataframe: pd.DataFrame) -> pd.DataFrame: """Removes empty (trailing) columns and empty rows from pandas dataframe (manifest data). Args: - df: pandas dataframe with data from manifest file. + dataframe: pandas dataframe with data from manifest file. Returns: df: cleaned-up pandas dataframe. """ # remove all columns which have substring "Unnamed" in them - df = df.loc[:, ~df.columns.str.contains("^Unnamed")] + dataframe = dataframe.loc[:, ~dataframe.columns.str.contains("^Unnamed")] # remove all completely empty rows - df = df.dropna(how="all", axis=0) + dataframe = dataframe.dropna(how="all", axis=0) # Fill in nan cells with empty strings - df.fillna("", inplace=True) - return df + dataframe.fillna("", inplace=True) + return dataframe -def col_in_dataframe(col: str, df: pd.DataFrame) -> bool: - """Check if a column is in a dataframe, without worring about case +def col_in_dataframe(col: str, dataframe: pd.DataFrame) -> bool: + """Check if a column is in a dataframe, without worrying about case Args: col: name of column whose presence in the dataframe is being checked - df: pandas dataframe with data from manifest file. + dataframe: pandas dataframe with data from manifest file. Returns: bool: whether or not the column name is a column in the dataframe, case agnostic """ return col.lower() in [ - manifest_col.lower() for manifest_col in df.columns.to_list() + manifest_col.lower() for manifest_col in dataframe.columns.to_list() ] def populate_df_col_with_another_col( - df: pd.DataFrame, source_col: str, target_col: str + dataframe: pd.DataFrame, source_col: str, target_col: str ) -> pd.DataFrame: """Copy the values from one column in a dataframe to another column in the same dataframe Args: - df: pandas dataframe with data from manifest file. + dataframe: pandas dataframe with data from manifest file. 
source_col: column whose contents to copy over target_col: column to be updated with other contents @@ -219,5 +229,5 @@ def populate_df_col_with_another_col( dataframe with contents updated """ # Copy the contents over - df[target_col] = df[source_col] - return df + dataframe[target_col] = dataframe[source_col] + return dataframe diff --git a/schematic/utils/general.py b/schematic/utils/general.py index 66ddb2252..5af2bf6a2 100644 --- a/schematic/utils/general.py +++ b/schematic/utils/general.py @@ -1,6 +1,6 @@ -# allows specifying explicit variable types +"""General utils""" + import logging -import math import os import pstats import subprocess @@ -8,53 +8,92 @@ from cProfile import Profile from datetime import datetime, timedelta from functools import wraps -from typing import Union +from typing import Union, TypeVar, Any, Optional, Sequence, Callable from synapseclient.core.exceptions import SynapseHTTPError from synapseclient.entity import File, Folder, Project from synapseclient.table import EntityViewSchema +from synapseclient.core import cache +from synapseclient import Synapse -import synapseclient.core.cache as cache +# pylint: disable=logging-fstring-interpolation logger = logging.getLogger(__name__) +T = TypeVar("T") + -def find_duplicates(_list): +def find_duplicates(_list: list[T]) -> set[T]: """Find duplicate items in a list""" - return set([x for x in _list if _list.count(x) > 1]) + return {x for x in _list if _list.count(x) > 1} -def dict2list(dictionary): - if type(dictionary) == list: - return dictionary - elif type(dictionary) == dict: - return [dictionary] +def dict2list(item: Any) -> Optional[Union[dict, list]]: + """Puts a dictionary into a list + Args: + item (Any): Any type of input -def str2list(_str): - if type(_str) == str: - return [_str] - elif type(_str) == list: - return _str + Returns: + Optional[Union[dict, list]]: + If input is a list, return it + If input is a dict, return it in a list + Return None for anything else + """ + if isinstance(item, list): + return item + if isinstance(item, dict): + return [item] + return None -def unlist(_list): - if len(_list) == 1: - return _list[0] - else: - return _list +def str2list(item: Any) -> Optional[list]: + """Puts a string into a list + + Args: + item (Any): Any type of input + + Returns: + Optional[list]: + If input is a list, return it + If input is a string, return it in a list + Return None for anything else + """ + if isinstance(item, str): + return [item] + if isinstance(item, list): + return item + return None + + +def unlist(seq: Sequence) -> Any: + """Returns the first item of a sequence + + Args: + seq (Sequence): Any sequence + + Returns: + Any: + if sequence is length one, return the first item + otherwise return the sequence + """ + if len(seq) == 1: + return seq[0] + return seq -def get_dir_size(path: str): - """Recursively descend the directory tree rooted at the top and call .st_size function to calculate size of files in bytes. +def get_dir_size(path: str) -> int: + """ + Recursively descend the directory tree rooted at the top and call + .st_size function to calculate size of files in bytes. Args: path: path to a folder return: total size of all the files in a given directory in bytes. 
""" total = 0 # Recursively scan directory to find entries - with os.scandir(path) as it: - for entry in it: + with os.scandir(path) as itr: + for entry in itr: if entry.is_file(): total += entry.stat().st_size elif entry.is_dir(): @@ -70,7 +109,8 @@ def calculate_datetime( Args: input_date (datetime): date time object provided by users minutes (int): number of minutes - before_or_after (str): default to "before". if "before", calculate x minutes before current date time. if "after", calculate x minutes after current date time. + before_or_after (str): default to "before". if "before", calculate x minutes before + current date time. if "after", calculate x minutes after current date time. Returns: datetime: return result of date time calculation @@ -93,9 +133,10 @@ def check_synapse_cache_size(directory="/root/.synapseCache") -> Union[float, in Returns: float or integer: returns size of .synapsecache directory in bytes """ - # Note: this command might fail on windows user. But since this command is primarily for running on AWS, it is fine. + # Note: this command might fail on windows user. + # But since this command is primarily for running on AWS, it is fine. command = ["du", "-sh", directory] - output = subprocess.run(command, capture_output=True).stdout.decode("utf-8") + output = subprocess.run(command, capture_output=True, check=False).stdout.decode("utf-8") # Parsing the output to extract the directory size size = output.split("\t")[0] @@ -115,11 +156,11 @@ def check_synapse_cache_size(directory="/root/.synapseCache") -> Union[float, in return byte_size -def clear_synapse_cache(cache: cache.Cache, minutes: int) -> int: +def clear_synapse_cache(synapse_cache: cache.Cache, minutes: int) -> int: """clear synapse cache before a certain time Args: - cache: an object of synapseclient Cache. + synapse_cache: an object of synapseclient Cache. minutes (int): all files before this minute will be removed Returns: int: number of files that get deleted @@ -128,49 +169,54 @@ def clear_synapse_cache(cache: cache.Cache, minutes: int) -> int: minutes_earlier = calculate_datetime( input_date=current_date, minutes=minutes, before_or_after="before" ) - num_of_deleted_files = cache.purge(before_date=minutes_earlier) + num_of_deleted_files = synapse_cache.purge(before_date=minutes_earlier) return num_of_deleted_files -def convert_gb_to_bytes(gb: int): +def convert_gb_to_bytes(g_bytes: int) -> int: """convert gb to bytes Args: - gb: number of gb + g_bytes: number of gb return: total number of bytes """ - return gb * 1024 * 1024 * 1024 + return g_bytes * 1024 * 1024 * 1024 -def entity_type_mapping(syn, entity_id): - """ - Return the entity type of manifest +def entity_type_mapping(syn: Synapse, entity_id: str) -> str: + """Return the entity type of manifest + Args: - entity_id: id of an entity - Return: - type_entity: type of the manifest being returned + syn (Synapse): Synapse object + entity_id (str): id of an entity + + Raises: + SynapseHTTPError: Re-raised SynapseHTTPError + + Returns: + str: type of the manifest being returned """ # check the type of entity try: entity = syn.get(entity_id, downloadFile=False) - except SynapseHTTPError as e: + except SynapseHTTPError as exc: logger.error( f"cannot get {entity_id} from asset store. Please make sure that {entity_id} exists" ) raise SynapseHTTPError( f"cannot get {entity_id} from asset store. 
Please make sure that {entity_id} exists" - ) from e + ) from exc if isinstance(entity, EntityViewSchema): return "asset view" - elif isinstance(entity, Folder): + if isinstance(entity, Folder): return "folder" - elif isinstance(entity, File): + if isinstance(entity, File): return "file" - elif isinstance(entity, Project): + if isinstance(entity, Project): return "project" - else: - # if there's no matching type, return concreteType - return entity.concreteType + + # if there's no matching type, return concreteType + return entity.concreteType def create_temp_folder(path: str) -> str: @@ -185,59 +231,70 @@ def create_temp_folder(path: str) -> str: def profile( - output_file=None, sort_by="cumulative", lines_to_print=None, strip_dirs=False -): + output_file: Optional[str] = None, + sort_by="cumulative", + lines_to_print: Optional[int] = None, + strip_dirs: bool = False, +) -> Callable: """ - The function was initially taken from: https://towardsdatascience.com/how-to-profile-your-code-in-python-e70c834fad89 + The function was initially taken from: + https://towardsdatascience.com/how-to-profile-your-code-in-python-e70c834fad89 A time profiler decorator. Inspired by and modified the profile decorator of Giampaolo Rodola: http://code.activestate.com/recipes/577817-profile-decorator/ + Args: - output_file: str or None. Default is None + output_file (Optional[str], optional): Path of the output file. If only name of the file is given, it's saved in the current directory. If it's None, the name of the decorated function is used. - sort_by: str or SortKey enum or tuple/list of str/SortKey enum + Defaults to None. + sort_by (str, optional): + str or SortKey enum or tuple/list of str/SortKey enum Sorting criteria for the Stats object. For a list of valid string and SortKey refer to: https://docs.python.org/3/library/profile.html#pstats.Stats.sort_stats - lines_to_print: int or None - Number of lines to print. Default (None) is for all the lines. + Defaults to "cumulative". + lines_to_print (Optional[int], optional): + Number of lines to print. This is useful in reducing the size of the printout, especially that sorting by 'cumulative', the time consuming operations are printed toward the top of the file. - strip_dirs: bool + Default (None) is for all the lines. + strip_dirs (bool, optional): Whether to remove the leading path info from file names. This is also useful in reducing the size of the printout + Defaults to False. 
+ Returns: - Profile of the decorated function + Callable: Profile of the decorated function """ def inner(func): @wraps(func) def wrapper(*args, **kwargs): _output_file = output_file or func.__name__ + ".prof" - pr = Profile() - pr.enable() + profiler = Profile() + profiler.enable() retval = func(*args, **kwargs) - pr.disable() - pr.dump_stats(_output_file) + profiler.disable() + profiler.dump_stats(_output_file) # if we are running the functions on AWS: if "SECRETS_MANAGER_SECRETS" in os.environ: - ps = pstats.Stats(pr) + p_stats = pstats.Stats(profiler) # limit this to 30 line for now otherwise it will be too long for AWS log - ps.sort_stats("cumulative").print_stats(30) + p_stats.sort_stats("cumulative").print_stats(30) else: - with open(_output_file, "w") as f: - ps = pstats.Stats(pr, stream=f) + with open(_output_file, "w", encoding="utf-8") as fle: + p_stats = pstats.Stats(profiler, stream=fle) if strip_dirs: - ps.strip_dirs() + p_stats.strip_dirs() if isinstance(sort_by, (tuple, list)): - ps.sort_stats(*sort_by) + p_stats.sort_stats(*sort_by) else: - ps.sort_stats(sort_by) - ps.print_stats(lines_to_print) + p_stats.sort_stats(sort_by) + p_stats.print_stats(lines_to_print) return retval return wrapper diff --git a/schematic/utils/google_api_utils.py b/schematic/utils/google_api_utils.py index f7862a3a2..05d8e095d 100644 --- a/schematic/utils/google_api_utils.py +++ b/schematic/utils/google_api_utils.py @@ -1,19 +1,18 @@ +"""Google API utils""" + import os -import pickle import logging import json -import pygsheets as ps - -from typing import Dict, Any +from typing import Any, Union, Optional +import pandas as pd from googleapiclient.discovery import build -from google_auth_oauthlib.flow import InstalledAppFlow -from google.auth.transport.requests import Request from google.oauth2 import service_account -from google.oauth2.credentials import Credentials + from schematic.configuration.configuration import CONFIG from schematic.store.synapse import SynapseStorage -import pandas as pd + +# pylint: disable=logging-fstring-interpolation logger = logging.getLogger(__name__) @@ -25,9 +24,10 @@ ] -# TODO: replace by pygsheets calls? -def build_credentials() -> Dict[str, Any]: - creds = generate_token() +# This function doesn't appear to be used or tested anywhere in schematic. +# TO DO: replace by pygsheets calls? 
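For reference, a minimal usage sketch for the profile decorator defined above; the function
and output-file names here are invented, and it assumes schematic.utils.general is importable:

    from schematic.utils.general import profile

    @profile(output_file="slow_sum.prof", lines_to_print=10, strip_dirs=True)
    def slow_sum(count: int) -> int:
        # deliberately slow workload so the profiler has something to report
        return sum(i * i for i in range(count))

    slow_sum(1_000_000)  # profiling stats are written to slow_sum.prof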
+def build_credentials() -> dict[str, Any]:  # pylint: disable=missing-function-docstring
+    creds = generate_token()  # pylint: disable=undefined-variable

     # get a Google Sheet API service
     sheet_service = build("sheets", "v4", credentials=creds)
@@ -41,7 +41,12 @@ def build_credentials() -> Dict[str, Any]:
     }


-def build_service_account_creds() -> Dict[str, Any]:
+def build_service_account_creds() -> dict[str, Any]:
+    """Build Google service account credentials
+
+    Returns:
+        dict[str, Any]: The credentials
+    """
     if "SERVICE_ACCOUNT_CREDS" in os.environ:
         dict_creds = json.loads(os.environ["SERVICE_ACCOUNT_CREDS"])
         credentials = service_account.Credentials.from_service_account_info(
@@ -73,6 +78,7 @@ def build_service_account_creds() -> Dict[str, Any]:


 def download_creds_file() -> None:
+    """Download google credentials file"""
     syn = SynapseStorage.login()

     # if file path of service_account does not exist
@@ -83,13 +89,13 @@ def download_creds_file() -> None:
         and "SERVICE_ACCOUNT_CREDS" not in os.environ
     ):
         # synapse ID of the 'schematic_service_account_creds.json' file
-        API_CREDS = CONFIG.service_account_credentials_synapse_id
+        api_creds = CONFIG.service_account_credentials_synapse_id

         # Download in parent directory of SERVICE_ACCT_CREDS to
         # ensure same file system for os.rename()
         creds_dir = os.path.dirname(CONFIG.service_account_credentials_path)

-        creds_file = syn.get(API_CREDS, downloadLocation=creds_dir)
+        creds_file = syn.get(api_creds, downloadLocation=creds_dir)
         os.rename(creds_file.path, CONFIG.service_account_credentials_path)

         logger.info(
@@ -104,16 +110,19 @@ def download_creds_file() -> None:
     )


-def execute_google_api_requests(service, requests_body, **kwargs):
+def execute_google_api_requests(service: Any, requests_body: Any, **kwargs) -> Any:
     """
     Execute google API requests batch; attempt to execute in parallel.
+
     Args:
-        service: google api service; for now assume google sheets service that is instantiated and authorized
-        service_type: default batchUpdate; TODO: add logic for values update
+        service (Any): google api service; for now assume google sheets service that is
+            instantiated and authorized
+        requests_body (Any): the body of the batch request(s) to execute
         kwargs: google API service parameters

-    Return: google API response
-    """
+    Returns:
+        Any: google API response or None
+    """
     if (
         "spreadsheet_id" in kwargs
         and "service_type" in kwargs
@@ -127,21 +136,23 @@ def execute_google_api_requests(service, requests_body, **kwargs):
         )

         return response
+    return None


-def export_manifest_drive_service(manifest_url, file_path, mimeType):
+def export_manifest_drive_service(
+    manifest_url: str, file_path: str, mime_type: str
+) -> None:
     """
-    Export manifest by using google drive api. If export as an Excel spreadsheet, the exported spreasheet would also include a hidden sheet
-    Args:
-        manifest_url: google sheet manifest url
-        file_path: file path of the exported manifest
-        mimeType: exporting mimetype
-
+    Export manifest by using google drive api.
If export as an Excel spreadsheet, + the exported spreadsheet would also include a hidden sheet result: Google sheet gets exported in desired format + Args: + manifest_url (str): google sheet manifest url + file_path (str): file path of the exported manifest + mime_type (str): exporting mimetype """ - - # intialize drive service + # initialize drive service services_creds = build_service_account_creds() drive_service = services_creds["drive_service"] @@ -150,45 +161,51 @@ def export_manifest_drive_service(manifest_url, file_path, mimeType): # use google drive data = ( - drive_service.files().export(fileId=spreadsheet_id, mimeType=mimeType).execute() + drive_service.files() + .export(fileId=spreadsheet_id, mimeType=mime_type) + .execute() # pylint: disable=no-member ) # open file and write data - with open(os.path.abspath(file_path), "wb") as f: + with open(os.path.abspath(file_path), "wb") as fle: try: - f.write(data) + fle.write(data) except FileNotFoundError as not_found: logger.error(f"{not_found.filename} could not be found") - f.close - -def export_manifest_csv(file_path, manifest): +def export_manifest_csv(file_path: str, manifest: Union[pd.DataFrame, str]) -> None: """ Export manifest as a CSV by using google drive api - Args: - manifest: could be a dataframe or a manifest url - file_path: file path of the exported manifest - mimeType: exporting mimetype - result: Google sheet gets exported as a CSV - """ + Args: + file_path (str): file path of the exported manifest + manifest (Union[pd.DataFrame, str]): could be a dataframe or a manifest url + """ if isinstance(manifest, pd.DataFrame): manifest.to_csv(file_path, index=False) else: - export_manifest_drive_service(manifest, file_path, mimeType="text/csv") + export_manifest_drive_service(manifest, file_path, mime_type="text/csv") -def export_manifest_excel(manifest, output_excel=None): +# This function doesn't appear to be used or tested +# pd.ExcelWriter is an ABC class which means it SHOULD NOT be instantiated +def export_manifest_excel( + manifest: Union[pd.DataFrame, str], output_excel: Optional[str] = None +) -> None: """ - Export manifest as an Excel spreadsheet by using google sheet API. This approach could export hidden sheet + Export manifest as an Excel spreadsheet by using google sheet API. + This approach could export hidden sheet + Google sheet gets exported as an excel spreadsheet. + If there's a hidden sheet, the hidden sheet also gets exported. + Args: - manifest: could be a dataframe or a manifest url - output_excel: name of the exported manifest sheet - result: Google sheet gets exported as an excel spreadsheet. If there's a hidden sheet, the hidden sheet also gets exported. + manifest (Union[pd.DataFrame, str]): could be a dataframe or a manifest url + output_excel (Optional[str], optional): name of the exported manifest sheet. + Defaults to None. 
""" - # intialize drive service + # initialize drive service services_creds = build_service_account_creds() sheet_service = services_creds["sheet_service"] @@ -200,17 +217,21 @@ def export_manifest_excel(manifest, output_excel=None): # use google sheet api sheet_metadata = ( - sheet_service.spreadsheets().get(spreadsheetId=spreadsheet_id).execute() + sheet_service.spreadsheets() # pylint: disable=no-member + .get(spreadsheetId=spreadsheet_id) + .execute() ) sheets = sheet_metadata.get("sheets") # export to Excel - writer = pd.ExcelWriter(output_excel) + writer = pd.ExcelWriter( + output_excel + ) # pylint: disable=abstract-class-instantiated # export each sheet in manifest for sheet in sheets: dataset = ( - sheet_service.spreadsheets() + sheet_service.spreadsheets() # pylint: disable=no-member .values() .get(spreadsheetId=spreadsheet_id, range=sheet["properties"]["title"]) .execute() diff --git a/schematic/utils/io_utils.py b/schematic/utils/io_utils.py index 016ea5dcd..1651d085e 100644 --- a/schematic/utils/io_utils.py +++ b/schematic/utils/io_utils.py @@ -1,11 +1,12 @@ -import os +"""io utils""" + +from typing import Any import json import urllib.request - from schematic import LOADER -def load_json(file_path): +def load_json(file_path: str) -> Any: """Load json document from file path or url :arg str file_path: The path of the url doc, could be url or file path @@ -16,28 +17,26 @@ def load_json(file_path): return data # handle file path else: - with open(file_path, encoding="utf8") as f: - data = json.load(f) + with open(file_path, encoding="utf8") as fle: + data = json.load(fle) return data -def export_json(json_doc, file_path): +def export_json(json_doc: Any, file_path: str) -> None: """Export JSON doc to file""" - with open(file_path, "w", encoding="utf8") as f: - json.dump(json_doc, f, sort_keys=True, indent=4, ensure_ascii=False) + with open(file_path, "w", encoding="utf8") as fle: + json.dump(json_doc, fle, sort_keys=True, indent=4, ensure_ascii=False) -def load_default(): +def load_default() -> Any: """Load biolink vocabulary""" data_path = "data_models/biothings.model.jsonld" biothings_path = LOADER.filename(data_path) - return load_json(biothings_path) -def load_schemaorg(): - """Load SchemOrg vocabulary""" +def load_schemaorg() -> Any: + """Load SchemaOrg vocabulary""" data_path = "data_models/schema_org.model.jsonld" - schemaorg_path = LOADER.filename(data_path) - - return load_json(schemaorg_path) + schema_org_path = LOADER.filename(data_path) + return load_json(schema_org_path) diff --git a/schematic/utils/validate_rules_utils.py b/schematic/utils/validate_rules_utils.py index f1588ed2e..f2a333ecc 100644 --- a/schematic/utils/validate_rules_utils.py +++ b/schematic/utils/validate_rules_utils.py @@ -1,14 +1,14 @@ -from ast import arg -from jsonschema import ValidationError +"""validate rules utils""" + +from typing import Any import logging -import pandas as pd -from typing import Any, Dict, Optional, Text, List +from jsonschema import ValidationError logger = logging.getLogger(__name__) -def validation_rule_info(): +def validation_rule_info() -> dict[str, dict[str, Any]]: """ Function to return dict that holds information about each rule Will be pulled into validate_single_rule, validate_manifest_rules, validate_schema_rules @@ -127,7 +127,7 @@ def get_error( attribute_name: str, error_type: str, input_filetype: str, -) -> List[str]: +) -> list[str]: """ Generate error message for errors when trying to specify multiple validation rules. 
@@ -137,32 +137,35 @@ def get_error( if error_type == "delimiter": error_str = ( f"The {input_filetype}, has an error in the validation rule " - f"for the attribute: {attribute_name}, the provided validation rules ({validation_rules}) are improperly " - f"specified. Please check your delimiter is '::'" + f"for the attribute: {attribute_name}, the provided validation rules " + f"({validation_rules}) are improperly " + "specified. Please check your delimiter is '::'" ) logging.error(error_str) error_message = error_str - error_val = f"Multiple Rules: Delimiter" + error_val = "Multiple Rules: Delimiter" if error_type == "not_rule": error_str = ( f"The {input_filetype}, has an error in the validation rule " - f"for the attribute: {attribute_name}, the provided validation rules ({validation_rules}) is not " - f"a valid rule. Please check spelling." + f"for the attribute: {attribute_name}, the provided validation rules " + f"({validation_rules}) is not " + "a valid rule. Please check spelling." ) logging.error(error_str) error_message = error_str - error_val = f"Not a Rule" + error_val = "Not a Rule" if error_type == "args_not_allowed": error_str = ( f"The {input_filetype}, has an error in the validation rule " - f"for the attribute: {attribute_name}, the provided validation rules ({validation_rules}) is not" - f"formatted properly. No additional arguments are allowed for this rule." + f"for the attribute: {attribute_name}, the provided validation rules " + f"({validation_rules}) is not" + "formatted properly. No additional arguments are allowed for this rule." ) logging.error(error_str) error_message = error_str - error_val = f"Args not allowed." + error_val = "Args not allowed." if error_type == "incorrect_num_args": rule_type = validation_rules.split(" ")[0] @@ -173,19 +176,22 @@ def get_error( error_str = ( f"The {input_filetype}, has an error in the validation rule " - f"for the attribute: {attribute_name}, the provided validation rules ({validation_rules}) is not " - f"formatted properly. The number of provided arguments does not match the number allowed({no_allowed}) or required({no_required})." + f"for the attribute: {attribute_name}, the provided validation rules " + f"({validation_rules}) is not " + "formatted properly. The number of provided arguments does not match the " + f"number allowed({no_allowed}) or required({no_required})." ) logging.error(error_str) error_message = error_str - error_val = f"Incorrect num arguments." + error_val = "Incorrect num arguments." return ["NA", error_col, error_message, error_val] def validate_single_rule(validation_rule, attribute, input_filetype): """ - Perform validation for a single rule to ensure it is specified correctly with an appropriate number of arguments + Perform validation for a single rule to ensure it is specified + correctly with an appropriate number of arguments Inputs: validation_rule: single rule being validated attribute: attribute validation rule was specified for @@ -213,7 +219,7 @@ def validate_single_rule(validation_rule, attribute, input_filetype): ) ) # Check that the rule is actually a valid rule type. 
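For context, a sketch of how a rule string relates to the rule table and the '::' delimiter
conventions checked above; the attribute's rules are invented, and only the "int" entry's
(1, 0) argument tuple is taken from the table:

    from schematic.utils.validate_rules_utils import validation_rule_info

    # Multiple rules for one attribute must be joined with the '::' delimiter
    validation_rules = "int::unique"
    for rule in validation_rules.split("::"):
        rule_type = rule.split(" ")[0]
        if rule_type in validation_rule_info():
            allowed, required = validation_rule_info()[rule_type]["arguments"]
            print(rule_type, allowed, required)  # e.g. "int 1 0"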
- elif rule_type not in validation_types.keys(): + elif rule_type not in validation_types: errors.append( get_error( validation_rule, @@ -263,7 +269,7 @@ def validate_single_rule(validation_rule, attribute, input_filetype): return errors -def validate_schema_rules(validation_rules, attribute, input_filetype): +def validate_schema_rules(validation_rules, attribute, input_filetype: str) -> None: """ validation_rules: list input_filetype: str, used in error generation to aid user in @@ -285,5 +291,3 @@ def validate_schema_rules(validation_rules, attribute, input_filetype): f"for attribute {attribute}. " f"Validation failed with the following errors: {errors}" ) - - return diff --git a/schematic/utils/validate_utils.py b/schematic/utils/validate_utils.py index ee64728a4..1e98b79c9 100644 --- a/schematic/utils/validate_utils.py +++ b/schematic/utils/validate_utils.py @@ -1,12 +1,15 @@ -import os -import pandas as pd +"""Validation utils""" + +import re +from typing import Pattern, Union, Iterable +from numbers import Number from jsonschema import validate -from re import compile, search, IGNORECASE +import numpy as np +import pandas as pd from schematic.utils.io_utils import load_json from schematic import LOADER -from typing import List -import numpy as np -from numbers import Number + +# pylint: disable = anomalous-backslash-in-string def validate_schema(schema): @@ -33,32 +36,38 @@ def validate_class_schema(schema): return validate(schema, json_schema) -def comma_separated_list_regex(): - # Regex to match with comma separated list - # Requires at least one element and a comma to be valid - # Does not require a trailing comma - csv_list_regex = compile("([^\,]+\,)(([^\,]+\,?)*)") +def comma_separated_list_regex() -> Pattern[str]: + """ + Regex to match with comma separated list + Requires at least one element and a comma to be valid + Does not require a trailing comma - return csv_list_regex + Returns: + Pattern[str]: + """ + csv_list_regex = re.compile("([^\,]+\,)(([^\,]+\,?)*)") + return csv_list_regex -def rule_in_rule_list(rule: str, rule_list: List[str]): - # Function to standardize - # checking to see if a rule is contained in a list of rules. - # Uses regex to avoid issues arising from validation rules with arguments - # or rules that have arguments updated. - # seperate rule type if arguments are specified +def rule_in_rule_list(rule: str, rule_list: list[str]) -> re.Match: + """ + Function to standardize + checking to see if a rule is contained in a list of rules. + Uses regex to avoid issues arising from validation rules with arguments + or rules that have arguments updated. 
+    """
+    # separate rule type if arguments are specified
     rule_type = rule.split(" ")[0]

     # Process string and list of strings for regex comparison
     rule_type = rule_type + "[^\|]*"

     rule_list = "|".join(rule_list)

-    return search(rule_type, rule_list, flags=IGNORECASE)
+    return re.search(rule_type, rule_list, flags=re.IGNORECASE)


-def parse_str_series_to_list(col: pd.Series):
+def parse_str_series_to_list(col: pd.Series) -> pd.Series:
     """
     Parse a pandas series of comma delimited strings
     into a series with values that are lists of strings
@@ -72,29 +81,25 @@ def parse_str_series_to_list(col: pd.Series):
     return col


-def np_array_to_str_list(np_array):
+def np_array_to_str_list(np_array: np.array) -> list[str]:
     """
     Parse a numpy array of ints to a list of strings
     """
     return np.char.mod("%d", np_array).tolist()


-def iterable_to_str_list(iterable):
+def iterable_to_str_list(obj: Union[str, Number, Iterable]) -> list[str]:
     """
     Parse an object into a list of strings
     Accepts str, Number, and iterable inputs
     """

     # If object is a string, just return wrapped as a list
-    if isinstance(iterable, str):
-        return [iterable]
-    # If object is numberical, convert to string and wrap as a list
-    elif isinstance(iterable, Number):
-        return [str(iterable)]
-    # If the object is iterable and not a string, convert every element to string and wratp as a list
-    else:
-        strlist = []
-        for element in iterable:
-            strlist.append(str(element))
-
-        return strlist
+    if isinstance(obj, str):
+        return [obj]
+    # If object is numerical, convert to string and wrap as a list
+    if isinstance(obj, Number):
+        return [str(obj)]
+    # If the object is iterable and not a string, convert every element
+    # to string and wrap as a list
+    return [str(item) for item in obj]
diff --git a/schematic/utils/viz_utils.py b/schematic/utils/viz_utils.py
index b62c9be47..262ee2f90 100644
--- a/schematic/utils/viz_utils.py
+++ b/schematic/utils/viz_utils.py
@@ -1,12 +1,24 @@
+"""viz utils"""
+
+from typing import Optional
 import graphviz


-def visualize(edges, size=None):
+def visualize(edges, size: Optional[float] = None) -> graphviz.Digraph:
+    """Create a graphviz directed graph from a list of edges
+
+    Args:
+        edges (Iterable): iterable of (parent, child) pairs to draw as edges
+        size (Optional[float], optional): optional graph size attribute. Defaults to None.
+
+    Returns:
+        graphviz.Digraph: the directed graph built from the edges
+    """
     if size:
-        d = graphviz.Digraph(graph_attr=[("size", size)])
+        digraph = graphviz.Digraph(graph_attr=[("size", size)])
     else:
-        d = graphviz.Digraph()
+        digraph = graphviz.Digraph()
     for _item in edges:
-        d.edge(_item[0], _item[1])
-    return d
+        digraph.edge(_item[0], _item[1])
+    return digraph

From 33227daa2fab98388d5b1d008ac43a549f3cb2c5 Mon Sep 17 00:00:00 2001
From: andrewelamb
Date: Sun, 4 Feb 2024 12:04:41 -0800
Subject: [PATCH 126/199] pylint utils check in github workflow

---
 .github/workflows/test.yml | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index b2adf95f8..534feaac4 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -117,6 +117,10 @@ jobs:
           # ran only on certain files for now
           # add here when checked
           poetry run pylint schematic/configuration/*.py schematic/exceptions.py schematic/help.py schematic/loader.py schematic/version.py
+          # do all utils but schema_utils.py
+          poetry run pylint schematic/utils/cli_utils.py schematic/utils/curie_utils.py schematic/utils/df_utils.py
+          poetry run pylint schematic/utils/general.py schematic/utils/google_api_utils.py schematic/utils/io_utils.py
+          poetry run pylint schematic/utils/validate_rules_utils.py schematic/utils/validate_utils.py schematic/utils/viz_utils.py

       #----------------------------------------------
       #            run test suite

From 88b7bcf6b015fe10d6f699f8dd48aef47b2ec5c8 Mon Sep 17 00:00:00 2001
From: andrewelamb
Date: Sun, 4 Feb 2024 12:12:21 -0800
Subject: [PATCH 127/199] fix misnamed argument

---
 schematic/manifest/generator.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py
index 79c8ed4af..31a020e08 100644
--- a/schematic/manifest/generator.py
+++ b/schematic/manifest/generator.py
@@ -1503,7 +1503,7 @@ def export_sheet_to_excel(
         export_manifest_drive_service(
             manifest_url,
             file_path=output_excel_file_path,
-            mime_Type="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
+            mime_type="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
         )

         return output_excel_file_path

From a8cc2ca1e25d66919a703072fe406421eb394da0 Mon Sep 17 00:00:00 2001
From: andrewelamb
Date: Sun, 4 Feb 2024 12:12:42 -0800
Subject: [PATCH 128/199] ran black

---
 schematic/utils/general.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/schematic/utils/general.py b/schematic/utils/general.py
index 5af2bf6a2..e19f5047d 100644
--- a/schematic/utils/general.py
+++ b/schematic/utils/general.py
@@ -136,7 +136,9 @@ def check_synapse_cache_size(directory="/root/.synapseCache") -> Union[float, in
     # Note: this command might fail on windows user.
     # But since this command is primarily for running on AWS, it is fine.
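As a standalone illustration of the reformatted call below: du -sh prints a human-readable
size and the path separated by a tab, which is why the code splits on "\t"; the /tmp path
here is only an example:

    import subprocess

    output = subprocess.run(
        ["du", "-sh", "/tmp"], capture_output=True, check=False
    ).stdout.decode("utf-8")
    print(output.split("\t")[0])  # e.g. "1.5M"; the caller converts the unit suffix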
command = ["du", "-sh", directory] - output = subprocess.run(command, capture_output=True, check=False).stdout.decode("utf-8") + output = subprocess.run(command, capture_output=True, check=False).stdout.decode( + "utf-8" + ) # Parsing the output to extract the directory size size = output.split("\t")[0] From 48f98a29651b426aa69df3877076062ad0bb9c1e Mon Sep 17 00:00:00 2001 From: andrewelamb Date: Sun, 4 Feb 2024 12:19:45 -0800 Subject: [PATCH 129/199] fix sonar lint issue --- schematic/utils/general.py | 21 +++++++++++---------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/schematic/utils/general.py b/schematic/utils/general.py index e19f5047d..7c32b6386 100644 --- a/schematic/utils/general.py +++ b/schematic/utils/general.py @@ -209,16 +209,17 @@ def entity_type_mapping(syn: Synapse, entity_id: str) -> str: ) from exc if isinstance(entity, EntityViewSchema): - return "asset view" - if isinstance(entity, Folder): - return "folder" - if isinstance(entity, File): - return "file" - if isinstance(entity, Project): - return "project" - - # if there's no matching type, return concreteType - return entity.concreteType + entity_type = "asset view" + elif isinstance(entity, Folder): + entity_type = "folder" + elif isinstance(entity, File): + entity_type = "file" + elif isinstance(entity, Project): + entity_type = "project" + else: + # if there's no matching type, return concreteType + entity_type = entity.concreteType + return entity_type def create_temp_folder(path: str) -> str: From 44b7b135a4ffaa8d83100baf03f43fe6919f107e Mon Sep 17 00:00:00 2001 From: andrewelamb Date: Sun, 4 Feb 2024 12:19:59 -0800 Subject: [PATCH 130/199] fix sonar lint issue --- schematic/utils/general.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/schematic/utils/general.py b/schematic/utils/general.py index 7c32b6386..c1d8d7058 100644 --- a/schematic/utils/general.py +++ b/schematic/utils/general.py @@ -235,7 +235,7 @@ def create_temp_folder(path: str) -> str: def profile( output_file: Optional[str] = None, - sort_by="cumulative", + sort_by:Any="cumulative", lines_to_print: Optional[int] = None, strip_dirs: bool = False, ) -> Callable: From 44f03fc2a9521bc24d78f1a0141ecf28ebd0f182 Mon Sep 17 00:00:00 2001 From: andrewelamb Date: Sun, 4 Feb 2024 12:22:42 -0800 Subject: [PATCH 131/199] fix pylint/black issue --- schematic/utils/general.py | 2 +- schematic/utils/google_api_utils.py | 17 ++++++++--------- 2 files changed, 9 insertions(+), 10 deletions(-) diff --git a/schematic/utils/general.py b/schematic/utils/general.py index c1d8d7058..3dcbd029f 100644 --- a/schematic/utils/general.py +++ b/schematic/utils/general.py @@ -235,7 +235,7 @@ def create_temp_folder(path: str) -> str: def profile( output_file: Optional[str] = None, - sort_by:Any="cumulative", + sort_by: Any = "cumulative", lines_to_print: Optional[int] = None, strip_dirs: bool = False, ) -> Callable: diff --git a/schematic/utils/google_api_utils.py b/schematic/utils/google_api_utils.py index 05d8e095d..01ff765ba 100644 --- a/schematic/utils/google_api_utils.py +++ b/schematic/utils/google_api_utils.py @@ -161,9 +161,9 @@ def export_manifest_drive_service( # use google drive data = ( - drive_service.files() + drive_service.files() # pylint: disable=no-member .export(fileId=spreadsheet_id, mimeType=mime_type) - .execute() # pylint: disable=no-member + .execute() ) # open file and write data @@ -205,6 +205,9 @@ def export_manifest_excel( output_excel (Optional[str], optional): name of the exported manifest sheet. 
Defaults to None. """ + # pylint: disable=abstract-class-instantiated + # pylint: disable=no-member + # initialize drive service services_creds = build_service_account_creds() sheet_service = services_creds["sheet_service"] @@ -217,21 +220,17 @@ def export_manifest_excel( # use google sheet api sheet_metadata = ( - sheet_service.spreadsheets() # pylint: disable=no-member - .get(spreadsheetId=spreadsheet_id) - .execute() + sheet_service.spreadsheets().get(spreadsheetId=spreadsheet_id).execute() ) sheets = sheet_metadata.get("sheets") # export to Excel - writer = pd.ExcelWriter( - output_excel - ) # pylint: disable=abstract-class-instantiated + writer = pd.ExcelWriter(output_excel) # export each sheet in manifest for sheet in sheets: dataset = ( - sheet_service.spreadsheets() # pylint: disable=no-member + sheet_service.spreadsheets() .values() .get(spreadsheetId=spreadsheet_id, range=sheet["properties"]["title"]) .execute() From 1426a6fa9411bc2991cf5d97e4f494ccd329e733 Mon Sep 17 00:00:00 2001 From: andrewelamb Date: Sun, 4 Feb 2024 12:36:38 -0800 Subject: [PATCH 132/199] sonar cloud fixes --- schematic/utils/validate_rules_utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/schematic/utils/validate_rules_utils.py b/schematic/utils/validate_rules_utils.py index f2a333ecc..dfb746a29 100644 --- a/schematic/utils/validate_rules_utils.py +++ b/schematic/utils/validate_rules_utils.py @@ -188,7 +188,7 @@ def get_error( return ["NA", error_col, error_message, error_val] -def validate_single_rule(validation_rule, attribute, input_filetype): +def validate_single_rule(validation_rule:str, attribute:str, input_filetype:str): """ Perform validation for a single rule to ensure it is specified correctly with an appropriate number of arguments @@ -269,7 +269,7 @@ def validate_single_rule(validation_rule, attribute, input_filetype): return errors -def validate_schema_rules(validation_rules, attribute, input_filetype: str) -> None: +def validate_schema_rules(validation_rules:list[str], attribute:str, input_filetype: str) -> None: """ validation_rules: list input_filetype: str, used in error generation to aid user in From 35b7c6cc8bb1c256fa3f0c762fce79d65ca4bf57 Mon Sep 17 00:00:00 2001 From: andrewelamb Date: Sun, 4 Feb 2024 13:20:18 -0800 Subject: [PATCH 133/199] fixed some typing --- schematic/utils/cli_utils.py | 5 ++--- schematic/utils/df_utils.py | 26 ++++++++++++++++++++----- schematic/utils/general.py | 12 ++++++------ schematic/utils/google_api_utils.py | 5 ++--- schematic/utils/validate_rules_utils.py | 6 +++--- schematic/utils/validate_utils.py | 11 +++++------ 6 files changed, 39 insertions(+), 26 deletions(-) diff --git a/schematic/utils/cli_utils.py b/schematic/utils/cli_utils.py index ce701834e..03017f5af 100644 --- a/schematic/utils/cli_utils.py +++ b/schematic/utils/cli_utils.py @@ -54,7 +54,7 @@ def parse_synIDs( # pylint: disable=invalid-name synIDs: str, # pylint: disable=invalid-name ) -> Optional[list[str]]: """For backwards compatibility""" - parse_syn_ids(ctx, param, synIDs) + return parse_syn_ids(ctx, param, synIDs) def parse_syn_ids( @@ -88,8 +88,7 @@ def parse_syn_ids( "\nPlease check your list of projects for errors." 
) - syn_ids = syn_ids.split(",") - return syn_ids + return syn_ids.split(",") def parse_comma_str_to_list( diff --git a/schematic/utils/df_utils.py b/schematic/utils/df_utils.py index 2fbd8cc4d..76f403382 100644 --- a/schematic/utils/df_utils.py +++ b/schematic/utils/df_utils.py @@ -3,10 +3,12 @@ import logging from copy import deepcopy from time import perf_counter +from typing import Union, Any +from datetime import datetime import dateparser as dp import pandas as pd import numpy as np -from pandarallel import pandarallel +from pandarallel import pandarallel #type: ignore # pylint: disable=logging-fstring-interpolation @@ -39,7 +41,13 @@ def load_df( t_load_df = perf_counter() # Read CSV to df as type specified in kwargs - org_df = pd.read_csv(file_path, keep_default_na=True, encoding="utf8", **load_args) + org_df = pd.read_csv( #type: ignore + file_path, + keep_default_na=True, + encoding="utf8", + **load_args + ) + assert isinstance(org_df, pd.DataFrame) # If type inference not allowed: trim and return if preserve_raw_input: @@ -66,13 +74,13 @@ def load_df( if ( org_df.size < large_manifest_cutoff_size ): # If small manifest, iterate as normal for improved performance - ints = org_df.applymap( + ints: pd.DataFrame = org_df.applymap( lambda x: np.int64(x) if str.isdigit(x) else False, na_action="ignore" ).fillna(False) else: # parallelize iterations for large manfiests pandarallel.initialize(verbose=1) - ints = org_df.parallel_applymap( + ints: pd.DataFrame = org_df.parallel_applymap( #type: ignore lambda x: np.int64(x) if str.isdigit(x) else False, na_action="ignore" ).fillna(False) @@ -96,7 +104,15 @@ def load_df( return processed_df -def _parse_dates(date_string): +def parse_dates(date_string: str) -> Union[datetime, bool]: + """Gets a datetime from a string + + Args: + date_string (str): The string to get the datetime from + + Returns: + Union[datetime, bool]: The parsed datetime or False + """ try: date = dp.parse(date_string=date_string, settings={"STRICT_PARSING": True}) return date if date else False diff --git a/schematic/utils/general.py b/schematic/utils/general.py index 3dcbd029f..5ce4bd1c7 100644 --- a/schematic/utils/general.py +++ b/schematic/utils/general.py @@ -10,11 +10,11 @@ from functools import wraps from typing import Union, TypeVar, Any, Optional, Sequence, Callable -from synapseclient.core.exceptions import SynapseHTTPError -from synapseclient.entity import File, Folder, Project -from synapseclient.table import EntityViewSchema -from synapseclient.core import cache -from synapseclient import Synapse +from synapseclient.core.exceptions import SynapseHTTPError #type: ignore +from synapseclient.entity import File, Folder, Project #type: ignore +from synapseclient.table import EntityViewSchema #type: ignore +from synapseclient.core import cache #type: ignore +from synapseclient import Synapse #type: ignore # pylint: disable=logging-fstring-interpolation @@ -124,7 +124,7 @@ def calculate_datetime( return date_time_result -def check_synapse_cache_size(directory="/root/.synapseCache") -> Union[float, int]: +def check_synapse_cache_size(directory:str="/root/.synapseCache") -> Union[float, int]: """use du --sh command to calculate size of .synapseCache. 
Args: diff --git a/schematic/utils/google_api_utils.py b/schematic/utils/google_api_utils.py index 01ff765ba..948fb286f 100644 --- a/schematic/utils/google_api_utils.py +++ b/schematic/utils/google_api_utils.py @@ -6,9 +6,8 @@ from typing import Any, Union, Optional import pandas as pd -from googleapiclient.discovery import build -from google.oauth2 import service_account - +from googleapiclient.discovery import build # type :ignore +from google.oauth2 import service_account # type :ignoreS from schematic.configuration.configuration import CONFIG from schematic.store.synapse import SynapseStorage diff --git a/schematic/utils/validate_rules_utils.py b/schematic/utils/validate_rules_utils.py index dfb746a29..1b0d3c55c 100644 --- a/schematic/utils/validate_rules_utils.py +++ b/schematic/utils/validate_rules_utils.py @@ -1,6 +1,6 @@ """validate rules utils""" -from typing import Any +from typing import Union import logging from jsonschema import ValidationError @@ -8,7 +8,7 @@ logger = logging.getLogger(__name__) -def validation_rule_info() -> dict[str, dict[str, Any]]: +def validation_rule_info() -> dict[str, dict[str, Union[tuple[int, int], str, list[str], None]]]: """ Function to return dict that holds information about each rule Will be pulled into validate_single_rule, validate_manifest_rules, validate_schema_rules @@ -123,7 +123,7 @@ def validation_rule_info() -> dict[str, dict[str, Any]]: def get_error( - validation_rules: list, + validation_rules: str, attribute_name: str, error_type: str, input_filetype: str, diff --git a/schematic/utils/validate_utils.py b/schematic/utils/validate_utils.py index 1e98b79c9..a7e7be552 100644 --- a/schematic/utils/validate_utils.py +++ b/schematic/utils/validate_utils.py @@ -1,7 +1,7 @@ """Validation utils""" import re -from typing import Pattern, Union, Iterable +from typing import Pattern, Union, Iterable, Any, Optional from numbers import Number from jsonschema import validate import numpy as np @@ -50,7 +50,7 @@ def comma_separated_list_regex() -> Pattern[str]: return csv_list_regex -def rule_in_rule_list(rule: str, rule_list: list[str]) -> re.Match: +def rule_in_rule_list(rule: str, rule_list: list[str]) -> Optional[re.Match[str]]: """ Function to standardize checking to see if a rule is contained in a list of rules. 
@@ -62,9 +62,8 @@ def rule_in_rule_list(rule: str, rule_list: list[str]) -> re.Match: # Process string and list of strings for regex comparison rule_type = rule_type + "[^\|]*" - rule_list = "|".join(rule_list) - - return re.search(rule_type, rule_list, flags=re.IGNORECASE) + rule_list_str = "|".join(rule_list) + return re.search(rule_type, rule_list_str, flags=re.IGNORECASE) def parse_str_series_to_list(col: pd.Series) -> pd.Series: @@ -81,7 +80,7 @@ def parse_str_series_to_list(col: pd.Series) -> pd.Series: return col -def np_array_to_str_list(np_array: np.array) -> list[str]: +def np_array_to_str_list(np_array: Any) -> list[str]: """ Parse a numpy array of ints to a list of strings """ From 5ab2d9fb80edd0e7029e438f1874b04d1b291c0b Mon Sep 17 00:00:00 2001 From: andrewelamb Date: Sun, 4 Feb 2024 13:24:27 -0800 Subject: [PATCH 134/199] ran black --- schematic/utils/df_utils.py | 13 +++++-------- schematic/utils/general.py | 14 ++++++++------ schematic/utils/google_api_utils.py | 4 ++-- schematic/utils/validate_rules_utils.py | 10 +++++++--- 4 files changed, 22 insertions(+), 19 deletions(-) diff --git a/schematic/utils/df_utils.py b/schematic/utils/df_utils.py index 76f403382..29e70a6ed 100644 --- a/schematic/utils/df_utils.py +++ b/schematic/utils/df_utils.py @@ -3,12 +3,12 @@ import logging from copy import deepcopy from time import perf_counter -from typing import Union, Any +from typing import Union from datetime import datetime import dateparser as dp import pandas as pd import numpy as np -from pandarallel import pandarallel #type: ignore +from pandarallel import pandarallel # type: ignore # pylint: disable=logging-fstring-interpolation @@ -41,11 +41,8 @@ def load_df( t_load_df = perf_counter() # Read CSV to df as type specified in kwargs - org_df = pd.read_csv( #type: ignore - file_path, - keep_default_na=True, - encoding="utf8", - **load_args + org_df = pd.read_csv( # type: ignore + file_path, keep_default_na=True, encoding="utf8", **load_args ) assert isinstance(org_df, pd.DataFrame) @@ -80,7 +77,7 @@ def load_df( else: # parallelize iterations for large manfiests pandarallel.initialize(verbose=1) - ints: pd.DataFrame = org_df.parallel_applymap( #type: ignore + ints: pd.DataFrame = org_df.parallel_applymap( # type: ignore lambda x: np.int64(x) if str.isdigit(x) else False, na_action="ignore" ).fillna(False) diff --git a/schematic/utils/general.py b/schematic/utils/general.py index 5ce4bd1c7..92068e109 100644 --- a/schematic/utils/general.py +++ b/schematic/utils/general.py @@ -10,11 +10,11 @@ from functools import wraps from typing import Union, TypeVar, Any, Optional, Sequence, Callable -from synapseclient.core.exceptions import SynapseHTTPError #type: ignore -from synapseclient.entity import File, Folder, Project #type: ignore -from synapseclient.table import EntityViewSchema #type: ignore -from synapseclient.core import cache #type: ignore -from synapseclient import Synapse #type: ignore +from synapseclient.core.exceptions import SynapseHTTPError # type: ignore +from synapseclient.entity import File, Folder, Project # type: ignore +from synapseclient.table import EntityViewSchema # type: ignore +from synapseclient.core import cache # type: ignore +from synapseclient import Synapse # type: ignore # pylint: disable=logging-fstring-interpolation @@ -124,7 +124,9 @@ def calculate_datetime( return date_time_result -def check_synapse_cache_size(directory:str="/root/.synapseCache") -> Union[float, int]: +def check_synapse_cache_size( + directory: str = "/root/.synapseCache", +) -> 
Union[float, int]: """use du --sh command to calculate size of .synapseCache. Args: diff --git a/schematic/utils/google_api_utils.py b/schematic/utils/google_api_utils.py index 948fb286f..618a7227a 100644 --- a/schematic/utils/google_api_utils.py +++ b/schematic/utils/google_api_utils.py @@ -6,8 +6,8 @@ from typing import Any, Union, Optional import pandas as pd -from googleapiclient.discovery import build # type :ignore -from google.oauth2 import service_account # type :ignoreS +from googleapiclient.discovery import build # type :ignore +from google.oauth2 import service_account # type :ignoreS from schematic.configuration.configuration import CONFIG from schematic.store.synapse import SynapseStorage diff --git a/schematic/utils/validate_rules_utils.py b/schematic/utils/validate_rules_utils.py index 1b0d3c55c..9a245d470 100644 --- a/schematic/utils/validate_rules_utils.py +++ b/schematic/utils/validate_rules_utils.py @@ -8,7 +8,9 @@ logger = logging.getLogger(__name__) -def validation_rule_info() -> dict[str, dict[str, Union[tuple[int, int], str, list[str], None]]]: +def validation_rule_info() -> ( + dict[str, dict[str, Union[tuple[int, int], str, list[str], None]]] +): """ Function to return dict that holds information about each rule Will be pulled into validate_single_rule, validate_manifest_rules, validate_schema_rules @@ -188,7 +190,7 @@ def get_error( return ["NA", error_col, error_message, error_val] -def validate_single_rule(validation_rule:str, attribute:str, input_filetype:str): +def validate_single_rule(validation_rule: str, attribute: str, input_filetype: str): """ Perform validation for a single rule to ensure it is specified correctly with an appropriate number of arguments @@ -269,7 +271,9 @@ def validate_single_rule(validation_rule:str, attribute:str, input_filetype:str) return errors -def validate_schema_rules(validation_rules:list[str], attribute:str, input_filetype: str) -> None: +def validate_schema_rules( + validation_rules: list[str], attribute: str, input_filetype: str +) -> None: """ validation_rules: list input_filetype: str, used in error generation to aid user in From 318c538a901009aac044ea9fd00f3c8d2cf14116 Mon Sep 17 00:00:00 2001 From: andrewelamb Date: Sun, 4 Feb 2024 15:12:41 -0800 Subject: [PATCH 135/199] added some typing fixes --- schematic/utils/cli_utils.py | 2 +- schematic/utils/curie_utils.py | 4 ++-- schematic/utils/google_api_utils.py | 4 ++-- schematic/utils/validate_rules_utils.py | 22 ++++++++++++++-------- schematic/utils/validate_utils.py | 7 ++++--- schematic/utils/viz_utils.py | 2 +- 6 files changed, 24 insertions(+), 17 deletions(-) diff --git a/schematic/utils/cli_utils.py b/schematic/utils/cli_utils.py index 03017f5af..58180796e 100644 --- a/schematic/utils/cli_utils.py +++ b/schematic/utils/cli_utils.py @@ -36,7 +36,7 @@ def extract(dictionary: Any, key: Any) -> Union[Any, None]: return reduce(extract, keys, dictionary) -def log_value_from_config(arg_name: str, config_value: Any): +def log_value_from_config(arg_name: str, config_value: Any) -> None: """Logs when getting a value from the config Args: diff --git a/schematic/utils/curie_utils.py b/schematic/utils/curie_utils.py index fd24fd297..9345f2b87 100644 --- a/schematic/utils/curie_utils.py +++ b/schematic/utils/curie_utils.py @@ -6,7 +6,7 @@ logger = logging.getLogger(__name__) -def extract_name_from_uri_or_curie(item): +def extract_name_from_uri_or_curie(item: str) -> str: """Extract name from uri or curie""" if "http" not in item and len(item.split(":")) == 2: return 
item.split(":")[-1]
@@ -15,7 +15,7 @@ def extract_name_from_uri_or_curie(item):


-def expand_curie_to_uri(curie, context_info):
+def expand_curie_to_uri(curie: str, context_info: dict[str, str]) -> str:
     """Expand curie to uri based on the context given

     params
diff --git a/schematic/utils/google_api_utils.py b/schematic/utils/google_api_utils.py
index 618a7227a..3ac80ac05 100644
--- a/schematic/utils/google_api_utils.py
+++ b/schematic/utils/google_api_utils.py
@@ -6,8 +6,8 @@
 from typing import Any, Union, Optional

 import pandas as pd
-from googleapiclient.discovery import build  # type :ignore
-from google.oauth2 import service_account  # type :ignoreS
+from googleapiclient.discovery import build  # type: ignore
+from google.oauth2 import service_account  # type: ignore
 from schematic.configuration.configuration import CONFIG
 from schematic.store.synapse import SynapseStorage

diff --git a/schematic/utils/validate_rules_utils.py b/schematic/utils/validate_rules_utils.py
index 9a245d470..cce26a7b4 100644
--- a/schematic/utils/validate_rules_utils.py
+++ b/schematic/utils/validate_rules_utils.py
@@ -21,7 +21,7 @@ def validation_rule_info() -> (
         'complementary_rules': []}
     }
     """
-    rule_dict = {
+    return {
         "int": {
             "arguments": (1, 0),
             "type": "type_validation",
@@ -121,8 +121,6 @@ def validation_rule_info() -> (
         },
     }

-    return rule_dict
-

 def get_error(
     validation_rules: str,
@@ -172,16 +170,20 @@ def get_error(
         rule_type = validation_rules.split(" ")[0]

         if rule_type in validation_rule_info():
-            no_allowed, no_required = validation_rule_info()[rule_type]["arguments"]
+            arg_tuple = validation_rule_info()[rule_type]["arguments"]
+            assert isinstance(arg_tuple, tuple)
+            assert len(arg_tuple) == 2
+            number_allowed = str(arg_tuple[0])
+            number_required = str(arg_tuple[1])
         else:
-            no_allowed, no_required = ("", "")
+            number_allowed, number_required = ("", "")

         error_str = (
             f"The {input_filetype}, has an error in the validation rule "
             f"for the attribute: {attribute_name}, the provided validation rules "
             f"({validation_rules}) is not "
             "formatted properly. The number of provided arguments does not match the "
-            f"number allowed({no_allowed}) or required({no_required})."
+            f"number allowed({number_allowed}) or required({number_required})."
         )
         logging.error(error_str)
         error_message = error_str
@@ -232,10 +234,14 @@ def validate_single_rule(validation_rule: str, attribute: str, input_filetype: s
             )
         # if the rule is indeed a rule and formatted correctly, check that arguments are appropriate
         else:
-            arguments_allowed, arguments_required = validation_types[rule_type]["arguments"]
+            arg_tuple = validation_rule_info()[rule_type]["arguments"]
+            assert isinstance(arg_tuple, tuple)
+            assert len(arg_tuple) == 2
+            arguments_allowed, arguments_required = arg_tuple
         # Remove any fixed args from our calc.
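The assert pair introduced above exists to narrow the union type returned by
validation_rule_info() before unpacking; a minimal standalone illustration of the same
mypy narrowing pattern, with a made-up union value:

    from typing import Union

    arg_tuple: Union[tuple[int, int], str, None] = (1, 0)
    assert isinstance(arg_tuple, tuple)  # narrows the union to the tuple member
    assert len(arg_tuple) == 2           # guards the unpacking below
    arguments_allowed, arguments_required = arg_tuple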
- if "fixed_arg" in validation_types[rule_type].keys(): + if "fixed_arg" in validation_types[rule_type]: fixed_args = validation_types[rule_type]["fixed_arg"] + assert isinstance(fixed_args, list) num_args = ( len([vr for vr in validation_rule_with_args if vr not in fixed_args]) - 1 diff --git a/schematic/utils/validate_utils.py b/schematic/utils/validate_utils.py index a7e7be552..de7c4e66b 100644 --- a/schematic/utils/validate_utils.py +++ b/schematic/utils/validate_utils.py @@ -1,6 +1,7 @@ """Validation utils""" import re +from collections.abc import Mapping from typing import Pattern, Union, Iterable, Any, Optional from numbers import Number from jsonschema import validate @@ -12,7 +13,7 @@ # pylint: disable = anomalous-backslash-in-string -def validate_schema(schema): +def validate_schema(schema: Union[Mapping, bool]) -> None: """Validate schema against schema.org standard""" data_path = "validation_schemas/model.schema.json" json_schema_path = LOADER.filename(data_path) @@ -20,7 +21,7 @@ def validate_schema(schema): return validate(schema, json_schema) -def validate_property_schema(schema): +def validate_property_schema(schema: Union[Mapping, bool]) -> None: """Validate schema against SchemaORG property definition standard""" data_path = "validation_schemas/property.schema.json" json_schema_path = LOADER.filename(data_path) @@ -28,7 +29,7 @@ def validate_property_schema(schema): return validate(schema, json_schema) -def validate_class_schema(schema): +def validate_class_schema(schema: Union[Mapping, bool]) -> None: """Validate schema against SchemaORG class definition standard""" data_path = "validation_schemas/class.schema.json" json_schema_path = LOADER.filename(data_path) diff --git a/schematic/utils/viz_utils.py b/schematic/utils/viz_utils.py index 262ee2f90..58a904728 100644 --- a/schematic/utils/viz_utils.py +++ b/schematic/utils/viz_utils.py @@ -1,7 +1,7 @@ """viz utils""" from typing import Optional -import graphviz +import graphviz # type: ignore def visualize(edges, size: Optional[float] = None) -> graphviz.Digraph: From 8169f6a6df9857f88f2eeba22d42897e0712560e Mon Sep 17 00:00:00 2001 From: andrewelamb Date: Sun, 4 Feb 2024 15:40:33 -0800 Subject: [PATCH 136/199] remove get_synIDs function --- schematic/manifest/commands.py | 4 ++-- schematic/models/commands.py | 4 ++-- schematic/utils/cli_utils.py | 9 --------- 3 files changed, 4 insertions(+), 13 deletions(-) diff --git a/schematic/manifest/commands.py b/schematic/manifest/commands.py index 002ada68c..d6b20d3cf 100644 --- a/schematic/manifest/commands.py +++ b/schematic/manifest/commands.py @@ -10,7 +10,7 @@ from schematic.schemas.data_model_graph import DataModelGraph, DataModelGraphExplorer from schematic.manifest.generator import ManifestGenerator -from schematic.utils.cli_utils import log_value_from_config, query_dict, parse_synIDs +from schematic.utils.cli_utils import log_value_from_config, query_dict, parse_syn_ids from schematic.utils.google_api_utils import export_manifest_csv from schematic.help import manifest_commands @@ -253,7 +253,7 @@ def create_single_manifest(data_type, output_csv=None, output_xlsx=None): "-ps", "--project_scope", default=None, - callback=parse_synIDs, + callback=parse_syn_ids, help=query_dict(manifest_commands, ("manifest", "migrate", "project_scope")), ) @click.option( diff --git a/schematic/models/commands.py b/schematic/models/commands.py index 0c1e6e8a3..ac6f4946a 100644 --- a/schematic/models/commands.py +++ b/schematic/models/commands.py @@ -14,7 +14,7 @@ from 
schematic.utils.cli_utils import ( log_value_from_config, query_dict, - parse_synIDs, + parse_syn_ids, parse_comma_str_to_list, ) from schematic.help import model_commands @@ -98,7 +98,7 @@ def model(ctx, config): # use as `schematic model ...` "-ps", "--project_scope", default=None, - callback=parse_synIDs, + callback=parse_syn_ids, help=query_dict(model_commands, ("model", "validate", "project_scope")), ) @click.option( diff --git a/schematic/utils/cli_utils.py b/schematic/utils/cli_utils.py index 58180796e..342053600 100644 --- a/schematic/utils/cli_utils.py +++ b/schematic/utils/cli_utils.py @@ -48,15 +48,6 @@ def log_value_from_config(arg_name: str, config_value: Any) -> None: ) -def parse_synIDs( # pylint: disable=invalid-name - ctx: Any, # pylint: disable=unused-argument - param: str, # pylint: disable=unused-argument - synIDs: str, # pylint: disable=invalid-name -) -> Optional[list[str]]: - """For backwards compatibility""" - return parse_syn_ids(ctx, param, synIDs) - - def parse_syn_ids( ctx: Any, # pylint: disable=unused-argument param: str, # pylint: disable=unused-argument From 7bf62b3fa4aea0943f26fcede5ea0ae0d2498568 Mon Sep 17 00:00:00 2001 From: andrewelamb Date: Mon, 5 Feb 2024 07:35:55 -0800 Subject: [PATCH 137/199] added more typing --- schematic/visualization/tangled_tree.py | 67 ++++++++++++++++--------- 1 file changed, 42 insertions(+), 25 deletions(-) diff --git a/schematic/visualization/tangled_tree.py b/schematic/visualization/tangled_tree.py index eaba44fb1..cb0649a52 100644 --- a/schematic/visualization/tangled_tree.py +++ b/schematic/visualization/tangled_tree.py @@ -6,9 +6,10 @@ import logging import os from os import path -from typing import Optional, Any +from typing import Optional, Any, Literal import networkx as nx # type: ignore +from networkx.classes.reportviews import NodeView, EdgeDataView # type: ignore import numpy as np import pandas as pd @@ -85,7 +86,9 @@ def strip_double_quotes(self, string: str) -> str: string = "".join(string.split()) return string - def get_text_for_tangled_tree(self, text_type, save_file=False): + def get_text_for_tangled_tree( + self, text_type: Literal["highlighted", "plain"], save_file: bool = False + ) -> Optional[str]: """ Gather the text that needs to be either highlighted or plain for the tangled tree visualization. @@ -156,7 +159,9 @@ def get_text_for_tangled_tree(self, text_type, save_file=False): return df.to_csv() - def get_topological_generations(self): + def get_topological_generations( + self, + ) -> tuple[list[list], NodeView, EdgeDataView, nx.DiGraph]: """Gather topological_gen, nodes and edges based on figure type. Outputs: topological_gen (List(list)):list of lists. Indicates layers of nodes. @@ -200,7 +205,7 @@ def remove_unwanted_characters_from_conditional_statement( cond_req = cond_req_new.replace("If", "").lstrip().rstrip() return cond_req - def get_ca_alias(self, conditional_requirements: list) -> dict: + def get_ca_alias(self, conditional_requirements: list) -> dict[str, str]: """Get the alias for each conditional attribute. NOTE: Obtaining attributes(attr) and aliases(ali) in this function is specific @@ -236,7 +241,9 @@ def get_ca_alias(self, conditional_requirements: list) -> dict: ca_alias[elem] = attr return ca_alias - def gather_component_dependency_info(self, cn, attributes_df): + def gather_component_dependency_info( + self, cn: str, attributes_df: pd.DataFrame + ) -> tuple[list[str], dict[str, str], list[str]]: """Gather all component dependency information. 
Inputs: cn: (str) component name @@ -282,7 +289,12 @@ def gather_component_dependency_info(self, cn, attributes_df): return conditional_attributes, ca_alias, all_attributes - def find_source_nodes(self, nodes, edges, all_attributes=None): + def find_source_nodes( + self, + nodes: NodeView, + edges: EdgeDataView, + all_attributes: Optional[list[str]] = None, + ) -> list[str]: """Find all nodes in the graph that do not have a parent node. Inputs: nodes: (Networkx NodeView) Nodes of the component or dependency graph. @@ -315,7 +327,9 @@ def find_source_nodes(self, nodes, edges, all_attributes=None): source_nodes.append(node) return source_nodes - def get_parent_child_dictionary(self, edges, all_attributes=None): + def get_parent_child_dictionary( + self, edges: EdgeDataView, all_attributes: Optional[list[str]] = None + ) -> tuple[dict[str, list[str]], dict[str, list[str]]]: """ Based on the dependency type, create dictionaries between parent and child and child and parent attributes. @@ -332,10 +346,9 @@ def get_parent_child_dictionary(self, edges, all_attributes=None): value: list of the parents children """ # pylint: disable=too-many-branches - if all_attributes is None: - all_attributes = [] - child_parents = {} - parent_children = {} + all_attributes_list = [] if all_attributes is None else all_attributes + child_parents: dict[str, list[str]] = {} + parent_children: dict[str, list[str]] = {} if self.dependency_type == "requiresComponent": # Construct child_parents dictionary @@ -360,30 +373,30 @@ def get_parent_child_dictionary(self, edges, all_attributes=None): # Construct child_parents dictionary for edge in edges: # Check if child is an attribute for the current component - if edge[0] in all_attributes: + if edge[0] in all_attributes_list: # Add child as a key if edge[0] not in child_parents: child_parents[edge[0]] = [] # Add parent to list if it is an attribute for the current component - if edge[1] in all_attributes: + if edge[1] in all_attributes_list: child_parents[edge[0]].append(edge[1]) # Construct parent_children dictionary for edge in edges: # Check if parent is an attribute for the current component - if edge[1] in all_attributes: + if edge[1] in all_attributes_list: # Add parent as a key if edge[1] not in parent_children: parent_children[edge[1]] = [] # Add child to list if it is an attribute for the current component - if edge[0] in all_attributes: + if edge[0] in all_attributes_list: parent_children[edge[1]].append(edge[0]) return child_parents, parent_children - def alias_edges(self, ca_alias: dict, edges) -> list[list]: + def alias_edges(self, ca_alias: dict[str, str], edges: EdgeDataView) -> list[list]: """Create new edges based on aliasing between an attribute and its response. Purpose: Create aliased edges. @@ -435,8 +448,11 @@ def alias_edges(self, ca_alias: dict, edges) -> list[list]: return aliased_edges def prune_expand_topological_gen( - self, topological_gen, all_attributes, conditional_attributes - ): + self, + topological_gen: list[list[str]], + all_attributes: list[str], + conditional_attributes: list[str], + ) -> list[list[str]]: """ Purpose: Remake topological_gen with only relevant nodes. @@ -672,7 +688,7 @@ def get_layers_dict_list( child_parents: dict, parent_children: dict, all_parent_children: dict, - ): + ) -> list[list[dict[str, list[str]]]]: """Convert node_layers to a list of lists of dictionaries that specifies each node and its parents (if applicable). 
        Inputs:
@@ -689,7 +705,9 @@ def get_layers_dict_list(
             parents (if applicable)
         """
         num_layers = len(node_layers)
-        layers_list = [[] for i in range(0, num_layers)]
+        layers_list: list[list[dict[str, list[str]]]] = [
+            [] for i in range(0, num_layers)
+        ]
         for i, layer in enumerate(node_layers):
             for node in layer:
                 if node in child_parents.keys():
@@ -793,8 +811,7 @@ def save_outputs(
             tangled tree. If save_file ==True: is an empty list.
         """
-        if all_layers is None:
-            all_layers = []
+        all_layers_list = [] if all_layers is None else all_layers
         if save_file:
             if cn:
                 output_file_name = (
@@ -818,10 +835,10 @@
                     f"{os.path.join(self.json_output_path, output_file_name)}"
                 )
             )
-            all_layers = layers_json
+            all_layers_list = layers_json
         else:
-            all_layers.append(layers_json)
-        return all_layers
+            all_layers_list.append(layers_json)
+        return all_layers_list
 
     def get_ancestors_nodes(
         self, subgraph: nx.DiGraph, components: list[str]

From 11e547f1dc4f0bde20166cd29610a3624ff8ab94 Mon Sep 17 00:00:00 2001
From: andrewelamb
Date: Mon, 5 Feb 2024 07:36:37 -0800
Subject: [PATCH 138/199] added more typing

---
 schematic/visualization/tangled_tree.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/schematic/visualization/tangled_tree.py b/schematic/visualization/tangled_tree.py
index cb0649a52..ccda4dedc 100644
--- a/schematic/visualization/tangled_tree.py
+++ b/schematic/visualization/tangled_tree.py
@@ -796,7 +796,7 @@ def save_outputs(
         layers_json,
         cn: str = "",
         all_layers: Optional[list[str]] = None,
-    ):
+    ) -> list[str]:
         """
         Inputs:
             save_file (bool): Indicates whether to save a file locally or not.:

From 8b2b33815144e61d681d5326ee130326d3bb0693 Mon Sep 17 00:00:00 2001
From: andrewelamb
Date: Mon, 5 Feb 2024 08:30:17 -0800
Subject: [PATCH 139/199] moved some pylint disable lines around

---
 schematic/visualization/commands.py     |  2 +-
 schematic/visualization/tangled_tree.py | 74 ++++++++++++++-----------
 2 files changed, 42 insertions(+), 34 deletions(-)

diff --git a/schematic/visualization/commands.py b/schematic/visualization/commands.py
index d69354a7b..4ed31595f 100644
--- a/schematic/visualization/commands.py
+++ b/schematic/visualization/commands.py
@@ -2,6 +2,7 @@
 # pylint: disable=unused-argument
 # pylint: disable=useless-return
 # pylint: disable=unused-variable
+# pylint: disable=logging-fstring-interpolation
 
 import logging
 import sys
@@ -40,7 +41,7 @@ def viz(ctx: Any, config: str) -> None:  # use as `schematic model ...`
     Sub-commands for Visualization methods.
     """
     try:
-        # pylint: disable=logging-fstring-interpolation
         logger.debug(f"Loading config file contents in '{config}'")
         CONFIG.load_config(config)
         ctx.obj = CONFIG
diff --git a/schematic/visualization/tangled_tree.py b/schematic/visualization/tangled_tree.py
index ccda4dedc..9c14cb3bb 100644
--- a/schematic/visualization/tangled_tree.py
+++ b/schematic/visualization/tangled_tree.py
@@ -1,5 +1,7 @@
 """Tangled tree class"""
 
+# pylint: disable=logging-fstring-interpolation
+# pylint: disable=too-many-instance-attributes
 
 from io import StringIO
 import json
@@ -25,9 +27,6 @@
 class TangledTree:
     """Tangled tree class"""
 
-    # pylint: disable=too-many-instance-attributes
-    # pylint: disable=invalid-name
-
     def __init__(
         self,
         path_to_json_ld: str,
@@ -64,11 +63,15 @@ def __init__(
         self.schema_abbr = self.schema_name.split("_")[0]
 
         # Initialize AttributesExplorer
-        self.ae = AttributesExplorer(self.path_to_json_ld)
+        self.attributes_explorer = AttributesExplorer(self.path_to_json_ld)
 
         # Create output paths.
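# For context on the pragmas moved in this patch: a "# pylint: disable=..."
# comment on its own line applies from that point through the enclosing scope
# (the whole module when placed at top level), while a trailing pragma covers
# only its own line. An illustrative, non-schematic sketch of the scopes:
#
#     # pylint: disable=logging-fstring-interpolation   # rest of the module
#
#     class Widget:  # pylint: disable=too-many-instance-attributes
#         ...
#
#     logger.info(f"{value}")  # pylint: disable=logging-fstring-interpolation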
-        self.text_csv_output_path = self.ae.create_output_path("text_csv")
-        self.json_output_path = self.ae.create_output_path("tangled_tree_json")
+        self.text_csv_output_path = self.attributes_explorer.create_output_path(
+            "text_csv"
+        )
+        self.json_output_path = self.attributes_explorer.create_output_path(
+            "tangled_tree_json"
+        )
 
     def strip_double_quotes(self, string: str) -> str:
         """Removes double quotes from string
@@ -135,29 +138,29 @@ def get_text_for_tangled_tree(
                 plain_descendants = [n for n in nodes if n != node]
             else:
                 # Format highlighted text for Observable.
-                for hd in highlight_descendants:
-                    highlighted.append([node, "id", hd])
+                for descendant in highlight_descendants:
+                    highlighted.append([node, "id", descendant])
                 # Gather the non-highlighted text as plain text descendants.
                 plain_descendants = [
                     node for node in nodes if node not in highlight_descendants
                 ]
 
             # Format all the plain text for observable.
-            for nd in plain_descendants:
-                plain.append([node, "id", nd])
+            for descendant in plain_descendants:
+                plain.append([node, "id", descendant])
 
         # Prepare df depending on what type of text we need.
-        df = pd.DataFrame(
+        dataframe = pd.DataFrame(
             locals()[text_type.lower()], columns=["Component", "type", "name"]
         )
 
         # Depending on input either export csv locally to disk or as a string.
         if save_file:
             file_name = f"{self.schema_abbr}_{self.figure_type}_{text_type}.csv"
-            df.to_csv(os.path.join(self.text_csv_output_path, file_name))
+            dataframe.to_csv(os.path.join(self.text_csv_output_path, file_name))
             return None
 
-        return df.to_csv()
+        return dataframe.to_csv()
 
     def get_topological_generations(
         self,
@@ -242,11 +245,11 @@ def get_ca_alias(self, conditional_requirements: list) -> dict[str, str]:
         return ca_alias
 
     def gather_component_dependency_info(
-        self, cn: str, attributes_df: pd.DataFrame
+        self, component_name: str, attributes_df: pd.DataFrame
    ) -> tuple[list[str], dict[str, str], list[str]]:
         """Gather all component dependency information.
         Inputs:
-            cn: (str) component name
+            component_name: (str) component name
             attributes_df: (Pandas DataFrame) Details for all attributes across all
                 components. From AttributesExplorer.
         Outputs:
@@ -259,7 +262,7 @@ def gather_component_dependency_info(
 
         # Gather all component dependency information
         component_attributes = self.dmge.get_descendants_by_edge_type(
-            cn, self.dependency_type, connected=True
+            component_name, self.dependency_type, connected=True
         )
 
         # Dont want to display `Component` in the figure so remove
@@ -270,12 +273,14 @@ def gather_component_dependency_info(
         if "Cond_Req" in attributes_df.columns:
             conditional_attributes = list(
                 attributes_df[
-                    (attributes_df["Cond_Req"]) & (attributes_df["Component"] == cn)
+                    (attributes_df["Cond_Req"])
+                    & (attributes_df["Component"] == component_name)
                 ]["Label"]
             )
             conditional_requirements = list(
                 attributes_df[
-                    (attributes_df["Cond_Req"]) & (attributes_df["Component"] == cn)
+                    (attributes_df["Cond_Req"])
+                    & (attributes_df["Component"] == component_name)
                 ]["Conditional Requirements"]
             )
             ca_alias = self.get_ca_alias(conditional_requirements)
@@ -501,7 +506,7 @@ def get_base_layers(
         topological_gen: list[list],
         child_parents: dict,
         source_nodes: list,
-        cn: str,
+        component_name: str,
     ) -> tuple[dict[str, Any], dict[str, Any]]:
         """
         Purpose:
@@ -527,7 +532,7 @@ def get_base_layers(
             key: child
             value: list of the child's parents
         source_nodes: list, list of nodes that do not have a parent.
- cn: str, component name, default='' + component_name: str, component name, default='' Output: base_layers: dict, key: component name, value: layer represents initial layering of topological_gen @@ -590,7 +595,7 @@ def get_base_layers( parent_levels.count(parent_levels[0]) != len(parent_levels) ) - and par != cn + and par != component_name ): # If so, remove its position from parent_levels parent_levels.remove(base_layers_copy[par]) @@ -741,7 +746,7 @@ def get_node_layers_json( # pylint: disable=too-many-arguments source_nodes: list[str], child_parents: dict, parent_children: dict, - cn: str = "", + component_name: str = "", all_parent_children: Optional[dict] = None, ) -> str: """Return all the layers of a single tangled tree as a JSON String. @@ -763,7 +768,7 @@ def get_node_layers_json( # pylint: disable=too-many-arguments """ base_layers, base_layers_copy_copy = self.get_base_layers( - topological_gen, child_parents, source_nodes, cn + topological_gen, child_parents, source_nodes, component_name ) # Rearrange node_layers to follow the pattern laid out in component layers. @@ -794,14 +799,14 @@ def save_outputs( self, save_file: bool, layers_json, - cn: str = "", + component_name: str = "", all_layers: Optional[list[str]] = None, ) -> list[str]: """ Inputs: save_file (bool): Indicates whether to save a file locally or not.: layers_json (JSON String): Layers of nodes in the tangled tree as a json string. - cn (str): component name, default='' + component_name (str): component name, default='' all_layers (list of json strings): Each string represents contains the layers for a single tangled tree. If a dependency figure the list is added to each time this function is called, so starts incomplete. default=[]. @@ -813,9 +818,9 @@ def save_outputs( """ all_layers_list = [] if all_layers is None else all_layers if save_file: - if cn: + if component_name: output_file_name = ( - f"{self.schema_abbr}_{self.figure_type}_{cn}_tangled_tree.json" + f"{self.schema_abbr}_{self.figure_type}_{component_name}_tangled_tree.json" ) else: output_file_name = ( @@ -828,7 +833,6 @@ def save_outputs( ) as outfile: outfile.write(layers_json) - # pylint: disable=logging-fstring-interpolation logger.info( ( "Tangled Tree JSON String saved to " @@ -910,17 +914,19 @@ def get_tangled_tree_layers(self, save_file: bool = True): component_nodes = component_dg.nodes() # Get table of attributes. - attributes_csv_str = self.ae.parse_attributes(save_file=False) + attributes_csv_str = self.attributes_explorer.parse_attributes( + save_file=False + ) attributes_df = pd.read_table(StringIO(attributes_csv_str), sep=",") all_layers = [] - for cn in component_nodes: + for component_name in component_nodes: # Gather attribute and dependency information per node ( conditional_attributes, ca_alias, all_attributes, - ) = self.gather_component_dependency_info(cn, attributes_df) + ) = self.gather_component_dependency_info(component_name, attributes_df) # Gather all source nodes source_nodes = self.find_source_nodes( @@ -947,9 +953,11 @@ def get_tangled_tree_layers(self, save_file: bool = True): source_nodes, child_parents, parent_children, - cn, + component_name, ) # If indicated save outputs locally else, gather all layers. 
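# The `all_layers_list = [] if all_layers is None else all_layers` rewrite in
# save_outputs leaves the Optional parameter untouched and gives the local a
# plain list type; the same None-default guard is also how Python code avoids
# the shared mutable default pitfall. A small illustrative sketch (not
# schematic code):
#
#     def collect_bad(item, items=[]):          # one list shared by all calls
#         items.append(item)
#         return items
#
#     def collect_good(item, items=None):
#         items_list = [] if items is None else items
#         items_list.append(item)
#         return items_list
#
#     collect_bad(1); collect_bad(2)    # -> [1, 2]  (state leaks across calls)
#     collect_good(1); collect_good(2)  # -> [2]     (fresh list per call)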
-            all_layers = self.save_outputs(save_file, layers_json, cn, all_layers)
+            all_layers = self.save_outputs(
+                save_file, layers_json, component_name, all_layers
+            )
 
         return all_layers

From ca57ec6c720563527d980c35eaa5ce2ff4f99d55 Mon Sep 17 00:00:00 2001
From: andrewelamb
Date: Mon, 5 Feb 2024 08:34:38 -0800
Subject: [PATCH 140/199] fix some linting

---
 schematic/visualization/tangled_tree.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/schematic/visualization/tangled_tree.py b/schematic/visualization/tangled_tree.py
index 9c14cb3bb..ed4db2b8c 100644
--- a/schematic/visualization/tangled_tree.py
+++ b/schematic/visualization/tangled_tree.py
@@ -1,7 +1,6 @@
 """Tangled tree class"""
 
 # pylint: disable=logging-fstring-interpolation
-# pylint: disable=too-many-instance-attributes
 
 from io import StringIO
 import json
@@ -24,7 +23,7 @@
 logger = logging.getLogger(__name__)
 
 
-class TangledTree:
+class TangledTree:  # pylint: disable=too-many-instance-attributes
     """Tangled tree class"""
 
     def __init__(
@@ -820,7 +819,8 @@ def save_outputs(
         if save_file:
             if component_name:
                 output_file_name = (
-                    f"{self.schema_abbr}_{self.figure_type}_{component_name}_tangled_tree.json"
+                    f"{self.schema_abbr}_{self.figure_type}_"
+                    f"{component_name}_tangled_tree.json"
                 )
             else:
                 output_file_name = (

From c2151299e8f0a33a5c7a5289a9ce1c5f1add431a Mon Sep 17 00:00:00 2001
From: andrewelamb
Date: Mon, 5 Feb 2024 09:12:36 -0800
Subject: [PATCH 141/199] removed unused functions, moved around linting disables

---
 schematic/utils/cli_utils.py                  |  7 +-
 schematic/utils/df_utils.py                   |  4 +-
 schematic/utils/general.py                    |  4 +-
 schematic/utils/google_api_utils.py           | 80 +------------------
 schematic/utils/validate_utils.py             |  4 +-
 tests/data/mock_manifests/test_BulkRNAseq.csv |  6 +-
 6 files changed, 16 insertions(+), 89 deletions(-)

diff --git a/schematic/utils/cli_utils.py b/schematic/utils/cli_utils.py
index 342053600..52debd51c 100644
--- a/schematic/utils/cli_utils.py
+++ b/schematic/utils/cli_utils.py
@@ -1,5 +1,8 @@
 """CLI utils"""
 
+# pylint: disable=logging-fstring-interpolation
+# pylint: disable=anomalous-backslash-in-string
+
 import logging
 
 from typing import Any, Mapping, Sequence, Union, Optional
@@ -8,10 +11,6 @@
 
 logger = logging.getLogger(__name__)
 
-# We are using fstrings in logger methods
-# pylint: disable=logging-fstring-interpolation
-# pylint: disable = anomalous-backslash-in-string
-
 
 def query_dict(dictionary: Mapping[Any, Any], keys: Sequence[Any]) -> Union[Any, None]:
     """Access a nested value in a dictionary corresponding
diff --git a/schematic/utils/df_utils.py b/schematic/utils/df_utils.py
index 29e70a6ed..a95c280ea 100644
--- a/schematic/utils/df_utils.py
+++ b/schematic/utils/df_utils.py
@@ -1,5 +1,7 @@
 """df utils"""
 
+# pylint: disable=logging-fstring-interpolation
+
 import logging
 from copy import deepcopy
 from time import perf_counter
@@ -10,8 +12,6 @@
 import numpy as np
 from pandarallel import pandarallel  # type: ignore
 
-# pylint: disable=logging-fstring-interpolation
-
 logger = logging.getLogger(__name__)
 
 
diff --git a/schematic/utils/general.py b/schematic/utils/general.py
index 92068e109..246d0bc9e 100644
--- a/schematic/utils/general.py
+++ b/schematic/utils/general.py
@@ -1,5 +1,7 @@
 """General utils"""
 
+# pylint: disable=logging-fstring-interpolation
+
 import logging
 import os
 import pstats
@@ -16,8 +18,6 @@
 from synapseclient.core import cache  # type: ignore
 from synapseclient import Synapse  # type: ignore
 
-# pylint: disable=logging-fstring-interpolation
-
 logger = 
logging.getLogger(__name__) T = TypeVar("T") diff --git a/schematic/utils/google_api_utils.py b/schematic/utils/google_api_utils.py index 3ac80ac05..4cc743e6b 100644 --- a/schematic/utils/google_api_utils.py +++ b/schematic/utils/google_api_utils.py @@ -1,9 +1,11 @@ """Google API utils""" +# pylint: disable=logging-fstring-interpolation + import os import logging import json -from typing import Any, Union, Optional +from typing import Any, Union import pandas as pd from googleapiclient.discovery import build # type: ignore @@ -11,8 +13,6 @@ from schematic.configuration.configuration import CONFIG from schematic.store.synapse import SynapseStorage -# pylint: disable=logging-fstring-interpolation - logger = logging.getLogger(__name__) @@ -23,23 +23,6 @@ ] -# This function doesn't appear to be used or tested anywhere in schematic. -# TO DO: replace by pygsheets calls? -def build_credentials() -> dict[str, Any]: # pylint: disable=missing-function-docstring - creds = generate_token() # pylint: disable=undefined-variable - - # get a Google Sheet API service - sheet_service = build("sheets", "v4", credentials=creds) - # get a Google Drive API service - drive_service = build("drive", "v3", credentials=creds) - - return { - "sheet_service": sheet_service, - "drive_service": drive_service, - "creds": creds, - } - - def build_service_account_creds() -> dict[str, Any]: """Build Google service account credentials @@ -159,6 +142,7 @@ def export_manifest_drive_service( spreadsheet_id = manifest_url.split("/")[-1] # use google drive + # Pylint seems to have trouble with the google api classes, recognizing their methods data = ( drive_service.files() # pylint: disable=no-member .export(fileId=spreadsheet_id, mimeType=mime_type) @@ -186,59 +170,3 @@ def export_manifest_csv(file_path: str, manifest: Union[pd.DataFrame, str]) -> N manifest.to_csv(file_path, index=False) else: export_manifest_drive_service(manifest, file_path, mime_type="text/csv") - - -# This function doesn't appear to be used or tested -# pd.ExcelWriter is an ABC class which means it SHOULD NOT be instantiated -def export_manifest_excel( - manifest: Union[pd.DataFrame, str], output_excel: Optional[str] = None -) -> None: - """ - Export manifest as an Excel spreadsheet by using google sheet API. - This approach could export hidden sheet - Google sheet gets exported as an excel spreadsheet. - If there's a hidden sheet, the hidden sheet also gets exported. - - Args: - manifest (Union[pd.DataFrame, str]): could be a dataframe or a manifest url - output_excel (Optional[str], optional): name of the exported manifest sheet. - Defaults to None. 
-    """
-    # pylint: disable=abstract-class-instantiated
-    # pylint: disable=no-member
-
-    # initialize drive service
-    services_creds = build_service_account_creds()
-    sheet_service = services_creds["sheet_service"]
-
-    if isinstance(manifest, pd.DataFrame):
-        manifest.to_excel(output_excel, index=False)
-    else:
-        # get spreadsheet id from url
-        spreadsheet_id = manifest.split("/")[-1]
-
-        # use google sheet api
-        sheet_metadata = (
-            sheet_service.spreadsheets().get(spreadsheetId=spreadsheet_id).execute()
-        )
-        sheets = sheet_metadata.get("sheets")
-
-        # export to Excel
-        writer = pd.ExcelWriter(output_excel)
-
-        # export each sheet in manifest
-        for sheet in sheets:
-            dataset = (
-                sheet_service.spreadsheets()
-                .values()
-                .get(spreadsheetId=spreadsheet_id, range=sheet["properties"]["title"])
-                .execute()
-            )
-            dataset_df = pd.DataFrame(dataset["values"])
-            dataset_df.columns = dataset_df.iloc[0]
-            dataset_df.drop(dataset_df.index[0], inplace=True)
-            dataset_df.to_excel(
-                writer, sheet_name=sheet["properties"]["title"], index=False
-            )
-        writer.save()
-        writer.close()
diff --git a/schematic/utils/validate_utils.py b/schematic/utils/validate_utils.py
index de7c4e66b..fed5b422c 100644
--- a/schematic/utils/validate_utils.py
+++ b/schematic/utils/validate_utils.py
@@ -1,5 +1,7 @@
 """Validation utils"""
 
+# pylint: disable = anomalous-backslash-in-string
+
 import re
 from collections.abc import Mapping
 from typing import Pattern, Union, Iterable, Any, Optional
@@ -10,8 +12,6 @@
 from schematic.utils.io_utils import load_json
 from schematic import LOADER
 
-# pylint: disable = anomalous-backslash-in-string
-
 
 def validate_schema(schema: Union[Mapping, bool]) -> None:
     """Validate schema against schema.org standard"""
diff --git a/tests/data/mock_manifests/test_BulkRNAseq.csv b/tests/data/mock_manifests/test_BulkRNAseq.csv
index facfa3f6a..49e1a38e5 100644
--- a/tests/data/mock_manifests/test_BulkRNAseq.csv
+++ b/tests/data/mock_manifests/test_BulkRNAseq.csv
@@ -1,3 +1,3 @@
-Filename,Sample ID,File Format,Component,Genome Build,Genome FASTA
-TestRNA-seqDataset1/TestRNA-seq-dummy-dataset.rtf,ABCD,BAM,BulkRNA-seqAssay,GRCh38,
-TestRNA-seqDataset1/TestRNA-seq-dummy-dataset2.rtf,EFGH,CRAM,BulkRNA-seqAssay,GRCm39,
+Filename,Sample ID,File Format,Component,Genome Build,Genome FASTA,Id,entityId
+TestRNA-seqDataset1/TestRNA-seq-dummy-dataset.rtf,ABCD,BAM,BulkRNA-seqAssay,GRCh38,,dcb30f9c-0810-4159-aead-6aefcec19d36,syn39242580
+TestRNA-seqDataset1/TestRNA-seq-dummy-dataset2.rtf,EFGH,CRAM,BulkRNA-seqAssay,GRCm39,,8ae18fa9-e68e-4c56-b9aa-9a55cb4ccf94,syn51900502

From 84e83e63f1a39ab2405d95ac7b05ff7880824632 Mon Sep 17 00:00:00 2001
From: andrewelamb
Date: Mon, 5 Feb 2024 09:19:31 -0800
Subject: [PATCH 142/199] fix function call whose name was changed

---
 schematic/models/commands.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/schematic/models/commands.py b/schematic/models/commands.py
index ac6f4946a..6e545f95f 100644
--- a/schematic/models/commands.py
+++ b/schematic/models/commands.py
@@ -204,7 +204,7 @@ def submit_manifest(
     "-ps",
     "--project_scope",
     default=None,
-    callback=parse_synIDs,
+    callback=parse_syn_ids,
     help=query_dict(model_commands, ("model", "validate", "project_scope")),
 )
 @click.pass_obj

From 483c9915a3f732b2a0e8ab5eb12a5fffefc0523f Mon Sep 17 00:00:00 2001
From: andrewelamb
Date: Mon, 5 Feb 2024 09:44:29 -0800
Subject: [PATCH 143/199] fix mock manifest

---
 tests/data/mock_manifests/test_BulkRNAseq.csv | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git 
a/tests/data/mock_manifests/test_BulkRNAseq.csv b/tests/data/mock_manifests/test_BulkRNAseq.csv index 49e1a38e5..facfa3f6a 100644 --- a/tests/data/mock_manifests/test_BulkRNAseq.csv +++ b/tests/data/mock_manifests/test_BulkRNAseq.csv @@ -1,3 +1,3 @@ -Filename,Sample ID,File Format,Component,Genome Build,Genome FASTA,Id,entityId -TestRNA-seqDataset1/TestRNA-seq-dummy-dataset.rtf,ABCD,BAM,BulkRNA-seqAssay,GRCh38,,dcb30f9c-0810-4159-aead-6aefcec19d36,syn39242580 -TestRNA-seqDataset1/TestRNA-seq-dummy-dataset2.rtf,EFGH,CRAM,BulkRNA-seqAssay,GRCm39,,8ae18fa9-e68e-4c56-b9aa-9a55cb4ccf94,syn51900502 +Filename,Sample ID,File Format,Component,Genome Build,Genome FASTA +TestRNA-seqDataset1/TestRNA-seq-dummy-dataset.rtf,ABCD,BAM,BulkRNA-seqAssay,GRCh38, +TestRNA-seqDataset1/TestRNA-seq-dummy-dataset2.rtf,EFGH,CRAM,BulkRNA-seqAssay,GRCm39, From cd999a5450da982c1a1b3b732b97c63ace5a9791 Mon Sep 17 00:00:00 2001 From: linglp Date: Mon, 5 Feb 2024 13:05:46 -0500 Subject: [PATCH 144/199] added project scope parameter --- schematic_api/api/openapi/api.yaml | 10 ++++++++++ schematic_api/api/routes.py | 5 ++--- 2 files changed, 12 insertions(+), 3 deletions(-) diff --git a/schematic_api/api/openapi/api.yaml b/schematic_api/api/openapi/api.yaml index ece6616be..5c3040453 100644 --- a/schematic_api/api/openapi/api.yaml +++ b/schematic_api/api/openapi/api.yaml @@ -709,6 +709,16 @@ paths: description: ID of view listing all project data assets. For example, for Synapse this would be the Synapse ID of the fileview listing all data assets for a given project.(i.e. master_fileview_id in config_example.yml) example: syn23643253 required: true + - in: query + name: project_scope + schema: + type: array + items: + type: string + nullable: false + description: List, a subset of the projects contained within the asset view that are relevant for the current operation. Speeds up some operations that interact with Synapse. + example: ['syn23643250', 'syn47218127', 'syn47218347'] + required: false responses: "200": description: A list of tuples(json). 
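Because project_scope is declared as an array-typed query parameter, it is
passed as a repeated query parameter (?project_scope=syn1&project_scope=syn2,
the OpenAPI form/explode default that connexion understands). A hedged sketch
of a client call; the base URL and route below are placeholders rather than
values taken from this patch, while the Synapse IDs reuse the examples from
the spec:

    import requests

    # Hypothetical deployment URL and route; only the query parameters are
    # defined by the spec change above.
    response = requests.get(
        "http://localhost:3001/v1/storage/project/manifests",
        params={
            "project_id": "syn00000000",  # placeholder project ID
            "asset_view": "syn23643253",
            "project_scope": ["syn23643250", "syn47218127"],  # repeated params
        },
    )
    manifests = response.json()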
diff --git a/schematic_api/api/routes.py b/schematic_api/api/routes.py index 400e920a1..661fc7493 100644 --- a/schematic_api/api/routes.py +++ b/schematic_api/api/routes.py @@ -729,8 +729,7 @@ def get_asset_view_table(asset_view, return_type): file_view_table_df.to_csv(export_path, index=False) return export_path -@profile(sort_by='cumulative', strip_dirs=True) -def get_project_manifests(project_id, asset_view): +def get_project_manifests(project_id, asset_view, project_scope=None): # Access token now stored in request header access_token = get_access_token() @@ -738,7 +737,7 @@ def get_project_manifests(project_id, asset_view): config_handler(asset_view=asset_view) # use Synapse Storage - store = SynapseStorage(access_token=access_token) + store = SynapseStorage(access_token=access_token, project_scope=project_scope) # call getprojectManifest function lst_manifest = store.getProjectManifests(projectId=project_id) From 756e35e40b8a677acfdcde8e8758d938e928fcf3 Mon Sep 17 00:00:00 2001 From: linglp Date: Mon, 5 Feb 2024 13:13:11 -0500 Subject: [PATCH 145/199] remove test time out operation --- schematic_api/api/openapi/api.yaml | 70 +----------------------------- schematic_api/api/routes.py | 26 +---------- 2 files changed, 2 insertions(+), 94 deletions(-) diff --git a/schematic_api/api/openapi/api.yaml b/schematic_api/api/openapi/api.yaml index 5c3040453..2d66cb640 100644 --- a/schematic_api/api/openapi/api.yaml +++ b/schematic_api/api/openapi/api.yaml @@ -1261,72 +1261,4 @@ paths: "500": description: Schematic version was not able to be identified. tags: - - Version - - /test_time_out: - get: - summary: sleep 59.9s - description: sleep 59.9s - operationId: schematic_api.api.routes.test_time_out - responses: - "200": - description: Test - content: - text/plain: - schema: - type: string - "500": - description: Test - tags: - - Test - - /test_time_out_two: - get: - summary: sleep 60s - description: sleep 60s - operationId: schematic_api.api.routes.test_time_out_two - responses: - "200": - description: Test - content: - text/plain: - schema: - type: string - "500": - description: Test - tags: - - Test - - /test_time_out_three: - get: - summary: sleep 120s - description: sleep 120s - operationId: schematic_api.api.routes.test_time_out_three - responses: - "200": - description: Test - content: - text/plain: - schema: - type: string - "500": - description: Test - tags: - - Test - - /test_time_out_four: - get: - summary: sleep 180s - description: sleep 180s - operationId: schematic_api.api.routes.test_time_out_four - responses: - "200": - description: Test - content: - text/plain: - schema: - type: string - "500": - description: Test - tags: - - Test \ No newline at end of file + - Version \ No newline at end of file diff --git a/schematic_api/api/routes.py b/schematic_api/api/routes.py index 661fc7493..a3a1aaaa7 100644 --- a/schematic_api/api/routes.py +++ b/schematic_api/api/routes.py @@ -1012,28 +1012,4 @@ def get_schematic_version() -> str: raise NotImplementedError( "Using this endpoint to check the version of schematic is only supported when the API is running in a docker container." 
) - return version - -def test_time_out(): - """return test time out - """ - time.sleep(59.9) - return "okay" - -def test_time_out_two(): - """return test time out - """ - time.sleep(60) - return "okay" - -def test_time_out_three(): - """return test time out - """ - time.sleep(120) - return "okay" - -def test_time_out_four(): - """return test time out - """ - time.sleep(180) - return "okay" \ No newline at end of file + return version \ No newline at end of file From e54dd729d9d9ee4e5547cf0e99861c525358d396 Mon Sep 17 00:00:00 2001 From: linglp Date: Mon, 5 Feb 2024 13:14:49 -0500 Subject: [PATCH 146/199] remove import --- schematic_api/api/routes.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/schematic_api/api/routes.py b/schematic_api/api/routes.py index a3a1aaaa7..2e8b8f990 100644 --- a/schematic_api/api/routes.py +++ b/schematic_api/api/routes.py @@ -7,7 +7,6 @@ import logging import pathlib import pickle -import time import connexion from connexion.decorators.uri_parsing import Swagger2URIParser @@ -40,7 +39,6 @@ ) from schematic.utils.general import entity_type_mapping from schematic.utils.schema_utils import get_property_label_from_display_name -from schematic.utils.general import profile logger = logging.getLogger(__name__) logging.basicConfig(level=logging.DEBUG) From 68f25c2c754aaa7dfaae6ac8441f1582387ebd71 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Mon, 5 Feb 2024 11:17:08 -0700 Subject: [PATCH 147/199] add extra assertion for row removal --- tests/test_utils.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/tests/test_utils.py b/tests/test_utils.py index 6c154cbd2..cb2627361 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -260,9 +260,13 @@ def test_load_df(self, helpers, preserve_raw_input): test_col = "Check NA" file_path = helpers.get_data_path("mock_manifests", "Invalid_Test_Manifest.csv") + unprocessed_df = pd.read_csv(file_path, encoding="utf8") df = df_utils.load_df(file_path, preserve_raw_input=preserve_raw_input, data_model=False) assert df["Component"].dtype == "object" + + # Ensure empty rows are removed from the dataframe + assert unprocessed_df.shape[0] == 4 assert df.shape[0] == 3 if preserve_raw_input: From 67639a28bec28725dee71f55d29a93510f98af33 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Mon, 5 Feb 2024 11:20:54 -0700 Subject: [PATCH 148/199] reorg variables --- tests/test_utils.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/tests/test_utils.py b/tests/test_utils.py index cb2627361..2efee6aa9 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -265,9 +265,11 @@ def test_load_df(self, helpers, preserve_raw_input): assert df["Component"].dtype == "object" - # Ensure empty rows are removed from the dataframe - assert unprocessed_df.shape[0] == 4 - assert df.shape[0] == 3 + n_unprocessed_rows = unprocessed_df.shape[0] + n_processed_rows = df.shape[0] + + assert n_unprocessed_rows == 4 + assert n_processed_rows == 3 if preserve_raw_input: assert isinstance(df[test_col].iloc[0], str) From 5d3cdc6f2ab9cb69cc8aadc6c05fd1d59bee356f Mon Sep 17 00:00:00 2001 From: Mialy DeFelice <85905780+mialy-defelice@users.noreply.github.com> Date: Tue, 6 Feb 2024 11:28:37 -0800 Subject: [PATCH 149/199] update dataset_ids to dataset_id --- schematic_api/api/routes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/schematic_api/api/routes.py b/schematic_api/api/routes.py index 
fea9eba99..68980a261 100644 --- a/schematic_api/api/routes.py +++ b/schematic_api/api/routes.py @@ -262,7 +262,7 @@ def get_temp_model_path(schema_url): def get_manifest_route( schema_url: str, use_annotations: bool, - dataset_ids=None, + dataset_id=None, asset_view=None, output_format=None, title=None, From 61404f233532db456fc6ef201916522798adc179 Mon Sep 17 00:00:00 2001 From: andrewelamb Date: Tue, 6 Feb 2024 12:39:05 -0800 Subject: [PATCH 150/199] make api packages optional --- poetry.lock | 4348 +++++++++++++++++++++++++----------------------- pyproject.toml | 10 +- 2 files changed, 2288 insertions(+), 2070 deletions(-) diff --git a/poetry.lock b/poetry.lock index e48b01605..086f607ef 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,26 +1,18 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. - [[package]] name = "alabaster" version = "0.7.16" description = "A light, configurable Sphinx theme" +category = "main" optional = false python-versions = ">=3.9" -files = [ - {file = "alabaster-0.7.16-py3-none-any.whl", hash = "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92"}, - {file = "alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65"}, -] [[package]] name = "altair" version = "4.2.0" description = "Altair: A declarative statistical visualization library for Python." +category = "main" optional = false python-versions = ">=3.7" -files = [ - {file = "altair-4.2.0-py3-none-any.whl", hash = "sha256:0c724848ae53410c13fa28be2b3b9a9dcb7b5caa1a70f7f217bd663bb419935a"}, - {file = "altair-4.2.0.tar.gz", hash = "sha256:d87d9372e63b48cd96b2a6415f0cf9457f50162ab79dc7a31cd7e024dd840026"}, -] [package.dependencies] entrypoints = "*" @@ -37,12 +29,9 @@ dev = ["black", "docutils", "flake8", "ipython", "m2r", "mistune (<2.0.0)", "pyt name = "anyio" version = "4.2.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" +category = "main" optional = false python-versions = ">=3.8" -files = [ - {file = "anyio-4.2.0-py3-none-any.whl", hash = "sha256:745843b39e829e108e518c489b31dc757de7d2131d53fac32bd8df268227bfee"}, - {file = "anyio-4.2.0.tar.gz", hash = "sha256:e1875bb4b4e2de1669f4bc7869b6d3f54231cdced71605e6e64c9be77e3be50f"}, -] [package.dependencies] exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} @@ -59,23 +48,17 @@ trio = ["trio (>=0.23)"] name = "appnope" version = "0.1.3" description = "Disable App Nap on macOS >= 10.9" +category = "main" optional = false python-versions = "*" -files = [ - {file = "appnope-0.1.3-py2.py3-none-any.whl", hash = "sha256:265a455292d0bd8a72453494fa24df5a11eb18373a60c7c0430889f22548605e"}, - {file = "appnope-0.1.3.tar.gz", hash = "sha256:02bd91c4de869fbb1e1c50aafc4098827a7a54ab2f39d9dcba6c9547ed920e24"}, -] [[package]] name = "argon2-cffi" version = "23.1.0" description = "Argon2 for Python" +category = "main" optional = false python-versions = ">=3.7" -files = [ - {file = "argon2_cffi-23.1.0-py3-none-any.whl", hash = "sha256:c670642b78ba29641818ab2e68bd4e6a78ba53b7eff7b4c3815ae16abf91c7ea"}, - {file = "argon2_cffi-23.1.0.tar.gz", hash = "sha256:879c3e79a2729ce768ebb7d36d4609e3a78a4ca2ec3a9f12286ca057e3d0db08"}, -] [package.dependencies] argon2-cffi-bindings = "*" @@ -90,31 +73,9 @@ typing = ["mypy"] name = "argon2-cffi-bindings" version = "21.2.0" description = "Low-level CFFI bindings for Argon2" +category = "main" optional = false python-versions = ">=3.6" -files = [ - {file = 
"argon2-cffi-bindings-21.2.0.tar.gz", hash = "sha256:bb89ceffa6c791807d1305ceb77dbfacc5aa499891d2c55661c6459651fc39e3"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ccb949252cb2ab3a08c02024acb77cfb179492d5701c7cbdbfd776124d4d2367"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9524464572e12979364b7d600abf96181d3541da11e23ddf565a32e70bd4dc0d"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b746dba803a79238e925d9046a63aa26bf86ab2a2fe74ce6b009a1c3f5c8f2ae"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58ed19212051f49a523abb1dbe954337dc82d947fb6e5a0da60f7c8471a8476c"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:bd46088725ef7f58b5a1ef7ca06647ebaf0eb4baff7d1d0d177c6cc8744abd86"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_i686.whl", hash = "sha256:8cd69c07dd875537a824deec19f978e0f2078fdda07fd5c42ac29668dda5f40f"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f1152ac548bd5b8bcecfb0b0371f082037e47128653df2e8ba6e914d384f3c3e"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win32.whl", hash = "sha256:603ca0aba86b1349b147cab91ae970c63118a0f30444d4bc80355937c950c082"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:b2ef1c30440dbbcba7a5dc3e319408b59676e2e039e2ae11a8775ecf482b192f"}, - {file = "argon2_cffi_bindings-21.2.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e415e3f62c8d124ee16018e491a009937f8cf7ebf5eb430ffc5de21b900dad93"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3e385d1c39c520c08b53d63300c3ecc28622f076f4c2b0e6d7e796e9f6502194"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3e3cc67fdb7d82c4718f19b4e7a87123caf8a93fde7e23cf66ac0337d3cb3f"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a22ad9800121b71099d0fb0a65323810a15f2e292f2ba450810a7316e128ee5"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9f8b450ed0547e3d473fdc8612083fd08dd2120d6ac8f73828df9b7d45bb351"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:93f9bf70084f97245ba10ee36575f0c3f1e7d7724d67d8e5b08e61787c320ed7"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3b9ef65804859d335dc6b31582cad2c5166f0c3e7975f324d9ffaa34ee7e6583"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4966ef5848d820776f5f562a7d45fdd70c2f330c961d0d745b784034bd9f48d"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20ef543a89dee4db46a1a6e206cd015360e5a75822f76df533845c3cbaf72670"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed2937d286e2ad0cc79a7087d3c272832865f779430e0cc2b4f3718d3159b0cb"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = 
"sha256:5e00316dabdaea0b2dd82d141cc66889ced0cdcbfa599e8b471cf22c620c329a"}, -] [package.dependencies] cffi = ">=1.0.1" @@ -127,12 +88,9 @@ tests = ["pytest"] name = "arrow" version = "1.3.0" description = "Better dates & times for Python" +category = "main" optional = false python-versions = ">=3.8" -files = [ - {file = "arrow-1.3.0-py3-none-any.whl", hash = "sha256:c728b120ebc00eb84e01882a6f5e7927a53960aa990ce7dd2b10f39005a67f80"}, - {file = "arrow-1.3.0.tar.gz", hash = "sha256:d4540617648cb5f895730f1ad8c82a65f2dad0166f57b75f3ca54759c4d67a85"}, -] [package.dependencies] python-dateutil = ">=2.7.0" @@ -140,18 +98,15 @@ types-python-dateutil = ">=2.8.10" [package.extras] doc = ["doc8", "sphinx (>=7.0.0)", "sphinx-autobuild", "sphinx-autodoc-typehints", "sphinx_rtd_theme (>=1.3.0)"] -test = ["dateparser (==1.*)", "pre-commit", "pytest", "pytest-cov", "pytest-mock", "pytz (==2021.1)", "simplejson (==3.*)"] +test = ["dateparser (>=1.0.0,<2.0.0)", "pre-commit", "pytest", "pytest-cov", "pytest-mock", "pytz (==2021.1)", "simplejson (>=3.0.0,<4.0.0)"] [[package]] name = "astroid" version = "2.15.8" description = "An abstract syntax tree for Python with inference support." +category = "dev" optional = false python-versions = ">=3.7.2" -files = [ - {file = "astroid-2.15.8-py3-none-any.whl", hash = "sha256:1aa149fc5c6589e3d0ece885b4491acd80af4f087baafa3fb5203b113e68cd3c"}, - {file = "astroid-2.15.8.tar.gz", hash = "sha256:6c107453dffee9055899705de3c9ead36e74119cee151e5a9aaf7f0b0e020a6a"}, -] [package.dependencies] lazy-object-proxy = ">=1.4.0" @@ -162,12 +117,9 @@ wrapt = {version = ">=1.11,<2", markers = "python_version < \"3.11\""} name = "asttokens" version = "2.4.1" description = "Annotate AST trees with source code positions" +category = "main" optional = false python-versions = "*" -files = [ - {file = "asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24"}, - {file = "asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0"}, -] [package.dependencies] six = ">=1.12.0" @@ -180,12 +132,9 @@ test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"] name = "async-lru" version = "2.0.4" description = "Simple LRU cache for asyncio" +category = "main" optional = false python-versions = ">=3.8" -files = [ - {file = "async-lru-2.0.4.tar.gz", hash = "sha256:b8a59a5df60805ff63220b2a0c5b5393da5521b113cd5465a44eb037d81a5627"}, - {file = "async_lru-2.0.4-py3-none-any.whl", hash = "sha256:ff02944ce3c288c5be660c42dbcca0742b32c3b279d6dceda655190240b99224"}, -] [package.dependencies] typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} @@ -194,12 +143,9 @@ typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} name = "attrs" version = "23.2.0" description = "Classes Without Boilerplate" +category = "main" optional = false python-versions = ">=3.7" -files = [ - {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, - {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, -] [package.extras] cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] @@ -213,12 +159,9 @@ tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "p name = "babel" version = "2.14.0" description = "Internationalization utilities" +category = "main" optional = false python-versions = ">=3.7" -files = [ - {file = 
"Babel-2.14.0-py3-none-any.whl", hash = "sha256:efb1a25b7118e67ce3a259bed20545c29cb68be8ad2c784c83689981b7a57287"}, - {file = "Babel-2.14.0.tar.gz", hash = "sha256:6919867db036398ba21eb5c7a0f6b28ab8cbc3ae7a73a44ebe34ae74a4e7d363"}, -] [package.extras] dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] @@ -227,23 +170,17 @@ dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] name = "backoff" version = "2.2.1" description = "Function decoration for backoff and retry" +category = "main" optional = false python-versions = ">=3.7,<4.0" -files = [ - {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, - {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, -] [[package]] name = "beautifulsoup4" version = "4.12.3" description = "Screen-scraping library" +category = "main" optional = false python-versions = ">=3.6.0" -files = [ - {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"}, - {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"}, -] [package.dependencies] soupsieve = ">1.2" @@ -259,32 +196,9 @@ lxml = ["lxml"] name = "black" version = "23.12.1" description = "The uncompromising code formatter." +category = "dev" optional = false python-versions = ">=3.8" -files = [ - {file = "black-23.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0aaf6041986767a5e0ce663c7a2f0e9eaf21e6ff87a5f95cbf3675bfd4c41d2"}, - {file = "black-23.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c88b3711d12905b74206227109272673edce0cb29f27e1385f33b0163c414bba"}, - {file = "black-23.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a920b569dc6b3472513ba6ddea21f440d4b4c699494d2e972a1753cdc25df7b0"}, - {file = "black-23.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:3fa4be75ef2a6b96ea8d92b1587dd8cb3a35c7e3d51f0738ced0781c3aa3a5a3"}, - {file = "black-23.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8d4df77958a622f9b5a4c96edb4b8c0034f8434032ab11077ec6c56ae9f384ba"}, - {file = "black-23.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:602cfb1196dc692424c70b6507593a2b29aac0547c1be9a1d1365f0d964c353b"}, - {file = "black-23.12.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c4352800f14be5b4864016882cdba10755bd50805c95f728011bcb47a4afd59"}, - {file = "black-23.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:0808494f2b2df923ffc5723ed3c7b096bd76341f6213989759287611e9837d50"}, - {file = "black-23.12.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:25e57fd232a6d6ff3f4478a6fd0580838e47c93c83eaf1ccc92d4faf27112c4e"}, - {file = "black-23.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2d9e13db441c509a3763a7a3d9a49ccc1b4e974a47be4e08ade2a228876500ec"}, - {file = "black-23.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1bd9c210f8b109b1762ec9fd36592fdd528485aadb3f5849b2740ef17e674e"}, - {file = "black-23.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:ae76c22bde5cbb6bfd211ec343ded2163bba7883c7bc77f6b756a1049436fbb9"}, - {file = "black-23.12.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1fa88a0f74e50e4487477bc0bb900c6781dbddfdfa32691e780bf854c3b4a47f"}, - {file = "black-23.12.1-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:a4d6a9668e45ad99d2f8ec70d5c8c04ef4f32f648ef39048d010b0689832ec6d"}, - {file = "black-23.12.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b18fb2ae6c4bb63eebe5be6bd869ba2f14fd0259bda7d18a46b764d8fb86298a"}, - {file = "black-23.12.1-cp38-cp38-win_amd64.whl", hash = "sha256:c04b6d9d20e9c13f43eee8ea87d44156b8505ca8a3c878773f68b4e4812a421e"}, - {file = "black-23.12.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e1b38b3135fd4c025c28c55ddfc236b05af657828a8a6abe5deec419a0b7055"}, - {file = "black-23.12.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4f0031eaa7b921db76decd73636ef3a12c942ed367d8c3841a0739412b260a54"}, - {file = "black-23.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97e56155c6b737854e60a9ab1c598ff2533d57e7506d97af5481141671abf3ea"}, - {file = "black-23.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:dd15245c8b68fe2b6bd0f32c1556509d11bb33aec9b5d0866dd8e2ed3dba09c2"}, - {file = "black-23.12.1-py3-none-any.whl", hash = "sha256:78baad24af0f033958cad29731e27363183e140962595def56423e626f4bee3e"}, - {file = "black-23.12.1.tar.gz", hash = "sha256:4ce3ef14ebe8d9509188014d96af1c456a910d5b5cbf434a09fef7e024b3d0d5"}, -] [package.dependencies] click = ">=8.0.0" @@ -305,12 +219,9 @@ uvloop = ["uvloop (>=0.15.2)"] name = "bleach" version = "6.1.0" description = "An easy safelist-based HTML-sanitizing tool." +category = "main" optional = false python-versions = ">=3.8" -files = [ - {file = "bleach-6.1.0-py3-none-any.whl", hash = "sha256:3225f354cfc436b9789c66c4ee030194bee0568fbf9cbdad3bc8b5c26c5f12b6"}, - {file = "bleach-6.1.0.tar.gz", hash = "sha256:0a31f1837963c41d46bbf1331b8778e1308ea0791db03cc4e7357b97cf42a8fe"}, -] [package.dependencies] six = ">=1.9.0" @@ -323,84 +234,25 @@ css = ["tinycss2 (>=1.1.0,<1.3)"] name = "cachetools" version = "5.3.2" description = "Extensible memoizing collections and decorators" +category = "main" optional = false python-versions = ">=3.7" -files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, -] [[package]] name = "certifi" version = "2023.11.17" description = "Python package for providing Mozilla's CA Bundle." +category = "main" optional = false python-versions = ">=3.6" -files = [ - {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"}, - {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, -] [[package]] name = "cffi" version = "1.16.0" description = "Foreign Function Interface for Python calling C code." 
+category = "main" optional = false python-versions = ">=3.8" -files = [ - {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, - {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, - {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, - {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, - {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, - {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", 
hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, - {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, - {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, - {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, - {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, - {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, - {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, - {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, - {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, - {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, -] [package.dependencies] pycparser = "*" @@ -409,111 +261,17 @@ pycparser = "*" name = "charset-normalizer" version = "3.3.2" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +category = "main" optional = false python-versions = ">=3.7.0" -files = [ - {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, - 
{file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = 
"sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, - {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, -] [[package]] name = "click" version = "8.1.7" description = "Composable command line interface toolkit" +category = "main" optional = false python-versions = ">=3.7" -files = [ - {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, - {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, -] [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} @@ -522,12 +280,9 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "click-log" version = "0.4.0" description = "Logging integration for Click" +category = "main" optional = false python-versions = "*" -files = [ - {file = "click-log-0.4.0.tar.gz", hash = "sha256:3970f8570ac54491237bcdb3d8ab5e3eef6c057df29f8c3d1151a51a9c23b975"}, - {file = "click_log-0.4.0-py2.py3-none-any.whl", hash = "sha256:a43e394b528d52112af599f2fc9e4b7cf3c15f94e53581f74fa6867e68c91756"}, -] [package.dependencies] click = "*" @@ -536,12 +291,9 @@ click = "*" name = "clickclick" version = "20.10.2" description = "Click utility functions" -optional = false +category = "main" +optional = true python-versions = "*" -files = [ - {file = "clickclick-20.10.2-py2.py3-none-any.whl", hash = "sha256:c8f33e6d9ec83f68416dd2136a7950125bd256ec39ccc9a85c6e280a16be2bb5"}, - {file = "clickclick-20.10.2.tar.gz", hash = "sha256:4efb13e62353e34c5eef7ed6582c4920b418d7dedc86d819e22ee089ba01802c"}, -] 
 [package.dependencies]
 click = ">=4.0"

@@ -551,23 +303,17 @@ PyYAML = ">=3.11"
 name = "colorama"
 version = "0.4.6"
 description = "Cross-platform colored terminal text."
+category = "main"
 optional = false
 python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
-files = [
-    {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
-    {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
-]

 [[package]]
 name = "comm"
 version = "0.2.1"
 description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc."
+category = "main"
 optional = false
 python-versions = ">=3.8"
-files = [
-    {file = "comm-0.2.1-py3-none-any.whl", hash = "sha256:87928485c0dfc0e7976fd89fc1e187023cf587e7c353e4a9b417555b44adf021"},
-    {file = "comm-0.2.1.tar.gz", hash = "sha256:0bc91edae1344d39d3661dcbc36937181fdaddb304790458f8b044dbc064b89a"},
-]

 [package.dependencies]
 traitlets = ">=4"

@@ -579,12 +325,9 @@ test = ["pytest"]
 name = "connexion"
 version = "2.14.2"
 description = "Connexion - API first applications with OpenAPI/Swagger and Flask"
-optional = false
+category = "main"
+optional = true
 python-versions = ">=3.6"
-files = [
-    {file = "connexion-2.14.2-py2.py3-none-any.whl", hash = "sha256:a73b96a0e07b16979a42cde7c7e26afe8548099e352cf350f80c57185e0e0b36"},
-    {file = "connexion-2.14.2.tar.gz", hash = "sha256:dbc06f52ebeebcf045c9904d570f24377e8bbd5a6521caef15a06f634cf85646"},
-]

 [package.dependencies]
 clickclick = ">=1.2,<21"

@@ -609,62 +352,9 @@ tests = ["MarkupSafe (>=0.23)", "aiohttp (>=2.3.10,<4)", "aiohttp-jinja2 (>=0.14
 name = "coverage"
 version = "7.4.0"
 description = "Code coverage measurement for Python"
+category = "dev"
 optional = false
 python-versions = ">=3.8"
-files = [
-    {file = "coverage-7.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36b0ea8ab20d6a7564e89cb6135920bc9188fb5f1f7152e94e8300b7b189441a"},
-    {file = "coverage-7.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0676cd0ba581e514b7f726495ea75aba3eb20899d824636c6f59b0ed2f88c471"},
-    {file = "coverage-7.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ca5c71a5a1765a0f8f88022c52b6b8be740e512980362f7fdbb03725a0d6b9"},
-    {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7c97726520f784239f6c62506bc70e48d01ae71e9da128259d61ca5e9788516"},
-    {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:815ac2d0f3398a14286dc2cea223a6f338109f9ecf39a71160cd1628786bc6f5"},
-    {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:80b5ee39b7f0131ebec7968baa9b2309eddb35b8403d1869e08f024efd883566"},
-    {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5b2ccb7548a0b65974860a78c9ffe1173cfb5877460e5a229238d985565574ae"},
-    {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:995ea5c48c4ebfd898eacb098164b3cc826ba273b3049e4a889658548e321b43"},
-    {file = "coverage-7.4.0-cp310-cp310-win32.whl", hash = "sha256:79287fd95585ed36e83182794a57a46aeae0b64ca53929d1176db56aacc83451"},
-    {file = "coverage-7.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:5b14b4f8760006bfdb6e08667af7bc2d8d9bfdb648351915315ea17645347137"},
-    {file = "coverage-7.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:04387a4a6ecb330c1878907ce0dc04078ea72a869263e53c72a1ba5bbdf380ca"},
-    {file = "coverage-7.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea81d8f9691bb53f4fb4db603203029643caffc82bf998ab5b59ca05560f4c06"},
-    {file = "coverage-7.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74775198b702868ec2d058cb92720a3c5a9177296f75bd97317c787daf711505"},
-    {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76f03940f9973bfaee8cfba70ac991825611b9aac047e5c80d499a44079ec0bc"},
-    {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:485e9f897cf4856a65a57c7f6ea3dc0d4e6c076c87311d4bc003f82cfe199d25"},
-    {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6ae8c9d301207e6856865867d762a4b6fd379c714fcc0607a84b92ee63feff70"},
-    {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bf477c355274a72435ceb140dc42de0dc1e1e0bf6e97195be30487d8eaaf1a09"},
-    {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:83c2dda2666fe32332f8e87481eed056c8b4d163fe18ecc690b02802d36a4d26"},
-    {file = "coverage-7.4.0-cp311-cp311-win32.whl", hash = "sha256:697d1317e5290a313ef0d369650cfee1a114abb6021fa239ca12b4849ebbd614"},
-    {file = "coverage-7.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:26776ff6c711d9d835557ee453082025d871e30b3fd6c27fcef14733f67f0590"},
-    {file = "coverage-7.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:13eaf476ec3e883fe3e5fe3707caeb88268a06284484a3daf8250259ef1ba143"},
-    {file = "coverage-7.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846f52f46e212affb5bcf131c952fb4075b55aae6b61adc9856222df89cbe3e2"},
-    {file = "coverage-7.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26f66da8695719ccf90e794ed567a1549bb2644a706b41e9f6eae6816b398c4a"},
-    {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:164fdcc3246c69a6526a59b744b62e303039a81e42cfbbdc171c91a8cc2f9446"},
-    {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:316543f71025a6565677d84bc4df2114e9b6a615aa39fb165d697dba06a54af9"},
-    {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bb1de682da0b824411e00a0d4da5a784ec6496b6850fdf8c865c1d68c0e318dd"},
-    {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:0e8d06778e8fbffccfe96331a3946237f87b1e1d359d7fbe8b06b96c95a5407a"},
-    {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a56de34db7b7ff77056a37aedded01b2b98b508227d2d0979d373a9b5d353daa"},
-    {file = "coverage-7.4.0-cp312-cp312-win32.whl", hash = "sha256:51456e6fa099a8d9d91497202d9563a320513fcf59f33991b0661a4a6f2ad450"},
-    {file = "coverage-7.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:cd3c1e4cb2ff0083758f09be0f77402e1bdf704adb7f89108007300a6da587d0"},
-    {file = "coverage-7.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e9d1bf53c4c8de58d22e0e956a79a5b37f754ed1ffdbf1a260d9dcfa2d8a325e"},
-    {file = "coverage-7.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:109f5985182b6b81fe33323ab4707011875198c41964f014579cf82cebf2bb85"},
-    {file = "coverage-7.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cc9d4bc55de8003663ec94c2f215d12d42ceea128da8f0f4036235a119c88ac"},
-    {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc6d65b21c219ec2072c1293c505cf36e4e913a3f936d80028993dd73c7906b1"},
-    {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a10a4920def78bbfff4eff8a05c51be03e42f1c3735be42d851f199144897ba"},
-    {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b8e99f06160602bc64da35158bb76c73522a4010f0649be44a4e167ff8555952"},
-    {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7d360587e64d006402b7116623cebf9d48893329ef035278969fa3bbf75b697e"},
-    {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:29f3abe810930311c0b5d1a7140f6395369c3db1be68345638c33eec07535105"},
-    {file = "coverage-7.4.0-cp38-cp38-win32.whl", hash = "sha256:5040148f4ec43644702e7b16ca864c5314ccb8ee0751ef617d49aa0e2d6bf4f2"},
-    {file = "coverage-7.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:9864463c1c2f9cb3b5db2cf1ff475eed2f0b4285c2aaf4d357b69959941aa555"},
-    {file = "coverage-7.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:936d38794044b26c99d3dd004d8af0035ac535b92090f7f2bb5aa9c8e2f5cd42"},
-    {file = "coverage-7.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:799c8f873794a08cdf216aa5d0531c6a3747793b70c53f70e98259720a6fe2d7"},
-    {file = "coverage-7.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7defbb9737274023e2d7af02cac77043c86ce88a907c58f42b580a97d5bcca9"},
-    {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a1526d265743fb49363974b7aa8d5899ff64ee07df47dd8d3e37dcc0818f09ed"},
-    {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf635a52fc1ea401baf88843ae8708591aa4adff875e5c23220de43b1ccf575c"},
-    {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:756ded44f47f330666843b5781be126ab57bb57c22adbb07d83f6b519783b870"},
-    {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0eb3c2f32dabe3a4aaf6441dde94f35687224dfd7eb2a7f47f3fd9428e421058"},
-    {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bfd5db349d15c08311702611f3dccbef4b4e2ec148fcc636cf8739519b4a5c0f"},
-    {file = "coverage-7.4.0-cp39-cp39-win32.whl", hash = "sha256:53d7d9158ee03956e0eadac38dfa1ec8068431ef8058fe6447043db1fb40d932"},
-    {file = "coverage-7.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:cfd2a8b6b0d8e66e944d47cdec2f47c48fef2ba2f2dff5a9a75757f64172857e"},
-    {file = "coverage-7.4.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:c530833afc4707fe48524a44844493f36d8727f04dcce91fb978c414a8556cc6"},
-    {file = "coverage-7.4.0.tar.gz", hash = "sha256:707c0f58cb1712b8809ece32b68996ee1e609f71bd14615bd8f87a1293cb610e"},
-]

 [package.dependencies]
 tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""}

@@ -676,33 +366,9 @@ toml = ["tomli"]
 name = "cryptography"
 version = "41.0.7"
 description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
+category = "main" optional = false python-versions = ">=3.7" -files = [ - {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:3c78451b78313fa81607fa1b3f1ae0a5ddd8014c38a02d9db0616133987b9cdf"}, - {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:928258ba5d6f8ae644e764d0f996d61a8777559f72dfeb2eea7e2fe0ad6e782d"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a1b41bc97f1ad230a41657d9155113c7521953869ae57ac39ac7f1bb471469a"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:841df4caa01008bad253bce2a6f7b47f86dc9f08df4b433c404def869f590a15"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5429ec739a29df2e29e15d082f1d9ad683701f0ec7709ca479b3ff2708dae65a"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:43f2552a2378b44869fe8827aa19e69512e3245a219104438692385b0ee119d1"}, - {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:af03b32695b24d85a75d40e1ba39ffe7db7ffcb099fe507b39fd41a565f1b157"}, - {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:49f0805fc0b2ac8d4882dd52f4a3b935b210935d500b6b805f321addc8177406"}, - {file = "cryptography-41.0.7-cp37-abi3-win32.whl", hash = "sha256:f983596065a18a2183e7f79ab3fd4c475205b839e02cbc0efbbf9666c4b3083d"}, - {file = "cryptography-41.0.7-cp37-abi3-win_amd64.whl", hash = "sha256:90452ba79b8788fa380dfb587cca692976ef4e757b194b093d845e8d99f612f2"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:079b85658ea2f59c4f43b70f8119a52414cdb7be34da5d019a77bf96d473b960"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:b640981bf64a3e978a56167594a0e97db71c89a479da8e175d8bb5be5178c003"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e3114da6d7f95d2dee7d3f4eec16dacff819740bbab931aff8648cb13c5ff5e7"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d5ec85080cce7b0513cfd233914eb8b7bbd0633f1d1703aa28d1dd5a72f678ec"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7a698cb1dac82c35fcf8fe3417a3aaba97de16a01ac914b89a0889d364d2f6be"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:37a138589b12069efb424220bf78eac59ca68b95696fc622b6ccc1c0a197204a"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:68a2dec79deebc5d26d617bfdf6e8aab065a4f34934b22d3b5010df3ba36612c"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:09616eeaef406f99046553b8a40fbf8b1e70795a91885ba4c96a70793de5504a"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48a0476626da912a44cc078f9893f292f0b3e4c739caf289268168d8f4702a39"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c7f3201ec47d5207841402594f1d7950879ef890c0c495052fa62f58283fde1a"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c5ca78485a255e03c32b513f8c2bc39fedb7f5c5f8535545bdc223a03b24f248"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d6c391c021ab1f7a82da5d8d0b3cee2f4b2c455ec86c8aebbc84837a631ff309"}, - {file = "cryptography-41.0.7.tar.gz", hash = 
"sha256:13f93ce9bea8016c253b34afc6bd6a75993e5c40672ed5405a9c832f0d4a00bc"}, -] [package.dependencies] cffi = ">=1.12" @@ -721,12 +387,9 @@ test-randomorder = ["pytest-randomly"] name = "dataclasses-json" version = "0.6.3" description = "Easily serialize dataclasses to and from JSON." +category = "main" optional = false python-versions = ">=3.7,<4.0" -files = [ - {file = "dataclasses_json-0.6.3-py3-none-any.whl", hash = "sha256:4aeb343357997396f6bca1acae64e486c3a723d8f5c76301888abeccf0c45176"}, - {file = "dataclasses_json-0.6.3.tar.gz", hash = "sha256:35cb40aae824736fdf959801356641836365219cfe14caeb115c39136f775d2a"}, -] [package.dependencies] marshmallow = ">=3.18.0,<4.0.0" @@ -736,12 +399,9 @@ typing-inspect = ">=0.4.0,<1" name = "dateparser" version = "1.2.0" description = "Date parsing library designed to parse dates from HTML pages" +category = "main" optional = false python-versions = ">=3.7" -files = [ - {file = "dateparser-1.2.0-py2.py3-none-any.whl", hash = "sha256:0b21ad96534e562920a0083e97fd45fa959882d4162acc358705144520a35830"}, - {file = "dateparser-1.2.0.tar.gz", hash = "sha256:7975b43a4222283e0ae15be7b4999d08c9a70e2d378ac87385b1ccf2cffbbb30"}, -] [package.dependencies] python-dateutil = "*" @@ -758,61 +418,33 @@ langdetect = ["langdetect"] name = "debugpy" version = "1.8.0" description = "An implementation of the Debug Adapter Protocol for Python" +category = "main" optional = false python-versions = ">=3.8" -files = [ - {file = "debugpy-1.8.0-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:7fb95ca78f7ac43393cd0e0f2b6deda438ec7c5e47fa5d38553340897d2fbdfb"}, - {file = "debugpy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef9ab7df0b9a42ed9c878afd3eaaff471fce3fa73df96022e1f5c9f8f8c87ada"}, - {file = "debugpy-1.8.0-cp310-cp310-win32.whl", hash = "sha256:a8b7a2fd27cd9f3553ac112f356ad4ca93338feadd8910277aff71ab24d8775f"}, - {file = "debugpy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:5d9de202f5d42e62f932507ee8b21e30d49aae7e46d5b1dd5c908db1d7068637"}, - {file = "debugpy-1.8.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:ef54404365fae8d45cf450d0544ee40cefbcb9cb85ea7afe89a963c27028261e"}, - {file = "debugpy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60009b132c91951354f54363f8ebdf7457aeb150e84abba5ae251b8e9f29a8a6"}, - {file = "debugpy-1.8.0-cp311-cp311-win32.whl", hash = "sha256:8cd0197141eb9e8a4566794550cfdcdb8b3db0818bdf8c49a8e8f8053e56e38b"}, - {file = "debugpy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:a64093656c4c64dc6a438e11d59369875d200bd5abb8f9b26c1f5f723622e153"}, - {file = "debugpy-1.8.0-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:b05a6b503ed520ad58c8dc682749113d2fd9f41ffd45daec16e558ca884008cd"}, - {file = "debugpy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c6fb41c98ec51dd010d7ed650accfd07a87fe5e93eca9d5f584d0578f28f35f"}, - {file = "debugpy-1.8.0-cp38-cp38-win32.whl", hash = "sha256:46ab6780159eeabb43c1495d9c84cf85d62975e48b6ec21ee10c95767c0590aa"}, - {file = "debugpy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:bdc5ef99d14b9c0fcb35351b4fbfc06ac0ee576aeab6b2511702e5a648a2e595"}, - {file = "debugpy-1.8.0-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:61eab4a4c8b6125d41a34bad4e5fe3d2cc145caecd63c3fe953be4cc53e65bf8"}, - {file = "debugpy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:125b9a637e013f9faac0a3d6a82bd17c8b5d2c875fb6b7e2772c5aba6d082332"}, - {file = 
"debugpy-1.8.0-cp39-cp39-win32.whl", hash = "sha256:57161629133113c97b387382045649a2b985a348f0c9366e22217c87b68b73c6"}, - {file = "debugpy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:e3412f9faa9ade82aa64a50b602544efcba848c91384e9f93497a458767e6926"}, - {file = "debugpy-1.8.0-py2.py3-none-any.whl", hash = "sha256:9c9b0ac1ce2a42888199df1a1906e45e6f3c9555497643a85e0bf2406e3ffbc4"}, - {file = "debugpy-1.8.0.zip", hash = "sha256:12af2c55b419521e33d5fb21bd022df0b5eb267c3e178f1d374a63a2a6bdccd0"}, -] [[package]] name = "decorator" version = "5.1.1" description = "Decorators for Humans" +category = "main" optional = false python-versions = ">=3.5" -files = [ - {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, - {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, -] [[package]] name = "defusedxml" version = "0.7.1" description = "XML bomb protection for Python stdlib modules" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, - {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, -] [[package]] name = "deprecated" version = "1.2.14" description = "Python @deprecated decorator to deprecate old python classes, functions or methods." +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, - {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, -] [package.dependencies] wrapt = ">=1.10,<2" @@ -824,12 +456,9 @@ dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] name = "deprecation" version = "2.1.0" description = "A library to handle automated deprecations" +category = "main" optional = false python-versions = "*" -files = [ - {file = "deprecation-2.1.0-py2.py3-none-any.whl", hash = "sha256:a10811591210e1fb0e768a8c25517cabeabcba6f0bf96564f8ff45189f90b14a"}, - {file = "deprecation-2.1.0.tar.gz", hash = "sha256:72b3bde64e5d778694b0cf68178aed03d15e15477116add3fb773e581f9518ff"}, -] [package.dependencies] packaging = "*" @@ -838,12 +467,9 @@ packaging = "*" name = "dill" version = "0.3.7" description = "serialize all of Python" +category = "main" optional = false python-versions = ">=3.7" -files = [ - {file = "dill-0.3.7-py3-none-any.whl", hash = "sha256:76b122c08ef4ce2eedcd4d1abd8e641114bfc6c2867f49f3c41facf65bf19f5e"}, - {file = "dill-0.3.7.tar.gz", hash = "sha256:cc1c8b182eb3013e24bd475ff2e9295af86c1a38eb1aff128dac8962a9ce3c03"}, -] [package.extras] graph = ["objgraph (>=1.7.2)"] @@ -852,45 +478,33 @@ graph = ["objgraph (>=1.7.2)"] name = "docutils" version = "0.20.1" description = "Docutils -- Python Documentation Utilities" +category = "main" optional = false python-versions = ">=3.7" -files = [ - {file = "docutils-0.20.1-py3-none-any.whl", hash = "sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6"}, - {file = "docutils-0.20.1.tar.gz", hash = "sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b"}, -] [[package]] name = "entrypoints" version = "0.4" description = "Discover and load entry 
points from installed packages." +category = "main" optional = false python-versions = ">=3.6" -files = [ - {file = "entrypoints-0.4-py3-none-any.whl", hash = "sha256:f174b5ff827504fd3cd97cc3f8649f3693f51538c7e4bdf3ef002c8429d42f9f"}, - {file = "entrypoints-0.4.tar.gz", hash = "sha256:b706eddaa9218a19ebcd67b56818f05bb27589b1ca9e8d797b74affad4ccacd4"}, -] [[package]] name = "et-xmlfile" version = "1.1.0" description = "An implementation of lxml.xmlfile for the standard library" +category = "main" optional = false python-versions = ">=3.6" -files = [ - {file = "et_xmlfile-1.1.0-py3-none-any.whl", hash = "sha256:a2ba85d1d6a74ef63837eed693bcb89c3f752169b0e3e7ae5b16ca5e1b3deada"}, - {file = "et_xmlfile-1.1.0.tar.gz", hash = "sha256:8eb9e2bc2f8c97e37a2dc85a09ecdcdec9d8a396530a6d5a33b30b9a92da0c5c"}, -] [[package]] name = "exceptiongroup" version = "1.2.0" description = "Backport of PEP 654 (exception groups)" +category = "main" optional = false python-versions = ">=3.7" -files = [ - {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, - {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, -] [package.extras] test = ["pytest (>=6)"] @@ -899,12 +513,9 @@ test = ["pytest (>=6)"] name = "execnet" version = "2.0.2" description = "execnet: rapid multi-Python deployment" +category = "dev" optional = false python-versions = ">=3.7" -files = [ - {file = "execnet-2.0.2-py3-none-any.whl", hash = "sha256:88256416ae766bc9e8895c76a87928c0012183da3cc4fc18016e6f050e025f41"}, - {file = "execnet-2.0.2.tar.gz", hash = "sha256:cc59bc4423742fd71ad227122eb0dd44db51efb3dc4095b45ac9a08c770096af"}, -] [package.extras] testing = ["hatch", "pre-commit", "pytest", "tox"] @@ -913,12 +524,9 @@ testing = ["hatch", "pre-commit", "pytest", "tox"] name = "executing" version = "2.0.1" description = "Get the currently executing AST node of a frame, and other information" +category = "main" optional = false python-versions = ">=3.5" -files = [ - {file = "executing-2.0.1-py2.py3-none-any.whl", hash = "sha256:eac49ca94516ccc753f9fb5ce82603156e590b27525a8bc32cce8ae302eb61bc"}, - {file = "executing-2.0.1.tar.gz", hash = "sha256:35afe2ce3affba8ee97f2d69927fa823b08b472b7b994e36a52a964b93d16147"}, -] [package.extras] tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"] @@ -927,12 +535,9 @@ tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipyth name = "fastjsonschema" version = "2.19.1" description = "Fastest Python implementation of JSON schema" +category = "main" optional = false python-versions = "*" -files = [ - {file = "fastjsonschema-2.19.1-py3-none-any.whl", hash = "sha256:3672b47bc94178c9f23dbb654bf47440155d4db9df5f7bc47643315f9c405cd0"}, - {file = "fastjsonschema-2.19.1.tar.gz", hash = "sha256:e3126a94bdc4623d3de4485f8d468a12f02a67921315ddc87836d6e456dc789d"}, -] [package.extras] devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] @@ -941,12 +546,9 @@ devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benc name = "flake8" version = "6.1.0" description = "the modular source code checker: pep8 pyflakes and co" +category = "dev" optional = false python-versions = ">=3.8.1" -files = [ - {file = "flake8-6.1.0-py2.py3-none-any.whl", hash = 
"sha256:ffdfce58ea94c6580c77888a86506937f9a1a227dfcd15f245d694ae20a6b6e5"}, - {file = "flake8-6.1.0.tar.gz", hash = "sha256:d5b3857f07c030bdb5bf41c7f53799571d75c4491748a3adcd47de929e34cd23"}, -] [package.dependencies] mccabe = ">=0.7.0,<0.8.0" @@ -957,12 +559,9 @@ pyflakes = ">=3.1.0,<3.2.0" name = "flask" version = "2.1.3" description = "A simple framework for building complex web applications." -optional = false +category = "main" +optional = true python-versions = ">=3.7" -files = [ - {file = "Flask-2.1.3-py3-none-any.whl", hash = "sha256:9013281a7402ad527f8fd56375164f3aa021ecfaff89bfe3825346c24f87e04c"}, - {file = "Flask-2.1.3.tar.gz", hash = "sha256:15972e5017df0575c3d6c090ba168b6db90259e620ac8d7ea813a396bad5b6cb"}, -] [package.dependencies] click = ">=8.0" @@ -979,12 +578,9 @@ dotenv = ["python-dotenv"] name = "flask-cors" version = "3.0.10" description = "A Flask extension adding a decorator for CORS support" -optional = false +category = "main" +optional = true python-versions = "*" -files = [ - {file = "Flask-Cors-3.0.10.tar.gz", hash = "sha256:b60839393f3b84a0f3746f6cdca56c1ad7426aa738b70d6c61375857823181de"}, - {file = "Flask_Cors-3.0.10-py2.py3-none-any.whl", hash = "sha256:74efc975af1194fc7891ff5cd85b0f7478be4f7f59fe158102e91abb72bb4438"}, -] [package.dependencies] Flask = ">=0.9" @@ -994,23 +590,17 @@ Six = "*" name = "fqdn" version = "1.5.1" description = "Validates fully-qualified domain names against RFC 1123, so that they are acceptable to modern bowsers" +category = "main" optional = false python-versions = ">=2.7, !=3.0, !=3.1, !=3.2, !=3.3, !=3.4, <4" -files = [ - {file = "fqdn-1.5.1-py3-none-any.whl", hash = "sha256:3a179af3761e4df6eb2e026ff9e1a3033d3587bf980a0b1b2e1e5d08d7358014"}, - {file = "fqdn-1.5.1.tar.gz", hash = "sha256:105ed3677e767fb5ca086a0c1f4bb66ebc3c100be518f0e0d755d9eae164d89f"}, -] [[package]] name = "google-api-core" version = "2.15.0" description = "Google API client core library" +category = "main" optional = false python-versions = ">=3.7" -files = [ - {file = "google-api-core-2.15.0.tar.gz", hash = "sha256:abc978a72658f14a2df1e5e12532effe40f94f868f6e23d95133bd6abcca35ca"}, - {file = "google_api_core-2.15.0-py3-none-any.whl", hash = "sha256:2aa56d2be495551e66bbff7f729b790546f87d5c90e74781aa77233bcb395a8a"}, -] [package.dependencies] google-auth = ">=2.14.1,<3.0.dev0" @@ -1027,15 +617,12 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] name = "google-api-python-client" version = "2.114.0" description = "Google API Client Library for Python" +category = "main" optional = false python-versions = ">=3.7" -files = [ - {file = "google-api-python-client-2.114.0.tar.gz", hash = "sha256:e041bbbf60e682261281e9d64b4660035f04db1cccba19d1d68eebc24d1465ed"}, - {file = "google_api_python_client-2.114.0-py2.py3-none-any.whl", hash = "sha256:690e0bb67d70ff6dea4e8a5d3738639c105a478ac35da153d3b2a384064e9e1a"}, -] [package.dependencies] -google-api-core = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0.dev0" +google-api-core = ">=1.31.5,<2.0.0 || >2.3.0,<3.0.0.dev0" google-auth = ">=1.19.0,<3.0.0.dev0" google-auth-httplib2 = ">=0.1.0" httplib2 = ">=0.15.0,<1.dev0" @@ -1045,12 +632,9 @@ uritemplate = ">=3.0.1,<5" name = "google-auth" version = "2.26.2" description = "Google Authentication Library" +category = "main" optional = false python-versions = ">=3.7" -files = [ - {file = "google-auth-2.26.2.tar.gz", hash = "sha256:97327dbbf58cccb58fc5a1712bba403ae76668e64814eb30f7316f7e27126b81"}, - {file = "google_auth-2.26.2-py2.py3-none-any.whl", hash = 
"sha256:3f445c8ce9b61ed6459aad86d8ccdba4a9afed841b2d1451a11ef4db08957424"}, -] [package.dependencies] cachetools = ">=2.0.0,<6.0" @@ -1068,12 +652,9 @@ requests = ["requests (>=2.20.0,<3.0.0.dev0)"] name = "google-auth-httplib2" version = "0.1.1" description = "Google Authentication Library: httplib2 transport" +category = "main" optional = false python-versions = "*" -files = [ - {file = "google-auth-httplib2-0.1.1.tar.gz", hash = "sha256:c64bc555fdc6dd788ea62ecf7bccffcf497bf77244887a3f3d7a5a02f8e3fc29"}, - {file = "google_auth_httplib2-0.1.1-py2.py3-none-any.whl", hash = "sha256:42c50900b8e4dcdf8222364d1f0efe32b8421fb6ed72f2613f12f75cc933478c"}, -] [package.dependencies] google-auth = "*" @@ -1083,12 +664,9 @@ httplib2 = ">=0.19.0" name = "google-auth-oauthlib" version = "0.8.0" description = "Google Authentication Library" +category = "main" optional = false python-versions = ">=3.6" -files = [ - {file = "google-auth-oauthlib-0.8.0.tar.gz", hash = "sha256:81056a310fb1c4a3e5a7e1a443e1eb96593c6bbc55b26c0261e4d3295d3e6593"}, - {file = "google_auth_oauthlib-0.8.0-py2.py3-none-any.whl", hash = "sha256:40cc612a13c3336d5433e94e2adb42a0c88f6feb6c55769e44500fc70043a576"}, -] [package.dependencies] google-auth = ">=2.15.0" @@ -1101,12 +679,9 @@ tool = ["click (>=6.0.0)"] name = "googleapis-common-protos" version = "1.62.0" description = "Common protobufs used in Google APIs" +category = "main" optional = false python-versions = ">=3.7" -files = [ - {file = "googleapis-common-protos-1.62.0.tar.gz", hash = "sha256:83f0ece9f94e5672cced82f592d2a5edf527a96ed1794f0bab36d5735c996277"}, - {file = "googleapis_common_protos-1.62.0-py2.py3-none-any.whl", hash = "sha256:4750113612205514f9f6aa4cb00d523a94f3e8c06c5ad2fee466387dc4875f07"}, -] [package.dependencies] protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" @@ -1118,12 +693,9 @@ grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] name = "graphviz" version = "0.20.1" description = "Simple Python interface for Graphviz" +category = "main" optional = false python-versions = ">=3.7" -files = [ - {file = "graphviz-0.20.1-py3-none-any.whl", hash = "sha256:587c58a223b51611c0cf461132da386edd896a029524ca61a1462b880bf97977"}, - {file = "graphviz-0.20.1.zip", hash = "sha256:8c58f14adaa3b947daf26c19bc1e98c4e0702cdc31cf99153e6f06904d492bf8"}, -] [package.extras] dev = ["flake8", "pep8-naming", "tox (>=3)", "twine", "wheel"] @@ -1134,12 +706,9 @@ test = ["coverage", "mock (>=4)", "pytest (>=7)", "pytest-cov", "pytest-mock (>= name = "great-expectations" version = "0.15.50" description = "Always know what to expect from your data." 
+category = "main" optional = false python-versions = "*" -files = [ - {file = "great_expectations-0.15.50-py3-none-any.whl", hash = "sha256:bda4c6bfe199dc0610273a1c160aab3876583266b1957a34a7edb72b055fd13d"}, - {file = "great_expectations-0.15.50.tar.gz", hash = "sha256:0b00c974410d598a97b4c662d7955d80d6268e35c5f3893ddb546f75432412db"}, -] [package.dependencies] altair = ">=4.0.0,<4.2.1" @@ -1208,68 +777,9 @@ vertica = ["sqlalchemy (>=1.3.18,<2.0.0)", "sqlalchemy-vertica-python (>=0.5.10) name = "greenlet" version = "3.0.3" description = "Lightweight in-process concurrent programming" +category = "main" optional = false python-versions = ">=3.7" -files = [ - {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, - {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, - {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, - {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, - {file = 
"greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, - {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, - {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, - {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, - {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, - {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, - {file = 
"greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, - {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, - {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, - {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, - {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, - {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, - {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, - {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, - {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, -] [package.extras] docs = ["Sphinx", "furo"] @@ -1279,12 +789,9 @@ test = ["objgraph", "psutil"] name = "httplib2" version = "0.22.0" description = "A comprehensive HTTP client library." 
+category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "httplib2-0.22.0-py3-none-any.whl", hash = "sha256:14ae0a53c1ba8f3d37e9e27cf37eabb0fb9980f435ba405d546948b009dd64dc"}, - {file = "httplib2-0.22.0.tar.gz", hash = "sha256:d7a10bc5ef5ab08322488bde8c726eeee5c8618723fdb399597ec58f3d82df81"}, -] [package.dependencies] pyparsing = {version = ">=2.4.2,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.0.2 || >3.0.2,<3.0.3 || >3.0.3,<4", markers = "python_version > \"3.0\""} @@ -1293,34 +800,25 @@ pyparsing = {version = ">=2.4.2,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.0.2 || >3.0 name = "idna" version = "3.6" description = "Internationalized Domain Names in Applications (IDNA)" +category = "main" optional = false python-versions = ">=3.5" -files = [ - {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, - {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, -] [[package]] name = "imagesize" version = "1.4.1" description = "Getting image size from png/jpeg/jpeg2000/gif file" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, - {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, -] [[package]] name = "importlib-metadata" version = "6.11.0" description = "Read metadata from Python packages" +category = "main" optional = false python-versions = ">=3.8" -files = [ - {file = "importlib_metadata-6.11.0-py3-none-any.whl", hash = "sha256:f0afba6205ad8f8947c7d338b5342d5db2afbfd82f9cbef7879a9539cc12eb9b"}, - {file = "importlib_metadata-6.11.0.tar.gz", hash = "sha256:1231cf92d825c9e03cfc4da076a16de6422c863558229ea0b22b675657463443"}, -] [package.dependencies] zipp = ">=0.5" @@ -1334,34 +832,25 @@ testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs name = "inflection" version = "0.5.1" description = "A port of Ruby on Rails inflector to Python" +category = "main" optional = false python-versions = ">=3.5" -files = [ - {file = "inflection-0.5.1-py2.py3-none-any.whl", hash = "sha256:f38b2b640938a4f35ade69ac3d053042959b62a0f1076a5bbaa1b9526605a8a2"}, - {file = "inflection-0.5.1.tar.gz", hash = "sha256:1a29730d366e996aaacffb2f1f1cb9593dc38e2ddd30c91250c6dde09ea9b417"}, -] [[package]] name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" +category = "dev" optional = false python-versions = ">=3.7" -files = [ - {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, - {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, -] [[package]] name = "interrogate" version = "1.5.0" description = "Interrogate a codebase for docstring coverage." 
+category = "main" optional = false python-versions = ">=3.6" -files = [ - {file = "interrogate-1.5.0-py3-none-any.whl", hash = "sha256:a4ccc5cbd727c74acc98dee6f5e79ef264c0bcfa66b68d4e123069b2af89091a"}, - {file = "interrogate-1.5.0.tar.gz", hash = "sha256:b6f325f0aa84ac3ac6779d8708264d366102226c5af7d69058cecffcff7a6d6c"}, -] [package.dependencies] attrs = "*" @@ -1381,12 +870,9 @@ tests = ["pytest", "pytest-cov", "pytest-mock"] name = "ipykernel" version = "6.29.0" description = "IPython Kernel for Jupyter" +category = "main" optional = false python-versions = ">=3.8" -files = [ - {file = "ipykernel-6.29.0-py3-none-any.whl", hash = "sha256:076663ca68492576f051e4af7720d33f34383e655f2be0d544c8b1c9de915b2f"}, - {file = "ipykernel-6.29.0.tar.gz", hash = "sha256:b5dd3013cab7b330df712891c96cd1ab868c27a7159e606f762015e9bf8ceb3f"}, -] [package.dependencies] appnope = {version = "*", markers = "platform_system == \"Darwin\""} @@ -1394,7 +880,7 @@ comm = ">=0.1.1" debugpy = ">=1.6.5" ipython = ">=7.23.1" jupyter-client = ">=6.1.12" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" matplotlib-inline = ">=0.1" nest-asyncio = "*" packaging = "*" @@ -1414,12 +900,9 @@ test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio name = "ipython" version = "8.18.1" description = "IPython: Productive Interactive Computing" +category = "main" optional = false python-versions = ">=3.9" -files = [ - {file = "ipython-8.18.1-py3-none-any.whl", hash = "sha256:e8267419d72d81955ec1177f8a29aaa90ac80ad647499201119e2f05e99aa397"}, - {file = "ipython-8.18.1.tar.gz", hash = "sha256:ca6f079bb33457c66e233e4580ebfc4128855b4cf6370dddd73842a9563e8a27"}, -] [package.dependencies] colorama = {version = "*", markers = "sys_platform == \"win32\""} @@ -1451,12 +934,9 @@ test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.22)", "pa name = "ipywidgets" version = "8.1.1" description = "Jupyter interactive widgets" +category = "main" optional = false python-versions = ">=3.7" -files = [ - {file = "ipywidgets-8.1.1-py3-none-any.whl", hash = "sha256:2b88d728656aea3bbfd05d32c747cfd0078f9d7e159cf982433b58ad717eed7f"}, - {file = "ipywidgets-8.1.1.tar.gz", hash = "sha256:40211efb556adec6fa450ccc2a77d59ca44a060f4f9f136833df59c9f538e6e8"}, -] [package.dependencies] comm = ">=0.1.3" @@ -1472,12 +952,9 @@ test = ["ipykernel", "jsonschema", "pytest (>=3.6.0)", "pytest-cov", "pytz"] name = "isodate" version = "0.6.1" description = "An ISO 8601 date/time/duration parser and formatter" +category = "main" optional = false python-versions = "*" -files = [ - {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, - {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, -] [package.dependencies] six = "*" @@ -1486,12 +963,9 @@ six = "*" name = "isoduration" version = "20.11.0" description = "Operations with ISO 8601 durations" +category = "main" optional = false python-versions = ">=3.7" -files = [ - {file = "isoduration-20.11.0-py3-none-any.whl", hash = "sha256:b2904c2a4228c3d44f409c8ae8e2370eb21a26f7ac2ec5446df141dde3452042"}, - {file = "isoduration-20.11.0.tar.gz", hash = "sha256:ac2f9015137935279eac671f94f89eb00584f940f5dc49462a0c4ee692ba1bd9"}, -] [package.dependencies] arrow = ">=0.15.0" @@ -1500,12 +974,9 @@ arrow = ">=0.15.0" name = "isort" version = "5.13.2" description = "A Python utility / library to sort Python imports." 
+category = "dev" optional = false python-versions = ">=3.8.0" -files = [ - {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, - {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, -] [package.extras] colors = ["colorama (>=0.4.6)"] @@ -1514,23 +985,17 @@ colors = ["colorama (>=0.4.6)"] name = "itsdangerous" version = "2.1.2" description = "Safely pass data to untrusted environments and back." +category = "main" optional = false python-versions = ">=3.7" -files = [ - {file = "itsdangerous-2.1.2-py3-none-any.whl", hash = "sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44"}, - {file = "itsdangerous-2.1.2.tar.gz", hash = "sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a"}, -] [[package]] name = "jedi" version = "0.19.1" description = "An autocompletion tool for Python that can be used for text editors." +category = "main" optional = false python-versions = ">=3.6" -files = [ - {file = "jedi-0.19.1-py2.py3-none-any.whl", hash = "sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0"}, - {file = "jedi-0.19.1.tar.gz", hash = "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd"}, -] [package.dependencies] parso = ">=0.8.3,<0.9.0" @@ -1544,12 +1009,9 @@ testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] name = "jeepney" version = "0.8.0" description = "Low-level, pure Python DBus protocol wrapper." +category = "main" optional = false python-versions = ">=3.7" -files = [ - {file = "jeepney-0.8.0-py3-none-any.whl", hash = "sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755"}, - {file = "jeepney-0.8.0.tar.gz", hash = "sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806"}, -] [package.extras] test = ["async-timeout", "pytest", "pytest-asyncio (>=0.17)", "pytest-trio", "testpath", "trio"] @@ -1559,12 +1021,9 @@ trio = ["async_generator", "trio"] name = "jinja2" version = "3.1.3" description = "A very fast and expressive template engine." +category = "main" optional = false python-versions = ">=3.7" -files = [ - {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, - {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, -] [package.dependencies] MarkupSafe = ">=2.0" @@ -1576,12 +1035,9 @@ i18n = ["Babel (>=2.7)"] name = "json5" version = "0.9.14" description = "A Python implementation of the JSON5 data format." 
+category = "main" optional = false python-versions = "*" -files = [ - {file = "json5-0.9.14-py2.py3-none-any.whl", hash = "sha256:740c7f1b9e584a468dbb2939d8d458db3427f2c93ae2139d05f47e453eae964f"}, - {file = "json5-0.9.14.tar.gz", hash = "sha256:9ed66c3a6ca3510a976a9ef9b8c0787de24802724ab1860bc0153c7fdd589b02"}, -] [package.extras] dev = ["hypothesis"] @@ -1590,12 +1046,9 @@ dev = ["hypothesis"] name = "jsonpatch" version = "1.33" description = "Apply JSON-Patches (RFC 6902)" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" -files = [ - {file = "jsonpatch-1.33-py2.py3-none-any.whl", hash = "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade"}, - {file = "jsonpatch-1.33.tar.gz", hash = "sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c"}, -] [package.dependencies] jsonpointer = ">=1.9" @@ -1604,23 +1057,17 @@ jsonpointer = ">=1.9" name = "jsonpointer" version = "2.4" description = "Identify specific nodes in a JSON document (RFC 6901)" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" -files = [ - {file = "jsonpointer-2.4-py2.py3-none-any.whl", hash = "sha256:15d51bba20eea3165644553647711d150376234112651b4f1811022aecad7d7a"}, - {file = "jsonpointer-2.4.tar.gz", hash = "sha256:585cee82b70211fa9e6043b7bb89db6e1aa49524340dde8ad6b63206ea689d88"}, -] [[package]] name = "jsonschema" version = "4.21.0" description = "An implementation of JSON Schema validation for Python" +category = "main" optional = false python-versions = ">=3.8" -files = [ - {file = "jsonschema-4.21.0-py3-none-any.whl", hash = "sha256:70a09719d375c0a2874571b363c8a24be7df8071b80c9aa76bc4551e7297c63c"}, - {file = "jsonschema-4.21.0.tar.gz", hash = "sha256:3ba18e27f7491ea4a1b22edce00fb820eec968d397feb3f9cb61d5894bb38167"}, -] [package.dependencies] attrs = ">=22.2.0" @@ -1644,12 +1091,9 @@ format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339- name = "jsonschema-specifications" version = "2023.12.1" description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" +category = "main" optional = false python-versions = ">=3.8" -files = [ - {file = "jsonschema_specifications-2023.12.1-py3-none-any.whl", hash = "sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c"}, - {file = "jsonschema_specifications-2023.12.1.tar.gz", hash = "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc"}, -] [package.dependencies] referencing = ">=0.31.0" @@ -1658,16 +1102,13 @@ referencing = ">=0.31.0" name = "jupyter-client" version = "8.6.0" description = "Jupyter protocol implementation and client libraries" +category = "main" optional = false python-versions = ">=3.8" -files = [ - {file = "jupyter_client-8.6.0-py3-none-any.whl", hash = "sha256:909c474dbe62582ae62b758bca86d6518c85234bdee2d908c778db6d72f39d99"}, - {file = "jupyter_client-8.6.0.tar.gz", hash = "sha256:0642244bb83b4764ae60d07e010e15f0e2d275ec4e918a8f7b80fbbef3ca60c7"}, -] [package.dependencies] importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" python-dateutil = ">=2.8.2" pyzmq = ">=23.0" tornado = ">=6.2" @@ -1681,12 +1122,9 @@ test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pyt name = "jupyter-core" version = "5.7.1" description = "Jupyter core package. 
A base package on which Jupyter projects rely." +category = "main" optional = false python-versions = ">=3.8" -files = [ - {file = "jupyter_core-5.7.1-py3-none-any.whl", hash = "sha256:c65c82126453a723a2804aa52409930434598fd9d35091d63dfb919d2b765bb7"}, - {file = "jupyter_core-5.7.1.tar.gz", hash = "sha256:de61a9d7fc71240f688b2fb5ab659fbb56979458dc66a71decd098e03c79e218"}, -] [package.dependencies] platformdirs = ">=2.5" @@ -1701,12 +1139,9 @@ test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"] name = "jupyter-events" version = "0.9.0" description = "Jupyter Event System library" +category = "main" optional = false python-versions = ">=3.8" -files = [ - {file = "jupyter_events-0.9.0-py3-none-any.whl", hash = "sha256:d853b3c10273ff9bc8bb8b30076d65e2c9685579db736873de6c2232dde148bf"}, - {file = "jupyter_events-0.9.0.tar.gz", hash = "sha256:81ad2e4bc710881ec274d31c6c50669d71bbaa5dd9d01e600b56faa85700d399"}, -] [package.dependencies] jsonschema = {version = ">=4.18.0", extras = ["format-nongpl"]} @@ -1726,12 +1161,9 @@ test = ["click", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.19.0)", "p name = "jupyter-lsp" version = "2.2.2" description = "Multi-Language Server WebSocket proxy for Jupyter Notebook/Lab server" +category = "main" optional = false python-versions = ">=3.8" -files = [ - {file = "jupyter-lsp-2.2.2.tar.gz", hash = "sha256:256d24620542ae4bba04a50fc1f6ffe208093a07d8e697fea0a8d1b8ca1b7e5b"}, - {file = "jupyter_lsp-2.2.2-py3-none-any.whl", hash = "sha256:3b95229e4168355a8c91928057c1621ac3510ba98b2a925e82ebd77f078b1aa5"}, -] [package.dependencies] importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} @@ -1741,19 +1173,16 @@ jupyter-server = ">=1.1.2" name = "jupyter-server" version = "2.12.5" description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." +category = "main" optional = false python-versions = ">=3.8" -files = [ - {file = "jupyter_server-2.12.5-py3-none-any.whl", hash = "sha256:184a0f82809a8522777cfb6b760ab6f4b1bb398664c5860a27cec696cb884923"}, - {file = "jupyter_server-2.12.5.tar.gz", hash = "sha256:0edb626c94baa22809be1323f9770cf1c00a952b17097592e40d03e6a3951689"}, -] [package.dependencies] anyio = ">=3.1.0" argon2-cffi = "*" jinja2 = "*" jupyter-client = ">=7.4.4" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" jupyter-events = ">=0.9.0" jupyter-server-terminals = "*" nbconvert = ">=6.4.4" @@ -1777,12 +1206,9 @@ test = ["flaky", "ipykernel", "pre-commit", "pytest (>=7.0)", "pytest-console-sc name = "jupyter-server-terminals" version = "0.5.1" description = "A Jupyter Server Extension Providing Terminals." 
+category = "main" optional = false python-versions = ">=3.8" -files = [ - {file = "jupyter_server_terminals-0.5.1-py3-none-any.whl", hash = "sha256:5e63e947ddd97bb2832db5ef837a258d9ccd4192cd608c1270850ad947ae5dd7"}, - {file = "jupyter_server_terminals-0.5.1.tar.gz", hash = "sha256:16d3be9cf48be6a1f943f3a6c93c033be259cf4779184c66421709cf63dccfea"}, -] [package.dependencies] pywinpty = {version = ">=2.0.3", markers = "os_name == \"nt\""} @@ -1796,12 +1222,9 @@ test = ["jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-jupyter[server] (> name = "jupyterlab" version = "4.0.10" description = "JupyterLab computational environment" +category = "main" optional = false python-versions = ">=3.8" -files = [ - {file = "jupyterlab-4.0.10-py3-none-any.whl", hash = "sha256:fe010ad9e37017488b468632ef2ead255fc7c671c5b64d9ca13e1f7b7e665c37"}, - {file = "jupyterlab-4.0.10.tar.gz", hash = "sha256:46177eb8ede70dc73be922ac99f8ef943bdc2dfbc6a31b353c4bde848a35dee1"}, -] [package.dependencies] async-lru = ">=1.0.0" @@ -1828,23 +1251,17 @@ test = ["coverage", "pytest (>=7.0)", "pytest-check-links (>=0.7)", "pytest-cons name = "jupyterlab-pygments" version = "0.3.0" description = "Pygments theme using JupyterLab CSS variables" +category = "main" optional = false python-versions = ">=3.8" -files = [ - {file = "jupyterlab_pygments-0.3.0-py3-none-any.whl", hash = "sha256:841a89020971da1d8693f1a99997aefc5dc424bb1b251fd6322462a1b8842780"}, - {file = "jupyterlab_pygments-0.3.0.tar.gz", hash = "sha256:721aca4d9029252b11cfa9d185e5b5af4d54772bb8072f9b7036f4170054d35d"}, -] [[package]] name = "jupyterlab-server" version = "2.25.2" description = "A set of server components for JupyterLab and JupyterLab like applications." +category = "main" optional = false python-versions = ">=3.8" -files = [ - {file = "jupyterlab_server-2.25.2-py3-none-any.whl", hash = "sha256:5b1798c9cc6a44f65c757de9f97fc06fc3d42535afbf47d2ace5e964ab447aaf"}, - {file = "jupyterlab_server-2.25.2.tar.gz", hash = "sha256:bd0ec7a99ebcedc8bcff939ef86e52c378e44c2707e053fcd81d046ce979ee63"}, -] [package.dependencies] babel = ">=2.10" @@ -1865,23 +1282,17 @@ test = ["hatch", "ipykernel", "openapi-core (>=0.18.0,<0.19.0)", "openapi-spec-v name = "jupyterlab-widgets" version = "3.0.9" description = "Jupyter interactive widgets for JupyterLab" +category = "main" optional = false python-versions = ">=3.7" -files = [ - {file = "jupyterlab_widgets-3.0.9-py3-none-any.whl", hash = "sha256:3cf5bdf5b897bf3bccf1c11873aa4afd776d7430200f765e0686bd352487b58d"}, - {file = "jupyterlab_widgets-3.0.9.tar.gz", hash = "sha256:6005a4e974c7beee84060fdfba341a3218495046de8ae3ec64888e5fe19fdb4c"}, -] [[package]] name = "keyring" version = "23.4.1" description = "Store and access your passwords safely." 
+category = "main" optional = false python-versions = ">=3.6" -files = [ - {file = "keyring-23.4.1-py3-none-any.whl", hash = "sha256:17e49fb0d6883c2b4445359434dba95aad84aabb29bbff044ad0ed7100232eca"}, - {file = "keyring-23.4.1.tar.gz", hash = "sha256:89cbd74d4683ed164c8082fb38619341097741323b3786905c6dac04d6915a55"}, -] [package.dependencies] importlib-metadata = ">=3.6" @@ -1897,12 +1308,9 @@ testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", name = "keyrings-alt" version = "3.1" description = "Alternate keyring implementations" +category = "main" optional = false python-versions = ">=2.7" -files = [ - {file = "keyrings.alt-3.1-py2.py3-none-any.whl", hash = "sha256:6a00fa799baf1385cf9620bd01bcc815aa56e6970342a567bcfea0c4d21abe5f"}, - {file = "keyrings.alt-3.1.tar.gz", hash = "sha256:b59c86b67b9027a86e841a49efc41025bcc3b1b0308629617b66b7011e52db5a"}, -] [package.dependencies] six = "*" @@ -1915,118 +1323,33 @@ testing = ["backports.unittest-mock", "collective.checkdocs", "fs (>=0.5,<2)", " name = "lazy-object-proxy" version = "1.10.0" description = "A fast and thorough lazy object proxy." +category = "dev" optional = false python-versions = ">=3.8" -files = [ - {file = "lazy-object-proxy-1.10.0.tar.gz", hash = "sha256:78247b6d45f43a52ef35c25b5581459e85117225408a4128a3daf8bf9648ac69"}, - {file = "lazy_object_proxy-1.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:855e068b0358ab916454464a884779c7ffa312b8925c6f7401e952dcf3b89977"}, - {file = "lazy_object_proxy-1.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab7004cf2e59f7c2e4345604a3e6ea0d92ac44e1c2375527d56492014e690c3"}, - {file = "lazy_object_proxy-1.10.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc0d2fc424e54c70c4bc06787e4072c4f3b1aa2f897dfdc34ce1013cf3ceef05"}, - {file = "lazy_object_proxy-1.10.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e2adb09778797da09d2b5ebdbceebf7dd32e2c96f79da9052b2e87b6ea495895"}, - {file = "lazy_object_proxy-1.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b1f711e2c6dcd4edd372cf5dec5c5a30d23bba06ee012093267b3376c079ec83"}, - {file = "lazy_object_proxy-1.10.0-cp310-cp310-win32.whl", hash = "sha256:76a095cfe6045c7d0ca77db9934e8f7b71b14645f0094ffcd842349ada5c5fb9"}, - {file = "lazy_object_proxy-1.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:b4f87d4ed9064b2628da63830986c3d2dca7501e6018347798313fcf028e2fd4"}, - {file = "lazy_object_proxy-1.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fec03caabbc6b59ea4a638bee5fce7117be8e99a4103d9d5ad77f15d6f81020c"}, - {file = "lazy_object_proxy-1.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02c83f957782cbbe8136bee26416686a6ae998c7b6191711a04da776dc9e47d4"}, - {file = "lazy_object_proxy-1.10.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:009e6bb1f1935a62889ddc8541514b6a9e1fcf302667dcb049a0be5c8f613e56"}, - {file = "lazy_object_proxy-1.10.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:75fc59fc450050b1b3c203c35020bc41bd2695ed692a392924c6ce180c6f1dc9"}, - {file = "lazy_object_proxy-1.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:782e2c9b2aab1708ffb07d4bf377d12901d7a1d99e5e410d648d892f8967ab1f"}, - {file = "lazy_object_proxy-1.10.0-cp311-cp311-win32.whl", hash = "sha256:edb45bb8278574710e68a6b021599a10ce730d156e5b254941754a9cc0b17d03"}, - {file = 
"lazy_object_proxy-1.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:e271058822765ad5e3bca7f05f2ace0de58a3f4e62045a8c90a0dfd2f8ad8cc6"}, - {file = "lazy_object_proxy-1.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e98c8af98d5707dcdecc9ab0863c0ea6e88545d42ca7c3feffb6b4d1e370c7ba"}, - {file = "lazy_object_proxy-1.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:952c81d415b9b80ea261d2372d2a4a2332a3890c2b83e0535f263ddfe43f0d43"}, - {file = "lazy_object_proxy-1.10.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80b39d3a151309efc8cc48675918891b865bdf742a8616a337cb0090791a0de9"}, - {file = "lazy_object_proxy-1.10.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e221060b701e2aa2ea991542900dd13907a5c90fa80e199dbf5a03359019e7a3"}, - {file = "lazy_object_proxy-1.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:92f09ff65ecff3108e56526f9e2481b8116c0b9e1425325e13245abfd79bdb1b"}, - {file = "lazy_object_proxy-1.10.0-cp312-cp312-win32.whl", hash = "sha256:3ad54b9ddbe20ae9f7c1b29e52f123120772b06dbb18ec6be9101369d63a4074"}, - {file = "lazy_object_proxy-1.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:127a789c75151db6af398b8972178afe6bda7d6f68730c057fbbc2e96b08d282"}, - {file = "lazy_object_proxy-1.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9e4ed0518a14dd26092614412936920ad081a424bdcb54cc13349a8e2c6d106a"}, - {file = "lazy_object_proxy-1.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ad9e6ed739285919aa9661a5bbed0aaf410aa60231373c5579c6b4801bd883c"}, - {file = "lazy_object_proxy-1.10.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fc0a92c02fa1ca1e84fc60fa258458e5bf89d90a1ddaeb8ed9cc3147f417255"}, - {file = "lazy_object_proxy-1.10.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0aefc7591920bbd360d57ea03c995cebc204b424524a5bd78406f6e1b8b2a5d8"}, - {file = "lazy_object_proxy-1.10.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5faf03a7d8942bb4476e3b62fd0f4cf94eaf4618e304a19865abf89a35c0bbee"}, - {file = "lazy_object_proxy-1.10.0-cp38-cp38-win32.whl", hash = "sha256:e333e2324307a7b5d86adfa835bb500ee70bfcd1447384a822e96495796b0ca4"}, - {file = "lazy_object_proxy-1.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:cb73507defd385b7705c599a94474b1d5222a508e502553ef94114a143ec6696"}, - {file = "lazy_object_proxy-1.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:366c32fe5355ef5fc8a232c5436f4cc66e9d3e8967c01fb2e6302fd6627e3d94"}, - {file = "lazy_object_proxy-1.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2297f08f08a2bb0d32a4265e98a006643cd7233fb7983032bd61ac7a02956b3b"}, - {file = "lazy_object_proxy-1.10.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18dd842b49456aaa9a7cf535b04ca4571a302ff72ed8740d06b5adcd41fe0757"}, - {file = "lazy_object_proxy-1.10.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:217138197c170a2a74ca0e05bddcd5f1796c735c37d0eee33e43259b192aa424"}, - {file = "lazy_object_proxy-1.10.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9a3a87cf1e133e5b1994144c12ca4aa3d9698517fe1e2ca82977781b16955658"}, - {file = "lazy_object_proxy-1.10.0-cp39-cp39-win32.whl", hash = "sha256:30b339b2a743c5288405aa79a69e706a06e02958eab31859f7f3c04980853b70"}, - {file = "lazy_object_proxy-1.10.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:a899b10e17743683b293a729d3a11f2f399e8a90c73b089e29f5d0fe3509f0dd"}, - {file = "lazy_object_proxy-1.10.0-pp310.pp311.pp312.pp38.pp39-none-any.whl", hash = "sha256:80fa48bd89c8f2f456fc0765c11c23bf5af827febacd2f523ca5bc1893fcc09d"}, -] [[package]] name = "makefun" version = "1.15.2" description = "Small library to dynamically create python functions." +category = "main" optional = false python-versions = "*" -files = [ - {file = "makefun-1.15.2-py2.py3-none-any.whl", hash = "sha256:1c83abfaefb6c3c7c83ed4a993b4a310af80adf6db15625b184b1f0f7545a041"}, - {file = "makefun-1.15.2.tar.gz", hash = "sha256:16f2a2b34d9ee0c2b578c960a1808c974e2822cf79f6e9b9c455aace10882d45"}, -] [[package]] name = "markupsafe" version = "2.1.0" description = "Safely add untrusted strings to HTML/XML markup." +category = "main" optional = false python-versions = ">=3.7" -files = [ - {file = "MarkupSafe-2.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3028252424c72b2602a323f70fbf50aa80a5d3aa616ea6add4ba21ae9cc9da4c"}, - {file = "MarkupSafe-2.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:290b02bab3c9e216da57c1d11d2ba73a9f73a614bbdcc027d299a60cdfabb11a"}, - {file = "MarkupSafe-2.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e104c0c2b4cd765b4e83909cde7ec61a1e313f8a75775897db321450e928cce"}, - {file = "MarkupSafe-2.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24c3be29abb6b34052fd26fc7a8e0a49b1ee9d282e3665e8ad09a0a68faee5b3"}, - {file = "MarkupSafe-2.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:204730fd5fe2fe3b1e9ccadb2bd18ba8712b111dcabce185af0b3b5285a7c989"}, - {file = "MarkupSafe-2.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d3b64c65328cb4cd252c94f83e66e3d7acf8891e60ebf588d7b493a55a1dbf26"}, - {file = "MarkupSafe-2.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:96de1932237abe0a13ba68b63e94113678c379dca45afa040a17b6e1ad7ed076"}, - {file = "MarkupSafe-2.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:75bb36f134883fdbe13d8e63b8675f5f12b80bb6627f7714c7d6c5becf22719f"}, - {file = "MarkupSafe-2.1.0-cp310-cp310-win32.whl", hash = "sha256:4056f752015dfa9828dce3140dbadd543b555afb3252507348c493def166d454"}, - {file = "MarkupSafe-2.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:d4e702eea4a2903441f2735799d217f4ac1b55f7d8ad96ab7d4e25417cb0827c"}, - {file = "MarkupSafe-2.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f0eddfcabd6936558ec020130f932d479930581171368fd728efcfb6ef0dd357"}, - {file = "MarkupSafe-2.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ddea4c352a488b5e1069069f2f501006b1a4362cb906bee9a193ef1245a7a61"}, - {file = "MarkupSafe-2.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:09c86c9643cceb1d87ca08cdc30160d1b7ab49a8a21564868921959bd16441b8"}, - {file = "MarkupSafe-2.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0a0abef2ca47b33fb615b491ce31b055ef2430de52c5b3fb19a4042dbc5cadb"}, - {file = "MarkupSafe-2.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:736895a020e31b428b3382a7887bfea96102c529530299f426bf2e636aacec9e"}, - {file = "MarkupSafe-2.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:679cbb78914ab212c49c67ba2c7396dc599a8479de51b9a87b174700abd9ea49"}, - {file = "MarkupSafe-2.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:84ad5e29bf8bab3ad70fd707d3c05524862bddc54dc040982b0dbcff36481de7"}, - {file = "MarkupSafe-2.1.0-cp37-cp37m-win32.whl", hash = "sha256:8da5924cb1f9064589767b0f3fc39d03e3d0fb5aa29e0cb21d43106519bd624a"}, - {file = "MarkupSafe-2.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:454ffc1cbb75227d15667c09f164a0099159da0c1f3d2636aa648f12675491ad"}, - {file = "MarkupSafe-2.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:142119fb14a1ef6d758912b25c4e803c3ff66920635c44078666fe7cc3f8f759"}, - {file = "MarkupSafe-2.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b2a5a856019d2833c56a3dcac1b80fe795c95f401818ea963594b345929dffa7"}, - {file = "MarkupSafe-2.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d1fb9b2eec3c9714dd936860850300b51dbaa37404209c8d4cb66547884b7ed"}, - {file = "MarkupSafe-2.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:62c0285e91414f5c8f621a17b69fc0088394ccdaa961ef469e833dbff64bd5ea"}, - {file = "MarkupSafe-2.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fc3150f85e2dbcf99e65238c842d1cfe69d3e7649b19864c1cc043213d9cd730"}, - {file = "MarkupSafe-2.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f02cf7221d5cd915d7fa58ab64f7ee6dd0f6cddbb48683debf5d04ae9b1c2cc1"}, - {file = "MarkupSafe-2.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d5653619b3eb5cbd35bfba3c12d575db2a74d15e0e1c08bf1db788069d410ce8"}, - {file = "MarkupSafe-2.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7d2f5d97fcbd004c03df8d8fe2b973fe2b14e7bfeb2cfa012eaa8759ce9a762f"}, - {file = "MarkupSafe-2.1.0-cp38-cp38-win32.whl", hash = "sha256:3cace1837bc84e63b3fd2dfce37f08f8c18aeb81ef5cf6bb9b51f625cb4e6cd8"}, - {file = "MarkupSafe-2.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:fabbe18087c3d33c5824cb145ffca52eccd053061df1d79d4b66dafa5ad2a5ea"}, - {file = "MarkupSafe-2.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:023af8c54fe63530545f70dd2a2a7eed18d07a9a77b94e8bf1e2ff7f252db9a3"}, - {file = "MarkupSafe-2.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d66624f04de4af8bbf1c7f21cc06649c1c69a7f84109179add573ce35e46d448"}, - {file = "MarkupSafe-2.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c532d5ab79be0199fa2658e24a02fce8542df196e60665dd322409a03db6a52c"}, - {file = "MarkupSafe-2.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e67ec74fada3841b8c5f4c4f197bea916025cb9aa3fe5abf7d52b655d042f956"}, - {file = "MarkupSafe-2.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30c653fde75a6e5eb814d2a0a89378f83d1d3f502ab710904ee585c38888816c"}, - {file = "MarkupSafe-2.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:961eb86e5be7d0973789f30ebcf6caab60b844203f4396ece27310295a6082c7"}, - {file = "MarkupSafe-2.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:598b65d74615c021423bd45c2bc5e9b59539c875a9bdb7e5f2a6b92dfcfc268d"}, - {file = "MarkupSafe-2.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:599941da468f2cf22bf90a84f6e2a65524e87be2fce844f96f2dd9a6c9d1e635"}, - {file = "MarkupSafe-2.1.0-cp39-cp39-win32.whl", hash = "sha256:e6f7f3f41faffaea6596da86ecc2389672fa949bd035251eab26dc6697451d05"}, - {file = "MarkupSafe-2.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:b8811d48078d1cf2a6863dafb896e68406c5f513048451cd2ded0473133473c7"}, - {file = "MarkupSafe-2.1.0.tar.gz", hash = 
"sha256:80beaf63ddfbc64a0452b841d8036ca0611e049650e20afcb882f5d3c266d65f"}, -] [[package]] name = "marshmallow" version = "3.20.2" description = "A lightweight library for converting complex datatypes to and from native Python datatypes." +category = "main" optional = false python-versions = ">=3.8" -files = [ - {file = "marshmallow-3.20.2-py3-none-any.whl", hash = "sha256:c21d4b98fee747c130e6bc8f45c4b3199ea66bc00c12ee1f639f0aeca034d5e9"}, - {file = "marshmallow-3.20.2.tar.gz", hash = "sha256:4c1daff273513dc5eb24b219a8035559dc573c8f322558ef85f5438ddd1236dd"}, -] [package.dependencies] packaging = ">=17.0" @@ -2041,12 +1364,9 @@ tests = ["pytest", "pytz", "simplejson"] name = "matplotlib-inline" version = "0.1.6" description = "Inline Matplotlib backend for Jupyter" +category = "main" optional = false python-versions = ">=3.5" -files = [ - {file = "matplotlib-inline-0.1.6.tar.gz", hash = "sha256:f887e5f10ba98e8d2b150ddcf4702c1e5f8b3a20005eb0f74bfdbd360ee6f304"}, - {file = "matplotlib_inline-0.1.6-py3-none-any.whl", hash = "sha256:f1f41aab5328aa5aaea9b16d083b128102f8712542f819fe7e6a420ff581b311"}, -] [package.dependencies] traitlets = "*" @@ -2055,59 +1375,25 @@ traitlets = "*" name = "mccabe" version = "0.7.0" description = "McCabe checker, plugin for flake8" +category = "dev" optional = false python-versions = ">=3.6" -files = [ - {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, - {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, -] [[package]] name = "mistune" version = "3.0.2" description = "A sane and fast Markdown parser with useful plugins and renderers" +category = "main" optional = false python-versions = ">=3.7" -files = [ - {file = "mistune-3.0.2-py3-none-any.whl", hash = "sha256:71481854c30fdbc938963d3605b72501f5c10a9320ecd412c121c163a1c7d205"}, - {file = "mistune-3.0.2.tar.gz", hash = "sha256:fc7f93ded930c92394ef2cb6f04a8aabab4117a91449e72dcc8dfa646a508be8"}, -] [[package]] name = "mypy" version = "1.8.0" description = "Optional static typing for Python" +category = "dev" optional = false python-versions = ">=3.8" -files = [ - {file = "mypy-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:485a8942f671120f76afffff70f259e1cd0f0cfe08f81c05d8816d958d4577d3"}, - {file = "mypy-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:df9824ac11deaf007443e7ed2a4a26bebff98d2bc43c6da21b2b64185da011c4"}, - {file = "mypy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2afecd6354bbfb6e0160f4e4ad9ba6e4e003b767dd80d85516e71f2e955ab50d"}, - {file = "mypy-1.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8963b83d53ee733a6e4196954502b33567ad07dfd74851f32be18eb932fb1cb9"}, - {file = "mypy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:e46f44b54ebddbeedbd3d5b289a893219065ef805d95094d16a0af6630f5d410"}, - {file = "mypy-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:855fe27b80375e5c5878492f0729540db47b186509c98dae341254c8f45f42ae"}, - {file = "mypy-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c886c6cce2d070bd7df4ec4a05a13ee20c0aa60cb587e8d1265b6c03cf91da3"}, - {file = "mypy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d19c413b3c07cbecf1f991e2221746b0d2a9410b59cb3f4fb9557f0365a1a817"}, - {file = "mypy-1.8.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9261ed810972061388918c83c3f5cd46079d875026ba97380f3e3978a72f503d"}, - {file = 
"mypy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:51720c776d148bad2372ca21ca29256ed483aa9a4cdefefcef49006dff2a6835"}, - {file = "mypy-1.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:52825b01f5c4c1c4eb0db253ec09c7aa17e1a7304d247c48b6f3599ef40db8bd"}, - {file = "mypy-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f5ac9a4eeb1ec0f1ccdc6f326bcdb464de5f80eb07fb38b5ddd7b0de6bc61e55"}, - {file = "mypy-1.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afe3fe972c645b4632c563d3f3eff1cdca2fa058f730df2b93a35e3b0c538218"}, - {file = "mypy-1.8.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:42c6680d256ab35637ef88891c6bd02514ccb7e1122133ac96055ff458f93fc3"}, - {file = "mypy-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:720a5ca70e136b675af3af63db533c1c8c9181314d207568bbe79051f122669e"}, - {file = "mypy-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:028cf9f2cae89e202d7b6593cd98db6759379f17a319b5faf4f9978d7084cdc6"}, - {file = "mypy-1.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4e6d97288757e1ddba10dd9549ac27982e3e74a49d8d0179fc14d4365c7add66"}, - {file = "mypy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f1478736fcebb90f97e40aff11a5f253af890c845ee0c850fe80aa060a267c6"}, - {file = "mypy-1.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42419861b43e6962a649068a61f4a4839205a3ef525b858377a960b9e2de6e0d"}, - {file = "mypy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:2b5b6c721bd4aabaadead3a5e6fa85c11c6c795e0c81a7215776ef8afc66de02"}, - {file = "mypy-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5c1538c38584029352878a0466f03a8ee7547d7bd9f641f57a0f3017a7c905b8"}, - {file = "mypy-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ef4be7baf08a203170f29e89d79064463b7fc7a0908b9d0d5114e8009c3a259"}, - {file = "mypy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178def594014aa6c35a8ff411cf37d682f428b3b5617ca79029d8ae72f5402b"}, - {file = "mypy-1.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ab3c84fa13c04aeeeabb2a7f67a25ef5d77ac9d6486ff33ded762ef353aa5592"}, - {file = "mypy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:99b00bc72855812a60d253420d8a2eae839b0afa4938f09f4d2aa9bb4654263a"}, - {file = "mypy-1.8.0-py3-none-any.whl", hash = "sha256:538fd81bb5e430cc1381a443971c0475582ff9f434c16cd46d2c66763ce85d9d"}, - {file = "mypy-1.8.0.tar.gz", hash = "sha256:6ff8b244d7085a0b425b56d327b480c3b29cafbd2eff27316a004f9a7391ae07"}, -] [package.dependencies] mypy-extensions = ">=1.0.0" @@ -2124,27 +1410,21 @@ reports = ["lxml"] name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." +category = "main" optional = false python-versions = ">=3.5" -files = [ - {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, - {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, -] [[package]] name = "nbclient" version = "0.9.0" description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." 
+category = "main" optional = false python-versions = ">=3.8.0" -files = [ - {file = "nbclient-0.9.0-py3-none-any.whl", hash = "sha256:a3a1ddfb34d4a9d17fc744d655962714a866639acd30130e9be84191cd97cd15"}, - {file = "nbclient-0.9.0.tar.gz", hash = "sha256:4b28c207877cf33ef3a9838cdc7a54c5ceff981194a82eac59d558f05487295e"}, -] [package.dependencies] jupyter-client = ">=6.1.12" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" nbformat = ">=5.1" traitlets = ">=5.4" @@ -2157,12 +1437,9 @@ test = ["flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "nbconvert (>= name = "nbconvert" version = "7.14.2" description = "Converting Jupyter Notebooks" +category = "main" optional = false python-versions = ">=3.8" -files = [ - {file = "nbconvert-7.14.2-py3-none-any.whl", hash = "sha256:db28590cef90f7faf2ebbc71acd402cbecf13d29176df728c0a9025a49345ea1"}, - {file = "nbconvert-7.14.2.tar.gz", hash = "sha256:a7f8808fd4e082431673ac538400218dd45efd076fbeb07cc6e5aa5a3a4e949e"}, -] [package.dependencies] beautifulsoup4 = "*" @@ -2195,12 +1472,9 @@ webpdf = ["playwright"] name = "nbformat" version = "5.9.2" description = "The Jupyter Notebook format" +category = "main" optional = false python-versions = ">=3.8" -files = [ - {file = "nbformat-5.9.2-py3-none-any.whl", hash = "sha256:1c5172d786a41b82bcfd0c23f9e6b6f072e8fb49c39250219e4acfff1efe89e9"}, - {file = "nbformat-5.9.2.tar.gz", hash = "sha256:5f98b5ba1997dff175e77e0c17d5c10a96eaed2cbd1de3533d1fc35d5e111192"}, -] [package.dependencies] fastjsonschema = "*" @@ -2216,23 +1490,17 @@ test = ["pep440", "pre-commit", "pytest", "testpath"] name = "nest-asyncio" version = "1.5.9" description = "Patch asyncio to allow nested event loops" +category = "main" optional = false python-versions = ">=3.5" -files = [ - {file = "nest_asyncio-1.5.9-py3-none-any.whl", hash = "sha256:61ec07ef052e72e3de22045b81b2cc7d71fceb04c568ba0b2e4b2f9f5231bec2"}, - {file = "nest_asyncio-1.5.9.tar.gz", hash = "sha256:d1e1144e9c6e3e6392e0fcf5211cb1c8374b5648a98f1ebe48e5336006b41907"}, -] [[package]] name = "networkx" version = "2.8.8" description = "Python package for creating and manipulating graphs and networks" +category = "main" optional = false python-versions = ">=3.8" -files = [ - {file = "networkx-2.8.8-py3-none-any.whl", hash = "sha256:e435dfa75b1d7195c7b8378c3859f0445cd88c6b0375c181ed66823a9ceb7524"}, - {file = "networkx-2.8.8.tar.gz", hash = "sha256:230d388117af870fce5647a3c52401fcf753e94720e6ea6b4197a5355648885e"}, -] [package.extras] default = ["matplotlib (>=3.4)", "numpy (>=1.19)", "pandas (>=1.3)", "scipy (>=1.8)"] @@ -2245,12 +1513,9 @@ test = ["codecov (>=2.1)", "pytest (>=7.2)", "pytest-cov (>=4.0)"] name = "notebook" version = "7.0.6" description = "Jupyter Notebook - A web-based notebook environment for interactive computing" +category = "main" optional = false python-versions = ">=3.8" -files = [ - {file = "notebook-7.0.6-py3-none-any.whl", hash = "sha256:0fe8f67102fea3744fedf652e4c15339390902ca70c5a31c4f547fa23da697cc"}, - {file = "notebook-7.0.6.tar.gz", hash = "sha256:ec6113b06529019f7f287819af06c97a2baf7a95ac21a8f6e32192898e9f9a58"}, -] [package.dependencies] jupyter-server = ">=2.4.0,<3" @@ -2268,12 +1533,9 @@ test = ["importlib-resources (>=5.0)", "ipykernel", "jupyter-server[test] (>=2.4 name = "notebook-shim" version = "0.2.3" description = "A shim layer for notebook traits and config" +category = "main" optional = false python-versions = ">=3.7" -files = [ - {file = "notebook_shim-0.2.3-py3-none-any.whl", hash = 
"sha256:a83496a43341c1674b093bfcebf0fe8e74cbe7eda5fd2bbc56f8e39e1486c0c7"}, - {file = "notebook_shim-0.2.3.tar.gz", hash = "sha256:f69388ac283ae008cd506dda10d0288b09a017d822d5e8c7129a152cbd3ce7e9"}, -] [package.dependencies] jupyter-server = ">=1.8,<3" @@ -2285,57 +1547,17 @@ test = ["pytest", "pytest-console-scripts", "pytest-jupyter", "pytest-tornasync" name = "numpy" version = "1.26.3" description = "Fundamental package for array computing in Python" +category = "main" optional = false python-versions = ">=3.9" -files = [ - {file = "numpy-1.26.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:806dd64230dbbfaca8a27faa64e2f414bf1c6622ab78cc4264f7f5f028fee3bf"}, - {file = "numpy-1.26.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02f98011ba4ab17f46f80f7f8f1c291ee7d855fcef0a5a98db80767a468c85cd"}, - {file = "numpy-1.26.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d45b3ec2faed4baca41c76617fcdcfa4f684ff7a151ce6fc78ad3b6e85af0a6"}, - {file = "numpy-1.26.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdd2b45bf079d9ad90377048e2747a0c82351989a2165821f0c96831b4a2a54b"}, - {file = "numpy-1.26.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:211ddd1e94817ed2d175b60b6374120244a4dd2287f4ece45d49228b4d529178"}, - {file = "numpy-1.26.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b1240f767f69d7c4c8a29adde2310b871153df9b26b5cb2b54a561ac85146485"}, - {file = "numpy-1.26.3-cp310-cp310-win32.whl", hash = "sha256:21a9484e75ad018974a2fdaa216524d64ed4212e418e0a551a2d83403b0531d3"}, - {file = "numpy-1.26.3-cp310-cp310-win_amd64.whl", hash = "sha256:9e1591f6ae98bcfac2a4bbf9221c0b92ab49762228f38287f6eeb5f3f55905ce"}, - {file = "numpy-1.26.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b831295e5472954104ecb46cd98c08b98b49c69fdb7040483aff799a755a7374"}, - {file = "numpy-1.26.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9e87562b91f68dd8b1c39149d0323b42e0082db7ddb8e934ab4c292094d575d6"}, - {file = "numpy-1.26.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c66d6fec467e8c0f975818c1796d25c53521124b7cfb760114be0abad53a0a2"}, - {file = "numpy-1.26.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f25e2811a9c932e43943a2615e65fc487a0b6b49218899e62e426e7f0a57eeda"}, - {file = "numpy-1.26.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:af36e0aa45e25c9f57bf684b1175e59ea05d9a7d3e8e87b7ae1a1da246f2767e"}, - {file = "numpy-1.26.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:51c7f1b344f302067b02e0f5b5d2daa9ed4a721cf49f070280ac202738ea7f00"}, - {file = "numpy-1.26.3-cp311-cp311-win32.whl", hash = "sha256:7ca4f24341df071877849eb2034948459ce3a07915c2734f1abb4018d9c49d7b"}, - {file = "numpy-1.26.3-cp311-cp311-win_amd64.whl", hash = "sha256:39763aee6dfdd4878032361b30b2b12593fb445ddb66bbac802e2113eb8a6ac4"}, - {file = "numpy-1.26.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a7081fd19a6d573e1a05e600c82a1c421011db7935ed0d5c483e9dd96b99cf13"}, - {file = "numpy-1.26.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12c70ac274b32bc00c7f61b515126c9205323703abb99cd41836e8125ea0043e"}, - {file = "numpy-1.26.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f784e13e598e9594750b2ef6729bcd5a47f6cfe4a12cca13def35e06d8163e3"}, - {file = "numpy-1.26.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f24750ef94d56ce6e33e4019a8a4d68cfdb1ef661a52cdaee628a56d2437419"}, - {file = 
"numpy-1.26.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:77810ef29e0fb1d289d225cabb9ee6cf4d11978a00bb99f7f8ec2132a84e0166"}, - {file = "numpy-1.26.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8ed07a90f5450d99dad60d3799f9c03c6566709bd53b497eb9ccad9a55867f36"}, - {file = "numpy-1.26.3-cp312-cp312-win32.whl", hash = "sha256:f73497e8c38295aaa4741bdfa4fda1a5aedda5473074369eca10626835445511"}, - {file = "numpy-1.26.3-cp312-cp312-win_amd64.whl", hash = "sha256:da4b0c6c699a0ad73c810736303f7fbae483bcb012e38d7eb06a5e3b432c981b"}, - {file = "numpy-1.26.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1666f634cb3c80ccbd77ec97bc17337718f56d6658acf5d3b906ca03e90ce87f"}, - {file = "numpy-1.26.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:18c3319a7d39b2c6a9e3bb75aab2304ab79a811ac0168a671a62e6346c29b03f"}, - {file = "numpy-1.26.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b7e807d6888da0db6e7e75838444d62495e2b588b99e90dd80c3459594e857b"}, - {file = "numpy-1.26.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4d362e17bcb0011738c2d83e0a65ea8ce627057b2fdda37678f4374a382a137"}, - {file = "numpy-1.26.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b8c275f0ae90069496068c714387b4a0eba5d531aace269559ff2b43655edd58"}, - {file = "numpy-1.26.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cc0743f0302b94f397a4a65a660d4cd24267439eb16493fb3caad2e4389bccbb"}, - {file = "numpy-1.26.3-cp39-cp39-win32.whl", hash = "sha256:9bc6d1a7f8cedd519c4b7b1156d98e051b726bf160715b769106661d567b3f03"}, - {file = "numpy-1.26.3-cp39-cp39-win_amd64.whl", hash = "sha256:867e3644e208c8922a3be26fc6bbf112a035f50f0a86497f98f228c50c607bb2"}, - {file = "numpy-1.26.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3c67423b3703f8fbd90f5adaa37f85b5794d3366948efe9a5190a5f3a83fc34e"}, - {file = "numpy-1.26.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46f47ee566d98849323f01b349d58f2557f02167ee301e5e28809a8c0e27a2d0"}, - {file = "numpy-1.26.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a8474703bffc65ca15853d5fd4d06b18138ae90c17c8d12169968e998e448bb5"}, - {file = "numpy-1.26.3.tar.gz", hash = "sha256:697df43e2b6310ecc9d95f05d5ef20eacc09c7c4ecc9da3f235d39e71b7da1e4"}, -] [[package]] name = "oauth2client" version = "4.1.3" description = "OAuth 2.0 client library" +category = "main" optional = false python-versions = "*" -files = [ - {file = "oauth2client-4.1.3-py2.py3-none-any.whl", hash = "sha256:b8a81cc5d60e2d364f0b1b98f958dbd472887acaf1a5b05e21c28c31a2d6d3ac"}, - {file = "oauth2client-4.1.3.tar.gz", hash = "sha256:d486741e451287f69568a4d26d70d9acd73a2bbfa275746c535b4209891cccc6"}, -] [package.dependencies] httplib2 = ">=0.9.1" @@ -2348,12 +1570,9 @@ six = ">=1.6.1" name = "oauthlib" version = "3.2.2" description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" +category = "main" optional = false python-versions = ">=3.6" -files = [ - {file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"}, - {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"}, -] [package.extras] rsa = ["cryptography (>=3.0.0)"] @@ -2364,12 +1583,9 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] name = "openpyxl" version = "3.1.2" description = "A Python library to read/write Excel 2010 xlsx/xlsm files" +category = "main" optional = false 
python-versions = ">=3.6" -files = [ - {file = "openpyxl-3.1.2-py2.py3-none-any.whl", hash = "sha256:f91456ead12ab3c6c2e9491cf33ba6d08357d802192379bb482f1033ade496f5"}, - {file = "openpyxl-3.1.2.tar.gz", hash = "sha256:a6f5977418eff3b2d5500d54d9db50c8277a368436f4e4f8ddb1be3422870184"}, -] [package.dependencies] et-xmlfile = "*" @@ -2378,12 +1594,9 @@ et-xmlfile = "*" name = "opentelemetry-api" version = "1.21.0" description = "OpenTelemetry Python API" +category = "main" optional = false python-versions = ">=3.7" -files = [ - {file = "opentelemetry_api-1.21.0-py3-none-any.whl", hash = "sha256:4bb86b28627b7e41098f0e93280fe4892a1abed1b79a19aec6f928f39b17dffb"}, - {file = "opentelemetry_api-1.21.0.tar.gz", hash = "sha256:d6185fd5043e000075d921822fd2d26b953eba8ca21b1e2fa360dd46a7686316"}, -] [package.dependencies] deprecated = ">=1.2.6" @@ -2393,12 +1606,9 @@ importlib-metadata = ">=6.0,<7.0" name = "opentelemetry-exporter-otlp-proto-common" version = "1.21.0" description = "OpenTelemetry Protobuf encoding" +category = "main" optional = false python-versions = ">=3.7" -files = [ - {file = "opentelemetry_exporter_otlp_proto_common-1.21.0-py3-none-any.whl", hash = "sha256:97b1022b38270ec65d11fbfa348e0cd49d12006485c2321ea3b1b7037d42b6ec"}, - {file = "opentelemetry_exporter_otlp_proto_common-1.21.0.tar.gz", hash = "sha256:61db274d8a68d636fb2ec2a0f281922949361cdd8236e25ff5539edf942b3226"}, -] [package.dependencies] backoff = {version = ">=1.10.0,<3.0.0", markers = "python_version >= \"3.7\""} @@ -2408,12 +1618,9 @@ opentelemetry-proto = "1.21.0" name = "opentelemetry-exporter-otlp-proto-http" version = "1.21.0" description = "OpenTelemetry Collector Protobuf over HTTP Exporter" +category = "main" optional = false python-versions = ">=3.7" -files = [ - {file = "opentelemetry_exporter_otlp_proto_http-1.21.0-py3-none-any.whl", hash = "sha256:56837773de6fb2714c01fc4895caebe876f6397bbc4d16afddf89e1299a55ee2"}, - {file = "opentelemetry_exporter_otlp_proto_http-1.21.0.tar.gz", hash = "sha256:19d60afa4ae8597f7ef61ad75c8b6c6b7ef8cb73a33fb4aed4dbc86d5c8d3301"}, -] [package.dependencies] backoff = {version = ">=1.10.0,<3.0.0", markers = "python_version >= \"3.7\""} @@ -2432,12 +1639,9 @@ test = ["responses (==0.22.0)"] name = "opentelemetry-proto" version = "1.21.0" description = "OpenTelemetry Python Proto" +category = "main" optional = false python-versions = ">=3.7" -files = [ - {file = "opentelemetry_proto-1.21.0-py3-none-any.whl", hash = "sha256:32fc4248e83eebd80994e13963e683f25f3b443226336bb12b5b6d53638f50ba"}, - {file = "opentelemetry_proto-1.21.0.tar.gz", hash = "sha256:7d5172c29ed1b525b5ecf4ebe758c7138a9224441b3cfe683d0a237c33b1941f"}, -] [package.dependencies] protobuf = ">=3.19,<5.0" @@ -2446,12 +1650,9 @@ protobuf = ">=3.19,<5.0" name = "opentelemetry-sdk" version = "1.21.0" description = "OpenTelemetry Python SDK" +category = "main" optional = false python-versions = ">=3.7" -files = [ - {file = "opentelemetry_sdk-1.21.0-py3-none-any.whl", hash = "sha256:9fe633243a8c655fedace3a0b89ccdfc654c0290ea2d8e839bd5db3131186f73"}, - {file = "opentelemetry_sdk-1.21.0.tar.gz", hash = "sha256:3ec8cd3020328d6bc5c9991ccaf9ae820ccb6395a5648d9a95d3ec88275b8879"}, -] [package.dependencies] opentelemetry-api = "1.21.0" @@ -2462,44 +1663,33 @@ typing-extensions = ">=3.7.4" name = "opentelemetry-semantic-conventions" version = "0.42b0" description = "OpenTelemetry Semantic Conventions" +category = "main" optional = false python-versions = ">=3.7" -files = [ - {file = 
"opentelemetry_semantic_conventions-0.42b0-py3-none-any.whl", hash = "sha256:5cd719cbfec448af658860796c5d0fcea2fdf0945a2bed2363f42cb1ee39f526"}, - {file = "opentelemetry_semantic_conventions-0.42b0.tar.gz", hash = "sha256:44ae67a0a3252a05072877857e5cc1242c98d4cf12870159f1a94bec800d38ec"}, -] [[package]] name = "overrides" version = "7.4.0" description = "A decorator to automatically detect mismatch when overriding a method." +category = "main" optional = false python-versions = ">=3.6" -files = [ - {file = "overrides-7.4.0-py3-none-any.whl", hash = "sha256:3ad24583f86d6d7a49049695efe9933e67ba62f0c7625d53c59fa832ce4b8b7d"}, - {file = "overrides-7.4.0.tar.gz", hash = "sha256:9502a3cca51f4fac40b5feca985b6703a5c1f6ad815588a7ca9e285b9dca6757"}, -] [[package]] name = "packaging" version = "23.2" description = "Core utilities for Python packages" +category = "main" optional = false python-versions = ">=3.7" -files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, -] [[package]] name = "pandarallel" version = "1.6.5" description = "An easy to use library to speed up computation (by parallelizing on multi CPUs) with pandas." +category = "main" optional = false python-versions = ">=3.7" -files = [ - {file = "pandarallel-1.6.5.tar.gz", hash = "sha256:1c2df98ff6441e8ae13ff428ceebaa7ec42d731f7f972c41ce4fdef1d3adf640"}, -] [package.dependencies] dill = ">=0.3.1" @@ -2514,37 +1704,9 @@ doc = ["mkdocs-material"] name = "pandas" version = "1.5.3" description = "Powerful data structures for data analysis, time series, and statistics" +category = "main" optional = false python-versions = ">=3.8" -files = [ - {file = "pandas-1.5.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3749077d86e3a2f0ed51367f30bf5b82e131cc0f14260c4d3e499186fccc4406"}, - {file = "pandas-1.5.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:972d8a45395f2a2d26733eb8d0f629b2f90bebe8e8eddbb8829b180c09639572"}, - {file = "pandas-1.5.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:50869a35cbb0f2e0cd5ec04b191e7b12ed688874bd05dd777c19b28cbea90996"}, - {file = "pandas-1.5.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3ac844a0fe00bfaeb2c9b51ab1424e5c8744f89860b138434a363b1f620f354"}, - {file = "pandas-1.5.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a0a56cef15fd1586726dace5616db75ebcfec9179a3a55e78f72c5639fa2a23"}, - {file = "pandas-1.5.3-cp310-cp310-win_amd64.whl", hash = "sha256:478ff646ca42b20376e4ed3fa2e8d7341e8a63105586efe54fa2508ee087f328"}, - {file = "pandas-1.5.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6973549c01ca91ec96199e940495219c887ea815b2083722821f1d7abfa2b4dc"}, - {file = "pandas-1.5.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c39a8da13cede5adcd3be1182883aea1c925476f4e84b2807a46e2775306305d"}, - {file = "pandas-1.5.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f76d097d12c82a535fda9dfe5e8dd4127952b45fea9b0276cb30cca5ea313fbc"}, - {file = "pandas-1.5.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e474390e60ed609cec869b0da796ad94f420bb057d86784191eefc62b65819ae"}, - {file = "pandas-1.5.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f2b952406a1588ad4cad5b3f55f520e82e902388a6d5a4a91baa8d38d23c7f6"}, - {file = "pandas-1.5.3-cp311-cp311-win_amd64.whl", hash = 
"sha256:bc4c368f42b551bf72fac35c5128963a171b40dce866fb066540eeaf46faa003"}, - {file = "pandas-1.5.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:14e45300521902689a81f3f41386dc86f19b8ba8dd5ac5a3c7010ef8d2932813"}, - {file = "pandas-1.5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9842b6f4b8479e41968eced654487258ed81df7d1c9b7b870ceea24ed9459b31"}, - {file = "pandas-1.5.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:26d9c71772c7afb9d5046e6e9cf42d83dd147b5cf5bcb9d97252077118543792"}, - {file = "pandas-1.5.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fbcb19d6fceb9e946b3e23258757c7b225ba450990d9ed63ccceeb8cae609f7"}, - {file = "pandas-1.5.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:565fa34a5434d38e9d250af3c12ff931abaf88050551d9fbcdfafca50d62babf"}, - {file = "pandas-1.5.3-cp38-cp38-win32.whl", hash = "sha256:87bd9c03da1ac870a6d2c8902a0e1fd4267ca00f13bc494c9e5a9020920e1d51"}, - {file = "pandas-1.5.3-cp38-cp38-win_amd64.whl", hash = "sha256:41179ce559943d83a9b4bbacb736b04c928b095b5f25dd2b7389eda08f46f373"}, - {file = "pandas-1.5.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c74a62747864ed568f5a82a49a23a8d7fe171d0c69038b38cedf0976831296fa"}, - {file = "pandas-1.5.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c4c00e0b0597c8e4f59e8d461f797e5d70b4d025880516a8261b2817c47759ee"}, - {file = "pandas-1.5.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a50d9a4336a9621cab7b8eb3fb11adb82de58f9b91d84c2cd526576b881a0c5a"}, - {file = "pandas-1.5.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd05f7783b3274aa206a1af06f0ceed3f9b412cf665b7247eacd83be41cf7bf0"}, - {file = "pandas-1.5.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f69c4029613de47816b1bb30ff5ac778686688751a5e9c99ad8c7031f6508e5"}, - {file = "pandas-1.5.3-cp39-cp39-win32.whl", hash = "sha256:7cec0bee9f294e5de5bbfc14d0573f65526071029d036b753ee6507d2a21480a"}, - {file = "pandas-1.5.3-cp39-cp39-win_amd64.whl", hash = "sha256:dfd681c5dc216037e0b0a2c821f5ed99ba9f03ebcf119c7dac0e9a7b960b9ec9"}, - {file = "pandas-1.5.3.tar.gz", hash = "sha256:74a3fd7e5a7ec052f183273dc7b0acd3a863edf7520f5d3a1765c04ffdb3b0b1"}, -] [package.dependencies] numpy = [ @@ -2561,23 +1723,17 @@ test = ["hypothesis (>=5.5.3)", "pytest (>=6.0)", "pytest-xdist (>=1.31)"] name = "pandocfilters" version = "1.5.1" description = "Utilities for writing pandoc filters in python" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "pandocfilters-1.5.1-py2.py3-none-any.whl", hash = "sha256:93be382804a9cdb0a7267585f157e5d1731bbe5545a85b268d6f5fe6232de2bc"}, - {file = "pandocfilters-1.5.1.tar.gz", hash = "sha256:002b4a555ee4ebc03f8b66307e287fa492e4a77b4ea14d3f934328297bb4939e"}, -] [[package]] name = "parso" version = "0.8.3" description = "A Python Parser" +category = "main" optional = false python-versions = ">=3.6" -files = [ - {file = "parso-0.8.3-py2.py3-none-any.whl", hash = "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"}, - {file = "parso-0.8.3.tar.gz", hash = "sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0"}, -] [package.extras] qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] @@ -2587,23 +1743,17 @@ testing = ["docopt", "pytest (<6.0.0)"] name = "pathspec" version = "0.12.1" description = "Utility library for gitignore style pattern matching of file paths." 
+category = "dev" optional = false python-versions = ">=3.8" -files = [ - {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, - {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, -] [[package]] name = "pdoc" version = "12.3.1" description = "API Documentation for Python Projects" +category = "main" optional = false python-versions = ">=3.7" -files = [ - {file = "pdoc-12.3.1-py3-none-any.whl", hash = "sha256:c3f24f31286e634de9c76fa6e67bd5c0c5e74360b41dc91e6b82499831eb52d8"}, - {file = "pdoc-12.3.1.tar.gz", hash = "sha256:453236f225feddb8a9071428f1982a78d74b9b3da4bc4433aedb64dbd0cc87ab"}, -] [package.dependencies] Jinja2 = ">=2.11.0" @@ -2617,12 +1767,9 @@ dev = ["black", "hypothesis", "mypy", "pytest", "pytest-cov", "pytest-timeout", name = "pexpect" version = "4.9.0" description = "Pexpect allows easy control of interactive console applications." +category = "main" optional = false python-versions = "*" -files = [ - {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, - {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"}, -] [package.dependencies] ptyprocess = ">=0.5" @@ -2631,12 +1778,9 @@ ptyprocess = ">=0.5" name = "platformdirs" version = "4.1.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +category = "main" optional = false python-versions = ">=3.8" -files = [ - {file = "platformdirs-4.1.0-py3-none-any.whl", hash = "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380"}, - {file = "platformdirs-4.1.0.tar.gz", hash = "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420"}, -] [package.extras] docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] @@ -2646,12 +1790,9 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co name = "pluggy" version = "1.3.0" description = "plugin and hook calling mechanisms for python" +category = "dev" optional = false python-versions = ">=3.8" -files = [ - {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, - {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, -] [package.extras] dev = ["pre-commit", "tox"] @@ -2661,12 +1802,9 @@ testing = ["pytest", "pytest-benchmark"] name = "prometheus-client" version = "0.19.0" description = "Python client for the Prometheus monitoring system." 
+category = "main" optional = false python-versions = ">=3.8" -files = [ - {file = "prometheus_client-0.19.0-py3-none-any.whl", hash = "sha256:c88b1e6ecf6b41cd8fb5731c7ae919bf66df6ec6fafa555cd6c0e16ca169ae92"}, - {file = "prometheus_client-0.19.0.tar.gz", hash = "sha256:4585b0d1223148c27a225b10dbec5ae9bc4c81a99a3fa80774fa6209935324e1"}, -] [package.extras] twisted = ["twisted"] @@ -2675,12 +1813,9 @@ twisted = ["twisted"] name = "prompt-toolkit" version = "3.0.43" description = "Library for building powerful interactive command lines in Python" +category = "main" optional = false python-versions = ">=3.7.0" -files = [ - {file = "prompt_toolkit-3.0.43-py3-none-any.whl", hash = "sha256:a11a29cb3bf0a28a387fe5122cdb649816a957cd9261dcedf8c9f1fef33eacf6"}, - {file = "prompt_toolkit-3.0.43.tar.gz", hash = "sha256:3527b7af26106cbc65a040bcc84839a3566ec1b051bb0bfe953631e704b0ff7d"}, -] [package.dependencies] wcwidth = "*" @@ -2689,46 +1824,17 @@ wcwidth = "*" name = "protobuf" version = "4.25.2" description = "" +category = "main" optional = false python-versions = ">=3.8" -files = [ - {file = "protobuf-4.25.2-cp310-abi3-win32.whl", hash = "sha256:b50c949608682b12efb0b2717f53256f03636af5f60ac0c1d900df6213910fd6"}, - {file = "protobuf-4.25.2-cp310-abi3-win_amd64.whl", hash = "sha256:8f62574857ee1de9f770baf04dde4165e30b15ad97ba03ceac65f760ff018ac9"}, - {file = "protobuf-4.25.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:2db9f8fa64fbdcdc93767d3cf81e0f2aef176284071507e3ede160811502fd3d"}, - {file = "protobuf-4.25.2-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:10894a2885b7175d3984f2be8d9850712c57d5e7587a2410720af8be56cdaf62"}, - {file = "protobuf-4.25.2-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:fc381d1dd0516343f1440019cedf08a7405f791cd49eef4ae1ea06520bc1c020"}, - {file = "protobuf-4.25.2-cp38-cp38-win32.whl", hash = "sha256:33a1aeef4b1927431d1be780e87b641e322b88d654203a9e9d93f218ee359e61"}, - {file = "protobuf-4.25.2-cp38-cp38-win_amd64.whl", hash = "sha256:47f3de503fe7c1245f6f03bea7e8d3ec11c6c4a2ea9ef910e3221c8a15516d62"}, - {file = "protobuf-4.25.2-cp39-cp39-win32.whl", hash = "sha256:5e5c933b4c30a988b52e0b7c02641760a5ba046edc5e43d3b94a74c9fc57c1b3"}, - {file = "protobuf-4.25.2-cp39-cp39-win_amd64.whl", hash = "sha256:d66a769b8d687df9024f2985d5137a337f957a0916cf5464d1513eee96a63ff0"}, - {file = "protobuf-4.25.2-py3-none-any.whl", hash = "sha256:a8b7a98d4ce823303145bf3c1a8bdb0f2f4642a414b196f04ad9853ed0c8f830"}, - {file = "protobuf-4.25.2.tar.gz", hash = "sha256:fe599e175cb347efc8ee524bcd4b902d11f7262c0e569ececcb89995c15f0a5e"}, -] [[package]] name = "psutil" version = "5.9.7" description = "Cross-platform lib for process and system monitoring in Python." 
+category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" -files = [ - {file = "psutil-5.9.7-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:0bd41bf2d1463dfa535942b2a8f0e958acf6607ac0be52265ab31f7923bcd5e6"}, - {file = "psutil-5.9.7-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:5794944462509e49d4d458f4dbfb92c47539e7d8d15c796f141f474010084056"}, - {file = "psutil-5.9.7-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:fe361f743cb3389b8efda21980d93eb55c1f1e3898269bc9a2a1d0bb7b1f6508"}, - {file = "psutil-5.9.7-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:e469990e28f1ad738f65a42dcfc17adaed9d0f325d55047593cb9033a0ab63df"}, - {file = "psutil-5.9.7-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:3c4747a3e2ead1589e647e64aad601981f01b68f9398ddf94d01e3dc0d1e57c7"}, - {file = "psutil-5.9.7-cp27-none-win32.whl", hash = "sha256:1d4bc4a0148fdd7fd8f38e0498639ae128e64538faa507df25a20f8f7fb2341c"}, - {file = "psutil-5.9.7-cp27-none-win_amd64.whl", hash = "sha256:4c03362e280d06bbbfcd52f29acd79c733e0af33d707c54255d21029b8b32ba6"}, - {file = "psutil-5.9.7-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ea36cc62e69a13ec52b2f625c27527f6e4479bca2b340b7a452af55b34fcbe2e"}, - {file = "psutil-5.9.7-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1132704b876e58d277168cd729d64750633d5ff0183acf5b3c986b8466cd0284"}, - {file = "psutil-5.9.7-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe8b7f07948f1304497ce4f4684881250cd859b16d06a1dc4d7941eeb6233bfe"}, - {file = "psutil-5.9.7-cp36-cp36m-win32.whl", hash = "sha256:b27f8fdb190c8c03914f908a4555159327d7481dac2f01008d483137ef3311a9"}, - {file = "psutil-5.9.7-cp36-cp36m-win_amd64.whl", hash = "sha256:44969859757f4d8f2a9bd5b76eba8c3099a2c8cf3992ff62144061e39ba8568e"}, - {file = "psutil-5.9.7-cp37-abi3-win32.whl", hash = "sha256:c727ca5a9b2dd5193b8644b9f0c883d54f1248310023b5ad3e92036c5e2ada68"}, - {file = "psutil-5.9.7-cp37-abi3-win_amd64.whl", hash = "sha256:f37f87e4d73b79e6c5e749440c3113b81d1ee7d26f21c19c47371ddea834f414"}, - {file = "psutil-5.9.7-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:032f4f2c909818c86cea4fe2cc407f1c0f0cde8e6c6d702b28b8ce0c0d143340"}, - {file = "psutil-5.9.7.tar.gz", hash = "sha256:3f02134e82cfb5d089fddf20bb2e03fd5cd52395321d1c8458a9e58500ff417c"}, -] [package.extras] test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] @@ -2737,23 +1843,17 @@ test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] name = "ptyprocess" version = "0.7.0" description = "Run a subprocess in a pseudo terminal" +category = "main" optional = false python-versions = "*" -files = [ - {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, - {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, -] [[package]] name = "pure-eval" version = "0.2.2" description = "Safely evaluate AST nodes without side effects" +category = "main" optional = false python-versions = "*" -files = [ - {file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"}, - {file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"}, -] [package.extras] tests = ["pytest"] @@ -2762,34 +1862,25 @@ tests = ["pytest"] name = "py" version = 
"1.11.0" description = "library with cross-python path, ini-parsing, io, code, log facilities" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, - {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, -] [[package]] name = "pyasn1" version = "0.5.1" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" -files = [ - {file = "pyasn1-0.5.1-py2.py3-none-any.whl", hash = "sha256:4439847c58d40b1d0a573d07e3856e95333f1976294494c325775aeca506eb58"}, - {file = "pyasn1-0.5.1.tar.gz", hash = "sha256:6d391a96e59b23130a5cfa74d6fd7f388dbbe26cc8f1edf39fdddf08d9d6676c"}, -] [[package]] name = "pyasn1-modules" version = "0.3.0" description = "A collection of ASN.1-based protocols modules" +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" -files = [ - {file = "pyasn1_modules-0.3.0-py2.py3-none-any.whl", hash = "sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d"}, - {file = "pyasn1_modules-0.3.0.tar.gz", hash = "sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c"}, -] [package.dependencies] pyasn1 = ">=0.4.6,<0.6.0" @@ -2798,68 +1889,25 @@ pyasn1 = ">=0.4.6,<0.6.0" name = "pycodestyle" version = "2.11.1" description = "Python style guide checker" +category = "dev" optional = false python-versions = ">=3.8" -files = [ - {file = "pycodestyle-2.11.1-py2.py3-none-any.whl", hash = "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67"}, - {file = "pycodestyle-2.11.1.tar.gz", hash = "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"}, -] [[package]] name = "pycparser" version = "2.21" description = "C parser in Python" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, - {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, -] [[package]] name = "pydantic" version = "1.10.13" description = "Data validation and settings management using python type hints" +category = "main" optional = false python-versions = ">=3.7" -files = [ - {file = "pydantic-1.10.13-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:efff03cc7a4f29d9009d1c96ceb1e7a70a65cfe86e89d34e4a5f2ab1e5693737"}, - {file = "pydantic-1.10.13-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3ecea2b9d80e5333303eeb77e180b90e95eea8f765d08c3d278cd56b00345d01"}, - {file = "pydantic-1.10.13-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1740068fd8e2ef6eb27a20e5651df000978edce6da6803c2bef0bc74540f9548"}, - {file = "pydantic-1.10.13-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84bafe2e60b5e78bc64a2941b4c071a4b7404c5c907f5f5a99b0139781e69ed8"}, - {file = "pydantic-1.10.13-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bc0898c12f8e9c97f6cd44c0ed70d55749eaf783716896960b4ecce2edfd2d69"}, - {file = "pydantic-1.10.13-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:654db58ae399fe6434e55325a2c3e959836bd17a6f6a0b6ca8107ea0571d2e17"}, - {file = "pydantic-1.10.13-cp310-cp310-win_amd64.whl", hash = "sha256:75ac15385a3534d887a99c713aa3da88a30fbd6204a5cd0dc4dab3d770b9bd2f"}, - {file = "pydantic-1.10.13-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c553f6a156deb868ba38a23cf0df886c63492e9257f60a79c0fd8e7173537653"}, - {file = "pydantic-1.10.13-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5e08865bc6464df8c7d61439ef4439829e3ab62ab1669cddea8dd00cd74b9ffe"}, - {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e31647d85a2013d926ce60b84f9dd5300d44535a9941fe825dc349ae1f760df9"}, - {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:210ce042e8f6f7c01168b2d84d4c9eb2b009fe7bf572c2266e235edf14bacd80"}, - {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8ae5dd6b721459bfa30805f4c25880e0dd78fc5b5879f9f7a692196ddcb5a580"}, - {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f8e81fc5fb17dae698f52bdd1c4f18b6ca674d7068242b2aff075f588301bbb0"}, - {file = "pydantic-1.10.13-cp311-cp311-win_amd64.whl", hash = "sha256:61d9dce220447fb74f45e73d7ff3b530e25db30192ad8d425166d43c5deb6df0"}, - {file = "pydantic-1.10.13-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4b03e42ec20286f052490423682016fd80fda830d8e4119f8ab13ec7464c0132"}, - {file = "pydantic-1.10.13-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f59ef915cac80275245824e9d771ee939133be38215555e9dc90c6cb148aaeb5"}, - {file = "pydantic-1.10.13-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a1f9f747851338933942db7af7b6ee8268568ef2ed86c4185c6ef4402e80ba8"}, - {file = "pydantic-1.10.13-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:97cce3ae7341f7620a0ba5ef6cf043975cd9d2b81f3aa5f4ea37928269bc1b87"}, - {file = "pydantic-1.10.13-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:854223752ba81e3abf663d685f105c64150873cc6f5d0c01d3e3220bcff7d36f"}, - {file = "pydantic-1.10.13-cp37-cp37m-win_amd64.whl", hash = "sha256:b97c1fac8c49be29486df85968682b0afa77e1b809aff74b83081cc115e52f33"}, - {file = "pydantic-1.10.13-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c958d053453a1c4b1c2062b05cd42d9d5c8eb67537b8d5a7e3c3032943ecd261"}, - {file = "pydantic-1.10.13-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c5370a7edaac06daee3af1c8b1192e305bc102abcbf2a92374b5bc793818599"}, - {file = "pydantic-1.10.13-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d6f6e7305244bddb4414ba7094ce910560c907bdfa3501e9db1a7fd7eaea127"}, - {file = "pydantic-1.10.13-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d3a3c792a58e1622667a2837512099eac62490cdfd63bd407993aaf200a4cf1f"}, - {file = "pydantic-1.10.13-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c636925f38b8db208e09d344c7aa4f29a86bb9947495dd6b6d376ad10334fb78"}, - {file = "pydantic-1.10.13-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:678bcf5591b63cc917100dc50ab6caebe597ac67e8c9ccb75e698f66038ea953"}, - {file = "pydantic-1.10.13-cp38-cp38-win_amd64.whl", hash = "sha256:6cf25c1a65c27923a17b3da28a0bdb99f62ee04230c931d83e888012851f4e7f"}, - {file = "pydantic-1.10.13-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8ef467901d7a41fa0ca6db9ae3ec0021e3f657ce2c208e98cd511f3161c762c6"}, - {file = 
"pydantic-1.10.13-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:968ac42970f57b8344ee08837b62f6ee6f53c33f603547a55571c954a4225691"}, - {file = "pydantic-1.10.13-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9849f031cf8a2f0a928fe885e5a04b08006d6d41876b8bbd2fc68a18f9f2e3fd"}, - {file = "pydantic-1.10.13-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:56e3ff861c3b9c6857579de282ce8baabf443f42ffba355bf070770ed63e11e1"}, - {file = "pydantic-1.10.13-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f00790179497767aae6bcdc36355792c79e7bbb20b145ff449700eb076c5f96"}, - {file = "pydantic-1.10.13-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:75b297827b59bc229cac1a23a2f7a4ac0031068e5be0ce385be1462e7e17a35d"}, - {file = "pydantic-1.10.13-cp39-cp39-win_amd64.whl", hash = "sha256:e70ca129d2053fb8b728ee7d1af8e553a928d7e301a311094b8a0501adc8763d"}, - {file = "pydantic-1.10.13-py3-none-any.whl", hash = "sha256:b87326822e71bd5f313e7d3bfdc77ac3247035ac10b0c0618bd99dcf95b1e687"}, - {file = "pydantic-1.10.13.tar.gz", hash = "sha256:32c8b48dcd3b2ac4e78b0ba4af3a2c2eb6048cb75202f0ea7b34feb740efc340"}, -] [package.dependencies] typing-extensions = ">=4.2.0" @@ -2872,23 +1920,17 @@ email = ["email-validator (>=1.0.3)"] name = "pyflakes" version = "3.1.0" description = "passive checker of Python programs" +category = "dev" optional = false python-versions = ">=3.8" -files = [ - {file = "pyflakes-3.1.0-py2.py3-none-any.whl", hash = "sha256:4132f6d49cb4dae6819e5379898f2b8cce3c5f23994194c24b77d5da2e36f774"}, - {file = "pyflakes-3.1.0.tar.gz", hash = "sha256:a0aae034c444db0071aa077972ba4768d40c830d9539fd45bf4cd3f8f6992efc"}, -] [[package]] name = "pygments" version = "2.17.2" description = "Pygments is a syntax highlighting package written in Python." 
+category = "main" optional = false python-versions = ">=3.7" -files = [ - {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, - {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, -] [package.extras] plugins = ["importlib-metadata"] @@ -2898,12 +1940,9 @@ windows-terminal = ["colorama (>=0.4.6)"] name = "pygsheets" version = "2.0.6" description = "Google Spreadsheets Python API v4" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "pygsheets-2.0.6-py3-none-any.whl", hash = "sha256:3338c2eb8990fdee9f463b42a370ec0870c118d607d775471a6dfb8b08f6cd87"}, - {file = "pygsheets-2.0.6.tar.gz", hash = "sha256:bff46c812e99f9b8b81a09b456581365281c797620ec08530b0d0e48fa9299e2"}, -] [package.dependencies] google-api-python-client = ">=2.50.0" @@ -2916,12 +1955,9 @@ pandas = ["pandas (>=0.14.0)"] name = "pylint" version = "2.17.7" description = "python code static checker" +category = "dev" optional = false python-versions = ">=3.7.2" -files = [ - {file = "pylint-2.17.7-py3-none-any.whl", hash = "sha256:27a8d4c7ddc8c2f8c18aa0050148f89ffc09838142193fdbe98f172781a3ff87"}, - {file = "pylint-2.17.7.tar.gz", hash = "sha256:f4fcac7ae74cfe36bc8451e931d8438e4a476c20314b1101c458ad0f05191fad"}, -] [package.dependencies] astroid = ">=2.15.8,<=2.17.0-dev0" @@ -2942,12 +1978,9 @@ testutils = ["gitpython (>3)"] name = "pyopenssl" version = "23.3.0" description = "Python wrapper module around the OpenSSL library" +category = "main" optional = false python-versions = ">=3.7" -files = [ - {file = "pyOpenSSL-23.3.0-py3-none-any.whl", hash = "sha256:6756834481d9ed5470f4a9393455154bc92fe7a64b7bc6ee2c804e78c52099b2"}, - {file = "pyOpenSSL-23.3.0.tar.gz", hash = "sha256:6b2cba5cc46e822750ec3e5a81ee12819850b11303630d575e98108a079c2b12"}, -] [package.dependencies] cryptography = ">=41.0.5,<42" @@ -2960,12 +1993,9 @@ test = ["flaky", "pretend", "pytest (>=3.0.1)"] name = "pyparsing" version = "3.1.1" description = "pyparsing module - Classes and methods to define and execute parsing grammars" +category = "main" optional = false python-versions = ">=3.6.8" -files = [ - {file = "pyparsing-3.1.1-py3-none-any.whl", hash = "sha256:32c7c0b711493c72ff18a981d24f28aaf9c1fb7ed5e9667c9e84e3db623bdbfb"}, - {file = "pyparsing-3.1.1.tar.gz", hash = "sha256:ede28a1a32462f5a9705e07aea48001a08f7cf81a021585011deba701581a0db"}, -] [package.extras] diagrams = ["jinja2", "railroad-diagrams"] @@ -2974,12 +2004,9 @@ diagrams = ["jinja2", "railroad-diagrams"] name = "pytest" version = "7.4.4" description = "pytest: simple powerful testing with Python" +category = "dev" optional = false python-versions = ">=3.7" -files = [ - {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, - {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, -] [package.dependencies] colorama = {version = "*", markers = "sys_platform == \"win32\""} @@ -2996,12 +2023,9 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no name = "pytest-cov" version = "4.1.0" description = "Pytest plugin for measuring coverage." 
+category = "dev" optional = false python-versions = ">=3.7" -files = [ - {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, - {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, -] [package.dependencies] coverage = {version = ">=5.2.1", extras = ["toml"]} @@ -3014,12 +2038,9 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtuale name = "pytest-mock" version = "3.12.0" description = "Thin-wrapper around the mock package for easier use with pytest" +category = "dev" optional = false python-versions = ">=3.8" -files = [ - {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"}, - {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"}, -] [package.dependencies] pytest = ">=5.0" @@ -3031,12 +2052,9 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] name = "pytest-rerunfailures" version = "12.0" description = "pytest plugin to re-run tests to eliminate flaky failures" +category = "dev" optional = false python-versions = ">=3.7" -files = [ - {file = "pytest-rerunfailures-12.0.tar.gz", hash = "sha256:784f462fa87fe9bdf781d0027d856b47a4bfe6c12af108f6bd887057a917b48e"}, - {file = "pytest_rerunfailures-12.0-py3-none-any.whl", hash = "sha256:9a1afd04e21b8177faf08a9bbbf44de7a0fe3fc29f8ddbe83b9684bd5f8f92a9"}, -] [package.dependencies] packaging = ">=17.1" @@ -3046,12 +2064,9 @@ pytest = ">=6.2" name = "pytest-xdist" version = "3.5.0" description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs" +category = "dev" optional = false python-versions = ">=3.7" -files = [ - {file = "pytest-xdist-3.5.0.tar.gz", hash = "sha256:cbb36f3d67e0c478baa57fa4edc8843887e0f6cfc42d677530a36d7472b32d8a"}, - {file = "pytest_xdist-3.5.0-py3-none-any.whl", hash = "sha256:d075629c7e00b611df89f490a5063944bee7a4362a5ff11c7cc7824a03dfce24"}, -] [package.dependencies] execnet = ">=1.1" @@ -3066,12 +2081,9 @@ testing = ["filelock"] name = "python-dateutil" version = "2.8.2" description = "Extensions to the standard Python datetime module" +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, -] [package.dependencies] six = ">=1.5" @@ -3080,12 +2092,9 @@ six = ">=1.5" name = "python-dotenv" version = "0.21.1" description = "Read key-value pairs from a .env file and set them as environment variables" +category = "dev" optional = false python-versions = ">=3.7" -files = [ - {file = "python-dotenv-0.21.1.tar.gz", hash = "sha256:1c93de8f636cde3ce377292818d0e440b6e45a82f215c3744979151fa8151c49"}, - {file = "python_dotenv-0.21.1-py3-none-any.whl", hash = "sha256:41e12e0318bebc859fcc4d97d4db8d20ad21721a6aa5047dd59f090391cb549a"}, -] [package.extras] cli = ["click (>=5.0)"] @@ -3094,233 +2103,57 @@ cli = ["click (>=5.0)"] name = "python-json-logger" version = "2.0.7" description = "A python library adding a json log formatter" +category = "main" optional = false python-versions = ">=3.6" -files = [ - {file = "python-json-logger-2.0.7.tar.gz", hash = 
"sha256:23e7ec02d34237c5aa1e29a070193a4ea87583bb4e7f8fd06d3de8264c4b2e1c"}, - {file = "python_json_logger-2.0.7-py3-none-any.whl", hash = "sha256:f380b826a991ebbe3de4d897aeec42760035ac760345e57b812938dc8b35e2bd"}, -] [[package]] name = "pytz" version = "2023.3.post1" description = "World timezone definitions, modern and historical" +category = "main" optional = false python-versions = "*" -files = [ - {file = "pytz-2023.3.post1-py2.py3-none-any.whl", hash = "sha256:ce42d816b81b68506614c11e8937d3aa9e41007ceb50bfdcb0749b921bf646c7"}, - {file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"}, -] [[package]] name = "pywin32" version = "306" description = "Python for Window Extensions" +category = "main" optional = false python-versions = "*" -files = [ - {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, - {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"}, - {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"}, - {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"}, - {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"}, - {file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"}, - {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"}, - {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"}, - {file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"}, - {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"}, - {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"}, - {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"}, - {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"}, - {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, -] [[package]] name = "pywin32-ctypes" version = "0.2.2" description = "A (partial) reimplementation of pywin32 using ctypes/cffi" +category = "main" optional = false python-versions = ">=3.6" -files = [ - {file = "pywin32-ctypes-0.2.2.tar.gz", hash = "sha256:3426e063bdd5fd4df74a14fa3cf80a0b42845a87e1d1e81f6549f9daec593a60"}, - {file = "pywin32_ctypes-0.2.2-py3-none-any.whl", hash = "sha256:bf490a1a709baf35d688fe0ecf980ed4de11d2b3e37b51e5442587a75d9957e7"}, -] [[package]] name = "pywinpty" version = "2.0.12" description = "Pseudo terminal support for Windows from Python." 
+category = "main" optional = false python-versions = ">=3.8" -files = [ - {file = "pywinpty-2.0.12-cp310-none-win_amd64.whl", hash = "sha256:21319cd1d7c8844fb2c970fb3a55a3db5543f112ff9cfcd623746b9c47501575"}, - {file = "pywinpty-2.0.12-cp311-none-win_amd64.whl", hash = "sha256:853985a8f48f4731a716653170cd735da36ffbdc79dcb4c7b7140bce11d8c722"}, - {file = "pywinpty-2.0.12-cp312-none-win_amd64.whl", hash = "sha256:1617b729999eb6713590e17665052b1a6ae0ad76ee31e60b444147c5b6a35dca"}, - {file = "pywinpty-2.0.12-cp38-none-win_amd64.whl", hash = "sha256:189380469ca143d06e19e19ff3fba0fcefe8b4a8cc942140a6b863aed7eebb2d"}, - {file = "pywinpty-2.0.12-cp39-none-win_amd64.whl", hash = "sha256:7520575b6546db23e693cbd865db2764097bd6d4ef5dc18c92555904cd62c3d4"}, - {file = "pywinpty-2.0.12.tar.gz", hash = "sha256:8197de460ae8ebb7f5d1701dfa1b5df45b157bb832e92acba316305e18ca00dd"}, -] [[package]] name = "pyyaml" version = "6.0.1" description = "YAML parser and emitter for Python" +category = "main" optional = false python-versions = ">=3.6" -files = [ - {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, - {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, - {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, - {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, - {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, - {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, - {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, - {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, - {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, - {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, - {file = 
"PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, - {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, -] [[package]] name = "pyzmq" version = "25.1.2" description = "Python bindings for 0MQ" +category = "main" optional = false python-versions = ">=3.6" -files = [ - {file = "pyzmq-25.1.2-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:e624c789359f1a16f83f35e2c705d07663ff2b4d4479bad35621178d8f0f6ea4"}, - {file = "pyzmq-25.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:49151b0efece79f6a79d41a461d78535356136ee70084a1c22532fc6383f4ad0"}, - {file = "pyzmq-25.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9a5f194cf730f2b24d6af1f833c14c10f41023da46a7f736f48b6d35061e76e"}, - {file = "pyzmq-25.1.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:faf79a302f834d9e8304fafdc11d0d042266667ac45209afa57e5efc998e3872"}, - {file = "pyzmq-25.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f51a7b4ead28d3fca8dda53216314a553b0f7a91ee8fc46a72b402a78c3e43d"}, - {file = "pyzmq-25.1.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:0ddd6d71d4ef17ba5a87becf7ddf01b371eaba553c603477679ae817a8d84d75"}, - {file = "pyzmq-25.1.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:246747b88917e4867e2367b005fc8eefbb4a54b7db363d6c92f89d69abfff4b6"}, - {file = "pyzmq-25.1.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:00c48ae2fd81e2a50c3485de1b9d5c7c57cd85dc8ec55683eac16846e57ac979"}, - {file = "pyzmq-25.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5a68d491fc20762b630e5db2191dd07ff89834086740f70e978bb2ef2668be08"}, - {file = "pyzmq-25.1.2-cp310-cp310-win32.whl", hash = "sha256:09dfe949e83087da88c4a76767df04b22304a682d6154de2c572625c62ad6886"}, - {file = "pyzmq-25.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:fa99973d2ed20417744fca0073390ad65ce225b546febb0580358e36aa90dba6"}, - {file = "pyzmq-25.1.2-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:82544e0e2d0c1811482d37eef297020a040c32e0687c1f6fc23a75b75db8062c"}, - {file = "pyzmq-25.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:01171fc48542348cd1a360a4b6c3e7d8f46cdcf53a8d40f84db6707a6768acc1"}, - {file = 
"pyzmq-25.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc69c96735ab501419c432110016329bf0dea8898ce16fab97c6d9106dc0b348"}, - {file = "pyzmq-25.1.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3e124e6b1dd3dfbeb695435dff0e383256655bb18082e094a8dd1f6293114642"}, - {file = "pyzmq-25.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7598d2ba821caa37a0f9d54c25164a4fa351ce019d64d0b44b45540950458840"}, - {file = "pyzmq-25.1.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d1299d7e964c13607efd148ca1f07dcbf27c3ab9e125d1d0ae1d580a1682399d"}, - {file = "pyzmq-25.1.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4e6f689880d5ad87918430957297c975203a082d9a036cc426648fcbedae769b"}, - {file = "pyzmq-25.1.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cc69949484171cc961e6ecd4a8911b9ce7a0d1f738fcae717177c231bf77437b"}, - {file = "pyzmq-25.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9880078f683466b7f567b8624bfc16cad65077be046b6e8abb53bed4eeb82dd3"}, - {file = "pyzmq-25.1.2-cp311-cp311-win32.whl", hash = "sha256:4e5837af3e5aaa99a091302df5ee001149baff06ad22b722d34e30df5f0d9097"}, - {file = "pyzmq-25.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:25c2dbb97d38b5ac9fd15586e048ec5eb1e38f3d47fe7d92167b0c77bb3584e9"}, - {file = "pyzmq-25.1.2-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:11e70516688190e9c2db14fcf93c04192b02d457b582a1f6190b154691b4c93a"}, - {file = "pyzmq-25.1.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:313c3794d650d1fccaaab2df942af9f2c01d6217c846177cfcbc693c7410839e"}, - {file = "pyzmq-25.1.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b3cbba2f47062b85fe0ef9de5b987612140a9ba3a9c6d2543c6dec9f7c2ab27"}, - {file = "pyzmq-25.1.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fc31baa0c32a2ca660784d5af3b9487e13b61b3032cb01a115fce6588e1bed30"}, - {file = "pyzmq-25.1.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02c9087b109070c5ab0b383079fa1b5f797f8d43e9a66c07a4b8b8bdecfd88ee"}, - {file = "pyzmq-25.1.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:f8429b17cbb746c3e043cb986328da023657e79d5ed258b711c06a70c2ea7537"}, - {file = "pyzmq-25.1.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5074adeacede5f810b7ef39607ee59d94e948b4fd954495bdb072f8c54558181"}, - {file = "pyzmq-25.1.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:7ae8f354b895cbd85212da245f1a5ad8159e7840e37d78b476bb4f4c3f32a9fe"}, - {file = "pyzmq-25.1.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b264bf2cc96b5bc43ce0e852be995e400376bd87ceb363822e2cb1964fcdc737"}, - {file = "pyzmq-25.1.2-cp312-cp312-win32.whl", hash = "sha256:02bbc1a87b76e04fd780b45e7f695471ae6de747769e540da909173d50ff8e2d"}, - {file = "pyzmq-25.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:ced111c2e81506abd1dc142e6cd7b68dd53747b3b7ae5edbea4578c5eeff96b7"}, - {file = "pyzmq-25.1.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:7b6d09a8962a91151f0976008eb7b29b433a560fde056ec7a3db9ec8f1075438"}, - {file = "pyzmq-25.1.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:967668420f36878a3c9ecb5ab33c9d0ff8d054f9c0233d995a6d25b0e95e1b6b"}, - {file = "pyzmq-25.1.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5edac3f57c7ddaacdb4d40f6ef2f9e299471fc38d112f4bc6d60ab9365445fb0"}, - {file = "pyzmq-25.1.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = 
"sha256:0dabfb10ef897f3b7e101cacba1437bd3a5032ee667b7ead32bbcdd1a8422fe7"}, - {file = "pyzmq-25.1.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:2c6441e0398c2baacfe5ba30c937d274cfc2dc5b55e82e3749e333aabffde561"}, - {file = "pyzmq-25.1.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:16b726c1f6c2e7625706549f9dbe9b06004dfbec30dbed4bf50cbdfc73e5b32a"}, - {file = "pyzmq-25.1.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:a86c2dd76ef71a773e70551a07318b8e52379f58dafa7ae1e0a4be78efd1ff16"}, - {file = "pyzmq-25.1.2-cp36-cp36m-win32.whl", hash = "sha256:359f7f74b5d3c65dae137f33eb2bcfa7ad9ebefd1cab85c935f063f1dbb245cc"}, - {file = "pyzmq-25.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:55875492f820d0eb3417b51d96fea549cde77893ae3790fd25491c5754ea2f68"}, - {file = "pyzmq-25.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b8c8a419dfb02e91b453615c69568442e897aaf77561ee0064d789705ff37a92"}, - {file = "pyzmq-25.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8807c87fa893527ae8a524c15fc505d9950d5e856f03dae5921b5e9aa3b8783b"}, - {file = "pyzmq-25.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5e319ed7d6b8f5fad9b76daa0a68497bc6f129858ad956331a5835785761e003"}, - {file = "pyzmq-25.1.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:3c53687dde4d9d473c587ae80cc328e5b102b517447456184b485587ebd18b62"}, - {file = "pyzmq-25.1.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:9add2e5b33d2cd765ad96d5eb734a5e795a0755f7fc49aa04f76d7ddda73fd70"}, - {file = "pyzmq-25.1.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:e690145a8c0c273c28d3b89d6fb32c45e0d9605b2293c10e650265bf5c11cfec"}, - {file = "pyzmq-25.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:00a06faa7165634f0cac1abb27e54d7a0b3b44eb9994530b8ec73cf52e15353b"}, - {file = "pyzmq-25.1.2-cp37-cp37m-win32.whl", hash = "sha256:0f97bc2f1f13cb16905a5f3e1fbdf100e712d841482b2237484360f8bc4cb3d7"}, - {file = "pyzmq-25.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6cc0020b74b2e410287e5942e1e10886ff81ac77789eb20bec13f7ae681f0fdd"}, - {file = "pyzmq-25.1.2-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:bef02cfcbded83473bdd86dd8d3729cd82b2e569b75844fb4ea08fee3c26ae41"}, - {file = "pyzmq-25.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e10a4b5a4b1192d74853cc71a5e9fd022594573926c2a3a4802020360aa719d8"}, - {file = "pyzmq-25.1.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8c5f80e578427d4695adac6fdf4370c14a2feafdc8cb35549c219b90652536ae"}, - {file = "pyzmq-25.1.2-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5dde6751e857910c1339890f3524de74007958557593b9e7e8c5f01cd919f8a7"}, - {file = "pyzmq-25.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea1608dd169da230a0ad602d5b1ebd39807ac96cae1845c3ceed39af08a5c6df"}, - {file = "pyzmq-25.1.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0f513130c4c361201da9bc69df25a086487250e16b5571ead521b31ff6b02220"}, - {file = "pyzmq-25.1.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:019744b99da30330798bb37df33549d59d380c78e516e3bab9c9b84f87a9592f"}, - {file = "pyzmq-25.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2e2713ef44be5d52dd8b8e2023d706bf66cb22072e97fc71b168e01d25192755"}, - {file = "pyzmq-25.1.2-cp38-cp38-win32.whl", hash = "sha256:07cd61a20a535524906595e09344505a9bd46f1da7a07e504b315d41cd42eb07"}, - {file = "pyzmq-25.1.2-cp38-cp38-win_amd64.whl", hash = 
"sha256:eb7e49a17fb8c77d3119d41a4523e432eb0c6932187c37deb6fbb00cc3028088"}, - {file = "pyzmq-25.1.2-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:94504ff66f278ab4b7e03e4cba7e7e400cb73bfa9d3d71f58d8972a8dc67e7a6"}, - {file = "pyzmq-25.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6dd0d50bbf9dca1d0bdea219ae6b40f713a3fb477c06ca3714f208fd69e16fd8"}, - {file = "pyzmq-25.1.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:004ff469d21e86f0ef0369717351073e0e577428e514c47c8480770d5e24a565"}, - {file = "pyzmq-25.1.2-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c0b5ca88a8928147b7b1e2dfa09f3b6c256bc1135a1338536cbc9ea13d3b7add"}, - {file = "pyzmq-25.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c9a79f1d2495b167119d02be7448bfba57fad2a4207c4f68abc0bab4b92925b"}, - {file = "pyzmq-25.1.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:518efd91c3d8ac9f9b4f7dd0e2b7b8bf1a4fe82a308009016b07eaa48681af82"}, - {file = "pyzmq-25.1.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:1ec23bd7b3a893ae676d0e54ad47d18064e6c5ae1fadc2f195143fb27373f7f6"}, - {file = "pyzmq-25.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db36c27baed588a5a8346b971477b718fdc66cf5b80cbfbd914b4d6d355e44e2"}, - {file = "pyzmq-25.1.2-cp39-cp39-win32.whl", hash = "sha256:39b1067f13aba39d794a24761e385e2eddc26295826530a8c7b6c6c341584289"}, - {file = "pyzmq-25.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:8e9f3fabc445d0ce320ea2c59a75fe3ea591fdbdeebec5db6de530dd4b09412e"}, - {file = "pyzmq-25.1.2-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a8c1d566344aee826b74e472e16edae0a02e2a044f14f7c24e123002dcff1c05"}, - {file = "pyzmq-25.1.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:759cfd391a0996345ba94b6a5110fca9c557ad4166d86a6e81ea526c376a01e8"}, - {file = "pyzmq-25.1.2-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c61e346ac34b74028ede1c6b4bcecf649d69b707b3ff9dc0fab453821b04d1e"}, - {file = "pyzmq-25.1.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cb8fc1f8d69b411b8ec0b5f1ffbcaf14c1db95b6bccea21d83610987435f1a4"}, - {file = "pyzmq-25.1.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:3c00c9b7d1ca8165c610437ca0c92e7b5607b2f9076f4eb4b095c85d6e680a1d"}, - {file = "pyzmq-25.1.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:df0c7a16ebb94452d2909b9a7b3337940e9a87a824c4fc1c7c36bb4404cb0cde"}, - {file = "pyzmq-25.1.2-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:45999e7f7ed5c390f2e87ece7f6c56bf979fb213550229e711e45ecc7d42ccb8"}, - {file = "pyzmq-25.1.2-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ac170e9e048b40c605358667aca3d94e98f604a18c44bdb4c102e67070f3ac9b"}, - {file = "pyzmq-25.1.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1b604734bec94f05f81b360a272fc824334267426ae9905ff32dc2be433ab96"}, - {file = "pyzmq-25.1.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:a793ac733e3d895d96f865f1806f160696422554e46d30105807fdc9841b9f7d"}, - {file = "pyzmq-25.1.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0806175f2ae5ad4b835ecd87f5f85583316b69f17e97786f7443baaf54b9bb98"}, - {file = "pyzmq-25.1.2-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ef12e259e7bc317c7597d4f6ef59b97b913e162d83b421dd0db3d6410f17a244"}, - {file = 
"pyzmq-25.1.2-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ea253b368eb41116011add00f8d5726762320b1bda892f744c91997b65754d73"}, - {file = "pyzmq-25.1.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b9b1f2ad6498445a941d9a4fee096d387fee436e45cc660e72e768d3d8ee611"}, - {file = "pyzmq-25.1.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:8b14c75979ce932c53b79976a395cb2a8cd3aaf14aef75e8c2cb55a330b9b49d"}, - {file = "pyzmq-25.1.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:889370d5174a741a62566c003ee8ddba4b04c3f09a97b8000092b7ca83ec9c49"}, - {file = "pyzmq-25.1.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9a18fff090441a40ffda8a7f4f18f03dc56ae73f148f1832e109f9bffa85df15"}, - {file = "pyzmq-25.1.2-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99a6b36f95c98839ad98f8c553d8507644c880cf1e0a57fe5e3a3f3969040882"}, - {file = "pyzmq-25.1.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4345c9a27f4310afbb9c01750e9461ff33d6fb74cd2456b107525bbeebcb5be3"}, - {file = "pyzmq-25.1.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3516e0b6224cf6e43e341d56da15fd33bdc37fa0c06af4f029f7d7dfceceabbc"}, - {file = "pyzmq-25.1.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:146b9b1f29ead41255387fb07be56dc29639262c0f7344f570eecdcd8d683314"}, - {file = "pyzmq-25.1.2.tar.gz", hash = "sha256:93f1aa311e8bb912e34f004cf186407a4e90eec4f0ecc0efd26056bf7eda0226"}, -] [package.dependencies] cffi = {version = "*", markers = "implementation_name == \"pypy\""} @@ -3329,12 +2162,9 @@ cffi = {version = "*", markers = "implementation_name == \"pypy\""} name = "rdflib" version = "6.3.2" description = "RDFLib is a Python library for working with RDF, a simple yet powerful language for representing information." +category = "main" optional = false python-versions = ">=3.7,<4.0" -files = [ - {file = "rdflib-6.3.2-py3-none-any.whl", hash = "sha256:36b4e74a32aa1e4fa7b8719876fb192f19ecd45ff932ea5ebbd2e417a0247e63"}, - {file = "rdflib-6.3.2.tar.gz", hash = "sha256:72af591ff704f4caacea7ecc0c5a9056b8553e0489dd4f35a9bc52dbd41522e0"}, -] [package.dependencies] isodate = ">=0.6.0,<0.7.0" @@ -3350,12 +2180,9 @@ networkx = ["networkx (>=2.0.0,<3.0.0)"] name = "referencing" version = "0.32.1" description = "JSON Referencing + Python" +category = "main" optional = false python-versions = ">=3.8" -files = [ - {file = "referencing-0.32.1-py3-none-any.whl", hash = "sha256:7e4dc12271d8e15612bfe35792f5ea1c40970dadf8624602e33db2758f7ee554"}, - {file = "referencing-0.32.1.tar.gz", hash = "sha256:3c57da0513e9563eb7e203ebe9bb3a1b509b042016433bd1e45a2853466c3dd3"}, -] [package.dependencies] attrs = ">=22.2.0" @@ -3365,114 +2192,17 @@ rpds-py = ">=0.7.0" name = "regex" version = "2023.12.25" description = "Alternative regular expression module, to replace re." 
+category = "main"
 optional = false
 python-versions = ">=3.7"
-files = [
-    {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0694219a1d54336fd0445ea382d49d36882415c0134ee1e8332afd1529f0baa5"},
-    {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b014333bd0217ad3d54c143de9d4b9a3ca1c5a29a6d0d554952ea071cff0f1f8"},
-    {file = "regex-2023.12.25-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d865984b3f71f6d0af64d0d88f5733521698f6c16f445bb09ce746c92c97c586"},
-    {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e0eabac536b4cc7f57a5f3d095bfa557860ab912f25965e08fe1545e2ed8b4c"},
-    {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c25a8ad70e716f96e13a637802813f65d8a6760ef48672aa3502f4c24ea8b400"},
-    {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9b6d73353f777630626f403b0652055ebfe8ff142a44ec2cf18ae470395766e"},
-    {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9cc99d6946d750eb75827cb53c4371b8b0fe89c733a94b1573c9dd16ea6c9e4"},
-    {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88d1f7bef20c721359d8675f7d9f8e414ec5003d8f642fdfd8087777ff7f94b5"},
-    {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cb3fe77aec8f1995611f966d0c656fdce398317f850d0e6e7aebdfe61f40e1cd"},
-    {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7aa47c2e9ea33a4a2a05f40fcd3ea36d73853a2aae7b4feab6fc85f8bf2c9704"},
-    {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:df26481f0c7a3f8739fecb3e81bc9da3fcfae34d6c094563b9d4670b047312e1"},
-    {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c40281f7d70baf6e0db0c2f7472b31609f5bc2748fe7275ea65a0b4601d9b392"},
-    {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:d94a1db462d5690ebf6ae86d11c5e420042b9898af5dcf278bd97d6bda065423"},
-    {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ba1b30765a55acf15dce3f364e4928b80858fa8f979ad41f862358939bdd1f2f"},
-    {file = "regex-2023.12.25-cp310-cp310-win32.whl", hash = "sha256:150c39f5b964e4d7dba46a7962a088fbc91f06e606f023ce57bb347a3b2d4630"},
-    {file = "regex-2023.12.25-cp310-cp310-win_amd64.whl", hash = "sha256:09da66917262d9481c719599116c7dc0c321ffcec4b1f510c4f8a066f8768105"},
-    {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1b9d811f72210fa9306aeb88385b8f8bcef0dfbf3873410413c00aa94c56c2b6"},
-    {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d902a43085a308cef32c0d3aea962524b725403fd9373dea18110904003bac97"},
-    {file = "regex-2023.12.25-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d166eafc19f4718df38887b2bbe1467a4f74a9830e8605089ea7a30dd4da8887"},
-    {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7ad32824b7f02bb3c9f80306d405a1d9b7bb89362d68b3c5a9be53836caebdb"},
-    {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:636ba0a77de609d6510235b7f0e77ec494d2657108f777e8765efc060094c98c"},
-    {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fda75704357805eb953a3ee15a2b240694a9a514548cd49b3c5124b4e2ad01b"},
-    {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f72cbae7f6b01591f90814250e636065850c5926751af02bb48da94dfced7baa"},
-    {file = "regex-2023.12.25-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db2a0b1857f18b11e3b0e54ddfefc96af46b0896fb678c85f63fb8c37518b3e7"},
-    {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7502534e55c7c36c0978c91ba6f61703faf7ce733715ca48f499d3dbbd7657e0"},
-    {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e8c7e08bb566de4faaf11984af13f6bcf6a08f327b13631d41d62592681d24fe"},
-    {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:283fc8eed679758de38fe493b7d7d84a198b558942b03f017b1f94dda8efae80"},
-    {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:f44dd4d68697559d007462b0a3a1d9acd61d97072b71f6d1968daef26bc744bd"},
-    {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:67d3ccfc590e5e7197750fcb3a2915b416a53e2de847a728cfa60141054123d4"},
-    {file = "regex-2023.12.25-cp311-cp311-win32.whl", hash = "sha256:68191f80a9bad283432385961d9efe09d783bcd36ed35a60fb1ff3f1ec2efe87"},
-    {file = "regex-2023.12.25-cp311-cp311-win_amd64.whl", hash = "sha256:7d2af3f6b8419661a0c421584cfe8aaec1c0e435ce7e47ee2a97e344b98f794f"},
-    {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8a0ccf52bb37d1a700375a6b395bff5dd15c50acb745f7db30415bae3c2b0715"},
-    {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c3c4a78615b7762740531c27cf46e2f388d8d727d0c0c739e72048beb26c8a9d"},
-    {file = "regex-2023.12.25-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ad83e7545b4ab69216cef4cc47e344d19622e28aabec61574b20257c65466d6a"},
-    {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7a635871143661feccce3979e1727c4e094f2bdfd3ec4b90dfd4f16f571a87a"},
-    {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d498eea3f581fbe1b34b59c697512a8baef88212f92e4c7830fcc1499f5b45a5"},
-    {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43f7cd5754d02a56ae4ebb91b33461dc67be8e3e0153f593c509e21d219c5060"},
-    {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51f4b32f793812714fd5307222a7f77e739b9bc566dc94a18126aba3b92b98a3"},
-    {file = "regex-2023.12.25-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba99d8077424501b9616b43a2d208095746fb1284fc5ba490139651f971d39d9"},
-    {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4bfc2b16e3ba8850e0e262467275dd4d62f0d045e0e9eda2bc65078c0110a11f"},
-    {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8c2c19dae8a3eb0ea45a8448356ed561be843b13cbc34b840922ddf565498c1c"},
-    {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:60080bb3d8617d96f0fb7e19796384cc2467447ef1c491694850ebd3670bc457"},
-    {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b77e27b79448e34c2c51c09836033056a0547aa360c45eeeb67803da7b0eedaf"},
-    {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:518440c991f514331f4850a63560321f833979d145d7d81186dbe2f19e27ae3d"},
-    {file = "regex-2023.12.25-cp312-cp312-win32.whl", hash = "sha256:e2610e9406d3b0073636a3a2e80db05a02f0c3169b5632022b4e81c0364bcda5"},
-    {file = "regex-2023.12.25-cp312-cp312-win_amd64.whl", hash = "sha256:cc37b9aeebab425f11f27e5e9e6cf580be7206c6582a64467a14dda211abc232"},
-    {file = "regex-2023.12.25-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:da695d75ac97cb1cd725adac136d25ca687da4536154cdc2815f576e4da11c69"},
-    {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d126361607b33c4eb7b36debc173bf25d7805847346dd4d99b5499e1fef52bc7"},
-    {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4719bb05094d7d8563a450cf8738d2e1061420f79cfcc1fa7f0a44744c4d8f73"},
-    {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5dd58946bce44b53b06d94aa95560d0b243eb2fe64227cba50017a8d8b3cd3e2"},
-    {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22a86d9fff2009302c440b9d799ef2fe322416d2d58fc124b926aa89365ec482"},
-    {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2aae8101919e8aa05ecfe6322b278f41ce2994c4a430303c4cd163fef746e04f"},
-    {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e692296c4cc2873967771345a876bcfc1c547e8dd695c6b89342488b0ea55cd8"},
-    {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:263ef5cc10979837f243950637fffb06e8daed7f1ac1e39d5910fd29929e489a"},
-    {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d6f7e255e5fa94642a0724e35406e6cb7001c09d476ab5fce002f652b36d0c39"},
-    {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:88ad44e220e22b63b0f8f81f007e8abbb92874d8ced66f32571ef8beb0643b2b"},
-    {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:3a17d3ede18f9cedcbe23d2daa8a2cd6f59fe2bf082c567e43083bba3fb00347"},
-    {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d15b274f9e15b1a0b7a45d2ac86d1f634d983ca40d6b886721626c47a400bf39"},
-    {file = "regex-2023.12.25-cp37-cp37m-win32.whl", hash = "sha256:ed19b3a05ae0c97dd8f75a5d8f21f7723a8c33bbc555da6bbe1f96c470139d3c"},
-    {file = "regex-2023.12.25-cp37-cp37m-win_amd64.whl", hash = "sha256:a6d1047952c0b8104a1d371f88f4ab62e6275567d4458c1e26e9627ad489b445"},
-    {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b43523d7bc2abd757119dbfb38af91b5735eea45537ec6ec3a5ec3f9562a1c53"},
-    {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:efb2d82f33b2212898f1659fb1c2e9ac30493ac41e4d53123da374c3b5541e64"},
-    {file = "regex-2023.12.25-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b7fca9205b59c1a3d5031f7e64ed627a1074730a51c2a80e97653e3e9fa0d415"},
-    {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086dd15e9435b393ae06f96ab69ab2d333f5d65cbe65ca5a3ef0ec9564dfe770"},
-    {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e81469f7d01efed9b53740aedd26085f20d49da65f9c1f41e822a33992cb1590"},
-    {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:34e4af5b27232f68042aa40a91c3b9bb4da0eeb31b7632e0091afc4310afe6cb"},
-    {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9852b76ab558e45b20bf1893b59af64a28bd3820b0c2efc80e0a70a4a3ea51c1"},
-    {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff100b203092af77d1a5a7abe085b3506b7eaaf9abf65b73b7d6905b6cb76988"},
-    {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cc038b2d8b1470364b1888a98fd22d616fba2b6309c5b5f181ad4483e0017861"},
-    {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:094ba386bb5c01e54e14434d4caabf6583334090865b23ef58e0424a6286d3dc"},
-    {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5cd05d0f57846d8ba4b71d9c00f6f37d6b97d5e5ef8b3c3840426a475c8f70f4"},
-    {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:9aa1a67bbf0f957bbe096375887b2505f5d8ae16bf04488e8b0f334c36e31360"},
-    {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:98a2636994f943b871786c9e82bfe7883ecdaba2ef5df54e1450fa9869d1f756"},
-    {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:37f8e93a81fc5e5bd8db7e10e62dc64261bcd88f8d7e6640aaebe9bc180d9ce2"},
-    {file = "regex-2023.12.25-cp38-cp38-win32.whl", hash = "sha256:d78bd484930c1da2b9679290a41cdb25cc127d783768a0369d6b449e72f88beb"},
-    {file = "regex-2023.12.25-cp38-cp38-win_amd64.whl", hash = "sha256:b521dcecebc5b978b447f0f69b5b7f3840eac454862270406a39837ffae4e697"},
-    {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f7bc09bc9c29ebead055bcba136a67378f03d66bf359e87d0f7c759d6d4ffa31"},
-    {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e14b73607d6231f3cc4622809c196b540a6a44e903bcfad940779c80dffa7be7"},
-    {file = "regex-2023.12.25-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9eda5f7a50141291beda3edd00abc2d4a5b16c29c92daf8d5bd76934150f3edc"},
-    {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc6bb9aa69aacf0f6032c307da718f61a40cf970849e471254e0e91c56ffca95"},
-    {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:298dc6354d414bc921581be85695d18912bea163a8b23cac9a2562bbcd5088b1"},
-    {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f4e475a80ecbd15896a976aa0b386c5525d0ed34d5c600b6d3ebac0a67c7ddf"},
-    {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531ac6cf22b53e0696f8e1d56ce2396311254eb806111ddd3922c9d937151dae"},
-    {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22f3470f7524b6da61e2020672df2f3063676aff444db1daa283c2ea4ed259d6"},
-    {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:89723d2112697feaa320c9d351e5f5e7b841e83f8b143dba8e2d2b5f04e10923"},
-    {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0ecf44ddf9171cd7566ef1768047f6e66975788258b1c6c6ca78098b95cf9a3d"},
-    {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:905466ad1702ed4acfd67a902af50b8db1feeb9781436372261808df7a2a7bca"},
-    {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:4558410b7a5607a645e9804a3e9dd509af12fb72b9825b13791a37cd417d73a5"},
-    {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:7e316026cc1095f2a3e8cc012822c99f413b702eaa2ca5408a513609488cb62f"},
-    {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3b1de218d5375cd6ac4b5493e0b9f3df2be331e86520f23382f216c137913d20"},
-    {file = "regex-2023.12.25-cp39-cp39-win32.whl", hash = "sha256:11a963f8e25ab5c61348d090bf1b07f1953929c13bd2309a0662e9ff680763c9"},
-    {file = "regex-2023.12.25-cp39-cp39-win_amd64.whl", hash = "sha256:e693e233ac92ba83a87024e1d32b5f9ab15ca55ddd916d878146f4e3406b5c91"},
-    {file = "regex-2023.12.25.tar.gz", hash = "sha256:29171aa128da69afdf4bde412d5bedc335f2ca8fcfe4489038577d05f16181e5"},
-]
 
 [[package]]
 name = "requests"
 version = "2.31.0"
 description = "Python HTTP for Humans."
+category = "main"
 optional = false
 python-versions = ">=3.7"
-files = [
-    {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"},
-    {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"},
-]
 
 [package.dependencies]
 certifi = ">=2017.4.17"
@@ -3488,12 +2218,9 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
 name = "requests-oauthlib"
 version = "1.3.1"
 description = "OAuthlib authentication support for Requests."
+category = "main"
 optional = false
 python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
-files = [
-    {file = "requests-oauthlib-1.3.1.tar.gz", hash = "sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a"},
-    {file = "requests_oauthlib-1.3.1-py2.py3-none-any.whl", hash = "sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5"},
-]
 
 [package.dependencies]
 oauthlib = ">=3.0.0"
@@ -3506,12 +2233,9 @@ rsa = ["oauthlib[signedtoken] (>=3.0.0)"]
 name = "rfc3339-validator"
 version = "0.1.4"
 description = "A pure python RFC3339 validator"
+category = "main"
 optional = false
 python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
-files = [
-    {file = "rfc3339_validator-0.1.4-py2.py3-none-any.whl", hash = "sha256:24f6ec1eda14ef823da9e36ec7113124b39c04d50a4d3d3a3c2859577e7791fa"},
-    {file = "rfc3339_validator-0.1.4.tar.gz", hash = "sha256:138a2abdf93304ad60530167e51d2dfb9549521a836871b88d7f4695d0022f6b"},
-]
 
 [package.dependencies]
 six = "*"
@@ -3520,131 +2244,25 @@ six = "*"
 name = "rfc3986-validator"
 version = "0.1.1"
 description = "Pure python rfc3986 validator"
+category = "main"
 optional = false
 python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
-files = [
-    {file = "rfc3986_validator-0.1.1-py2.py3-none-any.whl", hash = "sha256:2f235c432ef459970b4306369336b9d5dbdda31b510ca1e327636e01f528bfa9"},
-    {file = "rfc3986_validator-0.1.1.tar.gz", hash = "sha256:3d44bde7921b3b9ec3ae4e3adca370438eccebc676456449b145d533b240d055"},
-]
 
 [[package]]
 name = "rpds-py"
 version = "0.17.1"
 description = "Python bindings to Rust's persistent data structures (rpds)"
+category = "main"
 optional = false
 python-versions = ">=3.8"
-files = [
-    {file = "rpds_py-0.17.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:4128980a14ed805e1b91a7ed551250282a8ddf8201a4e9f8f5b7e6225f54170d"},
-    {file = "rpds_py-0.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ff1dcb8e8bc2261a088821b2595ef031c91d499a0c1b031c152d43fe0a6ecec8"},
-    {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d65e6b4f1443048eb7e833c2accb4fa7ee67cc7d54f31b4f0555b474758bee55"},
-    {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a71169d505af63bb4d20d23a8fbd4c6ce272e7bce6cc31f617152aa784436f29"},
-    {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:436474f17733c7dca0fbf096d36ae65277e8645039df12a0fa52445ca494729d"},
-    {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:10162fe3f5f47c37ebf6d8ff5a2368508fe22007e3077bf25b9c7d803454d921"},
-    {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:720215373a280f78a1814becb1312d4e4d1077b1202a56d2b0815e95ccb99ce9"},
-    {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:70fcc6c2906cfa5c6a552ba7ae2ce64b6c32f437d8f3f8eea49925b278a61453"},
-    {file = "rpds_py-0.17.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:91e5a8200e65aaac342a791272c564dffcf1281abd635d304d6c4e6b495f29dc"},
-    {file = "rpds_py-0.17.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:99f567dae93e10be2daaa896e07513dd4bf9c2ecf0576e0533ac36ba3b1d5394"},
-    {file = "rpds_py-0.17.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:24e4900a6643f87058a27320f81336d527ccfe503984528edde4bb660c8c8d59"},
-    {file = "rpds_py-0.17.1-cp310-none-win32.whl", hash = "sha256:0bfb09bf41fe7c51413f563373e5f537eaa653d7adc4830399d4e9bdc199959d"},
-    {file = "rpds_py-0.17.1-cp310-none-win_amd64.whl", hash = "sha256:20de7b7179e2031a04042e85dc463a93a82bc177eeba5ddd13ff746325558aa6"},
-    {file = "rpds_py-0.17.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:65dcf105c1943cba45d19207ef51b8bc46d232a381e94dd38719d52d3980015b"},
-    {file = "rpds_py-0.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:01f58a7306b64e0a4fe042047dd2b7d411ee82e54240284bab63e325762c1147"},
-    {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:071bc28c589b86bc6351a339114fb7a029f5cddbaca34103aa573eba7b482382"},
-    {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ae35e8e6801c5ab071b992cb2da958eee76340e6926ec693b5ff7d6381441745"},
-    {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149c5cd24f729e3567b56e1795f74577aa3126c14c11e457bec1b1c90d212e38"},
-    {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e796051f2070f47230c745d0a77a91088fbee2cc0502e9b796b9c6471983718c"},
-    {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60e820ee1004327609b28db8307acc27f5f2e9a0b185b2064c5f23e815f248f8"},
-    {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1957a2ab607f9added64478a6982742eb29f109d89d065fa44e01691a20fc20a"},
-    {file = "rpds_py-0.17.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8587fd64c2a91c33cdc39d0cebdaf30e79491cc029a37fcd458ba863f8815383"},
-    {file = "rpds_py-0.17.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4dc889a9d8a34758d0fcc9ac86adb97bab3fb7f0c4d29794357eb147536483fd"},
-    {file = "rpds_py-0.17.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:2953937f83820376b5979318840f3ee47477d94c17b940fe31d9458d79ae7eea"},
-    {file = "rpds_py-0.17.1-cp311-none-win32.whl", hash = "sha256:1bfcad3109c1e5ba3cbe2f421614e70439f72897515a96c462ea657261b96518"},
-    {file = "rpds_py-0.17.1-cp311-none-win_amd64.whl", hash = "sha256:99da0a4686ada4ed0f778120a0ea8d066de1a0a92ab0d13ae68492a437db78bf"},
-    {file = "rpds_py-0.17.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:1dc29db3900cb1bb40353772417800f29c3d078dbc8024fd64655a04ee3c4bdf"},
-    {file = "rpds_py-0.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:82ada4a8ed9e82e443fcef87e22a3eed3654dd3adf6e3b3a0deb70f03e86142a"},
-    {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d36b2b59e8cc6e576f8f7b671e32f2ff43153f0ad6d0201250a7c07f25d570e"},
-    {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3677fcca7fb728c86a78660c7fb1b07b69b281964673f486ae72860e13f512ad"},
-    {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:516fb8c77805159e97a689e2f1c80655c7658f5af601c34ffdb916605598cda2"},
-    {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df3b6f45ba4515632c5064e35ca7f31d51d13d1479673185ba8f9fefbbed58b9"},
-    {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a967dd6afda7715d911c25a6ba1517975acd8d1092b2f326718725461a3d33f9"},
-    {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dbbb95e6fc91ea3102505d111b327004d1c4ce98d56a4a02e82cd451f9f57140"},
-    {file = "rpds_py-0.17.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:02866e060219514940342a1f84303a1ef7a1dad0ac311792fbbe19b521b489d2"},
-    {file = "rpds_py-0.17.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:2528ff96d09f12e638695f3a2e0c609c7b84c6df7c5ae9bfeb9252b6fa686253"},
-    {file = "rpds_py-0.17.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bd345a13ce06e94c753dab52f8e71e5252aec1e4f8022d24d56decd31e1b9b23"},
-    {file = "rpds_py-0.17.1-cp312-none-win32.whl", hash = "sha256:2a792b2e1d3038daa83fa474d559acfd6dc1e3650ee93b2662ddc17dbff20ad1"},
-    {file = "rpds_py-0.17.1-cp312-none-win_amd64.whl", hash = "sha256:292f7344a3301802e7c25c53792fae7d1593cb0e50964e7bcdcc5cf533d634e3"},
-    {file = "rpds_py-0.17.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:8ffe53e1d8ef2520ebcf0c9fec15bb721da59e8ef283b6ff3079613b1e30513d"},
-    {file = "rpds_py-0.17.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4341bd7579611cf50e7b20bb8c2e23512a3dc79de987a1f411cb458ab670eb90"},
-    {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f4eb548daf4836e3b2c662033bfbfc551db58d30fd8fe660314f86bf8510b93"},
-    {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b686f25377f9c006acbac63f61614416a6317133ab7fafe5de5f7dc8a06d42eb"},
-    {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4e21b76075c01d65d0f0f34302b5a7457d95721d5e0667aea65e5bb3ab415c25"},
-    {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b86b21b348f7e5485fae740d845c65a880f5d1eda1e063bc59bef92d1f7d0c55"},
-    {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f175e95a197f6a4059b50757a3dca33b32b61691bdbd22c29e8a8d21d3914cae"},
-    {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1701fc54460ae2e5efc1dd6350eafd7a760f516df8dbe51d4a1c79d69472fbd4"},
-    {file = "rpds_py-0.17.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:9051e3d2af8f55b42061603e29e744724cb5f65b128a491446cc029b3e2ea896"},
-    {file = "rpds_py-0.17.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:7450dbd659fed6dd41d1a7d47ed767e893ba402af8ae664c157c255ec6067fde"},
-    {file = "rpds_py-0.17.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:5a024fa96d541fd7edaa0e9d904601c6445e95a729a2900c5aec6555fe921ed6"},
-    {file = "rpds_py-0.17.1-cp38-none-win32.whl", hash = "sha256:da1ead63368c04a9bded7904757dfcae01eba0e0f9bc41d3d7f57ebf1c04015a"},
-    {file = "rpds_py-0.17.1-cp38-none-win_amd64.whl", hash = "sha256:841320e1841bb53fada91c9725e766bb25009cfd4144e92298db296fb6c894fb"},
-    {file = "rpds_py-0.17.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:f6c43b6f97209e370124baf2bf40bb1e8edc25311a158867eb1c3a5d449ebc7a"},
-    {file = "rpds_py-0.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5e7d63ec01fe7c76c2dbb7e972fece45acbb8836e72682bde138e7e039906e2c"},
-    {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81038ff87a4e04c22e1d81f947c6ac46f122e0c80460b9006e6517c4d842a6ec"},
-    {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:810685321f4a304b2b55577c915bece4c4a06dfe38f6e62d9cc1d6ca8ee86b99"},
-    {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:25f071737dae674ca8937a73d0f43f5a52e92c2d178330b4c0bb6ab05586ffa6"},
-    {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa5bfb13f1e89151ade0eb812f7b0d7a4d643406caaad65ce1cbabe0a66d695f"},
-    {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfe07308b311a8293a0d5ef4e61411c5c20f682db6b5e73de6c7c8824272c256"},
-    {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a000133a90eea274a6f28adc3084643263b1e7c1a5a66eb0a0a7a36aa757ed74"},
-    {file = "rpds_py-0.17.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d0e8a6434a3fbf77d11448c9c25b2f25244226cfbec1a5159947cac5b8c5fa4"},
-    {file = "rpds_py-0.17.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:efa767c220d94aa4ac3a6dd3aeb986e9f229eaf5bce92d8b1b3018d06bed3772"},
-    {file = "rpds_py-0.17.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:dbc56680ecf585a384fbd93cd42bc82668b77cb525343170a2d86dafaed2a84b"},
-    {file = "rpds_py-0.17.1-cp39-none-win32.whl", hash = "sha256:270987bc22e7e5a962b1094953ae901395e8c1e1e83ad016c5cfcfff75a15a3f"},
-    {file = "rpds_py-0.17.1-cp39-none-win_amd64.whl", hash = "sha256:2a7b2f2f56a16a6d62e55354dd329d929560442bd92e87397b7a9586a32e3e76"},
-    {file = "rpds_py-0.17.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a3264e3e858de4fc601741498215835ff324ff2482fd4e4af61b46512dd7fc83"},
-    {file = "rpds_py-0.17.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:f2f3b28b40fddcb6c1f1f6c88c6f3769cd933fa493ceb79da45968a21dccc920"},
-    {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9584f8f52010295a4a417221861df9bea4c72d9632562b6e59b3c7b87a1522b7"},
-    {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c64602e8be701c6cfe42064b71c84ce62ce66ddc6422c15463fd8127db3d8066"},
-    {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:060f412230d5f19fc8c8b75f315931b408d8ebf56aec33ef4168d1b9e54200b1"},
-    {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b9412abdf0ba70faa6e2ee6c0cc62a8defb772e78860cef419865917d86c7342"},
-    {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9737bdaa0ad33d34c0efc718741abaafce62fadae72c8b251df9b0c823c63b22"},
-    {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9f0e4dc0f17dcea4ab9d13ac5c666b6b5337042b4d8f27e01b70fae41dd65c57"},
-    {file = "rpds_py-0.17.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:1db228102ab9d1ff4c64148c96320d0be7044fa28bd865a9ce628ce98da5973d"},
-    {file = "rpds_py-0.17.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:d8bbd8e56f3ba25a7d0cf980fc42b34028848a53a0e36c9918550e0280b9d0b6"},
-    {file = "rpds_py-0.17.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:be22ae34d68544df293152b7e50895ba70d2a833ad9566932d750d3625918b82"},
-    {file = "rpds_py-0.17.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:bf046179d011e6114daf12a534d874958b039342b347348a78b7cdf0dd9d6041"},
-    {file = "rpds_py-0.17.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:1a746a6d49665058a5896000e8d9d2f1a6acba8a03b389c1e4c06e11e0b7f40d"},
-    {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0b8bf5b8db49d8fd40f54772a1dcf262e8be0ad2ab0206b5a2ec109c176c0a4"},
-    {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f7f4cb1f173385e8a39c29510dd11a78bf44e360fb75610594973f5ea141028b"},
-    {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7fbd70cb8b54fe745301921b0816c08b6d917593429dfc437fd024b5ba713c58"},
-    {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9bdf1303df671179eaf2cb41e8515a07fc78d9d00f111eadbe3e14262f59c3d0"},
-    {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fad059a4bd14c45776600d223ec194e77db6c20255578bb5bcdd7c18fd169361"},
-    {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3664d126d3388a887db44c2e293f87d500c4184ec43d5d14d2d2babdb4c64cad"},
-    {file = "rpds_py-0.17.1-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:698ea95a60c8b16b58be9d854c9f993c639f5c214cf9ba782eca53a8789d6b19"},
-    {file = "rpds_py-0.17.1-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:c3d2010656999b63e628a3c694f23020322b4178c450dc478558a2b6ef3cb9bb"},
-    {file = "rpds_py-0.17.1-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:938eab7323a736533f015e6069a7d53ef2dcc841e4e533b782c2bfb9fb12d84b"},
-    {file = "rpds_py-0.17.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1e626b365293a2142a62b9a614e1f8e331b28f3ca57b9f05ebbf4cf2a0f0bdc5"},
-    {file = "rpds_py-0.17.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:380e0df2e9d5d5d339803cfc6d183a5442ad7ab3c63c2a0982e8c824566c5ccc"},
-    {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b760a56e080a826c2e5af09002c1a037382ed21d03134eb6294812dda268c811"},
-    {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5576ee2f3a309d2bb403ec292d5958ce03953b0e57a11d224c1f134feaf8c40f"},
-    {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f3c3461ebb4c4f1bbc70b15d20b565759f97a5aaf13af811fcefc892e9197ba"},
-    {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:637b802f3f069a64436d432117a7e58fab414b4e27a7e81049817ae94de45d8d"},
-    {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffee088ea9b593cc6160518ba9bd319b5475e5f3e578e4552d63818773c6f56a"},
-    {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3ac732390d529d8469b831949c78085b034bff67f584559340008d0f6041a049"},
-    {file = "rpds_py-0.17.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:93432e747fb07fa567ad9cc7aaadd6e29710e515aabf939dfbed8046041346c6"},
-    {file = "rpds_py-0.17.1-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:7b7d9ca34542099b4e185b3c2a2b2eda2e318a7dbde0b0d83357a6d4421b5296"},
-    {file = "rpds_py-0.17.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:0387ce69ba06e43df54e43968090f3626e231e4bc9150e4c3246947567695f68"},
-    {file = "rpds_py-0.17.1.tar.gz", hash = "sha256:0210b2668f24c078307260bf88bdac9d6f1093635df5123789bfee4d8d7fc8e7"},
-]
 
 [[package]]
 name = "rsa"
 version = "4.9"
 description = "Pure-Python RSA implementation"
+category = "main"
 optional = false
 python-versions = ">=3.6,<4"
-files = [
-    {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"},
-    {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"},
-]
 
 [package.dependencies]
 pyasn1 = ">=0.1.3"
@@ -3653,12 +2271,9 @@ pyasn1 = ">=0.1.3"
 name = "ruamel-yaml"
 version = "0.17.17"
 description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order"
+category = "main"
 optional = false
 python-versions = ">=3"
-files = [
-    {file = "ruamel.yaml-0.17.17-py3-none-any.whl", hash = "sha256:9af3ec5d7f8065582f3aa841305465025d0afd26c5fb54e15b964e11838fc74f"},
-    {file = "ruamel.yaml-0.17.17.tar.gz", hash = "sha256:9751de4cbb57d4bfbf8fc394e125ed4a2f170fbff3dc3d78abf50be85924f8be"},
-]
 
 [package.dependencies]
 "ruamel.yaml.clib" = {version = ">=0.1.2", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.10\""}
@@ -3671,71 +2286,17 @@ jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"]
 name = "ruamel-yaml-clib"
 version = "0.2.8"
 description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml"
+category = "main"
 optional = false
 python-versions = ">=3.6"
-files = [
-    {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b42169467c42b692c19cf539c38d4602069d8c1505e97b86387fcf7afb766e1d"},
-    {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:07238db9cbdf8fc1e9de2489a4f68474e70dffcb32232db7c08fa61ca0c7c462"},
-    {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fff3573c2db359f091e1589c3d7c5fc2f86f5bdb6f24252c2d8e539d4e45f412"},
-    {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_24_aarch64.whl", hash = "sha256:aa2267c6a303eb483de8d02db2871afb5c5fc15618d894300b88958f729ad74f"},
-    {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:840f0c7f194986a63d2c2465ca63af8ccbbc90ab1c6001b1978f05119b5e7334"},
-    {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:024cfe1fc7c7f4e1aff4a81e718109e13409767e4f871443cbff3dba3578203d"},
-    {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win32.whl", hash = "sha256:c69212f63169ec1cfc9bb44723bf2917cbbd8f6191a00ef3410f5a7fe300722d"},
-    {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win_amd64.whl", hash = "sha256:cabddb8d8ead485e255fe80429f833172b4cadf99274db39abc080e068cbcc31"},
-    {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bef08cd86169d9eafb3ccb0a39edb11d8e25f3dae2b28f5c52fd997521133069"},
-    {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:b16420e621d26fdfa949a8b4b47ade8810c56002f5389970db4ddda51dbff248"},
-    {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:25c515e350e5b739842fc3228d662413ef28f295791af5e5110b543cf0b57d9b"},
-    {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_24_aarch64.whl", hash = "sha256:1707814f0d9791df063f8c19bb51b0d1278b8e9a2353abbb676c2f685dee6afe"},
-    {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:46d378daaac94f454b3a0e3d8d78cafd78a026b1d71443f4966c696b48a6d899"},
-    {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:09b055c05697b38ecacb7ac50bdab2240bfca1a0c4872b0fd309bb07dc9aa3a9"},
-    {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win32.whl", hash = "sha256:53a300ed9cea38cf5a2a9b069058137c2ca1ce658a874b79baceb8f892f915a7"},
-    {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win_amd64.whl", hash = "sha256:c2a72e9109ea74e511e29032f3b670835f8a59bbdc9ce692c5b4ed91ccf1eedb"},
-    {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:ebc06178e8821efc9692ea7544aa5644217358490145629914d8020042c24aa1"},
-    {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:edaef1c1200c4b4cb914583150dcaa3bc30e592e907c01117c08b13a07255ec2"},
-    {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d176b57452ab5b7028ac47e7b3cf644bcfdc8cacfecf7e71759f7f51a59e5c92"},
-    {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_24_aarch64.whl", hash = "sha256:1dc67314e7e1086c9fdf2680b7b6c2be1c0d8e3a8279f2e993ca2a7545fecf62"},
-    {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3213ece08ea033eb159ac52ae052a4899b56ecc124bb80020d9bbceeb50258e9"},
-    {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aab7fd643f71d7946f2ee58cc88c9b7bfc97debd71dcc93e03e2d174628e7e2d"},
-    {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win32.whl", hash = "sha256:5c365d91c88390c8d0a8545df0b5857172824b1c604e867161e6b3d59a827eaa"},
-    {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win_amd64.whl", hash = "sha256:1758ce7d8e1a29d23de54a16ae867abd370f01b5a69e1a3ba75223eaa3ca1a1b"},
-    {file = "ruamel.yaml.clib-0.2.8-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a5aa27bad2bb83670b71683aae140a1f52b0857a2deff56ad3f6c13a017a26ed"},
-    {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c58ecd827313af6864893e7af0a3bb85fd529f862b6adbefe14643947cfe2942"},
-    {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_12_0_arm64.whl", hash = "sha256:f481f16baec5290e45aebdc2a5168ebc6d35189ae6fea7a58787613a25f6e875"},
-    {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_24_aarch64.whl", hash = "sha256:77159f5d5b5c14f7c34073862a6b7d34944075d9f93e681638f6d753606c6ce6"},
-    {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7f67a1ee819dc4562d444bbafb135832b0b909f81cc90f7aa00260968c9ca1b3"},
-    {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4ecbf9c3e19f9562c7fdd462e8d18dd902a47ca046a2e64dba80699f0b6c09b7"},
-    {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:87ea5ff66d8064301a154b3933ae406b0863402a799b16e4a1d24d9fbbcbe0d3"},
-    {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win32.whl", hash = "sha256:75e1ed13e1f9de23c5607fe6bd1aeaae21e523b32d83bb33918245361e9cc51b"},
-    {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win_amd64.whl", hash = "sha256:3f215c5daf6a9d7bbed4a0a4f760f3113b10e82ff4c5c44bec20a68c8014f675"},
-    {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1b617618914cb00bf5c34d4357c37aa15183fa229b24767259657746c9077615"},
-    {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:a6a9ffd280b71ad062eae53ac1659ad86a17f59a0fdc7699fd9be40525153337"},
-    {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_24_aarch64.whl", hash = "sha256:305889baa4043a09e5b76f8e2a51d4ffba44259f6b4c72dec8ca56207d9c6fe1"},
-    {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:700e4ebb569e59e16a976857c8798aee258dceac7c7d6b50cab63e080058df91"},
-    {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e2b4c44b60eadec492926a7270abb100ef9f72798e18743939bdbf037aab8c28"},
-    {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e79e5db08739731b0ce4850bed599235d601701d5694c36570a99a0c5ca41a9d"},
-    {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win32.whl", hash = "sha256:955eae71ac26c1ab35924203fda6220f84dce57d6d7884f189743e2abe3a9fbe"},
-    {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win_amd64.whl", hash = "sha256:56f4252222c067b4ce51ae12cbac231bce32aee1d33fbfc9d17e5b8d6966c312"},
-    {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:03d1162b6d1df1caa3a4bd27aa51ce17c9afc2046c31b0ad60a0a96ec22f8001"},
-    {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:bba64af9fa9cebe325a62fa398760f5c7206b215201b0ec825005f1b18b9bccf"},
-    {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_24_aarch64.whl", hash = "sha256:a1a45e0bb052edf6a1d3a93baef85319733a888363938e1fc9924cb00c8df24c"},
-    {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:da09ad1c359a728e112d60116f626cc9f29730ff3e0e7db72b9a2dbc2e4beed5"},
-    {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:184565012b60405d93838167f425713180b949e9d8dd0bbc7b49f074407c5a8b"},
-    {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a75879bacf2c987c003368cf14bed0ffe99e8e85acfa6c0bfffc21a090f16880"},
-    {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win32.whl", hash = "sha256:84b554931e932c46f94ab306913ad7e11bba988104c5cff26d90d03f68258cd5"},
-    {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win_amd64.whl", hash = "sha256:25ac8c08322002b06fa1d49d1646181f0b2c72f5cbc15a85e80b4c30a544bb15"},
-    {file = "ruamel.yaml.clib-0.2.8.tar.gz", hash = "sha256:beb2e0404003de9a4cab9753a8805a8fe9320ee6673136ed7f04255fe60bb512"},
-]
 
 [[package]]
 name = "schematic-db"
 version = "0.0.dev33"
 description = ""
+category = "main"
 optional = false
 python-versions = ">=3.9,<4.0"
-files = [
-    {file = "schematic_db-0.0.dev33-py3-none-any.whl", hash = "sha256:9a274b038e5d3f382fd22300350fb4c02e0f147e5846808b324714fb30bd9e75"},
-    {file = "schematic_db-0.0.dev33.tar.gz", hash = "sha256:01cadedbfa10915727c0bdf88c9184353db1294d8c941e69a824d16f12bb4701"},
-]
 
 [package.dependencies]
 deprecation = ">=2.1.0,<3.0.0"
@@ -3760,35 +2321,9 @@ synapse = ["synapseclient (>=3.0.0,<4.0.0)"]
 name = "scipy"
 version = "1.11.4"
 description = "Fundamental algorithms for scientific computing in Python"
+category = "main"
 optional = false
 python-versions = ">=3.9"
-files = [
-    {file = "scipy-1.11.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bc9a714581f561af0848e6b69947fda0614915f072dfd14142ed1bfe1b806710"},
-    {file = "scipy-1.11.4-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:cf00bd2b1b0211888d4dc75656c0412213a8b25e80d73898083f402b50f47e41"},
-    {file = "scipy-1.11.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9999c008ccf00e8fbcce1236f85ade5c569d13144f77a1946bef8863e8f6eb4"},
-    {file = "scipy-1.11.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:933baf588daa8dc9a92c20a0be32f56d43faf3d1a60ab11b3f08c356430f6e56"},
-    {file = "scipy-1.11.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8fce70f39076a5aa62e92e69a7f62349f9574d8405c0a5de6ed3ef72de07f446"},
-    {file = "scipy-1.11.4-cp310-cp310-win_amd64.whl", hash = "sha256:6550466fbeec7453d7465e74d4f4b19f905642c89a7525571ee91dd7adabb5a3"},
-    {file = "scipy-1.11.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f313b39a7e94f296025e3cffc2c567618174c0b1dde173960cf23808f9fae4be"},
-    {file = "scipy-1.11.4-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:1b7c3dca977f30a739e0409fb001056484661cb2541a01aba0bb0029f7b68db8"},
-    {file = "scipy-1.11.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00150c5eae7b610c32589dda259eacc7c4f1665aedf25d921907f4d08a951b1c"},
-    {file = "scipy-1.11.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:530f9ad26440e85766509dbf78edcfe13ffd0ab7fec2560ee5c36ff74d6269ff"},
-    {file = "scipy-1.11.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5e347b14fe01003d3b78e196e84bd3f48ffe4c8a7b8a1afbcb8f5505cb710993"},
-    {file = "scipy-1.11.4-cp311-cp311-win_amd64.whl", hash = "sha256:acf8ed278cc03f5aff035e69cb511741e0418681d25fbbb86ca65429c4f4d9cd"},
-    {file = "scipy-1.11.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:028eccd22e654b3ea01ee63705681ee79933652b2d8f873e7949898dda6d11b6"},
-    {file = "scipy-1.11.4-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:2c6ff6ef9cc27f9b3db93a6f8b38f97387e6e0591600369a297a50a8e96e835d"},
-    {file = "scipy-1.11.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b030c6674b9230d37c5c60ab456e2cf12f6784596d15ce8da9365e70896effc4"},
-    {file = "scipy-1.11.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad669df80528aeca5f557712102538f4f37e503f0c5b9541655016dd0932ca79"},
-    {file = "scipy-1.11.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ce7fff2e23ab2cc81ff452a9444c215c28e6305f396b2ba88343a567feec9660"},
-    {file = "scipy-1.11.4-cp312-cp312-win_amd64.whl", hash = "sha256:36750b7733d960d7994888f0d148d31ea3017ac15eef664194b4ef68d36a4a97"},
-    {file = "scipy-1.11.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6e619aba2df228a9b34718efb023966da781e89dd3d21637b27f2e54db0410d7"},
-    {file = "scipy-1.11.4-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:f3cd9e7b3c2c1ec26364856f9fbe78695fe631150f94cd1c22228456404cf1ec"},
-    {file = "scipy-1.11.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d10e45a6c50211fe256da61a11c34927c68f277e03138777bdebedd933712fea"},
-    {file = "scipy-1.11.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91af76a68eeae0064887a48e25c4e616fa519fa0d38602eda7e0f97d65d57937"},
-    {file = "scipy-1.11.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6df1468153a31cf55ed5ed39647279beb9cfb5d3f84369453b49e4b8502394fd"},
-    {file = "scipy-1.11.4-cp39-cp39-win_amd64.whl", hash = "sha256:ee410e6de8f88fd5cf6eadd73c135020bfbbbdfcd0f6162c36a7638a1ea8cc65"},
-    {file = "scipy-1.11.4.tar.gz", hash = "sha256:90a2b78e7f5733b9de748f589f09225013685f9b218275257f8a8168ededaeaa"},
-]
 
 [package.dependencies]
 numpy = ">=1.21.6,<1.28.0"
@@ -3802,12 +2337,9 @@ test = ["asv", "gmpy2", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeo
 name = "secretstorage"
 version = "3.3.3"
 description = "Python bindings to FreeDesktop.org Secret Service API"
+category = "main"
 optional = false
 python-versions = ">=3.6"
-files = [
-    {file = "SecretStorage-3.3.3-py3-none-any.whl", hash = "sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99"},
-    {file = "SecretStorage-3.3.3.tar.gz", hash = "sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77"},
-]
 
 [package.dependencies]
 cryptography = ">=2.0"
@@ -3817,12 +2349,9 @@ jeepney = ">=0.6"
 name = "send2trash"
 version = "1.8.2"
 description = "Send file to trash natively under Mac OS X, Windows and Linux"
+category = "main"
 optional = false
 python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
-files = [
-    {file = "Send2Trash-1.8.2-py3-none-any.whl", hash = "sha256:a384719d99c07ce1eefd6905d2decb6f8b7ed054025bb0e618919f945de4f679"},
-    {file = "Send2Trash-1.8.2.tar.gz", hash = "sha256:c132d59fa44b9ca2b1699af5c86f57ce9f4c5eb56629d5d55fbb7a35f84e2312"},
-]
 
 [package.extras]
 nativelib = ["pyobjc-framework-Cocoa", "pywin32"]
@@ -3833,12 +2362,9 @@ win32 = ["pywin32"]
 name = "setuptools"
 version = "66.1.1"
 description = "Easily download, build, install, upgrade, and uninstall Python packages"
+category = "main"
 optional = false
 python-versions = ">=3.7"
-files = [
-    {file = "setuptools-66.1.1-py3-none-any.whl", hash = "sha256:6f590d76b713d5de4e49fe4fbca24474469f53c83632d5d0fd056f7ff7e8112b"},
-    {file = "setuptools-66.1.1.tar.gz", hash = "sha256:ac4008d396bc9cd983ea483cb7139c0240a07bbc74ffb6232fceffedc6cf03a8"},
-]
 
 [package.extras]
 docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
@@ -3849,56 +2375,41 @@ testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (
 name = "six"
 version = "1.16.0"
 description = "Python 2 and 3 compatibility utilities"
+category = "main"
 optional = false
 python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
-files = [
-    {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
-    {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
-]
 
 [[package]]
 name = "sniffio"
 version = "1.3.0"
 description = "Sniff out which async library your code is running under"
+category = "main"
 optional = false
 python-versions = ">=3.7"
-files = [
-    {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"},
-    {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"},
-]
 
 [[package]]
 name = "snowballstemmer"
 version = "2.2.0"
 description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms."
+category = "main"
 optional = false
 python-versions = "*"
-files = [
-    {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"},
-    {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"},
-]
 
 [[package]]
 name = "soupsieve"
 version = "2.5"
 description = "A modern CSS selector implementation for Beautiful Soup."
+category = "main"
 optional = false
 python-versions = ">=3.8"
-files = [
-    {file = "soupsieve-2.5-py3-none-any.whl", hash = "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7"},
-    {file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"},
-]
 
 [[package]]
 name = "sphinx"
 version = "7.2.6"
 description = "Python documentation generator"
+category = "main"
 optional = false
 python-versions = ">=3.9"
-files = [
-    {file = "sphinx-7.2.6-py3-none-any.whl", hash = "sha256:1e09160a40b956dc623c910118fa636da93bd3ca0b9876a7b3df90f07d691560"},
-    {file = "sphinx-7.2.6.tar.gz", hash = "sha256:9a5160e1ea90688d5963ba09a2dcd8bdd526620edbb65c328728f1b2228d5ab5"},
-]
 
 [package.dependencies]
 alabaster = ">=0.7,<0.8"
@@ -3928,12 +2439,9 @@ test = ["cython (>=3.0)", "filelock", "html5lib", "pytest (>=4.6)", "setuptools
 name = "sphinx-click"
 version = "4.4.0"
 description = "Sphinx extension that automatically documents click applications"
+category = "main"
 optional = false
 python-versions = ">=3.7"
-files = [
-    {file = "sphinx-click-4.4.0.tar.gz", hash = "sha256:cc67692bd28f482c7f01531c61b64e9d2f069bfcf3d24cbbb51d4a84a749fa48"},
-    {file = "sphinx_click-4.4.0-py3-none-any.whl", hash = "sha256:2821c10a68fc9ee6ce7c92fad26540d8d8c8f45e6d7258f0e4fb7529ae8fab49"},
-]
 
 [package.dependencies]
 click = ">=7.0"
@@ -3944,12 +2452,9 @@ sphinx = ">=2.0"
 name = "sphinxcontrib-applehelp"
 version = "1.0.8"
 description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books"
+category = "main"
 optional = false
 python-versions = ">=3.9"
-files = [
-    {file = "sphinxcontrib_applehelp-1.0.8-py3-none-any.whl", hash = "sha256:cb61eb0ec1b61f349e5cc36b2028e9e7ca765be05e49641c97241274753067b4"},
-    {file = "sphinxcontrib_applehelp-1.0.8.tar.gz", hash = "sha256:c40a4f96f3776c4393d933412053962fac2b84f4c99a7982ba42e09576a70619"},
-]
 
 [package.extras]
 lint = ["docutils-stubs", "flake8", "mypy"]
@@ -3960,12 +2465,9 @@ test = ["pytest"]
 name = "sphinxcontrib-devhelp"
 version = "1.0.6"
 description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents"
+category = "main"
 optional = false
 python-versions = ">=3.9"
-files = [
-    {file = "sphinxcontrib_devhelp-1.0.6-py3-none-any.whl", hash = "sha256:6485d09629944511c893fa11355bda18b742b83a2b181f9a009f7e500595c90f"},
-    {file = "sphinxcontrib_devhelp-1.0.6.tar.gz", hash = "sha256:9893fd3f90506bc4b97bdb977ceb8fbd823989f4316b28c3841ec128544372d3"},
-]
 
 [package.extras]
 lint = ["docutils-stubs", "flake8", "mypy"]
@@ -3976,12 +2478,9 @@ test = ["pytest"]
 name = "sphinxcontrib-htmlhelp"
 version = "2.0.5"
 description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files"
+category = "main"
 optional = false
 python-versions = ">=3.9"
-files = [
-    {file = "sphinxcontrib_htmlhelp-2.0.5-py3-none-any.whl", hash = "sha256:393f04f112b4d2f53d93448d4bce35842f62b307ccdc549ec1585e950bc35e04"},
-    {file = "sphinxcontrib_htmlhelp-2.0.5.tar.gz", hash = "sha256:0dc87637d5de53dd5eec3a6a01753b1ccf99494bd756aafecd74b4fa9e729015"},
-]
 
 [package.extras]
 lint = ["docutils-stubs", "flake8", "mypy"]
@@ -3992,12 +2491,9 @@ test = ["html5lib", "pytest"]
 name = "sphinxcontrib-jsmath"
 version = "1.0.1"
 description = "A sphinx extension which renders display math in HTML via JavaScript"
+category = "main"
 optional = false
 python-versions = ">=3.5"
-files = [
-    {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"},
-    {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"},
-]
 
 [package.extras]
 test = ["flake8", "mypy", "pytest"]
@@ -4006,12 +2502,9 @@ test = ["flake8", "mypy", "pytest"]
 name = "sphinxcontrib-qthelp"
 version = "1.0.7"
 description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents"
+category = "main"
 optional = false
 python-versions = ">=3.9"
-files = [
-    {file = "sphinxcontrib_qthelp-1.0.7-py3-none-any.whl", hash = "sha256:e2ae3b5c492d58fcbd73281fbd27e34b8393ec34a073c792642cd8e529288182"},
-    {file = "sphinxcontrib_qthelp-1.0.7.tar.gz", hash = "sha256:053dedc38823a80a7209a80860b16b722e9e0209e32fea98c90e4e6624588ed6"},
-]
 
 [package.extras]
 lint = ["docutils-stubs", "flake8", "mypy"]
@@ -4022,12 +2515,9 @@ test = ["pytest"]
 name = "sphinxcontrib-serializinghtml"
 version = "1.1.10"
 description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)"
+category = "main"
 optional = false
 python-versions = ">=3.9"
-files = [
-    {file = "sphinxcontrib_serializinghtml-1.1.10-py3-none-any.whl", hash = "sha256:326369b8df80a7d2d8d7f99aa5ac577f51ea51556ed974e7716cfd4fca3f6cb7"},
-    {file = "sphinxcontrib_serializinghtml-1.1.10.tar.gz", hash = "sha256:93f3f5dc458b91b192fe10c397e324f262cf163d79f3282c158e8436a2c4511f"},
-]
 
 [package.extras]
 lint = ["docutils-stubs", "flake8", "mypy"]
@@ -4038,59 +2528,9 @@ test = ["pytest"]
 name = "sqlalchemy"
 version = "2.0.24"
 description = "Database Abstraction Library"
+category = "main"
 optional = false
 python-versions = ">=3.7"
-files = [
-    {file = "SQLAlchemy-2.0.24-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5f801d85ba4753d4ed97181d003e5d3fa330ac7c4587d131f61d7f968f416862"},
-    {file = "SQLAlchemy-2.0.24-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b35c35e3923ade1e7ac44e150dec29f5863513246c8bf85e2d7d313e3832bcfb"},
-    {file = "SQLAlchemy-2.0.24-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d9b3fd5eca3c0b137a5e0e468e24ca544ed8ca4783e0e55341b7ed2807518ee"},
-    {file = "SQLAlchemy-2.0.24-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a6209e689d0ff206c40032b6418e3cfcfc5af044b3f66e381d7f1ae301544b4"},
-    {file = "SQLAlchemy-2.0.24-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:37e89d965b52e8b20571b5d44f26e2124b26ab63758bf1b7598a0e38fb2c4005"},
-    {file = "SQLAlchemy-2.0.24-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c6910eb4ea90c0889f363965cd3c8c45a620ad27b526a7899f0054f6c1b9219e"},
-    {file = "SQLAlchemy-2.0.24-cp310-cp310-win32.whl", hash = "sha256:d8e7e8a150e7b548e7ecd6ebb9211c37265991bf2504297d9454e01b58530fc6"},
-    {file = "SQLAlchemy-2.0.24-cp310-cp310-win_amd64.whl", hash = "sha256:396f05c552f7fa30a129497c41bef5b4d1423f9af8fe4df0c3dcd38f3e3b9a14"},
-    {file = "SQLAlchemy-2.0.24-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:adbd67dac4ebf54587198b63cd30c29fd7eafa8c0cab58893d9419414f8efe4b"},
-    {file = "SQLAlchemy-2.0.24-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a0f611b431b84f55779cbb7157257d87b4a2876b067c77c4f36b15e44ced65e2"},
-    {file = "SQLAlchemy-2.0.24-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56a0e90a959e18ac5f18c80d0cad9e90cb09322764f536e8a637426afb1cae2f"},
-    {file = "SQLAlchemy-2.0.24-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6db686a1d9f183c639f7e06a2656af25d4ed438eda581de135d15569f16ace33"},
-    {file = "SQLAlchemy-2.0.24-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f0cc0b486a56dff72dddae6b6bfa7ff201b0eeac29d4bc6f0e9725dc3c360d71"},
-    {file = "SQLAlchemy-2.0.24-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4a1d4856861ba9e73bac05030cec5852eabfa9ef4af8e56c19d92de80d46fc34"},
-    {file = "SQLAlchemy-2.0.24-cp311-cp311-win32.whl", hash = "sha256:a3c2753bf4f48b7a6024e5e8a394af49b1b12c817d75d06942cae03d14ff87b3"},
-    {file = "SQLAlchemy-2.0.24-cp311-cp311-win_amd64.whl", hash = "sha256:38732884eabc64982a09a846bacf085596ff2371e4e41d20c0734f7e50525d01"},
-    {file = "SQLAlchemy-2.0.24-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9f992e0f916201731993eab8502912878f02287d9f765ef843677ff118d0e0b1"},
-    {file = "SQLAlchemy-2.0.24-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2587e108463cc2e5b45a896b2e7cc8659a517038026922a758bde009271aed11"},
-    {file = "SQLAlchemy-2.0.24-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bb7cedcddffca98c40bb0becd3423e293d1fef442b869da40843d751785beb3"},
-    {file = "SQLAlchemy-2.0.24-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83fa6df0e035689df89ff77a46bf8738696785d3156c2c61494acdcddc75c69d"},
-    {file = "SQLAlchemy-2.0.24-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:cc889fda484d54d0b31feec409406267616536d048a450fc46943e152700bb79"},
-    {file = "SQLAlchemy-2.0.24-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:57ef6f2cb8b09a042d0dbeaa46a30f2df5dd1e1eb889ba258b0d5d7d6011b81c"},
-    {file = "SQLAlchemy-2.0.24-cp312-cp312-win32.whl", hash = "sha256:ea490564435b5b204d8154f0e18387b499ea3cedc1e6af3b3a2ab18291d85aa7"},
-    {file = "SQLAlchemy-2.0.24-cp312-cp312-win_amd64.whl", hash = "sha256:ccfd336f96d4c9bbab0309f2a565bf15c468c2d8b2d277a32f89c5940f71fcf9"},
-    {file = "SQLAlchemy-2.0.24-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9aaaaa846b10dfbe1bda71079d0e31a7e2cebedda9409fa7dba3dfed1ae803e8"},
-    {file = "SQLAlchemy-2.0.24-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95bae3d38f8808d79072da25d5e5a6095f36fe1f9d6c614dd72c59ca8397c7c0"},
-    {file = "SQLAlchemy-2.0.24-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a04191a7c8d77e63f6fc1e8336d6c6e93176c0c010833e74410e647f0284f5a1"},
-    {file = "SQLAlchemy-2.0.24-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:acc58b7c2e40235712d857fdfc8f2bda9608f4a850d8d9ac0dd1fc80939ca6ac"},
-    {file = "SQLAlchemy-2.0.24-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:00d76fe5d7cdb5d84d625ce002ce29fefba0bfd98e212ae66793fed30af73931"},
-    {file = "SQLAlchemy-2.0.24-cp37-cp37m-win32.whl", hash = "sha256:29e51f848f843bbd75d74ae64ab1ab06302cb1dccd4549d1f5afe6b4a946edb2"},
-    {file = "SQLAlchemy-2.0.24-cp37-cp37m-win_amd64.whl", hash = "sha256:e9d036e343a604db3f5a6c33354018a84a1d3f6dcae3673358b404286204798c"},
-    {file = "SQLAlchemy-2.0.24-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9bafaa05b19dc07fa191c1966c5e852af516840b0d7b46b7c3303faf1a349bc9"},
-    {file = "SQLAlchemy-2.0.24-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e69290b921b7833c04206f233d6814c60bee1d135b09f5ae5d39229de9b46cd4"},
-    {file = "SQLAlchemy-2.0.24-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8398593ccc4440ce6dffcc4f47d9b2d72b9fe7112ac12ea4a44e7d4de364db1"},
-    {file = "SQLAlchemy-2.0.24-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f073321a79c81e1a009218a21089f61d87ee5fa3c9563f6be94f8b41ff181812"},
-    {file = "SQLAlchemy-2.0.24-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9036ebfd934813990c5b9f71f297e77ed4963720db7d7ceec5a3fdb7cd2ef6ce"},
-    {file = "SQLAlchemy-2.0.24-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fcf84fe93397a0f67733aa2a38ed4eab9fc6348189fc950e656e1ea198f45668"},
-    {file = "SQLAlchemy-2.0.24-cp38-cp38-win32.whl", hash = "sha256:6f5e75de91c754365c098ac08c13fdb267577ce954fa239dd49228b573ca88d7"},
-    {file = "SQLAlchemy-2.0.24-cp38-cp38-win_amd64.whl", hash = "sha256:9f29c7f0f4b42337ec5a779e166946a9f86d7d56d827e771b69ecbdf426124ac"},
-    {file = "SQLAlchemy-2.0.24-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:07cc423892f2ceda9ae1daa28c0355757f362ecc7505b1ab1a3d5d8dc1c44ac6"},
-    {file = "SQLAlchemy-2.0.24-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2a479aa1ab199178ff1956b09ca8a0693e70f9c762875d69292d37049ffd0d8f"},
-    {file = "SQLAlchemy-2.0.24-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b8d0e8578e7f853f45f4512b5c920f6a546cd4bed44137460b2a56534644205"},
-    {file = "SQLAlchemy-2.0.24-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17e7e27af178d31b436dda6a596703b02a89ba74a15e2980c35ecd9909eea3a"},
-    {file = "SQLAlchemy-2.0.24-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1ca7903d5e7db791a355b579c690684fac6304478b68efdc7f2ebdcfe770d8d7"},
-    {file = "SQLAlchemy-2.0.24-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db09e424d7bb89b6215a184ca93b4f29d7f00ea261b787918a1af74143b98c06"},
-    {file = "SQLAlchemy-2.0.24-cp39-cp39-win32.whl", hash = "sha256:a5cd7d30e47f87b21362beeb3e86f1b5886e7d9b0294b230dde3d3f4a1591375"},
-    {file = "SQLAlchemy-2.0.24-cp39-cp39-win_amd64.whl", hash = "sha256:7ae5d44517fe81079ce75cf10f96978284a6db2642c5932a69c82dbae09f009a"},
-    {file = "SQLAlchemy-2.0.24-py3-none-any.whl", hash = "sha256:8f358f5cfce04417b6ff738748ca4806fe3d3ae8040fb4e6a0c9a6973ccf9b6e"},
-    {file = "SQLAlchemy-2.0.24.tar.gz", hash = "sha256:6db97656fd3fe3f7e5b077f12fa6adb5feb6e0b567a3e99f47ecf5f7ea0a09e3"},
-]
 
 [package.dependencies]
 greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""}
@@ -4125,12 +2565,9 @@ sqlcipher = ["sqlcipher3_binary"]
 name = "sqlalchemy-utils"
 version = "0.41.1"
 description = "Various utility functions for SQLAlchemy."
+category = "main"
 optional = false
 python-versions = ">=3.6"
-files = [
-    {file = "SQLAlchemy-Utils-0.41.1.tar.gz", hash = "sha256:a2181bff01eeb84479e38571d2c0718eb52042f9afd8c194d0d02877e84b7d74"},
-    {file = "SQLAlchemy_Utils-0.41.1-py3-none-any.whl", hash = "sha256:6c96b0768ea3f15c0dc56b363d386138c562752b84f647fb8d31a2223aaab801"},
-]
 
 [package.dependencies]
 SQLAlchemy = ">=1.3"
@@ -4153,12 +2590,9 @@ url = ["furl (>=0.4.1)"]
 name = "stack-data"
 version = "0.6.3"
 description = "Extract data from python stack frames and tracebacks for informative displays"
+category = "main"
 optional = false
 python-versions = "*"
-files = [
-    {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"},
-    {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"},
-]
 
 [package.dependencies]
 asttokens = ">=2.1.0"
@@ -4172,12 +2606,9 @@ tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"]
 name = "swagger-ui-bundle"
 version = "0.0.9"
 description = "swagger_ui_bundle - swagger-ui files in a pip package"
-optional = false
+category = "main"
+optional = true
 python-versions = "*"
-files = [
-    {file = "swagger_ui_bundle-0.0.9-py3-none-any.whl", hash = "sha256:cea116ed81147c345001027325c1ddc9ca78c1ee7319935c3c75d3669279d575"},
-    {file = "swagger_ui_bundle-0.0.9.tar.gz", hash = "sha256:b462aa1460261796ab78fd4663961a7f6f347ce01760f1303bbbdf630f11f516"},
-]
 
 [package.dependencies]
 Jinja2 = ">=2.0"
@@ -4186,12 +2617,9 @@ Jinja2 = ">=2.0"
 name = "synapseclient"
 version = "3.2.0"
 description = "A client for Synapse, a collaborative, open-source research platform that allows teams to share data, track analyses, and collaborate."
+category = "main"
 optional = false
 python-versions = ">=3.8"
-files = [
-    {file = "synapseclient-3.2.0-py3-none-any.whl", hash = "sha256:ec1bb9c3ac2db995be25f6ced08a530a170219d23224d7c9b8a381166905fe6c"},
-    {file = "synapseclient-3.2.0.tar.gz", hash = "sha256:87c91f03dbca7074efd18144325df07db24e07ea92e0b7c8691aaec46c28329a"},
-]
 
 [package.dependencies]
 deprecated = ">=1.2.4,<2.0"
@@ -4215,12 +2643,9 @@ tests = ["flake8 (>=3.7.0,<4.0)", "func-timeout (>=4.3,<5.0)", "pytest (>=6.0.0,
 name = "tabulate"
 version = "0.9.0"
 description = "Pretty-print tabular data"
+category = "main"
 optional = false
 python-versions = ">=3.7"
-files = [
-    {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"},
-    {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"},
-]
 
 [package.extras]
 widechars = ["wcwidth"]
@@ -4229,12 +2654,9 @@ widechars = ["wcwidth"]
 name = "tenacity"
 version = "8.2.3"
 description = "Retry code until it succeeds"
+category = "main"
 optional = false
 python-versions = ">=3.7"
-files = [
-    {file = "tenacity-8.2.3-py3-none-any.whl", hash = "sha256:ce510e327a630c9e1beaf17d42e6ffacc88185044ad85cf74c0a8887c6a0f88c"},
-    {file = "tenacity-8.2.3.tar.gz", hash = "sha256:5398ef0d78e63f40007c1fb4c0bff96e1911394d2fa8d194f77619c05ff6cc8a"},
-]
 
 [package.extras]
 doc = ["reno", "sphinx", "tornado (>=4.5)"]
@@ -4243,92 +2665,2082 @@ doc = ["reno", "sphinx", "tornado (>=4.5)"]
 name = "terminado"
 version = "0.18.0"
 description = "Tornado websocket backend for the Xterm.js Javascript terminal emulator library."
+category = "main"
+optional = false
+python-versions = ">=3.8"
+
+[package.dependencies]
+ptyprocess = {version = "*", markers = "os_name != \"nt\""}
+pywinpty = {version = ">=1.1.0", markers = "os_name == \"nt\""}
+tornado = ">=6.1.0"
+
+[package.extras]
+docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"]
+test = ["pre-commit", "pytest (>=7.0)", "pytest-timeout"]
+typing = ["mypy (>=1.6,<2.0)", "traitlets (>=5.11.1)"]
+
+[[package]]
+name = "tinycss2"
+version = "1.2.1"
+description = "A tiny CSS parser"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+
+[package.dependencies]
+webencodings = ">=0.4"
+
+[package.extras]
+doc = ["sphinx", "sphinx_rtd_theme"]
+test = ["flake8", "isort", "pytest"]
+
+[[package]]
+name = "toml"
+version = "0.10.2"
+description = "Python Library for Tom's Obvious, Minimal Language"
+category = "main"
+optional = false
+python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
+
+[[package]]
+name = "tomli"
+version = "2.0.1"
+description = "A lil' TOML parser"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+
+[[package]]
+name = "tomlkit"
+version = "0.12.3"
+description = "Style preserving TOML library"
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+
+[[package]]
+name = "toolz"
+version = "0.12.0"
+description = "List processing tools and functional utilities"
+category = "main"
+optional = false
+python-versions = ">=3.5"
+
+[[package]]
+name = "tornado"
+version = "6.4"
+description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed."
+category = "main"
+optional = false
+python-versions = ">= 3.8"
+
+[[package]]
+name = "tqdm"
+version = "4.66.1"
+description = "Fast, Extensible Progress Meter"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+
+[package.dependencies]
+colorama = {version = "*", markers = "platform_system == \"Windows\""}
+
+[package.extras]
+dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"]
+notebook = ["ipywidgets (>=6)"]
+slack = ["slack-sdk"]
+telegram = ["requests"]
+
+[[package]]
+name = "traitlets"
+version = "5.14.1"
+description = "Traitlets Python configuration system"
+category = "main"
+optional = false
+python-versions = ">=3.8"
+
+[package.extras]
+docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"]
+test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<7.5)", "pytest-mock", "pytest-mypy-testing"]
+
+[[package]]
+name = "types-python-dateutil"
+version = "2.8.19.20240106"
+description = "Typing stubs for python-dateutil"
+category = "main"
+optional = false
+python-versions = ">=3.8"
+
+[[package]]
+name = "typing-extensions"
+version = "4.5.0"
+description = "Backported and Experimental Type Hints for Python 3.7+"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+
+[[package]]
+name = "typing-inspect"
+version = "0.9.0"
+description = "Runtime inspection utilities for typing module."
+category = "main"
+optional = false
+python-versions = "*"
+
+[package.dependencies]
+mypy-extensions = ">=0.3.0"
+typing-extensions = ">=3.7.4"
+
+[[package]]
+name = "tzdata"
+version = "2023.4"
+description = "Provider of IANA time zone data"
+category = "main"
+optional = false
+python-versions = ">=2"
+
+[[package]]
+name = "tzlocal"
+version = "5.2"
+description = "tzinfo object for the local timezone"
+category = "main"
+optional = false
+python-versions = ">=3.8"
+
+[package.dependencies]
+tzdata = {version = "*", markers = "platform_system == \"Windows\""}
+
+[package.extras]
+devenv = ["check-manifest", "pytest (>=4.3)", "pytest-cov", "pytest-mock (>=3.3)", "zest.releaser"]
+
+[[package]]
+name = "uri-template"
+version = "1.3.0"
+description = "RFC 6570 URI Template Processor"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+
+[package.extras]
+dev = ["flake8", "flake8-annotations", "flake8-bandit", "flake8-bugbear", "flake8-commas", "flake8-comprehensions", "flake8-continuation", "flake8-datetimez", "flake8-docstrings", "flake8-import-order", "flake8-literal", "flake8-modern-annotations", "flake8-noqa", "flake8-pyproject", "flake8-requirements", "flake8-typechecking-import", "flake8-use-fstring", "mypy", "pep8-naming", "types-PyYAML"]
+
+[[package]]
+name = "uritemplate"
+version = "4.1.1"
+description = "Implementation of RFC 6570 URI Templates"
+category = "main"
+optional = false
+python-versions = ">=3.6"
+
+[[package]]
+name = "urllib3"
+version = "1.26.18"
+description = "HTTP library with thread-safe connection pooling, file post, and more."
+category = "main"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
+
+[package.extras]
+brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"]
+secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"]
+socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
+
+[[package]]
+name = "uwsgi"
+version = "2.0.23"
+description = "The uWSGI server"
+category = "dev"
+optional = false
+python-versions = "*"
+
+[[package]]
+name = "validators"
+version = "0.20.0"
+description = "Python Data Validation for Humans™."
+category = "main"
+optional = false
+python-versions = ">=3.4"
+
+[package.dependencies]
+decorator = ">=3.4.0"
+
+[package.extras]
+test = ["flake8 (>=2.4.0)", "isort (>=4.2.2)", "pytest (>=2.2.3)"]
+
+[[package]]
+name = "wcwidth"
+version = "0.2.13"
+description = "Measures the displayed width of unicode strings in a terminal"
+category = "main"
+optional = false
+python-versions = "*"
+
+[[package]]
+name = "webcolors"
+version = "1.13"
+description = "A library for working with the color formats defined by HTML and CSS."
+category = "main" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["furo", "sphinx", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-notfound-page", "sphinxext-opengraph"] +tests = ["pytest", "pytest-cov"] + +[[package]] +name = "webencodings" +version = "0.5.1" +description = "Character encoding aliases for legacy web content" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "websocket-client" +version = "1.7.0" +description = "WebSocket client for Python with low level API options" +category = "main" optional = false python-versions = ">=3.8" -files = [ + +[package.extras] +docs = ["Sphinx (>=6.0)", "sphinx-rtd-theme (>=1.1.0)"] +optional = ["python-socks", "wsaccel"] +test = ["websockets"] + +[[package]] +name = "werkzeug" +version = "2.1.2" +description = "The comprehensive WSGI web application library." +category = "main" +optional = true +python-versions = ">=3.7" + +[package.extras] +watchdog = ["watchdog"] + +[[package]] +name = "widgetsnbextension" +version = "4.0.9" +description = "Jupyter interactive widgets for Jupyter Notebook" +category = "main" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "wrapt" +version = "1.16.0" +description = "Module for decorators, wrappers and monkey patching." +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "zipp" +version = "3.17.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +category = "main" +optional = false +python-versions = ">=3.8" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] + +[extras] +api = ["connexion", "Flask", "Flask-Cors"] + +[metadata] +lock-version = "1.1" +python-versions = ">=3.9.0,<3.11" +content-hash = "a946c242eb4441fc7322f616a5781d0be4bbb8de8d298be51f8f4b090b01b775" + +[metadata.files] +alabaster = [ + {file = "alabaster-0.7.16-py3-none-any.whl", hash = "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92"}, + {file = "alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65"}, +] +altair = [ + {file = "altair-4.2.0-py3-none-any.whl", hash = "sha256:0c724848ae53410c13fa28be2b3b9a9dcb7b5caa1a70f7f217bd663bb419935a"}, + {file = "altair-4.2.0.tar.gz", hash = "sha256:d87d9372e63b48cd96b2a6415f0cf9457f50162ab79dc7a31cd7e024dd840026"}, +] +anyio = [ + {file = "anyio-4.2.0-py3-none-any.whl", hash = "sha256:745843b39e829e108e518c489b31dc757de7d2131d53fac32bd8df268227bfee"}, + {file = "anyio-4.2.0.tar.gz", hash = "sha256:e1875bb4b4e2de1669f4bc7869b6d3f54231cdced71605e6e64c9be77e3be50f"}, +] +appnope = [ + {file = "appnope-0.1.3-py2.py3-none-any.whl", hash = "sha256:265a455292d0bd8a72453494fa24df5a11eb18373a60c7c0430889f22548605e"}, + {file = "appnope-0.1.3.tar.gz", hash = "sha256:02bd91c4de869fbb1e1c50aafc4098827a7a54ab2f39d9dcba6c9547ed920e24"}, +] +argon2-cffi = [ + {file = "argon2_cffi-23.1.0-py3-none-any.whl", hash = "sha256:c670642b78ba29641818ab2e68bd4e6a78ba53b7eff7b4c3815ae16abf91c7ea"}, + {file = "argon2_cffi-23.1.0.tar.gz", hash = "sha256:879c3e79a2729ce768ebb7d36d4609e3a78a4ca2ec3a9f12286ca057e3d0db08"}, +] +argon2-cffi-bindings = [ + 
{file = "argon2-cffi-bindings-21.2.0.tar.gz", hash = "sha256:bb89ceffa6c791807d1305ceb77dbfacc5aa499891d2c55661c6459651fc39e3"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ccb949252cb2ab3a08c02024acb77cfb179492d5701c7cbdbfd776124d4d2367"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9524464572e12979364b7d600abf96181d3541da11e23ddf565a32e70bd4dc0d"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b746dba803a79238e925d9046a63aa26bf86ab2a2fe74ce6b009a1c3f5c8f2ae"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58ed19212051f49a523abb1dbe954337dc82d947fb6e5a0da60f7c8471a8476c"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:bd46088725ef7f58b5a1ef7ca06647ebaf0eb4baff7d1d0d177c6cc8744abd86"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_i686.whl", hash = "sha256:8cd69c07dd875537a824deec19f978e0f2078fdda07fd5c42ac29668dda5f40f"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f1152ac548bd5b8bcecfb0b0371f082037e47128653df2e8ba6e914d384f3c3e"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win32.whl", hash = "sha256:603ca0aba86b1349b147cab91ae970c63118a0f30444d4bc80355937c950c082"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:b2ef1c30440dbbcba7a5dc3e319408b59676e2e039e2ae11a8775ecf482b192f"}, + {file = "argon2_cffi_bindings-21.2.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e415e3f62c8d124ee16018e491a009937f8cf7ebf5eb430ffc5de21b900dad93"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3e385d1c39c520c08b53d63300c3ecc28622f076f4c2b0e6d7e796e9f6502194"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3e3cc67fdb7d82c4718f19b4e7a87123caf8a93fde7e23cf66ac0337d3cb3f"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a22ad9800121b71099d0fb0a65323810a15f2e292f2ba450810a7316e128ee5"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9f8b450ed0547e3d473fdc8612083fd08dd2120d6ac8f73828df9b7d45bb351"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:93f9bf70084f97245ba10ee36575f0c3f1e7d7724d67d8e5b08e61787c320ed7"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3b9ef65804859d335dc6b31582cad2c5166f0c3e7975f324d9ffaa34ee7e6583"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4966ef5848d820776f5f562a7d45fdd70c2f330c961d0d745b784034bd9f48d"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20ef543a89dee4db46a1a6e206cd015360e5a75822f76df533845c3cbaf72670"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed2937d286e2ad0cc79a7087d3c272832865f779430e0cc2b4f3718d3159b0cb"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = 
"sha256:5e00316dabdaea0b2dd82d141cc66889ced0cdcbfa599e8b471cf22c620c329a"}, +] +arrow = [ + {file = "arrow-1.3.0-py3-none-any.whl", hash = "sha256:c728b120ebc00eb84e01882a6f5e7927a53960aa990ce7dd2b10f39005a67f80"}, + {file = "arrow-1.3.0.tar.gz", hash = "sha256:d4540617648cb5f895730f1ad8c82a65f2dad0166f57b75f3ca54759c4d67a85"}, +] +astroid = [ + {file = "astroid-2.15.8-py3-none-any.whl", hash = "sha256:1aa149fc5c6589e3d0ece885b4491acd80af4f087baafa3fb5203b113e68cd3c"}, + {file = "astroid-2.15.8.tar.gz", hash = "sha256:6c107453dffee9055899705de3c9ead36e74119cee151e5a9aaf7f0b0e020a6a"}, +] +asttokens = [ + {file = "asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24"}, + {file = "asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0"}, +] +async-lru = [ + {file = "async-lru-2.0.4.tar.gz", hash = "sha256:b8a59a5df60805ff63220b2a0c5b5393da5521b113cd5465a44eb037d81a5627"}, + {file = "async_lru-2.0.4-py3-none-any.whl", hash = "sha256:ff02944ce3c288c5be660c42dbcca0742b32c3b279d6dceda655190240b99224"}, +] +attrs = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] +babel = [ + {file = "Babel-2.14.0-py3-none-any.whl", hash = "sha256:efb1a25b7118e67ce3a259bed20545c29cb68be8ad2c784c83689981b7a57287"}, + {file = "Babel-2.14.0.tar.gz", hash = "sha256:6919867db036398ba21eb5c7a0f6b28ab8cbc3ae7a73a44ebe34ae74a4e7d363"}, +] +backoff = [ + {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, + {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, +] +beautifulsoup4 = [ + {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"}, + {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"}, +] +black = [ + {file = "black-23.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0aaf6041986767a5e0ce663c7a2f0e9eaf21e6ff87a5f95cbf3675bfd4c41d2"}, + {file = "black-23.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c88b3711d12905b74206227109272673edce0cb29f27e1385f33b0163c414bba"}, + {file = "black-23.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a920b569dc6b3472513ba6ddea21f440d4b4c699494d2e972a1753cdc25df7b0"}, + {file = "black-23.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:3fa4be75ef2a6b96ea8d92b1587dd8cb3a35c7e3d51f0738ced0781c3aa3a5a3"}, + {file = "black-23.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8d4df77958a622f9b5a4c96edb4b8c0034f8434032ab11077ec6c56ae9f384ba"}, + {file = "black-23.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:602cfb1196dc692424c70b6507593a2b29aac0547c1be9a1d1365f0d964c353b"}, + {file = "black-23.12.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c4352800f14be5b4864016882cdba10755bd50805c95f728011bcb47a4afd59"}, + {file = "black-23.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:0808494f2b2df923ffc5723ed3c7b096bd76341f6213989759287611e9837d50"}, + {file = "black-23.12.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:25e57fd232a6d6ff3f4478a6fd0580838e47c93c83eaf1ccc92d4faf27112c4e"}, + {file = 
"black-23.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2d9e13db441c509a3763a7a3d9a49ccc1b4e974a47be4e08ade2a228876500ec"}, + {file = "black-23.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1bd9c210f8b109b1762ec9fd36592fdd528485aadb3f5849b2740ef17e674e"}, + {file = "black-23.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:ae76c22bde5cbb6bfd211ec343ded2163bba7883c7bc77f6b756a1049436fbb9"}, + {file = "black-23.12.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1fa88a0f74e50e4487477bc0bb900c6781dbddfdfa32691e780bf854c3b4a47f"}, + {file = "black-23.12.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a4d6a9668e45ad99d2f8ec70d5c8c04ef4f32f648ef39048d010b0689832ec6d"}, + {file = "black-23.12.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b18fb2ae6c4bb63eebe5be6bd869ba2f14fd0259bda7d18a46b764d8fb86298a"}, + {file = "black-23.12.1-cp38-cp38-win_amd64.whl", hash = "sha256:c04b6d9d20e9c13f43eee8ea87d44156b8505ca8a3c878773f68b4e4812a421e"}, + {file = "black-23.12.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e1b38b3135fd4c025c28c55ddfc236b05af657828a8a6abe5deec419a0b7055"}, + {file = "black-23.12.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4f0031eaa7b921db76decd73636ef3a12c942ed367d8c3841a0739412b260a54"}, + {file = "black-23.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97e56155c6b737854e60a9ab1c598ff2533d57e7506d97af5481141671abf3ea"}, + {file = "black-23.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:dd15245c8b68fe2b6bd0f32c1556509d11bb33aec9b5d0866dd8e2ed3dba09c2"}, + {file = "black-23.12.1-py3-none-any.whl", hash = "sha256:78baad24af0f033958cad29731e27363183e140962595def56423e626f4bee3e"}, + {file = "black-23.12.1.tar.gz", hash = "sha256:4ce3ef14ebe8d9509188014d96af1c456a910d5b5cbf434a09fef7e024b3d0d5"}, +] +bleach = [ + {file = "bleach-6.1.0-py3-none-any.whl", hash = "sha256:3225f354cfc436b9789c66c4ee030194bee0568fbf9cbdad3bc8b5c26c5f12b6"}, + {file = "bleach-6.1.0.tar.gz", hash = "sha256:0a31f1837963c41d46bbf1331b8778e1308ea0791db03cc4e7357b97cf42a8fe"}, +] +cachetools = [ + {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, + {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, +] +certifi = [ + {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"}, + {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, +] +cffi = [ + {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, + {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, + {file = 
"cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, + {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, + {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, + {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, + {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, + {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, + {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, + {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, + {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, + {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, + {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, + {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, + {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, + {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, +] +charset-normalizer = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = 
"charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, 
+ {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = 
"charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] +click = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] +click-log = [ + {file = "click-log-0.4.0.tar.gz", hash = "sha256:3970f8570ac54491237bcdb3d8ab5e3eef6c057df29f8c3d1151a51a9c23b975"}, + {file = "click_log-0.4.0-py2.py3-none-any.whl", hash = "sha256:a43e394b528d52112af599f2fc9e4b7cf3c15f94e53581f74fa6867e68c91756"}, +] +clickclick = [ + {file = "clickclick-20.10.2-py2.py3-none-any.whl", hash = "sha256:c8f33e6d9ec83f68416dd2136a7950125bd256ec39ccc9a85c6e280a16be2bb5"}, + {file = "clickclick-20.10.2.tar.gz", hash = "sha256:4efb13e62353e34c5eef7ed6582c4920b418d7dedc86d819e22ee089ba01802c"}, +] +colorama = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] +comm = [ + {file = "comm-0.2.1-py3-none-any.whl", hash = "sha256:87928485c0dfc0e7976fd89fc1e187023cf587e7c353e4a9b417555b44adf021"}, + {file = "comm-0.2.1.tar.gz", hash = "sha256:0bc91edae1344d39d3661dcbc36937181fdaddb304790458f8b044dbc064b89a"}, +] +connexion = [ + {file = "connexion-2.14.2-py2.py3-none-any.whl", hash = "sha256:a73b96a0e07b16979a42cde7c7e26afe8548099e352cf350f80c57185e0e0b36"}, + {file = "connexion-2.14.2.tar.gz", hash = "sha256:dbc06f52ebeebcf045c9904d570f24377e8bbd5a6521caef15a06f634cf85646"}, +] +coverage = [ + {file = "coverage-7.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36b0ea8ab20d6a7564e89cb6135920bc9188fb5f1f7152e94e8300b7b189441a"}, + {file = "coverage-7.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0676cd0ba581e514b7f726495ea75aba3eb20899d824636c6f59b0ed2f88c471"}, + {file = "coverage-7.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ca5c71a5a1765a0f8f88022c52b6b8be740e512980362f7fdbb03725a0d6b9"}, + {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7c97726520f784239f6c62506bc70e48d01ae71e9da128259d61ca5e9788516"}, + {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:815ac2d0f3398a14286dc2cea223a6f338109f9ecf39a71160cd1628786bc6f5"}, + {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:80b5ee39b7f0131ebec7968baa9b2309eddb35b8403d1869e08f024efd883566"}, + {file = 
"coverage-7.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5b2ccb7548a0b65974860a78c9ffe1173cfb5877460e5a229238d985565574ae"}, + {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:995ea5c48c4ebfd898eacb098164b3cc826ba273b3049e4a889658548e321b43"}, + {file = "coverage-7.4.0-cp310-cp310-win32.whl", hash = "sha256:79287fd95585ed36e83182794a57a46aeae0b64ca53929d1176db56aacc83451"}, + {file = "coverage-7.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:5b14b4f8760006bfdb6e08667af7bc2d8d9bfdb648351915315ea17645347137"}, + {file = "coverage-7.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:04387a4a6ecb330c1878907ce0dc04078ea72a869263e53c72a1ba5bbdf380ca"}, + {file = "coverage-7.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea81d8f9691bb53f4fb4db603203029643caffc82bf998ab5b59ca05560f4c06"}, + {file = "coverage-7.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74775198b702868ec2d058cb92720a3c5a9177296f75bd97317c787daf711505"}, + {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76f03940f9973bfaee8cfba70ac991825611b9aac047e5c80d499a44079ec0bc"}, + {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:485e9f897cf4856a65a57c7f6ea3dc0d4e6c076c87311d4bc003f82cfe199d25"}, + {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6ae8c9d301207e6856865867d762a4b6fd379c714fcc0607a84b92ee63feff70"}, + {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bf477c355274a72435ceb140dc42de0dc1e1e0bf6e97195be30487d8eaaf1a09"}, + {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:83c2dda2666fe32332f8e87481eed056c8b4d163fe18ecc690b02802d36a4d26"}, + {file = "coverage-7.4.0-cp311-cp311-win32.whl", hash = "sha256:697d1317e5290a313ef0d369650cfee1a114abb6021fa239ca12b4849ebbd614"}, + {file = "coverage-7.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:26776ff6c711d9d835557ee453082025d871e30b3fd6c27fcef14733f67f0590"}, + {file = "coverage-7.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:13eaf476ec3e883fe3e5fe3707caeb88268a06284484a3daf8250259ef1ba143"}, + {file = "coverage-7.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846f52f46e212affb5bcf131c952fb4075b55aae6b61adc9856222df89cbe3e2"}, + {file = "coverage-7.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26f66da8695719ccf90e794ed567a1549bb2644a706b41e9f6eae6816b398c4a"}, + {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:164fdcc3246c69a6526a59b744b62e303039a81e42cfbbdc171c91a8cc2f9446"}, + {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:316543f71025a6565677d84bc4df2114e9b6a615aa39fb165d697dba06a54af9"}, + {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bb1de682da0b824411e00a0d4da5a784ec6496b6850fdf8c865c1d68c0e318dd"}, + {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:0e8d06778e8fbffccfe96331a3946237f87b1e1d359d7fbe8b06b96c95a5407a"}, + {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a56de34db7b7ff77056a37aedded01b2b98b508227d2d0979d373a9b5d353daa"}, + {file = "coverage-7.4.0-cp312-cp312-win32.whl", hash = 
"sha256:51456e6fa099a8d9d91497202d9563a320513fcf59f33991b0661a4a6f2ad450"}, + {file = "coverage-7.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:cd3c1e4cb2ff0083758f09be0f77402e1bdf704adb7f89108007300a6da587d0"}, + {file = "coverage-7.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e9d1bf53c4c8de58d22e0e956a79a5b37f754ed1ffdbf1a260d9dcfa2d8a325e"}, + {file = "coverage-7.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:109f5985182b6b81fe33323ab4707011875198c41964f014579cf82cebf2bb85"}, + {file = "coverage-7.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cc9d4bc55de8003663ec94c2f215d12d42ceea128da8f0f4036235a119c88ac"}, + {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc6d65b21c219ec2072c1293c505cf36e4e913a3f936d80028993dd73c7906b1"}, + {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a10a4920def78bbfff4eff8a05c51be03e42f1c3735be42d851f199144897ba"}, + {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b8e99f06160602bc64da35158bb76c73522a4010f0649be44a4e167ff8555952"}, + {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7d360587e64d006402b7116623cebf9d48893329ef035278969fa3bbf75b697e"}, + {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:29f3abe810930311c0b5d1a7140f6395369c3db1be68345638c33eec07535105"}, + {file = "coverage-7.4.0-cp38-cp38-win32.whl", hash = "sha256:5040148f4ec43644702e7b16ca864c5314ccb8ee0751ef617d49aa0e2d6bf4f2"}, + {file = "coverage-7.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:9864463c1c2f9cb3b5db2cf1ff475eed2f0b4285c2aaf4d357b69959941aa555"}, + {file = "coverage-7.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:936d38794044b26c99d3dd004d8af0035ac535b92090f7f2bb5aa9c8e2f5cd42"}, + {file = "coverage-7.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:799c8f873794a08cdf216aa5d0531c6a3747793b70c53f70e98259720a6fe2d7"}, + {file = "coverage-7.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7defbb9737274023e2d7af02cac77043c86ce88a907c58f42b580a97d5bcca9"}, + {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a1526d265743fb49363974b7aa8d5899ff64ee07df47dd8d3e37dcc0818f09ed"}, + {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf635a52fc1ea401baf88843ae8708591aa4adff875e5c23220de43b1ccf575c"}, + {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:756ded44f47f330666843b5781be126ab57bb57c22adbb07d83f6b519783b870"}, + {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0eb3c2f32dabe3a4aaf6441dde94f35687224dfd7eb2a7f47f3fd9428e421058"}, + {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bfd5db349d15c08311702611f3dccbef4b4e2ec148fcc636cf8739519b4a5c0f"}, + {file = "coverage-7.4.0-cp39-cp39-win32.whl", hash = "sha256:53d7d9158ee03956e0eadac38dfa1ec8068431ef8058fe6447043db1fb40d932"}, + {file = "coverage-7.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:cfd2a8b6b0d8e66e944d47cdec2f47c48fef2ba2f2dff5a9a75757f64172857e"}, + {file = "coverage-7.4.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:c530833afc4707fe48524a44844493f36d8727f04dcce91fb978c414a8556cc6"}, + {file = "coverage-7.4.0.tar.gz", hash = 
"sha256:707c0f58cb1712b8809ece32b68996ee1e609f71bd14615bd8f87a1293cb610e"}, +] +cryptography = [ + {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:3c78451b78313fa81607fa1b3f1ae0a5ddd8014c38a02d9db0616133987b9cdf"}, + {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:928258ba5d6f8ae644e764d0f996d61a8777559f72dfeb2eea7e2fe0ad6e782d"}, + {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a1b41bc97f1ad230a41657d9155113c7521953869ae57ac39ac7f1bb471469a"}, + {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:841df4caa01008bad253bce2a6f7b47f86dc9f08df4b433c404def869f590a15"}, + {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5429ec739a29df2e29e15d082f1d9ad683701f0ec7709ca479b3ff2708dae65a"}, + {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:43f2552a2378b44869fe8827aa19e69512e3245a219104438692385b0ee119d1"}, + {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:af03b32695b24d85a75d40e1ba39ffe7db7ffcb099fe507b39fd41a565f1b157"}, + {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:49f0805fc0b2ac8d4882dd52f4a3b935b210935d500b6b805f321addc8177406"}, + {file = "cryptography-41.0.7-cp37-abi3-win32.whl", hash = "sha256:f983596065a18a2183e7f79ab3fd4c475205b839e02cbc0efbbf9666c4b3083d"}, + {file = "cryptography-41.0.7-cp37-abi3-win_amd64.whl", hash = "sha256:90452ba79b8788fa380dfb587cca692976ef4e757b194b093d845e8d99f612f2"}, + {file = "cryptography-41.0.7-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:079b85658ea2f59c4f43b70f8119a52414cdb7be34da5d019a77bf96d473b960"}, + {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:b640981bf64a3e978a56167594a0e97db71c89a479da8e175d8bb5be5178c003"}, + {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e3114da6d7f95d2dee7d3f4eec16dacff819740bbab931aff8648cb13c5ff5e7"}, + {file = "cryptography-41.0.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d5ec85080cce7b0513cfd233914eb8b7bbd0633f1d1703aa28d1dd5a72f678ec"}, + {file = "cryptography-41.0.7-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7a698cb1dac82c35fcf8fe3417a3aaba97de16a01ac914b89a0889d364d2f6be"}, + {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:37a138589b12069efb424220bf78eac59ca68b95696fc622b6ccc1c0a197204a"}, + {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:68a2dec79deebc5d26d617bfdf6e8aab065a4f34934b22d3b5010df3ba36612c"}, + {file = "cryptography-41.0.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:09616eeaef406f99046553b8a40fbf8b1e70795a91885ba4c96a70793de5504a"}, + {file = "cryptography-41.0.7-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48a0476626da912a44cc078f9893f292f0b3e4c739caf289268168d8f4702a39"}, + {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c7f3201ec47d5207841402594f1d7950879ef890c0c495052fa62f58283fde1a"}, + {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c5ca78485a255e03c32b513f8c2bc39fedb7f5c5f8535545bdc223a03b24f248"}, + {file = "cryptography-41.0.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d6c391c021ab1f7a82da5d8d0b3cee2f4b2c455ec86c8aebbc84837a631ff309"}, + {file = "cryptography-41.0.7.tar.gz", hash 
= "sha256:13f93ce9bea8016c253b34afc6bd6a75993e5c40672ed5405a9c832f0d4a00bc"}, +] +dataclasses-json = [ + {file = "dataclasses_json-0.6.3-py3-none-any.whl", hash = "sha256:4aeb343357997396f6bca1acae64e486c3a723d8f5c76301888abeccf0c45176"}, + {file = "dataclasses_json-0.6.3.tar.gz", hash = "sha256:35cb40aae824736fdf959801356641836365219cfe14caeb115c39136f775d2a"}, +] +dateparser = [ + {file = "dateparser-1.2.0-py2.py3-none-any.whl", hash = "sha256:0b21ad96534e562920a0083e97fd45fa959882d4162acc358705144520a35830"}, + {file = "dateparser-1.2.0.tar.gz", hash = "sha256:7975b43a4222283e0ae15be7b4999d08c9a70e2d378ac87385b1ccf2cffbbb30"}, +] +debugpy = [ + {file = "debugpy-1.8.0-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:7fb95ca78f7ac43393cd0e0f2b6deda438ec7c5e47fa5d38553340897d2fbdfb"}, + {file = "debugpy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef9ab7df0b9a42ed9c878afd3eaaff471fce3fa73df96022e1f5c9f8f8c87ada"}, + {file = "debugpy-1.8.0-cp310-cp310-win32.whl", hash = "sha256:a8b7a2fd27cd9f3553ac112f356ad4ca93338feadd8910277aff71ab24d8775f"}, + {file = "debugpy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:5d9de202f5d42e62f932507ee8b21e30d49aae7e46d5b1dd5c908db1d7068637"}, + {file = "debugpy-1.8.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:ef54404365fae8d45cf450d0544ee40cefbcb9cb85ea7afe89a963c27028261e"}, + {file = "debugpy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60009b132c91951354f54363f8ebdf7457aeb150e84abba5ae251b8e9f29a8a6"}, + {file = "debugpy-1.8.0-cp311-cp311-win32.whl", hash = "sha256:8cd0197141eb9e8a4566794550cfdcdb8b3db0818bdf8c49a8e8f8053e56e38b"}, + {file = "debugpy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:a64093656c4c64dc6a438e11d59369875d200bd5abb8f9b26c1f5f723622e153"}, + {file = "debugpy-1.8.0-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:b05a6b503ed520ad58c8dc682749113d2fd9f41ffd45daec16e558ca884008cd"}, + {file = "debugpy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c6fb41c98ec51dd010d7ed650accfd07a87fe5e93eca9d5f584d0578f28f35f"}, + {file = "debugpy-1.8.0-cp38-cp38-win32.whl", hash = "sha256:46ab6780159eeabb43c1495d9c84cf85d62975e48b6ec21ee10c95767c0590aa"}, + {file = "debugpy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:bdc5ef99d14b9c0fcb35351b4fbfc06ac0ee576aeab6b2511702e5a648a2e595"}, + {file = "debugpy-1.8.0-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:61eab4a4c8b6125d41a34bad4e5fe3d2cc145caecd63c3fe953be4cc53e65bf8"}, + {file = "debugpy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:125b9a637e013f9faac0a3d6a82bd17c8b5d2c875fb6b7e2772c5aba6d082332"}, + {file = "debugpy-1.8.0-cp39-cp39-win32.whl", hash = "sha256:57161629133113c97b387382045649a2b985a348f0c9366e22217c87b68b73c6"}, + {file = "debugpy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:e3412f9faa9ade82aa64a50b602544efcba848c91384e9f93497a458767e6926"}, + {file = "debugpy-1.8.0-py2.py3-none-any.whl", hash = "sha256:9c9b0ac1ce2a42888199df1a1906e45e6f3c9555497643a85e0bf2406e3ffbc4"}, + {file = "debugpy-1.8.0.zip", hash = "sha256:12af2c55b419521e33d5fb21bd022df0b5eb267c3e178f1d374a63a2a6bdccd0"}, +] +decorator = [ + {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, + {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, +] +defusedxml = [ + {file = "defusedxml-0.7.1-py2.py3-none-any.whl", 
hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, + {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, +] +deprecated = [ + {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, + {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, +] +deprecation = [ + {file = "deprecation-2.1.0-py2.py3-none-any.whl", hash = "sha256:a10811591210e1fb0e768a8c25517cabeabcba6f0bf96564f8ff45189f90b14a"}, + {file = "deprecation-2.1.0.tar.gz", hash = "sha256:72b3bde64e5d778694b0cf68178aed03d15e15477116add3fb773e581f9518ff"}, +] +dill = [ + {file = "dill-0.3.7-py3-none-any.whl", hash = "sha256:76b122c08ef4ce2eedcd4d1abd8e641114bfc6c2867f49f3c41facf65bf19f5e"}, + {file = "dill-0.3.7.tar.gz", hash = "sha256:cc1c8b182eb3013e24bd475ff2e9295af86c1a38eb1aff128dac8962a9ce3c03"}, +] +docutils = [ + {file = "docutils-0.20.1-py3-none-any.whl", hash = "sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6"}, + {file = "docutils-0.20.1.tar.gz", hash = "sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b"}, +] +entrypoints = [ + {file = "entrypoints-0.4-py3-none-any.whl", hash = "sha256:f174b5ff827504fd3cd97cc3f8649f3693f51538c7e4bdf3ef002c8429d42f9f"}, + {file = "entrypoints-0.4.tar.gz", hash = "sha256:b706eddaa9218a19ebcd67b56818f05bb27589b1ca9e8d797b74affad4ccacd4"}, +] +et-xmlfile = [ + {file = "et_xmlfile-1.1.0-py3-none-any.whl", hash = "sha256:a2ba85d1d6a74ef63837eed693bcb89c3f752169b0e3e7ae5b16ca5e1b3deada"}, + {file = "et_xmlfile-1.1.0.tar.gz", hash = "sha256:8eb9e2bc2f8c97e37a2dc85a09ecdcdec9d8a396530a6d5a33b30b9a92da0c5c"}, +] +exceptiongroup = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] +execnet = [ + {file = "execnet-2.0.2-py3-none-any.whl", hash = "sha256:88256416ae766bc9e8895c76a87928c0012183da3cc4fc18016e6f050e025f41"}, + {file = "execnet-2.0.2.tar.gz", hash = "sha256:cc59bc4423742fd71ad227122eb0dd44db51efb3dc4095b45ac9a08c770096af"}, +] +executing = [ + {file = "executing-2.0.1-py2.py3-none-any.whl", hash = "sha256:eac49ca94516ccc753f9fb5ce82603156e590b27525a8bc32cce8ae302eb61bc"}, + {file = "executing-2.0.1.tar.gz", hash = "sha256:35afe2ce3affba8ee97f2d69927fa823b08b472b7b994e36a52a964b93d16147"}, +] +fastjsonschema = [ + {file = "fastjsonschema-2.19.1-py3-none-any.whl", hash = "sha256:3672b47bc94178c9f23dbb654bf47440155d4db9df5f7bc47643315f9c405cd0"}, + {file = "fastjsonschema-2.19.1.tar.gz", hash = "sha256:e3126a94bdc4623d3de4485f8d468a12f02a67921315ddc87836d6e456dc789d"}, +] +flake8 = [ + {file = "flake8-6.1.0-py2.py3-none-any.whl", hash = "sha256:ffdfce58ea94c6580c77888a86506937f9a1a227dfcd15f245d694ae20a6b6e5"}, + {file = "flake8-6.1.0.tar.gz", hash = "sha256:d5b3857f07c030bdb5bf41c7f53799571d75c4491748a3adcd47de929e34cd23"}, +] +flask = [ + {file = "Flask-2.1.3-py3-none-any.whl", hash = "sha256:9013281a7402ad527f8fd56375164f3aa021ecfaff89bfe3825346c24f87e04c"}, + {file = "Flask-2.1.3.tar.gz", hash = "sha256:15972e5017df0575c3d6c090ba168b6db90259e620ac8d7ea813a396bad5b6cb"}, +] +flask-cors = [ + {file = "Flask-Cors-3.0.10.tar.gz", hash = 
"sha256:b60839393f3b84a0f3746f6cdca56c1ad7426aa738b70d6c61375857823181de"}, + {file = "Flask_Cors-3.0.10-py2.py3-none-any.whl", hash = "sha256:74efc975af1194fc7891ff5cd85b0f7478be4f7f59fe158102e91abb72bb4438"}, +] +fqdn = [ + {file = "fqdn-1.5.1-py3-none-any.whl", hash = "sha256:3a179af3761e4df6eb2e026ff9e1a3033d3587bf980a0b1b2e1e5d08d7358014"}, + {file = "fqdn-1.5.1.tar.gz", hash = "sha256:105ed3677e767fb5ca086a0c1f4bb66ebc3c100be518f0e0d755d9eae164d89f"}, +] +google-api-core = [ + {file = "google-api-core-2.15.0.tar.gz", hash = "sha256:abc978a72658f14a2df1e5e12532effe40f94f868f6e23d95133bd6abcca35ca"}, + {file = "google_api_core-2.15.0-py3-none-any.whl", hash = "sha256:2aa56d2be495551e66bbff7f729b790546f87d5c90e74781aa77233bcb395a8a"}, +] +google-api-python-client = [ + {file = "google-api-python-client-2.114.0.tar.gz", hash = "sha256:e041bbbf60e682261281e9d64b4660035f04db1cccba19d1d68eebc24d1465ed"}, + {file = "google_api_python_client-2.114.0-py2.py3-none-any.whl", hash = "sha256:690e0bb67d70ff6dea4e8a5d3738639c105a478ac35da153d3b2a384064e9e1a"}, +] +google-auth = [ + {file = "google-auth-2.26.2.tar.gz", hash = "sha256:97327dbbf58cccb58fc5a1712bba403ae76668e64814eb30f7316f7e27126b81"}, + {file = "google_auth-2.26.2-py2.py3-none-any.whl", hash = "sha256:3f445c8ce9b61ed6459aad86d8ccdba4a9afed841b2d1451a11ef4db08957424"}, +] +google-auth-httplib2 = [ + {file = "google-auth-httplib2-0.1.1.tar.gz", hash = "sha256:c64bc555fdc6dd788ea62ecf7bccffcf497bf77244887a3f3d7a5a02f8e3fc29"}, + {file = "google_auth_httplib2-0.1.1-py2.py3-none-any.whl", hash = "sha256:42c50900b8e4dcdf8222364d1f0efe32b8421fb6ed72f2613f12f75cc933478c"}, +] +google-auth-oauthlib = [ + {file = "google-auth-oauthlib-0.8.0.tar.gz", hash = "sha256:81056a310fb1c4a3e5a7e1a443e1eb96593c6bbc55b26c0261e4d3295d3e6593"}, + {file = "google_auth_oauthlib-0.8.0-py2.py3-none-any.whl", hash = "sha256:40cc612a13c3336d5433e94e2adb42a0c88f6feb6c55769e44500fc70043a576"}, +] +googleapis-common-protos = [ + {file = "googleapis-common-protos-1.62.0.tar.gz", hash = "sha256:83f0ece9f94e5672cced82f592d2a5edf527a96ed1794f0bab36d5735c996277"}, + {file = "googleapis_common_protos-1.62.0-py2.py3-none-any.whl", hash = "sha256:4750113612205514f9f6aa4cb00d523a94f3e8c06c5ad2fee466387dc4875f07"}, +] +graphviz = [ + {file = "graphviz-0.20.1-py3-none-any.whl", hash = "sha256:587c58a223b51611c0cf461132da386edd896a029524ca61a1462b880bf97977"}, + {file = "graphviz-0.20.1.zip", hash = "sha256:8c58f14adaa3b947daf26c19bc1e98c4e0702cdc31cf99153e6f06904d492bf8"}, +] +great-expectations = [ + {file = "great_expectations-0.15.50-py3-none-any.whl", hash = "sha256:bda4c6bfe199dc0610273a1c160aab3876583266b1957a34a7edb72b055fd13d"}, + {file = "great_expectations-0.15.50.tar.gz", hash = "sha256:0b00c974410d598a97b4c662d7955d80d6268e35c5f3893ddb546f75432412db"}, +] +greenlet = [ + {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, + {file = 
"greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, + {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, + {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, + {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, + {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, + {file = 
"greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, + {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, + {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, + {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, + {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, + {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, + {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, + {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, 
+ {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, + {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, + {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, + {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, +] +httplib2 = [ + {file = "httplib2-0.22.0-py3-none-any.whl", hash = "sha256:14ae0a53c1ba8f3d37e9e27cf37eabb0fb9980f435ba405d546948b009dd64dc"}, + {file = "httplib2-0.22.0.tar.gz", hash = "sha256:d7a10bc5ef5ab08322488bde8c726eeee5c8618723fdb399597ec58f3d82df81"}, +] +idna = [ + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, +] +imagesize = [ + {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, + {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, +] +importlib-metadata = [ + {file = "importlib_metadata-6.11.0-py3-none-any.whl", hash = "sha256:f0afba6205ad8f8947c7d338b5342d5db2afbfd82f9cbef7879a9539cc12eb9b"}, + {file = "importlib_metadata-6.11.0.tar.gz", hash = "sha256:1231cf92d825c9e03cfc4da076a16de6422c863558229ea0b22b675657463443"}, +] +inflection = [ + {file = "inflection-0.5.1-py2.py3-none-any.whl", hash = "sha256:f38b2b640938a4f35ade69ac3d053042959b62a0f1076a5bbaa1b9526605a8a2"}, + {file = "inflection-0.5.1.tar.gz", hash = "sha256:1a29730d366e996aaacffb2f1f1cb9593dc38e2ddd30c91250c6dde09ea9b417"}, +] +iniconfig = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] +interrogate = [ + {file = "interrogate-1.5.0-py3-none-any.whl", hash = "sha256:a4ccc5cbd727c74acc98dee6f5e79ef264c0bcfa66b68d4e123069b2af89091a"}, + {file = "interrogate-1.5.0.tar.gz", hash = "sha256:b6f325f0aa84ac3ac6779d8708264d366102226c5af7d69058cecffcff7a6d6c"}, +] +ipykernel = [ + {file = "ipykernel-6.29.0-py3-none-any.whl", hash = "sha256:076663ca68492576f051e4af7720d33f34383e655f2be0d544c8b1c9de915b2f"}, + {file = "ipykernel-6.29.0.tar.gz", hash = "sha256:b5dd3013cab7b330df712891c96cd1ab868c27a7159e606f762015e9bf8ceb3f"}, +] +ipython = [ + {file = 
"ipython-8.18.1-py3-none-any.whl", hash = "sha256:e8267419d72d81955ec1177f8a29aaa90ac80ad647499201119e2f05e99aa397"}, + {file = "ipython-8.18.1.tar.gz", hash = "sha256:ca6f079bb33457c66e233e4580ebfc4128855b4cf6370dddd73842a9563e8a27"}, +] +ipywidgets = [ + {file = "ipywidgets-8.1.1-py3-none-any.whl", hash = "sha256:2b88d728656aea3bbfd05d32c747cfd0078f9d7e159cf982433b58ad717eed7f"}, + {file = "ipywidgets-8.1.1.tar.gz", hash = "sha256:40211efb556adec6fa450ccc2a77d59ca44a060f4f9f136833df59c9f538e6e8"}, +] +isodate = [ + {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, + {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, +] +isoduration = [ + {file = "isoduration-20.11.0-py3-none-any.whl", hash = "sha256:b2904c2a4228c3d44f409c8ae8e2370eb21a26f7ac2ec5446df141dde3452042"}, + {file = "isoduration-20.11.0.tar.gz", hash = "sha256:ac2f9015137935279eac671f94f89eb00584f940f5dc49462a0c4ee692ba1bd9"}, +] +isort = [ + {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, + {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, +] +itsdangerous = [ + {file = "itsdangerous-2.1.2-py3-none-any.whl", hash = "sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44"}, + {file = "itsdangerous-2.1.2.tar.gz", hash = "sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a"}, +] +jedi = [ + {file = "jedi-0.19.1-py2.py3-none-any.whl", hash = "sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0"}, + {file = "jedi-0.19.1.tar.gz", hash = "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd"}, +] +jeepney = [ + {file = "jeepney-0.8.0-py3-none-any.whl", hash = "sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755"}, + {file = "jeepney-0.8.0.tar.gz", hash = "sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806"}, +] +jinja2 = [ + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, +] +json5 = [ + {file = "json5-0.9.14-py2.py3-none-any.whl", hash = "sha256:740c7f1b9e584a468dbb2939d8d458db3427f2c93ae2139d05f47e453eae964f"}, + {file = "json5-0.9.14.tar.gz", hash = "sha256:9ed66c3a6ca3510a976a9ef9b8c0787de24802724ab1860bc0153c7fdd589b02"}, +] +jsonpatch = [ + {file = "jsonpatch-1.33-py2.py3-none-any.whl", hash = "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade"}, + {file = "jsonpatch-1.33.tar.gz", hash = "sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c"}, +] +jsonpointer = [ + {file = "jsonpointer-2.4-py2.py3-none-any.whl", hash = "sha256:15d51bba20eea3165644553647711d150376234112651b4f1811022aecad7d7a"}, + {file = "jsonpointer-2.4.tar.gz", hash = "sha256:585cee82b70211fa9e6043b7bb89db6e1aa49524340dde8ad6b63206ea689d88"}, +] +jsonschema = [ + {file = "jsonschema-4.21.0-py3-none-any.whl", hash = "sha256:70a09719d375c0a2874571b363c8a24be7df8071b80c9aa76bc4551e7297c63c"}, + {file = "jsonschema-4.21.0.tar.gz", hash = "sha256:3ba18e27f7491ea4a1b22edce00fb820eec968d397feb3f9cb61d5894bb38167"}, +] +jsonschema-specifications = [ + {file = "jsonschema_specifications-2023.12.1-py3-none-any.whl", hash = 
"sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c"}, + {file = "jsonschema_specifications-2023.12.1.tar.gz", hash = "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc"}, +] +jupyter-client = [ + {file = "jupyter_client-8.6.0-py3-none-any.whl", hash = "sha256:909c474dbe62582ae62b758bca86d6518c85234bdee2d908c778db6d72f39d99"}, + {file = "jupyter_client-8.6.0.tar.gz", hash = "sha256:0642244bb83b4764ae60d07e010e15f0e2d275ec4e918a8f7b80fbbef3ca60c7"}, +] +jupyter-core = [ + {file = "jupyter_core-5.7.1-py3-none-any.whl", hash = "sha256:c65c82126453a723a2804aa52409930434598fd9d35091d63dfb919d2b765bb7"}, + {file = "jupyter_core-5.7.1.tar.gz", hash = "sha256:de61a9d7fc71240f688b2fb5ab659fbb56979458dc66a71decd098e03c79e218"}, +] +jupyter-events = [ + {file = "jupyter_events-0.9.0-py3-none-any.whl", hash = "sha256:d853b3c10273ff9bc8bb8b30076d65e2c9685579db736873de6c2232dde148bf"}, + {file = "jupyter_events-0.9.0.tar.gz", hash = "sha256:81ad2e4bc710881ec274d31c6c50669d71bbaa5dd9d01e600b56faa85700d399"}, +] +jupyter-lsp = [ + {file = "jupyter-lsp-2.2.2.tar.gz", hash = "sha256:256d24620542ae4bba04a50fc1f6ffe208093a07d8e697fea0a8d1b8ca1b7e5b"}, + {file = "jupyter_lsp-2.2.2-py3-none-any.whl", hash = "sha256:3b95229e4168355a8c91928057c1621ac3510ba98b2a925e82ebd77f078b1aa5"}, +] +jupyter-server = [ + {file = "jupyter_server-2.12.5-py3-none-any.whl", hash = "sha256:184a0f82809a8522777cfb6b760ab6f4b1bb398664c5860a27cec696cb884923"}, + {file = "jupyter_server-2.12.5.tar.gz", hash = "sha256:0edb626c94baa22809be1323f9770cf1c00a952b17097592e40d03e6a3951689"}, +] +jupyter-server-terminals = [ + {file = "jupyter_server_terminals-0.5.1-py3-none-any.whl", hash = "sha256:5e63e947ddd97bb2832db5ef837a258d9ccd4192cd608c1270850ad947ae5dd7"}, + {file = "jupyter_server_terminals-0.5.1.tar.gz", hash = "sha256:16d3be9cf48be6a1f943f3a6c93c033be259cf4779184c66421709cf63dccfea"}, +] +jupyterlab = [ + {file = "jupyterlab-4.0.10-py3-none-any.whl", hash = "sha256:fe010ad9e37017488b468632ef2ead255fc7c671c5b64d9ca13e1f7b7e665c37"}, + {file = "jupyterlab-4.0.10.tar.gz", hash = "sha256:46177eb8ede70dc73be922ac99f8ef943bdc2dfbc6a31b353c4bde848a35dee1"}, +] +jupyterlab-pygments = [ + {file = "jupyterlab_pygments-0.3.0-py3-none-any.whl", hash = "sha256:841a89020971da1d8693f1a99997aefc5dc424bb1b251fd6322462a1b8842780"}, + {file = "jupyterlab_pygments-0.3.0.tar.gz", hash = "sha256:721aca4d9029252b11cfa9d185e5b5af4d54772bb8072f9b7036f4170054d35d"}, +] +jupyterlab-server = [ + {file = "jupyterlab_server-2.25.2-py3-none-any.whl", hash = "sha256:5b1798c9cc6a44f65c757de9f97fc06fc3d42535afbf47d2ace5e964ab447aaf"}, + {file = "jupyterlab_server-2.25.2.tar.gz", hash = "sha256:bd0ec7a99ebcedc8bcff939ef86e52c378e44c2707e053fcd81d046ce979ee63"}, +] +jupyterlab-widgets = [ + {file = "jupyterlab_widgets-3.0.9-py3-none-any.whl", hash = "sha256:3cf5bdf5b897bf3bccf1c11873aa4afd776d7430200f765e0686bd352487b58d"}, + {file = "jupyterlab_widgets-3.0.9.tar.gz", hash = "sha256:6005a4e974c7beee84060fdfba341a3218495046de8ae3ec64888e5fe19fdb4c"}, +] +keyring = [ + {file = "keyring-23.4.1-py3-none-any.whl", hash = "sha256:17e49fb0d6883c2b4445359434dba95aad84aabb29bbff044ad0ed7100232eca"}, + {file = "keyring-23.4.1.tar.gz", hash = "sha256:89cbd74d4683ed164c8082fb38619341097741323b3786905c6dac04d6915a55"}, +] +keyrings-alt = [ + {file = "keyrings.alt-3.1-py2.py3-none-any.whl", hash = "sha256:6a00fa799baf1385cf9620bd01bcc815aa56e6970342a567bcfea0c4d21abe5f"}, + {file = "keyrings.alt-3.1.tar.gz", hash = 
"sha256:b59c86b67b9027a86e841a49efc41025bcc3b1b0308629617b66b7011e52db5a"}, +] +lazy-object-proxy = [ + {file = "lazy-object-proxy-1.10.0.tar.gz", hash = "sha256:78247b6d45f43a52ef35c25b5581459e85117225408a4128a3daf8bf9648ac69"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:855e068b0358ab916454464a884779c7ffa312b8925c6f7401e952dcf3b89977"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab7004cf2e59f7c2e4345604a3e6ea0d92ac44e1c2375527d56492014e690c3"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc0d2fc424e54c70c4bc06787e4072c4f3b1aa2f897dfdc34ce1013cf3ceef05"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e2adb09778797da09d2b5ebdbceebf7dd32e2c96f79da9052b2e87b6ea495895"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b1f711e2c6dcd4edd372cf5dec5c5a30d23bba06ee012093267b3376c079ec83"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-win32.whl", hash = "sha256:76a095cfe6045c7d0ca77db9934e8f7b71b14645f0094ffcd842349ada5c5fb9"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:b4f87d4ed9064b2628da63830986c3d2dca7501e6018347798313fcf028e2fd4"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fec03caabbc6b59ea4a638bee5fce7117be8e99a4103d9d5ad77f15d6f81020c"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02c83f957782cbbe8136bee26416686a6ae998c7b6191711a04da776dc9e47d4"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:009e6bb1f1935a62889ddc8541514b6a9e1fcf302667dcb049a0be5c8f613e56"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:75fc59fc450050b1b3c203c35020bc41bd2695ed692a392924c6ce180c6f1dc9"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:782e2c9b2aab1708ffb07d4bf377d12901d7a1d99e5e410d648d892f8967ab1f"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-win32.whl", hash = "sha256:edb45bb8278574710e68a6b021599a10ce730d156e5b254941754a9cc0b17d03"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:e271058822765ad5e3bca7f05f2ace0de58a3f4e62045a8c90a0dfd2f8ad8cc6"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e98c8af98d5707dcdecc9ab0863c0ea6e88545d42ca7c3feffb6b4d1e370c7ba"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:952c81d415b9b80ea261d2372d2a4a2332a3890c2b83e0535f263ddfe43f0d43"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80b39d3a151309efc8cc48675918891b865bdf742a8616a337cb0090791a0de9"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e221060b701e2aa2ea991542900dd13907a5c90fa80e199dbf5a03359019e7a3"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:92f09ff65ecff3108e56526f9e2481b8116c0b9e1425325e13245abfd79bdb1b"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-win32.whl", hash = 
"sha256:3ad54b9ddbe20ae9f7c1b29e52f123120772b06dbb18ec6be9101369d63a4074"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:127a789c75151db6af398b8972178afe6bda7d6f68730c057fbbc2e96b08d282"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9e4ed0518a14dd26092614412936920ad081a424bdcb54cc13349a8e2c6d106a"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ad9e6ed739285919aa9661a5bbed0aaf410aa60231373c5579c6b4801bd883c"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fc0a92c02fa1ca1e84fc60fa258458e5bf89d90a1ddaeb8ed9cc3147f417255"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0aefc7591920bbd360d57ea03c995cebc204b424524a5bd78406f6e1b8b2a5d8"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5faf03a7d8942bb4476e3b62fd0f4cf94eaf4618e304a19865abf89a35c0bbee"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-win32.whl", hash = "sha256:e333e2324307a7b5d86adfa835bb500ee70bfcd1447384a822e96495796b0ca4"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:cb73507defd385b7705c599a94474b1d5222a508e502553ef94114a143ec6696"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:366c32fe5355ef5fc8a232c5436f4cc66e9d3e8967c01fb2e6302fd6627e3d94"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2297f08f08a2bb0d32a4265e98a006643cd7233fb7983032bd61ac7a02956b3b"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18dd842b49456aaa9a7cf535b04ca4571a302ff72ed8740d06b5adcd41fe0757"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:217138197c170a2a74ca0e05bddcd5f1796c735c37d0eee33e43259b192aa424"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9a3a87cf1e133e5b1994144c12ca4aa3d9698517fe1e2ca82977781b16955658"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-win32.whl", hash = "sha256:30b339b2a743c5288405aa79a69e706a06e02958eab31859f7f3c04980853b70"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:a899b10e17743683b293a729d3a11f2f399e8a90c73b089e29f5d0fe3509f0dd"}, + {file = "lazy_object_proxy-1.10.0-pp310.pp311.pp312.pp38.pp39-none-any.whl", hash = "sha256:80fa48bd89c8f2f456fc0765c11c23bf5af827febacd2f523ca5bc1893fcc09d"}, +] +makefun = [ + {file = "makefun-1.15.2-py2.py3-none-any.whl", hash = "sha256:1c83abfaefb6c3c7c83ed4a993b4a310af80adf6db15625b184b1f0f7545a041"}, + {file = "makefun-1.15.2.tar.gz", hash = "sha256:16f2a2b34d9ee0c2b578c960a1808c974e2822cf79f6e9b9c455aace10882d45"}, +] +markupsafe = [ + {file = "MarkupSafe-2.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3028252424c72b2602a323f70fbf50aa80a5d3aa616ea6add4ba21ae9cc9da4c"}, + {file = "MarkupSafe-2.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:290b02bab3c9e216da57c1d11d2ba73a9f73a614bbdcc027d299a60cdfabb11a"}, + {file = "MarkupSafe-2.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e104c0c2b4cd765b4e83909cde7ec61a1e313f8a75775897db321450e928cce"}, + {file = "MarkupSafe-2.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:24c3be29abb6b34052fd26fc7a8e0a49b1ee9d282e3665e8ad09a0a68faee5b3"}, + {file = "MarkupSafe-2.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:204730fd5fe2fe3b1e9ccadb2bd18ba8712b111dcabce185af0b3b5285a7c989"}, + {file = "MarkupSafe-2.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d3b64c65328cb4cd252c94f83e66e3d7acf8891e60ebf588d7b493a55a1dbf26"}, + {file = "MarkupSafe-2.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:96de1932237abe0a13ba68b63e94113678c379dca45afa040a17b6e1ad7ed076"}, + {file = "MarkupSafe-2.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:75bb36f134883fdbe13d8e63b8675f5f12b80bb6627f7714c7d6c5becf22719f"}, + {file = "MarkupSafe-2.1.0-cp310-cp310-win32.whl", hash = "sha256:4056f752015dfa9828dce3140dbadd543b555afb3252507348c493def166d454"}, + {file = "MarkupSafe-2.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:d4e702eea4a2903441f2735799d217f4ac1b55f7d8ad96ab7d4e25417cb0827c"}, + {file = "MarkupSafe-2.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f0eddfcabd6936558ec020130f932d479930581171368fd728efcfb6ef0dd357"}, + {file = "MarkupSafe-2.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ddea4c352a488b5e1069069f2f501006b1a4362cb906bee9a193ef1245a7a61"}, + {file = "MarkupSafe-2.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:09c86c9643cceb1d87ca08cdc30160d1b7ab49a8a21564868921959bd16441b8"}, + {file = "MarkupSafe-2.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0a0abef2ca47b33fb615b491ce31b055ef2430de52c5b3fb19a4042dbc5cadb"}, + {file = "MarkupSafe-2.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:736895a020e31b428b3382a7887bfea96102c529530299f426bf2e636aacec9e"}, + {file = "MarkupSafe-2.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:679cbb78914ab212c49c67ba2c7396dc599a8479de51b9a87b174700abd9ea49"}, + {file = "MarkupSafe-2.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:84ad5e29bf8bab3ad70fd707d3c05524862bddc54dc040982b0dbcff36481de7"}, + {file = "MarkupSafe-2.1.0-cp37-cp37m-win32.whl", hash = "sha256:8da5924cb1f9064589767b0f3fc39d03e3d0fb5aa29e0cb21d43106519bd624a"}, + {file = "MarkupSafe-2.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:454ffc1cbb75227d15667c09f164a0099159da0c1f3d2636aa648f12675491ad"}, + {file = "MarkupSafe-2.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:142119fb14a1ef6d758912b25c4e803c3ff66920635c44078666fe7cc3f8f759"}, + {file = "MarkupSafe-2.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b2a5a856019d2833c56a3dcac1b80fe795c95f401818ea963594b345929dffa7"}, + {file = "MarkupSafe-2.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d1fb9b2eec3c9714dd936860850300b51dbaa37404209c8d4cb66547884b7ed"}, + {file = "MarkupSafe-2.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:62c0285e91414f5c8f621a17b69fc0088394ccdaa961ef469e833dbff64bd5ea"}, + {file = "MarkupSafe-2.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fc3150f85e2dbcf99e65238c842d1cfe69d3e7649b19864c1cc043213d9cd730"}, + {file = "MarkupSafe-2.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f02cf7221d5cd915d7fa58ab64f7ee6dd0f6cddbb48683debf5d04ae9b1c2cc1"}, + {file = "MarkupSafe-2.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d5653619b3eb5cbd35bfba3c12d575db2a74d15e0e1c08bf1db788069d410ce8"}, + {file 
= "MarkupSafe-2.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7d2f5d97fcbd004c03df8d8fe2b973fe2b14e7bfeb2cfa012eaa8759ce9a762f"}, + {file = "MarkupSafe-2.1.0-cp38-cp38-win32.whl", hash = "sha256:3cace1837bc84e63b3fd2dfce37f08f8c18aeb81ef5cf6bb9b51f625cb4e6cd8"}, + {file = "MarkupSafe-2.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:fabbe18087c3d33c5824cb145ffca52eccd053061df1d79d4b66dafa5ad2a5ea"}, + {file = "MarkupSafe-2.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:023af8c54fe63530545f70dd2a2a7eed18d07a9a77b94e8bf1e2ff7f252db9a3"}, + {file = "MarkupSafe-2.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d66624f04de4af8bbf1c7f21cc06649c1c69a7f84109179add573ce35e46d448"}, + {file = "MarkupSafe-2.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c532d5ab79be0199fa2658e24a02fce8542df196e60665dd322409a03db6a52c"}, + {file = "MarkupSafe-2.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e67ec74fada3841b8c5f4c4f197bea916025cb9aa3fe5abf7d52b655d042f956"}, + {file = "MarkupSafe-2.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30c653fde75a6e5eb814d2a0a89378f83d1d3f502ab710904ee585c38888816c"}, + {file = "MarkupSafe-2.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:961eb86e5be7d0973789f30ebcf6caab60b844203f4396ece27310295a6082c7"}, + {file = "MarkupSafe-2.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:598b65d74615c021423bd45c2bc5e9b59539c875a9bdb7e5f2a6b92dfcfc268d"}, + {file = "MarkupSafe-2.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:599941da468f2cf22bf90a84f6e2a65524e87be2fce844f96f2dd9a6c9d1e635"}, + {file = "MarkupSafe-2.1.0-cp39-cp39-win32.whl", hash = "sha256:e6f7f3f41faffaea6596da86ecc2389672fa949bd035251eab26dc6697451d05"}, + {file = "MarkupSafe-2.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:b8811d48078d1cf2a6863dafb896e68406c5f513048451cd2ded0473133473c7"}, + {file = "MarkupSafe-2.1.0.tar.gz", hash = "sha256:80beaf63ddfbc64a0452b841d8036ca0611e049650e20afcb882f5d3c266d65f"}, +] +marshmallow = [ + {file = "marshmallow-3.20.2-py3-none-any.whl", hash = "sha256:c21d4b98fee747c130e6bc8f45c4b3199ea66bc00c12ee1f639f0aeca034d5e9"}, + {file = "marshmallow-3.20.2.tar.gz", hash = "sha256:4c1daff273513dc5eb24b219a8035559dc573c8f322558ef85f5438ddd1236dd"}, +] +matplotlib-inline = [ + {file = "matplotlib-inline-0.1.6.tar.gz", hash = "sha256:f887e5f10ba98e8d2b150ddcf4702c1e5f8b3a20005eb0f74bfdbd360ee6f304"}, + {file = "matplotlib_inline-0.1.6-py3-none-any.whl", hash = "sha256:f1f41aab5328aa5aaea9b16d083b128102f8712542f819fe7e6a420ff581b311"}, +] +mccabe = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] +mistune = [ + {file = "mistune-3.0.2-py3-none-any.whl", hash = "sha256:71481854c30fdbc938963d3605b72501f5c10a9320ecd412c121c163a1c7d205"}, + {file = "mistune-3.0.2.tar.gz", hash = "sha256:fc7f93ded930c92394ef2cb6f04a8aabab4117a91449e72dcc8dfa646a508be8"}, +] +mypy = [ + {file = "mypy-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:485a8942f671120f76afffff70f259e1cd0f0cfe08f81c05d8816d958d4577d3"}, + {file = "mypy-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:df9824ac11deaf007443e7ed2a4a26bebff98d2bc43c6da21b2b64185da011c4"}, + {file = "mypy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:2afecd6354bbfb6e0160f4e4ad9ba6e4e003b767dd80d85516e71f2e955ab50d"}, + {file = "mypy-1.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8963b83d53ee733a6e4196954502b33567ad07dfd74851f32be18eb932fb1cb9"}, + {file = "mypy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:e46f44b54ebddbeedbd3d5b289a893219065ef805d95094d16a0af6630f5d410"}, + {file = "mypy-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:855fe27b80375e5c5878492f0729540db47b186509c98dae341254c8f45f42ae"}, + {file = "mypy-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c886c6cce2d070bd7df4ec4a05a13ee20c0aa60cb587e8d1265b6c03cf91da3"}, + {file = "mypy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d19c413b3c07cbecf1f991e2221746b0d2a9410b59cb3f4fb9557f0365a1a817"}, + {file = "mypy-1.8.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9261ed810972061388918c83c3f5cd46079d875026ba97380f3e3978a72f503d"}, + {file = "mypy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:51720c776d148bad2372ca21ca29256ed483aa9a4cdefefcef49006dff2a6835"}, + {file = "mypy-1.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:52825b01f5c4c1c4eb0db253ec09c7aa17e1a7304d247c48b6f3599ef40db8bd"}, + {file = "mypy-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f5ac9a4eeb1ec0f1ccdc6f326bcdb464de5f80eb07fb38b5ddd7b0de6bc61e55"}, + {file = "mypy-1.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afe3fe972c645b4632c563d3f3eff1cdca2fa058f730df2b93a35e3b0c538218"}, + {file = "mypy-1.8.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:42c6680d256ab35637ef88891c6bd02514ccb7e1122133ac96055ff458f93fc3"}, + {file = "mypy-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:720a5ca70e136b675af3af63db533c1c8c9181314d207568bbe79051f122669e"}, + {file = "mypy-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:028cf9f2cae89e202d7b6593cd98db6759379f17a319b5faf4f9978d7084cdc6"}, + {file = "mypy-1.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4e6d97288757e1ddba10dd9549ac27982e3e74a49d8d0179fc14d4365c7add66"}, + {file = "mypy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f1478736fcebb90f97e40aff11a5f253af890c845ee0c850fe80aa060a267c6"}, + {file = "mypy-1.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42419861b43e6962a649068a61f4a4839205a3ef525b858377a960b9e2de6e0d"}, + {file = "mypy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:2b5b6c721bd4aabaadead3a5e6fa85c11c6c795e0c81a7215776ef8afc66de02"}, + {file = "mypy-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5c1538c38584029352878a0466f03a8ee7547d7bd9f641f57a0f3017a7c905b8"}, + {file = "mypy-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ef4be7baf08a203170f29e89d79064463b7fc7a0908b9d0d5114e8009c3a259"}, + {file = "mypy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178def594014aa6c35a8ff411cf37d682f428b3b5617ca79029d8ae72f5402b"}, + {file = "mypy-1.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ab3c84fa13c04aeeeabb2a7f67a25ef5d77ac9d6486ff33ded762ef353aa5592"}, + {file = "mypy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:99b00bc72855812a60d253420d8a2eae839b0afa4938f09f4d2aa9bb4654263a"}, + {file = "mypy-1.8.0-py3-none-any.whl", hash = "sha256:538fd81bb5e430cc1381a443971c0475582ff9f434c16cd46d2c66763ce85d9d"}, + {file = "mypy-1.8.0.tar.gz", hash = "sha256:6ff8b244d7085a0b425b56d327b480c3b29cafbd2eff27316a004f9a7391ae07"}, +] +mypy-extensions = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", 
hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] +nbclient = [ + {file = "nbclient-0.9.0-py3-none-any.whl", hash = "sha256:a3a1ddfb34d4a9d17fc744d655962714a866639acd30130e9be84191cd97cd15"}, + {file = "nbclient-0.9.0.tar.gz", hash = "sha256:4b28c207877cf33ef3a9838cdc7a54c5ceff981194a82eac59d558f05487295e"}, +] +nbconvert = [ + {file = "nbconvert-7.14.2-py3-none-any.whl", hash = "sha256:db28590cef90f7faf2ebbc71acd402cbecf13d29176df728c0a9025a49345ea1"}, + {file = "nbconvert-7.14.2.tar.gz", hash = "sha256:a7f8808fd4e082431673ac538400218dd45efd076fbeb07cc6e5aa5a3a4e949e"}, +] +nbformat = [ + {file = "nbformat-5.9.2-py3-none-any.whl", hash = "sha256:1c5172d786a41b82bcfd0c23f9e6b6f072e8fb49c39250219e4acfff1efe89e9"}, + {file = "nbformat-5.9.2.tar.gz", hash = "sha256:5f98b5ba1997dff175e77e0c17d5c10a96eaed2cbd1de3533d1fc35d5e111192"}, +] +nest-asyncio = [ + {file = "nest_asyncio-1.5.9-py3-none-any.whl", hash = "sha256:61ec07ef052e72e3de22045b81b2cc7d71fceb04c568ba0b2e4b2f9f5231bec2"}, + {file = "nest_asyncio-1.5.9.tar.gz", hash = "sha256:d1e1144e9c6e3e6392e0fcf5211cb1c8374b5648a98f1ebe48e5336006b41907"}, +] +networkx = [ + {file = "networkx-2.8.8-py3-none-any.whl", hash = "sha256:e435dfa75b1d7195c7b8378c3859f0445cd88c6b0375c181ed66823a9ceb7524"}, + {file = "networkx-2.8.8.tar.gz", hash = "sha256:230d388117af870fce5647a3c52401fcf753e94720e6ea6b4197a5355648885e"}, +] +notebook = [ + {file = "notebook-7.0.6-py3-none-any.whl", hash = "sha256:0fe8f67102fea3744fedf652e4c15339390902ca70c5a31c4f547fa23da697cc"}, + {file = "notebook-7.0.6.tar.gz", hash = "sha256:ec6113b06529019f7f287819af06c97a2baf7a95ac21a8f6e32192898e9f9a58"}, +] +notebook-shim = [ + {file = "notebook_shim-0.2.3-py3-none-any.whl", hash = "sha256:a83496a43341c1674b093bfcebf0fe8e74cbe7eda5fd2bbc56f8e39e1486c0c7"}, + {file = "notebook_shim-0.2.3.tar.gz", hash = "sha256:f69388ac283ae008cd506dda10d0288b09a017d822d5e8c7129a152cbd3ce7e9"}, +] +numpy = [ + {file = "numpy-1.26.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:806dd64230dbbfaca8a27faa64e2f414bf1c6622ab78cc4264f7f5f028fee3bf"}, + {file = "numpy-1.26.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02f98011ba4ab17f46f80f7f8f1c291ee7d855fcef0a5a98db80767a468c85cd"}, + {file = "numpy-1.26.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d45b3ec2faed4baca41c76617fcdcfa4f684ff7a151ce6fc78ad3b6e85af0a6"}, + {file = "numpy-1.26.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdd2b45bf079d9ad90377048e2747a0c82351989a2165821f0c96831b4a2a54b"}, + {file = "numpy-1.26.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:211ddd1e94817ed2d175b60b6374120244a4dd2287f4ece45d49228b4d529178"}, + {file = "numpy-1.26.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b1240f767f69d7c4c8a29adde2310b871153df9b26b5cb2b54a561ac85146485"}, + {file = "numpy-1.26.3-cp310-cp310-win32.whl", hash = "sha256:21a9484e75ad018974a2fdaa216524d64ed4212e418e0a551a2d83403b0531d3"}, + {file = "numpy-1.26.3-cp310-cp310-win_amd64.whl", hash = "sha256:9e1591f6ae98bcfac2a4bbf9221c0b92ab49762228f38287f6eeb5f3f55905ce"}, + {file = "numpy-1.26.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b831295e5472954104ecb46cd98c08b98b49c69fdb7040483aff799a755a7374"}, + {file = "numpy-1.26.3-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:9e87562b91f68dd8b1c39149d0323b42e0082db7ddb8e934ab4c292094d575d6"}, + {file = "numpy-1.26.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c66d6fec467e8c0f975818c1796d25c53521124b7cfb760114be0abad53a0a2"}, + {file = "numpy-1.26.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f25e2811a9c932e43943a2615e65fc487a0b6b49218899e62e426e7f0a57eeda"}, + {file = "numpy-1.26.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:af36e0aa45e25c9f57bf684b1175e59ea05d9a7d3e8e87b7ae1a1da246f2767e"}, + {file = "numpy-1.26.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:51c7f1b344f302067b02e0f5b5d2daa9ed4a721cf49f070280ac202738ea7f00"}, + {file = "numpy-1.26.3-cp311-cp311-win32.whl", hash = "sha256:7ca4f24341df071877849eb2034948459ce3a07915c2734f1abb4018d9c49d7b"}, + {file = "numpy-1.26.3-cp311-cp311-win_amd64.whl", hash = "sha256:39763aee6dfdd4878032361b30b2b12593fb445ddb66bbac802e2113eb8a6ac4"}, + {file = "numpy-1.26.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a7081fd19a6d573e1a05e600c82a1c421011db7935ed0d5c483e9dd96b99cf13"}, + {file = "numpy-1.26.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12c70ac274b32bc00c7f61b515126c9205323703abb99cd41836e8125ea0043e"}, + {file = "numpy-1.26.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f784e13e598e9594750b2ef6729bcd5a47f6cfe4a12cca13def35e06d8163e3"}, + {file = "numpy-1.26.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f24750ef94d56ce6e33e4019a8a4d68cfdb1ef661a52cdaee628a56d2437419"}, + {file = "numpy-1.26.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:77810ef29e0fb1d289d225cabb9ee6cf4d11978a00bb99f7f8ec2132a84e0166"}, + {file = "numpy-1.26.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8ed07a90f5450d99dad60d3799f9c03c6566709bd53b497eb9ccad9a55867f36"}, + {file = "numpy-1.26.3-cp312-cp312-win32.whl", hash = "sha256:f73497e8c38295aaa4741bdfa4fda1a5aedda5473074369eca10626835445511"}, + {file = "numpy-1.26.3-cp312-cp312-win_amd64.whl", hash = "sha256:da4b0c6c699a0ad73c810736303f7fbae483bcb012e38d7eb06a5e3b432c981b"}, + {file = "numpy-1.26.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1666f634cb3c80ccbd77ec97bc17337718f56d6658acf5d3b906ca03e90ce87f"}, + {file = "numpy-1.26.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:18c3319a7d39b2c6a9e3bb75aab2304ab79a811ac0168a671a62e6346c29b03f"}, + {file = "numpy-1.26.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b7e807d6888da0db6e7e75838444d62495e2b588b99e90dd80c3459594e857b"}, + {file = "numpy-1.26.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4d362e17bcb0011738c2d83e0a65ea8ce627057b2fdda37678f4374a382a137"}, + {file = "numpy-1.26.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b8c275f0ae90069496068c714387b4a0eba5d531aace269559ff2b43655edd58"}, + {file = "numpy-1.26.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cc0743f0302b94f397a4a65a660d4cd24267439eb16493fb3caad2e4389bccbb"}, + {file = "numpy-1.26.3-cp39-cp39-win32.whl", hash = "sha256:9bc6d1a7f8cedd519c4b7b1156d98e051b726bf160715b769106661d567b3f03"}, + {file = "numpy-1.26.3-cp39-cp39-win_amd64.whl", hash = "sha256:867e3644e208c8922a3be26fc6bbf112a035f50f0a86497f98f228c50c607bb2"}, + {file = "numpy-1.26.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3c67423b3703f8fbd90f5adaa37f85b5794d3366948efe9a5190a5f3a83fc34e"}, + {file = 
"numpy-1.26.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46f47ee566d98849323f01b349d58f2557f02167ee301e5e28809a8c0e27a2d0"}, + {file = "numpy-1.26.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a8474703bffc65ca15853d5fd4d06b18138ae90c17c8d12169968e998e448bb5"}, + {file = "numpy-1.26.3.tar.gz", hash = "sha256:697df43e2b6310ecc9d95f05d5ef20eacc09c7c4ecc9da3f235d39e71b7da1e4"}, +] +oauth2client = [ + {file = "oauth2client-4.1.3-py2.py3-none-any.whl", hash = "sha256:b8a81cc5d60e2d364f0b1b98f958dbd472887acaf1a5b05e21c28c31a2d6d3ac"}, + {file = "oauth2client-4.1.3.tar.gz", hash = "sha256:d486741e451287f69568a4d26d70d9acd73a2bbfa275746c535b4209891cccc6"}, +] +oauthlib = [ + {file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"}, + {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"}, +] +openpyxl = [ + {file = "openpyxl-3.1.2-py2.py3-none-any.whl", hash = "sha256:f91456ead12ab3c6c2e9491cf33ba6d08357d802192379bb482f1033ade496f5"}, + {file = "openpyxl-3.1.2.tar.gz", hash = "sha256:a6f5977418eff3b2d5500d54d9db50c8277a368436f4e4f8ddb1be3422870184"}, +] +opentelemetry-api = [ + {file = "opentelemetry_api-1.21.0-py3-none-any.whl", hash = "sha256:4bb86b28627b7e41098f0e93280fe4892a1abed1b79a19aec6f928f39b17dffb"}, + {file = "opentelemetry_api-1.21.0.tar.gz", hash = "sha256:d6185fd5043e000075d921822fd2d26b953eba8ca21b1e2fa360dd46a7686316"}, +] +opentelemetry-exporter-otlp-proto-common = [ + {file = "opentelemetry_exporter_otlp_proto_common-1.21.0-py3-none-any.whl", hash = "sha256:97b1022b38270ec65d11fbfa348e0cd49d12006485c2321ea3b1b7037d42b6ec"}, + {file = "opentelemetry_exporter_otlp_proto_common-1.21.0.tar.gz", hash = "sha256:61db274d8a68d636fb2ec2a0f281922949361cdd8236e25ff5539edf942b3226"}, +] +opentelemetry-exporter-otlp-proto-http = [ + {file = "opentelemetry_exporter_otlp_proto_http-1.21.0-py3-none-any.whl", hash = "sha256:56837773de6fb2714c01fc4895caebe876f6397bbc4d16afddf89e1299a55ee2"}, + {file = "opentelemetry_exporter_otlp_proto_http-1.21.0.tar.gz", hash = "sha256:19d60afa4ae8597f7ef61ad75c8b6c6b7ef8cb73a33fb4aed4dbc86d5c8d3301"}, +] +opentelemetry-proto = [ + {file = "opentelemetry_proto-1.21.0-py3-none-any.whl", hash = "sha256:32fc4248e83eebd80994e13963e683f25f3b443226336bb12b5b6d53638f50ba"}, + {file = "opentelemetry_proto-1.21.0.tar.gz", hash = "sha256:7d5172c29ed1b525b5ecf4ebe758c7138a9224441b3cfe683d0a237c33b1941f"}, +] +opentelemetry-sdk = [ + {file = "opentelemetry_sdk-1.21.0-py3-none-any.whl", hash = "sha256:9fe633243a8c655fedace3a0b89ccdfc654c0290ea2d8e839bd5db3131186f73"}, + {file = "opentelemetry_sdk-1.21.0.tar.gz", hash = "sha256:3ec8cd3020328d6bc5c9991ccaf9ae820ccb6395a5648d9a95d3ec88275b8879"}, +] +opentelemetry-semantic-conventions = [ + {file = "opentelemetry_semantic_conventions-0.42b0-py3-none-any.whl", hash = "sha256:5cd719cbfec448af658860796c5d0fcea2fdf0945a2bed2363f42cb1ee39f526"}, + {file = "opentelemetry_semantic_conventions-0.42b0.tar.gz", hash = "sha256:44ae67a0a3252a05072877857e5cc1242c98d4cf12870159f1a94bec800d38ec"}, +] +overrides = [ + {file = "overrides-7.4.0-py3-none-any.whl", hash = "sha256:3ad24583f86d6d7a49049695efe9933e67ba62f0c7625d53c59fa832ce4b8b7d"}, + {file = "overrides-7.4.0.tar.gz", hash = "sha256:9502a3cca51f4fac40b5feca985b6703a5c1f6ad815588a7ca9e285b9dca6757"}, +] +packaging = [ + {file = "packaging-23.2-py3-none-any.whl", hash = 
"sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, +] +pandarallel = [ + {file = "pandarallel-1.6.5.tar.gz", hash = "sha256:1c2df98ff6441e8ae13ff428ceebaa7ec42d731f7f972c41ce4fdef1d3adf640"}, +] +pandas = [ + {file = "pandas-1.5.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3749077d86e3a2f0ed51367f30bf5b82e131cc0f14260c4d3e499186fccc4406"}, + {file = "pandas-1.5.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:972d8a45395f2a2d26733eb8d0f629b2f90bebe8e8eddbb8829b180c09639572"}, + {file = "pandas-1.5.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:50869a35cbb0f2e0cd5ec04b191e7b12ed688874bd05dd777c19b28cbea90996"}, + {file = "pandas-1.5.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3ac844a0fe00bfaeb2c9b51ab1424e5c8744f89860b138434a363b1f620f354"}, + {file = "pandas-1.5.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a0a56cef15fd1586726dace5616db75ebcfec9179a3a55e78f72c5639fa2a23"}, + {file = "pandas-1.5.3-cp310-cp310-win_amd64.whl", hash = "sha256:478ff646ca42b20376e4ed3fa2e8d7341e8a63105586efe54fa2508ee087f328"}, + {file = "pandas-1.5.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6973549c01ca91ec96199e940495219c887ea815b2083722821f1d7abfa2b4dc"}, + {file = "pandas-1.5.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c39a8da13cede5adcd3be1182883aea1c925476f4e84b2807a46e2775306305d"}, + {file = "pandas-1.5.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f76d097d12c82a535fda9dfe5e8dd4127952b45fea9b0276cb30cca5ea313fbc"}, + {file = "pandas-1.5.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e474390e60ed609cec869b0da796ad94f420bb057d86784191eefc62b65819ae"}, + {file = "pandas-1.5.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f2b952406a1588ad4cad5b3f55f520e82e902388a6d5a4a91baa8d38d23c7f6"}, + {file = "pandas-1.5.3-cp311-cp311-win_amd64.whl", hash = "sha256:bc4c368f42b551bf72fac35c5128963a171b40dce866fb066540eeaf46faa003"}, + {file = "pandas-1.5.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:14e45300521902689a81f3f41386dc86f19b8ba8dd5ac5a3c7010ef8d2932813"}, + {file = "pandas-1.5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9842b6f4b8479e41968eced654487258ed81df7d1c9b7b870ceea24ed9459b31"}, + {file = "pandas-1.5.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:26d9c71772c7afb9d5046e6e9cf42d83dd147b5cf5bcb9d97252077118543792"}, + {file = "pandas-1.5.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fbcb19d6fceb9e946b3e23258757c7b225ba450990d9ed63ccceeb8cae609f7"}, + {file = "pandas-1.5.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:565fa34a5434d38e9d250af3c12ff931abaf88050551d9fbcdfafca50d62babf"}, + {file = "pandas-1.5.3-cp38-cp38-win32.whl", hash = "sha256:87bd9c03da1ac870a6d2c8902a0e1fd4267ca00f13bc494c9e5a9020920e1d51"}, + {file = "pandas-1.5.3-cp38-cp38-win_amd64.whl", hash = "sha256:41179ce559943d83a9b4bbacb736b04c928b095b5f25dd2b7389eda08f46f373"}, + {file = "pandas-1.5.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c74a62747864ed568f5a82a49a23a8d7fe171d0c69038b38cedf0976831296fa"}, + {file = "pandas-1.5.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c4c00e0b0597c8e4f59e8d461f797e5d70b4d025880516a8261b2817c47759ee"}, + {file = "pandas-1.5.3-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:a50d9a4336a9621cab7b8eb3fb11adb82de58f9b91d84c2cd526576b881a0c5a"}, + {file = "pandas-1.5.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd05f7783b3274aa206a1af06f0ceed3f9b412cf665b7247eacd83be41cf7bf0"}, + {file = "pandas-1.5.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f69c4029613de47816b1bb30ff5ac778686688751a5e9c99ad8c7031f6508e5"}, + {file = "pandas-1.5.3-cp39-cp39-win32.whl", hash = "sha256:7cec0bee9f294e5de5bbfc14d0573f65526071029d036b753ee6507d2a21480a"}, + {file = "pandas-1.5.3-cp39-cp39-win_amd64.whl", hash = "sha256:dfd681c5dc216037e0b0a2c821f5ed99ba9f03ebcf119c7dac0e9a7b960b9ec9"}, + {file = "pandas-1.5.3.tar.gz", hash = "sha256:74a3fd7e5a7ec052f183273dc7b0acd3a863edf7520f5d3a1765c04ffdb3b0b1"}, +] +pandocfilters = [ + {file = "pandocfilters-1.5.1-py2.py3-none-any.whl", hash = "sha256:93be382804a9cdb0a7267585f157e5d1731bbe5545a85b268d6f5fe6232de2bc"}, + {file = "pandocfilters-1.5.1.tar.gz", hash = "sha256:002b4a555ee4ebc03f8b66307e287fa492e4a77b4ea14d3f934328297bb4939e"}, +] +parso = [ + {file = "parso-0.8.3-py2.py3-none-any.whl", hash = "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"}, + {file = "parso-0.8.3.tar.gz", hash = "sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0"}, +] +pathspec = [ + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, +] +pdoc = [ + {file = "pdoc-12.3.1-py3-none-any.whl", hash = "sha256:c3f24f31286e634de9c76fa6e67bd5c0c5e74360b41dc91e6b82499831eb52d8"}, + {file = "pdoc-12.3.1.tar.gz", hash = "sha256:453236f225feddb8a9071428f1982a78d74b9b3da4bc4433aedb64dbd0cc87ab"}, +] +pexpect = [ + {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, + {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"}, +] +platformdirs = [ + {file = "platformdirs-4.1.0-py3-none-any.whl", hash = "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380"}, + {file = "platformdirs-4.1.0.tar.gz", hash = "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420"}, +] +pluggy = [ + {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, + {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, +] +prometheus-client = [ + {file = "prometheus_client-0.19.0-py3-none-any.whl", hash = "sha256:c88b1e6ecf6b41cd8fb5731c7ae919bf66df6ec6fafa555cd6c0e16ca169ae92"}, + {file = "prometheus_client-0.19.0.tar.gz", hash = "sha256:4585b0d1223148c27a225b10dbec5ae9bc4c81a99a3fa80774fa6209935324e1"}, +] +prompt-toolkit = [ + {file = "prompt_toolkit-3.0.43-py3-none-any.whl", hash = "sha256:a11a29cb3bf0a28a387fe5122cdb649816a957cd9261dcedf8c9f1fef33eacf6"}, + {file = "prompt_toolkit-3.0.43.tar.gz", hash = "sha256:3527b7af26106cbc65a040bcc84839a3566ec1b051bb0bfe953631e704b0ff7d"}, +] +protobuf = [ + {file = "protobuf-4.25.2-cp310-abi3-win32.whl", hash = "sha256:b50c949608682b12efb0b2717f53256f03636af5f60ac0c1d900df6213910fd6"}, + {file = "protobuf-4.25.2-cp310-abi3-win_amd64.whl", hash = "sha256:8f62574857ee1de9f770baf04dde4165e30b15ad97ba03ceac65f760ff018ac9"}, + {file = 
"protobuf-4.25.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:2db9f8fa64fbdcdc93767d3cf81e0f2aef176284071507e3ede160811502fd3d"}, + {file = "protobuf-4.25.2-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:10894a2885b7175d3984f2be8d9850712c57d5e7587a2410720af8be56cdaf62"}, + {file = "protobuf-4.25.2-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:fc381d1dd0516343f1440019cedf08a7405f791cd49eef4ae1ea06520bc1c020"}, + {file = "protobuf-4.25.2-cp38-cp38-win32.whl", hash = "sha256:33a1aeef4b1927431d1be780e87b641e322b88d654203a9e9d93f218ee359e61"}, + {file = "protobuf-4.25.2-cp38-cp38-win_amd64.whl", hash = "sha256:47f3de503fe7c1245f6f03bea7e8d3ec11c6c4a2ea9ef910e3221c8a15516d62"}, + {file = "protobuf-4.25.2-cp39-cp39-win32.whl", hash = "sha256:5e5c933b4c30a988b52e0b7c02641760a5ba046edc5e43d3b94a74c9fc57c1b3"}, + {file = "protobuf-4.25.2-cp39-cp39-win_amd64.whl", hash = "sha256:d66a769b8d687df9024f2985d5137a337f957a0916cf5464d1513eee96a63ff0"}, + {file = "protobuf-4.25.2-py3-none-any.whl", hash = "sha256:a8b7a98d4ce823303145bf3c1a8bdb0f2f4642a414b196f04ad9853ed0c8f830"}, + {file = "protobuf-4.25.2.tar.gz", hash = "sha256:fe599e175cb347efc8ee524bcd4b902d11f7262c0e569ececcb89995c15f0a5e"}, +] +psutil = [ + {file = "psutil-5.9.7-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:0bd41bf2d1463dfa535942b2a8f0e958acf6607ac0be52265ab31f7923bcd5e6"}, + {file = "psutil-5.9.7-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:5794944462509e49d4d458f4dbfb92c47539e7d8d15c796f141f474010084056"}, + {file = "psutil-5.9.7-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:fe361f743cb3389b8efda21980d93eb55c1f1e3898269bc9a2a1d0bb7b1f6508"}, + {file = "psutil-5.9.7-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:e469990e28f1ad738f65a42dcfc17adaed9d0f325d55047593cb9033a0ab63df"}, + {file = "psutil-5.9.7-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:3c4747a3e2ead1589e647e64aad601981f01b68f9398ddf94d01e3dc0d1e57c7"}, + {file = "psutil-5.9.7-cp27-none-win32.whl", hash = "sha256:1d4bc4a0148fdd7fd8f38e0498639ae128e64538faa507df25a20f8f7fb2341c"}, + {file = "psutil-5.9.7-cp27-none-win_amd64.whl", hash = "sha256:4c03362e280d06bbbfcd52f29acd79c733e0af33d707c54255d21029b8b32ba6"}, + {file = "psutil-5.9.7-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ea36cc62e69a13ec52b2f625c27527f6e4479bca2b340b7a452af55b34fcbe2e"}, + {file = "psutil-5.9.7-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1132704b876e58d277168cd729d64750633d5ff0183acf5b3c986b8466cd0284"}, + {file = "psutil-5.9.7-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe8b7f07948f1304497ce4f4684881250cd859b16d06a1dc4d7941eeb6233bfe"}, + {file = "psutil-5.9.7-cp36-cp36m-win32.whl", hash = "sha256:b27f8fdb190c8c03914f908a4555159327d7481dac2f01008d483137ef3311a9"}, + {file = "psutil-5.9.7-cp36-cp36m-win_amd64.whl", hash = "sha256:44969859757f4d8f2a9bd5b76eba8c3099a2c8cf3992ff62144061e39ba8568e"}, + {file = "psutil-5.9.7-cp37-abi3-win32.whl", hash = "sha256:c727ca5a9b2dd5193b8644b9f0c883d54f1248310023b5ad3e92036c5e2ada68"}, + {file = "psutil-5.9.7-cp37-abi3-win_amd64.whl", hash = "sha256:f37f87e4d73b79e6c5e749440c3113b81d1ee7d26f21c19c47371ddea834f414"}, + {file = "psutil-5.9.7-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:032f4f2c909818c86cea4fe2cc407f1c0f0cde8e6c6d702b28b8ce0c0d143340"}, + {file = "psutil-5.9.7.tar.gz", hash = "sha256:3f02134e82cfb5d089fddf20bb2e03fd5cd52395321d1c8458a9e58500ff417c"}, +] 
+ptyprocess = [ + {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, + {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, +] +pure-eval = [ + {file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"}, + {file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"}, +] +py = [ + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, +] +pyasn1 = [ + {file = "pyasn1-0.5.1-py2.py3-none-any.whl", hash = "sha256:4439847c58d40b1d0a573d07e3856e95333f1976294494c325775aeca506eb58"}, + {file = "pyasn1-0.5.1.tar.gz", hash = "sha256:6d391a96e59b23130a5cfa74d6fd7f388dbbe26cc8f1edf39fdddf08d9d6676c"}, +] +pyasn1-modules = [ + {file = "pyasn1_modules-0.3.0-py2.py3-none-any.whl", hash = "sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d"}, + {file = "pyasn1_modules-0.3.0.tar.gz", hash = "sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c"}, +] +pycodestyle = [ + {file = "pycodestyle-2.11.1-py2.py3-none-any.whl", hash = "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67"}, + {file = "pycodestyle-2.11.1.tar.gz", hash = "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"}, +] +pycparser = [ + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, +] +pydantic = [ + {file = "pydantic-1.10.13-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:efff03cc7a4f29d9009d1c96ceb1e7a70a65cfe86e89d34e4a5f2ab1e5693737"}, + {file = "pydantic-1.10.13-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3ecea2b9d80e5333303eeb77e180b90e95eea8f765d08c3d278cd56b00345d01"}, + {file = "pydantic-1.10.13-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1740068fd8e2ef6eb27a20e5651df000978edce6da6803c2bef0bc74540f9548"}, + {file = "pydantic-1.10.13-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84bafe2e60b5e78bc64a2941b4c071a4b7404c5c907f5f5a99b0139781e69ed8"}, + {file = "pydantic-1.10.13-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bc0898c12f8e9c97f6cd44c0ed70d55749eaf783716896960b4ecce2edfd2d69"}, + {file = "pydantic-1.10.13-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:654db58ae399fe6434e55325a2c3e959836bd17a6f6a0b6ca8107ea0571d2e17"}, + {file = "pydantic-1.10.13-cp310-cp310-win_amd64.whl", hash = "sha256:75ac15385a3534d887a99c713aa3da88a30fbd6204a5cd0dc4dab3d770b9bd2f"}, + {file = "pydantic-1.10.13-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c553f6a156deb868ba38a23cf0df886c63492e9257f60a79c0fd8e7173537653"}, + {file = "pydantic-1.10.13-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5e08865bc6464df8c7d61439ef4439829e3ab62ab1669cddea8dd00cd74b9ffe"}, + {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e31647d85a2013d926ce60b84f9dd5300d44535a9941fe825dc349ae1f760df9"}, + {file = 
"pydantic-1.10.13-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:210ce042e8f6f7c01168b2d84d4c9eb2b009fe7bf572c2266e235edf14bacd80"}, + {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8ae5dd6b721459bfa30805f4c25880e0dd78fc5b5879f9f7a692196ddcb5a580"}, + {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f8e81fc5fb17dae698f52bdd1c4f18b6ca674d7068242b2aff075f588301bbb0"}, + {file = "pydantic-1.10.13-cp311-cp311-win_amd64.whl", hash = "sha256:61d9dce220447fb74f45e73d7ff3b530e25db30192ad8d425166d43c5deb6df0"}, + {file = "pydantic-1.10.13-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4b03e42ec20286f052490423682016fd80fda830d8e4119f8ab13ec7464c0132"}, + {file = "pydantic-1.10.13-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f59ef915cac80275245824e9d771ee939133be38215555e9dc90c6cb148aaeb5"}, + {file = "pydantic-1.10.13-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a1f9f747851338933942db7af7b6ee8268568ef2ed86c4185c6ef4402e80ba8"}, + {file = "pydantic-1.10.13-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:97cce3ae7341f7620a0ba5ef6cf043975cd9d2b81f3aa5f4ea37928269bc1b87"}, + {file = "pydantic-1.10.13-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:854223752ba81e3abf663d685f105c64150873cc6f5d0c01d3e3220bcff7d36f"}, + {file = "pydantic-1.10.13-cp37-cp37m-win_amd64.whl", hash = "sha256:b97c1fac8c49be29486df85968682b0afa77e1b809aff74b83081cc115e52f33"}, + {file = "pydantic-1.10.13-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c958d053453a1c4b1c2062b05cd42d9d5c8eb67537b8d5a7e3c3032943ecd261"}, + {file = "pydantic-1.10.13-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c5370a7edaac06daee3af1c8b1192e305bc102abcbf2a92374b5bc793818599"}, + {file = "pydantic-1.10.13-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d6f6e7305244bddb4414ba7094ce910560c907bdfa3501e9db1a7fd7eaea127"}, + {file = "pydantic-1.10.13-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d3a3c792a58e1622667a2837512099eac62490cdfd63bd407993aaf200a4cf1f"}, + {file = "pydantic-1.10.13-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c636925f38b8db208e09d344c7aa4f29a86bb9947495dd6b6d376ad10334fb78"}, + {file = "pydantic-1.10.13-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:678bcf5591b63cc917100dc50ab6caebe597ac67e8c9ccb75e698f66038ea953"}, + {file = "pydantic-1.10.13-cp38-cp38-win_amd64.whl", hash = "sha256:6cf25c1a65c27923a17b3da28a0bdb99f62ee04230c931d83e888012851f4e7f"}, + {file = "pydantic-1.10.13-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8ef467901d7a41fa0ca6db9ae3ec0021e3f657ce2c208e98cd511f3161c762c6"}, + {file = "pydantic-1.10.13-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:968ac42970f57b8344ee08837b62f6ee6f53c33f603547a55571c954a4225691"}, + {file = "pydantic-1.10.13-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9849f031cf8a2f0a928fe885e5a04b08006d6d41876b8bbd2fc68a18f9f2e3fd"}, + {file = "pydantic-1.10.13-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:56e3ff861c3b9c6857579de282ce8baabf443f42ffba355bf070770ed63e11e1"}, + {file = "pydantic-1.10.13-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f00790179497767aae6bcdc36355792c79e7bbb20b145ff449700eb076c5f96"}, + {file = "pydantic-1.10.13-cp39-cp39-musllinux_1_1_x86_64.whl", hash 
= "sha256:75b297827b59bc229cac1a23a2f7a4ac0031068e5be0ce385be1462e7e17a35d"}, + {file = "pydantic-1.10.13-cp39-cp39-win_amd64.whl", hash = "sha256:e70ca129d2053fb8b728ee7d1af8e553a928d7e301a311094b8a0501adc8763d"}, + {file = "pydantic-1.10.13-py3-none-any.whl", hash = "sha256:b87326822e71bd5f313e7d3bfdc77ac3247035ac10b0c0618bd99dcf95b1e687"}, + {file = "pydantic-1.10.13.tar.gz", hash = "sha256:32c8b48dcd3b2ac4e78b0ba4af3a2c2eb6048cb75202f0ea7b34feb740efc340"}, +] +pyflakes = [ + {file = "pyflakes-3.1.0-py2.py3-none-any.whl", hash = "sha256:4132f6d49cb4dae6819e5379898f2b8cce3c5f23994194c24b77d5da2e36f774"}, + {file = "pyflakes-3.1.0.tar.gz", hash = "sha256:a0aae034c444db0071aa077972ba4768d40c830d9539fd45bf4cd3f8f6992efc"}, +] +pygments = [ + {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, + {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, +] +pygsheets = [ + {file = "pygsheets-2.0.6-py3-none-any.whl", hash = "sha256:3338c2eb8990fdee9f463b42a370ec0870c118d607d775471a6dfb8b08f6cd87"}, + {file = "pygsheets-2.0.6.tar.gz", hash = "sha256:bff46c812e99f9b8b81a09b456581365281c797620ec08530b0d0e48fa9299e2"}, +] +pylint = [ + {file = "pylint-2.17.7-py3-none-any.whl", hash = "sha256:27a8d4c7ddc8c2f8c18aa0050148f89ffc09838142193fdbe98f172781a3ff87"}, + {file = "pylint-2.17.7.tar.gz", hash = "sha256:f4fcac7ae74cfe36bc8451e931d8438e4a476c20314b1101c458ad0f05191fad"}, +] +pyopenssl = [ + {file = "pyOpenSSL-23.3.0-py3-none-any.whl", hash = "sha256:6756834481d9ed5470f4a9393455154bc92fe7a64b7bc6ee2c804e78c52099b2"}, + {file = "pyOpenSSL-23.3.0.tar.gz", hash = "sha256:6b2cba5cc46e822750ec3e5a81ee12819850b11303630d575e98108a079c2b12"}, +] +pyparsing = [ + {file = "pyparsing-3.1.1-py3-none-any.whl", hash = "sha256:32c7c0b711493c72ff18a981d24f28aaf9c1fb7ed5e9667c9e84e3db623bdbfb"}, + {file = "pyparsing-3.1.1.tar.gz", hash = "sha256:ede28a1a32462f5a9705e07aea48001a08f7cf81a021585011deba701581a0db"}, +] +pytest = [ + {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, + {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, +] +pytest-cov = [ + {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, + {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, +] +pytest-mock = [ + {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"}, + {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"}, +] +pytest-rerunfailures = [ + {file = "pytest-rerunfailures-12.0.tar.gz", hash = "sha256:784f462fa87fe9bdf781d0027d856b47a4bfe6c12af108f6bd887057a917b48e"}, + {file = "pytest_rerunfailures-12.0-py3-none-any.whl", hash = "sha256:9a1afd04e21b8177faf08a9bbbf44de7a0fe3fc29f8ddbe83b9684bd5f8f92a9"}, +] +pytest-xdist = [ + {file = "pytest-xdist-3.5.0.tar.gz", hash = "sha256:cbb36f3d67e0c478baa57fa4edc8843887e0f6cfc42d677530a36d7472b32d8a"}, + {file = "pytest_xdist-3.5.0-py3-none-any.whl", hash = "sha256:d075629c7e00b611df89f490a5063944bee7a4362a5ff11c7cc7824a03dfce24"}, +] +python-dateutil = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = 
"sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] +python-dotenv = [ + {file = "python-dotenv-0.21.1.tar.gz", hash = "sha256:1c93de8f636cde3ce377292818d0e440b6e45a82f215c3744979151fa8151c49"}, + {file = "python_dotenv-0.21.1-py3-none-any.whl", hash = "sha256:41e12e0318bebc859fcc4d97d4db8d20ad21721a6aa5047dd59f090391cb549a"}, +] +python-json-logger = [ + {file = "python-json-logger-2.0.7.tar.gz", hash = "sha256:23e7ec02d34237c5aa1e29a070193a4ea87583bb4e7f8fd06d3de8264c4b2e1c"}, + {file = "python_json_logger-2.0.7-py3-none-any.whl", hash = "sha256:f380b826a991ebbe3de4d897aeec42760035ac760345e57b812938dc8b35e2bd"}, +] +pytz = [ + {file = "pytz-2023.3.post1-py2.py3-none-any.whl", hash = "sha256:ce42d816b81b68506614c11e8937d3aa9e41007ceb50bfdcb0749b921bf646c7"}, + {file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"}, +] +pywin32 = [ + {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, + {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"}, + {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"}, + {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"}, + {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"}, + {file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"}, + {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"}, + {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"}, + {file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"}, + {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"}, + {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"}, + {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"}, + {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"}, + {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, +] +pywin32-ctypes = [ + {file = "pywin32-ctypes-0.2.2.tar.gz", hash = "sha256:3426e063bdd5fd4df74a14fa3cf80a0b42845a87e1d1e81f6549f9daec593a60"}, + {file = "pywin32_ctypes-0.2.2-py3-none-any.whl", hash = "sha256:bf490a1a709baf35d688fe0ecf980ed4de11d2b3e37b51e5442587a75d9957e7"}, +] +pywinpty = [ + {file = "pywinpty-2.0.12-cp310-none-win_amd64.whl", hash = "sha256:21319cd1d7c8844fb2c970fb3a55a3db5543f112ff9cfcd623746b9c47501575"}, + {file = "pywinpty-2.0.12-cp311-none-win_amd64.whl", hash = "sha256:853985a8f48f4731a716653170cd735da36ffbdc79dcb4c7b7140bce11d8c722"}, + {file = "pywinpty-2.0.12-cp312-none-win_amd64.whl", hash = 
"sha256:1617b729999eb6713590e17665052b1a6ae0ad76ee31e60b444147c5b6a35dca"}, + {file = "pywinpty-2.0.12-cp38-none-win_amd64.whl", hash = "sha256:189380469ca143d06e19e19ff3fba0fcefe8b4a8cc942140a6b863aed7eebb2d"}, + {file = "pywinpty-2.0.12-cp39-none-win_amd64.whl", hash = "sha256:7520575b6546db23e693cbd865db2764097bd6d4ef5dc18c92555904cd62c3d4"}, + {file = "pywinpty-2.0.12.tar.gz", hash = "sha256:8197de460ae8ebb7f5d1701dfa1b5df45b157bb832e92acba316305e18ca00dd"}, +] +pyyaml = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = 
"PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] +pyzmq = [ + {file = "pyzmq-25.1.2-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:e624c789359f1a16f83f35e2c705d07663ff2b4d4479bad35621178d8f0f6ea4"}, + {file = "pyzmq-25.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:49151b0efece79f6a79d41a461d78535356136ee70084a1c22532fc6383f4ad0"}, + {file = "pyzmq-25.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9a5f194cf730f2b24d6af1f833c14c10f41023da46a7f736f48b6d35061e76e"}, + {file = "pyzmq-25.1.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:faf79a302f834d9e8304fafdc11d0d042266667ac45209afa57e5efc998e3872"}, + {file = "pyzmq-25.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f51a7b4ead28d3fca8dda53216314a553b0f7a91ee8fc46a72b402a78c3e43d"}, + {file = "pyzmq-25.1.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:0ddd6d71d4ef17ba5a87becf7ddf01b371eaba553c603477679ae817a8d84d75"}, + {file = "pyzmq-25.1.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:246747b88917e4867e2367b005fc8eefbb4a54b7db363d6c92f89d69abfff4b6"}, + {file = "pyzmq-25.1.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:00c48ae2fd81e2a50c3485de1b9d5c7c57cd85dc8ec55683eac16846e57ac979"}, + {file = "pyzmq-25.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5a68d491fc20762b630e5db2191dd07ff89834086740f70e978bb2ef2668be08"}, + {file = "pyzmq-25.1.2-cp310-cp310-win32.whl", hash = "sha256:09dfe949e83087da88c4a76767df04b22304a682d6154de2c572625c62ad6886"}, + {file = "pyzmq-25.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:fa99973d2ed20417744fca0073390ad65ce225b546febb0580358e36aa90dba6"}, + {file = "pyzmq-25.1.2-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:82544e0e2d0c1811482d37eef297020a040c32e0687c1f6fc23a75b75db8062c"}, + {file = "pyzmq-25.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:01171fc48542348cd1a360a4b6c3e7d8f46cdcf53a8d40f84db6707a6768acc1"}, + {file = "pyzmq-25.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc69c96735ab501419c432110016329bf0dea8898ce16fab97c6d9106dc0b348"}, + {file = "pyzmq-25.1.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3e124e6b1dd3dfbeb695435dff0e383256655bb18082e094a8dd1f6293114642"}, + {file = "pyzmq-25.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7598d2ba821caa37a0f9d54c25164a4fa351ce019d64d0b44b45540950458840"}, + {file = "pyzmq-25.1.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d1299d7e964c13607efd148ca1f07dcbf27c3ab9e125d1d0ae1d580a1682399d"}, + {file = "pyzmq-25.1.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4e6f689880d5ad87918430957297c975203a082d9a036cc426648fcbedae769b"}, + {file = "pyzmq-25.1.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cc69949484171cc961e6ecd4a8911b9ce7a0d1f738fcae717177c231bf77437b"}, + {file = "pyzmq-25.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9880078f683466b7f567b8624bfc16cad65077be046b6e8abb53bed4eeb82dd3"}, + {file = "pyzmq-25.1.2-cp311-cp311-win32.whl", hash = "sha256:4e5837af3e5aaa99a091302df5ee001149baff06ad22b722d34e30df5f0d9097"}, + {file = "pyzmq-25.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:25c2dbb97d38b5ac9fd15586e048ec5eb1e38f3d47fe7d92167b0c77bb3584e9"}, + {file = "pyzmq-25.1.2-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:11e70516688190e9c2db14fcf93c04192b02d457b582a1f6190b154691b4c93a"}, + {file = "pyzmq-25.1.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:313c3794d650d1fccaaab2df942af9f2c01d6217c846177cfcbc693c7410839e"}, + {file = "pyzmq-25.1.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b3cbba2f47062b85fe0ef9de5b987612140a9ba3a9c6d2543c6dec9f7c2ab27"}, + {file = "pyzmq-25.1.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fc31baa0c32a2ca660784d5af3b9487e13b61b3032cb01a115fce6588e1bed30"}, + {file = "pyzmq-25.1.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02c9087b109070c5ab0b383079fa1b5f797f8d43e9a66c07a4b8b8bdecfd88ee"}, + {file = 
"pyzmq-25.1.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:f8429b17cbb746c3e043cb986328da023657e79d5ed258b711c06a70c2ea7537"}, + {file = "pyzmq-25.1.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5074adeacede5f810b7ef39607ee59d94e948b4fd954495bdb072f8c54558181"}, + {file = "pyzmq-25.1.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:7ae8f354b895cbd85212da245f1a5ad8159e7840e37d78b476bb4f4c3f32a9fe"}, + {file = "pyzmq-25.1.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b264bf2cc96b5bc43ce0e852be995e400376bd87ceb363822e2cb1964fcdc737"}, + {file = "pyzmq-25.1.2-cp312-cp312-win32.whl", hash = "sha256:02bbc1a87b76e04fd780b45e7f695471ae6de747769e540da909173d50ff8e2d"}, + {file = "pyzmq-25.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:ced111c2e81506abd1dc142e6cd7b68dd53747b3b7ae5edbea4578c5eeff96b7"}, + {file = "pyzmq-25.1.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:7b6d09a8962a91151f0976008eb7b29b433a560fde056ec7a3db9ec8f1075438"}, + {file = "pyzmq-25.1.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:967668420f36878a3c9ecb5ab33c9d0ff8d054f9c0233d995a6d25b0e95e1b6b"}, + {file = "pyzmq-25.1.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5edac3f57c7ddaacdb4d40f6ef2f9e299471fc38d112f4bc6d60ab9365445fb0"}, + {file = "pyzmq-25.1.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:0dabfb10ef897f3b7e101cacba1437bd3a5032ee667b7ead32bbcdd1a8422fe7"}, + {file = "pyzmq-25.1.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:2c6441e0398c2baacfe5ba30c937d274cfc2dc5b55e82e3749e333aabffde561"}, + {file = "pyzmq-25.1.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:16b726c1f6c2e7625706549f9dbe9b06004dfbec30dbed4bf50cbdfc73e5b32a"}, + {file = "pyzmq-25.1.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:a86c2dd76ef71a773e70551a07318b8e52379f58dafa7ae1e0a4be78efd1ff16"}, + {file = "pyzmq-25.1.2-cp36-cp36m-win32.whl", hash = "sha256:359f7f74b5d3c65dae137f33eb2bcfa7ad9ebefd1cab85c935f063f1dbb245cc"}, + {file = "pyzmq-25.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:55875492f820d0eb3417b51d96fea549cde77893ae3790fd25491c5754ea2f68"}, + {file = "pyzmq-25.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b8c8a419dfb02e91b453615c69568442e897aaf77561ee0064d789705ff37a92"}, + {file = "pyzmq-25.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8807c87fa893527ae8a524c15fc505d9950d5e856f03dae5921b5e9aa3b8783b"}, + {file = "pyzmq-25.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5e319ed7d6b8f5fad9b76daa0a68497bc6f129858ad956331a5835785761e003"}, + {file = "pyzmq-25.1.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:3c53687dde4d9d473c587ae80cc328e5b102b517447456184b485587ebd18b62"}, + {file = "pyzmq-25.1.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:9add2e5b33d2cd765ad96d5eb734a5e795a0755f7fc49aa04f76d7ddda73fd70"}, + {file = "pyzmq-25.1.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:e690145a8c0c273c28d3b89d6fb32c45e0d9605b2293c10e650265bf5c11cfec"}, + {file = "pyzmq-25.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:00a06faa7165634f0cac1abb27e54d7a0b3b44eb9994530b8ec73cf52e15353b"}, + {file = "pyzmq-25.1.2-cp37-cp37m-win32.whl", hash = "sha256:0f97bc2f1f13cb16905a5f3e1fbdf100e712d841482b2237484360f8bc4cb3d7"}, + {file = "pyzmq-25.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6cc0020b74b2e410287e5942e1e10886ff81ac77789eb20bec13f7ae681f0fdd"}, + {file = 
"pyzmq-25.1.2-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:bef02cfcbded83473bdd86dd8d3729cd82b2e569b75844fb4ea08fee3c26ae41"}, + {file = "pyzmq-25.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e10a4b5a4b1192d74853cc71a5e9fd022594573926c2a3a4802020360aa719d8"}, + {file = "pyzmq-25.1.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8c5f80e578427d4695adac6fdf4370c14a2feafdc8cb35549c219b90652536ae"}, + {file = "pyzmq-25.1.2-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5dde6751e857910c1339890f3524de74007958557593b9e7e8c5f01cd919f8a7"}, + {file = "pyzmq-25.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea1608dd169da230a0ad602d5b1ebd39807ac96cae1845c3ceed39af08a5c6df"}, + {file = "pyzmq-25.1.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0f513130c4c361201da9bc69df25a086487250e16b5571ead521b31ff6b02220"}, + {file = "pyzmq-25.1.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:019744b99da30330798bb37df33549d59d380c78e516e3bab9c9b84f87a9592f"}, + {file = "pyzmq-25.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2e2713ef44be5d52dd8b8e2023d706bf66cb22072e97fc71b168e01d25192755"}, + {file = "pyzmq-25.1.2-cp38-cp38-win32.whl", hash = "sha256:07cd61a20a535524906595e09344505a9bd46f1da7a07e504b315d41cd42eb07"}, + {file = "pyzmq-25.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb7e49a17fb8c77d3119d41a4523e432eb0c6932187c37deb6fbb00cc3028088"}, + {file = "pyzmq-25.1.2-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:94504ff66f278ab4b7e03e4cba7e7e400cb73bfa9d3d71f58d8972a8dc67e7a6"}, + {file = "pyzmq-25.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6dd0d50bbf9dca1d0bdea219ae6b40f713a3fb477c06ca3714f208fd69e16fd8"}, + {file = "pyzmq-25.1.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:004ff469d21e86f0ef0369717351073e0e577428e514c47c8480770d5e24a565"}, + {file = "pyzmq-25.1.2-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c0b5ca88a8928147b7b1e2dfa09f3b6c256bc1135a1338536cbc9ea13d3b7add"}, + {file = "pyzmq-25.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c9a79f1d2495b167119d02be7448bfba57fad2a4207c4f68abc0bab4b92925b"}, + {file = "pyzmq-25.1.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:518efd91c3d8ac9f9b4f7dd0e2b7b8bf1a4fe82a308009016b07eaa48681af82"}, + {file = "pyzmq-25.1.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:1ec23bd7b3a893ae676d0e54ad47d18064e6c5ae1fadc2f195143fb27373f7f6"}, + {file = "pyzmq-25.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db36c27baed588a5a8346b971477b718fdc66cf5b80cbfbd914b4d6d355e44e2"}, + {file = "pyzmq-25.1.2-cp39-cp39-win32.whl", hash = "sha256:39b1067f13aba39d794a24761e385e2eddc26295826530a8c7b6c6c341584289"}, + {file = "pyzmq-25.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:8e9f3fabc445d0ce320ea2c59a75fe3ea591fdbdeebec5db6de530dd4b09412e"}, + {file = "pyzmq-25.1.2-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a8c1d566344aee826b74e472e16edae0a02e2a044f14f7c24e123002dcff1c05"}, + {file = "pyzmq-25.1.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:759cfd391a0996345ba94b6a5110fca9c557ad4166d86a6e81ea526c376a01e8"}, + {file = "pyzmq-25.1.2-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c61e346ac34b74028ede1c6b4bcecf649d69b707b3ff9dc0fab453821b04d1e"}, + {file = "pyzmq-25.1.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:4cb8fc1f8d69b411b8ec0b5f1ffbcaf14c1db95b6bccea21d83610987435f1a4"}, + {file = "pyzmq-25.1.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:3c00c9b7d1ca8165c610437ca0c92e7b5607b2f9076f4eb4b095c85d6e680a1d"}, + {file = "pyzmq-25.1.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:df0c7a16ebb94452d2909b9a7b3337940e9a87a824c4fc1c7c36bb4404cb0cde"}, + {file = "pyzmq-25.1.2-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:45999e7f7ed5c390f2e87ece7f6c56bf979fb213550229e711e45ecc7d42ccb8"}, + {file = "pyzmq-25.1.2-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ac170e9e048b40c605358667aca3d94e98f604a18c44bdb4c102e67070f3ac9b"}, + {file = "pyzmq-25.1.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1b604734bec94f05f81b360a272fc824334267426ae9905ff32dc2be433ab96"}, + {file = "pyzmq-25.1.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:a793ac733e3d895d96f865f1806f160696422554e46d30105807fdc9841b9f7d"}, + {file = "pyzmq-25.1.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0806175f2ae5ad4b835ecd87f5f85583316b69f17e97786f7443baaf54b9bb98"}, + {file = "pyzmq-25.1.2-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ef12e259e7bc317c7597d4f6ef59b97b913e162d83b421dd0db3d6410f17a244"}, + {file = "pyzmq-25.1.2-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ea253b368eb41116011add00f8d5726762320b1bda892f744c91997b65754d73"}, + {file = "pyzmq-25.1.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b9b1f2ad6498445a941d9a4fee096d387fee436e45cc660e72e768d3d8ee611"}, + {file = "pyzmq-25.1.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:8b14c75979ce932c53b79976a395cb2a8cd3aaf14aef75e8c2cb55a330b9b49d"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:889370d5174a741a62566c003ee8ddba4b04c3f09a97b8000092b7ca83ec9c49"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9a18fff090441a40ffda8a7f4f18f03dc56ae73f148f1832e109f9bffa85df15"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99a6b36f95c98839ad98f8c553d8507644c880cf1e0a57fe5e3a3f3969040882"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4345c9a27f4310afbb9c01750e9461ff33d6fb74cd2456b107525bbeebcb5be3"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3516e0b6224cf6e43e341d56da15fd33bdc37fa0c06af4f029f7d7dfceceabbc"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:146b9b1f29ead41255387fb07be56dc29639262c0f7344f570eecdcd8d683314"}, + {file = "pyzmq-25.1.2.tar.gz", hash = "sha256:93f1aa311e8bb912e34f004cf186407a4e90eec4f0ecc0efd26056bf7eda0226"}, +] +rdflib = [ + {file = "rdflib-6.3.2-py3-none-any.whl", hash = "sha256:36b4e74a32aa1e4fa7b8719876fb192f19ecd45ff932ea5ebbd2e417a0247e63"}, + {file = "rdflib-6.3.2.tar.gz", hash = "sha256:72af591ff704f4caacea7ecc0c5a9056b8553e0489dd4f35a9bc52dbd41522e0"}, +] +referencing = [ + {file = "referencing-0.32.1-py3-none-any.whl", hash = "sha256:7e4dc12271d8e15612bfe35792f5ea1c40970dadf8624602e33db2758f7ee554"}, + {file = "referencing-0.32.1.tar.gz", hash = "sha256:3c57da0513e9563eb7e203ebe9bb3a1b509b042016433bd1e45a2853466c3dd3"}, +] +regex = [ + {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_universal2.whl", hash = 
"sha256:0694219a1d54336fd0445ea382d49d36882415c0134ee1e8332afd1529f0baa5"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b014333bd0217ad3d54c143de9d4b9a3ca1c5a29a6d0d554952ea071cff0f1f8"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d865984b3f71f6d0af64d0d88f5733521698f6c16f445bb09ce746c92c97c586"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e0eabac536b4cc7f57a5f3d095bfa557860ab912f25965e08fe1545e2ed8b4c"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c25a8ad70e716f96e13a637802813f65d8a6760ef48672aa3502f4c24ea8b400"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9b6d73353f777630626f403b0652055ebfe8ff142a44ec2cf18ae470395766e"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9cc99d6946d750eb75827cb53c4371b8b0fe89c733a94b1573c9dd16ea6c9e4"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88d1f7bef20c721359d8675f7d9f8e414ec5003d8f642fdfd8087777ff7f94b5"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cb3fe77aec8f1995611f966d0c656fdce398317f850d0e6e7aebdfe61f40e1cd"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7aa47c2e9ea33a4a2a05f40fcd3ea36d73853a2aae7b4feab6fc85f8bf2c9704"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:df26481f0c7a3f8739fecb3e81bc9da3fcfae34d6c094563b9d4670b047312e1"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c40281f7d70baf6e0db0c2f7472b31609f5bc2748fe7275ea65a0b4601d9b392"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:d94a1db462d5690ebf6ae86d11c5e420042b9898af5dcf278bd97d6bda065423"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ba1b30765a55acf15dce3f364e4928b80858fa8f979ad41f862358939bdd1f2f"}, + {file = "regex-2023.12.25-cp310-cp310-win32.whl", hash = "sha256:150c39f5b964e4d7dba46a7962a088fbc91f06e606f023ce57bb347a3b2d4630"}, + {file = "regex-2023.12.25-cp310-cp310-win_amd64.whl", hash = "sha256:09da66917262d9481c719599116c7dc0c321ffcec4b1f510c4f8a066f8768105"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1b9d811f72210fa9306aeb88385b8f8bcef0dfbf3873410413c00aa94c56c2b6"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d902a43085a308cef32c0d3aea962524b725403fd9373dea18110904003bac97"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d166eafc19f4718df38887b2bbe1467a4f74a9830e8605089ea7a30dd4da8887"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7ad32824b7f02bb3c9f80306d405a1d9b7bb89362d68b3c5a9be53836caebdb"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:636ba0a77de609d6510235b7f0e77ec494d2657108f777e8765efc060094c98c"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fda75704357805eb953a3ee15a2b240694a9a514548cd49b3c5124b4e2ad01b"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:f72cbae7f6b01591f90814250e636065850c5926751af02bb48da94dfced7baa"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db2a0b1857f18b11e3b0e54ddfefc96af46b0896fb678c85f63fb8c37518b3e7"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7502534e55c7c36c0978c91ba6f61703faf7ce733715ca48f499d3dbbd7657e0"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e8c7e08bb566de4faaf11984af13f6bcf6a08f327b13631d41d62592681d24fe"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:283fc8eed679758de38fe493b7d7d84a198b558942b03f017b1f94dda8efae80"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:f44dd4d68697559d007462b0a3a1d9acd61d97072b71f6d1968daef26bc744bd"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:67d3ccfc590e5e7197750fcb3a2915b416a53e2de847a728cfa60141054123d4"}, + {file = "regex-2023.12.25-cp311-cp311-win32.whl", hash = "sha256:68191f80a9bad283432385961d9efe09d783bcd36ed35a60fb1ff3f1ec2efe87"}, + {file = "regex-2023.12.25-cp311-cp311-win_amd64.whl", hash = "sha256:7d2af3f6b8419661a0c421584cfe8aaec1c0e435ce7e47ee2a97e344b98f794f"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8a0ccf52bb37d1a700375a6b395bff5dd15c50acb745f7db30415bae3c2b0715"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c3c4a78615b7762740531c27cf46e2f388d8d727d0c0c739e72048beb26c8a9d"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ad83e7545b4ab69216cef4cc47e344d19622e28aabec61574b20257c65466d6a"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7a635871143661feccce3979e1727c4e094f2bdfd3ec4b90dfd4f16f571a87a"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d498eea3f581fbe1b34b59c697512a8baef88212f92e4c7830fcc1499f5b45a5"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43f7cd5754d02a56ae4ebb91b33461dc67be8e3e0153f593c509e21d219c5060"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51f4b32f793812714fd5307222a7f77e739b9bc566dc94a18126aba3b92b98a3"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba99d8077424501b9616b43a2d208095746fb1284fc5ba490139651f971d39d9"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4bfc2b16e3ba8850e0e262467275dd4d62f0d045e0e9eda2bc65078c0110a11f"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8c2c19dae8a3eb0ea45a8448356ed561be843b13cbc34b840922ddf565498c1c"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:60080bb3d8617d96f0fb7e19796384cc2467447ef1c491694850ebd3670bc457"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b77e27b79448e34c2c51c09836033056a0547aa360c45eeeb67803da7b0eedaf"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:518440c991f514331f4850a63560321f833979d145d7d81186dbe2f19e27ae3d"}, + {file = "regex-2023.12.25-cp312-cp312-win32.whl", hash = "sha256:e2610e9406d3b0073636a3a2e80db05a02f0c3169b5632022b4e81c0364bcda5"}, + {file = 
"regex-2023.12.25-cp312-cp312-win_amd64.whl", hash = "sha256:cc37b9aeebab425f11f27e5e9e6cf580be7206c6582a64467a14dda211abc232"}, + {file = "regex-2023.12.25-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:da695d75ac97cb1cd725adac136d25ca687da4536154cdc2815f576e4da11c69"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d126361607b33c4eb7b36debc173bf25d7805847346dd4d99b5499e1fef52bc7"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4719bb05094d7d8563a450cf8738d2e1061420f79cfcc1fa7f0a44744c4d8f73"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5dd58946bce44b53b06d94aa95560d0b243eb2fe64227cba50017a8d8b3cd3e2"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22a86d9fff2009302c440b9d799ef2fe322416d2d58fc124b926aa89365ec482"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2aae8101919e8aa05ecfe6322b278f41ce2994c4a430303c4cd163fef746e04f"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e692296c4cc2873967771345a876bcfc1c547e8dd695c6b89342488b0ea55cd8"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:263ef5cc10979837f243950637fffb06e8daed7f1ac1e39d5910fd29929e489a"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d6f7e255e5fa94642a0724e35406e6cb7001c09d476ab5fce002f652b36d0c39"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:88ad44e220e22b63b0f8f81f007e8abbb92874d8ced66f32571ef8beb0643b2b"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:3a17d3ede18f9cedcbe23d2daa8a2cd6f59fe2bf082c567e43083bba3fb00347"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d15b274f9e15b1a0b7a45d2ac86d1f634d983ca40d6b886721626c47a400bf39"}, + {file = "regex-2023.12.25-cp37-cp37m-win32.whl", hash = "sha256:ed19b3a05ae0c97dd8f75a5d8f21f7723a8c33bbc555da6bbe1f96c470139d3c"}, + {file = "regex-2023.12.25-cp37-cp37m-win_amd64.whl", hash = "sha256:a6d1047952c0b8104a1d371f88f4ab62e6275567d4458c1e26e9627ad489b445"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b43523d7bc2abd757119dbfb38af91b5735eea45537ec6ec3a5ec3f9562a1c53"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:efb2d82f33b2212898f1659fb1c2e9ac30493ac41e4d53123da374c3b5541e64"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b7fca9205b59c1a3d5031f7e64ed627a1074730a51c2a80e97653e3e9fa0d415"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086dd15e9435b393ae06f96ab69ab2d333f5d65cbe65ca5a3ef0ec9564dfe770"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e81469f7d01efed9b53740aedd26085f20d49da65f9c1f41e822a33992cb1590"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:34e4af5b27232f68042aa40a91c3b9bb4da0eeb31b7632e0091afc4310afe6cb"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9852b76ab558e45b20bf1893b59af64a28bd3820b0c2efc80e0a70a4a3ea51c1"}, + {file = 
"regex-2023.12.25-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff100b203092af77d1a5a7abe085b3506b7eaaf9abf65b73b7d6905b6cb76988"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cc038b2d8b1470364b1888a98fd22d616fba2b6309c5b5f181ad4483e0017861"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:094ba386bb5c01e54e14434d4caabf6583334090865b23ef58e0424a6286d3dc"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5cd05d0f57846d8ba4b71d9c00f6f37d6b97d5e5ef8b3c3840426a475c8f70f4"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:9aa1a67bbf0f957bbe096375887b2505f5d8ae16bf04488e8b0f334c36e31360"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:98a2636994f943b871786c9e82bfe7883ecdaba2ef5df54e1450fa9869d1f756"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:37f8e93a81fc5e5bd8db7e10e62dc64261bcd88f8d7e6640aaebe9bc180d9ce2"}, + {file = "regex-2023.12.25-cp38-cp38-win32.whl", hash = "sha256:d78bd484930c1da2b9679290a41cdb25cc127d783768a0369d6b449e72f88beb"}, + {file = "regex-2023.12.25-cp38-cp38-win_amd64.whl", hash = "sha256:b521dcecebc5b978b447f0f69b5b7f3840eac454862270406a39837ffae4e697"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f7bc09bc9c29ebead055bcba136a67378f03d66bf359e87d0f7c759d6d4ffa31"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e14b73607d6231f3cc4622809c196b540a6a44e903bcfad940779c80dffa7be7"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9eda5f7a50141291beda3edd00abc2d4a5b16c29c92daf8d5bd76934150f3edc"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc6bb9aa69aacf0f6032c307da718f61a40cf970849e471254e0e91c56ffca95"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:298dc6354d414bc921581be85695d18912bea163a8b23cac9a2562bbcd5088b1"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f4e475a80ecbd15896a976aa0b386c5525d0ed34d5c600b6d3ebac0a67c7ddf"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531ac6cf22b53e0696f8e1d56ce2396311254eb806111ddd3922c9d937151dae"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22f3470f7524b6da61e2020672df2f3063676aff444db1daa283c2ea4ed259d6"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:89723d2112697feaa320c9d351e5f5e7b841e83f8b143dba8e2d2b5f04e10923"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0ecf44ddf9171cd7566ef1768047f6e66975788258b1c6c6ca78098b95cf9a3d"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:905466ad1702ed4acfd67a902af50b8db1feeb9781436372261808df7a2a7bca"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:4558410b7a5607a645e9804a3e9dd509af12fb72b9825b13791a37cd417d73a5"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:7e316026cc1095f2a3e8cc012822c99f413b702eaa2ca5408a513609488cb62f"}, + {file = 
"regex-2023.12.25-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3b1de218d5375cd6ac4b5493e0b9f3df2be331e86520f23382f216c137913d20"}, + {file = "regex-2023.12.25-cp39-cp39-win32.whl", hash = "sha256:11a963f8e25ab5c61348d090bf1b07f1953929c13bd2309a0662e9ff680763c9"}, + {file = "regex-2023.12.25-cp39-cp39-win_amd64.whl", hash = "sha256:e693e233ac92ba83a87024e1d32b5f9ab15ca55ddd916d878146f4e3406b5c91"}, + {file = "regex-2023.12.25.tar.gz", hash = "sha256:29171aa128da69afdf4bde412d5bedc335f2ca8fcfe4489038577d05f16181e5"}, +] +requests = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] +requests-oauthlib = [ + {file = "requests-oauthlib-1.3.1.tar.gz", hash = "sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a"}, + {file = "requests_oauthlib-1.3.1-py2.py3-none-any.whl", hash = "sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5"}, +] +rfc3339-validator = [ + {file = "rfc3339_validator-0.1.4-py2.py3-none-any.whl", hash = "sha256:24f6ec1eda14ef823da9e36ec7113124b39c04d50a4d3d3a3c2859577e7791fa"}, + {file = "rfc3339_validator-0.1.4.tar.gz", hash = "sha256:138a2abdf93304ad60530167e51d2dfb9549521a836871b88d7f4695d0022f6b"}, +] +rfc3986-validator = [ + {file = "rfc3986_validator-0.1.1-py2.py3-none-any.whl", hash = "sha256:2f235c432ef459970b4306369336b9d5dbdda31b510ca1e327636e01f528bfa9"}, + {file = "rfc3986_validator-0.1.1.tar.gz", hash = "sha256:3d44bde7921b3b9ec3ae4e3adca370438eccebc676456449b145d533b240d055"}, +] +rpds-py = [ + {file = "rpds_py-0.17.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:4128980a14ed805e1b91a7ed551250282a8ddf8201a4e9f8f5b7e6225f54170d"}, + {file = "rpds_py-0.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ff1dcb8e8bc2261a088821b2595ef031c91d499a0c1b031c152d43fe0a6ecec8"}, + {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d65e6b4f1443048eb7e833c2accb4fa7ee67cc7d54f31b4f0555b474758bee55"}, + {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a71169d505af63bb4d20d23a8fbd4c6ce272e7bce6cc31f617152aa784436f29"}, + {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:436474f17733c7dca0fbf096d36ae65277e8645039df12a0fa52445ca494729d"}, + {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:10162fe3f5f47c37ebf6d8ff5a2368508fe22007e3077bf25b9c7d803454d921"}, + {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:720215373a280f78a1814becb1312d4e4d1077b1202a56d2b0815e95ccb99ce9"}, + {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:70fcc6c2906cfa5c6a552ba7ae2ce64b6c32f437d8f3f8eea49925b278a61453"}, + {file = "rpds_py-0.17.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:91e5a8200e65aaac342a791272c564dffcf1281abd635d304d6c4e6b495f29dc"}, + {file = "rpds_py-0.17.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:99f567dae93e10be2daaa896e07513dd4bf9c2ecf0576e0533ac36ba3b1d5394"}, + {file = "rpds_py-0.17.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:24e4900a6643f87058a27320f81336d527ccfe503984528edde4bb660c8c8d59"}, + {file = "rpds_py-0.17.1-cp310-none-win32.whl", hash = 
"sha256:0bfb09bf41fe7c51413f563373e5f537eaa653d7adc4830399d4e9bdc199959d"}, + {file = "rpds_py-0.17.1-cp310-none-win_amd64.whl", hash = "sha256:20de7b7179e2031a04042e85dc463a93a82bc177eeba5ddd13ff746325558aa6"}, + {file = "rpds_py-0.17.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:65dcf105c1943cba45d19207ef51b8bc46d232a381e94dd38719d52d3980015b"}, + {file = "rpds_py-0.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:01f58a7306b64e0a4fe042047dd2b7d411ee82e54240284bab63e325762c1147"}, + {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:071bc28c589b86bc6351a339114fb7a029f5cddbaca34103aa573eba7b482382"}, + {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ae35e8e6801c5ab071b992cb2da958eee76340e6926ec693b5ff7d6381441745"}, + {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149c5cd24f729e3567b56e1795f74577aa3126c14c11e457bec1b1c90d212e38"}, + {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e796051f2070f47230c745d0a77a91088fbee2cc0502e9b796b9c6471983718c"}, + {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60e820ee1004327609b28db8307acc27f5f2e9a0b185b2064c5f23e815f248f8"}, + {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1957a2ab607f9added64478a6982742eb29f109d89d065fa44e01691a20fc20a"}, + {file = "rpds_py-0.17.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8587fd64c2a91c33cdc39d0cebdaf30e79491cc029a37fcd458ba863f8815383"}, + {file = "rpds_py-0.17.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4dc889a9d8a34758d0fcc9ac86adb97bab3fb7f0c4d29794357eb147536483fd"}, + {file = "rpds_py-0.17.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:2953937f83820376b5979318840f3ee47477d94c17b940fe31d9458d79ae7eea"}, + {file = "rpds_py-0.17.1-cp311-none-win32.whl", hash = "sha256:1bfcad3109c1e5ba3cbe2f421614e70439f72897515a96c462ea657261b96518"}, + {file = "rpds_py-0.17.1-cp311-none-win_amd64.whl", hash = "sha256:99da0a4686ada4ed0f778120a0ea8d066de1a0a92ab0d13ae68492a437db78bf"}, + {file = "rpds_py-0.17.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:1dc29db3900cb1bb40353772417800f29c3d078dbc8024fd64655a04ee3c4bdf"}, + {file = "rpds_py-0.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:82ada4a8ed9e82e443fcef87e22a3eed3654dd3adf6e3b3a0deb70f03e86142a"}, + {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d36b2b59e8cc6e576f8f7b671e32f2ff43153f0ad6d0201250a7c07f25d570e"}, + {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3677fcca7fb728c86a78660c7fb1b07b69b281964673f486ae72860e13f512ad"}, + {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:516fb8c77805159e97a689e2f1c80655c7658f5af601c34ffdb916605598cda2"}, + {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df3b6f45ba4515632c5064e35ca7f31d51d13d1479673185ba8f9fefbbed58b9"}, + {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a967dd6afda7715d911c25a6ba1517975acd8d1092b2f326718725461a3d33f9"}, + {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dbbb95e6fc91ea3102505d111b327004d1c4ce98d56a4a02e82cd451f9f57140"}, + {file = 
"rpds_py-0.17.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:02866e060219514940342a1f84303a1ef7a1dad0ac311792fbbe19b521b489d2"}, + {file = "rpds_py-0.17.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:2528ff96d09f12e638695f3a2e0c609c7b84c6df7c5ae9bfeb9252b6fa686253"}, + {file = "rpds_py-0.17.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bd345a13ce06e94c753dab52f8e71e5252aec1e4f8022d24d56decd31e1b9b23"}, + {file = "rpds_py-0.17.1-cp312-none-win32.whl", hash = "sha256:2a792b2e1d3038daa83fa474d559acfd6dc1e3650ee93b2662ddc17dbff20ad1"}, + {file = "rpds_py-0.17.1-cp312-none-win_amd64.whl", hash = "sha256:292f7344a3301802e7c25c53792fae7d1593cb0e50964e7bcdcc5cf533d634e3"}, + {file = "rpds_py-0.17.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:8ffe53e1d8ef2520ebcf0c9fec15bb721da59e8ef283b6ff3079613b1e30513d"}, + {file = "rpds_py-0.17.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4341bd7579611cf50e7b20bb8c2e23512a3dc79de987a1f411cb458ab670eb90"}, + {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f4eb548daf4836e3b2c662033bfbfc551db58d30fd8fe660314f86bf8510b93"}, + {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b686f25377f9c006acbac63f61614416a6317133ab7fafe5de5f7dc8a06d42eb"}, + {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4e21b76075c01d65d0f0f34302b5a7457d95721d5e0667aea65e5bb3ab415c25"}, + {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b86b21b348f7e5485fae740d845c65a880f5d1eda1e063bc59bef92d1f7d0c55"}, + {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f175e95a197f6a4059b50757a3dca33b32b61691bdbd22c29e8a8d21d3914cae"}, + {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1701fc54460ae2e5efc1dd6350eafd7a760f516df8dbe51d4a1c79d69472fbd4"}, + {file = "rpds_py-0.17.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:9051e3d2af8f55b42061603e29e744724cb5f65b128a491446cc029b3e2ea896"}, + {file = "rpds_py-0.17.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:7450dbd659fed6dd41d1a7d47ed767e893ba402af8ae664c157c255ec6067fde"}, + {file = "rpds_py-0.17.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:5a024fa96d541fd7edaa0e9d904601c6445e95a729a2900c5aec6555fe921ed6"}, + {file = "rpds_py-0.17.1-cp38-none-win32.whl", hash = "sha256:da1ead63368c04a9bded7904757dfcae01eba0e0f9bc41d3d7f57ebf1c04015a"}, + {file = "rpds_py-0.17.1-cp38-none-win_amd64.whl", hash = "sha256:841320e1841bb53fada91c9725e766bb25009cfd4144e92298db296fb6c894fb"}, + {file = "rpds_py-0.17.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:f6c43b6f97209e370124baf2bf40bb1e8edc25311a158867eb1c3a5d449ebc7a"}, + {file = "rpds_py-0.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5e7d63ec01fe7c76c2dbb7e972fece45acbb8836e72682bde138e7e039906e2c"}, + {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81038ff87a4e04c22e1d81f947c6ac46f122e0c80460b9006e6517c4d842a6ec"}, + {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:810685321f4a304b2b55577c915bece4c4a06dfe38f6e62d9cc1d6ca8ee86b99"}, + {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:25f071737dae674ca8937a73d0f43f5a52e92c2d178330b4c0bb6ab05586ffa6"}, + {file = 
"rpds_py-0.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa5bfb13f1e89151ade0eb812f7b0d7a4d643406caaad65ce1cbabe0a66d695f"}, + {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfe07308b311a8293a0d5ef4e61411c5c20f682db6b5e73de6c7c8824272c256"}, + {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a000133a90eea274a6f28adc3084643263b1e7c1a5a66eb0a0a7a36aa757ed74"}, + {file = "rpds_py-0.17.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d0e8a6434a3fbf77d11448c9c25b2f25244226cfbec1a5159947cac5b8c5fa4"}, + {file = "rpds_py-0.17.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:efa767c220d94aa4ac3a6dd3aeb986e9f229eaf5bce92d8b1b3018d06bed3772"}, + {file = "rpds_py-0.17.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:dbc56680ecf585a384fbd93cd42bc82668b77cb525343170a2d86dafaed2a84b"}, + {file = "rpds_py-0.17.1-cp39-none-win32.whl", hash = "sha256:270987bc22e7e5a962b1094953ae901395e8c1e1e83ad016c5cfcfff75a15a3f"}, + {file = "rpds_py-0.17.1-cp39-none-win_amd64.whl", hash = "sha256:2a7b2f2f56a16a6d62e55354dd329d929560442bd92e87397b7a9586a32e3e76"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a3264e3e858de4fc601741498215835ff324ff2482fd4e4af61b46512dd7fc83"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:f2f3b28b40fddcb6c1f1f6c88c6f3769cd933fa493ceb79da45968a21dccc920"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9584f8f52010295a4a417221861df9bea4c72d9632562b6e59b3c7b87a1522b7"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c64602e8be701c6cfe42064b71c84ce62ce66ddc6422c15463fd8127db3d8066"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:060f412230d5f19fc8c8b75f315931b408d8ebf56aec33ef4168d1b9e54200b1"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b9412abdf0ba70faa6e2ee6c0cc62a8defb772e78860cef419865917d86c7342"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9737bdaa0ad33d34c0efc718741abaafce62fadae72c8b251df9b0c823c63b22"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9f0e4dc0f17dcea4ab9d13ac5c666b6b5337042b4d8f27e01b70fae41dd65c57"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:1db228102ab9d1ff4c64148c96320d0be7044fa28bd865a9ce628ce98da5973d"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:d8bbd8e56f3ba25a7d0cf980fc42b34028848a53a0e36c9918550e0280b9d0b6"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:be22ae34d68544df293152b7e50895ba70d2a833ad9566932d750d3625918b82"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:bf046179d011e6114daf12a534d874958b039342b347348a78b7cdf0dd9d6041"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:1a746a6d49665058a5896000e8d9d2f1a6acba8a03b389c1e4c06e11e0b7f40d"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0b8bf5b8db49d8fd40f54772a1dcf262e8be0ad2ab0206b5a2ec109c176c0a4"}, + {file = 
"rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f7f4cb1f173385e8a39c29510dd11a78bf44e360fb75610594973f5ea141028b"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7fbd70cb8b54fe745301921b0816c08b6d917593429dfc437fd024b5ba713c58"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9bdf1303df671179eaf2cb41e8515a07fc78d9d00f111eadbe3e14262f59c3d0"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fad059a4bd14c45776600d223ec194e77db6c20255578bb5bcdd7c18fd169361"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3664d126d3388a887db44c2e293f87d500c4184ec43d5d14d2d2babdb4c64cad"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:698ea95a60c8b16b58be9d854c9f993c639f5c214cf9ba782eca53a8789d6b19"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:c3d2010656999b63e628a3c694f23020322b4178c450dc478558a2b6ef3cb9bb"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:938eab7323a736533f015e6069a7d53ef2dcc841e4e533b782c2bfb9fb12d84b"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1e626b365293a2142a62b9a614e1f8e331b28f3ca57b9f05ebbf4cf2a0f0bdc5"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:380e0df2e9d5d5d339803cfc6d183a5442ad7ab3c63c2a0982e8c824566c5ccc"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b760a56e080a826c2e5af09002c1a037382ed21d03134eb6294812dda268c811"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5576ee2f3a309d2bb403ec292d5958ce03953b0e57a11d224c1f134feaf8c40f"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f3c3461ebb4c4f1bbc70b15d20b565759f97a5aaf13af811fcefc892e9197ba"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:637b802f3f069a64436d432117a7e58fab414b4e27a7e81049817ae94de45d8d"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffee088ea9b593cc6160518ba9bd319b5475e5f3e578e4552d63818773c6f56a"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3ac732390d529d8469b831949c78085b034bff67f584559340008d0f6041a049"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:93432e747fb07fa567ad9cc7aaadd6e29710e515aabf939dfbed8046041346c6"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:7b7d9ca34542099b4e185b3c2a2b2eda2e318a7dbde0b0d83357a6d4421b5296"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:0387ce69ba06e43df54e43968090f3626e231e4bc9150e4c3246947567695f68"}, + {file = "rpds_py-0.17.1.tar.gz", hash = "sha256:0210b2668f24c078307260bf88bdac9d6f1093635df5123789bfee4d8d7fc8e7"}, +] +rsa = [ + {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, + {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, +] +ruamel-yaml = [ + {file = "ruamel.yaml-0.17.17-py3-none-any.whl", 
hash = "sha256:9af3ec5d7f8065582f3aa841305465025d0afd26c5fb54e15b964e11838fc74f"}, + {file = "ruamel.yaml-0.17.17.tar.gz", hash = "sha256:9751de4cbb57d4bfbf8fc394e125ed4a2f170fbff3dc3d78abf50be85924f8be"}, +] +ruamel-yaml-clib = [ + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b42169467c42b692c19cf539c38d4602069d8c1505e97b86387fcf7afb766e1d"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:07238db9cbdf8fc1e9de2489a4f68474e70dffcb32232db7c08fa61ca0c7c462"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fff3573c2db359f091e1589c3d7c5fc2f86f5bdb6f24252c2d8e539d4e45f412"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_24_aarch64.whl", hash = "sha256:aa2267c6a303eb483de8d02db2871afb5c5fc15618d894300b88958f729ad74f"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:840f0c7f194986a63d2c2465ca63af8ccbbc90ab1c6001b1978f05119b5e7334"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:024cfe1fc7c7f4e1aff4a81e718109e13409767e4f871443cbff3dba3578203d"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win32.whl", hash = "sha256:c69212f63169ec1cfc9bb44723bf2917cbbd8f6191a00ef3410f5a7fe300722d"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win_amd64.whl", hash = "sha256:cabddb8d8ead485e255fe80429f833172b4cadf99274db39abc080e068cbcc31"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bef08cd86169d9eafb3ccb0a39edb11d8e25f3dae2b28f5c52fd997521133069"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:b16420e621d26fdfa949a8b4b47ade8810c56002f5389970db4ddda51dbff248"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:25c515e350e5b739842fc3228d662413ef28f295791af5e5110b543cf0b57d9b"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_24_aarch64.whl", hash = "sha256:1707814f0d9791df063f8c19bb51b0d1278b8e9a2353abbb676c2f685dee6afe"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:46d378daaac94f454b3a0e3d8d78cafd78a026b1d71443f4966c696b48a6d899"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:09b055c05697b38ecacb7ac50bdab2240bfca1a0c4872b0fd309bb07dc9aa3a9"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win32.whl", hash = "sha256:53a300ed9cea38cf5a2a9b069058137c2ca1ce658a874b79baceb8f892f915a7"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win_amd64.whl", hash = "sha256:c2a72e9109ea74e511e29032f3b670835f8a59bbdc9ce692c5b4ed91ccf1eedb"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:ebc06178e8821efc9692ea7544aa5644217358490145629914d8020042c24aa1"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:edaef1c1200c4b4cb914583150dcaa3bc30e592e907c01117c08b13a07255ec2"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d176b57452ab5b7028ac47e7b3cf644bcfdc8cacfecf7e71759f7f51a59e5c92"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_24_aarch64.whl", hash = "sha256:1dc67314e7e1086c9fdf2680b7b6c2be1c0d8e3a8279f2e993ca2a7545fecf62"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_i686.whl", hash = 
"sha256:3213ece08ea033eb159ac52ae052a4899b56ecc124bb80020d9bbceeb50258e9"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aab7fd643f71d7946f2ee58cc88c9b7bfc97debd71dcc93e03e2d174628e7e2d"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win32.whl", hash = "sha256:5c365d91c88390c8d0a8545df0b5857172824b1c604e867161e6b3d59a827eaa"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win_amd64.whl", hash = "sha256:1758ce7d8e1a29d23de54a16ae867abd370f01b5a69e1a3ba75223eaa3ca1a1b"}, + {file = "ruamel.yaml.clib-0.2.8-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a5aa27bad2bb83670b71683aae140a1f52b0857a2deff56ad3f6c13a017a26ed"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c58ecd827313af6864893e7af0a3bb85fd529f862b6adbefe14643947cfe2942"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_12_0_arm64.whl", hash = "sha256:f481f16baec5290e45aebdc2a5168ebc6d35189ae6fea7a58787613a25f6e875"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_24_aarch64.whl", hash = "sha256:77159f5d5b5c14f7c34073862a6b7d34944075d9f93e681638f6d753606c6ce6"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7f67a1ee819dc4562d444bbafb135832b0b909f81cc90f7aa00260968c9ca1b3"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4ecbf9c3e19f9562c7fdd462e8d18dd902a47ca046a2e64dba80699f0b6c09b7"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:87ea5ff66d8064301a154b3933ae406b0863402a799b16e4a1d24d9fbbcbe0d3"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win32.whl", hash = "sha256:75e1ed13e1f9de23c5607fe6bd1aeaae21e523b32d83bb33918245361e9cc51b"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win_amd64.whl", hash = "sha256:3f215c5daf6a9d7bbed4a0a4f760f3113b10e82ff4c5c44bec20a68c8014f675"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1b617618914cb00bf5c34d4357c37aa15183fa229b24767259657746c9077615"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:a6a9ffd280b71ad062eae53ac1659ad86a17f59a0fdc7699fd9be40525153337"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_24_aarch64.whl", hash = "sha256:305889baa4043a09e5b76f8e2a51d4ffba44259f6b4c72dec8ca56207d9c6fe1"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:700e4ebb569e59e16a976857c8798aee258dceac7c7d6b50cab63e080058df91"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e2b4c44b60eadec492926a7270abb100ef9f72798e18743939bdbf037aab8c28"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e79e5db08739731b0ce4850bed599235d601701d5694c36570a99a0c5ca41a9d"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win32.whl", hash = "sha256:955eae71ac26c1ab35924203fda6220f84dce57d6d7884f189743e2abe3a9fbe"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win_amd64.whl", hash = "sha256:56f4252222c067b4ce51ae12cbac231bce32aee1d33fbfc9d17e5b8d6966c312"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:03d1162b6d1df1caa3a4bd27aa51ce17c9afc2046c31b0ad60a0a96ec22f8001"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:bba64af9fa9cebe325a62fa398760f5c7206b215201b0ec825005f1b18b9bccf"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_24_aarch64.whl", hash = 
"sha256:a1a45e0bb052edf6a1d3a93baef85319733a888363938e1fc9924cb00c8df24c"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:da09ad1c359a728e112d60116f626cc9f29730ff3e0e7db72b9a2dbc2e4beed5"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:184565012b60405d93838167f425713180b949e9d8dd0bbc7b49f074407c5a8b"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a75879bacf2c987c003368cf14bed0ffe99e8e85acfa6c0bfffc21a090f16880"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win32.whl", hash = "sha256:84b554931e932c46f94ab306913ad7e11bba988104c5cff26d90d03f68258cd5"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win_amd64.whl", hash = "sha256:25ac8c08322002b06fa1d49d1646181f0b2c72f5cbc15a85e80b4c30a544bb15"}, + {file = "ruamel.yaml.clib-0.2.8.tar.gz", hash = "sha256:beb2e0404003de9a4cab9753a8805a8fe9320ee6673136ed7f04255fe60bb512"}, +] +schematic-db = [ + {file = "schematic_db-0.0.dev33-py3-none-any.whl", hash = "sha256:9a274b038e5d3f382fd22300350fb4c02e0f147e5846808b324714fb30bd9e75"}, + {file = "schematic_db-0.0.dev33.tar.gz", hash = "sha256:01cadedbfa10915727c0bdf88c9184353db1294d8c941e69a824d16f12bb4701"}, +] +scipy = [ + {file = "scipy-1.11.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bc9a714581f561af0848e6b69947fda0614915f072dfd14142ed1bfe1b806710"}, + {file = "scipy-1.11.4-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:cf00bd2b1b0211888d4dc75656c0412213a8b25e80d73898083f402b50f47e41"}, + {file = "scipy-1.11.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9999c008ccf00e8fbcce1236f85ade5c569d13144f77a1946bef8863e8f6eb4"}, + {file = "scipy-1.11.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:933baf588daa8dc9a92c20a0be32f56d43faf3d1a60ab11b3f08c356430f6e56"}, + {file = "scipy-1.11.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8fce70f39076a5aa62e92e69a7f62349f9574d8405c0a5de6ed3ef72de07f446"}, + {file = "scipy-1.11.4-cp310-cp310-win_amd64.whl", hash = "sha256:6550466fbeec7453d7465e74d4f4b19f905642c89a7525571ee91dd7adabb5a3"}, + {file = "scipy-1.11.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f313b39a7e94f296025e3cffc2c567618174c0b1dde173960cf23808f9fae4be"}, + {file = "scipy-1.11.4-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:1b7c3dca977f30a739e0409fb001056484661cb2541a01aba0bb0029f7b68db8"}, + {file = "scipy-1.11.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00150c5eae7b610c32589dda259eacc7c4f1665aedf25d921907f4d08a951b1c"}, + {file = "scipy-1.11.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:530f9ad26440e85766509dbf78edcfe13ffd0ab7fec2560ee5c36ff74d6269ff"}, + {file = "scipy-1.11.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5e347b14fe01003d3b78e196e84bd3f48ffe4c8a7b8a1afbcb8f5505cb710993"}, + {file = "scipy-1.11.4-cp311-cp311-win_amd64.whl", hash = "sha256:acf8ed278cc03f5aff035e69cb511741e0418681d25fbbb86ca65429c4f4d9cd"}, + {file = "scipy-1.11.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:028eccd22e654b3ea01ee63705681ee79933652b2d8f873e7949898dda6d11b6"}, + {file = "scipy-1.11.4-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:2c6ff6ef9cc27f9b3db93a6f8b38f97387e6e0591600369a297a50a8e96e835d"}, + {file = "scipy-1.11.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b030c6674b9230d37c5c60ab456e2cf12f6784596d15ce8da9365e70896effc4"}, + {file = 
"scipy-1.11.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad669df80528aeca5f557712102538f4f37e503f0c5b9541655016dd0932ca79"}, + {file = "scipy-1.11.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ce7fff2e23ab2cc81ff452a9444c215c28e6305f396b2ba88343a567feec9660"}, + {file = "scipy-1.11.4-cp312-cp312-win_amd64.whl", hash = "sha256:36750b7733d960d7994888f0d148d31ea3017ac15eef664194b4ef68d36a4a97"}, + {file = "scipy-1.11.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6e619aba2df228a9b34718efb023966da781e89dd3d21637b27f2e54db0410d7"}, + {file = "scipy-1.11.4-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:f3cd9e7b3c2c1ec26364856f9fbe78695fe631150f94cd1c22228456404cf1ec"}, + {file = "scipy-1.11.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d10e45a6c50211fe256da61a11c34927c68f277e03138777bdebedd933712fea"}, + {file = "scipy-1.11.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91af76a68eeae0064887a48e25c4e616fa519fa0d38602eda7e0f97d65d57937"}, + {file = "scipy-1.11.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6df1468153a31cf55ed5ed39647279beb9cfb5d3f84369453b49e4b8502394fd"}, + {file = "scipy-1.11.4-cp39-cp39-win_amd64.whl", hash = "sha256:ee410e6de8f88fd5cf6eadd73c135020bfbbbdfcd0f6162c36a7638a1ea8cc65"}, + {file = "scipy-1.11.4.tar.gz", hash = "sha256:90a2b78e7f5733b9de748f589f09225013685f9b218275257f8a8168ededaeaa"}, +] +secretstorage = [ + {file = "SecretStorage-3.3.3-py3-none-any.whl", hash = "sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99"}, + {file = "SecretStorage-3.3.3.tar.gz", hash = "sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77"}, +] +send2trash = [ + {file = "Send2Trash-1.8.2-py3-none-any.whl", hash = "sha256:a384719d99c07ce1eefd6905d2decb6f8b7ed054025bb0e618919f945de4f679"}, + {file = "Send2Trash-1.8.2.tar.gz", hash = "sha256:c132d59fa44b9ca2b1699af5c86f57ce9f4c5eb56629d5d55fbb7a35f84e2312"}, +] +setuptools = [ + {file = "setuptools-66.1.1-py3-none-any.whl", hash = "sha256:6f590d76b713d5de4e49fe4fbca24474469f53c83632d5d0fd056f7ff7e8112b"}, + {file = "setuptools-66.1.1.tar.gz", hash = "sha256:ac4008d396bc9cd983ea483cb7139c0240a07bbc74ffb6232fceffedc6cf03a8"}, +] +six = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] +sniffio = [ + {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, + {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, +] +snowballstemmer = [ + {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, + {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, +] +soupsieve = [ + {file = "soupsieve-2.5-py3-none-any.whl", hash = "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7"}, + {file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"}, +] +sphinx = [ + {file = "sphinx-7.2.6-py3-none-any.whl", hash = "sha256:1e09160a40b956dc623c910118fa636da93bd3ca0b9876a7b3df90f07d691560"}, + {file = "sphinx-7.2.6.tar.gz", hash = 
"sha256:9a5160e1ea90688d5963ba09a2dcd8bdd526620edbb65c328728f1b2228d5ab5"}, +] +sphinx-click = [ + {file = "sphinx-click-4.4.0.tar.gz", hash = "sha256:cc67692bd28f482c7f01531c61b64e9d2f069bfcf3d24cbbb51d4a84a749fa48"}, + {file = "sphinx_click-4.4.0-py3-none-any.whl", hash = "sha256:2821c10a68fc9ee6ce7c92fad26540d8d8c8f45e6d7258f0e4fb7529ae8fab49"}, +] +sphinxcontrib-applehelp = [ + {file = "sphinxcontrib_applehelp-1.0.8-py3-none-any.whl", hash = "sha256:cb61eb0ec1b61f349e5cc36b2028e9e7ca765be05e49641c97241274753067b4"}, + {file = "sphinxcontrib_applehelp-1.0.8.tar.gz", hash = "sha256:c40a4f96f3776c4393d933412053962fac2b84f4c99a7982ba42e09576a70619"}, +] +sphinxcontrib-devhelp = [ + {file = "sphinxcontrib_devhelp-1.0.6-py3-none-any.whl", hash = "sha256:6485d09629944511c893fa11355bda18b742b83a2b181f9a009f7e500595c90f"}, + {file = "sphinxcontrib_devhelp-1.0.6.tar.gz", hash = "sha256:9893fd3f90506bc4b97bdb977ceb8fbd823989f4316b28c3841ec128544372d3"}, +] +sphinxcontrib-htmlhelp = [ + {file = "sphinxcontrib_htmlhelp-2.0.5-py3-none-any.whl", hash = "sha256:393f04f112b4d2f53d93448d4bce35842f62b307ccdc549ec1585e950bc35e04"}, + {file = "sphinxcontrib_htmlhelp-2.0.5.tar.gz", hash = "sha256:0dc87637d5de53dd5eec3a6a01753b1ccf99494bd756aafecd74b4fa9e729015"}, +] +sphinxcontrib-jsmath = [ + {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, + {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, +] +sphinxcontrib-qthelp = [ + {file = "sphinxcontrib_qthelp-1.0.7-py3-none-any.whl", hash = "sha256:e2ae3b5c492d58fcbd73281fbd27e34b8393ec34a073c792642cd8e529288182"}, + {file = "sphinxcontrib_qthelp-1.0.7.tar.gz", hash = "sha256:053dedc38823a80a7209a80860b16b722e9e0209e32fea98c90e4e6624588ed6"}, +] +sphinxcontrib-serializinghtml = [ + {file = "sphinxcontrib_serializinghtml-1.1.10-py3-none-any.whl", hash = "sha256:326369b8df80a7d2d8d7f99aa5ac577f51ea51556ed974e7716cfd4fca3f6cb7"}, + {file = "sphinxcontrib_serializinghtml-1.1.10.tar.gz", hash = "sha256:93f3f5dc458b91b192fe10c397e324f262cf163d79f3282c158e8436a2c4511f"}, +] +sqlalchemy = [ + {file = "SQLAlchemy-2.0.24-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5f801d85ba4753d4ed97181d003e5d3fa330ac7c4587d131f61d7f968f416862"}, + {file = "SQLAlchemy-2.0.24-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b35c35e3923ade1e7ac44e150dec29f5863513246c8bf85e2d7d313e3832bcfb"}, + {file = "SQLAlchemy-2.0.24-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d9b3fd5eca3c0b137a5e0e468e24ca544ed8ca4783e0e55341b7ed2807518ee"}, + {file = "SQLAlchemy-2.0.24-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a6209e689d0ff206c40032b6418e3cfcfc5af044b3f66e381d7f1ae301544b4"}, + {file = "SQLAlchemy-2.0.24-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:37e89d965b52e8b20571b5d44f26e2124b26ab63758bf1b7598a0e38fb2c4005"}, + {file = "SQLAlchemy-2.0.24-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c6910eb4ea90c0889f363965cd3c8c45a620ad27b526a7899f0054f6c1b9219e"}, + {file = "SQLAlchemy-2.0.24-cp310-cp310-win32.whl", hash = "sha256:d8e7e8a150e7b548e7ecd6ebb9211c37265991bf2504297d9454e01b58530fc6"}, + {file = "SQLAlchemy-2.0.24-cp310-cp310-win_amd64.whl", hash = "sha256:396f05c552f7fa30a129497c41bef5b4d1423f9af8fe4df0c3dcd38f3e3b9a14"}, + {file = "SQLAlchemy-2.0.24-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:adbd67dac4ebf54587198b63cd30c29fd7eafa8c0cab58893d9419414f8efe4b"}, + {file = "SQLAlchemy-2.0.24-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a0f611b431b84f55779cbb7157257d87b4a2876b067c77c4f36b15e44ced65e2"}, + {file = "SQLAlchemy-2.0.24-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56a0e90a959e18ac5f18c80d0cad9e90cb09322764f536e8a637426afb1cae2f"}, + {file = "SQLAlchemy-2.0.24-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6db686a1d9f183c639f7e06a2656af25d4ed438eda581de135d15569f16ace33"}, + {file = "SQLAlchemy-2.0.24-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f0cc0b486a56dff72dddae6b6bfa7ff201b0eeac29d4bc6f0e9725dc3c360d71"}, + {file = "SQLAlchemy-2.0.24-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4a1d4856861ba9e73bac05030cec5852eabfa9ef4af8e56c19d92de80d46fc34"}, + {file = "SQLAlchemy-2.0.24-cp311-cp311-win32.whl", hash = "sha256:a3c2753bf4f48b7a6024e5e8a394af49b1b12c817d75d06942cae03d14ff87b3"}, + {file = "SQLAlchemy-2.0.24-cp311-cp311-win_amd64.whl", hash = "sha256:38732884eabc64982a09a846bacf085596ff2371e4e41d20c0734f7e50525d01"}, + {file = "SQLAlchemy-2.0.24-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9f992e0f916201731993eab8502912878f02287d9f765ef843677ff118d0e0b1"}, + {file = "SQLAlchemy-2.0.24-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2587e108463cc2e5b45a896b2e7cc8659a517038026922a758bde009271aed11"}, + {file = "SQLAlchemy-2.0.24-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bb7cedcddffca98c40bb0becd3423e293d1fef442b869da40843d751785beb3"}, + {file = "SQLAlchemy-2.0.24-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83fa6df0e035689df89ff77a46bf8738696785d3156c2c61494acdcddc75c69d"}, + {file = "SQLAlchemy-2.0.24-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:cc889fda484d54d0b31feec409406267616536d048a450fc46943e152700bb79"}, + {file = "SQLAlchemy-2.0.24-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:57ef6f2cb8b09a042d0dbeaa46a30f2df5dd1e1eb889ba258b0d5d7d6011b81c"}, + {file = "SQLAlchemy-2.0.24-cp312-cp312-win32.whl", hash = "sha256:ea490564435b5b204d8154f0e18387b499ea3cedc1e6af3b3a2ab18291d85aa7"}, + {file = "SQLAlchemy-2.0.24-cp312-cp312-win_amd64.whl", hash = "sha256:ccfd336f96d4c9bbab0309f2a565bf15c468c2d8b2d277a32f89c5940f71fcf9"}, + {file = "SQLAlchemy-2.0.24-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9aaaaa846b10dfbe1bda71079d0e31a7e2cebedda9409fa7dba3dfed1ae803e8"}, + {file = "SQLAlchemy-2.0.24-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95bae3d38f8808d79072da25d5e5a6095f36fe1f9d6c614dd72c59ca8397c7c0"}, + {file = "SQLAlchemy-2.0.24-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a04191a7c8d77e63f6fc1e8336d6c6e93176c0c010833e74410e647f0284f5a1"}, + {file = "SQLAlchemy-2.0.24-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:acc58b7c2e40235712d857fdfc8f2bda9608f4a850d8d9ac0dd1fc80939ca6ac"}, + {file = "SQLAlchemy-2.0.24-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:00d76fe5d7cdb5d84d625ce002ce29fefba0bfd98e212ae66793fed30af73931"}, + {file = "SQLAlchemy-2.0.24-cp37-cp37m-win32.whl", hash = "sha256:29e51f848f843bbd75d74ae64ab1ab06302cb1dccd4549d1f5afe6b4a946edb2"}, + {file = "SQLAlchemy-2.0.24-cp37-cp37m-win_amd64.whl", hash = "sha256:e9d036e343a604db3f5a6c33354018a84a1d3f6dcae3673358b404286204798c"}, + {file = "SQLAlchemy-2.0.24-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:9bafaa05b19dc07fa191c1966c5e852af516840b0d7b46b7c3303faf1a349bc9"}, + {file = "SQLAlchemy-2.0.24-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e69290b921b7833c04206f233d6814c60bee1d135b09f5ae5d39229de9b46cd4"}, + {file = "SQLAlchemy-2.0.24-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8398593ccc4440ce6dffcc4f47d9b2d72b9fe7112ac12ea4a44e7d4de364db1"}, + {file = "SQLAlchemy-2.0.24-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f073321a79c81e1a009218a21089f61d87ee5fa3c9563f6be94f8b41ff181812"}, + {file = "SQLAlchemy-2.0.24-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9036ebfd934813990c5b9f71f297e77ed4963720db7d7ceec5a3fdb7cd2ef6ce"}, + {file = "SQLAlchemy-2.0.24-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fcf84fe93397a0f67733aa2a38ed4eab9fc6348189fc950e656e1ea198f45668"}, + {file = "SQLAlchemy-2.0.24-cp38-cp38-win32.whl", hash = "sha256:6f5e75de91c754365c098ac08c13fdb267577ce954fa239dd49228b573ca88d7"}, + {file = "SQLAlchemy-2.0.24-cp38-cp38-win_amd64.whl", hash = "sha256:9f29c7f0f4b42337ec5a779e166946a9f86d7d56d827e771b69ecbdf426124ac"}, + {file = "SQLAlchemy-2.0.24-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:07cc423892f2ceda9ae1daa28c0355757f362ecc7505b1ab1a3d5d8dc1c44ac6"}, + {file = "SQLAlchemy-2.0.24-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2a479aa1ab199178ff1956b09ca8a0693e70f9c762875d69292d37049ffd0d8f"}, + {file = "SQLAlchemy-2.0.24-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b8d0e8578e7f853f45f4512b5c920f6a546cd4bed44137460b2a56534644205"}, + {file = "SQLAlchemy-2.0.24-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17e7e27af178d31b436dda6a596703b02a89ba74a15e2980c35ecd9909eea3a"}, + {file = "SQLAlchemy-2.0.24-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1ca7903d5e7db791a355b579c690684fac6304478b68efdc7f2ebdcfe770d8d7"}, + {file = "SQLAlchemy-2.0.24-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db09e424d7bb89b6215a184ca93b4f29d7f00ea261b787918a1af74143b98c06"}, + {file = "SQLAlchemy-2.0.24-cp39-cp39-win32.whl", hash = "sha256:a5cd7d30e47f87b21362beeb3e86f1b5886e7d9b0294b230dde3d3f4a1591375"}, + {file = "SQLAlchemy-2.0.24-cp39-cp39-win_amd64.whl", hash = "sha256:7ae5d44517fe81079ce75cf10f96978284a6db2642c5932a69c82dbae09f009a"}, + {file = "SQLAlchemy-2.0.24-py3-none-any.whl", hash = "sha256:8f358f5cfce04417b6ff738748ca4806fe3d3ae8040fb4e6a0c9a6973ccf9b6e"}, + {file = "SQLAlchemy-2.0.24.tar.gz", hash = "sha256:6db97656fd3fe3f7e5b077f12fa6adb5feb6e0b567a3e99f47ecf5f7ea0a09e3"}, +] +sqlalchemy-utils = [ + {file = "SQLAlchemy-Utils-0.41.1.tar.gz", hash = "sha256:a2181bff01eeb84479e38571d2c0718eb52042f9afd8c194d0d02877e84b7d74"}, + {file = "SQLAlchemy_Utils-0.41.1-py3-none-any.whl", hash = "sha256:6c96b0768ea3f15c0dc56b363d386138c562752b84f647fb8d31a2223aaab801"}, +] +stack-data = [ + {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, + {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, +] +swagger-ui-bundle = [ + {file = "swagger_ui_bundle-0.0.9-py3-none-any.whl", hash = "sha256:cea116ed81147c345001027325c1ddc9ca78c1ee7319935c3c75d3669279d575"}, + {file = "swagger_ui_bundle-0.0.9.tar.gz", hash = "sha256:b462aa1460261796ab78fd4663961a7f6f347ce01760f1303bbbdf630f11f516"}, +] +synapseclient = [ + {file = "synapseclient-3.2.0-py3-none-any.whl", hash = 
"sha256:ec1bb9c3ac2db995be25f6ced08a530a170219d23224d7c9b8a381166905fe6c"}, + {file = "synapseclient-3.2.0.tar.gz", hash = "sha256:87c91f03dbca7074efd18144325df07db24e07ea92e0b7c8691aaec46c28329a"}, +] +tabulate = [ + {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, + {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"}, +] +tenacity = [ + {file = "tenacity-8.2.3-py3-none-any.whl", hash = "sha256:ce510e327a630c9e1beaf17d42e6ffacc88185044ad85cf74c0a8887c6a0f88c"}, + {file = "tenacity-8.2.3.tar.gz", hash = "sha256:5398ef0d78e63f40007c1fb4c0bff96e1911394d2fa8d194f77619c05ff6cc8a"}, +] +terminado = [ {file = "terminado-0.18.0-py3-none-any.whl", hash = "sha256:87b0d96642d0fe5f5abd7783857b9cab167f221a39ff98e3b9619a788a3c0f2e"}, {file = "terminado-0.18.0.tar.gz", hash = "sha256:1ea08a89b835dd1b8c0c900d92848147cef2537243361b2e3f4dc15df9b6fded"}, ] - -[package.dependencies] -ptyprocess = {version = "*", markers = "os_name != \"nt\""} -pywinpty = {version = ">=1.1.0", markers = "os_name == \"nt\""} -tornado = ">=6.1.0" - -[package.extras] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] -test = ["pre-commit", "pytest (>=7.0)", "pytest-timeout"] -typing = ["mypy (>=1.6,<2.0)", "traitlets (>=5.11.1)"] - -[[package]] -name = "tinycss2" -version = "1.2.1" -description = "A tiny CSS parser" -optional = false -python-versions = ">=3.7" -files = [ +tinycss2 = [ {file = "tinycss2-1.2.1-py3-none-any.whl", hash = "sha256:2b80a96d41e7c3914b8cda8bc7f705a4d9c49275616e886103dd839dfc847847"}, {file = "tinycss2-1.2.1.tar.gz", hash = "sha256:8cff3a8f066c2ec677c06dbc7b45619804a6938478d9d73c284b29d14ecb0627"}, ] - -[package.dependencies] -webencodings = ">=0.4" - -[package.extras] -doc = ["sphinx", "sphinx_rtd_theme"] -test = ["flake8", "isort", "pytest"] - -[[package]] -name = "toml" -version = "0.10.2" -description = "Python Library for Tom's Obvious, Minimal Language" -optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ +toml = [ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] - -[[package]] -name = "tomli" -version = "2.0.1" -description = "A lil' TOML parser" -optional = false -python-versions = ">=3.7" -files = [ +tomli = [ {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] - -[[package]] -name = "tomlkit" -version = "0.12.3" -description = "Style preserving TOML library" -optional = false -python-versions = ">=3.7" -files = [ +tomlkit = [ {file = "tomlkit-0.12.3-py3-none-any.whl", hash = "sha256:b0a645a9156dc7cb5d3a1f0d4bab66db287fcb8e0430bdd4664a095ea16414ba"}, {file = "tomlkit-0.12.3.tar.gz", hash = "sha256:75baf5012d06501f07bee5bf8e801b9f343e7aac5a92581f20f80ce632e6b5a4"}, ] - -[[package]] -name = "toolz" -version = "0.12.0" -description = "List processing tools and functional utilities" -optional = false -python-versions = ">=3.5" -files = [ +toolz = [ {file = "toolz-0.12.0-py3-none-any.whl", hash = "sha256:2059bd4148deb1884bb0eb770a3cde70e7f954cfbbdc2285f1f2de01fd21eb6f"}, {file = "toolz-0.12.0.tar.gz", hash = 
"sha256:88c570861c440ee3f2f6037c4654613228ff40c93a6c25e0eba70d17282c6194"}, ] - -[[package]] -name = "tornado" -version = "6.4" -description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." -optional = false -python-versions = ">= 3.8" -files = [ +tornado = [ {file = "tornado-6.4-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:02ccefc7d8211e5a7f9e8bc3f9e5b0ad6262ba2fbb683a6443ecc804e5224ce0"}, {file = "tornado-6.4-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:27787de946a9cffd63ce5814c33f734c627a87072ec7eed71f7fc4417bb16263"}, {file = "tornado-6.4-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7894c581ecdcf91666a0912f18ce5e757213999e183ebfc2c3fdbf4d5bd764e"}, @@ -4341,259 +4753,77 @@ files = [ {file = "tornado-6.4-cp38-abi3-win_amd64.whl", hash = "sha256:10aeaa8006333433da48dec9fe417877f8bcc21f48dda8d661ae79da357b2a63"}, {file = "tornado-6.4.tar.gz", hash = "sha256:72291fa6e6bc84e626589f1c29d90a5a6d593ef5ae68052ee2ef000dfd273dee"}, ] - -[[package]] -name = "tqdm" -version = "4.66.1" -description = "Fast, Extensible Progress Meter" -optional = false -python-versions = ">=3.7" -files = [ +tqdm = [ {file = "tqdm-4.66.1-py3-none-any.whl", hash = "sha256:d302b3c5b53d47bce91fea46679d9c3c6508cf6332229aa1e7d8653723793386"}, {file = "tqdm-4.66.1.tar.gz", hash = "sha256:d88e651f9db8d8551a62556d3cff9e3034274ca5d66e93197cf2490e2dcb69c7"}, ] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[package.extras] -dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] -notebook = ["ipywidgets (>=6)"] -slack = ["slack-sdk"] -telegram = ["requests"] - -[[package]] -name = "traitlets" -version = "5.14.1" -description = "Traitlets Python configuration system" -optional = false -python-versions = ">=3.8" -files = [ +traitlets = [ {file = "traitlets-5.14.1-py3-none-any.whl", hash = "sha256:2e5a030e6eff91737c643231bfcf04a65b0132078dad75e4936700b213652e74"}, {file = "traitlets-5.14.1.tar.gz", hash = "sha256:8585105b371a04b8316a43d5ce29c098575c2e477850b62b848b964f1444527e"}, ] - -[package.extras] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] -test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<7.5)", "pytest-mock", "pytest-mypy-testing"] - -[[package]] -name = "types-python-dateutil" -version = "2.8.19.20240106" -description = "Typing stubs for python-dateutil" -optional = false -python-versions = ">=3.8" -files = [ +types-python-dateutil = [ {file = "types-python-dateutil-2.8.19.20240106.tar.gz", hash = "sha256:1f8db221c3b98e6ca02ea83a58371b22c374f42ae5bbdf186db9c9a76581459f"}, {file = "types_python_dateutil-2.8.19.20240106-py3-none-any.whl", hash = "sha256:efbbdc54590d0f16152fa103c9879c7d4a00e82078f6e2cf01769042165acaa2"}, ] - -[[package]] -name = "typing-extensions" -version = "4.5.0" -description = "Backported and Experimental Type Hints for Python 3.7+" -optional = false -python-versions = ">=3.7" -files = [ +typing-extensions = [ {file = "typing_extensions-4.5.0-py3-none-any.whl", hash = "sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4"}, {file = "typing_extensions-4.5.0.tar.gz", hash = "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb"}, ] - -[[package]] -name = "typing-inspect" -version = "0.9.0" -description = "Runtime inspection utilities for typing module." 
-optional = false -python-versions = "*" -files = [ +typing-inspect = [ {file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"}, {file = "typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78"}, ] - -[package.dependencies] -mypy-extensions = ">=0.3.0" -typing-extensions = ">=3.7.4" - -[[package]] -name = "tzdata" -version = "2023.4" -description = "Provider of IANA time zone data" -optional = false -python-versions = ">=2" -files = [ +tzdata = [ {file = "tzdata-2023.4-py2.py3-none-any.whl", hash = "sha256:aa3ace4329eeacda5b7beb7ea08ece826c28d761cda36e747cfbf97996d39bf3"}, {file = "tzdata-2023.4.tar.gz", hash = "sha256:dd54c94f294765522c77399649b4fefd95522479a664a0cec87f41bebc6148c9"}, ] - -[[package]] -name = "tzlocal" -version = "5.2" -description = "tzinfo object for the local timezone" -optional = false -python-versions = ">=3.8" -files = [ +tzlocal = [ {file = "tzlocal-5.2-py3-none-any.whl", hash = "sha256:49816ef2fe65ea8ac19d19aa7a1ae0551c834303d5014c6d5a62e4cbda8047b8"}, {file = "tzlocal-5.2.tar.gz", hash = "sha256:8d399205578f1a9342816409cc1e46a93ebd5755e39ea2d85334bea911bf0e6e"}, ] - -[package.dependencies] -tzdata = {version = "*", markers = "platform_system == \"Windows\""} - -[package.extras] -devenv = ["check-manifest", "pytest (>=4.3)", "pytest-cov", "pytest-mock (>=3.3)", "zest.releaser"] - -[[package]] -name = "uri-template" -version = "1.3.0" -description = "RFC 6570 URI Template Processor" -optional = false -python-versions = ">=3.7" -files = [ +uri-template = [ {file = "uri-template-1.3.0.tar.gz", hash = "sha256:0e00f8eb65e18c7de20d595a14336e9f337ead580c70934141624b6d1ffdacc7"}, {file = "uri_template-1.3.0-py3-none-any.whl", hash = "sha256:a44a133ea12d44a0c0f06d7d42a52d71282e77e2f937d8abd5655b8d56fc1363"}, ] - -[package.extras] -dev = ["flake8", "flake8-annotations", "flake8-bandit", "flake8-bugbear", "flake8-commas", "flake8-comprehensions", "flake8-continuation", "flake8-datetimez", "flake8-docstrings", "flake8-import-order", "flake8-literal", "flake8-modern-annotations", "flake8-noqa", "flake8-pyproject", "flake8-requirements", "flake8-typechecking-import", "flake8-use-fstring", "mypy", "pep8-naming", "types-PyYAML"] - -[[package]] -name = "uritemplate" -version = "4.1.1" -description = "Implementation of RFC 6570 URI Templates" -optional = false -python-versions = ">=3.6" -files = [ +uritemplate = [ {file = "uritemplate-4.1.1-py2.py3-none-any.whl", hash = "sha256:830c08b8d99bdd312ea4ead05994a38e8936266f84b9a7878232db50b044e02e"}, {file = "uritemplate-4.1.1.tar.gz", hash = "sha256:4346edfc5c3b79f694bccd6d6099a322bbeb628dbf2cd86eea55a456ce5124f0"}, ] - -[[package]] -name = "urllib3" -version = "1.26.18" -description = "HTTP library with thread-safe connection pooling, file post, and more." 
-optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" -files = [ +urllib3 = [ {file = "urllib3-1.26.18-py2.py3-none-any.whl", hash = "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07"}, {file = "urllib3-1.26.18.tar.gz", hash = "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0"}, ] - -[package.extras] -brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] -socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] - -[[package]] -name = "uwsgi" -version = "2.0.23" -description = "The uWSGI server" -optional = false -python-versions = "*" -files = [ +uwsgi = [ {file = "uwsgi-2.0.23.tar.gz", hash = "sha256:0cafda0c16f921db7fe42cfaf81b167cf884ee17350efbdd87d1ecece2d7de37"}, ] - -[[package]] -name = "validators" -version = "0.20.0" -description = "Python Data Validation for Humans™." -optional = false -python-versions = ">=3.4" -files = [ +validators = [ {file = "validators-0.20.0.tar.gz", hash = "sha256:24148ce4e64100a2d5e267233e23e7afeb55316b47d30faae7eb6e7292bc226a"}, ] - -[package.dependencies] -decorator = ">=3.4.0" - -[package.extras] -test = ["flake8 (>=2.4.0)", "isort (>=4.2.2)", "pytest (>=2.2.3)"] - -[[package]] -name = "wcwidth" -version = "0.2.13" -description = "Measures the displayed width of unicode strings in a terminal" -optional = false -python-versions = "*" -files = [ +wcwidth = [ {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, ] - -[[package]] -name = "webcolors" -version = "1.13" -description = "A library for working with the color formats defined by HTML and CSS." 
-optional = false -python-versions = ">=3.7" -files = [ +webcolors = [ {file = "webcolors-1.13-py3-none-any.whl", hash = "sha256:29bc7e8752c0a1bd4a1f03c14d6e6a72e93d82193738fa860cbff59d0fcc11bf"}, {file = "webcolors-1.13.tar.gz", hash = "sha256:c225b674c83fa923be93d235330ce0300373d02885cef23238813b0d5668304a"}, ] - -[package.extras] -docs = ["furo", "sphinx", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-notfound-page", "sphinxext-opengraph"] -tests = ["pytest", "pytest-cov"] - -[[package]] -name = "webencodings" -version = "0.5.1" -description = "Character encoding aliases for legacy web content" -optional = false -python-versions = "*" -files = [ +webencodings = [ {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, ] - -[[package]] -name = "websocket-client" -version = "1.7.0" -description = "WebSocket client for Python with low level API options" -optional = false -python-versions = ">=3.8" -files = [ +websocket-client = [ {file = "websocket-client-1.7.0.tar.gz", hash = "sha256:10e511ea3a8c744631d3bd77e61eb17ed09304c413ad42cf6ddfa4c7787e8fe6"}, {file = "websocket_client-1.7.0-py3-none-any.whl", hash = "sha256:f4c3d22fec12a2461427a29957ff07d35098ee2d976d3ba244e688b8b4057588"}, ] - -[package.extras] -docs = ["Sphinx (>=6.0)", "sphinx-rtd-theme (>=1.1.0)"] -optional = ["python-socks", "wsaccel"] -test = ["websockets"] - -[[package]] -name = "werkzeug" -version = "2.1.2" -description = "The comprehensive WSGI web application library." -optional = false -python-versions = ">=3.7" -files = [ +werkzeug = [ {file = "Werkzeug-2.1.2-py3-none-any.whl", hash = "sha256:72a4b735692dd3135217911cbeaa1be5fa3f62bffb8745c5215420a03dc55255"}, {file = "Werkzeug-2.1.2.tar.gz", hash = "sha256:1ce08e8093ed67d638d63879fd1ba3735817f7a80de3674d293f5984f25fb6e6"}, ] - -[package.extras] -watchdog = ["watchdog"] - -[[package]] -name = "widgetsnbextension" -version = "4.0.9" -description = "Jupyter interactive widgets for Jupyter Notebook" -optional = false -python-versions = ">=3.7" -files = [ +widgetsnbextension = [ {file = "widgetsnbextension-4.0.9-py3-none-any.whl", hash = "sha256:91452ca8445beb805792f206e560c1769284267a30ceb1cec9f5bcc887d15175"}, {file = "widgetsnbextension-4.0.9.tar.gz", hash = "sha256:3c1f5e46dc1166dfd40a42d685e6a51396fd34ff878742a3e47c6f0cc4a2a385"}, ] - -[[package]] -name = "wrapt" -version = "1.16.0" -description = "Module for decorators, wrappers and monkey patching." 
-optional = false
-python-versions = ">=3.6"
-files = [
+wrapt = [
    {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"},
    {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"},
    {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"},
@@ -4665,23 +4895,7 @@ files = [
    {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"},
    {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"},
 ]
-
-[[package]]
-name = "zipp"
-version = "3.17.0"
-description = "Backport of pathlib-compatible object wrapper for zip files"
-optional = false
-python-versions = ">=3.8"
-files = [
+zipp = [
    {file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"},
    {file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"},
 ]
-
-[package.extras]
-docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"]
-testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"]
-
-[metadata]
-lock-version = "2.0"
-python-versions = ">=3.9.0,<3.11"
-content-hash = "e7a53bb762e4472eb7fefd0ea60c026f3ec037a8c5e268e613e959500cde0ebf"

diff --git a/pyproject.toml b/pyproject.toml
index 8413cda00..a79da0bf9 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -57,8 +57,6 @@ setuptools = "^66.0.0"
 synapseclient = "^3.2.0"
 tenacity = "^8.0.1"
 toml = "^0.10.2"
-Flask = "^2.0.0"
-connexion = {extras = ["swagger-ui"], version = "^2.8.0"}
 great-expectations = "^0.15.0"
 sphinx-click = "^4.0.0"
 MarkupSafe = "2.1.0"
@@ -66,7 +64,6 @@ itsdangerous = "^2.0.0"
 Jinja2 = ">2.11.3"
 openpyxl = "^3.0.9"
 "backports.zoneinfo" = {markers = "python_version < \"3.9\"", version = "^0.2.1"}
-Flask-Cors = "^3.0.10"
 pdoc = "^12.2.0"
 dateparser = "^1.1.4"
 pandarallel = "^1.6.4"
@@ -74,6 +71,13 @@ schematic-db = {version = "0.0.dev33", extras = ["synapse"]}
 pyopenssl = "^23.0.0"
 typing-extensions = "<4.6.0"
 dataclasses-json = "^0.6.1"
+connexion = {extras = ["swagger-ui"], version = "^2.8.0", optional = true}
+Flask = {version = "^2.0.0", optional = true}
+Flask-Cors = {version = "^3.0.10", optional = true}
+
+[tool.poetry.extras]
+api = ["connexion", "Flask", "Flask-Cors"]
+

 [tool.poetry.group.dev.dependencies]
 pytest = "^7.0.0"
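Moving Flask, Flask-Cors, and connexion behind the new optional "api" extra makes the REST API dependencies opt-in. A minimal sketch of the install options this enables, assuming standard Poetry extras handling (illustrative only, not part of the patch):

    poetry install                 # core library only; no Flask/connexion stack
    poetry install --extras api    # core library plus the optional API dependencies
    poetry install --all-extras    # every optional extra, as the CI workflow does below

The next two commits update the CI workflow accordingly, so that the test environment still installs the now-optional API stack.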
From da6f4ed1ffb5ac4dcea95eac9e012cce01fea625 Mon Sep 17 00:00:00 2001
From: andrewelamb
Date: Tue, 6 Feb 2024 13:41:29 -0800
Subject: [PATCH 151/199] add -all extras to "Install library" step of workflow

---
 .github/workflows/test.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index b2adf95f8..884632b5d 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -76,7 +76,7 @@ jobs:
       #----------------------------------------------
       # install your root project, if required
       #----------------------------------------------
       - name: Install library
-        run: poetry install --no-interaction
+        run: poetry install --no-interaction -all-extras

       #----------------------------------------------
       # perform linting

From 6124104e64fa5b8ac19c8b1232175ff842df6ac4 Mon Sep 17 00:00:00 2001
From: andrewelamb
Date: Tue, 6 Feb 2024 13:55:49 -0800
Subject: [PATCH 152/199] add -all extras to "Install library" step of workflow

---
 .github/workflows/test.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 884632b5d..1525c3640 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -76,7 +76,7 @@ jobs:
       #----------------------------------------------
       # install your root project, if required
       #----------------------------------------------
       - name: Install library
-        run: poetry install --no-interaction -all-extras
+        run: poetry install --no-interaction --all-extras

       #----------------------------------------------
       # perform linting

From 5e4c6f7f3bc6bde7f69f6a416e496b93471bffa7 Mon Sep 17 00:00:00 2001
From: Mialy DeFelice <85905780+mialy-defelice@users.noreply.github.com>
Date: Tue, 6 Feb 2024 15:35:28 -0800
Subject: [PATCH 153/199] refactor the function get_label_from_display_name into
 smaller helper functions for clarity

---
 schematic/utils/schema_utils.py | 119 ++++++++++++++++++++++----------
 1 file changed, 84 insertions(+), 35 deletions(-)

diff --git a/schematic/utils/schema_utils.py b/schematic/utils/schema_utils.py
index 854eb9054..939227cde 100644
--- a/schematic/utils/schema_utils.py
+++ b/schematic/utils/schema_utils.py
@@ -8,6 +8,7 @@
 logger = logging.getLogger(__name__)

 DisplayLabelType = Literal["class_label", "display_label"]
+BLACKLISTED_CHARS = ["(", ")", ".", " ", "-"]


 def attr_dict_template(key_name: str) -> Dict[str, dict[str, dict]]:
@@ -82,21 +83,59 @@ def get_attribute_display_name_from_label(


 def check_if_display_name_is_valid_label(
-    display_name: str, blacklisted_chars: list[str]
+    display_name: str,
+    blacklisted_chars: list[str] = BLACKLISTED_CHARS,
 ) -> bool:
+    """Check if the display name can be used as a display label
+    Args:
+        display_name, str: node display name
+        blacklisted_chars, list[str]: characters that are not permitted for Synapse annotation uploads.
+    Returns:
+        valid_label, bool: True, if the display name can be used as a label, False, if it cannot.
+    """
     valid_label = True
     if any(map(display_name.__contains__, blacklisted_chars)):
         valid_label = False
     return valid_label


-def get_label_from_display_name(
+def get_stripped_label(
     display_name: str,
     entry_type: str,
-    strict_camel_case: bool = False,
-    data_model_labels: str = "class_label",
+    blacklisted_chars: list[str] = BLACKLISTED_CHARS,
 ) -> str:
-    """Get node label from provided display name, based on whether the node is a class or property
+    """
     Args:
         display_name, str: node display name
         entry_type, str: 'class' or 'property', defines what type the entry is.
+        blacklisted_chars, list[str]: characters that are not permitted for Synapse annotation uploads.
     Returns:
+        stripped_label, str: class or property label that has been stripped of blacklisted characters.
+    """
+    if entry_type.lower() == "class":
+        stripped_label = [
+            get_class_label_from_display_name(str(display_name)).translate(
+                {ord(x): "" for x in blacklisted_chars}
+            )
+        ][0]
+
+    elif entry_type.lower() == "property":
+        stripped_label = [
+            get_property_label_from_display_name(str(display_name)).translate(
+                {ord(x): "" for x in blacklisted_chars}
+            )
+        ][0]
+
+    logger.warning(
+        f"Cannot use display name {display_name} as the data model label, because it is not formatted properly. Please remove all spaces and blacklisted characters: {str(blacklisted_chars)}. The following label was assigned instead: {stripped_label}"
+    )
+    return stripped_label
+
+
+def get_schema_label(
+    display_name: str, entry_type: str, strict_camel_case: bool
+) -> str:
+    """Get the class or property label for a given display name
+    Args:
+        display_name, str: node display name
+        entry_type, str: 'class' or 'property', defines what type the entry is.
+        strict_camel_case, bool: Default, False; defines whether or not to use strict camel case or not for conversion.
+    Returns:
+        label, str: class label of display name
+    Raises:
+        ValueError if entry_type.lower(), is not either 'class' or 'property'
+    """
+    if entry_type.lower() == "class":
+        label = get_class_label_from_display_name(
+            display_name=display_name, strict_camel_case=strict_camel_case
+        )
+    elif entry_type.lower() == "property":
+        label = get_property_label_from_display_name(
+            display_name=display_name, strict_camel_case=strict_camel_case
+        )
+    else:
+        raise ValueError(
+            f"The entry type submitted: {entry_type}, is not one of the permitted types: 'class' or 'property'"
+        )
+    return label
+
+
+def get_label_from_display_name(
     display_name: str,
     entry_type: str,
     strict_camel_case: bool = False,
-    data_model_labels: str = "class_label",
+    data_model_labels: DisplayLabelType = "class_label",
 ) -> str:
     """Get node label from provided display name, based on whether the node is a class or property
     Args:
         display_name, str: node display name
         entry_type, str: 'class' or 'property', defines what type the entry is.
         strict_camel_case, bool: Default, False; defines whether or not to use strict camel case or not for conversion.
     Returns:
+        label, str: label to be used for the provided display name.
     """
     if data_model_labels == "display_label":
         # Check that display name can be used as a label.
         valid_display_name = check_if_display_name_is_valid_label(
-            display_name=display_name, blacklisted_chars=blacklisted_chars
+            display_name=display_name
         )
+        # If the display name is valid, set the label to be the display name
         if valid_display_name:
             label = display_name
+        # If not, get a stripped class or property label (as indicated by the entry type)
         else:
-            if entry_type.lower() == "class":
-                label = [
-                    get_class_label_from_display_name(str(display_name)).translate(
-                        {ord(x): "" for x in blacklisted_chars}
-                    )
-                ][0]
-
-            elif entry_type.lower() == "property":
-                label = [
-                    get_property_label_from_display_name(str(display_name)).translate(
-                        {ord(x): "" for x in blacklisted_chars}
-                    )
-                ][0]
-
-            logger.warning(
-                f"Cannot use display name {display_name} as the data model label, becaues it is not formatted properly. Please remove all spaces and blacklisted characters: {str(blacklisted_chars)}. The following label was assigned instead: {label}"
The following label was assigned instead: {label}" + label = get_stripped_label( + display_name=display_name, entry_type=entry_type ) + else: - if entry_type.lower() == "class": - label = get_class_label_from_display_name( - display_name=display_name, strict_camel_case=strict_camel_case - ) + label = get_schema_label( + display_name=display_name, + entry_type=entry_type, + strict_camel_case=strict_camel_case, + ) - elif entry_type.lower() == "property": - label = get_property_label_from_display_name( - display_name=display_name, strict_camel_case=strict_camel_case - ) - else: - raise ValueError( - f"The entry type submitted: {entry_type}, is not one of the permitted types: 'class' or 'property'" - ) return label From a8333539684a148d7f482979fbe6a7f8521ff7d7 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice <85905780+mialy-defelice@users.noreply.github.com> Date: Tue, 6 Feb 2024 15:38:44 -0800 Subject: [PATCH 154/199] import DataLabelType from schema_utils and apply across the codebase --- schematic/manifest/generator.py | 3 ++- schematic/schemas/data_model_graph.py | 3 ++- schematic/schemas/data_model_nodes.py | 7 ++++--- schematic/visualization/commands.py | 22 ++++++++++++++++++---- schematic_api/api/routes.py | 4 ++-- tests/test_schemas.py | 5 +++-- tests/test_utils.py | 2 +- 7 files changed, 32 insertions(+), 14 deletions(-) diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py index 72c9f99df..f8fe13bf6 100644 --- a/schematic/manifest/generator.py +++ b/schematic/manifest/generator.py @@ -22,6 +22,7 @@ ) from schematic.utils.df_utils import update_df, load_df from schematic.utils.validate_utils import rule_in_rule_list +from schematic.utils.schema_utils import DisplayLabelType # TODO: This module should only be aware of the store interface # we shouldn't need to expose Synapse functionality explicitly @@ -1620,7 +1621,7 @@ def create_single_manifest( def create_manifests( path_to_data_model: str, data_types: list, - data_model_labels: str = "class_label", + data_model_labels: DisplayLabelType = "class_label", access_token: Optional[str] = None, dataset_ids: Optional[list] = None, output_format: Literal["google_sheet", "excel", "dataframe"] = "google_sheet", diff --git a/schematic/schemas/data_model_graph.py b/schematic/schemas/data_model_graph.py index 64320991c..dab042020 100644 --- a/schematic/schemas/data_model_graph.py +++ b/schematic/schemas/data_model_graph.py @@ -12,6 +12,7 @@ from schematic.utils.schema_utils import ( get_property_label_from_display_name, get_class_label_from_display_name, + DisplayLabelType, ) from schematic.utils.general import unlist from schematic.utils.viz_utils import visualize @@ -44,7 +45,7 @@ class DataModelGraph: __metaclass__ = DataModelGraphMeta def __init__( - self, attribute_relationships_dict: dict, data_model_labels: str = "class_label" + self, attribute_relationships_dict: dict, data_model_labels: DisplayLabelType = "class_label", ) -> None: """Load parsed data model. 
Args: diff --git a/schematic/schemas/data_model_nodes.py b/schematic/schemas/data_model_nodes.py index 3a9ff7c8c..2535e3f20 100644 --- a/schematic/schemas/data_model_nodes.py +++ b/schematic/schemas/data_model_nodes.py @@ -1,7 +1,7 @@ from inspect import isfunction import networkx as nx from rdflib import Namespace -from typing import Any, Dict, Optional, Text, List, Callable +from typing import Any, Dict, Optional, Text, List, Literal, Callable from schematic.schemas.data_model_parser import DataModelJSONLDParser from schematic.schemas.data_model_relationships import DataModelRelationships @@ -11,6 +11,7 @@ get_attribute_display_name_from_label, convert_bool_to_str, parse_validation_rules, + DisplayLabelType, ) from schematic.utils.validate_rules_utils import validate_schema_rules from schematic.schemas.curie import uri2curie, curie2uri @@ -130,7 +131,7 @@ def run_rel_functions( attr_relationships={}, csv_header="", entry_type="", - data_model_labels: str = "class_label", + data_model_labels: DisplayLabelType = "class_label", ): """This function exists to centralzie handling of functions for filling out node information, makes sure all the proper parameters are passed to each function. Args: @@ -183,7 +184,7 @@ def generate_node_dict( self, node_display_name: str, attr_rel_dict: dict, - data_model_labels: str = "class_label", + data_model_labels: DisplayLabelType = "class_label", ) -> dict: """Gather information to be attached to each node. Args: diff --git a/schematic/visualization/commands.py b/schematic/visualization/commands.py index 5ecc4f8f7..241895015 100644 --- a/schematic/visualization/commands.py +++ b/schematic/visualization/commands.py @@ -73,15 +73,22 @@ def get_attributes(ctx): type=click.Choice(["plain", "highlighted"], case_sensitive=False), help=query_dict(viz_commands, ("visualization", "tangled_tree", "text_format")), ) +@click.option( + "--data_model_labels", + "-dml", + default="class_label", + type=click.Choice(["display_label", "class_label"], case_sensitive=True), + help=query_dict(schema_commands, ("schema", "convert", "data_model_labels")), +) @click.pass_obj -def get_tangled_tree_text(ctx, figure_type, text_format): +def get_tangled_tree_text(ctx, figure_type, text_format, data_model_labels): """Get text to be placed on the tangled tree visualization.""" # Get JSONLD file path path_to_jsonld = CONFIG.model_location log_value_from_config("jsonld", path_to_jsonld) # Initialize TangledTree - tangled_tree = TangledTree(path_to_jsonld, figure_type) + tangled_tree = TangledTree(path_to_jsonld, figure_type, data_model_labels) # Get text for tangled tree. 
text_df = tangled_tree.get_text_for_tangled_tree(text_format, save_file=True) @@ -96,15 +103,22 @@ def get_tangled_tree_text(ctx, figure_type, text_format): type=click.Choice(["component", "dependency"], case_sensitive=False), help=query_dict(viz_commands, ("visualization", "tangled_tree", "figure_type")), ) +@click.option( + "--data_model_labels", + "-dml", + default="class_label", + type=click.Choice(["display_label", "class_label"], case_sensitive=True), + help=query_dict(schema_commands, ("schema", "convert", "data_model_labels")), +) @click.pass_obj -def get_tangled_tree_component_layers(ctx, figure_type): +def get_tangled_tree_component_layers(ctx, figure_type, data_model_labels): """Get the components that belong in each layer of the tangled tree visualization.""" # Get JSONLD file path path_to_jsonld = CONFIG.model_location log_value_from_config("jsonld", path_to_jsonld) # Initialize Tangled Tree - tangled_tree = TangledTree(path_to_jsonld, figure_type) + tangled_tree = TangledTree(path_to_jsonld, figure_type, data_model_labels) # Get tangled trees layers JSON. layers = tangled_tree.get_tangled_tree_layers(save_file=True) diff --git a/schematic_api/api/routes.py b/schematic_api/api/routes.py index 68980a261..0bf4295ac 100644 --- a/schematic_api/api/routes.py +++ b/schematic_api/api/routes.py @@ -38,7 +38,7 @@ SynapseTimeoutError, ) from schematic.utils.general import entity_type_mapping -from schematic.utils.schema_utils import get_property_label_from_display_name +from schematic.utils.schema_utils import get_property_label_from_display_name, DisplayLabelType logger = logging.getLogger(__name__) logging.basicConfig(level=logging.DEBUG) @@ -267,7 +267,7 @@ def get_manifest_route( output_format=None, title=None, strict_validation: bool = True, - data_model_labels: str = "class_label", + data_model_labels: DisplayLabelType = "class_label", data_type: str = None, ): """Get the immediate dependencies that are related to a given source node. 
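As a reference point for this type change, a minimal sketch of how the two DisplayLabelType values are expected to behave at a call site (the display names below are illustrative examples, not values from this patch):

from schematic.utils.schema_utils import get_label_from_display_name

# Default "class_label": the display name is camel-cased into a schema label.
get_label_from_display_name("bio things", entry_type="class")  # expected: "BioThings"

# "display_label": a display name free of blacklisted characters is used verbatim.
get_label_from_display_name(
    "BioThings", entry_type="class", data_model_labels="display_label"
)  # expected: "BioThings"

# An invalid display name (here, it contains a space) falls back to a stripped
# class label and logs a warning rather than raising.
get_label_from_display_name(
    "Bio Things", entry_type="class", data_model_labels="display_label"
)  # expected: "BioThings"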
diff --git a/tests/test_schemas.py b/tests/test_schemas.py index b262fcf6a..2e10ca61e 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -18,6 +18,7 @@ get_attribute_display_name_from_label, convert_bool_to_str, parse_validation_rules, + DisplayLabelType ) from schematic.utils.io_utils import load_json @@ -79,7 +80,7 @@ def get_data_model_parser( def generate_graph_data_model( - helpers, data_model_name: str, data_model_labels: str = "class_label" + helpers, data_model_name: str, data_model_labels: DisplayLabelType = "class_label", ) -> nx.MultiDiGraph: """ Simple helper function to generate a networkx graph data model from a CSV or JSONLD data model @@ -104,7 +105,7 @@ def generate_graph_data_model( def generate_data_model_nodes( - helpers, data_model_name: str, data_model_labels: str = "class_label" + helpers, data_model_name: str, data_model_labels: DisplayLabelType = "class_label", ) -> DataModelNodes: # Instantiate Parser data_model_parser = get_data_model_parser( diff --git a/tests/test_utils.py b/tests/test_utils.py index 8a54207eb..ab3b13aa2 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -605,7 +605,7 @@ def test_strip_context(self, helpers, context_value): ["display_label", "class_label"], ids=["display_label", "class_label"], ) - def test_get_label_from_display_name(self, test_dn, data_model_labels): + def test_get_label_from_display_name(self, test_dn: str, data_model_labels: str): display_name = test_dn for entry_type, expected_result in TEST_DN_DICT[test_dn].items(): label = "" From 8354ca529eae90e3860db070c05e88a482653254 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice <85905780+mialy-defelice@users.noreply.github.com> Date: Wed, 7 Feb 2024 09:26:04 -0800 Subject: [PATCH 155/199] run black --- schematic/schemas/data_model_graph.py | 4 +++- schematic/schemas/data_model_parser.py | 3 +-- schematic/utils/schema_utils.py | 4 +--- 3 files changed, 5 insertions(+), 6 deletions(-) diff --git a/schematic/schemas/data_model_graph.py b/schematic/schemas/data_model_graph.py index dab042020..72b1ebf56 100644 --- a/schematic/schemas/data_model_graph.py +++ b/schematic/schemas/data_model_graph.py @@ -45,7 +45,9 @@ class DataModelGraph: __metaclass__ = DataModelGraphMeta def __init__( - self, attribute_relationships_dict: dict, data_model_labels: DisplayLabelType = "class_label", + self, + attribute_relationships_dict: dict, + data_model_labels: DisplayLabelType = "class_label", ) -> None: """Load parsed data model. 
Args: diff --git a/schematic/schemas/data_model_parser.py b/schematic/schemas/data_model_parser.py index 0020f2f8f..c5dbacb3e 100644 --- a/schematic/schemas/data_model_parser.py +++ b/schematic/schemas/data_model_parser.py @@ -419,8 +419,7 @@ def gather_jsonld_attributes_relationships(self, model_jsonld: List[dict]) -> Di else: attr_rel_dictionary[p_attr_key][ "Relationships" - ][rel_csv_header].extend([entry[dn_jsonld_key]] - ) + ][rel_csv_header].extend([entry[dn_jsonld_key]]) # If the parsed_val is not already recorded in the dictionary, add it elif attr_in_dict == False: # Get the display name for the parsed value diff --git a/schematic/utils/schema_utils.py b/schematic/utils/schema_utils.py index 939227cde..a8c4d8216 100644 --- a/schematic/utils/schema_utils.py +++ b/schematic/utils/schema_utils.py @@ -185,9 +185,7 @@ def get_label_from_display_name( label = display_name # If not, set get a stripped class or property label (as indicated by the entry type) else: - label = get_stripped_label( - display_name=display_name, entry_type=entry_type - ) + label = get_stripped_label(display_name=display_name, entry_type=entry_type) else: label = get_schema_label( From 1ceecaa74801b5fd3326afaf5b0b745f22809f65 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Wed, 7 Feb 2024 10:54:35 -0700 Subject: [PATCH 156/199] remove logic for selectively running combo tests --- .github/workflows/test.yml | 13 +------------ 1 file changed, 1 insertion(+), 12 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index b2adf95f8..9ef15b86c 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -121,24 +121,13 @@ jobs: #---------------------------------------------- # run test suite #---------------------------------------------- - - name: Run regular tests and rule combination tests - env: - SYNAPSE_ACCESS_TOKEN: ${{ secrets.SYNAPSE_ACCESS_TOKEN }} - if: ${{ contains(github.event.head_commit.message, 'runcombos') }} - run: > - source .venv/bin/activate; - pytest --durations=0 --cov-report=term --cov-report=html:htmlcov --cov=schematic/ - -m "not (google_credentials_needed or schematic_api or table_operations)" --reruns 2 -n auto - - name: Run tests env: SYNAPSE_ACCESS_TOKEN: ${{ secrets.SYNAPSE_ACCESS_TOKEN }} - SERVICE_ACCOUNT_CREDS: ${{ secrets.SERVICE_ACCOUNT_CREDS }} - if: ${{ false == contains(github.event.head_commit.message, 'runcombos') }} run: > source .venv/bin/activate; pytest --durations=0 --cov-report=term --cov-report=html:htmlcov --cov=schematic/ - -m "not (google_credentials_needed or rule_combos or schematic_api or table_operations)" --reruns 2 -n auto + -m "not (google_credentials_needed or schematic_api or table_operations)" --reruns 2 -n auto - name: Upload pytest test results uses: actions/upload-artifact@v2 From e9b0917460a2521653ad6e0deb02bee1c5ab3b7c Mon Sep 17 00:00:00 2001 From: Mialy DeFelice <85905780+mialy-defelice@users.noreply.github.com> Date: Wed, 7 Feb 2024 11:59:58 -0800 Subject: [PATCH 157/199] in schema_utils update label name in str to stripped_label --- schematic/utils/schema_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/schematic/utils/schema_utils.py b/schematic/utils/schema_utils.py index a8c4d8216..c47172cab 100644 --- a/schematic/utils/schema_utils.py +++ b/schematic/utils/schema_utils.py @@ -127,7 +127,7 @@ def get_stripped_label( ][0] logger.warning( - f"Cannot use display name {display_name} as the data model label, becaues it is not 
formatted properly. Please remove all spaces and blacklisted characters: {str(blacklisted_chars)}. The following label was assigned instead: {label}" + f"Cannot use display name {display_name} as the data model label, becaues it is not formatted properly. Please remove all spaces and blacklisted characters: {str(blacklisted_chars)}. The following label was assigned instead: {stripped_label}" ) return stripped_label From 91995769d181ed9b3abf44f443250710e3585d2d Mon Sep 17 00:00:00 2001 From: Mialy DeFelice <85905780+mialy-defelice@users.noreply.github.com> Date: Wed, 7 Feb 2024 12:00:36 -0800 Subject: [PATCH 158/199] add additional tests to test_utils to ccover new functions in schema_utils --- tests/test_utils.py | 64 +++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 64 insertions(+) diff --git a/tests/test_utils.py b/tests/test_utils.py index ab3b13aa2..ea2c44404 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -52,6 +52,9 @@ get_class_label_from_display_name, strip_context, get_label_from_display_name, + get_schema_label, + get_stripped_label, + check_if_display_name_is_valid_label, ) @@ -595,6 +598,67 @@ def test_strip_context(self, helpers, context_value): elif "sms:required" == context_value: assert stripped_contex == ("sms", "required") + @pytest.mark.parametrize( + "test_dn", + list(TEST_DN_DICT.keys()), + ids=list(TEST_DN_DICT.keys()), + ) + def test_check_if_display_name_is_valid_label(self, test_dn): + display_name = test_dn + blacklisted_chars=["(", ")", ".", " ", "-"] + for entry_type, expected_result in TEST_DN_DICT[test_dn].items(): + valid_label = check_if_display_name_is_valid_label(test_dn, blacklisted_chars) + if test_dn in ["Bio-things", "bio things", "Bio Things"]: + assert valid_label == False + else: + assert valid_label == True + + + @pytest.mark.parametrize( + "test_dn", + list(TEST_DN_DICT.keys())[-2:], + ids=list(TEST_DN_DICT.keys())[-2:], + ) + def test_get_stripped_label(self, test_dn:str): + display_name = test_dn + blacklisted_chars=["(", ")", ".", " ", "-"] + for entry_type, expected_result in TEST_DN_DICT[test_dn].items(): + label = "" + + label = get_stripped_label( + entry_type=entry_type, + display_name=display_name, + blacklisted_chars=blacklisted_chars, + ) + assert label == expected_result + + @pytest.mark.parametrize( + "test_dn", + list(TEST_DN_DICT.keys()), + ids=list(TEST_DN_DICT.keys()), + ) + def test_get_schema_label(self, test_dn:str): + display_name = test_dn + for entry_type, expected_result in TEST_DN_DICT[test_dn].items(): + label = "" + + label = get_schema_label( + entry_type=entry_type, + display_name=display_name, + strict_camel_case=False, + ) + + if '-' in display_name: + # In this case, biothings will not strip the blacklisted character, + # so it will not match the dictionary. 
+ if entry_type == 'class': + assert label == display_name.capitalize() + else: + assert label == display_name[0].lower()+ display_name[1:] + else: + assert label == expected_result + + @pytest.mark.parametrize( "test_dn", list(TEST_DN_DICT.keys()), From 1b864a95049e7a23c367d1c8789ef172c29c7cb6 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice <85905780+mialy-defelice@users.noreply.github.com> Date: Wed, 7 Feb 2024 12:02:06 -0800 Subject: [PATCH 159/199] black on test_utils.py --- tests/test_utils.py | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/tests/test_utils.py b/tests/test_utils.py index ea2c44404..fba1340de 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -605,23 +605,24 @@ def test_strip_context(self, helpers, context_value): ) def test_check_if_display_name_is_valid_label(self, test_dn): display_name = test_dn - blacklisted_chars=["(", ")", ".", " ", "-"] + blacklisted_chars = ["(", ")", ".", " ", "-"] for entry_type, expected_result in TEST_DN_DICT[test_dn].items(): - valid_label = check_if_display_name_is_valid_label(test_dn, blacklisted_chars) + valid_label = check_if_display_name_is_valid_label( + test_dn, blacklisted_chars + ) if test_dn in ["Bio-things", "bio things", "Bio Things"]: assert valid_label == False else: assert valid_label == True - @pytest.mark.parametrize( "test_dn", list(TEST_DN_DICT.keys())[-2:], ids=list(TEST_DN_DICT.keys())[-2:], ) - def test_get_stripped_label(self, test_dn:str): + def test_get_stripped_label(self, test_dn: str): display_name = test_dn - blacklisted_chars=["(", ")", ".", " ", "-"] + blacklisted_chars = ["(", ")", ".", " ", "-"] for entry_type, expected_result in TEST_DN_DICT[test_dn].items(): label = "" @@ -637,7 +638,7 @@ def test_get_stripped_label(self, test_dn:str): list(TEST_DN_DICT.keys()), ids=list(TEST_DN_DICT.keys()), ) - def test_get_schema_label(self, test_dn:str): + def test_get_schema_label(self, test_dn: str): display_name = test_dn for entry_type, expected_result in TEST_DN_DICT[test_dn].items(): label = "" @@ -648,16 +649,15 @@ def test_get_schema_label(self, test_dn:str): strict_camel_case=False, ) - if '-' in display_name: - # In this case, biothings will not strip the blacklisted character, + if "-" in display_name: + # In this case, biothings will not strip the blacklisted character, # so it will not match the dictionary. 
- if entry_type == 'class': + if entry_type == "class": assert label == display_name.capitalize() else: - assert label == display_name[0].lower()+ display_name[1:] + assert label == display_name[0].lower() + display_name[1:] else: assert label == expected_result - @pytest.mark.parametrize( "test_dn", From 86e7d6f29592a4e4ebac687e0427411fbd9c6e81 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Wed, 7 Feb 2024 13:39:19 -0700 Subject: [PATCH 160/199] add service acct creds --- .github/workflows/test.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 9ef15b86c..0a720b6fa 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -124,6 +124,7 @@ jobs: - name: Run tests env: SYNAPSE_ACCESS_TOKEN: ${{ secrets.SYNAPSE_ACCESS_TOKEN }} + SERVICE_ACCOUNT_CREDS: ${{ secrets.SERVICE_ACCOUNT_CREDS }} run: > source .venv/bin/activate; pytest --durations=0 --cov-report=term --cov-report=html:htmlcov --cov=schematic/ From 3a9e745cd48b5345361a4e0bf188198b216b917f Mon Sep 17 00:00:00 2001 From: linglp Date: Wed, 7 Feb 2024 17:19:39 -0500 Subject: [PATCH 161/199] update poetry version to 1.3.0 --- .readthedocs.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.readthedocs.yml b/.readthedocs.yml index 6599ad15f..a9472a17d 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -12,7 +12,7 @@ build: python: "3.9" jobs: post_install: - - pip install poetry==1.2.0 + - pip install poetry==1.3.0 - poetry config virtualenvs.create false - poetry install --with doc #Poetry will install my dependencies into the virtualenv created by readthedocs if I set virtualenvs.create=false From d736f067d13fb0248f9a395ce3764efb624228b5 Mon Sep 17 00:00:00 2001 From: andrewelamb Date: Wed, 7 Feb 2024 16:29:11 -0800 Subject: [PATCH 162/199] mialys suggestions and fixes --- schematic/visualization/attributes_explorer.py | 4 ++-- schematic/visualization/tangled_tree.py | 12 +++++++----- 2 files changed, 9 insertions(+), 7 deletions(-) diff --git a/schematic/visualization/attributes_explorer.py b/schematic/visualization/attributes_explorer.py index 71747f999..ff710952e 100644 --- a/schematic/visualization/attributes_explorer.py +++ b/schematic/visualization/attributes_explorer.py @@ -162,8 +162,8 @@ def _parse_attributes( # Gather all attributes, their valid values and requirements for key, value in json_schema["properties"].items(): data_dict[key] = {} - for k, _ in value.items(): - if k == "enum": + for key in value.keys(): + if key == "enum": data_dict[key]["Valid Values"] = value["enum"] if key in json_schema["required"]: data_dict[key]["Required"] = True diff --git a/schematic/visualization/tangled_tree.py b/schematic/visualization/tangled_tree.py index ed4db2b8c..a297aa400 100644 --- a/schematic/visualization/tangled_tree.py +++ b/schematic/visualization/tangled_tree.py @@ -229,7 +229,7 @@ def get_ca_alias(self, conditional_requirements: list) -> dict[str, str]: for req in conditional_requirements ] - for _, req in enumerate(conditional_requirements): + for req in conditional_requirements: if "OR" not in req: attr, ali = req.split(" is ") attr = "".join(attr.split()) @@ -424,7 +424,7 @@ def alias_edges(self, ca_alias: dict[str, str], edges: EdgeDataView) -> list[lis aliased_edges (list[list]) of aliased edges. 
""" aliased_edges = [] - for _, edge in enumerate(edges): + for edge in edges: # construct one set of edges at a time edge_set = [] @@ -477,7 +477,7 @@ def prune_expand_topological_gen( pruned_topological_gen = [] # For each layer(gen) in the topological generation list - for _, layer in enumerate(topological_gen): + for layer in topological_gen: current_layer = [] next_layer = [] @@ -676,7 +676,7 @@ def move_source_nodes_to_bottom_of_layer( Output: node_layers (List(list)): modified to move source nodes to the bottom of each layer. """ - for _, layer in enumerate(node_layers): + for layer in node_layers: nodes_to_move = [] for node in layer: if node in source_nodes: @@ -889,7 +889,9 @@ def get_tangled_tree_layers(self, save_file: bool = True): source_nodes = self.find_source_nodes(nodes, edges) # Map all children to their parents and vice versa - child_parents, parent_children = self.get_parent_child_dictionary(edges) + child_parents, parent_children = self.get_parent_child_dictionary( + edges=edges + ) # find all the downstream nodes all_parent_children = self.get_ancestors_nodes( From 60ac14d91ffbff9818bcda1068553657554f5899 Mon Sep 17 00:00:00 2001 From: andrewelamb Date: Wed, 7 Feb 2024 17:38:13 -0800 Subject: [PATCH 163/199] fix accidental name collision of dict keys --- schematic/visualization/attributes_explorer.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/schematic/visualization/attributes_explorer.py b/schematic/visualization/attributes_explorer.py index ff710952e..f2dac9736 100644 --- a/schematic/visualization/attributes_explorer.py +++ b/schematic/visualization/attributes_explorer.py @@ -162,8 +162,8 @@ def _parse_attributes( # Gather all attributes, their valid values and requirements for key, value in json_schema["properties"].items(): data_dict[key] = {} - for key in value.keys(): - if key == "enum": + for inner_key in value.keys(): + if inner_key == "enum": data_dict[key]["Valid Values"] = value["enum"] if key in json_schema["required"]: data_dict[key]["Required"] = True From 92df2c1ec9b97f3016613c7b57d901dfc083c482 Mon Sep 17 00:00:00 2001 From: linglp Date: Thu, 8 Feb 2024 13:10:27 -0500 Subject: [PATCH 164/199] remove project scope parameter and update example project id in the yaml --- schematic_api/api/openapi/api.yaml | 12 +----------- schematic_api/api/routes.py | 4 ++-- 2 files changed, 3 insertions(+), 13 deletions(-) diff --git a/schematic_api/api/openapi/api.yaml b/schematic_api/api/openapi/api.yaml index 2d66cb640..10b949718 100644 --- a/schematic_api/api/openapi/api.yaml +++ b/schematic_api/api/openapi/api.yaml @@ -699,7 +699,7 @@ paths: type: string nullable: false description: Project ID - example: syn30988314 + example: syn23643250 required: true - in: query name: asset_view @@ -709,16 +709,6 @@ paths: description: ID of view listing all project data assets. For example, for Synapse this would be the Synapse ID of the fileview listing all data assets for a given project.(i.e. master_fileview_id in config_example.yml) example: syn23643253 required: true - - in: query - name: project_scope - schema: - type: array - items: - type: string - nullable: false - description: List, a subset of the projects contained within the asset view that are relevant for the current operation. Speeds up some operations that interact with Synapse. - example: ['syn23643250', 'syn47218127', 'syn47218347'] - required: false responses: "200": description: A list of tuples(json). 
diff --git a/schematic_api/api/routes.py b/schematic_api/api/routes.py index 2e8b8f990..dd3990de9 100644 --- a/schematic_api/api/routes.py +++ b/schematic_api/api/routes.py @@ -727,7 +727,7 @@ def get_asset_view_table(asset_view, return_type): file_view_table_df.to_csv(export_path, index=False) return export_path -def get_project_manifests(project_id, asset_view, project_scope=None): +def get_project_manifests(project_id, asset_view): # Access token now stored in request header access_token = get_access_token() @@ -735,7 +735,7 @@ def get_project_manifests(project_id, asset_view, project_scope=None): config_handler(asset_view=asset_view) # use Synapse Storage - store = SynapseStorage(access_token=access_token, project_scope=project_scope) + store = SynapseStorage(access_token=access_token, project_scope=[project_id]) # call getprojectManifest function lst_manifest = store.getProjectManifests(projectId=project_id) From ac9b601c6ed53dd704d8d7003d53d1536089afcb Mon Sep 17 00:00:00 2001 From: Mialy DeFelice <85905780+mialy-defelice@users.noreply.github.com> Date: Thu, 8 Feb 2024 11:37:48 -0800 Subject: [PATCH 165/199] update help for data_model_labels in viz commands --- schematic/help.py | 6 ++++++ schematic/visualization/commands.py | 8 ++++---- 2 files changed, 10 insertions(+), 4 deletions(-) diff --git a/schematic/help.py b/schematic/help.py index c0d9c2613..a7adcfc77 100644 --- a/schematic/help.py +++ b/schematic/help.py @@ -224,6 +224,12 @@ "text_format": ( "Specify the type of text to gather for tangled tree visualization, either 'plain' or 'highlighted'." ), + "data_model_labels": ( + "Choose how to set the label in the data model. " + "display_label, use the display name as a label, if it is valid (contains no blacklisted characters) otherwise will default to class_label. " + "class_label, default, use standard class or property label. " + "Do not change from default unless there is a real need, using 'display_label' can have consequences if not used properly." 
+ ), }, } } diff --git a/schematic/visualization/commands.py b/schematic/visualization/commands.py index 6b07ae0f4..201f58084 100644 --- a/schematic/visualization/commands.py +++ b/schematic/visualization/commands.py @@ -85,11 +85,11 @@ def get_attributes(ctx: Any) -> None: "-dml", default="class_label", type=click.Choice(["display_label", "class_label"], case_sensitive=True), - help=query_dict(schema_commands, ("schema", "convert", "data_model_labels")), + help=query_dict(viz_commands, ("visualization", "tangled_tree", "data_model_labels")), ) @click.pass_obj def get_tangled_tree_text( - ctx: Any, figure_type: str, text_format: str, data_model_labels: DisplayLabelType + ctx: Any, figure_type: str, text_format: str, data_model_labels: DisplayLabelType = "class_label", ) -> None: """Get text to be placed on the tangled tree visualization.""" # Get JSONLD file path @@ -117,11 +117,11 @@ def get_tangled_tree_text( "-dml", default="class_label", type=click.Choice(["display_label", "class_label"], case_sensitive=True), - help=query_dict(schema_commands, ("schema", "convert", "data_model_labels")), + help=query_dict(viz_commands, ("visualization", "tangled_tree", "data_model_labels")), ) @click.pass_obj def get_tangled_tree_component_layers( - ctx: Any, figure_type: str, data_model_labels: DisplayLabelType + ctx: Any, figure_type: str, data_model_labels: DisplayLabelType = "class_label", ) -> None: """Get the components that belong in each layer of the tangled tree visualization.""" # Get JSONLD file path From b33ee50caba61497b44c5caf6566a14137b35081 Mon Sep 17 00:00:00 2001 From: linglp Date: Thu, 8 Feb 2024 14:52:28 -0500 Subject: [PATCH 166/199] delete old readthedocs yaml file to avoid confusion --- .readthedocs.yaml | 19 ------------------- 1 file changed, 19 deletions(-) delete mode 100644 .readthedocs.yaml diff --git a/.readthedocs.yaml b/.readthedocs.yaml deleted file mode 100644 index 69f40ffe5..000000000 --- a/.readthedocs.yaml +++ /dev/null @@ -1,19 +0,0 @@ -# .readthedocs.yaml -# Read the Docs configuration file -# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details - -# Required -version: 2 - -# Build documentation in the docs/ directory with Sphinx -sphinx: - configuration: docs/conf.py - -# Optionally set the version of Python and requirements required to build your docs -python: - version: 3.7 - install: - - requirements: docs/requirements.txt - - method: pip - path: . 
- system_packages: false \ No newline at end of file From 370de428c9c847db722f54c060fdd5e8408bfc78 Mon Sep 17 00:00:00 2001 From: andrewelamb Date: Thu, 8 Feb 2024 11:57:30 -0800 Subject: [PATCH 167/199] fix linting issues form merge --- schematic/utils/df_utils.py | 41 ++++++++++++++++++++----------------- 1 file changed, 22 insertions(+), 19 deletions(-) diff --git a/schematic/utils/df_utils.py b/schematic/utils/df_utils.py index 415fc0bf5..7c578f046 100644 --- a/schematic/utils/df_utils.py +++ b/schematic/utils/df_utils.py @@ -10,7 +10,7 @@ import dateparser as dp import pandas as pd import numpy as np -from pandarallel import pandarallel # type: ignore +from pandarallel import pandarallel # type: ignore logger = logging.getLogger(__name__) @@ -67,64 +67,67 @@ def load_df( return processed_df -def find_and_convert_ints(df: pd.DataFrame) -> tuple[pd.DataFrame, pd.DataFrame]: +def find_and_convert_ints(dataframe: pd.DataFrame) -> tuple[pd.DataFrame, pd.DataFrame]: """ Find strings that represent integers and convert to type int Args: - df: dataframe with nulls masked as empty strings + dataframe: dataframe with nulls masked as empty strings Returns: ints: dataframe with values that were converted to type int is_int: dataframe with boolean values indicating which cells were converted to type int """ + # pylint: disable=unnecessary-lambda large_manifest_cutoff_size = 1000 # Find integers stored as strings and replace with entries of type np.int64 if ( - df.size < large_manifest_cutoff_size + dataframe.size < large_manifest_cutoff_size ): # If small manifest, iterate as normal for improved performance - ints = df.map(lambda x: convert_ints(x), na_action="ignore").fillna(False) + ints = dataframe.map( + lambda cell: convert_ints(cell), na_action="ignore" + ).fillna(False) - else: # parallelize iterations for large manfiests + else: # parallelize iterations for large manifests pandarallel.initialize(verbose=1) - ints = df.parallel_map(lambda x: convert_ints(x), na_action="ignore").fillna( - False - ) + ints = dataframe.parallel_map( + lambda cell: convert_ints(cell), na_action="ignore" + ).fillna(False) - # Identify cells converted to intergers + # Identify cells converted to integers is_int = ints.map(pd.api.types.is_integer) return ints, is_int -def convert_ints(x: str) -> Union[np.int64, bool]: +def convert_ints(string: str) -> Union[np.int64, bool]: """ Lambda function to convert a string to an integer if possible, otherwise returns False Args: - x: string to attempt conversion to int + string: string to attempt conversion to int Returns: - x converted to type int if possible, otherwise False + string converted to type int if possible, otherwise False """ - return np.int64(x) if str.isdigit(x) else False + return np.int64(string) if str.isdigit(string) else False -def convert_floats(df: pd.DataFrame) -> pd.DataFrame: +def convert_floats(dataframe: pd.DataFrame) -> pd.DataFrame: """ Convert strings that represent floats to type float Args: - df: dataframe with nulls masked as empty strings + dataframe: dataframe with nulls masked as empty strings Returns: float_df: dataframe with values that were converted to type float. 
Columns are type object """ # create a separate copy of the manifest # before beginning conversions to store float values - float_df = deepcopy(df) + float_df = deepcopy(dataframe) # convert strings to numerical dtype (float) if possible, preserve non-numerical strings - for col in df.columns: + for col in dataframe.columns: float_df[col] = pd.to_numeric(float_df[col], errors="coerce").astype("object") # replace values that couldn't be converted to float with the original str values - float_df[col].fillna(df[col][float_df[col].isna()], inplace=True) + float_df[col].fillna(dataframe[col][float_df[col].isna()], inplace=True) return float_df From d0b5189333d7a8cac144d4782f53b616650caea8 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice <85905780+mialy-defelice@users.noreply.github.com> Date: Thu, 8 Feb 2024 13:11:39 -0800 Subject: [PATCH 168/199] run black on viz commands --- schematic/visualization/commands.py | 17 +++++++++++++---- 1 file changed, 13 insertions(+), 4 deletions(-) diff --git a/schematic/visualization/commands.py b/schematic/visualization/commands.py index 201f58084..8abdba00f 100644 --- a/schematic/visualization/commands.py +++ b/schematic/visualization/commands.py @@ -85,11 +85,16 @@ def get_attributes(ctx: Any) -> None: "-dml", default="class_label", type=click.Choice(["display_label", "class_label"], case_sensitive=True), - help=query_dict(viz_commands, ("visualization", "tangled_tree", "data_model_labels")), + help=query_dict( + viz_commands, ("visualization", "tangled_tree", "data_model_labels") + ), ) @click.pass_obj def get_tangled_tree_text( - ctx: Any, figure_type: str, text_format: str, data_model_labels: DisplayLabelType = "class_label", + ctx: Any, + figure_type: str, + text_format: str, + data_model_labels: DisplayLabelType = "class_label", ) -> None: """Get text to be placed on the tangled tree visualization.""" # Get JSONLD file path @@ -117,11 +122,15 @@ def get_tangled_tree_text( "-dml", default="class_label", type=click.Choice(["display_label", "class_label"], case_sensitive=True), - help=query_dict(viz_commands, ("visualization", "tangled_tree", "data_model_labels")), + help=query_dict( + viz_commands, ("visualization", "tangled_tree", "data_model_labels") + ), ) @click.pass_obj def get_tangled_tree_component_layers( - ctx: Any, figure_type: str, data_model_labels: DisplayLabelType = "class_label", + ctx: Any, + figure_type: str, + data_model_labels: DisplayLabelType = "class_label", ) -> None: """Get the components that belong in each layer of the tangled tree visualization.""" # Get JSONLD file path From 6e6a2d13601a61f5c8037fd20ce237776490dad5 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice <85905780+mialy-defelice@users.noreply.github.com> Date: Thu, 8 Feb 2024 13:21:37 -0800 Subject: [PATCH 169/199] update to add data_model_labels option for attributes --- schematic/visualization/commands.py | 22 ++++++++++++++++++---- 1 file changed, 18 insertions(+), 4 deletions(-) diff --git a/schematic/visualization/commands.py b/schematic/visualization/commands.py index 8abdba00f..64cc8d717 100644 --- a/schematic/visualization/commands.py +++ b/schematic/visualization/commands.py @@ -55,14 +55,28 @@ def viz(ctx: Any, config: str) -> None: # use as `schematic model ...` "attributes", ) @click_log.simple_verbosity_option(logger) +@click.option( + "--data_model_labels", + "-dml", + default="class_label", + type=click.Choice(["display_label", "class_label"], case_sensitive=True), + help=query_dict( + viz_commands, ("visualization", "tangled_tree", "data_model_labels") + ), 
+) @click.pass_obj -def get_attributes(ctx: Any) -> None: +def get_attributes( + ctx: Any, + data_model_labels: DisplayLabelType, +) -> None: """Gets attributes""" # Get JSONLD file path path_to_jsonld = CONFIG.model_location log_value_from_config("jsonld", path_to_jsonld) # Run attributes explorer - AttributesExplorer(path_to_jsonld).parse_attributes(save_file=True) + AttributesExplorer(path_to_jsonld, data_model_labels).parse_attributes( + save_file=True + ) return @@ -94,7 +108,7 @@ def get_tangled_tree_text( ctx: Any, figure_type: str, text_format: str, - data_model_labels: DisplayLabelType = "class_label", + data_model_labels: DisplayLabelType, ) -> None: """Get text to be placed on the tangled tree visualization.""" # Get JSONLD file path @@ -130,7 +144,7 @@ def get_tangled_tree_text( def get_tangled_tree_component_layers( ctx: Any, figure_type: str, - data_model_labels: DisplayLabelType = "class_label", + data_model_labels: DisplayLabelType, ) -> None: """Get the components that belong in each layer of the tangled tree visualization.""" # Get JSONLD file path From 3563cefd7b72862e69c9670b9105a11b9d6671d8 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice <85905780+mialy-defelice@users.noreply.github.com> Date: Thu, 8 Feb 2024 13:29:40 -0800 Subject: [PATCH 170/199] make some parameters as required since the are required, and not passing would result in ambigusous error --- schematic/visualization/commands.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/schematic/visualization/commands.py b/schematic/visualization/commands.py index 64cc8d717..39bedc1c8 100644 --- a/schematic/visualization/commands.py +++ b/schematic/visualization/commands.py @@ -85,12 +85,14 @@ def get_attributes( @click.option( "-ft", "--figure_type", + required=True, type=click.Choice(["component", "dependency"], case_sensitive=False), help=query_dict(viz_commands, ("visualization", "tangled_tree", "figure_type")), ) @click.option( "-tf", "--text_format", + required=True, type=click.Choice(["plain", "highlighted"], case_sensitive=False), help=query_dict(viz_commands, ("visualization", "tangled_tree", "text_format")), ) @@ -128,6 +130,7 @@ def get_tangled_tree_text( @click.option( "-ft", "--figure_type", + required=True, type=click.Choice(["component", "dependency"], case_sensitive=False), help=query_dict(viz_commands, ("visualization", "tangled_tree", "figure_type")), ) From 159c5d220376adecb6a11308c7fcce7f60bdbd1e Mon Sep 17 00:00:00 2001 From: andrewelamb Date: Fri, 9 Feb 2024 08:41:58 -0800 Subject: [PATCH 171/199] made jinja and uswgi extras --- poetry.lock | 2 +- pyproject.toml | 7 ++++--- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/poetry.lock b/poetry.lock index a7d28169d..3b111e3cd 100644 --- a/poetry.lock +++ b/poetry.lock @@ -4746,4 +4746,4 @@ api = ["Flask", "Flask-Cors", "Jinja2", "connexion", "uWSGI"] [metadata] lock-version = "2.0" python-versions = ">=3.9.0,<3.11" -content-hash = "f81e639645b02362da8f6f51e1a1c359c5129751f1b44e49486951d8146f2cb3" +content-hash = "c1357a7b00dd799ba38081b73d9ac3a81275d90c75cc82b703a7697c5c4736b0" diff --git a/pyproject.toml b/pyproject.toml index a79da0bf9..c429da1f0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -61,7 +61,6 @@ great-expectations = "^0.15.0" sphinx-click = "^4.0.0" MarkupSafe = "2.1.0" itsdangerous = "^2.0.0" -Jinja2 = ">2.11.3" openpyxl = "^3.0.9" "backports.zoneinfo" = {markers = "python_version < \"3.9\"", version = "^0.2.1"} pdoc = "^12.2.0" @@ -74,9 +73,11 @@ dataclasses-json = "^0.6.1" connexion = {extras = 
["swagger-ui"], version = "^2.8.0", optional = true} Flask = {version = "^2.0.0", optional = true} Flask-Cors = {version = "^3.0.10", optional = true} +uWSGI = {version = "^2.0.21", optional = true} +Jinja2 = {version = ">2.11.3", optional = true} [tool.poetry.extras] -api = ["connexion", "Flask", "Flask-Cors"] +api = ["connexion", "Flask", "Flask-Cors", "uWSGI", "Jinja2"] [tool.poetry.group.dev.dependencies] @@ -95,7 +96,7 @@ pytest-xdist = "^3.5.0" optional = true [tool.poetry.group.aws.dependencies] -uWSGI = "^2.0.21" + [tool.black] line-length = 88 From 6b223b9309ca773e409f9dae5068f65d0fe8f581 Mon Sep 17 00:00:00 2001 From: andrewelamb Date: Fri, 9 Feb 2024 08:42:10 -0800 Subject: [PATCH 172/199] updated readmes --- README.md | 6 ++++++ schematic_api/api/README.md | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 7d3890de8..fb8d54ac6 100644 --- a/README.md +++ b/README.md @@ -71,6 +71,12 @@ poetry install ``` This command will install the dependencies based on what we specify in poetry.lock. If this step is taking a long time, try to go back to step 2 and check your version of poetry. Alternatively, you could also try deleting the lock file and regenerate it by doing `poetry install` (Please note this method should be used as a last resort because this would force other developers to change their development environment) +If you want to install the API you will need to install those dependencies as well: + +``` +poetry install --all-extras +``` + 5. Fill in credential files: *Note*: If you won't interact with Synapse, please ignore this section. diff --git a/schematic_api/api/README.md b/schematic_api/api/README.md index 8e8f27fa5..0abddc730 100644 --- a/schematic_api/api/README.md +++ b/schematic_api/api/README.md @@ -22,7 +22,7 @@ docker compose up --build --remove-orphans ### install uWSGI Install uWSGI by doing: ``` -poetry install --with aws +poetry install --all-extras ``` Note: this approach only works for unix OSs users or windows user with WSL From 2b6063077207db89b1c71d9207cba4c6ce315464 Mon Sep 17 00:00:00 2001 From: andrewelamb Date: Fri, 9 Feb 2024 08:49:12 -0800 Subject: [PATCH 173/199] separated uwsgi into its own group --- README.md | 8 +++++++- poetry.lock | 5 +++-- pyproject.toml | 3 ++- 3 files changed, 12 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index fb8d54ac6..dd7263940 100644 --- a/README.md +++ b/README.md @@ -74,7 +74,13 @@ This command will install the dependencies based on what we specify in poetry.lo If you want to install the API you will need to install those dependencies as well: ``` -poetry install --all-extras +poetry install --extras "api" +``` + +If you want to install the uwsgi: + +``` +poetry install --extras "api" ``` 5. 
Fill in credential files: diff --git a/poetry.lock b/poetry.lock index 8743f01f1..5673df9a4 100644 --- a/poetry.lock +++ b/poetry.lock @@ -4766,9 +4766,10 @@ docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.link testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] [extras] -api = ["Flask", "Flask-Cors", "Jinja2", "connexion", "uWSGI"] +api = ["Flask", "Flask-Cors", "Jinja2", "connexion"] +aws = ["uWSGI"] [metadata] lock-version = "2.0" python-versions = ">=3.9.0,<3.11" -content-hash = "122de090099558f6bac7d0f7167e3fd3cfbc42e33adcd03c0630027236561954" +content-hash = "fd6dbe19c700a80fedc739825f3c26d63476360d1a5f605f8abc2726ce9c3157" diff --git a/pyproject.toml b/pyproject.toml index 792d9f757..90aa08de7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -77,7 +77,8 @@ uWSGI = {version = "^2.0.21", optional = true} Jinja2 = {version = ">2.11.3", optional = true} [tool.poetry.extras] -api = ["connexion", "Flask", "Flask-Cors", "uWSGI", "Jinja2"] +api = ["connexion", "Flask", "Flask-Cors", "Jinja2"] +aws = ["uWSGI"] [tool.poetry.group.dev.dependencies] From 7b57a04a1557e3b234a177e828e01ce17b5386f1 Mon Sep 17 00:00:00 2001 From: andrewelamb Date: Fri, 9 Feb 2024 09:30:45 -0800 Subject: [PATCH 174/199] add dataframe check --- schematic/utils/df_utils.py | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/schematic/utils/df_utils.py b/schematic/utils/df_utils.py index 7c578f046..222b75713 100644 --- a/schematic/utils/df_utils.py +++ b/schematic/utils/df_utils.py @@ -32,6 +32,9 @@ def load_df( data_model (bool, optional): bool, indicates if importing a data model **load_args(dict): dict of key value pairs to be passed to the pd.read_csv function + Raises: + ValueError: When pd.read_csv on the file path doesn't return as dataframe + Returns: pd.DataFrame: a processed dataframe for manifests or unprocessed df for data models and where indicated @@ -43,7 +46,13 @@ def load_df( org_df = pd.read_csv( # type: ignore file_path, keep_default_na=True, encoding="utf8", **load_args ) - assert isinstance(org_df, pd.DataFrame) + if not isinstance(org_df, pd.DataFrame): + raise ValueError( + ( + "Pandas did not return a dataframe. " + "Pandas will return a TextFileReader if chunksize parameter is used." 
+ ) + ) # only trim if not data model csv if not data_model: From c4665df3da6e13ef8508b15057807706023ebc67 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice <85905780+mialy-defelice@users.noreply.github.com> Date: Fri, 9 Feb 2024 14:21:22 -0800 Subject: [PATCH 175/199] add component based rule feature --- schematic/manifest/generator.py | 10 +++- schematic/models/GE_Helpers.py | 25 +++++++- schematic/models/validate_attribute.py | 4 -- schematic/models/validate_manifest.py | 27 ++++++++- schematic/utils/schema_utils.py | 76 ++++++++++++++++++++++++- schematic/utils/validate_rules_utils.py | 2 +- 6 files changed, 129 insertions(+), 15 deletions(-) diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py index 090024f04..c793f9968 100644 --- a/schematic/manifest/generator.py +++ b/schematic/manifest/generator.py @@ -10,7 +10,7 @@ from pathlib import Path import pygsheets as ps from tempfile import NamedTemporaryFile -from typing import Dict, List, Optional, Tuple, Union, BinaryIO, Literal +from typing import Any, Dict, List, Optional, Tuple, Union, BinaryIO, Literal from schematic.schemas.data_model_graph import DataModelGraph, DataModelGraphExplorer from schematic.schemas.data_model_parser import DataModelParser @@ -21,6 +21,7 @@ build_service_account_creds, ) from schematic.utils.df_utils import update_df, load_df +from schematic.utils.schema_utils import extract_component_validation_rules from schematic.utils.validate_utils import rule_in_rule_list # TODO: This module should only be aware of the store interface @@ -730,7 +731,7 @@ def _request_regex_vr(self, gs_formula, i: int, text_color={"red": 1}): def _request_regex_match_vr_formatting( self, - validation_rules: List[str], + validation_rules: Any, i: int, spreadsheet_id: str, requests_body: dict, @@ -750,7 +751,7 @@ def _request_regex_match_vr_formatting( - Upon correct format entry, text will turn black. - If incorrect format is entered a validation error will pop up. Input: - validation_rules: List[str], defines the validation rules + validation_rules: Any(List[str], Dict), defines the validation rules applied to a particular column. i: int, defines current column. requests_body: dict, containing all the update requests to add to the gs @@ -1136,6 +1137,9 @@ def _create_requests_body( validation_rules = self.dmge.get_node_validation_rules( node_display_name=req ) + if type(validation_rules)==dict: + validation_rules = extract_component_validation_rules( + validation_rules=validation_rules, manifest_component=self.root) # Add regex match validaiton rule to Google Sheets. 
if validation_rules and sheet_url: diff --git a/schematic/models/GE_Helpers.py b/schematic/models/GE_Helpers.py index d1fcfb577..229765ddd 100644 --- a/schematic/models/GE_Helpers.py +++ b/schematic/models/GE_Helpers.py @@ -32,6 +32,8 @@ from schematic.models.validate_attribute import GenerateError from schematic.schemas.data_model_graph import DataModelGraphExplorer +from schematic.utils.schema_utils import extract_component_validation_rules + from schematic.utils.validate_utils import ( rule_in_rule_list, np_array_to_str_list, @@ -164,16 +166,35 @@ def build_expectation_suite( # remove trailing/leading whitespaces from manifest self.manifest.map(lambda x: x.strip() if isinstance(x, str) else x) + validation_rules = self.dmge.get_node_validation_rules( node_display_name=col ) + # Check if the validation rule applies to this manifest + if validation_rules and type(validation_rules)==dict: + validation_rules=extract_component_validation_rules(manifest_component = self.manifest['Component'][0], validation_rules=validation_rules) + + ''' + if type(validation_rules)==dict: + manifest_component = self.manifest['Component'][0] + manifest_component_rule = validation_rules.get(manifest_component) + if manifest_component_rule and type(manifest_component_rule)==str: + validation_rules=[manifest_component_rule] + elif manifest_component_rule: + validation_rules=manifest_component_rule + else: + validation_rules=[] + ''' + # check if attribute has any rules associated with it if validation_rules: # iterate through all validation rules for an attribute for rule in validation_rules: - base_rule = rule.split(" ")[0] - + try: + base_rule = rule.split(" ")[0] + except: + breakpoint() # check if rule has an implemented expectation if rule_in_rule_list(rule, self.unimplemented_expectations): continue diff --git a/schematic/models/validate_attribute.py b/schematic/models/validate_attribute.py index fdba69454..693309118 100644 --- a/schematic/models/validate_attribute.py +++ b/schematic/models/validate_attribute.py @@ -254,10 +254,6 @@ def generate_type_error( node_display_name=attribute_name ) - # TODO: Can remove when handling updated so split within graph - if validation_rules and "::" in validation_rules[0]: - validation_rules = validation_rules[0].split("::") - # If IsNA rule is being used to allow `Not Applicable` entries, do not log a message if error_val.lower() == "not applicable" and rule_in_rule_list( "IsNA", validation_rules diff --git a/schematic/models/validate_manifest.py b/schematic/models/validate_manifest.py index 5ed1afa96..00e4fc244 100644 --- a/schematic/models/validate_manifest.py +++ b/schematic/models/validate_manifest.py @@ -25,6 +25,7 @@ from schematic.models.GE_Helpers import GreatExpectationsHelpers from schematic.utils.validate_rules_utils import validation_rule_info from schematic.utils.validate_utils import rule_in_rule_list +from schematic.utils.schema_utils import extract_component_validation_rules logger = logging.getLogger(__name__) @@ -183,9 +184,31 @@ def validate_manifest_rules( manifest.map(lambda x: x.strip() if isinstance(x, str) else x) validation_rules = dmge.get_node_validation_rules(node_display_name=col) + # Parse the validation rules + if validation_rules and type(validation_rules)==dict: + validation_rules=extract_component_validation_rules(manifest_component = manifest['Component'][0], validation_rules=validation_rules) + ''' + if type(validation_rules)==dict: + manifest_component = manifest['Component'][0] + manifest_component_rule = 
validation_rules.get(manifest_component) + all_component_rules = validation_rules.get('all_other_components') + + if manifest_component_rule: + if type(manifest_component_rule)==str: + validation_rules=[manifest_component_rule] + else: + validation_rules=manifest_component_rule + elif all_component_rules: + if type(all_component_rules) == str: + validation_rules=[manifest_component_rule] + else: + validation_rules=manifest_component_rule + else: + validation_rules=[] + ''' # TODO: Can remove when handling updated so split within graph - if validation_rules and "::" in validation_rules[0]: - validation_rules = validation_rules[0].split("::") + #if validation_rules and "::" in validation_rules[0]: + # validation_rules = validation_rules[0].split("::") # Check that attribute rules conform to limits: # no more than two rules for an attribute. diff --git a/schematic/utils/schema_utils.py b/schematic/utils/schema_utils.py index 2e43f8f3e..c975706d3 100644 --- a/schematic/utils/schema_utils.py +++ b/schematic/utils/schema_utils.py @@ -1,8 +1,9 @@ import inflection import json import networkx as nx +import re import string -from typing import List, Dict +from typing import Any, List, Dict def attr_dict_template(key_name: str) -> Dict[str, dict[str, dict]]: @@ -122,9 +123,78 @@ def parse_validation_rules(validation_rules: List[str]) -> List[str]: validation_rules, list: list containing a string validation rule Returns: validation_rules, list: if submitted List + Raises: + ValueError if Rule is not formatted properly """ - if validation_rules and "::" in validation_rules[0]: - validation_rules = validation_rules[0].split("::") + delimiters = { + "component_name_delimiter": "#", + "component_rules_delimiter": "^^", + "rule_delimiter": "::", + } + + + validation_rule_string=validation_rules[0] + + component_names = [] + validation_rules = [] + # Separate out component rules + + if '^^' in validation_rule_string: + component_rules = validation_rule_string.split('^^') + # extract component name + for component_rule in component_rules: + component_rule = component_rule.strip() + if component_rule: + if "#" != component_rule[0]: + component_names.append('all_other_components') + #raise ValueError(f'The provided component rule {component_rule} is not structured properly in the data model, ' + # f'should have hashtag prior to the Component name, please refer to documentation for proper structure') + else: + component_names.append(component_rule.split(" ")[0].replace('#','')) + try: + assert component_names[-1] != ' ' + except: + ValueError(f'There was an error capturing at least one of the component name in the following rule: {component_rule}, please ensure there is not extra whitespace or non-allowed characters.') + component_rule = component_rule.replace(component_rule.split(" ")[0], '') + component_rule = component_rule.strip() + # parse rules + if "::" in component_rule: + validation_rules.append(component_rule.split("::")) + else: + validation_rules.append(component_rule) + + try: + assert len(component_names) == len(validation_rules) + except: + ValueError(f'The number of components names and validation rules does not match for validation rule: {validation_rule_string}.') + + validation_rules_dict = dict(map(lambda i,j : (i,j) , component_names, validation_rules)) + + return validation_rules_dict + + else: + if '#' == validation_rule_string[0]: + ValueError(f"The provided validation rule {validation_rule_string}, looks to be formatted as a component based rule, but is missing the necessary 
formatting, " + f"please refer to the SchemaHub documentation for more details.") + if "::" in validation_rule_string: + validation_rules = validation_rule_string.split("::") + return validation_rules + +def extract_component_validation_rules(manifest_component:str, validation_rules:dict) -> list: + manifest_component_rule = validation_rules.get(manifest_component) + all_component_rules = validation_rules.get('all_other_components') + if manifest_component_rule: + if type(manifest_component_rule)==str: + validation_rules=[manifest_component_rule] + else: + validation_rules=manifest_component_rule + elif all_component_rules: + if type(all_component_rules) == str: + validation_rules=[all_component_rules] + else: + validation_rules=all_component_rules + else: + validation_rules=[] return validation_rules diff --git a/schematic/utils/validate_rules_utils.py b/schematic/utils/validate_rules_utils.py index f1588ed2e..5ac1e4c6f 100644 --- a/schematic/utils/validate_rules_utils.py +++ b/schematic/utils/validate_rules_utils.py @@ -2,7 +2,7 @@ from jsonschema import ValidationError import logging import pandas as pd -from typing import Any, Dict, Optional, Text, List +from typing import Any, Dict, Optional, Text, List, Tuple logger = logging.getLogger(__name__) From b834032d4496e1af8a040e6b37dd3a0bb144edf0 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice <85905780+mialy-defelice@users.noreply.github.com> Date: Fri, 9 Feb 2024 14:36:15 -0800 Subject: [PATCH 176/199] run black --- schematic/manifest/generator.py | 5 ++- schematic/models/GE_Helpers.py | 11 +++-- schematic/models/validate_manifest.py | 13 +++--- schematic/utils/schema_utils.py | 64 ++++++++++++++++----------- 4 files changed, 57 insertions(+), 36 deletions(-) diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py index c793f9968..fe49a23e1 100644 --- a/schematic/manifest/generator.py +++ b/schematic/manifest/generator.py @@ -1137,9 +1137,10 @@ def _create_requests_body( validation_rules = self.dmge.get_node_validation_rules( node_display_name=req ) - if type(validation_rules)==dict: + if type(validation_rules) == dict: validation_rules = extract_component_validation_rules( - validation_rules=validation_rules, manifest_component=self.root) + validation_rules=validation_rules, manifest_component=self.root + ) # Add regex match validaiton rule to Google Sheets. 
if validation_rules and sheet_url: diff --git a/schematic/models/GE_Helpers.py b/schematic/models/GE_Helpers.py index 229765ddd..c69be1776 100644 --- a/schematic/models/GE_Helpers.py +++ b/schematic/models/GE_Helpers.py @@ -172,10 +172,13 @@ def build_expectation_suite( ) # Check if the validation rule applies to this manifest - if validation_rules and type(validation_rules)==dict: - validation_rules=extract_component_validation_rules(manifest_component = self.manifest['Component'][0], validation_rules=validation_rules) + if validation_rules and type(validation_rules) == dict: + validation_rules = extract_component_validation_rules( + manifest_component=self.manifest["Component"][0], + validation_rules=validation_rules, + ) - ''' + """ if type(validation_rules)==dict: manifest_component = self.manifest['Component'][0] manifest_component_rule = validation_rules.get(manifest_component) @@ -185,7 +188,7 @@ def build_expectation_suite( validation_rules=manifest_component_rule else: validation_rules=[] - ''' + """ # check if attribute has any rules associated with it if validation_rules: diff --git a/schematic/models/validate_manifest.py b/schematic/models/validate_manifest.py index 00e4fc244..caf782a15 100644 --- a/schematic/models/validate_manifest.py +++ b/schematic/models/validate_manifest.py @@ -185,9 +185,12 @@ def validate_manifest_rules( validation_rules = dmge.get_node_validation_rules(node_display_name=col) # Parse the validation rules - if validation_rules and type(validation_rules)==dict: - validation_rules=extract_component_validation_rules(manifest_component = manifest['Component'][0], validation_rules=validation_rules) - ''' + if validation_rules and type(validation_rules) == dict: + validation_rules = extract_component_validation_rules( + manifest_component=manifest["Component"][0], + validation_rules=validation_rules, + ) + """ if type(validation_rules)==dict: manifest_component = manifest['Component'][0] manifest_component_rule = validation_rules.get(manifest_component) @@ -205,9 +208,9 @@ def validate_manifest_rules( validation_rules=manifest_component_rule else: validation_rules=[] - ''' + """ # TODO: Can remove when handling updated so split within graph - #if validation_rules and "::" in validation_rules[0]: + # if validation_rules and "::" in validation_rules[0]: # validation_rules = validation_rules[0].split("::") # Check that attribute rules conform to limits: diff --git a/schematic/utils/schema_utils.py b/schematic/utils/schema_utils.py index c975706d3..6b99b1959 100644 --- a/schematic/utils/schema_utils.py +++ b/schematic/utils/schema_utils.py @@ -131,31 +131,36 @@ def parse_validation_rules(validation_rules: List[str]) -> List[str]: "component_rules_delimiter": "^^", "rule_delimiter": "::", } - - validation_rule_string=validation_rules[0] + validation_rule_string = validation_rules[0] component_names = [] validation_rules = [] # Separate out component rules - - if '^^' in validation_rule_string: - component_rules = validation_rule_string.split('^^') + + if "^^" in validation_rule_string: + component_rules = validation_rule_string.split("^^") # extract component name for component_rule in component_rules: component_rule = component_rule.strip() if component_rule: if "#" != component_rule[0]: - component_names.append('all_other_components') - #raise ValueError(f'The provided component rule {component_rule} is not structured properly in the data model, ' + component_names.append("all_other_components") + # raise ValueError(f'The provided component rule {component_rule} 
is not structured properly in the data model, ' # f'should have hashtag prior to the Component name, please refer to documentation for proper structure') else: - component_names.append(component_rule.split(" ")[0].replace('#','')) + component_names.append( + component_rule.split(" ")[0].replace("#", "") + ) try: - assert component_names[-1] != ' ' + assert component_names[-1] != " " except: - ValueError(f'There was an error capturing at least one of the component name in the following rule: {component_rule}, please ensure there is not extra whitespace or non-allowed characters.') - component_rule = component_rule.replace(component_rule.split(" ")[0], '') + ValueError( + f"There was an error capturing at least one of the component name in the following rule: {component_rule}, please ensure there is not extra whitespace or non-allowed characters." + ) + component_rule = component_rule.replace( + component_rule.split(" ")[0], "" + ) component_rule = component_rule.strip() # parse rules if "::" in component_rule: @@ -166,35 +171,44 @@ def parse_validation_rules(validation_rules: List[str]) -> List[str]: try: assert len(component_names) == len(validation_rules) except: - ValueError(f'The number of components names and validation rules does not match for validation rule: {validation_rule_string}.') - - validation_rules_dict = dict(map(lambda i,j : (i,j) , component_names, validation_rules)) + ValueError( + f"The number of components names and validation rules does not match for validation rule: {validation_rule_string}." + ) + + validation_rules_dict = dict( + map(lambda i, j: (i, j), component_names, validation_rules) + ) return validation_rules_dict else: - if '#' == validation_rule_string[0]: - ValueError(f"The provided validation rule {validation_rule_string}, looks to be formatted as a component based rule, but is missing the necessary formatting, " - f"please refer to the SchemaHub documentation for more details.") + if "#" == validation_rule_string[0]: + ValueError( + f"The provided validation rule {validation_rule_string}, looks to be formatted as a component based rule, but is missing the necessary formatting, " + f"please refer to the SchemaHub documentation for more details." 
+ ) if "::" in validation_rule_string: validation_rules = validation_rule_string.split("::") return validation_rules -def extract_component_validation_rules(manifest_component:str, validation_rules:dict) -> list: + +def extract_component_validation_rules( + manifest_component: str, validation_rules: dict +) -> list: manifest_component_rule = validation_rules.get(manifest_component) - all_component_rules = validation_rules.get('all_other_components') + all_component_rules = validation_rules.get("all_other_components") if manifest_component_rule: - if type(manifest_component_rule)==str: - validation_rules=[manifest_component_rule] + if type(manifest_component_rule) == str: + validation_rules = [manifest_component_rule] else: - validation_rules=manifest_component_rule + validation_rules = manifest_component_rule elif all_component_rules: if type(all_component_rules) == str: - validation_rules=[all_component_rules] + validation_rules = [all_component_rules] else: - validation_rules=all_component_rules + validation_rules = all_component_rules else: - validation_rules=[] + validation_rules = [] return validation_rules From 298688057cee177c3c821189ab5e4af7289ad648 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice <85905780+mialy-defelice@users.noreply.github.com> Date: Fri, 9 Feb 2024 15:25:01 -0800 Subject: [PATCH 177/199] clean up and refactor some functions --- schematic/manifest/generator.py | 2 +- schematic/models/GE_Helpers.py | 31 ++---- schematic/models/validate_manifest.py | 24 +--- schematic/utils/schema_utils.py | 155 +++++++++++++++----------- 4 files changed, 102 insertions(+), 110 deletions(-) diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py index fe49a23e1..1daba5397 100644 --- a/schematic/manifest/generator.py +++ b/schematic/manifest/generator.py @@ -1137,7 +1137,7 @@ def _create_requests_body( validation_rules = self.dmge.get_node_validation_rules( node_display_name=req ) - if type(validation_rules) == dict: + if isistance(validation_rules, dict): validation_rules = extract_component_validation_rules( validation_rules=validation_rules, manifest_component=self.root ) diff --git a/schematic/models/GE_Helpers.py b/schematic/models/GE_Helpers.py index c69be1776..5b356999e 100644 --- a/schematic/models/GE_Helpers.py +++ b/schematic/models/GE_Helpers.py @@ -171,33 +171,18 @@ def build_expectation_suite( node_display_name=col ) - # Check if the validation rule applies to this manifest - if validation_rules and type(validation_rules) == dict: - validation_rules = extract_component_validation_rules( - manifest_component=self.manifest["Component"][0], - validation_rules=validation_rules, - ) - - """ - if type(validation_rules)==dict: - manifest_component = self.manifest['Component'][0] - manifest_component_rule = validation_rules.get(manifest_component) - if manifest_component_rule and type(manifest_component_rule)==str: - validation_rules=[manifest_component_rule] - elif manifest_component_rule: - validation_rules=manifest_component_rule - else: - validation_rules=[] - """ - # check if attribute has any rules associated with it if validation_rules: + # Check if the validation rule applies to this manifest + if isinstance(validation_rules, dict): + validation_rules = extract_component_validation_rules( + manifest_component=self.manifest["Component"][0], + validation_rules=validation_rules, + ) # iterate through all validation rules for an attribute for rule in validation_rules: - try: - base_rule = rule.split(" ")[0] - except: - breakpoint() + base_rule = 
rule.split(" ")[0] + # check if rule has an implemented expectation if rule_in_rule_list(rule, self.unimplemented_expectations): continue diff --git a/schematic/models/validate_manifest.py b/schematic/models/validate_manifest.py index caf782a15..f078bd30e 100644 --- a/schematic/models/validate_manifest.py +++ b/schematic/models/validate_manifest.py @@ -185,33 +185,11 @@ def validate_manifest_rules( validation_rules = dmge.get_node_validation_rules(node_display_name=col) # Parse the validation rules - if validation_rules and type(validation_rules) == dict: + if validation_rules and isinstance(validation_rules, dict): validation_rules = extract_component_validation_rules( manifest_component=manifest["Component"][0], validation_rules=validation_rules, ) - """ - if type(validation_rules)==dict: - manifest_component = manifest['Component'][0] - manifest_component_rule = validation_rules.get(manifest_component) - all_component_rules = validation_rules.get('all_other_components') - - if manifest_component_rule: - if type(manifest_component_rule)==str: - validation_rules=[manifest_component_rule] - else: - validation_rules=manifest_component_rule - elif all_component_rules: - if type(all_component_rules) == str: - validation_rules=[manifest_component_rule] - else: - validation_rules=manifest_component_rule - else: - validation_rules=[] - """ - # TODO: Can remove when handling updated so split within graph - # if validation_rules and "::" in validation_rules[0]: - # validation_rules = validation_rules[0].split("::") # Check that attribute rules conform to limits: # no more than two rules for an attribute. diff --git a/schematic/utils/schema_utils.py b/schematic/utils/schema_utils.py index 6b99b1959..806498631 100644 --- a/schematic/utils/schema_utils.py +++ b/schematic/utils/schema_utils.py @@ -5,6 +5,12 @@ import string from typing import Any, List, Dict +DELIMITERS = { + "component_name_delimiter": "#", + "component_rules_delimiter": "^^", + "rule_delimiter": "::", +} + def attr_dict_template(key_name: str) -> Dict[str, dict[str, dict]]: return {key_name: {"Relationships": {}}} @@ -117,79 +123,102 @@ def convert_bool_to_str(provided_bool: bool) -> str: return str(provided_bool) -def parse_validation_rules(validation_rules: List[str]) -> List[str]: +def parse_component_validation_rules(validation_rule_string: str): + component_names = [] + validation_rules = [] + + component_rules = validation_rule_string.split( + DELIMITERS["component_rules_delimiter"] + ) + # extract component name + for component_rule in component_rules: + component_rule = component_rule.strip() + if component_rule: + if DELIMITERS["component_name_delimiter"] != component_rule[0]: + component_names.append("all_other_components") + else: + component_names.append( + component_rule.split(" ")[0].replace( + DELIMITERS["component_name_delimiter"], "" + ) + ) + try: + assert component_names[-1] != " " + except: + ValueError( + f"There was an error capturing at least one of the component name in the following rule: {component_rule}, " + f"please ensure there is not extra whitespace or non-allowed characters." 
+ ) + component_rule = component_rule.replace( + component_rule.split(" ")[0], "" + ) + component_rule = component_rule.strip() + # parse rules + if DELIMITERS["rule_delimiter"] in component_rule: + validation_rules.append( + component_rule.split(DELIMITERS["rule_delimiter"]) + ) + else: + validation_rules.append(component_rule) + + try: + assert len(component_names) == len(validation_rules) + except: + raise ValueError( + f"The number of components names and validation rules does not match for validation rule: {validation_rule_string}." + ) + + validation_rules_dict = dict( + map(lambda i, j: (i, j), component_names, validation_rules) + ) + return validation_rules_dict + + +def parse_single_set_validation_rules(validation_rule_string: str) -> list: + # Try to catch an improperly formatted rule + if DELIMITERS["component_name_delimiter"] == validation_rule_string[0]: + raise ValueError( + f"The provided validation rule {validation_rule_string}, looks to be formatted as a component " + f"based rule, but is missing the necessary formatting, " + f"please refer to the SchemaHub documentation for more details." + ) + + # Parse rules that are set across *all* components/manifests + if DELIMITERS["rule_delimiter"] in validation_rule_string: + return validation_rule_string.split(DELIMITERS["rule_delimiter"]) + + +def parse_validation_rules(validation_rules: Any) -> Any: """Split multiple validation rules based on :: delimiter Args: - validation_rules, list: list containing a string validation rule + validation_rules, Any[List[str], Dict]: List or Dictionary of validation rules, + if list, contains a string validation rule; if dictionary, key is the component the + rule (value) is applied to Returns: validation_rules, list: if submitted List Raises: ValueError if Rule is not formatted properly """ - delimiters = { - "component_name_delimiter": "#", - "component_rules_delimiter": "^^", - "rule_delimiter": "::", - } - validation_rule_string = validation_rules[0] - - component_names = [] - validation_rules = [] - # Separate out component rules - - if "^^" in validation_rule_string: - component_rules = validation_rule_string.split("^^") - # extract component name - for component_rule in component_rules: - component_rule = component_rule.strip() - if component_rule: - if "#" != component_rule[0]: - component_names.append("all_other_components") - # raise ValueError(f'The provided component rule {component_rule} is not structured properly in the data model, ' - # f'should have hashtag prior to the Component name, please refer to documentation for proper structure') - else: - component_names.append( - component_rule.split(" ")[0].replace("#", "") - ) - try: - assert component_names[-1] != " " - except: - ValueError( - f"There was an error capturing at least one of the component name in the following rule: {component_rule}, please ensure there is not extra whitespace or non-allowed characters." - ) - component_rule = component_rule.replace( - component_rule.split(" ")[0], "" - ) - component_rule = component_rule.strip() - # parse rules - if "::" in component_rule: - validation_rules.append(component_rule.split("::")) - else: - validation_rules.append(component_rule) - - try: - assert len(component_names) == len(validation_rules) - except: - ValueError( - f"The number of components names and validation rules does not match for validation rule: {validation_rule_string}." 
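Taken together, the helpers introduced in this patch formalize a small grammar over the three DELIMITERS: # prefixes a component name, ^^ separates per-component rule sets, and :: separates individual rules within a set. A self-contained sketch of how one component-scoped rule string decomposes under that grammar (the rule string is invented for illustration; unscoped rules land in the all_other_components bucket):

    rule = "#Patient int^^#Biospecimen unique error^^int::inRange 100 900"

    parsed = {}
    for part in rule.split("^^"):  # per-component rule sets
        part = part.strip()
        if part.startswith("#"):  # explicitly scoped to one component
            name, _, body = part[1:].partition(" ")
        else:  # unscoped rules apply to all other components
            name, body = "all_other_components", part
        parsed[name] = body.split("::") if "::" in body else body

    # parsed == {
    #     "Patient": "int",
    #     "Biospecimen": "unique error",
    #     "all_other_components": ["int", "inRange 100 900"],
    # }

The refactored dispatcher that consumes this grammar continues in the hunk below.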
+ if isinstance(validation_rules, dict): + # Rules pulled in as a dict can be used directly + return validation_rules + elif isinstance(validation_rules, list): + validation_rule_string = validation_rules[0] + # Parse rules set for a subset of components/manifests + if DELIMITERS["component_rules_delimiter"] in validation_rule_string: + return parse_component_validation_rules( + validation_rule_string=validation_rule_string ) - - validation_rules_dict = dict( - map(lambda i, j: (i, j), component_names, validation_rules) - ) - - return validation_rules_dict - - else: - if "#" == validation_rule_string[0]: - ValueError( - f"The provided validation rule {validation_rule_string}, looks to be formatted as a component based rule, but is missing the necessary formatting, " - f"please refer to the SchemaHub documentation for more details." + # Parse rules that are set across *all* components/manifests + else: + return parse_single_set_validation_rules( + validation_rule_string=validation_rule_string ) - if "::" in validation_rule_string: - validation_rules = validation_rule_string.split("::") - return validation_rules + else: + raise ValueError( + f"The validation rule provided: {str(validation_rules)} is not submitted in an accepted type (list, dictionary) please check your JSONLD." + ) def extract_component_validation_rules( From 659e4a4cd12e97b04bcb1ac84bb3bff39332732b Mon Sep 17 00:00:00 2001 From: Mialy DeFelice <85905780+mialy-defelice@users.noreply.github.com> Date: Fri, 9 Feb 2024 15:53:56 -0800 Subject: [PATCH 178/199] continue cleanup, and refactor functions to break down more --- schematic/manifest/generator.py | 4 +- schematic/utils/schema_utils.py | 77 ++++++++++++++++++++------------- 2 files changed, 49 insertions(+), 32 deletions(-) diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py index 1daba5397..0e4b7db20 100644 --- a/schematic/manifest/generator.py +++ b/schematic/manifest/generator.py @@ -731,7 +731,7 @@ def _request_regex_vr(self, gs_formula, i: int, text_color={"red": 1}): def _request_regex_match_vr_formatting( self, - validation_rules: Any, + validation_rules: List[str], i: int, spreadsheet_id: str, requests_body: dict, @@ -1137,7 +1137,7 @@ def _create_requests_body( validation_rules = self.dmge.get_node_validation_rules( node_display_name=req ) - if isistance(validation_rules, dict): + if isinstance(validation_rules, dict): validation_rules = extract_component_validation_rules( validation_rules=validation_rules, manifest_component=self.root ) diff --git a/schematic/utils/schema_utils.py b/schematic/utils/schema_utils.py index 806498631..f6316ae29 100644 --- a/schematic/utils/schema_utils.py +++ b/schematic/utils/schema_utils.py @@ -3,7 +3,7 @@ import networkx as nx import re import string -from typing import Any, List, Dict +from typing import List, Dict, Tuple, Union DELIMITERS = { "component_name_delimiter": "#", @@ -123,44 +123,61 @@ def convert_bool_to_str(provided_bool: bool) -> str: return str(provided_bool) -def parse_component_validation_rules(validation_rule_string: str): +def get_component_rules(component_rule: str, validation_rules: list) -> list: + # Separate multiple rules (defined by addition of the rule delimiter) + if DELIMITERS["rule_delimiter"] in component_rule: + validation_rules.append(component_rule.split(DELIMITERS["rule_delimiter"])) + # Get single rule + else: + validation_rules.append(component_rule) + return validation_rules + + +def get_component_name(component_names: list, component_rule: str) -> Tuple[list, str]: 
+ # If a component name is not attached to the rule, have it apply to all other components + if DELIMITERS["component_name_delimiter"] != component_rule[0]: + component_names.append("all_other_components") + # Get the component name if available + else: + component_names.append( + component_rule.split(" ")[0].replace( + DELIMITERS["component_name_delimiter"], "" + ) + ) + try: + assert component_names[-1] != " " + except: + ValueError( + f"There was an error capturing at least one of the component name in the following rule: {component_rule}, " + f"please ensure there is not extra whitespace or non-allowed characters." + ) + component_rule = component_rule.replace(component_rule.split(" ")[0], "") + component_rule = component_rule.strip() + return component_names, component_rule + + +def parse_component_validation_rules(validation_rule_string: str) -> Dict: component_names = [] validation_rules = [] component_rules = validation_rule_string.split( DELIMITERS["component_rules_delimiter"] ) - # extract component name + # Extract component rules, per component for component_rule in component_rules: component_rule = component_rule.strip() if component_rule: - if DELIMITERS["component_name_delimiter"] != component_rule[0]: - component_names.append("all_other_components") - else: - component_names.append( - component_rule.split(" ")[0].replace( - DELIMITERS["component_name_delimiter"], "" - ) - ) - try: - assert component_names[-1] != " " - except: - ValueError( - f"There was an error capturing at least one of the component name in the following rule: {component_rule}, " - f"please ensure there is not extra whitespace or non-allowed characters." - ) - component_rule = component_rule.replace( - component_rule.split(" ")[0], "" - ) - component_rule = component_rule.strip() - # parse rules - if DELIMITERS["rule_delimiter"] in component_rule: - validation_rules.append( - component_rule.split(DELIMITERS["rule_delimiter"]) - ) - else: - validation_rules.append(component_rule) + # Get component name attached to rule + component_names, component_rule = get_component_name( + component_names=component_names, component_rule=component_rule + ) + + # Get rules + validation_rules = get_component_rules( + component_rule=component_rule, validation_rules=validation_rules + ) + # Ensure we collected the component names and validation rules like expected try: assert len(component_names) == len(validation_rules) except: @@ -188,7 +205,7 @@ def parse_single_set_validation_rules(validation_rule_string: str) -> list: return validation_rule_string.split(DELIMITERS["rule_delimiter"]) -def parse_validation_rules(validation_rules: Any) -> Any: +def parse_validation_rules(validation_rules: Union[list, dict]) -> Union[list, dict]: """Split multiple validation rules based on :: delimiter Args: validation_rules, Any[List[str], Dict]: List or Dictionary of validation rules, From 4ac1f98b9abe799d8363721d9bcbf3abd2d9884a Mon Sep 17 00:00:00 2001 From: Mialy DeFelice <85905780+mialy-defelice@users.noreply.github.com> Date: Tue, 13 Feb 2024 14:23:00 -0800 Subject: [PATCH 179/199] add new ManifestValidation Test --- tests/test_validation.py | 37 ++++++++++++++++++++++++++++++++++++- 1 file changed, 36 insertions(+), 1 deletion(-) diff --git a/tests/test_validation.py b/tests/test_validation.py index df1211cfb..f704117ba 100644 --- a/tests/test_validation.py +++ b/tests/test_validation.py @@ -382,7 +382,42 @@ def test_in_house_validation(self,helpers,dmge,metadataModel): attribute_name='Check Match Exactly values', invalid_entry = 
['71738', '98085', '210065'], dmge = dmge, - )[1] in warnings + )[1] in warnings + + @pytest.mark.parametrize("manifest_path", + ["mock_manifests/example.biospecimen_component_rule.manifest.csv", + "mock_manifests/example.patient_component_rule.manifest.csv"], + ids=["biospecimen_manifest", "patient_manifest"]) + def test_component_validations(self, helpers, manifest_path): + full_manifest_path = helpers.get_data_path(manifest_path) + manifest = helpers.get_data_frame(full_manifest_path) + + root_node = manifest['Component'][0] + + dmge = helpers.get_data_model_graph_explorer(path="example_new_vrs.model.csv") + + data_model_js = DataModelJSONSchema(jsonld_path=helpers.get_data_path('example_new_vrs.model.csv'), graph=dmge.graph) + + json_schema = data_model_js.get_json_validation_schema(source_node=root_node, schema_name=root_node + "_validation") + + validateManifest = ValidateManifest( + errors = [], + manifest = manifest, + manifestPath = full_manifest_path, + dmge = dmge, + jsonSchema = json_schema + ) + + _, vmr_errors, vmr_warnings = validateManifest.validate_manifest_rules( + manifest=manifest, dmge=dmge, restrict_rules=False, project_scope=None, + ) + + if root_node == 'Biospecimen': + assert vmr_errors and vmr_errors[0][0] == ['2', '3'] and vmr_errors[0][-1] == ['123'] + assert vmr_warnings == [] + elif root_node == 'Patient': + assert vmr_errors == [] + assert vmr_warnings and vmr_warnings[0][0] == ['2', '3'] and vmr_warnings[0][-1] == ['123'] @pytest.mark.rule_combos(reason = 'This introduces a great number of tests covering every possible rule combination that are only necessary on occasion.') From 0f652aa5834c6c6d8b2d3f4665371a342ac149ae Mon Sep 17 00:00:00 2001 From: Mialy DeFelice <85905780+mialy-defelice@users.noreply.github.com> Date: Tue, 13 Feb 2024 14:23:43 -0800 Subject: [PATCH 180/199] add new tests to test_utils and run black formatter --- tests/test_utils.py | 615 +++++++++++++++++++++++++++++++++++--------- 1 file changed, 490 insertions(+), 125 deletions(-) diff --git a/tests/test_utils.py b/tests/test_utils.py index 8fb738d1d..71e9f5550 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -18,7 +18,12 @@ from schematic.schemas.data_model_parser import DataModelParser from schematic.schemas.data_model_graph import DataModelGraph, DataModelGraphExplorer -from schematic.schemas.data_model_jsonld import DataModelJsonLD, BaseTemplate, PropertyTemplate, ClassTemplate +from schematic.schemas.data_model_jsonld import ( + DataModelJsonLD, + BaseTemplate, + PropertyTemplate, + ClassTemplate, +) from schematic.schemas.data_model_json_schema import DataModelJSONSchema from schematic.schemas.data_model_relationships import DataModelRelationships @@ -29,18 +34,30 @@ MissingConfigAndArgumentValueError, ) from schematic import LOADER -from schematic.exceptions import (MissingConfigAndArgumentValueError, - MissingConfigValueError) +from schematic.exceptions import ( + MissingConfigAndArgumentValueError, + MissingConfigValueError, +) -from schematic.utils import (cli_utils, df_utils, general, io_utils, - validate_utils) -from schematic.utils.general import (calculate_datetime, - check_synapse_cache_size, - clear_synapse_cache, entity_type_mapping) -from schematic.utils.schema_utils import (export_schema, - get_property_label_from_display_name, - get_class_label_from_display_name, - strip_context) +from schematic.utils import cli_utils, df_utils, general, io_utils, validate_utils +from schematic.utils.general import ( + calculate_datetime, + check_synapse_cache_size, + 
clear_synapse_cache, + entity_type_mapping, +) +from schematic.utils.schema_utils import ( + export_schema, + get_property_label_from_display_name, + get_class_label_from_display_name, + strip_context, + get_individual_rules, + get_component_name_rules, + parse_component_validation_rules, + parse_single_set_validation_rules, + parse_validation_rules, + extract_component_validation_rules, +) logging.basicConfig(level=logging.DEBUG) @@ -48,6 +65,83 @@ IN_GITHUB_ACTIONS = os.getenv("GITHUB_ACTIONS") +MULTI_RULE_DICT = { + "multi_rule": { + "starting_rule": "unique::list::num", + "parsed_rule": [["unique", "list", "num"]], + }, + "double_rule": { + "starting_rule": "unique::list", + "parsed_rule": [["unique", "list"]], + }, + "single_rule": {"starting_rule": "unique", "parsed_rule": ["unique"]}, +} + +TEST_VALIDATION_RULES = { + "multi_component_rule": { + "validation_rules": [ + "#Patient int^^#Biospecimen unique error^^#BulkRNA-seqAssay int" + ], + "parsed_rules": { + "Patient": "int", + "Biospecimen": "unique error", + "BulkRNA-seqAssay": "int", + }, + "extracted_rules": { + "Patient": ["int"], + "Biospecimen": ["unique error"], + "BulkRNA-seqAssay": ["int"], + }, + }, + "double_component_rule": { + "validation_rules": ["#Patient int^^#Biospecimen unique error"], + "parsed_rules": {"Patient": "int", "Biospecimen": "unique error"}, + "extracted_rules": {"Patient": ["int"], "Biospecimen": ["unique error"]}, + }, + "single_component_rule_1": { + "validation_rules": ["#Patient int^^"], + "parsed_rules": {"Patient": "int"}, + "extracted_rules": {"Patient": ["int"]}, + }, + "single_component_rule_2": { + "validation_rules": ["^^#Patient int"], + "parsed_rules": {"Patient": "int"}, + "extracted_rules": {"Patient": ["int"]}, + }, + "single_component_exclusion": { + "validation_rules": ["int::inRange 100 900^^#Patient"], + "parsed_rules": { + "all_other_components": ["int", "inRange 100 900"], + "Patient": "", + }, + "extracted_rules": { + "all_other_components": ["int", "inRange 100 900"], + "Patient": [], + }, + }, + "dictionary_rule": { + "validation_rules": {"BiospecimenManifest": "unique error", "Patient": "int"}, + "parsed_rules": {"BiospecimenManifest": "unique error", "Patient": "int"}, + "extracted_rules": { + "BiospecimenManifest": ["unique error"], + "Patient": ["int"], + }, + }, + "str_rule": { + "validation_rules": "#Patient int^^#Biospecimen unique error", + "parsed_rules": "raises_exception", + }, + "simple_rule": { + "validation_rules": ["int"], + "parsed_rules": ["int"], + }, + "double_rule": { + "validation_rules": ["list::regex match \(\d{3}\) \d{3}-\d{4}"], + "parsed_rules": ["list", "regex match \(\d{3}\) \d{3}-\d{4}"], + }, +} + + class TestGeneral: def test_clear_synapse_cache(self, tmp_path): # define location of mock synapse cache @@ -55,14 +149,18 @@ def test_clear_synapse_cache(self, tmp_path): mock_synapse_cache_dir.mkdir() mock_sub_folder = mock_synapse_cache_dir / "123" mock_sub_folder.mkdir() - mock_table_query_folder = mock_sub_folder/ "456" + mock_table_query_folder = mock_sub_folder / "456" mock_table_query_folder.mkdir() # create mock table query csv and a mock cache map - mock_synapse_table_query_csv = mock_table_query_folder/ "mock_synapse_table_query.csv" + mock_synapse_table_query_csv = ( + mock_table_query_folder / "mock_synapse_table_query.csv" + ) mock_synapse_table_query_csv.write_text("mock table query content") - mock_cache_map = mock_table_query_folder/ ".cacheMap" - mock_cache_map.write_text(f"{mock_synapse_table_query_csv}: '2022-06-13T19:24:27.000Z'") 
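For orientation, the fixture dictionaries added at the top of this test module (MULTI_RULE_DICT and TEST_VALIDATION_RULES) pair each raw rule with the parse the utilities are expected to produce (parsed_rules) and with its per-component expansion (extracted_rules). A short sketch of the round trip those fixtures exercise, assuming the helper signatures introduced in the preceding patches:

    from schematic.utils.schema_utils import (
        extract_component_validation_rules,
        parse_validation_rules,
    )

    parsed = parse_validation_rules(["#Patient int^^#Biospecimen unique error"])
    # parsed == {"Patient": "int", "Biospecimen": "unique error"}

    assert extract_component_validation_rules("Patient", parsed) == ["int"]
    assert extract_component_validation_rules("Biospecimen", parsed) == ["unique error"]

The black-formatted cache tests resume below.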
+ mock_cache_map = mock_table_query_folder / ".cacheMap" + mock_cache_map.write_text( + f"{mock_synapse_table_query_csv}: '2022-06-13T19:24:27.000Z'" + ) assert os.path.exists(mock_synapse_table_query_csv) @@ -76,31 +174,43 @@ def test_clear_synapse_cache(self, tmp_path): # make sure that cache files are now gone assert os.path.exists(mock_synapse_table_query_csv) == False assert os.path.exists(mock_cache_map) == False - + def test_calculate_datetime_before_minutes(self): - input_date = datetime.strptime("07/20/23 17:36:34", '%m/%d/%y %H:%M:%S') - minutes_before = calculate_datetime(input_date=input_date, minutes=10, before_or_after="before") - expected_result_date_before = datetime.strptime("07/20/23 17:26:34", '%m/%d/%y %H:%M:%S') + input_date = datetime.strptime("07/20/23 17:36:34", "%m/%d/%y %H:%M:%S") + minutes_before = calculate_datetime( + input_date=input_date, minutes=10, before_or_after="before" + ) + expected_result_date_before = datetime.strptime( + "07/20/23 17:26:34", "%m/%d/%y %H:%M:%S" + ) assert minutes_before == expected_result_date_before def test_calculate_datetime_after_minutes(self): - input_date = datetime.strptime("07/20/23 17:36:34", '%m/%d/%y %H:%M:%S') - minutes_after = calculate_datetime(input_date=input_date, minutes=10, before_or_after="after") - expected_result_date_after = datetime.strptime("07/20/23 17:46:34", '%m/%d/%y %H:%M:%S') + input_date = datetime.strptime("07/20/23 17:36:34", "%m/%d/%y %H:%M:%S") + minutes_after = calculate_datetime( + input_date=input_date, minutes=10, before_or_after="after" + ) + expected_result_date_after = datetime.strptime( + "07/20/23 17:46:34", "%m/%d/%y %H:%M:%S" + ) assert minutes_after == expected_result_date_after def test_calculate_datetime_raise_error(self): with pytest.raises(ValueError): - input_date = datetime.strptime("07/20/23 17:36:34", '%m/%d/%y %H:%M:%S') - minutes = calculate_datetime(input_date=input_date, minutes=10, before_or_after="error") - + input_date = datetime.strptime("07/20/23 17:36:34", "%m/%d/%y %H:%M:%S") + minutes = calculate_datetime( + input_date=input_date, minutes=10, before_or_after="error" + ) + # this test might fail for windows machine @pytest.mark.not_windows - def test_check_synapse_cache_size(self,tmp_path): + def test_check_synapse_cache_size(self, tmp_path): mock_synapse_cache_dir = tmp_path / ".synapseCache" mock_synapse_cache_dir.mkdir() - mock_synapse_table_query_csv = mock_synapse_cache_dir/ "mock_synapse_table_query.csv" + mock_synapse_table_query_csv = ( + mock_synapse_cache_dir / "mock_synapse_table_query.csv" + ) mock_synapse_table_query_csv.write_text("example file for calculating cache") file_size = check_synapse_cache_size(mock_synapse_cache_dir) @@ -112,7 +222,6 @@ def test_check_synapse_cache_size(self,tmp_path): assert file_size == 4000 def test_find_duplicates(self): - mock_list = ["foo", "bar", "foo"] mock_dups = {"foo"} @@ -120,7 +229,6 @@ def test_find_duplicates(self): assert test_dups == mock_dups def test_dict2list_with_dict(self): - mock_dict = {"foo": "bar"} mock_list = [{"foo": "bar"}] @@ -128,14 +236,22 @@ def test_dict2list_with_dict(self): assert test_list == mock_list def test_dict2list_with_list(self): - # mock_dict = {'foo': 'bar'} mock_list = [{"foo": "bar"}] test_list = general.dict2list(mock_list) assert test_list == mock_list - @pytest.mark.parametrize("entity_id,expected_type", [("syn27600053","folder"), ("syn29862078", "file"), ("syn23643253", "asset view"), ("syn30988314", "folder"), ("syn51182432", 
"org.sagebionetworks.repo.model.table.TableEntity")]) + @pytest.mark.parametrize( + "entity_id,expected_type", + [ + ("syn27600053", "folder"), + ("syn29862078", "file"), + ("syn23643253", "asset view"), + ("syn30988314", "folder"), + ("syn51182432", "org.sagebionetworks.repo.model.table.TableEntity"), + ], + ) def test_entity_type_mapping(self, synapse_store, entity_id, expected_type): syn = synapse_store.syn @@ -157,7 +273,6 @@ def test_download_manifest_to_temp_folder(self): class TestCliUtils: def test_query_dict(self): - mock_dict = {"k1": {"k2": {"k3": "foobar"}}} mock_keys_valid = ["k1", "k2", "k3"] mock_keys_invalid = ["k1", "k2", "k4"] @@ -192,7 +307,6 @@ def close(self): class TestIOUtils: def test_json_load(self, tmpdir): - json_file = tmpdir.join("example.json") json_file.write_text(json.dumps([{"k1": "v1"}, {"k2": "v2"}]), encoding="utf-8") @@ -204,7 +318,6 @@ def test_json_load(self, tmpdir): assert local_result == expected def test_json_load_online(self, mocker): - mock_urlopen = mocker.patch( "urllib.request.urlopen", return_value=FakeResponse( @@ -218,7 +331,6 @@ def test_json_load_online(self, mocker): assert mock_urlopen.call_count == 1 def test_export_json(self, tmpdir): - json_str = json.dumps([{"k1": "v1"}, {"k2": "v2"}]) export_file = tmpdir.join("export_json_expected.json") @@ -230,7 +342,6 @@ def test_export_json(self, tmpdir): assert expected == json_str def test_load_default(self): - biothings_schema = io_utils.load_default() expected_ctx_keys = ["bts", "rdf", "rdfs", "schema", "xsd"] @@ -242,10 +353,36 @@ def test_load_default(self): assert expected_no_of_keys == actual_no_of_keys def test_load_schema_org(self): - schema_org_schema = io_utils.load_schemaorg() - expected_ctx_keys = ['brick', 'csvw', 'dc', 'dcam', 'dcat', 'dcmitype', 'dcterms', 'doap', 'foaf', 'odrl', 'org', 'owl', 'prof', 'prov', 'qb', 'rdf', 'rdfs', 'schema', 'sh', 'skos', 'sosa', 'ssn', 'time', 'vann', 'void', 'xsd'] + expected_ctx_keys = [ + "brick", + "csvw", + "dc", + "dcam", + "dcat", + "dcmitype", + "dcterms", + "doap", + "foaf", + "odrl", + "org", + "owl", + "prof", + "prov", + "qb", + "rdf", + "rdfs", + "schema", + "sh", + "skos", + "sosa", + "ssn", + "time", + "vann", + "void", + "xsd", + ] actual_ctx_keys = list(schema_org_schema["@context"].keys()) assert expected_ctx_keys == actual_ctx_keys @@ -255,13 +392,19 @@ def test_load_schema_org(self): class TestDfUtils: - @pytest.mark.parametrize("preserve_raw_input", [True, False], ids=["Do not infer datatypes", "Infer datatypes"]) + @pytest.mark.parametrize( + "preserve_raw_input", + [True, False], + ids=["Do not infer datatypes", "Infer datatypes"], + ) def test_load_df(self, helpers, preserve_raw_input): test_col = "Check NA" file_path = helpers.get_data_path("mock_manifests", "Invalid_Test_Manifest.csv") unprocessed_df = pd.read_csv(file_path, encoding="utf8") - df = df_utils.load_df(file_path, preserve_raw_input=preserve_raw_input, data_model=False) + df = df_utils.load_df( + file_path, preserve_raw_input=preserve_raw_input, data_model=False + ) assert df["Component"].dtype == "object" @@ -281,7 +424,6 @@ def test_load_df(self, helpers, preserve_raw_input): assert isinstance(df[test_col].iloc[2], str) def test_update_df_col_present(self, helpers): - synapse_manifest = helpers.get_data_frame( "mock_manifests", "synapse_manifest.csv" ) @@ -293,7 +435,6 @@ def test_update_df_col_present(self, helpers): assert_frame_equal(col_pres_res, synapse_manifest) def test_update_df_col_absent(self, helpers): - synapse_manifest = 
helpers.get_data_frame( "mock_manifests", "synapse_manifest.csv" ) @@ -304,7 +445,6 @@ def test_update_df_col_absent(self, helpers): df_utils.update_df(local_manifest, synapse_manifest, "Col_Not_In_Dfs") def test_trim_commas_df(self, helpers): - local_manifest = helpers.get_data_frame("mock_manifests", "local_manifest.csv") nan_row = pd.DataFrame( @@ -349,104 +489,330 @@ def test_update_dataframe(self): def test_populate_column(self): input_df = pd.DataFrame( - { - "column1": ["col1Val","col1Val"], - "column2": [None, None] - } + {"column1": ["col1Val", "col1Val"], "column2": [None, None]} ) - output_df = df_utils.populate_df_col_with_another_col(input_df,'column1','column2') - assert (output_df["column2"].values == ["col1Val","col1Val"]).all() + output_df = df_utils.populate_df_col_with_another_col( + input_df, "column1", "column2" + ) + assert (output_df["column2"].values == ["col1Val", "col1Val"]).all() + class TestSchemaUtils: def test_get_property_label_from_display_name(self, helpers): - # tests where strict_camel_case is the same - assert(get_property_label_from_display_name("howToAcquire") == "howToAcquire") - assert(get_property_label_from_display_name("howToAcquire", strict_camel_case = True) == "howToAcquire") - assert(get_property_label_from_display_name("how_to_acquire") == "howToAcquire") - assert(get_property_label_from_display_name("how_to_acquire", strict_camel_case = True) == "howToAcquire") - assert(get_property_label_from_display_name("howtoAcquire") == "howtoAcquire") - assert(get_property_label_from_display_name("howtoAcquire", strict_camel_case = True) == "howtoAcquire") - assert(get_property_label_from_display_name("How To Acquire") == "howToAcquire") - assert(get_property_label_from_display_name("How To Acquire", strict_camel_case = True) == "howToAcquire") - assert(get_property_label_from_display_name("Model Of Manifestation") == "modelOfManifestation") - assert(get_property_label_from_display_name("Model Of Manifestation", strict_camel_case = True) == "modelOfManifestation") - assert(get_property_label_from_display_name("ModelOfManifestation") == "modelOfManifestation") - assert(get_property_label_from_display_name("ModelOfManifestation", strict_camel_case = True) == "modelOfManifestation") - assert(get_property_label_from_display_name("model Of Manifestation") == "modelOfManifestation") - assert(get_property_label_from_display_name("model Of Manifestation", strict_camel_case = True) == "modelOfManifestation") + assert get_property_label_from_display_name("howToAcquire") == "howToAcquire" + assert ( + get_property_label_from_display_name("howToAcquire", strict_camel_case=True) + == "howToAcquire" + ) + assert get_property_label_from_display_name("how_to_acquire") == "howToAcquire" + assert ( + get_property_label_from_display_name( + "how_to_acquire", strict_camel_case=True + ) + == "howToAcquire" + ) + assert get_property_label_from_display_name("howtoAcquire") == "howtoAcquire" + assert ( + get_property_label_from_display_name("howtoAcquire", strict_camel_case=True) + == "howtoAcquire" + ) + assert get_property_label_from_display_name("How To Acquire") == "howToAcquire" + assert ( + get_property_label_from_display_name( + "How To Acquire", strict_camel_case=True + ) + == "howToAcquire" + ) + assert ( + get_property_label_from_display_name("Model Of Manifestation") + == "modelOfManifestation" + ) + assert ( + get_property_label_from_display_name( + "Model Of Manifestation", strict_camel_case=True + ) + == "modelOfManifestation" + ) + assert ( + 
get_property_label_from_display_name("ModelOfManifestation") + == "modelOfManifestation" + ) + assert ( + get_property_label_from_display_name( + "ModelOfManifestation", strict_camel_case=True + ) + == "modelOfManifestation" + ) + assert ( + get_property_label_from_display_name("model Of Manifestation") + == "modelOfManifestation" + ) + assert ( + get_property_label_from_display_name( + "model Of Manifestation", strict_camel_case=True + ) + == "modelOfManifestation" + ) # tests where strict_camel_case changes the result - assert(get_property_label_from_display_name("how to Acquire") == "howtoAcquire") - assert(get_property_label_from_display_name("how to Acquire", strict_camel_case = True) == "howToAcquire") - assert(get_property_label_from_display_name("How to Acquire") == "howtoAcquire") - assert(get_property_label_from_display_name("How to Acquire", strict_camel_case = True) == "howToAcquire") - assert(get_property_label_from_display_name("how to acquire") == "howtoacquire") - assert(get_property_label_from_display_name("how to acquire", strict_camel_case = True) == "howToAcquire") - assert(get_property_label_from_display_name("model of manifestation") == "modelofmanifestation") - assert(get_property_label_from_display_name("model of manifestation", strict_camel_case = True) == "modelOfManifestation") - assert(get_property_label_from_display_name("model of manifestation") == "modelofmanifestation") - assert(get_property_label_from_display_name("model of manifestation", strict_camel_case = True) == "modelOfManifestation") + assert get_property_label_from_display_name("how to Acquire") == "howtoAcquire" + assert ( + get_property_label_from_display_name( + "how to Acquire", strict_camel_case=True + ) + == "howToAcquire" + ) + assert get_property_label_from_display_name("How to Acquire") == "howtoAcquire" + assert ( + get_property_label_from_display_name( + "How to Acquire", strict_camel_case=True + ) + == "howToAcquire" + ) + assert get_property_label_from_display_name("how to acquire") == "howtoacquire" + assert ( + get_property_label_from_display_name( + "how to acquire", strict_camel_case=True + ) + == "howToAcquire" + ) + assert ( + get_property_label_from_display_name("model of manifestation") + == "modelofmanifestation" + ) + assert ( + get_property_label_from_display_name( + "model of manifestation", strict_camel_case=True + ) + == "modelOfManifestation" + ) + assert ( + get_property_label_from_display_name("model of manifestation") + == "modelofmanifestation" + ) + assert ( + get_property_label_from_display_name( + "model of manifestation", strict_camel_case=True + ) + == "modelOfManifestation" + ) def test_get_class_label_from_display_name(self, helpers): - # tests where strict_camel_case is the same - assert(get_class_label_from_display_name("howToAcquire") == "HowToAcquire") - assert(get_class_label_from_display_name("howToAcquire", strict_camel_case = True) == "HowToAcquire") - assert(get_class_label_from_display_name("how_to_acquire") == "HowToAcquire") - assert(get_class_label_from_display_name("how_to_acquire", strict_camel_case = True) == "HowToAcquire") - assert(get_class_label_from_display_name("howtoAcquire") == "HowtoAcquire") - assert(get_class_label_from_display_name("howtoAcquire", strict_camel_case = True) == "HowtoAcquire") - assert(get_class_label_from_display_name("How To Acquire") == "HowToAcquire") - assert(get_class_label_from_display_name("How To Acquire", strict_camel_case = True) == "HowToAcquire") - assert(get_class_label_from_display_name("Model Of 
Manifestation") == "ModelOfManifestation") - assert(get_class_label_from_display_name("Model Of Manifestation", strict_camel_case = True) == "ModelOfManifestation") - assert(get_class_label_from_display_name("ModelOfManifestation") == "ModelOfManifestation") - assert(get_class_label_from_display_name("ModelOfManifestation", strict_camel_case = True) == "ModelOfManifestation") - assert(get_class_label_from_display_name("model Of Manifestation") == "ModelOfManifestation") - assert(get_class_label_from_display_name("model Of Manifestation", strict_camel_case = True) == "ModelOfManifestation") + assert get_class_label_from_display_name("howToAcquire") == "HowToAcquire" + assert ( + get_class_label_from_display_name("howToAcquire", strict_camel_case=True) + == "HowToAcquire" + ) + assert get_class_label_from_display_name("how_to_acquire") == "HowToAcquire" + assert ( + get_class_label_from_display_name("how_to_acquire", strict_camel_case=True) + == "HowToAcquire" + ) + assert get_class_label_from_display_name("howtoAcquire") == "HowtoAcquire" + assert ( + get_class_label_from_display_name("howtoAcquire", strict_camel_case=True) + == "HowtoAcquire" + ) + assert get_class_label_from_display_name("How To Acquire") == "HowToAcquire" + assert ( + get_class_label_from_display_name("How To Acquire", strict_camel_case=True) + == "HowToAcquire" + ) + assert ( + get_class_label_from_display_name("Model Of Manifestation") + == "ModelOfManifestation" + ) + assert ( + get_class_label_from_display_name( + "Model Of Manifestation", strict_camel_case=True + ) + == "ModelOfManifestation" + ) + assert ( + get_class_label_from_display_name("ModelOfManifestation") + == "ModelOfManifestation" + ) + assert ( + get_class_label_from_display_name( + "ModelOfManifestation", strict_camel_case=True + ) + == "ModelOfManifestation" + ) + assert ( + get_class_label_from_display_name("model Of Manifestation") + == "ModelOfManifestation" + ) + assert ( + get_class_label_from_display_name( + "model Of Manifestation", strict_camel_case=True + ) + == "ModelOfManifestation" + ) # tests where strict_camel_case changes the result - assert(get_class_label_from_display_name("how to Acquire") == "HowtoAcquire") - assert(get_class_label_from_display_name("how to Acquire", strict_camel_case = True) == "HowToAcquire") - assert(get_class_label_from_display_name("How to Acquire") == "HowtoAcquire") - assert(get_class_label_from_display_name("How to Acquire", strict_camel_case = True) == "HowToAcquire") - assert(get_class_label_from_display_name("how to acquire") == "Howtoacquire") - assert(get_class_label_from_display_name("how to acquire", strict_camel_case = True) == "HowToAcquire") - assert(get_class_label_from_display_name("model of manifestation") == "Modelofmanifestation") - assert(get_class_label_from_display_name("model of manifestation", strict_camel_case = True) == "ModelOfManifestation") - assert(get_class_label_from_display_name("model of manifestation") == "Modelofmanifestation") - assert(get_class_label_from_display_name("model of manifestation", strict_camel_case = True) == "ModelOfManifestation") - - @pytest.mark.parametrize("context_value", ['@id', 'sms:required'], ids=['remove_at', 'remove_sms']) + assert get_class_label_from_display_name("how to Acquire") == "HowtoAcquire" + assert ( + get_class_label_from_display_name("how to Acquire", strict_camel_case=True) + == "HowToAcquire" + ) + assert get_class_label_from_display_name("How to Acquire") == "HowtoAcquire" + assert ( + get_class_label_from_display_name("How to 
Acquire", strict_camel_case=True) + == "HowToAcquire" + ) + assert get_class_label_from_display_name("how to acquire") == "Howtoacquire" + assert ( + get_class_label_from_display_name("how to acquire", strict_camel_case=True) + == "HowToAcquire" + ) + assert ( + get_class_label_from_display_name("model of manifestation") + == "Modelofmanifestation" + ) + assert ( + get_class_label_from_display_name( + "model of manifestation", strict_camel_case=True + ) + == "ModelOfManifestation" + ) + assert ( + get_class_label_from_display_name("model of manifestation") + == "Modelofmanifestation" + ) + assert ( + get_class_label_from_display_name( + "model of manifestation", strict_camel_case=True + ) + == "ModelOfManifestation" + ) + + @pytest.mark.parametrize( + "context_value", ["@id", "sms:required"], ids=["remove_at", "remove_sms"] + ) def test_strip_context(self, helpers, context_value): stripped_contex = strip_context(context_value=context_value) - if '@id' == context_value: - assert stripped_contex == ('', 'id') - elif 'sms:required' == context_value: - assert stripped_contex == ('sms', 'required') + if "@id" == context_value: + assert stripped_contex == ("", "id") + elif "sms:required" == context_value: + assert stripped_contex == ("sms", "required") + + @pytest.mark.parametrize( + "test_multi_rule", + list(MULTI_RULE_DICT.keys()), + ids=list(MULTI_RULE_DICT.keys()), + ) + def test_get_individual_rules(self, test_multi_rule): + validation_rules = [] + test_rule = MULTI_RULE_DICT[test_multi_rule]["starting_rule"] + expected_rule = MULTI_RULE_DICT[test_multi_rule]["parsed_rule"] + parsed_rule = get_individual_rules( + rule=test_rule, + validation_rules=validation_rules, + ) + assert expected_rule == parsed_rule + + @pytest.mark.parametrize( + "test_individual_component_rule", + [ + ["#Patient int", [["Patient"], "int"]], + ["int", [["all_other_components"], "int"]], + ], + ids=["Patient_component", "no_component"], + ) + def test_get_component_name_rules(self, test_individual_component_rule): + component_names = [] + + component, parsed_rule = get_component_name_rules( + component_names=[], component_rule=test_individual_component_rule[0] + ) + expected_rule = test_individual_component_rule[1][1] + expected_component = test_individual_component_rule[1][0] + + assert expected_rule == parsed_rule + assert expected_component == component + + @pytest.mark.parametrize( + "test_individual_rule_set", + [ + ["#Patient int::inRange 100 900", []], + ["int::inRange 100 900", ["int", "inRange 100 900"]], + ["int", ["int"]], + ], + ids=["improper_format", "double_rule", "single_rule"], + ) + def test_parse_single_set_validation_rules(self, test_individual_rule_set): + validation_rule_string = test_individual_rule_set[0] + try: + parsed_rule = parse_single_set_validation_rules( + validation_rule_string=validation_rule_string + ) + expected_rule = test_individual_rule_set[1] + assert parsed_rule == expected_rule + except: + assert validation_rule_string == "#Patient int::inRange 100 900" + + @pytest.mark.parametrize( + "test_rule_name", + list(TEST_VALIDATION_RULES.keys()), + ids=list(TEST_VALIDATION_RULES.keys()), + ) + def test_parse_validation_rules(self, test_rule_name): + """ + The test dictionary tests the following: + A dictionary rule is simply returned. + A string rule, raises an exception. + A single rule, a double rule, component rules, with a single component in either orientation, + double rules, multiple rules, creating a rule for all components except one. 
+ """ + validation_rules = TEST_VALIDATION_RULES[test_rule_name]["validation_rules"] + expected_parsed_rules = TEST_VALIDATION_RULES[test_rule_name]["parsed_rules"] + + try: + parsed_validation_rules = parse_validation_rules( + validation_rules=validation_rules + ) + assert expected_parsed_rules == parsed_validation_rules + except: + assert test_rule_name == "str_rule" + + @pytest.mark.parametrize( + "test_rule_name", + list(TEST_VALIDATION_RULES.keys()), + ids=list(TEST_VALIDATION_RULES.keys()), + ) + def test_extract_component_validation_rules(self, test_rule_name): + """ + Test that a component validation rule dictionary is parsed properly + """ + attribute_rules_set = TEST_VALIDATION_RULES[test_rule_name]["parsed_rules"] + if isinstance(attribute_rules_set, dict): + for component in attribute_rules_set.keys(): + extracted_rules = extract_component_validation_rules( + component, attribute_rules_set + ) + assert isinstance(extracted_rules, list) + assert ( + extracted_rules + == TEST_VALIDATION_RULES[test_rule_name]["extracted_rules"][ + component + ] + ) + class TestValidateUtils: def test_validate_schema(self, helpers): - ''' - Previously did: - se_obj = helpers.get_schema_explorer("example.model.jsonld") - actual = validate_utils.validate_schema(se_obj.schema) - - schema is defined as: self.schema = load_json(schema) - - TODO: Validate this is doing what its supposed to. - ''' + """ """ # Get data model path data_model_path = helpers.get_data_path("example.model.jsonld") schema = io_utils.load_json(data_model_path) - #need to pass the jsonschema + # need to pass the jsonschema actual = validate_utils.validate_schema(schema) assert actual is None - def test_validate_class_schema(self, helpers): """ Get a class template, fill it out with mock data, and validate against a JSON Schema @@ -460,12 +826,11 @@ def test_validate_class_schema(self, helpers): mock_class["@type"] = "rdfs:Class" mock_class["@rdfs:comment"] = "This is a mock class" mock_class["@rdfs:label"] = "MockClass" - mock_class["rdfs:subClassOf"].append({"@id":"bts:Patient"}) + mock_class["rdfs:subClassOf"].append({"@id": "bts:Patient"}) error = validate_utils.validate_class_schema(mock_class) assert error is None - def test_validate_property_schema(self, helpers): """ @@ -479,13 +844,13 @@ def test_validate_property_schema(self, helpers): mock_class["@id"] = "bts:MockProperty" mock_class["@type"] = "rdf:Property" mock_class["@rdfs:comment"] = "This is a mock Patient class" - mock_class["@rdfs:label"] = "MockProperty" - mock_class["schema:domainIncludes"].append({"@id":"bts:Patient"}) + mock_class["@rdfs:label"] = "MockProperty" + mock_class["schema:domainIncludes"].append({"@id": "bts:Patient"}) error = validate_utils.validate_property_schema(mock_class) assert error is None - + class TestCsvUtils: def test_csv_to_schemaorg(self, helpers, tmp_path): @@ -498,9 +863,9 @@ def test_csv_to_schemaorg(self, helpers, tmp_path): csv_path = helpers.get_data_path("example.model.csv") # Instantiate DataModelParser - data_model_parser = DataModelParser(path_to_data_model = csv_path) - - #Parse Model + data_model_parser = DataModelParser(path_to_data_model=csv_path) + + # Parse Model parsed_data_model = data_model_parser.parse_model() # Instantiate DataModelGraph From ac94b2d095e72d5a497609588dc785a5a224fdfd Mon Sep 17 00:00:00 2001 From: Mialy DeFelice <85905780+mialy-defelice@users.noreply.github.com> Date: Tue, 13 Feb 2024 14:24:43 -0800 Subject: [PATCH 181/199] update test_run_rel_functions to work better with new rule handling --- 
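The test update that follows replaces a blanket vrs != parsed_vrs assertion: after the parser changes, a raw CSV rule is only transformed when it actually contains the :: delimiter, so the expectation has to be checked rule by rule. Roughly the comparison being introduced, as a standalone sketch with invented inputs:

    raw_csv_rules = [["unique::list"], ["int"]]  # rules as read from the CSV model
    parsed_vrs = [["unique", "list"], ["int"]]   # what parse_validation_rules returns

    for ind, rule in enumerate(raw_csv_rules):
        if "::" in rule[0]:
            assert parsed_vrs[ind] == rule[0].split("::")
        else:
            assert parsed_vrs[ind] == rule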
 tests/test_schemas.py | 12 +++++-------
 1 file changed, 5 insertions(+), 7 deletions(-)

diff --git a/tests/test_schemas.py b/tests/test_schemas.py
index cccdb0208..cf4fcd952 100644
--- a/tests/test_schemas.py
+++ b/tests/test_schemas.py
@@ -741,17 +741,15 @@ def test_run_rel_functions(self, helpers, data_model, rel_func, test_dn, test_bo
             assert len(vrs) == len(parsed_vrs)
             if DATA_MODEL_DICT[data_model] == "CSV":
-                assert vrs != parsed_vrs
+                for ind, rule in enumerate(vrs):
+                    if "::" in rule[0]:
+                        assert parsed_vrs[ind] == rule[0].split("::")
+                    else:
+                        assert parsed_vrs[ind] == rule
             elif DATA_MODEL_DICT[data_model] == "JSONLD":
                 # JSONLDs already contain parsed validaiton rules so the raw vrs will match the parsed_vrs
                 assert vrs == parsed_vrs
 
-            # For all validation rules where there are multiple rules, make sure they have been split as expected.
-            for i, pvr in enumerate(parsed_vrs):
-                delim_count = vrs[i][0].count("::")
-                if delim_count:
-                    assert len(pvr) == delim_count + 1
-
         elif rel_func == get_label_from_display_name:
             # For a limited set check label is returned as expected.
             for entry_type, expected_value in TEST_DN_DICT[test_dn].items():

From aab76826b067c49a5e3b7dd6be90fcb386ad0a4a Mon Sep 17 00:00:00 2001
From: Mialy DeFelice <85905780+mialy-defelice@users.noreply.github.com>
Date: Tue, 13 Feb 2024 14:25:20 -0800
Subject: [PATCH 182/199] update syntax, clean and except errors

---
 schematic/utils/schema_utils.py | 70 ++++++++++++++++++++-------------
 1 file changed, 43 insertions(+), 27 deletions(-)

diff --git a/schematic/utils/schema_utils.py b/schematic/utils/schema_utils.py
index f6316ae29..686075eea 100644
--- a/schematic/utils/schema_utils.py
+++ b/schematic/utils/schema_utils.py
@@ -123,17 +123,17 @@ def convert_bool_to_str(provided_bool: bool) -> str:
     return str(provided_bool)
 
 
-def get_component_rules(component_rule: str, validation_rules: list) -> list:
+def get_individual_rules(rule: str, validation_rules: list[str]) -> list:
     # Separate multiple rules (defined by addition of the rule delimiter)
-    if DELIMITERS["rule_delimiter"] in component_rule:
-        validation_rules.append(component_rule.split(DELIMITERS["rule_delimiter"]))
+    if DELIMITERS["rule_delimiter"] in rule:
+        validation_rules.append(parse_single_set_validation_rules(rule))
     # Get single rule
     else:
-        validation_rules.append(component_rule)
+        validation_rules.append(rule)
     return validation_rules
 
 
-def get_component_name(component_names: list, component_rule: str) -> Tuple[list, str]:
+def get_component_name_rules(component_names: list, component_rule: str) -> Tuple[list, str]:
     # If a component name is not attached to the rule, have it apply to all other components
     if DELIMITERS["component_name_delimiter"] != component_rule[0]:
         component_names.append("all_other_components")
@@ -146,9 +146,10 @@ def get_component_name(component_names: list, component_rule: str) -> Tuple[list
         )
         try:
            assert component_names[-1] != " "
-        except:
-            ValueError(
-                f"There was an error capturing at least one of the component name in the following rule: {component_rule}, "
+        except AssertionError:
+            print(
+                f"There was an error capturing at least one of the component names "
+                f"in the following rule: {component_rule}, "
                 f"please ensure there is not extra whitespace or non-allowed characters."
) component_rule = component_rule.replace(component_rule.split(" ")[0], "") @@ -168,21 +169,22 @@ def parse_component_validation_rules(validation_rule_string: str) -> Dict: component_rule = component_rule.strip() if component_rule: # Get component name attached to rule - component_names, component_rule = get_component_name( + component_names, component_rule = get_component_name_rules( component_names=component_names, component_rule=component_rule ) # Get rules - validation_rules = get_component_rules( - component_rule=component_rule, validation_rules=validation_rules + validation_rules = get_individual_rules( + rule=component_rule, validation_rules=validation_rules ) # Ensure we collected the component names and validation rules like expected try: assert len(component_names) == len(validation_rules) - except: - raise ValueError( - f"The number of components names and validation rules does not match for validation rule: {validation_rule_string}." + except ValueError: + print ( + f"The number of components names and validation rules does not match " + f"for validation rule: {validation_rule_string}." ) validation_rules_dict = dict( @@ -201,8 +203,7 @@ def parse_single_set_validation_rules(validation_rule_string: str) -> list: ) # Parse rules that are set across *all* components/manifests - if DELIMITERS["rule_delimiter"] in validation_rule_string: - return validation_rule_string.split(DELIMITERS["rule_delimiter"]) + return validation_rule_string.split(DELIMITERS["rule_delimiter"]) def parse_validation_rules(validation_rules: Union[list, dict]) -> Union[list, dict]: @@ -221,16 +222,18 @@ def parse_validation_rules(validation_rules: Union[list, dict]) -> Union[list, d # Rules pulled in as a dict can be used directly return validation_rules elif isinstance(validation_rules, list): - validation_rule_string = validation_rules[0] + # If rules are already parsed from the JSONLD + if len(validation_rules) > 1 and isinstance(validation_rules[-1], str): + return validation_rules # Parse rules set for a subset of components/manifests - if DELIMITERS["component_rules_delimiter"] in validation_rule_string: + elif DELIMITERS["component_rules_delimiter"] in validation_rules[0]: return parse_component_validation_rules( - validation_rule_string=validation_rule_string + validation_rule_string= validation_rules[0] ) # Parse rules that are set across *all* components/manifests else: return parse_single_set_validation_rules( - validation_rule_string=validation_rule_string + validation_rule_string= validation_rules[0] ) else: raise ValueError( @@ -239,19 +242,32 @@ def parse_validation_rules(validation_rules: Union[list, dict]) -> Union[list, d def extract_component_validation_rules( - manifest_component: str, validation_rules: dict + manifest_component: str, validation_rules: dict[str, list] ) -> list: + """Parse a component validation rule dictionary to pull out the rule (if any) for a given manifest + Args: + manifest_component, str: Component label, pulled from the manifest directly + validation_rules, dict[str, list[Union[list,str]]: Validation rules dictionary, where keys are the manifest component label, + and the value is a parsed set of validation rules. 
+ Returns: + validation_rules, list[str]: + """ manifest_component_rule = validation_rules.get(manifest_component) all_component_rules = validation_rules.get("all_other_components") - if manifest_component_rule: - if type(manifest_component_rule) == str: - validation_rules = [manifest_component_rule] - else: + + # Capture situation where manifest_component rule is an empty string + if manifest_component_rule is not None: + if isinstance(manifest_component_rule, str): + if manifest_component_rule == '': + validation_rules = [] + else: + validation_rules = [manifest_component_rule] + elif isinstance(manifest_component_rule, list): validation_rules = manifest_component_rule elif all_component_rules: - if type(all_component_rules) == str: + if isinstance(all_component_rules, str): validation_rules = [all_component_rules] - else: + elif isinstance(all_component_rules, list): validation_rules = all_component_rules else: validation_rules = [] From ef22e8e768538252eb8576461fc47a1a8249f76d Mon Sep 17 00:00:00 2001 From: Mialy DeFelice <85905780+mialy-defelice@users.noreply.github.com> Date: Wed, 14 Feb 2024 11:48:44 -0800 Subject: [PATCH 183/199] run black on test_validation --- tests/test_validation.py | 834 ++++++++++++++++++++++----------------- 1 file changed, 473 insertions(+), 361 deletions(-) diff --git a/tests/test_validation.py b/tests/test_validation.py index f704117ba..5c2f31a95 100644 --- a/tests/test_validation.py +++ b/tests/test_validation.py @@ -17,472 +17,584 @@ from schematic.schemas.data_model_json_schema import DataModelJSONSchema from schematic.utils.validate_rules_utils import validation_rule_info + logging.basicConfig(level=logging.DEBUG) logger = logging.getLogger(__name__) + @pytest.fixture(name="dmge") def DMGE(helpers): dmge = helpers.get_data_model_graph_explorer(path="example.model.jsonld") yield dmge + @pytest.fixture def metadataModel(helpers): metadataModel = MetadataModel( - inputMModelLocation = helpers.get_data_path("example.model.jsonld"), - inputMModelLocationType = "local" - ) + inputMModelLocation=helpers.get_data_path("example.model.jsonld"), + inputMModelLocationType="local", + ) yield metadataModel + def get_rule_combinations(): rule_info = validation_rule_info() for base_rule, indiv_info in rule_info.items(): - complementary_rules = indiv_info['complementary_rules'] + complementary_rules = indiv_info["complementary_rules"] if complementary_rules: for second_rule in complementary_rules: yield base_rule, second_rule else: continue - + + class TestManifestValidation: # check if suite has been created. 
If so, delete it if os.path.exists("great_expectations/expectations/Manifest_test_suite.json"): os.remove("great_expectations/expectations/Manifest_test_suite.json") - def test_valid_manifest(self,helpers,metadataModel): + def test_valid_manifest(self, helpers, metadataModel): manifestPath = helpers.get_data_path("mock_manifests/Valid_Test_Manifest.csv") - rootNode = 'MockComponent' + rootNode = "MockComponent" errors, warnings = metadataModel.validateModelManifest( manifestPath=manifestPath, rootNode=rootNode, - project_scope = ["syn23643250"], - ) - - assert errors == [] - assert warnings == [] + project_scope=["syn23643250"], + ) + assert errors == [] + assert warnings == [] - def test_invalid_manifest(self,helpers, dmge,metadataModel): + def test_invalid_manifest(self, helpers, dmge, metadataModel): manifestPath = helpers.get_data_path("mock_manifests/Invalid_Test_Manifest.csv") - rootNode = 'MockComponent' + rootNode = "MockComponent" errors, warnings = metadataModel.validateModelManifest( manifestPath=manifestPath, rootNode=rootNode, - project_scope = ["syn23643250"], - ) + project_scope=["syn23643250"], + ) - #Check errors - assert GenerateError.generate_type_error( - val_rule = 'num', - row_num = '3', - attribute_name = 'Check Num', - invalid_entry = 'c', - dmge = dmge, - )[0] in errors - - assert GenerateError.generate_type_error( - val_rule = 'int', - row_num = '3', - attribute_name = 'Check Int', - invalid_entry = '5.63', - dmge = dmge, - )[0] in errors - - assert GenerateError.generate_type_error( - val_rule = 'str', - row_num = '3', - attribute_name = 'Check String', - invalid_entry = '94', - dmge = dmge, - )[0] in errors - - assert GenerateError.generate_list_error( - val_rule = 'list strict', - list_string = 'invalid list values', - row_num = '3', - attribute_name = 'Check List', - list_error = "not_comma_delimited", - invalid_entry = 'invalid list values', - dmge = dmge, - )[0] in errors - - assert GenerateError.generate_list_error( - val_rule = 'list strict', - list_string = 'ab cd ef', - row_num = '3', - attribute_name = 'Check Regex List', - list_error = "not_comma_delimited", - invalid_entry = 'ab cd ef', - dmge = dmge, - )[0] in errors - - assert GenerateError.generate_regex_error( - val_rule = 'regex', - reg_expression = '[a-f]', - row_num = '3', - attribute_name = 'Check Regex Format', - module_to_call = 'match', - invalid_entry = 'm', - dmge = dmge, - )[0] in errors - - assert GenerateError.generate_regex_error( - val_rule = 'regex', - reg_expression = '[a-f]', - row_num = '3', - attribute_name = 'Check Regex Single', - module_to_call = 'search', - invalid_entry = 'q', - dmge = dmge, - )[0] in errors - - assert GenerateError.generate_regex_error( - val_rule = 'regex', - reg_expression = '^\d+$', - row_num = '2', - attribute_name = 'Check Regex Integer', - module_to_call = 'search', - invalid_entry = '5.4', - dmge = dmge, - )[0] in errors - - assert GenerateError.generate_url_error( - val_rule = 'url', - url = 'http://googlef.com/', - url_error = 'invalid_url', - row_num = '3', - attribute_name = 'Check URL', - argument = None, - invalid_entry = 'http://googlef.com/', - dmge = dmge, - )[0] in errors + # Check errors + assert ( + GenerateError.generate_type_error( + val_rule="num", + row_num="3", + attribute_name="Check Num", + invalid_entry="c", + dmge=dmge, + )[0] + in errors + ) + assert ( + GenerateError.generate_type_error( + val_rule="int", + row_num="3", + attribute_name="Check Int", + invalid_entry="5.63", + dmge=dmge, + )[0] + in errors + ) - date_err = 
GenerateError.generate_content_error( - val_rule = 'date', - attribute_name = 'Check Date', - dmge = dmge, - row_num = ['2','3','4'], - error_val = ['84-43-094', '32-984', 'notADate'], + assert ( + GenerateError.generate_type_error( + val_rule="str", + row_num="3", + attribute_name="Check String", + invalid_entry="94", + dmge=dmge, + )[0] + in errors + ) + + assert ( + GenerateError.generate_list_error( + val_rule="list strict", + list_string="invalid list values", + row_num="3", + attribute_name="Check List", + list_error="not_comma_delimited", + invalid_entry="invalid list values", + dmge=dmge, + )[0] + in errors + ) + + assert ( + GenerateError.generate_list_error( + val_rule="list strict", + list_string="ab cd ef", + row_num="3", + attribute_name="Check Regex List", + list_error="not_comma_delimited", + invalid_entry="ab cd ef", + dmge=dmge, + )[0] + in errors + ) + + assert ( + GenerateError.generate_regex_error( + val_rule="regex", + reg_expression="[a-f]", + row_num="3", + attribute_name="Check Regex Format", + module_to_call="match", + invalid_entry="m", + dmge=dmge, + )[0] + in errors + ) + + assert ( + GenerateError.generate_regex_error( + val_rule="regex", + reg_expression="[a-f]", + row_num="3", + attribute_name="Check Regex Single", + module_to_call="search", + invalid_entry="q", + dmge=dmge, + )[0] + in errors + ) + + assert ( + GenerateError.generate_regex_error( + val_rule="regex", + reg_expression="^\d+$", + row_num="2", + attribute_name="Check Regex Integer", + module_to_call="search", + invalid_entry="5.4", + dmge=dmge, + )[0] + in errors + ) + + assert ( + GenerateError.generate_url_error( + val_rule="url", + url="http://googlef.com/", + url_error="invalid_url", + row_num="3", + attribute_name="Check URL", + argument=None, + invalid_entry="http://googlef.com/", + dmge=dmge, )[0] + in errors + ) + + date_err = GenerateError.generate_content_error( + val_rule="date", + attribute_name="Check Date", + dmge=dmge, + row_num=["2", "3", "4"], + error_val=["84-43-094", "32-984", "notADate"], + )[0] error_in_list = [date_err[2] in error for error in errors] assert any(error_in_list) - assert GenerateError.generate_content_error( - val_rule = 'unique error', - attribute_name = 'Check Unique', - dmge = dmge, - row_num = ['2','3','4'], - error_val = ['str1'], - )[0] in errors - - assert GenerateError.generate_content_error( - val_rule = 'inRange 50 100 error', - attribute_name = 'Check Range', - dmge = dmge, - row_num = ['3'], - error_val = ['30'], - )[0] in errors - - #check warnings - assert GenerateError.generate_content_error( - val_rule = 'recommended', - attribute_name = 'Check Recommended', - dmge = dmge, - )[1] in warnings - - assert GenerateError.generate_content_error( - val_rule = 'protectAges', - attribute_name = 'Check Ages', - dmge = dmge, - row_num = ['2','3'], - error_val = ['6549','32851'], - )[1] in warnings - - assert GenerateError.generate_cross_warning( - val_rule = 'matchAtLeastOne', - row_num = ['3'], - attribute_name='Check Match at Least', - invalid_entry = ['7163'], - missing_manifest_ID = ['syn27600110', 'syn29381803'], - dmge = dmge, - )[1] in warnings - - assert GenerateError.generate_cross_warning( - val_rule = 'matchAtLeastOne MockComponent.checkMatchatLeastvalues value', - row_num = ['3'], - attribute_name = 'Check Match at Least values', - invalid_entry = ['51100'], - dmge = dmge, - )[1] in warnings - - assert \ + assert ( + GenerateError.generate_content_error( + val_rule="unique error", + attribute_name="Check Unique", + dmge=dmge, + row_num=["2", 
"3", "4"], + error_val=["str1"], + )[0] + in errors + ) + + assert ( + GenerateError.generate_content_error( + val_rule="inRange 50 100 error", + attribute_name="Check Range", + dmge=dmge, + row_num=["3"], + error_val=["30"], + )[0] + in errors + ) + + # check warnings + assert ( + GenerateError.generate_content_error( + val_rule="recommended", + attribute_name="Check Recommended", + dmge=dmge, + )[1] + in warnings + ) + + assert ( + GenerateError.generate_content_error( + val_rule="protectAges", + attribute_name="Check Ages", + dmge=dmge, + row_num=["2", "3"], + error_val=["6549", "32851"], + )[1] + in warnings + ) + + assert ( GenerateError.generate_cross_warning( - val_rule = 'matchExactlyOne', - attribute_name='Check Match Exactly', - matching_manifests = ['syn29862078', 'syn27648165'], - dmge = dmge, - )[1] in warnings \ - or \ + val_rule="matchAtLeastOne", + row_num=["3"], + attribute_name="Check Match at Least", + invalid_entry=["7163"], + missing_manifest_ID=["syn27600110", "syn29381803"], + dmge=dmge, + )[1] + in warnings + ) + + assert ( GenerateError.generate_cross_warning( - val_rule = 'matchExactlyOne', - attribute_name='Check Match Exactly', - matching_manifests = ['syn29862066', 'syn27648165'], - dmge = dmge, - )[1] in warnings + val_rule="matchAtLeastOne MockComponent.checkMatchatLeastvalues value", + row_num=["3"], + attribute_name="Check Match at Least values", + invalid_entry=["51100"], + dmge=dmge, + )[1] + in warnings + ) + assert ( + GenerateError.generate_cross_warning( + val_rule="matchExactlyOne", + attribute_name="Check Match Exactly", + matching_manifests=["syn29862078", "syn27648165"], + dmge=dmge, + )[1] + in warnings + or GenerateError.generate_cross_warning( + val_rule="matchExactlyOne", + attribute_name="Check Match Exactly", + matching_manifests=["syn29862066", "syn27648165"], + dmge=dmge, + )[1] + in warnings + ) cross_warning = GenerateError.generate_cross_warning( - val_rule = 'matchExactlyOne MockComponent.checkMatchExactlyvalues MockComponent.checkMatchExactlyvalues value', - row_num = ['2', '3', '4'], - attribute_name='Check Match Exactly values', - invalid_entry = ['71738', '98085', '210065'], - dmge = dmge, - )[1] + val_rule="matchExactlyOne MockComponent.checkMatchExactlyvalues MockComponent.checkMatchExactlyvalues value", + row_num=["2", "3", "4"], + attribute_name="Check Match Exactly values", + invalid_entry=["71738", "98085", "210065"], + dmge=dmge, + )[1] warning_in_list = [cross_warning[1] in warning for warning in warnings] assert any(warning_in_list) - - - - def test_in_house_validation(self,helpers,dmge,metadataModel): + def test_in_house_validation(self, helpers, dmge, metadataModel): manifestPath = helpers.get_data_path("mock_manifests/Invalid_Test_Manifest.csv") - rootNode = 'MockComponent' + rootNode = "MockComponent" errors, warnings = metadataModel.validateModelManifest( manifestPath=manifestPath, rootNode=rootNode, restrict_rules=True, - project_scope = ["syn23643250"], - ) - - #Check errors - assert GenerateError.generate_type_error( - val_rule = 'num', - row_num = '3', - attribute_name = 'Check Num', - invalid_entry = 'c', - dmge = dmge, - )[0] in errors - - assert GenerateError.generate_type_error( - val_rule = 'int', - row_num = '3', - attribute_name = 'Check Int', - invalid_entry = '5.63', - dmge = dmge, - )[0] in errors - - assert GenerateError.generate_type_error( - val_rule = 'str', - row_num = '3', - attribute_name = 'Check String', - invalid_entry = '94', - dmge = dmge, - )[0] in errors - - assert 
GenerateError.generate_type_error( - val_rule = 'int', - row_num = '3', - attribute_name = 'Check NA', - invalid_entry = '9.5', - dmge = dmge, - )[0] in errors - - assert GenerateError.generate_list_error( - val_rule = 'list strict', - list_string = 'invalid list values', - row_num = '3', - attribute_name = 'Check List', - list_error = "not_comma_delimited", - invalid_entry = 'invalid list values', - dmge = dmge, - )[0] in errors - - assert GenerateError.generate_list_error( - val_rule = 'list strict', - list_string = 'ab cd ef', - row_num = '3', - attribute_name = 'Check Regex List', - list_error = "not_comma_delimited", - invalid_entry = 'ab cd ef', - dmge = dmge, - )[0] in errors - - assert GenerateError.generate_regex_error( - val_rule = 'regex', - reg_expression = '[a-f]', - row_num = '3', - attribute_name = 'Check Regex Single', - module_to_call = 'search', - invalid_entry = 'q', - dmge = dmge, - )[0] in errors - - assert GenerateError.generate_regex_error( - val_rule = 'regex', - reg_expression = '[a-f]', - row_num = '3', - attribute_name = 'Check Regex Format', - module_to_call = 'match', - invalid_entry = 'm', - dmge = dmge, - )[0] in errors - - assert GenerateError.generate_url_error( - val_rule = 'url', - url = 'http://googlef.com/', - url_error = 'invalid_url', - row_num = '3', - attribute_name = 'Check URL', - argument = None, - invalid_entry = 'http://googlef.com/', - dmge = dmge, - )[0] in errors - - - #Check Warnings - assert GenerateError.generate_cross_warning( - val_rule = 'matchAtLeastOne', - row_num = ['3'], - attribute_name='Check Match at Least', - invalid_entry = ['7163'], - missing_manifest_ID = ['syn27600110', 'syn29381803'], - dmge = dmge, - )[1] in warnings - - assert GenerateError.generate_cross_warning( - val_rule = 'matchAtLeastOne MockComponent.checkMatchatLeastvalues value', - row_num = ['3'], - attribute_name = 'Check Match at Least values', - invalid_entry = ['51100'], - dmge = dmge, - )[1] in warnings - - assert \ + project_scope=["syn23643250"], + ) + + # Check errors + assert ( + GenerateError.generate_type_error( + val_rule="num", + row_num="3", + attribute_name="Check Num", + invalid_entry="c", + dmge=dmge, + )[0] + in errors + ) + + assert ( + GenerateError.generate_type_error( + val_rule="int", + row_num="3", + attribute_name="Check Int", + invalid_entry="5.63", + dmge=dmge, + )[0] + in errors + ) + + assert ( + GenerateError.generate_type_error( + val_rule="str", + row_num="3", + attribute_name="Check String", + invalid_entry="94", + dmge=dmge, + )[0] + in errors + ) + + assert ( + GenerateError.generate_type_error( + val_rule="int", + row_num="3", + attribute_name="Check NA", + invalid_entry="9.5", + dmge=dmge, + )[0] + in errors + ) + + assert ( + GenerateError.generate_list_error( + val_rule="list strict", + list_string="invalid list values", + row_num="3", + attribute_name="Check List", + list_error="not_comma_delimited", + invalid_entry="invalid list values", + dmge=dmge, + )[0] + in errors + ) + + assert ( + GenerateError.generate_list_error( + val_rule="list strict", + list_string="ab cd ef", + row_num="3", + attribute_name="Check Regex List", + list_error="not_comma_delimited", + invalid_entry="ab cd ef", + dmge=dmge, + )[0] + in errors + ) + + assert ( + GenerateError.generate_regex_error( + val_rule="regex", + reg_expression="[a-f]", + row_num="3", + attribute_name="Check Regex Single", + module_to_call="search", + invalid_entry="q", + dmge=dmge, + )[0] + in errors + ) + + assert ( + GenerateError.generate_regex_error( + 
val_rule="regex", + reg_expression="[a-f]", + row_num="3", + attribute_name="Check Regex Format", + module_to_call="match", + invalid_entry="m", + dmge=dmge, + )[0] + in errors + ) + + assert ( + GenerateError.generate_url_error( + val_rule="url", + url="http://googlef.com/", + url_error="invalid_url", + row_num="3", + attribute_name="Check URL", + argument=None, + invalid_entry="http://googlef.com/", + dmge=dmge, + )[0] + in errors + ) + + # Check Warnings + assert ( GenerateError.generate_cross_warning( - val_rule = 'matchExactlyOne', - attribute_name='Check Match Exactly', - matching_manifests = ['syn29862078', 'syn27648165'], - dmge = dmge, - )[1] in warnings \ - or \ + val_rule="matchAtLeastOne", + row_num=["3"], + attribute_name="Check Match at Least", + invalid_entry=["7163"], + missing_manifest_ID=["syn27600110", "syn29381803"], + dmge=dmge, + )[1] + in warnings + ) + + assert ( + GenerateError.generate_cross_warning( + val_rule="matchAtLeastOne MockComponent.checkMatchatLeastvalues value", + row_num=["3"], + attribute_name="Check Match at Least values", + invalid_entry=["51100"], + dmge=dmge, + )[1] + in warnings + ) + + assert ( GenerateError.generate_cross_warning( - val_rule = 'matchExactlyOne', - attribute_name='Check Match Exactly', - matching_manifests = ['syn29862066', 'syn27648165'], - dmge = dmge, - )[1] in warnings - - assert GenerateError.generate_cross_warning( - val_rule = 'matchExactlyOne MockComponent.checkMatchExactlyvalues MockComponent.checkMatchExactlyvalues value', - row_num = ['2', '3', '4'], - attribute_name='Check Match Exactly values', - invalid_entry = ['71738', '98085', '210065'], - dmge = dmge, - )[1] in warnings - - @pytest.mark.parametrize("manifest_path", - ["mock_manifests/example.biospecimen_component_rule.manifest.csv", - "mock_manifests/example.patient_component_rule.manifest.csv"], - ids=["biospecimen_manifest", "patient_manifest"]) + val_rule="matchExactlyOne", + attribute_name="Check Match Exactly", + matching_manifests=["syn29862078", "syn27648165"], + dmge=dmge, + )[1] + in warnings + or GenerateError.generate_cross_warning( + val_rule="matchExactlyOne", + attribute_name="Check Match Exactly", + matching_manifests=["syn29862066", "syn27648165"], + dmge=dmge, + )[1] + in warnings + ) + + assert ( + GenerateError.generate_cross_warning( + val_rule="matchExactlyOne MockComponent.checkMatchExactlyvalues MockComponent.checkMatchExactlyvalues value", + row_num=["2", "3", "4"], + attribute_name="Check Match Exactly values", + invalid_entry=["71738", "98085", "210065"], + dmge=dmge, + )[1] + in warnings + ) + + @pytest.mark.parametrize( + "manifest_path", + [ + "mock_manifests/example.biospecimen_component_rule.manifest.csv", + "mock_manifests/example.patient_component_rule.manifest.csv", + ], + ids=["biospecimen_manifest", "patient_manifest"], + ) def test_component_validations(self, helpers, manifest_path): full_manifest_path = helpers.get_data_path(manifest_path) manifest = helpers.get_data_frame(full_manifest_path) - root_node = manifest['Component'][0] + root_node = manifest["Component"][0] dmge = helpers.get_data_model_graph_explorer(path="example_new_vrs.model.csv") - data_model_js = DataModelJSONSchema(jsonld_path=helpers.get_data_path('example_new_vrs.model.csv'), graph=dmge.graph) + data_model_js = DataModelJSONSchema( + jsonld_path=helpers.get_data_path("example_new_vrs.model.csv"), + graph=dmge.graph, + ) - json_schema = data_model_js.get_json_validation_schema(source_node=root_node, schema_name=root_node + "_validation") + json_schema = 
data_model_js.get_json_validation_schema( + source_node=root_node, schema_name=root_node + "_validation" + ) validateManifest = ValidateManifest( - errors = [], - manifest = manifest, - manifestPath = full_manifest_path, - dmge = dmge, - jsonSchema = json_schema + errors=[], + manifest=manifest, + manifestPath=full_manifest_path, + dmge=dmge, + jsonSchema=json_schema, ) _, vmr_errors, vmr_warnings = validateManifest.validate_manifest_rules( - manifest=manifest, dmge=dmge, restrict_rules=False, project_scope=None, - ) + manifest=manifest, + dmge=dmge, + restrict_rules=False, + project_scope=None, + ) - if root_node == 'Biospecimen': - assert vmr_errors and vmr_errors[0][0] == ['2', '3'] and vmr_errors[0][-1] == ['123'] + if root_node == "Biospecimen": + assert ( + vmr_errors + and vmr_errors[0][0] == ["2", "3"] + and vmr_errors[0][-1] == ["123"] + ) assert vmr_warnings == [] - elif root_node == 'Patient': + elif root_node == "Patient": assert vmr_errors == [] - assert vmr_warnings and vmr_warnings[0][0] == ['2', '3'] and vmr_warnings[0][-1] == ['123'] - + assert ( + vmr_warnings + and vmr_warnings[0][0] == ["2", "3"] + and vmr_warnings[0][-1] == ["123"] + ) - @pytest.mark.rule_combos(reason = 'This introduces a great number of tests covering every possible rule combination that are only necessary on occasion.') + @pytest.mark.rule_combos( + reason="This introduces a great number of tests covering every possible rule combination that are only necessary on occasion." + ) @pytest.mark.parametrize("base_rule, second_rule", get_rule_combinations()) - def test_rule_combinations(self, helpers, dmge, base_rule, second_rule, metadataModel): + def test_rule_combinations( + self, helpers, dmge, base_rule, second_rule, metadataModel + ): """ TODO: Describe what this test is doing. Updating the data model graph to allow testing of allowable rule combinations. Works one rule combo at a time using (get_rule_combinations.) 
""" - rule_regex = re.compile(base_rule+'.*') - rootNode = 'MockComponent' + rule_regex = re.compile(base_rule + ".*") + rootNode = "MockComponent" manifestPath = helpers.get_data_path("mock_manifests/Rule_Combo_Manifest.csv") manifest = helpers.get_data_frame(manifestPath) - + # Get a view of the node data all_node_data = dmge.graph.nodes.data() - + # Update select validation rules in the data model graph for columns in the manifest for attribute in manifest.columns: # Get the node label node_label = dmge.get_node_label(attribute) - + # Get a view of the recorded info for current node node_info = all_node_data[node_label] - if node_info['validationRules']: - - if node_info['displayName'] == 'Check NA': + if node_info["validationRules"]: + if node_info["displayName"] == "Check NA": # Edit the node info -in place- - node_info['validationRules'].remove('int') + node_info["validationRules"].remove("int") break - - if base_rule in node_info['validationRules'] or re.match(rule_regex, node_info['validationRules'][0]): - if second_rule.startswith('matchAtLeastOne') or second_rule.startswith('matchExactlyOne'): + + if base_rule in node_info["validationRules"] or re.match( + rule_regex, node_info["validationRules"][0] + ): + if second_rule.startswith( + "matchAtLeastOne" + ) or second_rule.startswith("matchExactlyOne"): rule_args = f" MockComponent.{node_label} Patient.PatientID" - elif second_rule.startswith('inRange'): - rule_args = ' 1 1000 warning' - elif second_rule.startswith('regex'): - rule_args = ' search [a-f]' + elif second_rule.startswith("inRange"): + rule_args = " 1 1000 warning" + elif second_rule.startswith("regex"): + rule_args = " search [a-f]" else: - rule_args = '' + rule_args = "" # Edit the node info -in place- - node_info['validationRules'].append(second_rule + rule_args) + node_info["validationRules"].append(second_rule + rule_args) break - # Update the manifest to only contain the Component and attribute column where the rule was changed. - manifest = manifest[['Component', attribute]] - - data_model_js = DataModelJSONSchema(jsonld_path=helpers.get_data_path('example.model.jsonld'), graph=dmge.graph) - json_schema = data_model_js.get_json_validation_schema(source_node=rootNode, schema_name=rootNode + "_validation") + # Update the manifest to only contain the Component and attribute column where the rule was changed. 
+ manifest = manifest[["Component", attribute]] - validateManifest = ValidateManifest( - errors = [], - manifest = manifest, - manifestPath = manifestPath, - dmge = dmge, - jsonSchema = json_schema + data_model_js = DataModelJSONSchema( + jsonld_path=helpers.get_data_path("example.model.jsonld"), graph=dmge.graph + ) + json_schema = data_model_js.get_json_validation_schema( + source_node=rootNode, schema_name=rootNode + "_validation" ) + validateManifest = ValidateManifest( + errors=[], + manifest=manifest, + manifestPath=manifestPath, + dmge=dmge, + jsonSchema=json_schema, + ) - #perform validation with no exceptions raised + # perform validation with no exceptions raised _, errors, warnings = validateManifest.validate_manifest_rules( - manifest = manifest, - dmge = dmge, - restrict_rules = False, - project_scope = None, - ) + manifest=manifest, + dmge=dmge, + restrict_rules=False, + project_scope=None, + ) From a9c42bdf8a1cc086bc019ac6cf424583a1fc6fc4 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice <85905780+mialy-defelice@users.noreply.github.com> Date: Wed, 14 Feb 2024 11:49:54 -0800 Subject: [PATCH 184/199] run black on schema_utils --- schematic/utils/schema_utils.py | 28 +++++++++++++++------------- 1 file changed, 15 insertions(+), 13 deletions(-) diff --git a/schematic/utils/schema_utils.py b/schematic/utils/schema_utils.py index 686075eea..7d19cbeb0 100644 --- a/schematic/utils/schema_utils.py +++ b/schematic/utils/schema_utils.py @@ -133,7 +133,9 @@ def get_individual_rules(rule: str, validation_rules: list[str]) -> list: return validation_rules -def get_component_name_rules(component_names: list, component_rule: str) -> Tuple[list, str]: +def get_component_name_rules( + component_names: list, component_rule: str +) -> Tuple[list, str]: # If a component name is not attached to the rule, have it apply to all other components if DELIMITERS["component_name_delimiter"] != component_rule[0]: component_names.append("all_other_components") @@ -147,7 +149,7 @@ def get_component_name_rules(component_names: list, component_rule: str) -> Tupl try: assert component_names[-1] != " " except ValueError: - print ( + print( f"There was an error capturing at least one of the component name " f"in the following rule: {component_rule}, " f"please ensure there is not extra whitespace or non-allowed characters." @@ -182,7 +184,7 @@ def parse_component_validation_rules(validation_rule_string: str) -> Dict: try: assert len(component_names) == len(validation_rules) except ValueError: - print ( + print( f"The number of components names and validation rules does not match " f"for validation rule: {validation_rule_string}." 
) @@ -224,16 +226,16 @@ def parse_validation_rules(validation_rules: Union[list, dict]) -> Union[list, d elif isinstance(validation_rules, list): # If rules are already parsed from the JSONLD if len(validation_rules) > 1 and isinstance(validation_rules[-1], str): - return validation_rules + return validation_rules # Parse rules set for a subset of components/manifests elif DELIMITERS["component_rules_delimiter"] in validation_rules[0]: return parse_component_validation_rules( - validation_rule_string= validation_rules[0] + validation_rule_string=validation_rules[0] ) # Parse rules that are set across *all* components/manifests else: return parse_single_set_validation_rules( - validation_rule_string= validation_rules[0] + validation_rule_string=validation_rules[0] ) else: raise ValueError( @@ -245,12 +247,12 @@ def extract_component_validation_rules( manifest_component: str, validation_rules: dict[str, list] ) -> list: """Parse a component validation rule dictionary to pull out the rule (if any) for a given manifest - Args: - manifest_component, str: Component label, pulled from the manifest directly - validation_rules, dict[str, list[Union[list,str]]: Validation rules dictionary, where keys are the manifest component label, - and the value is a parsed set of validation rules. - Returns: - validation_rules, list[str]: + Args: + manifest_component, str: Component label, pulled from the manifest directly + validation_rules, dict[str, list[Union[list,str]]: Validation rules dictionary, where keys are the manifest component label, + and the value is a parsed set of validation rules. + Returns: + validation_rules, list[str]: """ manifest_component_rule = validation_rules.get(manifest_component) all_component_rules = validation_rules.get("all_other_components") @@ -258,7 +260,7 @@ def extract_component_validation_rules( # Capture situation where manifest_component rule is an empty string if manifest_component_rule is not None: if isinstance(manifest_component_rule, str): - if manifest_component_rule == '': + if manifest_component_rule == "": validation_rules = [] else: validation_rules = [manifest_component_rule] From 170a975280c305e8b296644317526915e1c5dd0b Mon Sep 17 00:00:00 2001 From: Mialy DeFelice <85905780+mialy-defelice@users.noreply.github.com> Date: Wed, 14 Feb 2024 11:50:41 -0800 Subject: [PATCH 185/199] run black on test_schemas --- tests/test_schemas.py | 40 ++++++++++++++++++++++++++-------------- 1 file changed, 26 insertions(+), 14 deletions(-) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index cf4fcd952..852d1b708 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -544,14 +544,14 @@ def test_gather_nodes(self, helpers, data_model): # Make sure the nodes returned conform to expectations (values and order) ## The parsing records display names for relationships for CSV and labels for JSONLD, so the expectations are different between the two. 
expected_nodes = [ - "Patient", - "Patient ID", - "Sex", - "Year of Birth", - "Diagnosis", - "Component", - "DataType", - ] + "Patient", + "Patient ID", + "Sex", + "Year of Birth", + "Diagnosis", + "Component", + "DataType", + ] assert nodes == expected_nodes @@ -892,11 +892,15 @@ def test_skip_edge(self, helpers, DMR, data_model_edges): # We're attempting to add an edge for a node that is the only one in the graph, # so `generate_edge` should skip adding edges and return the same graph edge_list_2 = data_model_edges.generate_edge( - node, node_dict, {node: parsed_data_model[node]}, edge_relationships, edge_list, + node, + node_dict, + {node: parsed_data_model[node]}, + edge_relationships, + edge_list, ) for node_1, node_2, edge_dict in edge_list_2: - G.add_edge(node_1, node_2, key=edge_dict['key'], weight=edge_dict['weight']) + G.add_edge(node_1, node_2, key=edge_dict["key"], weight=edge_dict["weight"]) # Assert that no edges were added and that the current graph edges are the same as before the call to `generate_edge` assert before_edges == G.edges @@ -948,11 +952,15 @@ def test_generate_edge( # Generate edges for whichever node we are testing edge_list_2 = data_model_edges.generate_edge( - node_to_add, all_node_dict, parsed_data_model, edge_relationships, edge_list, + node_to_add, + all_node_dict, + parsed_data_model, + edge_relationships, + edge_list, ) for node_1, node_2, edge_dict in edge_list_2: - G.add_edge(node_1, node_2, key=edge_dict['key'], weight=edge_dict['weight']) + G.add_edge(node_1, node_2, key=edge_dict["key"], weight=edge_dict["weight"]) # Assert that the current edges are different from the edges of the graph before assert G.edges > before_edges @@ -1014,11 +1022,15 @@ def test_generate_weights( # Generate edges for whichever node we are testing edge_list_2 = data_model_edges.generate_edge( - node_to_add, all_node_dict, parsed_data_model, edge_relationships, edge_list, + node_to_add, + all_node_dict, + parsed_data_model, + edge_relationships, + edge_list, ) for node_1, node_2, edge_dict in edge_list_2: - G.add_edge(node_1, node_2, key=edge_dict['key'], weight=edge_dict['weight']) + G.add_edge(node_1, node_2, key=edge_dict["key"], weight=edge_dict["weight"]) # Assert that the current edges are different from the edges of the graph before assert G.edges > before_edges From 2cb4475ba80741e730268275aa55c279c1d3987e Mon Sep 17 00:00:00 2001 From: Mialy DeFelice <85905780+mialy-defelice@users.noreply.github.com> Date: Wed, 14 Feb 2024 12:12:20 -0800 Subject: [PATCH 186/199] add updated data model with PatientID validation rule --- tests/data/example.model.csv | 6 +++--- tests/data/example.model.jsonld | 10 ++++++++-- 2 files changed, 11 insertions(+), 5 deletions(-) diff --git a/tests/data/example.model.csv b/tests/data/example.model.csv index 6858e509c..7cb3120ce 100644 --- a/tests/data/example.model.csv +++ b/tests/data/example.model.csv @@ -1,7 +1,7 @@ Attribute,Description,Valid Values,DependsOn,Properties,Required,Parent,DependsOn Component,Source,Validation Rules Component,,,,,TRUE,,,, Patient,,,"Patient ID, Sex, Year of Birth, Diagnosis, Component",,FALSE,DataType,,, -Patient ID,,,,,TRUE,DataProperty,,, +Patient ID,,,,,TRUE,DataProperty,,,#Patient unique warning^^#Biospecimen unique error Sex,,"Female, Male, Other",,,TRUE,DataProperty,,, Year of Birth,,,,,FALSE,DataProperty,,, Diagnosis,,"Healthy, Cancer",,,TRUE,DataProperty,,, @@ -11,7 +11,7 @@ Family History,,"Breast, Colorectal, Lung, Prostate, Skin",,,TRUE,DataProperty,, Biospecimen,,,"Sample ID, Patient ID, 
Tissue Status, Component",,FALSE,DataType,Patient,, Sample ID,,,,,TRUE,DataProperty,,, Tissue Status,,"Healthy, Malignant",,,TRUE,DataProperty,,, -Bulk RNA-seq Assay,,,"Filename, Sample ID, File Format, Component",,FALSE,DataType,Biospecimen,, +Bulk RNA-seq Assay,,,"Filename, Sample ID, Patient ID, File Format, Component",,FALSE,DataType,Patient,, Filename,,,,,TRUE,DataProperty,,, File Format,,"FASTQ, BAM, CRAM, CSV/TSV",,,TRUE,DataProperty,,, BAM,,,Genome Build,,FALSE,ValidValue,,, @@ -42,4 +42,4 @@ Check Date,,,,,TRUE,DataProperty,,,date Check NA,,,,,TRUE,DataProperty,,,int::IsNA MockRDB,,,"Component, MockRDB_id, SourceManifest",,FALSE,DataType,,, MockRDB_id,,,,,TRUE,DataProperty,,,int -SourceManifest,,,,,TRUE,DataProperty,,, +SourceManifest,,,,,TRUE,DataProperty,,, \ No newline at end of file diff --git a/tests/data/example.model.jsonld b/tests/data/example.model.jsonld index a58d36323..05da1a607 100644 --- a/tests/data/example.model.jsonld +++ b/tests/data/example.model.jsonld @@ -73,7 +73,10 @@ }, "sms:displayName": "Patient ID", "sms:required": "sms:true", - "sms:validationRules": [] + "sms:validationRules": { + "Biospecimen": "unique error", + "Patient": "unique warning" + } }, { "@id": "bts:Sex", @@ -577,7 +580,7 @@ "sms:required": "sms:false", "sms:requiresComponent": [ { - "@id": "bts:Biospecimen" + "@id": "bts:Patient" } ], "sms:requiresDependency": [ @@ -587,6 +590,9 @@ { "@id": "bts:SampleID" }, + { + "@id": "bts:PatientID" + }, { "@id": "bts:FileFormat" }, From 8c65b57e89b18c9d78112affcb3bb1401901d0e2 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice <85905780+mialy-defelice@users.noreply.github.com> Date: Wed, 14 Feb 2024 12:45:04 -0800 Subject: [PATCH 187/199] update path to new model and dmge --- tests/test_validation.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/tests/test_validation.py b/tests/test_validation.py index 5c2f31a95..952b38fae 100644 --- a/tests/test_validation.py +++ b/tests/test_validation.py @@ -475,16 +475,14 @@ def test_in_house_validation(self, helpers, dmge, metadataModel): ], ids=["biospecimen_manifest", "patient_manifest"], ) - def test_component_validations(self, helpers, manifest_path): + def test_component_validations(self, helpers, manifest_path, dmge): full_manifest_path = helpers.get_data_path(manifest_path) manifest = helpers.get_data_frame(full_manifest_path) root_node = manifest["Component"][0] - dmge = helpers.get_data_model_graph_explorer(path="example_new_vrs.model.csv") - data_model_js = DataModelJSONSchema( - jsonld_path=helpers.get_data_path("example_new_vrs.model.csv"), + jsonld_path=helpers.get_data_path("example.model.csv"), graph=dmge.graph, ) From c91f5a19070d89d13762eb4971c6d7cef59b8fed Mon Sep 17 00:00:00 2001 From: Mialy DeFelice <85905780+mialy-defelice@users.noreply.github.com> Date: Wed, 14 Feb 2024 12:46:33 -0800 Subject: [PATCH 188/199] add additional mock manifests for component rule testing --- .../example.biospecimen_component_rule.manifest.csv | 4 ++++ .../example.patient_component_rule.manifest.csv | 4 ++++ 2 files changed, 8 insertions(+) create mode 100644 tests/data/mock_manifests/example.biospecimen_component_rule.manifest.csv create mode 100644 tests/data/mock_manifests/example.patient_component_rule.manifest.csv diff --git a/tests/data/mock_manifests/example.biospecimen_component_rule.manifest.csv b/tests/data/mock_manifests/example.biospecimen_component_rule.manifest.csv new file mode 100644 index 000000000..8ac07cd1d --- /dev/null +++ 
b/tests/data/mock_manifests/example.biospecimen_component_rule.manifest.csv @@ -0,0 +1,4 @@ +Sample ID,Patient ID,Tissue Status,Component +123,123,Healthy,Biospecimen +456,123,Healthy,Biospecimen +789,syn465,Healthy,Biospecimen \ No newline at end of file diff --git a/tests/data/mock_manifests/example.patient_component_rule.manifest.csv b/tests/data/mock_manifests/example.patient_component_rule.manifest.csv new file mode 100644 index 000000000..d0d26035b --- /dev/null +++ b/tests/data/mock_manifests/example.patient_component_rule.manifest.csv @@ -0,0 +1,4 @@ +Patient ID,Sex,Year of Birth,Diagnosis,Component,Cancer Type,Family History +123,Female,,Cancer,Patient,Breast,"Breast, Colorectal" +123,Male,,Healthy,Patient,,"Breast, Colorectal" +789,Other,,Healthy,Patient,,"Breast, Colorectal" \ No newline at end of file From 8a2809b17171d2da6f4dc525d85c8794f4e2967d Mon Sep 17 00:00:00 2001 From: Mialy DeFelice <85905780+mialy-defelice@users.noreply.github.com> Date: Wed, 14 Feb 2024 13:09:11 -0800 Subject: [PATCH 189/199] run black on schema_utils --- schematic/utils/schema_utils.py | 1 - 1 file changed, 1 deletion(-) diff --git a/schematic/utils/schema_utils.py b/schematic/utils/schema_utils.py index 84cb2e9dc..9fa4b2314 100644 --- a/schematic/utils/schema_utils.py +++ b/schematic/utils/schema_utils.py @@ -7,7 +7,6 @@ from typing import List, Literal, Dict, Tuple, Union - logger = logging.getLogger(__name__) DisplayLabelType = Literal["class_label", "display_label"] From 1ea120eff3fbea7e82b0c78bef55b8084d62672b Mon Sep 17 00:00:00 2001 From: Mialy DeFelice <85905780+mialy-defelice@users.noreply.github.com> Date: Wed, 14 Feb 2024 13:52:57 -0800 Subject: [PATCH 190/199] clean up validate_rules_utils imports --- schematic/utils/validate_rules_utils.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/schematic/utils/validate_rules_utils.py b/schematic/utils/validate_rules_utils.py index ec9fa546b..3ee3399e6 100644 --- a/schematic/utils/validate_rules_utils.py +++ b/schematic/utils/validate_rules_utils.py @@ -1,9 +1,7 @@ """validate rules utils""" -from typing import Union import logging -import pandas as pd -from typing import Any, Dict, Optional, Text, List, Tuple +from typing import Union from jsonschema import ValidationError From 9c25d9cf4a318467356bfc34850a9c57ffc433f4 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice <85905780+mialy-defelice@users.noreply.github.com> Date: Wed, 14 Feb 2024 14:12:52 -0800 Subject: [PATCH 191/199] tests/test_schemas.py --- tests/test_schemas.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/tests/test_schemas.py b/tests/test_schemas.py index fda88f102..54b93fd84 100644 --- a/tests/test_schemas.py +++ b/tests/test_schemas.py @@ -786,6 +786,10 @@ def test_run_rel_functions(self, helpers, data_model, rel_func, test_dn, test_bo for ind, rule in enumerate(vrs): if "::" in rule[0]: assert parsed_vrs[ind] == rule[0].split("::") + elif '^^' in rule[0]: + component_rule_sets = rule[0].split("^^") + components = [cr.split(' ')[0].replace('#', '') for cr in component_rule_sets] + assert components == [k for k in parsed_vrs[0].keys()] else: assert parsed_vrs[ind] == rule elif DATA_MODEL_DICT[data_model] == "JSONLD": From b2e436cf35fc32a7e3d1b9bdb00158a3020e78f3 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice <85905780+mialy-defelice@users.noreply.github.com> Date: Wed, 14 Feb 2024 14:46:07 -0800 Subject: [PATCH 192/199] revert some test relationships --- tests/data/example.model.csv | 2 +- tests/data/example.model.jsonld | 5 +---- 2 files changed, 2 
insertions(+), 5 deletions(-) diff --git a/tests/data/example.model.csv b/tests/data/example.model.csv index 7cb3120ce..f49aa8d3a 100644 --- a/tests/data/example.model.csv +++ b/tests/data/example.model.csv @@ -11,7 +11,7 @@ Family History,,"Breast, Colorectal, Lung, Prostate, Skin",,,TRUE,DataProperty,, Biospecimen,,,"Sample ID, Patient ID, Tissue Status, Component",,FALSE,DataType,Patient,, Sample ID,,,,,TRUE,DataProperty,,, Tissue Status,,"Healthy, Malignant",,,TRUE,DataProperty,,, -Bulk RNA-seq Assay,,,"Filename, Sample ID, Patient ID, File Format, Component",,FALSE,DataType,Patient,, +Bulk RNA-seq Assay,,,"Filename, Sample ID, File Format, Component",,FALSE,DataType,Biospecimen,, Filename,,,,,TRUE,DataProperty,,, File Format,,"FASTQ, BAM, CRAM, CSV/TSV",,,TRUE,DataProperty,,, BAM,,,Genome Build,,FALSE,ValidValue,,, diff --git a/tests/data/example.model.jsonld b/tests/data/example.model.jsonld index 05da1a607..9724e341d 100644 --- a/tests/data/example.model.jsonld +++ b/tests/data/example.model.jsonld @@ -580,7 +580,7 @@ "sms:required": "sms:false", "sms:requiresComponent": [ { - "@id": "bts:Patient" + "@id": "bts:Biospecimen" } ], "sms:requiresDependency": [ @@ -590,9 +590,6 @@ { "@id": "bts:SampleID" }, - { - "@id": "bts:PatientID" - }, { "@id": "bts:FileFormat" }, From ffc9140f429565beb4522c985bde3c99a5282743 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice <85905780+mialy-defelice@users.noreply.github.com> Date: Wed, 14 Feb 2024 15:32:44 -0800 Subject: [PATCH 193/199] fix indent issue in test_validation --- tests/test_validation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_validation.py b/tests/test_validation.py index 4b021a448..8e028b6fe 100644 --- a/tests/test_validation.py +++ b/tests/test_validation.py @@ -295,7 +295,7 @@ def test_invalid_manifest(self, helpers, dmge, metadataModel): assert any(warning_in_list) - def test_in_house_validation(self, helpers, dmge, metadataModel): + def test_in_house_validation(self, helpers, dmge, metadataModel): manifestPath = helpers.get_data_path("mock_manifests/Invalid_Test_Manifest.csv") rootNode = "MockComponent" From 41640e512f8fbf260403bec66f4bff6408fe7843 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice <85905780+mialy-defelice@users.noreply.github.com> Date: Thu, 15 Feb 2024 09:22:32 -0800 Subject: [PATCH 194/199] update typing in docstring --- schematic/manifest/generator.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py index db02a8b12..dea6a44d6 100644 --- a/schematic/manifest/generator.py +++ b/schematic/manifest/generator.py @@ -752,7 +752,7 @@ def _request_regex_match_vr_formatting( - Upon correct format entry, text will turn black. - If incorrect format is entered a validation error will pop up. Input: - validation_rules: Any(List[str], Dict), defines the validation rules + validation_rules: List[str], defines the validation rules applied to a particular column. i: int, defines current column. 
requests_body: dict, containing all the update requests to add to the gs From c7580c21ce20f82c0055c728d80ecf69c57d524f Mon Sep 17 00:00:00 2001 From: Mialy DeFelice <85905780+mialy-defelice@users.noreply.github.com> Date: Thu, 15 Feb 2024 10:24:20 -0800 Subject: [PATCH 195/199] add additional function to check for duplicate components and additional testing --- schematic/utils/schema_utils.py | 17 ++++++++++++++++- tests/test_utils.py | 26 +++++++++++++++++++++++++- 2 files changed, 41 insertions(+), 2 deletions(-) diff --git a/schematic/utils/schema_utils.py b/schematic/utils/schema_utils.py index 9fa4b2314..bb93c6645 100644 --- a/schematic/utils/schema_utils.py +++ b/schematic/utils/schema_utils.py @@ -241,7 +241,7 @@ def get_component_name_rules( assert component_names[-1] != " " except ValueError: print( - f"There was an error capturing at least one of the component name " + f"There was an error capturing at least one of the component names " f"in the following rule: {component_rule}, " f"please ensure there is not extra whitespace or non-allowed characters." ) @@ -249,6 +249,18 @@ def get_component_name_rules( component_rule = component_rule.strip() return component_names, component_rule +def check_for_duplicate_components(component_names:list[str], validation_rule_string:str)->None: + """ + + """ + duplicated_entries = [cn for cn in component_names if component_names.count(cn) > 1] + if duplicated_entries: + raise ValueError( + f"Oops, it looks like the following rule {validation_rule_string}, contains the same component " + f"name more than once. An attribute can only have a single rule applied per manifest/component." + ) + return + def parse_component_validation_rules(validation_rule_string: str) -> Dict: component_names = [] @@ -280,6 +292,9 @@ def parse_component_validation_rules(validation_rule_string: str) -> Dict: f"for validation rule: {validation_rule_string}." ) + # If a component name is repeated throw an error. + check_for_duplicate_components(component_names, validation_rule_string) + validation_rules_dict = dict( map(lambda i, j: (i, j), component_names, validation_rules) ) diff --git a/tests/test_utils.py b/tests/test_utils.py index ee1b2afa8..3b99377f2 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -61,6 +61,7 @@ parse_single_set_validation_rules, parse_validation_rules, extract_component_validation_rules, + check_for_duplicate_components, ) @@ -143,6 +144,11 @@ "validation_rules": ["list::regex match \(\d{3}\) \d{3}-\d{4}"], "parsed_rules": ["list", "regex match \(\d{3}\) \d{3}-\d{4}"], }, + "duplicated_component": { + "validation_rules": ["#Patient unique^^#Patient int"], + "parsed_rules": "raises_exception", + } + } TEST_DN_DICT = { @@ -766,6 +772,24 @@ def test_parse_single_set_validation_rules(self, test_individual_rule_set): except: assert validation_rule_string == "#Patient int::inRange 100 900" + @pytest.mark.parametrize( + "component_names", + [ + ["duplicated_component", ['Patient', 'Biospecimen', 'Patient']], + ["individual_component", ['Patient', 'Biospecimen']], + ["no_component", []] + ], + ids=["duplicated_component", "individual_component", "no_component"], + ) + def test_check_for_duplicate_components(self, component_names): + """Test that we are properly identifying duplicates in a list. + Exception should only be triggered when the duplicate component list is passed. 
+ """ + try: + check_for_duplicate_components(component_names=component_names[1], validation_rule_string='dummy_str') + except: + assert component_names[0] == "duplicated_component" + @pytest.mark.parametrize( "test_rule_name", list(TEST_VALIDATION_RULES.keys()), @@ -788,7 +812,7 @@ def test_parse_validation_rules(self, test_rule_name): ) assert expected_parsed_rules == parsed_validation_rules except: - assert test_rule_name == "str_rule" + assert test_rule_name in ["str_rule", "duplicated_component"] @pytest.mark.parametrize( "test_rule_name", From 47028fac04f2ed67bcc18dd35d2d1602f76d2fd3 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice <85905780+mialy-defelice@users.noreply.github.com> Date: Thu, 15 Feb 2024 10:30:08 -0800 Subject: [PATCH 196/199] run black --- schematic/utils/schema_utils.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/schematic/utils/schema_utils.py b/schematic/utils/schema_utils.py index bb93c6645..97f147346 100644 --- a/schematic/utils/schema_utils.py +++ b/schematic/utils/schema_utils.py @@ -249,10 +249,11 @@ def get_component_name_rules( component_rule = component_rule.strip() return component_names, component_rule -def check_for_duplicate_components(component_names:list[str], validation_rule_string:str)->None: - """ - """ +def check_for_duplicate_components( + component_names: list[str], validation_rule_string: str +) -> None: + """ """ duplicated_entries = [cn for cn in component_names if component_names.count(cn) > 1] if duplicated_entries: raise ValueError( From a0b3b73364c0958cd26a363d1ac16ba4d5444d2f Mon Sep 17 00:00:00 2001 From: Mialy DeFelice <85905780+mialy-defelice@users.noreply.github.com> Date: Thu, 15 Feb 2024 11:27:20 -0800 Subject: [PATCH 197/199] add docstrings and clean up, address andrews comments --- schematic/utils/schema_utils.py | 94 +++++++++++++++++++++++---------- 1 file changed, 65 insertions(+), 29 deletions(-) diff --git a/schematic/utils/schema_utils.py b/schematic/utils/schema_utils.py index 97f147346..a1a129cd1 100644 --- a/schematic/utils/schema_utils.py +++ b/schematic/utils/schema_utils.py @@ -11,11 +11,9 @@ DisplayLabelType = Literal["class_label", "display_label"] BLACKLISTED_CHARS = ["(", ")", ".", " ", "-"] -DELIMITERS = { - "component_name_delimiter": "#", - "component_rules_delimiter": "^^", - "rule_delimiter": "::", -} +COMPONENT_NAME_DELIMITER = "#" +COMPONENT_RULES_DELIMITER = "^^" +RULE_DELIMITER = "::" def attr_dict_template(key_name: str) -> Dict[str, dict[str, dict]]: @@ -214,9 +212,19 @@ def convert_bool_to_str(provided_bool: bool) -> str: return str(provided_bool) -def get_individual_rules(rule: str, validation_rules: list[str]) -> list: +def get_individual_rules( + rule: str, validation_rules: list[Union[str, None]] +) -> Union[str, list]: + """Extract individual rules from a string and add to a list of rules + Args: + rule, str: valdation rule that has been parsed from a component rule. + validaiton_rules, list: list of rules being collected, + if this is the first time the list is being added to, it will be empty + Returns: + validaiton_rules, list: list of rules being collected. 
+ """ # Separate multiple rules (defined by addition of the rule delimiter) - if DELIMITERS["rule_delimiter"] in rule: + if RULE_DELIMITER in rule: validation_rules.append(parse_single_set_validation_rules(rule)) # Get single rule else: @@ -225,17 +233,25 @@ def get_individual_rules(rule: str, validation_rules: list[str]) -> list: def get_component_name_rules( - component_names: list, component_rule: str + component_names: list[Union[str, None]], component_rule: str ) -> Tuple[list, str]: + """Get component name and rule from an string that was initilly split by the COMPONENT_RULES_DELIMITER + Args: + component_names, list[Union[str,None]]: list of components, will be empty if being added to for the first time. + component_rule, str: component rule string that has only been split by the COMPONENT_RULES_DELIMETER + Returns: + Tuple[list,str]: list with the a new component name or 'all_other_components' appended, + rule with the component name stripped off. + Raises: + ValueError if it looks like a component name should have been added to the list, but wass not. + """ # If a component name is not attached to the rule, have it apply to all other components - if DELIMITERS["component_name_delimiter"] != component_rule[0]: + if COMPONENT_NAME_DELIMITER != component_rule[0]: component_names.append("all_other_components") # Get the component name if available else: component_names.append( - component_rule.split(" ")[0].replace( - DELIMITERS["component_name_delimiter"], "" - ) + component_rule.split(" ")[0].replace(COMPONENT_NAME_DELIMITER, "") ) try: assert component_names[-1] != " " @@ -253,7 +269,14 @@ def get_component_name_rules( def check_for_duplicate_components( component_names: list[str], validation_rule_string: str ) -> None: - """ """ + """Check if component names are repeated in a validation rule + Args: + component_names, list[str]: list of components identified in the validation rule + validation_rule_str, str: validation rule, used if error needs to be raised. + Returns: + None + Raises: ValueError if a component name is duplicated. + """ duplicated_entries = [cn for cn in component_names if component_names.count(cn) > 1] if duplicated_entries: raise ValueError( @@ -264,12 +287,18 @@ def check_for_duplicate_components( def parse_component_validation_rules(validation_rule_string: str) -> Dict: + """If a validation rule is identified to be fomatted as a component validation rule, parse to a dictionary of components:rules + Args: + validation_rule_string, str: validation rule provided by user. + Returns: + validation_rules_dict, dict: validation rules parsed to a dictionary where + the key is the component name (or 'all_other_components') and the value is the parsed validaiton rule for + the given component. + """ component_names = [] validation_rules = [] - component_rules = validation_rule_string.split( - DELIMITERS["component_rules_delimiter"] - ) + component_rules = validation_rule_string.split(COMPONENT_RULES_DELIMITER) # Extract component rules, per component for component_rule in component_rules: component_rule = component_rule.strip() @@ -296,23 +325,30 @@ def parse_component_validation_rules(validation_rule_string: str) -> Dict: # If a component name is repeated throw an error. 
check_for_duplicate_components(component_names, validation_rule_string) - validation_rules_dict = dict( - map(lambda i, j: (i, j), component_names, validation_rules) - ) + validation_rules_dict = dict(zip(component_names, validation_rules)) + return validation_rules_dict def parse_single_set_validation_rules(validation_rule_string: str) -> list: + """Parse a single set of validation rules. + Args: + validation_rule_string, str: validation rule provided by user. + Returns: + list, the validation rule string split by the rule delimiter + Raises: + ValueError if the string contains a component name delimiter at the beginning. + This would indicate that a user was trying to set component rules, but did so improperly. + """ # Try to catch an improperly formatted rule - if DELIMITERS["component_name_delimiter"] == validation_rule_string[0]: + if COMPONENT_NAME_DELIMITER == validation_rule_string[0]: raise ValueError( f"The provided validation rule {validation_rule_string}, looks to be formatted as a component " f"based rule, but is missing the necessary formatting, " f"please refer to the SchemaHub documentation for more details." ) - # Parse rules that are set across *all* components/manifests - return validation_rule_string.split(DELIMITERS["rule_delimiter"]) + return validation_rule_string.split(RULE_DELIMITER) def parse_validation_rules(validation_rules: Union[list, dict]) -> Union[list, dict]: @@ -322,7 +358,8 @@ def parse_validation_rules(validation_rules: Union[list, dict]) -> Union[list, d if list, contains a string validation rule; if dictionary, key is the component the rule (value) is applied to Returns: - validation_rules, list: if submitted List + validation_rules, Union[list,dict]: Parsed validation rules, component rules are output as a dictionary, + single sets are a list. Raises: ValueError if Rule is not formatted properly """ if isinstance(validation_rules, dict): # Rules pulled in as a dict can be used directly return validation_rules elif isinstance(validation_rules, list): # If rules are already parsed from the JSONLD if len(validation_rules) > 1 and isinstance(validation_rules[-1], str): return validation_rules # Parse rules set for a subset of components/manifests - elif DELIMITERS["component_rules_delimiter"] in validation_rules[0]: + elif COMPONENT_RULES_DELIMITER in validation_rules[0]: return parse_component_validation_rules( validation_rule_string=validation_rules[0] ) # Parse rules that are set across *all* components/manifests else: return parse_single_set_validation_rules( validation_rule_string=validation_rules[0] ) - else: - raise ValueError( - f"The validation rule provided: {str(validation_rules)} is not submitted in an accepted type (list, dictionary) please check your JSONLD." - ) + return def extract_component_validation_rules( manifest_component: str, validation_rules: dict[str, list] ) -> list: """Parse a component validation rule dictionary to pull out the rule (if any) for a given manifest Args: manifest_component, str: Component label, pulled from the manifest directly validation_rules, dict[str, list[Union[list,str]]: Validation rules dictionary, where keys are the manifest component label, and the value is a parsed set of validation rules. Returns: - validation_rules, list[str]: + validation_rules, list[str]: rule for the provided manifest component if one is available; + if a validation rule is not specified for a given component but "all_other_components" is specified (as a key), that rule is pulled instead; + otherwise an empty list is returned.
""" manifest_component_rule = validation_rules.get(manifest_component) all_component_rules = validation_rules.get("all_other_components") From ed31d914bcd63529b89506da26464c8837b1fff8 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice <85905780+mialy-defelice@users.noreply.github.com> Date: Thu, 15 Feb 2024 12:53:11 -0800 Subject: [PATCH 198/199] switch from try:except to logger --- schematic/utils/schema_utils.py | 27 ++++++++++++--------------- 1 file changed, 12 insertions(+), 15 deletions(-) diff --git a/schematic/utils/schema_utils.py b/schematic/utils/schema_utils.py index a1a129cd1..6caba81c6 100644 --- a/schematic/utils/schema_utils.py +++ b/schematic/utils/schema_utils.py @@ -148,7 +148,7 @@ def get_schema_label( Returns: label, str: class label of display name Raises: - ValueError if entry_type.lower(), is not either 'class' or 'property' + Error Logged if entry_type.lower(), is not either 'class' or 'property' """ if entry_type.lower() == "class": label = get_class_label_from_display_name( @@ -160,7 +160,7 @@ def get_schema_label( display_name=display_name, strict_camel_case=strict_camel_case ) else: - raise ValueError( + logger.error( f"The entry type submitted: {entry_type}, is not one of the permitted types: 'class' or 'property'" ) return label @@ -243,7 +243,7 @@ def get_component_name_rules( Tuple[list,str]: list with the a new component name or 'all_other_components' appended, rule with the component name stripped off. Raises: - ValueError if it looks like a component name should have been added to the list, but wass not. + Error Logged if it looks like a component name should have been added to the list, but wass not. """ # If a component name is not attached to the rule, have it apply to all other components if COMPONENT_NAME_DELIMITER != component_rule[0]: @@ -253,14 +253,13 @@ def get_component_name_rules( component_names.append( component_rule.split(" ")[0].replace(COMPONENT_NAME_DELIMITER, "") ) - try: - assert component_names[-1] != " " - except ValueError: - print( + if component_names[-1] == " ": + logger.error( f"There was an error capturing at least one of the component names " f"in the following rule: {component_rule}, " f"please ensure there is not extra whitespace or non-allowed characters." ) + component_rule = component_rule.replace(component_rule.split(" ")[0], "") component_rule = component_rule.strip() return component_names, component_rule @@ -275,11 +274,11 @@ def check_for_duplicate_components( validation_rule_str, str: validation rule, used if error needs to be raised. Returns: None - Raises: ValueError if a component name is duplicated. + Raises: Error Logged if a component name is duplicated. """ duplicated_entries = [cn for cn in component_names if component_names.count(cn) > 1] if duplicated_entries: - raise ValueError( + logger.error( f"Oops, it looks like the following rule {validation_rule_string}, contains the same component " f"name more than once. An attribute can only have a single rule applied per manifest/component." ) @@ -314,10 +313,8 @@ def parse_component_validation_rules(validation_rule_string: str) -> Dict: ) # Ensure we collected the component names and validation rules like expected - try: - assert len(component_names) == len(validation_rules) - except ValueError: - print( + if len(component_names) != len(validation_rules): + logger.error( f"The number of components names and validation rules does not match " f"for validation rule: {validation_rule_string}." 
@@ -342,7 +339,7 @@ def parse_single_set_validation_rules(validation_rule_string: str) -> list:
     """
     # Try to catch an improperly formatted rule
     if COMPONENT_NAME_DELIMITER == validation_rule_string[0]:
-        raise ValueError(
+        logger.error(
             f"The provided validation rule {validation_rule_string} looks to be formatted as a component "
             f"based rule, but is missing the necessary formatting, "
             f"please refer to the SchemaHub documentation for more details."
         )
@@ -361,7 +358,7 @@ def parse_validation_rules(validation_rules: Union[list, dict]) -> Union[list, d
         validation_rules, Union[list,dict]: Parsed validation rules; component rules are output as a dictionary,
         single sets as a list.
     Raises:
-        ValueError if the rule is not formatted properly
+        Error logged if the rule is not formatted properly
     """

From 649b28f2a955375d30d296efb5620836a9d76d73 Mon Sep 17 00:00:00 2001
From: Mialy DeFelice <85905780+mialy-defelice@users.noreply.github.com>
Date: Fri, 16 Feb 2024 10:48:57 -0800
Subject: [PATCH 199/199] update parallel_map to parallel_applymap

---
 schematic/utils/df_utils.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/schematic/utils/df_utils.py b/schematic/utils/df_utils.py
index 222b75713..1bba83c7a 100644
--- a/schematic/utils/df_utils.py
+++ b/schematic/utils/df_utils.py
@@ -98,7 +98,7 @@ def find_and_convert_ints(dataframe: pd.DataFrame) -> tuple[pd.DataFrame, pd.Dat
     else:
         # parallelize iterations for large manifests
         pandarallel.initialize(verbose=1)
-        ints = dataframe.parallel_map(
+        ints = dataframe.parallel_applymap(
             lambda cell: convert_ints(cell), na_action="ignore"
         ).fillna(False)
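The rename in this last patch tracks the split in pandas itself: Series.map is elementwise over a Series, while DataFrame.applymap is elementwise over an entire frame, and pandarallel patches a parallel counterpart onto each type accordingly, so parallel_map is not defined on DataFrames. A minimal sketch of the distinction, assuming pandarallel's default initialization; the DataFrame below is a hypothetical stand-in for a manifest:

    import pandas as pd
    from pandarallel import pandarallel

    pandarallel.initialize(verbose=1)

    df = pd.DataFrame({"a": ["1", "2.0"], "b": ["3", None]})

    # Elementwise over one column: Series.map has a parallel counterpart.
    col = df["a"].parallel_map(float)

    # Elementwise over the whole frame: the applymap counterpart is required,
    # and na_action="ignore" skips missing cells just as in pandas.
    floats = df.parallel_applymap(float, na_action="ignore")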