From 266afcb178933af398758100acc6ed09b7dfd235 Mon Sep 17 00:00:00 2001 From: James Stevenson Date: Fri, 3 Jan 2025 19:18:42 -0500 Subject: [PATCH] style: update ruff --- .pre-commit-config.yaml | 2 +- pyproject.toml | 38 ++++++++++++++--- src/metakb/cli.py | 8 ++-- src/metakb/database.py | 6 +-- src/metakb/harvesters/base.py | 4 +- src/metakb/load_data.py | 2 +- src/metakb/normalizers.py | 80 ++++++++++++++++++++--------------- src/metakb/query.py | 11 ++--- 8 files changed, 92 insertions(+), 59 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 0ea25589..9d58b507 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -10,7 +10,7 @@ repos: - id: detect-aws-credentials args: [ --allow-missing-credentials ] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.5.0 # ruff version + rev: v0.8.4 # ruff version hooks: - id: ruff-format - id: ruff diff --git a/pyproject.toml b/pyproject.toml index 982b0abc..b243dccf 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -45,7 +45,7 @@ dynamic = ["version"] [project.optional-dependencies] tests = ["pytest", "pytest-cov", "mock", "pytest-asyncio", "deepdiff"] -dev = ["pre-commit>=3.7.1", "ruff==0.5.0"] +dev = ["pre-commit>=3.7.1", "ruff==0.8.4"] notebooks = ["ipykernel", "jupyterlab"] docs = [ "sphinx==6.1.3", @@ -116,10 +116,14 @@ select = [ "RSE", # https://docs.astral.sh/ruff/rules/#flake8-raise-rse "RET", # https://docs.astral.sh/ruff/rules/#flake8-return-ret "SLF", # https://docs.astral.sh/ruff/rules/#flake8-self-slf + "SLOT", # https://docs.astral.sh/ruff/rules/#flake8-slots-slot "SIM", # https://docs.astral.sh/ruff/rules/#flake8-simplify-sim "ARG", # https://docs.astral.sh/ruff/rules/#flake8-unused-arguments-arg "PTH", # https://docs.astral.sh/ruff/rules/#flake8-use-pathlib-pth "PGH", # https://docs.astral.sh/ruff/rules/#pygrep-hooks-pgh + "PLC", # https://docs.astral.sh/ruff/rules/#convention-c + "PLE", # https://docs.astral.sh/ruff/rules/#error-e_1 + "TRY", # 
https://docs.astral.sh/ruff/rules/#tryceratops-try "PERF", # https://docs.astral.sh/ruff/rules/#perflint-perf "FURB", # https://docs.astral.sh/ruff/rules/#refurb-furb "RUF", # https://docs.astral.sh/ruff/rules/#ruff-specific-rules-ruf @@ -138,13 +142,14 @@ fixable = [ "PT", "RSE", "SIM", + "PLC", + "PLE", + "TRY", "PERF", "FURB", "RUF" ] # ANN003 - missing-type-kwargs -# ANN101 - missing-type-self -# ANN102 - missing-type-cls # D203 - one-blank-line-before-class # D205 - blank-line-after-summary # D206 - indent-with-spaces* @@ -158,19 +163,22 @@ fixable = [ # E501 - line-too-long* # W191 - tab-indentation* # S321 - suspicious-ftp-lib-usage +# PLC0206 - dict-index-missing-items # *ignored for compatibility with formatter ignore = [ - "ANN003", "ANN101", "ANN102", + "ANN003", "D203", "D205", "D206", "D213", "D300", "D400", "D415", "E111", "E114", "E117", "E501", "W191", "S321", + "PLC0206", ] [tool.ruff.lint.per-file-ignores] # ANN001 - missing-type-function-argument # ANN2 - missing-return-type -# ANN102 - missing-type-cls +# D100 - undocumented-public-module +# D102 - undocumented-public-method # S101 - assert # B011 - assert-false # D104 - undocumented-public-package @@ -179,5 +187,23 @@ ignore = [ # ARG001 - unused-function-argument # SLF001 - private-member-acces # N815 - mixed-case-variable-in-class-scope -"tests/*" = ["ANN001", "ANN2", "ANN102", "S101", "B011", "D100", "D104", "INP001", "SLF001", "ARG001"] +"tests/*" = [ + "ANN001", + "ANN2", + "D100", + "D102", + "S101", + "B011", + "D100", + "D104", + "INP001", + "SLF001", + "ARG001" ] "src/metakb/schemas/*" = ["ANN102", "N815"] + +[tool.ruff.lint.flake8-annotations] +mypy-init-return = true + +[tool.ruff.format] +docstring-code-format = true diff --git a/src/metakb/cli.py b/src/metakb/cli.py index da12eb3a..a8c1c3f2 100644 --- a/src/metakb/cli.py +++ b/src/metakb/cli.py @@ -171,13 +171,13 @@ def update_normalizers( f"prohibited. Unset the environment variable " f"{NORMALIZER_AWS_ENV_VARS[name]} to proceed." 
) - _logger.error(msg) + _logger.exception(msg) click.echo(msg) success = False continue - except (Exception, SystemExit) as e: - _logger.error( - "Encountered error while updating %s database: %s", name.value, e + except (Exception, SystemExit): + _logger.exception( + "Encountered error while updating %s database", name.value ) click.echo(f"Failed to update {name.value} normalizer.") success = False diff --git a/src/metakb/database.py b/src/metakb/database.py index bbbe4aee..75b1998c 100644 --- a/src/metakb/database.py +++ b/src/metakb/database.py @@ -25,11 +25,11 @@ def _get_secret() -> str: try: get_secret_value_response = client.get_secret_value(SecretId=secret_name) - except ClientError as e: + except ClientError: # For a list of exceptions thrown, see # https://docs.aws.amazon.com/secretsmanager/latest/apireference/API_GetSecretValue.html - logger.error(e) - raise e + logger.exception("Boto client error while acquiring secrets") + raise else: return get_secret_value_response["SecretString"] diff --git a/src/metakb/harvesters/base.py b/src/metakb/harvesters/base.py index 36f9a337..9c71007c 100644 --- a/src/metakb/harvesters/base.py +++ b/src/metakb/harvesters/base.py @@ -75,7 +75,7 @@ def save_harvested_data_to_file( try: with (harvested_filepath).open("w+") as f: json.dump(harvested_data.model_dump(), f, indent=2) - except Exception as e: - logger.error("Error creating %s harvester JSON: %s", src_name, e) + except Exception: + logger.exception("Error creating %s harvester JSON", src_name) return False return True diff --git a/src/metakb/load_data.py b/src/metakb/load_data.py index baf5b203..fa53dbf8 100644 --- a/src/metakb/load_data.py +++ b/src/metakb/load_data.py @@ -489,7 +489,7 @@ def add_transformed_data(driver: Driver, data: dict) -> None: for method in data.get("methods", []): session.execute_write(_add_method, method, ids_in_stmts) - for obj_type in {"genes", "conditions"}: + for obj_type in ("genes", "conditions"): for obj in data.get(obj_type, []): 
session.execute_write(_add_gene_or_disease, obj, ids_in_stmts) diff --git a/src/metakb/normalizers.py b/src/metakb/normalizers.py index 572c7568..3a822c20 100644 --- a/src/metakb/normalizers.py +++ b/src/metakb/normalizers.py @@ -33,12 +33,12 @@ from variation.query import QueryHandler as VariationQueryHandler __all__ = [ - "ViccNormalizers", + "NORMALIZER_AWS_ENV_VARS", + "IllegalUpdateError", "NormalizerName", + "ViccNormalizers", "check_normalizers", - "IllegalUpdateError", "update_normalizer", - "NORMALIZER_AWS_ENV_VARS", ] _logger = logging.getLogger(__name__) @@ -90,7 +90,9 @@ def __init__(self, db_url: str | None = None) -> None: Note that gene concept lookups within the Variation Normalizer are resolved using the Gene Normalizer instance, rather than creating a second sub-instance. - >>> id(norm.gene_query_handler) == id(norm.variation_normalizer.gnomad_vcf_to_protein_handler.gene_normalizer) + >>> id(norm.gene_query_handler) == id( + ... norm.variation_normalizer.gnomad_vcf_to_protein_handler.gene_normalizer + ... ) True :param db_url: optional definition of shared normalizer database. 
Because the @@ -127,14 +129,16 @@ async def normalize_variation( ) if variation_norm_resp and variation_norm_resp.variation: return variation_norm_resp.variation - except TokenRetrievalError as e: - _logger.error(e) - raise e - except Exception as e: - _logger.error( - "Variation Normalizer raised an exception using query %s: %s", + except TokenRetrievalError: + _logger.exception( + "Variation Normalizer encountered boto token retrieval error for query %s", + query, + ) + raise + except Exception: + _logger.exception( + "Variation Normalizer raised an exception using query %s", query, - e, ) return None @@ -173,14 +177,16 @@ def normalize_gene( try: gene_norm_resp = self.gene_query_handler.normalize(query_str) - except TokenRetrievalError as e: - _logger.error(e) - raise e - except Exception as e: - _logger.error( - "Gene Normalizer raised an exception using query %s: %s", + except TokenRetrievalError: + _logger.exception( + "Gene Normalizer encountered boto token retrieval error fetching query %s", + query_str, + ) + raise + except Exception: + _logger.exception( + "Gene Normalizer raised an exception fetching query %s", query_str, - e, ) else: if gene_norm_resp.match_type > highest_match: @@ -223,14 +229,16 @@ def normalize_disease( try: disease_norm_resp = self.disease_query_handler.normalize(query) - except TokenRetrievalError as e: - _logger.error(e) - raise e - except Exception as e: - _logger.error( - "Disease Normalizer raised an exception using query %s: %s", + except TokenRetrievalError: + _logger.exception( + "Disease Normalizer encountered boto retrieval error while fetching term %s", + query, + ) + raise + except Exception: + _logger.exception( + "Disease Normalizer raised an exception using query %s", query, - e, ) else: if disease_norm_resp.match_type > highest_match: @@ -273,14 +281,16 @@ def normalize_therapy( try: therapy_norm_resp = self.therapy_query_handler.normalize(query) - except TokenRetrievalError as e: - _logger.error(e) - raise e - 
except Exception as e: - _logger.error( - "Therapy Normalizer raised an exception using query %s: %s", + except TokenRetrievalError: + _logger.exception( + "Failed to retrieve from boto while fetching therapy query %s", + query, + ) + raise + except Exception: + _logger.exception( + "Therapy Normalizer raised an exception using query %s", query, - e, ) else: if therapy_norm_resp.match_type > highest_match: @@ -424,9 +434,9 @@ def check_normalizers( "Tables for %s normalizer appear to be unpopulated.", name.value ) success = False - except Exception as e: - _logger.error( - "Encountered exception while checking %s normalizer: %s", name.value, e + except Exception: + _logger.exception( + "Encountered exception while checking %s normalizer", name.value ) success = False return success diff --git a/src/metakb/query.py b/src/metakb/query.py index a676744d..f7845186 100644 --- a/src/metakb/query.py +++ b/src/metakb/query.py @@ -116,7 +116,7 @@ def __init__( >>> from metakb.normalizers import ViccNormalizers >>> qh = QueryHandler( ... get_driver("bolt://localhost:7687", ("neo4j", "password")), - ... ViccNormalizers("http://localhost:8000") + ... ViccNormalizers("http://localhost:8000"), ... ) ``default_page_limit`` sets the default max number of statements to include in @@ -129,10 +129,7 @@ def __init__( This value is overruled by an explicit ``limit`` parameter: - >>> response = await limited_qh.batch_search_statements( - ... ["BRAF V600E"], - ... limit=2 - ... ) + >>> response = await limited_qh.batch_search_statements(["BRAF V600E"], limit=2) >>> print(len(response.statement_ids)) 2 @@ -491,8 +488,8 @@ def _get_nested_stmts(self, statement_nodes: list[Node]) -> list[dict]: if s_id not in added_stmts: try: nested_stmt = self._get_nested_stmt(s) - except ValidationError as e: - logger.error("%s: %s", s_id, e) + except ValidationError: + logger.exception("Validation error while constructing %s", s_id) else: if nested_stmt: nested_stmts.append(nested_stmt)