General backend refresh (#474)
* Update pre-commit and pyproject.toml
* Compile requirements files on commit
* Remove CI checks done by pre-commit
* Continue migrating off Python 3.8
* Rename branch references from master to main
* Run general pyupgrade fixes
ndevenish authored Nov 5, 2024
1 parent c99a613 commit f5ca233
Showing 14 changed files with 150 additions and 192 deletions.
49 changes: 11 additions & 38 deletions .azure-pipelines/azure-pipelines.yml
@@ -7,36 +7,6 @@ stages:
pool:
vmImage: ubuntu-20.04
steps:
# Run syntax validation using oldest and latest Python
- task: UsePythonVersion@0
displayName: Set up python
inputs:
versionSpec: 3.8

- bash: python .azure-pipelines/syntax-validation.py
displayName: Syntax validation (3.7)

- task: UsePythonVersion@0
displayName: Set up python
inputs:
versionSpec: 3.11

- bash: python .azure-pipelines/syntax-validation.py
displayName: Syntax validation (3.10)

- bash: |
set -eux
pip install --disable-pip-version-check ruff
python .azure-pipelines/ruff-validation.py
displayName: Ruff validation
- bash: |
set -eux
# install versions matching the ones in the corresponding pre-commit hook
pip install --disable-pip-version-check mypy==1.9.0 types-PyYAML==6.0.12 types-requests==2.31.0
mypy --no-strict-optional dials_data/
displayName: Type checking
# Set up constants for further build steps
- bash: |
echo "##vso[task.setvariable variable=BUILD_REPOSITORY_NAME;isOutput=true]${BUILD_REPOSITORY_NAME}"
@@ -157,44 +127,47 @@ stages:
vmImage: ubuntu-20.04
strategy:
matrix:
python38:
PYTHON_VERSION: 3.8
python39:
PYTHON_VERSION: 3.9
python310:
PYTHON_VERSION: 3.10
python311:
PYTHON_VERSION: 3.11
python312:
PYTHON_VERSION: 3.12

steps:
- template: ci.yml
- job: macOS
pool:
vmImage: macOS-latest
strategy:
matrix:
python38:
PYTHON_VERSION: 3.8
python39:
PYTHON_VERSION: 3.9
python310:
PYTHON_VERSION: 3.10
python311:
PYTHON_VERSION: 3.11
python312:
PYTHON_VERSION: 3.12

steps:
- template: ci.yml
- job: windows
pool:
vmImage: windows-latest
strategy:
matrix:
python38:
PYTHON_VERSION: 3.8
python39:
PYTHON_VERSION: 3.9
python310:
PYTHON_VERSION: 3.10
python311:
PYTHON_VERSION: 3.11
python312:
PYTHON_VERSION: 3.12

steps:
- template: ci-windows.yml

@@ -205,7 +178,7 @@ stages:
- tests
condition: and(succeeded(),
eq(dependencies.static.outputs['checks.constants.BUILD_REPOSITORY_NAME'], 'dials/data'),
eq(dependencies.static.outputs['checks.constants.BUILD_SOURCEBRANCH'], 'refs/heads/master'))
eq(dependencies.static.outputs['checks.constants.BUILD_SOURCEBRANCH'], 'refs/heads/main'))
jobs:
- job: pypi
displayName: Publish pypi release
@@ -247,7 +220,7 @@ stages:
- build
- tests
condition: and(succeeded(),
eq(dependencies.static.outputs['checks.constants.BUILD_SOURCEBRANCH'], 'refs/heads/master'),
eq(dependencies.static.outputs['checks.constants.BUILD_SOURCEBRANCH'], 'refs/heads/main'),
eq(dependencies.static.outputs['checks.constants.BUILD_REPOSITORY_NAME'], 'dials/data'))
# only run this job in the main branch of the main repository
jobs:
2 changes: 1 addition & 1 deletion .azure-pipelines/create-hashinfo-pull-requests
@@ -17,7 +17,7 @@ echo

echo "##[section]Open pull requests:"
declare -A UPDATEBRANCHES
for BRANCH in $(hub pr list -b master -f '%H%n'); do
for BRANCH in $(hub pr list -b main -f '%H%n'); do
echo ${BRANCH}
UPDATEBRANCHES[${BRANCH}]=1
done
39 changes: 0 additions & 39 deletions .azure-pipelines/ruff-validation.py

This file was deleted.

32 changes: 0 additions & 32 deletions .azure-pipelines/syntax-validation.py

This file was deleted.

2 changes: 1 addition & 1 deletion .codecov.yml
@@ -12,5 +12,5 @@ coverage:
comment:
layout: "diff, flags"
branches:
- master
- main
after_n_builds: 3
6 changes: 3 additions & 3 deletions .github/workflows/codeql.yml
@@ -2,9 +2,9 @@ name: "CodeQL"

on:
push:
branches: [ "master" ]
branches: ["main"]
pull_request:
branches: [ "master" ]
branches: ["main"]
schedule:
- cron: "22 2 * * 3"

@@ -20,7 +20,7 @@ jobs:
strategy:
fail-fast: false
matrix:
language: [ javascript, python ]
language: [javascript, python]

steps:
- name: Checkout
25 changes: 21 additions & 4 deletions .pre-commit-config.yaml
@@ -1,7 +1,11 @@
ci:
autoupdate_schedule: quarterly
skip: [pip-compile]

repos:
# Syntax validation and some basic sanity checks
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.5.0
rev: v5.0.0
hooks:
- id: check-merge-conflict
- id: check-ast
@@ -12,8 +16,8 @@ repos:
- id: check-yaml

# Linting, sorting and formatting
- repo: https://github.com/charliermarsh/ruff-pre-commit
rev: v0.3.3
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.7.2
hooks:
- id: ruff
args: [--fix, --exit-non-zero-on-fix]
@@ -23,8 +27,21 @@ repos:
# Remember to change versions in .azure-pipelines/azure-pipelines.yml to match
# the versions here.
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v1.9.0
rev: v1.13.0
hooks:
- id: mypy
files: 'dials_data/.*\.py$'
additional_dependencies: ['types-PyYAML==6.0.12', 'types-requests==2.31.0']

- repo: https://github.com/astral-sh/uv-pre-commit
rev: 0.4.30
hooks:
- id: pip-compile
args: [pyproject.toml, -q, -o, requirements.txt]
files: ^(pyproject.toml|requirements(_dev|_doc)?.txt)$
- id: pip-compile
args: [pyproject.toml, -q, --extra, test, -o, requirements_dev.txt]
files: ^(pyproject.toml|requirements(_dev|_doc)?.txt)$
- id: pip-compile
args: [pyproject.toml, -q, --extra, doc, -o, requirements_doc.txt]
files: ^(pyproject.toml|requirements(_dev|_doc)?.txt)$
4 changes: 2 additions & 2 deletions dials_data/cli.py
@@ -119,7 +119,7 @@ def main():
version = dials_data.__version__ + "-dev"
parser = argparse.ArgumentParser(
usage="dials.data <command> [<args>]",
description="""DIALS regression data manager v{version}
description=f"""DIALS regression data manager v{version}
The most commonly used commands are:
list List available datasets
@@ -128,7 +128,7 @@ def main():
Each command has its own set of parameters, and you can get more information
by running dials.data <command> --help
""".format(version=version),
""",
formatter_class=argparse.RawTextHelpFormatter,
)
parser.add_argument("subcommand", help=argparse.SUPPRESS)
8 changes: 2 additions & 6 deletions dials_data/datasets.py
@@ -134,11 +134,7 @@ def list_known_definitions(ds_list, quiet=False) -> None:
size_information = "unverified dataset"
else:
size_information = _human_readable(get_resident_size(shortname))
print(
"{shortname}: {dataset[name]} ({size_information})".format(
shortname=shortname, dataset=dataset, size_information=size_information
)
)
print(f"{shortname}: {dataset['name']} ({size_information})")
print(
"{indent}{author} ({license})".format(
author=dataset.get("author", "unknown author"),
@@ -147,7 +143,7 @@ def list_known_definitions(ds_list, quiet=False) -> None:
)
)
if dataset.get("url"):
print("{indent}{dataset[url]}".format(indent=indent, dataset=dataset))
print(f"{indent}{dataset['url']}")
print(
"\n{}\n".format(
textwrap.fill(
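The datasets.py hunk above is typical of the pyupgrade pass mentioned in the commit message: keyword-argument str.format calls collapse into f-strings, which can index a dict directly. A minimal before/after sketch with hypothetical values, not data from the repository:

dataset = {"name": "Example dataset", "url": "https://example.com/data"}  # hypothetical values
shortname = "example"
size_information = "1.2 GB"

# Before: str.format with keyword arguments and item access inside the field name
print(
    "{shortname}: {dataset[name]} ({size_information})".format(
        shortname=shortname, dataset=dataset, size_information=size_information
    )
)

# After: the equivalent f-string indexes the dict directly
print(f"{shortname}: {dataset['name']} ({size_information})")

Both calls print the same text; the f-string form is what the commit leaves behind.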
8 changes: 4 additions & 4 deletions dials_data/download.py
@@ -10,7 +10,7 @@
import warnings
import zipfile
from pathlib import Path
from typing import Any, Optional, Union
from typing import Any
from urllib.parse import urlparse

import py.path
@@ -104,7 +104,7 @@ def fetch_dataset(
read_only: bool = False,
verbose: bool = False,
pre_scan: bool = True,
) -> Union[bool, dict[str, Any]]:
) -> bool | dict[str, Any]:
"""Check for the presence or integrity of the local copy of the specified
test dataset. If the dataset is not available or out of date then attempt
to download/update it transparently.
@@ -280,7 +280,7 @@ class DataFetcher:
"""

def __init__(self, read_only: bool = False, verify: bool = True):
self._cache: dict[str, Optional[Path]] = {}
self._cache: dict[str, Path | None] = {}
self._target_dir: Path = dials_data.datasets.repository_location()
self._read_only: bool = read_only and os.access(self._target_dir, os.W_OK)
self._verify: bool = verify
@@ -332,7 +332,7 @@ def __call__(self, test_data: str, pathlib=None, **kwargs):
return self.result_filter(result=py.path.local(self._cache[test_data]))
return self.result_filter(result=self._cache[test_data])

def _attempt_fetch(self, test_data: str) -> Optional[Path]:
def _attempt_fetch(self, test_data: str) -> Path | None:
if self._read_only:
hashinfo = fetch_dataset(test_data, pre_scan=True, read_only=True)
else:
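The download.py changes show the typing side of the same cleanup: Union[...] and Optional[...] annotations become PEP 604 unions (bool | dict[str, Any], Path | None). A short sketch of the idiom, assuming the module enables from __future__ import annotations so the | syntax stays valid on the Python 3.9 floor; the function below is illustrative only and not part of dials_data:

from __future__ import annotations  # keeps `X | None` annotations legal on Python 3.9

from pathlib import Path
from typing import Any


def fetch_status(name: str, cache: dict[str, Path | None]) -> bool | dict[str, Any]:
    """Return True for a cached entry, otherwise a small status dict."""
    if cache.get(name) is not None:
        return True
    return {"name": name, "cached": False}

For example, fetch_status("x", {}) returns {"name": "x", "cached": False}, while a populated cache entry yields True.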