diff --git a/.github/workflows/mirror.yml b/.github/workflows/mirror.yml index 9ad6deb6..f5213cfe 100644 --- a/.github/workflows/mirror.yml +++ b/.github/workflows/mirror.yml @@ -3,15 +3,15 @@ on: [push] jobs: mirror: runs-on: ubuntu-latest -# steps: -# - uses: actions/checkout@v3 -# with: -# fetch-depth: 0 -# - uses: yesolutions/mirror-action@master -# with: -# REMOTE: 'https://bitbucket.csiro.au/scm/spice/spiceracs.git' -# GIT_USERNAME: tho822@csiro.au -# GIT_PASSWORD: ${{ secrets.GIT_PASSWORD }} + # steps: + # - uses: actions/checkout@v3 + # with: + # fetch-depth: 0 + # - uses: yesolutions/mirror-action@master + # with: + # REMOTE: 'https://bitbucket.csiro.au/scm/spice/spiceracs.git' + # GIT_USERNAME: tho822@csiro.au + # GIT_PASSWORD: ${{ secrets.GIT_PASSWORD }} steps: - uses: actions/checkout@v3 with: diff --git a/.github/workflows/python-app.yml b/.github/workflows/python-app.yml index 841cfe8b..a945e332 100644 --- a/.github/workflows/python-app.yml +++ b/.github/workflows/python-app.yml @@ -5,36 +5,35 @@ name: Python application on: push: - branches: [ "master" ] + branches: ["master"] pull_request: - branches: [ "master" ] + branches: ["master"] permissions: contents: read jobs: build: - runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 - - name: Set up Python 3.8 - uses: actions/setup-python@v3 - with: - python-version: "3.8" - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install "flake8>=5" - if [ -f requirements.txt ]; then pip install -r requirements.txt; fi - - name: Lint with flake8 - run: | - # stop the build if there are Python syntax errors or undefined names - flake8 arrakis --count --select=E9,F63,F7,F82 --show-source --statistics - # exit-zero treats all errors as warnings. 
The GitHub editor is 127 chars wide - flake8 arrakis --count --exit-zero --max-complexity=10 --max-line-length=127 --statistic - - name: Upload coverage reports to Codecov - uses: codecov/codecov-action@v3 - env: - CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} + - uses: actions/checkout@v3 + - name: Set up Python 3.8 + uses: actions/setup-python@v3 + with: + python-version: "3.8" + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install "flake8>=5" + if [ -f requirements.txt ]; then pip install -r requirements.txt; fi + - name: Lint with flake8 + run: | + # stop the build if there are Python syntax errors or undefined names + flake8 arrakis --count --select=E9,F63,F7,F82 --show-source --statistics + # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide + flake8 arrakis --count --exit-zero --max-complexity=10 --max-line-length=127 --statistic + - name: Upload coverage reports to Codecov + uses: codecov/codecov-action@v3 + env: + CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 054bf427..703697ae 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,29 +1,78 @@ +ci: + autoupdate_commit_msg: "chore: update pre-commit hooks" + autofix_commit_msg: "style: pre-commit fixes" + repos: -- repo: https://github.com/astral-sh/ruff-pre-commit - # Ruff version. - rev: v0.9.3 - hooks: - # Run the linter. - - id: ruff - args: [ --fix ] - # Run the formatter. 
- - id: ruff-format -# - repo: https://github.com/pre-commit/pre-commit-hooks -# rev: v4.6.0 -# hooks: -# - id: trailing-whitespace -# - id: end-of-file-fixer -# - id: check-yaml -# - id: check-added-large-files + - repo: https://github.com/adamchainz/blacken-docs + rev: "1.18.0" + hooks: + - id: blacken-docs + additional_dependencies: [black==24.*] -ci: - autofix_commit_msg: | - [pre-commit.ci] auto fixes from pre-commit.com hooks - - for more information, see https://pre-commit.ci - autofix_prs: true - autoupdate_branch: '' - autoupdate_commit_msg: '[pre-commit.ci] pre-commit autoupdate' - autoupdate_schedule: weekly - skip: [] - submodules: false + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: "v4.6.0" + hooks: + - id: check-added-large-files + - id: check-case-conflict + - id: check-merge-conflict + - id: check-symlinks + - id: check-yaml + - id: debug-statements + - id: end-of-file-fixer + - id: mixed-line-ending + - id: name-tests-test + args: ["--pytest-test-first"] + - id: requirements-txt-fixer + - id: trailing-whitespace + + - repo: https://github.com/pre-commit/pygrep-hooks + rev: "v1.10.0" + hooks: + # - id: rst-backticks + - id: rst-directive-colons + - id: rst-inline-touching-normal + + - repo: https://github.com/pre-commit/mirrors-prettier + rev: "v3.1.0" + hooks: + - id: prettier + types_or: [yaml, markdown, html, css, scss, javascript, json] + args: [--prose-wrap=always] + + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: "v0.9.2" + hooks: + - id: ruff + args: ["--fix", "--show-fixes"] + - id: ruff-format + + - repo: https://github.com/codespell-project/codespell + rev: "v2.3.0" + hooks: + - id: codespell + + - repo: local + hooks: + - id: disallow-caps + name: Disallow improper capitalization + language: pygrep + entry: PyBind|Numpy|Cmake|CCache|Github|PyTest + exclude: .pre-commit-config.yaml + + - repo: https://github.com/abravalheri/validate-pyproject + rev: "v0.18" + hooks: + - id: validate-pyproject + 
additional_dependencies: ["validate-pyproject-schema-store[all]"] + + - repo: https://github.com/python-jsonschema/check-jsonschema + rev: "0.29.0" + hooks: + - id: check-dependabot + - id: check-github-workflows + - id: check-readthedocs + + - repo: https://github.com/kynan/nbstripout + rev: 0.8.1 + hooks: + - id: nbstripout diff --git a/.readthedocs.yaml b/.readthedocs.yaml index 2eb38da7..bc26adb3 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -25,7 +25,7 @@ build: # Build documentation in the "docs/" directory with Sphinx sphinx: - configuration: docs/source/conf.py + configuration: docs/source/conf.py # Optionally build your docs in additional formats such as PDF and ePub # formats: @@ -36,5 +36,5 @@ sphinx: # to build your documentation # See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html python: - install: - - requirements: docs/requirements.txt + install: + - requirements: docs/requirements.txt diff --git a/CHANGELOG.md b/CHANGELOG.md index 55d1049d..427cb159 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,273 +3,373 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), -and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +and this project adheres to +[Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
## [Unreleased] ## [2.4.1] - 2024-09-18 + ### What's Changed -* Divergence by @AlecThomson in https://github.com/AlecThomson/arrakis/pull/79 -* Update some imager plots -**Full Changelog**: https://github.com/AlecThomson/arrakis/compare/v2.4.0...v2.4.1 +- Divergence by @AlecThomson in https://github.com/AlecThomson/arrakis/pull/79 +- Update some imager plots + +**Full Changelog**: +https://github.com/AlecThomson/arrakis/compare/v2.4.0...v2.4.1 ## [2.4.0] - 2024-09-03 + ### What's Changed + Mostly bits to help out a filesystem being sad -* Threadlock cutout writes to prevent dumb errors on Lustre (and maybe elsewhere?) -* Copy holography file to a $MEMDIR if it exists -* Allow validation plots to fail gracefully -**Full Changelog**: https://github.com/AlecThomson/arrakis/compare/v2.3.11...v2.4.0 +- Threadlock cutout writes to prevent dumb errors on Lustre (and maybe + elsewhere?) +- Copy holography file to a $MEMDIR if it exists +- Allow validation plots to fail gracefully + +**Full Changelog**: +https://github.com/AlecThomson/arrakis/compare/v2.3.11...v2.4.0 ## [2.3.11] - 2024-07-15 + ### What's Changed -* Fix l,m calculation for tile offsets (including rotation) -**Full Changelog**: https://github.com/AlecThomson/arrakis/compare/v2.3.10...v2.3.11 +- Fix l,m calculation for tile offsets (including rotation) + +**Full Changelog**: +https://github.com/AlecThomson/arrakis/compare/v2.3.10...v2.3.11 ## [2.3.10] - 2024-07-13 + ### What's Changed -* Improve stability with `persist_result=True` and `dirs_exist_ok=True` -**Full Changelog**: https://github.com/AlecThomson/arrakis/compare/v2.3.9...v2.3.10 +- Improve stability with `persist_result=True` and `dirs_exist_ok=True` + +**Full Changelog**: +https://github.com/AlecThomson/arrakis/compare/v2.3.9...v2.3.10 ## [2.3.9] - 2024-07-11 + ### What's Changed -* Switch to dynamic versioning -**Full Changelog**: https://github.com/AlecThomson/arrakis/compare/v2.3.8...v2.3.9 +- Switch to dynamic versioning + +**Full Changelog**: 
+https://github.com/AlecThomson/arrakis/compare/v2.3.8...v2.3.9 ## [2.3.8] - 2024-07-10 + ### What's Changed -* Add flow diagram by @AlecThomson in https://github.com/AlecThomson/arrakis/pull/75 -* Split Stokes I and QU imaging tasks by @AlecThomson in https://github.com/AlecThomson/arrakis/pull/76 -**Full Changelog**: https://github.com/AlecThomson/arrakis/compare/v2.3.7...v2.3.8 +- Add flow diagram by @AlecThomson in + https://github.com/AlecThomson/arrakis/pull/75 +- Split Stokes I and QU imaging tasks by @AlecThomson in + https://github.com/AlecThomson/arrakis/pull/76 + +**Full Changelog**: +https://github.com/AlecThomson/arrakis/compare/v2.3.7...v2.3.8 ## [2.3.7] - 2024-07-09 + ### What's Changed -* Check ionex by @AlecThomson in https://github.com/AlecThomson/arrakis/pull/74 -**Full Changelog**: https://github.com/AlecThomson/arrakis/compare/v2.3.6...v2.3.7 +- Check ionex by @AlecThomson in https://github.com/AlecThomson/arrakis/pull/74 + +**Full Changelog**: +https://github.com/AlecThomson/arrakis/compare/v2.3.6...v2.3.7 ## [2.3.6] - 2024-06-25 ### What's Changed -* Add field offsets by @AlecThomson in https://github.com/AlecThomson/arrakis/pull/72 -* Added field/tile seperation calculation and columns -* [pre-commit.ci] pre-commit autoupdate by @pre-commit-ci in https://github.com/AlecThomson/arrakis/pull/65 -* Added an auto-purge to temp-image files in imager -**Full Changelog**: https://github.com/AlecThomson/arrakis/compare/v2.3.5...v2.3.6 +- Add field offsets by @AlecThomson in + https://github.com/AlecThomson/arrakis/pull/72 +- Added field/tile separation calculation and columns +- [pre-commit.ci] pre-commit autoupdate by @pre-commit-ci in + https://github.com/AlecThomson/arrakis/pull/65 +- Added an auto-purge to temp-image files in imager + +**Full Changelog**: +https://github.com/AlecThomson/arrakis/compare/v2.3.5...v2.3.6 ## [2.3.5] - 2024-06-21 ### What's Changed -* Added tarball verification -* Tweaks to petrichor config -* Retry cutout failues -* 
Reduce validation plot resolution -* Fail if commonbeam is NaN (i.e. whole beam will be blanked) -* Use threadpool for beamcon -* Catch the failure of Voronoi binning -* Fixes to database queries and related structuring -* Catch failure of a validation plot - -**Full Changelog**: https://github.com/AlecThomson/arrakis/compare/v2.3.4...v2.3.5 + +- Added tarball verification +- Tweaks to petrichor config +- Retry cutout failures +- Reduce validation plot resolution +- Fail if commonbeam is NaN (i.e. whole beam will be blanked) +- Use threadpool for beamcon +- Catch the failure of Voronoi binning +- Fixes to database queries and related structuring +- Catch failure of a validation plot + +**Full Changelog**: +https://github.com/AlecThomson/arrakis/compare/v2.3.4...v2.3.5 ## [2.3.4] - 2024-05-13 + ### What's Changed -* Polar by @AlecThomson in https://github.com/AlecThomson/arrakis/pull/71 -**Full Changelog**: https://github.com/AlecThomson/arrakis/compare/v2.3.3...v2.3.4 +- Polar by @AlecThomson in https://github.com/AlecThomson/arrakis/pull/71 + +**Full Changelog**: +https://github.com/AlecThomson/arrakis/compare/v2.3.3...v2.3.4 ## [2.3.3] - 2024-05-10 + ### What's Changed -* Reduce number of tasks by @AlecThomson in https://github.com/AlecThomson/arrakis/pull/70 -**Full Changelog**: https://github.com/AlecThomson/arrakis/compare/v2.3.2...v2.3.3 +- Reduce number of tasks by @AlecThomson in + https://github.com/AlecThomson/arrakis/pull/70 + +**Full Changelog**: +https://github.com/AlecThomson/arrakis/compare/v2.3.2...v2.3.3 ## [2.3.2] - 2024-05-09 + ### What's Changed -* Queries by @AlecThomson in https://github.com/AlecThomson/arrakis/pull/69 -**Full Changelog**: https://github.com/AlecThomson/arrakis/compare/v2.3.1...v2.3.2 +- Queries by @AlecThomson in https://github.com/AlecThomson/arrakis/pull/69 + +**Full Changelog**: +https://github.com/AlecThomson/arrakis/compare/v2.3.1...v2.3.2 ## [2.3.1] - 2024-05-08 + ## What's Changed -* Validate image by @AlecThomson in 
https://github.com/AlecThomson/arrakis/pull/68 -* Adds and improves validation plots -* Fixes LINMOS rotation parameters +- Validate image by @AlecThomson in + https://github.com/AlecThomson/arrakis/pull/68 +- Adds and improves validation plots +- Fixes LINMOS rotation parameters -**Full Changelog**: https://github.com/AlecThomson/arrakis/compare/v2.3.0...v2.3.1 +**Full Changelog**: +https://github.com/AlecThomson/arrakis/compare/v2.3.0...v2.3.1 ## [2.3.0] - 2024-05-07 + ### What's Changed -* Add validation stage to pipeline, including plot artifacts -* Add LINMOS rotation for rotated fields -* Fixes WSClean argument handling and clean thresholds -* Submit by @AlecThomson in https://github.com/AlecThomson/arrakis/pull/66 -* Validate by @AlecThomson in https://github.com/AlecThomson/arrakis/pull/67 +- Add validation stage to pipeline, including plot artifacts +- Add LINMOS rotation for rotated fields +- Fixes WSClean argument handling and clean thresholds +- Submit by @AlecThomson in https://github.com/AlecThomson/arrakis/pull/66 +- Validate by @AlecThomson in https://github.com/AlecThomson/arrakis/pull/67 -**Full Changelog**: https://github.com/AlecThomson/arrakis/compare/v2.2.2...v2.3.0 +**Full Changelog**: +https://github.com/AlecThomson/arrakis/compare/v2.2.2...v2.3.0 ## [2.2.2] - 2024-04-18 + ### What's Changed -* [pre-commit.ci] pre-commit autoupdate by @pre-commit-ci in https://github.com/AlecThomson/arrakis/pull/61 -* Image fewer than 36 beams by @AlecThomson in https://github.com/AlecThomson/arrakis/pull/64 +- [pre-commit.ci] pre-commit autoupdate by @pre-commit-ci in + https://github.com/AlecThomson/arrakis/pull/61 +- Image fewer than 36 beams by @AlecThomson in + https://github.com/AlecThomson/arrakis/pull/64 -**Full Changelog**: https://github.com/AlecThomson/arrakis/compare/v2.2.1...v2.2.2 +**Full Changelog**: +https://github.com/AlecThomson/arrakis/compare/v2.2.1...v2.2.2 ## [2.2.1] - 2024-04-17 + ### What's Changed -* Use sql by @AlecThomson in 
https://github.com/AlecThomson/arrakis/pull/63 -* Allow PostgreSQL RACS database to be ingested in `spice_init` -* Add helper scipt `create_mongodb.py` -**Full Changelog**: https://github.com/AlecThomson/arrakis/compare/v2.1.8...v2.2.1 +- Use sql by @AlecThomson in https://github.com/AlecThomson/arrakis/pull/63 +- Allow PostgreSQL RACS database to be ingested in `spice_init` +- Add helper script `create_mongodb.py` + +**Full Changelog**: +https://github.com/AlecThomson/arrakis/compare/v2.1.8...v2.2.1 ## [2.2.0] - 2024-04-11 + ### What's Changed -* Allow SBID to passed as an argument - * This will enable a 'single field mode' - * Database queries / updates changes to support this -* Unified ArgParse mode - * Much easier argument parsing - * Now reused amongst modules -* Fixes to typing - * Much better use of `pathlib.Path` and `pandas` -* SBID by @AlecThomson in https://github.com/AlecThomson/arrakis/pull/62 +- Allow SBID to be passed as an argument + - This will enable a 'single field mode' + - Database queries / updates changes to support this +- Unified ArgParse mode + - Much easier argument parsing + - Now reused amongst modules +- Fixes to typing + - Much better use of `pathlib.Path` and `pandas` +- SBID by @AlecThomson in https://github.com/AlecThomson/arrakis/pull/62 -**Full Changelog**: https://github.com/AlecThomson/arrakis/compare/v2.1.7...v2.1.8 +**Full Changelog**: +https://github.com/AlecThomson/arrakis/compare/v2.1.7...v2.1.8 ## [2.1.7] - 2024-04-03 + ### What's Changed Updated documentation. 
-**Full Changelog**: https://github.com/AlecThomson/arrakis/compare/v2.1.6...v2.1.7 +**Full Changelog**: +https://github.com/AlecThomson/arrakis/compare/v2.1.6...v2.1.7 ## [2.1.6] - 2024-04-01 + ### What's Changed -* Image docs by @AlecThomson in https://github.com/AlecThomson/arrakis/pull/60 +- Image docs by @AlecThomson in https://github.com/AlecThomson/arrakis/pull/60 -**Full Changelog**: https://github.com/AlecThomson/arrakis/compare/v2.1.5...v2.1.6 +**Full Changelog**: +https://github.com/AlecThomson/arrakis/compare/v2.1.5...v2.1.6 ## [2.1.5] - 2024-03-27 + ### What's Changed -* [pre-commit.ci] pre-commit autoupdate by @pre-commit-ci in https://github.com/AlecThomson/arrakis/pull/56 -* Tempdir by @AlecThomson in https://github.com/AlecThomson/arrakis/pull/58 -* Clean by @AlecThomson in https://github.com/AlecThomson/arrakis/pull/59 +- [pre-commit.ci] pre-commit autoupdate by @pre-commit-ci in + https://github.com/AlecThomson/arrakis/pull/56 +- Tempdir by @AlecThomson in https://github.com/AlecThomson/arrakis/pull/58 +- Clean by @AlecThomson in https://github.com/AlecThomson/arrakis/pull/59 -**Full Changelog**: https://github.com/AlecThomson/arrakis/compare/v2.1.4...v2.1.5 +**Full Changelog**: +https://github.com/AlecThomson/arrakis/compare/v2.1.4...v2.1.5 ## [2.1.4] - 2024-03-20 + ### What's Changed -* DR2 preparation improvements by @AlecThomson in https://github.com/AlecThomson/arrakis/pull/57 -**Full Changelog**: https://github.com/AlecThomson/arrakis/compare/v2.1.3...v2.1.4 +- DR2 preparation improvements by @AlecThomson in + https://github.com/AlecThomson/arrakis/pull/57 + +**Full Changelog**: +https://github.com/AlecThomson/arrakis/compare/v2.1.3...v2.1.4 ## [2.1.3] - 2024-02-05 ### What's Changed -* [pre-commit.ci] pre-commit autoupdate by @pre-commit-ci in https://github.com/AlecThomson/arrakis/pull/54 -* Noise fix by @AlecThomson in https://github.com/AlecThomson/arrakis/pull/55 -**Full Changelog**: 
https://github.com/AlecThomson/arrakis/compare/v2.1.2...v2.1.3 +- [pre-commit.ci] pre-commit autoupdate by @pre-commit-ci in + https://github.com/AlecThomson/arrakis/pull/54 +- Noise fix by @AlecThomson in https://github.com/AlecThomson/arrakis/pull/55 + +**Full Changelog**: +https://github.com/AlecThomson/arrakis/compare/v2.1.2...v2.1.3 ## [2.1.2] - 2024-01-17 Hotfix updates. ## Fixed + - Base Python version bumped to 3.10 for docs - Enable `-no-mf-weighting` in WSClean - Ensure FixMS skipped is used in pipeline - Pass data in correct order to RM-Tools -**Full Changelog**: https://github.com/AlecThomson/arrakis/compare/v2.1.1...v2.1.2 +**Full Changelog**: +https://github.com/AlecThomson/arrakis/compare/v2.1.1...v2.1.2 ## [2.1.1] - 2024-01-16 ### What's Changed -* [pre-commit.ci] pre-commit autoupdate by @pre-commit-ci in https://github.com/AlecThomson/arrakis/pull/48 -* Create dependabot.yml by @AlecThomson in https://github.com/AlecThomson/arrakis/pull/50 -* [pre-commit.ci] pre-commit autoupdate by @pre-commit-ci in https://github.com/AlecThomson/arrakis/pull/51 -* [pre-commit.ci] pre-commit autoupdate by @pre-commit-ci in https://github.com/AlecThomson/arrakis/pull/52 -* In-memory cutouts by @AlecThomson in https://github.com/AlecThomson/arrakis/pull/53 + +- [pre-commit.ci] pre-commit autoupdate by @pre-commit-ci in + https://github.com/AlecThomson/arrakis/pull/48 +- Create dependabot.yml by @AlecThomson in + https://github.com/AlecThomson/arrakis/pull/50 +- [pre-commit.ci] pre-commit autoupdate by @pre-commit-ci in + https://github.com/AlecThomson/arrakis/pull/51 +- [pre-commit.ci] pre-commit autoupdate by @pre-commit-ci in + https://github.com/AlecThomson/arrakis/pull/52 +- In-memory cutouts by @AlecThomson in + https://github.com/AlecThomson/arrakis/pull/53 ### New Contributors -* @pre-commit-ci made their first contribution in https://github.com/AlecThomson/arrakis/pull/48 -**Full Changelog**: https://github.com/AlecThomson/arrakis/compare/v2.1.0...v2.1.1 +- 
@pre-commit-ci made their first contribution in + https://github.com/AlecThomson/arrakis/pull/48 + +**Full Changelog**: +https://github.com/AlecThomson/arrakis/compare/v2.1.0...v2.1.1 ## [2.1.0] - 2023-12-14 ### What's Changed -* Fix weights by @AlecThomson in https://github.com/AlecThomson/arrakis/pull/46 -* described the scalability of prefect/postgres by @tjgalvin in https://github.com/AlecThomson/arrakis/pull/44 -* Migrate the whole shebang to Prefect by @AlecThomson in https://github.com/AlecThomson/arrakis/pull/49 +- Fix weights by @AlecThomson in https://github.com/AlecThomson/arrakis/pull/46 +- described the scalability of prefect/postgres by @tjgalvin in + https://github.com/AlecThomson/arrakis/pull/44 +- Migrate the whole shebang to Prefect by @AlecThomson in + https://github.com/AlecThomson/arrakis/pull/49 -**Full Changelog**: https://github.com/AlecThomson/arrakis/compare/v2.0.0...v2.1.0 +**Full Changelog**: +https://github.com/AlecThomson/arrakis/compare/v2.0.0...v2.1.0 ### Added + - Full `prefect` flows and tasks - More `prefect` documentation ### Fixed + - Incorrect image weighting for LINMOS ### Removed -- `dask` delayed etc. +- `dask` delayed etc. ## [2.0.0] - 2023-10-16 ### Added + - `pre-commit` hooks for autoformatting checks - `imager` script for sweet imaging - `prefect` backbone ## [1.0.0] - 2023-06-15 + Corresponds with DR1 paper. 
### What's Changed -* Dev by @AlecThomson in https://github.com/AlecThomson/arrakis/pull/1 -* Simplify readme by @AlecThomson in https://github.com/AlecThomson/arrakis/pull/2 -* Don't need these by @AlecThomson in https://github.com/AlecThomson/arrakis/pull/3 -* Clean up tests by @AlecThomson in https://github.com/AlecThomson/arrakis/pull/4 -* Fix scripts and RM-synth by @AlecThomson in https://github.com/AlecThomson/arrakis/pull/5 -* Merge dev by @AlecThomson in https://github.com/AlecThomson/arrakis/pull/7 -* Dev by @AlecThomson in https://github.com/AlecThomson/arrakis/pull/9 -* Remove RACS db by @AlecThomson in https://github.com/AlecThomson/arrakis/pull/14 -* Arrakis by @AlecThomson in https://github.com/AlecThomson/arrakis/pull/16 -* Fix RACS db pathing by @AlecThomson in https://github.com/AlecThomson/arrakis/pull/15 -* Docs update by @AlecThomson in https://github.com/AlecThomson/arrakis/pull/18 -* Dev by @AlecThomson in https://github.com/AlecThomson/arrakis/pull/23 -* Dev by @AlecThomson in https://github.com/AlecThomson/arrakis/pull/24 -* Dev by @AlecThomson in https://github.com/AlecThomson/arrakis/pull/25 + +- Dev by @AlecThomson in https://github.com/AlecThomson/arrakis/pull/1 +- Simplify readme by @AlecThomson in + https://github.com/AlecThomson/arrakis/pull/2 +- Don't need these by @AlecThomson in + https://github.com/AlecThomson/arrakis/pull/3 +- Clean up tests by @AlecThomson in + https://github.com/AlecThomson/arrakis/pull/4 +- Fix scripts and RM-synth by @AlecThomson in + https://github.com/AlecThomson/arrakis/pull/5 +- Merge dev by @AlecThomson in https://github.com/AlecThomson/arrakis/pull/7 +- Dev by @AlecThomson in https://github.com/AlecThomson/arrakis/pull/9 +- Remove RACS db by @AlecThomson in + https://github.com/AlecThomson/arrakis/pull/14 +- Arrakis by @AlecThomson in https://github.com/AlecThomson/arrakis/pull/16 +- Fix RACS db pathing by @AlecThomson in + https://github.com/AlecThomson/arrakis/pull/15 +- Docs update by @AlecThomson 
in https://github.com/AlecThomson/arrakis/pull/18 +- Dev by @AlecThomson in https://github.com/AlecThomson/arrakis/pull/23 +- Dev by @AlecThomson in https://github.com/AlecThomson/arrakis/pull/24 +- Dev by @AlecThomson in https://github.com/AlecThomson/arrakis/pull/25 ### Added - This changelog! - `scripts/tar_cubelets.py` and CLI hook -- `makecat.py`: Added `flag_blended_components` to identify and flag blended components. Adds `is_blended_flag`, `N_blended`, `blend_ratio` to the catalogue. +- `makecat.py`: Added `flag_blended_components` to identify and flag blended + components. Adds `is_blended_flag`, `N_blended`, `blend_ratio` to the + catalogue. - Proper logging module ### Fixed -- `columns_possum.py`: Add new Stokes I fit flags and UCDs (plus others) and descriptions -- `scripts/casda_prepare.py`: Refactor to make considated products and make CASDA happy -- `scripts/fix_dr1_cat.py`: Added extra columns that needed to be fixed in DR1 e.g. sbid, start_time +- `columns_possum.py`: Add new Stokes I fit flags and UCDs (plus others) and + descriptions +- `scripts/casda_prepare.py`: Refactor to make consolidated products and make + CASDA happy +- `scripts/fix_dr1_cat.py`: Added extra columns that needed to be fixed in DR1 + e.g. sbid, start_time - Typing in various places ### Changed @@ -284,8 +384,8 @@ Corresponds with DR1 paper. 
- `submit/casda_pre_prep.sh` - `submit/casda_prep_test.sh` -- ASKAP RACS database as a submodule (changes how `init_databse.py` ingests data) - +- ASKAP RACS database as a submodule (changes how `init_databse.py` ingests + data) ## [0.2.0] - 2019-12-01 diff --git a/README.md b/README.md index 53a38d44..c97a4bb9 100644 --- a/README.md +++ b/README.md @@ -1,20 +1,26 @@ [![pre-commit.ci status](https://results.pre-commit.ci/badge/github/AlecThomson/arrakis/master.svg)](https://results.pre-commit.ci/latest/github/AlecThomson/arrakis/master) + # _Arrakis_ -Pipeline for extracting the _SPICE_: **S**pectra and **P**olarization **I**n **C**utouts of **E**xtragalactic sources from **R**ACS +Pipeline for extracting the _SPICE_: **S**pectra and **P**olarization **I**n +**C**utouts of **E**xtragalactic sources from **R**ACS ## Documentation -The documentation is available at [arrakis.readthedocs.io](https://arrakis.readthedocs.io). +The documentation is available at +[arrakis.readthedocs.io](https://arrakis.readthedocs.io). ## Acknowledging -If you use Arrakis in your research, please cite [Thomson et al. (2023)](https://arxiv.org/abs/2307.07207). +If you use Arrakis in your research, please cite +[Thomson et al. (2023)](https://arxiv.org/abs/2307.07207). ### 3rd party software -Please also consider acknowledging the following software packages outlines in [docs](https://arrakis.readthedocs.io/en/latest/acknowledge.html). +Please also consider acknowledging the following software packages outlines in +[docs](https://arrakis.readthedocs.io/en/latest/acknowledge.html). -## Contibuting +## Contributing -Contributions are welcome! Please open an issue or pull request on [GitHub](https://github.com/AlecThomson/arrakis). +Contributions are welcome! Please open an issue or pull request on +[GitHub](https://github.com/AlecThomson/arrakis). 
diff --git a/arrakis/.default_config.yaml b/arrakis/.default_config.yaml index 5722b5ea..4126b1c4 100644 --- a/arrakis/.default_config.yaml +++ b/arrakis/.default_config.yaml @@ -17,7 +17,7 @@ skip_cleanup: false #Skip cleanup stage [False]. (default: False) # # generic null arguments: # sbid: null #SBID of observation. (default: None) -stokes: [I,Q,U] # List of Stokes parameters to image (default: ['I', 'Q', 'U']) +stokes: [I, Q, U] # List of Stokes parameters to image (default: ['I', 'Q', 'U']) epoch: 0 # Epoch of observation. (default: 0) # host: null # Host of mongodb (probably $hostname -i). (default: None) # username: null # Username of mongodb. (default: None) diff --git a/arrakis/configs/default.yaml b/arrakis/configs/default.yaml index 7e064aee..5406e42d 100644 --- a/arrakis/configs/default.yaml +++ b/arrakis/configs/default.yaml @@ -1,13 +1,13 @@ # Set up for local mahine cluster_class: "distributed.LocalCluster" cluster_kwargs: - cores: 1 - processes: 1 - name: 'spice-worker' - memory: "8GB" + cores: 1 + processes: 1 + name: "spice-worker" + memory: "8GB" adapt_kwargs: - minimum: 1 - maximum: 8 - wait_count: 20 - target_duration: "300s" - interval: "30s" + minimum: 1 + maximum: 8 + wait_count: 20 + target_duration: "300s" + interval: "30s" diff --git a/arrakis/configs/petrichor.yaml b/arrakis/configs/petrichor.yaml index e531c9e8..bf79bb57 100644 --- a/arrakis/configs/petrichor.yaml +++ b/arrakis/configs/petrichor.yaml @@ -1,26 +1,23 @@ # Set up for Petrichor cluster_class: "dask_jobqueue.SLURMCluster" cluster_kwargs: - cores: 16 - processes: 1 - name: 'spice-worker' - memory: "248GiB" - account: 'OD-217087' - walltime: '0-4:00:00' - job_extra_directives: ['--qos express', '--gres=memdir:135'] - # interface for the workers - interface: "ib0" - log_directory: 'spice_logs' - job_script_prologue: [ - 'module load singularity', - 'unset SINGULARITY_BINDPATH' - ] - local_directory: $LOCALDIR - silence_logs: 'info' - worker_extra_args: ["--lifetime", "11.5h", 
"--lifetime-stagger", "2m"] + cores: 16 + processes: 1 + name: "spice-worker" + memory: "248GiB" + account: "OD-217087" + walltime: "0-4:00:00" + job_extra_directives: ["--qos express", "--gres=memdir:135"] + # interface for the workers + interface: "ib0" + log_directory: "spice_logs" + job_script_prologue: ["module load singularity", "unset SINGULARITY_BINDPATH"] + local_directory: $LOCALDIR + silence_logs: "info" + worker_extra_args: ["--lifetime", "11.5h", "--lifetime-stagger", "2m"] adapt_kwargs: - minimum_jobs: 1 - maximum_jobs: 18 - wait_count: 20 - target_duration: "300s" - interval: "30s" + minimum_jobs: 1 + maximum_jobs: 18 + wait_count: 20 + target_duration: "300s" + interval: "30s" diff --git a/arrakis/configs/rm_petrichor.yaml b/arrakis/configs/rm_petrichor.yaml index 3350cedc..5dc91d74 100644 --- a/arrakis/configs/rm_petrichor.yaml +++ b/arrakis/configs/rm_petrichor.yaml @@ -1,27 +1,28 @@ # Set up for Petrichor cluster_class: "dask_jobqueue.SLURMCluster" cluster_kwargs: - cores: 4 - processes: 4 - name: 'spice-worker' - memory: "144GiB" - account: 'OD-217087' - walltime: '0-00:45:00' - job_extra_directives: ['--qos express'] - # interface for the workers - interface: "ib0" - log_directory: 'spice_logs' - job_script_prologue: [ - 'module load singularity', - 'unset SINGULARITY_BINDPATH', - 'export OMP_NUM_THREADS=1' + cores: 4 + processes: 4 + name: "spice-worker" + memory: "144GiB" + account: "OD-217087" + walltime: "0-00:45:00" + job_extra_directives: ["--qos express"] + # interface for the workers + interface: "ib0" + log_directory: "spice_logs" + job_script_prologue: + [ + "module load singularity", + "unset SINGULARITY_BINDPATH", + "export OMP_NUM_THREADS=1", ] - local_directory: $LOCALDIR - silence_logs: 'info' - worker_extra_args: ["--memory-limit", "128GiB"] + local_directory: $LOCALDIR + silence_logs: "info" + worker_extra_args: ["--memory-limit", "128GiB"] adapt_kwargs: - minimum: 1 - maximum: 256 - wait_count: 20 - target_duration: "5s" - 
interval: "10s" + minimum: 1 + maximum: 256 + wait_count: 20 + target_duration: "5s" + interval: "10s" diff --git a/arrakis/frion.py b/arrakis/frion.py index 0edfebac..fee8777a 100644 --- a/arrakis/frion.py +++ b/arrakis/frion.py @@ -276,7 +276,7 @@ def main( epoch (int): Epoch of observation sbid (int, optional): SBID of observation. Defaults to None. username (str, optional): Mongo username. Defaults to None. - password (str, optional): Mongo passwrod. Defaults to None. + password (str, optional): Mongo password. Defaults to None. database (bool, optional): Update database. Defaults to False. ionex_server (str, optional): IONEX server. Defaults to "ftp://ftp.aiub.unibe.ch/CODE/". ionex_proxy_server (str, optional): Proxy server. Defaults to None. diff --git a/arrakis/imager.py b/arrakis/imager.py index 3af23481..d659991e 100644 --- a/arrakis/imager.py +++ b/arrakis/imager.py @@ -68,7 +68,7 @@ class ImageSet(Struct): image_lists: dict[str, list[str]] """Dictionary of lists of images. The keys are the polarisations and the values are the list of images for that polarisation.""" aux_lists: dict[tuple[str, str], list[str]] | None = None - """Dictionary of lists of auxillary images. The keys are a tuple of the polarisation and the image type, and the values are the list of images for that polarisation and image type.""" + """Dictionary of lists of auxiliary images. The keys are a tuple of the polarisation and the image type, and the values are the list of images for that polarisation and image type.""" class MFSImage(Struct): @@ -242,7 +242,7 @@ def cleanup_imageset(purge: bool, image_set: ImageSet) -> None: # The aux images are the same between the native images and the smoothed images, # they were just copied across directly without modification if image_set.aux_lists: - logger.critical("Removing auxillary images. ") + logger.critical("Removing auxiliary images. 
") for (pol, aux), aux_list in image_set.aux_lists.items(): for aux_image in aux_list: try: @@ -686,19 +686,19 @@ def get_beam(image_set: ImageSet, cutoff: float | None) -> Path: import sys import traceback - tbe = traceback.TracebackException.from_exception(e) - logger.error(f"Local {''.join(tbe.format())}") + the = traceback.TracebackException.from_exception(e) + logger.error(f"Local {''.join(the.format())}") f = sys.exc_info()[2].tb_frame f = f.f_back while f is not None: - tbe.stack.append( + the.stack.append( traceback.FrameSummary( f.f_code.co_filename, f.f_lineno, f.f_code.co_name ) ) f = f.f_back - logger.error(f"Full {''.join(tbe.format())}") + logger.error(f"Full {''.join(the.format())}") raise e logger.info(f"The common beam is: {common_beam=}") @@ -744,7 +744,7 @@ def smooth_imageset( logger.info(f"{common_beam=}") - logger.info(f"Smooting {image_set.ms} images") + logger.info(f"Smoothing {image_set.ms} images") images_to_smooth: dict[str, list[str]] if aux_mode is None: @@ -833,7 +833,7 @@ def fix_ms(ms: Path) -> Path: @task(name="Fix MeasurementSet Correlations") def fix_ms_askap_corrs(ms: Path, *args, **kwargs) -> Path: """Applies a correction to raw telescope polarisation products to rotate them - to the wsclean espected form. This is essentially related to the third-axis of + to the wsclean expected form. This is essentially related to the third-axis of ASKAP and reorientating its 'X' and 'Y's. Args: @@ -910,7 +910,7 @@ def main( force_mask_rounds (Union[int, None], optional): WSClean force mask rounds (requires modified WSClean). Defaults to None. auto_threshold (float, optional): WSClean auto threshold (in SNR). Defaults to 1. taper (Union[float, None], optional): WSClean taper (in arcsec). Defaults to None. - purge (bool, optional): Purge auxillary files after imaging. Defaults to False. + purge (bool, optional): Purge auxiliary files after imaging. Defaults to False. minuv (float, optional): WSClean minuv-l. Defaults to 0.0. 
parallel_deconvolution (Optional[int], optional): WSClean parallel deconvolution. Defaults to None. gridder (Optional[str], optional): WSClean gridder. Defaults to None. @@ -963,7 +963,7 @@ def main( cube_aux_modes = (None, "residual") if make_residual_cubes else (None,) - # Image_sets will be a containter that represents the output wsclean image products + # Image_sets will be a container that represents the output wsclean image products # produced for each beam. A single ImageSet is a container for a single beam. ms_list_fixed = [] pol_angles = [] diff --git a/arrakis/init_database.py b/arrakis/init_database.py index f54942ca..190c83ce 100644 --- a/arrakis/init_database.py +++ b/arrakis/init_database.py @@ -34,10 +34,10 @@ def source2beams(ra: float, dec: float, database: Table, max_sep: float = 1) -> ra (float): RA of source in degrees. dec (float): DEC of source in degrees. database (dict): RACS database table. - max_sep (float, optional): Maximum seperation of source to beam centre in degrees. Defaults to 1. + max_sep (float, optional): Maximum separation of source to beam centre in degrees. Defaults to 1. Returns: - Table: Subset of RACS databsae table containing beams that contain the source. + Table: Subset of RACS database table containing beams that contain the source. """ c1 = SkyCoord(database["RA_DEG"] * u.deg, database["DEC_DEG"] * u.deg, frame="icrs") c2 = SkyCoord(ra * u.deg, dec * u.deg, frame="icrs") @@ -74,7 +74,7 @@ def cat2beams( Args: mastercat (Table): Master catalogue table. database (Table): RACS database table. - max_sep (float, optional): Maxium source separation in degrees. Defaults to 1. + max_sep (float, optional): Maximum source separation in degrees. Defaults to 1. 
Returns: Tuple[np.ndarray, np.ndarray, Angle]: Output of astropy.coordinates.search_around_sky @@ -206,7 +206,7 @@ def beam_database( beams_col, island_col, comp_col = get_db( host=host, epoch=epoch, username=username, password=password ) - delete_res = beams_col.delete_many({}) # Delete previous databas + delete_res = beams_col.delete_many({}) # Delete previous database logger.warning(f"Deleted {delete_res.deleted_count} documents from beam collection") insert_res = beams_col.insert_many(json_data) count = beams_col.count_documents({}) @@ -274,7 +274,7 @@ def get_beams(mastercat: Table, database: Table, epoch: int = 0) -> list[dict]: List[Dict]: List of beam dictionaries. """ - # Get seperations on sky + # Get separations on sky seps = cat2beams(mastercat, database, max_sep=1) vals, ixs = ndix_unique(seps[1]) diff --git a/arrakis/linmos.py b/arrakis/linmos.py index af077b06..7ce7a5df 100644 --- a/arrakis/linmos.py +++ b/arrakis/linmos.py @@ -193,7 +193,7 @@ def genparset( linmos.weights = {weight_string} linmos.imagetype = fits linmos.outname = {linmos_image_str} -linmos.outweight = {linmos_weight_str} +linmos.outweigh = {linmos_weight_str} # For ASKAPsoft>1.3.0 linmos.useweightslog = true linmos.weighttype = Combined diff --git a/arrakis/makecat.py b/arrakis/makecat.py index f17c86ff..3d6597a2 100644 --- a/arrakis/makecat.py +++ b/arrakis/makecat.py @@ -58,7 +58,9 @@ class SpectralIndices(NamedTuple): betas_err: np.ndarray -def combinate(data: ArrayLike) -> tuple[ArrayLike, ArrayLike]: +def combinate( # codespell:ignore[combinate] + data: ArrayLike, +) -> tuple[ArrayLike, ArrayLike]: """Return all combinations of data with itself Args: @@ -228,7 +230,7 @@ def is_blended_component(sub_df: pd.DataFrame) -> pd.DataFrame: def lognorm_from_percentiles(x1, p1, x2, p2): - """Return a log-normal distribuion X parametrized by: + """Return a log-normal distribution X parametrized by: P(X < p1) = x1 P(X < p2) = x2 @@ -325,7 +327,7 @@ def get_fit_func( Args: tab (TableLike): 
Catalogue to fit - nbins (int, optional): Number of bins along seperation axis. Defaults to 21. + nbins (int, optional): Number of bins along separation axis. Defaults to 21. Returns: Callable: 3rd order polynomial fit. @@ -611,7 +613,7 @@ def cuts_and_flags( m2_flag = cat["rm_width"] > cat["rmsf_fwhm"] cat.add_column(Column(data=m2_flag, name="complex_M2_CC_flag")) - # Flag RMs which are very diffent from RMs nearby + # Flag RMs which are very different from RMs nearby # Set up voronoi bins, trying to obtain 50 sources per bin goodI = ~cat["stokesI_fit_flag"] & ~cat["channel_flag"] goodL = goodI & ~cat["leakage_flag"] & (cat["snr_polint"] > 5) @@ -676,7 +678,7 @@ def get_integration_time(cat: RMTable, field_col: Collection, sbid: int | None = if sbid is not None: query["$and"].append({"SBID": sbid}) query["$and"].remove({"FIELD_NAME": {"$in": unique_field_names}}) - # Get the singlular field name + # Get the singular field name field_names = [ field_col.find_one({"SBID": sbid}, {"FIELD_NAME": 1})["FIELD_NAME"] ] * len(field_names) @@ -1139,7 +1141,7 @@ def main( # Add epoch rmtab.add_column(Column(data=rmtab["start_time"] + (tints / 2), name="epoch")) - # Get Galatic coords + # Get Galactic coords glon, glat = RMTable.calculate_missing_coordinates_column( rmtab["ra"].to(u.deg), rmtab["dec"].to(u.deg), to_galactic=True ) @@ -1195,7 +1197,7 @@ def main( rmtab.add_missing_columns() rmtab.verify_standard_strings() rmtab.verify_limits() - # Readd complex test + # Re-add complex test rmtab["complex_test"] = "sigma_add OR Second moment" # Add main ID rmtab["cat_id"].meta["ucd"] = "meta.id;meta.main" diff --git a/arrakis/merge_fields.py b/arrakis/merge_fields.py index 9859708d..0c43ad64 100644 --- a/arrakis/merge_fields.py +++ b/arrakis/merge_fields.py @@ -163,7 +163,7 @@ def genparset( linmos.weights = {weightlist} linmos.imagetype = fits linmos.outname = {im_outname} -linmos.outweight = {wt_outname} +linmos.outweigh = {wt_outname} # For ASKAPsoft>1.3.0 linmos.weighttype 
= FromWeightImages linmos.weightstate = Corrected diff --git a/arrakis/process_spice.py b/arrakis/process_spice.py index 62904d5c..b2b86f08 100644 --- a/arrakis/process_spice.py +++ b/arrakis/process_spice.py @@ -225,7 +225,7 @@ def create_dask_runner( """Create a DaskTaskRunner Args: - dask_config (str): Configuraiton file for the DaskTaskRunner + dask_config (str): Configuration file for the DaskTaskRunner overload (bool, optional): Overload the options for threadded work. Defaults to False. Returns: diff --git a/arrakis/rmclean_oncuts.py b/arrakis/rmclean_oncuts.py index 7dba2245..754e85a8 100644 --- a/arrakis/rmclean_oncuts.py +++ b/arrakis/rmclean_oncuts.py @@ -55,7 +55,7 @@ def rmclean1d( comp (dict): Mongo entry for component. outdir (str): Output directory. cutoff (float, optional): CLEAN cutouff (in sigma). Defaults to -3. - maxIter (int, optional): Maximum CLEAN interation. Defaults to 10000. + maxIter (int, optional): Maximum CLEAN iteration. Defaults to 10000. gain (float, optional): CLEAN gain. Defaults to 0.1. savePlots (bool, optional): Save CLEAN plots. Defaults to False. rm_verbose (bool, optional): Verbose RM-CLEAN. Defaults to True. diff --git a/arrakis/rmsynth_oncuts.py b/arrakis/rmsynth_oncuts.py index cd1de962..cc2d25ff 100644 --- a/arrakis/rmsynth_oncuts.py +++ b/arrakis/rmsynth_oncuts.py @@ -119,7 +119,7 @@ def rmsynthoncut3d( database (bool, optional): Update MongoDB. Defaults to False. phiMax_radm2 (float, optional): Max Faraday depth. Defaults to None. dPhi_radm2 (float, optional): Faraday dpeth channel width. Defaults to None. - nSamples (int, optional): Samples acorss RMSF. Defaults to 5. + nSamples (int, optional): Samples across RMSF. Defaults to 5. weightType (str, optional): Weighting type. Defaults to 'variance'. fitRMSF (bool, optional): Fit RMSF. Defaults to False. not_RMSF (bool, optional): Skip calculation of RMSF. Defaults to False. 
diff --git a/arrakis/utils/database.py b/arrakis/utils/database.py index e8a558c1..35743cd9 100644 --- a/arrakis/utils/database.py +++ b/arrakis/utils/database.py @@ -48,7 +48,7 @@ def test_db( Returns: - bool: True if connection succesful + bool: True if connection successful Raises: Exception: If connection fails. @@ -67,7 +67,7 @@ def test_db( except pymongo.errors.ServerSelectionTimeoutError: raise Exception("Please ensure 'mongod' is running") - logger.info("MongoDB connection succesful!") + logger.info("MongoDB connection successful!") return True diff --git a/arrakis/utils/exceptions.py b/arrakis/utils/exceptions.py index a98f3f8e..f4c4c32d 100644 --- a/arrakis/utils/exceptions.py +++ b/arrakis/utils/exceptions.py @@ -35,7 +35,7 @@ class ReadError(OSError): class RegistryError(Exception): """Raised when a registry operation with the archiving - and unpacking registeries fails""" + and unpacking registries fails""" class DivergenceError(Exception): diff --git a/arrakis/utils/fitsutils.py b/arrakis/utils/fitsutils.py index a15366b4..4f38dce2 100644 --- a/arrakis/utils/fitsutils.py +++ b/arrakis/utils/fitsutils.py @@ -101,7 +101,7 @@ def getfreq( # Two problems. The default 'UTC' stored in 'TIMESYS' is # incompatible with the TIME_SCALE checks in astropy. - # Deleting or coverting to lower case fixes it. Second + # Deleting or converting to lower case fixes it. Second # problem, the OBSGEO keywords prompts astropy to apply # a velocity correction, but no SPECSYS has been defined. for k in ["TIMESYS", "OBSGEO-X", "OBSGEO-Y", "OBSGEO-Z"]: diff --git a/arrakis/utils/json.py b/arrakis/utils/json.py index c8ca006c..d55b003e 100644 --- a/arrakis/utils/json.py +++ b/arrakis/utils/json.py @@ -13,7 +13,7 @@ class MyEncoder(json.JSONEncoder): - """Cutom JSON encorder. + """Custom JSON encoder. Parses the data stored in source_dict to JSON without errors. 
diff --git a/arrakis/utils/msutils.py b/arrakis/utils/msutils.py index 167e78f8..988dd3d2 100644 --- a/arrakis/utils/msutils.py +++ b/arrakis/utils/msutils.py @@ -27,7 +27,7 @@ def get_pol_axis( Args: ms (Path): The path to the measurement set that will be inspected - feed_idx (Optional[int], optional): Specify the entery in the FEED + feed_idx (Optional[int], optional): Specify the entry in the FEED table of `ms` to return. This might be required when a subset of a measurement set has been extracted from an observation with a varying orientation. @@ -404,7 +404,7 @@ def wsclean( padding (float, optional): Pad images by the given factor during inversion to avoid aliasing. Default: 1.2 (=20%). Defaults to None. scale (str, optional): Scale of a pixel. Default unit is degrees, but - can be specificied, e.g. -scale 20asec. Default: 0.01deg. + can be specified, e.g. -scale 20asec. Default: 0.01deg. Defaults to None. predict (bool, optional): Only perform a single prediction for an existing image. Doesn't do any imaging or cleaning. The input @@ -455,7 +455,7 @@ def wsclean( <1%. Default: on. Defaults to False. grid_mode (str, optional): Kernel and mode used for gridding: kb = Kaiser-Bessel (default with 7 pixels), nn = nearest neighbour - (no kernel), more options: rect, kb-no-sinc, gaus, bn. Default: kb. + (no kernel), more options: rect, kb-no-sinc, gauss, bn. Default: kb. Defaults to None. kernel_size (int, optional): Gridding antialiasing kernel size. Default: 7. Defaults to None. @@ -560,7 +560,7 @@ def wsclean( Defaults to None. maxw (float, optional): Do not grid visibilities with a w-value higher than the given percentage of the max w, to save speed. - Default: grid everythin. Defaults to None. + Default: grid everything. Defaults to None. niter (int, optional): Maximum number of clean iterations to perform. Default: 0 (=no cleaning). Defaults to None. 
nmiter (int, optional): Maximum number of major clean @@ -650,11 +650,11 @@ def wsclean( iuwt (bool, optional): Use the IUWT deconvolution algorithm. Defaults to False. iuwt_snr_test (bool, optional): Stop IUWT when the SNR decreases. - This might help limitting divergence, but can occasionally also + This might help limiting divergence, but can occasionally also stop the algorithm too early. Default: no SNR test. Defaults to False. no_iuwt_snr_test (bool, optional): Do not stop IUWT when the SNR - decreases. This might help limitting divergence, but can + decreases. This might help limiting divergence, but can occasionally also stop the algorithm too early. Default: no SNR test. Defaults to False. moresane_ext (str, optional): Use the MoreSane deconvolution algorithm, @@ -724,7 +724,7 @@ def wsclean( -beam-shape 0. Defaults to None. beam_shape (str, optional): Set the FWHM beam shape for restoring the clean components. Defaults units for maj and min are arcsec, and - degrees for PA. Can be overriden, + degrees for PA. Can be overridden, e.g. '-beam-shape 1amin 1amin 3deg'. Default: shape of PSF. Defaults to None. fit_beam (bool, optional): Determine beam shape by fitting the PSF diff --git a/docs/requirements.txt b/docs/requirements.txt index 124ac9c0..76c9a833 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,7 +1,7 @@ -sphinx_rtd_theme -sphinx-book-theme -sphinx-autoapi -numpydoc +-e ./ myst_parser +numpydoc +sphinx-autoapi +sphinx-book-theme +sphinx_rtd_theme sphinxcontrib-mermaid --e ./ diff --git a/docs/source/acknowledge.rst b/docs/source/acknowledge.rst index 0651aaf6..f7d50d83 100644 --- a/docs/source/acknowledge.rst +++ b/docs/source/acknowledge.rst @@ -9,7 +9,7 @@ This package utilises a number of third-party libraries. 
Please acknowledge thes List of third party libraries: -* `Numpy `_ +* `numpy `_ * `SciPy `_ * `Matplotlib `_ * `Astropy `_ diff --git a/docs/source/imaging.rst b/docs/source/imaging.rst index 28bb005e..d5dd9030 100644 --- a/docs/source/imaging.rst +++ b/docs/source/imaging.rst @@ -5,7 +5,7 @@ Imaging MeasurementSets produced by the ASKAPsoft pipeline need modification before using tools like WSClean. This can be done using `FixMS `_, which is called internally by *Arrakis*. -*Arrakis* provides an interface to the `WSClean `_ imaging software, with convencince functions for imaging mutliple ASKAP beams simultaneously. There are two main interaces for running the imaging pipeline: +*Arrakis* provides an interface to the `WSClean `_ imaging software, with convencince functions for imaging multiple ASKAP beams simultaneously. There are two main interfaces for running the imaging pipeline: The `spice_image` CLI and API =================================== @@ -26,8 +26,8 @@ This can be run using: [--multiscale] [--multiscale_scale_bias MULTISCALE_SCALE_BIAS] [--multiscale_scales MULTISCALE_SCALES] [--absmem ABSMEM] [--make_residual_cubes] [--ms_glob_pattern MS_GLOB_PATTERN] [--data_column DATA_COLUMN] [--no_mf_weighting] [--skip_fix_ms] [--num_beams NUM_BEAMS] [--disable_pol_local_rms] [--disable_pol_force_mask_rounds] [--hosted-wsclean HOSTED_WSCLEAN | --local_wsclean LOCAL_WSCLEAN] msdir datadir - - + + mmm mmm mmm mmm mmm )-( )-( )-( )-( )-( ( S ) ( P ) ( I ) ( C ) ( E ) @@ -38,17 +38,17 @@ This can be run using: ( R ) ( A ) ( C ) ( S ) | | | | | | | | |___| |___| |___| |___| - + Arrkis imager - - + + options: -h, --help show this help message and exit --hosted-wsclean HOSTED_WSCLEAN Docker or Singularity image for wsclean (default: docker://alecthomson/wsclean:latest) --local_wsclean LOCAL_WSCLEAN Path to local wsclean Singularity image (default: None) - + imaging arguments: msdir Directory containing MS files --temp_dir_wsclean TEMP_DIR_WSCLEAN @@ -96,7 +96,7 @@ 
This can be run using: Disable local RMS for polarisation images (default: False) --disable_pol_force_mask_rounds Disable force mask rounds for polarisation images (default: False) - + workdir arguments: datadir Directory to create/find full-size images and 'cutout' directory @@ -108,14 +108,10 @@ You may instead prefer to use the Python API, which is more flexible and allows from prefect.task_runners import SequentialTaskRunner from arrakis.imager import main as imager_flow - def main(...): + def main(): task_runner = SequentialTaskRunner() - imager_flow.with_options( - task_runner=task_runner - )( - ... # Add your arguments here - ) + imager_flow.with_options(task_runner=task_runner)(...) # Add your arguments here You can find the full list of arguments in the API docs here: :py:mod:`arrakis.imager.main`. @@ -124,7 +120,7 @@ You can find the full list of arguments in the API docs here: :py:mod:`arrakis.i The `spice_process` CLI ===================================== -It is also possible to run just the imaging part of the pipeline using a the `spice_process` command line tool, as described in :ref:`Running the pipeline`. You will need to envoke the argument `--imager_only`, along with the other imaging arguments. This will run the imaging pipeline in parallel, using the Dask task runner defined in your config file of choice. Here is an example pipeline config for only imaging: +It is also possible to run just the imaging part of the pipeline using a the `spice_process` command line tool, as described in :ref:`Running the pipeline`. You will need to invoke the argument `--imager_only`, along with the other imaging arguments. This will run the imaging pipeline in parallel, using the Dask task runner defined in your config file of choice. Here is an example pipeline config for only imaging: .. 
code-block:: yaml diff --git a/docs/source/install.rst b/docs/source/install.rst index 158057f5..d5d3d56f 100644 --- a/docs/source/install.rst +++ b/docs/source/install.rst @@ -18,7 +18,7 @@ After cloning this repo, please run: :: # or - if you have mamba: mamba env create -This will install the python dependencies and the command-line scrips into a conda environment called `arrakis310`, which can be activated by: :: +This will install the python dependencies and the command-line scripts into a conda environment called `arrakis310`, which can be activated by: :: conda activate arrakis310 diff --git a/docs/source/pipeline.rst b/docs/source/pipeline.rst index 3a1002ea..e6ca50e2 100644 --- a/docs/source/pipeline.rst +++ b/docs/source/pipeline.rst @@ -2,9 +2,9 @@ Running the pipeline -------------------- So you're ready to run the pipeline? Make sure you've completed the :ref:`installation` and :ref:`Getting started` steps first. -The Arrakis pipeline requires 36 calibrated MeasurementSets, one per ASKAP beam. You can obtain these from the Observatory (via `CASDA `_) or produce them yourself with a pipline like `Flint `_. You'll need to have the visibilities stored in a single 'working' directory. +The Arrakis pipeline requires 36 calibrated MeasurementSets, one per ASKAP beam. You can obtain these from the Observatory (via `CASDA `_) or produce them yourself with a pipeline like `Flint `_. You'll need to have the visibilities stored in a single 'working' directory. -:code:`spice_process` and :code:`spice_field` orchestrate the pipeline flow using `Prefect `_ and `Dask `_. These script calls the other :code:`arrakis` modules to do the work. You can control which modules run in the configuration of :code:`spice_process` or :code:`spice_field`. :code:`spice_process` operates on the level of a single RACS fields, whereas :code:`spice_field` merges multiple fields togther. You will need to run :code:`spice_process` on at least two fields before calling :code:`spice_field`. 
After running :code:`spice_process` or :code:`spice_field` you can run :code:`spice_cat` to produce a just a catalogue from the database values. +:code:`spice_process` and :code:`spice_field` orchestrate the pipeline flow using `Prefect `_ and `Dask `_. These script calls the other :code:`arrakis` modules to do the work. You can control which modules run in the configuration of :code:`spice_process` or :code:`spice_field`. :code:`spice_process` operates on the level of a single RACS fields, whereas :code:`spice_field` merges multiple fields together. You will need to run :code:`spice_process` on at least two fields before calling :code:`spice_field`. After running :code:`spice_process` or :code:`spice_field` you can run :code:`spice_cat` to produce a just a catalogue from the database values. Details of each module can be found in the API documentation. But broadly the stages are: * Imaging - Create image cubes from visibilities using `WSClean `_. This will also convolve the cubes to a common spatial resolution. @@ -33,7 +33,7 @@ Details of each module can be found in the API documentation. 
But broadly the st ---- -With an initalised database you can call the pipeline on a single field: :: +With an initialised database you can call the pipeline on a single field: :: (arrakis310) $ spice_process -h usage: spice_process [-h] [--dask_config DASK_CONFIG] [--imager_dask_config IMAGER_DASK_CONFIG] [--imager_only] [--skip_imager] [--skip_cutout] [--skip_linmos] [--skip_frion] [--skip_rmsynth] [--skip_rmclean] [--skip_cat] [--skip_validate] [--skip_cleanup] [--sbid SBID] @@ -46,8 +46,8 @@ With an initalised database you can call the pipeline on a single field: :: [--phi_max PHI_MAX] [--dphi DPHI] [--n_samples N_SAMPLES] [--poly_ord POLY_ORD] [--no_stokes_i] [--show_plots] [--not_rmsf] [--debug] [--cutoff CUTOFF] [--max_iter MAX_ITER] [--gain GAIN] [--window WINDOW] [--leakage_degree LEAKAGE_DEGREE] [--leakage_bins LEAKAGE_BINS] [--leakage_snr LEAKAGE_SNR] [--catfile OUTFILE] [--npix NPIX] [--map_size MAP_SIZE] [--overwrite] [--config CONFIG] datadir field msdir - - + + mmm mmm mmm mmm mmm )-( )-( )-( )-( )-( ( S ) ( P ) ( I ) ( C ) ( E ) @@ -58,14 +58,14 @@ With an initalised database you can call the pipeline on a single field: :: ( R ) ( A ) ( C ) ( S ) | | | | | | | | |___| |___| |___| |___| - + Arrakis pipeline. - + Before running make sure to start a session of mongodb e.g. $ mongod --dbpath=/path/to/database --bind_ip $(hostname -i) - - - + + + options: -h, --help show this help message and exit --hosted-wsclean HOSTED_WSCLEAN @@ -73,7 +73,7 @@ With an initalised database you can call the pipeline on a single field: :: --local_wsclean LOCAL_WSCLEAN Path to local wsclean Singularity image (default: None) --config CONFIG Config file path (default: None) - + pipeline arguments: --dask_config DASK_CONFIG Config file for Dask SlurmCLUSTER. (default: None) @@ -89,10 +89,10 @@ With an initalised database you can call the pipeline on a single field: :: --skip_cat Skip catalogue stage. (default: False) --skip_validate Skip validation stage. 
(default: False) --skip_cleanup Skip cleanup stage. (default: False) - + workdir arguments: datadir Directory to create/find full-size images and 'cutout' directory - + generic arguments: field Name of field (e.g. RACS_2132-50). --sbid SBID SBID of observation. (default: None) @@ -106,7 +106,7 @@ With an initalised database you can call the pipeline on a single field: :: --password PASSWORD Password of mongodb. (default: None) --limit LIMIT Limit the number of islands to process. (default: None) --database Add data to MongoDB. (default: False) - + imaging arguments: msdir Directory containing MS files --temp_dir_wsclean TEMP_DIR_WSCLEAN @@ -154,17 +154,17 @@ With an initalised database you can call the pipeline on a single field: :: Disable local RMS for polarisation images (default: False) --disable_pol_force_mask_rounds Disable force mask rounds for polarisation images (default: False) - + cutout arguments: -p PAD, --pad PAD Number of beamwidths to pad around source [3]. (default: 3) -d, --dryrun Do a dry-run [False]. (default: False) - + linmos arguments: --holofile HOLOFILE Path to holography image (default: None) --yanda YANDA Yandasoft version to pull from DockerHub [1.3.0]. (default: 1.3.0) --yanda_image YANDA_IMAGE Path to an existing yandasoft singularity container image. (default: None) - + frion arguments: --ionex_server IONEX_SERVER IONEX server (default: ftp://ftp.aiub.unibe.ch/CODE/) @@ -174,13 +174,13 @@ With an initalised database you can call the pipeline on a single field: :: --ionex_proxy_server IONEX_PROXY_SERVER Proxy server. (default: None) --ionex_predownload Pre-download IONEX files. (default: False) - + common rm arguments: --dimension DIMENSION How many dimensions for RMsynth '1d' or '3d'. (default: 1d) --save_plots save the plots. (default: False) --rm_verbose Verbose RMsynth/RMClean. (default: False) - + rm-synth arguments: --ion Use ionospheric-corrected data. 
(default: False) --tt0 TT0 TT0 MFS image -- will be used for model of Stokes I -- also needs --tt1. (default: None) @@ -201,13 +201,13 @@ With an initalised database you can call the pipeline on a single field: :: --show_plots show the plots. (default: False) --not_rmsf Skip calculation of RMSF? (default: False) --debug turn on debugging messages & plots. (default: False) - + rm-clean arguments: --cutoff CUTOFF CLEAN cutoff (+ve = absolute, -ve = sigma). (default: -3) --max_iter MAX_ITER maximum number of CLEAN iterations. (default: 10000) --gain GAIN CLEAN loop gain. (default: 0.1) --window WINDOW Further CLEAN in mask to this threshold. (default: None) - + catalogue arguments: --leakage_degree LEAKAGE_DEGREE Degree of leakage polynomial fit. (default: 4) @@ -216,14 +216,14 @@ With an initalised database you can call the pipeline on a single field: :: --leakage_snr LEAKAGE_SNR SNR cut for leakage fit. (default: 30.0) --catfile OUTFILE File to save table to. (default: None) - + validation options: --npix NPIX Number of pixels in the gridded maps (default: 512) --map_size MAP_SIZE Size of the maps in degrees (default: 8) - + cleanup arguments: --overwrite Overwrite existing tarball (default: False) - + Args that start with '--' can also be set in a config file (specified via --config). Config file syntax allows: key=value, flag=true, stuff=[a,b,c] (for details, see syntax at https://goo.gl/R74nmi). In general, command-line values override config file values which override defaults. .. 
code-block:: yaml @@ -351,15 +351,15 @@ For extra information you can refer to the API: * :py:mod:`arrakis.process_spice` -Similarly, you can merge multiple fields togther using: :: +Similarly, you can merge multiple fields together using: :: (arrakis310) $ spice_region -h usage: spice_region [-h] [--dask_config DASK_CONFIG] [--skip_frion] [--skip_rmsynth] [--skip_rmclean] [--skip_cat] [--skip_validate] [--skip_cleanup] [--merge_name MERGE_NAME] [--fields FIELDS [FIELDS ...]] [--datadirs DATADIRS [DATADIRS ...]] [--output_dir OUTPUT_DIR] [-e EPOCH] [--host host] [--username USERNAME] [--password PASSWORD] [--holofile HOLOFILE] [--yanda YANDA] [--yanda_image YANDA_IMAGE] [--dimension DIMENSION] [--save_plots] [--rm_verbose] [--ion] [--tt0 TT0] [--tt1 TT1] [--validate] [--own_fit] [--weight_type WEIGHT_TYPE] [--fit_function FIT_FUNCTION] [--fit_rmsf] [--phi_max PHI_MAX] [--dphi DPHI] [--n_samples N_SAMPLES] [--poly_ord POLY_ORD] [--no_stokes_i] [--show_plots] [--not_rmsf] [--debug] [--cutoff CUTOFF] [--max_iter MAX_ITER] [--gain GAIN] [--window WINDOW] [--leakage_degree LEAKAGE_DEGREE] [--leakage_bins LEAKAGE_BINS] [--leakage_snr LEAKAGE_SNR] [--catfile OUTFILE] [--npix NPIX] [--map_size MAP_SIZE] [--overwrite] [--config CONFIG] - - + + mmm mmm mmm mmm mmm )-( )-( )-( )-( )-( ( S ) ( P ) ( I ) ( C ) ( E ) @@ -370,18 +370,18 @@ Similarly, you can merge multiple fields togther using: :: ( R ) ( A ) ( C ) ( S ) | | | | | | | | |___| |___| |___| |___| - + Arrakis regional pipeline. - + Before running make sure to start a session of mongodb e.g. $ mongod --dbpath=/path/to/database --bind_ip $(hostname -i) - - - + + + options: -h, --help show this help message and exit --config CONFIG Config file path (default: None) - + pipeline arguments: --dask_config DASK_CONFIG Config file for Dask SlurmCLUSTER. (default: None) @@ -391,7 +391,7 @@ Similarly, you can merge multiple fields togther using: :: --skip_cat Skip catalogue stage [False]. 
(default: False) --skip_validate Skip validation stage. (default: False) --skip_cleanup Skip cleanup stage [False]. (default: False) - + merge arguments: --merge_name MERGE_NAME Name of the merged region (default: None) @@ -406,19 +406,19 @@ Similarly, you can merge multiple fields togther using: :: --host host Host of mongodb (probably $hostname -i). (default: None) --username USERNAME Username of mongodb. (default: None) --password PASSWORD Password of mongodb. (default: None) - + linmos arguments: --holofile HOLOFILE Path to holography image (default: None) --yanda YANDA Yandasoft version to pull from DockerHub [1.3.0]. (default: 1.3.0) --yanda_image YANDA_IMAGE Path to an existing yandasoft singularity container image. (default: None) - + common rm arguments: --dimension DIMENSION How many dimensions for RMsynth '1d' or '3d'. (default: 1d) --save_plots save the plots. (default: False) --rm_verbose Verbose RMsynth/RMClean. (default: False) - + rm-synth arguments: --ion Use ionospheric-corrected data. (default: False) --tt0 TT0 TT0 MFS image -- will be used for model of Stokes I -- also needs --tt1. (default: None) @@ -439,13 +439,13 @@ Similarly, you can merge multiple fields togther using: :: --show_plots show the plots. (default: False) --not_rmsf Skip calculation of RMSF? (default: False) --debug turn on debugging messages & plots. (default: False) - + rm-clean arguments: --cutoff CUTOFF CLEAN cutoff (+ve = absolute, -ve = sigma). (default: -3) --max_iter MAX_ITER maximum number of CLEAN iterations. (default: 10000) --gain GAIN CLEAN loop gain. (default: 0.1) --window WINDOW Further CLEAN in mask to this threshold. (default: None) - + catalogue arguments: --leakage_degree LEAKAGE_DEGREE Degree of leakage polynomial fit. (default: 4) @@ -454,14 +454,14 @@ Similarly, you can merge multiple fields togther using: :: --leakage_snr LEAKAGE_SNR SNR cut for leakage fit. (default: 30.0) --catfile OUTFILE File to save table to. 
(default: None) - + validation options: --npix NPIX Number of pixels in the gridded maps (default: 512) --map_size MAP_SIZE Size of the maps in degrees (default: 8) - + cleanup arguments: --overwrite Overwrite existing tarball (default: False) - + Args that start with '--' can also be set in a config file (specified via --config). Config file syntax allows: key=value, flag=true, stuff=[a,b,c] (for details, see syntax at https://goo.gl/R74nmi). In general, command-line values override config file values which override defaults. diff --git a/docs/source/start.rst b/docs/source/start.rst index 8aa4f1ab..19f74b7b 100644 --- a/docs/source/start.rst +++ b/docs/source/start.rst @@ -21,7 +21,7 @@ For example, you can start mongo using (for NUMA systems like Pawsey): :: .. tip:: It can be very convenient to run this database on a VM service like Pawsey's Nimbus cloud. You can then access the database from anywhere, and you don't need to worry about the database being deleted when you log out. This will require some network setup, such as opening the port for MongoDB (27017) on the VM. Get in touch with your local helpdesk if you need help with this. -For conveniance, we have provided a helper script to setup a MongoDB with both an admin and read-only user in the `scripts` directory. You can run this script with the following command: :: +For convenience, we have provided a helper script to setup a MongoDB with both an admin and read-only user in the `scripts` directory. You can run this script with the following command: :: create_mongodb.py @@ -184,7 +184,7 @@ To set up a Prefect Server, fist install Prefect with `pip`. You will also need prefect server necessary. Options: - -s - will atempt to start an prefect server + -s - will attempt to start an prefect server -h - will print this help page " @@ -259,10 +259,4 @@ Additionally, these should be provided to the `.adapt` call that would automatic .. 
code-block:: python - { - minimum: 2, - maximum: 36, - wait_count: 20, - target_interval: "300s", - interval: "30s" - } + {minimum: 2, maximum: 36, wait_count: 20, target_interval: "300s", interval: "30s"} diff --git a/environment.yml b/environment.yml index e30ca408..3d379aa9 100644 --- a/environment.yml +++ b/environment.yml @@ -1,36 +1,36 @@ name: arrakis310 channels: -- astropy -- conda-forge -- defaults -- pkgw-forge + - astropy + - conda-forge + - defaults + - pkgw-forge dependencies: -- python=3.10 -- pip -- future -- numpy -- scipy -- pandas -- matplotlib>=3.8 -- setuptools -- ipython -- astropy>=4.3 -- dask -- dask-jobqueue -- distributed -- prefect -- python-graphviz -- bokeh -- pymongo -- mongodb -- fitsio -- casacore -- casa-data -- sphinx -- numpydoc -- sphinx-autoapi -- m2r2 -- gfortran -- numba -- pip: - - .[RMextract] + - python=3.10 + - pip + - future + - numpy + - scipy + - pandas + - matplotlib>=3.8 + - setuptools + - ipython + - astropy>=4.3 + - dask + - dask-jobqueue + - distributed + - prefect + - python-graphviz + - bokeh + - pymongo + - mongodb + - fitsio + - casacore + - casa-data + - sphinx + - numpydoc + - sphinx-autoapi + - m2r2 + - gfortran + - numba + - pip: + - .[RMextract] diff --git a/licenses/flint.txt b/licenses/flint.txt index fd0a43b2..cefac032 100644 --- a/licenses/flint.txt +++ b/licenses/flint.txt @@ -25,4 +25,4 @@ DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. \ No newline at end of file +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/pyproject.toml b/pyproject.toml index 0fe3519b..b2bf4daf 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,7 +5,7 @@ build-backend = "hatchling.build" [project] name = "arrakis" authors = [ - {name="Alec Thomson"}, + {name="Alec Thomson"}, {name="Tim Galvin"}, ] description = "Processing the SPICE." @@ -156,4 +156,4 @@ ignore = [ "PLR2004", # Magic value used in comparison "ISC001", # Conflicts with formatter ] -isort.required-imports = ["from __future__ import annotations"] \ No newline at end of file +isort.required-imports = ["from __future__ import annotations"] diff --git a/scripts/casda_prepare.py b/scripts/casda_prepare.py index 42a66bf4..6d6061ac 100755 --- a/scripts/casda_prepare.py +++ b/scripts/casda_prepare.py @@ -339,7 +339,7 @@ def find_cubes(data_dir: str = ".") -> list: """Find Stokes I image cubelets in a directory Args: - data_dir (str, optional): Data containg cutouts directory. Defaults to ".". + data_dir (str, optional): Data containing cutouts directory. Defaults to ".". Returns: list: List of cubelets @@ -461,7 +461,7 @@ def find_plots(data_dir: str = ".") -> list: """Find plots in a directory Args: - data_dir (str, optional): Data containg cutouts directory. Defaults to ".". + data_dir (str, optional): Data containing cutouts directory. Defaults to ".". 
Returns: list: List of plots diff --git a/scripts/compare_leakage.py b/scripts/compare_leakage.py index 07adeec8..4713c3ec 100644 --- a/scripts/compare_leakage.py +++ b/scripts/compare_leakage.py @@ -240,7 +240,7 @@ def cli(): "datadir", metavar="datadir", type=str, - help="Directory containing data cutout direcory (in datadir/cutouts).", + help="Directory containing data cutout directory (in datadir/cutouts).", ) parser.add_argument("--holofile", type=str, help="Path to holography image") diff --git a/scripts/compute_leakage.py b/scripts/compute_leakage.py index d40e62cc..64d4dd20 100644 --- a/scripts/compute_leakage.py +++ b/scripts/compute_leakage.py @@ -129,7 +129,7 @@ def trim_mean(x): # if row_idx%100==0: # logger.info('Processing row %d of %d'%(row_idx,len(pair_dist))) - # idxs of poitns within d degs + # idxs of points within d degs idxs_of_points_in_aperture = np.argwhere(row < d) # collect data points for sources in aperture q_of_points_in_aperture = q[idxs_of_points_in_aperture] diff --git a/submit/cat_all_spica.sh b/submit/cat_all_spica.sh index 07154229..bf7079a9 100644 --- a/submit/cat_all_spica.sh +++ b/submit/cat_all_spica.sh @@ -86,7 +86,7 @@ for field in ${SPICA[*]} cal_sbid=`find_sbid.py $field --cal` data_dir=/group/ja3/athomson/spica - # Image dirctory + # Image directory cd $data_dir srun -n 3 --export=ALL processSPICE $field $data_dir/$cal_sbid/RACS_test4_1.05_$field --config $config --savePlots --tt0 $tt0_dir/RACS_test4_1.05_$field.fits --tt1 $tt1_dir/RACS_test4_1.05_$field.fits --use_mpi --skip_cutout --skip_linmos --skip_frion --skip_rmsynth --skip_rmclean diff --git a/submit/processSPICE.sbatch b/submit/processSPICE.sbatch index ed058066..e4242c5d 100644 --- a/submit/processSPICE.sbatch +++ b/submit/processSPICE.sbatch @@ -38,7 +38,7 @@ data_dir=/scratch/ja3/athomson/spica # data_dir=/group/ja3/athomson/spica config=/group/askap/athomson/projects/arrakis/spica/spica_config.txt -# Image dirctory +# Image directory cd $data_dir # Make a 
copy of this sbatch file for posterity diff --git a/submit/process_reSPICE.sh b/submit/process_reSPICE.sh index 133ab508..7302c4fa 100644 --- a/submit/process_reSPICE.sh +++ b/submit/process_reSPICE.sh @@ -32,7 +32,7 @@ tt1_dir=/group/askap/athomson/projects/RACS/CI1_mosaic_1.0 data_dir=/askapbuffer/processing/len067/arrakis config=/group/askap/athomson/projects/arrakis/spica/spica_config.txt -# Image dirctory +# Image directory cd $data_dir # Make a copy of this sbatch file for posterity diff --git a/submit/process_single.sh b/submit/process_single.sh index 01e7190b..d969c976 100644 --- a/submit/process_single.sh +++ b/submit/process_single.sh @@ -27,7 +27,7 @@ zernike=/group/askap/athomson/projects/arrakis/leakages/${weight_pad}_zernike_ho data_dir=/group/ja3/athomson/spica config=/group/askap/athomson/projects/arrakis/spica/spica_config.txt -# Image dirctory +# Image directory cd $data_dir # Make a copy of this sbatch file for posterity diff --git a/submit/run_all_spica.sh b/submit/run_all_spica.sh index 87b2e914..a6756d89 100644 --- a/submit/run_all_spica.sh +++ b/submit/run_all_spica.sh @@ -98,7 +98,7 @@ for field in ${SPICA[*]} data_dir=/group/ja3/athomson/spica config=/group/askap/athomson/projects/arrakis/spica/spica_config.txt - # Image dirctory + # Image directory cd $data_dir # Make a copy of this sbatch file for posterity diff --git a/submit/run_all_spica_array.sh b/submit/run_all_spica_array.sh index fd6971db..8e79d66e 100644 --- a/submit/run_all_spica_array.sh +++ b/submit/run_all_spica_array.sh @@ -68,7 +68,7 @@ zernike=/group/askap/athomson/projects/arrakis/leakages_bak/${weight_pad}_zernik data_dir=/group/ja3/athomson/spica config=/group/askap/athomson/projects/arrakis/spica/spica_config.txt -# Image dirctory +# Image directory cd $data_dir # Make a copy of this sbatch file for posterity diff --git a/tests/cli_test.py b/tests/test_cli.py similarity index 100% rename from tests/cli_test.py rename to tests/test_cli.py diff --git a/tests/init_test.py 
b/tests/test_init.py similarity index 100% rename from tests/init_test.py rename to tests/test_init.py diff --git a/tests/unit_test.py b/tests/test_unit.py similarity index 100% rename from tests/unit_test.py rename to tests/test_unit.py