From 1c6e60f503f2e3d2bcaada02c8e3c2f4b678bd9e Mon Sep 17 00:00:00 2001 From: Phil Smith Date: Fri, 10 Oct 2025 14:20:44 +0000 Subject: [PATCH 1/7] Adopt python-copier-template 1.0.0 --- .copier-answers.yml | 15 +++ .github/CONTRIBUTING.rst | 2 +- .../actions/install_requirements/action.yml | 12 +- .github/dependabot.yml | 4 + .github/pages/index.html | 2 +- .github/pages/make_switcher.py | 4 +- .github/workflows/code.yml | 119 ++++++++++++------ .github/workflows/docs.yml | 3 +- .github/workflows/docs_clean.yml | 2 +- .github/workflows/linkcheck.yml | 5 +- .gitignore | 4 +- .pre-commit-config.yaml | 6 +- .vscode/extensions.json | 5 +- .vscode/launch.json | 2 +- .vscode/settings.json | 17 ++- .vscode/tasks.json | 2 +- LICENSE | 2 +- README.rst | 13 +- catalog-info.yaml | 16 +-- docs/conf.py | 78 ++++++------ .../0002-switched-to-pip-skeleton.rst | 12 +- docs/developer/how-to/build-docs.rst | 2 +- docs/developer/how-to/lint.rst | 14 +-- docs/developer/how-to/make-release.rst | 2 +- docs/developer/reference/standards.rst | 5 +- docs/developer/tutorials/dev-install.rst | 12 +- docs/index.rst | 6 +- docs/user/explanations/docs-structure.rst | 2 +- docs/user/index.rst | 2 - docs/user/reference/api.rst | 76 +---------- docs/user/tutorials/installation.rst | 2 +- pyproject.toml | 56 ++++----- 32 files changed, 252 insertions(+), 252 deletions(-) create mode 100644 .copier-answers.yml diff --git a/.copier-answers.yml b/.copier-answers.yml new file mode 100644 index 00000000..317eda86 --- /dev/null +++ b/.copier-answers.yml @@ -0,0 +1,15 @@ +# Changes here will be overwritten by Copier +_commit: 1.0.0 +_src_path: https://github.com/DiamondLightSource/python-copier-template.git +author_email: philip.smith@diamond.ac.uk +author_name: Phil Smith +component_owner: group:default/high-level-apps +description: Python Toolkit for Accelerator Controls (Pytac) is a Python library for + working with elements of particle accelerators, developed at Diamond Light Source. +distribution_name: pytac +docker: false +docs_type: sphinx +git_platform: github.com +github_org: DiamondLightSource +package_name: pytac +repo_name: pytac diff --git a/.github/CONTRIBUTING.rst b/.github/CONTRIBUTING.rst index ebc41624..a1c03f6e 100644 --- a/.github/CONTRIBUTING.rst +++ b/.github/CONTRIBUTING.rst @@ -32,4 +32,4 @@ The `Developer Guide`_ contains information on setting up a development environment, running the tests and what standards the code and documentation should follow. -.. _Developer Guide: https://diamondlightsource.github.io/pytac/main/developer/how-to/contribute.html +.. 
_Developer Guide: https://DiamondLightSource.github.io/pytac/main/developer/how-to/contribute.html diff --git a/.github/actions/install_requirements/action.yml b/.github/actions/install_requirements/action.yml index c553af22..79d1a71e 100644 --- a/.github/actions/install_requirements/action.yml +++ b/.github/actions/install_requirements/action.yml @@ -7,6 +7,9 @@ inputs: install_options: description: Parameters to pass to pip install required: true + artifact_name: + description: A user friendly name to give the produced artifacts + required: true python_version: description: Python version to install default: "3.x" @@ -16,7 +19,7 @@ runs: steps: - name: Setup python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ inputs.python_version }} @@ -32,13 +35,13 @@ runs: mkdir -p lockfiles pip freeze --exclude-editable > lockfiles/${{ inputs.requirements_file }} # delete the self referencing line and make sure it isn't blank - sed -i '/file:/d' lockfiles/${{ inputs.requirements_file }} + sed -i'' -e '/file:/d' lockfiles/${{ inputs.requirements_file }} shell: bash - name: Upload lockfiles - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v4.0.0 with: - name: lockfiles_${{ github.job }}_${{ matrix.python }} + name: lockfiles-${{ inputs.python_version }}-${{ inputs.artifact_name }}-${{ github.sha }} path: lockfiles # This eliminates the class of problems where the requirements being given no @@ -55,4 +58,3 @@ runs: fi fi shell: bash - diff --git a/.github/dependabot.yml b/.github/dependabot.yml index fb7c6ee6..2d1af873 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -9,6 +9,10 @@ updates: directory: "/" schedule: interval: "weekly" + groups: + github-artifacts: + patterns: + - actions/*-artifact - package-ecosystem: "pip" directory: "/" diff --git a/.github/pages/index.html b/.github/pages/index.html index 80f0a009..c495f39f 100644 --- a/.github/pages/index.html +++ b/.github/pages/index.html @@ -8,4 +8,4 @@ - \ No newline at end of file + diff --git a/.github/pages/make_switcher.py b/.github/pages/make_switcher.py index 39c12772..ae227ab7 100755 --- a/.github/pages/make_switcher.py +++ b/.github/pages/make_switcher.py @@ -59,12 +59,12 @@ def get_versions(ref: str, add: Optional[str], remove: Optional[str]) -> List[st def write_json(path: Path, repository: str, versions: str): org, repo_name = repository.split("/") struct = [ - dict(version=version, url=f"https://{org}.github.io/{repo_name}/{version}/") + {"version": version, "url": f"https://{org}.github.io/{repo_name}/{version}/"} for version in versions ] text = json.dumps(struct, indent=2) print(f"JSON switcher:\n{text}") - path.write_text(text) + path.write_text(text, encoding="utf-8") def main(args=None): diff --git a/.github/workflows/code.yml b/.github/workflows/code.yml index 868066c7..364ff43d 100644 --- a/.github/workflows/code.yml +++ b/.github/workflows/code.yml @@ -5,7 +5,8 @@ on: pull_request: env: # The target python version, which must match the Dockerfile version - CONTAINER_PYTHON: "3.10" + CONTAINER_PYTHON: "3.11" + DIST_WHEEL_PATH: dist-${{ github.sha }} jobs: lint: @@ -15,13 +16,14 @@ jobs: steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Install python packages uses: ./.github/actions/install_requirements with: requirements_file: requirements-dev-3.x.txt install_options: -e .[dev] + artifact_name: lint - name: Lint run: tox -e pre-commit,mypy @@ -32,12 +34,12 @@ jobs: fail-fast: false matrix: os: 
["ubuntu-latest"] # can add windows-latest, macos-latest - python: ["3.9", "3.10"] + python: ["3.8", "3.9", "3.10", "3.11"] install: ["-e .[dev]"] # Make one version be non-editable to test both paths of version code include: - os: "ubuntu-latest" - python: "3.8" + python: "3.7" install: ".[dev]" runs-on: ${{ matrix.os }} @@ -47,7 +49,7 @@ jobs: steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: # Need this to get version number from last tag fetch-depth: 0 @@ -58,12 +60,13 @@ jobs: python_version: ${{ matrix.python }} requirements_file: requirements-test-${{ matrix.os }}-${{ matrix.python }}.txt install_options: ${{ matrix.install }} + artifact_name: tests - name: List dependency tree run: pipdeptree - name: Run tests - run: pytest + run: tox -e pytest - name: Upload coverage to Codecov uses: codecov/codecov-action@v3 @@ -77,7 +80,7 @@ jobs: steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: # Need this to get version number from last tag fetch-depth: 0 @@ -88,9 +91,9 @@ jobs: pipx run build - name: Upload sdist and wheel as artifacts - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v4.0.0 with: - name: dist_${{ github.job }}_${{ matrix.python }} + name: ${{ env.DIST_WHEEL_PATH }} path: dist - name: Check for packaging errors @@ -102,10 +105,11 @@ jobs: python_version: ${{env.CONTAINER_PYTHON}} requirements_file: requirements.txt install_options: dist/*.whl + artifact_name: dist - name: Test module --version works using the installed wheel # If more than one module in src/ replace with module name to test - run: python -m $(ls src | head -1) --version + run: python -m $(ls --hide='*.egg-info' src | head -1) --version container: needs: [lint, dist, test] @@ -115,73 +119,109 @@ jobs: contents: read packages: write + env: + TEST_TAG: "testing" + steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 # image names must be all lower case - name: Generate image repo name run: echo IMAGE_REPOSITORY=ghcr.io/$(tr '[:upper:]' '[:lower:]' <<< "${{ github.repository }}") >> $GITHUB_ENV + - name: Set lockfile location in environment + run: | + echo "DIST_LOCKFILE_PATH=lockfiles-${{ env.CONTAINER_PYTHON }}-dist-${{ github.sha }}" >> $GITHUB_ENV + - name: Download wheel and lockfiles - uses: actions/download-artifact@v4 + uses: actions/download-artifact@v4.1.0 with: path: artifacts/ + pattern: "*dist*" - name: Log in to GitHub Docker Registry if: github.event_name != 'pull_request' - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: registry: ghcr.io username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} - - name: Docker meta - id: meta - uses: docker/metadata-action@v4 - with: - images: ${{ env.IMAGE_REPOSITORY }} - tags: | - type=ref,event=tag - type=raw,value=latest - - name: Set up Docker Buildx id: buildx - uses: docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@v3 - - name: Build runtime image - uses: docker/build-push-action@v3 + - name: Build and export to Docker local cache + uses: docker/build-push-action@v5 with: + # Note build-args, context, file, and target must all match between this + # step and the later build-push-action, otherwise the second build-push-action + # will attempt to build the image again build-args: | - PIP_OPTIONS=-r lockfiles_dist_/requirements.txt dist_dist_/*.whl - push: ${{ github.event_name == 'push' && startsWith(github.ref, 'refs/tags') }} - load: ${{ ! 
(github.event_name == 'push' && startsWith(github.ref, 'refs/tags')) }} - tags: ${{ steps.meta.outputs.tags }} - labels: ${{ steps.meta.outputs.labels }} + PIP_OPTIONS=-r ${{ env.DIST_LOCKFILE_PATH }}/requirements.txt ${{ env.DIST_WHEEL_PATH }}/*.whl context: artifacts/ file: ./Dockerfile - # If you have a long docker build, uncomment the following to turn on caching - # For short build times this makes it a little slower + target: runtime + load: true + tags: ${{ env.TEST_TAG }} + # If you have a long docker build (2+ minutes), uncomment the + # following to turn on caching. For short build times this + # makes it a little slower #cache-from: type=gha #cache-to: type=gha,mode=max - - name: Test cli works in runtime image - run: docker run ${{ env.IMAGE_REPOSITORY }} --version + - name: Test cli works in cached runtime image + run: docker run docker.io/library/${{ env.TEST_TAG }} --version + + - name: Create tags for publishing image + id: meta + uses: docker/metadata-action@v5 + with: + images: ${{ env.IMAGE_REPOSITORY }} + tags: | + type=ref,event=tag + type=raw,value=latest, enable=${{ github.ref_type == 'tag' }} + # type=edge,branch=main + # Add line above to generate image for every commit to given branch, + # and uncomment the end of if clause in next step + + - name: Push cached image to container registry + if: github.ref_type == 'tag' # || github.ref_name == 'main' + uses: docker/build-push-action@v5 + # This does not build the image again, it will find the image in the + # Docker cache and publish it + with: + # Note build-args, context, file, and target must all match between this + # step and the previous build-push-action, otherwise this step will + # attempt to build the image again + build-args: | + PIP_OPTIONS=-r ${{ env.DIST_LOCKFILE_PATH }}/requirements.txt ${{ env.DIST_WHEEL_PATH }}/*.whl + context: artifacts/ + file: ./Dockerfile + target: runtime + push: true + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} release: # upload to PyPI and make a release on every tag needs: [lint, dist, test] - if: ${{ github.event_name == 'push' && startsWith(github.ref, 'refs/tags') }} + if: ${{ github.event_name == 'push' && github.ref_type == 'tag' }} runs-on: ubuntu-latest env: HAS_PYPI_TOKEN: ${{ secrets.PYPI_TOKEN != '' }} steps: - - uses: actions/download-artifact@v4 + - name: Download wheel and lockfiles + uses: actions/download-artifact@v4.1.0 + with: + path: artifacts/ + pattern: "*dist*" - name: Fixup blank lockfiles # Github release artifacts can't be blank - run: for f in lockfiles_dist_/*; do [ -s $f ] || echo '# No requirements' >> $f; done + run: for f in ${{ env.DIST_LOCKFILE_PATH }}/*; do [ -s $f ] || echo '# No requirements' >> $f; done - name: Github Release # We pin to the SHA, not the tag, for security reasons. 
@@ -190,8 +230,8 @@ jobs: with: prerelease: ${{ contains(github.ref_name, 'a') || contains(github.ref_name, 'b') || contains(github.ref_name, 'rc') }} files: | - dist_dist_/* - lockfiles_dist_/* + ${{ env.DIST_WHEEL_PATH }}/* + ${{ env.DIST_LOCKFILE_PATH }}/* generate_release_notes: true env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -200,5 +240,4 @@ jobs: if: ${{ env.HAS_PYPI_TOKEN }} uses: pypa/gh-action-pypi-publish@release/v1 with: - packages-dir: dist_dist_/ password: ${{ secrets.PYPI_TOKEN }} diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index c510d577..3c29ff94 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -15,7 +15,7 @@ jobs: run: sleep 60 - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: # Need this to get version number from last tag fetch-depth: 0 @@ -29,6 +29,7 @@ jobs: with: requirements_file: requirements-dev-3.x.txt install_options: -e .[dev] + artifact_name: docs - name: Build docs run: tox -e docs diff --git a/.github/workflows/docs_clean.yml b/.github/workflows/docs_clean.yml index a67e1881..e324640e 100644 --- a/.github/workflows/docs_clean.yml +++ b/.github/workflows/docs_clean.yml @@ -18,7 +18,7 @@ jobs: steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: ref: gh-pages diff --git a/.github/workflows/linkcheck.yml b/.github/workflows/linkcheck.yml index 6b64fdea..7f651a27 100644 --- a/.github/workflows/linkcheck.yml +++ b/.github/workflows/linkcheck.yml @@ -12,16 +12,17 @@ jobs: steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Install python packages uses: ./.github/actions/install_requirements with: requirements_file: requirements-dev-3.x.txt install_options: -e .[dev] + artifact_name: link_check - name: Check links run: tox -e docs build -- -b linkcheck - name: Keepalive Workflow - uses: gautamkrishnar/keepalive-workflow@v1 \ No newline at end of file + uses: gautamkrishnar/keepalive-workflow@v1 diff --git a/.gitignore b/.gitignore index ceb1c53e..a37be99b 100644 --- a/.gitignore +++ b/.gitignore @@ -67,5 +67,5 @@ venv* # further build artifacts lockfiles/ -# matlab autosave -*.asv \ No newline at end of file +# ruff cache +.ruff_cache/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index aa2a4cb2..5bc9f001 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -15,9 +15,9 @@ repos: entry: black --check --diff types: [python] - - id: flake8 - name: Run flake8 + - id: ruff + name: Run ruff stages: [commit] language: system - entry: flake8 + entry: ruff types: [python] diff --git a/.vscode/extensions.json b/.vscode/extensions.json index 81922991..a1227b34 100644 --- a/.vscode/extensions.json +++ b/.vscode/extensions.json @@ -4,6 +4,7 @@ "ms-python.python", "tamasfe.even-better-toml", "redhat.vscode-yaml", - "ryanluker.vscode-coverage-gutters" + "ryanluker.vscode-coverage-gutters", + "charliermarsh.Ruff" ] -} \ No newline at end of file +} diff --git a/.vscode/launch.json b/.vscode/launch.json index f65cb376..3cda7432 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -22,4 +22,4 @@ }, } ] -} \ No newline at end of file +} diff --git a/.vscode/settings.json b/.vscode/settings.json index 2472acfd..e2909249 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -1,15 +1,22 @@ { "python.linting.pylintEnabled": false, - "python.linting.flake8Enabled": true, + "python.linting.flake8Enabled": false, "python.linting.mypyEnabled": true, "python.linting.enabled": true, - 
"python.testing.pytestArgs": [], + "python.testing.pytestArgs": [ + "--cov=pytac", + "--cov-report", + "xml:cov.xml" + ], "python.testing.unittestEnabled": false, "python.testing.pytestEnabled": true, "python.formatting.provider": "black", "python.languageServer": "Pylance", "editor.formatOnSave": true, - "editor.codeActionsOnSave": { - "source.organizeImports": true + "[python]": { + "editor.codeActionsOnSave": { + "source.fixAll.ruff": false, + "source.organizeImports.ruff": true + } } -} \ No newline at end of file +} diff --git a/.vscode/tasks.json b/.vscode/tasks.json index 946e69d4..c999e864 100644 --- a/.vscode/tasks.json +++ b/.vscode/tasks.json @@ -13,4 +13,4 @@ "problemMatcher": [], } ] -} \ No newline at end of file +} diff --git a/LICENSE b/LICENSE index 43abe76a..8dada3ed 100644 --- a/LICENSE +++ b/LICENSE @@ -186,7 +186,7 @@ same "printed page" as the copyright notice for easier identification within third-party archives. - Copyright 2023 Diamond Light Source Ltd. + Copyright {yyyy} {name of copyright owner} Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/README.rst b/README.rst index 427de093..2849b905 100644 --- a/README.rst +++ b/README.rst @@ -1,5 +1,5 @@ pytac -=========================== +============================================================================= |code_ci| |docs_ci| |coverage| |pypi_version| |license| @@ -13,11 +13,19 @@ Documentation https://DiamondLightSource.github.io/pytac Releases https://github.com/DiamondLightSource/pytac/releases ============== ============================================================== +This is where you should put some images or code snippets that illustrate +some relevant examples. If it is a library then you might put some +introductory code here: + .. code-block:: python from pytac import __version__ - print(f"Hello pytac {__version__}.") + print(f"Hello pytac {__version__}") + +Or if it is a commandline tool then you might put some example commands here:: + + $ python -m pytac --version .. |code_ci| image:: https://github.com/DiamondLightSource/pytac/actions/workflows/code.yml/badge.svg?branch=main :target: https://github.com/DiamondLightSource/pytac/actions/workflows/code.yml @@ -39,6 +47,7 @@ Releases https://github.com/DiamondLightSource/pytac/releases :target: https://opensource.org/licenses/Apache-2.0 :alt: Apache License + .. Anything below this line is used when viewing README.rst and will be replaced when included in index.rst diff --git a/catalog-info.yaml b/catalog-info.yaml index a6361025..02941ee4 100644 --- a/catalog-info.yaml +++ b/catalog-info.yaml @@ -2,15 +2,9 @@ apiVersion: backstage.io/v1alpha1 kind: Component metadata: name: pytac - title: Pytac - description: Python Toolkit for Accelerator Controls - annotations: - backstage.io/techdocs-ref: dir:. - github.com/project-slug: DiamondLightSource/pytac - tags: - - python - - controls + title: pytac + description: Python Toolkit for Accelerator Controls (Pytac) is a Python library for working with elements of particle accelerators, developed at Diamond Light Source. 
spec: - type: library - lifecycle: production - owner: group:accelerator-controls \ No newline at end of file + type: documentation + lifecycle: experimental + owner: group:default/high-level-apps \ No newline at end of file diff --git a/docs/conf.py b/docs/conf.py index e764ee86..0d40ab7f 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -8,7 +8,7 @@ from pathlib import Path from subprocess import check_output -import requests # type: ignore +import requests import pytac @@ -30,10 +30,10 @@ version = release extensions = [ - # This can parse google style docstrings - "sphinx.ext.napoleon", # Use this for generating API docs "sphinx.ext.autodoc", + # This can parse google style docstrings + "sphinx.ext.napoleon", # For linking to external sphinx documentation "sphinx.ext.intersphinx", # Add links to source code in API docs @@ -48,7 +48,7 @@ # If true, Sphinx will warn about all references where the target cannot # be found. -nitpicky = False +nitpicky = True # A list of (type, target) tuples (by default empty) that should be ignored when # generating warnings in "nitpicky mode". Note that type should include the @@ -98,17 +98,16 @@ # This means you can link things like `str` and `asyncio` to the relevant # docs in the python documentation. -intersphinx_mapping = dict(python=("https://docs.python.org/3/", None)) +intersphinx_mapping = {"python": ("https://docs.python.org/3/", None)} # A dictionary of graphviz graph attributes for inheritance diagrams. -inheritance_graph_attrs = dict(rankdir="TB") +inheritance_graph_attrs = {"rankdir": "TB"} # Common links that should be available on every page rst_epilog = """ .. _Diamond Light Source: http://www.diamond.ac.uk .. _black: https://github.com/psf/black -.. _flake8: https://flake8.pycqa.org/en/latest/ -.. _isort: https://github.com/PyCQA/isort +.. _ruff: https://beta.ruff.rs/docs/ .. _mypy: http://mypy-lang.org/ .. _pre-commit: https://pre-commit.com/ """ @@ -127,7 +126,7 @@ # a list of builtin themes. # html_theme = "pydata_sphinx_theme" -github_repo = project +github_repo = "pytac" github_user = "DiamondLightSource" switcher_json = f"https://{github_user}.github.io/{github_repo}/switcher.json" switcher_exists = requests.get(switcher_json).ok @@ -149,40 +148,41 @@ # Point 3 makes checking switcher difficult, because the updated skeleton # will fix the switcher at the end of the docs workflow, but never gets a chance # to complete as the docs build warns and fails. 
-html_theme_options = dict( - logo=dict( - text=project, - ), - use_edit_page_button=True, - github_url=f"https://github.com/{github_user}/{github_repo}", - icon_links=[ - dict( - name="PyPI", - url=f"https://pypi.org/project/{project}", - icon="fas fa-cube", - ) +html_theme_options = { + "logo": { + "text": project, + }, + "use_edit_page_button": True, + "github_url": f"https://github.com/{github_user}/{github_repo}", + "icon_links": [ + { + "name": "PyPI", + "url": f"https://pypi.org/project/{project}", + "icon": "fas fa-cube", + } ], - switcher=dict( - json_url=switcher_json, - version_match=version, - ), - check_switcher=False, - navbar_end=["theme-switcher", "icon-links", "version-switcher"], - external_links=[ - dict( - name="Release Notes", - url=f"https://github.com/{github_user}/{github_repo}/releases", - ) + "switcher": { + "json_url": switcher_json, + "version_match": version, + }, + "check_switcher": False, + "navbar_end": ["theme-switcher", "icon-links", "version-switcher"], + "external_links": [ + { + "name": "Release Notes", + "url": f"https://github.com/{github_user}/{github_repo}/releases", + } ], -) + "navigation_with_keys": False, +} # A dictionary of values to pass into the template engine’s context for all pages -html_context = dict( - github_user=github_user, - github_repo=project, - github_version=version, - doc_path="docs", -) +html_context = { + "github_user": github_user, + "github_repo": project, + "github_version": version, + "doc_path": "docs", +} # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. html_show_sphinx = False diff --git a/docs/developer/explanations/decisions/0002-switched-to-pip-skeleton.rst b/docs/developer/explanations/decisions/0002-switched-to-pip-skeleton.rst index 4dcfdb2c..33d56981 100644 --- a/docs/developer/explanations/decisions/0002-switched-to-pip-skeleton.rst +++ b/docs/developer/explanations/decisions/0002-switched-to-pip-skeleton.rst @@ -1,5 +1,5 @@ -2. Adopt pytac for project structure -=================================================== +2. Adopt python_copier_template for project structure +===================================================== Date: 2022-02-18 @@ -11,8 +11,8 @@ Accepted Context ------- -We should use the following `pip-skeleton `_. -The skeleton will ensure consistency in developer +We should use the following `python_copier_template `_. +The template will ensure consistency in developer environments and package management. Decision @@ -23,8 +23,8 @@ We have switched to using the skeleton. Consequences ------------ -This module will use a fixed set of tools as developed in pytac -and can pull from this skeleton to update the packaging to the latest techniques. +This module will use a fixed set of tools as developed in python_copier_template +and can pull from this template to update the packaging to the latest techniques. As such, the developer environment may have changed, the following could be different: diff --git a/docs/developer/how-to/build-docs.rst b/docs/developer/how-to/build-docs.rst index 0174fc82..11a5e638 100644 --- a/docs/developer/how-to/build-docs.rst +++ b/docs/developer/how-to/build-docs.rst @@ -35,4 +35,4 @@ changes in this directory too:: $ tox -e docs autobuild -- --watch src -.. _sphinx: https://www.sphinx-doc.org/ \ No newline at end of file +.. 
_sphinx: https://www.sphinx-doc.org/ diff --git a/docs/developer/how-to/lint.rst b/docs/developer/how-to/lint.rst index 8f4e92db..2df258d8 100644 --- a/docs/developer/how-to/lint.rst +++ b/docs/developer/how-to/lint.rst @@ -1,7 +1,7 @@ Run linting using pre-commit ============================ -Code linting is handled by black_, flake8_ and isort_ run under pre-commit_. +Code linting is handled by black_ and ruff_ run under pre-commit_. Running pre-commit ------------------ @@ -26,16 +26,14 @@ repository:: $ black . -Likewise with isort:: +Likewise with ruff:: - $ isort . + $ ruff --fix . -If you get any flake8 issues you will have to fix those manually. +Ruff may not be able to automatically fix all issues; in this case, you will have to fix those manually. VSCode support -------------- -The ``.vscode/settings.json`` will run black and isort formatters as well as -flake8 checking on save. Issues will be highlighted in the editor window. - - +The ``.vscode/settings.json`` will run black formatting as well as +ruff checking on save. Issues will be highlighted in the editor window. diff --git a/docs/developer/how-to/make-release.rst b/docs/developer/how-to/make-release.rst index 4a349884..9491ec55 100644 --- a/docs/developer/how-to/make-release.rst +++ b/docs/developer/how-to/make-release.rst @@ -13,4 +13,4 @@ To make a new release, please follow this checklist: Note that tagging and pushing to the main branch has the same effect except that you will not get the option to edit the release notes. -.. _release: https://github.com/DiamondLightSource/pytac/releases \ No newline at end of file +.. _release: https://github.com/DiamondLightSource/pytac/releases diff --git a/docs/developer/reference/standards.rst b/docs/developer/reference/standards.rst index b78a719e..5a1fd478 100644 --- a/docs/developer/reference/standards.rst +++ b/docs/developer/reference/standards.rst @@ -10,8 +10,7 @@ Code Standards The code in this repository conforms to standards set by the following tools: - black_ for code formatting -- flake8_ for style checks -- isort_ for import ordering +- ruff_ for style checks - mypy_ for static type checking .. seealso:: @@ -61,4 +60,4 @@ Docs follow the underlining convention:: .. seealso:: - How-to guide `../how-to/build-docs` \ No newline at end of file + How-to guide `../how-to/build-docs` diff --git a/docs/developer/tutorials/dev-install.rst b/docs/developer/tutorials/dev-install.rst index 3cac6e6d..6e049c3e 100644 --- a/docs/developer/tutorials/dev-install.rst +++ b/docs/developer/tutorials/dev-install.rst @@ -28,16 +28,21 @@ requires python 3.8 or later) or to run in a container under `VSCode $ cd pytac $ python3 -m venv venv $ source venv/bin/activate - $ pip install -e .[dev] + $ pip install -e '.[dev]' .. tab-item:: VSCode devcontainer .. code:: - $ vscode pytac + $ code pytac # Click on 'Reopen in Container' when prompted # Open a new terminal + .. note:: + + See the epics-containers_ documentation for more complex + use cases, such as integration with podman. + See what was installed ---------------------- @@ -58,3 +63,6 @@ This will run in parallel the following checks: - `../how-to/run-tests` - `../how-to/static-analysis` - `../how-to/lint` + + +.. 
_epics-containers: https://epics-containers.github.io/main/user/tutorials/devcontainer.html diff --git a/docs/index.rst b/docs/index.rst index 2c84d160..580feb34 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -14,16 +14,16 @@ The documentation is split into 2 sections: :link: user/index :link-type: doc - The User Guide contains documentation on how to install and use python3-pip-skeleton. + The User Guide contains documentation on how to install and use pytac. .. grid-item-card:: :material-regular:`code;4em` :link: developer/index :link-type: doc - The Developer Guide contains documentation on how to develop and contribute changes back to python3-pip-skeleton. + The Developer Guide contains documentation on how to develop and contribute changes back to pytac. .. toctree:: :hidden: user/index - developer/index \ No newline at end of file + developer/index diff --git a/docs/user/explanations/docs-structure.rst b/docs/user/explanations/docs-structure.rst index 4d0c6472..f25a09ba 100644 --- a/docs/user/explanations/docs-structure.rst +++ b/docs/user/explanations/docs-structure.rst @@ -15,4 +15,4 @@ They represent four different purposes or functions, and require four different approaches to their creation. Understanding the implications of this will help improve most documentation - often immensely. -`More information on this topic. `_ \ No newline at end of file +`More information on this topic. `_ diff --git a/docs/user/index.rst b/docs/user/index.rst index 77bcdf35..2c94a0c0 100644 --- a/docs/user/index.rst +++ b/docs/user/index.rst @@ -14,7 +14,6 @@ side-bar. :maxdepth: 1 tutorials/installation - tutorials/basic-tutorial +++ @@ -38,7 +37,6 @@ side-bar. :caption: Explanations :maxdepth: 1 - explanations/what-is-pytac explanations/docs-structure +++ diff --git a/docs/user/reference/api.rst b/docs/user/reference/api.rst index 8a47323a..d81cc42b 100644 --- a/docs/user/reference/api.rst +++ b/docs/user/reference/api.rst @@ -2,82 +2,8 @@ API === .. automodule:: pytac - :members: - :undoc-members: - :show-inheritance: - -pytac.cs module ---------------- - -.. automodule:: pytac.cs - :members: - :undoc-members: - :show-inheritance: - -pytac.data_source module ------------------------- - -.. automodule:: pytac.data_source - :members: - :undoc-members: - :show-inheritance: - -pytac.device module -------------------- - -.. automodule:: pytac.device - :members: - :undoc-members: - :show-inheritance: - -pytac.element module --------------------- - -.. automodule:: pytac.element - :members: - :undoc-members: - :show-inheritance: - -pytac.exceptions module ------------------------ - -.. automodule:: pytac.exceptions - :members: - :undoc-members: - :show-inheritance: - -pytac.lattice module --------------------- - -.. automodule:: pytac.lattice - :members: - :undoc-members: - :show-inheritance: - -pytac.load_csv module ---------------------- - -.. automodule:: pytac.load_csv - :members: - :undoc-members: - :show-inheritance: - -pytac.units module ------------------- - -.. automodule:: pytac.units - :members: - :undoc-members: - :show-inheritance: - -pytac.utils module ------------------- - -.. 
automodule:: pytac.utils - :members: - :undoc-members: - :show-inheritance: + ``pytac`` ----------------------------------- This is the internal API reference for pytac diff --git a/docs/user/tutorials/installation.rst b/docs/user/tutorials/installation.rst index 24347108..26f94e64 100644 --- a/docs/user/tutorials/installation.rst +++ b/docs/user/tutorials/installation.rst @@ -4,7 +4,7 @@ Installation Check your version of python ---------------------------- -You will need python 3.7 or later. You can check your version of python by +You will need python 3.8 or later. You can check your version of python by typing into a terminal:: $ python3 --version diff --git a/pyproject.toml b/pyproject.toml index 9ce04cf4..f13d33d5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,15 +5,20 @@ build-backend = "setuptools.build_meta" [project] name = "pytac" classifiers = [ - "Development Status :: 4 - Beta", + "Development Status :: 3 - Alpha", "License :: OSI Approved :: Apache Software License", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", ] -description = "Python Toolkit for Accelerator Controls" -dependencies = ["numpy", "scipy", "setuptools>=60.0.0"] +description = "Python Toolkit for Accelerator Controls (Pytac) is a Python library for working with elements of particle accelerators, developed at Diamond Light Source." +dependencies = [ + "typing-extensions;python_version<'3.8'", + "numpy", + "scipy", +] # Add project dependencies here, e.g. ["click", "numpy"] dynamic = ["version"] license.file = "LICENSE" readme = "README.rst" @@ -23,17 +28,15 @@ requires-python = ">=3.7" dev = [ "black", "mypy", - "flake8-isort", - "Flake8-pyproject", "pipdeptree", "pre-commit", "pydata-sphinx-theme>=0.12", "pytest", "pytest-cov", + "ruff", "sphinx-autobuild", "sphinx-copybutton", "sphinx-design", - "testfixtures", "tox-direct", "types-mock", ] @@ -45,8 +48,8 @@ pytac = "pytac.__main__:main" GitHub = "https://github.com/DiamondLightSource/pytac" [[project.authors]] # Further authors may be added by duplicating this section -email = "diamhighlvlapps@diamond.ac.uk" -name = "Will Rogers" +email = "philip.smith@diamond.ac.uk" +name = "Phil Smith" [tool.setuptools_scm] @@ -55,32 +58,15 @@ write_to = "src/pytac/_version.py" [tool.mypy] ignore_missing_imports = true # Ignore missing stubs in imported modules -[tool.isort] -float_to_top = true -profile = "black" - -[tool.flake8] -extend-ignore = [ - "E203", # See https://github.com/PyCQA/pycodestyle/issues/373 - "E402", # allow isort:skip - "F811", # support typing.overload decorator - "F722", # allow Annotated[typ, some_func("some string")] -] -# per-file-ignore = ["src/pytac/__init__.py:E402"] -max-line-length = 88 # Respect black's line length (default 88), -exclude = [".tox", "venv"] - - [tool.pytest.ini_options] # Run pytest with all our checkers, and don't spam us with massive tracebacks on error addopts = """ --tb=native -vv --doctest-modules --doctest-glob="*.rst" - --cov=pytac --cov-report term --cov-report xml:cov.xml """ # https://iscinumpy.gitlab.io/post/bound-version-constraints/#watch-for-warnings filterwarnings = "error" # Doctest python code in docs, python code in src docstrings, test functions in tests -testpaths = "src tests" +testpaths = "docs src tests" [tool.coverage.run] data_file = "/tmp/pytac.coverage" @@ -100,15 +86,27 @@ skipsdist=True # Don't create a virtualenv for the 
command, requires tox-direct plugin direct = True passenv = * -allowlist_externals = - pytest +allowlist_externals = + pytest pre-commit mypy sphinx-build sphinx-autobuild commands = - pytest: pytest {posargs} + pytest: pytest --cov=pytac --cov-report term --cov-report xml:cov.xml {posargs} mypy: mypy src tests {posargs} pre-commit: pre-commit run --all-files {posargs} docs: sphinx-{posargs:build -EW --keep-going} -T docs build/html """ + + +[tool.ruff] +src = ["src", "tests"] +line-length = 88 +select = [ + "C4", # flake8-comprehensions - https://beta.ruff.rs/docs/rules/#flake8-comprehensions-c4 + "E", # pycodestyle errors - https://beta.ruff.rs/docs/rules/#error-e + "F", # pyflakes rules - https://beta.ruff.rs/docs/rules/#pyflakes-f + "W", # pycodestyle warnings - https://beta.ruff.rs/docs/rules/#warning-w + "I001", # isort +] From c3e4036a812949f3a33fd614ef1a5e3768cea853 Mon Sep 17 00:00:00 2001 From: Phil Smith Date: Mon, 13 Oct 2025 11:58:55 +0000 Subject: [PATCH 2/7] Update to python-copier-template 4.3.0 --- .copier-answers.yml | 6 +- .devcontainer/devcontainer.json | 52 ++-- .github/CONTRIBUTING.md | 27 ++ .github/CONTRIBUTING.rst | 35 --- .github/ISSUE_TEMPLATE/bug_report.md | 21 ++ .github/ISSUE_TEMPLATE/issue.md | 13 + .../pull_request_template.md | 8 + .../actions/install_requirements/action.yml | 67 ++--- .github/dependabot.yml | 12 +- .github/pages/make_switcher.py | 29 +-- .github/workflows/_dist.yml | 36 +++ .github/workflows/{docs.yml => _docs.yml} | 29 ++- .github/workflows/_pypi.yml | 19 ++ .github/workflows/_release.yml | 32 +++ .github/workflows/_test.yml | 62 +++++ .github/workflows/_tox.yml | 22 ++ .github/workflows/ci.yml | 54 ++++ .github/workflows/code.yml | 243 ------------------ .github/workflows/docs_clean.yml | 43 ---- .github/workflows/linkcheck.yml | 28 -- .github/workflows/periodic.yml | 13 + .gitignore | 2 +- .pre-commit-config.yaml | 19 +- .vscode/extensions.json | 5 - .vscode/launch.json | 8 +- .vscode/settings.json | 22 +- Dockerfile | 40 +-- README.md | 27 ++ README.rst | 55 ---- catalog-info.yaml | 6 +- docs/_api.rst | 16 ++ docs/_templates/custom-module-template.rst | 37 +++ docs/conf.py | 52 ++-- docs/developer/explanations/decisions.rst | 17 -- .../0001-record-architecture-decisions.rst | 26 -- .../0002-switched-to-pip-skeleton.rst | 35 --- docs/developer/how-to/build-docs.rst | 38 --- docs/developer/how-to/contribute.rst | 1 - docs/developer/how-to/lint.rst | 39 --- docs/developer/how-to/make-release.rst | 16 -- docs/developer/how-to/pin-requirements.rst | 74 ------ docs/developer/how-to/run-tests.rst | 12 - docs/developer/how-to/static-analysis.rst | 8 - docs/developer/how-to/test-container.rst | 25 -- docs/developer/how-to/update-tools.rst | 16 -- docs/developer/index.rst | 64 ----- docs/developer/reference/standards.rst | 63 ----- docs/developer/tutorials/dev-install.rst | 68 ----- docs/explanations.md | 10 + docs/explanations/decisions.md | 12 + .../0001-record-architecture-decisions.md | 18 ++ ...0002-switched-to-python-copier-template.md | 28 ++ docs/explanations/decisions/COPYME | 19 ++ .../what-is-pytac.md} | 0 docs/genindex.md | 3 + docs/genindex.rst | 5 - docs/how-to.md | 10 + docs/how-to/contribute.md | 2 + docs/images/dls-favicon.ico | Bin 99678 -> 0 bytes docs/images/dls-logo.svg | 20 +- docs/index.md | 56 ++++ docs/index.rst | 29 --- docs/reference.md | 12 + docs/tutorials.md | 10 + docs/{user => }/tutorials/basic-tutorial.rst | 0 docs/tutorials/installation.md | 42 +++ docs/user/explanations/docs-structure.rst | 18 -- 
docs/user/how-to/run-container.rst | 15 -- docs/user/index.rst | 57 ---- docs/user/reference/api.rst | 14 - docs/user/tutorials/installation.rst | 38 --- pyproject.toml | 50 ++-- src/pytac/__init__.py | 19 +- src/pytac/__main__.py | 14 +- tests/conftest.py | 4 +- tests/test_cothread_cs.py | 2 +- tests/test_data_source.py | 2 +- tests/test_device.py | 2 +- tests/test_element.py | 2 +- tests/test_epics.py | 2 +- tests/test_lattice.py | 2 +- tests/test_load.py | 2 +- tests/test_machine.py | 2 +- tests/test_units.py | 2 +- 84 files changed, 815 insertions(+), 1350 deletions(-) create mode 100644 .github/CONTRIBUTING.md delete mode 100644 .github/CONTRIBUTING.rst create mode 100644 .github/ISSUE_TEMPLATE/bug_report.md create mode 100644 .github/ISSUE_TEMPLATE/issue.md create mode 100644 .github/PULL_REQUEST_TEMPLATE/pull_request_template.md create mode 100644 .github/workflows/_dist.yml rename .github/workflows/{docs.yml => _docs.yml} (69%) create mode 100644 .github/workflows/_pypi.yml create mode 100644 .github/workflows/_release.yml create mode 100644 .github/workflows/_test.yml create mode 100644 .github/workflows/_tox.yml create mode 100644 .github/workflows/ci.yml delete mode 100644 .github/workflows/code.yml delete mode 100644 .github/workflows/docs_clean.yml delete mode 100644 .github/workflows/linkcheck.yml create mode 100644 .github/workflows/periodic.yml create mode 100644 README.md delete mode 100644 README.rst create mode 100644 docs/_api.rst create mode 100644 docs/_templates/custom-module-template.rst delete mode 100644 docs/developer/explanations/decisions.rst delete mode 100644 docs/developer/explanations/decisions/0001-record-architecture-decisions.rst delete mode 100644 docs/developer/explanations/decisions/0002-switched-to-pip-skeleton.rst delete mode 100644 docs/developer/how-to/build-docs.rst delete mode 100644 docs/developer/how-to/contribute.rst delete mode 100644 docs/developer/how-to/lint.rst delete mode 100644 docs/developer/how-to/make-release.rst delete mode 100644 docs/developer/how-to/pin-requirements.rst delete mode 100644 docs/developer/how-to/run-tests.rst delete mode 100644 docs/developer/how-to/static-analysis.rst delete mode 100644 docs/developer/how-to/test-container.rst delete mode 100644 docs/developer/how-to/update-tools.rst delete mode 100644 docs/developer/index.rst delete mode 100644 docs/developer/reference/standards.rst delete mode 100644 docs/developer/tutorials/dev-install.rst create mode 100644 docs/explanations.md create mode 100644 docs/explanations/decisions.md create mode 100644 docs/explanations/decisions/0001-record-architecture-decisions.md create mode 100644 docs/explanations/decisions/0002-switched-to-python-copier-template.md create mode 100644 docs/explanations/decisions/COPYME rename docs/{user/explanations/what-is-pytac.rst => explanations/what-is-pytac.md} (100%) create mode 100644 docs/genindex.md delete mode 100644 docs/genindex.rst create mode 100644 docs/how-to.md create mode 100644 docs/how-to/contribute.md delete mode 100644 docs/images/dls-favicon.ico create mode 100644 docs/index.md delete mode 100644 docs/index.rst create mode 100644 docs/reference.md create mode 100644 docs/tutorials.md rename docs/{user => }/tutorials/basic-tutorial.rst (100%) create mode 100644 docs/tutorials/installation.md delete mode 100644 docs/user/explanations/docs-structure.rst delete mode 100644 docs/user/how-to/run-container.rst delete mode 100644 docs/user/index.rst delete mode 100644 docs/user/reference/api.rst delete mode 100644 
docs/user/tutorials/installation.rst diff --git a/.copier-answers.yml b/.copier-answers.yml index 317eda86..92ccde52 100644 --- a/.copier-answers.yml +++ b/.copier-answers.yml @@ -1,9 +1,11 @@ # Changes here will be overwritten by Copier -_commit: 1.0.0 +_commit: 4.3.0 _src_path: https://github.com/DiamondLightSource/python-copier-template.git author_email: philip.smith@diamond.ac.uk author_name: Phil Smith +component_lifecycle: production component_owner: group:default/high-level-apps +component_type: library description: Python Toolkit for Accelerator Controls (Pytac) is a Python library for working with elements of particle accelerators, developed at Diamond Light Source. distribution_name: pytac @@ -12,4 +14,6 @@ docs_type: sphinx git_platform: github.com github_org: DiamondLightSource package_name: pytac +pypi: true repo_name: pytac +type_checker: mypy diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 44de8d36..fce9dd59 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -3,52 +3,44 @@ "name": "Python 3 Developer Container", "build": { "dockerfile": "../Dockerfile", - "target": "build", - // Only upgrade pip, we will install the project below - "args": { - "PIP_OPTIONS": "--upgrade pip" - } + "target": "developer" }, "remoteEnv": { + // Allow X11 apps to run inside the container "DISPLAY": "${localEnv:DISPLAY}" }, - // Add the URLs of features you want added when the container is built. - "features": { - "ghcr.io/devcontainers/features/common-utils:1": { - "username": "none", - "upgradePackages": false - } - }, - // Set *default* container specific settings.json values on container create. - "settings": { - "python.defaultInterpreterPath": "/venv/bin/python" - }, "customizations": { "vscode": { + // Set *default* container specific settings.json values on container create. + "settings": { + "python.defaultInterpreterPath": "/venv/bin/python" + }, // Add the IDs of extensions you want installed when the container is created. 
"extensions": [ "ms-python.python", + "github.vscode-github-actions", "tamasfe.even-better-toml", "redhat.vscode-yaml", - "ryanluker.vscode-coverage-gutters" + "ryanluker.vscode-coverage-gutters", + "charliermarsh.ruff", + "ms-azuretools.vscode-docker" ] } }, - // Make sure the files we are mapping into the container exist on the host - "initializeCommand": "bash -c 'for i in $HOME/.inputrc; do [ -f $i ] || touch $i; done'", + "features": { + // add in eternal history and other bash features + "ghcr.io/diamondlightsource/devcontainer-features/bash-config:1": {} + }, + // Create the config folder for the bash-config feature + "initializeCommand": "mkdir -p ${localEnv:HOME}/.config/bash-config", "runArgs": [ + // Allow the container to access the host X11 display and EPICS CA "--net=host", - "--security-opt=label=type:container_runtime_t" - ], - "mounts": [ - "source=${localEnv:HOME}/.ssh,target=/root/.ssh,type=bind", - "source=${localEnv:HOME}/.inputrc,target=/root/.inputrc,type=bind", - // map in home directory - not strictly necessary but useful - "source=${localEnv:HOME},target=${localEnv:HOME},type=bind,consistency=cached" + // Make sure SELinux does not disable with access to host filesystems like tmp + "--security-opt=label=disable" ], - // make the workspace folder the same inside and outside of the container - "workspaceMount": "source=${localWorkspaceFolder},target=${localWorkspaceFolder},type=bind", - "workspaceFolder": "${localWorkspaceFolder}", + // Mount the parent as /workspaces so we can pip install peers as editable + "workspaceMount": "source=${localWorkspaceFolder}/..,target=/workspaces,type=bind", // After the container is created, install the python project in editable form - "postCreateCommand": "pip install -e '.[dev]'" + "postCreateCommand": "pip install $([ -f dev-requirements.txt ] && echo '-c dev-requirements.txt') -e '.[dev]' && pre-commit install" } diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md new file mode 100644 index 00000000..e0addf5d --- /dev/null +++ b/.github/CONTRIBUTING.md @@ -0,0 +1,27 @@ +# Contribute to the project + +Contributions and issues are most welcome! All issues and pull requests are +handled through [GitHub](https://github.com/DiamondLightSource/pytac/issues). Also, please check for any existing issues before +filing a new one. If you have a great idea but it involves big changes, please +file a ticket before making a pull request! We want to make sure you don't spend +your time coding something that might not fit the scope of the project. + +## Issue or Discussion? + +Github also offers [discussions](https://github.com/DiamondLightSource/pytac/discussions) as a place to ask questions and share ideas. If +your issue is open ended and it is not obvious when it can be "closed", please +raise it as a discussion instead. + +## Code Coverage + +While 100% code coverage does not make a library bug-free, it significantly +reduces the number of easily caught bugs! Please make sure coverage remains the +same or is improved by a pull request! + +## Developer Information + +It is recommended that developers use a [vscode devcontainer](https://code.visualstudio.com/docs/devcontainers/containers). This repository contains configuration to set up a containerized development environment that suits its own needs. + +This project was created using the [Diamond Light Source Copier Template](https://github.com/DiamondLightSource/python-copier-template) for Python projects. 
+ +For more information on common tasks like setting up a developer environment, running the tests, and setting a pre-commit hook, see the template's [How-to guides](https://diamondlightsource.github.io/python-copier-template/4.3.0/how-to.html). diff --git a/.github/CONTRIBUTING.rst b/.github/CONTRIBUTING.rst deleted file mode 100644 index a1c03f6e..00000000 --- a/.github/CONTRIBUTING.rst +++ /dev/null @@ -1,35 +0,0 @@ -Contributing to the project -=========================== - -Contributions and issues are most welcome! All issues and pull requests are -handled through GitHub_. Also, please check for any existing issues before -filing a new one. If you have a great idea but it involves big changes, please -file a ticket before making a pull request! We want to make sure you don't spend -your time coding something that might not fit the scope of the project. - -.. _GitHub: https://github.com/DiamondLightSource/pytac/issues - -Issue or Discussion? --------------------- - -Github also offers discussions_ as a place to ask questions and share ideas. If -your issue is open ended and it is not obvious when it can be "closed", please -raise it as a discussion instead. - -.. _discussions: https://github.com/DiamondLightSource/pytac/discussions - -Code coverage -------------- - -While 100% code coverage does not make a library bug-free, it significantly -reduces the number of easily caught bugs! Please make sure coverage remains the -same or is improved by a pull request! - -Developer guide ---------------- - -The `Developer Guide`_ contains information on setting up a development -environment, running the tests and what standards the code and documentation -should follow. - -.. _Developer Guide: https://DiamondLightSource.github.io/pytac/main/developer/how-to/contribute.html diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 00000000..92087f5b --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,21 @@ +--- +name: Bug Report +about: The template to use for reporting bugs and usability issues +title: " " +labels: 'bug' +assignees: '' + +--- + +Describe the bug, including a clear and concise description of the expected behaviour, the actual behavior and the context in which you encountered it (ideally include details of your environment). + +## Steps To Reproduce +Steps to reproduce the behavior: +1. Go to '...' +2. Click on '....' +3. Scroll down to '....' +4. See error + + +## Acceptance Criteria +- Specific criteria that will be used to judge if the issue is fixed diff --git a/.github/ISSUE_TEMPLATE/issue.md b/.github/ISSUE_TEMPLATE/issue.md new file mode 100644 index 00000000..52c84dd8 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/issue.md @@ -0,0 +1,13 @@ +--- +name: Issue +about: The standard template to use for feature requests, design discussions and tasks +title: " " +labels: '' +assignees: '' + +--- + +A brief description of the issue, including specific stakeholders and the business case where appropriate + +## Acceptance Criteria +- Specific criteria that will be used to judge if the issue is fixed diff --git a/.github/PULL_REQUEST_TEMPLATE/pull_request_template.md b/.github/PULL_REQUEST_TEMPLATE/pull_request_template.md new file mode 100644 index 00000000..8200afe5 --- /dev/null +++ b/.github/PULL_REQUEST_TEMPLATE/pull_request_template.md @@ -0,0 +1,8 @@ +Fixes #ISSUE + +### Instructions to reviewer on how to test: +1. Do thing x +2. 
Confirm thing y happens + +### Checks for reviewer +- [ ] Would the PR title make sense to a user on a set of release notes diff --git a/.github/actions/install_requirements/action.yml b/.github/actions/install_requirements/action.yml index 79d1a71e..7a7bda0d 100644 --- a/.github/actions/install_requirements/action.yml +++ b/.github/actions/install_requirements/action.yml @@ -1,60 +1,35 @@ name: Install requirements -description: Run pip install with requirements and upload resulting requirements +description: Install a version of python then call pip install and report what was installed inputs: - requirements_file: - description: Name of requirements file to use and upload - required: true - install_options: + python-version: + description: Python version to install, default is from Dockerfile + default: "dev" + pip-install: description: Parameters to pass to pip install - required: true - artifact_name: - description: A user friendly name to give the produced artifacts - required: true - python_version: - description: Python version to install - default: "3.x" + default: "$([ -f dev-requirements.txt ] && echo '-c dev-requirements.txt') -e .[dev]" runs: using: composite - steps: + - name: Get version of python + run: | + PYTHON_VERSION="${{ inputs.python-version }}" + if [ $PYTHON_VERSION == "dev" ]; then + # python version from Dockerfile, removing potential pinned sha + PYTHON_VERSION=$(sed -Ene "s/ARG PYTHON_VERSION=([0-9\.]+).*/\1/p" Dockerfile) + fi + echo "PYTHON_VERSION=$PYTHON_VERSION" >> "$GITHUB_ENV" + shell: bash + - name: Setup python uses: actions/setup-python@v5 with: - python-version: ${{ inputs.python_version }} + python-version: ${{ env.PYTHON_VERSION }} - - name: Pip install - run: | - touch ${{ inputs.requirements_file }} - # -c uses requirements.txt as constraints, see 'Validate requirements file' - pip install -c ${{ inputs.requirements_file }} ${{ inputs.install_options }} + - name: Install packages + run: pip install ${{ inputs.pip-install }} shell: bash - - name: Create lockfile - run: | - mkdir -p lockfiles - pip freeze --exclude-editable > lockfiles/${{ inputs.requirements_file }} - # delete the self referencing line and make sure it isn't blank - sed -i'' -e '/file:/d' lockfiles/${{ inputs.requirements_file }} - shell: bash - - - name: Upload lockfiles - uses: actions/upload-artifact@v4.0.0 - with: - name: lockfiles-${{ inputs.python_version }}-${{ inputs.artifact_name }}-${{ github.sha }} - path: lockfiles - - # This eliminates the class of problems where the requirements being given no - # longer match what the packages themselves dictate. E.g. In the rare instance - # where I install some-package which used to depend on vulnerable-dependency - # but now uses good-dependency (despite being nominally the same version) - # pip will install both if given a requirements file with -r - - name: If requirements file exists, check it matches pip installed packages - run: | - if [ -s ${{ inputs.requirements_file }} ]; then - if ! 
diff -u ${{ inputs.requirements_file }} lockfiles/${{ inputs.requirements_file }}; then - echo "Error: ${{ inputs.requirements_file }} need the above changes to be exhaustive" - exit 1 - fi - fi + - name: Report what was installed + run: pip freeze shell: bash diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 2d1af873..d2c2a0d6 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -10,11 +10,19 @@ updates: schedule: interval: "weekly" groups: - github-artifacts: + actions: patterns: - - actions/*-artifact + - "*" + commit-message: + prefix: "chore" - package-ecosystem: "pip" directory: "/" schedule: interval: "weekly" + groups: + dev-dependencies: + patterns: + - "*" + commit-message: + prefix: "chore" diff --git a/.github/pages/make_switcher.py b/.github/pages/make_switcher.py index ae227ab7..c06813af 100755 --- a/.github/pages/make_switcher.py +++ b/.github/pages/make_switcher.py @@ -1,30 +1,32 @@ +"""Make switcher.json to allow docs to switch between different versions.""" + import json import logging from argparse import ArgumentParser from pathlib import Path from subprocess import CalledProcessError, check_output -from typing import List, Optional -def report_output(stdout: bytes, label: str) -> List[str]: +def report_output(stdout: bytes, label: str) -> list[str]: + """Print and return something received frm stdout.""" ret = stdout.decode().strip().split("\n") print(f"{label}: {ret}") return ret -def get_branch_contents(ref: str) -> List[str]: +def get_branch_contents(ref: str) -> list[str]: """Get the list of directories in a branch.""" stdout = check_output(["git", "ls-tree", "-d", "--name-only", ref]) return report_output(stdout, "Branch contents") -def get_sorted_tags_list() -> List[str]: +def get_sorted_tags_list() -> list[str]: """Get a list of sorted tags in descending order from the repository.""" stdout = check_output(["git", "tag", "-l", "--sort=-v:refname"]) return report_output(stdout, "Tags list") -def get_versions(ref: str, add: Optional[str], remove: Optional[str]) -> List[str]: +def get_versions(ref: str, add: str | None) -> list[str]: """Generate the file containing the list of all GitHub Pages builds.""" # Get the directories (i.e. 
builds) from the GitHub Pages branch try: @@ -36,15 +38,12 @@ def get_versions(ref: str, add: Optional[str], remove: Optional[str]) -> List[st # Add and remove from the list of builds if add: builds.add(add) - if remove: - assert remove in builds, f"Build '{remove}' not in {sorted(builds)}" - builds.remove(remove) # Get a sorted list of tags tags = get_sorted_tags_list() # Make the sorted versions list from main branches and tags - versions: List[str] = [] + versions: list[str] = [] for version in ["master", "main"] + tags: if version in builds: versions.append(version) @@ -56,7 +55,8 @@ def get_versions(ref: str, add: Optional[str], remove: Optional[str]) -> List[st return versions -def write_json(path: Path, repository: str, versions: str): +def write_json(path: Path, repository: str, versions: list[str]): + """Write the JSON switcher to path.""" org, repo_name = repository.split("/") struct = [ {"version": version, "url": f"https://{org}.github.io/{repo_name}/{version}/"} @@ -68,17 +68,14 @@ def write_json(path: Path, repository: str, versions: str): def main(args=None): + """Parse args and write switcher.""" parser = ArgumentParser( - description="Make a versions.txt file from gh-pages directories" + description="Make a versions.json file from gh-pages directories" ) parser.add_argument( "--add", help="Add this directory to the list of existing directories", ) - parser.add_argument( - "--remove", - help="Remove this directory from the list of existing directories", - ) parser.add_argument( "repository", help="The GitHub org and repository name: ORG/REPO", @@ -91,7 +88,7 @@ def main(args=None): args = parser.parse_args(args) # Write the versions file - versions = get_versions("origin/gh-pages", args.add, args.remove) + versions = get_versions("origin/gh-pages", args.add) write_json(args.output, args.repository, versions) diff --git a/.github/workflows/_dist.yml b/.github/workflows/_dist.yml new file mode 100644 index 00000000..b1c4c93c --- /dev/null +++ b/.github/workflows/_dist.yml @@ -0,0 +1,36 @@ +on: + workflow_call: + +jobs: + build: + runs-on: "ubuntu-latest" + + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + # Need this to get version number from last tag + fetch-depth: 0 + + - name: Build sdist and wheel + run: > + export SOURCE_DATE_EPOCH=$(git log -1 --pretty=%ct) && + pipx run build + + - name: Upload sdist and wheel as artifacts + uses: actions/upload-artifact@v4 + with: + name: dist + path: dist + + - name: Check for packaging errors + run: pipx run twine check --strict dist/* + + - name: Install produced wheel + uses: ./.github/actions/install_requirements + with: + pip-install: dist/*.whl + + - name: Test module --version works using the installed wheel + # If more than one module in src/ replace with module name to test + run: python -m $(ls --hide='*.egg-info' src | head -1) --version diff --git a/.github/workflows/docs.yml b/.github/workflows/_docs.yml similarity index 69% rename from .github/workflows/docs.yml rename to .github/workflows/_docs.yml index 3c29ff94..1f5491ba 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/_docs.yml @@ -1,17 +1,14 @@ -name: Docs CI - on: - push: - pull_request: + workflow_call: + jobs: - docs: - if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.repository + build: runs-on: ubuntu-latest steps: - name: Avoid git conflicts when tag and branch pushed at same time - if: startsWith(github.ref, 'refs/tags') + if: github.ref_type == 'tag' run: sleep 60 - name: 
Checkout @@ -21,19 +18,23 @@ jobs: fetch-depth: 0 - name: Install system packages - # Can delete this if you don't use graphviz in your docs run: sudo apt-get install graphviz - name: Install python packages uses: ./.github/actions/install_requirements - with: - requirements_file: requirements-dev-3.x.txt - install_options: -e .[dev] - artifact_name: docs - name: Build docs run: tox -e docs + - name: Remove environment.pickle + run: rm build/html/.doctrees/environment.pickle + + - name: Upload built docs artifact + uses: actions/upload-artifact@v4 + with: + name: docs + path: build + - name: Sanitize ref name for docs version run: echo "DOCS_VERSION=${GITHUB_REF_NAME//[^A-Za-z0-9._-]/_}" >> $GITHUB_ENV @@ -44,10 +45,10 @@ jobs: run: python .github/pages/make_switcher.py --add $DOCS_VERSION ${{ github.repository }} .github/pages/switcher.json - name: Publish Docs to gh-pages - if: github.event_name == 'push' && github.actor != 'dependabot[bot]' + if: github.ref_type == 'tag' || github.ref_name == 'main' # We pin to the SHA, not the tag, for security reasons. # https://docs.github.com/en/actions/learn-github-actions/security-hardening-for-github-actions#using-third-party-actions - uses: peaceiris/actions-gh-pages@64b46b4226a4a12da2239ba3ea5aa73e3163c75b # v3.9.1 + uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e # v4.0.0 with: github_token: ${{ secrets.GITHUB_TOKEN }} publish_dir: .github/pages diff --git a/.github/workflows/_pypi.yml b/.github/workflows/_pypi.yml new file mode 100644 index 00000000..8032bbaa --- /dev/null +++ b/.github/workflows/_pypi.yml @@ -0,0 +1,19 @@ +on: + workflow_call: + +jobs: + upload: + runs-on: ubuntu-latest + environment: release + + steps: + - name: Download dist artifact + uses: actions/download-artifact@v4 + with: + name: dist + path: dist + + - name: Publish to PyPI using trusted publishing + uses: pypa/gh-action-pypi-publish@release/v1 + with: + attestations: false diff --git a/.github/workflows/_release.yml b/.github/workflows/_release.yml new file mode 100644 index 00000000..3f949fcc --- /dev/null +++ b/.github/workflows/_release.yml @@ -0,0 +1,32 @@ +on: + workflow_call: + +jobs: + artifacts: + runs-on: ubuntu-latest + + steps: + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + merge-multiple: true + + - name: Zip up docs + run: | + set -vxeuo pipefail + if [ -d html ]; then + mv html $GITHUB_REF_NAME + zip -r docs.zip $GITHUB_REF_NAME + rm -rf $GITHUB_REF_NAME + fi + + - name: Create GitHub Release + # We pin to the SHA, not the tag, for security reasons. 
+ # https://docs.github.com/en/actions/learn-github-actions/security-hardening-for-github-actions#using-third-party-actions + uses: softprops/action-gh-release@da05d552573ad5aba039eaac05058a918a7bf631 # v2.2.2 + with: + prerelease: ${{ contains(github.ref_name, 'a') || contains(github.ref_name, 'b') || contains(github.ref_name, 'rc') }} + files: "*" + generate_release_notes: true + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/_test.yml b/.github/workflows/_test.yml new file mode 100644 index 00000000..552b29d0 --- /dev/null +++ b/.github/workflows/_test.yml @@ -0,0 +1,62 @@ +on: + workflow_call: + inputs: + python-version: + type: string + description: The version of python to install + required: true + runs-on: + type: string + description: The runner to run this job on + required: true + secrets: + CODECOV_TOKEN: + required: true + +env: + # https://github.com/pytest-dev/pytest/issues/2042 + PY_IGNORE_IMPORTMISMATCH: "1" + +jobs: + run: + runs-on: ${{ inputs.runs-on }} + + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + # Need this to get version number from last tag + fetch-depth: 0 + + - if: inputs.python-version == 'dev' + name: Install dev versions of python packages + uses: ./.github/actions/install_requirements + + - if: inputs.python-version == 'dev' + name: Write the requirements as an artifact + run: pip freeze --exclude-editable > /tmp/dev-requirements.txt + + - if: inputs.python-version == 'dev' + name: Upload dev-requirements.txt + uses: actions/upload-artifact@v4 + with: + name: dev-requirements + path: /tmp/dev-requirements.txt + + - if: inputs.python-version != 'dev' + name: Install latest versions of python packages + uses: ./.github/actions/install_requirements + with: + python-version: ${{ inputs.python-version }} + pip-install: ".[dev]" + + - name: Run tests + run: tox -e tests + + - name: Upload coverage to Codecov + uses: codecov/codecov-action@v5 + with: + name: ${{ inputs.python-version }}/${{ inputs.runs-on }} + files: cov.xml + env: + CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} diff --git a/.github/workflows/_tox.yml b/.github/workflows/_tox.yml new file mode 100644 index 00000000..a13536d3 --- /dev/null +++ b/.github/workflows/_tox.yml @@ -0,0 +1,22 @@ +on: + workflow_call: + inputs: + tox: + type: string + description: What to run under tox + required: true + + +jobs: + run: + runs-on: "ubuntu-latest" + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Install python packages + uses: ./.github/actions/install_requirements + + - name: Run tox + run: tox -e ${{ inputs.tox }} diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 00000000..571730fb --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,54 @@ +name: CI + +on: + push: + branches: + - main + tags: + - '*' + pull_request: + +jobs: + + lint: + uses: ./.github/workflows/_tox.yml + with: + tox: pre-commit,type-checking + + test: + strategy: + matrix: + runs-on: ["ubuntu-latest"] # can add windows-latest, macos-latest + python-version: ["3.11", "3.12", "3.13"] + include: + # Include one that runs in the dev environment + - runs-on: "ubuntu-latest" + python-version: "dev" + fail-fast: false + uses: ./.github/workflows/_test.yml + with: + runs-on: ${{ matrix.runs-on }} + python-version: ${{ matrix.python-version }} + secrets: + CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} + + docs: + uses: ./.github/workflows/_docs.yml + + + dist: + uses: ./.github/workflows/_dist.yml + + pypi: + needs: [dist, test] + 
if: github.ref_type == 'tag' + uses: ./.github/workflows/_pypi.yml + permissions: + id-token: write + + release: + needs: [dist, test, docs] + if: github.ref_type == 'tag' + uses: ./.github/workflows/_release.yml + permissions: + contents: write diff --git a/.github/workflows/code.yml b/.github/workflows/code.yml deleted file mode 100644 index 364ff43d..00000000 --- a/.github/workflows/code.yml +++ /dev/null @@ -1,243 +0,0 @@ -name: Code CI - -on: - push: - pull_request: -env: - # The target python version, which must match the Dockerfile version - CONTAINER_PYTHON: "3.11" - DIST_WHEEL_PATH: dist-${{ github.sha }} - -jobs: - lint: - # pull requests are a duplicate of a branch push if within the same repo. - if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.repository - runs-on: ubuntu-latest - - steps: - - name: Checkout - uses: actions/checkout@v4 - - - name: Install python packages - uses: ./.github/actions/install_requirements - with: - requirements_file: requirements-dev-3.x.txt - install_options: -e .[dev] - artifact_name: lint - - - name: Lint - run: tox -e pre-commit,mypy - - test: - if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.repository - strategy: - fail-fast: false - matrix: - os: ["ubuntu-latest"] # can add windows-latest, macos-latest - python: ["3.8", "3.9", "3.10", "3.11"] - install: ["-e .[dev]"] - # Make one version be non-editable to test both paths of version code - include: - - os: "ubuntu-latest" - python: "3.7" - install: ".[dev]" - - runs-on: ${{ matrix.os }} - env: - # https://github.com/pytest-dev/pytest/issues/2042 - PY_IGNORE_IMPORTMISMATCH: "1" - - steps: - - name: Checkout - uses: actions/checkout@v4 - with: - # Need this to get version number from last tag - fetch-depth: 0 - - - name: Install python packages - uses: ./.github/actions/install_requirements - with: - python_version: ${{ matrix.python }} - requirements_file: requirements-test-${{ matrix.os }}-${{ matrix.python }}.txt - install_options: ${{ matrix.install }} - artifact_name: tests - - - name: List dependency tree - run: pipdeptree - - - name: Run tests - run: tox -e pytest - - - name: Upload coverage to Codecov - uses: codecov/codecov-action@v3 - with: - name: ${{ matrix.python }}/${{ matrix.os }} - files: cov.xml - - dist: - if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.repository - runs-on: "ubuntu-latest" - - steps: - - name: Checkout - uses: actions/checkout@v4 - with: - # Need this to get version number from last tag - fetch-depth: 0 - - - name: Build sdist and wheel - run: | - export SOURCE_DATE_EPOCH=$(git log -1 --pretty=%ct) && \ - pipx run build - - - name: Upload sdist and wheel as artifacts - uses: actions/upload-artifact@v4.0.0 - with: - name: ${{ env.DIST_WHEEL_PATH }} - path: dist - - - name: Check for packaging errors - run: pipx run twine check --strict dist/* - - - name: Install python packages - uses: ./.github/actions/install_requirements - with: - python_version: ${{env.CONTAINER_PYTHON}} - requirements_file: requirements.txt - install_options: dist/*.whl - artifact_name: dist - - - name: Test module --version works using the installed wheel - # If more than one module in src/ replace with module name to test - run: python -m $(ls --hide='*.egg-info' src | head -1) --version - - container: - needs: [lint, dist, test] - runs-on: ubuntu-latest - - permissions: - contents: read - packages: write - - env: - TEST_TAG: "testing" - 
- steps: - - name: Checkout - uses: actions/checkout@v4 - - # image names must be all lower case - - name: Generate image repo name - run: echo IMAGE_REPOSITORY=ghcr.io/$(tr '[:upper:]' '[:lower:]' <<< "${{ github.repository }}") >> $GITHUB_ENV - - - name: Set lockfile location in environment - run: | - echo "DIST_LOCKFILE_PATH=lockfiles-${{ env.CONTAINER_PYTHON }}-dist-${{ github.sha }}" >> $GITHUB_ENV - - - name: Download wheel and lockfiles - uses: actions/download-artifact@v4.1.0 - with: - path: artifacts/ - pattern: "*dist*" - - - name: Log in to GitHub Docker Registry - if: github.event_name != 'pull_request' - uses: docker/login-action@v3 - with: - registry: ghcr.io - username: ${{ github.actor }} - password: ${{ secrets.GITHUB_TOKEN }} - - - name: Set up Docker Buildx - id: buildx - uses: docker/setup-buildx-action@v3 - - - name: Build and export to Docker local cache - uses: docker/build-push-action@v5 - with: - # Note build-args, context, file, and target must all match between this - # step and the later build-push-action, otherwise the second build-push-action - # will attempt to build the image again - build-args: | - PIP_OPTIONS=-r ${{ env.DIST_LOCKFILE_PATH }}/requirements.txt ${{ env.DIST_WHEEL_PATH }}/*.whl - context: artifacts/ - file: ./Dockerfile - target: runtime - load: true - tags: ${{ env.TEST_TAG }} - # If you have a long docker build (2+ minutes), uncomment the - # following to turn on caching. For short build times this - # makes it a little slower - #cache-from: type=gha - #cache-to: type=gha,mode=max - - - name: Test cli works in cached runtime image - run: docker run docker.io/library/${{ env.TEST_TAG }} --version - - - name: Create tags for publishing image - id: meta - uses: docker/metadata-action@v5 - with: - images: ${{ env.IMAGE_REPOSITORY }} - tags: | - type=ref,event=tag - type=raw,value=latest, enable=${{ github.ref_type == 'tag' }} - # type=edge,branch=main - # Add line above to generate image for every commit to given branch, - # and uncomment the end of if clause in next step - - - name: Push cached image to container registry - if: github.ref_type == 'tag' # || github.ref_name == 'main' - uses: docker/build-push-action@v5 - # This does not build the image again, it will find the image in the - # Docker cache and publish it - with: - # Note build-args, context, file, and target must all match between this - # step and the previous build-push-action, otherwise this step will - # attempt to build the image again - build-args: | - PIP_OPTIONS=-r ${{ env.DIST_LOCKFILE_PATH }}/requirements.txt ${{ env.DIST_WHEEL_PATH }}/*.whl - context: artifacts/ - file: ./Dockerfile - target: runtime - push: true - tags: ${{ steps.meta.outputs.tags }} - labels: ${{ steps.meta.outputs.labels }} - - release: - # upload to PyPI and make a release on every tag - needs: [lint, dist, test] - if: ${{ github.event_name == 'push' && github.ref_type == 'tag' }} - runs-on: ubuntu-latest - env: - HAS_PYPI_TOKEN: ${{ secrets.PYPI_TOKEN != '' }} - - steps: - - name: Download wheel and lockfiles - uses: actions/download-artifact@v4.1.0 - with: - path: artifacts/ - pattern: "*dist*" - - - name: Fixup blank lockfiles - # Github release artifacts can't be blank - run: for f in ${{ env.DIST_LOCKFILE_PATH }}/*; do [ -s $f ] || echo '# No requirements' >> $f; done - - - name: Github Release - # We pin to the SHA, not the tag, for security reasons. 
- # https://docs.github.com/en/actions/learn-github-actions/security-hardening-for-github-actions#using-third-party-actions - uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v0.1.15 - with: - prerelease: ${{ contains(github.ref_name, 'a') || contains(github.ref_name, 'b') || contains(github.ref_name, 'rc') }} - files: | - ${{ env.DIST_WHEEL_PATH }}/* - ${{ env.DIST_LOCKFILE_PATH }}/* - generate_release_notes: true - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - - name: Publish to PyPI - if: ${{ env.HAS_PYPI_TOKEN }} - uses: pypa/gh-action-pypi-publish@release/v1 - with: - password: ${{ secrets.PYPI_TOKEN }} diff --git a/.github/workflows/docs_clean.yml b/.github/workflows/docs_clean.yml deleted file mode 100644 index e324640e..00000000 --- a/.github/workflows/docs_clean.yml +++ /dev/null @@ -1,43 +0,0 @@ -name: Docs Cleanup CI - -# delete branch documentation when a branch is deleted -# also allow manually deleting a documentation version -on: - delete: - workflow_dispatch: - inputs: - version: - description: "documentation version to DELETE" - required: true - type: string - -jobs: - remove: - if: github.event.ref_type == 'branch' || github.event_name == 'workflow_dispatch' - runs-on: ubuntu-latest - - steps: - - name: Checkout - uses: actions/checkout@v4 - with: - ref: gh-pages - - - name: removing documentation for branch ${{ github.event.ref }} - if: ${{ github.event_name != 'workflow_dispatch' }} - run: echo "REF_NAME=${{ github.event.ref }}" >> $GITHUB_ENV - - - name: manually removing documentation version ${{ github.event.inputs.version }} - if: ${{ github.event_name == 'workflow_dispatch' }} - run: echo "REF_NAME=${{ github.event.inputs.version }}" >> $GITHUB_ENV - - - name: Sanitize ref name for docs version - run: echo "DOCS_VERSION=${REF_NAME//[^A-Za-z0-9._-]/_}" >> $GITHUB_ENV - - - name: update index and push changes - run: | - rm -r $DOCS_VERSION - python make_switcher.py --remove $DOCS_VERSION ${{ github.repository }} switcher.json - git config --global user.name 'GitHub Actions Docs Cleanup CI' - git config --global user.email 'GithubActionsCleanup@noreply.github.com' - git commit -am "Removing redundant docs version $DOCS_VERSION" - git push diff --git a/.github/workflows/linkcheck.yml b/.github/workflows/linkcheck.yml deleted file mode 100644 index 7f651a27..00000000 --- a/.github/workflows/linkcheck.yml +++ /dev/null @@ -1,28 +0,0 @@ -name: Link Check - -on: - workflow_dispatch: - schedule: - # Run weekly to check URL links still resolve - - cron: "0 8 * * WED" - -jobs: - docs: - runs-on: ubuntu-latest - - steps: - - name: Checkout - uses: actions/checkout@v4 - - - name: Install python packages - uses: ./.github/actions/install_requirements - with: - requirements_file: requirements-dev-3.x.txt - install_options: -e .[dev] - artifact_name: link_check - - - name: Check links - run: tox -e docs build -- -b linkcheck - - - name: Keepalive Workflow - uses: gautamkrishnar/keepalive-workflow@v1 diff --git a/.github/workflows/periodic.yml b/.github/workflows/periodic.yml new file mode 100644 index 00000000..e2a0fd1b --- /dev/null +++ b/.github/workflows/periodic.yml @@ -0,0 +1,13 @@ +name: Periodic + +on: + workflow_dispatch: + schedule: + # Run weekly to check URL links still resolve + - cron: "0 8 * * WED" + +jobs: + linkcheck: + uses: ./.github/workflows/_tox.yml + with: + tox: docs build -- -b linkcheck diff --git a/.gitignore b/.gitignore index a37be99b..0f33bf29 100644 --- a/.gitignore +++ b/.gitignore @@ -8,7 +8,6 @@ __pycache__/ # 
Distribution / packaging .Python env/ -.venv build/ develop-eggs/ dist/ @@ -56,6 +55,7 @@ cov.xml # Sphinx documentation docs/_build/ +docs/_api # PyBuilder target/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 5bc9f001..ef68f8ad 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,23 +1,24 @@ repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.4.0 + rev: v5.0.0 hooks: - id: check-added-large-files - id: check-yaml - id: check-merge-conflict + - id: end-of-file-fixer - repo: local hooks: - - id: black - name: Run black - stages: [commit] + - id: ruff + name: lint with ruff language: system - entry: black --check --diff + entry: ruff check --force-exclude --fix types: [python] + require_serial: true - - id: ruff - name: Run ruff - stages: [commit] + - id: ruff-format + name: format with ruff language: system - entry: ruff + entry: ruff format --force-exclude types: [python] + require_serial: true diff --git a/.vscode/extensions.json b/.vscode/extensions.json index a1227b34..933c580c 100644 --- a/.vscode/extensions.json +++ b/.vscode/extensions.json @@ -1,10 +1,5 @@ { "recommendations": [ "ms-vscode-remote.remote-containers", - "ms-python.python", - "tamasfe.even-better-toml", - "redhat.vscode-yaml", - "ryanluker.vscode-coverage-gutters", - "charliermarsh.Ruff" ] } diff --git a/.vscode/launch.json b/.vscode/launch.json index 3cda7432..7d563e00 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -6,7 +6,7 @@ "configurations": [ { "name": "Debug Unit Test", - "type": "python", + "type": "debugpy", "request": "launch", "justMyCode": false, "program": "${file}", @@ -14,12 +14,6 @@ "debug-test" ], "console": "integratedTerminal", - "env": { - // The default config in pyproject.toml's "[tool.pytest.ini_options]" adds coverage. - // Cannot have coverage and debugging at the same time. - // https://github.com/microsoft/vscode-python/issues/693 - "PYTEST_ADDOPTS": "--no-cov" - }, } ] } diff --git a/.vscode/settings.json b/.vscode/settings.json index e2909249..101c75fa 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -1,22 +1,12 @@ { - "python.linting.pylintEnabled": false, - "python.linting.flake8Enabled": false, - "python.linting.mypyEnabled": true, - "python.linting.enabled": true, - "python.testing.pytestArgs": [ - "--cov=pytac", - "--cov-report", - "xml:cov.xml" - ], "python.testing.unittestEnabled": false, "python.testing.pytestEnabled": true, - "python.formatting.provider": "black", - "python.languageServer": "Pylance", "editor.formatOnSave": true, + "editor.codeActionsOnSave": { + "source.organizeImports": "explicit" + }, + "files.insertFinalNewline": true, "[python]": { - "editor.codeActionsOnSave": { - "source.fixAll.ruff": false, - "source.organizeImports.ruff": true - } - } + "editor.defaultFormatter": "charliermarsh.ruff", + }, } diff --git a/Dockerfile b/Dockerfile index 8de9b2a0..35d2abf7 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,37 +1,13 @@ -# This file is for use as a devcontainer and a runtime container -# -# The devcontainer should use the build target and run as root with podman +# The devcontainer should use the developer target and run as root with podman # or docker with user namespaces. -# -FROM python:3.10 as build +ARG PYTHON_VERSION=3.11 +FROM python:${PYTHON_VERSION} AS developer -ARG PIP_OPTIONS=. 
+# Add any system dependencies for the developer/build environment here +RUN apt-get update && apt-get install -y --no-install-recommends \ + graphviz \ + && rm -rf /var/lib/apt/lists/* -# Add any system dependencies for the developer/build environment here e.g. -# RUN apt-get update && apt-get upgrade -y && \ -# apt-get install -y --no-install-recommends \ -# desired-packages \ -# && rm -rf /var/lib/apt/lists/* - -# set up a virtual environment and put it in PATH +# Set up a virtual environment and put it in PATH RUN python -m venv /venv ENV PATH=/venv/bin:$PATH - -# Copy any required context for the pip install over -COPY . /context -WORKDIR /context - -# install python package into /venv -RUN pip install ${PIP_OPTIONS} - -FROM python:3.10-slim as runtime - -# Add apt-get system dependecies for runtime here if needed - -# copy the virtual environment from the build stage and put it in PATH -COPY --from=build /venv/ /venv/ -ENV PATH=/venv/bin:$PATH - -# change this entrypoint if it is not the same as the repo -ENTRYPOINT ["pytac"] -CMD ["--version"] diff --git a/README.md b/README.md new file mode 100644 index 00000000..f0d84b62 --- /dev/null +++ b/README.md @@ -0,0 +1,27 @@ +[![CI](https://github.com/DiamondLightSource/pytac/actions/workflows/ci.yml/badge.svg)](https://github.com/DiamondLightSource/pytac/actions/workflows/ci.yml) +[![Coverage](https://codecov.io/gh/DiamondLightSource/pytac/branch/main/graph/badge.svg)](https://codecov.io/gh/DiamondLightSource/pytac) +[![PyPI](https://img.shields.io/pypi/v/pytac.svg)](https://pypi.org/project/pytac) +[![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://www.apache.org/licenses/LICENSE-2.0) + +# pytac + +Python Toolkit for Accelerator Controls (Pytac) is a Python library for working with elements of particle accelerators, developed at Diamond Light Source. + + +Source | +:---: | :---: +PyPI | `pip install pytac` +Documentation | +Releases | + + +```python +from pytac import __version__ + +print(f"Hello pytac {__version__}") +``` + + + + +See https://diamondlightsource.github.io/pytac for more detailed documentation. diff --git a/README.rst b/README.rst deleted file mode 100644 index 2849b905..00000000 --- a/README.rst +++ /dev/null @@ -1,55 +0,0 @@ -pytac -============================================================================= - -|code_ci| |docs_ci| |coverage| |pypi_version| |license| - -Python Toolkit for Accelerator Controls (Pytac) is a Python library for working -with elements of particle accelerators, developed at Diamond Light Source. - -============== ============================================================== -PyPI ``pip install pytac`` -Source code https://github.com/DiamondLightSource/pytac -Documentation https://DiamondLightSource.github.io/pytac -Releases https://github.com/DiamondLightSource/pytac/releases -============== ============================================================== - -This is where you should put some images or code snippets that illustrate -some relevant examples. If it is a library then you might put some -introductory code here: - -.. code-block:: python - - from pytac import __version__ - - print(f"Hello pytac {__version__}") - -Or if it is a commandline tool then you might put some example commands here:: - - $ python -m pytac --version - -.. |code_ci| image:: https://github.com/DiamondLightSource/pytac/actions/workflows/code.yml/badge.svg?branch=main - :target: https://github.com/DiamondLightSource/pytac/actions/workflows/code.yml - :alt: Code CI - -.. 
|docs_ci| image:: https://github.com/DiamondLightSource/pytac/actions/workflows/docs.yml/badge.svg?branch=main - :target: https://github.com/DiamondLightSource/pytac/actions/workflows/docs.yml - :alt: Docs CI - -.. |coverage| image:: https://codecov.io/gh/DiamondLightSource/pytac/branch/main/graph/badge.svg - :target: https://codecov.io/gh/DiamondLightSource/pytac - :alt: Test Coverage - -.. |pypi_version| image:: https://img.shields.io/pypi/v/pytac.svg - :target: https://pypi.org/project/pytac - :alt: Latest PyPI version - -.. |license| image:: https://img.shields.io/badge/License-Apache%202.0-blue.svg - :target: https://opensource.org/licenses/Apache-2.0 - :alt: Apache License - - -.. - Anything below this line is used when viewing README.rst and will be replaced - when included in index.rst - -See https://DiamondLightSource.github.io/pytac for more detailed documentation. diff --git a/catalog-info.yaml b/catalog-info.yaml index 02941ee4..db1f2510 100644 --- a/catalog-info.yaml +++ b/catalog-info.yaml @@ -5,6 +5,6 @@ metadata: title: pytac description: Python Toolkit for Accelerator Controls (Pytac) is a Python library for working with elements of particle accelerators, developed at Diamond Light Source. spec: - type: documentation - lifecycle: experimental - owner: group:default/high-level-apps \ No newline at end of file + type: library + lifecycle: production + owner: group:default/high-level-apps diff --git a/docs/_api.rst b/docs/_api.rst new file mode 100644 index 00000000..b7e544d4 --- /dev/null +++ b/docs/_api.rst @@ -0,0 +1,16 @@ +:orphan: + +.. + This page is not included in the TOC tree, but must exist so that the + autosummary pages are generated for pytac and all its + subpackages + +API +=== + +.. autosummary:: + :toctree: _api + :template: custom-module-template.rst + :recursive: + + pytac diff --git a/docs/_templates/custom-module-template.rst b/docs/_templates/custom-module-template.rst new file mode 100644 index 00000000..9aeca540 --- /dev/null +++ b/docs/_templates/custom-module-template.rst @@ -0,0 +1,37 @@ +{{ ('``' + fullname + '``') | underline }} + +{%- set filtered_members = [] %} +{%- for item in members %} + {%- if item in functions + classes + exceptions + attributes %} + {% set _ = filtered_members.append(item) %} + {%- endif %} +{%- endfor %} + +.. automodule:: {{ fullname }} + :members: + + {% block modules %} + {% if modules %} + .. rubric:: Submodules + + .. autosummary:: + :toctree: + :template: custom-module-template.rst + :recursive: + {% for item in modules %} + {{ item }} + {%- endfor %} + {% endif %} + {% endblock %} + + {% block members %} + {% if filtered_members %} + .. rubric:: Members + + .. autosummary:: + :nosignatures: + {% for item in filtered_members %} + {{ item }} + {%- endfor %} + {% endif %} + {% endblock %} diff --git a/docs/conf.py b/docs/conf.py index 0d40ab7f..8084c23f 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,8 +1,9 @@ -# Configuration file for the Sphinx documentation builder. -# -# This file only contains a selection of the most common options. For a full -# list see the documentation: -# https://www.sphinx-doc.org/en/master/usage/configuration.html +"""Configuration file for the Sphinx documentation builder. + +This file only contains a selection of the most common options. 
For a full +list see the documentation: +https://www.sphinx-doc.org/en/master/usage/configuration.html +""" import sys from pathlib import Path @@ -32,6 +33,8 @@ extensions = [ # Use this for generating API docs "sphinx.ext.autodoc", + # and making summary tables at the top of API docs + "sphinx.ext.autosummary", # This can parse google style docstrings "sphinx.ext.napoleon", # For linking to external sphinx documentation @@ -44,8 +47,13 @@ "sphinx_copybutton", # For the card element "sphinx_design", + # So we can write markdown files + "myst_parser", ] +# So we can use the ::: syntax +myst_enable_extensions = ["colon_fence"] + # If true, Sphinx will warn about all references where the target cannot # be found. nitpicky = True @@ -75,6 +83,12 @@ # Don't inherit docstrings from baseclasses autodoc_inherit_docstrings = False +# Document only what is in __all__ +autosummary_ignore_module_all = False + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + # Output graphviz directive produced images in a scalable format graphviz_output_format = "svg" @@ -82,9 +96,6 @@ # role, that is, for text marked up `like this` default_role = "any" -# The suffix of source filenames. -source_suffix = ".rst" - # The master toctree document. master_doc = "index" @@ -103,15 +114,6 @@ # A dictionary of graphviz graph attributes for inheritance diagrams. inheritance_graph_attrs = {"rankdir": "TB"} -# Common links that should be available on every page -rst_epilog = """ -.. _Diamond Light Source: http://www.diamond.ac.uk -.. _black: https://github.com/psf/black -.. _ruff: https://beta.ruff.rs/docs/ -.. _mypy: http://mypy-lang.org/ -.. _pre-commit: https://pre-commit.com/ -""" - # Ignore localhost links for periodic check that links in docs are valid linkcheck_ignore = [r"http://localhost:\d+/"] @@ -142,10 +144,10 @@ # Theme options for pydata_sphinx_theme # We don't check switcher because there are 3 possible states for a repo: # 1. New project, docs are not published so there is no switcher -# 2. Existing project with latest skeleton, switcher exists and works -# 3. Existing project with old skeleton that makes broken switcher, +# 2. Existing project with latest copier template, switcher exists and works +# 3. Existing project with old copier template that makes broken switcher, # switcher exists but is broken -# Point 3 makes checking switcher difficult, because the updated skeleton +# Point 3 makes checking switcher difficult, because the updated copier template # will fix the switcher at the end of the docs workflow, but never gets a chance # to complete as the docs build warns and fails. 
html_theme_options = { @@ -167,19 +169,13 @@ }, "check_switcher": False, "navbar_end": ["theme-switcher", "icon-links", "version-switcher"], - "external_links": [ - { - "name": "Release Notes", - "url": f"https://github.com/{github_user}/{github_repo}/releases", - } - ], "navigation_with_keys": False, } # A dictionary of values to pass into the template engine’s context for all pages html_context = { "github_user": github_user, - "github_repo": project, + "github_repo": github_repo, "github_version": version, "doc_path": "docs", } @@ -192,4 +188,4 @@ # Logo html_logo = "images/dls-logo.svg" -html_favicon = "images/dls-favicon.ico" +html_favicon = html_logo diff --git a/docs/developer/explanations/decisions.rst b/docs/developer/explanations/decisions.rst deleted file mode 100644 index 5841e6ea..00000000 --- a/docs/developer/explanations/decisions.rst +++ /dev/null @@ -1,17 +0,0 @@ -.. This Source Code Form is subject to the terms of the Mozilla Public -.. License, v. 2.0. If a copy of the MPL was not distributed with this -.. file, You can obtain one at http://mozilla.org/MPL/2.0/. - -Architectural Decision Records -============================== - -We record major architectural decisions in Architecture Decision Records (ADRs), -as `described by Michael Nygard -`_. -Below is the list of our current ADRs. - -.. toctree:: - :maxdepth: 1 - :glob: - - decisions/* \ No newline at end of file diff --git a/docs/developer/explanations/decisions/0001-record-architecture-decisions.rst b/docs/developer/explanations/decisions/0001-record-architecture-decisions.rst deleted file mode 100644 index b2d3d0fe..00000000 --- a/docs/developer/explanations/decisions/0001-record-architecture-decisions.rst +++ /dev/null @@ -1,26 +0,0 @@ -1. Record architecture decisions -================================ - -Date: 2022-02-18 - -Status ------- - -Accepted - -Context -------- - -We need to record the architectural decisions made on this project. - -Decision --------- - -We will use Architecture Decision Records, as `described by Michael Nygard -`_. - -Consequences ------------- - -See Michael Nygard's article, linked above. To create new ADRs we will copy and -paste from existing ones. diff --git a/docs/developer/explanations/decisions/0002-switched-to-pip-skeleton.rst b/docs/developer/explanations/decisions/0002-switched-to-pip-skeleton.rst deleted file mode 100644 index 33d56981..00000000 --- a/docs/developer/explanations/decisions/0002-switched-to-pip-skeleton.rst +++ /dev/null @@ -1,35 +0,0 @@ -2. Adopt python_copier_template for project structure -===================================================== - -Date: 2022-02-18 - -Status ------- - -Accepted - -Context -------- - -We should use the following `python_copier_template `_. -The template will ensure consistency in developer -environments and package management. - -Decision --------- - -We have switched to using the skeleton. - -Consequences ------------- - -This module will use a fixed set of tools as developed in python_copier_template -and can pull from this template to update the packaging to the latest techniques. 
- -As such, the developer environment may have changed, the following could be -different: - -- linting -- formatting -- pip venv setup -- CI/CD diff --git a/docs/developer/how-to/build-docs.rst b/docs/developer/how-to/build-docs.rst deleted file mode 100644 index 11a5e638..00000000 --- a/docs/developer/how-to/build-docs.rst +++ /dev/null @@ -1,38 +0,0 @@ -Build the docs using sphinx -=========================== - -You can build the `sphinx`_ based docs from the project directory by running:: - - $ tox -e docs - -This will build the static docs on the ``docs`` directory, which includes API -docs that pull in docstrings from the code. - -.. seealso:: - - `documentation_standards` - -The docs will be built into the ``build/html`` directory, and can be opened -locally with a web browser:: - - $ firefox build/html/index.html - -Autobuild ---------- - -You can also run an autobuild process, which will watch your ``docs`` -directory for changes and rebuild whenever it sees changes, reloading any -browsers watching the pages:: - - $ tox -e docs autobuild - -You can view the pages at localhost:: - - $ firefox http://localhost:8000 - -If you are making changes to source code too, you can tell it to watch -changes in this directory too:: - - $ tox -e docs autobuild -- --watch src - -.. _sphinx: https://www.sphinx-doc.org/ diff --git a/docs/developer/how-to/contribute.rst b/docs/developer/how-to/contribute.rst deleted file mode 100644 index 65b992f0..00000000 --- a/docs/developer/how-to/contribute.rst +++ /dev/null @@ -1 +0,0 @@ -.. include:: ../../../.github/CONTRIBUTING.rst diff --git a/docs/developer/how-to/lint.rst b/docs/developer/how-to/lint.rst deleted file mode 100644 index 2df258d8..00000000 --- a/docs/developer/how-to/lint.rst +++ /dev/null @@ -1,39 +0,0 @@ -Run linting using pre-commit -============================ - -Code linting is handled by black_ and ruff_ run under pre-commit_. - -Running pre-commit ------------------- - -You can run the above checks on all files with this command:: - - $ tox -e pre-commit - -Or you can install a pre-commit hook that will run each time you do a ``git -commit`` on just the files that have changed:: - - $ pre-commit install - -It is also possible to `automatically enable pre-commit on cloned repositories `_. -This will result in pre-commits being enabled on every repo your user clones from now on. - -Fixing issues -------------- - -If black reports an issue you can tell it to reformat all the files in the -repository:: - - $ black . - -Likewise with ruff:: - - $ ruff --fix . - -Ruff may not be able to automatically fix all issues; in this case, you will have to fix those manually. - -VSCode support --------------- - -The ``.vscode/settings.json`` will run black formatting as well as -ruff checking on save. Issues will be highlighted in the editor window. 
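As an illustrative aside (not itself part of the diff): the lint guide removed above documented the old black-plus-ruff workflow, while the updated `.pre-commit-config.yaml` earlier in this patch moves both linting and formatting to ruff. A minimal sketch of running the equivalent checks by hand, assuming ruff is installed in the development environment — the `.` target is an assumption here, since pre-commit normally passes explicit file names to the hooks:

```bash
# Roughly what the new hooks invoke (sketch based on the entries in .pre-commit-config.yaml)
ruff check --force-exclude --fix .   # lint and apply safe autofixes
ruff format --force-exclude .        # reformat in place
```

The `tox -e pre-commit` environment referenced elsewhere in this patch runs the same hooks across all files in the repository.
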
diff --git a/docs/developer/how-to/make-release.rst b/docs/developer/how-to/make-release.rst deleted file mode 100644 index 9491ec55..00000000 --- a/docs/developer/how-to/make-release.rst +++ /dev/null @@ -1,16 +0,0 @@ -Make a release -============== - -To make a new release, please follow this checklist: - -- Choose a new PEP440 compliant release number (see https://peps.python.org/pep-0440/) -- Go to the GitHub release_ page -- Choose ``Draft New Release`` -- Click ``Choose Tag`` and supply the new tag you chose (click create new tag) -- Click ``Generate release notes``, review and edit these notes -- Choose a title and click ``Publish Release`` - -Note that tagging and pushing to the main branch has the same effect except that -you will not get the option to edit the release notes. - -.. _release: https://github.com/DiamondLightSource/pytac/releases diff --git a/docs/developer/how-to/pin-requirements.rst b/docs/developer/how-to/pin-requirements.rst deleted file mode 100644 index 89639623..00000000 --- a/docs/developer/how-to/pin-requirements.rst +++ /dev/null @@ -1,74 +0,0 @@ -Pinning Requirements -==================== - -Introduction ------------- - -By design this project only defines dependencies in one place, i.e. in -the ``requires`` table in ``pyproject.toml``. - -In the ``requires`` table it is possible to pin versions of some dependencies -as needed. For library projects it is best to leave pinning to a minimum so -that your library can be used by the widest range of applications. - -When CI builds the project it will use the latest compatible set of -dependencies available (after applying your pins and any dependencies' pins). - -This approach means that there is a possibility that a future build may -break because an updated release of a dependency has made a breaking change. - -The correct way to fix such an issue is to work out the minimum pinning in -``requires`` that will resolve the problem. However this can be quite hard to -do and may be time consuming when simply trying to release a minor update. - -For this reason we provide a mechanism for locking all dependencies to -the same version as a previous successful release. This is a quick fix that -should guarantee a successful CI build. - -Finding the lock files ----------------------- - -Every release of the project will have a set of requirements files published -as release assets. - -For example take a look at the release page for python3-pip-skeleton-cli here: -https://github.com/DiamondLightSource/python3-pip-skeleton-cli/releases/tag/3.3.0 - -There is a list of requirements*.txt files showing as assets on the release. - -There is one file for each time the CI installed the project into a virtual -environment. There are multiple of these as the CI creates a number of -different environments. - -The files are created using ``pip freeze`` and will contain a full list -of the dependencies and sub-dependencies with pinned versions. - -You can download any of these files by clicking on them. It is best to use -the one that ran with the lowest Python version as this is more likely to -be compatible with all the versions of Python in the test matrix. -i.e. ``requirements-test-ubuntu-latest-3.8.txt`` in this example. 
- -Applying the lock file ----------------------- - -To apply a lockfile: - -- copy the requirements file you have downloaded to the root of your - repository -- rename it to requirements.txt -- commit it into the repo -- push the changes - -The CI looks for a requirements.txt in the root and will pass it to pip -when installing each of the test environments. pip will then install exactly -the same set of packages as the previous release. - -Removing dependency locking from CI ------------------------------------ - -Once the reasons for locking the build have been resolved it is a good idea -to go back to an unlocked build. This is because you get an early indication -of any incoming problems. - -To restore unlocked builds in CI simply remove requirements.txt from the root -of the repo and push. diff --git a/docs/developer/how-to/run-tests.rst b/docs/developer/how-to/run-tests.rst deleted file mode 100644 index d2e03644..00000000 --- a/docs/developer/how-to/run-tests.rst +++ /dev/null @@ -1,12 +0,0 @@ -Run the tests using pytest -========================== - -Testing is done with pytest_. It will find functions in the project that `look -like tests`_, and run them to check for errors. You can run it with:: - - $ tox -e pytest - -It will also report coverage to the commandline and to ``cov.xml``. - -.. _pytest: https://pytest.org/ -.. _look like tests: https://docs.pytest.org/explanation/goodpractices.html#test-discovery diff --git a/docs/developer/how-to/static-analysis.rst b/docs/developer/how-to/static-analysis.rst deleted file mode 100644 index 065920e1..00000000 --- a/docs/developer/how-to/static-analysis.rst +++ /dev/null @@ -1,8 +0,0 @@ -Run static analysis using mypy -============================== - -Static type analysis is done with mypy_. It checks type definition in source -files without running them, and highlights potential issues where types do not -match. You can run it with:: - - $ tox -e mypy diff --git a/docs/developer/how-to/test-container.rst b/docs/developer/how-to/test-container.rst deleted file mode 100644 index a4a43a6f..00000000 --- a/docs/developer/how-to/test-container.rst +++ /dev/null @@ -1,25 +0,0 @@ -Container Local Build and Test -============================== - -CI builds a runtime container for the project. The local tests -checks available via ``tox -p`` do not verify this because not -all developers will have docker installed locally. - -If CI is failing to build the container, then it is best to fix and -test the problem locally. This would require that you have docker -or podman installed on your local workstation. - -In the following examples the command ``docker`` is interchangeable with -``podman`` depending on which container cli you have installed. - -To build the container and call it ``test``:: - - cd - docker build -t test . - -To verify that the container runs:: - - docker run -it test --help - -You can pass any other command line parameters to your application -instead of --help. diff --git a/docs/developer/how-to/update-tools.rst b/docs/developer/how-to/update-tools.rst deleted file mode 100644 index c1075ee8..00000000 --- a/docs/developer/how-to/update-tools.rst +++ /dev/null @@ -1,16 +0,0 @@ -Update the tools -================ - -This module is merged with the python3-pip-skeleton_. This is a generic -Python project structure which provides a means to keep tools and -techniques in sync between multiple Python projects. 
To update to the -latest version of the skeleton, run:: - - $ git pull --rebase=false https://github.com/DiamondLightSource/python3-pip-skeleton - -Any merge conflicts will indicate an area where something has changed that -conflicts with the setup of the current module. Check the `closed pull requests -`_ -of the skeleton module for more details. - -.. _python3-pip-skeleton: https://DiamondLightSource.github.io/python3-pip-skeleton diff --git a/docs/developer/index.rst b/docs/developer/index.rst deleted file mode 100644 index 8a6369b9..00000000 --- a/docs/developer/index.rst +++ /dev/null @@ -1,64 +0,0 @@ -Developer Guide -=============== - -Documentation is split into four categories, also accessible from links in the -side-bar. - -.. grid:: 2 - :gutter: 4 - - .. grid-item-card:: :material-regular:`directions_run;3em` - - .. toctree:: - :caption: Tutorials - :maxdepth: 1 - - tutorials/dev-install - - +++ - - Tutorials for getting up and running as a developer. - - .. grid-item-card:: :material-regular:`task;3em` - - .. toctree:: - :caption: How-to Guides - :maxdepth: 1 - - how-to/contribute - how-to/build-docs - how-to/run-tests - how-to/static-analysis - how-to/lint - how-to/update-tools - how-to/make-release - how-to/pin-requirements - how-to/test-container - - +++ - - Practical step-by-step guides for day-to-day dev tasks. - - .. grid-item-card:: :material-regular:`apartment;3em` - - .. toctree:: - :caption: Explanations - :maxdepth: 1 - - explanations/decisions - - +++ - - Explanations of how and why the architecture is why it is. - - .. grid-item-card:: :material-regular:`description;3em` - - .. toctree:: - :caption: Reference - :maxdepth: 1 - - reference/standards - - +++ - - Technical reference material on standards in use. diff --git a/docs/developer/reference/standards.rst b/docs/developer/reference/standards.rst deleted file mode 100644 index 5a1fd478..00000000 --- a/docs/developer/reference/standards.rst +++ /dev/null @@ -1,63 +0,0 @@ -Standards -========= - -This document defines the code and documentation standards used in this -repository. - -Code Standards --------------- - -The code in this repository conforms to standards set by the following tools: - -- black_ for code formatting -- ruff_ for style checks -- mypy_ for static type checking - -.. seealso:: - - How-to guides `../how-to/lint` and `../how-to/static-analysis` - -.. _documentation_standards: - -Documentation Standards ------------------------ - -Docstrings are pre-processed using the Sphinx Napoleon extension. As such, -google-style_ is considered as standard for this repository. Please use type -hints in the function signature for types. For example: - -.. code:: python - - def func(arg1: str, arg2: int) -> bool: - """Summary line. - - Extended description of function. - - Args: - arg1: Description of arg1 - arg2: Description of arg2 - - Returns: - Description of return value - """ - return True - -.. _google-style: https://sphinxcontrib-napoleon.readthedocs.io/en/latest/index.html#google-vs-numpy - -Documentation is contained in the ``docs`` directory and extracted from -docstrings of the API. - -Docs follow the underlining convention:: - - Headling 1 (page title) - ======================= - - Heading 2 - --------- - - Heading 3 - ~~~~~~~~~ - -.. 
seealso:: - - How-to guide `../how-to/build-docs` diff --git a/docs/developer/tutorials/dev-install.rst b/docs/developer/tutorials/dev-install.rst deleted file mode 100644 index 6e049c3e..00000000 --- a/docs/developer/tutorials/dev-install.rst +++ /dev/null @@ -1,68 +0,0 @@ -Developer install -================= - -These instructions will take you through the minimal steps required to get a dev -environment setup, so you can run the tests locally. - -Clone the repository --------------------- - -First clone the repository locally using `Git -`_:: - - $ git clone git://github.com/DiamondLightSource/pytac.git - -Install dependencies --------------------- - -You can choose to either develop on the host machine using a `venv` (which -requires python 3.8 or later) or to run in a container under `VSCode -`_ - -.. tab-set:: - - .. tab-item:: Local virtualenv - - .. code:: - - $ cd pytac - $ python3 -m venv venv - $ source venv/bin/activate - $ pip install -e '.[dev]' - - .. tab-item:: VSCode devcontainer - - .. code:: - - $ code pytac - # Click on 'Reopen in Container' when prompted - # Open a new terminal - - .. note:: - - See the epics-containers_ documentation for more complex - use cases, such as integration with podman. - -See what was installed ----------------------- - -To see a graph of the python package dependency tree type:: - - $ pipdeptree - -Build and test --------------- - -Now you have a development environment you can run the tests in a terminal:: - - $ tox -p - -This will run in parallel the following checks: - -- `../how-to/build-docs` -- `../how-to/run-tests` -- `../how-to/static-analysis` -- `../how-to/lint` - - -.. _epics-containers: https://epics-containers.github.io/main/user/tutorials/devcontainer.html diff --git a/docs/explanations.md b/docs/explanations.md new file mode 100644 index 00000000..73ab289b --- /dev/null +++ b/docs/explanations.md @@ -0,0 +1,10 @@ +# Explanations + +Explanations of how it works and why it works that way. + +```{toctree} +:maxdepth: 1 +:glob: + +explanations/* +``` diff --git a/docs/explanations/decisions.md b/docs/explanations/decisions.md new file mode 100644 index 00000000..0533b98d --- /dev/null +++ b/docs/explanations/decisions.md @@ -0,0 +1,12 @@ +# Architectural Decision Records + +Architectural decisions are made throughout a project's lifetime. As a way of keeping track of these decisions, we record these decisions in Architecture Decision Records (ADRs) listed below. + +```{toctree} +:glob: true +:maxdepth: 1 + +decisions/* +``` + +For more information on ADRs see this [blog by Michael Nygard](http://thinkrelevance.com/blog/2011/11/15/documenting-architecture-decisions). diff --git a/docs/explanations/decisions/0001-record-architecture-decisions.md b/docs/explanations/decisions/0001-record-architecture-decisions.md new file mode 100644 index 00000000..44d234ef --- /dev/null +++ b/docs/explanations/decisions/0001-record-architecture-decisions.md @@ -0,0 +1,18 @@ +# 1. Record architecture decisions + +## Status + +Accepted + +## Context + +We need to record the architectural decisions made on this project. + +## Decision + +We will use Architecture Decision Records, as [described by Michael Nygard](http://thinkrelevance.com/blog/2011/11/15/documenting-architecture-decisions). + +## Consequences + +See Michael Nygard's article, linked above. To create new ADRs we will copy and +paste from existing ones. 
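As an illustrative aside (not itself part of the diff): both the removed RST decision record above and the Markdown one added below note that the project can now pull packaging updates from python-copier-template. A minimal sketch of what pulling such an update might look like, assuming the copier CLI is available (for example through pipx, which this patch already uses for build and twine) and that the repository contains the answers file the template generates:

```bash
# Hypothetical update flow; the template's own documentation is authoritative
pipx run copier update   # re-apply the template using the recorded answers
git diff                 # review the resulting changes before committing
```

Any conflicts would then be resolved by hand, much as the skeleton-based `git pull --rebase=false` flow in the removed update-tools guide required.
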
diff --git a/docs/explanations/decisions/0002-switched-to-python-copier-template.md b/docs/explanations/decisions/0002-switched-to-python-copier-template.md new file mode 100644 index 00000000..66fe5d8b --- /dev/null +++ b/docs/explanations/decisions/0002-switched-to-python-copier-template.md @@ -0,0 +1,28 @@ +# 2. Adopt python-copier-template for project structure + +## Status + +Accepted + +## Context + +We should use the following [python-copier-template](https://github.com/DiamondLightSource/python-copier-template). +The template will ensure consistency in developer +environments and package management. + +## Decision + +We have switched to using the template. + +## Consequences + +This module will use a fixed set of tools as developed in `python-copier-template` +and can pull from this template to update the packaging to the latest techniques. + +As such, the developer environment may have changed, the following could be +different: + +- linting +- formatting +- pip venv setup +- CI/CD diff --git a/docs/explanations/decisions/COPYME b/docs/explanations/decisions/COPYME new file mode 100644 index 00000000..b466c792 --- /dev/null +++ b/docs/explanations/decisions/COPYME @@ -0,0 +1,19 @@ +# 3. Short descriptive title + +Date: Today's date + +## Status + +Accepted + +## Context + +Background to allow us to make the decision, to show how we arrived at our conclusions. + +## Decision + +What decision we made. + +## Consequences + +What we will do as a result of this decision. diff --git a/docs/user/explanations/what-is-pytac.rst b/docs/explanations/what-is-pytac.md similarity index 100% rename from docs/user/explanations/what-is-pytac.rst rename to docs/explanations/what-is-pytac.md diff --git a/docs/genindex.md b/docs/genindex.md new file mode 100644 index 00000000..73f1191b --- /dev/null +++ b/docs/genindex.md @@ -0,0 +1,3 @@ +# Index + + diff --git a/docs/genindex.rst b/docs/genindex.rst deleted file mode 100644 index 93eb8b29..00000000 --- a/docs/genindex.rst +++ /dev/null @@ -1,5 +0,0 @@ -API Index -========= - -.. - https://stackoverflow.com/a/42310803 diff --git a/docs/how-to.md b/docs/how-to.md new file mode 100644 index 00000000..6b161417 --- /dev/null +++ b/docs/how-to.md @@ -0,0 +1,10 @@ +# How-to Guides + +Practical step-by-step guides for the more experienced user. 
+ +```{toctree} +:maxdepth: 1 +:glob: + +how-to/* +``` diff --git a/docs/how-to/contribute.md b/docs/how-to/contribute.md new file mode 100644 index 00000000..6e419797 --- /dev/null +++ b/docs/how-to/contribute.md @@ -0,0 +1,2 @@ +```{include} ../../.github/CONTRIBUTING.md +``` diff --git a/docs/images/dls-favicon.ico b/docs/images/dls-favicon.ico deleted file mode 100644 index 9a11f508ef8aed28f14c5ce0d8408e1ec8b614a1..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 99678 zcmeI537lO;m4{y-5^&fxAd7U62#m-odom^>E-11%hzcU;%qW8>qNAV!X%rBR1O<_G zlo>@u83#o{W)z#S%OZm8BQq&!G^+HP(&n3&@W+)-9$hLOTPl^tjT;RAocFi!Zm+$n;Ww8`pBh^#O`bd$ z-t~}DY10X%Qg3fHyy2+Q{%4m;y8?rxKpcFJm&pY|u-6wCRW5(cjPg@`tAm&CdMS9B z-%o#TQRNE0?HvbX^O@z1HkeVq^0H->N|}gPE~^Af__3Vl@}-qP@2*;2sSxMtEoPQq zYs1-$wB*q@dPX_;yP4(Sk(Y_=xW{?7G2ax2xYNn62IKezl`B5Buo8T2aYcCq3)VS_ z2|mxetNC`;i~d2h<| z1L0&p|I2sR_3;k8>*A623f?_wr#*T>B~WUWL3O6z&+%LSv3#@RlJ;qyHRj!$W|xB( zN%WHym4NyQ9$Hfg9(}nIY|8IzDf?2s?L21)2hy%J={F+IpH>IKr=B0mmvt^~WxsY|c^bETWshNJpW zo$@@vv!?nyiT?vrUORpeluB!QN~QiWrBdJegHP`$_({ZLzALWMD6RO+IG)Ko;$Mxr zZTricy>@2#IB>ms%#88_@SR08{a5sSWpQPZ-fcLue2wC4*IyQkE5reRJkK>V)&{E% z92jcH7t#KVy8@nOXuCIU{mHcfy&?D^&(3*~*uKBK5q)ne?R>4thi)5uo^}hZ1Mv;x z{>%rxJDI*_y$&v2R#^*-Y1_{p;)z-Cfk*5Fyhl_f>NJ@C(okN?Q~cX?FFL&S{xv}W zEy8*M*5Bamnd$?A*(yZ;*}=7!GXGstcPv-!+svtxk;n?+nIj;uKAVVKj4>H-SrGs?lGN^-$l0Z(cPHo;nGh{BdY^4mkch_3#He)3d}>zw>nrufYt`-Uf^x z0&5B|PXf01zW6tJ{!nG#y1%>$ZElsJPn55|eJW#CR`+Fi1pKhZlcHdf=jyHClkkUQ zqrSWEz7GCb-8AGnH+@u?ypIFV$T8NAe+YH9E_?Q&d~`VN--Z$Oo4l`~ZtsoyX5P_P zf_YX)5G(v8{mX6>bd}&2yt8G*7f2(%W#B~l|GM@^IHb8--!6QO3C11uTy*|QW9Sjp7Rc)X`oQHj?0=(Pqw3p^ zqu;wTwitIH@~r#a4T~OU)1K`2+ihDPm^AQF*-*m)ZOP**fh8%qAo4#;w8A1NQUC9Xpx)qI~4V-LvBGFZ5~6 zN8Eg(!oXaJejuDzN9Ak3Q$0{mskHb2d@pVuZsVXjPb;^bzkY8;d#JX_*nY9s+)ALi zyq%ZxdoBI!+wiIlUHDnU>YL&Z)ZZ{3#k){OaPrh#XC-N_BJKFB`J}}g3!fCP2JYq5 z=e;}&c-B-O{nooHh;uA)H%WtMzK1-#e@qbcjtVNJ(v)?j(xf$|QqR&-X|sM8#lYW9pmxw^n**Nr$3;l zcor0v@`QQ}{AF*QQ=Y-MKN9Cs;-1hmyS)8uDOB3zz-dcl%G0)-Rlc8gRntMK%}F2P zy7xM=meNp;2k%`Ie1W*HYgIAGYa5>L@vP)Q=NT{`t{k5!LhU6{s`YXJ3w<5~0 z`Kz;>I6s;&zf&peU<4Z8;5#mNRE)L1bNr^ ziwi#~Ou7djVE({*;?^1;lH$gF(|UQMPP*hc_$luzto?4!`1j$Ic#-h;g*Quw+^F*z z!(2SU{RHN87rF1#!WvVggD%R6w@A00maqFA+%Kga{oZ|_7QP-H5#@e|F!5E|gXS}? z({hLO#P<4z9p_fk!UMg^fX%>djLD%rN*d1QdsLej5BjV%Kb&gW02myvw&q_aF~5}T z<~rZL0PZt*78%^q{HQknEbVAN%YH#HPLAl;XFB~9S*vbMNoDcv3*f$j=cP2f^*yT1 zt1TcC4x_o&JzS?cck@B64}Qd$Xgi<20Pba;)h^tqu-)cOdlCPSikn4$VyAQ4Q`Wvv z#Xq(E*lk|zMRLELzxx~AlwGCa?>%WRZah2ewx=w80sNQqB=%ps&BwJD8xQ@4uMM+t zU_Cw&f2FhAQYAAGP@? 
zU=2Vuc@03AyW;c=0_ZpKs{s2gaRpa#C0K@EI0gDSQHvYF!d*T9v+ z4Eu({VTQd!;jqqzf?`SF7EI`=bC)J@7B4nWxB4nWxBIJhqZFnHqXNN)14fopL zLD&u3pH+bR@RU0ADUcJMWYw-x4hz>6j{>^ky5dpbv~Yhteq(&Yef8Ob9ufIP3F}~rn_UC?g+p`-^>mN>ka{Jd5w?8`J;r+SSt^oRbpB;|i5B>Ic z_(@#>VTf+Hu7Ewm`B`0oT>eM6t^fpWh7|Hs3*nI8S_p>x*g`1e*A_xOf@jtEB!w-6 zrYJm=VVIp&Lt%E-01##u1hou$!sJ64Od1T=N>mM+DzAd8Rbhy&;#4u5Wa3u=)PjQm ZYRRh@^bCDh5vs@!z69bV>$COq{{Z);QUw42 diff --git a/docs/images/dls-logo.svg b/docs/images/dls-logo.svg index 0af1a177..4fcaa861 100644 --- a/docs/images/dls-logo.svg +++ b/docs/images/dls-logo.svg @@ -1,11 +1,11 @@ - - - - - - - - - - \ No newline at end of file + + + + + + + + + + diff --git a/docs/index.md b/docs/index.md new file mode 100644 index 00000000..730b3fdc --- /dev/null +++ b/docs/index.md @@ -0,0 +1,56 @@ +--- +html_theme.sidebar_secondary.remove: true +--- + +```{include} ../README.md +:end-before: + +::::{grid} 2 +:gutter: 4 + +:::{grid-item-card} {material-regular}`directions_walk;2em` +```{toctree} +:maxdepth: 2 +tutorials +``` ++++ +Tutorials for installation and typical usage. New users start here. +::: + +:::{grid-item-card} {material-regular}`directions;2em` +```{toctree} +:maxdepth: 2 +how-to +``` ++++ +Practical step-by-step guides for the more experienced user. +::: + +:::{grid-item-card} {material-regular}`info;2em` +```{toctree} +:maxdepth: 2 +explanations +``` ++++ +Explanations of how it works and why it works that way. +::: + +:::{grid-item-card} {material-regular}`menu_book;2em` +```{toctree} +:maxdepth: 2 +reference +``` ++++ +Technical reference material including APIs and release notes. +::: + +:::: diff --git a/docs/index.rst b/docs/index.rst deleted file mode 100644 index 580feb34..00000000 --- a/docs/index.rst +++ /dev/null @@ -1,29 +0,0 @@ -:html_theme.sidebar_secondary.remove: - -.. include:: ../README.rst - :end-before: when included in index.rst - -How the documentation is structured ------------------------------------ - -The documentation is split into 2 sections: - -.. grid:: 2 - - .. grid-item-card:: :material-regular:`person;4em` - :link: user/index - :link-type: doc - - The User Guide contains documentation on how to install and use pytac. - - .. grid-item-card:: :material-regular:`code;4em` - :link: developer/index - :link-type: doc - - The Developer Guide contains documentation on how to develop and contribute changes back to pytac. - -.. toctree:: - :hidden: - - user/index - developer/index diff --git a/docs/reference.md b/docs/reference.md new file mode 100644 index 00000000..13ac7983 --- /dev/null +++ b/docs/reference.md @@ -0,0 +1,12 @@ +# Reference + +Technical reference material including APIs and release notes. + +```{toctree} +:maxdepth: 1 +:glob: + +API <_api/pytac> +genindex +Release Notes +``` diff --git a/docs/tutorials.md b/docs/tutorials.md new file mode 100644 index 00000000..1fe66c54 --- /dev/null +++ b/docs/tutorials.md @@ -0,0 +1,10 @@ +# Tutorials + +Tutorials for installation and typical usage. New users start here. + +```{toctree} +:maxdepth: 1 +:glob: + +tutorials/* +``` diff --git a/docs/user/tutorials/basic-tutorial.rst b/docs/tutorials/basic-tutorial.rst similarity index 100% rename from docs/user/tutorials/basic-tutorial.rst rename to docs/tutorials/basic-tutorial.rst diff --git a/docs/tutorials/installation.md b/docs/tutorials/installation.md new file mode 100644 index 00000000..c8a9a215 --- /dev/null +++ b/docs/tutorials/installation.md @@ -0,0 +1,42 @@ +# Installation + +## Check your version of python + +You will need python 3.11 or later. 
You can check your version of python by +typing into a terminal: + +``` +$ python3 --version +``` + +## Create a virtual environment + +It is recommended that you install into a “virtual environment” so this +installation will not interfere with any existing Python software: + +``` +$ python3 -m venv /path/to/venv +$ source /path/to/venv/bin/activate +``` + +## Installing the library + +You can now use `pip` to install the library and its dependencies: + +``` +$ python3 -m pip install pytac +``` + +If you require a feature that is not currently released you can also install +from github: + +``` +$ python3 -m pip install git+https://github.com/DiamondLightSource/pytac.git +``` + +The library should now be installed and the commandline interface on your path. +You can check the version that has been installed by typing: + +``` +$ pytac --version +``` diff --git a/docs/user/explanations/docs-structure.rst b/docs/user/explanations/docs-structure.rst deleted file mode 100644 index f25a09ba..00000000 --- a/docs/user/explanations/docs-structure.rst +++ /dev/null @@ -1,18 +0,0 @@ -About the documentation ------------------------ - - :material-regular:`format_quote;2em` - - The Grand Unified Theory of Documentation - - -- David Laing - -There is a secret that needs to be understood in order to write good software -documentation: there isn't one thing called *documentation*, there are four. - -They are: *tutorials*, *how-to guides*, *technical reference* and *explanation*. -They represent four different purposes or functions, and require four different -approaches to their creation. Understanding the implications of this will help -improve most documentation - often immensely. - -`More information on this topic. `_ diff --git a/docs/user/how-to/run-container.rst b/docs/user/how-to/run-container.rst deleted file mode 100644 index d4dc4690..00000000 --- a/docs/user/how-to/run-container.rst +++ /dev/null @@ -1,15 +0,0 @@ -Run in a container -================== - -Pre-built containers with pytac and its dependencies already -installed are available on `Github Container Registry -`_. - -Starting the container ----------------------- - -To pull the container from github container registry and run:: - - $ docker run ghcr.io/DiamondLightSource/pytac:main --version - -To get a released version, use a numbered release instead of ``main``. diff --git a/docs/user/index.rst b/docs/user/index.rst deleted file mode 100644 index 2c94a0c0..00000000 --- a/docs/user/index.rst +++ /dev/null @@ -1,57 +0,0 @@ -User Guide -========== - -Documentation is split into four categories, also accessible from links in the -side-bar. - -.. grid:: 2 - :gutter: 4 - - .. grid-item-card:: :material-regular:`directions_walk;3em` - - .. toctree:: - :caption: Tutorials - :maxdepth: 1 - - tutorials/installation - - +++ - - Tutorials for installation and typical usage. New users start here. - - .. grid-item-card:: :material-regular:`directions;3em` - - .. toctree:: - :caption: How-to Guides - :maxdepth: 1 - - how-to/run-container - - +++ - - Practical step-by-step guides for the more experienced user. - - .. grid-item-card:: :material-regular:`info;3em` - - .. toctree:: - :caption: Explanations - :maxdepth: 1 - - explanations/docs-structure - - +++ - - Explanations of how the library works and why it works that way. - - .. grid-item-card:: :material-regular:`menu_book;3em` - - .. toctree:: - :caption: Reference - :maxdepth: 1 - - reference/api - ../genindex - - +++ - - Technical reference material including APIs and release notes. 
diff --git a/docs/user/reference/api.rst b/docs/user/reference/api.rst deleted file mode 100644 index d81cc42b..00000000 --- a/docs/user/reference/api.rst +++ /dev/null @@ -1,14 +0,0 @@ -API -=== - -.. automodule:: pytac - - ``pytac`` - ----------------------------------- - -This is the internal API reference for pytac - -.. data:: pytac.__version__ - :type: str - - Version number as calculated by https://github.com/pypa/setuptools_scm diff --git a/docs/user/tutorials/installation.rst b/docs/user/tutorials/installation.rst deleted file mode 100644 index 26f94e64..00000000 --- a/docs/user/tutorials/installation.rst +++ /dev/null @@ -1,38 +0,0 @@ -Installation -============ - -Check your version of python ----------------------------- - -You will need python 3.8 or later. You can check your version of python by -typing into a terminal:: - - $ python3 --version - - -Create a virtual environment ----------------------------- - -It is recommended that you install into a “virtual environment” so this -installation will not interfere with any existing Python software:: - - $ python3 -m venv /path/to/venv - $ source /path/to/venv/bin/activate - - -Installing the library ----------------------- - -You can now use ``pip`` to install the library and its dependencies:: - - $ python3 -m pip install pytac - -If you require a feature that is not currently released you can also install -from github:: - - $ python3 -m pip install git+https://github.com/DiamondLightSource/pytac.git - -The library should now be installed and the commandline interface on your path. -You can check the version that has been installed by typing:: - - $ pytac --version diff --git a/pyproject.toml b/pyproject.toml index f13d33d5..5c9dfb59 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,5 +1,5 @@ [build-system] -requires = ["setuptools>=64", "setuptools_scm[toml]>=6.2", "wheel"] +requires = ["setuptools>=64", "setuptools_scm[toml]>=8"] build-backend = "setuptools.build_meta" [project] @@ -7,27 +7,25 @@ name = "pytac" classifiers = [ "Development Status :: 3 - Alpha", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", ] description = "Python Toolkit for Accelerator Controls (Pytac) is a Python library for working with elements of particle accelerators, developed at Diamond Light Source." dependencies = [ - "typing-extensions;python_version<'3.8'", "numpy", "scipy", ] # Add project dependencies here, e.g. 
["click", "numpy"] dynamic = ["version"] license.file = "LICENSE" -readme = "README.rst" -requires-python = ">=3.7" +readme = "README.md" +requires-python = ">=3.11" [project.optional-dependencies] dev = [ - "black", + "copier", "mypy", + "myst-parser", "pipdeptree", "pre-commit", "pydata-sphinx-theme>=0.12", @@ -53,7 +51,7 @@ name = "Phil Smith" [tool.setuptools_scm] -write_to = "src/pytac/_version.py" +version_file = "src/pytac/_version.py" [tool.mypy] ignore_missing_imports = true # Ignore missing stubs in imported modules @@ -69,6 +67,7 @@ filterwarnings = "error" testpaths = "docs src tests" [tool.coverage.run] +patch = ["subprocess"] data_file = "/tmp/pytac.coverage" [tool.coverage.paths] @@ -82,7 +81,7 @@ legacy_tox_ini = """ [tox] skipsdist=True -[testenv:{pre-commit,mypy,pytest,docs}] +[testenv:{pre-commit,type-checking,tests,docs}] # Don't create a virtualenv for the command, requires tox-direct plugin direct = True passenv = * @@ -93,20 +92,29 @@ allowlist_externals = sphinx-build sphinx-autobuild commands = - pytest: pytest --cov=pytac --cov-report term --cov-report xml:cov.xml {posargs} - mypy: mypy src tests {posargs} - pre-commit: pre-commit run --all-files {posargs} + pre-commit: pre-commit run --all-files --show-diff-on-failure {posargs} + type-checking: mypy src tests {posargs} + tests: pytest --cov=pytac --cov-report term --cov-report xml:cov.xml {posargs} docs: sphinx-{posargs:build -EW --keep-going} -T docs build/html """ - [tool.ruff] src = ["src", "tests"] line-length = 88 -select = [ - "C4", # flake8-comprehensions - https://beta.ruff.rs/docs/rules/#flake8-comprehensions-c4 - "E", # pycodestyle errors - https://beta.ruff.rs/docs/rules/#error-e - "F", # pyflakes rules - https://beta.ruff.rs/docs/rules/#pyflakes-f - "W", # pycodestyle warnings - https://beta.ruff.rs/docs/rules/#warning-w - "I001", # isort +lint.select = [ + "B", # flake8-bugbear - https://docs.astral.sh/ruff/rules/#flake8-bugbear-b + "C4", # flake8-comprehensions - https://docs.astral.sh/ruff/rules/#flake8-comprehensions-c4 + "E", # pycodestyle errors - https://docs.astral.sh/ruff/rules/#error-e + "F", # pyflakes rules - https://docs.astral.sh/ruff/rules/#pyflakes-f + "N", # pep8-naming - https://docs.astral.sh/ruff/rules/#pep8-naming-n + "W", # pycodestyle warnings - https://docs.astral.sh/ruff/rules/#warning-w + "I", # isort - https://docs.astral.sh/ruff/rules/#isort-i + "UP", # pyupgrade - https://docs.astral.sh/ruff/rules/#pyupgrade-up + "SLF", # self - https://docs.astral.sh/ruff/settings/#lintflake8-self ] + +[tool.ruff.lint.per-file-ignores] +# By default, private member access is allowed in tests +# See https://github.com/DiamondLightSource/python-copier-template/issues/154 +# Remove this line to forbid private member access in tests +"tests/**/*" = ["SLF001"] diff --git a/src/pytac/__init__.py b/src/pytac/__init__.py index 221c2b46..eca16724 100644 --- a/src/pytac/__init__.py +++ b/src/pytac/__init__.py @@ -1,9 +1,10 @@ -import sys +"""Top level API. -if sys.version_info < (3, 8): - from importlib_metadata import version # noqa -else: - from importlib.metadata import version # noqa +.. data:: __version__ + :type: str +""" + +from ._version import __version__ # PV types. SP = "setpoint" @@ -17,7 +18,7 @@ # Default argument flag. DEFAULT = "default" -from . import ( # isort:skip +from . 
import ( data_source, device, element, @@ -28,12 +29,6 @@ utils, ) -"""Ignore isort (flake8 Error 402) as we cannot import these modules at the top of the -file as the strings above must be set first or the imports will fail. -""" -__version__ = version("pytac") -del version - __all__ = [ "__version__", "data_source", diff --git a/src/pytac/__main__.py b/src/pytac/__main__.py index d5a4d195..579b3999 100644 --- a/src/pytac/__main__.py +++ b/src/pytac/__main__.py @@ -1,16 +1,22 @@ from argparse import ArgumentParser +from collections.abc import Sequence from . import __version__ __all__ = ["main"] -def main(args=None): +def main(args: Sequence[str] | None = None) -> None: + """Argument parser for the CLI.""" parser = ArgumentParser() - parser.add_argument("-v", "--version", action="version", version=__version__) - args = parser.parse_args(args) + parser.add_argument( + "-v", + "--version", + action="version", + version=__version__, + ) + parser.parse_args(args) -# test with: python -m pytac if __name__ == "__main__": main() diff --git a/tests/conftest.py b/tests/conftest.py index 307840f6..2c0ded6c 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -3,6 +3,8 @@ from unittest import mock import pytest + +import pytac from constants import ( CURRENT_DIR_PATH, DUMMY_ARRAY, @@ -12,8 +14,6 @@ RB_PV, SP_PV, ) - -import pytac from pytac import load_csv from pytac.data_source import DataSourceManager, DeviceDataSource from pytac.device import EpicsDevice, SimpleDevice diff --git a/tests/test_cothread_cs.py b/tests/test_cothread_cs.py index ca8b6ac7..ef2ad8f5 100644 --- a/tests/test_cothread_cs.py +++ b/tests/test_cothread_cs.py @@ -6,11 +6,11 @@ """ import pytest -from constants import RB_PV, SP_PV from cothread.catools import ca_nothing, caget, caput from testfixtures import LogCapture import pytac +from constants import RB_PV, SP_PV from pytac.cothread_cs import CothreadControlSystem diff --git a/tests/test_data_source.py b/tests/test_data_source.py index 45532e1c..14f522a6 100644 --- a/tests/test_data_source.py +++ b/tests/test_data_source.py @@ -1,7 +1,7 @@ import pytest -from constants import DUMMY_VALUE_2 import pytac +from constants import DUMMY_VALUE_2 @pytest.mark.parametrize( diff --git a/tests/test_device.py b/tests/test_device.py index cbd470a5..5e9a4500 100644 --- a/tests/test_device.py +++ b/tests/test_device.py @@ -1,9 +1,9 @@ from unittest import mock import pytest -from constants import PREFIX, RB_PV, SP_PV import pytac +from constants import PREFIX, RB_PV, SP_PV from pytac.device import EpicsDevice, PvEnabler, SimpleDevice from pytac.exceptions import DataSourceException diff --git a/tests/test_element.py b/tests/test_element.py index 0900e43c..ded68ff3 100644 --- a/tests/test_element.py +++ b/tests/test_element.py @@ -1,9 +1,9 @@ from unittest import mock import pytest -from constants import DUMMY_VALUE_1, DUMMY_VALUE_2 import pytac +from constants import DUMMY_VALUE_1, DUMMY_VALUE_2 from pytac.device import SimpleDevice from pytac.element import Element from pytac.lattice import Lattice diff --git a/tests/test_epics.py b/tests/test_epics.py index bf1ae9ae..5c8165cb 100644 --- a/tests/test_epics.py +++ b/tests/test_epics.py @@ -2,9 +2,9 @@ import numpy import pytest -from constants import DUMMY_ARRAY, RB_PV, SP_PV import pytac +from constants import DUMMY_ARRAY, RB_PV, SP_PV def test_get_values_live(simple_epics_lattice, mock_cs): diff --git a/tests/test_lattice.py b/tests/test_lattice.py index 865cd591..02eb8271 100644 --- a/tests/test_lattice.py +++ 
b/tests/test_lattice.py @@ -2,9 +2,9 @@ import numpy import pytest -from constants import DUMMY_ARRAY, LATTICE_NAME import pytac +from constants import DUMMY_ARRAY, LATTICE_NAME from pytac.element import Element from pytac.lattice import Lattice diff --git a/tests/test_load.py b/tests/test_load.py index 4b5a9b6e..a2663f2a 100644 --- a/tests/test_load.py +++ b/tests/test_load.py @@ -2,10 +2,10 @@ from unittest.mock import patch import pytest -from constants import SUPPORTED_MODES, TESTING_MODE from testfixtures import LogCapture import pytac +from constants import SUPPORTED_MODES, TESTING_MODE from pytac.load_csv import available_ringmodes, load, load_unitconv, resolve_unitconv diff --git a/tests/test_machine.py b/tests/test_machine.py index 703b17bf..91748308 100644 --- a/tests/test_machine.py +++ b/tests/test_machine.py @@ -8,9 +8,9 @@ import numpy import pytest -from constants import TESTING_MODE, TESTING_MODE_RING import pytac +from constants import TESTING_MODE, TESTING_MODE_RING EPS = 1e-8 diff --git a/tests/test_units.py b/tests/test_units.py index df21aae7..c0e91b2d 100644 --- a/tests/test_units.py +++ b/tests/test_units.py @@ -2,9 +2,9 @@ import numpy import pytest -from constants import DUMMY_VALUE_1, DUMMY_VALUE_2, DUMMY_VALUE_3 import pytac +from constants import DUMMY_VALUE_1, DUMMY_VALUE_2, DUMMY_VALUE_3 from pytac.units import NullUnitConv, PchipUnitConv, PolyUnitConv, UnitConv From d3e6e4c5e84d59a6afcef1fe5259570e21647927 Mon Sep 17 00:00:00 2001 From: Phil Smith Date: Mon, 13 Oct 2025 12:45:43 +0000 Subject: [PATCH 3/7] Test fixes and run ruff format --- docs/explanations/what-is-pytac.md | 2 +- docs/tutorials/basic-tutorial.rst | 8 +++---- pyproject.toml | 5 +++- src/pytac/cothread_cs.py | 4 ++-- src/pytac/cs.py | 2 +- src/pytac/data_source.py | 12 ++++++---- src/pytac/device.py | 6 ++--- src/pytac/element.py | 24 +++++++++---------- src/pytac/exceptions.py | 10 ++++---- src/pytac/lattice.py | 25 +++++++++----------- src/pytac/load_csv.py | 38 +++++++++++++++++------------- src/pytac/units.py | 2 +- 12 files changed, 72 insertions(+), 66 deletions(-) diff --git a/docs/explanations/what-is-pytac.md b/docs/explanations/what-is-pytac.md index 0d6ece3e..b1030c37 100644 --- a/docs/explanations/what-is-pytac.md +++ b/docs/explanations/what-is-pytac.md @@ -79,4 +79,4 @@ Models of accelerators, physical or simulated, are defined using a set of ``.csv`` files, located by default in the ``pytac/data`` directory. Each model should be saved in its own directory i.e. different models of the same accelerator should be separate, just as models of different accelerators would -be. \ No newline at end of file +be. diff --git a/docs/tutorials/basic-tutorial.rst b/docs/tutorials/basic-tutorial.rst index 72228591..d9620bc0 100644 --- a/docs/tutorials/basic-tutorial.rst +++ b/docs/tutorials/basic-tutorial.rst @@ -47,10 +47,10 @@ We can ask for the values of these fields. These commands will try to get the real values from the live machine (so won't work if you're not on a suitable Diamond network):: - >>> lattice.get_value("energy") - 3000000000.0 + >>> lattice.get_value("energy", units=pytac.PHYS) + np.float64(3000000000.0) >>> lattice.get_value("beam_current") - 296.6981619696345 + 296.773194429911 Families, elements and fields ----------------------------- @@ -59,7 +59,7 @@ The elements in the lattice are grouped by families, and this is the most common way to choose some to access. 
We can list the available families:: >>> lattice.get_all_families() - ['ap', 'aperture', 'bb', 'bbvmxl', 'bbvmxs', 'bend', 'bpm', 'bpm10', 'bump', 'bumpss', 'd054ba', 'd054bal', 'd09_1', 'd09_10', 'd09_12', 'd09_13', 'd09_14', 'd09_2', 'd09_3', 'd09_5', 'd09_6', 'd09_7', 'd09_8', 'd09_9', 'd104ba0', 'd104ba0r', 'd104ba1', 'd104baa', 'd104bab', 'd104bac', 'd104bad', 'd104bar', 'd10_1', 'd10_2', 'd10_3', 'd10_4', 'd10_5', 'd10_6', 'd10_7', 'd10_8', 'd10_9', 'd13_1', 'd13_10', 'd13_12', 'd13_13', 'd13_14', 'd13_2', 'd13_3', 'd13_5', 'd13_6', 'd13_7', 'd13_8', 'd13_9', 'd154bal', 'd154bar', 'd1a', 'd1aa', 'd1ab', 'd1d2', 'd1m4ba', 'd1m4bal1', 'd1m4bal2', 'd1s4ba', 'd1x', 'd1y', 'd1yad1ya', 'd1yb', 'd20_1', 'd20_2', 'd2a', 'd2b', 'd2b4ba', 'd2xl', 'd2xr', 'd2ya', 'd2yb', 'd2yc', 'd3a', 'd3aa', 'd3ab', 'd3b', 'd3b4ba', 'd3b4bar', 'd4a', 'd4aa', 'd4ab', 'd4b', 'd4b4ba0', 'd4ba', 'd4bb', 'd5b', 'd6b', 'd6b4ba0', 'd6ba', 'd6bb', 'dbpm', 'dhs4ba', 'dhs4bal1', 'dhs4bal2', 'dhs4bar1', 'dhs4bar2', 'dhs4bar3', 'di05', 'dk4bas20', 'dk4bas21', 'dk4bas22', 'dk4bas23', 'dlb4bal', 'dlb4bar', 'dlbm4ba0', 'dlbm4ba1', 'dlbm4bal1', 'dlbm4bal2', 'drbm4ba2', 'drbm4bal1', 'drbm4bal2', 'drbm4bar1', 'drbm4bar2', 'drift', 'drift_drift_s2a', 'drift_s2a', 'dsext', 'hchica', 'hpmu', 'hstr', 'htrim', 'hu64', 'kd1', 'mpw12', 'mpw15', 'mpw45', 'pad', 'q1ab', 'q1ad', 'q1b', 'q1be', 'q1d', 'q2ab', 'q2ad', 'q2b', 'q2be', 'q2d', 'q3b', 'q3d', 'q3e', 'q4e', 'qm09', 'qm13', 'quadrupole', 'rf', 'rfcavity', 's1a', 's1b', 's1be', 's1c', 's1d', 's2a', 's2b', 's2be', 's2c', 's2d', 's3e', 's4e', 'sextupole', 'shim', 'source', 'spacer', 'squad', 'tempdrift', 'u21', 'u23a', 'u23b', 'u27', 'ux', 'vstr', 'vtrim', 'wiggler'] + {'drift_drift_s2a', 'd2yc', 'd10_1', 'd09_6', 'hu64', 'd4b4ba0', 'd13_13', 'd104bar', 'd13_14', 'd104bad', 'd104baa', 'd3b', 'd13_1', 'd13_12', 'q3e', 'd2b4ba', 'dsext', 'd09_7', 'd4b', 'ux', 'q2ad', 'dlbm4ba0', 'dlbm4ba1', 'q2b', 'dk4bas21', 'd13_10', 'd13_6', 's1c', 'd09_1', 'd10_6', 'q3b', 'd054bal', 'd104ba1', 'q2d', 'mpw45', 'd2xl', 'd3b4ba', 'd6b4ba0', 'd2ya', 'hpmu', 'aperture', 'dlbm4bal1', 'vstr', 'd6b', 'u21', 'drbm4bar1', 'd3a', 'd1x', 'd09_10', 'source', 'q1ab', 'shim', 'd2xr', 'd3b4bar', 'd154bal', 'd104ba0r', 'htrim', 'd3aa', 'd5b', 's2be', 'bpm', 'drbm4bal2', 'dhs4bar3', 'd09_13', 'd13_5', 'd4a', 'd13_8', 'mpw15', 'q2ab', 'rf', 'd1a', 'd10_3', 's2d', 's2c', 'd1m4bal1', 'q1d', 'hstr', 'q1b', 'bb', 'spacer', 'bend', 'd104ba0', 'd09_8', 'd104bac', 'bbvmxs', 'd20_2', 'd10_9', 'd104bab', 'd054ba', 's1a', 'dk4bas22', 'd3ab', 's2b', 'd09_14', 'd09_3', 'd09_12', 'qm09', 'bbvmxl', 'bumpss', 'd154bar', 'd4ba', 'd10_8', 'd1d2', 'd4aa', 's4e', 'd1y', 'dlbm4bal2', 'dlb4bar', 'd13_3', 'd4bb', 'squad', 'kd1', 'q1be', 'd1yb', 'd13_2', 'mpw12', 'dhs4bar1', 's1d', 'u23b', 'q1ad', 'ap', 'd1aa', 'dbpm', 's1be', 'd1s4ba', 'wiggler', 'd13_7', 'di05', 'drbm4bal1', 'pad', 'rfcavity', 'd09_5', 'drift_s2a', 'drift', 'drbm4ba2', 'd4ab', 'qm13', 'dhs4bar2', 'd2yb', 'd1ab', 'dhs4bal2', 'd10_5', 'vtrim', 'd10_4', 'q3d', 'q2be', 'bump', 'd09_9', 'd2b', 'd1m4ba', 'u23a', 'd6bb', 'd20_1', 'hchica', 'dk4bas20', 'd09_2', 's3e', 'd6ba', 'd10_2', 'u27', 's2a', 'tempdrift', 'quadrupole', 's1b', 'sextupole', 'd10_7', 'dk4bas23', 'q4e', 'dlb4bal', 'drbm4bar2', 'dhs4ba', 'd1m4bal2', 'bpm10', 'd13_9', 'd1yad1ya', 'd2a', 'dhs4bal1'} Let's get all the beam position monitors (BPMs). 
We do this by using get_elements which takes an argument for family name - in this case we use the family name "BPM":: diff --git a/pyproject.toml b/pyproject.toml index 5c9dfb59..39ef850d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -15,6 +15,8 @@ description = "Python Toolkit for Accelerator Controls (Pytac) is a Python libra dependencies = [ "numpy", "scipy", + "cothread", + "epicscorelibs", #needed to get ca libraries without an epics base install ] # Add project dependencies here, e.g. ["click", "numpy"] dynamic = ["version"] license.file = "LICENSE" @@ -37,6 +39,7 @@ dev = [ "sphinx-design", "tox-direct", "types-mock", + "testfixtures", ] [project.scripts] @@ -64,7 +67,7 @@ addopts = """ # https://iscinumpy.gitlab.io/post/bound-version-constraints/#watch-for-warnings filterwarnings = "error" # Doctest python code in docs, python code in src docstrings, test functions in tests -testpaths = "docs src tests" +testpaths = "src tests" [tool.coverage.run] patch = ["subprocess"] diff --git a/src/pytac/cothread_cs.py b/src/pytac/cothread_cs.py index cf122fed..ac5a9c06 100644 --- a/src/pytac/cothread_cs.py +++ b/src/pytac/cothread_cs.py @@ -39,7 +39,7 @@ def get_single(self, pv, throw=True): except ca_nothing: error_msg = f"Cannot connect to {pv}." if throw: - raise ControlSystemException(error_msg) + raise ControlSystemException(error_msg) # noqa: B904 else: logging.warning(error_msg) return None @@ -96,7 +96,7 @@ def set_single(self, pv, value, throw=True): except ca_nothing: error_msg = f"Cannot connect to {pv}." if throw: - raise ControlSystemException(error_msg) + raise ControlSystemException(error_msg) # noqa: B904 else: logging.warning(error_msg) return False diff --git a/src/pytac/cs.py b/src/pytac/cs.py index 858c58e7..8daca9a7 100644 --- a/src/pytac/cs.py +++ b/src/pytac/cs.py @@ -1,7 +1,7 @@ """Class representing an abstract control system.""" -class ControlSystem(object): +class ControlSystem: """Abstract base class representing a control system. A specialised implementation of this class would be used to communicate diff --git a/src/pytac/data_source.py b/src/pytac/data_source.py index ec75fa74..928052e0 100644 --- a/src/pytac/data_source.py +++ b/src/pytac/data_source.py @@ -4,7 +4,7 @@ from pytac.exceptions import DataSourceException, FieldException -class DataSource(object): +class DataSource: """Abstract base class for element or lattice data sources. Typically an instance would represent hardware via a control system, @@ -54,7 +54,7 @@ def set_value(self, field, value, throw): raise NotImplementedError() -class DataSourceManager(object): +class DataSourceManager: """Class that manages all the data sources and UnitConv objects associated with a lattice or element. @@ -108,7 +108,7 @@ def get_data_source(self, data_source_type): except KeyError: raise DataSourceException( f"No data source {data_source_type} on manager {self}." - ) + ) from KeyError def get_fields(self): """Get all the fields defined on the manager. @@ -178,7 +178,7 @@ def get_unitconv(self, field): except KeyError: raise FieldException( f"No unit conversion option for field {field} on manager {self}." - ) + ) from KeyError def set_unitconv(self, field, uc): """set the unit conversion option for the specified field. @@ -309,7 +309,9 @@ def get_device(self, field): try: return self._devices[field] except KeyError: - raise FieldException(f"No field {field} on data source {self}.") + raise FieldException( + f"No field {field} on data source {self}." 
+ ) from KeyError def get_fields(self): """Get all the fields from the data_source. diff --git a/src/pytac/device.py b/src/pytac/device.py index c139f0f8..4e351359 100644 --- a/src/pytac/device.py +++ b/src/pytac/device.py @@ -6,8 +6,6 @@ magnets and a skew quadrupole. """ -from typing import List, Union - import pytac from pytac.exceptions import DataSourceException, HandleException @@ -64,7 +62,7 @@ class SimpleDevice(Device): def __init__( self, - value: Union[float, List[float]], + value: float | list[float], enabled: bool = True, readonly: bool = True, ): @@ -219,7 +217,7 @@ def get_pv_name(self, handle): raise HandleException(f"Device {self.name} has no {handle} PV.") -class PvEnabler(object): +class PvEnabler: """A PvEnabler class to check whether a device is enabled. The class will behave like True if the PV value equals enabled_value, diff --git a/src/pytac/element.py b/src/pytac/element.py index 160ce8f1..d028f783 100644 --- a/src/pytac/element.py +++ b/src/pytac/element.py @@ -5,7 +5,7 @@ from pytac.exceptions import DataSourceException, FieldException -class Element(object): +class Element: """Class representing one physical element in an accelerator lattice. An element has zero or more devices (e.g. quadrupole magnet) associated @@ -53,7 +53,7 @@ def index(self): if self._lattice is None: return None else: - return self._lattice._elements.index(self) + 1 + return self._lattice._elements.index(self) + 1 # noqa: SLF001 @property def s(self): @@ -158,7 +158,7 @@ def add_device(self, field, device, uc): try: self._data_source_manager.add_device(field, device, uc) except DataSourceException as e: - raise DataSourceException(f"{self}: {e}.") + raise DataSourceException(f"{self}: {e}.") from e def get_device(self, field): """Get the device for the given field. @@ -179,7 +179,7 @@ def get_device(self, field): try: return self._data_source_manager.get_device(field) except DataSourceException as e: - raise DataSourceException(f"{self}: {e}.") + raise DataSourceException(f"{self}: {e}.") from e def get_unitconv(self, field): """Get the unit conversion option for the specified field. @@ -196,7 +196,7 @@ def get_unitconv(self, field): try: return self._data_source_manager.get_unitconv(field) except FieldException as e: - raise FieldException(f"{self}: {e}") + raise FieldException(f"{self}: {e}") from e def set_unitconv(self, field, uc): """Set the unit conversion option for the specified field. 
@@ -261,9 +261,9 @@ def get_value( field, handle, units, data_source, throw ) except DataSourceException as e: - raise DataSourceException(f"{self}: {e}") + raise DataSourceException(f"{self}: {e}") from e except FieldException as e: - raise FieldException(f"{self}: {e}") + raise FieldException(f"{self}: {e}") from e def set_value( self, @@ -292,9 +292,9 @@ def set_value( try: self._data_source_manager.set_value(field, value, units, data_source, throw) except DataSourceException as e: - raise DataSourceException(f"{self}: {e}") + raise DataSourceException(f"{self}: {e}") from e except FieldException as e: - raise FieldException(f"{self}: {e}") + raise FieldException(f"{self}: {e}") from e def set_lattice(self, lattice): """Set the stored lattice reference for this element to the passed @@ -335,11 +335,11 @@ def get_pv_name(self, field, handle): .get_pv_name(handle) ) except DataSourceException as e: - raise DataSourceException(f"{self}: {e}") + raise DataSourceException(f"{self}: {e}") from e except AttributeError: raise DataSourceException( f"Cannot get PV for field {field} on element " f"{self}, as the device does not have associated PVs." - ) + ) from AttributeError except FieldException as e: - raise FieldException(f"{self}: {e}") + raise FieldException(f"{self}: {e}") from e diff --git a/src/pytac/exceptions.py b/src/pytac/exceptions.py index 058d128e..56c8afc3 100644 --- a/src/pytac/exceptions.py +++ b/src/pytac/exceptions.py @@ -1,19 +1,19 @@ """Module containing all the exceptions used in pytac.""" -class FieldException(Exception): +class FieldException(Exception): # noqa: N818 """Exception associated with invalid field requests.""" pass -class HandleException(Exception): +class HandleException(Exception): # noqa: N818 """Exception associated with requests with invalid handles.""" pass -class DataSourceException(Exception): +class DataSourceException(Exception): # noqa: N818 """Exception associated with Device misconfiguration or invalid requests to a data source. """ @@ -21,13 +21,13 @@ class DataSourceException(Exception): pass -class UnitsException(Exception): +class UnitsException(Exception): # noqa: N818 """Conversion not understood.""" pass -class ControlSystemException(Exception): +class ControlSystemException(Exception): # noqa: N818 """Exception associated with control system misconfiguration.""" pass diff --git a/src/pytac/lattice.py b/src/pytac/lattice.py index bbd1bfbb..579525be 100644 --- a/src/pytac/lattice.py +++ b/src/pytac/lattice.py @@ -3,7 +3,6 @@ """ import logging -from typing import List, Optional import numpy @@ -32,7 +31,7 @@ class Lattice: with this lattice. """ - def __init__(self, name: str, symmetry: Optional[int] = None) -> None: + def __init__(self, name: str, symmetry: int | None = None) -> None: """Args: name: The name of the lattice. symmetry: The symmetry of the lattice (the number of cells). 
@@ -41,14 +40,14 @@ def __init__(self, name: str, symmetry: Optional[int] = None) -> None: """ self.name = name self.symmetry = symmetry - self._elements: List[Element] = [] + self._elements: list[Element] = [] self._data_source_manager = DataSourceManager() def __str__(self) -> str: return f"Lattice {self.name}" @property - def cell_length(self) -> Optional[float]: + def cell_length(self) -> float | None: """The average length of a cell in the lattice.""" if (self.symmetry is None) or (self.get_length() == 0): return None @@ -56,7 +55,7 @@ def cell_length(self) -> Optional[float]: return self.get_length() / self.symmetry @property - def cell_bounds(self) -> Optional[List[int]]: + def cell_bounds(self) -> list[int] | None: """The indexes of elements in which a cell boundary occurs. Examples: @@ -430,7 +429,7 @@ def set_element_values( f"Number of elements in given array({len(values)}) must be " f"equal to the number of elements in the family({len(elements)})." ) - for element, value in zip(elements, values): + for element, value in zip(elements, values, strict=False): status = element.set_value( field, value, @@ -477,7 +476,7 @@ def set_default_data_source(self, data_source_type: str) -> None: self._data_source_manager.default_data_source = data_source_type elems = self.get_elements() for elem in elems: - elem._data_source_manager.default_data_source = data_source_type + elem._data_source_manager.default_data_source = data_source_type # noqa: SLF001 else: raise DataSourceException( f"{data_source_type} is not a data source. " @@ -519,7 +518,7 @@ def convert_family_values(self, family, field, values, origin, target): f"be equal to the number of elements in the family({len(elements)})." ) converted_values = [] - for elem, value in zip(elements, values): + for elem, value in zip(elements, values, strict=False): uc = elem.get_unitconv(field) converted_values.append(uc.convert(value, origin, target)) return converted_values @@ -556,7 +555,7 @@ def __init__(self, name, epics_cs, symmetry=None): **Methods:** """ - super(EpicsLattice, self).__init__(name, symmetry) + super().__init__(name, symmetry) self._cs = epics_cs def get_pv_name(self, field, handle): @@ -579,7 +578,7 @@ def get_pv_name(self, field, handle): raise DataSourceException( f"Cannot get PV for field {field} on lattice " f"{self}, as the device does not have associated PVs." 
- ) + ) from AttributeError def get_element_pv_names(self, family, field, handle): """Get the PV names for the given field, and handle, on all elements @@ -642,7 +641,7 @@ def get_element_values( family, field, values, pytac.ENG, pytac.PHYS ) else: - values = super(EpicsLattice, self).get_element_values( + values = super().get_element_values( family, field, handle, units, data_source, throw ) if dtype is not None: @@ -692,6 +691,4 @@ def set_element_values( ) self._cs.set_multiple(pv_names, values, throw) else: - super(EpicsLattice, self).set_element_values( - family, field, values, units, data_source, throw - ) + super().set_element_values(family, field, values, units, data_source, throw) diff --git a/src/pytac/load_csv.py b/src/pytac/load_csv.py index 29f74d9a..7f9a3fc5 100644 --- a/src/pytac/load_csv.py +++ b/src/pytac/load_csv.py @@ -17,8 +17,8 @@ import csv import logging import os +from collections.abc import Iterator from pathlib import Path -from typing import Dict, Iterator, Set import pytac from pytac import data_source, element, utils @@ -43,7 +43,7 @@ def csv_loader(csv_file: Path) -> Iterator[csv.DictReader]: yield csv_reader -def load_poly_unitconv(filepath: Path) -> Dict[int, PolyUnitConv]: +def load_poly_unitconv(filepath: Path) -> dict[int, PolyUnitConv]: """Load polynomial unit conversions from a csv file. Args: @@ -52,19 +52,19 @@ def load_poly_unitconv(filepath: Path) -> Dict[int, PolyUnitConv]: Returns: dict: A dictionary of the unit conversions. """ - unitconvs: Dict[int, PolyUnitConv] = {} + unitconvs: dict[int, PolyUnitConv] = {} data = collections.defaultdict(list) with csv_loader(filepath) as csv_reader: for item in csv_reader: data[(int(item["uc_id"]))].append((int(item["coeff"]), float(item["val"]))) # Create PolyUnitConv for each item and put in the dict for uc_id in data: - u = PolyUnitConv([x[1] for x in reversed(sorted(data[uc_id]))], name=uc_id) + u = PolyUnitConv([x[1] for x in sorted(data[uc_id], reverse=True)], name=uc_id) unitconvs[uc_id] = u return unitconvs -def load_pchip_unitconv(filepath: Path) -> Dict[int, PchipUnitConv]: +def load_pchip_unitconv(filepath: Path) -> dict[int, PchipUnitConv]: """Load pchip unit conversions from a csv file. Args: @@ -73,7 +73,7 @@ def load_pchip_unitconv(filepath: Path) -> Dict[int, PchipUnitConv]: Returns: dict: A dictionary of the unit conversions. """ - unitconvs: Dict[int, PchipUnitConv] = {} + unitconvs: dict[int, PchipUnitConv] = {} data = collections.defaultdict(list) with csv_loader(filepath) as csv_reader: for item in csv_reader: @@ -88,7 +88,7 @@ def load_pchip_unitconv(filepath: Path) -> Dict[int, PchipUnitConv]: def resolve_unitconv( - uc_params: Dict, unitconvs: Dict, polyconv_file: Path, pchipconv_file: Path + uc_params: dict, unitconvs: dict, polyconv_file: Path, pchipconv_file: Path ) -> UnitConv: """Create a unit conversion object based on the dictionary of parameters passed. @@ -118,11 +118,17 @@ def resolve_unitconv( uc = copy.copy(unitconvs[int(uc_params["uc_id"])]) except KeyError: if uc_params["uc_type"] == "poly" and not polyconv_file.exists(): - raise UnitsException(error_msg + f"{polyconv_file} not found.") + raise UnitsException( + error_msg + f"{polyconv_file} not found." + ) from KeyError elif uc_params["uc_type"] == "pchip" and not pchipconv_file.exists(): - raise UnitsException(error_msg + f"{pchipconv_file} not found.") + raise UnitsException( + error_msg + f"{pchipconv_file} not found." 
+ ) from KeyError else: - raise UnitsException(error_msg + "unrecognised UnitConv type.") + raise UnitsException( + error_msg + "unrecognised UnitConv type." + ) from KeyError uc.phys_units = uc_params["phys_units"] uc.eng_units = uc_params["eng_units"] lower, upper = [ @@ -140,7 +146,7 @@ def load_unitconv(mode_dir: Path, lattice: Lattice) -> None: mode_dir: Path to directory containing CSV files. lattice: The lattice object that will be used. """ - unitconvs: Dict[int, UnitConv] = {} + unitconvs: dict[int, UnitConv] = {} # Assemble datasets from the polynomial file polyconv_file = mode_dir / POLY_FILENAME if polyconv_file.exists(): @@ -173,7 +179,7 @@ def load_unitconv(mode_dir: Path, lattice: Lattice) -> None: "multipole", "bend", } - if item["uc_type"] != "null" and element._families & rigidity_families: + if item["uc_type"] != "null" and element._families & rigidity_families: # noqa: SLF001 energy = lattice.get_value("energy", units=pytac.ENG) uc.set_post_eng_to_phys(utils.get_div_rigidity(energy)) uc.set_pre_phys_to_eng(utils.get_mult_rigidity(energy)) @@ -211,7 +217,7 @@ def load(mode, control_system=None, directory=None, symmetry=None) -> EpicsLatti raise ControlSystemException( "Please install cothread to load a lattice using the default control system" " (found in cothread_cs.py)." - ) + ) from ImportError if directory is None: directory = Path(__file__).resolve().parent / "data" mode_dir = directory / mode @@ -252,10 +258,10 @@ def load(mode, control_system=None, directory=None, symmetry=None) -> EpicsLatti try: readonly = ast.literal_eval(item["readonly"]) assert isinstance(readonly, bool) - except (ValueError, AssertionError): + except (ValueError, AssertionError) as e: raise ValueError( f"Unable to evaluate {item['readonly']} as a boolean." - ) + ) from e # Devices on index 0 are attached to the lattice not elements. target = lat if index == 0 else lat[index - 1] # Create with a default UnitConv that returns the input unchanged. @@ -273,7 +279,7 @@ def load(mode, control_system=None, directory=None, symmetry=None) -> EpicsLatti return lat -def available_ringmodes(directory=None) -> Set[str]: +def available_ringmodes(directory=None) -> set[str]: """Return the possible ringmodes based on the subdirectories and files in the given directory. diff --git a/src/pytac/units.py b/src/pytac/units.py index 2d51ab65..7f463cbd 100644 --- a/src/pytac/units.py +++ b/src/pytac/units.py @@ -407,7 +407,7 @@ def __init__( # Note that the x coefficients are checked by the PchipInterpolator # constructor. y_diff = numpy.diff(y) - if not ((numpy.all(y_diff > 0)) or (numpy.all((y_diff < 0)))): + if not ((numpy.all(y_diff > 0)) or (numpy.all(y_diff < 0))): raise ValueError( "y coefficients must be monotonically increasing or decreasing." 
) From 09604abba34aee8c8c5abfed56cec0d2bce59a32 Mon Sep 17 00:00:00 2001 From: Phil Smith Date: Mon, 13 Oct 2025 13:36:39 +0000 Subject: [PATCH 4/7] Stop sphinx build failing on nitpicky warnings --- pyproject.toml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 39ef850d..8b0406bb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -98,8 +98,9 @@ commands = pre-commit: pre-commit run --all-files --show-diff-on-failure {posargs} type-checking: mypy src tests {posargs} tests: pytest --cov=pytac --cov-report term --cov-report xml:cov.xml {posargs} - docs: sphinx-{posargs:build -EW --keep-going} -T docs build/html + docs: sphinx-{posargs:build -E --keep-going} -T docs build/html """ +# Add -W flag to sphinx-build if you want to fail on warnings [tool.ruff] src = ["src", "tests"] From b4c23894ed3c0a82ad0fa1b5eaee0b90446353b9 Mon Sep 17 00:00:00 2001 From: Phil Smith Date: Mon, 13 Oct 2025 13:36:50 +0000 Subject: [PATCH 5/7] Add epicscorelibs as dependency --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 8b0406bb..f787e9c5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,7 +16,7 @@ dependencies = [ "numpy", "scipy", "cothread", - "epicscorelibs", #needed to get ca libraries without an epics base install + "epicscorelibs", ] # Add project dependencies here, e.g. ["click", "numpy"] dynamic = ["version"] license.file = "LICENSE" From d628c88fe6517422d59328ceeccc2740c8834f35 Mon Sep 17 00:00:00 2001 From: Phil Smith Date: Mon, 13 Oct 2025 13:37:12 +0000 Subject: [PATCH 6/7] Fix ruff check complaints --- src/pytac/__init__.py | 10 ---------- tests/conftest.py | 2 +- tests/test_cothread_cs.py | 10 +++++----- tests/test_device.py | 6 +++--- tests/test_element.py | 12 ++++++------ tests/test_epics.py | 8 ++++---- tests/test_invalid_classes.py | 6 +++--- tests/test_lattice.py | 16 +++++++++------- tests/test_load.py | 29 ++++++++++++++++++----------- tests/test_machine.py | 8 ++++---- tests/test_units.py | 22 +++++++++++----------- 11 files changed, 64 insertions(+), 65 deletions(-) diff --git a/src/pytac/__init__.py b/src/pytac/__init__.py index eca16724..5571c537 100644 --- a/src/pytac/__init__.py +++ b/src/pytac/__init__.py @@ -18,16 +18,6 @@ # Default argument flag. DEFAULT = "default" -from . import ( - data_source, - device, - element, - exceptions, - lattice, - load_csv, - units, - utils, -) __all__ = [ "__version__", diff --git a/tests/conftest.py b/tests/conftest.py index 2c0ded6c..b2b4500e 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -31,7 +31,7 @@ def pytest_sessionstart(): (caget and caput). 
""" - class ca_nothing(Exception): + class ca_nothing(Exception): # noqa: N801, N818 """A minimal mock of the cothread ca_nothing exception class.""" def __init__(self, name, errorcode=True): diff --git a/tests/test_cothread_cs.py b/tests/test_cothread_cs.py index ef2ad8f5..288ef7ff 100644 --- a/tests/test_cothread_cs.py +++ b/tests/test_cothread_cs.py @@ -55,7 +55,7 @@ def test_set_multiple_calls_caput_correctly(cs): ) -def test_get_multiple_raises_ControlSystemException(cs): +def test_get_multiple_raises_control_system_exception(cs): """Here we check that errors are thrown, suppressed and logged correctly.""" caget.return_value = [12, ca_nothing("pv", False)] with pytest.raises(pytac.exceptions.ControlSystemException): @@ -65,7 +65,7 @@ def test_get_multiple_raises_ControlSystemException(cs): log.check(("root", "WARNING", "Cannot connect to pv.")) -def test_set_multiple_raises_ControlSystemException(cs): +def test_set_multiple_raises_control_system_exception(cs): """Here we check that errors are thrown, suppressed and logged correctly.""" caput.return_value = [ca_nothing("pv1", True), ca_nothing("pv2", False)] with pytest.raises(pytac.exceptions.ControlSystemException): @@ -75,7 +75,7 @@ def test_set_multiple_raises_ControlSystemException(cs): log.check(("root", "WARNING", "Cannot connect to pv2.")) -def test_get_single_raises_ControlSystemException(cs): +def test_get_single_raises_control_system_exception(cs): """Here we check that errors are thrown, suppressed and logged correctly.""" caget.side_effect = ca_nothing("pv", False) with LogCapture() as log: @@ -85,7 +85,7 @@ def test_get_single_raises_ControlSystemException(cs): log.check(("root", "WARNING", "Cannot connect to prefix:rb.")) -def test_set_single_raises_ControlSystemException(cs): +def test_set_single_raises_control_system_exception(cs): """Here we check that errors are thrown, suppressed and logged correctly.""" caput.side_effect = ca_nothing("pv", False) with LogCapture() as log: @@ -95,7 +95,7 @@ def test_set_single_raises_ControlSystemException(cs): log.check(("root", "WARNING", "Cannot connect to prefix:sp.")) -def test_set_multiple_raises_ValueError_on_input_length_mismatch(cs): +def test_set_multiple_raises_value_error_on_input_length_mismatch(cs): with pytest.raises(ValueError): cs.set_multiple([SP_PV], [42, 6]) with pytest.raises(ValueError): diff --git a/tests/test_device.py b/tests/test_device.py index 5e9a4500..245db4fc 100644 --- a/tests/test_device.py +++ b/tests/test_device.py @@ -61,7 +61,7 @@ def test_get_simple_device_value_with_handle(): assert device.get_value(handle=pytac.RB) == 1.0 -def test_simple_device_raises_DataSourceException_if_readonly_and_set_value_called(): +def test_simple_device_raises_data_source_exception_if_readonly_and_set_value_called(): device = SimpleDevice(10, readonly=True) with pytest.raises(DataSourceException): device.set_value(4) @@ -79,7 +79,7 @@ def test_device_is_enabled_by_default(device_creation_function): @pytest.mark.parametrize( "device_creation_function", [create_epics_device, create_simple_device] ) -def test_device_is_disabled_if_False_enabler(device_creation_function): +def test_device_is_disabled_if_false_enabler(device_creation_function): device = device_creation_function(enabled=False) assert not device.is_enabled() @@ -93,7 +93,7 @@ def test_device_is_enabled_returns_bool_value(device_creation_function): # PvEnabler test. 
-def test_PvEnabler(mock_cs): +def test_pv_enabler(mock_cs): pve = PvEnabler("enable-pv", 40, mock_cs) assert pve mock_cs.get_single.return_value = 50 diff --git a/tests/test_element.py b/tests/test_element.py index ded68ff3..7efb4b2d 100644 --- a/tests/test_element.py +++ b/tests/test_element.py @@ -18,7 +18,7 @@ def test_create_element(): assert e._lattice == lat -def test_element_properties_are_None_without_lattice(): +def test_element_properties_are_none_without_lattice(): e = Element(1.2, "SEXT") assert e.index is None assert e.s is None @@ -53,7 +53,7 @@ def test_add_element_to_family_and_case_insensitive_retrieval(): assert e.is_in_family("FAM") -def test_device_methods_raise_DataSourceException_if_no_live_data_source( +def test_device_methods_raise_data_source_exception_if_no_live_data_source( simple_element, ): basic_element = simple_element @@ -66,7 +66,7 @@ def test_device_methods_raise_DataSourceException_if_no_live_data_source( basic_element.get_device("x") -def test_get_device_raises_KeyError_if_device_not_present(simple_element): +def test_get_device_raises_key_error_if_device_not_present(simple_element): with pytest.raises(pytac.exceptions.FieldException): simple_element.get_device("not-a-device") @@ -76,7 +76,7 @@ def test_get_unitconv_returns_unitconv_object(simple_element, unit_uc, double_uc assert simple_element.get_unitconv("y") == double_uc -def test_set_unit_conv(simple_element): +def test_set_unitconv(simple_element): with pytest.raises(KeyError): simple_element._data_source_manager._uc["field1"] uc = mock.Mock() @@ -84,7 +84,7 @@ def test_set_unit_conv(simple_element): assert simple_element._data_source_manager._uc["field1"] == uc -def test_get_unitconv_raises_FieldException_if_device_not_present(simple_element): +def test_get_unitconv_raises_field_exception_if_device_not_present(simple_element): with pytest.raises(pytac.exceptions.FieldException): simple_element.get_unitconv("not-a-device") @@ -144,7 +144,7 @@ def test_identity_conversion(simple_element): def test_get_fields(simple_element): - assert set(simple_element.get_fields()[pytac.LIVE]) == set(["y", "x"]) + assert set(simple_element.get_fields()[pytac.LIVE]) == {"y", "x"} def test_element_representation(): diff --git a/tests/test_epics.py b/tests/test_epics.py index 5c8165cb..dd5462a9 100644 --- a/tests/test_epics.py +++ b/tests/test_epics.py @@ -93,12 +93,12 @@ def test_get_value_uses_cs_if_data_source_live(simple_epics_element, mock_cs): mock_cs.get_single.assert_called_with(RB_PV, True) -def test_get_value_raises_HandleExceptions(simple_epics_element): +def test_get_value_raises_handle_exceptions(simple_epics_element): with pytest.raises(pytac.exceptions.HandleException): simple_epics_element.get_value("y", "unknown_handle") -def test_lattice_get_pv_name_raises_DataSourceException(simple_epics_lattice): +def test_lattice_get_pv_name_raises_data_source_exception(simple_epics_lattice): basic_epics_lattice = simple_epics_lattice with pytest.raises(pytac.exceptions.DataSourceException): basic_epics_lattice.get_pv_name("basic", pytac.RB) @@ -107,7 +107,7 @@ def test_lattice_get_pv_name_raises_DataSourceException(simple_epics_lattice): basic_epics_lattice.get_pv_name("x", pytac.RB) -def test_set_element_values_length_mismatch_raises_IndexError(simple_epics_lattice): +def test_set_element_values_length_mismatch_raises_index_error(simple_epics_lattice): with pytest.raises(IndexError): simple_epics_lattice.set_element_values("family", "x", [1, 2]) with pytest.raises(IndexError): @@ -125,6 +125,6 @@ def 
test_element_get_pv_name_raises_exceptions(simple_epics_element): basic_epics_element.get_pv_name("x", pytac.RB) -def test_create_EpicsDevice_raises_DataSourceException_if_no_PVs_are_given(): +def test_create_epics_device_raises_data_source_exception_if_no_PVs_are_given(): # noqa: N802 with pytest.raises(pytac.exceptions.DataSourceException): pytac.device.EpicsDevice("device_1", "a_control_system") diff --git a/tests/test_invalid_classes.py b/tests/test_invalid_classes.py index 290c6709..d662d798 100644 --- a/tests/test_invalid_classes.py +++ b/tests/test_invalid_classes.py @@ -3,7 +3,7 @@ from pytac import cs, data_source, device -def test_ControlSystem_throws_NotImplementedError(): +def test_control_system_throws_not_implemented_error(): test_cs = cs.ControlSystem() with pytest.raises(NotImplementedError): test_cs.get_single("dummy", "throw") @@ -15,7 +15,7 @@ def test_ControlSystem_throws_NotImplementedError(): test_cs.set_multiple(["dummy_1", "dummy_2"], [1, 2], "throw") -def test_DataSource_throws_NotImplementedError(): +def test_data_source_throws_not_implemented_error(): test_ds = data_source.DataSource() with pytest.raises(NotImplementedError): test_ds.get_fields() @@ -25,7 +25,7 @@ def test_DataSource_throws_NotImplementedError(): test_ds.set_value("field", 0.0, "throw") -def test_Device_throws_NotImplementedError(): +def test_device_throws_not_implemented_error(): test_d = device.Device() with pytest.raises(NotImplementedError): test_d.is_enabled() diff --git a/tests/test_lattice.py b/tests/test_lattice.py index 02eb8271..1605aaf7 100644 --- a/tests/test_lattice.py +++ b/tests/test_lattice.py @@ -39,7 +39,7 @@ def test_lattice_without_symmetry(): def test_lattice_cell_properties(): lat = Lattice("", 2) - for i in range(5): + for _ in range(5): lat.add_element(Element(0.5, "DRIFT")) assert lat.cell_length == 1.25 assert lat.cell_bounds == [1, 4, 5] @@ -51,7 +51,7 @@ def test_get_element_devices(simple_lattice): assert devices[0].name == "x_device" -def test_device_methods_raise_DataSourceException_if_no_live_data_source( +def test_device_methods_raise_data_source_exception_if_no_live_data_source( simple_lattice, ): basic_lattice = simple_lattice @@ -64,7 +64,7 @@ def test_device_methods_raise_DataSourceException_if_no_live_data_source( basic_lattice.get_device("x") -def test_get_unitconv_raises_FieldException_if_no_uc_for_field(simple_lattice): +def test_get_unitconv_raises_field_exception_if_no_uc_for_field(simple_lattice): with pytest.raises(pytac.exceptions.FieldException): simple_lattice.get_unitconv("not_a_field") @@ -93,7 +93,7 @@ def test_set_value_raises_exceptions_correctly(simple_lattice): simple_lattice.set_value("not_a_field", 0) -def test_get_element_devices_raises_ValueError_for_mismatched_family(simple_lattice): +def test_get_element_devices_raises_value_error_for_mismatched_family(simple_lattice): with pytest.raises(ValueError): devices = simple_lattice.get_element_devices("not-a-family", "x") basic_element = simple_lattice.get_elements("family")[0] @@ -104,7 +104,9 @@ def test_get_element_devices_raises_ValueError_for_mismatched_family(simple_latt assert devices == [] -def test_get_element_devices_raises_FieldException_if_field_not_matched(simple_lattice): +def test_get_element_devices_raises_field_exception_if_field_not_matched( + simple_lattice, +): with pytest.raises(pytac.exceptions.FieldException): simple_lattice.get_element_devices("family", "not-a-field") @@ -176,7 +178,7 @@ def test_set_element_values(simple_lattice): ) -def 
test_set_element_values_raises_Exceptions_correctly(simple_lattice): +def test_set_element_values_raises_exceptions_correctly(simple_lattice): with pytest.raises(IndexError): simple_lattice.set_element_values("family", "x", [1, 2]) with pytest.raises(IndexError): @@ -240,7 +242,7 @@ def test_convert_family_values(simple_lattice): assert post_values == [12] -def test_convert_family_values_length_mismatch_raises_IndexError(simple_lattice): +def test_convert_family_values_length_mismatch_raises_index_error(simple_lattice): with pytest.raises(IndexError): simple_lattice.convert_family_values( "family", "x", [1, 2], pytac.ENG, pytac.PHYS diff --git a/tests/test_load.py b/tests/test_load.py index a2663f2a..9df124e1 100644 --- a/tests/test_load.py +++ b/tests/test_load.py @@ -10,7 +10,7 @@ @pytest.fixture -def mock_cs_raises_ImportError(): +def mock_cs_raises_import_error(): """We create a mock control system to replace CothreadControlSystem, so that we can check that when it raises an ImportError load_csv.load catches it and raises a ControlSystemException instead. @@ -21,8 +21,9 @@ def mock_cs_raises_ImportError(): ImportError when the code is compiled. """ - def CothreadControlSystem(): - raise ImportError + class CothreadControlSystem: + def __init__(self): + raise ImportError return CothreadControlSystem @@ -37,13 +38,13 @@ def test_default_control_system_import(): assert isinstance(load(TESTING_MODE)._cs, pytac.cothread_cs.CothreadControlSystem) -def test_import_fail_raises_ControlSystemException(mock_cs_raises_ImportError): +def test_import_fail_raises_control_system_exception(mock_cs_raises_import_error): """In this test we: - check that load corectly fails if cothread cannot be imported - check that when the import of the CothreadControlSystem fails the ImportError raised is replaced with a ControlSystemException """ - with patch("pytac.cothread_cs.CothreadControlSystem", mock_cs_raises_ImportError): + with patch("pytac.cothread_cs.CothreadControlSystem", mock_cs_raises_import_error): with pytest.raises(pytac.exceptions.ControlSystemException): load(TESTING_MODE) @@ -69,10 +70,16 @@ def test_devices_loaded(lattice): def test_families_loaded(lattice): - assert lattice.get_all_families() == set( - ["drift", "sext", "quad", "ds", "qf", "qs", "sd"] - ) - assert lattice.get_elements("quad")[0].families == set(["quad", "qf", "qs"]) + assert lattice.get_all_families() == { + "drift", + "sext", + "quad", + "ds", + "qf", + "qs", + "sd", + } + assert lattice.get_elements("quad")[0].families == {"quad", "qf", "qs"} def test_load_unitconv_warns_if_pchip_or_poly_data_file_not_found( @@ -94,7 +101,7 @@ def test_load_unitconv_warns_if_pchip_or_poly_data_file_not_found( ) -def test_resolve_unitconv_raises_UnitsException_if_pchip_or_poly_data_file_not_found( +def test_resolve_unitconv_raises_units_exception_if_pchip_or_poly_data_file_not_found( polyconv_file, pchipconv_file ): uc_params = { @@ -119,7 +126,7 @@ def test_resolve_unitconv_raises_UnitsException_if_pchip_or_poly_data_file_not_f resolve_unitconv(uc_params, {}, polyconv_file, pchipconv_file) -def test_resolve_unitconv_raises_UnitsException_if_unrecognised_UnitConv_type( +def test_resolve_unitconv_raises_units_exception_if_unrecognised_unitconv_type( polyconv_file, pchipconv_file ): uc_params = { diff --git a/tests/test_machine.py b/tests/test_machine.py index 91748308..7481fd9d 100644 --- a/tests/test_machine.py +++ b/tests/test_machine.py @@ -99,7 +99,7 @@ def test_load_quadrupoles(lattice, n_quads, request): quads = 
lattice.get_elements("Quadrupole") assert len(quads) == n_quads for quad in quads: - assert set(quad.get_fields()[pytac.LIVE]) == set(["b1"]) + assert set(quad.get_fields()[pytac.LIVE]) == {"b1"} device = quad.get_device("b1") assert re.match("SR.*Q.*:I", device.rb_pv) assert re.match("SR.*Q.*:SETI", device.sp_pv) @@ -197,11 +197,11 @@ def test_quad_unitconv_raise_exception(): def test_quad_unitconv_known_failing_test(): - LAT_ENERGY = 3000 + lat_energy = 3000 uc = pytac.units.PchipUnitConv([50.0, 100.0, 180.0], [-4.95, -9.85, -17.56]) - uc._post_eng_to_phys = pytac.utils.get_div_rigidity(LAT_ENERGY) - uc._pre_phys_to_eng = pytac.utils.get_mult_rigidity(LAT_ENERGY) + uc._post_eng_to_phys = pytac.utils.get_div_rigidity(lat_energy) + uc._pre_phys_to_eng = pytac.utils.get_mult_rigidity(lat_energy) numpy.testing.assert_allclose(uc.eng_to_phys(70), -0.69133465) numpy.testing.assert_allclose(uc.phys_to_eng(-0.7), 70.8834284954) diff --git a/tests/test_units.py b/tests/test_units.py index c0e91b2d..27424bfb 100644 --- a/tests/test_units.py +++ b/tests/test_units.py @@ -16,7 +16,7 @@ def f2(value): return value / 2 -def test_UnitConv_not_implemented(): +def test_unitconv_not_implemented(): uc = UnitConv(0) with pytest.raises(NotImplementedError): uc.convert(10, pytac.PHYS, pytac.ENG) @@ -64,7 +64,7 @@ def test_get_conversion_limits(): @pytest.mark.parametrize( "origin, target", [(pytac.ENG, pytac.PHYS), (pytac.PHYS, pytac.ENG)] ) -def test_UnitConv_raises_UnitsException_for_values_outside_limits(origin, target): +def test_unitconv_raises_units_exception_for_values_outside_limits(origin, target): uc = NullUnitConv() uc.set_conversion_limits(0, 10) with pytest.raises(pytac.exceptions.UnitsException): @@ -73,7 +73,7 @@ def test_UnitConv_raises_UnitsException_for_values_outside_limits(origin, target uc.convert(11, origin, target) # above upper limit -def test_UnitConv_includes_name_in_exception(): +def test_unitconv_includes_name_in_exception(): uc = UnitConv(name="test_unitconv") with pytest.raises(NotImplementedError, match="test_unitconv"): uc.convert(10, pytac.ENG, pytac.PHYS) @@ -82,7 +82,7 @@ def test_UnitConv_includes_name_in_exception(): @pytest.mark.parametrize( "origin, target", [(pytac.LIVE, pytac.ENG), (pytac.PHYS, pytac.SP), ("a", "b")] ) -def test_UnitConv_requires_correct_arguments(origin, target): +def test_unitconv_requires_correct_arguments(origin, target): uc = UnitConv(name=12) assert uc.name == 12 with pytest.raises(pytac.exceptions.UnitsException): @@ -133,7 +133,7 @@ def test_quadratic_conversion(): quadratic_conversion.convert(2.5, pytac.PHYS, pytac.ENG) -def test_poly_unit_conv_removes_imaginary_roots(): +def test_poly_unitconv_removes_imaginary_roots(): poly_uc = PolyUnitConv([1, -3, 4]) with pytest.raises(pytac.exceptions.UnitsException): poly_uc.convert(1, pytac.PHYS, pytac.ENG) @@ -161,19 +161,19 @@ def test_pp_conversion_to_machine_2_points(): assert pchip_uc.phys_to_eng(1.5) == 1.5 -def test_PchipInterpolator_raises_ValueError_if_x_not_monotonically_increasing(): +def test_pchip_interpolator_raises_value_error_if_x_not_monotonically_increasing(): with pytest.raises(ValueError): PchipUnitConv([1, 3, 2], [1, 2, 3]) with pytest.raises(ValueError): PchipUnitConv([-1, -2, -3], [-1, -2, -3]) -def test_PchipInterpolator_raises_ValueError_if_y_not_monotonic(): +def test_pchip_interpolator_raises_value_error_if_y_not_monotonic(): with pytest.raises(ValueError): PchipUnitConv([1, 2, 3], [1, 3, 2]) -def test_PchipUnitConv_with_solution_outside_bounds_raises_UnitsException(): +def 
test_pchip_unitconv_with_solution_outside_bounds_raises_units_exception(): # This is a linear relationship, but the root is 0, outside of the # range of measurements. pchip_uc = PchipUnitConv((1, 2, 3), (1, 2, 3)) @@ -181,7 +181,7 @@ def test_PchipUnitConv_with_solution_outside_bounds_raises_UnitsException(): pchip_uc.phys_to_eng(0) -def test_PchipUnitConv_with_additional_function(): +def test_pchip_unitconv_with_additional_function(): pchip_uc = PchipUnitConv([2, 4], [2, 4], f1, f2) assert pchip_uc.eng_to_phys(2) == 4.0 assert pchip_uc.eng_to_phys(3) == 6.0 @@ -189,7 +189,7 @@ def test_PchipUnitConv_with_additional_function(): assert pchip_uc.phys_to_eng(6.0) == 3 -def test_PolyUnitConv_with_additional_function(): +def test_poly_unitconv_with_additional_function(): poly_uc = PolyUnitConv([2, 3], f1, f2) assert poly_uc.eng_to_phys(4) == 22.0 assert poly_uc.eng_to_phys(5) == 26.0 @@ -199,7 +199,7 @@ def test_PolyUnitConv_with_additional_function(): assert poly_uc.phys_to_eng(18.0) == 3 -def test_NullUnitConv(): +def test_null_unitconv(): null_uc = NullUnitConv() assert null_uc.eng_to_phys(DUMMY_VALUE_1) == DUMMY_VALUE_1 assert null_uc.eng_to_phys(DUMMY_VALUE_2) == DUMMY_VALUE_2 From c92300dc794389d5adf4928b30af91c106c64552 Mon Sep 17 00:00:00 2001 From: Phil Smith Date: Tue, 14 Oct 2025 09:43:59 +0000 Subject: [PATCH 7/7] Add init imports back in --- src/pytac/__init__.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/src/pytac/__init__.py b/src/pytac/__init__.py index 5571c537..192207f6 100644 --- a/src/pytac/__init__.py +++ b/src/pytac/__init__.py @@ -18,6 +18,16 @@ # Default argument flag. DEFAULT = "default" +from . import ( # noqa: E402 + data_source, + device, + element, + exceptions, + lattice, + load_csv, + units, + utils, +) __all__ = [ "__version__",