diff --git a/.copier-answers.yml b/.copier-answers.yml
new file mode 100644
index 0000000..5a456e9
--- /dev/null
+++ b/.copier-answers.yml
@@ -0,0 +1,18 @@
+# Changes here will be overwritten by Copier
+_commit: 2.6.0
+_src_path: gh:DiamondLightSource/python-copier-template
+author_email: tobyn.nicholls@diamond.ac.uk, philip.smith@diamond.ac.uk
+author_name: Tobyn Nicholls, Phil Smith
+component_lifecycle: production
+component_owner: group:default/high-level-apps
+component_type: user-interface
+description: Accelerator Toolbox Interface for Pytac
+distribution_name: atip
+docker: true
+docs_type: sphinx
+git_platform: github.com
+github_org: DiamondLightSource
+package_name: atip
+pypi: true
+repo_name: atip
+type_checker: pyright
diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json
new file mode 100644
index 0000000..979a89c
--- /dev/null
+++ b/.devcontainer/devcontainer.json
@@ -0,0 +1,46 @@
+// For format details, see https://containers.dev/implementors/json_reference/
+{
+ "name": "Python 3 Developer Container",
+ "build": {
+ "dockerfile": "../Dockerfile",
+ "target": "developer"
+ },
+ "remoteEnv": {
+ // Allow X11 apps to run inside the container
+ "DISPLAY": "${localEnv:DISPLAY}"
+ },
+ "customizations": {
+ "vscode": {
+ // Set *default* container specific settings.json values on container create.
+ "settings": {
+ "python.defaultInterpreterPath": "/venv/bin/python"
+ },
+ // Add the IDs of extensions you want installed when the container is created.
+ "extensions": [
+ "ms-python.python",
+ "github.vscode-github-actions",
+ "tamasfe.even-better-toml",
+ "redhat.vscode-yaml",
+ "ryanluker.vscode-coverage-gutters",
+ "charliermarsh.ruff",
+ "ms-azuretools.vscode-docker"
+ ]
+ }
+ },
+ "features": {
+ // add in eternal history and other bash features
+ "ghcr.io/diamondlightsource/devcontainer-features/bash-config:1.0.0": {}
+ },
+ // Create the config folder for the bash-config feature
+ "initializeCommand": "mkdir -p ${localEnv:HOME}/.config/bash-config",
+ "runArgs": [
+ // Allow the container to access the host X11 display and EPICS CA
+ "--net=host",
+ // Make sure SELinux does not interfere with access to host filesystems like tmp
+ "--security-opt=label=disable"
+ ],
+ // Mount the parent as /workspaces so we can pip install peers as editable
+ "workspaceMount": "source=${localWorkspaceFolder}/..,target=/workspaces,type=bind",
+ // After the container is created, install the python project in editable form
+ "postCreateCommand": "pip install $([ -f dev-requirements.txt ] && echo '-c dev-requirements.txt') -e '.[dev]' && pre-commit install"
+}
diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md
new file mode 100644
index 0000000..45a50f9
--- /dev/null
+++ b/.github/CONTRIBUTING.md
@@ -0,0 +1,27 @@
+# Contribute to the project
+
+Contributions and issues are most welcome! All issues and pull requests are
+handled through [GitHub](https://github.com/DiamondLightSource/atip/issues). Also, please check for any existing issues before
+filing a new one. If you have a great idea but it involves big changes, please
+file a ticket before making a pull request! We want to make sure you don't spend
+your time coding something that might not fit the scope of the project.
+
+## Issue or Discussion?
+
+Github also offers [discussions](https://github.com/DiamondLightSource/atip/discussions) as a place to ask questions and share ideas. If
+your issue is open ended and it is not obvious when it can be "closed", please
+raise it as a discussion instead.
+
+## Code Coverage
+
+While 100% code coverage does not make a library bug-free, it significantly
+reduces the number of easily caught bugs! Please make sure coverage remains the
+same or is improved by a pull request!
+
+## Developer Information
+
+It is recommended that developers use a [vscode devcontainer](https://code.visualstudio.com/docs/devcontainers/containers). This repository contains configuration to set up a containerized development environment that suits its own needs.
+
+This project was created using the [Diamond Light Source Copier Template](https://github.com/DiamondLightSource/python-copier-template) for Python projects.
+
+For more information on common tasks like setting up a developer environment, running the tests, and setting a pre-commit hook, see the template's [How-to guides](https://diamondlightsource.github.io/python-copier-template/2.6.0/how-to.html).
diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
new file mode 100644
index 0000000..aa65892
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/bug_report.md
@@ -0,0 +1,21 @@
+---
+name: Bug Report
+about: The template to use for reporting bugs and usability issues
+title: " "
+labels: 'bug'
+assignees: ''
+
+---
+
+Describe the bug, including a clear and concise description of the expected behavior, the actual behavior and the context in which you encountered it (ideally include details of your environment).
+
+## Steps To Reproduce
+Steps to reproduce the behavior:
+1. Go to '...'
+2. Click on '....'
+3. Scroll down to '....'
+4. See error
+
+
+## Acceptance Criteria
+- Specific criteria that will be used to judge if the issue is fixed
diff --git a/.github/ISSUE_TEMPLATE/issue.md b/.github/ISSUE_TEMPLATE/issue.md
new file mode 100644
index 0000000..52c84dd
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/issue.md
@@ -0,0 +1,13 @@
+---
+name: Issue
+about: The standard template to use for feature requests, design discussions and tasks
+title: " "
+labels: ''
+assignees: ''
+
+---
+
+A brief description of the issue, including specific stakeholders and the business case where appropriate
+
+## Acceptance Criteria
+- Specific criteria that will be used to judge if the issue is fixed
diff --git a/.github/PULL_REQUEST_TEMPLATE/pull_request_template.md b/.github/PULL_REQUEST_TEMPLATE/pull_request_template.md
new file mode 100644
index 0000000..8200afe
--- /dev/null
+++ b/.github/PULL_REQUEST_TEMPLATE/pull_request_template.md
@@ -0,0 +1,8 @@
+Fixes #ISSUE
+
+### Instructions to reviewer on how to test:
+1. Do thing x
+2. Confirm thing y happens
+
+### Checks for reviewer
+- [ ] Would the PR title make sense to a user on a set of release notes
diff --git a/.github/actions/install_requirements/action.yml b/.github/actions/install_requirements/action.yml
new file mode 100644
index 0000000..d33e080
--- /dev/null
+++ b/.github/actions/install_requirements/action.yml
@@ -0,0 +1,34 @@
+name: Install requirements
+description: Install a version of python then call pip install and report what was installed
+inputs:
+ python-version:
+ description: Python version to install, default is from Dockerfile
+ default: "dev"
+ pip-install:
+ description: Parameters to pass to pip install
+ default: "$([ -f dev-requirements.txt ] && echo '-c dev-requirements.txt') -e .[dev]"
+
+runs:
+ using: composite
+ steps:
+ - name: Get version of python
+ run: |
+ PYTHON_VERSION="${{ inputs.python-version }}"
+ if [ $PYTHON_VERSION == "dev" ]; then
+ PYTHON_VERSION=$(sed -n "s/ARG PYTHON_VERSION=//p" Dockerfile)
+ fi
+ echo "PYTHON_VERSION=$PYTHON_VERSION" >> "$GITHUB_ENV"
+ shell: bash
+
+ - name: Setup python
+ uses: actions/setup-python@v5
+ with:
+ python-version: ${{ env.PYTHON_VERSION }}
+
+ - name: Install packages
+ run: pip install ${{ inputs.pip-install }}
+ shell: bash
+
+ - name: Report what was installed
+ run: pip freeze
+ shell: bash
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 0000000..184ba36
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,24 @@
+# To get started with Dependabot version updates, you'll need to specify which
+# package ecosystems to update and where the package manifests are located.
+# Please see the documentation for all configuration options:
+# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
+
+version: 2
+updates:
+ - package-ecosystem: "github-actions"
+ directory: "/"
+ schedule:
+ interval: "weekly"
+ groups:
+ actions:
+ patterns:
+ - "*"
+
+ - package-ecosystem: "pip"
+ directory: "/"
+ schedule:
+ interval: "weekly"
+ groups:
+ dev-dependencies:
+ patterns:
+ - "*"
diff --git a/.github/pages/index.html b/.github/pages/index.html
new file mode 100644
index 0000000..c495f39
--- /dev/null
+++ b/.github/pages/index.html
@@ -0,0 +1,11 @@
+
+
+
+
+ Redirecting to main branch
+
+
+
+
+
+
diff --git a/.github/pages/make_switcher.py b/.github/pages/make_switcher.py
new file mode 100755
index 0000000..c06813a
--- /dev/null
+++ b/.github/pages/make_switcher.py
@@ -0,0 +1,96 @@
+"""Make switcher.json to allow docs to switch between different versions."""
+
+import json
+import logging
+from argparse import ArgumentParser
+from pathlib import Path
+from subprocess import CalledProcessError, check_output
+
+
+def report_output(stdout: bytes, label: str) -> list[str]:
+ """Print and return something received frm stdout."""
+ ret = stdout.decode().strip().split("\n")
+ print(f"{label}: {ret}")
+ return ret
+
+
+def get_branch_contents(ref: str) -> list[str]:
+ """Get the list of directories in a branch."""
+ stdout = check_output(["git", "ls-tree", "-d", "--name-only", ref])
+ return report_output(stdout, "Branch contents")
+
+
+def get_sorted_tags_list() -> list[str]:
+ """Get a list of sorted tags in descending order from the repository."""
+ stdout = check_output(["git", "tag", "-l", "--sort=-v:refname"])
+ return report_output(stdout, "Tags list")
+
+
+def get_versions(ref: str, add: str | None) -> list[str]:
+ """Generate the file containing the list of all GitHub Pages builds."""
+ # Get the directories (i.e. builds) from the GitHub Pages branch
+ try:
+ builds = set(get_branch_contents(ref))
+ except CalledProcessError:
+ builds = set()
+ logging.warning(f"Cannot get {ref} contents")
+
+ # Add and remove from the list of builds
+ if add:
+ builds.add(add)
+
+ # Get a sorted list of tags
+ tags = get_sorted_tags_list()
+
+ # Make the sorted versions list from main branches and tags
+ versions: list[str] = []
+ for version in ["master", "main"] + tags:
+ if version in builds:
+ versions.append(version)
+ builds.remove(version)
+
+ # Add in anything that is left to the bottom
+ versions += sorted(builds)
+ print(f"Sorted versions: {versions}")
+ return versions
+
+
+def write_json(path: Path, repository: str, versions: list[str]):
+ """Write the JSON switcher to path."""
+ org, repo_name = repository.split("/")
+ struct = [
+ {"version": version, "url": f"https://{org}.github.io/{repo_name}/{version}/"}
+ for version in versions
+ ]
+ text = json.dumps(struct, indent=2)
+ print(f"JSON switcher:\n{text}")
+ path.write_text(text, encoding="utf-8")
+
+
+def main(args=None):
+ """Parse args and write switcher."""
+ parser = ArgumentParser(
+ description="Make a versions.json file from gh-pages directories"
+ )
+ parser.add_argument(
+ "--add",
+ help="Add this directory to the list of existing directories",
+ )
+ parser.add_argument(
+ "repository",
+ help="The GitHub org and repository name: ORG/REPO",
+ )
+ parser.add_argument(
+ "output",
+ type=Path,
+ help="Path of write switcher.json to",
+ )
+ args = parser.parse_args(args)
+
+ # Write the versions file
+ versions = get_versions("origin/gh-pages", args.add)
+ write_json(args.output, args.repository, versions)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/.github/workflows/_check.yml b/.github/workflows/_check.yml
new file mode 100644
index 0000000..a6139c1
--- /dev/null
+++ b/.github/workflows/_check.yml
@@ -0,0 +1,27 @@
+on:
+ workflow_call:
+ outputs:
+ branch-pr:
+ description: The PR number if the branch is in one
+ value: ${{ jobs.pr.outputs.branch-pr }}
+
+jobs:
+ pr:
+ runs-on: "ubuntu-latest"
+ outputs:
+ branch-pr: ${{ steps.script.outputs.result }}
+ steps:
+ - uses: actions/github-script@v7
+ id: script
+ if: github.event_name == 'push'
+ with:
+ script: |
+ const prs = await github.rest.pulls.list({
+ owner: context.repo.owner,
+ repo: context.repo.repo,
+ head: context.repo.owner + ':${{ github.ref_name }}'
+ })
+ if (prs.data.length) {
+ console.log(`::notice ::Skipping CI on branch push as it is already run in PR #${prs.data[0]["number"]}`)
+ return prs.data[0]["number"]
+ }
diff --git a/.github/workflows/_container.yml b/.github/workflows/_container.yml
new file mode 100644
index 0000000..da5e493
--- /dev/null
+++ b/.github/workflows/_container.yml
@@ -0,0 +1,60 @@
+on:
+ workflow_call:
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ with:
+ # Need this to get version number from last tag
+ fetch-depth: 0
+
+ - name: Set up Docker Buildx
+ id: buildx
+ uses: docker/setup-buildx-action@v3
+
+ - name: Log in to GitHub Docker Registry
+ if: github.event_name != 'pull_request'
+ uses: docker/login-action@v3
+ with:
+ registry: ghcr.io
+ username: ${{ github.actor }}
+ password: ${{ secrets.GITHUB_TOKEN }}
+
+ - name: Build and export to Docker local cache
+ uses: docker/build-push-action@v6
+ env:
+ DOCKER_BUILD_RECORD_UPLOAD: false
+ with:
+ context: .
+ # Need load and tags so we can test it below
+ load: true
+ tags: tag_for_testing
+
+ - name: Test cli works in cached runtime image
+ run: docker run --rm tag_for_testing --version
+
+ - name: Create tags for publishing image
+ id: meta
+ uses: docker/metadata-action@v5
+ with:
+ images: ghcr.io/${{ github.repository }}
+ tags: |
+ type=ref,event=tag
+ type=raw,value=latest
+
+ - name: Push cached image to container registry
+ if: github.ref_type == 'tag'
+ uses: docker/build-push-action@v6
+ env:
+ DOCKER_BUILD_RECORD_UPLOAD: false
+ # This does not build the image again, it will find the image in the
+ # Docker cache and publish it
+ with:
+ context: .
+ push: true
+ tags: ${{ steps.meta.outputs.tags }}
+ labels: ${{ steps.meta.outputs.labels }}
diff --git a/.github/workflows/_dist.yml b/.github/workflows/_dist.yml
new file mode 100644
index 0000000..b1c4c93
--- /dev/null
+++ b/.github/workflows/_dist.yml
@@ -0,0 +1,36 @@
+on:
+ workflow_call:
+
+jobs:
+ build:
+ runs-on: "ubuntu-latest"
+
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ with:
+ # Need this to get version number from last tag
+ fetch-depth: 0
+
+ - name: Build sdist and wheel
+ run: >
+ export SOURCE_DATE_EPOCH=$(git log -1 --pretty=%ct) &&
+ pipx run build
+
+ - name: Upload sdist and wheel as artifacts
+ uses: actions/upload-artifact@v4
+ with:
+ name: dist
+ path: dist
+
+ - name: Check for packaging errors
+ run: pipx run twine check --strict dist/*
+
+ - name: Install produced wheel
+ uses: ./.github/actions/install_requirements
+ with:
+ pip-install: dist/*.whl
+
+ - name: Test module --version works using the installed wheel
+ # If more than one module in src/ replace with module name to test
+ run: python -m $(ls --hide='*.egg-info' src | head -1) --version
diff --git a/.github/workflows/_docs.yml b/.github/workflows/_docs.yml
new file mode 100644
index 0000000..a1cafca
--- /dev/null
+++ b/.github/workflows/_docs.yml
@@ -0,0 +1,54 @@
+on:
+ workflow_call:
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Avoid git conflicts when tag and branch pushed at same time
+ if: github.ref_type == 'tag'
+ run: sleep 60
+
+ - name: Checkout
+ uses: actions/checkout@v4
+ with:
+ # Need this to get version number from last tag
+ fetch-depth: 0
+
+ - name: Install system packages
+ run: sudo apt-get install graphviz
+
+ - name: Install python packages
+ uses: ./.github/actions/install_requirements
+
+ - name: Build docs
+ run: tox -e docs
+
+ - name: Remove environment.pickle
+ run: rm build/html/.doctrees/environment.pickle
+
+ - name: Upload built docs artifact
+ uses: actions/upload-artifact@v4
+ with:
+ name: docs
+ path: build
+
+ - name: Sanitize ref name for docs version
+ run: echo "DOCS_VERSION=${GITHUB_REF_NAME//[^A-Za-z0-9._-]/_}" >> $GITHUB_ENV
+
+ - name: Move to versioned directory
+ run: mv build/html .github/pages/$DOCS_VERSION
+
+ - name: Write switcher.json
+ run: python .github/pages/make_switcher.py --add $DOCS_VERSION ${{ github.repository }} .github/pages/switcher.json
+
+ - name: Publish Docs to gh-pages
+ if: github.ref_type == 'tag' || github.ref_name == 'main'
+ # We pin to the SHA, not the tag, for security reasons.
+ # https://docs.github.com/en/actions/learn-github-actions/security-hardening-for-github-actions#using-third-party-actions
+ uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e # v4.0.0
+ with:
+ github_token: ${{ secrets.GITHUB_TOKEN }}
+ publish_dir: .github/pages
+ keep_files: true
diff --git a/.github/workflows/_pypi.yml b/.github/workflows/_pypi.yml
new file mode 100644
index 0000000..8032bba
--- /dev/null
+++ b/.github/workflows/_pypi.yml
@@ -0,0 +1,19 @@
+on:
+ workflow_call:
+
+jobs:
+ upload:
+ runs-on: ubuntu-latest
+ environment: release
+
+ steps:
+ - name: Download dist artifact
+ uses: actions/download-artifact@v4
+ with:
+ name: dist
+ path: dist
+
+ - name: Publish to PyPI using trusted publishing
+ uses: pypa/gh-action-pypi-publish@release/v1
+ with:
+ attestations: false
diff --git a/.github/workflows/_release.yml b/.github/workflows/_release.yml
new file mode 100644
index 0000000..81b6264
--- /dev/null
+++ b/.github/workflows/_release.yml
@@ -0,0 +1,32 @@
+on:
+ workflow_call:
+
+jobs:
+ artifacts:
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Download artifacts
+ uses: actions/download-artifact@v4
+ with:
+ merge-multiple: true
+
+ - name: Zip up docs
+ run: |
+ set -vxeuo pipefail
+ if [ -d html ]; then
+ mv html $GITHUB_REF_NAME
+ zip -r docs.zip $GITHUB_REF_NAME
+ rm -rf $GITHUB_REF_NAME
+ fi
+
+ - name: Create GitHub Release
+ # We pin to the SHA, not the tag, for security reasons.
+ # https://docs.github.com/en/actions/learn-github-actions/security-hardening-for-github-actions#using-third-party-actions
+ uses: softprops/action-gh-release@e7a8f85e1c67a31e6ed99a94b41bd0b71bbee6b8 # v2.0.9
+ with:
+ prerelease: ${{ contains(github.ref_name, 'a') || contains(github.ref_name, 'b') || contains(github.ref_name, 'rc') }}
+ files: "*"
+ generate_release_notes: true
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/_test.yml b/.github/workflows/_test.yml
new file mode 100644
index 0000000..f652d41
--- /dev/null
+++ b/.github/workflows/_test.yml
@@ -0,0 +1,62 @@
+on:
+ workflow_call:
+ inputs:
+ python-version:
+ type: string
+ description: The version of python to install
+ required: true
+ runs-on:
+ type: string
+ description: The runner to run this job on
+ required: true
+ secrets:
+ CODECOV_TOKEN:
+ required: true
+
+env:
+ # https://github.com/pytest-dev/pytest/issues/2042
+ PY_IGNORE_IMPORTMISMATCH: "1"
+
+jobs:
+ run:
+ runs-on: ${{ inputs.runs-on }}
+
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ with:
+ # Need this to get version number from last tag
+ fetch-depth: 0
+
+ - if: inputs.python-version == 'dev'
+ name: Install dev versions of python packages
+ uses: ./.github/actions/install_requirements
+
+ - if: inputs.python-version == 'dev'
+ name: Write the requirements as an artifact
+ run: pip freeze --exclude-editable > /tmp/dev-requirements.txt
+
+ - if: inputs.python-version == 'dev'
+ name: Upload dev-requirements.txt
+ uses: actions/upload-artifact@v4
+ with:
+ name: dev-requirements
+ path: /tmp/dev-requirements.txt
+
+ - if: inputs.python-version != 'dev'
+ name: Install latest versions of python packages
+ uses: ./.github/actions/install_requirements
+ with:
+ python-version: ${{ inputs.python-version }}
+ pip-install: ".[dev]"
+
+ - name: Run tests
+ run: tox -e tests
+
+ - name: Upload coverage to Codecov
+ uses: codecov/codecov-action@v4
+ with:
+ name: ${{ inputs.python-version }}/${{ inputs.runs-on }}
+ files: cov.xml
+ env:
+ CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
diff --git a/.github/workflows/_tox.yml b/.github/workflows/_tox.yml
new file mode 100644
index 0000000..a13536d
--- /dev/null
+++ b/.github/workflows/_tox.yml
@@ -0,0 +1,22 @@
+on:
+ workflow_call:
+ inputs:
+ tox:
+ type: string
+ description: What to run under tox
+ required: true
+
+
+jobs:
+ run:
+ runs-on: "ubuntu-latest"
+
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+
+ - name: Install python packages
+ uses: ./.github/actions/install_requirements
+
+ - name: Run tox
+ run: tox -e ${{ inputs.tox }}
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
new file mode 100644
index 0000000..485b82b
--- /dev/null
+++ b/.github/workflows/ci.yml
@@ -0,0 +1,67 @@
+name: CI
+
+on:
+ push:
+ pull_request:
+
+jobs:
+ check:
+ uses: ./.github/workflows/_check.yml
+
+ lint:
+ needs: check
+ if: needs.check.outputs.branch-pr == ''
+ uses: ./.github/workflows/_tox.yml
+ with:
+ tox: pre-commit,type-checking
+
+ test:
+ needs: check
+ if: needs.check.outputs.branch-pr == ''
+ strategy:
+ matrix:
+ runs-on: ["ubuntu-latest"] # can add windows-latest, macos-latest
+ python-version: ["3.10", "3.11", "3.12"]
+ include:
+ # Include one that runs in the dev environment
+ - runs-on: "ubuntu-latest"
+ python-version: "dev"
+ fail-fast: false
+ uses: ./.github/workflows/_test.yml
+ with:
+ runs-on: ${{ matrix.runs-on }}
+ python-version: ${{ matrix.python-version }}
+ secrets:
+ CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
+
+ container:
+ needs: check
+ if: needs.check.outputs.branch-pr == ''
+ uses: ./.github/workflows/_container.yml
+ permissions:
+ contents: read
+ packages: write
+
+ docs:
+ needs: check
+ if: needs.check.outputs.branch-pr == ''
+ uses: ./.github/workflows/_docs.yml
+
+ dist:
+ needs: check
+ if: needs.check.outputs.branch-pr == ''
+ uses: ./.github/workflows/_dist.yml
+
+ pypi:
+ if: github.ref_type == 'tag'
+ needs: dist
+ uses: ./.github/workflows/_pypi.yml
+ permissions:
+ id-token: write
+
+ release:
+ if: github.ref_type == 'tag'
+ needs: [dist, docs]
+ uses: ./.github/workflows/_release.yml
+ permissions:
+ contents: write
diff --git a/.github/workflows/code.yml b/.github/workflows/code.yml
index 0ffe238..e6a9080 100644
--- a/.github/workflows/code.yml
+++ b/.github/workflows/code.yml
@@ -1,30 +1,30 @@
-name: Code CI
+# name: Code CI
-on:
- push:
- pull_request:
+# on:
+# push:
+# pull_request:
-jobs:
- build:
- runs-on: ubuntu-latest
- strategy:
- fail-fast: false
- matrix:
- python: ["3.8", "3.9"]
+# jobs:
+# build:
+# runs-on: ubuntu-latest
+# strategy:
+# fail-fast: false
+# matrix:
+# python: ["3.8", "3.9"]
- steps:
- - name: Checkout Source
- uses: actions/checkout@v2
+# steps:
+# - name: Checkout Source
+# uses: actions/checkout@v2
- - name: Set up Python ${{ matrix.python }}
- uses: actions/setup-python@v2
- with:
- python-version: ${{ matrix.python }}
+# - name: Set up Python ${{ matrix.python }}
+# uses: actions/setup-python@v2
+# with:
+# python-version: ${{ matrix.python }}
- - name: Install Python Dependencies
- run: |
- pip install pipenv twine
- pipenv install --dev --deploy --python $(which python) && pipenv graph
+# - name: Install Python Dependencies
+# run: |
+# pip install pipenv twine
+# pipenv install --dev --deploy --python $(which python) && pipenv graph
- - name: Run tests and check formatting
- run: pipenv run tests
+# - name: Run tests and check formatting
+# run: pipenv run tests
diff --git a/.github/workflows/periodic.yml b/.github/workflows/periodic.yml
new file mode 100644
index 0000000..e2a0fd1
--- /dev/null
+++ b/.github/workflows/periodic.yml
@@ -0,0 +1,13 @@
+name: Periodic
+
+on:
+ workflow_dispatch:
+ schedule:
+ # Run weekly to check URL links still resolve
+ - cron: "0 8 * * WED"
+
+jobs:
+ linkcheck:
+ uses: ./.github/workflows/_tox.yml
+ with:
+ tox: docs build -- -b linkcheck
diff --git a/.gitignore b/.gitignore
index d3e4b41..0f33bf2 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,23 +1,71 @@
-# compiled python
-*.pyc
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
-# pytest cache
-*.cache
+# C extensions
+*.so
-# virtual environments
-venv*
-
-# build artifacts
-build
-dist
+# Distribution / packaging
+.Python
+env/
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
*.egg-info/
+.installed.cfg
+*.egg
+**/_version.py
-# coverage
-.coverage
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
htmlcov/
+.tox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+cov.xml
+.pytest_cache/
+.mypy_cache/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+
+# Sphinx documentation
+docs/_build/
+docs/_api
+
+# PyBuilder
+target/
+
+# likely venv names
+.venv*
+venv*
-# temporary files
-*~
+# further build artifacts
+lockfiles/
-# built documentation
-docs/_build
+# ruff cache
+.ruff_cache/
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 0000000..60fc23f
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,24 @@
+repos:
+ - repo: https://github.com/pre-commit/pre-commit-hooks
+ rev: v4.5.0
+ hooks:
+ - id: check-added-large-files
+ - id: check-yaml
+ - id: check-merge-conflict
+ - id: end-of-file-fixer
+
+ - repo: local
+ hooks:
+ - id: ruff
+ name: lint with ruff
+ language: system
+ entry: ruff check --force-exclude
+ types: [python]
+ require_serial: true
+
+ - id: ruff-format
+ name: format with ruff
+ language: system
+ entry: ruff format --force-exclude
+ types: [python]
+ require_serial: true
diff --git a/.vscode/extensions.json b/.vscode/extensions.json
new file mode 100644
index 0000000..933c580
--- /dev/null
+++ b/.vscode/extensions.json
@@ -0,0 +1,5 @@
+{
+ "recommendations": [
+ "ms-vscode-remote.remote-containers",
+ ]
+}
diff --git a/.vscode/launch.json b/.vscode/launch.json
new file mode 100644
index 0000000..60bb089
--- /dev/null
+++ b/.vscode/launch.json
@@ -0,0 +1,39 @@
+{
+ "configurations": [
+ {
+ "name": "Python Debugger: Current File",
+ "type": "debugpy",
+ "request": "launch",
+ "program": "${file}",
+ "console": "integratedTerminal"
+ },
+ {
+ "name": "Python Debugger: Virtac",
+ "type": "debugpy",
+ "request": "launch",
+ "program": "/venv/bin/virtac",
+ "console": "integratedTerminal",
+ "env": {
+ "EPICS_CA_SERVER_PORT": "7064",
+ "EPICS_CA_REPEATER_PORT": "7065",
+ },
+ },
+ {
+ "name": "Debug Unit Test",
+ "type": "debugpy",
+ "request": "launch",
+ "justMyCode": false,
+ "program": "${file}",
+ "purpose": [
+ "debug-test"
+ ],
+ "console": "integratedTerminal",
+ "env": {
+ // Enable break on exception when debugging tests (see: tests/conftest.py)
+ "PYTEST_RAISE": "1",
+ "EPICS_CA_SERVER_PORT": "7064",
+ "EPICS_CA_REPEATER_PORT": "7065",
+ },
+ }
+ ]
+}
diff --git a/.vscode/settings.json b/.vscode/settings.json
new file mode 100644
index 0000000..101c75f
--- /dev/null
+++ b/.vscode/settings.json
@@ -0,0 +1,12 @@
+{
+ "python.testing.unittestEnabled": false,
+ "python.testing.pytestEnabled": true,
+ "editor.formatOnSave": true,
+ "editor.codeActionsOnSave": {
+ "source.organizeImports": "explicit"
+ },
+ "files.insertFinalNewline": true,
+ "[python]": {
+ "editor.defaultFormatter": "charliermarsh.ruff",
+ },
+}
diff --git a/.vscode/tasks.json b/.vscode/tasks.json
new file mode 100644
index 0000000..c999e86
--- /dev/null
+++ b/.vscode/tasks.json
@@ -0,0 +1,16 @@
+// See https://go.microsoft.com/fwlink/?LinkId=733558
+// for the documentation about the tasks.json format
+{
+ "version": "2.0.0",
+ "tasks": [
+ {
+ "type": "shell",
+ "label": "Tests, lint and docs",
+ "command": "tox -p",
+ "options": {
+ "cwd": "${workspaceRoot}"
+ },
+ "problemMatcher": [],
+ }
+ ]
+}
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..830e71b
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,29 @@
+# The devcontainer should use the developer target and run as root with podman
+# or docker with user namespaces.
+ARG PYTHON_VERSION=3.11
+FROM python:${PYTHON_VERSION} AS developer
+
+# Add any system dependencies for the developer/build environment here
+RUN apt-get update && apt-get install -y --no-install-recommends \
+ graphviz \
+ && rm -rf /var/lib/apt/lists/*
+
+# Set up a virtual environment and put it in PATH
+RUN python -m venv /venv
+ENV PATH=/venv/bin:$PATH
+
+# The build stage installs the context into the venv
+FROM developer AS build
+COPY . /context
+WORKDIR /context
+RUN touch dev-requirements.txt && pip install -c dev-requirements.txt .
+
+# The runtime stage copies the built venv into a slim runtime container
+FROM python:${PYTHON_VERSION}-slim AS runtime
+# Add apt-get system dependencies for runtime here if needed
+COPY --from=build /venv/ /venv/
+ENV PATH=/venv/bin:$PATH
+
+# change this entrypoint if it is not the same as the repo
+ENTRYPOINT ["atip"]
+CMD ["--version"]
diff --git a/INSTALL.rst b/INSTALL.rst
deleted file mode 100644
index 73d9434..0000000
--- a/INSTALL.rst
+++ /dev/null
@@ -1,37 +0,0 @@
-=================
-ATIP Installation
-=================
-
-This guide is for Linux and is based on the current structures of AT and Pytac,
-if you find a mistake anywhere in ATIP please raise an issue on ATIP's GitHub
-page, `here. `_
-
-Initial Setup and Installation
-------------------------------
-
-**Option 1: Install ATIP using pip**::
-
- $ pip install atip
-
-**Option 2: Install ATIP from GitHub**:
-
-1. Clone ATIP::
-
- $ cd
- $ git clone https://github.com/DiamondLightSource/atip.git
-
-2. Create a pipenv and install the dependencies::
-
- $ cd atip
- $ pipenv install --dev
- $ pipenv shell
-
-3. Run the tests to ensure everything is working correctly::
-
- $ python -m pytest
-
-Troubleshooting
----------------
-
-Please note that for ATIP to function with Python 3.7 or later, you must
-use Cothread>=2.16.
diff --git a/LICENSE b/LICENSE
index 83adffc..8dada3e 100644
--- a/LICENSE
+++ b/LICENSE
@@ -178,7 +178,7 @@
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
+ boilerplate notice, with the fields enclosed by brackets "{}"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
@@ -186,7 +186,7 @@
same "printed page" as the copyright notice for easier
identification within third-party archives.
- Copyright 2023 Diamond Light Source Ltd.
+ Copyright {yyyy} {name of copyright owner}
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
diff --git a/MANIFEST.in b/MANIFEST.in
deleted file mode 100644
index 1295f66..0000000
--- a/MANIFEST.in
+++ /dev/null
@@ -1,2 +0,0 @@
-include atip/rings/*.m*
-include virtac/data/*.csv
diff --git a/Pipfile b/Pipfile
deleted file mode 100644
index 669fcd8..0000000
--- a/Pipfile
+++ /dev/null
@@ -1,23 +0,0 @@
-[[source]]
-url = "https://pypi.org/simple"
-verify_ssl = true
-name = "pypi"
-
-[dev-packages]
-pytest = "*"
-pytest-cov = "*"
-pytest-black = "*"
-pytest-flake8 = "*"
-testfixtures = "*"
-mock = "*"
-flake8 = "<4.0"
-sphinx = "*"
-sphinx_rtd_theme = "*"
-black = "==22.3.0"
-
-[packages]
-# All production requirements from setup.cfg.
-atip = {editable = true, path = "."}
-
-[scripts]
-tests = "python -m pytest atip tests --flake8 --black --cov=atip --cov-fail-under=70 --cov-report=term"
diff --git a/Pipfile.lock b/Pipfile.lock
deleted file mode 100644
index 5c1316c..0000000
--- a/Pipfile.lock
+++ /dev/null
@@ -1,598 +0,0 @@
-{
- "_meta": {
- "hash": {
- "sha256": "126890cdbef97afc54ae976cf384775a3261debdf7b2f12cf333cfa3c6ecc27b"
- },
- "pipfile-spec": 6,
- "requires": {},
- "sources": [
- {
- "name": "pypi",
- "url": "https://pypi.org/simple",
- "verify_ssl": true
- }
- ]
- },
- "default": {
- "accelerator-toolbox": {
- "hashes": [
- "sha256:03640cdb80ccc463cb0f6ba021ce1bc90dd86f2f1754663bbefdb63083e8b7b9",
- "sha256:074a2dfe83fc1b6c2f7ebee1439e0a5838b4db7927da3065d6ef72911d87797b",
- "sha256:1889bfbe230ebf896c9096453bcf0a56f22abd9d98334b699fb6f05fb535943e",
- "sha256:2041cdf427db81237497e2bf955c13a09b9be4291a1467db3d54e5f186c5c06a",
- "sha256:20ff6efd77c7846600d59a6ff8340dbab5c5c06e47233975ce02ffe85856d5aa",
- "sha256:239b48f04a5839c21d1ede1b2fd0077e0b01470f0e3dd798d89c1fbc84d2a4f5",
- "sha256:2c1a344db1bf960e3d056501e3672acfc2abc5d0ea7d454192b10e8331cc8c14",
- "sha256:2e02b5ee3ee000a5f9bcb988ceec741ce781ce0394fb358c982c651f8f05aeb2",
- "sha256:371867fc96ef47be83b338adf5188951c04286f488f25296f00eeda37606da4a",
- "sha256:3ef5058b4301c02ebb50dd0f4ed8bf4c8ae764ac10b38f030921487297e7ac7b",
- "sha256:3f24fd2451ddc6b7b0747b082e155be349b23674a4d506429e0f160a164d46e0",
- "sha256:4257ff6d7e19243b9b467e614717ac19b3f1620642956efe1a569106c1e51c3f",
- "sha256:455329f5a04901b2d20c04f375346d79f2f97ef4784adbd80d1b316e6f1c5d21",
- "sha256:514d21845fb889273aa53320f707dd06bad551d24c0322d385ae3be4c412b41d",
- "sha256:54f3f32bae6e931ba4b091ec302802866e904a499e0e15d6fe610996deac3b61",
- "sha256:58cc4e4af2816a393daed515e78defdffc4617a0c3e51a1308dea5c3ab6be9bf",
- "sha256:5ed71f97d63449b958642e48a9b71f37b36cdd6065f9c6355fe77675378c8b0d",
- "sha256:623a80b134a2dce2e9614e00b477b3a15e9887ef87e02bdcfae7ad9fa1bd04f6",
- "sha256:71dd48e499322e0b58fcae1a7d42eb7f5940764994c3600ea8295bc905f733ee",
- "sha256:73fe01c32f942187ee70f05b938c53ca2f8756b607b435ee0e448f72cd807801",
- "sha256:7a99e6facf592cc3c4b75683a660f0864be0b043b5da08fd242294e9682e3a84",
- "sha256:816da70e67729626d9bf8c6c471a4c1a6b43f924b394f33f49343fa2e0c0379e",
- "sha256:899b22ed57fc6d4de90b30a9496b8b87f0eaa5ef3b6fba7ef1ba5e0c835aca71",
- "sha256:a3a1a599fa60eada443d810b7466583e93ef89b8a79695984e9f8701841b2150",
- "sha256:a7dc407d761c294393b4858dd5d42b1b3fc09f49c9554ab6dc19fbb900851e0b",
- "sha256:b2d453adfb52ea13eaae9ce20c4f67f54749b8408691f97c27fadeb421807782",
- "sha256:b9128c35f378e1aae6d6178daf604f5201214ccf2e5a719015031da4a7c17894",
- "sha256:bc74991e6acdfde348a1613eae31237e1534c7e62f8a9bdc5d06290183f812ba",
- "sha256:bed0930a1951eb733eb2ef8a137675286c8ee0281e17eaabed6c9bfcb114dd3d",
- "sha256:c313e07c49b63c4e073464ee45355c558975000c56e1c69ad4c337520e379ff1",
- "sha256:c4729635638ce9e13690eaaa469065aa83e685c7663c4b616d982193c26160e1",
- "sha256:d4ee247c36f23f273a0479aa918f6de144cf1fbc250a7365fad1bb7ccd218e16",
- "sha256:db9551d23cfc7f967b39155ed13c1c7a07c3237c5eb95a8c26747f23016452a9",
- "sha256:dc29525300539b13474799d3562ae25cedb34c2ef68245c55061248caba53064",
- "sha256:dd3025dd62776fff8f9d6510fce9736cc0720102308987f522a927714e7f8ce5",
- "sha256:ea398c5158397753e6947f95ec51f9d23bb9d1040b1b4dc9fc6da827e909bae0",
- "sha256:f4ecf25e869181aebc91ec19fd07e2242770b79c7676d20ae844e01b6fd4faee",
- "sha256:f528aaca5eb378d5297690f39b545c70f1f0438521d73c9c0ff1a537f7bcc070",
- "sha256:fb00682d240a61de8e031e2bc6aceb9588ddcdd3b2942d5454fc7c8fcb8d5b73"
- ],
- "markers": "python_full_version >= '3.6.0'",
- "version": "==0.2.2"
- },
- "atip": {
- "editable": true,
- "path": "."
- },
- "cothread": {
- "hashes": [
- "sha256:c62e6697814256d6f07d4bbdddcce9f89816fe30df8dd1bbb3cf5af1ad6330c1",
- "sha256:caa0e37ae6d4f9adddf56addd76959103f2a388d0a6e1c1ddd833a87f0551d40",
- "sha256:f0f57697eee1b87cf2bad4683e0deda5b3fe4505400084369274393999fcc3d0"
- ],
- "version": "==2.19.1"
- },
- "epicscorelibs": {
- "hashes": [
- "sha256:283735c6100c3c24ab9a535719b14e48b033b1cc7bb7a9d744c9e847667c8c96",
- "sha256:4b6b7986f898dfe77fe58b175426a4eb5848a740c0635a47b2dd722b506a8ab5",
- "sha256:bf7f165cc0d49e3e530ef9a8d864d6cc2ab1991c7eaec853f81a01e68b191289"
- ],
- "version": "==7.0.7.99.0.2"
- },
- "epicsdbbuilder": {
- "hashes": [
- "sha256:40e01ca308b667d17b31dc1907816df20c31b389415268d9ec6e2be6c3b8f283",
- "sha256:ae8dc724c72478d2c6a68b08145d027a50af98702d17e4692f2d73f145818e74"
- ],
- "version": "==1.5"
- },
- "importlib-metadata": {
- "hashes": [
- "sha256:7efb448ec9a5e313a57655d35aa54cd3e01b7e1fbcf72dce1bf06119420f5bad",
- "sha256:e354bedeb60efa6affdcc8ae121b73544a7aa74156d047311948f6d711cd378d"
- ],
- "markers": "python_version < '3.8'",
- "version": "==6.0.0"
- },
- "numpy": {
- "hashes": [
- "sha256:1dbe1c91269f880e364526649a52eff93ac30035507ae980d2fed33aaee633ac",
- "sha256:357768c2e4451ac241465157a3e929b265dfac85d9214074985b1786244f2ef3",
- "sha256:3820724272f9913b597ccd13a467cc492a0da6b05df26ea09e78b171a0bb9da6",
- "sha256:4391bd07606be175aafd267ef9bea87cf1b8210c787666ce82073b05f202add1",
- "sha256:4aa48afdce4660b0076a00d80afa54e8a97cd49f457d68a4342d188a09451c1a",
- "sha256:58459d3bad03343ac4b1b42ed14d571b8743dc80ccbf27444f266729df1d6f5b",
- "sha256:5c3c8def4230e1b959671eb959083661b4a0d2e9af93ee339c7dada6759a9470",
- "sha256:5f30427731561ce75d7048ac254dbe47a2ba576229250fb60f0fb74db96501a1",
- "sha256:643843bcc1c50526b3a71cd2ee561cf0d8773f062c8cbaf9ffac9fdf573f83ab",
- "sha256:67c261d6c0a9981820c3a149d255a76918278a6b03b6a036800359aba1256d46",
- "sha256:67f21981ba2f9d7ba9ade60c9e8cbaa8cf8e9ae51673934480e45cf55e953673",
- "sha256:6aaf96c7f8cebc220cdfc03f1d5a31952f027dda050e5a703a0d1c396075e3e7",
- "sha256:7c4068a8c44014b2d55f3c3f574c376b2494ca9cc73d2f1bd692382b6dffe3db",
- "sha256:7c7e5fa88d9ff656e067876e4736379cc962d185d5cd808014a8a928d529ef4e",
- "sha256:7f5ae4f304257569ef3b948810816bc87c9146e8c446053539947eedeaa32786",
- "sha256:82691fda7c3f77c90e62da69ae60b5ac08e87e775b09813559f8901a88266552",
- "sha256:8737609c3bbdd48e380d463134a35ffad3b22dc56295eff6f79fd85bd0eeeb25",
- "sha256:9f411b2c3f3d76bba0865b35a425157c5dcf54937f82bbeb3d3c180789dd66a6",
- "sha256:a6be4cb0ef3b8c9250c19cc122267263093eee7edd4e3fa75395dfda8c17a8e2",
- "sha256:bcb238c9c96c00d3085b264e5c1a1207672577b93fa666c3b14a45240b14123a",
- "sha256:bf2ec4b75d0e9356edea834d1de42b31fe11f726a81dfb2c2112bc1eaa508fcf",
- "sha256:d136337ae3cc69aa5e447e78d8e1514be8c3ec9b54264e680cf0b4bd9011574f",
- "sha256:d4bf4d43077db55589ffc9009c0ba0a94fa4908b9586d6ccce2e0b164c86303c",
- "sha256:d6a96eef20f639e6a97d23e57dd0c1b1069a7b4fd7027482a4c5c451cd7732f4",
- "sha256:d9caa9d5e682102453d96a0ee10c7241b72859b01a941a397fd965f23b3e016b",
- "sha256:dd1c8f6bd65d07d3810b90d02eba7997e32abbdf1277a481d698969e921a3be0",
- "sha256:e31f0bb5928b793169b87e3d1e070f2342b22d5245c755e2b81caa29756246c3",
- "sha256:ecb55251139706669fdec2ff073c98ef8e9a84473e51e716211b41aa0f18e656",
- "sha256:ee5ec40fdd06d62fe5d4084bef4fd50fd4bb6bfd2bf519365f569dc470163ab0",
- "sha256:f17e562de9edf691a42ddb1eb4a5541c20dd3f9e65b09ded2beb0799c0cf29bb",
- "sha256:fdffbfb6832cd0b300995a2b08b8f6fa9f6e856d562800fea9182316d99c4e8e"
- ],
- "markers": "python_version < '3.10'",
- "version": "==1.21.6"
- },
- "pvxslibs": {
- "hashes": [
- "sha256:3330431d9c87eef3604a41fe5162e52a21125ebf0dd3d6c38e17f6e621ae93d7",
- "sha256:a7d1fa9cafa2609e4c2f8c77c04d02993ab4b11152e2418d82cc8a1193dbd9b5",
- "sha256:c7019e5dac7c552747c41d34d39a1bd016d2cc5892f139c37693638436e19b11"
- ],
- "version": "==1.3.1"
- },
- "pytac": {
- "hashes": [
- "sha256:199435f8ed00d57e2c0c9cf355a84dcc4e53061a298453ba5666c1f4d6fae1a2",
- "sha256:a9afceb92778a7a7b5ff591962c8e898a4ad003990305b89064aa7b94058c851"
- ],
- "version": "==0.5.0"
- },
- "scipy": {
- "hashes": [
- "sha256:01b38dec7e9f897d4db04f8de4e20f0f5be3feac98468188a0f47a991b796055",
- "sha256:10dbcc7de03b8d635a1031cb18fd3eaa997969b64fdf78f99f19ac163a825445",
- "sha256:19aeac1ad3e57338723f4657ac8520f41714804568f2e30bd547d684d72c392e",
- "sha256:1b21c6e0dc97b1762590b70dee0daddb291271be0580384d39f02c480b78290a",
- "sha256:1caade0ede6967cc675e235c41451f9fb89ae34319ddf4740194094ab736b88d",
- "sha256:23995dfcf269ec3735e5a8c80cfceaf384369a47699df111a6246b83a55da582",
- "sha256:2a799714bf1f791fb2650d73222b248d18d53fd40d6af2df2c898db048189606",
- "sha256:3274ce145b5dc416c49c0cf8b6119f787f0965cd35e22058fe1932c09fe15d77",
- "sha256:33d1677d46111cfa1c84b87472a0274dde9ef4a7ef2e1f155f012f5f1e995d8f",
- "sha256:44d452850f77e65e25b1eb1ac01e25770323a782bfe3a1a3e43847ad4266d93d",
- "sha256:9e3302149a369697c6aaea18b430b216e3c88f9a61b62869f6104881e5f9ef85",
- "sha256:a75b014d3294fce26852a9d04ea27b5671d86736beb34acdfc05859246260707",
- "sha256:ad7269254de06743fb4768f658753de47d8b54e4672c5ebe8612a007a088bd48",
- "sha256:b30280fbc1fd8082ac822994a98632111810311a9ece71a0e48f739df3c555a2",
- "sha256:b79104878003487e2b4639a20b9092b02e1bad07fc4cf924b495cf413748a777",
- "sha256:d449d40e830366b4c612692ad19fbebb722b6b847f78a7b701b1e0d6cda3cc13",
- "sha256:d647757373985207af3343301d89fe738d5a294435a4f2aafb04c13b4388c896",
- "sha256:f68eb46b86b2c246af99fcaa6f6e37c7a7a413e1084a794990b877f2ff71f7b6",
- "sha256:fdf606341cd798530b05705c87779606fcdfaf768a8129c348ea94441da15b04"
- ],
- "version": "==1.6.3"
- },
- "setuptools": {
- "hashes": [
- "sha256:257de92a9d50a60b8e22abfcbb771571fde0dbf3ec234463212027a4eeecbe9a",
- "sha256:e728ca814a823bf7bf60162daf9db95b93d532948c4c0bea762ce62f60189078"
- ],
- "version": "==67.6.1"
- },
- "setuptools-dso": {
- "hashes": [
- "sha256:b23019f5e9cec37bc5df3a9735b86ee3948ce7fb2fda42307ca0ba49625d1b44",
- "sha256:b85c8d662b7b942c6edc729765f963c77b758a691fcbe3e216e44fd97f2519f3"
- ],
- "version": "==2.10"
- },
- "softioc": {
- "hashes": [
- "sha256:4437ed6d05a1c089945741d193861a360738d224985553a94ca0b86a52748299",
- "sha256:b1258966b99bb0850ed61e873f4c6a0340cff467340b3095310b41fa7a9b4852",
- "sha256:e1a96d42367c601b6682d466f2451c052bd1b3d9e8608da9894009d043cddb51"
- ],
- "version": "==4.5.0"
- },
- "typing-extensions": {
- "hashes": [
- "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb",
- "sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4"
- ],
- "markers": "python_version < '3.8'",
- "version": "==4.5.0"
- },
- "zipp": {
- "hashes": [
- "sha256:23f70e964bc11a34cef175bc90ba2914e1e4545ea1e3e2f67c079671883f9cb6",
- "sha256:e8b2a36ea17df80ffe9e2c4fda3f693c3dad6df1697d3cd3af232db680950b0b"
- ],
- "version": "==3.13.0"
- }
- },
- "develop": {
- "alabaster": {
- "hashes": [
- "sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359",
- "sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02"
- ],
- "version": "==0.7.12"
- },
- "attrs": {
- "hashes": [
- "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836",
- "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99"
- ],
- "version": "==22.2.0"
- },
- "babel": {
- "hashes": [
- "sha256:1ad3eca1c885218f6dce2ab67291178944f810a10a9b5f3cb8382a5a232b64fe",
- "sha256:5ef4b3226b0180dedded4229651c8b0e1a3a6a2837d45a073272f313e4cf97f6"
- ],
- "version": "==2.11.0"
- },
- "black": {
- "hashes": [
- "sha256:06f9d8846f2340dfac80ceb20200ea5d1b3f181dd0556b47af4e8e0b24fa0a6b",
- "sha256:10dbe6e6d2988049b4655b2b739f98785a884d4d6b85bc35133a8fb9a2233176",
- "sha256:2497f9c2386572e28921fa8bec7be3e51de6801f7459dffd6e62492531c47e09",
- "sha256:30d78ba6bf080eeaf0b7b875d924b15cd46fec5fd044ddfbad38c8ea9171043a",
- "sha256:328efc0cc70ccb23429d6be184a15ce613f676bdfc85e5fe8ea2a9354b4e9015",
- "sha256:35020b8886c022ced9282b51b5a875b6d1ab0c387b31a065b84db7c33085ca79",
- "sha256:5795a0375eb87bfe902e80e0c8cfaedf8af4d49694d69161e5bd3206c18618bb",
- "sha256:5891ef8abc06576985de8fa88e95ab70641de6c1fca97e2a15820a9b69e51b20",
- "sha256:637a4014c63fbf42a692d22b55d8ad6968a946b4a6ebc385c5505d9625b6a464",
- "sha256:67c8301ec94e3bcc8906740fe071391bce40a862b7be0b86fb5382beefecd968",
- "sha256:6d2fc92002d44746d3e7db7cf9313cf4452f43e9ea77a2c939defce3b10b5c82",
- "sha256:6ee227b696ca60dd1c507be80a6bc849a5a6ab57ac7352aad1ffec9e8b805f21",
- "sha256:863714200ada56cbc366dc9ae5291ceb936573155f8bf8e9de92aef51f3ad0f0",
- "sha256:9b542ced1ec0ceeff5b37d69838106a6348e60db7b8fdd245294dc1d26136265",
- "sha256:a6342964b43a99dbc72f72812bf88cad8f0217ae9acb47c0d4f141a6416d2d7b",
- "sha256:ad4efa5fad66b903b4a5f96d91461d90b9507a812b3c5de657d544215bb7877a",
- "sha256:bc58025940a896d7e5356952228b68f793cf5fcb342be703c3a2669a1488cb72",
- "sha256:cc1e1de68c8e5444e8f94c3670bb48a2beef0e91dddfd4fcc29595ebd90bb9ce",
- "sha256:cee3e11161dde1b2a33a904b850b0899e0424cc331b7295f2a9698e79f9a69a0",
- "sha256:e3556168e2e5c49629f7b0f377070240bd5511e45e25a4497bb0073d9dda776a",
- "sha256:e8477ec6bbfe0312c128e74644ac8a02ca06bcdb8982d4ee06f209be28cdf163",
- "sha256:ee8f1f7228cce7dffc2b464f07ce769f478968bfb3dd1254a4c2eeed84928aad",
- "sha256:fd57160949179ec517d32ac2ac898b5f20d68ed1a9c977346efbac9c2f1e779d"
- ],
- "index": "pypi",
- "version": "==22.3.0"
- },
- "certifi": {
- "hashes": [
- "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3",
- "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"
- ],
- "version": "==2022.12.7"
- },
- "charset-normalizer": {
- "hashes": [
- "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845",
- "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"
- ],
- "markers": "python_version >= '3'",
- "version": "==2.1.1"
- },
- "click": {
- "hashes": [
- "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e",
- "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"
- ],
- "version": "==8.1.3"
- },
- "coverage": {
- "extras": [
- "toml"
- ],
- "hashes": [
- "sha256:10188fe543560ec4874f974b5305cd1a8bdcfa885ee00ea3a03733464c4ca265",
- "sha256:db61a79c07331e88b9a9974815c075fbd812bc9dbc4dc44b366b5368a2936063",
- "sha256:ddb726cb861c3117a553f940372a495fe1078249ff5f8a5478c0576c7be12050"
- ],
- "version": "==7.1.0"
- },
- "docutils": {
- "hashes": [
- "sha256:686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125",
- "sha256:cf316c8370a737a022b72b56874f6602acf974a37a9fba42ec2876387549fc61"
- ],
- "version": "==0.17.1"
- },
- "exceptiongroup": {
- "hashes": [
- "sha256:327cbda3da756e2de031a3107b81ab7b3770a602c4d16ca618298c526f4bec1e",
- "sha256:bcb67d800a4497e1b404c2dd44fca47d3b7a5e5433dbab67f96c1a685cdfdf23"
- ],
- "markers": "python_version < '3.11'",
- "version": "==1.1.0"
- },
- "flake8": {
- "hashes": [
- "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b",
- "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907"
- ],
- "index": "pypi",
- "version": "==3.9.2"
- },
- "idna": {
- "hashes": [
- "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4",
- "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"
- ],
- "markers": "python_version >= '3'",
- "version": "==3.4"
- },
- "imagesize": {
- "hashes": [
- "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b",
- "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"
- ],
- "version": "==1.4.1"
- },
- "importlib-metadata": {
- "hashes": [
- "sha256:7efb448ec9a5e313a57655d35aa54cd3e01b7e1fbcf72dce1bf06119420f5bad",
- "sha256:e354bedeb60efa6affdcc8ae121b73544a7aa74156d047311948f6d711cd378d"
- ],
- "markers": "python_version < '3.10'",
- "version": "==6.0.0"
- },
- "iniconfig": {
- "hashes": [
- "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3",
- "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"
- ],
- "version": "==2.0.0"
- },
- "jinja2": {
- "hashes": [
- "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852",
- "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"
- ],
- "version": "==3.1.2"
- },
- "markupsafe": {
- "hashes": [
- "sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77",
- "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b",
- "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37"
- ],
- "version": "==2.1.1"
- },
- "mccabe": {
- "hashes": [
- "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42",
- "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"
- ],
- "version": "==0.6.1"
- },
- "mock": {
- "hashes": [
- "sha256:122fcb64ee37cfad5b3f48d7a7d51875d7031aaf3d8be7c42e2bee25044eee62",
- "sha256:7d3fbbde18228f4ff2f1f119a45cdffa458b4c0dee32eb4d2bb2f82554bac7bc"
- ],
- "index": "pypi",
- "version": "==4.0.3"
- },
- "mypy-extensions": {
- "hashes": [
- "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d",
- "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"
- ],
- "version": "==0.4.3"
- },
- "packaging": {
- "hashes": [
- "sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2",
- "sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97"
- ],
- "version": "==23.0"
- },
- "pathspec": {
- "hashes": [
- "sha256:46846318467efc4556ccfd27816e004270a9eeeeb4d062ce5e6fc7a87c573f93",
- "sha256:7ace6161b621d31e7902eb6b5ae148d12cfd23f4a249b9ffb6b9fee12084323d"
- ],
- "version": "==0.10.1"
- },
- "platformdirs": {
- "hashes": [
- "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788",
- "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"
- ],
- "version": "==2.5.2"
- },
- "pluggy": {
- "hashes": [
- "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159",
- "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"
- ],
- "version": "==1.0.0"
- },
- "pycodestyle": {
- "hashes": [
- "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068",
- "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"
- ],
- "version": "==2.7.0"
- },
- "pyflakes": {
- "hashes": [
- "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3",
- "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"
- ],
- "version": "==2.3.1"
- },
- "pygments": {
- "hashes": [
- "sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1",
- "sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42"
- ],
- "version": "==2.13.0"
- },
- "pytest": {
- "hashes": [
- "sha256:c7c6ca206e93355074ae32f7403e8ea12163b1163c976fee7d4d84027c162be5",
- "sha256:d45e0952f3727241918b8fd0f376f5ff6b301cc0777c6f9a556935c92d8a7d42"
- ],
- "index": "pypi",
- "version": "==7.2.1"
- },
- "pytest-black": {
- "hashes": [
- "sha256:1d339b004f764d6cd0f06e690f6dd748df3d62e6fe1a692d6a5500ac2c5b75a5"
- ],
- "index": "pypi",
- "version": "==0.3.12"
- },
- "pytest-cov": {
- "hashes": [
- "sha256:2feb1b751d66a8bd934e5edfa2e961d11309dc37b73b0eabe73b5945fee20f6b",
- "sha256:996b79efde6433cdbd0088872dbc5fb3ed7fe1578b68cdbba634f14bb8dd0470"
- ],
- "index": "pypi",
- "version": "==4.0.0"
- },
- "pytest-flake8": {
- "hashes": [
- "sha256:358d449ca06b80dbadcb43506cd3e38685d273b4968ac825da871bd4cc436202",
- "sha256:f1b19dad0b9f0aa651d391c9527ebc20ac1a0f847aa78581094c747462bfa182"
- ],
- "index": "pypi",
- "version": "==1.1.0"
- },
- "pytz": {
- "hashes": [
- "sha256:2c0784747071402c6e99f0bafdb7da0fa22645f06554c7ae06bf6358897e9c91",
- "sha256:48ce799d83b6f8aab2020e369b627446696619e79645419610b9facd909b3174"
- ],
- "version": "==2022.4"
- },
- "requests": {
- "hashes": [
- "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983",
- "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"
- ],
- "version": "==2.28.1"
- },
- "snowballstemmer": {
- "hashes": [
- "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1",
- "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"
- ],
- "version": "==2.2.0"
- },
- "sphinx": {
- "hashes": [
- "sha256:7bf8ca9637a4ee15af412d1a1d9689fec70523a68ca9bb9127c2f3eeb344e2e6",
- "sha256:ebf612653238bcc8f4359627a9b7ce44ede6fdd75d9d30f68255c7383d3a6226"
- ],
- "index": "pypi",
- "version": "==4.5.0"
- },
- "sphinx-rtd-theme": {
- "hashes": [
- "sha256:4d35a56f4508cfee4c4fb604373ede6feae2a306731d533f409ef5c3496fdbd8",
- "sha256:eec6d497e4c2195fa0e8b2016b337532b8a699a68bcb22a512870e16925c6a5c"
- ],
- "index": "pypi",
- "version": "==1.0.0"
- },
- "sphinxcontrib-applehelp": {
- "hashes": [
- "sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a",
- "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"
- ],
- "version": "==1.0.2"
- },
- "sphinxcontrib-devhelp": {
- "hashes": [
- "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e",
- "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"
- ],
- "version": "==1.0.2"
- },
- "sphinxcontrib-htmlhelp": {
- "hashes": [
- "sha256:d412243dfb797ae3ec2b59eca0e52dac12e75a241bf0e4eb861e450d06c6ed07",
- "sha256:f5f8bb2d0d629f398bf47d0d69c07bc13b65f75a81ad9e2f71a63d4b7a2f6db2"
- ],
- "version": "==2.0.0"
- },
- "sphinxcontrib-jsmath": {
- "hashes": [
- "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178",
- "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"
- ],
- "version": "==1.0.1"
- },
- "sphinxcontrib-qthelp": {
- "hashes": [
- "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72",
- "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"
- ],
- "version": "==1.0.3"
- },
- "sphinxcontrib-serializinghtml": {
- "hashes": [
- "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd",
- "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"
- ],
- "version": "==1.1.5"
- },
- "testfixtures": {
- "hashes": [
- "sha256:2600100ae96ffd082334b378e355550fef8b4a529a6fa4c34f47130905c7426d",
- "sha256:6ddb7f56a123e1a9339f130a200359092bd0a6455e31838d6c477e8729bb7763"
- ],
- "index": "pypi",
- "version": "==6.18.3"
- },
- "toml": {
- "hashes": [
- "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b",
- "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"
- ],
- "version": "==0.10.2"
- },
- "tomli": {
- "hashes": [
- "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc",
- "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"
- ],
- "markers": "python_version < '3.11'",
- "version": "==2.0.1"
- },
- "typed-ast": {
- "hashes": [
- "sha256:39e21ceb7388e4bb37f4c679d72707ed46c2fbf2a5609b8b8ebc4b067d977df2",
- "sha256:6778e1b2f81dfc7bc58e4b259363b83d2e509a65198e85d5700dfae4c6c8ff1c"
- ],
- "markers": "python_version < '3.8' and implementation_name == 'cpython'",
- "version": "==1.5.4"
- },
- "typing-extensions": {
- "hashes": [
- "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb",
- "sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4"
- ],
- "markers": "python_version < '3.10'",
- "version": "==4.5.0"
- },
- "urllib3": {
- "hashes": [
- "sha256:24d6a242c28d29af46c3fae832c36db3bbebcc533dd1bb549172cd739c82df21",
- "sha256:94a757d178c9be92ef5539b8840d48dc9cf1b2709c9d6b588232a055c524458b"
- ],
- "version": "==1.26.17"
- },
- "zipp": {
- "hashes": [
- "sha256:23f70e964bc11a34cef175bc90ba2914e1e4545ea1e3e2f67c079671883f9cb6",
- "sha256:e8b2a36ea17df80ffe9e2c4fda3f693c3dad6df1697d3cd3af232db680950b0b"
- ],
- "version": "==3.13.0"
- }
- }
-}
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..38fcd16
--- /dev/null
+++ b/README.md
@@ -0,0 +1,37 @@
+[](https://github.com/DiamondLightSource/atip/actions/workflows/ci.yml)
+[](https://codecov.io/gh/DiamondLightSource/atip)
+[](https://pypi.org/project/atip)
+[](https://www.apache.org/licenses/LICENSE-2.0)
+
+
+# ATIP - Accelerator Toolbox Interface for Pytac
+
+ATIP is an addition to [Pytac](),
+a framework for controlling particle accelerators. ATIP adds a simulator to
+Pytac, which can be used and addressed in the same way as a real accelerator.
+
+ATIP enables the easy offline testing of high level accelerator
+controls applications, by either of two methods:
+
+* By replacing the real accelerator at the point where it is addressed by the
+ software, in the Pytac lattice object;
+
+* In a standalone application as a "virtual accelerator", publishing the same
+ control system interface as the live machine. At Diamond Light Source this
+ has been implemented with EPICS, and run on a different port to the
+ operational control system. So the only change required to test software is
+ to configure this EPICS port.
+
+The python implementation of
+[Accelerator Toolbox]() (pyAT) is used for the
+simulation.
+
+Source |
+:---: | :---:
+PyPI | `pip install atip`
+Docker | `docker run ghcr.io/diamondlightsource/atip:latest`
+Documentation |
+Installation |
+Releases |
+
+
diff --git a/README.rst b/README.rst
deleted file mode 100644
index 160d484..0000000
--- a/README.rst
+++ /dev/null
@@ -1,216 +0,0 @@
-.. image:: https://travis-ci.org/DiamondLightSource/atip.svg?branch=master
- :target: https://travis-ci.org/DiamondLightSource/atip
-.. image:: https://coveralls.io/repos/github/DiamondLightSource/atip/badge.svg?branch=master
- :target: https://coveralls.io/github/DiamondLightSource/atip?branch=master
-.. image:: https://readthedocs.org/projects/atip/badge/?version=latest
- :target: https://atip.readthedocs.io/en/latest/?badge=latest
-.. image:: https://badge.fury.io/py/atip.svg
- :target: https://badge.fury.io/py/atip
-
-==============================================
-ATIP - Accelerator Toolbox Interface for Pytac
-==============================================
-
-ATIP is an addition to `Pytac `_,
-a framework for controlling particle accelerators. ATIP adds a simulator to
-Pytac, which can be used and addressed in the same way as a real accelerator.
-
-ATIP enables the easy offline testing of high level accelerator
-controls applications, by either of two methods:
-
-* By replacing the real accelerator at the point where it is addressed by the
- software, in the Pytac lattice object;
-
-* In a standalone application as a "virtual accelerator", publishing the same
- control system interface as the live machine. At Diamond Light Source this
- has been implemented with EPICS, and run on a different port to the
- operational control system. So the only change required to test software is
- to configure this EPICS port.
-
-The python implementation of
-`Accelerator Toolbox `_ (pyAT) is used for the
-simulation.
-
-For further information on any of ATIP's functions or classes please read the
-documentation `here `_.
-
-Installation:
--------------
-
-See the ``INSTALL.rst`` document.
-
-General Use:
-------------
-
-ATIP produces an "integrated lattice", which is a Pytac lattice object with a
-simulation data source added. The simulated data sources are added using the
-``load()`` function found in ``load_sim.py``.
-
-This adds ``pytac.SIM`` data sources on to the lattice and each of the
-elements.
-
-The integrated lattice acts like a normal Pytac lattice; the simulator can be
-referenced like the live machine but with the data source specified as
-``pytac.SIM`` instead of ``pytac.LIVE``.
-
-For example, a get request to a BPM would be
-``.get_value('x', data_source=pytac.SIM)``.
-
-The simulated data sources behave exactly like the live machine, except for a
-few cases. For example, the simulator has a number of lattice fields that the
-live accelerator doesn't have; and the live machine has a few element fields
-that the simulator doesn't.
-
-Example
-^^^^^^^
-
-Note that you need an AT lattice that is compatible with Pytac. Some are provided
-in ``atip/rings/``, otherwise try running the Matlab function
-``atip/rings/create_lattice_matfile.m`` with an AT lattice loaded.
-
-.. code-block:: python
-
- >>> import pytac
- >>> import atip
- >>> # Load the DIAD lattice from Pytac.
- >>> lat = pytac.load_csv.load('DIAD')
- >>> # Load the AT sim into the Pytac lattice.
- >>> atip.load_sim.load_from_filepath(lat, 'atip/rings/DIAD.mat')
- >>> # Use the sim by default.
- >>> lat.set_default_data_source(pytac.SIM)
- >>> # The initial beam position is zero.
- >>> lat.get_value('x')
- array([0., 0., 0., ..., 0., 0., 0.])
- >>> # Get the first horizontal corrector magnet and set its current to 1A.
- >>> hcor1 = lat.get_elements('HSTR')[0]
- >>> hcor1.set_value('x_kick', 1, units=pytac.ENG)
- >>> # Now the x beam position has changed.
- >>> lat.get_value('x')
- array([0.00240101, 0.00240101, 0.00239875, ..., 0.00240393, 0.00240327,
- 0.00240327])
- >>>
-
-Virtual Accelerator:
---------------------
-
-Instructions for using ATIP as a virtual accelerator can be found in
-``virtac/README.rst``.
-
-Implementation:
----------------
-
-All the accelerator data for the simulator is held in an ``ATSimulator``
-object, which is referenced by the data sources of the lattice and each
-element.Each Pytac element has an equivalent pyAT element, held in a
-``ATElementDataSource``; when a get request is made, the appropriate data from
-that AT element is returned.
-
-The ``ATSimulator`` object has a queue of pending changes. When a set request
-is received by an element, the element puts the changes onto the queue of the
-``ATSimulator``. Inside the ``ATSimulator`` a
-`Cothread `_ thread checks the
-length of the queue. When it sees changes on the queue, the thread
-recalculates the physics data of the lattice to ensure that it is up to date.
-This means that the emittance and linear optics data held by ``ATSimulator``
-is updated after every batch of changes, and that without excessive calculation
-a very recent version of the lattice's physics data is always available.
-
-API:
-----
-
-load_sim:
- * ``load_from_filepath(pytac_lattice, at_lattice_filepath, callback=None)``
- - loads the AT lattice from the given filepath to the .mat file and then
- calls ``load``.
- * ``load(pytac_lattice, at_lattice, callback=None)`` - loads the simulator
- onto the passed Pytac lattice, callback is a callable that is passed to
- ATSimulator during creation to be called on completion of each round of
- physics calculations.
-
-ATElementDataSource:
- * ``get_fields()`` - return the fields on the element.
- * ``add_field(field)`` - add the given field to this element's data source.
- * ``get_value(field)`` - get the value for a given field on the element.
- * ``set_value(field, value)`` - set the value for a given field on the
- element, appends the change to the queue.
-
-ATLatticeDataSource:
- * ``get_fields()`` - return the fields on the lattice.
- * ``get_value(field)`` - get the value for a given field on the lattice.
- * ``set_value(field, set_value)`` - set the value for a given field on the
- lattice, currently not supported so raises HandleException.
-
-ATSimulator:
- * ``toggle_calculations()`` - pause or unpause the recalculation thread.
- * ``wait_for_calculations(timeout=10)`` - wait up to 'timeout' seconds for
- the current calculations to conclude, if they do it returns True, if not
- False is returned; if 'timeout' is not passed it will wait 10 seconds.
- * ``get_at_element(index)`` - return a shallow copy of the specified AT
- element from the central AT ring, N.B. An 'index' of 1 returns ring[0].
- * ``get_at_lattice()`` - return a shallow copy of the entire centralised AT
- lattice object.
- * ``get_s()`` - return the 's position' of every element in the lattice.
- * ``get_total_bend_angle()`` - return the total bending angle of all the
- dipoles in the lattice.
- * ``get_total_absolute_bend_angle()`` - return the total absolute bending
- angle of all the dipoles in the lattice.
- * ``get_energy()`` - return the energy of the lattice.
- * ``get_tune(field)`` - return the specified plane of the lattice's
- 'tune'; 'x' or 'y'.
- * ``get_chromaticity(field)`` - return the specified plane of the lattice's
- 'chromaticity'; 'x' or 'y'.
- * ``get_orbit(field)`` - return the specified plane of the lattice's
- 'closed orbit'; 'x', 'phase_x', 'y', or 'phase_y'.
- * ``get_dispersion()`` - return the 'dispersion' vector for every element
- in the lattice.
- * ``get_alpha()`` - return the 'alpha' vector at every element in the
- lattice.
- * ``get_beta()`` - return the 'beta' vector at every element in the
- lattice.
- * ``get_mu()`` - return 'mu' at every element in the lattice.
- * ``get_m44()`` - return the 4x4 transfer matrix for every element in the
- lattice.
- * ``get_emittance(field)`` - return the specified plane of the lattice's
- 'emittance'; 'x' or 'y'.
- * ``get_radiation_integrals()`` - return the 5 Synchrotron Integrals for
- the lattice.
- * ``get_momentum_compaction()`` - return the momentum compaction factor
- for the lattice.
- * ``get_energy_spread()`` - return the energy spread for the lattice.
- * ``get_energy_loss()`` - return the energy loss per turn of the lattice.
- * ``get_damping_partition_numbers()`` - return the damping partition
- numbers for the lattice's three normal modes.
- * ``get_damping_times()`` - return the damping times for the lattice's
- three normal modes.
- * ``get_linear_dispersion_action()`` - return the Linear Dispersion Action
- ("curly H") for the lattice.
- * ``get_horizontal_emittance()`` - return the horizontal ('x') emittance
- for the lattice calculated from the radiation integrals.
-
-
-Specific Notes:
----------------
-
-In order for ATIP to function correctly, the AT and Pytac lattices used must be
-directly equivalent, i.e. they must have the same length and elements in the
-same positions.
-
-If local (not pip) installations are used, ATIP, AT, and Pytac must all be
-located in the same source directory in order for ATIP to function correctly.
-
-The methods on ATIP's data sources that take ``handle`` and ``throw`` arguments
-do so only to conform with the Pytac ``DataSource`` base class from which they
-inherit. Inside ATIP they are not used and can be ignored.
-
-To interpret which data is to be returned or set, both ``ATElementDataSource``
-and ``ATLatticeDataSource`` use a dictionary of functions corresponding to
-fields. In the case where a cell needs to be passed to the data handling
-functions, for further specification, functools' ``partial()`` is used.
-
-The physics data is received from AT all together; to make it easier to manage,
-it is split by ATIP and accessed by a number of methods of the ``ATSimulator``
-object. This aims to be more convenient for the user but does result in the
-ATSimulator object having a large number of methods.
-
-A number of functions that perform tasks that are frequent or long-winded are
-included in ``utils.py`` to make life easier for the user.
diff --git a/atip/__init__.py b/atip/__init__.py
deleted file mode 100644
index aa8d172..0000000
--- a/atip/__init__.py
+++ /dev/null
@@ -1,6 +0,0 @@
-"""ATIP: Accelerator Toolbox Interface for Pytac.
-See README.rst & INSTALL.rst for more information.
-"""
-from . import load_sim, sim_data_sources, simulator, utils
-
-__all__ = ["load_sim", "sim_data_sources", "simulator", "utils"]
diff --git a/catalog-info.yaml b/catalog-info.yaml
new file mode 100644
index 0000000..fa33dff
--- /dev/null
+++ b/catalog-info.yaml
@@ -0,0 +1,10 @@
+apiVersion: backstage.io/v1alpha1
+kind: Component
+metadata:
+ name: atip
+ title: atip
+ description: Accelerator Toolbox Interface for Pytac
+spec:
+ type: user-interface
+ lifecycle: production
+ owner: group:default/high-level-apps
diff --git a/docs/_api.rst b/docs/_api.rst
new file mode 100644
index 0000000..d782093
--- /dev/null
+++ b/docs/_api.rst
@@ -0,0 +1,17 @@
+:orphan:
+
+..
+ This page is not included in the TOC tree, but must exist so that the
+ autosummary pages are generated for atip and all its
+ subpackages
+
+API
+===
+
+.. autosummary::
+ :toctree: _api
+ :template: custom-module-template.rst
+ :recursive:
+
+ atip
+ virtac
diff --git a/docs/_static/custom.css b/docs/_static/custom.css
new file mode 100644
index 0000000..02bfd7f
--- /dev/null
+++ b/docs/_static/custom.css
@@ -0,0 +1,12 @@
+img[src*='#left'] {
+ float: left;
+}
+
+img[src*='#right'] {
+ float: right;
+}
+
+img[src*='#center'] {
+ display: block;
+ margin: auto;
+}
diff --git a/docs/_templates/custom-module-template.rst b/docs/_templates/custom-module-template.rst
new file mode 100644
index 0000000..9aeca54
--- /dev/null
+++ b/docs/_templates/custom-module-template.rst
@@ -0,0 +1,37 @@
+{{ ('``' + fullname + '``') | underline }}
+
+{%- set filtered_members = [] %}
+{%- for item in members %}
+ {%- if item in functions + classes + exceptions + attributes %}
+ {% set _ = filtered_members.append(item) %}
+ {%- endif %}
+{%- endfor %}
+
+.. automodule:: {{ fullname }}
+ :members:
+
+ {% block modules %}
+ {% if modules %}
+ .. rubric:: Submodules
+
+ .. autosummary::
+ :toctree:
+ :template: custom-module-template.rst
+ :recursive:
+ {% for item in modules %}
+ {{ item }}
+ {%- endfor %}
+ {% endif %}
+ {% endblock %}
+
+ {% block members %}
+ {% if filtered_members %}
+ .. rubric:: Members
+
+ .. autosummary::
+ :nosignatures:
+ {% for item in filtered_members %}
+ {{ item }}
+ {%- endfor %}
+ {% endif %}
+ {% endblock %}
diff --git a/docs/atip.rst b/docs/atip.rst
deleted file mode 100644
index e424942..0000000
--- a/docs/atip.rst
+++ /dev/null
@@ -1,39 +0,0 @@
-API Documentation
-=================
-
-.. automodule:: atip
- :members:
- :undoc-members:
- :show-inheritance:
-
-atip.load_sim module
---------------------
-
-.. automodule:: atip.load_sim
- :members:
- :undoc-members:
- :show-inheritance:
-
-atip.sim_data_sources module
-----------------------------
-
-.. automodule:: atip.sim_data_sources
- :members:
- :undoc-members:
- :show-inheritance:
-
-atip.simulator module
-------------------------
-
-.. automodule:: atip.simulator
- :members:
- :undoc-members:
- :show-inheritance:
-
-atip.utils module
-------------------------
-
-.. automodule:: atip.utils
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/atip_class_diagram.uml b/docs/atip_class_diagram.uml
new file mode 100644
index 0000000..a1b2e23
--- /dev/null
+++ b/docs/atip_class_diagram.uml
@@ -0,0 +1,129 @@
+classDiagram
+ pytac.data_source.DataSource <|-- ATElementDataSource : inherits
+ pytac.data_source.DataSource <|-- ATLatticeDataSource : inherits
+ pyAT.lattice_object.Lattice <|-- ATSimulator : implements
+ ATLatticeDataSource --|> ATSimulator : implements
+ ATElementDataSource --|> ATSimulator : implements
+ ATElementDataSource --|> pyAT.elements.Element : implements
+ LatticeData --|> pyAT.lattice_object.Lattice : relates to
+ LatticeData <|-- ATSimulator : implements
+ ATIPServer --|> pytac.lattice.Lattice : implements
+ callback_offset --|> ATIPServer : implements
+
+
+ namespace pytac {
+ class pytac.data_source.DataSource {
+
+ }
+ class pytac.lattice.Lattice {
+
+ }
+ }
+
+ namespace pyAT {
+ class pyAT.lattice_object.Lattice {
+
+ }
+ class pyAT.elements.Element {
+
+ }
+ }
+
+ namespace atip {
+ class ATElementDataSource {
+ +String units
+ +__init__(at_element, index, atsim, fields)
+ +get_fields()
+ +add_field()
+ +get_value()
+ +set_value()
+ }
+
+ class ATLatticeDataSource {
+ +String units
+ +__init__(atsim)
+ +get_fields()
+ +get_value()
+ +set_value()
+ }
+ class LatticeData {
+ +ArrayLike twiss
+ +ArrayLike tunes
+ +ArrayLike chrom
+ +ArrayLike emittance
+ +ArrayLike radint
+ }
+ class ATSimulator {
+ +cothread.Event up_to_date
+ +__init__(at_lattice, callback, disable_emittance)
+ +queue_set(func, field, value)
+ +quit_calculation_thread(timeout)
+ +toggle_calculations()
+ +pause_calculations()
+ +unpause_calculations()
+ +trigger_calculation()
+ +wait_for_calculations()
+ +get_at_element()
+ +get_at_lattice()
+ +get_s()
+ +get_total_bend_angle()
+ +get_total_absolute_bend_angle()
+ +get_energy()
+ +get_tune()
+ +get_chromaticity()
+ +get_orbit()
+ +get_dispersion()
+ +get_alpha()
+ +get_beta()
+ +get_mu()
+ +get_m66()
+ +get_emittance()
+ +get_radiation_integrals()
+ +get_momentum_compaction()
+ +get_energy_spread()
+ +get_energy_loss()
+ +get_damping_partition_numbers()
+ +get_damping_times()
+ +get_linear_dispersion_action()
+ +get_horizontal_emittance()
+ }
+ }
+
+ namespace virtac {
+ class ATIPServer {
+ +pytac.lattice.Lattice lattice
+ +bool tune_feedback_status
+ +dict all_record_names
+ +__init__(ring_mode, limits_csv, feedback_csv, mirror_csv, tune_csv, disable_emittance)
+ +update_pvs()
+ +monitor_mirrored_pvs()
+ +refresh_record()
+ +setup_tune_feedback()
+ +stop_all_monitoring()
+ +set_feedback_record()
+ }
+ class callback_offset {
+ +ATIPServer server
+ +String quad_pv
+ +pythonSoftIoc.RecordWrapper offset_record
+ +callback(value, index)
+ }
+ class callback_set {
+ +list(pythonSoftIoc.RecordWrapper) output
+ +callback(value, index)
+ }
+ class caget_mask {
+ +String pv
+ +String name
+ +get
+ }
+ class caput_mask {
+ +String pv
+ +String name
+ +set
+ }
+ }
+
+ note for ATIPServer "Creates softioc records"
+ note for callback_offset "Should be done in pytac?"
+ note for ATIPServer "Calls atip.utils.loader which creates a pytac lattice and sets up ATSimulator"
diff --git a/docs/conf.py b/docs/conf.py
index 169d25e..aaad594 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -1,191 +1,205 @@
-# -*- coding: utf-8 -*-
-#
-# Configuration file for the Sphinx documentation builder.
-#
-# This file does only contain a selection of the most common options. For a
-# full list see the documentation:
-# http://www.sphinx-doc.org/en/master/config
-
-import os
-import sys
+"""Configuration file for the Sphinx documentation builder.
-# -- Path setup --------------------------------------------------------------
+This file only contains a selection of the most common options. For a full
+list see the documentation:
+https://www.sphinx-doc.org/en/master/usage/configuration.html
+"""
-# If extensions (or modules to document with autodoc) are in another directory,
-# add these directories to sys.path here. If the directory is relative to the
-# documentation root, use os.path.abspath to make it absolute, like shown here.
-sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
+import sys
+from pathlib import Path
+from subprocess import check_output
-# -- Project information -----------------------------------------------------
+import requests
-project = u'ATIP'
-copyright = u'2019, Tobyn Nicholls'
-author = u'Tobyn Nicholls'
+import atip
-# The short X.Y version
-version = u'1.0'
-# The full version, including alpha/beta/rc tags
-release = u'1.0'
+# -- General configuration ------------------------------------------------
+# General information about the project.
+project = "atip"
-# -- General configuration ---------------------------------------------------
+# The full version, including alpha/beta/rc tags.
+release = atip.__version__
-# If your documentation needs a minimal Sphinx version, state it here.
-#
-# needs_sphinx = '1.0'
+# The short X.Y version.
+if "+" in release:
+ # Not on a tag, use branch name
+ root = Path(__file__).absolute().parent.parent
+ git_branch = check_output("git branch --show-current".split(), cwd=root)
+ version = git_branch.decode().strip()
+else:
+ version = release
-# Add any Sphinx extension module names here, as strings. They can be
-# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
-# ones.
extensions = [
- 'sphinx.ext.autodoc',
- 'sphinx.ext.intersphinx',
- 'sphinx.ext.napoleon',
- 'sphinx.ext.viewcode',
+ # Use this for generating API docs
+ "sphinx.ext.autodoc",
+ # and making summary tables at the top of API docs
+ "sphinx.ext.autosummary",
+ # This can parse google style docstrings
+ "sphinx.ext.napoleon",
+ # For linking to external sphinx documentation
+ "sphinx.ext.intersphinx",
+ # Add links to source code in API docs
+ "sphinx.ext.viewcode",
+ # Adds the inheritance-diagram generation directive
+ "sphinx.ext.inheritance_diagram",
+ # Add a copy button to each code block
+ "sphinx_copybutton",
+ # For the card element
+ "sphinx_design",
+ # So we can write markdown files
+ "myst_parser",
+ # For rendering mermaid diagrams
+ "sphinxcontrib.mermaid",
]
-# Include both class and __init__() docstrings.
-autoclass_content = 'both'
+# So we can use the ::: syntax
+myst_enable_extensions = ["colon_fence"]
+
+myst_fence_as_directive = ["mermaid", "autoclasstree"]
+mermaid_version = "11.6.0"
+
+# If true, Sphinx will warn about all references where the target cannot
+# be found.
+nitpicky = True
+
+# A list of (type, target) tuples (by default empty) that should be ignored when
+# generating warnings in "nitpicky mode". Note that type should include the
+# domain name if present. Example entries would be ('py:func', 'int') or
+# ('envvar', 'LD_LIBRARY_PATH').
+nitpick_ignore = [
+ ("py:class", "NoneType"),
+ ("py:class", "'str'"),
+ ("py:class", "'float'"),
+ ("py:class", "'int'"),
+ ("py:class", "'bool'"),
+ ("py:class", "'object'"),
+ ("py:class", "'id'"),
+ ("py:class", "typing_extensions.Literal"),
+]
+
+# Both the class’ and the __init__ method’s docstring are concatenated and
+# inserted into the main body of the autoclass directive
+autoclass_content = "both"
+
+# Order the members by the order they appear in the source code
+autodoc_member_order = "bysource"
+
+# Don't inherit docstrings from baseclasses
+autodoc_inherit_docstrings = False
+
+# Document only what is in __all__
+autosummary_ignore_module_all = False
# Add any paths that contain templates here, relative to this directory.
-templates_path = ['_templates']
+templates_path = ["_templates"]
-# The suffix(es) of source filenames.
-# You can specify multiple suffix as a list of string:
-#
-# source_suffix = ['.rst', '.md']
-source_suffix = '.rst'
+# Output graphviz directive produced images in a scalable format
+graphviz_output_format = "svg"
-# The master toctree document.
-master_doc = 'index'
+# The name of a reST role (builtin or Sphinx extension) to use as the default
+# role, that is, for text marked up `like this`
+default_role = "any"
-# The language for content autogenerated by Sphinx. Refer to documentation
-# for a list of supported languages.
-#
-# This is also used if you do content translation via gettext catalogs.
-# Usually you set "language" from the command line for these cases.
-language = 'en'
+# The master toctree document.
+master_doc = "index"
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
-# This pattern also affects html_static_path and html_extra_path.
-exclude_patterns = ['_build']
+# These patterns also affect html_static_path and html_extra_path
+exclude_patterns = ["_build"]
# The name of the Pygments (syntax highlighting) style to use.
-pygments_style = 'sphinx'
+pygments_style = "sphinx"
+# This means you can link things like `str` and `asyncio` to the relevant
+# docs in the python documentation.
+intersphinx_mapping = {"python": ("https://docs.python.org/3/", None)}
-# -- Options for HTML output -------------------------------------------------
+# A dictionary of graphviz graph attributes for inheritance diagrams.
+inheritance_graph_attrs = {"rankdir": "TB"}
-# The theme to use for HTML and HTML Help pages. See the documentation for
-# a list of builtin themes.
-#
-html_theme = 'sphinx_rtd_theme'
+# Ignore localhost links for periodic check that links in docs are valid
+linkcheck_ignore = [r"http://localhost:\d+/"]
-# Theme options are theme-specific and customize the look and feel of a theme
-# further. For a list of options available for each theme, see the
-# documentation.
-#
-# html_theme_options = {}
+# Set copy-button to ignore python and bash prompts
+# https://sphinx-copybutton.readthedocs.io/en/latest/use.html#using-regexp-prompt-identifiers
+copybutton_prompt_text = r">>> |\.\.\. |\$ |In \[\d*\]: | {2,5}\.\.\.: | {5,8}: "
+copybutton_prompt_is_regexp = True
-# Add any paths that contain custom static files (such as style sheets) here,
-# relative to this directory. They are copied after the builtin static files,
-# so a file named "default.css" will overwrite the builtin "default.css".
-#html_static_path = ['_static']
+# -- Options for HTML output -------------------------------------------------
-# Custom sidebar templates, must be a dictionary that maps document names
-# to template names.
-#
-# The default sidebars (for documents that don't match any pattern) are
-# defined by theme itself. Builtin themes are using these templates by
-# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
-# 'searchbox.html']``.
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
#
-# html_sidebars = {}
-
-
-# -- Options for HTMLHelp output ---------------------------------------------
-
-# Output file base name for HTML help builder.
-htmlhelp_basename = 'docsdoc'
-
-
-# -- Options for LaTeX output ------------------------------------------------
-
-latex_elements = {
- # The paper size ('letterpaper' or 'a4paper').
- #
- # 'papersize': 'letterpaper',
-
- # The font size ('10pt', '11pt' or '12pt').
- #
- # 'pointsize': '10pt',
-
- # Additional stuff for the LaTeX preamble.
- #
- # 'preamble': '',
-
- # Latex figure (float) alignment
- #
- # 'figure_align': 'htbp',
+html_theme = "pydata_sphinx_theme"
+github_repo = "atip"
+github_user = "DiamondLightSource"
+switcher_json = f"https://{github_user}.github.io/{github_repo}/switcher.json"
+switcher_exists = requests.get(switcher_json).ok
+if not switcher_exists:
+ print(
+ "*** Can't read version switcher, is GitHub pages enabled? \n"
+ " Once Docs CI job has successfully run once, set the "
+ "Github pages source branch to be 'gh-pages' at:\n"
+ f" https://github.com/{github_user}/{github_repo}/settings/pages",
+ file=sys.stderr,
+ )
+
+# Theme options for pydata_sphinx_theme
+# We don't check switcher because there are 3 possible states for a repo:
+# 1. New project, docs are not published so there is no switcher
+# 2. Existing project with latest copier template, switcher exists and works
+# 3. Existing project with old copier template that makes broken switcher,
+# switcher exists but is broken
+# Point 3 makes checking switcher difficult, because the updated copier template
+# will fix the switcher at the end of the docs workflow, but never gets a chance
+# to complete as the docs build warns and fails.
+html_theme_options = {
+ "logo": {
+ "text": project,
+ },
+ "use_edit_page_button": True,
+ "github_url": f"https://github.com/{github_user}/{github_repo}",
+ "icon_links": [
+ {
+ "name": "PyPI",
+ "url": f"https://pypi.org/project/{project}",
+ "icon": "fas fa-cube",
+ }
+ ],
+ "switcher": {
+ "json_url": switcher_json,
+ "version_match": version,
+ },
+ "check_switcher": False,
+ "navbar_end": ["theme-switcher", "icon-links", "version-switcher"],
+ "navigation_with_keys": False,
}
-# Grouping the document tree into LaTeX files. List of tuples
-# (source start file, target name, title,
-# author, documentclass [howto, manual, or own class]).
-latex_documents = [
- (master_doc, 'docs.tex', u'ATIP Documentation',
- u'Tobyn Nicholls', 'manual'),
-]
-
+# A dictionary of values to pass into the template engine’s context for all pages
+html_context = {
+ "github_user": github_user,
+ "github_repo": github_repo,
+ "github_version": version,
+ "doc_path": "docs",
+}
-# -- Options for manual page output ------------------------------------------
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+html_show_sphinx = False
-# One entry per manual page. List of tuples
-# (source start file, name, description, authors, manual section).
-man_pages = [
- (master_doc, 'docs', u'ATIP Documentation',
- [author], 1)
-]
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+html_show_copyright = False
+# Logo
+html_logo = "images/dls-logo.svg"
+html_favicon = html_logo
-# -- Options for Texinfo output ----------------------------------------------
+# These folders are copied to the documentation's HTML output
+html_static_path = ["_static"]
-# Grouping the document tree into Texinfo files. List of tuples
-# (source start file, target name, title, author,
-# dir menu entry, description, category)
-texinfo_documents = [
- (master_doc, 'docs', u'ATIP Documentation',
- author, 'docs', 'ATIP - Accelerator Toolbox Interface for Pytac.',
- 'Accelerator Physics'),
+# These paths are either relative to html_static_path
+# or fully qualified paths (eg. https://...)
+html_css_files = [
+ "custom.css",
]
-
-
-# -- Options for Epub output -------------------------------------------------
-
-# Bibliographic Dublin Core info.
-epub_title = project
-
-# The unique identifier of the text. This can be a ISBN number
-# or the project homepage.
-#
-# epub_identifier = ''
-
-# A unique identification for the text.
-#
-# epub_uid = ''
-
-# A list of files that should not be packed into the epub file.
-epub_exclude_files = ['search.html']
-
-
-# -- Extension configuration -------------------------------------------------
-
-# -- Options for intersphinx extension ---------------------------------------
-
-# Example configuration for intersphinx: refer to the Python standard library.
-intersphinx_mapping = {
- 'python': ('https://docs.python.org/', None),
- 'pytac': ('https://pytac.readthedocs.io/en/latest/', None),
- 'cothread': ('https://cothread.readthedocs.io/en/latest/', None)
-}
diff --git a/docs/explanations.md b/docs/explanations.md
new file mode 100644
index 0000000..65c7121
--- /dev/null
+++ b/docs/explanations.md
@@ -0,0 +1,11 @@
+# Explanations
+
+Explanations of how it works and why it works that way.
+
+```{toctree}
+:maxdepth: 1
+:glob:
+
+explanations/implementation_details
+explanations/*
+```
diff --git a/docs/explanations/decisions.md b/docs/explanations/decisions.md
new file mode 100644
index 0000000..0533b98
--- /dev/null
+++ b/docs/explanations/decisions.md
@@ -0,0 +1,12 @@
+# Architectural Decision Records
+
+Architectural decisions are made throughout a project's lifetime. As a way of keeping track of these decisions, we record these decisions in Architecture Decision Records (ADRs) listed below.
+
+```{toctree}
+:glob: true
+:maxdepth: 1
+
+decisions/*
+```
+
+For more information on ADRs see this [blog by Michael Nygard](http://thinkrelevance.com/blog/2011/11/15/documenting-architecture-decisions).
diff --git a/docs/explanations/decisions/0001-record-architecture-decisions.md b/docs/explanations/decisions/0001-record-architecture-decisions.md
new file mode 100644
index 0000000..44d234e
--- /dev/null
+++ b/docs/explanations/decisions/0001-record-architecture-decisions.md
@@ -0,0 +1,18 @@
+# 1. Record architecture decisions
+
+## Status
+
+Accepted
+
+## Context
+
+We need to record the architectural decisions made on this project.
+
+## Decision
+
+We will use Architecture Decision Records, as [described by Michael Nygard](http://thinkrelevance.com/blog/2011/11/15/documenting-architecture-decisions).
+
+## Consequences
+
+See Michael Nygard's article, linked above. To create new ADRs we will copy and
+paste from existing ones.
diff --git a/docs/explanations/decisions/0002-switched-to-python-copier-template.md b/docs/explanations/decisions/0002-switched-to-python-copier-template.md
new file mode 100644
index 0000000..66fe5d8
--- /dev/null
+++ b/docs/explanations/decisions/0002-switched-to-python-copier-template.md
@@ -0,0 +1,28 @@
+# 2. Adopt python-copier-template for project structure
+
+## Status
+
+Accepted
+
+## Context
+
+We should use the following [python-copier-template](https://github.com/DiamondLightSource/python-copier-template).
+The template will ensure consistency in developer
+environments and package management.
+
+## Decision
+
+We have switched to using the template.
+
+## Consequences
+
+This module will use a fixed set of tools as developed in `python-copier-template`
+and can pull from this template to update the packaging to the latest techniques.
+
+As such, the developer environment may have changed, the following could be
+different:
+
+- linting
+- formatting
+- pip venv setup
+- CI/CD
diff --git a/docs/explanations/decisions/COPYME b/docs/explanations/decisions/COPYME
new file mode 100644
index 0000000..b466c79
--- /dev/null
+++ b/docs/explanations/decisions/COPYME
@@ -0,0 +1,19 @@
+# 3. Short descriptive title
+
+Date: Today's date
+
+## Status
+
+Accepted
+
+## Context
+
+Background to allow us to make the decision, to show how we arrived at our conclusions.
+
+## Decision
+
+What decision we made.
+
+## Consequences
+
+What we will do as a result of this decision.
diff --git a/virtac/FEEDBACK_SYSTEMS.rst b/docs/explanations/feedback_systems.md
similarity index 96%
rename from virtac/FEEDBACK_SYSTEMS.rst
rename to docs/explanations/feedback_systems.md
index 375aecc..638ea06 100644
--- a/virtac/FEEDBACK_SYSTEMS.rst
+++ b/docs/explanations/feedback_systems.md
@@ -1,6 +1,5 @@
-================
-Feedback Systems
-================
+# Feedback Systems
+
Currently supported "slow" feedback systems at Diamond are:
@@ -12,8 +11,7 @@ Currently supported "slow" feedback systems at Diamond are:
In order to support these various feedback systems, the virtual accelerator
makes several adjustments and additions to core ATIP functionality.
-Mirrored Records:
------------------
+## Mirrored Records:
The ability to create mirror records is provided. A mirror record can take
value(s) from one or more records as inputs and set its output dependent on
@@ -36,8 +34,7 @@ For more information on mirror records see docstrings of the classes in
``mirror_objects.py``, the relevant methods on ``ATIPServer``, and
``generate_mirrored_pvs`` in ``create_csv.py``.
-Masks:
-------
+## Masks:
Masks are wrappers for existing functions to enable them to be addressed using
a different syntax than normal. The types of masks are:
@@ -54,8 +51,7 @@ a different syntax than normal. The types of masks are:
record object, ``.set(value)`` simply calls ``caput(stored_pv, value)``.
-Tune feedback
--------------
+## Tune feedback
As mentioned above, the ``callback_offset`` class allows the tune feedback
system to function exactly as it does on the live machine.
diff --git a/docs/explanations/implementation_details.md b/docs/explanations/implementation_details.md
new file mode 100644
index 0000000..b297192
--- /dev/null
+++ b/docs/explanations/implementation_details.md
@@ -0,0 +1,61 @@
+# Implementation details
+
+All the accelerator data for the simulator is held in an ATSimulator object, which is referenced by the data sources of the lattice and each element. Each Pytac element has an equivalent pyAT element, held in an ATElementDataSource; when a get request is made, the appropriate data from that AT element is returned.
+
+The ATSimulator object has a queue of pending changes. When a set request is received by an element, the element puts the changes onto the queue of the ATSimulator. Inside the ATSimulator a Cothread thread checks the length of the queue. When it sees changes on the queue, the thread recalculates the physics data of the lattice to ensure that it is up to date. This means that the emittance and linear optics data held by ATSimulator is updated after every batch of changes, and that without excessive calculation a very recent version of the lattice's physics data is always available.
+
+## API:
+
+load_sim:
+
+ load_from_filepath(pytac_lattice, at_lattice_filepath, callback=None) - loads the AT lattice from the given filepath to the .mat file and then calls load.
+ load(pytac_lattice, at_lattice, callback=None) - loads the simulator onto the passed Pytac lattice, callback is a callable that is passed to ATSimulator during creation to be called on completion of each round of physics calculations.
+
+ATElementDataSource:
+
+ get_fields() - return the fields on the element.
+ add_field(field) - add the given field to this element's data source.
+ get_value(field) - get the value for a given field on the element.
+ set_value(field, value) - set the value for a given field on the element, appends the change to the queue.
+
+ATLatticeDataSource:
+
+ get_fields() - return the fields on the lattice.
+ get_value(field) - get the value for a given field on the lattice.
+ set_value(field, set_value) - set the value for a given field on the lattice, currently not supported so raises HandleException.
+
+ATSimulator:
+
+ toggle_calculations() - pause or unpause the recalculation thread.
+ wait_for_calculations(timeout=10) - wait up to 'timeout' seconds for the current calculations to conclude, if they do it returns True, if not False is returned; if 'timeout' is not passed it will wait 10 seconds.
+ get_at_element(index) - return a shallow copy of the specified AT element from the central AT ring, N.B. An 'index' of 1 returns ring[0].
+ get_at_lattice() - return a shallow copy of the entire centralised AT lattice object.
+ get_s() - return the 's position' of every element in the lattice.
+ get_total_bend_angle() - return the total bending angle of all the dipoles in the lattice.
+ get_total_absolute_bend_angle() - return the total absolute bending angle of all the dipoles in the lattice.
+ get_energy() - return the energy of the lattice.
+ get_tune(field) - return the specified plane of the lattice's 'tune'; 'x' or 'y'.
+ get_chromaticity(field) - return the specified plane of the lattice's 'chromaticity'; 'x' or 'y'.
+ get_orbit(field) - return the specified plane of the lattice's 'closed orbit'; 'x', 'phase_x', 'y', or 'phase_y'.
+ get_dispersion() - return the 'dispersion' vector for every element in the lattice.
+ get_alpha() - return the 'alpha' vector at every element in the lattice.
+ get_beta() - return the 'beta' vector at every element in the lattice.
+ get_mu() - return 'mu' at every element in the lattice.
+ get_m44() - return the 4x4 transfer matrix for every element in the lattice.
+ get_emittance(field) - return the specified plane of the lattice's 'emittance'; 'x' or 'y'.
+ get_radiation_integrals() - return the 5 Synchrotron Integrals for the lattice.
+ get_momentum_compaction() - return the momentum compaction factor for the lattice.
+ get_energy_spread() - return the energy spread for the lattice.
+ get_energy_loss() - return the energy loss per turn of the lattice.
+ get_damping_partition_numbers() - return the damping partition numbers for the lattice's three normal modes.
+ get_damping_times() - return the damping times for the lattice's three normal modes.
+ get_linear_dispersion_action() - return the Linear Dispersion Action ("curly H") for the lattice.
+ get_horizontal_emittance() - return the horizontal ('x') emittance for the lattice calculated from the radiation integrals.
+
+
+
+```mermaid ../atip_class_diagram.uml
+:zoom:
+```
diff --git a/docs/genindex.md b/docs/genindex.md
new file mode 100644
index 0000000..73f1191
--- /dev/null
+++ b/docs/genindex.md
@@ -0,0 +1,3 @@
+# Index
+
+
diff --git a/docs/how-to.md b/docs/how-to.md
new file mode 100644
index 0000000..6b16141
--- /dev/null
+++ b/docs/how-to.md
@@ -0,0 +1,10 @@
+# How-to Guides
+
+Practical step-by-step guides for the more experienced user.
+
+```{toctree}
+:maxdepth: 1
+:glob:
+
+how-to/*
+```
diff --git a/docs/how-to/contribute.md b/docs/how-to/contribute.md
new file mode 100644
index 0000000..6e41979
--- /dev/null
+++ b/docs/how-to/contribute.md
@@ -0,0 +1,2 @@
+```{include} ../../.github/CONTRIBUTING.md
+```
diff --git a/docs/how-to/run-container.md b/docs/how-to/run-container.md
new file mode 100644
index 0000000..f07a6a4
--- /dev/null
+++ b/docs/how-to/run-container.md
@@ -0,0 +1,14 @@
+# Run in a container
+
+Pre-built containers with atip and its dependencies already
+installed are available on [Github Container Registry](https://ghcr.io/DiamondLightSource/atip).
+
+## Starting the container
+
+To pull the container from github container registry and run:
+
+```
+$ docker run ghcr.io/diamondlightsource/atip:latest --version
+```
+
+To get a released version, use a numbered release instead of `latest`.
diff --git a/docs/control_structure.png b/docs/images/control_structure.png
similarity index 100%
rename from docs/control_structure.png
rename to docs/images/control_structure.png
diff --git a/docs/images/dls-logo.svg b/docs/images/dls-logo.svg
new file mode 100644
index 0000000..4fcaa86
--- /dev/null
+++ b/docs/images/dls-logo.svg
@@ -0,0 +1,11 @@
+
+
diff --git a/docs/index.md b/docs/index.md
new file mode 100644
index 0000000..730b3fd
--- /dev/null
+++ b/docs/index.md
@@ -0,0 +1,56 @@
+---
+html_theme.sidebar_secondary.remove: true
+---
+
+```{include} ../README.md
+:end-before: <!-- README only content
+```
+::::{grid} 2
+:gutter: 4
+
+:::{grid-item-card} {material-regular}`directions_walk;2em`
+```{toctree}
+:maxdepth: 2
+tutorials
+```
++++
+Tutorials for installation and typical usage. New users start here.
+:::
+
+:::{grid-item-card} {material-regular}`directions;2em`
+```{toctree}
+:maxdepth: 2
+how-to
+```
++++
+Practical step-by-step guides for the more experienced user.
+:::
+
+:::{grid-item-card} {material-regular}`info;2em`
+```{toctree}
+:maxdepth: 2
+explanations
+```
++++
+Explanations of how it works and why it works that way.
+:::
+
+:::{grid-item-card} {material-regular}`menu_book;2em`
+```{toctree}
+:maxdepth: 2
+reference
+```
++++
+Technical reference material including APIs and release notes.
+:::
+
+::::
diff --git a/docs/index.rst b/docs/index.rst
deleted file mode 100644
index 956eb2e..0000000
--- a/docs/index.rst
+++ /dev/null
@@ -1,82 +0,0 @@
-ATIP - Accelerator Toolbox Interface for Pytac
-==============================================
-
-ATIP is an addition to `Pytac <https://github.com/DiamondLightSource/pytac>`_,
-a framework for controlling particle accelerators. ATIP adds a simulator to
-Pytac, which can be used and addressed in the same way as a real accelerator.
-This enables the easy offline testing of high level accelerator controls
-applications.
-
-ATIP is hosted on Github `here <https://github.com/DiamondLightSource/atip>`_.
-
-The python implementation of
-`Accelerator Toolbox <https://github.com/atcollab/at>`_ (pyAT) is used
-for the simulation.
-
-.. sidebar:: How ATIP fits into the combined control structure.
-
- .. image:: control_structure.png
- :width: 400
-
-ATIP allows an AT lattice to be fitted into the simulation data source of a
-Pytac lattice. This integrated lattice acts like a normal Pytac lattice, and
-enables the AT simulator to react and respond to changes as the real
-accelerator would.
-
-ATIP also makes use of a `Cothread <https://github.com/DiamondLightSource/cothread>`_
-thread to recalculate and update the stored physics data any time a change is
-made to the lattice.
-
-ATIP can also be run in a standalone application as a "virtual accelerator",
-publishing the same control system interface as the live machine. At Diamond
-Light Source this has been implemented with EPICS, using
-`PythonSoftIOC <https://github.com/DiamondLightSource/pythonSoftIOC>`_.
-This functionality is not documented here but an explanation of how it works
-and how to use it may be found in the ``.rst`` files inside ATIP's ``virtac``
-directory.
-
-Example
--------
-
-Note that you need an AT lattice that is compatible with Pytac. Some are provided
-in ``atip/rings/``, otherwise try running the Matlab function
-``atip/rings/create_lattice_matfile.m`` with an AT lattice loaded.
-
-.. code-block:: python
-
- >>> import pytac
- >>> import atip
- >>> # Load the DIAD lattice from Pytac.
- >>> lat = pytac.load_csv.load('DIAD')
- >>> # Load the AT sim into the Pytac lattice.
- >>> atip.load_sim.load_from_filepath(lat, 'atip/rings/DIAD.mat')
- >>> # Use the sim by default.
- >>> lat.set_default_data_source(pytac.SIM)
- >>> # The initial beam position is zero.
- >>> lat.get_value('x')
- array([0., 0., 0., ..., 0., 0., 0.])
- >>> # Get the first horizontal corrector magnet and set its current to 1A.
- >>> hcor1 = lat.get_elements('HSTR')[0]
- >>> hcor1.set_value('x_kick', 1, units=pytac.ENG)
- >>> # Now the x beam position has changed.
- >>> lat.get_value('x')
- array([0.00240101, 0.00240101, 0.00239875, ..., 0.00240393, 0.00240327,
- 0.00240327])
- >>>
-
-Contents:
-=========
-
-.. toctree::
- :maxdepth: 2
-
- self
- atip
-
-
-Indices and tables
-==================
-
-* :ref:`genindex`
-* :ref:`modindex`
-* :ref:`search`
diff --git a/docs/reference.md b/docs/reference.md
new file mode 100644
index 0000000..b2278f3
--- /dev/null
+++ b/docs/reference.md
@@ -0,0 +1,12 @@
+# Reference
+
+Technical reference material including APIs and release notes.
+
+```{toctree}
+:maxdepth: 1
+:glob:
+
+API <_api/atip>
+genindex
+Release Notes
+```
diff --git a/docs/requirements.txt b/docs/requirements.txt
deleted file mode 100644
index 6e3b13a..0000000
--- a/docs/requirements.txt
+++ /dev/null
@@ -1,14 +0,0 @@
-# Still required for readthedocs.
-accelerator-toolbox>=0.0.2
-pytac>=0.3.0
-cothread
-numpy
-scipy
-pytest
-pytest-cov
-testfixtures
-coveralls
-mock
-flake8
-sphinx
-sphinx-rtd-theme
diff --git a/docs/tutorials.md b/docs/tutorials.md
new file mode 100644
index 0000000..b80c28b
--- /dev/null
+++ b/docs/tutorials.md
@@ -0,0 +1,13 @@
+# Tutorials
+
+Tutorials for installation and typical usage. New users start here.
+
+```{toctree}
+:maxdepth: 1
+:glob:
+
+tutorials/installation
+tutorials/overview
+tutorials/atip_example
+tutorials/virtac_example
+```
diff --git a/docs/tutorials/atip_example.md b/docs/tutorials/atip_example.md
new file mode 100644
index 0000000..fa954ff
--- /dev/null
+++ b/docs/tutorials/atip_example.md
@@ -0,0 +1,29 @@
+# ATIP example
+
+## Simulating accelerator physics using ATIP as a data source for Pytac
+
+Note that you need an AT lattice that is compatible with Pytac. Some are provided
+in ``atip/rings/``, otherwise try running the Matlab function
+``atip/rings/create_lattice_matfile.m`` with an AT lattice loaded.
+
+:::{code-block} python
+
+>>> import pytac
+>>> import atip
+>>> # Load the DIAD lattice from Pytac.
+>>> lat = pytac.load_csv.load('DIAD')
+>>> # Load the AT sim into the Pytac lattice.
+>>> atip.load_sim.load_from_filepath(lat, 'atip/rings/DIAD.mat')
+>>> # Use the sim by default.
+>>> lat.set_default_data_source(pytac.SIM)
+>>> # The initial beam position is zero.
+>>> lat.get_value('x')
+array([0., 0., 0., ..., 0., 0., 0.])
+>>> # Get the first horizontal corrector magnet and set its current to 1A.
+>>> hcor1 = lat.get_elements('HSTR')[0]
+>>> hcor1.set_value('x_kick', 1, units=pytac.ENG)
+>>> # Now the x beam position has changed.
+>>> lat.get_value('x')
+array([0.00240101, 0.00240101, 0.00239875, ..., 0.00240393, 0.00240327,
+ 0.00240327])
+:::
diff --git a/docs/tutorials/installation.md b/docs/tutorials/installation.md
new file mode 100644
index 0000000..2e1473f
--- /dev/null
+++ b/docs/tutorials/installation.md
@@ -0,0 +1,42 @@
+# Installation
+
+## Check your version of python
+
+You will need python 3.10 or later. You can check your version of python by
+typing into a terminal:
+
+```
+$ python3 --version
+```
+
+## Create a virtual environment
+
+It is recommended that you install into a “virtual environment” so this
+installation will not interfere with any existing Python software:
+
+```
+$ python3 -m venv /path/to/venv
+$ source /path/to/venv/bin/activate
+```
+
+## Installing the library
+
+You can now use `pip` to install the library and its dependencies:
+
+```
+$ python3 -m pip install atip
+```
+
+If you require a feature that is not currently released you can also install
+from github:
+
+```
+$ python3 -m pip install git+https://github.com/DiamondLightSource/atip.git
+```
+
+The library should now be installed and the commandline interface on your path.
+You can check the version that has been installed by typing:
+
+```
+$ atip --version
+```
diff --git a/docs/tutorials/overview.md b/docs/tutorials/overview.md
new file mode 100644
index 0000000..b353fd0
--- /dev/null
+++ b/docs/tutorials/overview.md
@@ -0,0 +1,48 @@
+# Overview
+
+ATIP is an addition to [Pytac](https://github.com/DiamondLightSource/pytac),
+a framework for controlling particle accelerators. ATIP adds a simulator to
+Pytac, which can be used and addressed in the same way as a real accelerator.
+This enables the easy offline testing of high level accelerator controls
+applications.
+
+ATIP is hosted on Github [here](https://github.com/DiamondLightSource/atip).
+
+The python implementation of
+[Accelerator Toolbox](https://github.com/atcollab/at) (pyAT) is used
+for the simulation.
+
+:::{figure} ../images/control_structure.png
+How ATIP fits into the combined control structure.
+:::
+
+ATIP allows an AT lattice to be fitted into the simulation data source of a
+Pytac lattice. This integrated lattice acts like a normal Pytac lattice, and
+enables the AT simulator to react and respond to changes as the real
+accelerator would.
+
+ATIP also makes use of a [Cothread](https://github.com/DiamondLightSource/cothread)
+thread to recalculate and update the stored physics data any time a change is
+made to the lattice.
+
+ATIP can also be run in a standalone application as a "virtual accelerator",
+publishing the same control system interface as the live machine. At Diamond
+Light Source this has been implemented with EPICS, using
+[PythonSoftIOC](https://github.com/DiamondLightSource/pythonSoftIOC).
+
+More documentation on how to use the virtual accelerator can be found [here](virtac_example.md).
+
+
+## Helpful tips:
+
+In order for ATIP to function correctly, the AT and Pytac lattices used must be directly equivalent, i.e. they must have the same length and elements in the same positions.
+
+If local (not pip) installations are used, ATIP, AT, and Pytac must all be located in the same source directory in order for ATIP to function correctly.
+
+The methods on ATIP's data sources that take handle and throw arguments do so only to conform with the Pytac DataSource base class from which they inherit. Inside ATIP they are not used and can be ignored.
+
+To interpret which data is to be returned or set, both ATElementDataSource and ATLatticeDataSource use a dictionary of functions corresponding to fields. In the case where a cell needs to be passed to the data handling functions, for further specification, functools' partial() is used.
+
+The physics data is received from AT all together; to make it easier to manage, it is split by ATIP and accessed by a number of methods of the ATSimulator object. This aims to be more convenient for the user but does result in the ATSimulator object having a large number of methods.
+
+A number of functions that perform tasks that are frequent or long-winded are included in utils.py to make life easier for the user.
diff --git a/virtac/README.rst b/docs/tutorials/virtac_example.md
similarity index 64%
rename from virtac/README.rst
rename to docs/tutorials/virtac_example.md
index 3ba9cea..25660ad 100644
--- a/virtac/README.rst
+++ b/docs/tutorials/virtac_example.md
@@ -1,6 +1,6 @@
-===========================================================
-Running ATIP as a Virtual Accelerator using Python Soft IOC
-===========================================================
+# VIRTAC example
+
+## Running ATIP as a Virtual Accelerator using Python Soft IOC
 Using `PythonSoftIOC <https://github.com/DiamondLightSource/pythonSoftIOC>`_, ATIP can
emulate machine PVs, so that the ATIP simulator can be addressed in the same
@@ -13,44 +13,48 @@ used by convention at Diamond for simulations) to avoid conflict with the same
PVs on the live machine.
-Start the virtual accelerator
------------------------------
+## Start the virtual accelerator
Run the virtac under the development EPICS port::
- $ export EPICS_CA_SERVER_PORT=6064
- $ export EPICS_CAS_SERVER_PORT=6064
- $ export EPICS_CA_REPEATER_PORT=6065
- $ # at Diamond the above can be set in one go using: . changeports 6064
- $ pipenv run virtac
+:::{code-block} bash
+$ export EPICS_CA_SERVER_PORT=6064
+$ export EPICS_CAS_SERVER_PORT=6064
+$ export EPICS_CA_REPEATER_PORT=6065
+$ # at Diamond the above can be set in one go using: . changeports 6064
+$ pipenv run virtac
+:::
It takes 10 seconds or so to load the interactive console::
- Starting record creation.
- ~*~*Woah, we're halfway there, Wo-oah...*~*~
- Finished creating all 2981 records.
- Starting iocInit
- ############################################################################
- ## EPICS 7.0.6.0
- ## Rev. 7.0.6.99.1.0
- ############################################################################
- iocRun: All initialization complete
- Python 3.7.2 (default, Jan 20 2020, 11:03:41)
- [GCC 4.8.5 20150623 (Red Hat 4.8.5-39)] on linux
- Type "help", "copyright", "credits" or "license" for more information.
- (InteractiveConsole)
- >>>
+:::{code-block} bash
+Starting record creation.
+~*~*Woah, we're halfway there, Wo-oah...*~*~
+Finished creating all 2981 records.
+Starting iocInit
+############################################################################
+## EPICS 7.0.6.0
+## Rev. 7.0.6.99.1.0
+############################################################################
+iocRun: All initialization complete
+Python 3.7.2 (default, Jan 20 2020, 11:03:41)
+[GCC 4.8.5 20150623 (Red Hat 4.8.5-39)] on linux
+Type "help", "copyright", "credits" or "license" for more information.
+(InteractiveConsole)
+>>>
+:::
Leave the server running and in a new terminal update the EPICS port::
- $ export EPICS_CA_SERVER_PORT=6064
- $ # or: . changeports 6064
+:::{code-block} bash
+$ export EPICS_CA_SERVER_PORT=6064
+$ # or: . changeports 6064
+:::
In this new terminal you are then free to address the simulator as you would
the live machine, either through Pytac or by directly accessing the PVs.
-Feedback Records:
------------------
+## Feedback Records:
A number of PVs related to feedback systems are supported. These have been
added to aid testing of the high level applications at Diamond that control
@@ -95,17 +99,20 @@ This is done inside the server console, in the terminal where one you ran
For example disabling SOFB on the first BPM::
- >>> server.set_feedback_record(3, 'enabled', 0)
+:::{code-block} bash
+>>> server.set_feedback_record(3, 'enabled', 0)
+:::
or reducing the beam current::
- >>> server.set_feedback_record(0, 'beam_current', 280)
+:::{code-block} bash
+>>> server.set_feedback_record(0, 'beam_current', 280)
+:::
For further information on working with feedback systems, please refer to
``FEEDBACK_SYSTEMS.rst``.
-Ring Mode:
-----------
+## Ring Mode:
You can run the virtual accelerator in any ring mode that is supported by
Pytac; currently 'VMX', 'VMXSP', 'DIAD', and 'I04'. The ring mode can be set by the
@@ -119,7 +126,9 @@ If none of these is set then the virtual accelerator will default to 'I04'.
For example::
- $ pipenv run virtac I04
- $ export RINGMODE=I04
- $ caput SR-CS-RING-01:MODE 3
- $ # Having none of these set would also start in mode 'I04'.
+:::{code-block} bash
+$ pipenv run virtac I04
+$ export RINGMODE=I04
+$ caput SR-CS-RING-01:MODE 3
+$ # Having none of these set would also start in mode 'I04'.
+:::
diff --git a/pyproject.toml b/pyproject.toml
index 07de284..7dbd287 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,3 +1,133 @@
[build-system]
-requires = ["setuptools", "wheel"]
-build-backend = "setuptools.build_meta"
\ No newline at end of file
+requires = ["setuptools>=64", "setuptools_scm[toml]>=8"]
+build-backend = "setuptools.build_meta"
+
+[project]
+name = "atip"
+classifiers = [
+ "Development Status :: 3 - Alpha",
+ "License :: OSI Approved :: Apache Software License",
+ "Programming Language :: Python :: 3.10",
+ "Programming Language :: Python :: 3.11",
+ "Programming Language :: Python :: 3.12",
+]
+description = "Accelerator Toolbox Interface for Pytac"
+dependencies = [
+ "numpy",
+ "scipy",
+ "pytac>=0.3.0",
+ "accelerator-toolbox>=0.2.0",
+ "cothread",
+ "softioc",
+]
+
+dynamic = ["version"]
+license.file = "LICENSE"
+readme = "README.md"
+requires-python = ">=3.10"
+
+[project.optional-dependencies]
+dev = [
+ "copier",
+ "myst-parser",
+ "pipdeptree",
+ "pre-commit",
+ "pydata-sphinx-theme>=0.12",
+ "pyright",
+ "pytest",
+ "pytest-cov",
+ "testfixtures",
+ "ruff",
+ "sphinx-autobuild",
+ "sphinx-copybutton",
+ "sphinx-design",
+ "sphinxcontrib-mermaid",
+ "tox-direct",
+ "types-mock",
+]
+
+[project.scripts]
+atip = "atip.__main__:main"
+virtac = "virtac.__main__:main"
+
+[project.urls]
+GitHub = "https://github.com/DiamondLightSource/atip"
+
+[[project.authors]] # Further authors may be added by duplicating this section
+email = "tobyn.nicholls@diamond.ac.uk"
+name = "Tobyn Nicholls"
+
+[[project.authors]] # Further authors may be added by duplicating this section
+email = "philip.smith@diamond.ac.uk"
+name = "Phil Smith"
+
+[tool.setuptools_scm]
+version_file = "src/atip/_version.py"
+
+[tool.pyright]
+typeCheckingMode = "standard"
+reportMissingImports = false # Ignore missing stubs in imported modules
+ignore = ["tests"] #Not currently type checking tests
+
+[tool.pytest.ini_options]
+# Run pytest with all our checkers, and don't spam us with massive tracebacks on error
+addopts = """
+ --tb=native -vv --doctest-modules --doctest-glob="*.rst"
+ """
+# https://iscinumpy.gitlab.io/post/bound-version-constraints/#watch-for-warnings
+filterwarnings = "error"
+# Doctest python code in docs, python code in src docstrings, test functions in tests
+testpaths = "docs src tests"
+
+[tool.coverage.run]
+data_file = "/tmp/atip.coverage"
+
+[tool.coverage.paths]
+# Tests are run from installed location, map back to the src directory
+source = ["src", "**/site-packages/"]
+
+# tox must currently be configured via an embedded ini string
+# See: https://github.com/tox-dev/tox/issues/999
+[tool.tox]
+legacy_tox_ini = """
+[tox]
+skipsdist=True
+
+[testenv:{pre-commit,type-checking,tests,docs}]
+# Don't create a virtualenv for the command, requires tox-direct plugin
+direct = True
+passenv = *
+allowlist_externals =
+ pytest
+ pre-commit
+ pyright
+ sphinx-build
+ sphinx-autobuild
+commands =
+ pre-commit: pre-commit run --all-files --show-diff-on-failure {posargs}
+ type-checking: pyright src tests {posargs}
+ tests: pytest --cov=atip --cov-report term --cov-report xml:cov.xml {posargs}
+ docs: sphinx-{posargs:build -EW --keep-going} -T docs build/html
+"""
+
+#TODO: ^ Add/remove the -W flag from sphinx to enable/disable exiting on docs build errors
+
+[tool.ruff]
+src = ["src", "tests"]
+line-length = 88
+lint.select = [
+ "B", # flake8-bugbear - https://docs.astral.sh/ruff/rules/#flake8-bugbear-b
+ "C4", # flake8-comprehensions - https://docs.astral.sh/ruff/rules/#flake8-comprehensions-c4
+ "E", # pycodestyle errors - https://docs.astral.sh/ruff/rules/#error-e
+ "F", # pyflakes rules - https://docs.astral.sh/ruff/rules/#pyflakes-f
+ "W", # pycodestyle warnings - https://docs.astral.sh/ruff/rules/#warning-w
+ "I", # isort - https://docs.astral.sh/ruff/rules/#isort-i
+ "UP", # pyupgrade - https://docs.astral.sh/ruff/rules/#pyupgrade-up
+ "SLF", # self - https://docs.astral.sh/ruff/settings/#lintflake8-self
+]
+
+[tool.ruff.lint.per-file-ignores]
+# By default, private member access is allowed in tests
+# See https://github.com/DiamondLightSource/python-copier-template/issues/154
+# Remove this line to forbid private member access in tests
+"tests/**/*" = ["SLF001"]
diff --git a/setup.cfg b/setup.cfg
deleted file mode 100644
index 19eeb54..0000000
--- a/setup.cfg
+++ /dev/null
@@ -1,53 +0,0 @@
-[metadata]
-name = atip
-version = 0.2.0
-description = ATIP: Accelerator Toolbox Interface for Pytac
-long-description = file: README.rst
-author = Tobyn Nicholls
-license = Apache License 2.0
-url = https://github.com/DiamondLightSource/atip
-classifiers =
- Development Status :: 3 - Alpha
- Intended Audience :: Developers
- Topic :: Software Development :: Build Tools
- License :: OSI Approved :: Apache Software License
- Programming Language :: Python :: 3.8
- Programming Language :: Python :: 3.9
-keywords = accelerator physics
-
-[options]
-packages =
- atip
- virtac
-include_package_data = true
-install_requires =
- numpy
- scipy
- pytac>=0.3.0
- accelerator-toolbox>=0.2.0
- cothread
- softioc
-
-[options.entry_points]
-console_scripts =
- virtac = virtac.atip_ioc_entry:main
-
-[coverage:run]
-omit =
- atip/ease.py
- atip/utils.py
- */junk/*
- */venv*
-
-[flake8]
-# Make flake8 respect black's line length (default 88),
-max-line-length = 88
-exclude =
- .git
- atip/junk
- venv*
- docs
- build
-
-[bdist_wheel]
-universal = 1
diff --git a/setup.py b/setup.py
deleted file mode 100644
index b024da8..0000000
--- a/setup.py
+++ /dev/null
@@ -1,4 +0,0 @@
-from setuptools import setup
-
-
-setup()
diff --git a/src/atip/__init__.py b/src/atip/__init__.py
new file mode 100644
index 0000000..260a3bd
--- /dev/null
+++ b/src/atip/__init__.py
@@ -0,0 +1,13 @@
+"""ATIP: Accelerator Toolbox Interface for Pytac.
+See README.md for more information.
+
+.. data:: __version__
+ :type: str
+
+ Version number as calculated by https://github.com/pypa/setuptools_scm
+"""
+
+from . import load_sim, sim_data_sources, simulator, utils
+from ._version import __version__
+
+__all__ = ["__version__", "load_sim", "sim_data_sources", "simulator", "utils"]
diff --git a/src/atip/__main__.py b/src/atip/__main__.py
new file mode 100644
index 0000000..ec41ba7
--- /dev/null
+++ b/src/atip/__main__.py
@@ -0,0 +1,35 @@
+"""Atip entrypoint. This file currently does not do anything interesting, as atip is a
+library and not really designed to be a runnable application."""
+
+from argparse import ArgumentParser
+from collections.abc import Sequence
+
+import pytac
+
+import atip
+
+__all__ = ["main"]
+
+
+def main(args: Sequence[str] | None = None) -> None:
+ """Argument parser for the CLI."""
+ parser = ArgumentParser()
+ parser.add_argument(
+ "-v",
+ "--version",
+ action="version",
+ version=atip.__version__,
+ )
+ parser.parse_args(args)
+
+ lat = pytac.load_csv.load("DIAD")
+ atip.load_sim.load_from_filepath(lat, "src/atip/rings/DIAD.mat")
+ lat.set_default_data_source(pytac.SIM)
+ print(lat.get_value("x"))
+ hcor1 = lat.get_elements("HSTR")[0]
+ hcor1.set_value("x_kick", 1, units=pytac.ENG)
+ print(lat.get_value("x"))
+
+
+if __name__ == "__main__":
+ main()
diff --git a/atip/load_sim.py b/src/atip/load_sim.py
similarity index 99%
rename from atip/load_sim.py
rename to src/atip/load_sim.py
index ae25c5d..27fabf1 100644
--- a/atip/load_sim.py
+++ b/src/atip/load_sim.py
@@ -1,4 +1,5 @@
"""Module responsible for handling the loading of simulator data sources."""
+
import at
import pytac
from pytac.exceptions import FieldException
diff --git a/atip/rings/DBA.mat b/src/atip/rings/DBA.mat
similarity index 100%
rename from atip/rings/DBA.mat
rename to src/atip/rings/DBA.mat
diff --git a/atip/rings/DIAD.mat b/src/atip/rings/DIAD.mat
similarity index 100%
rename from atip/rings/DIAD.mat
rename to src/atip/rings/DIAD.mat
diff --git a/atip/rings/DIAD_WIG.mat b/src/atip/rings/DIAD_WIG.mat
similarity index 100%
rename from atip/rings/DIAD_WIG.mat
rename to src/atip/rings/DIAD_WIG.mat
diff --git a/atip/rings/DIAMOND2SP.lat b/src/atip/rings/DIAMOND2SP.lat
similarity index 100%
rename from atip/rings/DIAMOND2SP.lat
rename to src/atip/rings/DIAMOND2SP.lat
diff --git a/atip/rings/HMBA.mat b/src/atip/rings/HMBA.mat
similarity index 100%
rename from atip/rings/HMBA.mat
rename to src/atip/rings/HMBA.mat
diff --git a/atip/rings/I04.mat b/src/atip/rings/I04.mat
similarity index 100%
rename from atip/rings/I04.mat
rename to src/atip/rings/I04.mat
diff --git a/atip/rings/VMX.mat b/src/atip/rings/VMX.mat
similarity index 100%
rename from atip/rings/VMX.mat
rename to src/atip/rings/VMX.mat
diff --git a/atip/rings/VMXSP.mat b/src/atip/rings/VMXSP.mat
similarity index 100%
rename from atip/rings/VMXSP.mat
rename to src/atip/rings/VMXSP.mat
diff --git a/atip/rings/VMXSP_WIG.mat b/src/atip/rings/VMXSP_WIG.mat
similarity index 100%
rename from atip/rings/VMXSP_WIG.mat
rename to src/atip/rings/VMXSP_WIG.mat
diff --git a/atip/rings/VMX_WIG.mat b/src/atip/rings/VMX_WIG.mat
similarity index 100%
rename from atip/rings/VMX_WIG.mat
rename to src/atip/rings/VMX_WIG.mat
diff --git a/atip/rings/create_lattice_matfile.m b/src/atip/rings/create_lattice_matfile.m
similarity index 100%
rename from atip/rings/create_lattice_matfile.m
rename to src/atip/rings/create_lattice_matfile.m
diff --git a/atip/sim_data_sources.py b/src/atip/sim_data_sources.py
similarity index 99%
rename from atip/sim_data_sources.py
rename to src/atip/sim_data_sources.py
index f6d12ad..d37a204 100644
--- a/atip/sim_data_sources.py
+++ b/src/atip/sim_data_sources.py
@@ -1,4 +1,5 @@
"""Module containing the pytac data sources for the AT simulator."""
+
import logging
from functools import partial
@@ -438,7 +439,7 @@ def get_value(self, field, handle=None, throw=True):
# complete before a value is returned; if the wait times out then raise
# an error message or log a warning according to the value of throw.
if not self._atsim.wait_for_calculations():
- error_msg = "Check for completion of outstanding " "calculations timed out."
+ error_msg = "Check for completion of outstanding calculations timed out."
if throw:
raise ControlSystemException(error_msg)
else:
diff --git a/atip/simulator.py b/src/atip/simulator.py
similarity index 99%
rename from atip/simulator.py
rename to src/atip/simulator.py
index 30e0425..950847b 100644
--- a/atip/simulator.py
+++ b/src/atip/simulator.py
@@ -1,4 +1,5 @@
"""Module containing an interface with the AT simulator."""
+
import logging
from dataclasses import dataclass
from warnings import warn
@@ -13,7 +14,6 @@
@dataclass
class LatticeData:
-
twiss: ArrayLike
tunes: ArrayLike
chrom: ArrayLike
@@ -60,7 +60,7 @@ def calculate_optics(
return LatticeData(twiss, beamdata.tune, beamdata.chromaticity, emitdata, radint)
-class ATSimulator(object):
+class ATSimulator:
"""A centralised class which makes use of AT to simulate the physics data
for the copy of the AT lattice which it holds. It works as follows, when a
change is made to the lattice in Pytac it is added to the queue attribute
@@ -197,7 +197,7 @@ def _recalculate_phys_data(self, callback):
self._at_lat, self._rp, self._disable_emittance
)
except Exception as e:
- warn(at.AtWarning(e))
+ warn(at.AtWarning(e), stacklevel=1)
# Signal the up to date flag since the physics data is now up to date.
# We do this before the callback is executed in case the callback
# checks the flag.
diff --git a/atip/utils.py b/src/atip/utils.py
similarity index 96%
rename from atip/utils.py
rename to src/atip/utils.py
index d6be733..50ba71b 100644
--- a/atip/utils.py
+++ b/src/atip/utils.py
@@ -56,6 +56,7 @@ def loader(mode="I04", callback=None, disable_emittance=False):
return lattice
+# TODO: This doesn't seem to work, fix or remove
def preload_at(at_lat):
"""Load the elements onto an 'elems' object's attributes by type so that
groups of elements of the same type (class) can be more easily accessed,
@@ -73,7 +74,7 @@ def preload_at(at_lat):
class elems:
pass
- setattr(elems, "all", [elem for elem in at_lat])
+ elems.all = [elem for elem in at_lat] # noqa: C416
elems_dict = {
type_: []
for type_ in [
@@ -120,7 +121,7 @@ def preload(pytac_lat):
class elems:
pass
- setattr(elems, "all", pytac_lat.get_elements())
+ elems.all = pytac_lat.get_elements()
for family in pytac_lat.get_all_families():
setattr(elems, family, pytac_lat.get_elements(family))
return elems
@@ -142,7 +143,7 @@ def get_atsim(target):
if isinstance(target, atip.simulator.ATSimulator):
return target
else: # Pytac lattice
- return target._data_source_manager._data_sources[pytac.SIM]._atsim
+ return target._data_source_manager._data_sources[pytac.SIM]._atsim # noqa: SLF001 TODO: Fix private member access
def get_sim_lattice(target):
diff --git a/src/virtac/__init__.py b/src/virtac/__init__.py
new file mode 100644
index 0000000..c0c1661
--- /dev/null
+++ b/src/virtac/__init__.py
@@ -0,0 +1,18 @@
+"""virtac: a python based virtual accelerator using ATIP.
+See docs/tutorials/virtac_example.md & FEEDBACK_SYSTEMS.rst for more information.
+
+.. data:: __version__
+ :type: str
+
+ Version number as calculated by https://github.com/pypa/setuptools_scm
+"""
+
+from . import __main__, atip_server, create_csv, masks, mirror_objects
+
+__all__ = [
+ "__main__",
+ "atip_server",
+ "create_csv",
+ "masks",
+ "mirror_objects",
+]
diff --git a/virtac/atip_ioc_entry.py b/src/virtac/__main__.py
similarity index 85%
rename from virtac/atip_ioc_entry.py
rename to src/virtac/__main__.py
index 6012313..118512f 100644
--- a/virtac/atip_ioc_entry.py
+++ b/src/virtac/__main__.py
@@ -1,4 +1,6 @@
-import argparse
+"""Interface for ``python -m atip``."""
+
+import argparse # noqa: I001
import logging
import os
import socket
@@ -7,9 +9,12 @@
import epicscorelibs.path.cothread # noqa
from cothread.catools import ca_nothing, caget
-from softioc import builder, softioc
+from softioc import builder, softioc
from . import atip_server
+from atip import __version__
+
+__all__ = ["main"]
LOG_FORMAT = "%(asctime)s %(message)s"
@@ -35,6 +40,12 @@ def parse_arguments():
parser.add_argument(
"--verbose", "-v", help="increase output verbosity", action="store_true"
)
+ parser.add_argument(
+ "-V",
+ "--version",
+ action="version",
+ version=__version__,
+ )
return parser.parse_args()
@@ -78,17 +89,22 @@ def main():
if 5064 in ports_list or 5065 in ports_list:
warn(
f"At least one of {epics_env_vars} is set to 5064 or 5065"
- + conflict_warning
+ + conflict_warning,
+ stacklevel=1,
)
elif all(port == 0 for port in ports_list):
warn(
"No EPICS port set, default base port (5064) will be used"
- + conflict_warning
+ + conflict_warning,
+ stacklevel=1,
)
# Avoid PV conflict between multiple IP interfaces on the same machine.
primary_ip = socket.gethostbyname(socket.getfqdn())
if "EPICS_CAS_INTF_ADDR_LIST" in os.environ.keys():
- warn("Pre-existing 'EPICS_CAS_INTF_ADDR_LIST' value" + conflict_warning)
+ warn(
+ "Pre-existing 'EPICS_CAS_INTF_ADDR_LIST' value" + conflict_warning,
+ stacklevel=1,
+ )
else:
os.environ["EPICS_CAS_INTF_ADDR_LIST"] = primary_ip
os.environ["EPICS_CAS_BEACON_ADDR_LIST"] = primary_ip
@@ -102,3 +118,7 @@ def main():
server.setup_tune_feedback()
softioc.interactive_ioc(globals())
+
+
+if __name__ == "__main__":
+ main()
diff --git a/virtac/atip_server.py b/src/virtac/atip_server.py
similarity index 98%
rename from virtac/atip_server.py
rename to src/virtac/atip_server.py
index 178c56d..3ae7682 100644
--- a/virtac/atip_server.py
+++ b/src/virtac/atip_server.py
@@ -15,7 +15,7 @@
from .mirror_objects import collate, refresher, summate, transform
-class ATIPServer(object):
+class ATIPServer:
"""A soft-ioc server allowing ATIP to be interfaced using EPICS, in the
same manner as the live machine.
@@ -309,10 +309,10 @@ def _create_feedback_records(self, feedback_csv, disable_emittance):
try:
readonly = ast.literal_eval(line["read-only"])
assert isinstance(readonly, bool)
- except (ValueError, AssertionError):
+ except (ValueError, AssertionError) as err:
raise ValueError(
f"Unable to evaluate {line['read-only']} as a boolean."
- )
+ ) from err
prefix, suffix = line["pv"].split(":", 1)
builder.SetDeviceName(prefix)
if readonly:
@@ -392,8 +392,7 @@ def _create_mirror_records(self, mirror_csv):
line["mirror type"] in ["collate", "summate"]
):
raise IndexError(
- "collation and summation mirror types take at"
- " least two input PVs."
+ "collation and summation mirror types take at least two input PVs."
)
monitor = input_pvs # need to update to support camonitor multiple
# Convert input pvs to record objects
@@ -463,7 +462,7 @@ def monitor_mirrored_pvs(self):
try:
self._monitored_pvs[pv] = camonitor(pv, mask.callback)
except Exception as e:
- warn(e)
+ warn(str(e), stacklevel=1)
def refresh_record(self, pv_name):
"""For a given PV refresh the time-stamp of the associated record,
@@ -477,7 +476,7 @@ def refresh_record(self, pv_name):
except KeyError:
raise ValueError(
f"{pv_name} is not the name of a record created by this server."
- )
+ ) from KeyError
else:
record.set(record.get())
@@ -514,7 +513,7 @@ def setup_tune_feedback(self, tune_csv=None):
line["delta"], mask.callback
)
except Exception as e:
- warn(e)
+ warn(str(e), stacklevel=1)
def stop_all_monitoring(self):
"""Stop monitoring mirrored records and tune feedback offsets."""
@@ -551,8 +550,9 @@ def set_feedback_record(self, index, field, value):
if index == 0:
raise FieldException(
f"Simulated lattice {self.lattice} does not have field {field}."
- )
+ ) from KeyError
else:
raise FieldException(
- f"Simulated element {self.lattice[index]} does not have field {field}."
- )
+ f"Simulated element {self.lattice[index]} does not have \
+ field {field}."
+ ) from KeyError
diff --git a/virtac/create_csv.py b/src/virtac/create_csv.py
similarity index 97%
rename from virtac/create_csv.py
rename to src/virtac/create_csv.py
index a89ba45..509deaf 100644
--- a/virtac/create_csv.py
+++ b/src/virtac/create_csv.py
@@ -1,6 +1,7 @@
"""N/B all of the data collection functions in the file should be run on the
default EPICS port for the live machine not 6064.
"""
+
import argparse
import csv
import os
@@ -98,12 +99,14 @@ def generate_pv_limits(lattice):
def generate_mirrored_pvs(lattice):
"""Structure of data:
+
output type: The type of output record to create, only 'aIn', 'longIn',
'Waveform' types are currently supported; if '' then output to an
existing in record already created in ATIPServer, 'caput' is also a
special case it creates a mask for cothread.catools.caput calling
set(value) on this mask will call caput with the output PV and the
passed value.
+
mirror type: The type of mirroring to apply:
- basic: set the value of the input record to the output record.
- summate: sum the values of the input records and set the result to
@@ -114,13 +117,18 @@ def generate_mirrored_pvs(lattice):
the only transformation type currently supported is 'inverse'.
- refresh: monitor the in PV and on a change call refresh_record on
the output PV.
+
in: The PV(s) to be monitored, on change mirror is updated, if multiple
then the PVs should be separated by a comma and one space.
+
out: The single PV to output to, if a 'record type' is spcified then a new
record will be created and so must not exist already.
+
value: The inital value of the output record.
"""
- data = [("output type", "mirror type", "in", "out", "value")]
+    data: list[tuple[str, str, str, str, object]] = [
+        ("output type", "mirror type", "in", "out", "value")
+    ]
# Tune PV aliases.
tune = [
lattice.get_value("tune_x", pytac.RB, data_source=pytac.SIM),
@@ -165,7 +173,9 @@ def generate_mirrored_pvs(lattice):
)
)
# Electron BPMs enabled.
- bpm_enabled_pvs = lattice.get_element_pv_names("BPM", "enabled", pytac.RB)
+ bpm_enabled_pvs: list[str] = lattice.get_element_pv_names(
+ "BPM", "enabled", pytac.RB
+ )
data.append(
(
"Waveform",
@@ -234,7 +244,9 @@ def generate_tune_pvs(lattice):
for pv in tune_pvs:
offset_pvs.append(":".join([pv.split(":")[0], "OFFSET1"]))
delta_pvs.append(f"SR-CS-TFB-01:{pv[2:4]}{pv[9:12]}{pv[13:15]}:I")
- for tune_pv, offset_pv, delta_pv in zip(tune_pvs, offset_pvs, delta_pvs):
+ for tune_pv, offset_pv, delta_pv in zip(
+ tune_pvs, offset_pvs, delta_pvs, strict=False
+ ):
data.append((tune_pv, offset_pv, delta_pv))
return data
diff --git a/virtac/data/DIAD/feedback.csv b/src/virtac/data/DIAD/feedback.csv
similarity index 100%
rename from virtac/data/DIAD/feedback.csv
rename to src/virtac/data/DIAD/feedback.csv
diff --git a/virtac/data/DIAD/limits.csv b/src/virtac/data/DIAD/limits.csv
similarity index 100%
rename from virtac/data/DIAD/limits.csv
rename to src/virtac/data/DIAD/limits.csv
diff --git a/virtac/data/DIAD/mirrored.csv b/src/virtac/data/DIAD/mirrored.csv
similarity index 100%
rename from virtac/data/DIAD/mirrored.csv
rename to src/virtac/data/DIAD/mirrored.csv
diff --git a/virtac/data/DIAD/tunefb.csv b/src/virtac/data/DIAD/tunefb.csv
similarity index 100%
rename from virtac/data/DIAD/tunefb.csv
rename to src/virtac/data/DIAD/tunefb.csv
diff --git a/virtac/data/I04/feedback.csv b/src/virtac/data/I04/feedback.csv
similarity index 100%
rename from virtac/data/I04/feedback.csv
rename to src/virtac/data/I04/feedback.csv
diff --git a/virtac/data/I04/limits.csv b/src/virtac/data/I04/limits.csv
similarity index 100%
rename from virtac/data/I04/limits.csv
rename to src/virtac/data/I04/limits.csv
diff --git a/virtac/data/I04/mirrored.csv b/src/virtac/data/I04/mirrored.csv
similarity index 100%
rename from virtac/data/I04/mirrored.csv
rename to src/virtac/data/I04/mirrored.csv
diff --git a/virtac/data/I04/tunefb.csv b/src/virtac/data/I04/tunefb.csv
similarity index 100%
rename from virtac/data/I04/tunefb.csv
rename to src/virtac/data/I04/tunefb.csv
diff --git a/virtac/masks.py b/src/virtac/masks.py
similarity index 95%
rename from virtac/masks.py
rename to src/virtac/masks.py
index c92addb..7b51155 100644
--- a/virtac/masks.py
+++ b/src/virtac/masks.py
@@ -1,7 +1,7 @@
from cothread.catools import caget, caput
-class callback_offset(object):
+class callback_offset:
"""A class to hold a method to be passed as a callback to camonitor."""
def __init__(self, server, quad_pv, offset_record):
@@ -31,7 +31,7 @@ def callback(self, value, index=None):
self.server.refresh_record(self.quad_pv)
-class callback_set(object):
+class callback_set:
"""A class to hold a method to be passed as a callback to camonitor."""
def __init__(self, output):
@@ -52,7 +52,7 @@ def callback(self, value, index=None):
record.set(value)
-class caget_mask(object):
+class caget_mask:
"""A mask for caget so it can comply with the record.get() syntax."""
def __init__(self, pv):
@@ -67,7 +67,7 @@ def get(self):
return caget(self.pv)
-class caput_mask(object):
+class caput_mask:
"""A mask for caput so it can comply with the record.set(value) syntax."""
def __init__(self, pv):
diff --git a/virtac/mirror_objects.py b/src/virtac/mirror_objects.py
similarity index 97%
rename from virtac/mirror_objects.py
rename to src/virtac/mirror_objects.py
index 6b7e716..4e93e9a 100644
--- a/virtac/mirror_objects.py
+++ b/src/virtac/mirror_objects.py
@@ -1,7 +1,7 @@
import numpy
-class summate(object):
+class summate:
"""This class is designed to be passed instead of a mirror record, when its
set method is then called it takes the sum of all the input records and
sets it to the output record.
@@ -27,7 +27,7 @@ def set(self, value=None):
self.output_record.set(value)
-class collate(object):
+class collate:
"""This class is designed to be passed instead of a mirror record, when its
set method is then called it gets the values of all the input records and
combines them in order before setting the combined array to the output
@@ -55,7 +55,7 @@ def set(self, value=None):
self.output_record.set(value)
-class transform(object):
+class transform:
"""This class is designed to be passed instead of a mirror record, when its
set method is then called it applies the held transformation and then sets
the new value to the held output record.
@@ -85,7 +85,7 @@ def set(self, value):
self.output_record.set(value)
-class refresher(object):
+class refresher:
"""This class is designed to be passed instead of a mirror record, when its
set method is then called it refreshes the held PV on the held server.
"""
diff --git a/tests/conftest.py b/tests/conftest.py
index 90d3716..af87d65 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,13 +1,31 @@
import os
+import unittest.mock as mock
+from typing import Any
import at
-import mock
import numpy
import pytest
from pytac import cs, load_csv
import atip
+# Prevent pytest from catching exceptions when debugging in vscode so that break on
+# exception works correctly (see: https://github.com/pytest-dev/pytest/issues/7409)
+if os.getenv("PYTEST_RAISE", "0") == "1":
+
+ @pytest.hookimpl(tryfirst=True)
+ def pytest_exception_interact(call: pytest.CallInfo[Any]):
+ if call.excinfo is not None:
+ raise call.excinfo.value
+ else:
+ raise RuntimeError(
+ f"{call} has no exception data, an unknown error has occurred"
+ )
+
+ @pytest.hookimpl(tryfirst=True)
+ def pytest_internalerror(excinfo: pytest.ExceptionInfo[Any]):
+ raise excinfo.value
+
@pytest.fixture(scope="session")
def at_elem():
@@ -57,7 +75,7 @@ def pytac_lattice():
@pytest.fixture(scope="session")
def mat_filepath():
here = os.path.dirname(__file__)
- return os.path.realpath(os.path.join(here, "../atip/rings/DIAD.mat"))
+ return os.path.realpath(os.path.join(here, "../src/atip/rings/DIAD.mat"))
@pytest.fixture(scope="session")
diff --git a/tests/test_at_simulator_object.py b/tests/test_at_simulator_object.py
index 95ff79f..9861cc8 100644
--- a/tests/test_at_simulator_object.py
+++ b/tests/test_at_simulator_object.py
@@ -1,6 +1,7 @@
+from unittest import mock
+
import at
import cothread
-import mock
import numpy
import pytest
from pytac.exceptions import DataSourceException, FieldException
@@ -198,7 +199,7 @@ def test_toggle_calculations_and_wait_for_calculations(atsim, initial_phys_data)
atsim._at_lat[5].PolynomB[1] = 2.5
atsim.queue_set(mock.Mock(), "f", 0)
assert atsim.wait_for_calculations(2) is False
- _check_initial_phys_data(atsim, initial_phys_data) is True
+ _check_initial_phys_data(atsim, initial_phys_data)
atsim.toggle_calculations()
atsim.queue_set(mock.Mock(), "f", 0)
assert atsim.wait_for_calculations() is True
@@ -225,7 +226,7 @@ def test_get_at_element(atsim, at_lattice):
def test_get_at_lattice(atsim, at_lattice):
- for elem1, elem2 in zip(atsim.get_at_lattice(), atsim._at_lat):
+ for elem1, elem2 in zip(atsim.get_at_lattice(), atsim._at_lat, strict=False):
assert elem1 == elem2
diff --git a/tests/test_cli.py b/tests/test_cli.py
new file mode 100644
index 0000000..0fd7f5f
--- /dev/null
+++ b/tests/test_cli.py
@@ -0,0 +1,9 @@
+import subprocess
+import sys
+
+from atip import __version__
+
+
+def test_cli_version():
+ cmd = [sys.executable, "-m", "atip", "--version"]
+ assert subprocess.check_output(cmd).decode().strip() == __version__
diff --git a/tests/test_element_data_source.py b/tests/test_element_data_source.py
index 57935e2..db0c5ca 100644
--- a/tests/test_element_data_source.py
+++ b/tests/test_element_data_source.py
@@ -1,5 +1,6 @@
+from unittest import mock
+
import at
-import mock
import pytest
from pytac.exceptions import ControlSystemException, FieldException, HandleException
from testfixtures import LogCapture
diff --git a/tests/test_lattice_data_source.py b/tests/test_lattice_data_source.py
index edfdfcc..561d08f 100644
--- a/tests/test_lattice_data_source.py
+++ b/tests/test_lattice_data_source.py
@@ -1,4 +1,5 @@
-import mock
+from unittest import mock
+
import pytest
from pytac.exceptions import ControlSystemException, FieldException, HandleException
from testfixtures import LogCapture
@@ -114,13 +115,13 @@ def test_lat_get_value():
atlds = atip.sim_data_sources.ATLatticeDataSource(atsim)
assert atlds.get_value("dispersion") == 2.5
atlds.get_value("x")
- assert atsim.get_orbit.called_with("x")
+ atsim.get_orbit.assert_called_with("x")
atlds.get_value("phase_x")
- assert atsim.get_orbit.called_with("px")
+ atsim.get_orbit.assert_called_with("px")
atlds.get_value("y")
- assert atsim.get_orbit.called_with("y")
+ atsim.get_orbit.assert_called_with("y")
atlds.get_value("phase_y")
- assert atsim.get_orbit.called_with("py")
+ atsim.get_orbit.assert_called_with("py")
@pytest.mark.parametrize(
diff --git a/tests/test_load.py b/tests/test_load.py
index 564cdb7..91a5542 100644
--- a/tests/test_load.py
+++ b/tests/test_load.py
@@ -1,4 +1,5 @@
-import mock
+from unittest import mock
+
import pytac
import pytest
diff --git a/virtac/__init__.py b/virtac/__init__.py
deleted file mode 100644
index 733900a..0000000
--- a/virtac/__init__.py
+++ /dev/null
@@ -1,6 +0,0 @@
-"""virtac: a python based virtual accelerator using ATIP.
-See README.rst & FEEDBACK_SYSTEMS.rst for more information.
-"""
-from . import atip_ioc_entry, atip_server, create_csv, masks, mirror_objects
-
-__all__ = ["atip_ioc_entry", "atip_server", "create_csv", "masks", "mirror_objects"]