
Commit

Merge branch '341-new-j20-setup' of github.com:DiamondLightSource/dodal into 341-new-j20-setup
stan-dot committed Apr 17, 2024
2 parents 68739bf + 22f8f19 commit 7781a6f
Showing 64 changed files with 1,733 additions and 1,553 deletions.
90 changes: 0 additions & 90 deletions .github/workflows/code.yml
@@ -112,96 +112,6 @@ jobs:
        # If more than one module in src/ replace with module name to test
        run: python -m dodal --version

  container:
    needs: [lint, dist, test]
    runs-on: ubuntu-latest

    permissions:
      contents: read
      packages: write

    env:
      TEST_TAG: "testing"

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      # image names must be all lower case
      - name: Generate image repo name
        run: echo IMAGE_REPOSITORY=ghcr.io/$(tr '[:upper:]' '[:lower:]' <<< "${{ github.repository }}") >> $GITHUB_ENV

      - name: Set lockfile location in environment
        run: |
          echo "DIST_LOCKFILE_PATH=lockfiles-${{ env.CONTAINER_PYTHON }}-dist-${{ github.sha }}" >> $GITHUB_ENV
      - name: Download wheel and lockfiles
        uses: actions/[email protected]
        with:
          path: artifacts/
          pattern: "*dist*"

      - name: Log in to GitHub Docker Registry
        if: github.event_name != 'pull_request'
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Set up Docker Buildx
        id: buildx
        uses: docker/setup-buildx-action@v3

      - name: Build and export to Docker local cache
        uses: docker/build-push-action@v5
        with:
          # Note build-args, context, file, and target must all match between this
          # step and the later build-push-action, otherwise the second build-push-action
          # will attempt to build the image again
          build-args: |
            PIP_OPTIONS=-r ${{ env.DIST_LOCKFILE_PATH }}/requirements.txt ${{ env.DIST_WHEEL_PATH }}/*.whl
          context: artifacts/
          file: ./Dockerfile
          target: runtime
          load: true
          tags: ${{ env.TEST_TAG }}
          # If you have a long docker build (2+ minutes), uncomment the
          # following to turn on caching. For short build times this
          # makes it a little slower
          #cache-from: type=gha
          #cache-to: type=gha,mode=max

      - name: Create tags for publishing image
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ${{ env.IMAGE_REPOSITORY }}
          tags: |
            type=ref,event=tag
            type=raw,value=latest, enable=${{ github.ref_type == 'tag' }}
          # type=edge,branch=main
          # Add line above to generate image for every commit to given branch,
          # and uncomment the end of if clause in next step

      - name: Push cached image to container registry
        if: github.ref_type == 'tag' # || github.ref_name == 'main'
        uses: docker/build-push-action@v5
        # This does not build the image again, it will find the image in the
        # Docker cache and publish it
        with:
          # Note build-args, context, file, and target must all match between this
          # step and the previous build-push-action, otherwise this step will
          # attempt to build the image again
          build-args: |
            PIP_OPTIONS=-r ${{ env.DIST_LOCKFILE_PATH }}/requirements.txt ${{ env.DIST_WHEEL_PATH }}/*.whl
          context: artifacts/
          file: ./Dockerfile
          target: runtime
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}

  release:
    # upload to PyPI and make a release on every tag
    needs: [lint, dist, test]
12 changes: 0 additions & 12 deletions Dockerfile
@@ -23,15 +23,3 @@ WORKDIR /context

# install python package into /venv
RUN pip install ${PIP_OPTIONS}

FROM python:3.11-slim as runtime

# Add apt-get system dependencies for runtime here if needed

# copy the virtual environment from the build stage and put it in PATH
COPY --from=build /venv/ /venv/
ENV PATH=/venv/bin:$PATH

# change this entrypoint if it is not the same as the repo
ENTRYPOINT ["dodal"]
CMD ["--version"]
15 changes: 0 additions & 15 deletions docs/user/how-to/run-container.rst

This file was deleted.

1 change: 0 additions & 1 deletion docs/user/index.rst
@@ -26,7 +26,6 @@ side-bar.
    :caption: How-to Guides
    :maxdepth: 1

    how-to/run-container
    how-to/create-beamline.rst

+++
17 changes: 12 additions & 5 deletions pyproject.toml
@@ -14,18 +14,19 @@ classifiers = [
description = "Ophyd devices and other utils that could be used across DLS beamlines"
dependencies = [
    "ophyd",
    "ophyd_async@git+https://github.com/bluesky/ophyd-async@ec5729640041ee5b77b4614158793af3a34cf9d8", #Use a specific branch from ophyd async until https://github.com/bluesky/ophyd-async/pull/101 is merged
    "ophyd-async@git+https://github.com/bluesky/ophyd-async",
    "bluesky",
    "pyepics",
    "dataclasses-json",
    "pillow",
    "zocalo",
    "requests",
    "graypy",
    "pydantic<2.0",
    "opencv-python-headless", # For pin-tip detection.
    "aioca", # Required for CA support with ophyd-async.
    "p4p", # Required for PVA support with ophyd-async.
    "pydantic",
    "opencv-python-headless", # For pin-tip detection.
    "aioca", # Required for CA support with ophyd-async.
    "p4p", # Required for PVA support with ophyd-async.
    "numpy",
]

dynamic = ["version"]
@@ -78,6 +79,7 @@ ignore_missing_imports = true # Ignore missing stubs in imported modules

[tool.pytest.ini_options]
# Run pytest with all our checkers, and don't spam us with massive tracebacks on error
asyncio_mode = "auto"
markers = [
    "s03: marks tests as requiring the s03 simulator running (deselect with '-m \"not s03\"')",
]
@@ -88,6 +90,11 @@ addopts = """
# Doctest python code in docs, python code in src docstrings, test functions in tests
testpaths = "docs src tests"

[tool.coverage.report]
exclude_also = [
    '^"""', # Ignore the start/end of a file-level triple quoted docstring
]

[tool.coverage.run]
data_file = "/tmp/dodal.coverage"
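The asyncio_mode = "auto" setting added above lets pytest-asyncio collect bare async test functions without an explicit marker. A minimal sketch (the test name is made up, not part of this commit):

import asyncio


async def test_runs_in_event_loop():
    # With asyncio_mode = "auto" this coroutine is collected as a test;
    # no @pytest.mark.asyncio decorator is required.
    await asyncio.sleep(0)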

16 changes: 11 additions & 5 deletions src/dodal/beamlines/beamline_utils.py
@@ -52,8 +52,12 @@ def _wait_for_connection(
        device.wait_for_connection(timeout=timeout)
    elif isinstance(device, OphydV2Device):
        call_in_bluesky_event_loop(
            v2_device_wait_for_connection(coros=device.connect(sim=sim)),
            timeout=timeout,
            v2_device_wait_for_connection(
                coros=device.connect(
                    sim=sim,
                    timeout=timeout,
                )
            ),
        )
    else:
        raise TypeError(
@@ -98,9 +102,11 @@ def device_instantiation(
    if already_existing_device is None:
        device_instance = device_factory(
            name=name,
            prefix=f"{(BeamlinePrefix(BL).beamline_prefix)}{prefix}"
            if bl_prefix
            else prefix,
            prefix=(
                f"{(BeamlinePrefix(BL).beamline_prefix)}{prefix}"
                if bl_prefix
                else prefix
            ),
            **kwargs,
        )
        ACTIVE_DEVICES[name] = device_instance
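For context, a hedged illustration of the prefix branch above (assuming BeamlinePrefix is importable from dodal.utils and that i03 maps to the DLS prefix "BL03I"): with bl_prefix left True the beamline prefix is prepended, otherwise the supplied prefix is passed through unchanged.

from dodal.utils import BeamlinePrefix

device_prefix = "-MO-SGON-01:"
full_prefix = f"{BeamlinePrefix('i03').beamline_prefix}{device_prefix}"
print(full_prefix)  # expected: "BL03I-MO-SGON-01:"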
2 changes: 1 addition & 1 deletion src/dodal/beamlines/i03.py
@@ -187,7 +187,7 @@ def fast_grid_scan(
    return device_instantiation(
        device_factory=FastGridScan,
        name="fast_grid_scan",
        prefix="-MO-SGON-01:FGS:",
        prefix="-MO-SGON-01:",
        wait=wait_for_connection,
        fake=fake_with_ophyd_sim,
    )
23 changes: 7 additions & 16 deletions src/dodal/beamlines/i20_1.py
@@ -1,33 +1,24 @@
from dodal.beamlines.beamline_utils import device_instantiation
from dodal.beamlines.beamline_utils import set_beamline as set_utils_beamline
from dodal.devices.motors import EpicsMotor
from dodal.devices.i20_1.turbo_slit import TurboSlit
from dodal.devices.turbo_slit import TurboSlit
from dodal.log import set_beamline as set_log_beamline
from dodal.utils import get_beamline_name, get_hostname, skip_device
from dodal.utils import get_beamline_name

BL = get_beamline_name("i20_1")
set_log_beamline(BL)
set_utils_beamline(BL)


def _is_i20_1_machine():
    """
    Devices using PVA can only connect from i20_1 machines, due to the absence of
    PVA gateways at present.
    """
    hostname = get_hostname()
    return hostname.startswith("i20_1")


@skip_device(lambda: not _is_i20_1_machine())
def turbo_slit_motor(
def turbo_slit(
    wait_for_connection: bool = True, fake_with_ophyd_sim: bool = False
) -> TurboSlit:
    """Get the i20-1 motor"""
    """
    TurboSlit for selecting energy from the polychromator
    """

    return device_instantiation(
        TurboSlit,
        prefix="-OP-PCHRO-01",
        prefix="-OP-PCHRO-01:TS:",
        name="turbo_slit",
        wait=wait_for_connection,
        fake=fake_with_ophyd_sim,
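A hedged sketch of how the renamed turbo_slit factory might be exercised from a test or an interactive session (argument values are illustrative, and assume no real i20-1 PVs are reachable):

from dodal.beamlines import i20_1

# fake_with_ophyd_sim substitutes simulated signals; wait_for_connection=False
# skips waiting for PVs to connect.
slit = i20_1.turbo_slit(wait_for_connection=False, fake_with_ophyd_sim=True)
print(slit.name)  # "turbo_slit"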
12 changes: 12 additions & 0 deletions src/dodal/common/__init__.py
@@ -0,0 +1,12 @@
from .coordination import group_uuid, inject
from .maths import in_micros, step_to_num
from .types import MsgGenerator, PlanGenerator

__all__ = [
    "group_uuid",
    "inject",
    "in_micros",
    "MsgGenerator",
    "PlanGenerator",
    "step_to_num",
]
38 changes: 38 additions & 0 deletions src/dodal/common/coordination.py
@@ -0,0 +1,38 @@
import uuid

from dodal.common.types import Group


def group_uuid(name: str) -> Group:
    """
    Returns a unique but human-readable string, to assist debugging orchestrated groups.
    Args:
        name (str): A human readable name
    Returns:
        readable_uid (Group): name appended with a unique string
    """
    return f"{name}-{str(uuid.uuid4())[:6]}"


def inject(name: str):  # type: ignore
    """
    Function to mark a default argument of a plan method as a reference to a device
    that is stored in the Blueapi context, as devices are constructed on startup of the
    service, and are not available to be used when writing plans.
    Bypasses mypy linting, returning x as Any and therefore valid as a default
    argument.
    e.g. For a 1-dimensional scan, that is usually performed on a consistent Movable
    axis with name "stage_x"
    def scan(x: Movable = inject("stage_x"), start: float = 0.0 ...)
    Args:
        name (str): Name of a device to be fetched from the Blueapi context
    Returns:
        Any: name but without type checking, valid as any default type
    """

    return name
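A usage sketch for the two helpers above (not part of this commit; the "stage_x" device name is assumed to exist in the Blueapi context):

import bluesky.plan_stubs as bps

from dodal.common import MsgGenerator, group_uuid, inject


def move_stage(x=inject("stage_x"), position: float = 0.0) -> MsgGenerator:
    # inject defers device lookup to the Blueapi context at run time
    group = group_uuid("move_x")  # e.g. "move_x-1a2b3c": unique, but readable in logs
    yield from bps.abs_set(x, position, group=group)
    yield from bps.wait(group=group)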
52 changes: 52 additions & 0 deletions src/dodal/common/maths.py
@@ -0,0 +1,52 @@
from typing import Tuple

import numpy as np


def step_to_num(start: float, stop: float, step: float) -> Tuple[float, float, int]:
    """
    Standard handling for converting from start, stop, step to start, stop, num
    Forces step to be in the same direction as the length
    Includes a final point if it is within 1% of the final step, prevents floating
    point arithmetic errors from giving inconsistent shaped scans between steps of an
    outer axis.
    Args:
        start (float):
            Start of length, will be returned unchanged
        stop (float):
            End of length, if length/step does not divide cleanly will be returned
            extended up to 1% of step, or else truncated.
        step (float):
            Length of a step along the line formed from start to stop.
            If stop < start, will be coerced to be backwards.
    Returns:
        start, adjusted_stop, num = Tuple[float, float, int]
        start will be returned unchanged
        adjusted_stop = start + (num - 1) * step
        num is the maximal number of steps that could fit into the length.
    """
    # Make step be the right direction
    step = abs(step) if stop >= start else -abs(step)
    # If stop is within 1% of a step then include it
    steps = int((stop - start) / step + 0.01)
    return start, start + steps * step, steps + 1  # include 1st point


def in_micros(t: float) -> int:
    """
    Converts between a positive number of seconds and an equivalent
    number of microseconds.
    Args:
        t (float): A time in seconds
    Raises:
        ValueError: if t < 0
    Returns:
        t (int): A time in microseconds, rounded up to the nearest whole microsecond.
    """
    if t < 0:
        raise ValueError(f"Expected a positive time in seconds, got {t!r}")
    return int(np.ceil(t * 1e6))
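A brief worked example of the helpers above (values chosen for illustration):

from dodal.common.maths import in_micros, step_to_num

# 0 to 10 in steps of 3: 10 is not on the grid, so the stop is truncated to 9.0
assert step_to_num(0.0, 10.0, 3.0) == (0.0, 9.0, 4)

# A reversed range coerces the step to be negative
assert step_to_num(5.0, 1.0, 2.0) == (5.0, 1.0, 3)

# Seconds are rounded up to the nearest whole microsecond
assert in_micros(0.5) == 500_000
assert in_micros(1.0000001) == 1_000_001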
14 changes: 14 additions & 0 deletions src/dodal/common/types.py
@@ -0,0 +1,14 @@
from typing import (
    Any,
    Callable,
    Generator,
)

from bluesky.utils import Msg

# String identifier used by 'wait' or stubs that await
Group = str
# A true 'plan', usually the output of a generator function
MsgGenerator = Generator[Msg, Any, None]
# A function that generates a plan
PlanGenerator = Callable[..., MsgGenerator]
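An illustrative composition of these aliases (detector and plan names are assumed, not from this commit): any generator of Msg objects is a MsgGenerator, and any callable returning one is a PlanGenerator.

import bluesky.plan_stubs as bps
from bluesky.protocols import Readable

from dodal.common.types import MsgGenerator, PlanGenerator


def read_twice(det: Readable) -> MsgGenerator:
    # Two readings of a single detector, one second apart
    yield from bps.trigger_and_read([det])
    yield from bps.sleep(1.0)
    yield from bps.trigger_and_read([det])


plan: PlanGenerator = read_twice  # read_twice satisfies the PlanGenerator alias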