9 changes: 0 additions & 9 deletions .flake8

This file was deleted.

48 changes: 48 additions & 0 deletions .github/workflows/Lint-and-test.yml
@@ -0,0 +1,48 @@
# This workflow will install Python dependencies, run the tests across several Python versions and operating systems, and lint with a single version of Python
# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python

name: Python application

on:
  workflow_call:
  push:
    branches: [ "main" ]
  pull_request:
    branches: [ "main" ]

jobs:
  call-workflow:
    uses: ISISComputingGroup/reusable-workflows/.github/workflows/linters.yml@main
    with:
      compare-branch: origin/main
      python-ver: '3.13'
      runs-on: 'ubuntu-latest'
  tests:
    strategy:
      matrix:
        version: ['3.12', '3.13', '3.14']
        os: ["ubuntu-latest", "windows-latest"]
    runs-on: ${{ matrix.os }}
    steps:
      - uses: actions/checkout@v6
      - name: Install uv and set the python version
        uses: astral-sh/setup-uv@v7
        with:
          python-version: ${{ matrix.version }}
      - name: Install dependencies
        run: uv sync --all-extras --dev
      - name: Test with pytest
        run: uv run pytest tests
  results:
    if: ${{ always() }}
    runs-on: ubuntu-latest
    name: Final Results
    needs: [tests, call-workflow]
    steps:
      - run: exit 1
        # see https://stackoverflow.com/a/67532120/4907315
        if: >-
          ${{
            contains(needs.*.result, 'failure')
            || contains(needs.*.result, 'cancelled')
          }}
92 changes: 92 additions & 0 deletions .github/workflows/release.yml
@@ -0,0 +1,92 @@
name: Publish Python distribution to PyPI
on: push
jobs:
  lint-and-test:
    if: github.ref_type == 'tag'
    name: Run linter and tests
    uses: ./.github/workflows/Lint-and-test.yml
  build:
    needs: lint-and-test
    if: github.ref_type == 'tag'
    name: Build distribution
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v6
      - name: Set up Python
        uses: actions/setup-python@v6
        with:
          python-version: "3.13"
      - name: Install pypa/build
        run: >-
          python3 -m
          pip install
          build
          --user
      - name: Build a binary wheel and a source tarball
        run: python3 -m build
      - name: Store the distribution packages
        uses: actions/upload-artifact@v6
        with:
          name: python-package-distributions
          path: dist/
  publish-to-pypi:
    name: >-
      Publish Python distribution to PyPI
    if: github.ref_type == 'tag'
    needs: [lint-and-test, build]
    runs-on: ubuntu-latest
    environment:
      name: release
      url: https://pypi.org/p/isis-streaming-data-types
    permissions:
      id-token: write  # IMPORTANT: mandatory for trusted publishing
    steps:
      - name: Download all the dists
        uses: actions/download-artifact@v7
        with:
          name: python-package-distributions
          path: dist/
      - name: Publish distribution to PyPI
        uses: pypa/gh-action-pypi-publish@release/v1
  github-release:
    name: >-
      Sign the Python distributions with Sigstore
      and upload them to the GitHub Release
    needs: [lint-and-test, build, publish-to-pypi]
    runs-on: ubuntu-latest

    permissions:
      contents: write  # IMPORTANT: mandatory for making GitHub Releases
      id-token: write  # IMPORTANT: mandatory for sigstore

    steps:
      - name: Download all the dists
        uses: actions/download-artifact@v7
        with:
          name: python-package-distributions
          path: dist/
      - name: Sign the dists with Sigstore
        uses: sigstore/gh-action-sigstore-python@v3.2.0
        with:
          inputs: >-
            ./dist/*.tar.gz
            ./dist/*.whl
      - name: Create GitHub Release
        env:
          GITHUB_TOKEN: ${{ github.token }}
        run: >-
          gh release create
          '${{ github.ref_name }}'
          --repo '${{ github.repository }}'
          --notes ""
      - name: Upload artifact signatures to GitHub Release
        env:
          GITHUB_TOKEN: ${{ github.token }}
        # Upload to GitHub Release using the `gh` CLI.
        # `dist/` contains the built packages, and the
        # sigstore-produced signatures and certificates.
        run: >-
          gh release upload
          '${{ github.ref_name }}' dist/**
          --repo '${{ github.repository }}'
2 changes: 2 additions & 0 deletions .gitignore
@@ -136,3 +136,5 @@ dmypy.json
# VSCode
.vscode


src/streaming_data_types/_version.py
23 changes: 0 additions & 23 deletions .pre-commit-config.yaml

This file was deleted.

4 changes: 0 additions & 4 deletions MANIFEST.in

This file was deleted.

5 changes: 0 additions & 5 deletions Makefile

This file was deleted.

103 changes: 103 additions & 0 deletions pyproject.toml
@@ -0,0 +1,103 @@
[build-system]
requires = ["setuptools", "setuptools_scm>=8"]
build-backend = "setuptools.build_meta"


[project]
name = "isis_streaming_data_types"
dynamic = ["version"]
description = "Python utilities for handling ISIS streamed data"
readme = "README.md"
requires-python = ">=3.12"
license-files = ["LICENSE"]

authors = [
    {name = "ISIS Experiment Controls", email = "ISISExperimentControls@stfc.ac.uk" }
]
maintainers = [
    {name = "ISIS Experiment Controls", email = "ISISExperimentControls@stfc.ac.uk" }
]

# Classifiers help users find your project by categorizing it.
#
# For a list of valid classifiers, see https://pypi.org/classifiers/
classifiers = [
    # How mature is this project? Common values are
    #   3 - Alpha
    #   4 - Beta
    #   5 - Production/Stable
    "Development Status :: 3 - Alpha",
    "Intended Audience :: Developers",

    # Specify the Python versions you support here. In particular, ensure
    # that you indicate you support Python 3. These classifiers are *not*
    # checked by "pip install". See instead "requires-python" key in this file.
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.12",
    "Programming Language :: Python :: 3.13",
    "Programming Language :: Python :: 3.14",
    "Programming Language :: Python :: 3 :: Only",
]

dependencies = [
    "flatbuffers",
    "numpy>2"
]

[project.optional-dependencies]
dev = [
    "ruff>=0.8",
    "pyright",
    "pytest",
    "pytest-cov",
]

[project.urls]
"Homepage" = "https://github.com/isiscomputinggroup/isis_streaming_data_types"
"Bug Reports" = "https://github.com/isiscomputinggroup/isis_streaming_data_types/issues"
"Source" = "https://github.com/isiscomputinggroup/isis_streaming_data_types"

[tool.pytest.ini_options]
testpaths = "tests"
asyncio_mode = "auto"
addopts = "--cov --cov-report=html -vv"
filterwarnings = [
    'ignore:FigureCanvasAgg is non-interactive, and thus cannot be shown:UserWarning',
    'error:Using UFloat objects with std_dev==0 may give unexpected results.:UserWarning',
]

[tool.coverage.run]
branch = true
source = ["src"]

[tool.coverage.report]
fail_under = 100
exclude_lines = [
    "pragma: no cover",
    "if TYPE_CHECKING:",
    "if typing.TYPE_CHECKING:",
    "@abstractmethod",
]

[tool.coverage.html]
directory = "coverage_html_report"

[tool.pyright]
include = ["src", "tests"]
reportConstantRedefinition = true
reportDeprecated = true
reportInconsistentConstructor = true
reportMissingParameterType = true
reportMissingTypeArgument = true
reportUnnecessaryCast = true
reportUnnecessaryComparison = true
reportUnnecessaryContains = true
reportUnnecessaryIsInstance = true
reportUntypedBaseClass = true
reportUntypedClassDecorator = true
reportUntypedFunctionDecorator = true

[tool.setuptools_scm]
version_file = "src/streaming_data_types/_version.py"
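With `[tool.setuptools_scm]` writing the version into `src/streaming_data_types/_version.py` (git-ignored above, and replacing the hand-written `_version.py` deleted below), the package can read its version at runtime instead of hard-coding it, which is presumably what the `__version__ = version` line in the hunk further down relies on. A minimal sketch of that pattern, assuming setuptools_scm's default `version_file` template (which exposes both `version` and `__version__`); the `ImportError` fallback string is an illustrative assumption, not part of this PR:

```python
# Sketch only: read the setuptools_scm-generated version at runtime.
try:
    from streaming_data_types._version import version
except ImportError:  # raw checkout where the build backend has not run yet
    version = "0.0.0+unknown"  # hypothetical fallback, not in this PR

__version__ = version
```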


10 changes: 0 additions & 10 deletions requirements-dev.txt

This file was deleted.

2 changes: 0 additions & 2 deletions requirements.txt

This file was deleted.

32 changes: 0 additions & 32 deletions setup.py

This file was deleted.

@@ -39,10 +39,10 @@
)
from streaming_data_types.status_x5f2 import deserialise_x5f2, serialise_x5f2
from streaming_data_types.timestamps_tdct import deserialise_tdct, serialise_tdct
from streaming_data_types.units_un00 import serialise_un00, deserialise_un00

__version__ = version

from streaming_data_types.units_un00 import serialise_un00, deserialise_un00

SERIALISERS = {
"an44": serialise_an44,
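This hunk moves the `units_un00` import up to sit with the other schema imports, and `SERIALISERS` (truncated above) maps four-character schema ids to their serialise functions. A minimal lookup sketch, assuming the dict lives in the package's top-level `__init__.py` and using the `"an44"` entry visible in the diff; no serialiser is actually called because no signature appears here:

```python
# Hypothetical sketch: resolve a serialiser by schema id rather than importing
# each serialise_* function by name. Only the lookup is shown.
from streaming_data_types import SERIALISERS

serialise = SERIALISERS["an44"]
print(serialise.__name__)  # -> "serialise_an44"
```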
@@ -490,8 +490,8 @@ def _serialise_value(
):
# We can use a dictionary to map most numpy types to one of the types defined in the flatbuffer schema
# but we have to handle strings separately as there are many subtypes
if np.issubdtype(value.dtype, np.unicode_) or np.issubdtype(
value.dtype, np.string_
if np.issubdtype(value.dtype, np.str_) or np.issubdtype(
value.dtype, np.bytes_
):
string_serialiser(builder, value, source)
else:
@@ -501,7 +501,7 @@ def _serialise_value(
# There are a few numpy types we don't try to handle, for example complex numbers
raise NotImplementedError(
f"Cannot serialise data of type {value.dtype}, must use one of "
f"{list(_map_scalar_type_to_serialiser.keys()) + [np.unicode_]}"
f"{list(_map_scalar_type_to_serialiser.keys()) + [np.str_]}"
)


@@ -539,8 +539,8 @@ def _serialise_value(

def _decode_if_scalar_string(value: np.ndarray) -> Union[str, np.ndarray]:
    if value.ndim == 0 and (
        np.issubdtype(value.dtype, np.unicode_)
        or np.issubdtype(value.dtype, np.string_)
        np.issubdtype(value.dtype, np.str_)
        or np.issubdtype(value.dtype, np.bytes_)
    ):
        return value.item().decode()
    return value
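These hunks swap the NumPy 1.x scalar-type aliases `np.unicode_` and `np.string_` for `np.str_` and `np.bytes_`, which is needed because the old aliases were removed in NumPy 2.0 and pyproject.toml now pins `numpy>2`. A small self-contained sketch (with made-up array values) of how the subtype checks and the scalar decode behave:

```python
import numpy as np

# np.str_ / np.bytes_ are the NumPy 2 spellings of the old np.unicode_ / np.string_.
unicode_scalar = np.array("velocity")   # 0-d array, dtype '<U8'
bytes_scalar = np.array(b"velocity")    # 0-d array, dtype 'S8'

assert np.issubdtype(unicode_scalar.dtype, np.str_)
assert np.issubdtype(bytes_scalar.dtype, np.bytes_)

# .item() returns bytes for the bytes scalar (so .decode() applies) and a plain
# str for the unicode scalar.
print(bytes_scalar.item().decode())     # velocity
print(unicode_scalar.item())            # velocity
```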
File renamed without changes.
4 changes: 0 additions & 4 deletions streaming_data_types/_version.py

This file was deleted.
