diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 183b5bc..293ea0d 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -4,10 +4,12 @@ on: push: branches: - main + - candidate-* pull_request: types: [opened, synchronize, reopened] branches: - main + - candidate-* workflow_dispatch: permissions: @@ -22,7 +24,7 @@ jobs: - name: Setup PDM uses: pdm-project/setup-pdm@v4 with: - python-version: "3.8" + python-version: "3.12" cache: true - name: Install dependencies run: | @@ -38,7 +40,7 @@ jobs: name: Windows/MacOS unit tests strategy: matrix: - python-version: ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] + python-version: ["3.10", "3.11", "3.12", "3.13"] os: - macos-latest - windows-latest @@ -61,7 +63,7 @@ jobs: name: Test with coverage strategy: matrix: - python-version: ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] + python-version: ["3.10", "3.11", "3.12", "3.13"] os: - ubuntu-latest runs-on: ${{ matrix.os }} diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 6a0fb22..72044e6 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -8,7 +8,7 @@ repos: - id: end-of-file-fixer - id: trailing-whitespace - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.9.4 + rev: v0.12.7 hooks: - id: ruff args: [ --fix ] diff --git a/CHANGELOG.md b/CHANGELOG.md index 531bf5a..42dc1c6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,29 @@ We follow [Common Changelog](https://common-changelog.org/) formatting for this document. +## Unreleased + +### Fixed + +- namespace events and statuses to capabilities instead of services ([commit 1](https://github.com/INTERSECT-SDK/python-sdk/commit/e05e27471f05bf50e0bc5a0123f7e0133a3d969e) [commit 2](https://github.com/INTERSECT-SDK/python-sdk/commit/1460e989f70efaf9713eb77bbb4508698db3e655)) (Lance-Drane) + +### Changed + +- Allow user to specify whatever message Content-Type they would like in messages, and provide handling for non-JSON data types ([commit](https://github.com/INTERSECT-SDK/python-sdk/commit/dcd536ebb03973e8939e2715e51dfc3da0d8bd16)) (Lance Drane) +- change events API; instead of using `@intersect_event` or `@intersect_message(events=...)`, declare all events in capability variable `intersect_sdk_events` ([commit](https://github.com/INTERSECT-SDK/python-sdk/commit/1460e989f70efaf9713eb77bbb4508698db3e655)) (Lance Drane) +- move Pika (AMQP) to be a required dependency instead of an optional dependency ([commit](https://github.com/INTERSECT-SDK/python-sdk/commit/5ed5be6a51917b5598043115fb9cb176a6627a2a)) (Lance Drane) +- bump required Paho MQTT version from v1 to v2 ([commit](https://github.com/INTERSECT-SDK/python-sdk/commit/d0bcb9550aa92c7ef327e313a2ad5b34d914a3b3)) (Lance Drane) +- change internal message structure representation; metadata is sent through as headers, while the direct payload is always the data. This decreases the number of JSON serializations/deserializations from 2 to at most 1 (if the data is actually JSON). 
This does NOT modify any APIs already in use, with the exception of core services ([initial commit](https://github.com/INTERSECT-SDK/python-sdk/commit/e09f13f9b244b92b6bcecc814df49c81340dcc02#diff-725ea87422115a87ba1869854601d413f1fcac6bea0c965ce5a14e2fcb0461b1) [commit which adds campaign IDs](https://github.com/INTERSECT-SDK/python-sdk/commit/ef8db5415c97af80df267277f8ddca6347440b5e)) (Lance Drane) + +### Added + +- Added MQTT 5.0 support ([commit](https://github.com/INTERSECT-SDK/python-sdk/commit/e09f13f9b244b92b6bcecc814df49c81340dcc02)) (Lance Drane) +- Added a default `intersect_sdk` capability meant to encompass common system querying information ([commit](https://github.com/INTERSECT-SDK/python-sdk/commit/e05e27471f05bf50e0bc5a0123f7e0133a3d969e)) (Lance Drane) + +### Removed + +- Dropped MQTT 3.1.1 support ([commit](https://github.com/INTERSECT-SDK/python-sdk/commit/e09f13f9b244b92b6bcecc814df49c81340dcc02)) (Lance Drane) + ## [0.8.4] - 2026-02-05 ### Fixed diff --git a/README.md b/README.md index 4cddbbc..c1b84b6 100644 --- a/README.md +++ b/README.md @@ -19,7 +19,8 @@ For a high-level overview, please see [the architecture website.](https://inters - Event-driven architecture - Support core interaction types: request/response, events, commands, statuses -- Borrows several concepts from [AsyncAPI](https://www.asyncapi.com/docs/reference/specification/latest), and intends to support multiple different protocols. Currently, we support MQTT 3.1.1 and AMQP 0.9.1, but other protocols will be supported as well. +- Borrows several concepts from [AsyncAPI](https://www.asyncapi.com/docs/reference/specification/latest), and intends to support multiple different protocols. Currently, we support MQTT 5.0 and AMQP 0.9.1, but other protocols will be supported as well. + - As a general rule, we will not support any protocols which do not support headers, do not allow for asynchronous messaging, or require the microservice itself to "keep alive" multiple connections. - Users automatically generate schema from code; schemas are part of the core contract of an INTERSECT microservice, and both external inputs and microservice outputs are required to uphold this contract. ## Authors diff --git a/docker-compose.yml b/docker-compose.yml index 9d72077..00049f0 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -3,7 +3,7 @@ services: broker: - image: "bitnamilegacy/rabbitmq:3.13.3" + image: "bitnamilegacy/rabbitmq:4.1" ports: - "1883:1883" # MQTT port - "5672:5672" # AMQP port diff --git a/docs/core_concepts.rst b/docs/core_concepts.rst index 2a4e70d..860cc61 100644 --- a/docs/core_concepts.rst +++ b/docs/core_concepts.rst @@ -38,15 +38,27 @@ Arguments to the ``@intersect_message()`` decorator can be used to specify speci CapabilityImplementation - Events --------------------------------- -You can emit events globally as part of an ``@intersect_message()`` annotated function by configuring the ``events`` argument to the decorator as a dictionary/mapping of event names (as strings) to IntersectEventDefinitions. -An IntersectEventDefinition consists of an event_type, which is the typing of the event you'll emit. +You can emit events globally, with or without input from other INTERSECT messages. -You can also emit events without having to react to an external request by annotating a function with ``@intersect_event()`` and providing the ``events`` argument to the decorator. 
+To do this, you must create a mapping of keys to ``IntersectEventDefinition`` as your BaseCapability's ``intersect_sdk_events`` class variable. +An IntersectEventDefinition consists of an ``event_type``, which is the typing of the event you'll emit. -You can emit an event by calling ``self.intersect_sdk_emit_event(event_name, event)`` . The typing of ``event`` must match the typing in the decorator configuration. -Calling this function will only be effective if called from either an ``@intersect_message`` or ``@intersect_event`` decorated function, or an inner function called from a decorated function. +You can emit an event by calling ``self.intersect_sdk_emit_event(event_name, event)`` . The typing of ``event`` must match the typing of ``IntersectEventDefinition(event_type)``. -You can specify the same event name on multiple functions, but it must always contain the same IntersectEventDefinition configuration. +A simple example of how to configure this: + +.. code-block:: python + + class YourCapability(IntersectBaseCapabilityImplementation): + # You should configure it on the class itself. Do NOT configure it on the instance. + intersect_sdk_events: ClassVar[dict[str, IntersectEventDefinition]] = { + 'my_integer_event': IntersectEventDefinition(event_type=int), + 'my_str_event': IntersectEventDefinition(event_type=str), + 'my_float_event': IntersectEventDefinition(event_type=float), + } + + +Now this capability can call ``self.intersect_sdk_emit_event('my_integer_event', value)`` as long as "value" is actually an integer. Client ------ diff --git a/docs/pydantic.rst b/docs/pydantic.rst index 2817ac1..88967b8 100644 --- a/docs/pydantic.rst +++ b/docs/pydantic.rst @@ -18,7 +18,7 @@ Usage External users would benefit most by understanding the `Models `_, `Fields `_, and `Types `_ documentation pages on Pydantic's own documentation website. -INTERSECT-SDK will handle the schema generation logic, but users are able to customize fields themselves. For example, users can combine ``typing_extensions.Annotated`` +INTERSECT-SDK will handle the schema generation logic, but users are able to customize fields themselves. For example, users can combine ``typing.Annotated`` with ``pydantic.Field`` to specify regular expression patterns for string, minimum lengths for arrays/lists, and many other validation concepts. For handling complex objects, your class should either extend ``pydantic.BaseModel`` or be a `Python Dataclass `_. diff --git a/examples/1_hello_world/hello_client.py b/examples/1_hello_world/hello_client.py index 4c0d231..0111697 100644 --- a/examples/1_hello_world/hello_client.py +++ b/examples/1_hello_world/hello_client.py @@ -1,7 +1,7 @@ import logging from intersect_sdk import ( - INTERSECT_JSON_VALUE, + INTERSECT_RESPONSE_VALUE, IntersectClient, IntersectClientCallback, IntersectClientConfig, @@ -13,7 +13,7 @@ def simple_client_callback( - _source: str, _operation: str, _has_error: bool, payload: INTERSECT_JSON_VALUE + _source: str, _operation: str, _has_error: bool, payload: INTERSECT_RESPONSE_VALUE ) -> None: """This simply prints the response from the service to your console. 
@@ -48,7 +48,7 @@ def simple_client_callback( 'username': 'intersect_username', 'password': 'intersect_password', 'port': 1883, - 'protocol': 'mqtt3.1.1', + 'protocol': 'mqtt5.0', }, ], } diff --git a/examples/1_hello_world/hello_service.py b/examples/1_hello_world/hello_service.py index 1b91819..1659f17 100644 --- a/examples/1_hello_world/hello_service.py +++ b/examples/1_hello_world/hello_service.py @@ -21,7 +21,7 @@ class HelloServiceCapabilityImplementation(IntersectBaseCapabilityImplementation but we do not use it here. The operation we are calling is `say_hello_to_name` , so the message being sent will need to have - an operationId of `say_hello_to_name`. The operation expects a string sent to it in the payload, + an operation_id of `say_hello_to_name`. The operation expects a string sent to it in the payload, and will send a string back in its own payload. """ @@ -50,7 +50,7 @@ def say_hello_to_name(self, name: str) -> str: 'username': 'intersect_username', 'password': 'intersect_password', 'port': 1883, - 'protocol': 'mqtt3.1.1', + 'protocol': 'mqtt5.0', }, ], } diff --git a/examples/1_hello_world/hello_service_schema.json b/examples/1_hello_world/hello_service_schema.json index 43d6968..272dfd4 100644 --- a/examples/1_hello_world/hello_service_schema.json +++ b/examples/1_hello_world/hello_service_schema.json @@ -8,8 +8,16 @@ }, "defaultContentType": "application/json", "capabilities": { + "intersect_sdk": { + "endpoints": {}, + "events": {}, + "status": { + "$ref": "#/components/schemas/IntersectCoreStatus" + }, + "description": "Core capability present in every INTERSECT Service.\n\nThis may be called explicitly by any Client interacting with any SDK service. Set the operation to be \"intersect_sdk.\"." + }, "HelloExample": { - "channels": { + "endpoints": { "say_hello_to_name": { "publish": { "message": { @@ -19,7 +27,8 @@ "$ref": "#/components/messageTraits/commonHeaders" }, "payload": { - "type": "string" + "type": "string", + "title": "say_hello_to_name" } }, "description": "Takes in a string parameter and says 'Hello' to the parameter!" @@ -32,51 +41,122 @@ "$ref": "#/components/messageTraits/commonHeaders" }, "payload": { - "type": "string" + "type": "string", + "title": "name" } }, "description": "Takes in a string parameter and says 'Hello' to the parameter!" - }, - "events": [] + } } }, - "description": "Rudimentary capability implementation example.\n\nAll capability implementations are required to have an @intersect_status decorated function,\nbut we do not use it here.\n\nThe operation we are calling is `say_hello_to_name` , so the message being sent will need to have\nan operationId of `say_hello_to_name`. The operation expects a string sent to it in the payload,\nand will send a string back in its own payload." + "events": {}, + "status": { + "type": "string", + "title": "Status" + }, + "description": "Rudimentary capability implementation example.\n\nAll capability implementations are required to have an @intersect_status decorated function,\nbut we do not use it here.\n\nThe operation we are calling is `say_hello_to_name` , so the message being sent will need to have\nan operation_id of `say_hello_to_name`. The operation expects a string sent to it in the payload,\nand will send a string back in its own payload." 
} }, - "events": {}, - "status": { - "type": "string" - }, "components": { - "schemas": {}, + "schemas": { + "IntersectCoreStatus": { + "description": "Core status information about the INTERSECT-SDK Service as a whole.", + "properties": { + "uptime": { + "format": "duration", + "title": "Uptime", + "type": "string" + }, + "logical_cpus": { + "title": "Logical CPUs", + "type": "integer" + }, + "physical_cpus": { + "title": "Physical CPUs", + "type": "integer" + }, + "cpu_percentages": { + "items": { + "type": "number" + }, + "title": "CPU Percentages", + "type": "array" + }, + "service_cpu_percentage": { + "title": "Service CPU Usage Percentage", + "type": "number" + }, + "memory_total": { + "title": "Memory Total", + "type": "integer" + }, + "memory_usage_percentage": { + "title": "Memory Usage Percentage", + "type": "number" + }, + "service_memory_percentage": { + "title": "Service Memory Usage Percentage", + "type": "number" + }, + "disk_total": { + "title": "Disk Total", + "type": "integer" + }, + "disk_usage_percentage": { + "title": "Disk Usage Percentage", + "type": "number" + } + }, + "required": [ + "uptime", + "logical_cpus", + "physical_cpus", + "cpu_percentages", + "service_cpu_percentage", + "memory_total", + "memory_usage_percentage", + "service_memory_percentage", + "disk_total", + "disk_usage_percentage" + ], + "title": "IntersectCoreStatus", + "type": "object" + } + }, "messageTraits": { "commonHeaders": { "userspaceHeaders": { - "$defs": { - "IntersectDataHandler": { - "description": "What data transfer type do you want to use for handling the request/response?\n\nDefault: MESSAGE", - "enum": [0, 1], - "title": "IntersectDataHandler", - "type": "integer" - } - }, - "description": "Matches the current header definition for INTERSECT messages.\n\nALL messages should contain this header.", + "description": "ALL request/response/command messages must contain this header.\n\nWe do not include the content type of the message in the header, it is handled separately.", "properties": { + "message_id": { + "description": "Unique message ID", + "title": "Message Id", + "type": "string" + }, + "campaign_id": { + "description": "ID associated with a campaign", + "title": "Campaign Id", + "type": "string" + }, + "request_id": { + "description": "ID associated with a specific request message and response message sequence", + "title": "Request Id", + "type": "string" + }, "source": { "description": "source of the message", - "pattern": "([-a-z0-9]+\\.)*[-a-z0-9]", + "pattern": "^[a-z0-9][-a-z0-9.]*[-a-z0-9]$", "title": "Source", "type": "string" }, "destination": { "description": "destination of the message", - "pattern": "([-a-z0-9]+\\.)*[-a-z0-9]", + "pattern": "^[a-z0-9][-a-z0-9.]*[-a-z0-9]$", "title": "Destination", "type": "string" }, "created_at": { "description": "the UTC timestamp of message creation", - "format": "date-time", "title": "Created At", "type": "string" }, @@ -86,46 +166,50 @@ "title": "Sdk Version", "type": "string" }, + "operation_id": { + "description": "Name of capability and operation we want to call, in the format ${CAPABILITY_NAME}.${FUNCTION_NAME}", + "title": "Operation Id", + "type": "string" + }, "data_handler": { - "allOf": [ - { - "$ref": "#/components/messageTraits/commonHeaders/userspaceHeaders/$defs/IntersectDataHandler" - } - ], - "default": 0, - "description": "Code signifying where data is stored." 
+ "description": "Code signifying where data is stored.", + "type": "string" }, "has_error": { - "default": false, "description": "If this value is True, the payload will contain the error message (a string)", "title": "Has Error", - "type": "boolean" + "type": "string" } }, - "required": ["source", "destination", "created_at", "sdk_version"], - "title": "UserspaceMessageHeader", + "required": [ + "message_id", + "campaign_id", + "request_id", + "source", + "destination", + "created_at", + "sdk_version", + "operation_id" + ], + "title": "UserspaceMessageHeaders", "type": "object" }, "eventHeaders": { - "$defs": { - "IntersectDataHandler": { - "description": "What data transfer type do you want to use for handling the request/response?\n\nDefault: MESSAGE", - "enum": [0, 1], - "title": "IntersectDataHandler", - "type": "integer" - } - }, - "description": "Matches the current header definition for INTERSECT messages.\n\nALL messages should contain this header.", + "description": "ALL event messages must include this header.\n\nWe do not include the content type of the message in the header, it is handled separately.", "properties": { + "message_id": { + "description": "Unique message ID", + "title": "Message Id", + "type": "string" + }, "source": { "description": "source of the message", - "pattern": "([-a-z0-9]+\\.)*[-a-z0-9]", + "pattern": "^[a-z0-9][-a-z0-9.]*[-a-z0-9]$", "title": "Source", "type": "string" }, "created_at": { "description": "the UTC timestamp of message creation", - "format": "date-time", "title": "Created At", "type": "string" }, @@ -136,20 +220,30 @@ "type": "string" }, "data_handler": { - "allOf": [ - { - "$ref": "#/components/messageTraits/commonHeaders/eventHeaders/$defs/IntersectDataHandler" - } - ], - "default": 0, - "description": "Code signifying where data is stored." + "description": "Code signifying where data is stored.", + "type": "string" + }, + "capability_name": { + "description": "The name of the capability which emitted the event originally.", + "pattern": "^[a-zA-Z0-9]\\w*$", + "title": "Capability Name", + "type": "string" }, "event_name": { + "description": "The name of the event that was emitted, namespaced to the capability.", + "pattern": "^[a-zA-Z0-9]\\w*$", "title": "Event Name", "type": "string" } }, - "required": ["source", "created_at", "sdk_version", "event_name"], + "required": [ + "message_id", + "source", + "created_at", + "sdk_version", + "capability_name", + "event_name" + ], "title": "EventMessageHeaders", "type": "object" } diff --git a/examples/1_hello_world_amqp/hello_client.py b/examples/1_hello_world_amqp/hello_client.py index 688a19b..ce8809d 100644 --- a/examples/1_hello_world_amqp/hello_client.py +++ b/examples/1_hello_world_amqp/hello_client.py @@ -1,7 +1,7 @@ import logging from intersect_sdk import ( - INTERSECT_JSON_VALUE, + INTERSECT_RESPONSE_VALUE, IntersectClient, IntersectClientCallback, IntersectClientConfig, @@ -14,7 +14,7 @@ def simple_client_callback( - _source: str, _operation: str, _has_error: bool, payload: INTERSECT_JSON_VALUE + _source: str, _operation: str, _has_error: bool, payload: INTERSECT_RESPONSE_VALUE ) -> None: """This simply prints the response from the service to your console. 
diff --git a/examples/1_hello_world_amqp/hello_service.py b/examples/1_hello_world_amqp/hello_service.py index 6b4369c..463711e 100644 --- a/examples/1_hello_world_amqp/hello_service.py +++ b/examples/1_hello_world_amqp/hello_service.py @@ -22,7 +22,7 @@ class HelloServiceCapabilityImplementation(IntersectBaseCapabilityImplementation but we do not use it here. The operation we are calling is `say_hello_to_name` , so the message being sent will need to have - an operationId of `say_hello_to_name`. The operation expects a string sent to it in the payload, + an operation_id of `say_hello_to_name`. The operation expects a string sent to it in the payload, and will send a string back in its own payload. """ diff --git a/examples/1_hello_world_amqp/hello_service_schema.json b/examples/1_hello_world_amqp/hello_service_schema.json index 43d6968..272dfd4 100644 --- a/examples/1_hello_world_amqp/hello_service_schema.json +++ b/examples/1_hello_world_amqp/hello_service_schema.json @@ -8,8 +8,16 @@ }, "defaultContentType": "application/json", "capabilities": { + "intersect_sdk": { + "endpoints": {}, + "events": {}, + "status": { + "$ref": "#/components/schemas/IntersectCoreStatus" + }, + "description": "Core capability present in every INTERSECT Service.\n\nThis may be called explicitly by any Client interacting with any SDK service. Set the operation to be \"intersect_sdk.\"." + }, "HelloExample": { - "channels": { + "endpoints": { "say_hello_to_name": { "publish": { "message": { @@ -19,7 +27,8 @@ "$ref": "#/components/messageTraits/commonHeaders" }, "payload": { - "type": "string" + "type": "string", + "title": "say_hello_to_name" } }, "description": "Takes in a string parameter and says 'Hello' to the parameter!" @@ -32,51 +41,122 @@ "$ref": "#/components/messageTraits/commonHeaders" }, "payload": { - "type": "string" + "type": "string", + "title": "name" } }, "description": "Takes in a string parameter and says 'Hello' to the parameter!" - }, - "events": [] + } } }, - "description": "Rudimentary capability implementation example.\n\nAll capability implementations are required to have an @intersect_status decorated function,\nbut we do not use it here.\n\nThe operation we are calling is `say_hello_to_name` , so the message being sent will need to have\nan operationId of `say_hello_to_name`. The operation expects a string sent to it in the payload,\nand will send a string back in its own payload." + "events": {}, + "status": { + "type": "string", + "title": "Status" + }, + "description": "Rudimentary capability implementation example.\n\nAll capability implementations are required to have an @intersect_status decorated function,\nbut we do not use it here.\n\nThe operation we are calling is `say_hello_to_name` , so the message being sent will need to have\nan operation_id of `say_hello_to_name`. The operation expects a string sent to it in the payload,\nand will send a string back in its own payload." 
} }, - "events": {}, - "status": { - "type": "string" - }, "components": { - "schemas": {}, + "schemas": { + "IntersectCoreStatus": { + "description": "Core status information about the INTERSECT-SDK Service as a whole.", + "properties": { + "uptime": { + "format": "duration", + "title": "Uptime", + "type": "string" + }, + "logical_cpus": { + "title": "Logical CPUs", + "type": "integer" + }, + "physical_cpus": { + "title": "Physical CPUs", + "type": "integer" + }, + "cpu_percentages": { + "items": { + "type": "number" + }, + "title": "CPU Percentages", + "type": "array" + }, + "service_cpu_percentage": { + "title": "Service CPU Usage Percentage", + "type": "number" + }, + "memory_total": { + "title": "Memory Total", + "type": "integer" + }, + "memory_usage_percentage": { + "title": "Memory Usage Percentage", + "type": "number" + }, + "service_memory_percentage": { + "title": "Service Memory Usage Percentage", + "type": "number" + }, + "disk_total": { + "title": "Disk Total", + "type": "integer" + }, + "disk_usage_percentage": { + "title": "Disk Usage Percentage", + "type": "number" + } + }, + "required": [ + "uptime", + "logical_cpus", + "physical_cpus", + "cpu_percentages", + "service_cpu_percentage", + "memory_total", + "memory_usage_percentage", + "service_memory_percentage", + "disk_total", + "disk_usage_percentage" + ], + "title": "IntersectCoreStatus", + "type": "object" + } + }, "messageTraits": { "commonHeaders": { "userspaceHeaders": { - "$defs": { - "IntersectDataHandler": { - "description": "What data transfer type do you want to use for handling the request/response?\n\nDefault: MESSAGE", - "enum": [0, 1], - "title": "IntersectDataHandler", - "type": "integer" - } - }, - "description": "Matches the current header definition for INTERSECT messages.\n\nALL messages should contain this header.", + "description": "ALL request/response/command messages must contain this header.\n\nWe do not include the content type of the message in the header, it is handled separately.", "properties": { + "message_id": { + "description": "Unique message ID", + "title": "Message Id", + "type": "string" + }, + "campaign_id": { + "description": "ID associated with a campaign", + "title": "Campaign Id", + "type": "string" + }, + "request_id": { + "description": "ID associated with a specific request message and response message sequence", + "title": "Request Id", + "type": "string" + }, "source": { "description": "source of the message", - "pattern": "([-a-z0-9]+\\.)*[-a-z0-9]", + "pattern": "^[a-z0-9][-a-z0-9.]*[-a-z0-9]$", "title": "Source", "type": "string" }, "destination": { "description": "destination of the message", - "pattern": "([-a-z0-9]+\\.)*[-a-z0-9]", + "pattern": "^[a-z0-9][-a-z0-9.]*[-a-z0-9]$", "title": "Destination", "type": "string" }, "created_at": { "description": "the UTC timestamp of message creation", - "format": "date-time", "title": "Created At", "type": "string" }, @@ -86,46 +166,50 @@ "title": "Sdk Version", "type": "string" }, + "operation_id": { + "description": "Name of capability and operation we want to call, in the format ${CAPABILITY_NAME}.${FUNCTION_NAME}", + "title": "Operation Id", + "type": "string" + }, "data_handler": { - "allOf": [ - { - "$ref": "#/components/messageTraits/commonHeaders/userspaceHeaders/$defs/IntersectDataHandler" - } - ], - "default": 0, - "description": "Code signifying where data is stored." 
+ "description": "Code signifying where data is stored.", + "type": "string" }, "has_error": { - "default": false, "description": "If this value is True, the payload will contain the error message (a string)", "title": "Has Error", - "type": "boolean" + "type": "string" } }, - "required": ["source", "destination", "created_at", "sdk_version"], - "title": "UserspaceMessageHeader", + "required": [ + "message_id", + "campaign_id", + "request_id", + "source", + "destination", + "created_at", + "sdk_version", + "operation_id" + ], + "title": "UserspaceMessageHeaders", "type": "object" }, "eventHeaders": { - "$defs": { - "IntersectDataHandler": { - "description": "What data transfer type do you want to use for handling the request/response?\n\nDefault: MESSAGE", - "enum": [0, 1], - "title": "IntersectDataHandler", - "type": "integer" - } - }, - "description": "Matches the current header definition for INTERSECT messages.\n\nALL messages should contain this header.", + "description": "ALL event messages must include this header.\n\nWe do not include the content type of the message in the header, it is handled separately.", "properties": { + "message_id": { + "description": "Unique message ID", + "title": "Message Id", + "type": "string" + }, "source": { "description": "source of the message", - "pattern": "([-a-z0-9]+\\.)*[-a-z0-9]", + "pattern": "^[a-z0-9][-a-z0-9.]*[-a-z0-9]$", "title": "Source", "type": "string" }, "created_at": { "description": "the UTC timestamp of message creation", - "format": "date-time", "title": "Created At", "type": "string" }, @@ -136,20 +220,30 @@ "type": "string" }, "data_handler": { - "allOf": [ - { - "$ref": "#/components/messageTraits/commonHeaders/eventHeaders/$defs/IntersectDataHandler" - } - ], - "default": 0, - "description": "Code signifying where data is stored." + "description": "Code signifying where data is stored.", + "type": "string" + }, + "capability_name": { + "description": "The name of the capability which emitted the event originally.", + "pattern": "^[a-zA-Z0-9]\\w*$", + "title": "Capability Name", + "type": "string" }, "event_name": { + "description": "The name of the event that was emitted, namespaced to the capability.", + "pattern": "^[a-zA-Z0-9]\\w*$", "title": "Event Name", "type": "string" } }, - "required": ["source", "created_at", "sdk_version", "event_name"], + "required": [ + "message_id", + "source", + "created_at", + "sdk_version", + "capability_name", + "event_name" + ], "title": "EventMessageHeaders", "type": "object" } diff --git a/examples/1_hello_world_events/hello_client.py b/examples/1_hello_world_events/hello_client.py index 3b1081b..37711d0 100644 --- a/examples/1_hello_world_events/hello_client.py +++ b/examples/1_hello_world_events/hello_client.py @@ -1,11 +1,12 @@ import logging from intersect_sdk import ( - INTERSECT_JSON_VALUE, + INTERSECT_RESPONSE_VALUE, IntersectClient, IntersectClientCallback, IntersectClientConfig, IntersectDirectMessageParams, + IntersectEventMessageParams, default_intersect_lifecycle_loop, ) @@ -13,7 +14,7 @@ def simple_client_callback( - _source: str, _operation: str, _has_error: bool, payload: INTERSECT_JSON_VALUE + _source: str, _operation: str, _has_error: bool, payload: INTERSECT_RESPONSE_VALUE ) -> None: """This simply prints the response from the service to your console. 
@@ -37,7 +38,7 @@ def simple_client_callback( def simple_event_callback( - _source: str, _operation: str, _event_name: str, payload: INTERSECT_JSON_VALUE + _source: str, _capability_name: str, _event_name: str, payload: INTERSECT_RESPONSE_VALUE ) -> None: """This simply prints the event from the service to your console. @@ -45,11 +46,9 @@ def simple_event_callback( Params: _source: the source of the response message. In this case it will always be from the hello_service. - _operation: the name of the function we called in the original message. In this case it will always be "say_hello_to_name". + _capability_name: the name of the capability we called in the original message. In this case it will always be "HelloExample". _event_name: the name of the event. In this case it will always be "hello_event". - payload: Value of the response from the Service. The typing of the payload varies, based on the operation called and whether or not - _has_error was set to "True". In this case, since we do not have an error, we can defer to the operation's response type. This response type is - "str", so the type will be "str". The value will always be "Hello, hello_client!". + payload: Value of the response from the Service. The typing of the payload varies, based on the values of the first three parameters. Note that the payload will always be a deserialized Python object, but the types are fairly limited: str, bool, float, int, None, List[T], and Dict[str, T] are the only types the payload can have. "T" in this case can be any of the 7 types just mentioned. @@ -69,7 +68,7 @@ def simple_event_callback( 'username': 'intersect_username', 'password': 'intersect_password', 'port': 1883, - 'protocol': 'mqtt3.1.1', + 'protocol': 'mqtt5.0', }, ], } @@ -92,12 +91,17 @@ def simple_event_callback( payload='hello_client', ) ] + events = [ + IntersectEventMessageParams( + hierarchy='hello-organization.hello-facility.hello-system.hello-subsystem.hello-service', + capability_name='HelloExample', + event_name='hello_event', + ) + ] config = IntersectClientConfig( initial_message_event_config=IntersectClientCallback( messages_to_send=initial_messages, - services_to_start_listening_for_events=[ - 'hello-organization.hello-facility.hello-system.hello-subsystem.hello-service' - ], + services_to_start_listening_for_events=events, ), **from_config_file, ) diff --git a/examples/1_hello_world_events/hello_service.py b/examples/1_hello_world_events/hello_service.py index 90f3beb..d414683 100644 --- a/examples/1_hello_world_events/hello_service.py +++ b/examples/1_hello_world_events/hello_service.py @@ -1,4 +1,5 @@ import logging +from typing import ClassVar from intersect_sdk import ( HierarchyConfig, @@ -22,20 +23,23 @@ class HelloServiceCapabilityImplementation(IntersectBaseCapabilityImplementation but we do not use it here. The operation we are calling is `say_hello_to_name` , so the message being sent will need to have - an operationId of `say_hello_to_name`. The operation expects a string sent to it in the payload, + an operation_id of `say_hello_to_name`. The operation expects a string sent to it in the payload, and will send a string back in its own payload. The operation we are calling also emits its own event. 
""" intersect_sdk_capability_name = 'HelloExample' + intersect_sdk_events: ClassVar[dict[str, IntersectEventDefinition]] = { + 'hello_event': IntersectEventDefinition(event_type=str) + } @intersect_status() def status(self) -> str: """Basic status function which returns a hard-coded string.""" return 'Up' - @intersect_message(events={'hello_event': IntersectEventDefinition(event_type=str)}) + @intersect_message def say_hello_to_name(self, name: str) -> str: """Takes in a string parameter and says 'Hello' to the parameter! This ALSO emits a separate event, broadcast globally.""" self.intersect_sdk_emit_event('hello_event', f'{name} requested a salutation!') @@ -54,7 +58,7 @@ def say_hello_to_name(self, name: str) -> str: 'username': 'intersect_username', 'password': 'intersect_password', 'port': 1883, - 'protocol': 'mqtt3.1.1', + 'protocol': 'mqtt5.0', }, ], } diff --git a/examples/1_hello_world_events/hello_service_schema.json b/examples/1_hello_world_events/hello_service_schema.json index b81c228..9ec7763 100644 --- a/examples/1_hello_world_events/hello_service_schema.json +++ b/examples/1_hello_world_events/hello_service_schema.json @@ -8,8 +8,16 @@ }, "defaultContentType": "application/json", "capabilities": { + "intersect_sdk": { + "endpoints": {}, + "events": {}, + "status": { + "$ref": "#/components/schemas/IntersectCoreStatus" + }, + "description": "Core capability present in every INTERSECT Service.\n\nThis may be called explicitly by any Client interacting with any SDK service. Set the operation to be \"intersect_sdk.\"." + }, "HelloExample": { - "channels": { + "endpoints": { "say_hello_to_name": { "publish": { "message": { @@ -19,7 +27,8 @@ "$ref": "#/components/messageTraits/commonHeaders" }, "payload": { - "type": "string" + "type": "string", + "title": "say_hello_to_name" } }, "description": "Takes in a string parameter and says 'Hello' to the parameter! This ALSO emits a separate event, broadcast globally." @@ -32,60 +41,127 @@ "$ref": "#/components/messageTraits/commonHeaders" }, "payload": { - "type": "string" + "type": "string", + "title": "name" } }, "description": "Takes in a string parameter and says 'Hello' to the parameter! This ALSO emits a separate event, broadcast globally." - }, - "events": [ - "hello_event" - ] + } } }, - "description": "Rudimentary capability implementation example.\n\nAll capability implementations are required to have an @intersect_status decorated function,\nbut we do not use it here.\n\nThe operation we are calling is `say_hello_to_name` , so the message being sent will need to have\nan operationId of `say_hello_to_name`. The operation expects a string sent to it in the payload,\nand will send a string back in its own payload.\n\nThe operation we are calling also emits its own event." - } - }, - "events": { - "hello_event": { - "type": "string" + "events": { + "hello_event": { + "type": "string", + "title": "hello_event" + } + }, + "status": { + "type": "string", + "title": "Status" + }, + "description": "Rudimentary capability implementation example.\n\nAll capability implementations are required to have an @intersect_status decorated function,\nbut we do not use it here.\n\nThe operation we are calling is `say_hello_to_name` , so the message being sent will need to have\nan operation_id of `say_hello_to_name`. The operation expects a string sent to it in the payload,\nand will send a string back in its own payload.\n\nThe operation we are calling also emits its own event." 
} }, - "status": { - "type": "string" - }, "components": { - "schemas": {}, + "schemas": { + "IntersectCoreStatus": { + "description": "Core status information about the INTERSECT-SDK Service as a whole.", + "properties": { + "uptime": { + "format": "duration", + "title": "Uptime", + "type": "string" + }, + "logical_cpus": { + "title": "Logical CPUs", + "type": "integer" + }, + "physical_cpus": { + "title": "Physical CPUs", + "type": "integer" + }, + "cpu_percentages": { + "items": { + "type": "number" + }, + "title": "CPU Percentages", + "type": "array" + }, + "service_cpu_percentage": { + "title": "Service CPU Usage Percentage", + "type": "number" + }, + "memory_total": { + "title": "Memory Total", + "type": "integer" + }, + "memory_usage_percentage": { + "title": "Memory Usage Percentage", + "type": "number" + }, + "service_memory_percentage": { + "title": "Service Memory Usage Percentage", + "type": "number" + }, + "disk_total": { + "title": "Disk Total", + "type": "integer" + }, + "disk_usage_percentage": { + "title": "Disk Usage Percentage", + "type": "number" + } + }, + "required": [ + "uptime", + "logical_cpus", + "physical_cpus", + "cpu_percentages", + "service_cpu_percentage", + "memory_total", + "memory_usage_percentage", + "service_memory_percentage", + "disk_total", + "disk_usage_percentage" + ], + "title": "IntersectCoreStatus", + "type": "object" + } + }, "messageTraits": { "commonHeaders": { "userspaceHeaders": { - "$defs": { - "IntersectDataHandler": { - "description": "What data transfer type do you want to use for handling the request/response?\n\nDefault: MESSAGE", - "enum": [ - 0, - 1 - ], - "title": "IntersectDataHandler", - "type": "integer" - } - }, - "description": "Matches the current header definition for INTERSECT messages.\n\nALL messages should contain this header.", + "description": "ALL request/response/command messages must contain this header.\n\nWe do not include the content type of the message in the header, it is handled separately.", "properties": { + "message_id": { + "description": "Unique message ID", + "title": "Message Id", + "type": "string" + }, + "campaign_id": { + "description": "ID associated with a campaign", + "title": "Campaign Id", + "type": "string" + }, + "request_id": { + "description": "ID associated with a specific request message and response message sequence", + "title": "Request Id", + "type": "string" + }, "source": { "description": "source of the message", - "pattern": "([-a-z0-9]+\\.)*[-a-z0-9]", + "pattern": "^[a-z0-9][-a-z0-9.]*[-a-z0-9]$", "title": "Source", "type": "string" }, "destination": { "description": "destination of the message", - "pattern": "([-a-z0-9]+\\.)*[-a-z0-9]", + "pattern": "^[a-z0-9][-a-z0-9.]*[-a-z0-9]$", "title": "Destination", "type": "string" }, "created_at": { "description": "the UTC timestamp of message creation", - "format": "date-time", "title": "Created At", "type": "string" }, @@ -95,54 +171,50 @@ "title": "Sdk Version", "type": "string" }, + "operation_id": { + "description": "Name of capability and operation we want to call, in the format ${CAPABILITY_NAME}.${FUNCTION_NAME}", + "title": "Operation Id", + "type": "string" + }, "data_handler": { - "allOf": [ - { - "$ref": "#/components/messageTraits/commonHeaders/userspaceHeaders/$defs/IntersectDataHandler" - } - ], - "default": 0, - "description": "Code signifying where data is stored." 
+ "description": "Code signifying where data is stored.", + "type": "string" }, "has_error": { - "default": false, "description": "If this value is True, the payload will contain the error message (a string)", "title": "Has Error", - "type": "boolean" + "type": "string" } }, "required": [ + "message_id", + "campaign_id", + "request_id", "source", "destination", "created_at", - "sdk_version" + "sdk_version", + "operation_id" ], - "title": "UserspaceMessageHeader", + "title": "UserspaceMessageHeaders", "type": "object" }, "eventHeaders": { - "$defs": { - "IntersectDataHandler": { - "description": "What data transfer type do you want to use for handling the request/response?\n\nDefault: MESSAGE", - "enum": [ - 0, - 1 - ], - "title": "IntersectDataHandler", - "type": "integer" - } - }, - "description": "Matches the current header definition for INTERSECT messages.\n\nALL messages should contain this header.", + "description": "ALL event messages must include this header.\n\nWe do not include the content type of the message in the header, it is handled separately.", "properties": { + "message_id": { + "description": "Unique message ID", + "title": "Message Id", + "type": "string" + }, "source": { "description": "source of the message", - "pattern": "([-a-z0-9]+\\.)*[-a-z0-9]", + "pattern": "^[a-z0-9][-a-z0-9.]*[-a-z0-9]$", "title": "Source", "type": "string" }, "created_at": { "description": "the UTC timestamp of message creation", - "format": "date-time", "title": "Created At", "type": "string" }, @@ -153,23 +225,28 @@ "type": "string" }, "data_handler": { - "allOf": [ - { - "$ref": "#/components/messageTraits/commonHeaders/eventHeaders/$defs/IntersectDataHandler" - } - ], - "default": 0, - "description": "Code signifying where data is stored." + "description": "Code signifying where data is stored.", + "type": "string" + }, + "capability_name": { + "description": "The name of the capability which emitted the event originally.", + "pattern": "^[a-zA-Z0-9]\\w*$", + "title": "Capability Name", + "type": "string" }, "event_name": { + "description": "The name of the event that was emitted, namespaced to the capability.", + "pattern": "^[a-zA-Z0-9]\\w*$", "title": "Event Name", "type": "string" } }, "required": [ + "message_id", "source", "created_at", "sdk_version", + "capability_name", "event_name" ], "title": "EventMessageHeaders", diff --git a/examples/1_hello_world_minio/hello_client.py b/examples/1_hello_world_minio/hello_client.py index a78d1fd..336ee07 100644 --- a/examples/1_hello_world_minio/hello_client.py +++ b/examples/1_hello_world_minio/hello_client.py @@ -1,7 +1,7 @@ import logging from intersect_sdk import ( - INTERSECT_JSON_VALUE, + INTERSECT_RESPONSE_VALUE, IntersectClient, IntersectClientCallback, IntersectClientConfig, @@ -14,7 +14,7 @@ def simple_client_callback( - _source: str, _operation: str, _has_error: bool, payload: INTERSECT_JSON_VALUE + _source: str, _operation: str, _has_error: bool, payload: INTERSECT_RESPONSE_VALUE ) -> None: """This simply prints the response from the service to your console. 
@@ -59,7 +59,7 @@ def simple_client_callback( 'username': 'intersect_username', 'password': 'intersect_password', 'port': 1883, - 'protocol': 'mqtt3.1.1', + 'protocol': 'mqtt5.0', }, ], } diff --git a/examples/1_hello_world_minio/hello_service.py b/examples/1_hello_world_minio/hello_service.py index d0b034a..d10fd75 100644 --- a/examples/1_hello_world_minio/hello_service.py +++ b/examples/1_hello_world_minio/hello_service.py @@ -22,7 +22,7 @@ class HelloServiceCapabilityImplementation(IntersectBaseCapabilityImplementation but we do not use it here. The operation we are calling is `say_hello_to_name` , so the message being sent will need to have - an operationId of `say_hello_to_name`. The operation expects a string sent to it in the payload, + an operation_id of `say_hello_to_name`. The operation expects a string sent to it in the payload, and will send a string back in its own payload. """ @@ -62,7 +62,7 @@ def say_hello_to_name(self, name: str) -> str: 'username': 'intersect_username', 'password': 'intersect_password', 'port': 1883, - 'protocol': 'mqtt3.1.1', + 'protocol': 'mqtt5.0', }, ], } diff --git a/examples/1_hello_world_minio/hello_service_schema.json b/examples/1_hello_world_minio/hello_service_schema.json index 43d6968..272dfd4 100644 --- a/examples/1_hello_world_minio/hello_service_schema.json +++ b/examples/1_hello_world_minio/hello_service_schema.json @@ -8,8 +8,16 @@ }, "defaultContentType": "application/json", "capabilities": { + "intersect_sdk": { + "endpoints": {}, + "events": {}, + "status": { + "$ref": "#/components/schemas/IntersectCoreStatus" + }, + "description": "Core capability present in every INTERSECT Service.\n\nThis may be called explicitly by any Client interacting with any SDK service. Set the operation to be \"intersect_sdk.\"." + }, "HelloExample": { - "channels": { + "endpoints": { "say_hello_to_name": { "publish": { "message": { @@ -19,7 +27,8 @@ "$ref": "#/components/messageTraits/commonHeaders" }, "payload": { - "type": "string" + "type": "string", + "title": "say_hello_to_name" } }, "description": "Takes in a string parameter and says 'Hello' to the parameter!" @@ -32,51 +41,122 @@ "$ref": "#/components/messageTraits/commonHeaders" }, "payload": { - "type": "string" + "type": "string", + "title": "name" } }, "description": "Takes in a string parameter and says 'Hello' to the parameter!" - }, - "events": [] + } } }, - "description": "Rudimentary capability implementation example.\n\nAll capability implementations are required to have an @intersect_status decorated function,\nbut we do not use it here.\n\nThe operation we are calling is `say_hello_to_name` , so the message being sent will need to have\nan operationId of `say_hello_to_name`. The operation expects a string sent to it in the payload,\nand will send a string back in its own payload." + "events": {}, + "status": { + "type": "string", + "title": "Status" + }, + "description": "Rudimentary capability implementation example.\n\nAll capability implementations are required to have an @intersect_status decorated function,\nbut we do not use it here.\n\nThe operation we are calling is `say_hello_to_name` , so the message being sent will need to have\nan operation_id of `say_hello_to_name`. The operation expects a string sent to it in the payload,\nand will send a string back in its own payload." 
} }, - "events": {}, - "status": { - "type": "string" - }, "components": { - "schemas": {}, + "schemas": { + "IntersectCoreStatus": { + "description": "Core status information about the INTERSECT-SDK Service as a whole.", + "properties": { + "uptime": { + "format": "duration", + "title": "Uptime", + "type": "string" + }, + "logical_cpus": { + "title": "Logical CPUs", + "type": "integer" + }, + "physical_cpus": { + "title": "Physical CPUs", + "type": "integer" + }, + "cpu_percentages": { + "items": { + "type": "number" + }, + "title": "CPU Percentages", + "type": "array" + }, + "service_cpu_percentage": { + "title": "Service CPU Usage Percentage", + "type": "number" + }, + "memory_total": { + "title": "Memory Total", + "type": "integer" + }, + "memory_usage_percentage": { + "title": "Memory Usage Percentage", + "type": "number" + }, + "service_memory_percentage": { + "title": "Service Memory Usage Percentage", + "type": "number" + }, + "disk_total": { + "title": "Disk Total", + "type": "integer" + }, + "disk_usage_percentage": { + "title": "Disk Usage Percentage", + "type": "number" + } + }, + "required": [ + "uptime", + "logical_cpus", + "physical_cpus", + "cpu_percentages", + "service_cpu_percentage", + "memory_total", + "memory_usage_percentage", + "service_memory_percentage", + "disk_total", + "disk_usage_percentage" + ], + "title": "IntersectCoreStatus", + "type": "object" + } + }, "messageTraits": { "commonHeaders": { "userspaceHeaders": { - "$defs": { - "IntersectDataHandler": { - "description": "What data transfer type do you want to use for handling the request/response?\n\nDefault: MESSAGE", - "enum": [0, 1], - "title": "IntersectDataHandler", - "type": "integer" - } - }, - "description": "Matches the current header definition for INTERSECT messages.\n\nALL messages should contain this header.", + "description": "ALL request/response/command messages must contain this header.\n\nWe do not include the content type of the message in the header, it is handled separately.", "properties": { + "message_id": { + "description": "Unique message ID", + "title": "Message Id", + "type": "string" + }, + "campaign_id": { + "description": "ID associated with a campaign", + "title": "Campaign Id", + "type": "string" + }, + "request_id": { + "description": "ID associated with a specific request message and response message sequence", + "title": "Request Id", + "type": "string" + }, "source": { "description": "source of the message", - "pattern": "([-a-z0-9]+\\.)*[-a-z0-9]", + "pattern": "^[a-z0-9][-a-z0-9.]*[-a-z0-9]$", "title": "Source", "type": "string" }, "destination": { "description": "destination of the message", - "pattern": "([-a-z0-9]+\\.)*[-a-z0-9]", + "pattern": "^[a-z0-9][-a-z0-9.]*[-a-z0-9]$", "title": "Destination", "type": "string" }, "created_at": { "description": "the UTC timestamp of message creation", - "format": "date-time", "title": "Created At", "type": "string" }, @@ -86,46 +166,50 @@ "title": "Sdk Version", "type": "string" }, + "operation_id": { + "description": "Name of capability and operation we want to call, in the format ${CAPABILITY_NAME}.${FUNCTION_NAME}", + "title": "Operation Id", + "type": "string" + }, "data_handler": { - "allOf": [ - { - "$ref": "#/components/messageTraits/commonHeaders/userspaceHeaders/$defs/IntersectDataHandler" - } - ], - "default": 0, - "description": "Code signifying where data is stored." 
+ "description": "Code signifying where data is stored.", + "type": "string" }, "has_error": { - "default": false, "description": "If this value is True, the payload will contain the error message (a string)", "title": "Has Error", - "type": "boolean" + "type": "string" } }, - "required": ["source", "destination", "created_at", "sdk_version"], - "title": "UserspaceMessageHeader", + "required": [ + "message_id", + "campaign_id", + "request_id", + "source", + "destination", + "created_at", + "sdk_version", + "operation_id" + ], + "title": "UserspaceMessageHeaders", "type": "object" }, "eventHeaders": { - "$defs": { - "IntersectDataHandler": { - "description": "What data transfer type do you want to use for handling the request/response?\n\nDefault: MESSAGE", - "enum": [0, 1], - "title": "IntersectDataHandler", - "type": "integer" - } - }, - "description": "Matches the current header definition for INTERSECT messages.\n\nALL messages should contain this header.", + "description": "ALL event messages must include this header.\n\nWe do not include the content type of the message in the header, it is handled separately.", "properties": { + "message_id": { + "description": "Unique message ID", + "title": "Message Id", + "type": "string" + }, "source": { "description": "source of the message", - "pattern": "([-a-z0-9]+\\.)*[-a-z0-9]", + "pattern": "^[a-z0-9][-a-z0-9.]*[-a-z0-9]$", "title": "Source", "type": "string" }, "created_at": { "description": "the UTC timestamp of message creation", - "format": "date-time", "title": "Created At", "type": "string" }, @@ -136,20 +220,30 @@ "type": "string" }, "data_handler": { - "allOf": [ - { - "$ref": "#/components/messageTraits/commonHeaders/eventHeaders/$defs/IntersectDataHandler" - } - ], - "default": 0, - "description": "Code signifying where data is stored." + "description": "Code signifying where data is stored.", + "type": "string" + }, + "capability_name": { + "description": "The name of the capability which emitted the event originally.", + "pattern": "^[a-zA-Z0-9]\\w*$", + "title": "Capability Name", + "type": "string" }, "event_name": { + "description": "The name of the event that was emitted, namespaced to the capability.", + "pattern": "^[a-zA-Z0-9]\\w*$", "title": "Event Name", "type": "string" } }, - "required": ["source", "created_at", "sdk_version", "event_name"], + "required": [ + "message_id", + "source", + "created_at", + "sdk_version", + "capability_name", + "event_name" + ], "title": "EventMessageHeaders", "type": "object" } diff --git a/examples/2_counting/counting_client.py b/examples/2_counting/counting_client.py index fe23cc5..e76853a 100644 --- a/examples/2_counting/counting_client.py +++ b/examples/2_counting/counting_client.py @@ -3,7 +3,7 @@ import time from intersect_sdk import ( - INTERSECT_JSON_VALUE, + INTERSECT_RESPONSE_VALUE, IntersectClient, IntersectClientCallback, IntersectClientConfig, @@ -95,7 +95,7 @@ def __init__(self) -> None: self.message_stack.reverse() def client_callback( - self, source: str, operation: str, _has_error: bool, payload: INTERSECT_JSON_VALUE + self, source: str, operation: str, _has_error: bool, payload: INTERSECT_RESPONSE_VALUE ) -> IntersectClientCallback: """This simply prints the response from the Service to your console. 
@@ -141,7 +141,7 @@ def client_callback( 'username': 'intersect_username', 'password': 'intersect_password', 'port': 1883, - 'protocol': 'mqtt3.1.1', + 'protocol': 'mqtt5.0', }, ], } diff --git a/examples/2_counting/counting_service.py b/examples/2_counting/counting_service.py index a02f4b9..ab2d5b6 100644 --- a/examples/2_counting/counting_service.py +++ b/examples/2_counting/counting_service.py @@ -2,10 +2,9 @@ import threading import time from dataclasses import dataclass -from typing import Optional +from typing import Annotated from pydantic import BaseModel, Field -from typing_extensions import Annotated from intersect_sdk import ( HierarchyConfig, @@ -72,7 +71,7 @@ def __init__(self) -> None: """ super().__init__() self.state = CountingServiceCapabilityImplementationState() - self.counter_thread: Optional[threading.Thread] = None + self.counter_thread: threading.Thread | None = None @intersect_status() def status(self) -> CountingServiceCapabilityImplementationState: @@ -169,7 +168,7 @@ def _run_count(self) -> None: 'username': 'intersect_username', 'password': 'intersect_password', 'port': 1883, - 'protocol': 'mqtt3.1.1', + 'protocol': 'mqtt5.0', }, ], } diff --git a/examples/2_counting/counting_service_schema.json b/examples/2_counting/counting_service_schema.json index cd84fda..0545b4d 100644 --- a/examples/2_counting/counting_service_schema.json +++ b/examples/2_counting/counting_service_schema.json @@ -8,8 +8,16 @@ }, "defaultContentType": "application/json", "capabilities": { + "intersect_sdk": { + "endpoints": {}, + "events": {}, + "status": { + "$ref": "#/components/schemas/IntersectCoreStatus" + }, + "description": "Core capability present in every INTERSECT Service.\n\nThis may be called explicitly by any Client interacting with any SDK service. Set the operation to be \"intersect_sdk.\"." + }, "CountingExample": { - "channels": { + "endpoints": { "reset_count": { "publish": { "message": { @@ -32,12 +40,12 @@ "$ref": "#/components/messageTraits/commonHeaders" }, "payload": { - "type": "boolean" + "type": "boolean", + "title": "start_again" } }, "description": "Set the counter back to 0.\n\nParams\n start_again: if True, start the counter again; if False, the\n counter will remain off.\n\nReturns:\n the state BEFORE the counter was reset" - }, - "events": [] + } }, "start_count": { "publish": { @@ -62,8 +70,7 @@ } }, "description": "Start the counter (potentially from any number). \"Fails\" if the counter is already running.\n\nReturns:\n A CountingServiceCapabilityImplementationResponse object. The success value will be:\n True - if counter was started successfully\n False - if counter was already running and this was called" - }, - "events": [] + } }, "stop_count": { "publish": { @@ -88,19 +95,81 @@ } }, "description": "Stop the new ticker.\n\nReturns:\n A CountingServiceCapabilityImplementationResponse object. The success value will be:\n True - if counter was stopped successfully\n False - if counter was already not running and this was called" - }, - "events": [] + } } }, + "events": {}, + "status": { + "$ref": "#/components/schemas/CountingServiceCapabilityImplementationState" + }, "description": "This example is meant to showcase that your implementation is able to track state if you want it to.\n\nPlease note that this is not an especially robust implementation, as in the instance\nthe service gets two messages at the same time, it may manage to create\ntwo threads at once." 
} }, - "events": {}, - "status": { - "$ref": "#/components/schemas/CountingServiceCapabilityImplementationState" - }, "components": { "schemas": { + "IntersectCoreStatus": { + "description": "Core status information about the INTERSECT-SDK Service as a whole.", + "properties": { + "uptime": { + "format": "duration", + "title": "Uptime", + "type": "string" + }, + "logical_cpus": { + "title": "Logical CPUs", + "type": "integer" + }, + "physical_cpus": { + "title": "Physical CPUs", + "type": "integer" + }, + "cpu_percentages": { + "items": { + "type": "number" + }, + "title": "CPU Percentages", + "type": "array" + }, + "service_cpu_percentage": { + "title": "Service CPU Usage Percentage", + "type": "number" + }, + "memory_total": { + "title": "Memory Total", + "type": "integer" + }, + "memory_usage_percentage": { + "title": "Memory Usage Percentage", + "type": "number" + }, + "service_memory_percentage": { + "title": "Service Memory Usage Percentage", + "type": "number" + }, + "disk_total": { + "title": "Disk Total", + "type": "integer" + }, + "disk_usage_percentage": { + "title": "Disk Usage Percentage", + "type": "number" + } + }, + "required": [ + "uptime", + "logical_cpus", + "physical_cpus", + "cpu_percentages", + "service_cpu_percentage", + "memory_total", + "memory_usage_percentage", + "service_memory_percentage", + "disk_total", + "disk_usage_percentage" + ], + "title": "IntersectCoreStatus", + "type": "object" + }, "CountingServiceCapabilityImplementationState": { "description": "We can't just use any class to represent state. This class either needs to extend Pydantic's BaseModel class, or be a dataclass. Both the Python standard library's dataclass and Pydantic's dataclass are valid.", "properties": { @@ -119,10 +188,28 @@ "title": "CountingServiceCapabilityImplementationState", "type": "object" }, + "__CountingServiceCapabilityImplementationState": { + "description": "We can't just use any class to represent state. This class either needs to extend Pydantic's BaseModel class, or be a dataclass. 
Both the Python standard library's dataclass and Pydantic's dataclass are valid.", + "properties": { + "count": { + "default": 0, + "minimum": 0, + "title": "Count", + "type": "integer" + }, + "counting": { + "default": false, + "title": "Counting", + "type": "boolean" + } + }, + "title": "CountingServiceCapabilityImplementationState", + "type": "object" + }, "CountingServiceCapabilityImplementationResponse": { "properties": { "state": { - "$ref": "#/components/schemas/CountingServiceCapabilityImplementationState" + "$ref": "#/components/schemas/__CountingServiceCapabilityImplementationState" }, "success": { "title": "Success", @@ -140,34 +227,37 @@ "messageTraits": { "commonHeaders": { "userspaceHeaders": { - "$defs": { - "IntersectDataHandler": { - "description": "What data transfer type do you want to use for handling the request/response?\n\nDefault: MESSAGE", - "enum": [ - 0, - 1 - ], - "title": "IntersectDataHandler", - "type": "integer" - } - }, - "description": "Matches the current header definition for INTERSECT messages.\n\nALL messages should contain this header.", + "description": "ALL request/response/command messages must contain this header.\n\nWe do not include the content type of the message in the header, it is handled separately.", "properties": { + "message_id": { + "description": "Unique message ID", + "title": "Message Id", + "type": "string" + }, + "campaign_id": { + "description": "ID associated with a campaign", + "title": "Campaign Id", + "type": "string" + }, + "request_id": { + "description": "ID associated with a specific request message and response message sequence", + "title": "Request Id", + "type": "string" + }, "source": { "description": "source of the message", - "pattern": "([-a-z0-9]+\\.)*[-a-z0-9]", + "pattern": "^[a-z0-9][-a-z0-9.]*[-a-z0-9]$", "title": "Source", "type": "string" }, "destination": { "description": "destination of the message", - "pattern": "([-a-z0-9]+\\.)*[-a-z0-9]", + "pattern": "^[a-z0-9][-a-z0-9.]*[-a-z0-9]$", "title": "Destination", "type": "string" }, "created_at": { "description": "the UTC timestamp of message creation", - "format": "date-time", "title": "Created At", "type": "string" }, @@ -177,54 +267,50 @@ "title": "Sdk Version", "type": "string" }, + "operation_id": { + "description": "Name of capability and operation we want to call, in the format ${CAPABILITY_NAME}.${FUNCTION_NAME}", + "title": "Operation Id", + "type": "string" + }, "data_handler": { - "allOf": [ - { - "$ref": "#/components/messageTraits/commonHeaders/userspaceHeaders/$defs/IntersectDataHandler" - } - ], - "default": 0, - "description": "Code signifying where data is stored." 
+ "description": "Code signifying where data is stored.", + "type": "string" }, "has_error": { - "default": false, "description": "If this value is True, the payload will contain the error message (a string)", "title": "Has Error", - "type": "boolean" + "type": "string" } }, "required": [ + "message_id", + "campaign_id", + "request_id", "source", "destination", "created_at", - "sdk_version" + "sdk_version", + "operation_id" ], - "title": "UserspaceMessageHeader", + "title": "UserspaceMessageHeaders", "type": "object" }, "eventHeaders": { - "$defs": { - "IntersectDataHandler": { - "description": "What data transfer type do you want to use for handling the request/response?\n\nDefault: MESSAGE", - "enum": [ - 0, - 1 - ], - "title": "IntersectDataHandler", - "type": "integer" - } - }, - "description": "Matches the current header definition for INTERSECT messages.\n\nALL messages should contain this header.", + "description": "ALL event messages must include this header.\n\nWe do not include the content type of the message in the header, it is handled separately.", "properties": { + "message_id": { + "description": "Unique message ID", + "title": "Message Id", + "type": "string" + }, "source": { "description": "source of the message", - "pattern": "([-a-z0-9]+\\.)*[-a-z0-9]", + "pattern": "^[a-z0-9][-a-z0-9.]*[-a-z0-9]$", "title": "Source", "type": "string" }, "created_at": { "description": "the UTC timestamp of message creation", - "format": "date-time", "title": "Created At", "type": "string" }, @@ -235,23 +321,28 @@ "type": "string" }, "data_handler": { - "allOf": [ - { - "$ref": "#/components/messageTraits/commonHeaders/eventHeaders/$defs/IntersectDataHandler" - } - ], - "default": 0, - "description": "Code signifying where data is stored." + "description": "Code signifying where data is stored.", + "type": "string" + }, + "capability_name": { + "description": "The name of the capability which emitted the event originally.", + "pattern": "^[a-zA-Z0-9]\\w*$", + "title": "Capability Name", + "type": "string" }, "event_name": { + "description": "The name of the event that was emitted, namespaced to the capability.", + "pattern": "^[a-zA-Z0-9]\\w*$", "title": "Event Name", "type": "string" } }, "required": [ + "message_id", "source", "created_at", "sdk_version", + "capability_name", "event_name" ], "title": "EventMessageHeaders", diff --git a/examples/2_counting_events/counting_client.py b/examples/2_counting_events/counting_client.py index f04194a..f3e572b 100644 --- a/examples/2_counting_events/counting_client.py +++ b/examples/2_counting_events/counting_client.py @@ -1,10 +1,11 @@ import logging from intersect_sdk import ( - INTERSECT_JSON_VALUE, + INTERSECT_RESPONSE_VALUE, IntersectClient, IntersectClientCallback, IntersectClientConfig, + IntersectEventMessageParams, default_intersect_lifecycle_loop, ) @@ -26,7 +27,11 @@ def __init__(self) -> None: self.events_encountered = 0 def event_callback( - self, _source: str, _operation: str, _event_name: str, payload: INTERSECT_JSON_VALUE + self, + _source: str, + _capability_name: str, + _event_name: str, + payload: INTERSECT_RESPONSE_VALUE, ) -> None: """Handles events from the Counting Service. @@ -37,8 +42,7 @@ def event_callback( Params: - _source: the source of the event (in this instance, it will always be the counting service) - - _operation: the name of the operation from the service which emitted the event. Sometimes this comes from a message. 
- In this case it will always be 'increment_counter_function', since that's the function the event was configured on. + - _capability_name: the name of the capability from the service which emitted the event. In this case it will always be 'CountingExample'. - _event_name: the name of the event. In this case it will always be 'increment_counter'. - payload: the actual value of the emitted event. In this case it will always be an integer (3, 27, 81, 243, ...) """ @@ -59,17 +63,22 @@ def event_callback( 'username': 'intersect_username', 'password': 'intersect_password', 'port': 1883, - 'protocol': 'mqtt3.1.1', + 'protocol': 'mqtt5.0', }, ], } # start listening to events from the counting service + events = [ + IntersectEventMessageParams( + hierarchy='counting-organization.counting-facility.counting-system.counting-subsystem.counting-service', + capability_name='CountingExample', + event_name='increment_counter', + ) + ] config = IntersectClientConfig( initial_message_event_config=IntersectClientCallback( - services_to_start_listening_for_events=[ - 'counting-organization.counting-facility.counting-system.counting-subsystem.counting-service', - ] + services_to_start_listening_for_events=events, ), **from_config_file, ) diff --git a/examples/2_counting_events/counting_service.py b/examples/2_counting_events/counting_service.py index 2d6d06d..f264d05 100644 --- a/examples/2_counting_events/counting_service.py +++ b/examples/2_counting_events/counting_service.py @@ -1,6 +1,7 @@ import logging import threading import time +from typing import ClassVar from intersect_sdk import ( HierarchyConfig, @@ -9,7 +10,6 @@ IntersectService, IntersectServiceConfig, default_intersect_lifecycle_loop, - intersect_event, ) logging.basicConfig(level=logging.INFO) @@ -23,6 +23,9 @@ class CountingServiceCapabilityImplementation(IntersectBaseCapabilityImplementat """ intersect_sdk_capability_name = 'CountingExample' + intersect_sdk_events: ClassVar[dict[str, IntersectEventDefinition]] = { + 'increment_counter': IntersectEventDefinition(event_type=int), + } def after_service_startup(self) -> None: """This is a 'post-initialization' method. @@ -37,14 +40,13 @@ def after_service_startup(self) -> None: ) self.counter_thread.start() - @intersect_event(events={'increment_counter': IntersectEventDefinition(event_type=int)}) def increment_counter_function(self) -> None: """This is the event thread which continually emits count events. Every 3 seconds, we fire off a new 'increment_counter' event; each time, we are firing off a value 3 times greater than before. - We have to configure our event on the @intersect_event decorator. Since 'increment_counter' is emitting an integer value, - we need to specify the emission type on the decorator. Failure to register the event and its type will mean that the event won't + We have to configure our event on the intersect_sdk_events class variable. Since 'increment_counter' is emitting an integer value, + we need to specify the emission type on the class variable. Failure to register the event and its type will mean that the event won't be emitted. 
""" while True: @@ -60,7 +62,7 @@ def increment_counter_function(self) -> None: 'username': 'intersect_username', 'password': 'intersect_password', 'port': 1883, - 'protocol': 'mqtt3.1.1', + 'protocol': 'mqtt5.0', }, ], } diff --git a/examples/2_counting_events/counting_service_schema.json b/examples/2_counting_events/counting_service_schema.json index e2da624..6eeea36 100644 --- a/examples/2_counting_events/counting_service_schema.json +++ b/examples/2_counting_events/counting_service_schema.json @@ -8,52 +8,128 @@ }, "defaultContentType": "application/json", "capabilities": { + "intersect_sdk": { + "endpoints": {}, + "events": {}, + "status": { + "$ref": "#/components/schemas/IntersectCoreStatus" + }, + "description": "Core capability present in every INTERSECT Service.\n\nThis may be called explicitly by any Client interacting with any SDK service. Set the operation to be \"intersect_sdk.\"." + }, "CountingExample": { - "channels": {}, + "endpoints": {}, + "events": { + "increment_counter": { + "type": "integer", + "title": "increment_counter" + } + }, + "status": { + "type": "null" + }, "description": "This example is meant to showcase a simple event emitter.\n\nThis service does not have any endpoints, but simply fires off a single event every three seconds." } }, - "events": { - "increment_counter": { - "type": "integer" - } - }, - "status": { - "type": "null" - }, "components": { - "schemas": {}, + "schemas": { + "IntersectCoreStatus": { + "description": "Core status information about the INTERSECT-SDK Service as a whole.", + "properties": { + "uptime": { + "format": "duration", + "title": "Uptime", + "type": "string" + }, + "logical_cpus": { + "title": "Logical CPUs", + "type": "integer" + }, + "physical_cpus": { + "title": "Physical CPUs", + "type": "integer" + }, + "cpu_percentages": { + "items": { + "type": "number" + }, + "title": "CPU Percentages", + "type": "array" + }, + "service_cpu_percentage": { + "title": "Service CPU Usage Percentage", + "type": "number" + }, + "memory_total": { + "title": "Memory Total", + "type": "integer" + }, + "memory_usage_percentage": { + "title": "Memory Usage Percentage", + "type": "number" + }, + "service_memory_percentage": { + "title": "Service Memory Usage Percentage", + "type": "number" + }, + "disk_total": { + "title": "Disk Total", + "type": "integer" + }, + "disk_usage_percentage": { + "title": "Disk Usage Percentage", + "type": "number" + } + }, + "required": [ + "uptime", + "logical_cpus", + "physical_cpus", + "cpu_percentages", + "service_cpu_percentage", + "memory_total", + "memory_usage_percentage", + "service_memory_percentage", + "disk_total", + "disk_usage_percentage" + ], + "title": "IntersectCoreStatus", + "type": "object" + } + }, "messageTraits": { "commonHeaders": { "userspaceHeaders": { - "$defs": { - "IntersectDataHandler": { - "description": "What data transfer type do you want to use for handling the request/response?\n\nDefault: MESSAGE", - "enum": [ - 0, - 1 - ], - "title": "IntersectDataHandler", - "type": "integer" - } - }, - "description": "Matches the current header definition for INTERSECT messages.\n\nALL messages should contain this header.", + "description": "ALL request/response/command messages must contain this header.\n\nWe do not include the content type of the message in the header, it is handled separately.", "properties": { + "message_id": { + "description": "Unique message ID", + "title": "Message Id", + "type": "string" + }, + "campaign_id": { + "description": "ID associated with a campaign", + 
"title": "Campaign Id", + "type": "string" + }, + "request_id": { + "description": "ID associated with a specific request message and response message sequence", + "title": "Request Id", + "type": "string" + }, "source": { "description": "source of the message", - "pattern": "([-a-z0-9]+\\.)*[-a-z0-9]", + "pattern": "^[a-z0-9][-a-z0-9.]*[-a-z0-9]$", "title": "Source", "type": "string" }, "destination": { "description": "destination of the message", - "pattern": "([-a-z0-9]+\\.)*[-a-z0-9]", + "pattern": "^[a-z0-9][-a-z0-9.]*[-a-z0-9]$", "title": "Destination", "type": "string" }, "created_at": { "description": "the UTC timestamp of message creation", - "format": "date-time", "title": "Created At", "type": "string" }, @@ -63,54 +139,50 @@ "title": "Sdk Version", "type": "string" }, + "operation_id": { + "description": "Name of capability and operation we want to call, in the format ${CAPABILITY_NAME}.${FUNCTION_NAME}", + "title": "Operation Id", + "type": "string" + }, "data_handler": { - "allOf": [ - { - "$ref": "#/components/messageTraits/commonHeaders/userspaceHeaders/$defs/IntersectDataHandler" - } - ], - "default": 0, - "description": "Code signifying where data is stored." + "description": "Code signifying where data is stored.", + "type": "string" }, "has_error": { - "default": false, "description": "If this value is True, the payload will contain the error message (a string)", "title": "Has Error", - "type": "boolean" + "type": "string" } }, "required": [ + "message_id", + "campaign_id", + "request_id", "source", "destination", "created_at", - "sdk_version" + "sdk_version", + "operation_id" ], - "title": "UserspaceMessageHeader", + "title": "UserspaceMessageHeaders", "type": "object" }, "eventHeaders": { - "$defs": { - "IntersectDataHandler": { - "description": "What data transfer type do you want to use for handling the request/response?\n\nDefault: MESSAGE", - "enum": [ - 0, - 1 - ], - "title": "IntersectDataHandler", - "type": "integer" - } - }, - "description": "Matches the current header definition for INTERSECT messages.\n\nALL messages should contain this header.", + "description": "ALL event messages must include this header.\n\nWe do not include the content type of the message in the header, it is handled separately.", "properties": { + "message_id": { + "description": "Unique message ID", + "title": "Message Id", + "type": "string" + }, "source": { "description": "source of the message", - "pattern": "([-a-z0-9]+\\.)*[-a-z0-9]", + "pattern": "^[a-z0-9][-a-z0-9.]*[-a-z0-9]$", "title": "Source", "type": "string" }, "created_at": { "description": "the UTC timestamp of message creation", - "format": "date-time", "title": "Created At", "type": "string" }, @@ -121,23 +193,28 @@ "type": "string" }, "data_handler": { - "allOf": [ - { - "$ref": "#/components/messageTraits/commonHeaders/eventHeaders/$defs/IntersectDataHandler" - } - ], - "default": 0, - "description": "Code signifying where data is stored." 
+ "description": "Code signifying where data is stored.", + "type": "string" + }, + "capability_name": { + "description": "The name of the capability which emitted the event originally.", + "pattern": "^[a-zA-Z0-9]\\w*$", + "title": "Capability Name", + "type": "string" }, "event_name": { + "description": "The name of the event that was emitted, namespaced to the capability.", + "pattern": "^[a-zA-Z0-9]\\w*$", "title": "Event Name", "type": "string" } }, "required": [ + "message_id", "source", "created_at", "sdk_version", + "capability_name", "event_name" ], "title": "EventMessageHeaders", diff --git a/examples/3_ping_pong_events/ping_pong_client.py b/examples/3_ping_pong_events/ping_pong_client.py index 2f2b476..4187ab5 100644 --- a/examples/3_ping_pong_events/ping_pong_client.py +++ b/examples/3_ping_pong_events/ping_pong_client.py @@ -1,17 +1,27 @@ import logging from intersect_sdk import ( - INTERSECT_JSON_VALUE, + INTERSECT_RESPONSE_VALUE, IntersectClient, IntersectClientCallback, IntersectClientConfig, + IntersectEventMessageParams, default_intersect_lifecycle_loop, ) logging.basicConfig(level=logging.INFO) -PING_SERVICE = 'p-ng-organization.p-ng-facility.p-ng-system.p-ng-subsystem.ping-service' -PONG_SERVICE = 'p-ng-organization.p-ng-facility.p-ng-system.p-ng-subsystem.pong-service' + +def get_event_message_params(png: str) -> IntersectEventMessageParams: + """Generate the event information we'll handle based on the value of 'png'. + + We always want to pass in a new object; don't mutate a persistent object. + """ + return IntersectEventMessageParams( + hierarchy=f'p-ng-organization.p-ng-facility.p-ng-system.p-ng-subsystem.{png}-service', + capability_name=png, + event_name=png, + ) class SampleOrchestrator: @@ -27,7 +37,11 @@ def __init__(self) -> None: self.events_encountered = 0 def event_callback( - self, _source: str, _operation: str, event_name: str, payload: INTERSECT_JSON_VALUE + self, + _source: str, + _capability_name: str, + event_name: str, + payload: INTERSECT_RESPONSE_VALUE, ) -> IntersectClientCallback: """Handles events from two Services at once. @@ -43,15 +57,15 @@ def event_callback( if self.events_encountered == self.MAX_EVENTS_TO_PROCESS: raise Exception - # we would normally also check the source here. With certain services, checking the operation can also be helpful. + # In this case, we can check any of the source, the capability_name, or the event_name. For maximum robustness, you should check all three values. 
if event_name == 'ping': return IntersectClientCallback( - services_to_start_listening_for_events=[PONG_SERVICE], - services_to_stop_listening_for_events=[PING_SERVICE], + services_to_start_listening_for_events=[get_event_message_params('pong')], + services_to_stop_listening_for_events=[get_event_message_params('ping')], ) return IntersectClientCallback( - services_to_start_listening_for_events=[PING_SERVICE], - services_to_stop_listening_for_events=[PONG_SERVICE], + services_to_start_listening_for_events=[get_event_message_params('ping')], + services_to_stop_listening_for_events=[get_event_message_params('pong')], ) @@ -62,7 +76,7 @@ def event_callback( 'username': 'intersect_username', 'password': 'intersect_password', 'port': 1883, - 'protocol': 'mqtt3.1.1', + 'protocol': 'mqtt5.0', }, ], } @@ -71,7 +85,7 @@ def event_callback( config = IntersectClientConfig( initial_message_event_config=IntersectClientCallback( services_to_start_listening_for_events=[ - PING_SERVICE, + get_event_message_params('ping'), ] ), **from_config_file, diff --git a/examples/3_ping_pong_events/ping_service.py b/examples/3_ping_pong_events/ping_service.py index c63eddb..08c965b 100644 --- a/examples/3_ping_pong_events/ping_service.py +++ b/examples/3_ping_pong_events/ping_service.py @@ -3,8 +3,9 @@ import logging import threading import time +from typing import ClassVar -from intersect_sdk import IntersectEventDefinition, intersect_event +from intersect_sdk import IntersectEventDefinition from .service_runner import P_ngBaseCapabilityImplementation, run_service @@ -15,6 +16,9 @@ class PingCapabilityImplementation(P_ngBaseCapabilityImplementation): """Basic capability definition, very similar to the other capability except for the type of event it emits.""" intersect_sdk_capability_name = 'ping' + intersect_sdk_events: ClassVar[dict[str, IntersectEventDefinition]] = { + 'ping': IntersectEventDefinition(event_type=str), + } def after_service_startup(self) -> None: """Called after service startup.""" @@ -25,7 +29,6 @@ def after_service_startup(self) -> None: ) self.counter_thread.start() - @intersect_event(events={'ping': IntersectEventDefinition(event_type=str)}) def ping_event(self) -> None: """Send out a ping event every 2 seconds.""" while True: diff --git a/examples/3_ping_pong_events/ping_service_schema.json b/examples/3_ping_pong_events/ping_service_schema.json index 199d4e7..7d2011c 100644 --- a/examples/3_ping_pong_events/ping_service_schema.json +++ b/examples/3_ping_pong_events/ping_service_schema.json @@ -8,52 +8,128 @@ }, "defaultContentType": "application/json", "capabilities": { + "intersect_sdk": { + "endpoints": {}, + "status": { + "$ref": "#/components/schemas/IntersectCoreStatus" + }, + "events": {}, + "description": "Core capability present in every INTERSECT Service.\n\nThis may be called explicitly by any Client interacting with any SDK service. Set the operation to be \"intersect_sdk.\"." + }, "ping": { - "channels": {}, + "endpoints": {}, + "status": { + "type": "null" + }, + "events": { + "ping": { + "type": "string", + "title": "ping" + } + }, "description": "Basic capability definition, very similar to the other capability except for the type of event it emits." 
} }, - "events": { - "ping": { - "type": "string" - } - }, - "status": { - "type": "null" - }, "components": { - "schemas": {}, + "schemas": { + "IntersectCoreStatus": { + "description": "Core status information about the INTERSECT-SDK Service as a whole.", + "properties": { + "uptime": { + "format": "duration", + "title": "Uptime", + "type": "string" + }, + "logical_cpus": { + "title": "Logical CPUs", + "type": "integer" + }, + "physical_cpus": { + "title": "Physical CPUs", + "type": "integer" + }, + "cpu_percentages": { + "items": { + "type": "number" + }, + "title": "CPU Percentages", + "type": "array" + }, + "service_cpu_percentage": { + "title": "Service CPU Usage Percentage", + "type": "number" + }, + "memory_total": { + "title": "Memory Total", + "type": "integer" + }, + "memory_usage_percentage": { + "title": "Memory Usage Percentage", + "type": "number" + }, + "service_memory_percentage": { + "title": "Service Memory Usage Percentage", + "type": "number" + }, + "disk_total": { + "title": "Disk Total", + "type": "integer" + }, + "disk_usage_percentage": { + "title": "Disk Usage Percentage", + "type": "number" + } + }, + "required": [ + "uptime", + "logical_cpus", + "physical_cpus", + "cpu_percentages", + "service_cpu_percentage", + "memory_total", + "memory_usage_percentage", + "service_memory_percentage", + "disk_total", + "disk_usage_percentage" + ], + "title": "IntersectCoreStatus", + "type": "object" + } + }, "messageTraits": { "commonHeaders": { "userspaceHeaders": { - "$defs": { - "IntersectDataHandler": { - "description": "What data transfer type do you want to use for handling the request/response?\n\nDefault: MESSAGE", - "enum": [ - 0, - 1 - ], - "title": "IntersectDataHandler", - "type": "integer" - } - }, - "description": "Matches the current header definition for INTERSECT messages.\n\nALL messages should contain this header.", + "description": "ALL request/response/command messages must contain this header.\n\nWe do not include the content type of the message in the header, it is handled separately.", "properties": { + "message_id": { + "description": "Unique message ID", + "title": "Message Id", + "type": "string" + }, + "campaign_id": { + "description": "ID associated with a campaign", + "title": "Campaign Id", + "type": "string" + }, + "request_id": { + "description": "ID associated with a specific request message and response message sequence", + "title": "Request Id", + "type": "string" + }, "source": { "description": "source of the message", - "pattern": "([-a-z0-9]+\\.)*[-a-z0-9]", + "pattern": "^[a-z0-9][-a-z0-9.]*[-a-z0-9]$", "title": "Source", "type": "string" }, "destination": { "description": "destination of the message", - "pattern": "([-a-z0-9]+\\.)*[-a-z0-9]", + "pattern": "^[a-z0-9][-a-z0-9.]*[-a-z0-9]$", "title": "Destination", "type": "string" }, "created_at": { "description": "the UTC timestamp of message creation", - "format": "date-time", "title": "Created At", "type": "string" }, @@ -63,54 +139,50 @@ "title": "Sdk Version", "type": "string" }, + "operation_id": { + "description": "Name of capability and operation we want to call, in the format ${CAPABILITY_NAME}.${FUNCTION_NAME}", + "title": "Operation Id", + "type": "string" + }, "data_handler": { - "allOf": [ - { - "$ref": "#/components/messageTraits/commonHeaders/userspaceHeaders/$defs/IntersectDataHandler" - } - ], - "default": 0, - "description": "Code signifying where data is stored." 
+ "description": "Code signifying where data is stored.", + "type": "string" }, "has_error": { - "default": false, "description": "If this value is True, the payload will contain the error message (a string)", "title": "Has Error", - "type": "boolean" + "type": "string" } }, "required": [ + "message_id", + "campaign_id", + "request_id", "source", "destination", "created_at", - "sdk_version" + "sdk_version", + "operation_id" ], - "title": "UserspaceMessageHeader", + "title": "UserspaceMessageHeaders", "type": "object" }, "eventHeaders": { - "$defs": { - "IntersectDataHandler": { - "description": "What data transfer type do you want to use for handling the request/response?\n\nDefault: MESSAGE", - "enum": [ - 0, - 1 - ], - "title": "IntersectDataHandler", - "type": "integer" - } - }, - "description": "Matches the current header definition for INTERSECT messages.\n\nALL messages should contain this header.", + "description": "ALL event messages must include this header.\n\nWe do not include the content type of the message in the header, it is handled separately.", "properties": { + "message_id": { + "description": "Unique message ID", + "title": "Message Id", + "type": "string" + }, "source": { "description": "source of the message", - "pattern": "([-a-z0-9]+\\.)*[-a-z0-9]", + "pattern": "^[a-z0-9][-a-z0-9.]*[-a-z0-9]$", "title": "Source", "type": "string" }, "created_at": { "description": "the UTC timestamp of message creation", - "format": "date-time", "title": "Created At", "type": "string" }, @@ -121,23 +193,28 @@ "type": "string" }, "data_handler": { - "allOf": [ - { - "$ref": "#/components/messageTraits/commonHeaders/eventHeaders/$defs/IntersectDataHandler" - } - ], - "default": 0, - "description": "Code signifying where data is stored." + "description": "Code signifying where data is stored.", + "type": "string" + }, + "capability_name": { + "description": "The name of the capability which emitted the event originally.", + "pattern": "^[a-zA-Z0-9]\\w*$", + "title": "Capability Name", + "type": "string" }, "event_name": { + "description": "The name of the event that was emitted, namespaced to the capability.", + "pattern": "^[a-zA-Z0-9]\\w*$", "title": "Event Name", "type": "string" } }, "required": [ + "message_id", "source", "created_at", "sdk_version", + "capability_name", "event_name" ], "title": "EventMessageHeaders", diff --git a/examples/3_ping_pong_events/pong_service.py b/examples/3_ping_pong_events/pong_service.py index 400d355..de7e160 100644 --- a/examples/3_ping_pong_events/pong_service.py +++ b/examples/3_ping_pong_events/pong_service.py @@ -3,8 +3,9 @@ import logging import threading import time +from typing import ClassVar -from intersect_sdk import IntersectEventDefinition, intersect_event +from intersect_sdk import IntersectEventDefinition from .service_runner import P_ngBaseCapabilityImplementation, run_service @@ -15,6 +16,9 @@ class PongCapabilityImplementation(P_ngBaseCapabilityImplementation): """Basic capability definition, very similar to the other capability except for the type of event it emits.""" intersect_sdk_capability_name = 'pong' + intersect_sdk_events: ClassVar[dict[str, IntersectEventDefinition]] = { + 'pong': IntersectEventDefinition(event_type=str), + } def after_service_startup(self) -> None: """Called after service startup.""" @@ -25,7 +29,6 @@ def after_service_startup(self) -> None: ) self.counter_thread.start() - @intersect_event(events={'pong': IntersectEventDefinition(event_type=str)}) def pong_event(self) -> None: """Send out a pong event 
every 2 seconds.""" while True: diff --git a/examples/3_ping_pong_events/pong_service_schema.json b/examples/3_ping_pong_events/pong_service_schema.json index b8a2b0e..519222d 100644 --- a/examples/3_ping_pong_events/pong_service_schema.json +++ b/examples/3_ping_pong_events/pong_service_schema.json @@ -8,52 +8,128 @@ }, "defaultContentType": "application/json", "capabilities": { + "intersect_sdk": { + "endpoints": {}, + "status": { + "$ref": "#/components/schemas/IntersectCoreStatus" + }, + "events": {}, + "description": "Core capability present in every INTERSECT Service.\n\nThis may be called explicitly by any Client interacting with any SDK service. Set the operation to be \"intersect_sdk.\"." + }, "pong": { - "channels": {}, + "endpoints": {}, + "status": { + "type": "null" + }, + "events": { + "pong": { + "type": "string", + "title": "pong" + } + }, "description": "Basic capability definition, very similar to the other capability except for the type of event it emits." } }, - "events": { - "pong": { - "type": "string" - } - }, - "status": { - "type": "null" - }, "components": { - "schemas": {}, + "schemas": { + "IntersectCoreStatus": { + "description": "Core status information about the INTERSECT-SDK Service as a whole.", + "properties": { + "uptime": { + "format": "duration", + "title": "Uptime", + "type": "string" + }, + "logical_cpus": { + "title": "Logical CPUs", + "type": "integer" + }, + "physical_cpus": { + "title": "Physical CPUs", + "type": "integer" + }, + "cpu_percentages": { + "items": { + "type": "number" + }, + "title": "CPU Percentages", + "type": "array" + }, + "service_cpu_percentage": { + "title": "Service CPU Usage Percentage", + "type": "number" + }, + "memory_total": { + "title": "Memory Total", + "type": "integer" + }, + "memory_usage_percentage": { + "title": "Memory Usage Percentage", + "type": "number" + }, + "service_memory_percentage": { + "title": "Service Memory Usage Percentage", + "type": "number" + }, + "disk_total": { + "title": "Disk Total", + "type": "integer" + }, + "disk_usage_percentage": { + "title": "Disk Usage Percentage", + "type": "number" + } + }, + "required": [ + "uptime", + "logical_cpus", + "physical_cpus", + "cpu_percentages", + "service_cpu_percentage", + "memory_total", + "memory_usage_percentage", + "service_memory_percentage", + "disk_total", + "disk_usage_percentage" + ], + "title": "IntersectCoreStatus", + "type": "object" + } + }, "messageTraits": { "commonHeaders": { "userspaceHeaders": { - "$defs": { - "IntersectDataHandler": { - "description": "What data transfer type do you want to use for handling the request/response?\n\nDefault: MESSAGE", - "enum": [ - 0, - 1 - ], - "title": "IntersectDataHandler", - "type": "integer" - } - }, - "description": "Matches the current header definition for INTERSECT messages.\n\nALL messages should contain this header.", + "description": "ALL request/response/command messages must contain this header.\n\nWe do not include the content type of the message in the header, it is handled separately.", "properties": { + "message_id": { + "description": "Unique message ID", + "title": "Message Id", + "type": "string" + }, + "campaign_id": { + "description": "ID associated with a campaign", + "title": "Campaign Id", + "type": "string" + }, + "request_id": { + "description": "ID associated with a specific request message and response message sequence", + "title": "Request Id", + "type": "string" + }, "source": { "description": "source of the message", - "pattern": "([-a-z0-9]+\\.)*[-a-z0-9]", + 
"pattern": "^[a-z0-9][-a-z0-9.]*[-a-z0-9]$", "title": "Source", "type": "string" }, "destination": { "description": "destination of the message", - "pattern": "([-a-z0-9]+\\.)*[-a-z0-9]", + "pattern": "^[a-z0-9][-a-z0-9.]*[-a-z0-9]$", "title": "Destination", "type": "string" }, "created_at": { "description": "the UTC timestamp of message creation", - "format": "date-time", "title": "Created At", "type": "string" }, @@ -63,54 +139,50 @@ "title": "Sdk Version", "type": "string" }, + "operation_id": { + "description": "Name of capability and operation we want to call, in the format ${CAPABILITY_NAME}.${FUNCTION_NAME}", + "title": "Operation Id", + "type": "string" + }, "data_handler": { - "allOf": [ - { - "$ref": "#/components/messageTraits/commonHeaders/userspaceHeaders/$defs/IntersectDataHandler" - } - ], - "default": 0, - "description": "Code signifying where data is stored." + "description": "Code signifying where data is stored.", + "type": "string" }, "has_error": { - "default": false, "description": "If this value is True, the payload will contain the error message (a string)", "title": "Has Error", - "type": "boolean" + "type": "string" } }, "required": [ + "message_id", + "campaign_id", + "request_id", "source", "destination", "created_at", - "sdk_version" + "sdk_version", + "operation_id" ], - "title": "UserspaceMessageHeader", + "title": "UserspaceMessageHeaders", "type": "object" }, "eventHeaders": { - "$defs": { - "IntersectDataHandler": { - "description": "What data transfer type do you want to use for handling the request/response?\n\nDefault: MESSAGE", - "enum": [ - 0, - 1 - ], - "title": "IntersectDataHandler", - "type": "integer" - } - }, - "description": "Matches the current header definition for INTERSECT messages.\n\nALL messages should contain this header.", + "description": "ALL event messages must include this header.\n\nWe do not include the content type of the message in the header, it is handled separately.", "properties": { + "message_id": { + "description": "Unique message ID", + "title": "Message Id", + "type": "string" + }, "source": { "description": "source of the message", - "pattern": "([-a-z0-9]+\\.)*[-a-z0-9]", + "pattern": "^[a-z0-9][-a-z0-9.]*[-a-z0-9]$", "title": "Source", "type": "string" }, "created_at": { "description": "the UTC timestamp of message creation", - "format": "date-time", "title": "Created At", "type": "string" }, @@ -121,23 +193,28 @@ "type": "string" }, "data_handler": { - "allOf": [ - { - "$ref": "#/components/messageTraits/commonHeaders/eventHeaders/$defs/IntersectDataHandler" - } - ], - "default": 0, - "description": "Code signifying where data is stored." 
+ "description": "Code signifying where data is stored.", + "type": "string" + }, + "capability_name": { + "description": "The name of the capability which emitted the event originally.", + "pattern": "^[a-zA-Z0-9]\\w*$", + "title": "Capability Name", + "type": "string" }, "event_name": { + "description": "The name of the event that was emitted, namespaced to the capability.", + "pattern": "^[a-zA-Z0-9]\\w*$", "title": "Event Name", "type": "string" } }, "required": [ + "message_id", "source", "created_at", "sdk_version", + "capability_name", "event_name" ], "title": "EventMessageHeaders", diff --git a/examples/3_ping_pong_events/service_runner.py b/examples/3_ping_pong_events/service_runner.py index d3738e0..b80a853 100644 --- a/examples/3_ping_pong_events/service_runner.py +++ b/examples/3_ping_pong_events/service_runner.py @@ -33,7 +33,7 @@ def run_service(capability: P_ngBaseCapabilityImplementation) -> None: 'username': 'intersect_username', 'password': 'intersect_password', 'port': 1883, - 'protocol': 'mqtt3.1.1', + 'protocol': 'mqtt5.0', }, ], } diff --git a/examples/3_ping_pong_events_amqp/ping_pong_client.py b/examples/3_ping_pong_events_amqp/ping_pong_client.py index e055086..24bec81 100644 --- a/examples/3_ping_pong_events_amqp/ping_pong_client.py +++ b/examples/3_ping_pong_events_amqp/ping_pong_client.py @@ -1,10 +1,11 @@ import logging from intersect_sdk import ( - INTERSECT_JSON_VALUE, + INTERSECT_RESPONSE_VALUE, IntersectClient, IntersectClientCallback, IntersectClientConfig, + IntersectEventMessageParams, default_intersect_lifecycle_loop, ) @@ -13,8 +14,17 @@ logger = logging.getLogger(__name__) -PING_SERVICE = 'p-ng-organization.p-ng-facility.p-ng-system.p-ng-subsystem.ping-service' -PONG_SERVICE = 'p-ng-organization.p-ng-facility.p-ng-system.p-ng-subsystem.pong-service' + +def get_event_message_params(png: str) -> IntersectEventMessageParams: + """Generate the event information we'll handle based on the value of 'png'. + + We always want to pass in a new object; don't mutate a persistent object. + """ + return IntersectEventMessageParams( + hierarchy=f'p-ng-organization.p-ng-facility.p-ng-system.p-ng-subsystem.{png}-service', + capability_name=png, + event_name=png, + ) class SampleOrchestrator: @@ -30,7 +40,11 @@ def __init__(self) -> None: self.events_encountered = 0 def event_callback( - self, _source: str, _operation: str, event_name: str, payload: INTERSECT_JSON_VALUE + self, + _source: str, + _capability_name: str, + event_name: str, + payload: INTERSECT_RESPONSE_VALUE, ) -> IntersectClientCallback: """Handles events from two Services at once. @@ -47,15 +61,15 @@ def event_callback( if self.events_encountered == self.MAX_EVENTS_TO_PROCESS: raise Exception - # we would normally also check the source here. With certain services, checking the operation can also be helpful. + # In this case, we can check any of the source, the capability_name, or the event_name. For maximum robustness, you should check all three values. 
if event_name == 'ping': return IntersectClientCallback( - services_to_start_listening_for_events=[PONG_SERVICE], - services_to_stop_listening_for_events=[PING_SERVICE], + services_to_start_listening_for_events=[get_event_message_params('pong')], + services_to_stop_listening_for_events=[get_event_message_params('ping')], ) return IntersectClientCallback( - services_to_start_listening_for_events=[PING_SERVICE], - services_to_stop_listening_for_events=[PONG_SERVICE], + services_to_start_listening_for_events=[get_event_message_params('ping')], + services_to_stop_listening_for_events=[get_event_message_params('pong')], ) @@ -75,7 +89,7 @@ def event_callback( config = IntersectClientConfig( initial_message_event_config=IntersectClientCallback( services_to_start_listening_for_events=[ - PING_SERVICE, + get_event_message_params('ping'), ] ), **from_config_file, diff --git a/examples/3_ping_pong_events_amqp/ping_service.py b/examples/3_ping_pong_events_amqp/ping_service.py index 6a45273..4819934 100644 --- a/examples/3_ping_pong_events_amqp/ping_service.py +++ b/examples/3_ping_pong_events_amqp/ping_service.py @@ -3,8 +3,9 @@ import logging import threading import time +from typing import ClassVar -from intersect_sdk import IntersectEventDefinition, intersect_event +from intersect_sdk import IntersectEventDefinition from .service_runner import P_ngBaseCapabilityImplementation, run_service @@ -16,6 +17,9 @@ class PingCapabilityImplementation(P_ngBaseCapabilityImplementation): """Basic capability definition, very similar to the other capability except for the type of event it emits.""" intersect_sdk_capability_name = 'ping' + intersect_sdk_events: ClassVar[dict[str, IntersectEventDefinition]] = { + 'ping': IntersectEventDefinition(event_type=str), + } def after_service_startup(self) -> None: """Called after service startup.""" @@ -26,7 +30,6 @@ def after_service_startup(self) -> None: ) self.counter_thread.start() - @intersect_event(events={'ping': IntersectEventDefinition(event_type=str)}) def ping_event(self) -> None: """Send out a ping event every 2 seconds.""" while True: diff --git a/examples/3_ping_pong_events_amqp/ping_service_schema.json b/examples/3_ping_pong_events_amqp/ping_service_schema.json index 199d4e7..7d2011c 100644 --- a/examples/3_ping_pong_events_amqp/ping_service_schema.json +++ b/examples/3_ping_pong_events_amqp/ping_service_schema.json @@ -8,52 +8,128 @@ }, "defaultContentType": "application/json", "capabilities": { + "intersect_sdk": { + "endpoints": {}, + "status": { + "$ref": "#/components/schemas/IntersectCoreStatus" + }, + "events": {}, + "description": "Core capability present in every INTERSECT Service.\n\nThis may be called explicitly by any Client interacting with any SDK service. Set the operation to be \"intersect_sdk.\"." + }, "ping": { - "channels": {}, + "endpoints": {}, + "status": { + "type": "null" + }, + "events": { + "ping": { + "type": "string", + "title": "ping" + } + }, "description": "Basic capability definition, very similar to the other capability except for the type of event it emits." 
} }, - "events": { - "ping": { - "type": "string" - } - }, - "status": { - "type": "null" - }, "components": { - "schemas": {}, + "schemas": { + "IntersectCoreStatus": { + "description": "Core status information about the INTERSECT-SDK Service as a whole.", + "properties": { + "uptime": { + "format": "duration", + "title": "Uptime", + "type": "string" + }, + "logical_cpus": { + "title": "Logical CPUs", + "type": "integer" + }, + "physical_cpus": { + "title": "Physical CPUs", + "type": "integer" + }, + "cpu_percentages": { + "items": { + "type": "number" + }, + "title": "CPU Percentages", + "type": "array" + }, + "service_cpu_percentage": { + "title": "Service CPU Usage Percentage", + "type": "number" + }, + "memory_total": { + "title": "Memory Total", + "type": "integer" + }, + "memory_usage_percentage": { + "title": "Memory Usage Percentage", + "type": "number" + }, + "service_memory_percentage": { + "title": "Service Memory Usage Percentage", + "type": "number" + }, + "disk_total": { + "title": "Disk Total", + "type": "integer" + }, + "disk_usage_percentage": { + "title": "Disk Usage Percentage", + "type": "number" + } + }, + "required": [ + "uptime", + "logical_cpus", + "physical_cpus", + "cpu_percentages", + "service_cpu_percentage", + "memory_total", + "memory_usage_percentage", + "service_memory_percentage", + "disk_total", + "disk_usage_percentage" + ], + "title": "IntersectCoreStatus", + "type": "object" + } + }, "messageTraits": { "commonHeaders": { "userspaceHeaders": { - "$defs": { - "IntersectDataHandler": { - "description": "What data transfer type do you want to use for handling the request/response?\n\nDefault: MESSAGE", - "enum": [ - 0, - 1 - ], - "title": "IntersectDataHandler", - "type": "integer" - } - }, - "description": "Matches the current header definition for INTERSECT messages.\n\nALL messages should contain this header.", + "description": "ALL request/response/command messages must contain this header.\n\nWe do not include the content type of the message in the header, it is handled separately.", "properties": { + "message_id": { + "description": "Unique message ID", + "title": "Message Id", + "type": "string" + }, + "campaign_id": { + "description": "ID associated with a campaign", + "title": "Campaign Id", + "type": "string" + }, + "request_id": { + "description": "ID associated with a specific request message and response message sequence", + "title": "Request Id", + "type": "string" + }, "source": { "description": "source of the message", - "pattern": "([-a-z0-9]+\\.)*[-a-z0-9]", + "pattern": "^[a-z0-9][-a-z0-9.]*[-a-z0-9]$", "title": "Source", "type": "string" }, "destination": { "description": "destination of the message", - "pattern": "([-a-z0-9]+\\.)*[-a-z0-9]", + "pattern": "^[a-z0-9][-a-z0-9.]*[-a-z0-9]$", "title": "Destination", "type": "string" }, "created_at": { "description": "the UTC timestamp of message creation", - "format": "date-time", "title": "Created At", "type": "string" }, @@ -63,54 +139,50 @@ "title": "Sdk Version", "type": "string" }, + "operation_id": { + "description": "Name of capability and operation we want to call, in the format ${CAPABILITY_NAME}.${FUNCTION_NAME}", + "title": "Operation Id", + "type": "string" + }, "data_handler": { - "allOf": [ - { - "$ref": "#/components/messageTraits/commonHeaders/userspaceHeaders/$defs/IntersectDataHandler" - } - ], - "default": 0, - "description": "Code signifying where data is stored." 
+ "description": "Code signifying where data is stored.", + "type": "string" }, "has_error": { - "default": false, "description": "If this value is True, the payload will contain the error message (a string)", "title": "Has Error", - "type": "boolean" + "type": "string" } }, "required": [ + "message_id", + "campaign_id", + "request_id", "source", "destination", "created_at", - "sdk_version" + "sdk_version", + "operation_id" ], - "title": "UserspaceMessageHeader", + "title": "UserspaceMessageHeaders", "type": "object" }, "eventHeaders": { - "$defs": { - "IntersectDataHandler": { - "description": "What data transfer type do you want to use for handling the request/response?\n\nDefault: MESSAGE", - "enum": [ - 0, - 1 - ], - "title": "IntersectDataHandler", - "type": "integer" - } - }, - "description": "Matches the current header definition for INTERSECT messages.\n\nALL messages should contain this header.", + "description": "ALL event messages must include this header.\n\nWe do not include the content type of the message in the header, it is handled separately.", "properties": { + "message_id": { + "description": "Unique message ID", + "title": "Message Id", + "type": "string" + }, "source": { "description": "source of the message", - "pattern": "([-a-z0-9]+\\.)*[-a-z0-9]", + "pattern": "^[a-z0-9][-a-z0-9.]*[-a-z0-9]$", "title": "Source", "type": "string" }, "created_at": { "description": "the UTC timestamp of message creation", - "format": "date-time", "title": "Created At", "type": "string" }, @@ -121,23 +193,28 @@ "type": "string" }, "data_handler": { - "allOf": [ - { - "$ref": "#/components/messageTraits/commonHeaders/eventHeaders/$defs/IntersectDataHandler" - } - ], - "default": 0, - "description": "Code signifying where data is stored." + "description": "Code signifying where data is stored.", + "type": "string" + }, + "capability_name": { + "description": "The name of the capability which emitted the event originally.", + "pattern": "^[a-zA-Z0-9]\\w*$", + "title": "Capability Name", + "type": "string" }, "event_name": { + "description": "The name of the event that was emitted, namespaced to the capability.", + "pattern": "^[a-zA-Z0-9]\\w*$", "title": "Event Name", "type": "string" } }, "required": [ + "message_id", "source", "created_at", "sdk_version", + "capability_name", "event_name" ], "title": "EventMessageHeaders", diff --git a/examples/3_ping_pong_events_amqp/pong_service.py b/examples/3_ping_pong_events_amqp/pong_service.py index 24aa468..ef1b85b 100644 --- a/examples/3_ping_pong_events_amqp/pong_service.py +++ b/examples/3_ping_pong_events_amqp/pong_service.py @@ -3,8 +3,9 @@ import logging import threading import time +from typing import ClassVar -from intersect_sdk import IntersectEventDefinition, intersect_event +from intersect_sdk import IntersectEventDefinition from .service_runner import P_ngBaseCapabilityImplementation, run_service @@ -16,6 +17,9 @@ class PongCapabilityImplementation(P_ngBaseCapabilityImplementation): """Basic capability definition, very similar to the other capability except for the type of event it emits.""" intersect_sdk_capability_name = 'pong' + intersect_sdk_events: ClassVar[dict[str, IntersectEventDefinition]] = { + 'pong': IntersectEventDefinition(event_type=str), + } def after_service_startup(self) -> None: """Called after service startup.""" @@ -26,7 +30,6 @@ def after_service_startup(self) -> None: ) self.counter_thread.start() - @intersect_event(events={'pong': IntersectEventDefinition(event_type=str)}) def pong_event(self) -> None: """Send 
out a pong event every 2 seconds.""" while True: diff --git a/examples/3_ping_pong_events_amqp/pong_service_schema.json b/examples/3_ping_pong_events_amqp/pong_service_schema.json index b8a2b0e..519222d 100644 --- a/examples/3_ping_pong_events_amqp/pong_service_schema.json +++ b/examples/3_ping_pong_events_amqp/pong_service_schema.json @@ -8,52 +8,128 @@ }, "defaultContentType": "application/json", "capabilities": { + "intersect_sdk": { + "endpoints": {}, + "status": { + "$ref": "#/components/schemas/IntersectCoreStatus" + }, + "events": {}, + "description": "Core capability present in every INTERSECT Service.\n\nThis may be called explicitly by any Client interacting with any SDK service. Set the operation to be \"intersect_sdk.\"." + }, "pong": { - "channels": {}, + "endpoints": {}, + "status": { + "type": "null" + }, + "events": { + "pong": { + "type": "string", + "title": "pong" + } + }, "description": "Basic capability definition, very similar to the other capability except for the type of event it emits." } }, - "events": { - "pong": { - "type": "string" - } - }, - "status": { - "type": "null" - }, "components": { - "schemas": {}, + "schemas": { + "IntersectCoreStatus": { + "description": "Core status information about the INTERSECT-SDK Service as a whole.", + "properties": { + "uptime": { + "format": "duration", + "title": "Uptime", + "type": "string" + }, + "logical_cpus": { + "title": "Logical CPUs", + "type": "integer" + }, + "physical_cpus": { + "title": "Physical CPUs", + "type": "integer" + }, + "cpu_percentages": { + "items": { + "type": "number" + }, + "title": "CPU Percentages", + "type": "array" + }, + "service_cpu_percentage": { + "title": "Service CPU Usage Percentage", + "type": "number" + }, + "memory_total": { + "title": "Memory Total", + "type": "integer" + }, + "memory_usage_percentage": { + "title": "Memory Usage Percentage", + "type": "number" + }, + "service_memory_percentage": { + "title": "Service Memory Usage Percentage", + "type": "number" + }, + "disk_total": { + "title": "Disk Total", + "type": "integer" + }, + "disk_usage_percentage": { + "title": "Disk Usage Percentage", + "type": "number" + } + }, + "required": [ + "uptime", + "logical_cpus", + "physical_cpus", + "cpu_percentages", + "service_cpu_percentage", + "memory_total", + "memory_usage_percentage", + "service_memory_percentage", + "disk_total", + "disk_usage_percentage" + ], + "title": "IntersectCoreStatus", + "type": "object" + } + }, "messageTraits": { "commonHeaders": { "userspaceHeaders": { - "$defs": { - "IntersectDataHandler": { - "description": "What data transfer type do you want to use for handling the request/response?\n\nDefault: MESSAGE", - "enum": [ - 0, - 1 - ], - "title": "IntersectDataHandler", - "type": "integer" - } - }, - "description": "Matches the current header definition for INTERSECT messages.\n\nALL messages should contain this header.", + "description": "ALL request/response/command messages must contain this header.\n\nWe do not include the content type of the message in the header, it is handled separately.", "properties": { + "message_id": { + "description": "Unique message ID", + "title": "Message Id", + "type": "string" + }, + "campaign_id": { + "description": "ID associated with a campaign", + "title": "Campaign Id", + "type": "string" + }, + "request_id": { + "description": "ID associated with a specific request message and response message sequence", + "title": "Request Id", + "type": "string" + }, "source": { "description": "source of the message", - "pattern": 
"([-a-z0-9]+\\.)*[-a-z0-9]", + "pattern": "^[a-z0-9][-a-z0-9.]*[-a-z0-9]$", "title": "Source", "type": "string" }, "destination": { "description": "destination of the message", - "pattern": "([-a-z0-9]+\\.)*[-a-z0-9]", + "pattern": "^[a-z0-9][-a-z0-9.]*[-a-z0-9]$", "title": "Destination", "type": "string" }, "created_at": { "description": "the UTC timestamp of message creation", - "format": "date-time", "title": "Created At", "type": "string" }, @@ -63,54 +139,50 @@ "title": "Sdk Version", "type": "string" }, + "operation_id": { + "description": "Name of capability and operation we want to call, in the format ${CAPABILITY_NAME}.${FUNCTION_NAME}", + "title": "Operation Id", + "type": "string" + }, "data_handler": { - "allOf": [ - { - "$ref": "#/components/messageTraits/commonHeaders/userspaceHeaders/$defs/IntersectDataHandler" - } - ], - "default": 0, - "description": "Code signifying where data is stored." + "description": "Code signifying where data is stored.", + "type": "string" }, "has_error": { - "default": false, "description": "If this value is True, the payload will contain the error message (a string)", "title": "Has Error", - "type": "boolean" + "type": "string" } }, "required": [ + "message_id", + "campaign_id", + "request_id", "source", "destination", "created_at", - "sdk_version" + "sdk_version", + "operation_id" ], - "title": "UserspaceMessageHeader", + "title": "UserspaceMessageHeaders", "type": "object" }, "eventHeaders": { - "$defs": { - "IntersectDataHandler": { - "description": "What data transfer type do you want to use for handling the request/response?\n\nDefault: MESSAGE", - "enum": [ - 0, - 1 - ], - "title": "IntersectDataHandler", - "type": "integer" - } - }, - "description": "Matches the current header definition for INTERSECT messages.\n\nALL messages should contain this header.", + "description": "ALL event messages must include this header.\n\nWe do not include the content type of the message in the header, it is handled separately.", "properties": { + "message_id": { + "description": "Unique message ID", + "title": "Message Id", + "type": "string" + }, "source": { "description": "source of the message", - "pattern": "([-a-z0-9]+\\.)*[-a-z0-9]", + "pattern": "^[a-z0-9][-a-z0-9.]*[-a-z0-9]$", "title": "Source", "type": "string" }, "created_at": { "description": "the UTC timestamp of message creation", - "format": "date-time", "title": "Created At", "type": "string" }, @@ -121,23 +193,28 @@ "type": "string" }, "data_handler": { - "allOf": [ - { - "$ref": "#/components/messageTraits/commonHeaders/eventHeaders/$defs/IntersectDataHandler" - } - ], - "default": 0, - "description": "Code signifying where data is stored." 
+ "description": "Code signifying where data is stored.", + "type": "string" + }, + "capability_name": { + "description": "The name of the capability which emitted the event originally.", + "pattern": "^[a-zA-Z0-9]\\w*$", + "title": "Capability Name", + "type": "string" }, "event_name": { + "description": "The name of the event that was emitted, namespaced to the capability.", + "pattern": "^[a-zA-Z0-9]\\w*$", "title": "Event Name", "type": "string" } }, "required": [ + "message_id", "source", "created_at", "sdk_version", + "capability_name", "event_name" ], "title": "EventMessageHeaders", diff --git a/examples/4_service_to_service/example_1_service.py b/examples/4_service_to_service/example_1_service.py index 63e365a..d6c7d32 100644 --- a/examples/4_service_to_service/example_1_service.py +++ b/examples/4_service_to_service/example_1_service.py @@ -1,6 +1,7 @@ """First Service for example. Sends a message to service two and emits an event for the client.""" import logging +from typing import ClassVar from intersect_sdk import ( HierarchyConfig, @@ -10,7 +11,6 @@ IntersectService, IntersectServiceConfig, default_intersect_lifecycle_loop, - intersect_event, intersect_message, intersect_status, ) @@ -23,6 +23,9 @@ class ExampleServiceOneCapabilityImplementation(IntersectBaseCapabilityImplement """Service 1 Capability.""" intersect_sdk_capability_name = 'ServiceOne' + intersect_sdk_events: ClassVar[dict[str, IntersectEventDefinition]] = { + 'response_event': IntersectEventDefinition(event_type=str), + } @intersect_status() def status(self) -> str: @@ -41,12 +44,11 @@ def pass_text_to_service_2(self, text: str) -> None: # Send intersect message to another service self.intersect_sdk_call_service(msg_to_send, self.service_2_handler) - @intersect_event(events={'response_event': IntersectEventDefinition(event_type=str)}) def service_2_handler(self, _source: str, _operation: str, _has_error: bool, msg: str) -> None: """Handles first response from service 2, emits the response as an event for the client, and sends a hardcoded message to service 2.""" self.intersect_sdk_emit_event('response_event', f'Received Response from Service 2: {msg}') - # verify that we can call the service multiple + # verify that we can call the service multiple times msg_to_send = IntersectDirectMessageParams( destination='example-organization.example-facility.example-system.example-subsystem.service-two', operation='ServiceTwo.test_service', @@ -54,7 +56,6 @@ def service_2_handler(self, _source: str, _operation: str, _has_error: bool, msg ) self.intersect_sdk_call_service(msg_to_send, self.additional_service_handler) - @intersect_event(events={'response_event': IntersectEventDefinition(event_type=str)}) def additional_service_handler( self, _source: str, _operation: str, _has_error: bool, msg: str ) -> None: @@ -71,7 +72,7 @@ def additional_service_handler( 'username': 'intersect_username', 'password': 'intersect_password', 'port': 1883, - 'protocol': 'mqtt3.1.1', + 'protocol': 'mqtt5.0', }, ], } diff --git a/examples/4_service_to_service/example_1_service_schema.json b/examples/4_service_to_service/example_1_service_schema.json index c02c6b1..93d54b2 100644 --- a/examples/4_service_to_service/example_1_service_schema.json +++ b/examples/4_service_to_service/example_1_service_schema.json @@ -8,8 +8,16 @@ }, "defaultContentType": "application/json", "capabilities": { + "intersect_sdk": { + "endpoints": {}, + "events": {}, + "status": { + "$ref": "#/components/schemas/IntersectCoreStatus" + }, + "description": "Core 
capability present in every INTERSECT Service.\n\nThis may be called explicitly by any Client interacting with any SDK service. Set the operation to be \"intersect_sdk.\"." + }, "ServiceOne": { - "channels": { + "endpoints": { "pass_text_to_service_2": { "publish": { "message": { @@ -19,7 +27,8 @@ "$ref": "#/components/messageTraits/commonHeaders" }, "payload": { - "type": "null" + "type": "null", + "title": "pass_text_to_service_2" } }, "description": "Takes in a string parameter and sends it to service 2." @@ -32,58 +41,127 @@ "$ref": "#/components/messageTraits/commonHeaders" }, "payload": { - "type": "string" + "type": "string", + "title": "text" } }, "description": "Takes in a string parameter and sends it to service 2." - }, - "events": [] + } } }, + "events": { + "response_event": { + "type": "string", + "title": "response_event" + } + }, + "status": { + "type": "string", + "title": "Status" + }, "description": "Service 1 Capability." } }, - "events": { - "response_event": { - "type": "string" - } - }, - "status": { - "type": "string" - }, "components": { - "schemas": {}, + "schemas": { + "IntersectCoreStatus": { + "description": "Core status information about the INTERSECT-SDK Service as a whole.", + "properties": { + "uptime": { + "format": "duration", + "title": "Uptime", + "type": "string" + }, + "logical_cpus": { + "title": "Logical CPUs", + "type": "integer" + }, + "physical_cpus": { + "title": "Physical CPUs", + "type": "integer" + }, + "cpu_percentages": { + "items": { + "type": "number" + }, + "title": "CPU Percentages", + "type": "array" + }, + "service_cpu_percentage": { + "title": "Service CPU Usage Percentage", + "type": "number" + }, + "memory_total": { + "title": "Memory Total", + "type": "integer" + }, + "memory_usage_percentage": { + "title": "Memory Usage Percentage", + "type": "number" + }, + "service_memory_percentage": { + "title": "Service Memory Usage Percentage", + "type": "number" + }, + "disk_total": { + "title": "Disk Total", + "type": "integer" + }, + "disk_usage_percentage": { + "title": "Disk Usage Percentage", + "type": "number" + } + }, + "required": [ + "uptime", + "logical_cpus", + "physical_cpus", + "cpu_percentages", + "service_cpu_percentage", + "memory_total", + "memory_usage_percentage", + "service_memory_percentage", + "disk_total", + "disk_usage_percentage" + ], + "title": "IntersectCoreStatus", + "type": "object" + } + }, "messageTraits": { "commonHeaders": { "userspaceHeaders": { - "$defs": { - "IntersectDataHandler": { - "description": "What data transfer type do you want to use for handling the request/response?\n\nDefault: MESSAGE", - "enum": [ - 0, - 1 - ], - "title": "IntersectDataHandler", - "type": "integer" - } - }, - "description": "Matches the current header definition for INTERSECT messages.\n\nALL messages should contain this header.", + "description": "ALL request/response/command messages must contain this header.\n\nWe do not include the content type of the message in the header, it is handled separately.", "properties": { + "message_id": { + "description": "Unique message ID", + "title": "Message Id", + "type": "string" + }, + "campaign_id": { + "description": "ID associated with a campaign", + "title": "Campaign Id", + "type": "string" + }, + "request_id": { + "description": "ID associated with a specific request message and response message sequence", + "title": "Request Id", + "type": "string" + }, "source": { "description": "source of the message", - "pattern": "([-a-z0-9]+\\.)*[-a-z0-9]", + "pattern": 
"^[a-z0-9][-a-z0-9.]*[-a-z0-9]$", "title": "Source", "type": "string" }, "destination": { "description": "destination of the message", - "pattern": "([-a-z0-9]+\\.)*[-a-z0-9]", + "pattern": "^[a-z0-9][-a-z0-9.]*[-a-z0-9]$", "title": "Destination", "type": "string" }, "created_at": { "description": "the UTC timestamp of message creation", - "format": "date-time", "title": "Created At", "type": "string" }, @@ -93,54 +171,50 @@ "title": "Sdk Version", "type": "string" }, + "operation_id": { + "description": "Name of capability and operation we want to call, in the format ${CAPABILITY_NAME}.${FUNCTION_NAME}", + "title": "Operation Id", + "type": "string" + }, "data_handler": { - "allOf": [ - { - "$ref": "#/components/messageTraits/commonHeaders/userspaceHeaders/$defs/IntersectDataHandler" - } - ], - "default": 0, - "description": "Code signifying where data is stored." + "description": "Code signifying where data is stored.", + "type": "string" }, "has_error": { - "default": false, "description": "If this value is True, the payload will contain the error message (a string)", "title": "Has Error", - "type": "boolean" + "type": "string" } }, "required": [ + "message_id", + "campaign_id", + "request_id", "source", "destination", "created_at", - "sdk_version" + "sdk_version", + "operation_id" ], - "title": "UserspaceMessageHeader", + "title": "UserspaceMessageHeaders", "type": "object" }, "eventHeaders": { - "$defs": { - "IntersectDataHandler": { - "description": "What data transfer type do you want to use for handling the request/response?\n\nDefault: MESSAGE", - "enum": [ - 0, - 1 - ], - "title": "IntersectDataHandler", - "type": "integer" - } - }, - "description": "Matches the current header definition for INTERSECT messages.\n\nALL messages should contain this header.", + "description": "ALL event messages must include this header.\n\nWe do not include the content type of the message in the header, it is handled separately.", "properties": { + "message_id": { + "description": "Unique message ID", + "title": "Message Id", + "type": "string" + }, "source": { "description": "source of the message", - "pattern": "([-a-z0-9]+\\.)*[-a-z0-9]", + "pattern": "^[a-z0-9][-a-z0-9.]*[-a-z0-9]$", "title": "Source", "type": "string" }, "created_at": { "description": "the UTC timestamp of message creation", - "format": "date-time", "title": "Created At", "type": "string" }, @@ -151,23 +225,28 @@ "type": "string" }, "data_handler": { - "allOf": [ - { - "$ref": "#/components/messageTraits/commonHeaders/eventHeaders/$defs/IntersectDataHandler" - } - ], - "default": 0, - "description": "Code signifying where data is stored." 
+ "description": "Code signifying where data is stored.", + "type": "string" + }, + "capability_name": { + "description": "The name of the capability which emitted the event originally.", + "pattern": "^[a-zA-Z0-9]\\w*$", + "title": "Capability Name", + "type": "string" }, "event_name": { + "description": "The name of the event that was emitted, namespaced to the capability.", + "pattern": "^[a-zA-Z0-9]\\w*$", "title": "Event Name", "type": "string" } }, "required": [ + "message_id", "source", "created_at", "sdk_version", + "capability_name", "event_name" ], "title": "EventMessageHeaders", diff --git a/examples/4_service_to_service/example_2_service.py b/examples/4_service_to_service/example_2_service.py index 18ea230..d48c6e0 100644 --- a/examples/4_service_to_service/example_2_service.py +++ b/examples/4_service_to_service/example_2_service.py @@ -39,7 +39,7 @@ def test_service(self, text: str) -> str: 'username': 'intersect_username', 'password': 'intersect_password', 'port': 1883, - 'protocol': 'mqtt3.1.1', + 'protocol': 'mqtt5.0', }, ], } diff --git a/examples/4_service_to_service/example_2_service_schema.json b/examples/4_service_to_service/example_2_service_schema.json index 237bf97..11d6b82 100644 --- a/examples/4_service_to_service/example_2_service_schema.json +++ b/examples/4_service_to_service/example_2_service_schema.json @@ -8,8 +8,16 @@ }, "defaultContentType": "application/json", "capabilities": { + "intersect_sdk": { + "endpoints": {}, + "events": {}, + "status": { + "$ref": "#/components/schemas/IntersectCoreStatus" + }, + "description": "Core capability present in every INTERSECT Service.\n\nThis may be called explicitly by any Client interacting with any SDK service. Set the operation to be \"intersect_sdk.\"." + }, "ServiceTwo": { - "channels": { + "endpoints": { "test_service": { "publish": { "message": { @@ -19,7 +27,8 @@ "$ref": "#/components/messageTraits/commonHeaders" }, "payload": { - "type": "string" + "type": "string", + "title": "test_service" } }, "description": "Returns the text given along with acknowledgement." @@ -32,54 +41,122 @@ "$ref": "#/components/messageTraits/commonHeaders" }, "payload": { - "type": "string" + "type": "string", + "title": "text" } }, "description": "Returns the text given along with acknowledgement." - }, - "events": [] + } } }, + "events": {}, + "status": { + "type": "string", + "title": "Status" + }, "description": "Service 2 Capability." 
} }, - "events": {}, - "status": { - "type": "string" - }, "components": { - "schemas": {}, + "schemas": { + "IntersectCoreStatus": { + "description": "Core status information about the INTERSECT-SDK Service as a whole.", + "properties": { + "uptime": { + "format": "duration", + "title": "Uptime", + "type": "string" + }, + "logical_cpus": { + "title": "Logical CPUs", + "type": "integer" + }, + "physical_cpus": { + "title": "Physical CPUs", + "type": "integer" + }, + "cpu_percentages": { + "items": { + "type": "number" + }, + "title": "CPU Percentages", + "type": "array" + }, + "service_cpu_percentage": { + "title": "Service CPU Usage Percentage", + "type": "number" + }, + "memory_total": { + "title": "Memory Total", + "type": "integer" + }, + "memory_usage_percentage": { + "title": "Memory Usage Percentage", + "type": "number" + }, + "service_memory_percentage": { + "title": "Service Memory Usage Percentage", + "type": "number" + }, + "disk_total": { + "title": "Disk Total", + "type": "integer" + }, + "disk_usage_percentage": { + "title": "Disk Usage Percentage", + "type": "number" + } + }, + "required": [ + "uptime", + "logical_cpus", + "physical_cpus", + "cpu_percentages", + "service_cpu_percentage", + "memory_total", + "memory_usage_percentage", + "service_memory_percentage", + "disk_total", + "disk_usage_percentage" + ], + "title": "IntersectCoreStatus", + "type": "object" + } + }, "messageTraits": { "commonHeaders": { "userspaceHeaders": { - "$defs": { - "IntersectDataHandler": { - "description": "What data transfer type do you want to use for handling the request/response?\n\nDefault: MESSAGE", - "enum": [ - 0, - 1 - ], - "title": "IntersectDataHandler", - "type": "integer" - } - }, - "description": "Matches the current header definition for INTERSECT messages.\n\nALL messages should contain this header.", + "description": "ALL request/response/command messages must contain this header.\n\nWe do not include the content type of the message in the header, it is handled separately.", "properties": { + "message_id": { + "description": "Unique message ID", + "title": "Message Id", + "type": "string" + }, + "campaign_id": { + "description": "ID associated with a campaign", + "title": "Campaign Id", + "type": "string" + }, + "request_id": { + "description": "ID associated with a specific request message and response message sequence", + "title": "Request Id", + "type": "string" + }, "source": { "description": "source of the message", - "pattern": "([-a-z0-9]+\\.)*[-a-z0-9]", + "pattern": "^[a-z0-9][-a-z0-9.]*[-a-z0-9]$", "title": "Source", "type": "string" }, "destination": { "description": "destination of the message", - "pattern": "([-a-z0-9]+\\.)*[-a-z0-9]", + "pattern": "^[a-z0-9][-a-z0-9.]*[-a-z0-9]$", "title": "Destination", "type": "string" }, "created_at": { "description": "the UTC timestamp of message creation", - "format": "date-time", "title": "Created At", "type": "string" }, @@ -89,54 +166,50 @@ "title": "Sdk Version", "type": "string" }, + "operation_id": { + "description": "Name of capability and operation we want to call, in the format ${CAPABILITY_NAME}.${FUNCTION_NAME}", + "title": "Operation Id", + "type": "string" + }, "data_handler": { - "allOf": [ - { - "$ref": "#/components/messageTraits/commonHeaders/userspaceHeaders/$defs/IntersectDataHandler" - } - ], - "default": 0, - "description": "Code signifying where data is stored." 
+ "description": "Code signifying where data is stored.", + "type": "string" }, "has_error": { - "default": false, "description": "If this value is True, the payload will contain the error message (a string)", "title": "Has Error", - "type": "boolean" + "type": "string" } }, "required": [ + "message_id", + "campaign_id", + "request_id", "source", "destination", "created_at", - "sdk_version" + "sdk_version", + "operation_id" ], - "title": "UserspaceMessageHeader", + "title": "UserspaceMessageHeaders", "type": "object" }, "eventHeaders": { - "$defs": { - "IntersectDataHandler": { - "description": "What data transfer type do you want to use for handling the request/response?\n\nDefault: MESSAGE", - "enum": [ - 0, - 1 - ], - "title": "IntersectDataHandler", - "type": "integer" - } - }, - "description": "Matches the current header definition for INTERSECT messages.\n\nALL messages should contain this header.", + "description": "ALL event messages must include this header.\n\nWe do not include the content type of the message in the header, it is handled separately.", "properties": { + "message_id": { + "description": "Unique message ID", + "title": "Message Id", + "type": "string" + }, "source": { "description": "source of the message", - "pattern": "([-a-z0-9]+\\.)*[-a-z0-9]", + "pattern": "^[a-z0-9][-a-z0-9.]*[-a-z0-9]$", "title": "Source", "type": "string" }, "created_at": { "description": "the UTC timestamp of message creation", - "format": "date-time", "title": "Created At", "type": "string" }, @@ -147,23 +220,28 @@ "type": "string" }, "data_handler": { - "allOf": [ - { - "$ref": "#/components/messageTraits/commonHeaders/eventHeaders/$defs/IntersectDataHandler" - } - ], - "default": 0, - "description": "Code signifying where data is stored." + "description": "Code signifying where data is stored.", + "type": "string" + }, + "capability_name": { + "description": "The name of the capability which emitted the event originally.", + "pattern": "^[a-zA-Z0-9]\\w*$", + "title": "Capability Name", + "type": "string" }, "event_name": { + "description": "The name of the event that was emitted, namespaced to the capability.", + "pattern": "^[a-zA-Z0-9]\\w*$", "title": "Event Name", "type": "string" } }, "required": [ + "message_id", "source", "created_at", "sdk_version", + "capability_name", "event_name" ], "title": "EventMessageHeaders", diff --git a/examples/4_service_to_service/example_client.py b/examples/4_service_to_service/example_client.py index 685e765..4b61981 100644 --- a/examples/4_service_to_service/example_client.py +++ b/examples/4_service_to_service/example_client.py @@ -8,11 +8,12 @@ import logging from intersect_sdk import ( - INTERSECT_JSON_VALUE, + INTERSECT_RESPONSE_VALUE, IntersectClient, IntersectClientCallback, IntersectClientConfig, IntersectDirectMessageParams, + IntersectEventMessageParams, default_intersect_lifecycle_loop, ) @@ -28,14 +29,18 @@ def __init__(self) -> None: self.got_first_event = False def event_callback( - self, _source: str, _operation: str, _event_name: str, payload: INTERSECT_JSON_VALUE + self, + _source: str, + _capability_name: str, + _event_name: str, + payload: INTERSECT_RESPONSE_VALUE, ) -> None: """This simply prints the event from Service 1 to your console. Params: source: the source of the response message. - operation: the name of the function we called in the original message. - _has_error: Boolean value which represents an error. + capability_name: the name of the capability which emitted the event. + event_name: Name of the event. 
payload: Value of the response from the Service. """ print(payload) @@ -53,7 +58,7 @@ def event_callback( 'username': 'intersect_username', 'password': 'intersect_password', 'port': 1883, - 'protocol': 'mqtt3.1.1', + 'protocol': 'mqtt5.0', }, ], } @@ -67,12 +72,17 @@ def event_callback( payload='Kicking off the example!', ) ] + events = [ + IntersectEventMessageParams( + hierarchy='example-organization.example-facility.example-system.example-subsystem.service-one', + capability_name='ServiceOne', + event_name='response_event', + ) + ] config = IntersectClientConfig( initial_message_event_config=IntersectClientCallback( messages_to_send=initial_messages, - services_to_start_listening_for_events=[ - 'example-organization.example-facility.example-system.example-subsystem.service-one' - ], + services_to_start_listening_for_events=events, ), **from_config_file, ) diff --git a/examples/4_service_to_service_events/example_client.py b/examples/4_service_to_service_events/example_client.py index 1f8198f..49ea702 100644 --- a/examples/4_service_to_service_events/example_client.py +++ b/examples/4_service_to_service_events/example_client.py @@ -7,10 +7,11 @@ import logging from intersect_sdk import ( - INTERSECT_JSON_VALUE, + INTERSECT_RESPONSE_VALUE, IntersectClient, IntersectClientCallback, IntersectClientConfig, + IntersectEventMessageParams, default_intersect_lifecycle_loop, ) @@ -29,14 +30,18 @@ def __init__(self) -> None: self.got_first_event = False def event_callback( - self, _source: str, _operation: str, _event_name: str, payload: INTERSECT_JSON_VALUE + self, + _source: str, + _capability_name: str, + _event_name: str, + payload: INTERSECT_RESPONSE_VALUE, ) -> None: """This simply prints the event from the exposed service to your console. Params: source: the source of the response message. - operation: the name of the function we called in the original message. - _has_error: Boolean value which represents an error. + capability_name: the name of the capability which emitted the event. + event_name: Name of the event. payload: Value of the response from the Service. 
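The client-side pattern shown in these example clients can be condensed as follows. This is a minimal sketch using only names visible in the examples (`INTERSECT_RESPONSE_VALUE`, `IntersectClientCallback`, `IntersectEventMessageParams`); the hierarchy, capability, and event names are the example values, and how the callback and config are handed to `IntersectClient` is unchanged and omitted here.

```python
# Sketch only: event callbacks receive the emitting capability's name (not an
# operation name), and event subscriptions are expressed per capability/event
# rather than as bare hierarchy strings.
from intersect_sdk import (
    INTERSECT_RESPONSE_VALUE,
    IntersectClientCallback,
    IntersectEventMessageParams,
)


def event_callback(
    _source: str,
    _capability_name: str,
    _event_name: str,
    payload: INTERSECT_RESPONSE_VALUE,
) -> None:
    """Print the event payload; signature matches the examples in this diff."""
    print(payload)


events = [
    IntersectEventMessageParams(
        hierarchy='example-organization.example-facility.example-system.example-subsystem.service-one',
        capability_name='ServiceOne',
        event_name='response_event',
    )
]

# Passed as initial_message_event_config to IntersectClientConfig, as in the
# example clients above.
callback_config = IntersectClientCallback(
    services_to_start_listening_for_events=events,
)
```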
""" print(payload) @@ -60,11 +65,16 @@ def event_callback( } # Listen for an event on the exposed service + events = [ + IntersectEventMessageParams( + hierarchy='example-organization.example-facility.example-system.example-subsystem.exposed-service', + capability_name='ExposedServiceCapability', + event_name='exposed_service_event', + ) + ] config = IntersectClientConfig( initial_message_event_config=IntersectClientCallback( - services_to_start_listening_for_events=[ - 'example-organization.example-facility.example-system.example-subsystem.exposed-service' - ], + services_to_start_listening_for_events=events, ), **from_config_file, ) diff --git a/examples/4_service_to_service_events/exposed_service.py b/examples/4_service_to_service_events/exposed_service.py index 73a46e4..4d5182d 100644 --- a/examples/4_service_to_service_events/exposed_service.py +++ b/examples/4_service_to_service_events/exposed_service.py @@ -5,6 +5,7 @@ """ import logging +from typing import ClassVar from intersect_sdk import ( HierarchyConfig, @@ -13,7 +14,6 @@ IntersectService, IntersectServiceConfig, default_intersect_lifecycle_loop, - intersect_event, ) logging.basicConfig(level=logging.INFO) @@ -23,7 +23,10 @@ class ExposedServiceCapabilityImplementation(IntersectBaseCapabilityImplementation): """Exposed service capability.""" - intersect_sdk_capability_name = 'ExposedService' + intersect_sdk_capability_name = 'ExposedServiceCapability' + intersect_sdk_events: ClassVar[dict[str, IntersectEventDefinition]] = { + 'exposed_service_event': IntersectEventDefinition(event_type=str), + } def on_service_startup(self) -> None: """This function will get called when starting up the Service. @@ -44,13 +47,13 @@ def on_service_startup(self) -> None: subsystem='example-subsystem', service='internal-service', ), + 'InternalServiceCapability', 'internal_service_event', self.on_internal_service_event, ) - @intersect_event(events={'exposed_service_event': IntersectEventDefinition(event_type=str)}) def on_internal_service_event( - self, source: str, _operation: str, event_name: str, payload: str + self, source: str, _capability_name: str, event_name: str, payload: str ) -> None: """When we get an event back from the internal_service, we will emit our own event.""" self.intersect_sdk_emit_event( @@ -83,5 +86,5 @@ def on_internal_service_event( ) capability = ExposedServiceCapabilityImplementation() service = IntersectService([capability], config) - logger.info('Starting Service 1, use Ctrl+C to exit.') + logger.info('Starting Exposed Service, use Ctrl+C to exit.') default_intersect_lifecycle_loop(service, post_startup_callback=capability.on_service_startup) diff --git a/examples/4_service_to_service_events/exposed_service_schema.json b/examples/4_service_to_service_events/exposed_service_schema.json index cded062..66601f7 100644 --- a/examples/4_service_to_service_events/exposed_service_schema.json +++ b/examples/4_service_to_service_events/exposed_service_schema.json @@ -8,52 +8,128 @@ }, "defaultContentType": "application/json", "capabilities": { - "ExposedService": { - "channels": {}, + "intersect_sdk": { + "endpoints": {}, + "events": {}, + "status": { + "$ref": "#/components/schemas/IntersectCoreStatus" + }, + "description": "Core capability present in every INTERSECT Service.\n\nThis may be called explicitly by any Client interacting with any SDK service. Set the operation to be \"intersect_sdk.\"." 
+ }, + "ExposedServiceCapability": { + "endpoints": {}, + "events": { + "exposed_service_event": { + "type": "string", + "title": "exposed_service_event" + } + }, + "status": { + "type": "null" + }, "description": "Exposed service capability." } }, - "events": { - "exposed_service_event": { - "type": "string" - } - }, - "status": { - "type": "null" - }, "components": { - "schemas": {}, + "schemas": { + "IntersectCoreStatus": { + "description": "Core status information about the INTERSECT-SDK Service as a whole.", + "properties": { + "uptime": { + "format": "duration", + "title": "Uptime", + "type": "string" + }, + "logical_cpus": { + "title": "Logical CPUs", + "type": "integer" + }, + "physical_cpus": { + "title": "Physical CPUs", + "type": "integer" + }, + "cpu_percentages": { + "items": { + "type": "number" + }, + "title": "CPU Percentages", + "type": "array" + }, + "service_cpu_percentage": { + "title": "Service CPU Usage Percentage", + "type": "number" + }, + "memory_total": { + "title": "Memory Total", + "type": "integer" + }, + "memory_usage_percentage": { + "title": "Memory Usage Percentage", + "type": "number" + }, + "service_memory_percentage": { + "title": "Service Memory Usage Percentage", + "type": "number" + }, + "disk_total": { + "title": "Disk Total", + "type": "integer" + }, + "disk_usage_percentage": { + "title": "Disk Usage Percentage", + "type": "number" + } + }, + "required": [ + "uptime", + "logical_cpus", + "physical_cpus", + "cpu_percentages", + "service_cpu_percentage", + "memory_total", + "memory_usage_percentage", + "service_memory_percentage", + "disk_total", + "disk_usage_percentage" + ], + "title": "IntersectCoreStatus", + "type": "object" + } + }, "messageTraits": { "commonHeaders": { "userspaceHeaders": { - "$defs": { - "IntersectDataHandler": { - "description": "What data transfer type do you want to use for handling the request/response?\n\nDefault: MESSAGE", - "enum": [ - 0, - 1 - ], - "title": "IntersectDataHandler", - "type": "integer" - } - }, - "description": "Matches the current header definition for INTERSECT messages.\n\nALL messages should contain this header.", + "description": "ALL request/response/command messages must contain this header.\n\nWe do not include the content type of the message in the header, it is handled separately.", "properties": { + "message_id": { + "description": "Unique message ID", + "title": "Message Id", + "type": "string" + }, + "campaign_id": { + "description": "ID associated with a campaign", + "title": "Campaign Id", + "type": "string" + }, + "request_id": { + "description": "ID associated with a specific request message and response message sequence", + "title": "Request Id", + "type": "string" + }, "source": { "description": "source of the message", - "pattern": "([-a-z0-9]+\\.)*[-a-z0-9]", + "pattern": "^[a-z0-9][-a-z0-9.]*[-a-z0-9]$", "title": "Source", "type": "string" }, "destination": { "description": "destination of the message", - "pattern": "([-a-z0-9]+\\.)*[-a-z0-9]", + "pattern": "^[a-z0-9][-a-z0-9.]*[-a-z0-9]$", "title": "Destination", "type": "string" }, "created_at": { "description": "the UTC timestamp of message creation", - "format": "date-time", "title": "Created At", "type": "string" }, @@ -63,54 +139,50 @@ "title": "Sdk Version", "type": "string" }, + "operation_id": { + "description": "Name of capability and operation we want to call, in the format ${CAPABILITY_NAME}.${FUNCTION_NAME}", + "title": "Operation Id", + "type": "string" + }, "data_handler": { - "allOf": [ - { - "$ref": 
"#/components/messageTraits/commonHeaders/userspaceHeaders/$defs/IntersectDataHandler" - } - ], - "default": 0, - "description": "Code signifying where data is stored." + "description": "Code signifying where data is stored.", + "type": "string" }, "has_error": { - "default": false, "description": "If this value is True, the payload will contain the error message (a string)", "title": "Has Error", - "type": "boolean" + "type": "string" } }, "required": [ + "message_id", + "campaign_id", + "request_id", "source", "destination", "created_at", - "sdk_version" + "sdk_version", + "operation_id" ], - "title": "UserspaceMessageHeader", + "title": "UserspaceMessageHeaders", "type": "object" }, "eventHeaders": { - "$defs": { - "IntersectDataHandler": { - "description": "What data transfer type do you want to use for handling the request/response?\n\nDefault: MESSAGE", - "enum": [ - 0, - 1 - ], - "title": "IntersectDataHandler", - "type": "integer" - } - }, - "description": "Matches the current header definition for INTERSECT messages.\n\nALL messages should contain this header.", + "description": "ALL event messages must include this header.\n\nWe do not include the content type of the message in the header, it is handled separately.", "properties": { + "message_id": { + "description": "Unique message ID", + "title": "Message Id", + "type": "string" + }, "source": { "description": "source of the message", - "pattern": "([-a-z0-9]+\\.)*[-a-z0-9]", + "pattern": "^[a-z0-9][-a-z0-9.]*[-a-z0-9]$", "title": "Source", "type": "string" }, "created_at": { "description": "the UTC timestamp of message creation", - "format": "date-time", "title": "Created At", "type": "string" }, @@ -121,23 +193,28 @@ "type": "string" }, "data_handler": { - "allOf": [ - { - "$ref": "#/components/messageTraits/commonHeaders/eventHeaders/$defs/IntersectDataHandler" - } - ], - "default": 0, - "description": "Code signifying where data is stored." 
+ "description": "Code signifying where data is stored.", + "type": "string" + }, + "capability_name": { + "description": "The name of the capability which emitted the event originally.", + "pattern": "^[a-zA-Z0-9]\\w*$", + "title": "Capability Name", + "type": "string" }, "event_name": { + "description": "The name of the event that was emitted, namespaced to the capability.", + "pattern": "^[a-zA-Z0-9]\\w*$", "title": "Event Name", "type": "string" } }, "required": [ + "message_id", "source", "created_at", "sdk_version", + "capability_name", "event_name" ], "title": "EventMessageHeaders", diff --git a/examples/4_service_to_service_events/internal_service.py b/examples/4_service_to_service_events/internal_service.py index fdfc9c4..161d01c 100644 --- a/examples/4_service_to_service_events/internal_service.py +++ b/examples/4_service_to_service_events/internal_service.py @@ -7,6 +7,7 @@ import logging import threading import time +from typing import ClassVar from intersect_sdk import ( HierarchyConfig, @@ -15,7 +16,6 @@ IntersectService, IntersectServiceConfig, default_intersect_lifecycle_loop, - intersect_event, ) logging.basicConfig(level=logging.INFO) @@ -25,7 +25,10 @@ class InternalServiceCapabilityImplementation(IntersectBaseCapabilityImplementation): """Internal service capability.""" - intersect_sdk_capability_name = 'InternalService' + intersect_sdk_capability_name = 'InternalServiceCapability' + intersect_sdk_events: ClassVar[dict[str, IntersectEventDefinition]] = { + 'internal_service_event': IntersectEventDefinition(event_type=str), + } def after_service_startup(self) -> None: """Called after service startup.""" @@ -34,7 +37,6 @@ def after_service_startup(self) -> None: ) self.thread.start() - @intersect_event(events={'internal_service_event': IntersectEventDefinition(event_type=str)}) def internal_service_event_generator(self) -> str: """Emits a periodic internal_service_event event.""" while True: @@ -66,7 +68,7 @@ def internal_service_event_generator(self) -> str: ) capability = InternalServiceCapabilityImplementation() service = IntersectService([capability], config) - logger.info('Starting Service 2, use Ctrl+C to exit.') + logger.info('Starting Internal Service, use Ctrl+C to exit.') default_intersect_lifecycle_loop( service, post_startup_callback=capability.after_service_startup, diff --git a/examples/4_service_to_service_events/internal_service_schema.json b/examples/4_service_to_service_events/internal_service_schema.json index 60a9e30..9db426c 100644 --- a/examples/4_service_to_service_events/internal_service_schema.json +++ b/examples/4_service_to_service_events/internal_service_schema.json @@ -8,52 +8,128 @@ }, "defaultContentType": "application/json", "capabilities": { - "InternalService": { - "channels": {}, + "intersect_sdk": { + "endpoints": {}, + "events": {}, + "status": { + "$ref": "#/components/schemas/IntersectCoreStatus" + }, + "description": "Core capability present in every INTERSECT Service.\n\nThis may be called explicitly by any Client interacting with any SDK service. Set the operation to be \"intersect_sdk.\"." + }, + "InternalServiceCapability": { + "endpoints": {}, + "events": { + "internal_service_event": { + "type": "string", + "title": "internal_service_event" + } + }, + "status": { + "type": "null" + }, "description": "Internal service capability." 
} }, - "events": { - "internal_service_event": { - "type": "string" - } - }, - "status": { - "type": "null" - }, "components": { - "schemas": {}, + "schemas": { + "IntersectCoreStatus": { + "description": "Core status information about the INTERSECT-SDK Service as a whole.", + "properties": { + "uptime": { + "format": "duration", + "title": "Uptime", + "type": "string" + }, + "logical_cpus": { + "title": "Logical CPUs", + "type": "integer" + }, + "physical_cpus": { + "title": "Physical CPUs", + "type": "integer" + }, + "cpu_percentages": { + "items": { + "type": "number" + }, + "title": "CPU Percentages", + "type": "array" + }, + "service_cpu_percentage": { + "title": "Service CPU Usage Percentage", + "type": "number" + }, + "memory_total": { + "title": "Memory Total", + "type": "integer" + }, + "memory_usage_percentage": { + "title": "Memory Usage Percentage", + "type": "number" + }, + "service_memory_percentage": { + "title": "Service Memory Usage Percentage", + "type": "number" + }, + "disk_total": { + "title": "Disk Total", + "type": "integer" + }, + "disk_usage_percentage": { + "title": "Disk Usage Percentage", + "type": "number" + } + }, + "required": [ + "uptime", + "logical_cpus", + "physical_cpus", + "cpu_percentages", + "service_cpu_percentage", + "memory_total", + "memory_usage_percentage", + "service_memory_percentage", + "disk_total", + "disk_usage_percentage" + ], + "title": "IntersectCoreStatus", + "type": "object" + } + }, "messageTraits": { "commonHeaders": { "userspaceHeaders": { - "$defs": { - "IntersectDataHandler": { - "description": "What data transfer type do you want to use for handling the request/response?\n\nDefault: MESSAGE", - "enum": [ - 0, - 1 - ], - "title": "IntersectDataHandler", - "type": "integer" - } - }, - "description": "Matches the current header definition for INTERSECT messages.\n\nALL messages should contain this header.", + "description": "ALL request/response/command messages must contain this header.\n\nWe do not include the content type of the message in the header, it is handled separately.", "properties": { + "message_id": { + "description": "Unique message ID", + "title": "Message Id", + "type": "string" + }, + "campaign_id": { + "description": "ID associated with a campaign", + "title": "Campaign Id", + "type": "string" + }, + "request_id": { + "description": "ID associated with a specific request message and response message sequence", + "title": "Request Id", + "type": "string" + }, "source": { "description": "source of the message", - "pattern": "([-a-z0-9]+\\.)*[-a-z0-9]", + "pattern": "^[a-z0-9][-a-z0-9.]*[-a-z0-9]$", "title": "Source", "type": "string" }, "destination": { "description": "destination of the message", - "pattern": "([-a-z0-9]+\\.)*[-a-z0-9]", + "pattern": "^[a-z0-9][-a-z0-9.]*[-a-z0-9]$", "title": "Destination", "type": "string" }, "created_at": { "description": "the UTC timestamp of message creation", - "format": "date-time", "title": "Created At", "type": "string" }, @@ -63,54 +139,50 @@ "title": "Sdk Version", "type": "string" }, + "operation_id": { + "description": "Name of capability and operation we want to call, in the format ${CAPABILITY_NAME}.${FUNCTION_NAME}", + "title": "Operation Id", + "type": "string" + }, "data_handler": { - "allOf": [ - { - "$ref": "#/components/messageTraits/commonHeaders/userspaceHeaders/$defs/IntersectDataHandler" - } - ], - "default": 0, - "description": "Code signifying where data is stored." 
+ "description": "Code signifying where data is stored.", + "type": "string" }, "has_error": { - "default": false, "description": "If this value is True, the payload will contain the error message (a string)", "title": "Has Error", - "type": "boolean" + "type": "string" } }, "required": [ + "message_id", + "campaign_id", + "request_id", "source", "destination", "created_at", - "sdk_version" + "sdk_version", + "operation_id" ], - "title": "UserspaceMessageHeader", + "title": "UserspaceMessageHeaders", "type": "object" }, "eventHeaders": { - "$defs": { - "IntersectDataHandler": { - "description": "What data transfer type do you want to use for handling the request/response?\n\nDefault: MESSAGE", - "enum": [ - 0, - 1 - ], - "title": "IntersectDataHandler", - "type": "integer" - } - }, - "description": "Matches the current header definition for INTERSECT messages.\n\nALL messages should contain this header.", + "description": "ALL event messages must include this header.\n\nWe do not include the content type of the message in the header, it is handled separately.", "properties": { + "message_id": { + "description": "Unique message ID", + "title": "Message Id", + "type": "string" + }, "source": { "description": "source of the message", - "pattern": "([-a-z0-9]+\\.)*[-a-z0-9]", + "pattern": "^[a-z0-9][-a-z0-9.]*[-a-z0-9]$", "title": "Source", "type": "string" }, "created_at": { "description": "the UTC timestamp of message creation", - "format": "date-time", "title": "Created At", "type": "string" }, @@ -121,23 +193,28 @@ "type": "string" }, "data_handler": { - "allOf": [ - { - "$ref": "#/components/messageTraits/commonHeaders/eventHeaders/$defs/IntersectDataHandler" - } - ], - "default": 0, - "description": "Code signifying where data is stored." + "description": "Code signifying where data is stored.", + "type": "string" + }, + "capability_name": { + "description": "The name of the capability which emitted the event originally.", + "pattern": "^[a-zA-Z0-9]\\w*$", + "title": "Capability Name", + "type": "string" }, "event_name": { + "description": "The name of the event that was emitted, namespaced to the capability.", + "pattern": "^[a-zA-Z0-9]\\w*$", "title": "Event Name", "type": "string" } }, "required": [ + "message_id", "source", "created_at", "sdk_version", + "capability_name", "event_name" ], "title": "EventMessageHeaders", diff --git a/examples/README.md b/examples/README.md index a4b31ba..27d1d8e 100644 --- a/examples/README.md +++ b/examples/README.md @@ -48,7 +48,7 @@ By default, all examples use MQTT. To test out AMQP, change this block: 'username': 'intersect_username', 'password': 'intersect_password', 'port': 1883, - 'protocol': 'mqtt3.1.1', + 'protocol': 'mqtt5.0', }, ], ``` diff --git a/examples/SUBMISSION_RULES.md b/examples/SUBMISSION_RULES.md index fbd8e45..12158d7 100644 --- a/examples/SUBMISSION_RULES.md +++ b/examples/SUBMISSION_RULES.md @@ -37,7 +37,7 @@ class HelloServiceCapabilityImplementation(IntersectBaseCapabilityImplementation but we do not use it here. The operation we are calling is `say_hello_to_name` , so the message being sent will need to have - an operationId of `say_hello_to_name`. The operation expects a string sent to it in the payload, + an operation_id of `say_hello_to_name`. The operation expects a string sent to it in the payload, and will send a string back in its own payload. 
""" @@ -97,4 +97,4 @@ if __name__ == '__main__': ## Validation -Validation should be expressed as declaratively as possible, through `pydantic`, `typing_extensions`, and `annotated_types` imports. Try to avoid implementing validation directly in the function body, as that cannot be represented in the generated schemas. +Validation should be expressed as declaratively as possible, through `pydantic`, `typing_extensions`, `typing`, and `annotated_types` imports. Try to avoid implementing validation directly in the function body, as that cannot be represented in the generated schemas. diff --git a/pdm.lock b/pdm.lock index 37f4cf7..6bd94e5 100644 --- a/pdm.lock +++ b/pdm.lock @@ -2,13 +2,13 @@ # It is not intended for manual editing. [metadata] -groups = ["default", "amqp", "docs", "lint", "test"] +groups = ["default", "docs", "lint", "test"] strategy = ["inherit_metadata"] lock_version = "4.5.0" -content_hash = "sha256:01496ba2183edd321b27ad6408825bff436ac2152d58364254ff713e086f3fa1" +content_hash = "sha256:6092f52e5a133eae863e91b78ffeb23090b7a9a7dc49a7eb6350ba3dc52cf132" [[metadata.targets]] -requires_python = ">=3.8.10,<4.0" +requires_python = "~=3.10" [[package]] name = "alabaster" @@ -71,11 +71,6 @@ files = [ {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win32.whl", hash = "sha256:603ca0aba86b1349b147cab91ae970c63118a0f30444d4bc80355937c950c082"}, {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:b2ef1c30440dbbcba7a5dc3e319408b59676e2e039e2ae11a8775ecf482b192f"}, {file = "argon2_cffi_bindings-21.2.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e415e3f62c8d124ee16018e491a009937f8cf7ebf5eb430ffc5de21b900dad93"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3b9ef65804859d335dc6b31582cad2c5166f0c3e7975f324d9ffaa34ee7e6583"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4966ef5848d820776f5f562a7d45fdd70c2f330c961d0d745b784034bd9f48d"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20ef543a89dee4db46a1a6e206cd015360e5a75822f76df533845c3cbaf72670"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed2937d286e2ad0cc79a7087d3c272832865f779430e0cc2b4f3718d3159b0cb"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5e00316dabdaea0b2dd82d141cc66889ced0cdcbfa599e8b471cf22c620c329a"}, ] [[package]] @@ -202,26 +197,6 @@ files = [ {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ef9528915df81b8f4c7612b19b8628214c65c9b7f74db2e34a646a0a2a0da2d4"}, {file = "cffi-1.17.0-cp313-cp313-win32.whl", hash = "sha256:70d2aa9fb00cf52034feac4b913181a6e10356019b18ef89bc7c12a283bf5f5a"}, {file = "cffi-1.17.0-cp313-cp313-win_amd64.whl", hash = "sha256:b7b6ea9e36d32582cda3465f54c4b454f62f23cb083ebc7a94e2ca6ef011c3a7"}, - {file = "cffi-1.17.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:964823b2fc77b55355999ade496c54dde161c621cb1f6eac61dc30ed1b63cd4c"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:516a405f174fd3b88829eabfe4bb296ac602d6a0f68e0d64d5ac9456194a5b7e"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dec6b307ce928e8e112a6bb9921a1cb00a0e14979bf28b98e084a4b8a742bd9b"}, - 
{file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4094c7b464cf0a858e75cd14b03509e84789abf7b79f8537e6a72152109c76e"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2404f3de742f47cb62d023f0ba7c5a916c9c653d5b368cc966382ae4e57da401"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3aa9d43b02a0c681f0bfbc12d476d47b2b2b6a3f9287f11ee42989a268a1833c"}, - {file = "cffi-1.17.0-cp38-cp38-win32.whl", hash = "sha256:0bb15e7acf8ab35ca8b24b90af52c8b391690ef5c4aec3d31f38f0d37d2cc499"}, - {file = "cffi-1.17.0-cp38-cp38-win_amd64.whl", hash = "sha256:93a7350f6706b31f457c1457d3a3259ff9071a66f312ae64dc024f049055f72c"}, - {file = "cffi-1.17.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1a2ddbac59dc3716bc79f27906c010406155031a1c801410f1bafff17ea304d2"}, - {file = "cffi-1.17.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6327b572f5770293fc062a7ec04160e89741e8552bf1c358d1a23eba68166759"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbc183e7bef690c9abe5ea67b7b60fdbca81aa8da43468287dae7b5c046107d4"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bdc0f1f610d067c70aa3737ed06e2726fd9d6f7bfee4a351f4c40b6831f4e82"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6d872186c1617d143969defeadac5a904e6e374183e07977eedef9c07c8953bf"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0d46ee4764b88b91f16661a8befc6bfb24806d885e27436fdc292ed7e6f6d058"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f76a90c345796c01d85e6332e81cab6d70de83b829cf1d9762d0a3da59c7932"}, - {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0e60821d312f99d3e1569202518dddf10ae547e799d75aef3bca3a2d9e8ee693"}, - {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:eb09b82377233b902d4c3fbeeb7ad731cdab579c6c6fda1f763cd779139e47c3"}, - {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:24658baf6224d8f280e827f0a50c46ad819ec8ba380a42448e24459daf809cf4"}, - {file = "cffi-1.17.0-cp39-cp39-win32.whl", hash = "sha256:0fdacad9e0d9fc23e519efd5ea24a70348305e8d7d85ecbb1a5fa66dc834e7fb"}, - {file = "cffi-1.17.0-cp39-cp39-win_amd64.whl", hash = "sha256:7cbc78dc018596315d4e7841c8c3a7ae31cc4d638c9b627f87d52e8abaaf2d29"}, {file = "cffi-1.17.0.tar.gz", hash = "sha256:f3157624b7558b914cb039fd1af735e5e8049a87c817cc215109ad1c8779df76"}, ] @@ -289,36 +264,6 @@ files = [ {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", 
hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, ] @@ -402,26 +347,6 @@ files = [ {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3"}, {file = "coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f"}, {file = "coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657"}, - {file = "coverage-7.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6db04803b6c7291985a761004e9060b2bca08da6d04f26a7f2294b8623a0c1a0"}, - {file = "coverage-7.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f1adfc8ac319e1a348af294106bc6a8458a0f1633cc62a1446aebc30c5fa186a"}, - {file = "coverage-7.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a95324a9de9650a729239daea117df21f4b9868ce32e63f8b650ebe6cef5595b"}, - {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b43c03669dc4618ec25270b06ecd3ee4fa94c7f9b3c14bae6571ca00ef98b0d3"}, - {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8929543a7192c13d177b770008bc4e8119f2e1f881d563fc6b6305d2d0ebe9de"}, - {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a09ece4a69cf399510c8ab25e0950d9cf2b42f7b3cb0374f95d2e2ff594478a6"}, - {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9054a0754de38d9dbd01a46621636689124d666bad1936d76c0341f7d71bf569"}, - {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0dbde0f4aa9a16fa4d754356a8f2e36296ff4d83994b2c9d8398aa32f222f989"}, - {file = "coverage-7.6.1-cp38-cp38-win32.whl", hash = "sha256:da511e6ad4f7323ee5702e6633085fb76c2f893aaf8ce4c51a0ba4fc07580ea7"}, - {file = "coverage-7.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:3f1156e3e8f2872197af3840d8ad307a9dd18e615dc64d9ee41696f287c57ad8"}, - {file = "coverage-7.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abd5fd0db5f4dc9289408aaf34908072f805ff7792632250dcb36dc591d24255"}, - {file = "coverage-7.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:547f45fa1a93154bd82050a7f3cddbc1a7a4dd2a9bf5cb7d06f4ae29fe94eaf8"}, - {file = "coverage-7.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:645786266c8f18a931b65bfcefdbf6952dd0dea98feee39bd188607a9d307ed2"}, - {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e0b2df163b8ed01d515807af24f63de04bebcecbd6c3bfeff88385789fdf75a"}, - {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:609b06f178fe8e9f89ef676532760ec0b4deea15e9969bf754b37f7c40326dbc"}, - {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:702855feff378050ae4f741045e19a32d57d19f3e0676d589df0575008ea5004"}, - {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2bdb062ea438f22d99cba0d7829c2ef0af1d768d1e4a4f528087224c90b132cb"}, - {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9c56863d44bd1c4fe2abb8a4d6f5371d197f1ac0ebdee542f07f35895fc07f36"}, - {file = "coverage-7.6.1-cp39-cp39-win32.whl", hash = "sha256:6e2cd258d7d927d09493c8df1ce9174ad01b381d4729a9d8d4e38670ca24774c"}, - {file = "coverage-7.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:06a737c882bd26d0d6ee7269b20b12f14a8704807a01056c80bb881a4b2ce6ca"}, {file = "coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df"}, {file = "coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d"}, ] @@ -488,26 +413,6 @@ files = [ {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3"}, {file = "coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f"}, {file = "coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657"}, - {file = "coverage-7.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6db04803b6c7291985a761004e9060b2bca08da6d04f26a7f2294b8623a0c1a0"}, - {file = "coverage-7.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f1adfc8ac319e1a348af294106bc6a8458a0f1633cc62a1446aebc30c5fa186a"}, - {file = "coverage-7.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a95324a9de9650a729239daea117df21f4b9868ce32e63f8b650ebe6cef5595b"}, - {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b43c03669dc4618ec25270b06ecd3ee4fa94c7f9b3c14bae6571ca00ef98b0d3"}, - {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8929543a7192c13d177b770008bc4e8119f2e1f881d563fc6b6305d2d0ebe9de"}, - {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a09ece4a69cf399510c8ab25e0950d9cf2b42f7b3cb0374f95d2e2ff594478a6"}, - {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9054a0754de38d9dbd01a46621636689124d666bad1936d76c0341f7d71bf569"}, - {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0dbde0f4aa9a16fa4d754356a8f2e36296ff4d83994b2c9d8398aa32f222f989"}, - {file = "coverage-7.6.1-cp38-cp38-win32.whl", hash = "sha256:da511e6ad4f7323ee5702e6633085fb76c2f893aaf8ce4c51a0ba4fc07580ea7"}, - {file = "coverage-7.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:3f1156e3e8f2872197af3840d8ad307a9dd18e615dc64d9ee41696f287c57ad8"}, - {file = "coverage-7.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:abd5fd0db5f4dc9289408aaf34908072f805ff7792632250dcb36dc591d24255"}, - {file = "coverage-7.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:547f45fa1a93154bd82050a7f3cddbc1a7a4dd2a9bf5cb7d06f4ae29fe94eaf8"}, - {file = "coverage-7.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645786266c8f18a931b65bfcefdbf6952dd0dea98feee39bd188607a9d307ed2"}, - {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e0b2df163b8ed01d515807af24f63de04bebcecbd6c3bfeff88385789fdf75a"}, - {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:609b06f178fe8e9f89ef676532760ec0b4deea15e9969bf754b37f7c40326dbc"}, - {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:702855feff378050ae4f741045e19a32d57d19f3e0676d589df0575008ea5004"}, - {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2bdb062ea438f22d99cba0d7829c2ef0af1d768d1e4a4f528087224c90b132cb"}, - {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9c56863d44bd1c4fe2abb8a4d6f5371d197f1ac0ebdee542f07f35895fc07f36"}, - {file = "coverage-7.6.1-cp39-cp39-win32.whl", hash = "sha256:6e2cd258d7d927d09493c8df1ce9174ad01b381d4729a9d8d4e38670ca24774c"}, - {file = "coverage-7.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:06a737c882bd26d0d6ee7269b20b12f14a8704807a01056c80bb881a4b2ce6ca"}, {file = "coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df"}, {file = "coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d"}, ] @@ -533,18 +438,6 @@ files = [ {file = "docutils-0.20.1.tar.gz", hash = "sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b"}, ] -[[package]] -name = "eval-type-backport" -version = "0.2.0" -requires_python = ">=3.8" -summary = "Like `typing._eval_type`, but lets older Python versions use newer typing features." 
-groups = ["default"] -marker = "python_version < \"3.10\"" -files = [ - {file = "eval_type_backport-0.2.0-py3-none-any.whl", hash = "sha256:ac2f73d30d40c5a30a80b8739a789d6bb5e49fdffa66d7912667e2015d9c9933"}, - {file = "eval_type_backport-0.2.0.tar.gz", hash = "sha256:68796cfbc7371ebf923f03bdf7bef415f3ec098aeced24e054b253a0e78f7b37"}, -] - [[package]] name = "exceptiongroup" version = "1.2.2" @@ -642,37 +535,6 @@ files = [ {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, ] -[[package]] -name = "importlib-metadata" -version = "8.2.0" -requires_python = ">=3.8" -summary = "Read metadata from Python packages" -groups = ["docs"] -marker = "python_version < \"3.10\"" -dependencies = [ - "typing-extensions>=3.6.4; python_version < \"3.8\"", - "zipp>=0.5", -] -files = [ - {file = "importlib_metadata-8.2.0-py3-none-any.whl", hash = "sha256:11901fa0c2f97919b288679932bb64febaeacf289d18ac84dd68cb2e74213369"}, - {file = "importlib_metadata-8.2.0.tar.gz", hash = "sha256:72e8d4399996132204f9a16dcc751af254a48f8d1b20b9ff0f98d4a8f901e73d"}, -] - -[[package]] -name = "importlib-resources" -version = "6.4.0" -requires_python = ">=3.8" -summary = "Read resources from Python packages" -groups = ["default"] -marker = "python_version < \"3.9\"" -dependencies = [ - "zipp>=3.1.0; python_version < \"3.10\"", -] -files = [ - {file = "importlib_resources-6.4.0-py3-none-any.whl", hash = "sha256:50d10f043df931902d4194ea07ec57960f66a80449ff867bfe782b4c486ba78c"}, - {file = "importlib_resources-6.4.0.tar.gz", hash = "sha256:cdb2b453b8046ca4e3798eb1d84f3cce1446a0e8e7b5ef4efb600f19fc398145"}, -] - [[package]] name = "iniconfig" version = "2.0.0" @@ -817,26 +679,6 @@ files = [ {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = 
"sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, ] @@ -885,16 +727,6 @@ files = [ {file = "mypy-1.11.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6e7184632d89d677973a14d00ae4d03214c8bc301ceefcdaf5c474866814c987"}, {file = "mypy-1.11.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3a66169b92452f72117e2da3a576087025449018afc2d8e9bfe5ffab865709ca"}, {file = "mypy-1.11.2-cp312-cp312-win_amd64.whl", hash = "sha256:969ea3ef09617aff826885a22ece0ddef69d95852cdad2f60c8bb06bf1f71f70"}, - {file = "mypy-1.11.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:37c7fa6121c1cdfcaac97ce3d3b5588e847aa79b580c1e922bb5d5d2902df19b"}, - {file = "mypy-1.11.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4a8a53bc3ffbd161b5b2a4fff2f0f1e23a33b0168f1c0778ec70e1a3d66deb86"}, - {file = "mypy-1.11.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2ff93107f01968ed834f4256bc1fc4475e2fecf6c661260066a985b52741ddce"}, - {file = "mypy-1.11.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:edb91dded4df17eae4537668b23f0ff6baf3707683734b6a818d5b9d0c0c31a1"}, - {file = "mypy-1.11.2-cp38-cp38-win_amd64.whl", hash = "sha256:ee23de8530d99b6db0573c4ef4bd8f39a2a6f9b60655bf7a1357e585a3486f2b"}, - {file = "mypy-1.11.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:801ca29f43d5acce85f8e999b1e431fb479cb02d0e11deb7d2abb56bdaf24fd6"}, - {file = "mypy-1.11.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:af8d155170fcf87a2afb55b35dc1a0ac21df4431e7d96717621962e4b9192e70"}, - {file = "mypy-1.11.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:f7821776e5c4286b6a13138cc935e2e9b6fde05e081bdebf5cdb2bb97c9df81d"}, - {file = "mypy-1.11.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:539c570477a96a4e6fb718b8d5c3e0c0eba1f485df13f86d2970c91f0673148d"}, - {file = "mypy-1.11.2-cp39-cp39-win_amd64.whl", hash = "sha256:3f14cd3d386ac4d05c5a39a51b84387403dadbd936e17cb35882134d4f8f0d24"}, {file = "mypy-1.11.2-py3-none-any.whl", hash = "sha256:b499bc07dbdcd3de92b0a8b29fdf592c111276f6a12fe29c30f6c417dd546d12"}, {file = "mypy-1.11.2.tar.gz", hash = "sha256:7f9993ad3e0ffdc95c2a14b66dee63729f021968bff8ad911867579c65d13a79"}, ] @@ -934,11 +766,13 @@ files = [ [[package]] name = "paho-mqtt" -version = "1.6.1" +version = "2.1.0" +requires_python = ">=3.7" summary = "MQTT version 5.0/3.1.1 client class" groups = ["default"] files = [ - {file = "paho-mqtt-1.6.1.tar.gz", hash = "sha256:2a8291c81623aec00372b5a85558a372c747cbca8e9934dfe218638b8eefc26f"}, + {file = "paho_mqtt-2.1.0-py3-none-any.whl", hash = "sha256:6db9ba9b34ed5bc6b6e3812718c7e06e2fd7444540df2455d2c51bd58808feee"}, + {file = "paho_mqtt-2.1.0.tar.gz", hash = "sha256:12d6e7511d4137555a3f6ea167ae846af2c7357b10bc6fa4f7c3968fc1723834"}, ] [[package]] @@ -946,24 +780,12 @@ name = "pika" version = "1.3.2" requires_python = ">=3.7" summary = "Pika Python AMQP Client Library" -groups = ["amqp"] +groups = ["default"] files = [ {file = "pika-1.3.2-py3-none-any.whl", hash = "sha256:0779a7c1fafd805672796085560d290213a465e4f6f76a6fb19e378d8041a14f"}, {file = "pika-1.3.2.tar.gz", hash = "sha256:b2a327ddddf8570b4965b3576ac77091b850262d34ce8c1d8cb4e4146aa4145f"}, ] -[[package]] -name = "pkgutil-resolve-name" -version = "1.3.10" -requires_python = ">=3.6" -summary = "Resolve a name to an object." -groups = ["default"] -marker = "python_version < \"3.9\"" -files = [ - {file = "pkgutil_resolve_name-1.3.10-py3-none-any.whl", hash = "sha256:ca27cc078d25c5ad71a9de0a7a330146c4e014c2462d9af19c6b828280649c5e"}, - {file = "pkgutil_resolve_name-1.3.10.tar.gz", hash = "sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174"}, -] - [[package]] name = "platformdirs" version = "4.2.2" @@ -1004,6 +826,23 @@ files = [ {file = "pre_commit-3.5.0.tar.gz", hash = "sha256:5804465c675b659b0862f07907f96295d490822a450c4c40e747d0b1c6ebcb32"}, ] +[[package]] +name = "psutil" +version = "7.0.0" +requires_python = ">=3.6" +summary = "Cross-platform lib for process and system monitoring in Python. NOTE: the syntax of this script MUST be kept compatible with Python 2.7." 
+groups = ["default"] +files = [ + {file = "psutil-7.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:101d71dc322e3cffd7cea0650b09b3d08b8e7c4109dd6809fe452dfd00e58b25"}, + {file = "psutil-7.0.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:39db632f6bb862eeccf56660871433e111b6ea58f2caea825571951d4b6aa3da"}, + {file = "psutil-7.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fcee592b4c6f146991ca55919ea3d1f8926497a713ed7faaf8225e174581e91"}, + {file = "psutil-7.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b1388a4f6875d7e2aff5c4ca1cc16c545ed41dd8bb596cefea80111db353a34"}, + {file = "psutil-7.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5f098451abc2828f7dc6b58d44b532b22f2088f4999a937557b603ce72b1993"}, + {file = "psutil-7.0.0-cp37-abi3-win32.whl", hash = "sha256:ba3fcef7523064a6c9da440fc4d6bd07da93ac726b5733c29027d7dc95b39d99"}, + {file = "psutil-7.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:4cf3d4eb1aa9b348dec30105c55cd9b7d4629285735a102beb4441e38db90553"}, + {file = "psutil-7.0.0.tar.gz", hash = "sha256:7be9c3eba38beccb6495ea33afd982a44074b78f28c434a1f51cc07fd315c456"}, +] + [[package]] name = "pycparser" version = "2.22" @@ -1036,10 +875,6 @@ files = [ {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f47888542a0633baff535a04726948e876bf1ed880fddb7c10a736fa99146ab3"}, {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e0e4a987d38cfc2e71b4a1b591bae4891eeabe5fa0f56154f576e26287bfdea"}, {file = "pycryptodome-3.20.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c18b381553638414b38705f07d1ef0a7cf301bc78a5f9bc17a957eb19446834b"}, - {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a60fedd2b37b4cb11ccb5d0399efe26db9e0dd149016c1cc6c8161974ceac2d6"}, - {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:405002eafad114a2f9a930f5db65feef7b53c4784495dd8758069b89baf68eab"}, - {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ab6ab0cb755154ad14e507d1df72de9897e99fd2d4922851a276ccc14f4f1a5"}, - {file = "pycryptodome-3.20.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:acf6e43fa75aca2d33e93409f2dafe386fe051818ee79ee8a3e21de9caa2ac9e"}, {file = "pycryptodome-3.20.0.tar.gz", hash = "sha256:09609209ed7de61c2b560cc5c8c4fbf892f8b15b1faf7e4cbffac97db1fffda7"}, ] @@ -1124,32 +959,6 @@ files = [ {file = "pydantic_core-2.27.2-cp313-cp313-win32.whl", hash = "sha256:1ebaf1d0481914d004a573394f4be3a7616334be70261007e47c2a6fe7e50130"}, {file = "pydantic_core-2.27.2-cp313-cp313-win_amd64.whl", hash = "sha256:953101387ecf2f5652883208769a79e48db18c6df442568a0b5ccd8c2723abee"}, {file = "pydantic_core-2.27.2-cp313-cp313-win_arm64.whl", hash = "sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b"}, - {file = "pydantic_core-2.27.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d3e8d504bdd3f10835468f29008d72fc8359d95c9c415ce6e767203db6127506"}, - {file = "pydantic_core-2.27.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:521eb9b7f036c9b6187f0b47318ab0d7ca14bd87f776240b90b21c1f4f149320"}, - {file = 
"pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85210c4d99a0114f5a9481b44560d7d1e35e32cc5634c656bc48e590b669b145"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d716e2e30c6f140d7560ef1538953a5cd1a87264c737643d481f2779fc247fe1"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f66d89ba397d92f840f8654756196d93804278457b5fbede59598a1f9f90b228"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:669e193c1c576a58f132e3158f9dfa9662969edb1a250c54d8fa52590045f046"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdbe7629b996647b99c01b37f11170a57ae675375b14b8c13b8518b8320ced5"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d262606bf386a5ba0b0af3b97f37c83d7011439e3dc1a9298f21efb292e42f1a"}, - {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cabb9bcb7e0d97f74df8646f34fc76fbf793b7f6dc2438517d7a9e50eee4f14d"}, - {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:d2d63f1215638d28221f664596b1ccb3944f6e25dd18cd3b86b0a4c408d5ebb9"}, - {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bca101c00bff0adb45a833f8451b9105d9df18accb8743b08107d7ada14bd7da"}, - {file = "pydantic_core-2.27.2-cp38-cp38-win32.whl", hash = "sha256:f6f8e111843bbb0dee4cb6594cdc73e79b3329b526037ec242a3e49012495b3b"}, - {file = "pydantic_core-2.27.2-cp38-cp38-win_amd64.whl", hash = "sha256:fd1aea04935a508f62e0d0ef1f5ae968774a32afc306fb8545e06f5ff5cdf3ad"}, - {file = "pydantic_core-2.27.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c10eb4f1659290b523af58fa7cffb452a61ad6ae5613404519aee4bfbf1df993"}, - {file = "pydantic_core-2.27.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef592d4bad47296fb11f96cd7dc898b92e795032b4894dfb4076cfccd43a9308"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c61709a844acc6bf0b7dce7daae75195a10aac96a596ea1b776996414791ede4"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c5f762659e47fdb7b16956c71598292f60a03aa92f8b6351504359dbdba6cf"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c9775e339e42e79ec99c441d9730fccf07414af63eac2f0e48e08fd38a64d76"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57762139821c31847cfb2df63c12f725788bd9f04bc2fb392790959b8f70f118"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d1e85068e818c73e048fe28cfc769040bb1f475524f4745a5dc621f75ac7630"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:097830ed52fd9e427942ff3b9bc17fab52913b2f50f2880dc4a5611446606a54"}, - {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:044a50963a614ecfae59bb1eaf7ea7efc4bc62f49ed594e18fa1e5d953c40e9f"}, - {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:4e0b4220ba5b40d727c7f879eac379b822eee5d8fff418e9d3381ee45b3b0362"}, - {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:5e4f4bb20d75e9325cc9696c6802657b58bc1dbbe3022f32cc2b2b632c3fbb96"}, - {file = "pydantic_core-2.27.2-cp39-cp39-win32.whl", hash = "sha256:cca63613e90d001b9f2f9a9ceb276c308bfa2a43fafb75c8031c4f66039e8c6e"}, - {file = "pydantic_core-2.27.2-cp39-cp39-win_amd64.whl", hash = "sha256:77d1bca19b0f7021b3a982e6f903dcd5b2b06076def36a652e3907f596e29f67"}, {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2bf14caea37e91198329b828eae1618c068dfb8ef17bb33287a7ad4b61ac314e"}, {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0cb791f5b45307caae8810c2023a184c74605ec3bcbb67d13846c28ff731ff8"}, {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:688d3fd9fcb71f41c4c015c023d12a79d1c4c0732ec9eb35d96e3388a120dcf3"}, @@ -1159,15 +968,6 @@ files = [ {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:0296abcb83a797db256b773f45773da397da75a08f5fcaef41f2044adec05f50"}, {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0d75070718e369e452075a6017fbf187f788e17ed67a3abd47fa934d001863d9"}, {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7e17b560be3c98a8e3aa66ce828bdebb9e9ac6ad5466fba92eb74c4c95cb1151"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c33939a82924da9ed65dab5a65d427205a73181d8098e79b6b426bdf8ad4e656"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:00bad2484fa6bda1e216e7345a798bd37c68fb2d97558edd584942aa41b7d278"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c817e2b40aba42bac6f457498dacabc568c3b7a986fc9ba7c8d9d260b71485fb"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:251136cdad0cb722e93732cb45ca5299fb56e1344a833640bf93b2803f8d1bfd"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2088237af596f0a524d3afc39ab3b036e8adb054ee57cbb1dcf8e09da5b29cc"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d4041c0b966a84b4ae7a09832eb691a35aec90910cd2dbe7a208de59be77965b"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:8083d4e875ebe0b864ffef72a4304827015cff328a1be6e22cc850753bfb122b"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f141ee28a0ad2123b6611b6ceff018039df17f32ada8b534e6aa039545a3efb2"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7d0c8399fcc1848491f00e0314bd59fb34a9c008761bcb422a057670c3f65e35"}, {file = "pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39"}, ] @@ -1230,17 +1030,6 @@ files = [ {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] -[[package]] -name = "pytz" -version = "2024.1" -summary = "World timezone definitions, modern and historical" -groups = ["docs"] -marker = "python_version < \"3.9\"" -files = [ - {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, - {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, -] - 
[[package]] name = "pyyaml" version = "6.0.2" @@ -1284,22 +1073,6 @@ files = [ {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, - {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, - {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, - {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, - {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, - {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, - {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, - {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, - {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, - {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, - {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, - {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, - {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, - {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] @@ -1432,32 +1205,6 @@ files = [ {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5a35df9f5548fd79cb2f52d27182108c3e6641a4feb0f39067911bf2adaa3e57"}, {file = "rpds_py-0.20.0-cp313-none-win32.whl", hash = "sha256:fd2d84f40633bc475ef2d5490b9c19543fbf18596dcb1b291e3a12ea5d722f7a"}, {file = "rpds_py-0.20.0-cp313-none-win_amd64.whl", hash = "sha256:9bc2d153989e3216b0559251b0c260cfd168ec78b1fac33dd485750a228db5a2"}, - {file = "rpds_py-0.20.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = 
"sha256:f2fbf7db2012d4876fb0d66b5b9ba6591197b0f165db8d99371d976546472a24"}, - {file = "rpds_py-0.20.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1e5f3cd7397c8f86c8cc72d5a791071431c108edd79872cdd96e00abd8497d29"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce9845054c13696f7af7f2b353e6b4f676dab1b4b215d7fe5e05c6f8bb06f965"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c3e130fd0ec56cb76eb49ef52faead8ff09d13f4527e9b0c400307ff72b408e1"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b16aa0107ecb512b568244ef461f27697164d9a68d8b35090e9b0c1c8b27752"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa7f429242aae2947246587d2964fad750b79e8c233a2367f71b554e9447949c"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af0fc424a5842a11e28956e69395fbbeab2c97c42253169d87e90aac2886d751"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b8c00a3b1e70c1d3891f0db1b05292747f0dbcfb49c43f9244d04c70fbc40eb8"}, - {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:40ce74fc86ee4645d0a225498d091d8bc61f39b709ebef8204cb8b5a464d3c0e"}, - {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:4fe84294c7019456e56d93e8ababdad5a329cd25975be749c3f5f558abb48253"}, - {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:338ca4539aad4ce70a656e5187a3a31c5204f261aef9f6ab50e50bcdffaf050a"}, - {file = "rpds_py-0.20.0-cp38-none-win32.whl", hash = "sha256:54b43a2b07db18314669092bb2de584524d1ef414588780261e31e85846c26a5"}, - {file = "rpds_py-0.20.0-cp38-none-win_amd64.whl", hash = "sha256:a1862d2d7ce1674cffa6d186d53ca95c6e17ed2b06b3f4c476173565c862d232"}, - {file = "rpds_py-0.20.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:3fde368e9140312b6e8b6c09fb9f8c8c2f00999d1823403ae90cc00480221b22"}, - {file = "rpds_py-0.20.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9824fb430c9cf9af743cf7aaf6707bf14323fb51ee74425c380f4c846ea70789"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11ef6ce74616342888b69878d45e9f779b95d4bd48b382a229fe624a409b72c5"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c52d3f2f82b763a24ef52f5d24358553e8403ce05f893b5347098014f2d9eff2"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d35cef91e59ebbeaa45214861874bc6f19eb35de96db73e467a8358d701a96c"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d72278a30111e5b5525c1dd96120d9e958464316f55adb030433ea905866f4de"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4c29cbbba378759ac5786730d1c3cb4ec6f8ababf5c42a9ce303dc4b3d08cda"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6632f2d04f15d1bd6fe0eedd3b86d9061b836ddca4c03d5cf5c7e9e6b7c14580"}, - {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d0b67d87bb45ed1cd020e8fbf2307d449b68abc45402fe1a4ac9e46c3c8b192b"}, - {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ec31a99ca63bf3cd7f1a5ac9fe95c5e2d060d3c768a09bc1d16e235840861420"}, - {file = 
"rpds_py-0.20.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:22e6c9976e38f4d8c4a63bd8a8edac5307dffd3ee7e6026d97f3cc3a2dc02a0b"}, - {file = "rpds_py-0.20.0-cp39-none-win32.whl", hash = "sha256:569b3ea770c2717b730b61998b6c54996adee3cef69fc28d444f3e7920313cf7"}, - {file = "rpds_py-0.20.0-cp39-none-win_amd64.whl", hash = "sha256:e6900ecdd50ce0facf703f7a00df12374b74bbc8ad9fe0f6559947fb20f82364"}, {file = "rpds_py-0.20.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:617c7357272c67696fd052811e352ac54ed1d9b49ab370261a80d3b6ce385045"}, {file = "rpds_py-0.20.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9426133526f69fcaba6e42146b4e12d6bc6c839b8b555097020e2b78ce908dcc"}, {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:deb62214c42a261cb3eb04d474f7155279c1a8a8c30ac89b7dcb1721d92c3c02"}, @@ -1470,46 +1217,34 @@ files = [ {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:dcedf0b42bcb4cfff4101d7771a10532415a6106062f005ab97d1d0ab5681c60"}, {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:39ed0d010457a78f54090fafb5d108501b5aa5604cc22408fc1c0c77eac14344"}, {file = "rpds_py-0.20.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:bb273176be34a746bdac0b0d7e4e2c467323d13640b736c4c477881a3220a989"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f918a1a130a6dfe1d7fe0f105064141342e7dd1611f2e6a21cd2f5c8cb1cfb3e"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:f60012a73aa396be721558caa3a6fd49b3dd0033d1675c6d59c4502e870fcf0c"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d2b1ad682a3dfda2a4e8ad8572f3100f95fad98cb99faf37ff0ddfe9cbf9d03"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:614fdafe9f5f19c63ea02817fa4861c606a59a604a77c8cdef5aa01d28b97921"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fa518bcd7600c584bf42e6617ee8132869e877db2f76bcdc281ec6a4113a53ab"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0475242f447cc6cb8a9dd486d68b2ef7fbee84427124c232bff5f63b1fe11e5"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f90a4cd061914a60bd51c68bcb4357086991bd0bb93d8aa66a6da7701370708f"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:def7400461c3a3f26e49078302e1c1b38f6752342c77e3cf72ce91ca69fb1bc1"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:65794e4048ee837494aea3c21a28ad5fc080994dfba5b036cf84de37f7ad5074"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:faefcc78f53a88f3076b7f8be0a8f8d35133a3ecf7f3770895c25f8813460f08"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:5b4f105deeffa28bbcdff6c49b34e74903139afa690e35d2d9e3c2c2fba18cec"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:fdfc3a892927458d98f3d55428ae46b921d1f7543b89382fdb483f5640daaec8"}, {file = "rpds_py-0.20.0.tar.gz", hash = "sha256:d72a210824facfdaf8768cf2d7ca25a042c30320b3020de2fa04640920d4e121"}, ] [[package]] name = "ruff" -version = "0.9.4" +version = "0.12.7" requires_python = ">=3.7" summary = "An extremely fast Python 
linter and code formatter, written in Rust." groups = ["lint"] files = [ - {file = "ruff-0.9.4-py3-none-linux_armv6l.whl", hash = "sha256:64e73d25b954f71ff100bb70f39f1ee09e880728efb4250c632ceed4e4cdf706"}, - {file = "ruff-0.9.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6ce6743ed64d9afab4fafeaea70d3631b4d4b28b592db21a5c2d1f0ef52934bf"}, - {file = "ruff-0.9.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:54499fb08408e32b57360f6f9de7157a5fec24ad79cb3f42ef2c3f3f728dfe2b"}, - {file = "ruff-0.9.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37c892540108314a6f01f105040b5106aeb829fa5fb0561d2dcaf71485021137"}, - {file = "ruff-0.9.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:de9edf2ce4b9ddf43fd93e20ef635a900e25f622f87ed6e3047a664d0e8f810e"}, - {file = "ruff-0.9.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:87c90c32357c74f11deb7fbb065126d91771b207bf9bfaaee01277ca59b574ec"}, - {file = "ruff-0.9.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:56acd6c694da3695a7461cc55775f3a409c3815ac467279dfa126061d84b314b"}, - {file = "ruff-0.9.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e0c93e7d47ed951b9394cf352d6695b31498e68fd5782d6cbc282425655f687a"}, - {file = "ruff-0.9.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1d4c8772670aecf037d1bf7a07c39106574d143b26cfe5ed1787d2f31e800214"}, - {file = "ruff-0.9.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfc5f1d7afeda8d5d37660eeca6d389b142d7f2b5a1ab659d9214ebd0e025231"}, - {file = "ruff-0.9.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:faa935fc00ae854d8b638c16a5f1ce881bc3f67446957dd6f2af440a5fc8526b"}, - {file = "ruff-0.9.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a6c634fc6f5a0ceae1ab3e13c58183978185d131a29c425e4eaa9f40afe1e6d6"}, - {file = "ruff-0.9.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:433dedf6ddfdec7f1ac7575ec1eb9844fa60c4c8c2f8887a070672b8d353d34c"}, - {file = "ruff-0.9.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:d612dbd0f3a919a8cc1d12037168bfa536862066808960e0cc901404b77968f0"}, - {file = "ruff-0.9.4-py3-none-win32.whl", hash = "sha256:db1192ddda2200671f9ef61d9597fcef89d934f5d1705e571a93a67fb13a4402"}, - {file = "ruff-0.9.4-py3-none-win_amd64.whl", hash = "sha256:05bebf4cdbe3ef75430d26c375773978950bbf4ee3c95ccb5448940dc092408e"}, - {file = "ruff-0.9.4-py3-none-win_arm64.whl", hash = "sha256:585792f1e81509e38ac5123492f8875fbc36f3ede8185af0a26df348e5154f41"}, - {file = "ruff-0.9.4.tar.gz", hash = "sha256:6907ee3529244bb0ed066683e075f09285b38dd5b4039370df6ff06041ca19e7"}, + {file = "ruff-0.12.7-py3-none-linux_armv6l.whl", hash = "sha256:76e4f31529899b8c434c3c1dede98c4483b89590e15fb49f2d46183801565303"}, + {file = "ruff-0.12.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:789b7a03e72507c54fb3ba6209e4bb36517b90f1a3569ea17084e3fd295500fb"}, + {file = "ruff-0.12.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:2e1c2a3b8626339bb6369116e7030a4cf194ea48f49b64bb505732a7fce4f4e3"}, + {file = "ruff-0.12.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32dec41817623d388e645612ec70d5757a6d9c035f3744a52c7b195a57e03860"}, + {file = "ruff-0.12.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:47ef751f722053a5df5fa48d412dbb54d41ab9b17875c6840a58ec63ff0c247c"}, + {file = "ruff-0.12.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:a828a5fc25a3efd3e1ff7b241fd392686c9386f20e5ac90aa9234a5faa12c423"}, + {file = "ruff-0.12.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:5726f59b171111fa6a69d82aef48f00b56598b03a22f0f4170664ff4d8298efb"}, + {file = "ruff-0.12.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:74e6f5c04c4dd4aba223f4fe6e7104f79e0eebf7d307e4f9b18c18362124bccd"}, + {file = "ruff-0.12.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d0bfe4e77fba61bf2ccadf8cf005d6133e3ce08793bbe870dd1c734f2699a3e"}, + {file = "ruff-0.12.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06bfb01e1623bf7f59ea749a841da56f8f653d641bfd046edee32ede7ff6c606"}, + {file = "ruff-0.12.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e41df94a957d50083fd09b916d6e89e497246698c3f3d5c681c8b3e7b9bb4ac8"}, + {file = "ruff-0.12.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:4000623300563c709458d0ce170c3d0d788c23a058912f28bbadc6f905d67afa"}, + {file = "ruff-0.12.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:69ffe0e5f9b2cf2b8e289a3f8945b402a1b19eff24ec389f45f23c42a3dd6fb5"}, + {file = "ruff-0.12.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:a07a5c8ffa2611a52732bdc67bf88e243abd84fe2d7f6daef3826b59abbfeda4"}, + {file = "ruff-0.12.7-py3-none-win32.whl", hash = "sha256:c928f1b2ec59fb77dfdf70e0419408898b63998789cc98197e15f560b9e77f77"}, + {file = "ruff-0.12.7-py3-none-win_amd64.whl", hash = "sha256:9c18f3d707ee9edf89da76131956aba1270c6348bfee8f6c647de841eac7194f"}, + {file = "ruff-0.12.7-py3-none-win_arm64.whl", hash = "sha256:dfce05101dbd11833a0776716d5d1578641b7fddb537fe7fa956ab85d1769b69"}, + {file = "ruff-0.12.7.tar.gz", hash = "sha256:1fc3193f238bc2d7968772c82831a4ff69252f673be371fb49663f0068b7ec71"}, ] [[package]] @@ -1666,17 +1401,6 @@ files = [ {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] -[[package]] -name = "types-paho-mqtt" -version = "1.6.0.20240321" -requires_python = ">=3.8" -summary = "Typing stubs for paho-mqtt" -groups = ["lint"] -files = [ - {file = "types-paho-mqtt-1.6.0.20240321.tar.gz", hash = "sha256:694eec160340f2a2b151237dcc3f107a63e1c4e5b8f9fcda0ba392049af9cbec"}, - {file = "types_paho_mqtt-1.6.0.20240321-py3-none-any.whl", hash = "sha256:cd275c14f39363c2a0f8286ead9a46962e5421ebd477547b892ae016699f5a4a"}, -] - [[package]] name = "types-python-dateutil" version = "2.9.0.20240316" @@ -1748,15 +1472,3 @@ files = [ {file = "webcolors-24.6.0-py3-none-any.whl", hash = "sha256:8cf5bc7e28defd1d48b9e83d5fc30741328305a8195c29a8e668fa45586568a1"}, {file = "webcolors-24.6.0.tar.gz", hash = "sha256:1d160d1de46b3e81e58d0a280d0c78b467dc80f47294b91b1ad8029d2cedb55b"}, ] - -[[package]] -name = "zipp" -version = "3.19.2" -requires_python = ">=3.8" -summary = "Backport of pathlib-compatible object wrapper for zip files" -groups = ["default", "docs"] -marker = "python_version < \"3.10\"" -files = [ - {file = "zipp-3.19.2-py3-none-any.whl", hash = "sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c"}, - {file = "zipp-3.19.2.tar.gz", hash = "sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19"}, -] diff --git a/pyproject.toml b/pyproject.toml index c8081f7..b6abadf 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,24 +16,25 @@ authors = [ ] readme = "README.md" license = { text = "BSD-3-Clause" } -requires-python = ">=3.8.10,<4.0" +requires-python = ">=3.10,<4.0" keywords = ["intersect"] dynamic = 
["version"] classifiers = [ "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", ] dependencies = [ "pydantic>=2.7.0", "retrying>=1.3.4,<2.0.0", - "paho-mqtt>=1.6.1,<2.0.0", + "paho-mqtt>=2.1.0,<3.0.0", + "pika>=1.3.2,<2.0.0", "minio>=7.2.3", "jsonschema[format-nongpl]>=4.21.1", # extras necessary for enforcing formats - "eval-type-backport>=0.1.3;python_version<'3.10'", # make pydantic work with newer syntax and older python #"brotli>=1.1.0", # TODO - add this dependency when we add compression + "psutil>=7.0.0", ] [project.urls] @@ -43,7 +44,6 @@ Documentation = "https://intersect-python-sdk.readthedocs.io/en/latest/" Issues = "https://github.com/INTERSECT-SDK/python-sdk/issues" [project.optional-dependencies] -amqp = ["pika>=1.3.2,<2.0.0"] docs = ["sphinx>=5.3.0", "furo>=2023.3.27"] [tool.ruff] @@ -55,7 +55,7 @@ isort = { known-first-party = ['src'] } pydocstyle = { convention = 'google' } flake8-quotes = { inline-quotes = 'single', multiline-quotes = 'double' } mccabe = { max-complexity = 20 } -pylint = { max-args = 10, max-branches = 20, max-returns = 10, max-statements = 75 } +pylint = { max-args = 10, max-branches = 20, max-returns = 15, max-statements = 75 } # pyflakes and the relevant pycodestyle rules are already configured extend-select = [ 'C90', # mccabe complexity @@ -90,7 +90,7 @@ extend-select = [ 'SLF', # flake8-self 'SLOT', # flake8-slots 'SIM', # flake8-simplify - 'TCH', # flake8-type-checking + 'TC', # flake8-type-checking 'ARG', # flake8-unused-arguments 'PTH', # flake8-use-pathlib 'PGH', # pygrep-hooks @@ -98,6 +98,7 @@ extend-select = [ 'TRY', # tryceratops 'FLY', # flynt 'RUF', # RUFF additional rules + 'INT', # flake8-gettext ] # If you're seeking to disable a rule, first consider whether the rule is overbearing, or if it should only be turned off for your usecase. ignore = [ @@ -108,8 +109,15 @@ ignore = [ 'PLR2004', # allow "magic numbers" ] +[tool.ruff.lint.flake8-type-checking] +runtime-evaluated-base-classes = ["pydantic.BaseModel", "intersect_sdk.IntersectBaseCapabilityImplementation"] +runtime-evaluated-decorators = ["pydantic.dataclasses.dataclass","pydantic.validate_call"] + [tool.ruff.lint.extend-per-file-ignores] -'__init__.py' = ['F401'] # __init__.py commonly has unused imports +'__init__.py' = [ + 'F401', # __init__.py commonly has unused imports + 'TC004', # do lazy imports when importing from the base module +] 'docs/*' = [ 'D', # the documentation folder does not need documentation 'INP001', # docs are not a namespace package @@ -121,7 +129,6 @@ ignore = [ 'D100', # documenting modules in examples is unhelpful 'D104', # documenting packages in examples is unhelpful 'TRY002', # examples can raise their own exception - 'FA100', # examples are tested on Python 3.8, and future annotations cause problems with Pydantic ] 'tests/*' = [ 'S101', # allow assert statements in tests @@ -131,8 +138,8 @@ ignore = [ 'ANN', # tests in general don't need types, unless they are runtime types. 'ARG', # allow unused parameters in tests 'D', # ignore documentation in tests - 'FA100', # tests frequently use runtime typing annotations -] + 'RUF012', # permit "mutable" class attributes to not be annotated with typing.ClassVar (these shouldn't be mutated anyways...) 
+ ] # see https://mypy.readthedocs.io/en/stable/config_file.html for a complete reference [tool.mypy] @@ -169,9 +176,8 @@ exclude_also = [ [tool.pdm.dev-dependencies] lint = [ "pre-commit>=3.3.1", - "ruff==0.9.4", + "ruff==0.12.7", "mypy>=1.10.0", - "types-paho-mqtt>=1.6.0.20240106", "codespell>=2.3.0", ] test = ["pytest>=7.3.2", "pytest-cov>=4.1.0", "httpretty>=1.1.4"] diff --git a/src/intersect_sdk/__init__.py b/src/intersect_sdk/__init__.py index 9eb584d..f512d93 100644 --- a/src/intersect_sdk/__init__.py +++ b/src/intersect_sdk/__init__.py @@ -7,45 +7,53 @@ - When a new data service is integrated into INTERSECT, ALL adapters will need to update to support this data service, which will include new dependencies. """ -from .app_lifecycle import default_intersect_lifecycle_loop -from .capability.base import IntersectBaseCapabilityImplementation -from .client import IntersectClient -from .client_callback_definitions import ( - INTERSECT_CLIENT_EVENT_CALLBACK_TYPE, - INTERSECT_CLIENT_RESPONSE_CALLBACK_TYPE, - IntersectClientCallback, -) -from .config.client import IntersectClientConfig -from .config.service import IntersectServiceConfig -from .config.shared import ( - ControlPlaneConfig, - ControlProvider, - DataStoreConfig, - DataStoreConfigMap, - HierarchyConfig, -) -from .core_definitions import IntersectDataHandler, IntersectMimeType -from .schema import get_schema_from_capability_implementations -from .service import IntersectService -from .service_callback_definitions import ( - INTERSECT_SERVICE_RESPONSE_CALLBACK_TYPE, -) -from .service_definitions import ( - IntersectEventDefinition, - intersect_event, - intersect_message, - intersect_status, -) -from .shared_callback_definitions import ( - INTERSECT_JSON_VALUE, - IntersectDirectMessageParams, -) -from .version import __version__, version_info, version_string +from importlib import import_module +from typing import TYPE_CHECKING + +# import everything eagerly for IDEs/LSPs +if TYPE_CHECKING: + from .app_lifecycle import default_intersect_lifecycle_loop + from .capability.base import IntersectBaseCapabilityImplementation + from .client import IntersectClient + from .client_callback_definitions import ( + INTERSECT_CLIENT_EVENT_CALLBACK_TYPE, + INTERSECT_CLIENT_RESPONSE_CALLBACK_TYPE, + IntersectClientCallback, + ) + from .config.client import IntersectClientConfig + from .config.service import IntersectServiceConfig + from .config.shared import ( + ControlPlaneConfig, + ControlProvider, + DataStoreConfig, + DataStoreConfigMap, + HierarchyConfig, + ) + from .core_definitions import IntersectDataHandler, IntersectMimeType + from .exceptions import IntersectCapabilityError + from .schema import get_schema_from_capability_implementations + from .service import IntersectService + from .service_callback_definitions import ( + INTERSECT_SERVICE_RESPONSE_CALLBACK_TYPE, + ) + from .service_definitions import ( + IntersectEventDefinition, + intersect_message, + intersect_status, + ) + from .shared_callback_definitions import ( + INTERSECT_JSON_VALUE, + INTERSECT_RESPONSE_VALUE, + IntersectDirectMessageParams, + IntersectEventMessageParams, + ) + from .version import __version__, version_info, version_string -__all__ = [ +__all__ = ( 'INTERSECT_CLIENT_EVENT_CALLBACK_TYPE', 'INTERSECT_CLIENT_RESPONSE_CALLBACK_TYPE', 'INTERSECT_JSON_VALUE', + 'INTERSECT_RESPONSE_VALUE', 'INTERSECT_SERVICE_RESPONSE_CALLBACK_TYPE', 'ControlPlaneConfig', 'ControlProvider', @@ -53,21 +61,70 @@ 'DataStoreConfigMap', 'HierarchyConfig', 
'IntersectBaseCapabilityImplementation', + 'IntersectCapabilityError', 'IntersectClient', 'IntersectClientCallback', 'IntersectClientConfig', 'IntersectDataHandler', 'IntersectDirectMessageParams', 'IntersectEventDefinition', + 'IntersectEventMessageParams', 'IntersectMimeType', 'IntersectService', 'IntersectServiceConfig', '__version__', 'default_intersect_lifecycle_loop', 'get_schema_from_capability_implementations', - 'intersect_event', 'intersect_message', 'intersect_status', 'version_info', 'version_string', -] +) + +# PEP 562 stuff: do lazy imports for people who just want to import from the top-level module + +__lazy_imports = { + 'INTERSECT_CLIENT_EVENT_CALLBACK_TYPE': '.client_callback_definitions', + 'INTERSECT_CLIENT_RESPONSE_CALLBACK_TYPE': '.client_callback_definitions', + 'INTERSECT_JSON_VALUE': '.shared_callback_definitions', + 'INTERSECT_RESPONSE_VALUE': '.shared_callback_definitions', + 'INTERSECT_SERVICE_RESPONSE_CALLBACK_TYPE': '.service_callback_definitions', + 'ControlPlaneConfig': '.config.shared', + 'ControlProvider': '.config.shared', + 'DataStoreConfig': '.config.shared', + 'DataStoreConfigMap': '.config.shared', + 'HierarchyConfig': '.config.shared', + 'IntersectBaseCapabilityImplementation': '.capability.base', + 'IntersectCapabilityError': '.exceptions', + 'IntersectClient': '.client', + 'IntersectClientCallback': '.client_callback_definitions', + 'IntersectClientConfig': '.config.client', + 'IntersectDataHandler': '.core_definitions', + 'IntersectDirectMessageParams': '.shared_callback_definitions', + 'IntersectEventDefinition': '.service_definitions', + 'IntersectEventMessageParams': '.shared_callback_definitions', + 'IntersectMimeType': '.core_definitions', + 'IntersectService': '.service', + 'IntersectServiceConfig': '.config.service', + '__version__': '.version', + 'default_intersect_lifecycle_loop': '.app_lifecycle', + 'get_schema_from_capability_implementations': '.schema', + 'intersect_message': '.service_definitions', + 'intersect_status': '.service_definitions', + 'version_info': '.version', + 'version_string': '.version', +} + + +def __getattr__(attr_name: str) -> object: + attr_module = __lazy_imports.get(attr_name) + if attr_module: + module = import_module(attr_module, package=__spec__.parent) + return getattr(module, attr_name) + + msg = f'module {__name__!r} has no attribute {attr_name!r}' + raise AttributeError(msg) + + +def __dir__() -> list[str]: + return list(__all__) diff --git a/src/intersect_sdk/_internal/constants.py b/src/intersect_sdk/_internal/constants.py index 0753697..fc3557a 100644 --- a/src/intersect_sdk/_internal/constants.py +++ b/src/intersect_sdk/_internal/constants.py @@ -1,10 +1,8 @@ BASE_RESPONSE_ATTR = '__is_intersect_response__' BASE_STATUS_ATTR = '__is_intersect_status__' -BASE_EVENT_ATTR = '__is_intersect_event__' # in theory, as long as the next attributes are unique, they can be any string REQUEST_CONTENT = '__request_content_type__' RESPONSE_CONTENT = '__response_content_type__' RESPONSE_DATA = '__response_data_transfer_handler__' STRICT_VALIDATION = '__strict_validation__' SHUTDOWN_KEYS = '__ignore_message__' -EVENT_ATTR_KEY = '__intersect_sdk_events__' diff --git a/src/intersect_sdk/_internal/control_plane/brokers/amqp_client.py b/src/intersect_sdk/_internal/control_plane/brokers/amqp_client.py index 5499a9c..6e8492f 100644 --- a/src/intersect_sdk/_internal/control_plane/brokers/amqp_client.py +++ b/src/intersect_sdk/_internal/control_plane/brokers/amqp_client.py @@ -12,7 +12,7 @@ import functools import threading 
from hashlib import sha384 -from typing import TYPE_CHECKING, Callable +from typing import TYPE_CHECKING import pika import pika.delivery_mode @@ -24,10 +24,13 @@ from .broker_client import BrokerClient if TYPE_CHECKING: + from collections.abc import Callable + from pika.channel import Channel from pika.frame import Frame from pika.spec import Basic, BasicProperties + from ..definitions import MessageCallback from ..topic_handler import TopicHandler @@ -191,7 +194,9 @@ def is_connected(self) -> bool: def considered_unrecoverable(self) -> bool: return self._unrecoverable - def publish(self, topic: str, payload: bytes, persist: bool) -> None: + def publish( + self, topic: str, payload: bytes, content_type: str, headers: dict[str, str], persist: bool + ) -> None: """Publish the given message. Publish payload with the pre-existing connection (via connect()) on topic. @@ -199,6 +204,8 @@ def publish(self, topic: str, payload: bytes, persist: bool) -> None: Args: topic: The topic on which to publish the message as a string payload: The message to publish, as raw bytes. + content_type: The content type of the message (if the data plane used is the control plane itself), or the value to be retrieved from the data plane (if the message handler is MINIO/etc.) + headers: UTF-8 dictionary which can help parse information about the message persist: True if message should persist until consumers available, False if message should be removed immediately. """ topic = _hierarchy_2_amqp(topic) @@ -210,11 +217,11 @@ def publish(self, topic: str, payload: bytes, persist: bool) -> None: routing_key=topic, body=payload, properties=pika.BasicProperties( - content_type='text/plain', + content_type=content_type, + headers=headers, delivery_mode=pika.delivery_mode.DeliveryMode.Persistent if persist else pika.delivery_mode.DeliveryMode.Transient, - # expiration=None if persist else '8640000', ), ) else: @@ -533,7 +540,7 @@ def _consume_message( self, channel: Channel, basic_deliver: Basic.Deliver, - _properties: BasicProperties, + properties: BasicProperties, body: bytes, persist: bool, ) -> None: @@ -545,7 +552,7 @@ def _consume_message( Args: channel: The AMQP channel the message was received on. Used to manually acknowledge messages. basic_deliver: Contains internal AMQP delivery information - i.e. the routing key. - _properties: Object from the AMQP call. Ignored. + properties: Object from the AMQP call. Contains various metadata. body: the AMQP message to be handled. persist: Whether or not our queue should persist on either broker or application shutdown. 
""" @@ -557,6 +564,18 @@ def _consume_message( channel.basic_reject(basic_deliver.delivery_tag) return + # make sure that we have a content-type and headers, note that this does not publish a "reply" message if we fail here + content_type = properties.content_type + if not content_type: + logger.error('Missing message content type') + channel.basic_ack(basic_deliver.delivery_tag) + return + headers = properties.headers + if not headers: + logger.error('Missing message headers') + channel.basic_ack(basic_deliver.delivery_tag) + return + tth_key = _amqp_2_hierarchy(basic_deliver.routing_key) topic_handler = self._topics_to_handlers().get(tth_key) if topic_handler: @@ -571,7 +590,15 @@ def _consume_message( consumer_tag_info.wait(1.0) thrd = threading.Thread( target=self._consume_message_subthread, - args=(channel, topic_handler.callbacks, body, basic_deliver.delivery_tag, persist), + args=( + channel, + topic_handler.callbacks, + body, + content_type, + headers, + basic_deliver.delivery_tag, + persist, + ), ) self._consumer_tags_to_threads[consumer_tag_info.consumer_tag] = thrd thrd.start() @@ -582,14 +609,16 @@ def _consume_message( def _consume_message_subthread( self, channel: Channel, - callbacks: set[Callable[[bytes], None]], + callbacks: set[MessageCallback], body: bytes, + content_type: str, + headers: dict[str, str], delivery_tag: int, persist: bool, ) -> None: """This is a subthread which executes the consumer code without blocking the IO loop. Without using a subthread, the AMQP heartbeat checker will be blocked.""" for cb in callbacks: - cb(body) + cb(body, content_type, headers) # With persistent messages, we only acknowledge the message AFTER we are done processing # (this removes the message from the broker queue) # this allows us to retry a message if the broker OR this application goes down diff --git a/src/intersect_sdk/_internal/control_plane/brokers/broker_client.py b/src/intersect_sdk/_internal/control_plane/brokers/broker_client.py index 2589a7c..a69bbee 100644 --- a/src/intersect_sdk/_internal/control_plane/brokers/broker_client.py +++ b/src/intersect_sdk/_internal/control_plane/brokers/broker_client.py @@ -32,7 +32,9 @@ def considered_unrecoverable(self) -> bool: """ ... - def publish(self, topic: str, payload: bytes, persist: bool) -> None: + def publish( + self, topic: str, payload: bytes, content_type: str, headers: dict[str, str], persist: bool + ) -> None: """Publishes the given message. Publish payload with the pre-existing connection (via connect()) on topic. @@ -40,6 +42,8 @@ def publish(self, topic: str, payload: bytes, persist: bool) -> None: Args: topic: The topic on which to publish the message as a string. payload: The message to publish, as raw bytes. + content_type: The content type of the message (if the data plane used is the control plane itself), or the value to be retrieved from the data plane (if the message handler is MINIO/etc.) 
+ headers: UTF-8 dictionary which can help parse information about the message persist: True = message will persist forever in associated queues until consumers are available (usually used for Userspace messages) False = remove message immediately if no consumers available (usually used for Event messages and Lifecycle messages) diff --git a/src/intersect_sdk/_internal/control_plane/brokers/mqtt_client.py b/src/intersect_sdk/_internal/control_plane/brokers/mqtt_client.py index 9ee8299..236de08 100644 --- a/src/intersect_sdk/_internal/control_plane/brokers/mqtt_client.py +++ b/src/intersect_sdk/_internal/control_plane/brokers/mqtt_client.py @@ -2,15 +2,23 @@ import threading import uuid -from typing import TYPE_CHECKING, Any, Callable +from typing import TYPE_CHECKING, Any import paho.mqtt.client as paho_client +from paho.mqtt.enums import CallbackAPIVersion +from paho.mqtt.packettypes import PacketTypes +from paho.mqtt.properties import Properties from retrying import retry from ...logger import logger from .broker_client import BrokerClient if TYPE_CHECKING: + from collections.abc import Callable + + from paho.mqtt.client import DisconnectFlags + from paho.mqtt.reasoncodes import ReasonCode + from ..topic_handler import TopicHandler @@ -57,8 +65,11 @@ def __init__( self.port = port # Create a client to connect to RabbitMQ - # TODO clean_session param is ONLY for MQTT v3 here - self._connection = paho_client.Client(client_id=self.uid, clean_session=False) + self._connection = paho_client.Client( + callback_api_version=CallbackAPIVersion.VERSION2, + protocol=paho_client.MQTTv5, + client_id=self.uid, + ) self._connection.username_pw_set(username=username, password=password) # Whether the connection is currently active @@ -71,6 +82,10 @@ def __init__( # ConnectionManager callable state self._topics_to_handlers = topics_to_handlers + # MQTT v3.1.1 automatically downgrades a QOS which is too high (good), but MQTT v5 will terminate the connection (bad) + # see https://github.com/rabbitmq/rabbitmq-server/discussions/11842 + self._max_supported_qos = 2 + # MQTT callback functions self._connection.on_connect = self._handle_connect self._connection.on_disconnect = self._handle_disconnect @@ -80,10 +95,14 @@ def __init__( def connect(self) -> None: """Connect to the defined broker.""" # Create a client to connect to RabbitMQ - # TODO MQTT v5 implementations should set clean_start to NEVER here self._should_disconnect = False self._connected_flag.clear() - self._connection.connect(self.host, self.port, 60) + self._connection.connect( + self.host, + self.port, + 60, + clean_start=False, + ) self._connection.loop_start() while not self.is_connected() and not self._connected_flag.is_set(): self._connected_flag.wait(1.0) @@ -106,7 +125,9 @@ def is_connected(self) -> bool: def considered_unrecoverable(self) -> bool: return self._unrecoverable - def publish(self, topic: str, payload: bytes, persist: bool) -> None: + def publish( + self, topic: str, payload: bytes, content_type: str, headers: dict[str, str], persist: bool + ) -> None: """Publish the given message. Publish payload with the pre-existing connection (via connect()) on topic. @@ -114,11 +135,17 @@ def publish(self, topic: str, payload: bytes, persist: bool) -> None: Args: topic: The topic on which to publish the message as a string. payload: The message to publish, as raw bytes. 
+ content_type: The content type of the message (if the data plane used is the control plane itself), or the value to be retrieved from the data plane (if the message handler is MINIO/etc.) + headers: UTF-8 dictionary which can help parse information about the message persist: Determine if the message should live until queue consumers or available (True), or if it should be removed immediately (False) """ - # NOTE: RabbitMQ only works with QOS of 1 and 0, and seems to convert QOS2 to QOS1 - self._connection.publish(topic, payload, qos=2 if persist else 0) + props = Properties(PacketTypes.PUBLISH) # type: ignore[no-untyped-call] + props.ContentType = content_type + props.UserProperty = list(headers.items()) + self._connection.publish( + topic, payload, qos=self._max_supported_qos if persist else 0, properties=props + ) def subscribe(self, topic: str, persist: bool) -> None: """Subscribe to a topic over the pre-existing connection (via connect()). @@ -128,7 +155,7 @@ def subscribe(self, topic: str, persist: bool) -> None: persist: Determine if the associated message queue of the topic is long-lived (True) or not (False) """ # NOTE: RabbitMQ only works with QOS of 1 and 0, and seems to convert QOS2 to QOS1 - self._connection.subscribe(topic, qos=2 if persist else 0) + self._connection.subscribe(topic, qos=2 if persist else 0, properties=None) def unsubscribe(self, topic: str) -> None: """Unsubscribe from a topic over the pre-existing connection. @@ -139,36 +166,78 @@ def unsubscribe(self, topic: str) -> None: self._connection.unsubscribe(topic) def _on_message( - self, _client: paho_client.Client, _userdata: Any, message: paho_client.MQTTMessage + self, + client: paho_client.Client, # noqa: ARG002 + userdata: Any, # noqa: ARG002 + message: paho_client.MQTTMessage, ) -> None: """Handle a message from the MQTT server. Args: - _client: the Paho client - _userdata: MQTT user data + client: the Paho client + userdata: MQTT user data message: MQTT message """ topic_handler = self._topics_to_handlers().get(message.topic) - if topic_handler: - for cb in topic_handler.callbacks: - cb(message.payload) + # Note that if we return prior to the callback, there will be no reply message + if not topic_handler: + logger.warning('Incompatible message topic %s, rejecting message', message.topic) + return + try: + content_type = message.properties.ContentType # type: ignore[union-attr] + headers = dict(message.properties.UserProperty) # type: ignore[union-attr] + except AttributeError as e: + logger.warning( + 'Missing mandatory property %s in received message. The message will be rejected', + e.name, + ) + return + except ValueError: + logger.warning( + 'Headers in received message are in improper format. The message will be rejected' + ) + return + for cb in topic_handler.callbacks: + cb(message.payload, content_type, headers) - def _handle_disconnect(self, client: paho_client.Client, _userdata: Any, _rc: int) -> None: + def _handle_disconnect( + self, + client: paho_client.Client, + userdata: Any, + flags: DisconnectFlags, + reason_code: ReasonCode, + properties: Properties | None, + ) -> None: """Handle a disconnection from the MQTT server. This callback usually implies a temporary connection fault, so we'll try to handle it. Args: client: The Paho client. - _userdata: MQTT user data. - rc: MQTT return code as an integer. + userdata: MQTT user data. + flags: List of MQTT connection flags. + reason_code: MQTT return code. + properties: MQTT user properties. 
""" + logger.debug( + 'mqtt disconnected log - uid=%s reason_code=%s flags=%s userdata=%s properties=%s', + self.uid, + reason_code, + flags, + userdata, + properties, + ) self._connected = False if not self._should_disconnect: client.reconnect() def _handle_connect( - self, _client: paho_client.Client, userdata: Any, flags: dict[str, Any], rc: int + self, + client: paho_client.Client, # noqa: ARG002 + userdata: Any, + flags: dict[str, Any], + reason_code: ReasonCode, + properties: Properties | None, ) -> None: """Set the connection status in response to the result of a Paho connection attempt. @@ -179,13 +248,26 @@ def _handle_connect( client: The Paho MQTT client. userdata: The MQTT userdata. flags: List of MQTT connection flags. - rc: The MQTT return code as an int. + reason_code: The MQTT return code. + properties: MQTT user properties """ - # Return code 0 means connection was successful - if rc == 0: + if str(reason_code) == 'Success': + logger.debug( + 'MQTT connected log - reason-code=%s properties=%s userdata=%s flags=%s', + reason_code, + properties, + userdata, + flags, + ) self._connected = True self._connection_retries = 0 self._should_disconnect = False + + # mimic "automatic QoS downgrade" of MQTTv3 for MQTTv5 + if properties and hasattr(properties, 'MaximumQoS'): + logger.info('MQTT: Maximum supported QoS is %s', properties.MaximumQoS) + self._max_supported_qos = properties.MaximumQoS + self._connected_flag.set() for topic, topic_handler in self._topics_to_handlers().items(): self.subscribe(topic, topic_handler.topic_persist) @@ -193,11 +275,13 @@ def _handle_connect( # This will generally suggest a misconfiguration self._connected = False self._connection_retries += 1 + logger.error('Bad connection (reason: %s)', reason_code) logger.error( - f'On connect error received (probable broker config error), have tried {self._connection_retries} times' + 'On connect error received (probable broker config error), have tried %s times', + self._connection_retries, ) - logger.error(f'Connection error userdata: {userdata}') - logger.error(f'Connection error flags: {flags}') + logger.error('Connection error userdata: %s', userdata) + logger.error('Connection error flags: %s', flags) if self._connection_retries >= _MQTT_MAX_RETRIES: logger.error('Giving up MQTT reconnection attempt') self._connected_flag.set() diff --git a/src/intersect_sdk/_internal/control_plane/control_plane_manager.py b/src/intersect_sdk/_internal/control_plane/control_plane_manager.py index e7a3628..6c23cc3 100644 --- a/src/intersect_sdk/_internal/control_plane/control_plane_manager.py +++ b/src/intersect_sdk/_internal/control_plane/control_plane_manager.py @@ -1,27 +1,17 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Any, Callable, Literal - -from pydantic import TypeAdapter +from typing import TYPE_CHECKING, Literal from ..exceptions import IntersectInvalidBrokerError from ..logger import logger -from .brokers.mqtt_client import MQTTClient from .topic_handler import TopicHandler if TYPE_CHECKING: + from collections.abc import Callable + from ...config.shared import ControlPlaneConfig from .brokers.broker_client import BrokerClient - -GENERIC_MESSAGE_SERIALIZER: TypeAdapter[Any] = TypeAdapter(Any) - - -def serialize_message(message: Any) -> bytes: - """Serialize a message to bytes, in preparation for publishing it on a message broker. 
- - Works as a generic serializer/deserializer - """ - return GENERIC_MESSAGE_SERIALIZER.dump_json(message, warnings=False) + from .definitions import MessageCallback def create_control_provider( @@ -31,7 +21,9 @@ def create_control_provider( if config.protocol == 'amqp0.9.1': # only try to import the AMQP client if the user is using an AMQP broker try: - from .brokers.amqp_client import AMQPClient + from .brokers.amqp_client import ( # noqa: PLC0415 (lazy load all AMQP modules) + AMQPClient, + ) return AMQPClient( host=config.host, @@ -43,7 +35,10 @@ def create_control_provider( except ImportError as e: msg = "Configuration includes AMQP broker, but AMQP dependencies were not installed. Install intersect with the 'amqp' optional dependency to use this backend. (i.e. `pip install intersect_sdk[amqp]`)" raise IntersectInvalidBrokerError(msg) from e + # MQTT + from .brokers.mqtt_client import MQTTClient # noqa: PLC0415 (lazy load MQTT modules) + return MQTTClient( host=config.host, port=config.port or 1883, @@ -78,7 +73,7 @@ def __init__( self._topics_to_handlers: dict[str, TopicHandler] = {} def add_subscription_channel( - self, channel: str, callbacks: set[Callable[[bytes], None]], persist: bool + self, channel: str, callbacks: set[MessageCallback], persist: bool ) -> None: """Start listening for messages on a channel on all configured brokers. @@ -149,13 +144,20 @@ def disconnect(self) -> None: for provider in self._control_providers: provider.disconnect() - def publish_message(self, channel: str, msg: Any, persist: bool) -> None: + def publish_message( + self, + channel: str, + payload: bytes, + content_type: str, + headers: dict[str, str], + persist: bool, + ) -> None: """Publish message on channel for all brokers.""" if self.is_connected(): - serialized_message = serialize_message(msg) for provider in self._control_providers: - provider.publish(channel, serialized_message, persist) + provider.publish(channel, payload, content_type, headers, persist) else: + # TODO may want more robust error handling here logger.error('Cannot send message, providers are not connected') def is_connected(self) -> bool: diff --git a/src/intersect_sdk/_internal/control_plane/definitions.py b/src/intersect_sdk/_internal/control_plane/definitions.py new file mode 100644 index 0000000..2ca9371 --- /dev/null +++ b/src/intersect_sdk/_internal/control_plane/definitions.py @@ -0,0 +1,10 @@ +from collections.abc import Callable + +MessageCallback = Callable[[bytes, str, dict[str, str]], None] +""" +All subscription callback functions take three arguments, provided by the protocol handler: + +1. The PAYLOAD of the message, in raw bytes. +2. The content-type of the PAYLOAD (as a valid utf-8 string). This can be validated prior to the callback function. +3. A UTF-8 mapping of header keys to header values. These should generally be specific to a domain, and will get validated in the callback function. 
+""" diff --git a/src/intersect_sdk/_internal/control_plane/discovery_service.py b/src/intersect_sdk/_internal/control_plane/discovery_service.py deleted file mode 100644 index 0a3a2b0..0000000 --- a/src/intersect_sdk/_internal/control_plane/discovery_service.py +++ /dev/null @@ -1,40 +0,0 @@ -"""TODO: This is all old code we currently aren't using.""" - -from __future__ import annotations - -import json -from urllib.parse import urlparse -from urllib.request import Request, urlopen - - -def discover_broker(address: str, broker_endpoint: str) -> tuple[str, str, int]: - """Get the metadata for a broker from the discovery service. - - Args: - address: A string containing the address for the discovery service. - broker_endpoint: specific API for broker - Returns: - Three strings. The first is the name of the broker type (as used in - _create_broker_client()), the second is the broker's address, and - the third is the broker's port number. - """ - url = f'{address}/v0.1/{broker_endpoint}' - - # Get scheme associated with the `url` string - scheme = urlparse(url).scheme - - # Only accept `http` and `https` schemes, otherwise raise error - if scheme not in ('http', 'https'): - msg = f'URL scheme is {scheme}, only http or https schemes are accepted' - raise ValueError(msg) - - request = Request(url) # noqa: S310 (scheme checked earlier) - with urlopen(request) as response: # noqa: S310 (scheme checked earlier) - body = response.read() - - broker_info = json.loads(body.decode('utf-8')) - endpoint = broker_info['endpoint'] - backend_name = broker_info['backendName'] - address, port = endpoint.split(':', 1) - - return backend_name, address, port diff --git a/src/intersect_sdk/_internal/control_plane/topic_handler.py b/src/intersect_sdk/_internal/control_plane/topic_handler.py index 8bfe424..d8d09a2 100644 --- a/src/intersect_sdk/_internal/control_plane/topic_handler.py +++ b/src/intersect_sdk/_internal/control_plane/topic_handler.py @@ -1,12 +1,10 @@ -from __future__ import annotations - -from typing import Callable +from .definitions import MessageCallback class TopicHandler: """ControlPlaneManager information about a topic, avoids protocol specific information.""" - callbacks: set[Callable[[bytes], None]] + callbacks: set[MessageCallback] """Set of functions to call when consuming a message. (In practice there will only be one callback, but it could be helpful to add a debugging function callback in for development.) 
diff --git a/src/intersect_sdk/_internal/data_plane/data_plane_manager.py b/src/intersect_sdk/_internal/data_plane/data_plane_manager.py index 83285e3..98e55b9 100644 --- a/src/intersect_sdk/_internal/data_plane/data_plane_manager.py +++ b/src/intersect_sdk/_internal/data_plane/data_plane_manager.py @@ -3,6 +3,8 @@ import random from typing import TYPE_CHECKING +from pydantic import TypeAdapter, ValidationError + from ...core_definitions import IntersectDataHandler, IntersectMimeType from ..exceptions import IntersectError from ..logger import logger @@ -10,8 +12,9 @@ if TYPE_CHECKING: from ...config.shared import DataStoreConfigMap, HierarchyConfig - from ..messages.event import EventMessage - from ..messages.userspace import UserspaceMessage + + +MINIO_ADAPTER = TypeAdapter(MinioPayload) class DataPlaneManager: @@ -34,7 +37,9 @@ def __init__(self, hierarchy: HierarchyConfig, data_configs: DataStoreConfigMap) if not self._minio_providers: logger.warning('WARNING: This service cannot support any MINIO instances') - def incoming_message_data_handler(self, message: UserspaceMessage | EventMessage) -> bytes: + def incoming_message_data_handler( + self, message: bytes, request_data_handler: IntersectDataHandler + ) -> bytes: """Get data from the request data provider. Params: @@ -44,12 +49,15 @@ def incoming_message_data_handler(self, message: UserspaceMessage | EventMessage Raise: IntersectException - if we couldn't get the data """ - request_data_handler = message['headers']['data_handler'] if request_data_handler == IntersectDataHandler.MESSAGE: - return message['payload'] # type: ignore[return-value] + return message if request_data_handler == IntersectDataHandler.MINIO: # TODO - we may want to send additional provider information in the payload - payload: MinioPayload = message['payload'] # type: ignore[assignment] + try: + payload: MinioPayload = MINIO_ADAPTER.validate_json(message) + except ValidationError as e: + logger.warning('Invalid MINIO payload format, dropping message') + raise IntersectError from e provider = None for store in self._minio_providers: if store._base_url._url.geturl() == payload['minio_url']: # noqa: SLF001 (only way to get URL from MINIO API) @@ -69,7 +77,7 @@ def outgoing_message_data_handler( function_response: bytes, content_type: IntersectMimeType, data_handler: IntersectDataHandler, - ) -> bytes | MinioPayload: + ) -> bytes: """Send the user's response to the appropriate data provider. Params: @@ -78,7 +86,7 @@ def outgoing_message_data_handler( - data_handler - where we're going to send the data off to (i.e. the message, MINIO...) Returns: - the payload of the message + the payload of the message, this varies based off of the data_handler value Raise: IntersectException - if there was any error in submitting the response """ @@ -94,7 +102,10 @@ def outgoing_message_data_handler( ) raise IntersectError provider = random.choice(self._minio_providers) # noqa: S311 (TODO choose a MINIO provider better than at random - this may be determined from external message params) - return send_minio_object(function_response, provider, content_type, self._hierarchy) + minio_payload = send_minio_object( + function_response, provider, content_type, self._hierarchy + ) + return MINIO_ADAPTER.dump_json(minio_payload) logger.error( f'No support implemented for code {data_handler}, please upgrade your intersect-sdk version.' 
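# --- Editor's illustrative sketch (not part of the diff) ----------------------
# The data plane now moves MINIO pointers as serialized bytes rather than as a
# nested object inside the message: outgoing_message_data_handler() dumps the
# MinioPayload with a pydantic TypeAdapter, and incoming_message_data_handler()
# validates those bytes back into a payload. The TypedDict below is a stand-in
# for illustration only (the real MinioPayload has more fields than this hunk shows).

from pydantic import TypeAdapter, ValidationError
from typing_extensions import TypedDict


class _ExamplePointer(TypedDict):
    minio_url: str


_ADAPTER = TypeAdapter(_ExamplePointer)

wire_bytes = _ADAPTER.dump_json({'minio_url': 'http://localhost:9000'})  # outgoing path
try:
    pointer = _ADAPTER.validate_json(wire_bytes)  # incoming path
except ValidationError:
    # the manager logs a warning and raises IntersectError in this case
    pointer = None
# ------------------------------------------------------------------------------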
diff --git a/src/intersect_sdk/_internal/data_plane/minio_utils.py b/src/intersect_sdk/_internal/data_plane/minio_utils.py
index f8ae683..6f94137 100644
--- a/src/intersect_sdk/_internal/data_plane/minio_utils.py
+++ b/src/intersect_sdk/_internal/data_plane/minio_utils.py
@@ -1,6 +1,9 @@
+from __future__ import annotations
+
 import mimetypes
 from hashlib import sha224
 from io import BytesIO
+from typing import TYPE_CHECKING
 from uuid import uuid4
 
 from minio import Minio
@@ -9,12 +12,14 @@
 from urllib3.exceptions import MaxRetryError
 from urllib3.util import parse_url
 
-from ...config.shared import DataStoreConfig, HierarchyConfig
-from ...core_definitions import IntersectMimeType
 from ..exceptions import IntersectError
 from ..logger import logger
 from ..utils import die
 
+if TYPE_CHECKING:
+    from ...config.shared import DataStoreConfig, HierarchyConfig
+    from ...core_definitions import IntersectMimeType
+
 
 class MinioPayload(TypedDict):
     """This is a payload which gets sent in the actual userspace message if the data handler is "MINIO"."""
@@ -86,7 +91,7 @@ def send_minio_object(
     """
     bucket_name = _condense_minio_bucket_name(hierarchy)
     # mimetypes.guess_extension() is a nice-to-have for MINIO preview, but isn't essential.
-    object_id = str(uuid4()) + (mimetypes.guess_extension(content_type.value) or '')
+    object_id = str(uuid4()) + (mimetypes.guess_extension(content_type) or '')
     try:
         if not provider.bucket_exists(bucket_name):
             provider.make_bucket(bucket_name)
@@ -96,7 +101,7 @@
             object_name=object_id,
             data=buff_data,
             length=buff_data.getbuffer().nbytes,
-            content_type=content_type.value,
+            content_type=content_type,
         )
         return MinioPayload(
             minio_url=provider._base_url._url.geturl(),  # noqa: SLF001 (only way to get URL from MINIO API)
diff --git a/src/intersect_sdk/_internal/event_metadata.py b/src/intersect_sdk/_internal/event_metadata.py
index 731eaaa..0d41cb4 100644
--- a/src/intersect_sdk/_internal/event_metadata.py
+++ b/src/intersect_sdk/_internal/event_metadata.py
@@ -15,10 +15,6 @@ class EventMetadata(NamedTuple):
     NOTE: both this class and all properties in it should remain immutable after creation
     """
 
-    operations: set[str]
-    """
-    A hash set of operations which advertise this event
-    """
     type: type
     """
     The actual type of the event
@@ -53,8 +49,8 @@ def definition_metadata_differences(
         differences.append(
             (
                 'content_type',
-                f'{definition.content_type.__class__.__name__}.{definition.content_type.name}',
-                f'{metadata.content_type.__class__.__name__}.{metadata.content_type.name}',
+                f'{definition.content_type}',
+                f'{metadata.content_type}',
             )
         )
     if definition.data_handler != metadata.data_transfer_handler:
diff --git a/src/intersect_sdk/_internal/exceptions.py b/src/intersect_sdk/_internal/exceptions.py
index 5bca8f2..dacff0d 100644
--- a/src/intersect_sdk/_internal/exceptions.py
+++ b/src/intersect_sdk/_internal/exceptions.py
@@ -3,7 +3,7 @@ class IntersectError(Exception):
 
 
 class IntersectApplicationError(IntersectError):
-    """This is a special IntersectException, thrown if user application logic throws ANY kind of Exception.
+    """This is a special IntersectException, thrown if user application logic throws ANY kind of Exception. The only caveat is if a user explicitly throws an IntersectCapabilityException, in which case that logic will be handled instead.
 
     In general, validation should be expressed through JSON schema as much as possible; however, JSON schema is NOT a complete prescription for input validation.
When this exception is thrown, however, we do not leak any exception information in the error message. On the other hand, if the input fails diff --git a/src/intersect_sdk/_internal/function_metadata.py b/src/intersect_sdk/_internal/function_metadata.py index 43d6399..2f86c0c 100644 --- a/src/intersect_sdk/_internal/function_metadata.py +++ b/src/intersect_sdk/_internal/function_metadata.py @@ -1,10 +1,12 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Any, Callable, NamedTuple +from typing import TYPE_CHECKING, Any, Literal, NamedTuple if TYPE_CHECKING: from pydantic import TypeAdapter + from ..core_definitions import IntersectDataHandler, IntersectMimeType + class FunctionMetadata(NamedTuple): """Internal cache of public function metadata. @@ -16,16 +18,35 @@ class FunctionMetadata(NamedTuple): """ The type of the class that implements the target method. """ - method: Callable[[Any], Any] + request_adapter: TypeAdapter[Any] | Literal[0] | None + """ + Type adapter for serializing and validating requests. + + Null if user did not specify a request parameter. + 0 if user did specify a request parameter, but Content-Type does not require a TypeAdapter. + """ + response_adapter: TypeAdapter[Any] | Literal[0] """ - The raw method of the function. The function itself is useless and should not be called, - but will store user-defined attributes needed for internal handling of data. + Type adapter for serializing and validating responses. + 0 if Content-Type does not require a TypeAdapter. """ - request_adapter: TypeAdapter[Any] | None + request_content_type: IntersectMimeType """ - Type adapter for serializing and validating requests. Should only be null if user did not specify a request parameter. + Content-Type of the request value """ - response_adapter: TypeAdapter[Any] + response_content_type: IntersectMimeType """ - Type adapter for serializing and validating responses. + Content-Type of the response value + """ + response_data_transfer_handler: IntersectDataHandler + """ + How we intend on handling the response value + """ + strict_validation: bool + """ + Whether or not we're using lenient Pydantic validation (default, False) or strict + """ + shutdown_keys: set[str] + """ + keys which should cause the function to be skipped if set """ diff --git a/src/intersect_sdk/_internal/generic_serializer.py b/src/intersect_sdk/_internal/generic_serializer.py new file mode 100644 index 0000000..b46df17 --- /dev/null +++ b/src/intersect_sdk/_internal/generic_serializer.py @@ -0,0 +1,5 @@ +from typing import Any + +from pydantic import TypeAdapter + +GENERIC_MESSAGE_SERIALIZER: TypeAdapter[Any] = TypeAdapter(Any) diff --git a/src/intersect_sdk/_internal/interfaces.py b/src/intersect_sdk/_internal/interfaces.py index 31bccae..a5844a8 100644 --- a/src/intersect_sdk/_internal/interfaces.py +++ b/src/intersect_sdk/_internal/interfaces.py @@ -21,13 +21,13 @@ class IntersectEventObserver(Protocol): Used as the common interface for event emitters (i.e. CapabilityImplementations). """ - def _on_observe_event(self, event_name: str, event_value: Any, operation: str) -> None: + def _on_observe_event(self, event_name: str, event_value: Any, capability_name: str) -> None: """How to react to an event being fired. Args: event_name: The key of the event which is fired. event_value: The value of the event which is fired. 
- operation: The source of the event (generally the function name, not directly invoked by application devs) + capability_name: The name of the capability which fired the event. """ ... @@ -52,6 +52,7 @@ def create_external_request( def register_event( self, service: HierarchyConfig, + capability_name: str, event_name: str, response_handler: INTERSECT_CLIENT_EVENT_CALLBACK_TYPE, ) -> None: @@ -59,6 +60,7 @@ def register_event( Params: - service: HierarchyConfig of the service we want to talk to + - capability_name: name of capability which will fire off the event - event_name: name of event to subscribe to - response_handler: callback for how to handle the reception of an event """ diff --git a/src/intersect_sdk/_internal/messages/event.py b/src/intersect_sdk/_internal/messages/event.py index 19a49ad..8ec54f5 100644 --- a/src/intersect_sdk/_internal/messages/event.py +++ b/src/intersect_sdk/_internal/messages/event.py @@ -9,25 +9,28 @@ import datetime import uuid -from typing import Any, Union +from typing import Annotated -from pydantic import AwareDatetime, Field, TypeAdapter -from typing_extensions import Annotated, TypedDict +from pydantic import AwareDatetime, BaseModel, Field, field_serializer -from ...constants import SYSTEM_OF_SYSTEM_REGEX -from ...core_definitions import IntersectDataHandler, IntersectMimeType +from ...constants import CAPABILITY_REGEX, SYSTEM_OF_SYSTEM_REGEX +from ...core_definitions import IntersectDataHandler from ...version import version_string -from ..data_plane.minio_utils import MinioPayload # TODO - another property we should consider is an optional max_wait_time for events which are fired from functions. # This would mostly be useful for clients/orchestrators that are waiting for a specific event. # This should probably be configured on the schema level... -class EventMessageHeaders(TypedDict): - """Matches the current header definition for INTERSECT messages. +class EventMessageHeaders(BaseModel): + """ALL event messages must include this header. - ALL messages should contain this header. + We do not include the content type of the message in the header, it is handled separately. + """ + + message_id: Annotated[uuid.UUID, Field(description='Unique message ID')] + """ + ID of the message. """ source: Annotated[ @@ -78,80 +81,65 @@ class EventMessageHeaders(TypedDict): usage, the payload would indicate the URI to where the data is stored on MinIO. """ - event_name: str - """ - The name of an event. You can reasonably determine the structure of the message payload by parsing: - - 1) the source of this message header - 2) this "name" property - 3) the service schema itself - """ - - -class EventMessage(TypedDict): - messageId: uuid.UUID - """ - ID of the message. (NOTE: this is defined here to conform to the AsyncAPI spec) - """ - - operationId: str + capability_name: Annotated[ + str, + Field( + pattern=CAPABILITY_REGEX, + description='The name of the capability which emitted the event originally.', + ), + ] """ - The name of the operation that was called when an event was emitted. These would map to the names of user functions. + The name of the capability which emitted the event originally. """ - headers: EventMessageHeaders + event_name: Annotated[ + str, + Field( + pattern=CAPABILITY_REGEX, + description='The name of the event that was emitted, namespaced to the capability.', + ), + ] """ - the headers of the message + The name of the event that was emitted. This is meaningless without the capability name. 
""" - payload: Union[bytes, MinioPayload] # noqa: FA100 (Pydantic uses runtime annotations) - """ - main payload of the message. Needs to match the schema format, including the content type. + # make sure all non-string fields are serialized into strings, even in Python code - NOTE: The payload's contents will differ based on the data_handler property in the message header. - """ + @field_serializer('message_id', mode='plain') + def ser_uuid(self, uuid: uuid.UUID) -> str: + return str(uuid) - contentType: Annotated[IntersectMimeType, Field(IntersectMimeType.JSON)] - """ - The content type to use when encoding/decoding a message's payload. - The value MUST be a specific media type (e.g. application/json). - When omitted, the value MUST be the one specified on the defaultContentType field. + @field_serializer('created_at', mode='plain') + def ser_datetime(self, dt: datetime.datetime) -> str: + return dt.isoformat() - Note that if the data_handler type is anything other MESSAGE, the actual content-type of the message - payload will depend on the data_handler type. - """ + @field_serializer('data_handler', mode='plain') + def ser_enum(self, enum: IntersectDataHandler) -> str: + return enum.value -def create_event_message( +def create_event_message_headers( source: str, - operation_id: str, - content_type: IntersectMimeType, - data_handler: IntersectDataHandler, + capability_name: str, event_name: str, - payload: Any, -) -> EventMessage: - """Payloads depend on the data handler.""" - return EventMessage( - messageId=uuid.uuid4(), - operationId=operation_id, - contentType=content_type, - payload=payload, - headers=EventMessageHeaders( - source=source, - created_at=datetime.datetime.now(tz=datetime.timezone.utc), - sdk_version=version_string, - event_name=event_name, - data_handler=data_handler, - ), - ) - - -EVENT_MESSAGE_ADAPTER = TypeAdapter(EventMessage) - - -def deserialize_and_validate_event_message(msg: bytes) -> EventMessage: - """If the "msg" param is a valid userspace message, return the object. - - Raises Pydantic ValidationError if "msg" is not a valid userspace message - """ - return EVENT_MESSAGE_ADAPTER.validate_json(msg, strict=True) + data_handler: IntersectDataHandler, +) -> dict[str, str]: + """Generate raw headers and write them into a generic data structure which can be handled by any broker protocol.""" + return EventMessageHeaders( + source=source, + message_id=uuid.uuid4(), + created_at=datetime.datetime.now(tz=datetime.timezone.utc), + sdk_version=version_string, + capability_name=capability_name, + event_name=event_name, + data_handler=data_handler, + ).model_dump(by_alias=True) + + +def validate_event_message_headers(raw_headers: dict[str, str]) -> EventMessageHeaders: + """Validate raw headers and return the object. 
+ + Raises: + pydantic.ValidationError - if the headers were missing any essential information + """ + return EventMessageHeaders(**raw_headers) # type: ignore[arg-type] diff --git a/src/intersect_sdk/_internal/messages/lifecycle.py b/src/intersect_sdk/_internal/messages/lifecycle.py index 6d15572..cae1d9a 100644 --- a/src/intersect_sdk/_internal/messages/lifecycle.py +++ b/src/intersect_sdk/_internal/messages/lifecycle.py @@ -9,60 +9,32 @@ import datetime import uuid -from enum import IntEnum -from typing import Any, Literal +from typing import Annotated, Literal -from pydantic import AwareDatetime, Field, TypeAdapter -from typing_extensions import Annotated, TypedDict +from pydantic import AwareDatetime, BaseModel, Field, field_serializer from ...constants import SYSTEM_OF_SYSTEM_REGEX from ...version import version_string +LifecycleType = Literal[ + 'LCT_STARTUP', + 'LCT_SHUTDOWN', + 'LCT_POLLING', + 'LCT_FUNCTIONS_ALLOWED', + 'LCT_FUNCTIONS_BLOCKED', +] -class LifecycleType(IntEnum): - """Lifecycle code which needs to be included in every message sent. - The lifecycle code is also what determines the structure of the payload. - """ +class LifecycleMessageHeaders(BaseModel): + """ALL lifecycle messages must include this header. - STARTUP = 0 - """ - Message sent on startup. Includes the schema in the payload. - """ - SHUTDOWN = 1 - """ - Message sent on shutdown. Can send a reason for shutdown in the payload. - """ - POLLING = 2 - """ - Message periodically sent out to indicate liveliness. - Failure to send a lifecycle message indicates unhealthy service. - A message with a POLLING enumeration also implies that the status has not been updated. + We do not include the content type of the message in the header, it is handled separately. - Includes the schema and the current status in the payload ({'schema': schema_str, 'status': current_status}) + A special note about lifecycle messages is that their content type must ALWAYS be "application/json". """ - STATUS_UPDATE = 3 - """ - Message sent out to explicitly indicate there was a status update. - Status updates are checked each time a user function is called and during the polling interval. - Includes the schema and the new status in the payload ({'schema': schema_str, 'status': current_status}) - """ - FUNCTIONS_ALLOWED = 4 - """ - Send out a list of functions now allowed in the payload - """ - FUNCTIONS_BLOCKED = 5 - """ - Send out a list of functions now blocked in the payload - """ - - -class LifecycleMessageHeaders(TypedDict): - """Matches the current header definition for INTERSECT messages. - - ALL messages should contain this header. - """ + message_id: Annotated[uuid.UUID, Field(description='Unique message ID')] + """UUID of the message.""" source: Annotated[ str, @@ -75,17 +47,6 @@ class LifecycleMessageHeaders(TypedDict): source of the message """ - destination: Annotated[ - str, - Field( - description='destination of the message', - pattern=SYSTEM_OF_SYSTEM_REGEX, - ), - ] - """ - destination of the message - """ - created_at: Annotated[ AwareDatetime, Field( @@ -114,61 +75,33 @@ class LifecycleMessageHeaders(TypedDict): The integer code of the lifecycle message being sent/received. """ + # make sure all non-string fields are serialized into strings, even in Python code -class LifecycleMessage(TypedDict): - messageId: uuid.UUID - """ - ID of the message. 
(NOTE: this is defined here to conform to the AsyncAPI spec) - """ - - headers: LifecycleMessageHeaders - """ - the headers of the message - """ - - payload: Any - """ - main payload of the message. - - NOTE: The payload's contents will differ based on the lifecycle_type property in the message header. - """ + @field_serializer('message_id', mode='plain') + def ser_uuid(self, uuid: uuid.UUID) -> str: + return str(uuid) - contentType: Annotated[Literal['application/json'], Field('application/json')] - """ - The content type to use when encoding/decoding a message's payload. - - NOTE: ContentType is provided to somewhat match the AsyncAPI spec, but this value must ALWAYS be - application/json , as a lifecycle message's payload should ALWAYS be represented in JSON. - """ + @field_serializer('created_at', mode='plain') + def ser_datetime(self, dt: datetime.datetime) -> str: + return dt.isoformat() -def create_lifecycle_message( +def create_lifecycle_message_headers( source: str, - destination: str, lifecycle_type: LifecycleType, - payload: Any, -) -> LifecycleMessage: - """The contents of the payload should vary based on the lifecycle type.""" - return LifecycleMessage( - messageId=uuid.uuid4(), - headers=LifecycleMessageHeaders( - source=source, - destination=destination, - created_at=datetime.datetime.now(tz=datetime.timezone.utc), - sdk_version=version_string, - lifecycle_type=lifecycle_type, - ), - payload=payload, - contentType='application/json', - ) - - -LIFECYCLE_MESSAGE_ADAPTER = TypeAdapter(LifecycleMessage) +) -> dict[str, str]: + """Generate raw headers and write them into a generic data structure which can be handled by any broker protocol. + The contents of the payload should vary based on the lifecycle type. + """ + return LifecycleMessageHeaders( + source=source, + message_id=uuid.uuid4(), + created_at=datetime.datetime.now(tz=datetime.timezone.utc), + sdk_version=version_string, + lifecycle_type=lifecycle_type, + ).model_dump(by_alias=True) -def deserialize_and_validate_lifecycle_message(msg: bytes) -> LifecycleMessage: - """If the "msg" param is a valid userspace message, return the object. - Raises Pydantic ValidationError if "msg" is not a valid userspace message - """ - return LIFECYCLE_MESSAGE_ADAPTER.validate_json(msg, strict=True) +def validate_lifecycle_message_headers(raw_headers: dict[str, str]) -> LifecycleMessageHeaders: + return LifecycleMessageHeaders(**raw_headers) # type: ignore[arg-type] diff --git a/src/intersect_sdk/_internal/messages/userspace.py b/src/intersect_sdk/_internal/messages/userspace.py index f2a0b9d..3ae66b7 100644 --- a/src/intersect_sdk/_internal/messages/userspace.py +++ b/src/intersect_sdk/_internal/messages/userspace.py @@ -16,28 +16,43 @@ from services they explicitly messaged. """ -from __future__ import annotations - import datetime import uuid -from typing import Any, Union +from typing import Annotated -from pydantic import AwareDatetime, Field, TypeAdapter -from typing_extensions import Annotated, TypedDict +from pydantic import AwareDatetime, BaseModel, Field, field_serializer from ...constants import SYSTEM_OF_SYSTEM_REGEX from ...core_definitions import ( IntersectDataHandler, - IntersectMimeType, ) from ...version import version_string -from ..data_plane.minio_utils import MinioPayload # noqa: TC001 (this is runtime checked) -class UserspaceMessageHeader(TypedDict): - """Matches the current header definition for INTERSECT messages. 
+class UserspaceMessageHeaders(BaseModel): + """ALL request/response/command messages must contain this header. + + We do not include the content type of the message in the header, it is handled separately. + """ + + message_id: Annotated[uuid.UUID, Field(description='Unique message ID')] + """ + ID of the message. + """ - ALL messages should contain this header. + campaign_id: Annotated[uuid.UUID, Field(description='ID associated with a campaign')] + """ + ID of the campaign. For Clients, this should be set once per run, and then not changed. For orchestrators, this is associated with a campaign. + """ + + request_id: Annotated[ + uuid.UUID, + Field( + description='ID associated with a specific request message and response message sequence' + ), + ] + """ + ID of the request. A Client/orchestrator generates this ID for each request message it sends, and the Service generates a response message with this ID. """ source: Annotated[ @@ -77,6 +92,18 @@ class UserspaceMessageHeader(TypedDict): SDKs should be assumed NOT to be compatible if they don't share the major version. """ + operation_id: Annotated[ + str, + Field( + description='Name of capability and operation we want to call, in the format ${CAPABILITY_NAME}.${FUNCTION_NAME}' + ), + ] + """ + The name of the operation we want to call. For Services, this indicates the operation which will be called; for Clients, this is the operation which was called. + + This maps to the format ${CAPABILITY_NAME}.${FUNCTION_NAME} . + """ + data_handler: Annotated[ IntersectDataHandler, Field( @@ -104,82 +131,53 @@ class UserspaceMessageHeader(TypedDict): This should only be set to "True" on return messages sent by services - NEVER clients. """ + # make sure all non-string fields are serialized into strings, even in Python code -class UserspaceMessage(TypedDict): - """Core definition of a message. - - The structure of this class is meant to somewhat closely mirror the AsyncAPI definition of a message: - https://www.asyncapi.com/docs/reference/specification/v2.6.0#messageObject - """ - - messageId: uuid.UUID - """ - ID of the message. (NOTE: this is defined here to conform to the AsyncAPI spec) - """ - - operationId: str - """ - The name of the operation we want to call. These would map to the names of user functions. - """ - - headers: UserspaceMessageHeader - """ - the headers of the message - """ - - payload: Union[bytes, MinioPayload] # noqa: UP007 (Pydantic uses runtime annotations) - """ - main payload of the message. Needs to match the schema format, including the content type. + @field_serializer('message_id', 'request_id', 'campaign_id', mode='plain') + def ser_uuid(self, uuid: uuid.UUID) -> str: + return str(uuid) - NOTE: The payload's contents will differ based on the data_handler property in the message header. - NOTE: If "has_error" flag in the message headers is set to "True", the payload will instead contain an error string. - """ + @field_serializer('created_at', mode='plain') + def ser_datetime(self, dt: datetime.datetime) -> str: + return dt.isoformat() - contentType: Annotated[IntersectMimeType, Field(IntersectMimeType.JSON)] - """ - The content type to use when encoding/decoding a message's payload. - The value MUST be a specific media type (e.g. application/json). - When omitted, the value MUST be the one specified on the defaultContentType field. 
+ @field_serializer('has_error', mode='plain') + def ser_boolean(self, boolean: bool) -> str: + return str(boolean).lower() - Note that if the data_handler type is anything other MESSAGE, the actual content-type of the message - payload will depend on the data_handler type. - """ + @field_serializer('data_handler', mode='plain') + def ser_enum(self, enum: IntersectDataHandler) -> str: + return enum.value -def create_userspace_message( +def create_userspace_message_headers( source: str, destination: str, operation_id: str, - content_type: IntersectMimeType, data_handler: IntersectDataHandler, - payload: Any, - message_id: uuid.UUID | None = None, + campaign_id: uuid.UUID, + request_id: uuid.UUID, has_error: bool = False, -) -> UserspaceMessage: - """Payloads depend on the data_handler and has_error.""" - msg_id = message_id if message_id else uuid.uuid4() - return UserspaceMessage( - messageId=msg_id, - operationId=operation_id, - contentType=content_type, - payload=payload, - headers=UserspaceMessageHeader( - source=source, - destination=destination, - sdk_version=version_string, - created_at=datetime.datetime.now(tz=datetime.timezone.utc), - data_handler=data_handler, - has_error=has_error, - ), - ) - - -USERSPACE_MESSAGE_ADAPTER = TypeAdapter(UserspaceMessage) - - -def deserialize_and_validate_userspace_message(msg: bytes) -> UserspaceMessage: - """If the "msg" param is a valid userspace message, return the object. - - Raises Pydantic ValidationError if "msg" is not a valid userspace message - """ - return USERSPACE_MESSAGE_ADAPTER.validate_json(msg, strict=True) +) -> dict[str, str]: + """Generate raw headers and write them into a generic data structure which can be handled by any broker protocol.""" + return UserspaceMessageHeaders( + message_id=uuid.uuid4(), + campaign_id=campaign_id, + request_id=request_id, + source=source, + destination=destination, + sdk_version=version_string, + created_at=datetime.datetime.now(tz=datetime.timezone.utc), + operation_id=operation_id, + data_handler=data_handler, + has_error=has_error, + ).model_dump(by_alias=True) + + +def validate_userspace_message_headers(raw_headers: dict[str, str]) -> UserspaceMessageHeaders: + """Validate raw headers and return the object. + + Raises: + pydantic.ValidationError - if the headers were missing any essential information + """ + return UserspaceMessageHeaders(**raw_headers) # type: ignore[arg-type] diff --git a/src/intersect_sdk/_internal/pydantic_schema_generator.py b/src/intersect_sdk/_internal/pydantic_schema_generator.py index 822fef4..8587ab9 100644 --- a/src/intersect_sdk/_internal/pydantic_schema_generator.py +++ b/src/intersect_sdk/_internal/pydantic_schema_generator.py @@ -3,10 +3,7 @@ See: https://docs.pydantic.dev/latest/api/json_schema/#pydantic.json_schema.GenerateJsonSchema """ -from __future__ import annotations - from typing import ( - TYPE_CHECKING, Any, ) @@ -20,10 +17,7 @@ JsonSchemaValue, ) from pydantic.type_adapter import _type_has_config -from pydantic_core import PydanticSerializationError, to_jsonable_python - -if TYPE_CHECKING: - from pydantic_core import CoreSchema, core_schema +from pydantic_core import CoreSchema, PydanticSerializationError, core_schema, to_jsonable_python # build nested dictionary from list of keys. i.e. 
if keys = ['one', 'two', 'three'] diff --git a/src/intersect_sdk/_internal/schema.py b/src/intersect_sdk/_internal/schema.py index 1f67518..2124686 100644 --- a/src/intersect_sdk/_internal/schema.py +++ b/src/intersect_sdk/_internal/schema.py @@ -4,35 +4,38 @@ import inspect import re +from collections.abc import Callable, Mapping from enum import Enum from typing import ( TYPE_CHECKING, + Annotated, Any, - Callable, - Mapping, NamedTuple, get_origin, ) -from pydantic import PydanticUserError, TypeAdapter +from pydantic import Field, PydanticUserError, TypeAdapter from typing_extensions import TypeAliasType +from ..constants import CAPABILITY_REGEX +from ..service_definitions import IntersectEventDefinition from ..version import version_string from .constants import ( - BASE_EVENT_ATTR, BASE_RESPONSE_ATTR, BASE_STATUS_ATTR, - EVENT_ATTR_KEY, REQUEST_CONTENT, RESPONSE_CONTENT, RESPONSE_DATA, + SHUTDOWN_KEYS, + STRICT_VALIDATION, ) from .event_metadata import EventMetadata, definition_metadata_differences from .function_metadata import FunctionMetadata from .logger import logger from .messages.event import EventMessageHeaders -from .messages.userspace import UserspaceMessageHeader +from .messages.userspace import UserspaceMessageHeaders from .pydantic_schema_generator import GenerateTypedJsonSchema +from .status_metadata import StatusMetadata from .utils import die if TYPE_CHECKING: @@ -41,7 +44,6 @@ from ..capability.base import IntersectBaseCapabilityImplementation from ..config.shared import HierarchyConfig from ..core_definitions import IntersectDataHandler - from ..service_definitions import IntersectEventDefinition ASYNCAPI_VERSION = '2.6.0' @@ -56,19 +58,39 @@ For a complete reference, https://docs.pydantic.dev/latest/concepts/conversion_table """ -CAPABILITY_NAME_PATTERN = r'[\w-]+' -"""Regular expression we use to check valid capability names. Since capability namespacing only occurs in services, we can be more lax than for how we name services/systems/etc. """ + +def _is_annotation_type(annotation: Any, the_type: type) -> bool: + """Checks to see if 'annotation' is one of the following. + + - the_type + - Annotated[the_type] + """ + return annotation is the_type or ( + get_origin(annotation) is Annotated and annotation.__origin__ is the_type + ) + + +def _create_binary_schema( + title: str, the_type: type, content_type: str, mode: JsonSchemaMode +) -> dict[str, Any]: + """Create a JSON schema for data which is entirely binary, try to extract metadata from user's type.""" + # at this point, the typing can safely be deduced as "bytes", so we do not need to use the special format + schema = TypeAdapter(the_type).json_schema(mode=mode) + if 'title' not in schema: + schema['title'] = title + schema['contentMediaType'] = content_type + + return schema class _FunctionAnalysisResult(NamedTuple): """private class generated from static analysis of function.""" - class_name: str method_name: str method: Callable[[Any], Any] """raw method is for inspecting attributes""" min_args: int - """this usually just means number of implicit arguments to a function. Note that for events we currently don't use this. + """this usually just means number of implicit arguments to a function. 
the maximum number of args can always be derived from this, so don't bother storing it """ @@ -78,7 +100,6 @@ def _function_static_analysis( capability: type[IntersectBaseCapabilityImplementation], name: str, method: Any, - check_method_type: bool = True, ) -> _FunctionAnalysisResult: """This performs generic, simple static analysis, which can be used across annotations.""" if not callable(method): @@ -86,14 +107,11 @@ def _function_static_analysis( f'On class attribute "{name}", INTERSECT annotation should only be used on callable functions, and should be applied first (put it at the bottom)' ) min_args = 0xFF # max args Python allows, excepting varargs - if check_method_type: - static_attr = inspect.getattr_static(capability, name) - if isinstance(static_attr, classmethod): - die( - f'On class attribute "{name}", INTERSECT annotations cannot be used with @classmethod' - ) - min_args = int(not isinstance(static_attr, staticmethod)) - return _FunctionAnalysisResult(capability.__name__, name, method, min_args) + static_attr = inspect.getattr_static(capability, name) + if isinstance(static_attr, classmethod): + die(f'On class attribute "{name}", INTERSECT annotations cannot be used with @classmethod') + min_args = int(not isinstance(static_attr, staticmethod)) + return _FunctionAnalysisResult(name, method, min_args) def _get_functions( @@ -101,7 +119,6 @@ def _get_functions( ) -> tuple[ _FunctionAnalysisResult | None, list[_FunctionAnalysisResult], - list[_FunctionAnalysisResult], ]: """Inspect all functions, and check that annotated functions are not classmethods (which are always a mistake) and are callable. @@ -117,20 +134,10 @@ def _get_functions( """ intersect_status = None intersect_messages = [] - intersect_events = [] for name in dir(capability): method = getattr(capability, name) - if hasattr(method, BASE_EVENT_ATTR): - intersect_events.append( - _function_static_analysis( - capability, - name, - method, - False, - ) - ) - elif hasattr(method, BASE_RESPONSE_ATTR): + if hasattr(method, BASE_RESPONSE_ATTR): intersect_messages.append(_function_static_analysis(capability, name, method)) elif hasattr(method, BASE_STATUS_ATTR): if intersect_status is not None: @@ -143,11 +150,7 @@ def _get_functions( logger.warning( f"Class '{capability.__name__}' has no function annotated with the @intersect_status() decorator. No status information will be provided when sending status messages." ) - if not intersect_messages and not intersect_events and not intersect_status: - die( - f"No intersect annotations detected on class '{capability.__name__}'. Please annotate at least one entrypoint with '@intersect_message()', or one event-emitting function with '@intersect_event()', or create an '@intersect_status' function." - ) - return intersect_status, intersect_messages, intersect_events + return intersect_status, intersect_messages def _merge_schema_definitions( @@ -206,8 +209,19 @@ def _merge_schema_definitions( return schema +def _ensure_title_in_schema(schema: dict[str, Any], title: str) -> None: + """Make sure that any schema without a $ref has a title, insert 'title' in to 'schema' if no title exists. 
+ + Any definition pointed to by a '$ref' (which usually signifies a class) should already have a title + """ + if '$ref' not in schema and 'title' not in schema: + schema['title'] = title + + def _status_fn_schema( - status_info: _FunctionAnalysisResult, schemas: dict[str, Any] + class_name: str, + status_info: _FunctionAnalysisResult, + schemas: dict[str, Any], ) -> tuple[ str, Callable[[Any], Any], @@ -222,7 +236,7 @@ def _status_fn_schema( - The status function's schema - The TypeAdapter to use for serializing outgoing responses """ - class_name, status_fn_name, status_fn, min_params = status_info + status_fn_name, status_fn, min_params = status_info status_signature = inspect.signature(status_fn) method_params = tuple(status_signature.parameters.values()) if len(method_params) != min_params or any( @@ -231,21 +245,23 @@ def _status_fn_schema( die( f"On capability '{class_name}', capability status function '{status_fn_name}' should have no parameters other than 'self' (unless a staticmethod), and should not use keyword or variable length arguments (i.e. '*', *args, **kwargs)." ) - if status_signature.return_annotation is inspect.Signature.empty: + if status_signature.return_annotation in (inspect.Signature.empty, None): die( - f"On capability '{class_name}', capability status function '{status_fn_name}' should have a valid return annotation." + f"On capability '{class_name}', capability status function '{status_fn_name}' should have a valid return annotation and should not be null." ) try: status_adapter = TypeAdapter(status_signature.return_annotation) - return ( + status_schema = _merge_schema_definitions( + status_adapter, + schemas, + status_signature.return_annotation, + 'serialization', + ) + _ensure_title_in_schema(status_schema, 'Status') + return ( # noqa: TRY300 (caught exception means we die) status_fn_name, status_fn, - _merge_schema_definitions( - status_adapter, - schemas, - status_signature.return_annotation, - 'serialization', - ), + status_schema, status_adapter, ) except PydanticUserError as e: @@ -256,7 +272,6 @@ def _status_fn_schema( def _add_events( class_name: str, - function_name: str, schemas: dict[str, Any], event_schemas: dict[str, Any], event_metadatas: dict[str, EventMetadata], @@ -276,37 +291,67 @@ def _add_events( for d in differences_from_cache ) die( - f"On capability '{class_name}', event key '{event_key}' on function '{function_name}' was previously defined differently. \n{diff_str}\n" + f"On capability '{class_name}', event key '{event_key}' was previously defined differently. \n{diff_str}\n" ) - metadata_value.operations.add(function_name) else: if event_definition.data_handler in excluded_data_handlers: die( - f"On capability '{class_name}', function '{function_name}' should not set data_handler as {event_definition.data_handler} unless an instance is configured in IntersectConfig.data_stores ." + f"On capability '{class_name}', event key '{event_key}' should not set data_handler as {event_definition.data_handler} unless an instance is configured in IntersectConfig.data_stores ." 
) - try: - event_adapter: TypeAdapter[Any] = TypeAdapter(event_definition.event_type) - event_schemas[event_key] = _merge_schema_definitions( - event_adapter, - schemas, + if event_definition.content_type == 'application/json': + try: + event_adapter: TypeAdapter[Any] = TypeAdapter(event_definition.event_type) + event_schema = _merge_schema_definitions( + event_adapter, + schemas, + event_definition.event_type, + 'serialization', + ) + _ensure_title_in_schema(event_schema, event_key) + event_schemas[event_key] = { + 'schemaFormat': f'application/vnd.aai.asyncapi+json;version={ASYNCAPI_VERSION}', + 'contentType': event_definition.content_type, + 'payload': event_schema, + 'traits': {'$ref': '#/components/messageTraits/commonHeaders'}, + } + if event_definition.event_documentation: + event_schemas[event_key]['description'] = ( + event_definition.event_documentation + ) + event_metadatas[event_key] = EventMetadata( + type=event_definition.event_type, + type_adapter=event_adapter, + content_type=event_definition.content_type, + data_transfer_handler=event_definition.data_handler, + ) + except PydanticUserError as e: + die( + f"On capability '{class_name}', event key '{event_key}' has an invalid value in the events mapping.\n{e}" + ) + else: + if not _is_annotation_type(event_definition.event_type, bytes): + die( + f"On capability '{class_name}', event key '{event_key}' must have EventDefinition event_type be 'bytes' if content_type is not 'application/json'" + ) + + event_adapter = 0 # type: ignore[assignment] + event_schemas[event_key] = _create_binary_schema( + event_key, event_definition.event_type, + event_definition.content_type, 'serialization', ) event_metadatas[event_key] = EventMetadata( type=event_definition.event_type, type_adapter=event_adapter, - operations={function_name}, content_type=event_definition.content_type, data_transfer_handler=event_definition.data_handler, ) - except PydanticUserError as e: - die( - f"On capability '{class_name}', event key '{event_key}' on function '{function_name}' has an invalid value in the events mapping.\n{e}" - ) def _introspection_baseline( capability: type[IntersectBaseCapabilityImplementation], + event_validator: TypeAdapter[dict[str, IntersectEventDefinition]], excluded_data_handlers: set[IntersectDataHandler], ) -> tuple[ dict[Any, Any], # $defs for schemas (common) @@ -324,9 +369,12 @@ def _introspection_baseline( - Capabilities should implement functions which represent entrypoints - Each entrypoint should be annotated with @intersect_message() (this sets a hidden attribute) - - Entrypoint functions should either have no parameters, or they should - describe all their parameters in one BaseModel-derived class. - (NOTE: maybe allow for some other input formats?) + - Entrypoint functions should either have no parameters, or they should have only one parameter which is Pydantic-compatible (i.e. 
BaseModel) + - Capabilities may also implement a status function + - the status function should have no parameters, and should return a Pydantic-compatible parameter + - the status function also needs to return something which is small enough to fit in memory and isn't binary data; it should be inexpensive to call it + - Capabilities may also implement events, which represent the capabilities announcing information without being prompted externally + - a capability can have many event, but we need to ensure that the """ # global schema variables schemas: dict[Any, Any] = {} @@ -336,12 +384,19 @@ def _introspection_baseline( function_map = {} event_metadatas: dict[str, EventMetadata] = {} - # capability_name should have already been checked before calling this function + class_name = capability.__name__ + + # capability_name should have already been checked for uniqueness before calling this function cap_name = capability.intersect_sdk_capability_name - status_func, response_funcs, event_funcs = _get_functions(capability) + event_functions = event_validator.validate_python(capability.intersect_sdk_events) + status_func, response_funcs = _get_functions(capability) + if not status_func and not response_funcs and not event_functions: + die( + f"No intersect annotations detected on class '{class_name}'. Please annotate at least one entrypoint with '@intersect_message()', or configure at least one event on the 'intersect_sdk_events' class variable, or create an '@intersect_status' function." + ) - # parse functions - for class_name, name, method, min_params in response_funcs: + # parse @intersect_messages + for name, method, min_params in response_funcs: public_name = f'{cap_name}.{name}' # TODO - I'm placing this here for now because we'll eventually want to capture data plane and broker configs in the schema. @@ -354,6 +409,9 @@ def _introspection_baseline( f"On capability '{class_name}', function '{name}' should not set response_data_type as {data_handler} unless an instance is configured in IntersectConfig.data_stores ." ) + request_content = getattr(method, REQUEST_CONTENT) + response_content = getattr(method, RESPONSE_CONTENT) + docstring = inspect.cleandoc(method.__doc__) if method.__doc__ else None signature = inspect.signature(method) method_params = tuple(signature.parameters.values()) @@ -375,14 +433,14 @@ def _introspection_baseline( 'publish': { 'message': { 'schemaFormat': f'application/vnd.aai.asyncapi+json;version={ASYNCAPI_VERSION}', - 'contentType': getattr(method, REQUEST_CONTENT).value, + 'contentType': request_content, 'traits': {'$ref': '#/components/messageTraits/commonHeaders'}, } }, 'subscribe': { 'message': { 'schemaFormat': f'application/vnd.aai.asyncapi+json;version={ASYNCAPI_VERSION}', - 'contentType': getattr(method, RESPONSE_CONTENT).value, + 'contentType': response_content, 'traits': {'$ref': '#/components/messageTraits/commonHeaders'}, } }, @@ -408,17 +466,29 @@ def _introspection_baseline( die( f"On capability '{class_name}', parameter '{parameter.name}' should not use a default value in the function parameter (use 'typing_extensions.Annotated[TYPE, pydantic.Field(default=)]' instead - 'default_factory' is an acceptable, mutually exclusive argument to 'Field')." 
) - try: - function_cache_request_adapter = TypeAdapter(annotation) - channels[name]['subscribe']['message']['payload'] = _merge_schema_definitions( - function_cache_request_adapter, - schemas, - annotation, - 'validation', - ) - except PydanticUserError as e: - die( - f"On capability '{class_name}', parameter '{parameter.name}' type annotation '{annotation}' on function '{name}' is invalid\n{e}" + if request_content == 'application/json': + try: + function_cache_request_adapter = TypeAdapter(annotation) + msg_request_schema = _merge_schema_definitions( + function_cache_request_adapter, + schemas, + annotation, + 'validation', + ) + _ensure_title_in_schema(msg_request_schema, parameter.name) + channels[name]['subscribe']['message']['payload'] = msg_request_schema + except PydanticUserError as e: + die( + f"On capability '{class_name}', parameter '{parameter.name}' type annotation '{annotation}' on function '{name}' is invalid\n{e}" + ) + else: + if not _is_annotation_type(annotation, bytes): + die( + f"On capability '{class_name}', parameter '{parameter.name}' type annotation '{annotation.__name__}' on function '{name}' must be 'bytes' if request_content_type is not 'application/json'" + ) + function_cache_request_adapter = 0 # type: ignore[assignment] + channels[name]['subscribe']['message']['payload'] = _create_binary_schema( + name, annotation, request_content, 'validation' ) else: @@ -429,63 +499,70 @@ def _introspection_baseline( die( f"On capability '{class_name}', return type annotation on function '{name}' missing. {SCHEMA_HELP_MSG}" ) - try: - function_cache_response_adapter = TypeAdapter(return_annotation) - channels[name]['publish']['message']['payload'] = _merge_schema_definitions( - function_cache_response_adapter, - schemas, - return_annotation, - 'serialization', - ) - except PydanticUserError as e: - die( - f"On capability '{class_name}', return annotation '{return_annotation}' on function '{name}' is invalid.\n{e}" + if response_content == 'application/json': + try: + function_cache_response_adapter = TypeAdapter(return_annotation) + response_schema = _merge_schema_definitions( + function_cache_response_adapter, + schemas, + return_annotation, + 'serialization', + ) + _ensure_title_in_schema(response_schema, name) + channels[name]['publish']['message']['payload'] = response_schema + except PydanticUserError as e: + die( + f"On capability '{class_name}', return annotation '{return_annotation}' on function '{name}' is invalid.\n{e}" + ) + else: + if not _is_annotation_type(return_annotation, bytes): + die( + f"On capability '{class_name}', return annotation '{return_annotation.__name__}' on function '{name}' must be 'bytes' if response_content_type is not 'application/json'" + ) + function_cache_response_adapter = 0 # type: ignore[assignment] + channels[name]['publish']['message']['payload'] = _create_binary_schema( + name, return_annotation, response_content, 'serialization' ) # final function mapping function_map[public_name] = FunctionMetadata( capability, - method, function_cache_request_adapter, function_cache_response_adapter, + request_content, + response_content, + data_handler, + getattr(method, STRICT_VALIDATION), + getattr(method, SHUTDOWN_KEYS), ) - # this block handles events associated with intersect_messages (implies command pattern) - function_events: dict[str, IntersectEventDefinition] = getattr(method, EVENT_ATTR_KEY) - _add_events( - class_name, - name, - schemas, - event_schemas, - event_metadatas, - function_events, - excluded_data_handlers, - ) - 
channels[name]['events'] = list(function_events.keys()) - - # parse global schemas - for class_name, name, method, _ in event_funcs: - _add_events( - class_name, - name, - schemas, - event_schemas, - event_metadatas, - getattr(method, EVENT_ATTR_KEY), - excluded_data_handlers, - ) + # parse events + _add_events( + class_name, + schemas, + event_schemas, + event_metadatas, + event_functions, + excluded_data_handlers, + ) status_fn_name, status_fn, status_fn_schema, status_fn_type_adapter = ( - _status_fn_schema(status_func, schemas) if status_func else (None, None, None, None) + _status_fn_schema(class_name, status_func, schemas) + if status_func + else (None, None, None, None) ) # this conditional allows for the status function to also be called like a message if status_fn_type_adapter and status_fn and status_fn_name: public_status_name = f'{cap_name}.{status_fn_name}' function_map[public_status_name] = FunctionMetadata( capability, - status_fn, None, status_fn_type_adapter, + getattr(status_fn, REQUEST_CONTENT), + getattr(status_fn, RESPONSE_CONTENT), + getattr(status_fn, RESPONSE_DATA), + getattr(status_fn, STRICT_VALIDATION), + getattr(status_fn, SHUTDOWN_KEYS), ) return ( @@ -505,10 +582,8 @@ def get_schema_and_functions_from_capability_implementations( ) -> tuple[ dict[str, Any], dict[str, FunctionMetadata], - dict[str, EventMetadata], - type[IntersectBaseCapabilityImplementation] | None, - str | None, - TypeAdapter[Any] | None, + dict[str, dict[str, EventMetadata]], + list[StatusMetadata], ]: """This function generates the core AsyncAPI schema, and the core mappings which are derived from the schema. @@ -519,23 +594,29 @@ def get_schema_and_functions_from_capability_implementations( In-depth introspection is handled later on. """ - status_function_cap: type[IntersectBaseCapabilityImplementation] | None = None - status_function_name: str | None = None - status_function_schema: dict[str, Any] | None = None - status_function_adapter: TypeAdapter[Any] | None = None shared_schemas: dict[Any, Any] = {} # "shared" schemas which get put in $defs capability_schemas: dict[str, Any] = {} # endpoint schemas + status_list: list[StatusMetadata] = [] # list of all active statuses function_map: dict[str, FunctionMetadata] = {} # endpoint functionality - events: dict[ - str, Any - ] = {} # event schemas - TODO event names are currently "global" across capabilities, may want to change this? - event_map: dict[str, EventMetadata] = {} # event functionality + event_map: dict[ + str, dict[str, EventMetadata] + ] = {} # event functionality: capability -> event -> EventMetadata + + # NOTE: we allow for capabilities between 1 and 255 characters in length, however we don't capture this in regex. + # The Rust regex engine cannot use range {} characters, see https://docs.rs/regex/latest/regex/#untrusted-patterns + event_validator = TypeAdapter( + dict[ + Annotated[str, Field(pattern=CAPABILITY_REGEX, max_length=255)], + IntersectEventDefinition, + ] + ) + for capability_type in capabilities: cap_name = capability_type.intersect_sdk_capability_name if ( not cap_name or not isinstance(cap_name, str) - or not re.fullmatch(CAPABILITY_NAME_PATTERN, cap_name) + or not re.fullmatch(CAPABILITY_REGEX, cap_name) ): die( f'Invalid intersect_sdk_capability_name on capability {capability_type.__name__} - must be a non-empty string with only alphanumeric characters and hyphens (you must explicitly set this, and do so on the class and not an instance).' 
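For readers less familiar with Pydantic's key-constrained `TypeAdapter` pattern, here is a minimal standalone sketch of the validation performed by the `event_validator` above. The pattern string mirrors `CAPABILITY_REGEX` from `constants.py`; the `int` value type is a placeholder for `IntersectEventDefinition`, so treat this as an illustration rather than SDK code.

```python
from typing import Annotated

from pydantic import Field, TypeAdapter, ValidationError

CAPABILITY_REGEX = r'^[a-zA-Z0-9]\w*$'  # same pattern as intersect_sdk.constants.CAPABILITY_REGEX

# Dictionary keys must match the pattern and stay within 255 characters;
# an int stands in for IntersectEventDefinition purely for illustration.
validator = TypeAdapter(dict[Annotated[str, Field(pattern=CAPABILITY_REGEX, max_length=255)], int])

validator.validate_python({'temperature': 1})  # accepted
try:
    validator.validate_python({'-bad.key': 1})  # rejected: key does not match the pattern
except ValidationError as e:
    print(e)
```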
@@ -548,32 +629,33 @@ def get_schema_and_functions_from_capability_implementations( ( subschemas, (cap_status_fn_name, cap_status_schema, cap_status_type_adapter), - cap_functions, + cap_endpoint_schemas, cap_function_map, - cap_events, + cap_event_schemas, cap_event_map, - ) = _introspection_baseline(capability_type, excluded_data_handlers) - - if cap_status_fn_name and cap_status_schema and cap_status_type_adapter: - if status_function_name is not None: - # TODO may want to change this later - die('Only one capability may have an @intersect_status function') - status_function_cap = capability_type - status_function_name = cap_status_fn_name - status_function_schema = cap_status_schema - status_function_adapter = cap_status_type_adapter + ) = _introspection_baseline(capability_type, event_validator, excluded_data_handlers) + + if cap_status_fn_name and cap_status_type_adapter: + status_list.append( + StatusMetadata( + capability_name=cap_name, + function_name=cap_status_fn_name, + serializer=cap_status_type_adapter, + ) + ) shared_schemas.update(subschemas) # NOTE: we will still add the capability to the schema, even if there are no @intersect_message annotations capability_schemas[cap_name] = { - 'channels': cap_functions, + 'endpoints': cap_endpoint_schemas, + 'events': cap_event_schemas, + 'status': cap_status_schema if cap_status_schema else {'type': 'null'}, } # add documentation for the capabilities if capability_type.__doc__: capability_schemas[cap_name]['description'] = inspect.cleandoc(capability_type.__doc__) function_map.update(cap_function_map) - events.update(cap_events) - event_map.update(cap_event_map) + event_map[cap_name] = cap_event_map asyncapi_spec = { 'asyncapi': ASYNCAPI_VERSION, @@ -587,14 +669,12 @@ def get_schema_and_functions_from_capability_implementations( # can be changed per channel 'defaultContentType': 'application/json', 'capabilities': capability_schemas, - 'events': events, - 'status': status_function_schema if status_function_schema else {'type': 'null'}, 'components': { 'schemas': shared_schemas, 'messageTraits': { # this is where we can define our message headers 'commonHeaders': { - 'userspaceHeaders': TypeAdapter(UserspaceMessageHeader).json_schema( + 'userspaceHeaders': TypeAdapter(UserspaceMessageHeaders).json_schema( ref_template='#/components/messageTraits/commonHeaders/userspaceHeaders/$defs/{model}', mode='serialization', ), @@ -611,7 +691,5 @@ def get_schema_and_functions_from_capability_implementations( asyncapi_spec, function_map, event_map, - status_function_cap, - status_function_name, - status_function_adapter, + status_list, ) diff --git a/src/intersect_sdk/_internal/status_metadata.py b/src/intersect_sdk/_internal/status_metadata.py new file mode 100644 index 0000000..f96be09 --- /dev/null +++ b/src/intersect_sdk/_internal/status_metadata.py @@ -0,0 +1,14 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, NamedTuple + +if TYPE_CHECKING: + from pydantic import TypeAdapter + + +class StatusMetadata(NamedTuple): + """Information we attach to status functions when running them in the Service loop.""" + + capability_name: str + function_name: str + serializer: TypeAdapter[Any] diff --git a/src/intersect_sdk/_internal/version.py b/src/intersect_sdk/_internal/version.py index 4cc68d3..0a8a782 100644 --- a/src/intersect_sdk/_internal/version.py +++ b/src/intersect_sdk/_internal/version.py @@ -1,5 +1,7 @@ """Version sanity checks to make sure that the release version is properly formatted.""" +import re + def 
strip_version_metadata(version: str) -> str: """Given a string, do the following. @@ -9,8 +11,6 @@ def strip_version_metadata(version: str) -> str: This is necessary because INTERSECT works off of a strict SemVer string and does not understand build metadata. """ - import re - sem_ver = re.search(r'\d+\.\d+\.\d+', version) if sem_ver is None: msg = 'Package version does not contain a semantic version "..", please fix this' diff --git a/src/intersect_sdk/_internal/version_resolver.py b/src/intersect_sdk/_internal/version_resolver.py index 35a850d..29e8592 100644 --- a/src/intersect_sdk/_internal/version_resolver.py +++ b/src/intersect_sdk/_internal/version_resolver.py @@ -1,40 +1,35 @@ -from __future__ import annotations - -from typing import TYPE_CHECKING - from ..core_definitions import IntersectDataHandler from ..version import version_info, version_string from .logger import logger -if TYPE_CHECKING: - from .messages.event import EventMessage - from .messages.userspace import UserspaceMessage - def _resolve_user_version( - msg: UserspaceMessage | EventMessage, our_version: str, our_version_info: tuple[int, int, int] + their_version: str, + their_source: str, + their_data_handler: IntersectDataHandler, + our_version: str, + our_version_info: tuple[int, int, int], ) -> bool: """Function which handles version resolution information. Separated into private function for testing purposes """ - their_version = msg['headers']['sdk_version'] their_version_info = [int(x) for x in their_version.split('.')] # logging rules: log "error" if it's definitely our fault, "warning" if it _might_ be our fault if their_version_info[0] != our_version_info[0]: logger.warning( - f'Problem with source {msg["headers"]["source"]}: Major version incompatibility between our SDK version {our_version} and their SDK version {their_version}' + f'Problem with source {their_source}: Major version incompatibility between our SDK version {our_version} and their SDK version {their_version}' ) return False if our_version_info[0] == 0 and our_version_info[1] != their_version_info[1]: logger.warning( - f'Problem with source {msg["headers"]["source"]}: Pre-release minor version incompatibility between our SDK version {our_version} and their SDK version {their_version}' + f'Problem with source {their_source}: Pre-release minor version incompatibility between our SDK version {our_version} and their SDK version {their_version}' ) return False - if msg['headers']['data_handler'] >= len(IntersectDataHandler): + if their_data_handler.value not in [e.value for e in IntersectDataHandler]: logger.error( - f'Problem with source {msg["headers"]["source"]}: This adapter cannot handle data handler with code "{msg["headers"]["data_handler"]}", please upgrade to the latest intersect-sdk version.' + f'Problem with source {their_source}: This adapter cannot handle data handler with code "{their_data_handler}", please upgrade to the latest intersect-sdk version.' ) return False # NOTE: consider implementing a content-type check here as well @@ -42,17 +37,23 @@ def _resolve_user_version( return True -def resolve_user_version(msg: UserspaceMessage | EventMessage) -> bool: +def resolve_user_version( + their_version: str, their_source: str, their_data_handler: IntersectDataHandler +) -> bool: """This function handles all version compatibilities between our SDK version and an incoming message's SDK version. 
Params - msg - message from the orchestrator or another adapter + their_version - version information from headers + their_source - source information from headers + their_data_handler - data handler information from headers Returns: True if version resolution successful, false otherwise """ return _resolve_user_version( - msg=msg, + their_version, + their_source, + their_data_handler, our_version=version_string, our_version_info=version_info, ) diff --git a/src/intersect_sdk/app_lifecycle.py b/src/intersect_sdk/app_lifecycle.py index 13977b8..3e6769c 100644 --- a/src/intersect_sdk/app_lifecycle.py +++ b/src/intersect_sdk/app_lifecycle.py @@ -17,12 +17,14 @@ import signal import sys from threading import Event -from typing import TYPE_CHECKING, Any, Callable +from typing import TYPE_CHECKING, Any from ._internal.logger import logger from ._internal.utils import die if TYPE_CHECKING: + from collections.abc import Callable + from .client import IntersectClient from .service import IntersectService diff --git a/src/intersect_sdk/capability/__init__.py b/src/intersect_sdk/capability/__init__.py index 2a96531..d86d77b 100644 --- a/src/intersect_sdk/capability/__init__.py +++ b/src/intersect_sdk/capability/__init__.py @@ -6,9 +6,3 @@ The benefit of using pre-existing INTERSECT capabilities over using your own is that it's easier for campaign authors and automated workflows to discover your instrument. """ - -from .base import IntersectBaseCapabilityImplementation - -__all__ = [ - 'IntersectBaseCapabilityImplementation', -] diff --git a/src/intersect_sdk/capability/base.py b/src/intersect_sdk/capability/base.py index 9731c7a..2dfab78 100644 --- a/src/intersect_sdk/capability/base.py +++ b/src/intersect_sdk/capability/base.py @@ -2,14 +2,10 @@ from __future__ import annotations -import inspect from typing import TYPE_CHECKING, Any, ClassVar from typing_extensions import final -from .._internal.constants import BASE_EVENT_ATTR, BASE_RESPONSE_ATTR, BASE_STATUS_ATTR -from .._internal.logger import logger - if TYPE_CHECKING: from uuid import UUID @@ -19,6 +15,7 @@ from ..service_callback_definitions import ( INTERSECT_SERVICE_RESPONSE_CALLBACK_TYPE, ) + from ..service_definitions import IntersectEventDefinition from ..shared_callback_definitions import ( IntersectDirectMessageParams, ) @@ -40,7 +37,33 @@ class IntersectBaseCapabilityImplementation: Each capability within a Service MUST have a unique capability name. This value should not be modified once the capability has been added to the Service. - This value should ONLY contain alphanumeric characters, hyphens, and underscores. + This value should ONLY contain alphanumeric characters, hyphens, and underscores. Note case sensitivity; 'HDF' and 'hdf' are different capability names. + """ + + intersect_sdk_events: ClassVar[dict[str, IntersectEventDefinition]] = {} + """Mapping of event names to IntersectEventDefinitions. + + Override this class variable with your own configuration value to emit events; by default, the Capability will be configured to emit no events. + If you did not specify any @intersect_message or @intersect_status functions on your capability, you MUST override this. + + All event keys should ONLY contain alphanumeric characters, hyphens, and underscores. Note case sensitivity; 'temperature' and 'Temperature' are different event keys. + Any events you specify MUST have a valid IntersectEventDefinition as a value; the associated Service will refuse to start if you don't.
+ + To emit an event, you can call `capability.intersect_sdk_emit_event(key, value)`. "key" MUST be configured in this dictionary, and "value" MUST match + the schema you provided for it in the IntersectEventDefinition. + + You are permitted to modify this value as a _class variable_ (e.g. `MyCapabilityImplementation.intersect_sdk_events[dynamic_string_key] = IntersectEventDefinition(...)`) prior to inserting it into the Service. Modifying the value on an instance of this class will do nothing. + Once this capability is passed into the Service constructor, modifying it any further is pointless. + + Example: + + ```python + class MyCapability(IntersectBaseCapabilityImplementation): + intersect_sdk_events = { + 'temperature': IntersectEventDefinition(event_type=float), + 'image': IntersectEventDefinition(event_type=bytes, content_type='image/png'), + } + ``` """ def __init__(self) -> None: @@ -71,7 +94,7 @@ def __init_subclass__(cls) -> None: or cls.intersect_sdk_listen_for_service_event is not IntersectBaseCapabilityImplementation.intersect_sdk_listen_for_service_event ): - msg = f"{cls.__name__}: Attempted to override a reserved INTERSECT-SDK function (don't start your function names with '_intersect_sdk_' or 'intersect_sdk_')" + msg = f"{cls.__name__}: Attempted to override a reserved INTERSECT-SDK function (don't start your function names with '__intersect_sdk_', '_intersect_sdk_', or 'intersect_sdk_')" raise RuntimeError(msg) @final @@ -84,53 +107,15 @@ def _intersect_sdk_register_observer(self, observer: IntersectEventObserver) -> def intersect_sdk_emit_event(self, event_name: str, event_value: Any) -> None: """Emits an event into the INTERSECT system. - If you are emitting an event inside either an @intersect_message decorated function, or ANY FUNCTION called - internally from an @intersect_message decorated function, you MUST register the event on the @intersect_message. - If you're emitting an event from an internal function eventually called from multiple @intersect_message functions, - you must register the event on ALL @intersect_message functions which call this event-emitting function. - - You may also emit an event from any function annotated with @intersect_event, or called after it, but you MUST - register the event on the @intersect_event decorator. The @intersect_event annotation will be IGNORED if you place it - after an @intersect_message annotation; its intended use is for threaded functions you start from the capability. - - Do NOT call this function from: - - any function called from an @intersect_status decorated function - - outside of the capability class (for example: capability_instance.intersect_sdk_emit_event(...) will not work). Create a function in the capability, decorate it with @intersect_event, and call that function. + In order to emit an event under a given 'event_name', you MUST configure that event on the 'intersect_sdk_events' class variable. + The corresponding event_value you pass in this function MUST match the typing you configure on the IntersectEventDefinition event_type property. params: - event_name: the type of event you are emitting. Note that you must advertise the event in your "entrypoint" function + event_name: the name of the event you are emitting. Note that you must advertise the event on the 'intersect_sdk_events' class variable. event_value: the value associated with the event. Note that this value must be accurate to its typing annotation.
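To make the new events flow concrete, here is a minimal sketch of a capability that declares an event on `intersect_sdk_events` and emits it from a background thread via `intersect_sdk_emit_event`. The import paths and the `@intersect_message()` call signature follow modules referenced elsewhere in this diff, but treat them as assumptions rather than the canonical public API.

```python
import threading
import time

from intersect_sdk.capability.base import IntersectBaseCapabilityImplementation
from intersect_sdk.service_definitions import IntersectEventDefinition, intersect_message


class TemperatureMonitor(IntersectBaseCapabilityImplementation):
    """Illustrative capability that declares and emits a 'temperature' event."""

    intersect_sdk_capability_name = 'TemperatureMonitor'
    intersect_sdk_events = {
        'temperature': IntersectEventDefinition(event_type=float),
    }

    @intersect_message()
    def start_polling(self, interval_seconds: float) -> bool:
        # Events no longer need to be tied to a message entrypoint, so a background thread works fine.
        threading.Thread(target=self._poll, args=(interval_seconds,), daemon=True).start()
        return True

    def _poll(self, interval_seconds: float) -> None:
        while True:
            # 'temperature' must be a key of intersect_sdk_events, and 21.5 matches its event_type (float).
            self.intersect_sdk_emit_event('temperature', 21.5)
            time.sleep(interval_seconds)
```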
""" - annotated_operation = None - # we iterate over the stack in REVERSE for two reasons: - # 1) we want to find the FIRST function (the "entrypoint") which is annotated. - # 2) in case the user has a large call stack - # TODO - this is an O(n) operation in a hot loop, try to optimize this later! Responses should have a constant based off library code, events we could potentially restrict. - for frame_info in reversed(inspect.stack()): - try: - capability_function = getattr(self, frame_info.function) - if hasattr(capability_function, BASE_STATUS_ATTR): - logger.error( - f'Cannot emit an event from @intersect_status function {frame_info.function}' - ) - # we won't throw an exception here because users could potentially catch it - # (and don't force failure because this is in a hot loop) - # just decline to emit the event and continue on normally - return - if hasattr(capability_function, BASE_EVENT_ATTR) or hasattr( - capability_function, BASE_RESPONSE_ATTR - ): - annotated_operation = frame_info.function - break - except AttributeError: - pass - if annotated_operation is None: - logger.error( - f"You did not register event '{event_name}' on an @intersect_message or @intersect_event function." - ) - return for observer in self.__intersect_sdk_observers__: - observer._on_observe_event(event_name, event_value, annotated_operation) # noqa: SLF001 (private for application devs, NOT for base implementation) + observer._on_observe_event(event_name, event_value, self.intersect_sdk_capability_name) # noqa: SLF001 (private for application devs, NOT for base implementation) @final def intersect_sdk_call_service( @@ -164,6 +149,7 @@ def intersect_sdk_call_service( def intersect_sdk_listen_for_service_event( self, service: HierarchyConfig, + capability_name: str, event_name: str, response_handler: INTERSECT_CLIENT_EVENT_CALLBACK_TYPE, ) -> None: @@ -173,6 +159,7 @@ def intersect_sdk_listen_for_service_event( Params: - service: The system-of-system hierarchy which points to the specific service + - capability_name: name of capability on the other service which will fire off the event - event_name: The name of the event we want to listen for - response_handler: callback for how to handle the reception of an event The callback submits these parameters: @@ -182,4 +169,4 @@ def intersect_sdk_listen_for_service_event( 4) payload """ for observer in self.__intersect_sdk_observers__: - observer.register_event(service, event_name, response_handler) + observer.register_event(service, capability_name, event_name, response_handler) diff --git a/src/intersect_sdk/capability/universal_capability/__init__.py b/src/intersect_sdk/capability/universal_capability/__init__.py new file mode 100644 index 0000000..a986bf8 --- /dev/null +++ b/src/intersect_sdk/capability/universal_capability/__init__.py @@ -0,0 +1,4 @@ +"""Functionality relating to the universal capability present on all INTERSECT Services. + +While users should generally not need to ever import these classes directly, they SHOULD remain stable. They are potentially more useful for Clients to inspect. 
+""" diff --git a/src/intersect_sdk/capability/universal_capability/status.py b/src/intersect_sdk/capability/universal_capability/status.py new file mode 100644 index 0000000..7ef7195 --- /dev/null +++ b/src/intersect_sdk/capability/universal_capability/status.py @@ -0,0 +1,25 @@ +"""Definitions supporting the 'core status' functionality of the core capability.""" + +from __future__ import annotations + +import datetime +from typing import Annotated + +from pydantic import BaseModel, Field + + +class IntersectCoreStatus(BaseModel): + """Core status information about the INTERSECT-SDK Service as a whole.""" + + uptime: datetime.timedelta + logical_cpus: Annotated[int, Field(title='Logical CPUs')] + physical_cpus: Annotated[int, Field(title='Physical CPUs')] + cpu_percentages: Annotated[list[float], Field(title='CPU Percentages')] + service_cpu_percentage: Annotated[float, Field(title='Service CPU Usage Percentage')] + """CPU usage of the INTERSECT-SDK Service, does not apply to subprocesses""" + memory_total: int + memory_usage_percentage: float + service_memory_percentage: Annotated[float, Field(title='Service Memory Usage Percentage')] + """Memory usage of the INTERSECT-SDK Service, does not apply to subprocesses""" + disk_total: int + disk_usage_percentage: float diff --git a/src/intersect_sdk/capability/universal_capability/universal_capability.py b/src/intersect_sdk/capability/universal_capability/universal_capability.py new file mode 100644 index 0000000..0cba00a --- /dev/null +++ b/src/intersect_sdk/capability/universal_capability/universal_capability.py @@ -0,0 +1,60 @@ +"""This is a universal capability which should provide common functionality for ALL Services. + +A Client should be able to reliably set the operation of a request to point to this function, regardless of the Service they want to talk to. + +NOTE: While users should generally not need to ever import this class directly, it SHOULD remain stable. Therefore, it does NOT belong in `_internal`, which does not have a stability guarantee. +""" + +import datetime +import os +import time +from typing import final + +import psutil + +from ...service_definitions import intersect_status +from ..base import IntersectBaseCapabilityImplementation +from .status import IntersectCoreStatus + + +@final +class IntersectSdkCoreCapability(IntersectBaseCapabilityImplementation): + """Core capability present in every INTERSECT Service. + + This may be called explicitly by any Client interacting with any SDK service. Set the operation to be "intersect_sdk.". + """ + + intersect_sdk_capability_name = 'intersect_sdk' + """We always reserve the 'intersect_sdk' capability name for this core capability. + + Importantly, this capability must always be inserted first when generating the schema and the function mapping. Since the schema generator will check for duplicate names, any attempt + by a user to call their capability 'intersect_sdk' will always fail. + """ + + def __init__(self) -> None: # noqa: D107 + super().__init__() + self.process = psutil.Process(os.getpid()) + """psutil.Process caches most functions it calls after it calls the function once, so just save the object itself""" + + @intersect_status + def system_capability(self) -> IntersectCoreStatus: + """The status of this Capability reflects core system information which is okay to broadcast across the INTERSECT-SDK system. + + As the INTERSECT status function, this function is called periodically, but we also want to allow calls to it explicitly. 
+ """ + mem_info = psutil.virtual_memory() + disk_info = psutil.disk_usage('/') + # TODO we should provide an option to list GPU information available (we could check out the GPUtil library, but there may be better approaches) + return IntersectCoreStatus( + uptime=datetime.timedelta(seconds=time.time() - self.process.create_time()), + logical_cpus=psutil.cpu_count() or 0, + physical_cpus=psutil.cpu_count(logical=False) or 0, + # do not block the thread when checking CPU percentages + cpu_percentages=psutil.cpu_percent(interval=0.0, percpu=True), + service_cpu_percentage=self.process.cpu_percent(interval=0.0), + memory_total=mem_info.total, + memory_usage_percentage=mem_info.percent, + service_memory_percentage=self.process.memory_percent(), + disk_total=disk_info.total, + disk_usage_percentage=disk_info.percent, + ) diff --git a/src/intersect_sdk/client.py b/src/intersect_sdk/client.py index 7a2389c..66974c0 100644 --- a/src/intersect_sdk/client.py +++ b/src/intersect_sdk/client.py @@ -19,21 +19,20 @@ from pydantic import ValidationError from typing_extensions import Self, final +from intersect_sdk._internal.generic_serializer import GENERIC_MESSAGE_SERIALIZER + from ._internal.control_plane.control_plane_manager import ( - GENERIC_MESSAGE_SERIALIZER, ControlPlaneManager, ) from ._internal.data_plane.data_plane_manager import DataPlaneManager from ._internal.exceptions import IntersectError from ._internal.logger import logger from ._internal.messages.event import ( - EventMessage, - deserialize_and_validate_event_message, + validate_event_message_headers, ) from ._internal.messages.userspace import ( - UserspaceMessage, - create_userspace_message, - deserialize_and_validate_userspace_message, + create_userspace_message_headers, + validate_userspace_message_headers, ) from ._internal.utils import die, send_os_signal from ._internal.version_resolver import resolve_user_version @@ -130,7 +129,7 @@ def __init__( # Do not persist, as we use the temporary client information to build this. self._control_plane_manager.add_subscription_channel( f'{self._hierarchy.hierarchy_string("/")}/response', - {self._handle_userspace_message_raw}, + {self._handle_userspace_message}, persist=False, ) if event_callback: @@ -140,13 +139,15 @@ def __init__( service ) in config.initial_message_event_config.services_to_start_listening_for_events: self._control_plane_manager.add_subscription_channel( - f'{service.replace(".", "/")}/events', - {self._handle_event_message_raw}, + f'{service.hierarchy.replace(".", "/")}/events/{service.capability_name}/{service.event_name}', + {self._handle_event_message}, persist=False, ) self._user_callback = user_callback self._event_callback = event_callback + self._campaign_id = uuid4() + @final def startup(self) -> Self: """This function connects the client to all INTERSECT systems. 
@@ -223,16 +224,17 @@ def considered_unrecoverable(self) -> bool: """ return self._control_plane_manager.considered_unrecoverable() - def _handle_userspace_message_raw(self, raw: bytes) -> None: + def _handle_userspace_message( + self, payload: bytes, content_type: str, raw_headers: dict[str, str] + ) -> None: """Broker callback, deserialize and validate a userspace message from a broker.""" # safety check in case we get messages back faster than we can send them if self._terminate_after_initial_messages: return try: - message = deserialize_and_validate_userspace_message(raw) - logger.debug(f'Received userspace message:\n{message}') - self._handle_userspace_message(message) + headers = validate_userspace_message_headers(raw_headers) + logger.debug(f'Received userspace message:\n{headers}') except ValidationError as e: logger.warning( f'Invalid message received on userspace message channel, ignoring. Full message:\n{e}' @@ -242,14 +244,13 @@ def _handle_userspace_message_raw(self, raw: bytes) -> None: # but I would argue that it's fine here. If a service isn't sending valid messages, # the client has bigger problems. send_os_signal() + return - def _handle_userspace_message(self, message: UserspaceMessage) -> None: - """Handle a deserialized userspace message.""" # ONE: HANDLE CORE COMPAT ISSUES # is this first branch necessary? May not be in the future - if self._hierarchy.hierarchy_string('.') != message['headers'][ - 'destination' - ] or not resolve_user_version(message): + if self._hierarchy.hierarchy_string('.') != headers.destination or not resolve_user_version( + headers.sdk_version, headers.source, headers.data_handler + ): # NOTE # Again, I would argue that while this may seem drastic, it's fine here. # A client should NEVER be getting messages not addressed to it in a normal workflow. @@ -260,9 +261,11 @@ def _handle_userspace_message(self, message: UserspaceMessage) -> None: # TWO: GET DATA FROM APPROPRIATE DATA STORE AND DESERIALIZE IT try: - request_params = GENERIC_MESSAGE_SERIALIZER.validate_json( - self._data_plane_manager.incoming_message_data_handler(message) + request_params = self._data_plane_manager.incoming_message_data_handler( + payload, headers.data_handler ) + if content_type == 'application/json': + request_params = GENERIC_MESSAGE_SERIALIZER.validate_json(request_params) except ValidationError as e: logger.warning(f'Service sent back invalid response:\n{e}') # NOTE @@ -283,9 +286,9 @@ def _handle_userspace_message(self, message: UserspaceMessage) -> None: # NOTE: the way the service sends a message, errors and non-errors can be handled identically. # Leave it to the user to determine how they want to handle an error. 
user_function_return = self._user_callback( - message['headers']['source'], - message['operationId'], - message['headers']['has_error'], + headers.source, + headers.operation_id, + headers.has_error, request_params, ) # type: ignore[misc] # mypy note: when we are in this function, we know that the callback has been defined @@ -302,7 +305,9 @@ def _handle_userspace_message(self, message: UserspaceMessage) -> None: self._handle_client_callback(user_function_return) - def _handle_event_message_raw(self, raw: bytes) -> None: + def _handle_event_message( + self, payload: bytes, content_type: str, raw_headers: dict[str, str] + ) -> None: """Broker callback, deserialize and validate an event message from a broker.""" # safety check in case we get messages back faster than we can send them if self._terminate_after_initial_messages: @@ -310,32 +315,28 @@ def _handle_event_message_raw(self, raw: bytes) -> None: return try: - message = deserialize_and_validate_event_message(raw) - logger.debug(f'Received userspace message:\n{message}') - self._handle_event_message(message) + headers = validate_event_message_headers(raw_headers) + logger.debug(f'Received event message:\n{headers}') except ValidationError as e: logger.warning( f'Invalid message received on event message channel, ignoring. Full message:\n{e}' ) # NOTE # Unlike Userspace messages, we can safely discard bad event messages without dropping the pubsub loop. + return - def _handle_event_message(self, message: EventMessage) -> None: - """Handle a deserialized event message.""" # ONE: HANDLE CORE COMPAT ISSUES - if not resolve_user_version(message): - # NOTE - # Again, I would argue that while this may seem drastic, it's fine here. - # A client should also know enough about service SDK versions to know if - # it's even possible to try to send messages between them. - send_os_signal() + if not resolve_user_version(headers.sdk_version, headers.source, headers.data_handler): + # we can't handle this event message, so ignore it return # TWO: GET DATA FROM APPROPRIATE DATA STORE AND DESERIALIZE IT try: - request_params = GENERIC_MESSAGE_SERIALIZER.validate_json( - self._data_plane_manager.incoming_message_data_handler(message) + request_params = self._data_plane_manager.incoming_message_data_handler( + payload, headers.data_handler ) + if content_type == 'application/json': + request_params = GENERIC_MESSAGE_SERIALIZER.validate_json(request_params) except ValidationError as e: logger.warning(f'Service sent back invalid response:\n{e}') # NOTE @@ -356,9 +357,9 @@ def _handle_event_message(self, message: EventMessage) -> None: # NOTE: the way the service sends a message, errors and non-errors can be handled identically. # Leave it to the user to determine how they want to handle an error.
event_function_return = self._event_callback( - message['headers']['source'], - message['operationId'], - message['headers']['event_name'], + headers.source, + headers.capability_name, + headers.event_name, request_params, ) # type: ignore[misc] # mypy note: when we are in this function, we know that the callback has been defined @@ -390,13 +391,13 @@ def _handle_client_callback(self, user_value: IntersectClientCallback | None) -> if self._event_callback: for add_event in validated_result.services_to_start_listening_for_events: self._control_plane_manager.add_subscription_channel( - f'{add_event.replace(".", "/")}/events', - {self._handle_event_message_raw}, + f'{add_event.hierarchy.replace(".", "/")}/events/{add_event.capability_name}/{add_event.event_name}', + {self._handle_event_message}, persist=False, ) for remove_event in validated_result.services_to_stop_listening_for_events: self._control_plane_manager.remove_subscription_channel( - f'{remove_event.replace(".", "/")}/events' + f'{remove_event.hierarchy.replace(".", "/")}/events/{remove_event.capability_name}/{remove_event.event_name}' ) # sending userspace messages without the callback is okay, we just won't get the response @@ -407,12 +408,19 @@ def _send_userspace_message(self, params: IntersectDirectMessageParams) -> None: """Send a userspace message, be it an initial message from the user or from the user's callback function.""" # ONE: SERIALIZE FUNCTION RESULTS # (function input should already be validated at this point) - msg_payload = GENERIC_MESSAGE_SERIALIZER.dump_json(params.payload, warnings=False) + if params.content_type == 'application/json': + serialized_msg = GENERIC_MESSAGE_SERIALIZER.dump_json(params.payload, warnings=False) + else: + if not isinstance(params.payload, bytes): + logger.error('Content must be bytes if content-type is not application/json') + send_os_signal() + return + serialized_msg = params.payload # TWO: SEND DATA TO APPROPRIATE DATA STORE try: - out_payload = self._data_plane_manager.outgoing_message_data_handler( - msg_payload, params.content_type, params.data_handler + payload = self._data_plane_manager.outgoing_message_data_handler( + serialized_msg, params.content_type, params.data_handler ) except IntersectError: # NOTE @@ -422,17 +430,19 @@ def _send_userspace_message(self, params: IntersectDirectMessageParams) -> None: return # THREE: SEND MESSAGE - msg = create_userspace_message( + headers = create_userspace_message_headers( source=self._hierarchy.hierarchy_string('.'), destination=params.destination, - content_type=params.content_type, data_handler=params.data_handler, operation_id=params.operation, - payload=out_payload, + campaign_id=self._campaign_id, + request_id=uuid4(), ) - logger.debug(f'Send userspace message:\n{msg}') + logger.debug(f'Send userspace message:\n{headers}') channel = f'{params.destination.replace(".", "/")}/request' # WARNING: If both the Service and the Client drop, the Service will execute the command # but cannot communicate the response to the Client.
# in experiment controllers or production, you'll want to set persist to True - self._control_plane_manager.publish_message(channel, msg, persist=False) + self._control_plane_manager.publish_message( + channel, payload, params.content_type, headers, persist=False + ) diff --git a/src/intersect_sdk/client_callback_definitions.py b/src/intersect_sdk/client_callback_definitions.py index 39a4019..78e250e 100644 --- a/src/intersect_sdk/client_callback_definitions.py +++ b/src/intersect_sdk/client_callback_definitions.py @@ -3,13 +3,17 @@ See shared_callback_definitions for additional typings which are also shared by service authors. """ -from typing import Callable, List, Optional +from collections.abc import Callable +from typing import TypeAlias -from pydantic import BaseModel, ConfigDict, Field -from typing_extensions import Annotated, final +from pydantic import BaseModel, ConfigDict +from typing_extensions import final -from .constants import SYSTEM_OF_SYSTEM_REGEX -from .shared_callback_definitions import INTERSECT_JSON_VALUE, IntersectDirectMessageParams +from .shared_callback_definitions import ( + INTERSECT_JSON_VALUE, + IntersectDirectMessageParams, + IntersectEventMessageParams, +) @final @@ -19,21 +23,17 @@ class IntersectClientCallback(BaseModel): If you do not return a value of this type (or None), this will be treated as an Exception and will break the pub-sub loop. """ - messages_to_send: List[IntersectDirectMessageParams] = [] # noqa: FA100 (runtime annotation) + messages_to_send: list[IntersectDirectMessageParams] = [] """ Messages to send as a result of an event or a response from a Service. """ - services_to_start_listening_for_events: List[ # noqa: FA100 (runtime annotation) - Annotated[str, Field(pattern=SYSTEM_OF_SYSTEM_REGEX)] - ] = [] + services_to_start_listening_for_events: list[IntersectEventMessageParams] = [] """ Start listening to events from these services as a result of an event or a response from a Service. For each event in the list - if you are already listening to the event, the action will be a no-op. """ - services_to_stop_listening_for_events: List[ # noqa: FA100 (runtime annotation) - Annotated[str, Field(pattern=SYSTEM_OF_SYSTEM_REGEX)] - ] = [] + services_to_stop_listening_for_events: list[IntersectEventMessageParams] = [] """ Stop listening to events from these services as a result of an event or a response from a Service. @@ -44,9 +44,16 @@ class IntersectClientCallback(BaseModel): model_config = ConfigDict(revalidate_instances='always') +INTERSECT_RESPONSE_VALUE: TypeAlias = INTERSECT_JSON_VALUE | bytes +""" +This is the actual response value you will get back from a Service. The type will already be serialized into Python for you, +but will not be serialized into a precise value. +""" + + INTERSECT_CLIENT_RESPONSE_CALLBACK_TYPE = Callable[ - [str, str, bool, INTERSECT_JSON_VALUE], - Optional[IntersectClientCallback], + [str, str, bool, INTERSECT_RESPONSE_VALUE], + IntersectClientCallback | None, ] """ This is a callable function type which should be defined by the user. @@ -59,7 +66,7 @@ class IntersectClientCallback(BaseModel): 2) The name of the operation that triggered the response from your ORIGINAL message - needed for your own control flow loops if sending multiple messages. 3) A boolean - if True, there was an error; if False, there was not. 4) The response, as a Python object - the type should be based on the corresponding Service's schema response. - The Python object will already be deserialized for you. 
If parameter 3 was "True", then this will be the error message, as a string. + The Python object will already be deserialized for you (unless you are expecting binary data, then it will be a base64). If parameter 3 was "True", then this will be the error message, as a string. If parameter 3 was "False", then this will be either an integer, boolean, float, string, None, a List[T], or a Dict[str, T], where "T" represents any of the 7 aforementioned types. @@ -74,8 +81,8 @@ class IntersectClientCallback(BaseModel): """ INTERSECT_CLIENT_EVENT_CALLBACK_TYPE = Callable[ - [str, str, str, INTERSECT_JSON_VALUE], - Optional[IntersectClientCallback], + [str, str, str, INTERSECT_RESPONSE_VALUE], + IntersectClientCallback | None, ] """ This is a callable function type which should be defined by the user. @@ -85,10 +92,10 @@ class IntersectClientCallback(BaseModel): Params The SDK will send the function four arguments: 1) The message source (the SOS representation of the Service) - this is mostly useful for your own control flow loops you write in the function - 2) The name of the operation from the service that fired the event. + 2) The name of the capability from the service that fired the event. 3) The name of the event. 4) The response, as a Python object - the type should be based on the corresponding Service's event response. - The Python object will already be deserialized for you. This will be either an integer, boolean, float, string, None, + The Python object will already be deserialized for you (unless you are expecting binary data, then it will be base64). This will be either an integer, boolean, float, string, None, a List[T], or a Dict[str, T], where "T" represents any of the 7 aforementioned types. Returns diff --git a/src/intersect_sdk/config/__init__.py b/src/intersect_sdk/config/__init__.py index faf5d00..77c0e77 100644 --- a/src/intersect_sdk/config/__init__.py +++ b/src/intersect_sdk/config/__init__.py @@ -7,14 +7,3 @@ Configuration classes are the most likely thing to "break" following a version update. 
""" - -from .client import IntersectClientConfig -from .service import IntersectServiceConfig -from .shared import ( - HIERARCHY_REGEX, - ControlPlaneConfig, - ControlProvider, - DataStoreConfig, - DataStoreConfigMap, - HierarchyConfig, -) diff --git a/src/intersect_sdk/config/client.py b/src/intersect_sdk/config/client.py index 2abcdd0..37ce295 100644 --- a/src/intersect_sdk/config/client.py +++ b/src/intersect_sdk/config/client.py @@ -1,9 +1,9 @@ """Client specific configuration types.""" -from typing import List, Literal, Union +from typing import Annotated, Literal from pydantic import BaseModel, ConfigDict, Field -from typing_extensions import Annotated, final +from typing_extensions import final from ..client_callback_definitions import IntersectClientCallback from .shared import ControlPlaneConfig, DataStoreConfigMap @@ -13,7 +13,7 @@ class IntersectClientConfig(BaseModel): """The user-provided configuration needed to integrate with INTERSECT as a client.""" - brokers: Union[Annotated[List[ControlPlaneConfig], Field(min_length=1)], Literal['discovery']] # noqa: FA100 (Pydantic uses runtime annotations) + brokers: Annotated[list[ControlPlaneConfig], Field(min_length=1)] | Literal['discovery'] """ Configurations for any message brokers the application should attach to diff --git a/src/intersect_sdk/config/service.py b/src/intersect_sdk/config/service.py index 1d03c43..d2abb64 100644 --- a/src/intersect_sdk/config/service.py +++ b/src/intersect_sdk/config/service.py @@ -1,9 +1,8 @@ """Service specific configuration types.""" -from typing import List, Literal, Union +from typing import Annotated, Literal from pydantic import BaseModel, ConfigDict, Field, PositiveFloat -from typing_extensions import Annotated from .shared import ControlPlaneConfig, DataStoreConfigMap, HierarchyConfig @@ -16,7 +15,7 @@ class IntersectServiceConfig(BaseModel): Configuration of the System-of-System representation """ - brokers: Union[Annotated[List[ControlPlaneConfig], Field(min_length=1)], Literal['discovery']] # noqa: FA100 (Pydantic uses runtime annotations) + brokers: Annotated[list[ControlPlaneConfig], Field(min_length=1)] | Literal['discovery'] """ Configurations for any message brokers the application should attach to diff --git a/src/intersect_sdk/config/shared.py b/src/intersect_sdk/config/shared.py index a23b348..a6babb2 100644 --- a/src/intersect_sdk/config/shared.py +++ b/src/intersect_sdk/config/shared.py @@ -1,10 +1,9 @@ """Configuration types shared across both Clients and Services.""" from dataclasses import dataclass, field -from typing import List, Literal, Optional, Set +from typing import Annotated, Literal from pydantic import BaseModel, ConfigDict, Field, PositiveInt -from typing_extensions import Annotated from ..core_definitions import IntersectDataHandler @@ -23,7 +22,7 @@ The following commit tracks several issues with MINIO: https://code.ornl.gov/intersect/additive-manufacturing/ros-intersect-adapter/-/commit/fa71b791be0ccf1a5884910b5be3b5239cf9896f """ -ControlProvider = Literal['mqtt3.1.1', 'amqp0.9.1'] +ControlProvider = Literal['mqtt5.0', 'amqp0.9.1'] """The type of broker we connect to.""" @@ -35,7 +34,7 @@ class HierarchyConfig(BaseModel): The name of this application - should be unique within an INTERSECT system """ - subsystem: Optional[str] = Field(default=None, pattern=HIERARCHY_REGEX) # noqa: FA100 (Pydantic uses runtime annotations) + subsystem: str | None = Field(default=None, pattern=HIERARCHY_REGEX) """ An associated subsystem / service-grouping of the system 
(should be unique within an INTERSECT system) """ @@ -105,7 +104,7 @@ class ControlPlaneConfig: Broker hostname (default: 127.0.0.1) """ - port: Optional[PositiveInt] = None # noqa: FA100 (Pydantic uses runtime annotations) + port: PositiveInt | None = None """ Broker port. List of common ports: @@ -143,7 +142,7 @@ class DataStoreConfig: Data store hostname (default: 127.0.0.1) """ - port: Optional[PositiveInt] = None # noqa: FA100 (Pydantic uses runtime annotations) + port: PositiveInt | None = None """ Data store port """ @@ -153,12 +152,12 @@ class DataStoreConfig: class DataStoreConfigMap: """Configurations for any data stores the application should talk to.""" - minio: List[DataStoreConfig] = field(default_factory=list) # noqa: FA100 (Pydantic uses runtime annotations) + minio: list[DataStoreConfig] = field(default_factory=list) """ minio configurations """ - def get_missing_data_store_types(self) -> Set[IntersectDataHandler]: # noqa: FA100 (not technically a runtime annotation) + def get_missing_data_store_types(self) -> set[IntersectDataHandler]: """Return a set of IntersectDataHandlers which will not be permitted, due to a configuration type missing. If all data configurations exist, returns an empty set diff --git a/src/intersect_sdk/constants.py b/src/intersect_sdk/constants.py index 05a054e..2a1c499 100644 --- a/src/intersect_sdk/constants.py +++ b/src/intersect_sdk/constants.py @@ -1,9 +1,28 @@ """These are miscellaneous constants used in INTERSECT which SDK users may obtain value from knowing about.""" -SYSTEM_OF_SYSTEM_REGEX = r'([-a-z0-9]+\.)*[-a-z0-9]' +SYSTEM_OF_SYSTEM_REGEX = r'^[a-z0-9][-a-z0-9.]*[-a-z0-9]$' """ This is the regex used as a representation of a source/destination. This is only needed externally if you are building a client, services can ignore this. -NOTE: for future compatibility reasons, we are NOT specifying the number of "parts" (separated by a '.') in this regex. +NOTE: for future compatibility reasons, we are NOT specifying the number of "parts" (separated by a '.') in this regex. All that matters is that you don't start or end with a period, or start with a hyphen. +""" + +# see the internal schema file for full validation details +CAPABILITY_REGEX = r'^[a-zA-Z0-9]\w*$' +""" +This is the regex used for representing capabilities and event keys. Capabilities should start with an alphanumeric character, and not be longer than 255 characters. + +This regex applies to namespacing local to a Service, so does not have to be unique across the ecosystem. +""" + +MIME_TYPE_REGEX = r'\w+/[-+.\w]+' +""" +Regex used for validating Content Types. + +References can be found at: + +- https://www.iana.org/assignments/media-types/media-types.xhtml + +- https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/MIME_types/Common_types """ diff --git a/src/intersect_sdk/core_definitions.py b/src/intersect_sdk/core_definitions.py index b397e15..b9b0047 100644 --- a/src/intersect_sdk/core_definitions.py +++ b/src/intersect_sdk/core_definitions.py @@ -1,37 +1,47 @@ """Core enumerations and structures used throughout INTERSECT, for both client and service.""" -from enum import Enum, IntEnum +from enum import Enum +from typing import Annotated +from pydantic import Field -class IntersectDataHandler(IntEnum): +from .constants import MIME_TYPE_REGEX + + +class IntersectDataHandler(Enum): """What data transfer type do you want to use for handling the request/response? 
Default: MESSAGE """ - MESSAGE = 0 - MINIO = 1 + MESSAGE = 'MESSAGE' + MINIO = 'MINIO' -class IntersectMimeType(Enum): - """Roughly corresponds to "Content-Type" values, but enforce standardization of values. +IntersectMimeType = Annotated[str, Field(pattern=MIME_TYPE_REGEX)] +""" +Special typing which represents a "Content-Type" value (e.g. `application/json`). - Default: JSON +The value should be a MIME type; references can be found at: - The value should be a MIME type; references can be found at: +- https://www.iana.org/assignments/media-types/media-types.xhtml - - https://www.iana.org/assignments/media-types/media-types.xhtml +- https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/MIME_types/Common_types - - https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/MIME_types/Common_types +These values are used to help map an output (or a part of an output) from an arbitrary microservice to an input (or a part of an input) of another arbitrary microservice. - JSON is acceptable for any file which contains non-binary data. +In general, mime types follow one of two rules: +- Complex types (types which cannot be represented as a sequence of bytes) MUST be represented by a Content-Type of 'application/json' (this is default). - BINARY is acceptable for any file which contains binary data and can reasonably be handled as application/octet-string. + If a complex type has binary data in a field, this field MUST be Base64 encoded. - This list is not exhaustive and should be regularly updated. If this list is missing a MIME type you would like to use, please contact the developers or open an issue. - """ + You can mark the type with either 'pydantic.Base64Bytes', or if you need the value to be URL safe, 'pydantic.Base64UrlBytes'. You MUST also specify the "contentType" property, like this: + + ``` + field: Annotated[pydantic.Base64Bytes, pydantic.Field(json_schema_extra={"contentType": "image/png"})] + ``` + + INTERSECT is able to handle serialization/deserialization of 'application/json' types for you; note that you will need to verify binary data (incoming and outgoing) yourself, though INTERSECT will handle the Base64 encoding/decoding. - JSON = 'application/json' - STRING = 'text/plain' - BINARY = 'application/octet-string' - HDF5 = 'application/x-hdf5' +- If your Content-Type value is ANYTHING ELSE, you MUST mark the type as "bytes". In this instance, INTERSECT will not base64-encode or base64-decode the value. +""" diff --git a/src/intersect_sdk/exceptions.py new file mode 100644 index 0000000..685ed25 --- /dev/null +++ b/src/intersect_sdk/exceptions.py @@ -0,0 +1,40 @@ +"""Public exceptions API.""" + +from ._internal.exceptions import IntersectError + + +class IntersectCapabilityError(IntersectError): + """This is a marker for a special kind of Capability Exception. WARNING: USE THIS WITH CARE. + + When the SDK catches an Exception from Capability code, it has to decide whether to send information about the Exception in the message, or a generic "Application raised Exception" message. + + The SDK will NOT propagate uncaught exceptions from Capabilities. + In many cases, these exceptions are accidentally thrown, and could leak sensitive information. + The one exception to this is Pydantic's ValidationError, which should always stem from bad user input (so letting the user know about their invalid input is important).
+ + This Exception provides a way for the SDK to know that your Capability explicitly threw this Exception, since nothing else in the SDK will throw it. + If you explicitly throw an IntersectCapabilityError and attach a message to it, the SDK will send this message out publicly. + This can be useful for diagnostic purposes, or you may just want to catch an error and fully inform clients about it. + + For example, the following code will send the generic "Service domain logic threw exception" message through INTERSECT (this is an uncaught ZeroDivisionError): + + ``` + @intersect_message + def call_division(self) -> float: + return 7 / 0 + ``` + + The following code will send: "Service domain logic threw explicit exception: division by zero" + + ``` + @intersect_message + def call_division(self) -> float: + try: + return 7 / 0 + except ZeroDivisionError as e: + raise IntersectCapabilityError(str(e)) + ``` + + Note that this Exception is only useful for `@intersect_message` annotated functions, as Clients are expecting a response. + It is pointless to throw this exception in events, because no message will get sent out if event code errors out. + """ diff --git a/src/intersect_sdk/schema.py b/src/intersect_sdk/schema.py index d17d1c0..3f67463 100644 --- a/src/intersect_sdk/schema.py +++ b/src/intersect_sdk/schema.py @@ -9,7 +9,7 @@ JSON schema advertises the interfaces to other systems. This is necessary for creating scientific campaigns, and the schema is extensible to other clients. -Parts of the schema will be generated from users' own definitions. Functions are represented under "channels", +Parts of the schema will be generated from users' own definitions. Functions are represented under "endpoints", while Pydantic models defined by users and used as request or response types in their functions will have their schemas generated here. There are also several parameters mainly for use by the central INTERSECT microservices, largely encapsulated from users. @@ -35,6 +35,7 @@ from ._internal.schema import get_schema_and_functions_from_capability_implementations from .capability.base import IntersectBaseCapabilityImplementation +from .capability.universal_capability.universal_capability import IntersectSdkCoreCapability if TYPE_CHECKING: from .config.shared import HierarchyConfig @@ -50,7 +51,7 @@ def get_schema_from_capability_implementations( Some key differences: - We utilize three custom fields: "capabilities", "events", and "status". - "capabilities" contains a dictionary: the keys of this dictionary are capability names. The values are dictionaries with the "description" property being a string which describes the capability, - and a "channels" property which more closely follows the AsyncAPI specification of the top-level value "channels". + and a "endpoints" property which more closely follows the AsyncAPI specification of the top-level value "endpoints". - "events" is a key-value dictionary: the keys represent the event name, the values represent the associated schema of the event type. Events are currently shared across all capabilities. - "status" will have a value of the status schema - if no status has been defined, a null schema is used. 
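To visualize how the generator lays out each capability in the produced schema, here is an illustrative, heavily truncated sketch of the `capabilities` section written as a Python dict. The key names follow the generator code earlier in this diff; the payload and event schemas shown are placeholders for the JSON schemas generated from your Pydantic types.

```python
# Illustrative shape only; real schemas are generated from user-defined types.
capabilities_section = {
    'TemperatureMonitor': {
        'description': 'Docstring of the capability class, if present.',
        'endpoints': {
            'start_polling': {
                'subscribe': {'message': {'contentType': 'application/json', 'payload': {'type': 'number'}}},
                'publish': {'message': {'contentType': 'application/json', 'payload': {'type': 'boolean'}}},
            },
        },
        'events': {'temperature': {'type': 'number'}},
        'status': {'type': 'null'},  # a null schema is used when no @intersect_status function is defined
    },
}
```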
@@ -62,9 +63,9 @@ def get_schema_from_capability_implementations( msg = 'get_schema_from_capability_implementations - not all provided values are valid capabilities (class must extend IntersectBaseCapabilityImplementation)' raise RuntimeError(msg) - schemas, _, _, _, _, _ = get_schema_and_functions_from_capability_implementations( - capability_types, + schema, _, _, _ = get_schema_and_functions_from_capability_implementations( + [IntersectSdkCoreCapability, *capability_types], hierarchy, - set(), # assume all data handlers are configured if user is just checking their schema + set(), # assume all data handlers are configured if user is just checking their schema - TODO may want to start asserting this in the schema ) - return schemas + return schema diff --git a/src/intersect_sdk/service.py b/src/intersect_sdk/service.py index 37dfef3..d305d51 100644 --- a/src/intersect_sdk/service.py +++ b/src/intersect_sdk/service.py @@ -20,60 +20,56 @@ from collections import defaultdict from threading import Lock from types import MappingProxyType -from typing import TYPE_CHECKING, Any, Callable, Literal, Union -from uuid import UUID, uuid1, uuid3 +from typing import TYPE_CHECKING, Any, Literal +from uuid import UUID, uuid1, uuid3, uuid4 from pydantic import ConfigDict, ValidationError, validate_call from pydantic_core import PydanticSerializationError from typing_extensions import Self, final -from ._internal.constants import ( - RESPONSE_CONTENT, - RESPONSE_DATA, - SHUTDOWN_KEYS, - STRICT_VALIDATION, -) from ._internal.control_plane.control_plane_manager import ( - GENERIC_MESSAGE_SERIALIZER, ControlPlaneManager, ) from ._internal.data_plane.data_plane_manager import DataPlaneManager from ._internal.exceptions import IntersectApplicationError, IntersectError +from ._internal.generic_serializer import GENERIC_MESSAGE_SERIALIZER from ._internal.interfaces import IntersectEventObserver from ._internal.logger import logger from ._internal.messages.event import ( - create_event_message, - deserialize_and_validate_event_message, + create_event_message_headers, + validate_event_message_headers, ) -from ._internal.messages.lifecycle import LifecycleType, create_lifecycle_message +from ._internal.messages.lifecycle import LifecycleType, create_lifecycle_message_headers from ._internal.messages.userspace import ( - UserspaceMessage, - create_userspace_message, - deserialize_and_validate_userspace_message, + UserspaceMessageHeaders, + create_userspace_message_headers, + validate_userspace_message_headers, ) from ._internal.schema import get_schema_and_functions_from_capability_implementations from ._internal.stoppable_thread import StoppableThread -from ._internal.utils import die +from ._internal.utils import die, send_os_signal from ._internal.version_resolver import resolve_user_version from .capability.base import IntersectBaseCapabilityImplementation +from .capability.universal_capability.universal_capability import IntersectSdkCoreCapability from .client_callback_definitions import ( - INTERSECT_CLIENT_EVENT_CALLBACK_TYPE, # noqa: TC001 (runtime-checked-annotation) + INTERSECT_CLIENT_EVENT_CALLBACK_TYPE, ) from .config.service import IntersectServiceConfig from .config.shared import HierarchyConfig -from .core_definitions import IntersectDataHandler, IntersectMimeType +from .core_definitions import IntersectDataHandler +from .exceptions import IntersectCapabilityError from .service_callback_definitions import ( - INTERSECT_SERVICE_RESPONSE_CALLBACK_TYPE, # noqa: TC001 (runtime-checked annotation) + 
INTERSECT_SERVICE_RESPONSE_CALLBACK_TYPE, ) from .shared_callback_definitions import ( - INTERSECT_JSON_VALUE, # noqa: TC001 (runtime-checked annotation) - IntersectDirectMessageParams, # noqa: TC001 (runtime-checked annotation) + INTERSECT_JSON_VALUE, # noqa: F401 (importing this here fixes an import error for the usage of the callback_types in the @validate_call wrappers) + INTERSECT_RESPONSE_VALUE, + IntersectDirectMessageParams, ) from .version import version_string if TYPE_CHECKING: from ._internal.function_metadata import FunctionMetadata - from .config.shared import HierarchyConfig @final @@ -158,7 +154,7 @@ def __init__( self.request_name = req_name self.request = request self.has_error = False - self.response_payload: INTERSECT_JSON_VALUE = None + self.response_payload: INTERSECT_RESPONSE_VALUE = None """Value we get back as response""" self.response_fn = response_handler """User callback function""" @@ -174,7 +170,12 @@ def __init__( capabilities: Your list of capability implementation classes config: The IntersectConfig class """ - for cap in capabilities: + self.capabilities: list[IntersectBaseCapabilityImplementation] = [ + IntersectSdkCoreCapability(), + *capabilities, + ] + + for cap in self.capabilities: if not isinstance(cap, IntersectBaseCapabilityImplementation): die( f'IntersectService parameter must inherit from intersect_sdk.IntersectBaseCapabilityImplementation instead of "{cap.__class__.__name__}" .' @@ -187,8 +188,6 @@ def __init__( # we generally start observing and don't stop, doesn't really matter if we startup or shutdown cap._intersect_sdk_register_observer(self) # noqa: SLF001 (we don't want users calling or overriding it, but this is fine.) - self.capabilities = capabilities - # this is called here in case a user created the object using "IntersectServiceConfig.model_construct()" to skip validation config = IntersectServiceConfig.model_validate(config) @@ -196,19 +195,12 @@ def __init__( schema, function_map, event_map, - status_fn_capability_type, - status_fn_name, - status_type_adapter, + status_list, ) = get_schema_and_functions_from_capability_implementations( [c.__class__ for c in self.capabilities], service_name=config.hierarchy, excluded_data_handlers=config.data_stores.get_missing_data_store_types(), ) - status_fn_capability = None - if status_fn_capability_type: - status_fn_capability = next( - c for c in self.capabilities if c.__class__ is status_fn_capability_type - ) self._schema = schema """ Stringified schema of the user's application. Gets sent in several status message requests. @@ -219,9 +211,6 @@ def __init__( INTERNAL USE ONLY Immutable mapping of operation IDs (advertised in schema, sent in message) to actual function implementations. - - You can get user-defined properties from the method via getattr(_function_map.method, KEY), the keys get set - in the intersect_message decorator function (annotations.py). """ self._event_map = MappingProxyType(event_map) @@ -231,6 +220,13 @@ def __init__( Immutable mapping of event names to actual function implementations. 
""" + self._status_list = status_list + """ + INTERNAL USE ONLY + + Immutable listing of capabilities + """ + self._function_keys: set[str] = set() """ INTERNAL USE ONLY @@ -245,21 +241,11 @@ def __init__( self._hierarchy = config.hierarchy self._uuid = uuid3(uuid1(), config.hierarchy.hierarchy_string('.')) + # for service-to-service requests where this service is the client + self._campaign_id = uuid4() + self._status_thread: StoppableThread | None = None self._status_ticker_interval = config.status_interval - self._status_retrieval_fn: Callable[[], bytes] = ( - ( - lambda: status_type_adapter.dump_json( - getattr(status_fn_capability, status_fn_name)(), - by_alias=True, - warnings='error', - ) - ) - if status_type_adapter and status_fn_name - else lambda: b'null' - ) - - self._status_memo = self._status_retrieval_fn() self._external_request_thread: StoppableThread | None = None self._external_requests_lock = Lock() @@ -276,7 +262,7 @@ def __init__( { "org.fac.sys1.subsys.service": { - "event_name_1": [ + "CAP_NAME.event_name_1": [ # user_function_1, # user_function_2 ] @@ -303,7 +289,8 @@ def __init__( self._data_plane_manager = DataPlaneManager(self._hierarchy, config.data_stores) # we PUBLISH messages on this channel self._lifecycle_channel_name = f'{config.hierarchy.hierarchy_string("/")}/lifecycle' - # we PUBLISH event messages on this channel + # we PUBLISH event messages on channels DERIVED from this + # `${HIERACRCHY_STRING}/events/${CAPABILITY_NAME}/${EVENT_NAME}` self._events_channel_name = f'{config.hierarchy.hierarchy_string("/")}/events' # we SUBSCRIBE to messages on this channel to receive requests self._service_channel_name = f'{config.hierarchy.hierarchy_string("/")}/request' @@ -315,13 +302,13 @@ def __init__( ) # our userspace queue should be able to survive shutdown self._control_plane_manager.add_subscription_channel( - self._service_channel_name, {self._handle_service_message_raw}, persist=True + self._service_channel_name, {self._handle_service_message}, persist=True ) self._control_plane_manager.add_subscription_channel( - self._client_channel_name, {self._handle_client_message_raw}, persist=True + self._client_channel_name, {self._handle_client_message}, persist=True ) - def _get_capability(self, target: str) -> Any | None: + def _get_capability(self, target: str) -> IntersectBaseCapabilityImplementation | None: for cap in self.capabilities: if cap.intersect_sdk_capability_name == target: return cap @@ -350,9 +337,16 @@ def startup(self) -> Self: logger.error('Cannot start service due to unrecoverable error') return self + # status function should work by the time startup() is called + lifecycle_payload = GENERIC_MESSAGE_SERIALIZER.dump_json( + { + 'schema': self._schema, + 'status': self._status_retrieval_fn(fail_harshly=True), + } + ) self._send_lifecycle_message( - lifecycle_type=LifecycleType.STARTUP, - payload={'schema': self._schema, 'status': self._status_memo}, + lifecycle_type='LCT_STARTUP', + payload=lifecycle_payload, ) # Start the status thread if it doesn't already exist @@ -418,7 +412,10 @@ def shutdown(self, reason: str | None = None) -> Self: self._status_thread = None try: - self._send_lifecycle_message(lifecycle_type=LifecycleType.SHUTDOWN, payload=reason) + self._send_lifecycle_message( + lifecycle_type='LCT_SHUTDOWN', + payload=GENERIC_MESSAGE_SERIALIZER.dump_json(reason) if reason else b'null', + ) except Exception as e: # noqa: BLE001 (this could fail on numerous protocols) logger.error( 'Could not send shutdown message, INTERSECT Core will 
eventually assume this Service has shutdown.' @@ -458,9 +455,10 @@ def forbid_keys(self, keys: set[str]) -> Self: keys: keys of functions you want to block """ self._function_keys |= keys + payload = GENERIC_MESSAGE_SERIALIZER.dump_json(tuple(keys)) self._send_lifecycle_message( - lifecycle_type=LifecycleType.FUNCTIONS_BLOCKED, - payload=tuple(keys), + lifecycle_type='LCT_FUNCTIONS_BLOCKED', + payload=payload, ) return self @@ -475,9 +473,10 @@ def allow_keys(self, keys: set[str]) -> Self: keys: keys of functions you want to block """ self._function_keys -= keys + payload = GENERIC_MESSAGE_SERIALIZER.dump_json(tuple(keys)) self._send_lifecycle_message( - lifecycle_type=LifecycleType.FUNCTIONS_ALLOWED, - payload=tuple(keys), + lifecycle_type='LCT_FUNCTIONS_ALLOWED', + payload=payload, ) return self @@ -486,10 +485,10 @@ def allow_all_functions(self) -> Self: If you want to only allow certain functions, use "service.allow_keys()" """ - payload = tuple(self._function_keys) + payload = GENERIC_MESSAGE_SERIALIZER.dump_json(tuple(self._function_keys)) self._function_keys.clear() self._send_lifecycle_message( - lifecycle_type=LifecycleType.FUNCTIONS_ALLOWED, + lifecycle_type='LCT_FUNCTIONS_ALLOWED', payload=payload, ) return self @@ -500,12 +499,11 @@ def block_all_functions(self) -> Self: Note that this does NOT disconnect from INTERSECT, and will not block functions which have no markings. """ - self._function_keys = set.union( - *(getattr(m, SHUTDOWN_KEYS) for m in (f.method for f in self._function_map.values())) - ) + self._function_keys = set.union(*(f.shutdown_keys for f in self._function_map.values())) + payload = GENERIC_MESSAGE_SERIALIZER.dump_json(tuple(self._function_keys)) self._send_lifecycle_message( - lifecycle_type=LifecycleType.FUNCTIONS_BLOCKED, - payload=tuple(self._function_keys), + lifecycle_type='LCT_FUNCTIONS_BLOCKED', + payload=payload, ) return self @@ -554,6 +552,7 @@ def add_shutdown_messages( def register_event( self, service: HierarchyConfig, + capability_name: str, event_name: str, response_handler: INTERSECT_CLIENT_EVENT_CALLBACK_TYPE, ) -> None: @@ -561,47 +560,53 @@ def register_event( Params: - service: HierarchyConfig of the service we want to talk to + - capability_name: name of capability which will fire off the event - event_name: name of event to subscribe to - response_handler: callback for how to handle the reception of an event """ hierarchy = service.hierarchy_string('.') - self._svc2svc_events[hierarchy][event_name].add(response_handler) + self._svc2svc_events[hierarchy][f'{capability_name}.{event_name}'].add(response_handler) self._control_plane_manager.add_subscription_channel( - f'{service.hierarchy_string("/")}/events', + f'{service.hierarchy_string("/")}/events/{capability_name}/{event_name}', {self._svc2svc_event_callback}, persist=True, ) - def _svc2svc_event_callback(self, raw: bytes) -> None: + def _svc2svc_event_callback( + self, payload: bytes, content_type: str, raw_headers: dict[str, str] + ) -> None: """Callback received when this service gets an event from another service. Deserializes and validates an EventMessage, will call a userspace function accordingly. """ try: - message = deserialize_and_validate_event_message(raw) + headers = validate_event_message_headers(raw_headers) except ValidationError as e: logger.warning( "Invalid message received from another service's events channel, ignoring. 
Full message:\n{}", e, ) return - logger.debug('Received event message:\n{}', message) + logger.debug('Received event message:\n{}', headers) try: - payload = GENERIC_MESSAGE_SERIALIZER.validate_json( - self._data_plane_manager.incoming_message_data_handler(message) + request_params = self._data_plane_manager.incoming_message_data_handler( + payload, headers.data_handler ) + if content_type == 'application/json': + request_params = GENERIC_MESSAGE_SERIALIZER.validate_json(request_params) except ValidationError as e: logger.warning( 'Invalid payload message received as an event, ignoring. Full message: {}', e, ) return - source = message['headers']['source'] - event_name = message['headers']['event_name'] - for user_callback in self._svc2svc_events[source][event_name]: + source = headers.source + capability_name = headers.capability_name + event_name = headers.event_name + for user_callback in self._svc2svc_events[source][f'{capability_name}.{event_name}']: try: - user_callback(source, message['operationId'], event_name, payload) + user_callback(source, capability_name, event_name, request_params) except Exception as e: # noqa: BLE001 (need to catch any possible user exception) logger.warning( '!!! INTERSECT: event callback function "%s" produced uncaught exception when handling event "%s" from "%s"', @@ -615,7 +620,7 @@ def _svc2svc_event_callback(self, raw: bytes) -> None: def create_external_request( self, request: IntersectDirectMessageParams, - response_handler: Union[INTERSECT_SERVICE_RESPONSE_CALLBACK_TYPE, None] = None, # noqa: UP007 (runtime checked annotation) + response_handler: INTERSECT_SERVICE_RESPONSE_CALLBACK_TYPE | None = None, timeout: float = 300.0, ) -> UUID: """Create an external request that we'll send to a different Service. @@ -646,7 +651,7 @@ def create_external_request( self._external_requests_lock.release_lock() return request_uuid - def _get_external_request(self, req_id: UUID) -> IntersectService._ExternalRequest | None: + def _get_external_request(self, req_id: UUID) -> _ExternalRequest | None: req_id_str = str(req_id) if req_id_str in self._external_requests: req: IntersectService._ExternalRequest = self._external_requests[req_id_str] @@ -711,7 +716,9 @@ def _process_external_request(self, extreq: IntersectService._ExternalRequest) - ) extreq.request_state = 'finalized' - def _handle_service_message_raw(self, raw: bytes) -> None: + def _handle_service_message( + self, payload: bytes, content_type: str, raw_headers: dict[str, str] + ) -> None: """Main broker callback function. Deserializes and validates a userspace message from a broker. 
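Since `create_external_request` and the revised response-callback typing both appear in this changeset, a short usage sketch may help. The destination and operation names are hypothetical, and the meaning of the callback's two leading string parameters (assumed here to be the responding service and the operation) is an assumption rather than something this diff guarantees; the module import paths match the files touched by this patch.

```
from intersect_sdk.service import IntersectService
from intersect_sdk.shared_callback_definitions import (
    INTERSECT_RESPONSE_VALUE,
    IntersectDirectMessageParams,
)


def handle_reply(
    source: str, operation: str, has_error: bool, payload: INTERSECT_RESPONSE_VALUE
) -> None:
    # JSON responses arrive as deserialized Python values, non-JSON responses as raw bytes;
    # when has_error is True, payload is the error string
    if has_error:
        print(f'{source} -> {operation} failed: {payload}')
    else:
        print(f'{source} -> {operation} returned: {payload}')


def request_fibonacci(service: IntersectService) -> None:
    # operation is addressed as '<capability_name>.<function_name>'
    request_id = service.create_external_request(
        IntersectDirectMessageParams(
            destination='org.fac.sys1.subsys.otherservice',
            operation='DummyCapability.calculate_fibonacci',
            payload=[4, 6],
        ),
        response_handler=handle_reply,
        timeout=30.0,
    )
    print(f'sent service-to-service request {request_id}')
```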
@@ -719,70 +726,94 @@ def _handle_service_message_raw(self, raw: bytes) -> None: This function is also responsible for publishing all response messages from the broker """ try: - message = deserialize_and_validate_userspace_message(raw) - logger.debug(f'Received userspace message:\n{message}') - response_msg = self._handle_service_message(message) - if response_msg: + headers = validate_userspace_message_headers(raw_headers) + logger.debug(f'Received userspace message:\n{headers}') + response = self._handle_service_message_inner(payload, content_type, headers) + if response: + response_payload, response_content_type, response_headers = response logger.debug( 'Send %s message:\n%s', - 'error' if response_msg['headers']['has_error'] else 'userspace', - response_msg, + 'error' if response_headers['has_error'] else 'userspace', + response_headers, ) - response_channel = f'{message["headers"]["source"].replace(".", "/")}/response' + response_channel = f'{headers.source.replace(".", "/")}/response' # Persistent userspace messages may be useful for orchestration. # Persistence will not hurt anything. self._control_plane_manager.publish_message( - response_channel, response_msg, persist=True + response_channel, + response_payload, + response_content_type, + response_headers, + persist=True, ) except ValidationError as e: logger.warning( f'Invalid message received on userspace message channel, ignoring. Full message:\n{e}' ) - def _handle_service_message(self, message: UserspaceMessage) -> UserspaceMessage | None: + def _handle_service_message_inner( + self, payload: bytes, content_type: str, headers: UserspaceMessageHeaders + ) -> tuple[bytes, str, dict[str, str]] | None: """Main logic for handling a userspace message, minus all broker logic. Params - message: UserspaceMessage from a client + payload: raw data from client + content_type: content type of the message + headers: validated headers Returns - The response message we want to send to the client, or None if we don't want to send anything. + None if we don't want to send anything. + Otherwise, return a tuple of the raw data we'll send back, the content type of the data, and the headers. + """ # ONE: HANDLE CORE COMPAT ISSUES # is this first branch necessary? May not be in the future - if self._hierarchy.hierarchy_string('.') != message['headers']['destination']: + if self._hierarchy.hierarchy_string('.') != headers.destination: return None - if not resolve_user_version(message): - return self._make_error_message( - f'SDK version incompatibility. Service version: {version_string} . Sender version: {message["headers"]["sdk_version"]}', - message, + if not resolve_user_version(headers.sdk_version, headers.source, headers.data_handler): + return ( + f'SDK version incompatibility. Service version: {version_string} . 
Sender version: {headers.sdk_version}'.encode(), + 'text/plain', + self._make_error_message_headers(headers), ) - # TWO: OPERATION EXISTS AND IS AVAILABLE - operation = message['operationId'] + # TWO: OPERATION EXISTS, CONTENT TYPE MATCHES, AND OPERATION IS AVAILABLE + operation = headers.operation_id operation_meta = self._function_map.get(operation) if operation_meta is None: err_msg = f'Tried to call non-existent operation {operation}' - logger.warning(err_msg) - return self._make_error_message(err_msg, message) - if self._function_keys & getattr(operation_meta.method, SHUTDOWN_KEYS): + logger.debug(err_msg) + return (err_msg.encode(), 'text/plain', self._make_error_message_headers(headers)) + + if operation_meta.request_content_type != content_type: + err_msg = f'For operation {operation}, request content type {content_type} differs from actual content type {operation_meta.request_content_type}' + logger.debug(err_msg) + return (err_msg.encode(), 'text/plain', self._make_error_message_headers(headers)) + + if self._function_keys & operation_meta.shutdown_keys: err_msg = f"Function '{operation}' is currently not available for use." logger.error(err_msg) - return self._make_error_message(err_msg, message) + return (err_msg.encode(), 'text/plain', self._make_error_message_headers(headers)) operation_capability, operation_method = operation.split('.') target_capability = self._get_capability(operation_capability) if target_capability is None: err_msg = f"Could not locate service capability providing '{operation_capability}' for operation {operation}." logger.error(err_msg) - return self._make_error_message(err_msg, message) + return (err_msg.encode(), 'text/plain', self._make_error_message_headers(headers)) # THREE: GET DATA FROM APPROPRIATE DATA STORE try: - request_params = self._data_plane_manager.incoming_message_data_handler(message) + request_params = self._data_plane_manager.incoming_message_data_handler( + payload, headers.data_handler + ) except IntersectError: # could theoretically be either a service or client issue # XXX send a better error message? 
- return self._make_error_message('Could not get data from data handler', message) + return ( + b'Could not get data from data handler', + 'text/plain', + self._make_error_message_headers(headers), + ) try: # FOUR: CALL USER FUNCTION AND GET MESSAGE @@ -790,57 +821,75 @@ def _handle_service_message(self, message: UserspaceMessage) -> UserspaceMessage target_capability, operation_method, operation_meta, request_params ) # FIVE: SEND DATA TO APPROPRIATE DATA STORE - response_data_handler = getattr(operation_meta.method, RESPONSE_DATA) - response_content_type = getattr(operation_meta.method, RESPONSE_CONTENT) + response_data_handler = operation_meta.response_data_transfer_handler + response_content_type = operation_meta.response_content_type response_payload = self._data_plane_manager.outgoing_message_data_handler( response, response_content_type, response_data_handler ) except ValidationError as e: # client issue with request parameters - return self._make_error_message(f'Bad arguments to application:\n{e}', message) + return ( + f'Bad arguments to application:\n{e}'.encode(), + 'text/plain', + self._make_error_message_headers(headers), + ) + except IntersectCapabilityError as e: + return ( + f'Service domain logic threw explicit exception:\n{e}'.encode(), + 'text/plain', + self._make_error_message_headers(headers), + ) except IntersectApplicationError: - # domain-level exception; do not send specifics about the exception because it may leak internals - return self._make_error_message('Service domain logic threw exception.', message) + # domain-level exception not explicitly caught; do not send specifics about the exception because it may leak internals + return ( + b'Service domain logic threw exception.', + 'text/plain', + self._make_error_message_headers(headers), + ) except IntersectError: # XXX send a better error message? This is a service issue - return self._make_error_message('Could not send data to data handler', message) - finally: - self._check_for_status_update() + return ( + b'Could not send data to data handler', + 'text/plain', + self._make_error_message_headers(headers), + ) # SIX: SEND MESSAGE - return create_userspace_message( - source=message['headers']['destination'], - destination=message['headers']['source'], - content_type=response_content_type, + response_headers = create_userspace_message_headers( + source=headers.destination, + destination=headers.source, data_handler=response_data_handler, - operation_id=message['operationId'], - payload=response_payload, - message_id=message['messageId'], # associate response with request + operation_id=headers.operation_id, + request_id=headers.request_id, + campaign_id=headers.campaign_id, ) + return (response_payload, response_content_type, response_headers) - def _handle_client_message_raw(self, raw: bytes) -> None: + def _handle_client_message( + self, payload: bytes, content_type: str, raw_headers: dict[str, str] + ) -> None: """Broker callback, deserialize and validate a userspace message from a broker. These are 'response' messages from this service calling another service. """ try: - message = deserialize_and_validate_userspace_message(raw) - logger.debug(f'Received userspace message:\n{message}') - self._handle_client_message(message) + headers = validate_userspace_message_headers(raw_headers) + logger.debug(f'Received userspace message:\n{headers}') except ValidationError as e: logger.warning( f'Invalid message received on client message channel, ignoring. 
Full message:\n{e}' ) + return - def _handle_client_message(self, message: UserspaceMessage) -> None: - """Handle a deserialized service-2-service message.""" - extreq = self._get_external_request(message['messageId']) + extreq = self._get_external_request(headers.message_id) if extreq is not None: error_msg: str | None = None try: - msg_payload = GENERIC_MESSAGE_SERIALIZER.validate_json( - self._data_plane_manager.incoming_message_data_handler(message) + msg_payload = self._data_plane_manager.incoming_message_data_handler( + payload, headers.data_handler ) + if content_type == 'application/json': + msg_payload = GENERIC_MESSAGE_SERIALIZER.validate_json(msg_payload) except ValidationError as e: error_msg = f'Service sent back invalid response:\n{e}' logger.warning(error_msg) @@ -852,33 +901,42 @@ def _handle_client_message(self, message: UserspaceMessage) -> None: # we did not get a valid INTERSECT message back, so just mark it for cleanup extreq.request_state = 'finalized' elif ( - extreq.request.destination != message['headers']['source'] - or extreq.request.operation != message['operationId'] + extreq.request.destination != headers.source + or extreq.request.operation != headers.operation_id ): logger.warning( 'Possible spoof message, discarding. Target destination', extreq.request.destination, 'Actual source', - message['headers']['source'], + headers.source, 'Target operation', extreq.request.operation, 'Actual operation', - message['operationId'], + headers.operation_id, ) extreq.request_state = 'finalized' else: # success extreq.response_payload = msg_payload - extreq.has_error = message['headers']['has_error'] + extreq.has_error = headers.has_error extreq.request_state = 'received' else: - error_msg = f'No external request found for message:\n{message}' + error_msg = f'No external request found for message:\n{headers}' logger.warning(error_msg) def _send_client_message(self, request_id: UUID, params: IntersectDirectMessageParams) -> bool: """Send a userspace message.""" # "params" should already be validated at this stage. 
-        request = GENERIC_MESSAGE_SERIALIZER.dump_json(params.payload, warnings=False)
+
+        if params.content_type == 'application/json':
+            request = GENERIC_MESSAGE_SERIALIZER.dump_json(params.payload, warnings=False)
+        else:
+            if not isinstance(params.payload, bytes):
+                logger.error(
+                    'service-to-service message payload must be bytes if content-type is not application/json'
+                )
+                return False
+            request = params.payload
 
         # TWO: SEND DATA TO APPROPRIATE DATA STORE
         try:
@@ -889,18 +947,19 @@ def _send_client_message(self, request_id: UUID, params: IntersectDirectMessageP
             return False
 
         # THREE: SEND MESSAGE
-        msg = create_userspace_message(
+        headers = create_userspace_message_headers(
             source=self._hierarchy.hierarchy_string('.'),
             destination=params.destination,
-            content_type=params.content_type,
             data_handler=params.data_handler,
             operation_id=params.operation,
-            payload=request_payload,
-            message_id=request_id,
+            request_id=request_id,
+            campaign_id=self._campaign_id,
         )
-        logger.debug(f'Sending client message:\n{msg}')
+        logger.debug(f'Sending client message:\n{headers}')
         request_channel = f'{params.destination.replace(".", "/")}/request'
-        self._control_plane_manager.publish_message(request_channel, msg, persist=True)
+        self._control_plane_manager.publish_message(
+            request_channel, request_payload, params.content_type, headers, persist=True
+        )
         return True
 
     def _call_user_function(
@@ -916,15 +975,10 @@
         Params
          fn_cap = capability implementing the user function
-         fn_name = operation. These get represented in the schema as "channels".
+         fn_name = operation. These get represented in the schema as "endpoints".
          fn_meta = all information stored about the user's operation. This includes user-defined params
           and the request/response (de)serializers.
-         fn_params = the request argument.
-           If this value is empty or the bytes literal "null", and users have a request type, we will try to call the user's function with
-           their default value as the parameter, or "None" if there isn't a default value.
-           Values nested on a lower level should be handled automatically by Pydantic.
-           A note on this value: at this point, we still want the parameters to be a JSON string (not a Python object),
-           as if the object is first converted to Python and THEN validated, users will not have the option
-           to choose strict validation.
+         fn_params = the request argument, as raw bytes pulled from the message payload or data store; for JSON endpoints this is validated with Pydantic, for other content types it is passed to the function as-is.
+
         Returns:
           If the capability executed with no problems, a byte-string of the response will be returned.
@@ -933,58 +987,96 @@
         Raises:
           IntersectApplicationException - this catches both invalid message arguments, as well as if the capability itself throws an Exception.
            It's meant to be for control-flow, it doesn't represent a fatal error.
-          ValidationError - this is a Pydantic error which occurs if the input fails to validate against the user's model.
+          ValidationError - this is a Pydantic error which occurs if the endpoint expects JSON input, and the input fails to validate against the user's model.
           Note that this error is never raised outside this function if the USER's code causes this exception (by doing internal pydantic validation themselves),
           but only as related to the actual request parameters.
 
        NOTE: running this function should normally not cause application failure. Users can terminate their application inside
        their capability class, but in almost all circumstances, this should be discouraged (outside of the constructor).
""" - if fn_meta.request_adapter: - try: - if not fn_params or fn_params == b'null': - # strict=True here does nothing, because ConfigDict property validate_default may not be set and we validate defaults when generating the schema - default_value = fn_meta.request_adapter.get_default_value() - try: - request_obj = default_value.value # type: ignore[union-attr] - except AttributeError: - request_obj = fn_meta.request_adapter.validate_python( - None, + if fn_meta.request_content_type == 'application/json': + # begin Pydantic validation workflow + if fn_meta.request_adapter: + # user has a function parameter + try: + if not fn_params or fn_params == b'null': + # strict=True here does nothing, because ConfigDict property validate_default may not be set and we validate defaults when generating the schema + default_value = fn_meta.request_adapter.get_default_value() + try: + request_obj = default_value.value # type: ignore[union-attr] + except AttributeError: + request_obj = fn_meta.request_adapter.validate_python( + None, + ) + else: + request_obj = fn_meta.request_adapter.validate_json( + fn_params, + strict=fn_meta.strict_validation, ) + except ValidationError as e: + err_msg = f'Bad arguments to application:\n{e}\n' + logger.warning(err_msg) + raise + try: + response = getattr(fn_cap, fn_name)(request_obj) + except IntersectCapabilityError as e: + logger.warning(f'Capability raised explicit exception:\n{e}\n') + raise + except ( + Exception + ) as e: # (need to catch all possible exceptions to gracefully handle the thread) + logger.warning(f'Capability raised exception:\n{e}\n') + raise IntersectApplicationError from e + else: + # user does not have a function parameter + try: + response = getattr(fn_cap, fn_name)() + except IntersectCapabilityError as e: + logger.warning(f'Capability raised explicit exception:\n{e}\n') + raise + except ( + Exception + ) as e: # (need to catch all possible exceptions to gracefully handle the thread) + logger.warning(f'Capability raised exception:\n{e}\n') + raise IntersectApplicationError from e + else: + # handle requests for expected binary data + # note that users should not be specifying a default value here + try: + if fn_meta.request_adapter is not None: + response = getattr(fn_cap, fn_name)(fn_params) else: - request_obj = fn_meta.request_adapter.validate_json( - fn_params, - strict=getattr(fn_meta.method, STRICT_VALIDATION), - ) - except ValidationError as e: - err_msg = f'Bad arguments to application:\n{e}\n' - logger.warning(err_msg) + response = getattr(fn_cap, fn_name)() + except IntersectCapabilityError as e: + logger.warning(f'Capability raised explicit exception:\n{e}\n') raise - try: - response = getattr(fn_cap, fn_name)(request_obj) except ( Exception ) as e: # (need to catch all possible exceptions to gracefully handle the thread) logger.warning(f'Capability raised exception:\n{e}\n') raise IntersectApplicationError from e - else: + + if fn_meta.response_adapter: + # JSON serialization workflow try: - response = getattr(fn_cap, fn_name)() - except ( - Exception - ) as e: # (need to catch all possible exceptions to gracefully handle the thread) - logger.warning(f'Capability raised exception:\n{e}\n') + return fn_meta.response_adapter.dump_json(response, by_alias=True, warnings='error') + except PydanticSerializationError as e: + logger.error( + f'IMPORTANT!!!! Your INTERSECT capability function did not return a value matching your response type. You MUST fix this for your message to be sent out! 
Full error:\n{e}\n' + ) raise IntersectApplicationError from e - try: - return fn_meta.response_adapter.dump_json(response, by_alias=True, warnings='error') - except PydanticSerializationError as e: + # at this point we need to assume best-effort on the user's part, as we don't handle binary data ourselves + # make sure they have returned bytes in their return value + if not isinstance(response, bytes): logger.error( - f'IMPORTANT!!!! Your INTERSECT capability function did not return a value matching your response type. You MUST fix this for your message to be sent out! Full error:\n{e}\n' + 'IMPORTANT: If your response_content_type is not application/json, you MUST return raw bytes from your function.' ) - raise IntersectApplicationError from e + raise IntersectApplicationError + + return response - def _on_observe_event(self, event_name: str, event_value: Any, operation: str) -> None: + def _on_observe_event(self, event_name: str, event_value: Any, capability_name: str) -> None: """This is the service function which handles events from the capabilities (as opposed to handling messages). This function needs to: @@ -995,21 +1087,33 @@ def _on_observe_event(self, event_name: str, event_value: Any, operation: str) - Note that if validation fails, we simply log the error out and return. We do not broadcast an error message. """ - event_meta = self._event_map.get(event_name) - if event_meta is None or operation not in event_meta.operations: - logger.error( - f"Event name '{event_name}' was not registered on operation '{operation}', so event will not be emitted.\nEvent value: {event_value}" - ) - return - try: - response = event_meta.type_adapter.dump_json( - event_value, by_alias=True, warnings='error' - ) - except PydanticSerializationError as e: + event_meta = self._event_map.get(capability_name, {}).get(event_name) + if event_meta is None: logger.error( - f"Value emitted for event name '{event_name}' from operation '{operation}' does not match schema.\nEvent value: {event_value}\nPydantic error: {e}" + f"Event name '{event_name}' was not registered on capability '{capability_name}', so event will not be emitted.\nEvent value: {event_value}" ) return + + if event_meta.content_type == 'application/json': + try: + response = event_meta.type_adapter.dump_json( + event_value, by_alias=True, warnings='error' + ) + except PydanticSerializationError as e: + logger.error( + f"Value emitted for event name '{event_name}' from capability '{capability_name}' does not match schema.\nEvent value: {event_value}\nPydantic error: {e}" + ) + return + else: + if not isinstance(event_value, bytes): + logger.error( + 'Event type "%s" from capability "%s" emitted non-binary value when content-type is not application/json, you must change this to actually emit the event', + event_name, + capability_name, + ) + return + response = event_value + try: response_payload = self._data_plane_manager.outgoing_message_data_handler( response, event_meta.content_type, event_meta.data_transfer_handler @@ -1018,70 +1122,82 @@ def _on_observe_event(self, event_name: str, event_value: Any, operation: str) - # error should already be logged from the outgoing_message_data_handler function return - msg = create_event_message( + headers = create_event_message_headers( source=self._hierarchy.hierarchy_string('.'), - operation_id=operation, - content_type=event_meta.content_type, + capability_name=capability_name, data_handler=event_meta.data_transfer_handler, event_name=event_name, - payload=response_payload, ) - 
self._control_plane_manager.publish_message(self._events_channel_name, msg, persist=True) + full_events_channel_name = f'{self._events_channel_name}/{capability_name}/{event_name}' + self._control_plane_manager.publish_message( + full_events_channel_name, + response_payload, + event_meta.content_type, + headers, + persist=True, + ) - def _make_error_message( - self, error_string: str, original_message: UserspaceMessage - ) -> UserspaceMessage: + def _make_error_message_headers( + self, original_headers: UserspaceMessageHeaders + ) -> dict[str, str]: """Generate an error message. Params: - error_string: The error string to send as the payload - original_message: The original UserspaceMessage + original_headers: The original message headers Returns: - the UserspaceMessage we will send as a reply + the headers we will send as a reply """ - return create_userspace_message( - source=original_message['headers']['destination'], - destination=original_message['headers']['source'], - content_type=IntersectMimeType.STRING, + return create_userspace_message_headers( + source=original_headers.destination, + destination=original_headers.source, data_handler=IntersectDataHandler.MESSAGE, - operation_id=original_message['operationId'], - payload=error_string, - message_id=original_message['messageId'], # associate error reply with original + operation_id=original_headers.operation_id, + campaign_id=original_headers.campaign_id, + request_id=original_headers.request_id, has_error=True, ) - def _send_lifecycle_message(self, lifecycle_type: LifecycleType, payload: Any = None) -> None: + def _send_lifecycle_message(self, lifecycle_type: LifecycleType, payload: bytes) -> None: """Send out a lifecycle message.""" - msg = create_lifecycle_message( + headers = create_lifecycle_message_headers( source=self._hierarchy.hierarchy_string('.'), - destination=self._lifecycle_channel_name, lifecycle_type=lifecycle_type, - payload=payload, ) - logger.debug(f'Send lifecycle message:\n{msg}') + logger.debug('Send lifecycle message \nHEADERS: %s\n\n%s', payload, headers) # Lifecycle messages are meant to be short-lived, only the latest message has any usage for systems uninterested in logging, # and queues will be regularly polled about these. Do not persist them. self._control_plane_manager.publish_message( - self._lifecycle_channel_name, msg, persist=False + self._lifecycle_channel_name, payload, 'application/json', headers, persist=False ) - def _check_for_status_update(self) -> bool: - """Call the user's status retrieval function to see if it equals the cached value. If it does not, send out a status update function. + def _status_retrieval_fn(self, fail_harshly: bool) -> dict[str, Any]: + """Call all capability status functions configured. - This will also always update the last cached value. + We generally want to call this once during startup to verify that the user has a potentially valid status function. - Returns: - True if there was a status update, False if there wasn't + However, the status function can potentially fail later on during the runtime, in which case we will just log the error and continue. + During runtime, we'll mark the capability of the status as returning null, which indicates that the status was expected to return a value but did not. 
""" - next_status = self._status_retrieval_fn() - if next_status != self._status_memo: - self._status_memo = next_status - self._send_lifecycle_message( - lifecycle_type=LifecycleType.STATUS_UPDATE, - payload={'schema': self._schema, 'status': next_status}, - ) - return True - return False + status_map = {} + for status in self._status_list: + try: + capability = self._get_capability(status.capability_name) + result = status.serializer.dump_python( + getattr(capability, status.function_name)(), + by_alias=True, + warnings='error', + ) + status_map[status.capability_name] = result + except Exception as e: # noqa: BLE001 (should catch all exceptions at this point) + msg = f"Status function check for capability '{status.capability_name}' raised an exception, please make sure that the value it's returning matches the type of its return function and that you can reliably call it during service.startup() or default_intersect_lifecycle_loop(service). Exception:\n{e}" + if fail_harshly: + # The status function should only be called when the application developer wants to be connected to the INTERSECT broker. + # If we reach this point, the application should be given the opportunity to do a graceful shutdown, so send a SIGTERM instead of immediately dying + logger.critical(msg) + send_os_signal() + logger.error(msg) + status_map[status.capability_name] = None + return status_map def _status_ticker(self) -> None: """Periodically sends lifecycle polling messages showing the Service's state. Runs in a separate thread.""" @@ -1092,11 +1208,16 @@ def _status_ticker(self) -> None: else: self._status_thread.wait(self._status_ticker_interval) while not self._status_thread.stopped(): - if not self._check_for_status_update(): - self._send_lifecycle_message( - lifecycle_type=LifecycleType.POLLING, - payload={'schema': self._schema, 'status': self._status_memo}, - ) + payload = GENERIC_MESSAGE_SERIALIZER.dump_json( + { + 'schema': self._schema, + 'status': self._status_retrieval_fn(fail_harshly=False), + } + ) + self._send_lifecycle_message( + lifecycle_type='LCT_POLLING', + payload=payload, + ) self._status_thread.wait(self._status_ticker_interval) def _send_external_requests(self) -> None: diff --git a/src/intersect_sdk/service_callback_definitions.py b/src/intersect_sdk/service_callback_definitions.py index bef5c20..c67efe3 100644 --- a/src/intersect_sdk/service_callback_definitions.py +++ b/src/intersect_sdk/service_callback_definitions.py @@ -3,11 +3,13 @@ Please see shared_callback_definitions for definitions which are also used by Clients. """ -from typing import Callable +from collections.abc import Callable -from .shared_callback_definitions import INTERSECT_JSON_VALUE +from .shared_callback_definitions import INTERSECT_RESPONSE_VALUE -INTERSECT_SERVICE_RESPONSE_CALLBACK_TYPE = Callable[[str, str, bool, INTERSECT_JSON_VALUE], None] +INTERSECT_SERVICE_RESPONSE_CALLBACK_TYPE = Callable[ + [str, str, bool, INTERSECT_RESPONSE_VALUE], None +] """Callback typing for the function which handles another Service's response. Params @@ -17,8 +19,9 @@ 3) A boolean - if True, there was an error; if False, there was not. 4) The response, as a Python object - the type should be based on the corresponding Service's schema response. The Python object will already be deserialized for you. If parameter 3 was "True", then this will be the error message, as a string. 
- If parameter 3 was "False", then this will be either an integer, boolean, float, string, None, + If parameter 3 was "False", and you are expecting textual data, this will be either an integer, boolean, float, string, None, a List[T], or a Dict[str, T], where "T" represents any of the 7 aforementioned types. + If parameter 3 was "False" but you are expecting binary data, this will be bytes. This callback type should only be used on Capabilities - for client callback functions, use INTERSECT_CLIENT_RESPONSE_CALLBACK_TYPE . """ diff --git a/src/intersect_sdk/service_definitions.py b/src/intersect_sdk/service_definitions.py index 38858cd..ece3391 100644 --- a/src/intersect_sdk/service_definitions.py +++ b/src/intersect_sdk/service_definitions.py @@ -11,19 +11,16 @@ If you are not able to create a schema, the service will refuse to start. """ -from __future__ import annotations - import functools -from typing import Any, Callable, Dict, Mapping, Optional, Sequence, Set +from collections.abc import Callable, Mapping, Sequence +from typing import Any -from pydantic import BaseModel, ConfigDict, Field, field_validator, validate_call -from typing_extensions import Annotated, final +from pydantic import BaseModel, ConfigDict, field_validator, validate_call +from typing_extensions import final from ._internal.constants import ( - BASE_EVENT_ATTR, BASE_RESPONSE_ATTR, BASE_STATUS_ATTR, - EVENT_ATTR_KEY, REQUEST_CONTENT, RESPONSE_CONTENT, RESPONSE_DATA, @@ -43,11 +40,15 @@ class IntersectEventDefinition(BaseModel): The type you provide must be parsable by Pydantic. """ - content_type: IntersectMimeType = IntersectMimeType.JSON + event_documentation: str = '' + """ + This is a strictly informational field which can describe what the event does in schema. + """ + content_type: IntersectMimeType = 'application/json' """ The IntersectMimeType (aka Content-Type) of your event. - default: IntersectMimeType.JSON + default: 'application/json' """ data_handler: IntersectDataHandler = IntersectDataHandler.MESSAGE """ @@ -65,7 +66,7 @@ def _event_type_fail_fast(cls, v: Any) -> Any: # and in Python 3.9+ you can use annotations without them being parsed as strings. # BaseModel objects are technically okay because Pydantic will always treat them as the type. # Otherwise we can just disallow a few common typings and handle the rest when trying to create a TypeAdapter. - if isinstance(v, (int, float, bool, str, Mapping, Sequence)): + if isinstance(v, int | float | bool | str | Mapping | Sequence): msg = 'IntersectEventDefintion: event_type should be a type or a type alias' raise ValueError(msg) # noqa: TRY004 (Pydantic convention is to raise a ValueError) return v @@ -79,11 +80,10 @@ def intersect_message( __func: Callable[..., Any] | None = None, /, *, - events: Optional[Dict[str, IntersectEventDefinition]] = None, # noqa: UP006, UP007 (runtime type annotation) - ignore_keys: Optional[Set[str]] = None, # noqa: UP006, UP007 (runtime type annotation) - request_content_type: IntersectMimeType = IntersectMimeType.JSON, + ignore_keys: set[str] | None = None, + request_content_type: IntersectMimeType = 'application/json', response_data_transfer_handler: IntersectDataHandler = IntersectDataHandler.MESSAGE, - response_content_type: IntersectMimeType = IntersectMimeType.JSON, + response_content_type: IntersectMimeType = 'application/json', strict_request_validation: bool = False, ) -> Callable[..., Any]: """Use this annotation to mark your capability method as an entrypoint to external requests. 
@@ -101,7 +101,7 @@ def intersect_message( - Iterable/Sequence types (list, deque, set, tuple, frozenset, etc.) - Mapping types (dict, Counter, OrderedDict, etc.). Regarding mapping types: the keys must be one of str/float/int, and float/int keys CANNOT use strict_request_validation=True. - most stdlib types, i.e. Decimal, datetime.datetime, pathlib, etc. - - using typing_extensions "Annotated" type in conjunction with Pydantic's "Field" or various classes from the annotated_types library + - using the typing.Annotated type in conjunction with Pydantic's "Field" or various classes from the annotated_types library - TODO: Generators are a WORK IN PROGRESS but will eventually represent a streaming function You are only allowed to have one additional parameter. Functions without this parameter are assumed to take in no arguments. @@ -119,14 +119,6 @@ def some_external_function(self, request: MyBaseModelRequest) -> MyBaseModelResp In general, if you are able to create a service from this class, you should be okay. Params: - - events: dictionary of event names (strings) to IntersectEventDefninitions. - An IntersectEventDefinition contains metadata about your event, including its type. - Note that the type defined on the IntersectEventDefinition must be parsable by Pydantic. - Note that while multiple functions can emit the same event name, they MUST advertise the SAME type - for this event name. - Inside your function, you may call "self.intersect_sdk_emit_event(event_name, event_value)" to fire off the event. - You may call this in an inner, non-annotated function, but NOTE: EVERY function which calls this function MUST - advertise the same event. - ignore_keys: Hashset of keys. The service class maintains a set of keys to ignore, and will ignore this function if at least one key is present in the service set. By default, all functions will always be allowed. @@ -134,8 +126,8 @@ def some_external_function(self, request: MyBaseModelRequest) -> MyBaseModelResp "service.shutdown()" to disconnect from INTERSECT entirely. In general, you should NOT define this on functions which are just query functions; only set this if you are mutating INSTRUMENT or APPLICATION state. - - request_content_type: how to deserialize incoming requests (default: JSON) - - response_content_type: how to serialize outgoing requests (default: JSON) + - request_content_type: how to deserialize incoming requests (default: application/json) + - response_content_type: how to serialize outgoing requests (default: application/json) - response_data_transfer_handler: are responses going out through the message, or through another mean (i.e. MINIO)? - strict_request_validation: if this is set to True, use pydantic strict validation for requests - otherwise, use lenient validation (default: False) @@ -161,7 +153,6 @@ def __intersect_sdk_wrapper(*args: Any, **kwargs: Any) -> Any: setattr(__intersect_sdk_wrapper, RESPONSE_DATA, response_data_transfer_handler) setattr(__intersect_sdk_wrapper, STRICT_VALIDATION, strict_request_validation) setattr(__intersect_sdk_wrapper, SHUTDOWN_KEYS, set(ignore_keys) if ignore_keys else set()) - setattr(__intersect_sdk_wrapper, EVENT_ATTR_KEY, events or {}) return __intersect_sdk_wrapper @@ -170,14 +161,10 @@ def __intersect_sdk_wrapper(*args: Any, **kwargs: Any) -> Any: return inner_decorator -# TODO - consider forcing intersect_status endpoints to send Messages and JSON responses. 
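To illustrate the revised decorator parameters documented above, here is a brief sketch of a capability exposing one default JSON endpoint and one binary endpoint. The capability, types, and key names are hypothetical; the sketch assumes a plain `bytes` annotation is acceptable for non-JSON endpoints, and uses the module paths touched by this patch for imports.

```
from pydantic import BaseModel

from intersect_sdk.capability.base import IntersectBaseCapabilityImplementation
from intersect_sdk.service_definitions import intersect_message


class ImageStats(BaseModel):
    width: int
    height: int


class ImageCapability(IntersectBaseCapabilityImplementation):
    """Hypothetical capability used only for this sketch."""

    intersect_sdk_capability_name = 'ImageCapability'

    @intersect_message()
    def describe(self, name: str) -> ImageStats:
        # default endpoint: request/response content type is 'application/json',
        # so Pydantic validates the request and serializes the response
        return ImageStats(width=640, height=480)

    @intersect_message(
        request_content_type='image/png',
        response_content_type='image/png',
        ignore_keys={'INSTRUMENT_OFFLINE'},  # hypothetical key used with allow_keys()/forbid_keys()
    )
    def thumbnail(self, raw_png: bytes) -> bytes:
        # non-JSON endpoint: the SDK hands the raw request bytes straight to the function,
        # and the function must return raw bytes
        return raw_png
```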
@validate_call def intersect_status( __func: Callable[..., Any] | None = None, /, - *, - response_data_transfer_handler: IntersectDataHandler = IntersectDataHandler.MESSAGE, - response_content_type: IntersectMimeType = IntersectMimeType.JSON, ) -> Any: """Use this annotation to mark your capability method as a status retrieval function. @@ -186,12 +173,11 @@ def intersect_status( Your status retrieval function may not have any parameters (other than "self"). Return annotation rules mirror the typing rules for @intersect_message(). - A status message MUST NOT send events out. It should be a simple query of the general service (no specifics). + A status retrieval function should ALWAYS return valid JSON. You should not be returning large globs of data, a few KB serialized should be sufficient. - Params: - - response_content_type: how to serialize outgoing requests (default: JSON) - - response_data_transfer_handler: are responses going out through the message, or through another mean - (i.e. MINIO)? + A status message MUST NOT send events out. It should be a simple query of the general service (no specifics). + A status message MUST send its response back in a value which can be serialized into JSON. + A status message MUST have a fairly small response size (no large data). """ def inner_decorator(func: Callable[..., Any]) -> Callable[..., Any]: @@ -206,10 +192,11 @@ def inner_decorator(func: Callable[..., Any]) -> Callable[..., Any]: def __intersect_sdk_wrapper(*args: Any, **kwargs: Any) -> Any: return func(*args, **kwargs) + # add attrs to allow status function to double as an @intersect_message, valid status functions are limited setattr(__intersect_sdk_wrapper, BASE_STATUS_ATTR, True) - setattr(__intersect_sdk_wrapper, REQUEST_CONTENT, IntersectMimeType.JSON) - setattr(__intersect_sdk_wrapper, RESPONSE_CONTENT, response_content_type) - setattr(__intersect_sdk_wrapper, RESPONSE_DATA, response_data_transfer_handler) + setattr(__intersect_sdk_wrapper, REQUEST_CONTENT, 'application/json') + setattr(__intersect_sdk_wrapper, RESPONSE_CONTENT, 'application/json') + setattr(__intersect_sdk_wrapper, RESPONSE_DATA, IntersectDataHandler.MESSAGE) setattr(__intersect_sdk_wrapper, STRICT_VALIDATION, False) setattr(__intersect_sdk_wrapper, SHUTDOWN_KEYS, set()) @@ -218,45 +205,3 @@ def __intersect_sdk_wrapper(*args: Any, **kwargs: Any) -> Any: if __func: return inner_decorator(__func) return inner_decorator - - -@validate_call -def intersect_event( - *, - events: Annotated[Dict[str, IntersectEventDefinition], Field(min_length=1)], # noqa: UP006 (runtime type annotation) -) -> Callable[..., Any]: - """Use this annotation to mark a function as an event emitter. - - This annotation is meant to be used in conjunction with secondary threads that you start on your CapabilityImplementation. - You should ONLY annotate the function which is the direct thread target. - - Note that you should NOT use this annotation in combination with any other annotation. If you are exposing an endpoint - which ALSO emits messages, use @intersect_message(events={...}) instead. - - Also note that the events you register here should be compatible with all events registered on @intersect_message annotations. - - Params: - - events: dictionary of event names (strings) to IntersectEventDefninitions. - You must declare at least one definition. - An IntersectEventDefinition contains metadata about your event, including its type. - Note that the type defined on the IntersectEventDefinition must be parsable by Pydantic. 
- Note that while multiple functions can emit the same event name, they MUST advertise the SAME type - for this event name. - Inside your function, you may call "self.intersect_sdk_emit_event(event_name, event_value)" to fire off the event. - You may call this in an inner, non-annotated function, but NOTE: EVERY function which calls this function MUST - advertise the same event. - """ - - def inner_decorator(func: Callable[..., Any]) -> Callable[..., Any]: - # NOTE: we don't actually care how users decorate their @intersect_event functions, because we don't call them. - - @functools.wraps(func) - def __intersect_sdk_wrapper(*args: Any, **kwargs: Any) -> Any: - return func(*args, **kwargs) - - setattr(__intersect_sdk_wrapper, BASE_EVENT_ATTR, True) - setattr(__intersect_sdk_wrapper, EVENT_ATTR_KEY, events) - - return __intersect_sdk_wrapper - - return inner_decorator diff --git a/src/intersect_sdk/shared_callback_definitions.py b/src/intersect_sdk/shared_callback_definitions.py index fd1ff04..4172cc9 100644 --- a/src/intersect_sdk/shared_callback_definitions.py +++ b/src/intersect_sdk/shared_callback_definitions.py @@ -1,28 +1,35 @@ """Callback definitions shared between Services, Capabilities, and Clients.""" -from typing import Any, Dict, List, Union +from typing import Annotated, Any, TypeAlias from pydantic import BaseModel, ConfigDict, Field -from typing_extensions import Annotated, TypeAlias -from .constants import SYSTEM_OF_SYSTEM_REGEX +from .constants import CAPABILITY_REGEX, SYSTEM_OF_SYSTEM_REGEX from .core_definitions import IntersectDataHandler, IntersectMimeType -INTERSECT_JSON_VALUE: TypeAlias = Union[ - List['INTERSECT_JSON_VALUE'], - Dict[str, 'INTERSECT_JSON_VALUE'], - str, - bool, - int, - float, - None, -] +INTERSECT_JSON_VALUE: TypeAlias = ( + list['INTERSECT_JSON_VALUE'] + | dict[str, 'INTERSECT_JSON_VALUE'] + | str + | bool + | int + | float + | None +) """ This is a simple type representation of JSON as a Python object. INTERSECT will automatically deserialize service payloads into one of these types. (Pydantic has a similar type, "JsonValue", which should be used if you desire functionality beyond type hinting. This is strictly a type hint.) """ +INTERSECT_RESPONSE_VALUE: TypeAlias = INTERSECT_JSON_VALUE | bytes +""" +This is the actual response value you will get back from a Service. The type will already be serialized into Python for you, +but will not be serialized into a precise value. + +If you receive 'bytes', you should assume binary data. Other types imply JSON values. +""" + class IntersectDirectMessageParams(BaseModel): """These are the public-facing properties of a message which can be sent to another Service. @@ -49,11 +56,11 @@ class IntersectDirectMessageParams(BaseModel): If you want to just use the service's default value for a request (assuming it has a default value for a request), you may set this as None. """ - content_type: IntersectMimeType = IntersectMimeType.JSON + content_type: IntersectMimeType = 'application/json' """ The IntersectMimeType of your message. You'll want this to match with the ContentType of the function from the schema. 
- default: IntersectMimeType.JSON + default: application/json """ data_handler: IntersectDataHandler = IntersectDataHandler.MESSAGE @@ -65,3 +72,16 @@ class IntersectDirectMessageParams(BaseModel): # pydantic config model_config = ConfigDict(revalidate_instances='always') + + +class IntersectEventMessageParams(BaseModel): + """Public facing properties of events the Client/Service wants to listen to.""" + + hierarchy: Annotated[str, Field(pattern=SYSTEM_OF_SYSTEM_REGEX)] + """The full hierarchy (org.facility.system.subsystem.service) that we want to listen to.""" + + capability_name: Annotated[str, Field(pattern=CAPABILITY_REGEX)] + """Name of the capability you want to listen to events from""" + + event_name: str + """Name of the event the capability emits that you're listening for""" diff --git a/src/intersect_sdk/version.py b/src/intersect_sdk/version.py index 37dedee..e8cfc97 100644 --- a/src/intersect_sdk/version.py +++ b/src/intersect_sdk/version.py @@ -3,8 +3,6 @@ These values are often used programmatically by the SDK, but can be used by application developers as well. """ -from __future__ import annotations - from ._internal.version import strip_version_metadata # may include build metadata diff --git a/tests/fixtures/example_schema.json b/tests/fixtures/example_schema.json index 2159d18..fc617d6 100644 --- a/tests/fixtures/example_schema.json +++ b/tests/fixtures/example_schema.json @@ -8,8 +8,16 @@ }, "defaultContentType": "application/json", "capabilities": { + "intersect_sdk": { + "endpoints": {}, + "events": {}, + "status": { + "$ref": "#/components/schemas/IntersectCoreStatus" + }, + "description": "Core capability present in every INTERSECT Service.\n\nThis may be called explicitly by any Client interacting with any SDK service. Set the operation to be \"intersect_sdk.\"." + }, "DummyCapability": { - "channels": { + "endpoints": { "annotated_set": { "publish": { "message": { @@ -25,7 +33,8 @@ }, "minItems": 1, "type": "array", - "uniqueItems": true + "uniqueItems": true, + "title": "annotated_set" } }, "description": "return numbers in set which are prime numbers in the range 1-100" @@ -44,12 +53,80 @@ }, "minItems": 1, "type": "array", - "uniqueItems": true + "uniqueItems": true, + "title": "positive_int_set" } }, "description": "return numbers in set which are prime numbers in the range 1-100" + } + }, + "binary_to_binary": { + "publish": { + "message": { + "schemaFormat": "application/vnd.aai.asyncapi+json;version=2.6.0", + "contentType": "image/png", + "traits": { + "$ref": "#/components/messageTraits/commonHeaders" + }, + "payload": { + "format": "binary", + "type": "string", + "title": "binary_to_binary", + "contentMediaType": "image/png" + } + } }, - "events": [] + "subscribe": { + "message": { + "schemaFormat": "application/vnd.aai.asyncapi+json;version=2.6.0", + "contentType": "image/png", + "traits": { + "$ref": "#/components/messageTraits/commonHeaders" + }, + "payload": { + "format": "binary", + "maxLength": 1048576, + "type": "string", + "title": "binary_to_binary", + "contentMediaType": "image/png" + } + } + } + }, + "calculate_3n_plus_1": { + "publish": { + "message": { + "schemaFormat": "application/vnd.aai.asyncapi+json;version=2.6.0", + "contentType": "application/json", + "traits": { + "$ref": "#/components/messageTraits/commonHeaders" + }, + "payload": { + "items": { + "type": "integer" + }, + "type": "array", + "title": "calculate_3n_plus_1" + } + }, + "description": "Calculates the famous \"3n + 1\" problem. 
Takes in an integer, outputs an array of numbers\nwhich follow the algorithm all the way to \"1\"." + }, + "subscribe": { + "message": { + "schemaFormat": "application/vnd.aai.asyncapi+json;version=2.6.0", + "contentType": "application/json", + "traits": { + "$ref": "#/components/messageTraits/commonHeaders" + }, + "payload": { + "maximum": 1000000, + "minimum": 1, + "type": "integer", + "title": "token" + } + }, + "description": "Calculates the famous \"3n + 1\" problem. Takes in an integer, outputs an array of numbers\nwhich follow the algorithm all the way to \"1\"." + } }, "calculate_fibonacci": { "publish": { @@ -63,7 +140,8 @@ "items": { "type": "integer" }, - "type": "array" + "type": "array", + "title": "calculate_fibonacci" } }, "description": "calculates all fibonacci numbers between two numbers\n\ni.e. start = 4, end = 6:\nresponse = [5, 8, 13]" @@ -86,14 +164,14 @@ "type": "integer" } ], - "type": "array" + "type": "array", + "title": "request" } }, "description": "calculates all fibonacci numbers between two numbers\n\ni.e. start = 4, end = 6:\nresponse = [5, 8, 13]" - }, - "events": [] + } }, - "calculate_weird_algorithm": { + "divide_by_zero_exceptions": { "publish": { "message": { "schemaFormat": "application/vnd.aai.asyncapi+json;version=2.6.0", @@ -102,13 +180,10 @@ "$ref": "#/components/messageTraits/commonHeaders" }, "payload": { - "items": { - "type": "integer" - }, - "type": "array" + "type": "number", + "title": "divide_by_zero_exceptions" } - }, - "description": "Weird algorithm calculator. Takes in an integer, outputs an array of numbers\nwhich follow the algorithm all the way to \"1\"." + } }, "subscribe": { "message": { @@ -118,14 +193,11 @@ "$ref": "#/components/messageTraits/commonHeaders" }, "payload": { - "maximum": 1000000, - "minimum": 1, - "type": "integer" + "type": "integer", + "title": "param" } - }, - "description": "Weird algorithm calculator. Takes in an integer, outputs an array of numbers\nwhich follow the algorithm all the way to \"1\"." 
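The `calculate_3n_plus_1` schema above constrains the request to an integer in [1, 1000000] and the response to an integer array; the renamed implementation appears later in this diff. As a purely illustrative sketch of the computation the description refers to (not the SDK's or the fixture's exact code):

    def three_n_plus_one(token: int) -> list[int]:
        # follow the Collatz ("3n + 1") sequence from token toward 1
        result = []
        while token != 1:
            result.append(token)
            token = token // 2 if token % 2 == 0 else 3 * token + 1
        return result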
- }, - "events": [] + } + } }, "get_url_parts": { "publish": { @@ -149,10 +221,11 @@ } ] }, - "type": "object" + "type": "object", + "title": "get_url_parts" } }, - "description": "example of automatic URL parsing and schema validation" + "description": "example of automatic URL parsing and schema validation\n\n(the return type should realistically be a TypedDict, not a dict)" }, "subscribe": { "message": { @@ -164,12 +237,12 @@ "payload": { "format": "uri", "minLength": 1, - "type": "string" + "type": "string", + "title": "url" } }, - "description": "example of automatic URL parsing and schema validation" - }, - "events": [] + "description": "example of automatic URL parsing and schema validation\n\n(the return type should realistically be a TypedDict, not a dict)" + } }, "ip4_to_ip6": { "publish": { @@ -181,7 +254,8 @@ }, "payload": { "format": "ipv6", - "type": "string" + "type": "string", + "title": "ip4_to_ip6" } }, "description": "example of IPaddress conversion\nreturn value will always start with '2002::' based on implementation\n\nPydantic also supports IP networks and interfaces, in addition to addresses" @@ -189,18 +263,18 @@ "subscribe": { "message": { "schemaFormat": "application/vnd.aai.asyncapi+json;version=2.6.0", - "contentType": "text/plain", + "contentType": "application/json", "traits": { "$ref": "#/components/messageTraits/commonHeaders" }, "payload": { "format": "ipv4", - "type": "string" + "type": "string", + "title": "ip4" } }, "description": "example of IPaddress conversion\nreturn value will always start with '2002::' based on implementation\n\nPydantic also supports IP networks and interfaces, in addition to addresses" - }, - "events": [] + } }, "primitive_event_message": { "publish": { @@ -211,7 +285,8 @@ "$ref": "#/components/messageTraits/commonHeaders" }, "payload": { - "type": "string" + "type": "string", + "title": "primitive_event_message" } } }, @@ -225,15 +300,11 @@ "payload": { "default": 1, "minimum": 1, - "type": "integer" + "type": "integer", + "title": "emit_times" } } - }, - "events": [ - "int", - "str", - "float" - ] + } }, "primitive_event_message_random": { "publish": { @@ -244,7 +315,8 @@ "$ref": "#/components/messageTraits/commonHeaders" }, "payload": { - "type": "string" + "type": "string", + "title": "primitive_event_message_random" } } }, @@ -256,12 +328,31 @@ "$ref": "#/components/messageTraits/commonHeaders" } } + } + }, + "raise_exception_no_param": { + "publish": { + "message": { + "schemaFormat": "application/vnd.aai.asyncapi+json;version=2.6.0", + "contentType": "application/json", + "traits": { + "$ref": "#/components/messageTraits/commonHeaders" + }, + "payload": { + "type": "string", + "title": "raise_exception_no_param" + } + } }, - "events": [ - "int", - "str", - "float" - ] + "subscribe": { + "message": { + "schemaFormat": "application/vnd.aai.asyncapi+json;version=2.6.0", + "contentType": "application/json", + "traits": { + "$ref": "#/components/messageTraits/commonHeaders" + } + } + } }, "search_for_lucky_string_in_json": { "publish": { @@ -272,7 +363,8 @@ "$ref": "#/components/messageTraits/commonHeaders" }, "payload": { - "type": "boolean" + "type": "boolean", + "title": "search_for_lucky_string_in_json" } }, "description": "return true if our lucky string is in JSON, false otherwise" @@ -289,8 +381,7 @@ } }, "description": "return true if our lucky string is in JSON, false otherwise" - }, - "events": [] + } }, "test_datetime": { "publish": { @@ -301,7 +392,8 @@ "$ref": "#/components/messageTraits/commonHeaders" }, 
"payload": { - "type": "string" + "type": "string", + "title": "test_datetime" } }, "description": "NOTE: If strict mode is ON, only JSON strings can be coerced into datetimes.\nIf strict mode is OFF, integers can also be coerced into datetimes." @@ -315,12 +407,12 @@ }, "payload": { "format": "date-time", - "type": "string" + "type": "string", + "title": "request" } }, "description": "NOTE: If strict mode is ON, only JSON strings can be coerced into datetimes.\nIf strict mode is OFF, integers can also be coerced into datetimes." - }, - "events": [] + } }, "test_decimal": { "publish": { @@ -331,7 +423,8 @@ "$ref": "#/components/messageTraits/commonHeaders" }, "payload": { - "type": "string" + "type": "string", + "title": "test_decimal" } }, "description": "take in decimal input\nreturn decimal divided by PI (20 precision digits)" @@ -351,12 +444,12 @@ { "type": "string" } - ] + ], + "title": "input_value" } }, "description": "take in decimal input\nreturn decimal divided by PI (20 precision digits)" - }, - "events": [] + } }, "test_dicts": { "publish": { @@ -370,7 +463,8 @@ "additionalProperties": { "type": "integer" }, - "type": "object" + "type": "object", + "title": "test_dicts" } }, "description": "NOTE: JSON always stores Dict/Mapping keys as strings.\nIf the string can't be coerced into the input value, it will throw a RUNTIME error." @@ -386,12 +480,12 @@ "additionalProperties": { "type": "integer" }, - "type": "object" + "type": "object", + "title": "request" } }, "description": "NOTE: JSON always stores Dict/Mapping keys as strings.\nIf the string can't be coerced into the input value, it will throw a RUNTIME error." - }, - "events": [] + } }, "test_enum": { "publish": { @@ -402,7 +496,8 @@ "$ref": "#/components/messageTraits/commonHeaders" }, "payload": { - "type": "string" + "type": "string", + "title": "test_enum" } }, "description": "Returns either 'first' or 'later' depending on the enum value." @@ -419,8 +514,7 @@ } }, "description": "Returns either 'first' or 'later' depending on the enum value." 
- }, - "events": [] + } }, "test_generator": { "publish": { @@ -434,7 +528,8 @@ "items": { "type": "integer" }, - "type": "array" + "type": "array", + "title": "test_generator" } }, "description": "TODO - Generators need more support than this.\n\nThis tests returning a generator function, which may be useful for streaming data.\nIn this example, yield all substring hashes of the request string.\n\nA couple of notes about the Generator type:\n 1) Given the typing is Generator[yield_type, send_type, return_type], only the yield_type matters\n 2) The schema will always look like \"{'items': {'type': }, 'type': 'array'}\"" @@ -447,18 +542,18 @@ "$ref": "#/components/messageTraits/commonHeaders" }, "payload": { - "type": "string" + "type": "string", + "title": "request" } }, "description": "TODO - Generators need more support than this.\n\nThis tests returning a generator function, which may be useful for streaming data.\nIn this example, yield all substring hashes of the request string.\n\nA couple of notes about the Generator type:\n 1) Given the typing is Generator[yield_type, send_type, return_type], only the yield_type matters\n 2) The schema will always look like \"{'items': {'type': }, 'type': 'array'}\"" - }, - "events": [] + } }, "test_path": { "publish": { "message": { "schemaFormat": "application/vnd.aai.asyncapi+json;version=2.6.0", - "contentType": "text/plain", + "contentType": "application/json", "traits": { "$ref": "#/components/messageTraits/commonHeaders" }, @@ -470,7 +565,8 @@ { "type": "null" } - ] + ], + "title": "test_path" } }, "description": "Paths are valid parameters, but you'll often want to further sanitize input to block certain inputs (i.e. \"..\").\n\nThe example regex would work for allowing inputs from a file which always has a file extension and does not allow backwards traversal from the root.\nIt only allows for relative paths and filenames only.\n\nIt's ideal to try to capture this in a regex so that the schema can represent validation 100%; this helps out clients.\nHowever, if you're unable to, it's not required to express everything through schema; you are always free to implement your\nown validation template.\n\nUsing \"Path\" as the request type adds a `\"format\": \"path\"` attribute to the schema and automatically serializes to Pathlib, assuming you want to use the\nPathlib API.\n\nRETURNS - the type of the file based on its URL, or null if it can't guess." @@ -485,12 +581,12 @@ "payload": { "format": "path", "pattern": "([\\w-]+/)*([\\w-]+)\\.[\\w]+", - "type": "string" + "type": "string", + "title": "path" } }, "description": "Paths are valid parameters, but you'll often want to further sanitize input to block certain inputs (i.e. \"..\").\n\nThe example regex would work for allowing inputs from a file which always has a file extension and does not allow backwards traversal from the root.\nIt only allows for relative paths and filenames only.\n\nIt's ideal to try to capture this in a regex so that the schema can represent validation 100%; this helps out clients.\nHowever, if you're unable to, it's not required to express everything through schema; you are always free to implement your\nown validation template.\n\nUsing \"Path\" as the request type adds a `\"format\": \"path\"` attribute to the schema and automatically serializes to Pathlib, assuming you want to use the\nPathlib API.\n\nRETURNS - the type of the file based on its URL, or null if it can't guess." 
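The `test_path` description above says the endpoint answers with the file type guessed from the path, or null when no guess is possible; the fixture module imports `mimetypes`, presumably for this purpose. A quick standalone illustration of that guess (not SDK code):

    import mimetypes

    print(mimetypes.guess_type('images/diagram.png')[0])  # 'image/png'
    print(mimetypes.guess_type('notes/todo')[0])          # None, serialized as null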
- }, - "events": [] + } }, "test_special_python_types": { "publish": { @@ -516,8 +612,7 @@ "$ref": "#/components/schemas/MyTypedDict" } } - }, - "events": [] + } }, "test_uuid": { "publish": { @@ -528,7 +623,8 @@ "$ref": "#/components/messageTraits/commonHeaders" }, "payload": { - "type": "string" + "type": "string", + "title": "test_uuid" } }, "description": "Get the 13th digit of a UUID to determine UUID VERSION" @@ -542,12 +638,12 @@ }, "payload": { "format": "uuid", - "type": "string" + "type": "string", + "title": "uid" } }, "description": "Get the 13th digit of a UUID to determine UUID VERSION" - }, - "events": [] + } }, "union_message_with_events": { "publish": { @@ -565,7 +661,8 @@ { "type": "string" } - ] + ], + "title": "union_message_with_events" } } }, @@ -581,13 +678,11 @@ "str", "int" ], - "type": "string" + "type": "string", + "title": "param" } } - }, - "events": [ - "union" - ] + } }, "union_response": { "publish": { @@ -624,7 +719,8 @@ }, "type": "object" } - ] + ], + "title": "union_response" } }, "description": "Spit out a random string, integer, boolean, or object response" @@ -638,8 +734,7 @@ } }, "description": "Spit out a random string, integer, boolean, or object response" - }, - "events": [] + } }, "valid_default_argument": { "publish": { @@ -650,7 +745,8 @@ "$ref": "#/components/messageTraits/commonHeaders" }, "payload": { - "type": "integer" + "type": "integer", + "title": "valid_default_argument" } }, "description": "verifies that you can call a function with a default parameter" @@ -664,12 +760,12 @@ }, "payload": { "default": 4, - "type": "integer" + "type": "integer", + "title": "param" } }, "description": "verifies that you can call a function with a default parameter" - }, - "events": [] + } }, "verify_float_dict": { "publish": { @@ -685,7 +781,8 @@ "type": "string" } }, - "type": "object" + "type": "object", + "title": "verify_float_dict" } }, "description": "verifies that dictionaries can have floats and integers as key types" @@ -703,12 +800,12 @@ "type": "string" } }, - "type": "object" + "type": "object", + "title": "param" } }, "description": "verifies that dictionaries can have floats and integers as key types" - }, - "events": [] + } }, "verify_nested": { "publish": { @@ -736,45 +833,152 @@ } }, "description": "verifies that nested values are parsed correctly" - }, - "events": [] + } } }, - "description": "This is an example of the overarching capability class a user creates that we want to inject into the service.\n\nWhen defining entrypoints to your capability, use the @intersect_message() annotation. Your class will need\nat least one function with this annotation. These functions REQUIRE type annotations to function properly.\nSee the @intersect_message() annotation for more information.\n\nYou can potentially extend from multiple preexisting Capabilities in this class - each Capability may have\nseveral abstract functions which would need to be implemented by the user.\n\nBeyond this, you may define your capability class however you like, including through its constructor." 
- } - }, - "events": { - "int": { - "type": "integer" - }, - "str": { - "type": "string" - }, - "float": { - "type": "number" - }, - "union": { - "anyOf": [ - { - "type": "integer" + "events": { + "union": { + "schemaFormat": "application/vnd.aai.asyncapi+json;version=2.6.0", + "contentType": "application/json", + "payload": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "string" + } + ], + "title": "union" + }, + "traits": { + "$ref": "#/components/messageTraits/commonHeaders" + }, + "description": "Generic example of how to do a union event" + }, + "int": { + "schemaFormat": "application/vnd.aai.asyncapi+json;version=2.6.0", + "contentType": "application/json", + "payload": { + "description": "Generic integer event", + "type": "integer", + "title": "int" + }, + "traits": { + "$ref": "#/components/messageTraits/commonHeaders" + } + }, + "str": { + "schemaFormat": "application/vnd.aai.asyncapi+json;version=2.6.0", + "contentType": "application/json", + "payload": { + "type": "string", + "title": "str" + }, + "traits": { + "$ref": "#/components/messageTraits/commonHeaders" + }, + "description": "Generic string event" + }, + "float": { + "schemaFormat": "application/vnd.aai.asyncapi+json;version=2.6.0", + "contentType": "application/json", + "payload": { + "type": "number", + "title": "float" + }, + "traits": { + "$ref": "#/components/messageTraits/commonHeaders" + }, + "description": "Generic float event" }, - { - "type": "string" + "list_float": { + "schemaFormat": "application/vnd.aai.asyncapi+json;version=2.6.0", + "contentType": "application/json", + "payload": { + "items": { + "type": "number" + }, + "type": "array", + "title": "list_float" + }, + "traits": { + "$ref": "#/components/messageTraits/commonHeaders" + }, + "description": "generic list of floats event" } - ] - }, - "list_float": { - "items": { - "type": "number" }, - "type": "array" + "status": { + "$ref": "#/components/schemas/DummyStatus" + }, + "description": "This is an example of the overarching capability class a user creates that we want to inject into the service.\n\nWhen defining entrypoints to your capability, use the @intersect_message() annotation. Your class will need\nat least one function with this annotation. These functions REQUIRE type annotations to function properly.\nSee the @intersect_message() annotation for more information.\n\nYou can potentially extend from multiple preexisting Capabilities in this class - each Capability may have\nseveral abstract functions which would need to be implemented by the user.\n\nBeyond this, you may define your capability class however you like, including through its constructor." 
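With the fixture changes above, the flat top-level `events` map has been folded into each capability entry, which now carries `endpoints` (formerly `channels`), `events`, and its own `status` reference. A consumer of the generated schema can therefore read everything it needs per capability; a minimal sketch, assuming it is run from the repository root against this fixture:

    import json
    from pathlib import Path

    schema = json.loads(Path('tests/fixtures/example_schema.json').read_text())
    cap = schema['capabilities']['DummyCapability']
    print(sorted(cap['endpoints']))  # operation names, e.g. 'calculate_fibonacci'
    print(sorted(cap['events']))     # ['float', 'int', 'list_float', 'str', 'union']
    print(cap['status'])             # {'$ref': '#/components/schemas/DummyStatus'}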
} }, - "status": { - "$ref": "#/components/schemas/DummyStatus" - }, "components": { "schemas": { + "IntersectCoreStatus": { + "description": "Core status information about the INTERSECT-SDK Service as a whole.", + "properties": { + "uptime": { + "format": "duration", + "title": "Uptime", + "type": "string" + }, + "logical_cpus": { + "title": "Logical CPUs", + "type": "integer" + }, + "physical_cpus": { + "title": "Physical CPUs", + "type": "integer" + }, + "cpu_percentages": { + "items": { + "type": "number" + }, + "title": "CPU Percentages", + "type": "array" + }, + "service_cpu_percentage": { + "title": "Service CPU Usage Percentage", + "type": "number" + }, + "memory_total": { + "title": "Memory Total", + "type": "integer" + }, + "memory_usage_percentage": { + "title": "Memory Usage Percentage", + "type": "number" + }, + "service_memory_percentage": { + "title": "Service Memory Usage Percentage", + "type": "number" + }, + "disk_total": { + "title": "Disk Total", + "type": "integer" + }, + "disk_usage_percentage": { + "title": "Disk Usage Percentage", + "type": "number" + } + }, + "required": [ + "uptime", + "logical_cpus", + "physical_cpus", + "cpu_percentages", + "service_cpu_percentage", + "memory_total", + "memory_usage_percentage", + "service_memory_percentage", + "disk_total", + "disk_usage_percentage" + ], + "title": "IntersectCoreStatus", + "type": "object" + }, "Json": { "anyOf": [ { @@ -944,34 +1148,37 @@ "messageTraits": { "commonHeaders": { "userspaceHeaders": { - "$defs": { - "IntersectDataHandler": { - "description": "What data transfer type do you want to use for handling the request/response?\n\nDefault: MESSAGE", - "enum": [ - 0, - 1 - ], - "title": "IntersectDataHandler", - "type": "integer" - } - }, - "description": "Matches the current header definition for INTERSECT messages.\n\nALL messages should contain this header.", + "description": "ALL request/response/command messages must contain this header.\n\nWe do not include the content type of the message in the header, it is handled separately.", "properties": { + "message_id": { + "description": "Unique message ID", + "title": "Message Id", + "type": "string" + }, + "campaign_id": { + "description": "ID associated with a campaign", + "title": "Campaign Id", + "type": "string" + }, + "request_id": { + "description": "ID associated with a specific request message and response message sequence", + "title": "Request Id", + "type": "string" + }, "source": { "description": "source of the message", - "pattern": "([-a-z0-9]+\\.)*[-a-z0-9]", + "pattern": "^[a-z0-9][-a-z0-9.]*[-a-z0-9]$", "title": "Source", "type": "string" }, "destination": { "description": "destination of the message", - "pattern": "([-a-z0-9]+\\.)*[-a-z0-9]", + "pattern": "^[a-z0-9][-a-z0-9.]*[-a-z0-9]$", "title": "Destination", "type": "string" }, "created_at": { "description": "the UTC timestamp of message creation", - "format": "date-time", "title": "Created At", "type": "string" }, @@ -981,50 +1188,50 @@ "title": "Sdk Version", "type": "string" }, + "operation_id": { + "description": "Name of capability and operation we want to call, in the format ${CAPABILITY_NAME}.${FUNCTION_NAME}", + "title": "Operation Id", + "type": "string" + }, "data_handler": { - "$ref": "#/components/messageTraits/commonHeaders/userspaceHeaders/$defs/IntersectDataHandler", - "default": 0, - "description": "Code signifying where data is stored." 
+ "description": "Code signifying where data is stored.", + "type": "string" }, "has_error": { - "default": false, "description": "If this value is True, the payload will contain the error message (a string)", "title": "Has Error", - "type": "boolean" + "type": "string" } }, "required": [ + "message_id", + "campaign_id", + "request_id", "source", "destination", "created_at", - "sdk_version" + "sdk_version", + "operation_id" ], - "title": "UserspaceMessageHeader", + "title": "UserspaceMessageHeaders", "type": "object" }, "eventHeaders": { - "$defs": { - "IntersectDataHandler": { - "description": "What data transfer type do you want to use for handling the request/response?\n\nDefault: MESSAGE", - "enum": [ - 0, - 1 - ], - "title": "IntersectDataHandler", - "type": "integer" - } - }, - "description": "Matches the current header definition for INTERSECT messages.\n\nALL messages should contain this header.", + "description": "ALL event messages must include this header.\n\nWe do not include the content type of the message in the header, it is handled separately.", "properties": { + "message_id": { + "description": "Unique message ID", + "title": "Message Id", + "type": "string" + }, "source": { "description": "source of the message", - "pattern": "([-a-z0-9]+\\.)*[-a-z0-9]", + "pattern": "^[a-z0-9][-a-z0-9.]*[-a-z0-9]$", "title": "Source", "type": "string" }, "created_at": { "description": "the UTC timestamp of message creation", - "format": "date-time", "title": "Created At", "type": "string" }, @@ -1035,19 +1242,28 @@ "type": "string" }, "data_handler": { - "$ref": "#/components/messageTraits/commonHeaders/eventHeaders/$defs/IntersectDataHandler", - "default": 0, - "description": "Code signifying where data is stored." + "description": "Code signifying where data is stored.", + "type": "string" + }, + "capability_name": { + "description": "The name of the capability which emitted the event originally.", + "pattern": "^[a-zA-Z0-9]\\w*$", + "title": "Capability Name", + "type": "string" }, "event_name": { + "description": "The name of the event that was emitted, namespaced to the capability.", + "pattern": "^[a-zA-Z0-9]\\w*$", "title": "Event Name", "type": "string" } }, "required": [ + "message_id", "source", "created_at", "sdk_version", + "capability_name", "event_name" ], "title": "EventMessageHeaders", diff --git a/tests/fixtures/example_schema.py b/tests/fixtures/example_schema.py index 5dfa01d..59cbb53 100644 --- a/tests/fixtures/example_schema.py +++ b/tests/fixtures/example_schema.py @@ -6,24 +6,18 @@ import decimal import mimetypes import random +from collections.abc import Generator from dataclasses import dataclass from decimal import Decimal from enum import Enum from ipaddress import IPv4Address, IPv6Address from pathlib import Path from typing import ( + Annotated, Any, ClassVar, - Dict, - FrozenSet, - Generator, - List, Literal, NamedTuple, - Optional, - Set, - Tuple, - Union, ) from uuid import UUID @@ -37,15 +31,14 @@ WrapValidator, ) from pydantic_core import PydanticCustomError, Url -from typing_extensions import Annotated, TypeAliasType, TypedDict +from typing_extensions import TypeAliasType, TypedDict from intersect_sdk import ( HierarchyConfig, IntersectBaseCapabilityImplementation, + IntersectCapabilityError, IntersectDataHandler, IntersectEventDefinition, - IntersectMimeType, - intersect_event, intersect_message, intersect_status, ) @@ -80,7 +73,7 @@ def json_custom_error_validator( Json = TypeAliasType( 'Json', Annotated[ - Union[Dict[str, 'Json'], List['Json'], str, 
int, float, bool, None], + dict[str, 'Json'] | list['Json'] | str | int | float | bool | None, WrapValidator(json_custom_error_validator), ], ) @@ -113,7 +106,7 @@ class Nested2(BaseModel): sub-nested class """ - variables: FrozenSet[int] + variables: frozenset[int] nested_json: Json @@ -175,7 +168,7 @@ class DummyCapabilityImplementation(IntersectBaseCapabilityImplementation): """ # everybody knows that the fastest Fibonacci program is one which pre-caches the numbers :) - _FIBONACCI_LST: ClassVar[List[int]] = [ + _FIBONACCI_LST: ClassVar[list[int]] = [ 0, 1, 2, @@ -229,6 +222,22 @@ class DummyCapabilityImplementation(IntersectBaseCapabilityImplementation): intersect_sdk_capability_name = 'DummyCapability' + intersect_sdk_events: ClassVar[dict[str, IntersectEventDefinition]] = { + 'union': IntersectEventDefinition( + event_type=(int | str), event_documentation='Generic example of how to do a union event' + ), + 'int': IntersectEventDefinition( + event_type=Annotated[int, Field(description='Generic integer event')] + ), + 'str': IntersectEventDefinition(event_type=str, event_documentation='Generic string event'), + 'float': IntersectEventDefinition( + event_type=float, event_documentation='Generic float event' + ), + 'list_float': IntersectEventDefinition( + event_type=list[float], event_documentation='generic list of floats event' + ), + } + def __init__(self) -> None: """ Users have complete freedom over the capability constructor (and are free to use stateful or stateless design paradigms). @@ -266,11 +275,11 @@ def update_status(self, fn_name: str) -> None: self._status_example['last_function_called'] = fn_name @intersect_message( - request_content_type=IntersectMimeType.JSON, - response_content_type=IntersectMimeType.JSON, + request_content_type='application/json', + response_content_type='application/json', response_data_transfer_handler=IntersectDataHandler.MESSAGE, ) - def calculate_fibonacci(self, request: Tuple[int, int]) -> List[int]: + def calculate_fibonacci(self, request: tuple[int, int]) -> list[int]: """ calculates all fibonacci numbers between two numbers @@ -287,17 +296,17 @@ def calculate_fibonacci(self, request: Tuple[int, int]) -> List[int]: return self._FIBONACCI_LST[left:right] @intersect_message( - request_content_type=IntersectMimeType.JSON, - response_content_type=IntersectMimeType.JSON, + request_content_type='application/json', + response_content_type='application/json', response_data_transfer_handler=IntersectDataHandler.MESSAGE, strict_request_validation=True, ) - def calculate_weird_algorithm(self, token: Annotated[int, Ge(1), Le(1_000_000)]) -> List[int]: + def calculate_3n_plus_1(self, token: Annotated[int, Ge(1), Le(1_000_000)]) -> list[int]: """ - Weird algorithm calculator. Takes in an integer, outputs an array of numbers + Calculates the famous "3n + 1" problem. Takes in an integer, outputs an array of numbers which follow the algorithm all the way to "1". 
""" - self.update_status('calculate_weird_algorithm') + self.update_status('calculate_3n_plus_1') result = [] while token != 1: result.append(token) @@ -309,11 +318,11 @@ def calculate_weird_algorithm(self, token: Annotated[int, Ge(1), Le(1_000_000)]) return result @intersect_message( - request_content_type=IntersectMimeType.JSON, - response_content_type=IntersectMimeType.JSON, + request_content_type='application/json', + response_content_type='application/json', response_data_transfer_handler=IntersectDataHandler.MESSAGE, ) - def union_response(self) -> Union[str, int, bool, Dict[str, Union[str, int, bool]]]: + def union_response(self) -> str | int | bool | dict[str, str | int | bool]: """ Spit out a random string, integer, boolean, or object response """ @@ -333,15 +342,15 @@ def union_response(self) -> Union[str, int, bool, Dict[str, Union[str, int, bool } @intersect_message( - request_content_type=IntersectMimeType.JSON, - response_content_type=IntersectMimeType.JSON, + request_content_type='application/json', + response_content_type='application/json', response_data_transfer_handler=IntersectDataHandler.MESSAGE, strict_request_validation=True, ) def annotated_set( self, - positive_int_set: Annotated[Set[Annotated[int, Field(gt=0)]], Field(min_length=1)], - ) -> Annotated[Set[Annotated[int, Field(gt=0)]], Field(min_length=1)]: + positive_int_set: Annotated[set[Annotated[int, Field(gt=0)]], Field(min_length=1)], + ) -> Annotated[set[Annotated[int, Field(gt=0)]], Field(min_length=1)]: """ return numbers in set which are prime numbers in the range 1-100 """ @@ -375,7 +384,7 @@ def annotated_set( } @intersect_message() - def test_dicts(self, request: Dict[str, int]) -> Dict[str, int]: + def test_dicts(self, request: dict[str, int]) -> dict[str, int]: """ NOTE: JSON always stores Dict/Mapping keys as strings. If the string can't be coerced into the input value, it will throw a RUNTIME error. @@ -420,10 +429,10 @@ def test_uuid(self, uid: UUID) -> str: self.update_status('test_uuid') return uid.hex[12] - @intersect_message(request_content_type=IntersectMimeType.STRING) + @intersect_message(request_content_type='application/json') def test_path( self, path: Annotated[Path, Field(pattern=r'([\w-]+/)*([\w-]+)\.[\w]+')] - ) -> Optional[str]: + ) -> str | None: """ Paths are valid parameters, but you'll often want to further sanitize input to block certain inputs (i.e. ".."). 
@@ -454,7 +463,7 @@ def test_decimal(self, input_value: Decimal) -> Decimal: return input_value / Decimal('3.14159265358979323846') @intersect_message( - response_content_type=IntersectMimeType.STRING, + response_content_type='application/json', ) def ip4_to_ip6(self, ip4: IPv4Address) -> IPv6Address: """ @@ -467,9 +476,11 @@ def ip4_to_ip6(self, ip4: IPv4Address) -> IPv6Address: return IPv6Address(42545680458834377588178886921629466624 | (int(ip4) << 80)) @intersect_message() - def get_url_parts(self, url: Url) -> Dict[str, Optional[Union[str, int]]]: + def get_url_parts(self, url: Url) -> dict[str, str | int | None]: """ example of automatic URL parsing and schema validation + + (the return type should realistically be a TypedDict, not a dict) """ self.update_status('get_url_parts') return { @@ -506,7 +517,7 @@ def search_for_lucky_string_in_json(self, param: Json) -> bool: return search_through_json_for_value('777', param) @intersect_message - def verify_float_dict(self, param: Dict[float, str]) -> Dict[int, str]: + def verify_float_dict(self, param: dict[float, str]) -> dict[int, str]: """ verifies that dictionaries can have floats and integers as key types """ @@ -541,15 +552,14 @@ def test_special_python_types(self, param: MyTypedDict) -> MyNamedTuple: # - the same event can show up in multiple messages # - we can advertise complex types (i.e. Union) # - a message can advertise multiple events - # - that both @intersect_message and @intersect_event work + # - that both @intersect_message and an event configuration works - @intersect_message(events={'union': IntersectEventDefinition(event_type=Union[int, str])}) - def union_message_with_events(self, param: Literal['str', 'int']) -> Union[int, str]: + @intersect_message + def union_message_with_events(self, param: Literal['str', 'int']) -> int | str: ret = str(random.random()) if param == 'str' else random.randint(1, 1_000_000) self.intersect_sdk_emit_event('union', ret) return ret - @intersect_event(events={'union': IntersectEventDefinition(event_type=Union[int, str])}) def union_event(self) -> None: ran_dumb = random.randrange(2) if ran_dumb == 0: @@ -557,13 +567,7 @@ def union_event(self) -> None: else: self.intersect_sdk_emit_event('union', random.randrange(1_000_001, 2_000_000)) - @intersect_message( - events={ - 'int': IntersectEventDefinition(event_type=int), - 'str': IntersectEventDefinition(event_type=str), - 'float': IntersectEventDefinition(event_type=float), - } - ) + @intersect_message def primitive_event_message(self, emit_times: Annotated[int, Field(1, ge=1)]) -> str: for _ in range(emit_times): self.intersect_sdk_emit_event('str', str(random.random())) @@ -571,13 +575,7 @@ def primitive_event_message(self, emit_times: Annotated[int, Field(1, ge=1)]) -> self.intersect_sdk_emit_event('float', random.random()) return 'your events have been emitted' - @intersect_message( - events={ - 'int': IntersectEventDefinition(event_type=int), - 'str': IntersectEventDefinition(event_type=str), - 'float': IntersectEventDefinition(event_type=float), - } - ) + @intersect_message def primitive_event_message_random(self) -> str: ran_dumb = random.randrange(3) if ran_dumb == 0: @@ -588,6 +586,37 @@ def primitive_event_message_random(self) -> str: self.intersect_sdk_emit_event('float', random.random()) return 'your events have been emitted' - @intersect_event(events={'list_float': IntersectEventDefinition(event_type=List[float])}) def list_float_event(self) -> None: self.intersect_sdk_emit_event('list_int', [random.random() for i in 
range(3)]) + + @intersect_message(request_content_type='image/png', response_content_type='image/png') + def binary_to_binary(self, in_image: Annotated[bytes, Field(max_length=(1 << 20))]) -> bytes: + return in_image + + @intersect_message + def divide_by_zero_exceptions(self, param: int) -> float: + if param < 0: + # not explicitly raising an exception, not propagating the exception message + return 100 / (param + 1) + # explicitly raising an IntersectCapabilityException, will propagate the exception message + try: + return 100 / (param - 1) + except ZeroDivisionError as e: + raise IntersectCapabilityError(str(e)) from e + + @intersect_message + def raise_exception_no_param(self) -> str: + raise IntersectCapabilityError('I should not exist in production!') # noqa: EM101, TRY003 + + +# quick script for generating a valid schema +if __name__ == '__main__': + import json + + from intersect_sdk import get_schema_from_capability_implementations + + schema = get_schema_from_capability_implementations( + [DummyCapabilityImplementation], FAKE_HIERARCHY_CONFIG + ) + # have user redirect stdout instead of forcing a file location + print(json.dumps(schema, indent=2)) # noqa: T201 diff --git a/tests/integration/test_return_type_mismatch.py b/tests/integration/test_return_type_mismatch.py index 2ef15e5..cf57a2a 100644 --- a/tests/integration/test_return_type_mismatch.py +++ b/tests/integration/test_return_type_mismatch.py @@ -8,6 +8,7 @@ """ import time +from uuid import uuid4 from intersect_sdk import ( ControlPlaneConfig, @@ -15,7 +16,6 @@ DataStoreConfigMap, IntersectBaseCapabilityImplementation, IntersectDataHandler, - IntersectMimeType, IntersectService, IntersectServiceConfig, intersect_message, @@ -24,9 +24,8 @@ ControlPlaneManager, ) from intersect_sdk._internal.messages.userspace import ( - UserspaceMessage, - create_userspace_message, - deserialize_and_validate_userspace_message, + create_userspace_message_headers, + validate_userspace_message_headers, ) from tests.fixtures.example_schema import FAKE_HIERARCHY_CONFIG @@ -64,7 +63,7 @@ def make_intersect_service() -> IntersectService: username='intersect_username', password='intersect_password', port=1883, - protocol='mqtt3.1.1', + protocol='mqtt5.0', ), ], status_interval=30.0, @@ -79,7 +78,7 @@ def make_message_interceptor() -> ControlPlaneManager: username='intersect_username', password='intersect_password', port=1883, - protocol='mqtt3.1.1', + protocol='mqtt5.0', ) ], ) @@ -89,13 +88,15 @@ def make_message_interceptor() -> ControlPlaneManager: # the service is not fulfilling its schema contract in the return value, so we get an error message back -def test_call_user_function_with_invalid_payload(): +def test_call_user_function_with_invalid_payload() -> None: intersect_service = make_intersect_service() message_interceptor = make_message_interceptor() msg = [None] - def userspace_msg_callback(payload: bytes) -> None: - msg[0] = deserialize_and_validate_userspace_message(payload) + def userspace_msg_callback( + payload: bytes, content_type: str, raw_headers: dict[str, str] + ) -> None: + msg[0] = (payload, content_type, validate_userspace_message_headers(raw_headers)) message_interceptor.add_subscription_channel( 'msg/msg/msg/msg/msg/response', {userspace_msg_callback}, False @@ -105,14 +106,15 @@ def userspace_msg_callback(payload: bytes) -> None: time.sleep(1.0) message_interceptor.publish_message( intersect_service._service_channel_name, - create_userspace_message( + b'2', + 'application/json', + create_userspace_message_headers( 
source='msg.msg.msg.msg.msg', destination='test.test.test.test.test', - content_type=IntersectMimeType.JSON, data_handler=IntersectDataHandler.MESSAGE, operation_id='ReturnTypeMismatchCapability.wrong_return_annotation', - # calculate_fibonacci takes in a tuple of two integers but we'll just send it one - payload=b'2', + campaign_id=uuid4(), + request_id=uuid4(), ), True, ) @@ -120,6 +122,5 @@ def userspace_msg_callback(payload: bytes) -> None: intersect_service.shutdown() message_interceptor.disconnect() - msg: UserspaceMessage = msg[0] - assert msg['headers']['has_error'] is True - assert b'Service domain logic threw exception.' in msg['payload'] + assert msg[0][2].has_error is True + assert b'Service domain logic threw exception.' in msg[0][0] diff --git a/tests/integration/test_service.py b/tests/integration/test_service.py index 190ed70..e7f0b53 100644 --- a/tests/integration/test_service.py +++ b/tests/integration/test_service.py @@ -7,29 +7,26 @@ instead, initialize an array with one value in it, then change the value inside the callback """ +import json import time -from typing import List +from uuid import uuid4 from intersect_sdk import ( ControlPlaneConfig, DataStoreConfig, DataStoreConfigMap, IntersectDataHandler, - IntersectMimeType, IntersectService, IntersectServiceConfig, ) from intersect_sdk._internal.control_plane.control_plane_manager import ControlPlaneManager from intersect_sdk._internal.data_plane.minio_utils import MinioPayload, get_minio_object from intersect_sdk._internal.messages.lifecycle import ( - LifecycleMessage, - LifecycleType, - deserialize_and_validate_lifecycle_message, + validate_lifecycle_message_headers, ) from intersect_sdk._internal.messages.userspace import ( - UserspaceMessage, - create_userspace_message, - deserialize_and_validate_userspace_message, + create_userspace_message_headers, + validate_userspace_message_headers, ) from tests.fixtures.example_schema import FAKE_HIERARCHY_CONFIG, DummyCapabilityImplementation @@ -55,7 +52,7 @@ def make_intersect_service() -> IntersectService: username='intersect_username', password='intersect_password', port=1883, - protocol='mqtt3.1.1', + protocol='mqtt5.0', ), ], status_interval=30.0, @@ -70,7 +67,7 @@ def make_message_interceptor() -> ControlPlaneManager: username='intersect_username', password='intersect_password', port=1883, - protocol='mqtt3.1.1', + protocol='mqtt5.0', ) ], ) @@ -79,7 +76,7 @@ def make_message_interceptor() -> ControlPlaneManager: # TESTS ################ -def test_control_plane_connections(): +def test_control_plane_connections() -> None: intersect_service = make_intersect_service() # make sure to wait a bit between each startup/shutdown call assert intersect_service.is_connected() is False @@ -105,13 +102,20 @@ def test_control_plane_connections(): # normal test that the user function can be called -def test_call_user_function(): +def test_call_user_function() -> None: intersect_service = make_intersect_service() message_interceptor = make_message_interceptor() - msg = [None] + msg = [None, None, None] - def userspace_msg_callback(payload: bytes) -> None: - msg[0] = deserialize_and_validate_userspace_message(payload) + campaign_id = uuid4() + request_id = uuid4() + + def userspace_msg_callback( + payload: bytes, content_type: str, raw_headers: dict[str, str] + ) -> None: + msg[0] = payload + msg[1] = content_type + msg[2] = validate_userspace_message_headers(raw_headers) message_interceptor.add_subscription_channel( 'msg/msg/msg/msg/msg/response', {userspace_msg_callback}, False 
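The assertions above only see the generic `Service domain logic threw exception.` payload (with `has_error` set) because no `IntersectCapabilityError` was raised. When a capability wants its error text forwarded to the requester, it raises that exception explicitly, as the fixture's `divide_by_zero_exceptions` endpoint does and as `test_exception_propagation` below verifies. A minimal sketch (the class below is illustrative only):

    from intersect_sdk import (
        IntersectBaseCapabilityImplementation,
        IntersectCapabilityError,
        intersect_message,
    )


    class DividerCapability(IntersectBaseCapabilityImplementation):
        intersect_sdk_capability_name = 'DividerCapability'

        @intersect_message
        def divide(self, denominator: float) -> float:
            try:
                return 100 / denominator
            except ZeroDivisionError as e:
                # the exception text is sent back to the requester; any other exception
                # type results only in the generic error payload
                raise IntersectCapabilityError(str(e)) from e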
@@ -121,13 +125,15 @@ def userspace_msg_callback(payload: bytes) -> None: time.sleep(1.0) message_interceptor.publish_message( intersect_service._service_channel_name, - create_userspace_message( + b'[4,6]', + 'application/json', + create_userspace_message_headers( source='msg.msg.msg.msg.msg', destination='test.test.test.test.test', - content_type=IntersectMimeType.JSON, data_handler=IntersectDataHandler.MESSAGE, operation_id='DummyCapability.calculate_fibonacci', - payload=b'[4,6]', + campaign_id=campaign_id, + request_id=request_id, ), True, ) @@ -135,18 +141,24 @@ def userspace_msg_callback(payload: bytes) -> None: intersect_service.shutdown() message_interceptor.disconnect() - msg: UserspaceMessage = msg[0] - assert msg['payload'] == b'[5,8,13]' + assert msg[0] == b'[5,8,13]' + # make sure header IDs were not modified + assert msg[2]['request_id'] == request_id + assert msg[2]['campaign_id'] == campaign_id # call a @staticmethod user function, which should work as normal -def test_call_static_user_function(): +def test_call_static_user_function() -> None: intersect_service = make_intersect_service() message_interceptor = make_message_interceptor() - msg = [None] + msg = [None, None, None] - def userspace_msg_callback(payload: bytes) -> None: - msg[0] = deserialize_and_validate_userspace_message(payload) + def userspace_msg_callback( + payload: bytes, content_type: str, raw_headers: dict[str, str] + ) -> None: + msg[0] = payload + msg[1] = content_type + msg[2] = validate_userspace_message_headers(raw_headers) message_interceptor.add_subscription_channel( 'msg/msg/msg/msg/msg/response', {userspace_msg_callback}, False @@ -156,13 +168,15 @@ def userspace_msg_callback(payload: bytes) -> None: time.sleep(1.0) message_interceptor.publish_message( intersect_service._service_channel_name, - create_userspace_message( + b'"res"', + 'application/json', + create_userspace_message_headers( source='msg.msg.msg.msg.msg', destination='test.test.test.test.test', - content_type=IntersectMimeType.JSON, data_handler=IntersectDataHandler.MESSAGE, operation_id='DummyCapability.test_generator', - payload=b'"res"', + campaign_id=uuid4(), + request_id=uuid4(), ), True, ) @@ -170,17 +184,20 @@ def userspace_msg_callback(payload: bytes) -> None: intersect_service.shutdown() message_interceptor.disconnect() - msg: UserspaceMessage = msg[0] - assert msg['payload'] == b'[114,215,330,101,216,115]' + assert msg[0] == b'[114,215,330,101,216,115]' -def test_call_user_function_with_default_and_empty_payload(): +def test_call_user_function_with_default_and_empty_payload() -> None: intersect_service = make_intersect_service() message_interceptor = make_message_interceptor() - msg = [None] + msg = [None, None, None] - def userspace_msg_callback(payload: bytes) -> None: - msg[0] = deserialize_and_validate_userspace_message(payload) + def userspace_msg_callback( + payload: bytes, content_type: str, raw_headers: dict[str, str] + ) -> None: + msg[0] = payload + msg[1] = content_type + msg[2] = validate_userspace_message_headers(raw_headers) message_interceptor.add_subscription_channel( 'msg/msg/msg/msg/msg/response', {userspace_msg_callback}, False @@ -190,13 +207,15 @@ def userspace_msg_callback(payload: bytes) -> None: time.sleep(1.0) message_interceptor.publish_message( intersect_service._service_channel_name, - create_userspace_message( + b'null', # the SDK will call the function's default value if "null" is passed as an argument + 'application/json', + create_userspace_message_headers( source='msg.msg.msg.msg.msg', 
destination='test.test.test.test.test', - content_type=IntersectMimeType.JSON, data_handler=IntersectDataHandler.MESSAGE, operation_id='DummyCapability.valid_default_argument', - payload=b'null', # if sending null as the payload, the SDK will call the function's default value + campaign_id=uuid4(), + request_id=uuid4(), ), True, ) @@ -204,18 +223,21 @@ def userspace_msg_callback(payload: bytes) -> None: intersect_service.shutdown() message_interceptor.disconnect() - msg: UserspaceMessage = msg[0] - assert msg['payload'] == b'8' + assert msg[0] == b'8' # call a user function with invalid parameters (so Pydantic will catch the error and pass it to the message interceptor) -def test_call_user_function_with_invalid_payload(): +def test_call_user_function_with_invalid_payload() -> None: intersect_service = make_intersect_service() message_interceptor = make_message_interceptor() - msg = [None] + msg = [None, None, None] - def userspace_msg_callback(payload: bytes) -> None: - msg[0] = deserialize_and_validate_userspace_message(payload) + def userspace_msg_callback( + payload: bytes, content_type: str, raw_headers: dict[str, str] + ) -> None: + msg[0] = payload + msg[1] = content_type + msg[2] = validate_userspace_message_headers(raw_headers) message_interceptor.add_subscription_channel( 'msg/msg/msg/msg/msg/response', {userspace_msg_callback}, False @@ -225,14 +247,15 @@ def userspace_msg_callback(payload: bytes) -> None: time.sleep(1.0) message_interceptor.publish_message( intersect_service._service_channel_name, - create_userspace_message( + b'[2]', + 'application/json', + create_userspace_message_headers( source='msg.msg.msg.msg.msg', destination='test.test.test.test.test', - content_type=IntersectMimeType.JSON, data_handler=IntersectDataHandler.MESSAGE, operation_id='DummyCapability.calculate_fibonacci', - # calculate_fibonacci takes in a tuple of two integers but we'll just send it one - payload=b'[2]', + campaign_id=uuid4(), + request_id=uuid4(), ), True, ) @@ -240,21 +263,24 @@ def userspace_msg_callback(payload: bytes) -> None: intersect_service.shutdown() message_interceptor.disconnect() - msg: UserspaceMessage = msg[0] - assert msg['headers']['has_error'] is True - assert b'Bad arguments to application' in msg['payload'] - assert b'validation error for tuple[int, int]' in msg['payload'] + assert msg[2].has_error is True + assert b'Bad arguments to application' in msg[0] + assert b'validation error for tuple[int, int]' in msg[0] # try to call an operation which doesn't exist - we'll get an error message back -def test_call_nonexistent_user_function(): +def test_call_nonexistent_user_function() -> None: intersect_service = make_intersect_service() message_interceptor = make_message_interceptor() - msg = [None] + msg = [None, None, None] # in this case, the message payload will be a Pydantic error (as our payload was invalid, but the operation was valid) - def userspace_msg_callback(payload: bytes) -> None: - msg[0] = deserialize_and_validate_userspace_message(payload) + def userspace_msg_callback( + payload: bytes, content_type: str, raw_headers: dict[str, str] + ) -> None: + msg[0] = payload + msg[1] = content_type + msg[2] = validate_userspace_message_headers(raw_headers) message_interceptor.add_subscription_channel( 'msg/msg/msg/msg/msg/response', {userspace_msg_callback}, False @@ -264,13 +290,85 @@ def userspace_msg_callback(payload: bytes) -> None: time.sleep(1.0) message_interceptor.publish_message( intersect_service._service_channel_name, - create_userspace_message( + b'null', + 
'application/json', + create_userspace_message_headers( source='msg.msg.msg.msg.msg', destination='test.test.test.test.test', - content_type=IntersectMimeType.JSON, data_handler=IntersectDataHandler.MESSAGE, operation_id='DummyCapability.THIS_FUNCTION_DOES_NOT_EXIST', - payload=b'null', + campaign_id=uuid4(), + request_id=uuid4(), + ), + True, + ) + time.sleep(3.0) + intersect_service.shutdown() + message_interceptor.disconnect() + + assert msg[2].has_error is True + assert b'Tried to call non-existent operation' in msg[0] + + +# make sure that exceptions propagate appropriately, based on whether or not IntersectCapabilityException is explicitly thrown +def test_exception_propagation() -> None: + intersect_service = make_intersect_service() + message_interceptor = make_message_interceptor() + msg = [] + + def userspace_msg_callback( + payload: bytes, content_type: str, raw_headers: dict[str, str] + ) -> None: + msg.append((payload, content_type, validate_userspace_message_headers(raw_headers))) + + message_interceptor.add_subscription_channel( + 'msg/msg/msg/msg/msg/response', {userspace_msg_callback}, False + ) + message_interceptor.connect() + intersect_service.startup() + time.sleep(1.0) + # divide by zero message which does NOT propagate + message_interceptor.publish_message( + intersect_service._service_channel_name, + b'-1', + 'application/json', + create_userspace_message_headers( + source='msg.msg.msg.msg.msg', + destination='test.test.test.test.test', + data_handler=IntersectDataHandler.MESSAGE, + operation_id='DummyCapability.divide_by_zero_exceptions', + campaign_id=uuid4(), + request_id=uuid4(), + ), + True, + ) + # divide by zero message which DOES propagate + message_interceptor.publish_message( + intersect_service._service_channel_name, + b'1', + 'application/json', + create_userspace_message_headers( + source='msg.msg.msg.msg.msg', + destination='test.test.test.test.test', + data_handler=IntersectDataHandler.MESSAGE, + operation_id='DummyCapability.divide_by_zero_exceptions', + campaign_id=uuid4(), + request_id=uuid4(), + ), + True, + ) + # sanity check for message propagation without param + message_interceptor.publish_message( + intersect_service._service_channel_name, + b'null', + 'application/json', + create_userspace_message_headers( + source='msg.msg.msg.msg.msg', + destination='test.test.test.test.test', + data_handler=IntersectDataHandler.MESSAGE, + operation_id='DummyCapability.raise_exception_no_param', + campaign_id=uuid4(), + request_id=uuid4(), ), True, ) @@ -278,19 +376,29 @@ def userspace_msg_callback(payload: bytes) -> None: intersect_service.shutdown() message_interceptor.disconnect() - msg: UserspaceMessage = msg[0] - assert msg['headers']['has_error'] is True - assert b'Tried to call non-existent operation' in msg['payload'] + assert msg[0][2].has_error is True + assert msg[0][0] == b'Service domain logic threw exception.' + assert msg[1][2].has_error is True + assert msg[1][0] == b'Service domain logic threw explicit exception:\ndivision by zero' + assert msg[2][2].has_error is True + assert ( + msg[2][0] + == b'Service domain logic threw explicit exception:\nI should not exist in production!' 
+ ) # this function is just here to ensure the MINIO workflow is correct -def test_call_minio_user_function(): +def test_call_minio_user_function() -> None: intersect_service = make_intersect_service() message_interceptor = make_message_interceptor() - msg = [None] + msg = [None, None, None] - def userspace_msg_callback(payload: bytes) -> None: - msg[0] = deserialize_and_validate_userspace_message(payload) + def userspace_msg_callback( + payload: bytes, content_type: str, raw_headers: dict[str, str] + ) -> None: + msg[0] = payload + msg[1] = content_type + msg[2] = validate_userspace_message_headers(raw_headers) message_interceptor.add_subscription_channel( 'msg/msg/msg/msg/msg/response', {userspace_msg_callback}, False @@ -300,13 +408,15 @@ def userspace_msg_callback(payload: bytes) -> None: time.sleep(1.0) message_interceptor.publish_message( intersect_service._service_channel_name, - create_userspace_message( + b'"1970-01-01T00:00:00Z"', + 'application/json', + create_userspace_message_headers( source='msg.msg.msg.msg.msg', destination='test.test.test.test.test', - content_type=IntersectMimeType.JSON, data_handler=IntersectDataHandler.MESSAGE, operation_id='DummyCapability.test_datetime', - payload=b'"1970-01-01T00:00:00Z"', + campaign_id=uuid4(), + request_id=uuid4(), ), True, ) @@ -314,9 +424,8 @@ def userspace_msg_callback(payload: bytes) -> None: intersect_service.shutdown() message_interceptor.disconnect() - msg: UserspaceMessage = msg[0] - minio_payload: MinioPayload = msg['payload'] - assert msg['headers']['data_handler'] == IntersectDataHandler.MINIO + minio_payload: MinioPayload = json.loads(msg[0]) + assert msg[2].data_handler == IntersectDataHandler.MINIO actual_data_str = get_minio_object( intersect_service._data_plane_manager._minio_providers[0], minio_payload ).decode() @@ -328,13 +437,17 @@ def userspace_msg_callback(payload: bytes) -> None: # NOTE: this test deliberately takes over a minute to run, due to how POLLING works. 
# # NOTE: we are NOT listening for FUNCTIONS_ALLOWED or FUNCTIONS_BLOCKED messages here because that API is subject to change -def test_lifecycle_messages(): +def test_lifecycle_messages() -> None: intersect_service = make_intersect_service() message_interceptor = make_message_interceptor() - messages: List[LifecycleMessage] = [] + messages = [] - def lifecycle_msg_callback(payload: bytes) -> None: - messages.append(deserialize_and_validate_lifecycle_message(payload)) + def lifecycle_msg_callback( + payload: bytes, content_type: str, raw_headers: dict[str, str] + ) -> None: + messages.append( + (json.loads(payload), content_type, validate_lifecycle_message_headers(raw_headers)) + ) message_interceptor.add_subscription_channel( 'test/test/test/test/test/lifecycle', {lifecycle_msg_callback}, False @@ -345,42 +458,53 @@ def lifecycle_msg_callback(payload: bytes) -> None: # sleep a moment to make sure message_interceptor catches the startup message time.sleep(1.0) intersect_service.startup() - # sleep a bit over 60 seconds to make sure we get the polling message - time.sleep(62.0) - - # send a message to trigger a status update (just the way the example service's domain works, not intrinsic) + # startup message should include a "default state" for the status, make sure we get it before we publish our message + time.sleep(3.0) + # send a message to make sure the next status update will be different (just the way the example service's domain works, not intrinsic) message_interceptor.publish_message( intersect_service._service_channel_name, - create_userspace_message( + b'{"1.2":"one point two"}', + 'application/json', + create_userspace_message_headers( source='msg.msg.msg.msg.msg', destination='test.test.test.test.test', - content_type=IntersectMimeType.JSON, data_handler=IntersectDataHandler.MESSAGE, operation_id='DummyCapability.verify_float_dict', # note that the dict key MUST be a string, even though the input wants a float key - payload=b'{"1.2":"one point two"}', + campaign_id=uuid4(), + request_id=uuid4(), ), True, ) - time.sleep(3.0) + # sleep a bit over 60 seconds to make sure we get the polling message with the new status + time.sleep(62.0) + intersect_service.shutdown('I want to shutdown') # sleep to get the shutdown message time.sleep(1.0) message_interceptor.disconnect() - assert len(messages) == 4 + assert len(messages) == 3 - assert messages[0]['headers']['lifecycle_type'] == LifecycleType.STARTUP - assert 'schema' in messages[0]['payload'] + assert messages[0][2].lifecycle_type == 'LCT_STARTUP' + assert 'schema' in messages[0][0] - assert messages[1]['headers']['lifecycle_type'] == LifecycleType.POLLING - assert 'schema' in messages[1]['payload'] + assert messages[1][2].lifecycle_type == 'LCT_POLLING' + assert 'schema' in messages[1][0] - assert messages[2]['headers']['lifecycle_type'] == LifecycleType.STATUS_UPDATE - assert 'schema' in messages[2]['payload'] + assert messages[2][2].lifecycle_type == 'LCT_SHUTDOWN' + assert 'I want to shutdown' in messages[2][0] - assert messages[3]['headers']['lifecycle_type'] == LifecycleType.SHUTDOWN - assert 'I want to shutdown' in messages[3]['payload'] + # make sure both the universal capability and the test capability show up in the first two messages + for i in range(2): + assert list(messages[i][0]['status'].keys()) == ['intersect_sdk', 'DummyCapability'] - assert messages[0]['payload']['status'] == messages[1]['payload']['status'] - assert messages[0]['payload']['status'] != messages[2]['payload']['status'] + # check the status values 
of the DummyCapability (the INTERSECT-SDK capability's status values are too variable) + assert messages[0][0]['status']['DummyCapability'] == { + 'functions_called': 0, + 'last_function_called': '', + } + assert messages[1][0]['status']['DummyCapability'] == { + 'functions_called': 1, + 'last_function_called': 'verify_float_dict', + } diff --git a/tests/unit/test_annotations.py b/tests/unit/test_annotations.py index 3874ce4..6648b1a 100644 --- a/tests/unit/test_annotations.py +++ b/tests/unit/test_annotations.py @@ -4,7 +4,6 @@ from intersect_sdk import ( IntersectBaseCapabilityImplementation, IntersectEventDefinition, - intersect_event, intersect_message, intersect_status, ) @@ -12,52 +11,31 @@ # this should immediately fail when trying to define the class itself # don't even need to create an object for it -def test_invalid_annotation_params(): +def test_invalid_annotation_params() -> None: with pytest.raises(ValidationError) as ex: class BadAnnotationArgs(IntersectBaseCapabilityImplementation): @intersect_message( request_content_type=0, response_content_type=0, - response_data_transfer_handler='MESSAGE', + response_data_transfer_handler='N/A', strict_request_validation='red', ) def bad_annotations(self, param: bool) -> bool: ... errors = [{'type': e['type'], 'loc': e['loc']} for e in ex.value.errors()] assert len(errors) == 4 - assert {'type': 'enum', 'loc': ('request_content_type',)} in errors + assert {'type': 'string_type', 'loc': ('request_content_type',)} in errors assert {'type': 'enum', 'loc': ('response_data_transfer_handler',)} in errors - assert {'type': 'enum', 'loc': ('response_content_type',)} in errors + assert {'type': 'string_type', 'loc': ('response_content_type',)} in errors assert {'type': 'bool_parsing', 'loc': ('strict_request_validation',)} in errors -def test_incorrect_intersect_event_annotations(): - with pytest.raises(ValidationError) as ex: - - class BadEventArgs(IntersectBaseCapabilityImplementation): - @intersect_event() - def some_func(self) -> bool: ... - - errors = [{'type': e['type'], 'loc': e['loc']} for e in ex.value.errors()] - assert len(errors) == 1 - assert {'type': 'missing_keyword_only_argument', 'loc': ('events',)} in errors - - with pytest.raises(ValidationError) as ex: - - class BadEventArgs2(IntersectBaseCapabilityImplementation): - @intersect_event(events={}) - def some_func(self) -> bool: ... - - errors = [{'type': e['type'], 'loc': e['loc']} for e in ex.value.errors()] - assert len(errors) == 1 - assert {'type': 'too_short', 'loc': ('events',)} in errors - +def test_incorrect_intersect_event_configs() -> None: with pytest.raises(ValidationError) as ex: class BadEventArgs3(IntersectBaseCapabilityImplementation): - @intersect_event(events={'one': IntersectEventDefinition()}) - def some_func(self) -> bool: ... + intersect_sdk_events = {'one': IntersectEventDefinition()} errors = [{'type': e['type'], 'loc': e['loc']} for e in ex.value.errors()] assert len(errors) == 1 @@ -67,64 +45,25 @@ def some_func(self) -> bool: ... with pytest.raises(ValidationError) as ex: class BadEventArgs4(IntersectBaseCapabilityImplementation): - @intersect_event( - events={ - 'one': IntersectEventDefinition( - event_type=5, content_type=0, data_handler='test' - ) - } - ) - def some_func(self) -> bool: ... 
+ intersect_sdk_events = { + 'one': IntersectEventDefinition( + event_type=5, + content_type=0, + data_handler='test', + event_documentation=0xDEADBEEF, + ) + } errors = [{'type': e['type'], 'loc': e['loc']} for e in ex.value.errors()] - assert len(errors) == 3 + assert len(errors) == 4 assert {'type': 'value_error', 'loc': ('event_type',)} in errors - assert {'type': 'enum', 'loc': ('content_type',)} in errors + assert {'type': 'string_type', 'loc': ('content_type',)} in errors assert {'type': 'enum', 'loc': ('data_handler',)} in errors - - # make sure that improper use of model_construct() will not pass inner validation (users are free to use it as long as they construct a valid model) - with pytest.raises(ValidationError) as ex: - - class BadEventArgs5(IntersectBaseCapabilityImplementation): - @intersect_event( - events={ - 'one': IntersectEventDefinition.model_construct( - data_handler='no', content_type='no' - ) - } - ) - def some_func(self) -> bool: ... - - errors = [{'type': e['type'], 'loc': e['loc']} for e in ex.value.errors()] - assert len(errors) == 3 - assert { - 'type': 'enum', - 'loc': ( - 'events', - 'one', - 'content_type', - ), - } in errors - assert { - 'type': 'enum', - 'loc': ( - 'events', - 'one', - 'data_handler', - ), - } in errors - assert { - 'type': 'missing', - 'loc': ( - 'events', - 'one', - 'event_type', - ), - } in errors + assert {'type': 'string_type', 'loc': ('event_documentation',)} in errors # only tests @classmethod applied first, schema_invalids tests @classmethod applied last -def test_classmethod_rejected(caplog: pytest.LogCaptureFixture): +def test_classmethod_rejected(caplog: pytest.LogCaptureFixture) -> None: with pytest.raises(TypeError) as ex: class ClassMethod1(IntersectBaseCapabilityImplementation): @@ -144,7 +83,7 @@ def bad_annotations(cls, param: bool) -> bool: ... 
assert 'The `@classmethod` decorator cannot be used with `@intersect_status()`' in str(ex2) -def test_staticmethod_invalids(): +def test_staticmethod_invalids() -> None: with pytest.raises(TypeError) as ex: class Test1(IntersectBaseCapabilityImplementation): diff --git a/tests/unit/test_base_capability_implementation.py b/tests/unit/test_base_capability_implementation.py index 2ff01ab..93f93c9 100644 --- a/tests/unit/test_base_capability_implementation.py +++ b/tests/unit/test_base_capability_implementation.py @@ -10,9 +10,7 @@ IntersectBaseCapabilityImplementation, IntersectDirectMessageParams, IntersectEventDefinition, - intersect_event, intersect_message, - intersect_status, ) from intersect_sdk._internal.interfaces import IntersectEventObserver @@ -27,8 +25,9 @@ def __init__(self) -> None: tuple[IntersectDirectMessageParams, INTERSECT_SERVICE_RESPONSE_CALLBACK_TYPE | None], ] = {} - def _on_observe_event(self, event_name: str, event_value: Any, operation: str) -> None: - self.tracked_events.append((event_name, event_value, operation)) + # this will work even if capability_name is an empty string; this is checked on the Service + def _on_observe_event(self, event_name: str, event_value: Any, capability_name: str) -> None: + self.tracked_events.append((event_name, event_value, capability_name)) def create_external_request( self, @@ -44,7 +43,7 @@ def create_external_request( # TESTS #################### -def test_no_override(): +def test_no_override() -> None: with pytest.raises(RuntimeError) as ex: class BadClass1(IntersectBaseCapabilityImplementation): @@ -87,59 +86,24 @@ def intersect_sdk_listen_for_service_event( assert 'BadClass4: Attempted to override a reserved INTERSECT-SDK function' in str(ex) -# Note that the ONLY thing the capability itself checks for are annotated functions. 
-# The event definitions and overall schema validation are a service-specific feature -def test_functions_dont_emit_events(): - """Functions without annotations and status functions should NOT emit events""" - +def test_functions_emit_events() -> None: class Inner(IntersectBaseCapabilityImplementation): - def mock_message(self, param: int) -> int: - ret = param << 1 - self.intersect_sdk_emit_event('mock_message', ret) - return ret - - def mock_event(self) -> None: - self.intersect_sdk_emit_event('mock_event', 'hello') - - @intersect_status() - def mock_status(self) -> str: - self._inner_function() - self.intersect_sdk_emit_event('status', 'test') - return 'test' - - # this function WOULD emit an event if it were called directly, but it gets called through a status function - @intersect_event(events={'inner': IntersectEventDefinition(event_type=str)}) - def _inner_function(self) -> None: - self.intersect_sdk_emit_event('inner', 'function') - - # setup - observer = MockObserver() - capability = Inner() - capability._intersect_sdk_register_observer(observer) + intersect_sdk_events = { + 'mock_event': IntersectEventDefinition(event_type=str), + 'inner': IntersectEventDefinition(event_type=int), + } - # message mocking - capability.mock_message(7) - capability.mock_event() - capability.mock_status() - - assert len(observer.tracked_events) == 0 - - -def test_functions_emit_events(): - class Inner(IntersectBaseCapabilityImplementation): @intersect_message() def mock_message(self, param: int) -> int: ret = param << 1 self.intersect_sdk_emit_event('mock_message', ret) return ret - @intersect_event(events={'mock_event': IntersectEventDefinition(event_type=str)}) def mock_event(self) -> None: self.intersect_sdk_emit_event('mock_event', 'hello') # can emit any event as long as an earlier function has the event configuration # NOTE: The event_type here is invalid, BUT this is something the IntersectService handles. 
- @intersect_event(events={'inner': IntersectEventDefinition(event_type=int)}) def outer_function(self) -> None: self._inner_function() @@ -160,7 +124,7 @@ def _inner_function(self) -> None: assert len(observer.tracked_events) == 3 -def test_functions_handle_requests(): +def test_functions_handle_requests() -> None: class Inner(IntersectBaseCapabilityImplementation): def __init__(self) -> None: super().__init__() diff --git a/tests/unit/test_config.py b/tests/unit/test_config.py index 0990473..4c8118b 100644 --- a/tests/unit/test_config.py +++ b/tests/unit/test_config.py @@ -167,7 +167,7 @@ def test_valid_service_config(): password='secret', host='http://hardknock.life', port='1883', - protocol='mqtt3.1.1', + protocol='mqtt5.0', ), ControlPlaneConfig( username='fine', diff --git a/tests/unit/test_event_message_headers.py b/tests/unit/test_event_message_headers.py new file mode 100644 index 0000000..1100a57 --- /dev/null +++ b/tests/unit/test_event_message_headers.py @@ -0,0 +1,119 @@ +""" +event message header validation testing +""" + +import datetime +import uuid + +import pytest +from pydantic import ValidationError + +from intersect_sdk import IntersectDataHandler, version_string +from intersect_sdk._internal.messages.event import ( + create_event_message_headers, + validate_event_message_headers, +) + + +def test_valid_event_message_deserializes() -> None: + raw_headers = { + 'message_id': 'cc88a2c9-7e47-409f-82c5-ef49914ae140', + 'source': 'source', + 'sdk_version': '0.5.0', + 'created_at': '2024-01-19T20:21:14.045591Z', + 'capability_name': 'capability', + 'event_name': 'event', + } + headers = validate_event_message_headers(raw_headers) + # check defaults + assert headers.data_handler == IntersectDataHandler.MESSAGE + # check type serializations + assert isinstance(headers.message_id, uuid.UUID) + assert isinstance(headers.created_at, datetime.datetime) + assert headers.created_at.year == 2024 + + +def test_unusual_event_message_deserializes() -> None: + raw_headers = { + 'message_id': 'cc88a2c9-7e47-409f-82c5-ef49914ae140', + 'source': 'source.one', + 'sdk_version': '0.5.0', + 'created_at': '2024', + 'data_handler': 'MINIO', + 'capability_name': 'capability', + 'event_name': 'event', + } + headers = validate_event_message_headers(raw_headers) + assert headers.data_handler == IntersectDataHandler.MINIO + # even on strict mode, Pydantic can validate an integer as a string type, i.e. 
'"2024"' - it parses this as number of seconds since the Unix epoch + assert headers.created_at.year == 1970 + + +def test_missing_does_not_deserialize() -> None: + raw_headers: dict[str, str] = {} + with pytest.raises(ValidationError) as err: + validate_event_message_headers(raw_headers) + errors = err.value.errors() + assert len(errors) == 6 + assert all(e['type'] == 'missing' for e in errors) + locations = [e['loc'] for e in errors] + assert ('message_id',) in locations + assert ('source',) in locations + assert ('sdk_version',) in locations + assert ('created_at',) in locations + assert ('capability_name',) in locations + assert ('event_name',) in locations + + +def test_invalid_does_not_deserialize() -> None: + raw_headers = { + 'message_id': 'not_a_uuid', + 'source': '/', + 'sdk_version': '1.0.0+20130313144700', + 'created_at': '2024-01-19T20:21:14.045591', + 'data_handler': 'COBOL', + 'capability_name': 'b@d_ch@r$', + 'event_name': 'b@d_ch@r$', + } + with pytest.raises(ValidationError) as err: + validate_event_message_headers(raw_headers) + errors = [{'type': e['type'], 'loc': e['loc']} for e in err.value.errors()] + assert len(errors) == 7 + # value we have is a string, but not a UUID + assert {'type': 'uuid_parsing', 'loc': ('message_id',)} in errors + # '/' is not a valid character in a source string or destination string + assert {'type': 'string_pattern_mismatch', 'loc': ('source',)} in errors + # The datetime in the sample data is ALMOST valid, but lacks zone information! + assert {'type': 'timezone_aware', 'loc': ('created_at',)} in errors + # the sample versions here have build metadata or alpha release data in their strings, this is not valid for INTERSECT + assert {'type': 'string_pattern_mismatch', 'loc': ('sdk_version',)} in errors + # can't transpose these values into the enumerations + assert {'type': 'enum', 'loc': ('data_handler',)} in errors + # invalid capability / event name format + assert {'type': 'string_pattern_mismatch', 'loc': ('capability_name',)} in errors + assert {'type': 'string_pattern_mismatch', 'loc': ('event_name',)} in errors + + +def test_create_event_message() -> None: + msg = create_event_message_headers( + source='source', + data_handler=IntersectDataHandler.MESSAGE, + capability_name='capability', + event_name='event', + ) + + # make sure all values are serialized as strings, this is necessary for some protocols i.e. 
MQTT5 Properties + for value in msg.values(): + assert isinstance(value, str) + + # rule of UUID-4 generation + assert str(msg['message_id'])[14] == '4' + assert len(msg['message_id']) == 36 + # enforce UTC + assert msg['created_at'][-6:] == '+00:00' + # this should be lowercase for maximum language capability + assert msg['data_handler'] == 'MESSAGE' + assert msg['sdk_version'] == version_string + assert msg['source'] == 'source' + assert msg['capability_name'] == 'capability' + assert msg['event_name'] == 'event' diff --git a/tests/unit/test_imports.py b/tests/unit/test_imports.py new file mode 100644 index 0000000..d9d6e53 --- /dev/null +++ b/tests/unit/test_imports.py @@ -0,0 +1,7 @@ +import intersect_sdk + + +def test_imports() -> None: + """Quick PEP-562 test, make sure every import is valid.""" + for attr in dir(intersect_sdk): + assert getattr(intersect_sdk, attr) is not None diff --git a/tests/unit/test_invalid_schema_runtime.py b/tests/unit/test_invalid_schema_runtime.py index 4d80db0..bdf6550 100644 --- a/tests/unit/test_invalid_schema_runtime.py +++ b/tests/unit/test_invalid_schema_runtime.py @@ -26,7 +26,7 @@ class CapabilityWithMinio(IntersectBaseCapabilityImplementation): def arbitrary_function(self, param: int) -> int: ... -def test_minio_not_allowed_without_config(caplog: pytest.LogCaptureFixture): +def test_minio_not_allowed_without_config(caplog: pytest.LogCaptureFixture) -> None: cap = CapabilityWithMinio() # note that despite the broker configuration, you do not actually need a broker running for this test conf = IntersectServiceConfig( @@ -36,10 +36,13 @@ def test_minio_not_allowed_without_config(caplog: pytest.LogCaptureFixture): username='intersect_username', password='intersect_password', port=1883, - protocol='mqtt3.1.1', + protocol='mqtt5.0', ), ], ) with pytest.raises(SystemExit): IntersectService([cap], conf) - assert "function 'arbitrary_function' should not set response_data_type as 1" in caplog.text + assert ( + "function 'arbitrary_function' should not set response_data_type as IntersectDataHandler.MINIO" + in caplog.text + ) diff --git a/tests/unit/test_lifecycle_message.py b/tests/unit/test_lifecycle_message.py deleted file mode 100644 index fa7f38f..0000000 --- a/tests/unit/test_lifecycle_message.py +++ /dev/null @@ -1,101 +0,0 @@ -""" -LifecycleMessage validation testing -""" - -import datetime -import uuid - -import pytest -from pydantic import ValidationError - -from intersect_sdk import version_string -from intersect_sdk._internal.messages.lifecycle import ( - LifecycleType, - create_lifecycle_message, - deserialize_and_validate_lifecycle_message, -) - - -def test_valid_lifecycle_message_deserializes(): - serialized = b'{"messageId":"cc88a2c9-7e47-409f-82c5-ef49914ae140","contentType":"application/json","payload":"payload","headers":{"source":"source","destination":"destination","sdk_version":"0.5.0","created_at":"2024-01-19T20:21:14.045591Z","lifecycle_type":0}}' - deserialized = deserialize_and_validate_lifecycle_message(serialized) - assert deserialized['headers']['lifecycle_type'] == LifecycleType.STARTUP - assert deserialized['contentType'] == 'application/json' - - -def test_unusual_lifecycle_message_deserializes(): - serialized = b'{"messageId":"cc88a2c9-7e47-409f-82c5-ef49914ae140","contentType":"application/json","payload":"payload","headers":{"source":"source.one","destination":"destination.two","sdk_version":"0.5.0","created_at":"2024","lifecycle_type":0}}' - deserialized = deserialize_and_validate_lifecycle_message(serialized) - assert 
deserialized['headers']['lifecycle_type'] == LifecycleType.STARTUP - assert deserialized['contentType'] == 'application/json' - # even on strict mode, Pydantic can validate an integer as a string type, i.e. '"2024"' - it parses this as number of seconds since the Unix epoch - assert deserialized['headers']['created_at'].year == 1970 - - -def test_missing_does_not_deserialize(): - serialized = b'{}' - with pytest.raises(ValidationError) as err: - deserialize_and_validate_lifecycle_message(serialized) - errors = err.value.errors() - assert len(errors) == 3 - assert all(e['type'] == 'missing' for e in errors) - locations = [e['loc'] for e in errors] - assert ('messageId',) in locations - assert ('payload',) in locations - assert ('headers',) in locations - - -def test_missing_headers_properties_does_not_deserialize(): - serialized = b'{"messageId":"cc88a2c9-7e47-409f-82c5-ef49914ae140","contentType":"application/json","payload":"payload","headers":{}}' - with pytest.raises(ValidationError) as err: - deserialize_and_validate_lifecycle_message(serialized) - errors = err.value.errors() - assert len(errors) == 5 - assert all(e['type'] == 'missing' for e in errors) - locations = [e['loc'] for e in errors] - assert ('headers', 'source') in locations - assert ('headers', 'destination') in locations - assert ('headers', 'created_at') in locations - assert ('headers', 'sdk_version') in locations - assert ('headers', 'lifecycle_type') in locations - - -def test_invalid_does_not_deserialize(): - serialized = b'{"messageId":"notauuid","contentType":"doesnotexist","payload":"payload","headers":{"source":"/","destination":"/","sdk_version":"1.0.0+20130313144700","created_at":"2024-01-19T20:21:14.045591","lifecycle_type":-1}}' - with pytest.raises(ValidationError) as err: - deserialize_and_validate_lifecycle_message(serialized) - errors = [{'type': e['type'], 'loc': e['loc']} for e in err.value.errors()] - assert len(errors) == 7 - # value we have is a string, but not a UUID - assert {'type': 'uuid_parsing', 'loc': ('messageId',)} in errors - # '/' is not a valid character in a source string or destination string - assert {'type': 'string_pattern_mismatch', 'loc': ('headers', 'source')} in errors - assert {'type': 'string_pattern_mismatch', 'loc': ('headers', 'destination')} in errors - # The datetime in the sample data is ALMOST valid, but lacks zone information! 
- assert {'type': 'timezone_aware', 'loc': ('headers', 'created_at')} in errors - # the sample versions here have build metadata or alpha release data in their strings, this is not valid for INTERSECT - assert {'type': 'string_pattern_mismatch', 'loc': ('headers', 'sdk_version')} in errors - # can't transpose these values into the enumerations - assert {'type': 'enum', 'loc': ('headers', 'lifecycle_type')} in errors - assert {'type': 'literal_error', 'loc': ('contentType',)} in errors - - -def test_create_lifecycle_message(): - msg = create_lifecycle_message( - source='source', - destination='destination', - lifecycle_type=LifecycleType.SHUTDOWN, - payload=[1, 2, 3], - ) - assert isinstance(msg['messageId'], uuid.UUID) - # rule of UUID-4 generation - assert str(msg['messageId'])[14] == '4' - assert msg['contentType'] == 'application/json' - assert msg['payload'] == [1, 2, 3] - assert isinstance(msg['headers']['created_at'], datetime.datetime) - # enforce UTC - assert msg['headers']['created_at'].tzinfo == datetime.timezone.utc - assert msg['headers']['lifecycle_type'] == LifecycleType.SHUTDOWN - assert msg['headers']['sdk_version'] == version_string - assert msg['headers']['source'] == 'source' - assert msg['headers']['destination'] == 'destination' diff --git a/tests/unit/test_lifecycle_message_headers.py b/tests/unit/test_lifecycle_message_headers.py new file mode 100644 index 0000000..4841375 --- /dev/null +++ b/tests/unit/test_lifecycle_message_headers.py @@ -0,0 +1,101 @@ +""" +lifecycle message header validation testing +""" + +import datetime +import uuid + +import pytest +from pydantic import ValidationError + +from intersect_sdk import version_string +from intersect_sdk._internal.messages.lifecycle import ( + create_lifecycle_message_headers, + validate_lifecycle_message_headers, +) + + +def test_valid_lifecycle_message_deserializes() -> None: + raw_headers = { + 'message_id': 'cc88a2c9-7e47-409f-82c5-ef49914ae140', + 'source': 'source', + 'sdk_version': '0.5.0', + 'created_at': '2024-01-19T20:21:14.045591Z', + 'lifecycle_type': 'LCT_STARTUP', + } + headers = validate_lifecycle_message_headers(raw_headers) + assert isinstance(headers.message_id, uuid.UUID) + assert isinstance(headers.created_at, datetime.datetime) + assert headers.created_at.year == 2024 + + +def test_unusual_lifecycle_message_deserializes() -> None: + raw_headers = { + 'message_id': 'cc88a2c9-7e47-409f-82c5-ef49914ae140', + 'source': 'source.one', + 'sdk_version': '0.5.0', + 'created_at': '2024', + 'lifecycle_type': 'LCT_STARTUP', + } + headers = validate_lifecycle_message_headers(raw_headers) + # even on strict mode, Pydantic can validate an integer as a string type, i.e. 
'"2024"' - it parses this as number of seconds since the Unix epoch + assert headers.created_at.year == 1970 + + +def test_missing_does_not_deserialize() -> None: + raw_headers: dict[str, str] = {} + with pytest.raises(ValidationError) as err: + validate_lifecycle_message_headers(raw_headers) + errors = err.value.errors() + assert len(errors) == 5 + assert all(e['type'] == 'missing' for e in errors) + locations = [e['loc'] for e in errors] + assert ('message_id',) in locations + assert ('source',) in locations + assert ('lifecycle_type',) in locations + assert ('sdk_version',) in locations + assert ('created_at',) in locations + + +def test_invalid_does_not_deserialize() -> None: + raw_headers = { + 'message_id': 'not_a_uuid', + 'source': '/', + 'sdk_version': '1.0.0+20130313144700', + 'created_at': '2024-01-19T20:21:14.045591', + 'lifecycle_type': 'NOT_A_LIFECYCLE_TYPE', + } + with pytest.raises(ValidationError) as err: + validate_lifecycle_message_headers(raw_headers) + errors = [{'type': e['type'], 'loc': e['loc']} for e in err.value.errors()] + assert len(errors) == 5 + # value we have is a string, but not a UUID + assert {'type': 'uuid_parsing', 'loc': ('message_id',)} in errors + # '/' is not a valid character in a source string + assert {'type': 'string_pattern_mismatch', 'loc': ('source',)} in errors + # The datetime in the sample data is ALMOST valid, but lacks zone information! + assert {'type': 'timezone_aware', 'loc': ('created_at',)} in errors + # the sample versions here have build metadata or alpha release data in their strings, this is not valid for INTERSECT + assert {'type': 'string_pattern_mismatch', 'loc': ('sdk_version',)} in errors + # can't transpose these values into the enumerations + assert {'type': 'literal_error', 'loc': ('lifecycle_type',)} in errors + + +def test_create_lifecycle_message() -> None: + msg = create_lifecycle_message_headers( + source='source', + lifecycle_type='LCT_SHUTDOWN', + ) + + # make sure all values are serialized as strings, this is necessary for some protocols i.e. 
MQTT5 Properties + for value in msg.values(): + assert isinstance(value, str) + + # rule of UUID-4 generation + assert msg['message_id'][14] == '4' + assert len(msg['message_id']) == 36 + # enforce UTC + assert msg['created_at'][-6:] == '+00:00' + assert msg['lifecycle_type'] == 'LCT_SHUTDOWN' + assert msg['sdk_version'] == version_string + assert msg['source'] == 'source' diff --git a/tests/unit/test_schema_invalids.py b/tests/unit/test_schema_invalids.py index 947fb42..233159e 100644 --- a/tests/unit/test_schema_invalids.py +++ b/tests/unit/test_schema_invalids.py @@ -16,22 +16,20 @@ import datetime import sys from collections import namedtuple +from collections.abc import Generator from dataclasses import dataclass -from typing import Any, Dict, FrozenSet, Generator, List, NamedTuple, Set, Tuple, TypeVar +from typing import Annotated, Any, NamedTuple, TypeVar import pytest from annotated_types import Gt from pydantic import BaseModel, Field -from typing_extensions import Annotated, TypeAliasType, TypedDict +from typing_extensions import TypeAliasType, TypedDict from intersect_sdk import ( HierarchyConfig, IntersectBaseCapabilityImplementation, - IntersectDataHandler, IntersectEventDefinition, - IntersectMimeType, get_schema_from_capability_implementations, - intersect_event, intersect_message, intersect_status, ) @@ -46,15 +44,15 @@ ) -def get_schema_helper(test_type: List[type]): +def get_schema_helper(test_type: list[type]) -> dict[str, Any]: return get_schema_from_capability_implementations(test_type, TEST_HIERARCHY_CONFIG) # MESSAGE TESTS ########################### -def test_disallow_missing_annotation(caplog: pytest.LogCaptureFixture): - # this class has no @intersect_message or @intersect_event annotation +def test_disallow_missing_annotation(caplog: pytest.LogCaptureFixture) -> None: + # this class has no @intersect_message or @intersect_status annotation, and no intersect_sdk_evnts configuration class MissingIntersectMessage(IntersectBaseCapabilityImplementation): intersect_sdk_capability_name = 'unused' @@ -65,7 +63,7 @@ def do_something(self, one: int) -> int: ... assert 'has no function annotated' in caplog.text -def test_disallow_too_many_parameters(caplog: pytest.LogCaptureFixture): +def test_disallow_too_many_parameters(caplog: pytest.LogCaptureFixture) -> None: # more than one parameter is forbidden class TooManyParametersOnIntersectMessage(IntersectBaseCapabilityImplementation): intersect_sdk_capability_name = 'unused' @@ -164,7 +162,7 @@ class MockObjectSubtype(IntersectBaseCapabilityImplementation): intersect_sdk_capability_name = 'unused' @intersect_message() - def mock_message(self) -> List[object]: ... + def mock_message(self) -> list[object]: ... with pytest.raises(SystemExit): get_schema_helper([MockObjectSubtype]) @@ -192,7 +190,7 @@ class MockAnyList(IntersectBaseCapabilityImplementation): intersect_sdk_capability_name = 'unused' @intersect_message() - def mock_message(self, param: List[Any]) -> None: ... + def mock_message(self, param: list[Any]) -> None: ... with pytest.raises(SystemExit): get_schema_helper([MockAnyList]) @@ -207,7 +205,7 @@ class MockComplexDynamicList(IntersectBaseCapabilityImplementation): intersect_sdk_capability_name = 'unused' @intersect_message() - def mock_message(self, param: List[namedtuple('Point', ['x', 'y'])]) -> None: # noqa: PYI024 (this is the point of testing this...) + def mock_message(self, param: list[namedtuple('Point', ['x', 'y'])]) -> None: # noqa: PYI024 (this is the point of testing this...) ... 
with pytest.raises(SystemExit): @@ -222,7 +220,7 @@ class MockAnySet(IntersectBaseCapabilityImplementation): intersect_sdk_capability_name = 'unused' @intersect_message() - def mock_message(self, param: Set[Any]) -> None: ... + def mock_message(self, param: set[Any]) -> None: ... with pytest.raises(SystemExit): get_schema_helper([MockAnySet]) @@ -236,7 +234,7 @@ class MockAnyFrozenSet(IntersectBaseCapabilityImplementation): intersect_sdk_capability_name = 'unused' @intersect_message() - def mock_message(self, param: FrozenSet[Any]) -> None: ... + def mock_message(self, param: frozenset[Any]) -> None: ... with pytest.raises(SystemExit): get_schema_helper([MockAnyFrozenSet]) @@ -266,7 +264,7 @@ class MockNonStrDictKey(IntersectBaseCapabilityImplementation): intersect_sdk_capability_name = 'unused' @intersect_message() - def mock_message(self, param: Dict[List[int], str]) -> None: ... + def mock_message(self, param: dict[list[int], str]) -> None: ... with pytest.raises(SystemExit): get_schema_helper([MockNonStrDictKey]) @@ -283,7 +281,7 @@ class MockAnyDictValue(IntersectBaseCapabilityImplementation): intersect_sdk_capability_name = 'unused' @intersect_message() - def mock_message(self, param: Dict[str, Any]) -> None: ... + def mock_message(self, param: dict[str, Any]) -> None: ... with pytest.raises(SystemExit): get_schema_helper([MockAnyDictValue]) @@ -297,7 +295,7 @@ class MockAnyTuple(IntersectBaseCapabilityImplementation): intersect_sdk_capability_name = 'unused' @intersect_message() - def mock_message(self, param: Tuple[int, str, Any, bool]) -> None: ... + def mock_message(self, param: tuple[int, str, Any, bool]) -> None: ... with pytest.raises(SystemExit): get_schema_helper([MockAnyTuple]) @@ -377,7 +375,7 @@ class MockAmbiguousTuple(IntersectBaseCapabilityImplementation): intersect_sdk_capability_name = 'unused' @intersect_message() - def mock_message(self, param: Tuple[()]) -> None: ... + def mock_message(self, param: tuple[()]) -> None: ... with pytest.raises(SystemExit): get_schema_helper([MockAmbiguousTuple]) @@ -461,7 +459,7 @@ def test_disallow_ambiguous_typealiastype(caplog: pytest.LogCaptureFixture): class AmbiguousTypeAliasType(IntersectBaseCapabilityImplementation): intersect_sdk_capability_name = 'unused' T = TypeVar('T') - PositiveList = TypeAliasType('PositiveList', List[Annotated[T, Gt(0)]], type_params=(T,)) + PositiveList = TypeAliasType('PositiveList', list[Annotated[T, Gt(0)]], type_params=(T,)) @intersect_message() def mock_message(self, param: PositiveList) -> None: ... @@ -542,7 +540,10 @@ def missing_return_annotation(self): ... with pytest.raises(SystemExit): get_schema_helper([MissingReturnAnnotationOnStatus]) - assert "'missing_return_annotation' should have a valid return annotation." in caplog.text + assert ( + "capability status function 'missing_return_annotation' should have a valid return annotation and should not be null." + in caplog.text + ) def test_disallow_invalid_return_annotation_status(caplog: pytest.LogCaptureFixture): @@ -569,106 +570,29 @@ def missing_return_annotation(self) -> Any: ... 
# EVENTS TESTS ##################################### -def test_disallow_same_event_different_types(caplog: pytest.LogCaptureFixture): - # this fails because the event with the same key 'mykey' is mapped to an integer in function 1, and a string in function 2 - class EventTypedDifferentlyAcrossFunctions(IntersectBaseCapabilityImplementation): - intersect_sdk_capability_name = 'unused' - - @intersect_event(events={'mykey': IntersectEventDefinition(event_type=int)}) - def function_1(self) -> None: ... - - @intersect_event(events={'mykey': IntersectEventDefinition(event_type=str)}) - def function_2(self) -> None: ... - - with pytest.raises(SystemExit): - get_schema_helper([EventTypedDifferentlyAcrossFunctions]) - assert ( - "On capability 'EventTypedDifferentlyAcrossFunctions', event key 'mykey' on function 'function_2' was previously defined differently. \nevent_type mismatch: current=, previous=" - in caplog.text - ) - - -def test_disallow_same_event_different_content_types(caplog: pytest.LogCaptureFixture): - # NOTE: @intersect_message functions are always evaluated before @intersect_event functions, regardless of their order in the class. - class CapImp(IntersectBaseCapabilityImplementation): - intersect_sdk_capability_name = 'unused' - - @intersect_event(events={'mykey': IntersectEventDefinition(event_type=int)}) - def function_2(self) -> None: ... - - @intersect_message( - events={ - 'mykey': IntersectEventDefinition( - event_type=int, content_type=IntersectMimeType.STRING - ) - } - ) - def function_1(self, param: int) -> None: ... - - with pytest.raises(SystemExit): - get_schema_helper([CapImp]) - assert ( - "On capability 'CapImp', event key 'mykey' on function 'function_2' was previously defined differently. \ncontent_type mismatch: current=IntersectMimeType.JSON, previous=IntersectMimeType.STRING" - in caplog.text - ) - - -def test_disallow_same_event_different_data_handlers(caplog: pytest.LogCaptureFixture): - # NOTE: @intersect_message functions are always evaluated before @intersect_event functions, regardless of their order in the class. - class CapImp(IntersectBaseCapabilityImplementation): - intersect_sdk_capability_name = 'unused' - - @intersect_event(events={'mykey': IntersectEventDefinition(event_type=int)}) - def function_2(self) -> None: ... - - @intersect_message( - events={ - 'mykey': IntersectEventDefinition( - event_type=int, data_handler=IntersectDataHandler.MINIO - ) - } - ) - def function_1(self, param: int) -> None: ... - - with pytest.raises(SystemExit): - get_schema_helper([CapImp]) - assert ( - "On capability 'CapImp', event key 'mykey' on function 'function_2' was previously defined differently. \ndata_handler mismatch: current=IntersectDataHandler.MESSAGE, previous=IntersectDataHandler.MINIO" - in caplog.text - ) - - -def test_disallow_event_type_without_schema(caplog: pytest.LogCaptureFixture): +def test_disallow_event_type_without_schema(caplog: pytest.LogCaptureFixture) -> None: class CapImp(IntersectBaseCapabilityImplementation): intersect_sdk_capability_name = 'unused' + # this is not parsable by Pydantic, so will not be accepted class Inner: one: int - @intersect_event(events={'mykey': IntersectEventDefinition(event_type=Inner)}) - def myfunc(self) -> None: ... 
+ intersect_sdk_events = {'mykey': IntersectEventDefinition(event_type=Inner)} with pytest.raises(SystemExit): get_schema_helper([CapImp]) - assert ( - "event key 'mykey' on function 'myfunc' has an invalid value in the events mapping" - in caplog.text - ) + assert "event key 'mykey' has an invalid value in the events mapping" in caplog.text -def test_disallow_dynamic_event_type(caplog: pytest.LogCaptureFixture): +def test_disallow_dynamic_event_type(caplog: pytest.LogCaptureFixture) -> None: class CapImp(IntersectBaseCapabilityImplementation): intersect_sdk_capability_name = 'unused' - - @intersect_event(events={'mykey': IntersectEventDefinition(event_type=List[Any])}) - def myfunc(self) -> None: ... + intersect_sdk_events = {'mykey': IntersectEventDefinition(event_type=list[Any])} with pytest.raises(SystemExit): get_schema_helper([CapImp]) - assert ( - "event key 'mykey' on function 'myfunc' has an invalid value in the events mapping" - in caplog.text - ) + assert "event key 'mykey' has an invalid value in the events mapping" in caplog.text # KEYWORD ARGUMENT TESTS ############################## @@ -952,28 +876,54 @@ def valid_function_2(self, param: str) -> str: ... assert '"dup" is a duplicate and has already appeared in another capability.' in caplog.text -# XXX this should probably pass at some point in the future, move it to a valid test file if so -def test_multiple_status_functions_across_capabilities(caplog: pytest.LogCaptureFixture): - # fails because there can only be one @intersect_status function between ALL capabilities +def test_input_param_must_be_bytes_if_content_type_is_binary(caplog: pytest.LogCaptureFixture): + # fails because request_content_type is not application/json, but request param is not bytes - class CapabilityName1(IntersectBaseCapabilityImplementation): - intersect_sdk_capability_name = '1' + class BinaryCapability(IntersectBaseCapabilityImplementation): + intersect_sdk_capability_name = 'binary' - @intersect_message() - def valid_function_1(self, param: str) -> str: ... + @intersect_message(request_content_type='image/png') + def param_not_bytes(self, param: int) -> str: ... - @intersect_status - def status_function_1(self) -> str: ... + with pytest.raises(SystemExit): + get_schema_helper([BinaryCapability]) + assert ( + "parameter 'param' type annotation 'int' on function 'param_not_bytes' must be 'bytes' if request_content_type is not 'application/json'" + in caplog.text + ) - class CapabilityName2(IntersectBaseCapabilityImplementation): - intersect_sdk_capability_name = '2' - @intersect_message() - def valid_function_2(self, param: str) -> str: ... +def test_return_type_must_be_bytes_if_content_type_is_binary(caplog: pytest.LogCaptureFixture): + # fails because response_content_type is not application/json, but return type is not bytes - @intersect_status - def status_function_2(self) -> str: ... + class BinaryCapability(IntersectBaseCapabilityImplementation): + intersect_sdk_capability_name = 'binary' + + @intersect_message(response_content_type='image/png') + def return_not_bytes(self, param: int) -> int: ... 
+ + with pytest.raises(SystemExit): + get_schema_helper([BinaryCapability]) + assert ( + "return annotation 'int' on function 'return_not_bytes' must be 'bytes' if response_content_type is not 'application/json'" + in caplog.text + ) + + +def test_event_type_must_be_bytes_if_content_type_is_binary( + caplog: pytest.LogCaptureFixture, +) -> None: + # fails because a binary (non-JSON) event content type must use "bytes" as its event type + + class BinaryCapability(IntersectBaseCapabilityImplementation): + intersect_sdk_capability_name = 'binary' + intersect_sdk_events = { + 'name': IntersectEventDefinition(event_type=int, content_type='image/png') + } with pytest.raises(SystemExit): - get_schema_helper([CapabilityName1, CapabilityName2]) - assert 'Only one capability may have an @intersect_status function' in caplog.text + get_schema_helper([BinaryCapability]) + assert ( + "event key 'name' must have EventDefinition event_type be 'bytes' if content_type is not 'application/json'" + in caplog.text + ) diff --git a/tests/unit/test_schema_valid.py b/tests/unit/test_schema_valid.py index 25e7c82..4e99113 100644 --- a/tests/unit/test_schema_valid.py +++ b/tests/unit/test_schema_valid.py @@ -5,19 +5,11 @@ IntersectBaseCapabilityImplementation, IntersectDataHandler, IntersectEventDefinition, - IntersectMimeType, - intersect_event, + get_schema_from_capability_implementations, intersect_message, intersect_status, ) -from intersect_sdk._internal.constants import ( - REQUEST_CONTENT, - RESPONSE_CONTENT, - RESPONSE_DATA, - STRICT_VALIDATION, -) from intersect_sdk._internal.schema import get_schema_and_functions_from_capability_implementations -from intersect_sdk.schema import get_schema_from_capability_implementations from tests.fixtures.example_schema import ( FAKE_HIERARCHY_CONFIG, DummyCapabilityImplementation, @@ -33,7 +25,7 @@ def get_fixture_path(fixture: str): # MINIMAL ANNOTATION TESTS ###################### -def test_minimal_intersect_annotations(): +def test_minimal_intersect_annotations() -> None: class CapWithMessage(IntersectBaseCapabilityImplementation): intersect_sdk_capability_name = 'CapWithMessage' @@ -43,8 +35,8 @@ def message_function(self, theinput: int) -> int: class CapWithEvent(IntersectBaseCapabilityImplementation): intersect_sdk_capability_name = 'CapWithEvent' + intersect_sdk_events = {'event': IntersectEventDefinition(event_type=str)} - @intersect_event(events={'event': IntersectEventDefinition(event_type=str)}) def event_function(self): self.intersect_sdk_emit_event('event', 'emitted_value') @@ -63,7 +55,8 @@ def status_function(self) -> str: ], FAKE_HIERARCHY_CONFIG, ) - assert len(schemas['capabilities']) == 3 + # this will also include the universal capability + assert len(schemas['capabilities']) == 4 # FIXTURE TESTS ################## @@ -79,56 +72,86 @@ def test_schema_comparison(): assert expected_schema == actual_schema -def test_verify_status_fn(): - (schema, function_map, _, status_fn_capability, status_fn_name, status_type_adapter) = ( +def test_verify_status_fn() -> None: + # NOTE: this construction will not include the default intersect_sdk capability, however this is an internal function which is normally only called after that capability is injected + (schema, function_map, _, status_list) = ( get_schema_and_functions_from_capability_implementations( [DummyCapabilityImplementation], FAKE_HIERARCHY_CONFIG, set() ) ) - assert status_fn_capability is DummyCapabilityImplementation - assert status_fn_name == 'get_status' - assert status_fn_name not in 
schema['capabilities'] - scoped_name = f'{status_fn_capability.intersect_sdk_capability_name}.{status_fn_name}' + dummy_capability = status_list[0] + + assert ( + dummy_capability.capability_name + is DummyCapabilityImplementation.intersect_sdk_capability_name + ) + assert dummy_capability.function_name == 'get_status' + assert ( + dummy_capability.capability_name + not in schema['capabilities']['DummyCapability']['endpoints'] + ) + + scoped_name = f'{dummy_capability.capability_name}.{dummy_capability.function_name}' assert scoped_name in function_map - assert status_type_adapter == function_map[scoped_name].response_adapter + assert dummy_capability.serializer == function_map[scoped_name].response_adapter assert function_map[scoped_name].request_adapter is None - assert status_type_adapter.json_schema() == schema['components']['schemas']['DummyStatus'] + assert ( + dummy_capability.serializer.json_schema() == schema['components']['schemas']['DummyStatus'] + ) def test_verify_attributes(): - _, function_map, _, _, _, _ = get_schema_and_functions_from_capability_implementations( + ( + _, + function_map, + _, + _, + ) = get_schema_and_functions_from_capability_implementations( [DummyCapabilityImplementation], FAKE_HIERARCHY_CONFIG, set() ) # test defaults assert ( - getattr(function_map['DummyCapability.verify_float_dict'].method, RESPONSE_DATA) + function_map['DummyCapability.verify_float_dict'].response_data_transfer_handler == IntersectDataHandler.MESSAGE ) - assert ( - getattr(function_map['DummyCapability.verify_nested'].method, REQUEST_CONTENT) - == IntersectMimeType.JSON - ) - assert ( - getattr(function_map['DummyCapability.verify_nested'].method, RESPONSE_CONTENT) - == IntersectMimeType.JSON - ) - assert getattr(function_map['DummyCapability.verify_nested'].method, STRICT_VALIDATION) is False + assert function_map['DummyCapability.verify_nested'].request_content_type == 'application/json' + assert function_map['DummyCapability.verify_nested'].response_content_type == 'application/json' + assert function_map['DummyCapability.verify_nested'].strict_validation is False # test non-defaults assert ( - getattr(function_map['DummyCapability.verify_nested'].method, RESPONSE_DATA) + function_map['DummyCapability.verify_nested'].response_data_transfer_handler == IntersectDataHandler.MINIO ) - assert ( - getattr(function_map['DummyCapability.ip4_to_ip6'].method, RESPONSE_CONTENT) - == IntersectMimeType.STRING - ) - assert ( - getattr(function_map['DummyCapability.test_path'].method, REQUEST_CONTENT) - == IntersectMimeType.STRING - ) - assert ( - getattr(function_map['DummyCapability.calculate_weird_algorithm'].method, STRICT_VALIDATION) - is True + assert function_map['DummyCapability.ip4_to_ip6'].response_content_type == 'application/json' + assert function_map['DummyCapability.test_path'].request_content_type == 'application/json' + assert function_map['DummyCapability.calculate_3n_plus_1'].strict_validation is True + + +def test_multiple_status_functions_across_capabilities() -> None: + class CapabilityName1(IntersectBaseCapabilityImplementation): + intersect_sdk_capability_name = '1' + + @intersect_message() + def valid_function_1(self, param: str) -> str: ... + + @intersect_status + def status_function_1(self) -> str: ... + + class CapabilityName2(IntersectBaseCapabilityImplementation): + intersect_sdk_capability_name = '2' + + @intersect_message() + def valid_function_2(self, param: str) -> str: ... + + @intersect_status + def status_function_2(self) -> str: ... 
+ + schema = get_schema_from_capability_implementations( + [CapabilityName1, CapabilityName2], FAKE_HIERARCHY_CONFIG ) + + assert len(schema['capabilities']) == 3 + for capability in schema['capabilities'].values(): + assert 'status' in capability diff --git a/tests/unit/test_userspace_message.py b/tests/unit/test_userspace_message.py deleted file mode 100644 index ffa0a28..0000000 --- a/tests/unit/test_userspace_message.py +++ /dev/null @@ -1,106 +0,0 @@ -""" -UserspaceMessage validation testing -""" - -import datetime -import uuid - -import pytest -from pydantic import ValidationError - -from intersect_sdk import IntersectDataHandler, IntersectMimeType, version_string -from intersect_sdk._internal.messages.userspace import ( - create_userspace_message, - deserialize_and_validate_userspace_message, -) - - -def test_valid_userspace_message_deserializes(): - serialized = b'{"messageId":"cc88a2c9-7e47-409f-82c5-ef49914ae140","operationId":"operation","contentType":"application/json","payload":"payload","headers":{"source":"source","destination":"destination","sdk_version":"0.5.0","created_at":"2024-01-19T20:21:14.045591Z","data_handler":0}}' - deserialized = deserialize_and_validate_userspace_message(serialized) - assert deserialized['headers']['data_handler'] == IntersectDataHandler.MESSAGE - assert deserialized['contentType'] == IntersectMimeType.JSON - assert deserialized['headers']['has_error'] is False - - -def test_unusual_userspace_message_deserializes(): - serialized = b'{"messageId":"cc88a2c9-7e47-409f-82c5-ef49914ae140","operationId":"operation","contentType":"application/json","payload":"payload","headers":{"source":"source.one","destination":"destination.two","sdk_version":"0.5.0","created_at":"2024","data_handler":0}}' - deserialized = deserialize_and_validate_userspace_message(serialized) - assert deserialized['headers']['data_handler'] == IntersectDataHandler.MESSAGE - assert deserialized['contentType'] == IntersectMimeType.JSON - assert deserialized['headers']['has_error'] is False - # even on strict mode, Pydantic can validate an integer as a string type, i.e. 
'"2024"' - it parses this as number of seconds since the Unix epoch - assert deserialized['headers']['created_at'].year == 1970 - - -def test_missing_does_not_deserialize(): - serialized = b'{}' - with pytest.raises(ValidationError) as err: - deserialize_and_validate_userspace_message(serialized) - errors = err.value.errors() - assert len(errors) == 4 - assert all(e['type'] == 'missing' for e in errors) - locations = [e['loc'] for e in errors] - assert ('messageId',) in locations - assert ('payload',) in locations - assert ('headers',) in locations - assert ('operationId',) in locations - - -def test_missing_headers_properties_does_not_deserialize(): - serialized = b'{"messageId":"cc88a2c9-7e47-409f-82c5-ef49914ae140","operationId":"operation","contentType":"application/json","payload":"payload","headers":{}}' - with pytest.raises(ValidationError) as err: - deserialize_and_validate_userspace_message(serialized) - errors = err.value.errors() - assert len(errors) == 4 - assert all(e['type'] == 'missing' for e in errors) - locations = [e['loc'] for e in errors] - assert ('headers', 'source') in locations - assert ('headers', 'destination') in locations - assert ('headers', 'created_at') in locations - assert ('headers', 'sdk_version') in locations - - -def test_invalid_does_not_deserialize(): - serialized = b'{"messageId":"notauuid","operationId":1,"contentType":"doesnotexist","payload":"payload","headers":{"source":"/","destination":"/","sdk_version":"1.0.0+20130313144700","created_at":"2024-01-19T20:21:14.045591","data_handler":-1}}' - with pytest.raises(ValidationError) as err: - deserialize_and_validate_userspace_message(serialized) - errors = [{'type': e['type'], 'loc': e['loc']} for e in err.value.errors()] - assert len(errors) == 8 - # value we have is a string, but not a UUID - assert {'type': 'uuid_parsing', 'loc': ('messageId',)} in errors - assert {'type': 'string_type', 'loc': ('operationId',)} in errors - # '/' is not a valid character in a source string or destination string - assert {'type': 'string_pattern_mismatch', 'loc': ('headers', 'source')} in errors - assert {'type': 'string_pattern_mismatch', 'loc': ('headers', 'destination')} in errors - # The datetime in the sample data is ALMOST valid, but lacks zone information! 
- assert {'type': 'timezone_aware', 'loc': ('headers', 'created_at')} in errors - # the sample versions here have build metadata or alpha release data in their strings, this is not valid for INTERSECT - assert {'type': 'string_pattern_mismatch', 'loc': ('headers', 'sdk_version')} in errors - # can't transpose these values into the enumerations - assert {'type': 'enum', 'loc': ('headers', 'data_handler')} in errors - assert {'type': 'enum', 'loc': ('contentType',)} in errors - - -def test_create_userspace_message(): - msg = create_userspace_message( - source='source', - destination='destination', - operation_id='operation', - content_type=IntersectMimeType.JSON, - data_handler=IntersectDataHandler.MESSAGE, - payload=[1, 2, 3], - ) - assert isinstance(msg['messageId'], uuid.UUID) - # rule of UUID-4 generation - assert str(msg['messageId'])[14] == '4' - assert msg['operationId'] == 'operation' - assert msg['contentType'] == IntersectMimeType.JSON - assert msg['payload'] == [1, 2, 3] - assert isinstance(msg['headers']['created_at'], datetime.datetime) - # enforce UTC - assert msg['headers']['created_at'].tzinfo == datetime.timezone.utc - assert msg['headers']['data_handler'] == IntersectDataHandler.MESSAGE - assert msg['headers']['sdk_version'] == version_string - assert msg['headers']['source'] == 'source' - assert msg['headers']['destination'] == 'destination' diff --git a/tests/unit/test_userspace_message_headers.py b/tests/unit/test_userspace_message_headers.py new file mode 100644 index 0000000..6bfa47c --- /dev/null +++ b/tests/unit/test_userspace_message_headers.py @@ -0,0 +1,139 @@ +""" +userspace message header validation testing +""" + +import datetime +import uuid + +import pytest +from pydantic import ValidationError + +from intersect_sdk import IntersectDataHandler, version_string +from intersect_sdk._internal.messages.userspace import ( + create_userspace_message_headers, + validate_userspace_message_headers, +) + + +def test_valid_userspace_message_deserializes() -> None: + raw_headers = { + 'message_id': 'cc88a2c9-7e47-409f-82c5-ef49914ae140', + 'campaign_id': 'dd88a2c9-7e47-409f-82c5-ef49914ae141', + 'request_id': 'ee88a2c9-7e47-409f-82c5-ef49914ae142', + 'operation_id': 'operation', + 'source': 'source', + 'destination': 'destination', + 'sdk_version': '0.5.0', + 'created_at': '2024-01-19T20:21:14.045591Z', + } + headers = validate_userspace_message_headers(raw_headers) + # check defaults + assert headers.data_handler == IntersectDataHandler.MESSAGE + assert headers.has_error is False + # check type serializations + assert isinstance(headers.message_id, uuid.UUID) + assert isinstance(headers.created_at, datetime.datetime) + assert headers.created_at.year == 2024 + + +def test_unusual_userspace_message_deserializes() -> None: + raw_headers = { + 'message_id': 'cc88a2c9-7e47-409f-82c5-ef49914ae140', + 'campaign_id': 'dd88a2c9-7e47-409f-82c5-ef49914ae141', + 'request_id': 'ee88a2c9-7e47-409f-82c5-ef49914ae142', + 'operation_id': 'operation', + 'source': 'source.one', + 'destination': 'destination.two', + 'sdk_version': '0.5.0', + 'created_at': '2024', + 'data_handler': 'MINIO', + 'has_error': 'true', + } + headers = validate_userspace_message_headers(raw_headers) + assert headers.data_handler == IntersectDataHandler.MINIO + assert headers.has_error is True + # even on strict mode, Pydantic can validate an integer as a string type, i.e. 
'"2024"' - it parses this as number of seconds since the Unix epoch + assert headers.created_at.year == 1970 + + +def test_missing_does_not_deserialize() -> None: + raw_headers: dict[str, str] = {} + with pytest.raises(ValidationError) as err: + validate_userspace_message_headers(raw_headers) + errors = err.value.errors() + assert len(errors) == 8 + assert all(e['type'] == 'missing' for e in errors) + locations = [e['loc'] for e in errors] + assert ('message_id',) in locations + assert ('campaign_id',) in locations + assert ('request_id',) in locations + assert ('operation_id',) in locations + assert ('source',) in locations + assert ('destination',) in locations + assert ('sdk_version',) in locations + assert ('created_at',) in locations + + +def test_invalid_does_not_deserialize() -> None: + raw_headers = { + 'message_id': 'not_a_uuid', + 'campaign_id': 'also_not_a_uuid', + 'request_id': 'definitely_not_a_uuid', + 'operation_id': 1, + 'source': '/', + 'destination': '/', + 'sdk_version': '1.0.0+20130313144700', + 'created_at': '2024-01-19T20:21:14.045591', + 'data_handler': 'COBOL', + 'has_error': 'I_AM_NOT_A_BOOLEAN', + } + with pytest.raises(ValidationError) as err: + validate_userspace_message_headers(raw_headers) + errors = [{'type': e['type'], 'loc': e['loc']} for e in err.value.errors()] + assert len(errors) == 10 + # value we have is a string, but not a UUID + assert {'type': 'uuid_parsing', 'loc': ('message_id',)} in errors + assert {'type': 'uuid_parsing', 'loc': ('request_id',)} in errors + assert {'type': 'uuid_parsing', 'loc': ('campaign_id',)} in errors + assert {'type': 'string_type', 'loc': ('operation_id',)} in errors + # '/' is not a valid character in a source string or destination string + assert {'type': 'string_pattern_mismatch', 'loc': ('source',)} in errors + assert {'type': 'string_pattern_mismatch', 'loc': ('destination',)} in errors + # The datetime in the sample data is ALMOST valid, but lacks zone information! + assert {'type': 'timezone_aware', 'loc': ('created_at',)} in errors + # the sample versions here have build metadata or alpha release data in their strings, this is not valid for INTERSECT + assert {'type': 'string_pattern_mismatch', 'loc': ('sdk_version',)} in errors + # can't transpose these values into the enumerations + assert {'type': 'enum', 'loc': ('data_handler',)} in errors + + +def test_create_userspace_message() -> None: + msg = create_userspace_message_headers( + source='source', + destination='destination', + operation_id='operation', + data_handler=IntersectDataHandler.MESSAGE, + request_id=uuid.uuid4(), + campaign_id=uuid.uuid4(), + ) + + # make sure all values are serialized as strings, this is necessary for some protocols i.e. 
MQTT5 Properties + for value in msg.values(): + assert isinstance(value, str) + + # rule of UUID-4 generation + assert str(msg['message_id'])[14] == '4' + assert len(msg['message_id']) == 36 + assert str(msg['request_id'])[14] == '4' + assert len(msg['request_id']) == 36 + assert str(msg['campaign_id'])[14] == '4' + assert len(msg['campaign_id']) == 36 + # enforce UTC + assert msg['created_at'][-6:] == '+00:00' + # this should be lowercase for maximum language capability + assert msg['has_error'] == 'false' + assert msg['operation_id'] == 'operation' + assert msg['data_handler'] == 'MESSAGE' + assert msg['sdk_version'] == version_string + assert msg['source'] == 'source' + assert msg['destination'] == 'destination' diff --git a/tests/unit/test_version_resolver.py b/tests/unit/test_version_resolver.py index 1a3943d..767289c 100644 --- a/tests/unit/test_version_resolver.py +++ b/tests/unit/test_version_resolver.py @@ -10,47 +10,19 @@ (but that's why we have the version check in the first place!) """ -import datetime -from uuid import uuid4 - import pytest from intersect_sdk import ( IntersectDataHandler, - IntersectMimeType, version_info, version_string, ) -from intersect_sdk._internal.messages.userspace import UserspaceMessage, UserspaceMessageHeader from intersect_sdk._internal.version_resolver import _resolve_user_version, resolve_user_version -# HELPERS ################# - - -def message_generator(service_sdk_version: str) -> UserspaceMessage: - """ - generates a boilerplate UserspaceMessage - we only care about the sdk_version property for these tests - """ - return UserspaceMessage( - messageId=uuid4(), - operationId='no_message_being_sent', - contentType=IntersectMimeType.JSON, - payload='irrelevant', - headers=UserspaceMessageHeader( - source='source.test.test.test', - destination='destination.test.test.test', - sdk_version=service_sdk_version, - created_at=datetime.datetime.now(tz=datetime.timezone.utc), - data_handler=IntersectDataHandler.MESSAGE, - ), - ) - - # TESTS ################## -def test_version_info(): +def test_version_info() -> None: assert len(version_info) == 3 assert all(isinstance(x, int) for x in version_info) @@ -59,29 +31,29 @@ def test_version_info(): # against THIS SDK's version -def test_equal_version_ok(): - assert resolve_user_version(message_generator(version_string)) is True +def test_equal_version_ok() -> None: + assert resolve_user_version(version_string, 'source', IntersectDataHandler.MESSAGE) is True -def test_bugfix_up_ok(): +def test_bugfix_up_ok() -> None: mock_version = f'{version_info[0]}.{version_info[1]}.{version_info[2] + 1}' - assert resolve_user_version(message_generator(mock_version)) is True + assert resolve_user_version(mock_version, 'source', IntersectDataHandler.MESSAGE) is True -def test_bugfix_down_ok(): +def test_bugfix_down_ok() -> None: mock_version = f'{version_info[0]}.{version_info[1]}.{version_info[2] - 1}' - assert resolve_user_version(message_generator(mock_version)) is True + assert resolve_user_version(mock_version, 'source', IntersectDataHandler.MINIO) is True -def test_major_difference_up_not_ok(caplog: pytest.LogCaptureFixture): +def test_major_difference_up_not_ok(caplog: pytest.LogCaptureFixture) -> None: mock_version = f'{version_info[0] + 1}.{version_info[1]}.{version_info[2]}' - assert resolve_user_version(message_generator(mock_version)) is False + assert resolve_user_version(mock_version, 'source', IntersectDataHandler.MESSAGE) is False assert 'Major version incompatibility' in caplog.text -def 
test_major_difference_down_not_ok(caplog: pytest.LogCaptureFixture): +def test_major_difference_down_not_ok(caplog: pytest.LogCaptureFixture) -> None: mock_version = f'{version_info[0] - 1}.{version_info[1]}.{version_info[2]}' - assert resolve_user_version(message_generator(mock_version)) is False + assert resolve_user_version(mock_version, 'source', IntersectDataHandler.MESSAGE) is False assert 'Major version incompatibility' in caplog.text @@ -89,46 +61,62 @@ def test_major_difference_down_not_ok(caplog: pytest.LogCaptureFixture): # We want to test against arbitrary SDK versions, which may not necessarily be this one. -def test_minor_version_up_ok_if_release(): - their_message = message_generator('1.1.0') +def test_minor_version_up_ok_if_release() -> None: + their_version = '1.1.0' our_version = '1.0.0' assert ( _resolve_user_version( - their_message, our_version, tuple([int(x) for x in our_version.split('.')]) + their_version, + 'their_source', + IntersectDataHandler.MESSAGE, + our_version, + tuple([int(x) for x in our_version.split('.')]), ) is True ) -def test_minor_version_down_ok_if_release(): - their_message = message_generator('1.1.0') +def test_minor_version_down_ok_if_release() -> None: + their_version = '1.1.0' our_version = '1.2.0' assert ( _resolve_user_version( - their_message, our_version, tuple([int(x) for x in our_version.split('.')]) + their_version, + 'their_source', + IntersectDataHandler.MESSAGE, + our_version, + tuple([int(x) for x in our_version.split('.')]), ) is True ) -def test_minor_version_up_not_ok_if_prerelease(caplog: pytest.LogCaptureFixture): - their_message = message_generator('0.2.0') +def test_minor_version_up_not_ok_if_prerelease(caplog: pytest.LogCaptureFixture) -> None: + their_version = '0.2.0' our_version = '0.1.0' assert ( _resolve_user_version( - their_message, our_version, tuple([int(x) for x in our_version.split('.')]) + their_version, + 'their_source', + IntersectDataHandler.MESSAGE, + our_version, + tuple([int(x) for x in our_version.split('.')]), ) is False ) assert 'Pre-release minor version incompatibility' in caplog.text -def test_minor_version_down_not_ok_if_prerelease(caplog: pytest.LogCaptureFixture): - their_message = message_generator('0.2.0') +def test_minor_version_down_not_ok_if_prerelease(caplog: pytest.LogCaptureFixture) -> None: + their_version = '0.2.0' our_version = '0.3.0' assert ( _resolve_user_version( - their_message, our_version, tuple([int(x) for x in our_version.split('.')]) + their_version, + 'their_source', + IntersectDataHandler.MESSAGE, + our_version, + tuple([int(x) for x in our_version.split('.')]), ) is False )
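
Taken together, these test changes show the shape of the new events API: events are declared once on the capability class via the `intersect_sdk_events` class variable (a mapping of event names to `IntersectEventDefinition`), and any method of that capability may then call `intersect_sdk_emit_event` for a declared event; the removed `@intersect_event` decorator is gone entirely. Below is a minimal sketch of a capability written against that API, based only on the patterns exercised by the tests above. The class name, event key, and method bodies are illustrative assumptions, and the service/broker wiring required to actually run the capability is omitted.

    # Minimal sketch of the post-migration events API exercised by these tests.
    # Events are declared once in the class-level `intersect_sdk_events` mapping
    # rather than per-function via the removed `@intersect_event` decorator.
    from intersect_sdk import (
        IntersectBaseCapabilityImplementation,
        IntersectEventDefinition,
        intersect_message,
        intersect_status,
    )


    class ExampleCapability(IntersectBaseCapabilityImplementation):
        # illustrative names; any valid capability name / event key works
        intersect_sdk_capability_name = 'ExampleCapability'
        # every event this capability may emit, keyed by event name
        intersect_sdk_events = {
            'counter_updated': IntersectEventDefinition(event_type=int),
        }

        def __init__(self) -> None:
            super().__init__()
            self._counter = 0

        @intersect_message()
        def increment(self, amount: int) -> int:
            self._counter += amount
            # any function may emit an event declared in intersect_sdk_events
            self.intersect_sdk_emit_event('counter_updated', self._counter)
            return self._counter

        @intersect_status()
        def status(self) -> str:
            return str(self._counter)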