diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 99b8fcff..7e334a01 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -36,13 +36,13 @@ jobs: run: ./scripts/lint build: - if: github.repository == 'stainless-sdks/hyperspell-python' && (github.event_name == 'push' || github.event.pull_request.head.repo.fork) + if: github.event_name == 'push' || github.event.pull_request.head.repo.fork timeout-minutes: 10 name: build permissions: contents: read id-token: write - runs-on: depot-ubuntu-24.04 + runs-on: ${{ github.repository == 'stainless-sdks/hyperspell-python' && 'depot-ubuntu-24.04' || 'ubuntu-latest' }} steps: - uses: actions/checkout@v4 @@ -61,12 +61,14 @@ jobs: run: rye build - name: Get GitHub OIDC Token + if: github.repository == 'stainless-sdks/hyperspell-python' id: github-oidc uses: actions/github-script@v6 with: script: core.setOutput('github_token', await core.getIDToken()); - name: Upload tarball + if: github.repository == 'stainless-sdks/hyperspell-python' env: URL: https://pkg.stainless.com/s AUTH: ${{ steps.github-oidc.outputs.github_token }} diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 3f63a672..a36746b8 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "0.22.1" + ".": "0.25.0" } \ No newline at end of file diff --git a/.stats.yml b/.stats.yml index cb4ba6b1..7a16060c 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ -configured_endpoints: 14 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/hyperspell%2Fhyperspell-b98d78d20b4c4d2c702e39d073c7b46218c6a4faf8b2f7293034aa37cd55140e.yml -openapi_spec_hash: c280139f8355b085bd2c417c67fffada -config_hash: 595375b8ab62a4d175e28264da481aa3 +configured_endpoints: 18 +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/hyperspell%2Fhyperspell-55497442c9c2cf4077d1e8661fdf82ebe905dbb140ec65421ffa78da6d06a015.yml 
+openapi_spec_hash: aa9abda3ed926041c12f664e4225c5ed +config_hash: bd953556a1d60f48307815fdaf9ba88b diff --git a/CHANGELOG.md b/CHANGELOG.md index 0a27cfee..ec94995a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,47 @@ # Changelog +## 0.25.0 (2025-09-29) + +Full Changelog: [v0.22.1...v0.25.0](https://github.com/hyperspell/python-sdk/compare/v0.22.1...v0.25.0) + +### Features + +* **api:** api update ([f7a4696](https://github.com/hyperspell/python-sdk/commit/f7a4696dec30ab24f9d9679aa2108db7eee54eea)) +* **api:** api update ([8cc8137](https://github.com/hyperspell/python-sdk/commit/8cc8137a2d6d7f31ff0b700d5e0785fedeae3548)) +* **api:** api update ([d2f69a5](https://github.com/hyperspell/python-sdk/commit/d2f69a5441b2257818f1dcf7332eac31b4717123)) +* **api:** api update ([508ad7e](https://github.com/hyperspell/python-sdk/commit/508ad7ed7ad9dc387996a63727da6db1f929d22c)) +* **api:** api update ([7c260a4](https://github.com/hyperspell/python-sdk/commit/7c260a44ab0c326018ec50fdda6d87d8e9322ded)) +* **api:** api update ([796beb0](https://github.com/hyperspell/python-sdk/commit/796beb01090fbd33bf769e2917c14e8c9d2b5087)) +* **api:** api update ([1158074](https://github.com/hyperspell/python-sdk/commit/11580746b95550270c9aa0b5a358c26a1dd6888c)) +* **api:** api update ([77b1d4c](https://github.com/hyperspell/python-sdk/commit/77b1d4c16f9f01827d2ef9a886b85c2b2e41829b)) +* **api:** update via SDK Studio ([adcbd19](https://github.com/hyperspell/python-sdk/commit/adcbd194629860d4af7c85720938dd26dd47e026)) +* **api:** update via SDK Studio ([4929e75](https://github.com/hyperspell/python-sdk/commit/4929e750336a8d43d9d8d54dc7d1d44f58d12071)) +* **api:** update via SDK Studio ([743c857](https://github.com/hyperspell/python-sdk/commit/743c85738f9beeb8f493d3bf88c7db53184eab1c)) +* **api:** update via SDK Studio ([b6dd141](https://github.com/hyperspell/python-sdk/commit/b6dd141ce233e2835fa1b3dd9ffd0a582cc39db1)) +* **api:** update via SDK Studio 
([4161ff3](https://github.com/hyperspell/python-sdk/commit/4161ff335fa76ce6ed7ca86f9aec446ebe71b227)) +* **api:** update via SDK Studio ([e7a6bd1](https://github.com/hyperspell/python-sdk/commit/e7a6bd18157bfaa267da257f3248da9173bf83f6)) +* improve future compat with pydantic v3 ([ebc041e](https://github.com/hyperspell/python-sdk/commit/ebc041e9c7389c11d5f2049232fa84b1b4188668)) +* **types:** replace List[str] with SequenceNotStr in params ([3682734](https://github.com/hyperspell/python-sdk/commit/3682734864bd6767b30a2a57c85a49df45fcdcfe)) + + +### Bug Fixes + +* avoid newer type syntax ([7658b38](https://github.com/hyperspell/python-sdk/commit/7658b3838b534fe2a3d65c18da254eef6436e736)) +* **compat:** compat with `pydantic<2.8.0` when using additional fields ([d792335](https://github.com/hyperspell/python-sdk/commit/d79233578d0271e5de5b16043341565a8224bcf6)) + + +### Chores + +* do not install brew dependencies in ./scripts/bootstrap by default ([d9de5e4](https://github.com/hyperspell/python-sdk/commit/d9de5e46c7d49ed75cf7bb9ddc5451ed00576260)) +* **internal:** add Sequence related utils ([9cbe921](https://github.com/hyperspell/python-sdk/commit/9cbe921b91aacb926a8b5020ed935b9c5ea3773d)) +* **internal:** change ci workflow machines ([c9d8d8d](https://github.com/hyperspell/python-sdk/commit/c9d8d8d6b2d49b53307ed4a99042b01cdc44e089)) +* **internal:** move mypy configurations to `pyproject.toml` file ([116ded2](https://github.com/hyperspell/python-sdk/commit/116ded251332fdfa7366f5b0beb98f1c76aac4d2)) +* **internal:** update pydantic dependency ([a83362d](https://github.com/hyperspell/python-sdk/commit/a83362dc7319cfb8837d99efda10025857b21347)) +* **internal:** update pyright exclude list ([ccfaa5f](https://github.com/hyperspell/python-sdk/commit/ccfaa5f283c0f2cd5966fd1996ae65afa49bef0b)) +* **tests:** simplify `get_platform` test ([7364d6f](https://github.com/hyperspell/python-sdk/commit/7364d6f85d7849ac24377552f31a9d7b4d622565)) +* **types:** change optional 
parameter type from NotGiven to Omit ([59fe8c6](https://github.com/hyperspell/python-sdk/commit/59fe8c67a542f707d1867b8786baafbc018c7de2)) +* update github action ([c2227bb](https://github.com/hyperspell/python-sdk/commit/c2227bbbca9210c1156bd219d6eea4c9d8a56376)) + ## 0.22.1 (2025-08-13) Full Changelog: [v0.22.0...v0.22.1](https://github.com/hyperspell/python-sdk/compare/v0.22.0...v0.22.1) diff --git a/README.md b/README.md index 41ec77f4..c94d2be3 100644 --- a/README.md +++ b/README.md @@ -191,11 +191,11 @@ from hyperspell import Hyperspell client = Hyperspell() -response = client.memories.search( +query_result = client.memories.search( query="query", options={}, ) -print(response.options) +print(query_result.options) ``` ## File uploads diff --git a/api.md b/api.md index 6a65d499..568a25d8 100644 --- a/api.md +++ b/api.md @@ -1,3 +1,9 @@ +# Shared Types + +```python +from hyperspell.types import QueryResult +``` + # Integrations Types: @@ -34,18 +40,18 @@ Methods: - client.integrations.web_crawler.index(\*\*params) -> WebCrawlerIndexResponse +## Slack + +Methods: + +- client.integrations.slack.list(\*\*params) -> object + # Memories Types: ```python -from hyperspell.types import ( - Memory, - MemoryStatus, - MemoryDeleteResponse, - MemorySearchResponse, - MemoryStatusResponse, -) +from hyperspell.types import Memory, MemoryStatus, MemoryDeleteResponse, MemoryStatusResponse ``` Methods: @@ -54,10 +60,24 @@ Methods: - client.memories.delete(resource_id, \*, source) -> MemoryDeleteResponse - client.memories.add(\*\*params) -> MemoryStatus - client.memories.get(resource_id, \*, source) -> Memory -- client.memories.search(\*\*params) -> MemorySearchResponse +- client.memories.search(\*\*params) -> QueryResult - client.memories.status() -> MemoryStatusResponse - client.memories.upload(\*\*params) -> MemoryStatus +# Evaluate + +Types: + +```python +from hyperspell.types import EvaluateScoreHighlightResponse, EvaluateScoreQueryResponse +``` + +Methods: + +- 
client.evaluate.get_query(query_id) -> QueryResult +- client.evaluate.score_highlight(highlight_id, \*\*params) -> EvaluateScoreHighlightResponse +- client.evaluate.score_query(query_id, \*\*params) -> EvaluateScoreQueryResponse + # Vaults Types: diff --git a/mypy.ini b/mypy.ini deleted file mode 100644 index 39a0a2b5..00000000 --- a/mypy.ini +++ /dev/null @@ -1,50 +0,0 @@ -[mypy] -pretty = True -show_error_codes = True - -# Exclude _files.py because mypy isn't smart enough to apply -# the correct type narrowing and as this is an internal module -# it's fine to just use Pyright. -# -# We also exclude our `tests` as mypy doesn't always infer -# types correctly and Pyright will still catch any type errors. -exclude = ^(src/hyperspell/_files\.py|_dev/.*\.py|tests/.*)$ - -strict_equality = True -implicit_reexport = True -check_untyped_defs = True -no_implicit_optional = True - -warn_return_any = True -warn_unreachable = True -warn_unused_configs = True - -# Turn these options off as it could cause conflicts -# with the Pyright options. -warn_unused_ignores = False -warn_redundant_casts = False - -disallow_any_generics = True -disallow_untyped_defs = True -disallow_untyped_calls = True -disallow_subclassing_any = True -disallow_incomplete_defs = True -disallow_untyped_decorators = True -cache_fine_grained = True - -# By default, mypy reports an error if you assign a value to the result -# of a function call that doesn't return anything. We do this in our test -# cases: -# ``` -# result = ... -# assert result is None -# ``` -# Changing this codegen to make mypy happy would increase complexity -# and would not be worth it. 
-disable_error_code = func-returns-value,overload-cannot-match - -# https://github.com/python/mypy/issues/12162 -[mypy.overrides] -module = "black.files.*" -ignore_errors = true -ignore_missing_imports = true diff --git a/pyproject.toml b/pyproject.toml index 38939e65..0cdb5090 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "hyperspell" -version = "0.22.1" +version = "0.25.0" description = "The official Python library for the hyperspell API" dynamic = ["readme"] license = "MIT" @@ -56,7 +56,6 @@ dev-dependencies = [ "dirty-equals>=0.6.0", "importlib-metadata>=6.7.0", "rich>=13.7.1", - "nest_asyncio==1.6.0", "pytest-xdist>=3.6.1", ] @@ -148,6 +147,7 @@ exclude = [ "_dev", ".venv", ".nox", + ".git", ] reportImplicitOverride = true @@ -156,6 +156,58 @@ reportOverlappingOverload = false reportImportCycles = false reportPrivateUsage = false +[tool.mypy] +pretty = true +show_error_codes = true + +# Exclude _files.py because mypy isn't smart enough to apply +# the correct type narrowing and as this is an internal module +# it's fine to just use Pyright. +# +# We also exclude our `tests` as mypy doesn't always infer +# types correctly and Pyright will still catch any type errors. +exclude = ['src/hyperspell/_files.py', '_dev/.*.py', 'tests/.*'] + +strict_equality = true +implicit_reexport = true +check_untyped_defs = true +no_implicit_optional = true + +warn_return_any = true +warn_unreachable = true +warn_unused_configs = true + +# Turn these options off as it could cause conflicts +# with the Pyright options. +warn_unused_ignores = false +warn_redundant_casts = false + +disallow_any_generics = true +disallow_untyped_defs = true +disallow_untyped_calls = true +disallow_subclassing_any = true +disallow_incomplete_defs = true +disallow_untyped_decorators = true +cache_fine_grained = true + +# By default, mypy reports an error if you assign a value to the result +# of a function call that doesn't return anything. 
We do this in our test +# cases: +# ``` +# result = ... +# assert result is None +# ``` +# Changing this codegen to make mypy happy would increase complexity +# and would not be worth it. +disable_error_code = "func-returns-value,overload-cannot-match" + +# https://github.com/python/mypy/issues/12162 +[[tool.mypy.overrides]] +module = "black.files.*" +ignore_errors = true +ignore_missing_imports = true + + [tool.ruff] line-length = 120 output-format = "grouped" diff --git a/requirements-dev.lock b/requirements-dev.lock index 271a703f..c3a47ff0 100644 --- a/requirements-dev.lock +++ b/requirements-dev.lock @@ -75,7 +75,6 @@ multidict==6.4.4 mypy==1.14.1 mypy-extensions==1.0.0 # via mypy -nest-asyncio==1.6.0 nodeenv==1.8.0 # via pyright nox==2023.4.22 @@ -89,9 +88,9 @@ pluggy==1.5.0 propcache==0.3.1 # via aiohttp # via yarl -pydantic==2.10.3 +pydantic==2.11.9 # via hyperspell -pydantic-core==2.27.1 +pydantic-core==2.33.2 # via pydantic pygments==2.18.0 # via rich @@ -127,6 +126,9 @@ typing-extensions==4.12.2 # via pydantic # via pydantic-core # via pyright + # via typing-inspection +typing-inspection==0.4.1 + # via pydantic virtualenv==20.24.5 # via nox yarl==1.20.0 diff --git a/requirements.lock b/requirements.lock index c51d6551..e90ee670 100644 --- a/requirements.lock +++ b/requirements.lock @@ -55,9 +55,9 @@ multidict==6.4.4 propcache==0.3.1 # via aiohttp # via yarl -pydantic==2.10.3 +pydantic==2.11.9 # via hyperspell -pydantic-core==2.27.1 +pydantic-core==2.33.2 # via pydantic sniffio==1.3.0 # via anyio @@ -68,5 +68,8 @@ typing-extensions==4.12.2 # via multidict # via pydantic # via pydantic-core + # via typing-inspection +typing-inspection==0.4.1 + # via pydantic yarl==1.20.0 # via aiohttp diff --git a/scripts/bootstrap b/scripts/bootstrap index e84fe62c..b430fee3 100755 --- a/scripts/bootstrap +++ b/scripts/bootstrap @@ -4,10 +4,18 @@ set -e cd "$(dirname "$0")/.." -if ! 
command -v rye >/dev/null 2>&1 && [ -f "Brewfile" ] && [ "$(uname -s)" = "Darwin" ]; then +if [ -f "Brewfile" ] && [ "$(uname -s)" = "Darwin" ] && [ "$SKIP_BREW" != "1" ] && [ -t 0 ]; then brew bundle check >/dev/null 2>&1 || { - echo "==> Installing Homebrew dependencies…" - brew bundle + echo -n "==> Install Homebrew dependencies? (y/N): " + read -r response + case "$response" in + [yY][eE][sS]|[yY]) + brew bundle + ;; + *) + ;; + esac + echo } fi diff --git a/src/hyperspell/__init__.py b/src/hyperspell/__init__.py index 3f17ad2b..b4b72aa2 100644 --- a/src/hyperspell/__init__.py +++ b/src/hyperspell/__init__.py @@ -3,7 +3,7 @@ import typing as _t from . import types -from ._types import NOT_GIVEN, Omit, NoneType, NotGiven, Transport, ProxiesTypes +from ._types import NOT_GIVEN, Omit, NoneType, NotGiven, Transport, ProxiesTypes, omit, not_given from ._utils import file_from_path from ._client import ( Client, @@ -48,7 +48,9 @@ "ProxiesTypes", "NotGiven", "NOT_GIVEN", + "not_given", "Omit", + "omit", "HyperspellError", "APIError", "APIStatusError", diff --git a/src/hyperspell/_base_client.py b/src/hyperspell/_base_client.py index 9ff21427..fbf8d394 100644 --- a/src/hyperspell/_base_client.py +++ b/src/hyperspell/_base_client.py @@ -42,7 +42,6 @@ from ._qs import Querystring from ._files import to_httpx_files, async_to_httpx_files from ._types import ( - NOT_GIVEN, Body, Omit, Query, @@ -57,9 +56,10 @@ RequestOptions, HttpxRequestFiles, ModelBuilderProtocol, + not_given, ) from ._utils import is_dict, is_list, asyncify, is_given, lru_cache, is_mapping -from ._compat import PYDANTIC_V2, model_copy, model_dump +from ._compat import PYDANTIC_V1, model_copy, model_dump from ._models import GenericModel, FinalRequestOptions, validate_type, construct_type from ._response import ( APIResponse, @@ -145,9 +145,9 @@ def __init__( def __init__( self, *, - url: URL | NotGiven = NOT_GIVEN, - json: Body | NotGiven = NOT_GIVEN, - params: Query | NotGiven = NOT_GIVEN, + url: URL | 
NotGiven = not_given, + json: Body | NotGiven = not_given, + params: Query | NotGiven = not_given, ) -> None: self.url = url self.json = json @@ -232,7 +232,7 @@ def _set_private_attributes( model: Type[_T], options: FinalRequestOptions, ) -> None: - if PYDANTIC_V2 and getattr(self, "__pydantic_private__", None) is None: + if (not PYDANTIC_V1) and getattr(self, "__pydantic_private__", None) is None: self.__pydantic_private__ = {} self._model = model @@ -320,7 +320,7 @@ def _set_private_attributes( client: AsyncAPIClient, options: FinalRequestOptions, ) -> None: - if PYDANTIC_V2 and getattr(self, "__pydantic_private__", None) is None: + if (not PYDANTIC_V1) and getattr(self, "__pydantic_private__", None) is None: self.__pydantic_private__ = {} self._model = model @@ -595,7 +595,7 @@ def _maybe_override_cast_to(self, cast_to: type[ResponseT], options: FinalReques # we internally support defining a temporary header to override the # default `cast_to` type for use with `.with_raw_response` and `.with_streaming_response` # see _response.py for implementation details - override_cast_to = headers.pop(OVERRIDE_CAST_TO_HEADER, NOT_GIVEN) + override_cast_to = headers.pop(OVERRIDE_CAST_TO_HEADER, not_given) if is_given(override_cast_to): options.headers = headers return cast(Type[ResponseT], override_cast_to) @@ -825,7 +825,7 @@ def __init__( version: str, base_url: str | URL, max_retries: int = DEFAULT_MAX_RETRIES, - timeout: float | Timeout | None | NotGiven = NOT_GIVEN, + timeout: float | Timeout | None | NotGiven = not_given, http_client: httpx.Client | None = None, custom_headers: Mapping[str, str] | None = None, custom_query: Mapping[str, object] | None = None, @@ -1356,7 +1356,7 @@ def __init__( base_url: str | URL, _strict_response_validation: bool, max_retries: int = DEFAULT_MAX_RETRIES, - timeout: float | Timeout | None | NotGiven = NOT_GIVEN, + timeout: float | Timeout | None | NotGiven = not_given, http_client: httpx.AsyncClient | None = None, custom_headers: 
Mapping[str, str] | None = None, custom_query: Mapping[str, object] | None = None, @@ -1818,8 +1818,8 @@ def make_request_options( extra_query: Query | None = None, extra_body: Body | None = None, idempotency_key: str | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - post_parser: PostParser | NotGiven = NOT_GIVEN, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + post_parser: PostParser | NotGiven = not_given, ) -> RequestOptions: """Create a dict of type RequestOptions without keys of NotGiven values.""" options: RequestOptions = {} diff --git a/src/hyperspell/_client.py b/src/hyperspell/_client.py index 4c5bd535..3abe0ae8 100644 --- a/src/hyperspell/_client.py +++ b/src/hyperspell/_client.py @@ -3,7 +3,7 @@ from __future__ import annotations import os -from typing import Any, Union, Mapping +from typing import Any, Mapping from typing_extensions import Self, override import httpx @@ -11,17 +11,17 @@ from . import _exceptions from ._qs import Querystring from ._types import ( - NOT_GIVEN, Omit, Timeout, NotGiven, Transport, ProxiesTypes, RequestOptions, + not_given, ) from ._utils import is_given, get_async_library from ._version import __version__ -from .resources import auth, vaults, memories +from .resources import auth, vaults, evaluate, memories from ._streaming import Stream as Stream, AsyncStream as AsyncStream from ._exceptions import APIStatusError, HyperspellError from ._base_client import ( @@ -46,6 +46,7 @@ class Hyperspell(SyncAPIClient): integrations: integrations.IntegrationsResource memories: memories.MemoriesResource + evaluate: evaluate.EvaluateResource vaults: vaults.VaultsResource auth: auth.AuthResource with_raw_response: HyperspellWithRawResponse @@ -61,7 +62,7 @@ def __init__( api_key: str | None = None, user_id: str | None = None, base_url: str | httpx.URL | None = None, - timeout: Union[float, Timeout, None, NotGiven] = NOT_GIVEN, + timeout: float | Timeout | None | NotGiven = not_given, 
max_retries: int = DEFAULT_MAX_RETRIES, default_headers: Mapping[str, str] | None = None, default_query: Mapping[str, object] | None = None, @@ -111,6 +112,7 @@ def __init__( self.integrations = integrations.IntegrationsResource(self) self.memories = memories.MemoriesResource(self) + self.evaluate = evaluate.EvaluateResource(self) self.vaults = vaults.VaultsResource(self) self.auth = auth.AuthResource(self) self.with_raw_response = HyperspellWithRawResponse(self) @@ -153,9 +155,9 @@ def copy( api_key: str | None = None, user_id: str | None = None, base_url: str | httpx.URL | None = None, - timeout: float | Timeout | None | NotGiven = NOT_GIVEN, + timeout: float | Timeout | None | NotGiven = not_given, http_client: httpx.Client | None = None, - max_retries: int | NotGiven = NOT_GIVEN, + max_retries: int | NotGiven = not_given, default_headers: Mapping[str, str] | None = None, set_default_headers: Mapping[str, str] | None = None, default_query: Mapping[str, object] | None = None, @@ -237,6 +239,7 @@ def _make_status_error( class AsyncHyperspell(AsyncAPIClient): integrations: integrations.AsyncIntegrationsResource memories: memories.AsyncMemoriesResource + evaluate: evaluate.AsyncEvaluateResource vaults: vaults.AsyncVaultsResource auth: auth.AsyncAuthResource with_raw_response: AsyncHyperspellWithRawResponse @@ -252,7 +255,7 @@ def __init__( api_key: str | None = None, user_id: str | None = None, base_url: str | httpx.URL | None = None, - timeout: Union[float, Timeout, None, NotGiven] = NOT_GIVEN, + timeout: float | Timeout | None | NotGiven = not_given, max_retries: int = DEFAULT_MAX_RETRIES, default_headers: Mapping[str, str] | None = None, default_query: Mapping[str, object] | None = None, @@ -302,6 +305,7 @@ def __init__( self.integrations = integrations.AsyncIntegrationsResource(self) self.memories = memories.AsyncMemoriesResource(self) + self.evaluate = evaluate.AsyncEvaluateResource(self) self.vaults = vaults.AsyncVaultsResource(self) self.auth = 
auth.AsyncAuthResource(self) self.with_raw_response = AsyncHyperspellWithRawResponse(self) @@ -344,9 +348,9 @@ def copy( api_key: str | None = None, user_id: str | None = None, base_url: str | httpx.URL | None = None, - timeout: float | Timeout | None | NotGiven = NOT_GIVEN, + timeout: float | Timeout | None | NotGiven = not_given, http_client: httpx.AsyncClient | None = None, - max_retries: int | NotGiven = NOT_GIVEN, + max_retries: int | NotGiven = not_given, default_headers: Mapping[str, str] | None = None, set_default_headers: Mapping[str, str] | None = None, default_query: Mapping[str, object] | None = None, @@ -429,6 +433,7 @@ class HyperspellWithRawResponse: def __init__(self, client: Hyperspell) -> None: self.integrations = integrations.IntegrationsResourceWithRawResponse(client.integrations) self.memories = memories.MemoriesResourceWithRawResponse(client.memories) + self.evaluate = evaluate.EvaluateResourceWithRawResponse(client.evaluate) self.vaults = vaults.VaultsResourceWithRawResponse(client.vaults) self.auth = auth.AuthResourceWithRawResponse(client.auth) @@ -437,6 +442,7 @@ class AsyncHyperspellWithRawResponse: def __init__(self, client: AsyncHyperspell) -> None: self.integrations = integrations.AsyncIntegrationsResourceWithRawResponse(client.integrations) self.memories = memories.AsyncMemoriesResourceWithRawResponse(client.memories) + self.evaluate = evaluate.AsyncEvaluateResourceWithRawResponse(client.evaluate) self.vaults = vaults.AsyncVaultsResourceWithRawResponse(client.vaults) self.auth = auth.AsyncAuthResourceWithRawResponse(client.auth) @@ -445,6 +451,7 @@ class HyperspellWithStreamedResponse: def __init__(self, client: Hyperspell) -> None: self.integrations = integrations.IntegrationsResourceWithStreamingResponse(client.integrations) self.memories = memories.MemoriesResourceWithStreamingResponse(client.memories) + self.evaluate = evaluate.EvaluateResourceWithStreamingResponse(client.evaluate) self.vaults = 
vaults.VaultsResourceWithStreamingResponse(client.vaults) self.auth = auth.AuthResourceWithStreamingResponse(client.auth) @@ -453,6 +460,7 @@ class AsyncHyperspellWithStreamedResponse: def __init__(self, client: AsyncHyperspell) -> None: self.integrations = integrations.AsyncIntegrationsResourceWithStreamingResponse(client.integrations) self.memories = memories.AsyncMemoriesResourceWithStreamingResponse(client.memories) + self.evaluate = evaluate.AsyncEvaluateResourceWithStreamingResponse(client.evaluate) self.vaults = vaults.AsyncVaultsResourceWithStreamingResponse(client.vaults) self.auth = auth.AsyncAuthResourceWithStreamingResponse(client.auth) diff --git a/src/hyperspell/_compat.py b/src/hyperspell/_compat.py index 92d9ee61..bdef67f0 100644 --- a/src/hyperspell/_compat.py +++ b/src/hyperspell/_compat.py @@ -12,14 +12,13 @@ _T = TypeVar("_T") _ModelT = TypeVar("_ModelT", bound=pydantic.BaseModel) -# --------------- Pydantic v2 compatibility --------------- +# --------------- Pydantic v2, v3 compatibility --------------- # Pyright incorrectly reports some of our functions as overriding a method when they don't # pyright: reportIncompatibleMethodOverride=false -PYDANTIC_V2 = pydantic.VERSION.startswith("2.") +PYDANTIC_V1 = pydantic.VERSION.startswith("1.") -# v1 re-exports if TYPE_CHECKING: def parse_date(value: date | StrBytesIntFloat) -> date: # noqa: ARG001 @@ -44,90 +43,92 @@ def is_typeddict(type_: type[Any]) -> bool: # noqa: ARG001 ... 
else: - if PYDANTIC_V2: - from pydantic.v1.typing import ( + # v1 re-exports + if PYDANTIC_V1: + from pydantic.typing import ( get_args as get_args, is_union as is_union, get_origin as get_origin, is_typeddict as is_typeddict, is_literal_type as is_literal_type, ) - from pydantic.v1.datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime + from pydantic.datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime else: - from pydantic.typing import ( + from ._utils import ( get_args as get_args, is_union as is_union, get_origin as get_origin, + parse_date as parse_date, is_typeddict as is_typeddict, + parse_datetime as parse_datetime, is_literal_type as is_literal_type, ) - from pydantic.datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime # refactored config if TYPE_CHECKING: from pydantic import ConfigDict as ConfigDict else: - if PYDANTIC_V2: - from pydantic import ConfigDict - else: + if PYDANTIC_V1: # TODO: provide an error message here? 
ConfigDict = None + else: + from pydantic import ConfigDict as ConfigDict # renamed methods / properties def parse_obj(model: type[_ModelT], value: object) -> _ModelT: - if PYDANTIC_V2: - return model.model_validate(value) - else: + if PYDANTIC_V1: return cast(_ModelT, model.parse_obj(value)) # pyright: ignore[reportDeprecated, reportUnnecessaryCast] + else: + return model.model_validate(value) def field_is_required(field: FieldInfo) -> bool: - if PYDANTIC_V2: - return field.is_required() - return field.required # type: ignore + if PYDANTIC_V1: + return field.required # type: ignore + return field.is_required() def field_get_default(field: FieldInfo) -> Any: value = field.get_default() - if PYDANTIC_V2: - from pydantic_core import PydanticUndefined - - if value == PydanticUndefined: - return None + if PYDANTIC_V1: return value + from pydantic_core import PydanticUndefined + + if value == PydanticUndefined: + return None return value def field_outer_type(field: FieldInfo) -> Any: - if PYDANTIC_V2: - return field.annotation - return field.outer_type_ # type: ignore + if PYDANTIC_V1: + return field.outer_type_ # type: ignore + return field.annotation def get_model_config(model: type[pydantic.BaseModel]) -> Any: - if PYDANTIC_V2: - return model.model_config - return model.__config__ # type: ignore + if PYDANTIC_V1: + return model.__config__ # type: ignore + return model.model_config def get_model_fields(model: type[pydantic.BaseModel]) -> dict[str, FieldInfo]: - if PYDANTIC_V2: - return model.model_fields - return model.__fields__ # type: ignore + if PYDANTIC_V1: + return model.__fields__ # type: ignore + return model.model_fields def model_copy(model: _ModelT, *, deep: bool = False) -> _ModelT: - if PYDANTIC_V2: - return model.model_copy(deep=deep) - return model.copy(deep=deep) # type: ignore + if PYDANTIC_V1: + return model.copy(deep=deep) # type: ignore + return model.model_copy(deep=deep) def model_json(model: pydantic.BaseModel, *, indent: int | None = None) -> 
str: - if PYDANTIC_V2: - return model.model_dump_json(indent=indent) - return model.json(indent=indent) # type: ignore + if PYDANTIC_V1: + return model.json(indent=indent) # type: ignore + return model.model_dump_json(indent=indent) def model_dump( @@ -139,14 +140,14 @@ def model_dump( warnings: bool = True, mode: Literal["json", "python"] = "python", ) -> dict[str, Any]: - if PYDANTIC_V2 or hasattr(model, "model_dump"): + if (not PYDANTIC_V1) or hasattr(model, "model_dump"): return model.model_dump( mode=mode, exclude=exclude, exclude_unset=exclude_unset, exclude_defaults=exclude_defaults, # warnings are not supported in Pydantic v1 - warnings=warnings if PYDANTIC_V2 else True, + warnings=True if PYDANTIC_V1 else warnings, ) return cast( "dict[str, Any]", @@ -159,9 +160,9 @@ def model_dump( def model_parse(model: type[_ModelT], data: Any) -> _ModelT: - if PYDANTIC_V2: - return model.model_validate(data) - return model.parse_obj(data) # pyright: ignore[reportDeprecated] + if PYDANTIC_V1: + return model.parse_obj(data) # pyright: ignore[reportDeprecated] + return model.model_validate(data) # generic models @@ -170,17 +171,16 @@ def model_parse(model: type[_ModelT], data: Any) -> _ModelT: class GenericModel(pydantic.BaseModel): ... else: - if PYDANTIC_V2: + if PYDANTIC_V1: + import pydantic.generics + + class GenericModel(pydantic.generics.GenericModel, pydantic.BaseModel): ... + else: # there no longer needs to be a distinction in v2 but # we still have to create our own subclass to avoid # inconsistent MRO ordering errors class GenericModel(pydantic.BaseModel): ... - else: - import pydantic.generics - - class GenericModel(pydantic.generics.GenericModel, pydantic.BaseModel): ... 
- # cached properties if TYPE_CHECKING: diff --git a/src/hyperspell/_models.py b/src/hyperspell/_models.py index b8387ce9..6a3cd1d2 100644 --- a/src/hyperspell/_models.py +++ b/src/hyperspell/_models.py @@ -50,7 +50,7 @@ strip_annotated_type, ) from ._compat import ( - PYDANTIC_V2, + PYDANTIC_V1, ConfigDict, GenericModel as BaseGenericModel, get_args, @@ -81,11 +81,7 @@ class _ConfigProtocol(Protocol): class BaseModel(pydantic.BaseModel): - if PYDANTIC_V2: - model_config: ClassVar[ConfigDict] = ConfigDict( - extra="allow", defer_build=coerce_boolean(os.environ.get("DEFER_PYDANTIC_BUILD", "true")) - ) - else: + if PYDANTIC_V1: @property @override @@ -95,6 +91,10 @@ def model_fields_set(self) -> set[str]: class Config(pydantic.BaseConfig): # pyright: ignore[reportDeprecated] extra: Any = pydantic.Extra.allow # type: ignore + else: + model_config: ClassVar[ConfigDict] = ConfigDict( + extra="allow", defer_build=coerce_boolean(os.environ.get("DEFER_PYDANTIC_BUILD", "true")) + ) def to_dict( self, @@ -215,25 +215,25 @@ def construct( # pyright: ignore[reportIncompatibleMethodOverride] if key not in model_fields: parsed = construct_type(value=value, type_=extra_field_type) if extra_field_type is not None else value - if PYDANTIC_V2: - _extra[key] = parsed - else: + if PYDANTIC_V1: _fields_set.add(key) fields_values[key] = parsed + else: + _extra[key] = parsed object.__setattr__(m, "__dict__", fields_values) - if PYDANTIC_V2: - # these properties are copied from Pydantic's `model_construct()` method - object.__setattr__(m, "__pydantic_private__", None) - object.__setattr__(m, "__pydantic_extra__", _extra) - object.__setattr__(m, "__pydantic_fields_set__", _fields_set) - else: + if PYDANTIC_V1: # init_private_attributes() does not exist in v2 m._init_private_attributes() # type: ignore # copied from Pydantic v1's `construct()` method object.__setattr__(m, "__fields_set__", _fields_set) + else: + # these properties are copied from Pydantic's `model_construct()` method + 
object.__setattr__(m, "__pydantic_private__", None) + object.__setattr__(m, "__pydantic_extra__", _extra) + object.__setattr__(m, "__pydantic_fields_set__", _fields_set) return m @@ -243,7 +243,7 @@ def construct( # pyright: ignore[reportIncompatibleMethodOverride] # although not in practice model_construct = construct - if not PYDANTIC_V2: + if PYDANTIC_V1: # we define aliases for some of the new pydantic v2 methods so # that we can just document these methods without having to specify # a specific pydantic version as some users may not know which @@ -256,7 +256,7 @@ def model_dump( mode: Literal["json", "python"] | str = "python", include: IncEx | None = None, exclude: IncEx | None = None, - by_alias: bool = False, + by_alias: bool | None = None, exclude_unset: bool = False, exclude_defaults: bool = False, exclude_none: bool = False, @@ -264,6 +264,7 @@ def model_dump( warnings: bool | Literal["none", "warn", "error"] = True, context: dict[str, Any] | None = None, serialize_as_any: bool = False, + fallback: Callable[[Any], Any] | None = None, ) -> dict[str, Any]: """Usage docs: https://docs.pydantic.dev/2.4/concepts/serialization/#modelmodel_dump @@ -295,16 +296,18 @@ def model_dump( raise ValueError("context is only supported in Pydantic v2") if serialize_as_any != False: raise ValueError("serialize_as_any is only supported in Pydantic v2") + if fallback is not None: + raise ValueError("fallback is only supported in Pydantic v2") dumped = super().dict( # pyright: ignore[reportDeprecated] include=include, exclude=exclude, - by_alias=by_alias, + by_alias=by_alias if by_alias is not None else False, exclude_unset=exclude_unset, exclude_defaults=exclude_defaults, exclude_none=exclude_none, ) - return cast(dict[str, Any], json_safe(dumped)) if mode == "json" else dumped + return cast("dict[str, Any]", json_safe(dumped)) if mode == "json" else dumped @override def model_dump_json( @@ -313,13 +316,14 @@ def model_dump_json( indent: int | None = None, include: IncEx | 
None = None, exclude: IncEx | None = None, - by_alias: bool = False, + by_alias: bool | None = None, exclude_unset: bool = False, exclude_defaults: bool = False, exclude_none: bool = False, round_trip: bool = False, warnings: bool | Literal["none", "warn", "error"] = True, context: dict[str, Any] | None = None, + fallback: Callable[[Any], Any] | None = None, serialize_as_any: bool = False, ) -> str: """Usage docs: https://docs.pydantic.dev/2.4/concepts/serialization/#modelmodel_dump_json @@ -348,11 +352,13 @@ def model_dump_json( raise ValueError("context is only supported in Pydantic v2") if serialize_as_any != False: raise ValueError("serialize_as_any is only supported in Pydantic v2") + if fallback is not None: + raise ValueError("fallback is only supported in Pydantic v2") return super().json( # type: ignore[reportDeprecated] indent=indent, include=include, exclude=exclude, - by_alias=by_alias, + by_alias=by_alias if by_alias is not None else False, exclude_unset=exclude_unset, exclude_defaults=exclude_defaults, exclude_none=exclude_none, @@ -363,10 +369,10 @@ def _construct_field(value: object, field: FieldInfo, key: str) -> object: if value is None: return field_get_default(field) - if PYDANTIC_V2: - type_ = field.annotation - else: + if PYDANTIC_V1: type_ = cast(type, field.outer_type_) # type: ignore + else: + type_ = field.annotation # type: ignore if type_ is None: raise RuntimeError(f"Unexpected field type is None for {key}") @@ -375,7 +381,7 @@ def _construct_field(value: object, field: FieldInfo, key: str) -> object: def _get_extra_fields_type(cls: type[pydantic.BaseModel]) -> type | None: - if not PYDANTIC_V2: + if PYDANTIC_V1: # TODO return None @@ -628,30 +634,30 @@ def _build_discriminated_union_meta(*, union: type, meta_annotations: tuple[Any, for variant in get_args(union): variant = strip_annotated_type(variant) if is_basemodel_type(variant): - if PYDANTIC_V2: - field = _extract_field_schema_pv2(variant, discriminator_field_name) - if not field: 
+ if PYDANTIC_V1: + field_info = cast("dict[str, FieldInfo]", variant.__fields__).get(discriminator_field_name) # pyright: ignore[reportDeprecated, reportUnnecessaryCast] + if not field_info: continue # Note: if one variant defines an alias then they all should - discriminator_alias = field.get("serialization_alias") - - field_schema = field["schema"] + discriminator_alias = field_info.alias - if field_schema["type"] == "literal": - for entry in cast("LiteralSchema", field_schema)["expected"]: + if (annotation := getattr(field_info, "annotation", None)) and is_literal_type(annotation): + for entry in get_args(annotation): if isinstance(entry, str): mapping[entry] = variant else: - field_info = cast("dict[str, FieldInfo]", variant.__fields__).get(discriminator_field_name) # pyright: ignore[reportDeprecated, reportUnnecessaryCast] - if not field_info: + field = _extract_field_schema_pv2(variant, discriminator_field_name) + if not field: continue # Note: if one variant defines an alias then they all should - discriminator_alias = field_info.alias + discriminator_alias = field.get("serialization_alias") - if (annotation := getattr(field_info, "annotation", None)) and is_literal_type(annotation): - for entry in get_args(annotation): + field_schema = field["schema"] + + if field_schema["type"] == "literal": + for entry in cast("LiteralSchema", field_schema)["expected"]: if isinstance(entry, str): mapping[entry] = variant @@ -714,7 +720,7 @@ class GenericModel(BaseGenericModel, BaseModel): pass -if PYDANTIC_V2: +if not PYDANTIC_V1: from pydantic import TypeAdapter as _TypeAdapter _CachedTypeAdapter = cast("TypeAdapter[object]", lru_cache(maxsize=None)(_TypeAdapter)) @@ -782,12 +788,12 @@ class FinalRequestOptions(pydantic.BaseModel): json_data: Union[Body, None] = None extra_json: Union[AnyMapping, None] = None - if PYDANTIC_V2: - model_config: ClassVar[ConfigDict] = ConfigDict(arbitrary_types_allowed=True) - else: + if PYDANTIC_V1: class Config(pydantic.BaseConfig): # 
pyright: ignore[reportDeprecated] arbitrary_types_allowed: bool = True + else: + model_config: ClassVar[ConfigDict] = ConfigDict(arbitrary_types_allowed=True) def get_max_retries(self, max_retries: int) -> int: if isinstance(self.max_retries, NotGiven): @@ -820,9 +826,9 @@ def construct( # type: ignore key: strip_not_given(value) for key, value in values.items() } - if PYDANTIC_V2: - return super().model_construct(_fields_set, **kwargs) - return cast(FinalRequestOptions, super().construct(_fields_set, **kwargs)) # pyright: ignore[reportDeprecated] + if PYDANTIC_V1: + return cast(FinalRequestOptions, super().construct(_fields_set, **kwargs)) # pyright: ignore[reportDeprecated] + return super().model_construct(_fields_set, **kwargs) if not TYPE_CHECKING: # type checkers incorrectly complain about this assignment diff --git a/src/hyperspell/_qs.py b/src/hyperspell/_qs.py index 274320ca..ada6fd3f 100644 --- a/src/hyperspell/_qs.py +++ b/src/hyperspell/_qs.py @@ -4,7 +4,7 @@ from urllib.parse import parse_qs, urlencode from typing_extensions import Literal, get_args -from ._types import NOT_GIVEN, NotGiven, NotGivenOr +from ._types import NotGiven, not_given from ._utils import flatten _T = TypeVar("_T") @@ -41,8 +41,8 @@ def stringify( self, params: Params, *, - array_format: NotGivenOr[ArrayFormat] = NOT_GIVEN, - nested_format: NotGivenOr[NestedFormat] = NOT_GIVEN, + array_format: ArrayFormat | NotGiven = not_given, + nested_format: NestedFormat | NotGiven = not_given, ) -> str: return urlencode( self.stringify_items( @@ -56,8 +56,8 @@ def stringify_items( self, params: Params, *, - array_format: NotGivenOr[ArrayFormat] = NOT_GIVEN, - nested_format: NotGivenOr[NestedFormat] = NOT_GIVEN, + array_format: ArrayFormat | NotGiven = not_given, + nested_format: NestedFormat | NotGiven = not_given, ) -> list[tuple[str, str]]: opts = Options( qs=self, @@ -143,8 +143,8 @@ def __init__( self, qs: Querystring = _qs, *, - array_format: NotGivenOr[ArrayFormat] = NOT_GIVEN, - 
nested_format: NotGivenOr[NestedFormat] = NOT_GIVEN, + array_format: ArrayFormat | NotGiven = not_given, + nested_format: NestedFormat | NotGiven = not_given, ) -> None: self.array_format = qs.array_format if isinstance(array_format, NotGiven) else array_format self.nested_format = qs.nested_format if isinstance(nested_format, NotGiven) else nested_format diff --git a/src/hyperspell/_types.py b/src/hyperspell/_types.py index 13fcedc0..5f369eea 100644 --- a/src/hyperspell/_types.py +++ b/src/hyperspell/_types.py @@ -13,10 +13,21 @@ Mapping, TypeVar, Callable, + Iterator, Optional, Sequence, ) -from typing_extensions import Set, Literal, Protocol, TypeAlias, TypedDict, override, runtime_checkable +from typing_extensions import ( + Set, + Literal, + Protocol, + TypeAlias, + TypedDict, + SupportsIndex, + overload, + override, + runtime_checkable, +) import httpx import pydantic @@ -106,18 +117,21 @@ class RequestOptions(TypedDict, total=False): # Sentinel class used until PEP 0661 is accepted class NotGiven: """ - A sentinel singleton class used to distinguish omitted keyword arguments - from those passed in with the value None (which may have different behavior). + For parameters with a meaningful None value, we need to distinguish between + the user explicitly passing None, and the user not passing the parameter at + all. + + User code shouldn't need to use not_given directly. For example: ```py - def get(timeout: Union[int, NotGiven, None] = NotGiven()) -> Response: ... + def create(timeout: Timeout | None | NotGiven = not_given): ... - get(timeout=1) # 1s timeout - get(timeout=None) # No timeout - get() # Default timeout behavior, which may not be statically known at the method definition. 
+ create(timeout=1) # 1s timeout + create(timeout=None) # No timeout + create() # Default timeout behavior ``` """ @@ -129,13 +143,14 @@ def __repr__(self) -> str: return "NOT_GIVEN" -NotGivenOr = Union[_T, NotGiven] +not_given = NotGiven() +# for backwards compatibility: NOT_GIVEN = NotGiven() class Omit: - """In certain situations you need to be able to represent a case where a default value has - to be explicitly removed and `None` is not an appropriate substitute, for example: + """ + To explicitly omit something from being sent in a request, use `omit`. ```py # as the default `Content-Type` header is `application/json` that will be sent @@ -145,8 +160,8 @@ class Omit: # to look something like: 'multipart/form-data; boundary=0d8382fcf5f8c3be01ca2e11002d2983' client.post(..., headers={"Content-Type": "multipart/form-data"}) - # instead you can remove the default `application/json` header by passing Omit - client.post(..., headers={"Content-Type": Omit()}) + # instead you can remove the default `application/json` header by passing omit + client.post(..., headers={"Content-Type": omit}) ``` """ @@ -154,6 +169,9 @@ def __bool__(self) -> Literal[False]: return False +omit = Omit() + + @runtime_checkable class ModelBuilderProtocol(Protocol): @classmethod @@ -217,3 +235,26 @@ class _GenericAlias(Protocol): class HttpxSendArgs(TypedDict, total=False): auth: httpx.Auth follow_redirects: bool + + +_T_co = TypeVar("_T_co", covariant=True) + + +if TYPE_CHECKING: + # This works because str.__contains__ does not accept object (either in typeshed or at runtime) + # https://github.com/hauntsaninja/useful_types/blob/5e9710f3875107d068e7679fd7fec9cfab0eff3b/useful_types/__init__.py#L285 + class SequenceNotStr(Protocol[_T_co]): + @overload + def __getitem__(self, index: SupportsIndex, /) -> _T_co: ... + @overload + def __getitem__(self, index: slice, /) -> Sequence[_T_co]: ... + def __contains__(self, value: object, /) -> bool: ... + def __len__(self) -> int: ... 
+ def __iter__(self) -> Iterator[_T_co]: ... + def index(self, value: Any, start: int = 0, stop: int = ..., /) -> int: ... + def count(self, value: Any, /) -> int: ... + def __reversed__(self) -> Iterator[_T_co]: ... +else: + # just point this to a normal `Sequence` at runtime to avoid having to special case + # deserializing our custom sequence type + SequenceNotStr = Sequence diff --git a/src/hyperspell/_utils/__init__.py b/src/hyperspell/_utils/__init__.py index d4fda26f..dc64e29a 100644 --- a/src/hyperspell/_utils/__init__.py +++ b/src/hyperspell/_utils/__init__.py @@ -10,7 +10,6 @@ lru_cache as lru_cache, is_mapping as is_mapping, is_tuple_t as is_tuple_t, - parse_date as parse_date, is_iterable as is_iterable, is_sequence as is_sequence, coerce_float as coerce_float, @@ -23,7 +22,6 @@ coerce_boolean as coerce_boolean, coerce_integer as coerce_integer, file_from_path as file_from_path, - parse_datetime as parse_datetime, strip_not_given as strip_not_given, deepcopy_minimal as deepcopy_minimal, get_async_library as get_async_library, @@ -32,12 +30,20 @@ maybe_coerce_boolean as maybe_coerce_boolean, maybe_coerce_integer as maybe_coerce_integer, ) +from ._compat import ( + get_args as get_args, + is_union as is_union, + get_origin as get_origin, + is_typeddict as is_typeddict, + is_literal_type as is_literal_type, +) from ._typing import ( is_list_type as is_list_type, is_union_type as is_union_type, extract_type_arg as extract_type_arg, is_iterable_type as is_iterable_type, is_required_type as is_required_type, + is_sequence_type as is_sequence_type, is_annotated_type as is_annotated_type, is_type_alias_type as is_type_alias_type, strip_annotated_type as strip_annotated_type, @@ -55,3 +61,4 @@ function_has_argument as function_has_argument, assert_signatures_in_sync as assert_signatures_in_sync, ) +from ._datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime diff --git a/src/hyperspell/_utils/_compat.py 
b/src/hyperspell/_utils/_compat.py new file mode 100644 index 00000000..dd703233 --- /dev/null +++ b/src/hyperspell/_utils/_compat.py @@ -0,0 +1,45 @@ +from __future__ import annotations + +import sys +import typing_extensions +from typing import Any, Type, Union, Literal, Optional +from datetime import date, datetime +from typing_extensions import get_args as _get_args, get_origin as _get_origin + +from .._types import StrBytesIntFloat +from ._datetime_parse import parse_date as _parse_date, parse_datetime as _parse_datetime + +_LITERAL_TYPES = {Literal, typing_extensions.Literal} + + +def get_args(tp: type[Any]) -> tuple[Any, ...]: + return _get_args(tp) + + +def get_origin(tp: type[Any]) -> type[Any] | None: + return _get_origin(tp) + + +def is_union(tp: Optional[Type[Any]]) -> bool: + if sys.version_info < (3, 10): + return tp is Union # type: ignore[comparison-overlap] + else: + import types + + return tp is Union or tp is types.UnionType + + +def is_typeddict(tp: Type[Any]) -> bool: + return typing_extensions.is_typeddict(tp) + + +def is_literal_type(tp: Type[Any]) -> bool: + return get_origin(tp) in _LITERAL_TYPES + + +def parse_date(value: Union[date, StrBytesIntFloat]) -> date: + return _parse_date(value) + + +def parse_datetime(value: Union[datetime, StrBytesIntFloat]) -> datetime: + return _parse_datetime(value) diff --git a/src/hyperspell/_utils/_datetime_parse.py b/src/hyperspell/_utils/_datetime_parse.py new file mode 100644 index 00000000..7cb9d9e6 --- /dev/null +++ b/src/hyperspell/_utils/_datetime_parse.py @@ -0,0 +1,136 @@ +""" +This file contains code from https://github.com/pydantic/pydantic/blob/main/pydantic/v1/datetime_parse.py +without the Pydantic v1 specific errors. 
+""" + +from __future__ import annotations + +import re +from typing import Dict, Union, Optional +from datetime import date, datetime, timezone, timedelta + +from .._types import StrBytesIntFloat + +date_expr = r"(?P\d{4})-(?P\d{1,2})-(?P\d{1,2})" +time_expr = ( + r"(?P\d{1,2}):(?P\d{1,2})" + r"(?::(?P\d{1,2})(?:\.(?P\d{1,6})\d{0,6})?)?" + r"(?PZ|[+-]\d{2}(?::?\d{2})?)?$" +) + +date_re = re.compile(f"{date_expr}$") +datetime_re = re.compile(f"{date_expr}[T ]{time_expr}") + + +EPOCH = datetime(1970, 1, 1) +# if greater than this, the number is in ms, if less than or equal it's in seconds +# (in seconds this is 11th October 2603, in ms it's 20th August 1970) +MS_WATERSHED = int(2e10) +# slightly more than datetime.max in ns - (datetime.max - EPOCH).total_seconds() * 1e9 +MAX_NUMBER = int(3e20) + + +def _get_numeric(value: StrBytesIntFloat, native_expected_type: str) -> Union[None, int, float]: + if isinstance(value, (int, float)): + return value + try: + return float(value) + except ValueError: + return None + except TypeError: + raise TypeError(f"invalid type; expected {native_expected_type}, string, bytes, int or float") from None + + +def _from_unix_seconds(seconds: Union[int, float]) -> datetime: + if seconds > MAX_NUMBER: + return datetime.max + elif seconds < -MAX_NUMBER: + return datetime.min + + while abs(seconds) > MS_WATERSHED: + seconds /= 1000 + dt = EPOCH + timedelta(seconds=seconds) + return dt.replace(tzinfo=timezone.utc) + + +def _parse_timezone(value: Optional[str]) -> Union[None, int, timezone]: + if value == "Z": + return timezone.utc + elif value is not None: + offset_mins = int(value[-2:]) if len(value) > 3 else 0 + offset = 60 * int(value[1:3]) + offset_mins + if value[0] == "-": + offset = -offset + return timezone(timedelta(minutes=offset)) + else: + return None + + +def parse_datetime(value: Union[datetime, StrBytesIntFloat]) -> datetime: + """ + Parse a datetime/int/float/string and return a datetime.datetime. 
+ + This function supports time zone offsets. When the input contains one, + the output uses a timezone with a fixed offset from UTC. + + Raise ValueError if the input is well formatted but not a valid datetime. + Raise ValueError if the input isn't well formatted. + """ + if isinstance(value, datetime): + return value + + number = _get_numeric(value, "datetime") + if number is not None: + return _from_unix_seconds(number) + + if isinstance(value, bytes): + value = value.decode() + + assert not isinstance(value, (float, int)) + + match = datetime_re.match(value) + if match is None: + raise ValueError("invalid datetime format") + + kw = match.groupdict() + if kw["microsecond"]: + kw["microsecond"] = kw["microsecond"].ljust(6, "0") + + tzinfo = _parse_timezone(kw.pop("tzinfo")) + kw_: Dict[str, Union[None, int, timezone]] = {k: int(v) for k, v in kw.items() if v is not None} + kw_["tzinfo"] = tzinfo + + return datetime(**kw_) # type: ignore + + +def parse_date(value: Union[date, StrBytesIntFloat]) -> date: + """ + Parse a date/int/float/string and return a datetime.date. + + Raise ValueError if the input is well formatted but not a valid date. + Raise ValueError if the input isn't well formatted. 
+ """ + if isinstance(value, date): + if isinstance(value, datetime): + return value.date() + else: + return value + + number = _get_numeric(value, "date") + if number is not None: + return _from_unix_seconds(number).date() + + if isinstance(value, bytes): + value = value.decode() + + assert not isinstance(value, (float, int)) + match = date_re.match(value) + if match is None: + raise ValueError("invalid date format") + + kw = {k: int(v) for k, v in match.groupdict().items()} + + try: + return date(**kw) + except ValueError: + raise ValueError("invalid date format") from None diff --git a/src/hyperspell/_utils/_transform.py b/src/hyperspell/_utils/_transform.py index b0cc20a7..52075492 100644 --- a/src/hyperspell/_utils/_transform.py +++ b/src/hyperspell/_utils/_transform.py @@ -16,18 +16,20 @@ lru_cache, is_mapping, is_iterable, + is_sequence, ) from .._files import is_base64_file_input +from ._compat import get_origin, is_typeddict from ._typing import ( is_list_type, is_union_type, extract_type_arg, is_iterable_type, is_required_type, + is_sequence_type, is_annotated_type, strip_annotated_type, ) -from .._compat import get_origin, model_dump, is_typeddict _T = TypeVar("_T") @@ -167,6 +169,8 @@ def _transform_recursive( Defaults to the same value as the `annotation` argument. """ + from .._compat import model_dump + if inner_type is None: inner_type = annotation @@ -184,6 +188,8 @@ def _transform_recursive( (is_list_type(stripped_type) and is_list(data)) # Iterable[T] or (is_iterable_type(stripped_type) and is_iterable(data) and not isinstance(data, str)) + # Sequence[T] + or (is_sequence_type(stripped_type) and is_sequence(data) and not isinstance(data, str)) ): # dicts are technically iterable, but it is an iterable on the keys of the dict and is not usually # intended as an iterable, so we don't transform it. 
@@ -262,7 +268,7 @@ def _transform_typeddict( annotations = get_type_hints(expected_type, include_extras=True) for key, value in data.items(): if not is_given(value): - # we don't need to include `NotGiven` values here as they'll + # we don't need to include omitted values here as they'll # be stripped out before the request is sent anyway continue @@ -329,6 +335,8 @@ async def _async_transform_recursive( Defaults to the same value as the `annotation` argument. """ + from .._compat import model_dump + if inner_type is None: inner_type = annotation @@ -346,6 +354,8 @@ async def _async_transform_recursive( (is_list_type(stripped_type) and is_list(data)) # Iterable[T] or (is_iterable_type(stripped_type) and is_iterable(data) and not isinstance(data, str)) + # Sequence[T] + or (is_sequence_type(stripped_type) and is_sequence(data) and not isinstance(data, str)) ): # dicts are technically iterable, but it is an iterable on the keys of the dict and is not usually # intended as an iterable, so we don't transform it. 
@@ -424,7 +434,7 @@ async def _async_transform_typeddict( annotations = get_type_hints(expected_type, include_extras=True) for key, value in data.items(): if not is_given(value): - # we don't need to include `NotGiven` values here as they'll + # we don't need to include omitted values here as they'll # be stripped out before the request is sent anyway continue diff --git a/src/hyperspell/_utils/_typing.py b/src/hyperspell/_utils/_typing.py index 1bac9542..193109f3 100644 --- a/src/hyperspell/_utils/_typing.py +++ b/src/hyperspell/_utils/_typing.py @@ -15,7 +15,7 @@ from ._utils import lru_cache from .._types import InheritsGeneric -from .._compat import is_union as _is_union +from ._compat import is_union as _is_union def is_annotated_type(typ: type) -> bool: @@ -26,6 +26,11 @@ def is_list_type(typ: type) -> bool: return (get_origin(typ) or typ) == list +def is_sequence_type(typ: type) -> bool: + origin = get_origin(typ) or typ + return origin == typing_extensions.Sequence or origin == typing.Sequence or origin == _c_abc.Sequence + + def is_iterable_type(typ: type) -> bool: """If the given type is `typing.Iterable[T]`""" origin = get_origin(typ) or typ diff --git a/src/hyperspell/_utils/_utils.py b/src/hyperspell/_utils/_utils.py index ea3cf3f2..50d59269 100644 --- a/src/hyperspell/_utils/_utils.py +++ b/src/hyperspell/_utils/_utils.py @@ -21,8 +21,7 @@ import sniffio -from .._types import NotGiven, FileTypes, NotGivenOr, HeadersLike -from .._compat import parse_date as parse_date, parse_datetime as parse_datetime +from .._types import Omit, NotGiven, FileTypes, HeadersLike _T = TypeVar("_T") _TupleT = TypeVar("_TupleT", bound=Tuple[object, ...]) @@ -64,7 +63,7 @@ def _extract_items( try: key = path[index] except IndexError: - if isinstance(obj, NotGiven): + if not is_given(obj): # no value was provided - we can safely ignore return [] @@ -127,8 +126,8 @@ def _extract_items( return [] -def is_given(obj: NotGivenOr[_T]) -> TypeGuard[_T]: - return not isinstance(obj, 
NotGiven) +def is_given(obj: _T | NotGiven | Omit) -> TypeGuard[_T]: + return not isinstance(obj, NotGiven) and not isinstance(obj, Omit) # Type safe methods for narrowing types with TypeVars. diff --git a/src/hyperspell/_version.py b/src/hyperspell/_version.py index bcb6e879..1b27d30f 100644 --- a/src/hyperspell/_version.py +++ b/src/hyperspell/_version.py @@ -1,4 +1,4 @@ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. __title__ = "hyperspell" -__version__ = "0.22.1" # x-release-please-version +__version__ = "0.25.0" # x-release-please-version diff --git a/src/hyperspell/resources/__init__.py b/src/hyperspell/resources/__init__.py index 71ddb874..d9c32c3f 100644 --- a/src/hyperspell/resources/__init__.py +++ b/src/hyperspell/resources/__init__.py @@ -16,6 +16,14 @@ VaultsResourceWithStreamingResponse, AsyncVaultsResourceWithStreamingResponse, ) +from .evaluate import ( + EvaluateResource, + AsyncEvaluateResource, + EvaluateResourceWithRawResponse, + AsyncEvaluateResourceWithRawResponse, + EvaluateResourceWithStreamingResponse, + AsyncEvaluateResourceWithStreamingResponse, +) from .memories import ( MemoriesResource, AsyncMemoriesResource, @@ -46,6 +54,12 @@ "AsyncMemoriesResourceWithRawResponse", "MemoriesResourceWithStreamingResponse", "AsyncMemoriesResourceWithStreamingResponse", + "EvaluateResource", + "AsyncEvaluateResource", + "EvaluateResourceWithRawResponse", + "AsyncEvaluateResourceWithRawResponse", + "EvaluateResourceWithStreamingResponse", + "AsyncEvaluateResourceWithStreamingResponse", "VaultsResource", "AsyncVaultsResource", "VaultsResourceWithRawResponse", diff --git a/src/hyperspell/resources/auth.py b/src/hyperspell/resources/auth.py index a81f560e..7fd0152e 100644 --- a/src/hyperspell/resources/auth.py +++ b/src/hyperspell/resources/auth.py @@ -7,7 +7,7 @@ import httpx from ..types import auth_user_token_params -from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from .._types import Body, Omit, 
Query, Headers, NotGiven, omit, not_given from .._utils import maybe_transform, async_maybe_transform from .._compat import cached_property from .._resource import SyncAPIResource, AsyncAPIResource @@ -53,7 +53,7 @@ def delete_user( extra_headers: Headers | None = None, extra_query: Query | None = None, extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + timeout: float | httpx.Timeout | None | NotGiven = not_given, ) -> AuthDeleteUserResponse: """Endpoint to delete user.""" return self._delete( @@ -72,7 +72,7 @@ def me( extra_headers: Headers | None = None, extra_query: Query | None = None, extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + timeout: float | httpx.Timeout | None | NotGiven = not_given, ) -> AuthMeResponse: """Endpoint to get basic user data.""" return self._get( @@ -87,13 +87,14 @@ def user_token( self, *, user_id: str, - expires_in: Optional[str] | NotGiven = NOT_GIVEN, + expires_in: Optional[str] | Omit = omit, + origin: Optional[str] | Omit = omit, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, extra_query: Query | None = None, extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + timeout: float | httpx.Timeout | None | NotGiven = not_given, ) -> Token: """Use this endpoint to create a user token for a specific user. @@ -103,6 +104,9 @@ def user_token( Args: expires_in: Token lifetime, e.g., '30m', '2h', '1d'. Defaults to 24 hours if not provided. + origin: Origin of the request, used for CSRF protection. If set, the token will only be + valid for requests originating from this origin. 
+ extra_headers: Send extra headers extra_query: Add additional query parameters to the request @@ -117,6 +121,7 @@ def user_token( { "user_id": user_id, "expires_in": expires_in, + "origin": origin, }, auth_user_token_params.AuthUserTokenParams, ), @@ -155,7 +160,7 @@ async def delete_user( extra_headers: Headers | None = None, extra_query: Query | None = None, extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + timeout: float | httpx.Timeout | None | NotGiven = not_given, ) -> AuthDeleteUserResponse: """Endpoint to delete user.""" return await self._delete( @@ -174,7 +179,7 @@ async def me( extra_headers: Headers | None = None, extra_query: Query | None = None, extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + timeout: float | httpx.Timeout | None | NotGiven = not_given, ) -> AuthMeResponse: """Endpoint to get basic user data.""" return await self._get( @@ -189,13 +194,14 @@ async def user_token( self, *, user_id: str, - expires_in: Optional[str] | NotGiven = NOT_GIVEN, + expires_in: Optional[str] | Omit = omit, + origin: Optional[str] | Omit = omit, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, extra_query: Query | None = None, extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + timeout: float | httpx.Timeout | None | NotGiven = not_given, ) -> Token: """Use this endpoint to create a user token for a specific user. @@ -205,6 +211,9 @@ async def user_token( Args: expires_in: Token lifetime, e.g., '30m', '2h', '1d'. Defaults to 24 hours if not provided. + origin: Origin of the request, used for CSRF protection. If set, the token will only be + valid for requests originating from this origin. 
+ extra_headers: Send extra headers extra_query: Add additional query parameters to the request @@ -219,6 +228,7 @@ async def user_token( { "user_id": user_id, "expires_in": expires_in, + "origin": origin, }, auth_user_token_params.AuthUserTokenParams, ), diff --git a/src/hyperspell/resources/evaluate.py b/src/hyperspell/resources/evaluate.py new file mode 100644 index 00000000..58dc4bf3 --- /dev/null +++ b/src/hyperspell/resources/evaluate.py @@ -0,0 +1,359 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Optional + +import httpx + +from ..types import evaluate_score_query_params, evaluate_score_highlight_params +from .._types import Body, Omit, Query, Headers, NotGiven, omit, not_given +from .._utils import maybe_transform, async_maybe_transform +from .._compat import cached_property +from .._resource import SyncAPIResource, AsyncAPIResource +from .._response import ( + to_raw_response_wrapper, + to_streamed_response_wrapper, + async_to_raw_response_wrapper, + async_to_streamed_response_wrapper, +) +from .._base_client import make_request_options +from ..types.shared.query_result import QueryResult +from ..types.evaluate_score_query_response import EvaluateScoreQueryResponse +from ..types.evaluate_score_highlight_response import EvaluateScoreHighlightResponse + +__all__ = ["EvaluateResource", "AsyncEvaluateResource"] + + +class EvaluateResource(SyncAPIResource): + @cached_property + def with_raw_response(self) -> EvaluateResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. 
+ + For more information, see https://www.github.com/hyperspell/python-sdk#accessing-raw-response-data-eg-headers + """ + return EvaluateResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> EvaluateResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/hyperspell/python-sdk#with_streaming_response + """ + return EvaluateResourceWithStreamingResponse(self) + + def get_query( + self, + query_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> QueryResult: + """ + Retrieve the result of a previous query. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not query_id: + raise ValueError(f"Expected a non-empty value for `query_id` but received {query_id!r}") + return self._get( + f"/evaluate/query/{query_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=QueryResult, + ) + + def score_highlight( + self, + highlight_id: str, + *, + comment: Optional[str] | Omit = omit, + score: float | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> EvaluateScoreHighlightResponse: + """ + Score an individual highlight. + + Args: + comment: Comment on the chunk + + score: Rating of the chunk from -1 (bad) to +1 (good). + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not highlight_id: + raise ValueError(f"Expected a non-empty value for `highlight_id` but received {highlight_id!r}") + return self._post( + f"/evaluate/highlight/{highlight_id}", + body=maybe_transform( + { + "comment": comment, + "score": score, + }, + evaluate_score_highlight_params.EvaluateScoreHighlightParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=EvaluateScoreHighlightResponse, + ) + + def score_query( + self, + query_id: str, + *, + score: float | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> EvaluateScoreQueryResponse: + """ + Score the result of a query. + + Args: + score: Rating of the query result from -1 (bad) to +1 (good). 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not query_id: + raise ValueError(f"Expected a non-empty value for `query_id` but received {query_id!r}") + return self._post( + f"/evaluate/query/{query_id}", + body=maybe_transform({"score": score}, evaluate_score_query_params.EvaluateScoreQueryParams), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=EvaluateScoreQueryResponse, + ) + + +class AsyncEvaluateResource(AsyncAPIResource): + @cached_property + def with_raw_response(self) -> AsyncEvaluateResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/hyperspell/python-sdk#accessing-raw-response-data-eg-headers + """ + return AsyncEvaluateResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncEvaluateResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/hyperspell/python-sdk#with_streaming_response + """ + return AsyncEvaluateResourceWithStreamingResponse(self) + + async def get_query( + self, + query_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> QueryResult: + """ + Retrieve the result of a previous query. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not query_id: + raise ValueError(f"Expected a non-empty value for `query_id` but received {query_id!r}") + return await self._get( + f"/evaluate/query/{query_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=QueryResult, + ) + + async def score_highlight( + self, + highlight_id: str, + *, + comment: Optional[str] | Omit = omit, + score: float | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> EvaluateScoreHighlightResponse: + """ + Score an individual highlight. + + Args: + comment: Comment on the chunk. + + score: Rating of the chunk from -1 (bad) to +1 (good). 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not highlight_id: + raise ValueError(f"Expected a non-empty value for `highlight_id` but received {highlight_id!r}") + return await self._post( + f"/evaluate/highlight/{highlight_id}", + body=await async_maybe_transform( + { + "comment": comment, + "score": score, + }, + evaluate_score_highlight_params.EvaluateScoreHighlightParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=EvaluateScoreHighlightResponse, + ) + + async def score_query( + self, + query_id: str, + *, + score: float | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> EvaluateScoreQueryResponse: + """ + Score the result of a query. + + Args: + score: Rating of the query result from -1 (bad) to +1 (good). 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not query_id: + raise ValueError(f"Expected a non-empty value for `query_id` but received {query_id!r}") + return await self._post( + f"/evaluate/query/{query_id}", + body=await async_maybe_transform({"score": score}, evaluate_score_query_params.EvaluateScoreQueryParams), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=EvaluateScoreQueryResponse, + ) + + +class EvaluateResourceWithRawResponse: + def __init__(self, evaluate: EvaluateResource) -> None: + self._evaluate = evaluate + + self.get_query = to_raw_response_wrapper( + evaluate.get_query, + ) + self.score_highlight = to_raw_response_wrapper( + evaluate.score_highlight, + ) + self.score_query = to_raw_response_wrapper( + evaluate.score_query, + ) + + +class AsyncEvaluateResourceWithRawResponse: + def __init__(self, evaluate: AsyncEvaluateResource) -> None: + self._evaluate = evaluate + + self.get_query = async_to_raw_response_wrapper( + evaluate.get_query, + ) + self.score_highlight = async_to_raw_response_wrapper( + evaluate.score_highlight, + ) + self.score_query = async_to_raw_response_wrapper( + evaluate.score_query, + ) + + +class EvaluateResourceWithStreamingResponse: + def __init__(self, evaluate: EvaluateResource) -> None: + self._evaluate = evaluate + + self.get_query = to_streamed_response_wrapper( + evaluate.get_query, + ) + self.score_highlight = to_streamed_response_wrapper( + evaluate.score_highlight, + ) + self.score_query = to_streamed_response_wrapper( + evaluate.score_query, + ) + + +class AsyncEvaluateResourceWithStreamingResponse: + def __init__(self, evaluate: AsyncEvaluateResource) -> None: + self._evaluate = evaluate + + self.get_query = 
async_to_streamed_response_wrapper( + evaluate.get_query, + ) + self.score_highlight = async_to_streamed_response_wrapper( + evaluate.score_highlight, + ) + self.score_query = async_to_streamed_response_wrapper( + evaluate.score_query, + ) diff --git a/src/hyperspell/resources/integrations/__init__.py b/src/hyperspell/resources/integrations/__init__.py index e1c67171..f72f59aa 100644 --- a/src/hyperspell/resources/integrations/__init__.py +++ b/src/hyperspell/resources/integrations/__init__.py @@ -1,5 +1,13 @@ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. +from .slack import ( + SlackResource, + AsyncSlackResource, + SlackResourceWithRawResponse, + AsyncSlackResourceWithRawResponse, + SlackResourceWithStreamingResponse, + AsyncSlackResourceWithStreamingResponse, +) from .web_crawler import ( WebCrawlerResource, AsyncWebCrawlerResource, @@ -38,6 +46,12 @@ "AsyncWebCrawlerResourceWithRawResponse", "WebCrawlerResourceWithStreamingResponse", "AsyncWebCrawlerResourceWithStreamingResponse", + "SlackResource", + "AsyncSlackResource", + "SlackResourceWithRawResponse", + "AsyncSlackResourceWithRawResponse", + "SlackResourceWithStreamingResponse", + "AsyncSlackResourceWithStreamingResponse", "IntegrationsResource", "AsyncIntegrationsResource", "IntegrationsResourceWithRawResponse", diff --git a/src/hyperspell/resources/integrations/google_calendar.py b/src/hyperspell/resources/integrations/google_calendar.py index 99d5caaf..fd8fef10 100644 --- a/src/hyperspell/resources/integrations/google_calendar.py +++ b/src/hyperspell/resources/integrations/google_calendar.py @@ -4,7 +4,7 @@ import httpx -from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from ..._types import Body, Query, Headers, NotGiven, not_given from ..._compat import cached_property from ..._resource import SyncAPIResource, AsyncAPIResource from ..._response import ( @@ -47,7 +47,7 @@ def list( extra_headers: Headers | None = None, extra_query: Query | None = 
None, extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + timeout: float | httpx.Timeout | None | NotGiven = not_given, ) -> Calendar: """List available calendars for a user. @@ -91,7 +91,7 @@ async def list( extra_headers: Headers | None = None, extra_query: Query | None = None, extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + timeout: float | httpx.Timeout | None | NotGiven = not_given, ) -> Calendar: """List available calendars for a user. diff --git a/src/hyperspell/resources/integrations/integrations.py b/src/hyperspell/resources/integrations/integrations.py index d4e299e0..937c8d3c 100644 --- a/src/hyperspell/resources/integrations/integrations.py +++ b/src/hyperspell/resources/integrations/integrations.py @@ -4,7 +4,15 @@ import httpx -from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from .slack import ( + SlackResource, + AsyncSlackResource, + SlackResourceWithRawResponse, + AsyncSlackResourceWithRawResponse, + SlackResourceWithStreamingResponse, + AsyncSlackResourceWithStreamingResponse, +) +from ..._types import Body, Query, Headers, NotGiven, not_given from ..._compat import cached_property from ..._resource import SyncAPIResource, AsyncAPIResource from ..._response import ( @@ -44,6 +52,10 @@ def google_calendar(self) -> GoogleCalendarResource: def web_crawler(self) -> WebCrawlerResource: return WebCrawlerResource(self._client) + @cached_property + def slack(self) -> SlackResource: + return SlackResource(self._client) + @cached_property def with_raw_response(self) -> IntegrationsResourceWithRawResponse: """ @@ -72,7 +84,7 @@ def revoke( extra_headers: Headers | None = None, extra_query: Query | None = None, extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + timeout: float | httpx.Timeout | None | NotGiven = not_given, ) -> IntegrationRevokeResponse: """ Revokes Hyperspell's access the given 
provider and deletes all stored @@ -107,6 +119,10 @@ def google_calendar(self) -> AsyncGoogleCalendarResource: def web_crawler(self) -> AsyncWebCrawlerResource: return AsyncWebCrawlerResource(self._client) + @cached_property + def slack(self) -> AsyncSlackResource: + return AsyncSlackResource(self._client) + @cached_property def with_raw_response(self) -> AsyncIntegrationsResourceWithRawResponse: """ @@ -135,7 +151,7 @@ async def revoke( extra_headers: Headers | None = None, extra_query: Query | None = None, extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + timeout: float | httpx.Timeout | None | NotGiven = not_given, ) -> IntegrationRevokeResponse: """ Revokes Hyperspell's access the given provider and deletes all stored @@ -177,6 +193,10 @@ def google_calendar(self) -> GoogleCalendarResourceWithRawResponse: def web_crawler(self) -> WebCrawlerResourceWithRawResponse: return WebCrawlerResourceWithRawResponse(self._integrations.web_crawler) + @cached_property + def slack(self) -> SlackResourceWithRawResponse: + return SlackResourceWithRawResponse(self._integrations.slack) + class AsyncIntegrationsResourceWithRawResponse: def __init__(self, integrations: AsyncIntegrationsResource) -> None: @@ -194,6 +214,10 @@ def google_calendar(self) -> AsyncGoogleCalendarResourceWithRawResponse: def web_crawler(self) -> AsyncWebCrawlerResourceWithRawResponse: return AsyncWebCrawlerResourceWithRawResponse(self._integrations.web_crawler) + @cached_property + def slack(self) -> AsyncSlackResourceWithRawResponse: + return AsyncSlackResourceWithRawResponse(self._integrations.slack) + class IntegrationsResourceWithStreamingResponse: def __init__(self, integrations: IntegrationsResource) -> None: @@ -211,6 +235,10 @@ def google_calendar(self) -> GoogleCalendarResourceWithStreamingResponse: def web_crawler(self) -> WebCrawlerResourceWithStreamingResponse: return WebCrawlerResourceWithStreamingResponse(self._integrations.web_crawler) + 
@cached_property + def slack(self) -> SlackResourceWithStreamingResponse: + return SlackResourceWithStreamingResponse(self._integrations.slack) + class AsyncIntegrationsResourceWithStreamingResponse: def __init__(self, integrations: AsyncIntegrationsResource) -> None: @@ -227,3 +255,7 @@ def google_calendar(self) -> AsyncGoogleCalendarResourceWithStreamingResponse: @cached_property def web_crawler(self) -> AsyncWebCrawlerResourceWithStreamingResponse: return AsyncWebCrawlerResourceWithStreamingResponse(self._integrations.web_crawler) + + @cached_property + def slack(self) -> AsyncSlackResourceWithStreamingResponse: + return AsyncSlackResourceWithStreamingResponse(self._integrations.slack) diff --git a/src/hyperspell/resources/integrations/slack.py b/src/hyperspell/resources/integrations/slack.py new file mode 100644 index 00000000..10144dc6 --- /dev/null +++ b/src/hyperspell/resources/integrations/slack.py @@ -0,0 +1,228 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Optional + +import httpx + +from ..._types import Body, Omit, Query, Headers, NotGiven, SequenceNotStr, omit, not_given +from ..._utils import maybe_transform, async_maybe_transform +from ..._compat import cached_property +from ..._resource import SyncAPIResource, AsyncAPIResource +from ..._response import ( + to_raw_response_wrapper, + to_streamed_response_wrapper, + async_to_raw_response_wrapper, + async_to_streamed_response_wrapper, +) +from ..._base_client import make_request_options +from ...types.integrations import slack_list_params + +__all__ = ["SlackResource", "AsyncSlackResource"] + + +class SlackResource(SyncAPIResource): + @cached_property + def with_raw_response(self) -> SlackResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. 
+ + For more information, see https://www.github.com/hyperspell/python-sdk#accessing-raw-response-data-eg-headers + """ + return SlackResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> SlackResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/hyperspell/python-sdk#with_streaming_response + """ + return SlackResourceWithStreamingResponse(self) + + def list( + self, + *, + channels: SequenceNotStr[str] | Omit = omit, + exclude_archived: Optional[bool] | Omit = omit, + include_dms: bool | Omit = omit, + include_group_dms: bool | Omit = omit, + include_private: bool | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> object: + """ + List Slack conversations accessible to the user via the live Nango connection. + + Returns minimal channel metadata suitable for selection UIs. If required scopes + are missing, Slack's error is propagated with details. + + Supports filtering by channels, including/excluding private channels, DMs, group + DMs, and archived channels based on the provided search options. + + Args: + channels: List of Slack channels to include (by id, name, or #name). + + exclude_archived: If set, pass 'exclude_archived' to Slack. If None, omit the param. + + include_dms: Include direct messages (im) when listing conversations. + + include_group_dms: Include group DMs (mpim) when listing conversations. + + include_private: Include private channels when constructing Slack 'types'. 
+ + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._get( + "/integrations/slack/list", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + { + "channels": channels, + "exclude_archived": exclude_archived, + "include_dms": include_dms, + "include_group_dms": include_group_dms, + "include_private": include_private, + }, + slack_list_params.SlackListParams, + ), + ), + cast_to=object, + ) + + +class AsyncSlackResource(AsyncAPIResource): + @cached_property + def with_raw_response(self) -> AsyncSlackResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/hyperspell/python-sdk#accessing-raw-response-data-eg-headers + """ + return AsyncSlackResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncSlackResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/hyperspell/python-sdk#with_streaming_response + """ + return AsyncSlackResourceWithStreamingResponse(self) + + async def list( + self, + *, + channels: SequenceNotStr[str] | Omit = omit, + exclude_archived: Optional[bool] | Omit = omit, + include_dms: bool | Omit = omit, + include_group_dms: bool | Omit = omit, + include_private: bool | Omit = omit, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> object: + """ + List Slack conversations accessible to the user via the live Nango connection. + + Returns minimal channel metadata suitable for selection UIs. If required scopes + are missing, Slack's error is propagated with details. + + Supports filtering by channels, including/excluding private channels, DMs, group + DMs, and archived channels based on the provided search options. + + Args: + channels: List of Slack channels to include (by id, name, or #name). + + exclude_archived: If set, pass 'exclude_archived' to Slack. If None, omit the param. + + include_dms: Include direct messages (im) when listing conversations. + + include_group_dms: Include group DMs (mpim) when listing conversations. + + include_private: Include private channels when constructing Slack 'types'. + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return await self._get( + "/integrations/slack/list", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=await async_maybe_transform( + { + "channels": channels, + "exclude_archived": exclude_archived, + "include_dms": include_dms, + "include_group_dms": include_group_dms, + "include_private": include_private, + }, + slack_list_params.SlackListParams, + ), + ), + cast_to=object, + ) + + +class SlackResourceWithRawResponse: + def __init__(self, slack: SlackResource) -> None: + self._slack = slack + + self.list = to_raw_response_wrapper( + slack.list, + ) + + +class AsyncSlackResourceWithRawResponse: + def __init__(self, slack: AsyncSlackResource) -> None: + self._slack = slack 
+ + self.list = async_to_raw_response_wrapper( + slack.list, + ) + + +class SlackResourceWithStreamingResponse: + def __init__(self, slack: SlackResource) -> None: + self._slack = slack + + self.list = to_streamed_response_wrapper( + slack.list, + ) + + +class AsyncSlackResourceWithStreamingResponse: + def __init__(self, slack: AsyncSlackResource) -> None: + self._slack = slack + + self.list = async_to_streamed_response_wrapper( + slack.list, + ) diff --git a/src/hyperspell/resources/integrations/web_crawler.py b/src/hyperspell/resources/integrations/web_crawler.py index e2530b92..05e8efdc 100644 --- a/src/hyperspell/resources/integrations/web_crawler.py +++ b/src/hyperspell/resources/integrations/web_crawler.py @@ -4,7 +4,7 @@ import httpx -from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from ..._types import Body, Omit, Query, Headers, NotGiven, omit, not_given from ..._utils import maybe_transform, async_maybe_transform from ..._compat import cached_property from ..._resource import SyncAPIResource, AsyncAPIResource @@ -45,14 +45,14 @@ def index( self, *, url: str, - limit: int | NotGiven = NOT_GIVEN, - max_depth: int | NotGiven = NOT_GIVEN, + limit: int | Omit = omit, + max_depth: int | Omit = omit, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, extra_query: Query | None = None, extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + timeout: float | httpx.Timeout | None | NotGiven = not_given, ) -> WebCrawlerIndexResponse: """ Recursively crawl a website to make it available for indexed search. 
@@ -116,14 +116,14 @@ async def index( self, *, url: str, - limit: int | NotGiven = NOT_GIVEN, - max_depth: int | NotGiven = NOT_GIVEN, + limit: int | Omit = omit, + max_depth: int | Omit = omit, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, extra_query: Query | None = None, extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + timeout: float | httpx.Timeout | None | NotGiven = not_given, ) -> WebCrawlerIndexResponse: """ Recursively crawl a website to make it available for indexed search. diff --git a/src/hyperspell/resources/memories.py b/src/hyperspell/resources/memories.py index cceb15e3..044081ad 100644 --- a/src/hyperspell/resources/memories.py +++ b/src/hyperspell/resources/memories.py @@ -9,7 +9,7 @@ import httpx from ..types import memory_add_params, memory_list_params, memory_search_params, memory_upload_params -from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven, FileTypes +from .._types import Body, Omit, Query, Headers, NotGiven, FileTypes, omit, not_given from .._utils import extract_files, maybe_transform, deepcopy_minimal, async_maybe_transform from .._compat import cached_property from .._resource import SyncAPIResource, AsyncAPIResource @@ -23,8 +23,8 @@ from .._base_client import AsyncPaginator, make_request_options from ..types.memory import Memory from ..types.memory_status import MemoryStatus +from ..types.shared.query_result import QueryResult from ..types.memory_delete_response import MemoryDeleteResponse -from ..types.memory_search_response import MemorySearchResponse from ..types.memory_status_response import MemoryStatusResponse __all__ = ["MemoriesResource", "AsyncMemoriesResource"] @@ -53,9 +53,9 @@ def with_streaming_response(self) -> 
MemoriesResourceWithStreamingResponse: def list( self, *, - collection: Optional[str] | NotGiven = NOT_GIVEN, - cursor: Optional[str] | NotGiven = NOT_GIVEN, - size: int | NotGiven = NOT_GIVEN, + collection: Optional[str] | Omit = omit, + cursor: Optional[str] | Omit = omit, + size: int | Omit = omit, source: Optional[ Literal[ "collections", @@ -106,13 +106,13 @@ def list( "zoom", ] ] - | NotGiven = NOT_GIVEN, + | Omit = omit, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, extra_query: Query | None = None, extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + timeout: float | httpx.Timeout | None | NotGiven = not_given, ) -> SyncCursorPage[Memory]: """This endpoint allows you to paginate through all documents in the index. @@ -210,7 +210,7 @@ def delete( extra_headers: Headers | None = None, extra_query: Query | None = None, extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + timeout: float | httpx.Timeout | None | NotGiven = not_given, ) -> MemoryDeleteResponse: """ Delete a memory and its associated chunks from the index. @@ -254,16 +254,16 @@ def add( self, *, text: str, - collection: Optional[str] | NotGiven = NOT_GIVEN, - date: Union[str, datetime] | NotGiven = NOT_GIVEN, - resource_id: str | NotGiven = NOT_GIVEN, - title: Optional[str] | NotGiven = NOT_GIVEN, + collection: Optional[str] | Omit = omit, + date: Union[str, datetime] | Omit = omit, + resource_id: str | Omit = omit, + title: Optional[str] | Omit = omit, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. 
extra_headers: Headers | None = None, extra_query: Query | None = None, extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + timeout: float | httpx.Timeout | None | NotGiven = not_given, ) -> MemoryStatus: """Adds an arbitrary document to the index. @@ -369,7 +369,7 @@ def get( extra_headers: Headers | None = None, extra_query: Query | None = None, extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + timeout: float | httpx.Timeout | None | NotGiven = not_given, ) -> Memory: """ Retrieves a document by provider and resource_id. @@ -399,9 +399,9 @@ def search( self, *, query: str, - answer: bool | NotGiven = NOT_GIVEN, - max_results: int | NotGiven = NOT_GIVEN, - options: memory_search_params.Options | NotGiven = NOT_GIVEN, + answer: bool | Omit = omit, + max_results: int | Omit = omit, + options: memory_search_params.Options | Omit = omit, sources: List[ Literal[ "collections", @@ -452,14 +452,14 @@ def search( "zoom", ] ] - | NotGiven = NOT_GIVEN, + | Omit = omit, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, extra_query: Query | None = None, extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> MemorySearchResponse: + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> QueryResult: """ Retrieves documents matching the query. 
@@ -497,7 +497,7 @@ def search( options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout ), - cast_to=MemorySearchResponse, + cast_to=QueryResult, ) def status( @@ -508,7 +508,7 @@ def status( extra_headers: Headers | None = None, extra_query: Query | None = None, extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + timeout: float | httpx.Timeout | None | NotGiven = not_given, ) -> MemoryStatusResponse: """ This endpoint shows the indexing progress of documents, both by provider and @@ -526,13 +526,13 @@ def upload( self, *, file: FileTypes, - collection: Optional[str] | NotGiven = NOT_GIVEN, + collection: Optional[str] | Omit = omit, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, extra_query: Query | None = None, extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + timeout: float | httpx.Timeout | None | NotGiven = not_given, ) -> MemoryStatus: """This endpoint will upload a file to the index and return a resource_id. @@ -599,9 +599,9 @@ def with_streaming_response(self) -> AsyncMemoriesResourceWithStreamingResponse: def list( self, *, - collection: Optional[str] | NotGiven = NOT_GIVEN, - cursor: Optional[str] | NotGiven = NOT_GIVEN, - size: int | NotGiven = NOT_GIVEN, + collection: Optional[str] | Omit = omit, + cursor: Optional[str] | Omit = omit, + size: int | Omit = omit, source: Optional[ Literal[ "collections", @@ -652,13 +652,13 @@ def list( "zoom", ] ] - | NotGiven = NOT_GIVEN, + | Omit = omit, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. 
# The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, extra_query: Query | None = None, extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + timeout: float | httpx.Timeout | None | NotGiven = not_given, ) -> AsyncPaginator[Memory, AsyncCursorPage[Memory]]: """This endpoint allows you to paginate through all documents in the index. @@ -756,7 +756,7 @@ async def delete( extra_headers: Headers | None = None, extra_query: Query | None = None, extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + timeout: float | httpx.Timeout | None | NotGiven = not_given, ) -> MemoryDeleteResponse: """ Delete a memory and its associated chunks from the index. @@ -800,16 +800,16 @@ async def add( self, *, text: str, - collection: Optional[str] | NotGiven = NOT_GIVEN, - date: Union[str, datetime] | NotGiven = NOT_GIVEN, - resource_id: str | NotGiven = NOT_GIVEN, - title: Optional[str] | NotGiven = NOT_GIVEN, + collection: Optional[str] | Omit = omit, + date: Union[str, datetime] | Omit = omit, + resource_id: str | Omit = omit, + title: Optional[str] | Omit = omit, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, extra_query: Query | None = None, extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + timeout: float | httpx.Timeout | None | NotGiven = not_given, ) -> MemoryStatus: """Adds an arbitrary document to the index. 
@@ -915,7 +915,7 @@ async def get( extra_headers: Headers | None = None, extra_query: Query | None = None, extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + timeout: float | httpx.Timeout | None | NotGiven = not_given, ) -> Memory: """ Retrieves a document by provider and resource_id. @@ -945,9 +945,9 @@ async def search( self, *, query: str, - answer: bool | NotGiven = NOT_GIVEN, - max_results: int | NotGiven = NOT_GIVEN, - options: memory_search_params.Options | NotGiven = NOT_GIVEN, + answer: bool | Omit = omit, + max_results: int | Omit = omit, + options: memory_search_params.Options | Omit = omit, sources: List[ Literal[ "collections", @@ -998,14 +998,14 @@ async def search( "zoom", ] ] - | NotGiven = NOT_GIVEN, + | Omit = omit, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, extra_query: Query | None = None, extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> MemorySearchResponse: + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> QueryResult: """ Retrieves documents matching the query. 
@@ -1043,7 +1043,7 @@ async def search( options=make_request_options( extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout ), - cast_to=MemorySearchResponse, + cast_to=QueryResult, ) async def status( @@ -1054,7 +1054,7 @@ async def status( extra_headers: Headers | None = None, extra_query: Query | None = None, extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + timeout: float | httpx.Timeout | None | NotGiven = not_given, ) -> MemoryStatusResponse: """ This endpoint shows the indexing progress of documents, both by provider and @@ -1072,13 +1072,13 @@ async def upload( self, *, file: FileTypes, - collection: Optional[str] | NotGiven = NOT_GIVEN, + collection: Optional[str] | Omit = omit, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, extra_query: Query | None = None, extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + timeout: float | httpx.Timeout | None | NotGiven = not_given, ) -> MemoryStatus: """This endpoint will upload a file to the index and return a resource_id. 
diff --git a/src/hyperspell/resources/vaults.py b/src/hyperspell/resources/vaults.py index babb8a04..4f0d9296 100644 --- a/src/hyperspell/resources/vaults.py +++ b/src/hyperspell/resources/vaults.py @@ -7,7 +7,7 @@ import httpx from ..types import vault_list_params -from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from .._types import Body, Omit, Query, Headers, NotGiven, omit, not_given from .._utils import maybe_transform from .._compat import cached_property from .._resource import SyncAPIResource, AsyncAPIResource @@ -47,14 +47,14 @@ def with_streaming_response(self) -> VaultsResourceWithStreamingResponse: def list( self, *, - cursor: Optional[str] | NotGiven = NOT_GIVEN, - size: int | NotGiven = NOT_GIVEN, + cursor: Optional[str] | Omit = omit, + size: int | Omit = omit, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, extra_query: Query | None = None, extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + timeout: float | httpx.Timeout | None | NotGiven = not_given, ) -> SyncCursorPage[VaultListResponse]: """ This endpoint lists all collections, and how many documents are in each @@ -113,14 +113,14 @@ def with_streaming_response(self) -> AsyncVaultsResourceWithStreamingResponse: def list( self, *, - cursor: Optional[str] | NotGiven = NOT_GIVEN, - size: int | NotGiven = NOT_GIVEN, + cursor: Optional[str] | Omit = omit, + size: int | Omit = omit, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. 
extra_headers: Headers | None = None, extra_query: Query | None = None, extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + timeout: float | httpx.Timeout | None | NotGiven = not_given, ) -> AsyncPaginator[VaultListResponse, AsyncCursorPage[VaultListResponse]]: """ This endpoint lists all collections, and how many documents are in each diff --git a/src/hyperspell/types/__init__.py b/src/hyperspell/types/__init__.py index 5c43b83b..0ed74dd4 100644 --- a/src/hyperspell/types/__init__.py +++ b/src/hyperspell/types/__init__.py @@ -4,6 +4,7 @@ from .token import Token as Token from .memory import Memory as Memory +from .shared import QueryResult as QueryResult from .memory_status import MemoryStatus as MemoryStatus from .auth_me_response import AuthMeResponse as AuthMeResponse from .memory_add_params import MemoryAddParams as MemoryAddParams @@ -14,7 +15,10 @@ from .memory_upload_params import MemoryUploadParams as MemoryUploadParams from .auth_user_token_params import AuthUserTokenParams as AuthUserTokenParams from .memory_delete_response import MemoryDeleteResponse as MemoryDeleteResponse -from .memory_search_response import MemorySearchResponse as MemorySearchResponse from .memory_status_response import MemoryStatusResponse as MemoryStatusResponse from .auth_delete_user_response import AuthDeleteUserResponse as AuthDeleteUserResponse +from .evaluate_score_query_params import EvaluateScoreQueryParams as EvaluateScoreQueryParams from .integration_revoke_response import IntegrationRevokeResponse as IntegrationRevokeResponse +from .evaluate_score_query_response import EvaluateScoreQueryResponse as EvaluateScoreQueryResponse +from .evaluate_score_highlight_params import EvaluateScoreHighlightParams as EvaluateScoreHighlightParams +from .evaluate_score_highlight_response import EvaluateScoreHighlightResponse as EvaluateScoreHighlightResponse diff --git a/src/hyperspell/types/auth_me_response.py 
b/src/hyperspell/types/auth_me_response.py index 64acb118..d4ca2c54 100644 --- a/src/hyperspell/types/auth_me_response.py +++ b/src/hyperspell/types/auth_me_response.py @@ -6,14 +6,28 @@ from .._models import BaseModel -__all__ = ["AuthMeResponse"] +__all__ = ["AuthMeResponse", "App"] + + +class App(BaseModel): + id: str + """The Hyperspell app's id this user belongs to""" + + icon_url: Optional[str] = None + """The app's icon""" + + name: str + """The app's name""" + + redirect_url: Optional[str] = None + """The app's redirect URL""" class AuthMeResponse(BaseModel): id: str """The user's id""" - app: str + app: App """The Hyperspell app's id this user belongs to""" available_integrations: List[ diff --git a/src/hyperspell/types/auth_user_token_params.py b/src/hyperspell/types/auth_user_token_params.py index fa7cc949..c95c2321 100644 --- a/src/hyperspell/types/auth_user_token_params.py +++ b/src/hyperspell/types/auth_user_token_params.py @@ -13,3 +13,9 @@ class AuthUserTokenParams(TypedDict, total=False): expires_in: Optional[str] """Token lifetime, e.g., '30m', '2h', '1d'. Defaults to 24 hours if not provided.""" + + origin: Optional[str] + """Origin of the request, used for CSRF protection. + + If set, the token will only be valid for requests originating from this origin. + """ diff --git a/src/hyperspell/types/evaluate_score_highlight_params.py b/src/hyperspell/types/evaluate_score_highlight_params.py new file mode 100644 index 00000000..c5d36931 --- /dev/null +++ b/src/hyperspell/types/evaluate_score_highlight_params.py @@ -0,0 +1,16 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +from typing import Optional +from typing_extensions import TypedDict + +__all__ = ["EvaluateScoreHighlightParams"] + + +class EvaluateScoreHighlightParams(TypedDict, total=False): + comment: Optional[str] + """Comment on the chunk""" + + score: float + """Rating of the chunk from -1 (bad) to +1 (good).""" diff --git a/src/hyperspell/types/evaluate_score_highlight_response.py b/src/hyperspell/types/evaluate_score_highlight_response.py new file mode 100644 index 00000000..4bd66e67 --- /dev/null +++ b/src/hyperspell/types/evaluate_score_highlight_response.py @@ -0,0 +1,13 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from .._models import BaseModel + +__all__ = ["EvaluateScoreHighlightResponse"] + + +class EvaluateScoreHighlightResponse(BaseModel): + message: str + """A message describing the result.""" + + success: bool + """Whether the feedback was successfully saved.""" diff --git a/src/hyperspell/types/evaluate_score_query_params.py b/src/hyperspell/types/evaluate_score_query_params.py new file mode 100644 index 00000000..38ccb5e6 --- /dev/null +++ b/src/hyperspell/types/evaluate_score_query_params.py @@ -0,0 +1,12 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import TypedDict + +__all__ = ["EvaluateScoreQueryParams"] + + +class EvaluateScoreQueryParams(TypedDict, total=False): + score: float + """Rating of the query result from -1 (bad) to +1 (good).""" diff --git a/src/hyperspell/types/evaluate_score_query_response.py b/src/hyperspell/types/evaluate_score_query_response.py new file mode 100644 index 00000000..d1d59e6d --- /dev/null +++ b/src/hyperspell/types/evaluate_score_query_response.py @@ -0,0 +1,13 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from .._models import BaseModel + +__all__ = ["EvaluateScoreQueryResponse"] + + +class EvaluateScoreQueryResponse(BaseModel): + message: str + """A message describing the result.""" + + success: bool + """Whether the feedback was successfully saved.""" diff --git a/src/hyperspell/types/integrations/__init__.py b/src/hyperspell/types/integrations/__init__.py index cd5fc240..d0911f57 100644 --- a/src/hyperspell/types/integrations/__init__.py +++ b/src/hyperspell/types/integrations/__init__.py @@ -3,5 +3,6 @@ from __future__ import annotations from .calendar import Calendar as Calendar +from .slack_list_params import SlackListParams as SlackListParams from .web_crawler_index_params import WebCrawlerIndexParams as WebCrawlerIndexParams from .web_crawler_index_response import WebCrawlerIndexResponse as WebCrawlerIndexResponse diff --git a/src/hyperspell/types/integrations/slack_list_params.py b/src/hyperspell/types/integrations/slack_list_params.py new file mode 100644 index 00000000..17cf66d4 --- /dev/null +++ b/src/hyperspell/types/integrations/slack_list_params.py @@ -0,0 +1,27 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Optional +from typing_extensions import TypedDict + +from ..._types import SequenceNotStr + +__all__ = ["SlackListParams"] + + +class SlackListParams(TypedDict, total=False): + channels: SequenceNotStr[str] + """List of Slack channels to include (by id, name, or #name).""" + + exclude_archived: Optional[bool] + """If set, pass 'exclude_archived' to Slack. 
If None, omit the param.""" + + include_dms: bool + """Include direct messages (im) when listing conversations.""" + + include_group_dms: bool + """Include group DMs (mpim) when listing conversations.""" + + include_private: bool + """Include private channels when constructing Slack 'types'.""" diff --git a/src/hyperspell/types/memory.py b/src/hyperspell/types/memory.py index c5e83e05..df0de529 100644 --- a/src/hyperspell/types/memory.py +++ b/src/hyperspell/types/memory.py @@ -32,12 +32,17 @@ class Metadata(BaseModel): url: Optional[str] = None - __pydantic_extra__: Dict[str, object] = FieldInfo(init=False) # pyright: ignore[reportIncompatibleVariableOverride] if TYPE_CHECKING: + # Some versions of Pydantic <2.8.0 have a bug and don’t allow assigning a + # value to this field, so for compatibility we avoid doing it at runtime. + __pydantic_extra__: Dict[str, object] = FieldInfo(init=False) # pyright: ignore[reportIncompatibleVariableOverride] + # Stub to indicate that arbitrary properties are accepted. # To access properties that are not valid identifiers you can use `getattr`, e.g. # `getattr(obj, '$type')` def __getattr__(self, attr: str) -> object: ... 
+ else: + __pydantic_extra__: Dict[str, object] class Memory(BaseModel): diff --git a/src/hyperspell/types/memory_search_params.py b/src/hyperspell/types/memory_search_params.py index 8c3940cb..07e692a2 100644 --- a/src/hyperspell/types/memory_search_params.py +++ b/src/hyperspell/types/memory_search_params.py @@ -6,6 +6,7 @@ from datetime import datetime from typing_extensions import Literal, Required, Annotated, TypedDict +from .._types import SequenceNotStr from .._utils import PropertyInfo __all__ = [ @@ -167,7 +168,7 @@ class OptionsGoogleMail(TypedDict, total=False): before: Annotated[Union[str, datetime, None], PropertyInfo(format="iso8601")] """Only query documents created before this date.""" - label_ids: List[str] + label_ids: SequenceNotStr[str] """List of label IDs to filter messages (e.g., ['INBOX', 'SENT', 'DRAFT']). Multiple labels are combined with OR logic - messages matching ANY specified @@ -191,7 +192,7 @@ class OptionsNotion(TypedDict, total=False): before: Annotated[Union[str, datetime, None], PropertyInfo(format="iso8601")] """Only query documents created before this date.""" - notion_page_ids: List[str] + notion_page_ids: SequenceNotStr[str] """List of Notion page IDs to search. If not provided, all pages in the workspace will be searched. @@ -241,10 +242,22 @@ class OptionsSlack(TypedDict, total=False): before: Annotated[Union[str, datetime, None], PropertyInfo(format="iso8601")] """Only query documents created before this date.""" - channels: List[str] - """List of Slack channels to search. + channels: SequenceNotStr[str] + """List of Slack channels to include (by id, name, or #name).""" - If not provided, all channels in the workspace will be searched. + exclude_archived: Optional[bool] + """If set, pass 'exclude_archived' to Slack. 
If None, omit the param.""" + + include_dms: bool + """Include direct messages (im) when listing conversations.""" + + include_group_dms: bool + """Include group DMs (mpim) when listing conversations.""" + + include_private: bool + """Include private channels when constructing Slack 'types'. + + Defaults to False to preserve existing cassette query params. """ weight: float diff --git a/src/hyperspell/types/shared/__init__.py b/src/hyperspell/types/shared/__init__.py new file mode 100644 index 00000000..029f085f --- /dev/null +++ b/src/hyperspell/types/shared/__init__.py @@ -0,0 +1,3 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from .query_result import QueryResult as QueryResult diff --git a/src/hyperspell/types/memory_search_response.py b/src/hyperspell/types/shared/query_result.py similarity index 54% rename from src/hyperspell/types/memory_search_response.py rename to src/hyperspell/types/shared/query_result.py index bf6410e8..67fdb09c 100644 --- a/src/hyperspell/types/memory_search_response.py +++ b/src/hyperspell/types/shared/query_result.py @@ -2,13 +2,13 @@ from typing import Dict, List, Optional -from .memory import Memory -from .._models import BaseModel +from ..memory import Memory +from ..._models import BaseModel -__all__ = ["MemorySearchResponse"] +__all__ = ["QueryResult"] -class MemorySearchResponse(BaseModel): +class QueryResult(BaseModel): documents: List[Memory] answer: Optional[str] = None @@ -20,3 +20,13 @@ class MemorySearchResponse(BaseModel): These are meant to help the developer debug the query, and are not meant to be shown to the user. """ + + query_id: Optional[str] = None + """The ID of the query. + + This can be used to retrieve the query later, or add feedback to it. If the + query failed, this will be None. 
+ """ + + score: Optional[float] = None + """The average score of the query feedback, if any.""" diff --git a/tests/api_resources/integrations/test_slack.py b/tests/api_resources/integrations/test_slack.py new file mode 100644 index 00000000..d4312400 --- /dev/null +++ b/tests/api_resources/integrations/test_slack.py @@ -0,0 +1,95 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import os +from typing import Any, cast + +import pytest + +from hyperspell import Hyperspell, AsyncHyperspell +from tests.utils import assert_matches_type + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") + + +class TestSlack: + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) + + @parametrize + def test_method_list(self, client: Hyperspell) -> None: + slack = client.integrations.slack.list() + assert_matches_type(object, slack, path=["response"]) + + @parametrize + def test_method_list_with_all_params(self, client: Hyperspell) -> None: + slack = client.integrations.slack.list( + channels=["string"], + exclude_archived=True, + include_dms=True, + include_group_dms=True, + include_private=True, + ) + assert_matches_type(object, slack, path=["response"]) + + @parametrize + def test_raw_response_list(self, client: Hyperspell) -> None: + response = client.integrations.slack.with_raw_response.list() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + slack = response.parse() + assert_matches_type(object, slack, path=["response"]) + + @parametrize + def test_streaming_response_list(self, client: Hyperspell) -> None: + with client.integrations.slack.with_streaming_response.list() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + slack = response.parse() + assert_matches_type(object, slack, 
path=["response"]) + + assert cast(Any, response.is_closed) is True + + +class TestAsyncSlack: + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) + + @parametrize + async def test_method_list(self, async_client: AsyncHyperspell) -> None: + slack = await async_client.integrations.slack.list() + assert_matches_type(object, slack, path=["response"]) + + @parametrize + async def test_method_list_with_all_params(self, async_client: AsyncHyperspell) -> None: + slack = await async_client.integrations.slack.list( + channels=["string"], + exclude_archived=True, + include_dms=True, + include_group_dms=True, + include_private=True, + ) + assert_matches_type(object, slack, path=["response"]) + + @parametrize + async def test_raw_response_list(self, async_client: AsyncHyperspell) -> None: + response = await async_client.integrations.slack.with_raw_response.list() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + slack = await response.parse() + assert_matches_type(object, slack, path=["response"]) + + @parametrize + async def test_streaming_response_list(self, async_client: AsyncHyperspell) -> None: + async with async_client.integrations.slack.with_streaming_response.list() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + slack = await response.parse() + assert_matches_type(object, slack, path=["response"]) + + assert cast(Any, response.is_closed) is True diff --git a/tests/api_resources/test_auth.py b/tests/api_resources/test_auth.py index 207e4c13..5f217d11 100644 --- a/tests/api_resources/test_auth.py +++ b/tests/api_resources/test_auth.py @@ -79,6 +79,7 @@ def test_method_user_token_with_all_params(self, client: Hyperspell) -> None: auth = client.auth.user_token( user_id="user_id", expires_in="30m", + origin="origin", ) 
assert_matches_type(Token, auth, path=["response"]) @@ -174,6 +175,7 @@ async def test_method_user_token_with_all_params(self, async_client: AsyncHypers auth = await async_client.auth.user_token( user_id="user_id", expires_in="30m", + origin="origin", ) assert_matches_type(Token, auth, path=["response"]) diff --git a/tests/api_resources/test_evaluate.py b/tests/api_resources/test_evaluate.py new file mode 100644 index 00000000..c8ab9f33 --- /dev/null +++ b/tests/api_resources/test_evaluate.py @@ -0,0 +1,290 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import os +from typing import Any, cast + +import pytest + +from hyperspell import Hyperspell, AsyncHyperspell +from tests.utils import assert_matches_type +from hyperspell.types import ( + EvaluateScoreQueryResponse, + EvaluateScoreHighlightResponse, +) +from hyperspell.types.shared import QueryResult + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") + + +class TestEvaluate: + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) + + @parametrize + def test_method_get_query(self, client: Hyperspell) -> None: + evaluate = client.evaluate.get_query( + "query_id", + ) + assert_matches_type(QueryResult, evaluate, path=["response"]) + + @parametrize + def test_raw_response_get_query(self, client: Hyperspell) -> None: + response = client.evaluate.with_raw_response.get_query( + "query_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + evaluate = response.parse() + assert_matches_type(QueryResult, evaluate, path=["response"]) + + @parametrize + def test_streaming_response_get_query(self, client: Hyperspell) -> None: + with client.evaluate.with_streaming_response.get_query( + "query_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") 
== "python" + + evaluate = response.parse() + assert_matches_type(QueryResult, evaluate, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @parametrize + def test_path_params_get_query(self, client: Hyperspell) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `query_id` but received ''"): + client.evaluate.with_raw_response.get_query( + "", + ) + + @parametrize + def test_method_score_highlight(self, client: Hyperspell) -> None: + evaluate = client.evaluate.score_highlight( + highlight_id="highlight_id", + ) + assert_matches_type(EvaluateScoreHighlightResponse, evaluate, path=["response"]) + + @parametrize + def test_method_score_highlight_with_all_params(self, client: Hyperspell) -> None: + evaluate = client.evaluate.score_highlight( + highlight_id="highlight_id", + comment="comment", + score=-1, + ) + assert_matches_type(EvaluateScoreHighlightResponse, evaluate, path=["response"]) + + @parametrize + def test_raw_response_score_highlight(self, client: Hyperspell) -> None: + response = client.evaluate.with_raw_response.score_highlight( + highlight_id="highlight_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + evaluate = response.parse() + assert_matches_type(EvaluateScoreHighlightResponse, evaluate, path=["response"]) + + @parametrize + def test_streaming_response_score_highlight(self, client: Hyperspell) -> None: + with client.evaluate.with_streaming_response.score_highlight( + highlight_id="highlight_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + evaluate = response.parse() + assert_matches_type(EvaluateScoreHighlightResponse, evaluate, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @parametrize + def test_path_params_score_highlight(self, client: Hyperspell) -> None: + with pytest.raises(ValueError, match=r"Expected a 
non-empty value for `highlight_id` but received ''"): + client.evaluate.with_raw_response.score_highlight( + highlight_id="", + ) + + @parametrize + def test_method_score_query(self, client: Hyperspell) -> None: + evaluate = client.evaluate.score_query( + query_id="query_id", + ) + assert_matches_type(EvaluateScoreQueryResponse, evaluate, path=["response"]) + + @parametrize + def test_method_score_query_with_all_params(self, client: Hyperspell) -> None: + evaluate = client.evaluate.score_query( + query_id="query_id", + score=-1, + ) + assert_matches_type(EvaluateScoreQueryResponse, evaluate, path=["response"]) + + @parametrize + def test_raw_response_score_query(self, client: Hyperspell) -> None: + response = client.evaluate.with_raw_response.score_query( + query_id="query_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + evaluate = response.parse() + assert_matches_type(EvaluateScoreQueryResponse, evaluate, path=["response"]) + + @parametrize + def test_streaming_response_score_query(self, client: Hyperspell) -> None: + with client.evaluate.with_streaming_response.score_query( + query_id="query_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + evaluate = response.parse() + assert_matches_type(EvaluateScoreQueryResponse, evaluate, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @parametrize + def test_path_params_score_query(self, client: Hyperspell) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `query_id` but received ''"): + client.evaluate.with_raw_response.score_query( + query_id="", + ) + + +class TestAsyncEvaluate: + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) + + @parametrize + async def test_method_get_query(self, async_client: 
AsyncHyperspell) -> None: + evaluate = await async_client.evaluate.get_query( + "query_id", + ) + assert_matches_type(QueryResult, evaluate, path=["response"]) + + @parametrize + async def test_raw_response_get_query(self, async_client: AsyncHyperspell) -> None: + response = await async_client.evaluate.with_raw_response.get_query( + "query_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + evaluate = await response.parse() + assert_matches_type(QueryResult, evaluate, path=["response"]) + + @parametrize + async def test_streaming_response_get_query(self, async_client: AsyncHyperspell) -> None: + async with async_client.evaluate.with_streaming_response.get_query( + "query_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + evaluate = await response.parse() + assert_matches_type(QueryResult, evaluate, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @parametrize + async def test_path_params_get_query(self, async_client: AsyncHyperspell) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `query_id` but received ''"): + await async_client.evaluate.with_raw_response.get_query( + "", + ) + + @parametrize + async def test_method_score_highlight(self, async_client: AsyncHyperspell) -> None: + evaluate = await async_client.evaluate.score_highlight( + highlight_id="highlight_id", + ) + assert_matches_type(EvaluateScoreHighlightResponse, evaluate, path=["response"]) + + @parametrize + async def test_method_score_highlight_with_all_params(self, async_client: AsyncHyperspell) -> None: + evaluate = await async_client.evaluate.score_highlight( + highlight_id="highlight_id", + comment="comment", + score=-1, + ) + assert_matches_type(EvaluateScoreHighlightResponse, evaluate, path=["response"]) + + @parametrize + async def test_raw_response_score_highlight(self, async_client: 
AsyncHyperspell) -> None: + response = await async_client.evaluate.with_raw_response.score_highlight( + highlight_id="highlight_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + evaluate = await response.parse() + assert_matches_type(EvaluateScoreHighlightResponse, evaluate, path=["response"]) + + @parametrize + async def test_streaming_response_score_highlight(self, async_client: AsyncHyperspell) -> None: + async with async_client.evaluate.with_streaming_response.score_highlight( + highlight_id="highlight_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + evaluate = await response.parse() + assert_matches_type(EvaluateScoreHighlightResponse, evaluate, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @parametrize + async def test_path_params_score_highlight(self, async_client: AsyncHyperspell) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `highlight_id` but received ''"): + await async_client.evaluate.with_raw_response.score_highlight( + highlight_id="", + ) + + @parametrize + async def test_method_score_query(self, async_client: AsyncHyperspell) -> None: + evaluate = await async_client.evaluate.score_query( + query_id="query_id", + ) + assert_matches_type(EvaluateScoreQueryResponse, evaluate, path=["response"]) + + @parametrize + async def test_method_score_query_with_all_params(self, async_client: AsyncHyperspell) -> None: + evaluate = await async_client.evaluate.score_query( + query_id="query_id", + score=-1, + ) + assert_matches_type(EvaluateScoreQueryResponse, evaluate, path=["response"]) + + @parametrize + async def test_raw_response_score_query(self, async_client: AsyncHyperspell) -> None: + response = await async_client.evaluate.with_raw_response.score_query( + query_id="query_id", + ) + + assert response.is_closed is True + assert 
response.http_request.headers.get("X-Stainless-Lang") == "python" + evaluate = await response.parse() + assert_matches_type(EvaluateScoreQueryResponse, evaluate, path=["response"]) + + @parametrize + async def test_streaming_response_score_query(self, async_client: AsyncHyperspell) -> None: + async with async_client.evaluate.with_streaming_response.score_query( + query_id="query_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + evaluate = await response.parse() + assert_matches_type(EvaluateScoreQueryResponse, evaluate, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @parametrize + async def test_path_params_score_query(self, async_client: AsyncHyperspell) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `query_id` but received ''"): + await async_client.evaluate.with_raw_response.score_query( + query_id="", + ) diff --git a/tests/api_resources/test_memories.py b/tests/api_resources/test_memories.py index cd3041d8..182601a5 100644 --- a/tests/api_resources/test_memories.py +++ b/tests/api_resources/test_memories.py @@ -13,11 +13,11 @@ Memory, MemoryStatus, MemoryDeleteResponse, - MemorySearchResponse, MemoryStatusResponse, ) from hyperspell._utils import parse_datetime from hyperspell.pagination import SyncCursorPage, AsyncCursorPage +from hyperspell.types.shared import QueryResult base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") @@ -191,7 +191,7 @@ def test_method_search(self, client: Hyperspell) -> None: memory = client.memories.search( query="query", ) - assert_matches_type(MemorySearchResponse, memory, path=["response"]) + assert_matches_type(QueryResult, memory, path=["response"]) @parametrize def test_method_search_with_all_params(self, client: Hyperspell) -> None: @@ -249,6 +249,10 @@ def test_method_search_with_all_params(self, client: Hyperspell) -> None: "after": 
parse_datetime("2019-12-27T18:11:19.117Z"), "before": parse_datetime("2019-12-27T18:11:19.117Z"), "channels": ["string"], + "exclude_archived": True, + "include_dms": True, + "include_group_dms": True, + "include_private": True, "weight": 0, }, "web_crawler": { @@ -261,7 +265,7 @@ def test_method_search_with_all_params(self, client: Hyperspell) -> None: }, sources=["collections"], ) - assert_matches_type(MemorySearchResponse, memory, path=["response"]) + assert_matches_type(QueryResult, memory, path=["response"]) @parametrize def test_raw_response_search(self, client: Hyperspell) -> None: @@ -272,7 +276,7 @@ def test_raw_response_search(self, client: Hyperspell) -> None: assert response.is_closed is True assert response.http_request.headers.get("X-Stainless-Lang") == "python" memory = response.parse() - assert_matches_type(MemorySearchResponse, memory, path=["response"]) + assert_matches_type(QueryResult, memory, path=["response"]) @parametrize def test_streaming_response_search(self, client: Hyperspell) -> None: @@ -283,7 +287,7 @@ def test_streaming_response_search(self, client: Hyperspell) -> None: assert response.http_request.headers.get("X-Stainless-Lang") == "python" memory = response.parse() - assert_matches_type(MemorySearchResponse, memory, path=["response"]) + assert_matches_type(QueryResult, memory, path=["response"]) assert cast(Any, response.is_closed) is True @@ -523,7 +527,7 @@ async def test_method_search(self, async_client: AsyncHyperspell) -> None: memory = await async_client.memories.search( query="query", ) - assert_matches_type(MemorySearchResponse, memory, path=["response"]) + assert_matches_type(QueryResult, memory, path=["response"]) @parametrize async def test_method_search_with_all_params(self, async_client: AsyncHyperspell) -> None: @@ -581,6 +585,10 @@ async def test_method_search_with_all_params(self, async_client: AsyncHyperspell "after": parse_datetime("2019-12-27T18:11:19.117Z"), "before": parse_datetime("2019-12-27T18:11:19.117Z"), 
"channels": ["string"], + "exclude_archived": True, + "include_dms": True, + "include_group_dms": True, + "include_private": True, "weight": 0, }, "web_crawler": { @@ -593,7 +601,7 @@ async def test_method_search_with_all_params(self, async_client: AsyncHyperspell }, sources=["collections"], ) - assert_matches_type(MemorySearchResponse, memory, path=["response"]) + assert_matches_type(QueryResult, memory, path=["response"]) @parametrize async def test_raw_response_search(self, async_client: AsyncHyperspell) -> None: @@ -604,7 +612,7 @@ async def test_raw_response_search(self, async_client: AsyncHyperspell) -> None: assert response.is_closed is True assert response.http_request.headers.get("X-Stainless-Lang") == "python" memory = await response.parse() - assert_matches_type(MemorySearchResponse, memory, path=["response"]) + assert_matches_type(QueryResult, memory, path=["response"]) @parametrize async def test_streaming_response_search(self, async_client: AsyncHyperspell) -> None: @@ -615,7 +623,7 @@ async def test_streaming_response_search(self, async_client: AsyncHyperspell) -> assert response.http_request.headers.get("X-Stainless-Lang") == "python" memory = await response.parse() - assert_matches_type(MemorySearchResponse, memory, path=["response"]) + assert_matches_type(QueryResult, memory, path=["response"]) assert cast(Any, response.is_closed) is True diff --git a/tests/test_client.py b/tests/test_client.py index 8ddca0cf..eff94130 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -6,13 +6,10 @@ import os import sys import json -import time import asyncio import inspect -import subprocess import tracemalloc from typing import Any, Union, cast -from textwrap import dedent from unittest import mock from typing_extensions import Literal @@ -23,14 +20,17 @@ from hyperspell import Hyperspell, AsyncHyperspell, APIResponseValidationError from hyperspell._types import Omit +from hyperspell._utils import asyncify from hyperspell._models import BaseModel, 
FinalRequestOptions from hyperspell._exceptions import APIStatusError, APITimeoutError, HyperspellError, APIResponseValidationError from hyperspell._base_client import ( DEFAULT_TIMEOUT, HTTPX_DEFAULT_TIMEOUT, BaseClient, + OtherPlatform, DefaultHttpxClient, DefaultAsyncHttpxClient, + get_platform, make_request_options, ) @@ -1762,50 +1762,9 @@ def retry_handler(_request: httpx.Request) -> httpx.Response: assert response.http_request.headers.get("x-stainless-retry-count") == "42" - def test_get_platform(self) -> None: - # A previous implementation of asyncify could leave threads unterminated when - # used with nest_asyncio. - # - # Since nest_asyncio.apply() is global and cannot be un-applied, this - # test is run in a separate process to avoid affecting other tests. - test_code = dedent(""" - import asyncio - import nest_asyncio - import threading - - from hyperspell._utils import asyncify - from hyperspell._base_client import get_platform - - async def test_main() -> None: - result = await asyncify(get_platform)() - print(result) - for thread in threading.enumerate(): - print(thread.name) - - nest_asyncio.apply() - asyncio.run(test_main()) - """) - with subprocess.Popen( - [sys.executable, "-c", test_code], - text=True, - ) as process: - timeout = 10 # seconds - - start_time = time.monotonic() - while True: - return_code = process.poll() - if return_code is not None: - if return_code != 0: - raise AssertionError("calling get_platform using asyncify resulted in a non-zero exit code") - - # success - break - - if time.monotonic() - start_time > timeout: - process.kill() - raise AssertionError("calling get_platform using asyncify resulted in a hung process") - - time.sleep(0.1) + async def test_get_platform(self) -> None: + platform = await asyncify(get_platform)() + assert isinstance(platform, (str, OtherPlatform)) async def test_proxy_environment_variables(self, monkeypatch: pytest.MonkeyPatch) -> None: # Test that the proxy environment variables are set correctly 
diff --git a/tests/test_models.py b/tests/test_models.py index 0a875386..088404f1 100644 --- a/tests/test_models.py +++ b/tests/test_models.py @@ -8,7 +8,7 @@ from pydantic import Field from hyperspell._utils import PropertyInfo -from hyperspell._compat import PYDANTIC_V2, parse_obj, model_dump, model_json +from hyperspell._compat import PYDANTIC_V1, parse_obj, model_dump, model_json from hyperspell._models import BaseModel, construct_type @@ -294,12 +294,12 @@ class Model(BaseModel): assert cast(bool, m.foo) is True m = Model.construct(foo={"name": 3}) - if PYDANTIC_V2: - assert isinstance(m.foo, Submodel1) - assert m.foo.name == 3 # type: ignore - else: + if PYDANTIC_V1: assert isinstance(m.foo, Submodel2) assert m.foo.name == "3" + else: + assert isinstance(m.foo, Submodel1) + assert m.foo.name == 3 # type: ignore def test_list_of_unions() -> None: @@ -426,10 +426,10 @@ class Model(BaseModel): expected = datetime(2019, 12, 27, 18, 11, 19, 117000, tzinfo=timezone.utc) - if PYDANTIC_V2: - expected_json = '{"created_at":"2019-12-27T18:11:19.117000Z"}' - else: + if PYDANTIC_V1: expected_json = '{"created_at": "2019-12-27T18:11:19.117000+00:00"}' + else: + expected_json = '{"created_at":"2019-12-27T18:11:19.117000Z"}' model = Model.construct(created_at="2019-12-27T18:11:19.117Z") assert model.created_at == expected @@ -531,7 +531,7 @@ class Model2(BaseModel): assert m4.to_dict(mode="python") == {"created_at": datetime.fromisoformat(time_str)} assert m4.to_dict(mode="json") == {"created_at": time_str} - if not PYDANTIC_V2: + if PYDANTIC_V1: with pytest.raises(ValueError, match="warnings is only supported in Pydantic v2"): m.to_dict(warnings=False) @@ -556,7 +556,7 @@ class Model(BaseModel): assert m3.model_dump() == {"foo": None} assert m3.model_dump(exclude_none=True) == {} - if not PYDANTIC_V2: + if PYDANTIC_V1: with pytest.raises(ValueError, match="round_trip is only supported in Pydantic v2"): m.model_dump(round_trip=True) @@ -580,10 +580,10 @@ class 
Model(BaseModel): assert json.loads(m.to_json()) == {"FOO": "hello"} assert json.loads(m.to_json(use_api_names=False)) == {"foo": "hello"} - if PYDANTIC_V2: - assert m.to_json(indent=None) == '{"FOO":"hello"}' - else: + if PYDANTIC_V1: assert m.to_json(indent=None) == '{"FOO": "hello"}' + else: + assert m.to_json(indent=None) == '{"FOO":"hello"}' m2 = Model() assert json.loads(m2.to_json()) == {} @@ -595,7 +595,7 @@ class Model(BaseModel): assert json.loads(m3.to_json()) == {"FOO": None} assert json.loads(m3.to_json(exclude_none=True)) == {} - if not PYDANTIC_V2: + if PYDANTIC_V1: with pytest.raises(ValueError, match="warnings is only supported in Pydantic v2"): m.to_json(warnings=False) @@ -622,7 +622,7 @@ class Model(BaseModel): assert json.loads(m3.model_dump_json()) == {"foo": None} assert json.loads(m3.model_dump_json(exclude_none=True)) == {} - if not PYDANTIC_V2: + if PYDANTIC_V1: with pytest.raises(ValueError, match="round_trip is only supported in Pydantic v2"): m.model_dump_json(round_trip=True) @@ -679,12 +679,12 @@ class B(BaseModel): ) assert isinstance(m, A) assert m.type == "a" - if PYDANTIC_V2: - assert m.data == 100 # type: ignore[comparison-overlap] - else: + if PYDANTIC_V1: # pydantic v1 automatically converts inputs to strings # if the expected type is a str assert m.data == "100" + else: + assert m.data == 100 # type: ignore[comparison-overlap] def test_discriminated_unions_unknown_variant() -> None: @@ -768,12 +768,12 @@ class B(BaseModel): ) assert isinstance(m, A) assert m.foo_type == "a" - if PYDANTIC_V2: - assert m.data == 100 # type: ignore[comparison-overlap] - else: + if PYDANTIC_V1: # pydantic v1 automatically converts inputs to strings # if the expected type is a str assert m.data == "100" + else: + assert m.data == 100 # type: ignore[comparison-overlap] def test_discriminated_unions_overlapping_discriminators_invalid_data() -> None: @@ -833,7 +833,7 @@ class B(BaseModel): assert UnionType.__discriminator__ is discriminator 
-@pytest.mark.skipif(not PYDANTIC_V2, reason="TypeAliasType is not supported in Pydantic v1") +@pytest.mark.skipif(PYDANTIC_V1, reason="TypeAliasType is not supported in Pydantic v1") def test_type_alias_type() -> None: Alias = TypeAliasType("Alias", str) # pyright: ignore @@ -849,7 +849,7 @@ class Model(BaseModel): assert m.union == "bar" -@pytest.mark.skipif(not PYDANTIC_V2, reason="TypeAliasType is not supported in Pydantic v1") +@pytest.mark.skipif(PYDANTIC_V1, reason="TypeAliasType is not supported in Pydantic v1") def test_field_named_cls() -> None: class Model(BaseModel): cls: str @@ -936,7 +936,7 @@ class Type2(BaseModel): assert isinstance(model.value, InnerType2) -@pytest.mark.skipif(not PYDANTIC_V2, reason="this is only supported in pydantic v2 for now") +@pytest.mark.skipif(PYDANTIC_V1, reason="this is only supported in pydantic v2 for now") def test_extra_properties() -> None: class Item(BaseModel): prop: int diff --git a/tests/test_transform.py b/tests/test_transform.py index aa220e3f..96aa54ea 100644 --- a/tests/test_transform.py +++ b/tests/test_transform.py @@ -8,14 +8,14 @@ import pytest -from hyperspell._types import NOT_GIVEN, Base64FileInput +from hyperspell._types import Base64FileInput, omit, not_given from hyperspell._utils import ( PropertyInfo, transform as _transform, parse_datetime, async_transform as _async_transform, ) -from hyperspell._compat import PYDANTIC_V2 +from hyperspell._compat import PYDANTIC_V1 from hyperspell._models import BaseModel _T = TypeVar("_T") @@ -189,7 +189,7 @@ class DateModel(BaseModel): @pytest.mark.asyncio async def test_iso8601_format(use_async: bool) -> None: dt = datetime.fromisoformat("2023-02-23T14:16:36.337692+00:00") - tz = "Z" if PYDANTIC_V2 else "+00:00" + tz = "+00:00" if PYDANTIC_V1 else "Z" assert await transform({"foo": dt}, DatetimeDict, use_async) == {"foo": "2023-02-23T14:16:36.337692+00:00"} # type: ignore[comparison-overlap] assert await transform(DatetimeModel(foo=dt), Any, use_async) == 
{"foo": "2023-02-23T14:16:36.337692" + tz} # type: ignore[comparison-overlap] @@ -297,11 +297,11 @@ async def test_pydantic_unknown_field(use_async: bool) -> None: @pytest.mark.asyncio async def test_pydantic_mismatched_types(use_async: bool) -> None: model = MyModel.construct(foo=True) - if PYDANTIC_V2: + if PYDANTIC_V1: + params = await transform(model, Any, use_async) + else: with pytest.warns(UserWarning): params = await transform(model, Any, use_async) - else: - params = await transform(model, Any, use_async) assert cast(Any, params) == {"foo": True} @@ -309,11 +309,11 @@ async def test_pydantic_mismatched_types(use_async: bool) -> None: @pytest.mark.asyncio async def test_pydantic_mismatched_object_type(use_async: bool) -> None: model = MyModel.construct(foo=MyModel.construct(hello="world")) - if PYDANTIC_V2: + if PYDANTIC_V1: + params = await transform(model, Any, use_async) + else: with pytest.warns(UserWarning): params = await transform(model, Any, use_async) - else: - params = await transform(model, Any, use_async) assert cast(Any, params) == {"foo": {"hello": "world"}} @@ -450,4 +450,11 @@ async def test_transform_skipping(use_async: bool) -> None: @pytest.mark.asyncio async def test_strips_notgiven(use_async: bool) -> None: assert await transform({"foo_bar": "bar"}, Foo1, use_async) == {"fooBar": "bar"} - assert await transform({"foo_bar": NOT_GIVEN}, Foo1, use_async) == {} + assert await transform({"foo_bar": not_given}, Foo1, use_async) == {} + + +@parametrize +@pytest.mark.asyncio +async def test_strips_omit(use_async: bool) -> None: + assert await transform({"foo_bar": "bar"}, Foo1, use_async) == {"fooBar": "bar"} + assert await transform({"foo_bar": omit}, Foo1, use_async) == {} diff --git a/tests/test_utils/test_datetime_parse.py b/tests/test_utils/test_datetime_parse.py new file mode 100644 index 00000000..cddc2ac8 --- /dev/null +++ b/tests/test_utils/test_datetime_parse.py @@ -0,0 +1,110 @@ +""" +Copied from 
https://github.com/pydantic/pydantic/blob/v1.10.22/tests/test_datetime_parse.py +with modifications so it works without pydantic v1 imports. +""" + +from typing import Type, Union +from datetime import date, datetime, timezone, timedelta + +import pytest + +from hyperspell._utils import parse_date, parse_datetime + + +def create_tz(minutes: int) -> timezone: + return timezone(timedelta(minutes=minutes)) + + +@pytest.mark.parametrize( + "value,result", + [ + # Valid inputs + ("1494012444.883309", date(2017, 5, 5)), + (b"1494012444.883309", date(2017, 5, 5)), + (1_494_012_444.883_309, date(2017, 5, 5)), + ("1494012444", date(2017, 5, 5)), + (1_494_012_444, date(2017, 5, 5)), + (0, date(1970, 1, 1)), + ("2012-04-23", date(2012, 4, 23)), + (b"2012-04-23", date(2012, 4, 23)), + ("2012-4-9", date(2012, 4, 9)), + (date(2012, 4, 9), date(2012, 4, 9)), + (datetime(2012, 4, 9, 12, 15), date(2012, 4, 9)), + # Invalid inputs + ("x20120423", ValueError), + ("2012-04-56", ValueError), + (19_999_999_999, date(2603, 10, 11)), # just before watershed + (20_000_000_001, date(1970, 8, 20)), # just after watershed + (1_549_316_052, date(2019, 2, 4)), # nowish in s + (1_549_316_052_104, date(2019, 2, 4)), # nowish in ms + (1_549_316_052_104_324, date(2019, 2, 4)), # nowish in μs + (1_549_316_052_104_324_096, date(2019, 2, 4)), # nowish in ns + ("infinity", date(9999, 12, 31)), + ("inf", date(9999, 12, 31)), + (float("inf"), date(9999, 12, 31)), + ("infinity ", date(9999, 12, 31)), + (int("1" + "0" * 100), date(9999, 12, 31)), + (1e1000, date(9999, 12, 31)), + ("-infinity", date(1, 1, 1)), + ("-inf", date(1, 1, 1)), + ("nan", ValueError), + ], +) +def test_date_parsing(value: Union[str, bytes, int, float], result: Union[date, Type[Exception]]) -> None: + if type(result) == type and issubclass(result, Exception): # pyright: ignore[reportUnnecessaryIsInstance] + with pytest.raises(result): + parse_date(value) + else: + assert parse_date(value) == result + + +@pytest.mark.parametrize( + 
"value,result", + [ + # Valid inputs + # values in seconds + ("1494012444.883309", datetime(2017, 5, 5, 19, 27, 24, 883_309, tzinfo=timezone.utc)), + (1_494_012_444.883_309, datetime(2017, 5, 5, 19, 27, 24, 883_309, tzinfo=timezone.utc)), + ("1494012444", datetime(2017, 5, 5, 19, 27, 24, tzinfo=timezone.utc)), + (b"1494012444", datetime(2017, 5, 5, 19, 27, 24, tzinfo=timezone.utc)), + (1_494_012_444, datetime(2017, 5, 5, 19, 27, 24, tzinfo=timezone.utc)), + # values in ms + ("1494012444000.883309", datetime(2017, 5, 5, 19, 27, 24, 883, tzinfo=timezone.utc)), + ("-1494012444000.883309", datetime(1922, 8, 29, 4, 32, 35, 999117, tzinfo=timezone.utc)), + (1_494_012_444_000, datetime(2017, 5, 5, 19, 27, 24, tzinfo=timezone.utc)), + ("2012-04-23T09:15:00", datetime(2012, 4, 23, 9, 15)), + ("2012-4-9 4:8:16", datetime(2012, 4, 9, 4, 8, 16)), + ("2012-04-23T09:15:00Z", datetime(2012, 4, 23, 9, 15, 0, 0, timezone.utc)), + ("2012-4-9 4:8:16-0320", datetime(2012, 4, 9, 4, 8, 16, 0, create_tz(-200))), + ("2012-04-23T10:20:30.400+02:30", datetime(2012, 4, 23, 10, 20, 30, 400_000, create_tz(150))), + ("2012-04-23T10:20:30.400+02", datetime(2012, 4, 23, 10, 20, 30, 400_000, create_tz(120))), + ("2012-04-23T10:20:30.400-02", datetime(2012, 4, 23, 10, 20, 30, 400_000, create_tz(-120))), + (b"2012-04-23T10:20:30.400-02", datetime(2012, 4, 23, 10, 20, 30, 400_000, create_tz(-120))), + (datetime(2017, 5, 5), datetime(2017, 5, 5)), + (0, datetime(1970, 1, 1, 0, 0, 0, tzinfo=timezone.utc)), + # Invalid inputs + ("x20120423091500", ValueError), + ("2012-04-56T09:15:90", ValueError), + ("2012-04-23T11:05:00-25:00", ValueError), + (19_999_999_999, datetime(2603, 10, 11, 11, 33, 19, tzinfo=timezone.utc)), # just before watershed + (20_000_000_001, datetime(1970, 8, 20, 11, 33, 20, 1000, tzinfo=timezone.utc)), # just after watershed + (1_549_316_052, datetime(2019, 2, 4, 21, 34, 12, 0, tzinfo=timezone.utc)), # nowish in s + (1_549_316_052_104, datetime(2019, 2, 4, 21, 34, 12, 104_000, 
tzinfo=timezone.utc)), # nowish in ms + (1_549_316_052_104_324, datetime(2019, 2, 4, 21, 34, 12, 104_324, tzinfo=timezone.utc)), # nowish in μs + (1_549_316_052_104_324_096, datetime(2019, 2, 4, 21, 34, 12, 104_324, tzinfo=timezone.utc)), # nowish in ns + ("infinity", datetime(9999, 12, 31, 23, 59, 59, 999999)), + ("inf", datetime(9999, 12, 31, 23, 59, 59, 999999)), + ("inf ", datetime(9999, 12, 31, 23, 59, 59, 999999)), + (1e50, datetime(9999, 12, 31, 23, 59, 59, 999999)), + (float("inf"), datetime(9999, 12, 31, 23, 59, 59, 999999)), + ("-infinity", datetime(1, 1, 1, 0, 0)), + ("-inf", datetime(1, 1, 1, 0, 0)), + ("nan", ValueError), + ], +) +def test_datetime_parsing(value: Union[str, bytes, int, float], result: Union[datetime, Type[Exception]]) -> None: + if type(result) == type and issubclass(result, Exception): # pyright: ignore[reportUnnecessaryIsInstance] + with pytest.raises(result): + parse_datetime(value) + else: + assert parse_datetime(value) == result diff --git a/tests/utils.py b/tests/utils.py index 135bcfe0..186a1588 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -4,7 +4,7 @@ import inspect import traceback import contextlib -from typing import Any, TypeVar, Iterator, cast +from typing import Any, TypeVar, Iterator, Sequence, cast from datetime import date, datetime from typing_extensions import Literal, get_args, get_origin, assert_type @@ -15,10 +15,11 @@ is_list_type, is_union_type, extract_type_arg, + is_sequence_type, is_annotated_type, is_type_alias_type, ) -from hyperspell._compat import PYDANTIC_V2, field_outer_type, get_model_fields +from hyperspell._compat import PYDANTIC_V1, field_outer_type, get_model_fields from hyperspell._models import BaseModel BaseModelT = TypeVar("BaseModelT", bound=BaseModel) @@ -27,12 +28,12 @@ def assert_matches_model(model: type[BaseModelT], value: BaseModelT, *, path: list[str]) -> bool: for name, field in get_model_fields(model).items(): field_value = getattr(value, name) - if PYDANTIC_V2: - allow_none = 
False - else: + if PYDANTIC_V1: # in v1 nullability was structured differently # https://docs.pydantic.dev/2.0/migration/#required-optional-and-nullable-fields allow_none = getattr(field, "allow_none", False) + else: + allow_none = False assert_matches_type( field_outer_type(field), @@ -71,6 +72,13 @@ def assert_matches_type( if is_list_type(type_): return _assert_list_type(type_, value) + if is_sequence_type(type_): + assert isinstance(value, Sequence) + inner_type = get_args(type_)[0] + for entry in value: # type: ignore + assert_type(inner_type, entry) # type: ignore + return + if origin == str: assert isinstance(value, str) elif origin == int: