Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .release-please-manifest.json
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
{
".": "0.22.0"
".": "0.22.1"
}
6 changes: 3 additions & 3 deletions .stats.yml
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
configured_endpoints: 14
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/hyperspell%2Fhyperspell-ec1420af27ac837f49a977cb95726af45a5ee5b5cd367e54b8a611de47ee3c84.yml
openapi_spec_hash: 0fc5dd84801ee8f46a9b5d0941bdefda
config_hash: 985dd1bd217ba3c5c5b614da08d43e5f
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/hyperspell%2Fhyperspell-b98d78d20b4c4d2c702e39d073c7b46218c6a4faf8b2f7293034aa37cd55140e.yml
openapi_spec_hash: c280139f8355b085bd2c417c67fffada
config_hash: 595375b8ab62a4d175e28264da481aa3
15 changes: 15 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,20 @@
# Changelog

## 0.22.1 (2025-08-13)

Full Changelog: [v0.22.0...v0.22.1](https://github.com/hyperspell/python-sdk/compare/v0.22.0...v0.22.1)

### Features

* **api:** api update ([d2c231b](https://github.com/hyperspell/python-sdk/commit/d2c231b07652d05a03e545078b17da80d425a072))
* **api:** api update ([ac37961](https://github.com/hyperspell/python-sdk/commit/ac3796155df478f984b59d18b81e0ab8a0b20a36))


### Chores

* **internal:** update comment in script ([3f74f9d](https://github.com/hyperspell/python-sdk/commit/3f74f9ded72572e20d6839d14c34713d75c83d4c))
* update @stainless-api/prism-cli to v5.15.0 ([37e8a3f](https://github.com/hyperspell/python-sdk/commit/37e8a3f52aff75d78adc6ba7a0a51107cc9e0fe1))

## 0.22.0 (2025-08-06)

Full Changelog: [v0.21.0...v0.22.0](https://github.com/hyperspell/python-sdk/compare/v0.21.0...v0.22.0)
Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[project]
name = "hyperspell"
version = "0.22.0"
version = "0.22.1"
description = "The official Python library for the hyperspell API"
dynamic = ["readme"]
license = "MIT"
Expand Down
4 changes: 2 additions & 2 deletions scripts/mock
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ echo "==> Starting mock server with URL ${URL}"

# Run prism mock on the given spec
if [ "$1" == "--daemon" ]; then
npm exec --package=@stainless-api/prism-cli@5.8.5 -- prism mock "$URL" &> .prism.log &
npm exec --package=@stainless-api/prism-cli@5.15.0 -- prism mock "$URL" &> .prism.log &

# Wait for server to come online
echo -n "Waiting for server"
Expand All @@ -37,5 +37,5 @@ if [ "$1" == "--daemon" ]; then

echo
else
npm exec --package=@stainless-api/prism-cli@5.8.5 -- prism mock "$URL"
npm exec --package=@stainless-api/prism-cli@5.15.0 -- prism mock "$URL"
fi
2 changes: 1 addition & 1 deletion scripts/test
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ elif ! prism_is_running ; then
echo -e "To run the server, pass in the path or url of your OpenAPI"
echo -e "spec to the prism command:"
echo
echo -e " \$ ${YELLOW}npm exec --package=@stoplight/prism-cli@~5.3.2 -- prism mock path/to/your.openapi.yml${NC}"
echo -e " \$ ${YELLOW}npm exec --package=@stainless-api/prism-cli@5.15.0 -- prism mock path/to/your.openapi.yml${NC}"
echo

exit 1
Expand Down
2 changes: 1 addition & 1 deletion src/hyperspell/_version.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

__title__ = "hyperspell"
__version__ = "0.22.0" # x-release-please-version
__version__ = "0.22.1" # x-release-please-version
34 changes: 32 additions & 2 deletions src/hyperspell/resources/integrations/web_crawler.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,8 @@ def index(
self,
*,
url: str,
limit: int | NotGiven = NOT_GIVEN,
max_depth: int | NotGiven = NOT_GIVEN,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
Expand All @@ -56,6 +58,12 @@ def index(
Recursively crawl a website to make it available for indexed search.

Args:
url: The base URL of the website to crawl

limit: Maximum number of pages to crawl in total

max_depth: Maximum depth of links to follow during crawling

extra_headers: Send extra headers

extra_query: Add additional query parameters to the request
Expand All @@ -71,7 +79,14 @@ def index(
extra_query=extra_query,
extra_body=extra_body,
timeout=timeout,
query=maybe_transform({"url": url}, web_crawler_index_params.WebCrawlerIndexParams),
query=maybe_transform(
{
"url": url,
"limit": limit,
"max_depth": max_depth,
},
web_crawler_index_params.WebCrawlerIndexParams,
),
),
cast_to=WebCrawlerIndexResponse,
)
Expand Down Expand Up @@ -101,6 +116,8 @@ async def index(
self,
*,
url: str,
limit: int | NotGiven = NOT_GIVEN,
max_depth: int | NotGiven = NOT_GIVEN,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
Expand All @@ -112,6 +129,12 @@ async def index(
Recursively crawl a website to make it available for indexed search.

Args:
url: The base URL of the website to crawl

limit: Maximum number of pages to crawl in total

max_depth: Maximum depth of links to follow during crawling

extra_headers: Send extra headers

extra_query: Add additional query parameters to the request
Expand All @@ -127,7 +150,14 @@ async def index(
extra_query=extra_query,
extra_body=extra_body,
timeout=timeout,
query=await async_maybe_transform({"url": url}, web_crawler_index_params.WebCrawlerIndexParams),
query=await async_maybe_transform(
{
"url": url,
"limit": limit,
"max_depth": max_depth,
},
web_crawler_index_params.WebCrawlerIndexParams,
),
),
cast_to=WebCrawlerIndexResponse,
)
Expand Down
16 changes: 8 additions & 8 deletions src/hyperspell/resources/memories.py
Original file line number Diff line number Diff line change
Expand Up @@ -534,12 +534,12 @@ def upload(
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
) -> MemoryStatus:
"""This endpoint will upload a file to the index and return a document ID.
"""This endpoint will upload a file to the index and return a resource_id.

The file
will be processed in the background and the document will be available for
querying once the processing is complete. You can use the `document_id` to query
the document later, and check the status of the document.
will be processed in the background and the memory will be available for
querying once the processing is complete. You can use the `resource_id` to query
the memory later, and check the status of the memory.

Args:
file: The file to ingest.
Expand Down Expand Up @@ -1080,12 +1080,12 @@ async def upload(
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
) -> MemoryStatus:
"""This endpoint will upload a file to the index and return a document ID.
"""This endpoint will upload a file to the index and return a resource_id.

The file
will be processed in the background and the document will be available for
querying once the processing is complete. You can use the `document_id` to query
the document later, and check the status of the document.
will be processed in the background and the memory will be available for
querying once the processing is complete. You can use the `resource_id` to query
the memory later, and check the status of the memory.

Args:
file: The file to ingest.
Expand Down
7 changes: 7 additions & 0 deletions src/hyperspell/types/integrations/web_crawler_index_params.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,3 +9,10 @@

class WebCrawlerIndexParams(TypedDict, total=False):
    """Query parameters accepted by the web-crawler index endpoint.

    `total=False` makes every key optional unless wrapped in
    `Required[...]`, so only `url` must be supplied by callers.
    """

    url: Required[str]
    """The base URL of the website to crawl"""

    limit: int
    """Maximum number of pages to crawl in total"""

    max_depth: int
    """Maximum depth of links to follow during crawling"""
18 changes: 18 additions & 0 deletions tests/api_resources/integrations/test_web_crawler.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,15 @@ def test_method_index(self, client: Hyperspell) -> None:
)
assert_matches_type(WebCrawlerIndexResponse, web_crawler, path=["response"])

@parametrize
def test_method_index_with_all_params(self, client: Hyperspell) -> None:
    """Call `index` with every optional query parameter supplied and
    check the response matches the expected model type."""
    result = client.integrations.web_crawler.index(
        url="url",
        max_depth=0,
        limit=1,
    )
    assert_matches_type(WebCrawlerIndexResponse, result, path=["response"])

@parametrize
def test_raw_response_index(self, client: Hyperspell) -> None:
response = client.integrations.web_crawler.with_raw_response.index(
Expand Down Expand Up @@ -61,6 +70,15 @@ async def test_method_index(self, async_client: AsyncHyperspell) -> None:
)
assert_matches_type(WebCrawlerIndexResponse, web_crawler, path=["response"])

@parametrize
async def test_method_index_with_all_params(self, async_client: AsyncHyperspell) -> None:
    """Async variant: call `index` with every optional query parameter
    supplied and check the response matches the expected model type."""
    result = await async_client.integrations.web_crawler.index(
        url="url",
        max_depth=0,
        limit=1,
    )
    assert_matches_type(WebCrawlerIndexResponse, result, path=["response"])

@parametrize
async def test_raw_response_index(self, async_client: AsyncHyperspell) -> None:
response = await async_client.integrations.web_crawler.with_raw_response.index(
Expand Down