Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
13 changes: 13 additions & 0 deletions client_gen_config.md
Original file line number Diff line number Diff line change
Expand Up @@ -73,4 +73,17 @@ directive:
where: '$.paths."/v2/apps".post'
transform: >
$["parameters"] = [];

# Strip path-level and operation-level "servers" overrides from all paths.
# Without this, Autorest generates a duplicate `endpoint` parameter in
# GeneratedClient.__init__ (one per unique server URL), which is a Python
# syntax error. Runtime multi-base-URL routing is handled by
# _BaseURLProxy in custom_extensions.py instead.
- from: openapi-document
where: '$.paths.*'
transform: >
delete $["servers"];
for (const m of ["get","post","put","patch","delete","head","options"]) {
if ($[m] && $[m].servers) { delete $[m].servers; }
}
```
55 changes: 53 additions & 2 deletions src/pydo/_patch.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@
from azure.core.credentials import AccessToken

from pydo.custom_policies import CustomHttpLoggingPolicy
from pydo.custom_extensions import _BaseURLProxy, INFERENCE_BASE_URL
from pydo import GeneratedClient, _version

if TYPE_CHECKING:
Expand All @@ -32,13 +33,28 @@ def get_token(self, *args, **kwargs) -> AccessToken:
class Client(GeneratedClient): # type: ignore
"""The official DigitalOcean Python client

:param token: A valid API token.
:param token: A valid API token / model access key.
:type token: str
:keyword endpoint: Service URL. Default value is "https://api.digitalocean.com".
:paramtype endpoint: str
:keyword inference_endpoint: Serverless inference URL.
Default value is "https://inference.do-ai.run".
:paramtype inference_endpoint: str
:keyword agent_endpoint: Agent inference URL. Pass the per-agent
subdomain (e.g. ``"https://<id>.agents.do-ai.run"``).
Required only when using agent inference endpoints.
:paramtype agent_endpoint: str
"""

def __init__(self, token: str, *, timeout: int = 120, **kwargs):
def __init__(
self,
token: str,
*,
timeout: int = 120,
inference_endpoint: str = INFERENCE_BASE_URL,
agent_endpoint: str = "",
**kwargs,
):
logger = kwargs.get("logger")
if logger is not None and kwargs.get("http_logging_policy") == "":
kwargs["http_logging_policy"] = CustomHttpLoggingPolicy(logger=logger)
Expand All @@ -48,6 +64,41 @@ def __init__(self, token: str, *, timeout: int = 120, **kwargs):
TokenCredentials(token), timeout=timeout, sdk_moniker=sdk_moniker, **kwargs
)

self._setup_inference_routing(inference_endpoint, agent_endpoint)

def _setup_inference_routing(
    self,
    inference_endpoint: str,
    agent_endpoint: str,
) -> None:
    """Point inference operation groups at their dedicated base URLs.

    Operation groups whose class name begins with ``Inference`` (but not
    ``AgentInference``) are rerouted through *inference_endpoint*; groups
    beginning with ``AgentInference`` go through *agent_endpoint*, and only
    when one was supplied. Every proxy reuses the pipeline client (and so
    the token) configured by ``Client.__init__``.
    """
    # Build the proxies up front; the agent one only exists if the caller
    # actually provided an agent endpoint.
    serverless_proxy = _BaseURLProxy(self._client, inference_endpoint)
    agent_proxy = _BaseURLProxy(self._client, agent_endpoint) if agent_endpoint else None

    for group in vars(self).values():
        # Operation-group instances are recognized by their `_client` slot.
        if not hasattr(group, "_client"):
            continue
        group_name = type(group).__name__
        if group_name.startswith("AgentInference"):
            # Without an agent endpoint, agent groups keep the default
            # client (matches the original behavior).
            if agent_proxy is not None:
                group._client = agent_proxy
        elif group_name.startswith("Inference"):
            group._client = serverless_proxy


__all__ = ["Client"]

Expand Down
57 changes: 54 additions & 3 deletions src/pydo/aio/_patch.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@

from pydo import _version
from pydo.custom_policies import CustomHttpLoggingPolicy
from pydo.custom_extensions import _BaseURLProxy, INFERENCE_BASE_URL
from pydo.aio import GeneratedClient

if TYPE_CHECKING:
Expand All @@ -34,15 +35,30 @@ async def get_token(self, *args, **kwargs) -> AccessToken:


class Client(GeneratedClient): # type: ignore
"""The official DigitalOcean Python client
"""The official DigitalOcean Python client (async)

:param token: A valid API token.
:param token: A valid API token / model access key.
:type token: str
:keyword endpoint: Service URL. Default value is "https://api.digitalocean.com".
:paramtype endpoint: str
:keyword inference_endpoint: Serverless inference URL.
Default value is "https://inference.do-ai.run".
:paramtype inference_endpoint: str
:keyword agent_endpoint: Agent inference URL. Pass the per-agent
subdomain (e.g. ``"https://<id>.agents.do-ai.run"``).
Required only when using agent inference endpoints.
:paramtype agent_endpoint: str
"""

def __init__(self, token: str, *, timeout: int = 120, **kwargs):
def __init__(
self,
token: str,
*,
timeout: int = 120,
inference_endpoint: str = INFERENCE_BASE_URL,
agent_endpoint: str = "",
**kwargs,
):
logger = kwargs.get("logger")
if logger is not None and kwargs.get("http_logging_policy") == "":
kwargs["http_logging_policy"] = CustomHttpLoggingPolicy(logger=logger)
Expand All @@ -52,6 +68,41 @@ def __init__(self, token: str, *, timeout: int = 120, **kwargs):
TokenCredentials(token), timeout=timeout, sdk_moniker=sdk_moniker, **kwargs
)

self._setup_inference_routing(inference_endpoint, agent_endpoint)

def _setup_inference_routing(
    self,
    inference_endpoint: str,
    agent_endpoint: str,
) -> None:
    """Point inference operation groups at their dedicated base URLs.

    Async mirror of the sync client's routing: class names starting with
    ``Inference`` (but not ``AgentInference``) are sent through
    *inference_endpoint*; names starting with ``AgentInference`` are sent
    through *agent_endpoint* when one was supplied. All proxies share the
    pipeline client (and token) set up by ``Client.__init__``.
    """
    # The agent proxy is optional -- it exists only when the caller passed
    # an agent endpoint; the serverless proxy always exists.
    serverless_proxy = _BaseURLProxy(self._client, inference_endpoint)
    agent_proxy = _BaseURLProxy(self._client, agent_endpoint) if agent_endpoint else None

    for group in vars(self).values():
        # Only operation-group instances carry a `_client` attribute.
        if not hasattr(group, "_client"):
            continue
        group_name = type(group).__name__
        if group_name.startswith("AgentInference"):
            # No agent endpoint -> agent groups keep the default client,
            # exactly as before.
            if agent_proxy is not None:
                group._client = agent_proxy
        elif group_name.startswith("Inference"):
            group._client = serverless_proxy


# Add all objects you want publicly available to users at this package level
__all__ = ["Client"] # type: List[str]
Expand Down
86 changes: 83 additions & 3 deletions src/pydo/aio/operations/_patch.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,16 +5,96 @@
"""Customize generated code here.

Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize

Async mirror of ``pydo/operations/_patch.py``. See that module for the
full design rationale. If no inference / agent-inference operations have
been generated, this module exports nothing.
"""
from typing import TYPE_CHECKING

if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import List

__all__ = (
[]
) # type: List[str] # Add all objects you want publicly available to users at this package level
# ---------------------------------------------------------------------------
# Serverless Inference operations (async)
# ---------------------------------------------------------------------------

# Detect whether the generator emitted serverless-inference operations.
# On specs without them the relative import fails and this module degrades
# to exporting nothing.
try:
    from ._operations import InferenceOperations as _GeneratedInferenceOperations
    import pydo.operations._operations as _ops
except ImportError:
    _HAS_INFERENCE = False
else:
    _HAS_INFERENCE = True

if _HAS_INFERENCE:
    # Imported lazily: custom_extensions is only needed when the generated
    # inference operations actually exist.
    from pydo.custom_extensions import (
        AsyncStreamingMixin,
        install_streaming_wrappers,
    )

    class InferenceOperations(AsyncStreamingMixin, _GeneratedInferenceOperations):
        """Async InferenceOperations with fully automatic streaming support.

        Mirror of the sync version in ``pydo/operations/_patch.py``.
        """

        def __init__(self, *args, **kwargs):
            super().__init__(*args, **kwargs)
            # Rewrap the generated operation methods with streaming-aware
            # versions; is_async=True selects the async wrapper variants.
            # NOTE(review): `_ops` is the *sync* generated module imported
            # above -- presumably it supplies shared helpers; confirm in
            # install_streaming_wrappers.
            install_streaming_wrappers(
                self, _GeneratedInferenceOperations, _ops, is_async=True
            )


# ---------------------------------------------------------------------------
# Agent Inference operations (async)
# ---------------------------------------------------------------------------

# Same detection dance for the agent-inference operation group.
try:
    from ._operations import (
        AgentInferenceOperations as _GeneratedAgentInferenceOperations,
    )

    # `_ops` may already be bound by the serverless-inference guard above;
    # only import it here if that guard failed.
    if not _HAS_INFERENCE:
        import pydo.operations._operations as _ops
except ImportError:
    _HAS_AGENT_INFERENCE = False
else:
    _HAS_AGENT_INFERENCE = True

if _HAS_AGENT_INFERENCE:
    # These names may already be in scope from the serverless-inference
    # block; import them here only if that block was skipped.
    if not _HAS_INFERENCE:
        from pydo.custom_extensions import (
            AsyncStreamingMixin,
            install_streaming_wrappers,
        )

    class AgentInferenceOperations(
        AsyncStreamingMixin, _GeneratedAgentInferenceOperations
    ):
        """Async AgentInferenceOperations with fully automatic streaming support.

        Mirror of the sync version in ``pydo/operations/_patch.py``.
        """

        def __init__(self, *args, **kwargs):
            super().__init__(*args, **kwargs)
            # Rewrap the generated operation methods with streaming-aware
            # versions; is_async=True selects the async wrapper variants.
            install_streaming_wrappers(
                self, _GeneratedAgentInferenceOperations, _ops, is_async=True
            )


# ---------------------------------------------------------------------------
# Exports
# ---------------------------------------------------------------------------

# Export only the wrapper classes that were actually defined above.
__all__ = [
    name
    for name, generated in (
        ("InferenceOperations", _HAS_INFERENCE),
        ("AgentInferenceOperations", _HAS_AGENT_INFERENCE),
    )
    if generated
]  # type: ignore[assignment]


def patch_sdk():
Expand Down
Loading
Loading