-
Notifications
You must be signed in to change notification settings - Fork 623
Add a dedicated OpenAI-compatible LLM adapter #1895
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Open
jimmyzhuu
wants to merge
5
commits into
Zipstack:main
Choose a base branch
from
jimmyzhuu:codex/openai-compatible-llm-adapter
base: main
Could not load branches
Branch not found: {{ refName }}
Loading
Could not load tags
Nothing to show
Loading
Are you sure you want to change the base?
Some commits from the old base branch may be removed from the timeline,
and old review comments may become outdated.
Open
Changes from all commits
Commits
Show all changes
5 commits
Select commit
Hold shift + click to select a range
8b97716
Add OpenAI-compatible LLM adapter
jimmyzhuu 5090773
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] f1d6dff
Address review feedback for custom OpenAI adapter
jimmyzhuu b4d0af1
Fix import formatting after rebase
jimmyzhuu d3e1cad
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] File filter
Filter by extension
Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
There are no files selected for viewing
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
40 changes: 40 additions & 0 deletions
40
unstract/sdk1/src/unstract/sdk1/adapters/llm1/openai_compatible.py
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,40 @@ | ||
| from typing import Any | ||
|
|
||
| from unstract.sdk1.adapters.base1 import BaseAdapter, OpenAICompatibleLLMParameters | ||
| from unstract.sdk1.adapters.enums import AdapterTypes | ||
|
|
||
|
|
||
class OpenAICompatibleLLMAdapter(OpenAICompatibleLLMParameters, BaseAdapter):
    """LLM adapter for services exposing an OpenAI-compatible API."""

    @staticmethod
    def get_id() -> str:
        # Stable registry identifier in the "<slug>|<uuid>" form.
        return "openaicompatible|b6d10f33-2c41-49fc-a8c2-58d2b247fc09"

    @staticmethod
    def get_metadata() -> dict[str, Any]:
        # Reuse the accessors below so name/description stay in one place.
        return {
            "name": OpenAICompatibleLLMAdapter.get_name(),
            "version": "1.0.0",
            "adapter": OpenAICompatibleLLMAdapter,
            "description": OpenAICompatibleLLMAdapter.get_description(),
            "is_active": True,
        }

    @staticmethod
    def get_name() -> str:
        return "OpenAI Compatible"

    @staticmethod
    def get_description() -> str:
        return "OpenAI-compatible LLM adapter"

    @staticmethod
    def get_provider() -> str:
        # Provider key for this adapter; the accompanying tests show it is
        # also used as the model-name prefix ("custom_openai/<model>").
        return "custom_openai"

    @staticmethod
    def get_icon() -> str:
        return "/icons/adapter-icons/OpenAI.png"

    @staticmethod
    def get_adapter_type() -> AdapterTypes:
        return AdapterTypes.LLM
62 changes: 62 additions & 0 deletions
62
unstract/sdk1/src/unstract/sdk1/adapters/llm1/static/custom_openai.json
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,62 @@ | ||
| { | ||
| "title": "OpenAI Compatible LLM", | ||
| "type": "object", | ||
| "required": [ | ||
| "adapter_name", | ||
| "api_base" | ||
| ], | ||
| "properties": { | ||
| "adapter_name": { | ||
| "type": "string", | ||
| "title": "Name", | ||
| "default": "", | ||
| "description": "Provide a unique name for this adapter instance. Example: compatible-gateway-1" | ||
| }, | ||
| "api_key": { | ||
| "type": [ | ||
| "string", | ||
| "null" | ||
| ], | ||
| "title": "API Key", | ||
| "format": "password", | ||
| "description": "API key for your OpenAI-compatible endpoint. Leave empty if the endpoint does not require one." | ||
| }, | ||
|
coderabbitai[bot] marked this conversation as resolved.
|
||
| "model": { | ||
| "type": "string", | ||
| "title": "Model", | ||
| "default": "gpt-4o-mini", | ||
| "description": "The model name expected by your OpenAI-compatible endpoint. Examples: gpt-4o-mini, ERNIE-4.0-8K (Baidu Qianfan), qwen-max, openai/gpt-4o" | ||
| }, | ||
| "api_base": { | ||
| "type": "string", | ||
| "format": "url", | ||
| "title": "API Base", | ||
| "default": "https://your-endpoint.example.com/v1", | ||
| "description": "Base URL for the OpenAI-compatible endpoint. Examples: https://your-endpoint.example.com/v1, https://qianfan.baidubce.com/v2" | ||
| }, | ||
| "max_tokens": { | ||
| "type": "number", | ||
| "minimum": 0, | ||
| "multipleOf": 1, | ||
| "title": "Maximum Output Tokens", | ||
| "default": 4096, | ||
| "description": "Maximum number of output tokens to limit LLM replies. Leave it empty to use the provider default." | ||
| }, | ||
| "max_retries": { | ||
| "type": "number", | ||
| "minimum": 0, | ||
| "multipleOf": 1, | ||
| "title": "Max Retries", | ||
| "default": 5, | ||
| "description": "The maximum number of times to retry a request if it fails." | ||
| }, | ||
| "timeout": { | ||
| "type": "number", | ||
| "minimum": 0, | ||
| "multipleOf": 1, | ||
| "title": "Timeout", | ||
| "default": 900, | ||
| "description": "Timeout in seconds." | ||
| } | ||
| } | ||
| } | ||
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,117 @@ | ||
| import json | ||
| from functools import lru_cache | ||
| from importlib import import_module | ||
| from unittest.mock import MagicMock, patch | ||
|
|
||
| from unstract.sdk1.adapters.base1 import OpenAICompatibleLLMParameters | ||
| from unstract.sdk1.adapters.constants import Common | ||
| from unstract.sdk1.adapters.llm1 import adapters | ||
| from unstract.sdk1.adapters.llm1.openai_compatible import OpenAICompatibleLLMAdapter | ||
|
|
||
|
|
||
@lru_cache(maxsize=1)
def _load_llm_module() -> object:
    """Import and memoize ``unstract.sdk1.llm``.

    A stub ``magic`` module is patched into ``sys.modules`` for the duration
    of the import so the test run does not depend on python-magic / libmagic
    being installed in the environment.
    """
    import sys
    from types import ModuleType

    stubbed_modules = {"magic": ModuleType("magic")}
    with patch.dict(sys.modules, stubbed_modules):
        llm_module = import_module("unstract.sdk1.llm")
    return llm_module
|
|
||
|
|
||
def _load_llm_class() -> type:
    """Return the ``LLM`` class from the lazily imported llm module."""
    llm_module = _load_llm_module()
    return llm_module.LLM
|
|
||
|
|
||
def test_openai_compatible_adapter_is_registered() -> None:
    """The adapter must be discoverable through the llm1 adapter registry."""
    registered_id = OpenAICompatibleLLMAdapter.get_id()

    assert registered_id in adapters
    registry_entry = adapters[registered_id]
    assert registry_entry[Common.MODULE] is OpenAICompatibleLLMAdapter
|
|
||
|
|
||
def test_openai_compatible_validate_prefixes_model() -> None:
    """``validate`` should prepend the provider prefix to bare model names."""
    raw_params = {
        "api_base": "https://gateway.example.com/v1",
        "api_key": "test-key",
        "model": "ERNIE-4.0-8K",
    }

    result = OpenAICompatibleLLMParameters.validate(raw_params)

    assert result["model"] == "custom_openai/ERNIE-4.0-8K"
|
|
||
|
|
||
def test_openai_compatible_validate_preserves_prefixed_model() -> None:
    """``validate`` must not double-prefix an already prefixed model name."""
    raw_params = {
        "api_base": "https://gateway.example.com/v1",
        "model": "custom_openai/openai/gpt-4o",
    }

    result = OpenAICompatibleLLMParameters.validate(raw_params)

    # The model passes through untouched and a missing api_key becomes None.
    assert result["model"] == "custom_openai/openai/gpt-4o"
    assert result["api_key"] is None
|
|
||
|
|
||
def test_openai_compatible_schema_is_loadable() -> None:
    """The bundled JSON schema must parse and expose the expected fields."""
    raw_schema = OpenAICompatibleLLMAdapter.get_json_schema()
    schema = json.loads(raw_schema)

    assert schema["title"] == "OpenAI Compatible LLM"
    properties = schema["properties"]
    assert properties["api_key"]["type"] == ["string", "null"]
    assert "ERNIE-4.0-8K" in properties["model"]["description"]
|
|
||
|
|
||
def test_record_usage_uses_reported_prompt_tokens_without_estimating() -> None:
    """Provider-reported prompt tokens are trusted; no local re-counting."""
    module = _load_llm_module()
    llm_class = module.LLM

    # Build an instance without running __init__ so no real setup occurs.
    instance = llm_class.__new__(llm_class)
    instance._platform_api_key = "platform-key"
    instance.platform_kwargs = {"run_id": "run-1"}
    instance.adapter = MagicMock()
    instance.adapter.get_provider.return_value = "custom_openai"

    reported_usage = {"prompt_tokens": 3, "completion_tokens": 4, "total_tokens": 7}
    with (
        patch.object(module, "token_counter") as counter_spy,
        patch.object(module, "Audit") as audit_spy,
    ):
        instance._record_usage(
            model="custom_openai/ERNIE-4.0-8K",
            messages=[{"role": "user", "content": "hello"}],
            usage=reported_usage,
            llm_api="complete",
        )

    # Since prompt_tokens was present, no estimation path should run.
    counter_spy.assert_not_called()
    audit_spy.return_value.push_usage_data.assert_called_once()
|
|
||
|
|
||
def test_record_usage_tolerates_unmapped_models_without_prompt_tokens() -> None:
    """Usage is still pushed even when token counting fails for the model."""
    module = _load_llm_module()
    llm_class = module.LLM

    # Bare instance (skip __init__); set only what _record_usage reads.
    instance = llm_class.__new__(llm_class)
    instance._platform_api_key = "platform-key"
    instance.platform_kwargs = {"run_id": "run-1"}
    instance.adapter = MagicMock()
    instance.adapter.get_provider.return_value = "custom_openai"

    with (
        patch.object(module, "token_counter", side_effect=Exception("unmapped")),
        patch.object(module, "Audit") as audit_spy,
    ):
        instance._record_usage(
            model="custom_openai/ERNIE-4.0-8K",
            messages=[{"role": "user", "content": "hello"}],
            usage={"completion_tokens": 4, "total_tokens": 7},
            llm_api="complete",
        )

    # A token_counter failure must not prevent the audit push.
    audit_spy.return_value.push_usage_data.assert_called_once()
Add this suggestion to a batch that can be applied as a single commit.
This suggestion is invalid because no changes were made to the code.
Suggestions cannot be applied while the pull request is closed.
Suggestions cannot be applied while viewing a subset of changes.
Only one suggestion per line can be applied in a batch.
Add this suggestion to a batch that can be applied as a single commit.
Applying suggestions on deleted lines is not supported.
You must change the existing code in this line in order to create a valid suggestion.
Outdated suggestions cannot be applied.
This suggestion has been applied or marked resolved.
Suggestions cannot be applied from pending reviews.
Suggestions cannot be applied on multi-line comments.
Suggestions cannot be applied while the pull request is queued to merge.
Suggestion cannot be applied right now. Please check back later.
Uh oh!
There was an error while loading. Please reload this page.