forked from sankalp1999/code_qa
-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathmodel_executor.py
More file actions
44 lines (38 loc) · 1.27 KB
/
model_executor.py
File metadata and controls
44 lines (38 loc) · 1.27 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
import os
from openai import OpenAI
import openai
from ollama import Client
# --- Module-level API clients, constructed once at import time ---

# OpenAI client setup
# Reads OPENAI_API_KEY from the environment; if unset this is None and the
# first API call will fail with an authentication error.
openai_client = OpenAI(api_key=os.environ.get("OPENAI_API_KEY"))
# SambaNova exposes an OpenAI-compatible API, so the same OpenAI client class
# is reused, pointed at SambaNova's base URL with its own API key.
sambanova_client = OpenAI(
    api_key=os.environ.get("SAMBANOVA_API_KEY"),
    base_url="https://api.sambanova.ai/v1",
)
# Local Ollama client setup
# OLLAMA_SERVER overrides the default local endpoint (e.g. a remote host);
# falls back to the standard local Ollama port 11434.
ollama_client = Client(
    host=os.environ.get("OLLAMA_SERVER", "http://localhost:11434"),
    # No extra HTTP headers are needed for a default local server.
    headers={
    }
)
def call_openai_model(client, model, messages, max_tokens=400):
    """Run one chat completion against an OpenAI-compatible client.

    Args:
        client: An OpenAI-SDK-style client exposing ``chat.completions.create``
            (used for both the real OpenAI API and SambaNova's compatible API).
        model: Model identifier string understood by the backend.
        messages: Chat history as a list of role/content message dicts.
        max_tokens: Cap on the number of tokens the model may generate.

    Returns:
        The text content of the first returned choice.
    """
    completion = client.chat.completions.create(
        model=model,
        messages=messages,
        max_tokens=max_tokens,
    )
    first_choice = completion.choices[0]
    return first_choice.message.content
def call_ollama_model(model, messages, max_tokens=400):
    """Run one chat completion against the module-level local Ollama client.

    Args:
        model: Name of a model available to the Ollama server.
        messages: Chat history as a list of role/content message dicts.
        max_tokens: Cap on the number of tokens the model may generate.

    Returns:
        The text content of the model's reply message.
    """
    # Bug fix: max_tokens was previously accepted but silently ignored.
    # Ollama expresses the generation cap via the `num_predict` option.
    response = ollama_client.chat(
        model=model,
        messages=messages,
        options={"num_predict": max_tokens},
    )
    return response.message.content
def call_ai_model(client_type, model, messages, max_tokens=400):
    """Dispatch a chat request to the backend named by *client_type*.

    Args:
        client_type: One of ``"openai"``, ``"sambanova"`` or ``"ollama"``.
        model: Model identifier understood by the chosen backend.
        messages: Chat history as a list of role/content message dicts.
        max_tokens: Cap on the number of tokens the model may generate.

    Returns:
        The reply text produced by the selected backend.

    Raises:
        ValueError: If *client_type* is not one of the supported backends.
    """
    if client_type == "openai":
        return call_openai_model(openai_client, model, messages, max_tokens)
    elif client_type == "sambanova":
        return call_openai_model(sambanova_client, model, messages, max_tokens)
    elif client_type == "ollama":
        # Bug fix: max_tokens was previously dropped on this branch, so the
        # caller's limit never reached the Ollama backend.
        return call_ollama_model(model, messages, max_tokens)
    else:
        # Include the bad value so the caller can see what was passed.
        raise ValueError(f"Invalid client type: {client_type!r}")