Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion apps/sim/app/api/copilot/chat/route.ts
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ const ChatMessageSchema = z.object({
userMessageId: z.string().optional(), // ID from frontend for the user message
chatId: z.string().optional(),
workflowId: z.string().min(1, 'Workflow ID is required'),
model: z.enum(COPILOT_MODEL_IDS).optional().default('claude-4.5-opus'),
model: z.enum(COPILOT_MODEL_IDS).optional().default('claude-4.5-opus'),
mode: z.enum(COPILOT_REQUEST_MODES).optional().default('agent'),
prefetch: z.boolean().optional(),
createNewChat: z.boolean().optional().default(false),
Expand Down
69 changes: 69 additions & 0 deletions apps/sim/app/api/providers/litellm/models/route.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,69 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { env } from '@/lib/core/config/env'
import { filterBlacklistedModels, isProviderBlacklisted } from '@/providers/utils'

const logger = createLogger('LiteLLMModelsAPI')

/**
 * GET /api/providers/litellm/models
 *
 * Returns the models exposed by the configured LiteLLM proxy, each prefixed
 * with `litellm/` and filtered against the provider/model blacklist.
 *
 * Always responds 200 with `{ models: string[] }` — misconfiguration,
 * upstream errors, and malformed payloads all degrade to an empty list so
 * the client-side model loader never has to special-case failures.
 */
export async function GET(_request: NextRequest) {
  // Respect the provider blacklist before doing any network work.
  if (isProviderBlacklisted('litellm')) {
    logger.info('LiteLLM provider is blacklisted, returning empty models')
    return NextResponse.json({ models: [] })
  }

  // Normalize the base URL (strip one trailing slash) so path joining is stable.
  const baseUrl = (env.LITELLM_BASE_URL || '').replace(/\/$/, '')

  if (!baseUrl) {
    logger.info('LITELLM_BASE_URL not configured')
    return NextResponse.json({ models: [] })
  }

  try {
    logger.info('Fetching LiteLLM models', {
      baseUrl,
    })

    const headers: Record<string, string> = {
      'Content-Type': 'application/json',
    }

    // The bearer token is optional — LiteLLM proxies may run without auth.
    if (env.LITELLM_API_KEY) {
      headers.Authorization = `Bearer ${env.LITELLM_API_KEY}`
    }

    const response = await fetch(`${baseUrl}/v1/models`, {
      headers,
      // Cache the upstream model list for 60s via the Next.js data cache.
      next: { revalidate: 60 },
    })

    if (!response.ok) {
      logger.warn('LiteLLM service is not available', {
        status: response.status,
        statusText: response.statusText,
      })
      return NextResponse.json({ models: [] })
    }

    // Validate the OpenAI-compatible shape instead of blindly asserting it:
    // a misconfigured proxy could return anything, and a bare assertion would
    // surface as an opaque TypeError in the catch block below.
    const payload = (await response.json()) as unknown
    const rawEntries =
      payload !== null && typeof payload === 'object' && Array.isArray((payload as { data?: unknown }).data)
        ? (payload as { data: unknown[] }).data
        : null

    if (rawEntries === null) {
      logger.warn('LiteLLM returned an unexpected payload shape', { baseUrl })
      return NextResponse.json({ models: [] })
    }

    // Keep only entries that actually carry a string id; anything else would
    // otherwise become a bogus 'litellm/undefined' model.
    const allModels = rawEntries
      .filter(
        (entry): entry is { id: string } =>
          entry !== null && typeof entry === 'object' && typeof (entry as { id?: unknown }).id === 'string'
      )
      .map((entry) => `litellm/${entry.id}`)
    const models = filterBlacklistedModels(allModels)

    // Log counts only — the full model list can be large and is not needed
    // for observability.
    logger.info('Successfully fetched LiteLLM models', {
      count: models.length,
      filtered: allModels.length - models.length,
    })

    return NextResponse.json({ models })
  } catch (error) {
    logger.error('Failed to fetch LiteLLM models', {
      error: error instanceof Error ? error.message : 'Unknown error',
      baseUrl,
    })

    return NextResponse.json({ models: [] })
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ import { useEffect } from 'react'
import { createLogger } from '@sim/logger'
import { useProviderModels } from '@/hooks/queries/providers'
import {
updateLiteLLMProviderModels,
updateOllamaProviderModels,
updateOpenRouterProviderModels,
updateVLLMProviderModels,
Expand All @@ -30,6 +31,8 @@ function useSyncProvider(provider: ProviderName) {
updateOllamaProviderModels(data.models)
} else if (provider === 'vllm') {
updateVLLMProviderModels(data.models)
} else if (provider === 'litellm') {
updateLiteLLMProviderModels(data.models)
} else if (provider === 'openrouter') {
void updateOpenRouterProviderModels(data.models)
if (data.modelInfo) {
Expand All @@ -54,6 +57,7 @@ export function ProviderModelsLoader() {
useSyncProvider('base')
useSyncProvider('ollama')
useSyncProvider('vllm')
useSyncProvider('litellm')
useSyncProvider('openrouter')
return null
}
Original file line number Diff line number Diff line change
Expand Up @@ -241,6 +241,7 @@ export const MODEL_OPTIONS = [
{ value: 'gpt-5.2-codex', label: 'GPT 5.2 Codex' },
{ value: 'gpt-5.2-pro', label: 'GPT 5.2 Pro' },
{ value: 'gemini-3-pro', label: 'Gemini 3 Pro' },
{ value: 'litellm', label: 'LiteLLM' },
] as const

/**
Expand Down
11 changes: 11 additions & 0 deletions apps/sim/components/icons.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -3679,6 +3679,17 @@ export function VllmIcon(props: SVGProps<SVGSVGElement>) {
)
}

export function LiteLLMIcon(props: SVGProps<SVGSVGElement>) {
return (
<svg {...props} viewBox='0 0 24 24' xmlns='http://www.w3.org/2000/svg'>
<title>LiteLLM</title>
<text x='12' y='17' fontSize='16' textAnchor='middle' dominantBaseline='middle'>
🚅
</text>
</svg>
)
}

export function PosthogIcon(props: SVGProps<SVGSVGElement>) {
return (
<svg
Expand Down
1 change: 1 addition & 0 deletions apps/sim/hooks/queries/providers.ts
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ const providerEndpoints: Record<ProviderName, string> = {
base: '/api/providers/base/models',
ollama: '/api/providers/ollama/models',
vllm: '/api/providers/vllm/models',
litellm: '/api/providers/litellm/models',
openrouter: '/api/providers/openrouter/models',
}

Expand Down
2 changes: 1 addition & 1 deletion apps/sim/lib/copilot/api.ts
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,7 @@ export interface SendMessageRequest {
userMessageId?: string // ID from frontend for the user message
chatId?: string
workflowId?: string
mode?: CopilotMode | CopilotTransportMode
mode?: CopilotMode | CopilotTransportMode
model?: CopilotModelId
prefetch?: boolean
createNewChat?: boolean
Expand Down
1 change: 1 addition & 0 deletions apps/sim/lib/copilot/config.ts
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ const VALID_PROVIDER_IDS: readonly ProviderId[] = [
'mistral',
'groq',
'ollama',
'litellm',
] as const

/**
Expand Down
1 change: 1 addition & 0 deletions apps/sim/lib/copilot/models.ts
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ export const COPILOT_MODEL_IDS = [
'claude-4.5-opus',
'claude-4.1-opus',
'gemini-3-pro',
'litellm',
] as const

export type CopilotModelId = (typeof COPILOT_MODEL_IDS)[number]
Expand Down
2 changes: 2 additions & 0 deletions apps/sim/lib/core/config/env.ts
Original file line number Diff line number Diff line change
Expand Up @@ -85,6 +85,8 @@ export const env = createEnv({
OLLAMA_URL: z.string().url().optional(), // Ollama local LLM server URL
VLLM_BASE_URL: z.string().url().optional(), // vLLM self-hosted base URL (OpenAI-compatible)
VLLM_API_KEY: z.string().optional(), // Optional bearer token for vLLM
LITELLM_BASE_URL: z.string().url().optional(), // LiteLLM proxy base URL (OpenAI-compatible)
LITELLM_API_KEY: z.string().optional(), // Optional bearer token for LiteLLM
ELEVENLABS_API_KEY: z.string().min(1).optional(), // ElevenLabs API key for text-to-speech in deployed chat
SERPER_API_KEY: z.string().min(1).optional(), // Serper API key for online search
EXA_API_KEY: z.string().min(1).optional(), // Exa AI API key for enhanced online search
Expand Down
Loading