Commit 018b7d0 (1 parent: 3768c63)
Author: Aditya Puranik

feat(providers): add LiteLLM provider for Agent blocks

13 files changed: 811 additions, 2 deletions
apps/sim/app/api/providers/litellm/models/route.ts

Lines changed: 69 additions & 0 deletions
@@ -0,0 +1,69 @@
+import { createLogger } from '@sim/logger'
+import { type NextRequest, NextResponse } from 'next/server'
+import { env } from '@/lib/core/config/env'
+import { filterBlacklistedModels, isProviderBlacklisted } from '@/providers/utils'
+
+const logger = createLogger('LiteLLMModelsAPI')
+
+/**
+ * Get available LiteLLM models
+ */
+export async function GET(_request: NextRequest) {
+  if (isProviderBlacklisted('litellm')) {
+    logger.info('LiteLLM provider is blacklisted, returning empty models')
+    return NextResponse.json({ models: [] })
+  }
+
+  const baseUrl = (env.LITELLM_BASE_URL || '').replace(/\/$/, '')
+
+  if (!baseUrl) {
+    logger.info('LITELLM_BASE_URL not configured')
+    return NextResponse.json({ models: [] })
+  }
+
+  try {
+    logger.info('Fetching LiteLLM models', {
+      baseUrl,
+    })
+
+    const headers: Record<string, string> = {
+      'Content-Type': 'application/json',
+    }
+
+    if (env.LITELLM_API_KEY) {
+      headers.Authorization = `Bearer ${env.LITELLM_API_KEY}`
+    }
+
+    const response = await fetch(`${baseUrl}/v1/models`, {
+      headers,
+      next: { revalidate: 60 },
+    })
+
+    if (!response.ok) {
+      logger.warn('LiteLLM service is not available', {
+        status: response.status,
+        statusText: response.statusText,
+      })
+      return NextResponse.json({ models: [] })
+    }
+
+    const data = (await response.json()) as { data: Array<{ id: string }> }
+    const allModels = data.data.map((model) => `litellm/${model.id}`)
+    const models = filterBlacklistedModels(allModels)
+
+    logger.info('Successfully fetched LiteLLM models', {
+      count: models.length,
+      filtered: allModels.length - models.length,
+      models,
+    })
+
+    return NextResponse.json({ models })
+  } catch (error) {
+    logger.error('Failed to fetch LiteLLM models', {
+      error: error instanceof Error ? error.message : 'Unknown error',
+      baseUrl,
+    })
+
+    return NextResponse.json({ models: [] })
+  }
+}
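
The route treats the configured LiteLLM endpoint as an OpenAI-compatible server: it fetches the proxy's /v1/models listing and republishes each id under a litellm/ prefix, after dropping blacklisted entries. A minimal sketch of the two payload shapes involved, using hypothetical model names purely for illustration:

// Shape of the LiteLLM proxy listing consumed above (GET `${baseUrl}/v1/models`).
interface LiteLLMModelsResponse {
  data: Array<{ id: string }>
}

// Shape returned by this route (GET /api/providers/litellm/models).
interface ProviderModelsResponse {
  models: string[]
}

// A hypothetical proxy exposing two model aliases...
const upstream: LiteLLMModelsResponse = {
  data: [{ id: 'gpt-4o' }, { id: 'claude-sonnet' }],
}

// ...is mapped to prefixed ids, mirroring the `litellm/${model.id}` mapping above
// (blacklist filtering omitted in this sketch).
const result: ProviderModelsResponse = {
  models: upstream.data.map((m) => `litellm/${m.id}`), // ['litellm/gpt-4o', 'litellm/claude-sonnet']
}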

apps/sim/app/workspace/[workspaceId]/providers/provider-models-loader.tsx

Lines changed: 4 additions & 0 deletions
@@ -4,6 +4,7 @@ import { useEffect } from 'react'
 import { createLogger } from '@sim/logger'
 import { useProviderModels } from '@/hooks/queries/providers'
 import {
+  updateLiteLLMProviderModels,
   updateOllamaProviderModels,
   updateOpenRouterProviderModels,
   updateVLLMProviderModels,
@@ -30,6 +31,8 @@ function useSyncProvider(provider: ProviderName) {
      updateOllamaProviderModels(data.models)
    } else if (provider === 'vllm') {
      updateVLLMProviderModels(data.models)
+    } else if (provider === 'litellm') {
+      updateLiteLLMProviderModels(data.models)
    } else if (provider === 'openrouter') {
      void updateOpenRouterProviderModels(data.models)
      if (data.modelInfo) {
@@ -54,6 +57,7 @@ export function ProviderModelsLoader() {
   useSyncProvider('base')
   useSyncProvider('ollama')
   useSyncProvider('vllm')
+  useSyncProvider('litellm')
   useSyncProvider('openrouter')
   return null
 }

apps/sim/components/icons.tsx

Lines changed: 16 additions & 0 deletions
@@ -3679,6 +3679,22 @@ export function VllmIcon(props: SVGProps<SVGSVGElement>) {
   )
 }
 
+export function LiteLLMIcon(props: SVGProps<SVGSVGElement>) {
+  return (
+    <svg {...props} viewBox='0 0 24 24' xmlns='http://www.w3.org/2000/svg'>
+      <title>LiteLLM</title>
+      <path
+        d='M12 2L2 7l10 5 10-5-10-5zM2 17l10 5 10-5M2 12l10 5 10-5'
+        stroke='#10B981'
+        strokeWidth='2'
+        strokeLinecap='round'
+        strokeLinejoin='round'
+        fill='none'
+      />
+    </svg>
+  )
+}
+
 export function PosthogIcon(props: SVGProps<SVGSVGElement>) {
   return (
     <svg

apps/sim/hooks/queries/providers.ts

Lines changed: 1 addition & 0 deletions
@@ -8,6 +8,7 @@ const providerEndpoints: Record<ProviderName, string> = {
   base: '/api/providers/base/models',
   ollama: '/api/providers/ollama/models',
   vllm: '/api/providers/vllm/models',
+  litellm: '/api/providers/litellm/models',
   openrouter: '/api/providers/openrouter/models',
 }

apps/sim/lib/core/config/env.ts

Lines changed: 2 additions & 0 deletions
@@ -85,6 +85,8 @@ export const env = createEnv({
     OLLAMA_URL: z.string().url().optional(), // Ollama local LLM server URL
     VLLM_BASE_URL: z.string().url().optional(), // vLLM self-hosted base URL (OpenAI-compatible)
     VLLM_API_KEY: z.string().optional(), // Optional bearer token for vLLM
+    LITELLM_BASE_URL: z.string().url().optional(), // LiteLLM proxy base URL (OpenAI-compatible)
+    LITELLM_API_KEY: z.string().optional(), // Optional bearer token for LiteLLM
     ELEVENLABS_API_KEY: z.string().min(1).optional(), // ElevenLabs API key for text-to-speech in deployed chat
     SERPER_API_KEY: z.string().min(1).optional(), // Serper API key for online search
     EXA_API_KEY: z.string().min(1).optional(), // Exa AI API key for enhanced online search
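
These are the only new environment variables the commit introduces: LITELLM_BASE_URL points at the proxy, and LITELLM_API_KEY is optional and only attached as a bearer token when set. A minimal sketch of the corresponding .env entries, assuming a LiteLLM proxy on localhost port 4000 secured with a master key (both values are hypothetical and for illustration only):

LITELLM_BASE_URL=http://localhost:4000
LITELLM_API_KEY=sk-litellm-master-key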
