Skip to content

Commit fb06907

Browse files
michael-borck and claude committed
Massive UX improvements: Fix settings persistence & add model discovery
🎯 **Settings Persistence Fixed:**
- Create backend settings management system
- Settings now persist across app restarts
- No more restart requirement - apply settings immediately
- Bridge localStorage (persistence) with runtime settings

🔄 **Model Discovery Feature:**
- Add /api/models endpoint for all providers
- Refresh button (🔄) fetches available models
- Dropdown selection replaces manual typing
- Support for Ollama, OpenAI, Anthropic, Google, Groq, Together

🔑 **Enhanced Provider Support:**
- Add optional API key support for Ollama (bearer token)
- Better provider configuration management
- Runtime settings override environment variables

⚡ **Instant Settings Application:**
- Remove restart popup/requirement
- Settings take effect immediately
- Better success messaging and feedback
- Loading states and error handling

🎨 **Improved Settings UI:**
- Model dropdown when available, text input as fallback
- Better form validation and user feedback
- Loading states for model discovery
- Clearer instructions and help text
- Enhanced provider-specific guidance

**Major UX Issues Resolved:**
✅ Settings persist across sessions
✅ No restart required for changes
✅ Model discovery prevents typing errors
✅ Better API key management
✅ Immediate feedback and application

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
1 parent 1dbc591 commit fb06907

8 files changed

Lines changed: 477 additions & 40 deletions

File tree

app/api/getChat/route.ts

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,18 +1,18 @@
11
import {
22
createProvider,
3-
getDefaultProviderConfig,
43
LLMStreamPayload,
54
} from "@/utils/providers";
5+
import { getSettings } from "@/utils/settings";
66

77
export async function POST(request: Request) {
88
let { messages } = await request.json();
99

1010
try {
11-
const providerConfig = getDefaultProviderConfig();
12-
const provider = createProvider(providerConfig);
11+
const settings = getSettings();
12+
const provider = createProvider(); // Will use runtime settings
1313

1414
const payload: LLMStreamPayload = {
15-
model: providerConfig.defaultModel,
15+
model: settings.llmModel,
1616
messages,
1717
stream: true,
1818
};

app/api/models/route.ts

Lines changed: 125 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,125 @@
1+
import { NextResponse } from "next/server";
2+
3+
export async function POST(request: Request) {
4+
try {
5+
const { provider, baseUrl, apiKey } = await request.json();
6+
7+
let models: string[] = [];
8+
9+
switch (provider.toLowerCase()) {
10+
case "ollama":
11+
models = await getOllamaModels(baseUrl, apiKey);
12+
break;
13+
case "openai":
14+
models = await getOpenAIModels(baseUrl, apiKey);
15+
break;
16+
case "anthropic":
17+
case "claude":
18+
models = getAnthropicModels();
19+
break;
20+
case "google":
21+
case "gemini":
22+
models = await getGoogleModels(baseUrl, apiKey);
23+
break;
24+
case "groq":
25+
models = await getGroqModels(baseUrl, apiKey);
26+
break;
27+
case "together":
28+
models = await getTogetherModels(baseUrl, apiKey);
29+
break;
30+
default:
31+
return NextResponse.json({ error: "Unsupported provider" }, { status: 400 });
32+
}
33+
34+
return NextResponse.json({ models });
35+
} catch (error) {
36+
console.error("Error fetching models:", error);
37+
return NextResponse.json({
38+
error: "Failed to fetch models",
39+
details: error instanceof Error ? error.message : "Unknown error"
40+
}, { status: 500 });
41+
}
42+
}
43+
44+
async function getOllamaModels(baseUrl: string, apiKey?: string): Promise<string[]> {
45+
const url = `${baseUrl}/api/tags`;
46+
const headers: Record<string, string> = {};
47+
48+
if (apiKey) {
49+
headers.Authorization = `Bearer ${apiKey}`;
50+
}
51+
52+
const response = await fetch(url, { headers });
53+
if (!response.ok) {
54+
throw new Error(`Ollama API error: ${response.status} ${response.statusText}`);
55+
}
56+
57+
const data = await response.json();
58+
return data.models?.map((model: any) => model.name) || [];
59+
}
60+
61+
async function getOpenAIModels(baseUrl: string, apiKey: string): Promise<string[]> {
62+
const response = await fetch(`${baseUrl}/v1/models`, {
63+
headers: {
64+
Authorization: `Bearer ${apiKey}`,
65+
},
66+
});
67+
68+
if (!response.ok) {
69+
throw new Error(`OpenAI API error: ${response.status} ${response.statusText}`);
70+
}
71+
72+
const data = await response.json();
73+
return data.data?.map((model: any) => model.id) || [];
74+
}
75+
76+
function getAnthropicModels(): string[] {
77+
return [
78+
"claude-3-5-sonnet-20241022",
79+
"claude-3-5-haiku-20241022",
80+
"claude-3-opus-20240229",
81+
"claude-3-sonnet-20240229",
82+
"claude-3-haiku-20240307"
83+
];
84+
}
85+
86+
async function getGoogleModels(baseUrl: string, apiKey: string): Promise<string[]> {
87+
const response = await fetch(`${baseUrl}/v1beta/models?key=${apiKey}`);
88+
89+
if (!response.ok) {
90+
throw new Error(`Google API error: ${response.status} ${response.statusText}`);
91+
}
92+
93+
const data = await response.json();
94+
return data.models?.map((model: any) => model.name.replace('models/', '')) || [];
95+
}
96+
97+
async function getGroqModels(baseUrl: string, apiKey: string): Promise<string[]> {
98+
const response = await fetch(`${baseUrl}/v1/models`, {
99+
headers: {
100+
Authorization: `Bearer ${apiKey}`,
101+
},
102+
});
103+
104+
if (!response.ok) {
105+
throw new Error(`Groq API error: ${response.status} ${response.statusText}`);
106+
}
107+
108+
const data = await response.json();
109+
return data.data?.map((model: any) => model.id) || [];
110+
}
111+
112+
async function getTogetherModels(baseUrl: string, apiKey: string): Promise<string[]> {
113+
const response = await fetch(`${baseUrl}/v1/models`, {
114+
headers: {
115+
Authorization: `Bearer ${apiKey}`,
116+
},
117+
});
118+
119+
if (!response.ok) {
120+
throw new Error(`Together API error: ${response.status} ${response.statusText}`);
121+
}
122+
123+
const data = await response.json();
124+
return data.data?.map((model: any) => model.id) || [];
125+
}

app/api/settings/route.ts

Lines changed: 32 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,32 @@
1+
import { NextResponse } from "next/server";
2+
import { getSettings, updateSettings, AppSettings } from "@/utils/settings";
3+
4+
export async function GET() {
5+
try {
6+
const settings = getSettings();
7+
// Don't send API keys to frontend for security
8+
const safeSettings = {
9+
...settings,
10+
llmApiKey: settings.llmApiKey ? "***" : "",
11+
searchApiKey: settings.searchApiKey ? "***" : "",
12+
};
13+
return NextResponse.json(safeSettings);
14+
} catch (error) {
15+
console.error("Error getting settings:", error);
16+
return NextResponse.json({ error: "Failed to get settings" }, { status: 500 });
17+
}
18+
}
19+
20+
export async function POST(request: Request) {
21+
try {
22+
const newSettings: AppSettings = await request.json();
23+
24+
// Update runtime settings
25+
updateSettings(newSettings);
26+
27+
return NextResponse.json({ success: true, message: "Settings updated successfully" });
28+
} catch (error) {
29+
console.error("Error updating settings:", error);
30+
return NextResponse.json({ error: "Failed to update settings" }, { status: 500 });
31+
}
32+
}

0 commit comments

Comments (0)