Replace glassmorphic with Tailwind utilities #488
base: main
Changes from all commits
ac3e9c5
789e2fb
ea1503a
971c66e
```diff
@@ -13,27 +13,25 @@ import { Spinner } from '@/components/ui/spinner'
 import { Section } from '@/components/section'
 import { FollowupPanel } from '@/components/followup-panel'
 import { inquire, researcher, taskManager, querySuggestor, resolutionSearch, type DrawnFeature } from '@/lib/agents'
-// Removed import of useGeospatialToolMcp as it no longer exists and was incorrectly used here.
-// The geospatialTool (if used by agents like researcher) now manages its own MCP client.
 import { writer } from '@/lib/agents/writer'
-import { saveChat, getSystemPrompt } from '@/lib/actions/chat' // Added getSystemPrompt
+import { saveChat, getSystemPrompt } from '@/lib/actions/chat'
 import { Chat, AIMessage } from '@/lib/types'
 import { UserMessage } from '@/components/user-message'
 import { BotMessage } from '@/components/message'
 import { SearchSection } from '@/components/search-section'
 import SearchRelated from '@/components/search-related'
 import { GeoJsonLayer } from '@/components/map/geojson-layer'
 import { ResolutionImage } from '@/components/resolution-image'
 import { CopilotDisplay } from '@/components/copilot-display'
 import RetrieveSection from '@/components/retrieve-section'
 import { VideoSearchSection } from '@/components/video-search-section'
-import { MapQueryHandler } from '@/components/map/map-query-handler' // Add this import
+import { MapQueryHandler } from '@/components/map/map-query-handler'

 // Define the type for related queries
 type RelatedQueries = {
   items: { query: string }[]
 }

 // Removed mcp parameter from submit, as geospatialTool now handles its client.
 async function submit(formData?: FormData, skip?: boolean) {
   'use server'
```
```diff
@@ -43,16 +41,17 @@ async function submit(formData?: FormData, skip?: boolean)
   const isCollapsed = createStreamableValue(false)

   const action = formData?.get('action') as string;
   const drawnFeaturesString = formData?.get('drawnFeatures') as string;
   let drawnFeatures: DrawnFeature[] = [];
   try {
     drawnFeatures = drawnFeaturesString ? JSON.parse(drawnFeaturesString) : [];
   } catch (e) {
     console.error('Failed to parse drawnFeatures:', e);
   }

   if (action === 'resolution_search') {
     const file = formData?.get('file') as File;
     const timezone = (formData?.get('timezone') as string) || 'UTC';
     const drawnFeaturesString = formData?.get('drawnFeatures') as string;
     let drawnFeatures: DrawnFeature[] = [];
     try {
       drawnFeatures = drawnFeaturesString ? JSON.parse(drawnFeaturesString) : [];
     } catch (e) {
       console.error('Failed to parse drawnFeatures:', e);
     }

     if (!file) {
       throw new Error('No file provided for resolution search.');
@@ -61,7 +60,6 @@ async function submit(formData?: FormData, skip?: boolean)
     const buffer = await file.arrayBuffer();
     const dataUrl = `data:${file.type};base64,${Buffer.from(buffer).toString('base64')}`;

     // Get the current messages, excluding tool-related ones.
     const messages: CoreMessage[] = [...(aiState.get().messages as any[])].filter(
       message =>
         message.role !== 'tool' &&
@@ -71,16 +69,12 @@ async function submit(formData?: FormData, skip?: boolean)
         message.type !== 'resolution_search_result'
     );

     // The user's prompt for this action is static.
     const userInput = 'Analyze this map view.';

     // Construct the multimodal content for the user message.
     const content: CoreMessage['content'] = [
       { type: 'text', text: userInput },
       { type: 'image', image: dataUrl, mimeType: file.type }
     ];

     // Add the new user message to the AI state.
     aiState.update({
       ...aiState.get(),
       messages: [
@@ -90,12 +84,11 @@ async function submit(formData?: FormData, skip?: boolean)
     });
     messages.push({ role: 'user', content });

     // Create a streamable value for the summary.
-    const summaryStream = createStreamableValue<string>('');
+    const summaryStream = createStreamableValue<string>('Analyzing map view...');
     const groupeId = nanoid();

     async function processResolutionSearch() {
       try {
         // Call the simplified agent, which now returns a stream.
         const streamResult = await resolutionSearch(messages, timezone, drawnFeatures);

         let fullSummary = '';
```
```diff
@@ -107,22 +100,41 @@ async function submit(formData?: FormData, skip?: boolean)
         }

         const analysisResult = await streamResult.object;

         // Mark the summary stream as done with the result.
         summaryStream.done(analysisResult.summary || 'Analysis complete.');

         if (analysisResult.geoJson) {
           uiStream.append(
             <GeoJsonLayer
               id={groupeId}
               data={analysisResult.geoJson as FeatureCollection}
             />
           );
         }

         messages.push({ role: 'assistant', content: analysisResult.summary || 'Analysis complete.' });

         const sanitizedMessages: CoreMessage[] = messages.map(m => {
           if (Array.isArray(m.content)) {
             return {
               ...m,
-              content: m.content.filter(part => part.type !== 'image')
+              content: m.content.filter((part: any) => part.type !== 'image')
             } as CoreMessage
           }
           return m
         })

+        const currentMessages = aiState.get().messages;
+        const sanitizedHistory = currentMessages.map(m => {
+          if (m.role === "user" && Array.isArray(m.content)) {
+            return {
+              ...m,
+              content: m.content.map((part: any) =>
+                part.type === "image" ? { ...part, image: "IMAGE_PROCESSED" } : part
+              )
+            }
+          }
+          return m
+        });
```
Comment on lines +126 to +137

Contributor

🐛 Bug: The code creates `sanitizedHistory` but never uses it; the subsequent `aiState.done` call still spreads `aiState.get().messages`, so the sanitized history is discarded.

Proposed fix:

```diff
 aiState.done({
   ...aiState.get(),
   messages: [
-    ...aiState.get().messages,
+    ...sanitizedHistory,
     {
       id: groupeId,
       role: 'assistant',
```

Also applies to: 147-150
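If the fix is applied, the completed state update might look roughly like the sketch below. This is an illustration only: `sanitizedHistory`, `groupeId`, `analysisResult`, and `dataUrl` are the values from the surrounding diff, and the shape of the stored assistant message is assumed from the later hunk.

```ts
// Sketch of the corrected call, assuming the reviewer's fix is applied.
// All referenced variables come from the surrounding diff; the message shape
// mirrors the 'resolution_search_result' entry shown in a later hunk.
aiState.done({
  ...aiState.get(),
  messages: [
    // Persist the sanitized history instead of the raw messages,
    // so base64 image payloads are not stored verbatim.
    ...sanitizedHistory,
    {
      id: groupeId,
      role: 'assistant',
      content: JSON.stringify({ ...analysisResult, image: dataUrl }),
      type: 'resolution_search_result'
    }
  ]
})
```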
```diff
         const relatedQueries = await querySuggestor(uiStream, sanitizedMessages);
         uiStream.append(
           <Section title="Follow-up">
@@ -132,8 +144,6 @@ async function submit(formData?: FormData, skip?: boolean)

         await new Promise(resolve => setTimeout(resolve, 500));

         const groupeId = nanoid();

         aiState.done({
           ...aiState.get(),
           messages: [
@@ -147,7 +157,10 @@ async function submit(formData?: FormData, skip?: boolean)
             {
               id: groupeId,
               role: 'assistant',
-              content: JSON.stringify(analysisResult),
+              content: JSON.stringify({
+                ...analysisResult,
+                image: dataUrl
+              }),
               type: 'resolution_search_result'
             },
             {
```
```diff
@@ -173,12 +186,11 @@ async function submit(formData?: FormData, skip?: boolean)
       }
     }

     // Start the background process without awaiting it.
     processResolutionSearch();

     // Immediately update the UI stream with the BotMessage component.
     uiStream.update(
       <Section title="response">
         <ResolutionImage src={dataUrl} />
         <BotMessage content={summaryStream.value} />
       </Section>
     );
@@ -198,7 +210,17 @@ async function submit(formData?: FormData, skip?: boolean)
       message.type !== 'related' &&
       message.type !== 'end' &&
       message.type !== 'resolution_search_result'
-  )
+  ).map(m => {
+    if (Array.isArray(m.content)) {
+      return {
+        ...m,
+        content: m.content.filter((part: any) =>
+          part.type !== "image" || (typeof part.image === "string" && part.image.startsWith("data:"))
+        )
+      } as any
+    }
+    return m
+  })

   const groupeId = nanoid()
   const useSpecificAPI = process.env.USE_SPECIFIC_API_FOR_WRITER === 'true'
@@ -241,9 +263,8 @@ async function submit(formData?: FormData, skip?: boolean)
     </Section>
   );

-  uiStream.append(answerSection);
+  uiStream.update(answerSection);

   const groupeId = nanoid();
   const relatedQueries = { items: [] };

   aiState.done({
```
```diff
@@ -327,7 +348,6 @@ async function submit(formData?: FormData, skip?: boolean)
   }

   const hasImage = messageParts.some(part => part.type === 'image')
   // Properly type the content based on whether it contains images
   const content: CoreMessage['content'] = hasImage
     ? messageParts as CoreMessage['content']
     : messageParts.map(part => part.text).join('\n')
@@ -361,7 +381,6 @@ async function submit(formData?: FormData, skip?: boolean)

   const userId = 'anonymous'
   const currentSystemPrompt = (await getSystemPrompt(userId)) || ''

   const mapProvider = formData?.get('mapProvider') as 'mapbox' | 'google'

   async function processEvents() {
@@ -410,7 +429,8 @@ async function submit(formData?: FormData, skip?: boolean)
         streamText,
         messages,
         mapProvider,
-        useSpecificAPI
+        useSpecificAPI,
+        drawnFeatures
       )
       answer = fullResponse
       toolOutputs = toolResponses
```
```diff
@@ -643,12 +663,10 @@ export const getUIStateFromAIState = (aiState: AIState): UIState => {
     case 'input_related':
       let messageContent: string | any[]
       try {
-        // For backward compatibility with old messages that stored a JSON string
        const json = JSON.parse(content as string)
        messageContent =
          type === 'input' ? json.input : json.related_query
      } catch (e) {
-        // New messages will store the content array or string directly
        messageContent = content
      }
      return {
@@ -669,8 +687,8 @@ export const getUIStateFromAIState = (aiState: AIState): UIState => {
      }
      break
    case 'assistant':
-      const answer = createStreamableValue()
-      answer.done(content)
+      const answer = createStreamableValue(content as string)
+      answer.done(content as string)
      switch (type) {
        case 'response':
          return {
@@ -682,7 +700,9 @@ export const getUIStateFromAIState = (aiState: AIState): UIState => {
          )
        }
        case 'related':
-          const relatedQueries = createStreamableValue<RelatedQueries>()
+          const relatedQueries = createStreamableValue<RelatedQueries>({
+            items: []
+          })
          relatedQueries.done(JSON.parse(content as string))
          return {
            id,
@@ -704,11 +724,13 @@ export const getUIStateFromAIState = (aiState: AIState): UIState => {
        case 'resolution_search_result': {
          const analysisResult = JSON.parse(content as string);
          const geoJson = analysisResult.geoJson as FeatureCollection;
+          const image = analysisResult.image as string;

          return {
            id,
            component: (
              <>
+                {image && <ResolutionImage src={image} />}
                {geoJson && (
                  <GeoJsonLayer id={id} data={geoJson} />
                )}
```
```diff
@@ -721,21 +743,37 @@ export const getUIStateFromAIState = (aiState: AIState): UIState => {
    case 'tool':
      try {
        const toolOutput = JSON.parse(content as string)
-        const isCollapsed = createStreamableValue()
+        const isCollapsed = createStreamableValue(true)
        isCollapsed.done(true)

        if (
          toolOutput.type === 'MAP_QUERY_TRIGGER' &&
          name === 'geospatialQueryTool'
        ) {
+          const mapUrl = toolOutput.mcp_response?.mapUrl;
+          const placeName = toolOutput.mcp_response?.location?.place_name;

          return {
            id,
-            component: <MapQueryHandler toolOutput={toolOutput} />,
+            component: (
+              <>
+                {mapUrl && (
+                  <ResolutionImage
+                    src={mapUrl}
+                    className="mb-0"
+                    alt={placeName ? `Map of ${placeName}` : 'Map Preview'}
+                  />
+                )}
+                <MapQueryHandler toolOutput={toolOutput} />
+              </>
+            ),
            isCollapsed: false
          }
        }

-        const searchResults = createStreamableValue()
+        const searchResults = createStreamableValue(
+          JSON.stringify(toolOutput)
+        )
        searchResults.done(JSON.stringify(toolOutput))
        switch (name) {
          case 'search':
```
🧹 Nitpick | 🔵 Trivial

Consider consolidating image sanitization logic.

There are two similar sanitization patterns in this file:

- filtering `image` parts out of message content entirely, and
- replacing image data with an `"IMAGE_PROCESSED"` placeholder.

These serve different purposes but the logic is similar. Consider extracting helper functions to clarify intent and reduce duplication.
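The reviewer's collapsed "Suggested helpers" snippet is not captured above. A minimal sketch of what such helpers could look like, keeping message shapes loose to match the casts already used in the diff (the names `stripImageParts` and `maskImageParts` are illustrative, not the reviewer's actual code):

```ts
// Hypothetical helpers, not part of the PR. Message shapes are kept as `any`
// to mirror the casts used elsewhere in the file.
const IMAGE_PLACEHOLDER = 'IMAGE_PROCESSED'

// Drop `image` parts from multimodal content (e.g. before calling querySuggestor).
function stripImageParts(messages: any[]): any[] {
  return messages.map(m =>
    Array.isArray(m.content)
      ? { ...m, content: m.content.filter((part: any) => part.type !== 'image') }
      : m
  )
}

// Replace image payloads in user messages with a placeholder (e.g. before persisting history).
function maskImageParts(messages: any[]): any[] {
  return messages.map(m =>
    m.role === 'user' && Array.isArray(m.content)
      ? {
          ...m,
          content: m.content.map((part: any) =>
            part.type === 'image' ? { ...part, image: IMAGE_PLACEHOLDER } : part
          )
        }
      : m
  )
}
```

With helpers along these lines, `sanitizedMessages` could become `stripImageParts(messages)` and `sanitizedHistory` could become `maskImageParts(aiState.get().messages)`.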