diff --git a/go.mod b/go.mod index 4dc59a93..97d4f37c 100644 --- a/go.mod +++ b/go.mod @@ -14,6 +14,7 @@ require ( github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.2 github.com/joho/godotenv v1.5.1 github.com/openai/openai-go/v2 v2.7.1 + github.com/openai/openai-go/v3 v3.12.0 github.com/samber/lo v1.51.0 github.com/stretchr/testify v1.10.0 go.mongodb.org/mongo-driver/v2 v2.3.0 diff --git a/go.sum b/go.sum index 41824e06..1943dc8d 100644 --- a/go.sum +++ b/go.sum @@ -90,6 +90,8 @@ github.com/muesli/termenv v0.16.0 h1:S5AlUN9dENB57rsbnkPyfdGuWIlkmzJjbFf0Tf5FWUc github.com/muesli/termenv v0.16.0/go.mod h1:ZRfOIKPFDYQoDFF4Olj7/QJbW60Ol/kL1pU3VfY/Cnk= github.com/openai/openai-go/v2 v2.7.1 h1:/tfvTJhfv7hTSL8mWwc5VL4WLLSDL5yn9VqVykdu9r8= github.com/openai/openai-go/v2 v2.7.1/go.mod h1:jrJs23apqJKKbT+pqtFgNKpRju/KP9zpUTZhz3GElQE= +github.com/openai/openai-go/v3 v3.12.0 h1:NkrImaglFQeDycc/n/fEmpFV8kKr8snl9/8X2x4eHOg= +github.com/openai/openai-go/v3 v3.12.0/go.mod h1:cdufnVK14cWcT9qA1rRtrXx4FTRsgbDPW7Ia7SS5cZo= github.com/pelletier/go-toml/v2 v2.2.4 h1:mye9XuhQ6gvn5h28+VilKrrPoQVanw5PMw/TB0t5Ec4= github.com/pelletier/go-toml/v2 v2.2.4/go.mod h1:2gIqNv+qfxSVS7cM2xJQKtLSTLUE9V8t9Stt+h56mCY= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= diff --git a/internal/api/chat/create_conversation_message.go b/internal/api/chat/create_conversation_message.go index 9f78a2ac..cfe7730d 100644 --- a/internal/api/chat/create_conversation_message.go +++ b/internal/api/chat/create_conversation_message.go @@ -3,7 +3,6 @@ package chat import ( "context" - "paperdebugger/internal/api/mapper" "paperdebugger/internal/libs/contextutil" "paperdebugger/internal/libs/shared" "paperdebugger/internal/models" @@ -115,7 +114,7 @@ func (s *ChatServer) createConversation( userInstructions string, userMessage string, userSelectedText string, - languageModel models.LanguageModel, + modelSlug string, conversationType chatv1.ConversationType, ) (*models.Conversation, error) { 
systemPrompt, err := s.chatService.GetSystemPrompt(ctx, latexFullSource, projectInstructions, userInstructions, conversationType) @@ -135,7 +134,7 @@ func (s *ChatServer) createConversation( } return s.chatService.InsertConversationToDB( - ctx, userId, projectId, languageModel, messages, oaiHistory.OfInputItemList, + ctx, userId, projectId, modelSlug, messages, oaiHistory.OfInputItemList, ) } @@ -180,7 +179,7 @@ func (s *ChatServer) appendConversationMessage( // 如果 conversationId 是 "", 就创建新对话,否则就追加消息到对话 // conversationType 可以在一次 conversation 中多次切换 -func (s *ChatServer) prepare(ctx context.Context, projectId string, conversationId string, userMessage string, userSelectedText string, languageModel models.LanguageModel, conversationType chatv1.ConversationType) (context.Context, *models.Conversation, *models.Settings, error) { +func (s *ChatServer) prepare(ctx context.Context, projectId string, conversationId string, userMessage string, userSelectedText string, modelSlug string, conversationType chatv1.ConversationType) (context.Context, *models.Conversation, *models.Settings, error) { actor, err := contextutil.GetActor(ctx) if err != nil { return ctx, nil, nil, err @@ -223,7 +222,7 @@ func (s *ChatServer) prepare(ctx context.Context, projectId string, conversation userInstructions, userMessage, userSelectedText, - languageModel, + modelSlug, conversationType, ) } else { @@ -251,68 +250,3 @@ func (s *ChatServer) prepare(ctx context.Context, projectId string, conversation return ctx, conversation, settings, nil } - -// Deprecated: Use CreateConversationMessageStream instead. 
-func (s *ChatServer) CreateConversationMessage( - ctx context.Context, - req *chatv1.CreateConversationMessageRequest, -) (*chatv1.CreateConversationMessageResponse, error) { - languageModel := models.LanguageModel(req.GetLanguageModel()) - ctx, conversation, settings, err := s.prepare( - ctx, - req.GetProjectId(), - req.GetConversationId(), - req.GetUserMessage(), - req.GetUserSelectedText(), - languageModel, - req.GetConversationType(), - ) - if err != nil { - return nil, err - } - - llmProvider := &models.LLMProviderConfig{ - Endpoint: s.cfg.OpenAIBaseURL, - APIKey: settings.OpenAIAPIKey, - } - openaiChatHistory, inappChatHistory, err := s.aiClient.ChatCompletion(ctx, languageModel, conversation.OpenaiChatHistory, llmProvider) - if err != nil { - return nil, err - } - - bsonMessages := make([]bson.M, len(inappChatHistory)) - for i := range inappChatHistory { - bsonMsg, err := convertToBSON(&inappChatHistory[i]) - if err != nil { - return nil, err - } - bsonMessages[i] = bsonMsg - } - conversation.InappChatHistory = append(conversation.InappChatHistory, bsonMessages...) 
- conversation.OpenaiChatHistory = openaiChatHistory - - if err := s.chatService.UpdateConversation(conversation); err != nil { - return nil, err - } - - go func() { - protoMessages := make([]*chatv1.Message, len(conversation.InappChatHistory)) - for i, bsonMsg := range conversation.InappChatHistory { - protoMessages[i] = mapper.BSONToChatMessage(bsonMsg) - } - title, err := s.aiClient.GetConversationTitle(ctx, protoMessages, llmProvider) - if err != nil { - s.logger.Error("Failed to get conversation title", "error", err, "conversationID", conversation.ID.Hex()) - return - } - conversation.Title = title - if err := s.chatService.UpdateConversation(conversation); err != nil { - s.logger.Error("Failed to update conversation with new title", "error", err, "conversationID", conversation.ID.Hex()) - return - } - }() - - return &chatv1.CreateConversationMessageResponse{ - Conversation: mapper.MapModelConversationToProto(conversation), - }, nil -} diff --git a/internal/api/chat/create_conversation_message_stream.go b/internal/api/chat/create_conversation_message_stream.go index 0e659a28..e996d3a5 100644 --- a/internal/api/chat/create_conversation_message_stream.go +++ b/internal/api/chat/create_conversation_message_stream.go @@ -25,14 +25,18 @@ func (s *ChatServer) CreateConversationMessageStream( ) error { ctx := stream.Context() - languageModel := models.LanguageModel(req.GetLanguageModel()) + modelSlug := req.GetModelSlug() + if modelSlug == "" { + modelSlug = models.LanguageModel(req.GetLanguageModel()).Name() + } + ctx, conversation, settings, err := s.prepare( ctx, req.GetProjectId(), req.GetConversationId(), req.GetUserMessage(), req.GetUserSelectedText(), - languageModel, + modelSlug, req.GetConversationType(), ) if err != nil { @@ -41,11 +45,10 @@ func (s *ChatServer) CreateConversationMessageStream( // 用法跟 ChatCompletion 一样,只是传递了 stream 参数 llmProvider := &models.LLMProviderConfig{ - Endpoint: s.cfg.OpenAIBaseURL, - APIKey: settings.OpenAIAPIKey, + APIKey: 
settings.OpenAIAPIKey, } - openaiChatHistory, inappChatHistory, err := s.aiClient.ChatCompletionStream(ctx, stream, conversation.ID.Hex(), languageModel, conversation.OpenaiChatHistory, llmProvider) + openaiChatHistory, inappChatHistory, err := s.aiClient.ChatCompletionStream(ctx, stream, conversation.ID.Hex(), modelSlug, conversation.OpenaiChatHistory, llmProvider) if err != nil { return s.sendStreamError(stream, err) } diff --git a/internal/api/mapper/conversation.go b/internal/api/mapper/conversation.go index 129dabd2..88ca05a0 100644 --- a/internal/api/mapper/conversation.go +++ b/internal/api/mapper/conversation.go @@ -32,10 +32,21 @@ func MapModelConversationToProto(conversation *models.Conversation) *chatv1.Conv return msg.GetPayload().GetMessageType() != &chatv1.MessagePayload_System{} }) + modelSlug := conversation.ModelSlug + if modelSlug == "" { + modelSlug = models.SlugFromLanguageModel(models.LanguageModel(conversation.LanguageModel)) + } + + languageModel := chatv1.LanguageModel(conversation.LanguageModel) + if languageModel == chatv1.LanguageModel_LANGUAGE_MODEL_UNSPECIFIED { + languageModel = chatv1.LanguageModel(models.LanguageModelFromSlug(modelSlug)) + } + return &chatv1.Conversation{ Id: conversation.ID.Hex(), Title: conversation.Title, - LanguageModel: chatv1.LanguageModel(conversation.LanguageModel), - Messages: filteredMessages, + LanguageModel: languageModel, // backward compatibility + // ModelSlug: modelSlug, + Messages: filteredMessages, } } diff --git a/internal/models/conversation.go b/internal/models/conversation.go index 23b0e2b3..70d48300 100644 --- a/internal/models/conversation.go +++ b/internal/models/conversation.go @@ -11,6 +11,7 @@ type Conversation struct { ProjectID string `bson:"project_id"` Title string `bson:"title"` LanguageModel LanguageModel `bson:"language_model"` + ModelSlug string `bson:"model_slug"` InappChatHistory []bson.M `bson:"inapp_chat_history"` // Store as raw BSON to avoid protobuf decoding issues 
OpenaiChatHistory responses.ResponseInputParam `bson:"openai_chat_history"` // 实际上发给 GPT 的聊天历史 diff --git a/internal/models/language_model.go b/internal/models/language_model.go index 7f1e8df0..73c94d25 100644 --- a/internal/models/language_model.go +++ b/internal/models/language_model.go @@ -56,3 +56,69 @@ func (x LanguageModel) Name() string { return openai.ChatModelGPT5 } } + +func LanguageModelFromSlug(slug string) LanguageModel { + switch slug { + case "gpt-4o": + return LanguageModel(chatv1.LanguageModel_LANGUAGE_MODEL_OPENAI_GPT4O) + case "gpt-4.1": + return LanguageModel(chatv1.LanguageModel_LANGUAGE_MODEL_OPENAI_GPT41) + case "gpt-4.1-mini": + return LanguageModel(chatv1.LanguageModel_LANGUAGE_MODEL_OPENAI_GPT41_MINI) + case "gpt-5": + return LanguageModel(chatv1.LanguageModel_LANGUAGE_MODEL_OPENAI_GPT5) + case "gpt-5-mini": + return LanguageModel(chatv1.LanguageModel_LANGUAGE_MODEL_OPENAI_GPT5_MINI) + case "gpt-5-nano": + return LanguageModel(chatv1.LanguageModel_LANGUAGE_MODEL_OPENAI_GPT5_NANO) + case "gpt-5-chat-latest": + return LanguageModel(chatv1.LanguageModel_LANGUAGE_MODEL_OPENAI_GPT5_CHAT_LATEST) + case "o1": + return LanguageModel(chatv1.LanguageModel_LANGUAGE_MODEL_OPENAI_O1) + case "o1-mini": + return LanguageModel(chatv1.LanguageModel_LANGUAGE_MODEL_OPENAI_O1_MINI) + case "o3": + return LanguageModel(chatv1.LanguageModel_LANGUAGE_MODEL_OPENAI_O3) + case "o3-mini": + return LanguageModel(chatv1.LanguageModel_LANGUAGE_MODEL_OPENAI_O3_MINI) + case "o4-mini": + return LanguageModel(chatv1.LanguageModel_LANGUAGE_MODEL_OPENAI_O4_MINI) + case "codex-mini-latest": + return LanguageModel(chatv1.LanguageModel_LANGUAGE_MODEL_OPENAI_CODEX_MINI_LATEST) + default: + return LanguageModel(chatv1.LanguageModel_LANGUAGE_MODEL_UNSPECIFIED) + } +} + +func SlugFromLanguageModel(languageModel LanguageModel) string { + switch languageModel { + case LanguageModel(chatv1.LanguageModel_LANGUAGE_MODEL_OPENAI_GPT4O): + return "gpt-4o" + case 
LanguageModel(chatv1.LanguageModel_LANGUAGE_MODEL_OPENAI_GPT41): + return "gpt-4.1" + case LanguageModel(chatv1.LanguageModel_LANGUAGE_MODEL_OPENAI_GPT41_MINI): + return "gpt-4.1-mini" + case LanguageModel(chatv1.LanguageModel_LANGUAGE_MODEL_OPENAI_GPT5): + return "gpt-5" + case LanguageModel(chatv1.LanguageModel_LANGUAGE_MODEL_OPENAI_GPT5_MINI): + return "gpt-5-mini" + case LanguageModel(chatv1.LanguageModel_LANGUAGE_MODEL_OPENAI_GPT5_NANO): + return "gpt-5-nano" + case LanguageModel(chatv1.LanguageModel_LANGUAGE_MODEL_OPENAI_GPT5_CHAT_LATEST): + return "gpt-5-chat-latest" + case LanguageModel(chatv1.LanguageModel_LANGUAGE_MODEL_OPENAI_O1): + return "o1" + case LanguageModel(chatv1.LanguageModel_LANGUAGE_MODEL_OPENAI_O1_MINI): + return "o1-mini" + case LanguageModel(chatv1.LanguageModel_LANGUAGE_MODEL_OPENAI_O3): + return "o3" + case LanguageModel(chatv1.LanguageModel_LANGUAGE_MODEL_OPENAI_O3_MINI): + return "o3-mini" + case LanguageModel(chatv1.LanguageModel_LANGUAGE_MODEL_OPENAI_O4_MINI): + return "o4-mini" + case LanguageModel(chatv1.LanguageModel_LANGUAGE_MODEL_OPENAI_CODEX_MINI_LATEST): + return "codex-mini-latest" + default: + return "unknown" + } +} diff --git a/internal/services/chat.go b/internal/services/chat.go index 131be4d9..825eecfc 100644 --- a/internal/services/chat.go +++ b/internal/services/chat.go @@ -92,7 +92,7 @@ func (s *ChatService) GetPrompt(ctx context.Context, content string, selectedTex return strings.TrimSpace(userPromptBuffer.String()), nil } -func (s *ChatService) InsertConversationToDB(ctx context.Context, userID bson.ObjectID, projectID string, languageModel models.LanguageModel, inappChatHistory []*chatv1.Message, openaiChatHistory responses.ResponseInputParam) (*models.Conversation, error) { +func (s *ChatService) InsertConversationToDB(ctx context.Context, userID bson.ObjectID, projectID string, modelSlug string, inappChatHistory []*chatv1.Message, openaiChatHistory responses.ResponseInputParam) (*models.Conversation, error) { // 
Convert protobuf messages to BSON bsonMessages := make([]bson.M, len(inappChatHistory)) for i := range inappChatHistory { @@ -116,7 +116,7 @@ func (s *ChatService) InsertConversationToDB(ctx context.Context, userID bson.Ob UserID: userID, ProjectID: projectID, Title: DefaultConversationTitle, - LanguageModel: languageModel, + ModelSlug: modelSlug, InappChatHistory: bsonMessages, OpenaiChatHistory: openaiChatHistory, } diff --git a/internal/services/toolkit/client/completion.go b/internal/services/toolkit/client/completion.go index 6bc73b88..f4c13259 100644 --- a/internal/services/toolkit/client/completion.go +++ b/internal/services/toolkit/client/completion.go @@ -14,15 +14,15 @@ import ( // Parameters: // // ctx: The context for controlling cancellation and deadlines. -// languageModel: The language model to use for completion (e.g., GPT-3.5, GPT-4). +// modelSlug: The language model to use for completion (e.g., GPT-3.5, GPT-4). // messages: The full chat history (as input) to send to the language model. // // Returns: // 1. The full chat history sent to the language model (including any tool call results). // 2. The incremental chat history visible to the user (including tool call results and assistant responses). // 3. An error, if any occurred during the process. 
-func (a *AIClient) ChatCompletion(ctx context.Context, languageModel models.LanguageModel, messages responses.ResponseInputParam, llmProvider *models.LLMProviderConfig) (responses.ResponseInputParam, []chatv1.Message, error) { - openaiChatHistory, inappChatHistory, err := a.ChatCompletionStream(ctx, nil, "", languageModel, messages, llmProvider) +func (a *AIClient) ChatCompletion(ctx context.Context, modelSlug string, messages responses.ResponseInputParam, llmProvider *models.LLMProviderConfig) (responses.ResponseInputParam, []chatv1.Message, error) { + openaiChatHistory, inappChatHistory, err := a.ChatCompletionStream(ctx, nil, "", modelSlug, messages, llmProvider) if err != nil { return nil, nil, err } @@ -50,11 +50,11 @@ func (a *AIClient) ChatCompletion(ctx context.Context, languageModel models.Lang // - If tool calls are required, it handles them and appends the results to the chat history, then continues the loop. // - If no tool calls are needed, it appends the assistant's response and exits the loop. // - Finally, it returns the updated chat histories and any error encountered. 
-func (a *AIClient) ChatCompletionStream(ctx context.Context, callbackStream chatv1.ChatService_CreateConversationMessageStreamServer, conversationId string, languageModel models.LanguageModel, messages responses.ResponseInputParam, llmProvider *models.LLMProviderConfig) (responses.ResponseInputParam, []chatv1.Message, error) { +func (a *AIClient) ChatCompletionStream(ctx context.Context, callbackStream chatv1.ChatService_CreateConversationMessageStreamServer, conversationId string, modelSlug string, messages responses.ResponseInputParam, llmProvider *models.LLMProviderConfig) (responses.ResponseInputParam, []chatv1.Message, error) { openaiChatHistory := responses.ResponseNewParamsInputUnion{OfInputItemList: messages} inappChatHistory := []chatv1.Message{} - streamHandler := handler.NewStreamHandler(callbackStream, conversationId, languageModel) + streamHandler := handler.NewStreamHandler(callbackStream, conversationId, modelSlug) streamHandler.SendInitialization() defer func() { @@ -62,7 +62,7 @@ func (a *AIClient) ChatCompletionStream(ctx context.Context, callbackStream chat }() oaiClient := a.GetOpenAIClient(llmProvider) - params := getDefaultParams(languageModel, openaiChatHistory, a.toolCallHandler.Registry) + params := getDefaultParams(modelSlug, openaiChatHistory, a.toolCallHandler.Registry) for { params.Input = openaiChatHistory diff --git a/internal/services/toolkit/client/get_conversation_title.go b/internal/services/toolkit/client/get_conversation_title.go index f956bf0d..283e689f 100644 --- a/internal/services/toolkit/client/get_conversation_title.go +++ b/internal/services/toolkit/client/get_conversation_title.go @@ -29,7 +29,7 @@ func (a *AIClient) GetConversationTitle(ctx context.Context, inappChatHistory [] message := strings.Join(messages, "\n") message = fmt.Sprintf("%s\nBased on above conversation, generate a short, clear, and descriptive title that summarizes the main topic or purpose of the discussion. 
The title should be concise, specific, and use natural language. Avoid vague or generic titles. Use abbreviation and short words if possible. Use 3-5 words if possible. Give me the title only, no other text including any other words.", message) - _, resp, err := a.ChatCompletion(ctx, models.LanguageModel(chatv1.LanguageModel_LANGUAGE_MODEL_OPENAI_GPT41_MINI), responses.ResponseInputParam{ + _, resp, err := a.ChatCompletion(ctx, "gpt-4.1-mini", responses.ResponseInputParam{ { OfInputMessage: &responses.ResponseInputItemMessageParam{ Role: "system", diff --git a/internal/services/toolkit/client/utils.go b/internal/services/toolkit/client/utils.go index d2b4d4c1..39b24d87 100644 --- a/internal/services/toolkit/client/utils.go +++ b/internal/services/toolkit/client/utils.go @@ -6,12 +6,12 @@ This file contains utility functions for the client package. (Mainly miscellaneo It is used to append assistant responses to both OpenAI and in-app chat histories, and to create response items for chat interactions. */ import ( - "paperdebugger/internal/models" "paperdebugger/internal/services/toolkit/registry" chatv1 "paperdebugger/pkg/gen/api/chat/v1" "github.com/openai/openai-go/v2" "github.com/openai/openai-go/v2/responses" + "github.com/samber/lo" ) // appendAssistantTextResponse appends the assistant's response to both OpenAI and in-app chat histories. @@ -43,26 +43,30 @@ func appendAssistantTextResponse(openaiChatHistory *responses.ResponseNewParamsI // getDefaultParams constructs the default parameters for a chat completion request. // The tool registry is managed centrally by the registry package. // The chat history is constructed manually, so Store must be set to false. 
-func getDefaultParams(languageModel models.LanguageModel, chatHistory responses.ResponseNewParamsInputUnion, toolRegistry *registry.ToolRegistry) responses.ResponseNewParams { - if languageModel == models.LanguageModel(chatv1.LanguageModel_LANGUAGE_MODEL_OPENAI_GPT5) || - languageModel == models.LanguageModel(chatv1.LanguageModel_LANGUAGE_MODEL_OPENAI_GPT5_MINI) || - languageModel == models.LanguageModel(chatv1.LanguageModel_LANGUAGE_MODEL_OPENAI_GPT5_NANO) || - languageModel == models.LanguageModel(chatv1.LanguageModel_LANGUAGE_MODEL_OPENAI_GPT5_CHAT_LATEST) || - languageModel == models.LanguageModel(chatv1.LanguageModel_LANGUAGE_MODEL_OPENAI_O4_MINI) || - languageModel == models.LanguageModel(chatv1.LanguageModel_LANGUAGE_MODEL_OPENAI_O3_MINI) || - languageModel == models.LanguageModel(chatv1.LanguageModel_LANGUAGE_MODEL_OPENAI_O3) || - languageModel == models.LanguageModel(chatv1.LanguageModel_LANGUAGE_MODEL_OPENAI_O1_MINI) || - languageModel == models.LanguageModel(chatv1.LanguageModel_LANGUAGE_MODEL_OPENAI_O1) || - languageModel == models.LanguageModel(chatv1.LanguageModel_LANGUAGE_MODEL_OPENAI_CODEX_MINI_LATEST) { +func getDefaultParams(modelSlug string, chatHistory responses.ResponseNewParamsInputUnion, toolRegistry *registry.ToolRegistry) responses.ResponseNewParams { + var reasoningModels = []string{ + "gpt-5", + "gpt-5-mini", + "gpt-5-nano", + "gpt-5-chat-latest", + "o4-mini", + "o3-mini", + "o3", + "o1-mini", + "o1", + "codex-mini-latest", + } + if lo.Contains(reasoningModels, modelSlug) { return responses.ResponseNewParams{ - Model: languageModel.Name(), + Model: modelSlug, Tools: toolRegistry.GetTools(), Input: chatHistory, Store: openai.Bool(false), } } + return responses.ResponseNewParams{ - Model: languageModel.Name(), + Model: modelSlug, Temperature: openai.Float(0.7), MaxOutputTokens: openai.Int(4000), // DEBUG POINT: change this to test the frontend handler Tools: toolRegistry.GetTools(), // 工具注册由 registry 统一管理 diff --git 
a/internal/services/toolkit/handler/stream.go b/internal/services/toolkit/handler/stream.go index 78eb9e27..96ca6668 100644 --- a/internal/services/toolkit/handler/stream.go +++ b/internal/services/toolkit/handler/stream.go @@ -10,18 +10,18 @@ import ( type StreamHandler struct { callbackStream chatv1.ChatService_CreateConversationMessageStreamServer conversationId string - languageModel models.LanguageModel + modelSlug string } func NewStreamHandler( callbackStream chatv1.ChatService_CreateConversationMessageStreamServer, conversationId string, - languageModel models.LanguageModel, + modelSlug string, ) *StreamHandler { return &StreamHandler{ callbackStream: callbackStream, conversationId: conversationId, - languageModel: languageModel, + modelSlug: modelSlug, } } @@ -29,11 +29,13 @@ func (h *StreamHandler) SendInitialization() { if h.callbackStream == nil { return } + h.callbackStream.Send(&chatv1.CreateConversationMessageStreamResponse{ ResponsePayload: &chatv1.CreateConversationMessageStreamResponse_StreamInitialization{ StreamInitialization: &chatv1.StreamInitialization{ ConversationId: h.conversationId, - LanguageModel: chatv1.LanguageModel(h.languageModel), + // ModelSlug: h.modelSlug, + LanguageModel: chatv1.LanguageModel(models.LanguageModelFromSlug(h.modelSlug)), // compatible with old code }, }, }) diff --git a/pkg/gen/api/auth/v1/auth.pb.go b/pkg/gen/api/auth/v1/auth.pb.go index 87514ddd..569ea4e8 100644 --- a/pkg/gen/api/auth/v1/auth.pb.go +++ b/pkg/gen/api/auth/v1/auth.pb.go @@ -1,6 +1,6 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: -// protoc-gen-go v1.36.10 +// protoc-gen-go v1.36.11 // protoc (unknown) // source: auth/v1/auth.proto diff --git a/pkg/gen/api/chat/v1/chat.pb.go b/pkg/gen/api/chat/v1/chat.pb.go index 7f048947..662833e0 100644 --- a/pkg/gen/api/chat/v1/chat.pb.go +++ b/pkg/gen/api/chat/v1/chat.pb.go @@ -1,6 +1,6 @@ // Code generated by protoc-gen-go. DO NOT EDIT. 
// versions: -// protoc-gen-go v1.36.10 +// protoc-gen-go v1.36.11 // protoc (unknown) // source: chat/v1/chat.proto @@ -109,7 +109,7 @@ type ConversationType int32 const ( ConversationType_CONVERSATION_TYPE_UNSPECIFIED ConversationType = 0 - ConversationType_CONVERSATION_TYPE_DEBUG ConversationType = 1 // does not contain any customized messages, the inapp_history and openai_history are synced. + ConversationType_CONVERSATION_TYPE_DEBUG ConversationType = 1 // does not contain any customized messages, the ) // Enum value maps for ConversationType. @@ -657,7 +657,8 @@ type Conversation struct { state protoimpl.MessageState `protogen:"open.v1"` Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` Title string `protobuf:"bytes,3,opt,name=title,proto3" json:"title,omitempty"` - LanguageModel LanguageModel `protobuf:"varint,2,opt,name=language_model,json=languageModel,proto3,enum=chat.v1.LanguageModel" json:"language_model,omitempty"` + LanguageModel LanguageModel `protobuf:"varint,2,opt,name=language_model,json=languageModel,proto3,enum=chat.v1.LanguageModel" json:"language_model,omitempty"` // deprecated: use model_slug instead + // string model_slug = 5; // new: model slug string // If list conversations, then messages length is 0. Messages []*Message `protobuf:"bytes,4,rep,name=messages,proto3" json:"messages,omitempty"` unknownFields protoimpl.UnknownFields @@ -905,7 +906,8 @@ type CreateConversationMessageRequest struct { // If conversation_id is not provided, // a new conversation will be created and the id will be returned. 
ConversationId *string `protobuf:"bytes,2,opt,name=conversation_id,json=conversationId,proto3,oneof" json:"conversation_id,omitempty"` - LanguageModel LanguageModel `protobuf:"varint,3,opt,name=language_model,json=languageModel,proto3,enum=chat.v1.LanguageModel" json:"language_model,omitempty"` + LanguageModel LanguageModel `protobuf:"varint,3,opt,name=language_model,json=languageModel,proto3,enum=chat.v1.LanguageModel" json:"language_model,omitempty"` // deprecated: use model_slug instead + ModelSlug string `protobuf:"bytes,7,opt,name=model_slug,json=modelSlug,proto3" json:"model_slug,omitempty"` // new: model slug string UserMessage string `protobuf:"bytes,4,opt,name=user_message,json=userMessage,proto3" json:"user_message,omitempty"` UserSelectedText *string `protobuf:"bytes,5,opt,name=user_selected_text,json=userSelectedText,proto3,oneof" json:"user_selected_text,omitempty"` ConversationType *ConversationType `protobuf:"varint,6,opt,name=conversation_type,json=conversationType,proto3,enum=chat.v1.ConversationType,oneof" json:"conversation_type,omitempty"` @@ -964,6 +966,13 @@ func (x *CreateConversationMessageRequest) GetLanguageModel() LanguageModel { return LanguageModel_LANGUAGE_MODEL_UNSPECIFIED } +func (x *CreateConversationMessageRequest) GetModelSlug() string { + if x != nil { + return x.ModelSlug + } + return "" +} + func (x *CreateConversationMessageRequest) GetUserMessage() string { if x != nil { return x.UserMessage @@ -1341,7 +1350,7 @@ func (x *ListSupportedModelsResponse) GetModels() []*SupportedModel { type StreamInitialization struct { state protoimpl.MessageState `protogen:"open.v1"` ConversationId string `protobuf:"bytes,1,opt,name=conversation_id,json=conversationId,proto3" json:"conversation_id,omitempty"` - LanguageModel LanguageModel `protobuf:"varint,5,opt,name=language_model,json=languageModel,proto3,enum=chat.v1.LanguageModel" json:"language_model,omitempty"` + LanguageModel LanguageModel 
`protobuf:"varint,5,opt,name=language_model,json=languageModel,proto3,enum=chat.v1.LanguageModel" json:"language_model,omitempty"` // deprecated: use model_slug instead unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -1703,7 +1712,8 @@ type CreateConversationMessageStreamRequest struct { state protoimpl.MessageState `protogen:"open.v1"` ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` ConversationId *string `protobuf:"bytes,2,opt,name=conversation_id,json=conversationId,proto3,oneof" json:"conversation_id,omitempty"` - LanguageModel LanguageModel `protobuf:"varint,3,opt,name=language_model,json=languageModel,proto3,enum=chat.v1.LanguageModel" json:"language_model,omitempty"` + LanguageModel LanguageModel `protobuf:"varint,3,opt,name=language_model,json=languageModel,proto3,enum=chat.v1.LanguageModel" json:"language_model,omitempty"` // deprecated: use model_slug instead + ModelSlug string `protobuf:"bytes,7,opt,name=model_slug,json=modelSlug,proto3" json:"model_slug,omitempty"` // new: model slug string UserMessage string `protobuf:"bytes,4,opt,name=user_message,json=userMessage,proto3" json:"user_message,omitempty"` UserSelectedText *string `protobuf:"bytes,5,opt,name=user_selected_text,json=userSelectedText,proto3,oneof" json:"user_selected_text,omitempty"` ConversationType *ConversationType `protobuf:"varint,6,opt,name=conversation_type,json=conversationType,proto3,enum=chat.v1.ConversationType,oneof" json:"conversation_type,omitempty"` @@ -1762,6 +1772,13 @@ func (x *CreateConversationMessageStreamRequest) GetLanguageModel() LanguageMode return LanguageModel_LANGUAGE_MODEL_UNSPECIFIED } +func (x *CreateConversationMessageStreamRequest) GetModelSlug() string { + if x != nil { + return x.ModelSlug + } + return "" +} + func (x *CreateConversationMessageStreamRequest) GetUserMessage() string { if x != nil { return x.UserMessage @@ -2002,12 +2019,14 @@ const 
file_chat_v1_chat_proto_rawDesc = "" + "\x16GetConversationRequest\x12'\n" + "\x0fconversation_id\x18\x01 \x01(\tR\x0econversationId\"T\n" + "\x17GetConversationResponse\x129\n" + - "\fconversation\x18\x01 \x01(\v2\x15.chat.v1.ConversationR\fconversation\"\x92\x03\n" + + "\fconversation\x18\x01 \x01(\v2\x15.chat.v1.ConversationR\fconversation\"\xb1\x03\n" + " CreateConversationMessageRequest\x12\x1d\n" + "\n" + "project_id\x18\x01 \x01(\tR\tprojectId\x12,\n" + "\x0fconversation_id\x18\x02 \x01(\tH\x00R\x0econversationId\x88\x01\x01\x12=\n" + - "\x0elanguage_model\x18\x03 \x01(\x0e2\x16.chat.v1.LanguageModelR\rlanguageModel\x12!\n" + + "\x0elanguage_model\x18\x03 \x01(\x0e2\x16.chat.v1.LanguageModelR\rlanguageModel\x12\x1d\n" + + "\n" + + "model_slug\x18\a \x01(\tR\tmodelSlug\x12!\n" + "\fuser_message\x18\x04 \x01(\tR\vuserMessage\x121\n" + "\x12user_selected_text\x18\x05 \x01(\tH\x01R\x10userSelectedText\x88\x01\x01\x12K\n" + "\x11conversation_type\x18\x06 \x01(\x0e2\x19.chat.v1.ConversationTypeH\x02R\x10conversationType\x88\x01\x01B\x12\n" + @@ -2052,12 +2071,14 @@ const file_chat_v1_chat_proto_rawDesc = "" + "\x12StreamFinalization\x12'\n" + "\x0fconversation_id\x18\x01 \x01(\tR\x0econversationId\"2\n" + "\vStreamError\x12#\n" + - "\rerror_message\x18\x01 \x01(\tR\ferrorMessage\"\x98\x03\n" + + "\rerror_message\x18\x01 \x01(\tR\ferrorMessage\"\xb7\x03\n" + "&CreateConversationMessageStreamRequest\x12\x1d\n" + "\n" + "project_id\x18\x01 \x01(\tR\tprojectId\x12,\n" + "\x0fconversation_id\x18\x02 \x01(\tH\x00R\x0econversationId\x88\x01\x01\x12=\n" + - "\x0elanguage_model\x18\x03 \x01(\x0e2\x16.chat.v1.LanguageModelR\rlanguageModel\x12!\n" + + "\x0elanguage_model\x18\x03 \x01(\x0e2\x16.chat.v1.LanguageModelR\rlanguageModel\x12\x1d\n" + + "\n" + + "model_slug\x18\a \x01(\tR\tmodelSlug\x12!\n" + "\fuser_message\x18\x04 \x01(\tR\vuserMessage\x121\n" + "\x12user_selected_text\x18\x05 \x01(\tH\x01R\x10userSelectedText\x88\x01\x01\x12K\n" + "\x11conversation_type\x18\x06 
\x01(\x0e2\x19.chat.v1.ConversationTypeH\x02R\x10conversationType\x88\x01\x01B\x12\n" + diff --git a/pkg/gen/api/comment/v1/comment.pb.go b/pkg/gen/api/comment/v1/comment.pb.go index 8daf2720..b19607bd 100644 --- a/pkg/gen/api/comment/v1/comment.pb.go +++ b/pkg/gen/api/comment/v1/comment.pb.go @@ -1,6 +1,6 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: -// protoc-gen-go v1.36.10 +// protoc-gen-go v1.36.11 // protoc (unknown) // source: comment/v1/comment.proto diff --git a/pkg/gen/api/project/v1/project.pb.go b/pkg/gen/api/project/v1/project.pb.go index f67566ca..99113e09 100644 --- a/pkg/gen/api/project/v1/project.pb.go +++ b/pkg/gen/api/project/v1/project.pb.go @@ -1,6 +1,6 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: -// protoc-gen-go v1.36.10 +// protoc-gen-go v1.36.11 // protoc (unknown) // source: project/v1/project.proto diff --git a/pkg/gen/api/shared/v1/shared.pb.go b/pkg/gen/api/shared/v1/shared.pb.go index 58d084f2..5c3eb7c8 100644 --- a/pkg/gen/api/shared/v1/shared.pb.go +++ b/pkg/gen/api/shared/v1/shared.pb.go @@ -1,6 +1,6 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: -// protoc-gen-go v1.36.10 +// protoc-gen-go v1.36.11 // protoc (unknown) // source: shared/v1/shared.proto diff --git a/pkg/gen/api/user/v1/user.pb.go b/pkg/gen/api/user/v1/user.pb.go index 85603cf2..c54615c4 100644 --- a/pkg/gen/api/user/v1/user.pb.go +++ b/pkg/gen/api/user/v1/user.pb.go @@ -1,6 +1,6 @@ // Code generated by protoc-gen-go. DO NOT EDIT. 
// versions: -// protoc-gen-go v1.36.10 +// protoc-gen-go v1.36.11 // protoc (unknown) // source: user/v1/user.proto diff --git a/proto/chat/v1/chat.proto b/proto/chat/v1/chat.proto index ab8b7e12..b8d6f5b9 100644 --- a/proto/chat/v1/chat.proto +++ b/proto/chat/v1/chat.proto @@ -105,7 +105,8 @@ message Message { message Conversation { string id = 1; string title = 3; - LanguageModel language_model = 2; + LanguageModel language_model = 2; // deprecated: use model_slug instead + // string model_slug = 5; // new: model slug string // If list conversations, then messages length is 0. repeated Message messages = 4; } @@ -132,8 +133,8 @@ message CreateConversationMessageRequest { // If conversation_id is not provided, // a new conversation will be created and the id will be returned. optional string conversation_id = 2; - LanguageModel language_model = 3; - + LanguageModel language_model = 3; // deprecated: use model_slug instead + string model_slug = 7; // new: model slug string string user_message = 4; optional string user_selected_text = 5; optional ConversationType conversation_type = 6; @@ -178,7 +179,8 @@ message ListSupportedModelsResponse { // Information sent once at the beginning of a new conversation stream message StreamInitialization { string conversation_id = 1; - LanguageModel language_model = 5; + LanguageModel language_model = 5; // deprecated: use model_slug instead + // string model_slug = 6; // new: model slug string } // Designed as StreamPartBegin and StreamPartEnd to @@ -227,7 +229,8 @@ message StreamError { enum ConversationType { CONVERSATION_TYPE_UNSPECIFIED = 0; - CONVERSATION_TYPE_DEBUG = 1; // does not contain any customized messages, the inapp_history and openai_history are synced. + CONVERSATION_TYPE_DEBUG = 1; // does not contain any customized messages, + // the inapp_history and openai_history are synced.
// CONVERSATION_TYPE_NO_SYSTEM_MESSAGE_INJECTION = 2; // CONVERSATION_TYPE_NO_USER_MESSAGE_INJECTION = 3; } @@ -238,7 +241,8 @@ enum ConversationType { message CreateConversationMessageStreamRequest { string project_id = 1; optional string conversation_id = 2; - LanguageModel language_model = 3; + LanguageModel language_model = 3; // deprecated: use model_slug instead + string model_slug = 7; // new: model slug string string user_message = 4; optional string user_selected_text = 5; optional ConversationType conversation_type = 6; diff --git a/webapp/_webapp/package.json b/webapp/_webapp/package.json index 8b08c29e..42fb6315 100644 --- a/webapp/_webapp/package.json +++ b/webapp/_webapp/package.json @@ -74,4 +74,4 @@ "typescript-eslint": "^8.33.0", "vite": "^6.3.5" } -} \ No newline at end of file +} diff --git a/webapp/_webapp/src/components/message-entry-container/tools/jsonrpc.tsx b/webapp/_webapp/src/components/message-entry-container/tools/jsonrpc.tsx index 6ab12bb2..86474e2e 100644 --- a/webapp/_webapp/src/components/message-entry-container/tools/jsonrpc.tsx +++ b/webapp/_webapp/src/components/message-entry-container/tools/jsonrpc.tsx @@ -8,7 +8,6 @@ type JsonRpcProps = { }; export const JsonRpc = ({ functionName, preparing, animated }: JsonRpcProps) => { - if (preparing) { return (