diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index f561ce52..90e507f0 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -22,6 +22,7 @@ jobs: run: | export PD_API_ENDPOINT=https://app.paperdebugger.com export BETA_BUILD=false + export GRAFANA_API_KEY=${{ secrets.GRAFANA_API_KEY }} cd webapp/_webapp npm install npm run build diff --git a/go.mod b/go.mod index e82a40e8..4dc59a93 100644 --- a/go.mod +++ b/go.mod @@ -13,7 +13,7 @@ require ( github.com/google/wire v0.7.0 github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.2 github.com/joho/godotenv v1.5.1 - github.com/openai/openai-go/v2 v2.1.1 + github.com/openai/openai-go/v2 v2.7.1 github.com/samber/lo v1.51.0 github.com/stretchr/testify v1.10.0 go.mongodb.org/mongo-driver/v2 v2.3.0 diff --git a/go.sum b/go.sum index 92e81ad0..41824e06 100644 --- a/go.sum +++ b/go.sum @@ -88,8 +88,8 @@ github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9G github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= github.com/muesli/termenv v0.16.0 h1:S5AlUN9dENB57rsbnkPyfdGuWIlkmzJjbFf0Tf5FWUc= github.com/muesli/termenv v0.16.0/go.mod h1:ZRfOIKPFDYQoDFF4Olj7/QJbW60Ol/kL1pU3VfY/Cnk= -github.com/openai/openai-go/v2 v2.1.1 h1:/RMA/V3D+yF/Cc4jHXFt6lkqSOWRf5roRi+DvZaDYQI= -github.com/openai/openai-go/v2 v2.1.1/go.mod h1:sIUkR+Cu/PMUVkSKhkk742PRURkQOCFhiwJ7eRSBqmk= +github.com/openai/openai-go/v2 v2.7.1 h1:/tfvTJhfv7hTSL8mWwc5VL4WLLSDL5yn9VqVykdu9r8= +github.com/openai/openai-go/v2 v2.7.1/go.mod h1:jrJs23apqJKKbT+pqtFgNKpRju/KP9zpUTZhz3GElQE= github.com/pelletier/go-toml/v2 v2.2.4 h1:mye9XuhQ6gvn5h28+VilKrrPoQVanw5PMw/TB0t5Ec4= github.com/pelletier/go-toml/v2 v2.2.4/go.mod h1:2gIqNv+qfxSVS7cM2xJQKtLSTLUE9V8t9Stt+h56mCY= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= diff --git a/internal/api/chat/create_conversation_message.go b/internal/api/chat/create_conversation_message.go index 
ae6c7fb7..9f78a2ac 100644 --- a/internal/api/chat/create_conversation_message.go +++ b/internal/api/chat/create_conversation_message.go @@ -180,20 +180,20 @@ func (s *ChatServer) appendConversationMessage( // 如果 conversationId 是 "", 就创建新对话,否则就追加消息到对话 // conversationType 可以在一次 conversation 中多次切换 -func (s *ChatServer) prepare(ctx context.Context, projectId string, conversationId string, userMessage string, userSelectedText string, languageModel models.LanguageModel, conversationType chatv1.ConversationType) (context.Context, *models.Conversation, error) { +func (s *ChatServer) prepare(ctx context.Context, projectId string, conversationId string, userMessage string, userSelectedText string, languageModel models.LanguageModel, conversationType chatv1.ConversationType) (context.Context, *models.Conversation, *models.Settings, error) { actor, err := contextutil.GetActor(ctx) if err != nil { - return ctx, nil, err + return ctx, nil, nil, err } project, err := s.projectService.GetProject(ctx, actor.ID, projectId) if err != nil && err != mongo.ErrNoDocuments { - return ctx, nil, err + return ctx, nil, nil, err } userInstructions, err := s.userService.GetUserInstructions(ctx, actor.ID) if err != nil { - return ctx, nil, err + return ctx, nil, nil, err } var latexFullSource string @@ -202,12 +202,12 @@ func (s *ChatServer) prepare(ctx context.Context, projectId string, conversation latexFullSource = "latex_full_source is not available in debug mode" default: if project == nil || project.IsOutOfDate() { - return ctx, nil, shared.ErrProjectOutOfDate("project is out of date") + return ctx, nil, nil, shared.ErrProjectOutOfDate("project is out of date") } latexFullSource, err = project.GetFullContent() if err != nil { - return ctx, nil, err + return ctx, nil, nil, err } } @@ -238,13 +238,18 @@ func (s *ChatServer) prepare(ctx context.Context, projectId string, conversation } if err != nil { - return ctx, nil, err + return ctx, nil, nil, err } ctx = contextutil.SetProjectID(ctx, 
conversation.ProjectID) ctx = contextutil.SetConversationID(ctx, conversation.ID.Hex()) - return ctx, conversation, nil + settings, err := s.userService.GetUserSettings(ctx, actor.ID) + if err != nil { + return ctx, conversation, nil, err + } + + return ctx, conversation, settings, nil } // Deprecated: Use CreateConversationMessageStream instead. @@ -252,20 +257,25 @@ func (s *ChatServer) CreateConversationMessage( ctx context.Context, req *chatv1.CreateConversationMessageRequest, ) (*chatv1.CreateConversationMessageResponse, error) { - ctx, conversation, err := s.prepare( + languageModel := models.LanguageModel(req.GetLanguageModel()) + ctx, conversation, settings, err := s.prepare( ctx, req.GetProjectId(), req.GetConversationId(), req.GetUserMessage(), req.GetUserSelectedText(), - models.LanguageModel(req.GetLanguageModel()), + languageModel, req.GetConversationType(), ) if err != nil { return nil, err } - openaiChatHistory, inappChatHistory, err := s.aiClient.ChatCompletion(ctx, conversation.LanguageModel, conversation.OpenaiChatHistory) + llmProvider := &models.LLMProviderConfig{ + Endpoint: s.cfg.OpenAIBaseURL, + APIKey: settings.OpenAIAPIKey, + } + openaiChatHistory, inappChatHistory, err := s.aiClient.ChatCompletion(ctx, languageModel, conversation.OpenaiChatHistory, llmProvider) if err != nil { return nil, err } @@ -290,7 +300,7 @@ func (s *ChatServer) CreateConversationMessage( for i, bsonMsg := range conversation.InappChatHistory { protoMessages[i] = mapper.BSONToChatMessage(bsonMsg) } - title, err := s.aiClient.GetConversationTitle(ctx, protoMessages) + title, err := s.aiClient.GetConversationTitle(ctx, protoMessages, llmProvider) if err != nil { s.logger.Error("Failed to get conversation title", "error", err, "conversationID", conversation.ID.Hex()) return diff --git a/internal/api/chat/create_conversation_message_stream.go b/internal/api/chat/create_conversation_message_stream.go index 0f971c54..0e659a28 100644 --- 
a/internal/api/chat/create_conversation_message_stream.go +++ b/internal/api/chat/create_conversation_message_stream.go @@ -24,13 +24,15 @@ func (s *ChatServer) CreateConversationMessageStream( stream chatv1.ChatService_CreateConversationMessageStreamServer, ) error { ctx := stream.Context() - ctx, conversation, err := s.prepare( + + languageModel := models.LanguageModel(req.GetLanguageModel()) + ctx, conversation, settings, err := s.prepare( ctx, req.GetProjectId(), req.GetConversationId(), req.GetUserMessage(), req.GetUserSelectedText(), - models.LanguageModel(req.GetLanguageModel()), + languageModel, req.GetConversationType(), ) if err != nil { @@ -38,7 +40,12 @@ func (s *ChatServer) CreateConversationMessageStream( } // 用法跟 ChatCompletion 一样,只是传递了 stream 参数 - openaiChatHistory, inappChatHistory, err := s.aiClient.ChatCompletionStream(ctx, stream, conversation.ID.Hex(), conversation.LanguageModel, conversation.OpenaiChatHistory) + llmProvider := &models.LLMProviderConfig{ + Endpoint: s.cfg.OpenAIBaseURL, + APIKey: settings.OpenAIAPIKey, + } + + openaiChatHistory, inappChatHistory, err := s.aiClient.ChatCompletionStream(ctx, stream, conversation.ID.Hex(), languageModel, conversation.OpenaiChatHistory, llmProvider) if err != nil { return s.sendStreamError(stream, err) } @@ -64,7 +71,7 @@ func (s *ChatServer) CreateConversationMessageStream( for i, bsonMsg := range conversation.InappChatHistory { protoMessages[i] = mapper.BSONToChatMessage(bsonMsg) } - title, err := s.aiClient.GetConversationTitle(ctx, protoMessages) + title, err := s.aiClient.GetConversationTitle(ctx, protoMessages, llmProvider) if err != nil { s.logger.Error("Failed to get conversation title", "error", err, "conversationID", conversation.ID.Hex()) return diff --git a/internal/api/chat/list_supported_models.go b/internal/api/chat/list_supported_models.go new file mode 100644 index 00000000..cf032b55 --- /dev/null +++ b/internal/api/chat/list_supported_models.go @@ -0,0 +1,104 @@ +package chat + 
+import ( + "context" + "strings" + + "paperdebugger/internal/libs/contextutil" + chatv1 "paperdebugger/pkg/gen/api/chat/v1" + + "github.com/openai/openai-go/v2" +) + +func (s *ChatServer) ListSupportedModels( + ctx context.Context, + req *chatv1.ListSupportedModelsRequest, +) (*chatv1.ListSupportedModelsResponse, error) { + actor, err := contextutil.GetActor(ctx) + if err != nil { + return nil, err + } + + settings, err := s.userService.GetUserSettings(ctx, actor.ID) + if err != nil { + return nil, err + } + + var models []*chatv1.SupportedModel + if strings.TrimSpace(settings.OpenAIAPIKey) == "" { + models = []*chatv1.SupportedModel{ + { + + Name: "GPT-4o", + Slug: openai.ChatModelGPT4o, + }, + { + Name: "GPT-4.1", + Slug: openai.ChatModelGPT4_1, + }, + { + Name: "GPT-4.1-mini", + Slug: openai.ChatModelGPT4_1Mini, + }, + } + } else { + models = []*chatv1.SupportedModel{ + { + Name: "GPT 4o", + Slug: openai.ChatModelGPT4o, + }, + { + Name: "GPT 4.1", + Slug: openai.ChatModelGPT4_1, + }, + { + Name: "GPT 4.1 mini", + Slug: openai.ChatModelGPT4_1Mini, + }, + { + Name: "GPT 5", + Slug: openai.ChatModelGPT5, + }, + { + Name: "GPT 5 mini", + Slug: openai.ChatModelGPT5Mini, + }, + { + Name: "GPT 5 nano", + Slug: openai.ChatModelGPT5Nano, + }, + { + Name: "GPT 5 Chat Latest", + Slug: openai.ChatModelGPT5ChatLatest, + }, + { + Name: "o1", + Slug: openai.ChatModelO1, + }, + { + Name: "o1 mini", + Slug: openai.ChatModelO1Mini, + }, + { + Name: "o3", + Slug: openai.ChatModelO3, + }, + { + Name: "o3 mini", + Slug: openai.ChatModelO3Mini, + }, + { + Name: "o4 mini", + Slug: openai.ChatModelO4Mini, + }, + { + Name: "Codex Mini Latest", + Slug: openai.ChatModelCodexMiniLatest, + }, + } + } + + return &chatv1.ListSupportedModelsResponse{ + Models: models, + }, nil +} diff --git a/internal/api/mapper/user.go b/internal/api/mapper/user.go index 02e348a7..a7fa8538 100644 --- a/internal/api/mapper/user.go +++ b/internal/api/mapper/user.go @@ -12,6 +12,7 @@ func 
MapProtoSettingsToModel(settings *userv1.Settings) *models.Settings { EnableCompletion: settings.EnableCompletion, FullDocumentRag: settings.FullDocumentRag, ShowedOnboarding: settings.ShowedOnboarding, + OpenAIAPIKey: settings.OpenaiApiKey, } } @@ -22,5 +23,6 @@ func MapModelSettingsToProto(settings *models.Settings) *userv1.Settings { EnableCompletion: settings.EnableCompletion, FullDocumentRag: settings.FullDocumentRag, ShowedOnboarding: settings.ShowedOnboarding, + OpenaiApiKey: settings.OpenAIAPIKey, } } diff --git a/internal/models/language_model.go b/internal/models/language_model.go index 35f643e8..7f1e8df0 100644 --- a/internal/models/language_model.go +++ b/internal/models/language_model.go @@ -38,6 +38,20 @@ func (x LanguageModel) Name() string { return openai.ChatModelGPT5Mini case chatv1.LanguageModel_LANGUAGE_MODEL_OPENAI_GPT5_NANO: return openai.ChatModelGPT5Nano + case chatv1.LanguageModel_LANGUAGE_MODEL_OPENAI_GPT5_CHAT_LATEST: + return openai.ChatModelGPT5ChatLatest + case chatv1.LanguageModel_LANGUAGE_MODEL_OPENAI_O1: + return openai.ChatModelO1 + case chatv1.LanguageModel_LANGUAGE_MODEL_OPENAI_O1_MINI: + return openai.ChatModelO1Mini + case chatv1.LanguageModel_LANGUAGE_MODEL_OPENAI_O3: + return openai.ChatModelO3 + case chatv1.LanguageModel_LANGUAGE_MODEL_OPENAI_O3_MINI: + return openai.ChatModelO3Mini + case chatv1.LanguageModel_LANGUAGE_MODEL_OPENAI_O4_MINI: + return openai.ChatModelO4Mini + case chatv1.LanguageModel_LANGUAGE_MODEL_OPENAI_CODEX_MINI_LATEST: + return openai.ChatModelCodexMiniLatest default: return openai.ChatModelGPT5 } diff --git a/internal/models/llm_provider.go b/internal/models/llm_provider.go new file mode 100644 index 00000000..06f6b0e5 --- /dev/null +++ b/internal/models/llm_provider.go @@ -0,0 +1,14 @@ +package models + +// LLMProviderConfig holds the configuration for LLM API calls. +// If both Endpoint and APIKey are empty, the system default will be used. 
+type LLMProviderConfig struct { + Endpoint string + APIKey string + ModelName string +} + +// IsCustom returns true if the user has configured custom LLM provider settings. +func (c *LLMProviderConfig) IsCustom() bool { + return c != nil && c.APIKey != "" +} diff --git a/internal/models/user.go b/internal/models/user.go index bae43f43..c9bd1509 100644 --- a/internal/models/user.go +++ b/internal/models/user.go @@ -3,11 +3,12 @@ package models import "go.mongodb.org/mongo-driver/v2/bson" type Settings struct { - ShowShortcutsAfterSelection bool `bson:"show_shortcuts_after_selection"` - FullWidthPaperDebuggerButton bool `bson:"full_width_paper_debugger_button"` - EnableCompletion bool `bson:"enable_completion"` - FullDocumentRag bool `bson:"full_document_rag"` - ShowedOnboarding bool `bson:"showed_onboarding"` + ShowShortcutsAfterSelection bool `bson:"show_shortcuts_after_selection"` + FullWidthPaperDebuggerButton bool `bson:"full_width_paper_debugger_button"` + EnableCompletion bool `bson:"enable_completion"` + FullDocumentRag bool `bson:"full_document_rag"` + ShowedOnboarding bool `bson:"showed_onboarding"` + OpenAIAPIKey string `bson:"openai_api_key"` } type User struct { diff --git a/internal/services/toolkit/client/client.go b/internal/services/toolkit/client/client.go index e6c111da..68599397 100644 --- a/internal/services/toolkit/client/client.go +++ b/internal/services/toolkit/client/client.go @@ -17,7 +17,6 @@ import ( ) type AIClient struct { - openaiClient *openai.Client toolCallHandler *handler.ToolCallHandler db *mongo.Database @@ -29,6 +28,29 @@ type AIClient struct { logger *logger.Logger } +// GetOpenAIClient returns the appropriate OpenAI client based on the LLM provider config. +// If the config specifies a custom endpoint and API key, a new client is created for that endpoint.
+func (a *AIClient) GetOpenAIClient(llmConfig *models.LLMProviderConfig) *openai.Client { + var Endpoint string = llmConfig.Endpoint + var APIKey string = llmConfig.APIKey + + if Endpoint == "" { + Endpoint = a.cfg.OpenAIBaseURL + } + + if APIKey == "" { + APIKey = a.cfg.OpenAIAPIKey + } + + opts := []option.RequestOption{ + option.WithAPIKey(APIKey), + option.WithBaseURL(Endpoint), + } + + client := openai.NewClient(opts...) + return &client +} + func NewAIClient( db *db.DB, @@ -73,7 +95,6 @@ func NewAIClient( toolCallHandler := handler.NewToolCallHandler(toolRegistry) client := &AIClient{ - openaiClient: &oaiClient, toolCallHandler: toolCallHandler, db: database, diff --git a/internal/services/toolkit/client/completion.go b/internal/services/toolkit/client/completion.go index bb2340db..6bc73b88 100644 --- a/internal/services/toolkit/client/completion.go +++ b/internal/services/toolkit/client/completion.go @@ -21,8 +21,8 @@ import ( // 1. The full chat history sent to the language model (including any tool call results). // 2. The incremental chat history visible to the user (including tool call results and assistant responses). // 3. An error, if any occurred during the process. 
-func (a *AIClient) ChatCompletion(ctx context.Context, languageModel models.LanguageModel, messages responses.ResponseInputParam) (responses.ResponseInputParam, []chatv1.Message, error) { - openaiChatHistory, inappChatHistory, err := a.ChatCompletionStream(ctx, nil, "", languageModel, messages) +func (a *AIClient) ChatCompletion(ctx context.Context, languageModel models.LanguageModel, messages responses.ResponseInputParam, llmProvider *models.LLMProviderConfig) (responses.ResponseInputParam, []chatv1.Message, error) { + openaiChatHistory, inappChatHistory, err := a.ChatCompletionStream(ctx, nil, "", languageModel, messages, llmProvider) if err != nil { return nil, nil, err } @@ -50,7 +50,7 @@ func (a *AIClient) ChatCompletion(ctx context.Context, languageModel models.Lang // - If tool calls are required, it handles them and appends the results to the chat history, then continues the loop. // - If no tool calls are needed, it appends the assistant's response and exits the loop. // - Finally, it returns the updated chat histories and any error encountered. 
-func (a *AIClient) ChatCompletionStream(ctx context.Context, callbackStream chatv1.ChatService_CreateConversationMessageStreamServer, conversationId string, languageModel models.LanguageModel, messages responses.ResponseInputParam) (responses.ResponseInputParam, []chatv1.Message, error) { +func (a *AIClient) ChatCompletionStream(ctx context.Context, callbackStream chatv1.ChatService_CreateConversationMessageStreamServer, conversationId string, languageModel models.LanguageModel, messages responses.ResponseInputParam, llmProvider *models.LLMProviderConfig) (responses.ResponseInputParam, []chatv1.Message, error) { openaiChatHistory := responses.ResponseNewParamsInputUnion{OfInputItemList: messages} inappChatHistory := []chatv1.Message{} @@ -61,12 +61,13 @@ func (a *AIClient) ChatCompletionStream(ctx context.Context, callbackStream chat streamHandler.SendFinalization() }() + oaiClient := a.GetOpenAIClient(llmProvider) params := getDefaultParams(languageModel, openaiChatHistory, a.toolCallHandler.Registry) for { params.Input = openaiChatHistory var openaiOutput []responses.ResponseOutputItemUnion - stream := a.openaiClient.Responses.NewStreaming(context.Background(), params) + stream := oaiClient.Responses.NewStreaming(context.Background(), params) for stream.Next() { // time.Sleep(200 * time.Millisecond) // DEBUG POINT: change this to test in a slow mode diff --git a/internal/services/toolkit/client/get_conversation_title.go b/internal/services/toolkit/client/get_conversation_title.go index 3e0e9892..f956bf0d 100644 --- a/internal/services/toolkit/client/get_conversation_title.go +++ b/internal/services/toolkit/client/get_conversation_title.go @@ -13,7 +13,7 @@ import ( "github.com/samber/lo" ) -func (a *AIClient) GetConversationTitle(ctx context.Context, inappChatHistory []*chatv1.Message) (string, error) { +func (a *AIClient) GetConversationTitle(ctx context.Context, inappChatHistory []*chatv1.Message, llmProvider *models.LLMProviderConfig) (string, error) { 
messages := lo.Map(inappChatHistory, func(message *chatv1.Message, _ int) string { if _, ok := message.Payload.MessageType.(*chatv1.MessagePayload_Assistant); ok { return fmt.Sprintf("Assistant: %s", message.Payload.GetAssistant().GetContent()) @@ -46,7 +46,7 @@ func (a *AIClient) GetConversationTitle(ctx context.Context, inappChatHistory [] }, }, }, - }) + }, llmProvider) if err != nil { return "", err } diff --git a/internal/services/toolkit/client/utils.go b/internal/services/toolkit/client/utils.go index 6505d8f5..d2b4d4c1 100644 --- a/internal/services/toolkit/client/utils.go +++ b/internal/services/toolkit/client/utils.go @@ -46,7 +46,14 @@ func appendAssistantTextResponse(openaiChatHistory *responses.ResponseNewParamsI func getDefaultParams(languageModel models.LanguageModel, chatHistory responses.ResponseNewParamsInputUnion, toolRegistry *registry.ToolRegistry) responses.ResponseNewParams { if languageModel == models.LanguageModel(chatv1.LanguageModel_LANGUAGE_MODEL_OPENAI_GPT5) || languageModel == models.LanguageModel(chatv1.LanguageModel_LANGUAGE_MODEL_OPENAI_GPT5_MINI) || - languageModel == models.LanguageModel(chatv1.LanguageModel_LANGUAGE_MODEL_OPENAI_GPT5_NANO) { + languageModel == models.LanguageModel(chatv1.LanguageModel_LANGUAGE_MODEL_OPENAI_GPT5_NANO) || + languageModel == models.LanguageModel(chatv1.LanguageModel_LANGUAGE_MODEL_OPENAI_GPT5_CHAT_LATEST) || + languageModel == models.LanguageModel(chatv1.LanguageModel_LANGUAGE_MODEL_OPENAI_O4_MINI) || + languageModel == models.LanguageModel(chatv1.LanguageModel_LANGUAGE_MODEL_OPENAI_O3_MINI) || + languageModel == models.LanguageModel(chatv1.LanguageModel_LANGUAGE_MODEL_OPENAI_O3) || + languageModel == models.LanguageModel(chatv1.LanguageModel_LANGUAGE_MODEL_OPENAI_O1_MINI) || + languageModel == models.LanguageModel(chatv1.LanguageModel_LANGUAGE_MODEL_OPENAI_O1) || + languageModel == models.LanguageModel(chatv1.LanguageModel_LANGUAGE_MODEL_OPENAI_CODEX_MINI_LATEST) { return 
responses.ResponseNewParams{ Model: languageModel.Name(), Tools: toolRegistry.GetTools(), diff --git a/pkg/gen/api/auth/v1/auth.pb.go b/pkg/gen/api/auth/v1/auth.pb.go index 988944a9..87514ddd 100644 --- a/pkg/gen/api/auth/v1/auth.pb.go +++ b/pkg/gen/api/auth/v1/auth.pb.go @@ -1,6 +1,6 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: -// protoc-gen-go v1.36.8 +// protoc-gen-go v1.36.10 // protoc (unknown) // source: auth/v1/auth.proto diff --git a/pkg/gen/api/auth/v1/auth_grpc.pb.go b/pkg/gen/api/auth/v1/auth_grpc.pb.go index 06bca21d..3b72abb0 100644 --- a/pkg/gen/api/auth/v1/auth_grpc.pb.go +++ b/pkg/gen/api/auth/v1/auth_grpc.pb.go @@ -1,6 +1,6 @@ // Code generated by protoc-gen-go-grpc. DO NOT EDIT. // versions: -// - protoc-gen-go-grpc v1.5.1 +// - protoc-gen-go-grpc v1.6.0 // - protoc (unknown) // source: auth/v1/auth.proto @@ -102,16 +102,16 @@ type AuthServiceServer interface { type UnimplementedAuthServiceServer struct{} func (UnimplementedAuthServiceServer) LoginByGoogle(context.Context, *LoginByGoogleRequest) (*LoginByGoogleResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method LoginByGoogle not implemented") + return nil, status.Error(codes.Unimplemented, "method LoginByGoogle not implemented") } func (UnimplementedAuthServiceServer) LoginByOverleaf(context.Context, *LoginByOverleafRequest) (*LoginByOverleafResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method LoginByOverleaf not implemented") + return nil, status.Error(codes.Unimplemented, "method LoginByOverleaf not implemented") } func (UnimplementedAuthServiceServer) RefreshToken(context.Context, *RefreshTokenRequest) (*RefreshTokenResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method RefreshToken not implemented") + return nil, status.Error(codes.Unimplemented, "method RefreshToken not implemented") } func (UnimplementedAuthServiceServer) Logout(context.Context, *LogoutRequest) (*LogoutResponse, error) { - return nil, 
status.Errorf(codes.Unimplemented, "method Logout not implemented") + return nil, status.Error(codes.Unimplemented, "method Logout not implemented") } func (UnimplementedAuthServiceServer) mustEmbedUnimplementedAuthServiceServer() {} func (UnimplementedAuthServiceServer) testEmbeddedByValue() {} @@ -124,7 +124,7 @@ type UnsafeAuthServiceServer interface { } func RegisterAuthServiceServer(s grpc.ServiceRegistrar, srv AuthServiceServer) { - // If the following call pancis, it indicates UnimplementedAuthServiceServer was + // If the following call panics, it indicates UnimplementedAuthServiceServer was // embedded by pointer and is nil. This will cause panics if an // unimplemented method is ever invoked, so we test this at initialization // time to prevent it from happening at runtime later due to I/O. diff --git a/pkg/gen/api/chat/v1/chat.pb.go b/pkg/gen/api/chat/v1/chat.pb.go index dbe52330..7f048947 100644 --- a/pkg/gen/api/chat/v1/chat.pb.go +++ b/pkg/gen/api/chat/v1/chat.pb.go @@ -1,6 +1,6 @@ // Code generated by protoc-gen-go. DO NOT EDIT. 
// versions: -// protoc-gen-go v1.36.8 +// protoc-gen-go v1.36.10 // protoc (unknown) // source: chat/v1/chat.proto @@ -22,37 +22,59 @@ const ( _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) ) +// deprecated type LanguageModel int32 const ( - LanguageModel_LANGUAGE_MODEL_UNSPECIFIED LanguageModel = 0 - LanguageModel_LANGUAGE_MODEL_OPENAI_GPT4O LanguageModel = 1 - LanguageModel_LANGUAGE_MODEL_OPENAI_GPT41_MINI LanguageModel = 2 - LanguageModel_LANGUAGE_MODEL_OPENAI_GPT41 LanguageModel = 4 - LanguageModel_LANGUAGE_MODEL_OPENAI_GPT5 LanguageModel = 7 - LanguageModel_LANGUAGE_MODEL_OPENAI_GPT5_MINI LanguageModel = 8 - LanguageModel_LANGUAGE_MODEL_OPENAI_GPT5_NANO LanguageModel = 9 + LanguageModel_LANGUAGE_MODEL_UNSPECIFIED LanguageModel = 0 + LanguageModel_LANGUAGE_MODEL_OPENAI_GPT4O LanguageModel = 1 + LanguageModel_LANGUAGE_MODEL_OPENAI_GPT41_MINI LanguageModel = 2 + LanguageModel_LANGUAGE_MODEL_OPENAI_GPT41 LanguageModel = 4 + LanguageModel_LANGUAGE_MODEL_OPENAI_GPT5 LanguageModel = 7 + LanguageModel_LANGUAGE_MODEL_OPENAI_GPT5_MINI LanguageModel = 8 + LanguageModel_LANGUAGE_MODEL_OPENAI_GPT5_NANO LanguageModel = 9 + LanguageModel_LANGUAGE_MODEL_OPENAI_GPT5_CHAT_LATEST LanguageModel = 10 + LanguageModel_LANGUAGE_MODEL_OPENAI_O1 LanguageModel = 11 + LanguageModel_LANGUAGE_MODEL_OPENAI_O1_MINI LanguageModel = 12 + LanguageModel_LANGUAGE_MODEL_OPENAI_O3 LanguageModel = 13 + LanguageModel_LANGUAGE_MODEL_OPENAI_O3_MINI LanguageModel = 14 + LanguageModel_LANGUAGE_MODEL_OPENAI_O4_MINI LanguageModel = 15 + LanguageModel_LANGUAGE_MODEL_OPENAI_CODEX_MINI_LATEST LanguageModel = 16 ) // Enum value maps for LanguageModel. 
var ( LanguageModel_name = map[int32]string{ - 0: "LANGUAGE_MODEL_UNSPECIFIED", - 1: "LANGUAGE_MODEL_OPENAI_GPT4O", - 2: "LANGUAGE_MODEL_OPENAI_GPT41_MINI", - 4: "LANGUAGE_MODEL_OPENAI_GPT41", - 7: "LANGUAGE_MODEL_OPENAI_GPT5", - 8: "LANGUAGE_MODEL_OPENAI_GPT5_MINI", - 9: "LANGUAGE_MODEL_OPENAI_GPT5_NANO", + 0: "LANGUAGE_MODEL_UNSPECIFIED", + 1: "LANGUAGE_MODEL_OPENAI_GPT4O", + 2: "LANGUAGE_MODEL_OPENAI_GPT41_MINI", + 4: "LANGUAGE_MODEL_OPENAI_GPT41", + 7: "LANGUAGE_MODEL_OPENAI_GPT5", + 8: "LANGUAGE_MODEL_OPENAI_GPT5_MINI", + 9: "LANGUAGE_MODEL_OPENAI_GPT5_NANO", + 10: "LANGUAGE_MODEL_OPENAI_GPT5_CHAT_LATEST", + 11: "LANGUAGE_MODEL_OPENAI_O1", + 12: "LANGUAGE_MODEL_OPENAI_O1_MINI", + 13: "LANGUAGE_MODEL_OPENAI_O3", + 14: "LANGUAGE_MODEL_OPENAI_O3_MINI", + 15: "LANGUAGE_MODEL_OPENAI_O4_MINI", + 16: "LANGUAGE_MODEL_OPENAI_CODEX_MINI_LATEST", } LanguageModel_value = map[string]int32{ - "LANGUAGE_MODEL_UNSPECIFIED": 0, - "LANGUAGE_MODEL_OPENAI_GPT4O": 1, - "LANGUAGE_MODEL_OPENAI_GPT41_MINI": 2, - "LANGUAGE_MODEL_OPENAI_GPT41": 4, - "LANGUAGE_MODEL_OPENAI_GPT5": 7, - "LANGUAGE_MODEL_OPENAI_GPT5_MINI": 8, - "LANGUAGE_MODEL_OPENAI_GPT5_NANO": 9, + "LANGUAGE_MODEL_UNSPECIFIED": 0, + "LANGUAGE_MODEL_OPENAI_GPT4O": 1, + "LANGUAGE_MODEL_OPENAI_GPT41_MINI": 2, + "LANGUAGE_MODEL_OPENAI_GPT41": 4, + "LANGUAGE_MODEL_OPENAI_GPT5": 7, + "LANGUAGE_MODEL_OPENAI_GPT5_MINI": 8, + "LANGUAGE_MODEL_OPENAI_GPT5_NANO": 9, + "LANGUAGE_MODEL_OPENAI_GPT5_CHAT_LATEST": 10, + "LANGUAGE_MODEL_OPENAI_O1": 11, + "LANGUAGE_MODEL_OPENAI_O1_MINI": 12, + "LANGUAGE_MODEL_OPENAI_O3": 13, + "LANGUAGE_MODEL_OPENAI_O3_MINI": 14, + "LANGUAGE_MODEL_OPENAI_O4_MINI": 15, + "LANGUAGE_MODEL_OPENAI_CODEX_MINI_LATEST": 16, } ) @@ -1183,6 +1205,138 @@ func (*DeleteConversationResponse) Descriptor() ([]byte, []int) { return file_chat_v1_chat_proto_rawDescGZIP(), []int{18} } +type SupportedModel struct { + state protoimpl.MessageState `protogen:"open.v1"` + Name string `protobuf:"bytes,1,opt,name=name,proto3" 
json:"name,omitempty"` + Slug string `protobuf:"bytes,2,opt,name=slug,proto3" json:"slug,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *SupportedModel) Reset() { + *x = SupportedModel{} + mi := &file_chat_v1_chat_proto_msgTypes[19] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *SupportedModel) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SupportedModel) ProtoMessage() {} + +func (x *SupportedModel) ProtoReflect() protoreflect.Message { + mi := &file_chat_v1_chat_proto_msgTypes[19] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SupportedModel.ProtoReflect.Descriptor instead. +func (*SupportedModel) Descriptor() ([]byte, []int) { + return file_chat_v1_chat_proto_rawDescGZIP(), []int{19} +} + +func (x *SupportedModel) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *SupportedModel) GetSlug() string { + if x != nil { + return x.Slug + } + return "" +} + +type ListSupportedModelsRequest struct { + state protoimpl.MessageState `protogen:"open.v1"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *ListSupportedModelsRequest) Reset() { + *x = ListSupportedModelsRequest{} + mi := &file_chat_v1_chat_proto_msgTypes[20] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *ListSupportedModelsRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ListSupportedModelsRequest) ProtoMessage() {} + +func (x *ListSupportedModelsRequest) ProtoReflect() protoreflect.Message { + mi := &file_chat_v1_chat_proto_msgTypes[20] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms 
+ } + return mi.MessageOf(x) +} + +// Deprecated: Use ListSupportedModelsRequest.ProtoReflect.Descriptor instead. +func (*ListSupportedModelsRequest) Descriptor() ([]byte, []int) { + return file_chat_v1_chat_proto_rawDescGZIP(), []int{20} +} + +type ListSupportedModelsResponse struct { + state protoimpl.MessageState `protogen:"open.v1"` + Models []*SupportedModel `protobuf:"bytes,1,rep,name=models,proto3" json:"models,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *ListSupportedModelsResponse) Reset() { + *x = ListSupportedModelsResponse{} + mi := &file_chat_v1_chat_proto_msgTypes[21] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *ListSupportedModelsResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ListSupportedModelsResponse) ProtoMessage() {} + +func (x *ListSupportedModelsResponse) ProtoReflect() protoreflect.Message { + mi := &file_chat_v1_chat_proto_msgTypes[21] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ListSupportedModelsResponse.ProtoReflect.Descriptor instead. 
+func (*ListSupportedModelsResponse) Descriptor() ([]byte, []int) { + return file_chat_v1_chat_proto_rawDescGZIP(), []int{21} +} + +func (x *ListSupportedModelsResponse) GetModels() []*SupportedModel { + if x != nil { + return x.Models + } + return nil +} + // Information sent once at the beginning of a new conversation stream type StreamInitialization struct { state protoimpl.MessageState `protogen:"open.v1"` @@ -1194,7 +1348,7 @@ type StreamInitialization struct { func (x *StreamInitialization) Reset() { *x = StreamInitialization{} - mi := &file_chat_v1_chat_proto_msgTypes[19] + mi := &file_chat_v1_chat_proto_msgTypes[22] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1206,7 +1360,7 @@ func (x *StreamInitialization) String() string { func (*StreamInitialization) ProtoMessage() {} func (x *StreamInitialization) ProtoReflect() protoreflect.Message { - mi := &file_chat_v1_chat_proto_msgTypes[19] + mi := &file_chat_v1_chat_proto_msgTypes[22] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1219,7 +1373,7 @@ func (x *StreamInitialization) ProtoReflect() protoreflect.Message { // Deprecated: Use StreamInitialization.ProtoReflect.Descriptor instead. 
func (*StreamInitialization) Descriptor() ([]byte, []int) { - return file_chat_v1_chat_proto_rawDescGZIP(), []int{19} + return file_chat_v1_chat_proto_rawDescGZIP(), []int{22} } func (x *StreamInitialization) GetConversationId() string { @@ -1251,7 +1405,7 @@ type StreamPartBegin struct { func (x *StreamPartBegin) Reset() { *x = StreamPartBegin{} - mi := &file_chat_v1_chat_proto_msgTypes[20] + mi := &file_chat_v1_chat_proto_msgTypes[23] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1263,7 +1417,7 @@ func (x *StreamPartBegin) String() string { func (*StreamPartBegin) ProtoMessage() {} func (x *StreamPartBegin) ProtoReflect() protoreflect.Message { - mi := &file_chat_v1_chat_proto_msgTypes[20] + mi := &file_chat_v1_chat_proto_msgTypes[23] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1276,7 +1430,7 @@ func (x *StreamPartBegin) ProtoReflect() protoreflect.Message { // Deprecated: Use StreamPartBegin.ProtoReflect.Descriptor instead. 
func (*StreamPartBegin) Descriptor() ([]byte, []int) { - return file_chat_v1_chat_proto_rawDescGZIP(), []int{20} + return file_chat_v1_chat_proto_rawDescGZIP(), []int{23} } func (x *StreamPartBegin) GetMessageId() string { @@ -1306,7 +1460,7 @@ type MessageChunk struct { func (x *MessageChunk) Reset() { *x = MessageChunk{} - mi := &file_chat_v1_chat_proto_msgTypes[21] + mi := &file_chat_v1_chat_proto_msgTypes[24] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1318,7 +1472,7 @@ func (x *MessageChunk) String() string { func (*MessageChunk) ProtoMessage() {} func (x *MessageChunk) ProtoReflect() protoreflect.Message { - mi := &file_chat_v1_chat_proto_msgTypes[21] + mi := &file_chat_v1_chat_proto_msgTypes[24] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1331,7 +1485,7 @@ func (x *MessageChunk) ProtoReflect() protoreflect.Message { // Deprecated: Use MessageChunk.ProtoReflect.Descriptor instead. 
func (*MessageChunk) Descriptor() ([]byte, []int) { - return file_chat_v1_chat_proto_rawDescGZIP(), []int{21} + return file_chat_v1_chat_proto_rawDescGZIP(), []int{24} } func (x *MessageChunk) GetMessageId() string { @@ -1358,7 +1512,7 @@ type IncompleteIndicator struct { func (x *IncompleteIndicator) Reset() { *x = IncompleteIndicator{} - mi := &file_chat_v1_chat_proto_msgTypes[22] + mi := &file_chat_v1_chat_proto_msgTypes[25] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1370,7 +1524,7 @@ func (x *IncompleteIndicator) String() string { func (*IncompleteIndicator) ProtoMessage() {} func (x *IncompleteIndicator) ProtoReflect() protoreflect.Message { - mi := &file_chat_v1_chat_proto_msgTypes[22] + mi := &file_chat_v1_chat_proto_msgTypes[25] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1383,7 +1537,7 @@ func (x *IncompleteIndicator) ProtoReflect() protoreflect.Message { // Deprecated: Use IncompleteIndicator.ProtoReflect.Descriptor instead. 
func (*IncompleteIndicator) Descriptor() ([]byte, []int) { - return file_chat_v1_chat_proto_rawDescGZIP(), []int{22} + return file_chat_v1_chat_proto_rawDescGZIP(), []int{25} } func (x *IncompleteIndicator) GetReason() string { @@ -1410,7 +1564,7 @@ type StreamPartEnd struct { func (x *StreamPartEnd) Reset() { *x = StreamPartEnd{} - mi := &file_chat_v1_chat_proto_msgTypes[23] + mi := &file_chat_v1_chat_proto_msgTypes[26] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1422,7 +1576,7 @@ func (x *StreamPartEnd) String() string { func (*StreamPartEnd) ProtoMessage() {} func (x *StreamPartEnd) ProtoReflect() protoreflect.Message { - mi := &file_chat_v1_chat_proto_msgTypes[23] + mi := &file_chat_v1_chat_proto_msgTypes[26] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1435,7 +1589,7 @@ func (x *StreamPartEnd) ProtoReflect() protoreflect.Message { // Deprecated: Use StreamPartEnd.ProtoReflect.Descriptor instead. 
func (*StreamPartEnd) Descriptor() ([]byte, []int) { - return file_chat_v1_chat_proto_rawDescGZIP(), []int{23} + return file_chat_v1_chat_proto_rawDescGZIP(), []int{26} } func (x *StreamPartEnd) GetMessageId() string { @@ -1462,7 +1616,7 @@ type StreamFinalization struct { func (x *StreamFinalization) Reset() { *x = StreamFinalization{} - mi := &file_chat_v1_chat_proto_msgTypes[24] + mi := &file_chat_v1_chat_proto_msgTypes[27] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1474,7 +1628,7 @@ func (x *StreamFinalization) String() string { func (*StreamFinalization) ProtoMessage() {} func (x *StreamFinalization) ProtoReflect() protoreflect.Message { - mi := &file_chat_v1_chat_proto_msgTypes[24] + mi := &file_chat_v1_chat_proto_msgTypes[27] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1487,7 +1641,7 @@ func (x *StreamFinalization) ProtoReflect() protoreflect.Message { // Deprecated: Use StreamFinalization.ProtoReflect.Descriptor instead. 
func (*StreamFinalization) Descriptor() ([]byte, []int) { - return file_chat_v1_chat_proto_rawDescGZIP(), []int{24} + return file_chat_v1_chat_proto_rawDescGZIP(), []int{27} } func (x *StreamFinalization) GetConversationId() string { @@ -1506,7 +1660,7 @@ type StreamError struct { func (x *StreamError) Reset() { *x = StreamError{} - mi := &file_chat_v1_chat_proto_msgTypes[25] + mi := &file_chat_v1_chat_proto_msgTypes[28] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1518,7 +1672,7 @@ func (x *StreamError) String() string { func (*StreamError) ProtoMessage() {} func (x *StreamError) ProtoReflect() protoreflect.Message { - mi := &file_chat_v1_chat_proto_msgTypes[25] + mi := &file_chat_v1_chat_proto_msgTypes[28] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1531,7 +1685,7 @@ func (x *StreamError) ProtoReflect() protoreflect.Message { // Deprecated: Use StreamError.ProtoReflect.Descriptor instead. 
func (*StreamError) Descriptor() ([]byte, []int) { - return file_chat_v1_chat_proto_rawDescGZIP(), []int{25} + return file_chat_v1_chat_proto_rawDescGZIP(), []int{28} } func (x *StreamError) GetErrorMessage() string { @@ -1559,7 +1713,7 @@ type CreateConversationMessageStreamRequest struct { func (x *CreateConversationMessageStreamRequest) Reset() { *x = CreateConversationMessageStreamRequest{} - mi := &file_chat_v1_chat_proto_msgTypes[26] + mi := &file_chat_v1_chat_proto_msgTypes[29] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1571,7 +1725,7 @@ func (x *CreateConversationMessageStreamRequest) String() string { func (*CreateConversationMessageStreamRequest) ProtoMessage() {} func (x *CreateConversationMessageStreamRequest) ProtoReflect() protoreflect.Message { - mi := &file_chat_v1_chat_proto_msgTypes[26] + mi := &file_chat_v1_chat_proto_msgTypes[29] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1584,7 +1738,7 @@ func (x *CreateConversationMessageStreamRequest) ProtoReflect() protoreflect.Mes // Deprecated: Use CreateConversationMessageStreamRequest.ProtoReflect.Descriptor instead. 
func (*CreateConversationMessageStreamRequest) Descriptor() ([]byte, []int) { - return file_chat_v1_chat_proto_rawDescGZIP(), []int{26} + return file_chat_v1_chat_proto_rawDescGZIP(), []int{29} } func (x *CreateConversationMessageStreamRequest) GetProjectId() string { @@ -1648,7 +1802,7 @@ type CreateConversationMessageStreamResponse struct { func (x *CreateConversationMessageStreamResponse) Reset() { *x = CreateConversationMessageStreamResponse{} - mi := &file_chat_v1_chat_proto_msgTypes[27] + mi := &file_chat_v1_chat_proto_msgTypes[30] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1660,7 +1814,7 @@ func (x *CreateConversationMessageStreamResponse) String() string { func (*CreateConversationMessageStreamResponse) ProtoMessage() {} func (x *CreateConversationMessageStreamResponse) ProtoReflect() protoreflect.Message { - mi := &file_chat_v1_chat_proto_msgTypes[27] + mi := &file_chat_v1_chat_proto_msgTypes[30] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1673,7 +1827,7 @@ func (x *CreateConversationMessageStreamResponse) ProtoReflect() protoreflect.Me // Deprecated: Use CreateConversationMessageStreamResponse.ProtoReflect.Descriptor instead. 
func (*CreateConversationMessageStreamResponse) Descriptor() ([]byte, []int) { - return file_chat_v1_chat_proto_rawDescGZIP(), []int{27} + return file_chat_v1_chat_proto_rawDescGZIP(), []int{30} } func (x *CreateConversationMessageStreamResponse) GetResponsePayload() isCreateConversationMessageStreamResponse_ResponsePayload { @@ -1869,7 +2023,13 @@ const file_chat_v1_chat_proto_rawDesc = "" + "\fconversation\x18\x01 \x01(\v2\x15.chat.v1.ConversationR\fconversation\"D\n" + "\x19DeleteConversationRequest\x12'\n" + "\x0fconversation_id\x18\x01 \x01(\tR\x0econversationId\"\x1c\n" + - "\x1aDeleteConversationResponse\"~\n" + + "\x1aDeleteConversationResponse\"8\n" + + "\x0eSupportedModel\x12\x12\n" + + "\x04name\x18\x01 \x01(\tR\x04name\x12\x12\n" + + "\x04slug\x18\x02 \x01(\tR\x04slug\"\x1c\n" + + "\x1aListSupportedModelsRequest\"N\n" + + "\x1bListSupportedModelsResponse\x12/\n" + + "\x06models\x18\x01 \x03(\v2\x17.chat.v1.SupportedModelR\x06models\"~\n" + "\x14StreamInitialization\x12'\n" + "\x0fconversation_id\x18\x01 \x01(\tR\x0econversationId\x12=\n" + "\x0elanguage_model\x18\x05 \x01(\x0e2\x16.chat.v1.LanguageModelR\rlanguageModel\"c\n" + @@ -1912,7 +2072,7 @@ const file_chat_v1_chat_proto_rawDesc = "" + "\x0fstream_part_end\x18\x05 \x01(\v2\x16.chat.v1.StreamPartEndH\x00R\rstreamPartEnd\x12N\n" + "\x13stream_finalization\x18\x06 \x01(\v2\x1b.chat.v1.StreamFinalizationH\x00R\x12streamFinalization\x129\n" + "\fstream_error\x18\a \x01(\v2\x14.chat.v1.StreamErrorH\x00R\vstreamErrorB\x12\n" + - "\x10response_payload*\x81\x02\n" + + "\x10response_payload*\xff\x03\n" + "\rLanguageModel\x12\x1e\n" + "\x1aLANGUAGE_MODEL_UNSPECIFIED\x10\x00\x12\x1f\n" + "\x1bLANGUAGE_MODEL_OPENAI_GPT4O\x10\x01\x12$\n" + @@ -1920,17 +2080,26 @@ const file_chat_v1_chat_proto_rawDesc = "" + "\x1bLANGUAGE_MODEL_OPENAI_GPT41\x10\x04\x12\x1e\n" + "\x1aLANGUAGE_MODEL_OPENAI_GPT5\x10\a\x12#\n" + "\x1fLANGUAGE_MODEL_OPENAI_GPT5_MINI\x10\b\x12#\n" + - "\x1fLANGUAGE_MODEL_OPENAI_GPT5_NANO\x10\t*R\n" + 
+ "\x1fLANGUAGE_MODEL_OPENAI_GPT5_NANO\x10\t\x12*\n" + + "&LANGUAGE_MODEL_OPENAI_GPT5_CHAT_LATEST\x10\n" + + "\x12\x1c\n" + + "\x18LANGUAGE_MODEL_OPENAI_O1\x10\v\x12!\n" + + "\x1dLANGUAGE_MODEL_OPENAI_O1_MINI\x10\f\x12\x1c\n" + + "\x18LANGUAGE_MODEL_OPENAI_O3\x10\r\x12!\n" + + "\x1dLANGUAGE_MODEL_OPENAI_O3_MINI\x10\x0e\x12!\n" + + "\x1dLANGUAGE_MODEL_OPENAI_O4_MINI\x10\x0f\x12+\n" + + "'LANGUAGE_MODEL_OPENAI_CODEX_MINI_LATEST\x10\x10*R\n" + "\x10ConversationType\x12!\n" + "\x1dCONVERSATION_TYPE_UNSPECIFIED\x10\x00\x12\x1b\n" + - "\x17CONVERSATION_TYPE_DEBUG\x10\x012\xcd\a\n" + + "\x17CONVERSATION_TYPE_DEBUG\x10\x012\xd2\b\n" + "\vChatService\x12\x83\x01\n" + "\x11ListConversations\x12!.chat.v1.ListConversationsRequest\x1a\".chat.v1.ListConversationsResponse\"'\x82\xd3\xe4\x93\x02!\x12\x1f/_pd/api/v1/chats/conversations\x12\x8f\x01\n" + "\x0fGetConversation\x12\x1f.chat.v1.GetConversationRequest\x1a .chat.v1.GetConversationResponse\"9\x82\xd3\xe4\x93\x023\x121/_pd/api/v1/chats/conversations/{conversation_id}\x12\xa7\x01\n" + "\x19CreateConversationMessage\x12).chat.v1.CreateConversationMessageRequest\x1a*.chat.v1.CreateConversationMessageResponse\"3\x82\xd3\xe4\x93\x02-:\x01*\"(/_pd/api/v1/chats/conversations/messages\x12\xc2\x01\n" + "\x1fCreateConversationMessageStream\x12/.chat.v1.CreateConversationMessageStreamRequest\x1a0.chat.v1.CreateConversationMessageStreamResponse\":\x82\xd3\xe4\x93\x024:\x01*\"//_pd/api/v1/chats/conversations/messages/stream0\x01\x12\x9b\x01\n" + "\x12UpdateConversation\x12\".chat.v1.UpdateConversationRequest\x1a#.chat.v1.UpdateConversationResponse\"<\x82\xd3\xe4\x93\x026:\x01*21/_pd/api/v1/chats/conversations/{conversation_id}\x12\x98\x01\n" + - "\x12DeleteConversation\x12\".chat.v1.DeleteConversationRequest\x1a#.chat.v1.DeleteConversationResponse\"9\x82\xd3\xe4\x93\x023*1/_pd/api/v1/chats/conversations/{conversation_id}B\x7f\n" + + 
"\x12DeleteConversation\x12\".chat.v1.DeleteConversationRequest\x1a#.chat.v1.DeleteConversationResponse\"9\x82\xd3\xe4\x93\x023*1/_pd/api/v1/chats/conversations/{conversation_id}\x12\x82\x01\n" + + "\x13ListSupportedModels\x12#.chat.v1.ListSupportedModelsRequest\x1a$.chat.v1.ListSupportedModelsResponse\" \x82\xd3\xe4\x93\x02\x1a\x12\x18/_pd/api/v1/chats/modelsB\x7f\n" + "\vcom.chat.v1B\tChatProtoP\x01Z(paperdebugger/pkg/gen/api/chat/v1;chatv1\xa2\x02\x03CXX\xaa\x02\aChat.V1\xca\x02\aChat\\V1\xe2\x02\x13Chat\\V1\\GPBMetadata\xea\x02\bChat::V1b\x06proto3" var ( @@ -1946,7 +2115,7 @@ func file_chat_v1_chat_proto_rawDescGZIP() []byte { } var file_chat_v1_chat_proto_enumTypes = make([]protoimpl.EnumInfo, 2) -var file_chat_v1_chat_proto_msgTypes = make([]protoimpl.MessageInfo, 28) +var file_chat_v1_chat_proto_msgTypes = make([]protoimpl.MessageInfo, 31) var file_chat_v1_chat_proto_goTypes = []any{ (LanguageModel)(0), // 0: chat.v1.LanguageModel (ConversationType)(0), // 1: chat.v1.ConversationType @@ -1969,15 +2138,18 @@ var file_chat_v1_chat_proto_goTypes = []any{ (*UpdateConversationResponse)(nil), // 18: chat.v1.UpdateConversationResponse (*DeleteConversationRequest)(nil), // 19: chat.v1.DeleteConversationRequest (*DeleteConversationResponse)(nil), // 20: chat.v1.DeleteConversationResponse - (*StreamInitialization)(nil), // 21: chat.v1.StreamInitialization - (*StreamPartBegin)(nil), // 22: chat.v1.StreamPartBegin - (*MessageChunk)(nil), // 23: chat.v1.MessageChunk - (*IncompleteIndicator)(nil), // 24: chat.v1.IncompleteIndicator - (*StreamPartEnd)(nil), // 25: chat.v1.StreamPartEnd - (*StreamFinalization)(nil), // 26: chat.v1.StreamFinalization - (*StreamError)(nil), // 27: chat.v1.StreamError - (*CreateConversationMessageStreamRequest)(nil), // 28: chat.v1.CreateConversationMessageStreamRequest - (*CreateConversationMessageStreamResponse)(nil), // 29: chat.v1.CreateConversationMessageStreamResponse + (*SupportedModel)(nil), // 21: chat.v1.SupportedModel + 
(*ListSupportedModelsRequest)(nil), // 22: chat.v1.ListSupportedModelsRequest + (*ListSupportedModelsResponse)(nil), // 23: chat.v1.ListSupportedModelsResponse + (*StreamInitialization)(nil), // 24: chat.v1.StreamInitialization + (*StreamPartBegin)(nil), // 25: chat.v1.StreamPartBegin + (*MessageChunk)(nil), // 26: chat.v1.MessageChunk + (*IncompleteIndicator)(nil), // 27: chat.v1.IncompleteIndicator + (*StreamPartEnd)(nil), // 28: chat.v1.StreamPartEnd + (*StreamFinalization)(nil), // 29: chat.v1.StreamFinalization + (*StreamError)(nil), // 30: chat.v1.StreamError + (*CreateConversationMessageStreamRequest)(nil), // 31: chat.v1.CreateConversationMessageStreamRequest + (*CreateConversationMessageStreamResponse)(nil), // 32: chat.v1.CreateConversationMessageStreamResponse } var file_chat_v1_chat_proto_depIdxs = []int32{ 4, // 0: chat.v1.MessagePayload.system:type_name -> chat.v1.MessageTypeSystem @@ -1995,35 +2167,38 @@ var file_chat_v1_chat_proto_depIdxs = []int32{ 1, // 12: chat.v1.CreateConversationMessageRequest.conversation_type:type_name -> chat.v1.ConversationType 10, // 13: chat.v1.CreateConversationMessageResponse.conversation:type_name -> chat.v1.Conversation 10, // 14: chat.v1.UpdateConversationResponse.conversation:type_name -> chat.v1.Conversation - 0, // 15: chat.v1.StreamInitialization.language_model:type_name -> chat.v1.LanguageModel - 8, // 16: chat.v1.StreamPartBegin.payload:type_name -> chat.v1.MessagePayload - 8, // 17: chat.v1.StreamPartEnd.payload:type_name -> chat.v1.MessagePayload - 0, // 18: chat.v1.CreateConversationMessageStreamRequest.language_model:type_name -> chat.v1.LanguageModel - 1, // 19: chat.v1.CreateConversationMessageStreamRequest.conversation_type:type_name -> chat.v1.ConversationType - 21, // 20: chat.v1.CreateConversationMessageStreamResponse.stream_initialization:type_name -> chat.v1.StreamInitialization - 22, // 21: chat.v1.CreateConversationMessageStreamResponse.stream_part_begin:type_name -> chat.v1.StreamPartBegin - 23, 
// 22: chat.v1.CreateConversationMessageStreamResponse.message_chunk:type_name -> chat.v1.MessageChunk - 24, // 23: chat.v1.CreateConversationMessageStreamResponse.incomplete_indicator:type_name -> chat.v1.IncompleteIndicator - 25, // 24: chat.v1.CreateConversationMessageStreamResponse.stream_part_end:type_name -> chat.v1.StreamPartEnd - 26, // 25: chat.v1.CreateConversationMessageStreamResponse.stream_finalization:type_name -> chat.v1.StreamFinalization - 27, // 26: chat.v1.CreateConversationMessageStreamResponse.stream_error:type_name -> chat.v1.StreamError - 11, // 27: chat.v1.ChatService.ListConversations:input_type -> chat.v1.ListConversationsRequest - 13, // 28: chat.v1.ChatService.GetConversation:input_type -> chat.v1.GetConversationRequest - 15, // 29: chat.v1.ChatService.CreateConversationMessage:input_type -> chat.v1.CreateConversationMessageRequest - 28, // 30: chat.v1.ChatService.CreateConversationMessageStream:input_type -> chat.v1.CreateConversationMessageStreamRequest - 17, // 31: chat.v1.ChatService.UpdateConversation:input_type -> chat.v1.UpdateConversationRequest - 19, // 32: chat.v1.ChatService.DeleteConversation:input_type -> chat.v1.DeleteConversationRequest - 12, // 33: chat.v1.ChatService.ListConversations:output_type -> chat.v1.ListConversationsResponse - 14, // 34: chat.v1.ChatService.GetConversation:output_type -> chat.v1.GetConversationResponse - 16, // 35: chat.v1.ChatService.CreateConversationMessage:output_type -> chat.v1.CreateConversationMessageResponse - 29, // 36: chat.v1.ChatService.CreateConversationMessageStream:output_type -> chat.v1.CreateConversationMessageStreamResponse - 18, // 37: chat.v1.ChatService.UpdateConversation:output_type -> chat.v1.UpdateConversationResponse - 20, // 38: chat.v1.ChatService.DeleteConversation:output_type -> chat.v1.DeleteConversationResponse - 33, // [33:39] is the sub-list for method output_type - 27, // [27:33] is the sub-list for method input_type - 27, // [27:27] is the sub-list for extension 
type_name - 27, // [27:27] is the sub-list for extension extendee - 0, // [0:27] is the sub-list for field type_name + 21, // 15: chat.v1.ListSupportedModelsResponse.models:type_name -> chat.v1.SupportedModel + 0, // 16: chat.v1.StreamInitialization.language_model:type_name -> chat.v1.LanguageModel + 8, // 17: chat.v1.StreamPartBegin.payload:type_name -> chat.v1.MessagePayload + 8, // 18: chat.v1.StreamPartEnd.payload:type_name -> chat.v1.MessagePayload + 0, // 19: chat.v1.CreateConversationMessageStreamRequest.language_model:type_name -> chat.v1.LanguageModel + 1, // 20: chat.v1.CreateConversationMessageStreamRequest.conversation_type:type_name -> chat.v1.ConversationType + 24, // 21: chat.v1.CreateConversationMessageStreamResponse.stream_initialization:type_name -> chat.v1.StreamInitialization + 25, // 22: chat.v1.CreateConversationMessageStreamResponse.stream_part_begin:type_name -> chat.v1.StreamPartBegin + 26, // 23: chat.v1.CreateConversationMessageStreamResponse.message_chunk:type_name -> chat.v1.MessageChunk + 27, // 24: chat.v1.CreateConversationMessageStreamResponse.incomplete_indicator:type_name -> chat.v1.IncompleteIndicator + 28, // 25: chat.v1.CreateConversationMessageStreamResponse.stream_part_end:type_name -> chat.v1.StreamPartEnd + 29, // 26: chat.v1.CreateConversationMessageStreamResponse.stream_finalization:type_name -> chat.v1.StreamFinalization + 30, // 27: chat.v1.CreateConversationMessageStreamResponse.stream_error:type_name -> chat.v1.StreamError + 11, // 28: chat.v1.ChatService.ListConversations:input_type -> chat.v1.ListConversationsRequest + 13, // 29: chat.v1.ChatService.GetConversation:input_type -> chat.v1.GetConversationRequest + 15, // 30: chat.v1.ChatService.CreateConversationMessage:input_type -> chat.v1.CreateConversationMessageRequest + 31, // 31: chat.v1.ChatService.CreateConversationMessageStream:input_type -> chat.v1.CreateConversationMessageStreamRequest + 17, // 32: chat.v1.ChatService.UpdateConversation:input_type -> 
chat.v1.UpdateConversationRequest + 19, // 33: chat.v1.ChatService.DeleteConversation:input_type -> chat.v1.DeleteConversationRequest + 22, // 34: chat.v1.ChatService.ListSupportedModels:input_type -> chat.v1.ListSupportedModelsRequest + 12, // 35: chat.v1.ChatService.ListConversations:output_type -> chat.v1.ListConversationsResponse + 14, // 36: chat.v1.ChatService.GetConversation:output_type -> chat.v1.GetConversationResponse + 16, // 37: chat.v1.ChatService.CreateConversationMessage:output_type -> chat.v1.CreateConversationMessageResponse + 32, // 38: chat.v1.ChatService.CreateConversationMessageStream:output_type -> chat.v1.CreateConversationMessageStreamResponse + 18, // 39: chat.v1.ChatService.UpdateConversation:output_type -> chat.v1.UpdateConversationResponse + 20, // 40: chat.v1.ChatService.DeleteConversation:output_type -> chat.v1.DeleteConversationResponse + 23, // 41: chat.v1.ChatService.ListSupportedModels:output_type -> chat.v1.ListSupportedModelsResponse + 35, // [35:42] is the sub-list for method output_type + 28, // [28:35] is the sub-list for method input_type + 28, // [28:28] is the sub-list for extension type_name + 28, // [28:28] is the sub-list for extension extendee + 0, // [0:28] is the sub-list for field type_name } func init() { file_chat_v1_chat_proto_init() } @@ -2042,8 +2217,8 @@ func file_chat_v1_chat_proto_init() { } file_chat_v1_chat_proto_msgTypes[9].OneofWrappers = []any{} file_chat_v1_chat_proto_msgTypes[13].OneofWrappers = []any{} - file_chat_v1_chat_proto_msgTypes[26].OneofWrappers = []any{} - file_chat_v1_chat_proto_msgTypes[27].OneofWrappers = []any{ + file_chat_v1_chat_proto_msgTypes[29].OneofWrappers = []any{} + file_chat_v1_chat_proto_msgTypes[30].OneofWrappers = []any{ (*CreateConversationMessageStreamResponse_StreamInitialization)(nil), (*CreateConversationMessageStreamResponse_StreamPartBegin)(nil), (*CreateConversationMessageStreamResponse_MessageChunk)(nil), @@ -2058,7 +2233,7 @@ func file_chat_v1_chat_proto_init() { 
GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: unsafe.Slice(unsafe.StringData(file_chat_v1_chat_proto_rawDesc), len(file_chat_v1_chat_proto_rawDesc)), NumEnums: 2, - NumMessages: 28, + NumMessages: 31, NumExtensions: 0, NumServices: 1, }, diff --git a/pkg/gen/api/chat/v1/chat.pb.gw.go b/pkg/gen/api/chat/v1/chat.pb.gw.go index 11049d9a..dab97565 100644 --- a/pkg/gen/api/chat/v1/chat.pb.gw.go +++ b/pkg/gen/api/chat/v1/chat.pb.gw.go @@ -243,6 +243,27 @@ func local_request_ChatService_DeleteConversation_0(ctx context.Context, marshal return msg, metadata, err } +func request_ChatService_ListSupportedModels_0(ctx context.Context, marshaler runtime.Marshaler, client ChatServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var ( + protoReq ListSupportedModelsRequest + metadata runtime.ServerMetadata + ) + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + msg, err := client.ListSupportedModels(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err +} + +func local_request_ChatService_ListSupportedModels_0(ctx context.Context, marshaler runtime.Marshaler, server ChatServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var ( + protoReq ListSupportedModelsRequest + metadata runtime.ServerMetadata + ) + msg, err := server.ListSupportedModels(ctx, &protoReq) + return msg, metadata, err +} + // RegisterChatServiceHandlerServer registers the http handlers for service ChatService to "mux". // UnaryRPC :call ChatServiceServer directly. // StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906. @@ -356,6 +377,26 @@ func RegisterChatServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux } forward_ChatService_DeleteConversation_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) + mux.Handle(http.MethodGet, pattern_ChatService_ListSupportedModels_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + var stream runtime.ServerTransportStream + ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/chat.v1.ChatService/ListSupportedModels", runtime.WithHTTPPathPattern("/_pd/api/v1/chats/models")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := local_request_ChatService_ListSupportedModels_0(annotatedContext, inboundMarshaler, server, req, pathParams) + md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + forward_ChatService_ListSupportedModels_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + }) return nil } @@ -498,6 +539,23 @@ func RegisterChatServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux } forward_ChatService_DeleteConversation_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) + mux.Handle(http.MethodGet, pattern_ChatService_ListSupportedModels_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/chat.v1.ChatService/ListSupportedModels", runtime.WithHTTPPathPattern("/_pd/api/v1/chats/models")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_ChatService_ListSupportedModels_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + forward_ChatService_ListSupportedModels_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + }) return nil } @@ -508,6 +566,7 @@ var ( pattern_ChatService_CreateConversationMessageStream_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 2, 4, 2, 5, 2, 6}, []string{"_pd", "api", "v1", "chats", "conversations", "messages", "stream"}, "")) pattern_ChatService_UpdateConversation_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 2, 4, 1, 0, 4, 1, 5, 5}, []string{"_pd", "api", "v1", "chats", "conversations", "conversation_id"}, "")) pattern_ChatService_DeleteConversation_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 2, 4, 1, 0, 4, 1, 5, 5}, []string{"_pd", "api", "v1", "chats", "conversations", "conversation_id"}, "")) + pattern_ChatService_ListSupportedModels_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 2, 4}, []string{"_pd", "api", "v1", "chats", "models"}, "")) ) var ( @@ -517,4 +576,5 @@ var ( forward_ChatService_CreateConversationMessageStream_0 = 
runtime.ForwardResponseStream forward_ChatService_UpdateConversation_0 = runtime.ForwardResponseMessage forward_ChatService_DeleteConversation_0 = runtime.ForwardResponseMessage + forward_ChatService_ListSupportedModels_0 = runtime.ForwardResponseMessage ) diff --git a/pkg/gen/api/chat/v1/chat_grpc.pb.go b/pkg/gen/api/chat/v1/chat_grpc.pb.go index aa9b7082..c0916102 100644 --- a/pkg/gen/api/chat/v1/chat_grpc.pb.go +++ b/pkg/gen/api/chat/v1/chat_grpc.pb.go @@ -1,6 +1,6 @@ // Code generated by protoc-gen-go-grpc. DO NOT EDIT. // versions: -// - protoc-gen-go-grpc v1.5.1 +// - protoc-gen-go-grpc v1.6.0 // - protoc (unknown) // source: chat/v1/chat.proto @@ -25,6 +25,7 @@ const ( ChatService_CreateConversationMessageStream_FullMethodName = "/chat.v1.ChatService/CreateConversationMessageStream" ChatService_UpdateConversation_FullMethodName = "/chat.v1.ChatService/UpdateConversation" ChatService_DeleteConversation_FullMethodName = "/chat.v1.ChatService/DeleteConversation" + ChatService_ListSupportedModels_FullMethodName = "/chat.v1.ChatService/ListSupportedModels" ) // ChatServiceClient is the client API for ChatService service. 
@@ -37,6 +38,7 @@ type ChatServiceClient interface { CreateConversationMessageStream(ctx context.Context, in *CreateConversationMessageStreamRequest, opts ...grpc.CallOption) (grpc.ServerStreamingClient[CreateConversationMessageStreamResponse], error) UpdateConversation(ctx context.Context, in *UpdateConversationRequest, opts ...grpc.CallOption) (*UpdateConversationResponse, error) DeleteConversation(ctx context.Context, in *DeleteConversationRequest, opts ...grpc.CallOption) (*DeleteConversationResponse, error) + ListSupportedModels(ctx context.Context, in *ListSupportedModelsRequest, opts ...grpc.CallOption) (*ListSupportedModelsResponse, error) } type chatServiceClient struct { @@ -116,6 +118,16 @@ func (c *chatServiceClient) DeleteConversation(ctx context.Context, in *DeleteCo return out, nil } +func (c *chatServiceClient) ListSupportedModels(ctx context.Context, in *ListSupportedModelsRequest, opts ...grpc.CallOption) (*ListSupportedModelsResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(ListSupportedModelsResponse) + err := c.cc.Invoke(ctx, ChatService_ListSupportedModels_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + // ChatServiceServer is the server API for ChatService service. // All implementations must embed UnimplementedChatServiceServer // for forward compatibility. 
@@ -126,6 +138,7 @@ type ChatServiceServer interface { CreateConversationMessageStream(*CreateConversationMessageStreamRequest, grpc.ServerStreamingServer[CreateConversationMessageStreamResponse]) error UpdateConversation(context.Context, *UpdateConversationRequest) (*UpdateConversationResponse, error) DeleteConversation(context.Context, *DeleteConversationRequest) (*DeleteConversationResponse, error) + ListSupportedModels(context.Context, *ListSupportedModelsRequest) (*ListSupportedModelsResponse, error) mustEmbedUnimplementedChatServiceServer() } @@ -137,22 +150,25 @@ type ChatServiceServer interface { type UnimplementedChatServiceServer struct{} func (UnimplementedChatServiceServer) ListConversations(context.Context, *ListConversationsRequest) (*ListConversationsResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method ListConversations not implemented") + return nil, status.Error(codes.Unimplemented, "method ListConversations not implemented") } func (UnimplementedChatServiceServer) GetConversation(context.Context, *GetConversationRequest) (*GetConversationResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method GetConversation not implemented") + return nil, status.Error(codes.Unimplemented, "method GetConversation not implemented") } func (UnimplementedChatServiceServer) CreateConversationMessage(context.Context, *CreateConversationMessageRequest) (*CreateConversationMessageResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method CreateConversationMessage not implemented") + return nil, status.Error(codes.Unimplemented, "method CreateConversationMessage not implemented") } func (UnimplementedChatServiceServer) CreateConversationMessageStream(*CreateConversationMessageStreamRequest, grpc.ServerStreamingServer[CreateConversationMessageStreamResponse]) error { - return status.Errorf(codes.Unimplemented, "method CreateConversationMessageStream not implemented") + return status.Error(codes.Unimplemented, 
"method CreateConversationMessageStream not implemented") } func (UnimplementedChatServiceServer) UpdateConversation(context.Context, *UpdateConversationRequest) (*UpdateConversationResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method UpdateConversation not implemented") + return nil, status.Error(codes.Unimplemented, "method UpdateConversation not implemented") } func (UnimplementedChatServiceServer) DeleteConversation(context.Context, *DeleteConversationRequest) (*DeleteConversationResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method DeleteConversation not implemented") + return nil, status.Error(codes.Unimplemented, "method DeleteConversation not implemented") +} +func (UnimplementedChatServiceServer) ListSupportedModels(context.Context, *ListSupportedModelsRequest) (*ListSupportedModelsResponse, error) { + return nil, status.Error(codes.Unimplemented, "method ListSupportedModels not implemented") } func (UnimplementedChatServiceServer) mustEmbedUnimplementedChatServiceServer() {} func (UnimplementedChatServiceServer) testEmbeddedByValue() {} @@ -165,7 +181,7 @@ type UnsafeChatServiceServer interface { } func RegisterChatServiceServer(s grpc.ServiceRegistrar, srv ChatServiceServer) { - // If the following call pancis, it indicates UnimplementedChatServiceServer was + // If the following call panics, it indicates UnimplementedChatServiceServer was // embedded by pointer and is nil. This will cause panics if an // unimplemented method is ever invoked, so we test this at initialization // time to prevent it from happening at runtime later due to I/O. 
@@ -276,6 +292,24 @@ func _ChatService_DeleteConversation_Handler(srv interface{}, ctx context.Contex return interceptor(ctx, in, info, handler) } +func _ChatService_ListSupportedModels_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListSupportedModelsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ChatServiceServer).ListSupportedModels(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: ChatService_ListSupportedModels_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ChatServiceServer).ListSupportedModels(ctx, req.(*ListSupportedModelsRequest)) + } + return interceptor(ctx, in, info, handler) +} + // ChatService_ServiceDesc is the grpc.ServiceDesc for ChatService service. // It's only intended for direct use with grpc.RegisterService, // and not to be introspected or modified (even as a copy) @@ -303,6 +337,10 @@ var ChatService_ServiceDesc = grpc.ServiceDesc{ MethodName: "DeleteConversation", Handler: _ChatService_DeleteConversation_Handler, }, + { + MethodName: "ListSupportedModels", + Handler: _ChatService_ListSupportedModels_Handler, + }, }, Streams: []grpc.StreamDesc{ { diff --git a/pkg/gen/api/comment/v1/comment.pb.go b/pkg/gen/api/comment/v1/comment.pb.go index f4c5e1fb..8daf2720 100644 --- a/pkg/gen/api/comment/v1/comment.pb.go +++ b/pkg/gen/api/comment/v1/comment.pb.go @@ -1,6 +1,6 @@ // Code generated by protoc-gen-go. DO NOT EDIT. 
// versions: -// protoc-gen-go v1.36.8 +// protoc-gen-go v1.36.10 // protoc (unknown) // source: comment/v1/comment.proto diff --git a/pkg/gen/api/comment/v1/comment_grpc.pb.go b/pkg/gen/api/comment/v1/comment_grpc.pb.go index a2d37609..b077d68b 100644 --- a/pkg/gen/api/comment/v1/comment_grpc.pb.go +++ b/pkg/gen/api/comment/v1/comment_grpc.pb.go @@ -1,6 +1,6 @@ // Code generated by protoc-gen-go-grpc. DO NOT EDIT. // versions: -// - protoc-gen-go-grpc v1.5.1 +// - protoc-gen-go-grpc v1.6.0 // - protoc (unknown) // source: comment/v1/comment.proto @@ -63,7 +63,7 @@ type CommentServiceServer interface { type UnimplementedCommentServiceServer struct{} func (UnimplementedCommentServiceServer) CommentsAccepted(context.Context, *CommentsAcceptedRequest) (*CommentsAcceptedResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method CommentsAccepted not implemented") + return nil, status.Error(codes.Unimplemented, "method CommentsAccepted not implemented") } func (UnimplementedCommentServiceServer) mustEmbedUnimplementedCommentServiceServer() {} func (UnimplementedCommentServiceServer) testEmbeddedByValue() {} @@ -76,7 +76,7 @@ type UnsafeCommentServiceServer interface { } func RegisterCommentServiceServer(s grpc.ServiceRegistrar, srv CommentServiceServer) { - // If the following call pancis, it indicates UnimplementedCommentServiceServer was + // If the following call panics, it indicates UnimplementedCommentServiceServer was // embedded by pointer and is nil. This will cause panics if an // unimplemented method is ever invoked, so we test this at initialization // time to prevent it from happening at runtime later due to I/O. diff --git a/pkg/gen/api/project/v1/project.pb.go b/pkg/gen/api/project/v1/project.pb.go index 3866d766..f67566ca 100644 --- a/pkg/gen/api/project/v1/project.pb.go +++ b/pkg/gen/api/project/v1/project.pb.go @@ -1,6 +1,6 @@ // Code generated by protoc-gen-go. DO NOT EDIT. 
// versions: -// protoc-gen-go v1.36.8 +// protoc-gen-go v1.36.10 // protoc (unknown) // source: project/v1/project.proto diff --git a/pkg/gen/api/project/v1/project_grpc.pb.go b/pkg/gen/api/project/v1/project_grpc.pb.go index ae132f4b..c50d3475 100644 --- a/pkg/gen/api/project/v1/project_grpc.pb.go +++ b/pkg/gen/api/project/v1/project_grpc.pb.go @@ -1,6 +1,6 @@ // Code generated by protoc-gen-go-grpc. DO NOT EDIT. // versions: -// - protoc-gen-go-grpc v1.5.1 +// - protoc-gen-go-grpc v1.6.0 // - protoc (unknown) // source: project/v1/project.proto @@ -141,25 +141,25 @@ type ProjectServiceServer interface { type UnimplementedProjectServiceServer struct{} func (UnimplementedProjectServiceServer) UpsertProject(context.Context, *UpsertProjectRequest) (*UpsertProjectResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method UpsertProject not implemented") + return nil, status.Error(codes.Unimplemented, "method UpsertProject not implemented") } func (UnimplementedProjectServiceServer) GetProject(context.Context, *GetProjectRequest) (*GetProjectResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method GetProject not implemented") + return nil, status.Error(codes.Unimplemented, "method GetProject not implemented") } func (UnimplementedProjectServiceServer) RunProjectPaperScore(context.Context, *RunProjectPaperScoreRequest) (*RunProjectPaperScoreResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method RunProjectPaperScore not implemented") + return nil, status.Error(codes.Unimplemented, "method RunProjectPaperScore not implemented") } func (UnimplementedProjectServiceServer) RunProjectPaperScoreComment(context.Context, *RunProjectPaperScoreCommentRequest) (*RunProjectPaperScoreCommentResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method RunProjectPaperScoreComment not implemented") + return nil, status.Error(codes.Unimplemented, "method RunProjectPaperScoreComment not implemented") } func 
(UnimplementedProjectServiceServer) RunProjectOverleafComment(context.Context, *RunProjectOverleafCommentRequest) (*RunProjectOverleafCommentResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method RunProjectOverleafComment not implemented") + return nil, status.Error(codes.Unimplemented, "method RunProjectOverleafComment not implemented") } func (UnimplementedProjectServiceServer) GetProjectInstructions(context.Context, *GetProjectInstructionsRequest) (*GetProjectInstructionsResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method GetProjectInstructions not implemented") + return nil, status.Error(codes.Unimplemented, "method GetProjectInstructions not implemented") } func (UnimplementedProjectServiceServer) UpsertProjectInstructions(context.Context, *UpsertProjectInstructionsRequest) (*UpsertProjectInstructionsResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method UpsertProjectInstructions not implemented") + return nil, status.Error(codes.Unimplemented, "method UpsertProjectInstructions not implemented") } func (UnimplementedProjectServiceServer) mustEmbedUnimplementedProjectServiceServer() {} func (UnimplementedProjectServiceServer) testEmbeddedByValue() {} @@ -172,7 +172,7 @@ type UnsafeProjectServiceServer interface { } func RegisterProjectServiceServer(s grpc.ServiceRegistrar, srv ProjectServiceServer) { - // If the following call pancis, it indicates UnimplementedProjectServiceServer was + // If the following call panics, it indicates UnimplementedProjectServiceServer was // embedded by pointer and is nil. This will cause panics if an // unimplemented method is ever invoked, so we test this at initialization // time to prevent it from happening at runtime later due to I/O. 
diff --git a/pkg/gen/api/shared/v1/shared.pb.go b/pkg/gen/api/shared/v1/shared.pb.go index ca0ec7ce..58d084f2 100644 --- a/pkg/gen/api/shared/v1/shared.pb.go +++ b/pkg/gen/api/shared/v1/shared.pb.go @@ -1,6 +1,6 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: -// protoc-gen-go v1.36.8 +// protoc-gen-go v1.36.10 // protoc (unknown) // source: shared/v1/shared.proto diff --git a/pkg/gen/api/user/v1/user.pb.go b/pkg/gen/api/user/v1/user.pb.go index 17871b25..85603cf2 100644 --- a/pkg/gen/api/user/v1/user.pb.go +++ b/pkg/gen/api/user/v1/user.pb.go @@ -1,6 +1,6 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: -// protoc-gen-go v1.36.8 +// protoc-gen-go v1.36.10 // protoc (unknown) // source: user/v1/user.proto @@ -622,6 +622,7 @@ type Settings struct { EnableCompletion bool `protobuf:"varint,3,opt,name=enable_completion,json=enableCompletion,proto3" json:"enable_completion,omitempty"` FullDocumentRag bool `protobuf:"varint,4,opt,name=full_document_rag,json=fullDocumentRag,proto3" json:"full_document_rag,omitempty"` ShowedOnboarding bool `protobuf:"varint,5,opt,name=showed_onboarding,json=showedOnboarding,proto3" json:"showed_onboarding,omitempty"` + OpenaiApiKey string `protobuf:"bytes,6,opt,name=openai_api_key,json=openaiApiKey,proto3" json:"openai_api_key,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -691,6 +692,13 @@ func (x *Settings) GetShowedOnboarding() bool { return false } +func (x *Settings) GetOpenaiApiKey() string { + if x != nil { + return x.OpenaiApiKey + } + return "" +} + type GetSettingsRequest struct { state protoimpl.MessageState `protogen:"open.v1"` unknownFields protoimpl.UnknownFields @@ -1145,13 +1153,14 @@ const file_user_v1_user_proto_rawDesc = "" + "\x06prompt\x18\x01 \x01(\v2\x0f.user.v1.PromptR\x06prompt\"2\n" + "\x13DeletePromptRequest\x12\x1b\n" + "\tprompt_id\x18\x01 \x01(\tR\bpromptId\"\x16\n" + - "\x14DeletePromptResponse\"\x9d\x02\n" + + 
"\x14DeletePromptResponse\"\xc3\x02\n" + "\bSettings\x12C\n" + "\x1eshow_shortcuts_after_selection\x18\x01 \x01(\bR\x1bshowShortcutsAfterSelection\x12F\n" + " full_width_paper_debugger_button\x18\x02 \x01(\bR\x1cfullWidthPaperDebuggerButton\x12+\n" + "\x11enable_completion\x18\x03 \x01(\bR\x10enableCompletion\x12*\n" + "\x11full_document_rag\x18\x04 \x01(\bR\x0ffullDocumentRag\x12+\n" + - "\x11showed_onboarding\x18\x05 \x01(\bR\x10showedOnboarding\"\x14\n" + + "\x11showed_onboarding\x18\x05 \x01(\bR\x10showedOnboarding\x12$\n" + + "\x0eopenai_api_key\x18\x06 \x01(\tR\fopenaiApiKey\"\x14\n" + "\x12GetSettingsRequest\"D\n" + "\x13GetSettingsResponse\x12-\n" + "\bsettings\x18\x01 \x01(\v2\x11.user.v1.SettingsR\bsettings\"F\n" + diff --git a/pkg/gen/api/user/v1/user_grpc.pb.go b/pkg/gen/api/user/v1/user_grpc.pb.go index add007be..898ff765 100644 --- a/pkg/gen/api/user/v1/user_grpc.pb.go +++ b/pkg/gen/api/user/v1/user_grpc.pb.go @@ -1,6 +1,6 @@ // Code generated by protoc-gen-go-grpc. DO NOT EDIT. 
// versions: -// - protoc-gen-go-grpc v1.5.1 +// - protoc-gen-go-grpc v1.6.0 // - protoc (unknown) // source: user/v1/user.proto @@ -180,34 +180,34 @@ type UserServiceServer interface { type UnimplementedUserServiceServer struct{} func (UnimplementedUserServiceServer) GetUser(context.Context, *GetUserRequest) (*GetUserResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method GetUser not implemented") + return nil, status.Error(codes.Unimplemented, "method GetUser not implemented") } func (UnimplementedUserServiceServer) ListPrompts(context.Context, *ListPromptsRequest) (*ListPromptsResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method ListPrompts not implemented") + return nil, status.Error(codes.Unimplemented, "method ListPrompts not implemented") } func (UnimplementedUserServiceServer) CreatePrompt(context.Context, *CreatePromptRequest) (*CreatePromptResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method CreatePrompt not implemented") + return nil, status.Error(codes.Unimplemented, "method CreatePrompt not implemented") } func (UnimplementedUserServiceServer) UpdatePrompt(context.Context, *UpdatePromptRequest) (*UpdatePromptResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method UpdatePrompt not implemented") + return nil, status.Error(codes.Unimplemented, "method UpdatePrompt not implemented") } func (UnimplementedUserServiceServer) GetUserInstructions(context.Context, *GetUserInstructionsRequest) (*GetUserInstructionsResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method GetUserInstructions not implemented") + return nil, status.Error(codes.Unimplemented, "method GetUserInstructions not implemented") } func (UnimplementedUserServiceServer) UpsertUserInstructions(context.Context, *UpsertUserInstructionsRequest) (*UpsertUserInstructionsResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method UpsertUserInstructions not implemented") + return 
nil, status.Error(codes.Unimplemented, "method UpsertUserInstructions not implemented") } func (UnimplementedUserServiceServer) DeletePrompt(context.Context, *DeletePromptRequest) (*DeletePromptResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method DeletePrompt not implemented") + return nil, status.Error(codes.Unimplemented, "method DeletePrompt not implemented") } func (UnimplementedUserServiceServer) GetSettings(context.Context, *GetSettingsRequest) (*GetSettingsResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method GetSettings not implemented") + return nil, status.Error(codes.Unimplemented, "method GetSettings not implemented") } func (UnimplementedUserServiceServer) UpdateSettings(context.Context, *UpdateSettingsRequest) (*UpdateSettingsResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method UpdateSettings not implemented") + return nil, status.Error(codes.Unimplemented, "method UpdateSettings not implemented") } func (UnimplementedUserServiceServer) ResetSettings(context.Context, *ResetSettingsRequest) (*ResetSettingsResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method ResetSettings not implemented") + return nil, status.Error(codes.Unimplemented, "method ResetSettings not implemented") } func (UnimplementedUserServiceServer) mustEmbedUnimplementedUserServiceServer() {} func (UnimplementedUserServiceServer) testEmbeddedByValue() {} @@ -220,7 +220,7 @@ type UnsafeUserServiceServer interface { } func RegisterUserServiceServer(s grpc.ServiceRegistrar, srv UserServiceServer) { - // If the following call pancis, it indicates UnimplementedUserServiceServer was + // If the following call panics, it indicates UnimplementedUserServiceServer was // embedded by pointer and is nil. This will cause panics if an // unimplemented method is ever invoked, so we test this at initialization // time to prevent it from happening at runtime later due to I/O. 
diff --git a/proto/chat/v1/chat.proto b/proto/chat/v1/chat.proto index 7c9854a8..ab8b7e12 100644 --- a/proto/chat/v1/chat.proto +++ b/proto/chat/v1/chat.proto @@ -34,8 +34,12 @@ service ChatService { rpc DeleteConversation(DeleteConversationRequest) returns (DeleteConversationResponse) { option (google.api.http) = {delete: "/_pd/api/v1/chats/conversations/{conversation_id}"}; } + rpc ListSupportedModels(ListSupportedModelsRequest) returns (ListSupportedModelsResponse) { + option (google.api.http) = {get: "/_pd/api/v1/chats/models"}; + } } +// deprecated enum LanguageModel { LANGUAGE_MODEL_UNSPECIFIED = 0; LANGUAGE_MODEL_OPENAI_GPT4O = 1; @@ -44,6 +48,13 @@ enum LanguageModel { LANGUAGE_MODEL_OPENAI_GPT5 = 7; LANGUAGE_MODEL_OPENAI_GPT5_MINI = 8; LANGUAGE_MODEL_OPENAI_GPT5_NANO = 9; + LANGUAGE_MODEL_OPENAI_GPT5_CHAT_LATEST = 10; + LANGUAGE_MODEL_OPENAI_O1 = 11; + LANGUAGE_MODEL_OPENAI_O1_MINI = 12; + LANGUAGE_MODEL_OPENAI_O3 = 13; + LANGUAGE_MODEL_OPENAI_O3_MINI = 14; + LANGUAGE_MODEL_OPENAI_O4_MINI = 15; + LANGUAGE_MODEL_OPENAI_CODEX_MINI_LATEST = 16; } message MessageTypeToolCall { @@ -149,6 +160,19 @@ message DeleteConversationResponse { // explicitly empty } +message SupportedModel { + string name = 1; + string slug = 2; +} + +message ListSupportedModelsRequest { + // explicitly empty +} + +message ListSupportedModelsResponse { + repeated SupportedModel models = 1; +} + // ============================== Streaming Messages // Information sent once at the beginning of a new conversation stream diff --git a/proto/user/v1/user.proto b/proto/user/v1/user.proto index c9a57597..08056803 100644 --- a/proto/user/v1/user.proto +++ b/proto/user/v1/user.proto @@ -120,6 +120,7 @@ message Settings { bool enable_completion = 3; bool full_document_rag = 4; bool showed_onboarding = 5; + string openai_api_key = 6; } message GetSettingsRequest {} diff --git a/webapp/_webapp/bun.lock b/webapp/_webapp/bun.lock index 77c6502c..6851b641 100644 --- a/webapp/_webapp/bun.lock +++ 
b/webapp/_webapp/bun.lock @@ -7,6 +7,8 @@ "@buf/googleapis_googleapis.bufbuild_es": "^2.2.3-20250211200939-546238c53f73.1", "@bufbuild/protobuf": "^2.5.1", "@capacitor-community/apple-sign-in": "^7.0.1", + "@grafana/faro-web-sdk": "^2.0.2", + "@grafana/faro-web-tracing": "^2.0.2", "@heroui/react": "^2.7.9", "@iconify/react": "^6.0.0", "@lukemorales/query-key-factory": "^1.3.4", @@ -34,6 +36,7 @@ "@codemirror/state": "^6.5.2", "@codemirror/view": "^6.37.1", "@eslint/js": "^9.28.0", + "@grafana/faro-rollup-plugin": "^0.7.0", "@types/chrome": "^0.0.326", "@types/codemirror": "^5.60.16", "@types/events": "^3.0.3", @@ -151,6 +154,16 @@ "@formatjs/intl-localematcher": ["@formatjs/intl-localematcher@0.6.1", "", { "dependencies": { "tslib": "^2.8.0" } }, "sha512-ePEgLgVCqi2BBFnTMWPfIghu6FkbZnnBVhO2sSxvLfrdFw7wCHAHiDoM2h4NRgjbaY7+B7HgOLZGkK187pZTZg=="], + "@grafana/faro-bundlers-shared": ["@grafana/faro-bundlers-shared@0.6.0", "", { "dependencies": { "ansis": "^4.0.0", "tar": "^7.1.0" } }, "sha512-aphGaqubY8uqHQDBhTT7kyc5eunlk1K/vxfj8tqL3h4QIbpOBYSAM3/NAT0s7GVVyxIBV7x+MTbohyy5/j4C/w=="], + + "@grafana/faro-core": ["@grafana/faro-core@2.0.2", "", { "dependencies": { "@opentelemetry/api": "^1.9.0", "@opentelemetry/otlp-transformer": "^0.208.0" } }, "sha512-dUcQBUDzvWmDbVDlYYEmVyjxZJjqC8syCuE/YWDJp/Zps9DkDH5CdF5kWuSb3cCm6U1iTo9czDkORPj0KEN9WQ=="], + + "@grafana/faro-rollup-plugin": ["@grafana/faro-rollup-plugin@0.7.0", "", { "dependencies": { "@grafana/faro-bundlers-shared": "^0.6.0", "cross-fetch": "^4.0.0", "magic-string": "^0.30.5", "rollup": "^4.22.4" } }, "sha512-l9tyBpKxz7O32SGCyqCsImV39V/O1/VWSbQkOVt5+JZlZIG9ZYpGr6E10JvmK6raKPQ8vEl3uQdUl0mExlQnjA=="], + + "@grafana/faro-web-sdk": ["@grafana/faro-web-sdk@2.0.2", "", { "dependencies": { "@grafana/faro-core": "^2.0.2", "ua-parser-js": "^1.0.32", "web-vitals": "^5.0.3" } }, "sha512-Tn5XUBfmEOXexFGRVmtR1JqyoUWU0luw1T27p9vwBCAg6X5yCVOf7N73ctIZ+WD7eS1DCLdAGT8ehx44WXgyeQ=="], + + "@grafana/faro-web-tracing": 
["@grafana/faro-web-tracing@2.0.2", "", { "dependencies": { "@grafana/faro-web-sdk": "^2.0.2", "@opentelemetry/api": "^1.9.0", "@opentelemetry/core": "^2.0.0", "@opentelemetry/exporter-trace-otlp-http": "^0.208.0", "@opentelemetry/instrumentation": "^0.208.0", "@opentelemetry/instrumentation-fetch": "^0.208.0", "@opentelemetry/instrumentation-xml-http-request": "^0.208.0", "@opentelemetry/otlp-transformer": "^0.208.0", "@opentelemetry/resources": "^2.0.0", "@opentelemetry/sdk-trace-web": "^2.0.0", "@opentelemetry/semantic-conventions": "^1.32.0" } }, "sha512-jFKpXkAGI4SO1wlDQfZ6I0ZBeGq1VykT4TiGWFY7VnMnQZeiEUA/KfpJpEya+CdkDmd+c9hiBmSZDLzplu8dNA=="], + "@heroui/accordion": ["@heroui/accordion@2.2.17", "", { "dependencies": { "@heroui/aria-utils": "2.2.17", "@heroui/divider": "2.2.14", "@heroui/dom-animation": "2.1.9", "@heroui/framer-utils": "2.1.16", "@heroui/react-utils": "2.1.10", "@heroui/shared-icons": "2.1.8", "@heroui/shared-utils": "2.1.9", "@heroui/use-aria-accordion": "2.2.12", "@react-aria/button": "3.13.1", "@react-aria/focus": "3.20.3", "@react-aria/interactions": "3.25.1", "@react-aria/utils": "3.29.0", "@react-stately/tree": "3.8.10", "@react-types/accordion": "3.0.0-alpha.26", "@react-types/shared": "3.29.1" }, "peerDependencies": { "@heroui/system": ">=2.4.7", "@heroui/theme": ">=2.4.6", "framer-motion": ">=11.5.6 || >=12.0.0-alpha.1", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "sha512-HrFl5cVtmNt+5pVboMmw0Eo21Ikx+pPZjvxm1GaFayCaaQD1gvLcISPSQF4rOft+197FA8vuk9Ke5UsBy0Ylig=="], "@heroui/alert": ["@heroui/alert@2.2.20", "", { "dependencies": { "@heroui/button": "2.2.20", "@heroui/react-utils": "2.1.10", "@heroui/shared-icons": "2.1.8", "@heroui/shared-utils": "2.1.9", "@react-aria/utils": "3.29.0", "@react-stately/utils": "3.10.6" }, "peerDependencies": { "@heroui/system": ">=2.4.7", "@heroui/theme": ">=2.4.6", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, 
"sha512-qGrZ3NkbKs30CzbAabLwJT4wzafJPZb7YCLpK8V1NwEEkwCGVZUsnAV6XmYHqJ7yiDyx9saq/W3laOeDyDT7Fw=="], @@ -325,13 +338,15 @@ "@isaacs/cliui": ["@isaacs/cliui@8.0.2", "", { "dependencies": { "string-width": "^5.1.2", "string-width-cjs": "npm:string-width@^4.2.0", "strip-ansi": "^7.0.1", "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", "wrap-ansi": "^8.1.0", "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" } }, "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA=="], + "@isaacs/fs-minipass": ["@isaacs/fs-minipass@4.0.1", "", { "dependencies": { "minipass": "^7.0.4" } }, "sha512-wgm9Ehl2jpeqP3zw/7mo3kRHFp5MEDhqAdwy1fTGkHAwnkGOVsgpvQhL8B5n1qlb01jV3n/bI0ZfZp5lWA1k4w=="], + "@jridgewell/gen-mapping": ["@jridgewell/gen-mapping@0.3.8", "", { "dependencies": { "@jridgewell/set-array": "^1.2.1", "@jridgewell/sourcemap-codec": "^1.4.10", "@jridgewell/trace-mapping": "^0.3.24" } }, "sha512-imAbBGkb+ebQyxKgzv5Hu2nmROxoDOXHh80evxdoXNOrvAnVx7zimzc1Oo5h9RlfV4vPXaE2iM5pOFbvOCClWA=="], "@jridgewell/resolve-uri": ["@jridgewell/resolve-uri@3.1.2", "", {}, "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw=="], "@jridgewell/set-array": ["@jridgewell/set-array@1.2.1", "", {}, "sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A=="], - "@jridgewell/sourcemap-codec": ["@jridgewell/sourcemap-codec@1.5.0", "", {}, "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ=="], + "@jridgewell/sourcemap-codec": ["@jridgewell/sourcemap-codec@1.5.5", "", {}, "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og=="], "@jridgewell/trace-mapping": ["@jridgewell/trace-mapping@0.3.25", "", { "dependencies": { "@jridgewell/resolve-uri": "^3.1.0", "@jridgewell/sourcemap-codec": "^1.4.14" } }, "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ=="], @@ -345,8 +360,58 @@ 
"@nodelib/fs.walk": ["@nodelib/fs.walk@1.2.8", "", { "dependencies": { "@nodelib/fs.scandir": "2.1.5", "fastq": "^1.6.0" } }, "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg=="], + "@opentelemetry/api": ["@opentelemetry/api@1.9.0", "", {}, "sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg=="], + + "@opentelemetry/api-logs": ["@opentelemetry/api-logs@0.208.0", "", { "dependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-CjruKY9V6NMssL/T1kAFgzosF1v9o6oeN+aX5JB/C/xPNtmgIJqcXHG7fA82Ou1zCpWGl4lROQUKwUNE1pMCyg=="], + + "@opentelemetry/core": ["@opentelemetry/core@2.2.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-FuabnnUm8LflnieVxs6eP7Z383hgQU4W1e3KJS6aOG3RxWxcHyBxH8fDMHNgu/gFx/M2jvTOW/4/PHhLz6bjWw=="], + + "@opentelemetry/exporter-trace-otlp-http": ["@opentelemetry/exporter-trace-otlp-http@0.208.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/otlp-exporter-base": "0.208.0", "@opentelemetry/otlp-transformer": "0.208.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-trace-base": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-jbzDw1q+BkwKFq9yxhjAJ9rjKldbt5AgIy1gmEIJjEV/WRxQ3B6HcLVkwbjJ3RcMif86BDNKR846KJ0tY0aOJA=="], + + "@opentelemetry/instrumentation": ["@opentelemetry/instrumentation@0.208.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.208.0", "import-in-the-middle": "^2.0.0", "require-in-the-middle": "^8.0.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-Eju0L4qWcQS+oXxi6pgh7zvE2byogAkcsVv0OjHF/97iOz1N/aKE6etSGowYkie+YA1uo6DNwdSxaaNnLvcRlA=="], + + "@opentelemetry/instrumentation-fetch": ["@opentelemetry/instrumentation-fetch@0.208.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/instrumentation": "0.208.0", "@opentelemetry/sdk-trace-web": 
"2.2.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-zgStoUfNF1xH9bCq539k1aeieKxPiAvBo5gKipQ9fIt+eJsFvqGcSzrrDX+OYgpIPW/IVNgWBoOw6zVmKwgNwQ=="], + + "@opentelemetry/instrumentation-xml-http-request": ["@opentelemetry/instrumentation-xml-http-request@0.208.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/instrumentation": "0.208.0", "@opentelemetry/sdk-trace-web": "2.2.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-me0knebxJxnzis73p5/ZQgdLNG6nsUXMsDR/dZk+BPOiNyd3Me9ye2wVM06JlcLA54w4JESw6wMTNi4lMhowFQ=="], + + "@opentelemetry/otlp-exporter-base": ["@opentelemetry/otlp-exporter-base@0.208.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/otlp-transformer": "0.208.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-gMd39gIfVb2OgxldxUtOwGJYSH8P1kVFFlJLuut32L6KgUC4gl1dMhn+YC2mGn0bDOiQYSk/uHOdSjuKp58vvA=="], + + "@opentelemetry/otlp-transformer": ["@opentelemetry/otlp-transformer@0.208.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.208.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-logs": "0.208.0", "@opentelemetry/sdk-metrics": "2.2.0", "@opentelemetry/sdk-trace-base": "2.2.0", "protobufjs": "^7.3.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-DCFPY8C6lAQHUNkzcNT9R+qYExvsk6C5Bto2pbNxgicpcSWbe2WHShLxkOxIdNcBiYPdVHv/e7vH7K6TI+C+fQ=="], + + "@opentelemetry/resources": ["@opentelemetry/resources@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-1pNQf/JazQTMA0BiO5NINUzH0cbLbbl7mntLa4aJNmCCXSj0q03T5ZXXL0zw4G55TjdL9Tz32cznGClf+8zr5A=="], + + "@opentelemetry/sdk-logs": ["@opentelemetry/sdk-logs@0.208.0", "", { "dependencies": { 
"@opentelemetry/api-logs": "0.208.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.4.0 <1.10.0" } }, "sha512-QlAyL1jRpOeaqx7/leG1vJMp84g0xKP6gJmfELBpnI4O/9xPX+Hu5m1POk9Kl+veNkyth5t19hRlN6tNY1sjbA=="], + + "@opentelemetry/sdk-metrics": ["@opentelemetry/sdk-metrics@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.9.0 <1.10.0" } }, "sha512-G5KYP6+VJMZzpGipQw7Giif48h6SGQ2PFKEYCybeXJsOCB4fp8azqMAAzE5lnnHK3ZVwYQrgmFbsUJO/zOnwGw=="], + + "@opentelemetry/sdk-trace-base": ["@opentelemetry/sdk-trace-base@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-xWQgL0Bmctsalg6PaXExmzdedSp3gyKV8mQBwK/j9VGdCDu2fmXIb2gAehBKbkXCpJ4HPkgv3QfoJWRT4dHWbw=="], + + "@opentelemetry/sdk-trace-web": ["@opentelemetry/sdk-trace-web@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/sdk-trace-base": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-x/LHsDBO3kfqaFx5qSzBljJ5QHsRXrvS4MybBDy1k7Svidb8ZyIPudWVzj3s5LpPkYZIgi9e+7tdsNCnptoelw=="], + + "@opentelemetry/semantic-conventions": ["@opentelemetry/semantic-conventions@1.38.0", "", {}, "sha512-kocjix+/sSggfJhwXqClZ3i9Y/MI0fp7b+g7kCRm6psy2dsf8uApTRclwG18h8Avm7C9+fnt+O36PspJ/OzoWg=="], + "@pkgjs/parseargs": ["@pkgjs/parseargs@0.11.0", "", {}, "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg=="], + "@protobufjs/aspromise": ["@protobufjs/aspromise@1.1.2", "", {}, "sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ=="], + + "@protobufjs/base64": ["@protobufjs/base64@1.1.2", "", {}, 
"sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg=="], + + "@protobufjs/codegen": ["@protobufjs/codegen@2.0.4", "", {}, "sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg=="], + + "@protobufjs/eventemitter": ["@protobufjs/eventemitter@1.1.0", "", {}, "sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q=="], + + "@protobufjs/fetch": ["@protobufjs/fetch@1.1.0", "", { "dependencies": { "@protobufjs/aspromise": "^1.1.1", "@protobufjs/inquire": "^1.1.0" } }, "sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ=="], + + "@protobufjs/float": ["@protobufjs/float@1.0.2", "", {}, "sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ=="], + + "@protobufjs/inquire": ["@protobufjs/inquire@1.1.0", "", {}, "sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q=="], + + "@protobufjs/path": ["@protobufjs/path@1.1.2", "", {}, "sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA=="], + + "@protobufjs/pool": ["@protobufjs/pool@1.1.0", "", {}, "sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw=="], + + "@protobufjs/utf8": ["@protobufjs/utf8@1.1.0", "", {}, "sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw=="], + "@react-aria/breadcrumbs": ["@react-aria/breadcrumbs@3.5.24", "", { "dependencies": { "@react-aria/i18n": "^3.12.9", "@react-aria/link": "^3.8.1", "@react-aria/utils": "^3.29.0", "@react-types/breadcrumbs": "^3.7.13", "@react-types/shared": "^3.29.1", "@swc/helpers": "^0.5.0" }, "peerDependencies": { "react": "^16.8.0 || ^17.0.0-rc.1 || ^18.0.0 || ^19.0.0-rc.1", "react-dom": "^16.8.0 || ^17.0.0-rc.1 || ^18.0.0 || ^19.0.0-rc.1" } }, 
"sha512-CRheGyyM8afPJvDHLXn/mmGG/WAr/z2LReK3DlPdxVKcsOn7g3NIRxAcAIAJQlDLdOiu1SXHiZe6uu2jPhHrxA=="], "@react-aria/button": ["@react-aria/button@3.13.1", "", { "dependencies": { "@react-aria/interactions": "^3.25.1", "@react-aria/toolbar": "3.0.0-beta.16", "@react-aria/utils": "^3.29.0", "@react-stately/toggle": "^3.8.4", "@react-types/button": "^3.12.1", "@react-types/shared": "^3.29.1", "@swc/helpers": "^0.5.0" }, "peerDependencies": { "react": "^16.8.0 || ^17.0.0-rc.1 || ^18.0.0 || ^19.0.0-rc.1", "react-dom": "^16.8.0 || ^17.0.0-rc.1 || ^18.0.0 || ^19.0.0-rc.1" } }, "sha512-E49qcbBRgofXYfWbli50bepWVNtQBq7qewL9XsX7nHkwPPUe1IRwJOnWZqYMgwwhUBOXfnsR6/TssiXqZsrJdw=="], @@ -655,6 +720,8 @@ "acorn": ["acorn@8.14.1", "", { "bin": "bin/acorn" }, "sha512-OvQ/2pUDKmgfCg++xsTX1wGxfTaszcHVcTctW4UJB4hibJx2HXxxO5UmVgyjMa+ZDsiaf5wWLXYpRWMmBI0QHg=="], + "acorn-import-attributes": ["acorn-import-attributes@1.9.5", "", { "peerDependencies": { "acorn": "^8" } }, "sha512-n02Vykv5uA3eHGM/Z2dQrcD56kL8TyDb2p1+0P83PClMnC/nc+anbQRhIOWnSq4Ke/KvDPrY3C9hDtC/A3eHnQ=="], + "acorn-jsx": ["acorn-jsx@5.3.2", "", { "peerDependencies": { "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" } }, "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ=="], "ajv": ["ajv@6.12.6", "", { "dependencies": { "fast-deep-equal": "^3.1.1", "fast-json-stable-stringify": "^2.0.0", "json-schema-traverse": "^0.4.1", "uri-js": "^4.2.2" } }, "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g=="], @@ -663,6 +730,8 @@ "ansi-styles": ["ansi-styles@4.3.0", "", { "dependencies": { "color-convert": "^2.0.1" } }, "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg=="], + "ansis": ["ansis@4.2.0", "", {}, "sha512-HqZ5rWlFjGiV0tDm3UxxgNRqsOTniqoKZu0pIAfh7TZQMGuZK+hH0drySty0si0QXj1ieop4+SkSfPZBPPkHig=="], + "any-promise": ["any-promise@1.3.0", "", {}, 
"sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A=="], "anymatch": ["anymatch@3.1.3", "", { "dependencies": { "normalize-path": "^3.0.0", "picomatch": "^2.0.4" } }, "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw=="], @@ -699,6 +768,10 @@ "chokidar": ["chokidar@3.6.0", "", { "dependencies": { "anymatch": "~3.1.2", "braces": "~3.0.2", "glob-parent": "~5.1.2", "is-binary-path": "~2.1.0", "is-glob": "~4.0.1", "normalize-path": "~3.0.0", "readdirp": "~3.6.0" }, "optionalDependencies": { "fsevents": "~2.3.2" } }, "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw=="], + "chownr": ["chownr@3.0.0", "", {}, "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g=="], + + "cjs-module-lexer": ["cjs-module-lexer@1.4.3", "", {}, "sha512-9z8TZaGM1pfswYeXrUpzPrkx8UnWYdhJclsiYMm6x/w5+nN+8Tf/LnAgfLGQCm59qAOxU8WwHEq2vNwF6i4j+Q=="], + "clsx": ["clsx@1.2.1", "", {}, "sha512-EcR6r5a8bj6pu3ycsa/E/cKVGuTgZJZdsyUYHOksG/UHIiKfjxzRxYJpyVBwYaQeOvghal9fcc4PidlgzugAQg=="], "color": ["color@4.2.3", "", { "dependencies": { "color-convert": "^2.0.1", "color-string": "^1.9.0" } }, "sha512-1rXeuUUiGGrykh+CeBdu5Ie7OJwinCgQY0bc7GCRxy5xVHy+moaqkpL/jqQq0MtQOeYcrqEz4abc5f0KtU7W4A=="], @@ -723,6 +796,8 @@ "crelt": ["crelt@1.0.6", "", {}, "sha512-VQ2MBenTq1fWZUH9DJNGti7kKv6EeAuYr3cLwxUWhIu1baTaXh4Ib5W2CqHVqib4/MqbYGJqiL3Zb8GJZr3l4g=="], + "cross-fetch": ["cross-fetch@4.1.0", "", { "dependencies": { "node-fetch": "^2.7.0" } }, "sha512-uKm5PU+MHTootlWEY+mZ4vvXoCn4fLQxT9dSc1sXVMSFkINTJVN8cAQROpwcKm8bJ/c7rgZVIBWzH5T78sNZZw=="], + "cross-spawn": ["cross-spawn@7.0.6", "", { "dependencies": { "path-key": "^3.1.0", "shebang-command": "^2.0.0", "which": "^2.0.1" } }, "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA=="], "cssesc": ["cssesc@3.0.0", "", { "bin": "bin/cssesc" }, 
"sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg=="], @@ -861,6 +936,8 @@ "import-fresh": ["import-fresh@3.3.1", "", { "dependencies": { "parent-module": "^1.0.0", "resolve-from": "^4.0.0" } }, "sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ=="], + "import-in-the-middle": ["import-in-the-middle@2.0.0", "", { "dependencies": { "acorn": "^8.14.0", "acorn-import-attributes": "^1.9.5", "cjs-module-lexer": "^1.2.2", "module-details-from-path": "^1.0.3" } }, "sha512-yNZhyQYqXpkT0AKq3F3KLasUSK4fHvebNH5hOsKQw2dhGSALvQ4U0BqUc5suziKvydO5u5hgN2hy1RJaho8U5A=="], + "imurmurhash": ["imurmurhash@0.1.4", "", {}, "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA=="], "inherits": ["inherits@2.0.4", "", {}, "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="], @@ -917,10 +994,14 @@ "lodash.merge": ["lodash.merge@4.6.2", "", {}, "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ=="], + "long": ["long@5.3.2", "", {}, "sha512-mNAgZ1GmyNhD7AuqnTG3/VQ26o760+ZYBPKjPvugO8+nLbYfX6TVpJPseBvopbdY+qpZ/lKUnmEc1LeZYS3QAA=="], + "loose-envify": ["loose-envify@1.4.0", "", { "dependencies": { "js-tokens": "^3.0.0 || ^4.0.0" }, "bin": "cli.js" }, "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q=="], "lru-cache": ["lru-cache@10.4.3", "", {}, "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ=="], + "magic-string": ["magic-string@0.30.21", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.5.5" } }, "sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ=="], + "markdown-to-jsx": ["markdown-to-jsx@7.7.6", "", { "peerDependencies": { "react": ">= 0.14.0" } }, 
"sha512-/PWFFoKKMidk4Ut06F5hs5sluq1aJ0CGvUJWsnCK6hx/LPM8vlhvKAxtGHJ+U+V2Il2wmnfO6r81ICD3xZRVaw=="], "math-intrinsics": ["math-intrinsics@1.1.0", "", {}, "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g=="], @@ -937,6 +1018,10 @@ "minipass": ["minipass@7.1.2", "", {}, "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw=="], + "minizlib": ["minizlib@3.1.0", "", { "dependencies": { "minipass": "^7.1.2" } }, "sha512-KZxYo1BUkWD2TVFLr0MQoM8vUUigWD3LlD83a/75BqC+4qE0Hb1Vo5v1FgcfaNXvfXzr+5EhQ6ing/CaBijTlw=="], + + "module-details-from-path": ["module-details-from-path@1.0.4", "", {}, "sha512-EGWKgxALGMgzvxYF1UyGTy0HXX/2vHLkw6+NvDKW2jypWbHpjQuj4UMcqQWXHERJhVGKikolT06G3bcKe4fi7w=="], + "motion-dom": ["motion-dom@12.15.0", "", { "dependencies": { "motion-utils": "^12.12.1" } }, "sha512-D2ldJgor+2vdcrDtKJw48k3OddXiZN1dDLLWrS8kiHzQdYVruh0IoTwbJBslrnTXIPgFED7PBN2Zbwl7rNqnhA=="], "motion-utils": ["motion-utils@12.12.1", "", {}, "sha512-f9qiqUHm7hWSLlNW8gS9pisnsN7CRFRD58vNjptKdsqFLpkVnX00TNeD6Q0d27V9KzT7ySFyK1TZ/DShfVOv6w=="], @@ -949,6 +1034,8 @@ "natural-compare": ["natural-compare@1.4.0", "", {}, "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw=="], + "node-fetch": ["node-fetch@2.7.0", "", { "dependencies": { "whatwg-url": "^5.0.0" }, "peerDependencies": { "encoding": "^0.1.0" }, "optionalPeers": ["encoding"] }, "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A=="], + "node-releases": ["node-releases@2.0.19", "", {}, "sha512-xxOWJsBKtzAq7DY0J+DTzuz58K8e7sJbdgwkbMWQe8UYB6ekmsQ45q0M/tJDsGaZmbC+l7n57UV8Hl5tHxO9uw=="], "nodemon": ["nodemon@3.1.10", "", { "dependencies": { "chokidar": "^3.5.2", "debug": "^4", "ignore-by-default": "^1.0.1", "minimatch": "^3.1.2", "pstree.remy": "^1.1.8", "semver": "^7.5.3", "simple-update-notifier": "^2.0.0", "supports-color": "^5.5.0", "touch": "^3.1.0", "undefsafe": 
"^2.0.5" }, "bin": "bin/nodemon.js" }, "sha512-WDjw3pJ0/0jMFmyNDp3gvY2YizjLmmOUQo6DEBY+JgdvW/yQ9mEeSw6H5ythl5Ny2ytb7f9C2nIbjSxMNzbJXw=="], @@ -1013,6 +1100,8 @@ "prop-types": ["prop-types@15.8.1", "", { "dependencies": { "loose-envify": "^1.4.0", "object-assign": "^4.1.1", "react-is": "^16.13.1" } }, "sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg=="], + "protobufjs": ["protobufjs@7.5.4", "", { "dependencies": { "@protobufjs/aspromise": "^1.1.2", "@protobufjs/base64": "^1.1.2", "@protobufjs/codegen": "^2.0.4", "@protobufjs/eventemitter": "^1.1.0", "@protobufjs/fetch": "^1.1.0", "@protobufjs/float": "^1.0.2", "@protobufjs/inquire": "^1.1.0", "@protobufjs/path": "^1.1.2", "@protobufjs/pool": "^1.1.0", "@protobufjs/utf8": "^1.1.0", "@types/node": ">=13.7.0", "long": "^5.0.0" } }, "sha512-CvexbZtbov6jW2eXAvLukXjXUW1TzFaivC46BpWc/3BpcCysb5Vffu+B3XHMm8lVEuy2Mm4XGex8hBSg1yapPg=="], + "proxy-from-env": ["proxy-from-env@1.1.0", "", {}, "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg=="], "pstree.remy": ["pstree.remy@1.1.8", "", {}, "sha512-77DZwxQmxKnu3aR542U+X8FypNzbfJ+C5XQDk3uWjWxn6151aIMGthWYRXTqT1E5oJvg+ljaa2OJi+VfvCOQ8w=="], @@ -1041,6 +1130,8 @@ "readdirp": ["readdirp@3.6.0", "", { "dependencies": { "picomatch": "^2.2.1" } }, "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA=="], + "require-in-the-middle": ["require-in-the-middle@8.0.1", "", { "dependencies": { "debug": "^4.3.5", "module-details-from-path": "^1.0.3" } }, "sha512-QT7FVMXfWOYFbeRBF6nu+I6tr2Tf3u0q8RIEjNob/heKY/nh7drD/k7eeMFmSQgnTtCzLDcCu/XEnpW2wk4xCQ=="], + "resolve": ["resolve@1.22.10", "", { "dependencies": { "is-core-module": "^2.16.0", "path-parse": "^1.0.7", "supports-preserve-symlinks-flag": "^1.0.0" }, "bin": "bin/resolve" }, "sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w=="], "resolve-from": 
["resolve-from@4.0.0", "", {}, "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g=="], @@ -1101,6 +1192,8 @@ "tailwindcss": ["tailwindcss@3.4.17", "", { "dependencies": { "@alloc/quick-lru": "^5.2.0", "arg": "^5.0.2", "chokidar": "^3.6.0", "didyoumean": "^1.2.2", "dlv": "^1.1.3", "fast-glob": "^3.3.2", "glob-parent": "^6.0.2", "is-glob": "^4.0.3", "jiti": "^1.21.6", "lilconfig": "^3.1.3", "micromatch": "^4.0.8", "normalize-path": "^3.0.0", "object-hash": "^3.0.0", "picocolors": "^1.1.1", "postcss": "^8.4.47", "postcss-import": "^15.1.0", "postcss-js": "^4.0.1", "postcss-load-config": "^4.0.2", "postcss-nested": "^6.2.0", "postcss-selector-parser": "^6.1.2", "resolve": "^1.22.8", "sucrase": "^3.35.0" }, "bin": { "tailwind": "lib/cli.js", "tailwindcss": "lib/cli.js" } }, "sha512-w33E2aCvSDP0tW9RZuNXadXlkHXqFzSkQew/aIa2i/Sj8fThxwovwlXHSPXTbAHwEIhBFXAedUhP2tueAKP8Og=="], + "tar": ["tar@7.5.2", "", { "dependencies": { "@isaacs/fs-minipass": "^4.0.0", "chownr": "^3.0.0", "minipass": "^7.1.2", "minizlib": "^3.1.0", "yallist": "^5.0.0" } }, "sha512-7NyxrTE4Anh8km8iEy7o0QYPs+0JKBTj5ZaqHg6B39erLg0qYXN3BijtShwbsNSvQ+LN75+KV+C4QR/f6Gwnpg=="], + "thenify": ["thenify@3.3.1", "", { "dependencies": { "any-promise": "^1.0.0" } }, "sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw=="], "thenify-all": ["thenify-all@1.6.0", "", { "dependencies": { "thenify": ">= 3.1.0 < 4" } }, "sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA=="], @@ -1111,6 +1204,8 @@ "touch": ["touch@3.1.1", "", { "bin": { "nodetouch": "bin/nodetouch.js" } }, "sha512-r0eojU4bI8MnHr8c5bNo7lJDdI2qXlWWJk6a9EAFG7vbhTjElYhBVS3/miuE0uOuoLdb8Mc/rVfsmm6eo5o9GA=="], + "tr46": ["tr46@0.0.3", "", {}, "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw=="], + "ts-api-utils": ["ts-api-utils@2.1.0", "", { "peerDependencies": { "typescript": ">=4.8.4" } }, 
"sha512-CUgTZL1irw8u29bzrOD/nH85jqyc74D6SshFgujOIA7osm2Rz7dYH77agkx7H4FBNxDq7Cjf+IjaX/8zwFW+ZQ=="], "ts-interface-checker": ["ts-interface-checker@0.1.13", "", {}, "sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA=="], @@ -1123,6 +1218,8 @@ "typescript-eslint": ["typescript-eslint@8.33.0", "", { "dependencies": { "@typescript-eslint/eslint-plugin": "8.33.0", "@typescript-eslint/parser": "8.33.0", "@typescript-eslint/utils": "8.33.0" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-5YmNhF24ylCsvdNW2oJwMzTbaeO4bg90KeGtMjUw0AGtHksgEPLRTUil+coHwCfiu4QjVJFnjp94DmU6zV7DhQ=="], + "ua-parser-js": ["ua-parser-js@1.0.41", "", { "bin": { "ua-parser-js": "script/cli.js" } }, "sha512-LbBDqdIC5s8iROCUjMbW1f5dJQTEFB1+KO9ogbvlb3nm9n4YHa5p4KTvFPWvh2Hs8gZMBuiB1/8+pdfe/tDPug=="], + "undefsafe": ["undefsafe@2.0.5", "", {}, "sha512-WxONCrssBM8TSPRqN5EmsjVrsv4A8X12J4ArBiiayv3DyyG3ZlIg6yysuuSYdZsVz3TKcTg2fd//Ujd4CHV1iA=="], "undici-types": ["undici-types@6.21.0", "", {}, "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ=="], @@ -1147,6 +1244,12 @@ "w3c-keyname": ["w3c-keyname@2.2.8", "", {}, "sha512-dpojBhNsCNN7T82Tm7k26A6G9ML3NkhDsnw9n/eoxSRlVBB4CEtIQ/KTCLI2Fwf3ataSXRhYFkQi3SlnFwPvPQ=="], + "web-vitals": ["web-vitals@5.1.0", "", {}, "sha512-ArI3kx5jI0atlTtmV0fWU3fjpLmq/nD3Zr1iFFlJLaqa5wLBkUSzINwBPySCX/8jRyjlmy1Volw1kz1g9XE4Jg=="], + + "webidl-conversions": ["webidl-conversions@3.0.1", "", {}, "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ=="], + + "whatwg-url": ["whatwg-url@5.0.0", "", { "dependencies": { "tr46": "~0.0.3", "webidl-conversions": "^3.0.0" } }, "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw=="], + "which": ["which@2.0.2", "", { "dependencies": { "isexe": "^2.0.0" }, "bin": { "node-which": "bin/node-which" } }, 
"sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA=="], "word-wrap": ["word-wrap@1.2.5", "", {}, "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA=="], @@ -1155,6 +1258,8 @@ "wrap-ansi-cjs": ["wrap-ansi@7.0.0", "", { "dependencies": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", "strip-ansi": "^6.0.0" } }, "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q=="], + "yallist": ["yallist@5.0.0", "", {}, "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw=="], + "yaml": ["yaml@2.8.0", "", { "bin": "bin.mjs" }, "sha512-4lLa/EcQCB0cJkyts+FpIRx5G/llPxfP6VQU5KByHEhLxY3IJCH0f0Hy1MHI8sClTvsIb8qwRJ6R/ZdlDJ/leQ=="], "yocto-queue": ["yocto-queue@0.1.0", "", {}, "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q=="], @@ -1169,6 +1274,10 @@ "@humanfs/node/@humanwhocodes/retry": ["@humanwhocodes/retry@0.3.1", "", {}, "sha512-JBxkERygn7Bv/GbN5Rv8Ul6LVknS+5Bp6RgDC/O8gEBU/yeH5Ui5C/OlWrTb6qct7LjjfT6Re2NxB0ln0yYybA=="], + "@jridgewell/gen-mapping/@jridgewell/sourcemap-codec": ["@jridgewell/sourcemap-codec@1.5.0", "", {}, "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ=="], + + "@jridgewell/trace-mapping/@jridgewell/sourcemap-codec": ["@jridgewell/sourcemap-codec@1.5.0", "", {}, "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ=="], + "@react-aria/focus/clsx": ["clsx@2.1.1", "", {}, "sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA=="], "@react-aria/utils/clsx": ["clsx@2.1.1", "", {}, "sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA=="], @@ -1199,10 +1308,6 @@ "strip-ansi-cjs/ansi-regex": ["ansi-regex@5.0.1", "", {}, 
"sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="], - "tinyglobby/fdir": ["fdir@6.4.5", "", { "peerDependencies": { "picomatch": "^3 || ^4" } }, "sha512-4BG7puHpVsIYxZUbiUE3RqGloLaSSwzYie5jvasC4LWuBWzZawynvYouhjbQKw2JuIGYdm0DzIxl8iVidKlUEw=="], - - "tinyglobby/picomatch": ["picomatch@4.0.2", "", {}, "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg=="], - "wrap-ansi/ansi-styles": ["ansi-styles@6.2.1", "", {}, "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug=="], "wrap-ansi-cjs/string-width": ["string-width@4.2.3", "", { "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", "strip-ansi": "^6.0.1" } }, "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g=="], diff --git a/webapp/_webapp/package.json b/webapp/_webapp/package.json index 5630d99b..8b08c29e 100644 --- a/webapp/_webapp/package.json +++ b/webapp/_webapp/package.json @@ -24,6 +24,8 @@ "@buf/googleapis_googleapis.bufbuild_es": "^2.2.3-20250211200939-546238c53f73.1", "@bufbuild/protobuf": "^2.5.1", "@capacitor-community/apple-sign-in": "^7.0.1", + "@grafana/faro-web-sdk": "^2.0.2", + "@grafana/faro-web-tracing": "^2.0.2", "@heroui/react": "^2.7.9", "@iconify/react": "^6.0.0", "@lukemorales/query-key-factory": "^1.3.4", @@ -54,6 +56,7 @@ "@types/chrome": "^0.0.326", "@types/codemirror": "^5.60.16", "@types/events": "^3.0.3", + "@grafana/faro-rollup-plugin": "^0.7.0", "@types/node": "^22.15.29", "@types/react": "^19.1.6", "@types/react-dom": "^19.1.5", @@ -71,4 +74,4 @@ "typescript-eslint": "^8.33.0", "vite": "^6.3.5" } -} +} \ No newline at end of file diff --git a/webapp/_webapp/src/components/cell-wrapper.tsx b/webapp/_webapp/src/components/cell-wrapper.tsx index 19421d82..c3486f4e 100644 --- a/webapp/_webapp/src/components/cell-wrapper.tsx +++ b/webapp/_webapp/src/components/cell-wrapper.tsx @@ -10,7 
+10,7 @@ const CellWrapper = React.forwardRef -
+
{/* Message content */}
diff --git a/webapp/_webapp/src/components/switch-cell.tsx b/webapp/_webapp/src/components/switch-cell.tsx index 6058b0ba..f10d78bc 100644 --- a/webapp/_webapp/src/components/switch-cell.tsx +++ b/webapp/_webapp/src/components/switch-cell.tsx @@ -37,7 +37,7 @@ const SwitchCell = React.forwardRef( ...classNames, base: cn( "inline-flex bg-content2 flex-row-reverse w-full max-w-full items-center", - "justify-between cursor-pointer rounded-medium gap-2 !p-3", + "justify-between cursor-pointer rounded-medium gap-2 !p-2", classNames?.base, ), }} diff --git a/webapp/_webapp/src/hooks/useLanguageModels.ts b/webapp/_webapp/src/hooks/useLanguageModels.ts index b600e3e3..5985e479 100644 --- a/webapp/_webapp/src/hooks/useLanguageModels.ts +++ b/webapp/_webapp/src/hooks/useLanguageModels.ts @@ -1,57 +1,116 @@ import { useCallback, useMemo } from "react"; -import { LanguageModel } from "../pkg/gen/apiclient/chat/v1/chat_pb"; +import { LanguageModel, SupportedModel } from "../pkg/gen/apiclient/chat/v1/chat_pb"; import { useConversationStore } from "../stores/conversation/conversation-store"; +import { useListSupportedModelsQuery } from "../query"; export type Model = { name: string; - description: string; + slug: string; languageModel: LanguageModel; }; +const slugToLanguageModel = (slug: string) => { + switch (slug) { + case "gpt-4.1": + return LanguageModel.OPENAI_GPT41; + case "gpt-4o": + return LanguageModel.OPENAI_GPT4O; + case "gpt-4.1-mini": + return LanguageModel.OPENAI_GPT41_MINI; + case "gpt-5": + return LanguageModel.OPENAI_GPT5; + case "gpt-5-mini": + return LanguageModel.OPENAI_GPT5_MINI; + case "gpt-5-nano": + return LanguageModel.OPENAI_GPT5_NANO; + case "gpt-5-chat-latest": + return LanguageModel.OPENAI_GPT5_CHAT_LATEST; + case "o1": + return LanguageModel.OPENAI_O1; + case "o1-mini": + return LanguageModel.OPENAI_O1_MINI; + case "o3": + return LanguageModel.OPENAI_O3; + case "o3-mini": + return LanguageModel.OPENAI_O3_MINI; + case "o4-mini": + return 
LanguageModel.OPENAI_O4_MINI; + case "codex-mini-latest": + return LanguageModel.OPENAI_CODEX_MINI_LATEST; + default: + return LanguageModel.OPENAI_GPT41; + } +}; + +const languageModelToSlug = (languageModel: LanguageModel) => { + switch (languageModel) { + case LanguageModel.OPENAI_GPT41: + return "gpt-4.1"; + case LanguageModel.OPENAI_GPT4O: + return "gpt-4o"; + case LanguageModel.OPENAI_GPT41_MINI: + return "gpt-4.1-mini"; + case LanguageModel.OPENAI_GPT5: + return "gpt-5"; + case LanguageModel.OPENAI_GPT5_MINI: + return "gpt-5-mini"; + case LanguageModel.OPENAI_GPT5_NANO: + return "gpt-5-nano"; + case LanguageModel.OPENAI_GPT5_CHAT_LATEST: + return "gpt-5-chat-latest"; + case LanguageModel.OPENAI_O1: + return "o1"; + case LanguageModel.OPENAI_O1_MINI: + return "o1-mini"; + case LanguageModel.OPENAI_O3: + return "o3"; + case LanguageModel.OPENAI_O3_MINI: + return "o3-mini"; + case LanguageModel.OPENAI_O4_MINI: + return "o4-mini"; + case LanguageModel.OPENAI_CODEX_MINI_LATEST: + return "codex-mini-latest"; + default: + return "gpt-4.1"; + } +}; + +// Fallback models in case the API fails +const fallbackModels: Model[] = [ + { + name: "GPT-4.1", + slug: "gpt-4.1", + languageModel: LanguageModel.OPENAI_GPT41, + }, +]; + +const mapSupportedModelToModel = (supportedModel: SupportedModel): Model => ({ + name: supportedModel.name, + slug: supportedModel.slug, + languageModel: slugToLanguageModel(supportedModel.slug), +}); + export const useLanguageModels = () => { const { currentConversation, setCurrentConversation } = useConversationStore(); + const { data: supportedModelsResponse } = useListSupportedModelsQuery(); - const models: Model[] = useMemo( - () => [ - { - name: "GPT-4o", - description: "OpenAI GPT-4o", - languageModel: LanguageModel.OPENAI_GPT4O, - }, - { - name: "GPT-4.1", - description: "OpenAI GPT-4.1", - languageModel: LanguageModel.OPENAI_GPT41, - }, - // { - // name: "GPT-5", - // description: "OpenAI GPT-5", - // languageModel: 
LanguageModel.OPENAI_GPT5, - // }, - // { - // name: "GPT-5-mini", - // description: "OpenAI GPT-5-mini", - // languageModel: LanguageModel.OPENAI_GPT5_MINI, - // }, - // { - // name: "GPT-5-nano", - // description: "OpenAI GPT-5-nano", - // languageModel: LanguageModel.OPENAI_GPT5_NANO, - // }, - ], - [], - ); + const models: Model[] = useMemo(() => { + if (supportedModelsResponse?.models && supportedModelsResponse.models.length > 0) { + return supportedModelsResponse.models.map(mapSupportedModelToModel); + } + return fallbackModels; + }, [supportedModelsResponse]); const currentModel = useMemo(() => { - const model = models.find((m) => m.languageModel === currentConversation.languageModel); - return model || models[2]; + const model = models.find((m) => m.slug === languageModelToSlug(currentConversation.languageModel)); + return model || models[0]; }, [models, currentConversation.languageModel]); const setModel = useCallback( (model: Model) => { setCurrentConversation({ ...currentConversation, - languageModel: model.languageModel, + languageModel: slugToLanguageModel(model.slug), }); }, [setCurrentConversation, currentConversation], diff --git a/webapp/_webapp/src/hooks/useSendMessageStream.ts b/webapp/_webapp/src/hooks/useSendMessageStream.ts index 49088301..4026237e 100644 --- a/webapp/_webapp/src/hooks/useSendMessageStream.ts +++ b/webapp/_webapp/src/hooks/useSendMessageStream.ts @@ -139,6 +139,7 @@ export function useSendMessageStream() { selectedText, sync, sendMessageStream, + updateStreamingMessage, ); break; case "incompleteIndicator": diff --git a/webapp/_webapp/src/libs/toasts.ts b/webapp/_webapp/src/libs/toasts.tsx similarity index 80% rename from webapp/_webapp/src/libs/toasts.ts rename to webapp/_webapp/src/libs/toasts.tsx index 7e010b6b..c1129682 100644 --- a/webapp/_webapp/src/libs/toasts.ts +++ b/webapp/_webapp/src/libs/toasts.tsx @@ -21,9 +21,10 @@ export function warnToast(description: string, title: string = "Warning") { export function 
errorToast(description: string, title: string = "Error") { addToast({ title: title, - description: description, + description:
{description}
, color: "danger", timeout: 10000, }); console.trace(); // eslint-disable-line no-console + console.error(title, description); // eslint-disable-line no-console } diff --git a/webapp/_webapp/src/main.tsx b/webapp/_webapp/src/main.tsx index f699ded1..291773d3 100644 --- a/webapp/_webapp/src/main.tsx +++ b/webapp/_webapp/src/main.tsx @@ -21,6 +21,29 @@ import { DevTools } from "./views/devtools"; import { usePromptLibraryStore } from "./stores/prompt-library-store"; import { TopMenuButton } from "./components/top-menu-button"; import { Logo } from "./components/logo"; +import { getWebInstrumentations, initializeFaro } from '@grafana/faro-web-sdk'; +import { TracingInstrumentation } from '@grafana/faro-web-tracing'; +import { getManifest } from "./libs/manifest"; + +initializeFaro({ + url: 'https://faro-collector-prod-ap-southeast-1.grafana.net/collect/79c7648395df4df8b58c228fad42af57', + app: { + name: getManifest().name, + version: getManifest().version, + environment: 'production' + }, + sessionTracking: { + samplingRate: 1, + persistent: true + }, + instrumentations: [ + // Mandatory, omits default instrumentations otherwise. + ...getWebInstrumentations(), + + // Tracing package to get end-to-end visibility for HTTP requests. 
+ new TracingInstrumentation(), + ], +}); export const Main = () => { const { inputRef, setActiveTab } = useConversationUiStore(); diff --git a/webapp/_webapp/src/pkg/gen/apiclient/auth/v1/auth_pb.ts b/webapp/_webapp/src/pkg/gen/apiclient/auth/v1/auth_pb.ts index 7934ad2c..04201eaa 100644 --- a/webapp/_webapp/src/pkg/gen/apiclient/auth/v1/auth_pb.ts +++ b/webapp/_webapp/src/pkg/gen/apiclient/auth/v1/auth_pb.ts @@ -1,4 +1,4 @@ -// @generated by protoc-gen-es v2.7.0 with parameter "target=ts" +// @generated by protoc-gen-es v2.10.1 with parameter "target=ts" // @generated from file auth/v1/auth.proto (package auth.v1, syntax proto3) /* eslint-disable */ diff --git a/webapp/_webapp/src/pkg/gen/apiclient/chat/v1/chat_pb.ts b/webapp/_webapp/src/pkg/gen/apiclient/chat/v1/chat_pb.ts index 3113e297..2e15f8ea 100644 --- a/webapp/_webapp/src/pkg/gen/apiclient/chat/v1/chat_pb.ts +++ b/webapp/_webapp/src/pkg/gen/apiclient/chat/v1/chat_pb.ts @@ -1,4 +1,4 @@ -// @generated by protoc-gen-es v2.7.0 with parameter "target=ts" +// @generated by protoc-gen-es v2.10.1 with parameter "target=ts" // @generated from file chat/v1/chat.proto (package chat.v1, syntax proto3) /* eslint-disable */ @@ -11,7 +11,7 @@ import type { Message as Message$1 } from "@bufbuild/protobuf"; * Describes the file chat/v1/chat.proto. 
*/ export const file_chat_v1_chat: GenFile = /*@__PURE__*/ - fileDesc("ChJjaGF0L3YxL2NoYXQucHJvdG8SB2NoYXQudjEiUAoTTWVzc2FnZVR5cGVUb29sQ2FsbBIMCgRuYW1lGAEgASgJEgwKBGFyZ3MYAiABKAkSDgoGcmVzdWx0GAMgASgJEg0KBWVycm9yGAQgASgJIkEKI01lc3NhZ2VUeXBlVG9vbENhbGxQcmVwYXJlQXJndW1lbnRzEgwKBG5hbWUYASABKAkSDAoEYXJncxgCIAEoCSIkChFNZXNzYWdlVHlwZVN5c3RlbRIPCgdjb250ZW50GAEgASgJIicKFE1lc3NhZ2VUeXBlQXNzaXN0YW50Eg8KB2NvbnRlbnQYASABKAkiUAoPTWVzc2FnZVR5cGVVc2VyEg8KB2NvbnRlbnQYASABKAkSGgoNc2VsZWN0ZWRfdGV4dBgCIAEoCUgAiAEBQhAKDl9zZWxlY3RlZF90ZXh0IikKEk1lc3NhZ2VUeXBlVW5rbm93bhITCgtkZXNjcmlwdGlvbhgBIAEoCSLkAgoOTWVzc2FnZVBheWxvYWQSLAoGc3lzdGVtGAEgASgLMhouY2hhdC52MS5NZXNzYWdlVHlwZVN5c3RlbUgAEigKBHVzZXIYAiABKAsyGC5jaGF0LnYxLk1lc3NhZ2VUeXBlVXNlckgAEjIKCWFzc2lzdGFudBgDIAEoCzIdLmNoYXQudjEuTWVzc2FnZVR5cGVBc3Npc3RhbnRIABJTCht0b29sX2NhbGxfcHJlcGFyZV9hcmd1bWVudHMYBCABKAsyLC5jaGF0LnYxLk1lc3NhZ2VUeXBlVG9vbENhbGxQcmVwYXJlQXJndW1lbnRzSAASMQoJdG9vbF9jYWxsGAUgASgLMhwuY2hhdC52MS5NZXNzYWdlVHlwZVRvb2xDYWxsSAASLgoHdW5rbm93bhgGIAEoCzIbLmNoYXQudjEuTWVzc2FnZVR5cGVVbmtub3duSABCDgoMbWVzc2FnZV90eXBlIkcKB01lc3NhZ2USEgoKbWVzc2FnZV9pZBgBIAEoCRIoCgdwYXlsb2FkGAMgASgLMhcuY2hhdC52MS5NZXNzYWdlUGF5bG9hZCJ9CgxDb252ZXJzYXRpb24SCgoCaWQYASABKAkSDQoFdGl0bGUYAyABKAkSLgoObGFuZ3VhZ2VfbW9kZWwYAiABKA4yFi5jaGF0LnYxLkxhbmd1YWdlTW9kZWwSIgoIbWVzc2FnZXMYBCADKAsyEC5jaGF0LnYxLk1lc3NhZ2UiQgoYTGlzdENvbnZlcnNhdGlvbnNSZXF1ZXN0EhcKCnByb2plY3RfaWQYASABKAlIAIgBAUINCgtfcHJvamVjdF9pZCJJChlMaXN0Q29udmVyc2F0aW9uc1Jlc3BvbnNlEiwKDWNvbnZlcnNhdGlvbnMYASADKAsyFS5jaGF0LnYxLkNvbnZlcnNhdGlvbiIxChZHZXRDb252ZXJzYXRpb25SZXF1ZXN0EhcKD2NvbnZlcnNhdGlvbl9pZBgBIAEoCSJGChdHZXRDb252ZXJzYXRpb25SZXNwb25zZRIrCgxjb252ZXJzYXRpb24YASABKAsyFS5jaGF0LnYxLkNvbnZlcnNhdGlvbiK3AgogQ3JlYXRlQ29udmVyc2F0aW9uTWVzc2FnZVJlcXVlc3QSEgoKcHJvamVjdF9pZBgBIAEoCRIcCg9jb252ZXJzYXRpb25faWQYAiABKAlIAIgBARIuCg5sYW5ndWFnZV9tb2RlbBgDIAEoDjIWLmNoYXQudjEuTGFuZ3VhZ2VNb2RlbBIUCgx1c2VyX21lc3NhZ2UYBCABKAkSHwoSdXNlcl9zZWxlY3RlZF90ZXh0GAUgASgJSAGIAQESOQoRY29udmVyc2F0aW9uX3R5cGUYBiABKA4yGS5jaGF0LnYxLkNvbnZlcnNhdGlvb
lR5cGVIAogBAUISChBfY29udmVyc2F0aW9uX2lkQhUKE191c2VyX3NlbGVjdGVkX3RleHRCFAoSX2NvbnZlcnNhdGlvbl90eXBlIlAKIUNyZWF0ZUNvbnZlcnNhdGlvbk1lc3NhZ2VSZXNwb25zZRIrCgxjb252ZXJzYXRpb24YASABKAsyFS5jaGF0LnYxLkNvbnZlcnNhdGlvbiJDChlVcGRhdGVDb252ZXJzYXRpb25SZXF1ZXN0EhcKD2NvbnZlcnNhdGlvbl9pZBgBIAEoCRINCgV0aXRsZRgCIAEoCSJJChpVcGRhdGVDb252ZXJzYXRpb25SZXNwb25zZRIrCgxjb252ZXJzYXRpb24YASABKAsyFS5jaGF0LnYxLkNvbnZlcnNhdGlvbiI0ChlEZWxldGVDb252ZXJzYXRpb25SZXF1ZXN0EhcKD2NvbnZlcnNhdGlvbl9pZBgBIAEoCSIcChpEZWxldGVDb252ZXJzYXRpb25SZXNwb25zZSJfChRTdHJlYW1Jbml0aWFsaXphdGlvbhIXCg9jb252ZXJzYXRpb25faWQYASABKAkSLgoObGFuZ3VhZ2VfbW9kZWwYBSABKA4yFi5jaGF0LnYxLkxhbmd1YWdlTW9kZWwiTwoPU3RyZWFtUGFydEJlZ2luEhIKCm1lc3NhZ2VfaWQYASABKAkSKAoHcGF5bG9hZBgDIAEoCzIXLmNoYXQudjEuTWVzc2FnZVBheWxvYWQiMQoMTWVzc2FnZUNodW5rEhIKCm1lc3NhZ2VfaWQYASABKAkSDQoFZGVsdGEYAiABKAkiOgoTSW5jb21wbGV0ZUluZGljYXRvchIOCgZyZWFzb24YASABKAkSEwoLcmVzcG9uc2VfaWQYAiABKAkiTQoNU3RyZWFtUGFydEVuZBISCgptZXNzYWdlX2lkGAEgASgJEigKB3BheWxvYWQYAyABKAsyFy5jaGF0LnYxLk1lc3NhZ2VQYXlsb2FkIi0KElN0cmVhbUZpbmFsaXphdGlvbhIXCg9jb252ZXJzYXRpb25faWQYASABKAkiJAoLU3RyZWFtRXJyb3ISFQoNZXJyb3JfbWVzc2FnZRgBIAEoCSK9AgomQ3JlYXRlQ29udmVyc2F0aW9uTWVzc2FnZVN0cmVhbVJlcXVlc3QSEgoKcHJvamVjdF9pZBgBIAEoCRIcCg9jb252ZXJzYXRpb25faWQYAiABKAlIAIgBARIuCg5sYW5ndWFnZV9tb2RlbBgDIAEoDjIWLmNoYXQudjEuTGFuZ3VhZ2VNb2RlbBIUCgx1c2VyX21lc3NhZ2UYBCABKAkSHwoSdXNlcl9zZWxlY3RlZF90ZXh0GAUgASgJSAGIAQESOQoRY29udmVyc2F0aW9uX3R5cGUYBiABKA4yGS5jaGF0LnYxLkNvbnZlcnNhdGlvblR5cGVIAogBAUISChBfY29udmVyc2F0aW9uX2lkQhUKE191c2VyX3NlbGVjdGVkX3RleHRCFAoSX2NvbnZlcnNhdGlvbl90eXBlIr8DCidDcmVhdGVDb252ZXJzYXRpb25NZXNzYWdlU3RyZWFtUmVzcG9uc2USPgoVc3RyZWFtX2luaXRpYWxpemF0aW9uGAEgASgLMh0uY2hhdC52MS5TdHJlYW1Jbml0aWFsaXphdGlvbkgAEjUKEXN0cmVhbV9wYXJ0X2JlZ2luGAIgASgLMhguY2hhdC52MS5TdHJlYW1QYXJ0QmVnaW5IABIuCg1tZXNzYWdlX2NodW5rGAMgASgLMhUuY2hhdC52MS5NZXNzYWdlQ2h1bmtIABI8ChRpbmNvbXBsZXRlX2luZGljYXRvchgEIAEoCzIcLmNoYXQudjEuSW5jb21wbGV0ZUluZGljYXRvckgAEjEKD3N0cmVhbV9wYXJ0X2VuZBgFIAEoCzIWLmNoYXQudjEuU3RyZWFtUGFydEVuZEgAEjoKE3N0cmVhbV9maW5hbGl6Y
XRpb24YBiABKAsyGy5jaGF0LnYxLlN0cmVhbUZpbmFsaXphdGlvbkgAEiwKDHN0cmVhbV9lcnJvchgHIAEoCzIULmNoYXQudjEuU3RyZWFtRXJyb3JIAEISChByZXNwb25zZV9wYXlsb2FkKoECCg1MYW5ndWFnZU1vZGVsEh4KGkxBTkdVQUdFX01PREVMX1VOU1BFQ0lGSUVEEAASHwobTEFOR1VBR0VfTU9ERUxfT1BFTkFJX0dQVDRPEAESJAogTEFOR1VBR0VfTU9ERUxfT1BFTkFJX0dQVDQxX01JTkkQAhIfChtMQU5HVUFHRV9NT0RFTF9PUEVOQUlfR1BUNDEQBBIeChpMQU5HVUFHRV9NT0RFTF9PUEVOQUlfR1BUNRAHEiMKH0xBTkdVQUdFX01PREVMX09QRU5BSV9HUFQ1X01JTkkQCBIjCh9MQU5HVUFHRV9NT0RFTF9PUEVOQUlfR1BUNV9OQU5PEAkqUgoQQ29udmVyc2F0aW9uVHlwZRIhCh1DT05WRVJTQVRJT05fVFlQRV9VTlNQRUNJRklFRBAAEhsKF0NPTlZFUlNBVElPTl9UWVBFX0RFQlVHEAEyzQcKC0NoYXRTZXJ2aWNlEoMBChFMaXN0Q29udmVyc2F0aW9ucxIhLmNoYXQudjEuTGlzdENvbnZlcnNhdGlvbnNSZXF1ZXN0GiIuY2hhdC52MS5MaXN0Q29udmVyc2F0aW9uc1Jlc3BvbnNlIieC0+STAiESHy9fcGQvYXBpL3YxL2NoYXRzL2NvbnZlcnNhdGlvbnMSjwEKD0dldENvbnZlcnNhdGlvbhIfLmNoYXQudjEuR2V0Q29udmVyc2F0aW9uUmVxdWVzdBogLmNoYXQudjEuR2V0Q29udmVyc2F0aW9uUmVzcG9uc2UiOYLT5JMCMxIxL19wZC9hcGkvdjEvY2hhdHMvY29udmVyc2F0aW9ucy97Y29udmVyc2F0aW9uX2lkfRKnAQoZQ3JlYXRlQ29udmVyc2F0aW9uTWVzc2FnZRIpLmNoYXQudjEuQ3JlYXRlQ29udmVyc2F0aW9uTWVzc2FnZVJlcXVlc3QaKi5jaGF0LnYxLkNyZWF0ZUNvbnZlcnNhdGlvbk1lc3NhZ2VSZXNwb25zZSIzgtPkkwItOgEqIigvX3BkL2FwaS92MS9jaGF0cy9jb252ZXJzYXRpb25zL21lc3NhZ2VzEsIBCh9DcmVhdGVDb252ZXJzYXRpb25NZXNzYWdlU3RyZWFtEi8uY2hhdC52MS5DcmVhdGVDb252ZXJzYXRpb25NZXNzYWdlU3RyZWFtUmVxdWVzdBowLmNoYXQudjEuQ3JlYXRlQ29udmVyc2F0aW9uTWVzc2FnZVN0cmVhbVJlc3BvbnNlIjqC0+STAjQ6ASoiLy9fcGQvYXBpL3YxL2NoYXRzL2NvbnZlcnNhdGlvbnMvbWVzc2FnZXMvc3RyZWFtMAESmwEKElVwZGF0ZUNvbnZlcnNhdGlvbhIiLmNoYXQudjEuVXBkYXRlQ29udmVyc2F0aW9uUmVxdWVzdBojLmNoYXQudjEuVXBkYXRlQ29udmVyc2F0aW9uUmVzcG9uc2UiPILT5JMCNjoBKjIxL19wZC9hcGkvdjEvY2hhdHMvY29udmVyc2F0aW9ucy97Y29udmVyc2F0aW9uX2lkfRKYAQoSRGVsZXRlQ29udmVyc2F0aW9uEiIuY2hhdC52MS5EZWxldGVDb252ZXJzYXRpb25SZXF1ZXN0GiMuY2hhdC52MS5EZWxldGVDb252ZXJzYXRpb25SZXNwb25zZSI5gtPkkwIzKjEvX3BkL2FwaS92MS9jaGF0cy9jb252ZXJzYXRpb25zL3tjb252ZXJzYXRpb25faWR9Qn8KC2NvbS5jaGF0LnYxQglDaGF0UHJvdG9QAVoocGFwZXJkZWJ1Z2dlci9wa2cvZ2VuL2FwaS9jaGF0L3YxO2NoYXR2MaICA
0NYWKoCB0NoYXQuVjHKAgdDaGF0XFYx4gITQ2hhdFxWMVxHUEJNZXRhZGF0YeoCCENoYXQ6OlYxYgZwcm90bzM", [file_google_api_annotations]); + fileDesc("ChJjaGF0L3YxL2NoYXQucHJvdG8SB2NoYXQudjEiUAoTTWVzc2FnZVR5cGVUb29sQ2FsbBIMCgRuYW1lGAEgASgJEgwKBGFyZ3MYAiABKAkSDgoGcmVzdWx0GAMgASgJEg0KBWVycm9yGAQgASgJIkEKI01lc3NhZ2VUeXBlVG9vbENhbGxQcmVwYXJlQXJndW1lbnRzEgwKBG5hbWUYASABKAkSDAoEYXJncxgCIAEoCSIkChFNZXNzYWdlVHlwZVN5c3RlbRIPCgdjb250ZW50GAEgASgJIicKFE1lc3NhZ2VUeXBlQXNzaXN0YW50Eg8KB2NvbnRlbnQYASABKAkiUAoPTWVzc2FnZVR5cGVVc2VyEg8KB2NvbnRlbnQYASABKAkSGgoNc2VsZWN0ZWRfdGV4dBgCIAEoCUgAiAEBQhAKDl9zZWxlY3RlZF90ZXh0IikKEk1lc3NhZ2VUeXBlVW5rbm93bhITCgtkZXNjcmlwdGlvbhgBIAEoCSLkAgoOTWVzc2FnZVBheWxvYWQSLAoGc3lzdGVtGAEgASgLMhouY2hhdC52MS5NZXNzYWdlVHlwZVN5c3RlbUgAEigKBHVzZXIYAiABKAsyGC5jaGF0LnYxLk1lc3NhZ2VUeXBlVXNlckgAEjIKCWFzc2lzdGFudBgDIAEoCzIdLmNoYXQudjEuTWVzc2FnZVR5cGVBc3Npc3RhbnRIABJTCht0b29sX2NhbGxfcHJlcGFyZV9hcmd1bWVudHMYBCABKAsyLC5jaGF0LnYxLk1lc3NhZ2VUeXBlVG9vbENhbGxQcmVwYXJlQXJndW1lbnRzSAASMQoJdG9vbF9jYWxsGAUgASgLMhwuY2hhdC52MS5NZXNzYWdlVHlwZVRvb2xDYWxsSAASLgoHdW5rbm93bhgGIAEoCzIbLmNoYXQudjEuTWVzc2FnZVR5cGVVbmtub3duSABCDgoMbWVzc2FnZV90eXBlIkcKB01lc3NhZ2USEgoKbWVzc2FnZV9pZBgBIAEoCRIoCgdwYXlsb2FkGAMgASgLMhcuY2hhdC52MS5NZXNzYWdlUGF5bG9hZCJ9CgxDb252ZXJzYXRpb24SCgoCaWQYASABKAkSDQoFdGl0bGUYAyABKAkSLgoObGFuZ3VhZ2VfbW9kZWwYAiABKA4yFi5jaGF0LnYxLkxhbmd1YWdlTW9kZWwSIgoIbWVzc2FnZXMYBCADKAsyEC5jaGF0LnYxLk1lc3NhZ2UiQgoYTGlzdENvbnZlcnNhdGlvbnNSZXF1ZXN0EhcKCnByb2plY3RfaWQYASABKAlIAIgBAUINCgtfcHJvamVjdF9pZCJJChlMaXN0Q29udmVyc2F0aW9uc1Jlc3BvbnNlEiwKDWNvbnZlcnNhdGlvbnMYASADKAsyFS5jaGF0LnYxLkNvbnZlcnNhdGlvbiIxChZHZXRDb252ZXJzYXRpb25SZXF1ZXN0EhcKD2NvbnZlcnNhdGlvbl9pZBgBIAEoCSJGChdHZXRDb252ZXJzYXRpb25SZXNwb25zZRIrCgxjb252ZXJzYXRpb24YASABKAsyFS5jaGF0LnYxLkNvbnZlcnNhdGlvbiK3AgogQ3JlYXRlQ29udmVyc2F0aW9uTWVzc2FnZVJlcXVlc3QSEgoKcHJvamVjdF9pZBgBIAEoCRIcCg9jb252ZXJzYXRpb25faWQYAiABKAlIAIgBARIuCg5sYW5ndWFnZV9tb2RlbBgDIAEoDjIWLmNoYXQudjEuTGFuZ3VhZ2VNb2RlbBIUCgx1c2VyX21lc3NhZ2UYBCABKAkSHwoSdXNlcl9zZWxlY3RlZF90ZXh0GAUgASgJSAGIAQESOQo
RY29udmVyc2F0aW9uX3R5cGUYBiABKA4yGS5jaGF0LnYxLkNvbnZlcnNhdGlvblR5cGVIAogBAUISChBfY29udmVyc2F0aW9uX2lkQhUKE191c2VyX3NlbGVjdGVkX3RleHRCFAoSX2NvbnZlcnNhdGlvbl90eXBlIlAKIUNyZWF0ZUNvbnZlcnNhdGlvbk1lc3NhZ2VSZXNwb25zZRIrCgxjb252ZXJzYXRpb24YASABKAsyFS5jaGF0LnYxLkNvbnZlcnNhdGlvbiJDChlVcGRhdGVDb252ZXJzYXRpb25SZXF1ZXN0EhcKD2NvbnZlcnNhdGlvbl9pZBgBIAEoCRINCgV0aXRsZRgCIAEoCSJJChpVcGRhdGVDb252ZXJzYXRpb25SZXNwb25zZRIrCgxjb252ZXJzYXRpb24YASABKAsyFS5jaGF0LnYxLkNvbnZlcnNhdGlvbiI0ChlEZWxldGVDb252ZXJzYXRpb25SZXF1ZXN0EhcKD2NvbnZlcnNhdGlvbl9pZBgBIAEoCSIcChpEZWxldGVDb252ZXJzYXRpb25SZXNwb25zZSIsCg5TdXBwb3J0ZWRNb2RlbBIMCgRuYW1lGAEgASgJEgwKBHNsdWcYAiABKAkiHAoaTGlzdFN1cHBvcnRlZE1vZGVsc1JlcXVlc3QiRgobTGlzdFN1cHBvcnRlZE1vZGVsc1Jlc3BvbnNlEicKBm1vZGVscxgBIAMoCzIXLmNoYXQudjEuU3VwcG9ydGVkTW9kZWwiXwoUU3RyZWFtSW5pdGlhbGl6YXRpb24SFwoPY29udmVyc2F0aW9uX2lkGAEgASgJEi4KDmxhbmd1YWdlX21vZGVsGAUgASgOMhYuY2hhdC52MS5MYW5ndWFnZU1vZGVsIk8KD1N0cmVhbVBhcnRCZWdpbhISCgptZXNzYWdlX2lkGAEgASgJEigKB3BheWxvYWQYAyABKAsyFy5jaGF0LnYxLk1lc3NhZ2VQYXlsb2FkIjEKDE1lc3NhZ2VDaHVuaxISCgptZXNzYWdlX2lkGAEgASgJEg0KBWRlbHRhGAIgASgJIjoKE0luY29tcGxldGVJbmRpY2F0b3ISDgoGcmVhc29uGAEgASgJEhMKC3Jlc3BvbnNlX2lkGAIgASgJIk0KDVN0cmVhbVBhcnRFbmQSEgoKbWVzc2FnZV9pZBgBIAEoCRIoCgdwYXlsb2FkGAMgASgLMhcuY2hhdC52MS5NZXNzYWdlUGF5bG9hZCItChJTdHJlYW1GaW5hbGl6YXRpb24SFwoPY29udmVyc2F0aW9uX2lkGAEgASgJIiQKC1N0cmVhbUVycm9yEhUKDWVycm9yX21lc3NhZ2UYASABKAkivQIKJkNyZWF0ZUNvbnZlcnNhdGlvbk1lc3NhZ2VTdHJlYW1SZXF1ZXN0EhIKCnByb2plY3RfaWQYASABKAkSHAoPY29udmVyc2F0aW9uX2lkGAIgASgJSACIAQESLgoObGFuZ3VhZ2VfbW9kZWwYAyABKA4yFi5jaGF0LnYxLkxhbmd1YWdlTW9kZWwSFAoMdXNlcl9tZXNzYWdlGAQgASgJEh8KEnVzZXJfc2VsZWN0ZWRfdGV4dBgFIAEoCUgBiAEBEjkKEWNvbnZlcnNhdGlvbl90eXBlGAYgASgOMhkuY2hhdC52MS5Db252ZXJzYXRpb25UeXBlSAKIAQFCEgoQX2NvbnZlcnNhdGlvbl9pZEIVChNfdXNlcl9zZWxlY3RlZF90ZXh0QhQKEl9jb252ZXJzYXRpb25fdHlwZSK/AwonQ3JlYXRlQ29udmVyc2F0aW9uTWVzc2FnZVN0cmVhbVJlc3BvbnNlEj4KFXN0cmVhbV9pbml0aWFsaXphdGlvbhgBIAEoCzIdLmNoYXQudjEuU3RyZWFtSW5pdGlhbGl6YXRpb25IABI1ChFzdHJlYW1fcGFydF9iZWdpbhgCIAEoCzIYLmNoYXQudjEuU3R
yZWFtUGFydEJlZ2luSAASLgoNbWVzc2FnZV9jaHVuaxgDIAEoCzIVLmNoYXQudjEuTWVzc2FnZUNodW5rSAASPAoUaW5jb21wbGV0ZV9pbmRpY2F0b3IYBCABKAsyHC5jaGF0LnYxLkluY29tcGxldGVJbmRpY2F0b3JIABIxCg9zdHJlYW1fcGFydF9lbmQYBSABKAsyFi5jaGF0LnYxLlN0cmVhbVBhcnRFbmRIABI6ChNzdHJlYW1fZmluYWxpemF0aW9uGAYgASgLMhsuY2hhdC52MS5TdHJlYW1GaW5hbGl6YXRpb25IABIsCgxzdHJlYW1fZXJyb3IYByABKAsyFC5jaGF0LnYxLlN0cmVhbUVycm9ySABCEgoQcmVzcG9uc2VfcGF5bG9hZCr/AwoNTGFuZ3VhZ2VNb2RlbBIeChpMQU5HVUFHRV9NT0RFTF9VTlNQRUNJRklFRBAAEh8KG0xBTkdVQUdFX01PREVMX09QRU5BSV9HUFQ0TxABEiQKIExBTkdVQUdFX01PREVMX09QRU5BSV9HUFQ0MV9NSU5JEAISHwobTEFOR1VBR0VfTU9ERUxfT1BFTkFJX0dQVDQxEAQSHgoaTEFOR1VBR0VfTU9ERUxfT1BFTkFJX0dQVDUQBxIjCh9MQU5HVUFHRV9NT0RFTF9PUEVOQUlfR1BUNV9NSU5JEAgSIwofTEFOR1VBR0VfTU9ERUxfT1BFTkFJX0dQVDVfTkFOTxAJEioKJkxBTkdVQUdFX01PREVMX09QRU5BSV9HUFQ1X0NIQVRfTEFURVNUEAoSHAoYTEFOR1VBR0VfTU9ERUxfT1BFTkFJX08xEAsSIQodTEFOR1VBR0VfTU9ERUxfT1BFTkFJX08xX01JTkkQDBIcChhMQU5HVUFHRV9NT0RFTF9PUEVOQUlfTzMQDRIhCh1MQU5HVUFHRV9NT0RFTF9PUEVOQUlfTzNfTUlOSRAOEiEKHUxBTkdVQUdFX01PREVMX09QRU5BSV9PNF9NSU5JEA8SKwonTEFOR1VBR0VfTU9ERUxfT1BFTkFJX0NPREVYX01JTklfTEFURVNUEBAqUgoQQ29udmVyc2F0aW9uVHlwZRIhCh1DT05WRVJTQVRJT05fVFlQRV9VTlNQRUNJRklFRBAAEhsKF0NPTlZFUlNBVElPTl9UWVBFX0RFQlVHEAEy0ggKC0NoYXRTZXJ2aWNlEoMBChFMaXN0Q29udmVyc2F0aW9ucxIhLmNoYXQudjEuTGlzdENvbnZlcnNhdGlvbnNSZXF1ZXN0GiIuY2hhdC52MS5MaXN0Q29udmVyc2F0aW9uc1Jlc3BvbnNlIieC0+STAiESHy9fcGQvYXBpL3YxL2NoYXRzL2NvbnZlcnNhdGlvbnMSjwEKD0dldENvbnZlcnNhdGlvbhIfLmNoYXQudjEuR2V0Q29udmVyc2F0aW9uUmVxdWVzdBogLmNoYXQudjEuR2V0Q29udmVyc2F0aW9uUmVzcG9uc2UiOYLT5JMCMxIxL19wZC9hcGkvdjEvY2hhdHMvY29udmVyc2F0aW9ucy97Y29udmVyc2F0aW9uX2lkfRKnAQoZQ3JlYXRlQ29udmVyc2F0aW9uTWVzc2FnZRIpLmNoYXQudjEuQ3JlYXRlQ29udmVyc2F0aW9uTWVzc2FnZVJlcXVlc3QaKi5jaGF0LnYxLkNyZWF0ZUNvbnZlcnNhdGlvbk1lc3NhZ2VSZXNwb25zZSIzgtPkkwItOgEqIigvX3BkL2FwaS92MS9jaGF0cy9jb252ZXJzYXRpb25zL21lc3NhZ2VzEsIBCh9DcmVhdGVDb252ZXJzYXRpb25NZXNzYWdlU3RyZWFtEi8uY2hhdC52MS5DcmVhdGVDb252ZXJzYXRpb25NZXNzYWdlU3RyZWFtUmVxdWVzdBowLmNoYXQudjEuQ3JlYXRlQ29udmVyc2F0aW9uTWVzc2FnZVN0cmVhbVJlc3BvbnN
lIjqC0+STAjQ6ASoiLy9fcGQvYXBpL3YxL2NoYXRzL2NvbnZlcnNhdGlvbnMvbWVzc2FnZXMvc3RyZWFtMAESmwEKElVwZGF0ZUNvbnZlcnNhdGlvbhIiLmNoYXQudjEuVXBkYXRlQ29udmVyc2F0aW9uUmVxdWVzdBojLmNoYXQudjEuVXBkYXRlQ29udmVyc2F0aW9uUmVzcG9uc2UiPILT5JMCNjoBKjIxL19wZC9hcGkvdjEvY2hhdHMvY29udmVyc2F0aW9ucy97Y29udmVyc2F0aW9uX2lkfRKYAQoSRGVsZXRlQ29udmVyc2F0aW9uEiIuY2hhdC52MS5EZWxldGVDb252ZXJzYXRpb25SZXF1ZXN0GiMuY2hhdC52MS5EZWxldGVDb252ZXJzYXRpb25SZXNwb25zZSI5gtPkkwIzKjEvX3BkL2FwaS92MS9jaGF0cy9jb252ZXJzYXRpb25zL3tjb252ZXJzYXRpb25faWR9EoIBChNMaXN0U3VwcG9ydGVkTW9kZWxzEiMuY2hhdC52MS5MaXN0U3VwcG9ydGVkTW9kZWxzUmVxdWVzdBokLmNoYXQudjEuTGlzdFN1cHBvcnRlZE1vZGVsc1Jlc3BvbnNlIiCC0+STAhoSGC9fcGQvYXBpL3YxL2NoYXRzL21vZGVsc0J/Cgtjb20uY2hhdC52MUIJQ2hhdFByb3RvUAFaKHBhcGVyZGVidWdnZXIvcGtnL2dlbi9hcGkvY2hhdC92MTtjaGF0djGiAgNDWFiqAgdDaGF0LlYxygIHQ2hhdFxWMeICE0NoYXRcVjFcR1BCTWV0YWRhdGHqAghDaGF0OjpWMWIGcHJvdG8z", [file_google_api_annotations]); /** * @generated from message chat.v1.MessageTypeToolCall @@ -460,6 +460,60 @@ export type DeleteConversationResponse = Message$1<"chat.v1.DeleteConversationRe export const DeleteConversationResponseSchema: GenMessage = /*@__PURE__*/ messageDesc(file_chat_v1_chat, 18); +/** + * @generated from message chat.v1.SupportedModel + */ +export type SupportedModel = Message$1<"chat.v1.SupportedModel"> & { + /** + * @generated from field: string name = 1; + */ + name: string; + + /** + * @generated from field: string slug = 2; + */ + slug: string; +}; + +/** + * Describes the message chat.v1.SupportedModel. + * Use `create(SupportedModelSchema)` to create a new message. + */ +export const SupportedModelSchema: GenMessage = /*@__PURE__*/ + messageDesc(file_chat_v1_chat, 19); + +/** + * explicitly empty + * + * @generated from message chat.v1.ListSupportedModelsRequest + */ +export type ListSupportedModelsRequest = Message$1<"chat.v1.ListSupportedModelsRequest"> & { +}; + +/** + * Describes the message chat.v1.ListSupportedModelsRequest. 
+ * Use `create(ListSupportedModelsRequestSchema)` to create a new message. + */ +export const ListSupportedModelsRequestSchema: GenMessage = /*@__PURE__*/ + messageDesc(file_chat_v1_chat, 20); + +/** + * @generated from message chat.v1.ListSupportedModelsResponse + */ +export type ListSupportedModelsResponse = Message$1<"chat.v1.ListSupportedModelsResponse"> & { + /** + * @generated from field: repeated chat.v1.SupportedModel models = 1; + */ + models: SupportedModel[]; +}; + +/** + * Describes the message chat.v1.ListSupportedModelsResponse. + * Use `create(ListSupportedModelsResponseSchema)` to create a new message. + */ +export const ListSupportedModelsResponseSchema: GenMessage = /*@__PURE__*/ + messageDesc(file_chat_v1_chat, 21); + /** * Information sent once at the beginning of a new conversation stream * @@ -482,7 +536,7 @@ export type StreamInitialization = Message$1<"chat.v1.StreamInitialization"> & { * Use `create(StreamInitializationSchema)` to create a new message. */ export const StreamInitializationSchema: GenMessage = /*@__PURE__*/ - messageDesc(file_chat_v1_chat, 19); + messageDesc(file_chat_v1_chat, 22); /** * Designed as StreamPartBegin and StreamPartEnd to @@ -510,7 +564,7 @@ export type StreamPartBegin = Message$1<"chat.v1.StreamPartBegin"> & { * Use `create(StreamPartBeginSchema)` to create a new message. */ export const StreamPartBeginSchema: GenMessage = /*@__PURE__*/ - messageDesc(file_chat_v1_chat, 20); + messageDesc(file_chat_v1_chat, 23); /** * Note: After the StreamPartBegin of tool_call, there can be no MessageChunk, @@ -539,7 +593,7 @@ export type MessageChunk = Message$1<"chat.v1.MessageChunk"> & { * Use `create(MessageChunkSchema)` to create a new message. 
*/ export const MessageChunkSchema: GenMessage = /*@__PURE__*/ - messageDesc(file_chat_v1_chat, 21); + messageDesc(file_chat_v1_chat, 24); /** * @generated from message chat.v1.IncompleteIndicator @@ -561,7 +615,7 @@ export type IncompleteIndicator = Message$1<"chat.v1.IncompleteIndicator"> & { * Use `create(IncompleteIndicatorSchema)` to create a new message. */ export const IncompleteIndicatorSchema: GenMessage = /*@__PURE__*/ - messageDesc(file_chat_v1_chat, 22); + messageDesc(file_chat_v1_chat, 25); /** * @generated from message chat.v1.StreamPartEnd @@ -583,7 +637,7 @@ export type StreamPartEnd = Message$1<"chat.v1.StreamPartEnd"> & { * Use `create(StreamPartEndSchema)` to create a new message. */ export const StreamPartEndSchema: GenMessage = /*@__PURE__*/ - messageDesc(file_chat_v1_chat, 23); + messageDesc(file_chat_v1_chat, 26); /** * Sent when the current AI response is fully streamed @@ -607,7 +661,7 @@ export type StreamFinalization = Message$1<"chat.v1.StreamFinalization"> & { * Use `create(StreamFinalizationSchema)` to create a new message. */ export const StreamFinalizationSchema: GenMessage = /*@__PURE__*/ - messageDesc(file_chat_v1_chat, 24); + messageDesc(file_chat_v1_chat, 27); /** * @generated from message chat.v1.StreamError @@ -624,7 +678,7 @@ export type StreamError = Message$1<"chat.v1.StreamError"> & { * Use `create(StreamErrorSchema)` to create a new message. */ export const StreamErrorSchema: GenMessage = /*@__PURE__*/ - messageDesc(file_chat_v1_chat, 25); + messageDesc(file_chat_v1_chat, 28); /** * This message should be the same as CreateConversationMessageRequest @@ -670,7 +724,7 @@ export type CreateConversationMessageStreamRequest = Message$1<"chat.v1.CreateCo * Use `create(CreateConversationMessageStreamRequestSchema)` to create a new message. 
*/ export const CreateConversationMessageStreamRequestSchema: GenMessage = /*@__PURE__*/ - messageDesc(file_chat_v1_chat, 26); + messageDesc(file_chat_v1_chat, 29); /** * Response for streaming a message within an existing conversation @@ -731,9 +785,11 @@ export type CreateConversationMessageStreamResponse = Message$1<"chat.v1.CreateC * Use `create(CreateConversationMessageStreamResponseSchema)` to create a new message. */ export const CreateConversationMessageStreamResponseSchema: GenMessage = /*@__PURE__*/ - messageDesc(file_chat_v1_chat, 27); + messageDesc(file_chat_v1_chat, 30); /** + * deprecated + * * @generated from enum chat.v1.LanguageModel */ export enum LanguageModel { @@ -771,6 +827,41 @@ export enum LanguageModel { * @generated from enum value: LANGUAGE_MODEL_OPENAI_GPT5_NANO = 9; */ OPENAI_GPT5_NANO = 9, + + /** + * @generated from enum value: LANGUAGE_MODEL_OPENAI_GPT5_CHAT_LATEST = 10; + */ + OPENAI_GPT5_CHAT_LATEST = 10, + + /** + * @generated from enum value: LANGUAGE_MODEL_OPENAI_O1 = 11; + */ + OPENAI_O1 = 11, + + /** + * @generated from enum value: LANGUAGE_MODEL_OPENAI_O1_MINI = 12; + */ + OPENAI_O1_MINI = 12, + + /** + * @generated from enum value: LANGUAGE_MODEL_OPENAI_O3 = 13; + */ + OPENAI_O3 = 13, + + /** + * @generated from enum value: LANGUAGE_MODEL_OPENAI_O3_MINI = 14; + */ + OPENAI_O3_MINI = 14, + + /** + * @generated from enum value: LANGUAGE_MODEL_OPENAI_O4_MINI = 15; + */ + OPENAI_O4_MINI = 15, + + /** + * @generated from enum value: LANGUAGE_MODEL_OPENAI_CODEX_MINI_LATEST = 16; + */ + OPENAI_CODEX_MINI_LATEST = 16, } /** @@ -854,6 +945,14 @@ export const ChatService: GenService<{ input: typeof DeleteConversationRequestSchema; output: typeof DeleteConversationResponseSchema; }, + /** + * @generated from rpc chat.v1.ChatService.ListSupportedModels + */ + listSupportedModels: { + methodKind: "unary"; + input: typeof ListSupportedModelsRequestSchema; + output: typeof ListSupportedModelsResponseSchema; + }, }> = /*@__PURE__*/ 
serviceDesc(file_chat_v1_chat, 0); diff --git a/webapp/_webapp/src/pkg/gen/apiclient/comment/v1/comment_pb.ts b/webapp/_webapp/src/pkg/gen/apiclient/comment/v1/comment_pb.ts index bc4b009b..d865420c 100644 --- a/webapp/_webapp/src/pkg/gen/apiclient/comment/v1/comment_pb.ts +++ b/webapp/_webapp/src/pkg/gen/apiclient/comment/v1/comment_pb.ts @@ -1,4 +1,4 @@ -// @generated by protoc-gen-es v2.7.0 with parameter "target=ts" +// @generated by protoc-gen-es v2.10.1 with parameter "target=ts" // @generated from file comment/v1/comment.proto (package comment.v1, syntax proto3) /* eslint-disable */ diff --git a/webapp/_webapp/src/pkg/gen/apiclient/project/v1/project_pb.ts b/webapp/_webapp/src/pkg/gen/apiclient/project/v1/project_pb.ts index cd96015f..f6186351 100644 --- a/webapp/_webapp/src/pkg/gen/apiclient/project/v1/project_pb.ts +++ b/webapp/_webapp/src/pkg/gen/apiclient/project/v1/project_pb.ts @@ -1,4 +1,4 @@ -// @generated by protoc-gen-es v2.7.0 with parameter "target=ts" +// @generated by protoc-gen-es v2.10.1 with parameter "target=ts" // @generated from file project/v1/project.proto (package project.v1, syntax proto3) /* eslint-disable */ diff --git a/webapp/_webapp/src/pkg/gen/apiclient/shared/v1/shared_pb.ts b/webapp/_webapp/src/pkg/gen/apiclient/shared/v1/shared_pb.ts index dfe20420..39093c32 100644 --- a/webapp/_webapp/src/pkg/gen/apiclient/shared/v1/shared_pb.ts +++ b/webapp/_webapp/src/pkg/gen/apiclient/shared/v1/shared_pb.ts @@ -1,4 +1,4 @@ -// @generated by protoc-gen-es v2.7.0 with parameter "target=ts" +// @generated by protoc-gen-es v2.10.1 with parameter "target=ts" // @generated from file shared/v1/shared.proto (package shared.v1, syntax proto3) /* eslint-disable */ diff --git a/webapp/_webapp/src/pkg/gen/apiclient/user/v1/user_pb.ts b/webapp/_webapp/src/pkg/gen/apiclient/user/v1/user_pb.ts index 40db1548..5a831081 100644 --- a/webapp/_webapp/src/pkg/gen/apiclient/user/v1/user_pb.ts +++ b/webapp/_webapp/src/pkg/gen/apiclient/user/v1/user_pb.ts @@ 
-1,4 +1,4 @@ -// @generated by protoc-gen-es v2.7.0 with parameter "target=ts" +// @generated by protoc-gen-es v2.10.1 with parameter "target=ts" // @generated from file user/v1/user.proto (package user.v1, syntax proto3) /* eslint-disable */ @@ -13,7 +13,7 @@ import type { Message } from "@bufbuild/protobuf"; * Describes the file user/v1/user.proto. */ export const file_user_v1_user: GenFile = /*@__PURE__*/ - fileDesc("ChJ1c2VyL3YxL3VzZXIucHJvdG8SB3VzZXIudjEiQAoEVXNlchIKCgJpZBgBIAEoCRINCgVlbWFpbBgCIAEoCRIMCgRuYW1lGAMgASgJEg8KB3BpY3R1cmUYBCABKAkiEAoOR2V0VXNlclJlcXVlc3QiLgoPR2V0VXNlclJlc3BvbnNlEhsKBHVzZXIYASABKAsyDS51c2VyLnYxLlVzZXIirAEKBlByb21wdBIKCgJpZBgBIAEoCRIuCgpjcmVhdGVkX2F0GAIgASgLMhouZ29vZ2xlLnByb3RvYnVmLlRpbWVzdGFtcBIuCgp1cGRhdGVkX2F0GAMgASgLMhouZ29vZ2xlLnByb3RvYnVmLlRpbWVzdGFtcBINCgV0aXRsZRgEIAEoCRIPCgdjb250ZW50GAUgASgJEhYKDmlzX3VzZXJfcHJvbXB0GAYgASgIIhQKEkxpc3RQcm9tcHRzUmVxdWVzdCI3ChNMaXN0UHJvbXB0c1Jlc3BvbnNlEiAKB3Byb21wdHMYASADKAsyDy51c2VyLnYxLlByb21wdCI1ChNDcmVhdGVQcm9tcHRSZXF1ZXN0Eg0KBXRpdGxlGAEgASgJEg8KB2NvbnRlbnQYAiABKAkiNwoUQ3JlYXRlUHJvbXB0UmVzcG9uc2USHwoGcHJvbXB0GAEgASgLMg8udXNlci52MS5Qcm9tcHQiSAoTVXBkYXRlUHJvbXB0UmVxdWVzdBIRCglwcm9tcHRfaWQYASABKAkSDQoFdGl0bGUYAiABKAkSDwoHY29udGVudBgDIAEoCSI3ChRVcGRhdGVQcm9tcHRSZXNwb25zZRIfCgZwcm9tcHQYASABKAsyDy51c2VyLnYxLlByb21wdCIoChNEZWxldGVQcm9tcHRSZXF1ZXN0EhEKCXByb21wdF9pZBgBIAEoCSIWChREZWxldGVQcm9tcHRSZXNwb25zZSKtAQoIU2V0dGluZ3MSJgoec2hvd19zaG9ydGN1dHNfYWZ0ZXJfc2VsZWN0aW9uGAEgASgIEigKIGZ1bGxfd2lkdGhfcGFwZXJfZGVidWdnZXJfYnV0dG9uGAIgASgIEhkKEWVuYWJsZV9jb21wbGV0aW9uGAMgASgIEhkKEWZ1bGxfZG9jdW1lbnRfcmFnGAQgASgIEhkKEXNob3dlZF9vbmJvYXJkaW5nGAUgASgIIhQKEkdldFNldHRpbmdzUmVxdWVzdCI6ChNHZXRTZXR0aW5nc1Jlc3BvbnNlEiMKCHNldHRpbmdzGAEgASgLMhEudXNlci52MS5TZXR0aW5ncyI8ChVVcGRhdGVTZXR0aW5nc1JlcXVlc3QSIwoIc2V0dGluZ3MYASABKAsyES51c2VyLnYxLlNldHRpbmdzIj0KFlVwZGF0ZVNldHRpbmdzUmVzcG9uc2USIwoIc2V0dGluZ3MYASABKAsyES51c2VyLnYxLlNldHRpbmdzIhYKFFJlc2V0U2V0dGluZ3NSZXF1ZXN0IjwKFVJlc2V0U2V0dGluZ3NSZXNwb25zZRIjCghzZXR0aW5ncxgBIAEoCzIRLnVzZXIu
djEuU2V0dGluZ3MiHAoaR2V0VXNlckluc3RydWN0aW9uc1JlcXVlc3QiMwobR2V0VXNlckluc3RydWN0aW9uc1Jlc3BvbnNlEhQKDGluc3RydWN0aW9ucxgBIAEoCSI1Ch1VcHNlcnRVc2VySW5zdHJ1Y3Rpb25zUmVxdWVzdBIUCgxpbnN0cnVjdGlvbnMYASABKAkiNgoeVXBzZXJ0VXNlckluc3RydWN0aW9uc1Jlc3BvbnNlEhQKDGluc3RydWN0aW9ucxgBIAEoCTKDCgoLVXNlclNlcnZpY2USXQoHR2V0VXNlchIXLnVzZXIudjEuR2V0VXNlclJlcXVlc3QaGC51c2VyLnYxLkdldFVzZXJSZXNwb25zZSIfgtPkkwIZEhcvX3BkL2FwaS92MS91c2Vycy9Ac2VsZhJxCgtMaXN0UHJvbXB0cxIbLnVzZXIudjEuTGlzdFByb21wdHNSZXF1ZXN0GhwudXNlci52MS5MaXN0UHJvbXB0c1Jlc3BvbnNlIieC0+STAiESHy9fcGQvYXBpL3YxL3VzZXJzL0BzZWxmL3Byb21wdHMSdwoMQ3JlYXRlUHJvbXB0EhwudXNlci52MS5DcmVhdGVQcm9tcHRSZXF1ZXN0Gh0udXNlci52MS5DcmVhdGVQcm9tcHRSZXNwb25zZSIqgtPkkwIkOgEqIh8vX3BkL2FwaS92MS91c2Vycy9Ac2VsZi9wcm9tcHRzEoMBCgxVcGRhdGVQcm9tcHQSHC51c2VyLnYxLlVwZGF0ZVByb21wdFJlcXVlc3QaHS51c2VyLnYxLlVwZGF0ZVByb21wdFJlc3BvbnNlIjaC0+STAjA6ASoaKy9fcGQvYXBpL3YxL3VzZXJzL0BzZWxmL3Byb21wdHMve3Byb21wdF9pZH0SjgEKE0dldFVzZXJJbnN0cnVjdGlvbnMSIy51c2VyLnYxLkdldFVzZXJJbnN0cnVjdGlvbnNSZXF1ZXN0GiQudXNlci52MS5HZXRVc2VySW5zdHJ1Y3Rpb25zUmVzcG9uc2UiLILT5JMCJhIkL19wZC9hcGkvdjEvdXNlcnMvQHNlbGYvaW5zdHJ1Y3Rpb25zEpoBChZVcHNlcnRVc2VySW5zdHJ1Y3Rpb25zEiYudXNlci52MS5VcHNlcnRVc2VySW5zdHJ1Y3Rpb25zUmVxdWVzdBonLnVzZXIudjEuVXBzZXJ0VXNlckluc3RydWN0aW9uc1Jlc3BvbnNlIi+C0+STAik6ASoiJC9fcGQvYXBpL3YxL3VzZXJzL0BzZWxmL2luc3RydWN0aW9ucxKAAQoMRGVsZXRlUHJvbXB0EhwudXNlci52MS5EZWxldGVQcm9tcHRSZXF1ZXN0Gh0udXNlci52MS5EZWxldGVQcm9tcHRSZXNwb25zZSIzgtPkkwItKisvX3BkL2FwaS92MS91c2Vycy9Ac2VsZi9wcm9tcHRzL3twcm9tcHRfaWR9EnIKC0dldFNldHRpbmdzEhsudXNlci52MS5HZXRTZXR0aW5nc1JlcXVlc3QaHC51c2VyLnYxLkdldFNldHRpbmdzUmVzcG9uc2UiKILT5JMCIhIgL19wZC9hcGkvdjEvdXNlcnMvQHNlbGYvc2V0dGluZ3MSfgoOVXBkYXRlU2V0dGluZ3MSHi51c2VyLnYxLlVwZGF0ZVNldHRpbmdzUmVxdWVzdBofLnVzZXIudjEuVXBkYXRlU2V0dGluZ3NSZXNwb25zZSIrgtPkkwIlOgEqGiAvX3BkL2FwaS92MS91c2Vycy9Ac2VsZi9zZXR0aW5ncxJ+Cg1SZXNldFNldHRpbmdzEh0udXNlci52MS5SZXNldFNldHRpbmdzUmVxdWVzdBoeLnVzZXIudjEuUmVzZXRTZXR0aW5nc1Jlc3BvbnNlIi6C0+STAigiJi9fcGQvYXBpL3YxL3VzZXJzL0BzZWxmL3NldHRpbmdzL3Jlc2V0Qn8KC2NvbS51
c2VyLnYxQglVc2VyUHJvdG9QAVoocGFwZXJkZWJ1Z2dlci9wa2cvZ2VuL2FwaS91c2VyL3YxO3VzZXJ2MaICA1VYWKoCB1VzZXIuVjHKAgdVc2VyXFYx4gITVXNlclxWMVxHUEJNZXRhZGF0YeoCCFVzZXI6OlYxYgZwcm90bzM", [file_google_api_annotations, file_google_protobuf_timestamp]); + fileDesc("ChJ1c2VyL3YxL3VzZXIucHJvdG8SB3VzZXIudjEiQAoEVXNlchIKCgJpZBgBIAEoCRINCgVlbWFpbBgCIAEoCRIMCgRuYW1lGAMgASgJEg8KB3BpY3R1cmUYBCABKAkiEAoOR2V0VXNlclJlcXVlc3QiLgoPR2V0VXNlclJlc3BvbnNlEhsKBHVzZXIYASABKAsyDS51c2VyLnYxLlVzZXIirAEKBlByb21wdBIKCgJpZBgBIAEoCRIuCgpjcmVhdGVkX2F0GAIgASgLMhouZ29vZ2xlLnByb3RvYnVmLlRpbWVzdGFtcBIuCgp1cGRhdGVkX2F0GAMgASgLMhouZ29vZ2xlLnByb3RvYnVmLlRpbWVzdGFtcBINCgV0aXRsZRgEIAEoCRIPCgdjb250ZW50GAUgASgJEhYKDmlzX3VzZXJfcHJvbXB0GAYgASgIIhQKEkxpc3RQcm9tcHRzUmVxdWVzdCI3ChNMaXN0UHJvbXB0c1Jlc3BvbnNlEiAKB3Byb21wdHMYASADKAsyDy51c2VyLnYxLlByb21wdCI1ChNDcmVhdGVQcm9tcHRSZXF1ZXN0Eg0KBXRpdGxlGAEgASgJEg8KB2NvbnRlbnQYAiABKAkiNwoUQ3JlYXRlUHJvbXB0UmVzcG9uc2USHwoGcHJvbXB0GAEgASgLMg8udXNlci52MS5Qcm9tcHQiSAoTVXBkYXRlUHJvbXB0UmVxdWVzdBIRCglwcm9tcHRfaWQYASABKAkSDQoFdGl0bGUYAiABKAkSDwoHY29udGVudBgDIAEoCSI3ChRVcGRhdGVQcm9tcHRSZXNwb25zZRIfCgZwcm9tcHQYASABKAsyDy51c2VyLnYxLlByb21wdCIoChNEZWxldGVQcm9tcHRSZXF1ZXN0EhEKCXByb21wdF9pZBgBIAEoCSIWChREZWxldGVQcm9tcHRSZXNwb25zZSLFAQoIU2V0dGluZ3MSJgoec2hvd19zaG9ydGN1dHNfYWZ0ZXJfc2VsZWN0aW9uGAEgASgIEigKIGZ1bGxfd2lkdGhfcGFwZXJfZGVidWdnZXJfYnV0dG9uGAIgASgIEhkKEWVuYWJsZV9jb21wbGV0aW9uGAMgASgIEhkKEWZ1bGxfZG9jdW1lbnRfcmFnGAQgASgIEhkKEXNob3dlZF9vbmJvYXJkaW5nGAUgASgIEhYKDm9wZW5haV9hcGlfa2V5GAYgASgJIhQKEkdldFNldHRpbmdzUmVxdWVzdCI6ChNHZXRTZXR0aW5nc1Jlc3BvbnNlEiMKCHNldHRpbmdzGAEgASgLMhEudXNlci52MS5TZXR0aW5ncyI8ChVVcGRhdGVTZXR0aW5nc1JlcXVlc3QSIwoIc2V0dGluZ3MYASABKAsyES51c2VyLnYxLlNldHRpbmdzIj0KFlVwZGF0ZVNldHRpbmdzUmVzcG9uc2USIwoIc2V0dGluZ3MYASABKAsyES51c2VyLnYxLlNldHRpbmdzIhYKFFJlc2V0U2V0dGluZ3NSZXF1ZXN0IjwKFVJlc2V0U2V0dGluZ3NSZXNwb25zZRIjCghzZXR0aW5ncxgBIAEoCzIRLnVzZXIudjEuU2V0dGluZ3MiHAoaR2V0VXNlckluc3RydWN0aW9uc1JlcXVlc3QiMwobR2V0VXNlckluc3RydWN0aW9uc1Jlc3BvbnNlEhQKDGluc3RydWN0aW9ucxgBIAEoCSI1Ch1VcHNlcnRVc2
VySW5zdHJ1Y3Rpb25zUmVxdWVzdBIUCgxpbnN0cnVjdGlvbnMYASABKAkiNgoeVXBzZXJ0VXNlckluc3RydWN0aW9uc1Jlc3BvbnNlEhQKDGluc3RydWN0aW9ucxgBIAEoCTKDCgoLVXNlclNlcnZpY2USXQoHR2V0VXNlchIXLnVzZXIudjEuR2V0VXNlclJlcXVlc3QaGC51c2VyLnYxLkdldFVzZXJSZXNwb25zZSIfgtPkkwIZEhcvX3BkL2FwaS92MS91c2Vycy9Ac2VsZhJxCgtMaXN0UHJvbXB0cxIbLnVzZXIudjEuTGlzdFByb21wdHNSZXF1ZXN0GhwudXNlci52MS5MaXN0UHJvbXB0c1Jlc3BvbnNlIieC0+STAiESHy9fcGQvYXBpL3YxL3VzZXJzL0BzZWxmL3Byb21wdHMSdwoMQ3JlYXRlUHJvbXB0EhwudXNlci52MS5DcmVhdGVQcm9tcHRSZXF1ZXN0Gh0udXNlci52MS5DcmVhdGVQcm9tcHRSZXNwb25zZSIqgtPkkwIkOgEqIh8vX3BkL2FwaS92MS91c2Vycy9Ac2VsZi9wcm9tcHRzEoMBCgxVcGRhdGVQcm9tcHQSHC51c2VyLnYxLlVwZGF0ZVByb21wdFJlcXVlc3QaHS51c2VyLnYxLlVwZGF0ZVByb21wdFJlc3BvbnNlIjaC0+STAjA6ASoaKy9fcGQvYXBpL3YxL3VzZXJzL0BzZWxmL3Byb21wdHMve3Byb21wdF9pZH0SjgEKE0dldFVzZXJJbnN0cnVjdGlvbnMSIy51c2VyLnYxLkdldFVzZXJJbnN0cnVjdGlvbnNSZXF1ZXN0GiQudXNlci52MS5HZXRVc2VySW5zdHJ1Y3Rpb25zUmVzcG9uc2UiLILT5JMCJhIkL19wZC9hcGkvdjEvdXNlcnMvQHNlbGYvaW5zdHJ1Y3Rpb25zEpoBChZVcHNlcnRVc2VySW5zdHJ1Y3Rpb25zEiYudXNlci52MS5VcHNlcnRVc2VySW5zdHJ1Y3Rpb25zUmVxdWVzdBonLnVzZXIudjEuVXBzZXJ0VXNlckluc3RydWN0aW9uc1Jlc3BvbnNlIi+C0+STAik6ASoiJC9fcGQvYXBpL3YxL3VzZXJzL0BzZWxmL2luc3RydWN0aW9ucxKAAQoMRGVsZXRlUHJvbXB0EhwudXNlci52MS5EZWxldGVQcm9tcHRSZXF1ZXN0Gh0udXNlci52MS5EZWxldGVQcm9tcHRSZXNwb25zZSIzgtPkkwItKisvX3BkL2FwaS92MS91c2Vycy9Ac2VsZi9wcm9tcHRzL3twcm9tcHRfaWR9EnIKC0dldFNldHRpbmdzEhsudXNlci52MS5HZXRTZXR0aW5nc1JlcXVlc3QaHC51c2VyLnYxLkdldFNldHRpbmdzUmVzcG9uc2UiKILT5JMCIhIgL19wZC9hcGkvdjEvdXNlcnMvQHNlbGYvc2V0dGluZ3MSfgoOVXBkYXRlU2V0dGluZ3MSHi51c2VyLnYxLlVwZGF0ZVNldHRpbmdzUmVxdWVzdBofLnVzZXIudjEuVXBkYXRlU2V0dGluZ3NSZXNwb25zZSIrgtPkkwIlOgEqGiAvX3BkL2FwaS92MS91c2Vycy9Ac2VsZi9zZXR0aW5ncxJ+Cg1SZXNldFNldHRpbmdzEh0udXNlci52MS5SZXNldFNldHRpbmdzUmVxdWVzdBoeLnVzZXIudjEuUmVzZXRTZXR0aW5nc1Jlc3BvbnNlIi6C0+STAigiJi9fcGQvYXBpL3YxL3VzZXJzL0BzZWxmL3NldHRpbmdzL3Jlc2V0Qn8KC2NvbS51c2VyLnYxQglVc2VyUHJvdG9QAVoocGFwZXJkZWJ1Z2dlci9wa2cvZ2VuL2FwaS91c2VyL3YxO3VzZXJ2MaICA1VYWKoCB1VzZXIuVjHKAgdVc2VyXFYx4gITVXNlclxWMVxHUEJNZXRhZG
F0YeoCCFVzZXI6OlYxYgZwcm90bzM", [file_google_api_annotations, file_google_protobuf_timestamp]); /** * @generated from message user.v1.User @@ -290,6 +290,11 @@ export type Settings = Message<"user.v1.Settings"> & { * @generated from field: bool showed_onboarding = 5; */ showedOnboarding: boolean; + + /** + * @generated from field: string openai_api_key = 6; + */ + openaiApiKey: string; }; /** diff --git a/webapp/_webapp/src/query/api.ts b/webapp/_webapp/src/query/api.ts index 25560954..55ed5aa2 100644 --- a/webapp/_webapp/src/query/api.ts +++ b/webapp/_webapp/src/query/api.ts @@ -20,6 +20,8 @@ import { GetConversationResponseSchema, ListConversationsRequest, ListConversationsResponseSchema, + ListSupportedModelsRequest, + ListSupportedModelsResponseSchema, UpdateConversationRequest, UpdateConversationResponseSchema, } from "../pkg/gen/apiclient/chat/v1/chat_pb"; @@ -118,6 +120,11 @@ export const listConversations = async (data: PlainMessage) => { + const response = await apiclient.get("/chats/models", data); + return fromJson(ListSupportedModelsResponseSchema, response); +}; + export const getConversation = async (data: PlainMessage) => { const response = await apiclient.get(`/chats/conversations/${data.conversationId}`); return fromJson(GetConversationResponseSchema, response); diff --git a/webapp/_webapp/src/query/index.ts b/webapp/_webapp/src/query/index.ts index 99ddd631..ca60f155 100644 --- a/webapp/_webapp/src/query/index.ts +++ b/webapp/_webapp/src/query/index.ts @@ -4,6 +4,7 @@ import { DeleteConversationResponse, GetConversationResponse, ListConversationsResponse, + ListSupportedModelsResponse, UpdateConversationResponse, } from "../pkg/gen/apiclient/chat/v1/chat_pb"; import { UseMutationOptionsOverride, UseQueryOptionsOverride } from "./types"; @@ -16,6 +17,7 @@ import { getProject, listConversations, listPrompts, + listSupportedModels, runProjectPaperScore, updateConversation, updatePrompt, @@ -60,6 +62,14 @@ export const useGetProjectQuery = (projectId: 
string, opts?: UseQueryOptionsOver }); }; +export const useListSupportedModelsQuery = (opts?: UseQueryOptionsOverride) => { + return useQuery({ + queryKey: queryKeys.chats.listSupportedModels().queryKey, + queryFn: () => listSupportedModels({}), + ...opts, + }); +}; + export const useListPromptsQuery = (opts?: UseQueryOptionsOverride) => { return useQuery({ queryKey: queryKeys.prompts.listPrompts().queryKey, @@ -149,7 +159,7 @@ export const useGetUserInstructionsQuery = (opts?: UseQueryOptionsOverride getUserInstructions({}), enabled: !!user, ...opts, }); diff --git a/webapp/_webapp/src/query/keys.ts b/webapp/_webapp/src/query/keys.ts index 832db5a5..e28ef91e 100644 --- a/webapp/_webapp/src/query/keys.ts +++ b/webapp/_webapp/src/query/keys.ts @@ -8,6 +8,9 @@ export const queryKeys = createQueryKeyStore({ prompts: { listPrompts: () => ["users", "@self", "prompts"], }, + chats: { + listSupportedModels: () => ["chats", "models"], + }, conversations: { listConversations: (projectId: string) => ["conversations", projectId], getConversation: (conversationId: string) => ["conversations", conversationId], diff --git a/webapp/_webapp/src/stores/conversation/handlers/handleStreamError.ts b/webapp/_webapp/src/stores/conversation/handlers/handleStreamError.ts index c1d79a06..bd02109a 100644 --- a/webapp/_webapp/src/stores/conversation/handlers/handleStreamError.ts +++ b/webapp/_webapp/src/stores/conversation/handlers/handleStreamError.ts @@ -1,8 +1,11 @@ -import { StreamError } from "../../../pkg/gen/apiclient/chat/v1/chat_pb"; +import { MessageTypeAssistantSchema, StreamError } from "../../../pkg/gen/apiclient/chat/v1/chat_pb"; import { errorToast } from "../../../libs/toasts"; import { OverleafAuthentication, OverleafVersionedDoc } from "../../../libs/overleaf-socket"; import { getProjectId } from "../../../libs/helpers"; import { getCookies } from "../../../intermediate"; +import { StreamingMessage } from "../../streaming-message-store"; +import { MessageEntry, 
MessageEntryStatus } from "../types"; +import { fromJson } from "@bufbuild/protobuf"; export async function handleStreamError( streamError: StreamError, @@ -16,10 +19,27 @@ export async function handleStreamError( csrfToken: string, ) => Promise>, sendMessageStream: (message: string, selectedText: string) => Promise, + updateStreamingMessage: (updater: (prev: StreamingMessage) => StreamingMessage) => void, ) { + // Append an error message to the streaming message + const updateFunc = (prev: StreamingMessage) => { + const errorMessageEntry: MessageEntry = { + messageId: "error-" + Date.now(), + status: MessageEntryStatus.STALE, + assistant: fromJson(MessageTypeAssistantSchema, { + content: `${streamError.errorMessage}`, + }), + }; + return { + ...prev, + parts: [...prev.parts, errorMessageEntry], + }; + }; + try { const { session, gclb } = await getCookies(window.location.hostname); if (streamError.errorMessage.includes("project is out of date")) { + // TODO: replace this into a shared variable for both backend and frontend await sync( userId, getProjectId(), @@ -32,9 +52,11 @@ export async function handleStreamError( // Retry sending the message after sync await sendMessageStream(currentPrompt, currentSelectedText); } else { - errorToast(streamError.errorMessage, "Chat Error"); + updateStreamingMessage(updateFunc); + errorToast(streamError.errorMessage, "Chat Stream Error"); } } catch (error) { - errorToast(error instanceof Error ? error.message : "Unknown error", "Chat Error"); + updateStreamingMessage(updateFunc); + errorToast(error instanceof Error ? 
error.message : "Unknown error", "Chat Stream Error"); } } diff --git a/webapp/_webapp/src/stores/setting-store.ts b/webapp/_webapp/src/stores/setting-store.ts index bb675e3e..1a327225 100644 --- a/webapp/_webapp/src/stores/setting-store.ts +++ b/webapp/_webapp/src/stores/setting-store.ts @@ -35,6 +35,7 @@ const defaultSettings: PlainMessage = { enableCompletion: false, fullDocumentRag: false, showedOnboarding: true, + openaiApiKey: "", }; export const useSettingStore = create()((set, get) => ({ diff --git a/webapp/_webapp/src/views/chat/footer/toolbar/model-selection.tsx b/webapp/_webapp/src/views/chat/footer/toolbar/model-selection.tsx index cca02952..4c1d48d4 100644 --- a/webapp/_webapp/src/views/chat/footer/toolbar/model-selection.tsx +++ b/webapp/_webapp/src/views/chat/footer/toolbar/model-selection.tsx @@ -5,7 +5,7 @@ import { LanguageModel } from "../../../../pkg/gen/apiclient/chat/v1/chat_pb"; import { useConversationUiStore } from "../../../../stores/conversation/conversation-ui-store"; type ModelSelectionProps = { - onSelectModel: (languageModel: LanguageModel) => void; + onSelectModel: () => void; }; export function ModelSelection({ onSelectModel }: ModelSelectionProps) { @@ -14,7 +14,7 @@ export function ModelSelection({ onSelectModel }: ModelSelectionProps) { const items: SelectionItem[] = useMemo(() => { return models.map((model) => ({ title: model.name, - description: model.description, + subtitle: model.slug, value: model.languageModel, })); }, [models]); @@ -22,7 +22,7 @@ export function ModelSelection({ onSelectModel }: ModelSelectionProps) { const onSelect = useCallback( (item: SelectionItem) => { setModel(models.find((m) => m.languageModel === item.value)!); - onSelectModel(item.value); + onSelectModel(); inputRef.current?.focus(); }, [setModel, onSelectModel, inputRef, models], diff --git a/webapp/_webapp/src/views/chat/footer/toolbar/selection.tsx b/webapp/_webapp/src/views/chat/footer/toolbar/selection.tsx index 2e703f24..48ef9b58 100644 --- 
a/webapp/_webapp/src/views/chat/footer/toolbar/selection.tsx +++ b/webapp/_webapp/src/views/chat/footer/toolbar/selection.tsx @@ -7,7 +7,8 @@ import { useSettingStore } from "../../../../stores/setting-store"; export type SelectionItem = { title: string; - description: string; + subtitle?: string; + description?: string; value: T; }; @@ -97,7 +98,7 @@ export function Selection({ items, onSelect }: SelectionProps) { > {items?.map((item, idx) => (
({ items, onSelect }: SelectionProps) { } }} > -
- {item.title} -
- {item.description} + {item.title} + {item.subtitle && ( + + {item.subtitle} + + )}
+ {item.description && ( +
+ {item.description} +
+ )}
))}
diff --git a/webapp/_webapp/src/views/extension-settings/components/HostPermissionWidget/useHostPermissionStore.ts b/webapp/_webapp/src/views/extension-settings/components/HostPermissionWidget/useHostPermissionStore.ts index 780e91e9..9e387cd6 100644 --- a/webapp/_webapp/src/views/extension-settings/components/HostPermissionWidget/useHostPermissionStore.ts +++ b/webapp/_webapp/src/views/extension-settings/components/HostPermissionWidget/useHostPermissionStore.ts @@ -19,12 +19,12 @@ const normalizeWildcardPattern = (url: string) => { const scheme = match[1].toLowerCase(); const host = match[2]; const path = match[3] || "/*"; - + // Normalize scheme (keep * as is, normalize http/https) const normalizedScheme = scheme === "*" ? "*" : scheme; // Ensure path ends with /* if it's just / const normalizedPath = path === "/" ? "/*" : path.endsWith("/*") ? path : `${path}/*`; - + return { valid: true as const, origin: `${normalizedScheme}://${host}${normalizedPath}` }; } @@ -71,7 +71,7 @@ export const useHostPermissionStore = create((set, get) => clearMessage: () => set({ message: null }), loadPermissions: async () => { set({ isLoadingPermissions: true }); - + const chromePermissions = await chrome.permissions.getAll().catch((error) => { const errorMessage = handleError(error, "Error loading permissions."); set({ message: { text: errorMessage, type: "error" } }); @@ -80,7 +80,7 @@ export const useHostPermissionStore = create((set, get) => const origins = chromePermissions?.origins || []; const permissions: PermissionItem[] = origins.map((origin) => ({ origin, granted: true })); - + set({ permissions, isLoadingPermissions: false }); }, submitPermissionRequest: async () => { @@ -119,7 +119,7 @@ export const useHostPermissionStore = create((set, get) => } else { set({ message: { text: `Permission denied for ${origin}`, type: "error" } }); } - + set({ isSubmitting: false }); }, })); @@ -136,4 +136,3 @@ export const getMessageClassName = (type: PermissionMessage["type"]): string 
=> return ""; } }; - diff --git a/webapp/_webapp/src/views/settings/index.tsx b/webapp/_webapp/src/views/settings/index.tsx index 15a6a5a7..949ed551 100644 --- a/webapp/_webapp/src/views/settings/index.tsx +++ b/webapp/_webapp/src/views/settings/index.tsx @@ -8,6 +8,7 @@ import { AccountSettings } from "./sections/account-settings"; import { UISettings } from "./sections/ui-settings"; import { RealDeveloperTools } from "./sections/real-developer-tools"; import { SettingsFooter } from "./sections/footer"; +import { ApiKeySettings } from "./sections/api-key-settings"; export const Settings = () => { const { settings, isLoading, loadSettings, enableUserDeveloperTools } = useSettingStore(); @@ -32,6 +33,7 @@ export const Settings = () => {
{/* */} + {enableUserDeveloperTools && } diff --git a/webapp/_webapp/src/views/settings/sections/account-settings.tsx b/webapp/_webapp/src/views/settings/sections/account-settings.tsx index 3d8a6f5c..959d2821 100644 --- a/webapp/_webapp/src/views/settings/sections/account-settings.tsx +++ b/webapp/_webapp/src/views/settings/sections/account-settings.tsx @@ -24,7 +24,7 @@ export const AccountSettings = () => { Account
-
View onboarding guide
+
View onboarding guide
Learn how to use PaperDebugger effectively