diff --git a/README.md b/README.md index 7523ff17..e6664693 100644 --- a/README.md +++ b/README.md @@ -105,11 +105,11 @@ Send a POST request to `/v1/chat/completions` (or any other available endpoint) ### Supported Models -| Model | Description | -|-------|-------------| -| `gemini-3.0-pro` | Latest and most powerful model | -| `gemini-2.5-pro` | Advanced reasoning model | -| `gemini-2.5-flash` | Fast and efficient model (default) | +| Model | Description | +| --------------------------- | ---------------------------------- | +| `gemini-3.0-pro` | Most powerful model | +| `gemini-3.0-flash` | Fast and efficient model (default) | +| `gemini-3.0-flash-thinking` | Enhanced reasoning model | ### Example Request (Basic) @@ -124,7 +124,7 @@ Send a POST request to `/v1/chat/completions` (or any other available endpoint) ```json { - "model": "gemini-2.5-pro", + "model": "gemini-3.0-flash-thinking", "messages": [ { "role": "system", "content": "You are a helpful assistant." }, { "role": "user", "content": "What is Python?" }, @@ -283,8 +283,8 @@ If the cookies are left empty, the application will automatically retrieve them # Default AI service. default_ai = gemini -# Default model for Gemini (options: gemini-3.0-pro, gemini-2.5-pro, gemini-2.5-flash) -default_model_gemini = gemini-2.5-flash +# Default model for Gemini (options: gemini-3.0-pro, gemini-3.0-flash, gemini-3.0-flash-thinking) +default_model_gemini = gemini-3.0-flash # Gemini cookies (leave empty to use browser_cookies3 for automatic authentication). gemini_cookie_1psid = diff --git a/config.conf.example b/config.conf.example index e963a2a9..f6afc512 100755 --- a/config.conf.example +++ b/config.conf.example @@ -19,14 +19,10 @@ default_ai = gemini # --- Gemini Model Configuration --- # Choose the model to be used when Gemini is selected as the AI. 
# Available models: -# - gemini-1.5-flash -# - gemini-2.0-flash -# - gemini-2.0-flash-thinking -# - gemini-2.0-flash-thinking-with-apps -# - gemini-2.5-pro -# - gemini-2.5-flash # - gemini-3.0-pro -default_model_gemini = gemini-3.0-pro +# - gemini-3.0-flash +# - gemini-3.0-flash-thinking +default_model_gemini = gemini-3.0-flash # --- Gemini Cookies --- # Provide your authentication cookies for Gemini here. diff --git a/requirements.txt b/requirements.txt index 5939d565..bff871ac 100644 --- a/requirements.txt +++ b/requirements.txt @@ -8,7 +8,7 @@ colorama==0.4.6 ; python_version >= "3.10" and python_version < "4.0" and sys_pl curl-cffi==0.7.4 ; python_version >= "3.10" and python_version < "4.0" exceptiongroup==1.2.2 ; python_version >= "3.10" and python_version < "3.11" fastapi==0.115.7 ; python_version >= "3.10" and python_version < "4.0" -gemini-webapi==1.8.3 ; python_version >= "3.10" and python_version < "4.0" +gemini-webapi==1.19.2 ; python_version >= "3.10" and python_version < "4.0" h11==0.14.0 ; python_version >= "3.10" and python_version < "4.0" h2==4.1.0 ; python_version >= "3.10" and python_version < "4.0" hpack==4.1.0 ; python_version >= "3.10" and python_version < "4.0" @@ -21,13 +21,13 @@ loguru==0.7.3 ; python_version >= "3.10" and python_version < "4.0" lz4==4.4.3 ; python_version >= "3.10" and python_version < "4.0" pycparser==2.22 ; python_version >= "3.10" and python_version < "4.0" pycryptodomex==3.21.0 ; python_version >= "3.10" and python_version < "4.0" -pydantic-core==2.27.2 ; python_version >= "3.10" and python_version < "4.0" -pydantic==2.10.6 ; python_version >= "3.10" and python_version < "4.0" +pydantic-core>=2.27.2 ; python_version >= "3.10" and python_version < "4.0" +pydantic>=2.10.6 ; python_version >= "3.10" and python_version < "4.0" pywin32==308 ; python_version >= "3.10" and python_version < "4.0" and platform_system == "Windows" shadowcopy==0.0.4 ; python_version >= "3.10" and python_version < "4.0" and platform_system == "Windows" sniffio==1.3.1 ; python_version >= "3.10" and python_version < "4.0" starlette==0.45.3 ; python_version >= "3.10" and 
python_version < "4.0" -typing-extensions==4.12.2 ; python_version >= "3.10" and python_version < "4.0" +typing-extensions>=4.12.2 ; python_version >= "3.10" and python_version < "4.0" uvicorn==0.34.0 ; python_version >= "3.10" and python_version < "4.0" win32-setctime==1.2.0 ; python_version >= "3.10" and python_version < "4.0" and sys_platform == "win32" wmi==1.5.1 ; python_version >= "3.10" and python_version < "4.0" and platform_system == "Windows" diff --git a/src/schemas/request.py b/src/schemas/request.py index 0eb3662a..fddfae65 100644 --- a/src/schemas/request.py +++ b/src/schemas/request.py @@ -10,15 +10,17 @@ class GeminiModels(str, Enum): # Gemini 3.0 Series PRO_3_0 = "gemini-3.0-pro" - - # Gemini 2.5 Series - PRO_2_5 = "gemini-2.5-pro" - FLASH_2_5 = "gemini-2.5-flash" + FLASH_3_0 = "gemini-3.0-flash" + FLASH_3_0_THINKING = "gemini-3.0-flash-thinking" + + # Sentinel meaning no specific model was requested (request default is FLASH_3_0) + DEFAULT = "unspecified" + class GeminiRequest(BaseModel): message: str - model: GeminiModels = Field(default=GeminiModels.FLASH_2_5, description="Model to use for Gemini.") + model: GeminiModels = Field(default=GeminiModels.FLASH_3_0, description="Model to use for Gemini.") files: Optional[List[str]] = [] class OpenAIChatRequest(BaseModel):