Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 8 additions & 8 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -105,11 +105,11 @@ Send a POST request to `/v1/chat/completions` (or any other available endpoint)

### Supported Models

| Model | Description |
|-------|-------------|
| `gemini-3.0-pro` | Latest and most powerful model |
| `gemini-2.5-pro` | Advanced reasoning model |
| `gemini-2.5-flash` | Fast and efficient model (default) |
| Model | Description |
| --------------------------- | ---------------------------------- |
| `gemini-3.0-pro` | Most powerful model |
| `gemini-3.0-flash` | Fast and efficient model (default) |
| `gemini-3.0-flash-thinking` | Enhanced reasoning model |

### Example Request (Basic)

Expand All @@ -124,7 +124,7 @@ Send a POST request to `/v1/chat/completions` (or any other available endpoint)

```json
{
"model": "gemini-2.5-pro",
"model": "gemini-3.0-flash-thinking",
"messages": [
{ "role": "system", "content": "You are a helpful assistant." },
{ "role": "user", "content": "What is Python?" },
Expand Down Expand Up @@ -283,8 +283,8 @@ If the cookies are left empty, the application will automatically retrieve them
# Default AI service.
default_ai = gemini

# Default model for Gemini (options: gemini-3.0-pro, gemini-2.5-pro, gemini-2.5-flash)
default_model_gemini = gemini-2.5-flash
# Default model for Gemini (options: gemini-3.0-pro, gemini-3.0-flash, gemini-3.0-flash-thinking)
default_model_gemini = gemini-3.0-flash

# Gemini cookies (leave empty to use browser_cookies3 for automatic authentication).
gemini_cookie_1psid =
Expand Down
10 changes: 3 additions & 7 deletions config.conf.example
Original file line number Diff line number Diff line change
Expand Up @@ -19,14 +19,10 @@ default_ai = gemini
# --- Gemini Model Configuration ---
# Choose the model to be used when Gemini is selected as the AI.
# Available models:
# - gemini-1.5-flash
# - gemini-2.0-flash
# - gemini-2.0-flash-thinking
# - gemini-2.0-flash-thinking-with-apps
# - gemini-2.5-pro
# - gemini-2.5-flash
# - gemini-3.0-pro
default_model_gemini = gemini-3.0-pro
# - gemini-3.0-flash
# - gemini-3.0-flash-thinking
default_model_gemini = gemini-3.0-flash

# --- Gemini Cookies ---
# Provide your authentication cookies for Gemini here.
Expand Down
8 changes: 4 additions & 4 deletions requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ colorama==0.4.6 ; python_version >= "3.10" and python_version < "4.0" and sys_pl
curl-cffi==0.7.4 ; python_version >= "3.10" and python_version < "4.0"
exceptiongroup==1.2.2 ; python_version >= "3.10" and python_version < "3.11"
fastapi==0.115.7 ; python_version >= "3.10" and python_version < "4.0"
gemini-webapi==1.8.3 ; python_version >= "3.10" and python_version < "4.0"
gemini-webapi==1.19.2 ; python_version >= "3.10" and python_version < "4.0"
h11==0.14.0 ; python_version >= "3.10" and python_version < "4.0"
h2==4.1.0 ; python_version >= "3.10" and python_version < "4.0"
hpack==4.1.0 ; python_version >= "3.10" and python_version < "4.0"
Expand All @@ -21,13 +21,13 @@ loguru==0.7.3 ; python_version >= "3.10" and python_version < "4.0"
lz4==4.4.3 ; python_version >= "3.10" and python_version < "4.0"
pycparser==2.22 ; python_version >= "3.10" and python_version < "4.0"
pycryptodomex==3.21.0 ; python_version >= "3.10" and python_version < "4.0"
pydantic-core==2.27.2 ; python_version >= "3.10" and python_version < "4.0"
pydantic==2.10.6 ; python_version >= "3.10" and python_version < "4.0"
pydantic-core>=2.27.2 ; python_version >= "3.10" and python_version < "4.0"
pydantic>=2.10.6 ; python_version >= "3.10" and python_version < "4.0"
pywin32==308 ; python_version >= "3.10" and python_version < "4.0" and platform_system == "Windows"
shadowcopy==0.0.4 ; python_version >= "3.10" and python_version < "4.0" and platform_system == "Windows"
sniffio==1.3.1 ; python_version >= "3.10" and python_version < "4.0"
starlette==0.45.3 ; python_version >= "3.10" and python_version < "4.0"
typing-extensions==4.12.2 ; python_version >= "3.10" and python_version < "4.0"
typing-extensions>=4.12.2 ; python_version >= "3.10" and python_version < "4.0"
uvicorn==0.34.0 ; python_version >= "3.10" and python_version < "4.0"
win32-setctime==1.2.0 ; python_version >= "3.10" and python_version < "4.0" and sys_platform == "win32"
wmi==1.5.1 ; python_version >= "3.10" and python_version < "4.0" and platform_system == "Windows"
12 changes: 7 additions & 5 deletions src/schemas/request.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,15 +10,17 @@ class GeminiModels(str, Enum):

# Gemini 3.0 Series
PRO_3_0 = "gemini-3.0-pro"

# Gemini 2.5 Series
PRO_2_5 = "gemini-2.5-pro"
FLASH_2_5 = "gemini-2.5-flash"
FLASH_3_0 = "gemini-3.0-flash"
FLASH_3_0_THINKING = "gemini-3.0-flash-thinking"

# Default model
DEFAULT = "unspecified"



class GeminiRequest(BaseModel):
message: str
model: GeminiModels = Field(default=GeminiModels.FLASH_2_5, description="Model to use for Gemini.")
model: GeminiModels = Field(default=GeminiModels.FLASH_3_0, description="Model to use for Gemini.")
files: Optional[List[str]] = []

class OpenAIChatRequest(BaseModel):
Expand Down