185 changes: 185 additions & 0 deletions backend/app/alembic/versions/041_add_llm_call_table.py
@@ -0,0 +1,185 @@
"""add llm call table

Revision ID: 041
Revises: 040
Create Date: 2026-01-23 13:36:23.023444

"""
from alembic import op
import sqlalchemy as sa
import sqlmodel.sql.sqltypes
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision = "041"
down_revision = "040"
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "llm_call",
        sa.Column(
            "id",
            sa.Uuid(),
            nullable=False,
            comment="Unique identifier for the LLM call record",
        ),
        sa.Column(
            "job_id",
            sa.Uuid(),
            nullable=False,
            comment="Reference to the parent job (status tracked in job table)",
        ),
        sa.Column(
            "project_id",
            sa.Integer(),
            nullable=False,
            comment="Reference to the project this LLM call belongs to",
        ),
        sa.Column(
            "organization_id",
            sa.Integer(),
            nullable=False,
            comment="Reference to the organization this LLM call belongs to",
        ),
        sa.Column(
            "input",
            sqlmodel.sql.sqltypes.AutoString(),
            nullable=False,
            comment="User input - text string, binary data, or file path for multimodal",
        ),
        sa.Column(
            "input_type",
            sa.String(),
            nullable=False,
            comment="Input type: text, audio, image",
        ),
        sa.Column(
            "output_type",
            sa.String(),
            nullable=True,
            comment="Expected output type: text, audio, image",
        ),
        sa.Column(
            "provider",
            sa.String(),
            nullable=False,
            comment="AI provider: openai, google, anthropic",
        ),
        sa.Column(
            "model",
            sqlmodel.sql.sqltypes.AutoString(),
            nullable=False,
            comment="Specific model used e.g. 'gpt-4o', 'gemini-2.5-pro'",
        ),
        sa.Column(
            "provider_response_id",
            sqlmodel.sql.sqltypes.AutoString(),
            nullable=True,
            comment="Original response ID from the provider (e.g., OpenAI's response ID)",
        ),
        sa.Column(
            "content",
            postgresql.JSONB(astext_type=sa.Text()),
            nullable=True,
            comment="Response content: {text: '...'}, {audio_bytes: '...'}, or {image: '...'}",
        ),
        sa.Column(
            "usage",
            postgresql.JSONB(astext_type=sa.Text()),
            nullable=True,
            comment="Token usage: {input_tokens, output_tokens, reasoning_tokens}",
        ),
        sa.Column(
            "conversation_id",
            sqlmodel.sql.sqltypes.AutoString(),
            nullable=True,
            comment="Identifier linking this response to its conversation thread",
        ),
        sa.Column(
            "auto_create",
            sa.Boolean(),
            nullable=True,
            comment="Whether to auto-create conversation if conversation_id doesn't exist (OpenAI specific)",
        ),
        sa.Column(
            "config",
            postgresql.JSONB(astext_type=sa.Text()),
            nullable=True,
            comment="Configuration: {config_id, config_version} for stored config OR {config_blob} for ad-hoc config",
        ),
        sa.Column(
            "created_at",
            sa.DateTime(),
            nullable=False,
            comment="Timestamp when the LLM call was created",
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(),
            nullable=False,
            comment="Timestamp when the LLM call was last updated",
        ),
        sa.Column(
            "deleted_at",
            sa.DateTime(),
            nullable=True,
            comment="Timestamp when the record was soft-deleted",
        ),
        sa.ForeignKeyConstraint(["job_id"], ["job.id"], ondelete="CASCADE"),
        sa.ForeignKeyConstraint(
            ["organization_id"], ["organization.id"], ondelete="CASCADE"
        ),
        sa.ForeignKeyConstraint(["project_id"], ["project.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
    )
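    # Partial indexes: only rows that are not soft-deleted are indexed, and the
    # conversation index additionally skips rows without a conversation_id.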
    op.create_index(
        "idx_llm_call_conversation_id",
        "llm_call",
        ["conversation_id"],
        unique=False,
        postgresql_where=sa.text("conversation_id IS NOT NULL AND deleted_at IS NULL"),
    )
    op.create_index(
        "idx_llm_call_job_id",
        "llm_call",
        ["job_id"],
        unique=False,
        postgresql_where=sa.text("deleted_at IS NULL"),
    )
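    # Reword the existing column comment on collection.llm_service_name
    # (comment-only change, no schema change).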
    op.alter_column(
        "collection",
        "llm_service_name",
        existing_type=sa.VARCHAR(),
        comment="Name of the LLM service",
        existing_comment="Name of the LLM service provider",
        existing_nullable=False,
    )
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
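    # Revert the collection.llm_service_name comment, then drop the partial
    # indexes and the llm_call table.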
    op.alter_column(
        "collection",
        "llm_service_name",
        existing_type=sa.VARCHAR(),
        comment="Name of the LLM service provider",
        existing_comment="Name of the LLM service",
        existing_nullable=False,
    )
    op.drop_index(
        "idx_llm_call_job_id",
        table_name="llm_call",
        postgresql_where=sa.text("deleted_at IS NULL"),
    )
    op.drop_index(
        "idx_llm_call_conversation_id",
        table_name="llm_call",
        postgresql_where=sa.text("conversation_id IS NOT NULL AND deleted_at IS NULL"),
    )
    op.drop_table("llm_call")
    # ### end Alembic commands ###
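For reference, here is a minimal sketch of what the SQLModel model behind this table might look like. It is not part of the diff: the actual model file is not shown in this changeset, so the class name, field defaults, and import layout are assumptions derived only from the columns above; the CASCADE deletes and partial indexes are handled by the migration rather than by the model.

from datetime import datetime
from uuid import UUID, uuid4

from sqlalchemy.dialects.postgresql import JSONB
from sqlmodel import Column, Field, SQLModel


class LLMCall(SQLModel, table=True):
    """Hypothetical SQLModel mirror of the llm_call table created above."""

    __tablename__ = "llm_call"

    id: UUID = Field(default_factory=uuid4, primary_key=True)
    job_id: UUID = Field(foreign_key="job.id", nullable=False)
    project_id: int = Field(foreign_key="project.id", nullable=False)
    organization_id: int = Field(foreign_key="organization.id", nullable=False)
    input: str
    input_type: str  # "text", "audio", or "image"
    output_type: str | None = None
    provider: str  # "openai", "google", "anthropic"
    model: str
    provider_response_id: str | None = None
    content: dict | None = Field(default=None, sa_column=Column(JSONB))
    usage: dict | None = Field(default=None, sa_column=Column(JSONB))
    conversation_id: str | None = None
    auto_create: bool | None = None
    config: dict | None = Field(default=None, sa_column=Column(JSONB))
    created_at: datetime = Field(default_factory=datetime.utcnow)
    updated_at: datetime = Field(default_factory=datetime.utcnow)
    deleted_at: datetime | None = None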
1 change: 1 addition & 0 deletions backend/app/celery/beat.py
@@ -1,6 +1,7 @@
"""
Celery beat scheduler for cron jobs.
"""

import logging
from celery import Celery
from app.celery.celery_app import celery_app
1 change: 1 addition & 0 deletions backend/app/celery/utils.py
@@ -2,6 +2,7 @@
Utility functions for easy Celery integration across the application.
Business logic modules can use these functions without knowing Celery internals.
"""

import logging
from typing import Any, Dict, Optional
from celery.result import AsyncResult
1 change: 1 addition & 0 deletions backend/app/celery/worker.py
@@ -1,6 +1,7 @@
"""
Celery worker management script.
"""

import logging
import multiprocessing
from celery.bin import worker
2 changes: 1 addition & 1 deletion backend/app/cli/bench/commands.py
@@ -210,7 +210,7 @@ def send_benchmark_request(
)
else:
typer.echo(response.text)
typer.echo(f"[{i+1}/{total}] FAILED - Status: {response.status_code}")
typer.echo(f"[{i + 1}/{total}] FAILED - Status: {response.status_code}")
raise Exception(f"Request failed with status code {response.status_code}")


1 change: 1 addition & 0 deletions backend/app/core/providers.py
@@ -12,6 +12,7 @@ class Provider(str, Enum):
OPENAI = "openai"
AWS = "aws"
LANGFUSE = "langfuse"
GOOGLE = "google"


@dataclass