Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ authors = [{ name = "Your Name", email = "you@example.com" }]
requires-python = ">=3.10,<3.14"
dependencies = [
"colorama>=0.4.6",
"crewai[tools]>=0.134.0,<1.0.0",
"crewai[tools]==0.201.1",
"streamlit>=1.44.1",
]

Expand Down
26 changes: 3 additions & 23 deletions src/conversational_routing/crews/assistant_crew/assistant_crew.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,6 @@
import os
import glob
from crewai import Agent, Crew, Process, Task, LLM
from crewai.project import CrewBase, agent, crew, task
from crewai.knowledge.source.text_file_knowledge_source import TextFileKnowledgeSource
from crewai.knowledge.knowledge_config import KnowledgeConfig
from pathlib import Path


@CrewBase
class AssistantCrew:
Expand All @@ -16,30 +12,14 @@ class AssistantCrew:

# LLM Configuration
llm = LLM(
model="groq/meta-llama/llama-4-scout-17b-16e-instruct",
model="gpt-4o-mini",
temperature=0.1,
)

@agent
def crewai_expert_agent(self) -> Agent:
# Knowledge Configuration
# Define base path to current file
knowledge_base_path = Path(__file__).parent / "knowledge"

# Prepare the knowledge base for the OSS Framework
files = glob.glob(os.path.join(knowledge_base_path, "oss-docs/**/*.mdx"), recursive=True)
# Convert file strings to Path objects
knowledge_file_paths = [Path(file) for file in files]

return Agent(
config=self.agents_config["crewai_expert_agent"],
knowledge_sources=[TextFileKnowledgeSource(
file_paths=knowledge_file_paths,
metadata={
"category": "CrewAI",
},
)],
knowledge_config=KnowledgeConfig(results_limit=5, score_threshold=0.7),
llm=self.llm,
)

Expand All @@ -57,5 +37,5 @@ def crew(self) -> Crew:
tasks=self.tasks, # Automatically created by the @task decorator
process=Process.sequential,
verbose=True,
memory=True,
# memory=True, # Enables short-term, long-term, and entity memory
)
19 changes: 7 additions & 12 deletions src/conversational_routing/crews/assistant_crew/config/agents.yaml
Original file line number Diff line number Diff line change
@@ -1,16 +1,11 @@
crewai_expert_agent:
role: >
CrewAI Support Expert
Helpful AI Assistant
goal: >
Provide concise (less than a 500 characters unless there is a code example) and accurate information about the CrewAI framework, answering user questions and guiding them in its effective use.
Provide code snippets if required.
Have natural conversations with users. Be friendly and helpful.
If users ask about CrewAI, provide accurate information and code examples.
If users ask about other topics, respond naturally and conversationally.
backstory: >
You are a helpful and knowledgeable CrewAI expert.
You have extensive experience with the framework and its various components.
You are dedicated to assisting users in understanding and utilizing CrewAI for their agentic AI projects.
You are familiar with the latest features, best practices, and community resources related to CrewAI.
You are passionate about helping others build successful agentic AI solutions using CrewAI.
You stay up-to-date with the latest developments in the CrewAI ecosystem.
You are adept at explaining complex concepts in a clear and concise manner.
You are patient and willing to go the extra mile to ensure users grasp the intricacies of CrewAI.
You are always eager to share your expertise and contribute to the growth of the CrewAI community.
You are a friendly AI assistant who can chat about various topics.
You're particularly knowledgeable about CrewAI but happy to have casual conversations too.
You remember what users tell you during the conversation.
Original file line number Diff line number Diff line change
@@ -1,11 +1,11 @@
answer_crewai_questions_task:
agent: crewai_expert_agent
description: >
Respond to user questions about the CrewAI framework.
Have a natural conversation with the user. Respond appropriately to their message.
Use the context provided to maintain conversation continuity.

Previous context summary: {conversation_summary}
Recent messages: {conversation_history}
Current message: {current_message}
Conversation history: {conversation_history}
expected_output: >
If required:
- Provide links to relevant documentation
- Offer practical examples
A helpful, conversational response that addresses what the user said.
60 changes: 59 additions & 1 deletion src/conversational_routing/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,21 +4,79 @@
import json

from crewai.flow import Flow, start, persist
from litellm import completion

from src.conversational_routing.crews.assistant_crew.assistant_crew import AssistantCrew


class ChatState(BaseModel):
    """State carried across the chat flow: the message being answered plus rolling context.

    Older messages are periodically compressed into ``conversation_summary`` so the
    prompt stays bounded while recent turns remain verbatim.
    """
    # Latest user message to respond to.
    current_message: str = ""
    # Recent turns kept in full; presumably dicts shaped like
    # {"role": ..., "content": ...} — confirm against the caller that populates it.
    # NOTE(review): mutable default is safe here — pydantic copies field defaults
    # per instance, unlike plain class attributes.
    conversation_history: List[dict] = []
    conversation_summary: str = ""  # Running summary of older context
    summary_threshold: int = 8  # Summarize when history exceeds this
    keep_recent: int = 4  # Keep this many recent messages in full

@persist()
class ChatFlow(Flow[ChatState]):

def _summarize_conversation(self) -> None:
    """Compress older history into a running summary once it outgrows the threshold.

    The most recent ``keep_recent`` messages stay verbatim; everything older is
    folded into ``state.conversation_summary`` via a cheap LLM call, and the
    summarized messages are dropped from ``state.conversation_history``.
    """
    state = self.state
    history = state.conversation_history

    # Below the threshold there is nothing to compress yet.
    if len(history) <= state.summary_threshold:
        return

    # Everything before this cutoff gets summarized; the tail stays verbatim.
    cutoff = len(history) - state.keep_recent
    if cutoff <= 0:
        return

    stale, recent = history[:cutoff], history[cutoff:]

    # Render the stale turns as a plain-text transcript for the summarizer.
    transcript = "\n".join(
        f"{message['role'].upper()}: {message['content']}" for message in stale
    )

    # Fold any earlier summary into the prompt so context accumulates.
    prior = (
        f"Previous summary: {state.conversation_summary}\n\n"
        if state.conversation_summary
        else ""
    )

    summary_prompt = f"""Summarize the following conversation context concisely, preserving key facts, user preferences, and important details mentioned. Keep it under 200 words.

{prior}New messages to incorporate:
{transcript}

Concise summary:"""

    # Lightweight model call — this is bookkeeping, not the user-facing answer.
    reply = completion(
        model="gpt-4o-mini",
        messages=[{"role": "user", "content": summary_prompt}],
        temperature=0.3,
        max_tokens=300
    )

    # Persist the refreshed summary and trim the now-summarized messages.
    state.conversation_summary = reply.choices[0].message.content
    state.conversation_history = recent

@start()
def answer_message(self):
# Check if we need to summarize before processing
self._summarize_conversation()

# Here define the crew that will respond to the user message
assistant_crew = AssistantCrew().crew().kickoff({
"current_message": self.state.current_message,
"conversation_history": self.state.conversation_history
"conversation_history": self.state.conversation_history,
"conversation_summary": self.state.conversation_summary
})

response = assistant_crew.raw
Expand Down
Loading