-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathConversationManager.py
More file actions
252 lines (195 loc) · 8.96 KB
/
ConversationManager.py
File metadata and controls
252 lines (195 loc) · 8.96 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
"""
Conversation Management Module
This module provides conversation tracking and summarization functionality for the RAG system.
It maintains conversation history, generates summaries, and provides context for improved
continuity across multiple interactions.
Key Features:
- Conversation tracking with configurable storage limits
- Automatic summarization of Q&A exchanges
- Cumulative conversation summaries
- Memory management with oldest-first removal
- Integration with RAG pipeline for context-aware responses
Author: Professional Development Team
Version: 1.0.0
"""
import time
from typing import List, Dict
from Configurator import AppConfig
from llmhandler import LLMHandler
class ConversationManager:
    """
    Manage conversation tracking, summarization, and context for the RAG system.

    Stores one entry per Q&A exchange (question, response, per-exchange summary,
    running cumulative summary, timestamp) in ``config.conversation.conversations``,
    and enforces a configurable retention cap by discarding the oldest entries
    (lowest ``SerialNumber``) first.
    """

    def __init__(self, config: AppConfig):
        """
        Initialize the conversation manager.

        Args:
            config (AppConfig): Application configuration; its ``conversation``
                section supplies ``conversation_mood`` (on/off flag),
                ``number_of_conversations_to_store`` (retention cap), and the
                ``conversations`` list used as backing storage.
        """
        self.config = config
        self.conversation_config = config.conversation
        self.llm_handler = LLMHandler()
        # Ensure the backing list exists: the config may omit the attribute
        # entirely or carry an explicit None — both become an empty list.
        if getattr(self.conversation_config, 'conversations', None) is None:
            self.conversation_config.conversations = []

    def is_conversation_enabled(self) -> bool:
        """
        Check if conversation tracking is enabled.

        Returns:
            bool: True if conversation mode is enabled, False otherwise.
        """
        return self.conversation_config.conversation_mood

    async def add_conversation(self, user_question: str, llm_response: str) -> Dict:
        """
        Add a new conversation entry with summarization.

        Args:
            user_question (str): The question asked by the user.
            llm_response (str): The response generated by the LLM.

        Returns:
            Dict: The complete conversation entry with summaries, or an empty
                dict when conversation tracking is disabled.
        """
        if not self.is_conversation_enabled():
            return {}

        # Next serial number: highest existing + 1, or 1 for an empty history.
        serial_number = max(
            (conv.get('SerialNumber', 0) for conv in self.conversation_config.conversations),
            default=0,
        ) + 1

        # Summarize this exchange, then fold it into the running summary.
        current_qa_summary = await self._generate_qa_summary(user_question, llm_response)
        conversation_summary_so_far = await self._generate_cumulative_summary(
            user_question, llm_response, current_qa_summary
        )

        conversation_entry = {
            "SerialNumber": serial_number,
            "UserQuestion": user_question,
            "LLMResponse": llm_response,
            "CurrentQASummary": current_qa_summary,
            "ConversationSummarySoFar": conversation_summary_so_far,
            "Timestamp": time.time()
        }

        self.conversation_config.conversations.append(conversation_entry)
        # Drop oldest entries if the retention cap is now exceeded.
        self._manage_storage_limit()
        return conversation_entry

    def get_conversation_context(self) -> str:
        """
        Get the conversation context for LLM processing.

        Returns:
            str: Formatted conversation context built from the newest entry's
                cumulative summary, or an empty string when tracking is
                disabled, no history exists, or the summary is empty.
        """
        if not self.is_conversation_enabled() or not self.conversation_config.conversations:
            return ""
        # The newest entry carries the cumulative summary of the whole history.
        latest_conversation = self.conversation_config.conversations[-1]
        conversation_summary = latest_conversation.get("ConversationSummarySoFar", "")
        if conversation_summary:
            return f"Previous conversation context:\n{conversation_summary}\n\n"
        return ""

    def get_all_conversations(self) -> List[Dict]:
        """
        Get all stored conversations.

        Returns:
            List[Dict]: A shallow copy of the stored conversation entries.
        """
        return self.conversation_config.conversations.copy()

    def clear_conversations(self) -> None:
        """
        Clear all stored conversations.
        """
        self.conversation_config.conversations = []

    def get_conversation_stats(self) -> Dict:
        """
        Get conversation statistics.

        Returns:
            Dict: Enabled flag, stored/maximum counts, and the oldest and
                newest serial numbers (0 when no conversations are stored).
        """
        conversations = self.conversation_config.conversations
        serials = [conv.get('SerialNumber', 0) for conv in conversations]
        return {
            "conversation_enabled": self.is_conversation_enabled(),
            "total_conversations": len(conversations),
            "max_conversations": self.conversation_config.number_of_conversations_to_store,
            "oldest_serial": min(serials, default=0),
            "newest_serial": max(serials, default=0)
        }

    async def _generate_qa_summary(self, question: str, response: str) -> str:
        """
        Generate a summary of the current Q&A exchange via the LLM.

        Args:
            question (str): User's question.
            response (str): LLM's response.

        Returns:
            str: Summary of the Q&A exchange, or a failure message if the
                LLM call raises (errors are reported in-band, not raised).
        """
        prompt = f"""Please provide a concise summary of this Q&A exchange:
Question: {question}
Response: {response}
Provide a brief summary (2-3 sentences) that captures the key points discussed."""
        try:
            summary = await self.llm_handler.process_request(prompt)
            return summary.strip()
        except Exception as e:
            return f"Summary generation failed: {str(e)}"

    async def _generate_cumulative_summary(self, question: str, response: str, current_summary: str) -> str:
        """
        Generate a cumulative summary of the entire conversation.

        Args:
            question (str): Current user's question.
            response (str): Current LLM's response.
            current_summary (str): Summary of the current Q&A.

        Returns:
            str: Updated cumulative conversation summary; for the first
                exchange this is just ``current_summary``, and LLM failures
                are reported in-band as a failure message.
        """
        # Seed from the newest entry's cumulative summary, if any exists.
        previous_summary = ""
        if len(self.conversation_config.conversations) > 0:
            previous_summary = self.conversation_config.conversations[-1].get("ConversationSummarySoFar", "")
        if not previous_summary:
            # First conversation: nothing to merge with.
            return current_summary
        prompt = f"""Please update the conversation summary with the new Q&A exchange:
Previous conversation summary:
{previous_summary}
New Q&A exchange:
Question: {question}
Response: {response}
Current Q&A summary: {current_summary}
Provide an updated cumulative summary that incorporates the new information while maintaining context from previous exchanges. Keep it concise but comprehensive."""
        try:
            updated_summary = await self.llm_handler.process_request(prompt)
            return updated_summary.strip()
        except Exception as e:
            return f"Cumulative summary generation failed: {str(e)}"

    def _manage_storage_limit(self) -> None:
        """
        Enforce the retention cap by removing the oldest conversations.

        Entries are ordered by ``SerialNumber`` ascending and the excess
        oldest entries are dropped once the configured limit is exceeded.
        """
        max_conversations = self.conversation_config.number_of_conversations_to_store
        conversations = self.conversation_config.conversations
        if len(conversations) > max_conversations:
            ordered = sorted(conversations, key=lambda conv: conv.get('SerialNumber', 0))
            # Slice from the front so a cap of 0 correctly empties the list.
            excess = len(ordered) - max_conversations
            self.conversation_config.conversations = ordered[excess:]

    def save_conversations_to_config(self) -> None:
        """
        Save conversations back to the configuration.

        This method can be used to persist conversations to keys.json if
        needed; currently a no-op because the entries already live on the
        shared config object.
        """
        pass
# Convenience function for direct usage
def create_conversation_manager(config: AppConfig) -> ConversationManager:
    """
    Build and return a conversation manager wired to the given configuration.

    Args:
        config (AppConfig): Application configuration.

    Returns:
        ConversationManager: A manager initialized from ``config``.
    """
    manager = ConversationManager(config)
    return manager