-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathagent.py
More file actions
70 lines (52 loc) · 2.07 KB
/
agent.py
File metadata and controls
70 lines (52 loc) · 2.07 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
import chromadb
from sentence_transformers import SentenceTransformer
import anthropic
import os
# SECURITY: never commit a real API key to source control. Prefer exporting
# ANTHROPIC_API_KEY in the shell before running this script; setdefault keeps
# an externally supplied key intact instead of clobbering it with the
# placeholder below.
os.environ.setdefault("ANTHROPIC_API_KEY", "your_key_here")
def search(query, collection, model, n_results=3):
    """Retrieve the top matching policy chunks for a customer query.

    Embeds *query* with the sentence-transformer *model*, runs a vector
    similarity search against the Chroma *collection*, and returns the
    matched document texts.

    Args:
        query: Customer question as plain text.
        collection: ChromaDB collection holding the policy documents.
        model: Embedding model exposing ``encode()``.
        n_results: How many chunks to retrieve (default 3).

    Returns:
        List of retrieved document strings for this single query.
    """
    # Embed once, then search by vector; Chroma expects a *list* of
    # embeddings and groups its results per input embedding.
    embedding = model.encode(query).tolist()
    hits = collection.query(
        query_embeddings=[embedding],
        n_results=n_results,
    )
    # Only one embedding was sent, so the first (and only) result group
    # holds our documents.
    return hits['documents'][0]
def generate_response(query, retrieved_chunks):
    """Answer a customer question using only the retrieved policy text.

    Joins the retrieved chunks into a single context string and asks the
    Claude model to answer strictly from that context.

    Args:
        query: Customer question as plain text.
        retrieved_chunks: Document strings returned by the vector search.

    Returns:
        The model's answer as a string.
    """
    # Separate chunks with blank lines so the model can tell them apart.
    context = "\n\n".join(retrieved_chunks)
    prompt = f"""You are a helpful ecommerce customer support assistant.
Use ONLY the following policy documents to answer the customer's question.
If the answer is not in the documents, say 'I could not find this information in our policies.'
**POLICY DOCUMENTS:**
{context}
**CUSTOMER QUESTION:**
{query}
**Provide a clear, helpful answer based only on the above documents.**"""
    client = anthropic.Anthropic()
    response = client.messages.create(
        model="claude-opus-4-20250514",
        max_tokens=1024,
        messages=[{"role": "user", "content": prompt}],
    )
    # The first content block carries the assistant's text reply.
    return response.content[0].text
def main():
    """Run an interactive RAG support loop against the policy database.

    Loads the embedding model and the persisted Chroma collection once,
    then answers customer questions until the user types 'quit'.
    """
    # Load the embedding model and vector store once, outside the loop —
    # both are expensive to construct.
    print("Loading model and Database...")
    model = SentenceTransformer('all-MiniLM-L6-v2')
    client = chromadb.PersistentClient(path="./chroma_db")
    collection = client.get_collection("ecommerce_policies")
    print("RAG Agent ready. Type your question or 'quit' to exit.\n")
    while True:
        # strip() so stray whitespace doesn't defeat the 'quit' check.
        query = input('Customer:').strip()
        if query.lower() == 'quit':
            break
        if not query:
            # Skip blank lines instead of spending an embedding + API call.
            continue
        print("Thinking...")
        # fixed typo: was 'retrived_chunks'
        retrieved_chunks = search(query, collection, model)
        answer = generate_response(query, retrieved_chunks)
        print("Assistant:", answer)


if __name__ == "__main__":
    main()