-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathhello.py
More file actions
160 lines (132 loc) · 6.57 KB
/
hello.py
File metadata and controls
160 lines (132 loc) · 6.57 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
import requests
from bs4 import BeautifulSoup
import ollama
import re
import json
OLLAMA_API = "http://localhost:11434/api/chat"  # Local Ollama chat endpoint — NOTE(review): unused below; the `ollama` client resolves its own endpoint
HEADERS = {"Content-Type": "application/json"}  # NOTE(review): unused in the visible code — presumably kept for raw `requests` calls to OLLAMA_API
MODEL = "llama3.2"  # Model name passed to every ollama.chat() call
class Website:
    """
    A utility class to represent a Website that we have scraped, now with links.

    Attributes:
        url:   The URL that was requested.
        body:  Raw response bytes (b"" if the fetch failed).
        title: The page <title> text, or a placeholder string.
        text:  Visible body text with script/style/img/input elements removed.
        links: Every non-empty href found in <a> tags (may be relative URLs).
    """

    def __init__(self, url):
        self.url = url
        try:
            response = requests.get(url, timeout=10)  # timeout prevents indefinite hangs
            response.raise_for_status()  # turn HTTP 4xx/5xx into exceptions
            self.body = response.content
            soup = BeautifulSoup(self.body, 'html.parser')
            self.title = soup.title.string if soup.title else "No title found"
            if soup.body:
                # Strip elements that contribute no useful prompt text.
                for irrelevant in soup.body(["script", "style", "img", "input"]):
                    irrelevant.decompose()
                self.text = soup.body.get_text(separator="\n", strip=True)
            else:
                self.text = ""
            links = [link.get('href') for link in soup.find_all('a')]
            self.links = [link for link in links if link]
        except requests.exceptions.RequestException as e:
            # Fail soft so callers can still use the instance after a bad fetch.
            print(f"Error fetching URL {url}: {e}")
            self.body = b""  # BUG FIX: was left unset on error, so later attribute access raised
            self.title = "No title found (Error fetching page)"
            self.text = ""
            self.links = []

    def get_contents(self):
        """Return title and text formatted for inclusion in an LLM prompt."""
        return f"Webpage Title:\n{self.title}\nWebpage Contents:\n{self.text}\n\n"
# System prompt for the link-selection call: the model must return JSON naming
# which scraped links belong in a company brochure.
# BUG FIX: the second example entry was invalid JSON
# ({"type": "careers page": "url": ...} — a colon where a comma belongs),
# which taught the model a malformed output format.
link_system_prompt = (
    "You are provided with a list of links found on a webpage. "
    "You are able to decide which of the links would be most relevant to include in a brochure about the company, "
    "such as links to an About page, or a Company page, or Careers/Jobs pages, and more.\n"
    "You should respond in JSON as in this example:"
    """
{
    "links": [
        {"type": "about page", "url": "https://full.url/goes/here/about"},
        {"type": "careers page", "url": "https://another.full.url/careers"}
    ]
}
"""
)
def get_links_user_prompt(website):
    """Build the user prompt asking the model to pick brochure-relevant links.

    `website` must expose `.url` (str) and `.links` (list of href strings).
    """
    pieces = [
        f"Here is the list of links on the website of {website.url} - ",
        "please decide which of these are relevant web links for a brochure about the company, "
        "respond with the full https URL in JSON format. "
        "Do not include Terms of Service, Privacy, email links.\n",
        "Links (some might be relative links):\n",
        "\n".join(website.links),
    ]
    return "".join(pieces)
def get_links(url):
    """Ask the LLM which links on `url` matter for a brochure.

    Returns the parsed JSON dict (expected shape: {"links": [{"type", "url"}, ...]}).
    Raises if the response structure is unexpected or contains no parseable JSON.
    """
    site = Website(url)
    chat_messages = [
        {"role": "system", "content": link_system_prompt},
        {"role": "user", "content": get_links_user_prompt(site)},
    ]
    response = ollama.chat(model=MODEL, messages=chat_messages)
    # Debug the response structure
    print("Response structure:", response)
    try:
        # Guard clause: bail out early on an unexpected response shape.
        if not (hasattr(response, 'message') and hasattr(response.message, 'content')):
            raise AttributeError("Unexpected response structure. Missing 'message.content' attribute.")
        raw_content = response.message.content
        try:
            # Happy path: the whole reply is JSON.
            parsed = json.loads(raw_content)
        except json.JSONDecodeError:
            # Fallback: pull the outermost {...} span out of surrounding chatter.
            match = re.search(r"({.*})", raw_content, re.DOTALL)
            if match is None:
                raise ValueError("No valid JSON found in the response content.")
            parsed = json.loads(match.group(1).strip())
    except Exception as e:
        print(f"Error processing response content: {e}")
        raise
    return parsed
def get_all_details(url):
    """Collect the landing page text plus every LLM-selected subpage's text.

    Non-HTTP(S) links (mailto:, relative paths, etc.) are skipped; per-link
    fetch errors are logged and do not abort the remaining links.
    """
    sections = ["Landing page:\n", Website(url).get_contents()]
    selected = get_links(url)
    print("Found links:", selected)
    for entry in selected["links"]:
        target = entry["url"]
        if not target.startswith(("http://", "https://")):
            print(f"Skipping non-HTTP link: {target}")
            continue
        try:
            sections.append(f"\n\n{entry['type']}\n")
            sections.append(Website(target).get_contents())
        except Exception as e:
            print(f"Error processing link {target}: {e}")
    return "".join(sections)
# System prompt for the brochure-writing call: fixes the tone (professional),
# the output format (Markdown), and which sections to include or exclude.
system_prompt = (
    "You are a professional marketing assistant tasked with creating a short, polished brochure for a company. "
    "Analyze the provided website content and generate a brochure in Markdown format. "
    "The brochure should include key sections such as 'About Us,' 'Our Mission,' 'Careers,' 'Research,' 'Contact Information,' "
    "and other relevant details that appeal to customers, investors, and recruits. "
    "Do not include technical issues, support content, or unrelated status updates. "
    "Use concise and professional language, and format the brochure with Markdown syntax (e.g., headings, bullet points, and links)."
)
# Or uncomment the lines below for a more humorous brochure - this demonstrates how easy it is to incorporate 'tone':
# system_prompt = "You are an assistant that analyzes the contents of several relevant pages from a company website \
# and creates a short humorous, entertaining, jokey brochure about the company for prospective customers, investors and recruits. Respond in markdown.\
# Include details of company culture, customers and careers/jobs if you have the information."
def get_brochure_user_prompt(company_name, url):
    """Assemble the brochure-generation user prompt, capped at 20,000 chars.

    Embeds the full scraped site content from get_all_details(url).
    """
    parts = [
        f"You are creating a brochure for the company: {company_name}.\n",
        "Below is the content extracted from the company's website, organized by relevant sections. ",
        "Use this content to generate a Markdown brochure with headings and links, focusing on key sections:\n\n",
        get_all_details(url),
        "\n\nEnsure that the brochure includes only relevant sections such as 'About Us,' 'Our Mission,' "
        "'Careers,' 'Research,' and 'Contact Information.' Exclude technical issues, support, and status updates.",
    ]
    # Truncate so the prompt stays within a safe context-window budget.
    return "".join(parts)[:20_000]
def create_brochure(company_name, url):
    """Stream a Markdown brochure for `company_name` from the LLM.

    Prints each chunk as it arrives and returns the accumulated Markdown.
    BUG FIX: the original accumulated `brochure_content` but never returned it,
    so the generated brochure was discarded after printing.
    """
    messages = [
        {"role": "system", "content": system_prompt},
        {"role": "user", "content": get_brochure_user_prompt(company_name, url)}
    ]
    response = ollama.chat(model=MODEL, messages=messages, stream=True)
    brochure_content = ""
    for chunk in response:
        # Skip malformed stream chunks that lack message.content.
        if hasattr(chunk, 'message') and hasattr(chunk.message, 'content'):
            brochure_content += chunk.message.content
            print(chunk.message.content, end="")
    return brochure_content


# Guarded so importing this module no longer triggers a network call.
if __name__ == "__main__":
    create_brochure("Anthropic", "https://anthropic.com")