import requests
from datetime import datetime, timezone
from crawler.keyword_extractor import extract_keyword
from crawler.save_to_db import save_activities
from server.db import run_query

# Algolia multi-index query endpoint and request headers used by idealist.org's search.
ENDPOINT = "https://nsv3auess7-dsn.algolia.net/1/indexes/*/queries"
HEADERS = {
    "Content-Type": "application/json",
    "x-algolia-agent": "Algolia for JavaScript (5.20.0); Search (5.20.0); Browser",
    "x-algolia-api-key": "c2730ea10ab82787f2f3cc961e8c1e06",
    "x-algolia-application-id": "NSV3AUESS7"
}
# Fallback image used when a listing has no image of its own.
DEFAULT_IMAGE_URL = "https://www.idealist.org/assets/417d88fd628db1c1ac861f3ea8db58c1a159d52a/images/icons/action-opps/action-opps-volunteermatch.svg"

def get_last_timestamp():
    """Return the Unix timestamp of the most recent stored IDEALIST activity, or 0 if there is none."""
    sql = """
    SELECT start_date
    FROM activities
    WHERE activity_site = 'IDEALIST'
    ORDER BY start_date DESC
    LIMIT 1;
    """
    last_timestamp = run_query(sql)

    if last_timestamp:
        # The stored datetime is naive; treat it as UTC when converting to a Unix timestamp.
        dt = last_timestamp[0][0].replace(tzinfo=timezone.utc)
        return int(dt.timestamp())
    else:
        return 0

def build_payload(page, type='volunteer', timestamp=0):
    # Volunteer listings live in the action-opps index; internships in the main listings index.
    # Only items published after `timestamp` are requested, so re-runs crawl incrementally.
    if type == 'volunteer':
        filters = f"actionType:'VOLOP' AND published > {timestamp}"
        index_name = "idealist7-production-action-opps"
    else:
        filters = f"type:'INTERNSHIP' AND published > {timestamp}"
        index_name = "idealist7-production"

    return {
        "requests": [
            {
                "indexName": index_name,
                "facets": ["*"],
                "hitsPerPage": 100,
                "attributesToSnippet": ["description:20"],
                "attributesToRetrieve": ["*"],
                "filters": filters,
                "removeStopWords": True,
                "ignorePlurals": True,
                "advancedSyntax": True,
                "queryLanguages": ["en"],
                "page": page,
                "query": "",
                "getRankingInfo": True,
                "clickAnalytics": True,
                "analytics": True
            }
        ]
    }
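
# For reference, a sketch of the response shape this payload yields from Algolia's
# multi-query endpoint (field values are illustrative, not taken from a real response);
# get_activities() below reads results[0]["hits"]:
#
#   {
#       "results": [
#           {
#               "hits": [{"name": "...", "description": "...", "published": 1700000000, "url": {...}}],
#               "page": 0,
#               "nbPages": 3,
#               "hitsPerPage": 100
#           }
#       ]
#   }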

def get_url(item):
    # "url" may be either a plain string or a locale-keyed dict of relative paths.
    url = item.get("url")
    if isinstance(url, str):
        return url
    elif isinstance(url, dict):
        return "https://www.idealist.org" + next(iter(url.values()), "")
    return ""

def get_image(item):
    return item.get("imageUrl") or DEFAULT_IMAGE_URL

def get_published(item):
    # "published" is a Unix timestamp (seconds, UTC).
    timestamp = item.get("published")
    return datetime.fromtimestamp(timestamp, tz=timezone.utc).strftime('%Y-%m-%d %H:%M:%S.%f')
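
# Illustrative examples for the helpers above (inputs are assumed shapes, not real data):
#
#   get_url({"url": {"en": "/en/volop/example-listing"}})
#   -> "https://www.idealist.org/en/volop/example-listing"
#
#   get_published({"published": 1700000000})
#   -> "2023-11-14 22:13:20.000000"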

def get_activities(page, timestamp, type):
    """Fetch one page of hits and normalize them into activity dicts; return None when the page is empty or unparsable."""
    payload = build_payload(page, type, timestamp)
    response = requests.post(ENDPOINT, headers=HEADERS, json=payload)

    try:
        data = response.json()["results"][0]["hits"]
    except Exception as e:
        print(f"[!] JSON parsing error: {e}")
        return None

    result = []

    if data:
        for item in data:
            activity_type = "VOLUNTEER" if type == 'volunteer' else "INTERNSHIP"
            activity_content = item.get("description")
            activity_name = item.get("name")
            activity_image_url = get_image(item)
            activity_url = get_url(item)
            start_date = get_published(item)
            end_date = None
            keyword = extract_keyword(activity_content)

            result.append(
                {
                    "activity_site": "IDEALIST",
                    "activity_type": activity_type,
                    "activity_content": activity_content,
                    "end_date": end_date,
                    "activity_image_url": activity_image_url,
                    "keyword": keyword,
                    "activity_name": activity_name,
                    "site_url": activity_url,
                    "start_date": start_date
                }
            )
            print(f"[IDEALIST] Crawled: {item.get('name', '')}")
        return result
    else:
        # An empty page signals the caller to stop paginating.
        return None

def crawl():
    print("[IDEALIST] Starting crawl")
    crawled_activities = []
    last_timestamp = get_last_timestamp()

    if last_timestamp > 0:
        print(f"[IDEALIST] Crawling only activities newer than the last one in the DB (TIMESTAMP: {last_timestamp})")
    else:
        print("[IDEALIST] No activities in the DB; crawling everything")

    for type in ['volunteer', 'internship']:
        page = 0
        while True:
            activities = get_activities(page, last_timestamp, type)
            if not activities:
                break
            crawled_activities.extend(activities)
            page += 1

    if crawled_activities:
        print(f"[IDEALIST] Crawl finished: collected {len(crawled_activities)} activities.")
        save_activities(crawled_activities)
    else:
        print("[IDEALIST] Crawl finished: no new activities.")

if __name__ == "__main__":
    crawl()
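
# Quick sanity check (illustrative; does not hit the API or the DB):
#
#   payload = build_payload(0, 'internship', 1700000000)
#   assert payload["requests"][0]["indexName"] == "idealist7-production"
#   assert "published > 1700000000" in payload["requests"][0]["filters"]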