-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathsmart_fetcher.py
More file actions
56 lines (48 loc) · 1.66 KB
/
smart_fetcher.py
File metadata and controls
56 lines (48 loc) · 1.66 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
import asyncio
import json
import urllib.robotparser as rp
from urllib.parse import urljoin, urlsplit

import requests
from fake_useragent import UserAgent
from playwright.async_api import async_playwright
# Single shared UserAgent factory; `.random` is read per request in the
# fetchers below to vary the User-Agent header between requests.
ua = UserAgent()
def robots_allowed(url):
    """Return True if robots.txt permits any user agent ("*") to fetch *url*.

    Fails open: if robots.txt cannot be retrieved or parsed, crawling is
    assumed to be allowed.
    """
    # Derive scheme://host with urlsplit instead of the fragile
    # "/".join(url.split("/")[:3]) string hack.
    parts = urlsplit(url)
    base = f"{parts.scheme}://{parts.netloc}"
    parser = rp.RobotFileParser()
    parser.set_url(urljoin(base, "/robots.txt"))
    try:
        parser.read()
        return parser.can_fetch("*", url)
    except Exception:
        # Fail open on network/parse errors. A bare `except:` here would
        # also have swallowed KeyboardInterrupt/SystemExit.
        return True
async def fetch_dynamic(url, timeout=20):
    """Render *url* in headless Chromium and return the resulting HTML.

    Parameters:
        url: page to fetch.
        timeout: navigation timeout in seconds (Playwright's goto takes
            milliseconds, hence the * 1000).

    Returns the rendered page HTML, or None on any error (best-effort:
    failures are printed, never raised).
    """
    try:
        async with async_playwright() as p:
            browser = await p.chromium.launch(headless=True)
            try:
                page = await browser.new_page(user_agent=ua.random)
                await page.goto(url, timeout=timeout * 1000)
                html = await page.content()
            finally:
                # Close the browser even when goto/content raises; the
                # original left a failed navigation's browser open until
                # the async_playwright context tore it down.
                await browser.close()
            return html
    except Exception as e:
        print(f"[JS Fetch ERROR] {url}: {e}")
        return None
def fetch_static(url, timeout=10):
    """Fetch *url* with a plain HTTP GET (no JavaScript execution).

    Parameters:
        url: page to fetch.
        timeout: request timeout in seconds.

    Returns the response body on HTTP 200, or None on any error or
    non-200 status (best-effort: failures are printed, never raised).
    """
    try:
        headers = {"User-Agent": ua.random}
        r = requests.get(url, headers=headers, timeout=timeout)
        if r.status_code == 200:
            return r.text
        # Previously a non-200 response was dropped silently; log it so
        # failures are diagnosable, matching the except branch's style.
        print(f"[Static Fetch ERROR] {url}: HTTP {r.status_code}")
    except Exception as e:
        print(f"[Static Fetch ERROR] {url}: {e}")
    return None
async def smart_fetch(url):
    """Fetch *url*, trying a cheap static GET first and falling back to a
    headless browser for JS-rendered pages.

    Returns a dict with keys: "url", "status" ("ok" | "blocked_by_robots" |
    "failed"), "content" (HTML or None) and, on success, "method"
    ("static" | "js_rendered").
    """
    # robots_allowed and fetch_static do blocking network I/O (urllib /
    # requests); run them in worker threads so the event loop is not
    # stalled while waiting on the network.
    if not await asyncio.to_thread(robots_allowed, url):
        return {"url": url, "status": "blocked_by_robots", "content": None}
    html = await asyncio.to_thread(fetch_static, url)
    if html:
        return {"url": url, "status": "ok", "method": "static", "content": html}
    html = await fetch_dynamic(url)
    if html:
        return {"url": url, "status": "ok", "method": "js_rendered", "content": html}
    return {"url": url, "status": "failed", "content": None}