webscrapepr.py
# webscrapepr.py
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
import bs4
from pyvirtualdisplay import Display
import time
import traceback
import asyncio


class Webscrapper:
    @staticmethod
    def __get_chrome_driver():
        # Build a Chrome driver; --no-sandbox is needed when Chrome runs
        # as root, e.g. inside a container.
        options = Options()
        options.add_argument("--no-sandbox")
        driver = webdriver.Chrome(options=options)
        return driver
    @staticmethod
    async def scrape(url, retries, wait_time, load_page_wait_time=3):
        # Returns the parsed page, or None if every attempt failed.
        soup = None
        while retries > 0:
            display = None
            driver = None
            try:
                # Run Chrome against a virtual X display so no real
                # screen is required.
                display = Display(visible=0, size=(1920, 1280))
                display.start()
                driver = Webscrapper.__get_chrome_driver()
                driver.get(url)
                # Give the page time to finish loading before parsing.
                await asyncio.sleep(load_page_wait_time)
                soup = bs4.BeautifulSoup(driver.page_source, features="html.parser")
                print(f"Scraped {url}")
                break
            except Exception as e:
                print(f"Error while scraping {url}\n{e}\nRetrying...")
                print(traceback.format_exc())
                await asyncio.sleep(wait_time)
                retries -= 1
            finally:
                # Always release the browser and the virtual display,
                # even when an attempt raises.
                if driver is not None:
                    driver.quit()
                if display is not None:
                    display.stop()
        return soup
    @staticmethod
    def get_or_create_eventloop():
        # asyncio.get_event_loop() raises in threads that have no loop;
        # create and register one in that case, re-raise anything else.
        try:
            return asyncio.get_event_loop()
        except RuntimeError as ex:
            if "There is no current event loop" in str(ex):
                loop = asyncio.new_event_loop()
                asyncio.set_event_loop(loop)
                return loop
            raise
    @staticmethod
    def scrape_elements(url, name, attrs, except_retries, none_retries, wait_time):
        loop = Webscrapper.get_or_create_eventloop()
        while none_retries > 0:
            soup = loop.run_until_complete(Webscrapper.scrape(url, except_retries, wait_time))
            # scrape() returns None when every attempt raised, so guard
            # before calling find_all on it.
            if soup is not None:
                elems = soup.find_all(name, attrs)
                if len(elems) > 0:
                    return elems
            print(f"None returned, retrying {url}")
            time.sleep(wait_time)
            none_retries -= 1
        return None
    @staticmethod
    def scrape_whole_page(url, retries, wait_time):
        loop = Webscrapper.get_or_create_eventloop()
        try:
            soup = loop.run_until_complete(Webscrapper.scrape(url, retries, wait_time))
        finally:
            # Unlike scrape_elements, this closes the loop; a fresh loop
            # is needed before further async calls in this thread.
            loop.close()
        return soup
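
A minimal usage sketch, not part of the original file: the URL, tag, and class filter below are hypothetical placeholders, assuming a page whose headlines sit in <div class="headline"> elements, and the retry counts and wait times are illustrative. It also assumes Chrome/chromedriver and Xvfb (for pyvirtualdisplay) are installed.

if __name__ == "__main__":
    # Hypothetical example: collect matching <div> elements from a page.
    headlines = Webscrapper.scrape_elements(
        "https://example.com/news",   # hypothetical target URL
        "div",                        # tag name passed to soup.find_all
        {"class": "headline"},        # hypothetical attribute filter
        except_retries=3,             # attempts inside scrape() on exceptions
        none_retries=2,               # attempts when no elements are found
        wait_time=5,                  # seconds between attempts
    )
    if headlines:
        for elem in headlines:
            print(elem.get_text(strip=True))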