-
Notifications
You must be signed in to change notification settings - Fork 104
/
Copy pathjs_scrape.py
61 lines (36 loc) · 1.09 KB
/
js_scrape.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
import os
import shutil
import time
from urllib.parse import urljoin

import requests
from bs4 import BeautifulSoup
from selenium import webdriver
# Compare a plain HTTP fetch with a JS-rendered page: requests sees only the
# server-sent HTML, while Selenium executes the page's JavaScript first, so
# the <img> tags it finds may differ. Downloads every image it sees into
# ./images/, polling the live DOM 10 times with a 5 s pause between passes.
url = "http://www.chrisburkard.com/"

# Static fetch for comparison: shows what a non-JS client receives.
web_r = requests.get(url)
web_soup = BeautifulSoup(web_r.text, 'html.parser')
print(web_soup.findAll("img"))
#<img src=''/>

driver = webdriver.Firefox()
try:
    driver.get(url)

    # Create the target directory up front; the original never made it, so
    # every open() failed and the bare `except: pass` silently hid that —
    # likely no file was ever written.
    images_dir = os.path.join(os.getcwd(), "images")
    os.makedirs(images_dir, exist_ok=True)

    # Poll the rendered DOM 10 times (content may keep loading via JS).
    for _ in range(10):
        html = driver.execute_script("return document.documentElement.outerHTML")
        sel_soup = BeautifulSoup(html, 'html.parser')
        print(len(sel_soup.findAll("img")))

        # Skip <img> tags with no/empty src (the old i["src"] raised
        # KeyError) and resolve relative paths against the page URL so
        # requests can actually fetch them.
        images = [urljoin(url, tag["src"])
                  for tag in sel_soup.findAll("img")
                  if tag.get("src")]
        print(images)

        for img in images:
            file_name = os.path.basename(img)
            if not file_name:
                continue  # src ended in "/" — nothing sensible to name the file
            new_path = os.path.join(images_dir, file_name)
            try:
                # Stream straight to disk; the context manager closes the
                # response (the original's `del img_r` did not).
                with requests.get(img, stream=True) as img_r:
                    img_r.raise_for_status()
                    with open(new_path, "wb") as output_file:
                        shutil.copyfileobj(img_r.raw, output_file)
            except requests.RequestException as exc:
                # Best-effort download: report the failure but keep going
                # instead of silently swallowing every error.
                print(f"failed to download {img}: {exc}")

        time.sleep(5)
finally:
    driver.quit()  # the original leaked the Firefox process