forked from enghamzasalem/SouqScraper

Scraper.py

import requests
from bs4 import BeautifulSoup
import json
import csv

# Output files: one CSV and one JSON dump of the scraped Apple listings.
filecsv = open('SouqDataapple.csv', 'w', encoding='utf8', newline='')
file = open('SouqDataapple.json', 'w', encoding='utf8')

# Base URL of the Souq search results to scrape; the page number is appended below.
url = 'https://saudi.souq.com/sa-ar/apple/new/a-c/s/?section=2&page='

data = []
csv_columns = ['name', 'price', 'img']
writer = csv.DictWriter(filecsv, fieldnames=csv_columns)
writer.writeheader()

for page in range(3):
    print('---', page, '---')
    r = requests.get(url + str(page))
    print(url + str(page))
    soup = BeautifulSoup(r.content, "html.parser")
    # Each product card sits in its own grid column on the results page.
    anchors = soup.find_all('div', {'class': 'column column-block block-grid-large single-item'})
    for pt in anchors:
        name = pt.find('h6', {'class': 'title itemTitle'})
        price = pt.find('span', {'class': 'itemPrice'})
        img = pt.find('img')  # assumed: the product thumbnail is the card's first <img> tag
        if name is None or price is None:
            continue  # skip cards that are missing either field
        item = {
            'name': name.text.strip(),
            'price': price.text.strip(),
            'img': img.get('src') if img else '',
        }
        print(item['name'])
        print(item['price'])
        writer.writerow(item)
        data.append(item)

# Write all collected items as a single JSON array.
json.dump(data, file, ensure_ascii=False, indent=2)
filecsv.close()
file.close()