supreme.py
'''
NERYS
supreme module

left to do:
- save products to sql db
- load products from sql db on startup
'''
import random
import time

import requests
from bs4 import BeautifulSoup as soup
from threading import Thread
from discord_hooks import Webhook

from log import log
class Product:
    def __init__(self, link, image, title="", stock=False):
        self.link = link
        self.image = image
        self.title = title
        self.stock = stock
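
# The module docstring's TODO asks for SQL persistence. Below is a minimal
# sketch of what that could look like with the standard-library sqlite3
# module; the "products" table name and schema are assumptions, not part of
# the original design, and nothing in this module calls these helpers yet.
import sqlite3

def save_products_to_db(path, products):
    # Persist the in-memory product dict to a sqlite database (sketch).
    conn = sqlite3.connect(path)
    conn.execute("CREATE TABLE IF NOT EXISTS products "
                 "(link TEXT PRIMARY KEY, image TEXT, title TEXT, stock INTEGER)")
    conn.executemany("INSERT OR REPLACE INTO products VALUES (?, ?, ?, ?)",
                     [(p.link, p.image, p.title, int(p.stock))
                      for p in products.values()])
    conn.commit()
    conn.close()

def load_products_from_db(path):
    # Rebuild the product dict from a sqlite database on startup (sketch).
    conn = sqlite3.connect(path)
    rows = conn.execute("SELECT link, image, title, stock FROM products").fetchall()
    conn.close()
    return {link: Product(link, image, title, bool(stock))
            for (link, image, title, stock) in rows}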
def read_from_txt(path):
    '''
    (str) -> list of str
    Loads all lines from the text file at <path> and returns them
    as a list of stripped strings.
    '''
    # Load data from the txt file
    try:
        with open(path, "r") as f:
            raw_lines = f.readlines()
    # Raise an error if the file couldn't be found
    except IOError:
        log('e', "Couldn't locate <" + path + ">.")
        raise FileNotFoundError("Couldn't locate <" + path + ">.")
    if len(raw_lines) == 0:
        raise ValueError("No data loaded from <" + path + ">.")
    # Strip the newlines and return the data
    return [line.strip("\n") for line in raw_lines]
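
# Example (hypothetical file contents): a proxies.txt containing the lines
# "1.2.3.4:80" and "5.6.7.8:8080" makes read_from_txt("proxies.txt") return
# ["1.2.3.4:80", "5.6.7.8:8080"].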
def get_proxy(proxy_list):
    '''
    (list) -> dict
    Given a proxy list <proxy_list>, a proxy is selected and returned
    as a requests-style proxies dict.
    '''
    # Choose a random proxy
    proxy = random.choice(proxy_list)
    # Use the same proxy for both HTTP and HTTPS traffic
    proxies = {
        "http": str(proxy),
        "https": str(proxy)
    }
    return proxies
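
# Note: the proxy strings are handed to requests unchanged, so entries in
# proxies.txt are assumed to be in a form requests accepts, e.g.
# "host:port" or "http://user:pass@host:port".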
def send_embed(alert_type, product):
    '''
    (str, Product) -> None
    Sends a Discord alert for <product> based on the info provided.
    <alert_type> is either "RESTOCK" or "NEW".
    '''
    # Set webhook
    url = discord_webhook
    # Create embed to send to webhook
    embed = Webhook(url, color=123123)
    # Set author info
    embed.set_author(name='NERYS', icon='https://static.zerochan.net/Daenerys.Targaryen.full.2190849.jpg')
    # Set product details
    if alert_type == "RESTOCK":
        embed.set_desc("RESTOCK: " + product.title)
    elif alert_type == "NEW":
        embed.set_desc("NEW: " + product.title)
    embed.add_field(name="Product", value=product.title)
    embed.add_field(name="Link", value=product.link)
    embed.add_field(name="Stock", value=str(product.stock))
    # Set product image
    embed.set_thumbnail(product.image)
    embed.set_image(product.image)
    # Set footer
    embed.set_footer(text='NERYS by @snivynGOD', icon='https://static.zerochan.net/Daenerys.Targaryen.full.2190849.jpg', ts=True)
    # Send the Discord alert
    embed.post()
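
# discord_webhook is a module-level global filled in by the __main__ block
# below; leaving it empty means embed.post() has no webhook URL to hit.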
def monitor():
    # GET the "view all" page
    link = "http://www.supremenewyork.com/shop/all"
    headers = {
        "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.113 Safari/537.36"
    }
    try:
        r = requests.get(link, headers=headers, timeout=5, verify=False)
    except requests.exceptions.RequestException:
        log('e', "Connection to URL <" + link + "> failed. Retrying...")
        try:
            if use_proxies:
                proxies = get_proxy(proxy_list)
                r = requests.get(link, headers=headers, proxies=proxies, timeout=8, verify=False)
            else:
                r = requests.get(link, headers=headers, timeout=8, verify=False)
        except requests.exceptions.RequestException:
            log('e', "Connection to URL <" + link + "> failed.")
            return
    # Check every product on the page
    page = soup(r.text, "html.parser")
    products = page.findAll("div", {"class": "inner-article"})
    log('i', "Checking stock of Supreme products...")
    for product in products:
        link = "https://www.supremenewyork.com" + product.a["href"]
        monitor_supreme_product(link, product)
def monitor_supreme_product(link, product):
    # Product info
    image = "https:" + product.a.img["src"]
    stock = product.text != "sold out"
    # Product already in database: watch for stock changes
    try:
        if stock is True and products_list[link].stock is False:
            log('s', products_list[link].title + " is back in stock!")
            products_list[link].stock = True
            send_embed("RESTOCK", products_list[link])
        elif stock is False and products_list[link].stock is True:
            log('s', products_list[link].title + " is now out of stock.")
            products_list[link].stock = False
    # Product not in the database yet: add it
    except KeyError:
        # GET the product page for the product name
        try:
            if use_proxies:
                proxies = get_proxy(proxy_list)
                r = requests.get(link, proxies=proxies, timeout=8, verify=False)
            else:
                r = requests.get(link, timeout=8, verify=False)
        except requests.exceptions.RequestException:
            log('e', "Connection to URL <" + link + "> failed. Retrying...")
            try:
                if use_proxies:
                    proxies = get_proxy(proxy_list)
                    r = requests.get(link, proxies=proxies, timeout=8, verify=False)
                else:
                    r = requests.get(link, timeout=8, verify=False)
            except requests.exceptions.RequestException:
                log('e', "Connection to URL <" + link + "> failed.")
                return
        title = soup(r.text, "html.parser").find("title").text
        # Add the product to the database and alert
        products_list[link] = Product(link, image, title, stock)
        log('s', "Added " + title + " to the database.")
        send_embed("NEW", products_list[link])
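
# products_list (a dict keyed by product link) is the in-memory stand-in for
# the SQL database mentioned in the module docstring: a KeyError on lookup is
# what signals a never-before-seen product.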
def build_db():
    # GET the "view all" page
    link = "http://www.supremenewyork.com/shop/all"
    headers = {
        "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.113 Safari/537.36"
    }
    try:
        r = requests.get(link, headers=headers, timeout=5, verify=False)
    except requests.exceptions.RequestException:
        log('e', "Connection to URL <" + link + "> failed. Retrying...")
        try:
            if use_proxies:
                proxies = get_proxy(proxy_list)
                r = requests.get(link, headers=headers, proxies=proxies, timeout=8, verify=False)
            else:
                r = requests.get(link, headers=headers, timeout=8, verify=False)
        except requests.exceptions.RequestException:
            log('e', "Connection to URL <" + link + "> failed.")
            return
    page = soup(r.text, "html.parser")
    products = page.findAll("div", {"class": "inner-article"})
    log('i', "Building the database of Supreme products...")
    for product in products:
        link = "https://www.supremenewyork.com" + product.a["href"]
        # Product info
        image = "https:" + product.a.img["src"]
        stock = product.text != "sold out"
        # GET the product page for the product name
        try:
            if use_proxies:
                proxies = get_proxy(proxy_list)
                r = requests.get(link, proxies=proxies, timeout=8, verify=False)
            else:
                r = requests.get(link, timeout=8, verify=False)
        except requests.exceptions.RequestException:
            log('e', "Connection to URL <" + link + "> failed. Retrying...")
            try:
                if use_proxies:
                    proxies = get_proxy(proxy_list)
                    r = requests.get(link, proxies=proxies, timeout=8, verify=False)
                else:
                    r = requests.get(link, timeout=8, verify=False)
            except requests.exceptions.RequestException:
                log('e', "Connection to URL <" + link + "> failed.")
                continue
        title = soup(r.text, "html.parser").find("title").text
        # Add the product to the database
        products_list[link] = Product(link, image, title, stock)
        log('s', "Added " + title + " to the database.")
if __name__ == "__main__":
    # Ignore insecure request warnings (verify=False is used above)
    requests.packages.urllib3.disable_warnings()
    # Load proxies (if available)
    try:
        proxy_list = read_from_txt("proxies.txt")
    except (FileNotFoundError, ValueError):
        proxy_list = []
    log('i', "Loaded " + str(len(proxy_list)) + " proxies.")
    use_proxies = len(proxy_list) > 0
    # Initialize variables
    products_list = {}
    discord_webhook = ""  # Put your webhook here
    # Build the database
    build_db()
    # Monitor products
    while True:
        monitor()
        time.sleep(8)
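
# To run: drop a proxies.txt next to this script (one proxy per line; it can
# be missing or empty, in which case requests go out directly), paste a
# Discord webhook URL into discord_webhook above, then start it with
# `python supreme.py`.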