prntscrape/scrape.py
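"""Probe random prnt.sc screenshot codes; when a live screenshot is found,
post its URL to the configured webhook and append it to log.txt."""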

import json
import requests
import random, string, time, datetime
from bs4 import BeautifulSoup
# Load settings from config.json
with open("config.json") as configFile:
    config = json.load(configFile)
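# config.json is assumed to look roughly like this (inferred from the keys used below;
# values are placeholders, and 'wait' is the delay in seconds between attempts):
# {
#     "userAgent": "Mozilla/5.0 ...",
#     "proxyEnabled": true,
#     "proxy": {"user": "user", "pass": "pass", "host": "127.0.0.1", "port": 8080},
#     "webhook": "https://example.com/webhook",
#     "wait": 5
# }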
notfound = "//st.prntscr.com/2023/07/24/0635/img/0_173a7b_211be8ff.png" # yes I am hard-coding this
headers = {
    "User-Agent": config["userAgent"]
}
proxy_url = f"http://{config['proxy']['user']}:{config['proxy']['pass']}@{config['proxy']['host']}:{config['proxy']['port']}"
# requests selects a proxy by URL scheme; prnt.sc is served over https, so both schemes need an entry
proxies = {
    "http": proxy_url,
    "https": proxy_url,
}
running = True
while True:
    # Guess a random five-character prnt.sc code
    attempt = "".join(random.choices(string.ascii_lowercase + string.digits, k=5))
    if config['proxyEnabled']:
        r = requests.get(f"https://prnt.sc/{attempt}", proxies=proxies, headers=headers)
    else:
        r = requests.get(f"https://prnt.sc/{attempt}", headers=headers)
    soup = BeautifulSoup(r.text, 'html.parser')
    image = soup.find('img', class_='no-click screenshot-image')
    # A hit is any screenshot image that is not the "not found" placeholder
    if image is not None:
        if image['src'] != notfound:
            print(f"[{attempt}] Success!")
            data = {
                "content": f"https://prnt.sc/{attempt}"
            }
            # Report the hit to the webhook and append it to the local log
            requests.post(config['webhook'], json=data)
            with open("log.txt", "a") as log:
                log.write(f"[{datetime.datetime.now()}] https://prnt.sc/{attempt}\n")
    time.sleep(config['wait'])