"""Random prnt.sc screenshot scraper.

Generates random 5-character screenshot codes, fetches the corresponding
prnt.sc page, and posts any screenshot that actually exists to a
Discord-style webhook, appending each hit to log.txt.

Configuration is read from config.json (see config.json.sample):
    webhook       - URL that found screenshot links are POSTed to
    proxyEnabled  - whether to route requests through the proxy below
    proxy         - user/pass/host/port of an HTTP proxy
    wait          - seconds to sleep between attempts
    userAgent     - User-Agent header sent with every request
"""

import datetime
import json
import random
import string
import time

# src of the placeholder image prnt.sc serves when a code does not exist.
NOT_FOUND_SRC = "//st.prntscr.com/2023/07/24/0635/img/0_173a7b_211be8ff.png"

# prnt.sc short codes are lowercase alphanumeric.
CODE_ALPHABET = string.ascii_lowercase + string.digits

# Seconds before an unresponsive request is abandoned.  The original code
# passed no timeout, so a single stalled connection hung the scraper forever.
REQUEST_TIMEOUT = 10


def random_code(length=5):
    """Return a random prnt.sc-style screenshot code of *length* characters."""
    return "".join(random.choices(CODE_ALPHABET, k=length))


def build_proxies(config):
    """Return a requests ``proxies`` mapping built from *config*, or ``None``.

    Bug fix: the original mapping only contained an ``"http"`` key, but every
    request targets ``https://prnt.sc/...`` — requests picks the proxy by URL
    scheme, so the proxy was silently bypassed even with proxyEnabled true.
    Both schemes now route through the configured proxy.

    Returns None when proxying is disabled (requests treats ``proxies=None``
    the same as passing no proxies at all).
    """
    if not config.get("proxyEnabled"):
        return None
    p = config["proxy"]
    proxy_url = f"http://{p['user']}:{p['pass']}@{p['host']}:{p['port']}"
    return {"http": proxy_url, "https": proxy_url}


def main():
    """Run the scrape loop until interrupted."""
    # Imported here so the helpers above stay importable without the
    # third-party scraping dependencies installed.
    import requests
    from bs4 import BeautifulSoup

    with open("config.json") as config_file:
        config = json.load(config_file)

    headers = {"User-Agent": config["userAgent"]}
    proxies = build_proxies(config)

    while True:
        attempt = random_code()
        url = f"https://prnt.sc/{attempt}"
        try:
            r = requests.get(
                url,
                headers=headers,
                proxies=proxies,
                timeout=REQUEST_TIMEOUT,
            )
            soup = BeautifulSoup(r.text, "html.parser")
            image = soup.find("img", class_="no-click screenshot-image")
            # A real screenshot exists only when the img tag is present and
            # its src is not the site's hard-coded "not found" placeholder.
            # .get() avoids a KeyError on a src-less tag.
            if image is not None and image.get("src") != NOT_FOUND_SRC:
                print(f"[{attempt}] Success!")
                data = {
                    "content": f"https://prnt.sc/{attempt}"
                }
                requests.post(config["webhook"], json=data, timeout=REQUEST_TIMEOUT)
                with open("log.txt", "a") as log:
                    log.write(f"[{datetime.datetime.now()}] https://prnt.sc/{attempt} \n")
        except requests.RequestException as exc:
            # A transient failure (proxy hiccup, DNS error, timeout) must not
            # kill the scraper; report it and move on to the next attempt.
            print(f"[{attempt}] request failed: {exc}")
        time.sleep(config["wait"])


if __name__ == "__main__":
    main()