# imgur download utilities
import sys
|
|
from shutil import rmtree
|
|
from os import remove
|
|
from threading import Thread
|
|
|
|
import requests
|
|
import bs4
|
|
from gevent import sleep
|
|
|
|
from .config import SINGLE_IMAGE_DELETE_AFTER_SECS, ALBUM_DELETE_AFTER_SECS
|
|
|
|
def delete_file(path):
    """Remove *path* once the single-image grace period has elapsed.

    Meant to run on a background thread (see the Thread spawns in
    ``get``); a file that is already gone is silently ignored, so
    double-deletion is harmless.
    """
    sleep(SINGLE_IMAGE_DELETE_AFTER_SECS)
    print('Erasing', path)
    try:
        remove(path)
    except FileNotFoundError:
        pass  # already deleted — best effort only
|
|
|
|
|
|
def error(msg):
    """Report *msg* on stderr, flushing so it appears immediately."""
    # print appends the newline and flush=True forces the write out,
    # matching an explicit write(msg + "\n") followed by flush().
    print(msg, file=sys.stderr, flush=True)
|
|
|
|
def get(url: str, write_dir: str, delete=True):
    """Download an imgur image or album into *write_dir*.

    Accepts a full ``https://imgur.com/...`` URL or a bare imgur
    path/id.  Gallery links are rewritten to album links, and albums
    are fetched through the "blog" layout page, which lists every
    image.  Each written file is optionally scheduled for removal by
    a background ``delete_file`` thread.

    :param url: imgur URL or id (single image, album, or gallery)
    :param write_dir: directory the image files are written into
    :param delete: when True, spawn a delete_file thread per image
    """
    if not url.startswith('https://imgur.com/'):
        url = 'https://imgur.com/' + url

    album = False
    # A gallery is just an album reached through a different path.
    if "gallery" in url:
        url = url.replace("gallery", "a")
    if "/a/" in url:
        print('it album')
        album = True
        # The "blog" layout exposes all album images on one page.
        if not url.endswith("blog"):
            url += "/layout/blog"

    if album:
        _get_album(url, write_dir, delete)
    else:
        _get_single(url, write_dir, delete)


def _get_single(url, write_dir, delete):
    """Fetch one direct image from i.imgur.com and write it to disk."""
    print('getting img', url)
    name = url.rsplit('/', 1)[-1]
    # Strip only a trailing extension; the old replace('jpg', '')
    # corrupted ids that merely contained the substring "jpg" and
    # left a dangling dot behind.
    base = name.rsplit('.', 1)[0] if '.' in name else name
    direct_url = 'https://i.imgur.com/' + base
    # Use the URL's basename as the filename instead of url[-12:],
    # which sliced back into "...com/<id>" and embedded a '/'.
    path = f'{write_dir}/{name}'
    with open(path, 'wb') as img:
        img.write(requests.get(direct_url).content)
    if delete:
        Thread(target=delete_file, args=[path]).start()


def _get_album(url, write_dir, delete):
    """Scrape an album/gallery page and download every image in it."""
    print('Detecting album/gallery images', url)
    soup = bs4.BeautifulSoup(requests.get(url).text, 'html.parser')
    for count, el in enumerate(
            soup.select('.post-image meta[itemprop="contentUrl"]'), start=1):
        try:
            found_url = "https:" + el['content']
        except KeyError:
            error("Could not obtain url for detected image")
            continue
        print(f"Downloading image {count}: {found_url}")
        # Basename of the direct URL; join with '/' for consistency
        # with the single-image path (the old code concatenated
        # write_dir and a -11 slice with no separator).
        path = f"{write_dir}/{found_url.rsplit('/', 1)[-1]}"
        print("Writing image", path)
        with open(path, "wb") as f:
            f.write(requests.get(found_url).content)
        if delete:
            Thread(target=delete_file, args=[path]).start()
|
|
|