parent c652308979
commit 15c199f610
@@ -1,6 +1,8 @@
 # Changelog

-This project uses Semantic Versioning
+## 0.0.1

+Switched from glob to a list file which fixes albums overwriting each other (Reported by SeerLite)
+
 ## 0.0.0

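The changelog entry above describes the caching change in the hunks below: instead of globbing every file in the cache directory (which let concurrently cached albums pick up each other's images), get() now records each album's image names in a small list file and the album() view reads that file back. The following is a minimal sketch of that round trip, not the project's own code: write_album_list/read_album_list are stand-in names, and IMAGE_CACHE here is a placeholder for the value imgin imports from its config module.

# Illustrative sketch only; helper names and the cache path are stand-ins.
from os import makedirs

IMAGE_CACHE = '/tmp/imgin-cache/'

def write_album_list(orig_url, found_urls):
    # get() side: store the per-album image names in a file whose name is the
    # album URL with '/' replaced by '_', e.g. '/a/abc123' -> '_a_abc123'.
    found_list_file = IMAGE_CACHE + orig_url.replace('/', '_')
    with open(found_list_file, 'w') as f:
        f.write(','.join(found_urls))

def read_album_list(album_id):
    # album() side: rebuild the image paths for one album from its own list
    # file instead of glob(IMAGE_CACHE + "*"), so two albums cached at the
    # same time no longer mix up each other's listings.
    found_list_file = IMAGE_CACHE + ('/a/' + album_id).replace('/', '_')
    with open(found_list_file, 'r') as f:
        return [IMAGE_CACHE + name for name in f.read().split(',')]

makedirs(IMAGE_CACHE, exist_ok=True)
write_album_list('/a/abc123', ['AbCdEf1.jpg', 'GhIjKl2.jpg'])
print(read_album_list('abc123'))  # ['/tmp/imgin-cache/AbCdEf1.jpg', '/tmp/imgin-cache/GhIjKl2.jpg']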
@@ -4,7 +4,6 @@ monkey.patch_all()
 from threading import Thread
 from os import remove, mkdir, path, stat
 from shutil import rmtree
-from glob import glob

 from uuid import uuid4

@@ -20,12 +19,16 @@ def get_timestamp_of_file(file):
     return stat(file).st_ctime

 def album(id):
-    req_id = str(uuid4())
     req = IMAGE_CACHE

     get("/a/" + id, req)
+    found_list_file = IMAGE_CACHE + ("/a/" + id).replace('/', '_')

-    imgs = glob(req + "*")
+    with open(found_list_file, 'r') as f:
+        imgs = f.read().split(',')

+    for c, img in enumerate(imgs):
+        imgs[c] = IMAGE_CACHE + imgs[c]
+
     # sort image order (file creation time)
     imgs = sorted(imgs, key=get_timestamp_of_file)
@@ -62,6 +65,7 @@ def gallery(id=''):
 def img(img=''):
     if not img.endswith("jpeg") and not img.endswith("jpg") and not img.endswith("png"):
         img = img + ".jpg"
+    img = img.replace('jpeg', 'jpg')
     if not path.exists(IMAGE_CACHE + img):
         get(img, IMAGE_CACHE)
     return static_file(img, root=IMAGE_CACHE)
imgin/get.py
@@ -6,7 +6,7 @@ import requests
 import bs4
 from gevent import sleep

-from .config import SINGLE_IMAGE_DELETE_AFTER_SECS
+from .config import IMAGE_CACHE, SINGLE_IMAGE_DELETE_AFTER_SECS

 def delete_file(path):
     sleep(SINGLE_IMAGE_DELETE_AFTER_SECS)
@@ -22,9 +22,12 @@ def error(msg):
     sys.stderr.flush()

 def get(url: str, write_dir: str, delete=True):
+    orig_url = url
     if not url.startswith('https://imgur.com/'):
         url = 'https://imgur.com/' + url
     found_url = ''
+    found_urls = []
+    found_list_file = ''

     album = False
     if "gallery" in url:
@@ -37,7 +40,7 @@ def get(url: str, write_dir: str, delete=True):

     if not album:
         print('Getting img', url)
-        url = 'https://i.imgur.com/' + url.rsplit('/', 1)[-1].replace('jpeg', 'jpg')
+        url = 'https://i.imgur.com/' + url.rsplit('/', 1)[-1]
         with open(f'{write_dir}/{url[-11:]}', 'wb') as img:
             img.write(requests.get(url).content)
         if delete:
@@ -53,11 +56,19 @@ def get(url: str, write_dir: str, delete=True):
             continue
         if found_url.endswith('ico.jpg'):
             continue
+        found_urls.append(found_url[-11:])
         print(f"Downloading image {count}: {found_url}")

         print("Writing image", f"{write_dir}{found_url[-11:]}")
         with open(f"{write_dir}{found_url[-11:]}", "wb") as f:
             f.write(requests.get(found_url).content)

         if delete:
             Thread(target=delete_file, args=[f"{write_dir}{found_url[-11:]}"]).start()
+
+    # Write the found urls to a file with the name of the album so the viewer endpoint can get them
+    found_list_file = IMAGE_CACHE + orig_url.replace('/', '_')
+    with open(found_list_file, 'w') as f:
+        f.write(','.join(found_urls))
+    Thread(target=delete_file, args=[found_list_file]).start()
+
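As with the cached images themselves, the new list file is handed to delete_file on a background thread so it ages out of the cache after the configured delay. Below is a small sketch of that deferred-delete pattern under the assumption that delete_file removes its argument once SINGLE_IMAGE_DELETE_AFTER_SECS has elapsed; only the sleep line is visible in this diff, and the helper names and 600-second value here are stand-ins rather than imgin's config.

# Sketch of the deferred-delete pattern; names and the delay value are stand-ins.
from os import path, remove
from threading import Thread
from time import sleep

DELETE_AFTER_SECS = 600

def delete_file_later(file_path):
    # Wait out the cache lifetime, then drop the file if it is still there.
    sleep(DELETE_AFTER_SECS)
    if path.exists(file_path):
        remove(file_path)

def schedule_delete(file_path):
    # Fire-and-forget: the request handler returns immediately while the
    # cleanup thread waits in the background (imgin uses gevent's sleep here).
    Thread(target=delete_file_later, args=[file_path], daemon=True).start()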
@@ -39,6 +39,7 @@
 <p>Or, run it with python by installing the requirements.txt and ./run.py. Or use the Dockerfile. Contact me if you want help or find a bug.</p>
 <footer>
 <small>
+<p>Imgin Version 0.0.1</p>
 Powered by <a href="https://voidnet.tech/">VoidNetwork LLC</a><br>
 This website does not claim ownership of any media.
 <br>This service simply acts as a proxy to Imgur.com and does not store images aside from a brief cache.