#!/usr/bin/env python3
"""
A file hosting service similar to Pomf and Uguu but without the public nature.
"""

import os
import re
import time
import string
import random
import datetime

from aiohttp import web
import jinja2
import aiohttp_jinja2
from aiohttp_jinja2 import render_template
import asyncpg
import uvloop
import requests

import config
import buckler_aiohttp
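
# `config` and `buckler_aiohttp` are local modules.  Judging from how they are
# used below, `config` is expected to provide at least upload_url, upload_dir,
# client_max_size, db (asyncpg connection keyword arguments) and url_prefix,
# while `buckler_aiohttp` provides the buckler_session middleware wired up in
# init_app().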

uvloop.install()

routes = web.RouteTableDef()


@routes.get('/', name='index')
@routes.post('/', name='index')
async def index(request):
    """The index page."""
    if request.method == 'GET':
        return render_template("index.html", request, locals())

    data = await request.post()
    rand_name = bool(data.get('rand_name'))
    # The form field was read as 'reponse_type' (sic) in the original; accept
    # both spellings so the lookup works whichever name index.html uses.
    response_type = data.get('response_type') or data.get('reponse_type')

    files = []
    for filefield in data.getall('files', []):
        if not filefield:
            continue
        files.append(handle_filefield(filefield, rand_name=rand_name))
    if data.get('url'):
        # handle_url() returns None when the download fails; skip it then.
        result = handle_url(data.get('url'), rand_name=rand_name)
        if result is not None:
            files.append(result)

    if data.get('delete_this'):
        delete_num = data.get('delete_num', '')
        delete_type = data.get('delete_type', '')

        try:
            delete_num = int(delete_num)
            assert 1 <= delete_num <= 59
            assert delete_type in ['minutes', 'hours', 'days', 'weeks']
        except (ValueError, AssertionError):
            return web.Response(text='invalid expiration settings', status=400)

        delta = datetime.timedelta(**{delete_type: delete_num})
        expiration_date = datetime.datetime.now() + delta
    else:
        expiration_date = None

    # One row per stored file: (user_id, id, filename, expiration_date).
    # 'userid' is expected to be set and validated upstream by the buckler
    # middleware.
    files_insert = []
    for file in files:
        t = (int(request.cookies.get('userid')), file[0], file[1], expiration_date)
        files_insert.append(t)

    async with request.app['pool'].acquire() as conn:
        await conn.executemany(
            "INSERT INTO upload (user_id, id, filename, expiration_date) "
            "VALUES ($1, $2, $3, $4)",
            files_insert)

    urls = [config.upload_url + f[1] for f in files]
    if response_type == 'html':
        return render_template("result.html", request, locals())
    elif response_type == 'plain':
        return web.Response(body='\n'.join(urls))
    elif response_type == 'json':
        return web.json_response(urls)
    # Fall back to plain text when no (or an unknown) response type is given.
    return web.Response(body='\n'.join(urls))
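
# Example client call (illustrative; the exact URL depends on config.url_prefix
# and on where the service is deployed, and the session cookies required by the
# buckler middleware are omitted):
#
#   resp = requests.post('https://example.com/saddle/',
#                        data={'response_type': 'plain',
#                              'delete_this': 'on',
#                              'delete_num': '3',
#                              'delete_type': 'days'},
#                        files={'files': open('cat.png', 'rb')})
#   resp.text  # one upload URL per line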


@routes.get('/gallery/{user_id}', name='gallery')
async def gallery(request):
    """A user's gallery page."""
    try:
        user_id = int(request.match_info['user_id'])
    except ValueError:
        raise web.HTTPNotFound

    async with request.app['pool'].acquire() as conn:
        uploads = await conn.fetch(
            "SELECT * FROM upload WHERE user_id = $1",
            user_id)

    upload_url = config.upload_url
    return render_template("gallery.html", request, locals())


def handle_filefield(filefield, rand_name=True):
    """Handles a posted file."""
    # `filefield` is an aiohttp FileField: .filename is the client-supplied
    # name and .file is a file-like object holding the uploaded bytes.
    filename = safe_filename(filefield.filename)
    # If nothing usable survives sanitizing, fall back to a random name.
    if not filename:
        rand_name = True
    prefix = get_rand_chars()
    if rand_name:
        filename = prefix + os.path.splitext(filename)[1]
    else:
        filename = prefix + '_' + filename

    with open(os.path.join(config.upload_dir, filename), 'wb') as file:
        file.write(filefield.file.read())

    return (prefix, filename)


def handle_url(url, rand_name=True):
    """Handles a posted URL."""
    try:
        filename, data = download_file(url)
    except (ValueError, requests.RequestException):
        # Download failed (timeout, size limit, network error, ...); the
        # caller skips the entry.
        return None

    filename = safe_filename(filename)
    # If nothing usable survives sanitizing, fall back to a random name.
    if not filename:
        rand_name = True
    prefix = get_rand_chars()
    if rand_name:
        filename = prefix + os.path.splitext(filename)[1]
    else:
        filename = prefix + '_' + filename

    with open(os.path.join(config.upload_dir, filename), 'wb') as file:
        file.write(data)

    return (prefix, filename)
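
# Both handlers return a (prefix, filename) pair: e.g. with prefix 'aB3dE5gH'
# and an uploaded 'cat.png', rand_name=True yields ('aB3dE5gH', 'aB3dE5gH.png')
# and rand_name=False yields ('aB3dE5gH', 'aB3dE5gH_cat.png').  index() stores
# the prefix as the row id and builds the public link as
# config.upload_url + filename.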


def safe_filename(filename=''):
    """Sanitizes the given filename."""
    safe_char = string.ascii_letters + string.digits + '._ '
    filename = ''.join([c for c in filename if c in safe_char])
    filename = filename.strip('._ ')
    return filename
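
# For example, safe_filename('../../etc/passwd') keeps only '....etcpasswd'
# and the final strip('._ ') reduces it to 'etcpasswd'.  A name with no safe
# characters at all collapses to '', which makes the callers above fall back
# to a purely random name.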


def get_rand_chars(n=8):
    """Returns a string of `n` random ASCII letters and digits."""
    chars = []
    for _ in range(n):
        char = random.choice(string.ascii_letters + string.digits)
        chars.append(char)
    return "".join(chars)


def download_file(url, timeout=10, max_file_size=config.client_max_size):
    """
    Downloads the file at the given url while observing file size and
    timeout limitations.
    """
    requests_kwargs = {
        'stream': True,
        'headers': {'User-Agent': "Steelbea.me LTD needs YOUR files."},
        'timeout': timeout,
        'verify': True
    }
    temp = b''
    with requests.get(url, **requests_kwargs) as r:
        start_time = time.time()
        # Stream in 100 KiB chunks so the size and time limits can be enforced
        # while the download is still in progress.
        for chunk in r.iter_content(102400):
            if time.time() - start_time > timeout:
                raise ValueError('timeout reached')
            if len(temp) > max_file_size:
                raise ValueError('response too large')
            temp += chunk

        # Prefer the filename from the Content-Disposition header, e.g.
        # 'attachment; filename="report.pdf"' yields 'report.pdf'; otherwise
        # fall back to the last path component of the URL.
        match = re.search(r'filename="(.+)"',
                          r.headers.get('Content-Disposition', ''))
        if match:
            fname = match.group(1)
        else:
            fname = os.path.basename(url)
    return (fname, temp)
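
# Note that requests is synchronous, so the event loop is blocked while
# download_file() runs inside the index() handler; an aiohttp.ClientSession
# based download would avoid that, at the cost of making handle_url() a
# coroutine.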


async def init_app():
    """Initializes the application."""
    app = web.Application(middlewares=[buckler_aiohttp.buckler_session])
    aiohttp_jinja2.setup(app, loader=jinja2.FileSystemLoader('templates'))
    app['pool'] = await asyncpg.create_pool(**config.db)

    # saddle.sql runs on every startup, so it is presumably written to be
    # idempotent (e.g. CREATE TABLE IF NOT EXISTS ...).
    async with app['pool'].acquire() as conn:
        with open('saddle.sql', 'r') as file:
            await conn.execute(file.read())

    app.router.add_routes(routes)

    # Mount the app under config.url_prefix on a wrapper application created
    # with the configured upload size limit.
    app_wrap = web.Application(client_max_size=config.client_max_size)
    app_wrap.add_subapp(config.url_prefix, app)
    return app_wrap


if __name__ == "__main__":
    app = init_app()
    web.run_app(app, host='0.0.0.0', port=5000)
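
# web.run_app() accepts the coroutine returned by init_app() and runs it on the
# event loop it creates (a uvloop loop here, because of uvloop.install() above).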