Compare commits
No commits in common. "master" and "aiohttp" have entirely different histories.
.gitignore (vendored, 1 line changed)
@@ -2,4 +2,3 @@ __pycache__/
*.swp
*.swo
sync.sh
config.py
LICENSE (15 lines changed)
@@ -1,15 +0,0 @@
ISC License

Copyright (c) 2019, iou1name <iou1name@steelbea.me>

Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.

THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
@@ -10,4 +10,4 @@ Python packages: `gunicorn aiohttp aiohttp_jinja2`
2. Walk the dinosaur

## Usage
`gunicorn aberrant:init_app --bind localhost:5250 --worker-class aiohttp.GunicornWebWorker`
`gunicorn aberrant:app --bind localhost:5250 --worker-class aiohttp.GunicornWebWorker`
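The two commands above differ only in the gunicorn entry point: `aberrant:app` points at a module-level `web.Application`, while `aberrant:init_app` points at an async factory that builds one, which reasonably recent aiohttp gunicorn workers can also load. A minimal standalone sketch of both styles, with an illustrative module name and handler rather than the repo's own:

    # minimal_app.py -- illustrative, not part of this repo
    from aiohttp import web

    async def hello(request):
        return web.Response(text="hello")

    # Style 1: module-level instance
    #   gunicorn minimal_app:app --worker-class aiohttp.GunicornWebWorker
    app = web.Application()
    app.router.add_get('/', hello)

    # Style 2: async factory, awaited by the worker at startup
    #   gunicorn minimal_app:init_app --worker-class aiohttp.GunicornWebWorker
    async def init_app():
        factory_app = web.Application()
        factory_app.router.add_get('/', hello)
        return factory_app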
aberrant.py (54 lines changed)
@@ -3,7 +3,6 @@
The primary module for serving the Aberrant application.
"""
import json
import asyncio

import jinja2
import aiohttp_jinja2
@@ -13,23 +12,31 @@ from aiohttp_jinja2 import render_template
import config
import events
import rtorrent
import buckler_aiohttp

app = web.Application()
app.on_shutdown.append(rtorrent.stop_watch)
aiohttp_jinja2.setup(app, loader=jinja2.FileSystemLoader('templates'))
rtorrent.init()

routes = web.RouteTableDef()

@routes.get('/', name='index')
@routes.get(config.prefix + "/", name='index')
async def index(request):
    """The index page."""
    torrents = rtorrent.get_active()
    tracker_stats = rtorrent.get_stats()
    return render_template("index.html", request, locals())

@routes.get(config.prefix + "/get_active_torrents", name='active-torrents')
async def get_active_torrents(request):
    """Returns all active torrents formatted as JSON."""
    data = [vars(t) for t in rtorrent.get_active()]
    return web.json_response(data)

@routes.get('/ws', name='ws')
@routes.get(config.prefix + '/ws', name='ws')
async def websocket_handler(request):
    """The websocket endpoint."""
    ws = web.WebSocketResponse(heartbeat=30)
    ws = web.WebSocketResponse()
    ws_ready = ws.can_prepare(request)
    if not ws_ready.ok:
        return web.Response(text="Cannot start websocket.")
@@ -52,40 +59,7 @@ async def websocket_handler(request):
    await ws.close()
    return ws


async def watcher_loop():
    try:
        while True:
            await rtorrent.update_torrents()
            await asyncio.sleep(10)
    except asyncio.CancelledError:
        return


async def start_background_tasks(app):
    rtorrent.init()
    app['watcher'] = asyncio.create_task(watcher_loop())


async def cleanup_background_tasks(app):
    app['watcher'].cancel()
    await app['watcher']


async def init_app():
    """Initializes the application."""
    app = web.Application(middlewares=[buckler_aiohttp.buckler_session])
    aiohttp_jinja2.setup(app, loader=jinja2.FileSystemLoader('templates'))
    app.on_startup.append(start_background_tasks)
    app.on_cleanup.append(cleanup_background_tasks)

    app.router.add_routes(routes)

    app_wrap = web.Application()
    app_wrap.add_subapp(config.url_prefix, app)
    return app_wrap

app.router.add_routes(routes)

if __name__ == "__main__":
    app = init_app()
    web.run_app(app, host='0.0.0.0', port=5250)
    aiohttp.web.run_app(app, host='0.0.0.0', port=5250)
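One side of the diff above builds the application in an `init_app()` factory, drives the rTorrent polling from an asyncio task wired to `on_startup`/`on_cleanup`, and mounts everything under a URL prefix; the other keeps a module-level app and a watcher thread. A minimal self-contained sketch of the factory-plus-background-task pattern, with a stand-in `poll_once()` where the repo calls `rtorrent.update_torrents()`:

    import asyncio
    from aiohttp import web

    async def poll_once():
        # Stand-in for rtorrent.update_torrents(); illustrative only.
        await asyncio.sleep(0)

    async def watcher_loop():
        try:
            while True:
                await poll_once()
                await asyncio.sleep(10)
        except asyncio.CancelledError:
            return

    async def start_background_tasks(app):
        app['watcher'] = asyncio.create_task(watcher_loop())

    async def cleanup_background_tasks(app):
        app['watcher'].cancel()
        await app['watcher']

    async def init_app():
        app = web.Application()
        app.on_startup.append(start_background_tasks)
        app.on_cleanup.append(cleanup_background_tasks)

        # Mount under a prefix, as the diff does with config.url_prefix.
        app_wrap = web.Application()
        app_wrap.add_subapp('/aberrant', app)
        return app_wrap

Cancelling the task in `cleanup_background_tasks` and then awaiting it completes cleanly because `watcher_loop()` catches `CancelledError` and returns.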
@@ -1,72 +0,0 @@
#!/usr/bin/env python3
"""
Session interface middlewares to integrate the aiohttp app with Buckler.
"""
import json
from datetime import datetime

import aiohttp
from aiohttp import web

import config

@web.middleware
async def buckler_session(request, handler):
    """
    Verifies the user with the configured Buckler app and retrieves any
    session data they may have. Redirects them to the login page otherwise.
    """
    user_id = request.cookies.get('userid', '')
    user_sid = request.cookies.get('session', '')

    url = config.buckler['url'] + '/get_session'
    params = {
        'app_id': config.buckler['app_id'],
        'app_key': config.buckler['app_key'],
        'userid': user_id,
        'session': user_sid }
    async with aiohttp.ClientSession() as session:
        async with session.get(url, params=params) as resp:
            data = await resp.json()
        if data.get('error'):
            resp = web.HTTPFound(config.buckler['login_url'])
            resp.set_cookie(
                'redirect',
                request.url,
                domain=config.server_domain,
                secure=True,
                httponly=True)
                #samesite='strict')
            raise resp
        request['session'] = data['session_data']
        request['meta'] = data['meta']

        resp = await handler(request)

        if request['session'] != data['session_data']: # session data modified
            url = config.buckler['url'] + '/set_session'
            data = json.dumps(request['session'])
            session.post(url, params=params, data=data) # TODO: error handle?

    last_used = datetime.fromisoformat(request['meta']['last_used'])
    now = datetime.now(last_used.tzinfo)
    delta = now - last_used
    if delta.seconds > 600:
        resp.set_cookie(
            'userid',
            user_id,
            domain=config.server_domain,
            max_age=30*24*60*60,
            secure=True,
            httponly=True)
            #samesite='strict')
        resp.set_cookie(
            'session',
            user_sid,
            domain=config.server_domain,
            max_age=30*24*60*60,
            secure=True,
            httponly=True)
            #samesite='strict')

    return resp
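The module above is consumed in the other branch via `web.Application(middlewares=[buckler_aiohttp.buckler_session])`; handlers then read whatever the middleware stashed on the request. A minimal sketch of that wiring (the `whoami` handler and `build_app` helper are illustrative):

    from aiohttp import web

    import buckler_aiohttp  # the module shown in the hunk above

    async def whoami(request):
        # buckler_session sets request['session'] and request['meta'] before handlers run
        return web.json_response({'meta': request['meta']})

    def build_app():
        app = web.Application(middlewares=[buckler_aiohttp.buckler_session])
        app.router.add_get('/whoami', whoami)
        return app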
config.py (new file, 5 lines)
@@ -0,0 +1,5 @@
#!/usr/bin/env python3
"""
Configuation settings for Aberrant.
"""
prefix = '/aberrant'
@@ -1,17 +0,0 @@
#!/usr/bin/env python3
"""
Configuation settings for Aberrant.
"""
rtorrents = [
    'scgi://localhost:5000',
    'scgi://localhost:5001',
]

server_domain = 'steelbea.me'
url_prefix = '/aberrant'
buckler = {
    'url': "http://192.168.1.100:5400/buckler",
    'app_id': 1,
    'app_key': """password""",
    'login_url': "/buckler/login",
}
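These two configs correspond to two ways of hanging the UI under `/aberrant`: `prefix` is concatenated into every route path by the decorators in `aberrant.py`, whereas `url_prefix` is handed to `add_subapp()` so the handlers themselves stay prefix-free. A minimal sketch of the per-route variant (handler bodies are placeholders):

    from aiohttp import web

    prefix = '/aberrant'  # stand-in for config.prefix

    routes = web.RouteTableDef()

    @routes.get(prefix + '/', name='index')
    async def index(request):
        return web.Response(text='index')

    @routes.get(prefix + '/get_active_torrents', name='active-torrents')
    async def get_active_torrents(request):
        return web.json_response([])

    app = web.Application()
    app.add_routes(routes)

The sub-app variant instead registers plain paths and relies on `app_wrap.add_subapp(config.url_prefix, app)` to prepend the prefix.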
@@ -12,12 +12,6 @@ async def active_torrents(ws, data):
    res = {'event': 'active_torrents', 'data': data}
    await ws.send_json(res)

async def tracker_stats(ws, data):
    """Returns tracker stats."""
    data = rtorrent.get_stats()
    res = {'event': 'tracker_stats', 'data': data}
    await ws.send_json(res)

events = {}
for obj in dir():
    if type(locals()[obj]) == types.FunctionType:
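The truncated `for obj in dir()` loop at the end of this hunk builds an `events` registry mapping handler names to the module's coroutine functions, so the websocket handler can dispatch incoming `{'event': ..., 'data': ...}` messages by name. A minimal standalone sketch of that registration pattern:

    import types

    async def active_torrents(ws, data):
        await ws.send_json({'event': 'active_torrents', 'data': data})

    async def tracker_stats(ws, data):
        await ws.send_json({'event': 'tracker_stats', 'data': data})

    # Collect every function defined in this module, keyed by its name.
    events = {
        name: obj for name, obj in list(globals().items())
        if isinstance(obj, types.FunctionType)
    }

    # Dispatch side (inside the websocket handler):
    #     await events[msg['event']](ws, msg.get('data'))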
rtorrent.py (47 lines changed)
@@ -4,14 +4,15 @@ This module handles the interface with rTorrent via XMLRPC.
"""
import re
import time
import asyncio
import threading
from collections import defaultdict

import config
import rtorrent_xmlrpc

NUM_INST = 10
WATCH_HANDLE = None
sp = []
torrents = [[] for _ in range(len(config.rtorrent_insts))]
torrents = [[]] * NUM_INST

class Torrent:
    def __init__(self, raw, rt_id):
@@ -54,12 +55,26 @@
        self.hashing = raw[10]


async def update_torrents():
    for n in range(len(torrents)):
        try:
            torrents[n] = await asyncio.wait_for(get_all(n), 5)
        except asyncio.TimeoutError:
            continue
class Watch(threading.Thread):
    """A thread class that continously queries the rTorrent instances."""
    def __init__(self):
        super(Watch, self).__init__()
        self._stop_event = threading.Event()

    def stop(self):
        self._stop_event.set()

    def stopped(self):
        return self._stop_event.is_set()

    def run(self):
        global torrents
        while not self.stopped():
            for n in range(NUM_INST):
                if self.stopped():
                    break
                torrents[n] = get_all(n)
            self._stop_event.wait(2)


def size_units(rate):
@@ -111,7 +126,7 @@ def all_torrents():
        res += item
    return res

async def get_all(n):
def get_all(n):
    """Gets all torrent information from a instance and returns it."""
    res = sp[n].d.multicall2('', 'main',
        'd.hash=',
@@ -130,10 +145,18 @@

def init():
    """Initializes the rTorrent interface."""
    global WATCH_HANDLE
    global sp
    for rt_port in config.rtorrent_insts:
        s = rtorrent_xmlrpc.SCGIServerProxy(rt_port)
    for n in range(NUM_INST):
        s = rtorrent_xmlrpc.SCGIServerProxy(f"scgi://localhost:500{n}")
        sp.append(s)
    WATCH_HANDLE = Watch()
    WATCH_HANDLE.start()

async def stop_watch(*args, **kwargs):
    """Stops the watch thread."""
    global WATCH_HANDLE
    WATCH_HANDLE.stop()

def get_active():
    """Returns all actively seeding or leeching torrents."""
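Both variants above poll every rTorrent instance on a timer; the asyncio version bounds each query with `asyncio.wait_for(..., 5)` so one stalled instance cannot block the loop, while the threading variant paces its sweeps with `self._stop_event.wait(2)`. A minimal sketch of the timeout-guarded polling, with a `fetch(n)` coroutine standing in for the repo's `get_all(n)`:

    import asyncio

    torrents = [[] for _ in range(3)]  # one slot per rTorrent instance (count is illustrative)

    async def fetch(n):
        # Stand-in for get_all(n): query instance n over SCGI/XML-RPC.
        await asyncio.sleep(0)
        return []

    async def update_torrents():
        for n in range(len(torrents)):
            try:
                # Give each instance at most 5 seconds, then move on.
                torrents[n] = await asyncio.wait_for(fetch(n), 5)
            except asyncio.TimeoutError:
                continue

    async def watcher_loop(interval=10):
        while True:
            await update_torrents()
            await asyncio.sleep(interval)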
@@ -1,100 +0,0 @@
#!/usr/bin/env python3
"""
This file handles a rather silly use case:
You have two servers, A and B, each with various rtorrent instances running.
You want one instance of Aberrant, running on server A. In your infinite
wisdom, you bound the SCGI ports of the rtorrent instances on server B to
localhost, leaving Aberrant unable to query them. Rtorrent won't allow you
to rebind the SCGI port while it's running, and you don't want to restart
the rtorrent instance(s) because restarting stuff sucks. Therefore, you
need another way for Aberrant to communicate to them.

This script acts as a proxy to the rtorrent instances on server B. It will
listen on a port (don't forget to open the port in your iptables), and
relay all data it receives to the torrent SCGI port and vice versa.

To use:
Configure `port_mapping` in the form `listen_port`: `rtorrent_port`.
Run the script on server B.
"""
import socket
import threading

port_mapping = {
    4000: 5000,
    4001: 5001,
    4002: 5002,
}

class SocketProxy(threading.Thread):
    """Represents a proxy to an rTorrent SCGI port."""
    def __init__(self, listen_port, rtorrent_port):
        super().__init__()
        self.listen_port = listen_port
        self.rtorrent_port = rtorrent_port
        self._stop_event = threading.Event()

    def stop(self):
        """Sets the internal 'stop running' flag."""
        self._stop_event.set()

    def stopped(self):
        """Returns true if the internal stop flag has been set."""
        return self._stop_event.is_set()

    def run(self):
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
            s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            s.settimeout(1)
            s.bind(('0.0.0.0', self.listen_port))
            s.listen(1)
            while not self.stopped():
                try:
                    conn, addr = s.accept() # client connected
                except socket.timeout:
                    continue
                with conn:
                    # receive data from client
                    data = []
                    data.append(conn.recv(1024)) # the header really shouldn't be longer than this...
                    header = data[0].partition(b',')[0]
                    body_len = int(header.split(b'\x00')[1])
                    recv_len = len(data[0])
                    while recv_len < body_len + len(header) + 1:
                        data.append(conn.recv(1024))
                        recv_len += len(data[-1])
                    data = b''.join(data)

                    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as rt_conn:
                        rt_conn.connect(('127.0.0.1', self.rtorrent_port))
                        rt_conn.sendall(data) # send to rtorrent

                        # receive data from rtorrent
                        data = []
                        data.append(rt_conn.recv(1024))
                        header = data[0].partition(b'<?xml')[0]
                        body_len = header.split(b'\r\n')[2].partition(b' ')[2]
                        body_len = int(body_len)
                        recv_len = len(data[0])
                        while recv_len < body_len + len(header):
                            data.append(rt_conn.recv(1024))
                            recv_len += len(data[-1])
                        data = b''.join(data)
                    conn.sendall(data) # send to client

def run_proxies():
    """Run all proxies."""
    proxies = []
    for listen_port, rtorrent_port in port_mapping.items():
        p = SocketProxy(listen_port, rtorrent_port)
        p.start()
        proxies.append(p)

    try:
        proxies[0].join()
    except KeyboardInterrupt:
        for p in proxies:
            p.stop()

if __name__ == '__main__':
    run_proxies()
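The deleted proxy above leans on the SCGI framing to know how much to read before forwarding: a request begins with a netstring of NUL-separated headers whose first pair is CONTENT_LENGTH, so the expected total size falls out of the first chunk. A minimal standalone sketch of that calculation, assuming a well-formed request:

    def scgi_expected_length(first_chunk: bytes) -> int:
        """Total request size implied by the first chunk of an SCGI request.

        SCGI sends a netstring of NUL-separated headers, the first of which is
        CONTENT_LENGTH; the header block ends at the first comma and the body
        follows immediately after it.
        """
        header = first_chunk.partition(b',')[0]      # netstring up to the comma
        body_len = int(header.split(b'\x00')[1])     # value of CONTENT_LENGTH
        return len(header) + 1 + body_len            # +1 for the trailing comma

    # Example: a request with an empty body
    raw = b'24:CONTENT_LENGTH\x000\x00SCGI\x001\x00,'
    assert scgi_expected_length(raw) == len(raw)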
@@ -1,55 +1,56 @@
var socket;

function load() {
socket = init_websocket();
let intervalID = window.setInterval(update, 5000);
//let intervalID = window.setInterval(get_active_torrents_ajax, 20000);
let intervalID = window.setInterval(get_active_torrents, 20000);
}

function init_websocket() {
let socket = new WebSocket('wss://' + window.location.hostname + ws_uri);
socket._send = socket.send;
socket.send = function(event_title, data) {
data = JSON.stringify({event: event_title, data: data});
if (socket.readyState == 0) {
console.log("Socket is still opening!");
return;
function get_active_torrents_ajax() {
let httpRequest;
httpRequest = new XMLHttpRequest();
httpRequest.onreadystatechange = function() {
if (httpRequest.readyState !== XMLHttpRequest.DONE) { return; }
if (httpRequest.status !== 200) { return; }
torrents = JSON.parse(httpRequest.responseText);
let html_str = '';
for (let i = 0; i < torrents.length; i++) {
html_str += '<tr>'
html_str += '<td class="name">' + torrents[i].name + '</td>';
html_str += '<td class="total_size_str">' + torrents[i].total_size_str + '</td>';
html_str += '<td class="state">' + torrents[i].state + '</td>';
html_str += '<td class="downrate_str">' + torrents[i].downrate_str + '</td>';
html_str += '<td class="down_percent">' + torrents[i].down_percent + '%</td>';
html_str += '<td class="eta_str">' + torrents[i].eta_str + '</td>';
html_str += '<td class="uprate_str">' + torrents[i].uprate_str + '</td>';
html_str += '<td class="tracker">' + torrents[i].tracker + '</td>';
html_str += '<td class="rtorrent_id">' + torrents[i].rtorrent_id + '</td>';
html_str += '</tr>';
}
socket._send(data);
}
socket.onmessage = onmessage;
socket.onclose = onclose;
socket.onerror = onerror;
socket.events = {};
socket.events['active_torrents'] = active_torrents_recv;
socket.events['tracker_stats'] = tracker_stats_recv;
return socket;
document.getElementById('torrents').children[1].innerHTML = html_str;
};
httpRequest.open('GET', get_torrents_uri, true);
httpRequest.send();
}

function onmessage (e) {
let data = JSON.parse(e.data);
let event = data.event;
data = data.data;
if (socket.events[event] === undefined) {
console.log("Unknown socket event: " + event);
return;
}
socket.events[event](data);
var socket = new WebSocket('wss://' + window.location.hostname + ws_uri);
socket.oldSend = socket.send;
socket.send = function(event_title, data) {
data = JSON.stringify({event: event_title, data: data});
socket.oldSend.apply(this, [data]);
}

function onclose(e) {
if (e.wasClean) { return; } // no need to reconnect
console.log(e);
console.log('Websocket lost connection to server. Re-trying...');
socket = init_websocket();
socket.events = {};
socket.onmessage = function(e) {
let data = JSON.parse(e.data);
let event = data.event;
data = data.data;
if (socket.events[event] === undefined) { return; }
socket.events[event](data);
}

function onerror(e) {
console.log("Websocket error!")
console.log(e);
socket.onclose = function(e) {
console.log('WebSocket lost connection to server. Re-trying...');
// TODO: reconnect
}

/* Websocket receive */
function active_torrents_recv(data) {
socket.events['active_torrents'] = function(data) {
let table = document.querySelector('#torrents tbody');
while (table.firstChild) {
table.removeChild(table.firstChild);
@@ -63,20 +64,8 @@ function active_torrents_recv(data) {
table.appendChild(node);
});
}
function tracker_stats_recv(data) {
let table = document.querySelector('#tracker_stats tbody');
while (table.firstChild) {
table.removeChild(table.firstChild);
}
for (let [tracker, values] of Object.entries(data)) {
let template = document.querySelector('#tracker_template');
let node = document.importNode(template.content, true);
node.children[0].children[0].textContent = tracker;
for (let i = 0; i < values.length; i++) {
node.children[0].children[i+1].textContent = values[i];
}
table.appendChild(node);
}
socket.events['tracker_stats'] = function(data) {
console.log(data);
}

/* Websocket send */
@@ -86,9 +75,3 @@ function get_active_torrents() {
function get_tracker_stats() {
socket.send('tracker_stats', {});
}

/* Helper */
function update() {
get_active_torrents();
get_tracker_stats();
}
@@ -4,6 +4,7 @@
    <title>Aberrant</title>
    <link rel="stylesheet" type="text/css" href="/static/aberrant.css">
    <script>
        const get_torrents_uri = "{{ request.app.router['active-torrents'].url_for() }}";
        const ws_uri = "{{ request.app.router['ws'].url_for() }}";
    </script>
    <script type="text/javascript" src="/static/aberrant.js"></script>
@@ -74,13 +75,5 @@
    {% endfor %}
    </tbody>
</table>
<template id="tracker_template">
    <tr>
        <td class="tracker_stat"></td>
        <td class="hashing"></td>
        <td class="error"></td>
        <td class="total"></td>
    </tr>
</template>
</body>
</html>
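The template hunk above derives `get_torrents_uri` and `ws_uri` from named routes via `url_for()` instead of hard-coding paths, so the JavaScript does not bake in the mount point. A minimal sketch of resolving a named aiohttp route (the 'ws' name is reused from the diff; the handler is a placeholder):

    from aiohttp import web

    routes = web.RouteTableDef()

    @routes.get('/ws', name='ws')
    async def ws_placeholder(request):
        return web.Response(text='ws')

    app = web.Application()
    app.add_routes(routes)

    # In a handler or template this is request.app.router['ws'].url_for()
    print(app.router['ws'].url_for())  # -> /ws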