Compare commits

13 Commits:

52f4829f33
3d71b50384
342c1a7c87
99490dce06
d2ee6379db
485d1e0e4f
3f1d8c4ae4
b43038cf18
b12cec3091
f7d535131d
2a5ca2df16
bfb968fc4f
79616c5ed5
.gitignore (1 change)

@@ -2,3 +2,4 @@ __pycache__/
 *.swp
 *.swo
 sync.sh
+config.py
LICENSE (new file, 15 lines)

@@ -0,0 +1,15 @@
+ISC License
+
+Copyright (c) 2019, iou1name <iou1name@steelbea.me>
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
README.md

@@ -10,4 +10,4 @@ Python packages: `gunicorn aiohttp aiohttp_jinja2`
 2. Walk the dinosaur
 
 ## Usage
-`gunicorn aberrant:app --bind localhost:5250 --worker-class aiohttp.GunicornWebWorker`
+`gunicorn aberrant:init_app --bind localhost:5250 --worker-class aiohttp.GunicornWebWorker`
aberrant.py (54 changes)

@@ -3,6 +3,7 @@
 The primary module for serving the Aberrant application.
 """
 import json
+import asyncio
 
 import jinja2
 import aiohttp_jinja2
@@ -12,31 +13,23 @@ from aiohttp_jinja2 import render_template
 import config
 import events
 import rtorrent
+import buckler_aiohttp
 
-app = web.Application()
-app.on_shutdown.append(rtorrent.stop_watch)
-aiohttp_jinja2.setup(app, loader=jinja2.FileSystemLoader('templates'))
-rtorrent.init()
-
 routes = web.RouteTableDef()
 
-@routes.get(config.prefix + "/", name='index')
+@routes.get('/', name='index')
 async def index(request):
     """The index page."""
     torrents = rtorrent.get_active()
     tracker_stats = rtorrent.get_stats()
     return render_template("index.html", request, locals())
 
-@routes.get(config.prefix + "/get_active_torrents", name='active-torrents')
-async def get_active_torrents(request):
-    """Returns all active torrents formatted as JSON."""
-    data = [vars(t) for t in rtorrent.get_active()]
-    return web.json_response(data)
-
-@routes.get(config.prefix + '/ws', name='ws')
+@routes.get('/ws', name='ws')
 async def websocket_handler(request):
     """The websocket endpoint."""
-    ws = web.WebSocketResponse()
+    ws = web.WebSocketResponse(heartbeat=30)
     ws_ready = ws.can_prepare(request)
     if not ws_ready.ok:
         return web.Response(text="Cannot start websocket.")
@@ -59,7 +52,40 @@ async def websocket_handler(request):
     await ws.close()
     return ws
 
-app.router.add_routes(routes)
+async def watcher_loop():
+    try:
+        while True:
+            await rtorrent.update_torrents()
+            await asyncio.sleep(10)
+    except asyncio.CancelledError:
+        return
+
+
+async def start_background_tasks(app):
+    rtorrent.init()
+    app['watcher'] = asyncio.create_task(watcher_loop())
+
+
+async def cleanup_background_tasks(app):
+    app['watcher'].cancel()
+    await app['watcher']
+
+
+async def init_app():
+    """Initializes the application."""
+    app = web.Application(middlewares=[buckler_aiohttp.buckler_session])
+    aiohttp_jinja2.setup(app, loader=jinja2.FileSystemLoader('templates'))
+    app.on_startup.append(start_background_tasks)
+    app.on_cleanup.append(cleanup_background_tasks)
+
+    app.router.add_routes(routes)
+
+    app_wrap = web.Application()
+    app_wrap.add_subapp(config.url_prefix, app)
+    return app_wrap
+
 
 if __name__ == "__main__":
-    aiohttp.web.run_app(app, host='0.0.0.0', port=5250)
+    app = init_app()
+    web.run_app(app, host='0.0.0.0', port=5250)
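This change replaces the module-level setup with an async init_app() factory: background polling moves into on_startup/on_cleanup hooks, the Buckler middleware is attached, and the real app is mounted under config.url_prefix via a wrapper application. A minimal sketch of that factory-plus-subapp pattern in isolation (the handler, task, and prefix below are placeholders, not the repository's code):

# minimal sketch of the aiohttp factory + sub-app pattern used by init_app();
# handler, background task, and prefix are illustrative placeholders
import asyncio
from aiohttp import web

async def hello(request):
    return web.Response(text="hello")

async def start_background_tasks(app):
    # stand-in for watcher_loop(); a real task would loop and poll
    app['ticker'] = asyncio.create_task(asyncio.sleep(3600))

async def cleanup_background_tasks(app):
    app['ticker'].cancel()

async def init_app():
    app = web.Application()
    app.router.add_get('/', hello)
    app.on_startup.append(start_background_tasks)
    app.on_cleanup.append(cleanup_background_tasks)

    wrapper = web.Application()           # outer app only owns the prefix
    wrapper.add_subapp('/aberrant', app)  # every route now lives under /aberrant
    return wrapper

if __name__ == '__main__':
    # run_app accepts the coroutine factory directly, which is also why the
    # README's gunicorn command changes to `aberrant:init_app`
    web.run_app(init_app())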
buckler_aiohttp.py (new file, 72 lines)

@@ -0,0 +1,72 @@
+#!/usr/bin/env python3
+"""
+Session interface middlewares to integrate the aiohttp app with Buckler.
+"""
+import json
+from datetime import datetime
+
+import aiohttp
+from aiohttp import web
+
+import config
+
+@web.middleware
+async def buckler_session(request, handler):
+    """
+    Verifies the user with the configured Buckler app and retrieves any
+    session data they may have. Redirects them to the login page otherwise.
+    """
+    user_id = request.cookies.get('userid', '')
+    user_sid = request.cookies.get('session', '')
+
+    url = config.buckler['url'] + '/get_session'
+    params = {
+        'app_id': config.buckler['app_id'],
+        'app_key': config.buckler['app_key'],
+        'userid': user_id,
+        'session': user_sid }
+    async with aiohttp.ClientSession() as session:
+        async with session.get(url, params=params) as resp:
+            data = await resp.json()
+        if data.get('error'):
+            resp = web.HTTPFound(config.buckler['login_url'])
+            resp.set_cookie(
+                'redirect',
+                request.url,
+                domain=config.server_domain,
+                secure=True,
+                httponly=True)
+                #samesite='strict')
+            raise resp
+        request['session'] = data['session_data']
+        request['meta'] = data['meta']
+
+        resp = await handler(request)
+
+        if request['session'] != data['session_data']: # session data modified
+            url = config.buckler['url'] + '/set_session'
+            data = json.dumps(request['session'])
+            session.post(url, params=params, data=data) # TODO: error handle?
+
+    last_used = datetime.fromisoformat(request['meta']['last_used'])
+    now = datetime.now(last_used.tzinfo)
+    delta = now - last_used
+    if delta.seconds > 600:
+        resp.set_cookie(
+            'userid',
+            user_id,
+            domain=config.server_domain,
+            max_age=30*24*60*60,
+            secure=True,
+            httponly=True)
+            #samesite='strict')
+        resp.set_cookie(
+            'session',
+            user_sid,
+            domain=config.server_domain,
+            max_age=30*24*60*60,
+            secure=True,
+            httponly=True)
+            #samesite='strict')
+
+    return resp
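buckler_aiohttp.buckler_session is what init_app() now lists in web.Application(middlewares=[...]): it loads the Buckler session before the handler runs and posts it back afterwards if the handler replaced it. A short sketch of a downstream handler under that contract (the handler and the 'visits' key are illustrative, not part of the repository):

# illustrative handler running behind buckler_session; name and key are made up
from aiohttp import web

async def counter(request):
    session = dict(request['session'])   # copy; populated by the middleware before the handler runs
    session['visits'] = session.get('visits', 0) + 1
    request['session'] = session         # reassigning lets the middleware's != check see the change
    return web.json_response({'visits': session['visits']})

Note that the write-back check compares request['session'] against the dict fetched from Buckler, so a handler has to assign a new object (as above) rather than mutate the original in place for the change to be detected.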
config.py (deleted, 5 lines)

@@ -1,5 +0,0 @@
-#!/usr/bin/env python3
-"""
-Configuation settings for Aberrant.
-"""
-prefix = '/aberrant'
config.py.template (new file, 17 lines)

@@ -0,0 +1,17 @@
+#!/usr/bin/env python3
+"""
+Configuation settings for Aberrant.
+"""
+rtorrents = [
+    'scgi://localhost:5000',
+    'scgi://localhost:5001',
+]
+
+server_domain = 'steelbea.me'
+url_prefix = '/aberrant'
+buckler = {
+    'url': "http://192.168.1.100:5400/buckler",
+    'app_id': 1,
+    'app_key': """password""",
+    'login_url': "/buckler/login",
+}
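Because config.py is now in .gitignore and only config.py.template is tracked, a fresh checkout needs a local copy of the template before the app can import config. One way to do that one-time step, sketched as a hypothetical helper (not part of the repository):

# hypothetical bootstrap helper: copy the tracked template into place once,
# then edit the copy with real values before running Aberrant
import pathlib
import shutil

if not pathlib.Path('config.py').exists():
    shutil.copy('config.py.template', 'config.py')
    print("Created config.py -- edit rtorrents, server_domain, url_prefix and buckler.")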
events.py

@@ -12,6 +12,12 @@ async def active_torrents(ws, data):
     res = {'event': 'active_torrents', 'data': data}
     await ws.send_json(res)
 
+async def tracker_stats(ws, data):
+    """Returns tracker stats."""
+    data = rtorrent.get_stats()
+    res = {'event': 'tracker_stats', 'data': data}
+    await ws.send_json(res)
+
 events = {}
 for obj in dir():
     if type(locals()[obj]) == types.FunctionType:
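The events dict at the end of events.py registers every module-level function by name so the websocket handler can dispatch incoming {'event': ..., 'data': ...} messages; the new tracker_stats handler is therefore picked up automatically. A standalone sketch of that registry pattern, with the loop body filled in as a guess since the hunk cuts off before it (names here are illustrative):

# illustrative name -> function registry, in the spirit of events.py
import types

async def ping(ws, data):
    # example handler; any module-level function gets registered
    await ws.send_json({'event': 'ping', 'data': data})

events = {}
for obj in dir():
    if type(locals()[obj]) == types.FunctionType:
        events[obj] = locals()[obj]   # assumed registration step, not shown in the hunk

# a message like {"event": "ping", "data": {}} would then be dispatched as:
#     await events[msg['event']](ws, msg['data'])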
rtorrent.py (47 changes)

@@ -4,15 +4,14 @@ This module handles the interface with rTorrent via XMLRPC.
 """
 import re
 import time
-import threading
+import asyncio
 from collections import defaultdict
 
+import config
 import rtorrent_xmlrpc
 
-NUM_INST = 10
-WATCH_HANDLE = None
 sp = []
-torrents = [[]] * NUM_INST
+torrents = [[] for _ in range(len(config.rtorrent_insts))]
 
 class Torrent:
     def __init__(self, raw, rt_id):
@@ -55,26 +54,12 @@ class Torrent:
         self.hashing = raw[10]
 
 
-class Watch(threading.Thread):
-    """A thread class that continously queries the rTorrent instances."""
-    def __init__(self):
-        super(Watch, self).__init__()
-        self._stop_event = threading.Event()
-
-    def stop(self):
-        self._stop_event.set()
-
-    def stopped(self):
-        return self._stop_event.is_set()
-
-    def run(self):
-        global torrents
-        while not self.stopped():
-            for n in range(NUM_INST):
-                if self.stopped():
-                    break
-                torrents[n] = get_all(n)
-            self._stop_event.wait(2)
+async def update_torrents():
+    for n in range(len(torrents)):
+        try:
+            torrents[n] = await asyncio.wait_for(get_all(n), 5)
+        except asyncio.TimeoutError:
+            continue
 
 
 def size_units(rate):
@@ -126,7 +111,7 @@ def all_torrents():
         res += item
     return res
 
-def get_all(n):
+async def get_all(n):
     """Gets all torrent information from a instance and returns it."""
     res = sp[n].d.multicall2('', 'main',
         'd.hash=',
@@ -145,18 +130,10 @@ def get_all(n):
 
 def init():
     """Initializes the rTorrent interface."""
-    global WATCH_HANDLE
     global sp
-    for n in range(NUM_INST):
-        s = rtorrent_xmlrpc.SCGIServerProxy(f"scgi://localhost:500{n}")
+    for rt_port in config.rtorrent_insts:
+        s = rtorrent_xmlrpc.SCGIServerProxy(rt_port)
         sp.append(s)
-    WATCH_HANDLE = Watch()
-    WATCH_HANDLE.start()
-
-async def stop_watch(*args, **kwargs):
-    """Stops the watch thread."""
-    global WATCH_HANDLE
-    WATCH_HANDLE.stop()
 
 def get_active():
     """Returns all actively seeding or leeching torrents."""
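The threaded Watch class gives way to update_torrents(), a coroutine that aberrant.py's watcher_loop() now calls every 10 seconds and that allows each rTorrent instance 5 seconds before moving on. A self-contained sketch of that asyncio.wait_for polling pattern (the fetch function below is a stand-in, not the repository's get_all):

# standalone sketch of the timeout-guarded polling pattern adopted here
import asyncio

async def fetch_instance(n):
    # stand-in for a per-instance query; the real get_all() wraps a blocking
    # SCGI/XML-RPC call, so a slow instance would otherwise stall the poll
    await asyncio.sleep(0.1)
    return [f"torrent-from-instance-{n}"]

async def poll_all(results):
    for n in range(len(results)):
        try:
            # give each instance 5 seconds, then move on to the next one
            results[n] = await asyncio.wait_for(fetch_instance(n), 5)
        except asyncio.TimeoutError:
            continue

async def main():
    results = [[] for _ in range(3)]
    while True:
        await poll_all(results)
        await asyncio.sleep(10)   # same cadence as watcher_loop() in aberrant.py

# asyncio.run(main())  # runs forever; cancel the task to stop, as cleanup_background_tasks() does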
rtorrent_proxy.py (new file, 100 lines)

@@ -0,0 +1,100 @@
+#!/usr/bin/env python3
+"""
+This file handles a rather silly use case:
+You have two servers, A and B, each with various rtorrent instances running.
+You want one instance of Aberrant, running on server A. In your infinite
+wisdom, you bound the SCGI ports of the rtorrent instances on server B to
+localhost, leaving Aberrant unable to query them. Rtorrent won't allow you
+to rebind the SCGI port while it's running, and you don't want to restart
+the rtorrent instance(s) because restarting stuff sucks. Therefore, you
+need another way for Aberrant to communicate to them.
+
+This script acts as a proxy to the rtorrent instances on server B. It will
+listen on a port (don't forget to open the port in your iptables), and
+relay all data it receives to the torrent SCGI port and vice versa.
+
+To use:
+Configure `port_mapping` in the form `listen_port`: `rtorrent_port`.
+Run the script on server B.
+"""
+import socket
+import threading
+
+port_mapping = {
+    4000: 5000,
+    4001: 5001,
+    4002: 5002,
+}
+
+class SocketProxy(threading.Thread):
+    """Represents a proxy to an rTorrent SCGI port."""
+    def __init__(self, listen_port, rtorrent_port):
+        super().__init__()
+        self.listen_port = listen_port
+        self.rtorrent_port = rtorrent_port
+        self._stop_event = threading.Event()
+
+    def stop(self):
+        """Sets the internal 'stop running' flag."""
+        self._stop_event.set()
+
+    def stopped(self):
+        """Returns true if the internal stop flag has been set."""
+        return self._stop_event.is_set()
+
+    def run(self):
+        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
+            s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+            s.settimeout(1)
+            s.bind(('0.0.0.0', self.listen_port))
+            s.listen(1)
+            while not self.stopped():
+                try:
+                    conn, addr = s.accept() # client connected
+                except socket.timeout:
+                    continue
+                with conn:
+                    # receive data from client
+                    data = []
+                    data.append(conn.recv(1024)) # the header really shouldn't be longer than this...
+                    header = data[0].partition(b',')[0]
+                    body_len = int(header.split(b'\x00')[1])
+                    recv_len = len(data[0])
+                    while recv_len < body_len + len(header) + 1:
+                        data.append(conn.recv(1024))
+                        recv_len += len(data[-1])
+                    data = b''.join(data)
+
+                    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as rt_conn:
+                        rt_conn.connect(('127.0.0.1', self.rtorrent_port))
+                        rt_conn.sendall(data) # send to rtorrent
+
+                        # receive data from rtorrent
+                        data = []
+                        data.append(rt_conn.recv(1024))
+                        header = data[0].partition(b'<?xml')[0]
+                        body_len = header.split(b'\r\n')[2].partition(b' ')[2]
+                        body_len = int(body_len)
+                        recv_len = len(data[0])
+                        while recv_len < body_len + len(header):
+                            data.append(rt_conn.recv(1024))
+                            recv_len += len(data[-1])
+                        data = b''.join(data)
+                    conn.sendall(data) # send to client
+
+def run_proxies():
+    """Run all proxies."""
+    proxies = []
+    for listen_port, rtorrent_port in port_mapping.items():
+        p = SocketProxy(listen_port, rtorrent_port)
+        p.start()
+        proxies.append(p)
+
+    try:
+        proxies[0].join()
+    except KeyboardInterrupt:
+        for p in proxies:
+            p.stop()
+
+if __name__ == '__main__':
+    run_proxies()
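SocketProxy.run leans on SCGI request framing: a netstring of NUL-separated headers, with CONTENT_LENGTH required to come first, terminated by a comma and followed by the raw body. A small illustrative example of building such a frame and recovering the body length the same way the proxy does (the body content here is made up):

# illustrative SCGI request framing, mirroring what SocketProxy.run parses
body = b"<?xml version='1.0'?><methodCall>...</methodCall>"

# headers are NUL-separated key/value pairs; CONTENT_LENGTH must come first
headers = b"CONTENT_LENGTH\x00" + str(len(body)).encode() + b"\x00SCGI\x001\x00"
request = str(len(headers)).encode() + b":" + headers + b"," + body  # netstring + body

# the proxy recovers the body length the same way:
header_part = request.partition(b',')[0]             # everything before the first comma
content_length = int(header_part.split(b'\x00')[1])  # value following CONTENT_LENGTH
assert content_length == len(body)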
static/aberrant.js

@@ -1,56 +1,55 @@
+var socket;
+
 function load() {
-    //let intervalID = window.setInterval(get_active_torrents_ajax, 20000);
-    let intervalID = window.setInterval(get_active_torrents, 20000);
+    socket = init_websocket();
+    let intervalID = window.setInterval(update, 5000);
 }
 
-function get_active_torrents_ajax() {
-    let httpRequest;
-    httpRequest = new XMLHttpRequest();
-    httpRequest.onreadystatechange = function() {
-        if (httpRequest.readyState !== XMLHttpRequest.DONE) { return; }
-        if (httpRequest.status !== 200) { return; }
-        torrents = JSON.parse(httpRequest.responseText);
-        let html_str = '';
-        for (let i = 0; i < torrents.length; i++) {
-            html_str += '<tr>'
-            html_str += '<td class="name">' + torrents[i].name + '</td>';
-            html_str += '<td class="total_size_str">' + torrents[i].total_size_str + '</td>';
-            html_str += '<td class="state">' + torrents[i].state + '</td>';
-            html_str += '<td class="downrate_str">' + torrents[i].downrate_str + '</td>';
-            html_str += '<td class="down_percent">' + torrents[i].down_percent + '%</td>';
-            html_str += '<td class="eta_str">' + torrents[i].eta_str + '</td>';
-            html_str += '<td class="uprate_str">' + torrents[i].uprate_str + '</td>';
-            html_str += '<td class="tracker">' + torrents[i].tracker + '</td>';
-            html_str += '<td class="rtorrent_id">' + torrents[i].rtorrent_id + '</td>';
-            html_str += '</tr>';
-        }
-        document.getElementById('torrents').children[1].innerHTML = html_str;
-    };
-    httpRequest.open('GET', get_torrents_uri, true);
-    httpRequest.send();
-}
-
-var socket = new WebSocket('wss://' + window.location.hostname + ws_uri);
-socket.oldSend = socket.send;
-socket.send = function(event_title, data) {
-    data = JSON.stringify({event: event_title, data: data});
-    socket.oldSend.apply(this, [data]);
+function init_websocket() {
+    let socket = new WebSocket('wss://' + window.location.hostname + ws_uri);
+    socket._send = socket.send;
+    socket.send = function(event_title, data) {
+        data = JSON.stringify({event: event_title, data: data});
+        if (socket.readyState == 0) {
+            console.log("Socket is still opening!");
+            return;
+        }
+        socket._send(data);
+    }
+    socket.onmessage = onmessage;
+    socket.onclose = onclose;
+    socket.onerror = onerror;
+    socket.events = {};
+    socket.events['active_torrents'] = active_torrents_recv;
+    socket.events['tracker_stats'] = tracker_stats_recv;
+    return socket;
 }
-socket.events = {};
 
-socket.onmessage = function(e) {
+function onmessage (e) {
     let data = JSON.parse(e.data);
     let event = data.event;
     data = data.data;
-    if (socket.events[event] === undefined) { return; }
-    socket.events[event](data);
+    if (socket.events[event] === undefined) {
+        console.log("Unknown socket event: " + event);
+        return;
+    }
+    socket.events[event](data);
 }
 
-socket.onclose = function(e) {
-    console.log('WebSocket lost connection to server. Re-trying...');
-    // TODO: reconnect
+function onclose(e) {
+    if (e.wasClean) { return; } // no need to reconnect
+    console.log(e);
+    console.log('Websocket lost connection to server. Re-trying...');
+    socket = init_websocket();
+}
+
+function onerror(e) {
+    console.log("Websocket error!")
+    console.log(e);
 }
 
 /* Websocket receive */
-socket.events['active_torrents'] = function(data) {
+function active_torrents_recv(data) {
     let table = document.querySelector('#torrents tbody');
     while (table.firstChild) {
         table.removeChild(table.firstChild);
@@ -64,8 +63,20 @@ socket.events['active_torrents'] = function(data) {
         table.appendChild(node);
     });
 }
-socket.events['tracker_stats'] = function(data) {
-    console.log(data);
+function tracker_stats_recv(data) {
+    let table = document.querySelector('#tracker_stats tbody');
+    while (table.firstChild) {
+        table.removeChild(table.firstChild);
+    }
+    for (let [tracker, values] of Object.entries(data)) {
+        let template = document.querySelector('#tracker_template');
+        let node = document.importNode(template.content, true);
+        node.children[0].children[0].textContent = tracker;
+        for (let i = 0; i < values.length; i++) {
+            node.children[0].children[i+1].textContent = values[i];
+        }
+        table.appendChild(node);
+    }
 }
 
 /* Websocket send */
@@ -75,3 +86,9 @@ function get_active_torrents() {
 function get_tracker_stats() {
     socket.send('tracker_stats', {});
 }
+
+/* Helper */
+function update() {
+    get_active_torrents();
+    get_tracker_stats();
+}
templates/index.html

@@ -4,7 +4,6 @@
     <title>Aberrant</title>
     <link rel="stylesheet" type="text/css" href="/static/aberrant.css">
     <script>
-        const get_torrents_uri = "{{ request.app.router['active-torrents'].url_for() }}";
         const ws_uri = "{{ request.app.router['ws'].url_for() }}";
     </script>
     <script type="text/javascript" src="/static/aberrant.js"></script>
@@ -75,5 +74,13 @@
     {% endfor %}
     </tbody>
 </table>
+<template id="tracker_template">
+    <tr>
+        <td class="tracker_stat"></td>
+        <td class="hashing"></td>
+        <td class="error"></td>
+        <td class="total"></td>
+    </tr>
+</template>
 </body>
 </html>