Aberrant/rtorrent.py

186 lines
4.2 KiB
Python
Raw Normal View History

2019-01-16 12:49:42 -05:00
#!/usr/bin/env python3
"""
This module handles the interface with rTorrent via XMLRPC.
"""
2019-02-14 15:00:21 -05:00
import re
import time
import threading
from collections import defaultdict
2019-01-16 12:49:42 -05:00
2019-10-23 12:41:24 -04:00
import config
import rtorrent_xmlrpc
2019-02-14 15:00:21 -05:00
WATCH_HANDLE = None
sp = []
2019-10-23 12:41:24 -04:00
torrents = [[]] * len(config.rtorrent_insts)
2019-02-13 10:20:55 -05:00
class Torrent:
    """One torrent's state, decoded from a d.multicall2() result row.

    ``raw`` is a positional result list produced by get_all(); each index
    below corresponds to the order of the d.* accessors in that call.
    ``rt_id`` is the index of the rTorrent instance the torrent lives on.
    """

    def __init__(self, raw, rt_id):
        self.rtorrent_id = rt_id

        self.hash = raw[0]        # d.hash
        self.name = raw[1]        # d.name

        self.active = raw[2]      # d.is_active
        self.complete = raw[3]    # d.complete
        if not self.active:
            self.state = "inactive"
        elif self.complete:
            self.state = "seeding"
        else:
            self.state = "leeching"

        self.downrate = raw[4]    # d.down.rate, bytes/s
        self.downrate_str = size_units(self.downrate) + '/s'
        self.uprate = raw[5]      # d.up.rate, bytes/s
        self.uprate_str = size_units(self.uprate) + '/s'

        # raw[6] is d.directory; the tracker name is parsed from the path.
        self.tracker = get_tracker(raw[6])
        self.down_total = raw[7]  # d.completed_bytes
        self.total_size = raw[8]  # d.size_bytes
        self.total_size_str = size_units(self.total_size)
        # Guard against zero-size torrents (e.g. a freshly added magnet link
        # before its metadata arrives), which would otherwise raise
        # ZeroDivisionError here.
        if self.total_size:
            self.down_percent = round((self.down_total / self.total_size) * 100, 2)
        else:
            self.down_percent = 0.0

        if self.state == "leeching":
            if self.downrate:
                self.eta = (self.total_size - self.down_total) / self.downrate
                self.eta_str = time_units(self.eta)
            else:
                # Stalled download: no rate means no finite estimate.
                self.eta = float("inf")
                self.eta_str = ""
        else:
            self.eta = 0
            self.eta_str = ""

        self.message = raw[9]     # d.message (non-empty indicates an error)
        self.hashing = raw[10]    # d.hashing
class Watch(threading.Thread):
    """Background thread that continuously polls every rTorrent instance."""

    def __init__(self):
        super().__init__()
        self._stop_event = threading.Event()

    def stop(self):
        """Ask the polling loop to exit."""
        self._stop_event.set()

    def stopped(self):
        """Return True once stop() has been requested."""
        return self._stop_event.is_set()

    def run(self):
        # Refresh each instance's torrent list in turn, then sleep briefly.
        # The event wait doubles as the sleep so stop() wakes us immediately.
        while not self.stopped():
            for idx in range(len(torrents)):
                if self.stopped():
                    break
                torrents[idx] = get_all(idx)
            self._stop_event.wait(0.5)
def size_units(rate):
    """Helper to assign appropriate prefixes to numbers."""
    label = "B"
    # Step up through the binary prefixes while the value is large enough.
    for prefix in ("KiB", "MiB", "GiB"):
        if rate <= 1024:
            break
        rate /= 1024
        label = prefix
    return str(round(rate, 1)) + label
def time_units(seconds):
    """Helper to convert seconds into more useful units."""
    seconds = int(seconds)
    day = 24 * 60 * 60
    hour = 60 * 60

    if seconds > day:
        days, rem = divmod(seconds, day)
        return f"{days}d{rem // hour}h"
    if seconds > hour:
        hours, rem = divmod(seconds, hour)
        return f"{hours}h{rem // 60}m"
    if seconds > 60:
        minutes, rem = divmod(seconds, 60)
        return f"{minutes}m{rem}s"
    return f"{seconds}s"
def get_tracker(path):
"""
At present I don't have an efficient way to get the tracker url
with the d.multicall2() function, so we parse it from the
directory path.
"""
return path.split('/')[4]
2019-02-14 15:00:21 -05:00
def all_torrents():
    """Helper that returns a flat list of every torrent across all instances."""
    flat = []
    for per_instance in torrents:
        flat.extend(per_instance)
    return flat
def get_all(n):
    """Gets all torrent information from an instance and returns it.

    ``n`` is the index of the rTorrent instance in the module-level ``sp``
    proxy list; returns a list of Torrent objects, one per torrent in the
    instance's 'main' view.
    """
    # NOTE: the order of the d.* accessors below defines the positional
    # layout of every result row; Torrent.__init__ indexes raw[0..10] by
    # that same order, so the two must stay in sync.
    res = sp[n].d.multicall2('', 'main',
                             'd.hash=',
                             'd.name=',
                             'd.is_active=',
                             'd.complete=',
                             'd.down.rate=',
                             'd.up.rate=',
                             'd.directory=',
                             'd.completed_bytes=',
                             'd.size_bytes=',
                             'd.message=',
                             'd.hashing=',
                             )
    return [Torrent(raw, n) for raw in res]
def init():
    """Initializes the rTorrent interface."""
    global WATCH_HANDLE
    global sp

    # One SCGI proxy per configured rTorrent endpoint.
    sp.extend(rtorrent_xmlrpc.SCGIServerProxy(rt_port)
              for rt_port in config.rtorrent_insts)

    WATCH_HANDLE = Watch()
    WATCH_HANDLE.start()
async def stop_watch(*args, **kwargs):
    """Stops the watch thread. Any arguments are accepted and ignored."""
    # Read-only use of the module-level handle; no `global` needed.
    WATCH_HANDLE.stop()
def get_active():
    """Returns all actively seeding or leeching torrents."""
    # "Active" here means any transfer is actually happening right now.
    return list(filter(lambda t: t.downrate or t.uprate, all_torrents()))
def get_stats():
    """Returns various statistical information about the torrents.

    Returns a dict mapping tracker name -> [hashing_count, message_count,
    total_count], sorted by tracker name, with a final 'total' key that
    aggregates the three counters across every tracker.
    """
    # defaultdict (already imported at module top, previously unused)
    # replaces the manual get()-then-insert branching.
    trackers = defaultdict(lambda: [0, 0, 0])
    for torrent in all_torrents():
        stats = trackers[torrent.tracker]
        stats[0] += 1 if torrent.hashing else 0
        stats[1] += 1 if torrent.message else 0
        stats[2] += 1

    trackers = dict(sorted(trackers.items()))
    # Compute the aggregate BEFORE inserting 'total' so it isn't counted.
    trackers['total'] = [sum(v[i] for v in trackers.values())
                         for i in range(3)]
    return trackers