third commit

This commit is contained in:
iou1name 2020-06-03 15:50:49 -04:00
parent 5c975b8725
commit 3282d38821
6 changed files with 153 additions and 64 deletions

View File

@ -6,6 +6,16 @@ Python 3.8+
Python packages: `gunicorn aiohttp aiohttp_jinja2 uvloop mutagen`
## Install
```
$ psql
postgres=# CREATE DATABASE "scorch";
postgres=# CREATE USER "scorch" WITH PASSWORD 'password';
postgres=# ALTER ROLE "scorch" SET client_encoding TO 'utf8';
postgres=# ALTER ROLE "scorch" SET default_transaction_isolation TO 'read committed';
postgres=# ALTER ROLE "scorch" SET timezone TO 'UTC';
postgres=# GRANT ALL PRIVILEGES ON DATABASE "scorch" TO "scorch";
postgres=# \q
```
1. Get on the floor
2. Walk the dinosaur

View File

@ -6,9 +6,18 @@ eg. https://example.com/scorch.
`buckler` specifies settings pertaining to the Buckler server.
"""
url_prefix = '/scorch'
music_dir = "/home/iou1name/music/Music"
db = {
'database': 'scorch',
'user': 'scorch',
'password': """password""",
'host': 'localhost',
'port': 5432,
}
buckler = {
'url': "http://127.0.0.1:5400/buckler",
'app_id': 2,
'app_key': """lol""",
'app_id': 1,
'app_key': """password""",
'login_url': "/buckler/login",
}

93
database.py Normal file
View File

@ -0,0 +1,93 @@
#!/usr/bin/env python3
"""
Database operations for Scorch.
"""
import os
import multiprocessing
from datetime import datetime, timezone
import asyncpg
import mutagen
import mutagen.mp3
import config
MUSIC_EXT = ['flac', 'mp3']
async def build_library(root_dir):
    """
    Walk *root_dir* and rebuild the ``track`` table from the audio files found.

    Files already present in the table whose mtime has not changed since the
    previous scan are reused as-is; everything else is (re-)read with mutagen.
    Tag extraction is fanned out over a multiprocessing pool.

    :param root_dir: directory to scan recursively for files whose extension
        is in ``MUSIC_EXT``.
    """
    print("Building library")

    # Collect (filepath, mtime) pairs for every supported audio file.
    filepaths = []
    for dir_name, sub_dirs, files in os.walk(root_dir):
        for file in files:
            if os.path.splitext(file)[1][1:] not in MUSIC_EXT:
                continue
            # BUG FIX: os.walk already yields dir_name rooted at root_dir;
            # joining root_dir a second time duplicated the prefix whenever
            # root_dir was a relative path.
            filepath = os.path.join(dir_name, file)
            last_modified = datetime.fromtimestamp(
                os.path.getmtime(filepath), timezone.utc)
            filepaths.append((filepath, last_modified))

    db_pool = await asyncpg.create_pool(**config.db)
    try:
        async with db_pool.acquire() as conn:
            tracks_prev = await conn.fetch("SELECT * FROM track")
        tracks_prev = {track['filepath']: track for track in tracks_prev}

        # The pool worker must be a picklable module-level callable, yet it
        # also needs to close over tracks_prev -- hence the `global` hack.
        global worker
        def worker(tup):
            """Return the row tuple for one file, reusing unmodified rows."""
            filepath, last_modified = tup
            track_prev = tracks_prev.get(filepath)
            if track_prev and track_prev['last_modified'] >= last_modified:
                return tuple(track_prev)
            return read_track(filepath)

        tracks = []
        prev_percent = 0
        with multiprocessing.Pool() as pool:
            # imap preserves input order and lets us report progress as
            # results stream in.
            for data in pool.imap(worker, filepaths):
                tracks.append(data)
                percent = round(len(tracks) / len(filepaths) * 100, 2)
                # Throttle progress output to ~2.5% increments.
                if percent >= prev_percent + 2.5:
                    print(f"{percent}%")
                    prev_percent = percent

        async with db_pool.acquire() as conn:
            # Full rebuild: wipe and re-insert every row, atomically so a
            # crash mid-insert cannot leave an empty library behind.
            async with conn.transaction():
                await conn.execute("DELETE FROM track")
                await conn.executemany(
                    "INSERT INTO track VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11)",
                    tracks
                )
    finally:
        # The original leaked the pool; close it even on failure.
        await db_pool.close()
    print("Done")
def read_track(filepath):
    """
    Read the audio file at *filepath* and extract its metadata tags.

    Returns an 11-tuple matching the column order of the ``track`` table:
    (filepath, artist, albumartist, date, album, discnumber, tracknumber,
    title, genre, length, last_modified).
    """
    if filepath.endswith("mp3"):
        # EasyMP3 maps ID3 frames onto the same friendly keys that
        # mutagen.File exposes for FLAC, so tag access below is uniform.
        m = mutagen.mp3.EasyMP3(filepath)
    else:
        m = mutagen.File(filepath)

    def tag(key):
        """First value of a possibly-missing tag, or ''."""
        return m.get(key, [''])[0]

    artist = tag('artist')
    # Fall back to the track artist when no albumartist tag is present.
    if m.get('albumartist'):
        albumartist = m.get('albumartist', [''])[0]
    else:
        albumartist = artist
    last_modified = datetime.fromtimestamp(
        os.path.getmtime(filepath), timezone.utc)

    # BUG FIX: the original returned tuple(locals().values()) after popping
    # 'm', which silently depended on local-variable declaration order
    # matching the table schema. Build the row explicitly instead.
    return (
        filepath,
        artist,
        albumartist,
        tag('date'),
        tag('album'),
        tag('discnumber'),
        tag('tracknumber'),
        tag('title'),
        tag('genre'),
        m.info.length,       # duration in seconds
        last_modified,       # file mtime as an aware UTC datetime
    )

View File

@ -4,34 +4,27 @@ A music streaming application.
"""
import asyncio
import aiohttp
from aiohttp import web, WSMsgType
import jinja2
import aiohttp_jinja2
from aiohttp_jinja2 import render_template
import uvloop
import asyncpg
import config
import database
import buckler_aiohttp
uvloop.install()
routes = web.RouteTableDef()
with open ('test.opus', 'rb') as file:
test_data = file.read()
def chunker(seq, size):
return (seq[pos:pos + size] for pos in range(0, len(seq), size))
async def send_files(ws):
for n, chunk in enumerate(chunker(test_data, 200*1024)):
print(f"sending packet #{n}")
await ws.send_bytes(chunk)
await asyncio.sleep(5)
@routes.get('/', name='index')
async def index(request):
"""The index page."""
async with request.app['pool'].acquire() as conn:
artists = await conn.execute(
"SELECT DISTINCT albumartist FROM track ORDER BY albumartist ASC")
return render_template('index.html', request, {})
@ -52,11 +45,6 @@ async def websocket_handler(request):
print('ping')
await ws.send_str("pong")
if msg.data == 'test':
print('initiating test')
asyncio.create_task(send_files(ws))
await ws.close()
return ws
@ -71,6 +59,10 @@ async def init_app():
undefined=jinja2.StrictUndefined,
loader=jinja2.FileSystemLoader('templates'),
)
app['pool'] = await asyncpg.create_pool(**config.db)
async with app['pool'].acquire() as conn:
with open('scorch.sql', 'r') as file:
await conn.execute(file.read())
app.router.add_routes(routes)
@ -80,5 +72,8 @@ async def init_app():
if __name__ == "__main__":
app = init_app()
aiohttp.web.run_app(app, host='0.0.0.0', port=5500)
import sys
if len(sys.argv) > 1:
asyncio.run(database.build_library(config.music_dir))
#app = asyncio.run(init_app())
#aiohttp.web.run_app(app, host='0.0.0.0', port=5500)

13
scorch.sql Normal file
View File

@ -0,0 +1,13 @@
-- Music library: one row per audio file discovered under the music directory.
-- NOTE: column order is load-bearing -- database.py inserts rows positionally
-- via "INSERT INTO track VALUES ($1, ..., $11)"; do not reorder columns.
CREATE TABLE IF NOT EXISTS track (
filepath TEXT PRIMARY KEY,  -- path of the audio file; doubles as the unique key
artist TEXT,
albumartist TEXT,  -- falls back to artist when the tag is absent (see read_track)
date TEXT,
album TEXT,
discnumber TEXT,
tracknumber TEXT,
title TEXT,
genre TEXT,
length FLOAT,  -- duration in seconds (mutagen info.length)
last_modified TIMESTAMP WITH TIME ZONE  -- file mtime in UTC; used to skip re-reading unchanged files
)

View File

@ -1,27 +1,4 @@
var socket;
var audioCtx = new window.AudioContext();
var source = audioCtx.createBufferSource();
var total_raw;
function appendBuffer(buffer1, buffer2) {
let numberOfChannels = Math.min( buffer1.numberOfChannels, buffer2.numberOfChannels );
let tmp = audioCtx.createBuffer( numberOfChannels, (buffer1.length + buffer2.length), buffer1.sampleRate );
for (let i=0; i<numberOfChannels; i++) {
let channel = tmp.getChannelData(i);
channel.set( buffer1.getChannelData(i), 0);
channel.set( buffer2.getChannelData(i), buffer1.length);
}
return tmp;
}
function appendBuffer2(buffer1, buffer2) {
var tmp = new Uint8Array(buffer1.byteLength + buffer2.byteLength);
var buff1 = new Uint8Array(buffer1);
var buff2 = new Uint8Array(buffer2);
tmp.set(buff1, 0);
tmp.set(buff2, buffer1.byteLength);
return tmp.buffer;
};
function load() {
socket = init_websocket();
@ -30,7 +7,7 @@ function load() {
/* Websocket setup */
function init_websocket() {
let socket = new WebSocket('wss://' + window.location.hostname + ws_uri);
socket.binaryType = "arraybuffer";
socket.send_event = send_event;
socket.onmessage = onmessage;
socket.onclose = onclose;
socket.onerror = onerror;
@ -38,25 +15,17 @@ function init_websocket() {
return socket;
}
/* Serialize an application event and send it over the websocket.
 * `event_title` names the event; `data` is its JSON-serializable payload.
 * Messages are silently dropped (with a console note) while the socket is
 * still CONNECTING (readyState 0), since send() would fail at that point. */
function send_event(event_title, data) {
data = JSON.stringify({'event': event_title, 'data': data});
if (socket.readyState == 0) {
console.log("Socket is still opening!");
return;
}
socket.send(data);
}
/* Websocket message handler.
 * Binary frames (ArrayBuffer) are treated as streamed audio: each chunk is
 * appended to the accumulated raw buffer, the whole buffer is re-decoded,
 * and playback is restarted on a fresh BufferSource.
 * NOTE(review): this branch depends on appendBuffer2/source/total_raw,
 * which this same commit deletes from the top of the file -- these lines
 * look like removed diff context; confirm against the post-commit file. */
function onmessage(e) {
console.log(e);
if (e.data.constructor === ArrayBuffer) {
console.log(e.data.byteLength);
if (source.buffer != null) {
// Grow the accumulated raw stream with the new chunk.
total_raw = appendBuffer2(total_raw, e.data);
} else {
total_raw = e.data;
}
// decodeAudioData detaches its input, so decode a copy via slice(0).
audioCtx.decodeAudioData(total_raw.slice(0)).then(function(decodedBuffer) {
if (source.buffer != null) {
// Stop the old source; BufferSources are single-use.
source.stop();
}
source = audioCtx.createBufferSource();
source.buffer = decodedBuffer;
source.connect(audioCtx.destination);
// NOTE(review): start(when, offset) -- passing currentTime as the
// *offset* looks suspicious; verify intended resume behavior.
source.start(0, audioCtx.currentTime);
});
}
}
function onclose(e) {