third commit

commit 3282d38821
parent 5c975b8725

README.md (10 lines changed)
@@ -6,6 +6,16 @@ Python 3.8+
 Python packages: `gunicorn aiohttp aiohttp_jinja2 uvloop mutagen`
 
 ## Install
+```
+$ psql
+postgres=# CREATE DATABASE "scorch";
+postgres=# CREATE USER "scorch" WITH PASSWORD 'password';
+postgres=# ALTER ROLE "scorch" SET client_encoding TO 'utf8';
+postgres=# ALTER ROLE "scorch" SET default_transaction_isolation TO 'read committed';
+postgres=# ALTER ROLE "scorch" SET timezone TO 'UTC';
+postgres=# GRANT ALL PRIVILEGES ON DATABASE "scorch" TO "scorch";
+postgres=# \q
+```
 1. Get on the floor
 2. Walk the dinosaur
 
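These are the same credentials the new `db` block in the config wires into asyncpg further down. A quick way to confirm the role and database work before running the app is a one-off connection; this is only a sketch, assuming PostgreSQL is listening on localhost:5432 with the password from the snippet above.

```
# Sanity check for the database created above (assumes the README credentials).
import asyncio
import asyncpg

async def check():
    conn = await asyncpg.connect(
        database='scorch', user='scorch', password='password',
        host='localhost', port=5432)
    print(await conn.fetchval("SELECT 1"))
    await conn.close()

asyncio.run(check())
```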
@@ -6,9 +6,18 @@ eg. https://example.com/scorch.
 `buckler` specifies settings pertaining to the Buckler server.
 """
 url_prefix = '/scorch'
+music_dir = "/home/iou1name/music/Music"
+db = {
+    'database': 'scorch',
+    'user': 'scorch',
+    'password': """password""",
+    'host': 'localhost',
+    'port': 5432,
+}
+
 buckler = {
     'url': "http://127.0.0.1:5400/buckler",
-    'app_id': 2,
-    'app_key': """lol""",
+    'app_id': 1,
+    'app_key': """password""",
     'login_url': "/buckler/login",
 }
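The keys of `db` mirror asyncpg's connection keyword arguments, which is why the rest of the commit can unpack the dict straight into `asyncpg.create_pool(**config.db)`. A minimal standalone check of that wiring (a sketch, not part of the commit):

```
# Opens a pool exactly the way database.py and scorch.py do, then runs a trivial query.
import asyncio
import asyncpg

import config

async def main():
    pool = await asyncpg.create_pool(**config.db)
    async with pool.acquire() as conn:
        print(await conn.fetchval("SELECT version()"))
    await pool.close()

asyncio.run(main())
```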
database.py (new file, 93 lines)
@@ -0,0 +1,93 @@
+#!/usr/bin/env python3
+"""
+Database operations for Scorch.
+"""
+import os
+import multiprocessing
+from datetime import datetime, timezone
+
+import asyncpg
+import mutagen
+import mutagen.mp3
+
+import config
+
+MUSIC_EXT = ['flac', 'mp3']
+
+
+async def build_library(root_dir):
+    """Walks the music directory and builds a library of tracks."""
+    print("Building library")
+    filepaths = []
+    for dir_name, sub_dirs, files in os.walk(root_dir):
+        for file in files:
+            if not os.path.splitext(file)[1][1:] in MUSIC_EXT:
+                continue
+            filepath = os.path.join(root_dir, dir_name, file)
+            last_modified = datetime.fromtimestamp(
+                os.path.getmtime(filepath), timezone.utc)
+            filepaths.append((filepath, last_modified))
+    db_pool = await asyncpg.create_pool(**config.db)
+    async with db_pool.acquire() as conn:
+        tracks_prev = await conn.fetch("SELECT * FROM track")
+    tracks_prev = {track['filepath']: track for track in tracks_prev}
+
+    global worker
+    def worker(tup):
+        """Worker for multi-processing tracks."""
+        filepath, last_modified = tup
+        track_prev = tracks_prev.get(filepath)
+        if track_prev:
+            if track_prev['last_modified'] >= last_modified:
+                return tuple(track_prev)
+        data = read_track(filepath)
+        return data
+
+    with multiprocessing.Pool() as pool:
+        mapping = pool.imap(worker, filepaths)
+        tracks = []
+        prev_percent = 0
+        while True:
+            try:
+                tracks.append(mapping.next())
+            except StopIteration:
+                break
+            percent = round(len(tracks) / len(filepaths) * 100, 2)
+            if percent >= prev_percent + 2.5:
+                print(f"{percent}%")
+                prev_percent = percent
+    async with db_pool.acquire() as conn:
+        await conn.execute("DELETE FROM track")
+        await conn.executemany(
+            "INSERT INTO track VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11)",
+            tracks
+        )
+    print("Done")
+
+
+def read_track(filepath):
+    """
+    Reads the specified file and extracts relevant information from it.
+    Returns a tuple.
+    """
+    if filepath.endswith("mp3"):
+        m = mutagen.mp3.EasyMP3(filepath)
+    else:
+        m = mutagen.File(filepath)
+    artist = m.get('artist', [''])[0]
+    if m.get('albumartist'):
+        albumartist = m.get('albumartist', [''])[0]
+    else:
+        albumartist = m.get('artist', [''])[0]
+    date = m.get('date', [''])[0]
+    album = m.get('album', [''])[0]
+    discnumber = m.get('discnumber', [''])[0]
+    tracknumber = m.get('tracknumber', [''])[0]
+    title = m.get('title', [''])[0]
+    genre = m.get('genre', [''])[0]
+    length = m.info.length
+    last_modified = datetime.fromtimestamp(
+        os.path.getmtime(filepath), timezone.utc)
+
+    d = locals()
+    d.pop('m')
+    return tuple(d.values())
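`read_track` returns its values by snapshotting `locals()` and dropping `m`, so the tuple order follows the order the variables are bound (filepath first, last_modified last), which is what lines it up with the 11 placeholders in the INSERT above and the column order in scorch.sql. `build_library` can also be driven on its own; this mirrors the `sys.argv` branch added to scorch.py below and assumes `config.music_dir` points at a readable music directory:

```
# One-off library rebuild, outside the web app.
import asyncio

import config
import database

asyncio.run(database.build_library(config.music_dir))
```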
scorch.py (35 lines changed)
@@ -4,34 +4,27 @@ A music streaming application.
 """
 import asyncio
 
+import aiohttp
 from aiohttp import web, WSMsgType
 import jinja2
 import aiohttp_jinja2
 from aiohttp_jinja2 import render_template
 import uvloop
+import asyncpg
 
 import config
+import database
 import buckler_aiohttp
 
 uvloop.install()
 routes = web.RouteTableDef()
 
-with open ('test.opus', 'rb') as file:
-    test_data = file.read()
-
-def chunker(seq, size):
-    return (seq[pos:pos + size] for pos in range(0, len(seq), size))
-
-async def send_files(ws):
-    for n, chunk in enumerate(chunker(test_data, 200*1024)):
-        print(f"sending packet #{n}")
-        await ws.send_bytes(chunk)
-        await asyncio.sleep(5)
-
-
 @routes.get('/', name='index')
 async def index(request):
     """The index page."""
+    async with request.app['pool'].acquire() as conn:
+        artists = await conn.execute(
+            "SELECT DISTINCT albumartist FROM track ORDER BY albumartist ASC")
     return render_template('index.html', request, {})

@@ -52,11 +45,6 @@ async def websocket_handler(request):
             print('ping')
             await ws.send_str("pong")
 
-        if msg.data == 'test':
-            print('initiating test')
-            asyncio.create_task(send_files(ws))
-
-
     await ws.close()
     return ws

@@ -71,6 +59,10 @@ async def init_app():
         undefined=jinja2.StrictUndefined,
         loader=jinja2.FileSystemLoader('templates'),
     )
+    app['pool'] = await asyncpg.create_pool(**config.db)
+    async with app['pool'].acquire() as conn:
+        with open('scorch.sql', 'r') as file:
+            await conn.execute(file.read())
 
     app.router.add_routes(routes)
 

@@ -80,5 +72,8 @@
 
 
 if __name__ == "__main__":
-    app = init_app()
-    aiohttp.web.run_app(app, host='0.0.0.0', port=5500)
+    import sys
+    if len(sys.argv) > 1:
+        asyncio.run(database.build_library(config.music_dir))
+    #app = asyncio.run(init_app())
+    #aiohttp.web.run_app(app, host='0.0.0.0', port=5500)
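With the old test-streaming code removed, the `__main__` block now only rebuilds the library when any command-line argument is given (e.g. `python3 scorch.py build`); the server start-up itself is left commented out. Not part of this commit, but one way to re-enable it without creating the pool on a separate event loop is to hand `web.run_app` the `init_app()` coroutine directly:

```
# Hypothetical entry point; the commit itself leaves the server start-up commented out.
if __name__ == "__main__":
    import sys
    if len(sys.argv) > 1:
        asyncio.run(database.build_library(config.music_dir))
    else:
        web.run_app(init_app(), host='0.0.0.0', port=5500)
```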
scorch.sql (new file, 13 lines)
@@ -0,0 +1,13 @@
+CREATE TABLE IF NOT EXISTS track (
+    filepath TEXT PRIMARY KEY,
+    artist TEXT,
+    albumartist TEXT,
+    date TEXT,
+    album TEXT,
+    discnumber TEXT,
+    tracknumber TEXT,
+    title TEXT,
+    genre TEXT,
+    length FLOAT,
+    last_modified TIMESTAMP WITH TIME ZONE
+)
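The 11 columns match, in order, the tuple produced by `database.read_track` and the `$1`–`$11` placeholders in `build_library`'s INSERT; the new `index` handler reads the same table with `SELECT DISTINCT albumartist`. A small read-back sketch against this schema (not part of the commit):

```
# Lists what the library build stored, grouped the way the index page queries it.
import asyncio
import asyncpg

import config

async def main():
    conn = await asyncpg.connect(**config.db)
    rows = await conn.fetch(
        "SELECT albumartist, album, title FROM track ORDER BY albumartist, album")
    for row in rows:
        print(row['albumartist'], '-', row['album'], '-', row['title'])
    await conn.close()

asyncio.run(main())
```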
Client-side JavaScript:
@@ -1,27 +1,4 @@
 var socket;
-var audioCtx = new window.AudioContext();
-var source = audioCtx.createBufferSource();
-var total_raw;
-
-function appendBuffer(buffer1, buffer2) {
-    let numberOfChannels = Math.min( buffer1.numberOfChannels, buffer2.numberOfChannels );
-    let tmp = audioCtx.createBuffer( numberOfChannels, (buffer1.length + buffer2.length), buffer1.sampleRate );
-    for (let i=0; i<numberOfChannels; i++) {
-        let channel = tmp.getChannelData(i);
-        channel.set( buffer1.getChannelData(i), 0);
-        channel.set( buffer2.getChannelData(i), buffer1.length);
-    }
-    return tmp;
-}
-
-function appendBuffer2(buffer1, buffer2) {
-    var tmp = new Uint8Array(buffer1.byteLength + buffer2.byteLength);
-    var buff1 = new Uint8Array(buffer1);
-    var buff2 = new Uint8Array(buffer2);
-    tmp.set(buff1, 0);
-    tmp.set(buff2, buffer1.byteLength);
-    return tmp.buffer;
-};
-
 function load() {
     socket = init_websocket();

@@ -30,7 +7,7 @@ function load() {
 /* Websocket setup */
 function init_websocket() {
     let socket = new WebSocket('wss://' + window.location.hostname + ws_uri);
-    socket.binaryType = "arraybuffer";
+    socket.send_event = send_event;
     socket.onmessage = onmessage;
     socket.onclose = onclose;
     socket.onerror = onerror;

@@ -38,25 +15,17 @@ function init_websocket() {
     return socket;
 }
 
-function onmessage (e) {
+function send_event(event_title, data) {
+    data = JSON.stringify({'event': event_title, 'data': data});
+    if (socket.readyState == 0) {
+        console.log("Socket is still opening!");
+        return;
+    }
+    socket.send(data);
+}
+
+function onmessage(e) {
     console.log(e);
-    if (e.data.constructor === ArrayBuffer) {
-        console.log(e.data.byteLength);
-        if (source.buffer != null) {
-            total_raw = appendBuffer2(total_raw, e.data);
-        } else {
-            total_raw = e.data;
-        }
-        audioCtx.decodeAudioData(total_raw.slice(0)).then(function(decodedBuffer) {
-            if (source.buffer != null) {
-                source.stop();
-            }
-            source = audioCtx.createBufferSource();
-            source.buffer = decodedBuffer;
-            source.connect(audioCtx.destination);
-            source.start(0, audioCtx.currentTime);
-        });
-    }
 }
 
 function onclose(e) {
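On the client, `send_event` JSON-encodes `{'event': ..., 'data': ...}` and sends it once the socket is open, replacing the old raw-ArrayBuffer audio path. The matching server side is only partially visible in this commit (the ping/pong branch); a sketch of how `websocket_handler` might decode such messages, where the JSON branch is an assumption rather than code from the commit:

```
# Hypothetical server-side counterpart to the client's send_event().
import json
from aiohttp import web, WSMsgType

async def websocket_handler(request):
    ws = web.WebSocketResponse()
    await ws.prepare(request)
    async for msg in ws:
        if msg.type == WSMsgType.TEXT:
            if msg.data == 'ping':
                await ws.send_str("pong")
            else:
                event = json.loads(msg.data)  # {"event": ..., "data": ...}
                print(event['event'], event['data'])
    await ws.close()
    return ws
```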