Compare commits

8 Commits

| Author | SHA1 | Date |
|---|---|---|
| | ac9b3620e9 | |
| | 33e501928e | |
| | a3c354d4ef | |
| | 5f3b2e471b | |
| | 3aedfcf139 | |
| | 3718d3b90c | |
| | 55f48433ed | |
| | afd5476ea8 | |
226	pysonic/api.py
@@ -62,6 +62,7 @@ class ApiResponse(object):
        self.data = defaultdict(lambda: list())

    def add_child(self, _type, _parent="", _real_parent=None, **kwargs):
        kwargs = {k: v for k, v in kwargs.items() if v or type(v) is int}  # filter out empty keys (0 is ok)
        parent = _real_parent if _real_parent else self.get_child(_parent)
        m = defaultdict(lambda: list())
        m.update(dict(kwargs))
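The add_child helper above is what every handler in this changeset uses to assemble responses. A brief usage sketch, illustrative only and not part of the diff (the id and name values are made up; get_child and the serialization step are assumed to live elsewhere in pysonic/api.py):

```python
from pysonic.api import ApiResponse  # the class modified in the hunk above

response = ApiResponse()
response.add_child("indexes", lastModified="1502310831000")
index = response.add_child("index", _parent="indexes", name="A")
# _real_parent attaches to a node object directly instead of resolving _parent by name
response.add_child("artist", _real_parent=index, id=1, name="Example Artist")
# add_child drops falsy kwargs (integer 0 is kept), so optional attributes can be
# passed unconditionally without cluttering the serialized output.
```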
@@ -190,49 +191,50 @@ class PysonicApi(object):
    def getIndexes_view(self, **kwargs):
        # Get listing of top-level dir
        response = ApiResponse()
        # TODO real lastmodified date
        # TODO deal with ignoredArticles
        response.add_child("indexes", lastModified="1502310831000", ignoredArticles="The El La Los Las Le Les")
        artists = self.library.get_artists(sortby="name", order="asc")
        for letter in LETTER_GROUPS:
            index = response.add_child("index", _parent="indexes", name=letter.upper())
            for artist in self.library.get_artists():
            for artist in artists:
                if artist["name"][0].lower() in letter:
                    response.add_child("artist", _real_parent=index, id=artist["id"], name=artist["name"])
                    response.add_child("artist", _real_parent=index, id=artist["dir"], name=artist["name"])
        return response

    @cherrypy.expose
    def savePlayQueue_view(self, id, current, position, **kwargs):
        print("TODO save playlist with items {} current {} position {}".format(id, current, position))

    @cherrypy.expose
    @formatresponse
    def getAlbumList_view(self, type, size=50, offset=0, **kwargs):
        albums = self.library.get_albums()
        qargs = {}
        if type == "random":
            shuffle(albums)
            qargs.update(sortby="random")
        elif type == "alphabeticalByName":
            albums.sort(key=lambda item: item.get("id3_album", item["album"] if item["album"] else "zzzzzUnsortable"))
            qargs.update(sortby="name", order="asc")
        elif type == "newest":
            qargs.update(sortby="added", order="desc")
        else:
            raise NotImplemented()
        albumset = albums[0 + int(offset):int(size) + int(offset)]

        qargs.update(limit=(offset, size))

        albums = self.library.get_albums(**qargs)

        response = ApiResponse()

        response.add_child("albumList")

        for album in albumset:
            album_meta = album['metadata']
            album_kw = dict(id=album["id"],
                            parent=album["parent"],
                            isDir="true" if album['isdir'] else "false",
                            title=album_meta.get("id3_title", album["name"]),  #TODO these cant be blank or dsub gets mad
                            album=album_meta.get("id3_album", album["album"]),
                            artist=album_meta.get("id3_artist", album["artist"]),
        for album in albums:
            album_kw = dict(id=album["dir"],
                            parent=album["artistdir"],
                            isDir="true",
                            title=album["name"],
                            album=album["name"],
                            artist=album["artistname"],
                            coverArt=album["coverid"]
                            #year=TODO
                            # playCount="0"
                            # created="2016-05-08T05:31:31.000Z"/>)
                            )
            if 'cover' in album_meta:
                album_kw["coverArt"] = album_meta["cover"]
            if 'id3_year' in album_meta:
                album_kw["year"] = album_meta['id3_year']
            response.add_child("album", _parent="albumList", **album_kw)
        return response
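The rewritten getAlbumList_view stops slicing a pre-fetched album list in Python and instead hands limit=(offset, size) to the database layer. A minimal sketch of the LIMIT-clause translation that get_albums performs later in this diff (limit_clause is an illustrative helper name, not from the codebase):

```python
def limit_clause(limit):
    """Render a limit the way get_albums does later in this diff:
    an int becomes 'LIMIT n', a tuple becomes 'LIMIT offset, count'."""
    if isinstance(limit, int):
        return " LIMIT {}".format(limit)
    offset, count = limit
    return " LIMIT {}, {}".format(offset, count)


assert limit_clause(50) == " LIMIT 50"
assert limit_clause((0, 50)) == " LIMIT 0, 50"
```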
@@ -243,84 +245,67 @@ class PysonicApi(object):
        List an artist dir
        """
        dir_id = int(id)

        cherrypy.response.headers['Content-Type'] = 'text/xml; charset=utf-8'
        dirtype, dirinfo, entity = self.library.db.get_musicdir(dirid=dir_id)

        response = ApiResponse()
        response.add_child("directory")
        response.set_attrs(_path="directory", name=entity['name'], id=entity['id'],
                           parent=dirinfo['parent'], playCount=420)

        directory = self.library.get_dir(dir_id)
        dir_meta = directory["metadata"]
        children = self.library.get_dir_children(dir_id)
        response.set_attrs(_path="directory", name=directory['name'], id=directory['id'],
                           parent=directory['parent'], playCount=10)

        for item in children:
        for childtype, child in entity["children"]:
            # omit not dirs and media in browser
            if not item["isdir"] and item["type"] not in MUSIC_TYPES:
                continue
            item_meta = item['metadata']
            response.add_child("child", _parent="directory", **self.render_node(item, item_meta, directory, dir_meta))
            # if not item["isdir"] and item["type"] not in MUSIC_TYPES:
            # continue
            # item_meta = item['metadata']
            moreargs = {}
            if childtype == "album":
                moreargs.update(name=child["name"],
                                isDir="true", # TODO song files in artist dir
                                parent=entity["id"],
                                id=child["dir"])
                if child["coverid"]:
                    moreargs.update(coverArt=child["coverid"])
                # album=item["name"],
                # title=item["name"], # TODO dupe?
                # artist=artist["name"],
                # coverArt=item["coverid"],
            elif childtype == "song":
                moreargs.update(name=child["title"],
                                artist=child["_artist"]["name"],
                                contentType=child["format"],
                                id=child["id"],
                                duration=child["length"],
                                isDir="false",
                                parent=entity["dir"],
                                # title=xxx
                                )
                if entity["coverid"]:
                    moreargs.update(coverArt=entity["coverid"])
            # duration="230" size="8409237" suffix="mp3" track="2" year="2005"/>
            response.add_child("child", _parent="directory",
                               size="4096",
                               type="music",
                               **moreargs)

        cherrypy.response.headers['Content-Type'] = 'text/xml; charset=utf-8'
        return response
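For reference, the loop above unpacks the tuple returned by get_musicdir (implemented later in this diff): a type tag, the dirs-table row, and the matching artist or album row carrying a 'children' list of (childtype, child) pairs. The concrete values below are illustrative only:

```python
# Assumed/illustrative values; only the shape mirrors get_musicdir's return value.
dirtype, dirinfo, entity = (
    "artist",                                          # or "album"
    {"id": 7, "parent": 0, "name": "Example Artist"},  # row from the dirs table
    {"id": 3, "name": "Example Artist", "dir": 7, "coverid": None,
     "children": [("album", {"id": 9, "name": "Example Album", "dir": 12, "coverid": 4})]},
)
for childtype, child in entity["children"]:
    print(childtype, child["name"])
```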
    def render_node(self, item, item_meta, directory, dir_meta):
        """
        Given a node and it's parent directory, and meta, return a dict with the keys formatted how the subsonic clients
        expect them to be
        :param item:
        :param item_meta:
        :param directory:
        :param dir_meta:
        """
        child = dict(id=item["id"],
                     parent=item["id"],
                     isDir="true" if item['isdir'] else "false",
                     title=item_meta.get("id3_title", item["name"]),
                     album=item_meta.get("id3_album", item["album"]),
                     artist=item_meta.get("id3_artist", item["artist"]),
                     # playCount="5",
                     # created="2016-04-25T07:31:33.000Z"
                     # genre="Other",
                     # path="Cosmic Gate/Sign Of The Times/03 Flatline (featuring Kyler England).mp3"
                     type="music")
        if 'kbitrate' in item_meta:
            child["bitrate"] = item_meta["kbitrate"]
        if item["size"] != -1:
            child["size"] = item["size"]
        if "media_length" in item_meta:
            child["duration"] = item_meta["media_length"]
        if "albumId" in directory:
            child["albumId"] = directory["id"]
        if "artistId" in directory:
            child["artistId"] = directory["parent"]
        if "." in item["name"]:
            child["suffix"] = item["name"].split(".")[-1]
        if item["type"]:
            child["contentType"] = item["type"]
        if 'cover' in item_meta:
            child["coverArt"] = item_meta["cover"]
        elif 'cover' in dir_meta:
            child["coverArt"] = dir_meta["cover"]
        if 'track' in item_meta:
            child["track"] = item_meta['track']
        if 'id3_year' in item_meta:
            child["year"] = item_meta['id3_year']
        return child

    @cherrypy.expose
    def stream_view(self, id, maxBitRate="256", **kwargs):
        maxBitRate = int(maxBitRate)
        assert maxBitRate >= 32 and maxBitRate <= 320
        fpath = self.library.get_filepath(id)
        meta = self.library.get_file_metadata(id)
        to_bitrate = min(maxBitRate, self.options.max_bitrate, meta.get("media_kbitrate", 320))
        song = self.library.get_song(id)
        fpath = song["_fullpath"]
        media_bitrate = song.get("bitrate") / 1024 if song.get("bitrate") else 320
        to_bitrate = min(maxBitRate,
                         self.options.max_bitrate,
                         media_bitrate)
        cherrypy.response.headers['Content-Type'] = 'audio/mpeg'
        if "media_length" in meta:
            cherrypy.response.headers['X-Content-Duration'] = str(int(meta['media_length']))
        #if "media_length" in meta:
        #    cherrypy.response.headers['X-Content-Duration'] = str(int(meta['media_length']))
        cherrypy.response.headers['X-Content-Kbitrate'] = str(to_bitrate)
        if (self.options.skip_transcode or meta.get("media_kbitrate", -1) == to_bitrate) \
                and meta["type"] == "audio/mpeg":
        if (self.options.skip_transcode or (song.get("bitrate") and media_bitrate == to_bitrate)) \
                and song["format"] == "audio/mpeg":
            def content():
                with open(fpath, "rb") as f:
                    while True:
@@ -330,10 +315,9 @@ class PysonicApi(object):
                        yield data
            return content()
        else:
            transcode_meta = "transcoded_{}_size".format(to_bitrate)
            if transcode_meta in meta:
                cherrypy.response.headers['Content-Length'] = str(int(meta[transcode_meta]))

            # transcode_meta = "transcoded_{}_size".format(to_bitrate)
            # if transcode_meta in meta:
            #     cherrypy.response.headers['Content-Length'] = str(int(meta[transcode_meta]))
            transcode_args = ["ffmpeg", "-i", fpath, "-map", "0:0", "-b:a",
                              "{}k".format(to_bitrate),
                              "-v", "0", "-f", "mp3", "-"]
@@ -343,13 +327,13 @@ class PysonicApi(object):

            def content(proc):
                length = 0
                completed = False
                # completed = False
                start = time()
                try:
                    while True:
                        data = proc.stdout.read(16 * 1024)
                        if not data:
                            completed = True
                            # completed = True
                            break
                        yield data
                        length += len(data)
@@ -357,8 +341,8 @@
                    proc.poll()
                    if proc.returncode is None or proc.returncode == 0:
                        logging.warning("transcoded {} in {}s".format(id, int(time() - start)))
                        if completed:
                            self.library.report_transcode(id, to_bitrate, length)
                        # if completed:
                        #     self.library.report_transcode(id, to_bitrate, length)
                    else:
                        logging.error("transcode of {} exited with code {} after {}s".format(id, proc.returncode,
                                                                                             int(time() - start)))
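The subprocess launch itself falls between hunks in this diff; a minimal sketch of how transcode_args is commonly wired to the content(proc) generator above, under the assumption that ffmpeg writes MP3 data to stdout (this is not the author's exact code):

```python
import subprocess

def start_transcode(transcode_args):
    # Connect ffmpeg's stdout to a pipe so content(proc) can yield 16 KiB
    # chunks to the client while the transcode is still running.
    return subprocess.Popen(transcode_args, stdout=subprocess.PIPE)

# Example invocation mirroring the argument list built above:
# proc = start_transcode(["ffmpeg", "-i", "/music/track.flac", "-map", "0:0",
#                         "-b:a", "256k", "-v", "0", "-f", "mp3", "-"])
```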
@@ -378,7 +362,8 @@ class PysonicApi(object):

    @cherrypy.expose
    def getCoverArt_view(self, id, **kwargs):
        fpath = self.library.get_filepath(id)
        cover = self.library.get_cover(id)
        fpath = cover["_fullpath"]
        type2ct = {
            'jpg': 'image/jpeg',
            'png': 'image/png',
@@ -397,7 +382,6 @@
                yield data
            logging.info("\nSent {} bytes for {}".format(total, fpath))
        return content()

    getCoverArt_view._cp_config = {'response.stream': True}

    @cherrypy.expose
@@ -471,15 +455,34 @@ class PysonicApi(object):
        """
        response = ApiResponse()
        response.add_child("randomSongs")
        children = self.library.get_songs(size, shuffle=True)
        for item in children:
            # omit not dirs and media in browser
            if not item["isdir"] and item["type"] not in MUSIC_TYPES:
                continue
            item_meta = item['metadata']
            itemtype = "song" if item["type"] in MUSIC_TYPES else "album"
            response.add_child(itemtype, _parent="randomSongs",
                               **self.render_node(item, item_meta, {}, self.db.getnode(item["parent"])["metadata"]))
        children = self.library.db.get_songs(limit=size, sortby="random")
        for song in children:
            moreargs = {}
            if song["format"]:
                moreargs.update(contentType=song["format"])
            if song["albumcoverid"]:
                moreargs.update(coverArt=song["albumcoverid"])
            if song["length"]:
                moreargs.update(duration=song["length"])
            if song["track"]:
                moreargs.update(track=song["track"])
            if song["year"]:
                moreargs.update(year=song["year"])

            file_extension = song["file"].split(".")[-1]

            response.add_child("song",
                               _parent="randomSongs",
                               title=song["title"],
                               album=song["albumname"],
                               artist=song["artistname"],
                               id=song["id"],
                               isDir="false",
                               parent=song["albumid"],
                               size=song["size"],
                               suffix=file_extension,
                               type="music",
                               **moreargs)
        return response

    @cherrypy.expose
@@ -487,9 +490,8 @@ class PysonicApi(object):
    def getGenres_view(self, **kwargs):
        response = ApiResponse()
        response.add_child("genres")
        response.add_child("genre", _parent="genres", value="Death Metal", songCount=420, albumCount=69)
        response.add_child("genre", _parent="genres", value="Metal", songCount=52, albumCount=3)
        response.add_child("genre", _parent="genres", value="Punk", songCount=34, albumCount=3)
        for row in self.library.db.get_genres():
            response.add_child("genre", _parent="genres", value=row["name"], songCount=420, albumCount=69)
        return response

    @cherrypy.expose
@@ -500,7 +502,7 @@ class PysonicApi(object):
        :param submission: True if end of song reached. False on start of track.
        """
        submission = True if submission == "true" else False
        # TODO save played track stats
        # TODO save played track stats and/or do last.fm bullshit
        return ApiResponse()

    @cherrypy.expose
@@ -548,3 +550,9 @@ class PysonicApi(object):
    def setRating_view(self, id, rating):
        # rating is 1-5
        pass

    @cherrypy.expose
    def savePlayQueue_view(self, id, current, position, **kwargs):
        print("TODO save playlist with items {} current {} position {}".format(id, current, position))
        # TODO save playlist with items ['378', '386', '384', '380', '383'] current 383 position 4471
        # id entries are strings!
@@ -1,10 +1,10 @@
import os
import logging
import cherrypy
from sqlite3 import IntegrityError
from sqlite3 import DatabaseError
from pysonic.api import PysonicApi
from pysonic.library import PysonicLibrary, DuplicateRootException
from pysonic.database import PysonicDatabase
from pysonic.library import PysonicLibrary
from pysonic.database import PysonicDatabase, DuplicateRootException


def main():
@@ -31,14 +31,15 @@ def main():

    args = parser.parse_args()

    logging.basicConfig(level=logging.INFO if args.debug else logging.WARNING)
    logging.basicConfig(level=logging.INFO if args.debug else logging.WARNING,
                        format="%(asctime)-15s %(levelname)-8s %(filename)s:%(lineno)d %(message)s")

    db = PysonicDatabase(path=args.database_path)
    library = PysonicLibrary(db)
    for dirname in args.dirs:
        assert os.path.exists(dirname) and dirname.startswith("/"), "--dirs must be absolute paths and exist!"
        try:
            library.add_dir(dirname)
            library.add_root_dir(dirname)
        except DuplicateRootException:
            pass
    library.update()
@@ -46,21 +47,25 @@ def main():
    for username, password in args.user:
        try:
            db.add_user(username, password)
        except IntegrityError:
        except DatabaseError:
            db.update_user(username, password)

    logging.warning("Libraries: {}".format([i["name"] for i in library.get_libraries()]))
    logging.warning("Artists: {}".format([i["name"] for i in library.get_artists()]))
    logging.warning("Albums: {}".format(len(library.get_albums())))
    # logging.warning("Libraries: {}".format([i["name"] for i in library.get_libraries()]))
    # logging.warning("Artists: {}".format([i["name"] for i in library.get_artists()]))
    # logging.warning("Albums: {}".format(len(library.get_albums())))

    api = PysonicApi(db, library, args)
    api_config = {}
    if args.disable_auth:
        logging.warning("starting up with auth disabled")
    else:
        def validate_password(realm, username, password):
            print("I JUST VALIDATED {}:{} ({})".format(username, password, realm))
            return True

        api_config.update({'tools.auth_basic.on': True,
                           'tools.auth_basic.realm': 'pysonic',
                           'tools.auth_basic.checkpassword': db.validate_password})
                           'tools.auth_basic.checkpassword': validate_password})
    if args.enable_cors:
        def cors():
            cherrypy.response.headers["Access-Control-Allow-Origin"] = "*"
@@ -99,5 +104,6 @@ def main():
        logging.info("API has shut down")
    cherrypy.engine.exit()


if __name__ == '__main__':
    main()
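CherryPy's auth_basic tool expects checkpassword to be a callable taking (realm, username, password) and returning a bool; the debug validate_password above and PysonicDatabase.validate_password (later in this diff) both follow that contract. A standalone sketch with a made-up credential pair:

```python
def checkpassword(realm, username, password):
    # Illustrative only: accept one hard-coded credential pair.
    return (username, password) == ("demo", "demo")

api_config = {'tools.auth_basic.on': True,
              'tools.auth_basic.realm': 'pysonic',
              'tools.auth_basic.checkpassword': checkpassword}
```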
@@ -1,5 +1,3 @@
import os
import json
import sqlite3
import logging
from hashlib import sha512
@@ -21,12 +19,26 @@ class NotFoundError(Exception):
    pass


class DuplicateRootException(Exception):
    pass


def readcursor(func):
    """
    Provides a cursor to the wrapped method as the first arg
    """
    def wrapped(*args, **kwargs):
        self = args[0]
        with closing(self.db.cursor()) as cursor:
            return func(*[self, cursor], *args[1:], **kwargs)
    return wrapped


class PysonicDatabase(object):
    def __init__(self, path):
        self.sqlite_opts = dict(check_same_thread=False, cached_statements=0, isolation_level=None)
        self.sqlite_opts = dict(check_same_thread=False)
        self.path = path
        self.db = None

        self.open()
        self.migrate()
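A usage sketch of the new @readcursor decorator: the wrapper opens a cursor from self.db and injects it as the first argument after self, so callers never manage cursors themselves. The Example class and its query are made up for illustration:

```python
from pysonic.database import readcursor  # the decorator added above


class Example(object):
    def __init__(self, db):
        self.db = db  # a sqlite3 connection

    @readcursor
    def count_songs(self, cursor):
        cursor.execute("SELECT COUNT(*) FROM songs")
        return cursor.fetchone()[0]

# usage: Example(connection).count_songs() is called without passing a cursor
```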
@ -36,212 +48,359 @@ class PysonicDatabase(object):
|
||||
|
||||
def migrate(self):
|
||||
# Create db
|
||||
queries = ["""CREATE TABLE 'meta' (
|
||||
queries = ["""CREATE TABLE 'libraries' (
|
||||
'id' INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
'name' TEXT,
|
||||
'path' TEXT UNIQUE);""",
|
||||
"""CREATE TABLE 'dirs' (
|
||||
'id' INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
'library' INTEGER,
|
||||
'parent' INTEGER,
|
||||
'name' TEXT,
|
||||
UNIQUE(parent, name)
|
||||
)""",
|
||||
"""CREATE TABLE 'genres' (
|
||||
'id' INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
'name' TEXT UNIQUE)""",
|
||||
"""CREATE TABLE 'artists' (
|
||||
'id' INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
'libraryid' INTEGER,
|
||||
'dir' INTEGER UNIQUE,
|
||||
'name' TEXT)""",
|
||||
"""CREATE TABLE 'albums' (
|
||||
'id' INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
'artistid' INTEGER,
|
||||
'coverid' INTEGER,
|
||||
'dir' INTEGER,
|
||||
'name' TEXT,
|
||||
'added' INTEGER NOT NULL DEFAULT -1,
|
||||
UNIQUE (artistid, dir));""",
|
||||
"""CREATE TABLE 'songs' (
|
||||
'id' INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
'library' INTEGER,
|
||||
'albumid' BOOLEAN,
|
||||
'genre' INTEGER DEFAULT NULL,
|
||||
'file' TEXT UNIQUE, -- path from the library root
|
||||
'size' INTEGER NOT NULL DEFAULT -1,
|
||||
'title' TEXT NOT NULL,
|
||||
'lastscan' INTEGER NOT NULL DEFAULT -1,
|
||||
'format' TEXT,
|
||||
'length' INTEGER,
|
||||
'bitrate' INTEGER,
|
||||
'track' INTEGER,
|
||||
'year' INTEGER
|
||||
)""",
|
||||
"""CREATE TABLE 'covers' (
|
||||
'id' INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
'library' INTEGER,
|
||||
'type' TEXT,
|
||||
'size' TEXT,
|
||||
'path' TEXT UNIQUE);""",
|
||||
"""CREATE TABLE 'users' (
|
||||
'id' INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
|
||||
'username' TEXT UNIQUE NOT NULL,
|
||||
'password' TEXT NOT NULL,
|
||||
'admin' BOOLEAN DEFAULT 0,
|
||||
'email' TEXT)""",
|
||||
"""CREATE TABLE 'stars' (
|
||||
'userid' INTEGER,
|
||||
'songid' INTEGER,
|
||||
primary key ('userid', 'songid'))""",
|
||||
"""CREATE TABLE 'meta' (
|
||||
'key' TEXT PRIMARY KEY NOT NULL,
|
||||
'value' TEXT);""",
|
||||
"""INSERT INTO meta VALUES ('db_version', '3');""",
|
||||
"""CREATE TABLE 'nodes' (
|
||||
'id' INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
|
||||
'parent' INTEGER NOT NULL,
|
||||
'isdir' BOOLEAN NOT NULL,
|
||||
'size' INTEGER NOT NULL DEFAULT -1,
|
||||
'name' TEXT NOT NULL,
|
||||
'type' TEXT,
|
||||
'title' TEXT,
|
||||
'album' TEXT,
|
||||
'artist' TEXT,
|
||||
'metadata' TEXT
|
||||
)""",
|
||||
"""CREATE TABLE 'users' (
|
||||
'id' INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
|
||||
'username' TEXT UNIQUE NOT NULL,
|
||||
'password' TEXT NOT NULL,
|
||||
'admin' BOOLEAN DEFAULT 0,
|
||||
'email' TEXT)""",
|
||||
"""CREATE TABLE 'stars' (
|
||||
'userid' INTEGER,
|
||||
'nodeid' INTEGER,
|
||||
primary key ('userid', 'nodeid'))"""]
|
||||
"""INSERT INTO meta VALUES ('db_version', '1');"""]
|
||||
|
||||
with closing(self.db.cursor()) as cursor:
|
||||
cursor.execute("SELECT * FROM sqlite_master WHERE type='table' AND name='meta';")
|
||||
cursor.execute("SELECT * FROM sqlite_master WHERE type='table' AND name='meta'")
|
||||
|
||||
# Initialize DB
|
||||
if len(cursor.fetchall()) == 0:
|
||||
logging.warning("Initializing database")
|
||||
for query in queries:
|
||||
cursor.execute(query)
|
||||
cursor.execute("COMMIT")
|
||||
else:
|
||||
# Migrate if old db exists
|
||||
version = int(cursor.execute("SELECT * FROM meta WHERE key='db_version';").fetchone()['value'])
|
||||
if version < 1:
|
||||
logging.warning("migrating database to v1 from %s", version)
|
||||
users_table = """CREATE TABLE 'users' (
|
||||
'id' INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
|
||||
'username' TEXT UNIQUE NOT NULL,
|
||||
'password' TEXT NOT NULL,
|
||||
'admin' BOOLEAN DEFAULT 0,
|
||||
'email' TEXT)"""
|
||||
cursor.execute(users_table)
|
||||
version = 1
|
||||
if version < 2:
|
||||
logging.warning("migrating database to v2 from %s", version)
|
||||
stars_table = """CREATE TABLE 'stars' (
|
||||
'userid' INTEGER,
|
||||
'nodeid' INTEGER,
|
||||
primary key ('userid', 'nodeid'))"""
|
||||
cursor.execute(stars_table)
|
||||
version = 2
|
||||
if version < 3:
|
||||
logging.warning("migrating database to v3 from %s", version)
|
||||
size_col = """ALTER TABLE nodes ADD 'size' INTEGER NOT NULL DEFAULT -1;"""
|
||||
cursor.execute(size_col)
|
||||
version = 3
|
||||
# cursor.execute("""UPDATE meta SET value=? WHERE key="db_version";""", (str(version), ))
|
||||
# logging.warning("db schema is version {}".format(version))
|
||||
pass
|
||||
|
||||
cursor.execute("""UPDATE meta SET value=? WHERE key="db_version";""", (str(version), ))
|
||||
logging.warning("db schema is version {}".format(version))
|
||||
|
||||
# Virtual file tree
|
||||
def getnode(self, node_id):
|
||||
return self.getnodes(node_id=node_id)[0]
|
||||
|
||||
def _populate_meta(self, node):
|
||||
node['metadata'] = self.decode_metadata(node['metadata'])
|
||||
return node
|
||||
|
||||
def getnodes(self, *parent_ids, node_id=None, types=None, limit=None, order=None):
|
||||
def add_root(self, path, name="Library"):
|
||||
"""
|
||||
Find nodes that match the passed paramters.
|
||||
:param parent_ids: one or more parents to find children of
|
||||
:type parent_ids: int
|
||||
:param node_id: single node id to return
|
||||
:type node_id: int
|
||||
:param types: filter by type column
|
||||
:type types: list
|
||||
:param limit: number of records to limit to
|
||||
:param order: one of ("rand") to select ordering mode
|
||||
Add a new library root. Returns the root ID or raises on collision
|
||||
:param path: normalized absolute path to add to the library
|
||||
:type path: str:
|
||||
:return: int
|
||||
:raises: sqlite3.IntegrityError
|
||||
"""
|
||||
query = "SELECT * FROM nodes WHERE "
|
||||
qargs = []
|
||||
assert path.startswith("/")
|
||||
with closing(self.db.cursor()) as cursor:
|
||||
try:
|
||||
cursor.execute("INSERT INTO libraries ('name', 'path') VALUES (?, ?)", (name, path, ))
|
||||
cursor.execute("COMMIT")
|
||||
return cursor.lastrowid
|
||||
except sqlite3.IntegrityError:
|
||||
raise DuplicateRootException("Root '{}' already exists".format(path))
|
||||
|
||||
def add_filter(name, values):
|
||||
nonlocal query
|
||||
nonlocal qargs
|
||||
query += "{} in (".format(name)
|
||||
for value in (values if type(values) in [list, tuple] else [values]):
|
||||
query += "?, "
|
||||
qargs += [value]
|
||||
query = query.rstrip(", ")
|
||||
query += ") AND"
|
||||
@readcursor
|
||||
def get_libraries(self, cursor, id=None):
|
||||
libs = []
|
||||
q = "SELECT * FROM libraries"
|
||||
params = []
|
||||
conditions = []
|
||||
if id:
|
||||
conditions.append("id = ?")
|
||||
params.append(id)
|
||||
if conditions:
|
||||
q += " WHERE " + " AND ".join(conditions)
|
||||
cursor.execute(q, params)
|
||||
for row in cursor:
|
||||
libs.append(row)
|
||||
return libs
|
||||
|
||||
if node_id:
|
||||
add_filter("id", node_id)
|
||||
if parent_ids:
|
||||
add_filter("parent", parent_ids)
|
||||
if types:
|
||||
add_filter("type", types)
|
||||
|
||||
query = query.rstrip(" AND").rstrip("WHERE ")
|
||||
@readcursor
|
||||
def get_artists(self, cursor, id=None, dirid=None, sortby=None, order=None):
|
||||
assert order in ["asc", "desc", None]
|
||||
artists = []
|
||||
q = "SELECT * FROM artists"
|
||||
params = []
|
||||
conditions = []
|
||||
if id:
|
||||
conditions.append("id = ?")
|
||||
params.append(id)
|
||||
if dirid:
|
||||
conditions.append("dir = ?")
|
||||
params.append(dirid)
|
||||
if conditions:
|
||||
q += " WHERE " + " AND ".join(conditions)
|
||||
if sortby:
|
||||
q += " ORDER BY {} {}".format(sortby, order.upper() if order else "ASC")
|
||||
cursor.execute(q, params)
|
||||
for row in cursor:
|
||||
artists.append(row)
|
||||
return artists
|
||||
|
||||
@readcursor
|
||||
def get_albums(self, cursor, id=None, artist=None, sortby=None, order=None, limit=None):
|
||||
"""
|
||||
:param limit: int or tuple of int, int. translates directly to sql logic.
|
||||
"""
|
||||
if order:
|
||||
query += "ORDER BY "
|
||||
if order == "rand":
|
||||
query += "RANDOM()"
|
||||
order = {"asc": "ASC", "desc": "DESC"}[order]
|
||||
|
||||
if limit: # TODO 2-item tuple limit
|
||||
query += " limit {}".format(limit)
|
||||
if sortby and sortby == "random":
|
||||
sortby = "RANDOM()"
|
||||
|
||||
with closing(self.db.cursor()) as cursor:
|
||||
return list(map(self._populate_meta, cursor.execute(query, qargs).fetchall()))
|
||||
albums = []
|
||||
|
||||
def addnode(self, parent_id, fspath, name, size=-1):
|
||||
fullpath = os.path.join(fspath, name)
|
||||
is_dir = os.path.isdir(fullpath)
|
||||
return self._addnode(parent_id, name, is_dir, size=size)
|
||||
q = """
|
||||
SELECT
|
||||
alb.*,
|
||||
art.name as artistname,
|
||||
dirs.parent as artistdir
|
||||
FROM albums as alb
|
||||
INNER JOIN artists as art
|
||||
on alb.artistid = art.id
|
||||
INNER JOIN dirs
|
||||
on dirs.id = alb.dir
|
||||
"""
|
||||
#q = "SELECT * FROM albums"
|
||||
|
||||
def _addnode(self, parent_id, name, is_dir=True, size=-1):
|
||||
with closing(self.db.cursor()) as cursor:
|
||||
cursor.execute("INSERT INTO nodes (parent, isdir, name, size) VALUES (?, ?, ?, ?);",
|
||||
(parent_id, 1 if is_dir else 0, name, size))
|
||||
return self.getnode(cursor.lastrowid)
|
||||
params = []
|
||||
|
||||
def delnode(self, node_id):
|
||||
deleted = 1
|
||||
for child in self.getnodes(node_id):
|
||||
deleted += self.delnode(child["id"])
|
||||
with closing(self.db.cursor()) as cursor:
|
||||
cursor.execute("DELETE FROM nodes WHERE id=?;", (node_id, ))
|
||||
return deleted
|
||||
conditions = []
|
||||
if id:
|
||||
conditions.append("id = ?")
|
||||
params.append(id)
|
||||
if artist:
|
||||
conditions.append("artistid = ?")
|
||||
params.append(artist)
|
||||
if conditions:
|
||||
q += " WHERE " + " AND ".join(conditions)
|
||||
|
||||
def update_metadata(self, node_id, mergedict=None, **kwargs):
|
||||
mergedict = mergedict if mergedict else {}
|
||||
mergedict.update(kwargs)
|
||||
with closing(self.db.cursor()) as cursor:
|
||||
for table_key in keys_in_table:
|
||||
if table_key in mergedict:
|
||||
cursor.execute("UPDATE nodes SET {}=? WHERE id=?;".format(table_key),
|
||||
(mergedict[table_key], node_id))
|
||||
other_meta = {k: v for k, v in mergedict.items() if k not in keys_in_table}
|
||||
if other_meta:
|
||||
metadata = self.get_metadata(node_id)
|
||||
metadata.update(other_meta)
|
||||
cursor.execute("UPDATE nodes SET metadata=? WHERE id=?;", (json.dumps(metadata), node_id, ))
|
||||
if sortby:
|
||||
q += " ORDER BY {}".format(sortby)
|
||||
if order:
|
||||
q += " {}".format(order)
|
||||
|
||||
def get_metadata(self, node_id):
|
||||
node = self.getnode(node_id)
|
||||
meta = node["metadata"]
|
||||
meta.update({item: node[item] for item in keys_in_table})
|
||||
return meta
|
||||
if limit:
|
||||
q += " LIMIT {}".format(limit) if isinstance(limit, int) \
|
||||
else " LIMIT {}, {}".format(*limit)
|
||||
|
||||
def decode_metadata(self, metadata):
|
||||
if metadata:
|
||||
return json.loads(metadata)
|
||||
return {}
|
||||
cursor.execute(q, params)
|
||||
for row in cursor:
|
||||
albums.append(row)
|
||||
return albums
|
||||
|
||||
@readcursor
|
||||
def get_songs(self, cursor, id=None, genre=None, sortby=None, order=None, limit=None):
|
||||
# TODO make this query massively uglier by joining albums and artists so that artistid etc can be a filter
|
||||
# or maybe lookup those IDs in the library layer?
|
||||
if order:
|
||||
order = {"asc": "ASC", "desc": "DESC"}[order]
|
||||
|
||||
if sortby and sortby == "random":
|
||||
sortby = "RANDOM()"
|
||||
|
||||
songs = []
|
||||
|
||||
q = """
|
||||
SELECT
|
||||
s.*,
|
||||
alb.name as albumname,
|
||||
alb.coverid as albumcoverid,
|
||||
art.name as artistname,
|
||||
g.name as genrename
|
||||
FROM songs as s
|
||||
INNER JOIN albums as alb
|
||||
on s.albumid == alb.id
|
||||
INNER JOIN artists as art
|
||||
on alb.artistid = art.id
|
||||
LEFT JOIN genres as g
|
||||
on s.genre == g.id
|
||||
"""
|
||||
|
||||
params = []
|
||||
|
||||
conditions = []
|
||||
if id:
|
||||
conditions.append("s.id = ?")
|
||||
params.append(id)
|
||||
if genre:
|
||||
conditions.append("g.name = ?")
|
||||
params.append(genre)
|
||||
if conditions:
|
||||
q += " WHERE " + " AND ".join(conditions)
|
||||
|
||||
if sortby:
|
||||
q += " ORDER BY {}".format(sortby)
|
||||
if order:
|
||||
q += " {}".format(order)
|
||||
|
||||
if limit:
|
||||
q += " LIMIT {}".format(limit) # TODO support limit pagination
|
||||
|
||||
cursor.execute(q, params)
|
||||
for row in cursor:
|
||||
songs.append(row)
|
||||
return songs
|
||||
|
||||
@readcursor
|
||||
def get_genres(self, cursor, genre_id=None):
|
||||
genres = []
|
||||
q = "SELECT * FROM genres"
|
||||
params = []
|
||||
conditions = []
|
||||
if genre_id:
|
||||
conditions.append("id = ?")
|
||||
params.append(genre_id)
|
||||
if conditions:
|
||||
q += " WHERE " + " AND ".join(conditions)
|
||||
cursor.execute(q, params)
|
||||
for row in cursor:
|
||||
genres.append(row)
|
||||
return genres
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
# @readcursor
|
||||
# def get_artist_by_dir(self, cursor, dirid):
|
||||
# for row in cursor.execute("""
|
||||
# SELECT artists.*
|
||||
# FROM dirs
|
||||
# INNER JOIN artists
|
||||
# ON artists.dir = dirs.id
|
||||
# WHERE dirs.id=?""", (dirid, )):
|
||||
# return [row]
|
||||
# return []
|
||||
|
||||
@readcursor
|
||||
def get_cover(self, cursor, coverid):
|
||||
cover = None
|
||||
for cover in cursor.execute("SELECT * FROM covers WHERE id = ?", (coverid, )):
|
||||
return cover
|
||||
|
||||
@readcursor
|
||||
def get_musicdir(self, cursor, dirid):
|
||||
"""
|
||||
The world is a harsh place.
|
||||
Again, this bullshit exists only to serve subsonic clients. Given a directory ID it returns a dict containing:
|
||||
- the directory itself
|
||||
- its parent
|
||||
- its child dirs
|
||||
- its child media
|
||||
|
||||
that's a lie, it's a tuple and it's full of BS. read the code
|
||||
"""
|
||||
# find directory
|
||||
dirinfo = None
|
||||
for dirinfo in cursor.execute("SELECT * FROM dirs WHERE id = ?", (dirid, )):
|
||||
pass
|
||||
assert dirinfo
|
||||
|
||||
ret = None
|
||||
|
||||
# see if it matches the artists or albums table
|
||||
artist = None
|
||||
for artist in cursor.execute("SELECT * FROM artists WHERE dir = ?", (dirid, )):
|
||||
pass
|
||||
|
||||
# if artist:
|
||||
# get child albums
|
||||
if artist:
|
||||
ret = ("artist", dirinfo, artist)
|
||||
children = []
|
||||
for album in cursor.execute("SELECT * FROM albums WHERE artistid = ?", (artist["id"], )):
|
||||
children.append(("album", album))
|
||||
ret[2]['children'] = children
|
||||
return ret
|
||||
|
||||
# else if album:
|
||||
# get child tracks
|
||||
album = None
|
||||
for album in cursor.execute("SELECT * FROM albums WHERE dir = ?", (dirid, )):
|
||||
pass
|
||||
if album:
|
||||
ret = ("album", dirinfo, album)
|
||||
|
||||
artist_info = cursor.execute("SELECT * FROM artists WHERE id = ?", (album["artistid"], )).fetchall()[0]
|
||||
|
||||
children = []
|
||||
for song in cursor.execute("SELECT * FROM songs WHERE albumid = ?", (album["id"], )):
|
||||
song["_artist"] = artist_info
|
||||
children.append(("song", song))
|
||||
ret[2]['children'] = children
|
||||
return ret
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
@readcursor
|
||||
def add_user(self, cursor, username, password, is_admin=False):
|
||||
cursor.execute("INSERT INTO users (username, password, admin) VALUES (?, ?, ?)",
|
||||
(username, self.hashit(password), is_admin))
|
||||
cursor.execute("COMMIT")
|
||||
|
||||
@readcursor
|
||||
def update_user(self, cursor, username, password, is_admin=False):
|
||||
cursor.execute("UPDATE users SET password=?, admin=? WHERE username=?;",
|
||||
(self.hashit(password), is_admin, username))
|
||||
cursor.execute("COMMIT")
|
||||
|
||||
@readcursor
|
||||
def get_user(self, cursor, user):
|
||||
try:
|
||||
column = "id" if type(user) is int else "username"
|
||||
return cursor.execute("SELECT * FROM users WHERE {}=?;".format(column), (user, )).fetchall()[0]
|
||||
except IndexError:
|
||||
raise NotFoundError("User doesn't exist")
|
||||
|
||||
def hashit(self, unicode_string):
|
||||
return sha512(unicode_string.encode('UTF-8')).hexdigest()
|
||||
|
||||
def validate_password(self, realm, username, password):
|
||||
with closing(self.db.cursor()) as cursor:
|
||||
users = cursor.execute("SELECT * FROM users WHERE username=? AND password=?;",
|
||||
(username, self.hashit(password))).fetchall()
|
||||
return bool(users)
|
||||
|
||||
def add_user(self, username, password, is_admin=False):
|
||||
with closing(self.db.cursor()) as cursor:
|
||||
cursor.execute("INSERT INTO users (username, password, admin) VALUES (?, ?, ?)",
|
||||
(username, self.hashit(password), is_admin))
|
||||
|
||||
def update_user(self, username, password, is_admin=False):
|
||||
with closing(self.db.cursor()) as cursor:
|
||||
cursor.execute("UPDATE users SET password=?, admin=? WHERE username=?;",
|
||||
(self.hashit(password), is_admin, username))
|
||||
|
||||
def get_user(self, user):
|
||||
with closing(self.db.cursor()) as cursor:
|
||||
try:
|
||||
column = "id" if type(user) is int else "username"
|
||||
return cursor.execute("SELECT * FROM users WHERE {}=?;".format(column), (user, )).fetchall()[0]
|
||||
except IndexError:
|
||||
raise NotFoundError("User doesn't exist")
|
||||
|
||||
def set_starred(self, user_id, node_id, starred=True):
|
||||
with closing(self.db.cursor()) as cursor:
|
||||
if starred:
|
||||
query = "INSERT INTO stars (userid, nodeid) VALUES (?, ?);"
|
||||
else:
|
||||
query = "DELETE FROM stars WHERE userid=? and nodeid=?;"
|
||||
try:
|
||||
cursor.execute(query, (user_id, node_id))
|
||||
except sqlite3.IntegrityError:
|
||||
pass
|
||||
|
||||
def get_starred_items(self, for_user_id=None):
|
||||
with closing(self.db.cursor()) as cursor:
|
||||
q = """SELECT n.* FROM nodes as n INNER JOIN stars as s ON s.nodeid = n.id"""
|
||||
qargs = []
|
||||
if for_user_id:
|
||||
q += """ AND userid=?"""
|
||||
qargs += [int(for_user_id)]
|
||||
return list(map(self._populate_meta,
|
||||
cursor.execute(q, qargs).fetchall()))
|
||||
|
@ -28,64 +28,46 @@ class NoDataException(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class DuplicateRootException(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class PysonicLibrary(object):
|
||||
def __init__(self, database):
|
||||
self.db = database
|
||||
|
||||
self.get_libraries = self.db.get_libraries
|
||||
self.get_artists = self.db.get_artists
|
||||
self.get_albums = self.db.get_albums
|
||||
# self.get_song = self.db.get_song
|
||||
# self.get_cover = self.db.get_cover
|
||||
|
||||
self.scanner = PysonicFilesystemScanner(self)
|
||||
logging.info("library ready")
|
||||
|
||||
def update(self):
|
||||
"""
|
||||
Start the library media scanner ands
|
||||
"""
|
||||
self.scanner.init_scan()
|
||||
|
||||
def add_dir(self, dir_path):
|
||||
dir_path = os.path.abspath(os.path.normpath(dir_path))
|
||||
libraries = [i['metadata']['fspath'] for i in self.db.getnodes(-1)]
|
||||
if dir_path in libraries:
|
||||
raise DuplicateRootException("Dir already in library")
|
||||
else:
|
||||
new_root = self.db._addnode(-1, 'New Library', is_dir=True)
|
||||
self.db.update_metadata(new_root['id'], fspath=dir_path)
|
||||
|
||||
#@memoize
|
||||
def get_libraries(self):
|
||||
def add_root_dir(self, path):
|
||||
"""
|
||||
Libraries are top-level nodes
|
||||
The music library consists of a number of root dirs. This adds a new root
|
||||
"""
|
||||
return self.db.getnodes(-1)
|
||||
path = os.path.abspath(os.path.normpath(path))
|
||||
self.db.add_root(path)
|
||||
|
||||
#@memoize
|
||||
def get_artists(self):
|
||||
# Assume artists are second level dirs
|
||||
return self.db.getnodes(*[item["id"] for item in self.get_libraries()])
|
||||
# def get_artists(self, *args, **kwargs):
|
||||
# artists = self.db.get_artists(*args, **kwargs)
|
||||
# for item in artists:
|
||||
# item["parent"] = item["libraryid"]
|
||||
# return artists
|
||||
|
||||
def get_dir(self, dirid):
|
||||
return self.db.getnode(dirid)
|
||||
|
||||
def get_dir_children(self, dirid):
|
||||
return self.db.getnodes(dirid)
|
||||
|
||||
#@memoize
|
||||
def get_albums(self):
|
||||
return self.db.getnodes(*[item["id"] for item in self.get_artists()])
|
||||
|
||||
#@memoize
|
||||
def get_filepath(self, nodeid):
|
||||
parents = [self.db.getnode(nodeid)]
|
||||
while parents[-1]['parent'] != -1:
|
||||
parents.append(self.db.getnode(parents[-1]['parent']))
|
||||
root = parents.pop()
|
||||
parents.reverse()
|
||||
return os.path.join(root['metadata']['fspath'], *[i['name'] for i in parents])
|
||||
|
||||
def get_file_metadata(self, nodeid):
|
||||
return self.db.get_metadata(nodeid)
|
||||
# def get_albums(self, *args, **kwargs):
|
||||
# albums = self.db.get_albums(*args, **kwargs)
|
||||
# for item in albums:
|
||||
# item["parent"] = item["artistid"]
|
||||
# return albums
|
||||
|
||||
def get_artist_info(self, item_id):
|
||||
# artist = self.db.getnode(item_id)
|
||||
#TODO
|
||||
return {"biography": "placeholder biography",
|
||||
"musicBrainzId": "playerholder",
|
||||
"lastFmUrl": "https://www.last.fm/music/Placeholder",
|
||||
@ -94,28 +76,84 @@ class PysonicLibrary(object):
|
||||
"largeImageUrl": "",
|
||||
"similarArtists": []}
|
||||
|
||||
def set_starred(self, username, node_id, starred):
|
||||
self.db.set_starred(self.db.get_user(username)["id"], node_id, starred)
|
||||
def get_cover(self, cover_id):
|
||||
cover = self.db.get_cover(cover_id)
|
||||
library = self.db.get_libraries(cover["library"])[0]
|
||||
cover['_fullpath'] = os.path.join(library["path"], cover["path"])
|
||||
return cover
|
||||
|
||||
def get_stars(self, user, user_id):
|
||||
self.db.get_stars()
|
||||
def get_song(self, song_id):
|
||||
song = self.db.get_songs(id=song_id)[0]
|
||||
library = self.db.get_libraries(song["library"])[0]
|
||||
song['_fullpath'] = os.path.join(library["path"], song["file"])
|
||||
return song
|
||||
|
||||
def get_user(self, user):
|
||||
return self.db.get_user(user)
|
||||
# #@memoize
|
||||
# def get_libraries(self):
|
||||
# """
|
||||
# Libraries are top-level nodes
|
||||
# """
|
||||
# return self.db.getnodes(-1)
|
||||
|
||||
def get_starred(self, username):
|
||||
return self.db.get_starred_items(self.db.get_user(username)["id"])
|
||||
# #@memoize
|
||||
# def get_artists(self):
|
||||
# # Assume artists are second level dirs
|
||||
# return self.db.getnodes(*[item["id"] for item in self.get_libraries()])
|
||||
|
||||
def get_songs(self, limit=50, shuffle=True):
|
||||
return self.db.getnodes(types=MUSIC_TYPES, limit=limit, order="rand")
|
||||
# def get_dir(self, dirid):
|
||||
# return self.db.getnode(dirid)
|
||||
|
||||
def get_song(self, id=None):
|
||||
if id:
|
||||
return self.db.getnode(id)
|
||||
else:
|
||||
return self.db.getnodes(types=MUSIC_TYPES, limit=1, order="rand")
|
||||
# def get_dir_children(self, dirid):
|
||||
# return self.db.getnodes(dirid)
|
||||
|
||||
def report_transcode(self, item_id, bitrate, num_bytes):
|
||||
assert type(bitrate) is int and bitrate > 0 and bitrate <= 320
|
||||
logging.info("Got transcode report of {} for item {} @ {}".format(num_bytes, item_id, bitrate))
|
||||
self.db.update_metadata(item_id, {"transcoded_{}_size".format(bitrate):int(num_bytes)})
|
||||
# #@memoize
|
||||
# def get_albums(self):
|
||||
# return self.db.getnodes(*[item["id"] for item in self.get_artists()])
|
||||
|
||||
# #@memoize
|
||||
# def get_filepath(self, nodeid):
|
||||
# parents = [self.db.getnode(nodeid)]
|
||||
# while parents[-1]['parent'] != -1:
|
||||
# parents.append(self.db.getnode(parents[-1]['parent']))
|
||||
# root = parents.pop()
|
||||
# parents.reverse()
|
||||
# return os.path.join(root['metadata']['fspath'], *[i['name'] for i in parents])
|
||||
|
||||
# def get_file_metadata(self, nodeid):
|
||||
# return self.db.get_metadata(nodeid)
|
||||
|
||||
# def get_artist_info(self, item_id):
|
||||
# # artist = self.db.getnode(item_id)
|
||||
# return {"biography": "placeholder biography",
|
||||
# "musicBrainzId": "playerholder",
|
||||
# "lastFmUrl": "https://www.last.fm/music/Placeholder",
|
||||
# "smallImageUrl": "",
|
||||
# "mediumImageUrl": "",
|
||||
# "largeImageUrl": "",
|
||||
# "similarArtists": []}
|
||||
|
||||
# def set_starred(self, username, node_id, starred):
|
||||
# self.db.set_starred(self.db.get_user(username)["id"], node_id, starred)
|
||||
|
||||
# def get_stars(self, user, user_id):
|
||||
# self.db.get_stars()
|
||||
|
||||
# def get_user(self, user):
|
||||
# return self.db.get_user(user)
|
||||
|
||||
# def get_starred(self, username):
|
||||
# return self.db.get_starred_items(self.db.get_user(username)["id"])
|
||||
|
||||
# def get_songs(self, limit=50, shuffle=True):
|
||||
# return self.db.getnodes(types=MUSIC_TYPES, limit=limit, order="rand")
|
||||
|
||||
# def get_song(self, id=None):
|
||||
# if id:
|
||||
# return self.db.getnode(id)
|
||||
# else:
|
||||
# return self.db.getnodes(types=MUSIC_TYPES, limit=1, order="rand")
|
||||
|
||||
# def report_transcode(self, item_id, bitrate, num_bytes):
|
||||
# assert type(bitrate) is int and bitrate > 0 and bitrate <= 320
|
||||
# logging.info("Got transcode report of {} for item {} @ {}".format(num_bytes, item_id, bitrate))
|
||||
# self.db.update_metadata(item_id, {"transcoded_{}_size".format(bitrate):int(num_bytes)})
|
||||
|
@ -1,10 +1,11 @@
|
||||
import os
|
||||
import re
|
||||
import logging
|
||||
from contextlib import closing
|
||||
import mimetypes
|
||||
from time import time
|
||||
from threading import Thread
|
||||
from pysonic.types import KNOWN_MIMES, MUSIC_TYPES, MPX_TYPES, FLAC_TYPES, WAV_TYPES
|
||||
from pysonic.types import KNOWN_MIMES, MUSIC_TYPES, MPX_TYPES, FLAC_TYPES, WAV_TYPES, MUSIC_EXTENSIONS, IMAGE_EXTENSIONS, IMAGE_TYPES
|
||||
from mutagen.id3 import ID3
|
||||
from mutagen import MutagenError
|
||||
from mutagen.id3._util import ID3NoHeaderError
|
||||
@ -25,151 +26,315 @@ class PysonicFilesystemScanner(object):
|
||||
self.scanner.start()
|
||||
|
||||
def rescan(self):
|
||||
# Perform directory scan
|
||||
logging.warning("Beginning library rescan")
|
||||
"""
|
||||
Perform a full scan of the media library's files
|
||||
"""
|
||||
start = time()
|
||||
for parent in self.library.get_libraries():
|
||||
meta = parent["metadata"]
|
||||
logging.info("Scanning {}".format(meta["fspath"]))
|
||||
logging.warning("Beginning library rescan")
|
||||
for parent in self.library.db.get_libraries():
|
||||
logging.info("Scanning {}".format(parent["path"]))
|
||||
self.scan_root(parent["id"], parent["path"])
|
||||
logging.warning("Rescan complete in %ss", round(time() - start, 3))
|
||||
|
||||
def recurse_dir(path, parent):
|
||||
logging.info("Scanning {}".format(path))
|
||||
# create or update the database of nodes by comparing sets of names
|
||||
fs_entries = set(os.listdir(path))
|
||||
db_entires = self.library.db.getnodes(parent["id"])
|
||||
db_entires_names = set([i['name'] for i in db_entires])
|
||||
to_delete = db_entires_names - fs_entries
|
||||
to_create = fs_entries - db_entires_names
|
||||
def scan_root(self, pid, root):
|
||||
"""
|
||||
Scan a single root the library
|
||||
:param pid: parent ID
|
||||
:param root: absolute path to scan
|
||||
"""
|
||||
logging.warning("Beginning file scan for library %s", pid)
|
||||
root_depth = len(self.split_path(root))
|
||||
for path, dirs, files in os.walk(root):
|
||||
child = self.split_path(path)[root_depth:]
|
||||
# dirid = self.create_or_get_dbdir_tree(pid, child) # dumb table for Subsonic
|
||||
self.scan_dir(pid, root, child, dirs, files)
|
||||
|
||||
# If any size have changed, mark the file to be rescanned
|
||||
for entry in db_entires:
|
||||
finfo = os.stat(os.path.join(path, entry["name"]))
|
||||
if finfo.st_size != entry["size"]:
|
||||
logging.info("{} has changed in size, marking for meta rescan".format(entry["id"]))
|
||||
self.library.db.update_metadata(entry['id'], id3_done=False, size=finfo.st_size)
|
||||
logging.warning("Beginning metadata scan for library %s", pid)
|
||||
self.scan_metadata(pid, root, freshonly=True)
|
||||
|
||||
# Create any nodes not found in the db
|
||||
for create in to_create:
|
||||
new_finfo = os.stat(os.path.join(path, create))
|
||||
new_node = self.library.db.addnode(parent["id"], path, create, size=new_finfo.st_size)
|
||||
logging.info("Added {}".format(os.path.join(path, create)))
|
||||
db_entires.append(new_node)
|
||||
logging.warning("Finished scan for library %s", pid)
|
||||
|
||||
# Delete any db nodes not found on disk
|
||||
for delete in to_delete:
|
||||
logging.info("Prune ", delete, "in parent", path)
|
||||
node = [i for i in db_entires if i["name"] == delete]
|
||||
if node:
|
||||
deleted = self.library.db.delnode(node[0]["id"])
|
||||
logging.info("Pruned {}, deleting total of {}".format(node, deleted))
|
||||
def create_or_get_dbdir_tree(self, cursor, pid, path):
|
||||
"""
|
||||
Return the ID of the directory specified by `path`. The path will be created as necessary. This bullshit exists
|
||||
only to serve Subsonic, and can easily be lopped off.
|
||||
:param pid: root parent the path resides in
|
||||
:param path: single-file tree as a list of dir names under the root parent
|
||||
:type path list
|
||||
"""
|
||||
assert path
|
||||
# with closing(self.library.db.db.cursor()) as cursor:
|
||||
parent_id = 0 # 0 indicates a top level item in the library
|
||||
for name in path:
|
||||
parent_id = self.create_or_get_dbdir(cursor, pid, parent_id, name)
|
||||
return parent_id
|
||||
|
||||
for entry in db_entires:
|
||||
if entry["name"] in to_delete:
|
||||
def create_or_get_dbdir(self, cursor, pid, parent_id, name):
|
||||
for row in cursor.execute("SELECT * FROM dirs WHERE library=? and parent=? and name=?",
|
||||
(pid, parent_id, name, )):
|
||||
return row['id']
|
||||
cursor.execute("INSERT INTO dirs (library, parent, name) VALUES (?, ?, ?)", (pid, parent_id, name))
|
||||
return cursor.lastrowid
|
||||
|
||||
def scan_dir(self, pid, root, path, dirs, files):
|
||||
"""
|
||||
Scan a single directory in the library. Actually, this ignores all dirs that don't contain files. Dirs are
|
||||
interpreted as follows:
|
||||
- The library |