dave 2018-04-05 19:02:17 -07:00
parent 7d727c832d
commit bd2ba225ac
8 changed files with 1178 additions and 674 deletions

pysonic/api.py

@@ -1,157 +1,15 @@
import re
import json
import logging
import subprocess
from time import time
from random import shuffle
from threading import Thread
import cherrypy
from collections import defaultdict
from bs4 import BeautifulSoup
from pysonic.library import LETTER_GROUPS
from pysonic.types import MUSIC_TYPES
from pysonic.apilib import formatresponse, ApiResponse
import cherrypy
CALLBACK_RE = re.compile(r'^[a-zA-Z0-9_]+$')
logging = logging.getLogger("api")
response_formats = defaultdict(lambda: "render_xml")
response_formats["json"] = "render_json"
response_formats["jsonp"] = "render_jsonp"
response_headers = defaultdict(lambda: "text/xml; charset=utf-8")
response_headers["json"] = "application/json; charset=utf-8"
response_headers["jsonp"] = "text/javascript; charset=utf-8"
def formatresponse(func):
"""
Decorator for rendering ApiResponse responses
"""
def wrapper(*args, **kwargs):
response = func(*args, **kwargs)
response_format = kwargs.get("f", "xml")
callback = kwargs.get("callback", None)
cherrypy.response.headers['Content-Type'] = response_headers[response_format]
renderer = getattr(response, response_formats[response_format])
if response_format == "jsonp":
if callback is None:
return response.render_xml().encode('UTF-8') # copy original subsonic behavior
else:
return renderer(callback).encode('UTF-8')
return renderer().encode('UTF-8')
return wrapper
class ApiResponse(object):
def __init__(self, status="ok", version="1.15.0"):
"""
ApiResponses are python data structures that can be converted to other formats. The response has a status and a
version. The response data structure is stored in self.data and follows these rules:
- self.data is a dict
- the dict's values become either child nodes or attributes, named by the key
- lists become many oner one child
- dict values are not allowed
- all other types (str, int, NoneType) are attributes
:param status:
:param version:
"""
self.status = status
self.version = version
self.data = defaultdict(lambda: list())
def add_child(self, _type, _parent="", _real_parent=None, **kwargs):
parent = _real_parent if _real_parent else self.get_child(_parent)
m = defaultdict(lambda: list())
m.update(dict(kwargs))
parent[_type].append(m)
return m
def get_child(self, _path):
parent_path = _path.split(".")
parent = self.data
for item in parent_path:
if not item:
continue
parent = parent.get(item)[0]
return parent
def set_attrs(self, _path, **attrs):
parent = self.get_child(_path)
if type(parent) not in (dict, defaultdict):
raise Exception("wot")
parent.update(attrs)
def render_json(self):
def _flatten_json(item):
"""
Convert defaultdicts to dicts and remove lists where node has 1 or no child
"""
listed_attrs = ["folder"]
d = {}
for k, v in item.items():
if type(v) is list:
if len(v) > 1:
d[k] = []
for subitem in v:
d[k].append(_flatten_json(subitem))
elif len(v) == 1:
d[k] = _flatten_json(v[0])
else:
d[k] = {}
else:
d[k] = [v] if k in listed_attrs else v
return d
data = _flatten_json(self.data)
return json.dumps({"subsonic-response": dict(status=self.status, version=self.version, **data)}, indent=4)
def render_jsonp(self, callback):
assert CALLBACK_RE.match(callback), "Invalid callback"
return "{}({});".format(callback, self.render_json())
def render_xml(self):
text_attrs = ['largeImageUrl', 'musicBrainzId', 'smallImageUrl', 'mediumImageUrl', 'lastFmUrl', 'biography',
'folder']
selftext_attrs = ['value']
# These attributes will be placed in <hello>{{ value }}</hello> tags instead of hello="{{ value }}" on parent
doc = BeautifulSoup('', features='lxml-xml')
root = doc.new_tag("subsonic-response", xmlns="http://subsonic.org/restapi",
status=self.status,
version=self.version)
doc.append(root)
def _render_xml(node, parent):
"""
For every key in the node dict, the parent gets a new child tag with name == key
If the value is a dict, it becomes the new tag's attrs
If the value is a list, the parent gets many new tags with each dict as attrs
If the value is str int etc, parent gets attrs
"""
for key, value in node.items():
if type(value) in (dict, defaultdict):
tag = doc.new_tag(key)
parent.append(tag)
tag.attrs.update(value)
elif type(value) is list:
for item in value:
tag = doc.new_tag(key)
parent.append(tag)
_render_xml(item, tag)
else:
if key in text_attrs:
tag = doc.new_tag(key)
parent.append(tag)
tag.append(str(value))
elif key in selftext_attrs:
parent.append(str(value))
else:
parent.attrs[key] = value
_render_xml(self.data, root)
return doc.prettify()
class PysonicApi(object):
def __init__(self, db, library, options):
self.db = db
@@ -190,49 +48,50 @@ class PysonicApi(object):
def getIndexes_view(self, **kwargs):
# Get listing of top-level dir
response = ApiResponse()
# TODO real lastmodified date
# TODO deal with ignoredArticles
response.add_child("indexes", lastModified="1502310831000", ignoredArticles="The El La Los Las Le Les")
+ artists = self.library.get_artists(sortby="name", order="asc")
for letter in LETTER_GROUPS:
index = response.add_child("index", _parent="indexes", name=letter.upper())
- for artist in self.library.get_artists():
+ for artist in artists:
if artist["name"][0].lower() in letter:
- response.add_child("artist", _real_parent=index, id=artist["id"], name=artist["name"])
+ response.add_child("artist", _real_parent=index, id=artist["dir"], name=artist["name"])
return response
@cherrypy.expose
def savePlayQueue_view(self, id, current, position, **kwargs):
print("TODO save playlist with items {} current {} position {}".format(id, current, position))
@cherrypy.expose
@formatresponse
def getAlbumList_view(self, type, size=50, offset=0, **kwargs):
- albums = self.library.get_albums()
+ qargs = {}
if type == "random":
- shuffle(albums)
+ qargs.update(sortby="random")
elif type == "alphabeticalByName":
- albums.sort(key=lambda item: item.get("id3_album", item["album"] if item["album"] else "zzzzzUnsortable"))
+ qargs.update(sortby="name", order="asc")
+ elif type == "newest":
+ qargs.update(sortby="added", order="desc")
else:
raise NotImplemented()
- albumset = albums[0 + int(offset):int(size) + int(offset)]
+ qargs.update(limit=(offset, size))
+ albums = self.library.get_albums(**qargs)
response = ApiResponse()
response.add_child("albumList")
- for album in albumset:
+ for album in albums:
- album_meta = album['metadata']
- album_kw = dict(id=album["id"],
- parent=album["parent"],
- isDir="true" if album['isdir'] else "false",
- title=album_meta.get("id3_title", album["name"]), #TODO these cant be blank or dsub gets mad
- album=album_meta.get("id3_album", album["album"]),
- artist=album_meta.get("id3_artist", album["artist"]),
- #year=TODO
+ album_kw = dict(id=album["dir"],
+ parent=album["artistdir"],
+ isDir="true",
+ title=album["name"],
+ album=album["name"],
+ artist=album["artistname"],
+ coverArt=album["coverid"]
# playCount="0"
# created="2016-05-08T05:31:31.000Z"/>)
)
- if 'cover' in album_meta:
- album_kw["coverArt"] = album_meta["cover"]
- if 'id3_year' in album_meta:
- album_kw["year"] = album_meta['id3_year']
response.add_child("album", _parent="albumList", **album_kw)
return response
@@ -243,84 +102,67 @@ class PysonicApi(object):
List an artist dir
"""
dir_id = int(id)
+ dirtype, dirinfo, entity = self.library.db.get_subsonic_musicdir(dirid=dir_id)
- cherrypy.response.headers['Content-Type'] = 'text/xml; charset=utf-8'
response = ApiResponse()
response.add_child("directory")
+ response.set_attrs(_path="directory", name=entity['name'], id=entity['id'],
+ parent=dirinfo['parent'], playCount=420)
- directory = self.library.get_dir(dir_id)
- dir_meta = directory["metadata"]
- children = self.library.get_dir_children(dir_id)
- response.set_attrs(_path="directory", name=directory['name'], id=directory['id'],
- parent=directory['parent'], playCount=10)
- for item in children:
+ for childtype, child in entity["children"]:
# omit not dirs and media in browser
- if not item["isdir"] and item["type"] not in MUSIC_TYPES:
- continue
- item_meta = item['metadata']
- response.add_child("child", _parent="directory", **self.render_node(item, item_meta, directory, dir_meta))
+ # if not item["isdir"] and item["type"] not in MUSIC_TYPES:
+ # continue
+ # item_meta = item['metadata']
+ moreargs = {}
+ if childtype == "album":
+ moreargs.update(name=child["name"],
+ isDir="true", # TODO song files in artist dir
+ parent=entity["id"],
+ id=child["dir"])
+ if child["coverid"]:
+ moreargs.update(coverArt=child["coverid"])
+ # album=item["name"],
+ # title=item["name"], # TODO dupe?
+ # artist=artist["name"],
+ # coverArt=item["coverid"],
+ elif childtype == "song":
+ moreargs.update(name=child["title"],
+ artist=child["_artist"]["name"],
+ contentType=child["format"],
+ id=child["id"],
+ duration=child["length"],
+ isDir="false",
+ parent=entity["dir"],
+ # title=xxx
+ )
+ if entity["coverid"]:
+ moreargs.update(coverArt=entity["coverid"])
+ # duration="230" size="8409237" suffix="mp3" track="2" year="2005"/>
+ response.add_child("child", _parent="directory",
+ size="4096",
+ type="music",
+ **moreargs)
+ cherrypy.response.headers['Content-Type'] = 'text/xml; charset=utf-8'
return response
def render_node(self, item, item_meta, directory, dir_meta):
"""
Given a node and it's parent directory, and meta, return a dict with the keys formatted how the subsonic clients
expect them to be
:param item:
:param item_meta:
:param directory:
:param dir_meta:
"""
child = dict(id=item["id"],
parent=item["id"],
isDir="true" if item['isdir'] else "false",
title=item_meta.get("id3_title", item["name"]),
album=item_meta.get("id3_album", item["album"]),
artist=item_meta.get("id3_artist", item["artist"]),
# playCount="5",
# created="2016-04-25T07:31:33.000Z"
# genre="Other",
# path="Cosmic Gate/Sign Of The Times/03 Flatline (featuring Kyler England).mp3"
type="music")
if 'kbitrate' in item_meta:
child["bitrate"] = item_meta["kbitrate"]
if item["size"] != -1:
child["size"] = item["size"]
if "media_length" in item_meta:
child["duration"] = item_meta["media_length"]
if "albumId" in directory:
child["albumId"] = directory["id"]
if "artistId" in directory:
child["artistId"] = directory["parent"]
if "." in item["name"]:
child["suffix"] = item["name"].split(".")[-1]
if item["type"]:
child["contentType"] = item["type"]
if 'cover' in item_meta:
child["coverArt"] = item_meta["cover"]
elif 'cover' in dir_meta:
child["coverArt"] = dir_meta["cover"]
if 'track' in item_meta:
child["track"] = item_meta['track']
if 'id3_year' in item_meta:
child["year"] = item_meta['id3_year']
return child
@cherrypy.expose
def stream_view(self, id, maxBitRate="256", **kwargs):
maxBitRate = int(maxBitRate)
assert maxBitRate >= 32 and maxBitRate <= 320
- fpath = self.library.get_filepath(id)
- meta = self.library.get_file_metadata(id)
- to_bitrate = min(maxBitRate, self.options.max_bitrate, meta.get("media_kbitrate", 320))
+ song = self.library.get_song(int(id))
+ fpath = song["_fullpath"]
+ media_bitrate = song.get("bitrate") / 1024 if song.get("bitrate") else 320
+ to_bitrate = min(maxBitRate,
+ self.options.max_bitrate,
+ media_bitrate)
cherrypy.response.headers['Content-Type'] = 'audio/mpeg'
- if "media_length" in meta:
- cherrypy.response.headers['X-Content-Duration'] = str(int(meta['media_length']))
+ #if "media_length" in meta:
+ # cherrypy.response.headers['X-Content-Duration'] = str(int(meta['media_length']))
cherrypy.response.headers['X-Content-Kbitrate'] = str(to_bitrate)
- if (self.options.skip_transcode or meta.get("media_kbitrate", -1) == to_bitrate) \
- and meta["type"] == "audio/mpeg":
+ if (self.options.skip_transcode or (song.get("bitrate") and media_bitrate == to_bitrate)) \
+ and song["format"] == "audio/mpeg":
def content():
with open(fpath, "rb") as f:
while True:
@@ -330,10 +172,9 @@ class PysonicApi(object):
yield data
return content()
else:
- transcode_meta = "transcoded_{}_size".format(to_bitrate)
- if transcode_meta in meta:
- cherrypy.response.headers['Content-Length'] = str(int(meta[transcode_meta]))
+ # transcode_meta = "transcoded_{}_size".format(to_bitrate)
+ # if transcode_meta in meta:
+ # cherrypy.response.headers['Content-Length'] = str(int(meta[transcode_meta]))
transcode_args = ["ffmpeg", "-i", fpath, "-map", "0:0", "-b:a",
"{}k".format(to_bitrate),
"-v", "0", "-f", "mp3", "-"]
@@ -343,13 +184,13 @@ class PysonicApi(object):
def content(proc):
length = 0
- completed = False
+ # completed = False
start = time()
try:
while True:
data = proc.stdout.read(16 * 1024)
if not data:
- completed = True
+ # completed = True
break
yield data
length += len(data)
@@ -357,8 +198,8 @@ class PysonicApi(object):
proc.poll()
if proc.returncode is None or proc.returncode == 0:
logging.warning("transcoded {} in {}s".format(id, int(time() - start)))
- if completed:
- self.library.report_transcode(id, to_bitrate, length)
+ # if completed:
+ # self.library.report_transcode(id, to_bitrate, length)
else:
logging.error("transcode of {} exited with code {} after {}s".format(id, proc.returncode,
int(time() - start)))
@@ -378,7 +219,8 @@ class PysonicApi(object):
@cherrypy.expose
def getCoverArt_view(self, id, **kwargs):
- fpath = self.library.get_filepath(id)
+ cover = self.library.get_cover(id)
+ fpath = cover["_fullpath"]
type2ct = {
'jpg': 'image/jpeg',
'png': 'image/png',
@@ -397,7 +239,6 @@ class PysonicApi(object):
yield data
logging.info("\nSent {} bytes for {}".format(total, fpath))
return content()
getCoverArt_view._cp_config = {'response.stream': True}
@cherrypy.expose
@@ -471,15 +312,34 @@ class PysonicApi(object):
"""
response = ApiResponse()
response.add_child("randomSongs")
- children = self.library.get_songs(size, shuffle=True)
- for item in children:
- # omit not dirs and media in browser
- if not item["isdir"] and item["type"] not in MUSIC_TYPES:
- continue
- item_meta = item['metadata']
- itemtype = "song" if item["type"] in MUSIC_TYPES else "album"
- response.add_child(itemtype, _parent="randomSongs",
- **self.render_node(item, item_meta, {}, self.db.getnode(item["parent"])["metadata"]))
+ children = self.library.db.get_songs(limit=size, sortby="random")
+ for song in children:
+ moreargs = {}
+ if song["format"]:
+ moreargs.update(contentType=song["format"])
+ if song["albumcoverid"]:
+ moreargs.update(coverArt=song["albumcoverid"])
+ if song["length"]:
+ moreargs.update(duration=song["length"])
+ if song["track"]:
+ moreargs.update(track=song["track"])
+ if song["year"]:
+ moreargs.update(year=song["year"])
+ file_extension = song["file"].split(".")[-1]
+ response.add_child("song",
+ _parent="randomSongs",
+ title=song["title"],
+ album=song["albumname"],
+ artist=song["artistname"],
+ id=song["id"],
+ isDir="false",
+ parent=song["albumid"],
+ size=song["size"],
+ suffix=file_extension,
+ type="music",
+ **moreargs)
return response
@cherrypy.expose
@@ -487,9 +347,8 @@ class PysonicApi(object):
def getGenres_view(self, **kwargs):
response = ApiResponse()
response.add_child("genres")
- response.add_child("genre", _parent="genres", value="Death Metal", songCount=420, albumCount=69)
- response.add_child("genre", _parent="genres", value="Metal", songCount=52, albumCount=3)
- response.add_child("genre", _parent="genres", value="Punk", songCount=34, albumCount=3)
+ for row in self.library.db.get_genres():
+ response.add_child("genre", _parent="genres", value=row["name"], songCount=420, albumCount=69)
return response
@cherrypy.expose
@@ -500,7 +359,7 @@ class PysonicApi(object):
:param submission: True if end of song reached. False on start of track.
"""
submission = True if submission == "true" else False
- # TODO save played track stats
+ # TODO save played track stats and/or do last.fm bullshit
return ApiResponse()
@cherrypy.expose
@@ -548,3 +407,108 @@ class PysonicApi(object):
def setRating_view(self, id, rating):
# rating is 1-5
pass
@cherrypy.expose
def savePlayQueue_view(self, id, current, position, **kwargs):
print("TODO save playqueue with items {} current {} position {}".format(id, current, position))
# TODO save playlist with items ['378', '386', '384', '380', '383'] current 383 position 4471
# id entries are strings!
@cherrypy.expose
@formatresponse
def createPlaylist_view(self, name, songId, **kwargs):
if type(songId) != list:
songId = [songId]
user = self.library.db.get_user(cherrypy.request.login)
self.library.db.add_playlist(user["id"], name, songId)
return ApiResponse()
#TODO the response should be the new playlist, check the cap
@cherrypy.expose
@formatresponse
def getPlaylists_view(self, **kwargs):
user = self.library.db.get_user(cherrypy.request.login)
response = ApiResponse()
response.add_child("playlists")
for playlist in self.library.db.get_playlists(user["id"]):
response.add_child("playlist",
_parent="playlists",
id=playlist["id"],
name=playlist["name"],
owner=user["username"],
public=playlist["public"],
songCount=69,
duration=420,
# changed="2018-04-05T23:23:38.263Z"
# created="2018-04-05T23:23:38.252Z"
# coverArt="pl-1"
)
return response
@cherrypy.expose
@formatresponse
def getPlaylist_view(self, id, **kwargs):
user = self.library.db.get_user(cherrypy.request.login)
plinfo, songs = self.library.get_playlist(int(id))
response = ApiResponse()
response.add_child("playlist",
id=plinfo["id"],
name=plinfo["name"], # TODO this element should match getPlaylists_view
owner=user["username"], # TODO translate id to name
public=plinfo["public"],
songCount=69,
duration=420)
for song in songs:
response.add_child("entry",
_parent="playlist",
id=song["id"],
parent=song["albumid"], # albumid seems wrong? should be dir parent?
isDir="false",
title=song["title"],
album=song["albumname"],
artist=song["artistname"],
track=song["track"],
year=song["year"],
genre=song["genrename"],
coverArt=song["albumcoverid"],
size=song["size"],
contentType=song["format"],
# suffix="mp3"
duration=song["length"],
bitRate=song["bitrate"] / 1024,
path=song["file"],
playCount="1",
# created="2015-06-09T15:26:01.000Z"
albumId=song["albumid"],
artistId=song["artistid"],
type="music")
return response
@cherrypy.expose
@formatresponse
def updatePlaylist_view(self, playlistId, songIndexToRemove=None, songIdToAdd=None, **kwargs):
user = self.library.db.get_user(cherrypy.request.login)
plinfo, songs = self.library.get_playlist(int(playlistId))
assert plinfo["ownerid"] == user["id"]
if songIndexToRemove:
self.library.db.remove_index_from_playlist(playlistId, songIndexToRemove)
elif songIdToAdd:
self.library.db.add_to_playlist(playlistId, songIdToAdd)
#TODO there are more modification methods
return ApiResponse()
@cherrypy.expose
@formatresponse
def deletePlaylist_view(self, id, **kwargs):
user = self.library.db.get_user(cherrypy.request.login)
plinfo, _ = self.library.get_playlist(int(id))
assert plinfo["ownerid"] == user["id"]
self.library.delete_playlist(plinfo["id"])
return ApiResponse()

pysonic/apilib.py (new file, 143 lines)

@@ -0,0 +1,143 @@
from collections import defaultdict
from bs4 import BeautifulSoup
import re
import cherrypy
import json
CALLBACK_RE = re.compile(r'^[a-zA-Z0-9_]+$')
response_formats = defaultdict(lambda: "render_xml")
response_formats["json"] = "render_json"
response_formats["jsonp"] = "render_jsonp"
response_headers = defaultdict(lambda: "text/xml; charset=utf-8")
response_headers["json"] = "application/json; charset=utf-8"
response_headers["jsonp"] = "text/javascript; charset=utf-8"
def formatresponse(func):
"""
Decorator for rendering ApiResponse responses based on requested response type
"""
def wrapper(*args, **kwargs):
response = func(*args, **kwargs)
response_format = kwargs.get("f", "xml")
callback = kwargs.get("callback", None)
cherrypy.response.headers['Content-Type'] = response_headers[response_format]
renderer = getattr(response, response_formats[response_format])
if response_format == "jsonp":
if callback is None:
return response.render_xml().encode('UTF-8') # copy original subsonic behavior
else:
return renderer(callback).encode('UTF-8')
return renderer().encode('UTF-8')
return wrapper
class ApiResponse(object):
def __init__(self, status="ok", version="1.15.0"):
"""
ApiResponses are python data structures that can be converted to other formats. The response has a status and a
version. The response data structure is stored in self.data and follows these rules:
- self.data is a dict
- the dict's values become either child nodes or attributes, named by the key
- lists become many oner one child
- dict values are not allowed
- all other types (str, int, NoneType) are attributes
:param status:
:param version:
"""
self.status = status
self.version = version
self.data = defaultdict(lambda: list())
def add_child(self, _type, _parent="", _real_parent=None, **kwargs):
kwargs = {k: v for k, v in kwargs.items() if v or type(v) is int} # filter out empty keys (0 is ok)
parent = _real_parent if _real_parent else self.get_child(_parent)
m = defaultdict(lambda: list())
m.update(dict(kwargs))
parent[_type].append(m)
return m
def get_child(self, _path):
parent_path = _path.split(".")
parent = self.data
for item in parent_path:
if not item:
continue
parent = parent.get(item)[0]
return parent
def set_attrs(self, _path, **attrs):
parent = self.get_child(_path)
if type(parent) not in (dict, defaultdict):
raise Exception("wot")
parent.update(attrs)
def render_json(self):
def _flatten_json(item):
"""
Convert defaultdicts to dicts and remove lists where node has 1 or no child
"""
listed_attrs = ["folder"]
d = {}
for k, v in item.items():
if type(v) is list:
if len(v) > 1:
d[k] = []
for subitem in v:
d[k].append(_flatten_json(subitem))
elif len(v) == 1:
d[k] = _flatten_json(v[0])
else:
d[k] = {}
else:
d[k] = [v] if k in listed_attrs else v
return d
data = _flatten_json(self.data)
return json.dumps({"subsonic-response": dict(status=self.status, version=self.version, **data)}, indent=4)
def render_jsonp(self, callback):
assert CALLBACK_RE.match(callback), "Invalid callback"
return "{}({});".format(callback, self.render_json())
def render_xml(self):
text_attrs = ['largeImageUrl', 'musicBrainzId', 'smallImageUrl', 'mediumImageUrl', 'lastFmUrl', 'biography',
'folder']
selftext_attrs = ['value']
# These attributes will be placed in <hello>{{ value }}</hello> tags instead of hello="{{ value }}" on parent
doc = BeautifulSoup('', features='lxml-xml')
root = doc.new_tag("subsonic-response", xmlns="http://subsonic.org/restapi",
status=self.status,
version=self.version)
doc.append(root)
def _render_xml(node, parent):
"""
For every key in the node dict, the parent gets a new child tag with name == key
If the value is a dict, it becomes the new tag's attrs
If the value is a list, the parent gets many new tags with each dict as attrs
If the value is str int etc, parent gets attrs
"""
for key, value in node.items():
if type(value) in (dict, defaultdict):
tag = doc.new_tag(key)
parent.append(tag)
tag.attrs.update(value)
elif type(value) is list:
for item in value:
tag = doc.new_tag(key)
parent.append(tag)
_render_xml(item, tag)
else:
if key in text_attrs:
tag = doc.new_tag(key)
parent.append(tag)
tag.append(str(value))
elif key in selftext_attrs:
parent.append(str(value))
else:
parent.attrs[key] = value
_render_xml(self.data, root)
return doc.prettify()
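The conversion rules in the docstring above are easiest to see in a tiny usage sketch. Everything below comes from apilib.py itself; only the genre name and counts are invented for illustration. In api.py the same objects are returned from methods wrapped with @cherrypy.expose and @formatresponse, which pick render_xml, render_json or render_jsonp based on the request's f parameter.

# Illustrative sketch of the ApiResponse conversion rules documented above
from pysonic.apilib import ApiResponse

response = ApiResponse()                        # status="ok", version="1.15.0"
response.add_child("genres")                    # becomes a child node named "genres"
response.add_child("genre", _parent="genres",   # list entries become repeated "genre" child nodes
                   value="Metal",               # "value" is a selftext attr: rendered as tag text
                   songCount=52, albumCount=3)  # plain ints/strings become attributes on the tag

print(response.render_xml())   # the subsonic-response XML document with a genres/genre tree
print(response.render_json())  # {"subsonic-response": {"status": "ok", "version": "1.15.0", "genres": {...}}}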

View File

@@ -1,10 +1,10 @@
import os
import logging
import cherrypy
- from sqlite3 import IntegrityError
+ from sqlite3 import DatabaseError
from pysonic.api import PysonicApi
- from pysonic.library import PysonicLibrary, DuplicateRootException
- from pysonic.database import PysonicDatabase
+ from pysonic.library import PysonicLibrary
+ from pysonic.database import PysonicDatabase, DuplicateRootException
def main():
@@ -31,14 +31,15 @@ def main():
args = parser.parse_args()
- logging.basicConfig(level=logging.INFO if args.debug else logging.WARNING)
+ logging.basicConfig(level=logging.INFO if args.debug else logging.WARNING,
+ format="%(asctime)-15s %(levelname)-8s %(filename)s:%(lineno)d %(message)s")
db = PysonicDatabase(path=args.database_path)
library = PysonicLibrary(db)
for dirname in args.dirs:
assert os.path.exists(dirname) and dirname.startswith("/"), "--dirs must be absolute paths and exist!"
try:
- library.add_dir(dirname)
+ library.add_root_dir(dirname)
except DuplicateRootException:
pass
library.update()
@@ -46,21 +47,25 @@ def main():
for username, password in args.user:
try:
db.add_user(username, password)
- except IntegrityError:
+ except DatabaseError:
db.update_user(username, password)
- logging.warning("Libraries: {}".format([i["name"] for i in library.get_libraries()]))
- logging.warning("Artists: {}".format([i["name"] for i in library.get_artists()]))
- logging.warning("Albums: {}".format(len(library.get_albums())))
+ # logging.warning("Libraries: {}".format([i["name"] for i in library.get_libraries()]))
+ # logging.warning("Artists: {}".format([i["name"] for i in library.get_artists()]))
+ # logging.warning("Albums: {}".format(len(library.get_albums())))
api = PysonicApi(db, library, args)
api_config = {}
if args.disable_auth:
logging.warning("starting up with auth disabled")
else:
+ def validate_password(realm, username, password):
+ print("I JUST VALIDATED {}:{} ({})".format(username, password, realm))
+ return True
api_config.update({'tools.auth_basic.on': True,
'tools.auth_basic.realm': 'pysonic',
- 'tools.auth_basic.checkpassword': db.validate_password})
+ 'tools.auth_basic.checkpassword': validate_password})
if args.enable_cors:
def cors():
cherrypy.response.headers["Access-Control-Allow-Origin"] = "*"
@@ -99,5 +104,6 @@ def main():
logging.info("API has shut down")
cherrypy.engine.exit()
if __name__ == '__main__':
main()

pysonic/database.py

@@ -1,10 +1,9 @@
import os
import json
import sqlite3
import logging
from hashlib import sha512
from time import time
from contextlib import closing
from collections import Iterable
logging = logging.getLogger("database")
keys_in_table = ["title", "album", "artist", "type", "size"]
@@ -21,12 +20,33 @@ class NotFoundError(Exception):
pass
class DuplicateRootException(Exception):
pass
def hash_password(unicode_string):
return sha512(unicode_string.encode('UTF-8')).hexdigest()
def readcursor(func):
"""
Provides a cursor to the wrapped method as the first arg.
"""
def wrapped(*args, **kwargs):
self = args[0]
if len(args) >= 2 and isinstance(args[1], sqlite3.Cursor):
return func(*args, **kwargs)
else:
with closing(self.db.cursor()) as cursor:
return func(*[self, cursor], *args[1:], **kwargs)
return wrapped
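A short sketch of what the decorator gives the query methods defined below: they can be called bare, in which case a cursor is opened and closed per call, or with an explicit cursor when several statements should share one (as the scanner does when it commits per directory). The database path here is an illustrative value, not something from the commit.

from contextlib import closing
from pysonic.database import PysonicDatabase

db = PysonicDatabase(path="./pysonic.sqlite")  # example path

genres = db.get_genres()           # no cursor passed: the wrapper opens one from self.db

with closing(db.db.cursor()) as cursor:
    db.get_genres(cursor)          # args[1] is a sqlite3.Cursor, so it is reused as-is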
class PysonicDatabase(object):
def __init__(self, path):
- self.sqlite_opts = dict(check_same_thread=False, cached_statements=0, isolation_level=None)
+ self.sqlite_opts = dict(check_same_thread=False)
self.path = path
self.db = None
self.open()
self.migrate()
@@ -36,212 +56,423 @@ class PysonicDatabase(object):
def migrate(self):
# Create db
- queries = ["""CREATE TABLE 'meta' (
+ queries = ["""CREATE TABLE 'libraries' (
'id' INTEGER PRIMARY KEY AUTOINCREMENT,
'name' TEXT,
'path' TEXT UNIQUE);""",
"""CREATE TABLE 'dirs' (
'id' INTEGER PRIMARY KEY AUTOINCREMENT,
'library' INTEGER,
'parent' INTEGER,
'name' TEXT,
UNIQUE(parent, name)
)""",
"""CREATE TABLE 'genres' (
'id' INTEGER PRIMARY KEY AUTOINCREMENT,
'name' TEXT UNIQUE)""",
"""CREATE TABLE 'artists' (
'id' INTEGER PRIMARY KEY AUTOINCREMENT,
'libraryid' INTEGER,
'dir' INTEGER UNIQUE,
'name' TEXT)""",
"""CREATE TABLE 'albums' (
'id' INTEGER PRIMARY KEY AUTOINCREMENT,
'artistid' INTEGER,
'coverid' INTEGER,
'dir' INTEGER,
'name' TEXT,
'added' INTEGER NOT NULL DEFAULT -1,
UNIQUE (artistid, dir));""",
"""CREATE TABLE 'songs' (
'id' INTEGER PRIMARY KEY AUTOINCREMENT,
'library' INTEGER,
'albumid' BOOLEAN,
'genre' INTEGER DEFAULT NULL,
'file' TEXT UNIQUE, -- path from the library root
'size' INTEGER NOT NULL DEFAULT -1,
'title' TEXT NOT NULL,
'lastscan' INTEGER NOT NULL DEFAULT -1,
'format' TEXT,
'length' INTEGER,
'bitrate' INTEGER,
'track' INTEGER,
'year' INTEGER
)""",
"""CREATE TABLE 'covers' (
'id' INTEGER PRIMARY KEY AUTOINCREMENT,
'library' INTEGER,
'type' TEXT,
'size' TEXT,
'path' TEXT UNIQUE);""",
"""CREATE TABLE 'users' (
'id' INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
'username' TEXT UNIQUE NOT NULL,
'password' TEXT NOT NULL,
'admin' BOOLEAN DEFAULT 0,
'email' TEXT)""",
"""CREATE TABLE 'stars' (
'userid' INTEGER,
'songid' INTEGER,
primary key ('userid', 'songid'))""",
"""CREATE TABLE 'playlists' (
'id' INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
'ownerid' INTEGER,
'name' TEXT,
'public' BOOLEAN,
'created' INTEGER,
'changed' INTEGER,
'cover' INTEGER,
UNIQUE ('ownerid', 'name'))""",
"""CREATE TABLE 'playlist_entries' (
'playlistid' INTEGER,
'songid' INTEGER,
'order' FLOAT)""",
"""CREATE TABLE 'meta' (
'key' TEXT PRIMARY KEY NOT NULL,
'value' TEXT);""",
- """INSERT INTO meta VALUES ('db_version', '3');""",
+ """INSERT INTO meta VALUES ('db_version', '1');"""]
"""CREATE TABLE 'nodes' (
'id' INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
'parent' INTEGER NOT NULL,
'isdir' BOOLEAN NOT NULL,
'size' INTEGER NOT NULL DEFAULT -1,
'name' TEXT NOT NULL,
'type' TEXT,
'title' TEXT,
'album' TEXT,
'artist' TEXT,
'metadata' TEXT
)""",
"""CREATE TABLE 'users' (
'id' INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
'username' TEXT UNIQUE NOT NULL,
'password' TEXT NOT NULL,
'admin' BOOLEAN DEFAULT 0,
'email' TEXT)""",
"""CREATE TABLE 'stars' (
'userid' INTEGER,
'nodeid' INTEGER,
primary key ('userid', 'nodeid'))"""]
with closing(self.db.cursor()) as cursor:
- cursor.execute("SELECT * FROM sqlite_master WHERE type='table' AND name='meta';")
+ cursor.execute("SELECT * FROM sqlite_master WHERE type='table' AND name='meta'")
# Initialize DB
if len(cursor.fetchall()) == 0:
logging.warning("Initializing database")
for query in queries:
cursor.execute(query)
+ cursor.execute("COMMIT")
else:
# Migrate if old db exists
- version = int(cursor.execute("SELECT * FROM meta WHERE key='db_version';").fetchone()['value'])
- if version < 1:
+ # cursor.execute("""UPDATE meta SET value=? WHERE key="db_version";""", (str(version), ))
+ # logging.warning("db schema is version {}".format(version))
logging.warning("migrating database to v1 from %s", version)
users_table = """CREATE TABLE 'users' (
'id' INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
'username' TEXT UNIQUE NOT NULL,
'password' TEXT NOT NULL,
'admin' BOOLEAN DEFAULT 0,
'email' TEXT)"""
cursor.execute(users_table)
version = 1
if version < 2:
logging.warning("migrating database to v2 from %s", version)
stars_table = """CREATE TABLE 'stars' (
'userid' INTEGER,
'nodeid' INTEGER,
primary key ('userid', 'nodeid'))"""
cursor.execute(stars_table)
version = 2
if version < 3:
logging.warning("migrating database to v3 from %s", version)
size_col = """ALTER TABLE nodes ADD 'size' INTEGER NOT NULL DEFAULT -1;"""
cursor.execute(size_col)
version = 3
cursor.execute("""UPDATE meta SET value=? WHERE key="db_version";""", (str(version), ))
logging.warning("db schema is version {}".format(version))
# Virtual file tree
def getnode(self, node_id):
return self.getnodes(node_id=node_id)[0]
def _populate_meta(self, node):
node['metadata'] = self.decode_metadata(node['metadata'])
return node
def getnodes(self, *parent_ids, node_id=None, types=None, limit=None, order=None):
"""
Find nodes that match the passed paramters.
:param parent_ids: one or more parents to find children of
:type parent_ids: int
:param node_id: single node id to return
:type node_id: int
:param types: filter by type column
:type types: list
:param limit: number of records to limit to
:param order: one of ("rand") to select ordering mode
"""
query = "SELECT * FROM nodes WHERE "
qargs = []
def add_filter(name, values):
nonlocal query
nonlocal qargs
query += "{} in (".format(name)
for value in (values if type(values) in [list, tuple] else [values]):
query += "?, "
qargs += [value]
query = query.rstrip(", ")
query += ") AND"
if node_id:
add_filter("id", node_id)
if parent_ids:
add_filter("parent", parent_ids)
if types:
add_filter("type", types)
query = query.rstrip(" AND").rstrip("WHERE ")
if order:
query += "ORDER BY "
if order == "rand":
query += "RANDOM()"
if limit: # TODO 2-item tuple limit
query += " limit {}".format(limit)
with closing(self.db.cursor()) as cursor:
return list(map(self._populate_meta, cursor.execute(query, qargs).fetchall()))
def addnode(self, parent_id, fspath, name, size=-1):
fullpath = os.path.join(fspath, name)
is_dir = os.path.isdir(fullpath)
return self._addnode(parent_id, name, is_dir, size=size)
def _addnode(self, parent_id, name, is_dir=True, size=-1):
with closing(self.db.cursor()) as cursor:
cursor.execute("INSERT INTO nodes (parent, isdir, name, size) VALUES (?, ?, ?, ?);",
(parent_id, 1 if is_dir else 0, name, size))
return self.getnode(cursor.lastrowid)
def delnode(self, node_id):
deleted = 1
for child in self.getnodes(node_id):
deleted += self.delnode(child["id"])
with closing(self.db.cursor()) as cursor:
cursor.execute("DELETE FROM nodes WHERE id=?;", (node_id, ))
return deleted
def update_metadata(self, node_id, mergedict=None, **kwargs):
mergedict = mergedict if mergedict else {}
mergedict.update(kwargs)
with closing(self.db.cursor()) as cursor:
for table_key in keys_in_table:
if table_key in mergedict:
cursor.execute("UPDATE nodes SET {}=? WHERE id=?;".format(table_key),
(mergedict[table_key], node_id))
other_meta = {k: v for k, v in mergedict.items() if k not in keys_in_table}
if other_meta:
metadata = self.get_metadata(node_id)
metadata.update(other_meta)
cursor.execute("UPDATE nodes SET metadata=? WHERE id=?;", (json.dumps(metadata), node_id, ))
def get_metadata(self, node_id):
node = self.getnode(node_id)
meta = node["metadata"]
meta.update({item: node[item] for item in keys_in_table})
return meta
def decode_metadata(self, metadata):
if metadata:
return json.loads(metadata)
return {}
def hashit(self, unicode_string):
return sha512(unicode_string.encode('UTF-8')).hexdigest()
def validate_password(self, realm, username, password):
with closing(self.db.cursor()) as cursor:
users = cursor.execute("SELECT * FROM users WHERE username=? AND password=?;",
(username, self.hashit(password))).fetchall()
return bool(users)
def add_user(self, username, password, is_admin=False):
with closing(self.db.cursor()) as cursor:
cursor.execute("INSERT INTO users (username, password, admin) VALUES (?, ?, ?)",
(username, self.hashit(password), is_admin))
def update_user(self, username, password, is_admin=False):
with closing(self.db.cursor()) as cursor:
cursor.execute("UPDATE users SET password=?, admin=? WHERE username=?;",
(self.hashit(password), is_admin, username))
def get_user(self, user):
with closing(self.db.cursor()) as cursor:
try:
column = "id" if type(user) is int else "username"
return cursor.execute("SELECT * FROM users WHERE {}=?;".format(column), (user, )).fetchall()[0]
except IndexError:
raise NotFoundError("User doesn't exist")
def set_starred(self, user_id, node_id, starred=True):
with closing(self.db.cursor()) as cursor:
if starred:
query = "INSERT INTO stars (userid, nodeid) VALUES (?, ?);"
else:
query = "DELETE FROM stars WHERE userid=? and nodeid=?;"
try:
cursor.execute(query, (user_id, node_id))
except sqlite3.IntegrityError:
pass
- def get_starred_items(self, for_user_id=None):
- with closing(self.db.cursor()) as cursor:
- q = """SELECT n.* FROM nodes as n INNER JOIN stars as s ON s.nodeid = n.id"""
- qargs = []
- if for_user_id:
- q += """ AND userid=?"""
- qargs += [int(for_user_id)]
- return list(map(self._populate_meta,
- cursor.execute(q, qargs).fetchall()))
+ # Music related
+ @readcursor
+ def add_root(self, cursor, path, name="Library"):
+ """
+ Add a new library root. Returns the root ID or raises on collision
+ :param path: normalized absolute path to add to the library
+ :type path: str:
+ :return: int
+ :raises: sqlite3.IntegrityError
+ """
+ assert path.startswith("/")
+ try:
+ cursor.execute("INSERT INTO libraries ('name', 'path') VALUES (?, ?)", (name, path, ))
+ cursor.execute("COMMIT")
+ return cursor.lastrowid
+ except sqlite3.IntegrityError:
+ raise DuplicateRootException("Root '{}' already exists".format(path))
@readcursor
def get_libraries(self, cursor, id=None):
libs = []
q = "SELECT * FROM libraries"
params = []
conditions = []
if id:
conditions.append("id = ?")
params.append(id)
if conditions:
q += " WHERE " + " AND ".join(conditions)
cursor.execute(q, params)
for row in cursor:
libs.append(row)
return libs
@readcursor
def get_artists(self, cursor, id=None, dirid=None, sortby=None, order=None):
assert order in ["asc", "desc", None]
artists = []
q = "SELECT * FROM artists"
params = []
conditions = []
if id:
conditions.append("id = ?")
params.append(id)
if dirid:
conditions.append("dir = ?")
params.append(dirid)
if conditions:
q += " WHERE " + " AND ".join(conditions)
if sortby:
q += " ORDER BY {} {}".format(sortby, order.upper() if order else "ASC")
cursor.execute(q, params)
for row in cursor:
artists.append(row)
return artists
@readcursor
def get_albums(self, cursor, id=None, artist=None, sortby=None, order=None, limit=None):
"""
:param limit: int or tuple of int, int. translates directly to sql logic.
"""
if order:
order = {"asc": "ASC", "desc": "DESC"}[order]
if sortby and sortby == "random":
sortby = "RANDOM()"
albums = []
q = """
SELECT
alb.*,
art.name as artistname,
dirs.parent as artistdir
FROM albums as alb
INNER JOIN artists as art
on alb.artistid = art.id
INNER JOIN dirs
on dirs.id = alb.dir
"""
params = []
conditions = []
if id:
conditions.append("id = ?")
params.append(id)
if artist:
conditions.append("artistid = ?")
params.append(artist)
if conditions:
q += " WHERE " + " AND ".join(conditions)
if sortby:
q += " ORDER BY {}".format(sortby)
if order:
q += " {}".format(order)
if limit:
q += " LIMIT {}".format(limit) if isinstance(limit, int) \
else " LIMIT {}, {}".format(*limit)
cursor.execute(q, params)
for row in cursor:
albums.append(row)
return albums
@readcursor
def get_songs(self, cursor, id=None, genre=None, sortby=None, order=None, limit=None):
# TODO make this query massively uglier by joining albums and artists so that artistid etc can be a filter
# or maybe lookup those IDs in the library layer?
if order:
order = {"asc": "ASC", "desc": "DESC"}[order]
if sortby and sortby == "random":
sortby = "RANDOM()"
songs = []
q = """
SELECT
s.*,
alb.name as albumname,
alb.coverid as albumcoverid,
art.name as artistname,
g.name as genrename
FROM songs as s
INNER JOIN albums as alb
on s.albumid == alb.id
INNER JOIN artists as art
on alb.artistid = art.id
LEFT JOIN genres as g
on s.genre == g.id
"""
params = []
conditions = []
if id and isinstance(id, int):
conditions.append("s.id = ?")
params.append(id)
elif id and isinstance(id, Iterable):
conditions.append("s.id IN ({})".format(",".join("?" * len(id))))
params += id
if genre:
conditions.append("g.name = ?")
params.append(genre)
if conditions:
q += " WHERE " + " AND ".join(conditions)
if sortby:
q += " ORDER BY {}".format(sortby)
if order:
q += " {}".format(order)
if limit:
q += " LIMIT {}".format(limit) # TODO support limit pagination
cursor.execute(q, params)
for row in cursor:
songs.append(row)
return songs
@readcursor
def get_genres(self, cursor, genre_id=None):
genres = []
q = "SELECT * FROM genres"
params = []
conditions = []
if genre_id:
conditions.append("id = ?")
params.append(genre_id)
if conditions:
q += " WHERE " + " AND ".join(conditions)
cursor.execute(q, params)
for row in cursor:
genres.append(row)
return genres
@readcursor
def get_cover(self, cursor, coverid):
cover = None
for cover in cursor.execute("SELECT * FROM covers WHERE id = ?", (coverid, )):
return cover
@readcursor
def get_subsonic_musicdir(self, cursor, dirid):
"""
The world is a harsh place.
Again, this bullshit exists only to serve subsonic clients. Given a directory ID it returns a dict containing:
- the directory itself
- its parent
- its child dirs
- its child media
that's a lie, it's a tuple and it's full of BS. read the code
"""
# find directory
dirinfo = None
for dirinfo in cursor.execute("SELECT * FROM dirs WHERE id = ?", (dirid, )):
pass
assert dirinfo
ret = None
# see if it matches the artists or albums table
artist = None
for artist in cursor.execute("SELECT * FROM artists WHERE dir = ?", (dirid, )):
pass
# if artist:
# get child albums
if artist:
ret = ("artist", dirinfo, artist)
children = []
for album in cursor.execute("SELECT * FROM albums WHERE artistid = ?", (artist["id"], )):
children.append(("album", album))
ret[2]['children'] = children
return ret
# else if album:
# get child tracks
album = None
for album in cursor.execute("SELECT * FROM albums WHERE dir = ?", (dirid, )):
pass
if album:
ret = ("album", dirinfo, album)
artist_info = cursor.execute("SELECT * FROM artists WHERE id = ?", (album["artistid"], )).fetchall()[0]
children = []
for song in cursor.execute("SELECT * FROM songs WHERE albumid = ?", (album["id"], )):
song["_artist"] = artist_info
children.append(("song", song))
ret[2]['children'] = children
return ret
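For reference, a sketch of how a caller unpacks the tuple described above; getMusicDirectory_view in api.py does exactly this. The dirid value is made up and db is the connection object from the earlier sketch.

dirtype, dirinfo, entity = db.get_subsonic_musicdir(dirid=7)
# dirtype is "artist" or "album"; dirinfo is the matching row from the dirs table
for childtype, child in entity["children"]:
    # ("album", row) pairs under an artist dir, ("song", row) pairs under an album dir
    print(childtype, child["name"] if childtype == "album" else child["title"])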
# Playlist related
@readcursor
def add_playlist(self, cursor, ownerid, name, song_ids, public=False):
"""
Create a playlist
"""
now = time()
cursor.execute("INSERT INTO playlists (ownerid, name, public, created, changed) VALUES (?, ?, ?, ?, ?)",
(ownerid, name, public, now, now))
plid = cursor.lastrowid
for song_id in song_ids:
self.add_to_playlist(cursor, plid, song_id)
cursor.execute("COMMIT")
@readcursor
def add_to_playlist(self, cursor, playlist_id, song_id):
# TODO deal with order column
cursor.execute("INSERT INTO playlist_entries (playlistid, songid) VALUES (?, ?)", (playlist_id, song_id))
@readcursor
def get_playlist(self, cursor, playlist_id):
return cursor.execute("SELECT * FROM playlists WHERE id=?", (playlist_id, )).fetchone()
@readcursor
def get_playlist_songs(self, cursor, playlist_id):
songs = []
q = """
SELECT
s.*,
alb.name as albumname,
alb.coverid as albumcoverid,
art.name as artistname,
art.name as artistid,
g.name as genrename
FROM playlist_entries as pe
INNER JOIN songs as s
on pe.songid == s.id
INNER JOIN albums as alb
on s.albumid == alb.id
INNER JOIN artists as art
on alb.artistid = art.id
LEFT JOIN genres as g
on s.genre == g.id
WHERE pe.playlistid = ?
ORDER BY pe.'order' ASC;
"""
for row in cursor.execute(q, (playlist_id, )):
songs.append(row)
return songs
@readcursor
def get_playlists(self, cursor, user_id):
playlists = []
for row in cursor.execute("SELECT * FROM playlists WHERE ownerid=? or public=1", (user_id, )):
playlists.append(row)
return playlists
@readcursor
def remove_index_from_playlist(self, cursor, playlist_id, index):
cursor.execute("DELETE FROM playlist_entries WHERE playlistid=? LIMIT ?, 1", (playlist_id, index, ))
cursor.execute("COMMIT")
@readcursor
def empty_playlist(self, cursor, playlist_id):
# TODO combine with
cursor.execute("DELETE FROM playlist_entries WHERE playlistid=?", (playlist_id, ))
cursor.execute("COMMIT")
@readcursor
def delete_playlist(self, cursor, playlist_id):
cursor.execute("DELETE FROM playlists WHERE id=?", (playlist_id, ))
cursor.execute("COMMIT")
# User related
@readcursor
def add_user(self, cursor, username, password, is_admin=False):
cursor.execute("INSERT INTO users (username, password, admin) VALUES (?, ?, ?)",
(username, hash_password(password), is_admin))
cursor.execute("COMMIT")
@readcursor
def update_user(self, cursor, username, password, is_admin=False):
cursor.execute("UPDATE users SET password=?, admin=? WHERE username=?;",
(hash_password(password), is_admin, username))
cursor.execute("COMMIT")
@readcursor
def get_user(self, cursor, user):
try:
column = "id" if type(user) is int else "username"
return cursor.execute("SELECT * FROM users WHERE {}=?;".format(column), (user, )).fetchall()[0]
except IndexError:
raise NotFoundError("User doesn't exist")
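A hedged sketch of the filtered query helpers defined above, mirroring how api.py calls them; the filter values are illustrative and db is the object from the earlier sketch.

# Random songs, as getRandomSongs_view requests them
songs = db.get_songs(limit=10, sortby="random")

# Newest albums first, matching getAlbumList_view's type=newest branch
albums = db.get_albums(sortby="added", order="desc", limit=(0, 50))
for album in albums:
    print(album["name"], album["artistname"], album["artistdir"])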

pysonic/library.py

@@ -28,64 +28,46 @@ class NoDataException(Exception):
pass
class DuplicateRootException(Exception):
pass
class PysonicLibrary(object):
def __init__(self, database):
self.db = database
self.get_libraries = self.db.get_libraries
self.get_artists = self.db.get_artists
self.get_albums = self.db.get_albums
# self.get_song = self.db.get_song
# self.get_cover = self.db.get_cover
self.scanner = PysonicFilesystemScanner(self)
logging.info("library ready")
def update(self):
+ """
+ Start the library media scanner ands
+ """
self.scanner.init_scan()
- def add_dir(self, dir_path):
- dir_path = os.path.abspath(os.path.normpath(dir_path))
- libraries = [i['metadata']['fspath'] for i in self.db.getnodes(-1)]
- if dir_path in libraries:
- raise DuplicateRootException("Dir already in library")
- else:
- new_root = self.db._addnode(-1, 'New Library', is_dir=True)
- self.db.update_metadata(new_root['id'], fspath=dir_path)
- #@memoize
- def get_libraries(self):
- """
- Libraries are top-level nodes
- """
- return self.db.getnodes(-1)
- #@memoize
- def get_artists(self):
- # Assume artists are second level dirs
- return self.db.getnodes(*[item["id"] for item in self.get_libraries()])
- def get_dir(self, dirid):
- return self.db.getnode(dirid)
- def get_dir_children(self, dirid):
- return self.db.getnodes(dirid)
- #@memoize
- def get_albums(self):
- return self.db.getnodes(*[item["id"] for item in self.get_artists()])
- #@memoize
- def get_filepath(self, nodeid):
- parents = [self.db.getnode(nodeid)]
- while parents[-1]['parent'] != -1:
- parents.append(self.db.getnode(parents[-1]['parent']))
- root = parents.pop()
- parents.reverse()
- return os.path.join(root['metadata']['fspath'], *[i['name'] for i in parents])
- def get_file_metadata(self, nodeid):
- return self.db.get_metadata(nodeid)
+ def add_root_dir(self, path):
+ """
+ The music library consists of a number of root dirs. This adds a new root
+ """
+ path = os.path.abspath(os.path.normpath(path))
+ self.db.add_root(path)
+ # def get_artists(self, *args, **kwargs):
+ # artists = self.db.get_artists(*args, **kwargs)
+ # for item in artists:
+ # item["parent"] = item["libraryid"]
+ # return artists
+ # def get_albums(self, *args, **kwargs):
+ # albums = self.db.get_albums(*args, **kwargs)
+ # for item in albums:
+ # item["parent"] = item["artistid"]
+ # return albums
def get_artist_info(self, item_id):
- # artist = self.db.getnode(item_id)
+ #TODO
return {"biography": "placeholder biography",
"musicBrainzId": "playerholder",
"lastFmUrl": "https://www.last.fm/music/Placeholder",
@@ -94,28 +76,23 @@ class PysonicLibrary(object):
"largeImageUrl": "",
"similarArtists": []}
- def set_starred(self, username, node_id, starred):
- self.db.set_starred(self.db.get_user(username)["id"], node_id, starred)
- def get_stars(self, user, user_id):
- self.db.get_stars()
- def get_user(self, user):
- return self.db.get_user(user)
- def get_starred(self, username):
- return self.db.get_starred_items(self.db.get_user(username)["id"])
- def get_songs(self, limit=50, shuffle=True):
- return self.db.getnodes(types=MUSIC_TYPES, limit=limit, order="rand")
- def get_song(self, id=None):
- if id:
- return self.db.getnode(id)
- else:
- return self.db.getnodes(types=MUSIC_TYPES, limit=1, order="rand")
- def report_transcode(self, item_id, bitrate, num_bytes):
- assert type(bitrate) is int and bitrate > 0 and bitrate <= 320
- logging.info("Got transcode report of {} for item {} @ {}".format(num_bytes, item_id, bitrate))
- self.db.update_metadata(item_id, {"transcoded_{}_size".format(bitrate):int(num_bytes)})
+ def get_cover(self, cover_id):
+ cover = self.db.get_cover(cover_id)
+ library = self.db.get_libraries(cover["library"])[0]
+ cover['_fullpath'] = os.path.join(library["path"], cover["path"])
+ return cover
+ def get_song(self, song_id):
+ song = self.db.get_songs(id=song_id)[0]
+ library = self.db.get_libraries(song["library"])[0]
+ song['_fullpath'] = os.path.join(library["path"], song["file"])
+ return song
+ def get_playlist(self, playlist_id):
+ playlist_info = self.db.get_playlist(playlist_id)
+ songs = self.db.get_playlist_songs(playlist_id)
+ return (playlist_info, songs)
+ def delete_playlist(self, playlist_id):
+ self.db.empty_playlist(playlist_id)
+ self.db.delete_playlist(playlist_id)
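A short sketch of the path resolution PysonicLibrary now performs for media lookups (see get_song and get_cover above); the song id and database path are illustrative.

from pysonic.database import PysonicDatabase
from pysonic.library import PysonicLibrary

library = PysonicLibrary(PysonicDatabase(path="./pysonic.sqlite"))
song = library.get_song(42)
# "_fullpath" joins the owning library root's path with the song's library-relative file
print(song["_fullpath"], song["format"], song["length"])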

View File

@@ -1,10 +1,11 @@
import os
import re
import logging
from contextlib import closing
import mimetypes
from time import time
from threading import Thread
- from pysonic.types import KNOWN_MIMES, MUSIC_TYPES, MPX_TYPES, FLAC_TYPES, WAV_TYPES
+ from pysonic.types import KNOWN_MIMES, MUSIC_TYPES, MPX_TYPES, FLAC_TYPES, WAV_TYPES, MUSIC_EXTENSIONS, IMAGE_EXTENSIONS, IMAGE_TYPES
from mutagen.id3 import ID3
from mutagen import MutagenError
from mutagen.id3._util import ID3NoHeaderError
@@ -25,151 +26,321 @@ class PysonicFilesystemScanner(object):
self.scanner.start()
def rescan(self): def rescan(self):
# Perform directory scan """
logging.warning("Beginning library rescan") Perform a full scan of the media library's files
"""
start = time() start = time()
for parent in self.library.get_libraries(): logging.warning("Beginning library rescan")
meta = parent["metadata"] for parent in self.library.db.get_libraries():
logging.info("Scanning {}".format(meta["fspath"])) logging.info("Scanning {}".format(parent["path"]))
self.scan_root(parent["id"], parent["path"])
logging.warning("Rescan complete in %ss", round(time() - start, 3))
# ---- Removed by this commit: the old recursive node-tree scanner (fragment of the previous scan method) ----

def recurse_dir(path, parent):
    logging.info("Scanning {}".format(path))
    # create or update the database of nodes by comparing sets of names
    fs_entries = set(os.listdir(path))
    db_entires = self.library.db.getnodes(parent["id"])
    db_entires_names = set([i['name'] for i in db_entires])
    to_delete = db_entires_names - fs_entries
    to_create = fs_entries - db_entires_names
    # If any sizes have changed, mark the file for a metadata rescan
    for entry in db_entires:
        finfo = os.stat(os.path.join(path, entry["name"]))
        if finfo.st_size != entry["size"]:
            logging.info("{} has changed in size, marking for meta rescan".format(entry["id"]))
            self.library.db.update_metadata(entry['id'], id3_done=False, size=finfo.st_size)
    # Create any nodes not found in the db
    for create in to_create:
        new_finfo = os.stat(os.path.join(path, create))
        new_node = self.library.db.addnode(parent["id"], path, create, size=new_finfo.st_size)
        logging.info("Added {}".format(os.path.join(path, create)))
        db_entires.append(new_node)
    # Delete any db nodes not found on disk
    for delete in to_delete:
        logging.info("Pruning %s in parent %s", delete, path)
        node = [i for i in db_entires if i["name"] == delete]
        if node:
            deleted = self.library.db.delnode(node[0]["id"])
            logging.info("Pruned {}, deleting total of {}".format(node, deleted))
    # Recurse into subdirectories
    for entry in db_entires:
        if entry["name"] in to_delete:
            continue
        if int(entry['isdir']):  # 1 means dir
            recurse_dir(os.path.join(path, entry["name"]), entry)

# Populate all files for this top-level root
recurse_dir(meta["fspath"], parent)
#
# Add simple metadata
for artist_dir in self.library.db.getnodes(parent["id"]):
    artist = artist_dir["name"]
    for album_dir in self.library.db.getnodes(artist_dir["id"]):
        album = album_dir["name"]
        album_meta = album_dir["metadata"]
        for track_file in self.library.db.getnodes(album_dir["id"]):
            title = track_file["name"]
            if not track_file["title"]:
                self.library.db.update_metadata(track_file["id"], artist=artist, album=album, title=title)
                logging.info("Adding simple metadata for {}/{}/{} #{}".format(artist, album,
                                                                              title, track_file["id"]))
            if not album_dir["album"]:
                self.library.db.update_metadata(album_dir["id"], artist=artist, album=album)
                logging.info("Adding simple metadata for {}/{} #{}".format(artist, album, album_dir["id"]))
            if not artist_dir["artist"]:
                self.library.db.update_metadata(artist_dir["id"], artist=artist)
                logging.info("Adding simple metadata for {} #{}".format(artist, artist_dir["id"]))
            if title in ["cover.jpg", "cover.png"] and 'cover' not in album_meta:
                # add cover art
                self.library.db.update_metadata(album_dir["id"], cover=track_file["id"])
                logging.info("added cover for {}".format(album_dir['id']))
            if track_file["type"] is None:
                fpath = self.library.get_filepath(track_file['id'])
                ftype, extra = mimetypes.guess_type(fpath)
                if ftype in KNOWN_MIMES:
                    self.library.db.update_metadata(track_file["id"], type=ftype)
                    logging.info("added type {} for {}".format(ftype, track_file['id']))
                else:
                    logging.warning("Ignoring unreadable file at {}, unknown ftype ({}, {})"
                                    .format(fpath, ftype, extra))
#
# Add advanced id3 / media info metadata
for artist_dir in self.library.db.getnodes(parent["id"]):
    artist = artist_dir["name"]
    for album_dir in self.library.db.getnodes(artist_dir["id"]):
        album = album_dir["name"]
        album_meta = album_dir["metadata"]
        for track_file in self.library.db.getnodes(album_dir["id"]):
            track_meta = track_file['metadata']
            title = track_file["name"]
            fpath = self.library.get_filepath(track_file["id"])
            if track_meta.get('id3_done', False) or track_file.get("type", None) not in MUSIC_TYPES:
                continue
            tags = {'id3_done': True}
            try:
                audio = None
                if track_file.get("type", None) in MPX_TYPES:
                    audio = MP3(fpath)
                    if audio.info.sketchy:
                        logging.warning("media reported as sketchy: %s", fpath)
                elif track_file.get("type", None) in FLAC_TYPES:
                    audio = FLAC(fpath)
                else:
                    audio = ID3(fpath)
                # print(audio.pprint())
                try:
                    tags["media_length"] = int(audio.info.length)
                except (ValueError, AttributeError):
                    pass
                try:
                    bitrate = int(audio.info.bitrate)
                    tags["media_bitrate"] = bitrate
                    tags["media_kbitrate"] = int(bitrate / 1024)
                except (ValueError, AttributeError):
                    pass
                try:
                    tags["track"] = int(RE_NUMBERS.findall(''.join(audio['TRCK'].text))[0])
                except (KeyError, IndexError):
                    pass
                try:
                    tags["id3_artist"] = ''.join(audio['TPE1'].text)
                except KeyError:
                    pass
                try:
                    tags["id3_album"] = ''.join(audio['TALB'].text)
                except KeyError:
                    pass
                try:
                    tags["id3_title"] = ''.join(audio['TIT2'].text)
                except KeyError:
                    pass
                try:
                    tags["id3_year"] = audio['TDRC'].text[0].year
                except (KeyError, IndexError):
                    pass
                logging.info("got all media info from %s", fpath)
            except ID3NoHeaderError:
                pass
            except MutagenError as m:
                logging.error("failed to read audio information: %s", m)
                continue
            self.library.db.update_metadata(track_file["id"], **tags)
logging.warning("Library scan complete in {}s".format(round(time() - start, 2)))

# ---- Added by this commit: the new SQL-backed scanner methods ----

def scan_root(self, pid, root):
    """
    Scan a single root of the library.
    :param pid: parent ID
    :param root: absolute path to scan
    """
    logging.warning("Beginning file scan for library %s", pid)
    root_depth = len(self.split_path(root))
    for path, dirs, files in os.walk(root):
        child = self.split_path(path)[root_depth:]
        # dirid = self.create_or_get_dbdir_tree(pid, child)  # dumb table for Subsonic
        self.scan_dir(pid, root, child, dirs, files)
    logging.warning("Beginning metadata scan for library %s", pid)
    self.scan_metadata(pid, root, freshonly=True)
    logging.warning("Finished scan for library %s", pid)

def create_or_get_dbdir_tree(self, cursor, pid, path):
    """
    Return the ID of the directory specified by `path`. The path will be created as necessary. This bullshit exists
    only to serve Subsonic, and can easily be lopped off.
    :param pid: root parent the path resides in
    :param path: single-file tree as a list of dir names under the root parent
    :type path: list
    """
    assert path
    # with closing(self.library.db.db.cursor()) as cursor:
    parent_id = 0  # 0 indicates a top-level item in the library
    for name in path:
        parent_id = self.create_or_get_dbdir(cursor, pid, parent_id, name)
    return parent_id

def create_or_get_dbdir(self, cursor, pid, parent_id, name):
    for row in cursor.execute("SELECT * FROM dirs WHERE library=? AND parent=? AND name=?",
                              (pid, parent_id, name, )):
        return row['id']
    cursor.execute("INSERT INTO dirs (library, parent, name) VALUES (?, ?, ?)", (pid, parent_id, name))
    return cursor.lastrowid

def scan_dir(self, pid, root, path, dirs, files):
    """
    Scan a single directory in the library. Note that this ignores any dir that doesn't contain files. Dirs are
    interpreted as follows:
    - The library root is ignored
    - Empty dirs are ignored
    - Dirs containing files are assumed to be albums
    - Top-level dirs in the library are assumed to be artists
    - Any dirs not following the above rules are transparently ignored
    - Files placed directly in an artist dir are an unhandled edge case (TODO)
    - Any file with an image extension in an album dir is assumed to be the cover, regardless of naming
    - TODO ignore dotfiles/dirs
    TODO remove all file scanning / statting etc from paths where a db transaction is active (gather data, then open)
    :param pid: parent id
    :param root: library root path
    :param path: scan location path, as a list of subdirs within the root
    :param dirs: dirs in the current path
    :param files: files in the current path
    """
    # If this is the library root or an empty dir, just bail
    if not path or not files:
        return
    logging.info("In library %s scanning %s", pid, os.path.join(*path))
    # Guess an album from the dir, if possible
    album = None
    if len(path) > 1:
        album = path[-1]
    with closing(self.library.db.db.cursor()) as cursor:
        artist_id, artist_dirid = self.create_or_get_artist(cursor, pid, path[0])
        album_id = None
        album_dirid = None
        if album:
            album_id, album_dirid = self.create_or_get_album(cursor, pid, path, artist_id)
        libpath = os.path.join(*path)
        new_files = False
        for fname in files:
            if not any([fname.endswith(".{}".format(i)) for i in MUSIC_EXTENSIONS]):
                continue
            new_files = self.add_music_if_new(cursor, pid, root, album_id, libpath, fname) or new_files
        # Create cover entry  TODO we can probably skip this if there were no new audio files?
        if album_id:
            for file in files:
                if not any([file.endswith(".{}".format(i)) for i in IMAGE_EXTENSIONS]):
                    continue
                fpath = os.path.join(libpath, file)
                cursor.execute("SELECT id FROM covers WHERE path=?", (fpath, ))
                if not cursor.fetchall():
                    # We leave most fields blank now and return later
                    cursor.execute("INSERT INTO covers (library, path) VALUES (?, ?);", (pid, fpath, ))
                    cursor.execute("UPDATE albums SET coverid=? WHERE id=?", (cursor.lastrowid, album_id))
                break
        if new_files:  # Commit after each dir IF audio files were found. No audio == dump the artist
            cursor.execute("COMMIT")

def add_music_if_new(self, cursor, pid, root_dir, album_id, fdir, fname):
    fpath = os.path.join(fdir, fname)
    cursor.execute("SELECT id FROM songs WHERE file=?", (fpath, ))
    if not cursor.fetchall():
        # We leave most fields blank now and return later
        # TODO probably not here, but track file sizes and mark them for rescan on change
        cursor.execute("INSERT INTO songs (library, albumid, file, size, title) "
                       "VALUES (?, ?, ?, ?, ?)",
                       (pid,
                        album_id,
                        fpath,
                        os.stat(os.path.join(root_dir, fpath)).st_size,
                        fname, ))
        return True
    return False

def create_or_get_artist(self, cursor, pid, dirname):
    """
    Retrieve, creating if necessary, directory information about an artist. The returned tuple contains the
    artist's ID and the dir id associated with the artist.
    :param cursor: sqlite cursor to use
    :param pid: root parent id we're working in
    :param dirname: name of the artist dir
    :return tuple:
    """
    artist_dirid = self.create_or_get_dbdir_tree(cursor, pid, [dirname])
    cursor.execute("SELECT * FROM artists WHERE dir = ?", (artist_dirid, ))
    row = cursor.fetchone()
    if row:
        artist_id = row['id']
    else:
        cursor.execute("INSERT INTO artists (libraryid, dir, name) VALUES (?, ?, ?)",
                       (pid, artist_dirid, dirname))
        artist_id = cursor.lastrowid
    return artist_id, artist_dirid
logging.warning("Library scan complete in {}s".format(round(time() - start, 2))) def create_or_get_album(self, cursor, pid, dirnames, artist_id):
"""
Retrieve, creating if necessary, directory information about an album. Return tuple contains the albums's ID
and the dir id associated with the album.
:param cursor: sqlite cursor to use
:param pid: root parent id we're working int
:param dirnames: list of directories from the root to the album dir
:param artist_id: id of the artist the album belongs to
:return tuple:
"""
album_dirid = self.create_or_get_dbdir_tree(cursor, pid, dirnames)
cursor.execute("SELECT * FROM albums WHERE artistid = ? AND dir = ?", (artist_id, album_dirid, ))
row = cursor.fetchone()
if row:
album_id = row['id']
else:
cursor.execute("INSERT INTO albums (artistid, dir, name, added) VALUES (?, ?, ?, ?)",
(artist_id, album_dirid, dirnames[-1], int(time())))
album_id = cursor.lastrowid
return album_id, album_dirid
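# A walk-through of how the pieces above fit together (paths are hypothetical): for a file at
# <root>/Some Artist/First Album/01 - Some Song.mp3, os.walk() hands scan_dir() path=["Some Artist",
# "First Album"]; create_or_get_artist() resolves "Some Artist", create_or_get_album() resolves the
# ("Some Artist", "First Album") dir pair, and add_music_if_new() inserts a songs row pointing at that
# album. A file sitting directly in <root>/Some Artist/ arrives with len(path) == 1, so album stays
# None and no albums row is created -- the edge case flagged in scan_dir()'s docstring.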
def split_path(self, path):
"""
Given a path like /foo/bar, return ['foo', 'bar']
"""
parts = []
head = path
while True:
head, tail = os.path.split(head)
if tail:
parts.append(tail)
else:
break
parts.reverse()
return parts
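# Example behavior of split_path() (illustrative paths):
#   split_path("/srv/music/Some Artist/First Album")  ->  ['srv', 'music', 'Some Artist', 'First Album']
#   split_path("Some Artist/First Album")             ->  ['Some Artist', 'First Album']
# A trailing separator makes os.path.split() return an empty tail on the first pass, so
# split_path("/srv/music/") returns [] -- callers are expected to pass normalized paths.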
def scan_metadata(self, pid, root, freshonly=False):
"""
Iterate through files in the library and update metadata
:param freshonly: only update metadata on files that have never been scanned before
"""
q = "SELECT * FROM songs "
if freshonly:
q += "WHERE lastscan = -1 "
q += "ORDER BY albumid"
# TODO scraping ID3 etc. from the media files can be parallelized
with closing(self.library.db.db.cursor()) as reader, \
closing(self.library.db.db.cursor()) as writer:
processed = 0 # commit batching counter
for row in reader.execute(q):
# Find meta, bail if the file was unreadable
# TODO file metadata scanning could be done in parallel
meta = self.scan_file_metadata(os.path.join(root, row['file']))
if not meta:
continue
# Meta may have additional keys that aren't in the songs table; omit them
song_attrs = ["title", "lastscan", "format", "length", "bitrate", "track", "year"]
song_meta = {k: v for k, v in meta.items() if k in song_attrs}
# Update the song row
q = "UPDATE songs SET "
params = []
for key, value in song_meta.items():
q += "{}=?, ".format(key)
params.append(value)
q += "lastscan=? WHERE id=?"
params += [int(time()), row["id"]]
writer.execute(q, params)
# If the metadata has an artist or album name, update the relevant items
# TODO ignore metadata fields if they're blank
if "album" in meta:
writer.execute("UPDATE albums SET name=? WHERE id=?", (meta["album"], row["albumid"]))
if "artist" in meta:
album = writer.execute("SELECT artistid FROM albums WHERE id=?", (row['albumid'], )).fetchone()
if album:
writer.execute("UPDATE artists SET name=? WHERE id=?", (meta["artist"], album["artistid"]))
if "genre" in meta:
genre_name = meta["genre"].strip()
if genre_name:
genre_id = self.get_genre_id(writer, meta["genre"])
writer.execute("UPDATE songs SET genre=? WHERE id=?", (genre_id, row['id']))
# Commit every 50 items
processed += 1
if processed > 50:
writer.execute("COMMIT")
processed = 0
if processed != 0:
writer.execute("COMMIT")
def get_genre_id(self, cursor, genre_name):
genre_name = genre_name.title().strip() # normalize
for row in cursor.execute("SELECT * FROM genres WHERE name=?", (genre_name, )):
return row['id']
cursor.execute("INSERT INTO genres (name) VALUES (?)", (genre_name, ))
return cursor.lastrowid
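# e.g. "  progressive rock " and "Progressive Rock" (hypothetical tag values) both normalize to
# "Progressive Rock" and therefore share a single genres row.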
def scan_file_metadata(self, fpath):
"""
Scan the file for metadata.
:param fpath: path to the file to scan
"""
ftype, extra = mimetypes.guess_type(fpath)
if ftype in MUSIC_TYPES:
return self.scan_mutagen_metadata(fpath, ftype)
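# Note on the mimetype gate above: mimetypes.guess_type("01 - Some Song.mp3") (a hypothetical name)
# returns ("audio/mpeg", None), which is in MUSIC_TYPES, so the file goes on to scan_mutagen_metadata().
# For less common extensions the mapping can depend on the platform's mime database, and anything that
# resolves outside MUSIC_TYPES is silently skipped here.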
def scan_mutagen_metadata(self, fpath, ftype):
meta = {"format": ftype}
try:
# Open file with mutagen
if ftype in MPX_TYPES:
audio = MP3(fpath)
if audio.info.sketchy:
logging.warning("media reported as sketchy: %s", fpath)
elif ftype in FLAC_TYPES:
audio = FLAC(fpath)
else:
audio = ID3(fpath)
except ID3NoHeaderError:
return
except MutagenError as m:
logging.error("failed to read audio information: %s", m)
return
try:
meta["length"] = int(audio.info.length)
except (ValueError, AttributeError):
pass
try:
bitrate = int(audio.info.bitrate)
meta["bitrate"] = bitrate
# meta["kbitrate"] = int(bitrate / 1024)
except (ValueError, AttributeError):
pass
try:
meta["track"] = int(RE_NUMBERS.findall(''.join(audio['TRCK'].text))[0])
except (KeyError, IndexError):
pass
try:
meta["artist"] = ''.join(audio['TPE1'].text)
except KeyError:
pass
try:
meta["album"] = ''.join(audio['TALB'].text)
except KeyError:
pass
try:
meta["title"] = ''.join(audio['TIT2'].text)
except KeyError:
pass
try:
meta["year"] = audio['TDRC'].text[0].year
except (KeyError, IndexError):
pass
try:
meta["genre"] = audio['TCON'].text[0]
except (KeyError, IndexError):
pass
logging.info("got all media info from %s", fpath)
return meta
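# Shape of the dict returned above for a fully tagged MP3 (illustrative; every value is hypothetical
# and any key may be missing if the corresponding tag or stream info is absent):
#   {"format": "audio/mpeg", "length": 215, "bitrate": 320000, "track": 3,
#    "artist": "Some Artist", "album": "First Album", "title": "Some Song",
#    "year": 2018, "genre": "Progressive Rock"}
# "year" comes from mutagen's TDRC frame, whose timestamp entries expose a .year attribute.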

View File

@@ -1,7 +1,16 @@
KNOWN_MIMES = ["audio/mpeg", "audio/flac", "audio/x-wav", "image/jpeg", "image/png"] KNOWN_MIMES = ["audio/mpeg", "audio/flac", "audio/x-wav", "image/jpeg", "image/png"]
MUSIC_TYPES = ["audio/mpeg", "audio/flac", "audio/x-wav"] MUSIC_TYPES = ["audio/mpeg", "audio/flac", "audio/x-wav"]
MPX_TYPES = ["audio/mpeg"] MPX_TYPES = ["audio/mpeg"]
FLAC_TYPES = ["audio/flac"] FLAC_TYPES = ["audio/flac"]
WAV_TYPES = ["audio/x-wav"] WAV_TYPES = ["audio/x-wav"]
IMAGE_TYPES = ["image/jpeg", "image/png", "image/gif"] IMAGE_TYPES = ["image/jpeg", "image/png", "image/gif"]
IMAGE_EXTENSIONS = ["jpg", "jpeg", "png", "gif"]
MUSIC_EXTENSIONS = ["mp3", "flac", "wav"]

View File

@@ -1,9 +1,12 @@
 beautifulsoup4==4.6.0
-cheroot==5.8.3
-CherryPy==11.0.0
-lxml==3.8.0
-mutagen==1.38
-portend==2.1.2
-pytz==2017.2
-six==1.10.0
-tempora==1.8
+bs4==0.0.1
+cheroot==6.0.0
+CherryPy==14.0.1
+lxml==4.2.1
+more-itertools==4.1.0
+mutagen==1.40.0
+portend==2.2
+pysonic==0.0.1
+pytz==2018.3
+six==1.11.0
+tempora==1.11