Compare commits

...

24 Commits
v2 ... master

Author SHA1 Message Date
dave 3342a060ee Merge pull request 'allow maxBitRate=0 as some clients set it as the default' (#2) from dpedu/maxbitrate-default into master
Reviewed-on: #2
2023-01-09 23:12:10 -08:00
dave afbf71aa08 allow maxBitRate=0 as some clients set it as the default
2023-01-09 22:49:54 -08:00
dave 092c833c4f Merge pull request 'Make sqlite open errors clearer' (#1) from dpedu/db-debug into master
Reviewed-on: #1
2022-12-24 14:41:44 -08:00
dave e5158cfdc7 make sqlite open errors clearer
if filesystem permissions on the directory the sqlite database file is in are such that the app cannot list or create files, sqlite gives a vague error:

```
sqlite3.OperationalError: unable to open database file
```

whereas python's open() will give a better hint ("permission denied"). So, we try opening the database file with python first (a minimal sketch of this check appears just after the commit list below).

also, add chmods to the startup scripts to avoid this issue in the future
2022-12-24 14:33:57 -08:00
dave f0b9074391 debug print db path
2022-12-24 13:38:33 -08:00
dave 51551f2b27 use same tracknum parser for flac as mp3
2022-06-02 16:12:25 -07:00
dave af0cfe029d strip extra whitespace in tags
2022-06-02 15:42:29 -07:00
dave 2de8547ab7 fix tag picking for flac as flac uses different tag names
2022-06-02 15:17:12 -07:00
dave 6106fa9aa5 add jenkinsfile
2022-06-01 22:28:38 -07:00
dave 5225180994 configurable transcoder timeout 2021-03-09 21:26:42 -08:00
dave 5e0e541cf9 fix suffix field 2020-10-06 16:59:23 -07:00
dave 0d2f9a9587 post-refactor fixes 2020-10-05 23:30:01 -07:00
dave da53b4e153 dir fixes 2020-10-05 23:24:11 -07:00
dave c910de0eb0 refactor out library class 2020-10-05 23:13:11 -07:00
dave bfcb528ddf readcursor -> cursor 2020-10-05 22:41:01 -07:00
dave f3d888be35 faster search 2020-10-05 22:19:48 -07:00
dave 5a7fe3a013 playcount 2020-10-05 22:12:49 -07:00
dave 0c48fc013c dockerfile touchups for kube 2020-10-05 20:12:35 -07:00
dave 64f738c5f0 fix search 2020-10-05 20:11:58 -07:00
dave 33f17887c2 fix track ordering 2020-09-23 22:57:26 -07:00
dave 8340bb3c61 Support recently/most played albums views 2018-04-07 15:36:15 -07:00
dave 8a19422e0f stats endpoint 2018-04-07 15:36:15 -07:00
Dave Pedu 41db860297 Update gitignore 2018-04-06 14:29:53 -07:00
dave bd2ba225ac v2 2018-04-05 19:02:17 -07:00
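For context on the "make sqlite open errors clearer" commit above: the corresponding change in pysonic/database.py (visible in the diff below) opens the database file with plain Python before handing it to sqlite3, so permission problems surface as a clear exception. A minimal sketch of that check, with illustrative names that are not taken from the diff:

```
import sqlite3

def open_database(path):
    # sqlite3 reports directory-permission problems only as the vague
    # "unable to open database file"; a plain open() raises a clearer
    # PermissionError or FileNotFoundError, so try that before connecting.
    with open(path, "rb"):
        pass
    return sqlite3.connect(path, check_same_thread=False)
```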
16 changed files with 1581 additions and 786 deletions

4 .dockerignore Normal file

@ -0,0 +1,4 @@
Library/
.git/
testenv/
linuxenv/

7 .gitignore vendored Normal file

@ -0,0 +1,7 @@
/build
/dist
/pysonic.egg-info
/testenv
/test.db
/library
__pycache__

20 Dockerfile Normal file

@ -0,0 +1,20 @@
FROM dockermirror:5000/ubuntu:focal
RUN apt-get update && \
DEBIAN_FRONTEND=noninteractive apt-get install -y ffmpeg
RUN DEBIAN_FRONTEND=noninteractive apt-get install -y python3-pip libxml2-dev libxslt1-dev sudo sqlite3 && \
useradd --create-home --uid 1000 app
ADD requirements.txt /tmp/requirements.txt
RUN pip3 install -r /tmp/requirements.txt
ADD . /tmp/code
RUN cd /tmp/code && \
python3 setup.py install && \
mv start.sh / && \
chmod +x /start.sh
ENTRYPOINT ["/start.sh", "--database-path", "/db/pysonic.sqlite", "--dirs", "/library"]

68 Jenkinsfile vendored Normal file

@ -0,0 +1,68 @@
def image_name = "dpedu/pysonic"
pipeline {
agent {
kubernetes {
yaml """
apiVersion: v1
kind: Pod
spec:
podAntiAffinity:
preferredDuringSchedulingIgnoredDuringExecution: # avoid nodes already running a jenkins job
- podAffinityTerm:
labelSelector:
matchExpressions:
- key: jenkins
operator: In
values:
- slave
topologyKey: node
containers:
- name: docker
image: docker:20-dind
args:
- "--insecure-registry"
- "dockermirror:5000"
securityContext:
privileged: true
"""
}
}
stages {
stage("Build image") {
steps {
container("docker") {
script {
try {
docker.withRegistry('http://dockermirror:5000') {
docker.image("ubuntu:focal").pull()
docker.image(image_name).pull() // Pull a recent version to share base layers with (?)
}
} catch (exc) {
echo "couldn't pull image, assuming we're building it for the first time"
}
docker.build(image_name)
}
}
}
}
stage("Push image") {
steps {
container("docker") {
script {
docker.withRegistry('http://dockermirror:5000') {
docker.image(image_name).push("latest")
}
}
}
}
}
stage("Show images") {
steps {
container("docker") {
sh 'docker images'
}
}
}
}
}

19 Makefile Normal file

@ -0,0 +1,19 @@
BUILDARGS :=
IMAGE := dockermirror:5000/dpedu/pysonic
.PHONY: image
image:
docker build -t $(IMAGE) $(BUILDARGS) .
.PHONY: push
push: image
docker push $(IMAGE)
.PHONY: run-local
run-local:
pysonicd -d ./Library/ -u foo:bar -s ./db.sqlite --debug

14 README.md Normal file

@ -0,0 +1,14 @@
pysonic
=======
subsonic api drop-in replacement
running docker
--------------
* `make image`
Notes:
* mount the sqlite database in /db/, it will be chowned automatically
* mount library in /library/


@ -1 +1 @@
__version__ = "0.0.1"
__version__ = "0.0.2"


@ -1,163 +1,36 @@
import re
import json
import os
import logging
import subprocess
from time import time
from random import shuffle
from threading import Thread
from pysonic.database import LETTER_GROUPS
from pysonic.types import MUSIC_TYPES, TYPE_TO_EXTENSION
from pysonic.apilib import formatresponse, ApiResponse
import cherrypy
from collections import defaultdict
from bs4 import BeautifulSoup
from pysonic.library import LETTER_GROUPS
from pysonic.types import MUSIC_TYPES
CALLBACK_RE = re.compile(r'^[a-zA-Z0-9_]+$')
logging = logging.getLogger("api")
response_formats = defaultdict(lambda: "render_xml")
response_formats["json"] = "render_json"
response_formats["jsonp"] = "render_jsonp"
response_headers = defaultdict(lambda: "text/xml; charset=utf-8")
response_headers["json"] = "application/json; charset=utf-8"
response_headers["jsonp"] = "text/javascript; charset=utf-8"
TRANSCODE_TIMEOUT = int(os.environ.get("PYSONIC_ENCODE_TIMEOUT", 5 * 60))
def formatresponse(func):
"""
Decorator for rendering ApiResponse responses
"""
def wrapper(*args, **kwargs):
response = func(*args, **kwargs)
response_format = kwargs.get("f", "xml")
callback = kwargs.get("callback", None)
cherrypy.response.headers['Content-Type'] = response_headers[response_format]
renderer = getattr(response, response_formats[response_format])
if response_format == "jsonp":
if callback is None:
return response.render_xml().encode('UTF-8') # copy original subsonic behavior
else:
return renderer(callback).encode('UTF-8')
return renderer().encode('UTF-8')
return wrapper
def extension(mime):
r = TYPE_TO_EXTENSION.get(mime)
return r
class ApiResponse(object):
def __init__(self, status="ok", version="1.15.0"):
"""
ApiResponses are python data structures that can be converted to other formats. The response has a status and a
version. The response data structure is stored in self.data and follows these rules:
- self.data is a dict
- the dict's values become either child nodes or attributes, named by the key
- lists become one or many child nodes
- dict values are not allowed
- all other types (str, int, NoneType) are attributes
:param status:
:param version:
"""
self.status = status
self.version = version
self.data = defaultdict(lambda: list())
def add_child(self, _type, _parent="", _real_parent=None, **kwargs):
parent = _real_parent if _real_parent else self.get_child(_parent)
m = defaultdict(lambda: list())
m.update(dict(kwargs))
parent[_type].append(m)
return m
def get_child(self, _path):
parent_path = _path.split(".")
parent = self.data
for item in parent_path:
if not item:
continue
parent = parent.get(item)[0]
return parent
def set_attrs(self, _path, **attrs):
parent = self.get_child(_path)
if type(parent) not in (dict, defaultdict):
raise Exception("wot")
parent.update(attrs)
def render_json(self):
def _flatten_json(item):
"""
Convert defaultdicts to dicts and remove lists where node has 1 or no child
"""
listed_attrs = ["folder"]
d = {}
for k, v in item.items():
if type(v) is list:
if len(v) > 1:
d[k] = []
for subitem in v:
d[k].append(_flatten_json(subitem))
elif len(v) == 1:
d[k] = _flatten_json(v[0])
else:
d[k] = {}
else:
d[k] = [v] if k in listed_attrs else v
return d
data = _flatten_json(self.data)
return json.dumps({"subsonic-response": dict(status=self.status, version=self.version, **data)}, indent=4)
def render_jsonp(self, callback):
assert CALLBACK_RE.match(callback), "Invalid callback"
return "{}({});".format(callback, self.render_json())
def render_xml(self):
text_attrs = ['largeImageUrl', 'musicBrainzId', 'smallImageUrl', 'mediumImageUrl', 'lastFmUrl', 'biography',
'folder']
selftext_attrs = ['value']
# These attributes will be placed in <hello>{{ value }}</hello> tags instead of hello="{{ value }}" on parent
doc = BeautifulSoup('', features='lxml-xml')
root = doc.new_tag("subsonic-response", xmlns="http://subsonic.org/restapi",
status=self.status,
version=self.version)
doc.append(root)
def _render_xml(node, parent):
"""
For every key in the node dict, the parent gets a new child tag with name == key
If the value is a dict, it becomes the new tag's attrs
If the value is a list, the parent gets many new tags with each dict as attrs
If the value is str int etc, parent gets attrs
"""
for key, value in node.items():
if type(value) in (dict, defaultdict):
tag = doc.new_tag(key)
parent.append(tag)
tag.attrs.update(value)
elif type(value) is list:
for item in value:
tag = doc.new_tag(key)
parent.append(tag)
_render_xml(item, tag)
else:
if key in text_attrs:
tag = doc.new_tag(key)
parent.append(tag)
tag.append(str(value))
elif key in selftext_attrs:
parent.append(str(value))
else:
parent.attrs[key] = value
_render_xml(self.data, root)
return doc.prettify()
class PysonicApi(object):
def __init__(self, db, library, options):
class PysonicSubsonicApi(object):
def __init__(self, db, options):
self.db = db
self.library = library
self.options = options
@cherrypy.expose
@formatresponse
def index(self):
response = ApiResponse()
response.add_child("totals", **self.db.get_stats())
return response
@cherrypy.expose
@formatresponse
def ping_view(self, **kwargs):
@ -181,7 +54,7 @@ class PysonicApi(object):
def getMusicFolders_view(self, **kwargs):
response = ApiResponse()
response.add_child("musicFolders")
for folder in self.library.get_libraries():
for folder in self.db.get_libraries():
response.add_child("musicFolder", _parent="musicFolders", id=folder["id"], name=folder["name"])
return response
@ -190,49 +63,52 @@ class PysonicApi(object):
def getIndexes_view(self, **kwargs):
# Get listing of top-level dir
response = ApiResponse()
# TODO real lastmodified date
# TODO deal with ignoredArticles
response.add_child("indexes", lastModified="1502310831000", ignoredArticles="The El La Los Las Le Les")
artists = self.db.get_artists(sortby="name", order="asc")
for letter in LETTER_GROUPS:
index = response.add_child("index", _parent="indexes", name=letter.upper())
for artist in self.library.get_artists():
for artist in artists:
if artist["name"][0].lower() in letter:
response.add_child("artist", _real_parent=index, id=artist["id"], name=artist["name"])
response.add_child("artist", _real_parent=index, id=artist["dir"], name=artist["name"])
return response
@cherrypy.expose
def savePlayQueue_view(self, id, current, position, **kwargs):
print("TODO save playlist with items {} current {} position {}".format(id, current, position))
@cherrypy.expose
@formatresponse
def getAlbumList_view(self, type, size=50, offset=0, **kwargs):
albums = self.library.get_albums()
def getAlbumList_view(self, type, size=250, offset=0, **kwargs):
qargs = {}
if type == "random":
shuffle(albums)
qargs.update(sortby="random")
elif type == "alphabeticalByName":
albums.sort(key=lambda item: item.get("id3_album", item["album"] if item["album"] else "zzzzzUnsortable"))
else:
raise NotImplemented()
albumset = albums[0 + int(offset):int(size) + int(offset)]
qargs.update(sortby="name", order="asc")
elif type == "newest":
qargs.update(sortby="added", order="desc")
elif type == "recent":
qargs.update(sortby="played", order="desc")
elif type == "frequent":
qargs.update(sortby="plays", order="desc")
qargs.update(limit=(offset, size))
albums = self.db.get_albums(**qargs)
response = ApiResponse()
response.add_child("albumList")
for album in albumset:
album_meta = album['metadata']
album_kw = dict(id=album["id"],
parent=album["parent"],
isDir="true" if album['isdir'] else "false",
title=album_meta.get("id3_title", album["name"]), #TODO these cant be blank or dsub gets mad
album=album_meta.get("id3_album", album["album"]),
artist=album_meta.get("id3_artist", album["artist"]),
# playCount="0"
# created="2016-05-08T05:31:31.000Z"/>)
for album in albums:
album_kw = dict(id=album["dir"],
parent=album["artistdir"],
isDir="true",
title=album["name"],
album=album["name"],
artist=album["artistname"],
coverArt=album["coverid"],
playCount=album["plays"],
#year=TODO
#created="2016-05-08T05:31:31.000Z"/>)
)
if 'cover' in album_meta:
album_kw["coverArt"] = album_meta["cover"]
if 'id3_year' in album_meta:
album_kw["year"] = album_meta['id3_year']
response.add_child("album", _parent="albumList", **album_kw)
return response
@ -240,87 +116,94 @@ class PysonicApi(object):
@formatresponse
def getMusicDirectory_view(self, id, **kwargs):
"""
List an artist dir
List either an artist or album dir
"""
dir_id = int(id)
cherrypy.response.headers['Content-Type'] = 'text/xml; charset=utf-8'
dirtype, dirinfo, entity = self.db.get_subsonic_musicdir(dirid=dir_id)
response = ApiResponse()
response.add_child("directory")
directory = self.library.get_dir(dir_id)
dir_meta = directory["metadata"]
children = self.library.get_dir_children(dir_id)
response.set_attrs(_path="directory", name=directory['name'], id=directory['id'],
parent=directory['parent'], playCount=10)
# artists just need this
response.add_child("directory",
name=entity['name'],
id=entity['dir'])
for item in children:
if dirtype == "album":
# albums can also have
# - parent (album dir id)
# - playcount
response.set_attrs(_path="directory",
parent=dirinfo["parent"],
playCount=entity["plays"])
#TODO refactor meeeeee
for childtype, child in entity["children"]:
# omit not dirs and media in browser
if not item["isdir"] and item["type"] not in MUSIC_TYPES:
continue
item_meta = item['metadata']
response.add_child("child", _parent="directory", **self.render_node(item, item_meta, directory, dir_meta))
# if not item["isdir"] and item["type"] not in MUSIC_TYPES:
# continue
# item_meta = item['metadata']
moreargs = {}
if childtype == "album":
moreargs.update(name=child["name"],
isDir="true", # TODO song files in artist dir
parent=entity["dir"],
id=child["dir"])
if child["coverid"]:
moreargs.update(coverArt=child["coverid"])
# album=item["name"],
# title=item["name"], # TODO dupe?
# artist=artist["name"],
# coverArt=item["coverid"],
elif childtype == "song":
moreargs.update(title=child["title"],
albumId=entity["dir"],
album=entity["name"],
artistId=child["_artist"]["dir"],
artist=child["_artist"]["name"],
contentType=child["format"],
id=child["id"],
duration=child["length"],
isDir="false",
parent=entity["dir"],
track=child["track"],
playCount=child["plays"],
#TODO suffix can be null/omitted, which causes the client to cache files wrong, while
# this isn't ideal, fixing it properly would require significant changes to the scanner.
suffix=extension(child["format"]),
path=child["file"],
# bitRate
# discNumber
# created=
# year=1999
# genre="Alternative & Punk"
)
if entity["coverid"]:
moreargs.update(coverArt=entity["coverid"])
response.add_child("child", _parent="directory",
size="4096",
type="music",
**moreargs)
cherrypy.response.headers['Content-Type'] = 'text/xml; charset=utf-8'
return response
def render_node(self, item, item_meta, directory, dir_meta):
"""
Given a node and its parent directory, and meta, return a dict with the keys formatted how the subsonic clients
expect them to be
:param item:
:param item_meta:
:param directory:
:param dir_meta:
"""
child = dict(id=item["id"],
parent=item["id"],
isDir="true" if item['isdir'] else "false",
title=item_meta.get("id3_title", item["name"]),
album=item_meta.get("id3_album", item["album"]),
artist=item_meta.get("id3_artist", item["artist"]),
# playCount="5",
# created="2016-04-25T07:31:33.000Z"
# genre="Other",
# path="Cosmic Gate/Sign Of The Times/03 Flatline (featuring Kyler England).mp3"
type="music")
if 'kbitrate' in item_meta:
child["bitrate"] = item_meta["kbitrate"]
if item["size"] != -1:
child["size"] = item["size"]
if "media_length" in item_meta:
child["duration"] = item_meta["media_length"]
if "albumId" in directory:
child["albumId"] = directory["id"]
if "artistId" in directory:
child["artistId"] = directory["parent"]
if "." in item["name"]:
child["suffix"] = item["name"].split(".")[-1]
if item["type"]:
child["contentType"] = item["type"]
if 'cover' in item_meta:
child["coverArt"] = item_meta["cover"]
elif 'cover' in dir_meta:
child["coverArt"] = dir_meta["cover"]
if 'track' in item_meta:
child["track"] = item_meta['track']
if 'id3_year' in item_meta:
child["year"] = item_meta['id3_year']
return child
@cherrypy.expose
def stream_view(self, id, maxBitRate="256", **kwargs):
maxBitRate = int(maxBitRate)
assert maxBitRate >= 32 and maxBitRate <= 320
fpath = self.library.get_filepath(id)
meta = self.library.get_file_metadata(id)
to_bitrate = min(maxBitRate, self.options.max_bitrate, meta.get("media_kbitrate", 320))
maxBitRate = int(maxBitRate) or 256
if maxBitRate < 32 or maxBitRate > 320:
raise cherrypy.HTTPError(400, message=f"invalid maxBitRate: {maxBitRate}. Must be between 32 and 320.")
song = self.db.get_songs(id=int(id))[0]
fpath = os.path.join(song["root"], song["file"])
media_bitrate = song.get("bitrate") / 1024 if song.get("bitrate") else 320
to_bitrate = min(maxBitRate,
self.options.max_bitrate,
media_bitrate)
cherrypy.response.headers['Content-Type'] = 'audio/mpeg'
if "media_length" in meta:
cherrypy.response.headers['X-Content-Duration'] = str(int(meta['media_length']))
#if "media_length" in meta:
# cherrypy.response.headers['X-Content-Duration'] = str(int(meta['media_length']))
cherrypy.response.headers['X-Content-Kbitrate'] = str(to_bitrate)
if (self.options.skip_transcode or meta.get("media_kbitrate", -1) == to_bitrate) \
and meta["type"] == "audio/mpeg":
if (self.options.skip_transcode or (song.get("bitrate") and media_bitrate == to_bitrate)) \
and song["format"] == "audio/mpeg":
def content():
with open(fpath, "rb") as f:
while True:
@ -330,10 +213,9 @@ class PysonicApi(object):
yield data
return content()
else:
transcode_meta = "transcoded_{}_size".format(to_bitrate)
if transcode_meta in meta:
cherrypy.response.headers['Content-Length'] = str(int(meta[transcode_meta]))
# transcode_meta = "transcoded_{}_size".format(to_bitrate)
# if transcode_meta in meta:
# cherrypy.response.headers['Content-Length'] = str(int(meta[transcode_meta]))
transcode_args = ["ffmpeg", "-i", fpath, "-map", "0:0", "-b:a",
"{}k".format(to_bitrate),
"-v", "0", "-f", "mp3", "-"]
@ -343,13 +225,13 @@ class PysonicApi(object):
def content(proc):
length = 0
completed = False
# completed = False
start = time()
try:
while True:
data = proc.stdout.read(16 * 1024)
if not data:
completed = True
# completed = True
break
yield data
length += len(data)
@ -357,15 +239,15 @@ class PysonicApi(object):
proc.poll()
if proc.returncode is None or proc.returncode == 0:
logging.warning("transcoded {} in {}s".format(id, int(time() - start)))
if completed:
self.library.report_transcode(id, to_bitrate, length)
# if completed:
# self.db.report_transcode(id, to_bitrate, length)
else:
logging.error("transcode of {} exited with code {} after {}s".format(id, proc.returncode,
int(time() - start)))
def stopit(proc):
try:
proc.wait(timeout=90)
proc.wait(timeout=TRANSCODE_TIMEOUT)
except subprocess.TimeoutExpired:
logging.warning("killing timed-out transcoder")
proc.kill()
@ -378,7 +260,30 @@ class PysonicApi(object):
@cherrypy.expose
def getCoverArt_view(self, id, **kwargs):
fpath = self.library.get_filepath(id)
"""
id is a string and if it's a number it's the album art for a...?? could be song or album either by id or directory id lol
it could also be:
pl-1234 - playlist
for now, if the first character isn't a number, we error
"""
if id.startswith("pl-"): # get art from first track in playlist
playlist_id = int(id[len("pl-"):])
songs = self.db.get_playlist_songs(playlist_id)
for song in songs:
if song["albumcoverid"]:
id = song["albumcoverid"]
break
else:
raise cherrypy.HTTPError(404, message=f"no art for any of the {len(songs)} tracks in playlist {playlist_id}")
elif id[0] not in "0123456789":
#TODO
print("TODO support getCoverArt id format", repr(id))
raise cherrypy.HTTPError(500, message=f"coverid format {repr(id)} not supported")
else:
id = int(id)
fpath = self.db.get_cover_path(id)
type2ct = {
'jpg': 'image/jpeg',
'png': 'image/png',
@ -395,15 +300,14 @@ class PysonicApi(object):
break
total += len(data)
yield data
logging.info("\nSent {} bytes for {}".format(total, fpath))
logging.info("sent {} bytes for {}".format(total, fpath))
return content()
getCoverArt_view._cp_config = {'response.stream': True}
@cherrypy.expose
@formatresponse
def getArtistInfo_view(self, id, includeNotPresent="true", **kwargs):
info = self.library.get_artist_info(id)
info = self.db.get_artist_info(id)
response = ApiResponse()
response.add_child("artistInfo")
response.set_attrs("artistInfo", **info)
@ -412,7 +316,7 @@ class PysonicApi(object):
@cherrypy.expose
@formatresponse
def getUser_view(self, username, **kwargs):
user = {} if self.options.disable_auth else self.library.db.get_user(cherrypy.request.login)
user = {} if self.options.disable_auth else self.db.get_user(cherrypy.request.login)
response = ApiResponse()
response.add_child("user",
username=user["username"],
@ -437,19 +341,19 @@ class PysonicApi(object):
@cherrypy.expose
@formatresponse
def star_view(self, id, **kwargs):
self.library.set_starred(cherrypy.request.login, int(id), starred=True)
self.db.set_starred(cherrypy.request.login, int(id), starred=True)
return ApiResponse()
@cherrypy.expose
@formatresponse
def unstar_view(self, id, **kwargs):
self.library.set_starred(cherrypy.request.login, int(id), starred=False)
self.db.set_starred(cherrypy.request.login, int(id), starred=False)
return ApiResponse()
@cherrypy.expose
@formatresponse
def getStarred_view(self, **kwargs):
children = self.library.get_starred(cherrypy.request.login)
children = self.db.get_starred(cherrypy.request.login)
response = ApiResponse()
response.add_child("starred")
for item in children:
@ -471,15 +375,32 @@ class PysonicApi(object):
"""
response = ApiResponse()
response.add_child("randomSongs")
children = self.library.get_songs(size, shuffle=True)
for item in children:
# omit not dirs and media in browser
if not item["isdir"] and item["type"] not in MUSIC_TYPES:
continue
item_meta = item['metadata']
itemtype = "song" if item["type"] in MUSIC_TYPES else "album"
response.add_child(itemtype, _parent="randomSongs",
**self.render_node(item, item_meta, {}, self.db.getnode(item["parent"])["metadata"]))
children = self.db.get_songs(limit=size, sortby="random")
for song in children:
moreargs = {}
if song["format"]:
moreargs.update(contentType=song["format"])
if song["albumcoverid"]:
moreargs.update(coverArt=song["albumcoverid"])
if song["length"]:
moreargs.update(duration=song["length"])
if song["track"]:
moreargs.update(track=song["track"])
if song["year"]:
moreargs.update(year=song["year"])
response.add_child("song",
_parent="randomSongs",
title=song["title"],
album=song["albumname"],
artist=song["artistname"],
id=song["id"],
isDir="false",
parent=song["albumid"],
size=song["size"],
suffix=extension(song["format"]),
type="music",
**moreargs)
return response
@cherrypy.expose
@ -487,9 +408,8 @@ class PysonicApi(object):
def getGenres_view(self, **kwargs):
response = ApiResponse()
response.add_child("genres")
response.add_child("genre", _parent="genres", value="Death Metal", songCount=420, albumCount=69)
response.add_child("genre", _parent="genres", value="Metal", songCount=52, albumCount=3)
response.add_child("genre", _parent="genres", value="Punk", songCount=34, albumCount=3)
for row in self.db.get_genres():
response.add_child("genre", _parent="genres", value=row["name"], songCount=420, albumCount=69)
return response
@cherrypy.expose
@ -500,7 +420,7 @@ class PysonicApi(object):
:param submission: True if end of song reached. False on start of track.
"""
submission = True if submission == "true" else False
# TODO save played track stats
# TODO save played track stats and/or do last.fm bullshit
return ApiResponse()
@cherrypy.expose
@ -516,30 +436,60 @@ class PysonicApi(object):
query = query.replace("*", "") # TODO handle this
artists = 0
for item in self.library.get_artists():
if query in item["name"].lower():
response.add_child("artist", _parent="searchResult2", id=item["id"], name=item["name"])
artists += 1
if artists >= artistCount:
break
for item in self.db.get_artists(name_contains=query):
response.add_child("artist", _parent="searchResult2", id=item["dir"], name=item["name"])
artists += 1
if artists >= artistCount:
break
# TODO make this more efficient
albums = 0
for item in self.library.get_artists():
if query in item["name"].lower():
response.add_child("album", _parent="searchResult2", **self.render_node(item, item["metadata"], {}, {}))
albums += 1
if albums >= albumCount:
break
for album in self.db.get_albums(name_contains=query):
response.add_child("album", _parent="searchResult2",
id=album["dir"],
parent=album["artistdir"],
isDir="true",
title=album["name"],
album=album["name"],
artist=album["artistname"],
coverArt=album["coverid"],
playCount=album["plays"],
#year=TODO
#created="2016-05-08T05:31:31.000Z"/>)
)
albums += 1
if albums >= albumCount:
break
# TODO make this more efficient
songs = 0
for item in self.library.get_songs(limit=9999999, shuffle=False):
if query in item["name"].lower():
response.add_child("song", _parent="searchResult2", **self.render_node(item, item["metadata"], {}, {}))
songs += 1
if songs > songCount:
break
for song in self.db.get_songs(title_contains=query):
response.add_child("song", _parent="searchResult2",
id=song["id"],
parent=song["albumdir"],
isDir="false",
title=song["title"],
album=song["albumname"],
artist=song["artistname"],
track=song["track"],
year=song["year"],
genre=song["genrename"],
coverArt=song["albumcoverid"],
size=song["size"],
contentType=song["format"],
duration=song["length"],
bitRate=song["bitrate"],
path=song["file"],
playCount=song["plays"],
albumId=song["albumid"],
type="music",
suffix=extension(song["format"]),
# created="2012-09-17T22:35:19.000Z"
)
songs += 1
if songs > songCount:
break
return response
@ -548,3 +498,116 @@ class PysonicApi(object):
def setRating_view(self, id, rating):
# rating is 1-5
pass
@cherrypy.expose
def savePlayQueue_view(self, id, current, position, **kwargs):
print("TODO save playqueue with items {} current {} position {}".format(id, repr(current), repr(position)))
current = int(current)
song = self.db.get_songs(id=current)[0]
self.db.update_album_played(song['albumid'], time())
self.db.increment_album_plays(song['albumid'])
if int(position) == 0:
self.db.increment_track_plays(current)
# TODO save playlist with items ['378', '386', '384', '380', '383'] current 383 position 4471
# id entries are strings!
@cherrypy.expose
@formatresponse
def createPlaylist_view(self, name, songId, **kwargs):
if type(songId) != list:
songId = [songId]
user = self.db.get_user(cherrypy.request.login)
self.db.add_playlist(user["id"], name, songId)
return ApiResponse()
#TODO the response should be the new playlist, check the cap
@cherrypy.expose
@formatresponse
def getPlaylists_view(self, **kwargs):
user = self.db.get_user(cherrypy.request.login)
response = ApiResponse()
response.add_child("playlists")
for playlist in self.db.get_playlists(user["id"]):
response.add_child("playlist",
_parent="playlists",
id=playlist["id"],
name=playlist["name"],
owner=user["username"],
public=playlist["public"],
songCount=69,
duration=420,
# changed="2018-04-05T23:23:38.263Z"
# created="2018-04-05T23:23:38.252Z"
coverArt="pl-{}".format(playlist["id"])
)
return response
@cherrypy.expose
@formatresponse
def getPlaylist_view(self, id, **kwargs):
id = int(id)
user = self.db.get_user(cherrypy.request.login)
plinfo = self.db.get_playlist(id)
songs = self.db.get_playlist_songs(id)
response = ApiResponse()
response.add_child("playlist",
id=plinfo["id"],
name=plinfo["name"], # TODO this element should match getPlaylists_view
owner=user["username"], # TODO translate id to name
public=plinfo["public"],
songCount=69,
duration=420)
for song in songs:
response.add_child("entry",
_parent="playlist",
id=song["id"],
parent=song["albumid"], # albumid seems wrong? should be dir parent?
isDir="false",
title=song["title"],
album=song["albumname"],
artist=song["artistname"],
track=song["track"],
year=song["year"],
genre=song["genrename"],
coverArt=song["albumcoverid"],
size=song["size"],
contentType=song["format"],
suffix=extension(song["format"]),
duration=song["length"],
bitRate=song["bitrate"] / 1024 if song["bitrate"] else None, #TODO macro for this sort of logic
path=song["file"],
playCount=song["plays"],
# created="2015-06-09T15:26:01.000Z"
albumId=song["albumid"],
artistId=song["artistid"],
type="music")
return response
@cherrypy.expose
@formatresponse
def updatePlaylist_view(self, playlistId, songIndexToRemove=None, songIdToAdd=None, **kwargs):
playlistId = int(playlistId)
user = self.db.get_user(cherrypy.request.login)
plinfo = self.db.get_playlist(playlistId)
assert plinfo["ownerid"] == user["id"]
if songIndexToRemove:
self.db.remove_index_from_playlist(playlistId, songIndexToRemove)
elif songIdToAdd:
self.db.add_to_playlist(playlistId, songIdToAdd)
#TODO there are more modification methods
return ApiResponse()
@cherrypy.expose
@formatresponse
def deletePlaylist_view(self, id, **kwargs):
user = self.db.get_user(cherrypy.request.login)
plinfo = self.db.get_playlist(int(id))
assert plinfo["ownerid"] == user["id"]
self.db.delete_playlist(plinfo["id"])
return ApiResponse()

143 pysonic/apilib.py Normal file

@ -0,0 +1,143 @@
from collections import defaultdict
from bs4 import BeautifulSoup
import re
import cherrypy
import json
CALLBACK_RE = re.compile(r'^[a-zA-Z0-9_]+$')
response_formats = defaultdict(lambda: "render_xml")
response_formats["json"] = "render_json"
response_formats["jsonp"] = "render_jsonp"
response_headers = defaultdict(lambda: "text/xml; charset=utf-8")
response_headers["json"] = "application/json; charset=utf-8"
response_headers["jsonp"] = "text/javascript; charset=utf-8"
def formatresponse(func):
"""
Decorator for rendering ApiResponse responses based on requested response type
"""
def wrapper(*args, **kwargs):
response = func(*args, **kwargs)
response_format = kwargs.get("f", "xml")
callback = kwargs.get("callback", None)
cherrypy.response.headers['Content-Type'] = response_headers[response_format]
renderer = getattr(response, response_formats[response_format])
if response_format == "jsonp":
if callback is None:
return response.render_xml().encode('UTF-8') # copy original subsonic behavior
else:
return renderer(callback).encode('UTF-8')
return renderer().encode('UTF-8')
return wrapper
class ApiResponse(object):
def __init__(self, status="ok", version="1.15.0"):
"""
ApiResponses are python data structures that can be converted to other formats. The response has a status and a
version. The response data structure is stored in self.data and follows these rules:
- self.data is a dict
- the dict's values become either child nodes or attributes, named by the key
- lists become one or many child nodes
- dict values are not allowed
- all other types (str, int, NoneType) are attributes
:param status:
:param version:
"""
self.status = status
self.version = version
self.data = defaultdict(lambda: list())
def add_child(self, _type, _parent="", _real_parent=None, **kwargs):
kwargs = {k: v for k, v in kwargs.items() if v or type(v) is int} # filter out empty keys (0 is ok)
parent = _real_parent if _real_parent else self.get_child(_parent)
m = defaultdict(lambda: list())
m.update(dict(kwargs))
parent[_type].append(m)
return m
def get_child(self, _path):
parent_path = _path.split(".")
parent = self.data
for item in parent_path:
if not item:
continue
parent = parent.get(item)[0]
return parent
def set_attrs(self, _path, **attrs):
parent = self.get_child(_path)
if type(parent) not in (dict, defaultdict):
raise Exception("wot")
parent.update(attrs)
def render_json(self):
def _flatten_json(item):
"""
Convert defaultdicts to dicts and remove lists where node has 1 or no child
"""
listed_attrs = ["folder"]
d = {}
for k, v in item.items():
if type(v) is list:
if len(v) > 1:
d[k] = []
for subitem in v:
d[k].append(_flatten_json(subitem))
elif len(v) == 1:
d[k] = _flatten_json(v[0])
else:
d[k] = {}
else:
d[k] = [v] if k in listed_attrs else v
return d
data = _flatten_json(self.data)
return json.dumps({"subsonic-response": dict(status=self.status, version=self.version, **data)}, indent=4)
def render_jsonp(self, callback):
assert CALLBACK_RE.match(callback), "Invalid callback"
return "{}({});".format(callback, self.render_json())
def render_xml(self):
text_attrs = ['largeImageUrl', 'musicBrainzId', 'smallImageUrl', 'mediumImageUrl', 'lastFmUrl', 'biography',
'folder']
selftext_attrs = ['value']
# These attributes will be placed in <hello>{{ value }}</hello> tags instead of hello="{{ value }}" on parent
doc = BeautifulSoup('', features='lxml-xml')
root = doc.new_tag("subsonic-response", xmlns="http://subsonic.org/restapi",
status=self.status,
version=self.version)
doc.append(root)
def _render_xml(node, parent):
"""
For every key in the node dict, the parent gets a new child tag with name == key
If the value is a dict, it becomes the new tag's attrs
If the value is a list, the parent gets many new tags with each dict as attrs
If the value is str int etc, parent gets attrs
"""
for key, value in node.items():
if type(value) in (dict, defaultdict):
tag = doc.new_tag(key)
parent.append(tag)
tag.attrs.update(value)
elif type(value) is list:
for item in value:
tag = doc.new_tag(key)
parent.append(tag)
_render_xml(item, tag)
else:
if key in text_attrs:
tag = doc.new_tag(key)
parent.append(tag)
tag.append(str(value))
elif key in selftext_attrs:
parent.append(str(value))
else:
parent.attrs[key] = value
_render_xml(self.data, root)
return doc.prettify()
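
As an illustration of how the new ApiResponse helper above is used: the handlers in pysonic/api.py build a response tree with add_child() and the formatresponse decorator renders it as XML, JSON, or JSONP depending on the requested format ("f" parameter). A small sketch mirroring getMusicFolders_view, with made-up folder values:

```
from pysonic.apilib import ApiResponse

# Build a response the way api.py's getMusicFolders_view does,
# then render it in the formats subsonic clients request.
response = ApiResponse()  # defaults: status="ok", version="1.15.0"
response.add_child("musicFolders")
response.add_child("musicFolder", _parent="musicFolders", id=1, name="Library")

xml_body = response.render_xml()    # <subsonic-response ...><musicFolders><musicFolder id="1" name="Library"/>...
json_body = response.render_json()  # {"subsonic-response": {"status": "ok", ..., "musicFolders": {...}}}
```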


@ -1,10 +1,9 @@
import os
import logging
import cherrypy
from sqlite3 import IntegrityError
from pysonic.api import PysonicApi
from pysonic.library import PysonicLibrary, DuplicateRootException
from pysonic.database import PysonicDatabase
from sqlite3 import DatabaseError
from pysonic.api import PysonicSubsonicApi
from pysonic.database import PysonicDatabase, DuplicateRootException
def main():
@ -24,43 +23,47 @@ def main():
group = parser.add_argument_group("app options")
group.add_argument("--skip-transcode", action="store_true", help="instead of trancoding mp3s, send as-is")
group.add_argument("--no-rescan", action="store_true", help="don't perform simple scan on startup")
group.add_argument("--deep-rescap", action="store_true", help="perform deep scan (read id3 etc)")
group.add_argument("--enable-prune", action="store_true", help="enable removal of media not found on disk")
# group.add_argument("--deep-rescan", action="store_true", help="perform deep scan (read id3 etc)")
# group.add_argument("--enable-prune", action="store_true", help="enable removal of media not found on disk")
group.add_argument("--max-bitrate", type=int, default=320, help="maximum send bitrate")
group.add_argument("--enable-cors", action="store_true", help="add response headers to allow cors")
args = parser.parse_args()
logging.basicConfig(level=logging.INFO if args.debug else logging.WARNING)
logging.basicConfig(level=logging.INFO if args.debug else logging.WARNING,
format="%(asctime)-15s %(levelname)-8s %(filename)s:%(lineno)d %(message)s")
db = PysonicDatabase(path=args.database_path)
library = PysonicLibrary(db)
for dirname in args.dirs:
assert os.path.exists(dirname) and dirname.startswith("/"), "--dirs must be absolute paths and exist!"
dirname = os.path.abspath(dirname)
assert os.path.exists(dirname), "--dirs must be paths that exist"
try:
library.add_dir(dirname)
db.add_root(dirname)
except DuplicateRootException:
pass
library.update()
db.update()
for username, password in args.user:
try:
db.add_user(username, password)
except IntegrityError:
except DatabaseError:
db.update_user(username, password)
logging.warning("Libraries: {}".format([i["name"] for i in library.get_libraries()]))
logging.warning("Artists: {}".format([i["name"] for i in library.get_artists()]))
logging.warning("Albums: {}".format(len(library.get_albums())))
# logging.warning("Libraries: {}".format([i["name"] for i in library.get_libraries()]))
# logging.warning("Artists: {}".format([i["name"] for i in library.get_artists()]))
# logging.warning("Albums: {}".format(len(library.get_albums())))
api = PysonicApi(db, library, args)
api = PysonicSubsonicApi(db, args)
api_config = {}
if args.disable_auth:
logging.warning("starting up with auth disabled")
else:
def validate_password(realm, username, password):
return True
api_config.update({'tools.auth_basic.on': True,
'tools.auth_basic.realm': 'pysonic',
'tools.auth_basic.checkpassword': db.validate_password})
'tools.auth_basic.checkpassword': validate_password})
if args.enable_cors:
def cors():
cherrypy.response.headers["Access-Control-Allow-Origin"] = "*"
@ -99,5 +102,6 @@ def main():
logging.info("API has shut down")
cherrypy.engine.exit()
if __name__ == '__main__':
main()


@ -1,18 +1,28 @@
import os
import json
import sqlite3
import logging
from hashlib import sha512
from time import time
from contextlib import closing
from collections import Iterable
from pysonic.scanner import PysonicFilesystemScanner
logger = logging.getLogger("database")
LETTER_GROUPS = ["a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m", "n", "o", "p", "q", "r", "s", "t",
"u", "v", "w", "xyz", "0123456789"]
logging = logging.getLogger("database")
keys_in_table = ["title", "album", "artist", "type", "size"]
def dict_factory(cursor, row):
def dict_factory(c, row):
d = {}
for idx, col in enumerate(cursor.description):
for idx, col in enumerate(c.description):
d[col[0]] = row[idx]
return d
@ -21,227 +31,524 @@ class NotFoundError(Exception):
pass
class DuplicateRootException(Exception):
pass
def hash_password(unicode_string):
return sha512(unicode_string.encode('UTF-8')).hexdigest()
def cursor(func):
"""
Provides a cursor to the wrapped method as the first arg.
"""
def wrapped(*args, **kwargs):
self = args[0]
if len(args) >= 2 and isinstance(args[1], sqlite3.Cursor):
return func(*args, **kwargs)
else:
with closing(self.db.cursor()) as c:
return func(self, c, *args[1:], **kwargs)
return wrapped
class PysonicDatabase(object):
def __init__(self, path):
self.sqlite_opts = dict(check_same_thread=False, cached_statements=0, isolation_level=None)
self.sqlite_opts = dict(check_same_thread=False)
self.path = path
self.db = None
self.open()
self.migrate()
self.scanner = PysonicFilesystemScanner(self)
def open(self):
with open(self.path, "rb"): # sqlite doesn't give very descriptive permission errors, but this does
pass
self.db = sqlite3.connect(self.path, **self.sqlite_opts)
self.db.row_factory = dict_factory
def update(self):
"""
Start the library media scanner
"""
self.scanner.init_scan()
def migrate(self):
# Create db
queries = ["""CREATE TABLE 'meta' (
queries = ["""CREATE TABLE 'libraries' (
'id' INTEGER PRIMARY KEY AUTOINCREMENT,
'name' TEXT,
'path' TEXT UNIQUE);""",
"""CREATE TABLE 'dirs' (
'id' INTEGER PRIMARY KEY AUTOINCREMENT,
'library' INTEGER,
'parent' INTEGER,
'name' TEXT,
UNIQUE(parent, name)
)""",
"""CREATE TABLE 'genres' (
'id' INTEGER PRIMARY KEY AUTOINCREMENT,
'name' TEXT UNIQUE)""",
"""CREATE TABLE 'artists' (
'id' INTEGER PRIMARY KEY AUTOINCREMENT,
'libraryid' INTEGER,
'dir' INTEGER UNIQUE,
'name' TEXT)""",
"""CREATE TABLE 'albums' (
'id' INTEGER PRIMARY KEY AUTOINCREMENT,
'artistid' INTEGER,
'coverid' INTEGER,
'dir' INTEGER,
'name' TEXT,
'added' INTEGER NOT NULL DEFAULT -1,
'played' INTEGER,
'plays' INTEGER NOT NULL DEFAULT 0,
UNIQUE (artistid, dir));""",
"""CREATE TABLE 'songs' (
'id' INTEGER PRIMARY KEY AUTOINCREMENT,
'library' INTEGER,
'albumid' BOOLEAN,
'genre' INTEGER DEFAULT NULL,
'file' TEXT UNIQUE, -- path from the library root
'size' INTEGER NOT NULL DEFAULT -1,
'title' TEXT NOT NULL,
'lastscan' INTEGER NOT NULL DEFAULT -1,
'format' TEXT,
'length' INTEGER,
'bitrate' INTEGER,
'track' INTEGER,
'year' INTEGER,
'plays' INTEGER NOT NULL DEFAULT 0
)""",
"""CREATE TABLE 'covers' (
'id' INTEGER PRIMARY KEY AUTOINCREMENT,
'library' INTEGER,
'type' TEXT,
'size' TEXT,
'path' TEXT UNIQUE);""",
"""CREATE TABLE 'users' (
'id' INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
'username' TEXT UNIQUE NOT NULL,
'password' TEXT NOT NULL,
'admin' BOOLEAN DEFAULT 0,
'email' TEXT)""",
"""CREATE TABLE 'stars' (
'userid' INTEGER,
'songid' INTEGER,
primary key ('userid', 'songid'))""",
"""CREATE TABLE 'playlists' (
'id' INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
'ownerid' INTEGER,
'name' TEXT,
'public' BOOLEAN,
'created' INTEGER,
'changed' INTEGER,
'cover' INTEGER,
UNIQUE ('ownerid', 'name'))""",
"""CREATE TABLE 'playlist_entries' (
'playlistid' INTEGER,
'songid' INTEGER,
'order' FLOAT)""",
"""CREATE TABLE 'meta' (
'key' TEXT PRIMARY KEY NOT NULL,
'value' TEXT);""",
"""INSERT INTO meta VALUES ('db_version', '3');""",
"""CREATE TABLE 'nodes' (
'id' INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
'parent' INTEGER NOT NULL,
'isdir' BOOLEAN NOT NULL,
'size' INTEGER NOT NULL DEFAULT -1,
'name' TEXT NOT NULL,
'type' TEXT,
'title' TEXT,
'album' TEXT,
'artist' TEXT,
'metadata' TEXT
)""",
"""CREATE TABLE 'users' (
'id' INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
'username' TEXT UNIQUE NOT NULL,
'password' TEXT NOT NULL,
'admin' BOOLEAN DEFAULT 0,
'email' TEXT)""",
"""CREATE TABLE 'stars' (
'userid' INTEGER,
'nodeid' INTEGER,
primary key ('userid', 'nodeid'))"""]
"""INSERT INTO meta VALUES ('db_version', '1');"""]
with closing(self.db.cursor()) as cursor:
cursor.execute("SELECT * FROM sqlite_master WHERE type='table' AND name='meta';")
with closing(self.db.cursor()) as c:
c.execute("SELECT * FROM sqlite_master WHERE type='table' AND name='meta'")
# Initialize DB
if len(cursor.fetchall()) == 0:
logging.warning("Initializing database")
if len(c.fetchall()) == 0:
logger.warning("Initializing database")
for query in queries:
cursor.execute(query)
c.execute(query)
c.execute("COMMIT")
else:
# Migrate if old db exists
version = int(cursor.execute("SELECT * FROM meta WHERE key='db_version';").fetchone()['value'])
if version < 1:
logging.warning("migrating database to v1 from %s", version)
users_table = """CREATE TABLE 'users' (
'id' INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
'username' TEXT UNIQUE NOT NULL,
'password' TEXT NOT NULL,
'admin' BOOLEAN DEFAULT 0,
'email' TEXT)"""
cursor.execute(users_table)
version = 1
if version < 2:
logging.warning("migrating database to v2 from %s", version)
stars_table = """CREATE TABLE 'stars' (
'userid' INTEGER,
'nodeid' INTEGER,
primary key ('userid', 'nodeid'))"""
cursor.execute(stars_table)
version = 2
if version < 3:
logging.warning("migrating database to v3 from %s", version)
size_col = """ALTER TABLE nodes ADD 'size' INTEGER NOT NULL DEFAULT -1;"""
cursor.execute(size_col)
version = 3
cursor.execute("""UPDATE meta SET value=? WHERE key="db_version";""", (str(version), ))
logging.warning("db schema is version {}".format(version))
# Virtual file tree
def getnode(self, node_id):
return self.getnodes(node_id=node_id)[0]
def _populate_meta(self, node):
node['metadata'] = self.decode_metadata(node['metadata'])
return node
def getnodes(self, *parent_ids, node_id=None, types=None, limit=None, order=None):
"""
Find nodes that match the passed paramters.
:param parent_ids: one or more parents to find children of
:type parent_ids: int
:param node_id: single node id to return
:type node_id: int
:param types: filter by type column
:type types: list
:param limit: number of records to limit to
:param order: one of ("rand") to select ordering mode
"""
query = "SELECT * FROM nodes WHERE "
qargs = []
def add_filter(name, values):
nonlocal query
nonlocal qargs
query += "{} in (".format(name)
for value in (values if type(values) in [list, tuple] else [values]):
query += "?, "
qargs += [value]
query = query.rstrip(", ")
query += ") AND"
if node_id:
add_filter("id", node_id)
if parent_ids:
add_filter("parent", parent_ids)
if types:
add_filter("type", types)
query = query.rstrip(" AND").rstrip("WHERE ")
if order:
query += "ORDER BY "
if order == "rand":
query += "RANDOM()"
if limit: # TODO 2-item tuple limit
query += " limit {}".format(limit)
with closing(self.db.cursor()) as cursor:
return list(map(self._populate_meta, cursor.execute(query, qargs).fetchall()))
def addnode(self, parent_id, fspath, name, size=-1):
fullpath = os.path.join(fspath, name)
is_dir = os.path.isdir(fullpath)
return self._addnode(parent_id, name, is_dir, size=size)
def _addnode(self, parent_id, name, is_dir=True, size=-1):
with closing(self.db.cursor()) as cursor:
cursor.execute("INSERT INTO nodes (parent, isdir, name, size) VALUES (?, ?, ?, ?);",
(parent_id, 1 if is_dir else 0, name, size))
return self.getnode(cursor.lastrowid)
def delnode(self, node_id):
deleted = 1
for child in self.getnodes(node_id):
deleted += self.delnode(child["id"])
with closing(self.db.cursor()) as cursor:
cursor.execute("DELETE FROM nodes WHERE id=?;", (node_id, ))
return deleted
def update_metadata(self, node_id, mergedict=None, **kwargs):
mergedict = mergedict if mergedict else {}
mergedict.update(kwargs)
with closing(self.db.cursor()) as cursor:
for table_key in keys_in_table:
if table_key in mergedict:
cursor.execute("UPDATE nodes SET {}=? WHERE id=?;".format(table_key),
(mergedict[table_key], node_id))
other_meta = {k: v for k, v in mergedict.items() if k not in keys_in_table}
if other_meta:
metadata = self.get_metadata(node_id)
metadata.update(other_meta)
cursor.execute("UPDATE nodes SET metadata=? WHERE id=?;", (json.dumps(metadata), node_id, ))
def get_metadata(self, node_id):
node = self.getnode(node_id)
meta = node["metadata"]
meta.update({item: node[item] for item in keys_in_table})
return meta
def decode_metadata(self, metadata):
if metadata:
return json.loads(metadata)
return {}
def hashit(self, unicode_string):
return sha512(unicode_string.encode('UTF-8')).hexdigest()
def validate_password(self, realm, username, password):
with closing(self.db.cursor()) as cursor:
users = cursor.execute("SELECT * FROM users WHERE username=? AND password=?;",
(username, self.hashit(password))).fetchall()
return bool(users)
def add_user(self, username, password, is_admin=False):
with closing(self.db.cursor()) as cursor:
cursor.execute("INSERT INTO users (username, password, admin) VALUES (?, ?, ?)",
(username, self.hashit(password), is_admin))
def update_user(self, username, password, is_admin=False):
with closing(self.db.cursor()) as cursor:
cursor.execute("UPDATE users SET password=?, admin=? WHERE username=?;",
(self.hashit(password), is_admin, username))
def get_user(self, user):
with closing(self.db.cursor()) as cursor:
try:
column = "id" if type(user) is int else "username"
return cursor.execute("SELECT * FROM users WHERE {}=?;".format(column), (user, )).fetchall()[0]
except IndexError:
raise NotFoundError("User doesn't exist")
def set_starred(self, user_id, node_id, starred=True):
with closing(self.db.cursor()) as cursor:
if starred:
query = "INSERT INTO stars (userid, nodeid) VALUES (?, ?);"
else:
query = "DELETE FROM stars WHERE userid=? and nodeid=?;"
try:
cursor.execute(query, (user_id, node_id))
except sqlite3.IntegrityError:
# c.execute("""UPDATE meta SET value=? WHERE key="db_version";""", (str(version), ))
# logger.warning("db schema is version {}".format(version))
pass
def get_starred_items(self, for_user_id=None):
with closing(self.db.cursor()) as cursor:
q = """SELECT n.* FROM nodes as n INNER JOIN stars as s ON s.nodeid = n.id"""
qargs = []
if for_user_id:
q += """ AND userid=?"""
qargs += [int(for_user_id)]
return list(map(self._populate_meta,
cursor.execute(q, qargs).fetchall()))
def get_artist_info(self, item_id):
#TODO
return {"biography": "placeholder biography",
"musicBrainzId": "playerholder",
"lastFmUrl": "https://www.last.fm/music/Placeholder",
"smallImageUrl": "",
"mediumImageUrl": "",
"largeImageUrl": "",
"similarArtists": []}
@cursor
def get_stats(self, c):
songs = c.execute("SELECT COUNT(*) as cnt FROM songs").fetchone()['cnt']
artists = c.execute("SELECT COUNT(*) as cnt FROM artists").fetchone()['cnt']
albums = c.execute("SELECT COUNT(*) as cnt FROM albums").fetchone()['cnt']
return dict(songs=songs, artists=artists, albums=albums)
# Music related
@cursor
def add_root(self, c, path, name="Library"):
"""
Add a new library root. Returns the root ID or raises on collision
:param path: normalized absolute path to add to the library
:type path: str:
:return: int
:raises: sqlite3.IntegrityError
"""
path = os.path.abspath(os.path.normpath(path))
try:
c.execute("INSERT INTO libraries ('name', 'path') VALUES (?, ?)", (name, path, ))
c.execute("COMMIT")
return c.lastrowid
except sqlite3.IntegrityError:
raise DuplicateRootException("Root '{}' already exists".format(path))
@cursor
def get_libraries(self, c, id=None):
libs = []
q = "SELECT * FROM libraries"
params = []
conditions = []
if id:
conditions.append("id = ?")
params.append(id)
if conditions:
q += " WHERE " + " AND ".join(conditions)
c.execute(q, params)
for row in c:
libs.append(row)
return libs
@cursor
def get_artists(self, c, id=None, dirid=None, sortby="name", order=None, name_contains=None):
assert order in ["asc", "desc", None]
artists = []
q = "SELECT * FROM artists"
params = []
conditions = []
if id:
conditions.append("id = ?")
params.append(id)
if dirid:
conditions.append("dir = ?")
params.append(dirid)
if name_contains:
conditions.append("name LIKE ?")
params.append("%{}%".format(name_contains))
if conditions:
q += " WHERE " + " AND ".join(conditions)
if sortby:
q += " ORDER BY {} {}".format(sortby, order.upper() if order else "ASC")
c.execute(q, params)
for row in c:
artists.append(row)
return artists
@cursor
def get_albums(self, c, id=None, artist=None, sortby="name", order=None, limit=None, name_contains=None):
"""
:param limit: int or tuple of int, int. translates directly to sql logic.
"""
if order:
order = {"asc": "ASC", "desc": "DESC"}[order]
if sortby == "random":
sortby = "RANDOM()"
albums = []
q = """
SELECT
alb.*,
art.name as artistname,
dirs.parent as artistdir
FROM albums as alb
INNER JOIN artists as art
on alb.artistid = art.id
INNER JOIN dirs
on dirs.id = alb.dir
"""
params = []
conditions = []
if id:
conditions.append("id = ?")
params.append(id)
if artist:
conditions.append("artistid = ?")
params.append(artist)
if name_contains:
conditions.append("alb.name LIKE ?")
params.append("%{}%".format(name_contains))
if conditions:
q += " WHERE " + " AND ".join(conditions)
if sortby:
q += " ORDER BY {}".format(sortby)
if order:
q += " {}".format(order)
if limit:
q += " LIMIT {}".format(limit) if isinstance(limit, int) \
else " LIMIT {}, {}".format(*limit)
c.execute(q, params)
for row in c:
albums.append(row)
return albums
@cursor
def get_songs(self, c, id=None, genre=None, sortby="title", order=None, limit=None, title_contains=None):
# TODO make this query massively uglier by joining albums and artists so that artistid etc can be a filter
# or maybe lookup those IDs in the library layer?
if order:
order = {"asc": "ASC", "desc": "DESC"}[order]
if sortby == "random":
sortby = "RANDOM()"
songs = []
q = """
SELECT
s.*,
lib.path as root,
alb.name as albumname,
alb.coverid as albumcoverid,
art.name as artistname,
g.name as genrename,
albdir.id as albumdir
FROM songs as s
INNER JOIN libraries as lib
on s.library == lib.id
INNER JOIN albums as alb
on s.albumid == alb.id
INNER JOIN dirs as albdir
on albdir.id = alb.dir
INNER JOIN artists as art
on alb.artistid = art.id
LEFT JOIN genres as g
on s.genre == g.id
"""
params = []
conditions = []
if id and isinstance(id, int):
conditions.append("s.id = ?")
params.append(id)
elif id and isinstance(id, Iterable):
conditions.append("s.id IN ({})".format(",".join("?" * len(id))))
params += id
if genre:
conditions.append("g.name = ?")
params.append(genre)
if title_contains:
conditions.append("s.title LIKE ?")
params.append("%{}%".format(title_contains))
if conditions:
q += " WHERE " + " AND ".join(conditions)
if sortby:
q += " ORDER BY {}".format(sortby)
if order:
q += " {}".format(order)
if limit:
q += " LIMIT {}".format(limit) # TODO support limit pagination
c.execute(q, params)
for row in c:
songs.append(row)
return songs
@cursor
def get_genres(self, c, genre_id=None):
genres = []
q = "SELECT * FROM genres"
params = []
conditions = []
if genre_id:
conditions.append("id = ?")
params.append(genre_id)
if conditions:
q += " WHERE " + " AND ".join(conditions)
c.execute(q, params)
for row in c:
genres.append(row)
return genres
@cursor
def get_cover(self, c, cover_id):
cover = None
for cover in c.execute("SELECT * FROM covers WHERE id = ?", (cover_id, )):
return cover
def get_cover_path(self, cover_id):
cover = self.get_cover(cover_id)
library = self.get_libraries(cover["library"])[0]
return os.path.join(library["path"], cover["path"])
@cursor
def get_subsonic_musicdir(self, c, dirid):
"""
The world is a harsh place.
Again, this bullshit exists only to serve subsonic clients. Given a directory ID it returns a dict containing:
- the directory itself
- its parent
- its child dirs
- its child media
that's a lie, it's a tuple and it's full of BS. read the code
"""
# find directory
dirinfo = None
for dirinfo in c.execute("SELECT * FROM dirs WHERE id = ?", (dirid, )):
pass
assert dirinfo
ret = None
# see if it matches the artists or albums table
artist = None
for artist in c.execute("SELECT * FROM artists WHERE dir = ?", (dirid, )):
pass
# if artist:
# get child albums
if artist:
ret = ("artist", dirinfo, artist)
children = []
for album in c.execute("SELECT * FROM albums WHERE artistid = ?", (artist["id"], )):
children.append(("album", album))
ret[2]['children'] = children
return ret
# else if album:
# get child tracks
album = None
for album in c.execute("SELECT * FROM albums WHERE dir = ?", (dirid, )):
pass
if album:
ret = ("album", dirinfo, album)
artist_info = c.execute("SELECT * FROM artists WHERE id = ?", (album["artistid"], )).fetchall()[0]
children = []
for song in c.execute("SELECT * FROM songs WHERE albumid = ? ORDER BY track, title ASC;", (album["id"], )):
song["_artist"] = artist_info
children.append(("song", song))
ret[2]['children'] = children
return ret
# Playlist related
@cursor
def add_playlist(self, c, ownerid, name, song_ids, public=False):
"""
Create a playlist
"""
now = time()
c.execute("INSERT INTO playlists (ownerid, name, public, created, changed) VALUES (?, ?, ?, ?, ?)",
(ownerid, name, public, now, now))
plid = c.lastrowid
for song_id in song_ids:
self.add_to_playlist(c, plid, song_id)
c.execute("COMMIT")
@cursor
def add_to_playlist(self, c, playlist_id, song_id):
# TODO deal with order column
c.execute("INSERT INTO playlist_entries (playlistid, songid) VALUES (?, ?)", (playlist_id, song_id))
@cursor
def get_playlist(self, c, playlist_id):
return c.execute("SELECT * FROM playlists WHERE id=?", (playlist_id, )).fetchone()
@cursor
def get_playlist_songs(self, c, playlist_id):
songs = []
q = """
SELECT
s.*,
alb.name as albumname,
alb.coverid as albumcoverid,
art.name as artistname,
art.name as artistid,
g.name as genrename
FROM playlist_entries as pe
INNER JOIN songs as s
on pe.songid == s.id
INNER JOIN albums as alb
on s.albumid == alb.id
INNER JOIN artists as art
on alb.artistid = art.id
LEFT JOIN genres as g
on s.genre == g.id
WHERE pe.playlistid = ?
ORDER BY pe.'order' ASC;
"""
for row in c.execute(q, (playlist_id, )):
songs.append(row)
return songs
@cursor
def get_playlists(self, c, user_id):
playlists = []
for row in c.execute("SELECT * FROM playlists WHERE ownerid=? or public=1", (user_id, )):
playlists.append(row)
return playlists
@cursor
def remove_index_from_playlist(self, c, playlist_id, index):
c.execute("DELETE FROM playlist_entries WHERE playlistid=? LIMIT ?, 1", (playlist_id, index, ))
c.execute("COMMIT")
@cursor
def empty_playlist(self, c, playlist_id):
#TODO combine with delete_playlist
c.execute("DELETE FROM playlist_entries WHERE playlistid=?", (playlist_id, ))
c.execute("COMMIT")
@cursor
def delete_playlist(self, c, playlist_id):
c.execute("DELETE FROM playlist_entries WHERE playlistid=?", (playlist_id, ))
c.execute("DELETE FROM playlists WHERE id=?", (playlist_id, ))
c.execute("COMMIT")
@cursor
def update_album_played(self, c, album_id, last_played=None):
c.execute("UPDATE albums SET played=? WHERE id=?", (last_played, album_id, ))
c.execute("COMMIT")
@cursor
def increment_album_plays(self, c, album_id):
c.execute("UPDATE albums SET plays = plays + 1 WHERE id=?", (album_id, ))
c.execute("COMMIT")
@cursor
def increment_track_plays(self, c, track_id):
c.execute("UPDATE songs SET plays = plays + 1 WHERE id=?", (track_id, ))
c.execute("COMMIT")
# User related
@cursor
def add_user(self, c, username, password, is_admin=False):
c.execute("INSERT INTO users (username, password, admin) VALUES (?, ?, ?)",
(username, hash_password(password), is_admin))
c.execute("COMMIT")
@cursor
def update_user(self, c, username, password, is_admin=False):
c.execute("UPDATE users SET password=?, admin=? WHERE username=?;",
(hash_password(password), is_admin, username))
c.execute("COMMIT")
@cursor
def get_user(self, c, user):
try:
column = "id" if type(user) is int else "username"
return c.execute("SELECT * FROM users WHERE {}=?;".format(column), (user, )).fetchall()[0]
except IndexError:
raise NotFoundError("User doesn't exist")
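get_user accepts either a numeric id or a username; a short sketch of both paths, again assuming the decorator supplies the cursor and that NotFoundError comes from this module:

```python
# Hypothetical usage of get_user().
admin = db.get_user("admin")       # lookup by username (str)
same = db.get_user(admin["id"])    # lookup by id (int)
try:
    db.get_user("nobody")
except NotFoundError:              # raised when no row matches
    print("no such user")
```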

View File

@@ -1,121 +0,0 @@
import os
import logging
from pysonic.scanner import PysonicFilesystemScanner
from pysonic.types import MUSIC_TYPES
LETTER_GROUPS = ["a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m", "n", "o", "p", "q", "r", "s", "t",
"u", "v", "w", "xyz", "0123456789"]
logging = logging.getLogger("library")
def memoize(function):
memo = {}
def wrapper(*args):
if args in memo:
return memo[args]
else:
rv = function(*args)
memo[args] = rv
return rv
return wrapper
class NoDataException(Exception):
pass
class DuplicateRootException(Exception):
pass
class PysonicLibrary(object):
def __init__(self, database):
self.db = database
self.scanner = PysonicFilesystemScanner(self)
logging.info("library ready")
def update(self):
self.scanner.init_scan()
def add_dir(self, dir_path):
dir_path = os.path.abspath(os.path.normpath(dir_path))
libraries = [i['metadata']['fspath'] for i in self.db.getnodes(-1)]
if dir_path in libraries:
raise DuplicateRootException("Dir already in library")
else:
new_root = self.db._addnode(-1, 'New Library', is_dir=True)
self.db.update_metadata(new_root['id'], fspath=dir_path)
#@memoize
def get_libraries(self):
"""
Libraries are top-level nodes
"""
return self.db.getnodes(-1)
#@memoize
def get_artists(self):
# Assume artists are second level dirs
return self.db.getnodes(*[item["id"] for item in self.get_libraries()])
def get_dir(self, dirid):
return self.db.getnode(dirid)
def get_dir_children(self, dirid):
return self.db.getnodes(dirid)
#@memoize
def get_albums(self):
return self.db.getnodes(*[item["id"] for item in self.get_artists()])
#@memoize
def get_filepath(self, nodeid):
parents = [self.db.getnode(nodeid)]
while parents[-1]['parent'] != -1:
parents.append(self.db.getnode(parents[-1]['parent']))
root = parents.pop()
parents.reverse()
return os.path.join(root['metadata']['fspath'], *[i['name'] for i in parents])
def get_file_metadata(self, nodeid):
return self.db.get_metadata(nodeid)
def get_artist_info(self, item_id):
# artist = self.db.getnode(item_id)
return {"biography": "placeholder biography",
"musicBrainzId": "playerholder",
"lastFmUrl": "https://www.last.fm/music/Placeholder",
"smallImageUrl": "",
"mediumImageUrl": "",
"largeImageUrl": "",
"similarArtists": []}
def set_starred(self, username, node_id, starred):
self.db.set_starred(self.db.get_user(username)["id"], node_id, starred)
def get_stars(self, user, user_id):
self.db.get_stars()
def get_user(self, user):
return self.db.get_user(user)
def get_starred(self, username):
return self.db.get_starred_items(self.db.get_user(username)["id"])
def get_songs(self, limit=50, shuffle=True):
return self.db.getnodes(types=MUSIC_TYPES, limit=limit, order="rand")
def get_song(self, id=None):
if id:
return self.db.getnode(id)
else:
return self.db.getnodes(types=MUSIC_TYPES, limit=1, order="rand")
def report_transcode(self, item_id, bitrate, num_bytes):
assert type(bitrate) is int and bitrate > 0 and bitrate <= 320
logging.info("Got transcode report of {} for item {} @ {}".format(num_bytes, item_id, bitrate))
self.db.update_metadata(item_id, {"transcoded_{}_size".format(bitrate):int(num_bytes)})

View File

@@ -1,10 +1,12 @@
import os
import re
import logging
from contextlib import closing
import mimetypes
from time import time
from threading import Thread
from pysonic.types import KNOWN_MIMES, MUSIC_TYPES, MPX_TYPES, FLAC_TYPES, WAV_TYPES
from pysonic.types import MUSIC_TYPES, WAV_TYPES, MPX_TYPES, FLAC_TYPES, MUSIC_EXTENSIONS, IMAGE_EXTENSIONS, \
TYPE_TO_EXTENSION, EXTENSION_TO_TYPE, UNKNOWN_MIME
from mutagen.id3 import ID3
from mutagen import MutagenError
from mutagen.id3._util import ID3NoHeaderError
@@ -16,160 +18,367 @@ logging = logging.getLogger("scanner")
RE_NUMBERS = re.compile(r'^([0-9]+)')
def guess_format(fname):
ext = fname.split(".")[-1].lower()
return EXTENSION_TO_TYPE.get(ext, UNKNOWN_MIME)
class PysonicFilesystemScanner(object):
def __init__(self, library):
self.library = library
def __init__(self, db):
self.db = db
def init_scan(self):
self.scanner = Thread(target=self.rescan, daemon=True)
self.scanner.start()
def rescan(self):
# Perform directory scan
logging.warning("Beginning library rescan")
"""
Perform a full scan of the media library's files
"""
start = time()
for parent in self.library.get_libraries():
meta = parent["metadata"]
logging.info("Scanning {}".format(meta["fspath"]))
logging.warning("Beginning library rescan")
for parent in self.db.get_libraries():
logging.info("Scanning {}".format(parent["path"]))
self.scan_root(parent["id"], parent["path"])
logging.warning("Rescan complete in %ss", round(time() - start, 3))
def recurse_dir(path, parent):
logging.info("Scanning {}".format(path))
# create or update the database of nodes by comparing sets of names
fs_entries = set(os.listdir(path))
db_entires = self.library.db.getnodes(parent["id"])
db_entires_names = set([i['name'] for i in db_entires])
to_delete = db_entires_names - fs_entries
to_create = fs_entries - db_entires_names
def scan_root(self, pid, root):
"""
Scan a single root of the library
:param pid: parent ID
:param root: absolute path to scan
"""
logging.warning("Beginning file scan for library %s", pid)
root_depth = len(self.split_path(root))
for path, dirs, files in os.walk(root):
child = self.split_path(path)[root_depth:]
# dirid = self.create_or_get_dbdir_tree(pid, child) # dumb table for Subsonic
self.scan_dir(pid, root, child, dirs, files)
# If any size have changed, mark the file to be rescanned
for entry in db_entires:
finfo = os.stat(os.path.join(path, entry["name"]))
if finfo.st_size != entry["size"]:
logging.info("{} has changed in size, marking for meta rescan".format(entry["id"]))
self.library.db.update_metadata(entry['id'], id3_done=False, size=finfo.st_size)
logging.warning("Beginning metadata scan for library %s", pid)
self.scan_metadata(pid, root, freshonly=True)
# Create any nodes not found in the db
for create in to_create:
new_finfo = os.stat(os.path.join(path, create))
new_node = self.library.db.addnode(parent["id"], path, create, size=new_finfo.st_size)
logging.info("Added {}".format(os.path.join(path, create)))
db_entires.append(new_node)
logging.warning("Finished scan for library %s", pid)
# Delete any db nodes not found on disk
for delete in to_delete:
logging.info("Prune ", delete, "in parent", path)
node = [i for i in db_entires if i["name"] == delete]
if node:
deleted = self.library.db.delnode(node[0]["id"])
logging.info("Pruned {}, deleting total of {}".format(node, deleted))
def create_or_get_dbdir_tree(self, cursor, pid, path):
"""
Return the ID of the directory specified by `path`. The path will be created as necessary. This bullshit exists
only to serve Subsonic, and can easily be lopped off.
:param pid: root parent the path resides in
:param path: single-file tree as a list of dir names under the root parent
:type path: list
"""
assert path
# with closing(self.db.db.cursor()) as cursor:
parent_id = 0 # 0 indicates a top level item in the library
for name in path:
parent_id = self.create_or_get_dbdir(cursor, pid, parent_id, name)
return parent_id
for entry in db_entires:
if entry["name"] in to_delete:
def create_or_get_dbdir(self, cursor, pid, parent_id, name):
for row in cursor.execute("SELECT * FROM dirs WHERE library=? and parent=? and name=?",
(pid, parent_id, name, )):
return row['id']
cursor.execute("INSERT INTO dirs (library, parent, name) VALUES (?, ?, ?)", (pid, parent_id, name))
return cursor.lastrowid
def scan_dir(self, pid, root, path, dirs, files):
"""
Scan a single directory in the library. Actually, this ignores all dirs that don't contain files. Dirs are
interpreted as follows:
- The library root is ignored
- Empty dirs are ignored
- Dirs containing files are assumed to be an album
- Top level dirs in the library are assumed to be artists
- Any dirs not following the above rules are transparently ignored
- Files placed in an artist dir are an unhandled edge case TODO
- Any files with an image extension in an album dir will be assumed to be the cover regardless of naming
- TODO ignore dotfiles/dirs
TODO remove all file scanning / statting etc from paths where a db transaction is active (gather data then open)
:param pid: parent id
:param root: library root path
:param path: scan location path, as a list of subdirs within the root
:param dirs: dirs in the current path
:param files: files in the current path
"""
# If this is the library root or an empty dir just bail
if not path or not files:
return
# If it is the library root just bail
if len(path) == 0:
return
logging.info("In library %s scanning %s", pid, os.path.join(*path))
# Guess an album from the dir, if possible
album = None
if len(path) > 1:
album = path[-1]
with closing(self.db.db.cursor()) as cursor:
artist_id, artist_dirid = self.create_or_get_artist(cursor, pid, path[0])
album_id = None
album_dirid = None
if album:
album_id, album_dirid = self.create_or_get_album(cursor, pid, path, artist_id)
libpath = os.path.join(*path)
new_files = False
for fname in files:
if not any([fname.endswith(".{}".format(i)) for i in MUSIC_EXTENSIONS]):
continue
new_files = self.add_music_if_new(cursor, pid, root, album_id, libpath, fname) or new_files
# Create cover entry TODO we can probably skip this if there were no new audio files?
if album_id:
for file in files:
if not any([file.endswith(".{}".format(i)) for i in IMAGE_EXTENSIONS]):
continue
if int(entry['isdir']): # 1 means dir
recurse_dir(os.path.join(path, entry["name"]), entry)
fpath = os.path.join(libpath, file)
cursor.execute("SELECT id FROM covers WHERE path=?", (fpath, ))
if not cursor.fetchall():
# We leave most fields blank now and return later
cursor.execute("INSERT INTO covers (library, path) VALUES (?, ?);", (pid, fpath, ))
cursor.execute("UPDATE albums SET coverid=? WHERE id=?", (cursor.lastrowid, album_id))
break
# Populate all files for this top-level root
recurse_dir(meta["fspath"], parent)
#
#
#
# Add simple metadata
for artist_dir in self.library.db.getnodes(parent["id"]):
artist = artist_dir["name"]
for album_dir in self.library.db.getnodes(artist_dir["id"]):
album = album_dir["name"]
album_meta = album_dir["metadata"]
for track_file in self.library.db.getnodes(album_dir["id"]):
title = track_file["name"]
if not track_file["title"]:
self.library.db.update_metadata(track_file["id"], artist=artist, album=album, title=title)
logging.info("Adding simple metadata for {}/{}/{} #{}".format(artist, album,
title, track_file["id"]))
if not album_dir["album"]:
self.library.db.update_metadata(album_dir["id"], artist=artist, album=album)
logging.info("Adding simple metadata for {}/{} #{}".format(artist, album, album_dir["id"]))
if not artist_dir["artist"]:
self.library.db.update_metadata(artist_dir["id"], artist=artist)
logging.info("Adding simple metadata for {} #{}".format(artist, artist_dir["id"]))
if title in ["cover.jpg", "cover.png"] and 'cover' not in album_meta:
# // add cover art
self.library.db.update_metadata(album_dir["id"], cover=track_file["id"])
logging.info("added cover for {}".format(album_dir['id']))
if new_files: # Commit after each dir IF audio files were found. no audio == dump the artist
cursor.execute("COMMIT")
if track_file["type"] is None:
fpath = self.library.get_filepath(track_file['id'])
ftype, extra = mimetypes.guess_type(fpath)
def add_music_if_new(self, cursor, pid, root_dir, album_id, fdir, fname):
fpath = os.path.join(fdir, fname)
cursor.execute("SELECT id FROM songs WHERE file=?", (fpath, ))
if not cursor.fetchall():
# We leave most fields blank now and return later
# TODO probably not here but track file sizes and mark them for rescan on change
cursor.execute("INSERT INTO songs (library, albumid, file, size, title, format) "
"VALUES (?, ?, ?, ?, ?, ?)",
(pid,
album_id,
fpath,
os.stat(os.path.join(root_dir, fpath)).st_size,
fname,
guess_format(fpath)))
return True
return False
if ftype in KNOWN_MIMES:
self.library.db.update_metadata(track_file["id"], type=ftype)
logging.info("added type {} for {}".format(ftype, track_file['id']))
else:
logging.warning("Ignoring unreadable file at {}, unknown ftype ({}, {})"
.format(fpath, ftype, extra))
#
#
#
# Add advanced id3 / media info metadata
for artist_dir in self.library.db.getnodes(parent["id"]):
artist = artist_dir["name"]
for album_dir in self.library.db.getnodes(artist_dir["id"]):
album = album_dir["name"]
album_meta = album_dir["metadata"]
for track_file in self.library.db.getnodes(album_dir["id"]):
track_meta = track_file['metadata']
title = track_file["name"]
fpath = self.library.get_filepath(track_file["id"])
if track_meta.get('id3_done', False) or track_file.get("type", None) not in MUSIC_TYPES:
continue
tags = {'id3_done': True}
try:
audio = None
if track_file.get("type", None) in MPX_TYPES:
audio = MP3(fpath)
if audio.info.sketchy:
logging.warning("media reported as sketchy: %s", fpath)
elif track_file.get("type", None) in FLAC_TYPES:
audio = FLAC(fpath)
else:
audio = ID3(fpath)
# print(audio.pprint())
try:
tags["media_length"] = int(audio.info.length)
except (ValueError, AttributeError):
pass
try:
bitrate = int(audio.info.bitrate)
tags["media_bitrate"] = bitrate
tags["media_kbitrate"] = int(bitrate / 1024)
except (ValueError, AttributeError):
pass
try:
tags["track"] = int(RE_NUMBERS.findall(''.join(audio['TRCK'].text))[0])
except (KeyError, IndexError):
pass
try:
tags["id3_artist"] = ''.join(audio['TPE1'].text)
except KeyError:
pass
try:
tags["id3_album"] = ''.join(audio['TALB'].text)
except KeyError:
pass
try:
tags["id3_title"] = ''.join(audio['TIT2'].text)
except KeyError:
pass
try:
tags["id3_year"] = audio['TDRC'].text[0].year
except (KeyError, IndexError):
pass
logging.info("got all media info from %s", fpath)
except ID3NoHeaderError:
pass
except MutagenError as m:
logging.error("failed to read audio information: %s", m)
continue
self.library.db.update_metadata(track_file["id"], **tags)
def create_or_get_artist(self, cursor, pid, dirname):
"""
Retrieve, creating if necessary, directory information about an artist. The returned tuple contains the artist's ID
and the dir id associated with the artist.
:param cursor: sqlite cursor to use
:param pid: root parent id we're working in
:param dirname: name of the artist dir
:return tuple:
"""
artist_dirid = self.create_or_get_dbdir_tree(cursor, pid, [dirname])
cursor.execute("SELECT * FROM artists WHERE dir = ?", (artist_dirid, ))
row = cursor.fetchone()
artist_id = None
if row:
artist_id = row['id']
else:
cursor.execute("INSERT INTO artists (libraryid, dir, name) VALUES (?, ?, ?)",
(pid, artist_dirid, dirname))
artist_id = cursor.lastrowid
return artist_id, artist_dirid
logging.warning("Library scan complete in {}s".format(round(time() - start, 2)))
def create_or_get_album(self, cursor, pid, dirnames, artist_id):
"""
Retrieve, creating if necessary, directory information about an album. The returned tuple contains the album's ID
and the dir id associated with the album.
:param cursor: sqlite cursor to use
:param pid: root parent id we're working in
:param dirnames: list of directories from the root to the album dir
:param artist_id: id of the artist the album belongs to
:return tuple:
"""
album_dirid = self.create_or_get_dbdir_tree(cursor, pid, dirnames)
cursor.execute("SELECT * FROM albums WHERE artistid = ? AND dir = ?", (artist_id, album_dirid, ))
row = cursor.fetchone()
if row:
album_id = row['id']
else:
cursor.execute("INSERT INTO albums (artistid, dir, name, added) VALUES (?, ?, ?, ?)",
(artist_id, album_dirid, dirnames[-1], int(time())))
album_id = cursor.lastrowid
return album_id, album_dirid
def split_path(self, path):
"""
Given a path like /foo/bar, return ['foo', 'bar']
"""
parts = []
head = path
while True:
head, tail = os.path.split(head)
if tail:
parts.append(tail)
else:
break
parts.reverse()
return parts
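Worked examples of split_path on POSIX-style paths; the trailing-slash case follows directly from os.path.split semantics.

```python
scanner = PysonicFilesystemScanner(db=None)   # split_path does not touch self.db
assert scanner.split_path("/foo/bar") == ["foo", "bar"]
assert scanner.split_path("foo/bar") == ["foo", "bar"]
# A trailing slash yields an empty tail on the first os.path.split call,
# so the loop exits immediately and nothing is collected.
assert scanner.split_path("/foo/bar/") == []
```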
def scan_metadata(self, pid, root, freshonly=False):
"""
Iterate through files in the library and update metadata
:param freshonly: only update metadata on files that have never been scanned before
"""
q = "SELECT * FROM songs "
if freshonly:
q += "WHERE lastscan = -1 "
q += "ORDER BY albumid"
#TODO scraping ID3 etc from the media files can be parallelized
with closing(self.db.db.cursor()) as reader, \
closing(self.db.db.cursor()) as writer:
processed = 0 # commit batching counter
for row in reader.execute(q):
# Find meta, bail if the file was unreadable
# TODO file metadata scanning could be done in parallel
meta = self.scan_file_metadata(os.path.join(root, row['file']))
if not meta:
continue
# Meta may have additional keys that aren't in the songs table, omit them
song_attrs = ["title", "lastscan", "format", "length", "bitrate", "track", "year"]
song_meta = {k: v for k, v in meta.items() if k in song_attrs}
# Update the song row
q = "UPDATE songs SET "
params = []
for key, value in song_meta.items():
q += "{}=?, ".format(key)
params.append(value)
q += "lastscan=? WHERE id=?"
params += [int(time()), row["id"]]
writer.execute(q, params)
# If the metadata has an artist or album name, update the relevant items
# TODO ignore metadata if they're blank
if "album" in meta:
writer.execute("UPDATE albums SET name=? WHERE id=?", (meta["album"], row["albumid"]))
if "artist" in meta:
album = writer.execute("SELECT artistid FROM albums WHERE id=?", (row['albumid'], )).fetchone()
if album:
writer.execute("UPDATE artists SET name=? WHERE id=?", (meta["artist"], album["artistid"]))
if "genre" in meta:
genre_name = meta["genre"].strip()
if genre_name:
genre_id = self.get_genre_id(writer, meta["genre"])
writer.execute("UPDATE songs SET genre=? WHERE id=?", (genre_id, row['id']))
# Commit every 50 items
processed += 1
if processed > 50:
writer.execute("COMMIT")
processed = 0
if processed != 0:
writer.execute("COMMIT")
def get_genre_id(self, cursor, genre_name):
genre_name = genre_name.title().strip() # normalize
for row in cursor.execute("SELECT * FROM genres WHERE name=?", (genre_name, )):
return row['id']
cursor.execute("INSERT INTO genres (name) VALUES (?)", (genre_name, ))
return cursor.lastrowid
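Genre names are normalized with str.title() and strip() before lookup, so variant spellings collapse onto a single genres row; a small illustration of the normalization itself:

```python
# All of these normalize to "Heavy Metal", so get_genre_id() would insert
# one row on the first call and return the same id afterwards.
for raw in ["heavy metal", "  HEAVY METAL  ", "Heavy metal"]:
    assert raw.title().strip() == "Heavy Metal"
```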
def scan_file_metadata(self, fpath):
"""
Scan the file for metadata.
:param fpath: path to the file to scan
"""
logging.info("getting metadata from %s", fpath)
ftype, extra = mimetypes.guess_type(fpath)
if ftype in MUSIC_TYPES:
return self.scan_mutagen_metadata(fpath, ftype)
def scan_mutagen_metadata(self, fpath, ftype):
meta = {"format": ftype}
try:
# Open file with mutagen
if ftype in MPX_TYPES:
audio = MP3(fpath)
if audio.info.sketchy:
logging.warning("media reported as sketchy: %s", fpath)
elif ftype in FLAC_TYPES:
audio = FLAC(fpath)
else:
audio = ID3(fpath)
except ID3NoHeaderError:
return
except MutagenError as m:
logging.error("failed to read audio information: %s", m)
return
# these fields are generic
try:
meta["length"] = int(audio.info.length)
except (ValueError, AttributeError):
pass
try:
bitrate = int(audio.info.bitrate)
meta["bitrate"] = bitrate
# meta["kbitrate"] = int(bitrate / 1024)
except (ValueError, AttributeError):
pass
# these fields are format-specific
#TODO determine if having WAV_TYPES does anything at all
if ftype in MPX_TYPES or ftype in WAV_TYPES:
try:
meta["track"] = int(RE_NUMBERS.findall(''.join(audio['TRCK'].text))[0])
except (KeyError, IndexError):
pass
try:
meta["artist"] = ''.join(audio['TPE1'].text).strip()
except KeyError:
pass
try:
meta["album"] = ''.join(audio['TALB'].text).strip()
except KeyError:
pass
try:
meta["title"] = ''.join(audio['TIT2'].text).strip()
except KeyError:
pass
try:
meta["year"] = int(audio['TDRC'].text[0].year)
except (KeyError, IndexError, ValueError):
pass
try:
meta["genre"] = audio['TCON'].text[0].strip()
except (KeyError, IndexError):
pass
elif ftype in FLAC_TYPES:
try:
meta["track"] = int(RE_NUMBERS.findall(audio["tracknumber"][0])[0])
except (KeyError, IndexError):
pass
try:
meta["artist"] = audio["artist"][0].strip()
except (KeyError, IndexError):
pass
try:
meta["album"] = audio["album"][0].strip()
except (KeyError, IndexError):
pass
try:
meta["title"] = audio["title"][0].strip()
except (KeyError, IndexError):
pass
try:
meta["year"] = int(audio["date"][0]) # TODO is this ever a full date?
except (KeyError, IndexError, ValueError):
pass
try:
meta["genre"] = audio["genre"][0].strip()
except (KeyError, IndexError):
pass
return meta
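For reference, a sketch of what scan_mutagen_metadata might return for a fully tagged MP3; which keys appear depends on the tags the file actually carries, since each read is wrapped in its own try/except, and the values below are invented.

```python
# Hypothetical return value for a well-tagged MP3 (values are made up).
example_meta = {
    "format": "audio/mpeg",
    "length": 215,            # seconds, from audio.info.length
    "bitrate": 320000,        # bits per second, from audio.info.bitrate
    "track": 3,               # leading digits of the TRCK frame
    "artist": "Some Artist",  # TPE1
    "album": "Some Album",    # TALB
    "title": "Some Title",    # TIT2
    "year": 2004,             # TDRC
    "genre": "Rock",          # TCON
}
```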

View File

@@ -1,7 +1,49 @@
# known mimes
MIME_MPEG = "audio/mpeg"
KNOWN_MIMES = ["audio/mpeg", "audio/flac", "audio/x-wav", "image/jpeg", "image/png"]
MUSIC_TYPES = ["audio/mpeg", "audio/flac", "audio/x-wav"]
MPX_TYPES = ["audio/mpeg"]
FLAC_TYPES = ["audio/flac"]
WAV_TYPES = ["audio/x-wav"]
IMAGE_TYPES = ["image/jpeg", "image/png", "image/gif"]
MIME_FLAC = "audio/flac"
MIME_XFLAC = "audio/x-flac"
MIME_XWAV = "audio/x-wav"
MIME_JPEG = "image/jpeg"
MIME_PNG = "image/png"
MIME_GIF = "image/gif"
# groupings of similar files by mime
KNOWN_MIMES = [MIME_MPEG, MIME_FLAC, MIME_XFLAC, MIME_XWAV, MIME_JPEG, MIME_PNG]
MUSIC_TYPES = [MIME_MPEG, MIME_FLAC, MIME_XFLAC, MIME_XWAV]
MPX_TYPES = [MIME_MPEG]
FLAC_TYPES = [MIME_FLAC, MIME_XFLAC]
WAV_TYPES = [MIME_XWAV]
IMAGE_TYPES = [MIME_JPEG, MIME_PNG, MIME_GIF]
IMAGE_EXTENSIONS = ["jpg", "jpeg", "png", "gif"]
MUSIC_EXTENSIONS = ["mp3", "flac", "wav"]
TYPE_TO_EXTENSION = {
MIME_MPEG: "mp3",
MIME_FLAC: "flac",
MIME_XFLAC: "flac",
MIME_XWAV: "wav",
MIME_JPEG: "jpg",
MIME_PNG: "png",
}
EXTENSION_TO_TYPE = {
"mp3": MIME_MPEG,
"flac": MIME_FLAC,
"wav": MIME_XWAV,
"jpg": MIME_JPEG,
"png": MIME_PNG,
}
UNKNOWN_MIME = None
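A quick check of how the two new mappings relate; note that both FLAC MIME types map to the flac extension, while the extension maps back only to audio/flac.

```python
from pysonic.types import (TYPE_TO_EXTENSION, EXTENSION_TO_TYPE,
                           MIME_MPEG, MIME_FLAC, MIME_XFLAC)

assert TYPE_TO_EXTENSION[MIME_FLAC] == "flac"
assert TYPE_TO_EXTENSION[MIME_XFLAC] == "flac"
assert EXTENSION_TO_TYPE["flac"] == MIME_FLAC    # x-flac is not round-tripped
assert EXTENSION_TO_TYPE["mp3"] == MIME_MPEG
```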

View File

@@ -1,9 +1,17 @@
beautifulsoup4==4.6.0
cheroot==5.8.3
CherryPy==11.0.0
lxml==3.8.0
mutagen==1.38
portend==2.1.2
pytz==2017.2
six==1.10.0
tempora==1.8
beautifulsoup4==4.11.1
cheroot==8.6.0
CherryPy==18.6.1
jaraco.classes==3.2.1
jaraco.collections==3.5.1
jaraco.context==4.1.1
jaraco.functools==3.5.0
jaraco.text==3.7.0
lxml==4.9.0
more-itertools==8.13.0
mutagen==1.40.0
portend==3.1.0
pytz==2018.3
six==1.11.0
soupsieve==2.3.2.post1
tempora==5.0.1
zc.lockfile==2.0

8
start.sh Normal file
View File

@@ -0,0 +1,8 @@
#!/bin/bash
set -x
chmod 755 /db/.
chown -R app:app /db
exec sudo --preserve-env -Hu app pysonicd $@