# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above copyright
-# notice, this list of conditions and the following disclaimer in the
-# documentation and/or other materials provided with the distribution.
-# * Neither the name of Jamaendo nor the
-# names of its contributors may be used to endorse or promote products
-# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# An improved, structured jamendo API wrapper for the N900 with cacheing
# Image / cover downloads.. and more?
-import urllib, threading, os, gzip, time, simplejson, re
+import urllib, threading, os, time, simplejson, re
+import logging, hashlib
# On-disk cache locations; both are None until set_cache_dir() is called.
_CACHEDIR = None
_COVERDIR = None
# URL templates built on the get2 API root; formatted with a track id
# (_OGGURL) or an album id (_TORRENTURL).
_OGGURL = _GET2+'stream/track/redirect/?id=%d&streamencoding=ogg2'
_TORRENTURL = _GET2+'bittorrent/file/redirect/?album_id=%d&type=archive&class=mp32'
-def set_cache_dir(cachedir):
- global _CACHEDIR
- global _COVERDIR
- _CACHEDIR = cachedir
- _COVERDIR = os.path.join(_CACHEDIR, 'covers')
-
- try:
- os.makedirs(_CACHEDIR)
- except OSError:
- pass
-
- try:
- os.makedirs(_COVERDIR)
- except OSError:
- pass
+try:
+ log = logging.getLogger(__name__)
+except:
+ class StdoutLogger(object):
+ def info(self, s, *args):
+ print s % (args)
+ def debug(self, s, *args):
+ pass#print s % (args)
+ log = StdoutLogger()
# These classes can be partially constructed,
# and if asked for a property they don't know,
# Field lists requested from the get2 API for each entity type; they
# become attributes on the corresponding LazyQuery subclass instances.
_ARTIST_FIELDS = ['id', 'name', 'image']
_ALBUM_FIELDS = ['id', 'name', 'image', 'artist_name', 'artist_id', 'license_url']
_TRACK_FIELDS = ['id', 'name', 'image', 'artist_id', 'artist_name', 'album_name', 'album_id', 'numalbum', 'duration']
_RADIO_FIELDS = ['id', 'name', 'idstr', 'image']
class LazyQuery(object):
def __repr__(self):
try:
return u"%s(%s)"%(self.__class__.__name__,
- u", ".join(repr(v) for k,v in self.__dict__.iteritems() if not k.startswith('_')))
+ u", ".join(("%s:%s"%(k,repr(v))) for k,v in self.__dict__.iteritems() if not k.startswith('_')))
except UnicodeEncodeError:
- import traceback
- traceback.print_exc()
+ #import traceback
+ #traceback.print_exc()
return u"%s(?)"%(self.__class__.__name__)
class Artist(LazyQuery):
self.ID = int(ID)
self.name = None
self.image = None
+ self.artist_id = None
self.artist_name = None
self.album_name = None
self.album_id = None
return _OGGURL%(self.ID)
    def _needs_load(self):
        # True while any of the listed lazily-populated fields is still
        # unset; delegates to LazyQuery._needs_load_impl.
        return self._needs_load_impl('name', 'artist_name', 'artist_id', 'album_name', 'album_id', 'numalbum', 'duration')
    def _set_from(self, other):
        # Copy every known field from another instance of the same type
        # (used when a fuller record arrives for a partially-built object).
        return self._set_from_impl(other, 'name', 'image', 'artist_name', 'artist_id', 'album_name', 'album_id', 'numalbum', 'duration')
class Radio(LazyQuery):
def __init__(self, ID, json=None):
# In-memory cache limits (number of objects kept per entity type).
_CACHED_ALBUMS = 200
_CACHED_TRACKS = 500
_CACHED_RADIOS = 10
# cache size for covers; this one is persistent (files on disk)
_CACHED_COVERS = 2048
# TODO: cache queries?
pass
def _geturl(self, url):
- print "*** %s" % (url)
+ log.info("%s", url)
Query._ratelimit()
- f = urllib.urlopen(url)
- ret = simplejson.load(f)
- f.close()
+ try:
+ f = urllib.urlopen(url)
+ ret = simplejson.load(f)
+ f.close()
+ except Exception, e:
+ return None
return ret
def __str__(self):
def execute(self):
raise NotImplemented
-import threading
-
class CoverFetcher(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
self.work = []
def _fetch_cover(self, albumid, size):
- coverdir = _COVERDIR if _COVERDIR else '/tmp'
- to = os.path.join(coverdir, '%d-%d.jpg'%(albumid, size))
- if not os.path.isfile(to):
- url = _GET2+'image/album/redirect/?id=%d&imagesize=%d'%(albumid, size)
- urllib.urlretrieve(url, to)
- return to
+ try:
+ coverdir = _COVERDIR if _COVERDIR else '/tmp'
+ to = os.path.join(coverdir, '%d-%d.jpg'%(albumid, size))
+ if not os.path.isfile(to):
+ url = _GET2+'image/album/redirect/?id=%d&imagesize=%d'%(albumid, size)
+ urllib.urlretrieve(url, to)
+ return to
+ except Exception, e:
+ return None
+
+ def _fetch_image(self, url):
+ try:
+ h = hashlib.md5(url).hexdigest()
+ coverdir = _COVERDIR if _COVERDIR else '/tmp'
+ to = os.path.join(coverdir, h+'.jpg')
+ if not os.path.isfile(to):
+ urllib.urlretrieve(url, to)
+ return to
+ except Exception, e:
+ return None
def request_cover(self, albumid, size, cb):
self.cond.acquire()
self.cond.notify()
self.cond.release()
+ def request_images(self, urls, cb):
+ """cb([(url, image)])"""
+ self.cond.acquire()
+ self.work.insert(0, ('images', urls, cb))
+ self.cond.notify()
+ self.cond.release()
+
def run(self):
while True:
work = []
self.cond.release()
multi = len(work) > 1
- for albumid, size, cb in work:
- cover = self._fetch_cover(albumid, size)
- cb(albumid, size, cover)
+ for job in work:
+ if job[0] == 'images':
+ self.process_images(job[1], job[2])
+ else:
+ self.process_cover(*job)
if multi:
time.sleep(1.0)
+ def process_cover(self, albumid, size, cb):
+ albumid, size, cb = job
+ cover = self._fetch_cover(albumid, size)
+ if cover:
+ cb(albumid, size, cover)
+
+ def process_images(self, urls, cb):
+ results = [(url, image) for url, image in ((url, self._fetch_image(url)) for url in urls) if image is not None]
+ if results:
+ cb(results)
+
class CoverCache(object):
    """Caches cover art on disk and in memory.

    Downloads are delegated to a background CoverFetcher thread.
    """
    def __init__(self):
        self._covers = {}  # (albumid, size) -> local file path
        self._images = {}  # url -> local file path
        self._fetcher = CoverFetcher()
        self._fetcher.start()
        # index covers already on disk, when a cache dir is configured
        if _COVERDIR and os.path.isdir(_COVERDIR):
            self.prime_cache()
+
+ def prime_cache(self):
+ coverdir = _COVERDIR
+ covermatch = re.compile(r'(\d+)\-(\d+)\.jpg')
+
+ prev_covers = os.listdir(coverdir)
+
+ if len(prev_covers) > _CACHED_COVERS:
+ import random
+ dropn = len(prev_covers) - _CACHED_COVERS
+ todrop = random.sample(prev_covers, dropn)
+ log.warning("Deleting from cache: %s", todrop)
+ for d in todrop:
+ m = covermatch.match(d)
+ if m:
+ try:
+ os.unlink(os.path.join(coverdir, d))
+ except OSError, e:
+ log.exception('unlinking failed')
+
+ for fil in os.listdir(coverdir):
+ fl = os.path.join(coverdir, fil)
+ m = covermatch.match(fil)
+ if m and os.path.isfile(fl):
+ self._covers[(int(m.group(1)), int(m.group(2)))] = fl
def fetch_cover(self, albumid, size):
- coverdir = _COVERDIR if _COVERDIR else '/tmp'
- to = os.path.join(coverdir, '%d-%d.jpg'%(albumid, size))
- if not os.path.isfile(to):
- url = _GET2+'image/album/redirect/?id=%d&imagesize=%d'%(albumid, size)
- urllib.urlretrieve(url, to)
- self._covers[(albumid, size)] = to
- return to
+ coverdir = _COVERDIR
+ if coverdir:
+ to = os.path.join(coverdir, '%d-%d.jpg'%(albumid, size))
+ if not os.path.isfile(to):
+ url = _GET2+'image/album/redirect/?id=%d&imagesize=%d'%(albumid, size)
+ urllib.urlretrieve(url, to)
+ self._covers[(albumid, size)] = to
+ return to
+ return None
def get_cover(self, albumid, size):
cover = self._covers.get((albumid, size), None)
if cover:
cb(albumid, size, cover)
else:
- self._fetcher.request_cover(albumid, size, cb)
+ def cb2(albumid, size, cover):
+ self._covers[(albumid, size)] = cover
+ cb(albumid, size, cover)
+ self._fetcher.request_cover(albumid, size, cb2)
+
+ def get_images_async(self, url_list, cb):
+ found = []
+ lookup = []
+ for url in url_list:
+ image = self._images.get(url, None)
+ if image:
+ found.append((url, image))
+ else:
+ lookup.append(url)
+ if found:
+ cb(found)
+
+ if lookup:
+ def cb2(results):
+ for url, image in results:
+ self._images[url] = image
+ cb(results)
+ self._fetcher.request_images(lookup, cb2)
_cover_cache = CoverCache()
def set_cache_dir(cachedir):
    """Set the on-disk cache root, create its directories and index any
    covers already present there."""
    global _CACHEDIR
    global _COVERDIR
    _CACHEDIR = cachedir
    _COVERDIR = os.path.join(_CACHEDIR, 'covers')

    for directory in (_CACHEDIR, _COVERDIR):
        try:
            os.makedirs(directory)
        except OSError:
            # directory already exists (or cannot be created) -- best
            # effort, exactly as before
            pass

    _cover_cache.prime_cache()
+
def get_album_cover(albumid, size=100):
    # Synchronous wrapper around the module-level CoverCache.
    return _cover_cache.get_cover(albumid, size)
def get_album_cover_async(cb, albumid, size=100):
    # Asynchronous wrapper: cb(albumid, size, cover) when available.
    _cover_cache.get_async(albumid, size, cb)
def get_images_async(cb, url_list):
    # Asynchronous batch fetch of arbitrary image URLs: cb([(url, image)]).
    _cover_cache.get_images_async(url_list, cb)
+
class CustomQuery(Query):
def __init__(self, url):
Query.__init__(self)
'params' : 'artist_id=%d',
'constructor' : Artist
},
+ 'artist_list' : {
+ 'url' : _GET2+'+'.join(_ALBUM_FIELDS)+'/artist/json/?',
+ 'params' : 'artist_id=%s',
+ 'constructor' : Album
+ },
'album' : {
'url' : _GET2+'+'.join(_ALBUM_FIELDS)+'/album/json/?',
'params' : 'album_id=%d',
'constructor' : Album
},
+ 'album_list' : {
+ 'url' : _GET2+'+'.join(_ALBUM_FIELDS)+'/album/json/?',
+ 'params' : 'album_id=%s',
+ 'constructor' : Album
+ },
'albums' : {
'url' : _GET2+'+'.join(_ALBUM_FIELDS)+'/album/json/?',
'params' : 'artist_id=%d',
'params' : 'id=%d',
'constructor' : Track
},
+ 'track_list' : {
+ 'url' : _GET2+'+'.join(_TRACK_FIELDS)+'/track/json/track_album+album_artist?',
+ 'params' : 'id=%s',
+ 'constructor' : Track
+ },
'tracks' : {
'url' : _GET2+'+'.join(_TRACK_FIELDS)+'/track/json/track_album+album_artist?',
'params' : 'order=numalbum_asc&album_id=%d',
'params' : 'user_idstr=%s',
'constructor' : [Album]
},
- #http://api.jamendo.com/get2/id+name+url+image+artist_name/album/jsonpretty/album_user_starred/?user_idstr=sylvinus&n=all
- #q = SearchQuery('album', user_idstr=user)
-
}
-#http://api.jamendo.com/get2/id+name+image+artist_name+album_name+album_id+numalbum+duration/track/json/radio_track_inradioplaylist+track_album+album_artist/?order=numradio_asc&radio_id=283
def __init__(self, what, ID):
Query.__init__(self)
old._set_from(item)
else:
cache[item.ID] = item
+ if isinstance(item, Artist) and item.albums:
+ for album in item.albums:
+ _update_cache(_albums, album)
+ elif isinstance(item, Album) and item.tracks:
+ for track in item.tracks:
+ _update_cache(_tracks, track)
+ # enforce cache limits here!
+ # also, TODO: save/load cache between sessions
+ # that will require storing a timestamp with
+ # each item, though..
+ # perhaps,
+ # artists: 1 day - changes often
+ # albums: 2-5 days - changes less often (?)
+ # tracks: 1 week - changes rarely, queried often
def get_artist(artist_id):
"""Returns: Artist"""
a = a[0]
return a
def get_artists(artist_ids):
    """Returns: [Artist]

    Serves what it can from the in-memory cache, then fetches the
    remaining ids with one batched 'artist_list' query.
    """
    assert(isinstance(artist_ids, list))
    cached = []
    missing = []
    for artist_id in artist_ids:
        artist = _artists.get(artist_id, None)
        if artist:
            cached.append(artist)
        else:
            missing.append(artist_id)
    if not missing:
        return cached
    query = GetQuery('artist_list', '+'.join(str(x) for x in missing))
    fetched = query.execute()
    if not fetched:
        raise JamendoAPIException(str(query))
    _update_cache(_artists, fetched)
    return cached + fetched
+
def get_album_list(album_ids):
    """Returns: [Album]

    Serves what it can from the in-memory cache, then fetches the
    remaining ids with one batched 'album_list' query.
    """
    assert(isinstance(album_ids, list))
    cached = []
    missing = []
    for album_id in album_ids:
        album = _albums.get(album_id, None)
        if album:
            cached.append(album)
        else:
            missing.append(album_id)
    if not missing:
        return cached
    query = GetQuery('album_list', '+'.join(str(x) for x in missing))
    fetched = query.execute()
    if not fetched:
        raise JamendoAPIException(str(query))
    _update_cache(_albums, fetched)
    return cached + fetched
+
def get_albums(artist_id):
    """Returns: [Album]
    Parameter can either be an artist_id or a list of album ids.
    """
    if isinstance(artist_id, list):
        return get_album_list(artist_id)
    # serve from the artist cache when the album list is already attached
    a = _artists.get(artist_id, None)
    if a and a.albums:
        return a.albums

    q = GetQuery('albums', artist_id)
    a = q.execute()
    if not a:
        # FIX: the "if not a:" branch had no body (and a stray "a = a[0]"
        # followed); fail loudly like the other query helpers here.
        raise JamendoAPIException(str(q))
    _update_cache(_albums, a)
    return a
def get_track_list(track_ids):
    """Returns: [Track]

    Serves what it can from the in-memory cache, then fetches the
    remaining ids with one batched 'track_list' query.
    """
    assert(isinstance(track_ids, list))
    cached = []
    missing = []
    for track_id in track_ids:
        track = _tracks.get(track_id, None)
        if track:
            cached.append(track)
        else:
            missing.append(track_id)
    if not missing:
        return cached
    query = GetQuery('track_list', '+'.join(str(x) for x in missing))
    fetched = query.execute()
    if not fetched:
        raise JamendoAPIException(str(query))
    _update_cache(_tracks, fetched)
    return cached + fetched
+
def get_tracks(album_id):
    """Returns: [Track]
    Parameter can either be an album_id or a list of track ids.
    """
    if isinstance(album_id, list):
        return get_track_list(album_id)
    # serve from the album cache when the track list is already attached
    a = _albums.get(album_id, None)
    if a and a.tracks:
        return a.tracks

    q = GetQuery('tracks', album_id)
    a = q.execute()
    if not a:
        # FIX: the trailing "if not a:" had no body; fail loudly like the
        # other query helpers here.
        raise JamendoAPIException(str(q))
    _update_cache(_tracks, a)
    return a
def _artist_loader(self):
    # Lazy loader installed as Artist.load: the first time a missing field
    # is accessed, fetch the full artist record plus its album list and
    # copy the values onto this instance.
    if self._needs_load():
        artist = get_artist(self.ID)
        artist.albums = get_albums(self.ID)
        self._set_from(artist)
Artist.load = _artist_loader