1 #!/usr/bin/env python2.5
4 # Copyright (c) 2007-2008 INdT.
5 # Copyright (c) 2011 Neal H. Walfield
6 # This program is free software: you can redistribute it and/or modify
7 # it under the terms of the GNU Lesser General Public License as published by
8 # the Free Software Foundation, either version 3 of the License, or
9 # (at your option) any later version.
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU Lesser General Public License for more details.
16 # You should have received a copy of the GNU Lesser General Public License
17 # along with this program. If not, see <http://www.gnu.org/licenses/>.
20 # ============================================================================
22 # Author : Yves Marcoz
24 # Description : Simple RSS Reader
25 # ============================================================================
28 from os.path import isfile, isdir
29 from shutil import rmtree
30 from os import mkdir, remove, utime
36 from BeautifulSoup import BeautifulSoup
37 from urlparse import urljoin
38 from calendar import timegm
39 from updatedbus import get_lock, release_lock
42 from wc import wc, wc_init
44 from jobmanager import JobManager
46 from httpprogresshandler import HTTPProgressHandler
51 return md5.new(string).hexdigest()
53 def download_callback(connection):
54 if JobManager().do_quit:
55 raise KeyboardInterrupt
57 def downloader(progress_handler=None, proxy=None):
61 openers.append (progress_handler)
63 openers.append(HTTPProgressHandler(download_callback))
66 openers.append (proxy)
68 return urllib2.build_opener (*openers)
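# A minimal usage sketch (illustrative only; the proxy handler and URL below
# are hypothetical, not part of this module):
#
#   proxy = urllib2.ProxyHandler({'http': 'http://localhost:8080'})
#   opener = downloader(proxy=proxy)
#   data = opener.open('http://example.com/feed.xml').read()
#
# With no arguments, downloader() falls back to an HTTPProgressHandler wired
# to download_callback(), so long transfers can be aborted cleanly on quit.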
71 serial_execution_lock = threading.Lock()
76 except AttributeError:
77 db = sqlite3.connect("%s/%s.db" % (self.dir, self.key), timeout=120)
82 def __init__(self, configdir, key):
84 self.configdir = configdir
85 self.dir = "%s/%s.d" %(self.configdir, self.key)
86 self.tls = threading.local ()
88 if not isdir(self.dir):
90 if not isfile("%s/%s.db" %(self.dir, self.key)):
91 self.db.execute("CREATE TABLE feed (id text, title text, contentLink text, date float, updated float, link text, read int);")
92 self.db.execute("CREATE TABLE images (id text, imagePath text);")
95 def addImage(self, configdir, key, baseurl, url, proxy=None, opener=None):
96 filename = configdir+key+".d/"+getId(url)
97 if not isfile(filename):
100 opener = downloader(proxy=proxy)
102 abs_url = urljoin(baseurl,url)
103 f = opener.open(abs_url)
104 outf = open(filename, "w")
108 except (urllib2.HTTPError, urllib2.URLError, IOError), exception:
109 print ("Could not download image %s: %s"
110 % (abs_url, str (exception)))
113 exception = sys.exc_info()[0]
115 print "Downloading image: %s" % abs_url
116 traceback.print_exc()
125 #open(filename,"a").close() # "Touch" the file
126 file = open(filename,"a")
127 utime(filename, None)
131 def updateFeed(self, configdir, url, etag, modified, expiryTime=24, proxy=None, imageCache=False, priority=0, postFeedUpdateFunc=None, *postFeedUpdateFuncArgs):
134 self._updateFeed(configdir, url, etag, modified, expiryTime, proxy, imageCache, postFeedUpdateFunc, *postFeedUpdateFuncArgs)
136 JobManager().execute(doit(), self.key, priority=priority)
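# updateFeed() only queues the work: the closure wrapping _updateFeed() is
# handed to the JobManager under this feed's key, so the download and parse
# run later on a worker thread at the requested priority.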
138 def _updateFeed(self, configdir, url, etag, modified, expiryTime=24, proxy=None, imageCache=False, postFeedUpdateFunc=None, *postFeedUpdateFuncArgs):
140 have_serial_execution_lock = False
143 update_lock = get_lock("key")
145 # Someone else is doing an update.
148 download_start = time.time ()
150 progress_handler = HTTPProgressHandler(download_callback)
152 openers = [progress_handler]
154 openers.append (proxy)
155 kwargs = {'handlers':openers}
157 tmp=feedparser.parse(url, etag=etag, modified=modified, **kwargs)
158 download_duration = time.time () - download_start
160 opener = downloader(progress_handler, proxy)
162 if JobManager().do_quit:
163 raise KeyboardInterrupt
165 process_start = time.time()
167 # Expiry time is in hours
168 expiry = float(expiryTime) * 3600.
172 have_woodchuck = mainthread.execute (wc().available)
176 wc().stream_register (self.key, "", 6 * 60 * 60)
177 except woodchuck.ObjectExistsError:
180 wc()[self.key].updated (
181 indicator=(woodchuck.Indicator.ApplicationVisual
182 |woodchuck.Indicator.StreamWide),
183 transferred_down=progress_handler.stats['received'],
184 transferred_up=progress_handler.stats['sent'],
185 transfer_time=download_start,
186 transfer_duration=download_duration,
187 new_objects=len (tmp.entries),
188 objects_inline=len (tmp.entries))
190 print "Failed to register update with woodchuck!"
193 http_status = tmp.get ('status', 200)
195 # Check if the parse was successful. If the http status code
196 # is 304, then the download was successful, but there is
197 # nothing new. Indeed, no content is returned. This makes a
198 # 304 look like an error because there are no entries and the
199 # parse fails. But really, everything went great! Check for
201 if http_status == 304:
202 print "%s: No changes to feed." % (self.key,)
203 mainthread.execute (wc_success, async=True)
205 elif len(tmp["entries"])==0 and not tmp.version:
206 # An error occurred fetching or parsing the feed. (Version
207 # will be either None if e.g. the connection timed out or
208 # '' if the data is not a proper feed)
209 print ("Error fetching %s: version is: %s: error: %s"
210 % (url, str (tmp.version),
211 str (tmp.get ('bozo_exception', 'Unknown error'))))
215 print "%s: stream update failed!" % self.key
218 # It's not easy to get the feed's title from here.
219 # At the latest, the next time the application is
220 # started, we'll fix up the human readable name.
221 wc().stream_register (self.key, "", 6 * 60 * 60)
222 except woodchuck.ObjectExistsError:
224 ec = woodchuck.TransferStatus.TransientOther
225 if 300 <= http_status < 400:
226 ec = woodchuck.TransferStatus.TransientNetwork
227 if 400 <= http_status < 500:
228 ec = woodchuck.TransferStatus.FailureGone
229 if 500 <= http_status < 600:
230 ec = woodchuck.TransferStatus.TransientNetwork
231 wc()[self.key].update_failed(ec)
232 mainthread.execute (e, async=True)
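# The mapping above reports the failure class to Woodchuck: 3xx and 5xx
# responses count as transient network errors, 4xx as a permanent failure
# (FailureGone), and anything else as TransientOther.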
234 currentTime = time.time()
235 # The etag and modified values should only be updated if the content was not null
241 modified = tmp["modified"]
245 abs_url = urljoin(tmp["feed"]["link"],"/favicon.ico")
246 f = opener.open(abs_url)
249 outf = open(self.dir+"/favicon.ico", "w")
253 except (urllib2.HTTPError, urllib2.URLError), exception:
254 print ("Could not download favicon %s: %s"
255 % (abs_url, str (exception)))
257 self.serial_execution_lock.acquire ()
258 have_serial_execution_lock = True
260 #reversedEntries = self.getEntries()
261 #reversedEntries.reverse()
265 tmp["entries"].reverse()
266 for entry in tmp["entries"]:
267 # Yield so as to make the main thread a bit more
271 if JobManager().do_quit:
272 raise KeyboardInterrupt
274 received_base = progress_handler.stats['received']
275 sent_base = progress_handler.stats['sent']
278 date = self.extractDate(entry)
282 entry["title"] = "No Title"
290 entry["author"] = None
291 if not entry.has_key("id"):
293 content = self.extractContent(entry)
294 object_size = len (content)
295 received_base -= len (content)
296 tmpEntry = {"title":entry["title"], "content":content,
297 "date":date, "link":entry["link"], "author":entry["author"], "id":entry["id"]}
298 id = self.generateUniqueId(tmpEntry)
300 #articleTime = time.mktime(self.entries[id]["dateTuple"])
301 soup = BeautifulSoup(self.getArticle(tmpEntry)) #tmpEntry["content"])
303 baseurl = tmpEntry["link"]
305 if imageCache and len(images) > 0:
306 self.serial_execution_lock.release ()
307 have_serial_execution_lock = False
309 filename = self.addImage(configdir, self.key, baseurl, img['src'], proxy=proxy)
311 img['src']="file://%s" %filename
312 count = self.db.execute("SELECT count(1) FROM images where id=? and imagePath=?;", (id, filename )).fetchone()[0]
314 self.db.execute("INSERT INTO images (id, imagePath) VALUES (?, ?);", (id, filename) )
318 object_size += os.path.getsize (filename)
319 except os.error, exception:
320 print ("Error getting size of %s: %s"
321 % (filename, exception))
323 self.serial_execution_lock.acquire ()
324 have_serial_execution_lock = True
326 tmpEntry["contentLink"] = configdir+self.key+".d/"+id+".html"
327 file = open(tmpEntry["contentLink"], "w")
328 file.write(soup.prettify())
331 self.db.execute("UPDATE feed SET updated=? WHERE id=?;", (currentTime, id) )
334 values = (id, tmpEntry["title"], tmpEntry["contentLink"], tmpEntry["date"], currentTime, tmpEntry["link"], 0)
335 self.db.execute("INSERT INTO feed (id, title, contentLink, date, updated, link, read) VALUES (?, ?, ?, ?, ?, ?, ?);", values)
339 # self.db.execute("UPDATE feed SET updated=? WHERE id=?;", (currentTime, id) )
341 # filename = configdir+self.key+".d/"+id+".html"
342 # file = open(filename,"a")
343 # utime(filename, None)
345 # images = self.db.execute("SELECT imagePath FROM images where id=?;", (id, )).fetchall()
346 # for image in images:
347 # file = open(image[0],"a")
348 # utime(image[0], None)
353 # Register the object with Woodchuck and mark it as
358 obj = wc()[self.key].object_register(
359 object_identifier=id,
360 human_readable_name=tmpEntry["title"])
361 except woodchuck.ObjectExistsError:
362 obj = wc()[self.key][id]
364 # If the entry does not contain a publication
365 # time, the attribute won't exist.
366 pubtime = entry.get ('date_parsed', None)
368 obj.publication_time = time.mktime (pubtime)
370 received = (progress_handler.stats['received']
372 sent = progress_handler.stats['sent'] - sent_base
374 indicator=(woodchuck.Indicator.ApplicationVisual
375 |woodchuck.Indicator.StreamWide),
376 transferred_down=received,
378 object_size=object_size)
379 mainthread.execute(e, async=True)
382 print ("%s: Update successful: transferred: %d/%d; objects: %d)"
384 progress_handler.stats['sent'],
385 progress_handler.stats['received'],
387 mainthread.execute (wc_success, async=True)
390 rows = self.db.execute("SELECT id FROM feed WHERE (read=0 AND updated<?) OR (read=1 AND updated<?);", (currentTime-2*expiry, currentTime-expiry))
392 self.removeEntry(row[0])
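# Expiry policy implied by the SELECT above: read articles are removed once
# they are older than the configured expiry, unread ones are kept twice as long.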
394 from glob import glob
396 for file in glob(configdir+self.key+".d/*"):
400 # put the two dates into matching format
402 lastmodDate = stats[8]
404 expDate = time.time()-expiry*3
405 # check if image-last-modified-date is outdated
407 if expDate > lastmodDate:
411 #print 'Removing', file
413 # XXX: Tell woodchuck.
414 remove(file)
416 except OSError, exception:
418 print 'Could not remove %s: %s' % (file, str (exception))
419 print ("updated %s: %fs in download, %fs in processing"
420 % (self.key, download_duration,
421 time.time () - process_start))
423 print "Updating %s: %s" % (self.key, sys.exc_info()[0])
424 traceback.print_exc()
428 if have_serial_execution_lock:
429 self.serial_execution_lock.release ()
431 if update_lock is not None:
432 release_lock (update_lock)
436 rows = self.db.execute("SELECT MAX(date) FROM feed;")
440 print "Fetching update time."
441 traceback.print_exc()
448 title = tmp.feed.title
449 except (AttributeError, UnboundLocalError), exception:
451 if postFeedUpdateFunc is not None:
452 postFeedUpdateFunc (self.key, updateTime, etag, modified,
453 title, *postFeedUpdateFuncArgs)
455 def setEntryRead(self, id):
456 self.db.execute("UPDATE feed SET read=1 WHERE id=?;", (id,) )
462 wc()[self.key][id].used()
466 def setEntryUnread(self, id):
467 self.db.execute("UPDATE feed SET read=0 WHERE id=?;", (id,) )
470 def markAllAsRead(self):
471 self.db.execute("UPDATE feed SET read=1 WHERE read=0;")
474 def isEntryRead(self, id):
475 read_status = self.db.execute("SELECT read FROM feed WHERE id=?;", (id,) ).fetchone()[0]
476 return read_status==1 # Returns True if read==1, and False if read==0
478 def getTitle(self, id):
479 return self.db.execute("SELECT title FROM feed WHERE id=?;", (id,) ).fetchone()[0]
481 def getContentLink(self, id):
482 return self.db.execute("SELECT contentLink FROM feed WHERE id=?;", (id,) ).fetchone()[0]
484 def getExternalLink(self, id):
485 return self.db.execute("SELECT link FROM feed WHERE id=?;", (id,) ).fetchone()[0]
487 def getDate(self, id):
488 dateStamp = self.db.execute("SELECT date FROM feed WHERE id=?;", (id,) ).fetchone()[0]
489 return time.strftime("%a, %d %b %Y %H:%M:%S", time.localtime(dateStamp))
491 def getDateTuple(self, id):
492 dateStamp = self.db.execute("SELECT date FROM feed WHERE id=?;", (id,) ).fetchone()[0]
493 return time.localtime(dateStamp)
495 def getDateStamp(self, id):
496 return self.db.execute("SELECT date FROM feed WHERE id=?;", (id,) ).fetchone()[0]
498 def generateUniqueId(self, entry):
499 if(entry["id"] != None):
500 return getId(str(entry["id"]))
503 return getId(str(entry["date"]) + str(entry["title"]))
505 #print entry["title"]
506 return getId(str(entry["date"]))
508 def getIds(self, onlyUnread=False):
510 rows = self.db.execute("SELECT id FROM feed where read=0 ORDER BY date DESC;").fetchall()
512 rows = self.db.execute("SELECT id FROM feed ORDER BY date DESC;").fetchall()
519 def getNextId(self, id):
521 index = ids.index(id)
522 return ids[(index+1)%len(ids)]
524 def getPreviousId(self, id):
526 index = ids.index(id)
527 return ids[(index-1)%len(ids)]
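# Both helpers wrap around thanks to the modulo: stepping past the last entry
# returns the first one, and stepping before the first returns the last.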
529 def getNumberOfUnreadItems(self):
530 return self.db.execute("SELECT count(*) FROM feed WHERE read=0;").fetchone()[0]
532 def getNumberOfEntries(self):
533 return self.db.execute("SELECT count(*) FROM feed;").fetchone()[0]
535 def getArticle(self, entry):
536 #self.setEntryRead(id)
537 #entry = self.entries[id]
538 title = entry['title']
539 #content = entry.get('content', entry.get('summary_detail', {}))
540 content = entry["content"]
543 author = entry['author']
544 date = time.strftime("%a, %d %b %Y %H:%M:%S", time.localtime(entry["date"]) )
546 #text = '''<div style="color: black; background-color: white;">'''
547 text = '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">'
548 text += "<html><head><title>" + title + "</title>"
549 text += '<meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>\n'
550 #text += '<style> body {-webkit-user-select: none;} </style>'
551 text += '</head><body bgcolor=\"#ffffff\"><div><a href=\"' + link + '\">' + title + "</a>"
553 text += "<BR /><small><i>Author: " + author + "</i></small>"
554 text += "<BR /><small><i>Date: " + date + "</i></small></div>"
555 text += "<BR /><BR />"
557 text += "</body></html>"
560 def getContent(self, id):
561 contentLink = self.db.execute("SELECT contentLink FROM feed WHERE id=?;", (id,)).fetchone()[0]
563 file = open(contentLink) # use the path fetched from the feed table above
564 content = file.read()
567 content = "Content unavailable"
570 def extractDate(self, entry):
571 if entry.has_key("updated_parsed"):
572 return timegm(entry["updated_parsed"])
573 elif entry.has_key("published_parsed"):
574 return timegm(entry["published_parsed"])
578 def extractContent(self, entry):
580 if entry.has_key('summary'):
581 content = entry.get('summary', '')
582 if entry.has_key('content'):
583 if len(entry.content[0].value) > len(content):
584 content = entry.content[0].value
586 content = entry.get('description', '')
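# Preference order: start from 'summary', upgrade to the first 'content' block
# when it is longer, and fall back to 'description' if nothing else was found.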
589 def removeEntry(self, id):
590 contentLink = self.db.execute("SELECT contentLink FROM feed WHERE id=?;", (id,)).fetchone()[0]
594 except OSError, exception:
595 print "Deleting %s: %s" % (contentLink, str (exception))
596 self.db.execute("DELETE FROM feed WHERE id=?;", (id,) )
597 self.db.execute("DELETE FROM images WHERE id=?;", (id,) )
603 wc()[self.key][id].files_deleted (
604 woodchuck.DeletionResponse.Deleted)
605 del wc()[self.key][id]
608 mainthread.execute (e, async=True)
610 class ArchivedArticles(Feed):
611 def addArchivedArticle(self, title, link, date, configdir):
612 id = self.generateUniqueId({"date":date, "title":title})
613 values = (id, title, link, date, 0, link, 0)
614 self.db.execute("INSERT INTO feed (id, title, contentLink, date, updated, link, read) VALUES (?, ?, ?, ?, ?, ?, ?);", values)
617 def updateFeed(self, configdir, url, etag, modified, expiryTime=24, proxy=None, imageCache=False):
619 rows = self.db.execute("SELECT id, link FROM feed WHERE updated=0;")
621 currentTime = time.time()
624 f = urllib2.urlopen(link)
625 #entry["content"] = f.read()
628 soup = BeautifulSoup(html)
632 filename = self.addImage(configdir, self.key, baseurl, img['src'], proxy=proxy)
634 self.db.execute("INSERT INTO images (id, imagePath) VALUES (?, ?);", (id, filename) )
636 contentLink = configdir+self.key+".d/"+id+".html"
637 file = open(contentLink, "w")
638 file.write(soup.prettify())
641 self.db.execute("UPDATE feed SET read=0, contentLink=?, updated=? WHERE id=?;", (contentLink, time.time(), id) )
643 return (currentTime, None, None)
645 def purgeReadArticles(self):
646 rows = self.db.execute("SELECT id FROM feed WHERE read=1;")
649 self.removeArticle(row[0])
651 def removeArticle(self, id):
652 rows = self.db.execute("SELECT imagePath FROM images WHERE id=?;", (id,) )
655 count = self.db.execute("SELECT count(*) FROM images WHERE id!=? and imagePath=?;", (id,row[0]) ).fetchone()[0]
666 except AttributeError:
667 db = sqlite3.connect("%s/feeds.db" % self.configdir, timeout=120)
670 db = property(_getdb)
672 # Lists all the feeds in a dictionary, and exposes the data
673 def __init__(self, config, configdir):
675 self.configdir = configdir
677 self.tls = threading.local ()
680 table = self.db.execute("SELECT sql FROM sqlite_master").fetchone()
682 self.db.execute("CREATE TABLE feeds(id text, url text, title text, unread int, updateTime float, rank int, etag text, modified text, widget int, category int);")
683 self.db.execute("CREATE TABLE categories(id text, title text, unread int, rank int);")
684 self.addCategory("Default Category")
685 if isfile(self.configdir+"feeds.pickle"):
686 self.importOldFormatFeeds()
688 self.addFeed("Maemo News", "http://maemo.org/news/items.xml")
690 from string import find, upper
691 if find(upper(table[0]), "WIDGET")<0:
692 self.db.execute("ALTER TABLE feeds ADD COLUMN widget int;")
693 self.db.execute("UPDATE feeds SET widget=1;")
695 if find(upper(table[0]), "CATEGORY")<0:
696 self.db.execute("CREATE TABLE categories(id text, title text, unread int, rank int);")
697 self.addCategory("Default Category")
698 self.db.execute("ALTER TABLE feeds ADD COLUMN category int;")
699 self.db.execute("UPDATE feeds SET category=1;")
704 # Check that Woodchuck's state is up to date with respect to our
708 # The list of known streams.
709 streams = wc().streams_list ()
710 stream_ids = [s.identifier for s in streams]
712 # Register any unknown streams. Remove known streams from
714 for key in self.getListOfFeeds():
715 title = self.getFeedTitle(key)
716 # XXX: We should also check whether the list of
717 # articles/objects in each feed/stream is up to date.
718 if key not in stream_ids:
719 print ("Registering previously unknown channel: %s (%s)"
721 # Use a default refresh interval of 6 hours.
722 wc().stream_register (key, title, 6 * 60 * 60)
724 # Make sure the human readable name is up to date.
725 if wc()[key].human_readable_name != title:
726 wc()[key].human_readable_name = title
727 stream_ids.remove (key)
730 # Unregister any streams that are no longer subscribed to.
731 for id in stream_ids:
732 print ("Unregistering %s" % (id,))
733 wc().stream_unregister (id)
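# Net effect of the synchronisation above: every subscribed feed has a
# Woodchuck stream (registered if missing, renamed if its title changed), and
# streams for feeds that are no longer subscribed are unregistered.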
735 def importOldFormatFeeds(self):
736 """This function loads feeds that are saved in an outdated format, and converts them to sqlite"""
738 listing = rss.Listing(self.configdir)
740 for id in listing.getListOfFeeds():
743 values = (id, listing.getFeedTitle(id) , listing.getFeedUrl(id), 0, time.time(), rank, None, "None", 1)
744 self.db.execute("INSERT INTO feeds (id, title, url, unread, updateTime, rank, etag, modified, widget, category) VALUES (?, ?, ? ,? ,? ,?, ?, ?, ?, 1);", values)
747 feed = listing.getFeed(id)
748 new_feed = self.getFeed(id)
750 items = feed.getIds()[:]
753 if feed.isEntryRead(item):
757 date = timegm(feed.getDateTuple(item))
758 title = feed.getTitle(item)
759 newId = new_feed.generateUniqueId({"date":date, "title":title})
760 values = (newId, title, feed.getContentLink(item), date, time.time(), feed.getExternalLink(item), read_status)
761 new_feed.db.execute("INSERT INTO feed (id, title, contentLink, date, updated, link, read) VALUES (?, ?, ?, ?, ?, ?, ?);", values)
764 images = feed.getImages(item)
766 new_feed.db.execute("INSERT INTO images (id, imagePath) VALUES (?, ?);", (item, image) )
770 self.updateUnread(id)
772 traceback.print_exc()
773 remove(self.configdir+"feeds.pickle")
776 def addArchivedArticle(self, key, index):
777 feed = self.getFeed(key)
778 title = feed.getTitle(index)
779 link = feed.getExternalLink(index)
780 date = feed.getDate(index)
781 count = self.db.execute("SELECT count(*) FROM feeds where id=?;", ("ArchivedArticles",) ).fetchone()[0]
783 self.addFeed("Archived Articles", "", id="ArchivedArticles")
785 archFeed = self.getFeed("ArchivedArticles")
786 archFeed.addArchivedArticle(title, link, date, self.configdir)
787 self.updateUnread("ArchivedArticles")
789 def updateFeed(self, key, expiryTime=None, proxy=None, imageCache=None,
791 if expiryTime is None:
792 expiryTime = self.config.getExpiry()
794 # Default to 24 hours
797 (use_proxy, proxy) = self.config.getProxy()
800 if imageCache is None:
801 imageCache = self.config.getImageCache()
803 feed = self.getFeed(key)
804 (url, etag, modified) = self.db.execute("SELECT url, etag, modified FROM feeds WHERE id=?;", (key,) ).fetchone()
806 modified = time.struct_time(eval(modified))
810 self.configdir, url, etag, modified, expiryTime, proxy, imageCache,
811 priority, postFeedUpdateFunc=self._queuePostFeedUpdate)
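# 'modified' is stored in the feeds table as str(tuple(...)) by
# _postFeedUpdate() below, so the eval() above rebuilds the struct_time that
# feedparser needs to issue a conditional (If-Modified-Since) request.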
813 def _queuePostFeedUpdate(self, *args, **kwargs):
814 mainthread.execute (self._postFeedUpdate, async=True, *args, **kwargs)
816 def _postFeedUpdate(self, key, updateTime, etag, modified, title):
820 modified=str(tuple(modified))
822 self.db.execute("UPDATE feeds SET updateTime=?, etag=?, modified=? WHERE id=?;", (updateTime, etag, modified, key) )
824 self.db.execute("UPDATE feeds SET etag=?, modified=? WHERE id=?;", (etag, modified, key) )
826 if title is not None:
827 self.db.execute("UPDATE feeds SET title=(case WHEN title=='' THEN ? ELSE title END) where id=?;",
830 self.updateUnread(key)
832 def getFeed(self, key):
833 if key == "ArchivedArticles":
834 return ArchivedArticles(self.configdir, key)
835 return Feed(self.configdir, key)
837 def editFeed(self, key, title, url, category=None):
839 self.db.execute("UPDATE feeds SET title=?, url=?, category=? WHERE id=?;", (title, url, category, key))
841 self.db.execute("UPDATE feeds SET title=?, url=? WHERE id=?;", (title, url, key))
846 wc()[key].human_readable_name = title
848 print "Feed %s (%s) unknown." % (key, title)
851 def getFeedUpdateTime(self, key):
852 return time.ctime(self.db.execute("SELECT updateTime FROM feeds WHERE id=?;", (key,)).fetchone()[0])
854 def getFeedNumberOfUnreadItems(self, key):
855 return self.db.execute("SELECT unread FROM feeds WHERE id=?;", (key,)).fetchone()[0]
857 def getFeedTitle(self, key):
858 (title, url) = self.db.execute("SELECT title, url FROM feeds WHERE id=?;", (key,)).fetchone()
863 def getFeedUrl(self, key):
864 return self.db.execute("SELECT url FROM feeds WHERE id=?;", (key,)).fetchone()[0]
866 def getFeedCategory(self, key):
867 return self.db.execute("SELECT category FROM feeds WHERE id=?;", (key,)).fetchone()[0]
869 def getListOfFeeds(self, category=None):
871 rows = self.db.execute("SELECT id FROM feeds WHERE category=? ORDER BY rank;", (category, ) )
873 rows = self.db.execute("SELECT id FROM feeds ORDER BY rank;" )
880 def getListOfCategories(self):
881 rows = self.db.execute("SELECT id FROM categories ORDER BY rank;" )
888 def getCategoryTitle(self, id):
889 row = self.db.execute("SELECT title FROM categories WHERE id=?;", (id, )).fetchone()
892 def getSortedListOfKeys(self, order, onlyUnread=False, category=1):
893 if order == "Most unread":
894 tmp = "ORDER BY unread DESC"
895 #keyorder = sorted(feedInfo, key = lambda k: feedInfo[k][1], reverse=True)
896 elif order == "Least unread":
897 tmp = "ORDER BY unread"
898 #keyorder = sorted(feedInfo, key = lambda k: feedInfo[k][1])
899 elif order == "Most recent":
900 tmp = "ORDER BY updateTime DESC"
901 #keyorder = sorted(feedInfo, key = lambda k: feedInfo[k][2], reverse=True)
902 elif order == "Least recent":
903 tmp = "ORDER BY updateTime"
904 #keyorder = sorted(feedInfo, key = lambda k: feedInfo[k][2])
905 else: # order == "Manual" or invalid value...
906 tmp = "ORDER BY rank"
907 #keyorder = sorted(feedInfo, key = lambda k: feedInfo[k][0])
909 sql = "SELECT id FROM feeds WHERE unread>0 AND category=%s " %category + tmp
911 sql = "SELECT id FROM feeds WHERE category=%s " %category + tmp
912 rows = self.db.execute(sql)
919 def getFavicon(self, key):
920 filename = "%s%s.d/favicon.ico" % (self.configdir, key)
926 def updateUnread(self, key):
927 feed = self.getFeed(key)
928 self.db.execute("UPDATE feeds SET unread=? WHERE id=?;", (feed.getNumberOfUnreadItems(), key))
931 def addFeed(self, title, url, id=None, category=1):
934 count = self.db.execute("SELECT count(*) FROM feeds WHERE id=?;", (id,) ).fetchone()[0]
936 max_rank = self.db.execute("SELECT MAX(rank) FROM feeds;").fetchone()[0]
939 values = (id, title, url, 0, 0, max_rank+1, None, "None", 1, category)
940 self.db.execute("INSERT INTO feeds (id, title, url, unread, updateTime, rank, etag, modified, widget, category) VALUES (?, ?, ? ,? ,? ,?, ?, ?, ?,?);", values)
942 # Ask for the feed object; it will create the necessary tables.
946 # Register the stream with Woodchuck. Update approximately
948 wc().stream_register(stream_identifier=id,
949 human_readable_name=title,
956 def addCategory(self, title):
957 rank = self.db.execute("SELECT MAX(rank)+1 FROM categories;").fetchone()[0]
960 id = self.db.execute("SELECT MAX(id)+1 FROM categories;").fetchone()[0]
963 self.db.execute("INSERT INTO categories (id, title, unread, rank) VALUES (?, ?, 0, ?)", (id, title, rank))
966 def removeFeed(self, key):
967 if wc().available ():
971 print "Removing unregistered feed %s failed" % (key,)
973 rank = self.db.execute("SELECT rank FROM feeds WHERE id=?;", (key,) ).fetchone()[0]
974 self.db.execute("DELETE FROM feeds WHERE id=?;", (key, ))
975 self.db.execute("UPDATE feeds SET rank=rank-1 WHERE rank>?;", (rank,) )
978 if isdir(self.configdir+key+".d/"):
979 rmtree(self.configdir+key+".d/")
981 def removeCategory(self, key):
982 if self.db.execute("SELECT count(*) FROM categories;").fetchone()[0] > 1:
983 rank = self.db.execute("SELECT rank FROM categories WHERE id=?;", (key,) ).fetchone()[0]
984 self.db.execute("DELETE FROM categories WHERE id=?;", (key, ))
985 self.db.execute("UPDATE categories SET rank=rank-1 WHERE rank>?;", (rank,) )
986 self.db.execute("UPDATE feeds SET category=1 WHERE category=?;", (key,) )
989 #def saveConfig(self):
990 # self.listOfFeeds["feedingit-order"] = self.sortedKeys
991 # file = open(self.configdir+"feeds.pickle", "w")
992 # pickle.dump(self.listOfFeeds, file)
995 def moveUp(self, key):
996 rank = self.db.execute("SELECT rank FROM feeds WHERE id=?;", (key,)).fetchone()[0]
998 self.db.execute("UPDATE feeds SET rank=? WHERE rank=?;", (rank, rank-1) )
999 self.db.execute("UPDATE feeds SET rank=? WHERE id=?;", (rank-1, key) )
1002 def moveCategoryUp(self, key):
1003 rank = self.db.execute("SELECT rank FROM categories WHERE id=?;", (key,)).fetchone()[0]
1005 self.db.execute("UPDATE categories SET rank=? WHERE rank=?;", (rank, rank-1) )
1006 self.db.execute("UPDATE categories SET rank=? WHERE id=?;", (rank-1, key) )
1009 def moveDown(self, key):
1010 rank = self.db.execute("SELECT rank FROM feeds WHERE id=?;", (key,)).fetchone()[0]
1011 max_rank = self.db.execute("SELECT MAX(rank) FROM feeds;").fetchone()[0]
1013 self.db.execute("UPDATE feeds SET rank=? WHERE rank=?;", (rank, rank+1) )
1014 self.db.execute("UPDATE feeds SET rank=? WHERE id=?;", (rank+1, key) )
1017 def moveCategoryDown(self, key):
1018 rank = self.db.execute("SELECT rank FROM categories WHERE id=?;", (key,)).fetchone()[0]
1019 max_rank = self.db.execute("SELECT MAX(rank) FROM categories;").fetchone()[0]
1021 self.db.execute("UPDATE categories SET rank=? WHERE rank=?;", (rank, rank+1) )
1022 self.db.execute("UPDATE categories SET rank=? WHERE id=?;", (rank+1, key) )