psa: more fixes if wc unavailable
[feedingit] / psa_harmattan / feedingit / pysrc / rss_sqlite.py
#!/usr/bin/env python2.5


# Copyright (c) 2007-2008 INdT.
# Copyright (c) 2011 Neal H. Walfield
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU Lesser General Public License for more details.
#
#  You should have received a copy of the GNU Lesser General Public License
#  along with this program.  If not, see <http://www.gnu.org/licenses/>.
#

# ============================================================================
# Name        : FeedingIt.py
# Author      : Yves Marcoz
# Version     : 0.5.4
# Description : Simple RSS Reader
# ============================================================================

from __future__ import with_statement

import sqlite3
from os.path import isfile, isdir
from shutil import rmtree
from os import mkdir, remove, utime
import os
import md5
import feedparser
import time
import urllib2
from BeautifulSoup import BeautifulSoup
from urlparse import urljoin
from calendar import timegm
import threading
import traceback
from wc import wc, wc_init, woodchuck
import subprocess
import dbus
from updatedbus import update_server_object

from jobmanager import JobManager
import mainthread
from httpprogresshandler import HTTPProgressHandler
import random
import sys
import logging
logger = logging.getLogger(__name__)

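# Hash an arbitrary string (e.g. a URL or an article id) into a stable
# hexadecimal identifier that is safe to use as a filename or database key.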
def getId(string):
    if issubclass(string.__class__, unicode):
        string = string.encode('utf8', 'replace')

    return md5.new(string).hexdigest()

def download_callback(connection):
    if JobManager().do_quit:
        raise KeyboardInterrupt

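# Build a urllib2 opener that tracks transfer progress; by default the
# progress callback also lets the JobManager abort in-flight downloads.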
def downloader(progress_handler=None, proxy=None):
    openers = []

    if progress_handler is not None:
        openers.append(progress_handler)
    else:
        openers.append(HTTPProgressHandler(download_callback))

    if proxy:
        openers.append(proxy)

    return urllib2.build_opener(*openers)

def transfer_stats(sent, received, **kwargs):
    """
    This function takes two arguments: sent is the number of bytes
    sent so far, received is the number of bytes received.  The
    function returns a continuation that you can call later.

    The continuation takes the same two arguments.  It returns a tuple
    of the number of bytes sent, the number of bytes received and the
    time since the original function was invoked.
    """
    start_time = time.time()
    start_sent = sent
    start_received = received

    def e(sent, received, **kwargs):
        return (sent - start_sent,
                received - start_received,
                time.time() - start_time)

    return e

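# Example (mirroring the use in Feed._updateFeed below):
#
#   feed_transfer_stats = transfer_stats(0, 0)
#   ... perform the download ...
#   sent, received, duration = feed_transfer_stats(**progress_handler.stats)
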
# If not None, a subprocess.Popen object corresponding to an
# update_feeds.py process.
update_feed_process = None

update_feeds_iface = None

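# Number of jobs that had already completed when the current batch of
# updates started; used by Listing._postFeedUpdate to compute progress.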
jobs_at_start = 0

class BaseObject(object):
    # Columns to cache.  Classes that inherit from this and use the
    # cache mechanism should set this to a list of tuples, each of
    # which contains two entries: the table and the column.  Note that
    # both are case sensitive.
    cached_columns = ()

    def cache_invalidate(self, table=None):
        """
        Invalidate the cache.

        If table is not None, invalidate only the specified table.
        Otherwise, drop the whole cache.
        """
        if not hasattr(self, 'cache'):
            return

        if table is None:
            del self.cache
        else:
            if table in self.cache:
                del self.cache[table]

    def lookup(self, table, column, id=None):
        """
        Look up a column or value.  Uses a cache for columns in
        cached_columns.  Note: the column is returned unsorted.
        """
        if not hasattr(self, 'cache'):
            self.cache = {}

        # Cache data for at most 60 seconds.
        now = time.time()
        try:
            cache = self.cache[table]

            if time.time() - cache[None] > 60:
                # logger.debug("%s: Cache too old: clearing" % (table,))
                del self.cache[table]
                cache = None
        except KeyError:
            cache = None

        if (cache is None
            or (table, column) not in self.cached_columns):
            # The cache is empty or the caller wants a column that we
            # don't cache.
            if (table, column) in self.cached_columns:
                # logger.debug("%s: Rebuilding cache" % (table,))

                do_cache = True

                self.cache[table] = cache = {}
                columns = []
                for t, c in self.cached_columns:
                    if table == t:
                        cache[c] = {}
                        columns.append(c)

                columns.append('id')
                where = ""
            else:
                do_cache = False

                columns = (column,)
                if id is not None:
                    where = "where id = '%s'" % id
                else:
                    where = ""

            results = self.db.execute(
                "SELECT %s FROM %s %s" % (','.join(columns), table, where))

            if do_cache:
                for r in results:
                    values = list(r)
                    i = values.pop()
                    for index, value in enumerate(values):
                        cache[columns[index]][i] = value

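                # The None key records when this table's cache was
                # built; the staleness check above compares against it.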
                cache[None] = now
            else:
                values = []
                for r in results:
                    if id is not None:
                        return r[0]

                    values.append(r[0])

                return values
        else:
            cache = self.cache[table]

        try:
            if id is not None:
                value = cache[column][id]
                # logger.debug("%s.%s:%s -> %s" % (table, column, id, value))
                return value
            else:
                return cache[column].values()
        except KeyError:
            # logger.debug("%s.%s:%s -> Not found" % (table, column, id))
            return None

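# Example: Feed (below) lists ('feed', 'read') and ('feed', 'title') in
# cached_columns, so Feed.isEntryRead() and Feed.getTitle() are answered
# from the cache instead of issuing a query per article.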
class Feed(BaseObject):
    # Columns to cache.
    cached_columns = (('feed', 'read'),
                      ('feed', 'title'))

    serial_execution_lock = threading.Lock()

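    # sqlite3 connections cannot be shared between threads, so each
    # thread lazily opens its own connection and keeps it in
    # thread-local storage.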
    def _getdb(self):
        try:
            db = self.tls.db
        except AttributeError:
            db = sqlite3.connect("%s/%s.db" % (self.dir, self.key), timeout=120)
            self.tls.db = db
        return db
    db = property(_getdb)

    def __init__(self, configdir, key):
        self.key = key
        self.configdir = configdir
        self.dir = "%s/%s.d" %(self.configdir, self.key)
        self.tls = threading.local()

        if not isdir(self.dir):
            mkdir(self.dir)
        filename = "%s/%s.db" % (self.dir, self.key)
        if not isfile(filename):
            self.db.execute("CREATE TABLE feed (id text, title text, contentLink text, contentHash text, date float, updated float, link text, read int);")
            self.db.execute("CREATE TABLE images (id text, imagePath text);")
            self.db.commit()
        else:
            try:
                self.db.execute("ALTER TABLE feed ADD COLUMN contentHash text")
                self.db.commit()
            except sqlite3.OperationalError, e:
                if 'duplicate column name' in str(e):
                    pass
                else:
                    logger.exception("Add column contentHash to %s", filename)

    def addImage(self, configdir, key, baseurl, url, proxy=None, opener=None):
        filename = configdir+key+".d/"+getId(url)
        if not isfile(filename):
            try:
                if not opener:
                    opener = downloader(proxy=proxy)

                abs_url = urljoin(baseurl,url)
                f = opener.open(abs_url)
                try:
                    with open(filename, "w") as outf:
                        for data in f:
                            outf.write(data)
                finally:
                    f.close()
            except (urllib2.HTTPError, urllib2.URLError, IOError), exception:
                logger.info("Could not download image %s: %s"
                            % (abs_url, str (exception)))
                return None
            except:
                exception = sys.exc_info()[0]

                logger.info("Downloading image %s: %s" %
                            (abs_url, traceback.format_exc()))
                try:
                    remove(filename)
                except OSError:
                    pass

                return None
        else:
            # "Touch" the file so that it is not expired prematurely.
            file = open(filename,"a")
            utime(filename, None)
            file.close()
        return filename

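    # When running inside update_feeds.py, run the update in-process via
    # the JobManager.  Otherwise, ask the update daemon over D-Bus to do
    # it, spawning the daemon first if it is not already running.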
    def updateFeed(self, configdir, url, etag, modified, expiryTime=24, proxy=None, imageCache=False, priority=0, postFeedUpdateFunc=None, *postFeedUpdateFuncArgs):
        if (os.path.basename(sys.argv[0]) == 'update_feeds.py'):
            def doit():
                def it():
                    self._updateFeed(configdir, url, etag, modified, expiryTime, proxy, imageCache, postFeedUpdateFunc, *postFeedUpdateFuncArgs)
                return it
            JobManager().execute(doit(), self.key, priority=priority)
        else:
            def send_update_request():
                global update_feeds_iface
                if update_feeds_iface is None:
                    bus=dbus.SessionBus()
                    remote_object = bus.get_object(
                        "org.marcoz.feedingit", # Connection name
                        "/org/marcoz/feedingit/update" # Object's path
                        )
                    update_feeds_iface = dbus.Interface(
                        remote_object, 'org.marcoz.feedingit')

                try:
                    update_feeds_iface.Update(self.key)
                except Exception, e:
                    logger.error("Invoking org.marcoz.feedingit.Update: %s"
                                 % str(e))
                    update_feeds_iface = None
                else:
                    return True

            if send_update_request():
                # Success!  It seems we were able to start the update
                # daemon via dbus (or, it was already running).
                return

            global update_feed_process
            global update_feeds_iface
            if (update_feed_process is None
                or update_feed_process.poll() is not None):
                # The update_feeds process is not running.  Start it.
                update_feeds = os.path.join(os.path.dirname(__file__),
                                            'update_feeds.py')
                argv = ['/usr/bin/env', 'python', update_feeds, '--daemon']
                logger.debug("Starting update_feeds: running %s"
                             % (str(argv),))
                update_feed_process = subprocess.Popen(argv)
                # Make sure the dbus calls go to the right process:
                # rebind.
                update_feeds_iface = None

            for _ in xrange(5):
                if send_update_request():
                    break
                time.sleep(1)

    def _updateFeed(self, configdir, url, etag, modified, expiryTime=24, proxy=None, imageCache=False, postFeedUpdateFunc=None, *postFeedUpdateFuncArgs):
        logger.debug("Updating %s" % url)

        success = False
        have_serial_execution_lock = False
        try:
            update_start = time.time ()

            progress_handler = HTTPProgressHandler(download_callback)

            openers = [progress_handler]
            if proxy:
                openers.append (proxy)
            kwargs = {'handlers':openers}

            feed_transfer_stats = transfer_stats(0, 0)

            tmp=feedparser.parse(url, etag=etag, modified=modified, **kwargs)
            download_duration = time.time () - update_start

            opener = downloader(progress_handler, proxy)

            if JobManager().do_quit:
                raise KeyboardInterrupt

            process_start = time.time()

            # Expiry time is in hours
            expiry = float(expiryTime) * 3600.

            currentTime = 0

            updated_objects = 0
            new_objects = 0

            def wc_success():
                try:
                    if wc().available():
                        wc().stream_register (self.key, "", 6 * 60 * 60)
                except woodchuck.ObjectExistsError:
                    pass
                try:
                    wc()[self.key].updated (
                        indicator=(woodchuck.Indicator.ApplicationVisual
                                   |woodchuck.Indicator.StreamWide),
                        transferred_down=progress_handler.stats['received'],
                        transferred_up=progress_handler.stats['sent'],
                        transfer_time=update_start,
                        transfer_duration=download_duration,
                        new_objects=new_objects,
                        updated_objects=updated_objects,
                        objects_inline=new_objects + updated_objects)
                except KeyError:
                    logger.warn(
                        "Failed to register update of %s with woodchuck!"
                        % (self.key))

            http_status = tmp.get ('status', 200)

            # Check if the parse was successful.  If the http status code
            # is 304, then the download was successful, but there is
            # nothing new.  Indeed, no content is returned.  This makes a
            # 304 look like an error because there are no entries and the
            # parse fails.  But really, everything went great!  Check for
            # this first.
            if http_status == 304:
                logger.debug("%s: No changes to feed." % (self.key,))
                if wc().available():
                    mainthread.execute(wc_success, async=True)
                success = True
            elif len(tmp["entries"])==0 and not tmp.get('version', None):
                # An error occurred fetching or parsing the feed.  (Version
                # will be either None if e.g. the connection timed out, or
                # '' if the data is not a proper feed.)
                logger.error(
                    "Error fetching %s: version is: %s: error: %s"
                    % (url, str (tmp.get('version', 'unset')),
                       str (tmp.get ('bozo_exception', 'Unknown error'))))
                logger.debug(tmp)
                def register_stream_update_failed(http_status):
                    def doit():
                        logger.debug("%s: stream update failed!" % self.key)

                        try:
                            # It's not easy to get the feed's title from here.
                            # At the latest, the next time the application is
                            # started, we'll fix up the human readable name.
                            wc().stream_register (self.key, "", 6 * 60 * 60)
                        except woodchuck.ObjectExistsError:
                            pass
                        ec = woodchuck.TransferStatus.TransientOther
                        if 300 <= http_status and http_status < 400:
                            ec = woodchuck.TransferStatus.TransientNetwork
                        if 400 <= http_status and http_status < 500:
                            ec = woodchuck.TransferStatus.FailureGone
                        if 500 <= http_status and http_status < 600:
                            ec = woodchuck.TransferStatus.TransientNetwork
                        wc()[self.key].update_failed(ec)
                    return doit
                if wc().available():
                    mainthread.execute(
                        register_stream_update_failed(
                            http_status=http_status),
                        async=True)
            else:
                currentTime = time.time()
                # The etag and modified value should only be updated if the content was not null
                try:
                    etag = tmp["etag"]
                except KeyError:
                    etag = None
                try:
                    modified = tmp["modified"]
                except KeyError:
                    modified = None
                try:
                    abs_url = urljoin(tmp["feed"]["link"],"/favicon.ico")
                    f = opener.open(abs_url)
                    data = f.read()
                    f.close()
                    outf = open(self.dir+"/favicon.ico", "w")
                    outf.write(data)
                    outf.close()
                    del data
                except (urllib2.HTTPError, urllib2.URLError), exception:
                    logger.debug("Could not download favicon %s: %s"
                                 % (abs_url, str (exception)))

                self.serial_execution_lock.acquire ()
                have_serial_execution_lock = True

                tmp["entries"].reverse()
                for entry in tmp["entries"]:
                    # Yield so as to make the main thread a bit more
                    # responsive.
                    time.sleep(0)

                    entry_transfer_stats = transfer_stats(
                        *feed_transfer_stats(**progress_handler.stats)[0:2])

                    if JobManager().do_quit:
                        raise KeyboardInterrupt

                    object_size = 0

                    date = self.extractDate(entry)
                    try:
                        entry["title"]
                    except KeyError:
                        entry["title"] = "No Title"
                    try:
                        entry["link"]
                    except KeyError:
                        entry["link"] = ""
                    try:
                        entry["author"]
                    except KeyError:
                        entry["author"] = None
                    if(not(entry.has_key("id"))):
                        entry["id"] = None
                    content = self.extractContent(entry)
                    contentHash = getId(content)
                    object_size = len (content)
                    tmpEntry = {"title":entry["title"], "content":content,
                                "date":date, "link":entry["link"],
                                "author":entry["author"], "id":entry["id"]}
                    id = self.generateUniqueId(tmpEntry)

                    current_version = self.db.execute(
                        'select date, ROWID, contentHash from feed where id=?',
                        (id,)).fetchone()
                    if (current_version is not None
                        # To detect updates, don't compare by date:
                        # compare by content.
                        #
                        # - If an article update is just a date change
                        #   and the content remains the same, we don't
                        #   want to register an update.
                        #
                        # - If an article's content changes but not the
                        #   date, we want to recognize an update.
                        and current_version[2] == contentHash):
                        logger.debug("ALREADY DOWNLOADED %s (%s)"
                                     % (entry["title"], entry["link"]))
                        # The article is already present in the feed
                        # listing.  Update the "updated" time, so that it
                        # doesn't expire.
                        self.db.execute("UPDATE feed SET updated=? WHERE id=?;",(currentTime,id))
                        try:
                            logger.debug("Updating already downloaded files for %s" %(id))
                            filename = configdir+self.key+".d/"+id+".html"
                            file = open(filename,"a")
                            utime(filename, None)
                            file.close()
                            images = self.db.execute("SELECT imagePath FROM images where id=?;", (id, )).fetchall()
                            for image in images:
                                file = open(image[0],"a")
                                utime(image[0], None)
                                file.close()
                        except:
                            logger.debug("Error in refreshing images for %s" % (id))
                        self.db.commit()
                        continue

                    if current_version is not None:
                        # The version was updated.  Mark it as unread.
                        logger.debug("UPDATED: %s (%s)"
                                     % (entry["title"], entry["link"]))
                        updated_objects += 1
                    else:
                        logger.debug("NEW: %s (%s)"
                                     % (entry["title"], entry["link"]))
                        new_objects += 1

                    soup = BeautifulSoup(self.getArticle(tmpEntry))
                    images = soup('img')
                    baseurl = tmpEntry["link"]
                    if imageCache and len(images) > 0:
                        self.serial_execution_lock.release ()
                        have_serial_execution_lock = False
                        for img in images:
                            if not img.has_key('src'):
                                continue

                            filename = self.addImage(
                                configdir, self.key, baseurl, img['src'],
                                opener=opener)
                            if filename:
                                img['src']="file://%s" %filename
                                count = self.db.execute("SELECT count(1) FROM images where id=? and imagePath=?;", (id, filename )).fetchone()[0]
                                if count == 0:
                                    self.db.execute("INSERT INTO images (id, imagePath) VALUES (?, ?);", (id, filename) )
                                    self.db.commit()

                                try:
                                    object_size += os.path.getsize (filename)
                                except os.error, exception:
                                    logger.error ("Error getting size of %s: %s"
                                                  % (filename, exception))
                        self.serial_execution_lock.acquire ()
                        have_serial_execution_lock = True

                    tmpEntry["contentLink"] = configdir+self.key+".d/"+id+".html"
                    file = open(tmpEntry["contentLink"], "w")
                    file.write(soup.prettify())
                    file.close()

                    values = {'id': id,
                              'title': tmpEntry["title"],
                              'contentLink': tmpEntry["contentLink"],
                              'contentHash': contentHash,
                              'date': tmpEntry["date"],
                              'updated': currentTime,
                              'link': tmpEntry["link"],
                              'read': 0}

                    if current_version is not None:
                        # This is an update.  Ensure that the existing
                        # entry is replaced.
                        values['ROWID'] = current_version[1]

                    cols, values = zip(*values.items())
                    self.db.execute(
                        "INSERT OR REPLACE INTO feed (%s) VALUES (%s);"
                        % (','.join(cols), ','.join(('?',) * len(values))),
                        values)
                    self.db.commit()

                    # Register the object with Woodchuck and mark it as
                    # downloaded.
                    def register_object_transferred(
                            id, title, publication_time,
                            sent, received, object_size):
                        def doit():
                            logger.debug("Registering transfer of object %s"
                                         % title)
                            try:
                                obj = wc()[self.key].object_register(
                                    object_identifier=id,
                                    human_readable_name=title)
                            except woodchuck.ObjectExistsError:
                                obj = wc()[self.key][id]
                            else:
                                obj.publication_time = publication_time
                                obj.transferred(
                                    indicator=(
                                        woodchuck.Indicator.ApplicationVisual
                                        |woodchuck.Indicator.StreamWide),
                                    transferred_down=received,
                                    transferred_up=sent,
                                    object_size=object_size)
                        return doit
                    if wc().available():
                        # If the entry does not contain a publication
                        # time, the attribute won't exist.
                        pubtime = entry.get('date_parsed', None)
                        if pubtime:
                            publication_time = time.mktime (pubtime)
                        else:
                            publication_time = None

                        sent, received, _ \
                            = entry_transfer_stats(**progress_handler.stats)
                        # sent and received are for objects (in
                        # particular, images) associated with this
                        # item.  We also want to attribute the data
                        # transferred for the item's content.  This is
                        # a good first approximation.
                        received += len(content)

                        mainthread.execute(
                            register_object_transferred(
                                id=id,
                                title=tmpEntry["title"],
                                publication_time=publication_time,
                                sent=sent, received=received,
                                object_size=object_size),
                            async=True)
                self.db.commit()

                sent, received, _ \
                    = feed_transfer_stats(**progress_handler.stats)
                logger.debug (
                    "%s: Update successful: transferred: %d/%d; objects: %d"
                    % (url, sent, received, len (tmp.entries)))
                if wc().available():
                    mainthread.execute (wc_success, async=True)
                success = True

            rows = self.db.execute("SELECT id FROM feed WHERE (read=0 AND updated<?) OR (read=1 AND updated<?);", (currentTime-2*expiry, currentTime-expiry))
            for row in rows:
                self.removeEntry(row[0])

            from glob import glob
            from os import stat
            for file in glob(configdir+self.key+".d/*"):
                stats = stat(file)
                # stats[8] is st_mtime, the file's last modification
                # time.
                lastmodDate = stats[8]
                expDate = time.time()-expiry*3
                # Check whether the cached file is outdated.
                if expDate > lastmodDate:
                    try:
                        # XXX: Tell woodchuck.
                        remove(file)
                    except OSError, exception:
                        logger.error('Could not remove %s: %s'
                                     % (file, str (exception)))
            logger.debug("updated %s: %fs in download, %fs in processing"
                         % (self.key, download_duration,
                            time.time () - process_start))
        except:
            logger.error("Updating %s: %s" % (self.key, traceback.format_exc()))
        finally:
            self.db.commit ()

            if have_serial_execution_lock:
                self.serial_execution_lock.release ()

            updateTime = 0
            try:
                rows = self.db.execute("SELECT MAX(date) FROM feed;")
                for row in rows:
                    updateTime=row[0]
            except Exception, e:
                logger.error("Fetching update time: %s: %s"
                             % (str(e), traceback.format_exc()))
            finally:
                if not success:
                    etag = None
                    modified = None
                title = None
                try:
                    title = tmp.feed.title
                except (AttributeError, UnboundLocalError), exception:
                    pass
                if postFeedUpdateFunc is not None:
                    postFeedUpdateFunc (self.key, updateTime, etag, modified,
                                        title, *postFeedUpdateFuncArgs)

        self.cache_invalidate()

    def setEntryRead(self, id):
        self.db.execute("UPDATE feed SET read=1 WHERE id=?;", (id,) )
        self.db.commit()

        def doit():
            try:
                wc()[self.key][id].used()
            except KeyError:
                pass
        if wc().available():
            mainthread.execute(doit, async=True)
        self.cache_invalidate('feed')

    def setEntryUnread(self, id):
        self.db.execute("UPDATE feed SET read=0 WHERE id=?;", (id,) )
        self.db.commit()
        self.cache_invalidate('feed')

    def markAllAsRead(self):
        self.db.execute("UPDATE feed SET read=1 WHERE read=0;")
        self.db.commit()
        self.cache_invalidate('feed')

    def isEntryRead(self, id):
        return self.lookup('feed', 'read', id) == 1

    def getTitle(self, id):
        return self.lookup('feed', 'title', id)

    def getContentLink(self, id):
        return self.db.execute("SELECT contentLink FROM feed WHERE id=?;", (id,) ).fetchone()[0]

    def getContentHash(self, id):
        return self.db.execute("SELECT contentHash FROM feed WHERE id=?;", (id,) ).fetchone()[0]

    def getExternalLink(self, id):
        return self.db.execute("SELECT link FROM feed WHERE id=?;", (id,) ).fetchone()[0]

    def getDate(self, id):
        dateStamp = self.db.execute("SELECT date FROM feed WHERE id=?;", (id,) ).fetchone()[0]
        return time.strftime("%a, %d %b %Y %H:%M:%S", time.localtime(dateStamp))

    def getDateTuple(self, id):
        dateStamp = self.db.execute("SELECT date FROM feed WHERE id=?;", (id,) ).fetchone()[0]
        return time.localtime(dateStamp)

    def getDateStamp(self, id):
        return self.db.execute("SELECT date FROM feed WHERE id=?;", (id,) ).fetchone()[0]

    def generateUniqueId(self, entry):
        """
        Generate a stable identifier for the article.  For the same
        entry, this should result in the same identifier.  If
        possible, the identifier should remain the same even if the
        article is updated.
        """
        # Prefer the entry's id, which is supposed to be globally
        # unique.
        key = entry.get('id', None)
        if not key:
            # Next, try the link to the content.
            key = entry.get('link', None)
        if not key:
            # Ok, the title and the date concatenated are likely to be
            # relatively stable.  (Default to empty strings so that a
            # missing field does not raise a TypeError.)
            key = entry.get('title', '') + entry.get('date', '')
        if not key:
            # Hmm, the article's content will at least guarantee no
            # false negatives (i.e., missing articles)
            key = entry.get('content', None)
        if not key:
            # If all else fails, just use a random number.
            key = str (random.random ())
        return getId (key)

    def getIds(self, onlyUnread=False):
        if onlyUnread:
            rows = self.db.execute("SELECT id FROM feed where read=0 ORDER BY date DESC;").fetchall()
        else:
            rows = self.db.execute("SELECT id FROM feed ORDER BY date DESC;").fetchall()
        ids = []
        for row in rows:
            ids.append(row[0])
        return ids

    def getNextId(self, id, forward=True, onlyUnread=False):
        if forward:
            delta = 1
        else:
            delta = -1
        ids = self.getIds(onlyUnread=onlyUnread)
        index = ids.index(id)
        # Wrap around at either end of the list.
        return ids[(index + delta) % len(ids)]

    def getPreviousId(self, id, onlyUnread=False):
        return self.getNextId(id, forward=False, onlyUnread=onlyUnread)

    def getNumberOfUnreadItems(self):
        return self.db.execute("SELECT count(*) FROM feed WHERE read=0;").fetchone()[0]

    def getNumberOfEntries(self):
        return self.db.execute("SELECT count(*) FROM feed;").fetchone()[0]

    def getArticle(self, entry):
        title = entry['title']
        content = entry["content"]

        link = entry['link']
        author = entry['author']
        date = time.strftime("%a, %d %b %Y %H:%M:%S", time.localtime(entry["date"]) )

        text = '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">'
        text += "<html><head><title>" + title + "</title>"
        text += '<meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>\n'
        text += '</head><body bgcolor=\"#ffffff\"><div><a href=\"' + link + '\">' + title + "</a>"
        if author != None:
            text += "<BR /><small><i>Author: " + author + "</i></small>"
        text += "<BR /><small><i>Date: " + date + "</i></small></div>"
        text += "<BR /><BR />"
        text += content
        text += "</body></html>"
        return text

    def getContent(self, id):
        """
        Return the content of the article with the specified ID.  If
        the content is not available, returns None.
        """
        contentLink = self.getContentLink(id)
        try:
            with open(contentLink, 'r') as file:
                content = file.read()
        except Exception:
            logger.exception("Failed to get content for %s: reading %s failed",
                             id, contentLink)
            content = None
        return content

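    # feedparser's *_parsed fields are struct_time values in UTC, hence
    # timegm() rather than time.mktime(), which would assume local time.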
    def extractDate(self, entry):
        if entry.has_key("updated_parsed"):
            return timegm(entry["updated_parsed"])
        elif entry.has_key("published_parsed"):
            return timegm(entry["published_parsed"])
        else:
            return time.time()

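    # Prefer the longest available representation of the article:
    # 'content' over 'summary', with 'description' as a last resort.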
    def extractContent(self, entry):
        content = ""
        if entry.has_key('summary'):
            content = entry.get('summary', '')
        if entry.has_key('content'):
            if len(entry.content[0].value) > len(content):
                content = entry.content[0].value
        if content == "":
            content = entry.get('description', '')
        return content

    def removeEntry(self, id):
        contentLink = self.db.execute("SELECT contentLink FROM feed WHERE id=?;", (id,)).fetchone()[0]
        if contentLink:
            try:
                remove(contentLink)
            except OSError, exception:
                logger.error("Deleting %s: %s" % (contentLink, str (exception)))
        self.db.execute("DELETE FROM feed WHERE id=?;", (id,) )
        self.db.execute("DELETE FROM images WHERE id=?;", (id,) )
        self.db.commit()

        def doit():
            try:
                wc()[self.key][id].files_deleted (
                    woodchuck.DeletionResponse.Deleted)
                del wc()[self.key][id]
            except KeyError:
                pass
        if wc().available():
            mainthread.execute (doit, async=True)

class ArchivedArticles(Feed):
    def addArchivedArticle(self, title, link, date, configdir):
        id = self.generateUniqueId({"date":date, "title":title})
        values = (id, title, link, date, 0, link, 0)
        self.db.execute("INSERT INTO feed (id, title, contentLink, date, updated, link, read) VALUES (?, ?, ?, ?, ?, ?, ?);", values)
        self.db.commit()

    # Feed.updateFeed calls this function.  The signature must match the
    # positional arguments of Feed._updateFeed, which Feed.updateFeed
    # passes through.
    def _updateFeed(self, configdir, url, etag, modified, expiryTime=24, proxy=None, imageCache=False, postFeedUpdateFunc=None, *postFeedUpdateFuncArgs):
        currentTime = 0
        rows = self.db.execute("SELECT id, link FROM feed WHERE updated=0;")
        for row in rows:
            try:
                currentTime = time.time()
                id = row[0]
                link = row[1]
                f = urllib2.urlopen(link)
                html = f.read()
                f.close()
                soup = BeautifulSoup(html)
                images = soup('img')
                baseurl = link
                for img in images:
                    filename = self.addImage(configdir, self.key, baseurl, img['src'], proxy=proxy)
                    img['src']=filename
                    self.db.execute("INSERT INTO images (id, imagePath) VALUES (?, ?);", (id, filename) )
                    self.db.commit()
                contentLink = configdir+self.key+".d/"+id+".html"
                file = open(contentLink, "w")
                file.write(soup.prettify())
                file.close()

                self.db.execute("UPDATE feed SET read=0, contentLink=?, updated=? WHERE id=?;", (contentLink, time.time(), id) )
                self.db.commit()
            except:
                logger.error("Error updating Archived Article: %s %s"
                             % (link,traceback.format_exc(),))

        if postFeedUpdateFunc is not None:
            postFeedUpdateFunc (self.key, currentTime, None, None, None,
                                *postFeedUpdateFuncArgs)

    def purgeReadArticles(self):
        rows = self.db.execute("SELECT id FROM feed WHERE read=1;")
        for row in rows:
            self.removeArticle(row[0])

    def removeArticle(self, id):
        rows = self.db.execute("SELECT imagePath FROM images WHERE id=?;", (id,) )
        for row in rows:
            try:
                # Only delete the image file if no other article
                # references it.
                count = self.db.execute("SELECT count(*) FROM images WHERE id!=? and imagePath=?;", (id,row[0]) ).fetchone()[0]
                if count == 0:
                    os.remove(row[0])
            except:
                pass
        self.removeEntry(id)

class Listing(BaseObject):
    # Columns to cache.
    cached_columns = (('feeds', 'updateTime'),
                      ('feeds', 'unread'),
                      ('feeds', 'title'),
                      ('categories', 'title'))

    def _getdb(self):
        # As in Feed, keep one sqlite connection per thread.
        try:
            db = self.tls.db
        except AttributeError:
            db = sqlite3.connect("%s/feeds.db" % self.configdir, timeout=120)
            self.tls.db = db
        return db
    db = property(_getdb)

    # Lists all the feeds in a dictionary, and exposes the data
    def __init__(self, config, configdir):
        self.config = config
        self.configdir = configdir

        self.tls = threading.local ()

        try:
            table = self.db.execute("SELECT sql FROM sqlite_master").fetchone()
            if table == None:
                self.db.execute("CREATE TABLE feeds(id text, url text, title text, unread int, updateTime float, rank int, etag text, modified text, widget int, category int);")
                self.db.execute("CREATE TABLE categories(id text, title text, unread int, rank int);")
                self.addCategory("Default Category")
                if isfile(self.configdir+"feeds.pickle"):
                    self.importOldFormatFeeds()
                else:
                    self.addFeed("Maemo News", "http://maemo.org/news/items.xml")
            else:
                from string import find, upper
                if find(upper(table[0]), "WIDGET")<0:
                    self.db.execute("ALTER TABLE feeds ADD COLUMN widget int;")
                    self.db.execute("UPDATE feeds SET widget=1;")
                    self.db.commit()
                if find(upper(table[0]), "CATEGORY")<0:
                    self.db.execute("CREATE TABLE categories(id text, title text, unread int, rank int);")
                    self.addCategory("Default Category")
                    self.db.execute("ALTER TABLE feeds ADD COLUMN category int;")
                    self.db.execute("UPDATE feeds SET category=1;")
            self.db.commit()
        except:
            pass

        # Check that Woodchuck's state is up to date with respect to
        # our state.
        try:
            updater = os.path.basename(sys.argv[0]) == 'update_feeds.py'
            wc_init(config, self, True if updater else False)
            if wc().available() and updater:
                # The list of known streams.
                streams = wc().streams_list ()
                stream_ids = [s.identifier for s in streams]

                # Register any unknown streams.  Remove known streams from
                # STREAM_IDS.
                for key in self.getListOfFeeds():
                    title = self.getFeedTitle(key)
                    # XXX: We should also check whether the list of
                    # articles/objects in each feed/stream is up to date.
                    if key not in stream_ids:
                        logger.debug(
                            "Registering previously unknown channel: %s (%s)"
                            % (key, title,))
                        wc().stream_register(
                            key, title,
                            self.config.getUpdateInterval() * 60 * 60)
                    else:
                        # Make sure the human readable name is up to date.
                        if wc()[key].human_readable_name != title:
                            wc()[key].human_readable_name = title
                        stream_ids.remove (key)
                        wc()[key].freshness \
                            = self.config.getUpdateInterval() * 60 * 60

                # Unregister any streams that are no longer subscribed to.
                for id in stream_ids:
                    logger.debug("Unregistering %s" % (id,))
                    wc().stream_unregister (id)
        except Exception:
            logger.exception("Registering streams with Woodchuck")

    def importOldFormatFeeds(self):
        """This function loads feeds that are saved in an outdated format, and converts them to sqlite"""
        import rss
        listing = rss.Listing(self.configdir)
        rank = 0
        for id in listing.getListOfFeeds():
            try:
                rank += 1
                values = (id, listing.getFeedTitle(id), listing.getFeedUrl(id), 0, time.time(), rank, None, "None", 1)
                self.db.execute("INSERT INTO feeds (id, title, url, unread, updateTime, rank, etag, modified, widget, category) VALUES (?, ?, ? ,? ,? ,?, ?, ?, ?, 1);", values)
                self.db.commit()

                feed = listing.getFeed(id)
                new_feed = self.getFeed(id)

                items = feed.getIds()[:]
                items.reverse()
                for item in items:
                        if feed.isEntryRead(item):
                            read_status = 1
                        else:
                            read_status = 0
                        date = timegm(feed.getDateTuple(item))
                        title = feed.getTitle(item)
                        newId = new_feed.generateUniqueId({"date":date, "title":title})
                        values = (newId, title, feed.getContentLink(item), date, time.time(), feed.getExternalLink(item), read_status)
                        new_feed.db.execute("INSERT INTO feed (id, title, contentLink, date, updated, link, read) VALUES (?, ?, ?, ?, ?, ?, ?);", values)
                        new_feed.db.commit()
                        try:
                            images = feed.getImages(item)
                            for image in images:
                                new_feed.db.execute("INSERT INTO images (id, imagePath) VALUES (?, ?);", (newId, image) )
                                new_feed.db.commit()
                        except:
                            pass
                self.updateUnread(id)
            except:
                logger.error("importOldFormatFeeds: %s"
                             % (traceback.format_exc(),))
        remove(self.configdir+"feeds.pickle")

    def addArchivedArticle(self, key, index):
        feed = self.getFeed(key)
        title = feed.getTitle(index)
        link = feed.getExternalLink(index)
        date = feed.getDate(index)
        count = self.db.execute("SELECT count(*) FROM feeds where id=?;", ("ArchivedArticles",) ).fetchone()[0]
        if count == 0:
            self.addFeed("Archived Articles", "", id="ArchivedArticles")

        archFeed = self.getFeed("ArchivedArticles")
        archFeed.addArchivedArticle(title, link, date, self.configdir)
        self.updateUnread("ArchivedArticles")

    def updateFeed(self, key, expiryTime=None, proxy=None, imageCache=None,
                   priority=0):
        if expiryTime is None:
            expiryTime = self.config.getExpiry()
        if not expiryTime:
            # Default to 24 hours
            expiryTime = 24
        if proxy is None:
            (use_proxy, proxy) = self.config.getProxy()
            if not use_proxy:
                proxy = None
        if imageCache is None:
            imageCache = self.config.getImageCache()

        feed = self.getFeed(key)
        (url, etag, modified) = self.db.execute("SELECT url, etag, modified FROM feeds WHERE id=?;", (key,) ).fetchone()
        try:
            modified = time.struct_time(eval(modified))
        except:
            modified = None
        feed.updateFeed(
            self.configdir, url, etag, modified, expiryTime, proxy, imageCache,
            priority, postFeedUpdateFunc=self._queuePostFeedUpdate)

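    # Feed updates run on JobManager worker threads; marshal the
    # post-update bookkeeping back onto the main thread.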
    def _queuePostFeedUpdate(self, *args, **kwargs):
        mainthread.execute (self._postFeedUpdate, async=True, *args, **kwargs)

    def _postFeedUpdate(self, key, updateTime, etag, modified, title):
        if modified==None:
            modified="None"
        else:
            modified=str(tuple(modified))
        if updateTime > 0:
            self.db.execute("UPDATE feeds SET updateTime=?, etag=?, modified=? WHERE id=?;", (updateTime, etag, modified, key) )
        else:
            self.db.execute("UPDATE feeds SET etag=?, modified=? WHERE id=?;", (etag, modified, key) )

        if title is not None:
            self.db.execute("UPDATE feeds SET title=(case WHEN title=='' THEN ? ELSE title END) where id=?;",
                            (title, key))
        self.db.commit()
        self.cache_invalidate('feeds')
        self.updateUnread(key)

        update_server_object().ArticleCountUpdated()

        stats = JobManager().stats()
        global jobs_at_start
        completed = stats['jobs-completed'] - jobs_at_start
        in_progress = stats['jobs-in-progress']
        queued = stats['jobs-queued']

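        # Count a job in progress as half completed when estimating the
        # overall progress percentage.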
        try:
            percent = (100 * ((completed + in_progress / 2.))
                       / (completed + in_progress + queued))
        except ZeroDivisionError:
            percent = 100

        update_server_object().UpdateProgress(
            percent, completed, in_progress, queued, 0, 0, 0, key)

        if in_progress == 0 and queued == 0:
            jobs_at_start = stats['jobs-completed']

    def getFeed(self, key):
        if key == "ArchivedArticles":
            return ArchivedArticles(self.configdir, key)
        return Feed(self.configdir, key)

    def editFeed(self, key, title, url, category=None):
        if category:
            self.db.execute("UPDATE feeds SET title=?, url=?, category=? WHERE id=?;", (title, url, category, key))
        else:
            self.db.execute("UPDATE feeds SET title=?, url=? WHERE id=?;", (title, url, key))
        self.db.commit()
        self.cache_invalidate('feeds')

        if wc().available():
            try:
                wc()[key].human_readable_name = title
            except KeyError:
                logger.debug("Feed %s (%s) unknown." % (key, title))

    def getFeedUpdateTime(self, key):
        update_time = self.lookup('feeds', 'updateTime', key)

        if not update_time:
            return "Never"

        delta = time.time() - update_time

        delta_hours = delta / (60. * 60.)
        if delta_hours < .1:
            return "A few minutes ago"
        if delta_hours < .75:
            return "Less than an hour ago"
        if delta_hours < 1.5:
            return "About an hour ago"
        if delta_hours < 18:
            return "About %d hours ago" % (int(delta_hours + 0.5),)

        delta_days = delta_hours / 24.
        if delta_days < 1.5:
            return "About a day ago"
        if delta_days < 18:
            return "%d days ago" % (int(delta_days + 0.5),)

        delta_weeks = delta_days / 7.
        if delta_weeks <= 8:
            return "%d weeks ago" % int(delta_weeks + 0.5)

        delta_months = delta_days / 30.
        if delta_months <= 30:
            return "%d months ago" % int(delta_months + 0.5)

        return time.strftime("%x", time.gmtime(update_time))

    def getFeedNumberOfUnreadItems(self, key):
        return self.lookup('feeds', 'unread', key)

    def getFeedTitle(self, key):
        title = self.lookup('feeds', 'title', key)
        if title:
            return title

        return self.getFeedUrl(key)

    def getFeedUrl(self, key):
        return self.db.execute("SELECT url FROM feeds WHERE id=?;", (key,)).fetchone()[0]

    def getFeedCategory(self, key):
        return self.db.execute("SELECT category FROM feeds WHERE id=?;", (key,)).fetchone()[0]

    def getListOfFeeds(self, category=None):
        if category:
            rows = self.db.execute("SELECT id FROM feeds WHERE category=? ORDER BY rank;", (category, ) )
        else:
            rows = self.db.execute("SELECT id FROM feeds ORDER BY rank;" )
        keys = []
        for row in rows:
            if row[0]:
                keys.append(row[0])
        return keys

1263     def getListOfCategories(self):
1264         return list(row[0] for row in self.db.execute(
1265                 "SELECT id FROM categories ORDER BY rank;"))
1266     
1267     def getCategoryTitle(self, id):
1268         return self.lookup('categories', 'title', id)
1269     
    def getCategoryUnread(self, id):
        count = 0
        for key in self.getListOfFeeds(category=id):
            try:
                count = count + self.getFeedNumberOfUnreadItems(key)
            except TypeError:
                # The unread column may be NULL; treat it as zero.
                pass
        return count

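    # Example (illustrative): with three feeds in the category holding 4,
    # 0 and NULL unread items, getCategoryUnread returns 4; the NULL is
    # skipped by the TypeError handler above.
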
    def getSortedListOfKeys(self, order, onlyUnread=False, category=1):
        if order == "Most unread":
            tmp = "ORDER BY unread DESC"
        elif order == "Least unread":
            tmp = "ORDER BY unread"
        elif order == "Most recent":
            tmp = "ORDER BY updateTime DESC"
        elif order == "Least recent":
            tmp = "ORDER BY updateTime"
        else: # order == "Manual" or an invalid value.
            tmp = "ORDER BY rank"
        # Bind the category as a parameter instead of interpolating it
        # into the SQL; the ORDER BY clause is one of the fixed literals
        # above, so appending it is safe.
        if onlyUnread:
            sql = "SELECT id FROM feeds WHERE unread>0 AND category=? " + tmp
        else:
            sql = "SELECT id FROM feeds WHERE category=? " + tmp
        rows = self.db.execute(sql, (category,))
        keys = []
        for row in rows:
            if row[0]:
                keys.append(row[0])
        return keys

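    # Usage sketch (assumes a hypothetical Listing instance `listing`):
    # list the unread feeds of category 1, most unread first.
    #
    #     for key in listing.getSortedListOfKeys("Most unread",
    #                                            onlyUnread=True, category=1):
    #         print listing.getFeedTitle(key)
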
    def getFavicon(self, key):
        filename = "%s%s.d/favicon.ico" % (self.configdir, key)
        if isfile(filename):
            return filename
        else:
            return False

    def updateUnread(self, key):
        feed = self.getFeed(key)
        self.db.execute("UPDATE feeds SET unread=? WHERE id=?;", (feed.getNumberOfUnreadItems(), key))
        self.db.commit()
        self.cache_invalidate('feeds')

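    # Note: updateUnread recomputes the count via the Feed object and
    # caches it in the feeds table, so getFeedNumberOfUnreadItems stays a
    # cheap single-column lookup.
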
    def addFeed(self, title, url, id=None, category=1):
        if not id:
            id = getId(url)
        count = self.db.execute("SELECT count(*) FROM feeds WHERE id=?;", (id,)).fetchone()[0]
        if count == 0:
            max_rank = self.db.execute("SELECT MAX(rank) FROM feeds;").fetchone()[0]
            if max_rank is None:
                max_rank = 0
            values = (id, title, url, 0, 0, max_rank+1, None, "None", 1, category)
            self.db.execute("INSERT INTO feeds (id, title, url, unread, updateTime, rank, etag, modified, widget, category) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?);", values)
            self.db.commit()
            # Ask for the feed object; it will create the necessary tables.
            self.getFeed(id)

            if wc().available():
                # Register the stream with Woodchuck.  Update approximately
                # every 6 hours.
                wc().stream_register(stream_identifier=id,
                                     human_readable_name=title,
                                     freshness=6*60*60)

            self.cache_invalidate('feeds')
            return True
        else:
            return False

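    # Usage sketch (hypothetical title and URL): addFeed returns True on
    # success and False when a feed with the same id already exists.
    #
    #     if not listing.addFeed("Example feed", "http://example.com/rss"):
    #         logger.debug("feed already present")
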
    def addCategory(self, title):
        rank = self.db.execute("SELECT MAX(rank)+1 FROM categories;").fetchone()[0]
        if rank is None:
            rank = 1
        id = self.db.execute("SELECT MAX(id)+1 FROM categories;").fetchone()[0]
        if id is None:
            id = 1
        self.db.execute("INSERT INTO categories (id, title, unread, rank) VALUES (?, ?, 0, ?)", (id, title, rank))
        self.db.commit()
        self.cache_invalidate('categories')

    def removeFeed(self, key):
        if wc().available():
            try:
                del wc()[key]
            except (KeyError, woodchuck.Error):
                logger.debug("Removing unregistered feed %s failed" % (key,))

        rank = self.db.execute("SELECT rank FROM feeds WHERE id=?;", (key,)).fetchone()[0]
        self.db.execute("DELETE FROM feeds WHERE id=?;", (key,))
        # Close the gap so that ranks stay dense.
        self.db.execute("UPDATE feeds SET rank=rank-1 WHERE rank>?;", (rank,))
        self.db.commit()

        if isdir(self.configdir+key+".d/"):
            rmtree(self.configdir+key+".d/")
        self.cache_invalidate('feeds')

    def removeCategory(self, key):
        # Never remove the last remaining category.
        if self.db.execute("SELECT count(*) FROM categories;").fetchone()[0] > 1:
            rank = self.db.execute("SELECT rank FROM categories WHERE id=?;", (key,)).fetchone()[0]
            self.db.execute("DELETE FROM categories WHERE id=?;", (key,))
            self.db.execute("UPDATE categories SET rank=rank-1 WHERE rank>?;", (rank,))
            # Reassign orphaned feeds to the default category.
            self.db.execute("UPDATE feeds SET category=1 WHERE category=?;", (key,))
            self.db.commit()
            self.cache_invalidate('categories')

    def moveUp(self, key):
        # Swap ranks with the row directly above.
        rank = self.db.execute("SELECT rank FROM feeds WHERE id=?;", (key,)).fetchone()[0]
        if rank > 0:
            self.db.execute("UPDATE feeds SET rank=? WHERE rank=?;", (rank, rank-1))
            self.db.execute("UPDATE feeds SET rank=? WHERE id=?;", (rank-1, key))
            self.db.commit()

    def moveCategoryUp(self, key):
        rank = self.db.execute("SELECT rank FROM categories WHERE id=?;", (key,)).fetchone()[0]
        if rank > 0:
            self.db.execute("UPDATE categories SET rank=? WHERE rank=?;", (rank, rank-1))
            self.db.execute("UPDATE categories SET rank=? WHERE id=?;", (rank-1, key))
            self.db.commit()

    def moveDown(self, key):
        # Swap ranks with the row directly below.
        rank = self.db.execute("SELECT rank FROM feeds WHERE id=?;", (key,)).fetchone()[0]
        max_rank = self.db.execute("SELECT MAX(rank) FROM feeds;").fetchone()[0]
        if rank < max_rank:
            self.db.execute("UPDATE feeds SET rank=? WHERE rank=?;", (rank, rank+1))
            self.db.execute("UPDATE feeds SET rank=? WHERE id=?;", (rank+1, key))
            self.db.commit()

    def moveCategoryDown(self, key):
        rank = self.db.execute("SELECT rank FROM categories WHERE id=?;", (key,)).fetchone()[0]
        max_rank = self.db.execute("SELECT MAX(rank) FROM categories;").fetchone()[0]
        if rank < max_rank:
            self.db.execute("UPDATE categories SET rank=? WHERE rank=?;", (rank, rank+1))
            self.db.execute("UPDATE categories SET rank=? WHERE id=?;", (rank+1, key))
            self.db.commit()
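
# Reordering sketch (illustrative, assuming a Listing instance `listing`):
# moveDown swaps ranks with the next row, so an immediate moveUp restores
# the original order.
#
#     listing.moveDown(key)
#     listing.moveUp(key)   # back where it started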