0.9.1-3 - Fixed Archived Articles update
author Yves Marcoz <yves@marcoz.org>
Thu, 17 Nov 2011 06:36:28 +0000 (22:36 -0800)
committer Yves Marcoz <yves@marcoz.org>
Thu, 17 Nov 2011 06:36:28 +0000 (22:36 -0800)
debian/changelog
src/rss_sqlite.py

diff --git a/debian/changelog b/debian/changelog
index 2102387..37b1b49 100644
@@ -1,3 +1,9 @@
+feedingit (0.9.1-3) unstable; urgency=low
+
+  * Fixed Archived Articles updates
+
+ -- Yves <yves@marcoz.org>  Wed, 16 Nov 2011 21:51:19 -0800
+
 feedingit (0.9.1-2) unstable; urgency=low
 
   * Fixed update issue with feeds having old articles
diff --git a/src/rss_sqlite.py b/src/rss_sqlite.py
index fe9c404..9bc7a8a 100644
@@ -887,32 +887,36 @@ class ArchivedArticles(Feed):
         self.db.execute("INSERT INTO feed (id, title, contentLink, date, updated, link, read) VALUES (?, ?, ?, ?, ?, ?, ?);", values)
         self.db.commit()
 
-    def updateFeed(self, configdir, url, etag, modified, expiryTime=24, proxy=None, imageCache=False):
+    def updateFeed(self, configdir, url, etag, modified, expiryTime=24, proxy=None, imageCache=False, priority=0, postFeedUpdateFunc=None, *postFeedUpdateFuncArgs):
         currentTime = 0
         rows = self.db.execute("SELECT id, link FROM feed WHERE updated=0;")
         for row in rows:
-            currentTime = time.time()
-            id = row[0]
-            link = row[1]
-            f = urllib2.urlopen(link)
-            #entry["content"] = f.read()
-            html = f.read()
-            f.close()
-            soup = BeautifulSoup(html)
-            images = soup('img')
-            baseurl = link
-            for img in images:
-                filename = self.addImage(configdir, self.key, baseurl, img['src'], proxy=proxy)
-                img['src']=filename
-                self.db.execute("INSERT INTO images (id, imagePath) VALUES (?, ?);", (id, filename) )
+            try:
+                currentTime = time.time()
+                id = row[0]
+                link = row[1]
+                f = urllib2.urlopen(link)
+                #entry["content"] = f.read()
+                html = f.read()
+                f.close()
+                soup = BeautifulSoup(html)
+                images = soup('img')
+                baseurl = link
+                for img in images:
+                    filename = self.addImage(configdir, self.key, baseurl, img['src'], proxy=proxy)
+                    img['src']=filename
+                    self.db.execute("INSERT INTO images (id, imagePath) VALUES (?, ?);", (id, filename) )
+                    self.db.commit()
+                contentLink = configdir+self.key+".d/"+id+".html"
+                file = open(contentLink, "w")
+                file.write(soup.prettify())
+                file.close()
+                
+                self.db.execute("UPDATE feed SET read=0, contentLink=?, updated=? WHERE id=?;", (contentLink, time.time(), id) )
                 self.db.commit()
-            contentLink = configdir+self.key+".d/"+id+".html"
-            file = open(contentLink, "w")
-            file.write(soup.prettify())
-            file.close()
-            
-            self.db.execute("UPDATE feed SET read=0, contentLink=?, updated=? WHERE id=?;", (contentLink, time.time(), id) )
-            self.db.commit()
+            except:
+                logger.error("Error updating Archived Article: %s %s"
+                             % (link,traceback.format_exc(),))
         return (currentTime, None, None)
     
     def purgeReadArticles(self):
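Note on the change above: the body of ArchivedArticles.updateFeed is wrapped in a per-row try/except so that one failing archived article (dead link, parse error, disk error) no longer aborts the whole update pass; the failure is logged and the loop continues. The signature also gains priority, postFeedUpdateFunc and *postFeedUpdateFuncArgs, which are accepted but unused in this override, presumably to stay compatible with the updated base Feed.updateFeed signature. A minimal sketch of the same per-row error-isolation pattern is shown below; it assumes a module-level logger and the traceback import that rss_sqlite.py has elsewhere, and process_row() is a hypothetical stand-in for the per-article download/rewrite work, not feedingit code.

import logging
import time
import traceback

logger = logging.getLogger(__name__)

def process_row(row):
    # Hypothetical placeholder for the real work done per archived
    # article: fetch the page, rewrite image links, write the HTML
    # file and update the database row.
    pass

def update_all(rows):
    current_time = 0
    for row in rows:
        try:
            current_time = time.time()
            process_row(row)
        except Exception:
            # Log and keep going: one broken archived article should
            # not stop the remaining ones from being updated.
            logger.error("Error updating Archived Article: %s %s",
                         row, traceback.format_exc())
    return current_time

The sketch catches Exception rather than using a bare except as the patch does; a bare except also swallows KeyboardInterrupt and SystemExit, which is usually not wanted.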