#!/bin/sh
case "$1" in
+update)
+ dbus-send --print-reply --dest='org.marcoz.feedingit' --session /org/marcoz/feedingit/update org.marcoz.feedingit.UpdateAll
+ ;;
dbus)
- nice python /usr/share/feedingit/update_feeds.py
+ nice python /usr/share/feedingit/update_feeds.py --daemon
;;
*)
cd /usr/share/feedingit
* source package automatically created by stdeb 0.6.0+git
- -- Slocan <slocan@marcoz.org> Tue, 29 Nov 2011 21:32:12 -0800
+ -- Slocan <slocan@marcoz.org> Thu, 05 Jan 2012 20:58:44 -0800
Version: 0.1.0-1
Architecture: all
Maintainer: Slocan <slocan@marcoz.org>
-Installed-Size: 792
+Installed-Size: 816
Depends: python-pyside.qtgui, python-pyside.qtopengl, python-pyside.qtdeclarative, python-dbus, python-gconf
Breaks: python (<< 2.6)
Section: user/network
-4420c31f88de68fe6e1b7637abb06196 usr/bin/feedingit
-7fe75a25d71f563a6391a9dbebd259f8 usr/share/applications/feedingit.desktop
+ab6be1fce6e02f7f10857cd79755bf18 usr/bin/feedingit
+1af83cdf3d338e6460a92c00537c375b usr/share/applications/feedingit.desktop
eda8cc6ffe8d842d6dfe0244b01b3042 usr/share/dbus-1/services/feedingit_status.service
-bc91edfc49d3edeec67619bea0bfba5d usr/share/doc/feedingit/changelog.Debian.gz
+3f2da2596c2f5788fee074a589c9bbd0 usr/share/doc/feedingit/changelog.Debian.gz
bac2be6ae9673ee5096e20e8b714c9cd usr/share/feedingit/BeautifulSoup.py
8f6e980f2e1154103a90763be6c00dc4 usr/share/feedingit/XmlHandler.py
d41d8cd98f00b204e9800998ecf8427e usr/share/feedingit/__init__.py
-f35343aa0ea95526b6af4ed8193526c2 usr/share/feedingit/config.py
+3485a6a12a47b67c26e4134c558cc3c9 usr/share/feedingit/config.py
b4b00de5dccaf56d81a9dab1eeac63e1 usr/share/feedingit/debugging.py
fae02e730b76761d43a626fe19828d5e usr/share/feedingit/download.py
4db69341f53742ba38afb8173ae9ef32 usr/share/feedingit/feedingit.py
d9c0665dfdd5cf19f1529ce88af95134 usr/share/feedingit/opml.py
7c3358bb2e3cba866248e75556a159d3 usr/share/feedingit/opml_lib.py
4ef0e77dd5685db3a51306fd7ccf585f usr/share/feedingit/qml/AddFeed.qml
-af27062fdba0bc7a3df92116e8340d19 usr/share/feedingit/qml/ArticleDisplay.qml
-0931ce1681f6691f2e229c6a5bdcb993 usr/share/feedingit/qml/ArticleViewer.qml
+538dac5d47b6d46b428d71b8c579e419 usr/share/feedingit/qml/ArticleDelegate.qml
+6a0c407a7931a4b362bd07921f70c009 usr/share/feedingit/qml/ArticleDisplay.qml
+d6b1c6bb6e2d09cf95c1db69227ccdd2 usr/share/feedingit/qml/ArticleViewer.qml
15083e9a1fac05c8efaaa085dfabcbcb usr/share/feedingit/qml/Articles.qml
77bf6a1d2d0f265ee16c492886f96ede usr/share/feedingit/qml/AutomaticUpdate.qml
bd7579a3d822222caca98684212c4f42 usr/share/feedingit/qml/Categories.qml
7790a99425dd7c1046e6ae3b1ee72a03 usr/share/feedingit/qml/i18n/qml_en.qm
1674fcce45bcf3319e61d19a9adf4fdd usr/share/feedingit/qml/i18n/qml_en.ts
5e9106fb8689ca72675d28123b36c2a5 usr/share/feedingit/qml/main.qml
-e3b4184531f29bbf86d812f9b8adacc2 usr/share/feedingit/rss_sqlite.py
+d6a20bd58cfc71b071bc2de09bf1518f usr/share/feedingit/rss_sqlite.py
+6e88950ff9c416c362c6ca17cf4317f7 usr/share/feedingit/splash.jpg
721777a26cd2a5b8466ce2aa2b99fad7 usr/share/feedingit/update_feeds.py
6ccf12dc4379e91800ae8505b2e86082 usr/share/feedingit/updatedbus.py
-833ff79caab7c1fa89d6ff4a2f3bb3fd usr/share/feedingit/wc.py
+686f9bcf1180690c08f64b1ef6a10d97 usr/share/feedingit/wc.py
a30b3cb2decc0a3de4cafc18ca739d7a usr/share/icons/hicolor/80x80/apps/feedingit.png
3b48b22b6732560282d323eb8a638967 usr/share/pyshared/feedingit-0.1.0.egg-info
1ce7b7194658769bb4173134a725d1ce usr/share/python/runtime.d/feedingit.rtupdate
#!/bin/sh
case "$1" in
+update)
+ dbus-send --print-reply --dest='org.marcoz.feedingit' --session /org/marcoz/feedingit/update org.marcoz.feedingit.UpdateAll
+ ;;
dbus)
- nice python /usr/share/feedingit/update_feeds.py
+ nice python /usr/share/feedingit/update_feeds.py --daemon
;;
*)
cd /usr/share/feedingit
Version=1.0
Type=Application
Name=FeedingIt RSS Reader
-Exec=invoker --single-instance --type=e /usr/bin/feedingit
+Exec=invoker --single-instance --splash=/usr/share/feedingit/splash.jpg --type=e /usr/bin/python /usr/share/feedingit/feedingit.py
Icon=/usr/share/icons/hicolor/80x80/apps/feedingit.png
Categories=Network;News;
#
# Copyright (c) 2007-2008 INdT.
+# Copyright (c) 2011 Neal H. Walfield.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
import logging
logger = logging.getLogger(__name__)
-VERSION = "52"
+VERSION = "120"
section = "FeedingIt"
-ranges = { "updateInterval":[0.5, 1, 2, 4, 12, 24], "expiry":[24, 48, 72, 144, 288], "fontSize":range(12,24), "orientation":["Automatic", "Landscape", "Portrait"], "artFontSize":[10, 12, 14, 16, 18, 20], "feedsort":["Manual", "Most unread", "Least unread", "Most recent", "Least recent"] }
+ranges = { "updateInterval":[0.5, 1, 2, 4, 8, 12, 24], "expiry":[24, 48, 72, 144, 288], "fontSize":range(12,24), "orientation":["Automatic", "Landscape", "Portrait"], "artFontSize":[10, 12, 14, 16, 18, 20], "feedsort":["Manual", "Most unread", "Least unread", "Most recent", "Least recent"] }
titles = {"updateInterval":"Auto-update interval", "expiry":"Delete articles", "fontSize":"List font size", "orientation":"Display orientation", "artFontSize":"Article font size","feedsort":"Feed sort order"}
subtitles = {"updateInterval":"Every %s hours", "expiry":"After %s hours", "fontSize":"%s pixels", "orientation":"%s", "artFontSize":"%s pixels", "feedsort":"%s"}
heading('Updating')
- button = hildon.CheckButton(gtk.HILDON_SIZE_FINGER_HEIGHT)
- button.set_label("Automatically update feeds")
- button.set_active(self.config["autoupdate"])
- button.connect("toggled", self.button_toggled, "autoupdate")
- vbox.pack_start(button, expand=False)
- add_setting('updateInterval')
- add_setting('expiry')
+ label = gtk.Label(gtk.HILDON_SIZE_FINGER_HEIGHT)
+ label.set_label("Use Woodchuck network daemon, or the home-screen widget for automatic updates.")
+ label.set_line_wrap(True)
+ vbox.pack_start(label, expand=False)
+
+ try:
+ import woodchuck
+ woodchuck_installed = True
+ except ImportError:
+ woodchuck_installed = False
+
+ if not woodchuck_installed:
+ def install_woodchuck_clicked(button):
+ from FeedingIt import open_in_browser
+ open_in_browser("http://maemo.org/downloads/product/raw/Maemo5/murmeltier?get_installfile")
+
+ button = hildon.Button(gtk.HILDON_SIZE_FINGER_HEIGHT, hildon.BUTTON_ARRANGEMENT_VERTICAL)
+ button.set_label("Install Woodchuck")
+ button.connect("clicked", install_woodchuck_clicked)
+ button.set_alignment(0,0,1,1)
+ vbox.pack_start(button, expand=False)
+ else:
+ button = hildon.CheckButton(gtk.HILDON_SIZE_FINGER_HEIGHT)
+ button.set_label("Woodchuck-Based Automatic Update")
+ button.set_active(self.config["woodchuck"])
+ button.connect("toggled", self.button_toggled, "woodchuck")
+ vbox.pack_start(button, expand=False)
+ add_setting('updateInterval')
+ add_setting('expiry')
heading('Network')
button = hildon.CheckButton(gtk.HILDON_SIZE_FINGER_HEIGHT)
else:
self.config[configName] = False
#print "autoup", self.autoupdate
+
+ if configName == 'woodchuck':
+ try:
+ from wc import wc_disable_set
+ wc_disable_set(not self.config['woodchuck'])
+ except Exception:
+ logger.exception("Disabling Woodchuck")
+
self.saveConfig()
def selection_changed(self, selector, button, setting):
def loadConfig(self):
self.config = {}
+
+ configParser = RawConfigParser()
try:
- configParser = RawConfigParser()
configParser.read(self.configFilename)
- self.config["fontSize"] = configParser.getint(section, "fontSize")
- self.config["artFontSize"] = configParser.getint(section, "artFontSize")
- self.config["expiry"] = configParser.getint(section, "expiry")
- self.config["autoupdate"] = configParser.getboolean(section, "autoupdate")
- self.config["updateInterval"] = configParser.getfloat(section, "updateInterval")
- self.config["orientation"] = configParser.get(section, "orientation")
- self.config["imageCache"] = configParser.getboolean(section, "imageCache")
- except:
- self.config["fontSize"] = 17
- self.config["artFontSize"] = 14
- self.config["expiry"] = 24
- self.config["autoupdate"] = False
- self.config["updateInterval"] = 4
- self.config["orientation"] = "Automatic"
- self.config["imageCache"] = False
- try:
- self.config["proxy"] = configParser.getboolean(section, "proxy")
- except:
- self.config["proxy"] = True
- try:
- self.config["hidereadfeeds"] = configParser.getboolean(section, "hidereadfeeds")
- self.config["hidereadarticles"] = configParser.getboolean(section, "hidereadarticles")
- except:
- self.config["hidereadfeeds"] = False
- self.config["hidereadarticles"] = False
- try:
- self.config["extBrowser"] = configParser.getboolean(section, "extBrowser")
- except:
- self.config["extBrowser"] = False
- try:
- self.config["feedsort"] = configParser.get(section, "feedsort")
- except:
- self.config["feedsort"] = "Manual"
- try:
- self.config["theme"] = configParser.get(section, "theme")
- except:
- self.config["theme"] = True
-
+ except Exception:
+ logger.exception("Reading %s", self.configFilename)
+
+ # The function to use to fetch the parameter, the parameter's
+ # name and the default value.
+ values = ((configParser.getint, "fontSize", 17),
+ (configParser.getint, "artFontSize", 14),
+ (configParser.getint, "expiry", 24),
+ (configParser.getboolean, "autoupdate", False),
+ (configParser.getboolean, "woodchuck", True),
+ (configParser.getboolean, "askedAboutWoodchuck", False),
+ (configParser.getfloat, "updateInterval", 4),
+ (configParser.get, "orientation", "Automatic"),
+ (configParser.getboolean, "imageCache", False),
+ (configParser.getboolean, "proxy", True),
+ (configParser.getboolean, "hidereadfeeds", False),
+ (configParser.getboolean, "hidereadarticles", False),
+ (configParser.getboolean, "extBrowser", False),
+ (configParser.getboolean, "theme", True),
+ (configParser.get, "feedsort", "Manual"))
+
+ for fetcher, name, default in values:
+ try:
+ v = fetcher(section, name)
+ except Exception:
+ logger.exception("Reading config variable %s", name)
+ v = default
+ self.config[name] = v
+
def saveConfig(self):
configParser = RawConfigParser()
configParser.add_section(section)
configParser.set(section, 'expiry', str(self.config["expiry"]))
configParser.set(section, 'autoupdate', str(self.config["autoupdate"]))
configParser.set(section, 'updateInterval', str(self.config["updateInterval"]))
+ configParser.set(section, 'woodchuck', str(self.config["woodchuck"]))
+ configParser.set(section, 'askedAboutWoodchuck', str(self.config["askedAboutWoodchuck"]))
configParser.set(section, 'orientation', str(self.config["orientation"]))
configParser.set(section, 'imageCache', str(self.config["imageCache"]))
configParser.set(section, 'proxy', str(self.config["proxy"]))
return self.config["autoupdate"]
def setAutoUpdateEnabled(self, value):
self.config["autoupdate"] = value
+ def getWoodchuckEnabled(self):
+ return self.config["woodchuck"]
+ def getAskedAboutWoodchuck(self):
+ return self.config["askedAboutWoodchuck"]
+ def setAskedAboutWoodchuck(self, value):
+ self.config["askedAboutWoodchuck"] = value
+ self.saveConfig()
def getUpdateInterval(self):
return float(self.config["updateInterval"])
def getReadFont(self):
return "sans italic %s" % self.config["fontSize"]
def getUnreadFont(self):
return "sans %s" % self.config["fontSize"]
- def getOrientation(self, index):
+ def getOrientation(self):
return ranges["orientation"].index(self.config["orientation"])
def getOrientationChoices(self):
return ranges["orientation"]
Rectangle {
/*x: parent.width; height: parent.height;*/
- width: parent.width;
+ width: flickable.width;
height: parent.height
property alias zoomEnabled: slider.visible;
property alias value: slider.value;
id: flickable
//anchors.fill: screen;
height: parent.height;
- width: parent.width;
+ width: webView.width;
contentWidth: webView.width*webView.scale; //Math.max(screen.width,webView.width*webView.scale)
contentHeight: Math.max(articleViewer.height,webView.height*webView.scale)
//contentWidth: childrenRect.width; contentHeight: childrenRect.height
id: webView
//url: flipItem.url;
html: flipItem.html;
- preferredWidth: flickable.width
- preferredHeight: flickable.height
+ preferredWidth: articleView.width
+ //preferredHeight: articleView.height
//scale: 1.25;
transformOrigin: Item.TopLeft
scale: slider.value;
- settings.defaultFontSize: 24
+ //settings.defaultFontSize: 24
}
// onFlickStarted: {
width: parent.width;
height: updateBarArticles.visible? parent.height-updateBarArticles.height : parent.height;
visible: false; z:8
- //onCurrentIndexChanged: photosGridView.positionViewAtIndex(currentIndex, GridView.Contain)
+ onCurrentIndexChanged: articlesView.positionViewAtIndex(currentIndex, ListView.Contain)
highlightRangeMode: ListView.StrictlyEnforceRange; snapMode: ListView.SnapOneItem
//cacheBuffer: 5;
onMovementStarted: articleViewer.vertPanningEnabled=false;
font.pointSize: settings.articleListingTextSize
}
MouseArea { anchors.fill: listItem;
- onClicked: { articleView.positionViewAtIndex(index, ListView.Contain); articleView.visible = true; }
+ onClicked: { articleView.currentIndex = index; articleView.visible = true; }
}
}
id: viewer
Item {
id: flipItem;
- width: articleViewer.width; height: articleViewer.height;
+ width: articleDisplay.width;
+ height: articleView.height;
//property string url: (articleView.visible && Math.abs(articleView.currentIndex-index)<2) ? path: "";
property string html: controller.getArticle(articleViewer.feedid, articleid)
ArticleDisplay {
+ id: articleDisplay
zoomEnabled: articleViewer.zoomEnabled;
property bool vertPanningEnabled: articleViewer.vertPanningEnabled;
logger = logging.getLogger(__name__)
def getId(string):
+ if issubclass(string.__class__, unicode):
+ string = string.encode('utf8', 'replace')
+
return md5.new(string).hexdigest()
def download_callback(connection):
self.key = key
self.configdir = configdir
self.dir = "%s/%s.d" %(self.configdir, self.key)
- self.tls = threading.local ()
+ self.tls = threading.local()
if not isdir(self.dir):
mkdir(self.dir)
- if not isfile("%s/%s.db" %(self.dir, self.key)):
- self.db.execute("CREATE TABLE feed (id text, title text, contentLink text, date float, updated float, link text, read int);")
+ filename = "%s/%s.db" % (self.dir, self.key)
+ if not isfile(filename):
+ self.db.execute("CREATE TABLE feed (id text, title text, contentLink text, contentHash text, date float, updated float, link text, read int);")
self.db.execute("CREATE TABLE images (id text, imagePath text);")
self.db.commit()
+ else:
+ try:
+ self.db.execute("ALTER TABLE feed ADD COLUMN contentHash text")
+ self.db.commit()
+ except sqlite3.OperationalError, e:
+ if 'duplicate column name' in str(e):
+ pass
+ else:
+ logger.exception("Add column contentHash to %s", filename)
def addImage(self, configdir, key, baseurl, url, proxy=None, opener=None):
filename = configdir+key+".d/"+getId(url)
if(not(entry.has_key("id"))):
entry["id"] = None
content = self.extractContent(entry)
+ contentHash = getId(content)
object_size = len (content)
tmpEntry = {"title":entry["title"], "content":content,
"date":date, "link":entry["link"], "author":entry["author"], "id":entry["id"]}
id = self.generateUniqueId(tmpEntry)
current_version = self.db.execute(
- 'select date, ROWID from feed where id=?',
+ 'select date, ROWID, contentHash from feed where id=?',
(id,)).fetchone()
if (current_version is not None
- and current_version[0] == date):
+ # To detect updates, don't compare by date:
+ # compare by content.
+ #
+ # - If an article update is just a date change
+ # and the content remains the same, we don't
+ # want to register an update.
+ #
+ # - If an article's content changes but not the
+ # date, we want to recognize an update.
+ and current_version[2] == contentHash):
logger.debug("ALREADY DOWNLOADED %s (%s)"
% (entry["title"], entry["link"]))
- ## This article is already present in the feed listing. Update the "updated" time, so it doesn't expire
- self.db.execute("UPDATE feed SET updated=? WHERE id=?;",(currentTime,id))
- try:
- logger.debug("Updating already downloaded files for %s" %(id))
- filename = configdir+self.key+".d/"+id+".html"
- file = open(filename,"a")
- utime(filename, None)
- file.close()
- images = self.db.execute("SELECT imagePath FROM images where id=?;", (id, )).fetchall()
- for image in images:
- file = open(image[0],"a")
- utime(image[0], None)
- file.close()
- except:
- logger.debug("Error in refreshing images for %s" % (id))
+ ## This article is already present in the feed listing. Update the "updated" time, so it doesn't expire
+ self.db.execute("UPDATE feed SET updated=? WHERE id=?;",(currentTime,id))
+ try:
+ logger.debug("Updating already downloaded files for %s" %(id))
+ filename = configdir+self.key+".d/"+id+".html"
+ file = open(filename,"a")
+ utime(filename, None)
+ file.close()
+ images = self.db.execute("SELECT imagePath FROM images where id=?;", (id, )).fetchall()
+ for image in images:
+ file = open(image[0],"a")
+ utime(image[0], None)
+ file.close()
+ except:
+ logger.debug("Error in refreshing images for %s" % (id))
self.db.commit()
continue
# The version was updated. Mark it as unread.
logger.debug("UPDATED: %s (%s)"
% (entry["title"], entry["link"]))
- self.setEntryUnread(id)
updated_objects += 1
else:
logger.debug("NEW: %s (%s)"
soup = BeautifulSoup(self.getArticle(tmpEntry)) #tmpEntry["content"])
images = soup('img')
baseurl = tmpEntry["link"]
- #if not id in ids:
if imageCache and len(images) > 0:
self.serial_execution_lock.release ()
have_serial_execution_lock = False
values = {'id': id,
'title': tmpEntry["title"],
'contentLink': tmpEntry["contentLink"],
+ 'contentHash': contentHash,
'date': tmpEntry["date"],
'updated': currentTime,
'link': tmpEntry["link"],
def getContentLink(self, id):
return self.db.execute("SELECT contentLink FROM feed WHERE id=?;", (id,) ).fetchone()[0]
+ def getContentHash(self, id):
+ return self.db.execute("SELECT contentHash FROM feed WHERE id=?;", (id,) ).fetchone()[0]
+
def getExternalLink(self, id):
return self.db.execute("SELECT link FROM feed WHERE id=?;", (id,) ).fetchone()[0]
return text
def getContent(self, id):
- contentLink = self.db.execute("SELECT contentLink FROM feed WHERE id=?;", (id,)).fetchone()[0]
+ """
+ Return the content of the article with the specified ID. If
+ the content is not available, returns None.
+ """
+ contentLink = self.getContentLink(id)
try:
- file = open(self.entries[id]["contentLink"])
- content = file.read()
- file.close()
- except:
- content = "Content unavailable"
+ with open(contentLink, 'r') as file:
+ content = file.read()
+ except Exception:
+ logger.exception("Failed get content for %s: reading %s failed",
+ id, contentLink)
+ content = None
return content
def extractDate(self, entry):
self.db.execute("INSERT INTO feed (id, title, contentLink, date, updated, link, read) VALUES (?, ?, ?, ?, ?, ?, ?);", values)
self.db.commit()
- def updateFeed(self, configdir, url, etag, modified, expiryTime=24, proxy=None, imageCache=False):
+ # Feed.UpdateFeed calls this function.
+ def _updateFeed(self, configdir, url, etag, modified, expiryTime=24, proxy=None, imageCache=False, priority=0, postFeedUpdateFunc=None, *postFeedUpdateFuncArgs):
currentTime = 0
rows = self.db.execute("SELECT id, link FROM feed WHERE updated=0;")
for row in rows:
- currentTime = time.time()
- id = row[0]
- link = row[1]
- f = urllib2.urlopen(link)
- #entry["content"] = f.read()
- html = f.read()
- f.close()
- soup = BeautifulSoup(html)
- images = soup('img')
- baseurl = link
- for img in images:
- filename = self.addImage(configdir, self.key, baseurl, img['src'], proxy=proxy)
- img['src']=filename
- self.db.execute("INSERT INTO images (id, imagePath) VALUES (?, ?);", (id, filename) )
+ try:
+ currentTime = time.time()
+ id = row[0]
+ link = row[1]
+ f = urllib2.urlopen(link)
+ #entry["content"] = f.read()
+ html = f.read()
+ f.close()
+ soup = BeautifulSoup(html)
+ images = soup('img')
+ baseurl = link
+ for img in images:
+ filename = self.addImage(configdir, self.key, baseurl, img['src'], proxy=proxy)
+ img['src']=filename
+ self.db.execute("INSERT INTO images (id, imagePath) VALUES (?, ?);", (id, filename) )
+ self.db.commit()
+ contentLink = configdir+self.key+".d/"+id+".html"
+ file = open(contentLink, "w")
+ file.write(soup.prettify())
+ file.close()
+
+ self.db.execute("UPDATE feed SET read=0, contentLink=?, updated=? WHERE id=?;", (contentLink, time.time(), id) )
self.db.commit()
- contentLink = configdir+self.key+".d/"+id+".html"
- file = open(contentLink, "w")
- file.write(soup.prettify())
- file.close()
-
- self.db.execute("UPDATE feed SET read=0, contentLink=?, updated=? WHERE id=?;", (contentLink, time.time(), id) )
- self.db.commit()
- return (currentTime, None, None)
+ except:
+ logger.error("Error updating Archived Article: %s %s"
+ % (link,traceback.format_exc(),))
+
+ if postFeedUpdateFunc is not None:
+ postFeedUpdateFunc (self.key, currentTime, None, None, None,
+ *postFeedUpdateFuncArgs)
def purgeReadArticles(self):
rows = self.db.execute("SELECT id FROM feed WHERE read=1;")
# state.
try:
updater = os.path.basename(sys.argv[0]) == 'update_feeds.py'
- wc_init (self, True if updater else False)
+ wc_init(config, self, True if updater else False)
if wc().available() and updater:
# The list of known streams.
streams = wc().streams_list ()
logger.debug(
"Registering previously unknown channel: %s (%s)"
% (key, title,))
- # Use a default refresh interval of 6 hours.
- wc().stream_register (key, title, 6 * 60 * 60)
+ wc().stream_register(
+ key, title,
+ self.config.getUpdateInterval() * 60 * 60)
else:
# Make sure the human readable name is up to date.
if wc()[key].human_readable_name != title:
wc()[key].human_readable_name = title
stream_ids.remove (key)
+ wc()[key].freshness \
+ = self.config.getUpdateInterval() * 60 * 60
# Unregister any streams that are no longer subscribed to.
for id in stream_ids:
logger.debug("Unregistering %s" % (id,))
- w.stream_unregister (id)
+ wc().stream_unregister (id)
except Exception:
logger.exception("Registering streams with Woodchuck")
def getCategoryTitle(self, id):
return self.lookup('categories', 'title', id)
-
+
def getCategoryUnread(self, id):
count = 0
for key in self.getListOfFeeds(category=id):
human_readable_name=title,
freshness=6*60*60)
+ self.cache_invalidate('feeds')
return True
else:
return False
if wc().available ():
try:
del wc()[key]
- except KeyError:
+ except KeyError, woodchuck.Error:
logger.debug("Removing unregistered feed %s failed" % (key,))
rank = self.db.execute("SELECT rank FROM feeds WHERE id=?;", (key,) ).fetchone()[0]
% traceback.format_exc ())
woodchuck_imported = False
class PyWoodchuck (object):
- def available(self):
+ def available(self, *args, **kwargs):
return False
woodchuck = None
refresh_interval = 6 * 60 * 60
class mywoodchuck (PyWoodchuck):
- def __init__(self, listing, human_readable_name, identifier,
+ def __init__(self, config, listing, human_readable_name, identifier,
request_feedback):
try:
PyWoodchuck.__init__ (self, human_readable_name, identifier,
self.available = self.not_available
return
+ self.config = config
self.listing = listing
- def not_available(self):
+ try:
+ self.enabled = config.getWoodchuckEnabled()
+ except Exception:
+ logging.exception("Setting enabled")
+
+ def available(self, check_config=True):
+ if not PyWoodchuck.available(self):
+ return False
+ if check_config:
+ return self.config.getWoodchuckEnabled()
+ return True
+
+ def not_available(self, *args, **kwargs):
return False
# Woodchuck upcalls.
str(e)))
_w = None
-def wc_init(listing, request_feedback=False):
+def wc_init(config, listing, request_feedback=False):
"""Connect to the woodchuck server and initialize any state."""
global _w
assert _w is None
- _w = mywoodchuck (listing, "FeedingIt", "org.marcoz.feedingit",
+ _w = mywoodchuck (config, listing, "FeedingIt", "org.marcoz.feedingit",
request_feedback)
if not woodchuck_imported or not _w.available ():
else:
logger.debug("Woodchuck appears to be available.")
+def wc_disable_set(disable=True):
+ """Disable Woodchuck."""
+ if disable:
+ logger.info("Disabling Woodchuck")
+ else:
+ logger.info("Enabling Woodchuck")
+
+ global _w
+ if _w is None:
+ logging.info("Woodchuck not loaded. Not doing anything.")
+ return
+
+ if not _w.available(check_config=False):
+ logging.info("Woodchuck not available. Not doing anything.")
+ return
+
+ try:
+ _w.enabled = not disable
+ except Exception:
+ logger.exception("Disabling Woodchuck")
+
def wc():
"""Return the Woodchuck singleton."""
global _w
#!/usr/bin/make -f
# This file was automatically generated by stdeb 0.6.0+git at
-# Tue, 29 Nov 2011 21:32:12 -0800
+# Thu, 05 Jan 2012 20:58:44 -0800
%:
dh $@ --with python2 --buildsystem=python_distutils
#!/bin/sh
case "$1" in
+update)
+ dbus-send --print-reply --dest='org.marcoz.feedingit' --session /org/marcoz/feedingit/update org.marcoz.feedingit.UpdateAll
+ ;;
dbus)
- nice python /usr/share/feedingit/update_feeds.py
+ nice python /usr/share/feedingit/update_feeds.py --daemon
;;
*)
cd /usr/share/feedingit
Version=1.0
Type=Application
Name=FeedingIt RSS Reader
-Exec=invoker --single-instance --type=e /usr/bin/feedingit
+Exec=invoker --single-instance --splash=/usr/share/feedingit/splash.jpg --type=e /usr/bin/python /usr/share/feedingit/feedingit.py
Icon=/usr/share/icons/hicolor/80x80/apps/feedingit.png
Categories=Network;News;
#
# Copyright (c) 2007-2008 INdT.
+# Copyright (c) 2011 Neal H. Walfield.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
import logging
logger = logging.getLogger(__name__)
-VERSION = "52"
+VERSION = "120"
section = "FeedingIt"
-ranges = { "updateInterval":[0.5, 1, 2, 4, 12, 24], "expiry":[24, 48, 72, 144, 288], "fontSize":range(12,24), "orientation":["Automatic", "Landscape", "Portrait"], "artFontSize":[10, 12, 14, 16, 18, 20], "feedsort":["Manual", "Most unread", "Least unread", "Most recent", "Least recent"] }
+ranges = { "updateInterval":[0.5, 1, 2, 4, 8, 12, 24], "expiry":[24, 48, 72, 144, 288], "fontSize":range(12,24), "orientation":["Automatic", "Landscape", "Portrait"], "artFontSize":[10, 12, 14, 16, 18, 20], "feedsort":["Manual", "Most unread", "Least unread", "Most recent", "Least recent"] }
titles = {"updateInterval":"Auto-update interval", "expiry":"Delete articles", "fontSize":"List font size", "orientation":"Display orientation", "artFontSize":"Article font size","feedsort":"Feed sort order"}
subtitles = {"updateInterval":"Every %s hours", "expiry":"After %s hours", "fontSize":"%s pixels", "orientation":"%s", "artFontSize":"%s pixels", "feedsort":"%s"}
heading('Updating')
- button = hildon.CheckButton(gtk.HILDON_SIZE_FINGER_HEIGHT)
- button.set_label("Automatically update feeds")
- button.set_active(self.config["autoupdate"])
- button.connect("toggled", self.button_toggled, "autoupdate")
- vbox.pack_start(button, expand=False)
- add_setting('updateInterval')
- add_setting('expiry')
+ label = gtk.Label(gtk.HILDON_SIZE_FINGER_HEIGHT)
+ label.set_label("Use Woodchuck network daemon, or the home-screen widget for automatic updates.")
+ label.set_line_wrap(True)
+ vbox.pack_start(label, expand=False)
+
+ try:
+ import woodchuck
+ woodchuck_installed = True
+ except ImportError:
+ woodchuck_installed = False
+
+ if not woodchuck_installed:
+ def install_woodchuck_clicked(button):
+ from FeedingIt import open_in_browser
+ open_in_browser("http://maemo.org/downloads/product/raw/Maemo5/murmeltier?get_installfile")
+
+ button = hildon.Button(gtk.HILDON_SIZE_FINGER_HEIGHT, hildon.BUTTON_ARRANGEMENT_VERTICAL)
+ button.set_label("Install Woodchuck")
+ button.connect("clicked", install_woodchuck_clicked)
+ button.set_alignment(0,0,1,1)
+ vbox.pack_start(button, expand=False)
+ else:
+ button = hildon.CheckButton(gtk.HILDON_SIZE_FINGER_HEIGHT)
+ button.set_label("Woodchuck-Based Automatic Update")
+ button.set_active(self.config["woodchuck"])
+ button.connect("toggled", self.button_toggled, "woodchuck")
+ vbox.pack_start(button, expand=False)
+ add_setting('updateInterval')
+ add_setting('expiry')
heading('Network')
button = hildon.CheckButton(gtk.HILDON_SIZE_FINGER_HEIGHT)
else:
self.config[configName] = False
#print "autoup", self.autoupdate
+
+ if configName == 'woodchuck':
+ try:
+ from wc import wc_disable_set
+ wc_disable_set(not self.config['woodchuck'])
+ except Exception:
+ logger.exception("Disabling Woodchuck")
+
self.saveConfig()
def selection_changed(self, selector, button, setting):
def loadConfig(self):
self.config = {}
+
+ configParser = RawConfigParser()
try:
- configParser = RawConfigParser()
configParser.read(self.configFilename)
- self.config["fontSize"] = configParser.getint(section, "fontSize")
- self.config["artFontSize"] = configParser.getint(section, "artFontSize")
- self.config["expiry"] = configParser.getint(section, "expiry")
- self.config["autoupdate"] = configParser.getboolean(section, "autoupdate")
- self.config["updateInterval"] = configParser.getfloat(section, "updateInterval")
- self.config["orientation"] = configParser.get(section, "orientation")
- self.config["imageCache"] = configParser.getboolean(section, "imageCache")
- except:
- self.config["fontSize"] = 17
- self.config["artFontSize"] = 14
- self.config["expiry"] = 24
- self.config["autoupdate"] = False
- self.config["updateInterval"] = 4
- self.config["orientation"] = "Automatic"
- self.config["imageCache"] = False
- try:
- self.config["proxy"] = configParser.getboolean(section, "proxy")
- except:
- self.config["proxy"] = True
- try:
- self.config["hidereadfeeds"] = configParser.getboolean(section, "hidereadfeeds")
- self.config["hidereadarticles"] = configParser.getboolean(section, "hidereadarticles")
- except:
- self.config["hidereadfeeds"] = False
- self.config["hidereadarticles"] = False
- try:
- self.config["extBrowser"] = configParser.getboolean(section, "extBrowser")
- except:
- self.config["extBrowser"] = False
- try:
- self.config["feedsort"] = configParser.get(section, "feedsort")
- except:
- self.config["feedsort"] = "Manual"
- try:
- self.config["theme"] = configParser.get(section, "theme")
- except:
- self.config["theme"] = True
-
+ except Exception:
+ logger.exception("Reading %s", self.configFilename)
+
+ # The function to use to fetch the parameter, the parameter's
+ # name and the default value.
+ values = ((configParser.getint, "fontSize", 17),
+ (configParser.getint, "artFontSize", 14),
+ (configParser.getint, "expiry", 24),
+ (configParser.getboolean, "autoupdate", False),
+ (configParser.getboolean, "woodchuck", True),
+ (configParser.getboolean, "askedAboutWoodchuck", False),
+ (configParser.getfloat, "updateInterval", 4),
+ (configParser.get, "orientation", "Automatic"),
+ (configParser.getboolean, "imageCache", False),
+ (configParser.getboolean, "proxy", True),
+ (configParser.getboolean, "hidereadfeeds", False),
+ (configParser.getboolean, "hidereadarticles", False),
+ (configParser.getboolean, "extBrowser", False),
+ (configParser.getboolean, "theme", True),
+ (configParser.get, "feedsort", "Manual"))
+
+ for fetcher, name, default in values:
+ try:
+ v = fetcher(section, name)
+ except Exception:
+ logger.exception("Reading config variable %s", name)
+ v = default
+ self.config[name] = v
+
def saveConfig(self):
configParser = RawConfigParser()
configParser.add_section(section)
configParser.set(section, 'expiry', str(self.config["expiry"]))
configParser.set(section, 'autoupdate', str(self.config["autoupdate"]))
configParser.set(section, 'updateInterval', str(self.config["updateInterval"]))
+ configParser.set(section, 'woodchuck', str(self.config["woodchuck"]))
+ configParser.set(section, 'askedAboutWoodchuck', str(self.config["askedAboutWoodchuck"]))
configParser.set(section, 'orientation', str(self.config["orientation"]))
configParser.set(section, 'imageCache', str(self.config["imageCache"]))
configParser.set(section, 'proxy', str(self.config["proxy"]))
return self.config["autoupdate"]
def setAutoUpdateEnabled(self, value):
self.config["autoupdate"] = value
+ def getWoodchuckEnabled(self):
+ return self.config["woodchuck"]
+ def getAskedAboutWoodchuck(self):
+ return self.config["askedAboutWoodchuck"]
+ def setAskedAboutWoodchuck(self, value):
+ self.config["askedAboutWoodchuck"] = value
+ self.saveConfig()
def getUpdateInterval(self):
return float(self.config["updateInterval"])
def getReadFont(self):
return "sans italic %s" % self.config["fontSize"]
def getUnreadFont(self):
return "sans %s" % self.config["fontSize"]
- def getOrientation(self, index):
+ def getOrientation(self):
return ranges["orientation"].index(self.config["orientation"])
def getOrientationChoices(self):
return ranges["orientation"]
logger = logging.getLogger(__name__)
def getId(string):
+ if issubclass(string.__class__, unicode):
+ string = string.encode('utf8', 'replace')
+
return md5.new(string).hexdigest()
def download_callback(connection):
self.key = key
self.configdir = configdir
self.dir = "%s/%s.d" %(self.configdir, self.key)
- self.tls = threading.local ()
+ self.tls = threading.local()
if not isdir(self.dir):
mkdir(self.dir)
- if not isfile("%s/%s.db" %(self.dir, self.key)):
- self.db.execute("CREATE TABLE feed (id text, title text, contentLink text, date float, updated float, link text, read int);")
+ filename = "%s/%s.db" % (self.dir, self.key)
+ if not isfile(filename):
+ self.db.execute("CREATE TABLE feed (id text, title text, contentLink text, contentHash text, date float, updated float, link text, read int);")
self.db.execute("CREATE TABLE images (id text, imagePath text);")
self.db.commit()
+ else:
+ try:
+ self.db.execute("ALTER TABLE feed ADD COLUMN contentHash text")
+ self.db.commit()
+ except sqlite3.OperationalError, e:
+ if 'duplicate column name' in str(e):
+ pass
+ else:
+ logger.exception("Add column contentHash to %s", filename)
def addImage(self, configdir, key, baseurl, url, proxy=None, opener=None):
filename = configdir+key+".d/"+getId(url)
if(not(entry.has_key("id"))):
entry["id"] = None
content = self.extractContent(entry)
+ contentHash = getId(content)
object_size = len (content)
tmpEntry = {"title":entry["title"], "content":content,
"date":date, "link":entry["link"], "author":entry["author"], "id":entry["id"]}
id = self.generateUniqueId(tmpEntry)
current_version = self.db.execute(
- 'select date, ROWID from feed where id=?',
+ 'select date, ROWID, contentHash from feed where id=?',
(id,)).fetchone()
if (current_version is not None
- and current_version[0] == date):
+ # To detect updates, don't compare by date:
+ # compare by content.
+ #
+ # - If an article update is just a date change
+ # and the content remains the same, we don't
+ # want to register an update.
+ #
+ # - If an article's content changes but not the
+ # date, we want to recognize an update.
+ and current_version[2] == contentHash):
logger.debug("ALREADY DOWNLOADED %s (%s)"
% (entry["title"], entry["link"]))
- ## This article is already present in the feed listing. Update the "updated" time, so it doesn't expire
- self.db.execute("UPDATE feed SET updated=? WHERE id=?;",(currentTime,id))
- try:
- logger.debug("Updating already downloaded files for %s" %(id))
- filename = configdir+self.key+".d/"+id+".html"
- file = open(filename,"a")
- utime(filename, None)
- file.close()
- images = self.db.execute("SELECT imagePath FROM images where id=?;", (id, )).fetchall()
- for image in images:
- file = open(image[0],"a")
- utime(image[0], None)
- file.close()
- except:
- logger.debug("Error in refreshing images for %s" % (id))
+ ## This article is already present in the feed listing. Update the "updated" time, so it doesn't expire
+ self.db.execute("UPDATE feed SET updated=? WHERE id=?;",(currentTime,id))
+ try:
+ logger.debug("Updating already downloaded files for %s" %(id))
+ filename = configdir+self.key+".d/"+id+".html"
+ file = open(filename,"a")
+ utime(filename, None)
+ file.close()
+ images = self.db.execute("SELECT imagePath FROM images where id=?;", (id, )).fetchall()
+ for image in images:
+ file = open(image[0],"a")
+ utime(image[0], None)
+ file.close()
+ except:
+ logger.debug("Error in refreshing images for %s" % (id))
self.db.commit()
continue
# The version was updated. Mark it as unread.
logger.debug("UPDATED: %s (%s)"
% (entry["title"], entry["link"]))
- self.setEntryUnread(id)
updated_objects += 1
else:
logger.debug("NEW: %s (%s)"
soup = BeautifulSoup(self.getArticle(tmpEntry)) #tmpEntry["content"])
images = soup('img')
baseurl = tmpEntry["link"]
- #if not id in ids:
if imageCache and len(images) > 0:
self.serial_execution_lock.release ()
have_serial_execution_lock = False
values = {'id': id,
'title': tmpEntry["title"],
'contentLink': tmpEntry["contentLink"],
+ 'contentHash': contentHash,
'date': tmpEntry["date"],
'updated': currentTime,
'link': tmpEntry["link"],
def getContentLink(self, id):
return self.db.execute("SELECT contentLink FROM feed WHERE id=?;", (id,) ).fetchone()[0]
+ def getContentHash(self, id):
+ return self.db.execute("SELECT contentHash FROM feed WHERE id=?;", (id,) ).fetchone()[0]
+
def getExternalLink(self, id):
return self.db.execute("SELECT link FROM feed WHERE id=?;", (id,) ).fetchone()[0]
return text
def getContent(self, id):
- contentLink = self.db.execute("SELECT contentLink FROM feed WHERE id=?;", (id,)).fetchone()[0]
+ """
+ Return the content of the article with the specified ID. If
+ the content is not available, returns None.
+ """
+ contentLink = self.getContentLink(id)
try:
- file = open(self.entries[id]["contentLink"])
- content = file.read()
- file.close()
- except:
- content = "Content unavailable"
+ with open(contentLink, 'r') as file:
+ content = file.read()
+ except Exception:
+ logger.exception("Failed get content for %s: reading %s failed",
+ id, contentLink)
+ content = None
return content
def extractDate(self, entry):
self.db.execute("INSERT INTO feed (id, title, contentLink, date, updated, link, read) VALUES (?, ?, ?, ?, ?, ?, ?);", values)
self.db.commit()
- def updateFeed(self, configdir, url, etag, modified, expiryTime=24, proxy=None, imageCache=False):
+ # Feed.UpdateFeed calls this function.
+ def _updateFeed(self, configdir, url, etag, modified, expiryTime=24, proxy=None, imageCache=False, priority=0, postFeedUpdateFunc=None, *postFeedUpdateFuncArgs):
currentTime = 0
rows = self.db.execute("SELECT id, link FROM feed WHERE updated=0;")
for row in rows:
- currentTime = time.time()
- id = row[0]
- link = row[1]
- f = urllib2.urlopen(link)
- #entry["content"] = f.read()
- html = f.read()
- f.close()
- soup = BeautifulSoup(html)
- images = soup('img')
- baseurl = link
- for img in images:
- filename = self.addImage(configdir, self.key, baseurl, img['src'], proxy=proxy)
- img['src']=filename
- self.db.execute("INSERT INTO images (id, imagePath) VALUES (?, ?);", (id, filename) )
+ try:
+ currentTime = time.time()
+ id = row[0]
+ link = row[1]
+ f = urllib2.urlopen(link)
+ #entry["content"] = f.read()
+ html = f.read()
+ f.close()
+ soup = BeautifulSoup(html)
+ images = soup('img')
+ baseurl = link
+ for img in images:
+ filename = self.addImage(configdir, self.key, baseurl, img['src'], proxy=proxy)
+ img['src']=filename
+ self.db.execute("INSERT INTO images (id, imagePath) VALUES (?, ?);", (id, filename) )
+ self.db.commit()
+ contentLink = configdir+self.key+".d/"+id+".html"
+ file = open(contentLink, "w")
+ file.write(soup.prettify())
+ file.close()
+
+ self.db.execute("UPDATE feed SET read=0, contentLink=?, updated=? WHERE id=?;", (contentLink, time.time(), id) )
self.db.commit()
- contentLink = configdir+self.key+".d/"+id+".html"
- file = open(contentLink, "w")
- file.write(soup.prettify())
- file.close()
-
- self.db.execute("UPDATE feed SET read=0, contentLink=?, updated=? WHERE id=?;", (contentLink, time.time(), id) )
- self.db.commit()
- return (currentTime, None, None)
+ except:
+ logger.error("Error updating Archived Article: %s %s"
+ % (link,traceback.format_exc(),))
+
+ if postFeedUpdateFunc is not None:
+ postFeedUpdateFunc (self.key, currentTime, None, None, None,
+ *postFeedUpdateFuncArgs)
def purgeReadArticles(self):
rows = self.db.execute("SELECT id FROM feed WHERE read=1;")
# state.
try:
updater = os.path.basename(sys.argv[0]) == 'update_feeds.py'
- wc_init (self, True if updater else False)
+ wc_init(config, self, True if updater else False)
if wc().available() and updater:
# The list of known streams.
streams = wc().streams_list ()
logger.debug(
"Registering previously unknown channel: %s (%s)"
% (key, title,))
- # Use a default refresh interval of 6 hours.
- wc().stream_register (key, title, 6 * 60 * 60)
+ wc().stream_register(
+ key, title,
+ self.config.getUpdateInterval() * 60 * 60)
else:
# Make sure the human readable name is up to date.
if wc()[key].human_readable_name != title:
wc()[key].human_readable_name = title
stream_ids.remove (key)
+ wc()[key].freshness \
+ = self.config.getUpdateInterval() * 60 * 60
# Unregister any streams that are no longer subscribed to.
for id in stream_ids:
logger.debug("Unregistering %s" % (id,))
- w.stream_unregister (id)
+ wc().stream_unregister (id)
except Exception:
logger.exception("Registering streams with Woodchuck")
def getCategoryTitle(self, id):
return self.lookup('categories', 'title', id)
-
+
def getCategoryUnread(self, id):
count = 0
for key in self.getListOfFeeds(category=id):
human_readable_name=title,
freshness=6*60*60)
+ self.cache_invalidate('feeds')
return True
else:
return False
if wc().available ():
try:
del wc()[key]
- except KeyError:
+ except (KeyError, woodchuck.Error):
logger.debug("Removing unregistered feed %s failed" % (key,))
rank = self.db.execute("SELECT rank FROM feeds WHERE id=?;", (key,) ).fetchone()[0]
% traceback.format_exc ())
woodchuck_imported = False
class PyWoodchuck (object):
- def available(self):
+ def available(self, *args, **kwargs):
return False
woodchuck = None
refresh_interval = 6 * 60 * 60
class mywoodchuck (PyWoodchuck):
- def __init__(self, listing, human_readable_name, identifier,
+ def __init__(self, config, listing, human_readable_name, identifier,
request_feedback):
try:
PyWoodchuck.__init__ (self, human_readable_name, identifier,
self.available = self.not_available
return
+ self.config = config
self.listing = listing
- def not_available(self):
+ try:
+ self.enabled = config.getWoodchuckEnabled()
+ except Exception:
+ logger.exception("Setting enabled")
+
+ def available(self, check_config=True):
+ if not PyWoodchuck.available(self):
+ return False
+ if check_config:
+ return self.config.getWoodchuckEnabled()
+ return True
+
+ def not_available(self, *args, **kwargs):
return False
# Woodchuck upcalls.
str(e)))
_w = None
-def wc_init(listing, request_feedback=False):
+def wc_init(config, listing, request_feedback=False):
"""Connect to the woodchuck server and initialize any state."""
global _w
assert _w is None
- _w = mywoodchuck (listing, "FeedingIt", "org.marcoz.feedingit",
+ _w = mywoodchuck (config, listing, "FeedingIt", "org.marcoz.feedingit",
request_feedback)
if not woodchuck_imported or not _w.available ():
else:
logger.debug("Woodchuck appears to be available.")
+def wc_disable_set(disable=True):
+ """Disable Woodchuck."""
+ if disable:
+ logger.info("Disabling Woodchuck")
+ else:
+ logger.info("Enabling Woodchuck")
+
+ global _w
+ if _w is None:
+ logger.info("Woodchuck not loaded. Not doing anything.")
+ return
+
+ if not _w.available(check_config=False):
+ logger.info("Woodchuck not available. Not doing anything.")
+ return
+
+ try:
+ _w.enabled = not disable
+ except Exception:
+ logger.exception("Disabling Woodchuck")
+
def wc():
"""Return the Woodchuck singleton."""
global _w
Rectangle {
/*x: parent.width; height: parent.height;*/
- width: parent.width;
+ width: flickable.width;
height: parent.height
property alias zoomEnabled: slider.visible;
property alias value: slider.value;
id: flickable
//anchors.fill: screen;
height: parent.height;
- width: parent.width;
+ width: webView.width;
contentWidth: webView.width*webView.scale; //Math.max(screen.width,webView.width*webView.scale)
contentHeight: Math.max(articleViewer.height,webView.height*webView.scale)
//contentWidth: childrenRect.width; contentHeight: childrenRect.height
id: webView
//url: flipItem.url;
html: flipItem.html;
- preferredWidth: flickable.width
- preferredHeight: flickable.height
+ preferredWidth: articleView.width
+ //preferredHeight: articleView.height
//scale: 1.25;
transformOrigin: Item.TopLeft
scale: slider.value;
- settings.defaultFontSize: 24
+ //settings.defaultFontSize: 24
}
// onFlickStarted: {
width: parent.width;
height: updateBarArticles.visible? parent.height-updateBarArticles.height : parent.height;
visible: false; z:8
- //onCurrentIndexChanged: photosGridView.positionViewAtIndex(currentIndex, GridView.Contain)
+ onCurrentIndexChanged: articlesView.positionViewAtIndex(currentIndex, ListView.Contain)
highlightRangeMode: ListView.StrictlyEnforceRange; snapMode: ListView.SnapOneItem
//cacheBuffer: 5;
onMovementStarted: articleViewer.vertPanningEnabled=false;
font.pointSize: settings.articleListingTextSize
}
MouseArea { anchors.fill: listItem;
- onClicked: { articleView.positionViewAtIndex(index, ListView.Contain); articleView.visible = true; }
+ onClicked: { articleView.currentIndex = index; articleView.visible = true; }
}
}
id: viewer
Item {
id: flipItem;
- width: articleViewer.width; height: articleViewer.height;
+ width: articleDisplay.width;
+ height: articleView.height;
//property string url: (articleView.visible && Math.abs(articleView.currentIndex-index)<2) ? path: "";
property string html: controller.getArticle(articleViewer.feedid, articleid)
ArticleDisplay {
+ id: articleDisplay
zoomEnabled: articleViewer.zoomEnabled;
property bool vertPanningEnabled: articleViewer.vertPanningEnabled;
('share/feedingit/qml/common/images', glob.glob('qml/common/images/*')),
('share/feedingit/qml/i18n', glob.glob('qml/i18n/*')),
('share/dbus-1/services', ['feedingit_status.service']),
+ ('share/feedingit', ['splash.jpg']),
('share/feedingit', glob.glob('pysrc/*.py')) ],)
Standards-Version: 3.9.1
Build-Depends: python-all (>= 2.6.6-3), debhelper (>= 7.4.3)
Checksums-Sha1:
- f72424b24806c14a04a45be31d817a954c29b61c 185518 feedingit_0.1.0.orig.tar.gz
- b51d6818e8859ac6d5bcf5bca329dd4f776597cd 888 feedingit_0.1.0-1.debian.tar.gz
+ e22e0955ddccc6762aa03b7d1f5baebacebb0ebb 199505 feedingit_0.1.0.orig.tar.gz
+ 883b59ceea4aa57b5c83cbe38b679feeb87a2c79 886 feedingit_0.1.0-1.debian.tar.gz
Checksums-Sha256:
- 70d0ed38abbf308935ec2f9dafc403ba3186bd9205aba0214c41a4c93bb4cead 185518 feedingit_0.1.0.orig.tar.gz
- 95f8ac99f327887646ab60d5a9ff0b2331450b7041795db56a8a87563f5ad1e6 888 feedingit_0.1.0-1.debian.tar.gz
+ edc0b7c4b001d19e3ecf9615642bffdddcdcabe375d6cabdeb4dfbe395303980 199505 feedingit_0.1.0.orig.tar.gz
+ 27904d190182f50bca80bcdbd18773325ab7b79cccede4659f3ec042e3582753 886 feedingit_0.1.0-1.debian.tar.gz
Files:
- 5c60aa33afabe2253b807b8990358c05 185518 feedingit_0.1.0.orig.tar.gz
- d79f2e7d466d8781614e7c1e468f1c57 888 feedingit_0.1.0-1.debian.tar.gz
+ 25b45241279c8c856489cd0d6a13ffab 199505 feedingit_0.1.0.orig.tar.gz
+ f93ba4e3ec741aabbd3a0c320f9862fa 886 feedingit_0.1.0-1.debian.tar.gz
Format: 1.8
-Date: Tue, 29 Nov 2011 21:32:12 -0800
+Date: Thu, 05 Jan 2012 20:58:44 -0800
Source: feedingit
Binary: feedingit
Architecture: all
.
* source package automatically created by stdeb 0.6.0+git
Checksums-Sha1:
- f3e7a0dd9894fb979b10aed034057eba7f56e4f0 172448 feedingit_0.1.0-1_all.deb
+ 147b11cafcf8b0c652325c30dcb84540795f9e59 187912 feedingit_0.1.0-1_all.deb
Checksums-Sha256:
- 582d42f71bb73dd510bfa63f925a080387ffa6eeb0e6f15c9c7ce90b87e63f5b 172448 feedingit_0.1.0-1_all.deb
+ fe4e11203b4e3cc25ec905172cc0c5acd92a236b2492a547ff088bbece0f243a 187912 feedingit_0.1.0-1_all.deb
Files:
- c9d015476c9e657e3aa457b1f5b0de3b 172448 user/network optional feedingit_0.1.0-1_all.deb
+ a91028946c0bc0b43bb2645a9a32f972 187912 user/network optional feedingit_0.1.0-1_all.deb
Rectangle {
/*x: parent.width; height: parent.height;*/
- width: parent.width;
+ width: flickable.width;
height: parent.height
property alias zoomEnabled: slider.visible;
property alias value: slider.value;
id: flickable
//anchors.fill: screen;
height: parent.height;
- width: parent.width;
+ width: webView.width;
contentWidth: webView.width*webView.scale; //Math.max(screen.width,webView.width*webView.scale)
contentHeight: Math.max(articleViewer.height,webView.height*webView.scale)
//contentWidth: childrenRect.width; contentHeight: childrenRect.height
id: webView
//url: flipItem.url;
html: flipItem.html;
- preferredWidth: flickable.width
- preferredHeight: flickable.height
+ preferredWidth: articleView.width
+ //preferredHeight: articleView.height
//scale: 1.25;
transformOrigin: Item.TopLeft
scale: slider.value;
- settings.defaultFontSize: 24
+ //settings.defaultFontSize: 24
}
// onFlickStarted: {
id: viewer
Item {
id: flipItem;
- width: articleViewer.width; height: articleViewer.height;
+ width: articleDisplay.width;
+ height: articleView.height;
//property string url: (articleView.visible && Math.abs(articleView.currentIndex-index)<2) ? path: "";
property string html: controller.getArticle(articleViewer.feedid, articleid)
ArticleDisplay {
+ id: articleDisplay
zoomEnabled: articleViewer.zoomEnabled;
property bool vertPanningEnabled: articleViewer.vertPanningEnabled;