Making pickled caches not block execution
diff --git a/src/gvoice/conversations.py b/src/gvoice/conversations.py
index 86697ea..64dafb8 100644
@@ -14,15 +14,20 @@ except ImportError:
 import constants
 import util.coroutines as coroutines
 import util.misc as misc_utils
+import util.go_utils as gobject_utils
 
 
 _moduleLogger = logging.getLogger(__name__)
 
 
+class ConversationError(RuntimeError):
+
+       pass
+
+
 class Conversations(object):
 
        OLDEST_COMPATIBLE_FORMAT_VERSION = misc_utils.parse_version("0.8.0")
-       OLDEST_MESSAGE_WINDOW = datetime.timedelta(days=60)
 
        def __init__(self, getter, asyncPool):
                self._get_raw_conversations = getter
@@ -38,15 +43,16 @@ class Conversations(object):
                return repr(self._get_raw_conversations.__name__)
 
        def load(self, path):
+               _moduleLogger.debug("%s Loading cache" % (self._name, ))
                assert not self._conversations
                try:
                        with open(path, "rb") as f:
                                fileVersion, fileBuild, convs = pickle.load(f)
-               except (pickle.PickleError, IOError, EOFError, ValueError):
+               except Exception:
                        _moduleLogger.exception("While loading for %s" % self._name)
                        return
 
-               if misc_utils.compare_versions(
+               if convs and misc_utils.compare_versions(
                        self.OLDEST_COMPATIBLE_FORMAT_VERSION,
                        misc_utils.parse_version(fileVersion),
                ) <= 0:
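
util.misc is not included in this diff; as a rough orientation for the version gate above, here is a minimal sketch of what parse_version/compare_versions are assumed to do (dotted numeric version strings, cmp-style result). The real theonering helpers may differ.

# Sketch only, not the util.misc shipped with theonering.
def parse_version(versionText):
    # "0.8.0" -> [0, 8, 0]
    return [int(part) for part in versionText.split(".")]


def compare_versions(leftParsedVersion, rightParsedVersion):
    # Negative if left < right, zero if equal, positive if left > right,
    # so compare_versions(oldestSupported, fileVersion) <= 0 means the
    # cached file is new enough to load.
    return cmp(leftParsedVersion, rightParsedVersion)
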
@@ -61,29 +67,38 @@ class Conversations(object):
                        )
 
        def save(self, path):
+               _moduleLogger.info("%s Saving cache" % (self._name, ))
+               if not self._conversations:
+                       _moduleLogger.info("%s Odd, no conversations to cache.  Did we never load the cache?" % (self._name, ))
+                       return
+
                try:
-                       _moduleLogger.info("%s Saving cache" % (self._name, ))
-                       for conv in self._conversations.itervalues():
-                               conv.compress(self.OLDEST_MESSAGE_WINDOW)
                        dataToDump = (constants.__version__, constants.__build__, self._conversations)
                        with open(path, "wb") as f:
                                pickle.dump(dataToDump, f, pickle.HIGHEST_PROTOCOL)
                except (pickle.PickleError, IOError):
                        _moduleLogger.exception("While saving for %s" % self._name)
+               _moduleLogger.info("%s Cache saved" % (self._name, ))
 
        def update(self, force=False):
                if not force and self._conversations:
                        return
-               self._asyncPool.add_task(
-                       self._get_raw_conversations,
-                       (),
-                       {},
-                       self._on_get_conversations,
-                       self._on_get_conversations_failed,
-               )
+
+               le = gobject_utils.AsyncLinearExecution(self._asyncPool, self._update)
+               le.start()
 
        @misc_utils.log_exception(_moduleLogger)
-       def _on_get_conversations(self, conversationResult):
+       def _update(self):
+               try:
+                       conversationResult = yield (
+                               self._get_raw_conversations,
+                               (),
+                               {},
+                       )
+               except Exception:
+                       _moduleLogger.exception("%s While updating conversations" % (self._name, ))
+                       return
+
                oldConversationIds = set(self._conversations.iterkeys())
 
                updateConversationIds = set()
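
util.go_utils.AsyncLinearExecution is imported above but its implementation is not part of this diff. As a rough orientation only, the following is a minimal sketch of how such a generator-driven helper could be built, assuming the async pool keeps the add_task(func, args, kwds, on_success, on_error) signature visible in the removed update() code; it is not the actual theonering implementation. Each value yielded by the wrapped generator is treated as a (func, args, kwds) tuple, and the generator is resumed with the result or has the error thrown back into it, which is what lets the "except Exception" around the yield in _update catch download failures.

# Sketch only, not the util.go_utils shipped with theonering.
class AsyncLinearExecution(object):

    def __init__(self, pool, func):
        self._pool = pool
        self._func = func
        self._run = None

    def start(self, *args, **kwds):
        assert self._run is None, "Task already started"
        self._run = self._func(*args, **kwds)
        try:
            # Advance the generator to its first yield
            trampoline, trampolineArgs, trampolineKwds = self._run.send(None)
        except StopIteration:
            return
        self._pool.add_task(
            trampoline,
            trampolineArgs,
            trampolineKwds,
            self.on_success,
            self.on_error,
        )

    def on_success(self, result):
        try:
            # Resume the generator with the async result
            trampoline, trampolineArgs, trampolineKwds = self._run.send(result)
        except StopIteration:
            return
        self._pool.add_task(
            trampoline,
            trampolineArgs,
            trampolineKwds,
            self.on_success,
            self.on_error,
        )

    def on_error(self, error):
        try:
            # Raise the failure inside the generator at the yield point
            trampoline, trampolineArgs, trampolineKwds = self._run.throw(error)
        except StopIteration:
            return
        self._pool.add_task(
            trampoline,
            trampolineArgs,
            trampolineKwds,
            self.on_success,
            self.on_error,
        )
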
@@ -101,26 +116,36 @@ class Conversations(object):
                                markAllAsRead = False
                        else:
                                markAllAsRead = True
+
                        try:
                                mergedConversations.append_conversation(conversation, markAllAsRead)
                                isConversationUpdated = True
+                       except ConversationError, e:
+                               isConversationUpdated = False
+                       except AssertionError, e:
+                               _moduleLogger.debug("%s Skipping conversation for %r because '%s'" % (self._name, key, e))
+                               isConversationUpdated = False
                        except RuntimeError, e:
-                               if False:
-                                       _moduleLogger.debug("%s Skipping conversation for %r because '%s'" % (self._name, key, e))
+                               _moduleLogger.debug("%s Skipping conversation for %r because '%s'" % (self._name, key, e))
                                isConversationUpdated = False
 
                        if isConversationUpdated:
                                updateConversationIds.add(key)
 
+               for key in updateConversationIds:
+                       mergedConv = self._conversations[key]
+                       _moduleLogger.debug("%s \tUpdated %s" % (self._name, key))
+                       for conv in mergedConv.conversations:
+                               message = "%s \t\tUpdated %s (%r) %r %r %r" % (
+                                       self._name, conv.id, conv.time, conv.isRead, conv.isArchived, len(conv.messages)
+                               )
+                               _moduleLogger.debug(message)
+
                if updateConversationIds:
                        message = (self, updateConversationIds, )
                        self.updateSignalHandler.stage.send(message)
                self._hasDoneUpdate = True
 
-       @misc_utils.log_exception(_moduleLogger)
-       def _on_get_conversations_failed(self, error):
-               _moduleLogger.error(error)
-
        def get_conversations(self):
                return self._conversations.iterkeys()
 
@@ -159,10 +184,11 @@ class MergedConversations(object):
                # * We cache to disk the history of messages sent/received
                # * On first run we mark all server messages as read due to no cache
                # * If not first load or from cache (disk or in-memory) then it must be unread
-               if markAllAsRead:
-                       newConversation.isRead = True
-               else:
-                       newConversation.isRead = False
+               if newConversation.type != newConversation.TYPE_VOICEMAIL:
+                       if markAllAsRead:
+                               newConversation.isRead = True
+                       else:
+                               newConversation.isRead = False
 
                if newConversation.messages:
                        # must not have had all items removed due to duplicates
@@ -177,21 +203,6 @@ class MergedConversations(object):
        def conversations(self):
                return self._conversations
 
-       def compress(self, timedelta):
-               now = datetime.datetime.now()
-               oldNumConvs = len(self._conversations)
-               oldConvs = self._conversations
-               self._conversations = [
-                       conv
-                       for conv in self._conversations
-                       if (now - conv.time) < timedelta
-               ]
-               newNumConvs = len(self._conversations)
-               if oldNumConvs != newNumConvs:
-                       _moduleLogger.debug("Compressed conversations from %s to %s" % (oldNumConvs, newNumConvs))
-               else:
-                       _moduleLogger.debug("Did not compress, %s" % (newNumConvs))
-
        def _validate(self, newConversation):
                if not self._conversations:
                        return
@@ -203,7 +214,7 @@ class MergedConversations(object):
                        )
 
                if newConversation.time <= self._conversations[-1].time:
-                       raise RuntimeError("Conversations got out of order")
+                       raise ConversationError("Conversations got out of order")
 
        def _find_related_conversation(self, convId):
                similarConversations = (
@@ -276,3 +287,18 @@ class FilterOutReported(object):
                if filteredConversations and self._lastMessageTimestamp < filteredConversations[0].time:
                        self._lastMessageTimestamp = filteredConversations[0].time
                return filteredConversations
+
+
+def print_conversations(path):
+       import pprint
+
+       try:
+               with open(path, "rb") as f:
+                       fileVersion, fileBuild, convs = pickle.load(f)
+       except (pickle.PickleError, IOError, EOFError, ValueError):
+               _moduleLogger.exception("While loading %s" % path)
+       else:
+               for key, value in convs.iteritems():
+                       convs[key] = value.to_dict()
+               pprint.pprint((fileVersion, fileBuild))
+               pprint.pprint(convs)
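
For completeness, a hypothetical way to drive the new print_conversations() debugging helper from a Python session; the cache path below is made up, and the import assumes the theonering src/ directory is on sys.path.

import logging

from gvoice import conversations

# Hypothetical cache location; substitute wherever the application
# actually writes its pickled conversation cache.
logging.basicConfig(level=logging.DEBUG)
conversations.print_conversations("/home/user/.theonering/conversations.cache")
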