Adding fancier version checks to allow the cache to be preserved longer
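
The load() gate below keeps a cached pickle only when it was written by version 0.8.0 or later. The following is a minimal sketch of how the misc_utils version helpers are assumed to behave, using the usual dotted-integer convention; the in-tree util.misc implementations may differ in detail:

# Sketch only: assumed behaviour of the misc_utils version helpers.
def parse_version(versionText):
        # "0.8.0" -> [0, 8, 0]
        return [int(part) for part in versionText.split(".")]

def compare_versions(leftParsed, rightParsed):
        # Negative if left < right, zero if equal, positive if left > right
        for left, right in zip(leftParsed, rightParsed):
                if left != right:
                        return left - right
        return len(leftParsed) - len(rightParsed)

# The cache is accepted when the file was written by 0.8.0 or newer:
# compare_versions(parse_version("0.8.0"), parse_version(fileVersion)) <= 0
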
[theonering] / src / gvoice / conversations.py
index 809d0fc..fed382b 100644
@@ -2,6 +2,7 @@
 
 from __future__ import with_statement
 
+import datetime
 import logging
 
 try:
@@ -10,11 +11,12 @@ try:
 except ImportError:
        import pickle
 
+import constants
 import util.coroutines as coroutines
-import util.misc as util_misc
+import util.misc as misc_utils
 
 
-_moduleLogger = logging.getLogger("gvoice.conversations")
+_moduleLogger = logging.getLogger(__name__)
 
 
 class Conversations(object):
@@ -33,14 +35,28 @@ class Conversations(object):
                assert not self._conversations
                try:
                        with open(path, "rb") as f:
-                               self._conversations = pickle.load(f)
-               except (pickle.PickleError, IOError):
+                               fileVersion, fileBuild, convs = pickle.load(f)
+               except (pickle.PickleError, IOError, EOFError, ValueError):
                        _moduleLogger.exception("While loading for %s" % self._name)
+                       return
+
+               if misc_utils.compare_versions(
+                       misc_utils.parse_version("0.8.0"),
+                       misc_utils.parse_version(fileVersion),
+               ) <= 0:
+                       self._conversations = convs
+               else:
+                       _moduleLogger.debug(
+                               "%s Skipping cache due to version mismatch (%s-%s)" % (
+                                       self._name, fileVersion, fileBuild
+                               )
+                       )
 
        def save(self, path):
                try:
+                       dataToDump = (constants.__version__, constants.__build__, self._conversations)
                        with open(path, "wb") as f:
-                               pickle.dump(self._conversations, f, pickle.HIGHEST_PROTOCOL)
+                               pickle.dump(dataToDump, f, pickle.HIGHEST_PROTOCOL)
                except (pickle.PickleError, IOError):
                        _moduleLogger.exception("While saving for %s" % self._name)
 
@@ -54,7 +70,7 @@ class Conversations(object):
                conversations = list(self._get_raw_conversations())
                conversations.sort()
                for conversation in conversations:
-                       key = util_misc.normalize_number(conversation.number)
+                       key = misc_utils.normalize_number(conversation.number)
                        try:
                                mergedConversations = self._conversations[key]
                        except KeyError:
@@ -99,9 +115,21 @@ class MergedConversations(object):
 
        def append_conversation(self, newConversation):
                self._validate(newConversation)
+               similarExist = False
                for similarConversation in self._find_related_conversation(newConversation.id):
                        self._update_previous_related_conversation(similarConversation, newConversation)
                        self._remove_repeats(similarConversation, newConversation)
+                       similarExist = True
+               if similarExist:
+                       # Hack to reduce a race window where GV marks messages as read
+                       # because it thinks we replied, when really we replied to the
+                       # previous message.  Clients of this code are expected to handle
+                       # this gracefully.  Other race conditions may exist, but clients
+                       # are responsible for handling them.
+                       if newConversation.messages:
+                               newConversation.isRead = False
+                       else:
+                               newConversation.isRead = True
                self._conversations.append(newConversation)
 
        def to_dict(self):
@@ -135,7 +163,7 @@ class MergedConversations(object):
                return similarConversations
 
        def _update_previous_related_conversation(self, relatedConversation, newConversation):
-               for commonField in ("isRead", "isSpam", "isTrash", "isArchived"):
+               for commonField in ("isSpam", "isTrash", "isArchived"):
                        newValue = getattr(newConversation, commonField)
                        setattr(relatedConversation, commonField, newValue)
 
@@ -153,3 +181,47 @@ class MergedConversations(object):
                        len(newConversationMessages),
                ))
                assert 0 < len(newConversation.messages), "Everything shouldn't have been removed"
+
+
+def filter_out_read(conversations):
+       return (
+               conversation
+               for conversation in conversations
+               if not conversation.isRead and not conversation.isArchived
+       )
+
+
+def is_message_from_self(message):
+       return message.whoFrom == "Me:"
+
+
+def filter_out_self(conversations):
+       return (
+               newConversation
+               for newConversation in conversations
+               if len(newConversation.messages) and any(
+                       not is_message_from_self(message)
+                       for message in newConversation.messages
+               )
+       )
+
+
+class FilterOutReported(object):
+
+       NULL_TIMESTAMP = datetime.datetime(1, 1, 1)
+
+       def __init__(self):
+               self._lastMessageTimestamp = self.NULL_TIMESTAMP
+
+       def get_last_timestamp(self):
+               return self._lastMessageTimestamp
+
+       def __call__(self, conversations):
+               filteredConversations = [
+                       conversation
+                       for conversation in conversations
+                       if self._lastMessageTimestamp < conversation.time
+               ]
+               if filteredConversations and self._lastMessageTimestamp < filteredConversations[0].time:
+                       self._lastMessageTimestamp = filteredConversations[0].time
+               return filteredConversations
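
The new module-level filters are meant to be chained when deciding which conversations to surface to the user. A rough usage sketch follows; the import path and the surrounding names (get_new_unread_conversations, allConversations) are illustrative assumptions, only the filters themselves come from this change:

from gvoice import conversations

_filterReported = conversations.FilterOutReported()

def get_new_unread_conversations(allConversations):
        # Drop read/archived threads, then threads that contain only our
        # own messages
        fresh = conversations.filter_out_read(allConversations)
        fresh = conversations.filter_out_self(fresh)
        # FilterOutReported advances its timestamp from the first item it
        # sees, so hand it the conversations newest-first
        fresh = sorted(fresh, key=lambda conv: conv.time, reverse=True)
        return _filterReported(fresh)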