X-Git-Url: http://git.maemo.org/git/?a=blobdiff_plain;f=src%2Fgvoice%2Fconversations.py;h=8d61a585f0942519c3b46daabc9fd274f18dfefd;hb=a3d4b5a03a1c10621db4b193d50e1c64410de0d7;hp=357a8445521acdc644f20d987f4840d3ebfec1b3;hpb=89e33abd2a4f6962aa7d6ac721a9a1bb87611c11;p=theonering diff --git a/src/gvoice/conversations.py b/src/gvoice/conversations.py index 357a844..8d61a58 100644 --- a/src/gvoice/conversations.py +++ b/src/gvoice/conversations.py @@ -1,24 +1,71 @@ #!/usr/bin/python +from __future__ import with_statement +import datetime import logging -import util.coroutines as coroutines +try: + import cPickle + pickle = cPickle +except ImportError: + import pickle -import backend +import constants +import util.coroutines as coroutines +import util.misc as misc_utils -_moduleLogger = logging.getLogger("gvoice.conversations") +_moduleLogger = logging.getLogger(__name__) class Conversations(object): - def __init__(self, backend): - self._backend = backend + OLDEST_COMPATIBLE_FORMAT_VERSION = misc_utils.parse_version("0.8.0") + OLDEST_MESSAGE_WINDOW = datetime.timedelta(days=60) + + def __init__(self, getter): + self._get_raw_conversations = getter self._conversations = {} self.updateSignalHandler = coroutines.CoTee() - self.update() + + @property + def _name(self): + return repr(self._get_raw_conversations.__name__) + + def load(self, path): + assert not self._conversations + try: + with open(path, "rb") as f: + fileVersion, fileBuild, convs = pickle.load(f) + except (pickle.PickleError, IOError, EOFError, ValueError): + _moduleLogger.exception("While loading for %s" % self._name) + return + + if misc_utils.compare_versions( + self.OLDEST_COMPATIBLE_FORMAT_VERSION, + misc_utils.parse_version(fileVersion), + ) <= 0: + _moduleLogger.info("%s Loaded cache" % (self._name, )) + self._conversations = convs + else: + _moduleLogger.debug( + "%s Skipping cache due to version mismatch (%s-%s)" % ( + self._name, fileVersion, fileBuild + ) + ) + + def save(self, path): + try: + _moduleLogger.info("%s Saving cache" % (self._name, )) + for conv in self._conversations.itervalues(): + conv.compress(self.OLDEST_MESSAGE_WINDOW) + dataToDump = (constants.__version__, constants.__build__, self._conversations) + with open(path, "wb") as f: + pickle.dump(dataToDump, f, pickle.HIGHEST_PROTOCOL) + except (pickle.PickleError, IOError): + _moduleLogger.exception("While saving for %s" % self._name) def update(self, force=False): if not force and self._conversations: @@ -27,23 +74,23 @@ class Conversations(object): oldConversationIds = set(self._conversations.iterkeys()) updateConversationIds = set() - messages = self._backend.get_messages() - sortedMessages = backend.sort_messages(messages) - for messageData in sortedMessages: - key = messageData["contactId"], messageData["number"] + conversations = list(self._get_raw_conversations()) + conversations.sort() + for conversation in conversations: + key = misc_utils.normalize_number(conversation.number) try: - conversation = self._conversations[key] - isNewConversation = False + mergedConversations = self._conversations[key] except KeyError: - conversation = Conversation(self._backend, messageData) - self._conversations[key] = conversation - isNewConversation = True + mergedConversations = MergedConversations() + self._conversations[key] = mergedConversations - if isNewConversation: - # @todo see if this has issues with a user marking a item as unread/unarchive? 
+			try:
+				mergedConversations.append_conversation(conversation)
 				isConversationUpdated = True
-			else:
-				isConversationUpdated = conversation.merge_conversation(messageData)
+			except RuntimeError, e:
+				if False:
+					_moduleLogger.debug("%s Skipping conversation for %r because '%s'" % (self._name, key, e))
+				isConversationUpdated = False
 
 			if isConversationUpdated:
 				updateConversationIds.add(key)
@@ -62,60 +109,141 @@ class Conversations(object):
 			try:
 				del self._conversations[key]
 			except KeyError:
-				_moduleLogger.info("Conversation never existed for %r" % (key,))
+				_moduleLogger.info("%s Conversation never existed for %r" % (self._name, key, ))
 
 	def clear_all(self):
 		self._conversations.clear()
 
 
-class Conversation(object):
+class MergedConversations(object):
+
+	def __init__(self):
+		self._conversations = []
+
+	def append_conversation(self, newConversation):
+		self._validate(newConversation)
+		similarExist = False
+		for similarConversation in self._find_related_conversation(newConversation.id):
+			self._update_previous_related_conversation(similarConversation, newConversation)
+			self._remove_repeats(similarConversation, newConversation)
+			similarExist = True
+		if similarExist:
+			# Hack to reduce a race window with GV marking messages as read
+			# because it thinks we replied when really we replied to the
+			# previous message. Clients of this code are expected to handle
+			# this gracefully. Other race conditions may exist but clients are
+			# responsible for them
+			if newConversation.messages:
+				newConversation.isRead = False
+			else:
+				newConversation.isRead = True
+		self._conversations.append(newConversation)
+
+	def to_dict(self):
+		selfDict = {}
+		selfDict["conversations"] = [conv.to_dict() for conv in self._conversations]
+		return selfDict
+
+	@property
+	def conversations(self):
+		return self._conversations
+
+	def compress(self, timedelta):
+		now = datetime.datetime.now()
+		oldNumConvs = len(self._conversations)
+		oldConvs = self._conversations
+		self._conversations = [
+			conv
+			for conv in self._conversations
+			if (now - conv.time) < timedelta
+		]
+		newNumConvs = len(self._conversations)
+		if oldNumConvs != newNumConvs:
+			_moduleLogger.debug("Compressed conversations from %s to %s" % (oldNumConvs, newNumConvs))
+		else:
+			_moduleLogger.debug("Did not compress, %s" % (newNumConvs))
+
+	def _validate(self, newConversation):
+		if not self._conversations:
+			return
 
-	def __init__(self, backend, data):
-		self._backend = backend
-		self._data = dict((key, value) for (key, value) in data.iteritems())
+		for constantField in ("number", ):
+			assert getattr(self._conversations[0], constantField) == getattr(newConversation, constantField), "Constant field changed, soemthing is seriously messed up: %r v %r" % (
+				getattr(self._conversations[0], constantField),
+				getattr(newConversation, constantField),
+			)
 
-		# confirm we have a list
-		self._data["messageParts"] = list(
-			self._append_time(message, self._data["time"])
-			for message in self._data["messageParts"]
+		if newConversation.time <= self._conversations[-1].time:
+			raise RuntimeError("Conversations got out of order")
+
+	def _find_related_conversation(self, convId):
+		similarConversations = (
+			conversation
+			for conversation in self._conversations
+			if conversation.id == convId
+		)
+		return similarConversations
+
+	def _update_previous_related_conversation(self, relatedConversation, newConversation):
+		for commonField in ("isSpam", "isTrash", "isArchived"):
+			newValue = getattr(newConversation, commonField)
+			setattr(relatedConversation, commonField, newValue)
+
+	def _remove_repeats(self, relatedConversation, newConversation):
+		newConversationMessages = newConversation.messages
+		newConversation.messages = [
+			newMessage
+			for newMessage in newConversationMessages
+			if newMessage not in relatedConversation.messages
+		]
+		_moduleLogger.debug("Found %d new messages in conversation %s (%d/%d)" % (
+			len(newConversationMessages) - len(newConversation.messages),
+			newConversation.id,
+			len(newConversation.messages),
+			len(newConversationMessages),
+		))
+		assert 0 < len(newConversation.messages), "Everything shouldn't have been removed"
+
+
+def filter_out_read(conversations):
+	return (
+		conversation
+		for conversation in conversations
+		if not conversation.isRead and not conversation.isArchived
+	)
+
+
+def is_message_from_self(message):
+	return message.whoFrom == "Me:"
+
+
+def filter_out_self(conversations):
+	return (
+		newConversation
+		for newConversation in conversations
+		if len(newConversation.messages) and any(
+			not is_message_from_self(message)
+			for message in newConversation.messages
		)
+	)
+
+
+class FilterOutReported(object):
+
+	NULL_TIMESTAMP = datetime.datetime(1, 1, 1)
+
+	def __init__(self):
+		self._lastMessageTimestamp = self.NULL_TIMESTAMP
+
+	def get_last_timestamp(self):
+		return self._lastMessageTimestamp
 
-	def __getitem__(self, key):
-		return self._data[key]
-
-	def merge_conversation(self, moreData):
-		"""
-		@returns True if there was content to merge (new messages arrived
-		rather than being a duplicate)
-
-		@warning This assumes merges are done in chronological order
-		"""
-		for constantField in ("contactId", "number"):
-			assert self._data[constantField] == moreData[constantField], "Constant field changed, soemthing is seriously messed up: %r v %r" % (self._data, moreData)
-
-		if moreData["time"] < self._data["time"]:
-			# If its older, assuming it has nothing new to report
-			return False
-
-		for preferredMoreField in ("id", "name", "time", "relTime", "prettyNumber", "location"):
-			preferredFieldValue = moreData[preferredMoreField]
-			if preferredFieldValue:
-				self._data[preferredMoreField] = preferredFieldValue
-
-		messageAppended = False
-
-		# @todo Handle No Transcription voicemails
-		messageParts = self._data["messageParts"]
-		for message in moreData["messageParts"]:
-			messageWithTimestamp = self._append_time(message, moreData["time"])
-			if messageWithTimestamp not in messageParts:
-				messageParts.append(messageWithTimestamp)
-				messageAppended = True
-		messageParts.sort()
-
-		return messageAppended
-
-	@staticmethod
-	def _append_time(message, exactWhen):
-		whoFrom, message, when = message
-		return exactWhen, whoFrom, message, when
+	def __call__(self, conversations):
+		filteredConversations = [
+			conversation
+			for conversation in conversations
+			if self._lastMessageTimestamp < conversation.time
+		]
+		if filteredConversations and self._lastMessageTimestamp < filteredConversations[0].time:
+			self._lastMessageTimestamp = filteredConversations[0].time
+		return filteredConversations
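
Below is a minimal usage sketch, not part of the patch, of how the reworked module is presumably driven after this change. The backend getter, the cache path, and the empty conversation source are placeholder assumptions; only Conversations(getter), its load()/update()/save() methods, the conversations property of MergedConversations, and the filter_out_read() helper are taken from the code in the diff.

#!/usr/bin/python
# Hypothetical client wiring for the patched gvoice.conversations module.
# Every name marked "placeholder" is an assumption for illustration only.
from gvoice import conversations


def get_raw_sms_conversations():
	# Placeholder backend callable: the real getter would return conversation
	# objects exposing .id, .number, .time, .messages, .isRead, and the
	# isSpam/isTrash/isArchived flags the merge code reads.
	return []


CACHE_PATH = "/home/user/.theonering/sms_conversations.cache"  # placeholder path

convs = conversations.Conversations(get_raw_sms_conversations)
convs.load(CACHE_PATH)    # logged and skipped if the pickle is missing or from an incompatible format version
convs.update(force=True)  # pulls fresh conversations and merges them per normalized number
convs.save(CACHE_PATH)    # compresses conversations older than OLDEST_MESSAGE_WINDOW, then pickles version, build, and data

# The module-level helpers operate on plain iterables of conversation objects,
# e.g. list(conversations.filter_out_read(someMergedConversations.conversations))
# where someMergedConversations is a placeholder MergedConversations instance.

Passing a bare getter callable instead of the old backend object is presumably what lets the same Conversations class back both the SMS and voicemail lists, each with its own pickle cache.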