_moduleLogger = logging.getLogger(__name__)
+class ConversationError(RuntimeError):
+
+ pass
+
+
class Conversations(object):
OLDEST_COMPATIBLE_FORMAT_VERSION = misc_utils.parse_version("0.8.0")
- OLDEST_MESSAGE_WINDOW = datetime.timedelta(days=60)
def __init__(self, getter, asyncPool):
self._get_raw_conversations = getter
return repr(self._get_raw_conversations.__name__)
def load(self, path):
+ _moduleLogger.debug("%s Loading cache" % (self._name, ))
assert not self._conversations
try:
with open(path, "rb") as f:
fileVersion, fileBuild, convs = pickle.load(f)
- except (pickle.PickleError, IOError, EOFError, ValueError):
+ except Exception:
_moduleLogger.exception("While loading for %s" % self._name)
return
- if misc_utils.compare_versions(
+ if convs and misc_utils.compare_versions(
self.OLDEST_COMPATIBLE_FORMAT_VERSION,
misc_utils.parse_version(fileVersion),
) <= 0:
)
def save(self, path):
+ _moduleLogger.info("%s Saving cache" % (self._name, ))
+ if not self._conversations:
+ _moduleLogger.info("%s Odd, no conversations to cache. Did we never load the cache?" % (self._name, ))
+ return
+
try:
- _moduleLogger.info("%s Saving cache" % (self._name, ))
- for conv in self._conversations.itervalues():
- conv.compress(self.OLDEST_MESSAGE_WINDOW)
dataToDump = (constants.__version__, constants.__build__, self._conversations)
with open(path, "wb") as f:
pickle.dump(dataToDump, f, pickle.HIGHEST_PROTOCOL)
except (pickle.PickleError, IOError):
_moduleLogger.exception("While saving for %s" % self._name)
+ _moduleLogger.info("%s Cache saved" % (self._name, ))
def update(self, force=False):
if not force and self._conversations:
@misc_utils.log_exception(_moduleLogger)
def _update(self):
- conversationResult = yield (
- self._get_raw_conversations,
- (),
- {},
- )
+ try:
+ conversationResult = yield (
+ self._get_raw_conversations,
+ (),
+ {},
+ )
+ except Exception:
+ _moduleLogger.exception("%s While updating conversations" % (self._name, ))
+ return
oldConversationIds = set(self._conversations.iterkeys())
markAllAsRead = False
else:
markAllAsRead = True
+
try:
mergedConversations.append_conversation(conversation, markAllAsRead)
isConversationUpdated = True
+ except ConversationError:
+ isConversationUpdated = False
+ except AssertionError, e:
+ _moduleLogger.debug("%s Skipping conversation for %r because '%s'" % (self._name, key, e))
+ isConversationUpdated = False
except RuntimeError, e:
- if False:
- _moduleLogger.debug("%s Skipping conversation for %r because '%s'" % (self._name, key, e))
+ _moduleLogger.debug("%s Skipping conversation for %r because '%s'" % (self._name, key, e))
isConversationUpdated = False
if isConversationUpdated:
updateConversationIds.add(key)
+ for key in updateConversationIds:
+ mergedConv = self._conversations[key]
+ _moduleLogger.debug("%s \tUpdated %s" % (self._name, key))
+ for conv in mergedConv.conversations:
+ message = "%s \t\tUpdated %s (%r) %r %r %r" % (
+ self._name, conv.id, conv.time, conv.isRead, conv.isArchived, len(conv.messages)
+ )
+ _moduleLogger.debug(message)
+
if updateConversationIds:
message = (self, updateConversationIds, )
self.updateSignalHandler.stage.send(message)
# * We cache to disk the history of messages sent/received
# * On first run we mark all server messages as read due to no cache
# * If not first load or from cache (disk or in-memory) then it must be unread
- if markAllAsRead:
- newConversation.isRead = True
- else:
- newConversation.isRead = False
+ if newConversation.type != newConversation.TYPE_VOICEMAIL:
+ if markAllAsRead:
+ newConversation.isRead = True
+ else:
+ newConversation.isRead = False
if newConversation.messages:
# must not have had all items removed due to duplicates
def conversations(self):
return self._conversations
- def compress(self, timedelta):
- now = datetime.datetime.now()
- oldNumConvs = len(self._conversations)
- oldConvs = self._conversations
- self._conversations = [
- conv
- for conv in self._conversations
- if (now - conv.time) < timedelta
- ]
- newNumConvs = len(self._conversations)
- if oldNumConvs != newNumConvs:
- _moduleLogger.debug("Compressed conversations from %s to %s" % (oldNumConvs, newNumConvs))
- else:
- _moduleLogger.debug("Did not compress, %s" % (newNumConvs))
-
def _validate(self, newConversation):
if not self._conversations:
return
)
if newConversation.time <= self._conversations[-1].time:
- raise RuntimeError("Conversations got out of order")
+ raise ConversationError("Conversations got out of order")
def _find_related_conversation(self, convId):
similarConversations = (
if filteredConversations and self._lastMessageTimestamp < filteredConversations[0].time:
self._lastMessageTimestamp = filteredConversations[0].time
return filteredConversations
+
+
+def print_conversations(path):
+ import pprint
+
+ try:
+ with open(path, "rb") as f:
+ fileVersion, fileBuild, convs = pickle.load(f)
+ except (pickle.PickleError, IOError, EOFError, ValueError):
+ _moduleLogger.exception("")
+ else:
+ for key, value in convs.iteritems():
+ convs[key] = value.to_dict()
+ pprint.pprint((fileVersion, fileBuild))
+ pprint.pprint(convs)