3 from __future__ import with_statement
15 import util.coroutines as coroutines
16 import util.misc as misc_utils
17 import util.go_utils as gobject_utils
20 _moduleLogger = logging.getLogger(__name__)
23 class Conversations(object):
25 OLDEST_COMPATIBLE_FORMAT_VERSION = misc_utils.parse_version("0.8.0")
27 def __init__(self, getter, asyncPool):
28 self._get_raw_conversations = getter
29 self._asyncPool = asyncPool
30 self._conversations = {}
31 self._loadedFromCache = False
32 self._hasDoneUpdate = False
34 self.updateSignalHandler = coroutines.CoTee()
38 return repr(self._get_raw_conversations.__name__)
41 _moduleLogger.debug("%s Loading cache" % (self._name, ))
42 assert not self._conversations
44 with open(path, "rb") as f:
45 fileVersion, fileBuild, convs = pickle.load(f)
46 except (pickle.PickleError, IOError, EOFError, ValueError):
47 _moduleLogger.exception("While loading for %s" % self._name)
50 if misc_utils.compare_versions(
51 self.OLDEST_COMPATIBLE_FORMAT_VERSION,
52 misc_utils.parse_version(fileVersion),
54 _moduleLogger.info("%s Loaded cache" % (self._name, ))
55 self._conversations = convs
56 self._loadedFromCache = True
57 for key, mergedConv in self._conversations.iteritems():
58 _moduleLogger.debug("%s \tLoaded %s" % (self._name, key))
59 for conv in mergedConv.conversations:
60 message = "%s \t\tLoaded %s (%r) %r %r %r" % (
61 self._name, conv.id, conv.time, conv.isRead, conv.isArchived, len(conv.messages)
63 _moduleLogger.debug(message)
66 "%s Skipping cache due to version mismatch (%s-%s)" % (
67 self._name, fileVersion, fileBuild
72 _moduleLogger.info("%s Saving cache" % (self._name, ))
74 dataToDump = (constants.__version__, constants.__build__, self._conversations)
75 with open(path, "wb") as f:
76 pickle.dump(dataToDump, f, pickle.HIGHEST_PROTOCOL)
77 except (pickle.PickleError, IOError):
78 _moduleLogger.exception("While saving for %s" % self._name)
80 for key, mergedConv in self._conversations.iteritems():
81 _moduleLogger.debug("%s \tSaving %s" % (self._name, key))
82 for conv in mergedConv.conversations:
83 message = "%s \t\tSaving %s (%r) %r %r %r" % (
84 self._name, conv.id, conv.time, conv.isRead, conv.isArchived, len(conv.messages)
86 _moduleLogger.debug(message)
88 _moduleLogger.info("%s Cache saved" % (self._name, ))
90 def update(self, force=False):
91 if not force and self._conversations:
94 le = gobject_utils.AsyncLinearExecution(self._asyncPool, self._update)
97 @misc_utils.log_exception(_moduleLogger)
99 conversationResult = yield (
100 self._get_raw_conversations,
105 oldConversationIds = set(self._conversations.iterkeys())
107 updateConversationIds = set()
108 conversations = list(conversationResult)
110 for conversation in conversations:
111 key = misc_utils.normalize_number(conversation.number)
113 mergedConversations = self._conversations[key]
115 mergedConversations = MergedConversations()
116 self._conversations[key] = mergedConversations
118 if self._loadedFromCache or self._hasDoneUpdate:
119 markAllAsRead = False
123 mergedConversations.append_conversation(conversation, markAllAsRead)
124 isConversationUpdated = True
125 except RuntimeError, e:
127 _moduleLogger.debug("%s Skipping conversation for %r because '%s'" % (self._name, key, e))
128 isConversationUpdated = False
130 if isConversationUpdated:
131 updateConversationIds.add(key)
133 for key in updateConversationIds:
134 mergedConv = self._conversations[key]
135 _moduleLogger.debug("%s \tUpdated %s" % (self._name, key))
136 for conv in mergedConv.conversations:
137 message = "%s \t\tUpdated %s (%r) %r %r %r" % (
138 self._name, conv.id, conv.time, conv.isRead, conv.isArchived, len(conv.messages)
140 _moduleLogger.debug(message)
142 if updateConversationIds:
143 message = (self, updateConversationIds, )
144 self.updateSignalHandler.stage.send(message)
145 self._hasDoneUpdate = True
147 def get_conversations(self):
148 return self._conversations.iterkeys()
150 def get_conversation(self, key):
151 return self._conversations[key]
153 def clear_conversation(self, key):
155 del self._conversations[key]
157 _moduleLogger.info("%s Conversation never existed for %r" % (self._name, key, ))
160 self._conversations.clear()
class MergedConversations(object):
    """A time-ordered series of conversations for a single phone number,
    merged across updates with duplicate messages removed.

    NOTE(review): reconstructed from a mangled listing; dropped structural
    lines (``def __init__``, ``def to_dict``, ``return``s, closing parens)
    were re-inserted where unambiguous.
    """

    def __init__(self):
        self._conversations = []

    def append_conversation(self, newConversation, markAllAsRead):
        """Merge ``newConversation`` into the series.

        Mutates ``newConversation`` (its ``isRead`` flag and, for repeat
        conversation ids, its ``messages`` list).

        @raises RuntimeError: when the conversation arrives out of time order
        """
        self._validate(newConversation)
        for similarConversation in self._find_related_conversation(newConversation.id):
            self._update_previous_related_conversation(similarConversation, newConversation)
            self._remove_repeats(similarConversation, newConversation)

        # HACK: Because GV marks all messages as read when you reply it has
        # the following race:
        # 1. Get all messages
        # 2. Contact sends a text
        # 3. User sends a text marking contacts text as read
        # 4. Get all messages not returning text from step 2
        # This isn't a problem for voicemails but we don't know(?) enough.
        # So we hack around this by:
        # * We cache to disk the history of messages sent/received
        # * On first run we mark all server messages as read due to no cache
        # * If not first load or from cache (disk or in-memory) then it must be unread
        if markAllAsRead:
            newConversation.isRead = True
        else:
            newConversation.isRead = False

        if newConversation.messages:
            # must not have had all items removed due to duplicates
            self._conversations.append(newConversation)

    def to_dict(self):
        """Serialize to a plain dict (delegates to each conversation's to_dict)."""
        selfDict = {}
        selfDict["conversations"] = [conv.to_dict() for conv in self._conversations]
        return selfDict

    @property
    def conversations(self):
        return self._conversations

    def _validate(self, newConversation):
        """Check invariants; raises RuntimeError for out-of-order arrivals."""
        # The first conversation always passes.
        if not self._conversations:
            return

        for constantField in ("number", ):
            assert getattr(self._conversations[0], constantField) == getattr(newConversation, constantField), "Constant field changed, something is seriously messed up: %r v %r" % (
                getattr(self._conversations[0], constantField),
                getattr(newConversation, constantField),
            )

        if newConversation.time <= self._conversations[-1].time:
            raise RuntimeError("Conversations got out of order")

    def _find_related_conversation(self, convId):
        # Lazy generator over previously-merged conversations sharing convId.
        similarConversations = (
            conversation
            for conversation in self._conversations
            if conversation.id == convId
        )
        return similarConversations

    def _update_previous_related_conversation(self, relatedConversation, newConversation):
        # Sync the mutable status flags onto the older copy so the freshest
        # server-side state wins.
        for commonField in ("isSpam", "isTrash", "isArchived"):
            newValue = getattr(newConversation, commonField)
            setattr(relatedConversation, commonField, newValue)

    def _remove_repeats(self, relatedConversation, newConversation):
        """Drop messages already present in ``relatedConversation`` from
        ``newConversation`` (in place)."""
        newConversationMessages = newConversation.messages
        newConversation.messages = [
            newMessage
            for newMessage in newConversationMessages
            if newMessage not in relatedConversation.messages
        ]
        _moduleLogger.debug("Found %d new messages in conversation %s (%d/%d)" % (
            len(newConversationMessages) - len(newConversation.messages),
            newConversation.id,
            len(newConversation.messages),
            len(newConversationMessages),
        ))
        # NOTE(review): this assert contradicts the empty-messages check in
        # append_conversation; it fires when every message was a duplicate.
        assert 0 < len(newConversation.messages), "Everything shouldn't have been removed"
def filter_out_read(conversations):
    """Lazily yield only the conversations still needing attention:
    unread and not archived."""
    return (
        conversation
        for conversation in conversations
        if not conversation.isRead and not conversation.isArchived
    )
def is_message_from_self(message):
    """Return True when the transcript attributes ``message`` to the
    account owner (the sender field is the literal string "Me:")."""
    return message.whoFrom == "Me:"
def filter_out_self(conversations):
    """Lazily yield conversations containing at least one message from
    someone other than the account owner (see is_message_from_self)."""
    return (
        newConversation
        for newConversation in conversations
        if len(newConversation.messages) and any(
            not is_message_from_self(message)
            for message in newConversation.messages
        )
    )
class FilterOutReported(object):
    """Stateful callable that drops conversations already reported:
    only conversations strictly newer than the high-water timestamp pass,
    and the high-water mark advances on each call.

    NOTE(review): ``def __init__`` reconstructed from a mangled listing.
    """

    # Sentinel older than any real conversation timestamp.
    NULL_TIMESTAMP = datetime.datetime(1, 1, 1)

    def __init__(self):
        self._lastMessageTimestamp = self.NULL_TIMESTAMP

    def get_last_timestamp(self):
        return self._lastMessageTimestamp

    def __call__(self, conversations):
        # NOTE(review): the high-water mark is taken from element 0, which
        # assumes ``conversations`` arrives newest-first — confirm with callers.
        filteredConversations = [
            conversation
            for conversation in conversations
            if self._lastMessageTimestamp < conversation.time
        ]
        if filteredConversations and self._lastMessageTimestamp < filteredConversations[0].time:
            self._lastMessageTimestamp = filteredConversations[0].time
        return filteredConversations