3 from __future__ import with_statement
15 import util.coroutines as coroutines
16 import util.misc as misc_utils
19 _moduleLogger = logging.getLogger(__name__)
class Conversations(object):
	"""In-memory store of Google Voice conversations with disk-cache support.

	NOTE(review): only a fragment of this class body is visible in this view;
	several method headers and statements are missing from the extraction.
	"""

	# Pickled caches written by versions older than this are rejected by load
	OLDEST_COMPATIBLE_FORMAT_VERSION = misc_utils.parse_version("0.8.0")
	# Conversations older than this window are dropped by the compress pass
	OLDEST_MESSAGE_WINDOW = datetime.timedelta(days=60)
27 def __init__(self, getter, asyncPool):
28 self._get_raw_conversations = getter
29 self._asyncPool = asyncPool
30 self._conversations = {}
31 self._loadedFromCache = False
32 self._hasDoneUpdate = False
34 self.updateSignalHandler = coroutines.CoTee()
		# NOTE(review): fragment — the enclosing "def" line (apparently a
		# logging-name helper; the log calls below format self._name) is not
		# visible in this view.  Identifies this instance by its getter's name.
		return repr(self._get_raw_conversations.__name__)
		# NOTE(review): fragment of a load(path) method — the "def" line, the
		# opening "try:", the post-exception "return", and several closing
		# lines are missing from this view; indentation is reconstructed.
		# Loads the pickled conversation cache from disk, accepting it only
		# when the on-disk format version is still compatible.
		assert not self._conversations
		with open(path, "rb") as f:
			# Cache payload is a (version, build, conversations-dict) triple
			fileVersion, fileBuild, convs = pickle.load(f)
		except (pickle.PickleError, IOError, EOFError, ValueError):
			# Best-effort: a corrupt/missing cache just means a cold start
			_moduleLogger.exception("While loading for %s" % self._name)
		if misc_utils.compare_versions(
			self.OLDEST_COMPATIBLE_FORMAT_VERSION,
			misc_utils.parse_version(fileVersion),
			# presumably the comparison result test (e.g. ") <= 0:") follows
			_moduleLogger.info("%s Loaded cache" % (self._name, ))
			self._conversations = convs
			self._loadedFromCache = True
			# presumably an "else:" branch logs the skip below
			"%s Skipping cache due to version mismatch (%s-%s)" % (
				self._name, fileVersion, fileBuild
		# NOTE(review): fragment of a save(path) method — the "def" line and
		# the opening "try:" are missing from this view.
		_moduleLogger.info("%s Saving cache" % (self._name, ))
		# Trim stale history before pickling so the cache stays bounded
		for conv in self._conversations.itervalues():
			conv.compress(self.OLDEST_MESSAGE_WINDOW)
		# Tag the payload with version/build so load() can reject
		# incompatible formats later
		dataToDump = (constants.__version__, constants.__build__, self._conversations)
		with open(path, "wb") as f:
			pickle.dump(dataToDump, f, pickle.HIGHEST_PROTOCOL)
		except (pickle.PickleError, IOError):
			# Best-effort: failing to persist the cache is not fatal
			_moduleLogger.exception("While saving for %s" % self._name)
	def update(self, force=False):
		"""Kick off an asynchronous refresh of the conversation list.

		@param force When False an already-populated cache suppresses the
			refresh (the guard body — presumably a bare "return" — is
			missing from this view)
		"""
		if not force and self._conversations:
		# NOTE(review): fragment — the task's remaining argument lines and
		# the closing parenthesis of add_task are missing from this view.
		self._asyncPool.add_task(
			self._get_raw_conversations,
			self._on_get_conversations,
			self._on_get_conversations_failed,
	@misc_utils.log_exception(_moduleLogger)
	def _on_get_conversations(self, conversationResult):
		"""Async-pool success callback: merge freshly downloaded conversations
		into the per-number cache and broadcast which keys changed.

		NOTE(review): fragment — the "try:"/"except KeyError:" around the
		cache lookup, the branch assigning markAllAsRead = True, and the
		"try:" guarding append_conversation are missing from this view;
		indentation is reconstructed.
		"""
		# NOTE(review): unused in the visible lines — presumably consumed by
		# missing code (e.g. detecting removed conversations)
		oldConversationIds = set(self._conversations.iterkeys())

		updateConversationIds = set()
		conversations = list(conversationResult)
		for conversation in conversations:
			# Conversations are bucketed per contact phone number
			key = misc_utils.normalize_number(conversation.number)
			mergedConversations = self._conversations[key]
			# presumably reached via "except KeyError:" — first sighting of key
			mergedConversations = MergedConversations()
			self._conversations[key] = mergedConversations

			if self._loadedFromCache or self._hasDoneUpdate:
				# We already have history, so anything new must be unread
				markAllAsRead = False
			# presumably "else: markAllAsRead = True" — first run, no cache

			mergedConversations.append_conversation(conversation, markAllAsRead)
			isConversationUpdated = True
			except RuntimeError, e:
				# Out-of-order or duplicate data; keep what we already have
				_moduleLogger.debug("%s Skipping conversation for %r because '%s'" % (self._name, key, e))
				isConversationUpdated = False

			if isConversationUpdated:
				updateConversationIds.add(key)

		if updateConversationIds:
			# Notify observers which conversation keys actually changed
			message = (self, updateConversationIds, )
			self.updateSignalHandler.stage.send(message)
		self._hasDoneUpdate = True
	@misc_utils.log_exception(_moduleLogger)
	def _on_get_conversations_failed(self, error):
		"""Async-pool failure callback: record the error and carry on."""
		_moduleLogger.error(error)
	def get_conversations(self):
		"""Return an iterator over conversation keys (normalized numbers)."""
		return self._conversations.iterkeys()
	def get_conversation(self, key):
		"""Return the merged conversations for *key*; raises KeyError if absent."""
		return self._conversations[key]
	def clear_conversation(self, key):
		# NOTE(review): fragment — the "try:" and "except KeyError:" lines
		# wrapping these statements are missing from this view; an unknown
		# key is evidently treated as best-effort and only logged.
		del self._conversations[key]
		_moduleLogger.info("%s Conversation never existed for %r" % (self._name, key, ))
		# NOTE(review): fragment — the enclosing "def" (presumably a
		# clear_all-style method) is not visible in this view.  Drops every
		# cached conversation.
		self._conversations.clear()
class MergedConversations(object):
	# Accumulates related GV conversations for a single contact/number.
	# NOTE(review): fragment — the "def __init__(self):" header for the
	# assignment below is missing from this view.
		# Ordered history of conversations, oldest first (enforced by _validate)
		self._conversations = []
	def append_conversation(self, newConversation, markAllAsRead):
		"""Merge *newConversation* into the stored history.

		NOTE(review): fragment — the condition lines choosing between the two
		isRead assignments below (presumably testing markAllAsRead) are
		missing from this view; indentation is reconstructed.

		@param newConversation Freshly downloaded conversation to merge in
		@param markAllAsRead When True, treat server-reported messages as read
		@raises RuntimeError When the conversation is out of order (_validate)
		"""
		self._validate(newConversation)
		for similarConversation in self._find_related_conversation(newConversation.id):
			self._update_previous_related_conversation(similarConversation, newConversation)
			self._remove_repeats(similarConversation, newConversation)

		# HACK: Because GV marks all messages as read when you reply it has
		# the following race:
		# 1. Get all messages
		# 2. Contact sends a text
		# 3. User sends a text marking contacts text as read
		# 4. Get all messages not returning text from step 2
		# This isn't a problem for voicemails but we don't know(?) enough.
		# So we hack around this by:
		# * We cache to disk the history of messages sent/received
		# * On first run we mark all server messages as read due to no cache
		# * If not first load or from cache (disk or in-memory) then it must be unread
		newConversation.isRead = True
		# presumably an "else:" branch separates these two assignments
		newConversation.isRead = False

		if newConversation.messages:
			# must not have had all items removed due to duplicates
			self._conversations.append(newConversation)
		# NOTE(review): fragment — the enclosing to_dict()-style method header
		# and the surrounding selfDict creation/return are missing from view.
		# Serializes the stored conversations via each one's own to_dict().
		selfDict["conversations"] = [conv.to_dict() for conv in self._conversations]
	# NOTE(review): a "@property" decorator presumably precedes this def but
	# is missing from this view.
	def conversations(self):
		# Exposes the internal list directly; callers should not mutate it
		return self._conversations
	def compress(self, timedelta):
		"""Drop stored conversations older than *timedelta* relative to now.

		NOTE(review): fragment — the list-comprehension element line, its
		closing bracket, and the "else:" before the final debug call are
		missing from this view; indentation is reconstructed.
		"""
		now = datetime.datetime.now()
		oldNumConvs = len(self._conversations)
		oldConvs = self._conversations  # NOTE(review): unused in the visible lines
		self._conversations = [
			# presumably "conv" is the comprehension's element expression
			for conv in self._conversations
			if (now - conv.time) < timedelta
		newNumConvs = len(self._conversations)
		if oldNumConvs != newNumConvs:
			_moduleLogger.debug("Compressed conversations from %s to %s" % (oldNumConvs, newNumConvs))
			# presumably an "else:" precedes this line
			_moduleLogger.debug("Did not compress, %s" % (newNumConvs))
	def _validate(self, newConversation):
		"""Sanity-check *newConversation* against the stored history.

		NOTE(review): fragment — the early exit for an empty history
		(presumably a bare "return") and the closing parenthesis of the
		assert's message tuple are missing from this view.

		@raises RuntimeError When newConversation is not strictly newer than
			the most recently stored conversation
		"""
		if not self._conversations:
		# NOTE: assert is stripped under "python -O"; the typo "soemthing" is
		# inside the runtime string and deliberately left untouched here
		for constantField in ("number", ):
			assert getattr(self._conversations[0], constantField) == getattr(newConversation, constantField), "Constant field changed, soemthing is seriously messed up: %r v %r" % (
				getattr(self._conversations[0], constantField),
				getattr(newConversation, constantField),

		if newConversation.time <= self._conversations[-1].time:
			raise RuntimeError("Conversations got out of order")
	def _find_related_conversation(self, convId):
		"""Return a generator over stored conversations sharing *convId*.

		NOTE(review): fragment — the generator-expression element line
		(presumably "conversation") and its closing parenthesis are missing
		from this view.
		"""
		similarConversations = (
			for conversation in self._conversations
			if conversation.id == convId
		return similarConversations
216 def _update_previous_related_conversation(self, relatedConversation, newConversation):
217 for commonField in ("isSpam", "isTrash", "isArchived"):
218 newValue = getattr(newConversation, commonField)
219 setattr(relatedConversation, commonField, newValue)
	def _remove_repeats(self, relatedConversation, newConversation):
		"""Strip messages from *newConversation* that are already present in
		*relatedConversation*, keeping only the genuinely new ones.

		NOTE(review): fragment — the comprehension element line, its closing
		bracket, one debug-format argument (presumably the conversation id),
		and the format call's closing parentheses are missing from this view.
		"""
		newConversationMessages = newConversation.messages
		newConversation.messages = [
			for newMessage in newConversationMessages
			if newMessage not in relatedConversation.messages
		_moduleLogger.debug("Found %d new messages in conversation %s (%d/%d)" % (
			len(newConversationMessages) - len(newConversation.messages),
			len(newConversation.messages),
			len(newConversationMessages),
		# NOTE: assert is stripped under "python -O"
		assert 0 < len(newConversation.messages), "Everything shouldn't have been removed"
def filter_out_read(conversations):
	# Keeps only conversations that are unread and not archived.
	# NOTE(review): fragment — the "return (" / "[" wrapper, the element
	# expression, and the closing bracket of this comprehension are missing
	# from this view.
		for conversation in conversations
		if not conversation.isRead and not conversation.isArchived
def is_message_from_self(message):
	"""Return True when *message* was sent by the local user.

	GV tags outgoing messages with the literal sender label "Me:".
	"""
	return "Me:" == message.whoFrom
def filter_out_self(conversations):
	# Keeps conversations containing at least one message from someone other
	# than the local user.
	# NOTE(review): fragment — the "return (" / "[" wrapper, the element
	# expression, and the closing brackets of this comprehension are missing
	# from this view.
		for newConversation in conversations
		if len(newConversation.messages) and any(
			not is_message_from_self(message)
			for message in newConversation.messages
class FilterOutReported(object):
	# Callable filter that only passes conversations newer than the newest
	# one it has previously reported.
	# NOTE(review): fragment — the "def __init__(self):" header for the
	# assignment below is missing from this view.

	# Sentinel timestamp older than any real conversation time
	NULL_TIMESTAMP = datetime.datetime(1, 1, 1)

		self._lastMessageTimestamp = self.NULL_TIMESTAMP
	def get_last_timestamp(self):
		"""Return the timestamp of the newest conversation reported so far
		(NULL_TIMESTAMP when nothing has passed the filter yet)."""
		return self._lastMessageTimestamp
	def __call__(self, conversations):
		"""Filter *conversations* down to those strictly newer than the last
		reported high-water mark, then advance the mark.

		NOTE(review): fragment — the list-comprehension element line
		(presumably "conversation") and its closing bracket are missing from
		this view.
		"""
		filteredConversations = [
			for conversation in conversations
			if self._lastMessageTimestamp < conversation.time
		# Index 0 is treated as the newest item — presumably the input is
		# ordered newest-first; confirm against the caller
		if filteredConversations and self._lastMessageTimestamp < filteredConversations[0].time:
			self._lastMessageTimestamp = filteredConversations[0].time
		return filteredConversations