Merging in files from skeleton
author Ed Page <eopage@byu.net>
Fri, 16 Jul 2010 01:47:06 +0000 (20:47 -0500)
committer Ed Page <eopage@byu.net>
Fri, 16 Jul 2010 01:47:27 +0000 (20:47 -0500)
18 files changed:
.gitignore
Makefile
src/constants.py
src/dc_glade.py
src/gtk_toolbox.py
src/hildonize.py
src/util/__init__.py [new file with mode: 0644]
src/util/algorithms.py [new file with mode: 0644]
src/util/concurrent.py [new file with mode: 0644]
src/util/coroutines.py [new file with mode: 0755]
src/util/go_utils.py [new file with mode: 0644]
src/util/io.py [new file with mode: 0644]
src/util/linux.py [new file with mode: 0644]
src/util/misc.py [new file with mode: 0644]
src/util/overloading.py [new file with mode: 0644]
src/util/tp_utils.py [new file with mode: 0644]
support/builddeb.py
support/dialcentral.desktop

index 0d20b64..1948f58 100644 (file)
@@ -1 +1,2 @@
 *.pyc
+.profile
index 6c59a71..15eff8c 100644 (file)
--- a/Makefile
+++ b/Makefile
@@ -45,7 +45,6 @@ package: $(OBJ)
        cp $(SOURCE_PATH)/$(PROJECT_NAME).py  $(BUILD_PATH)/generic
        $(foreach file, $(DATA), cp $(file) $(BUILD_PATH)/generic/$(subst /,-,$(file)) ; )
        $(foreach file, $(SOURCE), cp $(file) $(BUILD_PATH)/generic/$(subst /,-,$(file)) ; )
-       #$(foreach file, $(OBJ), cp $(file) $(BUILD_PATH)/generic/$(subst /,-,$(file)) ; )
        cp support/$(PROJECT_NAME).desktop $(BUILD_PATH)/generic
        cp support/icons/hicolor/26x26/hildon/$(PROJECT_NAME).png $(BUILD_PATH)/generic/26x26-$(PROJECT_NAME).png
        cp support/icons/hicolor/64x64/hildon/$(PROJECT_NAME).png $(BUILD_PATH)/generic/64x64-$(PROJECT_NAME).png
index b2b9662..d41e7cd 100644 (file)
@@ -5,8 +5,8 @@ __app_name__ = "dialcentral"
 __version__ = "1.1.8"
 __build__ = 0
 __app_magic__ = 0xdeadbeef
-_data_path_ = os.path.join(os.path.expanduser("~"), ".dialcentral")
+_data_path_ = os.path.join(os.path.expanduser("~"), ".%s" % __app_name__)
 _user_settings_ = "%s/settings.ini" % _data_path_
 _custom_notifier_settings_ = "%s/notifier.ini" % _data_path_
-_user_logpath_ = "%s/dialcentral.log" % _data_path_
+_user_logpath_ = "%s/%s.log" % (_data_path_, __app_name__)
 _notifier_logpath_ = "%s/notifier.log" % _data_path_
index aa32707..14ffb02 100755 (executable)
@@ -175,7 +175,7 @@ class Dialcentral(object):
                if not hildonize.IS_HILDON_SUPPORTED:
                        _moduleLogger.warning("No hildonization support")
 
-               hildonize.set_application_title(self._window, "%s" % constants.__pretty_app_name__)
+               hildonize.set_application_name("%s" % constants.__pretty_app_name__)
 
                self._window.connect("destroy", self._on_close)
                self._window.set_default_size(800, 300)
@@ -970,8 +970,6 @@ def run_doctest():
 def run_dialpad():
        gtk.gdk.threads_init()
 
-       if hildonize.IS_HILDON_SUPPORTED:
-               gtk.set_application_name(constants.__pretty_app_name__)
        handle = Dialcentral()
        if not PROFILE_STARTUP:
                gtk.main()
index 17dc166..769e19b 100644 (file)
@@ -17,7 +17,7 @@ import gobject
 import gtk
 
 
-_moduleLogger = logging.getLogger("gtk_toolbox")
+_moduleLogger = logging.getLogger(__name__)
 
 
 def get_screen_orientation():
index 77d585a..339eb2a 100644 (file)
@@ -47,24 +47,26 @@ except AttributeError:
        get_app_class = _null_get_app_class
 
 
-def _hildon_set_application_title(window, title):
-       pass
+def _hildon_set_application_name(name):
+       gtk.set_application_name(name)
 
 
-def _null_set_application_title(window, title):
-       window.set_title(title)
+def _null_set_application_name(name):
+       pass
 
 
-if IS_HILDON_SUPPORTED:
-       set_application_title = _hildon_set_application_title
-else:
-       set_application_title = _null_set_application_title
+try:
+       gtk.set_application_name
+       set_application_name = _hildon_set_application_name
+except AttributeError:
+       set_application_name = _null_set_application_name
 
 
 def _fremantle_hildonize_window(app, window):
        oldWindow = window
        newWindow = hildon.StackableWindow()
-       oldWindow.get_child().reparent(newWindow)
+       if oldWindow.get_child() is not None:
+               oldWindow.get_child().reparent(newWindow)
        app.add_window(newWindow)
        return newWindow
 
@@ -72,7 +74,8 @@ def _fremantle_hildonize_window(app, window):
 def _hildon_hildonize_window(app, window):
        oldWindow = window
        newWindow = hildon.Window()
-       oldWindow.get_child().reparent(newWindow)
+       if oldWindow.get_child() is not None:
+               oldWindow.get_child().reparent(newWindow)
        app.add_window(newWindow)
        return newWindow
 
@@ -356,6 +359,28 @@ else:
        hildonize_combo_entry = _null_hildonize_combo_entry
 
 
+def _null_create_seekbar():
+       adjustment = gtk.Adjustment(0, 0, 101, 1, 5, 1)
+       seek = gtk.HScale(adjustment)
+       seek.set_draw_value(False)
+       return seek
+
+
+def _fremantle_create_seekbar():
+       seek = hildon.Seekbar()
+       seek.set_range(0.0, 100)
+       seek.set_draw_value(False)
+       seek.set_update_policy(gtk.UPDATE_DISCONTINUOUS)
+       return seek
+
+
+try:
+       hildon.Seekbar
+       create_seekbar = _fremantle_create_seekbar
+except AttributeError:
+       create_seekbar = _null_create_seekbar
+
+
 def _fremantle_hildonize_scrollwindow(scrolledWindow):
        pannableWindow = hildon.PannableArea()
 
diff --git a/src/util/__init__.py b/src/util/__init__.py
new file mode 100644 (file)
index 0000000..4265cc3
--- /dev/null
@@ -0,0 +1 @@
+#!/usr/bin/env python
diff --git a/src/util/algorithms.py b/src/util/algorithms.py
new file mode 100644 (file)
index 0000000..7052b98
--- /dev/null
@@ -0,0 +1,584 @@
+#!/usr/bin/env python
+
+"""
+@note Source http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/66448
+"""
+
+import itertools
+import functools
+import datetime
+import types
+
+
+def ordered_itr(collection):
+       """
+       >>> [v for v in ordered_itr({"a": 1, "b": 2})]
+       [('a', 1), ('b', 2)]
+       >>> [v for v in ordered_itr([3, 1, 10, -20])]
+       [-20, 1, 3, 10]
+       """
+       if isinstance(collection, types.DictType):
+               keys = list(collection.iterkeys())
+               keys.sort()
+               for key in keys:
+                       yield key, collection[key]
+       else:
+               values = list(collection)
+               values.sort()
+               for value in values:
+                       yield value
+
+
+def itercat(*iterators):
+       """
+       Concatenate several iterators into one.
+
+       >>> [v for v in itercat([1, 2, 3], [4, 1, 3])]
+       [1, 2, 3, 4, 1, 3]
+       """
+       for i in iterators:
+               for x in i:
+                       yield x
+
+
+def iterwhile(func, iterator):
+       """
+       Iterate for as long as func(value) returns true.
+       >>> through = lambda b: b
+       >>> [v for v in iterwhile(through, [True, True, False])]
+       [True, True]
+       """
+       iterator = iter(iterator)
+       while 1:
+               next = iterator.next()
+               if not func(next):
+                       raise StopIteration
+               yield next
+
+
+def iterfirst(iterator, count=1):
+       """
+       Iterate through 'count' first values.
+
+       >>> [v for v in iterfirst([1, 2, 3, 4, 5], 3)]
+       [1, 2, 3]
+       """
+       iterator = iter(iterator)
+       for i in xrange(count):
+               yield iterator.next()
+
+
+def iterstep(iterator, n):
+       """
+       Iterate every nth value.
+
+       >>> [v for v in iterstep([1, 2, 3, 4, 5], 1)]
+       [1, 2, 3, 4, 5]
+       >>> [v for v in iterstep([1, 2, 3, 4, 5], 2)]
+       [1, 3, 5]
+       >>> [v for v in iterstep([1, 2, 3, 4, 5], 3)]
+       [1, 4]
+       """
+       iterator = iter(iterator)
+       while True:
+               yield iterator.next()
+               # skip n-1 values
+               for dummy in xrange(n-1):
+                       iterator.next()
+
+
+def itergroup(iterator, count, padValue = None):
+       """
+       Iterate in groups of 'count' values. If there
+       aren't enough values, the last result is padded with
+       None.
+
+       >>> for val in itergroup([1, 2, 3, 4, 5, 6], 3):
+       ...     print tuple(val)
+       (1, 2, 3)
+       (4, 5, 6)
+       >>> for val in itergroup([1, 2, 3, 4, 5, 6], 3):
+       ...     print list(val)
+       [1, 2, 3]
+       [4, 5, 6]
+       >>> for val in itergroup([1, 2, 3, 4, 5, 6, 7], 3):
+       ...     print tuple(val)
+       (1, 2, 3)
+       (4, 5, 6)
+       (7, None, None)
+       >>> for val in itergroup("123456", 3):
+       ...     print tuple(val)
+       ('1', '2', '3')
+       ('4', '5', '6')
+       >>> for val in itergroup("123456", 3):
+       ...     print repr("".join(val))
+       '123'
+       '456'
+       """
+       paddedIterator = itertools.chain(iterator, itertools.repeat(padValue, count-1))
+       nIterators = (paddedIterator, ) * count
+       return itertools.izip(*nIterators)
+
+
+def xzip(*iterators):
+       """Iterative version of builtin 'zip'."""
+       iterators = itertools.imap(iter, iterators)
+       while 1:
+               yield tuple([x.next() for x in iterators])
+
+
+def xmap(func, *iterators):
+       """Iterative version of builtin 'map'."""
+       iterators = itertools.imap(iter, iterators)
+       values_left = [1]
+
+       def values():
+               # Emulate map behaviour, i.e. shorter
+               # sequences are padded with None when
+               # they run out of values.
+               values_left[0] = 0
+               for i in range(len(iterators)):
+                       iterator = iterators[i]
+                       if iterator is None:
+                               yield None
+                       else:
+                               try:
+                                       yield iterator.next()
+                                       values_left[0] = 1
+                               except StopIteration:
+                                       iterators[i] = None
+                                       yield None
+       while 1:
+               args = tuple(values())
+               if not values_left[0]:
+                       raise StopIteration
+               yield func(*args)
+
+
+def xfilter(func, iterator):
+       """Iterative version of builtin 'filter'."""
+       iterator = iter(iterator)
+       while 1:
+               next = iterator.next()
+               if func(next):
+                       yield next
+
+
+def xreduce(func, iterator, default=None):
+       """Iterative version of builtin 'reduce'."""
+       iterator = iter(iterator)
+       try:
+               prev = iterator.next()
+       except StopIteration:
+               return default
+       single = 1
+       for next in iterator:
+               single = 0
+               prev = func(prev, next)
+       if single:
+               return func(prev, default)
+       return prev
+
+
+def daterange(begin, end, delta = datetime.timedelta(1)):
+       """
+       Form a range of dates and iterate over them.
+
+       Arguments:
+       begin -- a date (or datetime) object; the beginning of the range.
+       end   -- a date (or datetime) object; the end of the range.
+       delta -- (optional) a datetime.timedelta object; how much to step each iteration.
+                       Default step is 1 day.
+
+       Usage:
+       """
+       if not isinstance(delta, datetime.timedelta):
+               delta = datetime.timedelta(delta)
+
+       ZERO = datetime.timedelta(0)
+
+       if begin < end:
+               if delta <= ZERO:
+                       raise StopIteration
+               test = end.__gt__
+       else:
+               if delta >= ZERO:
+                       raise StopIteration
+               test = end.__lt__
+
+       while test(begin):
+               yield begin
+               begin += delta
+
+
+class LazyList(object):
+       """
+       A Sequence whose values are computed lazily by an iterator.
+
+       Module for the creation and use of iterator-based lazy lists.
+       this module defines a class LazyList which can be used to represent sequences
+       of values generated lazily. One can also create recursively defined lazy lists
+       that generate their values based on ones previously generated.
+
+       Backport to python 2.5 by Michael Pust
+       """
+
+       __author__ = 'Dan Spitz'
+
+       def __init__(self, iterable):
+               self._exhausted = False
+               self._iterator = iter(iterable)
+               self._data = []
+
+       def __len__(self):
+               """Get the length of a LazyList's computed data."""
+               return len(self._data)
+
+       def __getitem__(self, i):
+               """Get an item from a LazyList.
+               i should be a positive integer or a slice object."""
+               if isinstance(i, int):
+                       #index has not yet been yielded by iterator (or iterator exhausted
+                       #before reaching that index)
+                       if i >= len(self):
+                               self.exhaust(i)
+                       elif i < 0:
+                               raise ValueError('cannot index LazyList with negative number')
+                       return self._data[i]
+
+               #LazyList slices are iterators over a portion of the list.
+               elif isinstance(i, slice):
+                       start, stop, step = i.start, i.stop, i.step
+                       if any(x is not None and x < 0 for x in (start, stop, step)):
+                               raise ValueError('cannot index or step through a LazyList with'
+                                                               'a negative number')
+                       #set start and step to their integer defaults if they are None.
+                       if start is None:
+                               start = 0
+                       if step is None:
+                               step = 1
+
+                       def LazyListIterator():
+                               count = start
+                               predicate = (
+                                       (lambda: True)
+                                       if stop is None
+                                       else (lambda: count < stop)
+                               )
+                               while predicate():
+                                       try:
+                                               yield self[count]
+                                       #slices can go out of actual index range without raising an
+                                       #error
+                                       except IndexError:
+                                               break
+                                       count += step
+                       return LazyListIterator()
+
+               raise TypeError('i must be an integer or slice')
+
+       def __iter__(self):
+               """return an iterator over each value in the sequence,
+               whether it has been computed yet or not."""
+               return self[:]
+
+       def computed(self):
+               """Return an iterator over the values in a LazyList that have
+               already been computed."""
+               return self[:len(self)]
+
+       def exhaust(self, index = None):
+               """Exhaust the iterator generating this LazyList's values.
+               if index is None, this will exhaust the iterator completely.
+               Otherwise, it will iterate over the iterator until either the list
+               has a value for index or the iterator is exhausted.
+               """
+               if self._exhausted:
+                       return
+               if index is None:
+                       ind_range = itertools.count(len(self))
+               else:
+                       ind_range = range(len(self), index + 1)
+
+               for ind in ind_range:
+                       try:
+                               self._data.append(self._iterator.next())
+                       except StopIteration: #iterator is fully exhausted
+                               self._exhausted = True
+                               break
+
+
+class RecursiveLazyList(LazyList):
+
+       def __init__(self, prod, *args, **kwds):
+               super(RecursiveLazyList, self).__init__(prod(self, *args, **kwds))
+
+
+class RecursiveLazyListFactory:
+
+       def __init__(self, producer):
+               self._gen = producer
+
+       def __call__(self, *a, **kw):
+               return RecursiveLazyList(self._gen, *a, **kw)
+
+
+def lazylist(gen):
+       """
+       Decorator for creating a RecursiveLazyList subclass.
+       This should decorate a generator function taking the LazyList object as its
+       first argument which yields the contents of the list in order.
+
+       >>> #fibonnacci sequence in a lazy list.
+       >>> @lazylist
+       ... def fibgen(lst):
+       ...     yield 0
+       ...     yield 1
+       ...     for a, b in itertools.izip(lst, lst[1:]):
+       ...             yield a + b
+       ...
+       >>> #now fibs can be indexed or iterated over as if it were an infinitely long list containing the fibonnaci sequence
+       >>> fibs = fibgen()
+       >>>
+       >>> #prime numbers in a lazy list.
+       >>> @lazylist
+       ... def primegen(lst):
+       ...     yield 2
+       ...     for candidate in itertools.count(3): #start at next number after 2
+       ...             #if candidate is not divisible by any smaller prime numbers,
+       ...             #it is a prime.
+       ...             if all(candidate % p for p in lst.computed()):
+       ...                     yield candidate
+       ...
+       >>> #same for primes- treat it like an infinitely long list containing all prime numbers.
+       >>> primes = primegen()
+       >>> print fibs[0], fibs[1], fibs[2], primes[0], primes[1], primes[2]
+       0 1 1 2 3 5
+       >>> print list(fibs[:10]), list(primes[:10])
+       [0, 1, 1, 2, 3, 5, 8, 13, 21, 34] [2, 3, 5, 7, 11, 13, 17, 19, 23, 29]
+       """
+       return RecursiveLazyListFactory(gen)
+
+
+def map_func(f):
+       """
+       >>> import misc
+       >>> misc.validate_decorator(map_func)
+       """
+
+       @functools.wraps(f)
+       def wrapper(*args):
+               result = itertools.imap(f, args)
+               return result
+       return wrapper
+
+
+def reduce_func(function):
+       """
+       >>> import misc
+       >>> misc.validate_decorator(reduce_func(lambda x: x))
+       """
+
+       def decorator(f):
+
+               @functools.wraps(f)
+               def wrapper(*args):
+                       result = reduce(function, f(args))
+                       return result
+               return wrapper
+       return decorator
+
+
+def any_(iterable):
+       """
+       @note Python Version <2.5
+
+       >>> any_([True, True])
+       True
+       >>> any_([True, False])
+       True
+       >>> any_([False, False])
+       False
+       """
+
+       for element in iterable:
+               if element:
+                       return True
+       return False
+
+
+def all_(iterable):
+       """
+       @note Python Version <2.5
+
+       >>> all_([True, True])
+       True
+       >>> all_([True, False])
+       False
+       >>> all_([False, False])
+       False
+       """
+
+       for element in iterable:
+               if not element:
+                       return False
+       return True
+
+
+def for_every(pred, seq):
+       """
+       for_every takes a one argument predicate function and a sequence.
+       @param pred The predicate function should return true or false.
+       @returns true if every element in seq returns true for predicate, else returns false.
+
+       >>> for_every (lambda c: c > 5,(6,7,8,9))
+       True
+
+       @author Source:http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52907
+       """
+
+       for i in seq:
+               if not pred(i):
+                       return False
+       return True
+
+
+def there_exists(pred, seq):
+       """
+       there_exists takes a one argument predicate     function and a sequence.
+       @param pred The predicate function should return true or false.
+       @returns true if any element in seq returns true for predicate, else returns false.
+
+       >>> there_exists (lambda c: c > 5,(6,7,8,9))
+       True
+
+       @author Source:http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52907
+       """
+
+       for i in seq:
+               if pred(i):
+                       return True
+       return False
+
+
+def func_repeat(quantity, func, *args, **kwd):
+       """
+       Meant to be in connection with "reduce"
+       """
+       for i in xrange(quantity):
+               yield func(*args, **kwd)
+
+
+def function_map(preds, item):
+       """
+       Meant to be in connection with "reduce"
+       """
+       results = (pred(item) for pred in preds)
+
+       return results
+
+
+def functional_if(combiner, preds, item):
+       """
+       Combines the result of a list of predicates applied to item according to combiner
+
+       @see any, every for example combiners
+       """
+       pass_bool = lambda b: b
+
+       bool_results = function_map(preds, item)
+       return combiner(pass_bool, bool_results)
+
+
+def pushback_itr(itr):
+       """
+       >>> list(pushback_itr(xrange(5)))
+       [0, 1, 2, 3, 4]
+       >>>
+       >>> first = True
+       >>> itr = pushback_itr(xrange(5))
+       >>> for i in itr:
+       ...     print i
+       ...     if first and i == 2:
+       ...             first = False
+       ...             print itr.send(i)
+       0
+       1
+       2
+       None
+       2
+       3
+       4
+       >>>
+       >>> first = True
+       >>> itr = pushback_itr(xrange(5))
+       >>> for i in itr:
+       ...     print i
+       ...     if first and i == 2:
+       ...             first = False
+       ...             print itr.send(i)
+       ...             print itr.send(i)
+       0
+       1
+       2
+       None
+       None
+       2
+       2
+       3
+       4
+       >>>
+       >>> itr = pushback_itr(xrange(5))
+       >>> print itr.next()
+       0
+       >>> print itr.next()
+       1
+       >>> print itr.send(10)
+       None
+       >>> print itr.next()
+       10
+       >>> print itr.next()
+       2
+       >>> print itr.send(20)
+       None
+       >>> print itr.send(30)
+       None
+       >>> print itr.send(40)
+       None
+       >>> print itr.next()
+       40
+       >>> print itr.next()
+       30
+       >>> print itr.send(50)
+       None
+       >>> print itr.next()
+       50
+       >>> print itr.next()
+       20
+       >>> print itr.next()
+       3
+       >>> print itr.next()
+       4
+       """
+       for item in itr:
+               maybePushedBack = yield item
+               queue = []
+               while queue or maybePushedBack is not None:
+                       if maybePushedBack is not None:
+                               queue.append(maybePushedBack)
+                               maybePushedBack = yield None
+                       else:
+                               item = queue.pop()
+                               maybePushedBack = yield item
+
+
+def itr_available(queue, initiallyBlock = False):
+       if initiallyBlock:
+               yield queue.get()
+       while not queue.empty():
+               yield queue.get_nowait()
+
+
+if __name__ == "__main__":
+       import doctest
+       print doctest.testmod()
diff --git a/src/util/concurrent.py b/src/util/concurrent.py
new file mode 100644 (file)
index 0000000..503a1b4
--- /dev/null
@@ -0,0 +1,77 @@
+#!/usr/bin/env python
+
+from __future__ import with_statement
+
+import os
+import errno
+import time
+import functools
+import contextlib
+
+
+def synchronized(lock):
+       """
+       Synchronization decorator.
+
+       >>> import misc
+       >>> misc.validate_decorator(synchronized(object()))
+       """
+
+       def wrap(f):
+
+               @functools.wraps(f)
+               def newFunction(*args, **kw):
+                       lock.acquire()
+                       try:
+                               return f(*args, **kw)
+                       finally:
+                               lock.release()
+               return newFunction
+       return wrap
+
+
+@contextlib.contextmanager
+def qlock(queue, gblock = True, gtimeout = None, pblock = True, ptimeout = None):
+       """
+       Locking with a queue, good for when you want to lock an item passed around
+
+       >>> import Queue
+       >>> item = 5
+       >>> lock = Queue.Queue()
+       >>> lock.put(item)
+       >>> with qlock(lock) as i:
+       ...     print i
+       5
+       """
+       item = queue.get(gblock, gtimeout)
+       try:
+               yield item
+       finally:
+               queue.put(item, pblock, ptimeout)
+
+
+@contextlib.contextmanager
+def flock(path, timeout=-1):
+       WAIT_FOREVER = -1
+       DELAY = 0.1
+       timeSpent = 0
+
+       acquired = False
+
+       while timeSpent <= timeout or timeout == WAIT_FOREVER:
+               try:
+                       fd = os.open(path, os.O_CREAT | os.O_EXCL | os.O_RDWR)
+                       acquired = True
+                       break
+               except OSError, e:
+                       if e.errno != errno.EEXIST:
+                               raise
+               time.sleep(DELAY)
+               timeSpent += DELAY
+
+       assert acquired, "Failed to grab file-lock %s within timeout %d" % (path, timeout)
+
+       try:
+               yield fd
+       finally:
+               os.unlink(path)
diff --git a/src/util/coroutines.py b/src/util/coroutines.py
new file mode 100755 (executable)
index 0000000..b1e539e
--- /dev/null
@@ -0,0 +1,623 @@
+#!/usr/bin/env python\r
+\r
+"""\r
+Uses for generators\r
+* Pull pipelining (iterators)\r
+* Push pipelining (coroutines)\r
+* State machines (coroutines)\r
+* "Cooperative multitasking" (coroutines)\r
+* Algorithm -> Object transform for cohesiveness (for example context managers) (coroutines)\r
+\r
+Design considerations\r
+* When should a stage pass on exceptions or have it thrown within it?\r
+* When should a stage pass on GeneratorExits?\r
+* Is there a way to either turn a push generator into a iterator or to use\r
+       comprehensions syntax for push generators (I doubt it)\r
+* When should the stage try and send data in both directions\r
+* Since pull generators (generators), push generators (coroutines), subroutines, and coroutines are all coroutines, maybe we should rename the push generators to not confuse them, like signals/slots? and then refer to two-way generators as coroutines\r
+** If so, make s* and co* implementation of functions\r
+"""\r
+\r
+import threading\r
+import Queue\r
+import pickle\r
+import functools\r
+import itertools\r
+import xml.sax\r
+import xml.parsers.expat\r
+\r
+\r
+def autostart(func):\r
+       """\r
+       >>> @autostart\r
+       ... def grep_sink(pattern):\r
+       ...     print "Looking for %s" % pattern\r
+       ...     while True:\r
+       ...             line = yield\r
+       ...             if pattern in line:\r
+       ...                     print line,\r
+       >>> g = grep_sink("python")\r
+       Looking for python\r
+       >>> g.send("Yeah but no but yeah but no")\r
+       >>> g.send("A series of tubes")\r
+       >>> g.send("python generators rock!")\r
+       python generators rock!\r
+       >>> g.close()\r
+       """\r
+\r
+       @functools.wraps(func)\r
+       def start(*args, **kwargs):\r
+               cr = func(*args, **kwargs)\r
+               cr.next()\r
+               return cr\r
+\r
+       return start\r
+\r
+\r
+@autostart\r
+def printer_sink(format = "%s"):\r
+       """\r
+       >>> pr = printer_sink("%r")\r
+       >>> pr.send("Hello")\r
+       'Hello'\r
+       >>> pr.send("5")\r
+       '5'\r
+       >>> pr.send(5)\r
+       5\r
+       >>> p = printer_sink()\r
+       >>> p.send("Hello")\r
+       Hello\r
+       >>> p.send("World")\r
+       World\r
+       >>> # p.throw(RuntimeError, "Goodbye")\r
+       >>> # p.send("Meh")\r
+       >>> # p.close()\r
+       """\r
+       while True:\r
+               item = yield\r
+               print format % (item, )\r
+\r
+\r
+@autostart\r
+def null_sink():\r
+       """\r
+       Good for uses like with cochain to pick up any slack\r
+       """\r
+       while True:\r
+               item = yield\r
+\r
+\r
+def itr_source(itr, target):\r
+       """\r
+       >>> itr_source(xrange(2), printer_sink())\r
+       0\r
+       1\r
+       """\r
+       for item in itr:\r
+               target.send(item)\r
+\r
+\r
+@autostart\r
+def cofilter(predicate, target):\r
+       """\r
+       >>> p = printer_sink()\r
+       >>> cf = cofilter(None, p)\r
+       >>> cf.send("")\r
+       >>> cf.send("Hello")\r
+       Hello\r
+       >>> cf.send([])\r
+       >>> cf.send([1, 2])\r
+       [1, 2]\r
+       >>> cf.send(False)\r
+       >>> cf.send(True)\r
+       True\r
+       >>> cf.send(0)\r
+       >>> cf.send(1)\r
+       1\r
+       >>> # cf.throw(RuntimeError, "Goodbye")\r
+       >>> # cf.send(False)\r
+       >>> # cf.send(True)\r
+       >>> # cf.close()\r
+       """\r
+       if predicate is None:\r
+               predicate = bool\r
+\r
+       while True:\r
+               try:\r
+                       item = yield\r
+                       if predicate(item):\r
+                               target.send(item)\r
+               except StandardError, e:\r
+                       target.throw(e.__class__, e.message)\r
+\r
+\r
+@autostart\r
+def comap(function, target):\r
+       """\r
+       >>> p = printer_sink()\r
+       >>> cm = comap(lambda x: x+1, p)\r
+       >>> cm.send(0)\r
+       1\r
+       >>> cm.send(1.0)\r
+       2.0\r
+       >>> cm.send(-2)\r
+       -1\r
+       >>> # cm.throw(RuntimeError, "Goodbye")\r
+       >>> # cm.send(0)\r
+       >>> # cm.send(1.0)\r
+       >>> # cm.close()\r
+       """\r
+       while True:\r
+               try:\r
+                       item = yield\r
+                       mappedItem = function(item)\r
+                       target.send(mappedItem)\r
+               except StandardError, e:\r
+                       target.throw(e.__class__, e.message)\r
+\r
+\r
+def func_sink(function):\r
+       return comap(function, null_sink())\r
+\r
+\r
+def expand_positional(function):\r
+\r
+       @functools.wraps(function)\r
+       def expander(item):\r
+               return function(*item)\r
+\r
+       return expander\r
+\r
+\r
+@autostart\r
+def append_sink(l):\r
+       """\r
+       >>> l = []\r
+       >>> apps = append_sink(l)\r
+       >>> apps.send(1)\r
+       >>> apps.send(2)\r
+       >>> apps.send(3)\r
+       >>> print l\r
+       [1, 2, 3]\r
+       """\r
+       while True:\r
+               item = yield\r
+               l.append(item)\r
+\r
+\r
+@autostart\r
+def last_n_sink(l, n = 1):\r
+       """\r
+       >>> l = []\r
+       >>> lns = last_n_sink(l)\r
+       >>> lns.send(1)\r
+       >>> lns.send(2)\r
+       >>> lns.send(3)\r
+       >>> print l\r
+       [3]\r
+       """\r
+       del l[:]\r
+       while True:\r
+               item = yield\r
+               extraCount = len(l) - n + 1\r
+               if 0 < extraCount:\r
+                       del l[0:extraCount]\r
+               l.append(item)\r
+\r
+\r
+@autostart
+def coreduce(target, function, initializer = None):
+	"""
+	Stage that folds received items with *function* (coroutine analog of
+	reduce) and forwards each running total to *target*.  When *initializer*
+	is None the first received item seeds the accumulator.
+
+	>>> reduceResult = []
+	>>> lns = last_n_sink(reduceResult)
+	>>> cr = coreduce(lns, lambda x, y: x + y, 0)
+	>>> cr.send(1)
+	>>> cr.send(2)
+	>>> cr.send(3)
+	>>> print reduceResult
+	[6]
+	>>> cr = coreduce(lns, lambda x, y: x + y)
+	>>> cr.send(1)
+	>>> cr.send(2)
+	>>> cr.send(3)
+	>>> print reduceResult
+	[6]
+	"""
+	isFirst = True
+	cumulativeRef = initializer
+	while True:
+		item = yield
+		if isFirst and initializer is None:
+			cumulativeRef = item
+		else:
+			cumulativeRef = function(cumulativeRef, item)
+		target.send(cumulativeRef)
+		isFirst = False
+\r
+\r
+@autostart
+def cotee(targets):
+	"""
+	Takes a sequence of coroutines and sends the received items to all of them
+
+	>>> ct = cotee((printer_sink("1 %s"), printer_sink("2 %s")))
+	>>> ct.send("Hello")
+	1 Hello
+	2 Hello
+	>>> ct.send("World")
+	1 World
+	2 World
+	>>> # ct.throw(RuntimeError, "Goodbye")
+	>>> # ct.send("Meh")
+	>>> # ct.close()
+	"""
+	while True:
+		try:
+			item = yield
+			for target in targets:
+				target.send(item)
+		except StandardError, e:
+			# Exceptions thrown into this stage are re-thrown into every target
+			for target in targets:
+				target.throw(e.__class__, e.message)
+\r
+\r
+class CoTee(object):
+	"""
+	Fan-out stage whose set of sinks can be changed while it runs.
+
+	>>> ct = CoTee()
+	>>> ct.register_sink(printer_sink("1 %s"))
+	>>> ct.register_sink(printer_sink("2 %s"))
+	>>> ct.stage.send("Hello")
+	1 Hello
+	2 Hello
+	>>> ct.stage.send("World")
+	1 World
+	2 World
+	>>> ct.register_sink(printer_sink("3 %s"))
+	>>> ct.stage.send("Foo")
+	1 Foo
+	2 Foo
+	3 Foo
+	>>> # ct.stage.throw(RuntimeError, "Goodbye")
+	>>> # ct.stage.send("Meh")
+	>>> # ct.stage.close()
+	"""
+
+	def __init__(self):
+		self.stage = self._stage()
+		self._targets = []
+
+	def register_sink(self, sink):
+		self._targets.append(sink)
+
+	def unregister_sink(self, sink):
+		self._targets.remove(sink)
+
+	def restart(self):
+		# Replaces a dead/closed stage; the registered sinks are kept
+		self.stage = self._stage()
+
+	@autostart
+	def _stage(self):
+		while True:
+			try:
+				item = yield
+				for target in self._targets:
+					target.send(item)
+			except StandardError, e:
+				# Propagate thrown exceptions into every registered sink
+				for target in self._targets:
+					target.throw(e.__class__, e.message)
+\r
+\r
+def _flush_queue(queue):
+	# Drain without blocking; empty() is racy across threads, which is
+	# acceptable for the single-threaded doctest usage here
+	while not queue.empty():
+		yield queue.get()
+\r
+\r
+@autostart
+def cocount(target, start = 0):
+	"""
+	Stage that ignores the value of each received item and sends an
+	incrementing count (beginning at *start*) to *target*.
+
+	>>> cc = cocount(printer_sink("%s"))
+	>>> cc.send("a")
+	0
+	>>> cc.send(None)
+	1
+	>>> cc.send([])
+	2
+	>>> cc.send(0)
+	3
+	"""
+	for i in itertools.count(start):
+		item = yield
+		target.send(i)
+\r
+\r
+@autostart
+def coenumerate(target, start = 0):
+	"""
+	Coroutine analog of enumerate: forwards (index, item) pairs to *target*.
+
+	>>> ce = coenumerate(printer_sink("%r"))
+	>>> ce.send("a")
+	(0, 'a')
+	>>> ce.send(None)
+	(1, None)
+	>>> ce.send([])
+	(2, [])
+	>>> ce.send(0)
+	(3, 0)
+	"""
+	for i in itertools.count(start):
+		item = yield
+		decoratedItem = i, item
+		target.send(decoratedItem)
+\r
+\r
+@autostart
+def corepeat(target, elem):
+	"""
+	Stage that sends the constant *elem* to *target* for every received item.
+
+	>>> cr = corepeat(printer_sink("%s"), "Hello World")
+	>>> cr.send("a")
+	Hello World
+	>>> cr.send(None)
+	Hello World
+	>>> cr.send([])
+	Hello World
+	>>> cr.send(0)
+	Hello World
+	"""
+	while True:
+		item = yield
+		target.send(elem)
+\r
+\r
+@autostart
+def cointercept(target, elems):
+	"""
+	Stage that replaces each received item with the next element of *elems*;
+	once *elems* is exhausted the generator ends, so the next send() raises
+	StopIteration into the caller (see the doctest below).
+
+	>>> cr = cointercept(printer_sink("%s"), [1, 2, 3, 4])
+	>>> cr.send("a")
+	1
+	>>> cr.send(None)
+	2
+	>>> cr.send([])
+	3
+	>>> cr.send(0)
+	4
+	>>> cr.send("Bye")
+	Traceback (most recent call last):
+	  File "/usr/lib/python2.5/doctest.py", line 1228, in __run
+	    compileflags, 1) in test.globs
+	  File "<doctest __main__.cointercept[5]>", line 1, in <module>
+	    cr.send("Bye")
+	StopIteration
+	"""
+	item = yield
+	for elem in elems:
+		target.send(elem)
+		item = yield
+\r
+\r
+@autostart
+def codropwhile(target, pred):
+	"""
+	Coroutine analog of itertools.dropwhile: discards items while *pred* is
+	true, then forwards everything (including the first failing item's
+	successors; note the item that breaks the predicate is itself dropped).
+
+	>>> cdw = codropwhile(printer_sink("%s"), lambda x: x)
+	>>> cdw.send([0, 1, 2])
+	>>> cdw.send(1)
+	>>> cdw.send(True)
+	>>> cdw.send(False)
+	>>> cdw.send([0, 1, 2])
+	[0, 1, 2]
+	>>> cdw.send(1)
+	1
+	>>> cdw.send(True)
+	True
+	"""
+	while True:
+		item = yield
+		if not pred(item):
+			break
+
+	while True:
+		item = yield
+		target.send(item)
+\r
+\r
+@autostart
+def cotakewhile(target, pred):
+	"""
+	Coroutine analog of itertools.takewhile: forwards items while *pred* is
+	true, then silently swallows everything afterwards.
+
+	>>> ctw = cotakewhile(printer_sink("%s"), lambda x: x)
+	>>> ctw.send([0, 1, 2])
+	[0, 1, 2]
+	>>> ctw.send(1)
+	1
+	>>> ctw.send(True)
+	True
+	>>> ctw.send(False)
+	>>> ctw.send([0, 1, 2])
+	>>> ctw.send(1)
+	>>> ctw.send(True)
+	"""
+	while True:
+		item = yield
+		if not pred(item):
+			break
+		target.send(item)
+
+	# Keep accepting (and dropping) items so senders do not hit StopIteration
+	while True:
+		item = yield
+\r
+\r
+@autostart
+def coslice(target, lower, upper):
+	"""
+	Stage that forwards only the items whose ordinal falls in [lower, upper).
+
+	>>> cs = coslice(printer_sink("%r"), 3, 5)
+	>>> cs.send("0")
+	>>> cs.send("1")
+	>>> cs.send("2")
+	>>> cs.send("3")
+	'3'
+	>>> cs.send("4")
+	'4'
+	>>> cs.send("5")
+	>>> cs.send("6")
+	"""
+	for i in xrange(lower):
+		item = yield
+	for i in xrange(upper - lower):
+		item = yield
+		target.send(item)
+	# Keep accepting (and dropping) items so senders do not hit StopIteration
+	while True:
+		item = yield
+\r
+\r
+@autostart
+def cochain(targets):
+	"""
+	Feeds each target until it raises StopIteration, then moves on to the
+	next one; the item that triggered the StopIteration is replayed into the
+	next target so no item is lost.
+
+	>>> cr = cointercept(printer_sink("good %s"), [1, 2, 3, 4])
+	>>> cc = cochain([cr, printer_sink("end %s")])
+	>>> cc.send("a")
+	good 1
+	>>> cc.send(None)
+	good 2
+	>>> cc.send([])
+	good 3
+	>>> cc.send(0)
+	good 4
+	>>> cc.send("Bye")
+	end Bye
+	"""
+	behind = []
+	for target in targets:
+		try:
+			# First replay any item the previous target refused
+			while behind:
+				item = behind.pop()
+				target.send(item)
+			while True:
+				item = yield
+				target.send(item)
+		except StopIteration:
+			behind.append(item)
+\r
+\r
+@autostart
+def queue_sink(queue):
+	"""
+	Sink that encodes every event as a (type, value) tuple on *queue*:
+	(None, item) for data, (ExceptionClass, message) for thrown errors and
+	(GeneratorExit, None) on close.  decode_item() reverses the encoding.
+
+	>>> q = Queue.Queue()
+	>>> qs = queue_sink(q)
+	>>> qs.send("Hello")
+	>>> qs.send("World")
+	>>> qs.throw(RuntimeError, "Goodbye")
+	>>> qs.send("Meh")
+	>>> qs.close()
+	>>> print [i for i in _flush_queue(q)]
+	[(None, 'Hello'), (None, 'World'), (<type 'exceptions.RuntimeError'>, 'Goodbye'), (None, 'Meh'), (<type 'exceptions.GeneratorExit'>, None)]
+	"""
+	while True:
+		try:
+			item = yield
+			queue.put((None, item))
+		except StandardError, e:
+			queue.put((e.__class__, e.message))
+		except GeneratorExit:
+			# Record the shutdown marker, then let the generator actually exit
+			queue.put((GeneratorExit, None))
+			raise
+\r
+\r
+def decode_item(item, target):
+	"""
+	Replay one (type, value) tuple produced by queue_sink/pickle_sink into
+	*target*; returns True when the stream signalled completion.
+	"""
+	if item[0] is None:
+		target.send(item[1])
+		return False
+	elif item[0] is GeneratorExit:
+		target.close()
+		return True
+	else:
+		target.throw(item[0], item[1])
+		return False
+\r
+\r
+def queue_source(queue, target):
+	"""
+	Blocking pump: reads encoded events from *queue* and replays them into
+	*target* until a (GeneratorExit, None) marker arrives.
+
+	>>> q = Queue.Queue()
+	>>> for i in [
+	...     (None, 'Hello'),
+	...     (None, 'World'),
+	...     (GeneratorExit, None),
+	...     ]:
+	...     q.put(i)
+	>>> qs = queue_source(q, printer_sink())
+	Hello
+	World
+	"""
+	isDone = False
+	while not isDone:
+		item = queue.get()
+		isDone = decode_item(item, target)
+\r
+\r
+def threaded_stage(target, thread_factory = threading.Thread):
+	"""
+	Run *target* on its own thread fed through a queue.  Returns a factory
+	(not a started coroutine): calling the returned partial creates a
+	queue_sink that forwards items to the background thread.
+	"""
+	messages = Queue.Queue()
+
+	run_source = functools.partial(queue_source, messages, target)
+	thread_factory(target=run_source).start()
+
+	# Sink running in current thread
+	return functools.partial(queue_sink, messages)
+\r
+\r
+@autostart
+def pickle_sink(f):
+	"""
+	Sink that serializes events to the file object *f* using the same
+	(type, value) encoding as queue_sink; pickle_source replays them.
+	"""
+	while True:
+		try:
+			item = yield
+			pickle.dump((None, item), f)
+		except StandardError, e:
+			pickle.dump((e.__class__, e.message), f)
+		except GeneratorExit:
+			pickle.dump((GeneratorExit, ), f)
+			raise
+		except StopIteration:
+			# NOTE(review): only reachable if StopIteration is thrown in
+			# explicitly; closes the backing file and ends the stage
+			f.close()
+			return
+\r
+\r
+def pickle_source(f, target):
+	"""
+	Replay events pickled by pickle_sink from file object *f* into *target*;
+	EOF is treated as end-of-stream and closes the target.
+	"""
+	try:
+		isDone = False
+		while not isDone:
+			item = pickle.load(f)
+			isDone = decode_item(item, target)
+	except EOFError:
+		target.close()
+\r
+\r
+class EventHandler(object, xml.sax.ContentHandler):
+	"""
+	SAX ContentHandler that forwards parse events to a coroutine target as
+	(START, (name, attrs)) / (TEXT, text) / (END, name) tuples.
+	"""
+
+	START = "start"
+	TEXT = "text"
+	END = "end"
+
+	def __init__(self, target):
+		object.__init__(self)
+		xml.sax.ContentHandler.__init__(self)
+		self._target = target
+
+	def startElement(self, name, attrs):
+		# NOTE(review): attrs._attrs is a private SAX attribute -- confirm it
+		# exists for the parser/driver actually in use
+		self._target.send((self.START, (name, attrs._attrs)))
+
+	def characters(self, text):
+		self._target.send((self.TEXT, text))
+
+	def endElement(self, name):
+		self._target.send((self.END, name))
+\r
+\r
+def expat_parse(f, target):
+	"""
+	Parse file object *f* with expat, sending ('start', (name, attrs)),
+	('end', name) and ('text', data) events into the coroutine *target*.
+	"""
+	parser = xml.parsers.expat.ParserCreate()
+	# Large buffer with buffer_text=True coalesces character data into
+	# fewer, bigger 'text' events
+	parser.buffer_size = 65536
+	parser.buffer_text = True
+	parser.returns_unicode = False
+	parser.StartElementHandler = lambda name, attrs: target.send(('start', (name, attrs)))
+	parser.EndElementHandler = lambda name: target.send(('end', name))
+	parser.CharacterDataHandler = lambda data: target.send(('text', data))
+	parser.ParseFile(f)
+\r
+\r
+if __name__ == "__main__":
+	# Run this module's doctests when executed directly
+	import doctest
+	doctest.testmod()
diff --git a/src/util/go_utils.py b/src/util/go_utils.py
new file mode 100644 (file)
index 0000000..d066542
--- /dev/null
@@ -0,0 +1,326 @@
+#!/usr/bin/env python
+
+from __future__ import with_statement
+
+import time
+import functools
+import threading
+import Queue
+import logging
+
+import gobject
+
+import algorithms
+import misc
+
+
+_moduleLogger = logging.getLogger(__name__)
+
+
+def make_idler(func):
+	"""
+	Decorator that makes a generator-function into a function that will continue execution on next call
+	"""
+	# One shared generator per decorated function: not safe for overlapping
+	# or concurrent invocations
+	a = []
+
+	@functools.wraps(func)
+	def decorated_func(*args, **kwds):
+		if not a:
+			a.append(func(*args, **kwds))
+		try:
+			a[0].next()
+			# True tells the gobject mainloop to call us again
+			return True
+		except StopIteration:
+			del a[:]
+			return False
+
+	return decorated_func
+
+
+def async(func):
+	"""
+	Make a function mainloop friendly. the function will be called at the
+	next mainloop idle state.
+
+	>>> import misc
+	>>> misc.validate_decorator(async)
+	"""
+
+	@functools.wraps(func)
+	def new_function(*args, **kwargs):
+
+		def async_function():
+			func(*args, **kwargs)
+			# False removes the idle source after one invocation
+			return False
+
+		gobject.idle_add(async_function)
+
+	return new_function
+
+
+class Async(object):
+	"""
+	Wraps a callable so it runs on gobject mainloop idle; cancellable.
+	With once=True (default) the callable runs a single time.
+	"""
+
+	def __init__(self, func, once = True):
+		self.__func = func
+		self.__idleId = None
+		self.__once = once
+
+	def start(self):
+		assert self.__idleId is None
+		if self.__once:
+			self.__idleId = gobject.idle_add(self._on_once)
+		else:
+			# Repeating mode: __func's own return value controls re-scheduling
+			self.__idleId = gobject.idle_add(self.__func)
+
+	def is_running(self):
+		return self.__idleId is not None
+
+	def cancel(self):
+		if self.__idleId is not None:
+			gobject.source_remove(self.__idleId)
+			self.__idleId = None
+
+	def __call__(self):
+		return self.start()
+
+	@misc.log_exception(_moduleLogger)
+	def _on_once(self):
+		self.cancel()
+		try:
+			self.__func()
+		except Exception:
+			# Swallowed so the idle callback always returns cleanly
+			pass
+		return False
+
+
+class Timeout(object):
+	"""
+	Wraps a callable behind a gobject timeout; started with a mandatory
+	seconds=N keyword (0 degrades to an idle callback).  Cancellable.
+	"""
+
+	def __init__(self, func, once = True):
+		self.__func = func
+		self.__timeoutId = None
+		self.__once = once
+
+	def start(self, **kwds):
+		assert self.__timeoutId is None
+
+		callback = self._on_once if self.__once else self.__func
+
+		# Emulate a keyword-only argument: exactly "seconds" must be given
+		assert len(kwds) == 1
+		timeoutInSeconds = kwds["seconds"]
+		assert 0 <= timeoutInSeconds
+
+		if timeoutInSeconds == 0:
+			self.__timeoutId = gobject.idle_add(callback)
+		else:
+			self.__timeoutId = timeout_add_seconds(timeoutInSeconds, callback)
+
+	def is_running(self):
+		return self.__timeoutId is not None
+
+	def cancel(self):
+		if self.__timeoutId is not None:
+			gobject.source_remove(self.__timeoutId)
+			self.__timeoutId = None
+
+	def __call__(self, **kwds):
+		return self.start(**kwds)
+
+	@misc.log_exception(_moduleLogger)
+	def _on_once(self):
+		self.cancel()
+		try:
+			self.__func()
+		except Exception:
+			# Swallowed so the timeout callback always returns cleanly
+			pass
+		return False
+
+
+_QUEUE_EMPTY = object()
+
+
+class AsyncPool(object):
+	"""
+	Single worker thread consuming queued (func, args, kwds, on_success,
+	on_error) tasks; each result (or exception) is trampolined back to the
+	gobject mainloop thread via idle_add.
+	"""
+
+	def __init__(self):
+		self.__workQueue = Queue.Queue()
+		self.__thread = threading.Thread(
+			name = type(self).__name__,
+			target = self.__consume_queue,
+		)
+		self.__isRunning = True
+
+	def start(self):
+		self.__thread.start()
+
+	def stop(self):
+		self.__isRunning = False
+		for _ in algorithms.itr_available(self.__workQueue):
+			pass # eat up queue to cut down dumb work
+		# Sentinel wakes the worker so it can exit its loop
+		self.__workQueue.put(_QUEUE_EMPTY)
+
+	def clear_tasks(self):
+		for _ in algorithms.itr_available(self.__workQueue):
+			pass # eat up queue to cut down dumb work
+
+	def add_task(self, func, args, kwds, on_success, on_error):
+		task = func, args, kwds, on_success, on_error
+		self.__workQueue.put(task)
+
+	@misc.log_exception(_moduleLogger)
+	def __trampoline_callback(self, on_success, on_error, isError, result):
+		# Runs on the mainloop thread (scheduled via idle_add)
+		if not self.__isRunning:
+			if isError:
+				_moduleLogger.error("Masking: %s" % (result, ))
+			# After stop(), every callback is converted into an error
+			isError = True
+			result = StopIteration("Cancelling all callbacks")
+		callback = on_success if not isError else on_error
+		try:
+			callback(result)
+		except Exception:
+			_moduleLogger.exception("Callback errored")
+		return False
+
+	@misc.log_exception(_moduleLogger)
+	def __consume_queue(self):
+		# Runs on the worker thread until the _QUEUE_EMPTY sentinel arrives
+		while True:
+			task = self.__workQueue.get()
+			if task is _QUEUE_EMPTY:
+				break
+			func, args, kwds, on_success, on_error = task
+
+			try:
+				result = func(*args, **kwds)
+				isError = False
+			except Exception, e:
+				_moduleLogger.error("Error, passing it back to the main thread")
+				result = e
+				isError = True
+			self.__workQueue.task_done()
+
+			gobject.idle_add(self.__trampoline_callback, on_success, on_error, isError, result)
+		_moduleLogger.debug("Shutting down worker thread")
+
+
+class AsyncLinearExecution(object):
+	"""
+	Drives a generator-based async workflow: *func* yields (trampoline,
+	args, kwds) tuples which are run on *pool*; each result is sent back
+	into the generator (exceptions are thrown in) until it finishes.
+	"""
+
+	def __init__(self, pool, func):
+		self._pool = pool
+		self._func = func
+		self._run = None
+
+	def start(self, *args, **kwds):
+		assert self._run is None
+		self._run = self._func(*args, **kwds)
+		trampoline, args, kwds = self._run.send(None) # priming the function
+		self._pool.add_task(
+			trampoline,
+			args,
+			kwds,
+			self.on_success,
+			self.on_error,
+		)
+
+	@misc.log_exception(_moduleLogger)
+	def on_success(self, result):
+		_moduleLogger.debug("Processing success for: %r", self._func)
+		try:
+			trampoline, args, kwds = self._run.send(result)
+		except StopIteration, e:
+			# Generator finished; nothing more to schedule
+			pass
+		else:
+			self._pool.add_task(
+				trampoline,
+				args,
+				kwds,
+				self.on_success,
+				self.on_error,
+			)
+
+	@misc.log_exception(_moduleLogger)
+	def on_error(self, error):
+		_moduleLogger.debug("Processing error for: %r", self._func)
+		try:
+			trampoline, args, kwds = self._run.throw(error)
+		except StopIteration, e:
+			# Generator chose not to handle the error (or finished)
+			pass
+		else:
+			self._pool.add_task(
+				trampoline,
+				args,
+				kwds,
+				self.on_success,
+				self.on_error,
+			)
+
+
+class AutoSignal(object):
+	"""
+	Tracks widget signal connections and disconnects all of them when the
+	given toplevel widget is destroyed.
+	"""
+
+	def __init__(self, toplevel):
+		self.__disconnectPool = []
+		toplevel.connect("destroy", self.__on_destroy)
+
+	def connect_auto(self, widget, *args):
+		id = widget.connect(*args)
+		self.__disconnectPool.append((widget, id))
+
+	@misc.log_exception(_moduleLogger)
+	def __on_destroy(self, widget):
+		_moduleLogger.info("Destroy: %r (%s to clean up)" % (self, len(self.__disconnectPool)))
+		for widget, id in self.__disconnectPool:
+			widget.disconnect(id)
+		del self.__disconnectPool[:]
+
+
+def throttled(minDelay, queue):
+       """
+       Throttle the calls to a function by queueing all the calls that happen
+       before the minimum delay
+
+       >>> import misc
+       >>> import Queue
+       >>> misc.validate_decorator(throttled(0, Queue.Queue()))
+       """
+
+       def actual_decorator(func):
+
+               lastCallTime = [None]
+
+               def process_queue():
+                       if 0 < len(queue):
+                               func, args, kwargs = queue.pop(0)
+                               lastCallTime[0] = time.time() * 1000
+                               func(*args, **kwargs)
+                       return False
+
+               @functools.wraps(func)
+               def new_function(*args, **kwargs):
+                       now = time.time() * 1000
+                       if (
+                               lastCallTime[0] is None or
+                               (now - lastCallTime >= minDelay)
+                       ):
+                               lastCallTime[0] = now
+                               func(*args, **kwargs)
+                       else:
+                               queue.append((func, args, kwargs))
+                               lastCallDelta = now - lastCallTime[0]
+                               processQueueTimeout = int(minDelay * len(queue) - lastCallDelta)
+                               gobject.timeout_add(processQueueTimeout, process_queue)
+
+               return new_function
+
+       return actual_decorator
+
+
+def _old_timeout_add_seconds(timeout, callback):
+	# Fallback for pygobject builds lacking timeout_add_seconds
+	return gobject.timeout_add(timeout * 1000, callback)
+
+
+def _timeout_add_seconds(timeout, callback):
+	# Preferred: second-granularity timers let glib batch wakeups
+	return gobject.timeout_add_seconds(timeout, callback)
+
+
+# Pick the best implementation available at import time
+try:
+	gobject.timeout_add_seconds
+	timeout_add_seconds = _timeout_add_seconds
+except AttributeError:
+	timeout_add_seconds = _old_timeout_add_seconds
diff --git a/src/util/io.py b/src/util/io.py
new file mode 100644 (file)
index 0000000..aece2dd
--- /dev/null
@@ -0,0 +1,129 @@
+#!/usr/bin/env python
+
+
+from __future__ import with_statement
+
+import os
+import pickle
+import contextlib
+import itertools
+import functools
+
+
+@contextlib.contextmanager
+def change_directory(directory):
+	"""
+	Context manager: chdir into *directory*, yielding (previousDirectory,
+	currentDirectory); the previous working directory is always restored.
+	"""
+	previousDirectory = os.getcwd()
+	os.chdir(directory)
+	currentDirectory = os.getcwd()
+
+	try:
+		yield previousDirectory, currentDirectory
+	finally:
+		os.chdir(previousDirectory)
+
+
+@contextlib.contextmanager
+def pickled(filename):
+       """
+       Here is an example usage:
+       with pickled("foo.db") as p:
+               p("users", list).append(["srid", "passwd", 23])
+       """
+
+       if os.path.isfile(filename):
+               data = pickle.load(open(filename))
+       else:
+               data = {}
+
+       def getter(item, factory):
+               if item in data:
+                       return data[item]
+               else:
+                       data[item] = factory()
+                       return data[item]
+
+       yield getter
+
+       pickle.dump(data, open(filename, "w"))
+
+
+@contextlib.contextmanager
+def redirect(object_, attr, value):
+	"""
+	Temporarily replace attribute *attr* of *object_* with *value*,
+	restoring the original on exit (even on error).
+
+	>>> import sys
+	... with redirect(sys, 'stdout', open('stdout', 'w')):
+	...     print "hello"
+	...
+	>>> print "we're back"
+	we're back
+	"""
+	orig = getattr(object_, attr)
+	setattr(object_, attr, value)
+	try:
+		yield
+	finally:
+		setattr(object_, attr, orig)
+
+
+def pathsplit(path):
+	"""
+	Split *path* on os.path.sep into its component list.
+
+	>>> pathsplit("/a/b/c")
+	['', 'a', 'b', 'c']
+	>>> pathsplit("./plugins/builtins.ini")
+	['.', 'plugins', 'builtins.ini']
+	"""
+	pathParts = path.split(os.path.sep)
+	return pathParts
+
+
+def commonpath(l1, l2, common=None):
+       """
+       >>> commonpath(pathsplit('/a/b/c/d'), pathsplit('/a/b/c1/d1'))
+       (['', 'a', 'b'], ['c', 'd'], ['c1', 'd1'])
+       >>> commonpath(pathsplit("./plugins/"), pathsplit("./plugins/builtins.ini"))
+       (['.', 'plugins'], [''], ['builtins.ini'])
+       >>> commonpath(pathsplit("./plugins/builtins"), pathsplit("./plugins"))
+       (['.', 'plugins'], ['builtins'], [])
+       """
+       if common is None:
+               common = []
+
+       if l1 == l2:
+               return l1, [], []
+
+       for i, (leftDir, rightDir) in enumerate(zip(l1, l2)):
+               if leftDir != rightDir:
+                       return l1[0:i], l1[i:], l2[i:]
+       else:
+               if leftDir == rightDir:
+                       i += 1
+               return l1[0:i], l1[i:], l2[i:]
+
+
+def relpath(p1, p2):
+	"""
+	Return the relative path from *p1* to *p2*.
+
+	>>> relpath('/', '/')
+	'./'
+	>>> relpath('/a/b/c/d', '/')
+	'../../../../'
+	>>> relpath('/a/b/c/d', '/a/b/c1/d1')
+	'../../c1/d1'
+	>>> relpath('/a/b/c/d', '/a/b/c1/d1/')
+	'../../c1/d1'
+	>>> relpath("./plugins/builtins", "./plugins")
+	'../'
+	>>> relpath("./plugins/", "./plugins/builtins.ini")
+	'builtins.ini'
+	"""
+	sourcePath = os.path.normpath(p1)
+	destPath = os.path.normpath(p2)
+
+	(common, sourceOnly, destOnly) = commonpath(pathsplit(sourcePath), pathsplit(destPath))
+	if len(sourceOnly) or len(destOnly):
+		# One ".." per path element unique to the source, then the dest tail
+		relParts = itertools.chain(
+			(('..' + os.sep) * len(sourceOnly), ),
+			destOnly,
+		)
+		return os.path.join(*relParts)
+	else:
+		return "."+os.sep
diff --git a/src/util/linux.py b/src/util/linux.py
new file mode 100644 (file)
index 0000000..4837f2a
--- /dev/null
@@ -0,0 +1,13 @@
+#!/usr/bin/env python
+
+
+import logging
+
+
+def set_process_name(name):
+       try: # change process name for killall
+               import ctypes
+               libc = ctypes.CDLL('libc.so.6')
+               libc.prctl(15, name, 0, 0, 0)
+       except Exception, e:
+               logging.warning('Unable to set processName: %s" % e')
diff --git a/src/util/misc.py b/src/util/misc.py
new file mode 100644 (file)
index 0000000..cf5c22a
--- /dev/null
@@ -0,0 +1,757 @@
+#!/usr/bin/env python
+
+from __future__ import with_statement
+
+import sys
+import re
+import cPickle
+
+import functools
+import contextlib
+import inspect
+
+import optparse
+import traceback
+import warnings
+import string
+
+
+_indentationLevel = [0]
+
+
+def log_call(logger):
+
+       def log_call_decorator(func):
+
+               @functools.wraps(func)
+               def wrapper(*args, **kwds):
+                       logger.debug("%s> %s" % (" " * _indentationLevel[0], func.__name__, ))
+                       _indentationLevel[0] += 1
+                       try:
+                               return func(*args, **kwds)
+                       finally:
+                               _indentationLevel[0] -= 1
+                               logger.debug("%s< %s" % (" " * _indentationLevel[0], func.__name__, ))
+
+               return wrapper
+
+       return log_call_decorator
+
+
def log_exception(logger):
	"""
	Decorator factory: log (via logger.exception) and re-raise anything
	that escapes the wrapped function.
	"""

	def decorate(func):

		@functools.wraps(func)
		def guarded(*args, **kwds):
			try:
				return func(*args, **kwds)
			except Exception:
				logger.exception(func.__name__)
				raise

		return guarded

	return decorate
+
+
def printfmt(template):
	"""
	Print `template` with $-placeholders filled from the caller's locals.

	This hides having to create the Template object and call substitute/safe_substitute on it. For example:

	>>> num = 10
	>>> word = "spam"
	>>> printfmt("I would like to order $num units of $word, please") #doctest: +SKIP
	I would like to order 10 units of spam, please
	"""
	# stack()[-1] is the OUTERMOST frame (module scope), not the direct caller.
	# NOTE(review): looks intentional for interactive/module use, but
	# inspect.stack()[1] would be the immediate caller -- confirm.
	frame = inspect.stack()[-1][0]
	try:
		print string.Template(template).safe_substitute(frame.f_locals)
	finally:
		# Drop the frame reference promptly to avoid reference cycles
		del frame
+
+
def is_special(name):
	"""Return True for dunder names such as ``__init__``."""
	return name.startswith("__") and name.endswith("__")


def is_private(name):
	"""Return True for single-underscore-private names (dunders excluded)."""
	if is_special(name):
		return False
	return name.startswith("_")
+
+
def privatize(clsName, attributeName):
	"""
	Build, at runtime, the name-mangled form of a private attribute.

	``privatize("Test", "me")`` yields ``"_Test__me"`` -- the name the
	compiler would produce for ``__me`` inside ``class Test``.

	>>> privatize("Test", "me")
	'_Test__me'
	"""
	return "_%s__%s" % (clsName, attributeName)
+
+
def obfuscate(clsName, attributeName):
	"""
	Turn an already-private (dunder-prefixed) name into its runtime,
	name-mangled form.

	>>> obfuscate("Test", "__me")
	'_Test__me'
	"""
	return "_" + clsName + attributeName
+
+
class PAOptionParser(optparse.OptionParser, object):
	"""
	optparse.OptionParser extended with declarative positional arguments.

	>>> if __name__ == '__main__':
	...     #parser = PAOptionParser("My usage str")
	...     parser = PAOptionParser()
	...     parser.add_posarg("Foo", help="Foo usage")
	...     parser.add_posarg("Bar", dest="bar_dest")
	...     parser.add_posarg("Language", dest='tr_type', type="choice", choices=("Python", "Other"))
	...     parser.add_option('--stocksym', dest='symbol')
	...     values, args = parser.parse_args()
	...     print values, args
	...

	python mycp.py  -h
	python mycp.py
	python mycp.py  foo
	python mycp.py  foo bar

	python mycp.py foo bar lava
	Usage: pa.py <Foo> <Bar> <Language> [options]

	Positional Arguments:
	Foo: Foo usage
	Bar:
	Language:

	pa.py: error: option --Language: invalid choice: 'lava' (choose from 'Python', 'Other'
	"""

	def __init__(self, *args, **kw):
		# (name, help) pairs for each declared positional argument, in order
		self.posargs = []
		super(PAOptionParser, self).__init__(*args, **kw)

	def add_posarg(self, *args, **kw):
		"""Declare a required positional argument (same kwargs as add_option)."""
		pa_help = kw.get("help", "")
		# Positional args are modelled as hidden "--name" options so that
		# optparse performs the type/choice validation for us
		kw["help"] = optparse.SUPPRESS_HELP
		o = self.add_option("--%s" % args[0], *args[1:], **kw)
		self.posargs.append((args[0], pa_help))

	def get_usage(self, *args, **kwargs):
		# Synthesize "%prog <Foo> <Bar> [options]" plus a section listing
		# the positional arguments with their help strings
		params = (' '.join(["<%s>" % arg[0] for arg in self.posargs]), '\n '.join(["%s: %s" % (arg) for arg in self.posargs]))
		self.usage = "%%prog %s [options]\n\nPositional Arguments:\n %s" % params
		return super(PAOptionParser, self).get_usage(*args, **kwargs)

	def parse_args(self, *args, **kwargs):
		# NOTE(review): any passed-in args are ignored; arguments are always
		# taken from sys.argv -- confirm this is intentional
		args = sys.argv[1:]
		args0 = []
		# Pair each leading command-line value with its hidden option name
		for p, v in zip(self.posargs, args):
			args0.append("--%s" % p[0])
			args0.append(v)
		args = args0 + args
		options, args = super(PAOptionParser, self).parse_args(args, **kwargs)
		if len(args) < len(self.posargs):
			msg = 'Missing value(s) for "%s"\n' % ", ".join([arg[0] for arg in self.posargs][len(args):])
			self.error(msg)
		return options, args
+
+
def explicitly(name, stackadd=0):
	"""
	This is an alias for adding to '__all__'.  Less error-prone than using
	__all__ itself, since setting __all__ directly is prone to stomping on
	things implicitly exported via L{alias}.

	@param name Name to export from the calling module
	@param stackadd Extra stack frames to skip (for use from wrappers like L{public})

	@note Taken from PyExport (which could turn out pretty cool):
	@li @a http://codebrowse.launchpad.net/~glyph/
	@li @a http://glyf.livejournal.com/74356.html
	"""
	# Grab the caller's namespace and append to its __all__, creating the
	# list on first use
	packageVars = sys._getframe(1+stackadd).f_locals
	globalAll = packageVars.setdefault('__all__', [])
	globalAll.append(name)
+
+
def public(thunk):
	"""
	This is a decorator, for convenience.  Rather than typing the name of your
	function twice, you can decorate a function with this.

	To be real, @public would need to work on methods as well, which gets into
	supporting types...

	@param thunk Function to export; returned unchanged
	@note Taken from PyExport (which could turn out pretty cool):
	@li @a http://codebrowse.launchpad.net/~glyph/
	@li @a http://glyf.livejournal.com/74356.html
	"""
	# stackadd=1 skips this decorator's own frame so the function lands in
	# the *caller's* __all__
	explicitly(thunk.__name__, 1)
	return thunk
+
+
+def _append_docstring(obj, message):
+       if obj.__doc__ is None:
+               obj.__doc__ = message
+       else:
+               obj.__doc__ += message
+
+
def validate_decorator(decorator):
	"""
	Sanity-check that `decorator` preserves the wrapped function's metadata
	(__name__, __doc__ prefix, __dict__ members).

	Prints nothing when the decorator is well behaved; intended for use
	from doctests.
	"""

	def simple(x):
		return x

	# Give the probe function recognizable metadata to check for afterwards
	f = simple
	f.__name__ = "name"
	f.__doc__ = "doc"
	f.__dict__["member"] = True

	g = decorator(f)

	if f.__name__ != g.__name__:
		print f.__name__, "!=", g.__name__

	if g.__doc__ is None:
		print decorator.__name__, "has no doc string"
	elif not g.__doc__.startswith(f.__doc__):
		# Decorators here may append to the docstring but must preserve the
		# original prefix
		print g.__doc__, "didn't start with", f.__doc__

	if not ("member" in g.__dict__ and g.__dict__["member"]):
		print "'member' not in ", g.__dict__
+
+
def deprecated_api(func):
	"""
	Mark a function as deprecated: every call emits a DeprecationWarning
	before delegating to the original.

	>>> validate_decorator(deprecated_api)
	"""

	@functools.wraps(func)
	def wrapped(*args, **kwargs):
		warnings.warn("Call to deprecated function %s." % func.__name__, category=DeprecationWarning)
		return func(*args, **kwargs)

	_append_docstring(wrapped, "\n@deprecated")
	return wrapped
+
+
def unstable_api(func):
	"""
	Mark a function as part of an unstable API: every call emits a
	FutureWarning before delegating to the original.

	>>> validate_decorator(unstable_api)
	"""

	@functools.wraps(func)
	def wrapped(*args, **kwargs):
		warnings.warn("Call to unstable API function %s." % func.__name__, category=FutureWarning)
		return func(*args, **kwargs)

	_append_docstring(wrapped, "\n@unstable")
	return wrapped
+
+
def enabled(func):
	"""
	Identity decorator: returns the function unchanged.

	Useful as a drop-in counterpart to L{disabled}.
	"""
	return func
+
+
def disabled(func):
	"""
	Replace the decorated function with a no-op that returns None.
	"""

	@functools.wraps(func)
	def noop(*args, **kargs):
		pass

	_append_docstring(noop, "\n@note Temporarily Disabled")
	return noop
+
+
def metadata(document=True, **kwds):
	"""
	Decorator factory attaching arbitrary key/value metadata as attributes
	of the decorated function.

	@param document When True, each key/value pair is also appended to the
		function's docstring as "@key value"
	@param kwds The metadata to attach

	>>> validate_decorator(metadata(author="Ed"))
	"""

	def decorate(func):
		for k, v in kwds.iteritems():
			setattr(func, k, v)
			if document:
				_append_docstring(func, "\n@"+k+" "+v)
		return func
	return decorate
+
+
def prop(func):
	"""Build a property from a function returning its accessor functions.

	The decorated function is called exactly once; its return value (a dict,
	conveniently produced by the ``locals()`` builtin) may contain:
		fget - function for getting the attribute value
		fset - function for setting the attribute value
		fdel - function for deleting the attribute
	The decorated function's docstring becomes the property's docstring.
	@author http://kbyanc.blogspot.com/2007/06/python-property-attribute-tricks.html
	@see http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/205183
	"""
	accessors = func()
	return property(doc=func.__doc__, **accessors)
+
+
def print_handler(e):
	"""
	Exception handler that prints "Type: message" and swallows the exception.

	@see ExpHandler
	"""
	print "%s: %s" % (type(e).__name__, e)
+
+
def print_ignore(e):
	"""
	Exception handler that prints an "Ignoring ..." notice and swallows
	the exception.

	@see ExpHandler
	"""
	print 'Ignoring %s exception: %s' % (type(e).__name__, e)
+
+
def print_traceback(e):
	"""
	Exception handler that prints the full traceback to stdout and swallows
	the exception.

	@see ExpHandler
	"""
	#print sys.exc_info()
	traceback.print_exc(file=sys.stdout)
+
+
def ExpHandler(handler = print_handler, *exceptions):
	"""
	An exception handling idiom using decorators
	Examples
	Specify exceptions in order, first one is handled first
	last one last.

	>>> validate_decorator(ExpHandler())
	>>> @ExpHandler(print_ignore, ZeroDivisionError)
	... @ExpHandler(None, AttributeError, ValueError)
	... def f1():
	...     1/0
	>>> @ExpHandler(print_traceback, ZeroDivisionError)
	... def f2():
	...     1/0
	>>> @ExpHandler()
	... def f3(*pargs):
	...     l = pargs
	...     return l[10]
	>>> @ExpHandler(print_traceback, ZeroDivisionError)
	... def f4():
	...     return 1
	>>>
	>>>
	>>> f1()
	Ignoring ZeroDivisionError exception: integer division or modulo by zero
	>>> f2() # doctest: +ELLIPSIS, +NORMALIZE_WHITESPACE
	Traceback (most recent call last):
	...
	ZeroDivisionError: integer division or modulo by zero
	>>> f3()
	IndexError: tuple index out of range
	>>> f4()
	1
	"""

	def wrapper(f):
		# Default to catching plain Exception when no types were specified
		localExceptions = exceptions
		if not localExceptions:
			localExceptions = [Exception]
		# Reverse so the FIRST listed exception becomes the INNERMOST handler
		t = [(ex, handler) for ex in localExceptions]
		t.reverse()

		def newfunc(t, *args, **kwargs):
			ex, handler = t[0]
			try:
				if len(t) == 1:
					return f(*args, **kwargs)
				else:
					#Recurse for embedded try/excepts
					dec_func = functools.partial(newfunc, t[1:])
					dec_func = functools.update_wrapper(dec_func, f)
					return dec_func(*args, **kwargs)
			except ex, e:
				# Swallow the exception; the caller receives the handler's
				# return value instead
				return handler(e)

		dec_func = functools.partial(newfunc, t)
		dec_func = functools.update_wrapper(dec_func, f)
		return dec_func
	return wrapper
+
+
def into_debugger(func):
	"""
	Decorator dropping into pdb's post-mortem debugger whenever the wrapped
	function raises; returns None in that case.
	"""

	@functools.wraps(func)
	def debugged(*args, **kwargs):
		try:
			return func(*args, **kwargs)
		except:
			import pdb
			pdb.post_mortem()

	return debugged
+
+
class bindclass(object):
	"""
	Method decorator that passes the *defining* class as the first argument.

	Works together with ClassBindingSupport/BoundObject: the metaclass calls
	bind() once the class body has been evaluated, and attribute access then
	dispatches through the bound wrapper stored in ``m``.
	"""

	def __init__(self, f):
		# Mirror the wrapped function's metadata on the descriptor itself
		self.f = f
		self.__name__ = f.__name__
		self.__doc__ = f.__doc__
		self.__dict__.update(f.__dict__)
		self.m = None

	def bind(self, cls, attr):
		# Called by the metaclass; fixes `cls` as the implicit first argument

		def class_bound(*args, **kwargs):
			return self.f(cls, *args, **kwargs)

		class_bound.__name__ = attr
		self.m = class_bound

	def __get__(self, obj, objtype=None):
		# Delegate to the plain function's descriptor protocol so instance
		# binding (self) still happens normally
		return self.m.__get__(obj, objtype)
+
+
class ClassBindingSupport(type):
	"""
	Metaclass activating bindclass attributes: once a class body has been
	evaluated, each bindclass-wrapped attribute is bound to the new class.

	@see bindclass
	"""

	def __init__(mcs, name, bases, attrs):
		type.__init__(mcs, name, bases, attrs)
		for attr, val in attrs.iteritems():
			if isinstance(val, bindclass):
				# Fix the freshly created class as the implicit first argument
				val.bind(mcs, attr)
+
+
class BoundObject(object):
	"""
	Base class for classes that want to use @bindclass methods.

	@see bindclass
	"""
	# Python 2 metaclass hook; in Python 3 this would be written as
	# `class BoundObject(object, metaclass=ClassBindingSupport)`
	__metaclass__ = ClassBindingSupport
+
+
def bindfunction(f):
	"""
	Decorator passing the wrapped function itself as the first argument.

	Lets a function refer to itself (e.g. for recursion) robustly, even if
	the outer name is later rebound to something else.

	>>> @bindfunction
	... def factorial(thisfunction, n):
	...     return n * thisfunction(n - 1) if n > 0 else 1
	>>> factorial(3)
	6
	"""

	@functools.wraps(f)
	def self_referencing(*args, **kwargs):
		return f(self_referencing, *args, **kwargs)

	return self_referencing
+
+
class Memoize(object):
	"""
	Cache fn's results keyed by its positional arguments (which must all
	be hashable); acts like fn otherwise.
	@note Source: http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52201

	>>> validate_decorator(Memoize)
	"""

	def __init__(self, fn):
		# Mirror the wrapped function's metadata on the wrapper
		self.fn = fn
		self.__name__ = fn.__name__
		self.__doc__ = fn.__doc__
		self.__dict__.update(fn.__dict__)
		self.memo = {}

	def __call__(self, *args):
		try:
			return self.memo[args]
		except KeyError:
			result = self.fn(*args)
			self.memo[args] = result
			return result
+
+
class MemoizeMutable(object):
	"""
	Cache fn's results keyed on a pickle of all its arguments.

	Slower than Memoize but also works with mutable/unhashable arguments.
	@note Source: http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52201

	>>> validate_decorator(MemoizeMutable)
	"""

	def __init__(self, fn):
		# Mirror the wrapped function's metadata on the wrapper
		self.fn = fn
		self.__name__ = fn.__name__
		self.__doc__ = fn.__doc__
		self.__dict__.update(fn.__dict__)
		self.memo = {}

	def __call__(self, *args, **kw):
		# Pickling gives a hashable key even for mutable arguments
		key = cPickle.dumps((args, kw))
		try:
			return self.memo[key]
		except KeyError:
			result = self.fn(*args, **kw)
			self.memo[key] = result
			return result
+
+
# Current call depth, used to indent the trace output
callTraceIndentationLevel = 0


def call_trace(f):
	"""
	Tracing decorator: print entry to, exit from, and exceptions escaping
	each call, indented by call depth.

	>>> validate_decorator(call_trace)
	>>> @call_trace
	... def a(a, b, c):
	...     pass
	>>> a(1, 2, c=3)
	Entering a((1, 2), {'c': 3})
	Exiting a((1, 2), {'c': 3})
	"""

	@functools.wraps(f)
	def verboseTrace(*args, **kw):
		# Variant that also prints the call's arguments
		global callTraceIndentationLevel

		print "%sEntering %s(%s, %s)" % ("\t"*callTraceIndentationLevel, f.__name__, args, kw)
		callTraceIndentationLevel += 1
		try:
			result = f(*args, **kw)
		except:
			callTraceIndentationLevel -= 1
			print "%sException %s(%s, %s)" % ("\t"*callTraceIndentationLevel, f.__name__, args, kw)
			raise
		callTraceIndentationLevel -= 1
		print "%sExiting %s(%s, %s)" % ("\t"*callTraceIndentationLevel, f.__name__, args, kw)
		return result

	@functools.wraps(f)
	def smallTrace(*args, **kw):
		# Variant that prints only the function name (swap the return below
		# to use it)
		global callTraceIndentationLevel

		print "%sEntering %s" % ("\t"*callTraceIndentationLevel, f.__name__)
		callTraceIndentationLevel += 1
		try:
			result = f(*args, **kw)
		except:
			callTraceIndentationLevel -= 1
			print "%sException %s" % ("\t"*callTraceIndentationLevel, f.__name__)
			raise
		callTraceIndentationLevel -= 1
		print "%sExiting %s" % ("\t"*callTraceIndentationLevel, f.__name__)
		return result

	#return smallTrace
	return verboseTrace
+
+
@contextlib.contextmanager
def lexical_scope(*args):
	"""
	Context manager emulating a lexical scope: names created inside the
	with-block are deleted from the caller's namespace on exit.

	@note Source: http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/520586
	Example:
	>>> b = 0
	>>> with lexical_scope(1) as (a):
	...     print a
	...
	1
	>>> with lexical_scope(1,2,3) as (a,b,c):
	...     print a,b,c
	...
	1 2 3
	>>> with lexical_scope():
	...     d = 10
	...     def foo():
	...             pass
	...
	>>> print b
	2
	"""

	# Two frames up: past the contextmanager machinery to the actual caller
	frame = inspect.currentframe().f_back.f_back
	saved = frame.f_locals.keys()
	try:
		# Mirror the tuple-unpacking behavior of "as (a, b, c)"
		if not args:
			yield
		elif len(args) == 1:
			yield args[0]
		else:
			yield args
	finally:
		# Delete every name created inside the with-block.
		# NOTE(review): relies on CPython writing through frame.f_locals at
		# module/exec scope; inside a function this cleanup is a no-op
		f_locals = frame.f_locals
		for key in (x for x in f_locals.keys() if x not in saved):
			del f_locals[key]
		# Drop the frame reference promptly to avoid reference cycles
		del frame
+
+
def normalize_number(prettynumber):
	"""
	Strip all formatting characters from a phone number, prepending the
	"+1" country code to bare 10/11-digit North American numbers.

	>>> normalize_number("+012-(345)-678-90")
	'+01234567890'
	>>> normalize_number("1-(345)-678-9000")
	'+13456789000'
	>>> normalize_number("+1-(345)-678-9000")
	'+13456789000'
	"""
	digits = re.sub('[^0-9+]', '', prettynumber)
	if digits.startswith("+"):
		# Already in international form
		return digits
	if len(digits) == 11 and digits.startswith("1"):
		# North American number with leading country code
		return "+" + digits

	#validateRe = re.compile("^\+?[0-9]{10,}$")
	#assert validateRe.match(uglynumber) is not None

	if len(digits) == 10:
		# Bare North American number
		return "+1" + digits
	return digits
+
+
# At least ten digits, optionally prefixed with "+"
_VALIDATE_RE = re.compile("^\+?[0-9]{10,}$")


def is_valid_number(number):
	"""
	@returns If This number be called ( syntax validation only )
	"""
	match = _VALIDATE_RE.match(number)
	return match is not None
+
+
def parse_version(versionText):
	"""
	Split a dotted version string into a list of integer components.

	>>> parse_version("0.5.2")
	[0, 5, 2]
	"""
	return [int(part) for part in versionText.split(".")]
+
+
def compare_versions(leftParsedVersion, rightParsedVersion):
	"""
	Compare two parsed versions, cmp-style: -1, 0, or 1.

	Versions of unequal length are supported: missing trailing components
	are treated as 0, so [1, 0] == [1, 0, 0] but [1, 0] < [1, 0, 1].
	(The original implementation returned 0 for any unequal-length pair
	with an equal shared prefix, via a misleading for/else.)

	>>> compare_versions([0, 1, 2], [0, 1, 2])
	0
	>>> compare_versions([0, 1, 2], [0, 1, 3])
	-1
	>>> compare_versions([0, 1, 2], [0, 2, 2])
	-1
	>>> compare_versions([0, 1, 2], [1, 1, 2])
	-1
	>>> compare_versions([0, 1, 3], [0, 1, 2])
	1
	>>> compare_versions([0, 2, 2], [0, 1, 2])
	1
	>>> compare_versions([1, 1, 2], [0, 1, 2])
	1
	>>> compare_versions([1, 0], [1, 0, 1])
	-1
	>>> compare_versions([1, 0], [1, 0, 0])
	0
	"""
	for left, right in zip(leftParsedVersion, rightParsedVersion):
		if left < right:
			return -1
		elif right < left:
			return 1
	# Shared components are all equal; the longer version wins only if any
	# of its extra components is non-zero
	if any(leftParsedVersion[len(rightParsedVersion):]):
		return 1
	if any(rightParsedVersion[len(leftParsedVersion):]):
		return -1
	return 0
diff --git a/src/util/overloading.py b/src/util/overloading.py
new file mode 100644 (file)
index 0000000..89cb738
--- /dev/null
@@ -0,0 +1,256 @@
+#!/usr/bin/env python
+import new
+
+# Make the environment more like Python 3.0
+__metaclass__ = type
+from itertools import izip as zip
+import textwrap
+import inspect
+
+
# Public API of this module
__all__ = [
	"AnyType",
	"overloaded"
]


# Wildcard for overloaded.register(): matches an argument of any type
AnyType = object
+
+
class overloaded:
	"""
	Dynamically overloaded functions.

	This is an implementation of (dynamically, or run-time) overloaded
	functions; also known as generic functions or multi-methods.

	The dispatch algorithm uses the types of all argument for dispatch,
	similar to (compile-time) overloaded functions or methods in C++ and
	Java.

	Most of the complexity in the algorithm comes from the need to support
	subclasses in call signatures.  For example, if an function is
	registered for a signature (T1, T2), then a call with a signature (S1,
	S2) is acceptable, assuming that S1 is a subclass of T1, S2 a subclass
	of T2, and there are no other more specific matches (see below).

	If there are multiple matches and one of those doesn't *dominate* all
	others, the match is deemed ambiguous and an exception is raised.  A
	subtlety here: if, after removing the dominated matches, there are
	still multiple matches left, but they all map to the same function,
	then the match is not deemed ambiguous and that function is used.
	Read the method find_func() below for details.

	@note Python 2.5 is required due to the use of predicates any() and all().
	@note only supports positional arguments

	@author http://www.artima.com/weblogs/viewpost.jsp?thread=155514

	>>> import misc
	>>> misc.validate_decorator (overloaded)
	>>>
	>>>
	>>>
	>>>
	>>> #################
	>>> #Basics, with reusing names and without
	>>> @overloaded
	... def foo(x):
	...     "prints x"
	...     print x
	...
	>>> @foo.register(int)
	... def foo(x):
	...     "prints the hex representation of x"
	...     print hex(x)
	...
	>>> from types import DictType
	>>> @foo.register(DictType)
	... def foo_dict(x):
	...     "prints the keys of x"
	...     print [k for k in x.iterkeys()]
	...
	>>> #combines all of the doc strings to help keep track of the specializations
	>>> foo.__doc__  # doctest: +ELLIPSIS
	"prints x\\n\\n...overloading.foo (<type 'int'>):\\n\\tprints the hex representation of x\\n\\n...overloading.foo_dict (<type 'dict'>):\\n\\tprints the keys of x"
	>>> foo ("text")
	text
	>>> foo (10) #calling the specialized foo
	0xa
	>>> foo ({3:5, 6:7}) #calling the specialization foo_dict
	[3, 6]
	>>> foo_dict ({3:5, 6:7}) #with using a unique name, you still have the option of calling the function directly
	[3, 6]
	>>>
	>>>
	>>>
	>>>
	>>> #################
	>>> #Multiple arguments, accessing the default, and function finding
	>>> @overloaded
	... def two_arg (x, y):
	...     print x,y
	...
	>>> @two_arg.register(int, int)
	... def two_arg_int_int (x, y):
	...     print hex(x), hex(y)
	...
	>>> @two_arg.register(float, int)
	... def two_arg_float_int (x, y):
	...     print x, hex(y)
	...
	>>> @two_arg.register(int, float)
	... def two_arg_int_float (x, y):
	...     print hex(x), y
	...
	>>> two_arg.__doc__ # doctest: +ELLIPSIS
	"...overloading.two_arg_int_int (<type 'int'>, <type 'int'>):\\n\\n...overloading.two_arg_float_int (<type 'float'>, <type 'int'>):\\n\\n...overloading.two_arg_int_float (<type 'int'>, <type 'float'>):"
	>>> two_arg(9, 10)
	0x9 0xa
	>>> two_arg(9.0, 10)
	9.0 0xa
	>>> two_arg(15, 16.0)
	0xf 16.0
	>>> two_arg.default_func(9, 10)
	9 10
	>>> two_arg.find_func ((int, float)) == two_arg_int_float
	True
	>>> (int, float) in two_arg
	True
	>>> (str, int) in two_arg
	False
	>>>
	>>>
	>>>
	>>> #################
	>>> #wildcard
	>>> @two_arg.register(AnyType, str)
	... def two_arg_any_str (x, y):
	...     print x, y.lower()
	...
	>>> two_arg("Hello", "World")
	Hello world
	>>> two_arg(500, "World")
	500 world
	"""

	def __init__(self, default_func):
		# Decorator to declare new overloaded function.
		# registry maps type-tuples to implementations; cache memoizes the
		# result of find_func() per concrete call signature
		self.registry = {}
		self.cache = {}
		self.default_func = default_func
		# Mirror the default function's metadata on the wrapper
		self.__name__ = self.default_func.__name__
		self.__doc__ = self.default_func.__doc__
		self.__dict__.update (self.default_func.__dict__)

	def __get__(self, obj, type=None):
		# Descriptor protocol so an overloaded function can also act as a
		# method (Python 2's `new` module supplies instancemethod)
		if obj is None:
			return self
		return new.instancemethod(self, obj)

	def register(self, *types):
		"""
		Decorator to register an implementation for a specific set of types.

		.register(t1, t2)(f) is equivalent to .register_func((t1, t2), f).
		"""

		def helper(func):
			self.register_func(types, func)

			# Fold the specialization's signature and docstring into the
			# overloaded function's combined docstring
			originalDoc = self.__doc__ if self.__doc__ is not None else ""
			typeNames = ", ".join ([str(type) for type in types])
			typeNames = "".join ([func.__module__+".", func.__name__, " (", typeNames, "):"])
			overloadedDoc = ""
			if func.__doc__ is not None:
				overloadedDoc = textwrap.fill (func.__doc__, width=60, initial_indent="\t", subsequent_indent="\t")
			self.__doc__ = "\n".join ([originalDoc, "", typeNames, overloadedDoc]).strip()

			new_func = func

			#Masking the function, so we want to take on its traits
			if func.__name__ == self.__name__:
				self.__dict__.update (func.__dict__)
				new_func = self
			return new_func

		return helper

	def register_func(self, types, func):
		"""Helper to register an implementation."""
		self.registry[tuple(types)] = func
		self.cache = {} # Clear the cache (later we can optimize this).

	def __call__(self, *args):
		"""Call the overloaded function."""
		types = tuple(map(type, args))
		func = self.cache.get(types)
		if func is None:
			self.cache[types] = func = self.find_func(types)
		return func(*args)

	def __contains__ (self, types):
		# A signature is "in" the overload set iff it dispatches to
		# something more specific than the default implementation
		return self.find_func(types) is not self.default_func

	def find_func(self, types):
		"""Find the appropriate overloaded function; don't call it.

		@note This won't work for old-style classes or classes without __mro__
		"""
		func = self.registry.get(types)
		if func is not None:
			# Easy case -- direct hit in registry.
			return func

		# Phillip Eby suggests to use issubclass() instead of __mro__.
		# There are advantages and disadvantages.

		# I can't help myself -- this is going to be intense functional code.
		# Find all possible candidate signatures.
		mros = tuple(inspect.getmro(t) for t in types)
		n = len(mros)
		candidates = [sig for sig in self.registry
				if len(sig) == n and
					all(t in mro for t, mro in zip(sig, mros))]

		if not candidates:
			# No match at all -- use the default function.
			return self.default_func
		elif len(candidates) == 1:
			# Unique match -- that's an easy case.
			return self.registry[candidates[0]]

		# More than one match -- weed out the subordinate ones.

		def dominates(dom, sub,
				orders=tuple(dict((t, i) for i, t in enumerate(mro))
							for mro in mros)):
			# Predicate to decide whether dom strictly dominates sub.
			# Strict domination is defined as domination without equality.
			# The arguments dom and sub are type tuples of equal length.
			# The orders argument is a precomputed auxiliary data structure
			# giving dicts of ordering information corresponding to the
			# positions in the type tuples.
			# A type d dominates a type s iff order[d] <= order[s].
			# A type tuple (d1, d2, ...) dominates a type tuple of equal length
			# (s1, s2, ...) iff d1 dominates s1, d2 dominates s2, etc.
			if dom is sub:
				return False
			return all(order[d] <= order[s] for d, s, order in zip(dom, sub, orders))

		# I suppose I could inline dominates() but it wouldn't get any clearer.
		candidates = [cand
				for cand in candidates
					if not any(dominates(dom, cand) for dom in candidates)]
		if len(candidates) == 1:
			# There's exactly one candidate left.
			return self.registry[candidates[0]]

		# Perhaps these multiple candidates all have the same implementation?
		funcs = set(self.registry[cand] for cand in candidates)
		if len(funcs) == 1:
			return funcs.pop()

		# No, the situation is irreducibly ambiguous.
		raise TypeError("ambigous call; types=%r; candidates=%r" %
						(types, candidates))
diff --git a/src/util/tp_utils.py b/src/util/tp_utils.py
new file mode 100644 (file)
index 0000000..1c6cbc8
--- /dev/null
@@ -0,0 +1,220 @@
+#!/usr/bin/env python
+
+import logging
+
+import dbus
+import telepathy
+
+import util.go_utils as gobject_utils
+import misc
+
+
+_moduleLogger = logging.getLogger(__name__)
+DBUS_PROPERTIES = 'org.freedesktop.DBus.Properties'
+
+
+class WasMissedCall(object):
+
+       def __init__(self, bus, conn, chan, on_success, on_error):
+               self.__on_success = on_success
+               self.__on_error = on_error
+
+               self._requested = None
+               self._didMembersChange = False
+               self._didClose = False
+               self._didReport = False
+
+               self._onTimeout = gobject_utils.Timeout(self._on_timeout)
+               self._onTimeout.start(seconds=10)
+
+               chan[telepathy.interfaces.CHANNEL_INTERFACE_GROUP].connect_to_signal(
+                       "MembersChanged",
+                       self._on_members_changed,
+               )
+
+               chan[telepathy.interfaces.CHANNEL].connect_to_signal(
+                       "Closed",
+                       self._on_closed,
+               )
+
+               chan[DBUS_PROPERTIES].GetAll(
+                       telepathy.interfaces.CHANNEL_INTERFACE,
+                       reply_handler = self._on_got_all,
+                       error_handler = self._on_error,
+               )
+
+       def cancel(self):
+               self._report_error("by request")
+
+       def _report_missed_if_ready(self):
+               if self._didReport:
+                       pass
+               elif self._requested is not None and (self._didMembersChange or self._didClose):
+                       if self._requested:
+                               self._report_error("wrong direction")
+                       elif self._didClose:
+                               self._report_success()
+                       else:
+                               self._report_error("members added")
+               else:
+                       if self._didClose:
+                               self._report_error("closed too early")
+
+       def _report_success(self):
+               assert not self._didReport
+               self._didReport = True
+               self._onTimeout.cancel()
+               self.__on_success(self)
+
+       def _report_error(self, reason):
+               assert not self._didReport
+               self._didReport = True
+               self._onTimeout.cancel()
+               self.__on_error(self, reason)
+
+       @misc.log_exception(_moduleLogger)
+       def _on_got_all(self, properties):
+               self._requested = properties["Requested"]
+               self._report_missed_if_ready()
+
+       @misc.log_exception(_moduleLogger)
+       def _on_members_changed(self, message, added, removed, lp, rp, actor, reason):
+               if added:
+                       self._didMembersChange = True
+                       self._report_missed_if_ready()
+
+       @misc.log_exception(_moduleLogger)
+       def _on_closed(self):
+               self._didClose = True
+               self._report_missed_if_ready()
+
+       @misc.log_exception(_moduleLogger)
+       def _on_error(self, *args):
+               self._report_error(args)
+
+       @misc.log_exception(_moduleLogger)
+       def _on_timeout(self):
+               self._report_error("timeout")
+               return False
+
+
+class NewChannelSignaller(object):
+
+       def __init__(self, on_new_channel):
+               self._sessionBus = dbus.SessionBus()
+               self._on_user_new_channel = on_new_channel
+
+       def start(self):
+               self._sessionBus.add_signal_receiver(
+                       self._on_new_channel,
+                       "NewChannel",
+                       "org.freedesktop.Telepathy.Connection",
+                       None,
+                       None
+               )
+
+       def stop(self):
+               self._sessionBus.remove_signal_receiver(
+                       self._on_new_channel,
+                       "NewChannel",
+                       "org.freedesktop.Telepathy.Connection",
+                       None,
+                       None
+               )
+
+       @misc.log_exception(_moduleLogger)
+       def _on_new_channel(
+               self, channelObjectPath, channelType, handleType, handle, supressHandler
+       ):
+               connObjectPath = channel_path_to_conn_path(channelObjectPath)
+               serviceName = path_to_service_name(channelObjectPath)
+               try:
+                       self._on_user_new_channel(
+                               self._sessionBus, serviceName, connObjectPath, channelObjectPath, channelType
+                       )
+               except Exception:
+                       _moduleLogger.exception("Blocking exception from being passed up")
+
+
+class EnableSystemContactIntegration(object):
+
+       ACCOUNT_MGR_NAME = "org.freedesktop.Telepathy.AccountManager"
+       ACCOUNT_MGR_PATH = "/org/freedesktop/Telepathy/AccountManager"
+       ACCOUNT_MGR_IFACE_QUERY = "com.nokia.AccountManager.Interface.Query"
+       ACCOUNT_IFACE_COMPAT = "com.nokia.Account.Interface.Compat"
+       ACCOUNT_IFACE_COMPAT_PROFILE = "com.nokia.Account.Interface.Compat.Profile"
+       DBUS_PROPERTIES = 'org.freedesktop.DBus.Properties'
+
+       def __init__(self, profileName):
+               self._bus = dbus.SessionBus()
+               self._profileName = profileName
+
+       def start(self):
+               self._accountManager = self._bus.get_object(
+                       self.ACCOUNT_MGR_NAME,
+                       self.ACCOUNT_MGR_PATH,
+               )
+               self._accountManagerQuery = dbus.Interface(
+                       self._accountManager,
+                       dbus_interface=self.ACCOUNT_MGR_IFACE_QUERY,
+               )
+
+               self._accountManagerQuery.FindAccounts(
+                       {
+                               self.ACCOUNT_IFACE_COMPAT_PROFILE: self._profileName,
+                       },
+                       reply_handler = self._on_found_accounts_reply,
+                       error_handler = self._on_error,
+               )
+
+       @misc.log_exception(_moduleLogger)
+       def _on_found_accounts_reply(self, accountObjectPaths):
+               for accountObjectPath in accountObjectPaths:
+                       _moduleLogger.debug("Account path: %r" % (accountObjectPath, ))
+                       account = self._bus.get_object(
+                               self.ACCOUNT_MGR_NAME,
+                               accountObjectPath,
+                       )
+                       accountProperties = dbus.Interface(
+                               account,
+                               self.DBUS_PROPERTIES,
+                       )
+                       accountProperties.Set(
+                               self.ACCOUNT_IFACE_COMPAT,
+                               "SecondaryVCardFields",
+                               ["TEL"],
+                               reply_handler = self._on_field_set,
+                               error_handler = self._on_error,
+                       )
+
+       @misc.log_exception(_moduleLogger)
+       def _on_field_set(self):
+               _moduleLogger.info("SecondaryVCardFields Set")
+
+       @misc.log_exception(_moduleLogger)
+       def _on_error(self, error):
+               _moduleLogger.error("%r" % (error, ))
+
+
+def channel_path_to_conn_path(channelObjectPath):
+       """
+       >>> channel_path_to_conn_path("/org/freedesktop/Telepathy/ConnectionManager/theonering/gv/USERNAME/Channel1")
+       '/org/freedesktop/Telepathy/ConnectionManager/theonering/gv/USERNAME'
+       """
+       return channelObjectPath.rsplit("/", 1)[0]
+
+
+def path_to_service_name(path):
+       """
+       >>> path_to_service_name("/org/freedesktop/Telepathy/ConnectionManager/theonering/gv/USERNAME/Channel1")
+       'org.freedesktop.Telepathy.ConnectionManager.theonering.gv.USERNAME'
+       """
+       return ".".join(path[1:].split("/")[0:7])
+
+
+def cm_from_path(path):
+       """
+       >>> cm_from_path("/org/freedesktop/Telepathy/ConnectionManager/theonering/gv/USERNAME/Channel1")
+       'theonering'
+       """
+       return path[1:].split("/")[4]
index 6805808..eb99ebc 100755 (executable)
@@ -37,9 +37,9 @@ __changelog__ = """
 __postinstall__ = """#!/bin/sh -e
 
 gtk-update-icon-cache -f /usr/share/icons/hicolor
-rm -f ~/.dialcentral/notifier.log
-rm -f ~/.dialcentral/dialcentral.log
-"""
+rm -f ~/.%(name)s/%(name)s.log
+rm -f ~/.%(name)s/notifier.log
+""" % {"name": constants.__app_name__}
 
 __preremove__ = """#!/bin/sh -e
 
@@ -47,13 +47,13 @@ python /usr/lib/dialcentral/alarm_handler.py -d || true
 """
 
 
-def find_files(path):
+def find_files(prefix, path):
        for root, dirs, files in os.walk(path):
                for file in files:
-                       if file.startswith("src-"):
+                       if file.startswith(prefix+"-"):
                                fileParts = file.split("-")
                                unused, relPathParts, newName = fileParts[0], fileParts[1:-1], fileParts[-1]
-                               assert unused == "src"
+                               assert unused == prefix
                                relPath = os.sep.join(relPathParts)
                                yield relPath, file, newName
 
@@ -78,7 +78,6 @@ def build_package(distribution):
        p.prettyName = constants.__pretty_app_name__
        p.description = __description__
        p.bugTracker = "https://bugs.maemo.org/enter_bug.cgi?product=Dialcentral"
-       p.upgradeDescription = __changelog__.split("\n\n", 1)[0]
        p.author = __author__
        p.mail = __email__
        p.license = "lgpl"
@@ -113,9 +112,9 @@ def build_package(distribution):
                "diablo": "26x26-dialcentral.png",
                "fremantle": "64x64-dialcentral.png", # Fremantle natively uses 48x48
        }[distribution]
-       p["/usr/bin"] = [ "dialcentral.py" ]
-       for relPath, files in unflatten_files(find_files(".")).iteritems():
-               fullPath = "/usr/lib/dialcentral"
+       p["/opt/%s/bin" % constants.__app_name__] = [ "%s.py" % constants.__app_name__ ]
+       for relPath, files in unflatten_files(find_files("src", ".")).iteritems():
+               fullPath = "/opt/%s/lib" % constants.__app_name__
                if relPath:
                        fullPath += os.sep+relPath
                p[fullPath] = list(
index ce28c88..c5ec2f3 100644 (file)
@@ -3,5 +3,5 @@ Encoding=UTF-8
 Version=1.0
 Type=Application
 Name=DialCentral
-Exec=/usr/bin/run-standalone.sh /usr/bin/dialcentral.py
+Exec=/usr/bin/run-standalone.sh /opt/dialcentral/bin/dialcentral.py
 Icon=dialcentral