From 57b08f900cc042ef06b5f7592a157249c1845f17 Mon Sep 17 00:00:00 2001 From: Ed Page Date: Fri, 30 Apr 2010 19:54:13 -0500 Subject: [PATCH] Adding utils --- src/util/__init__.py | 1 + src/util/algorithms.py | 584 ++++++++++++++++++++++++++++++++++++ src/util/concurrent.py | 77 +++++ src/util/coroutines.py | 623 ++++++++++++++++++++++++++++++++++++++ src/util/go_utils.py | 300 +++++++++++++++++++ src/util/io.py | 129 ++++++++ src/util/linux.py | 13 + src/util/misc.py | 757 +++++++++++++++++++++++++++++++++++++++++++++++ src/util/overloading.py | 256 ++++++++++++++++ src/util/tp_utils.py | 220 ++++++++++++++ 10 files changed, 2960 insertions(+) create mode 100644 src/util/__init__.py create mode 100644 src/util/algorithms.py create mode 100644 src/util/concurrent.py create mode 100755 src/util/coroutines.py create mode 100644 src/util/go_utils.py create mode 100644 src/util/io.py create mode 100644 src/util/linux.py create mode 100644 src/util/misc.py create mode 100644 src/util/overloading.py create mode 100644 src/util/tp_utils.py diff --git a/src/util/__init__.py b/src/util/__init__.py new file mode 100644 index 0000000..4265cc3 --- /dev/null +++ b/src/util/__init__.py @@ -0,0 +1 @@ +#!/usr/bin/env python diff --git a/src/util/algorithms.py b/src/util/algorithms.py new file mode 100644 index 0000000..7052b98 --- /dev/null +++ b/src/util/algorithms.py @@ -0,0 +1,584 @@ +#!/usr/bin/env python + +""" +@note Source http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/66448 +""" + +import itertools +import functools +import datetime +import types + + +def ordered_itr(collection): + """ + >>> [v for v in ordered_itr({"a": 1, "b": 2})] + [('a', 1), ('b', 2)] + >>> [v for v in ordered_itr([3, 1, 10, -20])] + [-20, 1, 3, 10] + """ + if isinstance(collection, types.DictType): + keys = list(collection.iterkeys()) + keys.sort() + for key in keys: + yield key, collection[key] + else: + values = list(collection) + values.sort() + for value in values: + yield value + + +def itercat(*iterators): + """ + Concatenate several iterators into one. + + >>> [v for v in itercat([1, 2, 3], [4, 1, 3])] + [1, 2, 3, 4, 1, 3] + """ + for i in iterators: + for x in i: + yield x + + +def iterwhile(func, iterator): + """ + Iterate for as long as func(value) returns true. + >>> through = lambda b: b + >>> [v for v in iterwhile(through, [True, True, False])] + [True, True] + """ + iterator = iter(iterator) + while 1: + next = iterator.next() + if not func(next): + raise StopIteration + yield next + + +def iterfirst(iterator, count=1): + """ + Iterate through 'count' first values. + + >>> [v for v in iterfirst([1, 2, 3, 4, 5], 3)] + [1, 2, 3] + """ + iterator = iter(iterator) + for i in xrange(count): + yield iterator.next() + + +def iterstep(iterator, n): + """ + Iterate every nth value. + + >>> [v for v in iterstep([1, 2, 3, 4, 5], 1)] + [1, 2, 3, 4, 5] + >>> [v for v in iterstep([1, 2, 3, 4, 5], 2)] + [1, 3, 5] + >>> [v for v in iterstep([1, 2, 3, 4, 5], 3)] + [1, 4] + """ + iterator = iter(iterator) + while True: + yield iterator.next() + # skip n-1 values + for dummy in xrange(n-1): + iterator.next() + + +def itergroup(iterator, count, padValue = None): + """ + Iterate in groups of 'count' values. If there + aren't enough values, the last result is padded with + None. + + >>> for val in itergroup([1, 2, 3, 4, 5, 6], 3): + ... print tuple(val) + (1, 2, 3) + (4, 5, 6) + >>> for val in itergroup([1, 2, 3, 4, 5, 6], 3): + ... 
print list(val)
+	[1, 2, 3]
+	[4, 5, 6]
+	>>> for val in itergroup([1, 2, 3, 4, 5, 6, 7], 3):
+	... 	print tuple(val)
+	(1, 2, 3)
+	(4, 5, 6)
+	(7, None, None)
+	>>> for val in itergroup("123456", 3):
+	... 	print tuple(val)
+	('1', '2', '3')
+	('4', '5', '6')
+	>>> for val in itergroup("123456", 3):
+	... 	print repr("".join(val))
+	'123'
+	'456'
+	"""
+	paddedIterator = itertools.chain(iterator, itertools.repeat(padValue, count-1))
+	nIterators = (paddedIterator, ) * count
+	return itertools.izip(*nIterators)
+
+
+def xzip(*iterators):
+	"""Iterative version of builtin 'zip'."""
+	# A real list (not an imap) so the iterators survive repeated passes
+	iterators = map(iter, iterators)
+	while 1:
+		yield tuple([x.next() for x in iterators])
+
+
+def xmap(func, *iterators):
+	"""Iterative version of builtin 'map'."""
+	# A real list so values() below can index into it and replace entries
+	iterators = map(iter, iterators)
+	values_left = [1]
+
+	def values():
+		# Emulate map behaviour, i.e. shorter
+		# sequences are padded with None when
+		# they run out of values.
+		values_left[0] = 0
+		for i in range(len(iterators)):
+			iterator = iterators[i]
+			if iterator is None:
+				yield None
+			else:
+				try:
+					yield iterator.next()
+					values_left[0] = 1
+				except StopIteration:
+					iterators[i] = None
+					yield None
+	while 1:
+		args = tuple(values())
+		if not values_left[0]:
+			raise StopIteration
+		yield func(*args)
+
+
+def xfilter(func, iterator):
+	"""Iterative version of builtin 'filter'."""
+	iterator = iter(iterator)
+	while 1:
+		next = iterator.next()
+		if func(next):
+			yield next
+
+
+def xreduce(func, iterator, default=None):
+	"""Iterative version of builtin 'reduce'."""
+	iterator = iter(iterator)
+	try:
+		prev = iterator.next()
+	except StopIteration:
+		return default
+	single = 1
+	for next in iterator:
+		single = 0
+		prev = func(prev, next)
+	if single:
+		return func(prev, default)
+	return prev
+
+
+def daterange(begin, end, delta = datetime.timedelta(1)):
+	"""
+	Form a range of dates and iterate over them.
+
+	Arguments:
+	begin -- a date (or datetime) object; the beginning of the range.
+	end -- a date (or datetime) object; the end of the range.
+	delta -- (optional) a datetime.timedelta object; how much to step each iteration.
+		Default step is 1 day.
+
+	Usage:
+	"""
+	if not isinstance(delta, datetime.timedelta):
+		delta = datetime.timedelta(delta)
+
+	ZERO = datetime.timedelta(0)
+
+	if begin < end:
+		if delta <= ZERO:
+			raise StopIteration
+		test = end.__gt__
+	else:
+		if delta >= ZERO:
+			raise StopIteration
+		test = end.__lt__
+
+	while test(begin):
+		yield begin
+		begin += delta
+
+
+class LazyList(object):
+	"""
+	A Sequence whose values are computed lazily by an iterator.
+
+	Module for the creation and use of iterator-based lazy lists.
+	This module defines a class LazyList which can be used to represent sequences
+	of values generated lazily. One can also create recursively defined lazy lists
+	that generate their values based on ones previously generated.
+
+	Backport to python 2.5 by Michael Pust
+	"""
+
+	__author__ = 'Dan Spitz'
+
+	def __init__(self, iterable):
+		self._exhausted = False
+		self._iterator = iter(iterable)
+		self._data = []
+
+	def __len__(self):
+		"""Get the length of a LazyList's computed data."""
+		return len(self._data)
+
+	def __getitem__(self, i):
+		"""Get an item from a LazyList.
+		i should be a non-negative integer or a slice object."""
+		if isinstance(i, int):
+			#index has not yet been yielded by iterator (or iterator exhausted
+			#before reaching that index)
+			if i >= len(self):
+				self.exhaust(i)
+			elif i < 0:
+				raise ValueError('cannot index LazyList with negative number')
+			return self._data[i]
+
+		#LazyList slices are iterators over a portion of the list.
+		elif isinstance(i, slice):
+			start, stop, step = i.start, i.stop, i.step
+			if any(x is not None and x < 0 for x in (start, stop, step)):
+				raise ValueError('cannot index or step through a LazyList with '
+								'a negative number')
+			#set start and step to their integer defaults if they are None.
+			if start is None:
+				start = 0
+			if step is None:
+				step = 1
+
+			def LazyListIterator():
+				count = start
+				predicate = (
+					(lambda: True)
+					if stop is None
+					else (lambda: count < stop)
+				)
+				while predicate():
+					try:
+						yield self[count]
+					#slices can go out of actual index range without raising an
+					#error
+					except IndexError:
+						break
+					count += step
+			return LazyListIterator()
+
+		raise TypeError('i must be an integer or slice')
+
+	def __iter__(self):
+		"""return an iterator over each value in the sequence,
+		whether it has been computed yet or not."""
+		return self[:]
+
+	def computed(self):
+		"""Return an iterator over the values in a LazyList that have
+		already been computed."""
+		return self[:len(self)]
+
+	def exhaust(self, index = None):
+		"""Exhaust the iterator generating this LazyList's values.
+		if index is None, this will exhaust the iterator completely.
+		Otherwise, it will iterate over the iterator until either the list
+		has a value for index or the iterator is exhausted.
+		"""
+		if self._exhausted:
+			return
+		if index is None:
+			ind_range = itertools.count(len(self))
+		else:
+			ind_range = range(len(self), index + 1)
+
+		for ind in ind_range:
+			try:
+				self._data.append(self._iterator.next())
+			except StopIteration: #iterator is fully exhausted
+				self._exhausted = True
+				break
+
+
+class RecursiveLazyList(LazyList):
+
+	def __init__(self, prod, *args, **kwds):
+		super(RecursiveLazyList, self).__init__(prod(self, *args, **kwds))
+
+
+class RecursiveLazyListFactory:
+
+	def __init__(self, producer):
+		self._gen = producer
+
+	def __call__(self, *a, **kw):
+		return RecursiveLazyList(self._gen, *a, **kw)
+
+
+def lazylist(gen):
+	"""
+	Decorator for creating a RecursiveLazyList subclass.
+	This should decorate a generator function taking the LazyList object as its
+	first argument which yields the contents of the list in order.
+
+	>>> #fibonacci sequence in a lazy list.
+	>>> @lazylist
+	... def fibgen(lst):
+	... 	yield 0
+	... 	yield 1
+	... 	for a, b in itertools.izip(lst, lst[1:]):
+	... 		yield a + b
+	...
+	>>> #now fibs can be indexed or iterated over as if it were an infinitely long list containing the fibonacci sequence
+	>>> fibs = fibgen()
+	>>>
+	>>> #prime numbers in a lazy list.
+	>>> @lazylist
+	... def primegen(lst):
+	... 	yield 2
+	... 	for candidate in itertools.count(3): #start at next number after 2
+	... 		#if candidate is not divisible by any smaller prime numbers,
+	... 		#it is a prime.
+	... 		if all(candidate % p for p in lst.computed()):
+	... 			yield candidate
+	...
+	>>> #same for primes - treat it like an infinitely long list containing all prime numbers.
+ >>> primes = primegen() + >>> print fibs[0], fibs[1], fibs[2], primes[0], primes[1], primes[2] + 0 1 1 2 3 5 + >>> print list(fibs[:10]), list(primes[:10]) + [0, 1, 1, 2, 3, 5, 8, 13, 21, 34] [2, 3, 5, 7, 11, 13, 17, 19, 23, 29] + """ + return RecursiveLazyListFactory(gen) + + +def map_func(f): + """ + >>> import misc + >>> misc.validate_decorator(map_func) + """ + + @functools.wraps(f) + def wrapper(*args): + result = itertools.imap(f, args) + return result + return wrapper + + +def reduce_func(function): + """ + >>> import misc + >>> misc.validate_decorator(reduce_func(lambda x: x)) + """ + + def decorator(f): + + @functools.wraps(f) + def wrapper(*args): + result = reduce(function, f(args)) + return result + return wrapper + return decorator + + +def any_(iterable): + """ + @note Python Version <2.5 + + >>> any_([True, True]) + True + >>> any_([True, False]) + True + >>> any_([False, False]) + False + """ + + for element in iterable: + if element: + return True + return False + + +def all_(iterable): + """ + @note Python Version <2.5 + + >>> all_([True, True]) + True + >>> all_([True, False]) + False + >>> all_([False, False]) + False + """ + + for element in iterable: + if not element: + return False + return True + + +def for_every(pred, seq): + """ + for_every takes a one argument predicate function and a sequence. + @param pred The predicate function should return true or false. + @returns true if every element in seq returns true for predicate, else returns false. + + >>> for_every (lambda c: c > 5,(6,7,8,9)) + True + + @author Source:http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52907 + """ + + for i in seq: + if not pred(i): + return False + return True + + +def there_exists(pred, seq): + """ + there_exists takes a one argument predicate function and a sequence. + @param pred The predicate function should return true or false. + @returns true if any element in seq returns true for predicate, else returns false. + + >>> there_exists (lambda c: c > 5,(6,7,8,9)) + True + + @author Source:http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52907 + """ + + for i in seq: + if pred(i): + return True + return False + + +def func_repeat(quantity, func, *args, **kwd): + """ + Meant to be in connection with "reduce" + """ + for i in xrange(quantity): + yield func(*args, **kwd) + + +def function_map(preds, item): + """ + Meant to be in connection with "reduce" + """ + results = (pred(item) for pred in preds) + + return results + + +def functional_if(combiner, preds, item): + """ + Combines the result of a list of predicates applied to item according to combiner + + @see any, every for example combiners + """ + pass_bool = lambda b: b + + bool_results = function_map(preds, item) + return combiner(pass_bool, bool_results) + + +def pushback_itr(itr): + """ + >>> list(pushback_itr(xrange(5))) + [0, 1, 2, 3, 4] + >>> + >>> first = True + >>> itr = pushback_itr(xrange(5)) + >>> for i in itr: + ... print i + ... if first and i == 2: + ... first = False + ... print itr.send(i) + 0 + 1 + 2 + None + 2 + 3 + 4 + >>> + >>> first = True + >>> itr = pushback_itr(xrange(5)) + >>> for i in itr: + ... print i + ... if first and i == 2: + ... first = False + ... print itr.send(i) + ... 
print itr.send(i)
+	0
+	1
+	2
+	None
+	None
+	2
+	2
+	3
+	4
+	>>>
+	>>> itr = pushback_itr(xrange(5))
+	>>> print itr.next()
+	0
+	>>> print itr.next()
+	1
+	>>> print itr.send(10)
+	None
+	>>> print itr.next()
+	10
+	>>> print itr.next()
+	2
+	>>> print itr.send(20)
+	None
+	>>> print itr.send(30)
+	None
+	>>> print itr.send(40)
+	None
+	>>> print itr.next()
+	40
+	>>> print itr.next()
+	30
+	>>> print itr.send(50)
+	None
+	>>> print itr.next()
+	50
+	>>> print itr.next()
+	20
+	>>> print itr.next()
+	3
+	>>> print itr.next()
+	4
+	"""
+	for item in itr:
+		maybePushedBack = yield item
+		queue = []
+		while queue or maybePushedBack is not None:
+			if maybePushedBack is not None:
+				queue.append(maybePushedBack)
+				maybePushedBack = yield None
+			else:
+				item = queue.pop()
+				maybePushedBack = yield item
+
+
+def itr_available(queue, initiallyBlock = False):
+	"""Yield everything currently in the queue, optionally blocking for the first item."""
+	if initiallyBlock:
+		yield queue.get()
+	while not queue.empty():
+		yield queue.get_nowait()
+
+
+if __name__ == "__main__":
+	import doctest
+	print doctest.testmod()
diff --git a/src/util/concurrent.py b/src/util/concurrent.py
new file mode 100644
index 0000000..503a1b4
--- /dev/null
+++ b/src/util/concurrent.py
@@ -0,0 +1,77 @@
+#!/usr/bin/env python
+
+from __future__ import with_statement
+
+import os
+import errno
+import time
+import functools
+import contextlib
+
+
+def synchronized(lock):
+	"""
+	Synchronization decorator.
+
+	>>> import misc
+	>>> misc.validate_decorator(synchronized(object()))
+	"""
+
+	def wrap(f):
+
+		@functools.wraps(f)
+		def newFunction(*args, **kw):
+			lock.acquire()
+			try:
+				return f(*args, **kw)
+			finally:
+				lock.release()
+		return newFunction
+	return wrap
+
+
+@contextlib.contextmanager
+def qlock(queue, gblock = True, gtimeout = None, pblock = True, ptimeout = None):
+	"""
+	Locking with a queue, good for when you want to lock an item passed around
+
+	>>> import Queue
+	>>> item = 5
+	>>> lock = Queue.Queue()
+	>>> lock.put(item)
+	>>> with qlock(lock) as i:
+	... 	print i
+	5
+	"""
+	item = queue.get(gblock, gtimeout)
+	try:
+		yield item
+	finally:
+		queue.put(item, pblock, ptimeout)
+
+
+@contextlib.contextmanager
+def flock(path, timeout=-1):
+	WAIT_FOREVER = -1
+	DELAY = 0.1
+	timeSpent = 0
+
+	acquired = False
+
+	while timeSpent <= timeout or timeout == WAIT_FOREVER:
+		try:
+			fd = os.open(path, os.O_CREAT | os.O_EXCL | os.O_RDWR)
+			acquired = True
+			break
+		except OSError, e:
+			if e.errno != errno.EEXIST:
+				raise
+		time.sleep(DELAY)
+		timeSpent += DELAY
+
+	assert acquired, "Failed to grab file-lock %s within timeout %d" % (path, timeout)
+
+	try:
+		yield fd
+	finally:
+		os.close(fd) # release the descriptor before removing the lock file
+		os.unlink(path)
diff --git a/src/util/coroutines.py b/src/util/coroutines.py
new file mode 100755
index 0000000..b1e539e
--- /dev/null
+++ b/src/util/coroutines.py
@@ -0,0 +1,623 @@
+#!/usr/bin/env python
+
+"""
+Uses for generators
+* Pull pipelining (iterators)
+* Push pipelining (coroutines)
+* State machines (coroutines)
+* "Cooperative multitasking" (coroutines)
+* Algorithm -> Object transform for cohesiveness (for example context managers) (coroutines)
+
+Design considerations
+* When should a stage pass on exceptions or have it thrown within it?
+* When should a stage pass on GeneratorExits?
+* Is there a way to either turn a push generator into an iterator or to use
+  comprehension syntax for push generators (I doubt it)
+* When should the stage try to send data in both directions
+* Since pull generators (generators), push generators (coroutines), subroutines, and coroutines are all coroutines, maybe we should rename the push generators to not confuse them, like signals/slots? and then refer to two-way generators as coroutines
+** If so, make s* and co* implementations of functions
+"""
+
+import threading
+import Queue
+import pickle
+import functools
+import itertools
+import xml.sax
+import xml.parsers.expat
+
+
+def autostart(func):
+	"""
+	>>> @autostart
+	... def grep_sink(pattern):
+	... 	print "Looking for %s" % pattern
+	... 	while True:
+	... 		line = yield
+	... 		if pattern in line:
+	... 			print line,
+	>>> g = grep_sink("python")
+	Looking for python
+	>>> g.send("Yeah but no but yeah but no")
+	>>> g.send("A series of tubes")
+	>>> g.send("python generators rock!")
+	python generators rock!
+	>>> g.close()
+	"""
+
+	@functools.wraps(func)
+	def start(*args, **kwargs):
+		cr = func(*args, **kwargs)
+		cr.next()
+		return cr
+
+	return start
+
+
+@autostart
+def printer_sink(format = "%s"):
+	"""
+	>>> pr = printer_sink("%r")
+	>>> pr.send("Hello")
+	'Hello'
+	>>> pr.send("5")
+	'5'
+	>>> pr.send(5)
+	5
+	>>> p = printer_sink()
+	>>> p.send("Hello")
+	Hello
+	>>> p.send("World")
+	World
+	>>> # p.throw(RuntimeError, "Goodbye")
+	>>> # p.send("Meh")
+	>>> # p.close()
+	"""
+	while True:
+		item = yield
+		print format % (item, )
+
+
+@autostart
+def null_sink():
+	"""
+	Good for uses like with cochain to pick up any slack
+	"""
+	while True:
+		item = yield
+
+
+def itr_source(itr, target):
+	"""
+	>>> itr_source(xrange(2), printer_sink())
+	0
+	1
+	"""
+	for item in itr:
+		target.send(item)
+
+
+@autostart
+def cofilter(predicate, target):
+	"""
+	>>> p = printer_sink()
+	>>> cf = cofilter(None, p)
+	>>> cf.send("")
+	>>> cf.send("Hello")
+	Hello
+	>>> cf.send([])
+	>>> cf.send([1, 2])
+	[1, 2]
+	>>> cf.send(False)
+	>>> cf.send(True)
+	True
+	>>> cf.send(0)
+	>>> cf.send(1)
+	1
+	>>> # cf.throw(RuntimeError, "Goodbye")
+	>>> # cf.send(False)
+	>>> # cf.send(True)
+	>>> # cf.close()
+	"""
+	if predicate is None:
+		predicate = bool
+
+	while True:
+		try:
+			item = yield
+			if predicate(item):
+				target.send(item)
+		except StandardError, e:
+			target.throw(e.__class__, e.message)
+
+
+@autostart
+def comap(function, target):
+	"""
+	>>> p = printer_sink()
+	>>> cm = comap(lambda x: x+1, p)
+	>>> cm.send(0)
+	1
+	>>> cm.send(1.0)
+	2.0
+	>>> cm.send(-2)
+	-1
+	>>> # cm.throw(RuntimeError, "Goodbye")
+	>>> # cm.send(0)
+	>>> # cm.send(1.0)
+	>>> # cm.close()
+	"""
+	while True:
+		try:
+			item = yield
+			mappedItem = function(item)
+			target.send(mappedItem)
+		except StandardError, e:
+			target.throw(e.__class__, e.message)
+
+
+def func_sink(function):
+	return comap(function, null_sink())
+
+
+def expand_positional(function):
+
+	@functools.wraps(function)
+	def expander(item):
+		return function(*item)
+
+	return expander
+
+
+@autostart
+def append_sink(l):
+	"""
+	>>> l = []
+	>>> apps = append_sink(l)
+	>>> apps.send(1)
+	>>> apps.send(2)
+	>>> apps.send(3)
+	>>> print l
+	[1, 2, 3]
+	"""
+	while True:
+		item = yield
+		l.append(item)
+
+
+@autostart
+def last_n_sink(l, n = 1):
+	"""
+	>>> l = []
+	>>> lns = last_n_sink(l)
+	>>> lns.send(1)
+	>>> lns.send(2)
+	>>> lns.send(3)
+	>>> print l
+	[3]
+	"""
+	del l[:]
+	while True:
+		item = yield
+		extraCount = len(l) - n + 1
+		if 0 < extraCount:
+			del l[0:extraCount]
+		l.append(item)
+
+
+@autostart
+def coreduce(target, function, initializer = None):
+	"""
+	>>> reduceResult = []
+	>>> lns = last_n_sink(reduceResult)
+	>>> cr = coreduce(lns, lambda x, y: x + y, 0)
+	>>> cr.send(1)
+	>>> cr.send(2)
+	>>> cr.send(3)
+	>>> print reduceResult
+	[6]
+	>>> cr = coreduce(lns, lambda x, y: x + y)
+	>>> cr.send(1)
+	>>> cr.send(2)
+	>>> cr.send(3)
+	>>> print reduceResult
+	[6]
+	"""
+	isFirst = True
+	cumulativeRef = initializer
+	while True:
+		item = yield
+		if isFirst and initializer is None:
+			cumulativeRef = item
+		else:
+			cumulativeRef = function(cumulativeRef, item)
+		target.send(cumulativeRef)
+		isFirst = False
+
+
+@autostart
+def cotee(targets):
+	"""
+	Takes a sequence of coroutines and sends the received items to all of them
+
+	>>> ct = cotee((printer_sink("1 %s"), printer_sink("2 %s")))
+	>>> ct.send("Hello")
+	1 Hello
+	2 Hello
+	>>> ct.send("World")
+	1 World
+	2 World
+	>>> # ct.throw(RuntimeError, "Goodbye")
+	>>> # ct.send("Meh")
+	>>> # ct.close()
+	"""
+	while True:
+		try:
+			item = yield
+			for target in targets:
+				target.send(item)
+		except StandardError, e:
+			for target in targets:
+				target.throw(e.__class__, e.message)
+
+
+class CoTee(object):
+	"""
+	>>> ct = CoTee()
+	>>> ct.register_sink(printer_sink("1 %s"))
+	>>> ct.register_sink(printer_sink("2 %s"))
+	>>> ct.stage.send("Hello")
+	1 Hello
+	2 Hello
+	>>> ct.stage.send("World")
+	1 World
+	2 World
+	>>> ct.register_sink(printer_sink("3 %s"))
+	>>> ct.stage.send("Foo")
+	1 Foo
+	2 Foo
+	3 Foo
+	>>> # ct.stage.throw(RuntimeError, "Goodbye")
+	>>> # ct.stage.send("Meh")
+	>>> # ct.stage.close()
+	"""
+
+	def __init__(self):
+		self.stage = self._stage()
+		self._targets = []
+
+	def register_sink(self, sink):
+		self._targets.append(sink)
+
+	def unregister_sink(self, sink):
+		self._targets.remove(sink)
+
+	def restart(self):
+		self.stage = self._stage()
+
+	@autostart
+	def _stage(self):
+		while True:
+			try:
+				item = yield
+				for target in self._targets:
+					target.send(item)
+			except StandardError, e:
+				for target in self._targets:
+					target.throw(e.__class__, e.message)
+
+
+def _flush_queue(queue):
+	while not queue.empty():
+		yield queue.get()
+
+
+@autostart
+def cocount(target, start = 0):
+	"""
+	>>> cc = cocount(printer_sink("%s"))
+	>>> cc.send("a")
+	0
+	>>> cc.send(None)
+	1
+	>>> cc.send([])
+	2
+	>>> cc.send(0)
+	3
+	"""
+	for i in itertools.count(start):
+		item = yield
+		target.send(i)
+
+
+@autostart
+def coenumerate(target, start = 0):
+	"""
+	>>> ce = coenumerate(printer_sink("%r"))
+	>>> ce.send("a")
+	(0, 'a')
+	>>> ce.send(None)
+	(1, None)
+	>>> ce.send([])
+	(2, [])
+	>>> ce.send(0)
+	(3, 0)
+	"""
+	for i in itertools.count(start):
+		item = yield
+		decoratedItem = i, item
+		target.send(decoratedItem)
+
+
+@autostart
+def corepeat(target, elem):
+	"""
+	>>> cr = corepeat(printer_sink("%s"), "Hello World")
+	>>> cr.send("a")
+	Hello World
+	>>> cr.send(None)
+	Hello World
+	>>> cr.send([])
+	Hello World
+	>>> cr.send(0)
+	Hello World
+	"""
+	while True:
+		item = yield
+		target.send(elem)
+
+
+@autostart
+def cointercept(target, elems):
+	"""
+	>>> cr = cointercept(printer_sink("%s"), [1, 2, 3, 4])
+	>>> cr.send("a")
+	1
+	>>> cr.send(None)
+	2
+	>>> cr.send([])
+	3
+	>>> cr.send(0)
+	4
+	>>> cr.send("Bye")
+	Traceback (most recent call last):
+	  File "/usr/lib/python2.5/doctest.py", line 1228, in __run
+	    compileflags, 1) in test.globs
+	  File "<doctest coroutines.cointercept[5]>", line 1, in <module>
+	    cr.send("Bye")
+	StopIteration
+	"""
+	item = yield
+	for elem in elems:
+		target.send(elem)
+		item = yield
+
+
+@autostart
+def codropwhile(target, pred):
+	"""
+	>>> cdw = codropwhile(printer_sink("%s"), lambda x: x)
+	>>> cdw.send([0, 1, 2])
+	>>> cdw.send(1)
+	>>> cdw.send(True)
+	>>> cdw.send(False)
+	>>> cdw.send([0, 1, 2])
+	[0, 1, 2]
+	>>> cdw.send(1)
+	1
+	>>> cdw.send(True)
+	True
+	"""
+	while True:
+		item = yield
+		if not pred(item):
+			break
+
+	while True:
+		item = yield
+		target.send(item)
+
+
+@autostart
+def cotakewhile(target, pred):
+	"""
+	>>> ctw = cotakewhile(printer_sink("%s"), lambda x: x)
+	>>> ctw.send([0, 1, 2])
+	[0, 1, 2]
+	>>> ctw.send(1)
+	1
+	>>> ctw.send(True)
+	True
+	>>> ctw.send(False)
+	>>> ctw.send([0, 1, 2])
+	>>> ctw.send(1)
+	>>> ctw.send(True)
+	"""
+	while True:
+		item = yield
+		if not pred(item):
+			break
+		target.send(item)
+
+	while True:
+		item = yield
+
+
+@autostart
+def coslice(target, lower, upper):
+	"""
+	>>> cs = coslice(printer_sink("%r"), 3, 5)
+	>>> cs.send("0")
+	>>> cs.send("1")
+	>>> cs.send("2")
+	>>> cs.send("3")
+	'3'
+	>>> cs.send("4")
+	'4'
+	>>> cs.send("5")
+	>>> cs.send("6")
+	"""
+	for i in xrange(lower):
+		item = yield
+	for i in xrange(upper - lower):
+		item = yield
+		target.send(item)
+	while True:
+		item = yield
+
+
+@autostart
+def cochain(targets):
+	"""
+	>>> cr = cointercept(printer_sink("good %s"), [1, 2, 3, 4])
+	>>> cc = cochain([cr, printer_sink("end %s")])
+	>>> cc.send("a")
+	good 1
+	>>> cc.send(None)
+	good 2
+	>>> cc.send([])
+	good 3
+	>>> cc.send(0)
+	good 4
+	>>> cc.send("Bye")
+	end Bye
+	"""
+	behind = []
+	for target in targets:
+		try:
+			while behind:
+				item = behind.pop()
+				target.send(item)
+			while True:
+				item = yield
+				target.send(item)
+		except StopIteration:
+			behind.append(item)
+
+
+@autostart
+def queue_sink(queue):
+	"""
+	>>> q = Queue.Queue()
+	>>> qs = queue_sink(q)
+	>>> qs.send("Hello")
+	>>> qs.send("World")
+	>>> qs.throw(RuntimeError, "Goodbye")
+	>>> qs.send("Meh")
+	>>> qs.close()
+	>>> print [i for i in _flush_queue(q)]
+	[(None, 'Hello'), (None, 'World'), (<type 'exceptions.RuntimeError'>, 'Goodbye'), (None, 'Meh'), (<type 'exceptions.GeneratorExit'>, None)]
+	"""
+	while True:
+		try:
+			item = yield
+			queue.put((None, item))
+		except StandardError, e:
+			queue.put((e.__class__, e.message))
+		except GeneratorExit:
+			queue.put((GeneratorExit, None))
+			raise
+
+
+def decode_item(item, target):
+	if item[0] is None:
+		target.send(item[1])
+		return False
+	elif item[0] is GeneratorExit:
+		target.close()
+		return True
+	else:
+		target.throw(item[0], item[1])
+		return False
+
+
+def queue_source(queue, target):
+	"""
+	>>> q = Queue.Queue()
+	>>> for i in [
+	... 		(None, 'Hello'),
+	... 		(None, 'World'),
+	... 		(GeneratorExit, None),
+	... 		]:
+	... 
q.put(i) + >>> qs = queue_source(q, printer_sink()) + Hello + World + """ + isDone = False + while not isDone: + item = queue.get() + isDone = decode_item(item, target) + + +def threaded_stage(target, thread_factory = threading.Thread): + messages = Queue.Queue() + + run_source = functools.partial(queue_source, messages, target) + thread_factory(target=run_source).start() + + # Sink running in current thread + return functools.partial(queue_sink, messages) + + +@autostart +def pickle_sink(f): + while True: + try: + item = yield + pickle.dump((None, item), f) + except StandardError, e: + pickle.dump((e.__class__, e.message), f) + except GeneratorExit: + pickle.dump((GeneratorExit, ), f) + raise + except StopIteration: + f.close() + return + + +def pickle_source(f, target): + try: + isDone = False + while not isDone: + item = pickle.load(f) + isDone = decode_item(item, target) + except EOFError: + target.close() + + +class EventHandler(object, xml.sax.ContentHandler): + + START = "start" + TEXT = "text" + END = "end" + + def __init__(self, target): + object.__init__(self) + xml.sax.ContentHandler.__init__(self) + self._target = target + + def startElement(self, name, attrs): + self._target.send((self.START, (name, attrs._attrs))) + + def characters(self, text): + self._target.send((self.TEXT, text)) + + def endElement(self, name): + self._target.send((self.END, name)) + + +def expat_parse(f, target): + parser = xml.parsers.expat.ParserCreate() + parser.buffer_size = 65536 + parser.buffer_text = True + parser.returns_unicode = False + parser.StartElementHandler = lambda name, attrs: target.send(('start', (name, attrs))) + parser.EndElementHandler = lambda name: target.send(('end', name)) + parser.CharacterDataHandler = lambda data: target.send(('text', data)) + parser.ParseFile(f) + + +if __name__ == "__main__": + import doctest + doctest.testmod() diff --git a/src/util/go_utils.py b/src/util/go_utils.py new file mode 100644 index 0000000..20ccac1 --- /dev/null +++ b/src/util/go_utils.py @@ -0,0 +1,300 @@ +#!/usr/bin/env python + +from __future__ import with_statement + +import time +import functools +import threading +import Queue +import logging + +import gobject + +import algorithms +import misc + + +_moduleLogger = logging.getLogger(__name__) + + +def make_idler(func): + """ + Decorator that makes a generator-function into a function that will continue execution on next call + """ + a = [] + + @functools.wraps(func) + def decorated_func(*args, **kwds): + if not a: + a.append(func(*args, **kwds)) + try: + a[0].next() + return True + except StopIteration: + del a[:] + return False + + return decorated_func + + +def async(func): + """ + Make a function mainloop friendly. the function will be called at the + next mainloop idle state. 
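+
+	A usage sketch (refresh_ui is a hypothetical callback; a gobject main
+	loop must be running for the idle call to ever fire):
+
+		@async
+		def refresh_ui():
+			pass # scheduled with gobject.idle_add; runs once at the next idle moment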
+ + >>> import misc + >>> misc.validate_decorator(async) + """ + + @functools.wraps(func) + def new_function(*args, **kwargs): + + def async_function(): + func(*args, **kwargs) + return False + + gobject.idle_add(async_function) + + return new_function + + +class Async(object): + + def __init__(self, func, once = True): + self.__func = func + self.__idleId = None + self.__once = once + + def start(self): + assert self.__idleId is None + if self.__once: + self.__idleId = gobject.idle_add(self._on_once) + else: + self.__idleId = gobject.idle_add(self.__func) + + def is_running(self): + return self.__idleId is not None + + def cancel(self): + if self.__idleId is not None: + gobject.source_remove(self.__idleId) + self.__idleId = None + + def __call__(self): + return self.start() + + @misc.log_exception(_moduleLogger) + def _on_once(self): + self.cancel() + try: + self.__func() + except Exception: + pass + return False + + +class Timeout(object): + + def __init__(self, func): + self.__func = func + self.__timeoutId = None + + def start(self, **kwds): + assert self.__timeoutId is None + + assert len(kwds) == 1 + timeoutInSeconds = kwds["seconds"] + assert 0 <= timeoutInSeconds + if timeoutInSeconds == 0: + self.__timeoutId = gobject.idle_add(self._on_once) + else: + self.__timeoutId = timeout_add_seconds(timeoutInSeconds, self._on_once) + + def is_running(self): + return self.__timeoutId is not None + + def cancel(self): + if self.__timeoutId is not None: + gobject.source_remove(self.__timeoutId) + self.__timeoutId = None + + def __call__(self, **kwds): + return self.start(**kwds) + + @misc.log_exception(_moduleLogger) + def _on_once(self): + self.cancel() + try: + self.__func() + except Exception: + pass + return False + + +_QUEUE_EMPTY = object() + + +class AsyncPool(object): + + def __init__(self): + self.__workQueue = Queue.Queue() + self.__thread = threading.Thread( + name = type(self).__name__, + target = self.__consume_queue, + ) + self.__isRunning = True + + def start(self): + self.__thread.start() + + def stop(self): + self.__isRunning = False + for _ in algorithms.itr_available(self.__workQueue): + pass # eat up queue to cut down dumb work + self.__workQueue.put(_QUEUE_EMPTY) + + def add_task(self, func, args, kwds, on_success, on_error): + task = func, args, kwds, on_success, on_error + self.__workQueue.put(task) + + @misc.log_exception(_moduleLogger) + def __trampoline_callback(self, on_success, on_error, isError, result): + if not self.__isRunning: + if isError: + _moduleLogger.error("Masking: %s" % (result, )) + isError = True + result = StopIteration("Cancelling all callbacks") + callback = on_success if not isError else on_error + try: + callback(result) + except Exception: + _moduleLogger.exception("Callback errored") + return False + + @misc.log_exception(_moduleLogger) + def __consume_queue(self): + while True: + task = self.__workQueue.get() + if task is _QUEUE_EMPTY: + break + func, args, kwds, on_success, on_error = task + + try: + result = func(*args, **kwds) + isError = False + except Exception, e: + _moduleLogger.error("Error, passing it back to the main thread") + result = e + isError = True + self.__workQueue.task_done() + + gobject.idle_add(self.__trampoline_callback, on_success, on_error, isError, result) + _moduleLogger.debug("Shutting down worker thread") + + +class AsyncLinearExecution(object): + + def __init__(self, pool, func): + self._pool = pool + self._func = func + self._run = None + + def start(self, *args, **kwds): + assert self._run is None + self._run = 
self._func(*args, **kwds)
+		trampoline, args, kwds = self._run.send(None) # priming the function
+		self._pool.add_task(
+			trampoline,
+			args,
+			kwds,
+			self.on_success,
+			self.on_error,
+		)
+
+	@misc.log_exception(_moduleLogger)
+	def on_success(self, result):
+		_moduleLogger.debug("Processing success for: %r", self._func)
+		try:
+			trampoline, args, kwds = self._run.send(result)
+		except StopIteration, e:
+			pass
+		else:
+			self._pool.add_task(
+				trampoline,
+				args,
+				kwds,
+				self.on_success,
+				self.on_error,
+			)
+
+	@misc.log_exception(_moduleLogger)
+	def on_error(self, error):
+		_moduleLogger.debug("Processing error for: %r", self._func)
+		try:
+			trampoline, args, kwds = self._run.throw(error)
+		except StopIteration, e:
+			pass
+		else:
+			self._pool.add_task(
+				trampoline,
+				args,
+				kwds,
+				self.on_success,
+				self.on_error,
+			)
+
+
+def throttled(minDelay, queue):
+	"""
+	Throttle the calls to a function by queueing all the calls that happen
+	before the minimum delay
+
+	>>> import misc
+	>>> import Queue
+	>>> misc.validate_decorator(throttled(0, Queue.Queue()))
+	"""
+
+	def actual_decorator(func):
+
+		lastCallTime = [None]
+
+		def process_queue():
+			if 0 < len(queue):
+				func, args, kwargs = queue.pop(0)
+				lastCallTime[0] = time.time() * 1000
+				func(*args, **kwargs)
+			return False
+
+		@functools.wraps(func)
+		def new_function(*args, **kwargs):
+			now = time.time() * 1000
+			if (
+				lastCallTime[0] is None or
+				(minDelay <= now - lastCallTime[0])
+			):
+				lastCallTime[0] = now
+				func(*args, **kwargs)
+			else:
+				queue.append((func, args, kwargs))
+				lastCallDelta = now - lastCallTime[0]
+				processQueueTimeout = int(minDelay * len(queue) - lastCallDelta)
+				gobject.timeout_add(processQueueTimeout, process_queue)
+
+		return new_function
+
+	return actual_decorator
+
+
+def _old_timeout_add_seconds(timeout, callback):
+	return gobject.timeout_add(timeout * 1000, callback)
+
+
+def _timeout_add_seconds(timeout, callback):
+	return gobject.timeout_add_seconds(timeout, callback)
+
+
+try:
+	gobject.timeout_add_seconds
+	timeout_add_seconds = _timeout_add_seconds
+except AttributeError:
+	timeout_add_seconds = _old_timeout_add_seconds
diff --git a/src/util/io.py b/src/util/io.py
new file mode 100644
index 0000000..aece2dd
--- /dev/null
+++ b/src/util/io.py
@@ -0,0 +1,129 @@
+#!/usr/bin/env python
+
+
+from __future__ import with_statement
+
+import os
+import pickle
+import contextlib
+import itertools
+import functools
+
+
+@contextlib.contextmanager
+def change_directory(directory):
+	previousDirectory = os.getcwd()
+	os.chdir(directory)
+	currentDirectory = os.getcwd()
+
+	try:
+		yield previousDirectory, currentDirectory
+	finally:
+		os.chdir(previousDirectory)
+
+
+@contextlib.contextmanager
+def pickled(filename):
+	"""
+	Here is an example usage:
+	with pickled("foo.db") as p:
+		p("users", list).append(["srid", "passwd", 23])
+	"""
+
+	if os.path.isfile(filename):
+		data = pickle.load(open(filename))
+	else:
+		data = {}
+
+	def getter(item, factory):
+		if item in data:
+			return data[item]
+		else:
+			data[item] = factory()
+			return data[item]
+
+	yield getter
+
+	pickle.dump(data, open(filename, "w"))
+
+
+@contextlib.contextmanager
+def redirect(object_, attr, value):
+	"""
+	>>> import sys
+	>>> with redirect(sys, 'stdout', open('stdout', 'w')):
+	... 	print "hello"
+	...
+	>>> print "we're back"
+	we're back
+	"""
+	orig = getattr(object_, attr)
+	setattr(object_, attr, value)
+	try:
+		yield
+	finally:
+		setattr(object_, attr, orig)
+
+
+def pathsplit(path):
+	"""
+	>>> pathsplit("/a/b/c")
+	['', 'a', 'b', 'c']
+	>>> pathsplit("./plugins/builtins.ini")
+	['.', 'plugins', 'builtins.ini']
+	"""
+	pathParts = path.split(os.path.sep)
+	return pathParts
+
+
+def commonpath(l1, l2, common=None):
+	"""
+	>>> commonpath(pathsplit('/a/b/c/d'), pathsplit('/a/b/c1/d1'))
+	(['', 'a', 'b'], ['c', 'd'], ['c1', 'd1'])
+	>>> commonpath(pathsplit("./plugins/"), pathsplit("./plugins/builtins.ini"))
+	(['.', 'plugins'], [''], ['builtins.ini'])
+	>>> commonpath(pathsplit("./plugins/builtins"), pathsplit("./plugins"))
+	(['.', 'plugins'], ['builtins'], [])
+	"""
+	if common is None:
+		common = []
+
+	if l1 == l2:
+		return l1, [], []
+
+	for i, (leftDir, rightDir) in enumerate(zip(l1, l2)):
+		if leftDir != rightDir:
+			return l1[0:i], l1[i:], l2[i:]
+	else:
+		if leftDir == rightDir:
+			i += 1
+		return l1[0:i], l1[i:], l2[i:]
+
+
+def relpath(p1, p2):
+	"""
+	>>> relpath('/', '/')
+	'./'
+	>>> relpath('/a/b/c/d', '/')
+	'../../../../'
+	>>> relpath('/a/b/c/d', '/a/b/c1/d1')
+	'../../c1/d1'
+	>>> relpath('/a/b/c/d', '/a/b/c1/d1/')
+	'../../c1/d1'
+	>>> relpath("./plugins/builtins", "./plugins")
+	'../'
+	>>> relpath("./plugins/", "./plugins/builtins.ini")
+	'builtins.ini'
+	"""
+	sourcePath = os.path.normpath(p1)
+	destPath = os.path.normpath(p2)
+
+	(common, sourceOnly, destOnly) = commonpath(pathsplit(sourcePath), pathsplit(destPath))
+	if len(sourceOnly) or len(destOnly):
+		relParts = itertools.chain(
+			(('..' + os.sep) * len(sourceOnly), ),
+			destOnly,
+		)
+		return os.path.join(*relParts)
+	else:
+		return "."+os.sep
diff --git a/src/util/linux.py b/src/util/linux.py
new file mode 100644
index 0000000..4837f2a
--- /dev/null
+++ b/src/util/linux.py
@@ -0,0 +1,13 @@
+#!/usr/bin/env python
+
+
+import logging
+
+
+def set_process_name(name):
+	try: # change process name for killall
+		import ctypes
+		libc = ctypes.CDLL('libc.so.6')
+		libc.prctl(15, name, 0, 0, 0)
+	except Exception, e:
+		logging.warning('Unable to set processName: %s' % e)
diff --git a/src/util/misc.py b/src/util/misc.py
new file mode 100644
index 0000000..cf5c22a
--- /dev/null
+++ b/src/util/misc.py
@@ -0,0 +1,757 @@
+#!/usr/bin/env python
+
+from __future__ import with_statement
+
+import sys
+import re
+import cPickle
+
+import functools
+import contextlib
+import inspect
+
+import optparse
+import traceback
+import warnings
+import string
+
+
+_indentationLevel = [0]
+
+
+def log_call(logger):
+
+	def log_call_decorator(func):
+
+		@functools.wraps(func)
+		def wrapper(*args, **kwds):
+			logger.debug("%s> %s" % (" " * _indentationLevel[0], func.__name__, ))
+			_indentationLevel[0] += 1
+			try:
+				return func(*args, **kwds)
+			finally:
+				_indentationLevel[0] -= 1
+				logger.debug("%s< %s" % (" " * _indentationLevel[0], func.__name__, ))
+
+		return wrapper
+
+	return log_call_decorator
+
+
+def log_exception(logger):
+
+	def log_exception_decorator(func):
+
+		@functools.wraps(func)
+		def wrapper(*args, **kwds):
+			try:
+				return func(*args, **kwds)
+			except Exception:
+				logger.exception(func.__name__)
+				raise
+
+		return wrapper
+
+	return log_exception_decorator
+
+
+def printfmt(template):
+	"""
+	This hides having to create the Template object and call substitute/safe_substitute on it.
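+	Values for the $fields are taken from the local variables of the outermost
+	stack frame (see the inspect.stack() call below), so plain variable names
+	can be used directly in the template.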
For example: + + >>> num = 10 + >>> word = "spam" + >>> printfmt("I would like to order $num units of $word, please") #doctest: +SKIP + I would like to order 10 units of spam, please + """ + frame = inspect.stack()[-1][0] + try: + print string.Template(template).safe_substitute(frame.f_locals) + finally: + del frame + + +def is_special(name): + return name.startswith("__") and name.endswith("__") + + +def is_private(name): + return name.startswith("_") and not is_special(name) + + +def privatize(clsName, attributeName): + """ + At runtime, make an attributeName private + + Example: + >>> class Test(object): + ... pass + ... + >>> try: + ... dir(Test).index("_Test__me") + ... print dir(Test) + ... except: + ... print "Not Found" + Not Found + >>> setattr(Test, privatize(Test.__name__, "me"), "Hello World") + >>> try: + ... dir(Test).index("_Test__me") + ... print "Found" + ... except: + ... print dir(Test) + 0 + Found + >>> print getattr(Test, obfuscate(Test.__name__, "__me")) + Hello World + >>> + >>> is_private(privatize(Test.__name__, "me")) + True + >>> is_special(privatize(Test.__name__, "me")) + False + """ + return "".join(["_", clsName, "__", attributeName]) + + +def obfuscate(clsName, attributeName): + """ + At runtime, turn a private name into the obfuscated form + + Example: + >>> class Test(object): + ... __me = "Hello World" + ... + >>> try: + ... dir(Test).index("_Test__me") + ... print "Found" + ... except: + ... print dir(Test) + 0 + Found + >>> print getattr(Test, obfuscate(Test.__name__, "__me")) + Hello World + >>> is_private(obfuscate(Test.__name__, "__me")) + True + >>> is_special(obfuscate(Test.__name__, "__me")) + False + """ + return "".join(["_", clsName, attributeName]) + + +class PAOptionParser(optparse.OptionParser, object): + """ + >>> if __name__ == '__main__': + ... #parser = PAOptionParser("My usage str") + ... parser = PAOptionParser() + ... parser.add_posarg("Foo", help="Foo usage") + ... parser.add_posarg("Bar", dest="bar_dest") + ... parser.add_posarg("Language", dest='tr_type', type="choice", choices=("Python", "Other")) + ... parser.add_option('--stocksym', dest='symbol') + ... values, args = parser.parse_args() + ... print values, args + ... 
+
+	python mycp.py -h
+	python mycp.py
+	python mycp.py foo
+	python mycp.py foo bar
+
+	python mycp.py foo bar lava
+	Usage: pa.py [options]
+
+	Positional Arguments:
+	Foo: Foo usage
+	Bar:
+	Language:
+
+	pa.py: error: option --Language: invalid choice: 'lava' (choose from 'Python', 'Other')
+	"""
+
+	def __init__(self, *args, **kw):
+		self.posargs = []
+		super(PAOptionParser, self).__init__(*args, **kw)
+
+	def add_posarg(self, *args, **kw):
+		pa_help = kw.get("help", "")
+		kw["help"] = optparse.SUPPRESS_HELP
+		o = self.add_option("--%s" % args[0], *args[1:], **kw)
+		self.posargs.append((args[0], pa_help))
+
+	def get_usage(self, *args, **kwargs):
+		params = (' '.join(["<%s>" % arg[0] for arg in self.posargs]), '\n '.join(["%s: %s" % (arg) for arg in self.posargs]))
+		self.usage = "%%prog %s [options]\n\nPositional Arguments:\n %s" % params
+		return super(PAOptionParser, self).get_usage(*args, **kwargs)
+
+	def parse_args(self, *args, **kwargs):
+		args = sys.argv[1:]
+		args0 = []
+		for p, v in zip(self.posargs, args):
+			args0.append("--%s" % p[0])
+			args0.append(v)
+		args = args0 + args
+		options, args = super(PAOptionParser, self).parse_args(args, **kwargs)
+		if len(args) < len(self.posargs):
+			msg = 'Missing value(s) for "%s"\n' % ", ".join([arg[0] for arg in self.posargs][len(args):])
+			self.error(msg)
+		return options, args
+
+
+def explicitly(name, stackadd=0):
+	"""
+	This is an alias for adding to '__all__'. Less error-prone than using
+	__all__ itself, since setting __all__ directly is prone to stomping on
+	things implicitly exported via L{alias}.
+
+	@note Taken from PyExport (which could turn out pretty cool):
+	@li @a http://codebrowse.launchpad.net/~glyph/
+	@li @a http://glyf.livejournal.com/74356.html
+	"""
+	packageVars = sys._getframe(1+stackadd).f_locals
+	globalAll = packageVars.setdefault('__all__', [])
+	globalAll.append(name)
+
+
+def public(thunk):
+	"""
+	This is a decorator, for convenience. Rather than typing the name of your
+	function twice, you can decorate a function with this.
+
+	To be real, @public would need to work on methods as well, which gets into
+	supporting types...
+
+	@note Taken from PyExport (which could turn out pretty cool):
+	@li @a http://codebrowse.launchpad.net/~glyph/
+	@li @a http://glyf.livejournal.com/74356.html
+	"""
+	explicitly(thunk.__name__, 1)
+	return thunk
+
+
+def _append_docstring(obj, message):
+	if obj.__doc__ is None:
+		obj.__doc__ = message
+	else:
+		obj.__doc__ += message
+
+
+def validate_decorator(decorator):
+
+	def simple(x):
+		return x
+
+	f = simple
+	f.__name__ = "name"
+	f.__doc__ = "doc"
+	f.__dict__["member"] = True
+
+	g = decorator(f)
+
+	if f.__name__ != g.__name__:
+		print f.__name__, "!=", g.__name__
+
+	if g.__doc__ is None:
+		print decorator.__name__, "has no doc string"
+	elif not g.__doc__.startswith(f.__doc__):
+		print g.__doc__, "didn't start with", f.__doc__
+
+	if not ("member" in g.__dict__ and g.__dict__["member"]):
+		print "'member' not in ", g.__dict__
+
+
+def deprecated_api(func):
+	"""
+	This is a decorator which can be used to mark functions
+	as deprecated. It will result in a warning being emitted
+	when the function is used.
+
+	>>> validate_decorator(deprecated_api)
+	"""
+
+	@functools.wraps(func)
+	def newFunc(*args, **kwargs):
+		warnings.warn("Call to deprecated function %s." % func.__name__, category=DeprecationWarning)
+		return func(*args, **kwargs)
+
+	_append_docstring(newFunc, "\n@deprecated")
+	return newFunc
+
+
+def unstable_api(func):
+	"""
+	This is a decorator which can be used to mark functions
+	as part of an unstable API. It will result in a warning being
+	emitted when the function is used.
+
+	>>> validate_decorator(unstable_api)
+	"""
+
+	@functools.wraps(func)
+	def newFunc(*args, **kwargs):
+		warnings.warn("Call to unstable API function %s." % func.__name__, category=FutureWarning)
+		return func(*args, **kwargs)
+	_append_docstring(newFunc, "\n@unstable")
+	return newFunc
+
+
+def enabled(func):
+	"""
+	This decorator doesn't add any behavior
+
+	>>> validate_decorator(enabled)
+	"""
+	return func
+
+
+def disabled(func):
+	"""
+	This decorator disables the provided function, and does nothing
+
+	>>> validate_decorator(disabled)
+	"""
+
+	@functools.wraps(func)
+	def emptyFunc(*args, **kargs):
+		pass
+	_append_docstring(emptyFunc, "\n@note Temporarily Disabled")
+	return emptyFunc
+
+
+def metadata(document=True, **kwds):
+	"""
+	>>> validate_decorator(metadata(author="Ed"))
+	"""
+
+	def decorate(func):
+		for k, v in kwds.iteritems():
+			setattr(func, k, v)
+			if document:
+				_append_docstring(func, "\n@"+k+" "+v)
+		return func
+	return decorate
+
+
+def prop(func):
+	"""Function decorator for defining property attributes
+
+	The decorated function is expected to return a dictionary
+	containing one or more of the following pairs:
+		fget - function for getting attribute value
+		fset - function for setting attribute value
+		fdel - function for deleting attribute
+	This can be conveniently constructed by the locals() builtin
+	function; see:
+	http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/205183
+	@author http://kbyanc.blogspot.com/2007/06/python-property-attribute-tricks.html
+
+	Example:
+	>>> #Due to transformation from function to property, does not need to be validated
+	>>> #validate_decorator(prop)
+	>>> class MyExampleClass(object):
+	... 	@prop
+	... 	def foo():
+	... 		"The foo property attribute's doc-string"
+	... 		def fget(self):
+	... 			print "GET"
+	... 			return self._foo
+	... 		def fset(self, value):
+	... 			print "SET"
+	... 			self._foo = value
+	... 		return locals()
+	...
+	>>> me = MyExampleClass()
+	>>> me.foo = 10
+	SET
+	>>> print me.foo
+	GET
+	10
+	"""
+	return property(doc=func.__doc__, **func())
+
+
+def print_handler(e):
+	"""
+	@see ExpHandler
+	"""
+	print "%s: %s" % (type(e).__name__, e)
+
+
+def print_ignore(e):
+	"""
+	@see ExpHandler
+	"""
+	print 'Ignoring %s exception: %s' % (type(e).__name__, e)
+
+
+def print_traceback(e):
+	"""
+	@see ExpHandler
+	"""
+	#print sys.exc_info()
+	traceback.print_exc(file=sys.stdout)
+
+
+def ExpHandler(handler = print_handler, *exceptions):
+	"""
+	An exception handling idiom using decorators
+	Examples
+	Specify exceptions in order, first one is handled first,
+	last one last.
+
+	>>> validate_decorator(ExpHandler())
+	>>> @ExpHandler(print_ignore, ZeroDivisionError)
+	... @ExpHandler(None, AttributeError, ValueError)
+	... def f1():
+	... 	1/0
+	>>> @ExpHandler(print_traceback, ZeroDivisionError)
+	... def f2():
+	... 	1/0
+	>>> @ExpHandler()
+	... def f3(*pargs):
+	... 	l = pargs
+	... 	return l[10]
+	>>> @ExpHandler(print_traceback, ZeroDivisionError)
+	... def f4():
+	... 	return 1
+	>>>
+	>>>
+	>>> f1()
+	Ignoring ZeroDivisionError exception: integer division or modulo by zero
+	>>> f2() # doctest: +ELLIPSIS, +NORMALIZE_WHITESPACE
+	Traceback (most recent call last):
+	...
+	ZeroDivisionError: integer division or modulo by zero
+	>>> f3()
+	IndexError: tuple index out of range
+	>>> f4()
+	1
+	"""
+
+	def wrapper(f):
+		localExceptions = exceptions
+		if not localExceptions:
+			localExceptions = [Exception]
+		t = [(ex, handler) for ex in localExceptions]
+		t.reverse()
+
+		def newfunc(t, *args, **kwargs):
+			ex, handler = t[0]
+			try:
+				if len(t) == 1:
+					return f(*args, **kwargs)
+				else:
+					#Recurse for embedded try/excepts
+					dec_func = functools.partial(newfunc, t[1:])
+					dec_func = functools.update_wrapper(dec_func, f)
+					return dec_func(*args, **kwargs)
+			except ex, e:
+				return handler(e)
+
+		dec_func = functools.partial(newfunc, t)
+		dec_func = functools.update_wrapper(dec_func, f)
+		return dec_func
+	return wrapper
+
+
+def into_debugger(func):
+	"""
+	>>> validate_decorator(into_debugger)
+	"""
+
+	@functools.wraps(func)
+	def newFunc(*args, **kwargs):
+		try:
+			return func(*args, **kwargs)
+		except:
+			import pdb
+			pdb.post_mortem()
+
+	return newFunc
+
+
+class bindclass(object):
+	"""
+	>>> validate_decorator(bindclass)
+	>>> class Foo(BoundObject):
+	... 	@bindclass
+	... 	def foo(this_class, self):
+	... 		return this_class, self
+	...
+	>>> class Bar(Foo):
+	... 	@bindclass
+	... 	def bar(this_class, self):
+	... 		return this_class, self
+	...
+	>>> f = Foo()
+	>>> b = Bar()
+	>>>
+	>>> f.foo() # doctest: +ELLIPSIS
+	(<class '...Foo'>, <...Foo object at ...>)
+	>>> b.foo() # doctest: +ELLIPSIS
+	(<class '...Foo'>, <...Bar object at ...>)
+	>>> b.bar() # doctest: +ELLIPSIS
+	(<class '...Bar'>, <...Bar object at ...>)
+	"""
+
+	def __init__(self, f):
+		self.f = f
+		self.__name__ = f.__name__
+		self.__doc__ = f.__doc__
+		self.__dict__.update(f.__dict__)
+		self.m = None
+
+	def bind(self, cls, attr):
+
+		def bound_m(*args, **kwargs):
+			return self.f(cls, *args, **kwargs)
+		bound_m.__name__ = attr
+		self.m = bound_m
+
+	def __get__(self, obj, objtype=None):
+		return self.m.__get__(obj, objtype)
+
+
+class ClassBindingSupport(type):
+	"@see bindclass"
+
+	def __init__(mcs, name, bases, attrs):
+		type.__init__(mcs, name, bases, attrs)
+		for attr, val in attrs.iteritems():
+			if isinstance(val, bindclass):
+				val.bind(mcs, attr)
+
+
+class BoundObject(object):
+	"@see bindclass"
+	__metaclass__ = ClassBindingSupport
+
+
+def bindfunction(f):
+	"""
+	>>> validate_decorator(bindfunction)
+	>>> @bindfunction
+	... def factorial(thisfunction, n):
+	... 	# Within this function the name 'thisfunction' refers to the factorial
+	... 	# function(with only one argument), even after 'factorial' is bound
+	... 	# to another object
+	... 	if n > 0:
+	... 		return n * thisfunction(n - 1)
+	... 	else:
+	... 		return 1
+	...
+	>>> factorial(3)
+	6
+	"""
+
+	@functools.wraps(f)
+	def bound_f(*args, **kwargs):
+		return f(bound_f, *args, **kwargs)
+	return bound_f
+
+
+class Memoize(object):
+	"""
+	Memoize(fn) - an instance which acts like fn but memoizes its arguments
+	Will only work on functions with non-mutable arguments
+	@note Source: http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52201
+
+	>>> validate_decorator(Memoize)
+	"""
+
+	def __init__(self, fn):
+		self.fn = fn
+		self.__name__ = fn.__name__
+		self.__doc__ = fn.__doc__
+		self.__dict__.update(fn.__dict__)
+		self.memo = {}
+
+	def __call__(self, *args):
+		if args not in self.memo:
+			self.memo[args] = self.fn(*args)
+		return self.memo[args]
+
+
+class MemoizeMutable(object):
+	"""Memoize(fn) - an instance which acts like fn but memoizes its arguments
+	Will work on functions with mutable arguments(slower than Memoize)
+	@note Source: http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52201
+
+	>>> validate_decorator(MemoizeMutable)
+	"""
+
+	def __init__(self, fn):
+		self.fn = fn
+		self.__name__ = fn.__name__
+		self.__doc__ = fn.__doc__
+		self.__dict__.update(fn.__dict__)
+		self.memo = {}
+
+	def __call__(self, *args, **kw):
+		text = cPickle.dumps((args, kw))
+		if text not in self.memo:
+			self.memo[text] = self.fn(*args, **kw)
+		return self.memo[text]
+
+
+callTraceIndentationLevel = 0
+
+
+def call_trace(f):
+	"""
+	Call-tracing decorator: logs entry to and exit from the wrapped function.
+
+	>>> validate_decorator(call_trace)
+	>>> @call_trace
+	... def a(a, b, c):
+	... 	pass
+	>>> a(1, 2, c=3)
+	Entering a((1, 2), {'c': 3})
+	Exiting a((1, 2), {'c': 3})
+	"""
+
+	@functools.wraps(f)
+	def verboseTrace(*args, **kw):
+		global callTraceIndentationLevel
+
+		print "%sEntering %s(%s, %s)" % ("\t"*callTraceIndentationLevel, f.__name__, args, kw)
+		callTraceIndentationLevel += 1
+		try:
+			result = f(*args, **kw)
+		except:
+			callTraceIndentationLevel -= 1
+			print "%sException %s(%s, %s)" % ("\t"*callTraceIndentationLevel, f.__name__, args, kw)
+			raise
+		callTraceIndentationLevel -= 1
+		print "%sExiting %s(%s, %s)" % ("\t"*callTraceIndentationLevel, f.__name__, args, kw)
+		return result
+
+	@functools.wraps(f)
+	def smallTrace(*args, **kw):
+		global callTraceIndentationLevel
+
+		print "%sEntering %s" % ("\t"*callTraceIndentationLevel, f.__name__)
+		callTraceIndentationLevel += 1
+		try:
+			result = f(*args, **kw)
+		except:
+			callTraceIndentationLevel -= 1
+			print "%sException %s" % ("\t"*callTraceIndentationLevel, f.__name__)
+			raise
+		callTraceIndentationLevel -= 1
+		print "%sExiting %s" % ("\t"*callTraceIndentationLevel, f.__name__)
+		return result
+
+	#return smallTrace
+	return verboseTrace
+
+
+@contextlib.contextmanager
+def lexical_scope(*args):
+	"""
+	@note Source: http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/520586
+	Example:
+	>>> b = 0
+	>>> with lexical_scope(1) as (a):
+	... 	print a
+	...
+	1
+	>>> with lexical_scope(1,2,3) as (a,b,c):
+	... 	print a,b,c
+	...
+	1 2 3
+	>>> with lexical_scope():
+	... 	d = 10
+	... 	def foo():
+	... 		pass
+	...
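+	>>> #the new names ('d', 'foo') should have been cleaned up again; a sketch:
+	>>> 'd' in dir() #doctest: +SKIP
+	False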
+ >>> print b + 2 + """ + + frame = inspect.currentframe().f_back.f_back + saved = frame.f_locals.keys() + try: + if not args: + yield + elif len(args) == 1: + yield args[0] + else: + yield args + finally: + f_locals = frame.f_locals + for key in (x for x in f_locals.keys() if x not in saved): + del f_locals[key] + del frame + + +def normalize_number(prettynumber): + """ + function to take a phone number and strip out all non-numeric + characters + + >>> normalize_number("+012-(345)-678-90") + '+01234567890' + >>> normalize_number("1-(345)-678-9000") + '+13456789000' + >>> normalize_number("+1-(345)-678-9000") + '+13456789000' + """ + uglynumber = re.sub('[^0-9+]', '', prettynumber) + if uglynumber.startswith("+"): + pass + elif uglynumber.startswith("1") and len(uglynumber) == 11: + uglynumber = "+"+uglynumber + elif len(uglynumber) == 10: + uglynumber = "+1"+uglynumber + else: + pass + + #validateRe = re.compile("^\+?[0-9]{10,}$") + #assert validateRe.match(uglynumber) is not None + + return uglynumber + + +_VALIDATE_RE = re.compile("^\+?[0-9]{10,}$") + + +def is_valid_number(number): + """ + @returns If This number be called ( syntax validation only ) + """ + return _VALIDATE_RE.match(number) is not None + + +def parse_version(versionText): + """ + >>> parse_version("0.5.2") + [0, 5, 2] + """ + return [ + int(number) + for number in versionText.split(".") + ] + + +def compare_versions(leftParsedVersion, rightParsedVersion): + """ + >>> compare_versions([0, 1, 2], [0, 1, 2]) + 0 + >>> compare_versions([0, 1, 2], [0, 1, 3]) + -1 + >>> compare_versions([0, 1, 2], [0, 2, 2]) + -1 + >>> compare_versions([0, 1, 2], [1, 1, 2]) + -1 + >>> compare_versions([0, 1, 3], [0, 1, 2]) + 1 + >>> compare_versions([0, 2, 2], [0, 1, 2]) + 1 + >>> compare_versions([1, 1, 2], [0, 1, 2]) + 1 + """ + for left, right in zip(leftParsedVersion, rightParsedVersion): + if left < right: + return -1 + elif right < left: + return 1 + else: + return 0 diff --git a/src/util/overloading.py b/src/util/overloading.py new file mode 100644 index 0000000..89cb738 --- /dev/null +++ b/src/util/overloading.py @@ -0,0 +1,256 @@ +#!/usr/bin/env python +import new + +# Make the environment more like Python 3.0 +__metaclass__ = type +from itertools import izip as zip +import textwrap +import inspect + + +__all__ = [ + "AnyType", + "overloaded" +] + + +AnyType = object + + +class overloaded: + """ + Dynamically overloaded functions. + + This is an implementation of (dynamically, or run-time) overloaded + functions; also known as generic functions or multi-methods. + + The dispatch algorithm uses the types of all argument for dispatch, + similar to (compile-time) overloaded functions or methods in C++ and + Java. + + Most of the complexity in the algorithm comes from the need to support + subclasses in call signatures. For example, if an function is + registered for a signature (T1, T2), then a call with a signature (S1, + S2) is acceptable, assuming that S1 is a subclass of T1, S2 a subclass + of T2, and there are no other more specific matches (see below). + + If there are multiple matches and one of those doesn't *dominate* all + others, the match is deemed ambiguous and an exception is raised. A + subtlety here: if, after removing the dominated matches, there are + still multiple matches left, but they all map to the same function, + then the match is not deemed ambiguous and that function is used. + Read the method find_func() below for details. + + @note Python 2.5 is required due to the use of predicates any() and all(). 
diff --git a/src/util/overloading.py b/src/util/overloading.py
new file mode 100644
index 0000000..89cb738
--- /dev/null
+++ b/src/util/overloading.py
@@ -0,0 +1,256 @@
+#!/usr/bin/env python
+import new
+
+# Make the environment more like Python 3.0
+__metaclass__ = type
+from itertools import izip as zip
+import textwrap
+import inspect
+
+
+__all__ = [
+	"AnyType",
+	"overloaded"
+]
+
+
+AnyType = object
+
+
+class overloaded:
+	"""
+	Dynamically overloaded functions.
+
+	This is an implementation of (dynamically, or run-time) overloaded
+	functions; also known as generic functions or multi-methods.
+
+	The dispatch algorithm uses the types of all arguments for dispatch,
+	similar to (compile-time) overloaded functions or methods in C++ and
+	Java.
+
+	Most of the complexity in the algorithm comes from the need to support
+	subclasses in call signatures.  For example, if a function is
+	registered for a signature (T1, T2), then a call with a signature (S1,
+	S2) is acceptable, assuming that S1 is a subclass of T1, S2 a subclass
+	of T2, and there are no other more specific matches (see below).
+
+	If there are multiple matches and one of those doesn't *dominate* all
+	others, the match is deemed ambiguous and an exception is raised.  A
+	subtlety here: if, after removing the dominated matches, there are
+	still multiple matches left, but they all map to the same function,
+	then the match is not deemed ambiguous and that function is used.
+	Read the method find_func() below for details.
+
+	@note Python 2.5 is required due to the use of predicates any() and all().
+	@note only supports positional arguments
+
+	@author http://www.artima.com/weblogs/viewpost.jsp?thread=155514
+
+	>>> import misc
+	>>> misc.validate_decorator (overloaded)
+	>>>
+	>>>
+	>>>
+	>>>
+	>>> #################
+	>>> #Basics, with reusing names and without
+	>>> @overloaded
+	... def foo(x):
+	... 	"prints x"
+	... 	print x
+	...
+	>>> @foo.register(int)
+	... def foo(x):
+	... 	"prints the hex representation of x"
+	... 	print hex(x)
+	...
+	>>> from types import DictType
+	>>> @foo.register(DictType)
+	... def foo_dict(x):
+	... 	"prints the keys of x"
+	... 	print [k for k in x.iterkeys()]
+	...
+	>>> #combines all of the docstrings to help keep track of the specializations
+	>>> foo.__doc__ # doctest: +ELLIPSIS
+	"prints x\\n\\n...overloading.foo (<type 'int'>):\\n\\tprints the hex representation of x\\n\\n...overloading.foo_dict (<type 'dict'>):\\n\\tprints the keys of x"
+	>>> foo ("text")
+	text
+	>>> foo (10) #calling the specialized foo
+	0xa
+	>>> foo ({3:5, 6:7}) #calling the specialization foo_dict
+	[3, 6]
+	>>> foo_dict ({3:5, 6:7}) #by using a unique name, you can still call the specialization directly
+	[3, 6]
+	>>>
+	>>>
+	>>>
+	>>>
+	>>> #################
+	>>> #Multiple arguments, accessing the default, and function finding
+	>>> @overloaded
+	... def two_arg (x, y):
+	... 	print x,y
+	...
+	>>> @two_arg.register(int, int)
+	... def two_arg_int_int (x, y):
+	... 	print hex(x), hex(y)
+	...
+	>>> @two_arg.register(float, int)
+	... def two_arg_float_int (x, y):
+	... 	print x, hex(y)
+	...
+	>>> @two_arg.register(int, float)
+	... def two_arg_int_float (x, y):
+	... 	print hex(x), y
+	...
+	>>> two_arg.__doc__ # doctest: +ELLIPSIS
+	"...overloading.two_arg_int_int (<type 'int'>, <type 'int'>):\\n\\n...overloading.two_arg_float_int (<type 'float'>, <type 'int'>):\\n\\n...overloading.two_arg_int_float (<type 'int'>, <type 'float'>):"
+	>>> two_arg(9, 10)
+	0x9 0xa
+	>>> two_arg(9.0, 10)
+	9.0 0xa
+	>>> two_arg(15, 16.0)
+	0xf 16.0
+	>>> two_arg.default_func(9, 10)
+	9 10
+	>>> two_arg.find_func ((int, float)) == two_arg_int_float
+	True
+	>>> (int, float) in two_arg
+	True
+	>>> (str, int) in two_arg
+	False
+	>>>
+	>>>
+	>>>
+	>>> #################
+	>>> #wildcard
+	>>> @two_arg.register(AnyType, str)
+	... def two_arg_any_str (x, y):
+	... 	print x, y.lower()
+	...
+	>>> two_arg("Hello", "World")
+	Hello world
+	>>> two_arg(500, "World")
+	500 world
+	"""
+
+	def __init__(self, default_func):
+		# Decorator to declare new overloaded function.
+		self.registry = {}
+		self.cache = {}
+		self.default_func = default_func
+		self.__name__ = self.default_func.__name__
+		self.__doc__ = self.default_func.__doc__
+		self.__dict__.update (self.default_func.__dict__)
+
+	def __get__(self, obj, type=None):
+		if obj is None:
+			return self
+		return new.instancemethod(self, obj)
+
+	def register(self, *types):
+		"""
+		Decorator to register an implementation for a specific set of types.
+
+		.register(t1, t2)(f) is equivalent to .register_func((t1, t2), f).
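+
+		A usage sketch (T1 and T2 are hypothetical placeholder types, not
+		names defined by this module):
+
+			@overloaded
+			def f(x, y):
+				pass	# default implementation
+
+			@f.register(T1, T2)
+			def f_t1_t2(x, y):
+				pass	# used when x is a T1 and y is a T2 (or subclasses)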
+ """ + + def helper(func): + self.register_func(types, func) + + originalDoc = self.__doc__ if self.__doc__ is not None else "" + typeNames = ", ".join ([str(type) for type in types]) + typeNames = "".join ([func.__module__+".", func.__name__, " (", typeNames, "):"]) + overloadedDoc = "" + if func.__doc__ is not None: + overloadedDoc = textwrap.fill (func.__doc__, width=60, initial_indent="\t", subsequent_indent="\t") + self.__doc__ = "\n".join ([originalDoc, "", typeNames, overloadedDoc]).strip() + + new_func = func + + #Masking the function, so we want to take on its traits + if func.__name__ == self.__name__: + self.__dict__.update (func.__dict__) + new_func = self + return new_func + + return helper + + def register_func(self, types, func): + """Helper to register an implementation.""" + self.registry[tuple(types)] = func + self.cache = {} # Clear the cache (later we can optimize this). + + def __call__(self, *args): + """Call the overloaded function.""" + types = tuple(map(type, args)) + func = self.cache.get(types) + if func is None: + self.cache[types] = func = self.find_func(types) + return func(*args) + + def __contains__ (self, types): + return self.find_func(types) is not self.default_func + + def find_func(self, types): + """Find the appropriate overloaded function; don't call it. + + @note This won't work for old-style classes or classes without __mro__ + """ + func = self.registry.get(types) + if func is not None: + # Easy case -- direct hit in registry. + return func + + # Phillip Eby suggests to use issubclass() instead of __mro__. + # There are advantages and disadvantages. + + # I can't help myself -- this is going to be intense functional code. + # Find all possible candidate signatures. + mros = tuple(inspect.getmro(t) for t in types) + n = len(mros) + candidates = [sig for sig in self.registry + if len(sig) == n and + all(t in mro for t, mro in zip(sig, mros))] + + if not candidates: + # No match at all -- use the default function. + return self.default_func + elif len(candidates) == 1: + # Unique match -- that's an easy case. + return self.registry[candidates[0]] + + # More than one match -- weed out the subordinate ones. + + def dominates(dom, sub, + orders=tuple(dict((t, i) for i, t in enumerate(mro)) + for mro in mros)): + # Predicate to decide whether dom strictly dominates sub. + # Strict domination is defined as domination without equality. + # The arguments dom and sub are type tuples of equal length. + # The orders argument is a precomputed auxiliary data structure + # giving dicts of ordering information corresponding to the + # positions in the type tuples. + # A type d dominates a type s iff order[d] <= order[s]. + # A type tuple (d1, d2, ...) dominates a type tuple of equal length + # (s1, s2, ...) iff d1 dominates s1, d2 dominates s2, etc. + if dom is sub: + return False + return all(order[d] <= order[s] for d, s, order in zip(dom, sub, orders)) + + # I suppose I could inline dominates() but it wouldn't get any clearer. + candidates = [cand + for cand in candidates + if not any(dominates(dom, cand) for dom in candidates)] + if len(candidates) == 1: + # There's exactly one candidate left. + return self.registry[candidates[0]] + + # Perhaps these multiple candidates all have the same implementation? + funcs = set(self.registry[cand] for cand in candidates) + if len(funcs) == 1: + return funcs.pop() + + # No, the situation is irreducibly ambiguous. 
+ raise TypeError("ambigous call; types=%r; candidates=%r" % + (types, candidates)) diff --git a/src/util/tp_utils.py b/src/util/tp_utils.py new file mode 100644 index 0000000..1c6cbc8 --- /dev/null +++ b/src/util/tp_utils.py @@ -0,0 +1,220 @@ +#!/usr/bin/env python + +import logging + +import dbus +import telepathy + +import util.go_utils as gobject_utils +import misc + + +_moduleLogger = logging.getLogger(__name__) +DBUS_PROPERTIES = 'org.freedesktop.DBus.Properties' + + +class WasMissedCall(object): + + def __init__(self, bus, conn, chan, on_success, on_error): + self.__on_success = on_success + self.__on_error = on_error + + self._requested = None + self._didMembersChange = False + self._didClose = False + self._didReport = False + + self._onTimeout = gobject_utils.Timeout(self._on_timeout) + self._onTimeout.start(seconds=10) + + chan[telepathy.interfaces.CHANNEL_INTERFACE_GROUP].connect_to_signal( + "MembersChanged", + self._on_members_changed, + ) + + chan[telepathy.interfaces.CHANNEL].connect_to_signal( + "Closed", + self._on_closed, + ) + + chan[DBUS_PROPERTIES].GetAll( + telepathy.interfaces.CHANNEL_INTERFACE, + reply_handler = self._on_got_all, + error_handler = self._on_error, + ) + + def cancel(self): + self._report_error("by request") + + def _report_missed_if_ready(self): + if self._didReport: + pass + elif self._requested is not None and (self._didMembersChange or self._didClose): + if self._requested: + self._report_error("wrong direction") + elif self._didClose: + self._report_success() + else: + self._report_error("members added") + else: + if self._didClose: + self._report_error("closed too early") + + def _report_success(self): + assert not self._didReport + self._didReport = True + self._onTimeout.cancel() + self.__on_success(self) + + def _report_error(self, reason): + assert not self._didReport + self._didReport = True + self._onTimeout.cancel() + self.__on_error(self, reason) + + @misc.log_exception(_moduleLogger) + def _on_got_all(self, properties): + self._requested = properties["Requested"] + self._report_missed_if_ready() + + @misc.log_exception(_moduleLogger) + def _on_members_changed(self, message, added, removed, lp, rp, actor, reason): + if added: + self._didMembersChange = True + self._report_missed_if_ready() + + @misc.log_exception(_moduleLogger) + def _on_closed(self): + self._didClose = True + self._report_missed_if_ready() + + @misc.log_exception(_moduleLogger) + def _on_error(self, *args): + self._report_error(args) + + @misc.log_exception(_moduleLogger) + def _on_timeout(self): + self._report_error("timeout") + return False + + +class NewChannelSignaller(object): + + def __init__(self, on_new_channel): + self._sessionBus = dbus.SessionBus() + self._on_user_new_channel = on_new_channel + + def start(self): + self._sessionBus.add_signal_receiver( + self._on_new_channel, + "NewChannel", + "org.freedesktop.Telepathy.Connection", + None, + None + ) + + def stop(self): + self._sessionBus.remove_signal_receiver( + self._on_new_channel, + "NewChannel", + "org.freedesktop.Telepathy.Connection", + None, + None + ) + + @misc.log_exception(_moduleLogger) + def _on_new_channel( + self, channelObjectPath, channelType, handleType, handle, supressHandler + ): + connObjectPath = channel_path_to_conn_path(channelObjectPath) + serviceName = path_to_service_name(channelObjectPath) + try: + self._on_user_new_channel( + self._sessionBus, serviceName, connObjectPath, channelObjectPath, channelType + ) + except Exception: + _moduleLogger.exception("Blocking exception from 
being passed up") + + +class EnableSystemContactIntegration(object): + + ACCOUNT_MGR_NAME = "org.freedesktop.Telepathy.AccountManager" + ACCOUNT_MGR_PATH = "/org/freedesktop/Telepathy/AccountManager" + ACCOUNT_MGR_IFACE_QUERY = "com.nokia.AccountManager.Interface.Query" + ACCOUNT_IFACE_COMPAT = "com.nokia.Account.Interface.Compat" + ACCOUNT_IFACE_COMPAT_PROFILE = "com.nokia.Account.Interface.Compat.Profile" + DBUS_PROPERTIES = 'org.freedesktop.DBus.Properties' + + def __init__(self, profileName): + self._bus = dbus.SessionBus() + self._profileName = profileName + + def start(self): + self._accountManager = self._bus.get_object( + self.ACCOUNT_MGR_NAME, + self.ACCOUNT_MGR_PATH, + ) + self._accountManagerQuery = dbus.Interface( + self._accountManager, + dbus_interface=self.ACCOUNT_MGR_IFACE_QUERY, + ) + + self._accountManagerQuery.FindAccounts( + { + self.ACCOUNT_IFACE_COMPAT_PROFILE: self._profileName, + }, + reply_handler = self._on_found_accounts_reply, + error_handler = self._on_error, + ) + + @misc.log_exception(_moduleLogger) + def _on_found_accounts_reply(self, accountObjectPaths): + for accountObjectPath in accountObjectPaths: + print accountObjectPath + account = self._bus.get_object( + self.ACCOUNT_MGR_NAME, + accountObjectPath, + ) + accountProperties = dbus.Interface( + account, + self.DBUS_PROPERTIES, + ) + accountProperties.Set( + self.ACCOUNT_IFACE_COMPAT, + "SecondaryVCardFields", + ["TEL"], + reply_handler = self._on_field_set, + error_handler = self._on_error, + ) + + @misc.log_exception(_moduleLogger) + def _on_field_set(self): + _moduleLogger.info("SecondaryVCardFields Set") + + @misc.log_exception(_moduleLogger) + def _on_error(self, error): + _moduleLogger.error("%r" % (error, )) + + +def channel_path_to_conn_path(channelObjectPath): + """ + >>> channel_path_to_conn_path("/org/freedesktop/Telepathy/ConnectionManager/theonering/gv/USERNAME/Channel1") + '/org/freedesktop/Telepathy/ConnectionManager/theonering/gv/USERNAME' + """ + return channelObjectPath.rsplit("/", 1)[0] + + +def path_to_service_name(path): + """ + >>> path_to_service_name("/org/freedesktop/Telepathy/ConnectionManager/theonering/gv/USERNAME/Channel1") + 'org.freedesktop.Telepathy.ConnectionManager.theonering.gv.USERNAME' + """ + return ".".join(path[1:].split("/")[0:7]) + + +def cm_from_path(path): + """ + >>> cm_from_path("/org/freedesktop/Telepathy/ConnectionManager/theonering/gv/USERNAME/Channel1") + 'theonering' + """ + return path[1:].split("/")[4] -- 1.7.9.5