This page is meant to be a central repository of decorator code pieces, whether useful or not <wink>. It is NOT a page to discuss decorator syntax!
Feel free to add your suggestions (please use the current decorator syntax @dec)!
Creating Well-Behaved Decorators / "Decorator decorator"
Note: This is only one recipe. Others include inheritance from a standard decorator (link?) and a factory function such as [http://www.phyast.pitt.edu/~micheles/python/decorator.zip Michele Simionato's decorator module] which even preserves signature information.
def simple_decorator(decorator):
    """This decorator can be used to turn simple functions
    into well-behaved decorators, so long as the decorators
    are fairly simple. If a decorator expects a function and
    returns a function (no descriptors), and if it doesn't
    modify function attributes or docstring, then it is
    eligible to use this. Simply apply @simple_decorator to
    your decorator and it will automatically preserve the
    docstring and function attributes of functions to which
    it is applied."""
    def new_decorator(f):
        g = decorator(f)
        g.__name__ = f.__name__
        g.__doc__ = f.__doc__
        g.__dict__.update(f.__dict__)
        return g
    # Now a few lines needed to make simple_decorator itself
    # be a well-behaved decorator.
    new_decorator.__name__ = decorator.__name__
    new_decorator.__doc__ = decorator.__doc__
    new_decorator.__dict__.update(decorator.__dict__)
    return new_decorator

#
# Sample Use:
#
@simple_decorator
def my_simple_logging_decorator(func):
    def you_will_never_see_this_name(*args, **kwargs):
        print 'calling %s' % func.__name__
        return func(*args, **kwargs)
    return you_will_never_see_this_name

@my_simple_logging_decorator
def double(x):
    "Doubles a number"
    return 2*x

assert double.__name__ == 'double'
assert double.__doc__ == 'Doubles a number'
print double(155)
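For many simple decorators the standard library already does this bookkeeping: functools.wraps (available since Python 2.5) copies the name, docstring and attribute dict onto the wrapper for you. A minimal sketch of the logging decorator above written that way (it still does not preserve the signature; for that, see Michele Simionato's decorator module mentioned above):

import functools

def my_simple_logging_decorator(func):
    @functools.wraps(func)   # copies __name__, __doc__ and __dict__ from func
    def wrapper(*args, **kwargs):
        print 'calling %s' % func.__name__
        return func(*args, **kwargs)
    return wrapper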
Property Definition
These decorators provide a readable way to define properties:
import sys

def propget(func):
    locals = sys._getframe(1).f_locals
    name = func.__name__
    prop = locals.get(name)
    if not isinstance(prop, property):
        prop = property(func, doc=func.__doc__)
    else:
        doc = prop.__doc__ or func.__doc__
        prop = property(func, prop.fset, prop.fdel, doc)
    return prop

def propset(func):
    locals = sys._getframe(1).f_locals
    name = func.__name__
    prop = locals.get(name)
    if not isinstance(prop, property):
        prop = property(None, func, doc=func.__doc__)
    else:
        doc = prop.__doc__ or func.__doc__
        prop = property(prop.fget, func, prop.fdel, doc)
    return prop

def propdel(func):
    locals = sys._getframe(1).f_locals
    name = func.__name__
    prop = locals.get(name)
    if not isinstance(prop, property):
        prop = property(None, None, func, doc=func.__doc__)
    else:
        prop = property(prop.fget, prop.fset, func, prop.__doc__)
    return prop

# These can be used like this:

class Example(object):

    @propget
    def myattr(self):
        return self._half * 2

    @propset
    def myattr(self, value):
        self._half = value / 2

    @propdel
    def myattr(self):
        del self._half
Here's a way that doesn't require any new decorators:
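For example (a minimal sketch, relying only on the builtin property; the setter/deleter syntax requires Python 2.6 or later):

class Example(object):

    @property
    def myattr(self):
        "The doubled value"
        return self._half * 2

    @myattr.setter
    def myattr(self, value):
        self._half = value / 2

    @myattr.deleter
    def myattr(self):
        del self._half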
Yet another property decorator:
import sys
import __builtin__

def property(function):
    # collect the fget/fset/fdel functions defined inside the decorated function
    keys = 'fget', 'fset', 'fdel'
    func_locals = {'doc': function.__doc__}
    def probe_func(frame, event, arg):
        if event == 'return':
            locals = frame.f_locals
            func_locals.update(dict((k, locals.get(k)) for k in keys))
            sys.settrace(None)
        return probe_func
    sys.settrace(probe_func)
    function()
    # use the builtin explicitly -- the name 'property' is shadowed by this decorator
    return __builtin__.property(**func_locals)

#====== Example =======================================================

from math import radians, degrees, pi

class Angle(object):
    def __init__(self, rad):
        self._rad = rad

    @property
    def rad():
        '''The angle in radians'''
        def fget(self):
            return self._rad
        def fset(self, angle):
            if isinstance(angle, Angle):
                angle = angle.rad
            self._rad = float(angle)

    @property
    def deg():
        '''The angle in degrees'''
        def fget(self):
            return degrees(self._rad)
        def fset(self, angle):
            if isinstance(angle, Angle):
                angle = angle.deg
            self._rad = radians(angle)
Memoize
Here's a memoizing class.
class memoized(object):
    """Decorator that caches a function's return value each time it is called.
    If called later with the same arguments, the cached value is returned, and
    not re-evaluated.
    """
    def __init__(self, func):
        self.func = func
        self.cache = {}
    def __call__(self, *args):
        try:
            return self.cache[args]
        except KeyError:
            self.cache[args] = value = self.func(*args)
            return value
        except TypeError:
            # uncachable -- for instance, passing a list as an argument.
            # Better to not cache than to blow up entirely.
            return self.func(*args)
    def __repr__(self):
        """Return the function's docstring."""
        return self.func.__doc__

@memoized
def fibonacci(n):
    "Return the nth fibonacci number."
    if n in (0, 1):
        return n
    return fibonacci(n-1) + fibonacci(n-2)

print fibonacci(12)
Retry
Call a function which returns True/False to indicate success or failure. On failure, wait, and try the function again. On repeated failures, wait longer between each successive attempt. If the decorator runs out of attempts, then it gives up and returns False, but you could just as easily raise some exception.
import math
import time

# Retry decorator with exponential backoff
def retry(tries, delay=3, backoff=2):
    """Retries a function or method until it returns True.

    delay sets the initial delay, and backoff sets how much the delay should
    lengthen after each failure. backoff must be greater than 1, or else it
    isn't really a backoff. tries must be at least 0, and delay greater than 0."""

    if backoff <= 1:
        raise ValueError("backoff must be greater than 1")

    tries = math.floor(tries)
    if tries < 0:
        raise ValueError("tries must be 0 or greater")

    if delay <= 0:
        raise ValueError("delay must be greater than 0")

    def deco_retry(f):
        def f_retry(*args, **kwargs):
            mtries, mdelay = tries, delay  # make mutable

            rv = f(*args, **kwargs)  # first attempt
            while mtries > 0:
                if rv == True:  # Done on success
                    return True

                mtries -= 1         # consume an attempt
                time.sleep(mdelay)  # wait...
                mdelay *= backoff   # make future wait longer

                rv = f(*args, **kwargs)  # Try again

            return False  # Ran out of tries :-(

        return f_retry  # true decorator -> decorated function

    return deco_retry  # @retry(arg[, ...]) -> true decorator
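A brief usage sketch (not part of the original recipe; the flaky function is invented for illustration and fails at random):

import random

@retry(5, delay=1, backoff=2)
def flaky():
    # pretend to contact an unreliable service; True means success
    return random.random() > 0.7

if flaky():
    print "eventually succeeded"
else:
    print "gave up after running out of tries"

Note that the decorated function must follow the True/False convention described above; a function that signals failure by raising would need a variant of the decorator that catches exceptions instead.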
Pseudo-currying
class curried(object):
    """
    Decorator that returns a function that keeps returning functions
    until all arguments are supplied; then the original function is
    evaluated.
    """

    def __init__(self, func, *a):
        self.func = func
        self.args = a
    def __call__(self, *a):
        args = self.args + a
        if len(args) < self.func.func_code.co_argcount:
            return curried(self.func, *args)
        else:
            return self.func(*args)


@curried
def add(a, b):
    return a + b

add1 = add(1)

print add1(2)
Controllable DIY debug
(Other hooks could be similarly added. Docstrings and exceptions are left out for simplicity of demonstration.)
import sys

WHAT_TO_DEBUG = set(['io', 'core'])  # change to what you need

class debug:
    """ Decorator that helps control which aspects of a program to debug
    on a per-function basis. Aspects are provided as a list of arguments.
    It DOESN'T slow down functions which aren't supposed to be debugged.
    """
    def __init__(self, aspects=None):
        self.aspects = set(aspects)

    def __call__(self, f):
        if self.aspects & WHAT_TO_DEBUG:
            def newf(*args, **kwds):
                print >> sys.stderr, f.func_name, args, kwds
                f_result = f(*args, **kwds)
                print >> sys.stderr, f.func_name, "returned", f_result
                return f_result
            newf.__doc__ = f.__doc__
            return newf
        else:
            return f

@debug(['io'])
def prn(x):
    print x

@debug(['core'])
def mult(x, y):
    return x * y

prn(mult(2, 2))
Easily adding methods to a class instance
Credits to John Roth.
class Foo:
    def __init__(self):
        self.x = 42

foo = Foo()

def addto(instance):
    def decorator(f):
        import new
        f = new.instancemethod(f, instance, instance.__class__)
        setattr(instance, f.func_name, f)
        return f
    return decorator

@addto(foo)
def print_x(self):
    print self.x

# foo.print_x() would print "42"
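The new module is deprecated; here is a sketch of the same recipe using types.MethodType instead (my adaptation, still Python 2, binding the function to the single instance):

import types

def addto(instance):
    def decorator(f):
        # bind f to this one instance and attach it under its own name
        f = types.MethodType(f, instance, instance.__class__)
        setattr(instance, f.func_name, f)
        return f
    return decorator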
Counting function calls
class countcalls(object):
    "Decorator that keeps track of the number of times a function is called."

    __instances = {}

    def __init__(self, f):
        self.__f = f
        self.__numcalls = 0
        countcalls.__instances[f] = self

    def __call__(self, *args, **kwargs):
        self.__numcalls += 1
        return self.__f(*args, **kwargs)

    @staticmethod
    def count(f):
        "Return the number of times the function f was called."
        return countcalls.__instances[f].__numcalls

    @staticmethod
    def counts():
        "Return a dict of {function: # of calls} for all registered functions."
        return dict([(f, countcalls.count(f)) for f in countcalls.__instances])
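A brief usage sketch (not part of the original recipe). Note that count() expects the original function object, which the decoration replaces, so the totals are easiest to read via counts():

@countcalls
def hello():
    print "Hello, world"

hello()
hello()

for f, n in countcalls.counts().items():
    print f.__name__, "was called", n, "times"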
Generating Deprecation Warnings
import warnings

def deprecated(func):
    """This is a decorator which can be used to mark functions
    as deprecated. It will result in a warning being emitted
    when the function is used."""
    def new_func(*args, **kwargs):
        warnings.warn("Call to deprecated function %s." % func.__name__,
                      category=DeprecationWarning)
        return func(*args, **kwargs)
    new_func.__name__ = func.__name__
    new_func.__doc__ = func.__doc__
    new_func.__dict__.update(func.__dict__)
    return new_func

# === Examples of use ===

@deprecated
def some_old_function(x, y):
    return x + y

class SomeClass:
    @deprecated
    def some_old_method(self, x, y):
        return x + y
Enable/Disable Decorators
def unchanged(func):
    "This decorator doesn't add any behavior"
    return func

def disabled(func):
    "This decorator disables the provided function, and does nothing"
    def empty_func(*args, **kargs):
        pass
    return empty_func

# define this as equivalent to unchanged, for nice symmetry with disabled
enabled = unchanged

#
# Sample use
#

global_enable_flag = True

state = enabled if global_enable_flag else disabled
@state
def special_function_foo():
    print "function was enabled"
Easy Dump of Function Arguments
def dump_args(func):
    "This decorator dumps out the arguments passed to a function before calling it"
    argnames = func.func_code.co_varnames[:func.func_code.co_argcount]
    fname = func.func_name
    def echo_func(*args, **kwargs):
        print fname, ":", ', '.join(
            '%s=%r' % entry
            for entry in zip(argnames, args) + kwargs.items())
        return func(*args, **kwargs)
    return echo_func

@dump_args
def f1(a, b, c):
    print a + b + c

f1(1, 2, 3)
Pre-/Post-Conditions
1 """
2 Provide pre-/postconditions as function decorators.
3
4 Example usage:
5
6 >>> def in_ge20(inval):
7 ... assert inval >= 20, 'Input value < 20'
8 ...
9 >>> def out_lt30(retval, inval):
10 ... assert retval < 30, 'Return value >= 30'
11 ...
12 >>> @precondition(in_ge20)
13 ... @postcondition(out_lt30)
14 ... def inc(value):
15 ... return value + 1
16 ...
17 >>> inc(5)
18 Traceback (most recent call last):
19 ...
20 AssertionError: Input value < 20
21 >>> inc(29)
22 Traceback (most recent call last):
23 ...
24 AssertionError: Return value >= 30
25 >>> inc(20)
26 21
27
28 You can define as many pre-/postconditions for a function as you
29 like. It is also possible to specify both types of conditions at once:
30
31 >>> @conditions(in_ge20, out_lt30)
32 ... def add1(value):
33 ... return value + 1
34 ...
35 >>> add1(5)
36 Traceback (most recent call last):
37 ...
38 AssertionError: Input value < 20
39
40 An interesting feature is the ability to prevent the creation of
41 pre-/postconditions at function definition time. This makes it
42 possible to use conditions for debugging and then switch them off for
43 distribution.
44
45 >>> debug = False
46 >>> @precondition(in_ge20, debug)
47 ... def dec(value):
48 ... return value - 1
49 ...
50 >>> dec(5)
51 4
52 """
53
54 __all__ = ['precondition', 'postcondition', 'conditions']
55
56 DEFAULT_ON = True
57
58 def precondition(precondition, use_conditions=DEFAULT_ON):
59 return conditions(precondition, None, use_conditions)
60
61 def postcondition(postcondition, use_conditions=DEFAULT_ON):
62 return conditions(None, postcondition, use_conditions)
63
64 class conditions(object):
65 __slots__ = ('__precondition', '__postcondition')
66
67 def __init__(self, pre, post, use_conditions=DEFAULT_ON):
68 if not use_conditions:
69 pre, post = None, None
70
71 self.__precondition = pre
72 self.__postcondition = post
73
74 def __call__(self, function):
75 # combine recursive wrappers (@precondition + @postcondition == @conditions)
76 pres = set((self.__precondition,))
77 posts = set((self.__postcondition,))
78
79 # unwrap function, collect distinct pre-/post conditions
80 while type(function) is FunctionWrapper:
81 pres.add(function._pre)
82 posts.add(function._post)
83 function = function._func
84
85 # filter out None conditions and build pairs of pre- and postconditions
86 conditions = map(None, filter(None, pres), filter(None, posts))
87
88 # add a wrapper for each pair (note that 'conditions' may be empty)
89 for pre, post in conditions:
90 function = FunctionWrapper(pre, post, function)
91
92 return function
93
94 class FunctionWrapper(object):
95 def __init__(self, precondition, postcondition, function):
96 self._pre = precondition
97 self._post = postcondition
98 self._func = function
99
100 def __call__(self, *args, **kwargs):
101 precondition = self._pre
102 postcondition = self._post
103
104 if precondition:
105 precondition(*args, **kwargs)
106 result = self._func(*args, **kwargs)
107 if postcondition:
108 postcondition(result, *args, **kwargs)
109 return result
110
111 def __test():
112 import doctest
113 doctest.testmod()
114
115 if __name__ == "__main__":
116 __test()
Profiling/Coverage Analysis
The code and examples are a bit longish, so I'll include a link instead: http://mg.pov.lt/blog/profiling.html
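The linked article has the full treatment; as a rough idea of the shape such a decorator takes, here is a minimal sketch (my own, not the linked code) using the standard cProfile module:

import cProfile

def profiled(func):
    "Print a cProfile report for every call to the decorated function."
    def wrapper(*args, **kwargs):
        prof = cProfile.Profile()
        try:
            return prof.runcall(func, *args, **kwargs)
        finally:
            # dump the stats even if the wrapped function raised
            prof.print_stats(sort='cumulative')
    wrapper.__name__ = func.__name__
    wrapper.__doc__ = func.__doc__
    return wrapper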
Line Tracing Individual Functions
I cobbled this together from the trace module. It allows you to decorate individual functions so their lines are traced. I think it works out to be a slightly smaller hammer than running the trace module and trying to pare back what it traces using exclusions.
import sys
import os
import linecache

def trace(f):
    def globaltrace(frame, why, arg):
        if why == "call":
            return localtrace
        return None

    def localtrace(frame, why, arg):
        if why == "line":
            # record the file name and line number of every trace
            filename = frame.f_code.co_filename
            lineno = frame.f_lineno

            bname = os.path.basename(filename)
            print "%s(%d): %s" % (bname, lineno,
                                  linecache.getline(filename, lineno)),
        return localtrace

    def _f(*args, **kwds):
        sys.settrace(globaltrace)
        result = f(*args, **kwds)
        sys.settrace(None)
        return result

    return _f
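A short usage sketch (the function is invented for illustration); every executed line of sum_to is echoed with its file name and line number:

@trace
def sum_to(n):
    total = 0
    for i in range(n):
        total += i
    return total

print sum_to(3)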
Synchronization
Synchronize two (or more) functions on a given lock.
def synchronized(lock):
    """ Synchronization decorator. """

    def wrap(f):
        def new_function(*args, **kw):
            lock.acquire()
            try:
                return f(*args, **kw)
            finally:
                lock.release()
        return new_function
    return wrap

# Example usage:

from threading import Lock
my_lock = Lock()

@synchronized(my_lock)
def critical1(*args):
    # Interesting stuff goes here.
    pass

@synchronized(my_lock)
def critical2(*args):
    # Other interesting stuff goes here.
    pass
Type Enforcement (accepts/returns)
Provides various degrees of type enforcement for function parameters and return values.
1 """
2 One of three degrees of enforcement may be specified by passing
3 the 'debug' keyword argument to the decorator:
4 0 -- NONE: No type-checking. Decorators disabled.
5 1 -- MEDIUM: Print warning message to stderr. (Default)
6 2 -- STRONG: Raise TypeError with message.
7 If 'debug' is not passed to the decorator, the default level is used.
8
9 Example usage:
10 >>> NONE, MEDIUM, STRONG = 0, 1, 2
11 >>>
12 >>> @accepts(int, int, int)
13 ... @returns(float)
14 ... def average(x, y, z):
15 ... return (x + y + z) / 2
16 ...
17 >>> average(5.5, 10, 15.0)
18 TypeWarning: 'average' method accepts (int, int, int), but was given
19 (float, int, float)
20 15.25
21 >>> average(5, 10, 15)
22 TypeWarning: 'average' method returns (float), but result is (int)
23 15
24
25 Needed to cast params as floats in function def (or simply divide by 2.0).
26
27 >>> TYPE_CHECK = STRONG
28 >>> @accepts(int, debug=TYPE_CHECK)
29 ... @returns(int, debug=TYPE_CHECK)
30 ... def fib(n):
31 ... if n in (0, 1): return n
32 ... return fib(n-1) + fib(n-2)
33 ...
34 >>> fib(5.3)
35 Traceback (most recent call last):
36 ...
37 TypeError: 'fib' method accepts (int), but was given (float)
38
39 """
40 import sys
41
42 def accepts(*types, **kw):
43 """ Function decorator. Checks that inputs given to decorated function
44 are of the expected type.
45
46 Parameters:
47 types -- The expected types of the inputs to the decorated function.
48 Must specify type for each parameter.
49 kw -- Optional specification of 'debug' level (this is the only valid
50 keyword argument, no other should be given).
51 debug = ( 0 | 1 | 2 )
52
53 """
54 if not kw:
55 # default level: MEDIUM
56 debug = 1
57 else:
58 debug = kw['debug']
59 try:
60 def decorator(f):
61 def newf(*args):
62 if debug == 0:
63 return f(*args)
64 assert len(args) == len(types)
65 argtypes = tuple(map(type, args))
66 if argtypes != types:
67 msg = info(f.__name__, types, argtypes, 0)
68 if debug == 1:
69 print >> sys.stderr, 'TypeWarning: ', msg
70 elif debug == 2:
71 raise TypeError, msg
72 return f(*args)
73 newf.__name__ = f.__name__
74 return newf
75 return decorator
76 except KeyError, key:
77 raise KeyError, key + "is not a valid keyword argument"
78 except TypeError, msg:
79 raise TypeError, msg
80
81
82 def returns(ret_type, **kw):
83 """ Function decorator. Checks that return value of decorated function
84 is of the expected type.
85
86 Parameters:
87 ret_type -- The expected type of the decorated function's return value.
88 Must specify type for each parameter.
89 kw -- Optional specification of 'debug' level (this is the only valid
90 keyword argument, no other should be given).
91 debug=(0 | 1 | 2)
92
93 """
94 try:
95 if not kw:
96 # default level: MEDIUM
97 debug = 1
98 else:
99 debug = kw['debug']
100 def decorator(f):
101 def newf(*args):
102 result = f(*args)
103 if debug == 0:
104 return result
105 res_type = type(result)
106 if res_type != ret_type:
107 msg = info(f.__name__, (ret_type,), (res_type,), 1)
108 if debug == 1:
109 print >> sys.stderr, 'TypeWarning: ', msg
110 elif debug == 2:
111 raise TypeError, msg
112 return result
113 newf.__name__ = f.__name__
114 return newf
115 return decorator
116 except KeyError, key:
117 raise KeyError, key + "is not a valid keyword argument"
118 except TypeError, msg:
119 raise TypeError, msg
120
121 def info(fname, expected, actual, flag):
122 """ Convenience function returns nicely formatted error/warning msg. """
123 format = lambda types: ', '.join([str(t).split("'")[1] for t in types])
124 expected, actual = format(expected), format(actual)
125 msg = "'%s' method " % fname \
126 + ("accepts", "returns")[flag] + " (%s), but " % expected\
127 + ("was given", "result is")[flag] + " (%s)" % actual
128 return msg
CGI method wrapper
Handles HTML boilerplate at top and bottom of pages returned from CGI methods. Works with the cgi module. Now your request handlers can just output the interesting HTML, and let the decorator deal with all the top and bottom clutter.
(Note: the exception handler eats all exceptions, which in CGI is no big loss, since the program runs in its separate subprocess. At least here, the exception contents will be written to the output page.)
class CGImethod(object):
    def __init__(self, title):
        self.title = title

    def __call__(self, fn):
        def wrapped_fn(*args):
            print "Content-Type: text/html\n\n"
            print "<HTML>"
            print "<HEAD><TITLE>%s</TITLE></HEAD>" % self.title
            print "<BODY>"
            try:
                fn(*args)
            except Exception, e:
                print
                print e
                print
            print "</BODY></HTML>"

        return wrapped_fn

@CGImethod("Hello with Decorator")
def say_hello():
    print '<h1>Hello from CGI-Land</h1>'
State Machine Implementation
This example uses Decorators to facilitate the implementation of a state machine in Python. Decorators are used to specify which methods are the event handlers for the class. In this example, actions are associated with the transitions, but it is possible with a little consideration to associate actions with states instead.
The example defines a class, MyMachine, that is a state machine. Multiple instances of the class may be created, each maintaining its own state. A class may also have multiple state variables; here I've used gstate and tstate.
The code in the imported statedefn file gets a bit hairy, but you may not need to delve into it for your application.
# State Machine example Program
from statedefn import *

class MyMachine(object):
    # Create a StateTable object for each state you need to keep track of.
    # The name passed to the constructor becomes a StateVar member of the current class,
    # i.e. if my_obj is a MyMachine object, my_obj.gstate maintains the current gstate.
    gstate = StateTable("gstate")
    tstate = StateTable("turtle")

    def __init__(self, name):
        # must call the init method of the class's StateTable objects to initialize the state variables
        self.gstate.initialize(self)
        self.tstate.initialize(self)
        self.mname = name
        self.a_count = 0
        self.b_count = 0
        self.c_count = 0

    # Decorate the Event Handler virtual functions -- note the gstate parameter
    @event_handler(gstate)
    def event_a(self): pass

    @event_handler(gstate)
    def event_b(self): pass

    @event_handler(gstate)
    def event_c(self, val): pass

    @event_handler(tstate)
    def toggle(self): pass

    # define methods to handle events.
    def _event_a_hdlr1(self):
        print "State 1, event A"
        self.a_count += 1

    def _event_b_hdlr1(self):
        print "State 1, event B"
        self.b_count += 1

    def _event_c_hdlr1(self, val):
        print "State 1, event C"
        self.c_count += 3*val

    def _event_a_hdlr2(self):
        print "State 2, event A"
        self.a_count += 10
        # here we brute force the tstate to on; leave & enter functions are called if the state changes.
        # turtle is the object's state variable for tstate, comes from constructor argument
        self.turtle.set_state(self, self._t_on)

    def _event_b_hdlr2(self):
        print "State 2, event B"
        self.b_count += 10

    def _event_c_hdlr2(self, val):
        print "State 2, event C"
        self.c_count += 2*val

    def _event_a_hdlr3(self):
        self.a_count += 100
        print "State 3, event A"

    def _event_b_hdlr3(self):
        print "State 3, event B"
        self.b_count += 100
        # we decide here we want to go to state 2, overrides spec in state table below.
        # transition to next_state is made after the method exits.
        self.gstate.next_state = self._state2

    def _event_c_hdlr3(self, val):
        print "State 3, event C"
        self.c_count += 5*val

    # Associate the handlers with a state. The first argument is the name of the state.
    # The second is a tuple of methods, one for each event_handler-decorated function of gstate;
    # their order corresponds to the order in which the event handlers were declared.
    # The third argument is a list of the next states.
    # The first state created becomes the initial state.
    _state1 = gstate.state("One", (_event_a_hdlr1, _event_b_hdlr1, _event_c_hdlr1),
                           ("Two", "Three", None))
    _state2 = gstate.state("Two", (_event_a_hdlr2, _event_b_hdlr2, _event_c_hdlr2),
                           ("Three", None, "One"))
    _state3 = gstate.state("Three", (_event_a_hdlr3, _event_b_hdlr3, _event_c_hdlr3),
                           (None, "One", "Two"))

    # Declare a function that will be called when entering a new gstate.
    # Can also declare a leave function using @on_leave_function(gstate)
    @on_enter_function(gstate)
    def _enter_gstate(self):
        print "entering state ", self.gstate.name(), "of ", self.mname

    @on_leave_function(tstate)
    def _leave_tstate(self):
        print "leaving state ", self.turtle.name(), "of ", self.mname

    def _toggle_on(self):
        print "Toggle On"

    def _toggle_off(self):
        print "Toggle Off"

    _t_off = tstate.state("Off", [_toggle_on], ["On"])
    _t_on = tstate.state("On", [_toggle_off], ["Off"])

def main():
    big_machine = MyMachine("big")
    lil_machine = MyMachine("lil")
    big_machine.event_a()
    lil_machine.event_a()
    big_machine.event_a()
    lil_machine.event_a()
    big_machine.event_b()
    lil_machine.event_b()
    big_machine.event_c(4)
    lil_machine.event_c(2)
    big_machine.event_c(1)
    lil_machine.event_c(3)
    big_machine.event_b()
    lil_machine.event_b()
    big_machine.event_a()
    lil_machine.event_a()
    big_machine.event_a()
    big_machine.toggle()
    big_machine.toggle()
    big_machine.toggle()
    lil_machine.event_a()
    big_machine.event_b()
    lil_machine.event_b()
    big_machine.event_c(3)
    big_machine.event_a()
    lil_machine.event_c(2)
    lil_machine.event_a()
    big_machine.event_b()
    lil_machine.event_b()
    big_machine.event_c(7)
    lil_machine.event_c(1)
    print "Event A count ", big_machine.a_count
    print "Event B count ", big_machine.b_count
    print "Event C count ", big_machine.c_count
    print "LilMachine C count ", lil_machine.c_count

main()
And now the imported statedefn.py
#
# Support for State Machines. ref - Design Patterns by GoF
# Many of the methods in these classes get called behind the scenes.
#
# Notable exceptions are methods of the StateVar class.
#
# See example programs for how this module is intended to be used.
#
import exceptions

class StateMachineError(exceptions.Exception):
    def __init__(self, args=None):
        self.args = args

class StateVar(object):
    def __init__(self, initial_state):
        self._current_state = initial_state
        self.next_state = initial_state  # publicly settable in an event handling routine.

    def set_state(self, owner, new_state):
        ''' Forces a state change to new_state '''
        self.next_state = new_state
        self.__to_next_state(owner)

    def __to_next_state(self, owner):
        ''' The low-level state change function which calls leave state &
        enter state functions as needed when the state transitions. '''
        if not (self.next_state is self._current_state):
            if hasattr(self._current_state, "leave"):
                leave = self._current_state.leave
                leave(owner)
            elif hasattr(self, "leave"):
                self.leave(owner)
            self._current_state = self.next_state
            if hasattr(self._current_state, "enter"):
                enter = self._current_state.enter
                enter(owner)
            elif hasattr(self, "enter"):
                self.enter(owner)

    def __fctn(self, func_name):
        ''' Returns the owning class's method for handling an event for the
        current state. This method is not for public consumption. '''
        vf = self._current_state.get_fe(func_name)
        return vf

    def name(self):
        ''' Returns the current state name. '''
        return self._current_state.name

class STState(object):
    def __init__(self, state_name):
        self.name = state_name
        self.fctn_dict = {}

    def set_events(self, event_list, event_hdlr_list, next_states):
        dictionary = self.fctn_dict
        if not next_states:
            def set_row(event, method):
                dictionary[event] = [method, None]
            map(set_row, event_list, event_hdlr_list)
        else:
            def set_row2(event, method, next_state):
                dictionary[event] = [method, next_state]
            map(set_row2, event_list, event_hdlr_list, next_states)
        self.fctn_dict = dictionary

    def get_fe(self, fctn_name):
        return self.fctn_dict[fctn_name]

    def map_next_states(self, state_dict):
        ''' Changes second dict value from name of state to actual state '''
        for de in self.fctn_dict.values():
            next_state_name = de[1]
            if next_state_name:
                if state_dict.has_key(next_state_name):
                    de[1] = state_dict[next_state_name]
                else:
                    raise StateMachineError('Invalid Name for next state: %s'
                                            % next_state_name)

class StateTable(object):
    ''' Magical class to define a state machine, with the help of several
    decorator functions which follow. '''
    def __init__(self, declname):
        self.machine_var = declname
        self._initial_state = None
        self._state_list = {}
        self._event_list = []
        self.need_initialize = 1

    def initialize(self, parent):
        ''' Initializes the parent class's state variable for this StateTable
        class. Must call this method in the parent object's __init__ method.
        You can have multiple state machines within a parent class; call this
        method for each one. '''
        statevar = StateVar(self._initial_state)
        parent.__dict__[self.machine_var] = statevar
        if hasattr(self, "enter"):
            statevar.enter = self.enter
        if hasattr(self, "leave"):
            statevar.leave = self.leave
        # Magic happens here - in the 'next state' table, translate names into state objects.
        if self.need_initialize:
            for xstate in list(self._state_list.values()):
                xstate.map_next_states(self._state_list)
            self.need_initialize = 0

    def def_state(self, event_hdlr_list, name):
        ''' This is used to define a state. The event handler list is a list
        of functions that are called for corresponding events. name is the
        name of the state. '''
        state_table_row = STState(name)
        if len(event_hdlr_list) != len(self._event_list):
            raise StateMachineError('Mismatch between number of event handlers and the methods specified for the state.')
        state_table_row.set_events(self._event_list, event_hdlr_list, None)
        if self._initial_state is None:
            self._initial_state = state_table_row
        self._state_list[name] = state_table_row
        return state_table_row

    def state(self, name, event_hdlr_list, next_states):
        state_table_row = STState(name)
        if len(event_hdlr_list) != len(self._event_list):
            raise StateMachineError('Mismatch between number of event handlers and the methods specified for the state.')
        if (not next_states is None) and len(next_states) != len(self._event_list):
            raise StateMachineError('Mismatch between number of event handlers and the next states specified for the state.')
        state_table_row.set_events(self._event_list, event_hdlr_list, next_states)
        if self._initial_state is None:
            self._initial_state = state_table_row
        self._state_list[name] = state_table_row
        return state_table_row

    def __add_ev_hdlr(self, func_name):
        ''' Informs the class of an event handler to be added. We just need
        the name here. The function name will later be associated with one of
        the functions in a list when a state is defined. '''
        self._event_list.append(func_name)

# Decorator functions ...
def event_handler(state_class):
    ''' Declare a method that handles a type of event. '''
    def wrapper(func):
        state_class._StateTable__add_ev_hdlr(func.__name__)
        def obj_call(self, *args, **keywords):
            state_var = self.__dict__[state_class.machine_var]
            funky, next_state = state_var._StateVar__fctn(func.__name__)
            if not next_state is None:
                state_var.next_state = next_state
            rv = funky(self, *args, **keywords)
            state_var._StateVar__to_next_state(self)
            return rv
        return obj_call
    return wrapper

def on_enter_function(state_class):
    ''' Declare that this method should be called whenever a new state is entered. '''
    def wrapper(func):
        state_class.enter = func
        return func
    return wrapper

def on_leave_function(state_class):
    ''' Declare that this method should be called whenever leaving a state. '''
    def wrapper(func):
        state_class.leave = func
        return func
    return wrapper
C++/Java-keyword-like function decorators
@abstractMethod, @deprecatedMethod, @privateMethod, @protectedMethod, @raises, @parameterTypes, @returnType
The annotations provide run-time type checking and an alternative way to document code.
The code and documentation are long, so I offer a link: http://fightingquaker.com/pyanno/
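As a rough illustration only (this is not the pyanno API, just a sketch of the general idea behind such annotations):

def returnType(expected_type):
    "Illustrative sketch: check the wrapped function's return type at run time."
    def decorator(func):
        def wrapper(*args, **kwargs):
            result = func(*args, **kwargs)
            if not isinstance(result, expected_type):
                raise TypeError("%s should return %s, but returned %s"
                                % (func.__name__, expected_type.__name__,
                                   type(result).__name__))
            return result
        wrapper.__name__ = func.__name__
        wrapper.__doc__ = func.__doc__
        return wrapper
    return decorator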
Different Decorator Forms
There are operational differences between:
- Decorator with no arguments
- Decorator with arguments
- Decorator with wrapped class instance awareness
This example demonstrates the operational differences between the three using a skit taken from Monty Python's Flying Circus, Episode 22: Bruces.
from sys import stdout, stderr
from pdb import set_trace as bp

class DecoTrace(object):
    '''
    Decorator class with no arguments

    This can only be used for functions or methods where the instance
    is not necessary
    '''

    def __init__(self, f):
        self.f = f

    def _showargs(self, *fargs, **kw):
        print >> stderr, 'T: enter %s with args=%s, kw=%s' % (self.f.__name__, str(fargs), str(kw))

    def _aftercall(self, status):
        print >> stderr, 'T: exit %s with status=%s' % (self.f.__name__, str(status))

    def __call__(self, *fargs, **kw):
        '''pass just function arguments to wrapped function'''
        self._showargs(*fargs, **kw)
        ret = self.f(*fargs, **kw)
        self._aftercall(ret)
        return ret

    def __repr__(self):
        return self.f.func_name

class DecoTraceWithArgs(object):
    '''decorator class with ARGUMENTS

    This can be used for unbounded functions and methods. If this wraps a
    class instance, then extract it and pass to the wrapped method as the
    first arg.
    '''

    def __init__(self, *dec_args, **dec_kw):
        '''The decorator arguments are passed here. Save them for runtime.'''
        self.dec_args = dec_args
        self.dec_kw = dec_kw

        self.label = dec_kw.get('label', 'T')
        self.fid = dec_kw.get('stream', stderr)

    def _showargs(self, *fargs, **kw):
        print >> self.fid, \
              '%s: enter %s with args=%s, kw=%s' % (self.label, self.f.__name__, str(fargs), str(kw))
        print >> self.fid, \
              '%s: passing decorator args=%s, kw=%s' % (self.label, str(self.dec_args), str(self.dec_kw))

    def _aftercall(self, status):
        print >> self.fid, '%s: exit %s with status=%s' % (self.label, self.f.__name__, str(status))

    def _showinstance(self, instance):
        print >> self.fid, '%s: instance=%s' % (self.label, instance)

    def __call__(self, f):
        def wrapper(*fargs, **kw):
            '''
            Combine decorator arguments and function arguments and pass to wrapped
            class instance-aware function/method.

            Note: the first argument cannot be "self" because we get a parse error
            "takes at least 1 argument" unless the instance is actually included in
            the argument list, which is redundant. If this wraps a class instance,
            the "self" will be the first argument.
            '''
            self._showargs(*fargs, **kw)

            # merge decorator keywords into the kw argument list
            kw.update(self.dec_kw)

            # Does this wrap a class instance?
            if fargs and getattr(fargs[0], '__class__', None):
                # pull out the instance and combine function and
                # decorator args
                instance, fargs = fargs[0], fargs[1:] + self.dec_args
                self._showinstance(instance)

                # call the method
                ret = f(instance, *fargs, **kw)
            else:
                # just send in the given args and kw
                ret = f(*(fargs + self.dec_args), **kw)

            self._aftercall(ret)
            return ret

        # Save wrapped function reference
        self.f = f
        wrapper.__name__ = f.__name__
        wrapper.__dict__.update(f.__dict__)
        wrapper.__doc__ = f.__doc__
        return wrapper

@DecoTrace
def FirstBruce(*fargs, **kwargs):
    'Simple function using simple decorator'
    if fargs and fargs[0]:
        print fargs[0]

@DecoTraceWithArgs(name="Second Bruce", standardline="Goodday, Bruce!")
def SecondBruce(*fargs, **kwargs):
    'Simple function using decorator with arguments'
    print '%s:' % kwargs.get('name', 'Unknown Bruce'),
    if fargs and fargs[0]:
        print fargs[0]
    else:
        print kwargs.get('standardline', None)

class Bruce(object):
    'Simple class'

    def __init__(self, id):
        self.id = id

    def __str__(self):
        return self.id

    def __repr__(self):
        return 'Bruce'

    @DecoTraceWithArgs(label="Trace a class", standardline="How are yer Bruce?", stream=stdout)
    def talk(self, *fargs, **kwargs):
        'Simple function using decorator with arguments'
        print '%s:' % self,
        if fargs and fargs[0]:
            print fargs[0]
        else:
            print kwargs.get('standardline', None)

ThirdBruce = Bruce('Third Bruce')

SecondBruce()
FirstBruce("First Bruce: Oh, Hello Bruce!")
ThirdBruce.talk()
FirstBruce("First Bruce: Bit crook, Bruce.")
SecondBruce("Where's Bruce?")
FirstBruce("First Bruce: He's not here, Bruce")
ThirdBruce.talk("Blimey, s'hot in here, Bruce.")
FirstBruce("First Bruce: S'hot enough to boil a monkey's bum!")
SecondBruce("That's a strange expression, Bruce.")
FirstBruce("First Bruce: Well Bruce, I heard the Prime Minister use it. S'hot enough to boil a monkey's bum in 'ere, your Majesty,' he said and she smiled quietly to herself.")
ThirdBruce.talk("She's a good Sheila, Bruce and not at all stuck up.")
Unimplemented function replacement
Allows you to test unimplemented code in a development environment by specifying a default return value as an argument to the decorator (or you can leave it off to have None returned).
# Annotation wrapper annotation method
def unimplemented(defaultval):
    if type(defaultval) == type(unimplemented):
        # used as a bare @unimplemented: replace the function outright
        return lambda: None
    else:
        # Actual annotation
        def unimp_wrapper(func):
            # What we replace the function with
            def wrapper(*arg):
                return defaultval
            return wrapper
        return unimp_wrapper
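A brief usage sketch (function names invented for illustration):

@unimplemented("fake data")
def fetch_records(query):
    pass   # real implementation not written yet

@unimplemented
def flush_cache():
    pass

print fetch_records("SELECT ...")   # -> 'fake data'
print flush_cache()                 # -> None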
Redirects stdout printing to Python standard logging.
import logging
import sys

class LogPrinter:
    """LogPrinter class which serves to emulate a file object and logs
       whatever it gets sent to a Logger object at the INFO level."""
    def __init__(self):
        """Grabs the specific logger to use for logprinting."""
        self.ilogger = logging.getLogger('logprinter')
        il = self.ilogger
        logging.basicConfig()
        il.setLevel(logging.INFO)

    def write(self, text):
        """Logs written output to a specific logger"""
        self.ilogger.info(text)

def logprintinfo(func):
    """Wraps a method so that any calls made to print get logged instead"""
    def pwrapper(*arg):
        stdobak = sys.stdout
        lpinstance = LogPrinter()
        sys.stdout = lpinstance
        try:
            return func(*arg)
        finally:
            # restore stdout even if the wrapped function raises
            sys.stdout = stdobak
    return pwrapper
The try/finally in pwrapper ensures that sys.stdout is restored even if the wrapped function raises.
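A quick usage sketch (the function name is made up for illustration); the print output ends up in the 'logprinter' logger rather than on stdout:

@logprintinfo
def do_work():
    print "this line is logged, not printed"

do_work()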