I have this very common boilerplate repeated throughout my code to set up logging:
def has_host_running(self):
    log = CustomLogger.action_logger(name=sys._getframe().f_code.co_name, **self.menvargs)
    result = self.bash_query.check_is_server_available(log)
    self.results[sys._getframe().f_code.co_name] = result
    log.debug('result: {}'.format(result))
    return result
I'm looking for a DRY way to implement this behavior.
The key is that I need to be able to reference/call a log statement from within the function and any child function calls.
*Edit 2*
The most elegant (admittedly sloppy) solution I could get to work, heavily based on https://wiki.python.org/moin/PythonDecoratorLibrary#Controllable_DIY_debug:
class ActionLog:
    def __init__(self):
        pass

    def __call__(self, f):
        log = self.get_actionlogger(name=f.func_name)

        def newf(log, *args, **kwds):
            # pre-function call actions:
            log.debug('Start.')
            log.debug('  info: params= {args}, {kwds}'.format(args=args, kwds=kwds))
            # function call
            f_result = f(log, *args, **kwds)
            # post-function call actions:
            log.debug('  info: result= {result}'.format(result=f_result))
            log.debug('Complete.')
            return f_result

        # changes to be made to the returned function
        newf.__doc__ = f.__doc__
        # note: this calls the wrapped function immediately and returns its
        # result rather than returning a wrapper; see the caveats below.
        return newf(log)
    def get_actionlogger(self, name, **kwargs):
        import logging
        from Helper import ConfigManager

        logname = 'action.{func_name}'.format(func_name=name)
        logger = logging.getLogger(logname)
        # value stored in ini file; either DEBUG or ERROR right now.
        # todo: store actual logging_level
        # todo: store an array/dict for log_name in .ini
        #       this will allow multiple parameters to be stored within a single entry, ex:
        #       action.check_stuff: logging_level=DEBUG,handler_stream=TRUE,handler_file=stuff.log,formatter='{name} - {message}'
        conf_logging_level = ConfigManager('config.ini').get_section_dict('CustomLogging_Level').get(logname, 'DEBUG')
        logging_level = logging.DEBUG
        if conf_logging_level == 'DEBUG':
            logging_level = logging.DEBUG
        if conf_logging_level == 'ERROR':
            logging_level = logging.ERROR
        logger.setLevel(logging_level)
        # very hacky:
        # while logging.getLogger is a singleton, adding the handler is not.
        # without this check, this code will add duplicate handlers.
        # currently will not edit/replace the existing handler, and will not
        # allow another handler to be added after the first. the main issue is
        # that I can't figure out how to determine labels/names within logger.handlers.
        # todo: properly label handler
        # todo: check for existing labels & types (file, stream, etc.)
        if len(logger.handlers) == 0:
            ch = logging.StreamHandler()
            ch.setLevel(logging_level)
            ch.set_name(logname)  # was `ch.set_name = logname`, which overwrote the method
            # create formatter
            formatter = logging.Formatter(' %(name)s - %(message)s')
            ch.setFormatter(formatter)
            logger.addHandler(ch)
        return logger
@ActionLog()
def check_stuff(log, *args, **kwds):
    result = True
    log.debug('  info: text call from within function.')
    return result

if check_stuff:
    print 'check_stuff is true.'
So it works with the one parameter, log, being passed into the function; it does not work if the function does not take the log parameter. I'm not sure how to handle further parameters... likely with *args or **kwargs, but this solution doesn't handle that.
Apologies for the code formatting... I can't seem to get the class decorator in the same block as the decorated function and the function call.
*v3.0*
v2 had a problem with multiple arguments. I solved that and streamlined v3 quite a bit.
def ActionLog(wrapped):
    def _wrapper(*args, **kwargs):
        log = CustomLogger.action_logger(wrapped.func_name)
        newargs = list(args)
        newargs.append(log)
        result = wrapped(*newargs, **kwargs)
        if result is None:
            suffix = ''
        else:
            suffix = '\tResult: {}'.format(result)
        log.debug('Complete.{}'.format(suffix))
        # return the actual result; an earlier draft returned the
        # formatted log string by mistake
        return result
    return _wrapper
This works better and has replaced most of my boilerplate logging calls for actions that take a single argument.
I'm still having problems with named args vs kwargs. I'd like to just pass args through and add my custom items to kwargs, but that had a few issues; one way to handle arbitrary signatures is sketched below.
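One direction I'm considering for arbitrary signatures (a sketch only, untested; it assumes the wrapped function declares a log keyword argument and that CustomLogger is available as above) is to inject the logger through kwargs instead of rebuilding the positional list:

import functools

def ActionLog(wrapped):
    @functools.wraps(wrapped)
    def _wrapper(*args, **kwargs):
        # inject the logger under a reserved keyword;
        # positional arguments pass through untouched
        log = CustomLogger.action_logger(wrapped.func_name)
        kwargs['log'] = log
        result = wrapped(*args, **kwargs)
        log.debug('Complete.\tResult: {}'.format(result))
        return result
    return _wrapper

@ActionLog
def check_stuff(first, second=None, log=None):
    log.debug('  info: called with {} and {}'.format(first, second))
    return True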
Some background on why I want to achieve this:
I am making a long-running server type application. I would like to be able to perform functional and integration style testing on this application with high coverage, especially in the failure and corner-case scenarios. In order to achieve this, I would like to inject various faults which are configurable at run time so that my test can validate the program's behavior when such a condition is hit.
The Problem:
I would like to be able to dynamically decide the return behavior of a function. I would additionally like to do this with only a function call and without pre-processing the source code (macros). Here is a simple example:
from functools import wraps

def decorator(func):
    @wraps(func)
    def func_wrapper(*args, **kwargs):
        print 'in wrapper before %s' % func.__name__
        val = func(*args, **kwargs)
        print 'in wrapper after %s' % func.__name__
        return val
    return func_wrapper
@decorator
def grandparent():
    val = parent()
    assert val == 2
    # do something with val here

@decorator
def parent():
    foo = 'foo_val'
    some_func(foo)
    # other statements here
    child()
    # if the condition in child is met,
    # this would be dead (not-executed)
    # code. If it is not met, this would
    # be executed.
    return 1

def child(*args, **kwargs):
    # do something here to make
    # the assert in grandparent true
    return 2
# --------------------------------------------------------------------------- #
class MyClass:
    @decorator
    def foo(self):
        val = self.bar()
        assert val == 2

    def bar(self):
        self.tar()
        child()
        return 1

    def tar(self):
        return 42
# --------------------------------------------------------------------------- #
The grandparent() function in the code above calls parent() to get a response. It would then do something based on the value of val. The parent() function calls child() and unconditionally returns the value 1. I would like to write something in child() which causes the value it returns to be returned to grandparent(), skipping the rest of parent().
Restrictions/Permissions
- grandparent() could be function number n in a long chain of function calls, not necessarily the top-level function.
- Only child() and any new helper functions called solely as a result of calling child() can be modified/created to make this work.
- All work must be done at runtime. No pre-processing of the source files is acceptable.
- The decision about the parent() function's behavior has to be made inside the specific child() call.
- Using pure Python (2.7) or making use of the CPython API are both acceptable ways to solve this issue.
- This is allowed to be a hack. The child() function will be inert in production mode.
I have tried:
- Modifying the stack list (deleting the parent() frame) retrieved from inspect.stack(); this seems to do nothing.
- Creating new bytecode for the parent() frame and replacing it in the stack. This also does not seem to have an effect.
- Looking into the CPython functions related to stack management, but when I added or removed a frame, I kept getting stack under- or overflows.
If you know the name(s) of child(), then you can patch all callers of child() at runtime: iterate through module and class functions, patch the call sites of child() to add your custom logic, and hot-swap each caller with its patched version.
Here is a working example:
#!/usr/bin/env python2.7
from six import wraps

def decorator(func):
    @wraps(func)
    def func_wrapper(*args, **kwargs):
        print 'in wrapper before %s' % func.__name__
        val = func(*args, **kwargs)
        print 'in wrapper after %s' % func.__name__
        return val
    return func_wrapper
@decorator
def grandparent():
    val = parent()
    assert val == 2
    # do something with val here

@decorator
def parent():
    # ...
    # ...
    child()
    # if the condition in child is met,
    # this would be dead (not-executed)
    # code. If it is not met, this would
    # be executed.
    return 1

def child(*args, **kwargs):
    # do something here to make
    # the assert in grandparent true
    return 2

# --------------------------------------------------------------------------- #

class MyClass:
    @decorator
    def foo(self):
        val = self.bar()
        assert val == 2

    def bar(self):
        self.tar()
        child()
        return 1

    def tar(self):
        return 42
# --------------------------------------------------------------------------- #
import sys
import inspect
import textwrap
import types
import itertools
import logging

logging.basicConfig()
logging.getLogger().setLevel(logging.INFO)
log = logging.getLogger(__name__)

def should_intercept():
    # TODO: check system state and return True/False
    # just a dummy implementation for now based on # of args
    if len(sys.argv) > 1:
        return True
    return False

def _unwrap(func):
    while hasattr(func, '__wrapped__'):
        func = func.__wrapped__
    return func

def __patch_child_callsites():
    if not should_intercept():
        return
    for module in sys.modules.values():
        if not module:
            continue
        scopes = itertools.chain(
            [module],
            (clazz for clazz in module.__dict__.values() if inspect.isclass(clazz))
        )
        for scope in scopes:
            # get all functions in scope
            funcs = list(fn for fn in scope.__dict__.values()
                         if isinstance(fn, types.FunctionType)
                         and not inspect.isbuiltin(fn)
                         and fn.__name__ != __patch_child_callsites.__name__)
            for fn in funcs:
                try:
                    fn_src = inspect.getsource(_unwrap(fn))
                except IOError:
                    log.warning("couldn't get source for fn: %s:%s",
                                scope.__name__, fn.__name__)
                    continue

                # remove common indentation
                fn_src = textwrap.dedent(fn_src)

                if 'child()' in fn_src:
                    # construct patched caller source
                    patched_fn_name = "patched_%s" % fn.__name__
                    patched_fn_src = fn_src.replace(
                        "def %s(" % fn.__name__,
                        "def %s(" % patched_fn_name,
                    )
                    patched_fn_src = patched_fn_src.replace(
                        'child()', 'return child()'
                    )
                    log.debug("patched_fn_src:\n%s", patched_fn_src)

                    # compile patched caller into scope (Python 2 exec-in syntax)
                    compiled = compile(patched_fn_src, inspect.getfile(scope), 'exec')
                    exec(compiled) in fn.__globals__, scope.__dict__

                    # replace original caller with patched caller
                    patched_fn = scope.__dict__.get(patched_fn_name)
                    setattr(scope, fn.__name__, patched_fn)
                    log.info('patched %s:%s', scope.__name__, fn.__name__)

if __name__ == '__main__':
    __patch_child_callsites()
    grandparent()
    MyClass().foo()
Run with no arguments to get the original behavior (assertion failure). Run with one or more arguments and the assertion disappears.
I actually have a big problem with a Django wizard form.
I have 3 steps. The second step may or may not contain data. The last step is a file-upload step.
In the WizardForm class, I overrode the get_context_data method and included this in it:
if self.steps.current == 'against_indication':
    questions = None
    try:
        # get the machine
        machine_id = self.kwargs['pk']
        machine = Machine.objects.get(pk=int(machine_id))
        # check if there are against-indications
        if machine.type_question is False:
            questions = YhappsQuestion.objects.filter(type_modalite=machine.type)
        else:
            questions = CustomQuestion.objects.filter(machine=machine)
    except Machine.DoesNotExist:
        pass
    if len(questions) == 0:
        # we modify the form wizard to skip the against-indication step
        self.render_next_step(form, **kwargs)
        # self.render_goto_step(step='against_indication', goto_step='prescription', **kwargs)
As you can see, if there are no questions, I skip the second step (against_indication) and go to the next step (prescription).
The problem appears here. When the last step is rendered, there is not enough data in the wizard form. The django-debug-toolbar request shows this:
[screenshot: wizard request data with the skipped step]
So if I upload the file, it fills the against_indication data instead of the prescription data, and re-renders the last step...
I also tried to do all of this without skipping the second step, to see what the ddt request looks like:
[screenshot: wizard request data without the skipped step]
Does anyone have a solution so that the right data is present when I skip a step?
Thanks for your answers.
I don't think get_context_data is the correct method to do this in; FormWizard is a very specific class that restricts where you can perform different functions.
The typical way to specify when FormWizard skips a step is to use a condition_dictionary. Django uses this structure to include the form for a step only when the condition (a callable) returns True. If not, that step's form never forces form.is_valid() to be called, bypassing the validation of that step. This also ensures that all hidden management info for the form is created for each step.
Here's an example of how this can work:
# I always specify index values for steps so that all functions can share them
STEP_ONE = u'0'
STEP_TWO = u'1'
STEP_THREE = u'2'

class YourFormWizard(SessionWizardView):
    # Your form wizard itself; will not be called directly by urls.py, but rather
    # wrapped in a view that provides the condition_dictionary
    _condition_dict = {  # key=step, value=callable returning True to show the step, False to skip it
        STEP_ONE: return_true,      # always show this step
        STEP_TWO: check_step_two,   # conditional callable deciding whether to show step two
        STEP_THREE: return_true,    # always show this step
    }  # return_true and check_step_two are defined below
    _form_list = [  # a list of forms used per step
        (STEP_ONE, your_forms.StepOneForm),
        (STEP_TWO, your_forms.StepTwoForm),
        (STEP_THREE, your_forms.StepThreeForm),
    ]
    ...
def return_true(wizard):  # callable used in _condition_dict
    return True  # always True, for when you always want the step shown

def check_step_two(wizard):  # callable used in _condition_dict
    step_1_info = wizard.get_cleaned_data_for_step(STEP_ONE)
    # do something with the info; you can retrieve data for any prior step
    if step_1_info == some_condition:
        return True  # show step 2
    else:
        return False  # or don't
''' urls.py '''
your_form_wizard = YourFormWizard.as_view(YourFormWizard._form_list, condition_dict=YourFormWizard._condition_dict)

urlpatterns = patterns('',
    ...
    url(r'^form_wizard_url/$', your_form_wizard, name='my-form-wizard',)
)
Based on Ian Price's great answer, I tried to make some improvements, because I noticed the structure could help over time, especially if you try to change the order of your steps:
- I used a dataclass to centralize the data about a wizard step, then generated the required data for urls later.
- I used lambdas for returning True, making the callable optional.
- I extracted STEP_X into a const.py file to be re-usable.
- I tried as much as possible to gather data within the class view itself rather than in standalone functions.
Here is the code (Python 3.7+):
const.py
STEP_0 = '0'
STEP_1 = '1'
STEP_2 = '2'
views.py
from dataclasses import dataclass
from typing import Any, Optional, Callable, Sequence

from const import STEP_0, STEP_1, STEP_2

@dataclass
class WizardStepData:
    step: str
    form_class: Any
    trigger_condition: Optional[Callable] = None

    def __init__(self, step, form_class, trigger_condition=None):
        """ if trigger_condition is not provided, we store a callable that returns True
            (dataclass skips generating __init__ when the class defines its own) """
        self.step = step
        self.form_class = form_class
        self.trigger_condition = trigger_condition if trigger_condition else lambda _: True

class YourFormWizard(SessionWizardView):
    @staticmethod
    def check_step_one(wizard) -> bool:
        pass  # ...

    @classmethod
    def get_wizard_data_list(cls) -> Sequence:
        return [
            WizardStepData(step=STEP_0,
                           form_class=StepZeroForm),
            WizardStepData(step=STEP_1,
                           form_class=StepOneForm,
                           trigger_condition=cls.check_step_one),
            WizardStepData(step=STEP_2,
                           form_class=StepTwoForm),
        ]

    @classmethod
    def _condition_dict(cls) -> dict:
        return {data.step: data.trigger_condition for data in cls.get_wizard_data_list()}

    @classmethod
    def _form_list(cls) -> list:
        return [(data.step, data.form_class) for data in cls.get_wizard_data_list()]
urls.py
# ...
your_form_wizard = YourFormWizard.as_view(form_list=YourFormWizard._form_list(),
                                          condition_dict=YourFormWizard._condition_dict())
There is a large Python project where one attribute of one class has the wrong value in some place.
It should be sqlalchemy.orm.attributes.InstrumentedAttribute, but when I run tests it is a constant value, let's say a string.
Is there some way to run a Python program in debug mode and automatically run some check (whether a variable changed type) after each line of code?
P.S. I know how to log changes to an attribute of a class instance with the help of inspect and a property decorator. Possibly I could combine that method with metaclasses...
But sometimes I need a more general and powerful solution...
Thank you.
P.P.S. I need something like this: https://stackoverflow.com/a/7669165/816449, but maybe with more explanation of what is going on in that code.
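For reference, the property-decorator approach mentioned in the P.S. might look like this (my sketch, with made-up class and attribute names):

import traceback

class WatchedAttr(object):
    def __init__(self, value):
        self._value = value

    @property
    def attr(self):
        return self._value

    @attr.setter
    def attr(self, value):
        # report when the attribute is rebound to a different type
        if not isinstance(value, type(self._value)):
            print 'attr changed type to %s, set from:' % type(value)
            traceback.print_stack()
        self._value = value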
Well, here is a sort of slow approach. It can be modified to watch for a local variable change (just by name). Here is how it works: we call sys.settrace and analyse the value of obj.attr at each step.

The tricky part is that we receive 'line' events (that some line was executed) before the line is executed. So, when we notice that obj.attr has changed, we are already on the next line and we can't get the previous line's frame (because frames aren't copied for each line; they are modified in place). So on each line event I save traceback.format_stack to watcher.prev_st, and if on the next call of trace_command the value has changed, we print the saved stack trace to a file. Saving the traceback on each line is quite an expensive operation, so you'd have to set the include keyword to a list of your project's directories (or just the root of your project) in order not to watch how other libraries are doing their stuff and waste CPU.
watcher.py
import traceback

class Watcher(object):
    def __init__(self, obj=None, attr=None, log_file='log.txt', include=[], enabled=False):
        """
        Debugger that watches for changes in object attributes
        obj - object to be watched
        attr - string, name of attribute
        log_file - string, where to write output
        include - list of strings, debug files only in these directories.
           Set it to path of your project otherwise it will take long time
           to run on big libraries import and usage.
        """
        self.log_file = log_file
        with open(self.log_file, 'wb'):
            pass  # truncate the log file
        self.prev_st = None
        self.include = [incl.replace('\\', '/') for incl in include]
        if obj:
            self.value = getattr(obj, attr)
        self.obj = obj
        self.attr = attr
        self.enabled = enabled  # Important, must be last line on __init__.

    def __call__(self, *args, **kwargs):
        kwargs['enabled'] = True
        self.__init__(*args, **kwargs)

    def check_condition(self):
        tmp = getattr(self.obj, self.attr)
        result = tmp != self.value
        self.value = tmp
        return result

    def trace_command(self, frame, event, arg):
        if event != 'line' or not self.enabled:
            return self.trace_command
        if self.check_condition():
            if self.prev_st:
                with open(self.log_file, 'ab') as f:
                    print >>f, "Value of", self.obj, ".", self.attr, "changed!"
                    print >>f, "###### Line:"
                    print >>f, ''.join(self.prev_st)
        if self.include:
            fname = frame.f_code.co_filename.replace('\\', '/')
            to_include = False
            for incl in self.include:
                if fname.startswith(incl):
                    to_include = True
                    break
            if not to_include:
                return self.trace_command
        self.prev_st = traceback.format_stack(frame)
        return self.trace_command


import sys
watcher = Watcher()
sys.settrace(watcher.trace_command)
testwatcher.py
from watcher import watcher
import numpy as np
import urllib2

class X(object):
    def __init__(self, foo):
        self.foo = foo

class Y(object):
    def __init__(self, x):
        self.xoo = x

    def boom(self):
        self.xoo.foo = "xoo foo!"

def main():
    x = X(50)
    watcher(x, 'foo', log_file='log.txt', include=['C:/Users/j/PycharmProjects/hello'])
    x.foo = 500
    x.goo = 300
    y = Y(x)
    y.boom()
    arr = np.arange(0, 100, 0.1)
    arr = arr ** 2
    for i in xrange(3):
        print 'a'
        x.foo = i
    for i in xrange(1):
        i = i + 1

main()
There's a very simple way to do this: use watchpoints.
Basically you only need to do
from watchpoints import watch
watch(your_object.attr)
That's it. Whenever the attribute is changed, it will print out the line that changed it and how it's changed. Super easy to use.
It also has more advanced features: for example, you can drop into pdb when the variable is changed, or use your own callback function instead of printing to stdout, as sketched below.
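For illustration, a sketch of the callback form; the watch() call matches the snippet above, but the callback signature here is an assumption to verify against the watchpoints docs:

from watchpoints import watch

def on_change(frame, elem, exec_info):
    # assumed signature (frame, elem, exec_info); check the docs
    print("watched value changed at:", exec_info)

class Config(object):
    def __init__(self):
        self.attr = 1

conf = Config()
watch(conf.attr, callback=on_change)
conf.attr = 2  # triggers on_change instead of the default print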
A simpler way to watch for an object's attribute change (which can also be a module-level variable or anything accessible with getattr) is to leverage the hunter library, a flexible code-tracing toolkit. To detect state changes, we need a predicate, which can look like the following:
import traceback

class MutationWatcher:
    def __init__(self, target, attrs):
        self.target = target
        self.state = {k: getattr(target, k) for k in attrs}

    def __call__(self, event):
        result = False
        for k, v in self.state.items():
            current_value = getattr(self.target, k)
            if v != current_value:
                result = True
                self.state[k] = current_value
                print('Value of attribute {} has changed from {!r} to {!r}'.format(
                    k, v, current_value))
        if result:
            traceback.print_stack(event.frame)
        return result
Then, given some sample code:

class TargetThatChangesWeirdly:
    attr_name = 1

def some_nested_function_that_does_the_nasty_mutation(obj):
    obj.attr_name = 2

def some_public_api(obj):
    some_nested_function_that_does_the_nasty_mutation(obj)
We can instrument it with hunter like:
# or any other entry point that calls the public API of interest
if __name__ == '__main__':
    obj = TargetThatChangesWeirdly()

    import hunter
    watcher = MutationWatcher(obj, ['attr_name'])
    hunter.trace(watcher, stdlib=False, action=hunter.CodePrinter)

    some_public_api(obj)
Running the module produces:
Value of attribute attr_name has changed from 1 to 2
  File "test.py", line 44, in <module>
    some_public_api(obj)
  File "test.py", line 10, in some_public_api
    some_nested_function_that_does_the_nasty_mutation(obj)
  File "test.py", line 6, in some_nested_function_that_does_the_nasty_mutation
    obj.attr_name = 2
test.py:6 return obj.attr_name = 2
... return value: None
You can also use other actions that hunter supports. For instance, Debugger, which breaks into pdb when an attribute change is detected; a minimal sketch follows.
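A sketch of that variant, reusing the watcher and obj from the example above (hunter ships a Debugger action):

import hunter

# same predicate as before, but break into pdb on a detected change
hunter.trace(watcher, stdlib=False, action=hunter.Debugger)
some_public_api(obj)  # pdb starts when attr_name changes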
Try using __setattr__ to override the method that is called when an attribute assignment is attempted. Documentation for __setattr__.
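A minimal sketch of that idea (class and attribute names are made up):

import traceback

class Traced(object):
    def __setattr__(self, name, value):
        if name == 'attr_name':
            # show exactly who is assigning the attribute
            traceback.print_stack()
        super(Traced, self).__setattr__(name, value)

t = Traced()
t.attr_name = 42  # prints the stack of the assigning code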
You can use the Python debugger module, pdb (part of the standard library).
To use it, just import pdb at the top of your source file:
import pdb
and then set a trace wherever you want to start inspecting the code:
pdb.set_trace()
You can then step through the code with n, and investigate the current state by running Python commands.
def __setattr__(self, name, value):
    if name == "xxx":
        util.output_stack('xxxxx')  # util.output_stack is the poster's own stack-dump helper
    super(XXX, self).__setattr__(name, value)
This sample code helped me.
According to the Requests documentation, event hooks can be added to the .get() function:

requests.get('http://httpbin.org', hooks=dict(response=print_url))

def print_url(r, *args, **kwargs):
    print(r.url)

This is fine, but how do I set *args with custom parameters? For example, I want to pass some custom values to print_url(); how do I set those in *args? Something like this fails:

args = ("search_item", search_item)
rs = (grequests.get(u, hooks={'response': [parse_books], 'args': [args]}) for u in urls)
You cannot specify extra arguments to pass to a response hook. If you want extra information to be available, make a factory function that you call, which then returns a function to be passed as the hook, e.g.:

def hook_factory(*factory_args, **factory_kwargs):
    def response_hook(response, *request_args, **request_kwargs):
        # use factory_kwargs
        # etc.
        return None  # or the modified response
    return response_hook

grequests.get(u, hooks={'response': [hook_factory(search_item=search_item)]})
The response_hook function has to return a response object. The simplest workaround is to modify the response object you get in the hook produced by hook_factory:

def hook_factory(*factory_args, **factory_kwargs):
    def response_hook(response, *request_args, **request_kwargs):
        # use factory_kwargs
        # etc.
        response.meta1 = 'meta1'  # add data
        response.meta2 = 'meta2'
        # etc.
        return response  # the modified response
    return response_hook

Hope this helps.
I'm writing a utility where I would like to have global variables that change the way a function operates. By default I'd like all the functions to follow one style, but in certain cases I'd also like the ability to force the way a function operates.
Say I have a file Script_Defaults.py with
USER_INPUT = True
In another Python file I have many functions like this:

from Script_Defaults import USER_INPUT

def DoSomething(var_of_data, user_input=USER_INPUT):
    if user_input:
        ... # some code here that asks the user what to change in var_of_data
    .... # goes on to do something
The problem I face here is that the default parameter is only evaluated once, when the module is first loaded. For example:
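(Here is a tiny self-contained demonstration of that early-binding behaviour, with throwaway names:)

flag = True

def f(x, use_flag=flag):
    return use_flag

flag = False
print f(1)  # still prints True: the default captured flag at def time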
I want to be able to set USER_INPUT to False or True during the run of the program. To get this behaviour I'm currently writing it like this...
from Script_Defaults import USER_INPUT

def DoSomething(var_of_data, user_input=None):
    if user_input is None:
        user_input = USER_INPUT
    if user_input:
        ... # some code here that asks the user what to change in var_of_data
    .... # goes on to do something
This seems like a lot of unnecessary code, especially if I have a lot of flags like USER_INPUT and many functions that need them. Is there a better way to get this functionality, or is this the only way?
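(One variation I've considered, sketched with the same names, is to import the module rather than the name, so the attribute lookup happens at call time; it still needs the None-check in every function, though:)

import Script_Defaults

def DoSomething(var_of_data, user_input=None):
    if user_input is None:
        # attribute lookup happens at call time, so runtime changes
        # to Script_Defaults.USER_INPUT are picked up
        user_input = Script_Defaults.USER_INPUT
    if user_input:
        pass  # ask the user what to change in var_of_data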
Using decorators, and manipulation of a function's default arguments, you can use the following solution:
from change_defaults import Default, set_defaults

my_defaults = dict(USER_INPUT=0)

@set_defaults(my_defaults)
def DoSomething(var_of_data, user_input=Default("USER_INPUT")):
    return var_of_data, user_input

def main():
    print DoSomething("This")
    my_defaults["USER_INPUT"] = 1
    print DoSomething("Thing")
    my_defaults["USER_INPUT"] = 2
    print DoSomething("Actually")
    print DoSomething("Works", 3)

if __name__ == "__main__":
    main()
Which requires the following code:
# change_defaults.py
from functools import wraps

class Default(object):
    def __init__(self, name):
        super(Default, self).__init__()
        self.name = name

def set_defaults(defaults):
    def decorator(f):
        @wraps(f)
        def wrapper(*args, **kwargs):
            # Backup original function defaults.
            original_defaults = f.func_defaults
            # Replace every `Default("...")` argument with its current value.
            function_defaults = []
            for default_value in f.func_defaults:
                if isinstance(default_value, Default):
                    function_defaults.append(defaults[default_value.name])
                else:
                    function_defaults.append(default_value)
            # Set the new function defaults.
            f.func_defaults = tuple(function_defaults)
            return_value = f(*args, **kwargs)
            # Restore original defaults (required to keep this trick working).
            f.func_defaults = original_defaults
            return return_value
        return wrapper
    return decorator
By defining the default parameters with Default(parameter_name) you tell the set_defaults decorator which value to take from the defaults dict.
Also, with a little more code (irrelevant to the solution) you can make it work like:

@set_defaults(my_defaults)
def DoSomething(var_of_data, user_input=Default.USER_INPUT):
    ...