I want to force child classes to implement a classmethod in Python 2.7.
I tried this:
import abc

class Base(object):
    __metaclass__ = abc.ABCMeta

    @abc.abstractproperty
    def value(self):
        pass

    @abc.abstractproperty
    @classmethod
    def text(cls):
        pass

class Imp(Base):
    TEXT = "hi im text"

    @classmethod
    def haba(cls):
        print 'HI'

    @property
    def value(self):
        return 'asdasd'

    @classmethod
    @property
    def text(cls):
        return 'ho ho p'

print Imp.text
print Imp.TEXT
But I'm getting this output:
<bound method ABCMeta.? of <class '__main__.Imp'>>
hi im text
How can I properly force child classes to implement classmethod properties?
You can see that Imp.TEXT works, but there is no way to force subclasses to define this member from the base class this way.
After re-reading your question a few times, I concluded that you want the classmethod to behave as if it were a property of the class.
First, Python's implementation of abstract method/property checking is meant to be performed at instantiation time only, not at class declaration. I hope you are aware of that.
Second, Python's descriptor protocol allows for the creation of the equivalent of "class properties", although there is no higher-level support for that in the language itself: you can create a descriptor class whose __get__ method returns your computed value when the instance argument is None (ordinarily descriptors return self in that case, so that they can be retrieved from the class).
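For illustration, a minimal sketch of such a descriptor (the names here are mine, not from your code; the clsproperty class further down does the same thing):

class classproperty(object):
    def __init__(self, func):
        self.func = func
    def __get__(self, instance, owner):
        # 'instance' is None when the attribute is accessed on the class,
        # so compute the value from the owner class instead of returning self.
        return self.func(owner)

class Example(object):
    @classproperty
    def text(cls):
        return "computed for " + cls.__name__

print(Example.text)   # -> computed for Example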
Finally, it is possible that by making the metaclass itself abstract, and then declaring it as your class's metaclass, abstract properties will be enforced at runtime. Let's try that:
In [1]: import abc

In [2]: class AbsPropertyMeta(abc.ABC, type):
   ...:     @abc.abstractproperty
   ...:     def cl(cls):
   ...:         return "Ho ho ho"
   ...:

In [3]: class ConcreteExample(metaclass=AbsPropertyMeta):
   ...:     pass
   ...:
(Note that I will develop the answer using Python 3, which is what you should be using for any new project, and for learning purposes as well.)
So, as the example above shows, the property on the metaclass does work as a "class property", but Python does not enforce its redefinition in the class body.
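For instance, continuing that session (this extra line is my own illustration, not part of the original transcript), accessing the attribute on the class should go through the metaclass property and return the computed value:

In [4]: ConcreteExample.cl   # -> 'Ho ho ho'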
So, if you really need this design, you should create a complete custom metaclass for it, and let go of the abc.ABCMeta machinery altogether:
from functools import partial

def abstractclassproperty(func):
    func._abstract_property = True
    return func

class clsproperty(object):
    def __init__(self, func):
        self.func = func
    def __get__(self, instance, owner):
        return self.func(owner)

class ABCAbstractClsProperty(type):
    def __new__(mcls, name, bases, namespace, **kw):
        new_cls = super(ABCAbstractClsProperty, mcls).__new__(mcls, name, bases, namespace, **kw)
        for attr_name in dir(new_cls):  # dir() retrieves attributes from all superclasses
            attr = getattr(new_cls, attr_name)
            if getattr(attr, "im_func", None):  # Python 2 specific normalization.
                attr = attr.im_func
            if getattr(attr, '_abstract_property', False) and new_cls.__dict__.get(attr_name) is not attr:
                raise TypeError("Can't create class {!r}: abstract property {!r} not implemented".format(name, attr_name))
        return new_cls


""" # Python 3:
class A(metaclass=ABCAbstractClsProperty):
    @abstractclassproperty
    def cl(cls):
        pass
"""

class A(object):
    __metaclass__ = ABCAbstractClsProperty

    @abstractclassproperty
    def cl(cls):
        pass

try:
    class B(A):
        pass
except TypeError:
    print("Check ok")

class C(A):
    @clsproperty
    def cl(cls):
        return "ho ho ho " + cls.__name__

print(C.cl)
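(With the decorators restored to @ syntax, running this should print "Check ok" for the rejected definition of B, followed by "ho ho ho C" for the last line.)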
Update: to work around the combination of Choregraphe and Python, I rejected the idea of using @classmethod. Instead, I raise ALMemory events in MyCustomClass when I want to make use of MyClass.
I read many posts on NameError but still couldn't find a solution to my problem.
I am writing a program with Choregraphe, using a Python box, for Nao.
I have the following:
class MyClass(GeneratedClass):  # GeneratedClass is given
    def __init__(self):
        GeneratedClass.__init__(self)

    @classmethod
    def doSomething(cls, a):
        print a

class myCustomClass():
    def func(self):
        MyClass.doSomething(a)
When calling func() on a myCustomClass instance, I get a NameError for MyClass.
[ERROR] behavior.box :FMBox::createPythonModule:0 _Behavior__lastUploadedChoregrapheBehaviorbehavior_1275012824__root__test_1: User class evaluation failed with the error:
global name 'MyClass' is not defined
How can I fix that?
To start with, your @classmethod usage and class structure are wrong.
When I ran your code, it said this:
class MyClass(GeneratedClass):
    @classmethod
    def do(self, a):
        return a

class myCustomClass():
    def func(self):
        MyClass.do(a)
Output:
Traceback (most recent call last):
File "test.py", line 236, in <module>
class MyClass(GeneratedClass):
NameError: name 'GeneratedClass' is not defined
Your class structure is wrong. If you want to pass a parameter, use the __init__ method.
class MyClass:
    def __init__(self, GeneratedClass):
        self.generated_class = GeneratedClass

    def do(self):
        doSomething(self.generated_class)  # doSomething is a placeholder for whatever work you need

class MyCustomClass:
    def func(self):
        GeneratedClass = 1
        MyClass(GeneratedClass).do()

MyCustomClass().func()
If you are using @classmethod you should not pass self; the first parameter is cls, as in this example:
from datetime import date

# random Person
class Person:
    def __init__(self, name, age):
        self.name = name
        self.age = age

    @classmethod
    def fromBirthYear(cls, name, birthYear):
        return cls(name, date.today().year - birthYear)

    def display(self):
        print(self.name + "'s age is: " + str(self.age))

person = Person('Adam', 19)
person.display()

person1 = Person.fromBirthYear('John', 1985)
person1.display()
If you are trying to use inheritance, take this as an example of how it should be done.
class Mapping:
    def __init__(self, iterable):
        self.items_list = []
        self.__update(iterable)

    def update(self, iterable):
        for item in iterable:
            self.items_list.append(item)

    __update = update   # private copy of original update() method

class MappingSubclass(Mapping):
    def update(self, keys, values):
        # provides new signature for update()
        # but does not break __init__()
        for item in zip(keys, values):
            self.items_list.append(item)
Now, putting it all together according to your code:
class GeneratedClass:
    def __init__(self):
        self.myclass = self

    def print(self):
        print('hello_people')

class MyClass(GeneratedClass):
    def __init__(self, a):
        self.a = a
        GeneratedClass.__init__(self)
        print(a)

    @classmethod
    def give_param(cls, a):
        return cls(a)

class myCustomClass:
    def func(self):
        MyClass.give_param('aa')

myCustomClass().func()
NOTE: I have used Python 3.x.
I think "MyClass" is replaced on the fly by the Choregraphe/PythonBridge interpreter invoked when you press run.
As you can see, each Choregraphe box class is named "MyClass", so they are all replaced and renamed with a generated name like root/class/boxname...
You could try calling and printing self.getName() inside MyClass to get a clue.
So in your case, you could:
add doSomething directly in myClass
create a class that is not post-processed, e.g.:
class MyVeryOneClass:
    def __init__(self):
        ...
While executing the code below, I'm getting AttributeError: attribute '__doc__' of 'type' objects is not writable.
from functools import wraps

def memoize(f):
    """ Memoization decorator for functions taking one or more arguments.
        Saves repeated api calls for a given value, by caching it.
    """
    @wraps(f)
    class memodict(dict):
        """memodict"""
        def __init__(self, f):
            self.f = f
        def __call__(self, *args):
            return self[args]
        def __missing__(self, key):
            ret = self[key] = self.f(*key)
            return ret
    return memodict(f)

@memoize
def a():
    """blah"""
    pass
Traceback:
AttributeError Traceback (most recent call last)
<ipython-input-37-2afb130b1dd6> in <module>()
17 return ret
18 return memodict(f)
---> 19 @memoize
20 def a():
21 """blah"""
<ipython-input-37-2afb130b1dd6> in memoize(f)
7 """
8 @wraps(f)
----> 9 class memodict(dict):
10 """memodict"""
11 def __init__(self, f):
/System/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/functools.pyc in update_wrapper(wrapper, wrapped, assigned, updated)
31 """
32 for attr in assigned:
---> 33 setattr(wrapper, attr, getattr(wrapped, attr))
34 for attr in updated:
35 getattr(wrapper, attr).update(getattr(wrapped, attr, {}))
AttributeError: attribute '__doc__' of 'type' objects is not writable
Even though the docstring is provided, I don't know what's wrong with this.
It works fine if not wrapped, but I need to wrap it.
functools.wraps() was designed to wrap functions, not class objects. One of the things it does is attempt to assign the __doc__ string of the wrapped (original) function to the wrapper function, which, as you've discovered, isn't allowed for classes in Python 2. It also does the same for the __name__ and __module__ attributes.
A simple way to work around this restriction is to do it manually when the MemoDict class is defined. Here's what I mean. (Note: for readability I always use CamelCase class names, as per PEP 8 - Style Guide for Python Code.)
def memoize(f):
    """ Memoization decorator for functions taking one or more arguments.
        Saves repeated api calls for a given value, by caching it.
    """
    class MemoDict(dict):
        __doc__ = f.__doc__
        __name__ = f.__name__
        __module__ = f.__module__

        def __init__(self, f):
            self.f = f
        def __call__(self, *args):
            return self[args]
        def __missing__(self, key):
            ret = self[key] = self.f(*key)
            return ret

    return MemoDict(f)

@memoize
def a():
    """blah"""
    print('Hello world!')

print(a.__doc__)     # -> blah
print(a.__name__)    # -> a
print(a.__module__)  # -> __main__

a()  # -> Hello world!
In fact, if you wished, you could create your own wrapper / class-decorating function to do it:
def wrap(f):
    """ Convenience function to copy function attributes to derived class. """
    def class_decorator(cls):
        class Derived(cls):
            __doc__ = f.__doc__
            __name__ = f.__name__
            __module__ = f.__module__
        return Derived
    return class_decorator

def memoize(f):
    """ Memoization decorator for functions taking one or more arguments.
        Saves repeated api calls for a given value, by caching it.
    """
    @wrap(f)
    class MemoDict(dict):
        def __init__(self, f):
            self.f = f
        def __call__(self, *args):
            return self[args]
        def __missing__(self, key):
            ret = self[key] = self.f(*key)
            return ret

    return MemoDict(f)

@memoize
def a():
    """blah"""
    print('Hello world!')

print(a.__doc__)     # -> blah
print(a.__name__)    # -> a
print(a.__module__)  # -> __main__

a()  # -> Hello world!
@wraps(f) is primarily designed to be used as a function decorator, rather than as a class decorator, so using it as the latter may lead to the occasional odd quirk.
The specific error message you're receiving relates to a limitation of builtin types on Python 2:
>>> class C(object): pass
...
>>> C.__doc__ = "Not allowed"
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: attribute '__doc__' of 'type' objects is not writable
If you use Python 3, switch to a classic class in Python 2 (by inheriting from UserDict.UserDict rather than the dict builtin), or use a closure to manage the result cache rather than a class instance, then the decorator will be able to copy the docstring over from the underlying function.
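For example, here is a minimal sketch of the closure-based variant (the cache dict and wrapper names are mine). Because the wrapper is an ordinary function, @wraps(f) can copy __doc__, __name__ and __module__ without complaint:

from functools import wraps

def memoize(f):
    """ Cache results keyed by the positional arguments. """
    cache = {}

    @wraps(f)                     # works: the wrapper is a function, not a class
    def wrapper(*args):
        if args not in cache:
            cache[args] = f(*args)
        return cache[args]
    return wrapper

@memoize
def a():
    """blah"""
    return 42

print(a.__doc__)   # -> blah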
The wraps decorator you're trying to apply to your class doesn't work because you can't modify the docstring of a class after it has been created. You can recreate the error with this code:
class Foo(object):
    """initial docstring"""

Foo.__doc__ = """new docstring"""  # raises an exception in Python 2
The exception doesn't occur in Python 3 (I'm not exactly sure why it's changed).
A workaround might be to assign the class variable __doc__ in your class, rather than using wraps to set the docstring after the class exists:
def memoize(f):
    """ Memoization decorator for functions taking one or more arguments.
        Saves repeated api calls for a given value, by caching it.
    """
    class memodict(dict):
        __doc__ = f.__doc__  # copy docstring to class variable

        def __init__(self, f):
            self.f = f
        def __call__(self, *args):
            return self[args]
        def __missing__(self, key):
            ret = self[key] = self.f(*key)
            return ret

    return memodict(f)
This won't copy any of the other attributes that wraps tries to copy (like __name__, etc.). You may want to fix those up yourself if they're important to you. The __name__ attribute, however, needs to be set after the class is created (you can't assign it in the class definition):
class Foo(object):
    __name__ = "Bar"   # this has no effect

Foo.__name__ = "Bar"   # this works
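Applied to the memoize example, that fix-up could look like this (a sketch along the lines of the code above, not the only way to do it):

def memoize(f):
    """ Memoization decorator for functions taking one or more arguments. """
    class memodict(dict):
        __doc__ = f.__doc__          # copy docstring to class variable
        def __init__(self, f):
            self.f = f
        def __call__(self, *args):
            return self[args]
        def __missing__(self, key):
            ret = self[key] = self.f(*key)
            return ret

    memodict.__name__ = f.__name__   # must be assigned after the class is created
    memodict.__module__ = f.__module__
    return memodict(f)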
I'm running Python 2.7, 3.4 and 3.5. Only 2.7 raises a TypeError with the following code. I'm wondering whether I'm doing something wrong, whether this is a known bug, or whether it is something else.
from abc import ABCMeta, abstractmethod

class Base(object):
    __metaclass__ = ABCMeta

    @abstractmethod
    def bar(self):
        pass

class Concrete(Base):
    pass

confused = Concrete()
In Python 2.7 I get the following (helpful) error:
Traceback (most recent call last):
File "confused.py", line 16, in <module>
confused = Concrete()
TypeError: Can't instantiate abstract class Concrete with abstract methods bar
But in Python 3.x it runs without an error (bad). Thanks.
Declaring an abstract base class changed in python3 to:
import abc

class Base(metaclass=abc.ABCMeta):
    @abc.abstractmethod
    def bar(self):
        pass

class Concrete(Base):
    pass

Concrete()  # Will raise a TypeError
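If the same module has to run under both Python 2 and Python 3, one option (a sketch, not required by the question) is to create the base class by calling ABCMeta directly, which sidesteps the incompatible metaclass syntax:

import abc

# Works on Python 2 and 3: the class is created by invoking the metaclass directly.
Base = abc.ABCMeta('Base', (object,), {
    'bar': abc.abstractmethod(lambda self: None),
})

class Concrete(Base):
    pass

Concrete()  # raises TypeError on both versions until bar() is overridden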
They behave differently in Python 2.x and Python 3.x.
Python 3.6

# importing abstract base classes module
import abc

class GetterSetter(abc.ABC):
    '''
    ABSTRACT BASE CLASSES:
    - An abstract base class is a kind of 'model' for other classes to be defined.
    - It is not designed to construct instances, but can be subclassed by regular classes.
    - Abstract classes can define an interface, or methods that must be implemented by its subclasses.
    '''
    # Abstract classes are not designed to be instantiated, only to be subclassed.

    # decorator for abstract methods
    @abc.abstractmethod
    def set_val(self, input):
        """Set the value in the instance."""
        return

    @abc.abstractmethod
    def get_val(self):
        """Get and return a value from the instance..."""
        return

# Inheriting from the above abstract class
class MyClass(GetterSetter):
    # overriding the methods of GetterSetter
    def set_val(self, input):
        self.val = input

    def get_val(self):
        return self.val

# Instantiate
x = MyClass()
print(x)  # prints the instance: <__main__.MyClass object at 0x10218ee48>

x = GetterSetter()  # throws an error: abstract classes can't be instantiated
Python 2.x

import abc

class GetterSetter(object):
    # the metaclass controls how the class itself is created
    __metaclass__ = abc.ABCMeta

    # decorator for abstract methods
    @abc.abstractmethod
    def set_val(self, input):
        """Set the value in the instance."""
        return

    @abc.abstractmethod
    def get_val(self):
        """Get and return a value from the instance..."""
        return

# Inheriting from the above abstract class
class MyClass(GetterSetter):
    # overriding the methods of GetterSetter
    def set_val(self, input):
        self.val = input

    def get_val(self):
        return self.val

# Instantiate
x = MyClass()
print(x)

x = GetterSetter()  # throws an error: abstract classes can't be instantiated
Check my answer here.
I'm overriding a ModelAdmin method thus:
def response_change(self, request, obj):
    # alter redirect location if 'source' is found in GET
    response = super(JobOptions, self).response_change(request, obj)
    source = request.GET.get('source', None)
    if source:
        response['location'] = source
    return response
Rather than repeat this on every model, I'd like to make it a mixin.
If I do:
def RedirectMixin(admin.ModelAdmin)
and then:
def MyModel(admin.ModelAdmin, RedirectMixin)
then I get an MRO error.
However if RedirectMixin doesn't inherit from admin.ModelAdmin then the method doesn't get called.
An additional problem is how to generalise the super() call so it doesn't have the superclass hard-coded in.
Firstly, I presume you mean class rather than def in your examples.
Anyway, the right way to use a mixin is to put it first in the list of classes to inherit from. So:
class RedirectMixin(object):
and
class MyModelAdmin(RedirectMixin, admin.ModelAdmin):
This is because Python looks through all parent classes in order of declaration to find the method, and calls the first one it finds.
As for super, this shouldn't be mentioning the superclass at all - that's the whole point of it. It should reference the current class:
return super(MyModelAdmin, self).__init__(*args, **kwargs)
or whatever.
Edit after comments: Yes, the mixin should refer to its own class in the super call. Consider the following:
In [1]: class BaseClass(object):
   ...:     def my_function(self):
   ...:         print 'base my_function'
   ...:

In [2]: class Mixin(object):
   ...:     def my_function(self):
   ...:         print 'mixin my_function'
   ...:         super(Mixin, self).my_function()
   ...:

In [3]: class MyDerivedClass(Mixin, BaseClass):
   ...:     pass
   ...:
Now if you instantiate the subclass and call its my_function method, the MRO will happen as you expect, even though Mixin doesn't inherit from BaseClass:
In [4]: m = MyDerivedClass()

In [5]: m.my_function()
mixin my_function
base my_function
The error you mention comes if you fail to make Mixin a descendant of object - if you don't, it's an old-style class, which doesn't support the use of super.
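For example, a quick sketch of what happens with an old-style mixin in Python 2 (the class name is mine):

class OldMixin:                          # no 'object' base: an old-style class in Python 2
    def my_function(self):
        # super() only works with new-style classes, so this line raises TypeError in Python 2
        return super(OldMixin, self).my_function()

OldMixin().my_function()                 # TypeError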
I use the snippet at http://www.djangosnippets.org/snippets/1034/ for my model inheritance. It worked fine at first. However, after I delete some elements in the database, the code goes wrong.
While debugging, I found that the problem resides in the as_leaf_class method.
In the following code:
if (model == Meal):
    return self
return model.objects.get(id=self.id)
the last line will raise an exception when the element has been deleted.
Could anyone give a solution for this?
Model inheritance with content type and inheritance-aware manager
from django.db import models
from django.contrib.contenttypes.models import ContentType
from django.db.models.query import QuerySet

class SubclassingQuerySet(QuerySet):
    def __getitem__(self, k):
        result = super(SubclassingQuerySet, self).__getitem__(k)
        if isinstance(result, models.Model):
            return result.as_leaf_class()
        else:
            return result

    def __iter__(self):
        for item in super(SubclassingQuerySet, self).__iter__():
            yield item.as_leaf_class()

class MealManager(models.Manager):
    def get_query_set(self):
        return SubclassingQuerySet(self.model)

class Meal(models.Model):
    name = models.TextField(max_length=100)
    content_type = models.ForeignKey(ContentType, editable=False, null=True)
    objects = MealManager()

    def save(self, *args, **kwargs):
        if not self.content_type:
            self.content_type = ContentType.objects.get_for_model(self.__class__)
        super(Meal, self).save(*args, **kwargs)

    def as_leaf_class(self):
        content_type = self.content_type
        model = content_type.model_class()
        if model == Meal:
            return self
        return model.objects.get(id=self.id)

class Salad(Meal):
    too_leafy = models.BooleanField(default=False)
    objects = MealManager()
I don't know if that snippet is still relevant now that you can use abstract base classes.
This lets you declare a model that is not a db table but that other models can inherit from.
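For example, a minimal sketch of the abstract-base-class approach applied to these models (note the trade-off: with abstract = True there is no Meal table and no Meal.objects queryset spanning all meal types, so it only fits if you never need to query meals generically):

from django.db import models

class Meal(models.Model):
    name = models.TextField(max_length=100)

    class Meta:
        abstract = True   # no table is created for Meal; it only provides shared fields

class Salad(Meal):
    too_leafy = models.BooleanField(default=False)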
First answer: Why are you trying to call as_leaf_class on a deleted object? If it hurts when you do that, don't do it.
The second answer is that you could wrap the failing line with try...except Meal.DoesNotExist, and return None or self or something.
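For example (a sketch; whether to return None or self depends on what your callers expect):

def as_leaf_class(self):
    model = self.content_type.model_class()
    if model == Meal:
        return self
    try:
        return model.objects.get(id=self.id)
    except Meal.DoesNotExist:
        return None   # or `return self`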