Call unbound method, get class that it was accessed through? - python-3.x

In Python 3, instance methods can be called in two ways: obj.ix() or Foo.ix(obj). Setting aside whether it is a good idea or not: when using the latter, is there a way to get the class that the instance method was accessed through?
class Foo(object):
    @classmethod
    def cx(cls, obj):
        print(cls.X)

    def ix(self):
        # Any way to get the class that ix was accessed through?
        print(self.X)

class AFoo(Foo):
    X = "A"

class BFoo(Foo):
    X = "B"

a = AFoo()
AFoo.cx(a)  # Prints "A"
AFoo.ix(a)  # Prints "A"

b = BFoo()
BFoo.cx(b)  # Prints "B"
BFoo.ix(b)  # Prints "B"

AFoo.cx(b)  # Prints "A"
AFoo.ix(b)  # Prints "B" -> I would like "A", like the classmethod.
BFoo.cx(a)  # Prints "B"
BFoo.ix(a)  # Prints "A" -> I would like "B", like the classmethod.
As you can see, the desired behavior is trivial to achieve with a class method, but there does not appear to be a way to do the same with an instance method.

Nope. This information is not preserved. If you want that info, you'd have to write a custom descriptor to implement a new method type. For example:
import functools

class CrazyMethod:
    def __init__(self, func):
        self.func = func

    def __get__(self, instance, owner):
        if instance is None:
            return functools.partial(self.func, owner)
        return functools.partial(self.func, instance, instance)

class Foo:
    @CrazyMethod
    def foo(accessed_through, self):
        print(accessed_through)

class Bar(Foo):
    pass

obj = Bar()
obj.foo()     # <__main__.Bar object at 0xb727dd4c>
Bar.foo(obj)  # <class '__main__.Bar'>
Foo.foo(obj)  # <class '__main__.Foo'>
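If the goal is to always receive the class the method was looked up on (as the question asks for), a small variant of the same descriptor idea can pass owner in both branches; for instance access, owner is simply type(instance). This is my own tweak on the answer above, not part of it:

import functools

class ClassAwareMethod:
    def __init__(self, func):
        self.func = func

    def __get__(self, instance, owner):
        # owner is the class the attribute was looked up on;
        # for instance access it is type(instance)
        if instance is None:
            return functools.partial(self.func, owner)
        return functools.partial(self.func, owner, instance)

class Base:
    @ClassAwareMethod
    def foo(accessed_through, self):
        print(accessed_through)

class Child(Base):
    pass

obj = Child()
obj.foo()       # <class '__main__.Child'>
Child.foo(obj)  # <class '__main__.Child'>
Base.foo(obj)   # <class '__main__.Base'>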

I have already accepted user2357112's answer, but just in case anyone is interested, I found another way to do it (based on "A class method which behaves differently when called as an instance method?"):
import types

class Foo(object):
    @classmethod
    def x(cls, obj):
        print(cls.X)

    def __init__(self):
        self.x = types.MethodType(type(self).x, self)

class AFoo(Foo):
    X = "A"

class BFoo(Foo):
    X = "B"

a = AFoo()
b = BFoo()
a.x()      # Prints "A"
AFoo.x(a)  # Prints "A"
AFoo.x(b)  # Prints "A"
b.x()      # Prints "B"
BFoo.x(b)  # Prints "B"
BFoo.x(a)  # Prints "B"
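The trick works because __init__ stores a bound method on each instance: a.x is found in the instance __dict__ and shadows the classmethod, while AFoo.x / BFoo.x still go through the classmethod on the class. A quick check (my own addition) makes the shadowing visible:

print('x' in a.__dict__)  # True -> the instance carries its own bound method
print(Foo.__dict__['x'])  # the classmethod object is still defined on the class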

Related

dynamic inheritance with type and super

I'm looking for a way to dynamically inherit a parent class with its attributes and methods, by using type for class creation and super for inheritance, like so:
class A:
    def __init__(self, a, b):
        self.a = a
        self.b = b

    def some_method(self, q):
        return (self.a + self.b) ** q

def B_init(self, **kwargs):
    super().__init__(**kwargs)

def another_method(self):
    return 1

def class_B_factory(parent_class):
    return type(
        'B',
        (parent_class, some_other_parent_class),
        {'__init__': B_init,
         'another_method': another_method}
    )
And then be able to call...
model = class_B_factory(A)(a=1, b=5)
print(model.some_method(2))  # outputs (1 + 5)**2 = 36
I'm not sure how to proceed. I don't think I'll need a custom metaclass since I'm pretty sure you can't call the parent class' __init__ method while also creating self in the process. I also tried overriding the default __init__ method outside the scope of class_B_factory like so:
def class_B_factory(parent_class):
    return type(
        'B',
        (parent_class, some_other_parent_class),
        {'another_method': another_method}
    )

B = class_B_factory(A)

def B_init(self, **kwargs):
    super(B, self).__init__(**kwargs)

B.__init__ = B_init

model = B(a=1, b=5)
because I figured type doesn't need __init__ right away, as it is only needed during instantiation. But then I get a TypeError: __init__() got an unexpected keyword argument error, so it didn't work, and it's not clean anyway.
EDIT: I tried defining the methods outside the factory as follows, but I am still unsuccessful. Not sure how to fix it; maybe Python has trouble instantiating?
from functools import partial
from inspect import signature

class A:
    ...

def B_init(self, produced_class=None, **kwargs):
    super(produced_class, self).__init__(**kwargs)

def another_method(self, q, parent_class=None):
    if parent_class is not None:
        # I expect any parent_class passed in to have a method called some_method
        return 3 * parent_class.some_method(self, q)
    return 1

def class_B_factory(parent_class, additional_methods):
    methods = {}
    for name, method in additional_methods.items():
        if "parent_class" in signature(method).parameters:
            # freeze the parent_class argument, which is a cool feature
            method = partial(method, parent_class=parent_class)
        methods[name] = method
    newcls = type(
        'B',
        (parent_class,),
        methods  # would not contain B_init
    )
    # freeze the produced class that I am trying to fabricate into B_init here
    newcls.__init__ = partial(B_init, produced_class=newcls)
    return newcls

model = class_B_factory(parent_class=A, additional_methods={"another_method": another_method})
print(signature(model.__init__).parameters)  # displays OrderedDict([('self', <Parameter "self">), ...]) so it contains self!
some_instance_of_model = model(a=1, b=5)     # throws TypeError: B_init() missing 1 required positional argument: 'self'
The parameterless form of super() relies on being physically placed inside a class body: under the hood, the Python machinery then creates a __class__ cell variable referring to that "physical" class (roughly equivalent to a non-local variable) and uses it as the first argument of the super() call.
For methods not written inside a class statement, one has to pass the arguments to super() explicitly, and those are the child class and the instance (self).
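A minimal illustration of that point (my own, not from the answer): the zero-argument form only works for functions compiled inside a class statement, because that is what creates the __class__ cell.

class Works:
    def __init__(self):
        super().__init__()   # fine: the compiler provides a __class__ cell here

def does_not_work(self):
    super().__init__()       # RuntimeError: super(): __class__ cell not found

Works()                      # ok
# does_not_work(Works())     # would raise the RuntimeError above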
The easier way to do that in your code is to define the methods inside your factory function, so they can share a nonlocal variable containing the newly created class in the super() call:
def class_B_factory(parent_class):
    def B_init(self, **kwargs):
        nonlocal newcls  # <- a bit redundant, but shows how it is used here
        super(newcls, self).__init__(**kwargs)

    def another_method(self):
        return 1

    newcls = type(
        'B',
        (parent_class, some_other_parent_class),
        {'__init__': B_init,
         'another_method': another_method}
    )
    return newcls
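A quick usage sketch of that factory with the question's class A; note that some_other_parent_class is never defined in the question, so here it is stood in by a trivial placeholder mixin (my assumption):

class SomeOtherParent:
    pass

some_other_parent_class = SomeOtherParent   # placeholder for the undefined base

model = class_B_factory(A)(a=1, b=5)
print(model.some_method(2))    # (1 + 5) ** 2 = 36
print(model.another_method())  # 1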
If you have to define the methods outside of the factory function (which is likely), you have to pass the parent class into them in some form. The most straightforward way is to add a named parameter (say __class__ or "parent_class") and use functools.partial inside the factory to pass parent_class to all methods in a lazy way:
from functools import partial
from inspect import signature

class A:
    ...

# the "parent_class" argument name is given special treatment in the factory function:
def B_init(self, *, parent_class=None, **kwargs):
    super(parent_class, self).__init__(**kwargs)

def another_method(self):
    return 1

def class_B_factory(parent_class, additional_methods):  # plus whatever else you need
    methods = {}
    for name, method in additional_methods.items():
        if "parent_class" in signature(method).parameters:
            method = partial(method, parent_class=parent_class)
        # we populate another dict instead of replacing methods
        # so that we create a copy and don't modify the dict at the calling place.
        methods[name] = method
    newcls = type(
        'B',
        (parent_class, some_other_parent_class),
        methods
    )
    return newcls

new_cls = class_B_factory(A, {"__init__": B_init, "another_method": another_method})

How to initialize instance with dynamic base class

Environment: python3
I'd like to have a generic object with my custom methods and attributes, so I've created a function that internally creates a class derived from the given base class and returns an instance of it, like the following.
But I am not sure how to initialize the instance with the given value. I would appreciate any ideas.
def get_object_with_uuid(value):
    base = type(value)

    class ObjectAndUUID(base):
        def __init__(self):
            self.uuid = "dummy"
            super().__init__()

    return ObjectAndUUID()

if __name__ == '__main__':
    a = get_object_with_uuid([4, 5, 6])
    b = get_object_with_uuid((7, 8, 9))
    c = get_object_with_uuid(10)
    d = get_object_with_uuid("some_string")

    a.append(7)
    print(a)       # '[7]' -> I want this to be [4, 5, 6, 7]
    print(a.uuid)  # 'dummy' -> this is expected
    print(b)       # '()' -> I want this to be (7, 8, 9)
    print(b.uuid)  # 'dummy' -> this is expected
    print(c)       # '0' -> I want this to be 10
    print(c.uuid)  # 'dummy' -> this is expected
    print(d)       # '' -> I want this to be 'some_string'
    print(d.uuid)  # 'dummy' -> this is expected
    # and so on...
To the best of my knowledge, Python does not have a generic-type syntax for this, and from the code as shown there is no need for ObjectAndUUID to inherit from str, list or dict; I keep the inheritance for consistency only.
Passing arguments to the instance is done via __init__. Note that one of the arguments of __init__ is self, so there is no need to create it.
def get_object_with_uuid(value):
    base = type(value)

    class ObjectAndUUID(base):
        def __init__(self, val):
            self.uuid = "dummy"
            self.val = val

    return ObjectAndUUID(value)
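If you also want the printed object to match the input value (as the question's comments ask for), here is a sketch of one way to do it. This is my own variant rather than part of the answer above, and it assumes only the base types seen in the question (list, tuple, int, str):

def get_object_with_uuid(value):
    base = type(value)

    class ObjectAndUUID(base):
        def __new__(cls, val):
            if base is list:
                return super().__new__(cls)   # a list gets its items in __init__
            return super().__new__(cls, val)  # tuple/int/str carry the value here

        def __init__(self, val):
            if base is list:
                super().__init__(val)
            self.uuid = "dummy"

    return ObjectAndUUID(value)

a = get_object_with_uuid([4, 5, 6])
a.append(7)
print(a, a.uuid)                            # [4, 5, 6, 7] dummy
print(get_object_with_uuid("some_string"))  # some_string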

Can we skip explicit object creation in Python

When I do not create an object of the CP class, the operations are not captured. I am referring to the code below; can somebody help me understand why we need object creation in this case?
from abc import ABC, abstractmethod

class P(ABC):
    def __init__(self):
        super().__init__()
        self._pre_map = {}
        self._pre_order = []

    def set_pre(self, tag_value):
        index = len(self._pre_map)
        print(index)
        self._pre_map[index] = tag_value
        self._pre_order.append(index)

    def execute(self):
        pass

class CP(P):
    def __init__(self):
        super().__init__()

    def execute(self):
        self.prnt()

    def prnt(self):
        print(self._pre_map)
        print(self._pre_order)

# Working
print("\n++++++++ working")
obj = CP()
obj.set_pre("test string added")
obj.execute()

# Not working
print("\n+++++++ not working")
CP().set_pre("test string added")
CP().execute()
It produces,
++++++++working
0
{0: 'test string added'}
[0]
+++++++not working
0
{}
[]
When you call the class the second time with CP().execute(), you have created a completely new instance of the CP class. It is not going to have the text string you specified.
If you actually want it to print the values like the working one, you can make the methods in the P class return self after each call. If you did that, you could do something like this:
from abc import ABC, abstractmethod

class P(ABC):
    def __init__(self):
        super().__init__()
        self._pre_map = {}
        self._pre_order = []

    def set_pre(self, tag_value):
        index = len(self._pre_map)
        print(index)
        self._pre_map[index] = tag_value
        self._pre_order.append(index)
        # need to return self here
        return self

    def execute(self):
        pass

class CP(P):
    def __init__(self):
        super().__init__()

    def execute(self):
        self.prnt()

    def prnt(self):
        print(self._pre_map)
        print(self._pre_order)

# Working
print("\n++++++++ working")
obj = CP()
obj.set_pre("test string added")
obj.execute()

# Not working originally, but now working after returning self in the P class
print("\n+++++++ not working: but now working after returning self in the P class")
CP().set_pre("test string added").execute()
++++++++ working
0
{0: 'test string added'}
[0]
+++++++ not working: but now working after returning self in the P class
0
{0: 'test string added'}
[0]
This would print the result you want.
The reason for the difference is the fact that in the first one, you are creating an instance, and using that instance the whole way through, whereas in the second one, you are using two different instances of your class.
The two different instances cannot share their attributes, so you are unable to recall what happened. If you really don't want to use a dedicated variable, change your P class to look like this:
class P(ABC):
    ...
    def set_pre(self, tag_value):
        index = len(self._pre_map)
        print(index)
        self._pre_map[index] = tag_value
        self._pre_order.append(index)
        return self
    ...
And use CP().set_pre("test string added").execute()
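For contrast, a small check (my own addition) that makes the root cause visible: every CP() call builds a brand-new object, so state set on one instance is invisible to the next.

first = CP()
second = CP()
print(first is second)              # False -> two unrelated instances
first.set_pre("only on 'first'")
second.execute()                    # prints the empty {} and [] of 'second'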

Create a list that can be accessed from multiple classes

I'm trying to create a list that is populated in one class and read in another class. I have tried a number of things; right now I'm getting the error shown below. I show only one class where the list is read, but there will be others.
How do I do this?
ReadDatabase.py
class ReadDatabase(object):
    f_type_list = ReadDatabase.getFTypes()  # NameError: 'ReadDatabase' is not defined

    @staticmethod
    def getFTypes():
        <reads from database>
        return my_list

MyTreeView.py
from ReadDatabase import *

class MyTreeView(ttk.Treeview):
    def __init__(self, frame=None, list=[], column_headers=[]):
        for f_type in ReadDatabase.f_type_list:
            <do stuff>
You can separate it into two different classes in two different ways.
Example 1: Using two classes, one class (A) creates & updates the list and the other class (B) creates an instance of A and controls it:
class A:
    """
    Creates and updates the list.
    """
    def __init__(self):
        self.my_list = []
        self._read_from_database()

    def _read_from_database(self):
        # some database update logic
        self.my_list.append(3)

class B:
    """
    Creates an instance of A and can read from it.
    """
    def __init__(self):
        self.a = A()

    def print_list(self):
        for index, element in enumerate(self.a.my_list):
            print(f"Index: {index} Element: {element}")

b_object = B()
b_object.print_list()  # Prints: Index: 0 Element: 3
or
Example 2: You can just create a method in class B and pass it the list from class A:
class A:
    """
    Creates and updates the list.
    """
    def __init__(self):
        self.my_list = []
        self._read_from_database()

    def _read_from_database(self):
        # some database update logic
        self.my_list.append(3)

class B:
    def __init__(self):
        pass

    def print_list(self, lst):
        for index, element in enumerate(lst):
            print(f"Index: {index} Element: {element}")

a_object = A()
b_object = B()
b_object.print_list(a_object.my_list)
You can also pass the entire instance of A to B for it to use if you wanted to do it that way.
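A short sketch of that third option (my own addition, reusing class A from above): hand the whole A instance to B and let B read the list through it.

class B:
    def __init__(self, a):
        self.a = a  # keep a reference to the A instance

    def print_list(self):
        for index, element in enumerate(self.a.my_list):
            print(f"Index: {index} Element: {element}")

b_object = B(A())
b_object.print_list()  # Prints: Index: 0 Element: 3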

Change Return Value of Class Instance

How can I overwrite a class instance with one of its attributes?
e.g.
class AListGenerator(list):
    def __init__(self, *args):
        self._mylist = [word for word in args if 'a' in word]
        self = self._mylist  # does nothing

>>> x = AListGenerator('mum', 'dad', 'mike', 'aaron')
>>> x
[]
>>> x._mylist
['dad', 'aaron']
How can I make x evaluate to x._mylist, so that there's no need to access the _mylist attribute?
>>> x
['dad', 'aaron']
To clarify, I do not want/need a __repr__; I want to be able to do things like:
x.append('ryan') and have x then be ['dad', 'aaron', 'ryan'], and not just ['ryan'].
You are inheriting from list, so all of its methods are already accessible in your class; you can already do x.append(stuff).
You should (probably always) initialize the base class before doing anything else in your __init__ method:
class Stuff(list):
    def __init__(self, *args):
        # initialize the parent class!
        super().__init__(word.lower() for word in args)
        # you can also define your own attributes and methods
        self.random_ID = 5  # chosen by fair dice roll, guaranteed to be random

x = Stuff("HELLO", "WoRlD")
And then you can print x and do with it everything you can do with a list.
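Applied to the original AListGenerator (my adaptation of the same idea, not part of the answer): initialize the list base with the filtered words and the instance itself behaves like the desired list.

class AListGenerator(list):
    def __init__(self, *args):
        # fill the list base directly instead of stashing a separate _mylist attribute
        super().__init__(word for word in args if 'a' in word)

x = AListGenerator('mum', 'dad', 'mike', 'aaron')
print(x)          # ['dad', 'aaron']
x.append('ryan')
print(x)          # ['dad', 'aaron', 'ryan']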
