Ensure a unique instance is created when a class object is initialized - python-3.x

What would be a good way to ensure that only a unique instance is created when a class object is initialized? Please be specific in your answers.
For example, for the following class I want to make sure that when I create a StateMachineSystems instance with 'TEST', any later created object (y in this case) points to the x created previously.
class StateMachineSystems:
    def __init__(self, system_name):
        self.system_name = system_name

    def set_sequence_number(self, sequence_number):
        self.sequence_number = sequence_number

    def get_sequence_number(self):
        return self.sequence_number

    def get_system_name(self):
        return self.system_name

x = StateMachineSystems('TEST')
y = StateMachineSystems('TEST')
if x == y:
    print("single instance")
....
single instance

After looking at the singleton design pattern, I think this is what works for me.
class Singleton(type):
    _instances = {}

    def __call__(cls, *args, **kwargs):
        if cls not in cls._instances:
            cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs)
        return cls._instances[cls]

class StateMachineSystems(metaclass=Singleton):
    def __init__(self, system_name):
        self.system_name = system_name

    def set_sequence_number(self, sequence_number):
        self.sequence_number = sequence_number

    def get_sequence_number(self):
        return self.sequence_number

    def get_system_name(self):
        return self.system_name

x = StateMachineSystems('TEST')
x.set_sequence_number('1234')
print(x.get_sequence_number())
y = StateMachineSystems('TEST')
print(y.get_sequence_number())
y.set_sequence_number('4321')
print(y.get_sequence_number())
print(x.get_sequence_number())
............
1234
1234
4321
4321
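Note that this metaclass keys its cache on the class alone, so every StateMachineSystems(...) call returns the same object regardless of the name passed in. If you ever need one shared instance per system name (so that 'TEST' and 'PROD' stay distinct), a minimal sketch of a variant that also keys the cache on the constructor argument could look like this (the PerNameSingleton name is just illustrative):

class PerNameSingleton(type):
    """Cache one instance per (class, first positional argument)."""
    _instances = {}

    def __call__(cls, *args, **kwargs):
        key = (cls, args[0] if args else None)
        if key not in cls._instances:
            cls._instances[key] = super().__call__(*args, **kwargs)
        return cls._instances[key]

class StateMachineSystems(metaclass=PerNameSingleton):
    def __init__(self, system_name):
        self.system_name = system_name

x = StateMachineSystems('TEST')
y = StateMachineSystems('TEST')
z = StateMachineSystems('PROD')
print(x is y)  # True  - same name, same instance
print(x is z)  # False - different name, different instance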


Python (+Django): using @property in success_url fails, while using it with get_success_url is OK (class-based view)

I found a workaround for my issue, but I need to know why the first case doesn't work.
I need to pass a parameter (reman_pk) to my view, but when I try:
class RepairCreateView(LoginRequiredMixin, CreateView):
    @property
    def reman_pk(self):
        return int(self.kwargs['reman_pk'])
    [...]
    success_url = reverse_lazy(
        'reman:update-reman', kwargs={'pk': reman_pk})
    [...]
... I got an error:
django.urls.exceptions.NoReverseMatch: Reverse for 'update-reman' with keyword arguments '{'pk': <property object at 0x10c20bbd0>}' not found. 1 pattern(s) tried: ['reman/update/(?P<pk>[0-9]+)/$']
But when, in the same class-based view, I use:
def get_success_url(self, **kwargs):
    if kwargs != None:
        return reverse_lazy('reman:update-reman', kwargs={'pk': self.reman_pk})
... it's OK: the int is passed correctly in my URL.
I tried passing int(reman_pk) in the first approach ... no better.
I've already used @property in the past and always got a value (int/str), not a property object.
EDIT (FULL views.py)
The success_url = reverse_lazy(... assignment is commented out; I must use def get_success_url(... instead, otherwise I get the above-mentioned error.
class RepairCreateView(LoginRequiredMixin, CreateView):
    @property
    def reman_pk(self):
        return int(self.kwargs['reman_pk'])

    # success_url = reverse_lazy(
    #     'reman:repairs-list', kwargs={'pk': reman_pk})
    success_message = "Nouvelle réparation créée"
    form_class = RepairCreateForm
    template_name = 'reman/repair_create_form.html'

    def get_context_data(self, *args, **kwargs):
        context = super(RepairCreateView, self).get_context_data(
            *args, **kwargs)
        context['title'] = 'Nouveau Repair'
        context['pk'] = self.reman_pk
        return context

    def get_initial(self):
        reman = Reman.objects.get(pk=self.reman_pk)
        return {'reman': reman}

    def get_success_url(self, **kwargs):
        return reverse_lazy('reman:repairs-list', kwargs={'pk': self.reman_pk})
This is not related to Django; it is related to Python in general. When you want to access a class property from within the class, you always have to go through self!
class Tree:
    fruits = 5

    @property
    def leafes(self):
        return self.fruits * 5

    def show_tree(self):
        print(self.fruits)
        print(self.leafes)
        print(leafes)  # THIS LINE WOULD ERROR
Edit after comment from the OP
I don't know how to phrase this properly, but this remains a problem related to Python, not Django. The reason is how classes work.
You probably know the def __init__(self): method. It is called when the class gets instantiated. Once that method has run, your instance can use all the self attributes. But class attributes like my fruits = 5 are assigned even before that def __init__(self) method is called, so none of the assignments directly inside the body of the class have self available yet.
class Tree:
    fruits = 5

    def __init__(self):
        self.twigs = 10
        self.weight = self.twigs + self.fruits  # THIS WORKS

class Tree:
    fruits = 5
    weight = self.twigs + fruits  # THIS DOES NOT WORK

    def __init__(self):
        self.twigs = 10
The last example does not work because, at the moment you want to assign weight = self.twigs + fruits, your class's __init__ method has not been called yet, so you cannot use self at that place.
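That is exactly what happens in the Django view above: the class-level success_url = reverse_lazy(..., kwargs={'pk': reman_pk}) assignment runs while the class body is being executed, when reman_pk is still the bare property object, whereas get_success_url() runs later on an instance, where self.reman_pk triggers the property and returns the int. A minimal sketch of that difference (standalone Python, no Django, with made-up names):

class View:
    @property
    def reman_pk(self):
        return 42

    # Evaluated while the class body runs: reman_pk is still the bare property object here.
    class_level = reman_pk

v = View()
print(View.__dict__['class_level'])  # <property object at 0x...>
print(v.reman_pk)                    # 42 - the property only fires on instance access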

How could I create a docstring decorator in the presence of properties?

I have a collection of ever more specialized classes which correspond to collections of the same kind of data (temperature, density, etc.) but for different drifts; for example, one subclass has dimensions (nx, ny) and a different subclass has dimensions (ncv), and I want to reflect that in the docstrings, so as to have better documentation using Sphinx.
After reading many very useful threads here on Stack Overflow, I have arrived at this model:
import numpy as np
from functools import wraps

def class_decorator(cls):
    import ipdb; ipdb.set_trace()
    clsdict = {}
    mro = cls.mro()
    mro.reverse()
    for tmp in mro[1:]:  ## Ignore object class parent.
        clsdict.update(tmp.__dict__)
    for name, method in clsdict.items():
        if hasattr(method, '__og_doc__'):
            try:
                method.__doc__ = method.__og_doc__.format(**clsdict)
            except:
                pass
        else:
            try:
                method.__og_doc__ = method.__doc__
                method.__doc__ = method.__doc__.format(**clsdict)
            except:
                pass
    return cls

def mark_documentation(fn):
    if not hasattr(fn, '__og_doc__'):
        try:
            fn.__og_doc__ = fn.__doc__
        except:
            pass
    @wraps(fn)
    def wrapped(*args, **kwargs):
        return fn(*args, **kwargs)
    return wrapped

def documented_property(fn):
    if not hasattr(fn, '__og_doc__'):
        try:
            fn.__og_doc__ = fn.__doc__
        except:
            pass
    @wraps(fn)
    def wrapped(*args, **kwargs):
        return fn(*args, **kwargs)
    prp = property(wrapped)
    prp.__og_doc__ = fn.__og_doc__
    return prp
@class_decorator
class Base(object):
    _GRID_DIM = 'nx, ny'
    _TYPE = 'BaseData'

    def __init__(self, name):
        self.name = name

    def shape(self):
        """ This docstring contains the type '{_TYPE}' of class."""
        print('Simple')

    def operation(self, a, b, oper=np.sum, **kwargs):
        """ Test for functions with args and kwargs in {_TYPE}"""
        return oper([a, b])

    @classmethod
    def help(cls, var):
        try:
            print(getattr(cls, var).__doc__)
        except:
            print("No docstring yet.")

@class_decorator
class Advanced(Base):
    _GRID_DIM = 'ncv'
    _TYPE = 'AdvancedData'

    def __init__(self, name):
        super().__init__(name)

    @property
    @mark_documentation
    # @documented_property
    def arkansas(self):
        """({_GRID_DIM}, ns): Size of Arkansaw."""
        return 'Yeah'
I am aiming to get the correctly formatted docstring when I call the help method or use Sphinx, so that:
> adv = Advanced('ADV')
> adv.help("arkansas")
(ncv, ns): Size of Arkansaw.
> adv.help("operation")
Test for functions with args and kwargs in AdvancedData
I have managed to make it work so far, except for properties, because I assigned __og_doc__ to the function, but the property does not have that attribute. My last attempt at monkey-patching this, documented_property, fails because property is immutable (as expected), and I cannot come up with any way around this roadblock.
Is there any way around this problem?
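One possible way around that roadblock (a sketch under assumptions, not a tested drop-in for the code above): since plain functions do accept arbitrary attributes, keep the original docstring on the getter function and have the class decorator rebuild any property it finds with a freshly formatted doc, instead of trying to mutate the property in place. The format_properties name below is made up:

def format_properties(cls):
    """Rebuild each property on cls with its docstring formatted against the class dict."""
    clsdict = {}
    for tmp in reversed(cls.mro()[:-1]):  # skip object, base classes first
        clsdict.update(tmp.__dict__)
    for name, member in clsdict.items():
        if isinstance(member, property) and member.fget is not None:
            fget = member.fget
            og_doc = getattr(fget, '__og_doc__', fget.__doc__)
            if og_doc is None:
                continue
            fget.__og_doc__ = og_doc  # functions accept arbitrary attributes
            # property objects are immutable, so build a new one with the formatted doc
            setattr(cls, name, property(fget, member.fset, member.fdel,
                                        og_doc.format(**clsdict)))
    return cls

@format_properties
class Example:
    _GRID_DIM = 'ncv'

    @property
    def arkansas(self):
        """({_GRID_DIM}, ns): Size of Arkansaw."""
        return 'Yeah'

print(Example.arkansas.__doc__)  # (ncv, ns): Size of Arkansaw.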

Creating a child class from a parent method in python

I am trying to make a class that has a bunch of children which all have their own respective methods but share common methods through the parent. The problem is that I need to create an instance of the child class in a parent method, but I am not sure how to go about it.
My code so far looks like this:
def filterAttribute(self, attribute, value):
    newlist = []
    for thing in self._things:
        if thing._attributes[attribute] == value:
            newlist.append(thing)
    return self.__init__(newlist)
The class constructor takes in a list as its sole argument. Does anyone know if there is a standard way of doing this? My code is returning a NoneType object.
Here are a few examples of classes I have made
This is the parent class:
class _DataGroup(object):
    def __init__(self, things=None):
        self._things = things

    def __iter__(self):
        for x in self._things:
            yield x

    def __getitem__(self, key):
        return self._things[key]

    def __len__(self):
        return len(self._things)

    def extend(self, datagroup):
        if(isinstance(datagroup, self.__class__)):
            self._things.extend(datagroup._things)
            self._things = list(set(self._things))

    def filterAttribute(self, attribute, value):
        newlist = []
        for thing in self._things:
            if thing._attributes[attribute] == value:
                newlist.append(thing)
        #return self.__init__(newlist)
        return self.__init__(newlist)
This is one of the child classes:
class _AuthorGroup(_DataGroup):
    def __init__(self, things=None):
        self._things = things

    def getIDs(self):
        return [x.id for x in self._things]

    def getNames(self):
        return [x.name for x in self._things]

    def getWDs(self):
        return [x.wd for x in self._things]

    def getUrns(self):
        return [x.urn for x in self._things]

    def filterNames(self, names, incl_none=False):
        newlist = []
        for thing in self._things:
            if((thing is not None or (thing is None and incl_none)) and thing.name in names):
                newlist.append(thing)
        return _AuthorGroup(newlist)
The functionality I am looking for is that I can use the parent class's methods with the child classes and have them create instances of the child classes instead of the overall _DataGroup parent class.
So, if I correctly understand what you are trying to accomplish:
You want a base class 'DataGroup' which has a set of defined attributes and methods;
You want one or more child classes with the ability to inherit both methods and attributes from the base class, as well as the ability to override base class methods if necessary; and
You want to invoke the child class without also having to manually invoke the base class.
If this in fact is your problem, this is how I would proceed.
Note: I have modified several functions, since I think you have several other issues with your code. For example, in the base class self._things is set up as a list, but in the functions __getitem__ and filterAttribute you assume self._things is a dictionary structure. I have modified the functions so that all of them assume a dict structure for self._things.
class _DataGroup:
    def __init__(self, things=None):
        if things is None:
            self._things = dict()  # Sets up a default empty dict
        else:
            self._things = things

    def __iter__(self):
        for x in self._things.keys():
            yield x

    def __len__(self):
        return len(self._things)

    def extend(self, datagroup):
        for k, v in datagroup._things.items():
            nv = self._things.pop(k, [])
            nv.append(v)
            self._things[k] = nv

# This class utilizes the methods and attributes of DataGroup
# and adds new methods, unique to the child class
class AttributeGroup(_DataGroup):
    def __init__(self, things=None):
        super().__init__(things)

    def getIDs(self):
        return [x for x in self._things]

    def getNames(self):
        return [x.name for x in self._things]

    def getWDs(self):
        return [x.wd for x in self._things]

    def getUrns(self):
        return [x.urn for x in self._things]

# This class over-rides a DataGroup method and adds a new attribute
class NewChild(_DataGroup):
    def __init__(self, newAttrib, things=None):
        self._newattrib = newAttrib
        super().__init__(things)

    def __len__(self):
        return max(len(self._newattrib), len(self._things))
These examples are simplified, since I am not absolutely sure of what you really want.
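As a side note on the NoneType result mentioned in the question: __init__ always returns None, so return self.__init__(newlist) can never hand back a new object. A common pattern (sketched here on a simplified group class, not the exact classes above) is to build the new instance through type(self), so that the inherited method called on a child returns an instance of that child:

class DataGroup:
    def __init__(self, things=None):
        self._things = things or []

    def filter_attribute(self, attribute, value):
        newlist = [t for t in self._things if getattr(t, attribute) == value]
        # type(self) is the *runtime* class, so a child calling this
        # inherited method gets back an instance of itself.
        return type(self)(newlist)

class AuthorGroup(DataGroup):
    pass

group = AuthorGroup([])
print(type(group.filter_attribute('name', 'x')))  # <class '__main__.AuthorGroup'>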

Why doesn't the __get__ method of the metaclass get called?

I've got this class Op:
class Pipeable(type):
    def __get__(self, instance, owner):
        def pipe_within(*args, **kwargs):
            return self(*args, op=instance, **kwargs)
        print('piping...')
        return pipe_within

class Op(metaclass=Pipeable):
    def __init__(self, op=None):
        if op is not None:
            print('piped!')
            self.op = op
        self.__dict__[type(self).__name__] = type(self)
I expect the Op class itself to work as a descriptor, because its metaclass has a __get__ method, but the code
op = Op().Op()
doesn't invoke Op.__get__. Why?
It is hard to tell what you really want there, but a metaclass that adds a property to itself for every new class may work better for whatever you want.
As far as I can understand your code, older classes won't be populated with references to the newer ones as you create new instances of the metaclass (which, in turn, do get references to the others).
On second thought, dynamically creating properties inside __new__ seems hacky - but you can just implement the metaclass __getattr__ and __dir__ methods for much less convoluted code.
The simple version works for classes, but not for their instances, because instances do not trigger the __getattr__ on the metaclass:
class Pipeable(type):
    _classes = {}

    def __new__(metacls, name, bases, namespace, **kwds):
        cls = type.__new__(metacls, name, bases, namespace)
        metacls._classes[name] = cls
        return cls

    def __getattr__(cls, attr):
        classes = cls.__class__._classes
        if attr not in classes:
            raise AttributeError
        def pipe_within(*args, **kwargs):
            return cls(*args, op=classes[attr], **kwargs)
        print('piping...')
        return pipe_within

    def __dir__(cls):
        regular = super().__dir__()
        return sorted(regular + list(cls.__class__._classes.keys()))

class Op(metaclass=Pipeable):
    def __init__(self, op=None):
        if op is not None:
            print('piped!')
            self.op = op

Op.Op()
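For what it's worth, the limitation described above would show up roughly like this (assumed behaviour of this simple version, since only the metaclass defines __getattr__):

Op.Op()      # prints "piping..." and then "piped!" - the lookup on the class works
Op().Op()    # raises AttributeError - instances bypass the metaclass __getattr__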
(Note as well that, over time, I picked this parameter naming convention to use on metaclasses: since most of their methods take the class created with them in place of what is "self" in ordinary classes, I find this naming easier to follow. It is not mandatory, and not necessarily "correct", though.)
But then, we can make it work for instances by creating __dir__ and __getattr__ directly on the created classes as well. The catch is that the classes you are creating may already have a __getattr__ or a custom __dir__, possibly in their superclasses, and those have to be wrapped. And then, we don't want to re-wrap our own __dir__ and __getattr__, so some extra care is needed:
class Pipeable(type):
    _classes = {}

    def __new__(metacls, name, bases, namespace, **kwds):
        cls = type.__new__(metacls, name, bases, namespace)
        metacls._classes[name] = cls

        original__getattr__ = getattr(cls, "__getattr__", None)
        if hasattr(original__getattr__, "_metapipping"):
            # Do not wrap our own (metaclass) implementation of __getattr__
            original__getattr__ = None
        original__dir__ = getattr(cls, "__dir__")  # Exists in "object", so it is always found.

        # These two functions have to be nested so they can get the
        # values of the original "__getattr__" and "__dir__" from
        # the closure. These values could be set on the created class, alternatively.
        def __getattr__(self, attr):
            if original__getattr__:
                # If it is desired that normal attribute lookup have
                # less precedence than these injected operators,
                # move this "if" block down.
                try:
                    value = original__getattr__(self, attr)
                except AttributeError:
                    pass
                else:
                    return value
            classes = self.__class__.__class__._classes
            if attr not in classes:
                raise AttributeError
            def pipe_within(*args, **kwargs):
                return cls(*args, op=classes[attr], **kwargs)
            print('piping...')
            return pipe_within
        __getattr__._pipping = True

        def __dir__(self):
            regular = original__dir__(self)
            return sorted(regular + list(self.__class__.__class__._classes.keys()))
        __dir__._pipping = True

        if not original__getattr__ or not hasattr(original__getattr__, "_pipping"):
            cls.__getattr__ = __getattr__
        if not hasattr(original__dir__, "_pipping"):
            cls.__dir__ = __dir__

        return cls

    def __getattr__(cls, attr):
        classes = cls.__class__._classes
        if attr not in classes:
            raise AttributeError
        def pipe_within(*args, **kwargs):
            return cls(*args, op=classes[attr], **kwargs)
        print('piping...')
        return pipe_within
    __getattr__._metapipping = True

    def __dir__(cls):
        regular = super().__dir__()
        return sorted(regular + list(cls.__class__._classes.keys()))
class Op(metaclass=Pipeable):
    def __init__(self, op=None):
        if op is not None:
            print('piped!')

Op().Op()
So, this ended up being lengthy - but it "does the right thing" by ensuring all classes and instances in the hierarchy can see each other, regardless of creation order.
Also, what makes up most of the complexity is correctly wrapping other possible customizations of __getattr__ and __dir__ in the class hierarchy - if you don't have any customization of those, this can be an order of magnitude simpler:
class Pipeable(type):
    _classes = {}

    def __new__(metacls, name, bases, namespace, **kwds):
        cls = type.__new__(metacls, name, bases, namespace)
        metacls._classes[name] = cls
        original__dir__ = getattr(cls, "__dir__")

        def __getattr__(self, attr):
            classes = self.__class__.__class__._classes
            if attr not in classes:
                raise AttributeError
            def pipe_within(*args, **kwargs):
                return cls(*args, op=classes[attr], **kwargs)
            print('piping...')
            return pipe_within

        def __dir__(self):
            regular = original__dir__(self)
            return sorted(regular + list(self.__class__.__class__._classes.keys()))

        cls.__getattr__ = __getattr__
        cls.__dir__ = __dir__
        return cls

    def __getattr__(cls, attr):
        classes = cls.__class__._classes
        if attr not in classes:
            raise AttributeError
        def pipe_within(*args, **kwargs):
            return cls(*args, op=classes[attr], **kwargs)
        print('piping...')
        return pipe_within

    def __dir__(cls):
        regular = super().__dir__()
        return sorted(regular + list(cls.__class__._classes.keys()))
For a descriptor to work, it must be a class attribute, not an instance attribute.
This code does what was desired:
class Pipeable(type):
    _instances = {}

    def __new__(cls, name, bases, namespace, **kwds):
        namespace.update(cls._instances)
        instance = type.__new__(cls, name, bases, namespace)
        cls._instances[name] = instance
        for inst in cls._instances.values():
            setattr(inst, name, instance)
        return instance

    def __get__(self, instance, owner):
        def pipe_within(*args, **kwargs):
            return self(*args, op=instance, **kwargs)
        print('piping...')
        return pipe_within

class Op(metaclass=Pipeable):
    def __init__(self, op=None):
        if op is not None:
            print('piped!')
            self.op = op

Op().Op()
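A stripped-down illustration of that rule, separate from the Op example (the Ten descriptor below is made up): the descriptor protocol is only consulted when the descriptor is found on the class, so the same object stored in an instance's __dict__ is returned untouched.

class Ten:
    def __get__(self, instance, owner):
        return 10

class OnClass:
    x = Ten()           # class attribute: __get__ is invoked on lookup

class OnInstance:
    def __init__(self):
        self.x = Ten()  # instance attribute: __get__ is never invoked

print(OnClass().x)      # 10
print(OnInstance().x)   # <__main__.Ten object at 0x...>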

Circular dependency in the class constructor

I have the following class:
class CustomDictionary(dict):
    def __init__(self, val, *args, **kwargs):
        self.wk = val
        super(dict, self).__init__()

    def __setattr__(self, key, value):
        if key in self.wk:
            raise Exception("Wrong key", "")
        key = key.replace(" ", "_")
        self.__dict__[key] = value

def main():
    wrong_keys = ("r23", "fwfew", "s43t")
    dictionary = CustomDictionary(wrong_keys)
    dictionary["a1"] = 1
As you can see, I create the attribute wk in the constructor, and I have a __setattr__ method in which I work with the attribute wk. However, I get: 'CustomDictionary' object has no attribute 'wk'.
__setattr__ is a pain that way, because it is called for every assignment to an instance member - including the self.wk = val in your __init__, which runs before wk exists. Probably the easiest fix for your situation is to define an empty wk at class level, before __init__:
class CustomDictionary(dict):
    wk = []

    def __init__(self, val, *args, **kwargs):
        self.wk = val
        ...
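An alternative that avoids the class-level default (a sketch of another common pattern, not part of the answer above) is to bypass the custom __setattr__ just for the wk assignment by delegating to the base implementation:

class CustomDictionary(dict):
    def __init__(self, val, *args, **kwargs):
        # Bypass our own __setattr__ so wk can be set before it exists.
        object.__setattr__(self, "wk", val)
        super().__init__()

    def __setattr__(self, key, value):
        if key in self.wk:
            raise Exception("Wrong key", "")
        key = key.replace(" ", "_")
        self.__dict__[key] = value

d = CustomDictionary(("r23", "fwfew", "s43t"))
d.a1 = 1      # goes through __setattr__
print(d.a1)   # 1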
