How do I test for str equality using factory_boy faker method? - python-3.x

I have two factory classes; one is linked to the other through a foreign key relationship, and I was hoping to achieve some similarity between their attributes. To start with, the models look something like this:
class Track(models.Model):
    response = models.ForeignKey('Response')

    def __str__(self):
        return str(self.response)


class Response(models.Model):
    title = models.CharField(max_length=640)

    def __str__(self):
        return self.title
I should be able to access these classes as I have done below
r = Response(title='foo')
r.save()
t = Track(response=r)
t.save()
# with this I wanted to test that str(t) == t.response
The factory classes look like this:
class ResponseFactory(factory.django.DjangoModelFactory):
    class Meta:
        model = Response

    title = factory.Faker('text')


class TrackFactory(factory.django.DjangoModelFactory):
    class Meta:
        model = Track

    response = factory.SubFactory(ResponseFactory)
Below is how I have accessed these factory classes to test for str equality
track = TrackFactory() # generates random string e.g `foo`
a = str(track) # == foo
b = track.response # == foo
# however I get an assertion error with the statement below
assert a == b
Could you point out where I've gone wrong? Thank you.
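For what it's worth, the assert above compares a str with a Response instance, so it fails even when the underlying text matches. A minimal sketch of a comparison that should pass, assuming the models and factories shown above:

track = TrackFactory()

# str(track) delegates to str(track.response), which returns the title,
# so compare string with string (or with the title itself)
assert str(track) == str(track.response)
assert str(track) == track.response.title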

Related

How to share variables across Python modules when getter and setter methods are required

How can I share variables across different modules of my Python project if these variables need setter and getter methods?
The reason I need setter/getter methods is that, when getting and setting the variables, I need backwards compatibility with code that stored these variables as environment variables, so I also need to read and write them using os.environ.
Usually all I need to do is create a class with class-level variables, import the class in each module, and access it as follows:
datastore.py:

class DataStore:
    target_server_ip: str = '10.10.10.100'

consumer.py:

from project.datastore import DataStore

def print_target_server_ip():
    print(DataStore.target_server_ip)
This doesn't work (at least not in Python 3.6.5) if the variables require property getter and setter methods.
The reason is that I cannot define a class level method as a property. The following code just isn't possible:
datastore.py:

class DataStore:
    target_server_ip: str = '10.10.10.100'

    @classmethod
    @property
    def target_server_ip(cls):
        return cls.target_server_ip

    @classmethod
    @target_server_ip.setter
    def target_server_ip(cls, value):
        cls.target_server_ip = value
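As an aside (not from the original post), a class-level property can be emulated by defining the property on a metaclass; a minimal sketch, where _DataStoreMeta is an illustrative name:

class _DataStoreMeta(type):
    # properties defined on the metaclass behave like class-level
    # properties on every class that uses this metaclass
    @property
    def target_server_ip(cls):
        return cls._target_server_ip

    @target_server_ip.setter
    def target_server_ip(cls, value):
        cls._target_server_ip = value


class DataStore(metaclass=_DataStoreMeta):
    _target_server_ip: str = '10.10.10.100'

With this, DataStore.target_server_ip reads and writes the underlying _target_server_ip class attribute. The proposal below takes a different, dictionary-based approach.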
To solve this issue I propose the following code, split into two classes.
The first class works at the class level and maintains a two-level nested dictionary keyed by the datastore name and the variable name.
The second class is the datastore itself. It has the minimum required code to keep it visually simple.
This specific implementation has one known error-prone limitation: if you declare two or more variables with the same name in different datastore classes (i.e. you define a class FrameworkDatastore and another class SecondDatastore with the same variable in both), the environment will hold only one of them.
import inspect
import logging
import os
from typing import Any, Dict, Type

logger = logging.getLogger(__name__)


class _BaseDataStoreWithEnvironSupport:
    """
    Supports global storage of variables in a class-level dictionary, allowing all instances of the
    datastore to access the same values.
    This class is backward compatible: it stores the global variables in os.environ, but also
    keeps them in the class-level dictionary.
    """
    _members: Dict[str, Dict[str, Any]] = {}  # holds all the members of the datastore

    @classmethod
    def get_value(cls) -> Any:
        datastore_name: str = cls.__name__
        member_name: str = inspect.stack()[1][3]
        env_value: str = os.environ.get(member_name)
        ds_value: Any = cls._members[datastore_name][member_name]
        if env_value:
            type_ds_value: Type = type(ds_value)
            if type_ds_value is bool:
                value: bool = (env_value == True.__str__())
            else:
                value: Any = type(ds_value)(env_value)
            if value != ds_value:
                logger.warning('Environment stored value is different from Datastore value. Check your implementation')
        else:
            value: Any = ds_value
        return value

    @classmethod
    def set_value(cls, value: Any) -> None:
        datastore_name: str = cls.__name__
        name: str = inspect.stack()[1][3]
        if datastore_name not in cls._members.keys():
            cls._members[datastore_name] = {}
        cls._members[datastore_name][name] = value
        os.environ[name] = str(value)

    def validate_datastore(self):
        members = set([attr for attr in dir(self) if not callable(getattr(self, attr)) and not attr.startswith("_")])
        if members.__len__() == 0:
            raise RuntimeError(f'There are no members in the datastore or the validation runs at the start of __init__')
        datastore_name: str = self.__class__.__name__
        dict_keys: set = set(self._members[datastore_name].keys())
        if members != dict_keys:
            missing_members: set = members - dict_keys
            raise NotImplementedError(f'Datastore is missing get and set methods for members: {missing_members}')
class FrameworkDatastore(_BaseDataStoreWithEnvironSupport):
    """
    Stores all variables that are currently saved as global or os.environ variables.
    If the data stored here becomes irrelevant after a code change, or is seldom used, remove it and merge its
    functionality into other sections.
    """
    def __init__(self):
        """
        Predefine all the members of the datastore.
        Members which don't implement get/set methods will be flagged by the validate_datastore check.
        """
        self.run_traffic_validations: bool = True  # Should Ixia traffic validations run in the current suite
        # The validation of the datastore must come at the end of the __init__ method
        self.validate_datastore()

    @property
    def run_traffic_validations(self):
        return self.get_value()

    @run_traffic_validations.setter
    def run_traffic_validations(self, value: Any):
        self.set_value(value)
if __name__ == '__main__':
    # This tests the datastore code
    fd1 = FrameworkDatastore()
    fd2 = FrameworkDatastore()
    print(fd1.run_traffic_validations)
    print(fd2.run_traffic_validations)
    fd1.run_traffic_validations = False
    print(fd1.run_traffic_validations)
    print(fd2.run_traffic_validations)
    fd2.run_traffic_validations = True
    print(fd1.run_traffic_validations)
    print(fd2.run_traffic_validations)

Accessing variables from a method in class A and using it in Class B in python3.5

I have a BaseClass and two classes (Volume and testing) which inherit from the BaseClass. The class Volume uses a method driving_style from another Python module. I am trying to write another method, test_score, which needs to access variables computed in the method driving_style so it can compute further results. Those results are then used by the class testing, as shown.
from training import Accuracy
import ComputeData
import model


class BaseClass(object):
    def __init__(self, connections):
        self.Type = 'Stock'
        self.A = connections.A
        self.log = self.B.log

    def getIDs(self, assets):
        ids = pandas.Series(assets.ids, index=assets.B)
        return ids


class Volume(BaseClass):
    def __init__(self, connections):
        BaseClass.__init__(self, connections)
        self.daystrade = 30
        self.high_low = True

    def learning(self, data, rootClass):
        params.daystrade = self.daystrade
        params.high_low = self.high_low
        style = Accuracy.driving_style()
        return self.Object(data.universe, style)


class testing(BaseClass):
    def __init__(self, connections):
        BaseClass.__init__(self, connections)

    def learning(self, data, rootClass):
        test_score = Accuracy.test_score()
        return self.Object(data.universe, test_score)


def driving_style(date, modelDays, params):
    daystrade = params.daystrade
    high_low = params.high_low
    DriveDays = model.DateRange(date, params.daystrade)
    StopBy = ComputeData.instability(DriveDays)
    if high_low:
        style = ma.average(StopBy)
    else:
        style = ma.mean(StopBy)
    return style


def test_score(date, modelDays, params):
    # want to access the following from the method driving_style:
    DriveDays = ...
    StopBy = ...
    # return test_score, which I compute using the values DriveDays and StopBy,
    # and use test_score in the method learning inside the class testing,
    # which inherits some params from the BaseClass
    return test_score
You can't use locals from a call to a function that was made elsewhere and has already returned.
A bad solution is to store them as globals that you can read from later (but that get replaced on every new call). A better solution might be to return the relevant info to the caller along with the existing return values (return style, DriveDays, StopBy) and somehow get it to where it needs to go. If necessary, you could wrap the function into a class and store the computed values as attributes on an instance of the class, while keeping the return type the same.
But the best solution is probably to refactor, so the stuff you want is computed by dedicated methods that you can call directly from test_score and driving_style independently, without duplicating code or creating complicated state dependencies.
In short, basically any time you think you need to access locals from another function, you're almost certainly experiencing an XY problem.
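A minimal sketch of the "return the extra values" option, reusing the names from the question (some_scoring_function stands in for whatever computation test_score actually performs):

def driving_style(date, modelDays, params):
    DriveDays = model.DateRange(date, params.daystrade)
    StopBy = ComputeData.instability(DriveDays)
    style = ma.average(StopBy) if params.high_low else ma.mean(StopBy)
    # hand the intermediate values back alongside the original result
    return style, DriveDays, StopBy


def test_score(date, modelDays, params):
    style, DriveDays, StopBy = driving_style(date, modelDays, params)
    # compute the score from the values that used to be locals of driving_style
    return some_scoring_function(DriveDays, StopBy)

Callers that only need the style would then unpack and ignore the extra values, e.g. style, _, _ = Accuracy.driving_style(...).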

Templating Python class level attributes to create generic rest serializers

I'm using the Django Rest Framework and would like to serialize different types using the same format: a list of all instances of a specific type, plus a certain selected instance.
My problem is that I have to write a different serializer class for every type that I want to serialize. In C++ I'd solve this by giving the type and type serializer as a template argument. How can I do this in Python?
The generic Object I'd like to serialize:
class OptionSelect(object):
    def __init__(self, options, selected):
        self.options = options
        self.selected = selected
What I currently need in order to serialize it:
class TypeAOptionSerializer(serializers.Serializer):
    options = TypeASerializer(many=True)
    selected = TypeASerializer()


class TypeBOptionSerializer(serializers.Serializer):
    options = TypeBSerializer(many=True)
    selected = TypeBSerializer()


class TypeCOptionSerializer(serializers.Serializer):
    options = TypeCSerializer(many=True)
    selected = TypeCSerializer()
Instead I'd like to create a Serializer like this:
class OptionSerializer(serializers.Serializer):
    options = serializer(many=True)
    selected = serializer()

    def __init__(self, serializer):
        self.serializer = serializer
        super().__init__()
Is there maybe a different approach that I should be taking?
You can try the following:
def create_serializer(serializer):
    class MySerializer(serializers.Serializer):
        options = serializer(many=True)
        selected = serializer()

    return MySerializer


TypeAOptionSerializer = create_serializer(TypeASerializer)
TypeBOptionSerializer = create_serializer(TypeBSerializer)
TypeCOptionSerializer = create_serializer(TypeCSerializer)
This should be equivalent to your current approach with three separate classes.
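Usage then looks the same as with the hand-written classes; a rough sketch, assuming a TypeA queryset and instance (standard DRF Serializer usage, not code from the question):

TypeAOptionSerializer = create_serializer(TypeASerializer)

option = OptionSelect(options=TypeA.objects.all(), selected=some_type_a_instance)
serializer = TypeAOptionSerializer(option)
print(serializer.data)  # {'options': [...], 'selected': {...}}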

Dynamic SQLAlchemy ORM relationship generation

Premise: I have a lot of tables that have to be created individually (they cannot be created dynamically), and therefore I find myself constantly having to make mixins that standardize how tables relate:
class A_Table(Base):
    id = Column(Integer, primary_key=True)


class A_Relator(My_Mixin_Base):
    @declared_attr
    def a_table_id(cls):
        return Column(ForeignKey(A_Table.id))

    @declared_attr
    def a_table(cls):
        return relationship(A_Table)


class B_Table(A_Relator, Base):
    id = Column(Integer, primary_key=True)


class C_Table(A_Relator, Base):
    id = Column(Integer, primary_key=True)


class D_Table(A_Relator, Base):
    id = Column(Integer, primary_key=True)

# ad nauseam
Simple, but when B_Table, C_Table, etc. all have their own Relator classes, it gets very repetitive, and thus it seems like something that should be easy to solve in code.
My Solution: I made a class factory (?) that creates a mixin class to be used one time.
def related(clss, defined=False, altName=None):
    class X((Definer if defined else Relator),):
        linkedClass = clss

        @classmethod
        def linkedClassFieldName(cls):
            return "{}Id".format(clss.getBackrefName())

        def linkId(cls):
            return Column(ForeignKey(clss.id))

        def linkRe(cls):
            return relationship(clss,
                                foreign_keys=getattr(cls, "{}Id".format(clss.getBackrefName() if not altName else altName)),
                                backref=cls.getBackrefName())

    setattr(X, "{}Id".format(clss.getBackrefName() if not altName else altName), declared_attr(X.linkId))
    setattr(X, "{}".format(clss.getBackrefName() if not altName else altName), declared_attr(X.linkRe))
    del X.linkId
    del X.linkRe
    return X
Which allows you to do the following and be done with it:
class B_Table(related(A_Table), Base):
    id = Column(Integer, primary_key=True)
...but this is messy and confusing, and I would guess there is a much better way to do this that leaves a lot less to uncertainty.
Question: I'm looking for a way to do this in a more direct SQLAlchemy-aligned way with less roundabout "hack". Or in summary: how do I make a generic SQLAlchemy mixin that generates a relationship?
I had a mess around with this. I'm not sure how well this solution will suit your needs, but I did it as more of a learning exercise for myself, and if it helps you, then great.
So, with the objective of having foreign keys and relationships defined on models with as little input as possible, this is what I came up with.
Here are the models that I used:
class Base:
    @declared_attr
    def __tablename__(cls):
        return cls.__name__.lower()

    @declared_attr
    def id(cls):
        return Column(Integer, primary_key=True)

    def __repr__(self):
        return f'<{type(self).__name__}(id={self.id})>'


Base = declarative_base(cls=Base)


class A_Table(Base):
    parents = []


class B_Table(Base):
    parents = ['A_Table']


class C_Table(Base):
    parents = ['A_Table', 'B_Table']
Notice the class variable parents on each model which is a sequence of strings that should be other model names that inherit from the same declarative_base instance. Foreign keys and relationships to the parent classes will be created on the class that declares them as parents.
So then leveraging off of the fact that:
Attributes may be added to the class after its construction, and they
will be added to the underlying Table and mapper() definitions as
appropriate
(see docs)
I iterate through all of the models that are defined on Base and build the required objects according to the parents it's given and plug them in.
Here's the function that does all of that:
from sqlalchemy import inspect  # this would be the only new import you'd need


def relationship_builder(Base):
    """ Finds all models defined on Base, and constructs foreign key
    columns and relationships on each as per their defined parent classes.
    """
    def make_fk_col(parent):
        """ Constructs a Column of the same type as the primary
        key of the parent and establishes it as a foreign key.
        Constructs a name for the foreign key column and attribute.
        """
        parent_pk = inspect(parent).primary_key[0]
        fk_name = f'{parent.__name__}_{parent_pk.name}'
        col = Column(
            fk_name, parent_pk.type,
            ForeignKey(f'{parent.__tablename__}.{parent_pk.name}')
        )
        return fk_name, col

    # this bit gets all the models that are defined on Base and maps them to
    # their class name.
    models = {
        cls.__name__: cls for cls in Base._decl_class_registry.values() if
        hasattr(cls, '__tablename__')
    }
    for model in models.values():
        for parentname in model.parents:
            parent = models.get(parentname)
            if parent is not None:
                setattr(model, *make_fk_col(parent))
                rel = relationship(parent, backref=model.__name__)
                setattr(model, parentname, rel)
To test, this is just at the bottom of the same module that I've got everything else defined in:
if __name__ == '__main__':
    relationship_builder(Base)
    a = A_Table(id=1)
    b = B_Table(id=1)
    c = C_Table(id=1)

    a.B_Table.append(b)
    a.C_Table.append(c)
    b.C_Table.append(c)

    print(b.A_Table)
    print(c.A_Table)
    print(c.B_Table)
    # <A_Table(id=1)>
    # <A_Table(id=1)>
    # <B_Table(id=1)>
Here's the schema it created:
This won't work for composite primary/foreign keys, but I don't think it would be too much of a stretch to get it there. If len(inspect(parent).primary_key) > 1, you'd need to build a ForeignKeyConstraint and add it to the table definition, but I haven't tested that at all.
I also don't think it would be too much of a stretch to make it fully automated if you could name your models in such a manner that the subordination of a model could be inferred from the name of the model itself. Again, just thinking out loud.

sub-classing a peewee field type to add behavior

I am trying to add the required behavior to a CharField or TextField so I can store a list of lists and retrieve it as a list of lists again. I am not asking for a solution; rather, I would like to see an example where an already supported field type is subclassed, as I didn't find any in the documentation or on the Internet.
Do I have to do it as explained in the documentation for creating a custom type? For example:

class mylistoflists(TextField):
    ...

If yes, then what do I have to assign to field_type?
Example code (see tests/fields.py for full example):
class ListField(TextField):
    def db_value(self, value):
        return ','.join(value) if value else ''

    def python_value(self, value):
        return value.split(',') if value else []


class Todo(TestModel):
    content = TextField()
    tags = ListField()


class TestCustomField(ModelTestCase):
    requires = [Todo]

    def test_custom_field(self):
        t1 = Todo.create(content='t1', tags=['t1-a', 't1-b'])
        t2 = Todo.create(content='t2', tags=[])

        t1_db = Todo.get(Todo.id == t1.id)
        self.assertEqual(t1_db.tags, ['t1-a', 't1-b'])

        t2_db = Todo.get(Todo.id == t2.id)
        self.assertEqual(t2_db.tags, [])

        t1_db = Todo.get(Todo.tags == Value(['t1-a', 't1-b'], unpack=False))
        self.assertEqual(t1_db.id, t1.id)
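As a side note on the original "list of lists" goal: a comma-joined string will not round-trip nested lists, so JSON is likely a better storage format. A minimal sketch (ListOfListsField is an illustrative name, not part of peewee); no field_type override is needed because TextField already maps to a TEXT column:

import json

from peewee import TextField


class ListOfListsField(TextField):
    def db_value(self, value):
        # serialize the nested list to JSON text for storage
        return json.dumps(value) if value is not None else None

    def python_value(self, value):
        # parse the stored JSON text back into a list of lists
        return json.loads(value) if value is not None else None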
