Test the function using unittest's Mock - python-3.x

def command(self) -> None:
    """
    This command will create the latest tag.
    :param args: arguments for setting up make_client; it also contains
        the name of the microservice for which the tag needs to be created
    """
    gitlab_url = "https://gitlab.com"
    gl = gitlab.Gitlab(url=gitlab_url, private_token=self.args.privatetoken)
    project = gl.projects.get(f'pa/{self.args.service}')
    tags = project.tags.list(get_all=False)
    # keep only the tags that belong to the requested environment
    tags = [tag for tag in tags if self.args.environment in tag.name]
    if not tags:
        self.log(f"No tag found for environment {self.args.environment}")
        return
    tag = tags[0].name.split('-')
    newtag = f"{tag[0]}-{tag[1]}-{int(tag[2])+1}"
    git("tag", "-a", newtag, "-m", newtag)  # pass the variable, not the shell-style literal "$newtag"
    git("push", "origin", newtag)
    self.log(f"New tag is pushed... {newtag}")
I am unable to use patch. I have used @patch("arke.commands.create_tag_command.CreateTagCommand.gitlab"), but it keeps saying that <class 'arke.commands.create_tag_command.CreateTagCommand'> does not have the attribute 'gitlab'.
The test method I wrote:
class TestCreateTagCommand:
    @patch("sys.argv", [__file__, "--createtag", "True", "--environment", "b",
           "--privatetoken", "testtoken", "--service", "c", ])
    @patch("arke.commands.create_tag_command.CreateTagCommand")
    @patch("arke.commands.create_tag_command.CreateTagCommand.gitlab")
    @patch("logging.Logger.info")
    def test_git_create_tag(
            self, mock_logger, mock_api_call, mock_create_tag_command):
        mock_api_call.return_value = MagicMock(spec=Response, status_code=200, response=json.dumps(['<ProjectTag name:impl-59>']))
        ct = CreateTagCommand()
        response = ct.command()
        assert(response, '200')
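No answer was posted for this one, but the error itself points at the cause: patch replaces an attribute where it lives, and gitlab is a module imported at the top of create_tag_command, not an attribute of CreateTagCommand, so the class genuinely has no 'gitlab' attribute. A minimal sketch of patching the module-level names instead (this assumes create_tag_command imports gitlab and a git helper at module level; the tag name is illustrative):

from unittest.mock import MagicMock, patch

class TestCreateTagCommand:
    @patch("sys.argv", [__file__, "--createtag", "True", "--environment", "b",
           "--privatetoken", "testtoken", "--service", "c", ])
    @patch("arke.commands.create_tag_command.git")     # assumed module-level git helper
    @patch("arke.commands.create_tag_command.gitlab")  # the imported module, not a class attribute
    def test_git_create_tag(self, mock_gitlab, mock_git):
        # Make gitlab.Gitlab(...).projects.get(...).tags.list(...) return one fake tag.
        fake_tag = MagicMock()
        fake_tag.name = "impl-b-59"
        mock_gitlab.Gitlab.return_value.projects.get.return_value.tags.list.return_value = [fake_tag]
        ct = CreateTagCommand()
        ct.command()
        # command() splits "impl-b-59" on "-" and bumps the last part.
        mock_git.assert_any_call("push", "origin", "impl-b-60")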

Related

kedro dynamic catalog creation only for specific nodes before their run

I have several thousand files of different types to process. I am doing dynamic catalog creation with hooks. I first used the after_catalog_created hook, but that is too early in the run: I need those entries only for specific nodes. My current attempt uses before_node_run for specific node tags, returning a dictionary with just the dynamically created entries. The node function takes **kwargs only. It works in the sense that the node receives the updated inputs, but the problem is that the node specification must be given an already existing catalog entry. So I have a fake one, and I use it to build a dictionary of the same length as the dictionary returned by the hook.
Pipeline code
type1_datasets_dict = {}
for doc in docs["Type1_documents"]:
    item = doc["name"]
    item_name, _ = os.path.splitext(item)
    type1_datasets_dict[item_name] = "brace_dictionary"
return Pipeline(
    [
        node(
            func=func1,
            inputs=type1_datasets_dict,
            outputs=[
                "output1",
                "output2",
            ],
            name="type1_eta",
            tags=["dynamic-catalog", "type1", "data-engineering"],
        )
    ]
)
Hook code
@hook_impl
def before_node_run(
    self, node: Node, catalog: DataCatalog
) -> Optional[Dict[str, Any]]:
    self.node = node
    self.catalog = catalog
    if "dynamic-catalog" in node.tags:
        input_catalog_name = node.name
        catalog_string = f"params:{input_catalog_name}.full_name"
        if self.catalog.exists(catalog_string):
            true_datasets_dict = {}
            catalog_properties = self.catalog.load(f"params:{input_catalog_name}")
            catalog_name = catalog_properties["full_name"]
            type = catalog_properties["type"]
            subtype = catalog_properties["subtype"]
            datasets_dict = self.catalog.load(f"params:{catalog_name}")
            for dataset in datasets_dict:
                doc_name, _ = os.path.splitext(dataset["name"])
                self.add_text_dataset(
                    name=doc_name,
                    folder=f"parsed/{type}/{subtype}",
                )
                true_datasets_dict[doc_name] = doc_name
            return true_datasets_dict
    return true_datasets_dict
But I am getting a ValueError:
line 487, in _run_with_dict
raise ValueError(
ValueError: Node type1_eta: func1([brace_dictionary,brace_dictionary,brace_dictionary,..,brace_dictionary]) -> [output1, output2] expected 1 input(s) ['brace_dictionary'], but got the following 1497 input(s) instead: ['file1', 'file2', ...].
Is there another way to do this conditionally?
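No answer was posted here, but the ValueError itself encodes the constraint: Kedro validates the inputs a node receives against the dataset names declared in the node specification, so a before_node_run hook can only override inputs the node already declares; it cannot add new ones. A hedged diagnostic sketch of that constraint (build_dynamic_entries is a hypothetical helper standing in for the loop above):

@hook_impl
def before_node_run(
    self, node: Node, catalog: DataCatalog
) -> Optional[Dict[str, Any]]:
    if "dynamic-catalog" not in node.tags:
        return None
    overrides = self.build_dynamic_entries(node, catalog)  # hypothetical helper
    # node.inputs lists the declared dataset names; any key outside that
    # set is what triggers the "expected 1 input(s)" ValueError above.
    unknown = set(overrides) - set(node.inputs)
    if unknown:
        raise ValueError(
            f"{node.name}: {len(unknown)} dynamic entries are not declared inputs"
        )
    return overrides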

django-import-export - Export one to many relationship with ForeignKeyWidget - returns an empty Field

I am trying to use the django-import-export package to export data from two tables with a one-to-many relationship. I have a custom ForeignKeyWidget class that overrides the get_queryset method.
The problem is that the export returns an empty field - no errors, just an empty field. I also tried just using the ForeignKeyWidget without the custom class/get_queryset, but I get the same result.
Does anyone see what I'm doing wrong here?
# admin.py
from import_export import resources
from import_export.fields import Field
from import_export.widgets import ForeignKeyWidget

class SlateDocResource(resources.ModelResource):
    actbreaks = Field(
        column_name="actbreaks",
        attribute="id",
        widget=ActBreaksForeignKeyWidget(ActTimecodes, "slatedoc_id"),
    )

    class Meta:
        model = SlateDoc
        fields = [
            "actbreaks",
        ]

class ActBreaksForeignKeyWidget(ForeignKeyWidget):
    def get_queryset(self, value, row, *args, **kwargs):
        qs = ActTimecodes.objects.filter(slatedoc_id=self.pk)
        print(qs.values())
        return qs
# models.py
class SlateDoc(models.Model):
    # primary model - fields not listed here
    ...

class ActTimecodes(models.Model):
    # secondary model - every SlateDoc can have multiple instances of ActTimecodes
    slatedoc = models.ForeignKey(
        SlateDoc,
        on_delete=models.CASCADE,
        related_name="acts",
    )
    act_number = models.IntegerField(verbose_name="Act", default=1)
    tc_in = models.CharField(max_length=11, default="00:00:00:00")
    tc_out = models.CharField(max_length=11, default="00:00:00:00")
    dur = models.CharField(max_length=11, default="00:00:00:00")
    objects = ActTimecodesQuerySet.as_manager()

    class Meta:
        ordering = ["act_number", "tc_in", "tc_out"]
# version info
"python_version": { "version": "==3.10" },
"django": { "version": "==4.1.1" },
"django-import-export": { "version": "==2.8.0" },
Here is the solution that I figured out.
The answer is very simple compared to what I was attempting to do - using the ForeignKeyWidget was totally unnecessary.
# admin.py
class SlateDocResource(resources.ModelResource):
    actbreaks = Field(column_name="Act Breaks")

    def dehydrate_actbreaks(self, slatedoc):
        actbreaks = []
        count = 1
        for x in ActTimecodes.objects.filter(slatedoc_id=slatedoc.id):
            tc_in = f"tc_{count}_in"
            tc_out = f"tc_{count}_out"
            duration = f"act_{count}_dur"
            actbreak = {tc_in: x.tc_in, tc_out: x.tc_out, duration: x.dur}
            actbreaks.append(actbreak)
            count += 1
        return actbreaks
The code above returns each act break as a dict in a list:
[{'tc_1_in': '01:00:00:00', 'tc_1_out': '01:13:34:00', 'act_1_dur': '00:13:34;00'}, {'tc_2_in': '01:13:36:00', 'tc_2_out': '01:19:03:00', 'act_2_dur': '00:05:26;28'}, {'tc_3_in': '01:19:05:00', 'tc_3_out': '01:26:13:00', 'act_3_dur': '00:07:08;02'}, {'tc_4_in': '01:26:15:00', 'tc_4_out': '01:31:16:00', 'act_4_dur': '00:05:01;02'}, {'tc_5_in': '01:31:18:00', 'tc_5_out': '01:37:39:00', 'act_5_dur': '00:06:21;00'}, {'tc_6_in': '01:37:41:00', 'tc_6_out': '01:44:10:00', 'act_6_dur': '00:06:29;00'}]
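If a flat string cell is preferred over that list repr, a hedged variant of the same dehydrate hook works too (the "in-out (dur)" formatting is my choice; acts is the related_name declared on the model above):

def dehydrate_actbreaks(self, slatedoc):
    # Render each act break as "in-out (dur)" and join with "; " so the
    # exported cell is a readable string instead of a Python list repr.
    return "; ".join(
        f"{tc.tc_in}-{tc.tc_out} ({tc.dur})"
        for tc in slatedoc.acts.all()
    )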

How to set mocked exception behavior on Python?

I am using an external library (github3.py) that defines an internal exception (github3.exceptions.UnprocessableEntity). I don't want to depend on how this exception is defined, so I want to create a side effect and set the attributes I use from this exception.
Code under test (a not-so-minimal example):
import github3

class GithubService:
    def __init__(self, token: str) -> None:
        self.connection = github3.login(token=token)
        self.repos = self.connection.repositories()

    def create_pull(self, repo_name: str) -> str:
        for repo in self.repos:
            if repo.full_name == repo_name:
                break
        try:
            created_pr = repo.create_pull(
                title="title",
                body="body",
                head="head",
                base="base",
            )
        except github3.exceptions.UnprocessableEntity as github_exception:
            extra = ""
            for error in github_exception.errors:
                if "message" in error:
                    extra += f"{error['message']} "
                else:
                    extra += f"Invalid field {error['field']}. "  # testing this case
            return f"{repo_name}: {github_exception.msg}. {extra}"
I need to set the attributes msg and errors on the exception. So I tried the following in my test code, using pytest-mock:
@pytest.fixture
def mock_github3_login(mocker: MockerFixture) -> MockerFixture:
    """Fixture for mocking github3.login."""
    mock = mocker.patch("github3.login", autospec=True)
    mock.return_value.repositories.return_value = [
        mocker.Mock(full_name="staticdev/nope"),
        mocker.Mock(full_name="staticdev/omg"),
    ]
    return mock

def test_create_pull_invalid_field(
    mocker: MockerFixture, mock_github3_login: MockerFixture,
) -> None:
    exception_mock = mocker.Mock(errors=[{"field": "head"}], msg="Validation Failed")
    mock_github3_login.return_value.repositories.return_value[1].create_pull.side_effect = github3.exceptions.UnprocessableEntity(mocker.Mock())
    mock_github3_login.return_value.repositories.return_value[1].create_pull.return_value = exception_mock
    response = GithubService("faketoken").create_pull("staticdev/omg")
    assert response == "staticdev/omg: Validation Failed. Invalid field head."
The problem with this code is that when both side_effect and return_value are set, the side_effect takes precedence and the return_value is simply ignored, so my attribute values never reach the exception.
The underlying issue is that I don't want to know the implementation of UnprocessableEntity in order to call its constructor with the right arguments. I also didn't find another way using just side_effect. I then tried using return_value and setting the class of the mock, like this:
def test_create_pull_invalid_field(
    mock_github3_login: MockerFixture,
) -> None:
    exception_mock = Mock(__class__=github3.exceptions.UnprocessableEntity, errors=[{"field": "head"}], msg="Validation Failed")
    mock_github3_login.return_value.repositories.return_value[1].create_pull.return_value = exception_mock
    response = GithubService("faketoken").create_pull("staticdev/omg")
    assert response == "staticdev/omg: Validation Failed. Invalid field head."
This also does not work: the exception is not thrown. So I don't know how to overcome this issue given the constraint that I don't want to depend on the implementation of UnprocessableEntity. Any ideas?
Based on your example, you don't really need to mock github3.exceptions.UnprocessableEntity, only the incoming resp argument.
So the following test should work:
def test_create_pull_invalid_field(
    mocker: MockerFixture, mock_github3_login: MockerFixture,
) -> None:
    mocked_response = mocker.Mock()
    mocked_response.json.return_value = {
        "message": "Validation Failed", "errors": [{"field": "head"}]
    }
    repo = mock_github3_login.return_value.repositories.return_value[1]
    repo.create_pull.side_effect = github3.exceptions.UnprocessableEntity(mocked_response)
    response = GithubService("faketoken").create_pull("staticdev/omg")
    assert response == "staticdev/omg: Validation Failed. Invalid field head."
EDIT:
If you want github3.exceptions.UnprocessableEntity to be completely abstracted away, mocking the entire class won't be possible, since catching classes that do not inherit from BaseException is not allowed (see the docs). But you can get around that by mocking only the constructor:
def test_create_pull_invalid_field(
    mocker: MockerFixture, mock_github3_login: MockerFixture,
) -> None:
    def _initiate_mocked_exception(self) -> None:
        self.errors = [{"field": "head"}]
        self.msg = "Validation Failed"

    mocker.patch.object(
        github3.exceptions.UnprocessableEntity, "__init__",
        _initiate_mocked_exception
    )
    repo = mock_github3_login.return_value.repositories.return_value[1]
    repo.create_pull.side_effect = github3.exceptions.UnprocessableEntity
    response = GithubService("faketoken").create_pull("staticdev/omg")
    assert response == "staticdev/omg: Validation Failed. Invalid field head."
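As general background (my addition, not part of the answer above): when a mock's side_effect is an exception class or instance, calling the mock raises it, which is why assigning the exception class to side_effect above is enough:

from unittest.mock import Mock

m = Mock(side_effect=ValueError("boom"))
try:
    m()
except ValueError as exc:
    print(exc)  # prints: boom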

I have added a field to the existing module "hr.payslip" from my module. How do I show the data of that field?

An existing module is present, i.e. hr.payslip. In that module I have added a field from my module. Now I want to display the value of that field in the hr.payslip module.
models/emailpayslip.py
@api.multi  # decorates a record-style method where 'self' is a recordset; the method typically defines an operation on records
def send_email(self):
    ir_model_data = self.env['ir.model.data']
    payslip_obj = self.env['hr.payslip']
    ids = self.env.context.get('active_ids', [])
    ctx = dict()
    employee_name = ''
    for id_vals in ids:
        payslip_browse = payslip_obj.browse(id_vals)
        global email
        email = payslip_browse.employee_id.work_email
        store_email.sql_example(self, email)  # calling method of the store_email model
        if payslip_browse.employee_id.work_email:
            template_id = ir_model_data.get_object_reference('Payslip', 'email_template_payslip')[1]
            ctx.update({
                'default_model': 'hr.payslip',
                'default_res_id': payslip_browse.id,
                'default_use_template': bool(template_id),
                'default_template_id': template_id,
                'default_composition_mode': 'comment',
                'email_to': payslip_browse.employee_id.work_email,
            })
            mail_id = self.env['mail.template'].browse(template_id).with_context(ctx).send_mail(payslip_browse.id, True)
This model creates a new table in the database and stores the email address to which the payslip is sent, along with the date it was sent:
class store_email(models.Model):
    _name = "store.email"

    sendemail = fields.Char(
        string='Send Email',
        default=lambda self: self._get_default_name(),
    )
    no_of_times = fields.Integer(string='No of Times')
    date_of_email_send = fields.Date(
        string="Date of Email",
        default=lambda self: fields.datetime.now())

    @api.model
    def _get_default_name(self):
        return "test"

    @api.multi
    def sql_example(self, temp):
        dob = datetime.today()
        self.env.cr.execute("SELECT * FROM store_email WHERE sendemail = %s", (temp,))
        res = self.env.cr.fetchall()
        if res == []:
            count = 1
            self.env.cr.execute("INSERT INTO store_email (sendemail,no_of_times,date_of_email_send) VALUES (%s,%s,%s)", (temp, count, dob))
            self.env.cr.commit()
        else:
            for x in res:
                count = x[7] + 1
                self.env.cr.execute("UPDATE store_email SET date_of_email_send=%s,no_of_times=%s WHERE sendemail=%s", (dob, count, temp))
                self.env.cr.commit()
Model to add a field to hr.payslip, which shows the last payslip send date:
class Add_Field(models.Model):
    _inherit = "hr.payslip"

    last_payslip_send = fields.Date(string='Last Payslip Send')

    @api.multi
    def last_send_payslip(self):
        self.env.cr.execute("SELECT * FROM store_email WHERE sendemail=%s", (email,))
        res = self.env.cr.fetchall()
My addfile.xml: add_newfield
[screenshot of the page where the field was added]
You can use a compute or default function to load a value into the field, or you can also pass the value while creating the record.
default function example:
name = fields.Char(
    string='Name',
    default=lambda self: self._get_default_name(),
)

@api.model
def _get_default_name(self):
    return "test"
Refer to the Odoo documentation for computed fields.
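For the computed-field route, a minimal hedged sketch (the compute method name and the lookup against store.email are my assumptions, not code from the question):

class Add_Field(models.Model):
    _inherit = "hr.payslip"

    last_payslip_send = fields.Date(
        string='Last Payslip Send',
        compute='_compute_last_payslip_send',
    )

    def _compute_last_payslip_send(self):
        for payslip in self:
            # hypothetical lookup: latest send date recorded in store.email
            record = self.env['store.email'].search(
                [('sendemail', '=', payslip.employee_id.work_email)],
                order='date_of_email_send desc', limit=1,
            )
            payslip.last_payslip_send = record.date_of_email_send if record else False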

Python unknown number of commandline arguments in boto3

I am trying to add tags based on command-line arguments passed to a Python script, something like below:
./snapshot-create.py --id abcd --key1 Env --value1 Test
The script looks like this:
client = boto3.client('ec2')
response = client.create_tags(
    Resources=[
        ID,
    ],
    Tags=[
        {
            'Key': 'key1',
            'Value': 'value1'
        },
    ]
)
I want to use --key1 and --value1 as the tag as above, but the problem is that there could be more than one tag that needs to be added, like:
./snapshot-create.py --id abcd --key1 Env --value1 Test --key2 Loca --value2 US --key3 Size --value3 small ...
How would I use those key-value pairs if the number of arguments is not fixed?
I don't mind using a function or any other approach than what I came up with.
One option would be loading a JSON string as a dictionary and iterating over it when creating the tags.
For example, consider this invocation:
$ my_script.py --tags '{"tag1": "value1", "tag2": "value2"}' --id i-1234567890 i-0987654321
and this code snippet:
import json
import boto3
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('-t', '--tags', type=str)
parser.add_argument('-i', '--id', nargs='+')
args = parser.parse_args()

client = boto3.client('ec2')

def create_tags(key, value, resources, c):
    c.create_tags(
        Resources=resources,
        Tags=[
            {
                'Key': key,
                'Value': value
            },
        ]
    )
my_tags = json.loads(args.tags)  # {'tag1': 'value1', 'tag2': 'value2'}
resources = args.id  # ['i-1234567890', 'i-0987654321']

for k, v in my_tags.items():
    create_tags(k, v, resources, client)
This should cause instances i-1234567890 & i-0987654321 to be tagged with both tags tag1 and tag2 described in --tags above.
If you require a more dynamic interface for resources as well, consider adding them to the JSON like so:
{ "instance_id": [{"tag_key": "tag_value"}, ...], ... }
You can then take a single --tags argument containing a mapping of resources to tags, instead of the above example where resources are statically mapped to the tags, as in the sketch below.
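A hedged sketch of consuming that shape (apply_tag_map is my name, not from the answer):

import json
import boto3

def apply_tag_map(tags_json: str, client) -> None:
    # expected shape: {"i-1234567890": [{"Env": "Test"}, {"Loc": "US"}], ...}
    tag_map = json.loads(tags_json)
    for instance_id, tag_list in tag_map.items():
        tags = [
            {'Key': k, 'Value': v}
            for d in tag_list
            for k, v in d.items()
        ]
        client.create_tags(Resources=[instance_id], Tags=tags)

# usage: apply_tag_map(args.tags, boto3.client('ec2'))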
Pretty sure there are better, more pythonic, solutions than this though - this is one viable solution.
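One such alternative, as a hedged sketch (the repeatable --tag KEY=VALUE flag is my invention, not from the question): argparse's action='append' collects every occurrence of a flag into a list, which sidesteps both the numbered --keyN/--valueN flags and the JSON quoting:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('-i', '--id', nargs='+', required=True)
# each occurrence of --tag appends one "KEY=VALUE" string to the list
parser.add_argument('-t', '--tag', action='append', default=[])
args = parser.parse_args()

# e.g. ./snapshot-create.py --id abcd --tag Env=Test --tag Loc=US
tags = [
    {'Key': k, 'Value': v}
    for k, v in (t.split('=', 1) for t in args.tag)
]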
