I'm trying to use vcxproj-stream-editor to edit my .vcxproj files. What I want to do is:
Call check_file to see if a section exists in the file and set a flag.
If the flag isn't set, call filter_file to add it.
But how do I set a flag inside the coroutine called by check_file? I've tried declaring a global flag:
#! python3
import vcxproj
Configuration_debug_x64 = None
@vcxproj.coroutine
def print_project_guid():
    while True:
        action, params = yield
        if action == "start_elem" and params["name"] == "PropertyGroup":
            if "attrs" in params:
                if "Label" in params["attrs"]:
                    if params["attrs"]["Label"] == "Configuration":
                        if "Condition" in params["attrs"]:
                            if params["attrs"]["Condition"] == "'$(Configuration)|$(Platform)'=='Debug|x64'":
                                Configuration_debug_x64 = True
vcxproj.check_file("My.vcxproj", print_project_guid)
if Configuration_debug_x64:
print("Configuration_debug_x64")
else:
print("No Configuration_debug_x64")
but that doesn't work, and I can't add a parameter to the function I pass to check_file. Is there some obvious method I'm missing?
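For what it's worth, the immediate reason the flag stays None is that the assignment inside the coroutine creates a new local name; adding global Configuration_debug_x64 inside the function would work, and so would collecting results in a mutable object. A minimal untested sketch, assuming vcxproj.coroutine and check_file behave exactly as in the snippet above (found and find_debug_x64 are invented names):

import vcxproj

found = {"debug_x64": False}  # mutable container, so no global statement is needed

@vcxproj.coroutine
def find_debug_x64():
    while True:
        action, params = yield
        if action != "start_elem" or params["name"] != "PropertyGroup":
            continue
        attrs = params.get("attrs", {})
        if (attrs.get("Label") == "Configuration"
                and attrs.get("Condition") == "'$(Configuration)|$(Platform)'=='Debug|x64'"):
            found["debug_x64"] = True

vcxproj.check_file("My.vcxproj", find_debug_x64)
print("Configuration_debug_x64" if found["debug_x64"] else "No Configuration_debug_x64")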
Unit-test code for a function which does a validation operation and updates the global dict result_count = {'test_method': {'Total_tested': 0, 'passed': 0, 'failed': 0}}.
Below is the function:
def validate_response(testmethod, response, expected_data):
    ra = response.json()
    expected = expected_data['payload']
    if (response.status_code == expected['response_status']) \
            and (result_count[testmethod['folder']]["FAILED"] < 10):
        ...  # code logic: checks using jsondiff and regular expressions
    else:
        missmatch = 'response status code missmatch'
        update_result(testmethod, 'status_code', expected_data, response, missmatch, fail=True)
        result_count[testmethod['folder']]['FAILED'] += 1
I need to write a test for the above function to check whether result_count is updated properly. The regular expression library and jsondiff are also used in the function, and I need help mocking the global variable and using it in the test.
While executing the test script below I first got a KeyError for the global variable result_count, which implies the code is unable to access result_count. After updating the test, the error became: TypeError: '<=' not supported between instances of 'MagicMock' and 'int'.
The current issue is that result_count is not updated when the line below is executed:
partner_test.validate_response(test_input_mocker.method, response, expected_data)
My unit test script is as below:
#patch("tests.p_test.result")
#patch("tests.p_test.result_count")
def test_validate_response_pass(result_count_mocker, monkeypatch, result_mocker, test_input_mocker):
# Build data for validate response function
response = Resp(200, {'message': 'pong'})
response_data = response.json()
expected_data = {some_test_data}
# global variable import and initialize result, result_count
from p_test import result, result_count
result_count.update(result_count_mocker.data)
result.update(result_mocker.data)
result_count_mocker.return_value = result_count_mocker.data
def update_result_mocker(*args):
mock operations here
return None
monkeypatch.setattr(partner_test, "update_result", update_result_mocker)
p_test.validate_response(test_input_mocker.method, response, expected_data)
Resp() in the test function is a response class created to mock the response object.
The issue can be resolved using @patch.dict(dict_name, values). Below is the solution:
@patch.dict(p_test.result, {dict_values})
@patch.dict(p_test.result_count, {dict_values})
def test_validate_response(monkeypatch, test_input_mocker, result_count_test):
    response = Resp(200, {'message': 'pong'})
    response_data = response.json()
    expected_data = {----your data-----}

    def update_result_mocker(*args):
        # mock_operations
        pass

    monkeypatch.setattr(p_test, "update_result", update_result_mocker)
    p_test.validate_response(test_input_mocker.method, response, expected_data)
    print(p_test.result_count)
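For reference, here is a tiny, self-contained illustration of what @patch.dict does; toy_module, record_failure and test_record_failure are made-up names, not from the code above. patch.dict swaps in the given contents for the duration of the test and restores the original dict afterwards, without rebinding the module-level name, which is why the module under test keeps seeing its own result_count object.

# --- toy_module.py (hypothetical) ---
result_count = {'folder_a': {'FAILED': 0}}

def record_failure(folder):
    result_count[folder]['FAILED'] += 1

# --- test_toy_module.py (hypothetical) ---
from unittest.mock import patch
import toy_module

@patch.dict(toy_module.result_count, {'folder_a': {'FAILED': 0}}, clear=True)
def test_record_failure():
    # The real dict has been reset to a known state for this test only.
    toy_module.record_failure('folder_a')
    assert toy_module.result_count['folder_a']['FAILED'] == 1
    # After the test returns, patch.dict restores the original contents.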
I'm trying to write a Builder in SCons to call a command line executable with some arguments that are generated by a (series of) python functions.
cmdVars = Variables(None, ARGUMENTS)
cmdVars.AddVariables(
    EnumVariable('DEBUG', 'help for debug', 'a', allowed_values=('a','b','c')),
    PathVariable('CLI', 'path to cli exe', 'C:\...\blah.exe', PathVariable.PathIsFile)
)
env = Environment(variables = cmdVars)

def generateSomeExtraBitsDependingOnFlag(debug):
    if (debug == 'a'):
        return "-DDEBUG -DBlah myTextHere"
    return ''

myBuilder = Builder(
    action = '"$CLI" generateSomeExtraBitsDependingOnFlag("$DEBUG")'
)
<extra stuff to add myBuilder to env and call env.myBuilder>
The "$CLI" input is correctly substituted to output (when calling scons) something like C:\Program Files\...\blah.exe but the output from the function never appears, regardless of the DEBUG setting.
Adding print(debug) at the top of the function shows that it prints the literal string $DEBUG while the SConstruct file is being parsed (so it's not surprising it doesn't match the if condition).
Do I just need to use a Generator or follow the instructions in chapter 18.4 (Builders That Execute Python Functions) to make this work? Is the section Writing Builders That Execute External Commands not what I want here?
I suspect the user guide is clear once you already understand the answer, but without knowing it in advance the guide is a little opaque to me.
Try this:
cmdVars = Variables(None, ARGUMENTS)
cmdVars.AddVariables(
    EnumVariable('DEBUG', 'help for debug', 'a', allowed_values=('a','b','c')),
    PathVariable('CLI', 'path to cli exe', 'C:\...\blah.exe', PathVariable.PathIsFile)
)
env = Environment(variables = cmdVars)

def generateSomeExtraBitsDependingOnFlag(source, target, env, for_signature):
    if (env['DEBUG'] == 'a'):
        return "-DDEBUG -DBlah myTextHere"
    return ''

env['generateSomeExtraBitsDependingOnFlag'] = generateSomeExtraBitsDependingOnFlag

myBuilder = env.Builder(
    action = '"$CLI" ${generateSomeExtraBitsDependingOnFlag}'
)
env.Append(BUILDERS = {'myBuilder' : myBuilder})
env.myBuilder('dummy','input')
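If I understand SCons's substitution rules correctly, this works because a callable stored in a construction variable is invoked during command-line expansion (passed the target, source, env and for_signature arguments) rather than while the SConstruct is being parsed, so the string it returns is spliced into the actual command; for_signature is True when SCons is only computing the build signature.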
I am writing tests for an API with pytest.
The tests are structured like this:
KEEP_BOX_IDS = ["123abc"]
#pytest.fixture(scope="module")
def s():
UID = os.environ.get("MYAPI_UID")
if UID is None:
raise KeyError("UID not set in environment variable")
PWD = os.environ.get("MYAPI_PWD")
if PWD is None:
raise KeyError("PWD not set in environment variable")
return myapi.Session(UID, PWD)
#pytest.mark.parametrize("name,description,count", [
("Normal Box", "Normal Box Description", 1),
("ÄäÖöÜüß!§", "ÄäÖöÜüß!§", 2),
("___--_?'*#", "\n\n1738\n\n", 3),
])
def test_create_boxes(s, name, description, count):
box_info_create = s.create_box(name, description)
assert box_info_create["name"] == name
assert box_info_create["desc"] == description
box_info = s.get_box_info(box_info_create["id"])
assert box_info["name"] == name
assert box_info["desc"] == description
assert len(s.get_box_list()) == count + len(KEEP_BOX_IDS)
def test_update_boxes(s):
    bl = s.get_box_list()
    for b in bl:
        b_id = b['id']
        if b_id not in KEEP_BOX_IDS:
            new_name = b["name"] + "_updated"
            new_desc = b["desc"] + "_updated"
            s.update_box(b_id, new_name, new_desc)
            box_info = s.get_box_info(b_id)
            assert box_info["name"] == new_name
            assert box_info["desc"] == new_desc
I use a fixture to set up the session (this will keep me connected to the API).
As you can see I am creating 3 boxes at the beginning.
All of the tests that follow perform some sort of operation on these 3 boxes. (Boxes are just spaces for folders and files.)
For example: update_boxes, create_folders, rename_folders, upload_files, change_file names, etc..
I know it's not good that all the tests depend on each other, but if I execute them in the right order the tests are valid, and that's enough for me.
The second issue, which bothers me the most, is that all the following tests start with the same lines:
bl = s.get_box_list()
for b in bl:
    b_id = b['id']
    if b_id not in KEEP_BOX_IDS:
        box_info = s.get_box_info(b_id)
I always need this for loop to get each box's id and info.
I've tried putting it in a second fixture, but the problem is that then there are two fixtures.
Is there a better way of doing this?
Thanks
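One pattern that might help, sketched below with the names from the question (s, KEEP_BOX_IDS and the same API methods, so treat it as an untested sketch): let a second fixture depend on the session fixture and do the filtering loop once, then have each test take that fixture instead of repeating the loop.

@pytest.fixture
def boxes(s):
    # Return (box_id, box_info) pairs for every box that isn't in KEEP_BOX_IDS.
    return [
        (b['id'], s.get_box_info(b['id']))
        for b in s.get_box_list()
        if b['id'] not in KEEP_BOX_IDS
    ]

def test_update_boxes(s, boxes):
    for b_id, box_info in boxes:
        new_name = box_info["name"] + "_updated"
        new_desc = box_info["desc"] + "_updated"
        s.update_box(b_id, new_name, new_desc)
        updated = s.get_box_info(b_id)
        assert updated["name"] == new_name
        assert updated["desc"] == new_desc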
I want to define the SCons build variables in an external.py file, like this:
external.py
mode = 'debug'
toolchain = 'x86'
I want to read these variables back in the SConstruct file, which is in the same directory. Depending on the variable values I want to do some operations:
vars = Variables('external.py')
vars.Add('mode', 'Set the mode for debug or release', 'debug')
if ${RELEASE}=="debug"
#Do these!
elif ${RELEASE}=="release"
#Do that!
If external.py contains valid Python code then you can simply import it using the import keyword. You can then use the dir function to iterate over the names defined in the external module and add them to the SCons variables. You might also want to take a look at the getattr function.
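A rough, untested sketch of that import-based approach (it assumes external.py sits next to the SConstruct, contains plain assignments such as mode = 'debug', and that its directory is importable):

import sys, os
sys.path.insert(0, os.getcwd())  # assumption: scons is run from the directory holding external.py
import external

vars = Variables()
for name in dir(external):
    if not name.startswith('_'):  # skip module internals such as __name__
        vars.Add(name, '', getattr(external, name))

env = Environment(variables=vars)
if env['mode'] == 'debug':
    pass  # do your debug-specific operations here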
The thing you are missing is an SCons Environment constructed with your variables:
vars = Variables('external.py')
vars.Add('mode', 'Set the mode for debug or release', 'debug')
env = Environment(variables = vars)
if env['mode'] == 'debug':
    # do action1
elif env['mode'] == 'release':
    # do action2
else:
    # do action3
You can read more about using Scons here, and about your question here
Soumyajit's answer is great, but I would add that if you want to be able to override values from your file on the command line and restrict the allowed values for your variables, you can do as follows:
# Build variables are loaded in this order:
# Command Line (ARGUMENTS) >> Config File (external.py) >> Default Value
vars = Variables(files='external.py', args=ARGUMENTS)
vars.Add(EnumVariable('mode', 'Build mode.', 'debug', allowed_values=('debug', 'release')))
env = Environment(variables = vars)
if env['mode'] == 'debug':
    env.Append(CCFLAGS = [ '-g' ])
    # whatever...
else:
    env.Append(CCFLAGS = '-O2')
    # whatever...
You can invoke your build script with just scons, but you can also override specific variables without editing your config file by running scons mode=release.
If you specify a bad value for your variable, you will get an error from SCons like:
$> scons mode=foo
scons: Reading SConscript files ...
scons: *** Invalid value for option mode: foo. Valid values are: ('debug', 'release')
I'm using SCons for building a project and need to add a symbolic link to a file it is installing via env.Install. What command(s) will make a link that's the equivalent of running ln -s on the command line?
SCons doesn't have a dedicated symbolic link command, but you can use os.symlink(src, dst) from Python's os module:
import os
env = Environment()
def SymLink(target, source, env):
    os.symlink(os.path.abspath(str(source[0])), os.path.abspath(str(target[0])))
env.Command("file.out", "file.in", SymLink)
This may not work correctly on Windows; I've only tried it on Linux.
There seems to be little advancement in the SCons core code for symbolic link support, and I wasn't satisfied with any one solution I found on the web. Here is a potential builder which incorporates aspects of both Nick's and richq's answers. Additionally, it will catch name changes (due to the emitter method) and is as platform-agnostic as I could get it.
I prefer this builder because it will make links relative to the directory in which they are installed. One could add an option to force the link to be absolute, I suppose, but I have not needed or wanted that yet.
Currently, if the OS doesn't support symlinks, I just pass and do nothing, but one could fall back to shutil.copytree(), for example; the dependency handling becomes messy if the source is a directory, though, so the emitter would need to do something fancy. I'm open to any suggestions here.
One can put the following code into the file site_scons/site_tools/symlink.py (with blank __init__.py files in the appropriate places). Then do this in the SConstruct file:
SConstruct:
env = Environment()
env.Tool('symlink')
env.SymLink('link_name.txt', 'real_file.txt')
symlink.py:
import os
from os import path
from SCons.Node import FS
from SCons.Script import Action, Builder
def generate(env):
    '''
    SymLink(link_name,source)
    env.SymLink(link_name,source)

    Makes a symbolic link named "link_name" that points to the
    real file or directory "source". The link produced is always
    relative.
    '''
    bldr = Builder(action = Action(symlink_builder, symlink_print),
                   target_factory = FS.File,
                   source_factory = FS.Entry,
                   single_target = True,
                   single_source = True,
                   emitter = symlink_emitter)
    env.Append(BUILDERS = {'SymLink' : bldr})
def exists(env):
    '''
    we could test if the OS supports symlinks here, or we could
    use copytree as an alternative in the builder.
    '''
    return True
def symlink_print(target, source, env):
    lnk = path.basename(target[0].abspath)
    src = path.basename(source[0].abspath)
    return 'Link: ' + lnk + ' points to ' + src
def symlink_emitter(target, source, env):
    '''
    This emitter removes the link if the source file name has changed
    since scons does not seem to catch this case.
    '''
    lnk = target[0].abspath
    src = source[0].abspath
    lnkdir, lnkname = path.split(lnk)
    srcrel = path.relpath(src, lnkdir)

    if int(env.get('verbose', 0)) > 3:
        ldir = path.relpath(lnkdir, env.Dir('#').abspath)
        if ldir[:2] == '..':
            ldir = path.abspath(ldir)
        print(' symbolic link in directory: %s' % ldir)
        print(' %s -> %s' % (lnkname, srcrel))

    try:
        if path.exists(lnk):
            if os.readlink(lnk) != srcrel:
                os.remove(lnk)
    except AttributeError:
        # no symlink available, so we remove the whole tree? (or pass)
        # shutil.rmtree(lnk)
        print('no os.symlink capability on this system?')

    return (target, source)
def symlink_builder(target, source, env):
    lnk = target[0].abspath
    src = source[0].abspath
    lnkdir, lnkname = path.split(lnk)
    srcrel = path.relpath(src, lnkdir)

    if int(env.get('verbose', 0)) > 4:
        print('target:', target)
        print('source:', source)
        print('lnk:', lnk)
        print('src:', src)
        print('lnkdir,lnkname:', lnkdir, lnkname)
        print('srcrel:', srcrel)

    if int(env.get('verbose', 0)) > 4:
        print('in directory: %s' % path.relpath(lnkdir, env.Dir('#').abspath))
        print('  symlink: %s -> %s' % (lnkname, srcrel))

    try:
        os.symlink(srcrel, lnk)
    except AttributeError:
        # no symlink available, so we make a (deep) copy? (or pass)
        # shutil.copytree(srcrel, lnk)
        print('no os.symlink capability on this system?')

    return None
This creates a builder to perform the job:
mylib = env.SharedLibrary("foobar", SRCS)
builder = Builder(action = "ln -s ${SOURCE.file} ${TARGET.file}", chdir = True)
env.Append(BUILDERS = {"Symlink" : builder})
mylib_link = env.Symlink("_foobar.so", mylib)
env.Default(mylib)
env.Default(mylib_link)
Again, this solution is for Linux.
If you want to issue the command directly to the shell and you know the OS, subprocess can be used as well.
E.g.: subprocess.call(['ln', '-s', '</src/path>', '</dest/path>'])
In addition to Nick's solution, you can add a directory symlink by using a file as a directory name carrier. It's not the cleanest solution and debugging path names is a pain, but this works well:
def symlink_last(target, source, env):
    src = os.path.basename(os.path.dirname(str(source[0])))
    link = "deliverables/last"
    print("Symlinking " + src + " as " + link)
    os.symlink(src, link)

BUILD_TARGETS.append('link')
install_dir = "deliverables/subdir/"
carrier_file = "filename"

builder = Builder(action = symlink_last, chdir=False)
env.Append(BUILDERS={ "Symlink" : builder })
env.Alias(target="link", source=env.Symlink(dir="deliverables", source=install_dir + carrier_file))
This will make a link to deliverables/subdir named deliverables/last, provided that a file deliverables/subdir/filename exists.