E2E Protractor: page object mapping cannot find module - object

Currently, I am trying to use page objects for e2e tests to reduce the maintenance effort, and I was able to write something like this:
Test 1: declare the login page fields
var xxxx_Page_Mdemo = function () {
    // Login page elements
    var companycode = element(by.id('company-code'));
    var username = element(by.id('username'));
    var password = element(by.id('password'));
    var signclick = element(by.css('section>form>button'));

    this.get = function () {
        browser.get('https://xxxx.com');
    };
    this.setCompanycode = function (ccode) {
        companycode.sendKeys(ccode);
    };
    this.setUsername = function (uname) {
        username.sendKeys(uname);
    };
    this.setPassword = function (pasword) {
        password.sendKeys(pasword);
    };
    this.setsignclick = function () {
        // click() takes no arguments
        signclick.click();
    };
};
module.exports = new xxxx_Page_Mdemo();
Test 2: the login spec
var login_Page_Mdemo = require('./PageAdminuserlogsuper.js');

describe('xxlogin page', function () {
    beforeEach(function () {
        browser.waitForAngularEnabled(false);
    });

    afterEach(function () {
        browser.executeScript('window.sessionStorage.clear();');
        browser.executeScript('window.localStorage.clear();');
        browser.restart();
    });

    it('should login to the xxx dashboard', function () {
        login_Page_Mdemo.get();
        var EC = protractor.ExpectedConditions;
        browser.wait(EC.visibilityOf($('#company-code')))
            .then(function () {
                login_Page_Mdemo.setCompanycode('cc90');
                login_Page_Mdemo.setUsername('xxxx.com');
                login_Page_Mdemo.setPassword('pass');
                login_Page_Mdemo.setsignclick();
                browser.wait(EC.urlContains('https://.xxx.com/#/xxx/index'), 100000).then(function () {
                    console.log('xxxx');
                    browser.sleep(4000);
                });
            });
    });
});
From that I was able to log in to a particular page. My question is: after the login I have to navigate to another part of the app and perform some clicks on a separate page. I have already created page objects for that page in the same way, but I cannot link the login functions to them; instead I get the error below. Any comments?
at Module._compile (module.js:635:30)
at Object.Module._extensions..js (module.js:646:10)
at Module.load (module.js:554:32)
at tryModuleLoad (module.js:497:12)
at Function.Module._load (module.js:489:3)
[09:25:28] E/launcher - Process exited with error code 100
Process finished with exit code 4
Empty test suite.
My conf file is something like this:
'./tests/MainObjects/PageAdminuserlogsuper.js',
'./tests/MainObjects/AdminuserlogPageobjectadopt.js',
'./tests/MainObjects/PageEmployeetemplatesuper.js',
'./tests/MainObjects/QE35CustomizeEmptemplate.js'
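For the follow-up question about navigating to another page after login, here is a minimal sketch of how a second page object could be required and chained in the same spec. The second page object reuses PageEmployeetemplatesuper.js from the conf excerpt above, but its method names (openTemplates, clickFirstTemplate) are hypothetical placeholders, not taken from the original code:
var login_Page_Mdemo = require('./PageAdminuserlogsuper.js');
// Hypothetical second page object, built the same way as the login page object
var employee_Page_Mdemo = require('./PageEmployeetemplatesuper.js');

describe('dashboard navigation', function () {
    it('should open the employee template page after login', function () {
        login_Page_Mdemo.get();
        var EC = protractor.ExpectedConditions;
        browser.wait(EC.visibilityOf($('#company-code')), 10000)
            .then(function () {
                login_Page_Mdemo.setCompanycode('cc90');
                login_Page_Mdemo.setUsername('xxxx.com');
                login_Page_Mdemo.setPassword('pass');
                login_Page_Mdemo.setsignclick();
                // Wait for the dashboard URL before touching the second page
                return browser.wait(EC.urlContains('/#/xxx/index'), 100000);
            })
            .then(function () {
                // Placeholder method names; use whatever your second page object exposes
                employee_Page_Mdemo.openTemplates();
                employee_Page_Mdemo.clickFirstTemplate();
            });
    });
});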

Related

Node: SerialPort is not a constructor

I'm trying to connect to my Raspberry Pi's serial port via Node, but when I run the JS file it comes up with this error:
var serialport = new SerialPort("/dev/ttyAMA0", {
^
TypeError: SerialPort is not a constructor
at Object.<anonymous> (/home/pi/exploringrpi/chp13/xbee/nodejs/test.js:12:18)
at Module._compile (module.js:652:30)
at Object.Module._extensions..js (module.js:663:10)
at Module.load (module.js:565:32)
at tryModuleLoad (module.js:505:12)
at Function.Module._load (module.js:497:3)
at Function.Module.runMain (module.js:693:10)
at startup (bootstrap_node.js:191:16)
at bootstrap_node.js:612:3
Node is at version 8.11.4.
serialport is at version 7.0.2.
This is the code:
// From the example code at www.npmjs.com/package/xbee-api
var util = require('util');
var SerialPort = require('serialport').SerialPort;
var xbee_api = require('xbee-api');

var C = xbee_api.constants;
var xbeeAPI = new xbee_api.XBeeAPI({
    api_mode: 1
});

var serialport = new SerialPort("/dev/ttyAMA0", {
    baudRate: 115200
});

serialport.on("open", function() {
    var frame_obj = {                   // AT request to be sent
        type: C.FRAME_TYPE.AT_COMMAND,  // Prepare for an AT command
        command: "NI",                  // Node identifier command
        commandParameter: [],           // No parameters needed
    };
    serialport.write(xbeeAPI.buildFrame(frame_obj));
});

// The data frames are output by this function
xbeeAPI.on("frame_object", function(frame) {
    console.log(">>", frame);
});
Hope you can help
You should probably remove the .SerialPort at the end of the require line:
var SerialPort = require('serialport').SerialPort;
The serialport documentation shows how it should be used, and it doesn't include that .SerialPort at the end:
var SerialPort = require('serialport');
var port = new SerialPort('/dev/ttyAMA0', {
    baudRate: 115200
});
I was shown another error:
throw new TypeError(`"path" is not defined: ${settings.path}`);
I had created the port object like this:
const sp = new SerialPort("/dev/ttyACM0", {baudRate: 4800});
That was the wrong way.
Try creating the port with the path defined inside the curly brackets:
const sp = new SerialPort({path: "/dev/ttyACM0", baudRate: 4800});
const SerialPort = require('serialport').SerialPort;

const sp = new SerialPort({path: "/dev/ttyACM0", baudRate: 4800});
const sent_data = '\x45';

sp.on("open", function() {
    console.log("port has opened");
    sp.write(sent_data, function(err) {
        if (err) {
            return console.log('Error on write', err.message);
        }
        console.log('Port.write: ', sent_data);
    });
});

sp.on('data', function(data) {
    // decoding a Uint8Array to a string
    // (TextDecoder is global in recent Node versions; on older ones require it from 'util')
    var enc = new TextDecoder();
    var arr = new Uint8Array(data);
    var ready = enc.decode(arr);
    console.log('Data received: ', ready);
});

// Read data that is available but keep the stream from entering "flowing mode"
sp.on('readable', function () {
    console.log('Data2:', sp.read());
});

Load and modify svg files from node.js

We have an API that should send PNG files. These PNG files are a composition of several SVG files plus some attribute changes: background color, image size, and so on.
We are using Node.js on the server.
I've tried, without success, to use d3 to modify the SVG files and jsdom to run d3 on the backend:
var fs = require('fs');
var d3 = require('d3');
var JSDOM = require('jsdom').JSDOM;
var outputLocation = 'test.svg';
const window = (new JSDOM(`<html><head></head><body></body></html>`, { pretendToBeVisual: true })).window;
window.d3 = d3.select(window.document); //get d3 into the dom
d3.xml("./react02.svg").mimeType("image/svg+xml").get(function(error, xml) {
var importedNode = document.importNode(xml.documentElement, true);
if (error) throw error;
console.log(xml);
window.d3.select('body')
.append('div').attr('class', 'container')
.appendChild(importedNode);
});
fs.writeFileSync(outputLocation, window.d3.select('.container').html()) //using sync to keep the code simple
But I always get an error:
TypeError: Cannot read property 'innerHTML' of null
at Selection.selection_html [as html] (/home/juanda/kk2/node_modules/d3-selection/build/d3-selection.js:793:20)
at Object.<anonymous> (/home/juanda/kk2/test2.js:18:65)
at Module._compile (module.js:624:30)
at Object.Module._extensions..js (module.js:635:10)
at Module.load (module.js:545:32)
at tryModuleLoad (module.js:508:12)
at Function.Module._load (module.js:500:3)
at Function.Module.runMain (module.js:665:10)
at startup (bootstrap_node.js:201:16)
at bootstrap_node.js:626:3
I can't find any example or library for reading and modifying SVG files on the server side.
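As an aside, the TypeError above occurs because fs.writeFileSync runs before d3.xml's asynchronous callback has appended the .container div, so the selection is still empty. One hedged variant of the d3 + jsdom approach that sidesteps this is to read the file synchronously and hand the parsed node to d3.select; the attribute tweaks below are placeholder values, not from the original code:
var fs = require('fs');
var d3 = require('d3');
var { JSDOM } = require('jsdom');

// Read the SVG from disk instead of fetching it asynchronously with d3.xml
var svgMarkup = fs.readFileSync('./react02.svg', 'utf8');
var dom = new JSDOM(`<body>${svgMarkup}</body>`);
var body = d3.select(dom.window.document.body);

// Tweak attributes with d3 as usual (placeholder values)
body.select('svg')
    .attr('width', 400)
    .attr('height', 300)
    .attr('style', 'background-color: #ffffff');

// Serialize the modified markup back to a file
fs.writeFileSync('test.svg', body.html());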
I moved from d3 to snapsvg, as I don't need data analysis, just to work with the SVG itself.
This is my code:
let {JSDOM} = require('jsdom');
var resolve = require('resolve');

resolve('snapsvg', { basedir: __dirname }, function (err, resource) {
    if (err) console.error(err);
    else {
        const options = {
            runScripts: "dangerously",
            resources: "usable",
            url: "file:///prueba.html" // avoid CORS error reading the svg file
        };
        const dom = new JSDOM(`
            <!DOCTYPE html><html><body><div id="test"></div></body></html>
        `, options);

        var script = dom.window.document.createElement('script');
        script.src = `file://${resource}`;
        var head = dom.window.document.getElementsByTagName('head')[0];
        head.appendChild(script);
        // console.log(dom.serialize())

        script.onload = function () {
            // console.log("Script loaded and ready");
            // console.log(dom.window)
            var s = dom.window.Snap("#test");
            dom.window.Snap.load("file:///Users/juandaniel/Code/prueba/2375.svg", onSVGLoaded);
            function onSVGLoaded(data) {
                s.append(data);
                console.log(s.node.innerHTML);
            }
        };
    }
});
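To persist the result instead of only logging it, the onSVGLoaded callback could write the same markup to disk. A minimal sketch, assuming the markup in s.node.innerHTML is what you want to keep; the output file name is hypothetical:
var fs = require('fs');
...
function onSVGLoaded(data) {
    s.append(data);
    // s comes from the enclosing script.onload scope, as in the snippet above
    fs.writeFileSync('composed.svg', s.node.innerHTML);
}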

"Learning Node JS" example - 'The super constructor to "inherits" must not ' +'

I'm following along with an exercise in the O'Reilly "Learning Node JS" book and I've run into an unrunnable example. My code is as follows:
"use strict";
var util = require('util');
var eventEmitter = require('events').eventEmitter;
var fs = require('fs')
function InputChecker(name, file) {
this.name = name;
this.writeStream = fs.createWriteStream('./' + file + '.txt',
{'flags': 'a',
'encoding': 'utf8',
'mode' : 0o666});
};
util.inherits(InputChecker, eventEmitter);
InputChecker.prototype.check = function check(input) {
let command = input.trim().substr(0,3);
if (command == 'wr') {
this.emit('write', input.substr(3, input.length));
} else if (command == 'en:') {
this.emit('end');
} else {
this.emit('echo', input);
}
};
let ic = new InputChecker('Shelley', 'output');
ic.on('write', function(data) {
this.writeStream.write(data, 'utf8');
});
ic.on('echo', function(data) {
process.stdout.write(ict.name + ' wrote ' + data);
});
ic.on('end', function() {
process.exit();
});
process.stdin.setEncoding('utf8');
process.stdin.on('readable', function() {
let input = process.stdin.read();
if (input != null)
ic.check(input);
});
The error is:
[user#MacBook-Pro NodeJS]$ node fileevent.js
util.js:957
throw new TypeError('The super constructor to "inherits" must not ' +
^
TypeError: The super constructor to "inherits" must not be null or undefined
at Object.exports.inherits (util.js:957:11)
at Object.<anonymous> (/Users/user/Documents/NodeJS/fileevent.js:15:6)
at Module._compile (module.js:570:32)
at Object.Module._extensions..js (module.js:579:10)
at Module.load (module.js:487:32)
at tryModuleLoad (module.js:446:12)
at Function.Module._load (module.js:438:3)
at Module.runMain (module.js:604:10)
at run (bootstrap_node.js:389:7)
at startup (bootstrap_node.js:149:9)
Any help is appreciated! TIA!
Your mistake is here:
var eventEmitter = require('events').eventEmitter;
The EventEmitter object is title-case, since it is a class name. It could be corrected by rewriting it as:
var EventEmitter = require('events').EventEmitter;
but that is the obsolete way of importing the EventEmitter class for Node.js v0.10.x and earlier, which is only supported now for backwards compatibility. The recommended way to import it is:
const EventEmitter = require('events');
and refactor the rest of your references from eventEmitter to EventEmitter in order to follow the convention of naming classes with a capital letter.
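Applied to the code above, the top of the file would then look something like this (a sketch of only the changed lines, using the recommended import; the rest of the example stays the same):
"use strict";

const util = require('util');
const EventEmitter = require('events'); // the events module itself is the EventEmitter class
const fs = require('fs');

function InputChecker(name, file) {
    this.name = name;
    this.writeStream = fs.createWriteStream('./' + file + '.txt',
        {'flags': 'a', 'encoding': 'utf8', 'mode': 0o666});
}

// Pass the correctly capitalized class to util.inherits
util.inherits(InputChecker, EventEmitter);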

use node-redis with node 8 util.promisify

node -v: 8.1.2
I use the Redis client node_redis with Node 8's util.promisify, no Bluebird.
The callback version of client.get is fine, but the promisified version throws this error:
TypeError: Cannot read property 'internal_send_command' of undefined
at get (D:\Github\redis-test\node_modules\redis\lib\commands.js:62:24)
at get (internal/util.js:229:26)
at D:\Github\redis-test\app.js:23:27
at Object. (D:\Github\redis-test\app.js:31:3)
at Module._compile (module.js:569:30)
at Object.Module._extensions..js (module.js:580:10)
at Module.load (module.js:503:32)
at tryModuleLoad (module.js:466:12)
at Function.Module._load (module.js:458:3)
at Function.Module.runMain (module.js:605:10)
My test code:
const util = require('util');
var redis = require("redis"),
    client = redis.createClient({
        host: "192.168.99.100",
        port: 32768,
    });

let get = util.promisify(client.get);

(async function () {
    client.set(["aaa", JSON.stringify({
        A: 'a',
        B: 'b',
        C: "C"
    })]);

    client.get("aaa", (err, value) => {
        console.log(`use callback: ${value}`);
    });

    try {
        let value = await get("aaa");
        console.log(`use promisify: ${value}`);
    } catch (e) {
        console.log(`promisify error:`);
        console.log(e);
    }

    client.quit();
})()
Changing
let get = util.promisify(client.get);
to
let get = util.promisify(client.get).bind(client);
solved it for me :) The promisified function loses its this binding when called on its own, so inside get the client (and its internal_send_command method) is undefined unless you bind it back.
If you are using Node v8 or higher, you can promisify node_redis with util.promisify as in:
const {promisify} = require('util');
const getAsync = promisify(client.get).bind(client); // now getAsync is a promisified version of client.get

// We expect a value 'foo': 'bar' to be present,
// so instead of writing client.get('foo', cb); you have to write:
return getAsync('foo').then(function(res) {
    console.log(res); // => 'bar'
});
or using async/await:
async function myFunc() {
    const res = await getAsync('foo');
    console.log(res);
}
Culled shamelessly from the official node_redis repo.
You can also use the Bluebird library; monkey-patching the client will do the trick for you.
For example:
const bluebird = require('bluebird');
const redis = require('redis');

async function connectToRedis() {
    // use your url to connect to redis
    const url = '//localhost:6379';
    const client = await redis.createClient({
        url: url
    });
    client.get = bluebird.promisify(client.get).bind(client);
    return client;
}

// Connect to the redis server and get a key from redis
connectToRedis().then(client => client.get(/* Your Key */)).then(console.log)

Using RequireJS plugin in node.js amdefine

I created a simple RequireJS plugin.
It works well in the browser, but not in Node.js... The plugin is available here.
The current code that makes it a plugin, not just a module, is this:
if (typeof define !== 'function')
    var define = require('amdefine')(module);

define(function (require, exports, module) {
    var _ = require("underscore"),
        Backbone = require("backbone");
    ...
    var plugin = {
        load: function (name, require, load, config) {
            var resources = this.parseResources(name);
            require(resources, function () {
                var localValidator = global.Validator.extend({});
                _.each(arguments, function (resource) {
                    localValidator.customize(resource);
                }, this);
                var local = _.extend({}, global, {
                    Validator: localValidator,
                    Model: global.Model.extend({
                        Validator: localValidator
                    }),
                    SyncModel: global.SyncModel.extend({
                        Validator: localValidator
                    })
                });
                load(local);
            });
        },
        parseResources: function (name) {
            if (name == "")
                return [];
            return name.split("+");
        }
    };
    var global = {
        version: "1.0.1",
        View: View,
        Aggregator: Aggregator,
        Messenger: Messenger,
        Model: AsyncModel,
        SyncModel: SyncModel,
        Validator: Validator,
        Runner: Runner,
        DependencyResolver: DependencyResolver,
        load: plugin.load.bind(plugin)
    };
    module.exports = global;
});
I try to call it from jasmine-node to unit test whether it works, but it reports that it cannot find the module:
if (typeof define !== 'function') { var define = require('amdefine')(module, require) }
require("./validation"); // works
require("./validation!./basicTests"); // does not work, but the plugin and the path are ok...
Stack:
Message:
Error: Cannot find module './validation!./basicTests'
Stacktrace:
Error: Cannot find module './validation!./basicTests'
at Function.Module._resolveFilename (module.js:338:15)
at Function.Module._load (module.js:280:25)
at Module.require (module.js:362:17)
at require (module.js:378:17)
at null.<anonymous> (D:\creation\software developer\projects\dev\document root\src\validation.plugin.spec.js:9:17)
at jasmine.Env.describe (D:\creation\software developer\libs\jasmine-node\lib\jasmine-node/jasmine-2.0.0.rc1.js:791:21)
at describe (D:\creation\software developer\libs\jasmine-node\lib\jasmine-node/jasmine-2.0.0.rc1.js:575:27)
at Object.<anonymous> (D:\creation\software developer\projects\dev\document root\src\validation.plugin.spec.js:7:1)
at Module._compile (module.js:449:26)
at Object.Module._extensions..js (module.js:467:10)
Is it possible to make it work somehow?
Edit:
I made some progress:
if (typeof define !== 'function')
    var define = require('amdefine')(module, require);

define(function (require, exports, module) {
    var _ = require("underscore"),
        Backbone = require("backbone"),
        global = require("./validation"),
        empty = require("./validation!"),
        local = require("./validation!../src/basicTests");

    describe("validation", function () {
        expect(global).not.toBe(undefined);
        expect(empty).not.toBe(undefined); // fail
        expect(local).not.toBe(undefined); // fail
    });
});
In plugin.load the local object seems to be okay and I pass it to the load callback, but it never arrives at the require call site. At least inside a define block it calls the amdefine require, not the CommonJS one...
Edit 2:
I found this on the amdefine site:
Loader plugins are supported as long as they call their load() callbacks synchronously. So ones that do network requests will not work. However plugins like text can load text files locally.
I did not know that require is called asynchronously. I modified my plugin so that under amdefine it resolves resources with synchronous require calls, while in the browser it keeps using the asynchronous require.
var amdefine = false;
if (typeof define !== 'function')
    var define = require('amdefine')(module, require),
        amdefine = true;

define(function (require, exports, module) {
    var _ = require("underscore"),
        Backbone = require("backbone");
    ...
    var plugin = {
        load: function (name, _require, load, config) {
            var resources = this.parseResources(name);
            // Under amdefine, resolve the resources synchronously; in the browser,
            // fall back to the asynchronous require that was passed in
            var require = amdefine ? function (resources, callback) {
                var modules = [];
                _.each(resources, function (resource) {
                    modules.push(_require(resource));
                });
                callback.apply(null, modules);
            } : _require;
            require(resources, function () {
                var localValidator = global.Validator.extend({});
                _.each(arguments, function (resource) {
                    localValidator.customize(resource);
                }, this);
                var local = _.extend({}, global, {
                    Validator: localValidator,
                    Model: global.Model.extend({
                        Validator: localValidator
                    }),
                    SyncModel: global.SyncModel.extend({
                        Validator: localValidator
                    })
                });
                load(local);
            });
        },
        parseResources: function (name) {
            if (name == "")
                return [];
            return name.split(":");
        }
    };
    ...
});
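With the ":" separator in parseResources, a spec can now pull in several customization modules through a single plugin call. A small usage sketch; the second resource name is hypothetical:
define(function (require) {
    // Each segment after the "!" is resolved as a separate resource module
    var local = require("./validation!../src/basicTests:../src/otherTests");
});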
