Using ElementArrayFinder.filter() with async/await - node.js

I've been using the following function to filter element arrays for the past few years with Webdriver's Control Flow enabled:
filterElementsByText (elemList, comparator, locator) {
  return elemList.filter((elem) => {
    let searchTarget = locator ? elem.element(locator) : elem
    return searchTarget.getText().then((text) => text === comparator)
  })
}
I am now trying to migrate my repo to using async/await which requires turning off the Control Flow.
This transition has been mostly successful, but I'm having trouble with the function above. Intermittently, I am seeing this error:
Failed: java.net.ConnectException: Connection refused: connect
I am able to reproduce this issue with a test case I've written against https://angularjs.org, although it happens with much higher frequency against my own app.
let todoList = element.all(by.repeater('todo in todoList.todos'))
let todoText = element(by.model('todoList.todoText'))
let todoSubmit = element(by.css('[value="add"]'))
let addItem = async (itemLabel = 'write first protractor test') => {
  await todoText.sendKeys(itemLabel)
  return todoSubmit.click()
}

let filterElementsByText = (elemList, comparator, locator) => {
  return elemList.filter((elem) => {
    let searchTarget = locator ? elem.element(locator) : elem
    return searchTarget.getText().then((text) => {
      console.log(`Element text is: ${text}`)
      return text === comparator
    })
  })
}
describe('filter should', () => {
  beforeAll(async () => {
    browser.ignoreSynchronization = true
    await browser.get('https://angularjs.org')
    for (let i = 0; i < 10; i++) {
      await addItem(`item${i}`)
    }
    return addItem()
  })

  it('work', async () => {
    let filteredElements = await filterElementsByText(todoList, 'write first protractor test')
    return expect(filteredElements.length).toEqual(1)
  })
})
This is being run with the following set in Protractor's conf file:
SELENIUM_PROMISE_MANAGER: false
With the simplified test case it seems to occur on 5-10% of executions (although, anecdotally, it does seem to occur more frequently once it has occurred the first time).
My problem is that this feels like a bug in Webdriver, but I'm not sure what conditions would cause that error, so I'm not sure how to proceed.

For anyone reading and wondering, the problem with my own app was two-fold.
First, as described in the comments to the original question, ElementArrayFinder.filter() causes this error because it runs parallel requests for each element in the array.
Secondly (and not apparent in the original question), rather than passing an ElementArrayFinder as described in this test case, I was actually passing in a chained child of each element in the array such as:
element.all(by.repeater('todo in todoList.todos')).$$('span')
Looking at the Webdriver output as this happens, I noticed that this causes all of those locators to be retrieved in parallel, leading to the same error.
I was able to work around both issues by filtering this way:
let filterElementsByText = async (elemList, comparator, locator) => {
  let filteredElements = []
  let elems = await elemList
  // Resolve each element and its text one at a time, so only one request is in flight
  for (let i = 0; i < elems.length; i++) {
    let elem = await elems[i]
    let searchTarget = locator ? elem.element(locator) : elem
    let text = await searchTarget.getText()
    if (text === comparator) {
      filteredElements.push(elem)
    }
  }
  return filteredElements
}
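For illustration, a hypothetical call site for the chained-child case described above would pass the parent repeater array plus a child locator, letting the helper resolve one child element at a time:

// Hypothetical usage: pass the parent array and a child locator instead of
// the pre-chained $$('span') ElementArrayFinder
let matches = await filterElementsByText(
  element.all(by.repeater('todo in todoList.todos')),
  'write first protractor test',
  by.css('span')
)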
This unblocks me, but it still feels like an issue that these functions are just unusable with async/await.

Related

Wait for a function to create modified Array

I'm writing a React app. After clicking one button, I want a file to be downloaded. Before that, the array that I have has to be modified so that the downloaded report is in the proper format.
The problem I have is that I don't know how to force getReports() to wait for setInOrder() to process the data, so the code never enters the loop.
export const setInOrder = async (objects) => {
  var sortedObjectsAll = new Object();
  for (let i = 0; i < objects.length; ++i) {
    if (sortedObjectsAll.hasOwnProperty(objects[i].addedBy)) {
      sortedObjectsAll[objects[i].addedBy].push(objects[i]);
    } else {
      sortedObjectsAll[objects[i].addedBy] = new Array();
    }
  }
  return sortedObjectsAll
}
export const getReports = async (objects) => {
  const sortedObjectsAll = await setInOrder(objects) // This is correct but not available instantly
  console.log(sortedObjectsAll) // this is correctly printed
  const reports = new Array();
  for (let j = 0; j < sortedObjectsAll.length; ++j) {
    console.log("Never enters here")
    reports.push(createReport(sortedObjectsAll[j]))
  }
  return reports
}
I'm trying to use await or async somehow, but I can't solve it. I've seen Promises advised, but I don't know how to return the resulting variable to the code that actually downloads the report.
First, you do not need to make this an async function, because it performs no asynchronous operation (and if you write one without any await in it, it will wait for nothing).
Second, you are iterating over an object, not an array, and that is the problem: an object has no length property, so the loop body never runs. Replace the loop with the following (there are other solutions as well):
for (const key in sortedObjectsAll) {
  ...
}
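Putting it together, a minimal sketch of getReports using that loop (assuming setInOrder has been made a plain, non-async function as suggested above, and keeping the question's createReport):

export const getReports = (objects) => {
  const sortedObjectsAll = setInOrder(objects)
  const reports = []
  // Iterate over the object's keys rather than a numeric index
  for (const key in sortedObjectsAll) {
    reports.push(createReport(sortedObjectsAll[key]))
  }
  return reports
}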

How do I chain a set of functions together using promises and q in node.js?

I have some dynamic data that needs to have work performed on it. The work must happen sequentially. Using the Q Library, I'd like to create an array of functions and execute the code sequentially using sequences. I can't seem to quite figure out the syntax to achieve this.
const fruits = ["apple", "cherry", "blueberry"]

function makeFruitPie (fruit) {
  return Q.Promise((resolve, reject) => {
    // Do some stuff here
    resolve(fruit + " pie")
    // Error handling here
    reject(new Error(""))
  })
}

const fruitFuncs = new Array(fruits.length)

for (var i = 0; i < fruits.length; i++) {
  fruitFuncs[i] = makeFruitPie(fruits[i])
}
// Stole this example from the github docs but can't quite get it right.
i = 0
var result = Q(fruits[i++])
fruitFuncs.forEach((f) => {
  result = result(fruits[i++]).then(f)
})
With these lines
for (var i = 0; i < fruits.length; i++) {
  fruitFuncs[i] = makeFruitPie(fruits[i])
}
you already call the functions and, hence, their processing begins immediately.
Assuming you want the execution of the functions in sequence, the following would be more appropriate:
// construct the pipeline
const start = Q.defer();
let result = start.promise; // we need something to set the pipeline off
fruits.forEach((fruit) => {
  result = result.then(() => makeFruitPie(fruit));
});

// start the pipeline
start.resolve();
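If the individual results are needed as well, the same pipeline can collect them along the way, for example (a sketch extending the pattern above, not part of the original answer):

// Collect each pie as its step of the chain completes
const pies = [];
const begin = Q.defer();
let chain = begin.promise;
fruits.forEach((fruit) => {
  chain = chain
    .then(() => makeFruitPie(fruit))
    .then((pie) => pies.push(pie));
});
chain.then(() => console.log(pies)); // ["apple pie", "cherry pie", "blueberry pie"]
begin.resolve();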
Sidenote: there is a native Promise implementation supported by almost all environments. Maybe consider switching away from the library-backed version.
You can use Promise.all:
Promise.all(fruits.map(fruit => makeFruitPie(fruit)))
  .then(final_res => console.log(final_res))
final_res will give you an array of results.
You could use for...of and do things sequentially, something like this:
const Q = require("q");

const fruits = ["apple", "cherry", "blueberry"];

function makeFruitPie(fruit) {
  return Q.Promise((resolve, reject) => {
    // Do some stuff here
    resolve(`${fruit} pie`);
    // Error handling here
    reject(new Error(""));
  });
}

// await is only valid inside an async function, so wrap the loop in one
async function makeAllPies() {
  for (const fruit of fruits) {
    const result = await makeFruitPie(fruit);
    console.log(result);
  }
}

makeAllPies();
By the way, it is also worth considering native Promise instead of using q.
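For reference, a minimal sketch of makeFruitPie with the built-in Promise constructor (no q dependency) could look like this:

// Same shape as above, using the native Promise constructor instead of Q.Promise
function makeFruitPie(fruit) {
  return new Promise((resolve) => {
    // Do some stuff here
    resolve(`${fruit} pie`);
  });
}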

Return Variable After Running forEach

I just recently started learning Node.js. I chose the AdonisJS framework because I think it will be easier for me to learn, since in a nutshell it has some similarities with Laravel, which I'm used to coding with.
But now I have a problem I can't solve. I have a function like this:
async getAllPeople() {
  let allPeople = await People.all()
  let myArray = []
  let checkChild = async (people) => {
    let eachPeopleChild = await people.children().fetch()
    if (eachPeopleChild.rows.length > 0) {
      return people
    }
    return false
  }
  allPeople.rows.forEach(async (people) => {
    let res = await checkChild(people)
    if (res !== false) {
      myArray.push(res)
    }
  })
  console.log(myArray)
}
When I run this function, it shows an empty array []. I know this is because Node.js (or rather JavaScript) is asynchronous, so console.log(myArray) runs first.
What I want is to return myArray only after all the looping and other processing is done. How can I do that?
Edit: the problem is in the way I loop over the array, not in the way I wrote the callback/promise, since I'm already using async/await, which returns promises and behaves accordingly. map and forEach don't allow what I expected, and the solution is clear: for (item of items).
Thank you.
Actually, I just found the solution. Thanks to the person elsewhere who told me that I can't use forEach and should just use for (let item of anArray) instead.
Here is my code now:
async getAllPeople() {
  let allPeople = await People.all()
  let myArray = []
  let checkChild = async (people) => {
    let eachPeopleChild = await people.children().fetch()
    if (eachPeopleChild.rows.length > 0) {
      return people
    }
    return false
  }
  for (let people of allPeople.rows) {
    let res = await checkChild(people)
    if (res !== false) {
      myArray.push(res)
    }
  }
  return myArray
}
Everything works now!
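As a side note, if the child checks are allowed to run in parallel, Promise.all over map is a common alternative to the sequential for...of loop (a sketch, not from the original answer):

async getAllPeople() {
  let allPeople = await People.all()
  let checkChild = async (people) => {
    let eachPeopleChild = await people.children().fetch()
    return eachPeopleChild.rows.length > 0 ? people : false
  }
  // Start every check at once, wait for all of them, then keep the matches
  let results = await Promise.all(allPeople.rows.map(checkChild))
  return results.filter((res) => res !== false)
}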

Creating a promise from code that uses await

I would like to write a block of code using await syntax, immediately execute it, and create a promise that waits for execution to finish. I've come up with the following way to do this.
let makePromise = async () => {
  return foo && await bar();
}

let promise = makePromise();
However, I find this hard to read and understand. Creating a function and then calling it right away seems counter-intuitive and goes against good practice in other programming languages. Is there a more idiomatic way to write this piece of code?
Particularly, this code is used in the following context.
let promises = items.map((item) => {
  let makePromise = async () => {
    return foo(item) && await bar(item);
  }
  return makePromise();
});
Why not use an async function directly? The following code behaves exactly the same as your example, but keep in mind that this results in an array of promises and awaits none of them.
function foo(x) { return "foo " + x; }
async function bar(x) { return "bar " + x; }

const items = [1];

let promises = items.map(
  async item => foo(item) && await bar(item)
);

Promise.all(promises).then(x => console.log(x));
Async functions actually return a Promise. This is valid:
const x = async () => {
  console.log(123)
}

x().then(() => {
  console.log(456)
})
So in your case:
let promise = bar()
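Note that let promise = bar() drops the foo && guard from the question; if that short-circuit matters, the inline async arrow shown in the previous answer keeps it while still producing a promise (a sketch):

// Keeps the short-circuit: bar(item) is only awaited when foo(item) is truthy
let promises = items.map(async (item) => foo(item) && await bar(item));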

How to translate Kafka pub-sub semantics into a peakNext promise semantics for unittesting in NodeJS

While unittesting my NodeJS application I'm trying to create a simple helper class that will translate the Kafka pub-sub semantics into a simpler API suited for unittesting.
My idea is to be able to write mocha unittest like this:
const testSubscriber = kafkaTestHelper.getTestSubscriber({topic: 'test'});

return someKafkaProducer.sendAsync({topic: 'test', message: randomWord})
  .then(() =>
    testSubscriber.next()
  ).then(msg => {
    msg.should.equal(randomWord);
  });
Of course I would also add helper methods such as
testSubscriber.nextUntil(someFilter)
This is inspired by the AKKA.NET TestKit which has a similar approach.
I have two questions:
Is this a reasonable approach or is there some cleaner way to unittest application logic based on Kafka stream processing in NodeJS?
Can anybody post coding examples showing how to make testSubscriber work as I intend?
This might not be the most elegant solution, but it seems to work, at least in my initial testing. The trick is to create an ever-growing list of Promises whose resolver functions are kept by reference in an array called 'resolvers'. Then, when a message comes in, the corresponding resolver is invoked with the message. In this way I can return promises to any unittest invoking next(), and it works transparently whether the message was already delivered or will be delivered in the future.
I still feel I'm reinventing the wheel here, so any comments would still be greatly appreciated.
function TestSubscriber(consumer, initialMessageFilter) {
  this.consumer = consumer;
  let promiseBuffer = [];
  let resolvers = [];
  let resolveCounter = 0;
  let isStarted = false;

  const ensurePromiseBuffer = function() {
    if (promiseBuffer.length === 0 || resolveCounter >= resolvers.length) {
      const newPromise = new Promise(function(resolve, reject) {
        resolvers.push(resolve);
      });
      promiseBuffer.push(newPromise);
    }
  }

  const that = this;
  this.consumer.on('message', function(message) {
    if (!isStarted) {
      // Determine if we should start now.
      isStarted = initialMessageFilter === undefined || initialMessageFilter(message);
    }
    if (isStarted) {
      ensurePromiseBuffer();
      const resolver = resolvers[resolveCounter];
      resolver(message);
      resolveCounter++;
      that.consumer.commit(function(err, data) {
        if (err) {
          // Just log any errors here as we are running inside a unittest
          log.warn(err)
        }
      })
    }
  });

  this.next = function() {
    ensurePromiseBuffer();
    return promiseBuffer.shift();
  };
}
const cache = {};

module.exports = {
  getTestSubscriber: function({topic}, initialMessageFilter) {
    if (!cache[topic]) {
      const consumer = kafka.getConsumer({topic, groupId: GROUP_ID});
      cache[topic] = new TestSubscriber(consumer, initialMessageFilter);
    }
    return cache[topic];
  }
}
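The nextUntil(someFilter) helper mentioned in the question is not implemented above; one possible sketch, built only on next() and added inside TestSubscriber, could look like this (hypothetical):

// Hypothetical helper: keep consuming messages until one satisfies the filter,
// resolving with the first matching message.
this.nextUntil = async function(filter) {
  let message = await this.next();
  while (!filter(message)) {
    message = await this.next();
  }
  return message;
};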