Load CSV from Newman using Node.js

I have the following code working now. It is a Node.js script that reads a CSV file, but I don't know how to use the data from the CSV in newman.run.
var fs = require('fs');
const newman = require('newman'); // require newman in your project

var people = [];
var fileContents = fs.readFileSync('users.csv');
var lines = fileContents.toString().split('\n');

// Parse each CSV line into an array of values
for (var i = 0; i < lines.length; i++) {
    people.push(lines[i].split(','));
}

// Run the collection once per CSV row
for (var i = 0; i < lines.length; i++) {
    console.log(people[i][0]);
    newman.run({
        collection: require('./collection.json'),
        environment: require('./environment.json'),
        reporters: 'htmlextra'
    }, function (err) {
        if (err) { throw err; }
        console.log('collection run complete!');
    });
}
To explain what's happening: I iterate over the CSV file correctly, but I don't know how to pass the CSV values into the body of the POST request. Could anyone help me?

To use a data file with Newman from a script, you only need to add the iterationData key to the newman.run options object, with the path to the file as its value.
const newman = require('newman');

newman.run({
    collection: require('./collection.json'),
    environment: require('./environment.json'),
    iterationData: '<path to file>/users.csv',
    reporters: 'htmlextra'
}, function (err) {
    if (err) { throw err; }
    console.log('collection run complete!');
});
To use the values in the requests, reference them with the {{var_name}} syntax, where the variable names are the CSV column headings.
More information about working with data files can be found here:
https://learning.getpostman.com/docs/postman/collection-runs/working-with-data-files/
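For example, a hypothetical users.csv (not from the original question) with the column headings username and password:

username,password
carol,abc123
dave,xyz789

could then be referenced in a raw JSON request body inside the collection:

{
    "user": "{{username}}",
    "pass": "{{password}}"
}

Newman will run one iteration per data row and substitute that row's values.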

Related

How to synchronize file writes in Node.js

I am using EJS compile to create notification templates, and I would like to know how to write the files to the file system in parallel and send the notification once all the files are saved. Please see the code snippet I used below:
var fs = require('fs');
var ejs = require('ejs');

var arrayOfData = [someData]; // Prepare data from database

// Iterate through the data
for (var i = 0; i < arrayOfData.length; i++) {
    generateFileFromTemplate(arrayOfData[i], function () {});
}

function generateFileFromTemplate(templateData, callback) {
    var outputFile = templateData.Id + ".html";
    var compiled = ejs.compile(fs.readFileSync('email-template.ejs', 'utf8'));
    var html = compiled(templateData);
    fs.writeFile(outputFile, html, callback);
}
Please help.
Use async.each for your use case. Note that the iterator must signal when each file is done, so pass next straight through as the callback:
var async = require('async');

async.each(arrayOfData,
    function (ele, next) {
        generateFileFromTemplate(ele, next); // next fires when the write completes
    },
    function (err) {
        if (err) console.log('err', err);
        sendNotification();
    }
);
You can use a great utility library called Async, particularly its parallel method: https://github.com/caolan/async#parallel.
Here's an example:
var async = require('async');
/*-------------*/
var tasks = arrayOfData.map(function (data) {
    return function (cb) {
        // Pass cb through so each task finishes only when its file is written
        generateFileFromTemplate(data, cb);
    };
});

async.parallel(tasks, function (err) {
    console.log('My job is done');
});
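On newer Node versions (10+), the same pattern can be written without the async library by using fs.promises with Promise.all. This is only a minimal sketch, assuming the same email-template.ejs file and a sendNotification() helper as above:

const fs = require('fs').promises;
const ejs = require('ejs');

async function generateAll(arrayOfData) {
    const template = ejs.compile(await fs.readFile('email-template.ejs', 'utf8'));
    // Start every write in parallel and wait for all of them to finish
    await Promise.all(arrayOfData.map(function (data) {
        return fs.writeFile(data.Id + '.html', template(data));
    }));
    sendNotification(); // runs only after every file is saved
}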

Send response after callback functions complete execution in Node.js

I have a problem with callback functions and loops in Node.js: how can I make sure the response is sent only after all the callbacks have finished executing?
app.post('/data', function (req, res) {
    var send = [];
    for (var i = 0; i < req.body.quantity; i++) {
        Instruments.count({ /* condition */ }, function (err, count) {
            // using count and other parameters I generate a unique code
            Instruments.find({ id: 'rondom_generated_code' }, function (err, instrumentsCount) {
                if (instrumentsCount.length == 0) {
                    send.push(rondom_generated_code);
                    if (send.length == req.body.quantity)
                        res.json({ success: true, data: send });
                } else {
                    Instruments.count({ /* condition */ }, function (err, count) {
                        // using count and other parameters I generate a unique code
                        send.push(rondom_generated_code);
                        if (send.length == req.body.quantity)
                            res.json({ success: true, data: send });
                    });
                }
            });
        });
    }
});
When I write it like this, it sends the same random code each time (the last one generated). I also tried moving the whole thing into a separate function with a callback, but that didn't work either.
One solution is to use Q.js, a Promise library; refer to its documentation for the full API. Here is some sample code that may help, if I understand your question correctly.
var Q = require('q');

app.post('/data', function (req, res) {
    var send = [];
    var p = function () {
        var deferred = Q.defer();
        Instruments.count({ /* condition */ }, function (err, count) {
            // using count and other parameters, generate the unique code
            if (err) {
                deferred.reject(new Error(err));
            } else {
                send.push(randomnumber);
                deferred.resolve();
            }
        });
        return deferred.promise;
    };
    var ps = [];
    for (var i = 0; i < req.body.quantity; i++) {
        ps.push(p()); // call p() so the array holds promises, not functions
    }
    Q.all(ps).then(function () {
        res.json({ success: true, data: send });
    });
});
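For reference, the same idea works with native Promises and no extra library. This is only a sketch: generateUniqueCode is a hypothetical helper assumed to wrap the Instruments.count/find logic from the question and call back with one unique code.

app.post('/data', function (req, res) {
    var ps = [];
    for (var i = 0; i < req.body.quantity; i++) {
        ps.push(new Promise(function (resolve, reject) {
            generateUniqueCode(function (err, code) { // hypothetical helper
                if (err) reject(err);
                else resolve(code);
            });
        }));
    }
    Promise.all(ps)
        .then(function (send) { res.json({ success: true, data: send }); })
        .catch(function (err) { res.status(500).json({ success: false }); });
});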

Store values from SpookyJS environment into MongoDB

I am trying to scrape data from a site with SpookyJS and store it in MongoDB. I am able to get the data from the website, but I am not able to save the scraped data from the SpookyJS environment to MongoDB. To save the scraped data, I passed my database model instance to SpookyJS, following the link below:
https://github.com/SpookyJS/SpookyJS/wiki/Introduction
Below is my code, where I extract the data into the prod_link_info variable and try to insert its values into MongoDB:
var product_model = require('./product').product_model;

// get results
spooky.then([{ product_model: product_model }, function () {
    this.waitForSelector('li[id^="product_"]', function () {
        // Get info on all elements matching this CSS selector
        var prod_link_info = this.evaluate(function () {
            var nodes = document.querySelectorAll('li[id^="product_"]');
            return [].map.call(nodes, function (node) { // Alternatively: return Array.prototype.map.call(...
                return node.querySelector('a').getAttribute('href') + "\n";
            });
        });
        // insert values in mongodb
        for (var i = 0; i < prod_link_info.length; i++) {
            product_model.create({
                prod_link_info: prod_link_info[i]
            }, function (err, product) {
                if (err) console.log(err);
                else console.log(product);
            });
        }
    });
}]);
Below is the schema and model code used above:
var mongoose = require('mongoose');
var Schema = mongoose.Schema;

// create a schema
var productSchema = new Schema({
    prod_link_info: String
});

var product_model = mongoose.model('product_model', productSchema);

module.exports = {
    product_model: product_model
};
But when I run the code above, it gives me the following error: ReferenceError: Can't find variable: product_model.
I want to store the data extracted by SpookyJS in MongoDB. Please suggest where I am going wrong.
When you pass a hash of variables to Spooky, it is converted to a string using JSON.stringify and then converted back to an object using JSON.parse in the Casper environment (see the docs); so it is impossible to pass a mongoose model to the Casper environment (and there is no actual reason to do so).
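A quick illustration of why, using a plain function as a stand-in for the model: functions simply do not survive JSON serialization.

var payload = { model: function () {} };            // stand-in for a mongoose model
var roundTripped = JSON.parse(JSON.stringify(payload));
console.log(roundTripped);                          // {} - the function was dropped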
To solve the problem, you should pass the data out of the Spooky (Casper) environment. As far as I know, the only way to do that is to emit the data and then handle it using spooky.on. Your example should look like:
var product_model = require('./product').product_model;

// get results
spooky.then([{}, function () {
    this.waitForSelector('li[id^="product_"]', function () {
        // Get info on all elements matching this CSS selector
        var prod_link_info = this.evaluate(function () {
            var nodes = document.querySelectorAll('li[id^="product_"]');
            return [].map.call(nodes, function (node) { // Alternatively: return Array.prototype.map.call(...
                return node.querySelector('a').getAttribute('href') + "\n";
            });
        });
        this.emit('data.ready', prod_link_info);
    });
}]);

spooky.on('data.ready', function (prod_link_info) {
    // insert values in mongodb
    for (var i = 0; i < prod_link_info.length; i++) {
        product_model.create({
            prod_link_info: prod_link_info[i]
        }, function (err, product) {
            if (err) console.log(err);
            else console.log(product);
        });
    }
});

How can I upload multiple files in Node.js?

I need help uploading multiple files using the Node.js file reader.
I am using fs = require('fs').
The problem is that when I choose two files, only one file gets written to the upload directory.
This is my backend
var files = req.files.files;
for (var i = 0; i < files.length; i++) {
    file = files[i];
    fs.readFile(files[i].path, function (error, data) {
        // console.log(files[i].path) displays the same path both times here
        fs.writeFile(uploadDirectory() + newFileName, data, function (error) {
        });
    });
}
Please help me. What is the problem with my code? Thanks.
You should avoid referencing files[i] inside an asynchronous callback that is written directly inside a for-loop.
The reason console.log(files[i].path) displays the same path twice is that by the time the callbacks run, the for-loop has already finished, so i always points to the last element of the array.
The easiest way to fix that is to make a new scope (a function):
function readAndWriteFile(file) {
    fs.readFile(file.path, function (error, data) {
        // console.log(file.path) now displays what you expect
        fs.writeFile(/* define new file name */, data, function (error) {
        });
    });
}

for (var i = 0; i < files.length; i++) {
    readAndWriteFile(files[i]);
}
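As a side note, on Node versions that support ES2015 you can also declare the loop variable with let, which gives each iteration its own binding and avoids the problem without a helper function. A minimal sketch under the same assumptions as the question:

for (let i = 0; i < files.length; i++) {            // let: one binding per iteration
    fs.readFile(files[i].path, function (error, data) {
        console.log(files[i].path);                 // now logs each file's own path
        fs.writeFile(newFileName, data, function (error) {}); // newFileName as in the question
    });
}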

Asynchronously reading and caching multiple files in Node.js

I have an array which keeps URL of several files. For example:
var files = ['1.html', '2.html', '3.html'];
I need to read them asynchronously and save them in an object named cache (cache = {}).
To do this I used the code:
for (var i = 0; i < files.length; i++) {
    require('fs').readFile(files[i], 'utf8', function (error, data) {
        cache[files[i]] = data;
    });
}
In the end I have the result:
cache = { undefined : 'File 3 content' }
I understand that readFile runs after the loop has ended, so it loses its scope. Is there a way to fix this, or another method to read files from an array and cache them?
When your callback to readFile executes, the for loop will already have finished. So i will be files.length and files[i] will be undefined. To mitigate this, you need to wrap the variables in a closure. The simplest way to do this is to create a function which does your readFile call, and call that in the loop:
function read(file) {
    require('fs').readFile(file, 'utf8', function (error, data) {
        cache[file] = data;
    });
}

for (var i = 0; i < files.length; i++) {
    read(files[i]);
}
For even better execution control, you might want to look into async:
var fs = require('fs');
var async = require('async');

function readAsync(file, callback) {
    fs.readFile(file, 'utf8', callback);
}

async.map(files, readAsync, function (err, results) {
    // results = ['file 1 content', 'file 2 content', ...]
});
Edit: Made use of helper function for async example.
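On newer Node versions (10+), you can also skip the callback bookkeeping entirely with fs.promises and Promise.all; a minimal sketch:

const fs = require('fs').promises;

const files = ['1.html', '2.html', '3.html'];
const cache = {};

Promise.all(files.map(function (file) {
    return fs.readFile(file, 'utf8').then(function (data) {
        cache[file] = data;
    });
})).then(function () {
    console.log(cache); // { '1.html': '...', '2.html': '...', '3.html': '...' }
});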
The existing answer didn't work for me. I did find an NPM package which did the job: https://www.npmjs.com/package/read-multiple-files. After npm install read-multiple-files at the command line, here's the code I used:
var readMultipleFiles = require('read-multiple-files');

var files = ['1.html', '2.html', '3.html'];
console.log("\n");

readMultipleFiles(files, 'utf8', function (err, inputFiles) {
    if (err) {
        console.log("Read Error: " + err);
    }
    var fileOne = inputFiles[0];
    var fileTwo = inputFiles[1];
    // ...
    console.log(fileOne);
    console.log(fileTwo);
});
