Automation Testing - Checking of PDF content using WebdriverIO - node.js

I am currently working on test automation that verifies whether the data in a PDF file matches the data on my webpage and API, using WebdriverIO with Node.js.
I’m browsing for some resources but can’t seem to find a solution that is suitable for the tool I’m using. I hope someone can help me. Thank you.

You can use npm packages pdf-parse or pdfreader.
For example:
// const { PdfReader } = require("pdfreader");
const pdf = require('pdf-parse');
module.exports = function data(filePath) {
return new Promise((resolve) => {
pdf(filePath).then(function(data) {
resolve(data.text)
})
})
}
// module.exports = function data (pathToPdf) {
// return new Promise((resolve) => {
// new PdfReader().parseFileItems(pathToPdf, (err, item) => {
// if (err) console.error("error:", err);
// else if (!item) console.warn("end of file");
// else if (item.text) resolve(item.text);
// })
// });
// }

Related

NodeJS MSSQL results to JSON array

I am trying to create an array of JSON objects from an SQL Server query using NodeJS but using just JSON.Stringify on each row I get the results I am looking for as all individual JSON objects but not in an array. Ideally I am just trying to write these results to a file as a JSON array. Any ideas appreciated.
Tried the following
const sql = require('mssql');
const fs = require('fs');

// Connection settings. NOTE: the original post left a stray `},` inside this
// literal when the credentials were redacted, which made it a syntax error.
const config = {
  // Creds removed
};

// First (streaming) attempt: each 'row' event is stringified on its own,
// which is why the output is a sequence of JSON objects, not one JSON array.
sql.connect(config, err => {
  console.log(err);
  const request = new sql.Request();
  request.stream = true; // You can set streaming differently for each request
  request.query(fs.readFileSync('./new-query.sql').toString()); // or request.execute(procedure)
  request.on('row', row => {
    console.log(JSON.stringify(row));
  });
  request.on('error', err => {
    console.log(err);
    // May be emitted multiple times
  });
  request.on('done', result => {
    // console.log('done emitted', result);
    sql.close();
  });
});
As always, reading the library notes in more detail led me to a solution. Using the MSSQL library you can actually work on the entire returned recordset as opposed to every column. Using this code works as I was wanting it to.
const sql = require('mssql');
const fs = require('fs');

// Connection settings (credentials redacted).
const config = {
  // cred removed
};

// Working solution: connect, run the query, and emit the whole first
// recordset as a single JSON array instead of stringifying row by row.
(async () => {
  try {
    await sql.connect(config);
    const result = await sql.query(fs.readFileSync('./new-query.sql').toString());
    console.log(JSON.stringify(result.recordsets[0]));
  } catch (err) {
    // ... error checks
  }
})();

sql.on('error', err => {
  // ... error handler
});

How to create a share link in Buildfire

I'm attempting to create a share link that users within a plugin can send to friends via email or sms. If their friend has the app, the goal would be to open the app to the plugin with a query string similar to navigation.navigateTo does so that it would open to specific content within the plugin. If they don't, the goal would be to send them to the app's webpage where they could download the app from the appropriate store (android or apple). Is this possible or at least a portion of it?
I've generated a share link using buildfire.deeplink.generateUrl but can't find the appropriate steps from there in the API documentation.
Yes. The steps are as follows:
generate the deeplink URL
buildfire.deeplink.generateUrl(options, callback)
Make sure to pass the data property in the options; it represents the
deep-link data that developers need to provide to the plugin once the user
opens the share link. For more information on how to read this data from the
plugin, see buildfire.deeplink.getData.
buildfire.deeplink.generateUrl(
{
data: { videoId: "9Q-4sZF0_CE" },
},
(err, result) => {
if (err) {
console.error(err);
} else {
console.log(result.url);
}
}
);
After generating the deep link, use the following function to open the device's share options:
buildfire.device.share({ link: deepLinkUrl }, callback);
Finally, you have to handle the deep-link data in your plugin so it can open the desired content based on the data you passed when generating the deep-link URL; see buildfire.deeplink.getData.
For more details check the doc.
Example
// share function: generates a deep link for the current video and opens
// the native share sheet with the resulting URL.
const share = () => {
  const deeplinkOptions = {
    title: 'Hello world',
    type: "website",
    description: 'First program',
    imageUrl: '<IMAGE URL>',
    // Payload handed back to the plugin later via buildfire.deeplink.getData.
    data: { "link": vidId },
  };
  buildfire.deeplink.generateUrl(deeplinkOptions, function (err, result) {
    if (err) {
      console.error(err);
      return;
    }
    // Open the device share options with the generated deep-link URL.
    buildfire.device.share({ link: result.url }, function (err, result) {
      if (err) {
        console.warn(err);
      }
    });
  });
};
// Handle Deeplink data in your plugin: read back the payload that was
// attached when the deep link was generated.
const handleDeepLinkData = () => {
  buildfire.deeplink.getData(function (payload) {
    if (payload?.link) {
      const vidId = payload.link;
      // Do what you want
    }
    // Do what you want
  });
};
Yeah, Just share the result URL

Handling Multiple functionalities in Firebase using Nodejs

Hello all,
I'm a newbie to Node.js and Firebase. I need two functionalities to take place in a single function, and I have written a piece of code that works fine.
But my question is: is the code I have written the correct way to achieve the multiple functionalities, or is there an alternate (correct) method to achieve the same result?
Doubt :
Retrieving relevant details of project ----> Inside Callback function ----> saving data to another table ----> Inside Callback function ----> Deleting data from table -----> Inside Callback function ----> response
Do we need to write the functionality inside nested callback functions to achieve the output, or is there any other way to achieve it?
// Nodejs Post Function
// Moves a user-requested project into the "deleted" table:
// fetch project -> copy to deleted_user_request_project -> delete original -> respond.
app.post('/delete_user_request_project/', function (req, res)
{
  if (!is_admin_login(req.cookies.login_type))
  {
    return res.redirect('/');
  }
  var project_id = req.body.project_id; // Getting the project Id
  let del_ref = admin.database().ref("user_request_project/" + project_id); // Targeting the details of the project to fetch that particular data
  del_ref.once("value", function (snapshot)
  {
    var request_project_obj = snapshot.val(); // fetching the details of project
    if (request_project_obj != null)
    {
      let update_ref = admin.database().ref("deleted_user_request_project/" + project_id);
      update_ref.set(
        request_project_obj // Updating project details to another table
      ).then(function ()
      {
        return del_ref.remove(); // Deleting the details from project Table
      }).then(function ()
      {
        return res.status(200).send('success');
      }).catch(function (err)
      {
        // The original chain had no rejection handler, so a failed set()
        // or remove() left the HTTP request hanging with no response.
        console.error(err);
        return res.status(500).send('error');
      });
    }
    else
    {
      var error = "プロジェクトが存在しない";
      // BUG FIX: the original passed the undefined identifier `error_message`
      // here, which threw a ReferenceError before any response was sent.
      req.flash("error", error);
      return res.send({
        status: 'error',
        error: error
      });
    }
  });
})
TIA
I would suggest you use the Promise version of the once() method instead of the Callback version, as follows. It will allow you to correctly chain the different promises returned by the asynchronous Firebase method.
// Promise-chain version of the handler: once("value") returns a promise, so
// the copy -> delete -> respond steps chain flatly instead of nesting callbacks.
app.post('/delete_user_request_project/', function (req, res) {
  if (!is_admin_login(req.cookies.login_type)) {
    return res.redirect('/');
  }
  var project_id = req.body.project_id; // Getting the project Id
  let del_ref = admin.database().ref("user_request_project/" + project_id); // Targeting the details of the project to fetch that particular data
  del_ref.once("value")
    .then(function (snapshot) {
      var request_project_obj = snapshot.val(); // fetching the details of project
      if (request_project_obj != null) {
        let update_ref = admin.database().ref("deleted_user_request_project/" + project_id);
        return update_ref.set(request_project_obj); // Updating project details to another table
      }
      else {
        // Signal "project not found" so the catch below can distinguish it.
        throw new Error('request_project_obj null');
      }
    })
    .then(function () {
      return del_ref.remove(); // Deleting the details from project Table
    })
    .then(function () {
      return res.status(200).send('success');
    })
    .catch(function (error) {
      if (error.message === 'request_project_obj null') {
        // BUG FIX: the original declared `var error` (shadowing the caught
        // error) and then referenced the undefined `error_message`, which
        // threw a ReferenceError. Use a distinct local name instead.
        var errorMessage = "プロジェクトが存在しない";
        req.flash("error", errorMessage);
        return res.send({
          status: 'error',
          error: errorMessage
        });
      } else {
        //...
      }
    });
})

How can I parse data from a csv file uploaded by a user in node/express?

I have a front end react app and a backend node/express app. I want to allow a user to upload a csv file, then parse the file and instantiate a model for each row. However, I am somewhat confused about how to do this, since I am used to simply posting to a route in the API, and persisting the thing from the request body. In this case, the thing from the request body is the file, and I don't want to save the file, just the data inside it. How can I parse the file without saving it to the database? I have tried to use multer to process the upload and csv-parse to parse the contents, but I am not sure this makes sense. Nonetheless, here is the code (app/index):
...
const multer = require('multer');
const upload = multer().single();
const parse = require('csv-parse');
...
// Accept a single-file upload via multer and acknowledge receipt.
// The file stays in memory (multer().single() with no storage), so nothing
// is persisted to disk or the database here.
router.post('/distributor/:id/files', (req, res) => {
  upload(req, res, function (err) {
    if (err) {
      // BUG FIX: the original split this message across raw newlines inside
      // one string literal, which is a syntax error in JavaScript.
      console.error("An error occurred when uploading. Please try again. " +
        "Note that you may only upload one file at a time, and we only support .csv files.");
      return;
    }
    console.log("We have received your file");
  });
});
...
// router.get('/distributor/:id/files/:id', (req, res) => {
// File
// .forge({id: req.params.id})
// .fetch()
// .then((file) => {
// if (_.isEmpty(file))
// return res.sendStatus(404);
// return parseJson(file)
// })
// .then((jsonData) => {
// for (var i in jsonData) {
// //save instance of model
// }
// })
// .catch((error) => {
// console.error(error);
// return res.sendStatus(500);
// });
// })
// function parseJson(file) {
// var output = [];
// // Create the parser
// var parser = parse({delimiter: ':'});
// // Use the writable stream api
// parser.on('readable', function(){
// while(record = parser.read()){
// output.push(record);
// }
// });
// // Catch any error
// parser.on('error', function(err){
// console.log(err.message);
// });
// parser.end();
// }
I know this doesn't make sense, since I don't actually want to save the file as a model and table in the database, I just want to save each item inside the file, so I know I cannot make a route called '/distributor/:id/files/:id'. But I am lost as to what to do instead. I hope that what I am trying to do is clear! I am fairly new to node, and programming in general, and I have never come across a situation in which I needed to handle file upload.
You can use this node module to parse the csv file. https://www.npmjs.com/package/csvtojson
For example, suppose you have a file named users in the request object.
const csv = require('csvtojson');

// Parse the uploaded CSV straight from memory: the file buffer attached to
// the request as `req.files.users` is decoded as UTF-8 and each parsed row
// is emitted as a JSON object, so nothing needs to be written to disk first.
csv()
  .fromString(req.files.users.data.toString('utf8'))
  .on('json', (row) => {
    console.log(row); // one event per CSV row
  })
  .on('done', () => {
    console.log('done parsing');
  });
You will be able to get every row as a json object.

Initialization of db connection - nodejs

I want to use gridfs-stream in a nodejs application.
A simple example is given in the documentation:
// Example quoted from the gridfs-stream documentation: `Grid(...)` may only
// be constructed after the mongoose connection has emitted 'open'.
var mongoose = require('mongoose');
var Grid = require('gridfs-stream');
Grid.mongo = mongoose.mongo;
mongoose.connect('mongodb://localhost:27017/test');
// make sure the db instance is open before passing into `Grid`
mongoose.connection.once('open', function () {
// `gfs` only exists inside this callback — the ordering problem the
// question is about.
var gfs = Grid(mongoose.connection);
// all set!
})
My problem is described by the comment:
make sure the db instance is open before passing into Grid
I try to use gfs in a post request. Now when the code gets initialized, the gfs variable is not defined yet.
// Upload route from the question: pipes the request body into GridFS.
// NOTE(review): `gfs` is assigned only inside the connection's 'open'
// callback (see the snippet above), so it is undefined if a request arrives
// before the db connection opens — exactly the initialization-ordering
// problem being asked about.
api.post('/upload', function(req, res) {
req.pipe(gfs.createWriteStream({
filename: 'test'
}).on('close', function(savedFile){
console.log('file saved', savedFile);
return res.json({file: savedFile});
}));
})
Initializing my route from a callback seems kind of odd.
I read in this post (Asynchronous initialization of Node.js module) that require('') is performed synchronous, and since I rely on the connection being established, I'm kind of forced to wait
Basically I'm not sure if I should use a async pattern on startup now, or if I just miss a more elegant way to solve this.
I have a very similar problem with my server. In my case I am reading https certs asynchronously, the software version from git asynchronously and I want to make sure I have it all together by the time the user comes to log in so I can pass the software version back as a reply to login.
The solution is to use promises. Create the promises on user start up for each activity. Then in the code where you want to be sure its all ready, just call then on either the promise itself or Promise.all(array of promises).then()
Here is an example of what I am doing to read the ssl certs to start the server
class Web {
constructor(manager,logger) {
var self = this;
this.server = false;
this.logger = logger;
var key = new Promise((resolve,reject) => {
fs.readFile(path.resolve(__dirname, 'key.pem'),(err,data) => {
if (err) {
reject(err);
} else {
resolve(data);
}
});
});
var cert = new Promise((resolve,reject) => {
fs.readFile(path.resolve(__dirname, 'certificate.pem'), (err,data) => {
if (err) {
reject(err);
} else {
resolve(data);
}
});
});
Promise.all([key,cert]).then(values => {
var certs = {
key: values[0],
cert: values[1],
};
return certs;
}).then(certs => {
self.server = require('http2').createServer(certs,(req,res) => {
// NOW Started and can do the rest of the stuff
});
self.server.listen(...);
});
NEEDS SOME MORE CLOSING BRACKETS

Resources