I work with the Hummus-Recipe library and it works fine, but I want to make a function that accepts an array of files and appends them all into one.
This is my code, which works:
const filesRoot = './uploads';

router.route('/')
  .get(async (request, response) => {
    const src = filesRoot + '/one.pdf';
    const appendedFile = filesRoot + '/two.pdf';
    const appendedFile2 = filesRoot + '/three.pdf';
    const output = filesRoot + '/new.pdf';
    const recipe = new HummusRecipe(src, output);
    recipe
      .appendPage(appendedFile)
      .appendPage(appendedFile2)
      .endPDF();
  });
How can I take this code and make it accept an array?
Something like this:
let combinePdfFiles = (array) => {
  for (let i = 0; i < array.length; i++) {
  }
};
thanks.
You can use the easy-pdf-merge package, which lets you merge an array of PDF files.
Usage:
const merge = require('easy-pdf-merge');

merge(source_files, dest_file_path, function (err) {
  if (err) {
    return console.log(err);
  }
  console.log('Success');
});
Example:
merge(['File One.pdf', 'File Two.pdf'], 'File Output.pdf', function (err) {
  if (err) {
    return console.log(err);
  }
  console.log('Successfully merged!');
});
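Applied to your case, a minimal sketch (the helper name and the ./uploads root are assumptions based on your snippet, not part of the library):

const merge = require('easy-pdf-merge');

const filesRoot = './uploads';

// Hypothetical helper: merges every file name in the array into one output PDF.
const combinePdfFiles = (files, outputName) => {
  const sources = files.map(name => `${filesRoot}/${name}`);
  merge(sources, `${filesRoot}/${outputName}`, function (err) {
    if (err) {
      return console.log(err);
    }
    console.log('Successfully merged!');
  });
};

combinePdfFiles(['one.pdf', 'two.pdf', 'three.pdf'], 'new.pdf');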
I created this function and it works.
const combinePdfFiles = async (files, companyID, flowID) => {
  const filesRoot = `./uploads/${companyID}/${flowID}`;
  try {
    const originalFile = `${filesRoot}/${files[0]}`;
    const output = `${filesRoot}/combined.pdf`;
    const recipe = new HummusRecipe(originalFile, output);
    for (let i = 1; i < files.length; i++) {
      recipe.appendPage(`${filesRoot}/${files[i]}`);
    }
    recipe.endPDF();
  } catch (error) {
    throw error;
  }
};
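A hypothetical call, assuming the listed files already exist under ./uploads/<companyID>/<flowID> (the IDs here are made up):

await combinePdfFiles(['one.pdf', 'two.pdf', 'three.pdf'], 'acme', 'flow42');
// => writes ./uploads/acme/flow42/combined.pdf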
I want to write a script that divides the lines read from a file into batches of 25, but unfortunately my sample batch ends up with 40 codes. For example, if I have 60 codes, it should create two batches of 25 items and one batch of 10. Unfortunately, I can't get it to work.
const fs = require('fs');

fs.readFile('code.txt', function (err, data) {
  if (err) throw err;
  const array = data.toString().split("\n");
  let count = 0;
  let items = [];
  for (let i in array) {
    items.push({
      PutRequest: {
        Item: {
          code: array[i]
        }
      }
    });
    let params = {
      RequestItems: {
        'TABLE_NAME': items
      }
    };
    if (count === 25) {
      dynamoDB.batchWrite(params, function (err, data) {
        if (err) {
          console.log(err);
        } else {
          count = 0;
          items = [];
        }
      });
    } else {
      count++;
    }
  }
});
code.txt content
https://0bin.net/paste/NA8-4hkq#1Ohwt5uUkQqE0YscwnxTX2gxEqlvAUVKp1JRipBCsZg
Any idea what I'm doing wrong?
Your dynamoDB.batchWrite() is asynchronous. Thus its callback is executed only after the loop has completed. So items and count are never reset ...
The easiest fix would be to switch to a promise-based approach like the following:
const BATCHSIZE = 25;
const fs = require('fs').promises;

async function batchLoad() {
  const lines = (await fs.readFile("code.txt", "utf-8")).split("\n");
  while (lines.length > 0) {
    const items = lines.splice(0, BATCHSIZE).map(l => ({ PutRequest: { Item: { code: l } } }));
    const params = { RequestItems: { TABLE_NAME: items } };
    await new Promise((resolve, reject) => {
      dynamoDb.batchWrite(params, (err) => {
        if (err) return reject(err);
        resolve();
      });
    });
  }
}
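A possible invocation, assuming dynamoDb is an already configured DocumentClient and TABLE_NAME has been replaced with your real table name:

batchLoad()
  .then(() => console.log('all batches written'))
  .catch(err => console.error(err));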
A callback-based approach could look like this:
const BATCHSIZE = 25;

fs.readFile("code.txt", "utf-8", (err, data) => {
  const lines = data.split("\n");
  function writeBatch() {
    if (!lines.length) return;
    const items = lines.splice(0, BATCHSIZE).map(l => ({ PutRequest: { Item: { code: l } } }));
    const params = { RequestItems: { TABLE_NAME: items } };
    dynamoDb.batchWrite(params, err => {
      if (err) ...
      else writeBatch();
    });
  }
  writeBatch();
});
The function writeBatch takes a certain number of lines from your original array and writes them to the database. Only after the write to the DB has succeeded does it recursively call itself and handle the next batch. But be aware that this approach may exceed the maximum call stack size and throw an error.
You can also make either of these approaches avoid mutating the lines array (which may be quite expensive) and instead just take the current slice:
const BATCHSIZE = 25;
const fs = require('fs').promises;

async function batchLoad() {
  const lines = (await fs.readFile("code.txt", "utf-8")).split("\n");
  let currentIndex = 0;
  while (currentIndex < lines.length) {
    const items = lines.slice(currentIndex, currentIndex + BATCHSIZE).map(l => ({ PutRequest: { Item: { code: l } } }));
    const params = { RequestItems: { TABLE_NAME: items } };
    await new Promise((resolve, reject) => {
      dynamoDb.batchWrite(params, (err) => {
        if (err) return reject(err);
        resolve();
      });
    });
    currentIndex += BATCHSIZE;
  }
}
and
const BATCHSIZE = 25;

fs.readFile("code.txt", "utf-8", (err, data) => {
  const lines = data.split("\n");
  function writeBatch(currentIndex) {
    if (currentIndex >= lines.length) return;
    const items = lines.slice(currentIndex, currentIndex + BATCHSIZE).map(l => ({ PutRequest: { Item: { code: l } } }));
    const params = { RequestItems: { TABLE_NAME: items } };
    dynamoDb.batchWrite(params, err => {
      if (err) ...
      else writeBatch(currentIndex + BATCHSIZE);
    });
  }
  writeBatch(0);
});
To avoid running into a maximum call stack exception you can also schedule the next batch on the event loop instead of calling it recursively, i.e.:
dynamoDb.batchWrite(params, err => {
  if (err) ...
  else setTimeout(() => { writeBatch(currentIndex + BATCHSIZE); }, 0);
});
This way you won't build up a massive callstack from recursive calls.
To keep track of how many records are already saved to the db you could simply store the current counter in a file. When you restart the process, load that file and check how many lines to skip. Don't forget to remove the file, once all records have been saved ... For example with the first approach:
const BATCHSIZE = 25;
const fs = require('fs').promises;

async function batchLoad() {
  const lines = (await fs.readFile("code.txt", "utf-8")).split("\n");
  let skipLines = 0;
  try {
    skipLines = +(await fs.readFile("skip.txt", "utf-8"));
    if (isNaN(skipLines)) skipLines = 0;
    lines.splice(0, skipLines);
  } catch (e) {
    skipLines = 0;
  }
  while (lines.length > 0) {
    const items = lines.splice(0, BATCHSIZE).map(l => ({ PutRequest: { Item: { code: l } } }));
    const params = { RequestItems: { TABLE_NAME: items } };
    await new Promise((resolve, reject) => {
      dynamoDb.batchWrite(params, (err) => {
        if (err) return reject(err);
        resolve();
      });
    });
    skipLines += BATCHSIZE;
    await fs.writeFile("skip.txt", `${skipLines}`);
  }
  try {
    await fs.unlink("skip.txt");
  } catch (e) {
  }
}
When an Excel file is uploaded in my program, it needs to be converted to a CSV file so it can be read. The conversion is working fine and I am using async/await; however, whenever I try to read the file using the csvtojson package in Node, the file does not get read properly. If I directly use a CSV file, then it works fine. The issue arises when the conversion occurs.
product.js
const multer = require('multer');
const express = require('express');
const router = express.Router();
const csv = require('csvtojson');
const fs = require('fs');
const xlsx = require('node-xlsx');

router.post('/upload', upload.single('singleFile'), async (req, res) => {
  let csvFilePath = req.file.path;
  let fileType = req.file.mimetype;

  const convertToCSV = async _ => {
    console.log("2");
    if (fileType === 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet' ||
        fileType === 'application/vnd.ms-excel') {
      let obj = xlsx.parse(csvFilePath);
      let rows = [];
      let writeStr = "";
      for (let i = 0; i < obj.length; i++) {
        let sheet = obj[i];
        for (let j = 0; j < sheet['data'].length; j++) {
          rows.push(sheet['data'][j]);
        }
      }
      // creates the csv string to write it to a file
      for (let i = 0; i < rows.length; i++) {
        writeStr += rows[i].join(",") + "\n";
      }
      console.log("3");
      fs.writeFile("csv/out.csv", writeStr, function (err) {
        if (err) {
          return res.status(400).send({'error': err});
        }
        console.log("4");
        console.log("out.csv was saved in the current directory!");
      });
    }
  }

  console.log("1");
  await convertToCSV().then(async _ => {
    console.log("5");
    const jsonArray = await csv({flatKeys: true})
      .fromFile(csvFilePath)
      .then(async (jsonObj) => {
        console.log("6");
        console.log(jsonObj[0]);
        ...
        // Few more functions
      }).catch(err => {
        return res.status(400).send(err);
      });
  });
});
My console log looks like this
1
2
3
5
4
out.csv was saved in the current directory!
6
{
'PK\u0003\u0004\u0014\u0000\u0006\u0000\b\u0000\u0000\u0000!\u0000b�h^\u0001\u0000\u0000�\u0004\u0000\u0000\u0013\u0000\b\u0002[Content_Types].xml �\u0004\u0002(�\u0000\u0002\u0000\u0000\
u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u00
Whereas if a new CSV file is uploaded or an existing CSV is used, then the output of console.log(jsonObj[0]); is
{
'Column1': 'Column 1 Data',
field2: 'field2 Data',
field3: 'field 3 Data',
Categories: 'categories',
....
}
I added await before fs.writeFile however the same issue arises. There are two files that get saved under the directory csv/
c33129f3bdef482657992dbf452d2c1b
out.csv
And the contents of the former file are what get read (I assume so, since the outputs are very similar), while out.csv is not read.
Update
I wrapped fs.writeFile in a promise and the console output is ordered now; however, the data that gets read is still the same:
const convertToCSV = async _ => {
  return new Promise((resolve, reject) => {
    console.log("2");
    ....
    console.log("3");
    fs.writeFile("csv/out.csv", writeStr, function (err) {
      if (err) {
        return res.status(400).send({'error': err});
      }
      console.log("4");
      console.log("out.csv was saved in the current directory!");
      resolve();
    });
  });
}
Console Log
1
2
3
4
out.csv was saved in the current directory!
5
6
{
'PK\u0003\u0004\u0014\u0000\u0006\u0000\b\u0000\u0000\u0000!\u0000b�h^\u0001\u0000\u0000�\u0004\u0000\u0000\u0013\u0000\b\u0002[Content_Types].xml �\u0004\u0002(�\u0000\u0002\u0000\u0000\
u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u00
You don't wait for fs.writeFile("csv/out.csv", ...), which is why you get 5 before 4 in the console.
You should wrap your function's content in a Promise:
const convertToCSV = async _ => {
  return new Promise((resolve, reject) => {
    console.log("2");
    ...
    console.log("3");
    fs.writeFile("csv/out.csv", writeStr, function (err) {
      if (err) {
        // I'd prefer to call "reject" here and add try/catch outside for sending 400
        return resolve(res.status(400).send({'error': err}));
      }
      console.log("4");
      console.log("out.csv was saved in the current directory!");
      resolve();
    });
  });
}
Also, you read csvFilePath, which contains the filename of the Excel file, not the CSV that was stored under the csv/out.csv name.
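A minimal sketch of that second fix, assuming convertToCSV resolves only after csv/out.csv has been written (as in the promise-wrapped version above):

await convertToCSV();
// read the converted file, not the uploaded .xlsx at req.file.path
const jsonArray = await csv({ flatKeys: true }).fromFile('csv/out.csv');
console.log(jsonArray[0]);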
I'm trying to write a batch script that will
1. Read XMLs from a directory
2. Parse each XML and find a value to use for a DB (database) lookup
3. Use the parsed value to look up additional metadata in the DB
4. Populate the XML with the metadata retrieved from the DB lookup (step 3)
5. Write the updated XML to a "complete" directory
6. Close the DB connection
The issue I'm running into is that I cannot control the code execution order so that I can close the DB connection at the end of the script. If I attempt to close the connection, I get a 'connection undefined' error. Below is my code for reference. Is there a good way to accomplish something like this in NodeJs, or should I look at doing this in Java or some other language?
'use strict';
let fs = require('fs');
let xml2js = require('xml2js');
const oracledb = require('oracledb');
const dbConfig = require('./dbconfig.js');
function pad(number, length)
{
var str = '' + number;
while (str.length < length)
{
str = '0' + str;
}
return str;
}
async function run() {
try {
// Get a non-pooled connection
let connection;
if (!connection)
{
connection = await oracledb.getConnection(dbConfig);
console.log('Connection was successful!');
}
let directory = "EDI_XMLS";
let dirBuf = Buffer.from(directory);
//var f = 0;
let files = fs.readdirSync(directory);
console.log(files);
for (let f = 0; f < files.length; f++)
{
let parser = new xml2js.Parser();
var xml_json_data = "";
// read the file
await fs.readFile(directory + "/" + files[f], async function(err, data) {
// parse the file
await parser.parseString(data, async function(err, result) {
let results;
var line_count = result.page.GLLines[0].GLLine.length;
console.dir('Invoice: ' + result.page.InvoiceNumber[0]);
console.dir('Line Total: ' + line_count);
console.log('File: ' + f);
try
{ // Lookup Data
results = await connection.execute('SELECT BUSINESS_UNIT, OPERATING_UNIT, DEPTID, PRODUCT, a.effdt FROM SYSADM.PS_A_NSS_SHPTO_ACC#FDEV a where (a.a_ship_to_customer = :shipTo) order by a.effdt desc', [pad(result.page.VoucherDescription[0], 10)], {
maxRows: 2
});
console.log(results.metaData);
console.log(results.rows);
}
catch (err)
{
console.error(err);
}
for (let i = 0; i < line_count; i++) // Populate data
{
result.page.GLLines[0].GLLine[i].GLBU[0] = results.rows[0][0];
result.page.GLLines[0].GLLine[i].OpUnit[0] = results.rows[0][1];
result.page.GLLines[0].GLLine[i].Department[0] = results.rows[0][2];
result.page.GLLines[0].GLLine[i].Product[0] = results.rows[0][3];
}
// Write to File
var builder = new xml2js.Builder();
var xml = builder.buildObject(result);
await fs.writeFile("complete/" + files[f], xml, function(err, data) {
if (err) console.log(err);
console.log("successfully written our update xml to file");
console.dir('BUs: ' + JSON.stringify(result.page));
}); //end write
}); //end parser
}); //end readfile
console.log('End');
} // async for
}
catch (err)
{
console.error(err);
}
finally
{
await connection.close();
console.log('Finally Done');
}
}
run();
console.log('completely Done');
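For what it's worth, a rough sketch of a strictly sequential version (an assumption-laden outline, not your original code: it uses the promise APIs of fs, xml2js's parseStringPromise and node-oracledb, and elides the lookup/populate logic) could look like this, so that the connection is only closed after every file has been processed:

'use strict';
const fs = require('fs').promises;
const xml2js = require('xml2js');
const oracledb = require('oracledb');
const dbConfig = require('./dbconfig.js');

async function run() {
  let connection;
  try {
    connection = await oracledb.getConnection(dbConfig);
    const directory = 'EDI_XMLS';
    const files = await fs.readdir(directory);
    for (const file of files) {
      const data = await fs.readFile(directory + '/' + file);
      const result = await new xml2js.Parser().parseStringPromise(data);
      // ... DB lookup with await connection.execute(...) and populate result.page here ...
      const xml = new xml2js.Builder().buildObject(result);
      await fs.writeFile('complete/' + file, xml);
    }
  } finally {
    if (connection) {
      await connection.close();
      console.log('Finally Done');
    }
  }
}

run().catch(err => console.error(err));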
I've made a translation service in Node.js:
module.exports = { GetcountryFR, GetRegionFR, GetChildrenFR, GetHomeownFR, GetHomestyleFR, GetIncomeFR, GetLstatusFR, GetWdomainFR, GetWtypeFR, GetSexFR };

async function GetcountryFR(country) {
  var countrix = country;
  switch (countrix) {
    case 'createuser.france': countrix = 'FRANCE'; break;
    case 'createuser.belgique': countrix = 'BELGIQUE'; break;
    default: countrix = 'UNKNOWN'; break;
  }
  return countrix;
}
And now I've made a function which uses the translate functions:
const translate = require('../services/translate');

exports.AllUserToCSV = async function CSV() {
  try {
    let user = User.find();
    var data = [];
    len = user.length;
    for (i = 0; i < len; i++) {
      let sexix = await translate.GetSexFr(user[i].sex);
      let regionix = await translate.GetRegionFR(user[i].region);
      let countrix = await translate.GetcountryFR(user[i].country);
      let wtypix = await translate.GetWtypeFR(user[i].wtype);
      let wdomainix = await translate.GetWdomainFR(user[i].wdomain);
      temp = {
        sex: sexix,
        region: regionix,
        country: countrix,
        wtype: wtypix,
        wdomain: wdomainix,
      }
      data.push(temp);
    }
    const csvData = csvjson.toCSV(data, { headers: 'key' })
    filename2 = '/assets/media/' + 'TEST' + '.csv';
    writeFile(filename, csvData, (err) => {
      if (err) {
        console.log(err); // Do something to handle the error or just throw it
        throw new Error(err);
      }
      console.log('Success!');
    });
  } catch (e) {
    console.error(e);
  }
}
The resulting CSV file is empty: [].
If I put hard-coded values in temp, it's OK.
Why doesn't my translate function work?
Thanks for the help, good friends :)
Simply await your call to the User model, i.e. let user = await User.find();
Also, for the loop, try:
let users = await User.find();
await Promise.all(users.map(async (user) => {
  let sexix = await translate.GetSexFr(user.sex);
  ...
}));
For writing to the file, you may want to use await fs.writeFile(...) (from the promise-based fs API). This will make sure the file is written before processing continues.
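Putting those pieces together, a rough sketch of the whole function (assuming fs.promises, that csvjson and User are already imported as in your original file, and the GetXxxFR names exported by the translation service above):

const fs = require('fs').promises;
const translate = require('../services/translate');

exports.AllUserToCSV = async function CSV() {
  const users = await User.find();
  // translate every user in parallel and collect the rows
  const data = await Promise.all(users.map(async (user) => ({
    sex: await translate.GetSexFR(user.sex),
    region: await translate.GetRegionFR(user.region),
    country: await translate.GetcountryFR(user.country),
    wtype: await translate.GetWtypeFR(user.wtype),
    wdomain: await translate.GetWdomainFR(user.wdomain),
  })));
  const csvData = csvjson.toCSV(data, { headers: 'key' });
  await fs.writeFile('/assets/media/TEST.csv', csvData);
  console.log('Success!');
};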
I want to save each element of an array as a separate document in Node.js.
Therefore I made the code below.
But when I run this code, it only saves body[0].
Could you recommend a solution?
exports.saveOrder = (req, res) => {
  const body = JSON.parse(res);
  for (let i = 0; i < body.length; i += 1) {
    const eachBody = body[i];
    const order = new Order(eachBody);
    order.save();
    return res.send('order is saved');
  }
};
For DB operations, you need to use promises or async/await, and send the response only once, after all orders are saved to the DB. Add try/catch to handle errors as well.
Check this code; it should work now.
exports.saveOrder = async (req, res) => {
  try {
    const body = JSON.parse(res); // check whether you really need to parse this
    const allResults = [];
    for (let i = 0; i < body.length; i += 1) {
      const eachBody = body[i];
      const order = new Order(eachBody);
      const result = await order.save();
      allResults.push(result);
    }
    return res.send(allResults);
  } catch (e) {
    console.log(e);
    return res.send(e);
  }
};
This is because you send the response (use return) inside the for loop.
So it saves body[0] and returns the response.
Use return outside the for loop:
for (let i = 0; i < body.length; i += 1) {
  const eachBody = body[i];
  const order = new Order(eachBody);
  order.save();
}
return res.send('order is saved');