async/await with postgres question and nodejs questions - node.js

Hi, I have this piece of code that I want to run in order; however, the last line always runs first and then the first one. Some guidance with this would be great.
await fs.readFile(file, "UTF-8", (err, lines) => {
  lines = lines.split(/\r?\n/);
  lines.forEach((line) => {
    line = line.split('\t');
    let book = new Book(line[0], line[1], line[2], line[3],
      line[4], line[5], line[6], line[7], line[8], line[9],
      line[10], line[11]);
    console.log(book);
  });
});
await pgdb.closeDatabase();
closeDatabase looks like this:
closeDatabase = async () => {
  console.log("Closed")
  this.client.end();
}
Thanks!

You are mixing callbacks and async/await.
With callbacks, only the code inside the callback executes after the file is retrieved; control flow immediately jumps to the next line.
You are not awaiting anything: the callback form of fs.readFile does not return a promise, so the await on it has no effect.
try:
import fs from "fs/promises" // or: const fs = require("fs/promises")
// code...
const contents = await fs.readFile(file, "UTF-8") // file holds the path, as above
// do something with the file contents
// do some more sync or async tasks
await pgdb.closeDatabase();
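Putting it together with the parsing logic from the question, a fuller sketch (assuming Book, pgdb, and file are defined as in the question, and that this runs in an ES module or inside an async function so that await is legal):

import fs from "fs/promises";

const contents = await fs.readFile(file, "utf-8");
const lines = contents.split(/\r?\n/);
for (const line of lines) {
  const fields = line.split('\t');
  const book = new Book(...fields.slice(0, 12)); // same 12 columns as before
  console.log(book);
}
// reached only after every line has been processed
await pgdb.closeDatabase();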

Related

Node js issue with loop order

Hello, I really need help with this issue: my last console.log executes BEFORE the for loop and I don't know how to fix it. I really need access to my array nbfilm after the for loop.
Can someone help me?
What the console prints: (link to screenshot)
client.db.query("SELECT name,id FROM film", function (err, result) {
if (err) throw err;
const catalog = new MessageEmbed()
.setTitle("Catalogue")
.setColor("#fcfe80")
.setFooter({text:"🍿 ・ PopFlix"})
let testresult =[]
let nbfilm =[]
for (let compteur of result){
testresult.push(compteur.id)
testresult.push(compteur.name)
}
console.log(testresult)
for (let compteur2 = 0; compteur2 < testresult.length; compteur2+=2){
client.db.query(`SELECT link FROM lien WHERE fid=${testresult[compteur2]}`, function (err,result) {
nbfilm.push(testresult[compteur2+1])
nbfilm.push(result.length)
console.log("nbfilm in for loop",nbfilm)
});
}
console.log("nbfilmAFTER",nbfilm)
});
The body of the loop delays execution. Because JavaScript performs I/O asynchronously, it is common for such calls to return a promise: the code executes in the order written, but the results of those actions only become visible after all pending tasks have completed. In your case, adding async/await to the code might help; see the Node docs on how to use it.
It looks like client.db.query is asynchronous. JS works as expected here because it doesn't wait for the query to finish before moving to the next line.
It's now considered better practice to use async/await instead of callbacks. If you tell us which package you are using, we can provide a code example.
client.db.query() is an asynchronous function, so its callback won't execute until after the console.log(nbfilm) line has run, regardless of how fast that query actually completes.
I'd recommend using Promise.all(). You will also have to "promisify" the query() function: pass everything you want to resolve(), then concatenate the results in the final line as below.
let promises = [];
for (....) {
  promises.push(new Promise((resolve, reject) => {
    client.db.query("select ... ", () => {
      // all the same stuff
      resolve([testresult[compteur2+1], result.length]);
    });
  }));
}
Promise.all(promises)
  .then((results) => results.reduce((p, c) => p.concat(c), []))
  .then(data => console.log(data)); // do whatever you want with "data"
Here's a simplified demo:
https://codesandbox.io/s/promise-all-example-78r347
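If the driver is a callback-based one like mysql (the question doesn't say which package is in use), the same logic with util.promisify and async/await might look like this sketch; the names follow the question's code:

const util = require("util");
// promisify query, binding it so its internal `this` is still client.db
const query = util.promisify(client.db.query).bind(client.db);

async function buildNbfilm() {
  const films = await query("SELECT name, id FROM film");
  const nbfilm = [];
  for (const film of films) {
    // a parameterized query avoids splicing ids into the SQL string
    const links = await query("SELECT link FROM lien WHERE fid = ?", [film.id]);
    nbfilm.push(film.name, links.length);
  }
  return nbfilm; // fully populated, unlike the original console.log
}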

What's the best way to create a RxJS Observable out of objects that are yielded from a node.js stream pipeline?

I was able to implement this, but I'm not able to explain why the process will exit with code 0 if I don't have another async function (processAfterDeclaration) always trying to pull from the Observable, recordObservable.
setup to run the source file
npm init -y
npm i rxjs@7.4.0 byline
source file that does what I want, but in a confusing way
// node.js 14
const fs = require('fs');
const pipeline = require('util').promisify(require('stream').pipeline);
const byline = require('byline');
const { Observable } = require('rxjs');
const { take } = require('rxjs/operators');

const sleep = ms => new Promise(r => setTimeout(r, ms));
let recordObservable;

(async () => {
  const inputFilePath = 'temp.csv';
  try {
    const data = 'a,b,c\n' +
      '1,2,3\n' +
      '10,20,30\n' +
      '100,200,300';
    fs.writeFileSync(inputFilePath, data);
    console.log('starting pipeline');
    // remove this line, and the `await pipeline` resolves, but process exits early?
    processAfterDeclaration().catch(console.error);
    await pipeline(
      fs.createReadStream(inputFilePath),
      byline.createStream(),
      async function* (sourceStream) {
        console.log('making observable', inputFilePath);
        recordObservable = new Observable(async subscriber => {
          for await (const lineBuffer of sourceStream) {
            subscriber.next(lineBuffer.toString());
          }
          subscriber.complete();
        });
        console.log('made observable', recordObservable);
      }
    );
    console.log('pipeline done', recordObservable);
  } catch (error) {
    console.error(error);
  } finally {
    fs.unlinkSync(inputFilePath);
  }
})();
async function processAfterDeclaration() {
  while (!recordObservable) {
    await sleep(100);
  }
  console.log('can process');
  await recordObservable
    .pipe(take(2))
    .subscribe(console.log)
}
edit: It may be better to just forgo node.js stream.pipeline. I'd think using pipeline is best because it should be the most efficient and offers backpressure, but I want to test some things offered by RxJS.
edit2: Another reason I can forgo stream.pipeline is that I can still use pipe methods and provide any readable stream to the from function as an argument. I can then use the subscribe method to write/append each item from the observable to my output stream, then call add on my subscription to attach teardown logic, specifically for closing my write stream. I would hope that RxJS from helps determine when to close the read stream it's given as input. Finally, I would recommend await lastValueFrom(myObservable), or possibly firstValueFrom.
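For example, a quick sketch (lastValueFrom and firstValueFrom ship with rxjs 7+; this must run inside an async function):

const { from, lastValueFrom } = require('rxjs');
// resolves with 3, the last value emitted before the observable completes
const last = await lastValueFrom(from([1, 2, 3]));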
RxJS from operator
The RxJS from operator will turn an async iterator (like node stream) into an observable for you!
I can't run/test your code, but something in this ballpark should work.
const fs = require('fs');
const byline = require('byline');
const { from } = require('rxjs');
const { map, take, finalize } = require('rxjs/operators');

const inputFilePath = 'temp.csv';

(async () => {
  const data = 'a,b,c\n' +
    '1,2,3\n' +
    '10,20,30\n' +
    '100,200,300';
  fs.writeFileSync(inputFilePath, data);
  console.log('starting pipeline');
  from(byline(fs.createReadStream(inputFilePath)))
    .pipe(
      map(lineBuffer => lineBuffer.toString()),
      take(2),
      finalize(() => fs.unlinkSync(inputFilePath))
    )
    .subscribe(console.log);
})();
Your second async function
I'm not able to explain why the process will exit with code 0 if I don't have another async function (processAfterDeclaration) always trying to pull from the Observable
If you define a function and never call it, that function will never calculate anything.
If you define an observable and never subscribe to it, that observable will never do anything either. That's different from promises, which start the moment they're defined. You just need to subscribe to that observable; it doesn't need a separate function though.
This should work the same:
recordObservable = new Observable(async subscriber => {
  for await (const lineBuffer of sourceStream) {
    subscriber.next(lineBuffer.toString());
  }
  subscriber.complete();
});
recordObservable.pipe(
  take(2)
).subscribe(console.log)
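To see the eager-versus-lazy difference in isolation, a minimal sketch:

const { Observable } = require('rxjs');

const promise = new Promise(resolve => {
  console.log('promise executor runs immediately');
  resolve(1);
});

const observable = new Observable(subscriber => {
  console.log('observable body runs only on subscribe');
  subscriber.next(1);
  subscriber.complete();
});

// At this point only the promise has logged anything.
observable.subscribe(); // now the observable body runs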
The second async function
I'm not able to explain why the process will exit with code 0 if I don't have another async function (processAfterDeclaration) always trying to pull from the Observable
The logic error was that the await pipeline would never resolve or reject, because the third step in the pipeline never yields anything: nothing ever subscribes to and pulls from recordObservable. It was a deadlock, written accidentally.

combining results from several async/await calls (again)

Note to would-be closers or markers-as-duplicate: this question is not answered in How do I convert an existing callback API to promises?, as all the answers there treat calls in isolation and do not explain how to deal with successive, dependent calls.
This question is basically the same as in combining results from several async/await calls, but because of my slightly different context I fail to see how the solution therein can be adapted/mimicked.
I have two successive calls to a database, using an old API which only knows about callbacks. The second call needs objects/values returned by the first.
I have a working callback-version of the code, as follows:
connection.query(sql1, function(error1, results1, fields1) {
  if (error1) {
    console.log("Error during first query", error1);
  }
  let sql2 = computeUsingFirstResult(results1);
  connection.query(sql2, function(error2, results2, fields2) {
    if (error2) {
      console.log("Error during second query", error2);
    }
    doSomething(connection, results1, results2);
  })
});
Here is my unsuccessful attempt to do it in async/await-style:
const util = require('util');

async function firstQuery(connection) {
  return util.promisify(connection.query).call(sql1).catch(
    error1 => console.log("Error during first query : ", error1)
  );
}

async function secondQuery(connection, result1) {
  let sql2 = computeUsingFirstResult(result1);
  return util.promisify(connection.query).call(sql2).catch(
    error2 => console.log("Error during second query : ", error2)
  );
}
let finalResult = {};

async function main() {
  const results1 = await firstQuery(connection);
  const results2 = await secondQuery(connection, results1);
  doSomething(connection, results1, results2);
  finalResult = [results1, results2];
  console.log("Here is the finalResult : ", finalResult);
}

main().catch(
  err => console.log("Something went wrong towards the end", err)
);
My async/await version fails as all the intermediate results are undefined. Yet, as far as I can see it should be equivalent to the non-async version above.
What is the correct way to do this?
There are 2 approaches I am hoping you can try, to see if either of them helps:
(1) Bind the promisified connection.query back to connection in both firstQuery and secondQuery, something like so:
async function firstQuery(connection) {
  return util.promisify(connection.query).bind(connection, sql1)().catch(
    error1 => console.log("Error during first query : ", error1)
  );
}
// bind makes sure that the internal bindings of the connection object stay intact
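An equivalent form binds once and then passes the SQL as a normal argument, which some find easier to read (a sketch):

async function firstQuery(connection) {
  // bind the promisified query to the connection, then call it with the SQL
  const query = util.promisify(connection.query).bind(connection);
  return query(sql1).catch(
    error1 => console.log("Error during first query : ", error1)
  );
}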
If the above approach doesn't yield any result, try the next approach:
(2) Try using a Promise wrapper instead of util.promisify for both firstQuery and secondQuery, like so:
function firstQuery(connection, sql1) {
  return new Promise((resolve, reject) => {
    connection.query(sql1, function(error1, results1, fields1) {
      return error1 ? reject(error1) : resolve(results1);
    })
  });
}
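A matching wrapper for the second query follows the same pattern (a sketch, reusing computeUsingFirstResult from the question):

function secondQuery(connection, result1) {
  // sql2 is derived from the first query's results
  let sql2 = computeUsingFirstResult(result1);
  return new Promise((resolve, reject) => {
    connection.query(sql2, function(error2, results2, fields2) {
      return error2 ? reject(error2) : resolve(results2);
    })
  });
}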
And now call them as you were in your code, using async/await in the main function.
A few potential bugs/typos I noticed in the code:
(1) firstQuery doesn't seem to be passed sql1 as one of its arguments; maybe this is intentional, if sql1 exists in global scope.
(2) If you attach a catch block to each of your queries, it will affect both calls (see comments in the code below):
async function main() {
  const results1 = await firstQuery(connection); // <-- if this call fails, results1 will be undefined and execution continues to the next step
  const results2 = await secondQuery(connection, results1); // same thing here
  doSomething(connection, results1, results2);
  finalResult = [results1, results2];
  console.log("Here is the finalResult : ", finalResult);
}
A possible solution is to remove the catch blocks from individual functions and just return the respective promises.
Then you can deal with them via a single try/catch block like this:
async function main() {
  try {
    const results1 = await firstQuery(connection);
    const results2 = await secondQuery(connection, results1);
    doSomething(connection, results1, results2);
    finalResult = [results1, results2];
    console.log("Here is the finalResult : ", finalResult);
  } catch (err) {
    console.log("Error", err);
  }
}
This ensures that as soon as the first call fails, the function goes straight to the catch block without executing any of the remaining lines.

Asynchronous file read reading different number of lines each time, not halting

I built a simple asynchronous implementation of the readline module built into Node.js, which is simply a wrapper around the event-based module itself. The code is below:
const readline = require('readline');

module.exports = {
  createInterface: args => {
    let self = {
      interface: readline.createInterface(args),
      readLine: () => new Promise((succ, fail) => {
        if (self.interface === null) {
          succ(null);
        } else {
          self.interface.once('line', succ);
        }
      }),
      hasLine: () => self.interface !== null
    };
    self.interface.on('close', () => {
      self.interface = null;
    });
    return self;
  }
}
Ideally, I would use it like so, in code like this:
const readline = require("./async-readline");
let filename = "bar.txt";
let linereader = readline.createInterface({
input: fs.createReadStream(filename)
});
let lines = 0;
while (linereader.hasLine()) {
let line = await linereader.readLine();
lines++;
console.log(lines);
}
console.log("Finished");
However, I've observed some erratic and unexpected behavior with this async wrapper. For one, it fails to recognize when the file ends: it simply hangs once it reaches the last line, never printing "Finished". On top of that, when the input file is large, say a couple of thousand lines, it's always off by a few lines and doesn't successfully read the full file before halting; in a 2000+ line file it can be off by as many as 20-40 lines. If I throw a print statement into the .on('close') listener, I see that it does trigger; however, the program still doesn't recognize that it should no longer have lines to read.
It seems that in nodejs v11.7, the readline interface was given async iterator functionality and can simply be looped through with a for await ... of loop:
const rl = readline.createInterface({
  input: fs.createReadStream(filename)
});

for await (const line of rl) {
  console.log(line)
}
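Applied to the line-counting loop from the question, usage might look like this sketch:

const fs = require('fs');
const readline = require('readline');

async function countLines(filename) {
  const rl = readline.createInterface({
    input: fs.createReadStream(filename)
  });
  let lines = 0;
  for await (const line of rl) { // iterates until the underlying stream closes
    lines++;
    console.log(lines);
  }
  console.log("Finished"); // reached once the file ends
}

countLines("bar.txt").catch(console.error);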
How to get synchronous readline, or "simulate" it using async, in nodejs?

Handling Async Functions and Routing in Formidable / Extracting text in PDFReader

I'm creating an application where users upload a PDF and its text is extracted into JSON format. I am able to access the text, but I can't hold the response until the PDF extraction is complete. I'm unfamiliar with Formidable and I may be missing something entirely.
I am using Formidable for uploading and PDFReader for text extraction. The front end and back end are on separate servers, and the app is only intended for local use, so that shouldn't be an issue. I'm able to console.log the text perfectly. I would like to work with the text in JSON format in some way, appending it to the response sent back to the front end, but I can't seem to hold the text until the response is sent.
const IncomingForm = require("formidable").IncomingForm;
const { PdfReader } = require('pdfreader');
const test = new PdfReader(this, 1);

module.exports = function upload(req, res) {
  let str = ''
  let form = new IncomingForm();
  form.parse(req, () => {
    console.log('parse')
  });
  form.on("file", (field, file) => {
    test.parseFileItems(file.path, (err, item) => {
      if (err) {
        console.log(err)
      }
      else if (item) {
        if (item.text) {
          console.log(item.text)
          str += item.text
        }
      }
    })
  });
  form.on("end", () => {
    console.log("reached end/str: ", str)
  });
};
I've attempted a number of different ways of handling the async functions, primarily within form.on('file'). The following attempts at form.on('file') produce the same effect (the text is console.logged correctly, but only after form.on('end') is hit):
// Making the callback to form.on('file') async, then using a traditional await
form.on("file", async (field, file) => {
  //...
  await test.parseFileItems(...)
  //...
  console.log(str) // After end of PDFReader code, shows blank
// Making the cb async, then manually creating a promise
form.on("file", async (field, file) => {
  //...
  let textProm = await new Promise((res, rej) => //...
I've also attempted to convert the text manually from the Buffer using fs.readFile, but this also produces the same effect; I can only access the text after form.end is hit.
A few things I see are that form.on('file') is hit first, then form.parse. It seems I may be attempting to parse the document twice (Formidable and PdfReader), but this is probably necessary.
Also, after reading through the docs/Stack Overflow, I think I'm mixing the built-in middleware (form.parse/form.on/form.end) with manual callbacks, but I was unsure of how to stick with just one, and I'm still able to access the text.
Finally, PDFReader accesses text one line at a time, so parseFileItems is run for every line. I've attempted to resolve a Promise.all with the PdfReader instance, but I couldn't get it to work.
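For what it's worth, a promise wrapper around parseFileItems in the shape described above might look like this (a sketch; it assumes pdfreader signals end-of-file by invoking the callback with a falsy item, matching the code above):

const parsePdf = path =>
  new Promise((resolve, reject) => {
    let str = '';
    test.parseFileItems(path, (err, item) => {
      if (err) reject(err);
      else if (!item) resolve(str); // a falsy item signals end of file
      else if (item.text) str += item.text;
    });
  });

// usage inside the handler:
// form.on("file", async (field, file) => {
//   const text = await parsePdf(file.path);
//   // text now holds the full document; attach it to the response here
// });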
Any help would be greatly appreciated!
