await is only valid in async function error - node.js

I am trying to use async/await with Product.findOneAndUpdate(), but I am getting an "await is only valid in async function" error on the await Product.findOneAndUpdate() call. Here is my code. Many thanks in advance; any help is greatly appreciated.
router.post('/product/saveeditproduct/:id', JWTAuthenticatToken, async (req, res) => {
    let form = formidable.IncomingForm()
    form.parse(req, (err, fields, files) => {
        if (err) {
            return res.json({statusCode: "400", msg: "upload denied"})
        }
        const {productname, productdescription} = fields
        const productslug = slugify(productname)
        const {image} = files
        const product = await Product.findOneAndUpdate({productslug: req.params.id},
            {$set: {productname: productname, productdescription: productdescription}}, {new: true})
        if (image) {
            //---Remove old image from AWS S3---
            const deleteParams = {
                Bucket: process.env.AWS_BUCKET_NAME,
                Key: `image/${product.productslug}`,
                Body: fs.readFileSync(image.path),
                ACL: 'public-read',
                ContentType: `image/jpg`
            }
            s3.deleteObject(deleteParams, (err, data) => {
            })
            //---Remove old image from AWS S3---
            //----Upload new image to AWS S3----
            const params = {
                Bucket: process.env.AWS_BUCKET_NAME,
                Key: `image/${productslug}`,
                Body: fs.readFileSync(image.path),
                ACL: 'public-read',
                ContentType: `image/jpg`
            }
            s3.upload(params, async (err, data) => {
                if (err) {
                    res.json({status: true, error: err})
                } else {
                    product.productimageurl = data.Location
                    const productresult = await product.save()
                    return res.json({statusCode: "200", data: productresult})
                }
            })
        }
        //----Upload new image to AWS S3----
        return res.json({statusCode: "200"})
    })
})

I think you forgot to add async here:
form.parse(req, async (err, fields, files) => {
    // code....
})

await is only valid directly inside a function that is declared async.
You made the outer function async:
router.post('/product/saveeditproduct/:id', JWTAuthenticatToken, async (req, res) => {
});
But you forgot to add async on the inner callback (the direct parent of that particular await).
The solution is to make that callback async as well:
form.parse(req, async (err, fields, files) => {
    // ...
})
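For reference, a minimal sketch of the route once the parse callback is async (same names as in the question; the image deletion and upload steps are elided here):
router.post('/product/saveeditproduct/:id', JWTAuthenticatToken, async (req, res) => {
    const form = formidable.IncomingForm()
    // The parse callback is now async, so await is valid inside it
    form.parse(req, async (err, fields, files) => {
        if (err) {
            return res.json({statusCode: "400", msg: "upload denied"})
        }
        const {productname, productdescription} = fields
        const product = await Product.findOneAndUpdate(
            {productslug: req.params.id},
            {$set: {productname, productdescription}},
            {new: true}
        )
        // ...remove the old image and upload the new one as in the question...
        return res.json({statusCode: "200"})
    })
})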

Related

await not working on res.app.render in Express.js

I want to generate multiple PDF files and attach them to an email, but await does not seem to work on res.app.render.
route.get('/:id/receipts', async function (req, res) {
    ...
    let attachments = [];
    for await (let item of items) {
        res.view.item = item;
        console.log(1)
        await res.app.render('pdfs/receipt', res.view, async function (err, html) {
            console.log(2)
            if (err) return res.end(err.stack)
            return await pdf.create(html).toBuffer(async function (err, buffer) {
                console.log(3)
                attachments.push({
                    content: buffer,
                    filename: 'receipt.pdf',
                })
            });
        });
    }
    console.log(4)
    ...
})
Expect Result:
1
2
3
4
Actual Result:
1
4
2
3
I think res.app.render does not return a promise, which is why awaiting it has no effect. You have to wrap it in a custom promise and await that. I hope the following code helps.
route.get('/:id/receipts', async function (req, res) {
    ...
    let attachments = [];
    for await (let item of items) {
        res.view.item = item;
        console.log(1)
        const customPromise = new Promise((resolve, reject) => {
            res.app.render('pdfs/receipt', res.view, function (err, html) {
                console.log(2)
                if (err) { res.end(err.stack); return reject(err); }
                pdf.create(html).toBuffer(function (err, buffer) {
                    console.log(3)
                    if (err) return reject(err);
                    attachments.push({
                        content: buffer,
                        filename: 'receipt.pdf',
                    })
                    // Only resolve once the buffer has been pushed
                    resolve();
                });
            });
        });
        await customPromise; // wait for this receipt before moving on to the next item
    }
    console.log(4)
    ...
})
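As an alternative sketch (my addition, not from the original answer): Node's built-in util.promisify can wrap the (err, html) callback of res.app.render, and the callback-based pdf.create(html).toBuffer from the question can be wrapped in a promise inline, so the loop body stays flat:
const util = require('util');

route.get('/:id/receipts', async function (req, res) {
    ...
    // Bind render to the app so `this` is preserved when promisified
    const renderAsync = util.promisify(res.app.render.bind(res.app));
    let attachments = [];
    for (const item of items) {
        res.view.item = item;
        const html = await renderAsync('pdfs/receipt', res.view);
        // Wrap the callback-based toBuffer in a promise as well
        const buffer = await new Promise((resolve, reject) => {
            pdf.create(html).toBuffer((err, buf) => err ? reject(err) : resolve(buf));
        });
        attachments.push({ content: buffer, filename: 'receipt.pdf' });
    }
    // ...attach `attachments` to the email here...
})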

How to delete image on Aws s3 with Node and Express

I am working on a Node.js and Express.js application. I want to delete an image on AWS S3 that I uploaded with multer-s3.
I have tried many examples that I found online, but none of them worked.
I tried this example, but it didn't work:
router.delete("/:id/delete", async (req, res) => {
const params = {
Bucket: bucketname,
Key:
"https://nodeilders.s3.us-east-
2.amazonaws.com/public/uploads/programImages/church4%20%281%29.jpeg",
};
s3.deleteObject(params, (error, data) => {
if (error) {
res.status(500).send(error);
} else {
console.log("File has been deleted successfully");
}
});
});
I also tried this example, but it didn't work either:
router.delete("/:id/delete", async (req, res) => {
const s3delete = function (params) {
return new Promise((resolve, reject) => {
s3.createBucket({
Bucket: BUCKET_NAME /* Put your bucket name */
}, function () {
s3.deleteObject(params, function (err, data) {
if (err) console.log(err);
else
console.log(
"Successfully deleted file from bucket";
);
console.log(data);
});
});
});
};
});
The first example logged that the file was successfully deleted but it was not deleted.
What can I try to resolve this?
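One likely cause (my assumption; this thread has no accepted answer): deleteObject expects Key to be the object's key inside the bucket, not its full https:// URL, and S3 reports a successful delete even when no object matches the given key, which would explain the misleading log in the first example. A minimal sketch along those lines, with the key URL-decoded from the address in the question purely as an illustration:
router.delete("/:id/delete", async (req, res) => {
  // Key is the object's path inside the bucket, not its full URL
  const params = {
    Bucket: bucketname,
    Key: "public/uploads/programImages/church4 (1).jpeg",
  };
  s3.deleteObject(params, (error, data) => {
    if (error) {
      return res.status(500).send(error);
    }
    console.log("File has been deleted successfully");
    res.status(200).json({ message: "Image deleted" });
  });
});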

writeFileSync only writes half of the data

I encountered a problem while working on my project. When I try to write my data to a CSV file, it only writes half of the data, sometimes even less than half. I don't know what the problem is because no error is shown in the terminal.
Below is my code
async function getFile(req, res, next) {
    try {
        let URI;
        const listOfKeys = await listAllKeys();
        let temp = []
        await Promise.all(listOfKeys.map(async function (data) {
            let response = await writeFile(data.Key);
            temp.push(response)
        }))
            .then(async _ => {
                fs.writeFileSync(FILE_PATH, Buffer.concat(temp));
            })
            .catch(err => {
                console.log(err)
            })
        return res.json({ message: 'halo' });
    } catch (err) {
        console.log('hmm.... error', err);
        return next(new APIError(err, httpStatus.INTERNAL_SERVER_ERROR, true));
    };
};
And this is the writeFile function
function writeFile(key) {
    return new Promise((resolve, reject) => {
        s3.getObject({ Bucket: process.env.AWS_BUCKET, Key: key }, (err, data) => {
            if (err) reject(err)
            else resolve(data.Body)
        })
    });
};
If possible, I would like to know the details of my problem and how to fix it. Thanks.
It looks to me like you can do it like this (function names have been modified to make sense to me):
const fsp = require('fs').promises;

async function getDataAndWriteFile(req, res, next) {
    try {
        let URI;
        const listOfKeys = await listAllKeys();
        let responses = await Promise.all(listOfKeys.map(function (data) {
            return getData(data.Key);
        }));
        await fsp.writeFile(FILE_PATH, Buffer.concat(responses));
        res.json({ message: 'halo' });
    } catch (err) {
        console.log('hmm.... error', err);
        next(new APIError(err, httpStatus.INTERNAL_SERVER_ERROR, true));
    }
}

function getData(key) {
    return new Promise((resolve, reject) => {
        s3.getObject({ Bucket: process.env.AWS_BUCKET, Key: key }, (err, data) => {
            if (err) reject(err)
            else resolve(data.Body)
        })
    });
}
Summary of changes:
Change function names to better describe what they do
Use let responses = await Promise.all() to get the data from the promise array.
Use the promise interface in the fs module with await fsp.writeFile() to write the data out to your file.
Use try/catch to catch all the promise rejections in one place
Possible Open Issues:
Writing this Buffer.concat(responses) to disk seems kind of odd. Is that really what you want in this file?

How to use result of called 'then' for next 'then'?

I want to return what I get after an async call.
So
app.get("/books", async (req, res) => {
let books = await getBooks()
.then(json => {
res.status(200).send({"books": json});
});
});
This should wait to send the result until the called getBooks is done.
export async function getBooks() {
    console.log("Getting books from cloud");
    Book.findAll({
        // ...
    }).then(books => {
        console.log("Got books");
        return JSON.stringify(books, null, 4);
    });
}
But right now the response gets rendered without actually waiting for the result.
You don't need to mix .then() with await. You can just await getBooks() and use the result of that (as long as getBooks actually returns its value; see the other answers for that part).
app.get("/books", async (req, res) => {
const books = await getBooks();
res.status(200).send({ books });
});
I'd highly suggest taking it a step further and using try/catch to handle failure cases
app.get("/books", async (req, res) => {
try {
const books = await getBooks();
res.status(200).send({ books });
} catch (error) {
// massage this to send the correct status code and body
res.status(400).send( { error });
}
});
You can use await in the second function too:
export async function getBooks() {
    console.log("Getting books from cloud");
    var books = await Book.findAll({
        // ...
    })
    if (books) {
        console.log("Got books");
        return JSON.stringify(books, null, 4);
    }
}
You just need to return your promise, and since getBooks only returns a promise it doesn't need to be async:
app.get("/books", async (req, res) => {
let json = await getBooks()
res.status(200).send({"books": json});
});
export function getBooks() {
console.log("Getting books from cloud");
return Book.findAll({
// ...
}).then(books => {
console.log("Got books");
return JSON.stringify(books, null, 4);
});
}
app.get("/books", async (req, res) => {
let books = await getBooks();
res.status(200).send({"books": books});
})

Avoiding callback hell

I have this code that serves every markdown file in the './markdown' folder at '/api/markdown/filename'.
var apiRouter = express.Router();
markdownFolder = './markdown/';

apiRouter.get('/:markdown_file_noext', function(req, res) {
    fs.readdir(markdownFolder, function(err, markdown) {
        if (err) throw err;
        markdown.forEach(function(file) {
            fs.readFile(markdownFolder + file, 'utf8', function(err, file_content) {
                if (err) throw err;
                fileNoExtension = file.slice(0, file.indexOf('.'));
                if (req.params.markdown_file_noext == fileNoExtension) {
                    res.json({
                        'title': fileNoExtension,
                        'markdown': marked(file_content)
                    });
                };
            });
        });
    });
});
But I end up with a ton of nested callbacks due to the nature of the 'fs' methods. How do I avoid this?
Using Q as a promise library:
const Q = require('q');
const fs = require('fs');

const markdownFolder = './markdown/';
const readdir = Q.nfbind(fs.readdir);
const readFile = Q.nfbind(fs.readFile);

readdir(markdownFolder).then(markdown => {
    const promises = [];
    markdown.forEach(file => promises.push(readFile(markdownFolder + file, 'utf8')));
    return Q.all(promises);
}).then(files => {
    // Do your magic.
}).catch(error => {
    // Do something with error.
});
You have different options.
Use named functions instead of anonymous functions. That would make the code a little more readable, but you would still be using callbacks.
Use Promises, but you will need to wrap the fs module, for example with bluebird.
For a more advanced option, you can use generators and Promises to make your code read more like synchronous code. Take a look at co or bluebird.coroutine.
With Promises you could do it like this:
const path = require('path');

var apiRouter = express.Router();
markdownFolder = './markdown/';

apiRouter.get('/:markdown_file_noext', function(req, res) {
    let fileNames;
    readdir(markdownFolder)
        .then((files) => {
            fileNames = files; // keep the names so they can be matched to contents later
            const tasks = files.map((file) => {
                const filePath = path.resolve(markdownFolder, file);
                return readFile(filePath);
            });
            return Promise.all(tasks); // Read all files
        })
        .then((fileContents) => {
            return fileContents.map((content, index) => {
                const fileNoExtension = fileNames[index].slice(0, fileNames[index].indexOf('.'));
                if (req.params.markdown_file_noext == fileNoExtension) {
                    return {
                        'title': fileNoExtension,
                        'markdown': marked(content)
                    };
                }
            });
        })
        .then((results) => {
            // It's better if you aggregate all results in one array and return it,
            // instead of calling res.json for each result
            res.json(results);
        })
        .catch((err) => {
            // All errors are caught here
            console.log(err);
        })
});

function readdir(folderPath) {
    return new Promise((resolve, reject) => {
        fs.readdir(folderPath, (err, files) => {
            if (err) {
                return reject(err);
            }
            resolve(files);
        });
    });
}

function readFile(filePath) {
    return new Promise((resolve, reject) => {
        fs.readFile(filePath, 'utf8', (err, file_content) => {
            if (err) {
                return reject(err);
            }
            resolve(file_content);
        });
    });
}
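As a further option (my addition, not part of the original answers): recent Node versions expose a promise-based fs API, so the same route can be written with async/await and no manual wrapping. A minimal sketch under that assumption, which also reads only the matching file instead of every file in the folder:
const path = require('path');
const fsp = require('fs').promises;

const markdownFolder = './markdown/';

apiRouter.get('/:markdown_file_noext', async function (req, res) {
    try {
        const files = await fsp.readdir(markdownFolder);
        // Find the file whose name without extension matches the URL parameter
        const match = files.find(
            (file) => file.slice(0, file.indexOf('.')) === req.params.markdown_file_noext
        );
        if (!match) {
            return res.status(404).json({ error: 'Not found' });
        }
        const content = await fsp.readFile(path.resolve(markdownFolder, match), 'utf8');
        res.json({
            'title': req.params.markdown_file_noext,
            'markdown': marked(content)
        });
    } catch (err) {
        console.log(err);
        res.status(500).json({ error: 'Internal error' });
    }
});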
