Upload images - Node.js Paperclip and S3

I want to upload an image to S3 and save it to the user record with Node.js, just like the Rails Paperclip gem does.
I believe this should be the process, but again I'm quite confused about how this package should work:
receive an image and resize it with paperclip
save or update it to S3
save the file to the user in the DB
I have a Rails app with a Postgres database where users can upload an image; it is stored in S3 and resized with the Paperclip gem. Here's how it is stored:
irb(main):003:0> user.avatar
=> #<Paperclip::Attachment:0x000055b3e043aa50 @name=:avatar,
@name_string="avatar", @instance=#<User id: 1, email:
"example@gmail.com", created_at: "2016-06-11 22:52:36",
updated_at: "2019-06-16 17:17:16", first_name: "Clarissa",
last_name: "Jones", avatar_file_name: "two_people_talking.gif",
avatar_content_type: "image/gif", avatar_file_size: 373197,
avatar_updated_at: "2019-06-16 17:17:12">, @options={:convert_options=>{},
:default_style=>:original, :default_url=>":style/missing.png",
:escape_url=>true, :restricted_characters=>/[&$+,\/:;=?#<>\[\]\{\}\|\\\^~%# ]/,
:filename_cleaner=>nil,
:hash_data=>":class/:attachment/:id/:style/:updated_at",
:hash_digest=>"SHA1", :interpolator=>Paperclip::Interpolations,
:only_process=>[],
:path=>"/:class/:attachment/:id_partition/:style/:filename",
:preserve_files=>false, :processors=>[:thumbnail],
:source_file_options=>{:all=>"-auto-orient"}, :storage=>:s3,
:styles=>{:large=>"500x500#", :medium=>"200x200#",
:thumb=>"100x100#"}, :url=>":s3_path_url",
:url_generator=>Paperclip::UrlGenerator,
:use_default_time_zone=>true, :use_timestamp=>true, :whiny=>true,
:validate_media_type=>true, :adapter_options=>{:hash_digest=>Digest::MD5},
:check_validity_before_processing=>true,
:s3_host_name=>"s3-us-west-2.amazonaws.com", :s3_protocol=>"https",
:s3_credentials=>{:bucket=>"example", :access_key_id=>"REDACTED",
:secret_access_key=>"REDACTED", :s3_region=>"us-west-2"}},
@post_processing=true, @queued_for_delete=[], @queued_for_write={},
@errors={}, @dirty=false, @interpolator=Paperclip::Interpolations,
@url_generator=#<Paperclip::UrlGenerator:0x000055b3e043a8e8
@attachment=#<Paperclip::Attachment:0x000055b3e043aa50 ...>>,
@source_file_options={:all=>"-auto-orient"}, @whiny=true,
@s3_options={}, @s3_permissions={:default=>:"public-read"},
@s3_protocol="https", @s3_metadata={}, @s3_headers={},
@s3_storage_class={:default=>nil},
@s3_server_side_encryption=false, @http_proxy=nil,
@use_accelerate_endpoint=nil>
user.avatar(:thumb) returns:
https://s3-us-west-2.amazonaws.com/example/users/avatars/000/000/001/thumb/two_people_talking.gif?1560705432
Now I'm trying to allow the user to upload a new image, or change their image, through a React Native app; the backend is Node.js, which is relatively new to me.
I'm quite confused about how to implement this, especially because the examples all reference Mongoose, which I'm not using.
Just to show how I successfully update the user, here is how I update the user's first_name:
users.updateUserPhoto = (req, res) => {
  let id = req.decoded.id
  let first_name = req.body.first_name
  models.Users.update(
    { first_name: first_name },
    {
      where: {
        id: req.decoded.id
      }
    },
  ).then(response => {
    res.status(200).json({ status: 200, data: { response } });
  })
  .catch(error => {
    res.status(500).json({ status: 500, err: error });
  })
}
Here is the package I found, node-paperclip-s3, and here's what I'm trying to do:
'use strict'
let users = {};
const { Users } = require('../models');
let models = require("../models/index");
let Sequelize = require('sequelize');
let Paperclip = require('node-paperclip');
let Op = Sequelize.Op;
let sequelizeDB = require('../modules/Sequelize');

users.updateUserPhoto = (req, res) => {
  let id = req.decoded.id
  let avatar = req.body.avatar // <- this is a file path
  models.Users.plugin(Paperclip.plugins, {
    avatar: {
      styles: [
        { original: true },
        { large: { width: 500, height: 500 } },
        { medium: { width: 200, height: 200 } },
        { thumb: { width: 100, height: 100 } }
      ],
      prefix: '/users/{{attachment}}/{{id}}/{{filename}}',
      name_format: '{{style}}.{{extension}}',
      storage: 's3',
      s3: {
        bucket: process.env.S3_BUCKET_NAME,
        region: 'us-west-2',
        key: process.env.AWS_ACCESS_KEY_ID,
        secret: process.env.AWS_SECRET_ACCESS_KEY,
      }
    }
  })
  models.Users.update(
    { avatar: avatar },
    {
      where: {
        id: req.decoded.id
      }
    },
  ).then(response => {
    res.status(200).json({ status: 200, data: { response } });
  })
  .catch(error => {
    res.status(500).json({ status: 500, err: error });
  })
}
I've also tried something like this:
models.Users.update(Paperclip.plugins, {
  avatar: {
    styles: [
      { original: true },
      { large: { width: 500, height: 500 } },
      { medium: { width: 200, height: 200 } },
      { thumb: { width: 100, height: 100 } }
    ],
    prefix: '/users/{{attachment}}/{{id}}/{{filename}}',
    name_format: '{{style}}.{{extension}}',
    storage: 's3',
    s3: {
      bucket: process.env.S3_BUCKET_NAME,
      region: 'us-west-2',
      key: process.env.AWS_ACCESS_KEY_ID,
      secret: process.env.AWS_SECRET_ACCESS_KEY,
    }
  }
}, {
  where: {
    id: req.decoded.id
  }
}).then(response => {
  res.status(200).json({ status: 200, data: { response } });
})
.catch(error => {
  res.status(500).json({ status: 500, err: error });
})
I've tried:
let new_avatar = (Paperclip.plugins, {
  avatar: {
    styles: [
      { original: true },
      { large: { width: 500, height: 500 } },
      { medium: { width: 200, height: 200 } },
      { thumb: { width: 100, height: 100 } }
    ],
    prefix: `/users/avatars/{{attachment}}/{{id}}/{{filename}}`,
    name_format: '{{style}}.{{extension}}',
    storage: 's3',
    s3: {
      bucket: process.env.S3_BUCKET_NAME,
      region: 'us-west-2',
      key: process.env.AWS_ACCESS_KEY_ID,
      secret: process.env.AWS_SECRET_ACCESS_KEY,
    }
  },
})
let data = {
  avatar: new_avatar
}
models.Users.update(
  data,
  {
    where: {
      id: req.decoded.id
    }
  },
).then(response => {
  res.status(200).json({ status: 200, data: { response } });
})
.catch(error => {
  res.status(500).json({ status: 500, err: error });
})
From the example in the link above, I don't understand how it saves to S3, or how it updates the database in the same way the Rails gem creates that record.
Question: how do I save the resized images plus the original to S3, and to the user record in the database, in exactly the same way the Rails Paperclip gem does?
I originally had this open for a 400-point bounty, and am more than happy to still offer 400 points to anyone who can help me solve this. Thanks!!

The code below is for Node.js.
I have added an API to save an image from the frontend to AWS S3.
I have added comments within the code for better understanding.
var express = require("express");
var router = express.Router();
var fs = require("fs");
var aws = require('aws-sdk');
// `config` is assumed to be your app's config object holding the AWS credentials
aws.config.update({
  secretAccessKey: config.AwsS3SecretAccessKey,
  accessKeyId: config.AwsS3AccessKeyId,
  region: config.AwsS3Region
});
router
  .route("/uploadImage")
  .post(function (req, res) {
    // req.files.imageFile contains the file from the client, modify it as per your requirement
    var file = getDesiredFileFromPaperclip(req.files.imageFile);
    const fileName = new Date().getTime() + file.name;
    // before uploading, we need to create an instance of the client file
    file.mv(fileName, (movErr, movedFile) => {
      if (movErr) {
        console.log(movErr);
        res.sendStatus(400);
        return;
      }
      // read file data
      fs.readFile(fileName, (err, data) => {
        if (err) {
          console.error(err)
          res.sendStatus(400);
        }
        else {
          // as we now have the byte data of the file, delete the file instance
          try {
            fs.unlinkSync(fileName);
          } catch (error) {
            console.error(error);
          }
          // now, configure aws
          var s3 = new aws.S3();
          const params = {
            Bucket: config.AwsS3BucketName, // pass your bucket name
            Key: fileName, // file will be saved as bucket_name/file.ext
            Body: data
          }
          // upload file
          s3.upload(params, function (s3Err, awsFileData) {
            if (s3Err) {
              console.error(s3Err)
              res.sendStatus(400);
            } else {
              console.log(`File uploaded successfully at ${awsFileData.Location}`)
              // update uploaded file data in database using 'models.Users.update'
              // send response to client/frontend
              var obj = {};
              obj.status = { "code": "200", "message": "Yipee!! Its Done" };
              obj.result = { url: awsFileData.Location };
              res.status(200).send(obj);
            }
          });
        }
      });
    });
  });
This is an old-school, non-fancy solution. Please try it out and let me know.
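The snippet above uploads only the original file. If you also want Paperclip-style sizes (large, medium, thumb) and a Paperclip-like key layout, here is a minimal sketch using the sharp library for resizing. sharp is an assumed dependency, and the key pattern below merely imitates the Rails paths from the question; adjust both to taste:
const sharp = require('sharp'); // assumed dependency for resizing
const aws = require('aws-sdk');
const s3 = new aws.S3();

// Style sizes copied from the Rails model; Paperclip's '500x500#' crops to fill,
// which roughly corresponds to sharp's fit: 'cover'
const styles = { large: [500, 500], medium: [200, 200], thumb: [100, 100] };

// Imitate Paperclip's :id_partition (1 -> "000/000/001")
function idPartition(id) {
  return String(id).padStart(9, '0').match(/\d{3}/g).join('/');
}

// Upload the original plus one resized copy per style; resolves to the S3 URLs
async function uploadAvatar(userId, fileName, buffer) {
  const base = `users/avatars/${idPartition(userId)}`;
  const put = (key, body) =>
    s3.upload({ Bucket: process.env.S3_BUCKET_NAME, Key: key, Body: body }).promise();
  const uploads = [put(`${base}/original/${fileName}`, buffer)];
  for (const [style, [w, h]] of Object.entries(styles)) {
    const resized = await sharp(buffer).resize(w, h, { fit: 'cover' }).toBuffer();
    uploads.push(put(`${base}/${style}/${fileName}`, resized));
  }
  const results = await Promise.all(uploads);
  return results.map(r => r.Location);
}
Once uploadAvatar resolves, you can persist the file name on the user the same way as the first_name example, e.g. models.Users.update({ avatar_file_name: fileName }, { where: { id } }), and rebuild each style's URL from the same key pattern when serving it.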

Related

Custom authorizer for Lambda functions in Node.js [closed]

How do I use a custom authorizer for Lambda functions through an HTTP API in AWS using Node.js?
I am a novice in this field, so I do not have prior experience with this. Any help is highly appreciated.
Create the following 3 files under the respective file names (serverless.yml, middleware-auth.js, auth.js) and put the code below inside them. This enables custom authorization for AWS Lambda functions.
serverless.yml
provider:
  name: aws
  runtime: nodejs14.x
  lambdaHashingVersion: 20201221
  # you can overwrite defaults here
  stage: dev
  region: us-east-1
  apiGateway:
    binaryMediaTypes:
      - "*/*"
  httpApi:
    cors: true
    authorizers:
      customAuthorizer:
        type: request
        functionName: authoriserAnyToken

functions:
  authoriserAnyToken:
    handler: middleware-auth.handler
  signin:
    handler: auth.signin
    timeout: 15
    events:
      - httpApi:
          path: /auth/{role}/signin
          method: post
  auth_remember_me:
    handler: auth.auth_remember_me
    timeout: 15
    events:
      - httpApi:
          path: /auth/remember-me
          method: post
          authorizer:
            name: customAuthorizer
            type: request
  auth_jwt_token_check:
    handler: auth.auth_jwt_token_check
    timeout: 15
    events:
      - httpApi:
          path: /auth/jwt/check
          method: post
          authorizer:
            name: customAuthorizer
            type: request
middleware-auth.js
const JWT_SECRET = "goK!pusp6ThEdURUtRenOwUhAsSURESHBazl!uJLPlS8EbreWLdrupIwabRAsiBa";
const jwt = require("jsonwebtoken");

exports.handler = async event => {
  console.log('event', event);
  const token = event.headers.authorization ? event.headers.authorization.replace('Bearer ', '') : null;
  if (!token) {
    console.log('could not find a token on the event');
    return generatePolicy({ allow: false });
  }
  try {
    const decoded = jwt.verify(token, JWT_SECRET);
    console.log('token_details ', decoded);
    if (decoded.user == 'admin') {
      return generatePolicy({ allow: true });
    }
    else if (decoded.user == 'seller') {
      return generatePolicySeller({ allow: true });
    }
    // deny any other role, so the handler never returns undefined
    return generatePolicy({ allow: false });
  } catch (error) {
    console.log('error ', error);
    return generatePolicy({ allow: false });
  }
};
const generatePolicy = ({ allow }) => {
  return {
    principalId: 'token',
    policyDocument: {
      Version: '2012-10-17',
      Statement: {
        Action: 'execute-api:Invoke',
        Effect: allow ? 'Allow' : 'Deny',
        Resource: '*',
        // Resource: 'arn:aws:execute-api:us-east-1:*:*/*/*',
        // Resource: 'arn:aws:execute-api:us-east-1:*:*/*/auth/*',
      },
    },
  };
};
const generatePolicySeller = ({ allow }) => {
  return {
    principalId: 'token',
    policyDocument: {
      Version: '2012-10-17',
      Statement: {
        Action: 'execute-api:Invoke',
        Effect: allow ? 'Allow' : 'Deny',
        // Resource: '*',
        // Resource: 'arn:aws:execute-api:us-east-1:*:*/*/*',
        Resource: ['arn:aws:execute-api:us-east-1:*:*/*/sellers/*', 'arn:aws:execute-api:us-east-1:*:*/*/auth/*'],
      },
    },
  };
};
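To sanity-check the authorizer before deploying, a quick local sketch (assuming the code above is saved as middleware-auth.js next to this script) that invokes the handler with a fake event:
// smoke-test-auth.js: invoke the authorizer directly with a fake event
const { handler } = require('./middleware-auth');
const jwt = require('jsonwebtoken');

const JWT_SECRET = "goK!pusp6ThEdURUtRenOwUhAsSURESHBazl!uJLPlS8EbreWLdrupIwabRAsiBa";
const token = jwt.sign({ user: 'admin' }, JWT_SECRET, { expiresIn: '2h' });

handler({ headers: { authorization: `Bearer ${token}` } })
  .then(policy => console.log(JSON.stringify(policy, null, 2))); // expect Effect "Allow"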
auth.js
'use strict';
const jsonwebtoken = require("jsonwebtoken");
const bcrypt = require('bcrypt');
const AWS = require("aws-sdk");
const s3 = new AWS.S3()
const MAX_SIZE = 2097152 // 2MB
const bucket = 'S3_BUCKET_NAME' // Name of your bucket.
const Busboy = require("busboy")
const JWT_SECRET = "goK!pusp6ThEdURUtRenOwUhAsSURESHBazl!uJLPlS8EbreWLdrupIwabRAsiBa";
// Tables
const USERS_TABLE = 'users'
const { Validator } = require('node-input-validator');

s3.config.update({
  region: "us-east-1",
  accessKeyId: 'S3_ACCESS_KEY_ID', // ACCESS_KEY_ID of your S3 bucket.
  secretAccessKey: 'S3_SECRET_ACCESS_KEY' // SECRET_ACCESS_KEY of your S3 bucket.
});

const mysql = require('serverless-mysql')({ // DB configuration
  config: {
    host: process.env.DB_HOST,
    port: process.env.DB_PORT,
    database: process.env.DB_NAME,
    user: process.env.DB_USER,
    password: process.env.DB_PASSWORD,
    debug: false
  }
});

const sendJSON = (code, message, data) => {
  let resData = {
    "status": code < 400 ? 'Success' : 'Error',
    "message": message,
  }
  data ? (resData["data"] = data) : null;
  return {
    statusCode: code,
    headers: {
      "Content-Type": "application/json"
    },
    body: JSON.stringify({
      ...resData
    })
  };
}

const FORM = {
  parse(body, headers) {
    return new Promise((resolve, reject) => {
      const data = {};
      const buffer = Buffer.from(body, 'base64');
      const bb = Busboy({
        headers: Object.keys(headers).reduce((newHeaders, key) => {
          // busboy expects lower-case headers.
          newHeaders[key.toLowerCase()] = headers[key];
          return newHeaders;
        }, {})
      });
      bb.on('field', (name, val, info) => {
        data[name] = val;
      });
      bb.on('error', (err) => {
        reject(err);
      });
      bb.on('close', () => {
        resolve(data);
      });
      bb.end(buffer);
    });
  }
};

module.exports.signin = async (event, context) => {
  try {
    const data = await FORM.parse(event['body'], event['headers']);
    const v = new Validator(data, {
      email: 'required|email',
      password: 'required'
    });
    const matched = await v.check();
    if (!matched)
      return {
        statusCode: 400,
        headers: {
          "Content-Type": "application/json"
        },
        body: JSON.stringify(v.errors),
      };
    let results = "";
    let email = data.email;
    let password = data.password;
    const role = event.pathParameters.role;
    results = await mysql.query(`SELECT * FROM ${USERS_TABLE} WHERE email = ? LIMIT 1`, [email]);
    await mysql.end();
    if (!results[0]) {
      return {
        statusCode: 400,
        headers: {
          "Content-Type": "application/json"
        },
        body: JSON.stringify({
          email: {
            "message": "User not found"
          }
        }),
      };
    } else {
      let user_hash = results[0].password;
      user_hash = user_hash.replace(/^\$2y(.+)$/i, '$2a$1');
      const match = await bcrypt.compare(password, user_hash);
      if (!match) {
        return {
          statusCode: 400,
          body: JSON.stringify({
            password: {
              "message": "Missing or Invalid password"
            }
          }),
        };
      }
      const user_info = {
        'id': results[0].id,
        'role': results[0].role,
        'fname': results[0].fname,
        'lname': results[0].lname
      };
      const token = jsonwebtoken.sign({ id: results[0].id, user: results[0].role, fname: results[0].fname, lname: results[0].lname }, JWT_SECRET, { expiresIn: "2h" });
      return sendJSON(200, 'Successfully logged in.', {
        'user': user_info,
        'token': token
      });
    }
  } catch (e) {
    return sendJSON(400, e.message);
  }
};

module.exports.auth_jwt_token_check = async (event) => {
  try {
    return sendJSON(200, 'verified user');
  } catch (e) {
    return sendJSON(400, e.message);
  }
};

module.exports.auth_remember_me = async (event) => {
  try {
    const token = event.headers.authorization ? event.headers.authorization.replace('Bearer ', '') : null;
    const decoded = jsonwebtoken.verify(token, JWT_SECRET);
    const new_token = jsonwebtoken.sign({ id: decoded.id, user: decoded.user, fname: decoded.fname, lname: decoded.lname }, JWT_SECRET, { expiresIn: "2h" });
    return sendJSON(200, 'Get Renew Token.', {
      'token': new_token
    });
  } catch (e) {
    return sendJSON(400, e.message);
  }
};
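Once deployed, a quick end-to-end check of a protected route could look like the sketch below; axios is an assumed dependency, and the base URL is a placeholder for your own API Gateway stage:
// check-auth.js: hypothetical client for the protected /auth/jwt/check route
const axios = require('axios'); // assumed dependency

const API_BASE = 'https://your-api-id.execute-api.us-east-1.amazonaws.com'; // placeholder
const token = process.env.TOKEN; // token returned by the signin endpoint

axios.post(`${API_BASE}/auth/jwt/check`, {}, {
  headers: { Authorization: `Bearer ${token}` }
})
  .then(res => console.log(res.data)) // expect { status: 'Success', message: 'verified user' }
  .catch(err => console.error(err.response ? err.response.data : err.message));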

API Only sends 1 chunk of metadata when called

I have a problem with my API that sends metadata when called from my smart contract or website. It's NFT tokens; my database is Postgres and the API is Node.js.
The problem is that when I mint 1 NFT the metadata works perfectly, but if I mint 2 or more it will only ever send 1 chunk of data, so only 1 NFT mints properly and the rest have no data.
Do I need to set up a loop function or a delay? Does anyone have any experience with this?
Any help would be much appreciated.
Below is the code from the "controller" folder, labeled "nft.js".
const models = require("../../models/index");
const path = require("path");
const fs = require("fs");

module.exports = {
  create_nft: async (req, res, next) => {
    try {
      const dir = path.resolve(__dirname + `../../../data/traitsfinal.json`);
      const readCards = fs.readFileSync(dir, "utf8");
      const parsed = JSON.parse(readCards);
      console.log("is this the final data??", parsed);
      parsed.forEach(async (item) => {
        // return res.json(item)
        let newNft = await models.NFT.create({
          name: item.Name,
          description: item.Description,
          background: item.Background,
          body: item.Body,
          mouth: item.Mouth,
          eyes: item.Eyes,
          head_gear: item.Head_Gear,
          tokenId: item.tokenId,
          image: item.imagesIPFS,
        });
      });
      return res.json({
        data: "nft created",
        error: null,
        success: true,
      });
    } catch (error) {
      console.log("server error", error.message);
      next(error);
    }
  },
  get_nft: async (req, res, next) => {
    try {
      const { id } = req.params;
      const nft = await models.NFT.findByPk(id);
      if (!nft) {
        throw new Error("Token ID invalid");
      }
      if (!nft.isMinted) {
        throw new Error("Token not minted");
      }
      console.log(nft);
      const resObj = {
        name: nft.name,
        description: nft.description,
        image: `https://gateway.pinata.cloud/ipfs/${nft.image}`,
        attributes: [
          { trait_type: "background", value: `${nft.background}` },
          { trait_type: "body", value: `${nft.body}` },
          { trait_type: "mouth", value: `${nft.mouth}` },
          { trait_type: "eyes", value: `${nft.eyes}` },
          { trait_type: "tokenId", value: `${nft.tokenId}` },
          {
            display_type: "number",
            trait_type: "Serial No.",
            value: id,
            max_value: 1000,
          },
        ],
      };
      return res.json(resObj);
    } catch (error) {
      console.log("server error", error.message);
      next(error);
    }
  },
  get_nft_all: async (req, res, next) => {
    try {
      const nft = await models.NFT.findAndCountAll({
        limit: 10
      });
      if (!nft) {
        throw new Error("Token ID invalid");
      }
      var resObjarr = [];
      for (var i = 0; i < nft.rows.length; i++) {
        var resObj = {
          name: nft.rows[i].name,
          description: nft.rows[i].description,
          image: `https://gateway.pinata.cloud/ipfs/${nft.rows[i].image}`,
          attributes: [
            { trait_type: "background", value: `${nft.rows[i].background}` },
            { trait_type: "body", value: `${nft.rows[i].body}` },
            { trait_type: "mouth", value: `${nft.rows[i].mouth}` },
            { trait_type: "eyes", value: `${nft.rows[i].eyes}` },
            { trait_type: "tokenId", value: `${nft.rows[i].tokenId}` },
            {
              display_type: "number",
              trait_type: "Serial No.",
              value: nft.rows[i].id,
              max_value: 1000,
            },
          ],
        };
        resObjarr.push(resObj);
      }
      console.log(JSON.stringify(resObjarr))
      return res.json(resObjarr);
    } catch (error) {
      console.log("server error", error.message);
      next(error);
    }
  },
  mint: async (req, res, next) => {
    try {
      const { id } = req.params;
      const updated = await models.NFT.findByPk(id);
      if (!updated) {
        throw new Error("NFT ID invalid");
      }
      if (updated.isMinted) {
        throw new Error("NFT Already minted");
      }
      updated.isMinted = true;
      await updated.save();
      return res.json({
        data: "Token minted successfully",
        error: null,
        success: true,
      });
    } catch (error) {
      console.log("server error", error.message);
      next(error);
    }
  },
};
Below is from the routes folder.
const router = require("express").Router();
const auth = require("../middleware/auth")
const {
  create_nft,
  get_nft,
  get_nft_all,
  mint
} = require("../controller/nft");

router.post("/create", create_nft);
router.get("/metadata/:id", get_nft);
router.get("/metadata", get_nft_all);
router.put("/mint/:id", mint);

module.exports = router;
Looking at your code, you may have some kind of asynchronous issue in this part:
parsed.forEach(async (item) => {
  // return res.json(item)
  let newNft = await models.NFT.create({
    name: item.Name,
    description: item.Description,
    background: item.Background,
    body: item.Body,
    mouth: item.Mouth,
    eyes: item.Eyes,
    head_gear: item.Head_Gear,
    tokenId: item.tokenId,
    image: item.imagesIPFS,
  });
});
Because .forEach is a function meant to be used in a synchronous context and NFT.create returns a promise (which is async), things happen out of order: the response is sent before all the creates have finished.
So one approach is to build all the promises first and then perform a batch operation using Promise.all.
const data = parsed.map(item => {
  return models.NFT.create({
    name: item.Name,
    description: item.Description,
    background: item.Background,
    body: item.Body,
    mouth: item.Mouth,
    eyes: item.Eyes,
    head_gear: item.Head_Gear,
    tokenId: item.tokenId,
    image: item.imagesIPFS,
  })
})
const results = await Promise.all(data)
The main difference here is that Promise.all resolves the N NFT.create promises in parallel in an async context. But if data may be too big to process all at once and you want to limit the number of concurrent operations, you can cap the concurrency with the async iteration provided by bluebird's Promise.map (note the concurrency option below).
const Promise = require('bluebird')
const data = await Promise.map(parsed, item => {
  return models.NFT.create({
    name: item.Name,
    description: item.Description,
    background: item.Background,
    body: item.Body,
    mouth: item.Mouth,
    eyes: item.Eyes,
    head_gear: item.Head_Gear,
    tokenId: item.tokenId,
    image: item.imagesIPFS,
  })
}, { concurrency: 10 }) // cap how many NFT.create calls run at once
return data
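If you would rather not add the bluebird dependency, a plain sketch that caps concurrency by processing the items in sequential batches with Promise.all (the batch size of 10 is an arbitrary choice):
// Create NFTs in sequential batches so at most `batchSize` inserts run at a time
async function createInBatches(items, batchSize = 10) {
  const results = [];
  for (let i = 0; i < items.length; i += batchSize) {
    const batch = items.slice(i, i + batchSize).map(item =>
      models.NFT.create({
        name: item.Name,
        description: item.Description,
        background: item.Background,
        body: item.Body,
        mouth: item.Mouth,
        eyes: item.Eyes,
        head_gear: item.Head_Gear,
        tokenId: item.tokenId,
        image: item.imagesIPFS,
      })
    );
    results.push(...await Promise.all(batch)); // wait for the batch before starting the next
  }
  return results;
}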

How to export Sheets to PDF and upload it to one specific folder?

I'm trying to convert one Google Sheet into a PDF file. Actually, that part seems OK. But I can't put it directly into one specific folder.
Can you help me?
const getData = await getSpreadSheetData(newSpreadsheetsId);
if (!getData) {
  // nop
  return;
}
let url = getData.data.spreadsheetUrl;
if (!url) {
  // nop
  return
}
url = url.replace(/edit$/, '');
const url_ext = 'export?exportFormat=pdf&format=pdf&portrait=true'
url = url + url_ext;
const dest = fs.createWriteStream('test.pdf');
await g.drive.files.export(
  {
    fileId: `${newSpreadsheetsId}`, // Please set the file ID of Google Docs.
    mimeType: "application/pdf"
  },
  { responseType: "stream" }, function (err, response) {
    if (err) {
      console.log(err);
      return;
    }
    if (!response) {
      // nop
      return
    }
    response.data
      .on("end", function () {
        console.log("Done.");
      })
      .on("error", function (err) {
        console.log("Error during download", err);
        return process.exit();
      })
      .pipe(dest);
  })
getSpreadSheetData retrieves all the data for one spreadsheet ID.
I'm not an expert with pipes etc.
I have tried some options like this link:
Github - google Drive export pdf in Landscape
And I don't want this file on my server, or transiting through my server.
After a few hours, here is the solution:
g = auth
const exportAsPdfInFolder = await g.drive.files.export(
  {
    fileId: fileId,
    mimeType: 'application/pdf',
    alt: 'media',
  },
  { responseType: 'stream' },
  async (err, result) => {
    if (err) console.log(err);
    else {
      const media = {
        mimeType: 'application/pdf',
        body: result?.data,
      };
      await g.drive.files.create(
        {
          requestBody: {
            name: newTitlePDF,
            parents: [folderParentId],
          },
          media: media,
          fields: 'id',
        },
        async (err: any, file: any) => {
          if (err) {
            // Handle error
            console.error(err);
          } else {
            console.log('File Id: ', file.data.id);
          }
        },
      );
    }
  },
);
Reference:
Files: create

Image uploads in react native not reaching server

I am sending images to a Node.js server using React Native. I have noticed that when I make the request, there is an image file in the request, but the response fails saying photos are needed for the file upload. The request, however, works perfectly from Postman.
Here is a sample of the code. I am using react-native-image-crop-picker to select the image.
const choosePhotoFromLibrary = () => {
  ImagePicker.openPicker({
    width: 300,
    height: 400,
    cropping: true,
    multiple: false,
    mediaType: 'photo'
  }).then((image) => {
    setPhotos(image.path)
  }).catch(err => {
    console.log(err);
  })
}

getStoreId()

const createProduct = async () => {
  console.log(typeof photos);
  const data = new FormData()
  data.append('name', productName)
  data.append('description', description)
  data.append('price', price)
  data.append('category', productCategory)
  data.append('sub-category', productSubCategory)
  data.append('condition', productCondition)
  data.append('photos', photos)
  data.append('type', `Women's wear`)
  console.log(data);
  var config = {
    method: 'post',
    url: url,
    headers: {
      'token': token,
      'Content-Type': 'multipart/form-data'
    },
    data: data
  };
  try {
    const product = await axios(config)
    console.log(product);
  } catch (err) {
    console.log(err.response);
  }
}
Instead of
data.append('photos', photos)
write it like this:
data.append('photos', {
  name: "Example.jpg",
  uri: photos, // this should be something like 'file://...'
  type: "image/jpg" // MIME type of the file
})
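For context, React Native's FormData only serializes a file when it receives an object with uri, name, and type; a bare path string is sent as a plain text field, which is why Postman works but the app does not. On the Node side, a minimal sketch of receiving that field with multer (an assumed middleware choice; the route and port are hypothetical):
const express = require('express');
const multer = require('multer');

const app = express();
const upload = multer({ dest: 'uploads/' }); // uploaded files land in ./uploads

// 'photos' must match the FormData field name used by the app
app.post('/products', upload.single('photos'), (req, res) => {
  if (!req.file) {
    return res.status(400).json({ error: 'photos are needed for file upload' });
  }
  // req.body holds the text fields (name, description, price, ...)
  res.json({ file: req.file.filename, fields: req.body });
});

app.listen(3000);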

My Node.js API mixes up the responses of simultaneous API requests from clients

I have a Node.js server file which has an API, shown below, to update profile pictures.
app.post('/updateProfilePic', async (req, res) => {
  try {
    if (VerifyAPIKey(req.query.key)) {
      let userdata = users.find(e => e.Id == req.query.socketId);
      if (userdata) {
        for (var a = 0; a < users.length; a++) {
          const b = a;
          if (users[a].IsAuthenticated) {
            if (req.query.pub) {
              cloudinary.uploader.destroy(req.query.pub, { resource_type: 'image' }, function (err, res) {
                // console.log(err, res);
              });
            }
            cloudinary.uploader.upload(req.files.profilePic.tempFilePath, {
              resource_type: 'image', folder: 'members', eager: [
                { width: 25, height: 25, g: 'face', radius: "max", crop: 'fill', format: "png" },
                { width: 50, height: 50, g: 'face', radius: "max", crop: 'fill', format: "png" },
                { width: 100, height: 100, g: 'face', radius: "max", crop: 'fill', format: "png" },
                { width: 250, height: 250, g: 'face', radius: "max", crop: 'fill', format: "png" },
                { width: 500, height: 500, g: 'face', crop: 'fill' },
              ]
            }, function (err, response) {
              if (err) {
                console.log(err);
              }
              if (response) {
                const logo = userModel.findOneAndUpdate({
                  _id: users[b]._id,
                }, {
                  PictureUrl: response
                }, (err, result) => {
                  data.status = 200;
                  data.message = "Your Profile Picture has been updated!"
                  res.status(200).send(data);
                })
              }
            });
          }
        }
      } else {
        data.status = 404;
        data.message = "Invalid User!";
        res.status(200).send(data);
      }
    } else {
      res.json('Unauthorized request!');
    }
  } catch (err) {
    res.status(400).send(err.message);
  }
})
The VerifyAPIKey function is given below:
function VerifyAPIKey(key) {
  var a = users.find(e => e.API_KEY == key);
  console.log(a)
  fs.appendFile('./data/apiRequests.txt', JSON.stringify(a) + "\r\n", function (err) {
    if (err) throw err;
  });
  return Boolean(a);
}
The userdata is in the format shown below:
{
  Id: 'FjWs0GZ4MkE_GCmKAAAD',
  Ip: '::1',
  API_KEY: '590c3789-e807-431b-bfdb-e20b6649e553',
  HOST: undefined,
  IsAuthenticated: false
}
The problem is that the current code causes the response data from Cloudinary to get mixed up between simultaneous requests. I have tested it with two simultaneous requests: of the two Cloudinary responses, whichever comes first is sent back as the response to the user who invoked the API later, and the user who invoked the API first gets an error saying "cannot set headers after they are sent".
I have tried searching for a solution but haven't found any. Can someone please help?
How does data get initialized? It does not seem to be safe for concurrent requests: it is defined outside your async flow and shared between requests, so two overlapping requests write into the same object. You may want to start from there and make sure each request builds its own data. Note also that your loop over users can call res.send more than once for a single request, which is exactly what produces the "cannot set headers after they are sent" error.
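As a sketch of that direction (reusing users, userModel, and cloudinary from the question, with the eager transforms omitted for brevity): build data inside the handler, update only the requesting user instead of looping over all users, and respond exactly once:
app.post('/updateProfilePic', async (req, res) => {
  try {
    if (!VerifyAPIKey(req.query.key)) return res.json('Unauthorized request!');

    // request-local response object, so nothing is shared between requests
    const data = {};
    const userdata = users.find(e => e.Id == req.query.socketId);
    if (!userdata || !userdata.IsAuthenticated) {
      data.status = 404;
      data.message = 'Invalid User!';
      return res.status(200).send(data);
    }
    if (req.query.pub) {
      cloudinary.uploader.destroy(req.query.pub, { resource_type: 'image' }, () => {});
    }
    // upload for the requesting user only, then update just their record
    cloudinary.uploader.upload(req.files.profilePic.tempFilePath,
      { resource_type: 'image', folder: 'members' },
      (err, response) => {
        if (err) return res.status(400).send(err.message);
        userModel.findOneAndUpdate({ _id: userdata._id }, { PictureUrl: response }, () => {
          data.status = 200;
          data.message = 'Your Profile Picture has been updated!';
          res.status(200).send(data); // exactly one response per request
        });
      });
  } catch (err) {
    res.status(400).send(err.message);
  }
});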
