Missing credentials in config while uploading image to AWS S3 - node.js

Missing credentials in config, if using AWS_CONFIG_FILE, set AWS_SDK_LOAD_CONFIG=1
Every time I try to post images through Postman to AWS S3, I get this error.
require("./config/database").connect();
const express = require("express");
const jwt = require("jsonwebtoken");
const fileUpload = require("express-fileupload");
var bcrypt = require("bcryptjs");
const app = express();
app.use(fileUpload());
var AWS = require("aws-sdk");
app.post("/images", async (req, res) => {
AWS.config.update({
accessKeyId:process.env.AWS_ACCESS_KEY ,
secretAccesskey: process.env.AWS_SECRET_KEY,
region: process.env.AWS_BUCKET_REGION,
});
const s3 = new AWS.S3();
const fileContent = Buffer.from(req.files.data.data, "binary");
const params = {
Bucket:"newawspracticeimage",
Key: req.files.data.name,
Body: fileContent,
};
s3.upload(params, (err, data) => {
if (err) {
throw err;
}
res.send({
response_code: 200,
response_message: "Success",
response_data: data,
});
});
});```
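Two things commonly cause this error with the v2 SDK, and both look possible here: the option name must be secretAccessKey (the code above spells it secretAccesskey with a lowercase k, so the SDK never receives the secret), and process.env values stay undefined unless something such as dotenv loads them. A minimal sketch of the credentials setup under those assumptions (the environment variable names are taken from the question):

// Hedged sketch: load .env and use the exact option names the v2 SDK expects.
require("dotenv").config(); // without this, process.env.AWS_ACCESS_KEY etc. may be undefined

var AWS = require("aws-sdk");

AWS.config.update({
  accessKeyId: process.env.AWS_ACCESS_KEY,     // variable names as used in the question
  secretAccessKey: process.env.AWS_SECRET_KEY, // note the capital K in "Key"
  region: process.env.AWS_BUCKET_REGION,
});

const s3 = new AWS.S3();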

Related

Cannot Get Error from getObject API to download/get an object(audio file) from S3

My Amazon Connect call recordings are stored on S3 as .wav files, and I am looking to get and play those recordings in a third-party application. For this I am using the getObject API and trying to get/download the .wav files by name, but I am getting a Cannot GET error.
At the same time I want to provide a path in the key, e.g. Connect/Lab/2022/04/08/abc.wav. Is that possible?
How do I resolve it? Here is my code:
require("dotenv").config();
const expres = require("express");
const app = expres();
app.listen(3001);
const aws = require("aws-sdk");
aws.config.update({
secretAccessKey: process.env.ACCESS_SECRET,
accessKeyId: process.env.ACCESS_KEY,
region: process.env.REGION
})
const BUCKET = process.env.BUCKET
const Key = '/connect/oblab2/CallRecordings/2022/04/08/'
const s3 = new aws.S3(secretAccessKey = process.env.ACCESS_SECRET, accessKeyId = process.env.ACCESS_KEY);
app.get("/download/filename", async(req, res)=>{
const filename = req.params.filename
let x = await s3.getObject({Bucket:BUCKET, Key:Key + filename}).promise();
res.send(x.Body);
})
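For reference, two details in that first attempt would produce exactly a "Cannot GET" response: Express only fills req.params.filename when the route is declared with a colon (/download/:filename), and S3 keys normally do not begin with a leading slash, so '/connect/...' is unlikely to match the stored object. A minimal sketch assuming the recordings sit under the connect/oblab2/CallRecordings/... prefix from the question:

// Hedged sketch: colon-prefixed route parameter and a key prefix without a leading slash.
app.get("/download/:filename", async (req, res) => {
  const prefix = "connect/oblab2/CallRecordings/2022/04/08/"; // assumed to match the actual bucket layout
  try {
    const obj = await s3.getObject({ Bucket: BUCKET, Key: prefix + req.params.filename }).promise();
    res.type(obj.ContentType); // serve the .wav with its stored content type
    res.send(obj.Body);
  } catch (err) {
    res.status(500).send("Error fetching file");
  }
});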
I achieved the goal with the following:
require("dotenv").config();
const aws = require('aws-sdk');
const expres = require("express");
const app = expres();
app.listen(3001);
app.get('/getfilefromS3', async (req, res, next) => {
aws.config.update({
secretAccessKey: process.env.ACCESS_SECRET,
accessKeyId: process.env.ACCESS_KEY,
region: process.env.REGION
})
const s3 = new aws.S3(secretAccessKey = process.env.ACCESS_SECRET, accessKeyId = process.env.ACCESS_KEY);
var params = { Bucket: process.env.BUCKET, Key: "connect/oblab2/CallRecordings/2022/04/08/" + req.query.filename };
s3.getObject(params, function (err, data) {
if (err) {
res.status(200);
res.end('Error Fetching File');
}
else {
res.attachment(params.Key); // Set Filename
res.type(data.ContentType); // Set FileType
res.send(data.Body); // Send File Buffer
}
});
})
And then hitting the endpoint http://localhost:3001/getfilefromS3?filename=filename.wav

Node.js uploading image with multer to both MongoDb and Amazon s3

I have been trying to use an MVC structure in Node.js to build a POST route that uploads to both MongoDB and Amazon S3.
I have built the code below to help me upload to S3.
const aws = require("aws-sdk");
const multer = require("multer");
const multerS3 = require("multer-s3");
require("dotenv").config();

aws.config.update({
  secretAccessKey: process.env.AMAZON_SECRET_ACCESS_KEY,
  accessKeyId: process.env.AMAZON_ACCESS_KEY,
  region: "eu-north-1",
});

const s3 = new aws.S3();

const uploader = multer({
  storage: multerS3({
    s3: s3,
    bucket: "shopitemimages",
    acl: "public-read",
    contentType: multerS3.AUTO_CONTENT_TYPE,
    metadata: function (req, file, cb) {
      cb(null, { fieldName: "lets see what we want as fieldvalue" });
    },
    key: function (req, file, cb) {
      cb(null, Date.now().toString());
    },
  }),
});

module.exports = uploader;
If I use this in its own route like below, it works like a charm.
const express = require("express");
const router = express.Router();
const multer = require("multer");
const uploader = require("../services/file-upload");
const singleUpload = uploader.single("file1");

router.post("/image-upload", (req, res) => {
  singleUpload(req, res, function (err) {
    console.log(req.file);
    res.json({ "image-url": req.file.location });
  });
});

module.exports = router;
However, when I try to use the MVC structure to combine it with another middleware, the code below works for the upload to MongoDB but not for S3: it reports success, yet nothing is uploaded.
This is the route:
const express = require("express");
const router = express.Router();
const multer = require("multer");
const upload = multer();
const shopController = require("../controllers/shop");

router.post(
  "/shop/create/:shopId",
  upload.single("file1"), // this seems to be needed, otherwise I can't parse the file for the MongoDB upload
  shopController.createShopItem // controller doing the actual work
);
This is the controller I'm trying to use in the above route:
const ShopItem = require("../models/shopitem");
const uploader = require("../services/file-upload");
const singleUpload = uploader.single("file1");

exports.createShopItem = (req, res, next) => {
  const file = req.file;
  const title = req.body.title;
  const price = req.body.price;
  const description = req.body.description;
  const location = req.body.location;
  const user = "OrreSnorre";

  if (
    file.mimetype != "image/jpeg" &&
    file.mimetype != "image/jpg" &&
    file.mimetype != "image/png"
  ) {
    next(new Error("invalid file type"));
  }

  // this is the part where I try to upload to S3
  singleUpload(req, res, (err) => {
    console.log("iwas here");
    console.log(req.file);
    return res.json({ "image-url": req.file.location });
  });

  const newItem = new ShopItem({
    title: title,
    price: price,
    description: description,
    location: location,
    user: user,
  });

  newItem
    .save()
    .then((res) => console.log("saved"))
    .catch((err) => {
      const error = new Error(err);
      error.httpStatusCode = 500;
      return next(error);
    });
};
Any suggestions on what I'm failing to understand?
I've spent a few days of hobby work on this... let's see if someone is a lot quicker ;-)
Best regards,
Oscar
After testing your code, I noticed what you are doing wrong:
const upload = multer(); creates a multer instance without passing any parameters (like the bucket and the S3 credentials) to it, so nothing will happen when you try to use that instance as you did with upload.single("file1"). Instead, you should call your configured middleware in the route as well.
You declared singleUpload without calling it anywhere in your controller.
So I made a few modifications to your code, although with Google Cloud Storage because I don't have an S3 account to test with, but I am confident it will work for S3 as well, and you can also create a GCS account to test.
controllers/shop.js
//const ShopItem = require("../models/shopitem");
const uploader = require("../services/file-upload");
const singleUpload = uploader.single("file1");

exports.createShopItem = (req, res, next) => {
  const file = req.file;
  const title = req.body.title;
  const price = req.body.price;
  const description = req.body.description;
  const location = req.body.location;
  const user = "OrreSnorre";

  console.log("file.mimetype-->", file.mimetype);
  if (
    file.mimetype != "image/jpeg" &&
    file.mimetype != "image/jpg" &&
    file.mimetype != "image/png"
  ) {
    next(new Error("invalid file type"));
  }

  console.log("----------------------");
  console.log("if you can see this location: ", location);
  console.log("that means you can store into your mongodb");
  console.log("-----------------------");

  return res.json({ "image-url": req.file.location });
};
services/file-upload.js
const aws = require("aws-sdk");
const multer = require("multer");
const multerS3 = require("multer-s3");
const multerGoogleStorage = require('multer-cloud-storage');
require("dotenv").config();

aws.config.update({
  secretAccessKey: process.env.AWS_ACCESS_KEY,
  accessKeyId: process.env.AWS_ACCESS_KEY_ID,
  region: "eu-north-1",
});

const s3 = new aws.S3();

const uploader_ = multer({
  storage: multerS3({
    s3: s3,
    bucket: "shopitemimages",
    acl: "public-read",
    contentType: multerS3.AUTO_CONTENT_TYPE,
    metadata: function (req, file, cb) {
      console.log("this is the file", file);
      cb(null, { fieldName: "lets see what we want as fieldvalue" });
    },
    key: function (req, file, cb) {
      cb(null, Date.now().toString());
    },
  }),
});

const uploader = multer({
  storage: multerGoogleStorage.storageEngine({
    autoRetry: true,
    bucket: 'buck-name',
    projectId: 'projectid',
    keyFilename: 'pathtokeyfile.json',
    filename: (req, file, cb) => {
      cb(null, `/${Date.now()}${file.originalname}`);
      console.log(file);
      // output = output.replace(/{%DESCRIPTION%}/g, product.description);
    }
  }),
});

module.exports = uploader;
route.js
const express = require("express");
const router = express.Router();
const multer = require("multer");
// const upload = multer();
const shopController = require('./controllers/shop');
const uploader = require("./services/file-upload");

router.post(
  "/shop/create/:shopId",
  uploader.single("file1"), // changed this to use the configured uploader middleware
  shopController.createShopItem // controller doing the actual work
);

module.exports = router;
server.js
const express = require("express");
const http = require("http");
const port = process.env.PORT || 3000;

// set up the app & its routes
const app = express();
const routes = require('./route');
app.use(routes);

// start the http server
const httpServer = http.createServer(app);
httpServer.listen(port);

let serviceName = "Upload service";
console.log(`[${serviceName}] http server listening at port ${port}`);
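Since the modified services/file-upload.js exports the GCS-backed instance, switching back to S3 under the same structure should presumably only require exporting the multer-s3 instance instead; a one-line sketch under that assumption:

module.exports = uploader_; // hypothetical: export the multer-s3-backed uploader instead of the GCS one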

node js multer s3 file Uploading Issue

I am working on a Node.js RESTful API and trying to upload a file to S3 using multer, but it's not working and I am not getting any error.
Here is the code from my controller:
var aws = require('aws-sdk');
var express = require('express');
var multer = require('multer');
var multerS3 = require('multer-s3');
var bodyParser = require('body-parser');
var uuid = require('uuid').v4;

aws.config.update({
  secretAccessKey: '',
  accessKeyId: '',
  region: 'us-west-2'
});

var app = express();
var s3 = new aws.S3();
app.use(bodyParser.json());

var upload = multer({
  storage: multerS3({
    s3: s3,
    bucket: 'stack',
    key: function (req, file, cb) {
      console.log(file);
      cb(null, req.s3key);
    }
  })
});

var fileUpload = upload.array('attachments', 1);

function uploadToS3(req, res) {
  req.s3key = uuid();
  let downloadUrl = 'https://s3-us-west-2.amazonaws.com/stack/' + req.s3key;
  return new Promise((resolve, reject) => {
    return fileUpload(req, res, err => {
      if (err) return reject(err);
      return resolve(downloadUrl);
    });
  });
}

exports.uploadImagetoS3 = (req, res) => {
  uploadToS3(req, res).then(downloadUrl => {
    console.log(downloadUrl);
  });
};
What am I missing here?
Can you add a catch block for the error log? If anything is failing, it will then show up in the log.
exports.uploadImagetoS3 = (req, res) => {
  uploadToS3(req, res).then(downloadUrl => {
    console.log(downloadUrl);
  }).catch(error => {
    console.log(error);
  });
};

nodejs upload files to S3 AWS Amazon

This is my current code. It does not work.
I post the image as form data.
My questions are:
How can I see the form data in Node.js? (I tried logging req.body and it is an empty object.)
What does the "demo.jpg" in the code mean?
NODEJS
var aws = require('aws-sdk');
const express = require('express');
const router = express.Router();

aws.config.update({
  "accessKeyId": "<MY_KEY>",
  "secretAccessKey": "<MY_SECRET>"
});

router.post('/', (req, res, next) => {
  console.log(res.body);
  var s3 = new aws.S3();
  var params = {
    Bucket: "passwordapp",
    Key: 'aaaaaa',
    Expires: 60,
    ContentType: 'jpg'
  };
  s3.getSignedUrl('demo.jpg', params, function (err, data) {
    //console.log("err",err);
    if (err) {
      res.json(err);
    } else {
      res.json(data);
    }
  });
});

module.exports = router;
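On the second question: in the v2 SDK the first argument to getSignedUrl is the name of an S3 operation, not a file name, so the 'demo.jpg' slot is almost certainly a misplaced placeholder. A minimal sketch of how a presigned upload URL is usually requested (bucket and key values are illustrative):

// Hedged sketch: presign an HTTP PUT for a specific object key.
var s3 = new aws.S3();
var params = {
  Bucket: "passwordapp",   // bucket name taken from the question
  Key: "demo.jpg",         // object key the client will upload to (illustrative)
  Expires: 60,             // URL validity in seconds
  ContentType: "image/jpeg"
};
s3.getSignedUrl("putObject", params, function (err, url) {
  if (err) return console.error(err);
  console.log("PUT the file body to:", url); // the client uploads with an HTTP PUT to this URL
});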
OK, got the solution from here:
https://stackoverflow.com/a/54402849/1230198
Use multer.
This is my working code:
const express = require('express');
const router = express.Router();
const AWS = require('aws-sdk');
const multer = require('multer');

const storage = multer.memoryStorage();
const upload = multer({ storage: storage });

const s3Client = new AWS.S3({
  "accessKeyId": "<KEY>",
  "secretAccessKey": "<SECRET>"
});

const uploadParams = {
  Bucket: 'passwordapp',
  Key: '',    // pass key
  Body: null, // pass file body
};

router.post('/', upload.single("image"), (req, res, next) => {
  const params = uploadParams;
  uploadParams.Key = req.file.originalname;
  uploadParams.Body = req.file.buffer;
  s3Client.upload(params, (err, data) => {
    if (err) {
      return res.status(500).json({ error: "Error -> " + err }); // return here so we don't also send the success response
    }
    res.json({
      message: 'File uploaded successfully',
      'filename': req.file.originalname,
      'location': data.Location
    });
  });
});

module.exports = router;

Image is getting corrupted after uploading it from lambda to S3 using serverless framework and connect-multiparty module

When I upload the image from my local machine it works fine and I am able to view the image in the browser, but when I upload it from Lambda using the Serverless Framework the image is displayed as an empty white square box. The body data for s3bucket.upload is in <Buffer ...> format; I have tried converting to base64 and buffering, but still no luck.
const express = require('express');
const bodyParser = require('body-parser');
var Request = require("request");
const AWS = require('aws-sdk');
var app = express();
var fs = require('fs');
const serverless = require('serverless-http');
var multipart = require('connect-multiparty');
var multipartMiddleware = multipart({ uploadDir: '/tmp' });

app.post('/uploadImageToS3', multipartMiddleware, (req, res) => {
  console.log(req.files);
  console.log(req.files.uImage.type);
  if (typeof req.files.uImage !== 'undefined' && req.files.uImage !== null) {
    AWS.config.update({
      accessKeyId: '.................',
      secretAccessKey: '................................',
      region: 'ap-south-1'
    });
    var file = req.files.uImage;
    var s3bucket = new AWS.S3();
    fs.readFile(file.path, function (err, data) {
      var params = {
        Bucket: "..........",
        Key: file.name,
        Body: data,
        ContentType: req.files.uImage.type,
        ContentEncoding: 'base64'
      };
      console.log(data);
      s3bucket.upload(params, function (err, data) {
        if (err) {
          console.log('ERROR MSG: ', err); // log before returning, otherwise this line is unreachable
          return res.status(400).json(err);
        } else {
          console.log(data.Location);
          res.status(200).json(data);
        }
      });
    });
  } else {
    return res.status(400).json('No files were uploaded.');
  }
});
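One common cause of this exact symptom is the request body being decoded as UTF-8 text by API Gateway before the Express handler sees it, which corrupts the multipart payload. If that is the case here, the serverless-http wrapper needs its binary option set and API Gateway needs the matching binary media types configured. A hedged sketch of the wrapper side, assuming the app above is exported as a Lambda handler (names are illustrative):

// Hedged sketch: treat multipart/image payloads as binary so they are not mangled into UTF-8.
const serverless = require('serverless-http');

module.exports.handler = serverless(app, {
  binary: ['multipart/form-data', 'image/jpeg', 'image/png'],
});
// API Gateway also needs these types listed as binary media types
// (for example under provider.apiGateway.binaryMediaTypes in serverless.yml).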
