I have been trying to use an MVC structure in Node.js to build a POST route which uploads both to MongoDB and to Amazon S3.
I have built the service below to help me upload to S3.
const aws = require("aws-sdk");
const multer = require("multer");
const multerS3 = require("multer-s3");
require("dotenv").config();
aws.config.update({
secretAccessKey: process.env.AMAZON_SECRET_ACCESS_KEY,
accessKeyId: process.env.AMAZON_ACCESS_KEY,
region: "eu-north-1",
});
const s3 = new aws.S3();
const uploader = multer({
storage: multerS3({
s3: s3,
bucket: "shopitemimages",
acl: "public-read",
contentType: multerS3.AUTO_CONTENT_TYPE,
metadata: function (req, file, cb) {
cb(null, { fieldName: "lets see what we want as fieldvalue" });
},
key: function (req, file, cb) {
cb(null, Date.now().toString());
},
}),
});
module.exports = uploader;
If I use this in a route of its own, like below, it works like a charm.
const express = require("express");
const router = express.Router();
const multer = require("multer");
const uploader = require("../services/file-upload");
const singleUpload = uploader.single("file1");
router.post("/image-upload", (req, res) => {
singleUpload(req, res, function (err) {
console.log(req.file);
res.json({ "image-url": req.file.location });
});
});
module.exports = router;
However, when I try to use the MVC structure to combine it with another middleware, the code below works for the upload to MongoDB, but not for S3: it reports success, but nothing is uploaded.
This is the route:
const express = require("express");
const router = express.Router();
const multer = require("multer");
const upload = multer();
const shopController = require("../controllers/shop");
router.post(
"/shop/create/:shopId",
upload.single("file1"),//this seems to be needed, otherise I can't parse the file for mongodb upload
shopController.createShopItem //controller doing the actual work
);
This is the controller I'm trying to use in the above route:
const ShopItem = require("../models/shopitem");
const uploader = require("../services/file-upload");
const singleUpload = uploader.single("file1");
exports.createShopItem = (req, res, next) => {
const file = req.file;
const title = req.body.title;
const price = req.body.price;
const description = req.body.description;
const location = req.body.location;
const user = "OrreSnorre";
if (
file.mimetype != "image/jpeg" &&
file.mimetype != "image/jpg" &&
file.mimetype != "image/png"
) {
next(new Error("invalid file type"));
}
//this is the part where I try to upload to S3
singleUpload(req, res, (err) => {
console.log("iwas here");
console.log(req.file);
return res.json({ "image-url": req.file.location });
});
const newItem = new ShopItem({
title: title,
price: price,
description: description,
location: location,
user: user,
});
newItem
.save()
.then((res) => console.log("saved"))
.catch((err) => {
const error = new Error(err);
error.httpStatusCode = 500;
return next(error);
});
};
Any suggestions as to what I'm failing to understand?
I've spent a few days of hobby work on this... let's see if someone is a lot quicker ;-)
Best regards,
Oscar
After testing your code I noticed what you are doing wrong:
const upload = multer(); creates a multer instance without passing any parameters (like the bucket name and the S3 credentials) to it, so nothing is going to happen when you use that instance via upload.single("file1"). Instead, you should call your configured middleware in the route.
You declared singleUpload in your controller, but it never has a chance to do anything there: upload.single("file1") in the route has already consumed the multipart body before the controller runs, so there is nothing left for the S3 storage engine to parse.
So I made a few modifications to your code, although with Google Cloud Storage (GCS) because I don't have an S3 account to test with. I'm confident it will work for S3 as well, and you can also create a GCS account to test.
controllers/shop.js
//const ShopItem = require("../models/shopitem");
const uploader = require("../services/file-upload");
const singleUpload = uploader.single("file1");
exports.createShopItem = (req, res, next) => {
const file = req.file;
const title = req.body.title;
const price = req.body.price;
const description = req.body.description;
const location = req.body.location;
const user = "OrreSnorre";
console.log("file.mimetype-->",file.mimetype);
if (
file.mimetype != "image/jpeg" &&
file.mimetype != "image/jpg" &&
file.mimetype != "image/png"
) {
next(new Error("invalid file type"));
}
console.log("----------------------");
console.log("if you can see this location: ",location);
console.log("that means you can store into your mongodb");
console.log("-----------------------");
return res.json({ "image-url": req.file.location });
};
services/file-upload.js
const aws = require("aws-sdk");
const multer = require("multer");
const multerS3 = require("multer-s3");
const multerGoogleStorage = require('multer-cloud-storage');
require("dotenv").config();
aws.config.update({
secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
accessKeyId: process.env.AWS_ACCESS_KEY_ID,
region: "eu-north-1",
});
const s3 = new aws.S3();
const uploader_ = multer({
storage: multerS3({
s3: s3,
bucket: "shopitemimages",
acl: "public-read",
contentType: multerS3.AUTO_CONTENT_TYPE,
metadata: function (req, file, cb) {
console.log("this is the file",file);
cb(null, { fieldName: "lets see what we want as fieldvalue" });
},
key: function (req, file, cb) {
cb(null, Date.now().toString());
},
}),
});
const uploader = multer({
storage: multerGoogleStorage.storageEngine({
autoRetry: true,
bucket: 'buck-name',
projectId: 'projectid',
keyFilename: 'pathtokeyfile.json',
filename: (req, file, cb) => {
cb(null, `/${Date.now()}${file.originalname}`);
console.log(file);
}
}),
});
module.exports = uploader;
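Note: as written, only the GCS uploader is exported; the S3 instance (uploader_) is kept above for reference. If you want the S3 path instead, export uploader_ (or swap the storage engine back to multerS3).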
route.js
const express = require("express");
const router = express.Router();
const multer = require("multer");
// const upload = multer();
const shopController = require('./controllers/shop');
const uploader = require("./services/file-upload");
router.post(
"/shop/create/:shopId",
uploader.single("file1"),// I changed this to object from your middleware
shopController.createShopItem //controller doing the actual work
);
module.exports = router;
server.js
const express = require("express");
const http = require("http");
const port = process.env.PORT || 3000;
//setup app & its routes
const app = express();
const routes = require('./route');
app.use(routes);
//start http server
const httpServer = http.createServer(app);
httpServer.listen(port);
const serviceName = "Upload service";
console.log(`[${serviceName}] http server listening at port ${port}`);
I'm trying to send a file (an .obj file) via FormData to my Node server. Everything appears fine until the controller of the endpoint on the server throws an error that suggests the form-data parser is receiving an empty FormData object; here is the error in question. Now, here's my code:
Front-end (where the request is being executed):
const data = new FormData();
data.append('file', this.props.filesSubmit, this.props.filesSubmit.name);
for (var pair of data.entries()) {
console.log(pair[0] + ', ' + pair[1]);
}
await axios
.post(
'https://hushpuppys-3d-hub-api.herokuapp.com/api/v1/modelfiles/',
data,
{
headers: {
'Content-Type': undefined,
},
}
)
.then((res) => {
console.log('File Upload Successful! Res: ', res);
})
.catch((err) => {
console.log(err);
});
Back-end endpoint Controller (Where request is received):
const AWS = require('aws-sdk');
const fs = require('fs');
const fileType = require('file-type');
const bluebird = require('bluebird');
const multiparty = require('multiparty');
// Keys Configuration to Access AWS
AWS.config.update({
accessKeyId: '[I erased this]',
secretAccessKey: '[I erased this]',
});
// Configuring AWS to work with promises
AWS.config.setPromisesDependency(bluebird);
// Creating S3 instance
const s3 = new AWS.S3();
// abstracts function to upload a file returning a promise
const uploadFile = (buffer, name, type) => {
const params = {
ACL: 'public-read',
Body: buffer,
Bucket: '[I erased this]',
ContentType: type.mime,
Key: `${name}.${type.ext}`,
};
return s3.upload(params).promise();
};
exports.fileUploaderController = (req, res) => {
const form = new multiparty.Form();
form.parse(req.body, async (error, fields, files) => {
if (error) throw new Error(error);
try {
const path = files.file[0].path;
const buffer = fs.readFileSync(path);
const type = fileType(buffer);
const timestamp = Date.now().toString();
const fileName = `bucketFolder/${timestamp}-lg`;
const data = await uploadFile(buffer, fileName, type);
return res.status(200).send(data);
} catch (error) {
return res.status(400).send(error);
}
});
};
I also want to add the code of my app.js, where other middlewares manipulate the body; maybe that's also relevant to solving the problem:
const express = require('express');
const modelRouter = require('./routes/modelRoutes');
const modelFilesRouter = require('./routes/modelFilesRoutes');
const cors = require('cors');
const app = express();
// MIDDLEWARES
app.use(cors());
app.use(express.json({ limit: '50mb' }));
app.use(express.urlencoded({ limit: '50mb' }));
// ROUTES
app.use('/api/v1/models', modelRouter);
app.use('/api/v1/modelfiles', modelFilesRouter);
module.exports = app;
OK, I got this problem solved a few minutes ago. I was passing the wrong parameter to the form.parse() method in the fileUploaderController controller: form.parse() needs the whole req object, not just the body of the request (req.body) as in the code I posted with the question.
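For reference, a minimal sketch of the corrected controller (the same multiparty flow as above; only the argument to form.parse changes):
exports.fileUploaderController = (req, res) => {
  const form = new multiparty.Form();
  // Pass the whole request object, not req.body: multiparty needs the
  // raw request stream in order to parse the multipart/form-data body.
  form.parse(req, async (error, fields, files) => {
    if (error) throw new Error(error);
    // ...same S3 upload logic as in the original controller...
  });
};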
I am trying to upload images to an S3 bucket and have tried many solutions online, yet I get the above errors. I don't want to store images locally; instead I want to upload them directly to the S3 bucket. Any help would be appreciated.
This is Upload.js file
const AWS = require('aws-sdk');
const Keys = require('../Config/dev');
const { v4: uuidv4 } = require('uuid');
const axios = require('axios').default;
const multer = require('multer');
const multerS3 = require('multer-s3');
const s3 = new AWS.S3({
accessKeyId: Keys.accessKeyId,
secretAccessKey: Keys.secretAccessKey,
region : 'ap-south-1'
});
var upload = multer({
storage: multerS3({
s3: s3,
bucket: 'thebucketname',
acl : "public-read",
metadata: function (req, file, cb) {
cb(null, {fieldName: file.fieldname});
},
key: function (req, file , cb){
cb(new Date().toISOString().replace(/[-T:\.Z]/g, "") + file.originalname);
}
})
});
module.exports = upload;
This is the router code
const express = require('express');
const Router = express.Router();
const controllers = require('../controllers/controllers.js');
const uploader = require('../controllers/Upload');
const singleUpload = uploader.single('img');
Router.post('/single-image',(req, res)=>{
singleUpload(req, res , (err)=>{
if(!req.file){
console.log(req.file);
}else
{
console.log(req.file);
return res.json({'imageUrl': req.file.location});
}
});
});
This is how I am using Postman for the API request. I have also set Content-Type to multipart/form-data inside the Headers in Postman. I get "undefined" for req.file when I do this.
Also, if I use
app.use(multer({dest:'./public/uploads/'}).single('file'));
my file gets stored in the 'uploads' folder, but then I get the error "req.file.location undefined", and the file doesn't upload to AWS.
Firstly, if you want to upload files to S3 and not store them on your server, you can keep the uploaded file as an in-memory buffer instead of writing it to your server's disk and then uploading to S3. NOTE: this in-memory method is not recommended with large files or a large number of small files, because you need to ensure that your server has enough memory to deal with the uploads.
Then you can just pass the buffer to the S3 upload function. I don't know much about the multer-s3 package that you've apparently used, so I'm not using it here. I had written this for an array of files, but it should work for single files as well. I combined your code with some of my code and came up with the following:
//aws-sdk for node
const AWS = require('aws-sdk');
AWS.config.update({ region: <your region here> });
//S3
const S3 = new AWS.S3({});
const express = require('express');
const Router = express.Router();
const controllers = require('../controllers/controllers.js');
const uploader = require('../controllers/Upload');
//import multer
const multer = require("multer");
//make multer ready for in-memory storage of uploaded file
const multerMemoryStorage = multer.memoryStorage();
const multerUploadInMemory = multer({
storage: multerMemoryStorage
});
//using multer.single as a middleware is what I prefer
Router.post('/single-image',multerUploadInMemory.single("filename"),async(req, res)=>{
try{
if(!req.file || !req.file.buffer){
throw new Error("File or buffer not found");
}
const uploadResult = await S3.upload({
Bucket: "yourBucketName",
Key: "WhateverKeynameYouWantToGive",
Body: req.file.buffer,
ACL: 'public-read'
}).promise();
console.log(`Upload Successful!`);
res.send({
message: "file uploaded"
})
}catch(e){
console.error(`ERROR: ${e.message}`);
res.status(500).send({
message: e.message
})
}
});
You can first use console.log(req.file) to check that it's not undefined (which it shouldn't be) and that the file has the buffer property.
Also, there is a warning in the multer documentation that you should never add multer as a global middleware, so app.use(multer({dest:'./public/uploads/'})) is a no-no.
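To make both checks concrete, here's a small sketch (multerUploadInMemory is the instance defined above; handler stands in for your route callback; the commented shape is roughly what multer's memoryStorage attaches):
// Attach multer per-route rather than globally:
Router.post('/single-image', multerUploadInMemory.single('filename'), handler);

// Inside the handler, a quick sanity check before the S3 upload:
console.log(req.file);
// With memoryStorage, req.file should look roughly like:
// { fieldname: 'filename', originalname: 'photo.png', encoding: '7bit',
//   mimetype: 'image/png', buffer: <Buffer ...>, size: 12345 }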
I am trying to write code that sends an image, or an array of images, via multipart/form-data, as in the below images:
Click here for the postman Screenshot
I am trying to store these images in an S3 bucket and then trigger another lambda that reads the images and performs text extraction using AWS Rekognition.
Code for the lambda that is storing the images is as below:
var AWS = require('aws-sdk')
var express = require('express')
var multer = require('multer')
var multerS3 = require('multer-s3')
var bodyParser = require('body-parser')
const cors = require('cors')
var app = express()
app.use(bodyParser.urlencoded({ extended: false }));
app.use(cors())
const env = process.env.NODE_ENV || 'prod'
AWS.config.update({
accessKeyId: 'ACCESS_KEY',
secretAccessKey: 'SECRET_KEY',
region: 'ap-south-1'
});
var s3 = new AWS.S3({
region: "ap-south-1"
})
var lambda = new AWS.Lambda({
region: "ap-south-1"
});
var upload = multer({
storage: multerS3({
s3: s3,
bucket: 'MY_BUCKET_NAME',
metadata: function (req, file, cb) {
cb(null, { fieldName: file.fieldname });
},
key: function (req, file, cb) {
req.originalname = file.originalname
cb(null, req.originalname)
}
})
})
app.post('/', upload.array('file', 2), function (req, res, next) {
const params = {
FunctionName: "ANOTHER_LAMBDA_NAME",
Payload: JSON.stringify({ "fileName": req.originalname })
};
return lambda.invoke(params, (err, data) => {
if (err) console.log("err", err.stack);
else {
res.json(data.Payload)
}
});
})
if (env === "dev") {
const port = process.env.port || 4000
app.listen(port, () => console.log(`server is running on ${port}`))
}
else {
module.exports = app
}
Code for another lambda is as below:
var AWS = require('aws-sdk')
var s3 = new AWS.S3()
const rekognition = new AWS.Rekognition()
exports.handler = function(event, context) {
var params = {
Image: {
S3Object: {
Bucket: 'MY_BUCKET_NAME',
Name: event.filename,
}
}
};
rekognition.detectText(params, function(err, data) {
if (err) console.log(err, err.stack);
else context.succeed(data); // returns "null"
});
};
After this, I created an API Gateway which triggers the first lambda to upload the files to S3 as in the picture above. The response I get is "null".
The main problem seems to be with how the image is getting stored in the S3 bucket: when I download that image and try to open it, it gives the following error:
The file “EPICared.jpg” could not be opened. It may be damaged or use a file format that Preview doesn’t recognize.
So what do you guys suggest I should be doing? Where am I going wrong? I cannot figure out why the image cannot be opened or read by the other lambda.
I am at a loss as to what I am doing wrong. Here is what I have:
HTML
<html>
<body>
<form method="POST" action="/upload" enctype="multipart/form-data">
<div class="field">
<label for="image">Image Upload</label>
<input type="file" name="image" id="image">
</div>
<input type="submit" class="btn" value="Save">
</form>
</body>
</html>
Port 5000 is my Node.js server's port.
In this example I am using POST to /upload, and it works fine.
module.exports = function(app, models) {
var fs = require('fs');
var AWS = require('aws-sdk');
var accessKeyId = process.env.AWS_ACCESS_KEY || "xxxxxx";
var secretAccessKey = process.env.AWS_SECRET_KEY || "+xxxxxx+B+xxxxxxx";
AWS.config.update({
accessKeyId: accessKeyId,
secretAccessKey: secretAccessKey
});
var s3 = new AWS.S3();
app.post('/upload', function(req, res){
var params = {
Bucket: 'makersquest',
Key: 'myKey1234.png',
Body: "Hello"
};
s3.putObject(params, function (perr, pres) {
if (perr) {
console.log("Error uploading data: ", perr);
} else {
console.log("Successfully uploaded data to myBucket/myKey");
}
});
});
}
Now I want to upload the actual file that I am POSTing, which is where the problem arises.
module.exports = function(app, models) {
var fs = require('fs');
var AWS = require('aws-sdk');
var accessKeyId = process.env.AWS_ACCESS_KEY || "xxxxxx";
var secretAccessKey = process.env.AWS_SECRET_KEY || "+xxxxxx+B+xxxxxxx";
AWS.config.update({
accessKeyId: accessKeyId,
secretAccessKey: secretAccessKey
});
var s3 = new AWS.S3();
app.post('/upload', function(req, res){
var path = req.files.image.path;
fs.readFile(path, function(err, file_buffer){
var params = {
Bucket: 'makersquest',
Key: 'myKey1234.png',
Body: file_buffer
};
s3.putObject(params, function (perr, pres) {
if (perr) {
console.log("Error uploading data: ", perr);
} else {
console.log("Successfully uploaded data to myBucket/myKey");
}
});
});
});
}
The error I get is:
TypeError: Cannot read property 'path' of undefined
As a matter of fact, req.files is completely empty.
I am assuming I am missing something pretty obvious, but I can't seem to find it.
You will need something like multer to handle multipart uploading.
Here is an example streaming your file upload to s3 using aws-sdk.
var multer = require('multer');
var AWS = require('aws-sdk');
var accessKeyId = process.env.AWS_ACCESS_KEY || "xxxxxx";
var secretAccessKey = process.env.AWS_SECRET_KEY || "+xxxxxx+B+xxxxxxx";
AWS.config.update({
accessKeyId: accessKeyId,
secretAccessKey: secretAccessKey
});
var s3 = new AWS.S3();
app.use(multer({ // https://github.com/expressjs/multer
dest: './public/uploads/',
limits : { fileSize:100000 },
rename: function (fieldname, filename) {
return filename.replace(/\W+/g, '-').toLowerCase();
},
onFileUploadData: function (file, data, req, res) {
// file : { fieldname, originalname, name, encoding, mimetype, path, extension, size, truncated, buffer }
var params = {
Bucket: 'makersquest',
Key: file.name,
Body: data
};
s3.putObject(params, function (perr, pres) {
if (perr) {
console.log("Error uploading data: ", perr);
} else {
console.log("Successfully uploaded data to myBucket/myKey");
}
});
}
}));
app.post('/upload', function(req, res){
if(req.files.image !== undefined){ // `image` is the field name from your form
res.redirect("/uploads"); // success
}else{
res.send("error, no file chosen");
}
});
Simple S3 File Upload Without Multer
var express = require('express')
const fileUpload = require('express-fileupload');
const app = express();
app.use(fileUpload());
var AWS = require('aws-sdk');
app.post('/imageUpload', async (req, res) => {
AWS.config.update({
accessKeyId: "ACCESS-KEY", // Access key ID
secretAccessKey: "SECRET-ACCESS-KEY", // Secret access key
region: "us-east-1" //Region
})
const s3 = new AWS.S3();
// Read the uploaded file's data into a binary Buffer
const fileContent = Buffer.from(req.files.uploadedFileName.data, 'binary');
// Setting up S3 upload parameters
const params = {
Bucket: 'BUCKET-NAME',
Key: "test.jpg", // File name you want to save as in S3
Body: fileContent
};
// Uploading files to the bucket
s3.upload(params, function(err, data) {
if (err) {
throw err;
}
res.send({
"response_code": 200,
"response_message": "Success",
"response_data": data
});
});
})
app.listen(3000, function () {
console.log('Example app listening on port 3000!');
});
[Update Mar 2022] Supports multiple file uploads at a time, and returns the uploaded file(s)' public URL(s) too.
Latest Answer @ Dec-2016 [New]
Use multer-s3 for multipart uploading to s3 without saving on local disk as:
var express = require('express'),
aws = require('aws-sdk'),
bodyParser = require('body-parser'),
multer = require('multer'),
multerS3 = require('multer-s3');
aws.config.update({
secretAccessKey: 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX',
accessKeyId: 'XXXXXXXXXXXXXXX',
region: 'us-east-1'
});
var app = express(),
s3 = new aws.S3();
app.use(bodyParser.json());
var upload = multer({
storage: multerS3({
s3: s3,
acl: 'public-read',
bucket: 'bucket-name',
key: function (req, file, cb) {
console.log(file);
cb(null, file.originalname); //use Date.now() for unique file keys
}
})
});
//open in browser to see upload form
app.get('/', function (req, res) {
res.sendFile(__dirname + '/index.html');
});
//use by upload form
app.post('/upload', upload.array('upl', 25), function (req, res, next) {
res.send({
message: "Uploaded!",
urls: req.files.map(function(file) {
return {url: file.location, name: file.key, type: file.mimetype, size: file.size};
})
});
});
app.listen(3000, function () {
console.log('Example app listening on port 3000!');
});
Latest Answer @ Mar-2016 [Old-One]
Edit 1: use multer@1.1.0 and multer-s3@1.4.1 for the following snippet:
var express = require('express'),
bodyParser = require('body-parser'),
multer = require('multer'),
s3 = require('multer-s3');
var app = express();
app.use(bodyParser.json());
var upload = multer({
storage: s3({
dirname: '/',
bucket: 'bucket-name',
secretAccessKey: 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX',
accessKeyId: 'XXXXXXXXXXXXXXX',
region: 'us-east-1',
filename: function (req, file, cb) {
cb(null, file.originalname); //use Date.now() for unique file keys
}
})
});
//open in browser to see upload form
app.get('/', function (req, res) {
res.sendFile(__dirname + '/index.html');
});
//use by upload form
app.post('/upload', upload.array('upl'), function (req, res, next) {
res.send("Uploaded!");
});
app.listen(3000, function () {
console.log('Example app listening on port 3000!');
});
For complete running example clone express_multer_s3 repo and run node app.
You need something like multer in your set of middleware to handle multipart/form-data for you and populate req.files. From the doco:
var express = require('express')
var multer = require('multer')
var app = express()
app.use(multer({ dest: './uploads/'}))
Now req.files.image.path should be populated in your app.post function.
One of the easy ways to upload your image is to use the NPM package Multer.
You can upload an image to S3 and then store its name in your database, so that every time you want to fetch it you can generate a signed URL for that image. This is one of the ways to secure access to your S3 bucket.
For uploading an image you can do something like this:
const AWS = require("aws-sdk");
const express = require("express");
const multer = require("multer");
const crypto = require("crypto");
const cors = require("cors");
const {
S3Client,
PutObjectCommand
} = require("#aws-sdk/client-s3");
const app = express();
app.use(cors());
app.use(express.json());
const port = process.env.PORT || 3000
const storage = multer.memoryStorage();
const upload = multer({ storage: storage });
// Read the values from .env file
const bucketName = process.env.BUCKET_NAME;
const bucketRegion = process.env.BUCKET_REGION;
const accessId = process.env.ACCESS_ID;
const secretAccessKey = process.env.SECRET_ACCESS_KEY;
// Create a client
const s3 = new S3Client({
credentials: {
accessKeyId: accessId,
secretAccessKey: secretAccessKey,
},
region: bucketRegion,
});
// This function generates unique name for our files
const generateFileName = (bytes = 32) =>
crypto.randomBytes(bytes).toString("hex");
// Notice the upload middleware.
// "image" is the same name that you will pass form your UI request
app.post('/', upload.single("image"), async (req, res) => {
// When you use multer the image can be accessed from req.file
let fileName = generateFileName()
let params = {
Bucket: bucketName,
Key: fileName,
Body: req.file.buffer ,
ContentType: req.file.mimetype,
ContentEncoding: 'base64',
};
const command = new PutObjectCommand(params);
await s3.send(command);
// before sending response you can save the 'fileName' in the DB of your choice
res.send('image uploaded')
})
app.listen(port, () => {
console.log(`app listening on port ${port}`)
})
Next, to get the signed URL for the image you can do as follows
// assuming other things are set as above snippet
const { GetObjectCommand } = require("@aws-sdk/client-s3");
const { getSignedUrl } = require("@aws-sdk/s3-request-presigner");
app.get('/', async (req, res) => {
// First you will get the image name that was saved in DB
// lets say it was called user_image.
let obj_params = {
Bucket: bucketName,
Key: user_image,
};
let command = new GetObjectCommand(obj_params);
const image_url = await getSignedUrl(
s3,
command,
{ expiresIn: 86400 } // seconds in a day
);
let response = {
success: true,
data: {
image_url
},
};
res.status(200).send(response);
})
Note:
Note that you might need to install some packages to make it work.
Make sure in your API requests you are setting 'content-type': 'multipart/form-data' in the request headers (see the sketch after these notes)
In your API gateway in S3, you might also need to set the Binary Media Type as multipart/form-data. More info on that in this link
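For the upload request itself, a minimal browser-side sketch (hypothetical setup: it assumes the Express app above is reachable at localhost:3000 and that fileInput is an <input type="file"> element; the field name 'image' must match upload.single("image") on the server):
const data = new FormData();
data.append('image', fileInput.files[0]); // field name must match upload.single("image")

// axios detects the FormData body and sends it as multipart/form-data
// with the correct boundary in the Content-Type header
await axios.post('http://localhost:3000/', data);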
This Stack Overflow answer was the best I found explaining exactly how to get Node to S3 working:
AWS Missing credentials when i try send something to my S3 Bucket (Node.js)
This, plus some more stuff I had to hack on, got it all working. In my situation I was using a MEAN stack application, so the Node file I was working with was a route file.
My aconfig.json file with the Amazon credentials looks like this:
{ "accessKeyId": "*****YourAccessKey****", "secretAccessKey": "***YourSecretKey****" }
The final contents of the route file look like the file pasted below.
router.post('/sendToS3', function(req, res) {
var fs = require('fs');
var multer = require('multer');
var AWS = require('aws-sdk');
var path = require('path');
var awsCredFile = path.join(__dirname, '.', 'aconfig.json');
console.log('awsCredFile is');
console.log(awsCredFile);
AWS.config.loadFromPath(awsCredFile);
var s3 = new AWS.S3();
var photoBucket = new AWS.S3({params: {Bucket: 'myGreatBucketName'}});
var sampleFile = {
"_id" : 345345,
"fieldname" : "uploads[]",
"originalname" : "IMG_1030.JPG",
"encoding" : "7bit",
"mimetype" : "image/jpeg",
"destination" : "./public/images/uploads",
"filename" : "31a66c51883595e74ab7ae5e66fb2ab8",
"path" : "/images/uploads/31a66c51883595e74ab7ae5e66fb2ab8",
"size" : 251556,
"user" : "579fbe61adac4a8a73b6f508"
};
var filePathToSend = path.join(__dirname, '../public', sampleFile.path);
function uploadToS3(filepath, destFileName, callback) {
photoBucket
.upload({
ACL: 'public-read',
Body: fs.createReadStream(filepath),
Key: destFileName.toString(),
ContentType: 'application/octet-stream' // force download if it's accessed as a top location
})
// http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3/ManagedUpload.html#httpUploadProgress-event
.on('httpUploadProgress', function(evt) { console.log(evt); })
// http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3/ManagedUpload.html#send-property
.send(callback);
}
multer({limits: {fileSize:10*1024*1024}});
console.log('filePathToSend is ');
console.log(filePathToSend);
uploadToS3(filePathToSend, sampleFile.filename, function (err, data) {
if (err) {
console.error(err);
return res.status(500).send('failed to upload to s3').end();
}
res.status(200)
.send('File uploaded to S3: '
+ data.Location.replace(/</g, '&lt;')
+ '<br/><img src="' + data.Location.replace(/"/g, '&quot;') + '"/>')
.end();
});
console.log('uploading now...');
});
This took me a while to finally get working, but if you set up the route above, update the sampleFile JSON to point to a real file on your system, and hit it with Postman, it will publish a file to your S3 account.
Hope this helps