Upload a file to Google Cloud, in a specific directory - node.js

How to upload a file on Google Cloud, in a specific bucket directory (e.g. foo)?
"use strict";
const gcloud = require("gcloud");
const PROJECT_ID = "<project-id>";
let storage = gcloud.storage({
projectId: PROJECT_ID,
keyFilename: 'auth.json'
});
let bucket = storage.bucket(`${PROJECT_ID}.appspot.com`)
bucket.upload("1.jpg", (err, file) => {
if (err) { return console.error(err); }
let publicUrl = `https://firebasestorage.googleapis.com/v0/b/${PROJECT_ID}.appspot.com/o/${file.metadata.name}?alt=media`;
console.log(publicUrl);
});
I tried:
bucket.file("foo/1.jpg").upload("1.jpg", ...)
But there's no upload method there.
How can I send 1.jpg in the foo directory?
In Firebase, on the client side, I do:
ref.child("foo").put(myFile);

bucket.upload("1.jpg", { destination: "YOUR_FOLDER_NAME_HERE/1.jpg" }, (err, file) => {
//Do something...
});
This will put 1.jpg in the YOUR_FOLDER_NAME_HERE folder.
Here is the documentation. By the way, gcloud is deprecated and you should use google-cloud instead.
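For reference, roughly the same upload with the newer @google-cloud/storage package (a sketch; the bucket name and key file are the placeholders from the question):
const { Storage } = require('@google-cloud/storage');

const PROJECT_ID = '<project-id>';
const storage = new Storage({ projectId: PROJECT_ID, keyFilename: 'auth.json' });
const bucket = storage.bucket(`${PROJECT_ID}.appspot.com`);

// destination sets the object name, so the "foo/" prefix places it in the foo directory
bucket.upload('1.jpg', { destination: 'foo/1.jpg' }, (err, file) => {
  if (err) { return console.error(err); }
  console.log(`Uploaded as ${file.name}`);
});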

UPDATE 2020
According to the Google documentation:
const { Storage } = require('@google-cloud/storage');
const storage = new Storage()
const bucket = storage.bucket('YOUR_GCLOUD_STORAGE_BUCKET')
const blob = bucket.file('youFolder/' + 'youFileName.jpg')
const blobStream = blob.createWriteStream({
resumable: false,
gzip: true,
public: true
})
blobStream.on('error', (err) => {
console.log('Error blobStream: ',err)
});
blobStream.on('finish', () => {
// The public URL can be used to directly access the file via HTTP.
const publicUrl = ('https://storage.googleapis.com/'+ bucket.name + '/' + blob.name)
res.status(200).send(publicUrl);
});
blobStream.end(req.file.buffer) // req.file is your original file

Here you go...
const options = {
destination: 'folder/new-image.png',
resumable: true,
validation: 'crc32c',
metadata: {
metadata: {
event: 'Fall trip to the zoo'
}
}
};
bucket.upload('local-image.png', options, function(err, file) {
// Your bucket now contains:
// - "new-image.png" (with the contents of `local-image.png')
// `file` is an instance of a File object that refers to your new file.
});

If you are accessing the bucket from the same project, projectId, keyFilename, etc. are not required. I use the code below for both upload and download, and it works fine.
// Imports the Google Cloud client library
const Storage = require('@google-cloud/storage');
const storage = new Storage();
var destFilename = "./test";
var bucketName = 'cloudtesla';
var srcFilename = 'test';
const options = {
destination: destFilename,
};
//upload file
console.log("upload Started");
storage.bucket(bucketName).upload(srcFilename, {}, (err, file) => {
if(!err)
console.log("upload Completed");
else
console.log(err);
});
//Download file
console.log("Download Started");
storage
.bucket(bucketName)
.file(srcFilename)
.download(options)
.then(() => {
console.log("Download Completed");
})
.catch(err => {
console.error('ERROR:', err);
});

To upload inside a specific directory in .NET Core, use
var uploadResponse= await storageClient.UploadObjectAsync(bucketName, $"{foldername}/"+fileName, null, memoryStream);
This should upload your file 'fileName' inside folder 'foldername' in the bucket

I think just adding foo/ to the file name should work, like bucket.upload("foo/1.jpg", (err, file) => ...). In GCS, directories are just a matter of having a '/' in the file name.

If you want to use async/await while uploading files into storage buckets, the callbacks won't do the job. Here's how I did it.
async function uploadFile() {
const destPath = 'PATH_TO_STORAGE/filename.extension';
await storage.bucket("PATH_TO_YOUR_BUCKET").upload(newFilePath, {
gzip: true,
destination: destPath,
});
}
Hope it helps someone!


How to upload multiple files in nodejs to AWS S3 and save file url into database?

Hi, I need to upload multiple images at a time to S3.
Currently I am using express-fileupload to upload a single image to AWS, and I want to use the same approach to upload multiple files to S3 and update the images array with the URLs in MongoDB.
My schema property:
const ServiceSchema = new mongoose.Schema(
{
photo: [
{
type: String,
default: 'no-photo.jpg',
},
],
});
module.exports = mongoose.model('Service', ServiceSchema);
My Controller:
// @desc Upload photo for service
// @route PUT /api/v1/services/:id/photo
// @access Private
exports.servicePhotoUpload = asyncHandler(async (req, res, next) => {
const service = await Service.findById(req.params.id);
if (!service) {
return next(new ErrorResponse(`Service not found with id of ${req.params.id}`, 404));
}
// Make sure user adding service is business owner
if (service.user.toString() !== req.user.id && req.user.role !== 'admin') {
return next(
new ErrorResponse(
`User ${req.user.id} is not authorized to update this service to business ${service._id}`,
401
)
);
}
// File Upload validation
if (!req.files) {
return next(new ErrorResponse(`Please upload a file.`, 400));
}
const file = req.files.file;
// Make sure it is a valid image file
if (!file.mimetype.startsWith('image')) {
return next(new ErrorResponse(`Please upload a valid image file.`, 400));
}
//Check File Size
if (file.size > process.env.MAX_FILE_UPLOAD) {
return next(
new ErrorResponse(
`Please upload an image less than ${process.env.MAX_FILE_UPLOAD / 1024}KB in size.`,
400
)
);
}
// Create custom filename
file.name = `service-uploads/servicePhoto_${service._id}${path.parse(file.name).ext}`;
uploadToS3({
fileData: req.files.file.data,
fileName: file.name,
})
.then(async (result) => {
console.log('Success Result: ', result);
await Service.findByIdAndUpdate(service._id, { photo: result.Location });
return res
.status(200)
.json({ success: true, message: 'Service photo added successfully', url: result.Location });
})
.catch((err) => {
console.log(err);
return next(new ErrorResponse('Failed to upload file to S3', 500));
});
});
My Utility File to upload File to S3:
const AWS = require('aws-sdk');
const uploadToS3 = (options) => {
// Set the AWS Configuration
AWS.config.update({
accessKeyId: process.env.AWS_S3_ACCESS_KEY,
secretAccessKey: process.env.AWS_S3_SECRET_KEY,
region: 'us-east-2',
});
// Create S3 service object
const s3 = new AWS.S3({ apiVersion: '2006-03-01' });
// Setting up S3 upload parameters
const params = {
Bucket: 'toolbox-uploads',
Key: options.fileName, // File name you want to save as in S3
Body: options.fileData, //
};
// Return S3 uploading function as a promise so return url can be handled properly
return s3.upload(params).promise();
};
module.exports = uploadToS3;
My Router:
const express = require('express');
const {
servicePhotoUpload
} = require('../controllers/service');
const Service = require('../models/Service');
router.route('/:id/photo').put(protect, authorize('publisher', 'business', 'admin'), servicePhotoUpload);
module.exports = router;
The above code is working 100%.
I am a bit confused, as there were different approaches on Google and Stack Overflow and none of them worked for me; none of them returns the URL and saves it into the database.
I want to make a separate utility file to upload multiple files to S3, the same as I did for single files, so I can use it anywhere. That file should return the uploaded URLs so I can update my database.
I have tried multer-s3 but no solution works for me.
This approach might be different for you but that is how I was able to resolve the same issue.
First you'll need
Multer
multer-s3
aws-sdk
I made a FileUpload class that handles both single and multi-upload (I also needed to be able to upload pdf and video files), and this is the code in my constructor. Note that I also specified the S3 bucket in question from AWS.
this.s3 = new AWS.S3({
accessKeyId: process.env.S3_ACCESS_KEY_ID,
secretAccessKey: process.env.S3_SECRET_KEY,
Bucket: 'name_of_s3_bucket',
});
I created a method called upload in the class. Code below
upload(path, type) {
let ext = 'jpeg';
const multerFilter = (req, file, cb) => {
if (type === 'image') {
if (file.mimetype.startsWith(this.type)) {
cb(null, true);
} else {
cb(
new AppError(
'Not an Image! Please upload only images',
400
),
false
);
}
} else if (type === 'pdf') {
ext = 'pdf';
const isPdf = file.mimetype.split('/')[1];
if (isPdf.startsWith(this.type)) {
cb(null, true);
} else {
cb(
new AppError('Not a pdf! Please upload only pdf', 400),
false
);
}
}
};
const upload = multer({
storage: multers3({
acl: 'public-read',
s3: this.s3,
bucket: 'name_of_s3_bucket',
metadata: function (req, file, cb) {
cb(null, { fieldName: file.fieldname });
},
key: function (req, file, cb) {
let filename = `user-${
req.user.id
}/${path}/${uuid.v4()}-${Date.now()}.${ext}`;
// eslint-disable-next-line camelcase
const paths_with_sub_folders = [
'auditions',
'biography',
'movies',
];
if (paths_with_sub_folders.includes(path)) {
filename = `user-${req.user.id}/${path}/${
req.params.id
}/${uuid.v4()}-${Date.now()}.${ext}`;
}
cb(null, filename);
},
}),
fileFilter: multerFilter,
limits: {
fileSize: 5000000,
},
});
return upload;
}
To consume the above, I import the class into any controller where I need an upload feature and call the following.
Side note: ignore the paths code (it was just a way to generate unique file names for the files).
const upload = new FileUpload('image').upload('profile-images', 'image');
exports.uploadUserPhoto = upload.array('photos', 10);
I then used the uploadUserPhoto as a middleware before calling the following
exports.addToDB = catchAsync(async (req, res, next) => {
if (!req.files) return next();
req.body.photos = [];
await Promise.all(
req.files.map(async (file, i) => {
req.body.photos.push(file.key);
})
);
next();
});
As a high-level overview, this is the flow: first, upload your photos to S3 and get req.files; then loop through that req.files object, pushing each file's key into an array field on your req object; finally, save them to your DB.
NOTE: You must promisify the req.files loop since the task is asynchronous.
My final router looked like this
router
.route('/:id')
.put(uploadUserPhoto, addToDB, updateProfile)
Item.js
Your model can have a field called images thats type array.
const mongoose = require("mongoose");
const ItemSchema = mongoose.Schema({
images: {
type: [],
},
});
module.exports = mongoose.model("Items", ItemSchema);
You map through the array of objects and extract only the data you want to store; in this example it is the key, which is the unique name given to every image that is uploaded.
route.js
router.post("/", verify, upload.array("image"), async (req, res) => {
const { files } = req;
const images = [];
files.map((file) => {
images.push(file.key);
});
try {
await new Item({
images,
}).save();
res.status(200).send({message: "saved images to db"})
}catch(err){
res.status(400).send({message: err})
}
});
Let me know if this does what you wanted

application/octet-stream issue while using google moderate images trigger (blur image)

I'm using the moderate images solution trigger from Google.
I took this solution from here.
I asked someone to upgrade this solution for me, and here is the code:
'use strict'
const gm = require('gm').subClass({imageMagick: true})
const functions = require('firebase-functions')
const admin = require('firebase-admin')
admin.initializeApp()
const Vision = require('@google-cloud/vision')
const vision = new Vision.ImageAnnotatorClient()
const spawn = require('child-process-promise').spawn
const path = require('path')
const fs = require('fs')
const { Storage } = require('@google-cloud/storage')
const gcs = new Storage({
projectId: xxxxxxxxxxx,
})
exports.blurOffensiveImages = functions.storage
.object()
.onFinalize(async (object) => {
const file = gcs.bucket(object.bucket).file(object.name)
const filePath = `gs://${object.bucket}/${object.name}`
console.log(`Analyzing ${file.name}.`)
try {
const [result] = await vision.safeSearchDetection(filePath)
const detections = result.safeSearchAnnotation || {}
if (
detections.adult === 'VERY_LIKELY' ||
detections.violence === 'VERY_LIKELY'
) {
console.log(`Detected ${file.name} as inappropriate.`)
await blurImage(file, object.bucket, object.metadata)
console.log('Deleted local file', file)
return null
} else {
console.log(`Detected ${file.name} as OK.`)
}
} catch (err) {
console.error(`Failed to analyze ${file.name}.`, err)
throw err
}
})
async function blurImage(file, bucketName, metadata) {
const tempLocalPath = `/tmp/${path.parse(file.name).base}`
const bucket = gcs.bucket(bucketName)
await file.download({ destination: tempLocalPath })
console.log('The file has been downloaded to', tempLocalPath)
// Blur the image using ImageMagick.
await new Promise((resolve, reject) => {
gm(tempLocalPath)
.blur(0, 20)
.write(tempLocalPath, (err, stdout) => {
if (err) {
console.error('Failed to blur image.', err);
reject(err);
} else {
console.log(`Blurred image: ${file.name}`);
resolve(stdout);
}
});
});
console.log('Blurred image created at', tempLocalPath)
await bucket.upload(tempLocalPath, {
destination: file.name,
metadata: { metadata: metadata },
})
console.log('Blurred image uploaded to Storage at', file)
return fs.unlink(tempLocalPath, (e) => { if (e) {console.log(e)}})
}
And it worked perfectly, with one bad issue.
Sometimes when a user sends a list of photos I get the "application/octet-stream" file type, but it should be "image/jpg"; all media files in my project should be image/jpg.
(one user's publication with an error in the image data type)
It looks like this trigger gets stuck while executing.
I added a delay to the image uploads in my project, but it doesn't help.
I tested it: when I delete this trigger, all uploaded photos are fine and there are no issues at all.
Help me fix it.
P.S. I also want to say that after uploading, the image should keep all its data like the original (destination, name, etc.).

Destination not being set while uploading files using multer and gcloud

I am trying to upload files to a specific folder in my Google Storage bucket. I wrote my code using the Google documentation.
app.js
const multerMid = multer({
storage: multer.memoryStorage(),
limits: {
// no larger than 5mb.
fileSize: 5 * 1024 * 1024,
},
});
app.disable("x-powered-by");
app.use(
multerMid.fields([
{ name: "avatar", maxCount: 1 },
{ name: "gallery", maxCount: 8 },
])
);
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({ extended: false }));
app.post("/uploads", async (req, res, next) => {
try {
const myFile = req.files;
console.log(myFile);
const imageUrl = await uploadImage(myFile.gallery[0]);
res.status(200).json({
message: "Upload was successful",
data: imageUrl,
});
} catch (error) {
next(error);
}
});
uploadImage function
const uploadImage = (file) =>
new Promise((resolve, reject) => {
const { originalname, buffer } = file;
const blob = bucket.file(originalname.replace(/\s+/g, "_"));
const blobStream = blob.createWriteStream({
destination: "documents/image.png",
resumable: false,
});
blobStream
.on("finish", () => {
const publicUrl = `https://storage.googleapis.com/${bucket.name}/${blob.name}`;
resolve(publicUrl);
})
.on("error", () => {
reject(`Unable to upload image, something went wrong`);
})
.end(buffer);
});
Here, I am setting the destination as documents/image.png but the file is still stored in the bucket root and not in the documents folder. How can I make the files get stored in a specific folder in the bucket?
createWriteStream has no property named destination in its configuration options:
https://googleapis.dev/nodejs/storage/latest/global.html#CreateWriteStreamOptions
There is the UploadOptions object:
https://googleapis.dev/nodejs/storage/latest/global.html#UploadOptions
Solution: you should use the upload convenience method (which wraps createWriteStream) and pass it the UploadOptions object, which has a destination property:
https://googleapis.dev/nodejs/storage/latest/Bucket.html#upload
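A minimal sketch of that suggestion (the local path here is a placeholder, since bucket.upload reads from disk; for the in-memory buffer produced by multer.memoryStorage(), prefixing the object name as in the next answer is the way to go):
async function uploadLocalImage() {
  // destination is part of UploadOptions (not CreateWriteStreamOptions) and sets the object name
  const [file] = await bucket.upload('/tmp/image.png', {
    destination: 'documents/image.png',
    resumable: false,
  });
  return `https://storage.googleapis.com/${bucket.name}/${file.name}`;
}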
I have figured out the way to store a file in a particular subdirectory of a Google Storage bucket. Google stores a file in its bucket with a name that contains its path too. For example, if an image named xyz.jpg is stored in the abc folder in the mnp bucket, the file name will be mnp/abc/xyz.jpg. So, to save a file in a particular directory, we just have to add the path to the original file name.
const uploadImage = (file) =>
new Promise((resolve, reject) => {
const { originalname, buffer } = file;
const blob =
bucket.file(originalname.replace(originalname,"document/"+originalname));
const blobStream = blob.createWriteStream({
resumable: false,
});
blobStream
.on("finish", () => {
const publicUrl = `https://storage.googleapis.com/${bucket.name}/${blob.name}`;
resolve(publicUrl);
})
.on("error", () => {
reject(`Unable to upload image, something went wrong`);
})
.end(buffer);
});

Upload screenshots to google cloud storage bucket with Fluent-ffmpeg

I am currently using multer to upload videos to my google storage bucket, and fluent-ffmpeg to capture thumbnails of the videos. Videos are being uploaded into the buckets correctly, but not the thumbnails from ffmpeg. How can I change the location of the thumbnails to my google storage bucket?
Back-End Video upload
require ('dotenv').config()
const express = require('express');
const router = express.Router();
const multer = require("multer");
var ffmpeg = require('fluent-ffmpeg');
const multerGoogleStorage = require('multer-google-storage');
const { Video } = require("../models/Video");
const {User} = require("../models/User")
const { auth } = require("../middleware/auth");
var storage = multer({
destination: function (req, file, cb) {
cb(null, 'videos/')
},
filename: function (req, file, cb) {
cb(null, `${Date.now()}_${file.originalname}`)
},
fileFilter: (req, file, cb) => {
const ext = path.extname(file.originalname)
if (ext !== '.mp4' || ext !== '.mov' || ext !== '.m3u' || ext !== '.flv' || ext !== '.avi' || ext !== '.mkv') {
return cb(res.status(400).end('Error only videos can be uploaded'), false);
}
cb(null,true)
}
})
// Set location to google storage bucket
var upload = multer({ storage: multerGoogleStorage.storageEngine() }).single("file")
router.post("/uploadfiles", (req, res) => {
upload(req, res, err => {
if (err) {
return res.json({success: false, err})
}
return res.json({ success: true, filePath: res.req.file.path, fileName: res.req.file.filename})
})
});
Back-end thumbnail upload
router.post("/thumbnail", (req, res) => {
let thumbsFilePath = "";
let fileDuration = "";
ffmpeg.ffprobe(req.body.filePath, function (err, metadata) {
console.dir(metadata);
console.log(metadata.format.duration);
fileDuration = metadata.format.duration;
})
ffmpeg(req.body.filePath)
.on('filenames', function (filenames) {
console.log('Will generate ' + filenames.join(', '))
thumbsFilePath = "thumbnails/" + filenames[0];
})
.on('end', function () {
console.log('Screenshots taken');
return res.json({ success: true, thumbsFilePath: thumbsFilePath, fileDuration: fileDuration })
})
//Can this be uploaded to google storage?
.screenshots({
// Will take 3 screenshots
count: 3,
folder: '/thumbnails/',
size: '320x240',
//Names file w/o extension
filename:'thumbnail-%b.png'
});
});
Front-end video upload
const onDrop = (files) => {
let formData = new FormData();
const config = {
header: {'content-type': 'multipart/form-data'}
}
console.log(files)
formData.append("file", files[0])
axios.post('/api/video/uploadfiles', formData, config)
.then(response => {
if (response.data.success) {
let variable = {
filePath: response.data.filePath,
fileName: response.data.fileName
}
setFilePath(response.data.filePath)
//Thumbnail
axios.post('/api/video/thumbnail', variable)
.then(response => {
if (response.data.success) {
setDuration(response.data.fileDuration)
setThumbnail(response.data.thumbsFilePath)
} else {
alert("Failed to generate a thumbnail");
}
})
} else {
alert('Failed to save video to the server')
}
})
}
Here you can find the sample code of a web application page prompting the user to supply a file to be stored in Cloud Storage. The code configures the bucket using environment variables and creates a new blob in the bucket to upload the file data.
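A rough sketch of the pattern that sample describes, assuming an Express app and a multer memory-storage middleware (the GCLOUD_STORAGE_BUCKET variable name is borrowed from the usual App Engine sample and is an assumption here):
const multer = require('multer');
const { Storage } = require('@google-cloud/storage');

const storage = new Storage();
// Bucket name comes from an environment variable, as in the sample
const bucket = storage.bucket(process.env.GCLOUD_STORAGE_BUCKET);
const m = multer({ storage: multer.memoryStorage() });

app.post('/upload', m.single('file'), (req, res, next) => {
  if (!req.file) { return res.status(400).send('No file uploaded.'); }
  // Create a new blob in the bucket and stream the file data into it
  const blob = bucket.file(req.file.originalname);
  const blobStream = blob.createWriteStream({ resumable: false });
  blobStream.on('error', next);
  blobStream.on('finish', () => {
    res.status(200).send(`https://storage.googleapis.com/${bucket.name}/${blob.name}`);
  });
  blobStream.end(req.file.buffer);
});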
I hope this information helps.
You may have to just move them after they're generated with ffmpeg.
For example, I'm writing them to a temp directory output by ffmpeg, and then moving them afterwards to a Cloud Storage bucket in my cloud function:
const uploadResult = await bucket.upload(targetTempFilePath, {
destination: targetStorageFilePath,
gzip: true
});
Not sure which environment you're using (flex, cloud run, etc) but these were the instructions I was referencing, and are generally the same steps you'll want to follow: https://firebase.google.com/docs/storage/extend-with-functions
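Putting those pieces together for the /thumbnail route above, a rough sketch (the tmp directory and generated file names are assumptions, and bucket stands for a @google-cloud/storage bucket handle; ffmpeg writes the screenshots locally, then bucket.upload moves each one to Cloud Storage):
const os = require('os');
const path = require('path');

ffmpeg(req.body.filePath)
  .on('filenames', function (filenames) {
    thumbsFilePath = filenames[0];
  })
  .on('end', async function () {
    // The screenshots are on local disk at this point; copy them into the bucket
    const localPath = path.join(os.tmpdir(), thumbsFilePath);
    await bucket.upload(localPath, {
      destination: `thumbnails/${thumbsFilePath}`,
      gzip: true,
    });
    return res.json({ success: true, thumbsFilePath: `thumbnails/${thumbsFilePath}`, fileDuration: fileDuration });
  })
  .screenshots({
    count: 3,
    folder: os.tmpdir(),
    size: '320x240',
    filename: 'thumbnail-%b.png'
  });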

Upload a file to Amazon S3 with NodeJS

I ran into a problem while trying to upload a file to my S3 bucket. Everything works except that my file parameters do not seem appropriate. I am using the Amazon S3 SDK to upload from Node.js to S3.
These are my route settings:
var multiparty = require('connect-multiparty'),
multipartyMiddleware = multiparty();
app.route('/api/items/upload').post(multipartyMiddleware, items.upload);
This is items.upload() function:
exports.upload = function(req, res) {
var file = req.files.file;
var s3bucket = new AWS.S3({params: {Bucket: 'mybucketname'}});
s3bucket.createBucket(function() {
var params = {
Key: file.name,
Body: file
};
s3bucket.upload(params, function(err, data) {
console.log("PRINT FILE:", file);
if (err) {
console.log('ERROR MSG: ', err);
} else {
console.log('Successfully uploaded data');
}
});
});
};
Setting the Body param to a string like "hello" works fine. According to the docs, the Body param must take (Buffer, Typed Array, Blob, String, ReadableStream) Object data. However, uploading a file object fails with the following error message:
[Error: Unsupported body payload object]
This is the file object:
{ fieldName: 'file',
originalFilename: 'second_fnp.png',
path: '/var/folders/ps/l8lvygws0w93trqz7yj1t5sr0000gn/T/26374-7ttwvc.png',
headers:
{ 'content-disposition': 'form-data; name="file"; filename="second_fnp.png"',
'content-type': 'image/png' },
ws:
{ _writableState:
{ highWaterMark: 16384,
objectMode: false,
needDrain: true,
ending: true,
ended: true,
finished: true,
decodeStrings: true,
defaultEncoding: 'utf8',
length: 0,
writing: false,
sync: false,
bufferProcessing: false,
onwrite: [Function],
writecb: null,
writelen: 0,
buffer: [],
errorEmitted: false },
writable: true,
domain: null,
_events: { error: [Object], close: [Object] },
_maxListeners: 10,
path: '/var/folders/ps/l8lvygws0w93trqz7yj1t5sr0000gn/T/26374-7ttwvc.png',
fd: null,
flags: 'w',
mode: 438,
start: undefined,
pos: undefined,
bytesWritten: 261937,
closed: true },
size: 261937,
name: 'second_fnp.png',
type: 'image/png' }
Any help will be greatly appreciated!
So it looks like there are a few things going wrong here. Based on your post, it looks like you are attempting to support file uploads using the connect-multiparty middleware. What this middleware does is take the uploaded file, write it to the local filesystem, and then set req.files to the uploaded file(s).
The configuration of your route looks fine, the problem looks to be with your items.upload() function. In particular with this part:
var params = {
Key: file.name,
Body: file
};
As I mentioned at the beginning of my answer connect-multiparty writes the file to the local filesystem, so you'll need to open the file and read it, then upload it, and then delete it on the local filesystem.
That said you could update your method to something like the following:
var fs = require('fs');
exports.upload = function (req, res) {
var file = req.files.file;
fs.readFile(file.path, function (err, data) {
if (err) throw err; // Something went wrong!
var s3bucket = new AWS.S3({params: {Bucket: 'mybucketname'}});
s3bucket.createBucket(function () {
var params = {
Key: file.originalFilename, //file.name doesn't exist as a property
Body: data
};
s3bucket.upload(params, function (err, data) {
// Whether there is an error or not, delete the temp file
fs.unlink(file.path, function (err) {
if (err) {
console.error(err);
}
console.log('Temp File Delete');
});
console.log("PRINT FILE:", file);
if (err) {
console.log('ERROR MSG: ', err);
res.status(500).send(err);
} else {
console.log('Successfully uploaded data');
res.status(200).end();
}
});
});
});
};
What this does is read the uploaded file from the local filesystem, upload it to S3, then delete the temporary file and send a response.
There are a few problems with this approach. First off, it's not as efficient as it could be, since for large files you will be loading the entire file into memory before you write it. Secondly, this process doesn't support multi-part uploads for large files (I think the cut-off is 5 MB before you have to do a multi-part upload).
What I would suggest instead is that you use a module I've been working on called S3FS, which provides a similar interface to the native fs module in Node.js but abstracts away some of the details such as multi-part uploads and the S3 API (as well as adding some additional functionality like recursive methods).
If you were to pull in the S3FS library your code would look something like this:
var fs = require('fs'),
S3FS = require('s3fs'),
s3fsImpl = new S3FS('mybucketname', {
accessKeyId: XXXXXXXXXXX,
secretAccessKey: XXXXXXXXXXXXXXXXX
});
// Create our bucket if it doesn't exist
s3fsImpl.create();
exports.upload = function (req, res) {
var file = req.files.file;
var stream = fs.createReadStream(file.path);
return s3fsImpl.writeFile(file.originalFilename, stream).then(function () {
fs.unlink(file.path, function (err) {
if (err) {
console.error(err);
}
});
res.status(200).end();
});
};
What this will do is instantiate the module for the provided bucket and AWS credentials and then create the bucket if it doesn't exist. Then when a request comes through to upload a file we'll open up a stream to the file and use it to write the file to S3 to the specified path. This will handle the multi-part upload piece behind the scenes (if needed) and has the benefit of being done through a stream, so you don't have to wait to read the whole file before you start uploading it.
If you prefer, you could change the code to callbacks from Promises. Or use the pipe() method with the event listener to determine the end/errors.
If you're looking for some additional methods, check out the documentation for s3fs and feel free to open up an issue if you are looking for some additional methods or having issues.
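For completeness, here is a rough sketch of the stream/event-listener flavor mentioned above, using the plain aws-sdk rather than s3fs (s3.upload accepts a ReadableStream as the Body, so the stream's error event plus the upload callback cover the error and end cases):
var fs = require('fs');
var AWS = require('aws-sdk');

var s3bucket = new AWS.S3({params: {Bucket: 'mybucketname'}});

exports.upload = function (req, res) {
  var file = req.files.file;
  var stream = fs.createReadStream(file.path);
  stream.on('error', function (err) { res.status(500).send(err); });
  s3bucket.upload({ Key: file.originalFilename, Body: stream }, function (err, data) {
    // Remove the temp file whether or not the upload succeeded
    fs.unlink(file.path, function () {});
    if (err) { return res.status(500).send(err); }
    res.status(200).end();
  });
};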
I found the following to be a working solution:
npm install aws-sdk
Once you've installed the aws-sdk, use the following code, replacing values with your own where needed.
var AWS = require('aws-sdk');
var fs = require('fs');
var s3 = new AWS.S3();
// Bucket names must be unique across all S3 users
var myBucket = 'njera';
var myKey = 'jpeg';
//for text file
//fs.readFile('demo.txt', function (err, data) {
//for Video file
//fs.readFile('demo.avi', function (err, data) {
//for image file
fs.readFile('demo.jpg', function (err, data) {
if (err) { throw err; }
params = {Bucket: myBucket, Key: myKey, Body: data };
s3.putObject(params, function(err, data) {
if (err) {
console.log(err)
} else {
console.log("Successfully uploaded data to myBucket/myKey");
}
});
});
I found the complete tutorial on the subject here in case you're looking for references:
How to upload files (text/image/video) in amazon s3 using node.js
Or Using promises:
const AWS = require('aws-sdk');
AWS.config.update({
accessKeyId: 'accessKeyId',
secretAccessKey: 'secretAccessKey',
region: 'region'
});
let params = {
Bucket: "yourBucketName",
Key: 'someUniqueKey',
Body: 'someFile'
};
// await is only valid inside an async function, so wrap the call in an async IIFE
(async () => {
try {
const uploadPromise = await new AWS.S3().putObject(params).promise();
console.log("Successfully uploaded data to bucket");
} catch (e) {
console.log("Error uploading data: ", e);
}
})();
Using AWS SDK v3
npm install @aws-sdk/client-s3
Upload code
import { S3Client, PutObjectCommand } from "@aws-sdk/client-s3";
/**
* advisable to save your AWS credentials and configuration in an environment file, not inside the code
* AWS lib will automatically load the AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY if available in your environment
*/
const s3Client = new S3Client({ region: process.env.AWS_S3_REGION });
/**
* upload a file
* @param file the file object to be uploaded
* @param fileKey the file key; could be separated with '/' to nest the file into a folder structure, e.g. members/user1/profile.png
*/
export function uploadFile(file, fileKey){
s3Client.send(new PutObjectCommand({
Bucket: process.env.MY_AWS_S3_BUCKET,
Key: fileKey,
Body: file
}));
}
And if you want to download
import { GetObjectCommand } from "@aws-sdk/client-s3";
/**
* download a file from AWS and send to your rest client
*/
app.get('/download', async function(req, res, next){
var fileKey = req.query['fileKey'];
var bucketParams = {
Bucket: 'my-bucket-name',
Key: fileKey,
};
res.attachment(fileKey);
var fileStream = await s3Client.send(new GetObjectCommand(bucketParams));
// for TS you can add: if (fileStream.Body instanceof Readable)
fileStream.Body.pipe(res)
});
Uploading a file to AWS S3 and sending the URL in the response for accessing the file.
Multer is a Node.js middleware for handling multipart/form-data, which is primarily used for uploading files. It is written on top of busboy for maximum efficiency. Check this npm module here.
When you are sending the request, make sure the headers have Content-Type: multipart/form-data.
We are sending the file location in the response, which gives the URL, but if you want to access that URL, make the bucket public or else you will not be able to access it.
upload.router.js
const express = require('express');
const router = express.Router();
const AWS = require('aws-sdk');
const multer = require('multer');
const storage = multer.memoryStorage()
const upload = multer({storage: storage});
const s3Client = new AWS.S3({
accessKeyId: 'your_access_key_id',
secretAccessKey: 'your_secret_access_id',
region :'ur region'
});
const uploadParams = {
Bucket: 'ur_bucket_name',
Key: '', // pass key
Body: null, // pass file body
};
router.post('/api/file/upload', upload.single("file"),(req,res) => {
const params = uploadParams;
uploadParams.Key = req.file.originalname;
uploadParams.Body = req.file.buffer;
s3Client.upload(params, (err, data) => {
if (err) {
return res.status(500).json({error: "Error -> " + err});
}
res.json({message: 'File uploaded successfully', 'filename': req.file.originalname, 'location': data.Location});
});
});
module.exports = router;
app.js
const express = require('express');
const app = express();
const router = require('./app/routers/upload.router.js');
app.use('/', router);
// Create a Server
const server = app.listen(8080, () => {
console.log("App listening at 8080");
})
Upload CSV/Excel
const fs = require('fs');
const AWS = require('aws-sdk');
const s3 = new AWS.S3({
accessKeyId: XXXXXXXXX,
secretAccessKey: XXXXXXXXX
});
const absoluteFilePath = "C:\\Project\\test.xlsx";
const uploadFile = () => {
fs.readFile(absoluteFilePath, (err, data) => {
if (err) throw err;
const params = {
Bucket: 'testBucket', // pass your bucket name
Key: 'folderName/key.xlsx', // file will be saved in <folderName> folder
Body: data
};
s3.upload(params, function (s3Err, data) {
if (s3Err) throw s3Err
console.log(`File uploaded successfully at ${data.Location}`);
debugger;
});
});
};
uploadFile();
Works for me :)
const fileContent = fs.createReadStream(`${fileName}`);
return new Promise(function (resolve, reject) {
fileContent.once('error', reject);
s3.upload(
{
Bucket: 'test-bucket',
Key: `${fileName + '_' + Date.now().toString()}`,
ContentType: 'application/pdf',
ACL: 'public-read',
Body: fileContent
},
function (err, result) {
if (err) {
reject(err);
return;
}
resolve(result.Location);
}
);
});
var express = require('express')
app = module.exports = express();
var secureServer = require('http').createServer(app);
secureServer.listen(3001);
var aws = require('aws-sdk')
var multer = require('multer')
var multerS3 = require('multer-s3')
aws.config.update({
secretAccessKey: "XXXXXXXXXXXXXXXXXXXXXXXXXXXXX",
accessKeyId: "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX",
region: 'us-east-1'
});
s3 = new aws.S3();
var upload = multer({
storage: multerS3({
s3: s3,
dirname: "uploads",
bucket: "Your bucket name",
key: function (req, file, cb) {
console.log(file);
cb(null, "uploads/profile_images/u_" + Date.now() + ".jpg"); //use
Date.now() for unique file keys
}
})
});
app.post('/upload', upload.single('photos'), function(req, res, next) {
console.log('Successfully uploaded ', req.file)
res.send('Successfully uploaded ' + req.file.originalname)
})
Thanks to David, as his solution helped me come up with my solution for uploading multi-part files from my Heroku-hosted site to an S3 bucket. I did it using formidable to handle the incoming form and fs to get the file content. Hopefully it may help you.
api.service.ts
public upload(files): Observable<any> {
const formData: FormData = new FormData();
files.forEach(file => {
// create a new multipart-form for every file
formData.append('file', file, file.name);
});
return this.http.post(uploadUrl, formData).pipe(
map(this.extractData),
catchError(this.handleError));
}
}
server.js
app.post('/api/upload', upload);
app.use('/api/upload', router);
upload.js
const IncomingForm = require('formidable').IncomingForm;
const fs = require('fs');
const AWS = require('aws-sdk');
module.exports = function upload(req, res) {
var form = new IncomingForm();
const bucket = new AWS.S3(
{
signatureVersion: 'v4',
accessKeyId: process.env.AWS_ACCESS_KEY_ID,
secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
region: 'us-east-1'
}
);
form.on('file', (field, file) => {
const fileContent = fs.readFileSync(file.path);
const s3Params = {
Bucket: process.env.AWS_S3_BUCKET,
Key: 'folder/' + file.name,
Expires: 60,
Body: fileContent,
ACL: 'public-read'
};
bucket.upload(s3Params, function(err, data) {
if (err) {
throw err;
}
console.log('File uploaded to: ' + data.Location);
fs.unlink(file.path, function (err) {
if (err) {
console.error(err);
}
console.log('Temp File Delete');
});
});
});
// The second callback is called when the form is completely parsed.
// In this case, we want to send back a success status code.
form.on('end', () => {
res.status(200).json('upload ok');
});
form.parse(req);
}
upload-image.component.ts
import { Component, OnInit, ViewChild, Output, EventEmitter, Input } from '@angular/core';
import { ApiService } from '../api.service';
import { MatSnackBar } from '@angular/material/snack-bar';
@Component({
selector: 'app-upload-image',
templateUrl: './upload-image.component.html',
styleUrls: ['./upload-image.component.css']
})
export class UploadImageComponent implements OnInit {
public files: Set<File> = new Set();
@ViewChild('file', { static: false }) file;
public uploadedFiles: Array<string> = new Array<string>();
public uploadedFileNames: Array<string> = new Array<string>();
@Output() filesOutput = new EventEmitter<Array<string>>();
@Input() CurrentImage: string;
@Input() IsPublic: boolean;
@Output() valueUpdate = new EventEmitter();
strUploadedFiles:string = '';
filesUploaded: boolean = false;
constructor(private api: ApiService, public snackBar: MatSnackBar,) { }
ngOnInit() {
}
updateValue(val) {
this.valueUpdate.emit(val);
}
reset()
{
this.files = new Set();
this.uploadedFiles = new Array<string>();
this.uploadedFileNames = new Array<string>();
this.filesUploaded = false;
}
upload() {
this.api.upload(this.files).subscribe(res => {
this.filesOutput.emit(this.uploadedFiles);
if (res == 'upload ok')
{
this.reset();
}
}, err => {
console.log(err);
});
}
onFilesAdded() {
var txt = '';
const files: { [key: string]: File } = this.file.nativeElement.files;
for (let key in files) {
if (!isNaN(parseInt(key))) {
var currentFile = files[key];
var sFileExtension = currentFile.name.split('.')[currentFile.name.split('.').length - 1].toLowerCase();
var iFileSize = currentFile.size;
if (!(sFileExtension === "jpg"
|| sFileExtension === "png")
|| iFileSize > 671329) {
txt = "File type : " + sFileExtension + "\n\n";
txt += "Size: " + iFileSize + "\n\n";
txt += "Please make sure your file is in jpg or png format and less than 655 KB.\n\n";
alert(txt);
return false;
}
this.files.add(files[key]);
this.uploadedFiles.push('https://gourmet-philatelist-assets.s3.amazonaws.com/folder/' + files[key].name);
this.uploadedFileNames.push(files[key].name);
if (this.IsPublic && this.uploadedFileNames.length == 1)
{
this.filesUploaded = true;
this.updateValue(files[key].name);
break;
}
else if (!this.IsPublic && this.uploadedFileNames.length == 3)
{
this.strUploadedFiles += files[key].name;
this.updateValue(this.strUploadedFiles);
this.filesUploaded = true;
break;
}
else
{
this.strUploadedFiles += files[key].name + ",";
this.updateValue(this.strUploadedFiles);
}
}
}
}
addFiles() {
this.file.nativeElement.click();
}
openSnackBar(message: string, action: string) {
this.snackBar.open(message, action, {
duration: 2000,
verticalPosition: 'top'
});
}
}
upload-image.component.html
<input type="file" #file style="display: none" (change)="onFilesAdded()" multiple />
<button mat-raised-button color="primary"
[disabled]="filesUploaded" (click)="$event.preventDefault(); addFiles()">
Add Files
</button>
<button class="btn btn-success" [disabled]="uploadedFileNames.length == 0" (click)="$event.preventDefault(); upload()">
Upload
</button>
