Deleting original image once the image URL is updated - node.js

I am using Firebase and, as you can see in the code, I am updating the user's image URL stored in the users collection. Is there a way to delete the old image file that's still being stored in my storage bucket once an image is updated?
exports.uploadImage = (req, res) => {
  const BusBoy = require("busboy")
  const path = require("path")
  const os = require("os")
  const fs = require("fs")

  const busboy = new BusBoy({ headers: req.headers })

  let imageToBeUploaded = {}
  let imageFileName

  busboy.on("file", (fieldname, file, filename, encoding, mimetype) => {
    if (mimetype !== `image/jpeg` && mimetype !== `image/png`) {
      return res.status(400).json({ error: `Not an acceptable file type` })
    }
    // my.image.png => ['my', 'image', 'png']
    const imageExtension = filename.split(".")[filename.split(".").length - 1]
    // 32756238461724837.png
    imageFileName = `${Math.round(
      Math.random() * 1000000000000
    ).toString()}.${imageExtension}`
    const filepath = path.join(os.tmpdir(), imageFileName)
    imageToBeUploaded = { filepath, mimetype }
    file.pipe(fs.createWriteStream(filepath))
  })

  busboy.on("finish", () => {
    admin
      .storage()
      .bucket(config.storageBucket)
      .upload(imageToBeUploaded.filepath, {
        resumable: false,
        metadata: {
          metadata: {
            contentType: imageToBeUploaded.mimetype
          }
        }
      })
      .then(() => {
        const imageUrl = `https://firebasestorage.googleapis.com/v0/b/${config.storageBucket}/o/${imageFileName}?alt=media`
        return db.doc(`/users/${req.user.uid}`).update({ imageUrl })
      })
      .then(() => {
        return res.json({ message: "image uploaded successfully" })
      })
      .catch(err => {
        console.error(err)
        return res.status(500).json({ error: "something went wrong" })
      })
  })

  busboy.end(req.rawBody)
}
Any suggestions would be greatly appreciated

The best way to achieve that is by using a Cloud Function that runs once a new photo upload has been made by your function.
I would recommend you take a look at the article Automatically delete your Firebase Storage Files from Firestore with Cloud Functions for Firebase to get more information on how to perform these automatic deletions with Cloud Functions. Besides that, in this post from the Community you can see how to do it in Node.js.
In these other two posts from the Community, you can get more ideas and insights to achieve this goal; a minimal sketch of such a cleanup function also follows the links below.
Firebase function (written in NodeJS) to delete file from Cloud Storage when an object is removed from Realtime Database
Firebase Storage-How to delete file from storage with node.js?
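The sketch below is only illustrative, assuming firebase-functions v1 syntax, the users/{uid} document path from your code, and the download-URL format your upload handler writes; cleanupOldImage is a hypothetical name.
const functions = require('firebase-functions')
const admin = require('firebase-admin')

exports.cleanupOldImage = functions.firestore
  .document('users/{uid}')
  .onUpdate((change) => {
    const before = change.before.data().imageUrl
    const after = change.after.data().imageUrl
    if (!before || before === after) return null
    // Recover the Storage object name from the old download URL
    // (https://firebasestorage.googleapis.com/v0/b/<bucket>/o/<name>?alt=media).
    const fileName = decodeURIComponent(before.split('/o/')[1].split('?')[0])
    // Pass config.storageBucket here if your default bucket differs.
    return admin.storage().bucket().file(fileName).delete()
  })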
Let me know if the information helped you!

Related

How to upload multiple files in nodejs to AWS S3 and save file url into database?

Hi, I need to upload multiple images at a time to S3.
Currently I am using express-fileupload to upload a single image to AWS, and I want to use the same approach to upload multiple files to S3 and update the images array with their URLs in MongoDB.
My schema property:
const ServiceSchema = new mongoose.Schema(
  {
    photo: [
      {
        type: String,
        default: 'no-photo.jpg',
      },
    ],
  }
);

module.exports = mongoose.model('Service', ServiceSchema);
My Controller:
// @desc    Upload photo for service
// @route   PUT /api/v1/services/:id/photo
// @access  Private
exports.servicePhotoUpload = asyncHandler(async (req, res, next) => {
  const service = await Service.findById(req.params.id);

  if (!service) {
    return next(new ErrorResponse(`Service not found with id of ${req.params.id}`, 404));
  }

  // Make sure user adding service is business owner
  if (service.user.toString() !== req.user.id && req.user.role !== 'admin') {
    return next(
      new ErrorResponse(
        `User ${req.user.id} is not authorized to update this service to business ${service._id}`,
        401
      )
    );
  }

  // File upload validation
  if (!req.files) {
    return next(new ErrorResponse(`Please upload a file.`, 400));
  }

  const file = req.files.file;

  // Make sure it is a valid image file
  if (!file.mimetype.startsWith('image')) {
    return next(new ErrorResponse(`Please upload a valid image file.`, 400));
  }

  // Check file size
  if (file.size > process.env.MAX_FILE_UPLOAD) {
    return next(
      new ErrorResponse(
        `Please upload an image less than ${process.env.MAX_FILE_UPLOAD / 1024}KB in size.`,
        400
      )
    );
  }

  // Create custom filename
  file.name = `service-uploads/servicePhoto_${service._id}${path.parse(file.name).ext}`;

  uploadToS3({
    fileData: req.files.file.data,
    fileName: file.name,
  })
    .then(async (result) => {
      console.log('Success Result: ', result);
      await Service.findByIdAndUpdate(service._id, { photo: result.Location });
      return res
        .status(200)
        .json({ success: true, message: 'Service photo added successfully', url: result.Location });
    })
    .catch((err) => {
      console.log(err);
      return next(new ErrorResponse('Failed to upload file to S3', 500));
    });
});
My Utility File to upload File to S3:
const AWS = require('aws-sdk');

const uploadToS3 = (options) => {
  // Set the AWS configuration
  AWS.config.update({
    accessKeyId: process.env.AWS_S3_ACCESS_KEY,
    secretAccessKey: process.env.AWS_S3_SECRET_KEY,
    region: 'us-east-2',
  });

  // Create S3 service object
  const s3 = new AWS.S3({ apiVersion: '2006-03-01' });

  // Set up S3 upload parameters
  const params = {
    Bucket: 'toolbox-uploads',
    Key: options.fileName, // File name you want to save as in S3
    Body: options.fileData,
  };

  // Return the S3 upload as a promise so the returned URL can be handled properly
  return s3.upload(params).promise();
};

module.exports = uploadToS3;
My Router:
const express = require('express');
const { servicePhotoUpload } = require('../controllers/service');
const Service = require('../models/Service');

const router = express.Router(); // this line was missing from the original snippet

router.route('/:id/photo').put(protect, authorize('publisher', 'business', 'admin'), servicePhotoUpload);

module.exports = router;
The above code is working 100%.
I am a bit confused, as there were different approaches on Google and Stack Overflow and none of them worked for me; none of them gets the returned URL and saves it into the database.
I want to make a separate utility file for uploading multiple files to S3, the same as I did for single files, so I can use it anywhere. That file should return the uploaded URLs so I can update my database.
I have tried multer-s3 but no solution worked for me.
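For what it's worth, here is a minimal sketch of such a utility following the same pattern as the single-file helper above; uploadMultipleToS3 is a hypothetical name, and files is assumed to be an array of express-fileupload file objects (each with name and data).
const uploadToS3 = require('./uploadToS3');

const uploadMultipleToS3 = (files) => {
  // Kick off every upload in parallel and resolve to an array of URLs.
  return Promise.all(
    files.map((file) =>
      uploadToS3({ fileData: file.data, fileName: file.name })
        .then((result) => result.Location) // each upload resolves to its S3 URL
    )
  );
};

module.exports = uploadMultipleToS3;
In the controller you could then normalize req.files.file (express-fileupload hands you a single object for one file and an array for several) with [].concat(req.files.file), await uploadMultipleToS3 on it, and save the resulting array of URLs into the photo field.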
This approach might be different for you but that is how I was able to resolve the same issue.
First you'll need
Multer
multer-s3
aws-sdk
I made a FileUpload class that handles both single and multi-upload (I also needed to be able to upload pdf and video files), and this is the code in my constructor. Note that I also specified the S3 bucket in question from AWS.
this.s3 = new AWS.S3({
  accessKeyId: process.env.S3_ACCESS_KEY_ID,
  secretAccessKey: process.env.S3_SECRET_KEY,
  Bucket: 'name_of_s3_bucket',
});
I created a method called upload in the class. Code below
upload(path, type) {
  let ext = 'jpeg';

  const multerFilter = (req, file, cb) => {
    if (type === 'image') {
      if (file.mimetype.startsWith(this.type)) {
        cb(null, true);
      } else {
        cb(
          new AppError('Not an Image! Please upload only images', 400),
          false
        );
      }
    } else if (type === 'pdf') {
      ext = 'pdf';
      const isPdf = file.mimetype.split('/')[1];
      if (isPdf.startsWith(this.type)) {
        cb(null, true);
      } else {
        cb(
          new AppError('Not a pdf! Please upload only pdf', 400),
          false
        );
      }
    }
  };

  const upload = multer({
    storage: multers3({
      acl: 'public-read',
      s3: this.s3,
      bucket: 'name_of_s3_bucket',
      metadata: function (req, file, cb) {
        cb(null, { fieldName: file.fieldname });
      },
      key: function (req, file, cb) {
        let filename = `user-${
          req.user.id
        }/${path}/${uuid.v4()}-${Date.now()}.${ext}`;
        // eslint-disable-next-line camelcase
        const paths_with_sub_folders = ['auditions', 'biography', 'movies'];
        if (paths_with_sub_folders.includes(path)) {
          filename = `user-${req.user.id}/${path}/${
            req.params.id
          }/${uuid.v4()}-${Date.now()}.${ext}`;
        }
        cb(null, filename);
      },
    }),
    fileFilter: multerFilter,
    limits: {
      fileSize: 5000000,
    },
  });

  return upload;
}
To consume the above, I import the class into any controller where I need an upload feature and call the following.
Side note: ignore the paths code (it was just a way to generate unique file names for the files).
const upload = new FileUpload('image').upload('profile-images', 'image');
exports.uploadUserPhoto = upload.array('photos', 10);
I then used the uploadUserPhoto as a middleware before calling the following
exports.addToDB = catchAsync(async (req, res, next) => {
  if (!req.files) return next();
  req.body.photos = [];
  Promise.all(
    req.files.map(async (file, i) => {
      req.body.photos.push(file.key);
    })
  );
  next();
});
As a high-level overview, this is the flow: first, upload your photos to S3 and get req.files, then loop through that req.files object, pushing each file into an array field on your req object, and finally save them in your DB.
NOTE: You must promisify the req.files loop since the task is asynchronous.
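For reference, a minimal sketch of the same middleware with the promise actually awaited; the map below does no real asynchronous work (file.key is already there), so this only starts to matter once each file needs genuine async processing.
exports.addToDB = catchAsync(async (req, res, next) => {
  if (!req.files) return next();
  // Wait for all per-file work before handing off to the next middleware;
  // file.key is the unique object name multer-s3 generated for each upload.
  req.body.photos = await Promise.all(
    req.files.map(async (file) => file.key)
  );
  next();
});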
My final router looked like this
router
  .route('/:id')
  .put(uploadUserPhoto, addToDB, updateProfile)
Item.js
Your model can have a field called images that's of type array.
const mongoose = require("mongoose");

const ItemSchema = mongoose.Schema({
  images: {
    type: [],
  },
});

module.exports = mongoose.model("Items", ItemSchema);
You map through the array of objects and extract only the data you want to store; in this example it is the key, which is the unique name given to every image that's uploaded.
route.js
router.post("/", verify, upload.array("image"), async (req, res) => {
  const { files } = req;
  const images = [];
  files.map((file) => {
    images.push(file.key);
  });
  try {
    // await so a failed save actually lands in the catch block
    await new Item({
      images,
    }).save();
    res.status(200).send({ message: "saved images to db" });
  } catch (err) {
    res.status(400).send({ message: err });
  }
});
Let me know if this does what you wanted

application/octet-stream issue while using google moderate images trigger (blur image)

I'm using the moderate images solution trigger from Google.
I took this solution from here.
I asked someone to upgrade this solution for me, and here is the code:
'use strict'

const gm = require('gm').subClass({ imageMagick: true })
const functions = require('firebase-functions')
const admin = require('firebase-admin')
admin.initializeApp()
const Vision = require('@google-cloud/vision')
const vision = new Vision.ImageAnnotatorClient()
const spawn = require('child-process-promise').spawn
const path = require('path')
const fs = require('fs')
const { Storage } = require('@google-cloud/storage')
const gcs = new Storage({
  projectId: 'xxxxxxxxxxx',
})
exports.blurOffensiveImages = functions.storage
  .object()
  .onFinalize(async (object) => {
    const file = gcs.bucket(object.bucket).file(object.name)
    const filePath = `gs://${object.bucket}/${object.name}`
    console.log(`Analyzing ${file.name}.`)
    try {
      const [result] = await vision.safeSearchDetection(filePath)
      const detections = result.safeSearchAnnotation || {}
      if (
        detections.adult === 'VERY_LIKELY' ||
        detections.violence === 'VERY_LIKELY'
      ) {
        console.log(`Detected ${file.name} as inappropriate.`)
        await blurImage(file, object.bucket, object.metadata)
        console.log('Deleted local file', file)
        return null
      } else {
        console.log(`Detected ${file.name} as OK.`)
      }
    } catch (err) {
      console.error(`Failed to analyze ${file.name}.`, err)
      throw err
    }
  })
async function blurImage(file, bucketName, metadata) {
  const tempLocalPath = `/tmp/${path.parse(file.name).base}`
  const bucket = gcs.bucket(bucketName)

  await file.download({ destination: tempLocalPath })
  console.log('The file has been downloaded to', tempLocalPath)

  // Blur the image using ImageMagick.
  await new Promise((resolve, reject) => {
    gm(tempLocalPath)
      .blur(0, 20)
      .write(tempLocalPath, (err, stdout) => {
        if (err) {
          console.error('Failed to blur image.', err);
          reject(err);
        } else {
          console.log(`Blurred image: ${file.name}`);
          resolve(stdout);
        }
      });
  });
  console.log('Blurred image created at', tempLocalPath)

  await bucket.upload(tempLocalPath, {
    destination: file.name,
    metadata: { metadata: metadata },
  })
  console.log('Blurred image uploaded to Storage at', file)

  return fs.unlink(tempLocalPath, (e) => { if (e) { console.log(e) } })
}
And it worked perfectly, with one bad issue.
Sometimes when a user sends a list of photos, I get the "application/octet-stream" file type, but it should be "image/jpg"; all media files in my project should be image/jpg.
[one user's publication with an error in the image data type]
It looks like this trigger gets stuck while executing.
I added a delay to the image uploads in my project, but it doesn't help.
I tested it: when I delete this trigger, all photos upload fine with no issues at all.
Help me fix it.
P.S. I also want to say that after uploading, the image should keep all the data of the original (destination, name, etc.).
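A detail worth checking is the re-upload step inside blurImage: it forwards only the custom metadata map and never sets a content type, so the type of the blurred copy has to be guessed. Below is a sketch, not a confirmed fix, assuming object.contentType is passed into blurImage alongside the metadata.
await bucket.upload(tempLocalPath, {
  destination: file.name,
  metadata: {
    contentType: contentType, // e.g. 'image/jpeg', taken from the onFinalize payload
    metadata: metadata,       // keep the custom metadata as before
  },
})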

configuring the image file reducer Google Firestore

The following Google Cloud Function properly uploads an image, but I would also like to compress the image to avoid unnecessary charges from large files being uploaded. I am using the image reducer extension from Firebase and it works, but the issue is that the image file no longer shows up in my users collection. Is there something I need to configure in the extension so that the image URL in the user document is overwritten by the reduced image?
exports.uploadImage = (req, res) => {
  const BusBoy = require("busboy")
  const path = require("path")
  const os = require("os")
  const fs = require("fs")

  const busboy = new BusBoy({ headers: req.headers })

  let imageToBeUploaded = {}
  let imageFileName

  busboy.on("file", (fieldname, file, filename, encoding, mimetype) => {
    if (mimetype !== `image/jpeg` && mimetype !== `image/png`) {
      return res.status(400).json({ error: `Not an acceptable file type` })
    }
    // my.image.png => ['my', 'image', 'png']
    const imageExtension = filename.split(".")[filename.split(".").length - 1]
    // 32756238461724837.png
    imageFileName = `${Math.round(
      Math.random() * 1000000000000
    ).toString()}.${imageExtension}`
    const filepath = path.join(os.tmpdir(), imageFileName)
    imageToBeUploaded = { filepath, mimetype }
    file.pipe(fs.createWriteStream(filepath))
  })

  busboy.on("finish", () => {
    admin
      .storage()
      .bucket(config.storageBucket)
      .upload(imageToBeUploaded.filepath, {
        resumable: false,
        metadata: {
          metadata: {
            contentType: imageToBeUploaded.mimetype
          }
        }
      })
      .then(() => {
        const imageUrl = `https://firebasestorage.googleapis.com/v0/b/${config.storageBucket}/o/${imageFileName}?alt=media`
        return db.doc(`/users/${req.user.uid}`).update({ imageUrl })
      })
      .then(() => {
        return res.json({ message: "image uploaded successfully" })
      })
      .catch(err => {
        console.error(err)
        return res.status(500).json({ error: "something went wrong" })
      })
  })

  busboy.end(req.rawBody)
}
The Resize Images extension only handles the resizing of the image in Cloud Storage. It does not update data in any other location, including Cloud Firestore. If you need such functionality, you'll need to create it yourself.
Also see:
this discussion on the extension's open-source repo about allowing a callback to be specified that gets invoked after resizing.
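For illustration, one way to wire that up yourself is a small Storage trigger that watches for the extension's output and rewrites the Firestore document. This is only a sketch under two assumptions not shown in the question: the extension writes resized files with a _200x200 suffix into the same bucket, and the uploading user's uid was attached as custom metadata on the original upload.
const functions = require('firebase-functions')
const admin = require('firebase-admin') // assumes admin.initializeApp() ran elsewhere

exports.onImageResized = functions.storage.object().onFinalize((object) => {
  // React only to the extension's resized output, not the original upload.
  if (!object.name.includes('_200x200')) return null
  // uid as custom metadata is an assumption; it must be set at upload time.
  const uid = object.metadata && object.metadata.uid
  if (!uid) return null
  const imageUrl = `https://firebasestorage.googleapis.com/v0/b/${object.bucket}/o/${encodeURIComponent(object.name)}?alt=media`
  return admin.firestore().doc(`users/${uid}`).update({ imageUrl })
})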

Busboy seems to be causing an internal server error when requiring it in my uploadImage function for Firebase

I've spent pretty much all day trying to figure out how to successfully upload photos into my storage and database, and so far I can't even get one to succeed!
Essentially, I'm using Postman to send a POST request with an image file attached. I think I hit all the points: I have the content type set to multipart/form-data, the body tab set to form-data, and the row with my file set to File and not Text.
[Photo of Postman request and error]
And this internal server error is the issue that I can't resolve.
Now, in my function below, I require busboy, and that point is where the internal server error occurs; if I place a return before it, then it returns, but if I place a return after this declaration, the error occurs.
const { admin, db } = require('../util/admin');
const config = require("../util/config");

...

exports.uploadImage = (req, res) => {
  // res.send("this worked"); // everything works up to this point
  const Busboy = require("busboy");
  const path = require("path");
  const os = require("os");
  const fs = require("fs");

  const busboy = new Busboy({ headers: req.headers });

  let imageToBeUploaded = {};
  let imageFileName;

  busboy.on("file", (fieldname, file, filename, encoding, mimetype) => {
    console.log(fieldname, file, filename, encoding, mimetype);
    if (mimetype !== "image/jpeg" && mimetype !== "image/png") {
      return res.status(400).json({ error: "Wrong file type submitted" });
    }
    // my.image.png => ['my', 'image', 'png']
    const imageExtension = filename.split(".")[filename.split(".").length - 1];
    // 32756238461724837.png
    imageFileName = `${Math.round(
      Math.random() * 1000000000000
    ).toString()}.${imageExtension}`;
    const filepath = path.join(os.tmpdir(), imageFileName);
    imageToBeUploaded = { filepath, mimetype };
    file.pipe(fs.createWriteStream(filepath));
  });

  busboy.on("finish", () => {
    admin
      .storage()
      .bucket()
      .upload(imageToBeUploaded.filepath, {
        resumable: false,
        metadata: {
          metadata: {
            contentType: imageToBeUploaded.mimetype
          }
        }
      })
      .then(() => {
        const images = `https://firebasestorage.googleapis.com/v0/b/${config.storageBucket}/o/${imageFileName}?alt=media`;
        return db.doc(`/posts/${req.params.postId}`).update({ images });
      })
      .then(() => {
        return res.json({ message: "image uploaded successfully" });
      })
      .catch(err => {
        console.error(err);
        return res.status(500).json({ error: "something went wrong" });
      });
  });

  busboy.end(req.rawBody);
};
And here's my index file included
const functions = require('firebase-functions');
const app = require('express')();
const FBAuth = require('./util/fbAuth')
const { getAllPosts, createOnePost, getThePost, deletePost, uploadImage } = require('./handlers/posts');
const { login } = require('./handlers/users');
// Posts Routes
app.get('/posts', getAllPosts);
app.get('/post/:postId', getThePost);
app.post("/post", FBAuth, createOnePost);
app.delete('/post/:postId', FBAuth, deletePost);
app.post('/post/:postId/image', FBAuth, uploadImage);
//TODO update post
// Login Route
app.post('/login', login)
exports.api = functions.https.onRequest(app)
It seems there's something going on when I declare busboy in my function that is causing the error, and I have no idea why.
I should mention that the code seemed to run when I was using localhost with "$ firebase serve", but the JPEG images weren't really showing up in Firebase Storage.
I really appreciate any help you all can offer, and please feel free to ask for more information!
I don't see anything in the code that would be causing an internal server error. I imagine you don't actually have busboy installed with npm. Have you gone into your functions folder and run "npm install busboy"?

Upload a file to Google Cloud, in a specific directory

How to upload a file on Google Cloud, in a specific bucket directory (e.g. foo)?
"use strict";
const gcloud = require("gcloud");

const PROJECT_ID = "<project-id>";

let storage = gcloud.storage({
  projectId: PROJECT_ID,
  keyFilename: 'auth.json'
});

let bucket = storage.bucket(`${PROJECT_ID}.appspot.com`)

bucket.upload("1.jpg", (err, file) => {
  if (err) { return console.error(err); }
  let publicUrl = `https://firebasestorage.googleapis.com/v0/b/${PROJECT_ID}.appspot.com/o/${file.metadata.name}?alt=media`;
  console.log(publicUrl);
});
I tried:
bucket.file("foo/1.jpg").upload("1.jpg", ...)
But there's no upload method there.
How can I send 1.jpg in the foo directory?
In Firebase, on the client side, I do:
ref.child("foo").put(myFile);
bucket.upload("1.jpg", { destination: "YOUR_FOLDER_NAME_HERE/1.jpg" }, (err, file) => {
  // Do something...
});
This will put 1.jpg in the YOUR_FOLDER_NAME_HERE-folder.
Here is the documentation. By the way, gcloud is deprecated and you should use google-cloud instead.
UPDATE 2020
According to the Google documentation:
const { Storage } = require('@google-cloud/storage');

const storage = new Storage()
const bucket = storage.bucket('YOUR_GCLOUD_STORAGE_BUCKET')
const blob = bucket.file('yourFolder/' + 'yourFileName.jpg')

const blobStream = blob.createWriteStream({
  resumable: false,
  gzip: true,
  public: true
})

blobStream.on('error', (err) => {
  console.log('Error blobStream: ', err)
});

blobStream.on('finish', () => {
  // The public URL can be used to directly access the file via HTTP.
  const publicUrl = 'https://storage.googleapis.com/' + bucket.name + '/' + blob.name
  res.status(200).send(publicUrl);
});

blobStream.end(req.file.buffer) // req.file is your original file
Here you go...
const options = {
  destination: 'folder/new-image.png',
  resumable: true,
  validation: 'crc32c',
  metadata: {
    metadata: {
      event: 'Fall trip to the zoo'
    }
  }
};

bucket.upload('local-image.png', options, function(err, file) {
  // Your bucket now contains:
  // - "folder/new-image.png" (with the contents of `local-image.png`)
  // `file` is an instance of a File object that refers to your new file.
});
If accessing from the same project, projectId, keyFilename, etc. are not required. I use the code below for both upload and download; it works fine.
// Imports the Google Cloud client library
const Storage = require('@google-cloud/storage');
const storage = new Storage();

var destFilename = "./test";
var bucketName = 'cloudtesla';
var srcFilename = 'test';

const options = {
  destination: destFilename,
};

// Upload file
console.log("upload Started");
storage.bucket(bucketName).upload(srcFilename, {}, (err, file) => {
  if (!err)
    console.log("upload Completed");
  else
    console.log(err);
});

// Download file
console.log("Download Started");
storage
  .bucket(bucketName)
  .file(srcFilename)
  .download(options)
  .then(() => {
    console.log("Download Completed");
  })
  .catch(err => {
    console.error('ERROR:', err);
  });
To upload inside a specific directory in .NET Core, use
var uploadResponse = await storageClient.UploadObjectAsync(bucketName, $"{foldername}/" + fileName, null, memoryStream);
This should upload your file 'fileName' inside the folder 'foldername' in the bucket.
I think just adding foo/ to the filename should work, like bucket.upload("foo/1.jpg", (err, file) => ...). In GCS, directories are just a matter of having a '/' in the file name.
If you want to use async/await while uploading files into storage buckets, the callbacks won't do the job. Here's how I did it:
async function uploadFile() {
  const destPath = 'PATH_TO_STORAGE/filename.extension';
  await storage.bucket("PATH_TO_YOUR_BUCKET").upload(newFilePath, {
    gzip: true,
    destination: destPath,
  });
}
Hope it helps someone!
